I have the following client-side JavaScript code.
<html>
<script type="text/javascript" src="js/jquery.min.js"></script>
<script>
$(document).ready(function() {
    //var parameters = "a=" + JSON.stringify({ Code: "xyyyzz"});
    var parameters = "a=" + JSON.stringify({ Code: "x#yyy#zz"});
    alert(parameters);
    $.getJSON('http://localhost:8080', parameters)
        .done(function(str) {
            alert("success");
        })
        .fail(function(e) {
            alert("failure");
        });
});
</script>
</html>
and the following server-side Node.js code:
var http = require('http');
var URL = require('url');
var queryString = require("querystring");

handler = function(req, res) {
    url = URL.parse(req.url);
    var queryObj = queryString.parse(url.query);
    var obj = JSON.parse(queryObj.a);
    console.log(obj.Code);
};

host = '127.0.0.1';
port = 8080;
http.createServer(handler).listen(port, host);
console.log('Server running at http://' + host + ':' + port + '/');
Now, when I load the .html file, the JSON call is made from the JavaScript. However, the parameter is not passed in full: it is passed only up to the first # character, which causes the Node.js server to crash in the parse function. If I instead uncomment the previous line in the script, where a string without the # is passed, the Node.js server parses the parameters successfully.
So my question is: is JSON incapable of encoding special characters like # in a string? Or is this a bug? Or do I need to add something else so that x#yyy#zz is passed as a whole string to the Node.js server?
Your data needs to be URL-encoded; # has a special meaning in URLs (it starts the fragment identifier, so everything after it never reaches the server).
If you pass an object to $.getJSON, it will be automatically encoded for you:
var parameters = {"a" : JSON.stringify({ Code: "x#yyy#zz"})};
or you could encode it manually:
var parameters = "a=" + encodeURIComponent(JSON.stringify({ Code: "x#yyy#zz"}));
The URL that the getJSON request is sent to definitely exists, but the request fails with a 404 error. The URL just hosts a JSON object: here. I've also tried the same request with the destination replaced by a local JSON file hosted in the same directory, which also fails with a 404 error. I'm guessing this means that the error is either with my getJSON request or with my node.js server configuration.
This is the function that makes the getJSON call:
function loginFunction(){
    //check browser support
    if(typeof(Storage) !== "undefined"){
        //store dat shit
        sessionStorage.setItem("username", document.getElementById('username').value);
        sessionStorage.setItem("password", document.getElementById('password').value);
        $(document).ready(function(){
            $.getJSON(createUsernameURL(), function(data){
                console.log(data);
                var responseUsername = data.username;
                document.getElementById("unresult").innerHTML = responseUsername;
                var responsePassword = data.password;
                document.getElementById("pwresult").innerHTML = responsePassword;
            });
        });
    }else{
        document.getElementById("pwresult").innerHTML = "your browser is out of date";
    }
}
And this is the config file for my Node.js server:
const http = require('http');
const express = require('express');
const app = express();

app.listen(3000, function(){
    console.log(__dirname)
});

app.get('/', (req,res) => {
    res.sendFile(__dirname + '/index.html');
});

app.use("/static", express.static(__dirname + '/static'));
The createUsernameURL() function just appends a couple of pieces of user-entered information to a base URL, but even hard-coding the exact database link mentioned above gives the same issues.
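Since createUsernameURL() isn't shown, a hypothetical reconstruction based on that description might look like this (the base URL and parameter names are guesses, not the actual values):
// Hypothetical sketch -- the real base URL and parameter names are not
// shown in the question.
function createUsernameURL(){
    var base = "http://localhost:3000/static/users.json";
    return base + "?username=" + encodeURIComponent(sessionStorage.getItem("username")) +
        "&password=" + encodeURIComponent(sessionStorage.getItem("password"));
}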
So I'm trying to write a basic file server in Node.js, and all the images I've tried uploading and storing on it are coming back as corrupted. The problem seems to have something to do with the way that Node Buffers handle being converted to UTF-8 and back again (which I have to do in order to get the POST body headers out and away from the binary data).
Here's a simple Node server that shows my current approach and the problems I've been having:
var http = require('http');

var server = http.createServer(function(request, response) {
    if (request.method === "GET") {
        // on GET request, output a simple web page with a file upload form
        var mypage = '<!doctype html><html><head><meta charset="utf-8">' +
            '<title>Submit POST Form</title></head>\r\n<body>' +
            '<form action="http://127.0.0.1:8008" method="POST" ' +
            'enctype="multipart/form-data"> <input name="upload" ' +
            'type="file"><p><button type="submit">Submit</button>' +
            '</p></form></body></html>\r\n';
        response.writeHead(200, {
            "Content-Type": "text/html",
            "Content-Length": mypage.length
        });
        response.end(mypage);
    } else if (request.method === "POST") {
        // if we have a return post request, let's capture it
        var upload = new Buffer([]);
        // get the data
        request.on('data', function(chunk) {
            // copy post data
            upload = Buffer.concat([upload, chunk]);
        });
        // when we have all the data
        request.on('end', function() {
            // convert to UTF8 so we can pull out the post headers
            var str = upload.toString('utf8');
            // get post headers with a regular expression
            var re = /(\S+)\r\nContent-Disposition:\s*form-data;\s*name="\w+";\s*filename="[^"]*"\r\nContent-Type: (\S+)\r\n\r\n/i,
                reMatch = str.match(re);
            var lengthOfHeaders = reMatch[0].length,
                boundary = reMatch[1],
                mimeType = reMatch[2];
            // slice headers off top of post body
            str = str.slice(lengthOfHeaders);
            // remove the end boundary
            str = str.replace("\r\n" + boundary + "--\r\n", '');
            // convert back to buffer
            var rawdata = new Buffer(str, 'utf8');
            // echo back to client
            response.writeHead(200, {
                "Content-Type": mimeType
            });
            response.end(rawdata);
        });
    }
});

server.listen(8008);
console.log("server running on port 8008");
To test it, run the script in node and go to 127.0.0.1:8008 in your browser. Try uploading an image and submitting the form. The image comes back corrupted every time, even though the script should just be echoing the image data directly back to the browser.
So does anyone know what I'm doing wrong here? Is there a better way to handle POST body headers in Node that I haven't figured out yet? (And before anyone says anything: no, I don't want to use Express. I want to figure out and understand this problem.)
The problem seems to have something to do with the way that Node Buffers handle being converted to UTF-8 and back again
I guess you are right about that: converting to UTF-8 is a bad idea. You can still do it just to inspect the file and find the positions of the headers and boundaries, but keep the original file buffer untouched; once you have the positions, cut the headers and boundary out of the file by copying the buffer into a new buffer, like this:
originalBuffer.copy(newBuffer, 0, positionHeader, positionEndBoundary)
var http = require('http');
var fs = require('fs');
var connections = 0;

var server = http.createServer(function (req, res) {
    connections++;
    console.log(req.url, "connections: " + connections);
    if (req.url == '/') {
        res.writeHead(200, { 'content-type': 'text/html' });
        res.end(
            '<form action="/upload" enctype="multipart/form-data" method="post">' +
            '<input type="file" name="upload" multiple="multiple"><br>' +
            '<input type="submit" value="Upload">' +
            '</form>'
        );
    }
    var body = Buffer.alloc(0);
    if (req.url == '/upload') {
        req.on('data', function (chunk) {
            body = Buffer.concat([body, chunk]);
            if (isImage(body.toString())) {
                console.log("is an image of type " + isImage(body.toString()));
            } else {
                console.log("Not an image");
                res.end("Not an image");
            }
            console.log(body.length, body.toString().length);
        });
        req.on('end', function () {
            // I convert the buffer to UTF-8 but keep the original buffer;
            // the string is only used to find the positions where the
            // headers and boundaries should be cut
            var str = body.toString();
            console.log(str.length);
            var imageType = isImage(body.toString());
            // get the index of the last header character
            var index = str.indexOf(imageType) + (imageType + "\r\n\r\n").length;
            // Here comes the trick:
            /*
             * I have to cut off the trailing boundary, so I use lastIndexOf
             * to find it. Decoding the raw bytes as UTF-8 can change the
             * apparent length (multi-byte and invalid sequences collapse
             * into single characters), so I add the difference
             * (body.length - str.length) to translate the string index
             * back into a buffer offset.
             */
            var indexBoundaryToBuffer = str.lastIndexOf('------WebKitFormBoundary') + (body.length - str.length);
            console.log(index, indexBoundaryToBuffer);
            // maybe you can change this to use less memory
            var newBuffer = Buffer.alloc(body.length);
            // now use index and indexBoundaryToBuffer and you will have only the binary
            body.copy(newBuffer, 0, index, indexBoundaryToBuffer);
            // file type
            var type = imageType.substr("image/".length);
            console.log("END");
            fs.writeFile("nameFile." + type, newBuffer, function (err) {
                if (err) {
                    console.log(err);
                    return false;
                }
                res.end();
            });
        });
    }
});

function isImage(str) {
    if (str.indexOf('image/png') != -1) return 'image/png';
    else if (str.indexOf('image/jpeg') != -1) return 'image/jpeg';
    else if (str.indexOf('image/bmp') != -1) return 'image/bmp';
    else if (str.indexOf('image/gif') != -1) return 'image/gif';
    else return false;
}

var port = process.env.PORT || 8080;
server.listen(port, function () {
    console.log('Recording connections on port %s', port);
});
You really shouldn't use regular expressions like that to parse multipart payloads; it can easily make parsing your image data unreliable. There are modules on npm that parse forms for you, such as busboy, multiparty, or formidable. None of them use regular expressions, and they don't require Express.
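As a minimal sketch of that approach, assuming busboy 1.x (older versions used new Busboy(...) and a 'finish' event, so check the README for your version):
var http = require('http');
var fs = require('fs');
var busboy = require('busboy');

http.createServer(function (req, res) {
    if (req.method === 'POST') {
        var bb = busboy({ headers: req.headers });
        bb.on('file', function (name, file, info) {
            // stream each file part straight to disk; the binary data is
            // never round-tripped through a string
            file.pipe(fs.createWriteStream('./upload-' + info.filename));
        });
        bb.on('close', function () {
            res.writeHead(200, { 'Content-Type': 'text/plain' });
            res.end('upload complete');
        });
        req.pipe(bb);
    }
}).listen(8008);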
I'm creating an HTTP server, and inside it I'm sending a request to the Yahoo Finance website and getting some data from it. What I want to do is print the data I got from Yahoo Finance to the browser.
The thing is that response.write isn't working inside the request callback.
Here is my code:
var http = require('http');
var request = require('request');
var cheerio = require('cheerio');
var util = require('util');
var host = "127.0.0.1";
var port = 1400;
var server = http.createServer(function (req, res) {
    // writing the headers of our response
    res.writeHead(200, {'Content-Type':'text/plain'});

    // Variable declaration
    // TODO: move out of the global scope
    var ticker = "IBM";
    var yUrl = "http://finance.yahoo.com/q/ks?s=" + ticker;
    var keyStr = new Array();

    //
    // The main call to fetch the data, parse it and work on it.
    //
    request(yUrl, function (error, response, body) {
        if (!error && response.statusCode == 200) {
            var $ = cheerio.load(body);
            // the keys - we get them from a certain class attribute
            var span = $('.time_rtq_ticker>span');
            stockValue = $(span).text();
            res.write("trying to print something");
            console.log("Stock - " + ticker + " --> text " + stockValue);
        }
    }); // -- end of request --

    res.write('Welcome to StockWach\n');
    // printing the last line back to the client
    res.end('end of demo');
});

server.listen(port, host, function () {
    console.log("Listening : " + host + ":" + port);
});
You have to end the response (res.end();). Almost all browsers buffer some number of bytes of the response before showing anything, so you won't see "trying to print something" until the response has ended.
If you use something like cURL, though, you will see "trying to print something" right away, before the response is ended.
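Note that in the code above, res.end('end of demo') runs before the asynchronous request callback fires, so the res.write inside the callback happens after the response is already ended. A minimal sketch of a fix, assuming you want to respond only once the Yahoo data is back:
request(yUrl, function (error, response, body) {
    res.write('Welcome to StockWach\n');
    if (!error && response.statusCode == 200) {
        var $ = cheerio.load(body);
        var stockValue = $('.time_rtq_ticker>span').text();
        res.write("Stock - " + ticker + " --> text " + stockValue + "\n");
    }
    res.end('end of demo'); // end only after all writes have happened
});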
I'm using Node's request module.
The response I get is gzipped (or otherwise encoded).
How can I:
1. build the request so that the response is not encoded?
2. decode the response?
The data is coming from http://api.stackexchange.com.
var zlib = require('zlib');
var stackRequest = require('request');

var apikey = '<MyKey>';
var fromdate = '1359417601';
var tagged = 'node.js';

stackRequest({
    method: 'GET',
    uri: 'http://api.stackexchange.com/2.1/questions?key=' + apikey +
        '&site=stackoverflow&fromdate=' + fromdate + '&order=desc&' +
        'sort=activity&tagged=' + tagged + '&filter=default'
}, function(err, response, body) {
    console.log(response.body); // How can I decode this?
});
The encoding has nothing to do with request. Stack Overflow's API always returns gzip-encoded data, as explained in the API documentation. You need to use Node's zlib module to unzip the contents. This is a simple example:
var zlib = require('zlib');
// Other code
, function(err, response, body) {
    zlib.gunzip(body, function(err, data){
        console.log(data);
    });
});
The main downside of this approach is that it forces the request module to buffer the entire response content into one body. Instead, you should normally use Node's stream system to pipe the data from the request directly through the unzipping library, so that you use less memory. You'll still need to join the parts together to parse the JSON, but it is still better:
var zlib = require('zlib');
var request = require('request');

var apikey = '<MyKey>';
var fromdate = '1359417601';
var tagged = 'node.js';

var compressedStream = request('http://api.stackexchange.com/2.1/questions?' +
    'key=' + apikey + '&site=stackoverflow&fromdate=' + fromdate +
    '&order=desc&sort=activity&tagged=' + tagged + '&filter=default');

var decompressedStream = compressedStream.pipe(zlib.createGunzip());

var chunks = [];
decompressedStream.on('data', function(chunk){
    chunks.push(chunk);
});
decompressedStream.on('end', function(){
    var body = Buffer.concat(chunks);
    var data = JSON.parse(body);
    // Do your thing
});
First, set Accept-Encoding: identity as a request header. If Stack Exchange doesn't send the data as regular UTF-8 then, it's a bug on their end.
Secondly, you want to set the encoding to UTF-8 so the response isn't a Buffer.
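A rough sketch of that approach with the request module (the endpoint here is just an example; note that newer versions of request also support a gzip: true option that handles the decompression for you):
var request = require('request');

request({
    uri: 'http://api.stackexchange.com/2.1/info?site=stackoverflow',
    headers: { 'Accept-Encoding': 'identity' }, // ask for uncompressed data
    encoding: 'utf8' // give me a string, not a Buffer
}, function(err, response, body) {
    console.log(body); // plain JSON text, if the server honors the header
});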
Using Node v0.2.0 I am trying to fetch an image from a server, convert it into a base64 string and then embed it on the page in an image tag. I have the following code:
var express = require('express'),
    request = require('request'),
    sys = require('sys');

var app = express.createServer(
    express.logger(),
    express.bodyDecoder()
);

app.get('/', function(req, res){
    if(req.param("url")) {
        var url = unescape(req.param("url"));
        request({uri:url}, function (error, response, body) {
            if (!error && response.statusCode == 200) {
                var data_uri_prefix = "data:" + response.headers["content-type"] + ";base64,";
                var buf = new Buffer(body);
                var image = buf.toString('base64');
                image = data_uri_prefix + image;
                res.send('<img src="'+image+'"/>');
            }
        });
    }
});

app.listen(3000);
Note: This code requires "express" and "request". And of course, node. If you have npm installed, it should be as simple as "npm install express" or "npm install request".
Unfortunately, this doesn't work as expected. If I do the conversion with the Google logo, then I get the following at the beginning of the string:
77+9UE5HDQoaCgAAAA1JSERSAAABEwAAAF8IAwAAAO+/ve+/ve+/vSkAAAMAUExURQBzCw5xGiNmK0t+U++/vQUf77+9BiHvv70WKO+/vQkk77+9D
However, if I use an online Base64 encoder with the same image, it works perfectly. The string starts like this:
iVBORw0KGgoAAAANSUhEUgAAARMAAABfCAMAAAD8mtMpAAADAFBMVEUAcwsOcRojZitLflOWBR+aBiGQFiipCSS8DCm1Cya1FiyNKzexKTjDDSrLDS
Where am I going wrong that this isn't working correctly? I have tried so many different JS base64 implementations, and they all fail in the same way. The only thing I can think of is that I am trying to convert the wrong thing into base64, but what should I convert if that is the case?
The problem is encoding and storing binary data in JavaScript strings. There's a pretty good section on this under Buffers at http://nodejs.org/api.html.
Unfortunately, the easiest way to fix this involved changing the request npm module. I had to add response.setEncoding('binary'); on line 66, just below var buffer;, in /path/to/lib/node/.npm/request/active/package/lib/main.js. This will work fine for this request, but not for others. You might want to hack it so that this is only set based on some other passed option.
I then changed var buf = new Buffer(body) to var buf = new Buffer(body, 'binary');. After this, everything worked fine.
Another way to do this, if you really didn't want to touch the request npm module, would be to pass an object that implements a writable stream as the responseBodyStream argument to request. This object would then store the streamed data from the response in its own buffer. Maybe there is a library that does this already... I'm not sure.
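As a rough sketch of that idea (this collector is hypothetical, not an existing library; the responseBodyStream argument is from the old request API of that era, which only needs an object with write() and end()):
// Hypothetical duck-typed writable collector.
function BufferCollector(callback) {
    this.chunks = [];
    this.callback = callback;
}
BufferCollector.prototype.write = function (chunk) {
    // keep the raw Buffer chunks; never convert them to a string
    this.chunks.push(chunk);
    return true;
};
BufferCollector.prototype.end = function () {
    var total = 0, i;
    for (i = 0; i < this.chunks.length; i++) total += this.chunks[i].length;
    var buf = new Buffer(total), offset = 0;
    for (i = 0; i < this.chunks.length; i++) {
        this.chunks[i].copy(buf, offset);
        offset += this.chunks[i].length;
    }
    this.callback(buf); // hand back one binary-safe Buffer
};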
I'm going to leave it here for now, but feel free to comment if you want me to clarify anything.
EDIT
Check out comments. New solution at http://gist.github.com/583836
The following code (available at https://gist.github.com/804225)
var URL = require('url'),
    sURL = 'http://nodejs.org/logo.png',
    oURL = URL.parse(sURL),
    http = require('http'),
    client = http.createClient(80, oURL.hostname),
    request = client.request('GET', oURL.pathname, {'host': oURL.hostname});

request.end();

request.on('response', function (response) {
    var type = response.headers["content-type"],
        prefix = "data:" + type + ";base64,",
        body = "";

    response.setEncoding('binary');

    response.on('data', function (chunk) {
        if (response.statusCode == 200) body += chunk;
    });

    response.on('end', function () {
        var base64 = new Buffer(body, 'binary').toString('base64'),
            data = prefix + base64;
        console.log(data);
    });
});
should also produce a data URI without requiring any external modules.
This works for me using request:
const request = require('request');

const url = 'http://host/image.png';
request.get({ url: url, encoding: null }, (err, res, body) => {
    if (!err) {
        const type = res.headers["content-type"];
        const prefix = "data:" + type + ";base64,";
        const base64 = body.toString('base64');
        const dataUri = prefix + base64;
    }
});
No need for any intermediate buffers. The key is to set encoding to null, which makes request hand you the raw body as a Buffer.
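From there you can embed the image the same way the original question does:
// e.g. inside the Express handler from the question
res.send('<img src="' + dataUri + '"/>');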