How to get POST parameters via node.js - javascript

I am kind of stuck. I made a request to a server. I want the server to compute an operation for me and send me back the result.
Request:
var http = require('http');
http.createServer(function(req, res){
var response = "Hello from " + req.client.remoteAddress + ":" + req.client.remotePort + "\n to " + req.client.localAddress + ":" + req.client.localPort;
console.log(response);
var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;
const Http = new XMLHttpRequest();
console.log("start");
n = 15;
const urlfibo='http://172.22.0.4:8899';
Http.open("POST", urlfibo,false);
var params = 'value=15';
Http.setRequestHeader('Content-type', 'application/x-www-form-urlencoded');
Http.send(params);
console.log(Http.responseText);
response += Http.responseText;
res.writeHead(200, {'Content-Type': 'text/plain'});
res.write(response);
res.end();
On my server, I don't know where to find the parameter I passed. I've tried the solution in the docs, but my code never reaches it:
```
response += "\n ON" + req.on
let body = [];
req.on('data', (chunk) => {
response += "bla"
body.push(chunk);
}).on('end', () => {
body = Buffer.concat(body).toString();
// at this point, `body` has the entire request body stored in it as a string
});
```
My question is: on my server (second part of the code), how do I get the params I passed when I sent the request? I want to do it with Node.js without using frameworks.
Thank you

chunk will have all the information because it is an object; you just have to assign these values to a new object and look for the parameters. But make sure to have this in your code:
const bodyParser = require('body-parser');
app.use(bodyParser.urlencoded({ extended: true }));
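Since the question asks for a way to do this without frameworks, here is a minimal sketch using only Node's built-in http and querystring modules; the port and the value field are taken from the question, everything else is illustrative:
```
// Minimal sketch: parse an application/x-www-form-urlencoded POST body
// with only built-in modules (no Express, no body-parser).
var http = require('http');
var querystring = require('querystring');

http.createServer(function (req, res) {
  var body = [];
  req.on('data', function (chunk) {
    body.push(chunk);
  });
  req.on('end', function () {
    // e.g. "value=15" -> { value: '15' }
    var params = querystring.parse(Buffer.concat(body).toString());
    console.log(params.value); // "15"
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end('Received value: ' + params.value);
  });
}).listen(8899); // port taken from the URL in the question; adjust as needed
```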

Related

How can a JS Client pass POST parameters to a JS Server?

I'd like to have a JavaScript client and server pair that communicate using JSON without involving a web browser. I can get it going as long as the client does not try to send anything to the server. When it does, I get this error message (% node poster.js):
.../node_modules/xhr2/lib/xhr2.js:281
throw new NetworkError(`Unsupported protocol ${this._url.protocol}`);
Can somebody help me with this protocol issue, please? What I have so far:
a server that I can start with "pm2 start jsonServer.js" -
var port = 62022
var http = require('http')
var srvr = http.createServer (function (req, res) {
console.log ('Request type: ' + typeof(req))
console.log (req)
res.writeHead(200, {'Content-Type': 'text/plain'});
res.write('Hello World!\n');
res.write(req);
res.end();
})
var scriptName = __filename.split(__dirname+"/").pop();
console.log ('Script: ' + scriptName +'. Listening to port ' + port)
srvr.listen(port)
... and a client that is
var XMLHttpRequest = require('xhr2')
var serialize = function(object) {
return JSON.stringify(object, null, 2)
}
let xhr = new XMLHttpRequest();
xhr.open("POST", "localhost:62022");
xhr.setRequestHeader("Accept", "application/json");
xhr.setRequestHeader("Content-Type", "application/json");
xhr.onload = () => console.log(xhr.responseText);
let data = `{
"Id": 912,
"First Name": "Archibald",
"Last Name": "Haddock"
}`;
//console.log (serialize(data));
//xhr.send(serialize(data));
console.log (data);
xhr.send(data);
This is what is needed on the server after writing 'Hello World!' (the original res.end call does not wait for the async data transfer to finish):
let data = '';
req.on('data', chunk => {
data += chunk;
});
req.on('end', () => {
// do whatever with data
res.write ('Data: ' + data + '\n')
res.end()
});
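For completeness, a sketch of the server handler with the answer's buffering in place, assuming the JSON payload sent by the client above. The "Unsupported protocol" NetworkError on the client side is most likely because the URL passed to xhr.open lacks a scheme; xhr2 wants an absolute URL such as http://localhost:62022.
```
// Sketch only: buffer the request body and parse the JSON payload
// shown in the client above. On the client side, use
// xhr.open("POST", "http://localhost:62022") so xhr2 gets a full URL.
var http = require('http');

var srvr = http.createServer(function (req, res) {
  var data = '';
  req.on('data', function (chunk) {
    data += chunk;
  });
  req.on('end', function () {
    var obj = JSON.parse(data); // e.g. obj["First Name"] === "Archibald"
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.write('Hello World!\n');
    res.end('Data: ' + JSON.stringify(obj) + '\n');
  });
});

srvr.listen(62022); // port taken from the question
```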

How can I make a response.write in the request module?

I'm creating an HTTP server, and inside it I'm sending a request to the Yahoo Finance website and getting some data from it. What I want to do is print the data I got from Yahoo Finance to the browser.
The thing is that response.write isn't working inside the request callback.
Here is my code:
var http = require('http');
var request = require('request');
var cheerio = require('cheerio');
var util = require('util');
var host = "127.0.0.1";
var port = 1400;
var server = http.createServer(function (req, res) {
//writing the headers of our response
res.writeHead(200, {'Content-Type':'text/plain'});
// Variable declaration
// TODO: move from the global scope
var ticker = "IBM";
var yUrl = "http://finance.yahoo.com/q/ks?s=" + ticker;
var keyStr = new Array();
//
// The main call to fetch the data, parse it and work on it.
//
request(yUrl, function (error, response, body) {
if (!error && response.statusCode == 200) {
var $ = cheerio.load(body);
// the keys - We get them from a certain class attribute
var span = $('.time_rtq_ticker>span');
stockValue = $(span).text();
res.write("trying to print something");
console.log("Stock - " + ticker + " --> text " + stockValue );
}
}); // -- end of request --
res.write('Welcome to StockWach\n');
//printing out back to the client the last line
res.end('end of demo');
});
server.listen(port, host, function () {
console.log("Listening : " + host +":" + port);
});
You have to end the response (res.end();). Almost all browsers buffer some number of bytes of the response before showing anything, so you won't see the "trying to print something" until the response has ended.
If you use something like cURL, though, you will see the "trying to print something" right away, before the response is ended.
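One way to make the write from inside the request callback actually reach the client is to end the response only after that callback has run. A minimal sketch of the handler, reusing the Yahoo Finance URL and cheerio selector from the question:
```
var http = require('http');
var request = require('request');
var cheerio = require('cheerio');

var server = http.createServer(function (req, res) {
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  res.write('Welcome to StockWach\n');
  var ticker = 'IBM';
  var yUrl = 'http://finance.yahoo.com/q/ks?s=' + ticker;
  request(yUrl, function (error, response, body) {
    if (!error && response.statusCode == 200) {
      var $ = cheerio.load(body);
      var stockValue = $('.time_rtq_ticker>span').text();
      res.write('Stock - ' + ticker + ' --> text ' + stockValue + '\n');
    }
    // End the response only after the async request has finished;
    // otherwise the later res.write runs after res.end.
    res.end('end of demo');
  });
});

server.listen(1400, '127.0.0.1');
```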

Requests bigger than 8 kb

I want to evaluate the round-trip time between client and server. The user can choose how big the request/response size of the message (body) should be. On the client side I used an Ajax POST to send 100 messages at an interval of 100 ms to the HTTP server. Unfortunately I ran into a problem in Node.js: httpServer.js cannot handle client request sizes bigger than 8 kb. In that case the variable responseSizeServer in httpServer.js gets the value undefined and the console throws the error "Invalid Array length". Why does responseSizeServer become undefined in this case? I suppose that httpServer.js handles the .end method faster than the incoming request from the client. What do you think, and how can it be solved? Thanks in advance :)
Here is the code:
client:
var i = 0;
var iterations = 100;
function connectSpeed(){
run = window.setInterval("startSpeed()", 100);
}
function startSpeed()
{
//Variablen
var requestSizeClient = 8 *1024; // 8 kb request Size client
var responseSizeServer = 16 * 1024; // 16 kb response size server
var xmlhttp;
xmlhttp = new XMLHttpRequest();
xmlhttp.open("POST","http://localhost:8000", true);
xmlhttp.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
xmlhttp.onreadystatechange=function(){
if (xmlhttp.readyState==4 && xmlhttp.status==200){
var receiveTimeCl = new Date().getTime().toString();
//evaluate response from httpServer.js
var message = xmlhttp.responseText;
}
}
//send data to the server
xmlhttp.send(new Date().getTime().toString() + '#' + new Array((eval(requestSizeClient+1))-(new Date().getTime().toString().length+3)).join('X') + '#' + responseSizeServer);
i++;
if(i==iterations) {
window.clearInterval(run);
i=0;
}
}// end start-speed
server: (httpServer.js)
var http = require('http');
http.createServer(function (req, res) {
var receiveTimeServer;
var clientMsg;
var sendTimeClient;
var responseSizeServer;
var message;
req.on('data', function (chunk) {
receiveTimeServer = new Date().getTime().toString();
message = chunk.toString('utf8');
clientMsg = message.split('#');
responseSizeServer = parseInt(clientMsg[2]);
sendTimeClient = clientMsg[0];
res.writeHead(200, {'Content-Type': 'text/plain','Access-Control-Allow-Origin' : '*'});
res.end(receiveTimeServer + '#' + new Date().getTime().toString() + '#' + sendTimeClient + '#' + new Array(responseSizeServer).join('X'));
});
}).listen(8000);
console.log('Ajax_Server running');
Requests in Node.js are streams, and each data event corresponds to one incoming chunk. If the request is big, multiple chunks are emitted: you can then handle a request while it is being received, and you don't have to wait for the full request before handling it.
In your specific case, what you want is to buffer the full message and then handle it:
var message = '';
req.on('data', function (chunk) {
message += chunk; // concatenate all chunks
});
req.on('end', function() {
receiveTimeServer = new Date().getTime().toString();
clientMsg = message.split('#');
responseSizeServer = parseInt(clientMsg[2]);
sendTimeClient = clientMsg[0];
res.writeHead(200, {'Content-Type': 'text/plain','Access-Control-Allow-Origin' : '*'});
res.end(receiveTimeServer + '#' + new Date().getTime().toString() + '#' + sendTimeClient + '#' + new Array(responseSizeServer).join('X'));
});
You can find a thorough explanation with code samples in Node.js's streams documentation.

How can I force Node's request module to not encode the response?

I'm using Node's request module.
The response I get is gzipped or otherwise encoded.
How can I
1. Build the request to not encode the response?
2. Decode the response?
The data is coming from http://api.stackexchange.com.
var myRequest = require('request');
var zlib = require('zlib');
var stackRequest = require('request');
var apikey = '<MyKey>';
var fromdate = '1359417601';
var tagged = 'node.js';
stackRequest(
{ method: 'GET'
, uri: 'http://api.stackexchange.com/2.1/questions?key=' + apikey +
'&site=stackoverflow&fromdate=' + fromdate + '&order=desc&' +
'sort=activity&tagged=' + tagged + '&filter=default'
}, function(err, response, body) {
console.log(response.body); // How can I decode this?
});
The encoding has nothing to do with request. The Stack Exchange API always returns gzip-encoded data, as explained in the API documentation. You need to use Node's zlib module to unzip the contents. This is a simple example:
var zlib = require('zlib');
// Other code
, function(err, response, body) {
zlib.gunzip(body, function(err, data){
console.log(data);
});
});
The main downside of this approach is that it forces the request module to buffer the entire response content into one body. Instead, you should normally use Node's stream system to pipe the data from the request directly through the unzipping library, so that you use less memory. You'll still need to join the parts together to parse the JSON, but it is still better.
var zlib = require('zlib');
var request = require('request');
var apikey = '<MyKey>';
var fromdate = '1359417601';
var tagged = 'node.js';
var compressedStream = request('http://api.stackexchange.com/2.1/questions?' +
'key=' + apikey + '&site=stackoverflow&fromdate=' + fromdate +
'&order=desc&sort=activity&tagged=' + tagged + '&filter=default');
var decompressedStream = compressedStream.pipe(zlib.createGunzip());
var chunks = [];
decompressedStream.on('data', function(chunk){
chunks.push(chunk);
});
decompressedStream.on('end', function(){
var body = Buffer.concat(chunks);
var data = JSON.parse(body);
// Do your thing
});
First, set Accept-Encoding: identity as a request header. If Stack Exchange doesn't then send the data as regular UTF-8, it's a bug on their end.
Secondly, you want to set the encoding to UTF-8 so the response isn't a buffer.
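A minimal sketch of what those two suggestions could look like with the request module. The headers and encoding options are standard request features; whether the Stack Exchange API honours Accept-Encoding: identity is an assumption (the other answer states it always gzips):
```
var request = require('request');

request({
  method: 'GET',
  uri: 'http://api.stackexchange.com/2.1/questions?site=stackoverflow&tagged=node.js',
  headers: { 'Accept-Encoding': 'identity' }, // ask for an uncompressed response
  encoding: 'utf8'                            // give body back as a string, not a Buffer
}, function (err, response, body) {
  if (err) return console.error(err);
  console.log(JSON.parse(body)); // assumes the API actually returned plain JSON
});
```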

Node.js: Read params passed in the URL

In rails I do a POST request to my server:
response = Typhoeus::Request.post("http://url.localtunnel.com/request?from=ola&to=ole")
result = JSON.parse(response.body)
In the Node.js app, I want to read From and To:
app.post '/request', (req,res) ->
console.log "I have received a request, sweet!!"
sys.log req.params.from
#sys.log "From: " + req.from + ", To: " + req.to + ", Id: " + req.id
How do I do it?
Thanks
In Express, query-string params are available on req.query (e.g. ?id=12). With plain Node it looks something like this:
var http = require('http'), url = require('url');
http.createServer(function(request, response) {
response.writeHead(200, {"Content-Type":"text/plain"});
var _url = url.parse(request.url, true);
response.write("Hello " + _url.query["from"] + "!\n"); // etc: _url.query["to"]...
response.end();
}).listen(8000);
url.parse is the key point... Or you can use querystring.parse on the query-string part of the URL.
You can read more in the docs section at http://nodejs.org.
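For reference, a tiny sketch of the querystring.parse alternative, using the from/to parameters from the URL in the question:
```
var url = require('url');
var querystring = require('querystring');

var requestUrl = '/request?from=ola&to=ole';
var query = querystring.parse(url.parse(requestUrl).query);
console.log(query.from); // "ola"
console.log(query.to);   // "ole"
```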
