I'm using the following to send POST data to a secure Node.js server:
File: main.js
var https = require('https');

var strdata = JSON.stringify({"data":"thisdata"});
var options = {
  host: '192.168.1.63',
  port: 3001,
  path: '/saveconfig',
  method: 'POST',
  rejectUnauthorized: false,
  requestCert: true,
  agent: false,
  headers: {
    'Content-Type': 'application/x-www-form-urlencoded',
    'Content-Length': Buffer.byteLength(strdata)
  }
};

var req = https.request(options, function(res) {
  console.log('STATUS: ' + res.statusCode);
  console.log('HEADERS: ' + JSON.stringify(res.headers));
  res.setEncoding('utf8');
  res.on('data', function (chunk) {
    console.log('BODY: ' + chunk);
  });
});

console.log(req.write(strdata));
console.log(req.end());

req.on('error', function(e) {
  console.log('problem with request: ' + e.message);
});
req.on('finish', function() {
  console.log("finished request");
});
In an otherwise functional Express server, these are the relevant snippets:
File: app.js
app.post('/saveconfig', function() {
  data.saveconfig;
  console.log("received request");
});
app.get('/getconfig', data.getconfig);
File: data.js
exports.saveconfig = function(req, res) {
  console.log("saving config");
  res.send(200);
  res.end();
};

exports.getconfig = function(req, res) {
  res.send("get OK");
};
With app.js running on the server (Ubuntu), I run main.js from the client (Windows 7). req.write and req.end execute and "finished request" logs to the console, but the request callback never fires.
On the server, the app.post route in app.js fires and logs "received request" to the console, but "saving config" never logs. Only after I kill (^C) main.js does Express log "POST /saveconfig" to the console.
I know I'm missing something simple, but I've read dozens of coding examples and, as you can probably gather from my snippets, I've tried everything I can find or think of. I'd guess the request isn't finishing, but I don't know why. What is missing to get exports.saveconfig to fire?
Additional information
The answer posted below fixed my problem. Because I'm new to stackoverflow, I can't post my own answer, but here's the rest of the story...
I appreciate your help. Being still new to JavaScript, I found I can learn a lot about an object by converting it to a string, so I was originally attempting to convert the req parameter to a string using a custom function. After switching to JSON.stringify, I discovered that the custom function was apparently running into an endless loop.
The code looked something like this:
exports.saveconfig = function (db) {
  return function(req, res) {
    console.log("saving config");
    console.log(mymodule.serialize(req));
    res.end("OK");
    console.log(req.body);
  };
};
I would have thought the above code should have logged the following to the console, even if the serialize method was in an endless loop:
POST /saveconfig
saving config
[nothing because of the endless loop]
Instead I got:
saving config
connections property is deprecated. Use getConnections() method
Being new to JavaScript, I assumed something was wrong with the request, the server, or some plumbing in-between. Of course, then the debugging code I added (along with my ignorance of JS) compounded the problem.
Changing
app.post('/saveconfig', function() {
  data.saveconfig;
  console.log("received request");
});
to
app.post('/saveconfig', data.saveconfig);
and removing the endless loop fixed the problem.
The problem is in your app.js. You use data.saveconfig inside your callback without calling it. The right way would be:
app.post('/saveconfig', function(req, res) {
  data.saveconfig(req, res);
  console.log("received request");
});
app.get('/getconfig', data.getconfig);
or (I assume the console.log is just for debugging purposes):
app.post('/saveconfig', data.saveconfig);
app.get('/getconfig', data.getconfig);
You could do your console.log() inside your data.saveconfig method if you want to go with the second example.
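For instance, a minimal sketch of data.js for that second example, with the logging folded into the handler (keeping the Express 3-style res.send(200) used in the question):
exports.saveconfig = function(req, res) {
  console.log("received request");  // moved here from the route definition
  console.log("saving config");
  res.send(200);                    // on Express 4+ this would be res.sendStatus(200)
};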
Related
I have some Node.js code where, basically, this is what happens:
HTTP request (app.get) -> send a request to a low-level socket (using net.Socket()) -> get the response from the socket -> res.send(response from socket)
This doesn't work as written because net.Socket() uses asynchronous functions and events (client.on("data", callback)).
I tried something like this:
app.get("/", function(req, res){
client.connect("localhost", 420, function(){
client.write("example data");
});
client.on("data", function(data){
client.destroy();
res.send(data);
});
});
But it doesn't work because it says I am re-sending the headers (the res object won't change since the function is an event, not a sync function).
Any ideas? Or a library for sync socket requests? I have tried the following:
Synket
sync-socket
netlinkwrapper
And they don't work.
Edit: I am trying something like this:
async function sendData(client, res){
  client.on('data', function(data){
    console.log("Got data!");
    res.send("" + data);
    res.end();
    console.log("Sent data!");
    client.destroy();
    console.log("Killed connection");
    return;
  });
}

app.get("/", function(req, res){
  var body = req.query;
  client.connect(420, "localhost", function(){
    client.write("some random data");
    console.log("Connected & Data sent!");
    sendData(client, res);
  });
});
It works the first time I try to access the page, but the second time the app crashes and I get this error:
_http_outgoing.js:489
throw new Error('Can\'t set headers after they are sent.');
^
Error: Can't set headers after they are sent.
The client triggers the data event multiple times. Pipe the socket straight into the response instead:
app.get("/", function(req, res){
  client.connect(420, "localhost", function(){
    client.write("example data");
  });
  client.pipe(res);
});
It turns out that each time a request comes to that endpoint, a new listener is registered by calling client.on('eventName', cb). So, from the second call onward, the event fires multiple registered handlers and causes that header error.
So, a workaround for that:
socket.on('event', (packet) => {
  socket.removeAllListeners('event');
  res.json({ packet });
});
That would do the trick but I'm not sure if it's a bad practice to continuously add/remove the same event.
Edit
Found a better way: once ensures that the registered handler runs only once, just as we want:
socket.once('event', (packet) => {
  res.json({ packet });
});
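In the context of the route above, a minimal sketch of that idea (assuming client is the shared net.Socket from the question and is destroyed after each response, as in the original code):
app.get("/", function(req, res){
  client.connect(420, "localhost", function(){
    client.write("example data");
  });
  // `once` removes the listener after it fires, so a second request
  // doesn't leave a stale handler trying to answer with an old res.
  client.once('data', function(data){
    res.send("" + data);
    client.destroy();  // close the connection, as in the original code
  });
});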
I have code like this:
var options = {
  host: "https://basic:authentication@website.com",
  path: "/api/address"
};

var request = https.get(options, function(response){
  var str = "";
  response.on('data', function(chunk){
    str += chunk;
  });
  response.on('end', function(){
    console.log(str);
    res.json(str);
  });
});

request.end();
request.on('error', function(err){
  console.log(err);
});
This gives me
{ [Error: getaddrinfo ENOTFOUND] code: 'ENOTFOUND', errno: 'ENOTFOUND', syscall: 'getaddrinfo' }
I don't know what's wrong because if I change the request to look like this:
var request = https.get("https://basic:authentication@website.com/api/address", function(response){
It works and gets an answer from the api. The problem is that I can't input parameters into the call if I do it this way. Does anyone have tips?
The problem is that your host value is not correct; it should really just be the hostname. For HTTP basic auth, you can use the auth setting. For example:
var options = {
  host: "website.com",
  path: "/api/address",
  auth: "basic:authentication"
};
Also, explicitly calling request.end() is unnecessary since https.get() internally does that for you automatically.
I should also note that since it seems like you're responding to an existing request with a new, external request, you can simplify it further by simply piping the external response to the existing response:
https.get(options, function(response) {
  // You should probably check `response.statusCode` first ...
  res.set('Content-Type', 'application/json');
  response.pipe(res);
}).on('error', function(err) {
  // ...
});
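As for passing parameters into the call (the concern raised in the question), one approach is to build them into the path with the querystring module. This is only a sketch; the parameter names below are made up for illustration:
var https = require('https');
var querystring = require('querystring');

// Hypothetical query parameters, just to show how they end up in the path.
var params = querystring.stringify({ q: 'example', limit: 10 });

var options = {
  host: "website.com",
  path: "/api/address?" + params,
  auth: "basic:authentication"
};

https.get(options, function(response) {
  // ... handle the response as above
}).on('error', function(err) {
  console.log(err);
});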
I am having a really hard time standing up bidirectional data transfer from an HTML page to my Node.js application and back to the HTML page.
I'm pretty sure I'm all over the correct solution, but I'm just not getting it to work.
I'm using the following for my node.js application:
var express = require('/usr/lib/node_modules/express');
var app = express();

app.use(function(err, req, res, next){
  console.error(err.stack);
  res.send(500, 'Something broke!');
});
app.use(express.bodyParser());

app.post('/namegame', function(req, res) {
  console.log('Got request name: ' + req.body.name);
  setTimeout(function() {
    var newName = "New Name-O";
    res.send({name: newName});
  }, 2000);
});

app.listen(8088, function() { console.log("Server is up and running"); });
The following is my html page:
<html>
<head>
  <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.7/jquery.js"></script>
  <script type="text/javascript">
    // wait for the DOM to be loaded
    $(document).ready(function()
    {
      alert("Got here!");
      function DisplayName(response)
      {
        alert(response.newName);
      }
      $("#NameGameIt").click(function(event)
      {
        alert("How about now?");
        //event.PreventDefault();
        var nameSubmitted = document.getElementById("name");
        //var nameSubmitted = "help!!";
        alert("Name: " + nameSubmitted.value);
        $.ajax({
          type: "POST",
          url: "http://127.0.0.1:8088/namegame",
          data: {name: nameSubmitted.value},
          dataType: "json",
          timeout: 2500,
          success: function() { alert("???"); },
          error: function(error1, error2, error3) { alert("error"); },
          complete: function(arg1, arg2, arg3) { alert("complete"); }
        });
      });
    });
  </script>
</head>
<body>
  <h1>Test form for Ajax</h1>
  <br>
  Name: <input type="text" id="name" name="name"><br>
  <input type="button" value="NameGameIt" id="NameGameIt">
</body>
</html>
So, when I run the node application, the server comes up just fine. When I load the HTML page, I get the alert "Got here!" When I press the "NameGameIt" button, I get the alert "How about now?" followed by the alert "Name: " plus whatever name I entered. Once I click off that alert, the node application immediately sees the POST and prints the name to its console. Two seconds later, when node sends the response, the browser goes straight into the error handler of the AJAX post. The only information to come out of the error handler is that it is an "error", which isn't really useful.
So I know the message is getting from the HTML page to node, because node prints the name I sent, and I know the HTML page is getting the response back, because the AJAX call doesn't error out until node's timeout fires and triggers the send. But I have no idea why it keeps landing in the error handler instead of success. I've stepped all the way through the node code using node-inspector: it seems to build the response correctly with a 200 success code, and it calls .end inside .send once the packet is built, so I don't think either of those things is biting me.
I'm about to go nuts! If anyone sees what I'm missing or has any new ideas on ways to gather more information, I would be very grateful for the help.
Your code is perfectly fine, but you're almost certainly running into a cross-domain AJAX request issue. You might be opening this HTML file on the local filesystem and making requests that way, which is what is causing this problem.
To fix it, add app.use(express.static('public')); like so:
var express = require('/usr/lib/node_modules/express');
var app = express();

app.use(function(err, req, res, next){
  console.error(err.stack);
  res.send(500, 'Something broke!');
});
app.use(express.bodyParser());
app.use(express.static('public'));

app.post('/namegame', function(req, res) {
  console.log('Got request name: ' + req.body.name);
  setTimeout(function() {
    var newName = "New Name-O";
    res.send({name: newName});
  }, 2000);
});

app.listen(8088, function() { console.log("Server is up and running"); });
and then place your html file in the 'public' folder. Once you launch your server, you can visit http://127.0.0.1:8088/file.html and your code will run fine.
In my case, adding this to app.js works:
app.use(function(req, res, next) {
  res.header("Access-Control-Allow-Origin", "*");
  res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
  next();
});
https://enable-cors.org/server_expressjs.html
I used this code to pipe an image to my clients:
req.pipe(fs.createReadStream(__dirname+'/imgen/cached_images/' + link).pipe(res))
It does work, but sometimes the image is not transferred completely, and no error is thrown on either the client side (browser) or the server side (Node.js).
My second try was
var img = fs.readFileSync(__dirname + '/imgen/cached_images/' + link);
res.writeHead(200, {
  'Content-Type' : 'image/png'
});
res.end(img, 'binary');
but it leads to the same strange behaviour...
Does anyone have a clue for me?
(abstracted code...)
var http = require('http');

http.createServer(function (req, res) {
  Imgen.generateNew(
    'virtualtwins/www_leonardocampus_de/overview/28',
    'www.leonardocampus.de',
    'overview',
    '28',
    null,
    [],
    [],
    function (link) {
      fs.stat(__dirname + '/imgen/cached_images/' + link, function(err, file_info) {
        if (err) { console.log('err', err); }
        console.log('file info', file_info.size);
        res.writeHead(200, 'image/png');
        fs.createReadStream(__dirname + '/imgen/cached_images/' + link).pipe(res);
      });
    }
  );
}).listen(13337, '127.0.0.1');
Imgen.generateNew just creates a new file, saves it to the disk and gives back the path (link).
I've used this before, and all that is needed inside the function (req, res) { is:
var path = ...;
res.writeHead(200, {
  'Content-Type' : 'image/png'
});
fs.createReadStream(path).pipe(res);
where path is the computed path to the file to send. .pipe() will transfer the data from the read stream to the write stream and call end when the read stream ends, so there is no need to use res.end() after.
What the problem was: I had two different write streams! When WriteStream #1 is closed, the second should be closed too, and only then should everything be piped.
But node is asynchronous, so while one had been closed, the other one hadn't, even though stream.end() had been called... you should always wait for the close event!
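A minimal sketch of that idea (the helper name and arguments here are made up; it only illustrates waiting for 'close' before piping the finished file to the response):
var fs = require('fs');

// Hypothetical helper: write the image data to disk first, and only pipe the
// finished file to the client once the write stream emits 'close'.
function sendWhenWritten(imageBuffer, tmpPath, res) {
  var out = fs.createWriteStream(tmpPath);
  out.end(imageBuffer);          // write everything and end the stream
  out.on('close', function() {   // fires only after the file is fully flushed and closed
    res.writeHead(200, { 'Content-Type': 'image/png' });
    fs.createReadStream(tmpPath).pipe(res);
  });
}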
I'm brand new to node.js, but I wanted to play around with some basic code and make a few requests. At the moment, I'm playing around with the OCW search (http://www.ocwsearch.com/), and I'm trying to make a few basic requests using their sample search request:
However, no matter what request I try to make (even if I just query google.com), it returns:
<html>
<head><title>301 Moved Permanently</title></head>
<body bgcolor="white">
<center><h1>301 Moved Permanently</h1></center>
<hr><center>nginx/0.7.65</center>
</body>
</html>
I'm not too sure what's going on. I've looked up nginx, but most questions about it seem to be asked by people setting up their own servers. I've tried using an https request instead, but that returns an 'ENOTFOUND' error.
My code below:
var http = require('http');

http.createServer(function (request, response) {
  response.writeHead(200, {'Content-Type': 'text/plain'});
  response.end('Hello World\n');

  var options = {
    host: 'ocwsearch.com',
    path: '/api/v1/search.json?q=statistics&contact=http%3a%2f%2fwww.ocwsearch.com%2fabout/',
    method: 'GET'
  };

  var req = http.request(options, function(res) {
    console.log("statusCode: ", res.statusCode);
    console.log("headers: ", res.headers);
    res.on('data', function(d) {
      process.stdout.write(d);
    });
  });
  req.end();

  req.on('error', function(e) {
    console.error(e);
  });
}).listen(8124);

console.log('Server running at http://127.0.0.1:8124/');
Sorry if this is a really simple question, and thanks for any help you can give!
For me, the website I was trying to GET was redirecting me to the secure protocol, so I changed
require('http');
to
require('https');
The problem is that Node.js's http module isn't following the redirect you are given.
See this question for more: How do you follow an HTTP Redirect in Node.js?
Basically, you can either look through the headers and handle the redirect yourself, or use one of the handful of modules for this. I've used the "request" library, and have had good luck with it myself. https://github.com/mikeal/request
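For the request library route, a hedged sketch (assuming the module has been installed with npm install request; it follows redirects by default, so the question's google.com example works directly):
var request = require('request');

// request follows 3xx redirects by default (followRedirect: true),
// so the body returned here comes from the final destination.
request('http://google.com/', function (error, response, body) {
  if (error) {
    return console.error(error);
  }
  console.log('statusCode:', response.statusCode);
  console.log(body);
});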
var http = require('http');

// Follow 301 redirects until the final URL is reached, then hand it to the callback.
var find_link = function(link, callback){
  var f = function(link){
    http.get(link, function(res) {
      if (res.statusCode == 301) {
        f(res.headers.location);
      } else {
        callback(link);
      }
    });
  };
  f(link);
};

find_link('http://somelink.com/mJLsASAK', function(link){
  console.log(link);
});
This question is old now, but I got the same 301 error and these answers didn't actually help me to solve the problem.
I wrote the same code:
var http = require('http');

var options = {
  hostname: 'google.com',
  port: 80,
  path: '/',
  method: 'GET',
  headers: {
    'Content-Type': 'text/plain',
  }
};

var req = http.request(options, function(res) {
  console.log('STATUS:', res.statusCode);
  console.log('HEADERS: ', JSON.stringify(res.headers));
  res.setEncoding('utf8');
  res.on('data', function(chunk) {
    console.log(chunk);
  });
  res.on('end', function() {
    console.log('No more data in response.');
  });
});

req.on('error', function(e) {
  console.log('problem with request: ', e.message);
});

console.log(req);
req.end();
After some time I realized that there's a really tiny mistake in this code, which is the hostname part:
var options = {
  hostname: 'google.com',
  ...
You have to add "www." before the hostname to get the HTML content; otherwise you get a 301 error:
var options = {
  hostname: 'www.google.com',
  ...