Response.write doesn't display variable string - javascript

So, I'm learning Node and tried to write a simple server by myself. When it comes to writing data to the response, it doesn't work properly. I converted the data to a string and tried printing it to the console, where it displays normally, but when I pass it to response.write() nothing changes on my page. I created a file called 'lorem' (at poems/lorem) and I request that path in my browser.
As you can see, I was writing some debugging code, but I deleted most of it.
var http = require('http');
var fs = require('fs');
var path = require('path');
var port = process.argv[2];
var readFileContent = (filePath) =>
{
    var readData = '';
    fs.readFile(path.normalize(process.cwd() + filePath), 'utf-8', (err, data) =>
    {
        let invalid = false;
        if(err || data === undefined)
        {
            invalid = true;
        }
        if(!invalid) readData = data;
        console.log(readData); // logs data from a file
    });
    return readData;
}
var serverHandler = (request, response) =>
{
    let responseData = readFileContent(request.url);
    if(responseData != undefined)
    {
        response.writeHead(200, {'Content-Type':'text/html'});
        response.write('' + responseData.toString()); // there is a problem: writes data only when I pass some exact string (e.g. 'LOREM')
    }
    else{
        response.writeHead(404);
        response.write('Error occured');
    }
    response.end(responseData.toString());
}
http.createServer(serverHandler).listen(+ port);
console.log('Server is listening on port ' + port);
Here is some console output for the 'node server.js 8080' command (run from C:/folderpath/):
Server is listening on port 8080
lorem
lorem
lorem
Content of 'poems/lorem' file == 'lorem'

I think your problem is asynchronicity: readFileContent returns readData before the fs.readFile callback has run, so the handler always sees the initial empty string.
Provide your file-reading function with a callback that receives the file content.
Like this:
var readFileContent = (filePath, callback) =>
{
    fs.readFile(path.normalize(process.cwd() + filePath), 'utf-8', (err, data) =>
    {
        let readData; // declared here so it is not an accidental global
        let invalid = false;
        if(err || data === undefined)
        {
            invalid = true;
        }
        if(!invalid) readData = data;
        callback(readData);
    });
}
And in your server handler function do the following:
var serverHandler = (request, response) =>
{
    readFileContent(request.url, (responseData) => {
        if(responseData != undefined) {
            response.writeHead(200, {'Content-Type':'text/html'});
            response.end(responseData.toString());
        }
        else {
            response.writeHead(404);
            response.end('Error occured'); // don't touch responseData here, it is undefined
        }
    });
}
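As an aside, here is a minimal sketch of the same server using fs.promises and async/await instead of an explicit callback. It assumes Node 10+ (for fs.promises) and keeps the same cwd-relative path convention; everything else matches the code above.
var http = require('http');
var fsp = require('fs').promises;
var path = require('path');
var port = process.argv[2];

var serverHandler = async (request, response) => {
    try {
        // await suspends the handler until the file has actually been read
        var data = await fsp.readFile(path.normalize(process.cwd() + request.url), 'utf-8');
        response.writeHead(200, {'Content-Type':'text/html'});
        response.end(data);
    } catch (err) {
        response.writeHead(404);
        response.end('Error occured');
    }
};
http.createServer(serverHandler).listen(+port);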


Callback is not a function - manage asynchronous calls in Node.js

I am reading files from FTP using the code below.
var JSFtp = require("jsftp");
var config = require('./config.json');
var FtpService = function () {};
// Connect to FTP
var Ftp = new JSFtp({
host: config.ftp.host,
port: config.ftp.port,
user: config.ftp.user,
pass: config.ftp.pass
});
FtpService.prototype.getFTPDirectoryFiles = function (callback) {
Ftp.list(config.ftp.FilePath, function(err, res) {
if(err){
console.log('File Listing Failed', err);
callback(null,err);
return;
}
else{
console.log(res);
callback(null,res);
}
});
};
FtpService.prototype.closeFtp = function () {
console.log('Disconnect to FTP');
};
module.exports = new FtpService();
Now I include this FTP service JS file in my index.js like this:
var ftp = require('./ftpservice.js');
ftpfiles = ftp.getFTPDirectoryFiles();
console.log(ftpfiles);
getFTPDirectoryFiles returns the list of files, but if I call it via index.js I get undefined for ftpfiles. This is because of the asynchronous nature of Node.js,
so I thought of adding a callback, but
I am getting the error "callback is not defined" in the function FtpService.prototype.getFTPDirectoryFiles.
In this line:
ftpfiles = ftp.getFTPDirectoryFiles()
you are not passing the callback that that function requires and are trying to use a return value that the function does not return.
You need to do something like this:
var ftp = require('./ftpservice.js');
ftp.getFTPDirectoryFiles(function(err, ftpfiles) {
if (err) {
console.log(err);
} else {
console.log(ftpfiles);
}
});
You need to pass a callback function into your getFTPDirectoryFiles() function:
var ftp = require('./ftpservice.js');
var ftpFiles;
function setFtpFiles(err, res) {
if (err) throw err;
ftpFiles = res; // to use "ftpFiles" variable later
console.log(res);
}
ftp.getFTPDirectoryFiles(setFtpFiles);
1. Don't change the argument order when calling the callback (replace callback(null,err); and callback(null,res); with callback(err,res);).
2. You need to define a specific function (your callback) and give it to ftp.getFTPDirectoryFiles().
var JSFtp = require("jsftp");
var config = require('./config.json');
var FtpService = function () {};
// Connect to FTP
var Ftp = new JSFtp({
host: config.ftp.host,
port: config.ftp.port,
user: config.ftp.user,
pass: config.ftp.pass
});
FtpService.prototype.getFTPDirectoryFiles = function (callback) {
Ftp.list(config.ftp.FilePath, function(err, res) {
if(err){
console.log('File Listing Failed', err);
callback(err, res);
return;
}
else{
console.log(res);
callback(err, res);
}
});
};
FtpService.prototype.getFTPDirectoryFilesSimplify = function (callback) {
// no console.log, but much simpler!
Ftp.list(config.ftp.FilePath, callback);
};
FtpService.prototype.closeFtp = function () {
console.log('Disconnect to FTP');
};
and then:
var ftp = require('./ftpservice.js');
ftp.getFTPDirectoryFiles(function(err,res){
// do your specific job here using err and res; the file list is only available inside this callback
});
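If you prefer promises, here is a rough sketch built on the getFTPDirectoryFilesSimplify method proposed above (assuming you adopt it); util.promisify works because the callback follows the standard (err, res) convention.
var util = require('util');
var ftp = require('./ftpservice.js');

// Promise-returning wrapper around the callback API sketched above.
var getFilesAsync = util.promisify(ftp.getFTPDirectoryFilesSimplify.bind(ftp));

getFilesAsync()
    .then(function (ftpfiles) { console.log(ftpfiles); })
    .catch(function (err) { console.log('File Listing Failed', err); });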

Different response on NodeJS and Postman [duplicate]

I want to open a page up in node and process the contents in my application. Something like this seems to work well:
var opts = {host: host, path:pathname, port: 80};
http.get(opts, function(res) {
var page = '';
res.on('data', function (chunk) {
page += chunk;
});
res.on('end', function() {
// process page
});
});
This doesn't work, however, if the page returns a 301/302 redirect. How would I handle that in a reusable way, in case there are multiple redirects? Is there a wrapper module on top of http to more easily handle processing HTTP responses from a node application?
If all you want to do is follow redirects but still want to use the built-in HTTP and HTTPS modules, I suggest you use https://github.com/follow-redirects/follow-redirects.
yarn add follow-redirects
npm install follow-redirects
All you need to do is replace:
var http = require('http');
with
var http = require('follow-redirects').http;
... and all your requests will automatically follow redirects.
With TypeScript you can also install the types:
npm install @types/follow-redirects
and then use
import { http, https } from 'follow-redirects';
Disclosure: I wrote this module.
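For illustration, a minimal sketch of a request with the drop-in module (the URL is just a placeholder):
var http = require('follow-redirects').http;

// Same call shape as the built-in module; 301/302 responses are followed transparently.
http.get('http://example.com/some-page', function (res) {
    var page = '';
    res.on('data', function (chunk) { page += chunk; });
    res.on('end', function () { console.log(page.length); });
}).on('error', function (err) { console.error(err); });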
Regarding "Is there a wrapper module on top of http to more easily handle processing HTTP responses from a node application?": yes, the request module (see its built-in redirection logic).
Make another request based on response.headers.location:
// "lib" is whichever core module matches the URL scheme (require('http') or require('https'))
const lib = require('http');
const request = function(url) {
    lib.get(url, (response) => {
        var body = [];
        if (response.statusCode == 301 || response.statusCode == 302) {
            request(response.headers.location); // follow the redirect target
        } else {
            response.on("data", (chunk) => body.push(chunk));
            response.on("end", () => {
                // process Buffer.concat(body) here
            });
        }
    }).on("error", (err) => console.error(err));
};
request(url); // call with the target URL
Update:
Now you can follow all redirects with var request = require('request'); using the followAllRedirects param.
request({
followAllRedirects: true,
url: url
}, function (error, response, body) {
if (!error) {
console.log(response);
}
});
Here is my (recursive) approach to download JSON with plain node, no packages required.
import https from "https";
function get(url, resolve, reject) {
https.get(url, (res) => {
// if any other redirect status codes are returned, they need to be added here
if(res.statusCode === 301 || res.statusCode === 302) {
return get(res.headers.location, resolve, reject)
}
let body = [];
res.on("data", (chunk) => {
body.push(chunk);
});
res.on("end", () => {
try {
// remove JSON.parse(...) for plain data
resolve(JSON.parse(Buffer.concat(body).toString()));
} catch (err) {
reject(err);
}
});
});
}
async function getData(url) {
return new Promise((resolve, reject) => get(url, resolve, reject));
}
// call
getData("some-url-with-redirect").then((r) => console.log(r));
Here is a function I use to fetch a URL that involves a redirect:
const http = require('http');
const url = require('url');
function get({path, host}, callback) {
http.get({
path,
host
}, function(response) {
if (response.headers.location) {
var loc = response.headers.location;
if (loc.match(/^http/)) {
loc = url.parse(loc); // absolute redirect target: re-parse it to get the new host and path
host = loc.host;
path = loc.path;
} else {
path = loc;
}
get({host, path}, callback);
} else {
callback(response);
}
});
}
It works the same as http.get but follows redirects.
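For example, a hypothetical call (the host and path are placeholders) might look like:
get({ host: 'example.com', path: '/old-location' }, function (response) {
    var body = '';
    response.on('data', function (chunk) { body += chunk; });
    response.on('end', function () { console.log(body); });
});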
In the case of a PUT or POST request, if you receive status code 405 (Method Not Allowed), try this implementation with the request library and add the following properties:
followAllRedirects: true,
followOriginalHttpMethod: true
const options = {
headers: {
Authorization: TOKEN,
'Content-Type': 'application/json',
'Accept': 'application/json'
},
url: `https://${url}`,
json: true,
body: payload,
followAllRedirects: true,
followOriginalHttpMethod: true
}
console.log('DEBUG: API call', JSON.stringify(options));
request(options, function (error, response, body) {
if (!error) {
console.log(response);
}
});
If you have an HTTPS server, change your URL to use the https:// protocol.
I ran into a similar issue: my URL used the http:// protocol and I wanted to make a POST request, but the server wanted to redirect it to HTTPS. It turned out the redirect was being followed with a GET request rather than the original POST, which is not what I intended.
What I did was change my URL to the https:// protocol, and it works.
Possibly a little bit of a necromancing post here, but...
Here's a function that follows up to 10 redirects and detects infinite redirect loops. It also parses the result into JSON.
Note - it uses a callback helper (shown at the end of this post).
( TLDR; full working demo in context here or remixed-version here)
function getJSON(url,cb){
var callback=errBack(cb);
//var callback=errBack(cb,undefined,false);//replace previous line with this to turn off logging
if (typeof url!=='string') {
return callback.error("getJSON:expecting url as string");
}
if (typeof cb!=='function') {
return callback.error("getJSON:expecting cb as function");
}
var redirs = [url],
fetch = function(u){
callback.info("hitting:"+u);
https.get(u, function(res){
var body = [];
callback.info({statusCode:res.statusCode});
if ([301,302].indexOf(res.statusCode)>=0) {
if (redirs.length>10) {
return callback.error("excessive 301/302 redirects detected");
} else {
if (redirs.indexOf(res.headers.location)<0) {
redirs.push(res.headers.location);
return fetch(res.headers.location);
} else {
return callback.error("301/302 redirect loop detected");
}
}
} else {
res.on('data', function(chunk){
body.push(chunk);
callback.info({onData:{chunkSize:chunk.length,chunks:body.length}});
});
res.on('end', function(){
try {
// convert to a single buffer
var json = Buffer.concat(body);
console.info({onEnd:{chunks:body.length,bodyLength:body.length}});
// parse the buffer as json
return callback.result(JSON.parse(json),json);
} catch (err) {
console.error("exception in getJSON.fetch:",err.message||err);
if (json.length>32) {
console.error("json==>|"+json.toString('utf-8').substr(0,32)+"|<=== ... (+"+(json.length-32)+" more bytes of json)");
} else {
console.error("json==>|"+json.toString('utf-8')+"|<=== json");
}
return callback.error(err,undefined,json);
}
});
}
});
};
fetch(url);
}
Note - it uses a callback helper (shown below).
You can paste this into the Node console and it should run as is.
( or for full working demo in context see here )
var
fs = require('fs'),
https = require('https');
function errBack (cb,THIS,logger) {
var
self,
EB=function(fn,r,e){
if (logger===false) {
fn.log=fn.info=fn.warn=fn.errlog=function(){};
} else {
fn.log = logger?logger.log : console.log.bind(console);
fn.info = logger?logger.info : console.info.bind(console);
fn.warn = logger?logger.warn : console.warn.bind(console);
fn.errlog = logger?logger.error : console.error.bind(console);
}
fn.result=r;
fn.error=e;
return (self=fn);
};
if (typeof cb==='function') {
return EB(
logger===false // optimization when not logging - don't log errors
? function(err){
if (err) {
cb (err);
return true;
}
return false;
}
: function(err){
if (err) {
self.errlog(err);
cb (err);
return true;
}
return false;
},
function () {
return cb.apply (THIS,Array.prototype.concat.apply([undefined],arguments));
},
function (err) {
return cb.apply (THIS,Array.prototype.concat.apply([typeof err==='string'?new Error(err):err],arguments));
}
);
} else {
return EB(
function(err){
if (err) {
if (typeof err ==='object' && err instanceof Error) {
throw err;
} else {
throw new Error(err);
}
return true;//redundant due to throw, but anyway.
}
return false;
},
logger===false
? self.log //optimization :resolves to noop when logger==false
: function () {
self.info("ignoring returned arguments:",Array.prototype.concat.apply([],arguments));
},
function (err) {
throw typeof err==='string'?new Error(err):err;
}
);
}
}
function getJSON(url,cb){
var callback=errBack(cb);
if (typeof url!=='string') {
return callback.error("getJSON:expecting url as string");
}
if (typeof cb!=='function') {
return callback.error("getJSON:expecting cb as function");
}
var redirs = [url],
fetch = function(u){
callback.info("hitting:"+u);
https.get(u, function(res){
var body = [];
callback.info({statusCode:res.statusCode});
if ([301,302].indexOf(res.statusCode)>=0) {
if (redirs.length>10) {
return callback.error("excessive 302 redirects detected");
} else {
if (redirs.indexOf(res.headers.location)<0) {
redirs.push(res.headers.location);
return fetch(res.headers.location);
} else {
return callback.error("302 redirect loop detected");
}
}
} else {
res.on('data', function(chunk){
body.push(chunk);
console.info({onData:{chunkSize:chunk.length,chunks:body.length}});
});
res.on('end', function(){
try {
// convert to a single buffer
var json = Buffer.concat(body);
callback.info({onEnd:{chunks:body.length,bodyLength:body.length}});
// parse the buffer as json
return callback.result(JSON.parse(json),json);
} catch (err) {
// read with "bypass refetch" option
console.error("exception in getJSON.fetch:",err.message||err);
if (json.length>32) {
console.error("json==>|"+json.toString('utf-8').substr(0,32)+"|<=== ... (+"+(json.length-32)+" more bytes of json)");
} else {
console.error("json==>|"+json.toString('utf-8')+"|<=== json");
}
return callback.error(err,undefined,json);
}
});
}
});
};
fetch(url);
}
var TLDs,TLDs_fallback = "com.org.tech.net.biz.info.code.ac.ad.ae.af.ag.ai.al.am.ao.aq.ar.as.at.au.aw.ax.az.ba.bb.bd.be.bf.bg.bh.bi.bj.bm.bn.bo.br.bs.bt.bv.bw.by.bz.ca.cc.cd.cf.cg.ch.ci.ck.cl.cm.cn.co.cr.cu.cv.cw.cx.cy.cz.de.dj.dk.dm.do.dz.ec.ee.eg.er.es.et.eu.fi.fj.fk.fm.fo.fr.ga.gb.gd.ge.gf.gg.gh.gi.gl.gm.gn.gp.gq.gr.gs.gt.gu.gw.gy.hk.hm.hn.hr.ht.hu.id.ie.il.im.in.io.iq.ir.is.it.je.jm.jo.jp.ke.kg.kh.ki.km.kn.kp.kr.kw.ky.kz.la.lb.lc.li.lk.lr.ls.lt.lu.lv.ly.ma.mc.md.me.mg.mh.mk.ml.mm.mn.mo.mp.mq.mr.ms.mt.mu.mv.mw.mx.my.mz.na.nc.ne.nf.ng.ni.nl.no.np.nr.nu.nz.om.pa.pe.pf.pg.ph.pk.pl.pm.pn.pr.ps.pt.pw.py.qa.re.ro.rs.ru.rw.sa.sb.sc.sd.se.sg.sh.si.sj.sk.sl.sm.sn.so.sr.st.su.sv.sx.sy.sz.tc.td.tf.tg.th.tj.tk.tl.tm.tn.to.tr.tt.tv.tw.tz.ua.ug.uk.us.uy.uz.va.vc.ve.vg.vi.vn.vu.wf.ws.ye.yt.za.zm.zw".split(".");
var TLD_url = "https://gitcdn.xyz/repo/umpirsky/tld-list/master/data/en/tld.json";
var TLD_cache = "./tld.json";
var TLD_refresh_msec = 15 * 24 * 60 * 60 * 1000;
var TLD_last_msec;
var TLD_default_filter=function(dom){return dom.substr(0,3)!="xn-"};
function getTLDs(cb,filter_func){
if (typeof cb!=='function') return TLDs;
var
read,fetch,
CB_WRAP=function(tlds){
return cb(
filter_func===false
? tlds
: tlds.filter(
typeof filter_func==='function'
? filter_func
: TLD_default_filter)
);
},
check_mtime = function(mtime) {
if (Date.now()-mtime > TLD_refresh_msec) {
return fetch();
}
if (TLDs) return CB_WRAP (TLDs);
return read();
};
fetch = function(){
getJSON(TLD_url,function(err,data){
if (err) {
console.log("exception in getTLDs.fetch:",err.message||err);
return read(true);
} else {
TLDs=Object.keys(data);
fs.writeFile(TLD_cache,JSON.stringify(TLDs),function(err){
if (err) {
// ignore save error, we have the data
CB_WRAP(TLDs);
} else {
// get mmtime for the file we just made
fs.stat(TLD_cache,function(err,stats){
if (!err && stats) {
TLD_last_msec = stats.mtimeMs;
}
CB_WRAP(TLDs);
});
}
});
}
});
};
read=function(bypassFetch) {
fs.readFile(TLD_cache,'utf-8',function(err,json){
try {
if (err) {
if (bypassFetch) {
// after an http error, we fall back to the hardcoded basic list of tlds
// if the disk file is not readable
console.log("exception in getTLDs.read.bypassFetch:",err.messsage||err);
throw err;
}
// if the disk read failed, get the data from the CDN server instead
return fetch();
}
TLDs=JSON.parse(json);
if (bypassFetch) {
// we need to update stats here as fetch called us directly
// instead of being called by check_mtime
return fs.stat(TLD_cache,function(err,stats){
if (err) return fetch();
TLD_last_msec =stats.mtimeMs;
return CB_WRAP(TLDs);
});
}
} catch (e){
// after JSON error, if we aren't in an http fail situation, refetch from cdn server
if (!bypassFetch) {
return fetch();
}
// after an http, disk, or json parse error, we fall back to the hardcoded basic list of tlds
console.log("exception in getTLDs.read:",e.message||e);
TLDs=TLDs_fallback;
}
return CB_WRAP(TLDs);
});
};
if (TLD_last_msec) {
return check_mtime(TLD_last_msec);
} else {
fs.stat(TLD_cache,function(err,stats){
if (err) return fetch();
TLD_last_msec =stats.mtimeMs;
return check_mtime(TLD_last_msec);
});
}
}
getTLDs(console.log.bind(console));

Getting error "can't set headers after they are sent" when reading files from a directory?

I am trying to get the name and creation date of the files. The code below throws an error when I call the API. It reads the directory and prints all the file names, but it never sends them back to the callback. Any idea what is implemented wrong?
service.js
var fs = require('fs');
var path = require('path');
var async = require('async');
var currentDate = new Date();
var objToReturn = [];
var logsDirectory = './logs'
function readDirectory(env, callback) {
fs.readdir(logsDirectory + '/' + env, function(err, files) {
// loop through each file
async.eachSeries(files, function(file, done) {
var dirPath = logsDirectory + '/' + env;
var filePath = path.join(dirPath, file);
var fileInfo = {};
fs.stat(filePath, function(err, stats) {
if (err) {
console.info("File doesn't exist");
} else {
fileInfo.fileDate = stats.birthtime;
fileInfo.filename = file;
objToReturn.push(fileInfo);
done();
}
});
});
},
function(err) {
if (err) {
console.info('error', err);
return;
}
// when you're done reading all the files, do something...
console.log('before Callback', objToReturn);
callback(objToReturn);
});
}
exports.readDirectory = readDirectory;
app.js
var stDirectory = require('./app/serverfiles/stDir');
app.get('/getAllFiles',function(req,res){
var env = req.query.env
console.log('printing',env);
stDirectory.readDirectory(env,function(files){
res.json(files);
console.log('Api files',files);
});
});
There are a few issues:
instead of passing the "final" handler to async.eachSeries(), you're passing it to fs.readdir(), so callback will never get called;
you're declaring objToReturn outside of the function, which isn't a good idea because multiple requests could be handled in parallel;
you're not handling any errors properly;
you should really use the Node.js callback idiom of calling callbacks with two arguments, the first being errors (if there are any) and the second being the result of the asynchronous operation.
The code below should fix these issues:
function readDirectory(env, callback) {
let objToReturn = [];
fs.readdir(
logsDirectory + "/" + env,
function(err, files) {
if (err) return callback(err);
// loop through each file
async.eachSeries(files, function(file, done) {
var dirPath = logsDirectory + "/" + env;
var filePath = path.join(dirPath, file);
var fileInfo = {};
fs.stat(filePath, function(err, stats) {
if (err) {
console.info("File doesn't exist");
return done(err);
} else {
fileInfo.fileDate = stats.birthtime;
fileInfo.filename = file;
objToReturn.push(fileInfo);
done();
}
});
}, function(err) {
if (err) {
console.info("error", err);
return callback(err);
}
// when you're done reading all the files, do something...
console.log("before Callback", objToReturn);
callback(null, objToReturn);
}
);
}
// To call it:
stDirectory.readDirectory(env, function(err, files) {
if (err) {
res.sendStatus(500);
} else {
res.json(files);
console.log('Api files',files);
}
});
You should also consider using async.mapSeries() instead of async.eachSeries() and using a separate array (objToReturn).
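For reference, a rough sketch of that async.mapSeries() variant could look like this (it reuses the fs, path, async and logsDirectory names from above and keeps error handling minimal):
function readDirectory(env, callback) {
    var dirPath = logsDirectory + '/' + env;
    fs.readdir(dirPath, function (err, files) {
        if (err) return callback(err);
        async.mapSeries(files, function (file, done) {
            fs.stat(path.join(dirPath, file), function (err, stats) {
                if (err) return done(err);
                // map each file name to its info object
                done(null, { filename: file, fileDate: stats.birthtime });
            });
        }, callback); // callback receives (err, arrayOfFileInfo) directly
    });
}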

node can't set headers after they are sent

I know this question has been asked many times before, but I'm still struggling to figure this out. I have a set of JS files. The first one is index.js:
app.all('/backend/*/*', function(req, res){ // backend/product/getProduct
serviceType = req.params[0];
methodType = req.params[1];
exports.serviceType= serviceType;
exports.methodType= methodType;
main.checkService()
});
Here I'm extracting the params and calling the checkService method in the main.js file.
main.js
function checkService(){
switch(index.serviceType){
case 'product':
product.checkMethod();
break;
default :
console.log('no such service')
}
}
Then it moves to the product.js file:
function checkMethod(){
var methodName = index.methodType,
res = index.res,
req = index.req;
switch(methodName){
case 'samplePost':
var body = req.body;
proHan.samplePost(body,function(data,msg,status){
sendRes(data,msg,status);
});
break;
default :
console.log('no such method')
}
function sendRes(jsonObj,msg,status){
var resObj = {
status : status,
result : jsonObj,
message : msg
}
res.json(resObj);
}
First it moves to the samplePost method in handler.js.
Once the HTTP request has finished executing, the callback returns the results, calls the sendRes method and sends the JSON.
function samplePost(jsonString,cb){
var res = config.setClient('nodeSample');
// jsonString = JSON.parse(jsonString);
res.byKeyField('name').sendPost(jsonString,function(data,msg,status){
cb(data,msg,status);
});
}
To send the HTTP request I have written a common file, config.js:
function setClient(_cls){
var client = new Client(url);
return client;
}
function parentClient(url){
this.postBody = {
"Object":{},
"Parameters":{
"KeyProperty":""
}
};
}
function paramChild(){
parentClient.apply( this, arguments );
this.byKeyField = function(_key){
this.postBody.Parameters.KeyProperty = _key;
return this;
}
}
function Client(url){
parentClient.apply( this, arguments );
this.sendPost = function(_body,cb){
_body = (_body) || {};
this.postBody.Object = _body;
var options = {
host : 'www.sample.com',
port : 3300,
path: '/somrthing',
headers: {
'securityToken' : '123'
}
};
options.method = "POST";
var req = http.request(options, function(response){
var str = ''
response.on('data', function (chunk) {
str += chunk;
});
response.on('end', function () {
cb(JSON.parse('[]'),'success',200)
});
});
//This is the data we are posting, it needs to be a string or a buffer
req.on('error', function(response) {
cb(JSON.parse('[]'),response.errno,response.code)
});
req.write(JSON.stringify(this.postBody));
req.end();
}
}
paramChild.prototype = new parentClient();
Client.prototype = new paramChild();
When I send the first request it works, but from then on the server crashes. It seems like I can't call the res.end method again in a callback. How can I fix this? Thank you.
You can't call res.end twice for the same request. Here is a simple example of dealing with a callback in a basic Node server.
const http = require('http');
const hostname = '127.0.0.1';
const port = 4242;
let something = true;
function callback(req, res) {
something = !something;
res.setHeader('Content-Type', 'text/plain');
res.end('Callback Hello World\n');
}
const server = http.createServer((req, res) => {
res.statusCode = 200;
if (something) {
callback(req, res);
} else {
something = !something;
res.setHeader('Content-Type', 'text/plain');
res.end('Hello World\n');
}
});
server.listen(port, hostname, () => {
console.log(`Server running at http://${hostname}:${port}/`);
});
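As a side note on the setup in the question: one way to avoid responses getting mixed up between requests (and the resulting "headers already sent" crash) is to pass req and res down the call chain as arguments instead of stashing them on module exports. A rough sketch, reusing the index.js/main.js names from the question:
// index.js - pass request and response explicitly instead of via exports
app.all('/backend/*/*', function (req, res) {
    var serviceType = req.params[0];
    var methodType = req.params[1];
    main.checkService(serviceType, methodType, req, res);
});

// main.js
function checkService(serviceType, methodType, req, res) {
    switch (serviceType) {
        case 'product':
            product.checkMethod(methodType, req, res); // product.js would forward req/res the same way
            break;
        default:
            console.log('no such service');
    }
}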

NodeJS: creating a hash and returning the value of a function

I have a list of tags that I need to extract; the list is called list.
I'm trying to find all 'og:*' meta tags that correspond to the list and are present in the fetched HTML. Then I need to return a hash to the user as JSON containing these meta tags. But the process method returns undefined rather than the hash.
var http = require('http');
var url = require('url');
var request = require('request');
var jsdom = require("jsdom");
var fs = require('fs');
var cssom = require('cssom');
var list = ['title', 'description']; //here the og-tags I need to extract
var meta = {};
function process(url) {
request(url, function (error, response, body) {
if (!error && response.statusCode == 200) {
jsdom.env({
html: body,
scripts: [
'http://code.jquery.com/jquery-1.5.min.js'
],
done: function(errors, window) {
var $ = window.$;
$('meta[property^="og:"]').each(function() {
for (var element in list) {
if ($(this).attr('property') == 'og:' + list[element]) {
meta[list[element]] = $(this).attr('content');
// this works well, if I do console.log(meta), I get the hash correctly filled.
}
}
});
}
});
}
});
return meta; // this is where the problem is: this returns undefined.
}
http.createServer(function (request, response) {
request.setEncoding('utf8');
response.writeHead(200, {'Content-Type': 'text/plain'});
process(url.parse(request.url, true).query['content'], function(result) {
console.log(result); // prints no result
});
response.end();
}).listen(8124);
console.log('Server running at http://0.0.0.0:8124');
Because request is asynchronous, you need to make process asynchronous as well. That means having process accept a callback parameter that it will call once meta is available. As it is now, process is returning meta before the request callback populates it.
function process(url, callback) {
request(url, function (error, response, body) {
if (!error && response.statusCode == 200) {
jsdom.env({
html: body,
scripts: [
'http://code.jquery.com/jquery-1.5.min.js'
],
done: function(errors, window) {
var $ = window.$;
$('meta[property^="og:"]').each(function() {
for (var element in list) {
if ($(this).attr('property') == 'og:' + list[element]) {
meta[list[element]] = $(this).attr('content');
}
}
});
callback(null, meta); // call the callback once, after all matching tags have been collected
}
});
} else {
callback(error);
}
});
}
http.createServer(function (request, response) {
request.setEncoding('utf8');
response.writeHead(200, {'Content-Type': 'text/plain'});
process(url.parse(request.url, true).query['content'], function(error, result) {
if (error) {
response.end('Error: ' + error);
} else {
response.end(JSON.stringify(result)); // send the collected meta tags back as JSON
}
});
}).listen(8124);
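If you'd rather work with promises, a rough sketch wrapping the same callback-style process function could look like this (processUrl is a hypothetical name, not part of the original code):
// Wrap the callback-style process(url, callback) shown above in a Promise.
function processUrl(targetUrl) {
    return new Promise(function (resolve, reject) {
        process(targetUrl, function (error, result) {
            if (error) return reject(error);
            resolve(result);
        });
    });
}

// processUrl('http://example.com/page-with-og-tags').then(function (meta) { console.log(meta); });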
