I am trying to install protractor in my Mac. The installation is done via command line but one of the scripts is failing due to connectivity problems. The main problem is that I am behind a corporate proxy server.
I set the proxy server in my console and npm is also configured with the correct proxy settings.
The script that fails is here
and it contains the following
#!/usr/bin/env node
var fs = require('fs');
var http = require('http');
var https = require('https');
var os = require('os');
var url = require('url');
var AdmZip = require('adm-zip');
// Download the Selenium Standalone jar and the ChromeDriver binary to
// ./selenium/
// Thanks to http://www.hacksparrow.com/using-node-js-to-download-files.html
// for the outline of this code.
// Pinned artifact URLs. NOTE(review): the ChromeDriver URLs are https://,
// which matters for how they are fetched below.
var SELENIUM_URL =
'http://selenium.googlecode.com/files/selenium-server-standalone-2.35.0.jar';
var CHROMEDRIVER_URL_MAC =
'https://chromedriver.googlecode.com/files/chromedriver_mac32_2.2.zip';
var CHROMEDRIVER_URL_LINUX32 =
'https://chromedriver.googlecode.com/files/chromedriver_linux32_2.2.zip';
var CHROMEDRIVER_URL_LINUX64 =
'https://chromedriver.googlecode.com/files/chromedriver_linux64_2.2.zip';
var CHROMEDRIVER_URL_WINDOWS =
'https://chromedriver.googlecode.com/files/chromedriver_win32_2.2.zip';
// Everything is downloaded into ./selenium/ relative to the cwd.
var DOWNLOAD_DIR = './selenium/';
var START_SCRIPT_FILENAME = DOWNLOAD_DIR + 'start';
// Filled in later based on os.type()/os.arch().
var chromedriver_url = '';
// Command written into the ./selenium/start helper script.
var start_script = 'java -jar selenium/selenium-server-standalone-2.35.0.jar';
// Create the download directory if it does not already exist.
// NOTE(review): if DOWNLOAD_DIR exists but is a regular file, mkdirSync
// will throw EEXIST here.
if (!fs.existsSync(DOWNLOAD_DIR) || !fs.statSync(DOWNLOAD_DIR).isDirectory()) {
fs.mkdirSync(DOWNLOAD_DIR);
}
console.log(
'When finished, start the Selenium Standalone Server with ./selenium/start \n');
// Function to download file using HTTP.get
// Download file_url into DOWNLOAD_DIR via HTTP(S) GET.
// file_url: absolute URL of the artifact to fetch.
// callback: optional; invoked with the local file path once the file has
//   been fully flushed to disk.
var download_file_httpget = function(file_url, callback) {
  console.log('downloading ' + file_url + '...');
  var parsed = url.parse(file_url);
  // The original always used the plain `http` client on port 80, which
  // silently breaks the https:// ChromeDriver downloads. Pick the client
  // and default port from the URL scheme instead.
  var is_https = parsed.protocol === 'https:';
  var client = is_https ? https : http;
  var options = {
    host: parsed.host,
    port: parsed.port || (is_https ? 443 : 80),
    path: parsed.pathname
  };
  var file_name = parsed.pathname.split('/').pop();
  var file_path = DOWNLOAD_DIR + file_name;
  var file = fs.createWriteStream(file_path);
  client.get(options, function(res) {
    res.on('data', function(data) {
      file.write(data);
    }).on('end', function() {
      // Wait for the write stream to flush before announcing success.
      file.end(function() {
        console.log(file_name + ' downloaded to ' + file_path);
        if (callback) {
          callback(file_path);
        }
      });
    });
  }).on('error', function(err) {
    // Surface network failures (e.g. a blocking corporate proxy) instead of
    // crashing the install with an unhandled 'error' event.
    console.error('Error downloading ' + file_url + ': ' + err.message);
  });
};
download_file_httpget(SELENIUM_URL);
if (!(process.argv[2] == '--nocd')) {
if (os.type() == 'Darwin') {
chromedriver_url = CHROMEDRIVER_URL_MAC;
} else if (os.type() == 'Linux') {
if (os.arch() == 'x64') {
chromedriver_url = CHROMEDRIVER_URL_LINUX64;
} else {
chromedriver_url = CHROMEDRIVER_URL_LINUX32;
}
} else if (os.type() == 'Windows_NT') {
chromedriver_url = CHROMEDRIVER_URL_WINDOWS;
}
var chromedriver_zip = chromedriver_url.split('/').pop();
start_script += ' -Dwebdriver.chrome.driver=./selenium/chromedriver';
download_file_httpget(chromedriver_url, function(file_name) {
var zip = new AdmZip(file_name);
zip.extractAllTo(DOWNLOAD_DIR);
if (os.type() != 'Windows_NT') {
fs.chmod(DOWNLOAD_DIR + 'chromedriver', 0755);
}
});
}
var start_script_file = fs.createWriteStream(START_SCRIPT_FILENAME);
start_script_file.write(start_script);
start_script_file.end(function() {
fs.chmod(START_SCRIPT_FILENAME, 0755);
});
How can we modify this script in order to resolve the connectivity issue?
In download_file_httpget, you're calling http.get(...). That makes a direct connection to the target server.
Configuring a proxy in npm only affects npm. Node's http module has no concept of proxy servers.
If you need to make a request through a proxy, consider using the request module, which does support proxies.
Related
Introduction
I have cloned the project from this git link here
The app runs fine on localhost. I want to deploy this demo project on a Windows server, where it will run on https://localhost:8443. I have no idea how to deploy this specific demo project, although I have successfully deployed another simple Node application on IIS.
What could be causing the problem? I think it is in the server.js script:
var path = require('path');
var url = require('url');
var express = require('express');
var minimist = require('minimist');
var ws = require('ws');
var kurento = require('kurento-client');
var fs = require('fs');
var https = require('https');
// CLI overrides: as_uri is the address this app serves on; ws_uri is the
// Kurento Media Server websocket endpoint.
var argv = minimist(process.argv.slice(2), {
default: {
as_uri: 'https://localhost:8443/',
ws_uri: 'ws://93.104.213.28:8888/kurento'
}
});
// TLS material for the https server; paths are relative to the cwd, so the
// process must be started from the project root.
var options =
{
key: fs.readFileSync('keys/server.key'),
cert: fs.readFileSync('keys/server.crt')
};
var app = express();
/*
* Definition of global variables.
*/
var idCounter = 0;
var candidatesQueue = {};
var kurentoClient = null;
var presenter = null;
var viewers = [];
var noPresenterMessage = 'No active presenter. Try again later...';
/*
* Server startup
*/
// Listen on the port embedded in as_uri (default 8443).
var asUrl = url.parse(argv.as_uri);
var port = asUrl.port;
var server = https.createServer(options, app).listen(port, function() {
console.log('Kurento Tutorial started');
console.log('Open ' + url.format(asUrl) + ' with a WebRTC capable browser');
});
var wss = new ws.Server({
server : server,
path : '/one2many'
}); .....other code
I have been trying this for 2 days but with no luck. Any expert help would be appreciated.
Thanks for your time.
I'm trying to figure out how I can call a gRPC server from a JavaScript function in my Node.js application and update my page, but I don't know how to manage it.
This is the proto file:
syntax = "proto3";
package helloworld;
// The greeting service definition.
service Greeter {
// Sends a greeting; unary request/response.
rpc SayHello (HelloRequest) returns (HelloReply) {}
}
// The request message containing the user's name.
message HelloRequest {
string name = 1;
}
// The response message containing the greeting.
message HelloReply {
string message = 1;
}
this is the server:
var PROTO_PATH = __dirname + '/helloworld.proto';
var grpc = require('grpc');

// Load the Greeter service definition straight from the proto file.
var hello_proto = grpc.load(PROTO_PATH).helloworld;

// Greeter.SayHello handler: reply with a greeting built from the
// request's `name` field.
function sayHello(call, callback) {
  var reply = {message: 'Hello ' + call.request.name};
  callback(null, reply);
}

// Assemble the server, bind it on all interfaces (insecure transport)
// and start serving.
function main() {
  var server = new grpc.Server();
  server.addProtoService(hello_proto.Greeter.service, {
    sayHello: sayHello
  });
  server.bind('0.0.0.0:50051', grpc.ServerCredentials.createInsecure());
  server.start();
}

main();
this is the javascript function :
var PROTO_PATH = __dirname + '/helloworld.proto';
var grpc = require('grpc');
var hello_proto = grpc.load(PROTO_PATH).helloworld;

// Calls Greeter.SayHello and writes the reply into the #para element.
// NOTE(review): this mixes `require` (Node) with `document` (browser); it
// can only work in a browser-bundled context — confirm the target runtime.
function test() {
  var client = new hello_proto.Greeter('localhost:50051',
    grpc.credentials.createInsecure());
  // A unary call takes (request, callback); the original passed only the
  // callback, so no HelloRequest message was ever supplied.
  client.sayHello({name: 'world'}, function(err, response) {
    if (err) {
      // Don't dereference `response` on a failed RPC.
      console.log('RPC failed:', err);
      return;
    }
    console.log('Greeting:', response.message);
    document.getElementById("para").innerHTML = 'Mex Received from grpc server ' + response.message;
  });
}
when I click a button on my page i call the function test and i would like to update the element(id=para).
Any idea?
var request = require('request');
var cheerio = require('cheerio');

// Fetch the proxy-list page and print the class of the loading row.
request('http://www.gatherproxy.com/proxylist/anonymity/?t=Elite', function (error, response, html) {
  if (!error && response.statusCode == 200) {
    var $ = cheerio.load(html);
    // The HTML served to non-browser clients has no <tbody> (the browser
    // inserts it), so the selector must not include it.
    var temp = $('#tblproxy tr.loading-row');
    console.log(temp.attr('class'));
  }
});
The webpage is at http://www.gatherproxy.com/zh/proxylist/anonymity/?t=Elite
I want to get this element and its selector is #tblproxy > tbody > tr.loading-row
I tried the same thing in the google console,
var s = $('#tblproxy > tbody > tr.loading-row')
undefined
s.attr('class')
"loading-row"
But it doesn't work in the context of cheerio, the output for the program is undefined, any idea ?
I noticed that the element, tbody, that you're trying to query is loaded asynchronously. This is beyond the scope of what the request module is capable of. You can use phantomjs in simulating a web page in a headless manner and get the html from a web page module. If you want to create more customized web page modules you can refer to the phantomjs documentation.
Fork this github repo demo .
First, create a webpage module to get the html of a specific page.
phantom/request.js
'use strict';
// PhantomJS web-page module: opens the URL given as the first CLI argument,
// prints the rendered HTML to stdout, then exits. Runs inside the PhantomJS
// runtime, not Node ('webpage' and 'system' are PhantomJS built-ins).
var page = require('webpage').create();
var system = require('system');
page.open(system.args[1], function(status) {
// Serialize the live DOM (after in-page JS has run) rather than the raw
// response body — this is why async-loaded elements become visible.
console.log(page.evaluate(function() {
return document.documentElement.innerHTML;
}));
phantom.exit();
});
Second, create a phantomjs cli wrapper for all web page modules inside the phantom directory.
lib/phantom.js
'use strict';
var path = require('path');
var spawn = require('child_process').spawn;
var phantomjs = require('phantomjs');
var fs = require('fs');
// Absolute path to the phantomjs binary shipped with the npm package.
var binPath = phantomjs.path;
var slice = Array.prototype.slice;
// Directory holding the web-page modules exposed as methods below.
var phantomPath = path.join(
__dirname,
'..',
'phantom'
);
// CLI wrapper: spawn phantomjs with the given arguments and hand its output
// to the trailing callback(err, stdout).
exports = module.exports = function() {
  var args = slice.call(arguments);
  var callback = args.pop();
  var command = spawn(binPath, args);
  var stdout = '';
  var stderr = '';
  // Buffer both streams: 'data' can fire many times for a large page, and
  // the original invoked the callback once per chunk.
  command.stdout.on('data', function(data) {
    stdout += data.toString();
  });
  command.stderr.on('data', function(data) {
    stderr += data.toString();
  });
  // Invoke the callback exactly once, after the process has exited and the
  // streams are closed.
  command.on('close', function() {
    if (stderr.length > 0) {
      callback({ message: stderr }, null);
    } else {
      callback(null, stdout);
    }
  });
};
// create methods base on the ./phantom directory web page modules
fs.readdirSync(phantomPath).reduce(function(context, filename) {
var index = path.basename(filename, '.js');
context[index] = function() {
exports.apply(null, [path.join(phantomPath, filename)].concat(slice.call(arguments)));
};
}, exports);
Lastly, use the lib/phantom.js script's request method to get the html page.
index.js
'use strict';

// Fetch the fully rendered proxy-list page through the phantomjs wrapper,
// then query it with cheerio.
var phantom = require('./lib/phantom');
var cheerio = require('cheerio');

var address = 'http://www.gatherproxy.com/proxylist/anonymity/?t=Elite';

phantom.request(address, function(err, html) {
  // Bail out early on any phantomjs failure.
  if (err) {
    console.log('error');
    return;
  }

  var $ = cheerio.load(html);
  var loadingRow = $('#tblproxy tbody tr.loading-row');
  console.log(loadingRow.attr('class'));
});
From the code source of the page, there is no tbody in #tblproxy, so remove it from the selector:
var temp = $('#tblproxy tr.loading-row');
Update
Following bublik42's comment, if a tbody appears randomly, you can use find():
var temp = $('#tblproxy').find('tr.loading-row');
OK, I am just starting to learn Node.js and I am having a little difficulty getting a good grasp on its async nature and on when/how to use callbacks to get data passed along like I need it.
The concept of what i am trying to build is this. I have a small node.js app that uses the FS package - defined as var fs = require("fs"). The app is responding to localhost:4000 right now. When i hit that url, the app will use fs.readdir() to get all of the virtual host files in the directory that i pass to readdir().
Next, the app loops through those files and parses each one line by line and word by word (quick and dirty for now). I am using fs.readFile() to read the file and then literally doing lines = data.toString().split("\n") and then var words = lines[l].split(" ") to get to the data in the file i need. Each virtual host file looks something like this:
<VirtualHost *:80>
ServerName some-site.local
DocumentRoot "/path/to/the/docroot"
<Directory "/path/to/the/docroot">
Options Indexes FollowSymLinks Includes ExecCGI
AllowOverride All
Require all granted
</Directory>
ErrorLog "logs/some-site.local-error_log"
</VirtualHost>
Here is my main.js file (routes file):
// Routes: GET / walks the Apache vhost directory and returns the parsed
// vhost data for every file as JSON.
// NOTE: the original ended the var chain with ';' after the directoryReader
// require, which made the following fileReader assignment an implicit
// global — fixed by keeping the chain together.
var express = require("express"),
    router = express.Router(),
    async = require("async"),
    directoryReader = require(__dirname + "/../lib/directoryReader"),
    fileReader = require(__dirname + "/../lib/fileReader");

router.get("/", function(req, res) {
  var response = [];
  directoryReader.read(function(files) {
    // Parse every vhost file in parallel; collect non-empty results.
    async.each(files, function(file, callback) {
      fileReader.read(file, function(data) {
        if (data.length > 0) {
          response.push(data);
        }
        callback();
      });
    }, function(err) {
      if (err) throw err;
      // All files processed: emit the accumulated array.
      res.json(response);
    });
  });
});

module.exports = router;
My directoryReader.js file:
var fs = require("fs"),
    fileReader = require(__dirname + "/../lib/fileReader");

// Reads the Apache sites-available directory and passes the array of vhost
// file names to the callback. Throws on fs errors (personal-use script).
var directoryReader = {
  read: function(callback) {
    fs.readdir("/etc/apache2/sites-available", function (err, files) {
      if (err) throw err;
      // Hand back a copy of the listing. The original used
      // `for (f in files)`, which leaked `f` as an implicit global.
      callback(files.slice());
    });
  }
};

module.exports = directoryReader;
And my fileReader.js file:
var fs = require("fs");
// Parses an Apache vhost file into an array of site records
// ({servername, documentroot, errorlog, gitbranch}).
// NOTE(review): `l` and `w` below are implicit globals (no var/let), and
// for...in yields string indices — `w++` only works because ++ coerces to
// a number. Worth tightening up.
var fileReader = {
read: function(file, callback) {
fs.readFile("/etc/apache2/sites-available/" + file, { encoding: "utf8" }, function (err, data) {
if (err) throw err;
// Directive names we capture; compared lowercased.
var vHostStats = ["servername", "documentroot", "errorlog"],
lines = data.toString().split("\n"),
output = [];
for(l in lines) {
var words = lines[l].split(" ");
for(w in words) {
// "<VirtualHost" opens a new site record; skip the next word.
if (words[w].toLowerCase() == "<virtualhost") {
var site = {
"servername" : "",
"documentroot" : "",
"errorlog" : "",
"gitbranch" : ""
}
w++;
}
// A known directive: the next word is its value.
if (vHostStats.indexOf(words[w].toLowerCase()) !== -1) {
var key = words[w].toLowerCase();
w++;
site[key] = words[w];
}
// "</VirtualHost>" closes the record; resolve its Git branch.
if (words[w].toLowerCase() == "</virtualhost>" && site.documentroot != "") {
w++;
output.push(site);
var cmd = "cd " + site["documentroot"] + " && git rev-parse --abbrev-ref HEAD";
// Placeholder from the question: how to run `cmd` and capture the branch.
var branch = ...; // get Git branch based on the above command
site["gitbranch"] = branch;
}
}
}
callback(output);
});
}
};
module.exports = fileReader;
All of this code will spit out json. This all works fine, expect for one part. The line in the fileReader.js file:
var branch = ...; // get Git branch based on the above command
I am trying to get this code to run a shell command and get the Git branch based on the document root directory. I then want to take the returned branch and add the value to the gitbranch property of the current site object during the loop. Hope this makes sense. I know there are probably questions on SO that cover something similar to this and I have looked at many of them. I fear I am just not educated enough in Node.js yet to apply the answers to those SO questions to my particular use case.
Please let me know if there's anything I can add that can help anyone answer this question. I should note that this app is for personal use only, so the solution really just has to work, not be super elegant.
UPDATE: (5/1/2015)
Probably not the best solution but i got what i wanted by using the new execSync added to v0.12.x
// When </VirtualHost> closes a site record, run git synchronously in the
// docroot to resolve the checked-out branch, then record the site.
// NOTE(review): per the surrounding text, `sh` is presumably an alias for
// child_process.execSync (new in Node v0.12) — confirm the import.
if (words[w].toLowerCase() == "</virtualhost>" && site.documentroot != "") {
var cmd = "cd " + site["documentroot"] + " && git rev-parse --abbrev-ref HEAD";
var branch = sh(cmd, { encoding: "utf8" });
// execSync output includes a trailing newline; trim it off.
site["gitbranch"] = branch.toString().trim();
w++;
output.push(site);
}
I am really stuck by nodejs cache system. I have this structure for my project :
Project/
apps/
jobs_processor/
app.js
processors.js
processors/
libs/
queue_manager.js
queue_manager.js requires processors.js:
var processors = require("../apps/jobs_processor/processors.js");
app.js also requires processors.js:
var processors = require("./processors.js");
If I take into account the documentation, I must have the same path may be to obtain the same object, is that right ? If so, how can I achieve that (have the same path) ?
Thanks.
EDIT:
I found a solution to my problem.
Here is the first version of queue_manager.js file
var _ = require("lodash");
var Utils = require("./utilities");
var Processors = require("../apps/jobs_processor/processors");
var Logger = require("./logger");
// Module-level cache: job name (spaces -> underscores) => processor.
var QUEUES_CACHE = {};
exports.createJob = createJob;
exports.getCacheObject = getCacheObject;
// Looks up (and caches) the processor registered under `name`, then
// enqueues `data` on its queue. Throws if no processor matches.
function createJob(name, data) {
var cacheId = name.replace(/ /g, "_");
Logger.info("Cache ID: " + cacheId);
// Populate the cache entry on first use by scanning all processors.
// Note: if several processors share a name, the last match wins.
if (!QUEUES_CACHE[ cacheId ]) {
_.each(Processors, function (processor) {
Logger.debug("PROCESSOR NAME: " + processor.name);
Logger.debug("JOB NAME: " + name);
if (processor.name === name)
QUEUES_CACHE[ cacheId ] = processor;
});
if (!QUEUES_CACHE[ cacheId ])
throw new Error("Processor for job \"" + name + "\" not found.");
}
Logger.debug(Object.keys(QUEUES_CACHE));
return QUEUES_CACHE[ cacheId ].queue.add(data);
}
// Exposes the raw cache (used for debugging the require-cache issue).
function getCacheObject() {
return QUEUES_CACHE;
}
And now the last version of the same file
var _ = require("lodash");
var Utils = require("./utilities");
var Logger = require("./logger");

exports.createJob = createJob;

// Looks up the processor registered under `name` and enqueues `data` on its
// queue. The processors module is re-required on every call so the freshest
// module instance is used regardless of require-cache path differences.
function createJob(name, data) {
  var Processors = require("../apps/jobs_processor/processors");
  var processor;

  _.each(Processors, function (element) {
    Logger.debug("Processor name: " + element.name);
    if (element.name === name)
      processor = element;
  });

  // Fail with a clear message instead of a TypeError on `processor.queue`
  // when nothing matches (same behaviour as the earlier version above).
  if (!processor)
    throw new Error("Processor for job \"" + name + "\" not found.");

  return processor.queue.add(data);
}
Each time that i called createJob method, I require the processors module which is an array of each job processor that I have created.
Node.js will resolve the path before caching the module.
As long as your relative paths resolve to the same absolute path on disk, you're fine.