Set a global proxy for an npm project behind a corporate proxy - javascript

I have already found a lot of helpful tutorials on setting the proxy locally and globally to install packages and so on.
Now I have started a new project and figured out how to reuse the proxy settings:
#! /usr/bin/env node
var http = require("http");
var shell = require('shelljs');
var request = require('request');
var iplocation = require('iplocation');

// setup proxy (trim the trailing newline from the shell output)
var proxyUrl = shell.exec('npm config get proxy', { silent: true }).stdout.trim();
var proxiedRequest = request.defaults({
    'proxy': proxyUrl,
    'strictSSL': false // request's option name; npm's https-proxy/strict-ssl keys are not request options
});
// get location (works)
proxiedRequest('http://ipinfo.io/216.58.194.46', function (error, response, body) {
    console.log('error:', error);
    console.log('statusCode:', response && response.statusCode);
    console.log('body:', body);
});

// doesn't work
iplocation('56.70.97.8').then(res => {
    console.log(res.iplocation);
}).catch(err => {
    console.error(err);
});
Is there a way to set the proxy globally for the project so other npm packages can use it too?
I tried a local .npmrc file in the project folder, but it doesn't affect the runtime environment at all.
Any hints are welcome. Thanks!

These SO answers (SO answer1, SO answer2) explain different ways to set the npm proxy. See if they help you.
You could also add functions like proxy_on and proxy_off to your bashrc, which let you set the global npm config and toggle it from your command line.
Refer to this gist for the code.
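If the packages you use honor the conventional proxy environment variables (the request library does), one option is to export them once at the top of your entry script, before anything else runs. A minimal sketch, assuming npm config get proxy returns a usable URL:

#! /usr/bin/env node
var shell = require('shelljs');

// read the proxy npm already knows about; trim the trailing newline
var proxyUrl = shell.exec('npm config get proxy', { silent: true }).stdout.trim();

// npm prints 'null' when no proxy is configured
if (proxyUrl && proxyUrl !== 'null') {
    // many HTTP clients (request included) pick these up automatically
    process.env.HTTP_PROXY = proxyUrl;
    process.env.HTTPS_PROXY = proxyUrl;
}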


Is Dotenv still a necessary package with 6.x.x Node?

I recently set up a simple project with a .env file, called the env variables in my code with process.env.[variable name], and it totally worked without adding the dotenv package to my project.
Has Node incorporated this natively? I tried googling, but it didn't turn up any useful information, so I am kind of confused. I thought it would be easy to confirm or deny.
Here is my 'app':
// Load the SDK and UUID
var AWS = require('aws-sdk');
var uuid = require('node-uuid');

// Create an S3 client
var s3 = new AWS.S3({
    region: 'us-east-1',
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
});

// Create a bucket and upload something into it
var bucketName = 'node-sdk-sample-' + uuid.v4();
var keyName = 'hello_colorado.txt';
s3.createBucket({Bucket: bucketName}, function() {
    var params = {Bucket: bucketName, Key: keyName, Body: 'Coloradoical!'};
    s3.putObject(params, function(err, data) {
        if (err)
            console.log(err);
        else
            console.log("Successfully uploaded data to " + bucketName + "/" + keyName);
    });
});
And my package.json (without dotenv):
{
    "dependencies": {
        "aws-sdk": ">= 2.0.9",
        "node-uuid": ">= 1.4.1"
    }
}
Just a thought: could it be related to the fact that I am running my application from the command line with node simple.js? If so, can you explain why?
No, Node.js does not read .env files automatically.
Possible explanations for what's going on:
Perhaps the environment variable you are using is already set in your shell before you run the program.
Perhaps your AWS credentials are being stored/utilized some other way by your machine. (Based on our comment conversation, this looks like the case for you, but I'm including other things to generically help others who might be seeing something similar.)
Perhaps one of the modules you are loading is reading the .env file.
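For contrast, when the dotenv package is used, loading the file is an explicit step. A minimal sketch:

// nothing in .env is visible until you load it explicitly
require('dotenv').config(); // parses .env and fills in process.env

console.log(process.env.AWS_ACCESS_KEY_ID); // now populated from .env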
Suggested additional info from #maxwell:
AWS CLI help pages indicate that the precedence for configuration values is:
command line options
environment variables
configuration file
So sounds like the information was coming out of a configuration file for #maxwell.
I also hit a case where .env was read into process.env "automatically", which puzzled me a lot. I then found out it was because I had installed a zsh plugin called dotenv that automatically loads environment variables from the .env file when you cd into the directory.

node.js Error: connect ECONNREFUSED; response from server

I have a problem with this little program:
var http = require("http");

var request = http.request({
    hostname: "localhost",
    port: 8000,
    path: "/",
    method: "GET"
}, function(response) {
    var statusCode = response.statusCode;
    var headers = response.headers;
    var statusLine = "HTTP/" + response.httpVersion + " " + statusCode + " " + http.STATUS_CODES[statusCode];
    console.log(statusLine);
    for (var header in headers) {
        console.log(header + ": " + headers[header]);
    }
    console.log();
    response.setEncoding("utf8");
    response.on("data", function(data) {
        process.stdout.write(data);
    });
    response.on("end", function() {
        console.log();
    });
});
request.end(); // http.request sends nothing until end() is called
The result in the console is this:
events.js:141
throw er; // Unhandled 'error' event
^
Error: connect ECONNREFUSED 127.0.0.1:8000
at Object.exports._errnoException (util.js:870:11)
at exports._exceptionWithHostPort (util.js:893:20)
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1063:14)
I do not understand why this happens.
From your code, it looks like you are making a GET request to localhost (127.0.0.1:8000).
The problem is probably that you have not created a server on your local machine that listens on port 8000.
You have to set up a server on localhost that can serve your request.
Create server.js:
var express = require('express');
var app = express();

app.get('/', function (req, res) {
    res.send('Hello World!'); // This will serve your request to '/'.
});

app.listen(8000, function () {
    console.log('Example app listening on port 8000!');
});
Run server.js: node server.js
Then run the file that contains the code that makes the request.
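Separately, the crash itself ("Unhandled 'error' event") happens because nothing listens for errors on the request object. With a listener attached, a refused connection is reported instead of killing the process. A minimal sketch:

var http = require("http");

var request = http.request({ hostname: "localhost", port: 8000, path: "/" }, function (response) {
    console.log("HTTP " + response.statusCode);
    response.resume(); // drain the response so the socket is freed
});

// without this listener, connect ECONNREFUSED is thrown as an unhandled 'error' event
request.on("error", function (err) {
    console.error("Request failed:", err.message);
});

request.end(); // actually send the request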
Try using [::1] instead of localhost, make sure that the port is correct, and put the port inside the URL:
const request = require('request');

// id, filename, constants and callback come from the surrounding code
let json = {
    "id": id,
    "filename": filename
};
let options = {
    uri: "http://[::1]:8000" + constants.PATH_TO_API,
    // port: 443,
    method: 'POST',
    json: json
};
request(options, function (error, response, body) {
    if (error) {
        console.error("httpRequests : error " + error);
    }
    if (response) {
        let statusCode = response.statusCode; // the request library exposes statusCode
        if (callback) {
            callback(body);
        }
    }
});
I solved this problem by installing redis-server:
sudo apt-get install redis-server
After that, check the port and change it if necessary.
I had the same problem on my Mac, but in my case the problem was that I had not started the database (sudo mongod) first. The problem was solved when I ran sudo mongod in one console and, once it was up, made the connection to the server from another console.
Just run the following command in the node project:
npm install
It worked for me.
I ran the local MySQL database, but not in administrator mode, which threw this error.
If you have stopped the mongod.exe service from Task Manager, you need to restart it. In my case, after I stopped the service from Task Manager, it did not start again automatically.
I got this error because my AdonisJS server was not running before I ran the test. Running the server first fixed it.
If the problem is connecting to a Redis server (your redis.createClient call does not work although you are sure you passed the right parameters to it), simply type redis-server in another terminal window. That will probably fix the issue.
P.S.: Sorry if this duplicates another answer, but there is no accepted answer, so I wanted to share my solution too.
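For completeness, a minimal sketch using the classic node_redis API (the v2/v3 createClient(port, host) signature is assumed here) that surfaces the connection error instead of crashing, with Redis on its default port:

var redis = require('redis');
var client = redis.createClient(6379, '127.0.0.1');

client.on('error', function (err) {
    // fires with connect ECONNREFUSED when no redis-server is listening
    console.error('Redis error:', err.message);
});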
This is a slight error. I hit it when implementing an event server between my Node.js processes.
Check the package you're using to make requests from your server, such as axios; you may have relocated files or cleared cached data in the browser.
It's simple: in the relevant directory, reinstall the package. I was using axios, so I ran
npm i axios
then restarted the server:
npm start
This should work.
Use a proxy property in your code; it should work just fine:
const request = require('request');

request({
    'url': 'https://teamtreehouse.com/chalkers.json',
    'proxy': 'http://xx.xxx.xxx.xx'
}, function (error, response, body) {
    if (!error && response.statusCode == 200) {
        var data = body;
        console.log(data);
    }
});

Make Node server.js file safer by removing security information

I have a public git[hub] project and am now ready to switch it from development to production. We are in the research field, so we like to share our code too!
I have a server.js file that we start with node server.js, like in most tutorials.
In it there is connection information for the SQL server and the location of the HTTPS certificates. It looks something like this:
var https = require('https');
var express = require('express');
var ... = require('...');
var fs = require('fs');

var app = express();

var Sequelize = require('sequelize'),
    // ('database', 'username', 'password');
    sequelize = new Sequelize('db', 'uname', 'pwd', {
        logging: function () {},
        dialect: 'mysql',
        …
    });
…
var secureServer = https.createServer({
    key: fs.readFileSync('./location/to/server.key'),
    cert: fs.readFileSync('./location/to/server.crt'),
    ca: fs.readFileSync('./location/to/ca.crt'),
    requestCert: true,
    rejectUnauthorized: false
}, app).listen('8443', function() {
    var port = secureServer.address().port;
    console.log('Secure Express server listening at localhost:%s', port);
});
In PHP you can keep the connection information in another file and then import that file (and therefore its variables) into scope. Is this possible for the SQL connection details (db, uname, pwd) and the file locations of the certs (just to be safe), so that we can commit server.js to git and ignore/not track the secret file?
You can do this in a lot of different ways. One would be to use environment variables, like MYSQL_USER=foo MYSQL_PASSWD=bar node server.js, and then use process.env.MYSQL_USER in the code.
You can also read from files as you have suggested. You can do require("./config.json") and Node will automatically parse the JSON and import it as JavaScript constructs. You can then .gitignore config.json and perhaps provide an example.config.json.
If you want to support both of these at once, there is at least one library that lets you do this simply: nconf.
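A minimal nconf sketch, assuming a config.json next to server.js; precedence here is command-line arguments, then environment variables, then the file:

var nconf = require('nconf');

// later sources only fill in keys the earlier ones did not provide
nconf.argv()
     .env()
     .file({ file: 'config.json' });

var dbPassword = nconf.get('MYSQL_PASSWD'); // from argv, env, or config.json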
You can always just store the configuration information in a JSON file. Node natively supports JSON files, so you can simply require it:
var conf = require('./myconfig.json');
var key = fs.readFileSync(conf.ssl_keyfile);
There are also 3rd-party libraries for managing JSON config files that add various features. I personally like config.json because it lets you publish a sample config file with empty values and then, without modifying the sample config file, override those values using a .local.json file. That makes it easier to deal with config files in repos and also makes it easier to publish changes to the config file.
Here is a great writeup about how you should organise your deployments.
Basically, all application-critical variables, like the db password, secret keys, etc., should be accessible via environment variables.
You could do something like this:
// config.js
const _ = require('lodash');

const env = process.env.NODE_ENV || 'development';

const config = {
    default: {
        mysql: {
            poolSize: 5,
        },
    },
    development: {
        mysql: {
            url: 'mysql://localhost/database',
        },
    },
    production: {
        mysql: {
            url: process.env.DB_URI,
        },
    },
};

// merge the env-specific settings over the shared defaults
// (lodash has no _.default; _.merge is the intended call here)
module.exports = _.merge({}, config.default, config[env]);
// app.js
const config = require('./config');
// ....
const sequelize = new Sequelize(config.mysql.url);
Code is not perfect, but should be enough to get the idea.

simple node.js example in aws lambda

I am trying to send a simple request with aws lambda.
My module structure is as follows:
mylambda
|-- index.js
|-- node_modules
| |-- request
I zip the file up and it is uploaded to lambda.
Then I invoke it, and it returns the following error: "errorMessage": "Cannot find module 'index'"
Here are the contents of the index.js file:
var request = require('request');

exports.handler = function(event, context) {
    var headers = { 'User-Agent': 'Super Agent/0.0.1', 'Content-Type': 'application/x-www-form-urlencoded' };

    // Configure the request
    var options = {
        url: 'https://myendpoint',
        method: 'POST',
        headers: headers,
        form: {'payload': {"text":""} }
    };

    // Start the request
    request(options, function (error, response, body) {
        if (!error && response.statusCode == 200) {
            console.log(body);
        }
    });

    console.log('value1 =', event.key1);
    context.succeed(event.key1); // Echo back the first key value
};
Any help is appreciated. Thanks!
All working now. I had to increase the Timeout(s) value in advanced settings, as the function was taking longer than 3 seconds.
I also had to ensure my node modules were correctly installed; I had messed up the request module while trying to figure out what was wrong.
To reinstall the module, I deleted and re-installed request:
deleted node_modules,
ran npm init,
added the dependency "request": "*" to package.json (see the snippet below),
ran npm install. Then I compressed the zip and uploaded it. All working now. :)
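For reference, the package.json from that step would look something like this (the "*" range is what was described above; pinning a real version is safer):

{
    "dependencies": {
        "request": "*"
    }
}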
You have to zip and upload the contents of the project folder, not the root folder itself. For your example, zip the following and then upload the archive (e.g. run zip -r mylambda.zip index.js node_modules from inside the mylambda folder, so the archive does not contain the mylambda folder itself):
|-- index.js
|-- node_modules
| |-- request
Task: write an AWS Lambda function.
How I have seen us doing it:
1. We write code in the AWS editor and run it.
2. When it does not run as expected, we put a lot of consoles there (because we can't debug our code).
3. We wait a few seconds, then check the consoles in another window, and keep switching windows until we resolve the problem.
4. Switching windows takes a lot of time and effort.
Why can't we write the code on our own server (not in the AWS editor) and then send that code to AWS?
Yes, we can, using the new Function constructor (https://davidwalsh.name/new-function), a blessing in disguise.
Sample code:
let fs = require('fs');
const aws = require("aws-sdk");
const s3 = new aws.S3(),
    async = require('async');

aws.config = {
    "accessKeyId": "xyz",
    "secretAccessKey": "xyz",
    "region": "us-east-1"
};

fs.readFile('path to your code file', 'utf-8', async (err, code) => {
    if (err) return res.status(500).send({ err });

    // only this function has to go into the aws editor
    async function uploadToS3(docs) {
        let func = new Function('docs', 'aws', 's3', 'async', `${code}`);
        return func(docs, aws, s3, async);
    }

    // this line will call the aws lambda function from our server
    let resp = await uploadToS3(req.files.docs);
    return res.send({ resp });
});
The code I have written in my file:
docs = Array.isArray(docs) ? docs : [docs];
let funArray = [];

docs.forEach((value) => {
    funArray.push(function (callback) {
        s3.upload({
            Bucket: "xxx",
            Body: value.data,
            Key: "anurag" + "/" + new Date(),
            ContentType: value.mimetype
        }, function (err, res) {
            if (err) {
                return callback(err, null);
            }
            return callback(null, res);
        });
    });
});

return new Promise((resolve, reject) => {
    async.parallel(funArray, (err, data) => {
        resolve(data);
    });
});
Benefits:
The whole code is written in our familiar IDE, so it is easy to debug.
Only two lines have to go into the AWS editor (isn't that easy?).
It is quite easy to modify/update the code (the code lives in our repo, so we may not even have to open the AWS editor).
Yes, we can use other third-party libraries, but the above is written in pure JavaScript; no third-party library is used there.
Also, you don't have to deploy your code here.
Sometimes the total size of our libraries grows to 5 MB and the AWS Lambda editor stops supporting it. This resolves that problem as well: we send only the required functions from a library, not the whole library.
On average, an async library contains hundreds of functions, but we use one or two of them. So now we send only the functions we are going to use.
Note:
I searched for this a lot but found this kind of approach nowhere.
The above piece of code uploads the docs to the S3 bucket.

"Object is not a function" when passing a Node.js HTTP server object to Socket.IO

This was working a few months ago when I was creating an HTTPS server, but I switched to HTTP today when revisiting this application (not sure the switch is directly related; just mentioning it in case). This is where I create a server and pass it to socket.io:
init.js
var server = require(dirPath + "/custom_modules/server").serve(80);
var socket = require(dirPath + "/custom_modules/socket").socket(server);
It is important that I pass the server to socket.io this way (I know there are alternate ways of initializing the socket), because that is how it has to be done to encrypt the websocket connection when I switch back to serving HTTPS later.
So my server module:
//serve files
module.exports.serve = function(port) {
    //var server = https.createServer(options, function(req, res) { // SSL Disabled
    var server = http.createServer(function(req, res) {
        // Parse & process URL
        var reqInfo = url.parse(req.url, true, true), path = reqInfo.pathname;

        // Quickly handle preloaded requests
        if (preloaded[path])
            preloadReqHandler(req, res, preloaded[path], path);
        // Handle general requests
        else
            generalReqHandler(req, res, reqInfo);
    }).listen(port);

    return server; // this should be returning an http server object for socket.io
};
and my socket module:
module.exports.socket = function(server) {
    //create socket
    var socket = require(dirPath + '/node_modules/socket.io')(server);
    // ^ error
    // .. snip ..

    //handle client connection
    socket.on("connection", function(client) {
        // .. snip ..
    });
};
and my error:
/home/ec2-user/Sales_Freak/server/custom_modules/socket.js:17
var socket = require(dirPath + '/node_modules/socket.io')(server);
^
TypeError: object is not a function
at Object.module.exports.socket (/home/ec2-user/Sales_Freak/server/custom_modules/socket.js:17:59)
at Object.<anonymous> (/home/ec2-user/Sales_Freak/server/init.js:16:59)
Assume all of the necessary Node.js modules are required properly above. What silly mistake am I making today?
The exported module is not a function. Refer to your previous statement:
var socket = require(dirPath + "/custom_modules/socket").socket(server);
And compare that to your current statement:
var socket = require(dirPath + '/node_modules/socket.io')(server);
I think you meant to do this instead:
var socket = require(dirPath + '/node_modules/socket.io').socket(server);
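For reference, socket.io's export is itself callable, so the conventional attach pattern looks like the sketch below (a minimal sketch; if this throws "object is not a function", the installed socket.io module is likely broken or missing, as the next answer suggests):

var http = require('http');
var server = http.createServer().listen(8443);

// a healthy socket.io install exports a callable factory
var io = require('socket.io')(server);

io.on('connection', function (client) {
    console.log('client connected:', client.id);
});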
This might or might not be helpful to others, but my problem was that I had changed the directory of my Node.js server files, and socket.io wasn't installed in the new location.
The module was there in node_modules but not actually installed. I'm not sure exactly how npm module installation works, but the module existed and therefore didn't throw a not-found error; it just didn't act like it was really there until I ran npm install socket.io.
If you get this error in this situation, you forgot to install socket.io.
