I've been working on a command-line app in Node that does some filesystem reads and runs an Express app. All my tests pass locally, but Travis seems to have an issue when building (timing out, which is a Jasmine Node thing). Here's what I've got:
.travis.yml
language: node_js
node_js:
- '>=0.10'
before_script:
- npm install -g grunt-cli
- npm link
script:
- "grunt --verbose"
sickmerge_spec.js
// Set the test environment
process.env.NODE_ENV = 'test';
// Dependencies
var exec = require('child_process').exec,
fs = require('fs'),
version = require('../package.json').version,
request = require('request');
// Helper function to execute the sickmerge CLI
function execSickmerge (options, callback) {
exec('sickmerge ' + options, function(err, stdout) {
console.log(stdout);
callback(stdout);
});
}
// ... Lots of other tests that exec the command, test below fails
describe('web application services', function() {
it('should respond with a 200 when http requested', function(done) {
execSickmerge('./spec/fixtures/javascript.js', function() {
request('http://127.0.0.1:3000/', function(err, response) {
expect(response.statusCode).toEqual(200);
done();
});
});
});
});
The command code
#! /usr/bin/env node
/*
* sickmerge
* https://github.com/jgriffith/sickmerge
*
* Copyright (c) 2013 jgriffith
* Licensed under the MIT license.
*/
/*
* Module Dependencies/Setup
*/
var fs = require('fs'),
program = require('commander'),
syntaxOptions = require('./lib/syntax'),
version = require('./package.json').version,
fileLocation,
env = require('./lib/config')();
// Program Setup and Options
program
.version(version)
.usage('[options] <conflicted file location>')
.option('-h, --hostname [value]', 'The host URL you wish to query in the browser (defaults to localhost).')
.option('-o, --syntaxes', 'Will show the available syntax options for syntax highlighting.')
.option('-p, --port <n>', 'The port you wish to deploy on (defaults to 3000).', parseInt)
.option('-s, --syntax [value]', 'The language of the file for syntax highlighting (optional), defaults to no highlighting. Run with "-o" to see the available options.')
.option('-m, --merge [value]', 'Specify the initial view in the middle (merged) window on instantiation. Valid options are "yours", "theirs", and "both". Defaults to "yours"')
.parse(process.argv);
// Store the file location so we can persist later
fileLocation = program.args[0];
// For printing available syntax options
function printSyntaxOptions () {
console.log('Available options include:\n' + syntaxOptions.showSupportedSyntaxes());
return;
}
// If the user wants to see the syntax options
if (program.syntaxes) {
printSyntaxOptions();
return;
}
// No File given, print help since it's required
if (!fileLocation) {
program.outputHelp();
return;
}
// Invalid merge option
if (program.merge && ['yours', 'theirs', 'both'].indexOf(program.merge) === -1) {
console.log('You\'ve specified an invalid initial merged view: "' + program.merge + '".\nPlease use either "yours", "theirs", or "both"');
return;
}
// Invalid syntax option
if (program.syntax && syntaxOptions.indexOf(program.syntax) === -1) {
console.log('You\'ve specified an invalid syntax option: ' + program.syntax);
printSyntaxOptions();
return;
}
// Read the passed file, strip the git comments, and build the web service
fs.readFile(fileLocation, function(err, result) {
if (err) return console.log('There was an error loading your file! ' + err);
// Setup parameters, load additional files
var hostname = (program.hostname) ? program.hostname : 'localhost',
port = (program.port) ? program.port : 3000,
merge = (program.merge) ? program.merge : 'yours',
extension = fileLocation.split('.').pop(),
syntax = (program.syntax) ? program.syntax : syntaxOptions.getSyntax(extension),
threeWayMerge = require('./lib/gitStrip')(result.toString(), merge),
express = require('express'),
app = express(),
path = require('path'),
open = require('open');
// Web server setup
app.use(express.bodyParser());
app.set('views', __dirname + '/views');
app.set('view engine', 'ejs');
app.use(express.static(path.join(__dirname, 'public')));
// Build the base route for the page
app.get('/', function (req, res) {
res.render('editor', {
title: fileLocation,
syntax: syntax,
body: threeWayMerge
});
});
// Post route for saving the file (this is final) and closes the process
app.post('/save', function (req, res) {
var content = req.body.content;
fs.writeFile(fileLocation, content, function (err) {
if (err) throw new Error('There was an issue saving your file: ' + err);
res.send('complete');
process.exit();
});
});
// Get route for cancelling the file (this is final) and closes the process
app.get('/cancel', function (req, res) {
res.send('terminated');
process.exit();
});
console.log(
'Sickmerge is waiting for changes.\n' +
'Visit http://' + hostname + ':' + port + '/ in your browser to make changes\n' +
'Pressing "Save" or "Cancel" will do the action and close the sickmerge program.\n'+
'Press CTRL+C if you\'ve closed your web browser and didn\'t click either of those buttons.'
);
app.listen(port);
if (env !== 'test') open('http://' + hostname + ':' + port);
});
You can also see the code on GitHub, and the builds on Travis. I originally thought app.listen() was keeping the process from returning to the console, but removing it didn't work either.
Does Travis just block certain ports? Does it not allow filesystem queries?
It looks like the build passes with the following configuration, which moves the grunt-cli install from before_script into before_install:
.travis.yml
language: node_js
node_js:
- '>=0.10'
before_install:
- npm install -g grunt-cli
before_script:
- npm link
script:
- "grunt --verbose"
To answer "Does it not allow filesystem queries?": Travis CI does allow filesystem queries.
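If a build still times out even with the install order fixed, it's also worth knowing that jasmine-node lets you raise the async timeout per spec. A minimal sketch of the failing spec with a longer timeout, assuming jasmine-node's optional timeout argument to it() (the 10000 ms value is just an example):
it('should respond with a 200 when http requested', function(done) {
  execSickmerge('./spec/fixtures/javascript.js', function() {
    request('http://127.0.0.1:3000/', function(err, response) {
      expect(response.statusCode).toEqual(200);
      done();
    });
  });
}, 10000); // per-spec async timeout in milliseconds (jasmine-node)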
Related
I'm having a problem where the following code returns Cannot read property 'swaggerUi' of undefined each time I run node app.js. The code below is the entirety of app.js.
I have tried a bunch of different npm installs, computer restarts, and all of that. Nothing seems to change it.
var SwaggerExpress = require('swagger-express-mw');
var app = require('express')();
module.exports = app; // for testing
var config = {
appRoot: __dirname // required config
};
SwaggerExpress.create(config, function(err, swaggerExpress) {
if (err) { throw err; }
// install middleware
swaggerExpress.register(app);
var port = process.env.PORT || 10010;
app.use(swaggerExpress.runner.swaggerTools.swaggerUi());
app.listen(port, function() {
console.log('Server running at http://127.0.0.1:' + port + '/');
});
if (swaggerExpress.runner.swagger.paths['/hiMom']) {
console.log('try this: http://127.0.0.1:' + port + '/docs');
}
});
Here is a GitHub sample link that might be helpful:
sample-swagger-for-nodejs.
SOLVED: I'm not sure what the problem was, but getting a fresh clone of the GitHub repo solved the problem.
Hi, I'm using gulp and the nodemon utility to automate my build process. It was working until I manually debugged my application using VS Code. I don't want to debug it now; I just want to run it normally.
I'm running this command to start gulp and rebuild whenever a JS file changes, but I'm getting an error.
I checked some threads that suggest running set DEBUG=express:* & node bin/www, and that does work, but I don't want to do that (and I don't know what it does). I want to use gulp.
$ gulp
[18:11:31] Using gulpfile D:\api\gulpfile.js
[18:11:31] Starting 'default'...
[18:11:31] Finished 'default' after 101 ms
[18:11:31] [nodemon] 1.12.0
[18:11:31] [nodemon] to restart at any time, enter `rs`
[18:11:31] [nodemon] watching: *.*
[18:11:31] [nodemon] starting `node ./bin/www`
'\"node .\bin\www\"' is not recognized as an internal or external command,
operable program or batch file.
[18:11:31] [nodemon] app crashed - waiting for file changes before starting...
gulpfile.js
const gulp = require("gulp"),
nodemon = require("gulp-nodemon");
gulp.task("default", () => {
nodemon({ ext: "js" });
});
www
#!/usr/bin/env node
/**
* Module dependencies.
*/
var app = require('../app');
var debug = require('debug')('cryptocurrency-api:server');
var http = require('http');
/**
* Get port from environment and store in Express.
*/
var port = normalizePort(process.env.PORT || '3000');
app.set('port', port);
/**
* Create HTTP server.
*/
var server = http.createServer(app);
/**
* Listen on provided port, on all network interfaces.
*/
server.listen(port);
server.on('error', onError);
server.on('listening', onListening);
/**
* Normalize a port into a number, string, or false.
*/
function normalizePort(val) {
var port = parseInt(val, 10);
if (isNaN(port)) {
// named pipe
return val;
}
if (port >= 0) {
// port number
return port;
}
return false;
}
/**
* Event listener for HTTP server "error" event.
*/
function onError(error) {
if (error.syscall !== 'listen') {
throw error;
}
var bind = typeof port === 'string'
? 'Pipe ' + port
: 'Port ' + port;
// handle specific listen errors with friendly messages
switch (error.code) {
case 'EACCES':
console.error(bind + ' requires elevated privileges');
process.exit(1);
break;
case 'EADDRINUSE':
console.error(bind + ' is already in use');
process.exit(1);
break;
default:
throw error;
}
}
/**
* Event listener for HTTP server "listening" event.
*/
function onListening() {
var addr = server.address();
var bind = typeof addr === 'string'
? 'pipe ' + addr
: 'port ' + addr.port;
debug('Listening on ' + bind);
}
Something is probably broken in nodemon version 1.12.0.
I had the same problem and solved it by removing nodemon and reinstalling an older version.
Remove nodemon using:
yarn global remove nodemon
Then reinstall, this time specifying an older version:
yarn global add nodemon@1.11.0
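If you would rather stay on your current nodemon than downgrade, it may also help to tell gulp-nodemon explicitly which script to run instead of letting it infer the node ./bin/www command. A minimal sketch using gulp-nodemon's script option; whether it avoids the Windows quoting error above is an assumption, not something I've verified:
const gulp = require("gulp"),
    nodemon = require("gulp-nodemon");
gulp.task("default", () => {
  // Point nodemon straight at the entry script rather than letting it
  // derive the start command on its own.
  nodemon({ script: "./bin/www", ext: "js" });
});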
I'm currently using the crypto module to hash things. It was working for a while, then I started getting an error.
Here is the foundation of my server:
process.stdout.write('\033c'); // Clear the console on startup
var
express = require("express"),
app = express(),
http = require("http").Server(app),
io = require("socket.io")(http),
path = require("path"),
colorworks = require("colorworks").create(),
fs = require("fs"),
crypto = require("crypto");
function md5(msg){
return crypto.createHash("md5").update(msg).digest("base64");
}
function sha256(msg) {
return crypto.createHash("sha256").update(msg).digest("base64");
}
http.listen(443, function(){
// Create the http server so it can be accessed via 127.0.0.1:443 in a web browser.
console.log("NJ project webserver is running on port 443.");
// Notify the console that the server is up and running
});
app.use(express.static(__dirname + "/public"));
app.get("/", function(request, response){
response.sendFile(__dirname + "/public/index.html");
});
I am aware that these functions are creating the problem:
function md5(msg){
return crypto.createHash("md5").update(msg).digest("base64");
}
function sha256(msg) {
return crypto.createHash("sha256").update(msg).digest("base64");
}
The problem is that if these functions don't work (which they don't anymore), roughly 200 lines of code go to waste.
This error is triggered by attempting to hash a variable that does not exist:
function md5(msg){
return crypto.createHash("md5").update(msg).digest("base64");
}
function sha256(msg) {
return crypto.createHash("sha256").update(msg).digest("base64");
}
md5(non_existent); // This variable does not exist.
What kind of data are you trying to hash? Where does it come from?
I would check the value of msg first, then I would try:
crypto.createHash('md5').update(msg.toString()).digest('hex');
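If the root cause really is an undefined value reaching the helpers, a small guard in front of the hash call keeps the rest of the code usable and turns the crash into a clearer error. A minimal sketch (the safeMd5 name is just illustrative):
function safeMd5(msg) {
  // Refuse anything that isn't a string or Buffer instead of letting
  // crypto's update() throw deeper in the stack.
  if (typeof msg !== "string" && !Buffer.isBuffer(msg)) {
    throw new TypeError("safeMd5 expected a string or Buffer, got " + typeof msg);
  }
  return crypto.createHash("md5").update(msg).digest("base64");
}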
You could also use these packages instead:
https://www.npmjs.com/package/md5
https://www.npmjs.com/package/js-sha256
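For reference, both packages expose a single-call helper, so the two functions above could be replaced with something like this (a sketch assuming npm install md5 js-sha256; both return hex strings rather than base64):
var md5 = require('md5');
var sha256 = require('js-sha256');
console.log(md5('message'));    // hex-encoded MD5 digest
console.log(sha256('message')); // hex-encoded SHA-256 digest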
I'm relatively new to Node.js and I'm struggling with a basic problem: the correct use of global variables. I've read a lot about it, but it seems I can't make it work properly. I'll post some code for a better view of the problem.
I have this simple JS file running as a server:
myapi.js
var http = require('http');
var express = require('express');
var app = express();
var exec = require('child_process').exec, child;
var fs = require('fs');
var jUptime;
var ipExp = require('./getDown');
var filesD = [];
var path = "/media/pi/01D16F03D7563070/movies";
app.use(express['static'](__dirname ));
exec("sudo /sbin/ifconfig eth0 | grep 'inet addr:' | cut -d: -f2 | awk '{print $1}'", function(error, stdout, stderr){
ip = stdout;
exports.ipAdd = ip;
console.log(ip);
});
app.get('/files', function(req, res) {
fs.readdir(path, function(err, files) {
if (err){
console.log("Non riesco a leggere i files");
}
filesD=files;
console.log(filesD);
});
res.status(200).send(filesD);
});
app.get('/up', function(req, res) {
child = exec("uptime -p", function(error, stdout, stderr){
jUptime = [{uptime: stdout}];
});
res.status(200).send(jUptime);
});
app.get('*', function(req, res) {
res.status(404).send('Richiesta non riconosciuta');
});
app.use(function(err, req, res, next) {
if (req.xhr) {
res.status(500).send('Qualcosa è andato storto');
} else {
next(err);
}
});
app.listen(3000);
console.log('Server attivo sulla porta 3000');
And then I have this JS used in a simple web page:
getDown.js
var ip = require('./myapi').ipAdd;
function gDownloads() {
var url;
var jqxhr;
var dat;
url = 'http://' + ip + '/files';
jqxhr = $.getJSON(url, function(dat) {
for(i=0; i<dat.length; i++){
$('#downLoad').append('<p>' + dat[i] + '</p>');
}
$('#bId').append(dat.length);
})
.done(function() {
console.log("OK");
})
.fail(function(data) {
console.log("Fallito: "+data);
})
};
The problem is that when I navigate to the HTML page that uses getDown.js, I get the following error from getDown.js:
require is not defined
I need to pass the variable that contains the IP address from myapi.js so I can use it in getDown.js. I hope I've explained myself well enough; thanks in advance.
require is a global that exists in Node.js code, that is, in the JavaScript executing on the server.
Your server will respond to the client and give it an HTML page to render. That HTML page could tell the browser to also request a JavaScript file from the server. When it receives that file, the client will execute it. The client does not have a require global (you can test this by opening up the console and typing require).
Using Browserify
You can write Node-style code, requiring your module like you're doing, but then run the code through Browserify. This will create a new JavaScript bundle that can be executed by the client, so you should tell your HTML page to use that bundle instead of getDown.js.
Here is a basic example of using Browserify like this.
module.js
function getIp() {
return 123456;
}
module.exports = {
getIp: getIp
};
main.js
var module = require('./module');
function getIp() {
var ip = module.getIp();
return ip;
};
console.log(getIp());
compile bundle
$ browserify main.js -o public/bundle.js
index.html
<script type="text/javascript" src="public/bundle.js"></script>
Global variable on the client
To use a global variable on the client which is known by the server, you can pass that variable to your rendering engine (possibly Jade if you're using Express) and have it interpolate that variable into a <script> tag which defines some globals. Leave a comment if that's the approach you'd prefer and I can add some more details.
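For example, a minimal sketch of that approach, assuming an EJS view (swap in the Jade equivalent if that's your engine); the index view and the serverIp / SERVER_IP names are illustrative, not part of your code:
// In myapi.js, pass the value into the rendered page:
app.get('/', function(req, res) {
  res.render('index', { serverIp: ip });
});
// In views/index.ejs, interpolate it into a script tag so the browser
// sees it as an ordinary global:
//   <script>window.SERVER_IP = "<%= serverIp %>";</script>
// getDown.js can then build its URL from window.SERVER_IP instead of require().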
Let me know if you have more questions!
I'm just wondering whether it is at all possible to transfer a directory from a Unix server to my local machine using the ssh2 module in Node.js. I have connected to the remote host and can read the directory as well as transfer single files, but there are 28 folders in the directory, each of which contains files and subdirectories. What I'd like to do is take an exact copy of the main directory from the server to my local machine.
I was using fastGet with single files, but transferring a directory gives: Error: EISDIR, open __dirname/../localdirectory/, which I think implies I can't use fastGet to get an entire directory. I also tried using the exec command to scp it over, but I couldn't work out the syntax for the local directory:
// c is an active connection
c.exec('scp filethatexists.extension /../filepath/newname.extension', function(err, stream) {
if (err) {
console.log("error: " + err);
return;
}
stream.on('data', function(data, extended) {
console.log((extended === 'stderr' ? 'STDERR: ' : 'STDOUT: ') + data);
});
stream.on('end', function() {
console.log('Stream :: EOF');
});
stream.on('close', function() {
console.log('Stream :: close');
});
stream.on('exit', function(code, signal) {
console.log('Stream :: exit :: code: ' + code + ', signal: ' + signal);
c.end();
});
});
This just results in the EOF event firing. This code was just me testing if I could get a single file transferring.
Can anyone provide me with any assistance? Thank you in advance.
A couple of solutions:
You could recursively traverse the directory (making directories and transferring files as needed) using the sftp methods; a rough sketch of this appears after the tar example below.
Tar the directory (compress it too if you want) to stdout (e.g. tar cf - mydir) and then process that incoming stdout data with a tar module such as tar-fs (and the built-in zlib module first if you end up compressing the directory).
// Requires:
// * `npm install tar-fs`
// * `ssh2` v0.5.x or newer
var tar = require('tar-fs');
var zlib = require('zlib');
function transferDir(conn, remotePath, localPath, compression, cb) {
var cmd = 'tar cf - "' + remotePath + '" 2>/dev/null';
if (typeof compression === 'function')
cb = compression;
else if (compression === true)
compression = 6;
if (typeof compression === 'number'
&& compression >= 1
&& compression <= 9)
cmd += ' | gzip -' + compression + 'c 2>/dev/null';
else
compression = undefined;
conn.exec(cmd, function(err, stream) {
if (err)
return cb(err);
var exitErr;
var tarStream = tar.extract(localPath);
tarStream.on('finish', function() {
cb(exitErr);
});
stream.on('exit', function(code, signal) {
if (typeof code === 'number' && code !== 0) {
exitErr = new Error('Remote process exited with code '
+ code);
} else if (signal) {
exitErr = new Error('Remote process killed with signal '
+ signal);
}
}).stderr.resume();
if (compression)
stream = stream.pipe(zlib.createGunzip());
stream.pipe(tarStream);
});
}
// USAGE ===============================================================
var ssh = require('ssh2');
var conn = new ssh();
conn.on('ready', function() {
transferDir(conn,
'/home/foo',
__dirname + '/download',
true, // uses compression with default level of 6
function(err) {
if (err) throw err;
console.log('Done transferring');
conn.end();
});
}).connect({
host: '192.168.100.10',
port: 22,
username: 'foo',
password: 'bar'
});
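For completeness, here is a rough sketch of the first option (recursively walking the remote tree with the sftp methods). It reuses an already-connected ssh2 client like conn above, keeps error handling minimal, and the downloadDir / copyDir names are purely illustrative:
var fs = require('fs');
var path = require('path');
function downloadDir(conn, remoteDir, localDir, cb) {
  conn.sftp(function(err, sftp) {
    if (err) return cb(err);
    (function copyDir(remote, local, done) {
      fs.mkdir(local, function() { // ignore "already exists" errors in this sketch
        sftp.readdir(remote, function(err, list) {
          if (err) return done(err);
          var pending = list.length;
          if (pending === 0) return done();
          list.forEach(function(entry) {
            function finished(err) {
              if (err) return done(err);   // a real version should guard against
              if (--pending === 0) done(); // calling done() more than once
            }
            // some SFTP servers include '.' and '..' in readdir results
            if (entry.filename === '.' || entry.filename === '..')
              return finished();
            var remotePath = remote + '/' + entry.filename;
            var localPath = path.join(local, entry.filename);
            if (entry.attrs.isDirectory())
              copyDir(remotePath, localPath, finished);
            else
              sftp.fastGet(remotePath, localPath, finished);
          });
        });
      });
    })(remoteDir, localDir, cb);
  });
}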
I'm also trying to download folders using SSH. It took me more than 10 days and I'm still trying to do that. In the meantime I found some other code that does the same thing for me and downloads every folder and file inside a directory.