I'm looking for a way to SSH into a virtual machine and then execute certain scripts inside the virtual machine using Node.js.
So far I've created a shell script that automates the login to the virtual machine, but I'm unable to figure out how to move forward.
My shell script for logging in to the remote server (sshpass.sh):
spawn ssh root@localhost
expect "password"
send "123456"
send "\r"
interact
This is my server.js
var http = require('http');
var execProcess = require("./exec_process.js");
http.createServer(function (req, res) {
    res.writeHead(200, {'Content-Type': 'text/plain'});
    execProcess.result("sh sshpass.sh", function(err, response){
        if(!err){
            res.end(response);
        } else {
            res.end("Error: " + err);
        }
    });
}).listen(3000);
console.log('Server listening on port 3000');
exec_process.js
var exec = require('child_process').exec;
var result = function(command, cb){
    var child = exec(command, function(err, stdout, stderr){
        if(err != null){
            return cb(new Error(err), null);
        }else if(typeof(stderr) != "string"){
            return cb(new Error(stderr), null);
        }else{
            return cb(null, stdout);
        }
    });
}
exports.result = result;
Any help is appreciated, thanks.
Why don't you use simple-ssh?
I made a simple example of how to load a file with a list of commands and execute them in a chain.
exampleList: (commands must be separated by new lines)
echo "Testing the first command"
ls
sshtool.js: (it can be buggy, for example if any command itself contains a \n)
const _ = require("underscore");
//:SSH:
const SSH = require('simple-ssh');
const ssh = new SSH({
    host: 'localhost',
    user: 'username',
    pass: 'sshpassword'
});
//example usage : sshtool.js /path/to/command.list
function InitTool(){
    console.log("[i] SSH Command Tool");
    if(process.argv[2]){ AutomaticMode(); }
    else{ console.log("[missing argument : path to file containing the list of commands]"); }
}
//:MODE:Auto
function AutomaticMode(){
    const CMDFileName = process.argv[2];
    console.log(" ~ Automatic Command Input Mode");
    //Load the list of commands to be executed in order
    const fs = require('fs');
    fs.readFile(CMDFileName, "utf8", (err, data) => {
        if(err){console.log("[!] Error Loading Command List File :\n", err);}else{
            var CMDList = data.split("\n"); // split the document into lines
            CMDList.length = CMDList.length - 1; //fix the last empty line
            _.each(CMDList, function(this_command, i){
                ssh.exec(this_command, {
                    out: function(stdout) {
                        console.log("[+] executing command", i, "/", CMDList.length, "\n $[" + this_command + "]", "\n" + stdout);
                    }
                });
            });
            console.log("[i]", CMDList.length, "commands will be performed.\n");
            ssh.start();
        }
    });
}
//:Error Handling:
ssh.on('error', function(err) {
    console.log('[!] Error :', err);
    ssh.end();
});
InitTool();
It uses underscore for looping through the command list.
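If you only need a couple of fixed commands rather than a file, simple-ssh also lets you chain exec() calls directly and run them with start(). A minimal sketch (host and credentials are placeholders, and the out/err/exit option names are per simple-ssh's README):
const SSH = require('simple-ssh');

const ssh = new SSH({
    host: 'localhost',   // placeholder credentials
    user: 'username',
    pass: 'sshpassword'
});

// exec() only queues commands; nothing is sent until start() is called
ssh
    .exec('echo "Testing the first command"', {
        out: (stdout) => console.log(stdout)
    })
    .exec('ls', {
        out: (stdout) => console.log(stdout),
        exit: (code) => console.log('exit code:', code)
    })
    .on('error', (err) => {
        console.error('SSH error:', err);
        ssh.end();
    })
    .start();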
I'm trying to boot up a Minecraft server from Node.js, but I'm having trouble finding a way to send commands to it from Node.js.
const { spawn } = require('node:child_process')
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const fs = require('fs');
app.get('/start', (req, res) => {
    fs.writeFile('minecraftstatus.txt', 'on', (err) => {
        if (err) throw err;
    });

    const command = spawn('java', ['-jar', '-Xms2048M', '-Xmx2048M', '-Dfile.encoding=utf8', 'server.jar', 'nogui'])

    // the `data` event is fired every time data is
    // output from the command
    command.stdout.on('data', output => {
        // the output data is captured and printed in the callback
        fs.appendFile('console.txt', ("\n" + output.toString()), 'utf-8', err => {
            console.log(err)
        })
        console.log("Output: ", output.toString())
    })

    res.status(200).send("OK")
});

app.listen(80, () => {
    console.log('Server started on port 80');
});
As you can see above, whenever a user sends a GET request, it spawns the server and appends any output to a text file. Now I need a way to send commands to Minecraft, i.e. to the same process that Node.js spawned the server in.
I've tried this:
app.get('/mcstop', (req, res) => {
    try {
        const command2 = spawn('/stop')

        // the `data` event is fired every time data is
        // output from the command
        command2.stdout.on('data', output => {
            // the output data is captured and printed in the callback
            console.log("Output: ", output.toString())
        })

        res.status(200).send("OK")
    }
    catch {
        console.log("Oh no...")
    }
});
This sends /stop as a new command, but it isn't being run in the same shell that the Minecraft server was started from.
How could I achieve this?
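One approach that might work, sketched under the assumption that the child process can be kept in a module-level variable and that the Minecraft server reads console commands such as stop from its standard input (variable names below are illustrative, not from the original code):
const { spawn } = require('node:child_process');
const express = require('express');
const app = express();

let mcProcess = null; // keep a reference to the spawned server

app.get('/start', (req, res) => {
    mcProcess = spawn('java', ['-jar', '-Xms2048M', '-Xmx2048M', 'server.jar', 'nogui']);
    mcProcess.stdout.on('data', output => console.log("Output: ", output.toString()));
    res.status(200).send("OK");
});

app.get('/mcstop', (req, res) => {
    if (!mcProcess) return res.status(409).send("Server is not running");
    // Write the console command to the running server's stdin
    mcProcess.stdin.write('stop\n');
    res.status(200).send("OK");
});

app.listen(80, () => console.log('Server started on port 80'));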
I am establishing a good connection to my Linux server. What I want to do now is run some shell commands after the connection is established, to automate things like cd images followed by mkdir newFolder, etc.
The main idea is to connect to a Linux server from a webpage, and when I click some buttons it will establish a connection to that server and do some work, like restarting Apache or bouncing the application by running a script.
var fs = require('fs');
var path = require('path');
var server = require('http').createServer(onRequest);
var io = require('socket.io')(server);
var SSHClient = require('ssh2').Client;
// Load static files into memory
var staticFiles = {};
var basePath = path.join(require.resolve('xterm'), '..');
staticFiles['/xterm.css'] = fs.readFileSync(path.join(basePath, '../css/xterm.css'));
staticFiles['/xterm.js'] = fs.readFileSync(path.join(basePath, 'xterm.js'));
basePath = path.join(require.resolve('xterm-addon-fit'), '..');
staticFiles['/xterm-addon-fit.js'] = fs.readFileSync(path.join(basePath, 'xterm-addon-fit.js'));
staticFiles['/'] = fs.readFileSync('index.html');
// Handle static file serving
function onRequest(req, res) {
    var file;
    if (req.method === 'GET' && (file = staticFiles[req.url])) {
        res.writeHead(200, {
            'Content-Type': 'text/'
                + (/css$/.test(req.url)
                    ? 'css'
                    : (/js$/.test(req.url) ? 'javascript' : 'html'))
        });
        return res.end(file);
    }
    res.writeHead(404);
    res.end();
}
io.on('connection', function(socket) {
    var conn = new SSHClient();
    conn.on('ready', function() {
        socket.emit('data', '\r\n*** SSH CONNECTION ESTABLISHED ***\r\n');
        conn.shell(function(err, stream) {
            if (err)
                return socket.emit('data', '\r\n*** SSH SHELL ERROR: ' + err.message + ' ***\r\n');
            socket.on('data', function(data) {
                stream.write(data);
            });
            stream.on('data', function(d) {
                socket.emit('data', d.toString('binary'));
            }).on('close', function() {
                conn.end();
            });
        });
    }).on('close', function() {
        socket.emit('data', '\r\n*** SSH CONNECTION CLOSED ***\r\n');
    }).on('error', function(err) {
        socket.emit('data', '\r\n*** SSH CONNECTION ERROR: ' + err.message + ' ***\r\n');
    }).connect({
        host: '192.168.560.1',
        port: 22,
        username: 'USER',
        password: 'anything'
    });
});
let port = 8000;
console.log('Listening on port', port)
server.listen(port);
To execute Linux commands on the SSH shell created this way, do as below:
Before this snippet:
socket.on('data', function(data) {
    stream.write(data);
});
Add stream.write('ls\n'); (the trailing newline is what makes the remote shell actually execute the command).
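In context, inside the conn.shell() callback from the code above, it would look roughly like this (a sketch; the initial command is just an example taken from the question):
conn.shell(function(err, stream) {
    if (err)
        return socket.emit('data', '\r\n*** SSH SHELL ERROR: ' + err.message + ' ***\r\n');

    // Run an initial command as soon as the shell is ready;
    // the trailing newline makes the remote shell execute it.
    stream.write('cd images && mkdir newFolder\n');

    socket.on('data', function(data) {
        stream.write(data);
    });
    stream.on('data', function(d) {
        socket.emit('data', d.toString('binary'));
    }).on('close', function() {
        conn.end();
    });
});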
I am using xterm.js in my web project to have a terminal on the web page. Every time I refresh the page, or reconnect the socket after the connection is broken by network fluctuations on the client side, the current working directory (PWD) is lost and the terminal falls back to the specified CWD, which is the user's home directory in my case. So I have to cd back to where I was working.
How can I reconnect and remain in the same PWD I was in before the page refresh or socket disconnect?
One thing I tried is to store the term object and, if it is already present, reuse it when reconnecting, without deleting the process and object on WebSocket disconnect.
var http = require('http');
var express = require('express');
var app = express();
var expressWs = require('express-ws')(app);
var pty = require('node-pty');
var cors = require('cors');
app.use(cors());
app.options('*', cors());
var terminals = {}; //global terminals
function getUser(token) {
    return new Promise((resolve, reject) => {
        try {
            return http.get({
                host: '',
                path: '',
                headers: {'token': token}
            }, function(response) {
                // Continuously update stream with data
                var body = '';
                response.on('data', function(d) {
                    body += d;
                });
                response.on('end', function() {
                    return resolve(JSON.parse(body));
                });
            });
        } catch (err) {
            console.log('Api failed');
            console.log(err);
            reject(err);
        }
    })
}
app.ws('/terminals/:user_id', function (ws, req) {
    try {
        getUser(req.params.user_id) /* checking with the API if the user exists in my database */
            .then(user_info => {
                if (terminals[parseInt(req.params.user_id)]) {
                    var term = terminals[parseInt(req.params.user_id)];
                } else {
                    var term = pty.spawn(process.platform === 'win32' ? 'cmd.exe' : 'bash', [], {
                        name: 'xterm-color',
                        cwd: cwd,
                        env: process.env
                    });
                    terminals[parseInt(req.params.user_id)] = term;
                }
                term.on('data', function(data) {
                    ws.send(data);
                });
                ws.on('message', function(msg) {
                    term.write(msg);
                });
                ws.on('close', function () {
                    // process.kill(term.pid);
                    // delete terminals[parseInt(req.params.pid)];
                    // delete logs[req.params.pid];
                });
            })
            .catch(err => {
            })
    } catch (err) {
        console.log('Terminal webSocket failed');
        console.log(err);
    }
});
app.listen(3000);
This is not working for me. It connects only the first time; when I refresh the page, the terminal does not reattach to the existing stored object.
Also, this approach has a problem: if the spawned process is killed by the system but still remains in the JavaScript object, the script will try to reconnect with the same term object and fail.
Any guidelines on how to reconnect with the same PWD?
Details
OS version: Mac OS
xterm.js version: 2.2.3
This can be solved very easily by just updating ~/.bashrc on the server.
Putting the two lines below in the ~/.bashrc file worked for me (note that both lines must refer to the same file):
PROMPT_COMMAND+='printf %s "$PWD" > ~/.lastdirectory'
[ -s ~/.lastdirectory ] && cd `cat ~/.lastdirectory`
Ref Save last working directory on Bash logout
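Alternatively, the same idea can be handled on the Node side. This is just a sketch, assuming the ~/.lastdirectory file written by PROMPT_COMMAND above exists on the machine where node-pty spawns bash: read the stored directory and pass it as cwd when spawning the pty.
var fs = require('fs');
var os = require('os');
var path = require('path');
var pty = require('node-pty');

// Read the directory saved by PROMPT_COMMAND, fall back to the home directory
var lastDirFile = path.join(os.homedir(), '.lastdirectory');
var startDir = os.homedir();
try {
    var saved = fs.readFileSync(lastDirFile, 'utf8').trim();
    if (saved) startDir = saved;
} catch (e) {
    // file does not exist yet: first session, keep the home directory
}

var term = pty.spawn(process.platform === 'win32' ? 'cmd.exe' : 'bash', [], {
    name: 'xterm-color',
    cwd: startDir,
    env: process.env
});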
I am wondering if there is any disadvantage to starting a server in a process and then running tests against that server in the same process.
Obviously there are some performance concerns, but if we are testing accuracy instead of performance, are there any major concerns with code like the following?
var fs = require('fs');
var path = require('path');
var http = require('http');
var supertest = require('supertest');
var assert = require('assert');
describe('#Test - handleXml()*', function() {
    var self = this;
    var server;
    var payload = ''; // stringified XML
    var xmlPath = path.resolve('test', 'test_data', 'xml_payloads', 'IVR_OnDemandErrorCode.xml');

    before(function(done) {
        var config = self.config = require('univ-config')(module, this.test.parent.title, 'config/test-config');
        server = createServer().on('listening', function() {
            done(null);
        });
    });
    beforeEach(function(done) {
        fs.readFile(xmlPath, 'utf8', function(err, content) {
            assert(err == null);
            payload = content;
            done();
        });
    });
    it('should accept request Content-type "text/xml or application/xml"', function(done) {
        supertest(server)
            .post('/event')
            .set('Content-Type', 'application/xml')
            .send(payload)
            .expect(200, done);
    });
    it('should transform XML payload into JSON object', function(done) {
        supertest(server)
            .post('/event')
            .set('Content-type', 'application/xml')
            .send(payload)
            .expect(200)
            .end(function(err, res) {
                assert(err == null, 'Error is not null');
                var jsonifiedXml = JSON.parse(res.text);
                assert(typeof jsonifiedXml === 'object', 'jsonifiedXml not an object');
                done();
            });
    });
    describe('JSONified XML', function() {
        it('should have proper key casing', function(done) {
            supertest(server)
                .post('/event')
                .set('Content-type', 'application/xml')
                .send(payload)
                .expect(200)
                .end(function(err, res) {
                    assert(err == null);
                    var payload = JSON.parse(res.text);
                    payload = payload.events[0].data;
                    assert(payload.hasOwnProperty('ppv'), 'Bad value for ppv');
                    assert(payload.hasOwnProperty('mac'), 'Bad value for mac');
                    assert(payload.hasOwnProperty('appName'), 'Bad value for appName');
                    assert(payload.hasOwnProperty('divisionId'), 'Bad value for divisionId');
                    assert(payload.hasOwnProperty('callTime'), 'Bad value for callTime');
                    assert(payload.hasOwnProperty('callDate'), 'Bad value for callDate');
                    assert(payload.hasOwnProperty('ivrLOB'), 'Bad value for ivrLOB');
                    done();
                });
        });
    });
});
function createServer(opts) {
    //Note: this is a good pattern, definitely
    var handleXml = require(path.resolve('lib', 'handleXml'));
    var server = http.createServer(function(req, res) {
        handleXml(req, res, function(err) {
            res.statusCode = err ? (err.status || 500) : 200;
            res.end(err ? err.message : JSON.stringify(req.body));
        });
    });
    server.listen(5999); //TODO: which port should this be listening on? an unused port, surely
    return server;
}
That's the standard way of testing the HTTP endpoints in a Node application. But you are not going to want a createServer() function in each test. You will have a common function that creates a server, which you can use throughout your application, including to start the production server.
You're right in noticing that having the server listen on a port doesn't actually do anything for you.
For this reason, it's common to have what I call an application factory that sets up everything about a server, but does not listen on a port. That way I can access the server from a test or a script. The production app gets booted from a minimal index file:
var createServer = require('./AppFactory');
var server = createServer();
server.listen(5999);
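A minimal sketch of such a factory (the file name AppFactory.js and the handleXml path follow the example above; adjust them to your own project layout):
// AppFactory.js
var http = require('http');
var path = require('path');

module.exports = function createServer() {
    var handleXml = require(path.resolve('lib', 'handleXml'));

    // Build the server but do NOT call listen() here;
    // tests can pass the unstarted server straight to supertest,
    // while the production index file decides the port.
    return http.createServer(function(req, res) {
        handleXml(req, res, function(err) {
            res.statusCode = err ? (err.status || 500) : 200;
            res.end(err ? err.message : JSON.stringify(req.body));
        });
    });
};
Note that supertest accepts an http.Server that is not yet listening and will bind it to an ephemeral port itself, so the tests don't need to pick a port at all.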
I am trying to upload a file to an FTP server using Node.js, as below.
I am using this library: https://github.com/sergi/jsftp
var fs = require('fs');
var JSFtp = require("jsftp");

var Ftp = new JSFtp({
    host: "ftp.some.net",
    port: 21,           // defaults to 21
    user: "username",   // defaults to "anonymous"
    pass: "pass",       // defaults to "@anonymous"
    debugMode: true
});
Uploading the file:
exports.UploadToFtP = function (req, res) {
    Ftp.put('public/Test.html', '/Test/index.html', function (err) {
        if (!err)
            res.send(200);
        else
            res.send(err);
    });
};
I tried uploading a file with the method above and it responds with 200 OK, but no file appears on the server.
Does this have something to do with the server's connection timeout? Why is this not writing the file to the server?
If debug mode is on, the jsftp instance will emit jsftp_debug events.
To print all debug events, we would listen for the debug messages like this:
Ftp.on('jsftp_debug', function(eventType, data) {
    console.log('DEBUG: ', eventType);
    console.log(JSON.stringify(data, null, 2));
});
We can also use raw FTP commands directly. In this case, we use the FTP 'QUIT' command, which accepts no parameters and returns the farewell message from the server:
Ftp.raw.quit(function(err, res) {
    if (err)
        return console.error(err);
    console.log("FTP session finalized! See you soon!");
});
The file needs to be read into a buffer (bytes) first.
var fs = require('fs');

fs.readFile('example.txt', function (err, data) {
    Ftp.put(data, 'example.txt', function (err) {
        if (!err) {
            console.log('OK');
        } else {
            console.log('ERR');
        }
    });
});
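Applied to the original UploadToFtP handler, that would look roughly like this (a sketch that reuses the Ftp instance and the paths from the question):
var fs = require('fs');

exports.UploadToFtP = function (req, res) {
    // Read the local file into a buffer, then hand the buffer to jsftp
    fs.readFile('public/Test.html', function (err, data) {
        if (err) return res.send(err);

        Ftp.put(data, '/Test/index.html', function (err) {
            if (!err)
                res.send(200);
            else
                res.send(err);
        });
    });
};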