Transfer entire directory using ssh2 in Node.js

I'm just wondering whether it is at all possible to transfer a directory from a Unix server to my local machine using the ssh2 module in Node.js. I have connected to the remote host and can read the directory as well as transfer single files, but there are 28 folders in the directory, each of which contains files and subdirectories. What I'd like to do is take an exact copy of the main directory from the server to my local machine.
I was using fastGet with single files, but transferring a directory gives Error: EISDIR, open __dirname/../localdirectory/, which I think implies I can't use fastGet to get an entire directory. I also tried using the exec command to scp it over, but I couldn't work out the syntax for the local directory:
// c is an active connection
c.exec('scp filethatexists.extension /../filepath/newname.extension', function(err, stream) {
  if (err) {
    console.log("error: " + err);
    stream.end();
  }
  stream.on('data', function(data, extended) {
    console.log((extended === 'stderr' ? 'STDERR: ' : 'STDOUT: ') + data);
  });
  stream.on('end', function() {
    console.log('Stream :: EOF');
  });
  stream.on('close', function() {
    console.log('Stream :: close');
  });
  stream.on('exit', function(code, signal) {
    console.log('Stream :: exit :: code: ' + code + ', signal: ' + signal);
    c.end();
  });
});
This just results in the EOF event firing. This code was just me testing if I could get a single file transferring.
Can anyone provide me with any assistance? Thank you in advance.

A couple of solutions:
You could recursively traverse the directory (making directories and transferring files as needed) using the sftp methods (a sketch follows after the tar example below)
Tar the directory (compress it too if you want) to stdout (e.g. tar cf - mydir) and then process that incoming stdout data with the tar module (and the built-in zlib module first if you end up compressing the directory).
// Requires:
// * `npm install tar-fs`
// * `ssh2` v0.5.x or newer
var tar = require('tar-fs');
var zlib = require('zlib');

function transferDir(conn, remotePath, localPath, compression, cb) {
  var cmd = 'tar cf - "' + remotePath + '" 2>/dev/null';
  if (typeof compression === 'function')
    cb = compression;
  else if (compression === true)
    compression = 6;
  if (typeof compression === 'number'
      && compression >= 1
      && compression <= 9)
    cmd += ' | gzip -' + compression + 'c 2>/dev/null';
  else
    compression = undefined;
  conn.exec(cmd, function(err, stream) {
    if (err)
      return cb(err);
    var exitErr;
    var tarStream = tar.extract(localPath);
    tarStream.on('finish', function() {
      cb(exitErr);
    });
    stream.on('exit', function(code, signal) {
      if (typeof code === 'number' && code !== 0) {
        exitErr = new Error('Remote process exited with code ' + code);
      } else if (signal) {
        exitErr = new Error('Remote process killed with signal ' + signal);
      }
    }).stderr.resume();
    if (compression)
      stream = stream.pipe(zlib.createGunzip());
    stream.pipe(tarStream);
  });
}
// USAGE ===============================================================
var ssh = require('ssh2');

var conn = new ssh();
conn.on('ready', function() {
  transferDir(conn,
              '/home/foo',
              __dirname + '/download',
              true, // uses compression with default level of 6
              function(err) {
    if (err) throw err;
    console.log('Done transferring');
    conn.end();
  });
}).connect({
  host: '192.168.100.10',
  port: 22,
  username: 'foo',
  password: 'bar'
});
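For the first option, a minimal sketch of recursively walking the remote tree with the sftp methods might look like this (untested; downloadDir is an illustrative name and the error handling is kept deliberately brief):

var fs = require('fs');
var path = require('path');

// Recursively download remotePath into localPath over an ssh2 SFTP session
function downloadDir(sftp, remotePath, localPath, cb) {
  fs.mkdir(localPath, function(err) {
    if (err && err.code !== 'EEXIST') return cb(err);
    sftp.readdir(remotePath, function(err, list) {
      if (err) return cb(err);
      var pending = list.length;
      if (!pending) return cb();
      list.forEach(function(entry) {
        var remote = remotePath + '/' + entry.filename;
        var local = path.join(localPath, entry.filename);
        function done(err) {
          if (pending <= 0) return; // already failed
          if (err) { pending = 0; return cb(err); }
          if (--pending === 0) cb();
        }
        // entry.attrs is an fs.Stats-like object
        if (entry.attrs.isDirectory())
          downloadDir(sftp, remote, local, done);
        else
          sftp.fastGet(remote, local, done);
      });
    });
  });
}

// usage, given an active connection c:
// c.sftp(function(err, sftp) {
//   if (err) throw err;
//   downloadDir(sftp, '/remote/dir', __dirname + '/localdirectory', function(err) {
//     if (err) throw err;
//     console.log('Done transferring');
//   });
// });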

I'm also trying to download folders using SSH. It has taken me more than 10 days and I'm still trying to do it. In the meantime I found some other code that does the same thing for me: it downloads every folder and file inside a directory.

Related

Problem using child-process vs terminal while executing a command

I'm trying to execute a command using child_process and it can't resolve the executable by its path in Node.js, but when I use the terminal, everything is fine.
Why is that?
My code is right here:
const cp = require('child_process');
const commandExecutor = 'node-install/target/node/yarn/dist/bin/yarn.exe';
const symlinkFolder = 'node-install/target/node/target/symlink';
const workingDirectories = [];

Array.from(process.argv).forEach((value, index) => {
  if (index >= 2) {
    workingDirectories[index - 2] = value;
  }
});

workingDirectories.forEach(function(workingDirectory) {
  const argumentsUnlink = 'unlink #item# --link-folder ' + symlinkFolder + ' --cwd ' + workingDirectory;
  const unlinkCommand = commandExecutor + ' ' + argumentsUnlink;
  const execution = cp.exec(
    unlinkCommand,
    function (error, stdout, stderr) {
      console.log(stdout);
      console.log(error);
      console.log(stderr);
    });
  execution.on('exit', function (code) {
    let message = 'Child process exited with exit code ' + code + ' on route ' + workingDirectory;
    console.log(message);
  });
});
An example of command is:
node-install/target/node/yarn/dist/bin/yarn.exe unlink #item# --link-folder node-install/target/node/target/symlink --cwd appointments/target/generated-sources/frontend/
But the error I've got is:
'node-install' is not recognized as an internal or external command, operable program or batch file.
While I execute command from terminal, everything is fine.
One possible problem: Node.js is unable to locate the file by its relative path. You can construct an absolute path to fix this. A few options that help if node-install is located in your project root (not an exhaustive list):
__dirname, which returns the directory of the current module; so if node-install and index.js both sit in the project root, then in index.js we can use
const commandExecutor = `${__dirname}/node-install/target/node/yarn/dist/bin/yarn.exe`;
process.cwd(), which returns the full path of the process's working directory; so if you start Node.js from the folder containing node-install, then you can refer to the exe like this:
const commandExecutor = `${process.cwd()}/node-install/target/node/yarn/dist/bin/yarn.exe`;
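A related variant (my addition, not from the original answer): the path module can build the absolute path for you, which is equivalent to the process.cwd() option above when the input path is relative:

const path = require('path');

// path.resolve() prepends process.cwd() to a relative path
const commandExecutor = path.resolve('node-install/target/node/yarn/dist/bin/yarn.exe');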

ffmpeg running in cloudfunction silently fails/never finishes

I am trying to implement a Cloud Function which would run ffmpeg on a Google bucket upload. I have been playing with a script based on https://kpetrovi.ch/2017/11/02/transcoding-videos-with-ffmpeg-in-google-cloud-functions.html
The original script needed a little tuning as the library has evolved a bit. My current version is here:
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const ffmpeg = require('fluent-ffmpeg');
const ffmpeg_static = require('ffmpeg-static');

console.log("Linking ffmpeg path to:", ffmpeg_static)
ffmpeg.setFfmpegPath(ffmpeg_static);

exports.transcodeVideo = (event, callback) => {
  const bucket = storage.bucket(event.bucket);
  console.log(event);
  if (event.name.indexOf('uploads/') === -1) {
    console.log("File " + event.name + " is not to be processed.")
    return;
  }
  // ensure that you only proceed if the file is newly created
  if (event.metageneration !== '1') {
    callback();
    return;
  }
  // Open write stream to new bucket, modify the filename as needed.
  const targetName = event.name.replace("uploads/", "").replace(/[.][a-z0-9]+$/, "");
  console.log("Target name will be: " + targetName);
  const remoteWriteStream = bucket.file("processed/" + targetName + ".mp4")
    .createWriteStream({
      metadata: {
        //metadata: event.metadata, // You may not need this, my uploads have associated metadata
        contentType: 'video/mp4', // This could be whatever else you are transcoding to
      },
    });
  // Open read stream to our uploaded file
  const remoteReadStream = bucket.file(event.name).createReadStream();
  // Transcode
  ffmpeg()
    .input(remoteReadStream)
    .outputOptions('-c:v copy') // Change these options to whatever suits your needs
    .outputOptions('-c:a aac')
    .outputOptions('-b:a 160k')
    .outputOptions('-f mp4')
    .outputOptions('-preset fast')
    .outputOptions('-movflags frag_keyframe+empty_moov')
    // https://github.com/fluent-ffmpeg/node-fluent-ffmpeg/issues/346#issuecomment-67299526
    .on('start', (cmdLine) => {
      console.log('Started ffmpeg with command:', cmdLine);
    })
    .on('end', () => {
      console.log('Successfully re-encoded video.');
      callback();
    })
    .on('error', (err, stdout, stderr) => {
      console.error('An error occurred during encoding', err.message);
      console.error('stdout:', stdout);
      console.error('stderr:', stderr);
      callback(err);
    })
    .pipe(remoteWriteStream, { end: true }); // end: true, emit end event when readable stream ends
};
This version correctly runs and I can see this in logs:
2020-06-16 21:24:22.606 Function execution took 912 ms, finished with status: 'ok'
2020-06-16 21:24:52.902 Started ffmpeg with command: ffmpeg -i pipe:0 -c:v copy -c:a aac -b:a 160k -f mp4 -preset fast -movflags frag_keyframe+empty_moov pipe:1
It seems the function execution ends before the actual ffmpeg command, which then never finishes.
Is there a way to make the ffmpeg "synchronous" or "blocking" so that it finishes before the function execution?
From the Google Cloud documentation it seems the function should accept three arguments: (data, context, callback). Have you tried this, or do you know that context is optional? From the docs, a function that accepts three arguments is treated as a background function; one that accepts only two arguments is treated as a background function only if it returns a Promise.
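For reference, a minimal sketch of that three-argument background-function shape (the body is illustrative only, not the full transcoding logic):

exports.transcodeVideo = (data, context, callback) => {
  // data     -> the storage event payload (bucket, name, metageneration, ...)
  // context  -> event metadata (eventId, timestamp, ...)
  // callback -> must be invoked to signal that the function has finished
  console.log('Processing', data.name, 'event', context.eventId);
  callback();
};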
Beyond that, some other points:
1. Here no callback function is called; if in your tests your function exited with that log line, that is another hint that calling the second argument as a callback is a required step to make the process finish:
if (event.name.indexOf('uploads/') === -1) {
  console.log("File " + event.name + " is not to be processed.")
  return;
}
2. I would suggest adding some more console.log calls (or many others, if you prefer) to clarify the flow: in your question you pasted only one log line, which is not very helpful beyond showing that it is logged after the system log line.
3. The link you used as a tutorial is almost three years old; it could be that Google Cloud has changed its interface in the meantime.
That said, if accepting three arguments rather than only two doesn't solve your problem, you can try changing your function to return a Promise:
exports.transcodeVideo = (event, callback) => new Promise((resolve, reject) => {
  const bucket = storage.bucket(event.bucket);
  console.log(event);
  if (event.name.indexOf('uploads/') === -1) {
    console.log("File " + event.name + " is not to be processed.")
    return resolve(); // or reject if this is an error case
  }
  // ensure that you only proceed if the file is newly created
  if (event.metageneration !== '1') {
    return resolve(); // or reject if this is an error case
  }
  // Open write stream to new bucket, modify the filename as needed.
  const targetName = event.name.replace("uploads/", "").replace(/[.][a-z0-9]+$/, "");
  console.log("Target name will be: " + targetName);
  const remoteWriteStream = bucket.file("processed/" + targetName + ".mp4")
    .createWriteStream({
      metadata: {
        //metadata: event.metadata, // You may not need this, my uploads have associated metadata
        contentType: 'video/mp4', // This could be whatever else you are transcoding to
      },
    });
  // Open read stream to our uploaded file
  const remoteReadStream = bucket.file(event.name).createReadStream();
  // Transcode
  ffmpeg()
    .input(remoteReadStream)
    .outputOptions('-c:v copy') // Change these options to whatever suits your needs
    .outputOptions('-c:a aac')
    .outputOptions('-b:a 160k')
    .outputOptions('-f mp4')
    .outputOptions('-preset fast')
    .outputOptions('-movflags frag_keyframe+empty_moov')
    // https://github.com/fluent-ffmpeg/node-fluent-ffmpeg/issues/346#issuecomment-67299526
    .on('start', (cmdLine) => {
      console.log('Started ffmpeg with command:', cmdLine);
    })
    .on('end', () => {
      console.log('Successfully re-encoded video.');
      resolve();
    })
    .on('error', (err, stdout, stderr) => {
      console.error('An error occurred during encoding', err.message);
      console.error('stdout:', stdout);
      console.error('stderr:', stderr);
      reject(err);
    })
    .pipe(remoteWriteStream, { end: true }); // end: true, emit end event when readable stream ends
});
Hope this helps.

Nodejs ssh2: run multiple commands in only one terminal

I am having trouble with the ssh2 module in my project. I am trying to run multiple commands in one terminal to control a remote Linux system. For example, the "bc" command gives you a basic calculator you can use for basic operations, but that kind of process needs to stay alive while you are using it (it accepts two or more inputs and gives a response as a result).
I need to build a system that works with websockets and SSH. When the websocket receives a command, the ssh node needs to execute it, and the module needs to send its response via websocket.send().
I am using Node.js with the websocket and ssh2 (client) modules.
Here is my code:
#!/usr/bin/node
var Connection = require('ssh2');
var conn = new Connection();
var command = "";
var http = require('http');
var WebSocketServer = require('websocket').server;
var firstcom = true;

conn.on('ready', function() {
  console.log('Connection :: ready');
  // conn.shell(onShell);
});

var onShell = function(err, stream) {
  // stream.write(command + '\n');
  stream.on('data', function(data) {
    console.log('STDOUT: ' + data);
  });
  stream.stderr.on('data', function(data) {
    console.log('STDERR: ' + data);
  });
}

var webSocketsServerPort = 5000;
var ssh2ConnectionControl = false;

var server = http.createServer(function (req, res) {
  //blahbalh
}).listen(webSocketsServerPort, function() {
  console.log((new Date()) + " Server is listening on port:: " + webSocketsServerPort);
});

//console.log((new Date()) + 'server created');
wsServer = new WebSocketServer({
  httpServer: server,
  // autoAcceptConnections: false
});

wsServer.on('request', function(request) {
  console.log((new Date()) + ' Connection from origin ' + request.origin + '.');
  var wsconnection = request.accept('echo-protocol', request.origin);
  console.log((new Date()) + ' Connection accepted.');
  if (!ssh2ConnectionControl) {
    conn.connect({
      host: 'localhost',
      port: 22,
      username: 'attilaakinci',
      password: '1'
    });
    ssh2ConnectionControl = true;
    console.log('SSH Connected.');
  }
  wsconnection.on('message', function(message) {
    if (message.type === 'utf8') {
      console.log('Received Message: ' + message.utf8Data);
      command = message.utf8Data;
      //if(firstcom){
      //  conn.shell(onShell);
      //  firstcom=false;
      //}else{
      conn.exec(message.utf8Data, onShell);
      //}
      wsconnection.send(message.utf8Data);
    } else {
      console.log('Invalid message');
    }
  });
  wsconnection.on('close', function(reasonCode, description) {
    console.log((new Date()) + ' Peer ' + wsconnection.remoteAddress + ' disconnected.');
  });
});
You should use conn.shell() instead of conn.exec() if you want a real interactive shell. conn.exec() is typically for executing one-liner commands, so it does not persist "shell state" between conn.exec() calls (e.g. working directory, etc.).
You should also be aware of possible limits your SSH server has set on how many simultaneous shell/exec requests are allowed per connection. I think the default limit on OpenSSH's server is 10.
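To illustrate, a minimal, untested sketch of the conn.shell() approach in this websocket setup (the bc input is only there to show interacting with a long-lived process):

conn.shell(function(err, stream) {
  if (err) throw err;
  stream.on('close', function() {
    console.log('Shell :: close');
    conn.end();
  }).on('data', function(data) {
    // forward everything the shell prints to the websocket client
    wsconnection.send(data.toString());
  });
  // writes go to the same interactive shell, so state such as the
  // working directory or a running program like bc persists
  stream.write('bc\n');
  stream.write('1 + 2\n');
  stream.write('quit\n');
});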
This is an old question but I wanted to provide an alternative method using ssh2shell, which wraps ssh2.shell by mscdex, used above. The example below only covers making the SSH connection, running the commands, and processing the result.
Using ssh2shell it is possible to run any number of commands sequentially in the context of the previous commands in the same shell session, and then return the output for each command (onCommandComplete event) and/or return all session text on disconnection using a callback function.
See the ssh2shell readme for examples and lots of info. There are also tested scripts with working code examples.
var host = {
  //ssh2.client.connect options
  server: {
    host: "120.0.0.1",
    port: 22,
    userName: username,
    password: password
  },
  debug: false,
  //array of commands run in the same session
  commands: [
    "echo $(pwd)",
    command1,
    command2,
    command3
  ],
  //process each command response
  onCommandComplete: function(command, response, sshObj) {
    //handle just one command or do it for all of them each time
    if (command === "echo $(pwd)") {
      this.emit("msg", response);
    }
  }
};

//host object can be defined earlier and host.commands = [....] set repeatedly later with each reconnection.
var SSH2Shell = require('ssh2shell');
var SSH = new SSH2Shell(host),
    callback = function(sessionText) {
      console.log("-----Callback session text:\n" + sessionText);
      console.log("-----Callback end");
    };

SSH.connect(callback);
To see what is happening at the process level, set debug to true.

Travis CI times out on node jasmine tests, but passes locally

I've been working on a command line app in Node that does some file system reads and Express app stuff, and all my tests are passing locally; however, Travis seems to be having an issue when building (timing out, which is a Jasmine Node thing). Here's what I've got:
.travis.yml
language: node_js
node_js:
  - '>=0.10'
before_script:
  - npm install -g grunt-cli
  - npm link
script:
  - "grunt --verbose"
sickmerge_spec.js
// Set the test environment
process.env.NODE_ENV = 'test';

// Dependencies
var exec = require('child_process').exec,
    fs = require('fs'),
    version = require('../package.json').version,
    request = require('request');

// Helper function to execute the sickmerge cli
function execSickmerge(options, callback) {
  exec('sickmerge ' + options, function(err, stdout) {
    console.log(stdout);
    callback(stdout);
  });
}

// ... Lots of other tests that exec the command, test below fails
describe('web application services', function() {
  it('should respond with a 200 when http requested', function(done) {
    execSickmerge('./spec/fixtures/javascript.js', function() {
      request('http://127.0.0.1:3000/', function(err, response) {
        expect(response.statusCode).toEqual(200);
        done();
      });
    });
  });
});
The command code
#!/usr/bin/env node

/*
 * sickmerge
 * https://github.com/jgriffith/sickmerge
 *
 * Copyright (c) 2013 jgriffith
 * Licensed under the MIT license.
 */

/*
 * Module Dependencies/Setup
 */
var fs = require('fs'),
    program = require('commander'),
    syntaxOptions = require('./lib/syntax'),
    version = require('./package.json').version,
    fileLocation,
    env = require('./lib/config')();

// Program Setup and Options
program
  .version(version)
  .usage('[options] <conflicted file location>')
  .option('-h, --hostname [value]', 'The host URL you wish to query in the browser (defaults to localhost).')
  .option('-o, --syntaxes', 'Will show the available syntax options for syntax highlighting.')
  .option('-p, --port <n>', 'The port you wish to deploy on (defaults to 3000).', parseInt)
  .option('-s, --syntax [value]', 'The language of the file for syntax highlighting (optional), defaults to no highlighting. Run with "-o" to see the available options.')
  .option('-m, --merge [value]', 'Specify the initial view in the middle (merged) window on instantiation. Valid options are "yours", "theirs", and "both". Defaults to "yours"')
  .parse(process.argv);

// Store the file location so we can persist later
fileLocation = program.args[0];

// For printing available syntax options
function printSyntaxOptions() {
  console.log('Available options include:\n' + syntaxOptions.showSupportedSyntaxes());
  return;
}

// If the user wants to see the syntax options
if (program.syntaxes) {
  printSyntaxOptions();
  return;
}

// No file given, print help since it's required
if (!fileLocation) {
  program.outputHelp();
  return;
}

// Invalid merge option
if (program.merge && ['yours', 'theirs', 'both'].indexOf(program.merge) === -1) {
  console.log('You\'ve specified an invalid initial merged view: "' + program.merge + '".\nPlease use either "yours", "theirs", or "both"');
  return;
}

// Invalid syntax option
if (program.syntax && syntaxOptions.indexOf(program.syntax) === -1) {
  console.log('You\'ve specified an invalid syntax option: ' + program.syntax);
  printSyntaxOptions();
  return;
}

// Read the passed file, strip the git comments, and build the web service
fs.readFile(fileLocation, function(err, result) {
  if (err) return console.log('There was an error loading your file! ' + err);
  // Setup parameters, load additional files
  var hostname = (program.hostname) ? program.hostname : 'localhost',
      port = (program.port) ? program.port : 3000,
      merge = (program.merge) ? program.merge : 'yours',
      extension = fileLocation.split('.').pop(),
      syntax = (program.syntax) ? program.syntax : syntaxOptions.getSyntax(extension),
      threeWayMerge = require('./lib/gitStrip')(result.toString(), merge),
      express = require('express'),
      app = express(),
      path = require('path'),
      open = require('open');

  // Web server setup
  app.use(express.bodyParser());
  app.set('views', __dirname + '/views');
  app.set('view engine', 'ejs');
  app.use(express.static(path.join(__dirname, 'public')));

  // Build the base route for the page
  app.get('/', function (req, res) {
    res.render('editor', {
      title: fileLocation,
      syntax: syntax,
      body: threeWayMerge
    });
  });

  // Post route for saving the file (this is final) and closes the process
  app.post('/save', function (req, res) {
    var content = req.body.content;
    fs.writeFile(fileLocation, content, function (err) {
      if (err) throw "There was an issue saving your file: " + err;
      res.send('complete');
      process.exit();
    });
  });

  // Get route for cancelling the file (this is final) and closes the process
  app.get('/cancel', function (req, res) {
    res.send('terminated');
    process.exit();
  });

  console.log(
    'Sickmerge is waiting for changes.\n' +
    'Visit http://' + hostname + ':' + port + '/ in your browser to make changes\n' +
    'Pressing "Save" or "Cancel" will do the action and close the sickmerge program.\n' +
    'Press CTRL+C if you\'ve closed your web browser and didn\'t click either of those buttons.'
  );

  app.listen(port);
  if (env !== 'test') open('http://' + hostname + ':' + port);
});
You can also see the code on GitHub, and the builds in Travis. I originally thought app.listen() was preventing execution from finishing, but removing it didn't work either.
Does Travis just block certain ports? Does it not allow filesystem queries?
It looks like the script that currently works is:
.travis.yml
language: node_js
node_js:
  - '>=0.10'
before_install:
  - npm install -g grunt-cli
before_script:
  - npm link
script:
  - "grunt --verbose"
Does it not allow filesystem queries?
Travis-CI does allow filesystem queries.

Downloading N number of remote files using Node.js synchronously

I'm working on a simple app using Node.js which needs to do the following when given a valid URL:
1. Retrieve the HTML of the remote page and save it locally.
2. Spider the HTML (using cheerio) and record all JS and CSS file references.
3. Make an HTTP request for each JS/CSS file and save it to the server by file name.
4. Zip up the HTML, CSS, and JS files and stream the resulting file to the browser.
I've got 1 and 2 working, and the first half of #3, but I'm running into issues with the asynchronous nature of the downloads. My code is running too fast and generating file names for the CSS and JS files, but none of the content. I'm guessing this is because my code isn't synchronous. The problem is that I don't know in advance how many files there might be, and all of them have to be there before the ZIP file can be generated.
Here's the flow of my app as it currently exists. I've left out the helper methods as they don't affect synchronicity. Can any of you provide input as to what I should do?
http.get(fullurl, function(res) {
  res.on('data', function (chunk) {
    var $source = $('' + chunk),
        js = getJS($source, domain),
        css = getCSS($source, domain),
        uniqueName = pw(),
        dir = [baseDir, 'jsd-', uniqueName, '/'].join(''),
        jsdir = dir + 'js/',
        cssdir = dir + 'css/',
        html = rewritePaths($source);

    // create tmp directory
    fs.mkdirSync(dir);
    console.log('creating index.html');
    // save index file
    fs.writeFileSync(dir + 'index.html', html);
    // create js directory
    fs.mkdirSync(jsdir);
    // Save JS files
    js.forEach(function(jsfile) {
      var filename = jsfile.split('/').reverse()[0];
      request(jsfile).pipe(fs.createWriteStream(jsdir + filename));
      console.log('creating ' + filename);
    });
    // create css directory
    fs.mkdirSync(cssdir);
    // Save CSS files
    css.forEach(function(cssfile) {
      var filename = cssfile.split('/').reverse()[0];
      request(cssfile).pipe(fs.createWriteStream(cssdir + filename));
      console.log('creating ' + filename);
    });
    // write zip file to /tmp
    writeZip(dir, uniqueName);
    // https://npmjs.org/package/node-zip
    // http://stuk.github.com/jszip/
  });
}).on('error', function(e) {
  console.log("Got error: " + e.message);
});
The way you are downloading files through the request module is asynchronous:
request(cssfile).pipe(fs.createWriteStream(cssdir + filename));
Instead of downloading like that, create a separate download function:
function download(localFile, remotePath, callback) {
  var localStream = fs.createWriteStream(localFile);
  var out = request({ uri: remotePath });
  out.on('response', function (resp) {
    if (resp.statusCode === 200) {
      out.pipe(localStream);
      localStream.on('close', function () {
        callback(null, localFile);
      });
    } else {
      callback(new Error("No file found at given url."), null);
    }
  });
}
Then you need to use the async module by caolan (https://github.com/caolan/async) for the loops:
// Save JS files
async.forEach(js, function(jsfile, cb) {
  var filename = jsfile.split('/').reverse()[0];
  download(jsdir + filename, jsfile, function(err, result) {
    //handle error here
    console.log('creating ' + filename);
    cb();
  });
}, function(err) {
  // create css directory
  fs.mkdirSync(cssdir);
  // Save CSS files the same way, and only write the zip
  // once every CSS download has finished
  async.forEach(css, function(cssfile, cb) {
    var filename = cssfile.split('/').reverse()[0];
    download(cssdir + filename, cssfile, function(err, result) {
      //handle error here
      console.log('creating ' + filename);
      cb();
    });
  }, function(err) {
    // write zip file to /tmp
    writeZip(dir, uniqueName);
  });
});
