I am trying to execute a command using a child process, and I can't execute it by absolute path using Node.js, but when I run it from a terminal, everything is fine.
Why is that?
My code is right here:
const cp = require('child_process');
const commandExecutor = 'node-install/target/node/yarn/dist/bin/yarn.exe';
const symlinkFolder = 'node-install/target/node/target/symlink';
const workingDirectories = [];
Array.from(process.argv).forEach((value, index) => {
if (index >= 2) {
workingDirectories[index - 2] = value;
}
});
workingDirectories.forEach(function(workingDirectory) {
const argumentsUnlink = 'unlink #item# --link-folder ' + symlinkFolder + ' --cwd ' + workingDirectory;
const unlinkCommand = commandExecutor + ' ' + argumentsUnlink;
const execution = cp.exec(
unlinkCommand,
function (error, stdout, stderr) {
console.log(stdout);
console.log(error);
console.log(stderr);
});
execution.on('exit', function (code) {
let message = 'Child process exited with exit code ' + code + ' on route ' + workingDirectory;
console.log(message);
});
});
An example of command is:
node-install/target/node/yarn/dist/bin/yarn.exe unlink #item# --link-folder node-install/target/node/target/symlink --cwd appointments/target/generated-sources/frontend/
But the error I've got is:
'node-install' is not recognized as an internal or external command, operable program or batch file.
While I execute command from terminal, everything is fine.
One possible problem is that Node.js is unable to locate the file by its relative path. You can construct an absolute path to fix this; here are a few options (not an exhaustive list) that help if node-install is located in your project root:
__dirname, which returns the directory of current module, so if
node-install/../..
index.js
then in index.js we can use
const commandExecutor = `${__dirname}/node-install/target/node/yarn/dist/bin/yarn.exe`;
process.cwd(), which returns full path of the process root, so if you start nodejs from folder having node-install, then you can refer to exe like this:
const commandExecutor = `${process.cwd()}/node-install/target/node/yarn/dist/bin/yarn.exe`;
Related
I am currently having trouble with what is most likely a very simple function for most.
I have what I call a Find & Read function.
The "find" part of the function utilises a child_process with the find -name command in order to locate a given file by name and extension,
the "Read" part of the function is where the problem seems to be.
The "Read" part of the function is supposed to take the result of the command executed in the child_process, store it in a separate variable, and then start reading the file using fs.readFile; however, everything I have tried presents me with the following error in the console...
Locating File...
File Found: this/way/to/the/File.txt
Reading File...
Reading File Failed!
Error: ENOENT: no such file or directory, open 'this/way/to/the/File.txt'
at Object.openSync (node:fs:585:3)
at Object.readFileSync (node:fs:453:35)
at /home/user/test/index.js:23:8
at ChildProcess.exithandler (node:child_process:390:7)
at ChildProcess.emit (node:events:527:28)
at maybeClose (node:internal/child_process:1092:16)
at Socket.<anonymous> (node:internal/child_process:451:11)
at Socket.emit (node:events:527:28)
at Pipe.<anonymous> (node:net:710:12) {
errno: -2,
syscall: 'open',
code: 'ENOENT',
path: 'this/way/to/the/File.txt'
}
However, that isn't correct: the file actually does exist inside the directory found by the command executed in the child_process, but it's still throwing errors saying the file doesn't exist... if the file didn't exist, it wouldn't have been located and had its location printed in the console... so I'm not sure where the error is coming from, hence why I've come to StackOverflow for help.
I am using the JavaScript coding below to to try and achieve this...
Find & Read function
// Locate a file by name with find(1), then try to read it.
var exec = require("child_process").exec, child;
var fs = require('fs');
var path = require('path');
// This seems to be working fine to locate the file
var folder = '/just/some/folder/location/';
var file = "File.txt";
console.log("Locating File...")
console.log();
// find runs with { cwd: folder }, so the paths it prints are relative
// to `folder` (e.g. "./sub/File.txt"), not to process.cwd().
exec('find -name ' + file, { cwd: folder }, (error, stdout, stderr) => {
// Strip the leading "./" and keep only the first result line.
// NOTE(review): the resulting path is still relative to `folder`,
// but fs.readFile below resolves it against process.cwd() -- the
// likely cause of the ENOENT error shown above.
var filePath = stdout.substring(stdout.indexOf("./") + 2).split("\n")[0];
if (error !== null) {
console.log("Locating Failed Failed!");
console.log(error);
console.log(stderr);
return;
}
// but it fails to read the file afterwards
console.log("File Found: " + filePath); // print filepath in the log to confirm the file was found
console.log();
console.log("Reading File...");
fs.readFile(filePath, 'utf8', (error, data) => {
if (error) {
console.log('Reading File Failed');
console.log(error);
return;
}
});
});
Problem solved.
The problem was located within the filePath variable.
What was happening was, I didn't join both the folder variable & the filePath variable together with path.join() once the file was located!
Also, the instance of .split("\n")[0] at the end of the filePath variable needed to be changed to an instance of .split("\n"), because for some reason the former coding was causing problems with the function as well.
Fix that was implemented
The below instance of the filePath variable...
var filePath = stdout.substring(stdout.indexOf("./") + 2).split("\n")[0];
It was changed to the following...
var filePath = path.join(folder, stdout.substring(stdout.indexOf("./") + 2).split("\n"));
Which now runs beautifully, the file gets located and it gets read beautifully!
Here's the Full fixed code below...
var folder = '/just/some/folder/location/';
var file = "File.txt";
console.log("Locating File...")
console.log();
exec('find -name ' + file, { cwd: folder }, (error, stdout, stderr) => {
var filePath = path.join(folder, stdout.substring(stdout.indexOf("./") + 2).split("\n")); // Fix contained here
if (error !== null) {
console.log("Locating File Failed!");
console.log(error);
console.log(stderr);
return;
}
console.log("File Found: " + filePath);
console.log();
console.log("Reading File...");
fs.readFileSync(filePath, 'utf8', (error, data) => {
if (error) {
console.log('Reading File Failed');
console.log(error);
return;
}
});
});
I'm using npm node-cmd in my react app and it is failing on cmd.get (sending commands to the terminal).
All my code works in vanilla html and javascript but I need it to work in React. I'm thinking my setup is wrong in react.
import cmd from 'node-cmd';

// NOTE(review): `result` holds the full path of the selected project file
// and is assumed to be defined elsewhere in this module -- confirm.
let projDir = result.substr(0, result.lastIndexOf('/')+1);
let projFile = result.substr(result.lastIndexOf('/')+1);

// Shell steps: copy the project file, gunzip it, rename the output.
let copyFile = "cp '" + projFile + "' pproXML.gz";
let unzip = "gunzip -d pproXML.gz";
let rename = "mv pproXML pproXML.prproj";
let targetXml = projDir + 'pproXML.prproj';

// NOTE(review): "cd ..'" produces `cd ..'<projDir>'`, i.e. it changes
// into `../<projDir>` -- confirm this is the intended directory.
let cmdStr =
"cd ..'" + projDir + "'\n" +
copyFile + "\n" +
unzip + "\n" +
rename + "\n" +
"ls";

// FAILS HERE
cmd.get(
cmdStr,
function(err, data, stderr){
alert(data);
}
); // Fix: the cmd.get(...) call was never closed -- the ");" was missing
I'm getting TypeError: exec is not a function
exec is not a function tells me that something goes wrong inside node-cmd.
Just open the lib:
var exec = require('child_process').exec;
That line will never work from browser. Just try with a single ls
I am trying to display only the VmRSS attribute. When I run the command
cat ./Status
I get alot of attributes and their corresponding values. What I am trying to do is to Display only the vmRSS programmatically. I can do it in the console as follows:
cat ./status | grep VmR
but how can I do it programmatically.
my attempts
const ls2 = spawn('cat', ['/proc/' + process.pid + '/status']);
Since child_process spawn() does not run its command through a shell, you can't use a pipe like `| grep` with it; you're better off using the child_process exec() command, which does run the command in a shell.
Here is an example (thanks to #Inian):
const { exec } = require('child_process');
exec('grep VmR /proc/' + process.pid + '/status', (err, stdout) => {
if (err) return console.log(err)
console.log(stdout) // VmRSS: 13408 kB
})
Otherwise, if you don't want to spawn a shell to get that info, you could use fs to read the file, something like that:
const fs = require('fs');
let process_status = '/proc/' + process.pid + '/status';
fs.readFile(process_status, { encoding: 'utf8' }, (err, buf) => {
if (err) return console.log(err)
let lines = buf.split('\n') // each line in an array
let line = lines.filter(line => /VmRSS/.test(line)) // find interesting line
let VmR = line[0].replace(/\t/g, '') // clean output removing tabulation
console.log(VmR) // VmRSS: 13208 kB
})
I created this test case to prove that the cat method is not working for me using the IPFS JavaScript library. What am I doing wrong? My console output does not log anything from within the 'node.files.cat' function; it's as if that (err, filestream) callback is not being called at all. I know my multihash is somewhat working because if I change it I get a fatal error. However, right now it seemingly just locks up and pauses after NODE READY.
// Test case: fetch a file from IPFS by multihash and cache it in the
// OS temp directory.
const IPFS = require('ipfs')
const path = require('path')
const os = require('os')
const fs = require('fs')
console.log('ipfs test ')
var mhash = "Qmc5LfkMVAhvzip2u2RjRBRhgVthtSokHsz4Y5bgaBCW2R";
// Create the IPFS node instance
const node = new IPFS()
node.on('ready', () => {
// Your node is now ready to use \o/
console.log('NODE READY')
/*
THIS WORKS
var test_rstream = fs.createReadStream( path.join(__dirname, '.', '/public/sample_land_file.json') )
var wstream = fs.createWriteStream(os.tmpdir() + '/lobc_cache/'+'Qmc5LfkMVAhvzip2u2RjRBRhgVthtSokHsz4Y5bgaBCW2R');
wstream.on('finish', function() {
console.log('Written ' + wstream.bytesWritten + ' ' + wstream.path);
test_rstream.close()
});
test_rstream.pipe(wstream);
*/
node.files.cat("Qmc5LfkMVAhvzip2u2RjRBRhgVthtSokHsz4Y5bgaBCW2R", function (err, filestream) {
console.log('WHY ISNT THIS FIRING ') // i never see this logged
console.log(filestream)
console.log(os.tmpdir())
if (!fs.existsSync(os.tmpdir() + '/lobc_cache')){
fs.mkdirSync(os.tmpdir() + '/lobc_cache');
}
var wstream = fs.createWriteStream(os.tmpdir() + '/lobc_cache/'+'Qmc5LfkMVAhvzip2u2RjRBRhgVthtSokHsz4Y5bgaBCW2R');
// NOTE(review): `result` is assigned without var/let/const -- an
// implicit global; it also appears unused afterwards.
result = '';
wstream.on('finish', function() {
console.log('Written ' + wstream.bytesWritten + ' ' + wstream.path);
filestream.close()
});
filestream.pipe(wstream);
// wstream.end();
// file will be a stream containing the data of the file requested
})
// stopping a node
// NOTE(review): node.stop() is invoked synchronously right after cat()
// is *started*, so the node may already be shutting down before the cat
// callback ever fires -- a plausible reason it never logs. Consider
// calling stop() only from inside the cat callback.
node.stop(() => {
// node is now 'offline'
})
})
node.on('start', () => {
console.log('NODE START')
})
This looks like a bug. A quick way to solve it is just to put the node.files.cat inside the callback for .on('ready'). Seems that bitswap is dropping requests before the node is online.
Let me know if this works.
I'm just wondering whether it is at all possible to transfer a directory from a unix server to my local machine using the ssh2 module in node.js. I have connected to the remote host and can read the directory as well as transfer single files, but there are 28 folders in the directory which each contain files and sub directories. What I'd like to do is take an exact copy of the main directory from the server to my local machine.
I was using fastGet with single files, but transferring a directory gives: Error: EISDIR, open __dirname/../localdirectory/ which I think implies I can't use fastGet to get an entire directory. I also tried using the exec command to try and scp it over, but I couldn't work out the syntax for the local directory:
// c is an active connection
// Test: run scp over the ssh2 exec channel to check single-file transfer.
// NOTE(review): c.exec() runs the command on the *remote* host, so this
// scp copies between paths on the remote machine -- it does not download
// anything to the local machine.
c.exec('scp filethatexists.extension /../filepath/newname.extension', function(err, stream) {
if (err) {
console.log("error: " + err);
// NOTE(review): `stream.end;` is a bare property access, not a call --
// it does nothing. It was presumably meant to be `stream.end()`.
stream.end;
};
stream.on('data', function(data, extended) {
// ssh2 marks stderr chunks via the `extended` argument.
console.log((extended === 'stderr' ? 'STDERR: ' : 'STDOUT: ') + data);
});
stream.on('end', function() {
console.log('Stream :: EOF');
});
stream.on('close', function() {
console.log('Stream :: close');
});
stream.on('exit', function(code, signal) {
console.log('Stream :: exit :: code: ' + code + ', signal: ' + signal);
c.end();
});
});
This just results in the EOF calling. This code was just me testing If I could get a single file transferring.
Can anyone provide me with any assistance? Thank you in advance.
A couple of solutions:
You could recursively traverse the directory (making directories and transferring files as needed) using the sftp methods
Tar the directory (compress it too if you want) to stdout (e.g. tar cf - mydir) and then process that incoming stdout data with the tar module (and the built-in zlib module first if you end up compressing the directory).
// Requires:
// * `npm install tar-fs`
// * `ssh2` v0.5.x or newer
var tar = require('tar-fs');
var zlib = require('zlib');
// Stream a remote directory to `localPath` by running `tar` on the
// remote host over an ssh2 connection and extracting the stream locally.
//   conn        - an already-connected ssh2 client
//   remotePath  - remote directory to archive
//   localPath   - local directory to extract into
//   compression - optional: true (gzip level 6), a level 1-9, or the
//                 callback itself when compression is omitted
//   cb          - called with an Error, or undefined on success
function transferDir(conn, remotePath, localPath, compression, cb) {
var cmd = 'tar cf - "' + remotePath + '" 2>/dev/null';
// `compression` is optional: shift it into `cb` when a function was
// passed, and normalize `true` to the default gzip level 6.
if (typeof compression === 'function')
cb = compression;
else if (compression === true)
compression = 6;
// Only a level in [1, 9] enables the remote gzip stage; anything else
// (including the shifted-callback case above) disables compression.
if (typeof compression === 'number'
&& compression >= 1
&& compression <= 9)
cmd += ' | gzip -' + compression + 'c 2>/dev/null';
else
compression = undefined;
conn.exec(cmd, function(err, stream) {
if (err)
return cb(err);
var exitErr;
var tarStream = tar.extract(localPath);
// The local extractor finishing marks the end of the transfer; report
// any remote exit error captured below.
tarStream.on('finish', function() {
cb(exitErr);
});
// Record a non-zero exit code or a kill signal from the remote tar
// pipeline so it can be surfaced once extraction finishes.
stream.on('exit', function(code, signal) {
if (typeof code === 'number' && code !== 0) {
exitErr = new Error('Remote process exited with code '
+ code);
} else if (signal) {
exitErr = new Error('Remote process killed with signal '
+ signal);
}
}).stderr.resume(); // drain stderr so the channel can close
// Insert a local gunzip stage when the remote side gzipped the stream.
if (compression)
stream = stream.pipe(zlib.createGunzip());
stream.pipe(tarStream);
});
}
// USAGE ===============================================================
// Connect, pull /home/foo into ./download (gzip level 6), then hang up.
var ssh = require('ssh2');
var conn = new ssh();

conn.on('ready', () => {
  transferDir(
    conn,
    '/home/foo',              // remote directory to fetch
    __dirname + '/download',  // local destination
    true,                     // uses compression with default level of 6
    (err) => {
      if (err) throw err;
      console.log('Done transferring');
      conn.end();
    }
  );
}).connect({
  host: '192.168.100.10',
  port: 22,
  username: 'foo',
  password: 'bar',
});
I am also trying to download folders using SSH. It took me more than 10 days and I'm still trying to do that. But in the meantime I found some other code which will do the same thing for me. The code below will download every folder and file inside a directory.
enter image description here