I want to write cmd output to file instead of stdout - javascript

I can run a cmd in node.js using child_process and spawn. I want the output of this command to be written into a file instead of stdout.
test.js
const expect = require('chai').expect;
const { spawn } = require('child_process');
let path = require('path');
let fs = require('fs');

// Tried the following, but neither worked:
//
// 1) const cmd = spawn(ansysfnonetclient, options, {
//      stdio: [
//        0,                           // Use parent's stdin for child.
//        'pipe',                      // Pipe child's stdout to parent.
//        fs.openSync('err.out', 'w')  // Direct child's stderr to a file.
//      ]
//    });
//
// 2) const cmd = spawn(ansysfnonetclient, options, {shell: true, stdio: 'inherit'});

it('run the cmd and write o/p to file', function (done) {
    this.timeout(30000);
    let options = ['-h', '-o', 'temp.log'];
    let ansysfnonetclient = path.resolve(__dirname, '../../../../../../../../saoptest/annetclient.exe');
    const cmd = spawn(ansysfnonetclient, options, {shell: true, stdio: 'inherit'});
    console.log(cmd);
    done();
});

The stdio option passes three "files":
- input
- output
- error output
If you want to pipe regular output to a file, you have to pass the file as the second item in stdio:
const { spawn } = require('child_process');
const fs = require('fs');
const stdio = [
    0,
    fs.openSync('std.out', 'w'),
    fs.openSync('err.out', 'w')
];
const child = spawn('echo', ['hello world!'], {stdio});
Read more about it at https://nodejs.org/api/child_process.html#child_process_options_stdio.

const expect = require('chai').expect;
const { spawn } = require('child_process');
let path = require('path');
let fs = require('fs');
```
// keep the default 'pipe' stdio so cmd.stdout is available as a stream
const cmd = spawn(ansysfnonetclient, options, {shell: true});
cmd.stdout.on('data', function(chunk) {
    // append each chunk so earlier output is not overwritten
    fs.appendFile(path.resolve(__dirname, '../../../../../../../../output.log'), chunk.toString(), function(err) {
        if (err) {
            return console.log(err);
        }
        console.log("The file was saved!");
    });
});
```
Inspired by this post: Node.js: Capture STDOUT of `child_process.spawn`
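A simpler variant of the same idea is to leave stdio at its default ('pipe') and stream the child's output straight into files; a minimal sketch, with the executable path and options reduced to placeholders:
const fs = require('fs');
const path = require('path');
const { spawn } = require('child_process');

// placeholder path and options; substitute the real ansysfnonetclient path and flags
const ansysfnonetclient = path.resolve(__dirname, 'annetclient.exe');
const options = ['-h', '-o', 'temp.log'];

// default stdio keeps cmd.stdout and cmd.stderr available as readable streams
const cmd = spawn(ansysfnonetclient, options);
cmd.stdout.pipe(fs.createWriteStream(path.resolve(__dirname, 'output.log')));
cmd.stderr.pipe(fs.createWriteStream(path.resolve(__dirname, 'error.log')));

cmd.on('close', (code) => console.log('child exited with code ' + code));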

Related

directory is not being created

I am trying to create a directory with subfolders in my application. The request creates the folders only if the parent folder already exists; nothing is created if the root folder is missing.
import { mkdir } from 'fs';
import { join } from 'path';

mkdir(join(__dirname, '../folder_to_create_directory/', req.body.path), (err) => {
    if (err) {
        return "error";
    }
    return "success";
});
req.body.path is a path string, e.g. test/folder/subfolder. The code works only if we create the "test" folder manually (and even then it does not return the "success" message, although the directory is created). If the test folder is not there, the directory is not created.
Expected output:
folder_to_create_directory/test/folder/subfolder
You can use the fs module to work with the file system.
For nested dirs:
var fs = require('fs');
var { join } = require('path');
var dir = join(__dirname, '../folder_to_create_directory/', req.body.path);
if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
}
Or, for individual dirs:
var fs = require('fs');
var { join } = require('path');
var dir = join(__dirname, '../folder_to_create_directory/', req.body.path);
if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir);
}
You are missing the option { recursive: true }. Try this example:
const { mkdir } = require("fs");
const { join } = require("path");

const path = join(__dirname, "../folder_to_create_directory", "test/folder/subfolder");
mkdir(path, { recursive: true }, (err) => {
    if (err) {
        return "error";
    }
    return "success";
});
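Note that returning "error" or "success" from inside the callback never reaches your route handler; the callback's return value is discarded, which is why the question never sees the "success" message. If you need the result, the promise-based API is easier to work with; a minimal sketch, assuming Node 10+ and the same folder layout as the question:
const { mkdir } = require('fs').promises;
const { join } = require('path');

async function createDir(relPath) {
    const dir = join(__dirname, '../folder_to_create_directory/', relPath);
    // recursive: true also creates any missing parent folders
    await mkdir(dir, { recursive: true });
    return 'success';
}

createDir('test/folder/subfolder')
    .then((msg) => console.log(msg))   // "success"
    .catch((err) => console.error('error', err));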

How to unzip file with Node.js

I need to zip and unzip a file with Node.js but I have a problem.
const fs = require("fs");
const zlib = require('zlib');
function UnZip(zip, paths) {
    var inp = fs.createReadStream("f:/test.zip");
    var Exzip = zlib.createUnzip();
    inp.pipe(Exzip).pipe("f:/");
}
Error:
TypeError: dest.on is not a function
Here is how you can do it with the zlib module.
const fs = require('fs');
const zlib = require('zlib');
const fileContents = fs.createReadStream('file1.txt.gz');
const writeStream = fs.createWriteStream('file1.txt');
const unzip = zlib.createGunzip();
fileContents.pipe(unzip).pipe(writeStream);
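For the reverse direction with zlib (producing a .gz file rather than a .zip archive), the same piping pattern works with createGzip; a minimal sketch:
const fs = require('fs');
const zlib = require('zlib');

const source = fs.createReadStream('file1.txt');
const destination = fs.createWriteStream('file1.txt.gz');
const gzip = zlib.createGzip();

// read -> compress -> write, all as streams
source.pipe(gzip).pipe(destination);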
Zipping the file
const archiver = require('archiver'),
      archive = archiver('zip'),
      fs = require('fs'),
      output = fs.createWriteStream('mocks.zip');

// report the archive size once the output stream has been closed
output.on('close', () => {
    console.log(archive.pointer() + ' total bytes');
});
archive.on('error', (err) => {
    throw err;
});

archive.pipe(output);
// temp.txt must exist in the folder where you are writing the code,
// or you can give the whole path
const file_buffer = fs.readFileSync('temp.txt');
archive.append(file_buffer, { name: 'tttt.txt' });
archive.finalize();
Unzipping a file
const unzip = require('unzip'),
      fs = require('fs');
fs.createReadStream('temp1.zip').pipe(unzip.Extract({ path: 'path' }));
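Keep in mind that zlib only handles gzip/deflate streams, not the .zip container format, which is why piping test.zip through createUnzip will not extract it even with a proper destination stream. The unzip package is unmaintained; the unzipper package exposes what I understand to be a compatible Extract interface, so a hedged sketch would be:
const fs = require('fs');
const unzipper = require('unzipper');

// stream the archive into the extractor; files land under ./extracted
fs.createReadStream('temp1.zip').pipe(unzipper.Extract({ path: 'extracted' }));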

Solidity: How to compile multiple smart contracts in compile.js file?

I would like to compile multiple contracts in one compile.js file but I'm not sure how to do it.
My compile.js file with a single contract looks like this:
const path = require('path');
const fs = require('fs');
const solc = require('solc');
const lotteryPath = path.resolve(__dirname, 'contracts', 'Lottery.sol');
const source = fs.readFileSync(lotteryPath, 'utf8');
module.exports = solc.compile(source, 1);
How can I add more contracts to the compile.js file? I understand that the 1 must be changed to the number of contracts, but I'm not sure what else is required.
Here is an example I did; you can find it in my public repo. Briefly, I have a "build" folder where I write the output of each compiled contract to JSON files.
const path = require("path"); //nodejs ’path’ module
const solc = require("solc"); //solidity compiler module
const fs = require("fs-extra"); //file system module
// Feth path of build
const buildPath = path.resolve(__dirname, "build");
const contractspath = path.resolve(__dirname, "contracts");
// Removes folder build and every file in it
fs.removeSync(buildPath);
// Fetch all Contract files in Contracts folder
const fileNames = fs.readdirSync(contractspath);
// Gets ABI of all contracts into variable input
const input = fileNames.reduce(
(input, fileName) => {
const filePath = path.resolve(__dirname, "contracts", fileName);
const source = fs.readFileSync(filePath, "utf8");
return { sources: { ...input.sources, [fileName]: source } };
},
{ sources: {} }
);
// Compile all contracts
const output = solc.compile(input, 1).contracts;

// Re-create the build folder for the output files of each contract
fs.ensureDirSync(buildPath);

// output contains the objects from all contracts;
// write the contents of each to a separate file
for (let contract in output) {
    fs.outputJsonSync(
        path.resolve(buildPath, contract.split(":")[1] + ".json"),
        output[contract]
    );
}
Basically, if your folder structure does not match mine, you have to change this part of the above code:
// Fetch paths of the build and contracts folders
const buildPath = path.resolve(__dirname, "build");
const contractspath = path.resolve(__dirname, "contracts");
The accepted solution does not work for Solidity >0.6.0 and <=0.8.1.
For the mentioned versions, I solved it as follows:
const path = require("path");
const fs = require("fs-extra");
const solc = require("solc");
const buildPath = path.resolve(__dirname, "build");
fs.removeSync(buildPath);
const contractPath = path.resolve(__dirname, "contracts");
const fileNames = fs.readdirSync(contractPath);
const compilerInput = {
    language: "Solidity",
    sources: fileNames.reduce((input, fileName) => {
        const filePath = path.resolve(contractPath, fileName);
        const source = fs.readFileSync(filePath, "utf8");
        return { ...input, [fileName]: { content: source } };
    }, {}),
    settings: {
        outputSelection: {
            "*": {
                "*": ["abi", "evm.bytecode.object"],
            },
        },
    },
};

// Compile all contracts
const compiled = JSON.parse(solc.compile(JSON.stringify(compilerInput)));
fs.ensureDirSync(buildPath);
fileNames.map((fileName) => {
    const contracts = Object.keys(compiled.contracts[fileName]);
    contracts.map((contract) => {
        fs.outputJsonSync(
            path.resolve(buildPath, contract + ".json"),
            compiled.contracts[fileName][contract]
        );
    });
});
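To use the compiled output afterwards, require the generated JSON files from the build folder; a hedged sketch, assuming one of the compiled files defines a contract named Lottery (as in the question), so the script above wrote build/Lottery.json:
// fields follow the outputSelection requested above: "abi" and "evm.bytecode.object"
const compiledLottery = require('./build/Lottery.json');

const abi = compiledLottery.abi;
const bytecode = compiledLottery.evm.bytecode.object;

console.log('ABI entries:', abi.length);
console.log('bytecode length (hex chars):', bytecode.length);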
Be sure to check that your pragma solidity x.x.x matches the version specified in your package.json. For example, if I'm using Solidity 0.6.12, my solc dependency would be:
"dependencies": {
...
"solc": "^0.6.12",
...
}
compile.js:
const path = require('path');
const solc = require('solc');
const fs = require('fs-extra');

const builtPath = path.resolve(__dirname, 'build');
// remove the build folder and everything in it
fs.removeSync(builtPath);

const healthPath = path.resolve(__dirname, 'contract', 'health.sol');
// read the content of the file
console.log(healthPath);
const source = fs.readFileSync(healthPath, 'utf8');

// compile the contract
const output = solc.compile(source, 1).contracts;

// re-create the build folder
fs.ensureDirSync(builtPath);
console.log(output);

for (let contract in output) {
    fs.outputJsonSync(
        path.resolve(builtPath, contract.replace(':', '') + '.json'),
        output[contract]
    );
}

How to run(sync) multiple .js files from a directory

I'm trying to run multiple .js files from a directory:
exampleDir/test.js
console.log('hi');
run.js
const fs = require('fs');
const {execFileSync} = require('child_process');
const testsFolder = './exampleDir/';
const files = fs.readdirSync(testsFolder);
const funcs = files.map(function(file) {
    const out = execFileSync(`node ${testsFolder}${file}`);
    console.log(out.toString());
});
however I get:
> example#1.0.0 test /home/perdugames/example
> node ./run.js
child_process.js:624
throw err;
^
Error: spawnSync node ./exampleDir/test.js ENOENT
...
File paths should be specified unambiguously, preferably independent of the current working directory and relative to the current module. To create a new Node process, there are spawn and spawnSync in child_process:
...
const path = require('path');
const testsFolder = path.join(__dirname, './exampleDir/');
const files = fs.readdirSync(testsFolder);
const funcs = files.map(function(file) {
    const filePath = path.join(testsFolder, file);
    // pass the program and its arguments separately instead of one command string
    const out = spawnSync('node', [filePath]);
    console.log(out.stdout.toString());
});
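The original execFileSync call can also be kept, as long as the program and its arguments are passed separately rather than as one command string; a minimal sketch (process.execPath is the path of the running node binary):
const fs = require('fs');
const path = require('path');
const { execFileSync } = require('child_process');

const testsFolder = path.join(__dirname, './exampleDir/');

fs.readdirSync(testsFolder).forEach((file) => {
    // program and argument list are separate, so no shell parsing is involved
    const out = execFileSync(process.execPath, [path.join(testsFolder, file)]);
    console.log(out.toString());
});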

nodejs load file

I want to load test.txt with nodejs.
var fs = require('fs');
fs.readFile('./test.txt', function (err, data) {
    if (err) {
        throw err;
    }
    console.log(data);
});
The path of the server is C:\server\test\server.js. The test.txt is located in the same directory, but I get this error: Error: ENOENT, no such file or directory 'C:\Users\User\test.txt'
Paths in Node are resolved relative to the current working directory. Prefix your path with __dirname to resolve the path to the location of your Node script.
var fs = require('fs');
fs.readFile(__dirname + '/test.txt', function (err, data) {
    if (err) {
        throw err;
    }
    console.log(data.toString());
});
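On Node 10 and later the same read can also be done with the promise API, which avoids both the callback and a blocking sync call; a minimal sketch:
const { readFile } = require('fs').promises;
const path = require('path');

readFile(path.join(__dirname, 'test.txt'), 'utf8')
    .then((data) => console.log(data))
    .catch((err) => console.error(err));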
With Node 0.12, it's possible to do this synchronously now:
var fs = require('fs');
var path = require('path');

// Buffer mydata
var BUFFER = bufferFile('../test.txt');

function bufferFile(relPath) {
    return fs.readFileSync(path.join(__dirname, relPath)); // blocks until the file is read
}
fs is the file system module. readFileSync() returns a Buffer, or a string if you ask for one.
fs resolves relative paths against the current working directory, which is not necessarily where your script lives; joining with __dirname via path is the work-around.
To load as a string, specify the encoding:
return fs.readFileSync(path,{ encoding: 'utf8' });
You should use __dirname to get the directory the file is located in, instead of the current working directory:
fs.readFile(__dirname + "/test.txt", ...);
Use path and fs:
const fs = require("fs");
const pth = require("path");
Sync:
let data = fs.readFileSync(pth.join(__dirname,"file.txt"));
console.log(data + "");
Async:
fs.readFile(pth.join(__dirname, "file.txt"), (err, data) => {
    console.log(data + "");
});
Also, if you need to serve the file to clients repeatedly and it is not large, you can keep a cached copy of it:
const exp = require("express");
const app = exp();
const fs = require("fs");
const pth = require("path");
let file = "";
app.get("/file", (q, r) => {
if (file === "")
file = fs.readFileSync(pth.join(__dirname,"file.txt")) + "";
r.writeHead(200, { "Content-Type": "text/plain" });
r.write(file);
r.end();
});
So if it is in the same directory, just do this:
fs.readFile(__dirname + '/foo.txt', function(e, d) { console.log(d); });
If it's in the same directory it should work. I have tested the same code with a test file and it works fine:
var fs = require('fs');
fs.readFile('./test.txt', function (err, data) {
    if (err) {
        throw err;
    }
    console.log(data.toString());
});
