How to pass an array with fork in Node.js? - javascript

I have an array and I want to send it to a child process, but the problem is that the child process receives it as a string.
How can I fix this?
Thanks.
Parent file:
const {fork} = require('child_process');
var botsList = [];
fork('./app.js', [botsList]);
Child file:
var botsList = process.argv[2];
console.log(typeof botsList);

app.js will always receive a string, since entries in process.argv are strings, but depending on the complexity of botsList you can round-trip it with JSON.stringify and JSON.parse.
index.js will do something like:
const {fork} = require('child_process');
var botsList = ["botA", "botB", "botC"];
fork('./app.js', [JSON.stringify(botsList)]);
And app.js will do parsing of the argument:
let botsList = process.argv[2];
try {
  botsList = JSON.parse(botsList);
} catch (e) {
  console.log('Could not parse string as JSON');
}
console.log(botsList);
console.log(typeof botsList);
And the output:
[ 'botA', 'botB', 'botC' ]
object
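A side note: unlike spawn, fork opens an IPC channel to the child, so you can skip argv entirely and send the array as a structured message; Node does the serialization for you. A minimal sketch:
index.js:
const { fork } = require('child_process');

const botsList = ['botA', 'botB', 'botC'];
const child = fork('./app.js');
child.send(botsList); // serialized over the IPC channel, no JSON.stringify needed
app.js:
process.on('message', (botsList) => {
  console.log(Array.isArray(botsList)); // true
  console.log(botsList);
  process.exit(0);
});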

Related

How to use Cspell --file-list stdin

I want to use the cspell --file-list command as a child process in Node.js.
I want to pass a large array of strings to this child process and feed it via stdin.
var child = spawn('cspell --file-list', [], {shell: true});
Now I want to pass the strings one by one to this child process.
Can someone help me with a small example?
You can send the files as arguments:
const spawn = require('child_process').spawn;
const cmd = 'cspell';
const checkFiles = (files) => {
  const proc = spawn(cmd, ['--file-list'].concat(files), { shell: true });
  const buffers = [];
  proc.stdout.on('data', (chunk) => buffers.push(chunk));
  proc.stderr.on('data', (data) => {
    console.error(`stderr: ${data.toString()}`);
  });
  proc.stdout.on('end', () => {
    const result = Buffer.concat(buffers).toString();
    console.log(`done, result:\n${result}`);
  });
};
// pass files array
checkFiles(['some-file', 'another-file']);
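Since the question asks about stdin specifically: cspell can also read the list from standard input (newer versions accept --file-list stdin; check your cspell version), in which case you write one path per line to the child's stdin and then close it. A sketch under that assumption:
const { spawn } = require('child_process');

const proc = spawn('cspell', ['--file-list', 'stdin'], { shell: true });

proc.stdout.on('data', (chunk) => process.stdout.write(chunk));
proc.stderr.on('data', (chunk) => process.stderr.write(chunk));

// One file path per line; end() tells cspell the list is complete.
['some-file', 'another-file'].forEach((file) => proc.stdin.write(file + '\n'));
proc.stdin.end();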

In Node.js, directing spawn stdout and stderr to a log file doesn't work

Here is my code:
var spawn = require("child_process").spawn
var fs = require("fs")
var p = spawn("ls", ["prprpr"])
var log = fs.createWriteStream("/tmp/prpr.log")
p.stdout.pipe(log)
p.stderr.pipe(log)
When I cat /tmp/prpr.log it is empty, but when I pipe to process.stdout and process.stderr instead, the error is printed correctly:
var spawn = require("child_process").spawn
var fs = require("fs")
var p = spawn("ls", ["prprpr"])
var log = fs.createWriteStream("/tmp/prpr.log")
p.stdout.pipe(process.stdout)
p.stderr.pipe(process.stderr)
How can I make spawn write stdout and stderr to a file on disk?
My Node version:
> roroco#roroco ~/Dropbox/js/ro-evernote $ node -v
v7.3.0
From the docs:
const child_process = require("child_process");
const fs = require("fs");
const out = fs.openSync("./stdout.log", "a");
const err = fs.openSync("./stderr.log", "a");
const child = child_process.spawn("ls", ["-alc"], {
  stdio: [process.stdin, out, err]
});
You just need to pass the file descriptors to stdio (no need to create streams). There are a lot of flavors for this in the child_process docs; in particular, take note of how detached and passing 'ignore' for stdin affect whether the child survives the parent process.
Keep in mind that passing file descriptors is one of Node's few ways of doing IPC.
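For example, here is a sketch of the detached variant mentioned above, where the child keeps logging even if the parent exits (stdin ignored, stdout/stderr going to the same kind of file descriptors):
const child_process = require("child_process");
const fs = require("fs");

const out = fs.openSync("./stdout.log", "a");
const err = fs.openSync("./stderr.log", "a");

const child = child_process.spawn("ls", ["-alc"], {
  detached: true,
  stdio: ["ignore", out, err]  // no stdin; stdout/stderr go to the log fds
});
child.unref(); // let the parent's event loop exit without waiting for the child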
I found the solution.
When I used the following alternative way to redirect stdout:
var fs = require("fs")
let log = fs.createWriteStream("/tmp/prpr.log");
const child = require("child_process").spawn('ls', ["prprpr"], {stdio: [null, log, log]});
it raised:
TypeError: Incorrect value for stdio stream: WriteStream {
which is why nothing could be written to the log.
As this answer explains, a WriteStream only becomes usable for stdio once it has emitted "open" (i.e. once its file descriptor exists), so the following code works:
var fs = require("fs")
let log = fs.createWriteStream("/tmp/prpr.log");
log.on("open", function () {
  const child = require("child_process").spawn('ls', ["prprpr"], {stdio: [null, log, log]});
})
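On newer Node versions (11.13+) the same wait can be expressed with events.once instead of a nested callback; a sketch:
const { once } = require("events");
const fs = require("fs");
const { spawn } = require("child_process");

async function run() {
  const log = fs.createWriteStream("/tmp/prpr.log");
  await once(log, "open"); // resolves once the underlying file descriptor exists
  const child = spawn("ls", ["prprpr"], { stdio: ["ignore", log, log] });
}
run();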

Node.js: Convert text to JSON

For some reason I'm having such a hard time converting this txt file to an actual javascript array.
myJson.txt
{"action": "key press", "timestamp": 1523783621, "user": "neovim"}
{"action": "unlike", "timestamp": 1523784584, "user": "r00k"}
{"action": "touch", "timestamp": 1523784963, "user": "eevee"}
{"action": "report as spam", "timestamp": 1523786005, "user": "moxie"}
Here is what I currently have, which doesn't work:
const fs = require('fs');
function convert(input_file_path) {
  const file = fs.readFileSync(input_file_path, 'utf8');
  const newFormat = file
    .replace(/(\r\n\t|\n|\r\t)/gm,'')
    .replace(/}{/g, '},{');
  console.log([JSON.parse(newFormat)]);
}
convert('myJson.txt');
Since your file contains one JSON object per line, you could read that file line by line using readline.
Each line is then parsed and pushed into an array, which is returned (resolved) after the file has been fully read.
'use strict';
const fs = require('fs');
const readline = require('readline');
function convert(file) {
  return new Promise((resolve, reject) => {
    const stream = fs.createReadStream(file);
    // Handle stream errors (e.g. file not found)
    stream.on('error', reject);
    const reader = readline.createInterface({
      input: stream
    });
    const array = [];
    reader.on('line', line => {
      array.push(JSON.parse(line));
    });
    reader.on('close', () => resolve(array));
  });
}
convert('myJson.txt')
  .then(res => {
    console.log(res);
  })
  .catch(err => console.error(err));
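On newer Node versions (11.14+) the same idea reads nicely with readline's async iterator; a sketch:
const fs = require('fs');
const readline = require('readline');

async function convert(file) {
  const rl = readline.createInterface({ input: fs.createReadStream(file) });
  const array = [];
  for await (const line of rl) {
    if (line.trim()) array.push(JSON.parse(line)); // skip blank lines
  }
  return array;
}

convert('myJson.txt').then(res => console.log(res)).catch(err => console.error(err));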
I would have done it this way:
var fs = require('fs');
var readline = require('readline');
var array = [];
var input = null;
var rd = readline.createInterface({
  input: fs.createReadStream(__dirname + '/demo.txt')
});
rd.on('line', function(line) {
  array.push(JSON.parse(line));
});
rd.on('close', function() {
  array.forEach(e => console.log(e.action));
});
What's happening here is that I am reading the lines of the file one by one using readline, which is one of the core modules of Node.js, listening for its events, and doing what's needed.
And yeah, you'll have to parse each line as JSON for sure ;)
Thanks
The problem with your code is that you're trying to parse a JS array as if it were a JSON array, while a JSON array has to be one single string.
Here is what you're effectively doing:
jsArray = ['{"foo": "bar"}, {"foo":"baz"}']
This is a valid JS array containing a single string value '{"foo": "bar"}, {"foo":"baz"}'.
while
jsonArrayStr = '[{"foo": "bar"}, {"foo":"baz"}]'
This is a valid JSON array string (the square brackets are part of the string).
So to get your code running, you need to add the square brackets to your string before parsing it:
function convert(input_file_path) {
  const file = fs.readFileSync(input_file_path, 'utf8');
  const newFormat = file
    .replace(/(\r\n|\n|\r)/gm, '')  // drop the newlines
    .replace(/}{/g, '},{');         // put a comma between the objects
  console.log(JSON.parse('[' + newFormat + ']'));
}
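Alternatively, since every line in the file is a standalone JSON document, you can skip the string surgery entirely and parse line by line; a small sketch:
const fs = require('fs');

function convert(input_file_path) {
  const file = fs.readFileSync(input_file_path, 'utf8');
  return file
    .split(/\r?\n/)
    .filter(line => line.trim() !== '')  // skip blank lines
    .map(line => JSON.parse(line));
}

console.log(convert('myJson.txt'));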
What I'm doing in the script is reading the content of the text file line by line, converting each line to a JSON object, and storing it in an array. When we reach the last line, the array has all the data, and you can write it to a new file with fs.writeFileSync() after converting it to a string with JSON.stringify().
Note: you have to install the line-reader package first, i.e. npm install line-reader
var lineReader = require('line-reader');
var fs = require('fs');

var obj = [];
var file = "fileName.json";
var num = 0;

lineRead();

function lineRead() {
  lineReader.eachLine('input.txt', function(line, last) {
    // log which line we're on
    console.log(num);
    num++;
    convertJson(line);
    if (last) {
      // on the last line, convert the array to a string and save it to the new file
      var data = JSON.stringify(obj);
      fs.writeFileSync(file, data);
    }
  });
}

function convertJson(data) {
  // parse the line and push the resulting object into the array
  obj.push(JSON.parse(data));
}

Get specific file content via Node.js

I use the following code to read a specific file located in my C folder.
My question is how to get specific content from inside this file. For example, I have a text file whose content is:
name:test 1 test2 test 3
user: foo bar
When the file is read, I need to get "test 1 test2 test 3" as a string whenever I find the key "name" in the text file.
How can I do that?
fs = require('fs')
fs.readFile('c//myfile.txt', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
});
The other answer will have issues when your value contains ':'.
For example, when your line is 'name: bla:bli:blub', you would only get 'bla' as a result.
So here is my suggestion, which won't have that problem.
You could place this snippet into your readFile callback:
var keyValueObject = {};
data.split("\n").forEach(function(element) {
  var keyValueSeperatorPosition = element.indexOf(':');
  var key = element.substr(0, keyValueSeperatorPosition);
  var value = element.substring(keyValueSeperatorPosition + 1);
  keyValueObject[key] = value;
});
You can then access your values using:
keyValueObject['name']
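Put together inside the readFile call from the question, that looks roughly like this:
const fs = require('fs');

fs.readFile('c//myfile.txt', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  var keyValueObject = {};
  data.split("\n").forEach(function(element) {
    // split on the FIRST ':' only, so values containing ':' stay intact
    var separatorPosition = element.indexOf(':');
    keyValueObject[element.substr(0, separatorPosition)] =
      element.substring(separatorPosition + 1);
  });
  console.log(keyValueObject['name']); // "test 1 test2 test 3"
});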
EDIT 1:
I've made it a little more generic now: keyVals is an array of objects holding the key/val pairs from your file.
fs = require('fs')
var keyVals = [];
fs.readFile('c//myfile.txt', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  var lines = data.split('\n');
  lines.forEach(function(line) {
    var pair = line.split(':');
    keyVals.push({key: pair[0], val: pair[1]});
  });
});
Access it like this:
keyVals.forEach(function(element) {
  console.log(element.key, element.val);
});
I hope this helps

How to read a file, store data, and then write it

I have a text file with a ton of values that I want to convert to meaningful JSON using the Node.js fs module.
I want to store the first value of every line in an array, unless the value is already present.
7000111,-1.31349,36.699959,1004,
7000111,-1.311739,36.698589,1005,
8002311,-1.262245,36.765884,2020,
8002311,-1.261135,36.767544,2021,
So for this case, I'd like to write to a file:
[7000111, 8002311]
Here's what I have so far. It writes [] to the file.
var fs = require('fs');
var through = require('through');
var split = require('split');
var shape_ids = [];
var source = fs.createReadStream('data/shapes.txt');
var target = fs.createWriteStream('./output3.txt');
var tr = through(write, end);
source
  .pipe(split())
  .pipe(tr);

// Function definitions
function write(line) {
  var line = line.toString();
  var splitted = line.split(',');
  // if it's not in array
  if (shape_ids.indexOf(splitted[0]) > -1) {
    shape_ids.push(splitted[0]);
  }
}

function end() {
  shape_ids = JSON.stringify(shape_ids);
  target.write(shape_ids);
  console.log('data written');
}
The code uses the split and through modules.
How do I store the values in the array and write the populated array to the file?
Update:
This is what I want to do, but it's in Ruby:
shape_ids = []
File.open("data/shapes.txt").readlines.each do |line|
  data = line.split(',')
  shape_id = data.first
  if !shape_ids.include? shape_id
    shape_ids.push(shape_id)
  end
end
puts shape_ids # array of unique shape_ids
Can I do this in javascript?
Unless you are super comfortable with the new Stream API in node, use the event-stream module to accomplish this:
var fs = require('fs');
var es = require('event-stream');
function getIds(src, target, callback) {
  var uniqueIDs = [];
  es.pipeline(
    fs.createReadStream(src),
    es.split(),
    es.map(function (line, done) {
      var id = line.split(',').shift();
      // skip ids we have already seen
      if (uniqueIDs.indexOf(id) > -1) return done();
      uniqueIDs.push(id);
      done(null);
    }),
    es.wait(function (err, text) {
      // Wrap the ids in an object so the output file is self-describing
      var data = JSON.stringify({ ids: uniqueIDs });
      fs.writeFile(target, data, function (err) {
        if ('function' == typeof callback) callback(err);
      });
    })
  );
}
getIds('./values.txt', './output.json');
Unfortunately, there is no "easy" way to keep this as a pure stream flow, so you have to "wait" until the data is done filtering before turning it into a JSON string. Hope that helps!
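For reference, the reason the original code writes [] is the inverted check in write: the comment says "if it's not in array", but indexOf(...) > -1 tests the opposite, so nothing is ever pushed. A minimal fix to the original through/split version:
function write(line) {
  var splitted = line.toString().split(',');
  // push only when the id is NOT already in the array (and skip blank lines)
  if (splitted[0] && shape_ids.indexOf(splitted[0]) === -1) {
    shape_ids.push(splitted[0]);
  }
}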
