node streams - get maximum call stack exceeded - javascript

I can't seem to figure out why I'm getting this error with my stream pipeline. I think I have exhausted all paths, so is there something I'm missing? Here is what I have:
// Question code (as posted): reads a CSV file, pipes it through a fast-csv
// parse stream, and then pipes the result into process.stdout.
var myCsvStream = fs.createReadStream('csv_files/myCSVFile.csv');
var csv = require('fast-csv');
var myData = [];
var myFuncs = {
// NOTE(review): `filepath` is accepted but never used — the function always
// returns a fresh parse stream regardless of the argument.
parseCsvFile: function (filepath) {
var csvStream;
csvStream = csv
.parse({headers: true, objectMode: true, trim: true})
.on('data', function (data) {
myData.push(data);
})
.on('end', function () {
console.log('done parsing counties')
});
return csvStream;
}
}
myCsvStream
.pipe(myFuncs.parseCsvFile())
// NOTE(review): the parse stream emits row objects, and piping an
// object-mode stream into process.stdout (a byte stream) is the likely
// trigger of the "maximum call stack" error the question asks about.
.pipe(process.stdout);
The process.stdout is just so I can see that the data can continue on to the next stream; however, when adding pipe(process.stdout) or even a through2 duplex stream I get this maximum call stack exceeded error. Any ideas?

I think you should write it this way:
var myCsvStream = fs.createReadStream('csv_files/myCSVFile.csv');
var csv = require('fast-csv');
// Parsed rows are collected here. This declaration was missing from the
// original answer even though the 'data' handler below pushes into it.
var myData = [];
var csvStream = csv
.parse({headers: true, objectMode: true, trim: true})
.on('data', function (data) {
myData.push(data);
})
.on('end', function () {
console.log('done parsing counties')
});
myCsvStream
.pipe(csvStream)
// NOTE(review): the parser emits row objects, so piping onward into
// process.stdout (a byte stream) can still fail — drop this pipe if you
// only need the rows accumulated in `myData`.
.pipe(process.stdout);
After you can wrap it all up in a function.

Related

Writing on stdin when using dockerode

How can I write to the container's stdin when using the dockerode library? I tried doing it in multiple ways, but nothing seems to work.
My current code that is not able to write to stdin:
// Question code (as posted): runs a docker image via dockerode, writes a
// string to the container's stdin, and accumulates its stdout into `name`.
export async function nameToStdName(
pluginName: string,
pluginDescription: string,
pluginId: number,
numberOfDuplicates: number
) {
const docker = new Docker();
const input = `${pluginName}; ${pluginDescription}`;
// Run the docker container and pass input from a string
const dockerImageName = 'name-to-stdname';
const dockerCmd = ['python', '/app/main.py', '-i', pluginId.toString(), '-v', numberOfDuplicates.toString()];
const options = {
// NOTE(review): the Docker Engine API expects `Cmd` (capital C); lowercase
// `cmd` is likely ignored by createContainer — TODO confirm against the
// dockerode docs.
cmd: dockerCmd,
AttachStdin: true,
AttachStdout: true,
Tty: false,
// NOTE(review): `OpenStdin` is not set — without it the container's stdin
// is closed and writes to the attach stream go nowhere; verify.
};
const container = await docker.createContainer({
Image: dockerImageName,
...options,
});
await container.start();
const stream = await container.attach({
stream: true,
stdin: true,
stdout: true,
});
// Handle output from container's stdout
let name = "";
stream.on('data', (data: Stream) => {
console.log(`Received output: ${data.toString()}`);
name += data.toString();
});
// Pass input to container's stdin
stream.write(input);
// NOTE(review): the stream is never end()ed, so a program reading stdin to
// EOF will block forever; with Tty:false the output is also multiplexed
// with 8-byte frame headers, which end up inside `name` — verify both.
await container.wait();
return name;
}

Node.js child_process.spawn no output using stdio: 'inherit'

I'm using Node.js child_process.spawn() in order to execute a few command lines in CMD and get the output.
I have encountered few issues:
When I spawn the process without the stdio: 'inherit' option, the CMD freezes after executing the last command and won't print out the results.
When I add the stdio: 'inherit' option, I get the results printed to my terminal but I can't catch the output with child.stdout.on.
Is there any possible way to capture the terminal output, or to keep the process from getting stuck?
// Question code (as posted): chains three shell commands in a single spawn
// call and tries to listen for their output.
function executeCommands (){
const firstCommand = 'do something1'
const secondCommand = 'do something2'
const thirdCommand = 'do something3'
// NOTE(review): the template literal contains a hard line break, so the
// shell receives a newline in the middle of the `&&` chain.
let child = require('child_process').spawn(`${firstCommand} && ${secondCommand} &&
${thirdCommand}`, [], {shell: true,stdio: 'inherit'})
// NOTE(review): with stdio: 'inherit' the child shares the parent's stdio,
// and child.stdout / child.stderr are null — the calls below throw. Output
// can only be captured with the default 'pipe' stdio.
child.stdout.setEncoding('utf8')
child.stdout.on('data', (data) => {
console.log('stdout',data)
})
// NOTE(review): child.stdio is an array of streams, not an event emitter —
// it has no .on() method.
child.stdio.on('data', (data) => {
console.log('stdio',data)
})
child.stderr.on('data', (data) => {
console.log('stderr',data.toString())
})
}
Use child_process
// Run `npm -v` synchronously and print the captured output.
const { execSync } = require("node:child_process");
const version = execSync("npm -v", { encoding: "utf-8" });
console.log(version);
// prints the installed npm version, e.g. 8.15.0
if you want to use spawnSync
// Same idea with spawnSync: the result object carries stdout as a string.
const { spawnSync } = require("node:child_process");
const result = spawnSync("npm", ["-v"], { encoding: "utf-8" });
console.log(result.stdout);
// prints the installed npm version, e.g. 8.15.0

Get all images exif and pass to route as object

How do I get the EXIF data for all images and pass it to the view in Express.js?
Basically it has to wait for two promises:
get the filenames with readdir, then get the EXIF data and return it as an array to the view.
Here is what I have, but I'm lost.
// Question code (as posted): Express app that should render EXIF data for
// every file in ./photos/.
const express = require('express');
const exifr = require('exifr');
const fs = require('fs');
const fsPromises = fs.promises;
const app = express();
app.set('view engine', 'ejs')
// exifr options: only the IPTC and GPS blocks are enabled.
let defaultOptions = {
// Segments (JPEG APP Segment, PNG Chunks, HEIC Boxes, etc...)
tiff: false,
xmp: false,
icc: false,
iptc: true,
jfif: false, // (jpeg only)
ihdr: false, // (png only)
// Sub-blocks inside TIFF segment
ifd0: false, // aka image
ifd1: false, // aka thumbnail
exif: false,
gps: true,
}
const photosDir = './photos/'
app.use('/photos', express.static('photos'))
// NOTE(review): the .then() callback loops over the filenames but returns
// nothing, and the inner exifr.parse promises are never awaited — so the
// promise returned by listDir resolves to undefined.
async function listDir() {
try {
return fsPromises.readdir(photosDir) .then(filenames => {
for (let filename of filenames) {
//console.log(filename)
exifr.parse(photosDir+filename, defaultOptions).then(function(data){
console.log(data)
return data
})
}
});
} catch (err) {
console.error('Error occured while reading directory!', err);
}
}
listDir()
app.get('/', async (_,res) => {
// NOTE(review): `data` is undefined here (see listDir above), so the view
// receives { files: undefined }.
let data = await listDir()
console.log(data)
res.render('index',{"files": data })
});
app.listen(3000, () => console.log('Example app is listening on port 3000.'));
Use Promise.all, this will resolve after all exifr.parse promises have resolved.
// Map every filename to an exifr.parse promise and wait for all of them, so
// the returned promise resolves with the array of parsed EXIF objects.
return fsPromises.readdir(photosDir)
.then(filenames => Promise.all(filenames.map(filename =>
exifr.parse(photosDir+filename, defaultOptions)
.then(function(data) {
console.log(data)
return data
})
)));

Can't get the data from the stream

I have some code in which I build a MinIO client and then I try to access an object through the getObject method and perform other tasks. The thing is I can't get the data from the stream. I have tried with Buffer.from and pushed the data into the buff array. No result. Any thoughts? Thank you!
The code is below:
// Question code (as posted): lists buckets, then tries to read an object's
// bytes into `buff` and return it.
var Minio = require('minio')
var Buffer = require('buffer').Buffer;
async function call(){
var minioClient = new Minio.Client({
endPoint: 'localhost',
port: 9000,
useSSL: false,
accessKey: 'admin',
secretKey: 'password'
});
minioClient.listBuckets(function (err, buckets) {
if (err)
return console.log(err);
console.log('buckets :', buckets);
});
var buff = [];
var size = 0;
// NOTE(review): this await only waits until the stream is obtained and the
// listeners are attached — not until the 'end' event fires.
await minioClient.getObject("test2", "test.jpg").then( function(dataStream) {
dataStream.on('data', async function(chunk) {
buff.push(chunk)
size += chunk.length
})
dataStream.on('end', function() {
console.log('End. Total size = ' + size)
console.log("End Buffer : " + buff)
})
dataStream.on('error', function(err) {
console.log(err)
})
}).catch((err)=>console.log(err));
// NOTE(review): this runs before 'end' — buff is still empty here.
console.log("Buffer = " + buff);
return buff;
}
var data = call();
// NOTE(review): call() is async, so `data` is a pending Promise — which is
// exactly why the output shows "Data: [object Promise]". Use await or
// .then() to get the resolved value.
console.log("Data: " + data);
The response is below:
Data: [object Promise]
buckets : [
{ name: 'test1', creationDate: 2021-08-25T18:36:40.544Z },
{ name: 'test2', creationDate: 2021-08-25T19:42:47.558Z }
]
End. Total size = 3844
End Buffer: ����►JFIF☺☺☺����☺vPhotoshop 3.08BIM♦♦☺N∟☻☻☻∟☻¶ww.public-domain-image.com∟☻t,Murphy Karen, U.S. Fish and Wildlife Service∟☻zww.public-domain-im
age.com∟☻(�Please consider to donate and to link back to http://www..public-domain-image.com also give credit for this and any other pictures you used.∟☻A
"http://www.public-domain-image.com∟☻18991230∟☻<♂000000+01008BIM♦♂��C♥☻☻♥☻☻♥♥♥♥♦♥♥♦♣♣♦♦♣
♀
♀♀♂
¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶����♥☺"☻◄☺♥◄☺��▼☺♣☺☺☺☺☺☺☺☻♥♦♣♠
♂���►☻☺♥♥☻♦♥♣♣♦♦☺}☺☻♥♦◄♣↕!1A♠‼Qa"q¶2��#B��§R��$3br�
▬↨↑↓→%&'()*456789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz���������������������������������������������������������������������������▼☺♥☺☺☺☺☺☺☺☺☺☺☻♥♦♣♠
♂���◄☻☺☻♦♦♥♦♣♦♦☺☻w☺☻♥◄♦♣!1♠↕AQaq‼"2¶B���� #3R�§br�
▬$4�%�↨↑↓→&'()*56789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz��������������������������������������������������������������������������♀♥☺☻◄♥◄?���M∟q�VŞ�↨▼-hY��oO
Ҵ���0�§̏a2���↑`/?J�ob#lm▼�^����▲♣Zk}�☼CBв�ه♀
♀{��֡r�F>��→|����T�↔���-��RXm���6zRǤ^2)►�3�"�[M?ΙC(�z�d�X���c∟V�:∟�����6gA��8��o��"y⌂�8<
y�q�x�c�=☼j��↔M▼♫�>L`�zUۭ♠%�8DeR0♫}k☼D�y/e�k�펽8�(#�#��%�m��r驔w:�☼٫FP(�ۊ��4peO�~U�౟)Xd→�
☺��G�r�j��4§�►�~n��t��3*�9◄�d:y���zԏK'♀=3�U�q\
��i→�?J�9O;�∟�ٔ�¶�m��~u�Gf:c"��¿�i�→\Æ�����↔=�a��z��:n1�Vc���L��Br�dZ��L↑���w◄�;V�Yv�2K'�]�!O§�W!��� |��§��_K,�B=��$��~N����U�>����n�§�↑&a)3'K�o8<h��0
����G�7↨♥.rߕI���R�<v���K[��iJ�#v��~�▬`�¨�B���U��U=M[x�����X�♠�z�+��ע��⌂F��Gj���a�1�>ұ4�6�↑�nF��♀◄\�M;�IE=ϋ���"�▼ː8����J͹#�kҌ�3�D♂▲��L��o�M�Q��z|ڛ'tX��☼ީ�◄
�♣W↕mۃ��ٮq�↔h��(�pĜ��!�n�U♂����¶���Q�+�;∟���Eh����q�rI��5NI<�·▲�j�Bw�=�G+lDS���aj��c▲��]<(#ҹ�{���mX�.��\�Vu�tGc�8N���7��☼5��� �����(∟�*3����Y�,Y�9�◄x
�ڹ(n�j��▼'�§�ܩ7���x♫?:��`pX‼^/�
�T����?)������Sqc�♂��3Ϧ=k�Q�s�▬2���*�▬▼�Y�Ѝ‼�=s^/�
�P�↓�.↑��⌂�[o☺�sD#o►�§▲���G,{��=^mj�,��4♥��☻�7�l$�↨��♫?�☼�!��36���_�'��∟��A⌂g��#�↨JO�☼�↔�5����;��\B�(�♦���3��z��q�%������
ci���y, ��§.��Kx→�M�X�y�3F�☼8�G��x���:Yۻ_�#�ƕr��I�YTM�߰#�▼Ҳӫ4�>=☼����;������T�<���������O�Jl:��3־t�⌂d▼☼I!W�/�G'��
�t�؏�ן�uV�����♣X���>↑�pm���~hʓ�§�=q����&��`o��[VmgWq�]W���]f��♦��^�t���X♫�+���G����EN��♂�F1��ULd▼ҩ��2~���~�GN9�n�>C��H��#�cm�u�+1f�9����4���Ea֟�g§��▲�ȩ���
�
t�0�m�W?�s���ţ�4�Y�#�N~���▼Xܫ�↑��`��׍�ѕmȥJ������1#?19♦(������↓4���<��♥7J<�܌�∟��:zn8?ʻ☼;�=�ƭ��CScb2I���u2����9"=¶�c��Wx8�29§q��}☼���C����
QD��⌂.jm�^j����g>�♦�:
��♂ o��B��4�$\��ٹ8�9Q�E�{a!�Q�Z�l�/|TV�nS�sV�✔!s�����`�E�/O��;q��Mi��1�}�ǡ�M▬�♥☻I�↕���5�[�▲B�,:m/�����=*qF֙n#���↔8?S]%�1�! ���
�↑���#��V♂�1�ܲ�V�2D��I►���r�3�ֹӻ:,|��↔�+�u`ib�]�,�I��X��m ����W▬�I>�♫���5w����∟��$#�y�O��V`�↨ܟ����!fY1��↔vը��♫��h�%HԊm�u��]�+Hw∟���L�?J�↨↕6�߇?�7�▼0���Lw5▬
�qA.E֝���Gc��D�▲☺+�ʫ_�z���☼���ݕ�}*���9→��§A!�♥�♥?έ[� 92↨���
�s�WIiek$�2∟Ɩhś∟�§H'����E�m↨m�����;�w��b����⌂♥|♥�⌂��t▬q�→eNJ↓ss;�#ʌG1��g↑↔k�T����/�5���u=8˗���ac:�u���#j�G�C��k��i�♥�♀[���}��?�}�&�k7��h_���U;�
�ׇ3ivr���S�+?�4�4���∟↨%[���F�♣�F♥1s�v����ކ±�↑-�Ҵ�,�\▲��R<yH݉��.s�槎c#r�O�d�6��%�:U�9�p3�8§fw4�઀ͳ�ˌ�b̽�>�C�c�F;�O;z��n~�h♫cR6��_�Ҭ�Ŕ.q�:VLst���Y�G�ڋ�r���54
sw^���16�►☼�J_�,��♂.s�♫*�K�6waq���M��.t����,↨AYC��H##�‼��o���$a��t�*��↓↑�M�⌂�A�֨Jz�����
↕�a�]E��ۇux�*2Us�^}I4�g�Nw��փ�Qx�O��P�R�c�<�)?���Ew�?�֡j���{[����v����→��H�W���x�nLHǟ�♥���K���%�>P˟�-�z�:�☻�֚z�k���
A running example to get the data as you need it:
var minioClient = new Minio.Client({
endPoint: 'play.min.io',
port: 9000,
useSSL: true,
accessKey: 'Q3AM3UQ867SPQQA43P2F',
secretKey: 'zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG'
})
// Fetch an object and resolve with its full contents once the stream ends.
// Rejects on a stream error or if getObject itself fails.
async function call() {
  return new Promise((resolve, reject) => {
    const chunks = [];
    let size = 0;
    minioClient.getObject("sph-my-bucket", "test-replication.txt")
      .then(function(dataStream) {
        dataStream.on('data', function(chunk) {
          chunks.push(chunk)
          size += chunk.length
        })
        dataStream.on('end', function() {
          console.log('End. Total size = ' + size)
          // Concatenate into a single Buffer. (Fix: resolving the raw array
          // made `data.toString()` join the chunks with commas, corrupting
          // any payload delivered in more than one chunk.)
          resolve(Buffer.concat(chunks))
        })
        dataStream.on('error', function(err) {
          console.log(err)
          reject(err)
        })
      })
      .catch(reject);
  })
}
async function getData() {
  const data = await call()
  console.log(data.toString())
}
getData()
Async/await is basically syntax sugar for Promises. But you have to keep in mind that an async function always returns a Promise, even if you explicitly don't wrap its return in one.
With this said, as we know, if we want to retrieve the information, just call Promise.then() and assign the result to your global variable. Note: setTimeout just adds a delay until the Promise fulfills.
// Fetch a page and return its body as text. Being async, this always
// returns a Promise that callers must await or .then().
async function getAsync() {
  const response = await fetch('https://placekitten.com/500/500');
  return await response.text();
}
var myData;
getAsync().then(data => myData = data);
// Wait long enough for the fetch to settle before reading myData.
setTimeout(() => console.log(myData), 3e3)

Write to a CSV in Node.js

I am struggling to find a way to write data to a CSV in Node.js.
There are several CSV plugins available however they only 'write' to stdout.
Ideally I want to write on a row-by-row basis using a loop.
You can use fs (https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback):
var dataToWrite;
var fs = require('fs');
// Write the payload in one call; the callback reports success or failure.
fs.writeFile('form-tracking/formList.csv', dataToWrite, 'utf8', (err) => {
  if (!err) {
    console.log('It\'s saved!');
  } else {
    console.log('Some error occured - file either not saved or corrupted file saved.');
  }
});
The docs for node-csv-parser (npm install csv) specifically state that it can be used with streams (see fromStream, toStream). So it's not hard-coded to use stdout.
Several other CSV parsers also come up when you npm search csv -- you might want to look at them too.
Here is a simple example using csv-stringify to write a dataset that fits in memory to a csv file using fs.writeFile.
// Write an in-memory dataset to my.csv using csv-stringify + fs.writeFile.
// (Fix: csv-stringify exposes `stringify` as a named export; the original
// default import fails on current versions of the package.)
import { stringify } from 'csv-stringify';
import fs from 'fs';
let data = [];
// Maps object keys to the header labels written on the first row.
let columns = {
  id: 'id',
  name: 'Name'
};
// Build ten [id, name] rows.
for (let i = 0; i < 10; i++) {
  data.push([i, 'Name ' + i]);
}
stringify(data, { header: true, columns: columns }, (err, output) => {
  if (err) throw err;
  fs.writeFile('my.csv', output, (err) => {
    if (err) throw err;
    console.log('my.csv saved.');
  });
});
If you want to use a loop as you say you can do something like this with Node fs:
// Stream rows to a CSV file with fs.createWriteStream, one line per object.
// NOTE(review): fields are not quoted or escaped — values containing commas
// or newlines will corrupt the output.
let fs = require("fs")
let writeStream = fs.createWriteStream('/path/filename.csv')
someArrayOfObjects.forEach((someObject, index) => {
let newLine = []
newLine.push(someObject.stringPropertyOne)
newLine.push(someObject.stringPropertyTwo)
.... // (placeholder in the original answer — push the remaining columns here)
writeStream.write(newLine.join(',')+ '\n', () => {
// a line was written to stream
})
})
writeStream.end()
writeStream.on('finish', () => {
console.log('finish write stream, moving along')
}).on('error', (err) => {
console.log(err)
})
In case you don't wanna use any library besides fs, you can do it manually.
// Build the CSV text by hand: a header row from the first object's keys,
// then one row per object, every row terminated with a newline.
let separator = ","
let fileType = "csv"
let file = `fileExample.${fileType}`
let headerRow = Object.keys(jsonObject[0]).join(separator)
let bodyRows = jsonObject.map(transaction =>
  Object.values(transaction).join(separator)
)
let fileString = [headerRow, ...bodyRows].join("\n") + "\n"
fs.writeFileSync(file, fileString, 'utf8')
For those who prefer fast-csv:
// Write an array of row objects to a CSV file with fast-csv.
// (Fix: the package name is '@fast-csv/format' — the original
// '#fast-csv/format' is a typo and will not resolve.)
const { writeToPath } = require('@fast-csv/format');
const path = `${__dirname}/people.csv`;
const data = [{ name: 'Stevie', id: 10 }, { name: 'Ray', id: 20 }];
const options = { headers: true, quoteColumns: true };
writeToPath(path, data, options)
.on('error', err => console.error(err))
.on('finish', () => console.log('Done writing.'));
**In case you don't want to use any library besides fs, you can do it manually. Moreover, you can filter the data as you want before writing it to the CSV file.**
// Express route that dumps an array of objects to a file and logs the outcome.
// NOTE(review): the route never sends a response (no res.send), so the HTTP
// request will hang — confirm whether that is intentional.
router.get('/apiname', (req, res) => {
  const data = arrayOfObject; // you will get from somewhere
  /*
  // Modify old data (New Key Names)
  let modifiedData = data.map(({ oldKey1: newKey1, oldKey2: newKey2, ...rest }) => ({ newKey1, newKey2, ...rest }));
  */
  const path = './test'
  writeToFile(path, data, (result) => {
    // get the result from callback and process
    console.log(result) // success or error
  });
});

// Declared as a hoisted function declaration. (Fix: the original assigned
// `writeToFile = ...` without any declaration, creating an implicit global
// that throws in strict mode / ES modules.)
// NOTE(review): despite the CSV framing, this writes pretty-printed JSON.
function writeToFile(path, data, callback) {
  // JSON.stringify(data, null, 2) writes the data out line by line
  fs.writeFile(path, JSON.stringify(data, null, 2), (err) => {
    if (err) {
      callback('error'); // some error (catch this error)
      return;
    }
    callback('success'); // successful
  });
}
This is the code that worked for me in NestJS:
// Read an existing CSV into JSON rows, append more rows, then serialize
// back to CSV and append the result to a file.
// Fixes vs. the original snippet: `let csv data` (space in identifier) and
// a stray `}` were syntax errors, `csv` was declared twice (the require and
// the parse result), and `appendFileSync` was never imported.
import { Parser } from "json2csv";
import { appendFileSync } from "fs";
const csvtojson = require('csvtojson');
const csvFilePath = process.cwd() + '/' + file.path;
// Read the uploaded CSV into an array of JSON objects.
let data = await csvtojson().fromFile(csvFilePath);
// Append the extra rows you want written back out.
data.push({
  label: value,
  // ...other columns...
})
const fields = [
  'field1', 'field2', // ...columns to serialize...
]
const parser = new Parser({ fields, header: false }); // remove `header: false` if you want a header row
const csv = parser.parse(data);
appendFileSync('./filename.csv', `${csv}\n`); // drop `\n` if you don't want a trailing newline

Categories