node js child process timeout - javascript

I need to know how I can tackle the child-process problem in Node.js. Basically, I need to convert a full PDF file into JPEGs. My process is getting killed, and most of the time the file conversion fails. How can I handle the long-running process? Once the job is finished, I need to add a reference to it in the database. Could you please help?
const path = require('path');
const cp = require('child_process');
// Input PDF and output pattern (%2d is the page-number placeholder ImageMagick fills in).
// NOTE(review): paths are resolved relative to this file via __dirname — the
// jpg-outputs/git directory must already exist or `convert` will fail.
const src = path.join(__dirname, `/pdf-inputs/git.pdf`);
const exp = path.join(__dirname, `/jpg-outputs/git/git-%2d.jpg`);
// Render DPI intended for the conversion (see Converter below).
const density = 300;
/**
 * Converts a PDF into JPEG page images by spawning ImageMagick's `convert`.
 *
 * Fixes over the original:
 *  - with `stdio: 'ignore'`, `child.stdout` and `child.stderr` are null, so
 *    the old `child.stdout.on(...)` calls threw a TypeError immediately
 *    (this is what killed the process); completion is now observed via the
 *    `exit` event instead.
 *  - the `den` constructor argument is actually passed to `-density`
 *    (it was hard-coded to '300').
 *  - the promise always settles: resolves on exit code 0, rejects on a
 *    non-zero exit, a terminating signal, or a spawn error (e.g. ENOENT),
 *    so a database reference can safely be written in the .then() handler.
 */
class Converter {
  constructor(src, exp, den) {
    this.src = src;     // input PDF path
    this.exp = exp;     // output pattern, e.g. out-%2d.jpg
    this.density = den; // render DPI passed to ImageMagick
  }

  /**
   * Starts the conversion.
   * @returns {Promise<number>} resolves with exit code 0 on success;
   *   rejects with an Error on spawn failure or abnormal termination.
   */
  start() {
    return new Promise((resolve, reject) => {
      const child = cp.spawn(
        'convert',
        ['-density', `${this.density}`, this.src, this.exp],
        {
          // Long-running job: we only care about the exit status, so no
          // stdio pipes are needed (their streams are null with 'ignore').
          detached: true,
          stdio: 'ignore',
        }
      );
      console.log('process started =======> ', this);

      // Fired when the process could not be spawned at all (e.g. ImageMagick
      // not installed).
      child.on('error', (err) => {
        console.log('child process error ========> ', err);
        reject(err);
      });

      // Fired once the process terminates — the single reliable place to
      // detect success/failure of a long-running conversion.
      child.on('exit', (code, signal) => {
        console.log(`child process exited with code: ${code} and signal ${signal}`);
        if (code === 0) {
          resolve(code);
        } else {
          reject(new Error(`convert failed with code ${code}, signal ${signal}`));
        }
      });
    });
  }
}
// Kick off one conversion and report the outcome either way.
const converter = new Converter(src, exp, density);
converter
  .start()
  .then((result) => console.log('r ==> ', result))
  .catch((error) => console.log('e ==> ', error));
~ Meet

Related

backup mongodb without pain using nodejs

This is my module for creating a mongoDB back up with nodejs server:
const root = require('./root');
const { spawn } = require('child_process');
const config = require('../config.json');
// Start a backup one second after the process boots.
// NOTE(review): this fires exactly once per process start — confirm whether
// a scheduler/cron should be driving backupMongoDB() instead.
setTimeout(() => {
backupMongoDB();
}, 1000);
/**
 * Dumps the `pors_db` database to a dated, gzipped archive file using the
 * `mongodump` CLI spawned as a child process.
 *
 * Fix for "unauthorized command listCollections requires authentication":
 * the authenticated connection string from config is now passed via `--uri`.
 * Mind the trailing comma after that element — without it, two adjacent
 * template literals parse as a tagged-template *call*, which produced the
 * "... is not a function" error described above.
 */
function backupMongoDB() {
  const DB_NAME = 'pors_db';
  const DATE = getTodayDate();
  const ARCHIVE_PATH = `${root}/db_backup/${DB_NAME}-${DATE}.gzip`;
  const child = spawn('mongodump', [
    // Authenticated connection string (credentials + host).
    // NOTE(review): some mongodump versions reject combining --db with a
    // --uri whose path already names a database — verify against the
    // installed mongodump version.
    `--uri=${config.MONGODB_URI_SERVER}`,
    `--db=${DB_NAME}`,
    `--archive=${ARCHIVE_PATH}`,
    '--gzip',
  ]);
  // mongodump reports progress on stderr; stdout is normally quiet with --archive.
  child.stdout.on('data', (data) => {
    console.log('stdout:\n', data);
  });
  child.stderr.on('data', (data) => {
    console.log('stderr:\n', Buffer.from(data).toString());
  });
  // Fired when the process could not be spawned (e.g. mongodump not on PATH).
  child.on('error', (error) => {
    console.log('error:\n', error);
  });
  // code 0 + no signal means the dump completed successfully.
  child.on('exit', (code, signal) => {
    if(code) console.log('Process exit with code:', code);
    else if(signal) console.log('Process killed with signal:', signal);
    else console.log('Backup is successfull..');
  });
  // Formats today's date as YYYY-MM-DD for the archive filename.
  function getTodayDate() {
    const date = new Date();
    const dd = String(date.getDate()).padStart(2, '0');
    const mm = String(date.getMonth() + 1).padStart(2, '0');
    const yyyy = date.getFullYear();
    return `${yyyy}-${mm}-${dd}`;
  }
}
module.exports = backupMongoDB;
This gives me an error:
error creating intents to dump. error for getting collections for database pors_db : unauthorized command listCollections requires authentication.
I tried to connect via these options, but it returns an error saying it's not a function:
// BUG: the `--uri=...` line below is missing its trailing comma. Without it,
// the `--uri` template literal is immediately followed by the `--archive`
// template literal, which JavaScript parses as a *tagged template* — i.e. it
// tries to call the first string as a function. That is exactly the
// "... is not a function" error described above.
const child = spawn('mongodump', [
`--db=${DB_NAME}`,
`--uri=${config.MONGODB_URI_SERVER}` // this is what I added (note: ',' missing here)
`--archive=${ARCHIVE_PATH}`,
'--gzip',
]);
How can I back up my DB without pain?

Connection resets after 60 seconds in node.js upload application

I've written an application in node.js consisting of a server and a client for storing/uploading files.
For reproduction purposes, here's a proof of concept using a null write stream in the server and a random read stream in the client.
Using node.js 12.19.0 on Ubuntu 18.04. The client depends on node-fetch v2.6.1.
The issue I have is after 60 seconds the connection is reset and haven't found a way to make this work.
Any ideas are appreciated.
Thank you.
testServer.js
// -- DevNull Start --
// A Writable sink that discards every chunk — the stream equivalent of /dev/null.
var util = require('util');
var stream = require('stream');
var Writable = stream.Writable;
// Fall back to setTimeout where setImmediate is unavailable.
var setImmediate = setImmediate || function (fn) { setTimeout(fn, 0); };

util.inherits(DevNull, Writable);

// Constructor works with or without `new`.
function DevNull(opts) {
  if (!(this instanceof DevNull)) {
    return new DevNull(opts);
  }
  Writable.call(this, opts || {});
}

// Acknowledge each chunk on a later tick without retaining it.
DevNull.prototype._write = function (chunk, encoding, done) {
  setImmediate(done);
};
// -- DevNull End --
const http = require('http');

// Log lifecycle events on the request's underlying socket (debugging aid for
// the 60-second reset investigation).
function logSocketEvents(socket) {
  socket.on('end', function () {
    console.log('SOCKET END: other end of the socket sends a FIN packet');
  });
  socket.on('timeout', function () {
    console.log('SOCKET TIMEOUT');
  });
  socket.on('error', function (error) {
    console.log('SOCKET ERROR: ' + JSON.stringify(error));
  });
  socket.on('close', function (had_error) {
    console.log('SOCKET CLOSED. IT WAS ERROR: ' + had_error);
  });
}

const server = http.createServer();

server.on('request', async (req, res) => {
  try {
    logSocketEvents(req.socket);

    // Discard the request body, but wait until it has been fully received
    // before answering.
    const sink = DevNull();
    const finished = new Promise((resolve, reject) => {
      req.on('end', resolve);
      req.on('error', reject);
    });
    req.pipe(sink);
    await finished;

    res.writeHead(200);
    res.end('OK');
  } catch (err) {
    res.writeHead(500);
    res.end(err.message);
  }
});

server
  .listen(8081)
  .on('listening', () => { console.log('Listening on port', server.address().port); });
testClient.js
// -- RandomStream Start --
// Readable stream that emits exactly `length` bytes of random data.
var crypto = require('crypto');
var stream = require('stream');
var util = require('util');
var Readable = stream.Readable;

// Constructor works with or without `new`.
function RandomStream(length, options) {
  if (!(this instanceof RandomStream)) {
    return new RandomStream(length, options);
  }
  // init Readable with caller-supplied options
  Readable.call(this, options);
  // Bytes still to be produced.
  this.lenToGenerate = length;
}
util.inherits(RandomStream, Readable);

RandomStream.prototype._read = function (size) {
  var chunkSize = size || 1024; // default chunk size when none requested
  var keepPushing = true;
  while (keepPushing) { // continue only while push() reports capacity
    if (chunkSize > this.lenToGenerate) {
      chunkSize = this.lenToGenerate; // clamp to what remains
    }
    if (chunkSize) {
      keepPushing = this.push(crypto.randomBytes(chunkSize));
      this.lenToGenerate -= chunkSize;
    }
    if (!this.lenToGenerate) {
      // Everything produced: signal EOF and leave the loop.
      this.push(null);
      keepPushing = false;
    }
  }
};
// -- RandomStream End --
const fetch = require('node-fetch');

// PUT `nBytes` of random data to the local test server, logging the elapsed
// time and either the response body or the failure.
const timedUpload = async (nBytes) => {
  const startedAt = Date.now();
  try {
    const resp = await fetch('http://localhost:8081/test', {
      method: 'PUT',
      body: new RandomStream(nBytes),
    });
    const data = await resp.text();
    console.log(Date.now() - startedAt, data);
  } catch (err) {
    console.warn(Date.now() - startedAt, err);
  }
};

const runSuccess = () => timedUpload(256e6); // Runs in ~35 seconds
const runFail = () => timedUpload(1024e6); // Fails after 60 seconds

// runSuccess().then(() => process.exit(0));
runFail().then(() => process.exit(0));
I tried (unsuccessfully) to reproduce what you are seeing based on your code example. Neither the success call is completing in ~35 seconds nor is the error being thrown in 60 seconds.
However, that being said, I think what is happening here is that your client is terminating the request.
You can increase the timeout by adding a httpAgent to the fetch PUT call. You can then set a timeout in the httpAgent.
const http = require('http');
...
// Same PUT as before, but with an explicit http.Agent: keepAlive plus a
// socket timeout (in ms) raised to 5 minutes, so the client side does not
// reset the connection at the default 60-second mark.
const runFail = async () => { // Fails after 60 seconds
const t = Date.now();
try {
const resp = await fetch('http://localhost:8081/test', {
method: 'PUT',
body: new RandomStream(1024e6),
agent: new http.Agent({ keepAlive: true, timeout: 300000 })
});
const data = await resp.text();
console.log(Date.now() - t, data);
} catch (err) {
console.warn(Date.now() - t, err);
}
};
See the fetch docs for adding a custom http(s) agent here
See options for creating http(s) agent here
This turned out to be a bug in node.js
Discussion here: https://github.com/nodejs/node/issues/35661

Close Event Triggers Before Data Events on File Stream

I've got a script that adds JSON data from a file to a DynamoDB table. The script uses the "fs" module to open a read stream to the json file and retrieve the data line by line. As the data is returned, it's inserted into a DynamoDB table. When the operation ends, an execution summary is given with number of records processed, successfully inserted, and unsuccessfully inserted. The problem is the summary executes before the file has completely processed. As result the numbers are wrong.
The script...
// Imports a JSON file into DynamoDB and prints a summary.
// NOTE(review): this is the buggy version discussed above — the summary runs
// before the puts finish, so its counters are wrong.
ddb_table_has_records(table_name, (err, dat) => {
// Import only into an empty table, unless forced.
if (dat.Count === 0 || force) {
const transformStream = JSONStream.parse("*");
const inputStream = fs.createReadStream(import_file);
// Running totals, bumped inside the async put() callbacks below.
let record_position = 0;
let count_imported_successful = 0;
let count_imported_fail = 0;
inputStream.pipe(transformStream).on("data", (Item) => {
const params = {
TableName: table_name,
Item
}
// put() is asynchronous and nothing tracks its completion, so these
// counters are updated *after* the file stream has already finished.
ddb_client.put(params, (err, data) => {
++record_position;
if (err) {
console.error("Unable to add mapping for record " + record_position + ", error = " + err);
++count_imported_fail;
} else {
console.log("PutItem succeeded " + record_position);
++count_imported_successful;
}
});
}).on("close", () => {
// BUG: "close" fires when the *file* has been fully read, not when the
// put() callbacks have run — so this summary prints zeros first.
console.log("=".repeat(70));
console.log(`'Completed: ${import_file}' has been loaded into '${table_name}'.`);
console.log(` Record Count: ${record_position}`);
console.log(` Imported Record Count: ${count_imported_successful}`);
console.log(` Rejected Record Count: ${count_imported_fail}`);
});
} else {
console.log("=".repeat(70));
console.log(`Completed: Skipping import of '${import_file}' into '${table_name}'.`);
};
});
When this runs, it looks like the following
PS C:\> node --max-old-space-size=8192 .\try.js 'foo' 'us-west-2' 'development' '.\data.json' true
Target Profile: development
Target Region: us-west-2
Target Table: foo
Source File: .\data.json
Force Import: true
Confirming Table's State...
======================================================================
'Completed: .\data.json' has been loaded into 'foo'.
Record Count: 0
Imported Record Count: 0
Rejected Record Count: 0
PutItem succeeded 1
PutItem succeeded 2
PutItem succeeded 3
PutItem succeeded 4
...
The portion of the code that gets the record counts runs before the inserts completes so the records imported and rejected numbers are always wrong. It looks like the file stream closes while inserts are occurring. I've tried changing from the "close" to "end" event, same result.
Test this script with the following call...
node --max-old-space-size=8192 .\data.load.js 'foo' 'us-west-1' 'dev' '.\foo.default.json' true
Here is the content for the script I ultimately used...
'use strict'
// Bulk-loads a JSON file into a DynamoDB table.
// Usage: node data.load.js <table-name> <aws-region> <aws-profile> <import-file> [force]
if (process.argv.length < 6) {
throw new Error ('Please pass the table-name, aws-Region, aws-Profile, and file-path to the script.');
}
// argv[0] = node, argv[1] = script path; the rest are positional arguments.
let [, , TableName, Region, Profile, ImportFile, Force] = process.argv;
// Both env vars must be set BEFORE require('aws-sdk') so the SDK picks up
// the shared config file and the chosen profile.
process.env.AWS_SDK_LOAD_CONFIG = true;
process.env.AWS_PROFILE = Profile;
// Force defaults to false when the fifth argument is omitted.
Force = typeof(Force) !== 'undefined' ? Force : false;
const AWS = require('aws-sdk');
const fs = require('fs');
const JSONStream = require('JSONStream');
AWS.config.update({ region: Region });
const ddbc = new AWS.DynamoDB.DocumentClient();
console.log('Target Profile: ', Profile);
console.log('Target Region: ', Region);
console.log('Target Table: ', TableName);
console.log('Source File: ', ImportFile);
console.log('Force Import: ', Force);
// Resolves with a COUNT-only scan of `TableName` (no items are fetched;
// callers read `data.Count`).
const ddb_table_has_items = (TableName) => {
  return new Promise((resolve, reject) => {
    ddbc.scan({ TableName, Select: 'COUNT' }, (error, data) => {
      if (error) {
        reject(error);
      } else {
        resolve(data);
      }
    });
  });
}
// Upserts a single item into `TableName`, resolving with the put() result.
// BUG FIX: the original executor was `new Promise((reject, resolve) => ...)`,
// which swaps the Promise constructor's parameters — the FIRST executor
// argument is always the resolver. With the swapped names, successful puts
// rejected and failures resolved, inverting the success/failure counters
// accumulated by ddb_bulk_load.
const ddb_table_upsert_items = (TableName, Item) => {
  return new Promise((resolve, reject) => {
    const ddb_insert_payload = { TableName, Item };
    ddbc.put(ddb_insert_payload, (error, data) => {
      (error) ? reject(error) : resolve(data);
    });
  });
}
// Streams the JSON array in `ImportFile` and writes each element to
// `TableName`. Resolves with { count_succeeded, count_failed, count_attempted }
// only after every pending put has settled (the fix for the premature
// summary described above).
const ddb_bulk_load = (TableName, ImportFile) => {
  return new Promise((resolve, reject) => {
    let count_succeeded = 0;
    let count_failed = 0;
    let count_attempted = 0;
    const pending = [];

    const items = fs.createReadStream(ImportFile).pipe(JSONStream.parse("*"));

    items.on("data", (item) => {
      count_attempted++;
      // Track every insert promise so we can wait for all of them once the
      // file has been fully read.
      pending.push(
        ddb_table_upsert_items(TableName, item)
          .then((data) => count_succeeded++)
          .catch((error) => count_failed++)
      );
    });

    items.on("end", () => {
      Promise.all(pending)
        .then(() => resolve({ count_succeeded, count_failed, count_attempted }))
        .catch((error) => {
          console.log(error);
          reject(error);
        });
    });

    items.on("error", (error) => reject(error));
  });
}
// Entry point: decide whether to import, run the bulk load, print a summary.
(async () => {
  try {
    // Import when forced; otherwise only when the table is currently empty
    // (the scan is skipped entirely in the forced case).
    const forced = Force.toString().toLowerCase() === 'true';
    const proceed_with_import =
      forced || (await ddb_table_has_items(TableName)).Count === 0;

    if (proceed_with_import) {
      const ddb_inserts = await ddb_bulk_load(TableName, ImportFile);
      console.log("=".repeat(75));
      console.log("Completed: '%s' has been loaded into '%s'.", ImportFile, TableName);
      console.log(" Insert Attempted: %s", ddb_inserts.count_attempted);
      console.log(" Insert Succeeded: %s", ddb_inserts.count_succeeded);
      console.log(" Insert Failed : %s", ddb_inserts.count_failed);
    }
  } catch (error) {
    console.log(error);
  }
})();
Wrapping each insert in a promise, pushing the insert-promises into an array, and using promise all on that array did the trick. I execute the promise all once we're finished reading from the file; once the "end" event is emitted on the ddb_source_item stream.

How to pull out handler using module exports?

I am building a node application, and trying to neatly organize my code. I wrote a serial module that imports the serial libs and handles the connection. My intention was to write a basic module and then reuse it over and over again in different projects as needed. The only part that changes per use is how the incoming serial data is handled. For this reason I would like to pull out following handler and redefine it as per the project needs. How can I use module exports to redefine only this section of the file?
I have tried adding myParser to exports, but that gives me null, and I would be out of scope.
Handler to redefine/change/overload for each new project
// The per-project 'data' handler the author wants to override from outside
// the serial module (see discussion above).
myParser.on('data', (data) => {
console.log(data)
//DO SOMETHING WITH DATA
});
Example usage: main.js
// main.js — example usage of the serial module.
// FIX: `serial` was require'd twice with `const`, which is a SyntaxError
// ("Identifier 'serial' has already been declared"); the duplicate is removed.
const serial = require('./serial');
const dataParser = require('./dataParser');

// Call connect with command-line args: node main.js <port> <baud>
serial.connect(process.argv[2], Number(process.argv[3]))

// NOTE(review): serial.js as written does not export myParser, so this is
// undefined here — the module must expose the parser (e.g. via the factory
// rework shown below) for this handler to attach.
serial.myParser.on('data', (data) => {
  // Do something unique with data
  if (dataParser.parse(data) == 0)
    serial.send('Error');
});
Full JS Module below serial.js
const SerialPort = require('serialport');
const ReadLine = require('#serialport/parser-readline');
const _d = String.fromCharCode(13); //char EOL
let myPort = null;
let myParser = null;
function connect(port, baud) {
let portName = port || `COM1`;
let baudRate = baud || 115200;
myPort = new SerialPort(portName, {baudRate: baudRate})
myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))
//Handlers
myPort.on('open', () => {
console.log(`port ${portName} open`)
});
myParser.on('data', (data) => {
console.log(data)
});
myPort.on('close', () => {
console.log(`port ${portName} closed`)
});
myPort.on('error', (err) => {
console.error('port error: ' + err)
});
}
function getPorts() {
let portlist = [];
SerialPort.list((err, ports) => {
ports.forEach(port => {
portlist.push(port.comName)
});
})
return portlist;
}
function send(data) {
myPort.write(JSON.stringify(data) + _d, function (err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log(`${data} sent`);
});
}
function close() {
myPort.close();
}
module.exports = {
connect, getPorts, send, close
}
The problem is that a module is used where a class or a factory would be appropriate. myParser cannot exist without connect being called, so it doesn't make sense to make it available as module property, it would be unavailable by default, and multiple connect calls would override it.
It can be a factory:
// Factory variant of serial.js: each call opens its own port and returns an
// object that exposes `myParser`, so callers can attach project-specific
// 'data' handlers (the fix proposed in the answer above).
module.exports = function connect(port, baud) {
let portName = port || `COM1`;
let baudRate = baud || 115200;
let myPort = new SerialPort(portName, {baudRate: baudRate})
let myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))
//Handlers
myPort.on('open', () => {
console.log(`port ${portName} open`)
});
// Default logging handler; callers may add their own via the returned myParser.
myParser.on('data', (data) => {
console.log(data)
});
myPort.on('close', () => {
console.log(`port ${portName} closed`)
});
myPort.on('error', (err) => {
console.error('port error: ' + err)
});
// NOTE(review): SerialPort.list is asynchronous, so `portlist` is returned
// before the callback fills it — callers receive an empty array. Kept as in
// the original module to preserve the interface.
function getPorts() {
let portlist = [];
SerialPort.list((err, ports) => {
ports.forEach(port => {
portlist.push(port.comName)
});
})
return portlist;
}
// JSON-stringify `data`, append the CR terminator, write to this port.
function send(data) {
myPort.write(JSON.stringify(data) + _d, function (err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log(`${data} sent`);
});
}
// Close this connection's underlying port.
function close() {
myPort.close();
}
return {
myParser, getPorts, send, close
};
}
So it could be used like:
const serial = require('./serial');
const connection = serial(...);
connection.myParser.on('data',(data) => {
//Do something unique with data
if (dataParser.parse(data) == 0)
connection.send('Error');
});

Error: Cloud Functions for Firebase spawn EACCES (ghostscript)

I tried using Firebase Cloud Functions to create a thumbnail of a PDF file.
After the call of gs I get the following error:
2018-06-12T11:29:08.685Z E makeThumbnail: Error: spawn EACCES
at exports._errnoException (util.js:1020:11)
at ChildProcess.spawn (internal/child_process.js:328:11)
at exports.spawn (child_process.js:370:9)
at Object.exec (/user_code/node_modules/gs/index.js:86:28)
at Promise (/user_code/index.js:95:12)
at mkdirp.then.then (/user_code/index.js:86:12)
2018-06-12T11:29:08.698166767Z D makeThumbnail: Function execution took 780 ms, finished with status: 'error'
Is it necessary to use a plan other than Spark in order to use a component like Ghostscript?
In addition, my code. Maybe I just do not see my problem in the code
const functions = require('firebase-functions');
const mkdirp = require('mkdirp-promise');
// FIX: scoped package names begin with '@'; '#google-cloud/storage' does not resolve.
const gcs = require('@google-cloud/storage')();
const admin = require('firebase-admin');
const spawn = require('child-process-promise').spawn;
const path = require('path');
const os = require('os');
const fs = require('fs');
const gs = require('gs');

// Thumbnail geometry and naming.
const THUMB_MAX_HEIGHT = 200;
const THUMB_MAX_WIDTH = 200;
const THUMB_PREFIX = 'thumb_';

// NOTE(review): a bundled Ghostscript binary often lacks execute permission
// in the Cloud Functions sandbox (the "spawn EACCES" error); the accepted
// answer uses the plain executable name 'gs' instead — confirm which binary
// is intended for this deployment.
const gs_exec_path = path.join(__dirname, './lambda-ghostscript/bin/gs');

// Initialize exactly once; swallow "already initialized" on warm instances.
try{admin.initializeApp(functions.config().firebase); } catch(e) {}
exports.makeThumbnail = functions.storage.object().onFinalize((object) => {
const filePath = object.name;
const contentType = object.contentType;
const fileDir = path.dirname(filePath);
const fileName = path.basename(filePath);
const thumbFilePath = path.normalize(path.join(fileDir, `${THUMB_PREFIX} ${fileName}`));
const tempLocalFile = path.join(os.tmpdir(), filePath);
const tempLocalDir = path.dirname(tempLocalFile);
const tempLocalThumbFile = path.join(os.tmpdir(), thumbFilePath);
const tmp_dir = os.tmpdir();
if (fileName.startsWith(THUMB_PREFIX)) {
console.log('Is thumbnail');
return null;
}
const bucket = gcs.bucket(object.bucket);
const file = bucket.file(filePath);
const thumbFile = bucket.file(thumbFilePath);
const metadata = {
contentType: contentType,
};
return mkdirp(tmp_dir).then(() => {
console.log("Dir Created");
console.log(tempLocalFile);
return file.download({destination: tempLocalFile});
}).then(() => {
console.log("File downloaded");
if(!contentType.startsWith("image/")){
return new Promise((resolve, reject) => {
const pg= 1;
gs().batch().nopause()
.option(`-dFirstPage=${pg}`)
.option(`-dLastPage=${pg}`)
.executablePath(gs_exec_path)
.device('png16m')
.output(tempLocalThumbFile+".png")
.input(tempLocalFile)
.exec(err => err ? reject(err) : resolve());
});
}
else
{
var args = [ tempLocalFile, '-thumbnail', `${THUMB_MAX_WIDTH}x${THUMB_MAX_HEIGHT}>`, tempLocalThumbFile ];
return spawn('convert', args, {capture: ['stdout', 'stderr']});
}
}).then(() => {
return bucket.upload(tempLocalThumbFile, { destination: thumbFilePath });
}).then(() => {
fs.unlinkSync(tempLocalFile);
fs.unlinkSync(tempLocalThumbFile);
return result[0];
});
});
After hours of scratching my head and running same code over and over again pointlessly, I've finally found the problem!
The executable path that you've defined is not correct. It should be 'gs'.
Here's a complete gs() call sample:
// Working invocation from the answer: executablePath('gs') uses the
// Ghostscript binary available on the runtime instead of a bundled one
// (which triggered the spawn EACCES).
// NOTE(review): `resolve`/`reject` and the temp paths must come from an
// enclosing `new Promise((resolve, reject) => { ... })` executor — this
// snippet is not standalone.
gs()
.batch()
.option('-dFirstPage=2')
.option('-dLastPage=2')
.nopause()
.res(90)
.executablePath('gs')
.device('jpeg')
.output(tempNewPath2)
.input(tempFilePath)
.exec((err, stdout, stderr) => {
if (!err) {
console.log('gs executed w/o error');
console.log('stdout', stdout);
console.log('stderr', stderr);
resolve();
} else {
console.log('gs error:', err);
reject(err);
}
});
For more help, you can go through a sample repo that I created for this issue
https://github.com/krharsh17/ghostscript-firebase-sample

Categories