Close Event Triggers Before Data Events on File Stream - javascript

I've got a script that adds JSON data from a file to a DynamoDB table. The script uses the "fs" module to open a read stream to the JSON file and retrieve the data line by line. As the data is returned, it's inserted into a DynamoDB table. When the operation ends, an execution summary is printed with the number of records processed, successfully inserted, and unsuccessfully inserted. The problem is that the summary executes before the file has been completely processed, so the numbers are wrong.
The script...
ddb_table_has_records(table_name, (err, dat) => {
if (dat.Count === 0 || force) {
const transformStream = JSONStream.parse("*");
const inputStream = fs.createReadStream(import_file);
let record_position = 0;
let count_imported_successful = 0;
let count_imported_fail = 0;
inputStream.pipe(transformStream).on("data", (Item) => {
const params = {
TableName: table_name,
Item
}
ddb_client.put(params, (err, data) => {
++record_position;
if (err) {
console.error("Unable to add mapping for record " + record_position + ", error = " + err);
++count_imported_fail;
} else {
console.log("PutItem succeeded " + record_position);
++count_imported_successful;
}
});
}).on("close", () => {
console.log("=".repeat(70));
console.log(`'Completed: ${import_file}' has been loaded into '${table_name}'.`);
console.log(` Record Count: ${record_position}`);
console.log(` Imported Record Count: ${count_imported_successful}`);
console.log(` Rejected Record Count: ${count_imported_fail}`);
});
} else {
console.log("=".repeat(70));
console.log(`Completed: Skipping import of '${import_file}' into '${table_name}'.`);
};
});
When this runs, it looks like the following
PS C:\> node --max-old-space-size=8192 .\try.js 'foo' 'us-west-2' 'development' '.\data.json' true
Target Profile: development
Target Region: us-west-2
Target Table: foo
Source File: .\data.json
Force Import: true
Confirming Table's State...
======================================================================
'Completed: .\data.json' has been loaded into 'foo'.
Record Count: 0
Imported Record Count: 0
Rejected Record Count: 0
PutItem succeeded 1
PutItem succeeded 2
PutItem succeeded 3
PutItem succeeded 4
...
The portion of the code that gets the record counts runs before the inserts complete, so the imported and rejected record counts are always wrong. It looks like the file stream closes while inserts are still occurring. I've tried switching from the "close" event to the "end" event, with the same result.
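As far as I can tell, the "data" handler only starts each put; the ddb_client.put callback fires later, after the stream has already emitted "end"/"close". The ordering can be reproduced without DynamoDB at all; in the sketch below, fakePut is a hypothetical stand-in for any callback-style async call:
const { Readable } = require('stream');
// fakePut only simulates ddb_client.put: its callback fires asynchronously, later.
function fakePut(item, callback) {
  setTimeout(() => callback(null, item), 10);
}
let completed = 0;
const stream = Readable.from(['a', 'b', 'c']);
stream.on('data', (item) => {
  fakePut(item, () => { ++completed; }); // only *starts* the async work
});
stream.on('end', () => {
  // Fires as soon as reading is done; the fakePut callbacks typically haven't run yet.
  console.log(`end: completed = ${completed}`); // typically logs 0
});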

Test this script with the following call...
node --max-old-space-size=8192 .\data.load.js 'foo' 'us-west-1' 'dev' '.\foo.default.json' true
Here is the content for the script I ultimately used...
'use strict'
if (process.argv.length < 6) {
throw new Error ('Please pass the table-name, aws-Region, aws-Profile, and file-path to the script.');
}
let [, , TableName, Region, Profile, ImportFile, Force] = process.argv;
process.env.AWS_SDK_LOAD_CONFIG = true;
process.env.AWS_PROFILE = Profile;
Force = typeof(Force) !== 'undefined' ? Force : false;
const AWS = require('aws-sdk');
const fs = require('fs');
const JSONStream = require('JSONStream');
AWS.config.update({ region: Region });
const ddbc = new AWS.DynamoDB.DocumentClient();
console.log('Target Profile: ', Profile);
console.log('Target Region: ', Region);
console.log('Target Table: ', TableName);
console.log('Source File: ', ImportFile);
console.log('Force Import: ', Force);
// Returns the number of records in a specified table
const ddb_table_has_items = (TableName) => {
return new Promise((resolve, reject) => {
const ddb_query_parameters = { TableName, Select: 'COUNT' }
ddbc.scan(ddb_query_parameters, (error, data) => {
(error) ? reject(error) : resolve(data);
});
});
}
const ddb_table_upsert_items = (TableName, Item) => {
return new Promise((resolve, reject) => {
const ddb_insert_payload = { TableName, Item };
ddbc.put(ddb_insert_payload, (error, data) => {
(error) ? reject(error) : resolve(data);
});
});
}
const ddb_bulk_load = (TableName, ImportFile) => {
return new Promise ( (resolve, reject) => {
let count_succeeded = 0;
let count_failed = 0;
let count_attempted = 0;
let inserts = [];
const json_stream = JSONStream.parse( "*" );
const source_data_stream = fs.createReadStream(ImportFile);
const ddb_source_item = source_data_stream.pipe(json_stream);
ddb_source_item.on("data", (source_data_item) => {
count_attempted++;
let ddb_insert = ddb_table_upsert_items(TableName, source_data_item)
.then( (data) => count_succeeded++ )
.catch( (error) => count_failed++ );
inserts.push(ddb_insert);
});
ddb_source_item.on("end", () => {
Promise.all(inserts)
.then(() => {
resolve({count_succeeded, count_failed, count_attempted});
})
.catch((error) => {
console.log(error);
reject(error);
});
});
ddb_source_item.on("error", (error) => {
reject(error);
});
});
}
(async () => {
try {
let proceed_with_import = false;
if ( Force.toString().toLowerCase() === 'true' ) {
proceed_with_import = true;
} else {
const table_scan = await ddb_table_has_items(TableName);
proceed_with_import = ( table_scan.Count === 0 );
}
if (proceed_with_import) {
let ddb_inserts = await ddb_bulk_load(TableName, ImportFile);
console.log("=".repeat(75));
console.log("Completed: '%s' has been loaded into '%s'.", ImportFile, TableName);
console.log(" Insert Attempted: %s", ddb_inserts.count_attempted);
console.log(" Insert Succeeded: %s", ddb_inserts.count_succeeded);
console.log(" Insert Failed : %s", ddb_inserts.count_failed);
}
} catch (error) {
console.log(error);
}
})();
Wrapping each insert in a promise, pushing the insert-promises into an array, and calling Promise.all on that array did the trick. I execute Promise.all once we're finished reading from the file, i.e. once the "end" event is emitted on the ddb_source_item stream.
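Stripped of the DynamoDB specifics, the pattern looks roughly like this (the stream and insertOne names are illustrative; insertOne is any function that returns a promise):
function bulkLoad(stream, insertOne) {
  return new Promise((resolve, reject) => {
    const inserts = [];
    let attempted = 0, succeeded = 0, failed = 0;
    stream.on('data', (item) => {
      attempted++;
      // Track every insert; the .catch keeps a single failure from rejecting Promise.all early.
      inserts.push(insertOne(item).then(() => succeeded++).catch(() => failed++));
    });
    stream.on('end', () => {
      // Resolve only after every tracked insert has settled.
      Promise.all(inserts).then(() => resolve({ attempted, succeeded, failed }));
    });
    stream.on('error', reject);
  });
}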

Related

mongoose.Query.prototype.exec patching causes other Query prototype functions to not work

Caching patch code
import mongoose, { mongo } from 'mongoose';
import { createClient } from 'redis';
//redis init
const redisUrl = 'redis://127.0.0.1/6379'
const client = createClient();
client.on('error', (err) => console.log('Redis Client Error', err));
client.connect();
client.on("connect", (err) => {
if (err) throw err;
else console.log("Redis Connected!");
});
mongoose.Query.prototype._exec = mongoose.Query.prototype.exec;
mongoose.Query.prototype.cache = function (options = {}) {
this.useCache = true;
this.hashKey = JSON.stringify(options.hashKey || '')
return this;
}
mongoose.Query.prototype.exec = async function () {
if (!this.useCache) {
console.log();
return await mongoose.Query.prototype._exec.apply(this,arguments);
}
const key = JSON.stringify(Object.assign({}, this.getQuery(), {
collection: this.mongooseCollection.name + this.op,
}))
//See if we have a value for 'key' in redis
let cachedValue = await client.sendCommand(['HGET', this.hashKey, key])
//return if present
if (cachedValue) {
cachedValue = JSON.parse(cachedValue);
//Hydrating Model and Arrays of Models
return Array.isArray(cachedValue) ?
cachedValue.map(d => new this.model(d)) :
this.model(cachedValue);
}
//otherwise set into redis
let result = await mongoose.Query.prototype._exec.apply(this,arguments);
await client.hSet(this.hashKey, key, JSON.stringify(result));
client.expire(this.hashKey, 3000)
return result;
}
let projects = await db.Project.find({
companyId: userCompanyId,
})
.limit(limit)
.cache({ hashKey: userCompanyId });
I'm trying to apply caching to a paginated response; the cache function itself works, but other query methods like skip and limit don't seem to be applied.
I tried the solution from mongoose.Query.prototype.exec() patching not working with populate, but it did not resolve the issue.
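One thing I suspect (not verified): the Redis key is built only from getQuery() and the collection name + op, so every page of the same query maps to the same key and the first cached page is returned for all of them. Would including the query options in the key, roughly like the following, be the right approach?
const key = JSON.stringify(Object.assign({}, this.getQuery(), {
  collection: this.mongooseCollection.name + this.op,
  options: this.getOptions(), // skip, limit, sort, etc., so each page gets its own cache entry
}));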

What's the correct way to use async/await and Promises with Javascript (and therefore Node.js) [duplicate]

This question already has answers here:
Using async/await with a forEach loop
(33 answers)
Is it an anti-pattern to use async/await inside of a new Promise() constructor?
(5 answers)
Closed 1 year ago.
I'm currently building a script to extract data from a MySQL database and then populate a MongoDB database. In the process there is some asynchronous work, like establishing connections to MySQL (through the Sequelize library) and MongoDB (through the Mongoose library), and some synchronous work, like fetching and converting data.
I read a lot about async/await and Promises, and my script mostly does what I want, but I still have some issues.
Here's the code:
Migration.class.mjs
import MigrationBase from './Base/MigrationBase.class.mjs';
export default class Migration extends MigrationBase
{
constructor(config) {
super(config);
this.mysqlData = {};
this.mongoData = {};
}
async run() {
await this.selectMySQLData();
let docs = await this.convertMySQLToMongo();
await this.checkConvertedData(docs);
await this.insertMongoData();
}
async selectMySQLData() {
return new Promise(async resolve => {
await this.runSequelize();
console.log('B - Grabbing MySQL data\n');
for(var key in this.mysqlModels) {
if (this.mysqlModels.hasOwnProperty(key)) {
let search = { raw: true };
this.mysqlData[key] = await this.mysqlModels[key].findAll(search);
}
}
await this.closeSequelize();
resolve();
});
};
convertMySQLToMongo() {
return new Promise(async resolve => {
console.log('D - Convert MySQL data to MongoDB\n');
let customersDocument = this.defaultDocuments.customers;
let personalInfosDocument = this.defaultDocuments.personal_infos;
let billingInfosDocument = this.defaultDocuments.billing_infos;
// ... etc ...
await Object.entries(this.mysqlData.customer).forEach(async keyRow => {
let [key, row] = keyRow;
await Object.entries(row).forEach(async keyValue => {
customersDocument = await this._processCustomersFields(customersDocument, 'Customer', keyValue);
personalInfosDocument = await this._processPersonalInfosFields(personalInfosDocument, 'PersonalInfo', keyValue);
billingInfosDocument = await this._processBillingInfosFields(billingInfosDocument, 'BillingInfo', keyValue);
// ... etc ...
});
resolve([
customersDocument,
personalInfosDocument,
billingInfosDocument,
// ... etc ...
]);
});
};
checkConvertedData([
customersDocument,
personalInfosDocument,
billingInfosDocument,
// ... etc ...
]) {
return new Promise(resolve => {
console.log('E - Checking converted data');
if (! this._isNull(customersDocument, 'Customers')) {
this.mongoData.customers = customersDocument;
}
if (! this._isNull(personalInfosDocument, 'PersonalInfos')) {
this.mongoData.personal_infos = personalInfosDocument;
}
if (! this._isNull(billingInfosDocument, 'BillingInfos')) {
this.mongoData.billing_infos = billingInfosDocument;
}
// ... etc ...
resolve();
});
}
async insertMongoData() {
return new Promise(async resolve => {
await this.runMongoose();
console.log('G - Insert MongoDB data.');
await this.mongoModels.customers.create(this.mongoData.customers);
await this.mongoModels.personal_infos.create(this.mongoData.personal_infos);
await this.mongoModels.billing_infos.create(this.mongoData.billing_infos);
// ... etc ...
await this.closeMongoose();
resolve();
});
};
_processCustomersFields(defaultDoc, docName, [colName, val]) {
return new Promise(resolve => {
switch (colName) {
case 'id_customer':
console.log(`${docName}: ${colName} => ${val}`);
defaultDoc.id = val;
break;
case 'email_customer':
console.log(`${docName}: ${colName} => ${val}`);
defaultDoc.email = val;
break;
case 'password_customer':
console.log(`${docName}: ${colName} => ${val}`);
defaultDoc.password = val;
break;
// ... etc ...
}
resolve(defaultDoc);
});
}
_processPersonalInfosFields(defaultDoc, docName, [colName, val]) {
return new Promise(resolve => {
switch (colName) {
// ... Same kind of code as in _processCustomersFields() ...
}
resolve(defaultDoc);
});
}
_processBillingInfosFields(defaultDoc, docName, [colName, val]) {
return new Promise(resolve => {
switch (colName) {
// ... Same kind of code as in _processCustomersFields() ...
}
resolve(defaultDoc);
});
}
_isNull(document, mongoName) {
if (document !== null) {
console.log(`\n${mongoName}:\n`, JSON.stringify(document));
return false;
} else {
console.log(`Error processing \`${mongoName}\` data!`);
return true;
}
}
_valueExists(val) {
return (val !== null && val !== "" && typeof val !== "undefined")
? true
: false
;
}
}
MigrationBase.class.mjs
import Sequelize from 'sequelize';
import DataTypes from 'sequelize';
import Mongoose from 'mongoose';
import Crypto from 'crypto';
import Models from '../../../models.mjs';
import Schemas from '../../../schemas.mjs';
export default class MigrationBase
{
constructor(config) {
this.config = config;
this.sequelize = this.createSequelize();
this.mongoose = Mongoose;
this.defaultDocuments = this.initDefaultDocuments();
this.mysqlModels = this.initMysqlModels();
this.mongoModels = this.initMongoSchemas();
this.mysqlData = {};
this.mongoData = {};
}
createSequelize() {
return new Sequelize(
this.config.mysql.dbName,
this.config.mysql.dbUser,
this.config.mysql.dbPass,
this.config.sequelize
);
}
initDefaultDocuments() {
const defaultDocument = {
"deleted_at": 0 // Thu Jan 01 1970 01:00:00 GMT+0100
};
let defaultDocuments = {
"customers": Object.assign({}, defaultDocument),
"personal_infos": Object.assign({}, defaultDocument),
"billing_infos": Object.assign({}, defaultDocument)
// ... etc ...
};
return defaultDocuments;
}
initMysqlModels() {
return {
"customer": Models.Customer(this.sequelize, DataTypes),
"billing_address": Models.BillingAddress(this.sequelize, DataTypes),
// ... etc ...
};
}
initMongoSchemas() {
return {
"customers": this.mongoose.model('Customer', Schemas.Customers),
"personal_infos": this.mongoose.model('PersonalInfo', Schemas.PersonalInfos),
"billing_infos": this.mongoose.model('BillingInfo', Schemas.BillingInfos),
// ... etc ...
}
}
async runSequelize() {
console.log('A - Connection to MySQL');
try {
await this.sequelize.authenticate();
console.log('Connection to MySQL has been established successfully.\n');
} catch (err) {
console.error("Unable to connect to the MySQL database:", err + '\n');
}
}
async closeSequelize() {
console.log('C - Closing MySQL connection.\n');
await this.sequelize.close();
};
runMongoose() {
return new Promise(async resolve => {
console.log('F - Connection to MongoDB');
try {
await this.mongoose.connect(
`mongodb://${this.config.mongo.dbHost}:${this.config.mongo.dbPort}/${this.config.mongo.dbName}`,
{ useNewUrlParser: true, useUnifiedTopology: true }
);
console.log('Connection to MongoDB has been established successfully.');
} catch (err) {
console.error('Unable to connect to the MongoDB database: ', err);
}
resolve();
});
}
async closeMongoose() {
console.log('H - Closing MongoDB connection.');
await this.mongoose.connection.close();
};
}
And here is the Logs output:
A - Connection to MySQL
Connection to MySQL has been established successfully.
B - Grabbing MySQL data
C - Closing MySQL connection.
D - Convert MySQL data to MongoDB
Customer: id_customer => 1
Customer: email_customer => contact#example.com
Customer: password_customer => 0a1b2c3d4e5f0a1b2c3d4e5f0a1b2c3d
// ... etc ...
PersonalInfo: id_customer => 1
PersonalInfo: lastname_customer => Doe
PersonalInfo: firstname_customer => John
// ... etc ...
E - Checking converted data
Customers:
{"deleted_at":0,"id":"000000000000000000000001","email":"contact#example.com","password":"0a1b2c3d4e5f0a1b2c3d4e5f0a1b2c3d", ... etc ... }
PersonalInfos:
{"deleted_at":0,"customer_id":"000000000000000000000001","last_name":"Doe","first_name":"John", ... etc ... }
BillingInfos:
{"deleted_at":0}
BillingInfos: id_customer => 1
BillingInfo: company => ExampleCompany
F - Connection to MongoDB
BillingInfos: lastname => Doe
BillingInfo: firstname => John
Connection to MongoDB has been established successfully.
G - Insert MongoDB data.
/home/user/Workspaces/namespace/project-name/node_modules/mongoose/lib/document.js:2757
this.$__.validationError = new ValidationError(this);
^
ValidationError: BillingInfos validation failed: id_customer: Cast to ObjectId failed for value "1" (type number) at path "customer_id", values: Path `values` is required., id: Path `id` is required.
Here we can see in the right order:
A - Connection to MySQL
B - Grabbing MySQL data
C - Closing MySQL connection
D - Convert MySQL data to MongoDB
Then we can see E - Checking converted data but the conversion process is not finished, despite the await statement and the fact that it returns a Promise.
After that we can also see BillingInfos: id_customer => 1 and BillingInfo: company => ExampleCompany, meaning that the conversion process is still doing work in the loop.
Then F - Connection to MongoDB
Then another conversion log, BillingInfos: lastname => Doe and BillingInfo: firstname => John (the conversion process is still doing work in the loop).
Then G - Insert MongoDB data.
And finally a ValidationError, because some Mongo documents are incomplete and so the rules are not fulfilled.
Question?
So the question is: what am I doing wrong here?
As I said, I read a lot about async/await and Promises but still struggle to understand why it's not working.
Thanks in advance and let me know if you need more info.
That's because await does not work inside forEach(), which is what you are doing in your convertMySQLToMongo() function.
There are many ways to solve it; one of them is to use for ... of instead of forEach():
for (const keyRow of Object.entries(this.mysqlData.customer)) {
let [key, row] = keyRow;
for (const keyValue of Object.entries(row)) {
customersDocument = await this._processCustomersFields(customersDocument, 'Customer', keyValue);
personalInfosDocument = await this._processPersonalInfosFields(personalInfosDocument, 'PersonalInfo', keyValue);
billingInfosDocument = await this._processBillingInfosFields(billingInfosDocument, 'BillingInfo', keyValue);
}
}
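It's also worth noting (per the second linked duplicate) that wrapping an async body in new Promise(async resolve => ...) is unnecessary: an async method already returns a Promise. A sketch of convertMySQLToMongo() without the wrapper, assuming the same helper methods:
async convertMySQLToMongo() {
  console.log('D - Convert MySQL data to MongoDB\n');
  let customersDocument = this.defaultDocuments.customers;
  let personalInfosDocument = this.defaultDocuments.personal_infos;
  let billingInfosDocument = this.defaultDocuments.billing_infos;
  // ... etc ...
  for (const [key, row] of Object.entries(this.mysqlData.customer)) {
    for (const keyValue of Object.entries(row)) {
      customersDocument = await this._processCustomersFields(customersDocument, 'Customer', keyValue);
      personalInfosDocument = await this._processPersonalInfosFields(personalInfosDocument, 'PersonalInfo', keyValue);
      billingInfosDocument = await this._processBillingInfosFields(billingInfosDocument, 'BillingInfo', keyValue);
      // ... etc ...
    }
  }
  // An async method already returns a Promise, so no new Promise(...) wrapper is needed.
  return [customersDocument, personalInfosDocument, billingInfosDocument /* ... etc ... */];
}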

nodejs TypeError: Cannot read property 'Message' of undefined

I have a lambda function in AWS. I use it to retrieve information from a REST API. When I test it, it returns a 200 status code, but "ERROR TypeError: Cannot read property 'Message' of undefined
at smsResponder (/var/task/smsResponder.js:33:35)" also shows. I have googled, and tried to use .responseText.
My code is below. Should I be using return or something of the sort?
'use strict'
const AWS = require('aws-sdk')
AWS.config.update({ region: process.env.AWS_REGION || 'us-east-1' })
const { getStock } = require('./getStock')
const KEYWORD = 'stock'
const validateStock = function (elementValue){
let stockTest = AAPL
return stockTest.test(elementValue)
}
const sendSMS = async function (params) {
const pinpoint = new AWS.Pinpoint()
console.log('sendSMS called: ', params)
return new Promise((resolve, reject) => {
pinpoint.sendMessages(params, function(err, data) {
if(err) {
console.error(err)
reject(err)
} else {
console.log("Message sent. Data: ", data)
resolve(data)
}
})
})
}
const smsResponder = async (event) => {
const msg = JSON.parse(event.Sns.Message)
const msgWords = msg.messageBody.split(" ")
// Check the first word of the text message is the keyword
if (msgWords[0].toLowerCase() !== KEYWORD) return console.log('No keyword found - exiting')
// Validate stock name and get price
let message =''
const stockCode = msgWords[1]
if (validateStock(stockCode)) {
message = await getStock(stockCode)
} else {
message = 'Invalid stock symbol - text me in the format "stock stocksymbol".'
}
// Send the SMS response
const params = {
ApplicationId: process.env.ApplicationId,
MessageRequest: {
Addresses: {
[msg.originationNumber]: {
ChannelType: 'SMS'
}
},
MessageConfiguration: {
SMSMessage: {
Body: message,
MessageType: 'PROMOTIONAL',
OriginationNumber: msg.destinationNumber
}
}
}
}
return console.log(await sendSMS(params))
}
module.exports = { smsResponder }
The SNS event is structured differently; it should be event.Records[0].Sns.Message.
Here are the docs:
https://docs.aws.amazon.com/lambda/latest/dg/with-sns.html
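For reference, the event a Lambda receives from SNS looks roughly like this (abbreviated; the payload values are illustrative), so only the parse line needs to change:
// {
//   "Records": [
//     { "EventSource": "aws:sns",
//       "Sns": { "Message": "{\"messageBody\":\"stock msft\",\"originationNumber\":\"+15555550100\"}" } }
//   ]
// }
const smsResponder = async (event) => {
  const msg = JSON.parse(event.Records[0].Sns.Message)
  const msgWords = msg.messageBody.split(" ")
  // ... rest of the handler unchanged
}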

Returning Output from AWS.DynamoDB.DocumentClient.Scan() Call

I've got a function that returns the number of records from a DynamoDB table (Things):
const table = 'Things';
const region = 'us-east-1';
const profile = 'development';
process.env.AWS_SDK_LOAD_CONFIG = true;
process.env.AWS_PROFILE = profile;
const AWS = require('aws-sdk');
AWS.config.update({ region: region });
function ddb_table_has_records(table_name) {
const ddb_client = new AWS.DynamoDB.DocumentClient();
const ddb_query_parameters = {
TableName: table_name,
Select: 'COUNT'
}
const results = ddb_client.scan(ddb_query_parameters).promise();
results.then((data) => {
console.log(data.Count);
return data;
}).catch((err) => {
console.log("Error: ", err);
})
}
console.log(ddb_table_has_records(table));
When I run this code, I get the following...
PS C:\> node .\get-count-thing.js
undefined
3951
I'm not capturing the data from the scan in the following, although I can see it in the console.log() call:
console.log(ddb_table_has_records(table));
What am I mucking up?
Posting my fix in case anyone has the same question. I had to make two changes to retrieve the items from the table; I needed to...
...project ALL_ATTRIBUTES
...iterate over the collection of Items returned
The following was my function with changes:
function ddb_table_has_records(table_name) {
const ddb_client = new AWS.DynamoDB.DocumentClient();
const ddb_query_parameters = {
TableName: table_name,
Select: 'ALL_ATTRIBUTES'
}
const results = ddb_client.scan(ddb_query_parameters).promise();
results.then((data) => {
console.log(data.Count);
data.Items.forEach((thing) => {
console.log(thing);
});
}).catch((err) => {
console.log("Error: ", err);
})
}
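Separately, the undefined in the original output is because ddb_table_has_records never returns anything to its caller; the scan result is only consumed inside the .then(). If the count itself is needed by the caller, one option (a sketch) is to return the promise and wait on it:
async function ddb_table_has_records(table_name) {
  const ddb_client = new AWS.DynamoDB.DocumentClient();
  const ddb_query_parameters = { TableName: table_name, Select: 'COUNT' };
  const data = await ddb_client.scan(ddb_query_parameters).promise();
  return data.Count;
}
// The caller has to wait on the promise as well:
ddb_table_has_records(table).then((count) => console.log(count));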

How to pull out handler using module exports?

I am building a node application and trying to neatly organize my code. I wrote a serial module that imports the serial libs and handles the connection. My intention was to write a basic module and then reuse it over and over again in different projects as needed. The only part that changes per use is how the incoming serial data is handled. For this reason I would like to pull out the following handler and redefine it per the project's needs. How can I use module exports to redefine only this section of the file?
I have tried adding myParser to exports, but that gives me null, and it would be out of scope.
Handler to redefine/change/overload for each new project
myParser.on('data', (data) => {
console.log(data)
//DO SOMETHING WITH DATA
});
Example usage: main.js
const serial = require('./serial');
const dataParser = require('./dataParser');
const serial = require('./serial');
//call connect with CL args
serial.connect(process.argv[2], Number(process.argv[3]))
serial.myParser.on('data',(data) => {
//Do something unique with data
if (dataParser.parse(data) == 0)
serial.send('Error');
});
Full JS Module below serial.js
const SerialPort = require('serialport');
const ReadLine = require('@serialport/parser-readline');
const _d = String.fromCharCode(13); //char EOL
let myPort = null;
let myParser = null;
function connect(port, baud) {
let portName = port || `COM1`;
let baudRate = baud || 115200;
myPort = new SerialPort(portName, {baudRate: baudRate})
myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))
//Handlers
myPort.on('open', () => {
console.log(`port ${portName} open`)
});
myParser.on('data', (data) => {
console.log(data)
});
myPort.on('close', () => {
console.log(`port ${portName} closed`)
});
myPort.on('error', (err) => {
console.error('port error: ' + err)
});
}
function getPorts() {
let portlist = [];
SerialPort.list((err, ports) => {
ports.forEach(port => {
portlist.push(port.comName)
});
})
return portlist;
}
function send(data) {
myPort.write(JSON.stringify(data) + _d, function (err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log(`${data} sent`);
});
}
function close() {
myPort.close();
}
module.exports = {
connect, getPorts, send, close
}
The problem is that a module is used where a class or a factory would be appropriate. myParser cannot exist without connect being called, so it doesn't make sense to make it available as a module property: it would be unavailable by default, and multiple connect calls would override it.
It can be a factory:
module.exports = function connect(port, baud) {
let portName = port || `COM1`;
let baudRate = baud || 115200;
let myPort = new SerialPort(portName, {baudRate: baudRate})
let myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))
//Handlers
myPort.on('open', () => {
console.log(`port ${portName} open`)
});
myParser.on('data', (data) => {
console.log(data)
});
myPort.on('close', () => {
console.log(`port ${portName} closed`)
});
myPort.on('error', (err) => {
console.error('port error: ' + err)
});
function getPorts() {
let portlist = [];
SerialPort.list((err, ports) => {
ports.forEach(port => {
portlist.push(port.comName)
});
})
return portlist;
}
function send(data) {
myPort.write(JSON.stringify(data) + _d, function (err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log(`${data} sent`);
});
}
function close() {
myPort.close();
}
return {
myParser, getPorts, send, close
};
}
So it could be used like:
const serial = require('./serial');
const connection = serial(...);
connection.myParser.on('data',(data) => {
//Do something unique with data
if (dataParser.parse(data) == 0)
connection.send('Error');
});
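The class-based variant mentioned in the answer works the same way; here's a sketch (the class and property names are illustrative):
const SerialPort = require('serialport');
const ReadLine = require('@serialport/parser-readline');
const _d = String.fromCharCode(13); // char EOL
class SerialConnection {
  constructor(port = 'COM1', baud = 115200) {
    this.port = new SerialPort(port, { baudRate: baud });
    // Exposed so each project can attach its own 'data' handler.
    this.parser = this.port.pipe(new ReadLine({ delimiter: '\n' }));
  }
  send(data) {
    this.port.write(JSON.stringify(data) + _d, (err) => {
      if (err) return console.log('Error on write: ', err.message);
      console.log(`${data} sent`);
    });
  }
  close() {
    this.port.close();
  }
}
module.exports = SerialConnection;
// Usage in main.js:
// const SerialConnection = require('./serial');
// const connection = new SerialConnection(process.argv[2], Number(process.argv[3]));
// connection.parser.on('data', (data) => { /* project-specific handling */ });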
