Node.js promise chaining issue - JavaScript

I am trying to connect to Salesforce using Node.js and the jsforce library, with promises. Unfortunately, one of the methods executes before the connection is obtained.
I have method A: makeConnection, which returns the connection.
I have method B: loadClasses, which loads data from Salesforce using the connection from method A.
I have method C: getDependencies, which gets dependencies from Salesforce using the connection from method A.
I would like them to execute in the order A ==> B ==> C.
Unfortunately, C seems to run first, followed by A and B, so the connection is null and it fails.
Roughly, this is the code:
let jsforce = require("jsforce");
const sfdcSoup = require("sfdc-soup");
const fs = require("fs");
let _ = require("lodash");

let trgarr = [];
let clsarr = [];
let entityarr = [];

function makeConnection() {
  return new Promise((resolve, reject) => {
    const conn = new jsforce.Connection({
      loginUrl: "https://test.salesforce.com",
      instanceUrl: "salesforce.com",
      serverUrl: "xxx",
      version: "50.0"
    });
    conn.login(username, password, function (err, userInfo) {
      if (err) {
        return console.error(err);
      }
      // console.log(conn.accessToken);
      // console.log(conn.instanceUrl);
      // console.log("User ID: " + userInfo.id);
      // console.log("Org ID: " + userInfo.organizationId);
      console.log("logged in");
    });
    resolve(conn);
  });
}

function loadClasses(conn) {
  return new Promise((resolve, reject) => {
    const querystr =
      "select apiVersion,name,body from apexClass where NamespacePrefix = null";
    let query = conn
      .query(querystr)
      .on("record", function (rec) {
        clsarr.push(rec);
      })
      .on("end", function () {
        console.log("number of class is " + clsarr.length);
        console.log("loaded all classes");
      });
    resolve(conn, clsarr);
  });
}

async function getDependencies(conn) {
  return new Promise((resolve, reject) => {
    let entryPoint = {
      name: "xxx",
      type: "CustomField",
      id: yyy
    };
    let connection = {
      token: conn.accessToken,
      url: "abc.com",
      apiVersion: "50.0"
    };
    let usageApi = sfdcSoup.usageApi(connection, entryPoint);
    usageApi.getUsage().then((response) => {
      console.log(response.stats);
      console.log(response.csv);
    });
  });
}

async function run() {
  makeConnection().then(conn => loadClasses(conn)).then(conn => getDependencies(conn));
}

run();
I keep getting an error that says UnhandledPromiseRejectionWarning: Error: Access token and URL are required on the connection object.
The reason is that the connection needs to be obtained from method A and passed to method C, which is not happening. Can you please guide me on where I might be wrong?
Also, why is method C executed before A and B? Why does my promise chaining not work as promised?
I am running the code in VS Code with Node 14.

Your two methods need minor corrections. In the first, makeConnection, the resolve should be inside the login callback, after console.log("logged in"). In the second, loadClasses, the resolve should be inside the 'end' event handler. Please check the two corrected methods below.
function makeConnection() {
  return new Promise((resolve, reject) => {
    const conn = new jsforce.Connection({
      loginUrl: "https://test.salesforce.com",
      instanceUrl: "salesforce.com",
      serverUrl: "xxx",
      version: "50.0"
    });
    conn.login(username, password, function (err, userInfo) {
      if (err) {
        // reject instead of only logging, so the chain can catch login failures
        return reject(err);
      }
      // console.log(conn.accessToken);
      // console.log(conn.instanceUrl);
      // console.log("User ID: " + userInfo.id);
      // console.log("Org ID: " + userInfo.organizationId);
      console.log("logged in");
      resolve(conn);
    });
  });
}

function loadClasses(conn) {
  return new Promise((resolve, reject) => {
    const querystr =
      "select apiVersion,name,body from apexClass where NamespacePrefix = null";
    let query = conn
      .query(querystr)
      .on("record", function (rec) {
        clsarr.push(rec);
      })
      .on("end", function () {
        console.log("number of class is " + clsarr.length);
        console.log("loaded all classes");
        // a promise resolves with a single value; the second argument is
        // ignored, so the next .then receives conn
        resolve(conn, clsarr);
      });
  });
}

You should run promises in series if the methods depend on each other; if they do not depend on each other, you should run them in parallel. Read more about running promises in series and in parallel.
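For illustration, a minimal sketch of the two patterns using the functions from this question (note that getDependencies as posted never calls resolve, so it would also need to resolve once getUsage() finishes for the chain to complete):

async function runInSeries() {
  // series: each step waits for the previous one, because B and C need A's connection
  const conn = await makeConnection();
  await loadClasses(conn);
  await getDependencies(conn);
}

async function runInParallel(conn) {
  // parallel: independent tasks start together and are awaited as a group
  // (loadTriggers is a hypothetical independent task, shown only for contrast)
  const [classes, triggers] = await Promise.all([
    loadClasses(conn),
    loadTriggers(conn)
  ]);
}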

Related

mongoose.Query.prototype.exec patching causes other Query prototype functions to not work

Caching patch code
import mongoose, { mongo } from 'mongoose';
import { createClient } from 'redis';

// redis init
const redisUrl = 'redis://127.0.0.1/6379'
const client = createClient();
client.on('error', (err) => console.log('Redis Client Error', err));
client.connect();
client.on("connect", (err) => {
  if (err) throw err;
  else console.log("Redis Connected!");
});

mongoose.Query.prototype._exec = mongoose.Query.prototype.exec;

mongoose.Query.prototype.cache = function (options = {}) {
  this.useCache = true;
  this.hashKey = JSON.stringify(options.hashKey || '')
  return this;
}

mongoose.Query.prototype.exec = async function () {
  if (!this.useCache) {
    console.log();
    return await mongoose.Query.prototype._exec.apply(this, arguments);
  }
  const key = JSON.stringify(Object.assign({}, this.getQuery(), {
    collection: this.mongooseCollection.name + this.op,
  }))
  // See if we have a value for 'key' in redis
  let cachedValue = await client.sendCommand(['HGET', this.hashKey, key])
  // return if present
  if (cachedValue) {
    cachedValue = JSON.parse(cachedValue);
    // Hydrating Model and Arrays of Models
    return Array.isArray(cachedValue) ?
      cachedValue.map(d => new this.model(d)) :
      this.model(cachedValue);
  }
  // otherwise set into redis
  let result = await mongoose.Query.prototype._exec.apply(this, arguments);
  await client.hSet(this.hashKey, key, JSON.stringify(result));
  client.expire(this.hashKey, 3000)
  return result;
}

let projects = await db.Project.find({
    companyId: userCompanyId,
  })
  .limit(limit)
  .cache({ hashKey: userCompanyId });
I'm trying to apply caching to a paginated response. The caching itself works, but query modifiers like skip and limit are not being applied. I tried the solution from mongoose.Query.prototype.exec() patching not working with populate, however it did not resolve the issue.
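One likely cause, offered as an assumption rather than a tested fix: the cache key is built only from this.getQuery(), which contains the filter conditions but not modifiers such as skip and limit, so every page of the same query maps to the same Redis entry and the first cached page is returned for all of them. A minimal sketch of a key that distinguishes pages, assuming mongoose's Query#getOptions() is available:

const key = JSON.stringify(Object.assign({}, this.getQuery(), {
  collection: this.mongooseCollection.name + this.op,
  // skip/limit/sort live in the query options, not in getQuery()
  options: this.getOptions(),
}));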

What's the correct way to use async/await and Promises with Javascript (and therefore Node.js) [duplicate]

This question already has answers here:
Using async/await with a forEach loop (33 answers)
Is it an anti-pattern to use async/await inside of a new Promise() constructor? (5 answers)
Closed 1 year ago.
I'm building a script to extract data from a MySQL database and then populate a MongoDB database. In the process there is some asynchronous work, like establishing connections to MySQL (through the Sequelize library) and MongoDB (through the Mongoose library), and some synchronous work, like fetching and converting data.
I read a lot about async/await and promises, and my script is mostly doing what I want, but I still have some issues.
Here's the code:
Migration.class.mjs
import MigrationBase from './Base/MigrationBase.class.mjs';

export default class Migration extends MigrationBase
{
  constructor(config) {
    super(config);
    this.mysqlData = {};
    this.mongoData = {};
  }
  async run() {
    await this.selectMySQLData();
    let docs = await this.convertMySQLToMongo();
    await this.checkConvertedData(docs);
    await this.insertMongoData();
  }
  async selectMySQLData() {
    return new Promise(async resolve => {
      await this.runSequelize();
      console.log('B - Grabbing MySQL data\n');
      for (var key in this.mysqlModels) {
        if (this.mysqlModels.hasOwnProperty(key)) {
          let search = { raw: true };
          this.mysqlData[key] = await this.mysqlModels[key].findAll(search);
        }
      }
      await this.closeSequelize();
      resolve();
    });
  };
  convertMySQLToMongo() {
    return new Promise(async resolve => {
      console.log('D - Convert MySQL data to MongoDB\n');
      let customersDocument = this.defaultDocuments.customers;
      let personalInfosDocument = this.defaultDocuments.personal_infos;
      let billingInfosDocument = this.defaultDocuments.billing_infos;
      // ... etc ...
      await Object.entries(this.mysqlData.customer).forEach(async keyRow => {
        let [key, row] = keyRow;
        await Object.entries(row).forEach(async keyValue => {
          customersDocument = await this._processCustomersFields(customersDocument, 'Customer', keyValue);
          personalInfosDocument = await this._processPersonalInfosFields(personalInfosDocument, 'PersonalInfo', keyValue);
          billingInfosDocument = await this._processBillingInfosFields(billingInfosDocument, 'BillingInfo', keyValue);
          // ... etc ...
        });
        resolve([
          customersDocument,
          personalInfosDocument,
          billingInfosDocument,
          // ... etc ...
        ]);
      });
    });
  };
  checkConvertedData([
    customersDocument,
    personalInfosDocument,
    billingInfosDocument,
    // ... etc ...
  ]) {
    return new Promise(resolve => {
      console.log('E - Checking converted data');
      if (! this._isNull(customersDocument, 'Customers')) {
        this.mongoData.customers = customersDocument;
      }
      if (! this._isNull(personalInfosDocument, 'PersonalInfos')) {
        this.mongoData.personal_infos = personalInfosDocument;
      }
      if (! this._isNull(billingInfosDocument, 'BillingInfos')) {
        this.mongoData.billing_infos = billingInfosDocument;
      }
      // ... etc ...
      resolve();
    });
  }
  async insertMongoData() {
    return new Promise(async resolve => {
      await this.runMongoose();
      console.log('G - Insert MongoDB data.');
      await this.mongoModels.customers.create(this.mongoData.customers);
      await this.mongoModels.personal_infos.create(this.mongoData.personal_infos);
      await this.mongoModels.billing_infos.create(this.mongoData.billing_infos);
      // ... etc ...
      await this.closeMongoose();
      resolve();
    });
  };
  _processCustomersFields(defaultDoc, docName, [colName, val]) {
    return new Promise(resolve => {
      switch (colName) {
        case 'id_customer':
          console.log(`${docName}: ${colName} => ${val}`);
          defaultDoc.id = val;
          break;
        case 'email_customer':
          console.log(`${docName}: ${colName} => ${val}`);
          defaultDoc.email = val;
          break;
        case 'password_customer':
          console.log(`${docName}: ${colName} => ${val}`);
          defaultDoc.password = val;
          break;
        // ... etc ...
      }
      resolve(defaultDoc);
    });
  }
  _processPersonalInfosFields(defaultDoc, docName, [colName, val]) {
    return new Promise(resolve => {
      switch (colName) {
        // ... Same kind of code as in _processCustomersFields() ...
      }
      resolve(defaultDoc);
    });
  }
  _processBillingInfosFields(defaultDoc, docName, [colName, val]) {
    return new Promise(resolve => {
      switch (colName) {
        // ... Same kind of code as in _processCustomersFields() ...
      }
      resolve(defaultDoc);
    });
  }
  _isNull(document, mongoName) {
    if (document !== null) {
      console.log(`\n${mongoName}:\n`, JSON.stringify(document));
      return false;
    } else {
      console.log(`Error processing \`${mongoName}\` data!`);
      return true;
    }
  }
  _valueExists(val) {
    return (val !== null && val !== "" && typeof val !== "undefined")
      ? true
      : false;
  }
}
MigrationBase.class.mjs
import Sequelize from 'sequelize';
import DataTypes from 'sequelize';
import Mongoose from 'mongoose';
import Crypto from 'crypto';
import Models from '../../../models.mjs';
import Schemas from '../../../schemas.mjs';

export default class MigrationBase
{
  constructor(config) {
    this.config = config;
    this.sequelize = this.createSequelize();
    this.mongoose = Mongoose;
    this.defaultDocuments = this.initDefaultDocuments();
    this.mysqlModels = this.initMysqlModels();
    this.mongoModels = this.initMongoSchemas();
    this.mysqlData = {};
    this.mongoData = {};
  }
  createSequelize() {
    return new Sequelize(
      this.config.mysql.dbName,
      this.config.mysql.dbUser,
      this.config.mysql.dbPass,
      this.config.sequelize
    );
  }
  initDefaultDocuments() {
    const defaultDocument = {
      "deleted_at": 0 // Thu Jan 01 1970 01:00:00 GMT+0100
    };
    let defaultDocuments = {
      "customers": Object.assign({}, defaultDocument),
      "personal_infos": Object.assign({}, defaultDocument),
      "billing_infos": Object.assign({}, defaultDocument)
      // ... etc ...
    };
    return defaultDocuments;
  }
  initMysqlModels() {
    return {
      "customer": Models.Customer(this.sequelize, DataTypes),
      "billing_address": Models.BillingAddress(this.sequelize, DataTypes),
      // ... etc ...
    };
  }
  initMongoSchemas() {
    return {
      "customers": this.mongoose.model('Customer', Schemas.Customers),
      "personal_infos": this.mongoose.model('PersonalInfo', Schemas.PersonalInfos),
      "billing_infos": this.mongoose.model('BillingInfo', Schemas.BillingInfos),
      // ... etc ...
    }
  }
  async runSequelize() {
    console.log('A - Connection to MySQL');
    try {
      await this.sequelize.authenticate();
      console.log('Connection to MySQL has been established successfully.\n');
    } catch (err) {
      console.error("Unable to connect to the MySQL database:", err + '\n');
    }
  }
  async closeSequelize() {
    console.log('C - Closing MySQL connection.\n');
    await this.sequelize.close();
  };
  runMongoose() {
    return new Promise(async resolve => {
      console.log('F - Connection to MongoDB');
      try {
        await this.mongoose.connect(
          `mongodb://${this.config.mongo.dbHost}:${this.config.mongo.dbPort}/${this.config.mongo.dbName}`,
          { useNewUrlParser: true, useUnifiedTopology: true }
        );
        console.log('Connection to MongoDB has been established successfully.');
      } catch (err) {
        console.error('Unable to connect to the MongoDB database: ', err);
      }
      resolve();
    });
  }
  async closeMongoose() {
    console.log('H - Closing MongoDB connection.');
    await this.mongoose.connection.close();
  };
}
And here is the log output:
A - Connection to MySQL
Connection to MySQL has been established successfully.
B - Grabbing MySQL data
C - Closing MySQL connection.
D - Convert MySQL data to MongoDB
Customer: id_customer => 1
Customer: email_customer => contact@example.com
Customer: password_customer => 0a1b2c3d4e5f0a1b2c3d4e5f0a1b2c3d
// ... etc ...
PersonalInfo: id_customer => 1
PersonalInfo: lastname_customer => Doe
PersonalInfo: firstname_customer => John
// ... etc ...
E - Checking converted data
Customers:
{"deleted_at":0,"id":"000000000000000000000001","email":"contact#example.com","password":"0a1b2c3d4e5f0a1b2c3d4e5f0a1b2c3d", ... etc ... }
PersonalInfos:
{"deleted_at":0,"customer_id":"000000000000000000000001","last_name":"Doe","first_name":"John", ... etc ... }
BillingInfos:
{"deleted_at":0}
BillingInfos: id_customer => 1
BillingInfo: company => ExampleCompany
F - Connection to MongoDB
BillingInfos: lastname => Doe
BillingInfo: firstname => John
Connection to MongoDB has been established successfully.
G - Insert MongoDB data.
/home/user/Workspaces/namespace/project-name/node_modules/mongoose/lib/document.js:2757
this.$__.validationError = new ValidationError(this);
^
ValidationError: BillingInfos validation failed: id_customer: Cast to ObjectId failed for value "1" (type number) at path "customer_id", values: Path `values` is required., id: Path `id` is required.
Here we can see in the right order:
A - Connection to MySQL
B - Grabbing MySQL data
C - Closing MySQL connection
D - Convert MySQL data to MongoDB
Then we can see E - Checking converted data, but the conversion process is not finished, despite the await statement and the fact that it returns a Promise.
After that we can also see BillingInfos: id_customer => 1 and BillingInfo: company => ExampleCompany, meaning that the conversion process is still doing work in the loop.
Then F - Connection to MongoDB.
Then more conversion logs, BillingInfos: lastname => Doe and BillingInfo: firstname => John (the conversion process is still working in the loop).
Then G - Insert MongoDB data.
And finally a ValidationError, because some Mongo documents are incomplete and so the schema rules are not fulfilled.
Question
So what am I doing wrong here?
As I said, I read a lot about async/await and Promises but still struggle to understand why it's not working.
Thanks in advance, and let me know if you need more info.
That's because await does not work inside forEach(), which is what you are using in your convertMySQLToMongo() function.
There are many ways to solve it; one of them is to use for ... of instead of forEach():
for (const keyRow of Object.entries(this.mysqlData.customer)) {
  let [key, row] = keyRow;
  for (const keyValue of Object.entries(row)) {
    customersDocument = await this._processCustomersFields(customersDocument, 'Customer', keyValue);
    personalInfosDocument = await this._processPersonalInfosFields(personalInfosDocument, 'PersonalInfo', keyValue);
    billingInfosDocument = await this._processBillingInfosFields(billingInfosDocument, 'BillingInfo', keyValue);
  }
}
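For background, a minimal sketch of why the two loops behave differently (items and process are hypothetical stand-ins): forEach() fires all of its callbacks and returns undefined immediately, so the promises produced by an async callback are discarded and there is nothing for the surrounding code to await.

// forEach: the async callbacks' promises are thrown away
await items.forEach(async (item) => {
  await process(item); // nothing ever awaits this
});
console.log('runs before any process() call has completed');

// for...of: each iteration is awaited in turn
for (const item of items) {
  await process(item);
}
console.log('runs after every process() call has completed');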

Stripe functions returning calls after function finished

I'm relatively new to JavaScript. I'm working with Stripe, trying to handle a user submitting another payment method and then paying an invoice with that method; if the payment fails again, it should remove the subscription altogether. I'm using Firebase Realtime Database with Google Cloud Functions and Node.js 8.
Here is what I have so far:
exports.onSecondPaymentAttempt = functions.database.ref("users/{userId}/something/somethingHistory/{subDbId}/newPayment").onCreate((snapshot, context) => {
  var s = snapshot.val();
  var fields = s.split(",");
  const cardToken = fields[0];
  const cus_id = fields[1];
  const conn_id = fields[2];
  const subDbId = context.params.subDbId;
  const userId = context.params.userId;
  return stripe.customers.createSource(
    cus_id,
    {source: cardToken}, {
      stripeAccount: `${conn_id}`,
    },
    (err, card) => {
      console.log(err);
      if (err) {
        return console.log("error attaching card " + err)
      } else {
        const invoiceNo = admin.database().ref(`users/${userId}/something/somethingHistory/${subDbId}`)
        return invoiceNo.once('value').then(snapshot => {
          const invoiceNumber = snapshot.child("invoiceId").val();
          const subId = snapshot.child("subscriptionId").val();
          return stripe.invoices.pay(
            invoiceNumber,
            {
              expand: ['payment_intent','charge','subscription'],
            }, {
              stripeAccount: `${conn_id}`,
            },
            (err, invoice) => {
              if (err) {
                return console.log("error paying invoice " + err)
              } else {
                if (invoice.payment_intent.status === "succeeded") {
                  //DO SOME CODE
                  return console.log("New Payment succeeded for " + invoiceNumber)
                } else {
                  //DO SOME OTHER CODE
                  //CANCEL
                  return stripe.subscriptions.del(
                    subId, {
                      stripeAccount: `${conn_id}`,
                    },
                    (err, confirmation) => {
                      if (err) {
                        return console.log("Subscription error")
                      } else {
                        return console.log("Subscription cancelled")
                      }
                    });
                }
              }
            });
        })
      }
    });
});
To me it looks like an incredibly inefficient and ugly way of achieving the effect, and overall the user is left waiting for a response for approximately 15 seconds. Although the function finishes its execution after 1862 ms, I still get responses 5 to 10 seconds later.
What's the most efficient way of achieving the same desired effect: registering a new payment source, paying the subscription, and then handling the result of that payment?
You should use the Promises returned by the Stripe asynchronous methods, as follows (untested; it probably needs some fine-tuning, in particular the objects passed to the Stripe methods):
exports.onSecondPaymentAttempt = functions.database.ref("users/{userId}/something/somethingHistory/{subDbId}/newPayment").onCreate((snapshot, context) => {
  var s = snapshot.val();
  var fields = s.split(",");
  const cardToken = fields[0];
  const cus_id = fields[1];
  const conn_id = fields[2];
  const subDbId = context.params.subDbId;
  const userId = context.params.userId;
  // hoisted so the later .then callbacks can see them
  let invoiceNumber;
  let subId;
  return stripe.customers.createSource(
      //Format of this object to be confirmed....
      cus_id,
      { source: cardToken },
      { stripeAccount: `${conn_id}` }
    )
    .then(card => {
      const invoiceNo = admin.database().ref(`users/${userId}/something/somethingHistory/${subDbId}`)
      return invoiceNo.once('value')
    })
    .then(snapshot => {
      invoiceNumber = snapshot.child("invoiceId").val();
      subId = snapshot.child("subscriptionId").val();
      return stripe.invoices.pay(
        invoiceNumber,
        { expand: ['payment_intent', 'charge', 'subscription'] },
        { stripeAccount: `${conn_id}` }
      )
    })
    .then(invoice => {
      if (invoice.payment_intent.status === "succeeded") {
        //DO SOME CODE
        console.log("New Payment succeeded for " + invoiceNumber)
        return null;
      } else {
        //DO SOME OTHER CODE
        //CANCEL
        return stripe.subscriptions.del(
          subId, {
            stripeAccount: `${conn_id}`,
          });
      }
    })
    .catch(err => {
      //....
      return null;
    })
});
I would suggest you watch the three videos about "JavaScript Promises" from the official Firebase video series, which explain why it is key to correctly chain and return the promises returned by the asynchronous methods.
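For comparison, the same flow reads more linearly with async/await; this is only a sketch under the same untested assumptions as the chained version above:

exports.onSecondPaymentAttempt = functions.database
  .ref("users/{userId}/something/somethingHistory/{subDbId}/newPayment")
  .onCreate(async (snapshot, context) => {
    const [cardToken, cus_id, conn_id] = snapshot.val().split(",");
    const { userId, subDbId } = context.params;
    try {
      await stripe.customers.createSource(
        cus_id,
        { source: cardToken },
        { stripeAccount: `${conn_id}` }
      );
      const ref = admin.database().ref(`users/${userId}/something/somethingHistory/${subDbId}`);
      const snap = await ref.once('value');
      const invoiceNumber = snap.child("invoiceId").val();
      const subId = snap.child("subscriptionId").val();
      const invoice = await stripe.invoices.pay(
        invoiceNumber,
        { expand: ['payment_intent', 'charge', 'subscription'] },
        { stripeAccount: `${conn_id}` }
      );
      if (invoice.payment_intent.status !== "succeeded") {
        // payment failed again: cancel the subscription
        await stripe.subscriptions.del(subId, { stripeAccount: `${conn_id}` });
      }
      return null;
    } catch (err) {
      console.error(err);
      return null;
    }
  });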

Close Event Triggers Before Data Events on File Stream

I've got a script that adds JSON data from a file to a DynamoDB table. The script uses the "fs" module to open a read stream to the JSON file and retrieve the data line by line. As the data is returned, it's inserted into a DynamoDB table. When the operation ends, an execution summary is given with the number of records processed, successfully inserted, and unsuccessfully inserted. The problem is that the summary executes before the file has been completely processed. As a result, the numbers are wrong.
The script...
ddb_table_has_records(table_name, (err, dat) => {
  if (dat.Count === 0 || force) {
    const transformStream = JSONStream.parse("*");
    const inputStream = fs.createReadStream(import_file);
    let record_position = 0;
    let count_imported_successful = 0;
    let count_imported_fail = 0;
    inputStream.pipe(transformStream).on("data", (Item) => {
      const params = {
        TableName: table_name,
        Item
      }
      ddb_client.put(params, (err, data) => {
        ++record_position;
        if (err) {
          console.error("Unable to add mapping for record " + record_position + ", error = " + err);
          ++count_imported_fail;
        } else {
          console.log("PutItem succeeded " + record_position);
          ++count_imported_successful;
        }
      });
    }).on("close", () => {
      console.log("=".repeat(70));
      console.log(`'Completed: ${import_file}' has been loaded into '${table_name}'.`);
      console.log(` Record Count: ${record_position}`);
      console.log(` Imported Record Count: ${count_imported_successful}`);
      console.log(` Rejected Record Count: ${count_imported_fail}`);
    });
  } else {
    console.log("=".repeat(70));
    console.log(`Completed: Skipping import of '${import_file}' into '${table_name}'.`);
  };
});
When this runs, it looks like the following
PS C:\> node --max-old-space-size=8192 .\try.js 'foo' 'us-west-2' 'development' '.\data.json' true
Target Profile: development
Target Region: us-west-2
Target Table: foo
Source File: .\data.json
Force Import: true
Confirming Table's State...
======================================================================
'Completed: .\data.json' has been loaded into 'foo'.
Record Count: 0
Imported Record Count: 0
Rejected Record Count: 0
PutItem succeeded 1
PutItem succeeded 2
PutItem succeeded 3
PutItem succeeded 4
...
The portion of the code that reports the record counts runs before the inserts complete, so the imported and rejected record counts are always wrong. It looks like the file stream closes while inserts are still occurring. I've tried changing from the "close" to the "end" event, with the same result.
Test this script with the following call...
node --max-old-space-size=8192 .\data.load.js 'foo' 'us-west-1' 'dev' '.\foo.default.json' true
Here is the content for the script I ultimately used...
'use strict'

if (process.argv.length < 6) {
  throw new Error('Please pass the table-name, aws-Region, aws-Profile, and file-path to the script.');
}
let [, , TableName, Region, Profile, ImportFile, Force] = process.argv;
process.env.AWS_SDK_LOAD_CONFIG = true;
process.env.AWS_PROFILE = Profile;
Force = typeof (Force) !== 'undefined' ? Force : false;

const AWS = require('aws-sdk');
const fs = require('fs');
const JSONStream = require('JSONStream');

AWS.config.update({ region: Region });
const ddbc = new AWS.DynamoDB.DocumentClient();

console.log('Target Profile: ', Profile);
console.log('Target Region: ', Region);
console.log('Target Table: ', TableName);
console.log('Source File: ', ImportFile);
console.log('Force Import: ', Force);

// Returns the number of records in a specified table
const ddb_table_has_items = (TableName) => {
  return new Promise((resolve, reject) => {
    const ddb_query_parameters = { TableName, Select: 'COUNT' }
    ddbc.scan(ddb_query_parameters, (error, data) => {
      (error) ? reject(error) : resolve(data);
    });
  });
}

const ddb_table_upsert_items = (TableName, Item) => {
  // note: the executor parameters must be (resolve, reject), in that order
  return new Promise((resolve, reject) => {
    const ddb_insert_payload = { TableName, Item };
    ddbc.put(ddb_insert_payload, (error, data) => {
      (error) ? reject(error) : resolve(data);
    });
  });
}

const ddb_bulk_load = (TableName, ImportFile) => {
  return new Promise((resolve, reject) => {
    let count_succeeded = 0;
    let count_failed = 0;
    let count_attempted = 0;
    let inserts = [];
    const json_stream = JSONStream.parse("*");
    const source_data_stream = fs.createReadStream(ImportFile);
    const ddb_source_item = source_data_stream.pipe(json_stream);
    ddb_source_item.on("data", (source_data_item) => {
      count_attempted++;
      let ddb_insert = ddb_table_upsert_items(TableName, source_data_item)
        .then((data) => count_succeeded++)
        .catch((error) => count_failed++);
      inserts.push(ddb_insert);
    });
    ddb_source_item.on("end", () => {
      Promise.all(inserts)
        .then(() => {
          resolve({ count_succeeded, count_failed, count_attempted });
        })
        .catch((error) => {
          console.log(error);
          reject(error);
        });
    });
    ddb_source_item.on("error", (error) => {
      reject(error);
    });
  });
}

(async () => {
  try {
    let proceed_with_import = false;
    if (Force.toString().toLowerCase() === 'true') {
      proceed_with_import = true;
    } else {
      const table_scan = await ddb_table_has_items(TableName);
      proceed_with_import = (table_scan.Count === 0);
    }
    if (proceed_with_import) {
      let ddb_inserts = await ddb_bulk_load(TableName, ImportFile);
      console.log("=".repeat(75));
      console.log("Completed: '%s' has been loaded into '%s'.", ImportFile, TableName);
      console.log(" Insert Attempted: %s", ddb_inserts.count_attempted);
      console.log(" Insert Succeeded: %s", ddb_inserts.count_succeeded);
      console.log(" Insert Failed : %s", ddb_inserts.count_failed);
    }
  } catch (error) {
    console.log(error);
  }
})();
Wrapping each insert in a promise, pushing the insert promises into an array, and using Promise.all() on that array did the trick. I execute the Promise.all() once we're finished reading from the file, when the "end" event is emitted on the ddb_source_item stream.
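Reduced to its core, the pattern looks like this (stream and doWork are hypothetical stand-ins for the pipe and the DynamoDB put):

// collect a promise per item as the data streams in...
const pending = [];
stream.on("data", (item) => {
  pending.push(doWork(item)); // hypothetical async worker
});

// ...and only summarize once every collected promise has settled
stream.on("end", async () => {
  await Promise.all(pending);
  console.log(`processed ${pending.length} items`);
});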

How to pull out handler using module exports?

I am building a Node application and trying to neatly organize my code. I wrote a serial module that imports the serial libraries and handles the connection. My intention was to write a basic module and then reuse it over and over in different projects as needed. The only part that changes per project is how the incoming serial data is handled. For this reason I would like to pull out the following handler and redefine it per project. How can I use module.exports to redefine only this section of the file?
I have tried adding myParser to the exports, but that gives me null, and I would be out of scope.
Handler to redefine/change/overload for each new project
myParser.on('data', (data) => {
  console.log(data)
  //DO SOMETHING WITH DATA
});
Example usage: main.js
const serial = require('./serial');
const dataParser = require('./dataParser');

//call connect with CL args
serial.connect(process.argv[2], Number(process.argv[3]))

serial.myParser.on('data', (data) => {
  //Do something unique with data
  if (dataParser.parse(data) == 0)
    serial.send('Error');
});
The full module, serial.js, is below:
const SerialPort = require('serialport');
const ReadLine = require('@serialport/parser-readline');
const _d = String.fromCharCode(13); //char EOL

let myPort = null;
let myParser = null;

function connect(port, baud) {
  let portName = port || `COM1`;
  let baudRate = baud || 115200;
  myPort = new SerialPort(portName, {baudRate: baudRate})
  myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))
  //Handlers
  myPort.on('open', () => {
    console.log(`port ${portName} open`)
  });
  myParser.on('data', (data) => {
    console.log(data)
  });
  myPort.on('close', () => {
    console.log(`port ${portName} closed`)
  });
  myPort.on('error', (err) => {
    console.error('port error: ' + err)
  });
}

function getPorts() {
  let portlist = [];
  SerialPort.list((err, ports) => {
    ports.forEach(port => {
      portlist.push(port.comName)
    });
  })
  return portlist;
}

function send(data) {
  myPort.write(JSON.stringify(data) + _d, function (err) {
    if (err) {
      return console.log('Error on write: ', err.message);
    }
    console.log(`${data} sent`);
  });
}

function close() {
  myPort.close();
}

module.exports = {
  connect, getPorts, send, close
}
The problem is that a module is used where a class or a factory would be appropriate. myParser cannot exist until connect is called, so it doesn't make sense to make it available as a module property: it would be unavailable by default, and multiple connect calls would override it.
It can be a factory:
module.exports = function connect(port, baud) {
  let portName = port || `COM1`;
  let baudRate = baud || 115200;
  let myPort = new SerialPort(portName, {baudRate: baudRate})
  let myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))
  //Handlers
  myPort.on('open', () => {
    console.log(`port ${portName} open`)
  });
  myParser.on('data', (data) => {
    console.log(data)
  });
  myPort.on('close', () => {
    console.log(`port ${portName} closed`)
  });
  myPort.on('error', (err) => {
    console.error('port error: ' + err)
  });
  function getPorts() {
    let portlist = [];
    SerialPort.list((err, ports) => {
      ports.forEach(port => {
        portlist.push(port.comName)
      });
    })
    return portlist;
  }
  function send(data) {
    myPort.write(JSON.stringify(data) + _d, function (err) {
      if (err) {
        return console.log('Error on write: ', err.message);
      }
      console.log(`${data} sent`);
    });
  }
  function close() {
    myPort.close();
  }
  return {
    myParser, getPorts, send, close
  };
}
So it could be used like:
const serial = require('./serial');
const connection = serial(...);

connection.myParser.on('data', (data) => {
  //Do something unique with data
  if (dataParser.parse(data) == 0)
    connection.send('Error');
});
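A variant that matches the stated goal even more directly, sketched here as an assumption rather than part of the original answer: have the factory accept the per-project data handler as a parameter, so callers never touch myParser at all.

const SerialPort = require('serialport');
const ReadLine = require('@serialport/parser-readline');

// hypothetical variant: the per-project handler is injected into connect()
module.exports = function connect(port, baud, onData) {
  const portName = port || `COM1`;
  const myPort = new SerialPort(portName, { baudRate: baud || 115200 });
  const myParser = myPort.pipe(new ReadLine({ delimiter: '\n' }));
  myParser.on('data', onData || ((data) => console.log(data)));
  return {
    send: (data) => myPort.write(JSON.stringify(data) + String.fromCharCode(13)),
    close: () => myPort.close(),
  };
}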
