I'm new to Node and fetch, and of course I'm having some problems with my code. Essentially, I'm trying to implement a project where I get some JSON data through an API request and store it in a MySQL database. The data is spread across multiple pages, so I used a simple for loop to fetch them all. I do this twice, as I have to get data from two different object lists. To store the data, I first establish a MySQL connection and then execute the SQL query inside another for loop that iterates over the individual objects.
Both the extraction of the JSON data and its storage in the MySQL database work correctly, but once I run node index.js in the terminal, the process keeps running and the terminal stays blocked until I force the process to terminate.
I used why-is-node-running and found out this:
Here's the code of index.js:
import mysql from 'mysql';
import fetch from 'node-fetch';
import log from 'why-is-node-running';
const URL0 = "https://atlas.ripe.net/api/v2/probes/?status=1";
const sql = "INSERT INTO probes (id, country, longitude, latitude) VALUES (?,?,?,?)";
const sql1 = "INSERT INTO anchors (id, country, longitude, latitude) VALUES (?,?,?,?)";
const PG_SIZE = 100;
let num_pages_probes=120;
let i=0, j=1, k=1, a=0;
const con = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: '',
    database: 'probes&anchors'
});
con.connect((err) => {
    if (err) {
        console.log("Connection not proper");
    } else {
        console.log("connected");
    }
});
/*
fetch(URL0)
    .then((response) => {
        if (!response.ok) {
            throw new Error("HTTP error! status: " + response.status);
        } else {
            return response.json();
        }
    })
    .then((data) => {
        num_pages_probes = Math.ceil(data.count / PG_SIZE);
        console.log(num_pages_probes);
    });
*/
for (j; j <= 2; j++) {
    console.log("j=" + j);
    let URLi = "https://atlas.ripe.net/api/v2/probes/?page=" + j + "&status=1";
    fetch(URLi)
        .then((response) => {
            if (!response.ok) {
                throw new Error("HTTP error! status: " + response.status);
            } else {
                return response.json();
            }
        })
        .then((data) => {
            for (let probe of data.results) {
                i++;
                let id0 = probe.id;
                let country = probe.country_code;
                let longitude = probe.geometry.coordinates[0];
                let latitude = probe.geometry.coordinates[1];
                con.query(sql, [id0, country, longitude, latitude], function (err, result) {
                    if (err) throw err;
                    console.log("1 record inserted");
                });
                console.log("id0: " + id0 + "\t" + "cc: " + country + "\t" + "long: " + longitude + "\t" + "lati: " + latitude);
                console.log(i);
            }
            // con.end();
        });
}
for (k; k <= 2; k++) {
    console.log("k=" + k);
    let URLi = "https://atlas.ripe.net/api/v2/anchors/?page=" + k;
    fetch(URLi)
        .then((response) => {
            if (!response.ok) {
                throw new Error("HTTP error! status: " + response.status);
            } else {
                return response.json();
            }
        })
        .then((data) => {
            for (let anchor of data.results) {
                a++;
                let id0 = anchor.id;
                let country = anchor.country;
                let longitude = anchor.geometry.coordinates[0];
                let latitude = anchor.geometry.coordinates[1];
                con.query(sql1, [id0, country, longitude, latitude], function (err, result) {
                    if (err) throw err;
                    console.log("1 record inserted");
                });
                console.log("id0: " + id0 + "\t" + "cc: " + country + "\t" + "long: " + longitude + "\t" + "lati: " + latitude);
                console.log(a);
            }
        });
}
setTimeout(function () {
    log(); // logs out active handles that are keeping node running
}, 100);
Can someone help me out, please? I don't know where to start.
PS: I purposely limited the loops to 2 pages, but it would actually be around 120.
You are not closing your MySQL connection, which keeps your process running.
You probably want to close your connection when all your fetches/inserts are done; the trick here is to ensure you've completed all your inserts before closing the connection.
You can have a look at the async/await syntax; it will help you ensure you close the connection only once your inserts are done.
A very simplified version would look like:
const fn = async () => {
    const con = mysql.createConnection({ ... });
    for (...) {
        const res = await fetch({ ... });
        const data = await res.json();
        await con.query({ ... });
    }
    await con.end();
};
fn();
NOTE: The mysql lib seems to only work with callbacks, so you will probably have to promisify the methods you need (see util.promisify).
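For example, here is a minimal sketch of how the probes part of the script could look once the mysql methods are promisified (untested; the table, URL and the reduced page count of 2 are taken from the question):
import util from 'util';
import mysql from 'mysql';
import fetch from 'node-fetch';

const con = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: '',
    database: 'probes&anchors'
});

// Promisified wrappers around the callback-based mysql methods.
const connect = util.promisify(con.connect).bind(con);
const query = util.promisify(con.query).bind(con);
const end = util.promisify(con.end).bind(con);

const sql = "INSERT INTO probes (id, country, longitude, latitude) VALUES (?,?,?,?)";

const run = async () => {
    await connect();
    for (let page = 1; page <= 2; page++) {
        const response = await fetch(`https://atlas.ripe.net/api/v2/probes/?page=${page}&status=1`);
        if (!response.ok) {
            throw new Error("HTTP error! status: " + response.status);
        }
        const data = await response.json();
        for (const probe of data.results) {
            // Each insert is awaited, so by the time the loop ends they are all done.
            await query(sql, [
                probe.id,
                probe.country_code,
                probe.geometry.coordinates[0],
                probe.geometry.coordinates[1]
            ]);
        }
    }
    await end(); // nothing keeps the event loop alive anymore, so the process exits
};

run().catch(console.error);
The anchors loop can be handled the same way, and since end() only runs after every insert has resolved, the process exits on its own.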
I am building an API with Node and Mongo that receives large volumes of data. I was getting an error because the size of the records stored in Mongo exceeded 16 MB, so I opted for the alternative Mongo offers in its GridFS documentation for inserting the records, which worked without problems. But I am running into trouble with filtering and updating, since I don't know how to do it; I read the documentation and there are several ways, but I can't figure out how to filter (find a record by one of its fields) or how to update.
The function to create a record works, but it takes some roundabout steps: it stores the JSON it receives in a file, then reads that file back and creates the record from it. I would have liked a more practical solution, such as simply inserting the JSON it receives without having to create a file with its content and then get the information back from that file. I attach the code to see if you can tell me how to solve this problem:
const { MongoClient, ObjectId, GridFSBucket,} = require('mongodb');
const { config } = require('../../config');
//const USER = encodeURIComponent(config.noRelDbUser);
//const PASSWORD = encodeURIComponent(config.noRelDbPassword);
const DB_NAME = config.noRelDbName;
const fs = require('fs');
const removeFile = require('../../modules/results/utils/RemoveFile');
// const MONGO_URI = `mongodb://${USER}:${PASSWORD}@${config.dbHost}:${config.dbPort}/admin?retryWrites=true&w=majority`
const MONGO_URI = `mongodb://${config.noRelDbHost}:${config.noRelDbPort}`;
class MongoLib {
    constructor() {
        this.client = new MongoClient(MONGO_URI, { useNewUrlParser: true, useUnifiedTopology: true });
        this.dbName = DB_NAME;
    }
    connect() {
        if (!MongoLib.connection) {
            MongoLib.connection = new Promise((resolve, reject) => {
                this.client.connect((err) => {
                    if (err) {
                        reject(err);
                    }
                    resolve(this.client.db(this.dbName));
                });
            });
        }
        return MongoLib.connection;
    }
    create(collection, data) {
        return this.connect()
            .then((db) => {
                return db.collection(collection).insertOne(data);
            })
            .then((result) => result.insertedId);
    }
    async createWithForBigData(collection, data, vr_id, remove = false) {
        let vrule_id = vr_id;
        return this.connect().then((db) => {
            try {
                var bucket = new GridFSBucket(db, {
                    bucketName: collection,
                    chunkSizeBytes: 260000,
                });
                let uploadStream = fs.createReadStream(data).pipe(bucket.openUploadStream(`resultsdetail${vrule_id}`));
                let id = uploadStream.id;
                uploadStream.on('error', (err) => {
                    console.log({ message: "Error uploading file" });
                    throw new Error(err);
                });
                uploadStream.on('finish', () => {
                    console.log({ message: "File uploaded successfully, stored under Mongo ObjectID: " + id });
                    if (remove === true) {
                        console.log('removing file from the storebigdata directory');
                        removeFile(data);
                    }
                    return id;
                });
            } catch (err) {
                console.log('an error occurred while storing big data', err);
                throw new Error(err);
            }
        });
    }
    findBigData() {
        //
    }
    UpdateBigData() {
        //
    }
}
module.exports = MongoLib;
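A minimal sketch of how the temporary file could be avoided, assuming the JSON arrives as an in-memory object: GridFS upload streams accept any readable stream, so stream.Readable can feed the JSON string directly, and files can be located with bucket.find() on the filename. The method and file names below follow the class above; this is a sketch, not a tested implementation:
const { Readable } = require('stream'); // add at the top of the file

// Add inside class MongoLib: upload the received JSON directly, no temp file.
async createBigDataInMemory(collection, data, vr_id) {
    const db = await this.connect();
    const bucket = new GridFSBucket(db, {
        bucketName: collection,
        chunkSizeBytes: 260000,
    });
    return new Promise((resolve, reject) => {
        const uploadStream = Readable
            .from([JSON.stringify(data)]) // one in-memory chunk instead of fs.createReadStream
            .pipe(bucket.openUploadStream(`resultsdetail${vr_id}`));
        uploadStream.on('error', reject);
        uploadStream.on('finish', () => resolve(uploadStream.id));
    });
}

// Sketch for findBigData: GridFS keeps file metadata in the <bucketName>.files
// collection, so bucket.find() takes a normal Mongo filter on those documents.
async findBigData(collection, vr_id) {
    const db = await this.connect();
    const bucket = new GridFSBucket(db, { bucketName: collection });
    return bucket.find({ filename: `resultsdetail${vr_id}` }).toArray();
}
GridFS files are immutable, so UpdateBigData() would typically mean uploading a replacement and deleting the old file with bucket.delete(oldId), rather than modifying it in place.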
The else branch (adding data) runs perfectly; what I want is the solution for my if branch, where the existing record should be updated.
const user = await Data.findOne({ email: req.body.email }).select("email").lean();
// here I want to update
if (user) {
    console.log("data updated");
} else {
    const dataa = new Data(req.body);
    let savedData = await dataa.save().then((resp) => console.log(resp));
    console.log(savedData, "after save");
}
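A minimal sketch of one way to fill in the if branch, assuming the intent is to overwrite the stored document with the request body; findOneAndUpdate with upsert: true could even collapse the whole if/else into one call (untested):
// Sketch: update inside the if branch
if (user) {
    await Data.updateOne({ email: req.body.email }, { $set: req.body });
    console.log("data updated");
} else {
    const dataa = new Data(req.body);
    const savedData = await dataa.save();
    console.log(savedData, "after save");
}

// Sketch: or replace the whole if/else with a single upsert
const savedData = await Data.findOneAndUpdate(
    { email: req.body.email },
    { $set: req.body },
    { new: true, upsert: true } // create the document if it does not exist yet
);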
This question already has answers here:
Using async/await with a forEach loop
Is it an anti-pattern to use async/await inside of a new Promise() constructor?
I'm currently building a script to extract data from a MySQL database and then populate a MongoDB database. In the process there are some asynchronous steps, like establishing a connection to MySQL (through the Sequelize library) and to MongoDB (through the Mongoose library), and some synchronous steps, like fetching and converting data.
I read a lot about async/await and Promises, and my script is mostly doing what I want, but I still have some issues.
Here's the code:
Migration.class.mjs
import MigrationBase from './Base/MigrationBase.class.mjs';
export default class Migration extends MigrationBase
{
    constructor(config) {
        super(config);
        this.mysqlData = {};
        this.mongoData = {};
    }
    async run() {
        await this.selectMySQLData();
        let docs = await this.convertMySQLToMongo();
        await this.checkConvertedData(docs);
        await this.insertMongoData();
    }
    async selectMySQLData() {
        return new Promise(async resolve => {
            await this.runSequelize();
            console.log('B - Grabbing MySQL data\n');
            for (var key in this.mysqlModels) {
                if (this.mysqlModels.hasOwnProperty(key)) {
                    let search = { raw: true };
                    this.mysqlData[key] = await this.mysqlModels[key].findAll(search);
                }
            }
            await this.closeSequelize();
            resolve();
        });
    };
    convertMySQLToMongo() {
        return new Promise(async resolve => {
            console.log('D - Convert MySQL data to MongoDB\n');
            let customersDocument = this.defaultDocuments.customers;
            let personalInfosDocument = this.defaultDocuments.personal_infos;
            let billingInfosDocument = this.defaultDocuments.billing_infos;
            // ... etc ...
            await Object.entries(this.mysqlData.customer).forEach(async keyRow => {
                let [key, row] = keyRow;
                await Object.entries(row).forEach(async keyValue => {
                    customersDocument = await this._processCustomersFields(customersDocument, 'Customer', keyValue);
                    personalInfosDocument = await this._processPersonalInfosFields(personalInfosDocument, 'PersonalInfo', keyValue);
                    billingInfosDocument = await this._processBillingInfosFields(billingInfosDocument, 'BillingInfo', keyValue);
                });
                resolve([
                    customersDocument,
                    personalInfosDocument,
                    billingInfosDocument,
                    // ... etc ...
                ]);
            });
        });
    };
    checkConvertedData([
        customersDocument,
        personalInfosDocument,
        billingInfosDocument,
        // ... etc ...
    ]) {
        return new Promise(resolve => {
            console.log('E - Checking converted data');
            if (! this._isNull(customersDocument, 'Customers')) {
                this.mongoData.customers = customersDocument;
            }
            if (! this._isNull(personalInfosDocument, 'PersonalInfos')) {
                this.mongoData.personal_infos = personalInfosDocument;
            }
            if (! this._isNull(billingInfosDocument, 'BillingInfos')) {
                this.mongoData.billing_infos = billingInfosDocument;
            }
            // ... etc ...
            resolve();
        });
    }
    async insertMongoData() {
        return new Promise(async resolve => {
            await this.runMongoose();
            console.log('G - Insert MongoDB data.');
            await this.mongoModels.customers.create(this.mongoData.customers);
            await this.mongoModels.personal_infos.create(this.mongoData.personal_infos);
            await this.mongoModels.billing_infos.create(this.mongoData.billing_infos);
            // ... etc ...
            await this.closeMongoose();
            resolve();
        });
    };
    _processCustomersFields(defaultDoc, docName, [colName, val]) {
        return new Promise(resolve => {
            switch (colName) {
                case 'id_customer':
                    console.log(`${docName}: ${colName} => ${val}`);
                    defaultDoc.id = val;
                    break;
                case 'email_customer':
                    console.log(`${docName}: ${colName} => ${val}`);
                    defaultDoc.email = val;
                    break;
                case 'password_customer':
                    console.log(`${docName}: ${colName} => ${val}`);
                    defaultDoc.password = val;
                    break;
                // ... etc ...
            }
            resolve(defaultDoc);
        });
    }
    _processPersonalInfosFields(defaultDoc, docName, [colName, val]) {
        return new Promise(resolve => {
            switch (colName) {
                // ... Same kind of code as in _processCustomersFields() ...
            }
            resolve(defaultDoc);
        });
    }
    _processBillingInfosFields(defaultDoc, docName, [colName, val]) {
        return new Promise(resolve => {
            switch (colName) {
                // ... Same kind of code as in _processCustomersFields() ...
            }
            resolve(defaultDoc);
        });
    }
    _isNull(document, mongoName) {
        if (document !== null) {
            console.log(`\n${mongoName}:\n`, JSON.stringify(document));
            return false;
        } else {
            console.log(`Error processing \`${mongoName}\` data!`);
            return true;
        }
    }
    _valueExists(val) {
        return (val !== null && val !== "" && typeof val !== "undefined")
            ? true
            : false;
    }
}
MigrationBase.class.mjs
import Sequelize from 'sequelize';
import DataTypes from 'sequelize';
import Mongoose from 'mongoose';
import Crypto from 'crypto';
import Models from '../../../models.mjs';
import Schemas from '../../../schemas.mjs';
export default class MigrationBase
{
    constructor(config) {
        this.config = config;
        this.sequelize = this.createSequelize();
        this.mongoose = Mongoose;
        this.defaultDocuments = this.initDefaultDocuments();
        this.mysqlModels = this.initMysqlModels();
        this.mongoModels = this.initMongoSchemas();
        this.mysqlData = {};
        this.mongoData = {};
    }
    createSequelize() {
        return new Sequelize(
            this.config.mysql.dbName,
            this.config.mysql.dbUser,
            this.config.mysql.dbPass,
            this.config.sequelize
        );
    }
    initDefaultDocuments() {
        const defaultDocument = {
            "deleted_at": 0 // Thu Jan 01 1970 01:00:00 GMT+0100
        };
        let defaultDocuments = {
            "customers": Object.assign({}, defaultDocument),
            "personal_infos": Object.assign({}, defaultDocument),
            "billing_infos": Object.assign({}, defaultDocument)
            // ... etc ...
        };
        return defaultDocuments;
    }
    initMysqlModels() {
        return {
            "customer": Models.Customer(this.sequelize, DataTypes),
            "billing_address": Models.BillingAddress(this.sequelize, DataTypes),
            // ... etc ...
        };
    }
    initMongoSchemas() {
        return {
            "customers": this.mongoose.model('Customer', Schemas.Customers),
            "personal_infos": this.mongoose.model('PersonalInfo', Schemas.PersonalInfos),
            "billing_infos": this.mongoose.model('BillingInfo', Schemas.BillingInfos),
            // ... etc ...
        };
    }
    async runSequelize() {
        console.log('A - Connection to MySQL');
        try {
            await this.sequelize.authenticate();
            console.log('Connection to MySQL has been established successfully.\n');
        } catch (err) {
            console.error("Unable to connect to the MySQL database:", err + '\n');
        }
    }
    async closeSequelize() {
        console.log('C - Closing MySQL connection.\n');
        await this.sequelize.close();
    };
    runMongoose() {
        return new Promise(async resolve => {
            console.log('F - Connection to MongoDB');
            try {
                await this.mongoose.connect(
                    `mongodb://${this.config.mongo.dbHost}:${this.config.mongo.dbPort}/${this.config.mongo.dbName}`,
                    { useNewUrlParser: true, useUnifiedTopology: true }
                );
                console.log('Connection to MongoDB has been established successfully.');
            } catch (err) {
                console.error('Unable to connect to the MongoDB database: ', err);
            }
            resolve();
        });
    }
    async closeMongoose() {
        console.log('H - Closing MongoDB connection.');
        await this.mongoose.connection.close();
    };
}
And here is the log output:
A - Connection to MySQL
Connection to MySQL has been established successfully.
B - Grabbing MySQL data
C - Closing MySQL connection.
D - Convert MySQL data to MongoDB
Customer: id_customer => 1
Customer: email_customer => contact@example.com
Customer: password_customer => 0a1b2c3d4e5f0a1b2c3d4e5f0a1b2c3d
// ... etc ...
PersonalInfo: id_customer => 1
PersonalInfo: lastname_customer => Doe
PersonalInfo: firstname_customer => John
// ... etc ...
E - Checking converted data
Customers:
{"deleted_at":0,"id":"000000000000000000000001","email":"contact#example.com","password":"0a1b2c3d4e5f0a1b2c3d4e5f0a1b2c3d", ... etc ... }
PersonalInfos:
{"deleted_at":0,"customer_id":"000000000000000000000001","last_name":"Doe","first_name":"John", ... etc ... }
BillingInfos:
{"deleted_at":0}
BillingInfos: id_customer => 1
BillingInfo: company => ExampleCompany
F - Connection to MongoDB
BillingInfos: lastname => Doe
BillingInfo: firstname => John
Connection to MongoDB has been established successfully.
G - Insert MongoDB data.
/home/user/Workspaces/namespace/project-name/node_modules/mongoose/lib/document.js:2757
this.$__.validationError = new ValidationError(this);
^
ValidationError: BillingInfos validation failed: id_customer: Cast to ObjectId failed for value "1" (type number) at path "customer_id", values: Path `values` is required., id: Path `id` is required.
Here we can see in the right order:
A - Connection to MySQL
B - Grabbing MySQL data
C - Closing MySQL connection
D - Convert MySQL data to MongoDB
Then we can see E - Checking converted data, but the conversion process is not finished, despite the await statement and the fact that it returns a Promise.
After that we can also see BillingInfos: id_customer => 1 and BillingInfo: company => ExampleCompany, meaning that the conversion process is still doing work in the loop.
Then F - Connection to MongoDB.
Then more conversion logs, BillingInfos: lastname => Doe and BillingInfo: firstname => John (the conversion process is still working in the loop).
Then G - Insert MongoDB data.
And finally a validation error, because some Mongo documents are incomplete and so the schema rules are not fulfilled.
Question
So the question is: what am I doing wrong here?
As I said, I read a lot about async/await and Promises, but I still struggle to understand why it's not working.
Thanks in advance, and let me know if you need more info.
That's because await will not work inside forEach(), which is what you are trying to do in your convertMySQLToMongo() function.
There are many ways to solve this; one of them is using for ... of instead of forEach():
for (const keyRow of Object.entries(this.mysqlData.customer)) {
    let [key, row] = keyRow;
    for (const keyValue of Object.entries(row)) {
        customersDocument = await this._processCustomersFields(customersDocument, 'Customer', keyValue);
        personalInfosDocument = await this._processPersonalInfosFields(personalInfosDocument, 'PersonalInfo', keyValue);
        billingInfosDocument = await this._processBillingInfosFields(billingInfosDocument, 'BillingInfo', keyValue);
    }
}
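As a side note, tying in the second linked question above: the new Promise(async resolve => ...) wrapper then becomes unnecessary, since an async method already returns a Promise. A sketch of convertMySQLToMongo() as a plain async method (same logic as the question's code, untested):
async convertMySQLToMongo() {
    console.log('D - Convert MySQL data to MongoDB\n');
    let customersDocument = this.defaultDocuments.customers;
    let personalInfosDocument = this.defaultDocuments.personal_infos;
    let billingInfosDocument = this.defaultDocuments.billing_infos;
    // ... etc ...
    for (const [key, row] of Object.entries(this.mysqlData.customer)) {
        for (const keyValue of Object.entries(row)) {
            customersDocument = await this._processCustomersFields(customersDocument, 'Customer', keyValue);
            personalInfosDocument = await this._processPersonalInfosFields(personalInfosDocument, 'PersonalInfo', keyValue);
            billingInfosDocument = await this._processBillingInfosFields(billingInfosDocument, 'BillingInfo', keyValue);
        }
    }
    // Returning from an async function resolves its promise, so run()
    // only continues once every row has been converted.
    return [
        customersDocument,
        personalInfosDocument,
        billingInfosDocument,
        // ... etc ...
    ];
}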
When I run the send route, I get the error:
MongoError: Cannot call abortTransaction twice
and
MongoError: Cannot call abortTransaction after calling commitTransaction.
I have two collections, car and color, and at the same time I want to push to two arrays, car.colors.push(model); and color.brands.push(year);, then save to the database. I want to use withTransaction and a session from Mongoose, because I don't want a situation where, due to an error, car.colors.push(model); is saved in the database but color.brands.push(year); isn't.
module.exports.send = async (req, res) => {
    const sess = await mongoose.startSession();
    if (role === 'car') {
        try {
            await sess.withTransaction(async () => {
                const car = await Cars.findOne({ _id: sender });
                const color = await Colors.findOne({ _id: keeper });
                let model = {
                    contentInfo: {
                        msg: msg
                    }
                };
                let year = {
                    contentInfo: {
                        msg: msg
                    }
                };
                car.colors.push(model);
                color.brands.push(year);
                await car.save({ session: sess });
                await color.save({ session: sess });
                await sess.commitTransaction();
                sess.endSession();
                return res.json(car);
            });
        } catch (error) {
            await sess.abortTransaction();
            sess.endSession();
            throw error;
        }
    }
};
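A minimal sketch of the usual fix (untested): withTransaction() already commits when its callback resolves and aborts when it throws, so the manual commitTransaction()/abortTransaction() calls are what produce the "cannot call twice" errors and can simply be dropped. The role, sender, keeper and msg variables come from the question's surrounding code:
module.exports.send = async (req, res) => {
    const sess = await mongoose.startSession();
    if (role === 'car') {
        try {
            let car;
            await sess.withTransaction(async () => {
                car = await Cars.findOne({ _id: sender }).session(sess);
                const color = await Colors.findOne({ _id: keeper }).session(sess);
                car.colors.push({ contentInfo: { msg: msg } });
                color.brands.push({ contentInfo: { msg: msg } });
                // withTransaction commits if this callback resolves and
                // aborts if it throws; no manual commit/abort needed.
                await car.save({ session: sess });
                await color.save({ session: sess });
            });
            return res.json(car);
        } finally {
            sess.endSession();
        }
    }
};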
I'm relatively new to JavaScript. I'm trying to work with Stripe and find my way around a user submitting another payment method and then paying an invoice with that method; if the payment fails again, it should remove the subscription altogether. I'm using Firebase Realtime Database with Google Cloud Functions and Node.js 8.
Here is what I have so far:
exports.onSecondPaymentAttempt = functions.database.ref("users/{userId}/something/somethingHistory/{subDbId}/newPayment").onCreate((snapshot, context) => {
    var s = snapshot.val();
    var fields = s.split(",");
    const cardToken = fields[0];
    const cus_id = fields[1];
    const conn_id = fields[2];
    const subDbId = context.params.subDbId;
    const userId = context.params.userId;
    return stripe.customers.createSource(
        cus_id,
        { source: cardToken }, {
            stripeAccount: `${conn_id}`,
        },
        (err, card) => {
            console.log(err);
            if (err) {
                return console.log("error attaching card " + err);
            } else {
                const invoiceNo = admin.database().ref(`users/${userId}/something/somethingHistory/${subDbId}`);
                return invoiceNo.once('value').then(snapshot => {
                    const invoiceNumber = snapshot.child("invoiceId").val();
                    const subId = snapshot.child("subscriptionId").val();
                    return stripe.invoices.pay(
                        invoiceNumber,
                        {
                            expand: ['payment_intent', 'charge', 'subscription'],
                        }, {
                            stripeAccount: `${conn_id}`,
                        },
                        (err, invoice) => {
                            if (err) {
                                return console.log("error paying invoice " + err);
                            } else {
                                if (invoice.payment_intent.status === "succeeded") {
                                    //DO SOME CODE
                                    return console.log("New Payment succeeded for " + invoiceNumber);
                                } else {
                                    //DO SOME OTHER CODE
                                    //CANCEL
                                    return stripe.subscriptions.del(
                                        subId, {
                                            stripeAccount: `${conn_id}`,
                                        },
                                        (err, confirmation) => {
                                            if (err) {
                                                return console.log("Subscription error");
                                            } else {
                                                return console.log("Subscription cancelled");
                                            }
                                        });
                                }
                            }
                        });
                });
            }
        });
});
To me it looks like an incredibly inefficient and ugly way of achieving the effect, and overall the user sits waiting for a response for approximately 15 seconds. Although the function finishes its execution after 1862 ms, I still get responses 5 to 10 seconds later.
What's the most efficient way of achieving the same desired effect of registering a new payment source, paying the subscription and then handling the result of that payment?
You should use the Promises returned by the Stripe asynchronous methods, as follows (untested; it probably needs some fine-tuning, in particular the objects passed to the Stripe methods):
exports.onSecondPaymentAttempt = functions.database.ref("users/{userId}/something/somethingHistory/{subDbId}/newPayment").onCreate((snapshot, context) => {
    var s = snapshot.val();
    var fields = s.split(",");
    const cardToken = fields[0];
    const cus_id = fields[1];
    const conn_id = fields[2];
    const subDbId = context.params.subDbId;
    const userId = context.params.userId;
    // Declared here so the later .then() blocks can read them.
    let invoiceNumber;
    let subId;
    return stripe.customers.createSource(
        //Format of this object to be confirmed....
        cus_id,
        { source: cardToken },
        { stripeAccount: `${conn_id}` }
    )
        .then(card => {
            const invoiceNo = admin.database().ref(`users/${userId}/something/somethingHistory/${subDbId}`);
            return invoiceNo.once('value');
        })
        .then(snapshot => {
            invoiceNumber = snapshot.child("invoiceId").val();
            subId = snapshot.child("subscriptionId").val();
            return stripe.invoices.pay(
                invoiceNumber,
                { expand: ['payment_intent', 'charge', 'subscription'] },
                { stripeAccount: `${conn_id}` }
            );
        })
        .then(invoice => {
            if (invoice.payment_intent.status === "succeeded") {
                //DO SOME CODE
                console.log("New Payment succeeded for " + invoiceNumber);
                return null;
            } else {
                //DO SOME OTHER CODE
                //CANCEL
                return stripe.subscriptions.del(
                    subId,
                    { stripeAccount: `${conn_id}` }
                );
            }
        })
        .catch(err => {
            //....
            return null;
        });
});
I would suggest you watch the three videos about "JavaScript Promises" from the official Firebase video series, which explain why it is key to correctly chain and return the promises returned by the asynchronous methods.
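For comparison, here is a sketch of the same chain with async/await (my variant of the code above, equally untested), which also avoids having to share invoiceNumber and subId across .then() blocks:
exports.onSecondPaymentAttempt = functions.database
    .ref("users/{userId}/something/somethingHistory/{subDbId}/newPayment")
    .onCreate(async (snapshot, context) => {
        const [cardToken, cus_id, conn_id] = snapshot.val().split(",");
        const { subDbId, userId } = context.params;
        try {
            await stripe.customers.createSource(
                cus_id,
                { source: cardToken },
                { stripeAccount: `${conn_id}` }
            );
            const snap = await admin.database()
                .ref(`users/${userId}/something/somethingHistory/${subDbId}`)
                .once('value');
            const invoiceNumber = snap.child("invoiceId").val();
            const subId = snap.child("subscriptionId").val();
            const invoice = await stripe.invoices.pay(
                invoiceNumber,
                { expand: ['payment_intent', 'charge', 'subscription'] },
                { stripeAccount: `${conn_id}` }
            );
            if (invoice.payment_intent.status === "succeeded") {
                console.log("New Payment succeeded for " + invoiceNumber);
                return null;
            }
            // Payment failed again: cancel the subscription.
            return await stripe.subscriptions.del(subId, { stripeAccount: `${conn_id}` });
        } catch (err) {
            console.log(err);
            return null;
        }
    });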