Storing data to a MongoDB collection with a specific name - javascript

I created a script which receives data from the Binance API and sends it to MongoDB.
The script starts every hour with the Node-Schedule package and receives three different sets of data depending on the symbol (BTCUSDT, ETHUSDT, ATOMBTC). I also created a script which automatically stores the received data in a MongoDB collection.
Goal: I would like to store each symbol's data in its own collection. My thought was to make something like an if statement and have the symbol name match the collection name. For example
if symbol name = collection name => save to collection
Can this approach work? I will have three symbols and three collections, and both will have the same names.
Full Code
// Assumed requires, not shown in the original post: node-fetch, node-schedule and the MongoDB driver.
const fetch = require('node-fetch');
const schedule = require('node-schedule');
const { MongoClient } = require('mongodb');

var today = new Date();
var date = today.getFullYear() + '-' + (today.getMonth() + 1) + '-' + today.getDate();
var time = today.getHours() + ":" + today.getMinutes() + ":" + today.getSeconds();
var dateTime = date + ' ' + time;

var symbols = ["BTCUSDT", "ETHUSDT", "ATOMBTC"];
let cnt = 0;

const callIt = () => {
    fetch(`https://api.binance.com/api/v3/klines?symbol=${symbols[cnt]}&interval=1h&limit=1`)
        .then(res => res.json())
        .then(data => {
            const btcusdtdata = data.map(d => {
                return {
                    Open: parseFloat(d[1]),
                    High: parseFloat(d[2]),
                    Low: parseFloat(d[3]),
                    Close: parseFloat(d[4]),
                    Volume: parseFloat(d[5])
                }
            });
            console.log(btcusdtdata);
            saveToBTCUSDT(btcusdtdata);
            cnt++;
            if (cnt < symbols.length) setTimeout(callIt, 3000)
        })
        .catch((err) => {
            console.log(err);
        })
};

const j = schedule.scheduleJob('0 * * * *', callIt)

const saveToBTCUSDT = function(BTCdata) {
    const url = 'mongodb+srv://username:password#cluster0-1kunr.mongodb.net/<dbname>?retryWrites=true&w=majority';
    MongoClient.connect(url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, db) => {
        if (err) throw err;
        const dbo = db.db('CryptoCurrencies');
        const myobj = { Name: 'BTCUSDT', Array: BTCdata, Date: dateTime };
        dbo.collection('BTCUSDT').insertOne(myobj, (error, res) => {
            if (error) throw error;
            console.log('1 document inserted');
            db.close();
        });
    });
};

You can pass the current index (cnt) as a parameter and then address the desired collection with that index.
...
saveToBTCUSDT(btcusdtdata, cnt);
...
const saveToBTCUSDT = function(BTCdata, index) {
    const url = 'mongodb+srv://username:password#cluster0-1kunr.mongodb.net/<dbname>?retryWrites=true&w=majority';
    MongoClient.connect(url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, db) => {
        if (err) throw err;
        const dbo = db.db('CryptoCurrencies');
        const myobj = { Name: symbols[index], Array: BTCdata, Date: dateTime };
        dbo.collection(symbols[index]).insertOne(myobj, (error, res) => {
            if (error) throw error;
            console.log('1 document inserted');
            db.close();
        });
    });
};
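As a variation on the same idea, not part of the original answer: you could pass the symbol string itself instead of the index and reuse a single client rather than opening a new connection for every insert. A rough sketch, assuming the same url, symbols, dateTime and CryptoCurrencies database as above; saveToCollection is a hypothetical name:
// Sketch only: share one MongoClient and pick the collection by symbol name.
const { MongoClient } = require('mongodb');
const client = new MongoClient(url, { useNewUrlParser: true, useUnifiedTopology: true });
let connecting; // connect once, reuse the connection afterwards

const saveToCollection = async (symbol, data) => {
    if (!connecting) connecting = client.connect();
    await connecting;
    const dbo = client.db('CryptoCurrencies');
    await dbo.collection(symbol).insertOne({ Name: symbol, Array: data, Date: dateTime });
    console.log(`1 document inserted into ${symbol}`);
};

// inside the fetch handler:
// saveToCollection(symbols[cnt], btcusdtdata).catch(console.log);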

Related

Need to integrate Node JS (node-telegram-bot-api) and MySQL

The main problem is that I have errors with the nesting, and I can't figure out how to fix it correctly (after if (data === '301')). The bot exchanges messages with the user: the bot asks a question, the user answers in free form, the answer is assigned to a variable so the information can later be written to the database, and then the next question follows in the same way. All the data is written to the database correctly, but if the user wants to leave another request (there is an engineerOptions97 button at the end), duplication starts and all the answers arrive twice, then four times, and so on.
const TelegramApi = require('node-telegram-bot-api')
const {botOptions, engineerOptions, engineerOptions23, engineerOptions99, engineerOptions21, engineerOptions22, engineerOptions98, engineerOptions25, engineerOptions97, engineerOptions999, engineerOptions96} = require('./button')
const token = 'хх' //token here
const bot = new TelegramApi(token, {polling: true})

const start = () => {
    var mysql = require('mysql');
    var pool = mysql.createPool({
        host: "localhost",
        user: "root",
        database: "node_bot",
        password: "",
        connectionLimit: "20",
        queueLimit: "0",
        waitForConnections: "true"
    });
    pool.getConnection(function(err, connection) {
        if (err) throw err;
        bot.on('callback_query', async msg => {
            const data = msg.data;
            const chatID = msg.message.chat.id;
            const opts = {
                reply_markup: {
                },
                parse_mode: 'Markdown'
            };
            var d = new Date()
            var datestring = d.getFullYear() + "." + (d.getMonth()+1) + "." + d.getDate()
            var timestring = d.getHours() + ":" + d.getMinutes()
            if (data === '301') {
                bot.sendMessage(chatID, `Where we need fix?`).then(function (result11) {
                    console.log(result11)
                    bot.on('message', async msg => {
                        var place = msg.text;
                        console.log(place);
                        bot.sendMessage(chatID, `What we need fix?`).then(function (result12) {
                            console.log(result12)
                            bot.on('message', reqtext => {
                                const texttext = reqtext.text;
                                console.log(texttext);
                                bot.sendMessage(chatID, `We understand thanks`, engineerOptions97)
                                pool.query("INSERT INTO `request` (`date`, `time`, `block`, `place`, `text`) VALUES ('"+datestring+"', '"+timestring+"', 'САНТЕХНИКИ', '"+place+"', '"+texttext+"')", function(err, results) {
                                    connection.release();
                                    if(err) console.log(err);
                                    console.log(results);
                                });
                            })
                        })
                    })
                })
            }
            return
        })
    });
}
start()
It works, but not as cleanly as I would like, and my requests are duplicated.
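A likely cause, offered as an assumption rather than something confirmed in the post: every time the '301' branch runs, new bot.on('message', ...) listeners are registered inside the callback and never removed, so each further request adds another set of handlers and the answers arrive twice, then four times, and so on. A minimal sketch of one way to avoid stacking listeners, using once instead of on:
// Sketch only, not a drop-in fix. `askOnce` is a hypothetical helper; `bot`,
// `pool`, `engineerOptions97`, `datestring` and `timestring` are assumed to
// exist as in the question.
const askOnce = (chatID, question) =>
    new Promise(resolve => {
        bot.sendMessage(chatID, question).then(() => {
            // once() (inherited from EventEmitter) removes the listener after one
            // message, so handlers do not accumulate between requests
            bot.once('message', msg => resolve(msg.text));
        });
    });

bot.on('callback_query', async msg => {
    const chatID = msg.message.chat.id;
    if (msg.data === '301') {
        const place = await askOnce(chatID, 'Where we need fix?');
        const texttext = await askOnce(chatID, 'What we need fix?');
        bot.sendMessage(chatID, 'We understand thanks', engineerOptions97);
        // Parameterised query instead of string concatenation
        pool.query(
            'INSERT INTO `request` (`date`, `time`, `block`, `place`, `text`) VALUES (?, ?, ?, ?, ?)',
            [datestring, timestring, 'САНТЕХНИКИ', place, texttext],
            err => { if (err) console.log(err); }
        );
    }
});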

Finding data between two dates using a query in Mongoose

How would I write a query which checks createdAt and finds all objects between 2022-05-22 and 2022-06-22? I tried the following code/API, but it didn't give back the right data:
getPoDetails = async (req, res, next) => {
    try {
        let fromDate = req.query.fromDate;
        let toDate = req.query.toDate;
        var findQry = {};
        if (fromDate && toDate) {
            findQry.createdAt = { $gte: new Date(fromDate), $lte: new Date(toDate) };
        }
        allmodels.poDetailsModel.find(findQry)
            .then(data => res.status(200).send(data))
            .catch(err => res.status(400).send(err));
    } catch (error) {
        next(error);
    }
}
API link: localhost:8000/getPoDetails?fromDate=2022-05-22&toDate=2022-06-22
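One possible explanation, offered as an assumption rather than a confirmed diagnosis: new Date('2022-06-22') is midnight at the start of that day, so documents created later on the end date fall outside $lte, and the comparison only behaves as expected if createdAt is stored as a Date (for example via Mongoose timestamps) rather than a string. A small sketch of an inclusive range under those assumptions:
// Sketch: build an inclusive createdAt range from ?fromDate=YYYY-MM-DD&toDate=YYYY-MM-DD
var findQry = {};
if (fromDate && toDate) {
    const start = new Date(fromDate);        // e.g. 2022-05-22T00:00:00.000Z
    const end = new Date(toDate);
    end.setUTCHours(23, 59, 59, 999);        // include the whole of the end date
    findQry.createdAt = { $gte: start, $lte: end };
}
// then, exactly as in the question:
// allmodels.poDetailsModel.find(findQry)...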

Backup MongoDB without pain using Node.js

This is my module for creating a MongoDB backup with a Node.js server:
const root = require('./root');
const { spawn } = require('child_process');
const config = require('../config.json');

setTimeout(() => {
    backupMongoDB();
}, 1000);

function backupMongoDB() {
    const DB_NAME = 'pors_db';
    const DATE = getTodayDate();
    const ARCHIVE_PATH = `${root}/db_backup/${DB_NAME}-${DATE}.gzip`;
    const child = spawn('mongodump', [
        `--db=${DB_NAME}`,
        `--archive=${ARCHIVE_PATH}`,
        '--gzip',
    ]);
    child.stdout.on('data', (data) => {
        console.log('stdout:\n', data);
    });
    child.stderr.on('data', (data) => {
        console.log('stderr:\n', Buffer.from(data).toString());
    });
    child.on('error', (error) => {
        console.log('error:\n', error);
    });
    child.on('exit', (code, signal) => {
        if (code) console.log('Process exit with code:', code);
        else if (signal) console.log('Process killed with signal:', signal);
        else console.log('Backup is successfull..');
    });
    function getTodayDate() {
        const date = new Date();
        const dd = String(date.getDate()).padStart(2, '0');
        const mm = String(date.getMonth() + 1).padStart(2, '0');
        const yyyy = date.getFullYear();
        return `${yyyy}-${mm}-${dd}`;
    }
}

module.exports = backupMongoDB;
This gives me an error:
error creating intents to dump. error for getting collections for database pors_db : unauthorized command listCollections requires authentication.
I tried to connect with these options, but it returns an error saying that it's not a function:
const child = spawn('mongodump', [
    `--db=${DB_NAME}`,
    `--uri=${config.MONGODB_URI_SERVER}` // this is what I added
    `--archive=${ARCHIVE_PATH}`,
    '--gzip',
]);
How can I back up my db without pain?
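Two things stand out, both offered as assumptions rather than a verified fix: the unauthorized error suggests mongodump needs credentials, which can be supplied through a connection string, and the "not a function" error is most likely the missing comma after the --uri line, which makes the next template literal act as a tagged-template call on the previous string. A sketch of the spawn call with the comma restored, assuming config.MONGODB_URI_SERVER contains the credentials and database name:
// Sketch: let mongodump authenticate via the connection string.
// MONGODB_URI_SERVER is assumed to look like
// mongodb://user:pass@host:27017/pors_db?authSource=admin
const child = spawn('mongodump', [
    `--uri=${config.MONGODB_URI_SERVER}`,   // note the comma, missing in the snippet above
    `--archive=${ARCHIVE_PATH}`,
    '--gzip',
]);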

Returning Output from AWS.DynamoDB.DocumentClient.Scan() Call

I've got a function that returns the number of records from a DynamoDB table (Things):
const table = 'Things';
const region = 'us-east-1';
const profile = 'development';

process.env.AWS_SDK_LOAD_CONFIG = true;
process.env.AWS_PROFILE = profile;

const AWS = require('aws-sdk');
AWS.config.update({ region: region });

function ddb_table_has_records(table_name) {
    const ddb_client = new AWS.DynamoDB.DocumentClient();
    const ddb_query_parameters = {
        TableName: table_name,
        Select: 'COUNT'
    }
    const results = ddb_client.scan(ddb_query_parameters).promise();
    results.then((data) => {
        console.log(data.Count);
        return data;
    }).catch((err) => {
        console.log("Error: ", err);
    })
}

console.log(ddb_table_has_records(table));
console.log(ddb_table_has_records(table));
When I run this code, I get the following...
PS C:\> node .\get-count-thing.js
undefined
3951
I'm not capturing the data from the scan in the following, although I can see it in the console.log() call:
console.log(ddb_table_has_records(table));
What am I mucking up?
Posting my fix in case anyone has the same question. I had to make two changes to retrieve the items from the table; I needed to...
...project ALL_ATTRIBUTES
...iterate over the collection of Items returned
The following was my function with changes:
function ddb_table_has_records(table_name) {
    const ddb_client = new AWS.DynamoDB.DocumentClient();
    const ddb_query_parameters = {
        TableName: table_name,
        Select: 'ALL_ATTRIBUTES'
    }
    const results = ddb_client.scan(ddb_query_parameters).promise();
    results.then((data) => {
        console.log(data.Count);
        data.Items.forEach((thing) => {
            console.log(thing);
        });
    }).catch((err) => {
        console.log("Error: ", err);
    })
}
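A side note on the original "undefined" output, not something stated in the post: ddb_table_has_records never returns the scan promise, so console.log(ddb_table_has_records(table)) prints undefined before the scan resolves. A sketch that returns the promise and consumes it at the call site:
// Sketch: return the scan promise instead of swallowing it inside the function.
function ddb_table_has_records(table_name) {
    const ddb_client = new AWS.DynamoDB.DocumentClient();
    const ddb_query_parameters = { TableName: table_name, Select: 'COUNT' };
    return ddb_client.scan(ddb_query_parameters).promise();
}

ddb_table_has_records(table)
    .then((data) => console.log(data.Count))
    .catch((err) => console.log('Error: ', err));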

Close Event Triggers Before Data Events on File Stream

I've got a script that adds JSON data from a file to a DynamoDB table. The script uses the "fs" module to open a read stream to the JSON file and retrieve the data line by line. As the data is returned, it's inserted into a DynamoDB table. When the operation ends, an execution summary is given with the number of records processed, successfully inserted, and unsuccessfully inserted. The problem is that the summary executes before the file has been completely processed, so the numbers are wrong.
The script...
ddb_table_has_records(table_name, (err, dat) => {
    if (dat.Count === 0 || force) {
        const transformStream = JSONStream.parse("*");
        const inputStream = fs.createReadStream(import_file);
        let record_position = 0;
        let count_imported_successful = 0;
        let count_imported_fail = 0;
        inputStream.pipe(transformStream).on("data", (Item) => {
            const params = {
                TableName: table_name,
                Item
            }
            ddb_client.put(params, (err, data) => {
                ++record_position;
                if (err) {
                    console.error("Unable to add mapping for record " + record_position + ", error = " + err);
                    ++count_imported_fail;
                } else {
                    console.log("PutItem succeeded " + record_position);
                    ++count_imported_successful;
                }
            });
        }).on("close", () => {
            console.log("=".repeat(70));
            console.log(`'Completed: ${import_file}' has been loaded into '${table_name}'.`);
            console.log(`  Record Count: ${record_position}`);
            console.log(`  Imported Record Count: ${count_imported_successful}`);
            console.log(`  Rejected Record Count: ${count_imported_fail}`);
        });
    } else {
        console.log("=".repeat(70));
        console.log(`Completed: Skipping import of '${import_file}' into '${table_name}'.`);
    };
});
When this runs, it looks like the following
PS C:\> node --max-old-space-size=8192 .\try.js 'foo' 'us-west-2' 'development' '.\data.json' true
Target Profile: development
Target Region: us-west-2
Target Table: foo
Source File: .\data.json
Force Import: true
Confirming Table's State...
======================================================================
'Completed: .\data.json' has been loaded into 'foo'.
Record Count: 0
Imported Record Count: 0
Rejected Record Count: 0
PutItem succeeded 1
PutItem succeeded 2
PutItem succeeded 3
PutItem succeeded 4
...
The portion of the code that gets the record counts runs before the inserts complete, so the imported and rejected record counts are always wrong. It looks like the file stream closes while inserts are still occurring. I've tried changing from the "close" to the "end" event, with the same result.
Test this script with the following call...
node --max-old-space-size=8192 .\data.load.js 'foo' 'us-west-1' 'dev' '.\foo.default.json' true
Here is the content for the script I ultimately used...
'use strict'

if (process.argv.length < 6) {
    throw new Error('Please pass the table-name, aws-Region, aws-Profile, and file-path to the script.');
}

let [, , TableName, Region, Profile, ImportFile, Force] = process.argv;

process.env.AWS_SDK_LOAD_CONFIG = true;
process.env.AWS_PROFILE = Profile;
Force = typeof(Force) !== 'undefined' ? Force : false;

const AWS = require('aws-sdk');
const fs = require('fs');
const JSONStream = require('JSONStream');

AWS.config.update({ region: Region });
const ddbc = new AWS.DynamoDB.DocumentClient();

console.log('Target Profile: ', Profile);
console.log('Target Region: ', Region);
console.log('Target Table: ', TableName);
console.log('Source File: ', ImportFile);
console.log('Force Import: ', Force);

// Returns the number of records in a specified table
const ddb_table_has_items = (TableName) => {
    return new Promise((resolve, reject) => {
        const ddb_query_parameters = { TableName, Select: 'COUNT' }
        ddbc.scan(ddb_query_parameters, (error, data) => {
            (error) ? reject(error) : resolve(data);
        });
    });
}
const ddb_table_upsert_items = (TableName, Item) => {
    return new Promise((resolve, reject) => {
        const ddb_insert_payload = { TableName, Item };
        ddbc.put(ddb_insert_payload, (error, data) => {
            (error) ? reject(error) : resolve(data);
        });
    });
}
const ddb_bulk_load = (TableName, ImportFile) => {
    return new Promise((resolve, reject) => {
        let count_succeeded = 0;
        let count_failed = 0;
        let count_attempted = 0;
        let inserts = [];
        const json_stream = JSONStream.parse("*");
        const source_data_stream = fs.createReadStream(ImportFile);
        const ddb_source_item = source_data_stream.pipe(json_stream);
        ddb_source_item.on("data", (source_data_item) => {
            count_attempted++;
            let ddb_insert = ddb_table_upsert_items(TableName, source_data_item)
                .then((data) => count_succeeded++)
                .catch((error) => count_failed++);
            inserts.push(ddb_insert);
        });
        ddb_source_item.on("end", () => {
            Promise.all(inserts)
                .then(() => {
                    resolve({ count_succeeded, count_failed, count_attempted });
                })
                .catch((error) => {
                    console.log(error);
                    reject(error);
                });
        });
        ddb_source_item.on("error", (error) => {
            reject(error);
        });
    });
}

(async () => {
    try {
        let proceed_with_import = false;
        if (Force.toString().toLowerCase() === 'true') {
            proceed_with_import = true;
        } else {
            const table_scan = await ddb_table_has_items(TableName);
            proceed_with_import = (table_scan.Count === 0);
        }
        if (proceed_with_import) {
            let ddb_inserts = await ddb_bulk_load(TableName, ImportFile);
            console.log("=".repeat(75));
            console.log("Completed: '%s' has been loaded into '%s'.", ImportFile, TableName);
            console.log("  Insert Attempted: %s", ddb_inserts.count_attempted);
            console.log("  Insert Succeeded: %s", ddb_inserts.count_succeeded);
            console.log("  Insert Failed   : %s", ddb_inserts.count_failed);
        }
    } catch (error) {
        console.log(error);
    }
})();
Wrapping each insert in a promise, pushing the insert promises into an array, and using Promise.all on that array did the trick. I execute the Promise.all once we're finished reading from the file, i.e. once the "end" event is emitted on the ddb_source_item stream.
