Web3: Wait until all async calls within a loop are completed - javascript

I am using Web3 to get a list of smart contracts and then iterate (loop) through each of them to read multiple variables from the smart contracts. Unfortunately, I am not able to execute a function once all the async calls within my loop are done.
Logic:
Get the number of Games
For i = 0 until i < Games
Get the smart contract address (from Smart Contract)
Get the Start Time value (from Smart Contract)
Get the End Time value (from Smart Contract)
(Once all calls of the loop are done)
Order the Games by Start Time
Display the Games
When I do console.log(contractInstanceGame) (step 3) after my loop, the array is empty because the previous calls have not completed yet.
Code:
var contractInstanceGame = [];
var contractAddressRegistry = '0xc0b55bff524b953a5248ccb5a60b00647052ae8b';

// Fetch all the contract addresses
let contractRegistry = web3.eth.contract(contractAbiRegistry);
let contractInstanceRegistry = contractRegistry.at(contractAddressRegistry);

contractInstanceRegistry.numberOfGames(function(err, res) {
  if (!err) {
    let numberOfGames = res.toNumber();
    for (let i = 0; i < numberOfGames; i++) {
      let contractGame = web3.eth.contract(contractAbiGame);
      contractInstanceRegistry.games(i, function(err, res) {
        if (!err) {
          // Create the object
          contractInstanceGame[i] = [];
          contractInstanceGame[i]['Caller'] = contractGame.at(res);
          contractInstanceGame[i]['Address'] = res;
          // Get the Start Time
          contractInstanceGame[i]['Caller'].startTime(function(err, res) {
            if (!err) {
              contractInstanceGame[i]['StartTime'] = res.toNumber();
            } else {
              console.error("Could not get the Game start time: " + err);
            }
          });
          // Get the End Time
          contractInstanceGame[i]['Caller'].endTime(function(err, res) {
            if (!err) {
              contractInstanceGame[i]['EndTime'] = res.toNumber();
            } else {
              console.error("Could not get the Game end time: " + err);
            }
          });
        } else {
          console.error("Could not get the Game contract address: " + err);
        }
      });
    }
    console.log(contractInstanceGame);
    // Perform the ordering of contractInstanceGame by Start Time
    // Display contractInstanceGame
  } else {
    console.error("Could not get the number of Games: " + err);
  }
});
EDIT:
Examples of the solutions I tried:
Calling then() directly on the call does not work, as I get the following error:
inpage.js:14 Uncaught Error: The MetaMask Web3 object does not support synchronous methods like eth_call without a callback parameter. See https://github.com/MetaMask/faq/blob/master/DEVELOPERS.md#dizzy-all-async---think-of-metamask-as-a-light-client for details.
contractInstanceRegistry.numberOfGames()
  .then(function(x) {
    console.log(x);
  });
I also tried to promisify the call and use await, but then I get the error: Uncaught SyntaxError: await is only valid in async function
let numberOfGames = promisify(cb => contractInstanceRegistry.numberOfGames(cb));
let numberOfGamesX = await numberOfGames;
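One pattern that should address both errors: keep the callback style (which MetaMask requires), wrap each call in a Promise, and collect everything with Promise.all inside an async function. Below is a minimal sketch of that idea, untested, reusing the same contract objects from the code above and supplying a definition for the promisify helper used in the EDIT; the loadGames name is mine:

// Hypothetical helper: turn a callback-style contract call into a Promise.
function promisify(fn) {
  return new Promise((resolve, reject) => {
    fn((err, res) => (err ? reject(err) : resolve(res)));
  });
}

async function loadGames() {
  const res = await promisify(cb => contractInstanceRegistry.numberOfGames(cb));
  const numberOfGames = res.toNumber();

  // Build one Promise per game; each resolves to a complete game object.
  const indices = Array.from({ length: numberOfGames }, (_, i) => i);
  const games = await Promise.all(indices.map(async (i) => {
    const address = await promisify(cb => contractInstanceRegistry.games(i, cb));
    const caller = web3.eth.contract(contractAbiGame).at(address);
    const startTime = await promisify(cb => caller.startTime(cb));
    const endTime = await promisify(cb => caller.endTime(cb));
    return {
      Caller: caller,
      Address: address,
      StartTime: startTime.toNumber(),
      EndTime: endTime.toNumber()
    };
  }));

  // Only runs once every call above has completed.
  games.sort((a, b) => a.StartTime - b.StartTime);
  console.log(games);
}

loadGames().catch(err => console.error(err));

Note that await only appears inside the async function, which avoids the SyntaxError, and every contract call still goes through a callback, which avoids the MetaMask synchronous-call error.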

Related

How can I return different values from a function depending on code inside an Axios promise? NodeJS

I have a block of code that calls an API and saves the results depending on whether there are differences or not. I would like to return different values for DATA, as laid out in the code. But this is obviously not working, since it's returning undefined.
let compare = (term) => {
  let DATA;
  // Declare an empty array where we will push every thinkpad computer for sale.
  let arrayToStore = [];
  // Declare the page variable, which will be the number of pages based on the primary results.
  let pages;
  // This is the initial GET request, to calculate the number of iterations depending on result quantities.
  axios.get('https://api.mercadolibre.com/sites/MLA/search?q=' + term + '&condition=used&category=MLA1652&offset=' + 0)
    .then(function (response) {
      // Begin calculation of pages.
      let amount = response.data.paging.primary_results;
      // Since we only care about the primary results, this is fine. Since there are 50 items per page,
      // we divide amount by 50 and round up, as the last page can contain fewer than 50 items.
      pages = Math.ceil(amount / 50);
      // Here we begin the for loop.
      for (i = 0; i < pages; i++) {
        // For each page we do an axios request in order to get results.
        // Since each page is 50 as offset, i should be multiplied by 50.
        axios.get('https://api.mercadolibre.com/sites/MLA/search?q=' + term + '&condition=used&category=MLA1652&offset=' + i * 50)
          .then((response) => {
            const cleanUp = response.data.results.map((result) => {
              let image = result.thumbnail.replace("I.jpg", "O.jpg");
              return importante = {
                id: result.id,
                title: result.title,
                price: result.price,
                link: result.permalink,
                image: image,
                state: result.address.state_name,
                city: result.address.city_name
              }
            });
            arrayToStore.push(cleanUp);
            console.log(pages, i)
            if (i === pages) {
              let path = ('./compare/yesterday-' + term + '.json');
              if (fs.existsSync(path)) {
                console.log("Loop Finished. Reading data from Yesterday")
                fs.readFile('./compare/yesterday-' + term + '.json', (err, data) => {
                  if (err) throw err;
                  let rawDataFromYesterday = JSON.parse(data);
                  // First convert both items to JSON strings in order to compare them.
                  if (JSON.stringify(rawDataFromYesterday) !== JSON.stringify(arrayToStore)) {
                    // Then check the difference using id, otherwise it did not work. Using lodash to help.
                    let difference = _.differenceBy(arrayToStore[0], rawDataFromYesterday[0], 'id');
                    fs.writeFileSync('./compare/New' + term + '.json', JSON.stringify(difference));
                    // If they are different, save the new file, then send it via mail.
                    console.log("different entries, wrote difference to JSON");
                    let newMail = mail(difference, term);
                    fs.writeFileSync('./compare/yesterday-' + term + '.json', JSON.stringify(arrayToStore));
                    DATA = {
                      content: difference,
                      message: "These were the differences, items could be new or deleted.",
                      info: "an email was sent, details are the following:"
                    }
                    return DATA;
                  } else {
                    console.log("no new entries, cleaning up JSON");
                    fs.writeFileSync('./compare/New' + term + '.json', []);
                    DATA = {
                      content: null,
                      message: "There were no differences from the last consultation",
                      info: "The file " + './compare/New' + term + '.json' + ' was cleaned'
                    }
                    return DATA;
                  }
                });
              } else {
                console.error("error");
                console.log("file did not exist, writing new file");
                fs.writeFileSync('./compare/yesterday-' + term + '.json', JSON.stringify(arrayToStore));
                DATA = {
                  content: arrayToStore,
                  message: "There were no registries of the consultation",
                  info: "Writing new file to '" + path + "'"
                }
                return DATA;
              }
            }
          })
      }
    }).catch(err => console.log(err));
}
module.exports = compare
So I export this compare function, which I call in my app.js.
What I want is for this compare function to return the DATA object, so I can display the actual messages on the front end.
My hope would be to put this compare(term) function inside a route in app.js, like so:
app.get("/api/compare/:term", (req, res) => {
let {term} = req.params
let data = compare(term);
res.send(data);
})
But as I said, it's returning undefined. I tried async/await, and returning the whole first axios call, but I always get undefined.
Thank you
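For what it's worth, a hedged sketch of how compare could be restructured so the route can await a value: make every step await its promise and return DATA instead of assigning it inside callbacks. This assumes the same axios, lodash and Express app as in the question, swaps fs.promises in for the callback-style fs calls, and elides the mail sending and New-file bookkeeping for brevity:

const axios = require('axios');
const fs = require('fs').promises;
const _ = require('lodash');

let compare = async (term) => {
  const base = 'https://api.mercadolibre.com/sites/MLA/search?q=' + term + '&condition=used&category=MLA1652&offset=';
  const first = await axios.get(base + 0);
  const pages = Math.ceil(first.data.paging.primary_results / 50);
  let arrayToStore = [];
  for (let i = 0; i < pages; i++) {
    const response = await axios.get(base + i * 50); // each page awaited in turn
    arrayToStore.push(response.data.results.map((result) => ({
      id: result.id,
      title: result.title,
      price: result.price,
      link: result.permalink,
      image: result.thumbnail.replace("I.jpg", "O.jpg"),
      state: result.address.state_name,
      city: result.address.city_name
    })));
  }
  const path = './compare/yesterday-' + term + '.json';
  let yesterday;
  try {
    yesterday = JSON.parse(await fs.readFile(path));
  } catch (e) {
    // File did not exist: write it and report that there was nothing to compare.
    await fs.writeFile(path, JSON.stringify(arrayToStore));
    return { content: arrayToStore, message: "There were no registries of the consultation" };
  }
  if (JSON.stringify(yesterday) !== JSON.stringify(arrayToStore)) {
    const difference = _.differenceBy(arrayToStore[0], yesterday[0], 'id');
    await fs.writeFile(path, JSON.stringify(arrayToStore));
    return { content: difference, message: "These were the differences, items could be new or deleted." };
  }
  return { content: null, message: "There were no differences from the last consultation" };
};

// The route then awaits the returned promise:
app.get("/api/compare/:term", async (req, res) => {
  let {term} = req.params;
  res.send(await compare(term));
});

The key change is that nothing is returned from inside a .then callback; the async function itself resolves to DATA, which the route handler can await.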

NodeJS: processing API calls sequentially, but resulting in stack overflow

OK, so I have a situation where I cannot just fire thousands of requests to an API server.
I have a Node process (no UI) that I need to have process each API response/update sequentially, waiting for completion before sending the next request.
I may be making this more complicated than I think - not sure. I can only figure out how to do this with recursive calls, but this results in a stack overflow as there can be thousands of records. The general process is this:
get rows from SQL table with ID's (result)
formulate and send an API call to retrieve the ID's info
if returned data has image data, write it back to SQL table
wait on this process so as not to bombard the API server with thousands of requests all at once
repeat until last ID is processed (can be thousands, more than stack space)
Here's sample code (not actual, so ignore any syntax errors)...
UPDATED: actual running code with sensitive items removed
var g_con = null; //...yeah I know, globals are bad

//
// [ found updating ]
//
function getSetImage(result, row, found) {
  if (row >= result.length) { //...exit on no row or last row processed
    con.end();
    return;
  }
  item = result[row]; //...next SQL row
  if ((item !== undefined) && (item.autoid !== undefined)) {
    //...assemble API request and send it
    let url = 'https://...API header...'
      + item.autoid
      + '...API params...';
    request(url, (error, response, body) => {
      if (response.statusCode !== 200)
        throw ('Server is not responding\n' + response.statusMessage);
      let imageData = JSON.parse(body);
      if ((imageData.value[0] !== undefined) &&
          (imageData.value[0].DETAIL !== undefined) &&
          (imageData.value[0].DETAIL.Value.length)) {
        //...post back to SQL
        found++;
        console.log('\n' + item.autoid + '/[' + item.descr + '], ' + 'Found:' + found);
        qry = 'update inventory set image = "' + imageData.value[0].DETAIL.Value + '" where autoid = "' + item.autoid + '";';
        g_con.query(qry, (err) => {
          if (err) {
            console.log('ERROR:', err.message, '\nSQL:[' + err.sql + ']\n');
            throw err.message;
          }
        });
        row++;
        setTimeout(() => { getSetImage(result, row, found) }, 0); //...nested call after SQL
      } else {
        row++;
        process.stdout.write('.'); //...show '.' for record, but no image
        setTimeout(() => { getSetImage(result, row, found) }, 0); //...nested call after SQL
      }
    }); //...request callback
  }
  // } else {
  //   throw '\nERROR! result['+row+'] undefined? Images found: '+found;
  // }
}

//
// [ main lines ]
//
(() => {
  let params = null;
  try {
    params = JSON.parse(fs.readFileSync('./config.json'));
    //...load autoids array from SQL inventory table - saving autoids
    //   autoids in INVENTRY join on par_aid's in INVENTRYIMAGES
    g_con = mysql.createConnection(params.SQLConnection);
    g_con.connect((err) => {
      if (err) {
        console.log('ERROR:', err.message);
        throw err.message;
      }
    });
    //...do requested query and return data or an error
    let qry = 'select autoid, descr from inventory order by autoid;';
    g_con.query(qry, (err, results, flds) => {
      if (err || flds === undefined) {
        console.log('ERROR:', err.message, '\nSQL:[' + err.sql + ']\n');
        throw err.message;
      }
      console.log('Results length:', results.length);
      let row = 0;
      let found = 0;
      getSetImage(results, row, found);
    });
  }
  catch (err) {
    console.log('Error parsing config parameters!');
    console.log(err);
  }
})();
So here's the answer using Promises (except for MySQL):
//
// [ found updating ]
//
async function getSetImage(data) {
  for (let item of data) {
    if (item && item.autoid) {
      //...assemble API request and send it
      let url = g_URLHeader + g_URLPartA + item.autoid + g_URLPartB;
      let image = await got(url).json().catch(err => {
        console.log(err);
        err.message = 'API server is not responding';
        throw err;
      });
      if (image && image.value[0] && image.value[0].DETAIL &&
          image.value[0].DETAIL.Value.length) {
        console.log('\nFound: [' + item.autoid + ' - ' + item.descr
          + '] a total of ' + g_found + ' in ' + g_count + ' rows');
        g_found++;
        //...post back to SQL
        let qry = 'update inventory set image = "'
          + image.value[0].DETAIL.Value
          + '" where autoid = "'
          + item.autoid + '";';
        await g_con.query(qry, (err) => {
          if (err) {
            console.log('ERROR:', err.message, '\nSQL:[' + err.sql + ']\n');
            throw err.message;
          }
        });
      } else {
        process.stdout.write('.'); //...show '.' for record, but no image
      } //...if/else image.value
      g_count++;
    } //...if item
  } //...for()
}
As I've said in all my comments, this would be a ton simpler using promises and async/await. To do that, you need to switch all your asynchronous operations over to equivalents that use promises.
Here's a general outline based on the original pseudo-code you posted:
// use got() for promise version of request
const got = require('got');
// use require("mysql2/promise") for promise version of mysql

async function getSetImage(data) {
  for (let item of data) {
    if (item && item.id) {
      let url = uriHeader + uriPartA + item.id + uriPartB;
      let image = await got(url).json().catch(err => {
        // log and modify error, then rethrow
        console.log(err);
        err.msg = 'API Server is not responding\n';
        throw err;
      });
      if (image.value && image.value.length) {
        console.log('\nFound image for ' + item.id + '\n');
        let qry = 'update inventory set image = "' + image.value + '" where id = "' + item.id + '";';
        await con.query(qry).catch(err => {
          console.log('ERROR:', err.message, '\nSQL:[' + err.sql + ']\n');
          throw err;
        });
      }
    } else {
      // no image data found
      process.stdout.write('.'); //...show '.' for record, but no image
    }
  }
}

//...sql query is done, returning "result" - data rows
getSetImage(result).then(() => {
  console.log("all done");
}).catch(err => {
  console.log(err);
});
Some notes about this code:
The request() library is no longer getting new features and is in maintenance mode, so you need to change to a different library to get built-in promise support. You could use request-promise (also in maintenance mode), but I recommend one of the newer libraries such as got(), which is more actively developed. It has some nice features (it automatically checks the status for you to be 2xx, built-in JSON parsing, etc.) which I've used above to save code.
mysql2/promise has built-in promise support, which you get with const mysql = require('mysql2/promise');. I'd recommend you switch to it.
Because of the use of async/await here, you can just loop through your data in a regular for loop. No recursion required, and no stack build-up.
The way promises work by default, any rejected promise will automatically terminate the flow here. The only reason I'm using .catch() in a couple of places is for custom logging and tweaking of the error object. I then rethrow, which propagates the error back to the caller for you.
You can tweak the error handling to your liking. The usual convention with promises is to throw an Error object (not a string), and that's often what callers expect to see if the promise rejects.
This code can easily be customized to log errors and continue on to subsequent items in the array. Your original code did not appear to do that, so I wrote it to abort if it got an error.

Generating a hashchain using async

I'm trying to generate a hashchain using the following code:
var async = require('async');
var _ = require('lodash');

var offset = 1e7;
var games = 1e7;
var game = games;
var serverSeed = '238asd1231hdsad123nds7a182312nbds1';

function loop(cb) {
  var parallel = Math.min(game, 1000);
  var inserts = _.range(parallel).map(function() {
    return function(cb) {
      serverSeed = genGameHash(serverSeed);
      game--;
      query('INSERT INTO `hash` SET `hash` = ' + pool.escape(serverSeed));
    };
  });
  async.parallel(inserts, function(err) {
    if (err) throw err;
    // Clear the current line and move to the beginning.
    var pct = 100 * (games - game) / games;
    console.log('PROGRESS: ' + pct.toFixed(2) + '%')
    if (game > 0) {
      loop(cb);
    } else {
      console.log('Done');
      cb();
    }
  });
}

loop(function() {
  console.log('Finished with SEED: ', serverSeed);
});
When I run this code it generates a hash chain of 1k hashes, while I'm trying to generate a chain of 1M hashes. It seems like async isn't working properly, but I have no idea why: there are no errors in the console, nothing that points out a flaw.
Any ideas?
Can you run it with a smaller games value (about 3000)?
Your parallel function never sends the done signal, because the callbacks of the inserts tasks are never triggered. I think the query function takes two parameters: query(sql: string, callback?: (err, result) => void) (TypeScript style).
I suggest you change your logic and flow as in the block of code below:
var inserts = _.range(parallel).map(function() {
  return function(cb) {
    serverSeed = genGameHash(serverSeed);
    query('INSERT INTO `hash` SET `hash` = ' + pool.escape(serverSeed), function(err, result) {
      if (result && !err) {
        game--;
      }
      cb(); // remember to call the callback
    });
  };
});
In your code you have used async.parallel; I think that is not a good idea here, as too many connections would be open (1M). Recommended for this case is parallelLimit, as sketched below.
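A rough sketch of what that could look like (untested; genGameHash, query, pool and games are the same assumed helpers and variables as in the question). Note the synchronous seed generation still runs in task-start order, so the chain stays intact:

var async = require('async');
var _ = require('lodash');

var tasks = _.range(games).map(function() {
  return function(cb) {
    serverSeed = genGameHash(serverSeed); // runs synchronously when the task starts
    query('INSERT INTO `hash` SET `hash` = ' + pool.escape(serverSeed), function(err, result) {
      cb(err); // signal completion (or failure) back to async
    });
  };
});

// At most 100 INSERTs in flight at any time, instead of one connection per row.
async.parallelLimit(tasks, 100, function(err) {
  if (err) throw err;
  console.log('Finished with SEED: ', serverSeed);
});

For a million rows it may also be worth avoiding the up-front array of a million task closures, for example with async.timesLimit, which builds each task on demand.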

Node/Express - How to wait until For Loop is over to respond with JSON

I have a function in my express app that makes multiple queries within a For Loop and I need to design a callback that responds with JSON when the loop is finished. But, I'm not sure how to do this in Node yet. Here is what I have so far, but it's not yet working...
exports.contacts_create = function(req, res) {
  var contacts = req.body;
  (function(res, contacts) {
    for (var property in contacts) { // for each contact, save to db
      if (!isNaN(property)) {
        contact = contacts[property];
        var newContact = new Contact(contact);
        newContact.user = req.user.id
        newContact.save(function(err) {
          if (err) { console.log(err) };
        }); // .save
      }; // if !isNaN
    }; // for
    self.response();
  })(); // function
}; // contacts_create

exports.response = function(req, res, success) {
  res.json('finished');
};
There are a few problems with your code besides just the callback structure.
var contacts = req.body;
(function(res, contacts) {
...
})(); // function
^ you are redefining contacts and res in the parameter list, but not passing in any arguments, so inside your function res and contacts will be undefined.
Also, not sure where your self variable is coming from, but maybe you defined that elsewhere.
As to the callback structure, you're looking for something like this (assuming contacts is an Array):
exports.contacts_create = function(req, res) {
  var contacts = req.body;
  var iterator = function (i) {
    if (i >= contacts.length) {
      res.json('finished'); // or call self.response() or whatever
      return;
    }
    contact = contacts[i];
    var newContact = new Contact(contact);
    newContact.user = req.user.id
    newContact.save(function(err) {
      if (err)
        console.log(err); // if this is really a failure, you should call response here and return
      iterator(i + 1); // re-call this function with the next index
    });
  };
  iterator(0); // start the async "for" loop
};
However, you may want to consider performing your database saves in parallel. Something like this:
var savesPending = contacts.length;

var saveCallback = function (i, err) {
  if (err)
    console.log('Saving contact ' + i + ' failed.');
  if (--savesPending === 0)
    res.json('finished');
};

for (var i in contacts) {
  ...
  newContact.save(saveCallback.bind(null, i));
}
This way you don't have to wait for each save to complete before starting the next round-trip to the database.
If you're unfamiliar with why I used saveCallback.bind(null, i): it's basically so the callback can know which contact failed in the event of an error. See Function.prototype.bind if you need a reference.
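To illustrate the bind trick with a standalone example of my own (not from the answer): bind pre-fills the leading arguments, so the bound callback receives the captured index first and the usual err second:

var saveCallback = function (i, err) {
  console.log('contact', i, 'finished with', err);
};

var boundForThird = saveCallback.bind(null, 2); // "this" is irrelevant here, so pass null
boundForThird(new Error('boom'));               // logs: contact 2 finished with Error: boom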

When to close MongoDB database connection in Nodejs

Working with Node.js and MongoDB through the Node MongoDB native driver. I need to retrieve some documents, make modifications, then save them right back. This is an example:
db.open(function (err, db) {
  db.collection('foo', function (err, collection) {
    var cursor = collection.find({});
    cursor.each(function (err, doc) {
      if (doc != null) {
        doc.newkey = 'foo'; // Make some changes
        db.save(doc); // Update the document
      } else {
        db.close(); // Closing the connection
      }
    });
  });
});
Because of the asynchronous nature, if the process of updating a document takes longer, then by the time the cursor reaches the end of the documents the database connection has already been closed, and not all updates are saved to the database.
If db.close() is omitted, all the documents are correctly updated, but the application hangs and never exits.
I saw a post suggesting using a counter to track the number of updates and, when it falls back to zero, closing the db. But am I doing anything wrong here? What is the best way to handle this kind of situation? Does db.close() have to be used to free up resources? Or does a new db connection need to be opened?
Here's a potential solution based on the counting approach (I haven't tested it and there's no error trapping, but it should convey the idea).
The basic strategy: find out how many records need to be updated, save each record asynchronously with a callback on success that decrements the count, and close the DB when the count reaches 0 (when the last update finishes). By using {safe: true} we can ensure that each update is successful.
The mongo server will use one thread per connection, so it's good to either a) close unused connections, or b) pool/reuse them.
db.open(function (err, db) {
  db.collection('foo', function (err, collection) {
    var cursor = collection.find({});
    cursor.count(function (err, count) {
      var savesPending = count;
      if (count == 0) {
        db.close();
        return;
      }
      var saveFinished = function () {
        savesPending--;
        if (savesPending == 0) {
          db.close();
        }
      };
      cursor.each(function (err, doc) {
        if (doc != null) {
          doc.newkey = 'foo'; // Make some changes
          db.save(doc, {safe: true}, saveFinished);
        }
      });
    });
  });
});
It's best to use a pooled connection and then call db.close() in a cleanup function at the end of your application's life:
process.on('SIGINT', cleanup);
process.on('SIGTERM', cleanup);
See http://mongodb.github.io/node-mongodb-native/driver-articles/mongoclient.html
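(The cleanup function itself isn't shown in that answer; a minimal sketch, assuming db is the open pooled connection, might be the following. The extended answer below fills in the remaining details.)

function cleanup() {
  db.close();      // release the pooled connection(s)
  process.exit(0); // then let the process end
}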
A bit of an old thread, but anyway.
Here is an extended example of the answer given by pkopac, since I had to figure out the rest of the details:
const client = new MongoClient(uri);
(async () => await client.connect())();

// use client to work with db
const find = async (dbName, collectionName) => {
  try {
    const collection = client.db(dbName).collection(collectionName);
    const result = await collection.find().toArray()
    return result;
  } catch (err) {
    console.error(err);
  }
}

const cleanup = (event) => { // SIGINT is sent, for example, when you Ctrl+C a running process from the command line.
  client.close(); // Close MongoDB connection when process ends
  process.exit(); // Exit with default success-code '0'.
}

process.on('SIGINT', cleanup);
process.on('SIGTERM', cleanup);
Here is a link to the difference between SIGINT and SIGTERM.
I had to add the process.exit(), otherwise my node web-server didn't exit cleanly when doing Ctrl + C on the running process in command line.
I found that using a counter may work for a simple scenario, but may be hard in complicated situations. Here is a solution I came up with: close the database connection when the connection is idle:
var dbQueryCounter = 0;
var maxDbIdleTime = 5000; // maximum db idle time

var closeIdleDb = function (connection) {
  var previousCounter = 0;
  var checker = setInterval(function () {
    if (previousCounter == dbQueryCounter && dbQueryCounter != 0) {
      connection.close();
      clearInterval(checker); // clear the timer itself, not the function
    } else {
      previousCounter = dbQueryCounter;
    }
  }, maxDbIdleTime);
};

MongoClient.connect("mongodb://127.0.0.1:27017/testdb", function (err, connection) {
  if (err) throw err;
  connection.collection("mycollection").find({'a': {'$gt': 1}}).toArray(function (err, docs) {
    dbQueryCounter++;
  });
  // do any db query, and increase the dbQueryCounter
  closeIdleDb(connection);
});
This can be a general solution for any database connection. maxDbIdleTime can be set to the same value as the db query timeout, or longer.
This is not very elegant, but I can't think of a better way to do it. I use NodeJS to run a script that queries MongoDb and Mysql, and the script hangs there forever if the database connections are not closed properly.
Here's a solution I came up with. It avoids using toArray and it's pretty short and sweet:
var MongoClient = require('mongodb').MongoClient;

MongoClient.connect("mongodb://localhost:27017/mydb", function(err, db) {
  let myCollection = db.collection('myCollection');
  let query = {}; // fill in your query here
  let i = 0;
  myCollection.count(query, (err, count) => {
    myCollection.find(query).forEach((doc) => {
      // do stuff here
      if (++i == count) db.close();
    });
  });
});
I came up with a solution that involves a counter like this. It does not depend on a count() call nor does it wait for a time out. It will close the db after all the documents in each() are exhausted.
var mydb = {}; // initialize the helper object.
mydb.cnt = {}; // init counter to permit multiple db objects.

mydb.open = function(db) // call open to inc the counter.
{
  if (!mydb.cnt[db.tag]) mydb.cnt[db.tag] = 1;
  else mydb.cnt[db.tag]++;
};

mydb.close = function(db) // close the db when the cnt reaches 0.
{
  mydb.cnt[db.tag]--;
  if (mydb.cnt[db.tag] <= 0) {
    delete mydb.cnt[db.tag];
    return db.close();
  }
  return null;
};
So each time you are going to make a call like db.each() or db.save(), you use these methods to ensure the db is ready while working and closed when done.
Example from OP:
foo = db.collection('foo');
mydb.open(db); // *** Add here to init the counter. ***
foo.find({}, function(err, cursor)
{
  if (err) throw err;
  cursor.each(function (err, doc)
  {
    if (err) throw err;
    if (doc != null) {
      doc.newkey = 'foo';
      mydb.open(db); // *** Add here to prevent from closing prematurely. ***
      foo.save(doc, function(err, count) {
        if (err) throw err;
        mydb.close(db); // *** Add here to close when done. ***
      });
    } else {
      mydb.close(db); // *** Close like this instead. ***
    }
  });
});
Now, this assumes that the second-to-last callback from each makes it through mydb.open() before the last callback from each gets to mydb.close()... so, of course, let me know if this is an issue.
So: put a mydb.open(db) before a db call, and put a mydb.close(db) at the return point of the callback or after the db call (depending on the call type).
It seems to me that this kind of counter should be maintained within the db object, but this is my current workaround. Maybe we could create a new object that takes a db in the constructor and wraps the mongodb functions to handle the close better; a sketch of that idea follows.
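A minimal sketch of that wrapper idea (my own, hypothetical, and untested; it only wraps the counting, not the individual mongodb functions):

function CountedDb(db) {       // wraps a native db handle
  this.db = db;
  this.pending = 0;            // outstanding operations
}
CountedDb.prototype.open = function () {
  this.pending++;              // call before starting an async db operation
};
CountedDb.prototype.close = function () {
  if (--this.pending <= 0) {   // last outstanding operation finished
    return this.db.close();
  }
  return null;
};

Wrapping each mongodb call (find, save, each, ...) so it does the open()/close() bookkeeping itself would remove the manual calls entirely.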
Based on the suggestion from #mpobrien above, I've found the async module to be incredibly helpful in this regard. Here's an example pattern that I've come to adopt:
const assert = require('assert');
const async = require('async');
const MongoClient = require('mongodb').MongoClient;

var mongodb;

async.series(
  [
    // Establish Covalent Analytics MongoDB connection
    (callback) => {
      MongoClient.connect('mongodb://localhost:27017/test', (err, db) => {
        assert.equal(err, null);
        mongodb = db;
        callback(null);
      });
    },
    // Insert some documents
    (callback) => {
      mongodb.collection('sandbox').insertMany(
        [{a: 1}, {a: 2}, {a: 3}],
        (err) => {
          assert.equal(err, null);
          callback(null);
        }
      )
    },
    // Find some documents
    (callback) => {
      mongodb.collection('sandbox').find({}).toArray(function(err, docs) {
        assert.equal(err, null);
        console.dir(docs);
        callback(null);
      });
    }
  ],
  () => {
    mongodb.close();
  }
);
Modern way of doing this without counters, libraries or any custom code:
let MongoClient = require('mongodb').MongoClient;
let url = 'mongodb://yourMongoDBUrl';
let database = 'dbName';
let collection = 'collectionName';

MongoClient.connect(url, { useNewUrlParser: true }, (mongoError, mongoClient) => {
  if (mongoError) throw mongoError;

  // query as an async stream
  let stream = mongoClient.db(database).collection(collection)
    .find({}) // your query goes here
    .stream({
      transform: (readElement) => {
        // here you can transform each element before processing it
        return readElement;
      }
    });

  // process each element of the stream (async)
  stream.on('data', (streamElement) => {
    // here you process the data
    console.log('single element processed', streamElement);
  });

  // called only when the stream has no pending elements to process
  stream.once('end', () => {
    mongoClient.close().then(r => console.log('db successfully closed'));
  });
});
Tested on version 3.2.7 of the mongodb driver, but according to the linked documentation it might be valid since version 2.0.
