I'm trying to move my database to another server.
I have a collection with ~410k documents, and I want to move them in batches of 100 to the MongoDB database on my new server.
There is my code:
var mongoose = require('mongoose');
var itemdataModel = require('./model/ItemData');
// Log and abort on any connection-level error.
mongoose.connection.on('error', function(err) {
console.log('MongoDB Connection Error.' + err);
process.exit(1);
});
// NOTE(review): this handler exits the process on *every* close event,
// including the deliberate connection.close() calls made later in the
// script -- which is exactly why the run stops after printing
// "Connection closed" once the first batch is selected.
mongoose.connection.on('close', function() {
console.log('Connection closed');
process.exit(1);
});
// Inserts one batch of plain documents into the *new* database:
// connects, runs insertMany, then closes the connection again.
const ins = async (itemz) => {
try {
console.log("Inserting..")
await mongoose.connect('<url to new database>', { useNewUrlParser: true });
// NOTE(review): `gamedataModel` is never defined in this script --
// presumably this should be `itemdataModel` (or a model registered on
// the new connection). As written, this line throws a ReferenceError.
await gamedataModel.insertMany(itemz,
// NOTE(review): mixing the callback style with `await` is redundant;
// insertMany returns a promise when no callback is supplied.
async (err, result) => {
if (err) {
console.log("Insert query error" + err)
// Closing here triggers the 'close' handler above, which exits.
await mongoose.connection.close()
} else {
console.log("Inserted!");
await mongoose.connection.close()
}
});
} catch (e) {
console.log('insert error ' + e)
}
}
// NOTE(review): as pasted there is no semicolon after the `ins` arrow
// function on the previous line, so these opening parentheses would be
// parsed as a *call* to `ins` rather than an IIFE.
(async () => {
// NOTE(review): connect() is not awaited, and the count below covers
// *all* documents while each batch query filters on
// {status: 'processed'} -- the two totals can disagree.
mongoose.connect('<url to old database>', { useNewUrlParser: true });
const wyn = await itemdataModel.find({}).countDocuments()
console.log('Documents count: ' + wyn)
// Page through the source collection 100 documents at a time.
for (let i = 0; i < wyn; i += 100) {
const docs = await itemdataModel.find({status: 'processed'}, '', {'skip': i, 'limit': 100 }).lean().exec()
console.log('Selected ' + docs.length + ' documents to move')
// Closing fires the 'close' handler, which calls process.exit(1),
// so the loop never reaches a second iteration.
await mongoose.connection.close()
await ins(docs)
}
})();
There is problem with connecting to second database in "ins" function, there is my console output:
Documents count: 411975
Selected 100 docs to move
Connection closed
How do I get this working?
Is it required to close the first connection before opening another one to insert the 100 documents, and then close that one and return to my loop to move the next 100 docs?
I do not want to overload the servers by opening 411k/100 connections at once on both of them
I would not use nodejs for this task. Mongodb comes with utilities for backing up and restoring which will almost certainly do it better and more efficiently than you can, especially if you're adding the mongoose overhead.
https://docs.mongodb.com/manual/tutorial/backup-and-restore-tools/
Related
I'm testing MySQL locally with Node, and every time I run the file, the program doesn't end unless I manually terminate it (with ctrl+c). I searched online, and I especially looked at this question (not a duplicate—it doesn't ask about my error), which said that the program will end if I call pool.end(). However, when I call the function, it gives me an error: Pool is closed, which leads me to believe that the pool is already closed. But then why doesn't my program end? My code looks something like this:
// database.js
// Shared mysql2 connection pool, configured from environment variables.
const mysql = require("mysql2");
const {HOST, USER, DATABASE, PASSWORD} = process.env;
const pool = mysql.createPool({
host: HOST,
user: USER,
database: DATABASE,
password: PASSWORD,
waitForConnections: true,
connectionLimit: 50,
queueLimit: 0
});
// Inserts one {name, email} record; resolves with no value on success,
// rejects with the mysql error on failure.
exports.set = data => {
return new Promise((resolve, reject) => {
pool.execute("INSERT INTO `mytable` (name, email) VALUES (?, ?);", [data.name, data.email], function(err) {
if(err) reject(err);
else resolve();
});
});
};
// Fetches rows matching both name and email; resolves with the row array.
exports.get = data => {
return new Promise((resolve, reject) => {
pool.query("SELECT * FROM `mytable` WHERE `name`=? AND `email`=?;", [data.name, data.email], function(err, result) {
if(err) reject(err);
else resolve(result);
});
});
};
// Fetches every row from mytable; resolves with the row array.
exports.getAll = () => {
return new Promise((resolve, reject) => {
pool.query("SELECT * FROM `mytable`;", function(err, result) {
if(err) reject(err);
else resolve(result);
});
});
};
// NOTE(review): pool.end() is fire-and-forget here -- its completion
// callback is never used and nothing is returned, so callers cannot
// await shutdown, and a second call raises "Pool is closed".
exports.end = () => {
pool.end();
};
And then in my main file:
const db = require("./database");

// Exercise a set/get round trip, then shut the pool down so the open
// pooled connections stop keeping the Node event loop alive.
(async () => {
  // Sample record. (The "#" in the pasted email was a paste artifact
  // for "@".)
  const data = { name: "John Doe", email: "johndoe@example.com" };
  await db.set(data);
  const result = await db.get(data);
  console.log(result);
  db.end(); // I added this line as mentioned in https://stackoverflow.com/questions/61976788/nodejs-program-not-exiting-after-completion
})();
It gives the error Pool is closed with a stack trace. If I don't include the db.end() line the program just won't end (at the moment db.end() doesn't solve the problem either). Does anyone know the solution?
Update
As suggested in the comments, I installed a library called why-is-node-still-running to assess the situation, but the logs didn't make much sense. Here are the logs for reference:
There are 6 handle(s) keeping the process running:
TCPWRAP
C:\[REDACTED]\node_modules\mysql
2\lib\connection.js:45 - this.stream = Net.connect(
C:\[REDACTED]\node_modules\mysql
2\lib\pool_connection.js:7 - super(options);
C:\[REDACTED]\node_modules\mysql
2\lib\pool.js:51 - connection = new PoolConnection(this, {
C:\[REDACTED]\node_modules\mysql
2\lib\pool.js:139 - this.getConnection((err, conn) => {
C:\[REDACTED]\database.js:39
- pool.query("SELECT * FROM `registration`", function
(err, result) {
DNSCHANNEL
(unknown stack trace)
GETADDRINFOREQWRAP
Update 2
When I put db.end() inside setTimeout() (without a provided delay number), setImmediate(), or process.nextTick() (in my main file), things work perfectly, no error is thrown, and the program ends normally. Although it works, I still didn't get to the root cause, so can someone tell me what happened?
I would like to send "Hello world" from one nodejs server to another using node-serialport. I have verified that the radios connecting the two are connected and sending info because they keep displaying buffer information after running my current code.
here is what I have so far.
server1
// Import dependencies
const SerialPort = require("serialport");
// Scoped package name: "@serialport/..." (the "#" in the paste was a
// mangled "@" and would make require() throw).
const Readline = require("@serialport/parser-readline");
var sf = require('sf');

// Enumerate the serial ports so the right device path can be chosen.
SerialPort.list().then(ports => {
  ports.forEach(function(port) {
    console.log(port.path);
    console.log(port.pnpId);
    console.log(port.manufacturer);
  });
});

// Defining the serial port. Both radios must agree on the baud rate.
const port = new SerialPort('COM3', { baudRate: 9600 }, function (err) {
  if (err) {
    return console.log('Port Error: ', err.message)
  }
})

// Send a test message once the port is open.
port.write('main screen turn on', function(err) {
  if (err) {
    return console.log('Error on write: ', err.message)
  }
  console.log('message written')
})

// Read data that is available but keep the stream in "paused mode"
port.on('readable', function () {
  console.log('Data:', port.read())
})

// Switches the port into "flowing mode"
port.on('data', function (data) {
  console.log('Data:', data)
})

// Pipe the data into another stream (like a parser or standard out)
const lineStream = port.pipe(new Readline())
lineStream.on('data', console.log)
server 2
// Import dependencies
// in Ubuntu need to run command: sudo chmod 666 /dev/ttyS0 to open port for use
const SerialPort = require("serialport");
// Scoped package name: "@serialport/..." (the "#" in the paste was a
// mangled "@" and would make require() throw).
const Readline = require("@serialport/parser-readline");
var stoploop = true;

// Defining the serial port. The baud rate must match the other radio
// (server1 opens COM3 at 9600); a mismatched rate is what turns the
// received bytes into garbage characters.
const port = new SerialPort('/dev/ttyUSB0', { baudRate: 9600 }, function (err) {
  if (err) {
    return console.log('Error: ', err.message)
  }
})

// Send a test message once the port is open.
port.write('chicken butt', function(err) {
  if (err) {
    return console.log('Error on write: ', err.message)
  }
  console.log('message written')
})
// port.write("hello?");

// Read data that is available but keep the stream in "paused mode"
port.on('readable', function () {
  console.log('Data:', port.read())
})

// Switches the port into "flowing mode"
port.on('data', function (data) {
  console.log('Data:', data)
})

// Pipe the data into another stream (like a parser or standard out)
const lineStream = port.pipe(new Readline())
any help or even an example of how to send hello world between the two would be greatly appreciated! please let me know if any more info is needed.
edit : I recently tried doing something like
// Decode each incoming buffer to text. A mismatched baud rate on either
// side still produces garbage characters here -- decoding does not fix
// framing errors.
port.on('data', (data) => {
try {
console.log(data.toString());
} catch (err) {
console.log('Oops');
}
});
this is taking data that used to appear as <buffer # # # # #> and turning it into an odd string like "(
)))) ) ) )))
!)☺)!))) ) )
)(☺!�"
I found the answer myself!
I was using the wrong baudRate, and also needed to stringify the data being sent as a JSON string
Followed a tutorial to get this working for stored procedures inside of Oracle. I have my GET/SELECT statements working correctly where based on the user making the GET call it changes the pool so that the SELECTs are from the correct user.
Pool creations that work for GET/SELECT
// Creates one named connection pool per database user at startup so
// request handlers can look pools up by alias ('pool1' / 'pool2')
// instead of re-creating a pool on every request.
// NOTE(review): credentials are hardcoded here -- consider loading them
// from the environment.
async function initialize() {
await oracledb.createPool({
user: 'user1',
password: 'pass1',
connectString: 'oracledb.website/dev',
poolAlias: 'pool1'
});
await oracledb.createPool({
user: 'user2',
password: 'pass2',
connectString: 'oracledb.website/dev',
poolAlias: 'pool2'
});
}
The tutorial I followed for stored procedures can be found here: https://blogs.oracle.com/opal/using-dbmsoutput-with-nodejs-and-node-oracledb
You will see that in this example he has a new pool being created for every request.
// NOTE(review): this older, callback-style example creates a pool on
// every call -- doing that once per request is what triggers NJS-046
// when a poolAlias is reused.
oracledb.createPool(
dbconfig,
function(err, pool) {
if (err)
console.error(err.message)
else
doit(pool);
});
var doit = function(pool) {
Note that the dbconfig used above is an object like:
// Example credential object consumed by the createPool call above.
dbconfig.hrPool.user = 'user3';
dbconfig.hrPool.password = 'pass3';
dbconfig.hrPool.connectString = 'oracle.site/dev';
This will cause issues if you specify a poolAlias and you will quickly end up trying to create a pool alias that already exists with an error like:
NJS-046: poolAlias "pool1" already exists in the connection pool cache.
I have attempted to update this code myself but I am not familiar enough with asyc/waterfalls/callbacks to get it to keep going.
What I attempted is below (it never actually runs anything):
return new Promise(async (resolve, reject) => {
// NOTE(review): at the moment async.waterfall is invoked, the three
// named steps below are still `undefined` -- they are assigned to
// hoisted `var`s only *after* this call.
async.waterfall(
[
function(cb) {
// NOTE(review): this task drops the promise returned by
// getConnection() and never calls cb(null, conn), so the
// waterfall stalls right here -- which is why the later steps
// never run ("I NEVER MAKE IT HERE").
oracledb.getConnection('pool1');
},
enableDbmsOutput,
createDbmsOutput,
fetchDbmsOutputLine
],
function (err, conn, cb) {
if (err) {
console.error("In waterfall error cb: ==>", err, "<== THIS IS WHERE THE ORACLE ERROR WILL SHOW!");
// Release the Oracle Connection
conn.release(function (err) {
if (err) console.error(err.message);
});
}
}
);
// Step 1: turn on DBMS_OUTPUT buffering for this session.
var enableDbmsOutput = function (conn, cb) {
conn.execute(
"BEGIN DBMS_OUTPUT.ENABLE(NULL); END;",
function(err) { return cb(err, conn); });
};
// Step 2: run the caller-supplied `query` (defined outside this view).
var createDbmsOutput = function (conn, cb) {
console.log('I NEVER MAKE IT HERE')
conn.execute(query
,function(err) { return cb(err, conn); });
};
// Step 3: drain DBMS_OUTPUT one line at a time, recursing until the
// status bind reports no more output.
var fetchDbmsOutputLine = function (conn, cb) {
conn.execute(
"BEGIN DBMS_OUTPUT.GET_LINE(:ln, :st); END;",
{ ln: { dir: oracledb.BIND_OUT, type: oracledb.STRING, maxSize: 32767 },
st: { dir: oracledb.BIND_OUT, type: oracledb.NUMBER } },
function(err, result) {
if (err) {
return cb(err, conn);
} else if (result.outBinds.st == 1) {
return cb(null, conn); // no more output
} else {
// NOTE(review): resolves the outer promise with the *first*
// fetched line; later recursive fetches are discarded.
resolve(result);
return fetchDbmsOutputLine(conn, cb);
}
});
};
})
}
Would really appreciate any help!
The blog you quoted is a command-line script and only creates a pool once. That happens at the start of the script. It also is an old blog post. All its async module calls and JS callbacks should/would now be replaced by Node.js's newer async/await syntax. Also avoid using Promise() directly - code gets too confusing.
Since you are creating some kind of web listener, you should create the pool during app start up, but not for each web request.
Check the node-oracledb example webapp.js.
// Create the connection pool ONCE at startup, then start the HTTP
// listener; every request handler shares the same pool.
async function init() {
try {
await oracledb.createPool({
user: dbConfig.user,
password: dbConfig.password,
connectString: dbConfig.connectString
});
// Create HTTP server and listen on port httpPort
const server = http.createServer();
server.on('error', (err) => {
console.log('HTTP server problem: ' + err);
});
server.on('request', (request, response) => {
handleRequest(request, response);
});
await server.listen(httpPort);
console.log("Server is running at http://localhost:" + httpPort);
} catch (err) {
console.error("init() error: " + err.message);
}
}
// Placeholder: per-request logic (e.g. pool.getConnection by alias).
async function handleRequest(request, response) {
... // Your code to handle each web request goes here.
}
init();
You may also want to review the PL/SQL procedure example plsqlproc.js.
I'm trying to setup a NodeJS application with GraphiQL API and MySQL database connection. All of it seem to work until I'm trying to get the data that was fetched from the database be available for GraphQL to be able to do something with it.
Here we have the app.js file, which is the starting point of the backend. Assume that all the imports and declarations are valid.
// Mount the GraphQL endpoint; `rootValue` supplies the resolver map.
app.use('/api', graphql({
schema: buildSchema(schema),
rootValue: resolvers,
graphiql: true
}));
The rootValue is as follows.
const resolvers = {
regions: () => {
// NOTE(review): processQuery() (called via the manager) returns
// before its db callback has fired, so `a` is null here -- this log
// prints "a: null" and GraphQL receives no data.
var a = manager.getRegions();
console.log("a: " + a);
return a;
}
};
The manager object. I have the manager object incase I want to change database type in the future.
// Thin indirection over the storage layer so the database backend can
// be swapped later without touching the resolvers.
const manager = {
getRegions : function() {
console.log("getRegions entered...");
return processQuery("regions");
}
};
Finally, under the MySQL script we have.
const processQuery = (query) => {
var res = null;
switch (query) {
case 'regions':
default:
// db.query is asynchronous: this callback fires *after* processQuery
// has already returned, so the assignment to `res` below is lost.
db.query(SELECT_REGIONS_QUERY, (err, rows) => {
if (err) {
throw err;
} else {
res = JSON.stringify(rows);
console.log("Stringify: " + res);
}
});
}
// Always null at this point -- the query has not completed yet.
return res;
}
I've read numerous pages and even stackoverflow posts about Promise, callback functions and async/await but neither (atleast to code attempts made by me) seem to make the printout under the rootValue be printed last...
I saw an implementation done by Academind that uses MongoDB instead and he doesn't seem to have to care about this issue. Any ideas on how to solve this? Thank you!
What you can to is make the processQuery an asynchronous function and just wait for the db.query to be solved.
// Run the named query and resolve with the JSON-stringified rows.
// db.query() uses a callback API and does not return a promise, so
// `await db.query(...)` waits for nothing -- the callback must be
// adapted into a Promise for the caller's await to work.
const processQuery = (query) => {
  return new Promise((resolve, reject) => {
    switch (query) {
      case 'regions':
      default:
        db.query(SELECT_REGIONS_QUERY, (err, rows) => {
          if (err) {
            // Reject instead of throwing inside the callback, so the
            // caller's try/catch (or .catch) actually sees the error.
            reject(err);
          } else {
            const res = JSON.stringify(rows);
            console.log("Stringify: " + res);
            resolve(res);
          }
        });
    }
  });
}
I am attempting to use NodeJS with the Tedious (http://pekim.github.io/tedious/) sql server plugin to make multiple database calls. My intent is to:
1. Open a connection
2. Start a transaction
3. Make multiple database (stored procedure) calls, which will not return any data.
4. Commit transaction (or roll back on error).
5. Close connection
Here is an example .js file, (without using a transaction) for NodeJS where I am attempting to make multiple database calls and it is failing with the error "Requests can only be made in the LoggedIn state, not the SentClientRequest state." Nothing I try resolves this issue.
Does anyone know how to resolve this?
var Connection = require('tedious').Connection;
var Request = require('tedious').Request;
var config = {
userName: 'login',
password: 'password',
server: '127.0.0.1',
options: { rowCollectionOnDone: true }
};
var max = 1;
for (var i = 0; i < max; i++) {
var connection = new Connection(config);
function executeStatement() {
// NOTE(review): `request` has no var/let/const -- it is an implicit
// global shared by every iteration.
request = new Request("select 42, 'hello world'", function (err, rowCount) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
});
request.on('row', function (columns) {
columns.forEach(function (column) {
console.log(column.value);
});
});
request.on('doneInProc', function (rowCount, more, rows) {
});
request.on('doneProc', function (rowCount, more, rows) {
console.log('statement completed!')
// NOTE(review): re-submitting the same request on the same
// connection while it is still being processed is what raises
// "Requests can only be made in the LoggedIn state, not the
// SentClientRequest state".
connection.execSql(request);
});
request.on('returnStatus', function (status) {
console.log('statement completed!')
});
connection.execSql(request);
}
// NOTE(review): `err` is ignored here -- see the answer that follows.
connection.on('connect', function (err) {
// If no error, then good to go...
executeStatement();
});
}
console.log('Done!');
You're trying to execute a statement on a connection that is not established. You're missing an error handler before you call executeStatement.
// Guard the connect callback: only run statements once the connection
// has actually reached the LoggedIn state.
connection.on('connect', function (err) {
if (err) {
console.log(err); // replace with your code
return;
};
// If no error, then good to go...
executeStatement();
});
Edit:
How to execute multiple statements in a transaction in serial:
// Execute the statements one at a time on the shared transaction:
// async.mapSeries guarantees the single connection never has two
// requests in flight (which is what causes the SentClientRequest error).
var statements = ["select 1", "select 2", "select 3"];
var transaction = new sql.Transaction(connection);
transaction.begin(function(err) {
// ... error checks
async.mapSeries(statements, function(statement, next) {
var request = new sql.Request(transaction);
request.query(statement, next);
}, function(err, results) {
// ... error checks
// Commit only after every statement has completed.
transaction.commit(function(err, recordset) {
// ... error checks
console.log("Transaction commited.");
});
});
});
You should use tedious connection pools to create a pool of multiple connections.
For node js, a npm module is available at : https://www.npmjs.com/package/tedious-connection-pool
For every new value inside for loop you can acquire a new connection and use connection.reset on doneInProc event.
The case which you have been doing is performing 1st iteration of for loop correctly(LoggedIn State) and as you have proceeded without closing or releasing the connection you are using same connection object (SentClientRequest state).
Hence the same object is at final state when the code reaches second iteration of for loop.
Hope it resolves your issue
you can use Tedious Connection pools https://github.com/pekim/tedious-connection-pool
As #zevsuld and #mannutech said, tedious-connection-pool will enable multiple connections, and prevent erring out when simultaneous requests come into your server.
Below is a generic example that allows you to write multiple queries within one connection pool, and expose them for use in your api. I'm just adding this in case others come along who are trying to accomplish this type of implementation.
const ConnectionPool = require('tedious-connection-pool');
const path = require('path');
// Load DB credentials from the project-root .env file.
require('dotenv').config({
path: path.join(__dirname, '../../.env')
})
let Request = require('tedious').Request;
// Pool sizing: keep at least 10 warm connections, cap at 50.
let poolConfig = {
min: 10,
max: 50,
log: true
}
let connectionConfig = {
userName: process.env.user,
password: process.env.password,
server: process.env.server
};
//create the pool
let pool = new ConnectionPool(poolConfig, connectionConfig);
pool.on('error', function(err) {
console.error(err);
});
// At this point in the code, we have established a connection pool. If you run node, you'll see it log out all the connections to your database.
// Let's add some methods which your server might use in fulfilling requests to various endpoints.
// Fetch rows from TABLE whose column1 starts with query.param, then
// hand the collected rows to cb(records, res).
let query1 = (cb, res, query) => {
  // acquire a connection:
  pool.acquire(function(err, connection) {
    if (err) {
      console.error(err);
      return;
    } else {
      const { TYPES } = require('tedious');
      // Bind the user-supplied search term as a parameter instead of
      // splicing it into the SQL string (prevents SQL injection).
      let sql_query = "SELECT column1, colum2 from TABLE WHERE column1 LIKE @param + '%%' ORDER BY column1 ASC";
      // `const` keeps the request local to this call; the original
      // implicit global was shared (and clobbered) across concurrent
      // requests.
      const request = new Request(sql_query, (err, rowCount) => {
        // Always return the connection to the pool -- the original
        // early-returned on error and leaked the pooled connection.
        connection.release();
        if (err) {
          console.log(err);
        }
      });
      request.addParameter('param', TYPES.VarChar, query.param);
      let records = [];
      request.on("row", function(columns) {
        let rowArray = [];
        columns.forEach(function(column) {
          rowArray.push(column.value);
        });
        records.push(rowArray);
      });
      request.on("doneInProc", function() {
        cb(records, res);
      });
      // lastly execute the request on the open connection.
      connection.execSql(request);
    }
  });
};
// Fetch rows from TABLE2 whose column3 starts with query.param, then
// hand the collected rows to cb(records, res).
let query2 = (cb, res, query) => {
  // acquire a connection:
  pool.acquire(function(err, connection) {
    if (err) {
      console.error(err);
      return;
    } else {
      const { TYPES } = require('tedious');
      // Bind the user-supplied search term as a parameter instead of
      // splicing it into the SQL string (prevents SQL injection).
      let sql_query = "SELECT column3, colum4 from TABLE2 WHERE column3 LIKE @param + '%%' ORDER BY column3 ASC";
      // `const` keeps the request local to this call; the original
      // implicit global was shared (and clobbered) across concurrent
      // requests.
      const request = new Request(sql_query, (err, rowCount) => {
        // Always return the connection to the pool -- the original
        // early-returned on error and leaked the pooled connection.
        connection.release();
        if (err) {
          console.log(err);
        }
      });
      request.addParameter('param', TYPES.VarChar, query.param);
      let records = [];
      request.on("row", function(columns) {
        let rowArray = [];
        columns.forEach(function(column) {
          rowArray.push(column.value);
        });
        records.push(rowArray);
      });
      request.on("doneInProc", function() {
        cb(records, res);
      });
      // lastly execute the request on the open connection.
      connection.execSql(request);
    }
  });
};
// Expose the query helpers to the rest of the API:
module.exports = {
query1,
query2
}