I wrote a database population script (in JavaScript) so that I could easily run it both on my local machine and on the server. I want to run it from the command line so that all I have to do is push it to the server/gear and execute it. The script runs fine locally, but when I try to run it on the OpenShift gear it gives the following error:
MongoDB shell version: 2.4.9
connecting to: 127.XX.XXX.X:27017/admin
Sat Sep 12 12:20:25.979 count failed: { "ok" : 0, "errmsg" : "unauthorized" }
at src/mongo/shell/query.js:180
failed to load: ./Create.js
I'm trying to execute the command:
[xxxxxx-xxxxxx.rhcloud.com scripts]\> mongo ./Create.js
I have included the contents of my file below. I left out all the document creation stuff; just know that I add documents to an array.
// Switch to the DB we want
var connection;
try {
    // Try to connect to local first
    connection = new Mongo();
}
catch(err) {
    // If error, connect to OpenShift server
    connection = new Mongo('127.XX.XXX.X:27017');
}
db = connection.getDB('xxxxxxx');
// Create array to store documents
var newDocs = [];
...
// Documents are added to newDocs here
...
// If any new documents exist, insert them
if(newDocs.length) {
    var bulk = db.xxxxxxx.initializeUnorderedBulkOp();
    newDocs.forEach(function (doc) {
        bulk.insert(doc);
    });
    bulk.execute();
    print('NEW DOCS INSERTED');
}
else {
    print('NO NEW DOCS');
}
It looks like you are missing the username/password to connect to your MongoDB database on OpenShift. Look at the error you got: "unauthorized".
I used db.auth('username', 'password'); to authenticate. Ultimately, I added a variable to the script to track whether I was running it locally or on the server. Then I used a variable for the password so that the DB password wouldn't just be hanging out in the script; I pass the password in from the command line with the --eval option. Here is the altered head of the script:
// Switch to the DB we want
var connection;
var isLocal = true;
try {
    // Try to connect to local first
    connection = new Mongo();
}
catch(err) {
    // If error, connect to OpenShift server
    connection = new Mongo('127.XX.XXX.X:27017');
    isLocal = false;
}
db = connection.getDB('xxxxxxx');
if(!isLocal) {
    // If on server, authenticate
    db.auth('username', password);
}
// Create array to store documents
var newDocs = [];
Then, to run the script from the command line on the server, I use the following:
mongo --eval "var password = 'xxxxxxx';" Create.js
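If you'd rather not hard-code the username either, the same --eval approach can carry it as well; a small variation (just a sketch, assuming the script reads a username variable and calls db.auth(username, password) instead of the hard-coded 'username'):

    mongo --eval "var username = 'xxxxxxx'; var password = 'xxxxxxx';" Create.js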
I have a new situation. I had a JavaScript program running successfully on an Arduino YUN board that basically queried a MySQL server located on a server with an IP address (aaa.bbb.ccc.ddd). I am trying to replace the Arduino YUN with a Raspberry Pi 4. Here is what I did to run this JavaScript on the Raspberry Pi: I installed all the modules that were on the Arduino YUN board, and all modules and packages installed successfully. Now I am running into issues when trying to access the MySQL server from the Raspberry Pi. The script gives null output, and I don't know how to connect the Raspberry Pi to the MySQL server, or whether I made all the configurations correctly as on my old Arduino YUN board.
Here is the code:
var connect2mysql = require('./remote_mysql_connection.js');
var queryMySql = "SELECT product FROM catalog WHERE product_id = 123;";
console.log(connect2mysql.query(queryMySql));
Here is the remote_mysql_connection.js script that I am importing:
remote_mysql_connection.js
var shell = require('shelljs');

module.exports = {
    user: 'test',
    host: 'aa.bbb.cc.ddd',
    password: 'blahblah',
    database: 'geeko',
    async: true,

    /* Parses MySQL text output into an array of row objects. */
    parseOutput: function(output)
    {
        var rows = [];
        var outputLines = output.match(/[^\r\n]+/gm);
        console.log(outputLines);
        var columnNames = outputLines[0].split('\t'); //first line of output contains column headers
        for(var n=1; n<outputLines.length; n++)
        {
            var row = {};
            var fields = outputLines[n].split('\t'); //the values for
            for(var c=0; c<columnNames.length; c++)
            {
                row[columnNames[c]] = fields[c];
            }
            rows.push(row);
        }
        return rows;
    },

    /* Executes a SQL query. After the query is finished, it calls the passed callback function with the result. */
    query: function(query, callback)
    {
        var sql = "use "+this.database+"; "+query;
        var options = {silent:true, async:this.async};
        var command = 'mysql -u '+this.user+' -h '+this.host+' -p'+this.password+' -e "'+sql+'"';
        //shell.exec() runs the command, the output data is processed by the function supplied below
        shell.exec(command, options, function(code, output) {
            var result = [];
            console.log('Query:', query);
            console.log(output);
            //Parse the output if it is a select query
            if((/^(\s+)?select/i).test(query)) result = module.exports.parseOutput(output);
            if(callback) callback(result, output); //pass result back to user supplied callback function
        });
    }
};
Present output:
null
[ 2023-01-03T14:38:56.617Z ] 'Caught exception: TypeError: Cannot read property \'0\' of null'
TypeError: Cannot read property '0' of null
at Object.parseOutput (/home/pi/Documents/.../remote_mysql_connection.js:15:32)
at /home/pi/Documents/../remote_mysql_connection.js:44:63
at /home/pi/node_modules/shelljs/src/exec.js:145:9
at ChildProcess.exithandler (child_process.js:301:5)
at ChildProcess.emit (events.js:198:13)
at maybeClose (internal/child_process.js:982:16)
at Socket.stream.socket.on (internal/child_process.js:389:11)
at Socket.emit (events.js:198:13)
at Pipe._handle.close (net.js:607:12)
Compilation failed.
How can I check and confirm the connection between the Raspberry Pi 4 and a remote MySQL database located on a different server with an IP address?
UPDATE:
Interestingly, there seems to be a good connection between the RPi and the remote MySQL server (IP: 'aa.bbb.cc.ddd'). How do I know? I wrote a small Python script using the same credentials and the same MySQL query as the original program (which is JavaScript), and it ran successfully and gave the correct response. So it is just a question of having all the required packages and dependencies that support the MySQL connection through JavaScript. I still don't know why my JavaScript does not connect to the remote MySQL server when my Python script connects successfully.
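Since the module above shells out to the mysql command-line client via shelljs, one thing worth confirming is that that client is actually installed on the Pi (for example with which mysql); if the binary is missing, shell.exec gets no usable output and output.match(...) returns null, which would line up with the TypeError above. As a separate sanity check from Node.js itself, a minimal connectivity test can be written with the mysql npm package; this is only a sketch and assumes npm install mysql on the Pi (the package is not part of the original setup):

    // check_mysql.js - minimal connectivity test (assumes `npm install mysql`)
    var mysql = require('mysql');

    var connection = mysql.createConnection({
        host: 'aa.bbb.cc.ddd',
        user: 'test',
        password: 'blahblah',
        database: 'geeko'
    });

    connection.connect(function(err) {
        if (err) {
            console.error('Connection failed:', err.code, err.message);
            return;
        }
        console.log('Connected to MySQL');
        connection.query('SELECT product FROM catalog WHERE product_id = 123;', function(err, rows) {
            if (err) console.error('Query failed:', err.message);
            else console.log(rows);
            connection.end();
        });
    });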
I'm currently using the 'mongodb' package to connect to a local Mongo server, but the application keeps crashing after creating a user. The data is being written to the local database, but when the promise calls the callback function, the MongoDB package throws an error (authorization is not defined). Any ideas as to why this could be happening?
The server crashes inside the /auth/signup route. Essentially I parse the user input to check that the data provided by the user is in the correct format and then I create the structure that I'll write back to the database. I'm currently writing back a class called User with a structure like this:
class User {
    strategy = ...
    email = ...
    displayName = ...
    salt = ...
    password = ...
}
After checking that no other user with this particular email has been created, I create the new User instance and insert it into mongoDB as shown below:
db.collection('users').insertOne(user, (err, result) => {
    if(err){ return res.status(400).send({ error: 'failed to store new user to DB'}); }
    //respond to request indicating the user was created
    return res.status(200).send({token: authorization.tokenForUser(user)});
});
The application crashed after writing but before the callback. This is the error I'm getting:
.../auth/node_modules/mongodb/lib/utils.js:132
throw err;
^
ReferenceError: authorization is not defined
at db.collection.insertOne (.../auth/routes/authRoutes.js:93:45)
at result (.../auth/node_modules/mongodb/lib/utils.js:410:17)
at executeCallback (.../auth/node_modules/mongodb/lib/utils.js:402:9)
at insertDocuments (.../auth/node_modules/mongodb/lib/operations/collection_ops.js:891:19)
at handleCallback (.../auth/node_modules/mongodb/lib/utils.js:128:55)
at coll.s.topology.insert (.../auth/node_modules/mongodb/lib/operations/collection_ops.js:863:5)
at .../auth/node_modules/mongodb-core/lib/connection/pool.js:397:18
at process._tickCallback (internal/process/next_tick.js:150:11)
I believe the error has to do with the way I'm using the MongoDB pool I initialize when the application starts running. Any help would be much appreciated.
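For what it's worth, the stack trace points at the authorization identifier used inside the insertOne callback (authRoutes.js line 93), and a ReferenceError there usually just means that variable is never defined or required in that file, independent of how the MongoDB pool is set up. A minimal sketch of the kind of import that would define it (the module path is purely hypothetical):

    // at the top of routes/authRoutes.js - the path below is a hypothetical example
    const authorization = require('../services/authorization'); // must export tokenForUser(user)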
Can someone please take a look at this code and tell me why I am getting the error message below? I have looked at this every which way and can't understand why or where it's breaking.
The code below shows hard-coded values for docDbClient, but I have also used the "process.env.Document..." environment variables to no effect. This is largely taken from the Node Botbuilder samples found online. It is supposed to connect to a Cosmos DB database and should just start up. Using the Bot Framework Emulator produces the error message at the command prompt where the server is running; trying via the published web page, it just breaks with no error message.
Thank you, in advance!
var restify = require('restify');
var builder = require('botbuilder');
var botbuilder_azure = require("botbuilder-azure");
var builder_cognitiveservices = require('botbuilder-cognitiveservices');

// Setup Restify Server
var server = restify.createServer();
server.listen(process.env.port || process.env.PORT || 3978, function() {
    console.log('%s listening to %s', server.name, server.url);
});

// Create chat connector for communicating with the Bot Framework Service
var connector = new builder.ChatConnector({
    appId: process.env.MicrosoftAppId,
    appPassword: process.env.MicrosoftAppPassword,
    openIdMetadata: process.env.BotOpenIdMetadata
});

// Listen for messages from users
server.post('/api/messages', connector.listen());

var docDbClient = new botbuilder_azure.DocumentDbClient({
    host: 'https://xxxxx.table.cosmosdb.azure.com:443',
    masterKey: 'xxxxxxxxxxxxxxxxxxxxxxxxxx',
    database: 'TablesDB',
    collection: 'botdata'
});

var tableStorage = new botbuilder_azure.AzureBotStorage({ gzipData: false }, docDbClient);

// Create your bot with a function to receive messages from the user
var bot = new builder.UniversalBot(connector, function(session) {
    session.send('You said: %s', session.message.text);
    session.endDialog();
}).set('storage', tableStorage); // Register in Azure Storage
Error:
Error: Failed to initialize azure table client. Error: Error: Error Code: 400 Error Body: {"odata.error":{"code":"BadRequest","message":{"lang":"en-us","value":"One of the input values is invalid.\r\nActivityId: 676a8f3c-f287-490c-9062-021cb29ff78a, documentdb-dotnet-sdk/1.20.0 Host/64-bit MicrosoftWindowsNT/6.2.9200.0\nRequestID:676a8f3c-f287-490c-9062-021cb29ff78a\n"}}}
at C:\...\Coffee-Bot\node_modules\botbuilder-azure\lib\AzureBotStorage.js:177:32
at C:\...\Coffee-Bot\node_modules\botbuilder-azure\lib\DocumentDbClient.js:15:17
at C:\...\Coffee-Bot\node_modules\botbuilder-azure\lib\DocumentDbClient.js:76:17
at C:\...\Coffee-Bot\node_modules\documentdb\lib\queryIterator.js:141:28
at C:\...\Coffee-Bot\node_modules\documentdb\lib\queryExecutionContext\proxyQueryExecutionContext.js:71:32
at C:\...\Coffee-Bot\node_modules\documentdb\lib\queryExecutionContext\defaultQueryExecutionContext.js:62:17
at C:\...\Coffee-Bot\node_modules\documentdb\lib\queryExecutionContext\defaultQueryExecutionContext.js:81:32
at C:\...\Coffee-Bot\node_modules\documentdb\lib\queryExecutionContext\defaultQueryExecutionContext.js:136:28
at successCallback (C:\...\Coffee-Bot\node_modules\documentdb\lib\documentclient.js:2360:33)
at C:\...\Coffee-Bot\node_modules\documentdb\lib\documentclient.js:2410:25
You seem to be mixing a Cosmos DB Table endpoint and a DocumentDB client instance, which explains the 400 Bad Request.
For DocumentDB API (note host has .documents. not .table.):
var docDbClient = new botbuilder_azure.DocumentDbClient({
    host: 'https://xxxxx.documents.cosmosdb.azure.com:443',
    masterKey: 'xxxxxxxxxxxxxxxxxxxxxxxxxx',
    database: 'botdocs',
    collection: 'botdata'
});
var tableStorage = new botbuilder_azure.AzureBotStorage({ gzipData: false }, docDbClient);
To use Azure Table storage for bot state (this is regular Table storage, as in storage account, not Cosmos DB Table API):
var azureTableClient = new azure.AzureTableClient(tableName, storageName, storageKey);
var tableStorage = new azure.AzureBotStorage({gzipData: false}, azureTableClient);
In theory, if you pass a Cosmos DB Table endpoint to azure.AzureTableClient(), you can use Cosmos as Table storage, since the Table APIs are compatible between Azure Storage and Cosmos, but I don't see any immediate benefit over the standard DocumentDB type.
Ref:
https://learn.microsoft.com/en-us/bot-framework/nodejs/bot-builder-nodejs-state-azure-cosmosdb
https://learn.microsoft.com/en-us/bot-framework/nodejs/bot-builder-nodejs-state-azure-table-storage
I have the following example code (on a Node.js server) that should insert data into an SQLite table and then run a child process which copies the SQLite database file to another directory. The problem is that the copied version does not contain the newly inserted data. When I set a timeout before executing the command, everything works, but I would prefer to use a callback or event.
const sqlite3 = require('sqlite3').verbose();
const db = new sqlite3.Database('test.db');
const exec = require('child_process').exec;

db.serialize(function() {
    var val = Date.now()/1000;
    db.run("INSERT INTO test (val) VALUES (?);", [val]);
    db.close();
    exec('/bin/cp -rf /path0/test.db /path1/');
});
As the documentation says about the close function:
Database#close([callback])
Closes the database.
callback (optional): If provided, this function will be called when the database was closed successfully or when an error occurred. The first argument is an error object. When it is null, closing succeeded. If no callback is provided and an error occurred, an error event with the error object as the only parameter will be emitted on the database object. If closing succeeded, a close event with no parameters is emitted, regardless of whether a callback was provided or not.
You should be able to provide a callback to the close function to be run after the db is closed. If I understand your code well, it should be something like this:
const sqlite3 = require('sqlite3').verbose();
const db = new sqlite3.Database('test.db');
const exec = require('child_process').exec;

db.serialize(function() {
    var val = Date.now()/1000;
    db.run("INSERT INTO test (val) VALUES (?);", [val]);
    db.close(() => { exec('/bin/cp -rf /path0/test.db /path1/') });
});
reference is here!
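As the quoted documentation notes, the close callback's first argument is an error object, so if you want the copy to run only when the database really closed cleanly, a slightly more defensive variant of the same line could look like this (just a sketch):

    db.close(function(err) {
        if (err) return console.error('close failed:', err);
        exec('/bin/cp -rf /path0/test.db /path1/');
    });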
I'm trying to understand and use the sync npm module, but I'm not sure how to change my functions below to match the sync format... (https://www.npmjs.com/package/sync)
Basically I'm trying to use input data (which is formed as a list on the client side) that I receive from the frontend (client) and send to Node.js via a socket. I tried to store it in my global variable 'query', but I learned that it doesn't get updated in time, so when I try to print 'query' outside of the socket function, it doesn't work.
It sounds like I should use the sync module, but I'm not quite sure how to implement that in my code... If anyone could give me an idea of how to change my functions below, it would be great. Thanks!
Receiving input data from frontend and sending it to node.js via socket
var server = app.listen(3001);
var socket = require('socket.io');
var io = socket(server);

var query = [];

// Register a callback function to run when we have connection
io.sockets.on('connection', newConnection);

function newConnection(socket) {
    console.log('new connection: ' + socket.id);
    socket.on('search', newSearch);

    function newSearch(final) {
        query.push(final);
        console.log(query[0]);
        console.log(Array.isArray(query[0])); //returns True
        console.log(query[0][0]); // this also works
        console.log(query[0][1]);
    }
}

console.log('print');
console.log(query);
// this only gets printed in the beginning as an empty array
Ultimately, I'm parsing that list of input data and concatenating it into my SQL select statement. Below is the DB portion of my code:
var mysql = require('mysql');

var connection = mysql.createConnection({
    host     : '~~~',
    user     : '~~~',
    password : '~~~',
    database : '~~~'
});

connection.connect();
console.log('mysql connected');

var sql = 'select' + '*' + 'from EBN ';
//ideally data in the 'query' list will concat with this sql string

connection.query(sql, function(err, rows, fields) {
    if(err){
        console.log(err);
    }
    else{
        fs.writeFileSync('search.json', JSON.stringify(rows), 'utf8');
    }
});
Firstly, you should wrap the code that does the query execution in a function, something like this:
var mysql = require('mysql');
var fs = require('fs'); // needed for the writeFileSync call below

var connection = mysql.createConnection({
    host     : '~~~',
    user     : '~~~',
    password : '~~~',
    database : '~~~'
});

connection.connect();
console.log('mysql connected');

function executeQuery(query) {
    var sql = 'select' + '*' + 'from EBN ';
    //do something with query and add it to the sql string
    connection.query(sql, function(err, rows, fields) {
        if(err){
            console.log(err);
        }
        else{
            fs.writeFileSync('search.json', JSON.stringify(rows), 'utf8');
        }
    });
}
This is necessary because you don't want to execute the query right after starting the server, but only after you have received the query message via the socket. Now you can call executeQuery(final) in your socket message handler.
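For example, the socket handler from the question could call it roughly like this (a sketch that keeps the original names):

    function newConnection(socket) {
        console.log('new connection: ' + socket.id);
        socket.on('search', function(final) {
            // run the query only once the client data has actually arrived
            executeQuery(final);
        });
    }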
You should learn how asynchronous programming and callbacks work in Node.js/JavaScript, and why you should use them as much as possible for server applications (e.g. use fs.writeFile instead of writeFileSync). You can use packages like sync to make life easier for you, but only when you know exactly what you want to do. Just throwing something with the name 'sync' in it at a problem that might be caused by asynchronicity is not going to work.
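For instance, the synchronous write inside the query callback could become a non-blocking one with the same arguments plus a completion callback:

    fs.writeFile('search.json', JSON.stringify(rows), 'utf8', function(err) {
        if (err) console.log(err);
        else console.log('search.json written');
    });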