How to properly use Parse.query.equalTo? - javascript

In my web application, when a user logs in they should see only the documents they created. However, when I test the application I see all documents, including ones I didn't create. I believe this is because I am not using Parse.query.equalTo correctly.
Here is my code:
router.post('/login', function (req, res) {
    var tempname = req.body.username;
    var username = tempname.toLowerCase();
    var password = req.body.password;
    var login = Promise.promisify(Parse.User.logIn);
    var promerror = Promise.reject("error");
    var Files = Parse.Object.extend("File");
    var query = new Parse.Query(Files);
    query.include('user');
    var finds = query.find;
    var doc = {};
    var name, url;
    return login(username, password).catch(function (error) {
    }).then(function (user, error) {
        query.equalTo("user", Parse.User.current());
        console.log(JSON.stringify(Parse.User.current()));
        if (user) {
            res.render('logins', { Message: "Username and/or password don't match" });
        }
        var temp;
    }).catch(function (err) {
        res.render('logins');
    }).then(query.find(function (results) {
        for (var i = 0; i < results.length; i++) {
            var object = results[i];
            var userID = object.get('user');
            console.log("current user");
            var codefile = object.get('javaFile');
            temp = codefile.name();
            name = temp.split("-").pop();
            url = codefile.url();
            doc[name] = url;
        }
    })).catch(function (error) {
        promerror(error);
    }).finally(function () {
        res.render('filename', { title: 'File Name', FIles: JSON.stringify(doc) });
    }).done();
});
When the user logs in, the route should query the database, retrieve all the documents that user made, and save them to doc; doc is then sent to the client side to be displayed.
Here is the code for saving a file as the user:
newFile.save({ user: Parse.User.current(), fileName: newnames, javaFile: parseFile })

I'm not sure, but try setting the File's user field to the object id instead of the user object itself. Parse objects will sometimes return a pointer, sometimes the id, etc., so you might be getting unexpected behavior where you set the field to Parse.User.current(). Besides that, it looks like you're using equalTo correctly.
Otherwise, basically the only way to debug cloud code is to console.log and view the logs.
As a side note, you should be setting the ACL of each File before you save it... that way, a user wouldn't be able to view another user's files anyway (unless these files are meant to be public).
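For example, a minimal sketch of setting a per-user ACL before saving might look like this (it reuses the variables from the save call above; the exact SDK version is an assumption):

// Hedged sketch: restrict each File to the user who created it.
// newFile, newnames and parseFile come from the question; everything else is illustrative.
var acl = new Parse.ACL(Parse.User.current()); // only the current user may read/write
newFile.setACL(acl);
newFile.save({
    user: Parse.User.current(),
    fileName: newnames,
    javaFile: parseFile
});

With an ACL like this in place, a query that accidentally matches other users' files would still only return the rows the logged-in user is allowed to read.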

Related

How do I refresh data from json-file with charts.js?

I'm attempting to make a live graph with data from my sensors, and currently that's working, but I'm not sure how to get it to update by itself.
I'm using node/express/pug, and I have a backend which listens for MQTT messages and appends them to a json file, keeping the latest 30 values or so.
The frontend's routing file index.js parses the JSON and structures it into arrays, which are passed to the Pug template so the JavaScript inside it can access them.
My question is: in order for this to reload dynamically, can I still do it like this, through index.js, or do I need another approach? Ideally there would be some sort of notification from the backend to the frontend when the file is updated, but a timer would be perfectly adequate. Updates arrive roughly every 1000 ms.
I am quite a beginner in JavaScript/web development.
My index.js file (the last part is the relevant part):
var router = express.Router();
var db;
var fs = require('fs');
var moment = require('moment');

/* GET home page. */
router.get('/', function(req, res, next) {
    res.render('index', { title: 'Sematek StrainMonitor' });
    next();
});

// when rawdata is loaded, load client
router.get('/rawdata', function(req, res) {
    var dateFrom = req.query.from;
    var dateTo = req.query.to;
    var dateFromEpoch = moment(dateFrom).unix();
    var dateToEpoch = moment(dateTo).unix();
    let searchQuery;
    if ((dateFromEpoch) && (dateToEpoch)) {
        searchQuery = "{epoch : { $gt : " + dateFromEpoch + ", $lt : " + dateToEpoch + "}}";
    };
    req.conn.then(client => client.db('test').collection('sensor0').find({searchQuery}).toArray(function(err, docs) {
        if (err) { console.error(err) }
        if (!docs) {
            console.log("oops.. didn't retrieve any docs from the mongodb serv");
        }
        const dataPairsDB = docs.map(e => ({x: e.epoch, y: e.data}));
        let datasetDB = [];
        let labelsetDB = [];
        dataPairsDB.forEach((num, index) => {
            labelsetDB.push(moment.unix(dataPairsDB[index].x).format('HH:mm:ss'));
            datasetDB.push(dataPairsDB[index].y);
        });
        console.log(req.query)
        // gets CloudMQTT values from JSON file and converts it into two arrays for Chart.js display
        let data = JSON.parse(fs.readFileSync("./json/latest-value.json"));
        const dataPairs = data.readings.map(e => ({x: e.epoch, y: e.data}));
        let dataset = [];
        let labelset = [];
        dataPairs.forEach((num, index) => {
            labelset.push(moment.unix(dataPairs[index].x).format('HH:mm:ss'));
            dataset.push(dataPairs[index].y);
        });
        if (req.cookies.isLoggedIn == 'true') {
            res.render('rawdata', {
                docs: docs,
                datasetDB: JSON.stringify(datasetDB),
                labelsetDB: JSON.stringify(labelsetDB),
                dataset: JSON.stringify(dataset),
                labelset: JSON.stringify(labelset)
            });
        } else {
            res.redirect(401, '/');
        }
    }))
});
From my rawdata.pug file:
canvas(id="line-chart" )
script(src='https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.5.0/Chart.min.js')
script(type='text/javascript').
var dataset = JSON.parse('!{dataset}');
var labelset = JSON.parse('!{labelset}');
script(src='/scripts/charter.js')
Just have the webpage subscribe to the same MQTT topic (using MQTT over WebSockets; see the Paho JavaScript client) and update live, without having to go back to the backend and pull all the historical data every time a new value is added.
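A minimal browser-side sketch with the Paho client might look like the following (the broker host, port, topic name, the lineChart variable from charter.js, and a browser build of moment are assumptions, not taken from the question):

// Hedged sketch: push new readings straight into the Chart.js instance.
// Broker host/port, topic name and the chart variable are placeholders.
var client = new Paho.MQTT.Client('broker.example.com', 9001, 'chart-client-' + Date.now());

client.onMessageArrived = function (message) {
    var reading = JSON.parse(message.payloadString); // e.g. { epoch: ..., data: ... }
    lineChart.data.labels.push(moment.unix(reading.epoch).format('HH:mm:ss'));
    lineChart.data.datasets[0].data.push(reading.data);
    lineChart.update();
};

client.connect({
    onSuccess: function () {
        client.subscribe('sensors/sensor0'); // same topic the backend listens on
    }
});

The initial page load can keep working exactly as it does now; the subscription only takes care of appending values that arrive after the page has rendered.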

AWS managing users via Cognito

I have used many services from AWS; some were easy, some a bit difficult. After two days of searching everywhere, I can say the documentation for this service is misleading.
I have a simple task: change a user attribute in a Cognito user pool. To make things easy, I just need to change an email address, and that's it. The application is a back office (Express/Node) where admins can change a user's email.
After reading and reading, I am getting more confused. Apparently the aws-sdk library, the one I am familiar with, has some Cognito APIs I could use. Getting a working example of how to use them turned out to be a nightmare.
Then I found out there is a library, but it is only meant to be used on the client side. After some tweaks I got it running in Node.js; the tweak was to expose a fetch library in the global Node.js namespace.
I was able to add a new user, but for all my efforts I can't change any of the attributes (like the email). The library wants me to provide a Username (a real user) and a password.
I do have a Username (in this case an email), but I don't have the password.
All I need to do is connect to the service and send the new attribute for the user, and that's it.
This is what I have so far (mainly hacked-together code samples from a variety of places), and I can't get it to work:
var poolData = {
    UserPoolId : 'euXXXXXXX',
    ClientId : 'XXXXXXXXXXXX'
};
var userPool = new AmazonCognitoIdentity.CognitoUserPool(poolData);
OK, the above line makes a connection to the existing user pool.
Now if I were to do this:
var attributeList = [];

var dataEmail = {
    Name : 'email',
    Value : 'email#mydomain.com'
};
var dataPhoneNumber = {
    Name : 'phone_number',
    Value : '+15555555555'
};

var attributeEmail = new AmazonCognitoIdentity.CognitoUserAttribute(dataEmail);
var attributePhoneNumber = new AmazonCognitoIdentity.CognitoUserAttribute(dataPhoneNumber);
attributeList.push(attributeEmail);
attributeList.push(attributePhoneNumber);

userPool.signUp('username', 'password', attributeList, null, function(err, result) {
    if (err) {
        alert(err.message || JSON.stringify(err));
        return;
    }
    cognitoUser = result.user;
    console.log('user name is ' + cognitoUser.getUsername());
});
I can see in the AWS console that the user is being added. Great.
Now how to change the attributes of the existing user?
All of the examples, like this one and this one, suggest the following:
Use case 8. Update user attributes for an authenticated user.
var attributeList = [];
var attribute = {
    Name : 'nickname',
    Value : 'joe'
};
var attribute = new AmazonCognitoIdentity.CognitoUserAttribute(attribute);
attributeList.push(attribute);

cognitoUser.updateAttributes(attributeList, function(err, result) {
    if (err) {
        alert(err.message || JSON.stringify(err));
        return;
    }
    console.log('call result: ' + result);
});
The problem here is that I can't authenticate the user. I don't know the user's password, only their email. This is, after all, a simple back-office program where I just need to change a user's email.
What can I do in this case?
To update the attributes of a Cognito user pool user as an admin, you should use the adminUpdateUserAttributes function from the aws-sdk class CognitoIdentityServiceProvider.
let AWS = require('aws-sdk');
let cognitoISP = new AWS.CognitoIdentityServiceProvider({ region: 'your-region-here' });

function updateUserAttribute(name, value, username, userPoolId) {
    return new Promise((resolve, reject) => {
        let params = {
            UserAttributes: [
                {
                    Name: name,   // name of attribute
                    Value: value  // the new attribute value
                }
            ],
            UserPoolId: userPoolId,
            Username: username
        };
        cognitoISP.adminUpdateUserAttributes(params, (err, data) => err ? reject(err) : resolve(data));
    });
}
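A minimal usage sketch might look like this (the pool id, username and new address are placeholders; the calling credentials need the cognito-idp:AdminUpdateUserAttributes permission):

// Hedged usage sketch: change a user's email as an admin.
updateUserAttribute('email', 'new-address@example.com', 'user@example.com', 'eu-west-1_XXXXXXXXX')
    .then(data => console.log('email updated', data))
    .catch(err => console.error('update failed', err));

Depending on your pool settings you may also want a second call that sets email_verified, so the changed address is not left in an unverified state.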

Node/Express Redirect After POST

I have an Express POST route which updates a MongoDB database based on data sent to it from a DataTables Editor inline field. So far so good, and the database is updating fine. Where it falls over is after the update query executes successfully and I want to redirect back to the originating page. The Node console seems to indicate that the GET route to the originating page is being called, but the page itself doesn't load.
The code for the POST function is as follows:
router.post('/edit', function(req, res) {
    var theKeys = _.keys(req.body);
    var theMeat = theKeys[1];
    var bits1 = theMeat.split("][");
    // this is the updated value
    var newVal = req.body[theMeat];
    // this is the row _id
    var cleanId = bits1[0].replace("data[row_", "");
    // this is the field to update
    var cleanRow = bits1[1].replace("]", "");
    // cast the id string back to an ObjectId
    var updId = new ObjectId(cleanId);
    var query = {};
    query[cleanRow] = newVal;

    var MongoClient = mongodb.MongoClient;
    var url = 'mongodb://localhost:27017/gts';
    MongoClient.connect(url, function(err, db) {
        if (err) {
            console.log("Error connecting to the server", err);
        } else {
            //console.log("Connected for Edit");
            var collection = db.collection('events');
            collection.updateOne({"_id": updId}, {$set: query}, function(err, result) {
                if (err) {
                    console.log("Error adding message", err);
                } else if (result) {
                    //console.log("Update attempted", result.result);
                    res.redirect('/admin');
                } else {
                    console.log("Unknown error");
                }
            });
        }
    });
});
The GET route works fine when called directly, but seems to halt when called like this.
I'm pretty sure that there's nothing in the POST route that is causing this, as the same thing happens when I strip out everything except the redirect itself:
router.post('/test', function(req, res) {
    res.redirect('/admin');
});
Please help!

Get JSON from javascript file and display in route file using NodeJS

I am using Node.js with Express. I currently have a script that produces an array of objects from the Google API. I need to take that JSON data and use it in my templates. How can I call the function in my script from my route file?
This is my script file:
var Spreadsheet = require('edit-google-spreadsheet');

Spreadsheet.load({
    debug: true,
    spreadsheetId: '1eWmSV4Eq-se4gZSvBfW-J-lEOLwNopEfMavZByJ-qD8',
    worksheetId: 'owrromd',
    // 1. Username and Password
    username: 'user',
    password: 'pass',
}, function sheetReady(err, spreadsheet) {
    // use spreadsheet!
    spreadsheet.receive(function(err, rows, info) {
        if (err) throw err;
        var announcementArray = [];
        //console.log(rows);
        for (x in rows) {
            var eachObject = rows[x]
            var side = eachObject['1'];
            //console.log(side);
            var type = eachObject['2'];
            //console.log(type);
            var announcement = eachObject['3'];
            //console.log(announcement);
            var announcementItem = {};
            announcementItem.side = side;
            announcementItem.type = type;
            announcementItem.announcement = announcement;
            announcementArray.push(announcementItem);
        }
        announcementArray.shift();
        console.log(announcementArray);
    });
});
This is my route js file:
module.exports = function(app) {
    app.get('/', function(req, res) {
        res.render('index', { title: "Home page", description: "The is the description" });
    });
}
Change the content of the script file; let's call it loadSheet.js:
var Spreadsheet = require('edit-google-spreadsheet');

function loadSheet() {
    Spreadsheet.load({
        debug: true,
        spreadsheetId: '1eWmSV4Eq-se4gZSvBfW-J-lEOLwNopEfMavZByJ-qD8',
        worksheetId: 'owrromd',
        // 1. Username and Password
        username: 'user',
        password: 'pass',
    }, function sheetReady(err, spreadsheet) {
        // use spreadsheet!
        spreadsheet.receive(function(err, rows, info) {
            if (err) throw err;
            var announcementArray = [];
            //console.log(rows);
            for (x in rows) {
                var eachObject = rows[x]
                var side = eachObject['1'];
                //console.log(side);
                var type = eachObject['2'];
                //console.log(type);
                var announcement = eachObject['3'];
                //console.log(announcement);
                var announcementItem = {};
                announcementItem.side = side;
                announcementItem.type = type;
                announcementItem.announcement = announcement;
                announcementArray.push(announcementItem);
            }
            announcementArray.shift();
            console.log(announcementArray);
        });
    });
}

// Export it to module
exports.loadSheet = loadSheet;
Then in the route js:
var ls = require('./loadSheet.js'); // Load the module, get the name of the script file

app.get('/', function(req, res) {
    res.render('index', { title: "Home page", description: "The is the description" });
    ls.loadSheet();
});
So you'd adapt the module the other response created. But you are going to need to give a callback to loadSheet. I am cutting out the main body of that function for clarity.
var Spreadsheet = require('edit-google-spreadsheet');

function loadSheet(theCallback) { // take a callback here
    Spreadsheet.load({...yourdata...}, function sheetReady(...) {
        // create your announcementArray
        // then call the callback
        theCallback(undefined, announcementArray);
    });
}
//Export it to module
exports.loadSheet = loadSheet;
Then, from your routes, you can get it like so:
var ls = require('./loadsheet.js'); // assumes in same dir as routes

app.get('/', function(req, res) {
    ls.loadSheet(function(err, result) {
        res.render('myTemplate', result);
    });
});
I am going to assume you can take care of getting the result data into your template. You can look in the index template to see how it pulls in the data. I don't know whether you are using Jade or EJS.
Note, this is all sort of hackish but addresses your functional question. Let me know if you need a little further direction.
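For instance, a minimal sketch of handing the result to a template might look like this (the announcements template name and the error handling are assumptions, not part of the answer above):

// Hedged sketch: pass the loaded rows into a template under a named key.
ls.loadSheet(function (err, result) {
    if (err) return res.status(500).send('Could not load spreadsheet');
    res.render('announcements', { announcements: result });
});

Passing the array under a key (rather than as the whole locals object) makes it easy to loop over announcements inside the Jade or EJS template.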

How to pass data to Mongodb using Node.js, websocket and total.js

I am trying to pass data to MongoDB using WebSockets and total.js.
In my homepage.html I can get the user input and connect to the server via WebSocket after clicking the Save button.
default.js holds my server-side code. At this point the app has received the user input and connected to the server correctly, but how can I save the data to MongoDB now?
This is my homepage.html
<br />
<div>
    <input type="text" name="message" placeholder="Service" maxlength="200" style="width:500px" />
    <button name="send">Save</button>
</div>
<br />

<script type="text/javascript">
    var socket = null;

    $(document).ready(function() {
        connect();
        $('button').bind('click', function() {
            if (this.name === 'send') {
                console.log(send());
                return;
            }
        });
    });

    function connect() {
        if (socket !== null)
            return;
        socket = new WebSocket('ws://127.0.0.1:8000/');
        socket.onopen = function() {
            console.log('open');
        };
        socket.onmessage = function(e) {
            var el = $('#output');
            var m = JSON.parse(decodeURIComponent(e.data)).message;
            el.val(m + '\n' + el.val());
        };
        socket.onclose = function(e) {
            // e.reason ==> total.js client.close('reason message');
            console.log('close');
        };
    }

    function send() {
        var el = $('input[name="message"]');
        var msg = el.val();
        if (socket !== null && msg.length > 0)
            socket.send(encodeURIComponent(JSON.stringify({ message: msg })));
        el.val('');
        return msg;
    }
</script>
This is my default.js
exports.install = function(framework) {
    framework.route('/', view_homepage);
    framework.route('/usage/', view_usage);
    framework.websocket('/', socket_homepage, ['json']);
};

function view_usage() {
    var self = this;
    self.plain(self.framework.usage(true));
}

function view_homepage() {
    var self = this;
    self.view('homepage');
}

function socket_homepage() {
    var controller = this;

    controller.on('open', function(client) {
        console.log('Connect');
    });

    controller.on('message', function(client, message) {
        console.log(message);
        /*
        var self = this;
        var message = MODEL('message').schema;
        var model = self.body;
        var message = new message({ message: model.message }).save(function(err) {
            if (err)
                self.throw500(err);
            // Read all messages
            message.find(self.callback());
        });
        */
    });

    controller.on('error', function(error, client) {
        framework.error(error, 'websocket', controller.uri);
    });
}
Any help, please!
Here is the complete project.
---Update---
This is the function I use to save data to MongoDB, but it doesn't give any error and also doesn't save the data to the database. I'm not sure whether my code is right or wrong:
controller.on('message', function(client, message) {
    console.log(message);
    /*
    var self = this;
    var message = MODEL('message').schema;
    var model = self.body;
    var message = new message({ message: model.message }).save(function(err) {
        if (err)
            self.throw500(err);
        // Read all messages
        message.find(self.callback());
    });
    */
});
This is my mongoose.js:
var mongoose = require('mongoose');
mongoose.connect('mongodb://totaldemo:123456#ds029979.mongolab.com:29979/totaldemo');
global.mongoose = mongoose;
This is my user.js
var userSchema = mongoose.Schema({ user: String})
exports.schema = mongoose.model('user', userSchema,'user');
exports.name = 'user';
I don't know the total.js framework at all, but I already see some issues with the plain JavaScript.
First of all, I suggest you set up your model like this:
var mongoose = require('mongoose');
var Schema = mongoose.Schema;

var userSchema = new Schema({
    user: String
});

module.exports = mongoose.model('User', userSchema);
and then in the controller, when you import it:
var User = require('path/to/user/file')
You can use it like this straight away:
User.find()
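For instance, a minimal sketch of using that model from plain Node might look like this (the file path, connection string and callback-style mongoose API are assumptions matching the mongoose era used in the question):

// Hedged sketch: save and read back documents with the User model defined above.
var mongoose = require('mongoose');
var User = require('./models/user'); // placeholder path

mongoose.connect('mongodb://localhost/totaldemo'); // placeholder connection string

// create and save a document
new User({ user: 'alice' }).save(function (err, saved) {
    if (err) return console.error(err);
    console.log('saved', saved);
});

// read documents back
User.find({}, function (err, users) {
    if (err) return console.error(err);
    console.log(users);
});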
Also, I totally don't get what you are doing later.
You defined a user model and exported nothing more than a string. The only thing this does is that when you import that user file into a variable User, User.name will equal the string 'user'. So in your example it would be:
var User = require('path/to/user/model/file')
console.log(User.name) // prints 'user'
and nothing more! There are no models attached to that export. Maybe that's how total.js works, but I very much doubt it.
Later on, you try to use a message model. Where does it come from? You defined a user model, not a message model.
Another thing: as I stated, I don't know total.js, but I doubt it asks you to define var model and then never use that variable.
I strongly suggest using plain Node with mongoose first, then trying to integrate it with any full-stack framework.
For sure this is not a solution, but maybe it points out some problems in your code and will help.
EDIT:
I looked quickly at total.js, and it seems you really should export a string (which is a little weird and does magic stuff :)), but it's not mongoose, and I guess it will only work with the native total.js model solution. You can't use mongoose and total.js like that. I don't know how much skipping the native total.js model system ruins the framework's other options, but it's probably safer to use the native one.
Honestly, I don't have time to look deeper into the docs, but Google turns up nothing about SQL or even Mongo inside the total.js docs... so you'll have to figure it out :)
EDIT 2: I found https://github.com/totaljs/examples/tree/master/mongoose and it looks weird... Check whether that example works (it looks like you've seen it, since your code is similar :)). Check that your mongod is running and that you can connect from plain Node...
Honestly, sorry, I surrender. Total.js has too much magic and abstraction for me to help you out with this :(. I hope you will find your answer.
