I have a model: api/models/Agency.js
attributes: {
    // attributes for agency
    // One agency can have many owners
    owners: {
        collection: 'user',
        required: true,
        via: 'agencies'
    },
    employees: {
        collection: 'user',
        via: 'employer'
    }
}
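For reference, here is a sketch of what the corresponding api/models/User.js attributes would presumably look like, inferred from the via keys above (the User model is not shown in the original, so treat the attribute names as assumptions):
attributes: {
    // Many-to-many: the agencies this user owns (mirrors Agency.owners)
    agencies: {
        collection: 'agency',
        via: 'owners'
    },
    // One-to-many: the agency this user works for (mirrors Agency.employees)
    employer: {
        model: 'agency'
    }
}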
The model has a many-to-many relationship with Users, as many Users can own many Agencies.
I want to protect the Blueprint controller for Agency (AgencyController.js) with a policy called isOwner.js, which makes sure the User is an owner of the Agency before allowing them to make an edit. I have based the policy on the example found in the Sails.js docs, where I try to ensure the userId (found in the session) is an owner of the resource.
api/policies/isOwner.js:
module.exports = function(req, res, next) {
    var userId = req.session.passport.user;
    req.options.where = req.options.where || {};
    req.options.where.owners = userId;
    return next();
};
Then in my config/policies.js file I have added the following:
AgencyController: {
    destroy: ['isOwner'],
    update: ['isOwner']
},
This doesn't work. I think it's because of the many-to-many relationship between the two models. My question is: can I create a policy that can query a many-to-many relationship, or is that only possible through a one-to-many relationship?
Thanks.
I couldn't find a way of nicely protecting the blueprint methods with a policy alone, so I created a service that checks whether the user is an owner of the model, and then extended the update and destroy methods in my controller.
api/services/isOwner.js:
/**
 * Only allow access to models if the user is an owner.
 * Assumes an attribute called owners on the model and assumes it has a relationship that can be queried through the
 * 'populate' waterline method.
 */
var actionUtil = require( 'sails/lib/hooks/blueprints/actionUtil' );
var _ = require( 'underscore' );
/**
 * @param req
 * @param res
 * @param is {function} called with (req, res) if the user is an owner
 * @param isnt {function} called with (req, res) if the user is not an owner. If not present, responds 403 Forbidden.
 */
module.exports = function isOwner( req, res, is, isnt ){
    // The isOwner policy above stores the session user's id on req.options.where.owners
    var ownerId = req.options.where.owners;
    var Model = actionUtil.parseModel( req );
    isnt = isnt || function(){ res.forbidden(); };
    is = is || function(){
        sails.log.warn( 'No callback defined for isOwner' );
        res.ok();
    };
    Model.findOne({ id: req.params.id }).populate( 'owners' ).exec( function( error, model ){
        if( error || !model ){ return isnt( req, res ); }
        var canEdit = _.find( model.owners, function( owner ){
            return owner.id === ownerId;
        });
        canEdit ? is( req, res ) : isnt( req, res );
    });
};
api/controllers/AgencyController.js:
var update = require( 'sails/lib/hooks/blueprints/actions/update' );
var isOwner = require( '../services/isOwner' );
module.exports = {
    /**
     * Override the default blueprint update behaviour so only the owner can update a record.
     *
     * @param req
     * @param res
     */
    update: function( req, res ){
        isOwner( req, res, update );
    }
};
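Since config/policies.js also maps destroy to the same check, the destroy blueprint can be overridden the same way (a sketch, assuming the blueprint destroy action lives at the path analogous to update's):
var destroy = require( 'sails/lib/hooks/blueprints/actions/destroy' );
var isOwner = require( '../services/isOwner' );
module.exports = {
    // update override as above...
    /**
     * Override the default blueprint destroy behaviour so only an owner can delete a record.
     */
    destroy: function( req, res ){
        isOwner( req, res, destroy );
    }
};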
Doesn't feel like the nicest way of doing it, but it's the only way I could think of. Just thought I'd share it on here in case someone comes across the same problem, or someone has a better solution.
I have multiple files that start with comments like:
/*
 * @title Force email verification
 * @overview Only allow access to users with verified emails.
 * @gallery true
 * @category access control
 *
 * This rule will only allow access to users that have verified their emails.
 *
 * > Note: It might be a better UX to make this verification from your application.
 *
 * If you are using [Lock](https://auth0.com/docs/lock), the default behavior is to log in a user immediately after they have signed up.
 * To prevent this from immediately displaying an error to the user, you can pass the following option to `lock.show()` or similar: `loginAfterSignup: false`.
 *
 * If you are using [auth0.js](https://auth0.com/docs/libraries/auth0js), the equivalent option is `auto_login: false`.
 *
 */
//jshint -W025
function (user, context, callback) {
    if (!user.email_verified) {
        return callback(new UnauthorizedError('Please verify your email before logging in.'));
    } else {
        return callback(null, user, context);
    }
}
All files contain two types of comments, i.e. /* */ and //. Now I am reading these files in my JavaScript code and want to remove the comments and get the actual code in a variable, e.g.
function (user, context, callback) {
    if (!user.email_verified) {
        return callback(new UnauthorizedError('Please verify your email before logging in.'));
    } else {
        return callback(null, user, context);
    }
}
I have tried the strip-comments and parse-comments npm packages, but neither works. Here is the code:
const fs = require('fs');
const path = require('path');
const strip = require('strip-comments');
module.exports = function (ruleFileName, globals, stubs) {
    globals = globals || {};
    stubs = stubs || {};
    const fileName = path.join(__dirname, '../src/rules', ruleFileName + '.js');
    const data = fs.readFileSync(fileName, 'utf8');
    const code = strip(data);
    console.log(code);
    return compile(code, globals, stubs);
};
and with parse-comments I tried:
const parsed = parseComments(data)[0];
const code = data.split('\n').slice(parsed.comment.end).join('\n').trim();
I think strip-comments is not working because it takes a string as an argument, but fs.readFileSync doesn't return a string. I have also tried data.toString(), but that didn't work either. So how can I strip the comments from the content? Is there any other solution?
Try using a regex to replace the comments: /\/\*[\s\S]*?\*\/|([^:]|^)\/\/.*$/gm
var Text = `/*
 * @title Force email verification
 * @overview Only allow access to users with verified emails.
 * @gallery true
 * @category access control
 *
 * This rule will only allow access to users that have verified their emails.
 *
 * > Note: It might be a better UX to make this verification from your application.
 *
 * If you are using [Lock](https://auth0.com/docs/lock), the default behavior is to log in a user immediately after they have signed up.
 * To prevent this from immediately displaying an error to the user, you can pass the following option to "lock.show()" or similar: "loginAfterSignup: false".
 *
 * If you are using [auth0.js](https://auth0.com/docs/libraries/auth0js), the equivalent option is "auto_login: false".
 *
 */
//jshint -W025
function (user, context, callback) {
    if (!user.email_verified) {
        return callback(new UnauthorizedError('Please verify your email before logging in.'));
    } else {
        return callback(null, user, context);
    }
}`;

console.log(Text.replace(/\/\*[\s\S]*?\*\/|([^:]|^)\/\/.*$/gm, ''));
Like this:
https://codepen.io/anon/pen/eQKrWP
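Applied to the asker's original module, the same regex could be dropped in where strip() was used (a sketch, assuming the same file layout and a compile() function defined elsewhere):
const fs = require('fs');
const path = require('path');

// Strips /* ... */ blocks and // line comments; the ([^:]|^) guard avoids
// eating '//' inside URLs such as https://...
const COMMENT_RE = /\/\*[\s\S]*?\*\/|([^:]|^)\/\/.*$/gm;

module.exports = function (ruleFileName, globals, stubs) {
    globals = globals || {};
    stubs = stubs || {};
    const fileName = path.join(__dirname, '../src/rules', ruleFileName + '.js');
    const data = fs.readFileSync(fileName, 'utf8'); // 'utf8' makes this return a string
    // '$1' keeps the captured character in front of '//' instead of deleting it
    const code = data.replace(COMMENT_RE, '$1').trim();
    console.log(code);
    return compile(code, globals, stubs);
};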
When I try to call a transaction on an asset that is inherited from an abstract base class asset, the call fails with Error: Invalid or missing identifier for Type <type> in namespace <name.spa.ce>
user.cto
namespace com.aczire.alm.base.user
import com.aczire.alm.base.*

abstract participant User {
    o String uid
    o String title optional
    o String firstName optional
    o String lastName optional
    o UserTransactionLogEntry[] logEntries
}

concept UserTransactionLogEntry {
    //--> User modified_by
    o String comment optional
    o DateTime timestamp
}

abstract transaction UserTransaction {
    o String comment
}

abstract event UserTransactionEvent {
    o String comment
}
admin.cto
namespace com.aczire.alm.base.user.admin
import com.aczire.alm.base.*
import com.aczire.alm.base.user.*

participant Admin identified by uname extends User {
    o String uname
}

abstract transaction AdminUserTransaction extends UserTransaction {
    o Admin user
    --> Admin modified_by
}

abstract event AdminUserTransactionEvent extends UserTransactionEvent {
    --> Admin user
    --> Admin modified_by
}

transaction CreateUser extends AdminUserTransaction {
}
admin.js
/**
 * Create a User
 * @param {com.aczire.alm.base.user.admin.CreateUser} createUser - the CreateUser transaction
 * @transaction
 */
function createUser(newuser) {
    console.log('createUser');
    var factory = getFactory();
    var NS_AU = 'com.aczire.alm.base.user.admin';
    var user = factory.newResource(NS_AU, 'Admin', newuser.uname);
    user.uid = newuser.uid;
    // save the order
    return getAssetRegistry(NS_AU)
        .then(function (registry) {
            return registry.add(user);
        })
        .then(function(){
            var userCreatedEvent = factory.newEvent(NS_AU, 'UserCreatedEvent');
            userCreatedEvent.user = user;
            userCreatedEvent.comment = 'Created new admin user - ' + newuser.uname + '!';
            emit(userCreatedEvent);
        });
}
I tried making the parameters to the TP User and Admin, moving the transaction arguments to the base class as the User type, moving the transaction to the base class, etc., but nothing seems to work.
Does inheritance work differently here?
Error shown in Composer Playground.
Your admin.js has some issues. The error occurs because Admin is a participant, so you have to use getParticipantRegistry(), not getAssetRegistry(), and then add the user. Also, in your model file admin.cto there is no event named UserCreatedEvent, so you first need to add it to the model before you can emit the event. To add the user, try changing your code to this:
/**
 * Create a User
 * @param {com.aczire.alm.base.user.admin.CreateUser} createUser - the CreateUser transaction
 * @transaction
 */
function createUser(newuser) {
    console.log('createUser');
    var NS_AU = 'com.aczire.alm.base.user.admin';
    var factory = getFactory();
    var testuser = factory.newResource(NS_AU, 'Admin', newuser.user.uname);
    testuser.uid = newuser.user.uid;
    testuser.logEntries = newuser.user.logEntries;
    // save the order
    return getParticipantRegistry('com.aczire.alm.base.user.admin.Admin')
        .then(function (registry) {
            return registry.add(testuser);
        });
}
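If you still want to emit an event like the original code did, it first has to exist in the model. A hypothetical definition for admin.cto, extending the abstract event already defined there:
event UserCreatedEvent extends AdminUserTransactionEvent {
}
Note that AdminUserTransactionEvent carries user and modified_by relationships plus the inherited comment field, so the transaction code would have to set all of them (or the model would have to mark some of them optional) before emit() passes validation.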
I made a prototype for a real-time voting system using deepstream.io and RethinkDB as the persistence store. So far it works, and multiple browsers stay in sync with the latest vote count (see screenshot below). However, there is one thing I don't like: the vote count is incremented in the browser and then sent to the remote deepstream.io server.
the JS code embedded in the client's browser:
/**
 * Login to deepstream as Frank
 */
var ds = deepstream( 'localhost:6020' ).login({ username: 'Frank'});
var name = 'upvote';
var upVotes;
var voteText = $('.streaming-prices .vote');
var record = ds.record.getRecord(name);

$('#upvote_btn').click(function() {
    // on button click, increment the vote counts
    // set the record stored in the rethinkdb storage
    record.set({
        count: ++upVotes
    });
});

record.subscribe('count', function(newVal) {
    console.info('count updated, newVal: ', newVal);
    upVotes = newVal;
    voteText.text(newVal);
});
the server.js code:
var PermissionHandler = require( './permission-handler' );
var DeepstreamServer = require( 'deepstream.io' ),
    RdbC = require( 'deepstream.io-storage-rethinkdb' ),
    server = new DeepstreamServer();

server.set('host', '0.0.0.0');
server.set('port', 6020);
server.set( 'tcpHost', '0.0.0.0' );
server.set( 'tcpPort', '6022' );
server.set( 'permissionHandler', new PermissionHandler() );
server.set( 'storage', new RdbC({
    port: 28015,
    host: '127.0.0.1',
    splitChar: '/',
    database: 'votings',
    defaultTable: 'question'
}));
server.start();
So you can see that the JS code in the client increments the vote count directly and updates the record, which is then sent to the deepstream.io server to update the database. I don't like this part, because I don't want users to be able to mess with the total vote count. Instead, I would like the client to just send something like +1 to the server, and let the server update the persisted total. I am not sure if this is possible; can someone shed some light? I appreciate it.
Good point, allowing clients to manipulate votes might not work so well!
I would do it using an RPC; that way you can let a trusted provider increment the record on the user's behalf.
The three things you'll need to do are:
request the rpc via the browser
respond to it from an rpc provider (a trusted client that can respond to the rpc)
add permissions to reject any attempts to change the record from non-providers
The code would look something like the following:
The JS code in the browser
/**
 * Login to deepstream as Frank
 */
var ds = deepstream( 'localhost:6020' ).login( { username: 'Frank'} );
var voteText = $('.streaming-prices .vote');
var name = 'vote';
var record = ds.record.getRecord( name );

$('#upvote_btn').click(function() {
    // on button click, ask the rpc provider to increment the vote count
    ds.rpc.make( 'upvote', {}, function( error, response ){
        // notify user when the upvote was successful
    });
});

record.subscribe('count', function( newVal ) {
    console.info( 'count updated, newVal: ', newVal);
    voteText.text(newVal);
});
The rpc provider that handles upvotes
/**
 * Login to deepstream as the upvote provider
 */
var ds = deepstream( 'localhost:6021' ).login( { username: 'upvote-provider' } );
var name = 'vote';
var record = ds.record.getRecord( name );

ds.rpc.provide( 'upvote', function( data, response ){
    record.set( 'count', record.get( 'count' ) + 1 );
    response.send();
});
The server should also have permissions to only allow the upvote-provider to change the votes.
canPerformAction: function( username, message, callback ) {
    /**
     * If the user is a provider, it has all permissions
     */
    if( username === 'upvote-provider' ) {
        callback( null, true );
    }
    /**
     * Otherwise the client request is valid unless it's trying to change the score
     */
    else {
        var messageObj = Deepstream.readMessage( message );
        var isAllowed = messageObj.name !== 'vote' || messageObj.isRead;
        var errorMessage = isAllowed ? null : 'Can\'t change votes via client, use \'upvote\' rpc instead';
        callback( errorMessage, isAllowed );
    }
}
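For reference, canPerformAction would live on the PermissionHandler class that server.js above already wires in via require( './permission-handler' ). A hypothetical skeleton (only the canPerformAction logic is from this answer):
// permission-handler.js
var Deepstream = require( 'deepstream.io' );

var PermissionHandler = function() {};

PermissionHandler.prototype.canPerformAction = function( username, message, callback ) {
    // ... the logic shown above ...
};

module.exports = PermissionHandler;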
I am writing a package as part of a small application I am working on, and one thing I need to do is fetch JSON data from an endpoint and use it to populate a server-side collection.
I have been receiving error messages telling me I need to put my server-side collection update function into a Fiber, or Meteor.bindEnvironment, or Meteor._callAsync.
I am puzzled, because there are no clear and concise explanations of what these do exactly, what they are, whether and when they are being deprecated, or whether their use is good practice.
Here is a look at what is important inside my package file
api.addFiles([
    '_src/collections.js'
], 'server');
A bit of pseudo code:
1) Set up a list of Mongo.Collection items.
2) Populate these using a function I have written called httpFetch(), run for each collection, returning a resolved promise if the fetch was successful.
3) Call this httpFetch() function inside an underscore each() loop, going through each collection I have, fetching the JSON data, and attempting to insert it into the server-side Mongo DB.
Collections.js looks like what is below. Wrapping the insert function in a Fiber seems to suppress the error message, but no data is being inserted into the DB.
/**
 * Server side component makes requests to a remote
 * endpoint to populate server side Mongo Collections.
 *
 * @class Server
 * @static
 */
Server = {

    Fiber: Npm.require('fibers'),

    /**
     * Collections to be populated with content
     *
     * @property Collections
     * @type {Object}
     */
    Collections: {
        staticContent: new Mongo.Collection('staticContent'),
        pages: new Mongo.Collection('pages'),
        projects: new Mongo.Collection('projects'),
        categories: new Mongo.Collection('categories'),
        formations: new Mongo.Collection('formations')
    },

    /**
     * Server side base url for making HTTP calls
     *
     * @property baseURL
     * @type {String}
     */
    baseURL: 'http://localhost:3000',

    /**
     * Function to update all server side collections
     *
     * @method updateCollections()
     * @return {Object} - a resolved or rejected promise
     */
    updateCollections: function() {
        var deferred = Q.defer(),
            self = this,
            url = '',
            collectionsUpdated = 0;
        _.each(self.Collections, function(collection) {
            // collection.remove();
            url = self.baseURL + '/content/' + collection._name + '.json';
            self.httpFetch(url).then(function(result) {
                var jsonData = EJSON.parse(result.data);
                _.each(jsonData.items, function(item) {
                    console.log('inserting item with id ', item.id);
                    self.Fiber(function() {
                        collection.update({testID: "Some random data"});
                    });
                });
                deferred.resolve({
                    status: 'ok',
                    message: 'Collection updated from url: ' + url
                });
            }).fail(function(error) {
                return deferred.reject({
                    status: 'error',
                    message: 'Could not update collection: ' + collection._name,
                    data: error
                });
            });
        });
        return deferred.promise;
    },

    /**
     * Function to load an endpoint from a given url
     *
     * @method httpFetch()
     * @param {String} url
     * @return {Object} - A resolved promise if the data was
     * received or a rejected promise.
     */
    httpFetch: function(url) {
        var deferred = Q.defer();
        HTTP.call(
            'GET',
            url,
            function(error, result) {
                if(error) {
                    deferred.reject({
                        status: 'error',
                        data: error
                    });
                }
                else {
                    deferred.resolve({
                        status: 'ok',
                        data: result.content
                    });
                }
            }
        );
        return deferred.promise;
    }
};
I am still really stuck on this problem, and from what I have tried after reading other posts, I still can't figure out the 'best practice' way of getting this working, or of getting it working at all.
There are plenty of suggestions from 2011/2012, but I would be reluctant to use them, since Meteor is in constant flux and even a minor update can break quite a lot of things.
Thanks
Good news: the solution is actually much simpler than all the code you've written so far.
From what I've grasped, you wrote an httpFetch function using the asynchronous version of HTTP.get, decorated with promises. You are then trying to run your collection update in a new Fiber, because the async HTTP.get introduced a callback (continued by the promise's then) which runs outside any Fiber.
What you need to do in the first place is use the SYNCHRONOUS version of HTTP.get, which is available on the server. This allows you to write this type of code:
updateCollections: function() {
    // we are inside a Meteor.method so this code is running inside its own Fiber
    var self = this;
    _.each(self.Collections, function(collection) {
        var url = self.baseURL + '/content/' + collection._name + '.json';
        // sync HTTP.get: we get the result right away (from a
        // code writing perspective)
        var result = HTTP.get(url);
        // we got our result and we are still in the method Fiber: we can now
        // safely call collection.update without the need to worry about Fiber stuff
    });
}
You should read the docs about the HTTP module carefully: http://docs.meteor.com/#http_call
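On the original question of what Meteor.bindEnvironment actually does: it wraps a callback so that it runs inside a Fiber with Meteor's environment attached, which is the other common way to make an async callback safe. A minimal sketch against the asker's promise-based httpFetch (names taken from the question):
self.httpFetch(url).then(Meteor.bindEnvironment(
    function(result) {
        var jsonData = EJSON.parse(result.data);
        _.each(jsonData.items, function(item) {
            collection.insert(item); // safe: we are back inside a Fiber
        });
    },
    function(error) {
        // second argument to bindEnvironment handles exceptions thrown in the callback
        console.error('insert failed', error);
    }
));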
I now have this working. It appears the problem was with my httpFetch function returning a promise, which was giving rise to the error:
"Error: Meteor code must always run within a Fiber. Try wrapping callbacks that you pass to non-Meteor libraries with Meteor.bindEnvironment."
I changed the httpFetch function to run a callback when HTTP.get() returned with either success or error.
Inside this callback is the code that parses the fetched data and inserts it into my collections, and this is the crucial part that is now working.
Below is the amended Collections.js file with comments to explain everything.
Server = {

    /**
     * Collections to be populated with content
     *
     * @property Collections
     * @type {Object}
     */
    Collections: {
        staticContent: new Mongo.Collection('staticContent'),
        pages: new Mongo.Collection('pages'),
        projects: new Mongo.Collection('projects'),
        categories: new Mongo.Collection('categories'),
        formations: new Mongo.Collection('formations')
    },

    /**
     * Server side base url for making HTTP calls
     *
     * @property baseURL
     * @type {String}
     */
    baseURL: 'http://localhost:3000',

    /**
     * Function to update all server side collections
     *
     * @method updateCollections()
     * @return {Object} - a resolved or rejected promise
     */
    updateCollections: function() {
        var deferred = Q.defer(),
            self = this,
            collectionsUpdated = 0;
        /**
         * Loop through each collection, fetching its data from the json
         * endpoint.
         */
        _.each(self.Collections, function(collection) {
            /**
             * Clear out old collection data
             */
            collection.remove({});
            /**
             * URL endpoint containing json data. Note the name of the collection
             * is also the name of the json file. They need to match.
             */
            var url = self.baseURL + '/content/' + collection._name + '.json';
            /**
             * Make Meteor HTTP Get using the function below.
             */
            self.httpFetch(url, function(err, res) {
                if(err) {
                    /**
                     * Reject promise if there was an error
                     */
                    deferred.reject({
                        status: 'error',
                        message: 'Error fetching content for url ' + url,
                        data: err
                    });
                }
                else {
                    /**
                     * Parse fetched data from the json endpoint
                     */
                    var data = EJSON.parse(res.content);
                    /**
                     * Pick out and insert each item into its collection
                     */
                    _.each(data.items, function(item) {
                        collection.insert(item);
                    });
                    collectionsUpdated++;
                }
                if(collectionsUpdated === _.size(self.Collections)) {
                    /**
                     * When we have updated all collections, resolve the promise
                     */
                    deferred.resolve({
                        status: 'ok',
                        message: 'All collections updated',
                        data: {
                            collections: self.Collections,
                            count: collectionsUpdated
                        }
                    });
                }
            });
        });
        /**
         * Return the promise
         */
        return deferred.promise;
    },

    /**
     * Function to load an endpoint from a given url
     *
     * @method httpFetch()
     * @param {String} url
     * @param {Function} cb - Callback receiving (error, result)
     * @return undefined
     */
    httpFetch: function(url, cb) {
        HTTP.get(url, cb);
    }
};
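For completeness, a hypothetical call site, e.g. in a Meteor.startup block on the server (using Q's promise API as above):
Meteor.startup(function() {
    Server.updateCollections().then(function(result) {
        console.log(result.message); // 'All collections updated'
    }).fail(function(error) {
        console.error(error.message, error.data);
    });
});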
I am trying to do a simple RESTful PUT. My problem is that I need to issue the PUT to a different endpoint from my store's.
I have my REST adapter:
DS.RESTAdapter.reopen({
    namespace: 'datastore'
});
I need to be able to call an endpoint, but I'm not sure how to do it. Something like:
store('foundItems', JSON)
where foundItems is the endpoint.
You'll want to create a custom RESTAdapter and override buildURL. This should help you get started:
App.PeopleAdapter = DS.RESTAdapter.extend({
    host: 'http://www.google.com',
    namespace: 'api/v1',

    /**
     Builds a URL for a given type and optional ID.
     By default, it pluralizes the type's name (for example,
     'post' becomes 'posts' and 'person' becomes 'people').
     If an ID is specified, it adds the ID to the path generated
     for the type, separated by a `/`.
     @method buildURL
     @param {String} type
     @param {String} id
     @returns String
     */
    buildURL: function(type, id) {
        var url = [],
            host = Ember.get(this, 'host'),
            prefix = this.urlPrefix();
        if (type) { url.push(this.pathForType(type)); }
        if (id) { url.push(id); }
        if (prefix) { url.unshift(prefix); }
        url = url.join('/');
        if (!host && url) { url = '/' + url; }
        return url;
    }
});
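With that adapter, URLs get built roughly as follows (a sketch; the foundItem model name is hypothetical):
// adapter.buildURL('foundItem', 3)
// => 'http://www.google.com/api/v1/foundItems/3'
// so saving a foundItem record issues its PUT against that endpoint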