RequireJS dynamic reference to sub-module methods?

Using RequireJS, I’ve split larger class structures down into modules that use other modules within a directory. There’s a main file that instantiates the other sub-modules. This is an API class with two modules: one that deals with posting data to the endpoint, and another that holds helper functions for that post module:
define([
  './Post',
  './Helper'
], function (PostModule, HelperModule) {
  'use strict';

  var module = function () {
    this.Post = new PostModule();
    this.Helper = new HelperModule();
  };

  return module;
});
Now I can chain these modules like this:
var foo = new ApiModule();
foo.Post.postToAPI();
foo.Helper.matchAPIactionToEvent();
which is exactly what I want.
BUT the problem is that, within the Post.js file, Post knows nothing about the Helper.js file, so I can’t take advantage of any of its methods. What I would like to do within Post.js is reference the other functions of the same class like so:
define([
  '../environment',
  'loglevel',
  '../utility/Utility',
  'jquery'
], function (EnvironmentModule, log, UtilityModule, $) {
  'use strict';

  var module = function () {
    var environment,
        utility,
        api;

    environment = new EnvironmentModule();
    utility = new UtilityModule();

    this.postToAPI = function (event, payload, callback) {
      var apiEndpoint,
          requestIdString,
          eventAndAction;

      apiEndpoint = environment.getEnvironmentPath().API;
      requestIdString = utility.String.generateRandomString(32);

      /*** THIS IS HOW I WANT TO CALL THE HELPER METHODS ***/
      eventAndAction = this.Helper.matchAPIactionToEvent(event);
      /*** THIS IS HOW I WANT TO CALL THE HELPER METHODS ***/

      payload.event = eventAndAction.event;
      payload.action = eventAndAction.action;
      payload.requestID = requestIdString;
      payload = $.param(payload);

      $.post(apiEndpoint + '?' + payload, function (result) {
        if (callback) {
          callback(result);
        }
      });

      return;
    };
  };

  return module;
});
I figured out a working solution to this, where I pass '../api/Helper' as one of the array values in the define statement of Post.js, but I don’t want to do that. What I want is for Post.js to be able to access any method of any other module contained in the same directory, so that I don’t have to list them explicitly. It also seems wrong to instantiate a new ApiModule() within Post.js. Here’s the directory structure:
Modules/api/Api.js
Modules/api/Post.js
Modules/api/Helper.js
...
I hope that makes sense. Is this possible?

Since you want any of the child modules to access any other child modules, what you could do is pass the parent module as an argument to the constructors of the child modules:
var module = function () {
  this.Post = new PostModule(this);
  this.Helper = new HelperModule(this);
};
Have the child modules store this information:
var module = function (parent) {
  this.parent = parent;
Then use this.parent to call methods on the other child modules:
eventAndAction = this.parent.Helper.matchAPIactionToEvent(event);
Note that if you were to load '../api/Helper' with RequireJS in Post.js, as you were thinking of doing, you would not be able to use the same object instance as the one defined on the parent module. You'd have to construct a new object for use in Post.js.
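Putting the pieces together, Post.js might then look roughly like this (a sketch based on the question's own code, with the loglevel dependency dropped for brevity; matchAPIactionToEvent and the other names come from the question):
define([
  '../environment',
  '../utility/Utility',
  'jquery'
], function (EnvironmentModule, UtilityModule, $) {
  'use strict';

  // Api.js constructs this with: this.Post = new PostModule(this);
  var module = function (parent) {
    this.parent = parent;

    var environment = new EnvironmentModule();
    var utility = new UtilityModule();

    this.postToAPI = function (event, payload, callback) {
      // reach the sibling Helper instance through the parent Api module
      var eventAndAction = this.parent.Helper.matchAPIactionToEvent(event);

      payload.event = eventAndAction.event;
      payload.action = eventAndAction.action;
      payload.requestID = utility.String.generateRandomString(32);

      $.post(environment.getEnvironmentPath().API + '?' + $.param(payload), function (result) {
        if (callback) {
          callback(result);
        }
      });
    };
  };

  return module;
});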

Related

CommonJS module pattern

I have taken this from the Flux architecture documentation:
var AppDispatcher = require('../dispatcher/AppDispatcher');
var EventEmitter = require('events').EventEmitter;
var TodoConstants = require('../constants/TodoConstants');
var assign = require('object-assign');

var CHANGE_EVENT = 'change';

var _todos = {}; // collection of todo items

/**
 * Create a TODO item.
 * @param {string} text The content of the TODO
 */
function create(text) {
  // Using the current timestamp in place of a real id.
  var id = Date.now();
  _todos[id] = {
    id: id,
    complete: false,
    text: text
  };
}

/**
 * Delete a TODO item.
 * @param {string} id
 */
function destroy(id) {
  delete _todos[id];
}

var TodoStore = assign({}, EventEmitter.prototype, {

  /**
   * Get the entire collection of TODOs.
   * @return {object}
   */
  getAll: function() {
    return _todos;
  },

  emitChange: function() {
    this.emit(CHANGE_EVENT);
  },

  /**
   * @param {function} callback
   */
  addChangeListener: function(callback) {
    this.on(CHANGE_EVENT, callback);
  },

  /**
   * @param {function} callback
   */
  removeChangeListener: function(callback) {
    this.removeListener(CHANGE_EVENT, callback);
  },

  dispatcherIndex: AppDispatcher.register(function(payload) {
    var action = payload.action;
    var text;
    switch(action.actionType) {
      case TodoConstants.TODO_CREATE:
        text = action.text.trim();
        if (text !== '') {
          create(text);
          TodoStore.emitChange();
        }
        break;

      case TodoConstants.TODO_DESTROY:
        destroy(action.id);
        TodoStore.emitChange();
        break;

      // add more cases for other actionTypes, like TODO_UPDATE, etc.
    }

    return true; // No errors. Needed by promise in Dispatcher.
  })

});
where it says
There are a few important things to note in the above code. To start, we are maintaining a private data structure called _todos. This object contains all the individual to-do items. Because this variable lives outside the class, but within the closure of the module, it remains private — it cannot be directly changed from outside of the module. This helps us preserve a distinct input/output interface for the flow of data by making it impossible to update the store without using an action.
The bold part is unclear to me. How can the JS interpreter know that all of that code is inside a module closure and not in the global scope?
Where does the module closure start and where does it end?
As far as I know
The scope of a variable declared with var is its current execution context, which is either the enclosing function or, for variables declared outside any function, global.
Any explanation?
You're actually missing the last line from the extract you quoted:
module.exports = TodoStore;
CommonJS is an API for defining modules that uses the following conventions:
Every file defines a module and is executed in an isolated environment; that is to say, the variables it defines won't be available from outside the module.
To allow importing other modules, a global variable require is made available to the module, allowing it to import other modules.
In the same way, the variable module is made available to the module so that it can set its exports attribute to define what should be exported; the value you set for module.exports in your module a.js is exactly what require('./a') will return.
Every JS environment that implements CommonJS has to know these rules. That includes Node.js, of course, but also bundlers like Browserify and Webpack that will package your code so that these conventions are respected.
This way, you can control which part of your module is exported.
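As a minimal illustration of these conventions (a sketch with made-up file names a.js and b.js):
// a.js
var secret = 42; // module-scoped: not visible outside this file

function getSecret() {
  return secret;
}

module.exports = { getSecret: getSecret }; // only this object is exposed

// b.js
var a = require('./a');      // receives exactly what a.js assigned to module.exports

console.log(a.getSecret());  // 42
console.log(typeof secret);  // 'undefined': the variable itself never leaked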
P.S.: note that you can also use the exports variable to define your exports, and that its use is slightly different from module.exports. See this StackOverflow question and answer for details.
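To illustrate the difference mentioned in the P.S. (a minimal sketch, not from the original answer): exports starts out as a reference to the same object as module.exports, so adding properties to it works, but reassigning it only rebinds the local variable.
// inside some module
exports.foo = function () { return 'foo'; };    // exported: module.exports.foo now exists

exports = function () { return 'bar'; };        // NOT exported: only the local variable changed

module.exports = function () { return 'baz'; }; // exported: replaces the whole exports object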
The CommonJS pattern uses a constructor function to define your utilities.
It is defined in the form of a class:
var moduleName = function() {
  // private variables

  // public functions
  this.method1 = function() {
    // logic1 goes here
  };

  this.method2 = function() {
    // logic2 goes here
  };
};
So we are going to export the class to other modules using
module.exports = moduleName;
So that other modules can import it, instantiate it and then use the functionality.
How to use it?
var module = require('./moduleName'); // importing the module created above
Here the module definition is fetched, executed, and then made available in the 'module' variable. The variable name can be anything.
var objectOfModule = new module(); // instance is created
objectOfModule.method1(); // use 1
console.log(objectOfModule.method2()); // use 2
Thanks.

How to augment a CommonJS module from requiring code?

I am wondering how I can augment a CommonJS module from another module that requires it.
Let's assume that I have three files, two of them CommonJS modules as below:
my-example-module.js
function MyExampleModule(){}

MyExampleModule.prototype = {
  bindings: {
    some: 'random',
    prop: 'values'
  }
}

module.exports = MyExampleModule;
another-example-module.js
var MyExampleModule = require('./my-example-module');

function AnotherExampleModule(){}

AnotherExampleModule.prototype = {
  getMyExampleModuleBindings: function(){
    var module = new MyExampleModule();
    return module.bindings;
  }
}

module.exports = AnotherExampleModule;
app.js
var MyExampleModule = require('./my-example-module');
var AnotherExampleModule = require('./another-example-module');
//modify?!?
var anotherExampleModule = new AnotherExampleModule();
console.log(anotherExampleModule.getMyExampleModuleBindings());
So what I want to do is have //modify?!? be some kind of code that alters the original MyExampleModule prototype, so that when anything else requires MyExampleModule it gets the modified version.
A concrete question would be: what should I replace //modify?!? with so that the following gets logged to the console, with the assumption that my-example-module.js is read-only?
{
  some: 'random',
  prop: 'values',
  added: 'binding'
}
If you want to do this in nodejs/iojs, it is pretty simple. When Node imports a CommonJS module (let's call it A), it creates an internal object.
When another module (B) wants to load the same module (A), it just gets a reference to that internal object. So if you change something on MyExampleModule in app.js, it is also applied to the MyExampleModule in another-example-module.js:
app.js
var MyExampleModule = require('./my-example-module');
var AnotherExampleModule = require('./another-example-module');

//modify:
MyExampleModule.prototype.bindings = {
  some: 'random',
  prop: 'values',
  added: 'binding'
};

var anotherExampleModule = new AnotherExampleModule();
console.log(anotherExampleModule.getMyExampleModuleBindings());
Since you create a new instance of MyExampleModule in another-example-module.js after you call MyExampleModule.prototype.bindings = {...} in app.js, the new instance will already be created with the modified .prototype.
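The reason this works is Node's module cache: requiring the same path again returns the very same exported object rather than re-running the file. A quick way to convince yourself (a minimal sketch, not part of the original answer):
var first = require('./my-example-module');
var second = require('./my-example-module');

console.log(first === second); // true, both names point at the same cached constructor

first.prototype.added = 'binding';
console.log(second.prototype.added); // 'binding', the change is visible through either reference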
While I haven't tested this in Browserify, it certainly works in webpack's implementation of CommonJS as well.
Check out the working example on runnable (app.js is called server.js):
http://code.runnable.com/VZPdN5k65gE5vUIz

node.js expose variable to module?

I have read a lot of articles about how to create modules in node.js, and you can use module.exports to expose module internals to the file which includes it.. awesome!
How does this work the other way around? I'll use the following as an example :-)
USER.JS
function User() {
  this.property = 'value';
  this.doSomething = function() {
    var getStuff = mainFileFunction();
    // do something with getStuff
  }
}

module.exports = User;
MAIN.JS
var User = require('./user');

var myArray = [];
myArray.push('something');
myArray.push('something else');

function mainFileFunction() {
  for (var thing in myArray) {
    return myArray[thing];
  }
}

var u = new User();
console.log(u.property); <--- THIS IS EXPOSED, COOL!
u.doSomething(); <--- This will throw an error because mainFileFunction is not defined in user.js :-(
If I were to move mainFileFunction to the user file, then it still wouldn't work because the myArray array wouldn't be defined... and if I were to move that over too, I wouldn't be able to use it in other functions in main (which I want to) :-)
I'm sorry if I'm missing something really obvious here... What I want is to expose the parts of my choosing from the modules I include (module.exports works for that), but I also want to expose everything from the main file to all the includes.
Or should I just expose everything to everything? Is that totally messy and horrible?
Just to explain what I am trying to do here... I want to have classes defined in separate files, but I want to instantiate a bunch of them as objects in the main file and store them in arrays.. I want the objects to contain methods which can access arrays of the other object types.
Thanks guys! :-)
You can use globals, or have a proper circular dependency (requiring both files); however, this is usually a bad habit which can lead to maintainability problems in the future.
Instead, you can use dependency injection and inject mainFileFunction into your module.
This basically gives you the following for free:
You can test User later with a simple mock implementation of the injected function and verify the correctness of your code.
The dependencies of a user are explicit and not implicit, which makes it obvious what a user needs.
I'll provide two implementations, one using constructor dependency injection and one with a module-wide setting.
USER.JS
function User(dependentFunction) {
  this.property = 'value';
  this.doSomething = function() {
    var getStuff = dependentFunction();
    // do something with getStuff
  }
}

module.exports = User;
MAIN.JS
...
var u = new User(mainFileFunction);
u.doSomething(); // this will now work, using mainFileFunction
What happens here is fairly simple, and we know what's going on.
This can also be a module-wide setting:
USER.JS
function User(depFunc) {
  this.property = 'value';
  this.doSomething = function() {
    var getStuff = depFunc();
    // do something with getStuff
  }
}

function UserFactory(depFunc){
  return function(){
    return new User(depFunc);
  }
}

module.exports = UserFactory;
MAIN.JS
var UserFactory = require('./user');

var getUser = UserFactory(mainFileFunction);
var u = getUser(); // will return a new user with the right function
+1 to Benjamin's answer for dependency injection.
I would like to add another way to inject objects into your modules: passing the dependency in the require call, like require('./module.js')(dependentFunction);
//MAIN.js
var db = function() {
  this.rows = [];
  this.insert = function(name) {
    this.rows.push(name);
    console.log('Db: Inserting user with name ' + name);
  }
  this.getAll = function(){
    return this.rows;
  }
}

var fakeDb = new db();

var user = require('./user.js')(fakeDb);

user.add('Jhon');
user.add('Rose');
user.list();
and
//user.js
module.exports = function(db) {
  return {
    add: function(name) {
      db.insert(name);
    },
    list: function() {
      var users = db.getAll();
      var i = users.length;
      console.log('listing users\n-------');
      while(i--) {
        console.log(users[i]);
      }
    }
  }
}
You should pass mainFileFunction as a parameter to the constructor of User.
USER.JS
function User(mainFileFunction) {
  this.property = 'value';
  this.doSomething = function() {
    var getStuff = mainFileFunction();
    // do something with getStuff
  }
}

module.exports = User;
In your main.js, use the following:
var u = new User(mainFileFunction);
How about moving mainFileFunction to user.js and having the function accept an array as an argument:
function mainFileFunction(array) {
  for (var thing in array) {
    return array[thing];
  }
}
And then when you call it from main.js, pass the function your array:
u.doSomething(myArray);
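For completeness, under that suggestion user.js might look roughly like this (a sketch, not from the original answer), with doSomething forwarding the array to the now-local mainFileFunction:
// user.js (sketch)
function mainFileFunction(array) {
  for (var thing in array) {
    return array[thing];
  }
}

function User() {
  this.property = 'value';
  this.doSomething = function(array) {
    var getStuff = mainFileFunction(array);
    // do something with getStuff
  };
}

module.exports = User;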

How to extend a Node.js module

I would like to extend a Node.js module.
For example, I want to add a function to the utilities module, like detecting an empty JSON object:
function isEmptyObject(obj) {
  return !Object.keys(obj).length;
}
So I can use util.isEmptyObject() to detect an empty JSON object.
Is this a good way to extend a Node.js module?
You can extend util by defining your own "utils_ex" module:
/* utils_ex.js */
var utils = require('util');

utils.isEmptyObject = function(obj) {
  return !Object.keys(obj).length;
};

exports = module.exports = utils;
/* end of utils_ex.js */

// client.js
var utils = require('./utils_ex');
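Continuing the client file above (a sketch; it assumes client.js sits next to utils_ex.js):
console.log(utils.isEmptyObject({}));           // true
console.log(utils.isEmptyObject({ a: 1 }));     // false
console.log(utils.format('still %s', 'works')); // the original util API is untouched
Note that this approach mutates the shared util module, so every consumer of require('util') in the process will see isEmptyObject as well.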

How do I set Backbone Views as singleton by default?

All my Backbone.Views are only used once in their final state (except item views).
Currently I handle Backbone.Views as singletons this way:
var Singletonizer = function(Singleton) {
  if (Singleton._instance) return Singleton._instance;
  Singleton._instance = new Singleton();
  return Singletonizer(Singleton);
};
Unfortunately it isn't that nice to add this little function as a dependency to each AMD module in my repository.
Is there another way to handle this? Maybe overwriting the base View class?
Just have your module return a function instead of your view constructor, one that returns a single instance of it. This way, when you load the module, you will not automatically get an instance whether you like it or not. Instead, after loading our "FailedXhrView" module, we get our singleton by calling FailedXhrView():
'use strict';

define(['jquery',
        'underscore',
        'backbone',
        'text!templates/failedXhr.html'],
  function($, _, Backbone, failedXhrTemplate) {

    var FailedXhrView = Backbone.View.extend({
      el: $('#failedxhr-modal-container'),
      template: _.template(failedXhrTemplate),
      render: function() {
        this.$el.html(this.template({}));
        this.$el.find('failedxhr-modal-containee').modal();
        return this;
      }
    });

    var instance;

    return function() {
      if (!instance) {
        instance = new FailedXhrView();
      }
      return instance;
    };
  });
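Usage then looks something like this (a sketch; the module path 'views/FailedXhr' is assumed):
define(['views/FailedXhr'], function(FailedXhrView) {
  var viewA = FailedXhrView();
  var viewB = FailedXhrView();
  console.log(viewA === viewB); // true, both calls hand back the same instance
});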
From the highly recommended book Recipes with Backbone:
From this:
// definition
MyApplication.Views.List = Backbone.View.extend();

// instantiation
$(function() {
  MyApplication.ListView = new MyApplication.Views.List({
    el: $('#list')
  });
});
To this:
$(function() {
  MyApplication.ListView = new (Backbone.View.extend({
    initialize: function() {
      // ...
    }
  }))({el: $('#list')});
});
We assign an instantiation of an anonymous class to MyApplication.ListView. In this approach, we are doing the typical extension of a top-level Backbone class with custom attributes and methods. The difference, however, is that we do not assign the result to a class name as we did earlier. Instead, we immediately create a new instance of this anonymous class. Lastly, the resulting object is assigned to MyApplication.ListView.
I have to say I've never used techniques like this. It looks too messy to me; I prefer legibility over architecture.
