Class autoloader in Ember.js?

I am looking for an autoloader similar to those in other languages (e.g. PHP's class autoloading: http://php.net/manual/en/language.oop5.autoload.php). I merely specify the algorithm for finding the file, and it's automatically loaded into the app.
My initial thinking is a build process that scans directories and builds an index file. Is there a better way?

Here's my solution using browserify and a node.js build script, but I'm curious if anyone has a better solution:
build.js:
var glob = require("glob");
var fs = require('fs');
var path = require('path');
function buildFile(directory, build_file, suffix) {
glob(directory, function(err, files) {
if (fs.existsSync(build_file)) {
fs.unlinkSync(build_file);
}
fs.appendFileSync(build_file, 'module.exports = {');
var controllers = {};
files.forEach(function (file) {
var key = path.basename(file, '.js')+suffix;
var value = "require('"+file+"')";
fs.appendFileSync(build_file, '\n '+key+': '+value + ',');
});
fs.appendFileSync(build_file, '\n}');
});
};
buildFile('./controllers/*.js' , './controllers.js', 'Controller');
buildFile('./routes/*.js' , './routes.js' , 'Route');
app.js:
var App = Ember.Application.create();
App.reopen(require('./controllers.js'));
App.reopen(require('./routes.js'));
routes.js (example output from build.js):
module.exports = {
  ApplicationRoute: require('./routes/Application.js'),
  IndexRoute: require('./routes/Index.js'),
  RecoverRoute: require('./routes/Recover.js'),
  RegisterRoute: require('./routes/Register.js'),
  UsersRoute: require('./routes/Users.js'),
  UsersNewRoute: require('./routes/UsersNew.js'),
  ValidateRoute: require('./routes/Validate.js'),
}
I use Grunt.js to watch and rebuild automatically when changes occur.
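For reference, a minimal Gruntfile wiring that up might look like the sketch below (task names and globs are my assumptions, not the author's actual setup):
// Gruntfile.js (sketch)
module.exports = function (grunt) {
    grunt.loadNpmTasks('grunt-contrib-watch');
    // Re-run the index builder in a child process so Node's require
    // cache can't serve a stale build.js on later runs.
    grunt.registerTask('build-index', function () {
        var done = this.async();
        require('child_process').exec('node build.js', function (err) {
            done(!err);
        });
    });
    grunt.initConfig({
        watch: {
            scripts: {
                files: ['controllers/*.js', 'routes/*.js'],
                tasks: ['build-index']
            }
        }
    });
};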

You probably want to use something like RequireJS: http://requirejs.org/
RequireJS will allow you to specify dependencies which will be loaded as needed. You can also run the RequireJS optimizer to compile your templates and JavaScript into one file to deploy to your production servers.
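As a rough illustration (module names here are hypothetical, not taken from the question's code), an AMD-style entry point might look like:
define(['routes/Application', 'routes/Index'], function (ApplicationRoute, IndexRoute) {
    var App = Ember.Application.create();
    App.ApplicationRoute = ApplicationRoute;
    App.IndexRoute = IndexRoute;
    return App;
});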

One could use a pre-made tool like Yeoman's ember generator or ember tools. They are opinionated about the project's folder structure.

Related

How to output files to the parent of their source directory using Webpack in Gulp?

So far I have this code, which I got from here:
var gulp = require('gulp');
var webpack = require('webpack-stream');
var named = require('vinyl-named');

gulp.task('default', function () {
    return gulp.src('*/lib/app.js', { base: '.' })
        .pipe(named())
        .pipe(webpack())
        .pipe(gulp.dest('.'));
});
My folder structure is like:
site1/lib/app.js
site2/lib/app.js
I want to create the output files like the following, with each file containing only their respective lib/app.js file's code (and any require()s made in them):
site1/app.js
site2/app.js
However, the code I have now just outputs to the project's root directory. I've tried several combinations, such as removing the { base: '.' }, but nothing works. If I remove the named() and webpack() pipes, though, then the current code actually outputs to the correct directory. So, in the process, it seems like perhaps Webpack loses the originating directory information?
Also, is it possible to get a solution that works with Webpack's "watch: true" option, so that compiling modified files is quick, rather than using Gulp to iterate through every single file on every file change?
I assume you want to create an app.js for each site that packs only the code for that site (and not the others).
In that case you can use gulp-foreach to effectively iterate over all your app.js files and send each one down its own stream. Then you can use the node.js built-in path module to figure out where the parent directory for each app.js file is and write it there with gulp.dest().
var gulp = require('gulp');
var webpack = require('webpack-stream');
var named = require('vinyl-named');
var foreach = require('gulp-foreach');
var path = require('path');

gulp.task('default', function () {
    return gulp.src('*/lib/app.js')
        .pipe(foreach(function (stream, file) {
            var parentDir = path.dirname(path.dirname(file.path));
            return stream
                .pipe(named())
                .pipe(webpack())
                .pipe(gulp.dest(parentDir));
        }));
});
If you want to use webpack({watch: true}) you'll have to use a different approach. The following uses glob to iterate over all the app.js files. Each app.js file is again sent down its own stream; however, this time all the streams are merged before being returned.
var gulp = require('gulp');
var webpack = require('webpack-stream');
var named = require('vinyl-named');
var path = require('path');
var merge = require('merge-stream');
var glob = require('glob');

gulp.task('default', function () {
    return merge.apply(null, glob.sync('*/lib/app.js').map(function (file) {
        var parentDir = path.dirname(path.dirname(file));
        return gulp.src(file)
            .pipe(named())
            .pipe(webpack({ watch: true }))
            .pipe(gulp.dest(parentDir));
    }));
});

How can I insert content into a file during a gulp build?

I managed to accomplish my task using a gulp plugin called gulp-insert like this:
gulp.task('compile-js', function () {
    // Minify and bundle client scripts.
    var scripts = gulp.src([
            srcDir + '/routes/**/*.js',
            srcDir + '/shared/js/**/*.js'
        ])
        // Sort angular files so the module definition appears
        // first in the bundle.
        .pipe(gulpAngularFilesort())
        // Add angular dependency injection annotations before
        // minifying the bundle.
        .pipe(gulpNgAnnotate())
        // Begin building source maps for easy debugging of the
        // bundled code.
        .pipe(gulpSourcemaps.init())
        .pipe(gulpConcat('bundle.js'))
        // Buffer the bundle.js file and replace the appConfig
        // placeholder string with a stringified config object.
        .pipe(gulpInsert.transform(function (contents) {
            return contents.replace("'{{{appConfigObj}}}'", JSON.stringify(config));
        }))
        .pipe(gulpUglify())
        // Finish off sourcemap tracking and write the map to the
        // bottom of the bundle file.
        .pipe(gulpSourcemaps.write())
        .pipe(gulp.dest(buildDir + '/shared/js'));

    return scripts.pipe(gulpLivereload());
});
What I'm doing is reading our app's configuration file which is managed by the config module on npm. Getting our config file from server-side code is a snap using var config = require('config');, but we're a single-page app and frequently need access to the configuration settings on the client-side. To do that I stuff the config object into an Angular service.
Here's the Angular service before gulp build.
angular.module('app')
    .factory('appConfig', function () {
        return '{{{appConfigObj}}}';
    });
The placeholder is in a string so that it's valid JavaScript for some of the other gulp plugins that process the file first. The gulpInsert utility lets me insert the config like this.
.pipe(gulpInsert.transform(function (contents) {
    return contents.replace("'{{{appConfigObj}}}'", JSON.stringify(config));
}))
This works but feels a little hacky. Not to mention that it has to buffer the whole bundled file just so I can perform the operation. Is there a more elegant way to accomplish the same thing? Preferably one that allows the stream to keep flowing smoothly without buffering the whole bundle at the end? Thanks!
Have you checked gulp-replace-task?
Something like
[...]
.pipe(gulpSourcemaps.init())
.pipe(replace({
    patterns: [{
        match: '{{{appConfigObj}}}',
        replacement: config
    }],
    usePrefix: false
}))
.pipe(gulpUglify())
[...]
Admittedly, this feels a bit hacky too, but maybe slightly better...

I'm using envify and gulp-env in a React project. You could do something like this.
gulpfile.js:
var config = require('config');
var envify = require('envify');
var env = require('gulp-env');

gulp.task('env', function () {
    env({
        vars: {
            APP_CONFIG: JSON.stringify(config)
        }
    });
});

gulp.task('compile-js', ['env'], function () {
    // ... replace `gulp-insert` with `envify`
});
factory:
angular.module('app')
    .factory('appConfig', function () {
        return process.env.APP_CONFIG;
    });
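One caveat worth noting (my addition, not from the answer): envify inlines the environment variable as a string literal at build time, so the factory above ends up returning a JSON string. To hand consumers an object, you would likely parse it:
angular.module('app')
    .factory('appConfig', function () {
        // envify statically replaces process.env.APP_CONFIG with the
        // stringified config; parse it back into an object.
        return JSON.parse(process.env.APP_CONFIG);
    });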

node.js configure submodules

[EDIT]
Thanks to Stafano, who formalized my question in a better way:
You have a module
-) There are several files in this module
-) All these files depend on a configuration whose path is unknown to the module itself
-) This module does not do much on its own, and is meant to be used by other applications
-) These applications should inject a configuration path into the module before it can be used
So I have this module, used from another application. It's composed of other submodules, and I want to configure it using a configuration object.
I already tried to inject the configuration into my submodules, but I had the same problem described in the original question.
For example, my module uses MongoDB (with Mongoose) as a store.
// app.js
// In the config object I have the URI of the mongo instance (in order to create a connection).
var myModule = require('myModule')(config);

// myModule/index.js exposes the module's functionality.
// It is the entry point, so I create the mongoose connection here.
var mongoose = require('mongoose');
module.exports = function (config) {
    var connection = mongoose.createConnection(config.store.URL);
    // I need to expose this connection to the other submodules.
};
// myModule/storeController.js contains the business logic that uses the store
// (createItem, deleteItem, get...) and requires mongoose and my models
// (stored in the models folder).
var mongoose = require('mongoose');
var Item = require('./models/item.js');
exports.createItem = function (item) {
    Item.save(item, function (err, item) {
        if (err) throw err;
        // ...
    });
};
// myModule/models/item.js
// In this module I need to use the connection created from the application's configuration.
var mongoose = require('mongoose');
var connection = /* I don't know how to get it */;
var ItemSchema = new mongoose.Schema({
    name: String
});
module.exports = mongoose.model('item', ItemSchema);
If I inject the configuration object into item.js, I can't do the module.exports of my model.
I hope this example clarifies my question; the problem is simple: exposing an object after getting it as a parameter.
[PREVIOUS]
I have a node.js application that requires a module. This module accepts a configuration file path (a JSON file).
I need to load that configuration on require and expose it to the module.
How can I achieve this behavior?
Something like:
// app.js
var myModule = require('myModule')(__dirname + '/config/myModuleCnfig.json');

// myModule.js
module.exports = function (configPath) {
    var config = require(configPath);
    module.exports = config; // This is wrong
};
Is there another way to get the configuration path, configure the module, and share the configuration?
By "share the configuration" I mean that I want other files in my module to be able to use that configuration.
Thanks for any suggestions!
FINAL EDIT:
After many misunderstandings, your problem is finally clear to me. To summarise what's in the comments, here is the situation:
You have a module
There are several files in this module
All these files depend on a configuration whose path is unknown to the module itself
This module does not do much on its own, and is meant to be used by other applications
These applications should inject a configuration path into the module before it can be used
Since you cannot dynamically modify what a module exports, you should use another approach. As with most situations you encounter in programming, there is no single way that is always right; much depends on your requirements and limitations.
The easiest way to do this (which I don't recommend) is to use a global variable, which you set in your myModule.js file and which will be used by the other files in your module. The biggest drawback of this approach is that you wouldn't be able to use multiple instances of the module at the same time with different configurations. Also, any other module could easily modify (deliberately or not) your configuration at any time, simply by changing the value of the global variable, so it's also a security risk.
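To make that drawback concrete, here is a minimal sketch of the global-variable approach (the global's name is my invention):
// myModule.js: global-variable approach (not recommended)
module.exports = function (configPath) {
    // Any file in the process can read *and overwrite* this value.
    global.myModuleConfig = require(configPath);
};

// myModule/models/item.js
var config = global.myModuleConfig; // implicit, fragile coupling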
A much better way, which will probably require more work on your part - depending on how many files you have - is to implement some kind of Inversion of Control (IoC). In your case, you could turn all your exports into functions that accept a config, and then initialise them by passing the active configuration after you require the module. I don't know the specifics of your implementation, so here is some sample code:
// submodule1.js
module.exports = function (config) {
    // return something that uses the configuration
};

// myModule.js
var fs = require('fs');
var submodule1 = require('./submodule1');
var submodule2 = require('./submodule2');
// ...
module.exports = function (configPath) {
    var config = JSON.parse(fs.readFileSync(configPath));
    var sm1 = submodule1(config);
    var sm2 = submodule2(config);
    return /* an object that uses sm1 and sm2 */;
};
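From the consuming application, usage would then look roughly like this (a sketch reusing the question's path):
// app.js
var myModule = require('myModule')(__dirname + '/config/myModuleCnfig.json');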
If your module is quite complex, you can use an IoC library that does the binding for you. A good one is Electrolyte.
Hope this helps.
PREVIOUS ANSWER:
You can use a library called jsop:
var jsop = require('jsop');
var config = jsop('./config/myModuleCnfig.json');
If you don't want to add a dependency to this module, the linked GitHub page also has a snippet that you can use to load the json config using only native methods.
EDIT: I just realised that this module is only for node 0.11, which you are probably not using. Since you probably don't need the writing functionality, you can use the following snippet instead:
var fs = require('fs')
var config = JSON.parse(fs.readFileSync('./config/myModuleCnfig.json'))
EDIT 2:
Now I think I understand your problem better. To pass the path to the required configuration, you can do something like this:
// myModule.js
var fs = require('fs');
module.exports = function (configPath) {
    var config = JSON.parse(fs.readFileSync(configPath));
    return config;
};

Is there a way to dynamically set src and dest in gulpfile.js?

I have multiple sets of JS modules that I would like to concat into separate files. I don't want to have to create a separate concat task for each file. It would make more sense to be able to pass arguments into the gulp task "concat". Unfortunately, gulp doesn't allow arguments to be passed into tasks (I'm sure for good reason).
Any ideas of how I can accomplish this?
Use Case
A specific scenario would be website that has a global.js file for all pages as well as page specific js files.
Creating a task for each page specific js file will quickly make the gulpfile.js hard to manage as the site grows.
My dev environment:
I have a dev/js/ directory which has multiple sub-directories. Each sub-directory contains modules for a specific JS file, so each sub-directory needs to be concatenated into its own file within lib/js/.
Perhaps requirejs?
Maybe I should just look into using a module loader like requirejs.
I needed to take modules from my source sub-directory (src/modules/), concatenate a specific file to each individually (src/distribution), then pipe the result to a sub-directory in my distribution folder (dist/js/modules/).
I wasn't sure how many modules would end up being written for this project so I wanted to do it dynamically and found this to be the best (simplest) solution:
gulp.task("modules:js", () => {
let modules = fs.readdirSync("src/modules");
let concatModule = (module) => {
return gulp.src([
'src/distribution',
module
])
.pipe(concat(module))
.pipe(gulp.dest("build/js/modules"));
}
for (let module of modules) {
concatModule(module);
};
});
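One caveat (my note, not part of the original answer): the task starts several streams but returns none, so gulp cannot tell when they all complete. Merging the streams, as in the webpack answer above, fixes that; a sketch, assuming concatModule is hoisted out of the task body:
var merge = require('merge-stream');

gulp.task('modules:js', () => {
    let modules = fs.readdirSync('src/modules');
    // Return one merged stream so gulp can track completion.
    return merge.apply(null, modules.map(concatModule));
});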
You could make concatJS a higher-order function:
var concatJS = function (src, filename, dest) {
    return function () {
        return gulp.src(src)
            .pipe(concat(filename))
            .pipe(gulp.dest(dest));
    };
};
gulp.task('concat-1', concatJS('src/module-1', 'module-1.js', 'build/module-1'));
gulp.task('concat-2', concatJS('src/module-2', 'module-2.js', 'build/module-2'));
//etc...
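If the set of modules grows, the same factory can register tasks in a loop (module names here are illustrative):
['module-1', 'module-2', 'module-3'].forEach(function (name) {
    gulp.task('concat-' + name, concatJS('src/' + name, name + '.js', 'build/' + name));
});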
Note: You'd probably be better off using a bundler like browserify or webpack. Since asking this question I have switched to browserify rather than trying to roll my own solution.
Improved Solution:
var fs = require("fs");
/* other requires omitted */
/* Set JS dev directory in one place */
var jsSrc = "dev/js/";
var jsDest = "lib/js/";
var concat = function (path) {
path = path.replace(/\\/g, "/");
var src = path.replace(/(\/[^\/]+?\.js$)|(\/$)/, "/*.js";
var filename = src.match(/\/([^\/]+?)(\/[^\/]+?\.js$)/)[1] + ".js";
gulp.src(src)
.pipe(concat(filename)
.pipe(gulp.dest(jsDest));
});
/* The concat task just runs the concat function for
* each directory in the javascript development directory.
* It will take a performance hit, but allows concat to be
* run as a dependency in a pinch.
*/
gulp.task("concat", function () {
var dirArr = fs.readdirSync(jsDev);
for (var d in dirArr) {
var path = jsDev+dirArr[d]+"/";
concat(path);
}
});
/* Run "concat" as a dependency of the default task */
gulp.taks("default", ["concat"], function () {
var JSWatcher = gulp.watch([jsSrc+"**/*.js"]);
JSWatcher.on("change", function (event) {
concat(event.path);
});
});
Alright, I think this works. It's a little bit of a hack though, and doesn't work for all use cases.
... removed previous example to save space ...

Compiling dynamically required modules with Browserify

I am using Browserify to compile a large Node.js application into a single file (using options --bare and --ignore-missing [to avoid troubles with lib-cov in Express]). I have some code to dynamically load modules based on what is available in a directory:
var fs = require('fs'),
    path = require('path');

fs.readdirSync(__dirname).forEach(function (file) {
    if (file !== 'index.js' && fs.statSync(path.join(__dirname, file)).isFile()) {
        module.exports[file.substring(0, file.length - 3)] = require(path.join(__dirname, file));
    }
});
I'm getting strange errors in my application where arbitrary text files are being loaded from the directory my compiled file is loaded in. I think it's because paths are no longer set correctly, and because Browserify can't require() the correct files that are dynamically loaded like this.
Short of making a static index.js file, is there a preferred method of dynamically requiring a directory of modules that is out-of-the-box compatible with Browserify?
This plugin allows you to require glob patterns: require-globify
Then, with a little hack, you can add all the files at compile time without executing them:
// Hack to compile glob files. Don't call this function!
function ಠ_ಠ() {
    require('views/**/*.js', { glob: true });
}
And, for example, you could require and execute a specific file when you need it :D
var homePage = require('views/' + currentView);
Browserify does not support dynamic requires - see GH issue 377.
The only method I'm aware of for dynamically requiring a directory: a build step that lists the directory files and writes a "static" index.js file.
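Such a build step could look roughly like this (a sketch; the routes directory and file layout are my assumptions):
// build-index.js: regenerate routes/index.js before running browserify
var fs = require('fs');
var path = require('path');

var dir = path.join(__dirname, 'routes');
var lines = fs.readdirSync(dir)
    .filter(function (file) { return file !== 'index.js' && /\.js$/.test(file); })
    .map(function (file) {
        var name = path.basename(file, '.js');
        return "exports['" + name + "'] = require('./" + file + "');";
    });
fs.writeFileSync(path.join(dir, 'index.js'), lines.join('\n') + '\n');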
There's also the bulkify transform, as documented here:
https://github.com/chrisdavies/tech-thoughts/blob/master/browserify-include-directory.md
Basically, you can do this in your app.js or whatever:
var bulk = require('bulk-require');
// Require all of the scripts in the controllers directory
bulk(__dirname, ['controllers/**/*.js']);
And my gulpfile has something like this in it:
gulp.task('js', function () {
    return gulp.src('./src/js/init.js')
        .pipe(browserify({
            transform: ['bulkify']
        }))
        .pipe(rename('app.js'))
        .pipe(uglify())
        .pipe(gulp.dest('./dest/js'));
});
