Ordering files in Grunt with an external file - javascript

I'm configuring Grunt with grunt-contrib-concat to concatenate about 20 JavaScript files. They have to be in a specific order and I'm wondering if there is a neat way to do this without messing up my Gruntfile.js.
What I did, and what worked well, was declaring a variable called 'libraries' with a function that returns an array of all the files in the right order.
var libraries = new (function () {
    return [
        '/javascript/libs/jquery.min.js',
        '/javascript/libs/jquery.address.js',
        '/javascript/libs/jquery.console.js'
    ];
});
And then concat (simplified, just an example):
concat: {
    libs: {
        files: {
            'libs.js': [libraries],
        },
    },
    main: {
        files: {
            'main.js': [main]
        }
    }
},
So when I call 'libraries' in my task configuration everything works fine, but I would like to declare this list in a separate file.
Unfortunately I couldn't find anything, nor do I know if this is even possible. Hope that someone could help me out! Thanks in advance :-)

I found a solution! Since Grunt is built on Node.js, it's possible to use module.exports. What I did was set up an external file called libraries.js, which is in my Grunt directory.
var exports = module.exports = {};

exports.customLibrary = function () {
    return [
        // Path to a library
        // Path to another library
        // and so on...
    ];
};

exports.mainScripts = function () {
    return [
        // Path to a library
        // Path to another library
        // and so on...
    ];
};
Then I import this module by declaring a variable in Gruntfile.js
var libraries = require('../javascript/libraries.js');
To use the methods declared in libraries.js, I set two more variables which return an array with all the necessary files in the desired order:
var customLibrary = libraries.customLibrary();
var mainScripts = libraries.mainScripts();
I use these variables to define the source in the concat task. Hope this is helpful!
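For completeness, here is a minimal sketch of how those variables could feed the concat task (the surrounding Gruntfile boilerplate and output paths are my own assumptions, not taken from the original post):
var libraries = require('../javascript/libraries.js');
var customLibrary = libraries.customLibrary();
var mainScripts = libraries.mainScripts();

module.exports = function (grunt) {
    grunt.initConfig({
        concat: {
            libs: {
                // customLibrary is a plain array of paths, so it can be used
                // directly as the source list for the libs.js bundle.
                files: {
                    'libs.js': customLibrary
                }
            },
            main: {
                files: {
                    'main.js': mainScripts
                }
            }
        }
    });

    grunt.loadNpmTasks('grunt-contrib-concat');
};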

Related

Webpack - Require.context - How to require all .js files except `_test.js` in a directory?

My goal was to create a file that would:
1. Require all of the JS files in a directory that didn't end in _test.js
2. Do a module.exports equal to an array of module names returned from those view files.
I thought I had it with this:
// Automagically crawls through this directory, finds every js file inside any
// subdirectory, removes test files, and requires the resulting list of files,
// registering the exported module names as dependencies to the myApp.demoApp.views module.
var _ = require('lodash'); // assumes lodash is available for chaining
var context = require.context('.', true, /\/.*\/.*\.js$/);

var moduleNames = _.chain(context.keys())
    .filter(function (key) {
        console.log(key, key.indexOf('_test.js') == -1);
        return key.indexOf('_test.js') == -1;
    })
    .map(function (key) {
        console.log("KEY", key);
        return context(key);
    })
    .value();

module.exports = angular.module('myApp.demoApp.views', moduleNames).name;
#2 is working as intended.
#1: Unfortunately I was naive. While the module names are filtered out, require.context still pulls in all of the _test files, so the test files end up in my built code.
I tried to fix this by updating the regex, but JavaScript doesn't support regex negative lookbehind and I'm not regex-savvy enough to do it without that.
OK, I was able to use the answer in Slava.K's comment to answer my question. Here's the final code below. I had to include (?!.*index) in the regex because this code was including itself, index.views.js.
var context = require.context('.', true, /^(?!.*index).*\/(?!.*test).*\.js$/);

var moduleNames = _.chain(context.keys())
    .map(function (key) {
        return context(key);
    })
    .value();

module.exports = angular.module('myApp.demoApp.views', moduleNames).name;
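As a quick sanity check of what the final regex accepts and rejects (the sample keys below are made up for illustration, not taken from the project):
var pattern = /^(?!.*index).*\/(?!.*test).*\.js$/;

console.log(pattern.test('./views/demo.view.js'));      // true  -> gets required
console.log(pattern.test('./views/demo.view_test.js')); // false -> blocked by (?!.*test)
console.log(pattern.test('./index.views.js'));          // false -> blocked by (?!.*index)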

Use gulp for typescript compilation

Following the Angular 2 quick start guide, they use the TypeScript compiler and a tsconfig.json file to work with it. I was looking up ways to use gulp for this, and indeed there seem to be ways to achieve it; however, I'm a bit confused about the correct implementation with Angular 2.
Essentially, gulp-typescript and gulp-tslint seem to be the two plugins to achieve this, and somehow the tsconfig.json file is also in play here, although I don't grasp why.
Could anyone provide an example implementation that will achieve the above? I believe all development .ts files should be within the src folder and the JavaScript needs to be pumped over to the build folder. (Assume for now that both folders have the setup from the Angular 2 quick start.)
I've set up gulp to work from a gulpfile.js/ folder. In this folder are index.js, config.js and a tasks/ folder, and tasks/typescript.js contains the task that compiles TypeScript (the tasks folder has 15 other tasks). So instead of one huge gulpfile.js I have manageable chunks that each do just one thing...
gulpfile.js/index.js
var gulp = require('gulp');
var config = require('./config.js');
var plugins = require('gulp-load-plugins')();
plugins.brsync = require('browser-sync').create();
plugins.builder = require('systemjs-builder');

function run(name) {
    return require('./tasks/' + name)(gulp, plugins, config);
}

// ...other tasks, in alphabetical order! (:
gulp.task('typescript', run('typescript'));
gulpfile.js/config.js
var distDir = 'dist';
var staticDir = isGAE() ? '/static' : '';

module.exports = {
    SRC: {
        TYPESCRIPT: 'src/scripts/**/*.ts',
    },
    DST: {
        MAPS: './maps',
        SCRIPTS: distDir + staticDir + '/scripts',
    },
};
gulpfile.js/tasks/typescript.js
module.exports = function (gulp, plugins, CONFIG) {
    return function typescript() {
        var tsProject = plugins.typescript.createProject('tsconfig.json');
        var tsReporter = plugins.typescript.reporter.fullReporter();
        var stream = gulp
            .src(CONFIG.SRC.TYPESCRIPT, { since: gulp.lastRun('typescript') })
            .pipe(plugins.sourcemaps.init())
            .pipe(plugins.typescript(tsProject, undefined, tsReporter))
            .pipe(plugins.sourcemaps.write(CONFIG.DST.MAPS,
                { sourceMappingURLPrefix: '/scripts' }))
            .pipe(gulp.dest(CONFIG.DST.SCRIPTS))
            .on('error', plugins.util.log);
        return stream;
    };
};
gulpfile.js/tasks/watch.js
module.exports = function (gulp, plugins, CONFIG) {
    return function watch() {
        plugins.brsync.init(CONFIG.BRSYNC);
        gulp.watch(CONFIG.SRC.TEST, () => queue_tasks(['karma']));
        gulp.watch(CONFIG.SRC.TYPESCRIPT, () => queue_tasks(['typescript'], brsync_reload));
    };
};
I had an issue with gulp watch: if you're watching files, work on more than one file and save them all, it will run the task multiple times, which can be annoying. Check the link for the implementation of the queue_tasks() function...
Also note that I'm using Gulp 4:
gulp.src(CONFIG.SRC.TYPESCRIPT, { since: gulp.lastRun('typescript') })
I've added the since option in src() to cache files and pass only changed files down the pipe. I implemented this just 2 days ago and didn't test it with TypeScript files (it works in other places), so if there are issues just remove it...
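To round this out, a minimal sketch (my own assumption, not from the original answer) of how these per-file tasks might be composed into a default task with Gulp 4's series():
// gulpfile.js/index.js, extended with a composed default task
var gulp = require('gulp');
var config = require('./config.js');
var plugins = require('gulp-load-plugins')();

function run(name) {
    return require('./tasks/' + name)(gulp, plugins, config);
}

gulp.task('typescript', run('typescript'));
gulp.task('watch', run('watch'));

// Compile once, then keep watching for changes.
gulp.task('default', gulp.series('typescript', 'watch'));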

How can I insert content into a file during a gulp build?

I managed to accomplish my task using a gulp plugin called gulp-insert like this:
gulp.task('compile-js', function () {
    // Minify and bundle client scripts.
    var scripts = gulp.src([
            srcDir + '/routes/**/*.js',
            srcDir + '/shared/js/**/*.js'
        ])
        // Sort angular files so the module definition appears
        // first in the bundle.
        .pipe(gulpAngularFilesort())
        // Add angular dependency injection annotations before
        // minifying the bundle.
        .pipe(gulpNgAnnotate())
        // Begin building source maps for easy debugging of the
        // bundled code.
        .pipe(gulpSourcemaps.init())
        .pipe(gulpConcat('bundle.js'))
        // Buffer the bundle.js file and replace the appConfig
        // placeholder string with a stringified config object.
        .pipe(gulpInsert.transform(function (contents) {
            return contents.replace("'{{{appConfigObj}}}'", JSON.stringify(config));
        }))
        .pipe(gulpUglify())
        // Finish off sourcemap tracking and write the map to the
        // bottom of the bundle file.
        .pipe(gulpSourcemaps.write())
        .pipe(gulp.dest(buildDir + '/shared/js'));

    return scripts.pipe(gulpLivereload());
});
What I'm doing is reading our app's configuration file which is managed by the config module on npm. Getting our config file from server-side code is a snap using var config = require('config');, but we're a single-page app and frequently need access to the configuration settings on the client-side. To do that I stuff the config object into an Angular service.
Here's the Angular service before gulp build.
angular.module('app')
    .factory('appConfig', function () {
        return '{{{appConfigObj}}}';
    });
The placeholder is in a string so that it's valid JavaScript for some of the other gulp plugins that process the file first. The gulpInsert utility lets me insert the config like this.
.pipe(gulpInsert.transform(function (contents) {
    return contents.replace("'{{{appConfigObj}}}'", JSON.stringify(config));
}))
This works but feels a little hacky. Not to mention that it has to buffer the whole bundled file just so I can perform the operation. Is there a more elegant way to accomplish the same thing? Preferably one that allows the stream to keep flowing smoothly without buffering the whole bundle at the end? Thanks!
Have you checked gulp-replace-task?
Something like
[...]
.pipe(gulpSourcemaps.init())
.pipe(replace({
    patterns: [{
        match: '{{{appConfigObj}}}',
        replacement: config
    }],
    usePrefix: false
}))
.pipe(gulpUglify())
[...]
Admittedly, this feels a bit hacky, too, but maybe slightly better... I'm using envify and gulp-env in a React project. You could do something like this.
gulpfile.js:
var gulp = require('gulp');
var config = require('config');
var envify = require('envify');
var env = require('gulp-env');

gulp.task('env', function () {
    env({
        vars: {
            APP_CONFIG: JSON.stringify(config)
        }
    });
});

gulp.task('compile-js', ['env'], function () {
    // ... replace `gulp-insert` with `envify`
});
factory:
angular.module('app')
    .factory('appConfig', function () {
        return process.env.APP_CONFIG;
    });
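One caveat worth adding (my own note, not part of the answer): since APP_CONFIG holds JSON.stringify(config), envify substitutes a string literal for process.env.APP_CONFIG, so the factory will probably need to parse it back into an object, along these lines:
angular.module('app')
    .factory('appConfig', function () {
        // The build-time replacement is a JSON string, so turn it back
        // into an object before handing it to consumers.
        return angular.fromJson(process.env.APP_CONFIG);
    });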

Is there a way to dynamically set src and dest in gulpfile.js?

I have multiple sets of js modules that I would like to concat into separate files. I don't want to have to create a separate concat task for each file. It would make more sense to be able to pass arguments into the gulp task "concat". Unfortunately gulp doesn't allow arguments to be passed into tasks (I'm sure for good reason).
Any ideas of how I can accomplish this?
Use Case
A specific scenario would be a website that has a global.js file for all pages as well as page-specific js files.
Creating a task for each page-specific js file will quickly make the gulpfile.js hard to manage as the site grows.
My dev environment:
I have a dev/js/ directory which has multiple sub-directories. Each sub-directory contains modules for a specific js file, so each sub-directory needs to be concatenated into its own file within lib/js/.
Perhaps requirejs?
Maybe I should just look into using a module loader like requirejs.
I needed to take modules from my source sub-directory (src/modules/), concatenate a specific file to each individually (src/distribution), then pipe the result to a sub-directory in my distribution folder (dist/js/modules/).
I wasn't sure how many modules would end up being written for this project so I wanted to do it dynamically and found this to be the best (simplest) solution:
gulp.task("modules:js", () => {
    let modules = fs.readdirSync("src/modules");

    let concatModule = (module) => {
        return gulp.src([
                'src/distribution',
                module
            ])
            .pipe(concat(module))
            .pipe(gulp.dest("build/js/modules"));
    };

    for (let module of modules) {
        concatModule(module);
    }
});
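One possible refinement (my suggestion, not part of the answer above): merging the per-module streams lets gulp know when the whole task has finished, which matters in Gulp 4. A sketch, assuming the same directory layout and the merge-stream package:
const gulp = require("gulp");
const fs = require("fs");
const concat = require("gulp-concat");
const merge = require("merge-stream");

gulp.task("modules:js", () => {
    const modules = fs.readdirSync("src/modules");

    // Build one concat stream per module directory and merge them so the
    // task only completes once every bundle has been written.
    const streams = modules.map((module) =>
        gulp.src(["src/distribution", module])
            .pipe(concat(module))
            .pipe(gulp.dest("build/js/modules"))
    );

    return merge(...streams);
});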
You could make concatJS a higher-order function:
var concatJS = function (src, filename, dest) {
    return function () {
        // Return the stream so gulp knows when the task has finished.
        return gulp.src(src)
            .pipe(concat(filename))
            .pipe(gulp.dest(dest));
    };
};
gulp.task('concat-1', concatJS('src/module-1', 'module-1.js', 'build/module-1'));
gulp.task('concat-2', concatJS('src/module-2', 'module-2.js', 'build/module-2'));
//etc...
Note: You'd probably be better off using a bundler like browserify or webpack. Since asking this question I have switched to browserify rather than trying to roll my own solution.
Improved Solution:
var fs = require("fs");
/* other requires omitted */

/* Set JS dev directory in one place */
var jsSrc = "dev/js/";
var jsDest = "lib/js/";

/* Helper (named concatDir so it doesn't shadow the gulp-concat plugin) */
var concatDir = function (path) {
    path = path.replace(/\\/g, "/");
    var src = path.replace(/(\/[^\/]+?\.js$)|(\/$)/, "/*.js");
    var filename = src.match(/\/([^\/]+?)(\/[^\/]+?\.js$)/)[1] + ".js";
    gulp.src(src)
        .pipe(concat(filename))
        .pipe(gulp.dest(jsDest));
};

/* The concat task just runs the concatDir helper for
 * each directory in the javascript development directory.
 * It will take a performance hit, but allows concat to be
 * run as a dependency in a pinch.
 */
gulp.task("concat", function () {
    var dirArr = fs.readdirSync(jsSrc);
    for (var d in dirArr) {
        var path = jsSrc + dirArr[d] + "/";
        concatDir(path);
    }
});

/* Run "concat" as a dependency of the default task */
gulp.task("default", ["concat"], function () {
    var JSWatcher = gulp.watch([jsSrc + "**/*.js"]);
    JSWatcher.on("change", function (event) {
        concatDir(event.path);
    });
});
Alright, I think this works. It's a little bit of a hack though, and doesn't work for all use cases.
... removed previous example to save space ...

Relative paths with RequireJS modules/packages

I'm fairly new to RequireJS and I've run into a bit of a problem. I've written a little framework built on Backbone using RequireJS and I want it to be re-usable in different projects. So, with some searching I learned that require allows packages. This seemed like what I was looking for. I have a main.js file to launch my app that essentially looks like this:
require.config({
    packages: ['framework']
});

require(['framework'], function (framework) {
    framework.createDash();
});
Then in the same directory as my main.js I have another directory called "framework" which contains another main.js which looks like this:
define(function (require, exports, module) {
    exports.createDash = function (dash, element) {
        require(['dash/dash.model', 'dash/dash.view'], function (DashModel, DashView) {
            return new DashView({
                model: new DashModel(dash),
                el: element ? element : window
            });
        });
    };
});
In searching I found this page which indicates that the 'require' argument should be scoped to the submodule. However, when I try to require things they are still relative to my original main.js. I've tried a number of things and searched for hours to no avail. Is there any way I can have my require/define calls within my package resolved relative to the main.js in its root?
You need to define your submodule as a package in the require configuration:
require.config({
    packages: [{
        name: 'packagename',
        location: 'path/to/your/package/root', // default 'packagename'
        main: 'scriptfileToLoad'               // default 'main'
    }]
    // ... some other stuff ...
});
To load your module you just need to use your 'packagename' in the requirements:
define(['jquery', 'packagename'], function ($, MyPackage) {
    MyPackage.useIt();
});
In your package you must use the ./ prefix to load your files relative to your submodule:
define(['globalDependency', './myLocalFile'], function (Asdf, LocalFile) {
    LocalFile.finallyLoaded();
});
There is a useful shortcut: if your package name equals your location and your main file is called 'main.js', then you can shorten this:
packages: [{
    name: 'packagename',
    location: 'packagename',
    main: 'main'
}]
to this:
packages: ['packagename']
As far as I can see, you already tried to define a package, but did you also use the ./ prefix? Without this prefix require will try to find the files in its global root path. And without a package, ./ will be useless because the relative path is the same as the global root path.
Cheers
I figured out the answer to my question, and the solution (they were not the same apparently). I guess I'll post it here in case it can help someone else in the future.
Essentially what I wanted was to load my framework within its own context. I found the context option under the configuration section on require's website and an example of how to use it. Originally I tried this by doing something like:
var req = require.config({
    baseUrl: 'framework',
    context: 'framework',
    paths: {
        jQuery: 'lib/jquery/jquery-1.7.min.js',
        Underscore: 'lib/underscore/underscore.min.js',
        Backbone: 'lib/backbone/backbone.min.js'
        // etc...
    }
});

req(['main'], function () {});
There were two problems with this. First, my 'req' variable was being defined outside of the framework, but I wanted the framework to define its own paths. And second, whenever a file outside of the framework would require a file within the framework, which would in turn require 'jQuery', for example, then jQuery (or whatever else) wouldn't be required from within the context of the framework instance of require, and so it couldn't find the file.
What I ended up doing was defining my framework's main.js to look something like this:
var paths = {
    jQuery: 'lib/jquery/jquery-1.7.min.js',
    Underscore: 'lib/underscore/underscore.min.js',
    Backbone: 'lib/backbone/backbone.min.js'
    // etc...
};

define(function () {
    var exports = {};

    exports.initialize = function (baseUrl, overridePaths, callback) {
        if (!overridePaths) {
            overridePaths = {};
        }
        if (baseUrl && baseUrl[baseUrl.length - 1] != '/') {
            baseUrl = baseUrl + '/';
        }
        var fullpaths = {};
        for (var path in paths) {
            // Don't add baseUrl to anything that looks like a full URL like
            // 'http://...' or anything that begins with a forward slash
            if (paths[path].match(/^(?:.*:\/\/|\/)/)) {
                fullpaths[path] = paths[path];
            }
            else {
                fullpaths[path] = baseUrl + paths[path];
            }
        }
        var config = { paths: fullpaths };
        for (var pathName in overridePaths) {
            config.paths[pathName] = overridePaths[pathName];
        }
        require.config(config);
        // Do anything else you need to do such as defining more functions for exports
        if (callback) {
            callback();
        }
    };

    return exports;
});
And then in my project's main.js file I just do this:
require(['framework/main'], function (framework) {
    // NOTE: This setTimeout() call is used because, for whatever reason, if you make
    // a 'require' call in here or in the framework without it, it will just hang
    // and never actually go fetch the files in the browser. There's probably a
    // better way to handle this, but I don't know what it is.
    setTimeout(function () {
        framework.initialize('framework', null, function () {
            // Do stuff here
        });
    }, 0);
});
This takes whatever is passed in to the framework's initialize() method for 'baseURL' and prepends that to any paths that the framework defines that do not start with a forward slash or 'anything://', unless they are override paths. This allows the package using the framework to override things like 'jQuery'.
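To make that override behaviour concrete, here is a hypothetical call (the CDN path is made up, not from the original post) showing how a consuming project could swap out one of the framework's paths:
framework.initialize('framework', { jQuery: '/cdn/jquery-1.7.min.js' }, function () {
    // At this point require.config() has been applied:
    // 'Backbone' resolves to 'framework/lib/backbone/backbone.min.js',
    // while the absolute 'jQuery' override is used exactly as given.
});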
This worked for me, adding a "./" prefix to the module names:
define(function (require, exports, module) {
    exports.createDash = function (dash, element) {
        require(['./dash/dash.model', './dash/dash.view'], function (DashModel, DashView) {
            return new DashView({
                model: new DashModel(dash),
                el: element ? element : window
            });
        });
    };
});
A process that worked well for me, allowing a package with submodules to be used directly from data-main or from an outside framework (assuming that its main.js, or other package main, is called by a particular name), was to use var baseUrl = require.toUrl('packageName') + '/../' as a prefix in a require.config({ paths: { ... } }) configuration file. For instance:
var music21Base = require.toUrl('music21') + '/../';

require.config({
    paths: {
        'jquery': music21Base + 'ext/jquery/jquery.2.1.10.min',
        'subModuleLoader': music21Base + 'src/subModuleLoader'
    }
});
The setting of context: "xxx" worked fine for calling normal modules with ./modName but did not work for the paths argument for me.
