I managed to accomplish my task using a gulp plugin called gulp-insert like this:
gulp.task('compile-js', function () {
  // Minify and bundle client scripts.
  var scripts = gulp.src([
    srcDir + '/routes/**/*.js',
    srcDir + '/shared/js/**/*.js'
  ])
    // Sort angular files so the module definition appears
    // first in the bundle.
    .pipe(gulpAngularFilesort())
    // Add angular dependency injection annotations before
    // minifying the bundle.
    .pipe(gulpNgAnnotate())
    // Begin building source maps for easy debugging of the
    // bundled code.
    .pipe(gulpSourcemaps.init())
    .pipe(gulpConcat('bundle.js'))
    // Buffer the bundle.js file and replace the appConfig
    // placeholder string with a stringified config object.
    .pipe(gulpInsert.transform(function (contents) {
      return contents.replace("'{{{appConfigObj}}}'", JSON.stringify(config));
    }))
    .pipe(gulpUglify())
    // Finish off sourcemap tracking and write the map to the
    // bottom of the bundle file.
    .pipe(gulpSourcemaps.write())
    .pipe(gulp.dest(buildDir + '/shared/js'));

  return scripts.pipe(gulpLivereload());
});
What I'm doing is reading our app's configuration file, which is managed by the config module on npm. Getting at the config from server-side code is a snap with var config = require('config');, but we're a single-page app and frequently need access to the configuration settings on the client side. To do that, I stuff the config object into an Angular service.
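For context, the config module reads its values from a config/ directory (config/default.json plus optional per-environment overrides) and exposes them as a plain object. A tiny, made-up example of the server-side usage; the key names are placeholders, not our real config:

var config = require('config');
console.log(config.api.baseUrl);        // direct property access
console.log(config.get('api.baseUrl')); // same value; throws if the key is missing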
Here's the Angular service before the gulp build.
angular.module('app')
  .factory('appConfig', function () {
    return '{{{appConfigObj}}}';
  });
The placeholder is in a string so that it's valid JavaScript for some of the other gulp plugins that process the file first. The gulpInsert utility lets me insert the config like this.
.pipe(gulpInsert.transform(function (contents) {
  return contents.replace("'{{{appConfigObj}}}'", JSON.stringify(config));
}))
This works but feels a little hacky. Not to mention that it has to buffer the whole bundled file just so I can perform the operation. Is there a more elegant way to accomplish the same thing? Preferably one that allows the stream to keep flowing smoothly without buffering the whole bundle at the end? Thanks!
Have you checked gulp-replace-task?
Something like
[...]
.pipe(gulpSourcemaps.init())
.pipe(replace({
  patterns: [{
    match: '{{{appConfigObj}}}',
    replacement: config
  }],
  usePrefix: false
}))
.pipe(gulpUglify())
[...]
Admittedly, this feels a bit hacky, too, but maybe slightly better...
I'm using envify and gulp-env in a React project. You could do something like this.
gulpfile.js:
var config = require('config');
var envify = require('envify');
var env = require('gulp-env'); // env() below comes from gulp-env

gulp.task('env', function () {
  env({
    vars: {
      APP_CONFIG: JSON.stringify(config)
    }
  });
});
gulp.task('compile-js', ['env'], function () {
// ... replace `gulp-insert` with `envify`
});
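To flesh out that comment a little, here's a rough sketch of what compile-js could look like if the bundle were built with Browserify; the entry and output paths are made up, so treat this as an illustration rather than a drop-in task:

var browserify = require('browserify');
var source = require('vinyl-source-stream');

gulp.task('compile-js', ['env'], function () {
  // The 'env' task has already put APP_CONFIG into process.env,
  // and the envify transform inlines it into the bundle.
  return browserify('./src/js/app.js')
    .transform('envify')
    .bundle()
    .pipe(source('bundle.js'))
    .pipe(gulp.dest('./build/js'));
});

Keep in mind that whatever envify inlines arrives as a string, so the factory below may need a JSON.parse around process.env.APP_CONFIG.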
factory:
angular.module('app')
  .factory('appConfig', function () {
    return process.env.APP_CONFIG;
  });
Related
Following the Angular 2 quick start guide, the authors there use the TypeScript compiler and a tsconfig.json file to work with it. I was looking for ways to use gulp for this, and there do indeed seem to be ways to achieve it; however, I'm a bit confused about the correct implementation with Angular 2.
Essentially, gulp-typescript and gulp-tslint seem to be the two plugins for this, and somehow the tsconfig.json file is also in play here, although I don't grasp why.
Could anyone provide an example implementation that achieves the above? I believe all development .ts files should live within the src folder, and the JavaScript needs to be pumped over to the build folder. (Assume for now that both folders are set up as in the Angular 2 quick start.)
I've set up gulp to work from a gulpfile.js/ folder. In this folder are index.js, config.js and a tasks/ folder, and tasks/typescript.js is the task that compiles TypeScript (the tasks folder has 15 other tasks). So instead of one huge gulpfile.js I have manageable chunks that each do just one thing...
gulpfile.js/index.js
var gulp = require('gulp');
var config = require('./config.js');
var plugins = require('gulp-load-plugins')();
plugins.brsync = require('browser-sync').create();
plugins.builder = require('systemjs-builder');

function run(name) {
  return require('./tasks/' + name)(gulp, plugins, config);
}

// ...other tasks, in alphabetical order! (:
gulp.task('typescript', run('typescript'));
gulpfile.js/config.js
var distDir = 'dist';
// isGAE() is defined elsewhere in the real config; presumably it detects a Google App Engine build.
var staticDir = isGAE() ? '/static' : '';

module.exports = {
  SRC: {
    TYPESCRIPT: 'src/scripts/**/*.ts',
  },
  DST: {
    MAPS: './maps',
    SCRIPTS: distDir + staticDir + '/scripts',
  },
};
gulpfile.js/tasks/typescript.js
module.exports = function (gulp, plugins, CONFIG) {
  return function typescript() {
    var tsProject = plugins.typescript.createProject('tsconfig.json');
    var tsReporter = plugins.typescript.reporter.fullReporter();

    var stream = gulp
      .src(CONFIG.SRC.TYPESCRIPT, { since: gulp.lastRun('typescript') })
      .pipe(plugins.sourcemaps.init())
      .pipe(plugins.typescript(tsProject, undefined, tsReporter))
      .pipe(plugins.sourcemaps.write(CONFIG.DST.MAPS,
        { sourceMappingURLPrefix: '/scripts' }))
      .pipe(gulp.dest(CONFIG.DST.SCRIPTS))
      .on('error', plugins.util.log);

    return stream;
  };
};
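To address the "why tsconfig.json" part of the question: plugins.typescript.createProject('tsconfig.json') makes gulp-typescript read its compiler options from that file, so gulp, your editor and the command-line compiler all share one configuration. A minimal sketch of what it might contain; the exact options depend on your project, these are just typical Angular 2-era values:

{
  "compilerOptions": {
    "target": "es5",
    "module": "commonjs",
    "moduleResolution": "node",
    "sourceMap": true,
    "experimentalDecorators": true,
    "emitDecoratorMetadata": true
  },
  "exclude": ["node_modules"]
}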
gulpfile.js/tasks/watch.js
module.exports = function (gulp, plugins, CONFIG) {
  return function watch() {
    plugins.brsync.init(CONFIG.BRSYNC);
    gulp.watch(CONFIG.SRC.TEST, () => queue_tasks(['karma']));
    gulp.watch(CONFIG.SRC.TYPESCRIPT, () => queue_tasks(['typescript'], brsync_reload));
  };
};
I had an issue with gulp watch: if you're watching files, work on more than one file, and save them all, it will run a task multiple times, which can be annoying. Check the link for the implementation of the queue_tasks() function...
Also note that I'm using Gulp 4:
gulp.src(CONFIG.SRC.TYPESCRIPT, { since: gulp.lastRun('typescript') })
I've added the since option in src() to cache files and pass only changed files down the pipe. I implemented this just 2 days ago and didn't test it with TypeScript files (it works in other places), so if there are issues just remove it...
So I have a simple use case, and it seems very similar to the use case described in the readme for https://github.com/jonkemp/gulp-useref.
I'm trying to generate a templates.js file with all of the Angular templates during the build. I am trying to do this and NOT have a templates.js file in my local project. So the idea was to merge the output of the template stream into the useref stream so that the resulting scripts.js file would contain all of the files referenced in my index file AND the generated templates output.
Here's what I have in the gulp task:
gulp.task('usemin:dist', ['clean:dist'], function() {
  var templatesStream = gulp.src([
    './app/**/*.html',
    '!./app/index.html',
    '!./app/404.html'
  ]).pipe(templateCache({
    module: 'myCoolApp'
  }));

  var assets = $useref.assets({
    additionalStreams: [templatesStream]
  });

  return gulp.src('./app/index.html')
    .pipe(assets)
    .pipe(assets.restore())
    .pipe($useref())
    .pipe(gulp.dest('./dist/'));
});
Now this should allow me to merge the output of the templatesStream and turn it all into one scripts.js file, I think...
I've also tried having <script src="scripts/templates.js"></script> in various forms sitting in my index file to try to assist it. None seem to work.
Anyone else doing this same type of thing? Seems like a common use-case.
I was able to get this to work by looking closely at the test cases.
I now have a templates.js script tag in my index.html file, which will 404 in my local environment.
My gulp task looks like this:
gulp.task('useref:dist', ['clean:dist'], function() {
  var templateStream = gulp.src([
    './app/**/*.html',
    '!./app/index.html',
    '!./app/404.html'
  ]).pipe(templateCache({
    module: 'digitalWorkspaceApp'
  }));

  var assets = $useref.assets({
    additionalStreams: [templateStream]
  });

  var jsFilter = $filter('**/*.js', {restore: true});

  return gulp.src('./app/index.html')
    .pipe(assets)
    .pipe($useref())
    .pipe(gulp.dest('./dist/'));
});
Immediately I can't really see the difference, but it may have all hinged on the addition of this non-existent file in my index.html.
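One detail worth spelling out (my reading of why it works, so treat it as an assumption): useref only bundles what is referenced between its build comments in index.html, and the additionalStreams content is matched against those references. So the locally 404ing templates.js tag has to sit inside the build block, roughly like this, with example file names:

<!-- build:js scripts/scripts.js -->
<script src="scripts/app.js"></script>
<!-- templates.js doesn't exist locally; it's supplied by additionalStreams at build time -->
<script src="scripts/templates.js"></script>
<!-- endbuild -->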
I have multiple sets of js modules that I would like to concat into separate files. I don't want to have to create a separate concat task for each file. It would make more sense to be able to pass arguments into the gulp task "concat". Unfortunately gulp doesn't allow arguments to be passed into tasks (I'm sure for good reason).
Any ideas of how I can accomplish this?
Use Case
A specific scenario would be a website that has a global.js file for all pages as well as page-specific js files.
Creating a task for each page specific js file will quickly make the gulpfile.js hard to manage as the site grows.
My dev environment:
I have a dev/js/ directory which has multiple sub-directories. Each sub-directory contains modules for a specific js file. So each sub-directory needs to be concatenated into its own file within lib/js/.
Perhaps requirejs?
Maybe I should just look into using a module loader like requirejs.
I needed to take modules from my source sub-directory (src/modules/), concatenate a specific file to each individually (src/distribution), then pipe the result to a sub-directory in my distribution folder (dist/js/modules/).
I wasn't sure how many modules would end up being written for this project so I wanted to do it dynamically and found this to be the best (simplest) solution:
gulp.task("modules:js", () => {
let modules = fs.readdirSync("src/modules");
let concatModule = (module) => {
return gulp.src([
'src/distribution',
module
])
.pipe(concat(module))
.pipe(gulp.dest("build/js/modules"));
}
for (let module of modules) {
concatModule(module);
};
});
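One caveat with the loop above: gulp is never told when all of those streams have finished. A small variation of my own (assuming the merge-stream package is installed) returns a single merged stream so task completion is signalled:

var merge = require('merge-stream'); // assumption: npm install merge-stream

gulp.task("modules:js", () => {
  let modules = fs.readdirSync("src/modules");

  let concatModule = (module) => {
    return gulp.src(['src/distribution', 'src/modules/' + module])
      .pipe(concat(module))
      .pipe(gulp.dest("build/js/modules"));
  };

  // Returning the merged stream lets gulp wait for every concat to finish.
  return merge(...modules.map(concatModule));
});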
You could make concatJS a higher-order function:
var concatJS = function (src, filename, dest) {
  return function() {
    // Return the stream so gulp knows when the task has finished.
    return gulp.src(src)
      .pipe(concat(filename))
      .pipe(gulp.dest(dest));
  };
};

gulp.task('concat-1', concatJS('src/module-1', 'module-1.js', 'build/module-1'));
gulp.task('concat-2', concatJS('src/module-2', 'module-2.js', 'build/module-2'));
//etc...
Note: You'd probably be better off using a bundler like browserify or webpack. Since asking this question I have switched to browserify rather than trying to roll my own solution.
Improved Solution:
var fs = require("fs");
/* other requires omitted */
/* Set JS dev directory in one place */
var jsSrc = "dev/js/";
var jsDest = "lib/js/";
var concat = function (path) {
path = path.replace(/\\/g, "/");
var src = path.replace(/(\/[^\/]+?\.js$)|(\/$)/, "/*.js";
var filename = src.match(/\/([^\/]+?)(\/[^\/]+?\.js$)/)[1] + ".js";
gulp.src(src)
.pipe(concat(filename)
.pipe(gulp.dest(jsDest));
});
/* The concat task just runs the concat function for
* each directory in the javascript development directory.
* It will take a performance hit, but allows concat to be
* run as a dependency in a pinch.
*/
gulp.task("concat", function () {
var dirArr = fs.readdirSync(jsDev);
for (var d in dirArr) {
var path = jsDev+dirArr[d]+"/";
concat(path);
}
});
/* Run "concat" as a dependency of the default task */
gulp.taks("default", ["concat"], function () {
var JSWatcher = gulp.watch([jsSrc+"**/*.js"]);
JSWatcher.on("change", function (event) {
concat(event.path);
});
});
Alright, I think this works. It's a little bit of a hack though, and doesn't work for all use cases.
... removed previous example to save space ...
I am using Browserify to compile a large Node.js application into a single file (using options --bare and --ignore-missing [to avoid troubles with lib-cov in Express]). I have some code to dynamically load modules based on what is available in a directory:
var fs = require('fs'),
    path = require('path');

fs.readdirSync(__dirname).forEach(function (file) {
  if (file !== 'index.js' && fs.statSync(path.join(__dirname, file)).isFile()) {
    module.exports[file.substring(0, file.length-3)] = require(path.join(__dirname, file));
  }
});
I'm getting strange errors in my application where arbitrary text files are being loaded from the directory my compiled file is located in. I think it's because paths are no longer set correctly, and because Browserify won't be able to require() the correct files that are dynamically loaded like this.
Short of making a static index.js file, is there a preferred method of dynamically requiring a directory of modules that is out-of-the-box compatible with Browserify?
This plugin lets you require glob patterns: require-globify
Then, with a little hack, you can add all the files at compile time without executing them:
// Hack to compile Glob files. Don't call this function!
function ಠ_ಠ() {
  require('views/**/*.js', { glob: true });
}
And, for example, you could require and execute a specific file when you need it :D
var homePage = require('views/'+currentView)
Browserify does not support dynamic requires - see GH issue 377.
The only method I am aware of for dynamically requiring a directory: a build step that lists the directory files and writes the "static" index.js file.
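For what it's worth, here is a rough sketch of such a build step (the directory, task name and export style are placeholders, not a canonical recipe); it simply writes the requires out so Browserify sees them statically:

// Hypothetical gulp task that generates a static index.js for Browserify.
var fs = require('fs');
var path = require('path');

gulp.task('gen-index', function (done) {
  var dir = 'lib/modules'; // placeholder directory
  var lines = fs.readdirSync(dir)
    .filter(function (file) { return file !== 'index.js' && /\.js$/.test(file); })
    .map(function (file) {
      var name = path.basename(file, '.js');
      return "module.exports['" + name + "'] = require('./" + file + "');";
    });
  fs.writeFileSync(path.join(dir, 'index.js'), lines.join('\n') + '\n');
  done();
});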
There's also the bulkify transform, as documented here:
https://github.com/chrisdavies/tech-thoughts/blob/master/browserify-include-directory.md
Basically, you can do this in your app.js or whatever:
var bulk = require('bulk-require');
// Require all of the scripts in the controllers directory
bulk(__dirname, ['controllers/**/*.js']);
And my gulpfile has something like this in it:
gulp.task('js', function () {
  return gulp.src('./src/js/init.js')
    .pipe(browserify({
      transform: ['bulkify']
    }))
    .pipe(rename('app.js'))
    .pipe(uglify())
    .pipe(gulp.dest('./dest/js'));
});
I've written a function which I'd like to use as a Grunt task. I can do this by adding this to the Gruntfile:
grunt.registerTask('foo', function () {
  // code here
});
However, it makes more sense to keep the function code in a separate file. I plan to define a bunch of these custom tasks and I don't want to bloat the Gruntfile.
I'm not sure what the preferred way of registering such tasks is. I have found this to work:
grunt.registerTask('foo', function () {
  require('./path/to/foo.js')(grunt);
});
So I still have an inline function like in the first example, but this time it loads an external file and invokes it immediately. In that external file, I of course have to write:
module.exports = function (grunt) {
  // code here
};
This works, but it feels hackish. Is there a more proper way of doing this?
Short answer: the alternative to this
grunt.registerTask('foo', function () {
  require('./path/to/foo.js')(grunt);
});
is http://gruntjs.com/api/grunt#grunt.loadtasks
Long answer:
Normally, when you have tasks in external files, they are written as separate Node.js modules. So, if that is something you will use in several projects, you may want to register it in the npm registry. Later, inside your Gruntfile.js, you will have:
grunt.loadNpmTasks('your-module-here');
Grunt's documentation says:
Load tasks from the specified Grunt plugin. This plugin must be installed locally via npm, and must be relative to the Gruntfile
However, if you don't want to publish anything to the registry, you should use loadTasks:
grunt.loadTasks('path/to/your/task/directory');
So, once the task is loaded you may use it in your configuration.
Here is a simple grunt task placed in external file:
'use strict';

module.exports = function(grunt) {
  grunt.registerMultiTask('nameoftask', 'description', function() {
    var self = this;
    // this.data here contains your configuration
  });
};
And later in Gruntfile.js
grunt.initConfig({
  nameoftask: {
    task: {
      // parameters here
    }
  }
});
I had a similar problem.
I wanted to modularize my grunt config and custom tasks by functionality (big UX/UI blocks) rather than by technical features. AND I wanted to keep the config files next to the task files... (better when working on a large legacy codebase with a varied team of 5 people with varying JS knowledge)
So I externalized my tasks like Krasimir did.
In the gruntfile, I wrote:
// power of globbing for loading tasks
var tasksLocations = ['./grunt-config/default_tasks.js', './grunt-config/**/tasks.js'];

var taskFiles = grunt.file.expand({
  filter: "isFile"
}, tasksLocations);

taskFiles.forEach(function(path) {
  grunt.log.writeln("=> loading & registering : " + path);
  require(path)(grunt);
});
You will find the whole boilerplate gruntfile here (external config and tasks loading) : https://gist.github.com/0gust1/7683132