Perform some operations on factor-bundle's partial bundles - javascript

I am using gulp with browserify and factor-bundle.
I have the following code:
b = browserify({
  entries: [ 'a.js', 'b.js' ],
  plugin: [ [ 'factor-bundle', { outputs: [ 'build/a.js', 'build/b.js' ] } ] ]
})
  .bundle()
  .pipe(source('bundle.js'))
  .pipe(buffer())
  .pipe(gulp.dest('/build/common'));
I want to pipe some actions (like uglify, bundle-collapser or other jobs) on the partial bundles ('build/a.js' and 'build/b.js'). I tried to use the method described on the factor-bundle page:
b.plugin('factor-bundle', { outputs: [ write('x'), write('y') ] });

function write (name) {
  return concat(function (body) {
    console.log('// ----- ' + name + ' -----');
    console.log(body.toString('utf8'));
  });
}
But I don't understand the write() method, and I don't know how to perform uglification or how to gulp.dest the result.
Any ideas or explanations?

The write() method returns a writable stream that allows you to pipe bundles
generated by the factor-bundle plugin through further downstream transformations.
For instance, your write() method may look something like this:
var path = require('path');
var concat = require('concat-stream'); // assumed: concat() here comes from concat-stream
var file = require('gulp-file');
var uglify = require('gulp-uglify');   // assumed: uglify() here is gulp-uglify
var sourcemaps = require('gulp-sourcemaps');

function write (filepath) {
  return concat(function (content) {
    // create new vinyl file from content and use the basename of the
    // filepath in scope as its basename.
    return file(path.basename(filepath), content, { src: true })
      // uglify content
      .pipe(uglify())
      // write content to build directory
      .pipe(gulp.dest('./build/scripts'));
  });
}
And you would use it like this:
browserify({
  entries: [ 'a.js', 'b.js' ],
  plugin: [ [ 'factor-bundle', { outputs: [ write('a.js'), write('b.js') ] } ] ]
})
  .bundle()
  .pipe(write('common.js'));
  // Could have used these instead, but it wouldn't be as DRY:
  // .pipe(source('common.js'))
  // .pipe(uglify())
  // .pipe(gulp.dest('./build/scripts'))
Using the factor-bundle plugin affects the output of browserify after
.bundle() is called. Normally, it would generate bundles as readable streams
mapping to each of your entry files, and you would then be able to apply further
transformations to them.
Instead, you will get a single readable stream that contains a bundle with the
shared common modules from the supplied entry files, which I have called
common.js in the example above. You then need to handle the transformations
of the readable streams that map to each entry file separately.
In the example above I have added writable streams to the outputs array, arranged
in the same order as my entry files; each receives its respective bundle as a
readable stream and applies further transformations to it.
You could also leverage the factor.pipeline event:
var b = browserify({ ... });

b.on('factor.pipeline', function (id, pipeline) {
  pipeline.get('wrap').push(write(id));
});

b.plugin(factor);

return b.bundle().pipe(write('common.js'));
It is worth noting that the further downstream work applied to the outputs
is completely detached from the main pipeline. So if you were using gulp and returned
the stream from browserify, the task would be reported as complete prematurely,
because operations on the entry-file bundles would still be in progress. I haven't run into
issues with this yet.
Hope this helps.

This is a bit old, but it might be useful to someone else.
The answer above from @Christian helped me, but I had to solve the issue of task completion. I did it by adding a counter for opened streams, and calling the task callback once they are all closed.
// requires for browserify, factor-bundle, concat-stream, gulp-file, gulp-uglify, path and babelify are assumed, as in the previous answers
gulp.task('build:js:compile', function (cb) {
  const apps = getAllJavascriptFilesPaths(); // this returns an array of full paths to the js files I want to bundle
  const dist = 'dist'; // the output path
  const files = [];
  const streams = [];
  let openedStreams = 0;

  // We use browserify factor-bundle to get the shared code in a separate js file, and not in all app files.
  // The write function here handles the post-processing of each browserified app by returning a writable stream.
  // We check the number of opened streams, and call the callback once they are all closed, to be sure the task is
  // complete.
  function write(filepath) {
    openedStreams++;
    return concat(function (content) {
      // create new vinyl file from content and use the basename of the
      // filepath in scope as its basename.
      return file(path.basename(filepath), content, { src: true })
        .pipe(uglify())
        .pipe(gulp.dest(dist))
        .on('finish', function () {
          openedStreams--;
          if (openedStreams === 0) {
            cb();
          }
        });
    });
  }

  apps.forEach(function (file) {
    files.push(file);
    streams.push(write(file));
  });

  browserify(files)
    .plugin(factor, { outputs: streams })
    .transform("babelify", { presets: 'babel-preset-env' })
    .bundle()
    .pipe(write('common.js'));
});

Related

How to integrate es6 with gulp-develop-server

I am trying to transfer an old node-express project over to be able to use es6. I have seen many posts about using gulp with es6. Most of them discuss using a syntax like this:
const gulp = require("gulp");
const babel = require("gulp-babel");
gulp.src('./index.js')
.pipe(
babel({
presets: [
["#babel/env", { modules: false }],
],
})
)
However, my existing project's gulpfile doesn't use gulp.src at all. Instead, it uses gulp-develop-server. The gulpfile looks like this:
const gulp = require("gulp");
const devServer = require("gulp-develop-server");
const spawn = require("child_process").spawn;
const fs = require("fs");
const basedir = ".";
function serverRestart(done) {
// perform some cleanup code here
devServer.restart();
done();
}
function serverStart() {
devServer.listen({
path: basedir + "/index.js",
});
}
function serverWatch() {
serverStart();
gulp.watch(
[
basedir + "/paths/**/*",
// more directories to watch
],
serverRestart
);
}
function reload(done) {
serverWatch();
done();
}
function defaultTask() {
let p;
gulp.watch(["gulpfile.js"], killProcess);
spawnChild();
function killProcess(e) {
if (p && !p.killed) {
devServer.kill();
p.kill("SIGINT");
spawnChild();
}
}
function spawnChild() {
p = spawn("gulp", ["reload"], { stdio: "inherit" });
}
}
process.stdin.resume();
process.on("exit", handleExit.bind(null, { cleanup: true }));
process.on("SIGINT", handleExit.bind(null, { exit: true }));
process.on("uncaughtException", handleExit.bind(null, { exit: true }));
function handleExit(options, err) {
// perform some cleanup code here
if (options.cleanup) {
devServer.kill();
}
if (err) {
console.log(err.stack);
}
if (options.exit) {
process.exit();
}
}
gulp.task("serverRestart", serverRestart);
gulp.task("serverStart", serverStart);
gulp.task("serverWatch", serverWatch);
gulp.task("reload", reload);
gulp.task("default", defaultTask);
The existing flow is important because it executes needed code for setup and cleanup every time I hit save, which runs serverRestart. I've been trying a few different methods based on the other questions, which recommended using gulp.src().pipe(), but I haven't had much luck integrating it with the existing pattern which uses gulp-develop-server. I am trying to avoid rewriting the whole gulpfile. Is there a simple way to integrate babel with my existing gulpfile such that I can use es6 in my source code?
There's an example with CoffeeScript in the gulp-develop-server documentation.
Using that as a model, try this:
function serverStart() {
  devServer.listen({
    path: "./dist/index.js",
  });
}

function serverWatch() {
  serverStart();
  gulp.watch(
    [
      basedir + "/paths/**/*",
    ],
    serverRestart
  );
}

function serverRestart() {
  return gulp.src('./index.js')
    .pipe(
      babel({
        presets: [
          ["@babel/env", { modules: false }],
        ],
      })
    )
    .pipe(gulp.dest('./dist'))
    .pipe(devServer());
}
Other suggestions
That being said, your existing Gulp file doesn't really use Gulp. That is, everything is defined as a plain function and it doesn't leverage any of Gulp's useful features, like managing task dependencies. This is probably because, pre-es6, this was a very simple project. The Gulp tasks in that file are an over-elaborate way to watch files and run a server. The same could be done (with less code) using nodemon, as sketched below.
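For instance, a rough sketch of the nodemon equivalent, using nodemon's programmatic API (the script path and watch list here are assumptions, not taken from your gulpfile):
var nodemon = require('nodemon');

nodemon({
  script: 'index.js',            // hypothetical entry point
  watch: ['paths', 'index.js']   // directories to watch; adjust to your project
});

nodemon.on('restart', function (files) {
  // perform the same cleanup code serverRestart() did
  console.log('restarted because of', files);
});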
With the introduction of React and more complicated build processes, Gulp seems to have fallen out of favor with the community (and in my personal experience, Gulp was a time sinkhole anyhow).
If the main change you want to make is to use import, you can simply use a more recent Node version. You'll surely run into the error SyntaxError: Cannot use import statement outside a module. Simply rename the file to .mjs and it will work. This provides a way to incrementally migrate files to import syntax. Other features should automatically work (and are all backwards-compatible, anyhow). Once your project is mostly, or all, compliant, you can add "type": "module" to your package.json file, then rename all of your require-style js files to .cjs, and rename all of your .mjs files to .js, or leave them as .mjs. Read more about the rules of mixing CommonJS and Module imports in the Node.js blog post (note that some things may have changed since that article was written).
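To make that concrete, here is a minimal sketch of the incremental migration (the file name and route are hypothetical, not from your project):
// index.mjs — renamed from index.js so a recent Node version parses ESM syntax natively
import express from 'express';

const app = express();
app.get('/', (req, res) => res.send('ok'));
app.listen(3000);

// Later, once most files are migrated, add "type": "module" to package.json,
// rename any remaining require-style files to .cjs, and .mjs files can become .js again.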

How to compile SASS and minify CSS and create its map with gulp 4 in same task

I'm using Gulp 4, and I wonder if there is a way to output the css with its map and also the minified css with its map, but in the same task. I mean something like this:
- css
  - main.css
  - main.css.map
  - main.min.css
  - main.min.css.map
My current code actually does it, but I have two tasks:
const gulp = require('gulp');
const autoprefixer = require('gulp-autoprefixer');
const cleanCSS = require('gulp-clean-css');
const sass = require('gulp-sass');
const sourcemaps = require('gulp-sourcemaps');
const rename = require('gulp-rename');

// declare the src folder
let root = '../src' + '/';
let scssFolder = root + 'scss/';

// declare the build folder
let build = '../build/' + '/';
let cssFolder = build + 'css';

// Compile scss into css
function css() {
  return gulp
    .src(scssFolder + 'main.scss')
    .pipe(sourcemaps.init({ loadMaps: true }))
    .pipe(
      sass({
        outputStyle: 'expanded',
      }).on('error', sass.logError)
    )
    .pipe(autoprefixer('last 2 versions'))
    .pipe(sourcemaps.write('./'))
    .pipe(gulp.dest(cssFolder));
}

// minify css
function minCSS() {
  return gulp
    .src(scssFolder + 'main.scss')
    .pipe(sourcemaps.init({ loadMaps: true }))
    .pipe(
      sass({
        outputStyle: 'compressed',
      }).on('error', sass.logError)
    )
    .pipe(autoprefixer('last 2 versions'))
    .pipe(rename({ suffix: '.min' }))
    .pipe(sourcemaps.write('./'))
    .pipe(gulp.dest(cssFolder));
}

exports.css = css;
exports.minCSS = minCSS;
and I'd like to know either if I can put it in one task, or how I can call them from one task, for example:
function css() {
  return gulp
    .src(scssFolder + 'main.scss')
    .pipe(sourcemaps.init({ loadMaps: true }))
    .pipe(
      sass({
        outputStyle: 'expanded',
      }).on('error', sass.logError)
    )
    .pipe(autoprefixer('last 2 versions'))
    .pipe(sourcemaps.write('./'))
    .pipe(gulp.dest(cssFolder))
    // Put here the minify code
    .pipe(cleanCSS())
    .pipe(sourcemaps.write('./'))
    .pipe(gulp.dest(cssFolder));
}
but the previous code doesn't work, because it only creates main.css and main.css.map.
Create a new composed task that runs both functions from your first code in series.
Example:
const compileAndMinify = gulp.series(css, minCSS);

exports.compileAndMinify = compileAndMinify;
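Since the two builds read the same source and are independent of each other, gulp.parallel would also work and run them concurrently (which is essentially what the next answer does); a minimal sketch:
// alternative composition: run both builds at the same time
exports.compileAndMinify = gulp.parallel(css, minCSS);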
Ok, so I have what I think might be the ultimate solution for this problem if you are using gulp 4. Also, I am using babel to write my gulpfile in es6 via "gulpfile.babel.js", so pardon if this example looks weird (I split my gulp 4 builds up into multiple js modules).
This is not a question-specific answer, but it illustrates how I output minified and non-minified css and source maps in the same task, by having my gulp task run the build twice via gulp 4's parallel.
'use strict';

import config from './config';
import yargs from 'yargs';
import { src, dest, parallel, series } from 'gulp';
import concat from 'gulp-concat';
import rename from 'gulp-rename';
import csso from 'gulp-csso';
import sass from 'gulp-sass';
import tildeImporter from 'node-sass-tilde-importer';
import sassVar from 'gulp-sass-variables';
import sourcemaps from 'gulp-sourcemaps';

sass.compiler = require('node-sass');

var args = yargs.argv;

class isolatedVendorBuild {
  static build(done, destination, fileName) {
    // this is the actual gulp-sass build function, that will be wrapped by two other gulp4 tasks
    let internalBuild = ((shouldMin) => {
      // store the stream in a variable before returning it, so we can conditionally add things to it
      let ret = src(config.paths.srcSharedIsolatedVendorScss)
        .pipe(sourcemaps.init())
        .pipe(concat(fileName))
        .pipe(sassVar({
          $env: args.prod ? 'prod' : 'dev'
        }))
        .pipe(sass({
          importer: tildeImporter,
          outputStyle: 'nested'
        }).on('error', sass.logError));

      if (shouldMin) {
        // if the function was called with shouldMin true, then reset the stream from the previous stream
        // but with the rename .min and csso call added to it
        ret = ret.pipe(rename({ suffix: '.min' }));
        ret = ret.pipe(csso({ sourceMap: true }));
      }

      // reset the stream to the previous ret and add sourcemaps.write and destination output to it
      ret = ret.pipe(sourcemaps.write('.'))
        .pipe(dest(destination));

      // return the complete stream
      return ret;
    });

    // create two wrapper functions for the internal build to be called in gulp,
    // since gulp can't pass arguments to functions treated as gulp tasks
    function buildStylesUnMinified() {
      return internalBuild(false); // pass false for shouldMin and it will output the unminified css and source map
    }

    function buildStylesMinified() {
      return internalBuild(true); // pass true for shouldMin and it will output the minified css and source map with the .min suffix added to the file name
    }

    // the magic: we use gulp parallel to run the unminified and minified versions of this sass build at
    // the same time, calculating two separate streams of css output concurrently.
    return parallel(buildStylesUnMinified, buildStylesMinified)(done);
  }
}

export default isolatedVendorBuild;
I've seen other solutions that involve outputting the non-minified css first, then using that as an input for the minification task. That works, but it forces synchronous dependencies, and that gets build times crawling to a snail's pace in complex builds.
I came up with this method recently while solving this in a new project with multiple scss output files. I like it because the minified and unminified tasks run at the same time, and I was able to let the entire scss output process in my main build be async, as in, not depend on it completing before doing the scripts and the rest of the build.
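For reference, the sequential approach mentioned above (build the css first, then minify the generated file) would look roughly like this, reusing the question's css() task and variables; this is a sketch, not code from either answer:
// minify the already-built main.css, so it must run after css()
function minifyBuiltCSS() {
  return gulp
    .src(cssFolder + '/main.css')
    .pipe(sourcemaps.init({ loadMaps: true }))
    .pipe(cleanCSS())
    .pipe(rename({ suffix: '.min' }))
    .pipe(sourcemaps.write('./'))
    .pipe(gulp.dest(cssFolder));
}

exports.build = gulp.series(css, minifyBuiltCSS);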

Gulp-sourcemaps only returning `source` js file

My sourcemaps are only returning the source value no matter what.
Browserify.js
// add custom browserify options here
var customOpts = {
  entries: ['./frontend/js/app.js'],
  debug: true
};
var opts = assign({}, watchify.args, customOpts);
var b = watchify(browserify(opts));

b.transform(require("jadeify"), { compileDebug: true, pretty: false });
// add transformations here
// i.e. b.transform(coffeeify);

gulp.task('browserify', bundle); // so you can run `gulp js` to build the file
b.on('update', bundle); // on any dep update, runs the bundler
b.on('log', gutil.log); // output build logs to terminal

function bundle() {
  return b.bundle()
    // log errors if they happen
    .on('error', gutil.log.bind(gutil, 'Browserify Error'))
    .pipe(source('main.js'))
    // optional, remove if you don't need to buffer file contents
    .pipe(buffer())
    // optional, remove if you dont want sourcemaps
    .pipe(sourcemaps.init({ loadMaps: true })) // loads map from browserify file
    // Add transformation tasks to the pipeline here.
    .pipe(sourcemaps.write('./')) // writes .map file
    .pipe(gulp.dest('./public/js'))
    .on('end', function () {
      browserSync.reload();
    });
}
main.js.map
{"version":3,"sources":["/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js","/main.js"],"names":[],"mappings":"AAAA;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
This of course means that all files are referenced as main.js, which is less than helpful :(
I am also using browser-sync, but I don't believe this has any effect on sourcemaps.
I was using gulp-sourcemaps v2.3.0
Upgraded to v2.6.0 and now it's working. I thank you, sir.

Gulp Bundle + Browserify on multiple files

So I have a simple gulp task function which currently converts my main.jsx to a main.js file:
gulp.task("bundle", function () {
return browserify({
entries: "./app/main.jsx",
debug: true
}).transform(reactify)
.bundle()
.pipe(source("main.js"))
.pipe(gulp.dest("app/dist"))
});
I was wondering if it would be possible to put multiple bundles in this gulp.task?
My ideal outcome would be being able to do:
main.jsx to main.js
otherPage.jsx to otherPage.js
otherPage2.jsx to otherPage2.js
All in one gulp task.
I have searched online but cannot seem to find anything relevant; any help or advice is appreciated. Thank you in advance.
If you want to create a bundle for each file you need to loop over the respective files, create a stream for each file and then merge the streams afterwards (using merge-stream):
var merge = require('merge-stream');

gulp.task("bundle", function () {
  var files = [ "main", "otherPage", "otherPage2" ];
  return merge(files.map(function (file) {
    return browserify({
      entries: "./app/" + file + ".jsx",
      debug: true
    }).transform(reactify)
      .bundle()
      .pipe(source(file + ".js"))
      .pipe(gulp.dest("app/dist"));
  }));
});
The above requires that you maintain a list of files manually as an array. It's also possible to write a task that bundles all .jsx files in the app directory without having to maintain an explicit array of the files. You just need the glob package to determine the array of files for you:
var merge = require('merge-stream');
var glob = require('glob');
var path = require('path');

gulp.task("bundle", function () {
  var files = glob.sync('./app/*.jsx');
  return merge(files.map(function (file) {
    return browserify({
      entries: file,
      debug: true
    }).transform(reactify)
      .bundle()
      .pipe(source(path.basename(file, '.jsx') + ".js"))
      .pipe(gulp.dest("app/dist"));
  }));
});

Calling grunt.config.set inside grunt.util.spawn doesn't seem to have any effect

I'm trying to set the current Git SHA in my project's Grunt configuration, but when I try to access it from another task it isn't available. What am I missing?
grunt.registerTask('sha', function () {
  var done = this.async();
  grunt.util.spawn({
    cmd: 'git',
    args: ['rev-parse', '--short', 'HEAD']
  }, function (err, res) {
    if (err) {
      grunt.fail.fatal(err);
    } else {
      grunt.config.set('git', { sha: res.stdout });
      if (grunt.option('debug') || grunt.option('verbose')) {
        console.log("[sha]:", res.stdout);
      }
    }
    done();
  });
});
After running the task, I expect the config to be available in another task configuration:
requirejs: {
  dist: {
    ...
    out: '<%= app.dist %>/scripts/module_name.<%= git.sha %>.js'
    ...
  }
}
So... What's the problem?
The problem is that RequireJS is writing to the file public/scripts/module_name..js; the SHA is not available in the configuration (the filename should be public/scripts/module_name.d34dc0d3.js).
UPDATE:
The problem is that I'm running requirejs tasks with grunt-concurrent, so the Grunt configuration is not available for requirejs.
grunt.registerTask('build', [
  ...
  'getsha',
  'concurrent:dist',
  ...
]);
And the concurrent task looks like:
concurrent: {
  dist: [
    ...
    'requirejs',
    ...
  ]
}
Since grunt-concurrent will spawn tasks in child processes, they do not have access to the context of the parent process, which is why a value set with grunt.config.set() in the parent context is not available in the config of the child context.
Some of the solutions to make the change available in the child context are:
Write the data to the file system
Write the data to a temporary file with grunt.file.write('./tmp/gitsha', res.stdout) and then have the task being run in a child process read the temporary file:
out: (function() {
  var out = grunt.config('app.dist') + '/scripts/module_name.';
  if (grunt.file.exists('./tmp/gitsha')) {
    out += grunt.file.read('./tmp/gitsha');
  } else {
    out += 'unknown';
  }
  return out + '.js';
}())
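For completeness, the parent side only needs the grunt.config.set() call in the question's sha task swapped for grunt.file.write(); a sketch, assuming the ./tmp/gitsha path used above:
grunt.registerTask('sha', function () {
  var done = this.async();
  grunt.util.spawn({
    cmd: 'git',
    args: ['rev-parse', '--short', 'HEAD']
  }, function (err, res) {
    if (err) {
      grunt.fail.fatal(err);
    } else {
      // persist the sha to disk so tasks spawned by grunt-concurrent can read it
      grunt.file.write('./tmp/gitsha', res.stdout);
    }
    done();
  });
});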
Use a socket
This is a very convoluted solution, but a solution nonetheless. See the Node net docs (http://nodejs.org/api/net.html#net_net_createserver_options_connectionlistener) on creating a server in the parent process, then have the child process connect to the socket for the data.
Or check out https://github.com/shama/blackbox for a library that makes this method a bit simpler.
Fork the parent process instead of spawn/exec
Another method is to use fork: http://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options instead of grunt-concurrent. Fork lets you send messages to child processes with child.send('gitsha') and receive them in the child with process.on('message', function(gitsha) {}).
This method can also get very convoluted.
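A rough sketch of the fork approach (the runner script name is hypothetical):
// parent, e.g. inside the sha task's callback
var fork = require('child_process').fork;
var child = fork('./run-build.js');  // hypothetical runner script
child.send({ gitsha: res.stdout });   // pass the sha down to the child

// child (run-build.js)
process.on('message', function (msg) {
  // msg.gitsha is now available to configure and run the build in this process
  console.log('received sha:', msg.gitsha);
});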
Use a proxy task
Have your sha task set the config as you're currently doing:
grunt.registerTask('sha', function() {
  grunt.config.set('git', { sha: '1234' });
});
Change your concurrent config to call a proxy task with the sha:
grunt.initConfig({
  concurrent: {
    dist: [
      'proxy:requirejs:<%= git.sha %>'
    ]
  }
});
Then create a proxy task that sets the passed value before running the given task:
grunt.registerTask('proxy', function(task, gitsha) {
  grunt.config.set('git', { sha: gitsha });
  grunt.task.run(task);
});
The above could be simplified to set the value specifically for requirejs, but it is shown here as a generic example that can be applied to any task.
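For instance, a requirejs-specific variant of the proxy idea might look like this (a sketch, not part of the original answer):
// hypothetical requirejs-only proxy: set the sha, then run requirejs in this child process
grunt.registerTask('requirejsWithSha', function (gitsha) {
  grunt.config.set('git', { sha: gitsha });
  grunt.task.run('requirejs');
});

// and in the concurrent config:
// concurrent: { dist: [ 'requirejsWithSha:<%= git.sha %>' ] }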
