Include external JavaScript file in Node-Gulp-Webpack build

I have an app built with Node-Gulp-Webpack.
I have an external JavaScript file external.js that is not local to my app, but I want to be able to require it in my app. This file is on a path for which there is an environment variable named MY_PATH.
How can I include this external.js as-is in my build without making a Node module out of it?
So far, this does not work and doesn't return a useful error message:
var external = require(process.env.MY_PATH + '/external.js');

Use gulp-remote-src. For example:
var gulp = require('gulp');
var uglify = require('gulp-uglify');
var remoteSrc = require('gulp-remote-src');

gulp.task('remote', function() {
    // Fetch the remote files, minify them, and write them to ./dist/
    return remoteSrc(['app.js', 'jquery.js'], {
        base: 'http://myapps.com/assets/',
    })
        .pipe(uglify())
        .pipe(gulp.dest('./dist/'));
});
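Note that gulp-remote-src fetches files over HTTP, so the base above has to be a URL the build machine can reach. If external.js instead lives on a local filesystem path (as the MY_PATH environment variable in the question suggests), a plain gulp.src copy may be enough. A minimal sketch, assuming MY_PATH points at the directory containing external.js and ./dist/ is the build output:

var gulp = require('gulp');
var path = require('path');

// Copy external.js from the directory named by the MY_PATH environment
// variable into the build output so it can be referenced like a local script.
gulp.task('copy-external', function() {
    return gulp.src(path.join(process.env.MY_PATH, 'external.js'))
        .pipe(gulp.dest('./dist/'));
});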

Related

Gulp minify and don't copy original files

I want to minify some JS files with Gulp, but can't seem to get control over the process. I want only the minified version in the destination, and am currently getting copies of the originals as well.
I'm thinking I may need the rename package, but am not sure how to use it for this task - I would presumably need some kind of variable to hold the current file name for each script.
Any help much appreciated. The code is below:
var gulp = require('gulp');
var minify = require('gulp-minify');
var rename = require('gulp-rename');

// script paths
var jsFiles = 'js/**/*.js',
    jsDest = 'js/dist/';

gulp.task('scripts', function() {
    return gulp.src(jsFiles)
        .pipe(minify())
        .pipe(gulp.dest(jsDest));
});
Set the noSource option like the following:
.pipe(minify({noSource: true}))
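Put together with the task from the question, a minimal sketch (same paths as above) looks like this:

var gulp = require('gulp');
var minify = require('gulp-minify');

var jsFiles = 'js/**/*.js',
    jsDest = 'js/dist/';

gulp.task('scripts', function() {
    return gulp.src(jsFiles)
        // noSource: true drops the unminified originals from the output stream,
        // so only the minified files end up in jsDest
        .pipe(minify({noSource: true}))
        .pipe(gulp.dest(jsDest));
});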

Include new module installed using npm install

I'm working on a project that uses Node and Gulp, and I want to add a new JavaScript library for some new features on the site.
I found this library that does what I need: http://imakewebthings.com/waypoints/
The library's website says I need to run npm install waypoints, so I did just that, and a new waypoints directory was created in my node_modules directory. I'm then told to include the new library in my project like this:
<script src="/path/to/noframework.waypoints.min.js"></script>
But the node_modules directory is one level above the document root of my project, so I can't just use src="/node_modules/waypoints/src/waypoint.js", because that folder is not reachable from the web browser.
I tried to add the new module to the gulpfile.js file of my project but it still doesn't work.
var waypoints = require('waypoints'),
I still get a "Uncaught ReferenceError: Waypoint is not defined" error.
What am I missing? How do I get node and/or gulp to load this new library?
EDIT
This is the part of my gulpfile.js where I think the libraries are being included for use on the client side:
var customJSScripts = ['src/js/custom/**/*.js'];
var libsJSscripts = ['src/js/libs/**/*.js'];
var allJSScripts = ['src/js/**/*.js'];
var outputFileName = 'custom.min.js';
var outputFolder = './dist/js/';

gulp.task('jshint', function() {
    return gulp.src(customJSScripts)
        .pipe(jshint({'esversion': 6}))
        .pipe(jshint.reporter('jshint-stylish'));
});

gulp.task('concat-scripts', function() {
    if (envVars.minifyJS) {
        gulp.src(customJSScripts)
            .pipe(sourcemaps.init())
            .pipe(concat(outputFileName))
            .pipe(uglify())
            .pipe(sourcemaps.write('.'))
            .pipe(gulp.dest(outputFolder));
    } else {
        gulp.src(customJSScripts)
            .pipe(sourcemaps.init())
            .pipe(concat(outputFileName))
            .pipe(sourcemaps.write('.'))
            .pipe(gulp.dest(outputFolder));
    }
    gulp.src(libsJSscripts)
        .pipe(sourcemaps.init())
        .pipe(concat('libs.min.js'))
        .pipe(sourcemaps.write('.'))
        .pipe(gulp.dest(outputFolder));
});
You don't need to require() waypoints, since you are not using it on the server. What you should do is add another task and make it a dependency of concat-scripts, like below:
gulp.task('move-waypoint', function() {
    var required_files = [
        '../node_modules/waypoints/waypoints.min.js' // make sure this is the right path
    ];
    return gulp.src(required_files, {base: '../node_modules/waypoints/'})
        .pipe(gulp.dest(outputFolder));
});

gulp.task('concat-scripts', ['move-waypoint'], function() { // will execute the move-waypoint task first
    // your usual code here...
});
You can then look at the path of waypoints in the output folder to determine the script path to include in your HTML page.
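For example, assuming outputFolder is './dist/js/' as in the gulpfile above and that dist/ is served from the document root, the include would then look something like <script src="/dist/js/waypoints.min.js"></script>.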

How to output files to the parent of their source directory using Webpack in Gulp?

So far I have this code, which I got from here:
var gulp = require('gulp');
var webpack = require('webpack-stream');
var named = require('vinyl-named');

gulp.task('default', function() {
    return gulp.src('*/lib/app.js', { base: '.' })
        .pipe(named())
        .pipe(webpack())
        .pipe(gulp.dest('.'));
});
My folder structure is like:
site1/lib/app.js
site2/lib/app.js
I want the output files to be laid out like the following, with each file containing only its respective lib/app.js code (and anything require()d from it):
site1/app.js
site2/app.js
However, the code I have now just outputs everything to the project's root directory. I've tried several combinations, such as removing the { base: '.' }, but nothing works. If I remove the named() and webpack() pipes, though, the current code outputs to the correct directories, so it seems that somewhere in the process Webpack loses the originating directory information.
Also, is it possible to get a solution that works with Webpack's "watch: true" option, so that compiling modified files is quick, rather than having Gulp iterate through every single file on every change?
I assume you want to create an app.js for each site that packs only the code for that site (and not the others).
In that case you can use gulp-foreach to effectively iterate over all your app.js files and send each one down its own stream. Then you can use Node's built-in path module to figure out where the parent directory of each app.js file is and write the output there with gulp.dest().
var gulp = require('gulp');
var webpack = require('webpack-stream');
var named = require('vinyl-named');
var foreach = require('gulp-foreach');
var path = require('path');

gulp.task('default', function() {
    return gulp.src('*/lib/app.js')
        .pipe(foreach(function(stream, file) {
            var parentDir = path.dirname(path.dirname(file.path));
            return stream
                .pipe(named())
                .pipe(webpack())
                .pipe(gulp.dest(parentDir));
        }));
});
If you want to use webpack({watch: true}) you'll have to use a different approach. The following uses glob to iterate over all the app.js files. Each app.js file is again sent down its own stream; however, this time all the streams are merged before being returned.
var gulp = require('gulp');
var webpack = require('webpack-stream');
var named = require('vinyl-named');
var path = require('path');
var merge = require('merge-stream');
var glob = require('glob');

gulp.task('default', function() {
    return merge.apply(null, glob.sync('*/lib/app.js').map(function(file) {
        var parentDir = path.dirname(path.dirname(file));
        return gulp.src(file)
            .pipe(named())
            .pipe(webpack({watch: true}))
            .pipe(gulp.dest(parentDir));
    }));
});

Compiling dynamically required modules with Browserify

I am using Browserify to compile a large Node.js application into a single file (using options --bare and --ignore-missing [to avoid troubles with lib-cov in Express]). I have some code to dynamically load modules based on what is available in a directory:
var fs = require('fs'),
    path = require('path');

fs.readdirSync(__dirname).forEach(function (file) {
    if (file !== 'index.js' && fs.statSync(path.join(__dirname, file)).isFile()) {
        module.exports[file.substring(0, file.length - 3)] = require(path.join(__dirname, file));
    }
});
I'm getting strange errors in my application where arbitrary text files are being loaded from the directory my compiled file is located in. I think it's because paths are no longer set correctly, and because Browserify can't require() the correct files when they are dynamically loaded like this.
Short of making a static index.js file, is there a preferred method of dynamically requiring a directory of modules that is out-of-the-box compatible with Browserify?
This plugin lets you require glob patterns: require-globify
Then, with a little hack, you can add all the files at compile time without executing them:
// Hack to compile the glob-matched files. Don't call this function!
function ಠ_ಠ() {
    require('views/**/*.js', { glob: true });
}
And, for example, you could require and execute a specific file when you need it :D
var homePage = require('views/'+currentView)
Browserify does not support dynamic requires - see GH issue 377.
The only method I am aware of for dynamically requiring a directory is a build step that lists the directory's files and writes a "static" index.js file.
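A minimal sketch of such a build step, assuming the dynamically loaded modules live in a lib/ directory (the paths and the output file name are illustrative):

// generate-index.js - writes a static index.js that Browserify can analyze
var fs = require('fs');
var path = require('path');

var dir = path.join(__dirname, 'lib');
var lines = fs.readdirSync(dir)
    .filter(function (file) {
        return file !== 'index.js' && /\.js$/.test(file);
    })
    .map(function (file) {
        var name = path.basename(file, '.js');
        return "module.exports['" + name + "'] = require('./" + file + "');";
    });

fs.writeFileSync(path.join(dir, 'index.js'), lines.join('\n') + '\n');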
There's also the bulkify transform, as documented here:
https://github.com/chrisdavies/tech-thoughts/blob/master/browserify-include-directory.md
Basically, you can do this in your app.js or whatever:
var bulk = require('bulk-require');
// Require all of the scripts in the controllers directory
bulk(__dirname, ['controllers/**/*.js']);
And my gulpfile has something like this in it:
gulp.task('js', function () {
    return gulp.src('./src/js/init.js')
        .pipe(browserify({
            transform: ['bulkify']
        }))
        .pipe(rename('app.js'))
        .pipe(uglify())
        .pipe(gulp.dest('./dest/js'));
});

Class autoloader in Ember.js?

I am looking for an autoloader, similar to how autoloaders work in other languages (e.g. PHP's: http://php.net/manual/en/language.oop5.autoload.php). I merely specify the algorithm for finding the file, and it's automatically loaded into the app.
My initial thinking is a build process that scans directories and builds an index file. Is there a better way?
Here's my solution using Browserify and a Node.js build script, but I'm curious if anyone has a better solution:
build.js:
var glob = require("glob");
var fs = require('fs');
var path = require('path');

function buildFile(directory, build_file, suffix) {
    glob(directory, function(err, files) {
        if (fs.existsSync(build_file)) {
            fs.unlinkSync(build_file);
        }
        fs.appendFileSync(build_file, 'module.exports = {');
        files.forEach(function (file) {
            var key = path.basename(file, '.js') + suffix;
            var value = "require('" + file + "')";
            fs.appendFileSync(build_file, '\n ' + key + ': ' + value + ',');
        });
        fs.appendFileSync(build_file, '\n}');
    });
}

buildFile('./controllers/*.js', './controllers.js', 'Controller');
buildFile('./routes/*.js', './routes.js', 'Route');
app.js:
var App = Ember.Application.create();
App.reopen(require('./controllers.js'));
App.reopen(require('./routes.js'));
routes.js (example output from build.js):
module.exports = {
    ApplicationRoute: require('./routes/Application.js'),
    IndexRoute: require('./routes/Index.js'),
    RecoverRoute: require('./routes/Recover.js'),
    RegisterRoute: require('./routes/Register.js'),
    UsersRoute: require('./routes/Users.js'),
    UsersNewRoute: require('./routes/UsersNew.js'),
    ValidateRoute: require('./routes/Validate.js'),
}
I use Grunt.js to watch and rebuild automatically when changes occur.
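For reference, a minimal sketch of that watch setup, assuming grunt-contrib-watch is installed (the task name and file globs are illustrative):

// Gruntfile.js
module.exports = function (grunt) {
    grunt.initConfig({
        watch: {
            sources: {
                files: ['controllers/*.js', 'routes/*.js'],
                tasks: ['build-index']
            }
        }
    });

    grunt.loadNpmTasks('grunt-contrib-watch');

    // Re-run build.js whenever a controller or route changes
    grunt.registerTask('build-index', function () {
        delete require.cache[require.resolve('./build.js')];
        require('./build.js');
    });
};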
You probably want to use something like RequireJS: http://requirejs.org/
RequireJS will allow you to specify dependencies which will be loaded as needed. You can also run the RequireJS optimizer to compile your templates and JavaScript into one file to deploy to your production servers.
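As a rough sketch of the RequireJS style (the module names, the 'lib/api' dependency, and the init() method are illustrative, not taken from the question):

// js/routes/users.js - define a module and its dependencies
define(['lib/api'], function (api) {   // 'lib/api' is a placeholder dependency
    return {
        init: function () {
            // set up the route using the api dependency...
        }
    };
});

// js/app.js - load the module only when it is needed
require(['routes/users'], function (usersRoute) {
    usersRoute.init();
});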
One could use a pre-made tool like Yeoman's ember generator or ember tools. They are opinionated about the project's folder structure.
