I want the gulp calls below to run synchronously, one after the other, but they don't run in order.
The run-sequence node module doesn't help here, because I'm not trying to run gulp tasks in series (i.e. the gulp.task("mytask", ["foo", "bar", "baz"]) style syntax), but rather gulp "calls" in series, as you can see below.
gulp.task("dostuff", function (callback) {
gulp
.src("...")
.pipe(gulp.dest("...");
gulp
.src("...")
.pipe(gulp.dest("...");
gulp
.src("...")
.pipe(gulp.dest("...");
callback();
});
How do I make them run one after the other?
You can use async as a control flow for your calls to keep them in a single task, and it also avoids the "pyramid effect". Something like this should be good for your use case:
var async = require('async');

gulp.task('yeah', function (cb) {
    async.series([
        function (next) {
            gulp.src('...')
                .pipe(gulp.dest('...'))
                .on('end', next);
        },
        function (next) {
            gulp.src('...')
                .pipe(gulp.dest('...'))
                .on('end', next);
        },
        function (next) {
            gulp.src('...')
                .pipe(gulp.dest('...'))
                .on('end', next);
        }
    ], cb);
});
That will also give you some error handling and help you pinpoint where a problem occurred.
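For example, instead of passing cb straight through, you could give async.series a final callback of your own; the first error raised by any step lands there, which gives you one place to log it. A small sketch, assuming the same task shape as above:

gulp.task('yeah', function (cb) {
    async.series([
        function (next) {
            gulp.src('...')
                .pipe(gulp.dest('...'))
                .on('end', next);
        }
        // ... further steps ...
    ], function (err) {
        if (err) {
            // One place to see that (and roughly where) the sequence failed
            console.error('Build sequence failed:', err);
        }
        cb(err);
    });
});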
Well, it's just streams, so you could listen for the end event (watch out for the pyramid of doom!):
gulp.task("dostuff", function (callback) {
gulp
.src("...")
.pipe(gulp.dest("..."))
.on('end', function () {
gulp
.src("...")
.pipe(gulp.dest("..."))
.on('end', function () {
gulp
.src("...")
.pipe(gulp.dest("..."))
.on('end', callback);
});
});
});
But it's probably a better pattern to split it up into multiple tasks, each one with a dependency on the previous one.
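A rough sketch of that pattern in gulp 3 syntax (the task names are placeholders; returning each stream is what lets gulp know when a task has finished, so the dependency chain actually waits):

gulp.task("stuff-one", function () {
    return gulp.src("...").pipe(gulp.dest("..."));
});

gulp.task("stuff-two", ["stuff-one"], function () {
    return gulp.src("...").pipe(gulp.dest("..."));
});

gulp.task("dostuff", ["stuff-two"], function () {
    return gulp.src("...").pipe(gulp.dest("..."));
});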
run-sequence:
Runs a sequence of gulp tasks in the specified order. This function
is designed to solve the situation where you have defined run-order,
but choose not to or cannot use dependencies.
npm install --save-dev run-sequence
var runSequence = require('run-sequence');

// runSequence will ensure this task runs the following tasks in the listed order
gulp.task('things-to-do', callback => runSequence(
    'clean-up-workspace',  // clean up before copying new files
    'copy-new-files',      // wait till copy-new-files is done before linting
    'lint',                // wait till lint is done before running minify
    'minify',              // wait till minify is done before starting laundry and dinner
    ['do-laundry',         // do laundry and cook dinner at the same time
     'cook-dinner'],
    'bath-cat',            // don't bath the cat till both laundry and dinner are done
    callback
));
https://www.npmjs.com/package/run-sequence
Use glob's synchronous mode, then return the result of gulp.src:
var glob = require('glob');
var _ = require('lodash');
var template = require('gulp-template'); // assuming the gulp-template plugin

gulp.task('render', function () {
    var appJsFiles = _.map(glob.sync('src/**/*.js'), function (f) {
        return f.slice(6);
    });
    // Render the file.
    return gulp.src('src/template.html')
        .pipe(template({
            scripts: appJsFiles,
            styles: ['style1.css', 'style2.css', 'style3.css']
        }))
        .pipe(gulp.dest(config.build_dir));
});
You can add something like this after the last pipe:

.pipe(gulp.dest(FINAL_DEST))
.on('end', () => gulp.src(['build']).pipe(clean()))
It should work like this ....
gulp.src("...")
.pipe(gulp.dest("..."))
.on('end', () => {
gulp.src("...")
.pipe(gulp.dest("..."))
})
.on('end', () => {
gulp.src("...")
.pipe(gulp.dest("..."))
})
.on('end', () => {
gulp.src("...")
.pipe(gulp.dest("..."))
})
but it doesn't, because .on() returns the stream it was called on, so all three 'end' handlers end up attached to the first pipeline and nothing waits for the inner ones. Instead it works like this:
gulp.src("...")
.pipe(gulp.dest("..."))
.on('end', () => {
gulp.src("...")
.pipe(gulp.dest("..."))
.on('end', () => {
gulp.src("...")
.pipe(gulp.dest("..."))
.on('end', () => {
gulp.src("...")
.pipe(gulp.dest("..."))
})
})
})
I've looked at many different posts here on SO. I'm using Gulp (CLI version 2.3.0, local version 4.0.2) and have tried multiple approaches to the gulp 4 default task declaration:
exports.default = gulp.series(clean, (done) => {
    gulp.parallel(styles, scripts);
    done();
});

gulp.task('default',
    gulp.series(clean, (done) => {
        gulp.parallel(scripts, styles);
        done();
    })
);
function production() {
    return gulp.series(clean, function (done) {
        gulp.parallel(styles, scripts);
        done();
    });
}

exports.default = production;
The clean function is simple:

function clean(cb) {
    console.log("clean");
    del(["dist"]);
    cb();
}
For testing I have created a test function:
function test() {
    console.log("in test");
    return (
        gulp.src(paths.styles.src)
            .pipe(notify("Styles rendered"))
    );
}
exports.test = test;
And a test default function:
exports.default = gulp.series(clean, (done) => {
    console.log("before styles and scripts");
    gulp.parallel(test);
    console.log("after styles and scripts");
    done();
});
Printed to the terminal are:
"clean"
"before styles and scripts"
"after styles and scripts"
But no "in test".
What am I missing here?
Not sure if this is the way Gulp is designed, but based on this answer, the code that works for me is:
exports.default = gulp.series(clean, (callbackA) => {
    return gulp.parallel(styles, scripts, (callbackB) => {
        callbackA();
        callbackB();
    })();
});
And the three tasks I'm calling are:
function clean(cb) {
    del(["dist"]);
    cb();
}

// Define tasks after requiring dependencies
function styles() {
    console.log("in styles");
    return (
        gulp.src(paths.styles.src)
            .pipe(sourcemaps.init())
            .pipe(sass())
            .on("error", sass.logError)
            .pipe(postcss([autoprefixer(), cssnano()]))
            .pipe(sourcemaps.write())
            .pipe(gulp.dest(paths.styles.dest))
            // Add browsersync stream pipe after compilation
            .pipe(browserSync.stream())
    );
}
// Expose the task by exporting it
// This allows you to run it from the command line using
// $ gulp styles
exports.styles = styles;

function scripts() {
    // Transpile our entry files with babel, then minify them
    return (
        gulp.src(paths.scripts.src)
            .pipe(babel())
            .pipe(uglify())
            // Then write the resulting files to a folder
            .pipe(gulp.dest(paths.scripts.dest))
    );
}
// Expose the task, this allows us to call this task
// by running `gulp scripts` in the terminal
exports.scripts = scripts;
Aside from Gulp docs and here, this was a nice source.
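For what it's worth, gulp.series and gulp.parallel both just return composed task functions, so they can usually be nested directly without the wrapper callbacks. A minimal sketch, assuming the same clean, styles and scripts functions as above:

exports.default = gulp.series(clean, gulp.parallel(styles, scripts));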
I'm playing with gulp and I stumbled on this issue that I can't really understand.
I have some tasks:
gulp.task('concat-js', (done) => {
    gulp.src('src/app/**/*.js')
        .pipe(concat('app-script.js'))
        .pipe(gulp.dest('src/app/dist/'));
    done();
});

gulp.task('minify-js', (done) => {
    gulp.src('src/app/dist/*.js')
        .pipe(minify({noSource: true, ignoreFiles: ['*-min.js']}))
        .pipe(gulp.dest('src/app/dist/'));
    done();
});

gulp.task('compress',
    gulp.series('concat-js', 'minify-js', function (done) {
        // default task code here
        done();
    })
);

gulp.task('run',
    gulp.series('compress', () => {
        ...
    })
);
My goal is that when run is executed, my source files are concatenated and then a minified version of that file is produced. But I'm running into some kind of problem executing those tasks in series.
When I execute run for the first time, the application doesn't run and only app-script.js gets created. Running it a second time creates the app-script-min.js file.
As far as I understand, minify-js doesn't wait for concat-js to finish creating the file.
I think this might be because done() is called before the gulp stream has completed. Try changing
gulp.task('concat-js', (done) => {
    gulp.src('src/app/**/*.js')
        .pipe(concat('app-script.js'))
        .pipe(gulp.dest('src/app/dist/'));
    done();
});

to

gulp.task('concat-js', (done) => {
    gulp.src('src/app/**/*.js')
        .pipe(concat('app-script.js'))
        .pipe(gulp.dest('src/app/dist/'))
        .on('finish', function () {
            done();
        });
});
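Another common approach is to skip the callback entirely and return the stream, so gulp itself waits for the stream to finish before starting the next task in the series. A minimal sketch of the same task:

gulp.task('concat-js', () => {
    // Returning the stream tells gulp when this task is actually done
    return gulp.src('src/app/**/*.js')
        .pipe(concat('app-script.js'))
        .pipe(gulp.dest('src/app/dist/'));
});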
I also found this resource which might be interesting for you. http://www.ericcredeur.com/gulp/javascript/2015/07/15/cascading-gulp-tasks.html
I want to run this function and then read the file with fs.readFile
function download(url) {
    mkdirp('/directory', function (err) {
        request(url).pipe(fs.createWriteStream('/directory/example.txt'));
    });
}
How would I do this asynchronously?
I like to work with fs-extra. Install it like this:

npm i fs-extra

An example snippet for your use case could be:
var fs = require('fs-extra')
var path = require('path')

function download(url) {
    // ensureDir creates the directory (and any parents) if it does not exist
    fs.ensureDir(path.join(__dirname, '/directory'), err => {
        if (err) {
            console.log(`failed to create directory`)
            return false
        }
        fs.writeFileSync(path.join(__dirname, '/directory/example.txt'), 'YOUR-TEXT-TO-WRITE-INTO-THE-FILE')
        console.log(fs.readFileSync(path.join(__dirname, '/directory/example.txt')))
    })
}
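If you want to stay fully asynchronous instead of using the Sync variants, fs-extra also returns promises when you omit the callback (assuming a reasonably recent fs-extra version). A minimal sketch with a made-up text payload:

const fs = require('fs-extra')
const path = require('path')

async function download(url) {
    const file = path.join(__dirname, 'directory', 'example.txt')
    // outputFile creates the parent directory if it does not exist yet
    await fs.outputFile(file, 'YOUR-TEXT-TO-WRITE-INTO-THE-FILE')
    const contents = await fs.readFile(file, 'utf8')
    console.log(contents)
}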
You could create a read stream pipe once the first pipe has finished.
function download(url) {
    mkdirp('/directory', function (err) {
        request(url)
            .pipe(fs.createWriteStream('/directory/example.txt'))
            .on('finish', function () {
                fs.createReadStream('/directory/example.txt').pipe(process.stdout);
            });
    });
}
You can of course pipe it to somewhere else other than process.stdout.
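Or, since the goal was to read the file with fs.readFile, you could do that read inside the 'finish' handler instead; same idea, same hypothetical path as in the question:

function download(url) {
    mkdirp('/directory', function (err) {
        if (err) throw err;
        request(url)
            .pipe(fs.createWriteStream('/directory/example.txt'))
            .on('finish', function () {
                // The file is fully written at this point
                fs.readFile('/directory/example.txt', 'utf8', function (err, contents) {
                    if (err) throw err;
                    console.log(contents);
                });
            });
    });
}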
I am having an issue where, after several restarts triggered by gulp watch, I get a warning that I may have a memory leak. I assume it's because I'm starting a new instance of gulp watch with every restart, triggering a new server and another new watch, over and over. I'm not sure that's actually the case, nor how to fix it if it is.
Here is my gulpfile.js.
const gulp = require('gulp');
const exec = require('child_process').exec;
const gutil = require('gulp-util');
const eslint = require('gulp-eslint');
gulp.task('dev', () => {
    gulp.start('test');
    gulp.start('lint');
    gulp.start('server');
    gulp.start('watch');
    return gutil.log('Gulp is running!');
});

gulp.task('server', (cb) => {
    exec('npm start', (err, stdout, stderr) => {
        gutil.log(stdout);
        gutil.log(stderr);
        cb(err);
    });
});

gulp.task('test', (cb) => {
    exec('npm test', (err, stdout, stderr) => {
        gutil.log(stdout);
        gutil.log(stderr);
        cb(err);
    });
});

gulp.task('watch', () => {
    gulp.watch(['./app.js', './views/*', './routes/*'], ['dev']);
});

gulp.task('lint', () => {
    return gulp.src(['**/*.js', '!node_modules/**'])
        .pipe(eslint())
        .pipe(eslint.format());
});
And my error is:
(node:808) Warning: Possible EventEmitter memory leak detected. 11 change listeners added. Use emitter.setMaxListeners() to increase limit
Your gulp.watch calls the task dev which in turn creates another gulp.watch. My money is on that.
As another user noted, you're calling the dev task from gulp watch, basically creating an infinite loop, which would cause that.
To add to that, I would encourage using a default task:
gulp.task('default', ['dev', 'test', 'lint', 'watch', 'server']);
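One way to break that loop (a sketch, keeping the gulp 3 syntax used here) is to have the watcher re-run only the tasks that actually need re-running, rather than dev, which registers a new watcher and server every time:

gulp.task('watch', () => {
    // Re-run only test and lint on changes; no new watch/server instances
    gulp.watch(['./app.js', './views/*', './routes/*'], ['test', 'lint']);
});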
I have added the following registerTask calls to this Gruntfile.js
grunt.task.registerTask('runDebug', 'Run the app with debug flag', function () {
    var done = this.async();

    grunt.util.spawn({
        cmd: 'node',
        args: ['--debug', './node_modules/nodemon/nodemon.js', 'index.js'],
        opts: {
            stdio: 'inherit'
        }
    }, function (error, result, code) {
        if (error) {
            grunt.log.write(result);
            grunt.fail.fatal(error);
        }
        done();
    });
    grunt.log.writeln('node started');

    grunt.util.spawn({
        cmd: 'node-inspector',
        args: ['&'],
        opts: {
            //cwd: current working directory
        }
    }, function (error, result, code) {
        if (error) {
            grunt.log.write(result);
            grunt.fail.fatal(error);
        }
        done();
    });
    grunt.log.writeln('inspector started');
});
grunt.task.registerTask('debug', ['runDebug', 'compile', 'watch']);
The new debug task is similar to the existing server task. However, the grunt server command runs the compile, watch, and runNode tasks, whereas the grunt debug command only runs the runDebug task.
What am I missing here? Why aren't the compile and watch tasks run with the grunt debug command?
Your code is calling the done() function returned by this.async() more than once, which could be confusing Grunt. I would suggest calling a function of your own, which could be named spawned(), instead of calling done() directly in your callbacks. The function could be something like:
var expected_spawns = 2;

function spawned() {
    if (!--expected_spawns)
        done();
    if (expected_spawns < 0)
        throw new Error("too many spawns!"); // Or one of Grunt's fail functions.
}
This way done() will be called only once.
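Used in the task above, both spawn callbacks would then call spawned() instead of done(). A shortened sketch of how it fits together:

grunt.task.registerTask('runDebug', 'Run the app with debug flag', function () {
    var done = this.async();
    var expected_spawns = 2;

    function spawned() {
        // Only signal Grunt once both child processes have reported back
        if (!--expected_spawns) done();
    }

    grunt.util.spawn({
        cmd: 'node',
        args: ['--debug', './node_modules/nodemon/nodemon.js', 'index.js'],
        opts: { stdio: 'inherit' }
    }, function (error, result, code) {
        if (error) grunt.fail.fatal(error);
        spawned();
    });

    grunt.util.spawn({
        cmd: 'node-inspector',
        args: []
    }, function (error, result, code) {
        if (error) grunt.fail.fatal(error);
        spawned();
    });
});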