I want to add an option, for when I'm debugging my generator or working offline, that installs the npm and bower dependencies from cache (using --cache-min 999999 and --offline respectively).
Currently, this is my code (which both installs the dependencies and calls grunt bower):
CallumGenerator.prototype.installDeps = function () {
    var cb = this.async();

    this.installDependencies({
        skipInstall: this.options['skip-install'],
        callback: function () {
            this.spawnCommand('grunt', ['bower'])
                .on('close', function () {
                    cb();
                });
        }.bind(this)
    });
};
It looks like I'll most likely have to call .npmInstall() and .bowerInstall() manually in order to specify options (I think?), but I don't know how to specify any options. To clarify, this is how I would do it in the console:
npm install --cache-min 999999 --save-dev grunt-contrib-less
bower install --offline --save jquery#1.10.2
You can't specify options directly from #installDependencies; see: https://github.com/yeoman/generator/blob/master/lib/actions/install.js#L44-L69
You can specify them for both #npmInstall and #bowerInstall: https://github.com/yeoman/generator/blob/master/lib/actions/install.js#L121-L143
The options you pass are in the form of an object hash and will be parsed by the dargs node module, so you should follow that module's conventions for declaring options.
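As a rough illustration of that convention (the package name below is just an example), a camelCased option hash is turned into dashed CLI flags:

// Sketch: dargs converts camelCase keys into dashed flags, so this call
// should roughly translate to: npm install grunt-contrib-less --save-dev --cache-min=999999
this.npmInstall(['grunt-contrib-less'], {
    saveDev: true,
    cacheMin: 999999
});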
The code I used, which should be fine for anyone to use (you might want to get rid of the final callback, though):
CallumGenerator.prototype.installDeps = function () {
    var cb = this.async();

    this.npmInstall(null, {
        skipInstall: this.options['skip-install'],
        cacheMin: this.cachedDeps ? 999999 : 0
    }, function () {
        this.bowerInstall(null, {
            skipInstall: this.options['skip-install'],
            offline: this.cachedDeps
        }, function () {
            this.spawnCommand('grunt', ['bower'])
                .on('close', function () {
                    cb();
                });
        }.bind(this));
    }.bind(this));
};
It works fine. this.cachedDeps determines whether the cache is used or not.
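For completeness, here's one way this.cachedDeps could be wired up to a command-line flag in the generator's constructor. This is only a sketch, and the cached option name is my own assumption rather than something from the original code:

// Illustrative wiring only; the "cached" flag is hypothetical.
var yeoman = require('yeoman-generator');

var CallumGenerator = module.exports = function CallumGenerator(args, options) {
    yeoman.generators.Base.apply(this, arguments);

    // Expose a --cached flag and remember it for installDeps above.
    this.option('cached', { desc: 'Install npm/bower dependencies from cache', type: Boolean });
    this.cachedDeps = this.options.cached;
};

require('util').inherits(CallumGenerator, yeoman.generators.Base);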
I'm launching native apps with the help of WebdriverIO and Mocha. The application launches, but I'm unable to communicate with the device and can't interact with its elements.
android_app_test.js
const webdriverio = require('webdriverio');
const androidOptions = require('../../../helpers/caps').androidOptions;
const assert = require('chai').assert;

androidOptions.capabilities.appPackage = "com.google.android.calculator";
androidOptions.capabilities.appActivity = "com.android.calculator2.Calculator";

describe('Create Chrome web session', function () {
    let client;

    before(async function () {
        client = await webdriverio.remote(androidOptions);
    });

    after(async function () {
        await client.deleteSession();
    });

    it('should create and destroy Android browser session', async function () {
        const elem = await $('#digit_2');
        elem.waitForDisplayed(3000);
        await client.touchClick('digit_2');
    });
});
config.js
var Mocha = require('mocha'),
    fs = require('fs');

var mocha = new Mocha({
    reporter: 'mochawesome-screenshots',
    reporterOptions: {
        reportDir: 'customReportDir',
        reportName: 'customReportName',
        reportTitle: 'customReportTitle',
        reportPageTitle: 'customReportPageTitle',
        takePassedScreenshot: true,
        clearOldScreenshots: true,
        shortScrFileNames: true,
        jsonReport: false,
        multiReport: false
    },
    timeout: 600000,
});

var file = ['./test/basic/app/']; // location of the test js
for (var i = 0; i < file.length; i++) {
    fs.readdirSync(file[i]).forEach(function (filename) {
        mocha.addFile(file[i] + filename);
    });
}

mocha.run(function (failures) {
    process.on('exit', function () {
        process.exit(failures);
    });
});
package.json
"scripts": {
"test": "mocha config.js"
},
I'm not sure what's wrong; I think something is off in my configuration or elsewhere.
The $ global is added through the WebdriverIO test runner. Since you're using wdio through standalone mode, you don't get access to those globals. Try this instead:
const elem = await client.$('#digit_2')
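For example, the test body above could be reworked along these lines (a sketch only; the element method names assume WebdriverIO v5):

it('should tap digit 2', async function () {
    // In standalone mode the element helpers hang off the client instance,
    // not off a $ global injected by the wdio test runner.
    const elem = await client.$('#digit_2');
    await elem.waitForDisplayed(3000);
    await elem.click();
});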
Make sure you're using the newest version of Webdriver.io. Webdriver.io v5 is the latest version that also implements the $('selector') shortcut.
If you're using Webdriver.io v4, you may still need to use browser.element('selector') to find your elements.
It appears from the tags in your question and the code you posted that you may be on version 4.
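If that's the case, a v4-style version of the interaction might look roughly like this (a sketch only, assuming the standalone client exposes the usual v4 commands):

// WebdriverIO v4 commands take the selector directly on the client object.
await client.waitForVisible('#digit_2', 3000);
await client.click('#digit_2');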
$ is usually used as a shorthand to run jQuery functions (such as your $('#digit_2') in the "android_app_test.js" file).
From the WebdriverIO docs:
The $ command is a short way to call the findElement command in order to fetch a single element on the page. It returns an object with an extended prototype to call action commands without passing in a selector. However, if you still pass in a selector it will look for that element first and call the action on that element.
To fix this you have to install jQuery with these commands:
In a terminal run:
npm install --save jquery
npm install --save-dev @types/jquery
then import it at the top of your "android_app_test.js" file like this:
import * as $ from "jquery";
I have a gulp task in which I want to take some source files and copy them to build/premium and build/free and then remove some extra files from
build/free.
My attempt at that was doing this:
gulp.task("build", ["clean"], function () {
gulp.src(["src/*", "!src/composer.*", "LICENSE"])
.pipe(gulp.dest("build/premium"))
.pipe(del(["build/free/plugins/*", "!build/free/plugins/index.php"]))
.pipe(gulp.dest("build/free"));
});
Which results in an error:
TypeError: dest.on is not a function
at DestroyableTransform.Stream.pipe (stream.js:45:8)
at Gulp.<anonymous> (/Users/gezim/projects/myproj/gulpfile.js:9:6)
How do I accomplish the deleting part? Is there a better way altogether to do this?
This is a simple clean task implementation with gulp-del:
var del = require('gulp-del');

gulp.task('clean', function () {
    return del(['folderA/js', 'folderA/css', 'folderB/js']);
});
In your case you can just call it after build (read "use build as a dependency"):
gulp.task("build", function () {
return gulp.src(['src/*', '!src/composer.*', 'LICENSE'])
.pipe(gulp.dest("build/premium"))
.pipe(gulp.dest("build/free"));
});
gulp.task("complete-build", ["build"] function(){
return del(['build/free/plugins/*', '!build/free/plugins/index.php']);
});
Then call the "complete-build" task to perform it.
To be honest this is more a "Grunt"-like approach to the problem, but done with Gulp. Perhaps the recommendation to filter things before writing them in the build/free folder is more in the Gulp spirit.
Update 2/2018
The delete module has been renamed to del, as reported by @gerl:
var del = require('del');

gulp.task('clean', function () {
    return del(['folderA/js', 'folderA/css', 'folderB/js']);
});
I would use gulp-filter to drop only what should not be copied to the second destination.
I interpreted the intent of the task as wanting everything present in src to be present in build/premium. However, build/free should exclude everything which was originally in src/plugins but should still include src/plugins/index.php.
Here is a working gulpfile:
var gulp = require("gulp");
var filter = require("gulp-filter");
var del = require("del");
gulp.task("clean", function () {
return del("build");
});
gulp.task("build", ["clean"], function () {
return gulp.src(["src/**", "!src/composer.*", "LICENSE"])
.pipe(gulp.dest("build/premium"))
.pipe(filter(["**", "!plugins/**", "plugins/index.php"]))
.pipe(gulp.dest("build/free"));
});
The patterns passed to filter are relative paths. Since the gulp.src pattern has src/** it means they are relative to src.
Note also that del cannot be passed straight to .pipe() as it returns a promise. It can be returned from a task, like the clean task does.
I have two gulp tasks
gulp.task('build:dev', ['task1', 'task2', 'task3'], () => {
    doCssmin({'destination': ['file1', 'file2']});
});

gulp.task('build:prod', ['task1', 'task2', 'task3'], () => {
    doCssmin({'destination': ['file3', 'file4']});
});
Now I have a CSS-minification step that is shared between build:dev and build:prod, so I created it as a function both tasks can call.
function doCssmin(files) {
    _.each(files, function (val, key) {
        gulp.src(val)
            .pipe(minifyCss({compatibility: 'ie8'}))
            .pipe(rename(basename(key)))
            .pipe(gulp.dest(dirname(key)));
    });
}
doCssmin accepts a files parameter because build:dev and build:prod have different files to minify. If I run build:dev or build:prod, doCssmin runs asynchronously, but doCssmin depends on task2.
My question is how do I extract doCssmin to a gulp task and accept files parameter so it can be shared with build:dev and build:prod tasks?
I hope that isn't too confusing.
Yesterday, I answered a question on making your Gulp tasks more DRY. I believe a good chunk of that answer applies here as well.
To support loops with that, you want to merge the streams that you receive from gulp.src. Using the example here:
// assumes the merge-stream package
var merge = require('merge-stream');

gulp.task('test', function () {
    var bootstrap = gulp.src('bootstrap/js/*.js')
        .pipe(gulp.dest('public/bootstrap'));

    var jquery = gulp.src('jquery.cookie/jquery.cookie.js')
        .pipe(gulp.dest('public/jquery'));

    return merge(bootstrap, jquery);
});
For an array, you can use Array.prototype.map to convert the items into streams, then merge all of those:
// Named doCssmin (as in the question) so it doesn't shadow the minifyCss plugin used inside.
function doCssmin(paths) {
    return merge.apply(this, paths.map(function (path) {
        return gulp.src(path)
            .pipe(minifyCss({compatibility: 'ie8'}))
            .pipe(rename(basename(path)))
            .pipe(gulp.dest(dirname(path)));
    }));
}

gulp.task('test', function () {
    return doCssmin(['foo', 'bar']);
});
We are trying to use only Node.js with minimal dependencies on other packages; the challenge we now face is Handlebars. We found a package, Assemble, which can generate HTML for us. Unfortunately it is very slow, about 3 seconds each run, and 2.5 to 2.7 of those seconds are spent on the following line:
var assemble = require('assemble');
Our package.json script section:
"scripts": {
"build:handlebars": "node scripts/handlebars.js",
"watch:handlebars": "nodemon --watch assets --exec \"npm run build:handlebars\"" }
the scripts/handlebars.js file:
#! /usr/bin/env node
var start = Date.now();

var assemble = require('assemble');
var extname = require('gulp-extname');

console.log(Date.now() - start);

assemble.data('assets/templates/data/*.json');
assemble.layouts('assets/templates/layouts/*.hbs');
assemble.partials('assets/templates/partials/*.hbs');

assemble.src('assets/templates/*.hbs', { layout: 'default' })
    .pipe(extname())
    .pipe(assemble.dest('build/'));
Each time we save a .hbs file, nodemon restarts and the script is run again.
How can we ensure that require gets called only once, or that the required modules remain in memory?
Thank you!
Since you want to accomplish this with assemble, but without gulp, I recommend chokidar.
npm install chokidar --save
Now you can require chokidar like this:
var chokidar = require('chokidar');
Then define a little helper that runs handler whenever something in a pattern changes:
function watch(patterns, handler) {
    chokidar.watch(patterns, {
        ignoreInitial: false
    }).on('add', handler).on('change', handler).on('unlink', handler);
}
Now we can alter the script like this:
#! /usr/bin/env node
var start = Date.now();

var assemble = require('assemble');
var extname = require('gulp-extname');
var chokidar = require('chokidar');

console.log(Date.now() - start);

assemble.data('assets/templates/data/*.json');
assemble.layouts('assets/templates/layouts/*.hbs');
assemble.partials('assets/templates/partials/*.hbs');

// Enable --watch command line for Chokidar, otherwise, just run!
if (process.argv.pop() === '--watch') {
    watch('assets', runOnce);
} else {
    runOnce();
}

function watch(patterns, handler) {
    chokidar.watch(patterns, {
        ignoreInitial: false
    }).on('add', handler).on('change', handler).on('unlink', handler);
}

function runOnce() {
    assemble.src('assets/templates/*.hbs', { layout: 'default' })
        .pipe(extname())
        .pipe(assemble.dest('build/'));
}
And instead of nodemon, this will keep your script alive and running. So, in npm, you want this:
"scripts": {
"build:handlebars": "node scripts/handlebars.js",
"watch:handlebars": "node scripts/handlebars.js --watch"
}
Whenever a file changes, the script will now run, without re-invoking from scratch.
The beta version of assemble is based on gulp and has a CLI that you can use just like you would use gulp. If you don't want to use the CLI and prefer npm scripts instead, you can do something based on @roel-van-uden's answer, without chokidar, and also be able to reload the actual assets (e.g. data, layouts, partials):
#! /usr/bin/env node
var start = Date.now();
var assemble = require('assemble');
var extname = require('gulp-extname');

assemble.task('assets', function () {
    console.log(Date.now() - start);
    assemble.data('assets/templates/data/*.json');
    assemble.layouts('assets/templates/layouts/*.hbs');
    assemble.partials('assets/templates/partials/*.hbs');

    return assemble.src('assets/templates/*.hbs', { layout: 'default' })
        .pipe(extname())
        .pipe(assemble.dest('build/'));
});

assemble.task('watch', ['assets'], function () {
    assemble.watch('./assets/**/*.*', ['assets']);
});

// Enable --watch command line
if (process.argv.pop() === '--watch') {
    assemble.run(['watch']);
} else {
    assemble.run(['assets']);
}
The following Gulpjs task works fine when editing files in the glob match:
// watch task.
gulp.task('watch', ['build'], function () {
    gulp.watch(src + '/js/**/*.js', ['scripts']);
    gulp.watch(src + '/img/**/*.{jpg,jpeg,png,gif}', ['copy:images']);
    gulp.watch(src + '/less/*.less', ['styles']);
    gulp.watch(src + '/templates/**/*.{swig,json}', ['html']);
});

// build task.
gulp.task('build', ['clean'], function () {
    return gulp.start('copy', 'scripts', 'less', 'htmlmin');
});
However it doesn't work (it's not triggered) for new or deleted files. Is there something I'm missing?
EDIT: even using the gulp-watch plugin it doesn't seem to work:
gulp.task('scripts', function () {
    return streamqueue(
        { objectMode: true },
        gulp.src([
            vendor + '/jquery/dist/jquery.min.js',
            vendor + '/bootstrap/dist/js/bootstrap.min.js'
        ]),
        gulp.src([
            src + '/js/**/*.js'
        ]).pipe(plugins.uglify())
    )
        .pipe(plugins.concat(pkg.name + '.min.js'))
        .pipe(gulp.dest(dest + '/js/'));
});

gulp.task('watch', ['build'], function () {
    plugins.watch({glob: src + '/js/**/*.js'}, function () {
        gulp.start('scripts');
    });
});
EDIT: Solved, it was this issue. Globs starting with ./ (which was the value of src) don't seem to work at the moment.
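In other words, the fix was simply to drop the leading ./ from the base path, roughly like this (the variable values are illustrative):

// Before (new/deleted files were not picked up by the watcher):
// var src = './src';
// After:
var src = 'src';

gulp.watch(src + '/js/**/*.js', ['scripts']);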
Edit: Apparently gulp.watch does work with new or deleted files now. It did not when the question was asked.
The rest of my answer still stands: gulp-watch is usually a better solution because it lets you perform specific actions only on the files that have been modified, while gulp.watch only lets you run complete tasks. For a project of a reasonable size, this will quickly become too slow to be useful.
You aren't missing anything. gulp.watch does not work with new or deleted files. It's a simple solution designed for simple projects.
To get file watching that can look for new files, use the gulp-watch plugin, which is much more powerful. Usage looks like this:
var watch = require('gulp-watch');

// in a task
watch({glob: <<glob or array of globs>>})
    .pipe(<<add per-file tasks here>>);

// if you'd rather rerun the whole task, you can do this:
watch({glob: <<glob or array of globs>>}, function () {
    gulp.start(<<task name>>);
});
Personally, I recommend the first option. This allows for much faster, per-file processes. It works great during development with livereload as long as you aren't concatenating any files.
You can wrap up your streams either using my lazypipe library, or simply using a function and stream-combiner like this:
var combine = require('stream-combiner');

function scriptsPipeline() {
    return combine(coffeescript(), uglify(), gulp.dest('/path/to/dest'));
}

watch({glob: 'src/scripts/**/*.js'})
    .pipe(scriptsPipeline());
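Since lazypipe is mentioned above as the alternative to stream-combiner, here is a rough sketch of the same pipeline built with it (assuming the same coffeescript and uglify plugins are in scope); it could be used in place of the scriptsPipeline function above:

var lazypipe = require('lazypipe');

// lazypipe takes stream factories plus their arguments and only builds the
// pipeline when scriptsPipeline() is actually called.
var scriptsPipeline = lazypipe()
    .pipe(coffeescript)
    .pipe(uglify)
    .pipe(gulp.dest, '/path/to/dest');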
UPDATE October 15, 2014
As pointed out by @pkyeck below, apparently the 1.0 release of gulp-watch changed the format slightly, so the above examples should now be:
var watch = require('gulp-watch');

// in a task
watch(<<glob or array of globs>>)
    .pipe(<<add per-file tasks here>>);

// if you'd rather rerun the whole task, you can do this:
watch(<<glob or array of globs>>, function () {
    gulp.start(<<task name>>);
});
and
var combine = require('stream-combiner');

function scriptsPipeline() {
    return combine(coffeescript(), uglify(), gulp.dest('/path/to/dest'));
}

watch('src/scripts/**/*.js')
    .pipe(scriptsPipeline());
Both gulp.watch() and require('gulp-watch')() will trigger for new/deleted files; however, not if you use absolute directories. In my tests I did not use "./" for relative directories, by the way.
Neither will trigger if whole directories are deleted, though.
var watch = require('gulp-watch');

// Won't work for new files until gaze is fixed if using absolute dirs.
// It won't trigger if whole directories are deleted, either.
//gulp.watch(config.localDeploy.path + '/reports/**/*', function (event) {
//gulp.watch('src/app1/reports/**/*', function (event) {
//    console.log('*************************** Event received in gulp.watch');
//    console.log(event);
//    gulp.start('localDeployApp');
//});

// Won't work for new files until gaze is fixed if using absolute dirs.
// It won't trigger if whole directories are deleted, either.
// See https://github.com/floatdrop/gulp-watch/issues/104
//watch(config.localDeploy.path + '/reports/**/*', function () {
watch('src/krfs-app/reports/**/*', function (event) {
    console.log("watch triggered");
    console.log(event);
    gulp.start('localDeployApp');
});
If src is an absolute path (starting with /), your code is not going to detect new or deleted files. However there's still a way:
Instead of:
gulp.watch(src + '/js/**/*.js', ['scripts']);
write:
gulp.watch('js/**/*.js', {cwd: src}, ['scripts']);
and it will work!
Globs must have a separate base directory specified and that base location must not be specified in the glob itself.
If you have lib/*.js, it'll look under the current working dir, which is process.cwd().
Gulp uses Gaze to watch files and in the Gulp API doc we see that we can pass Gaze specific options to the watch function: gulp.watch(glob[, opts], tasks)
Now in the Gaze doc we can find that the current working dir (glob base dir) is the cwd option.
Which leads us to alexk's answer:
gulp.watch('js/**/*.js', {cwd: src}, ['scripts']);
I know this is an older question, but I thought I'd throw in the solution I came up with. None of the gulp plugins I found would notify me of new or renamed files, so I ended up wrapping monocle in a convenience function.
Here's an example of how that function is used:
watch({
    root: config.src.root,
    match: [{
        when: 'js/**',
        then: gulpStart('js')
    }, {
        when: '+(scss|css)/**',
        then: gulpStart('css')
    }, {
        when: '+(fonts|img)/**',
        then: gulpStart('assets')
    }, {
        when: '*.+(html|ejs)',
        then: gulpStart('html')
    }]
});
I should note that gulpStart is also a convenience function I made.
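The post doesn't show gulpStart, but it presumably looks something like this hypothetical helper (not from the original author):

// Hypothetical helper: returns a function that kicks off the named gulp task,
// so it can be passed as a "then" handler above.
function gulpStart(taskName) {
    return function () {
        gulp.start(taskName);
    };
}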
And here is the actual watch module.
module.exports = function (options) {
    var path = require('path'),
        monocle = require('monocle'),
        minimatch = require('minimatch');

    var fullRoot = path.resolve(options.root);

    function onFileChange(e) {
        var relativePath = path.relative(fullRoot, e.fullPath);

        options.match.some(function (match) {
            var isMatch = minimatch(relativePath, match.when);
            isMatch && match.then();
            return isMatch;
        });
    }

    monocle().watchDirectory({
        root: options.root,
        listener: onFileChange
    });
};
Pretty simple, eh? The whole thing can be found over at my gulp starter kit: https://github.com/chrisdavies/gulp_starter_kit
It is important to note that it looks like gulp.watch only reports changed and deleted files on Windows but listens for new and deleted files by default on OSX:
https://github.com/gulpjs/gulp/issues/675
You should use 'gulp-watch' for new/renamed/deleted files instead of gulp.watch
var gulpwatch = require('gulp-watch');

var source = './assets',
    destination = './dest';

gulp.task('copy-changed-assets', function () {
    gulpwatch(source + '/**/*', function (obj) {
        gulp.src(obj.path, { "base": source })
            .pipe(gulp.dest(destination));
    });
});
});