How to run a shell command from Grunt task function - javascript

I'm trying to move some icons in my app directory based on a function I have inside my Gruntfile.js. Would it be possible to do something like this? I've tried the following (going into the dev or staging folder and copying all files to the parent directory), but couldn't get it to work. Thanks in advance.
grunt.registerTask('setAppIcon', 'Task that sets the app icon', function(environment) {
  if (environment.toLowerCase() == "development") {
    grunt.task.run(['exec:command:cd app/lib/extras/res/icon/ios/dev && cp -a . ../']);
  } else if (environment.toLowerCase() == "staging") {
    grunt.task.run(['exec:command:cd app/lib/extras/res/icon/ios/staging && cp -a . ../']);
  }
});

Yes, it's possible to achieve your requirement. However, when you invoke grunt.task.run inside your function (i.e. your custom task), you need to provide a reference to a task that is actually defined.
If you define a separate target (let's call them copy_dev and copy_staging, as shown in the example below) for each cd ... && cp ... command in the grunt-exec task, it should work successfully.
Gruntfile.js
The following Gruntfile.js gist shows how this can be achieved:
module.exports = function (grunt) {
  grunt.loadNpmTasks('grunt-exec');

  grunt.initConfig({
    exec: {
      copy_dev: {
        cmd: 'cd app/lib/extras/res/icon/ios/dev && cp -a . ../'
      },
      copy_staging: {
        cmd: 'cd app/lib/extras/res/icon/ios/staging && cp -a . ../'
      }
    }
  });

  grunt.registerTask('setAppIcon', 'Task that sets the app icon', function() {
    var environment = process.env.NODE_ENV;

    // Exit early if the NODE_ENV variable has not been set.
    if (!environment) {
      grunt.log.writeln('"setAppIcon" task failed - NODE_ENV has not been set.'['yellow']);
      return;
    }

    if (environment.toLowerCase() == "development") {
      grunt.task.run('exec:copy_dev');
      grunt.log.writeln('>> Copying icons from "dev"...');
    } else if (environment.toLowerCase() == "staging") {
      grunt.task.run('exec:copy_staging');
      grunt.log.writeln('>> Copying icons from "staging"...');
    }
  });

  grunt.registerTask('default', ['setAppIcon']);
};
Additional notes
Inside the custom task/function named setAppIcon we obtain the current node environment using Node's built-in process.env.
When running $ grunt via your CLI (using the gist shown above), if the process.env.NODE_ENV variable has not been set, or it has been unset by running $ unset NODE_ENV, you will see the following message:
"setAppIcon" task failed - NODE_ENV has not been set.
However, if the process.env.NODE_ENV variable has been set to either development or staging the files will be copied as expected.
For example running either of the following via your CLI will work successfully:
$ export NODE_ENV=development && grunt
or
$ export NODE_ENV=staging && grunt
You will also see either of the following messages logged to the console:
>> Copying icons from "dev"...
or
>> Copying icons from "staging"...
Once process.env.NODE_ENV has been set to either development or staging, simply running $ grunt via your CLI will copy the files according to whichever environment is set.
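If you would rather keep the original approach of passing the environment as a task argument instead of reading NODE_ENV, the same exec targets can be selected via Grunt's colon syntax. A minimal sketch, reusing the copy_dev and copy_staging targets defined above:
grunt.registerTask('setAppIcon', 'Task that sets the app icon', function (environment) {
  // Fail the build when no argument is supplied, e.g. plain `grunt setAppIcon`.
  if (!environment) {
    grunt.fail.fatal('Usage: grunt setAppIcon:development or grunt setAppIcon:staging');
  }

  if (environment.toLowerCase() === 'development') {
    grunt.task.run('exec:copy_dev');
  } else if (environment.toLowerCase() === 'staging') {
    grunt.task.run('exec:copy_staging');
  }
});
Running $ grunt setAppIcon:development then invokes the exec:copy_dev target.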

Related

Uglify and Minify AngularJS Source Code without NodeJS

I want to uglify and then minify my AngularJS source code.
I have been searching for samples and found Grunt, but Grunt needs NodeJS, and our website does not run on NodeJS.
I can't find any good alternatives.
Any ideas?
Uglifying code is only needed when you want to publish your code. The server doesn't need the uglified version, since whitespace in the code makes no difference to how it runs.
To clear some things up I'm showing what "grunt" is on my development machine below:
shaun@laptop:~/.npm$ which grunt
/home/shaun/local/bin/grunt
shaun@laptop:~/.npm$ ls -al /home/shaun/local/bin/grunt
lrwxrwxrwx 1 shaun shaun 39 Apr 15 2015 /home/shaun/local/bin/grunt -> ../lib/node_modules/grunt-cli/bin/grunt
shaun@laptop:~/.npm$ cat /home/shaun/local/lib/node_modules/grunt-cli/bin/grunt
#!/usr/bin/env node
'use strict';

process.title = 'grunt';

// Especially badass external libs.
var findup = require('findup-sync');
var resolve = require('resolve').sync;

// Internal libs.
var options = require('../lib/cli').options;
var completion = require('../lib/completion');
var info = require('../lib/info');
var path = require('path');

var basedir = process.cwd();
var gruntpath;

// Do stuff based on CLI options.
if ('completion' in options) {
  completion.print(options.completion);
} else if (options.version) {
  info.version();
} else if (options.base && !options.gruntfile) {
  basedir = path.resolve(options.base);
} else if (options.gruntfile) {
  basedir = path.resolve(path.dirname(options.gruntfile));
}

try {
  gruntpath = resolve('grunt', {basedir: basedir});
} catch (ex) {
  gruntpath = findup('lib/grunt.js');
  // No grunt install found!
  if (!gruntpath) {
    if (options.version) { process.exit(); }
    if (options.help) { info.help(); }
    info.fatal('Unable to find local grunt.', 99);
  }
}

// Everything looks good. Require local grunt and run it.
require(gruntpath).cli();
As you can see, Grunt is a node script, so it does require node to run any grunt-based plugin. That said, you can just download and run any node script from GitHub or wherever; they are just JS files.
https://github.com/mishoo/UglifyJS2
^^ if you were to clone the above repository and had node installed you could just run
git clone https://github.com/mishoo/UglifyJS2.git
cd UglifyJS2
bin/uglifyjs -m -- /full/path/to/input.js
# note the above assumes you already have node installed on the
# development machine since the bin/uglifyjs file is interpreted/run
# by the node VM
This will output the mangled JS, which you can then put on the server (without node at all). To reiterate, your build process/tools don't need to be installed on the server (and ideally probably shouldn't be).
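If you'd rather script it than use the CLI, the same can be done from a plain node file via the uglify-js package's API. A minimal sketch, assuming uglify-js 3.x (where minify takes source strings); the file names are illustrative:
// minify.js - run with: node minify.js
var fs = require('fs');
var UglifyJS = require('uglify-js');

var source = fs.readFileSync('input.js', 'utf8');
var result = UglifyJS.minify(source, { mangle: true });

if (result.error) {
  console.error(result.error);
  process.exit(1);
}

// Write the mangled/minified output; deploy this file to the server.
fs.writeFileSync('input.min.js', result.code);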

grunt-exec doesn't recognize a custom function defined in bash

Grunt version:
CLI - v1.2.0
local - v1.0.1
I'm using grunt-exec to start a local DynamoDB server. I'm doing this by creating a custom function in .bashrc then calling it inside grunt-exec. I also tried explicitly creating an alias, which didn't fix it.
~/.bashrc
runDynamo () {
java -Djava.library.path=~/DynamoDBServer/DynamoDBLocal_lib -jar ~/DynamoDBServer/DynamoDBLocal.jar -sharedDb
}
Gruntfile.js
// ...
exec: {
dynamo: {
// Run DynamoDB locally at port 8000
// This alias has been set during the initial installation
command: "runDynamo"
}
}
// ...
var mode = grunt.option("mode") || "test";
grunt.registerTask("run", ["exec:" + mode]);
When I run grunt run --mode=dynamo, I get the following error in stdout:
Running "exec:dynamo" (exec) task
>> /bin/sh: 1: runDynamo: not found
>> Exited with code: 127.
>> Error executing child process: Error: Process exited with code 127.
The command works fine when used directly in bash (i.e. $ runDynamo), so I'm not sure why grunt-exec isn't working here.
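One likely cause: grunt-exec spawns its commands through /bin/sh, which does not source ~/.bashrc, so shell functions and aliases defined there are never visible to it (hence the exit code 127). A sketch of a workaround, inlining the java command from the ~/.bashrc function directly into the Gruntfile ($HOME is used because ~ may not expand inside /bin/sh command words):
// Gruntfile.js (excerpt) - hypothetical workaround, not part of the original post.
exec: {
  dynamo: {
    // Run DynamoDB locally at port 8000 without relying on a bash function.
    command: 'java -Djava.library.path=$HOME/DynamoDBServer/DynamoDBLocal_lib ' +
             '-jar $HOME/DynamoDBServer/DynamoDBLocal.jar -sharedDb'
  }
}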

How to use in node properties for deployment and local usage

I've created a node application which I can run locally and in the cloud.
Now I want this to be done more smoothly and cleanly, so I tried putting a property in a config.json file to check whether I want to deploy the app or use it locally. But I need to update this property manually before I switch between the two. Is there a better way to do this with node?
let runnerServer = `http://localhost:8060/service/runner/${server.address().port}`;
if (cfg.isHosted) {
  runnerServer = `http://${serverName}/service/runner/${server.address().port}`;
}
and in the config.json I have the field isHosted, which I change manually (true/false) depending on whether I want to deploy or not...
Update
Maybe I can use process.env.PORT, but this is just one example of what I need in my code; currently I have several forked processes that need to know whether I'm deployed or running locally.
One option is to use node's built-in process.env object (https://nodejs.org/api/process.html) and use two config files, one per environment. This approach is somewhat similar to what you are doing, but may be cleaner:
config.localhost.json
config.production.json
then by setting properties on this object based on environment such as process.env.NODE_ENV = 'localhost' or process.env.NODE_ENV = 'production', you could read the corresponding file to import the configurations.
var config = require('./config.production.json');
if (process.env.NODE_ENV === 'localhost') {
  config = require('./config.localhost.json');
}
So to set this environment variable when running locally on your dev box:
OSX - on the terminal: export NODE_ENV=localhost
WINDOWS - on the cmd line: set NODE_ENV=localhost
An easy way to solve this, if every environment configuration can be in the repo:
config.json:
{
  "production": {
    // prod config
  },
  "staging": {
    // staging config
  },
  "devel": {
    // devel config
  }
}
config.js:
const environment = process.env['ENV'] || 'devel';
module.exports = require('./config.json')[environment];
then in your package.json you could add the following scripts:
package.json
// npm stuff
"scripts": {
  "prod": "ENV=production node index.js",
  "stage": "ENV=staging node index.js",
  "dev": "ENV=devel node index.js"
}
and with this setup, you can run each configuration with the following commands:
production: npm run prod
staging: npm run stage
devel: npm run dev
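Note that the ENV=production prefix syntax works in Unix shells but not in the Windows cmd shell. If you need Windows support, the cross-env package (which a later answer on this page also uses) can be prefixed to each script; a sketch:
"scripts": {
  "prod": "cross-env ENV=production node index.js",
  "stage": "cross-env ENV=staging node index.js",
  "dev": "cross-env ENV=devel node index.js"
}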

How to run Docker and node.js with remote configurations

I'd like to provide an easy and simple Docker container for an open source application that takes the URL of a configuration file as an argument and uses this file.
The Dockerfile is pretty straightforward:
FROM phusion/baseimage
# Use baseimage-docker's init system.
CMD ["/sbin/my_init"]
RUN curl -sL https://deb.nodesource.com/setup_4.x | sudo -E bash -
RUN apt-get update
RUN apt-get install -y nodejs git
ADD . /src
RUN cd /src; npm install; npm update
ENV NODE_ENV production
CMD ["/usr/bin/node", "/src/gitevents.js"]
I found no way of adding the file when the container runs (with ADD or ENTRYPOINT), so I'm trying to work it out in node.js:
docker run -e "CONFIG_URL=https://gist.githubusercontent.com/PatrickHeneise/c97ba221495df0cd9a3b/raw/fda1b8cd53874735349c6310a6643e6fc589a404/gitevents_config.js" gitevents
this sets CONFIG_URL as an environment variable that I can use in node. However, I then need to download the file, which is async, and that doesn't really work in the current setup.
if (process.env.NODE_ENV === 'production') {
  var exists = fs.accessSync(path.join(__dirname, 'common', 'production.js'), fs.R_OK);
  if (exists) {
    config = require('./production');
  } else {
    // https download, but then `config` is undefined when running the app the first time.
  }
}
There's no synchronous download in node.js; any recommendations on how I could solve this?
I'd love to have Docker do the job with ADD or CMD doing a curl download, but I'm not sure how that works?
Another option would be to consider that your "config file" is not really a file but just text, and to pass the content itself to the container at runtime:
CONFIG="$(curl -sL https://gist.githubusercontent.com/PatrickHeneise/c97ba221495df0cd9a3b/raw/fda1b8cd53874735349c6310a6643e6fc589a404/gitevents_config.js)"
docker run -e "CONFIG=${CONFIG}" gitevents
How about a combination of ENTRYPOINT and environment variable? You'd have ENTRYPOINT in the Dockerfile set to a shell script that would download the configuration file specified in the environment variable and then start the application.
Since the entry point script would receive whatever is in CMD as its arguments, the application start step could be accomplished by something like
# Execute CMD.
exec "$@"
I managed to re-write my config script to work asynchronously; still not the best solution in my eyes.
var config = {};
var https = require('https');
var fs = require('fs');
var path = require('path');

config.load = function(fn) {
  if (process.env.NODE_ENV === 'production') {
    // fs.access passes only an error to its callback; no error means readable.
    fs.access(path.join(__dirname, 'production.js'), fs.R_OK, function(error) {
      if (!error) {
        return fn(require('./production'));
      }
      var file = fs.createWriteStream(path.join(__dirname, 'production.js'));
      var url = process.env.CONFIG_URL;
      if (!url) {
        process.exit(-1);
      } else {
        https.get(url, function(response) {
          response.pipe(file);
          file.on('finish', function() {
            file.close(function() {
              return fn(require('./production'));
            });
          });
        });
      }
    });
  } else if (process.env.NODE_ENV === 'test') {
    return fn(require('./test'));
  } else {
    return fn(require('./development'));
  }
};

module.exports = exports = config;
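Usage would then look something like this; the callback body is illustrative:
// app.js - hypothetical caller of the async loader above.
require('./config').load(function (config) {
  // Everything that depends on configuration starts inside this callback.
  console.log('config loaded, starting app');
});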

Disable Jasmine's fdescribe() and fit() based on environment

fdescribe() and fit() are great for reducing noise when you're working on a subset of tests. I sometimes forget to change them back to describe()/it() before merging my branch into master. (It's okay to have them in a separate branch while working on code - i.e. a pre-commit check wouldn't work for me.)
My CI environment is Codeship. Is there a solution to this problem that would fail the tests in Codeship if it came across any focused methods?
Using something like no-focused-tests would be okay. Any idea how to enable this rule as an error in Codeship and disable it locally?
Edit 14.11.19:
To make things easier I created an installable package you can find at https://www.npmjs.com/package/tslint-jasmine
Original post:
If you're using TSLint and (like me) found that the defocus and tslint-jasmine-noSkipOrFocus checkers don't work for you, I created a Gist for that: https://gist.github.com/djungowski/7d9126bb79970446b4ffeb5656c6bf1f
How to use:
Save the Gist in a folder called TSLint/Rules as noJasmineFocusRule.js
Add the Rules folder to your TSLint config: rulesDirectory: 'TSLint/Rules'
Enable the option with "no-jasmine-focus": true (see the tslint.json sketch below)
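Put together, the TSLint config would look roughly like this; a sketch assuming the folder layout from the steps above:
{
  "rulesDirectory": "TSLint/Rules",
  "rules": {
    "no-jasmine-focus": true
  }
}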
Using something like no-focused-tests would be okay. Any idea how to enable this rule as an error in Codeship and disable it locally?
You could use a combination of environment variables and redefining the fdescribe/fit global functions:
npm i --save cross-env
package.json:
"scripts": {
"test": "jasmine",
"test-safe": "cross-env FOCUSED_TESTS=off jasmine"
},
disableFocusedTestsIfNecessary.js (included after jasmine defines its globals):
if (process.env.FOCUSED_TESTS === "off") {
  console.log("Focused tests must be off");
  global.fdescribe = global.fit = function() {
    throw new Error("fdescribe and fit are disabled in this environment");
  };
} else {
  console.log("Focused tests enabled");
}
Tell codeship to run npm run test-safe instead of npm run test
For those interested, if you are using jasmine and eslint, you can use this plugin to ensure no focused tests: https://github.com/tlvince/eslint-plugin-jasmine.
First, install eslint globally: npm install -g eslint.
Then install the eslint-plugin-jasmine library: npm install --save-dev eslint-plugin-jasmine.
Create a .eslintrc file which would look something like this:
{
  "rules": {
    "semi": 2
  },
  "plugins": ["jasmine"],
  "env": {
    "jasmine": true
  },
  "extends": "plugin:jasmine/recommended"
}
Then you are ready to run the linter: eslint -c ./.eslintrc app.js
I'm late to the party.
I had a similar issue with my builds. We don't use ts / eslint so I just wrote a quick script to throw an error that would fail my dockerfile / build.
Here it is.
#!/bin/sh
files=$(find "./.." -type f -name '*.spec*')
errored=false

echo "Checking for focused tests"

for file in $files
do
  if grep -E "fdescribe|fit" $file; [ $? -eq 0 ]; then
    echo "-Focusing a test in the file $file"
    errored=true
  fi
done

if $errored; then
  echo "Some tests were focused"
  exit 1
else
  echo "No tests were focused"
fi
This isn't the best solution. But it works for my needs.
To set up:
npm i lodash
npm i minimist
I call this from my gulp tasks:
node .\\build\\throwIfFocusedTest.js e2e/
node .\\build\\throwIfFocusedTest.js src/
throwIfFocusedTest.js:
const walkSync = require('./walkSync').default;
const _ = require('lodash');
const argv = require('minimist')(process.argv);
const fs = require('fs');

if (argv._.length !== 3) {
  throw 'expecting 1 command line argument';
}

const directory = argv._[2];
const files = walkSync(directory);
const scriptFiles = _.filter(files, f => f.endsWith('.js') || f.endsWith('.ts'));
const invalidStrings = [
  'fdescribe',
  'fit',
];

_.each(scriptFiles, fileName => {
  const contents = fs.readFileSync(fileName, 'utf8');
  invalidStrings.forEach(is => {
    if (contents.includes(is)) {
      console.error(`throwIfFocusedTest: ${directory}: File contains ${is}: ${fileName}`);
      process.exit(1);
    }
  });
});

console.log(`throwIfFocusedTest: ${directory}: No files contain: ${invalidStrings.join(', ')}`);
walkSync.js:
/**
 * From: https://gist.github.com/kethinov/6658166
 */
exports.default = function walkSync(dir, filelist) {
  var fs = fs || require('fs'),
      files = fs.readdirSync(dir);
  filelist = filelist || [];
  files.forEach(function (file) {
    var path = dir + file;
    if (fs.statSync(dir + file).isDirectory()) {
      filelist = walkSync(dir + file + '/', filelist);
    } else {
      filelist.push(path);
    }
  });
  return filelist;
};
If you're willing to fail when tests are marked for focus or skip (fit + xit), there's a relatively new Karma feature that solves the problem with no plugins. Karma now supports a failOnSkippedTests config file / CLI option which, per the docs, causes "failure on tests deliberately disabled, eg fit() or xit()".
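A minimal karma.conf.js sketch; failOnSkippedTests is the documented option, while the frameworks and files entries are illustrative:
// karma.conf.js
module.exports = function (config) {
  config.set({
    frameworks: ['jasmine'],
    files: ['src/**/*.spec.js'],
    // Fail the run when tests are deliberately disabled,
    // e.g. via fit()/fdescribe() (which skip everything else) or xit().
    failOnSkippedTests: true
  });
};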
