Run an npm script from a gulp task - javascript

How do I run an npm script command from inside a gulp task?
package.json
"scripts":
{
"tsc": "tsc -w"
}
gulpfile.js
gulp.task('compile:app', function(){
return gulp.src('src/**/*.ts')
.pipe(/*npm run tsc*/)
.pipe(gulp.dest('./dist'))
.pipe(connect.reload());
});
I want to do this because running npm run tsc does not give me any errors, but if I use gulp-typescript to compile the .ts files I get a bunch of errors.

You can get the equivalent using gulp-typescript
var gulp = require('gulp');
var ts = require('gulp-typescript');
gulp.task('default', function () {
var tsProject = ts.createProject('tsconfig.json');
var result = tsProject.src().pipe(ts(tsProject));
return result.js.pipe(gulp.dest('release'));
});
gulp.task('watch', ['default'], function() {
gulp.watch('src/*.ts', ['default']);
});
Then in your package.json:
"scripts": {
"gulp": "gulp",
"gulp-watch": "gulp watch"
}
Then run
npm run gulp-watch
Alternatively, using gulp-shell:
var gulp = require('gulp');
var shell = require('gulp-shell');
var connect = require('gulp-connect');

gulp.task('default', function () {
  return gulp.src('src/**/*.ts')
    .pipe(shell('npm run tsc'))
    .pipe(gulp.dest('./dist'))
    .pipe(connect.reload());
});
gulp-shell has been blacklisted; you can see why here.
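If you would rather avoid a blacklisted plugin, here is a minimal sketch that shells out with Node's built-in child_process module instead (not part of the original answer; it reuses the compile:app task name and the tsc script from the question):
var gulp = require('gulp');
var exec = require('child_process').exec;

gulp.task('compile:app', function (done) {
  // Note: the question's "tsc" script runs `tsc -w` (watch mode) and never exits;
  // for a one-shot compile inside a task, run plain `tsc` instead.
  exec('npm run tsc', function (err, stdout, stderr) {
    console.log(stdout);
    console.error(stderr);
    done(err);
  });
});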
Another alternative would be setting up webpack.

I wasted about an hour on this simple thing while looking for a reasonably complete answer, so I'm adding one here:
If your question is only about TypeScript (tsc), see https://stackoverflow.com/a/36633318/984471
Otherwise, see below for a generic answer.
The question title is generic, so a generic example is given below first, then the answer.
Generic example:
Install Node.js if you haven't already, preferably the LTS version, from https://nodejs.org/
Install the following:
npm install --save-dev gulp gulp-run
Make sure package.json has the contents below (other contents can also be there):
{
  "name": "myproject",
  "scripts": {
    "cmd1": "echo \"yay! cmd1 command is run.\" && exit 1"
  }
}
Create a file gulpfile.js with the contents below:
var gulp = require('gulp');
var run = require('gulp-run');

gulp.task('mywatchtask1', function () {
  // watch for javascript file (*.js) changes in the current directory (./)
  gulp.watch('./*.js', function () {
    // run the npm script called `cmd1` when one of the js files changes
    return run('npm run cmd1').exec();
    // uncomment below, and comment above, if you have problems
    // return run('echo Hello World').exec();
  });
});
Run the task mywatchtask1 using gulp:
gulp mywatchtask1
Now gulp is watching for js file changes in the current directory.
If any changes happen, the npm script cmd1 is run; it will print "yay! cmd1 command is run." every time one of the js files changes.
For this question, as another example:
a) package.json will have:
"tsc": "tsc -w",
instead of:
"cmd1": "echo \"yay! cmd1 command is run.\" && exit 1",
b) and gulpfile.js will have:
return run('npm run tsc').exec();
instead of:
return run('npm run cmd1').exec();
Hope that helps.
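If you just want to run the npm script once from a plain gulp task, rather than inside a watcher, here is a minimal sketch using the same gulp-run package (the task name runcmd1 is made up):
// gulpfile.js
var gulp = require('gulp');
var run = require('gulp-run');

gulp.task('runcmd1', function () {
  // executes `npm run cmd1` and returns the stream so gulp knows when it finishes
  return run('npm run cmd1').exec();
});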

You can try to implement it using the Node child_process module, or use https://www.npmjs.com/package/gulp-run:
var gulp = require('gulp');
var run = require('gulp-run');
var connect = require('gulp-connect');

gulp.task('compile:app', function () {
  return gulp.src(['src/**/*.js', 'src/**/*.map'])
    .pipe(run('npm run tsc'))
    .pipe(gulp.dest('./dist'))
    .pipe(connect.reload());
});

Related

Problem reading an environment variable in Node.js

Here is my excel.js:
let test = async () => {
console.log(process.env.DATABASE_HOST);
.......
}
test();
Here is my package.json fragment:
"scripts": {
.............
"excel": "cross-env NODE_ENV=development node ./server/excel.js",
"test": "react-scripts test"
}
My .env.development is stored in the application root folder.
Here is my .env.development:
DATABASE_HOST=dbServer
When I execute the following command line in the application root folder:
npm run excel
It should print "dbServer"; unfortunately, it returns undefined.
How can I fix it?
Install the dotenv package and require it: require('dotenv').config()
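For example, a minimal sketch of excel.js (assuming dotenv is installed; the path option is needed here because the file is named .env.development rather than the default .env):
// excel.js
require('dotenv').config({ path: '.env.development' });

let test = async () => {
  console.log(process.env.DATABASE_HOST); // should now print "dbServer"
};
test();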

How to run webpack with certain options from node

I currently have a package.json file that includes this script:
"build": "webpack --inline --colors --progress --display-error-details --display-cached",
I also have a webpack.config.js at the root of my repository.
I'm trying to make my npm scripts less verbose by moving them to separate .js files. I've done some simple ones (clean with rimraf and a simple copy), but I'm struggling with calling webpack from a (node) javascript file. This is what I've tried:
// contents of ./build/compile.js
var webpack = require('webpack');
var webpackConfig = require('../webpack.config.js');
webpack(webpackConfig);
This does nothing. I've also tried:
// contents of ./build/compile.js
var webpack = require('webpack');
webpack({
inline: true,
colors: true,
// and so on
});
This also does nothing. Just calling webpack() also does nothing...
And by nothing, I mean it also doesn't throw an error.
So how can I call webpack, make it use my config file, but also pass along the flags like --inline --colors --progress ...?
Regarding progress, this answer worked for me:
https://stackoverflow.com/a/31069781
var webpack = require('webpack');
var ProgressPlugin = require('webpack/lib/ProgressPlugin');

// assumes `cfg` is your webpack config object and that this code runs
// inside a Promise executor providing resolve/reject
var compiler = webpack(cfg);
compiler.apply(new ProgressPlugin((percentage, msg) => {
  console.log((percentage * 100) + '%', msg);
}));
compiler.run((err, stats) => {
  if (err) return reject(err);
  resolve(stats);
});
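As for why webpack(webpackConfig) with no callback appears to do nothing: with webpack's Node API, passing only a config returns a Compiler that does not build until you call run() or watch() on it, or unless you pass a callback. A minimal sketch (assuming the webpack.config.js from the question):
// build/compile.js
var webpack = require('webpack');
var webpackConfig = require('../webpack.config.js');

webpack(webpackConfig, function (err, stats) {
  if (err) {
    console.error(err);
    return;
  }
  // print roughly the same summary the CLI would, with colors
  console.log(stats.toString({ colors: true }));
});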

How to automatically zip files with Node.js and npm

Is there a way to automatically zip certain files at the build time with Node.js and npm?
For example, I have a project, that file structure looks like this:
Project/
--lib/
--node_modules/
--test/
--index.js
--package.json
I want to be able to zip lib folder, certain modules from node_modules and index.js into some zip archive to upload it on the AWS Lambda, for example. I do not need test folder or test Node.js modules (mocha and chai) to be zipped. I have even created a bash script for generating zip file, but is there a way to automatically execute this script, when 'npm install' is called?
This should be a standard problem and it should have a standard solution, but I was unable to discover such.
UPDATE
Thanks to michael, I decided to use gulp. This is my script, in case someone else needs it for AWS Lambda:
var gulp = require('gulp');
var clean = require('gulp-clean');
var zip = require('gulp-zip');
var merge = require('merge-stream');
gulp.task('clean', function () {
var build = gulp.src('build', {read: false})
.pipe(clean());
var dist = gulp.src('dist', {read: false})
.pipe(clean());
return merge(build, dist);
});
gulp.task('build', function() {
var index = gulp.src('index.js')
.pipe(gulp.dest('build'));
var lib = gulp.src('lib/**')
.pipe(gulp.dest('build/lib'));
var async = gulp.src('node_modules/async/**')
.pipe(gulp.dest('build/node_modules/async'));
var collections = gulp.src('node_modules/collections/**')
.pipe(gulp.dest('build/node_modules/collections'));
var underscore = gulp.src('node_modules/underscore/**')
.pipe(gulp.dest('build/node_modules/underscore'));
var util = gulp.src('node_modules/util/**')
.pipe(gulp.dest('build/node_modules/util'));
var xml2js = gulp.src('node_modules/xml2js/**')
.pipe(gulp.dest('build/node_modules/xml2js'));
return merge(index, lib, async, collections, underscore, util, xml2js);
});
gulp.task('zip', ['build'], function() {
return gulp.src('build/*')
.pipe(zip('archive.zip'))
.pipe(gulp.dest('dist'));
});
gulp.task('default', ['zip']);
I realize this answer comes years too late for the original poster. But I had virtually the same question about packaging up a Lambda function, so for posterity, here's a solution that doesn't require any additional devDependencies (like gulp or grunt) and just uses npm pack along with the following package.json (but does assume you have sed and zip available to you):
{
"name": "my-lambda",
"version": "1.0.0",
"scripts": {
"postpack": "tarball=$(npm list --depth 0 | sed 's/#/-/g; s/ .*/.tgz/g; 1q;'); tar -tf $tarball | sed 's/^package\\///' | zip -#r package; rm $tarball"
},
"files": [
"/index.js",
"/lib"
],
"dependencies": {
"async": "*",
"collections": "*",
"underscore": "*",
"util": "*",
"xml2js": "*"
},
"bundledDependencies": [
"async",
"collections",
"underscore",
"util",
"xml2js"
],
"devDependencies": {
"chai": "*",
"mocha": "*"
}
}
Given the above package.json, calling npm pack will produce a package.zip file that contains:
index.js
lib/
node_modules/
├── async/
├── collections/
├── underscore/
├── util/
└── xml2js/
The files array is a whitelist of what to include. Here, it's just index.js and the lib directory.
However, npm will also automatically include package.json, README (and variants like README.md), CHANGELOG (and its variants), and LICENSE (and the alternative spelling LICENCE) unless you explicitly exclude them (e.g. with .npmignore).
The bundledDependencies array specifies what packages to bundle. In this case, it's all the dependencies but none of the devDependencies.
Finally, the postpack script is run after npm pack because npm pack generates a tarball, but we need to generate a zip for AWS Lambda.
A more detailed explanation of what the postpack script is doing is available at https://hackernoon.com/package-lambda-functions-the-easy-way-with-npm-e38fc14613ba (and is also the source of the general approach).
If you're UNIX-based you could also just use the zip command in one of your scripts:
"scripts": {
"zip": "zip -r build.zip build/"
"build": "build",
"build-n-zip": "build && zip
}
The above creates a build.zip at the root, which is a zipped up version of the /build folder.
If you wanted to zip multiple folders/files, just add them to the end:
"scripts": {
"zip": "zip -r build.zip build/ some-file.js some-other-folder/"
}
Note
If a build.zip already exists in the folder, the default behaviour is for zip to add files to that existing archive. So anyone who is continuously building will probably want to delete the build.zip first:
"scripts": {
"zip": "rm -f build.zip && zip -r build.zip build",
"build": "build",
"build-n-zip": "yarn build && yarn zip"
}
I would go with gulp, using gulp-sftp, gulp-tar and gulp-gzip, plus an alias as the command. Create a file called .bash_aliases in your user's home folder containing:
alias installAndUpload='npm install && gulp runUploader'
After a reboot you can call both actions at once with this alias.
A gulp file could look something like this
var gulp = require('gulp');
var watch = require('gulp-watch');
var sftp = require('gulp-sftp');
var tar = require('gulp-tar');
var gzip = require('gulp-gzip');

gulp.task('runUploader', function () {
  return gulp.src('./path/to/folder/to/compress/**')
    .pipe(tar('archive.tar'))
    .pipe(gzip())
    .pipe(gulp.dest('path/to/folder/to/store')) // if you want a local copy
    .pipe(sftp({
      host: 'website.com',
      user: 'johndoe',
      pass: '1234'
    }));
});
Of course, you can also add gulp-watch to automatically create the tar/zip and upload it whenever there is a change in the directory.
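For instance, a small sketch of such a watch task, using gulp's built-in gulp.watch with gulp 3.x syntax (rather than the gulp-watch plugin; the folder path is the same placeholder as above):
gulp.task('watch', function () {
  // re-run the uploader task whenever anything in the folder changes
  gulp.watch('./path/to/folder/to/compress/**', ['runUploader']);
});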
You should take a look at npm scripts.
You'll still need a bash script lying around in your repository, but it will be automatically triggered by certain npm tasks when they are executed.
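For example, a minimal sketch using npm's postinstall lifecycle hook (assuming your existing bash script is named zip.sh; the name is hypothetical):
"scripts": {
  "postinstall": "./zip.sh"
}
npm runs postinstall automatically after npm install completes, so the archive gets rebuilt on every install.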
npm-pack-zip worked for me.
npm install --save-dev npm-pack-zip
To publish the whole lambda using the AWS CLI, I used this npm script in package.json:
"publish": "npm-pack-zip && aws lambda update-function-code --function-name %npm_package_name% --zip-file fileb://%npm_package_name%.zip && rm %npm_package_name%.zip"
You can use zip-build; this little package will use the data in your package.json file and create a compressed file named project-name_version.zip.
Disclaimer: I am a developer of this library.
How to use zip-build
Just install in your project as dev dependency with:
$ npm install --save-dev zip-build
Then modify the build script in your package.json, adding && zip-build at the end, like this:
"scripts": {
"build": your-build-script && zip-build
}
If your build directory is named something other than build, and your desired directory for compressed files is named something other than dist, you can provide the directory names as arguments to zip-build:
"scripts": {
"build": your-build-script && zip-build build-dirname zip-dirname
}
If you need to automate tasks, take a look at Grunt or Gulp.
In the case of Grunt, the needed plugins are:
https://www.npmjs.com/package/grunt-zip
https://www.npmjs.com/package/grunt-aws-lambda
Check out my gist at https://gist.github.com/ctulek/6f16352ebdfc166ce905
This uses gulp for all the tasks you mentioned except creating the lambda function initially (it only updates the code)
It assumes every lambda function is implemented in its own folder, and you need to define your AWS credential profile.

Gulp command opens gulp.js in notepad instead of running it

I installed it correctly I think.
My package.json
{
"name": "my-project",
"version": "0.1.0",
"devDependencies": {
"gulp": "^3.8.11",
"gulp-concat": "^2.5.2"
}
}
my gulp.js
var gulp = require('gulp');
var concat = require('gulp-concat');
gulp.task('scripts', function() {
return gulp.src('js/*.js')
.pipe(concat('main.js'))
.pipe(gulp.dest('build/js'));
});
gulp.task('default', ['scripts']);
My folders are as follows:
ROOT/package.json
ROOT/gulp.js
ROOT/node-modules/ <-- my modules are here
ROOT/js/ <-- my js are here
When I run gulp on the command line from the ROOT folder, gulp.js just opens in Windows Notepad and that's all.
Why is it doing that?
Problem solved by a simple action: renaming gulp.js to gulpfile.js (sigh...)
Two steps:
Step 1) Rename gulp.js to gulpfile.js, as explained in the answer above.
Step 2) Install the CLI with npm install -g gulp-cli

Sending command line arguments to npm script

The scripts portion of my package.json currently looks like this:
"scripts": {
"start": "node ./script.js server"
}
...which means I can run npm start to start the server. So far so good.
However, I would like to be able to run something like npm start 8080 and have the argument(s) passed to script.js (e.g. npm start 8080 => node ./script.js server 8080). Is this possible?
npm 2 and newer
It's possible to pass args to npm run since npm 2 (2014). The syntax is as follows:
npm run <command> [-- <args>]
Note the -- separator, used to separate the params passed to the npm command itself from the params passed to your script.
With the example package.json:
"scripts": {
"grunt": "grunt",
"server": "node server.js"
}
here's how to pass the params to those scripts:
npm run grunt -- task:target // invokes `grunt task:target`
npm run server -- --port=1337 // invokes `node server.js --port=1337`
Note: If your param does not start with - or --, then having an explicit -- separator is not needed; but it's better to do it anyway for clarity.
npm run grunt task:target // invokes `grunt task:target`
Note below the difference in behavior (test.js has console.log(process.argv)): the params which start with - or -- are passed to npm and not to the script, and are silently swallowed there.
$ npm run test foobar
['C:\\Program Files\\nodejs\\node.exe', 'C:\\git\\myrepo\\test.js', 'foobar']
$ npm run test -foobar
['C:\\Program Files\\nodejs\\node.exe', 'C:\\git\\myrepo\\test.js']
$ npm run test --foobar
['C:\\Program Files\\nodejs\\node.exe', 'C:\\git\\myrepo\\test.js']
$ npm run test -- foobar
['C:\\Program Files\\nodejs\\node.exe', 'C:\\git\\myrepo\\test.js', 'foobar']
$ npm run test -- -foobar
['C:\\Program Files\\nodejs\\node.exe', 'C:\\git\\myrepo\\test.js', '-foobar']
$ npm run test -- --foobar
['C:\\Program Files\\nodejs\\node.exe', 'C:\\git\\myrepo\\test.js', '--foobar']
The difference is clearer when you use a param actually used by npm:
$ npm test --help // this is disguised `npm --help test`
npm test [-- <args>]
aliases: tst, t
To get the parameter value, see this question. For reading named parameters, it's probably best to use a parsing library like yargs or minimist; nodejs exposes process.argv globally, containing command line parameter values, but this is a low-level API (whitespace-separated array of strings, as provided by the operating system to the node executable).
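For instance, a minimal sketch with minimist (assuming it is installed; server.js is the script from the package.json above):
// server.js
var argv = require('minimist')(process.argv.slice(2));
// `npm run server -- --port=1337` makes argv.port equal 1337
console.log('port:', argv.port);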
You asked to be able to run something like npm start 8080. This is possible without needing to modify script.js or configuration files as follows.
For example, in your "scripts" JSON value, include:
"start": "node ./script.js server $PORT"
And then from the command-line:
$ PORT=8080 npm start
I have confirmed that this works using bash and npm 1.4.23. Note that this work-around does not require GitHub npm issue #3494 to be resolved.
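A quick sketch of how script.js could pick that value up (the argv indexes follow from the node ./script.js server $PORT invocation above):
// script.js
var command = process.argv[2];      // 'server'
var port = process.argv[3] || 8080; // '8080' when PORT=8080 is set
console.log(command, port);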
You could also do that:
In package.json:
"scripts": {
"cool": "./cool.js"
}
In cool.js:
console.log({ myVar: process.env.npm_config_myVar });
In CLI:
npm --myVar=something run-script cool
Should output:
{ myVar: 'something' }
Update: Using npm 3.10.3, it appears that it lowercases the process.env.npm_config_ variables? I'm also using better-npm-run, so I'm not sure if this is vanilla default behavior or not, but this answer is working. Instead of process.env.npm_config_myVar, try process.env.npm_config_myvar
jakub.g's answer is correct; however, an example using grunt seems a bit complex.
So here is my simpler answer:
- Sending a command line argument to an npm script
Syntax for sending command line arguments to an npm script:
npm run [command] [-- <args>]
Imagine we have an npm start task in our package.json to kick off webpack dev server:
"scripts": {
"start": "webpack-dev-server --port 5000"
},
We run this from the command line with npm start
Now if we want to pass in a port to the npm script:
"scripts": {
"start": "webpack-dev-server --port process.env.port || 8080"
},
running this and passing the port e.g. 5000 via command line would be as follows:
npm start --port:5000
- Using package.json config:
As mentioned by jakub.g, you can alternatively set params in the config of your package.json
"config": {
"myPort": "5000"
}
"scripts": {
"start": "webpack-dev-server --port process.env.npm_package_config_myPort || 8080"
},
npm start will use the port specified in your config, or alternatively you can override it
npm config set myPackage:myPort 3000
- Setting a param in your npm script
An example of reading a variable set in your npm script. In this example NODE_ENV
"scripts": {
"start:prod": "NODE_ENV=prod node server.js",
"start:dev": "NODE_ENV=dev node server.js"
},
read NODE_ENV in server.js either prod or dev
var env = process.env.NODE_ENV || 'prod'
if(env === 'dev'){
var app = require("./serverDev.js");
} else {
var app = require("./serverProd.js");
}
As of npm 2.x, you can pass args into run-scripts by separating with --
Terminal
npm run-script start -- --foo=3
Package.json
"start": "node ./index.js"
Index.js
console.log('process.argv', process.argv);
I had been using this one-liner in the past, and after a bit of time away from Node.js had to try and rediscover it recently. Similar to the solution mentioned by @francoisrv, it utilizes the npm_config_* variables.
Create the following minimal package.json file:
{
"name": "argument",
"version": "1.0.0",
"scripts": {
"argument": "echo \"The value of --foo is '${npm_config_foo}'\""
}
}
Run the following command:
npm run argument --foo=bar
Observe the following output:
The value of --foo is 'bar'
All of this is nicely documented in the npm official documentation:
https://docs.npmjs.com/using-npm/config
Note: The Environment Variables heading explains that variables inside scripts do behave differently to what is defined in the documentation. This is true when it comes to case sensitivity, as well as whether the argument is defined with a space or an equals sign.
Note: If you are using an argument with hyphens, these will be replaced with underscores in the corresponding environment variable. For example, npm run example --foo-bar=baz would correspond to ${npm_config_foo_bar}.
Note: For non-WSL Windows users, see @Doctor Blue's comments below... TL;DR replace ${npm_config_foo} with %npm_config_foo%.
Use process.argv in your code, then just provide a trailing $* in your script's value entry.
As an example, try it with a simple script, echoargs.js, which just logs the provided arguments to standard out:
console.log('arguments: ' + process.argv.slice(2));
package.json:
"scripts": {
"start": "node echoargs.js $*"
}
Examples:
> npm start 1 2 3
arguments: 1,2,3
process.argv[0] is the executable (node), process.argv[1] is your script.
Tested with npm v5.3.0 and node v8.4.0
Most of the answers above cover just passing the arguments into your NodeJS script, called by npm. My solution is for general use.
Just wrap the npm script with a shell interpreter (e.g. sh) call and pass the arguments as usual. The only exception is that the first argument number is 0.
For example, you want to add the npm script someprogram --env=<argument_1>, where someprogram just prints the value of the env argument:
package.json
"scripts": {
"command": "sh -c 'someprogram --env=$0'"
}
When you run it:
% npm run -s command my-environment
my-environment
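If you pass more than one positional argument this way, the later ones arrive as $1, $2, and so on. A sketch extending the same someprogram example (the --region flag is made up):
"scripts": {
  "command": "sh -c 'someprogram --env=$0 --region=$1'"
}
Running npm run -s command my-environment eu-west-1 would then invoke someprogram --env=my-environment --region=eu-west-1.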
If you want to pass arguments to the middle of an npm script, as opposed to just having them appended to the end, then inline environment variables seem to work nicely:
"scripts": {
"dev": "BABEL_ARGS=-w npm run build && cd lib/server && nodemon index.js",
"start": "npm run build && node lib/server/index.js",
"build": "mkdir -p lib && babel $BABEL_ARGS -s inline --stage 0 src -d lib",
},
Here, npm run dev passes the -w watch flag to babel, but npm run start just runs a regular build once.
For PowerShell users on Windows
The accepted answer did not work for me with npm 6.14. Neither omitting -- nor including it once works. However, putting -- twice, or putting "--" in quotes once before the arguments, does the trick. Example:
npm run <my_script> -- -- <my arguments like --this>
Suspected reason
Like in bash, -- instructs PowerShell to treat all following arguments as literal strings, and not as options (e.g. see this answer). The issue seems to be that the command is interpreted one more time than expected, losing the '--'. For instance, by doing
npm run <my_script> -- --option value
npm will run
<my_script> value
However, doing
npm run <my_script> "--" --option value
results in
<my_script> "--option" "value"
which works fine.
This doesn't really answer your question but you could always use environment variables instead:
"scripts": {
"start": "PORT=3000 node server.js"
}
Then in your server.js file:
var port = process.env.PORT || 3000;
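Note that the VAR=value prefix syntax is shell-specific and won't work in the Windows cmd shell; there you could reach for the cross-env package mentioned in another answer, e.g. (a sketch, not part of the original answer):
"scripts": {
  "start": "cross-env PORT=3000 node server.js"
}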
I found this question while trying to solve my issue with running the sequelize seed:generate CLI command:
node_modules/.bin/sequelize seed:generate --name=user
Let me get to the point. I wanted to have a short script command in my package.json file and to provide the --name argument at the same time.
The answer came after some experiments. Here is my command in package.json
"scripts: {
"seed:generate":"NODE_ENV=development node_modules/.bin/sequelize seed:generate"
}
... and here is an example of running it in the terminal to generate a seed file for a user:
> yarn seed:generate --name=user
> npm run seed:generate -- --name=user
FYI
yarn -v
1.6.0
npm -v
5.6.0
Note: This approach modifies your package.json on the fly; use it only if you have no alternative.
I had to pass command line arguments to my scripts which were something like:
"scripts": {
"start": "npm run build && npm run watch",
"watch": "concurrently \"npm run watch-ts\" \"npm run watch-node\"",
...
}
So, this means I start my app with npm run start.
Now if I want to pass some arguments, I would start with maybe:
npm run start -- --config=someConfig
What this does is: npm run build && npm run watch -- --config=someConfig. The problem with this is that it always appends the arguments to the end of the script, so the chained scripts don't get these arguments (the args may or may not be required by all of them, but that's a different story). Further, when the linked scripts are called, those scripts won't get the passed arguments, i.e. the watch script won't get them.
In production my app runs as an .exe, so passing the arguments to the exe works fine, but if I want to do this during development it gets problematic.
I couldn't find any proper way to achieve this, so this is what I have tried.
I created a JavaScript file, start-script.js, at the parent level of the application. I have a "default.package.json", and instead of maintaining "package.json", I maintain "default.package.json". The purpose of start-script.js is to read default.package.json, extract the scripts, look for npm run scriptname, and append the passed arguments to those scripts. After this, it creates a new package.json with the data from default.package.json and the modified scripts, and then calls npm run start:
const fs = require('fs');
const { spawn } = require('child_process');
// open default.package.json
const defaultPackage = fs.readFileSync('./default.package.json');
try {
const packageOb = JSON.parse(defaultPackage);
// loop over the scripts present in this object, edit them with flags
if ('scripts' in packageOb && process.argv.length > 2) {
const passedFlags = ` -- ${process.argv.slice(2).join(' ')}`;
// assuming the script names have words, : or -, modify the regex if required.
const regexPattern = /(npm run [\w:-]*)/g;
const scriptsWithFlags = Object.entries(packageOb.scripts).reduce((acc, [key, value]) => {
const patternMatches = value.match(regexPattern);
// loop over all the matched strings and attach the desired flags.
if (patternMatches) {
for (let eachMatchedPattern of patternMatches) {
const startIndex = value.indexOf(eachMatchedPattern);
const endIndex = startIndex + eachMatchedPattern.length;
// rebuild the string, keeping the surrounding text and appending the flags right after the matched command.
value = value.slice(0, startIndex) + eachMatchedPattern + passedFlags + value.slice(endIndex);
}
}
acc[key] = value;
return acc;
}, {});
packageOb.scripts = scriptsWithFlags;
}
const modifiedJSON = JSON.stringify(packageOb, null, 4);
fs.writeFileSync('./package.json', modifiedJSON);
// now run your npm start script
let cmd = 'npm';
// check if this works in your OS
if (process.platform === 'win32') {
cmd = 'npm.cmd'; // https://github.com/nodejs/node/issues/3675
}
spawn(cmd, ['run', 'start'], { stdio: 'inherit' });
} catch(e) {
console.log('Error while parsing default.package.json', e);
}
Now, instead of doing npm run start, I do node start-script.js --c=something --r=somethingElse
The initial run looks fine, but I haven't tested it thoroughly. Use it if you like for your app development.
I find it's possible to just pass variables exactly as you would to Node.js:
// index.js
console.log(process.env.TEST_ENV_VAR)
// package.json
...
"scripts": { "start": "node index.js" },
...
TEST_ENV_VAR=hello npm start
Prints out "hello"
Separate your arguments from the script using -- and add all the required arguments; we can later access them by index.
npm run start -- myemail@gmail.com 100
You can get params in node using
const params = process.argv.slice(2);
console.log(params);
Output
['myemail@gmail.com', '100']
From what I see, people use package.json scripts when they would like to run scripts in a simpler way. For example, to use the nodemon installed in the local node_modules, we can't call nodemon directly from the CLI, but we can call it by using ./node_modules/nodemon/nodemon.js. So, to simplify this long typing, we can put this...
...
"scripts": {
  "start": "nodemon app.js"
}
...
... then call npm start to use nodemon with app.js as the first argument.
What I'm trying to say is: if you just want to start your server with the node command, I don't think you need to use scripts; typing npm start or node app.js takes the same effort.
But if you do want to use nodemon and want to pass a dynamic argument, don't use scripts either. Try using a symlink instead.
For example, using migrations with sequelize, I create a symlink...
ln -s node_modules/sequelize/bin/sequelize sequelize
... And I can pass any argument when I call it ...
./sequelize -h /* show help */
./sequelize -m /* upgrade migration */
./sequelize -m -u /* downgrade migration */
etc...
At this point, using a symlink is the best way I could figure out, but I don't really think it's best practice.
I would also welcome your opinions on my answer.
I know there is an approved answer already, but I kinda like this JSON approach.
npm start '{"PROJECT_NAME_STR":"my amazing stuff", "CRAZY_ARR":[0,7,"hungry"], "MAGICAL_NUMBER_INT": 42, "THING_BOO":true}';
Usually I have like 1 var I need, such as a project name, so I find this quick n' simple.
Also I often have something like this in my package.json
"scripts": {
"start": "NODE_ENV=development node local.js"
}
And being greedy I want "all of it", NODE_ENV and the CMD line arg stuff.
You simply access these things like so in your file (in my case local.js)
console.log(process.env.NODE_ENV, starter_obj.CRAZY_ARR, starter_obj.PROJECT_NAME_STR, starter_obj.MAGICAL_NUMBER_INT, starter_obj.THING_BOO);
You just need to have this bit above it (I'm running v10.16.0 btw)
var starter_obj = JSON.parse(JSON.parse(process.env.npm_config_argv).remain[0]);
Anyhoo, question already answered. Thought I'd share, as I use this method a lot.
I settled for something like this; look at the test-watch script:
"scripts": {
"dev": "tsc-watch --onSuccess \"node ./dist/server.js\"",
"test": "tsc && cross-env NODE_OPTIONS=--experimental-vm-modules NODE_NO_WARNINGS=1 jest",
"test-watch": "cross-env NODE_OPTIONS=--experimental-vm-modules NODE_NO_WARNINGS=1 tsc-watch --onSuccess",
},
You invoke the test-watch script like this:
// Run all tests with odata in their name
npm run test-watch "jest odata"
npm run script_target -- <argument>. Basically this is the way of passing command line arguments, but it will only work when the script has just one command running. For example, I run the command npm run start -- 4200 with:
"script":{
"start" : "ng serve --port="
}
This works for passing command line parameters, but what if we run more than one command together, like npm run build c:/workspace/file, with:
"scripts": {
  "build": "copy c:/file <arg> && ng build"
}
but it will be interpreted like this when run:
copy c:/file && ng build c:/workspace/file
whereas we expected something like this:
copy c:/file c:/workspace/file && ng build
Note: command line parameters only work as expected when there is only one command in a script.
Some answers above say that you can access the command line parameters using the $ symbol, but that will not work here.
Try the cross-env npm package.
It's easy to use, easy to install, and works on all platforms.
Example:
Set arguments for the command:
// package.json
"scripts": {
  "test": "node test.js",
  "test-with-env-arg": "cross-env YourEnvVarName=strValue yarn test"
}
Get the arguments from process.env:
// test.js
const getCommandLineArg = process.env.YourEnvVarName === 'true'; // note: process.env.* values are strings, not numbers or booleans
I had the same issue when I needed to deploy to different environments.
Here is the package.json before the update:
"scripts": {
  "deploy-sit": "sls deploy --config resources-sit.yml",
  "deploy-uat": "sls deploy --config resources-uat.yml",
  "deploy-dev": "sls deploy --config resources-dev.yml"
}
But here is the better method: use an environment variable rather than repeating ourselves:
"scripts": {
  "deploy-env": "sls deploy --config resources-$ENV_VAR.yml"
}
Finally, you can deploy by running:
ENV_VAR=dev npm run deploy-env
Using npm 9.3.1, you can pass variables to scripts this way:
Inside the "scripts" section of package.json, reference the variable with "$":
"scripts": {
...
"generate": "nest g controller $NAME && nest g service $NAME && nest g module $NAME"
},
When you call the script, just set the variable and its value on the command line before the script:
NAME=auth npm run generate
or
NAME=auth yarn generate
In this example, the script will generate controllers, services, and modules with the given name for a Nest application.
