I am working on a browserify transform and I am passing in options.
//excerpt from package.json of my application that is using my transform and extension
"browserify": {
"transform": [
["mytransform", {"extensions": ["my-extension"] } ]
]
}
The transform is working and is called on the first file and then on files depended on by that file. The problem I am having is that I am losing the options.
In my transform I have
// Browserify transform entry point: invoked once per file with the file's
// path and the options object taken from package.json's
// "browserify.transform" entry.
// NOTE(review): options arrive only for files covered by this package's
// config — transforms are not applied inside node_modules unless the
// transform is registered as global.
module.exports = function (file, options) {
console.log('processing? ', file, options);
// Guard against being invoked with no options at all.
options = options || {};
options.extensions = options.extensions || [];
// Resolve each configured extension id into an instantiated extension.
var extensions = options.extensions.map(function(extensionId){
return require(extensionId)();
}),
data = '';
// Buffer the whole file in write(), then emit processed output in end().
var stream = through(write, end);
return stream;
function write(buf) { data += buf; }
function end() {
// processFile is defined elsewhere; when it returns a falsy value the
// original source is passed through unchanged.
var out = processFile(extensions, file, data);
stream.queue(out || data);
stream.queue(null);
}
};
The following is the output. The options are there for the first file, but then nothing in the second file
processing? /path/to/cascadingAmdDepWithPlugin.js { extensions: [ 'my-extension' ]}
processing? /path/to/node_modules/dojo/number.js { }
How do I get my options to carry through to all files being handled by my transform?
Things are working as expected. I didn't read the documentation carefully enough. Transforms aren't applied to files within node_modules. Solutions are to either specify the transform as global or update package.json of the project within node_modules. Once I did either of those things, my code worked as expected.
Related
I am working on a WordPress plugin and have all the files in my working directory and run gulp in that project folder. Now, I'd like to have a watch task that copies all the changes to my local WP installation for testing.
Therefore I am looking for a way to sync (only in one direction) the project folder with the plugin folder of WP.
I managed to get it to work with gulp-directory-sync
...
var dirSync = require("gulp-directory-sync");
var localDir = "../newDir/";
var buildDir = "./buildDir/";
...
// One-way sync: mirror the build directory into the local WP plugin folder
// using gulp-directory-sync (handles copies AND deletions).
function copy_to_local_folder() {
return pipeline(
gulp.src(buildDir+'**/*'),
// dirSync diffs buildDir against localDir and applies the differences.
dirSync( buildDir, localDir, { printSummary: true } )
);
}
// Re-run the copy task whenever anything under the build directory changes.
// FIX: the original snippet never closed the function body, so
// `exports.default = watch_local;` ended up INSIDE watch_local and the file
// was a syntax error. Close the brace and export at module scope.
function watch_local() {
  gulp.watch(buildDir+'**/*', copy_to_local_folder);
}
exports.default = watch_local;
However, the plugin hasn't been updated in 4 years and, according to this answer, it is not doing it the proper "gulp way" (e.g. not using gulp.src), and this task should be possible with other basic gulp functions.
Copying changed files is pretty easy, but keeping track of deleted files is more complicated. I would also prefer to only update changed/deleted/new files and not clear the folder every time before copying all files.
Starting with the updated code in the aforementioned answer, I tried to implement it and made changes to make it work.
...
var newer = require("gulp-newer");
var pipeline = require("readable-stream").pipeline;
var del = require("del");
var localDir = "../newDir/";
var buildDir = "./buildDir/";
// Copy only files that are newer than their counterpart in localDir.
function copy_to_local_folder() {
return pipeline(
gulp.src([buildDir+'**/*']),
// gulp-newer drops files whose destination copy is already up to date.
newer(localDir),
gulp.dest(localDir),
);
}
// Watch the build directory; copies are handled by the task, deletions by a
// manual 'unlink' listener (gulp streams only ever see existing files).
function watch_local() {
var watcher = gulp.watch(buildDir + '**/*', copy_to_local_folder );
// Fires when a watched FILE is removed — not for removed directories.
watcher.on('unlink', function(path) {
console.log(path);
// Map the deleted build path onto the corresponding local path.
var newPath = './'+path;
newPath = newPath.replace(buildDir, localDir);
console.log(newPath);
(async () => {
// NOTE(review): dryRun:true only REPORTS what would be deleted —
// nothing is actually removed; drop dryRun to really delete.
const deletedPaths = await del(newPath, {dryRun: true, force: true});
console.log('Deleted files and directories:\n', deletedPaths.join('\n'));
})();
});
}
exports.default = watch_local;
With this code, the folder gets updated when I change or delete files, but it does not trigger when I delete an entire folder. Which is probably because I use unlink and not unlinkDir. But even if I use the version of the function below, it doesn't get triggered by deleting a folder (with containing files).
// Attempted directory-deletion handler; per the author's tests below it is
// never triggered when a folder is deleted.
watcher.on('unlinkDir', function(path) {
console.log('folder deleted');
console.log(path);
// Translate the deleted build folder into its local counterpart.
var newPath = './'+path;
newPath = newPath.replace(buildDir, localDir);
console.log(newPath);
});
What am I doing wrong?
Or is there in general a better way to achieve this?
PS: I'm using
node v11.15.0
gulp v4.0.2
on Linux
deleting files and folders in VS Code
Update:
When I run it with:
watcher.on('unlink', ... and delete a file:
it works
with the console.log output and the ( async () => ...
and Starting and Finished for copy_to_local_folder
watcher.on('unlinkDir', ... and delete a folder:
it does not work
nothing happens in the console output
(not even Starting)
watcher.on('unlinkDir', ... and delete a file:
Starting and Finished for copy_to_local_folder
but not the console.log and ( async () => ...
watcher.on('add', ... and watcher.on('addDir', ...
work both
Seems to me that the watcher.on('unlinkDir', ... does never get triggered ... is unlinkDir not supported by gulp-watch?
I have several typescript files, some of them export a const named APIS.
I'm trying to access those exports (I want to concatenate all of them into a single file), but it doesn't seem to work. I'm obviously doing something wrong, but I'm not sure what.
For example, I have a folder named services, with 2 files: service1.ts, service2.ts.
service1.ts:
...
export const APIS = [ { "field1" : "blabla" } ];
service2.ts: does not contain the APIS var.
This is my gulpfile.js:
var gulp = require('gulp');
var concat = require('gulp-concat');
var map = require('gulp-map');
// Attempt to read an export straight off each file — this cannot work:
// gulp hands the callback a vinyl FILE object (path + raw contents), not
// the evaluated module, so file.APIS is always undefined.
gulp.task('default', function() {
return gulp.src('.../services/*.ts')
.pipe(map(function(file) {
return file.APIS;
}))
.pipe(concat('all.js'))
.pipe(gulp.dest('./test/'));
});
When I run this task, I get nothing. When I added console.log(file.APIS); to the map function, I get undefined for all the values (although it is defined in service1.ts!).
This is following to: Extracting typescript exports to json file using gulp
EDIT: OK, so I tried saving the exports in a .js file instead of a .ts file, and now I can access those vars using require:
// Revised approach: require() each .js module so its exports (e.g. APIS)
// are actually evaluated and accessible. (Snippet truncated by the author.)
gulp.task('default', function() {
return gulp.src('./**/*.service.export.js')
.pipe(map(function(file) {
// file.path is absolute, so require() can load the module directly.
var fileObj = require(file.path);
...
}))
Now if I try console.log(fileObj.APIS); I get the correct values. What I'm still confused about is how I can pass these value on, and create a single file out of all these vars. Is it possible to push them into an array?
This will not work as you think it would work. Gulp itself knows nothing about typescript files, that file is a vinyl-file and has no knowledge about the typescript code within its content.
Edit
Based on your example, you can do something like this:
var gulp = require('gulp');
var concat = require('gulp-concat');
var map = require('gulp-map');
var fs = require('fs');
// Collect the APIS arrays exported by every matched module, then write one
// formatted file once the stream has drained.
gulp.task('test', function ()
{
    var allConstants = [];
    var stream = gulp.src('./**/*.service.export.js')
        .pipe(map(function (file)
        {
            // Evaluate the module; only files that export APIS contribute.
            var obj = require(file.path);
            if (obj.APIS != null)
                allConstants = allConstants.concat(obj.APIS);
            return file;
        }));
    stream.on("end", function ()
    {
        // Do your own formatting here
        var content = allConstants.map(function (constants)
        {
            return Object.keys(constants).reduce(function (aggregatedString, key)
            {
                return aggregatedString + key + " : " + constants[key];
            }, "");
        }).join(", ");
        // FIX: 'end' listeners receive no arguments, so the original `cb`
        // parameter was always undefined; fs.writeFile requires a real
        // callback (Node >= 10 throws otherwise).
        fs.writeFile('filename.txt', content, function (err)
        {
            if (err) throw err;
        });
    });
    return stream;
});
Suggestion
If you want to collect multiple variables into a single file i.e. a common variables file I suggest gulp-replace.
Steps
Create a file, require it and use tags within that file to place your variables.
Advice
If you are already using services don't create an array. Instead create an object (JSON) where every property is a constant. i.e.
// Suggested shape: one object (JSON-like) with a property per constant,
// instead of an array of per-service objects.
var constants = {
const_1: 0,
const_2: 1,
const_3: 2,
}
My goal is to fake out getting some requirejs code working via babel. I've found that if I add the following: if (typeof define !== "function") { var define = require("amdefine")(module); } to the top of every file while running in nodejs things seem to work out.
Here is some code I wrote, which I thought would work or nearly work:
// Babel 5 transformer that tries to prepend the amdefine shim to every file.
// NOTE(review): this inserts the header as a string LITERAL statement, so
// the emitted code is an inert string expression — not the intended
// if/require statement. See the working version below, which parses the
// header into a real AST node first.
function injectDefine(babel) {
var header = 'if (typeof define !== "function") { var define = require("amdefine")(module); }';
return new babel.Plugin('amdefine', {
visitor: {
Program: {
enter: function(path, file) {
path.unshiftContainer(
'body',
babel.types.expressionStatement(
babel.types.stringLiteral(header)
)
);
},
},
},
});
}
// Register the transform for all subsequent require()s (no .babelrc needed).
require('babel-core/register')({
stage: 0,
plugins: [{transformer: injectDefine}],
});
require('../components/button');
The components/button file is just me trying to test that some file can load.
Other notes: I'm using babel 5, and I can't upgrade right now. I also can't use a .babelrc very easily right now.
Tip 1: the environment variable BABEL_DISABLE_CACHE=1 is needed if you are doing heavy testing of plugins. If you had a script that you ran like npm run unit you may instead want to run like BABEL_DISABLE_CACHE=1 npm run unit while testing your plugin.
Tip 2: babel.parse will give you a full program out of some source. The easiest thing you could do is babel.parse(header).program.body[0].
The following ended up working:
// Working Babel 5 version: parse the header into a real AST statement and
// unshift it onto the Program body. In Babel 5 the visitor receives
// (node, parent) rather than a path object.
function injectDefine(babel) {
var header = 'if (typeof define !== "function") { var define = require("amdefine")(module); }';
return new babel.Plugin('amdefine', {
visitor: {
Program: {
enter: function(node, parent) {
// babel.parse yields a whole Program; take its first statement.
node.body.unshift(
babel.parse(header).program.body[0]
);
},
},
},
});
}
require('babel-core/register')({
// Caching must stay off while developing plugins, otherwise stale
// transform output is served from Babel's cache.
cache: false,
stage: 0,
plugins: [injectDefine],
});
At this stage, a cleaner solution can be to use #babel/traverse and #babel/types.
Let's suppose you want to append a comment to the top of every file, you could use some code like the following:
// Import the required modules
import * as t from "#babel/types";
import traverse from "#babel/traverse";
// Get your ast (for this, you can use #babel/parser)
// Traverse your ast
traverse(ast, {
// When the current node is the Program node (so the main node)
Program(path) {
// Insert at the beginning a string "Hello World" --> not valid JS code
// NOTE(review): a bare stringLiteral is not a Statement; wrap it in
// t.expressionStatement(...) to emit valid JavaScript.
path.unshiftContainer('body', t.stringLiteral("Hello World"));
}
});
I'm using Gulp in a VS2015 project to run jscs on JavaScript files with the fix option set. The intention is to modify the same file that is read (viz., source and destination are the same).
var gulp = require('gulp');
var jscs = require('gulp-jscs');
var chmod = require('gulp-chmod');
var exec = require('gulp-exec');
var ourJsFiles = // an array of files and globbed paths
// Run jscs with fix:true over each configured glob, writing the results
// back in place (src base and dest are both the project root).
// NOTE(review): callback() fires before the async streams finish, so the
// task signals completion too early; collecting the streams (merge-stream)
// or returning a promise would signal correctly — verify before relying on
// task ordering.
gulp.task('jscs', function (callback) {
ourJsFiles.forEach(function (fn) {
gulp.src(fn, { base: './' })
.pipe(jscs({
"preset": "google",
"maximumLineLength": 160,
"validateIndentation": 3,
"fix": true
}))
.pipe(gulp.dest('./'));
});
callback();
});
But I do not want to process any files that are read-only. Is there already a way to detect this in Gulp on Windows?
There is a plugin which allows you to work with subset of files: gulp-filter.
One of options is to pass filter function which will receive vinyl file object, so for e.g. you could use stat.mode property of that object which holds permissions and do something like:
var filter = require('gulp-filter');
...
// gulp-filter predicate that keeps only files whose owner has read+write
// permission (i.e. skips read-only files).
// FIX: the original referenced `e.stat.mode`, but the callback parameter is
// `file` — `e` was a ReferenceError at runtime.
var writableFiles = filter(function (file) {
//https://github.com/nodejs/node-v0.x-archive/issues/3045
// Octal permission string such as '0644'; index 1 is the owner digit.
var numericPermission = '0'+(file.stat.mode & parseInt('777', 8)).toString(8);
// '6' = read+write (no execute) for the owner.
return numericPermission[1]==='6'
});
...
gulp.src(....)
.pipe(writableFiles)
I have a project where I have to generate translated static pages.
The choice was to use gulp because it helps a lot in minifying resources, watch for file changes and re-compile, and can also inject html templates in several pages.
I used:
- 'gulp-inject': for inserting templates into final files
- 'gulp-translate-html': for translating because I have '.json' dictionaries
So I have two issues:
'gulp-translate-html'
This uses the json as input for translating, using the following code:
// Translate the English template by interpolating {{...}} placeholders
// from the JSON dictionary.
gulp.task('translate', function() {
return gulp.src('./temp/en/template.html')
.pipe(translateHtml({
// NOTE(review): require() caches this JSON, so edits to en.json are
// invisible to a long-running watch until the cache entry is purged
// (see the delete require.cache fix later in this post).
messages: require('./dictionary/en.json'),
templateSettings: {
interpolate: /{{([\s\S]+?)}}/g
}
}))
.pipe(gulp.dest('./en'));
});
I created a watch on the '.json' file, when modified, it should re-apply the translation. But somehow it uses the old file instead of the modified one.
Is there a workaround for this? Or other plugin that I could use for the json files?
'gulp-inject'
In the code-sample above, I translated only one file. But I need to do so for several languages that have different destinations, so I used a loop for the languages.(sorry for the code indentation)
var gulp = require('gulp'),
inject = require('gulp-inject'),
translateHtml = require('gulp-translate-html');
var languages = ['en', 'de'];
// Inject the partial template into the base template once per language.
// NOTE(review): nothing is returned from the task, so dependents cannot
// know when it finishes; each iteration also re-pipes the SAME `target`
// stream, which is the root of the ordering problem described below.
gulp.task('injectContent', function() {
/* the base file used as a reference*/
var target = gulp.src('./templates/base/baseTemplate.html');
/* get each language*/
languages.forEach(function(lang) {
target.pipe(inject(gulp.src('./templates/partials/injectTemplate.html'), {
relative: true,
starttag: '<!-- inject:template -->',
transform: function (filePath, file) {
return file.contents.toString('utf8');
}
}))
/* put the merged files into "temp" folder under its language folder*/
.pipe(gulp.dest('./temp/'+lang));
});
});
/* The translation has to be made after the injection above is finished*/
// Translate each language's injected template after 'injectContent'.
// FIXES two syntax errors in the posted snippet: a missing comma between
// the dependency array ['injectContent'] and the task callback, and a
// stray ';' after require(...) inside the options object literal.
gulp.task('translate', ['injectContent'], function() {
  /* get each language*/
  languages.forEach(function(lang) {
    gulp.src('./temp/'+ lang +'/baseTemplate.html')
      .pipe(translateHtml({
        messages: require('./dictionary/'+lang+'.json'),
        templateSettings: {
          interpolate: /{{([\s\S]+?)}}/g
        }
      }))
      .pipe(gulp.dest('./'+lang)); /* put file in the "en" or "de" language folder*/
  });
});
// Re-run the translate chain whenever templates or dictionaries change.
gulp.task('watch', function() {
gulp.watch(['./templates/**/*.html', './dictionary/*.json'], ['translate']);
});
// Default entry point: translate once, then keep watching.
gulp.task('default', ['translate', 'watch']);
Here I want the 'injectContent' task to be run before the 'translate' task, but the latter runs too soon. This happens because there is no specific return gulp callback in 'injectContent', right?
How can I merge the results and not let the tasks intercalate?
Just found a solution for the caching issue from point 1:
Based on this answer: https://stackoverflow.com/a/16060619/944637 I deleted the cache and then the "require" function could reload the file from the filesystem and not from cache.
I added delete require.cache[require.resolve('./dictionary/en.json')]; at the beginning of the 'translate' task, before return.
EDIT: Just found the solution on Point 2 to merge the results using "merge-stream", in this answer here: https://stackoverflow.com/a/26786529
so my code turned out to be like this:
....
merge = require('merge-stream');
// Final working version: each language gets its OWN stream and merge-stream
// combines them, so gulp can tell when the whole task has finished.
gulp.task('injectContent', function() {
var tasks = languages.map(function(lang){
return gulp.src('./templates/base/injectContent.html')
.pipe(plumber())
.pipe(debug())
.pipe(inject(gulp.src('./templates/partials/injectTemplate.html'), {
relative: true,
starttag: '<!-- inject:release -->',
transform: function (filePath, file) {
return file.contents.toString('utf8');
}
}))
.pipe(gulp.dest('./temp/'+lang));
});
// Returning the merged stream makes dependents wait for every language.
return merge(tasks);
});
// Runs only after 'injectContent' completes (gulp 3 dependency array).
gulp.task('translate', ['injectContent'], function() {
for (var i in languages) {
var lang = languages[i];
// Purge the require cache so an edited dictionary is re-read from disk.
delete require.cache[require.resolve('./dictionary/'+lang+'.json')];
gulp.src('./temp/'+lang+'/injectContent.html')
.pipe(plumber())
.pipe(debug())
.pipe(translateHtml({
messages: require('./dictionary/'+lang+'.json'),
templateSettings: {
interpolate: /{{([\s\S]+?)}}/g // this is for Angular-like variable syntax
}
}))
.pipe(gulp.dest('./'+lang));
}
});
....