I've got gulpfile.js set up like this:
var scripts = [
'bower_components/timezone-js/src/date.js',
'bower_components/jquery/jquery.min.js',
'bower_components/jquery-migrate/jquery-migrate.js',
'bower_components/jquery-ui/ui/minified/jquery-ui.min.js',
'bower_components/jqueryui-touch-punch/jquery.ui.touch-punch.min.js',
...
];
gulp.task('scripts', function () {
return gulp.src(scripts, {base: '.'})
.pipe(plumber(plumberOptions))
.pipe(sourcemaps.init({
loadMaps: false,
debug: debug,
}))
...
i.e., all my script files are exact matches. No globbing.
Every now and then I mess up a file path or the author changes the directory structure. I want to be notified when this happens instead of the script silently being excluded and causing run-time errors.
Is there some way for me to make gulp.src report these kinds of errors?
Use gulp-expect-file as per this answer.
var coffee = require('gulp-coffee');
var expect = require('gulp-expect-file');
gulp.task('mytask', function() {
var files = ['idontexist.html'];
return gulp.src(files)
.pipe(expect(files))
.pipe(coffee());
});
(Thanks rve)
gulp.src is actually just an alias to vinyl-fs.src which looks like this:
function src(glob, opt) {
opt = opt || {};
var pass = through.obj();
if (!isValidGlob(glob)) {
throw new Error('Invalid glob argument: ' + glob);
}
// return dead stream if empty array
if (Array.isArray(glob) && glob.length === 0) {
process.nextTick(pass.end.bind(pass));
return pass;
}
var options = defaults(opt, {
read: true,
buffer: true
});
var globStream = gs.create(glob, options);
// when people write to use just pass it through
var outputStream = globStream
.pipe(through.obj(createFile))
.pipe(getStats(options));
if (options.read !== false) {
outputStream = outputStream
.pipe(getContents(options));
}
return outputStream.pipe(pass);
}
It in turn uses glob-stream, which uses glob. You could probably bypass most of that and use through2 directly to create a stream from the array of files, but I haven't figured out how to do this yet.
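One simpler alternative, sketched here rather than taken from this thread: since the list contains only exact paths and no globs, you can check each path up front and fail loudly before gulp.src ever runs. The assertFilesExist helper below is hypothetical; it reuses the scripts array and task from the question.

var fs = require('fs');

// Hypothetical guard: any entry that doesn't exist on disk is a mistake
// and should abort the build instead of being silently dropped.
function assertFilesExist(paths) {
    var missing = paths.filter(function (p) { return !fs.existsSync(p); });
    if (missing.length > 0) {
        throw new Error('Missing script files:\n' + missing.join('\n'));
    }
    return paths;
}

gulp.task('scripts', function () {
    return gulp.src(assertFilesExist(scripts), {base: '.'})
        .pipe(plumber(plumberOptions))
        // ...
});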
For example, here is a simple gulp plugin (gulp-if):
'use strict';
var match = require('gulp-match');
var ternaryStream = require('ternary-stream');
var through2 = require('through2');
module.exports = function (condition, trueChild, falseChild, minimatchOptions) {
if (!trueChild) {
throw new Error('gulp-if: child action is required');
}
if (typeof condition === 'boolean') {
// no need to evaluate the condition for each file
// other benefit is it never loads the other stream
return condition ? trueChild : (falseChild || through2.obj());
}
function classifier (file) {
return !!match(file, condition, minimatchOptions);
}
return ternaryStream(classifier, trueChild, falseChild);
};
And here is a typical use case:
gulp.task('task', function() {
gulp.src('./src/*.js')
.pipe(gulpif(condition, uglify()))
.pipe(gulp.dest('./dist/'));
});
I am trying to figure out what is going on.
According to the documentation, every gulp plugin is supposed to take in a vinyl file and return a vinyl file.
But where is the passed-in vinyl file?
The only parameters passed in are condition, trueChild, falseChild, and minimatchOptions.
Furthermore, how does pipe receive the returned vinyl file from the plugin?
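A rough mental model (my own sketch, not quoted from the gulp docs): a plugin is a factory function that only receives configuration and returns a Transform stream in object mode. The vinyl files never reach the factory; .pipe() writes each one into the returned stream, where the transform callback sees it.

var gulp = require('gulp');
var through2 = require('through2');

// minimal do-nothing plugin, for illustration only
function myPlugin(options) {                 // only configuration arrives here
    return through2.obj(function (file, enc, callback) {
        // each vinyl file arrives here: gulp.src() is a readable stream of
        // vinyl files, and .pipe() writes every one of them into this stream
        callback(null, file);                // hand the file to the next .pipe()
    });
}

gulp.src('./src/*.js')
    .pipe(myPlugin({ some: 'option' }))
    .pipe(gulp.dest('./dist/'));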
I have several TypeScript files, some of which export a const named APIS.
I'm trying to access those exports (I want to concatenate all of them into a single file), but it doesn't seem to work. I'm obviously doing something wrong, but I'm not sure what.
For example, I have a folder named services, with 2 files: service1.ts, service2.ts.
service1.ts:
...
export const APIS = [ { "field1" : "blabla" } ];
service2.ts does not contain the APIS const.
This is my gulpfile.js:
var gulp = require('gulp');
var concat = require('gulp-concat');
var map = require('gulp-map');
gulp.task('default', function() {
return gulp.src('.../services/*.ts')
.pipe(map(function(file) {
return file.APIS;
}))
.pipe(concat('all.js'))
.pipe(gulp.dest('./test/'));
});
When I run this task, I get nothing. When I add console.log(file.APIS); inside the map function, I get undefined for all the files (although APIS is defined in service1.ts!).
This is a follow-up to: Extracting typescript exports to json file using gulp
EDIT: OK, so I tried saving the exports in a .js file instead of a .ts file, and now I can access those vars using require:
gulp.task('default', function() {
return gulp.src('./**/*.service.export.js')
.pipe(map(function(file) {
var fileObj = require(file.path);
...
}))
Now if I try console.log(fileObj.APIS); I get the correct values. What I'm still confused about is how I can pass these values on and create a single file out of all these vars. Is it possible to push them into an array?
This will not work the way you think it would. Gulp itself knows nothing about TypeScript files; the file here is a vinyl file and has no knowledge of the TypeScript code within its contents.
Edit
Based on your example, you can do something like this:
var gulp = require('gulp');
var concat = require('gulp-concat');
var map = require('gulp-map');
var fs = require('fs');
gulp.task('test', function ()
{
var allConstants = [];
var stream = gulp.src('./**/*.service.export.js')
.pipe(map(function(file)
{
var obj = require(file.path);
if (obj.APIS != null)
allConstants = allConstants.concat(obj.APIS);
return file;
}));
stream.on("end", function (cb)
{
// Do your own formatting here
var content = allConstants.map(function (constants)
{
return Object.keys(constants).reduce(function (aggregatedString, key)
{
return aggregatedString + key + " : " + constants[key];
}, "");
}).join(", ");
fs.writeFile('filename.txt', content, function (err) { if (err) throw err; });
});
return stream;
});
Suggestion
If you want to collect multiple variables into a single file, i.e. a common variables file, I suggest gulp-replace.
Steps
Create a file, require it and use tags within that file to place your variables.
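A minimal sketch of that idea, assuming a template file ./src/constants.js containing a @@constants@@ tag (both the file name and the tag are made up here):

var gulp = require('gulp');
var replace = require('gulp-replace');

gulp.task('constants', function () {
    var constants = { const_1: 0, const_2: 1, const_3: 2 };
    // fill the @@constants@@ tag in the template with the serialized object
    return gulp.src('./src/constants.js')
        .pipe(replace('@@constants@@', JSON.stringify(constants, null, 2)))
        .pipe(gulp.dest('./test/'));
});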
Advice
If you are already using services, don't create an array. Instead, create an object (JSON) where every property is a constant, i.e.
var constants = {
const_1: 0,
const_2: 1,
const_3: 2,
}
I just have a quick question here:
I am using Node.js to write a command-line tool that validates JSON files with JSON Schemas. The problem is that when I try to collect all the schema files, I always get "undefined", because I'm using an async function while everything else uses only sync functions.
For this command-line tool async is NOT needed.
Could someone help me out and give me a hand on how to make this work?
var getJSONSchemaFiles = function (dir) {
results2 = [];
var recursive = require('recursive-readdir');
recursive(dir, function (err, files) {
// Files is an array of filename
// console.log(files);
files.forEach(function (entry) {
if (entry.indexOf(".schema.json") > -1) {
results2.push(entry);
}
});
console.log(results2);
});
return results2;
};
I am using the npm package "recursive-readdir", but I think I don't even need a package for this kind of thing?
Ok, this enumerates all files under the given path synchronously:
var fs = require('fs');
function recursiveReaddir(path) {
var stat = fs.lstatSync(path);
if(stat.isFile())
return [path];
if(!stat.isDirectory())
return [];
return [].concat.apply([], fs.readdirSync(path).map(function(fname) {
return recursiveReaddir(path + '/' + fname);
}));
}
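Applied to the original question, a short usage sketch (the directory name is an assumption):

// collect all *.schema.json files synchronously, no extra package needed
var schemaFiles = recursiveReaddir('./schemas').filter(function (entry) {
    return entry.indexOf('.schema.json') > -1;
});
console.log(schemaFiles);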
Use the glob module: https://github.com/isaacs/node-glob. There are async and sync methods, like glob.sync(pattern, [options]) and glob(pattern, [options], cb).
Example from their docs:
var glob = require("glob")
// options is optional
glob("**/*.js", options, function (er, files) {
// files is an array of filenames.
// If the `nonull` option is set, and nothing
// was found, then files is ["**/*.js"]
// er is an error object or null.
})
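For the synchronous case in the question, a minimal sketch (the directory and pattern are assumptions):

var glob = require('glob');

var getJSONSchemaFiles = function (dir) {
    // glob.sync returns the matches directly, so they can simply be returned
    return glob.sync(dir + '/**/*.schema.json');
};

console.log(getJSONSchemaFiles('.'));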
I'm trying to copy a SQLite database from the data folder in my extension directory to the profile folder, in order to use it.
So for now, I'm trying this:
const {Cc, Ci, Cu} = require("chrome");
const {NetUtils} = Cu.import("resource://gre/modules/NetUtil.jsm");
const data = require('sdk/self').data;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/FileUtils.jsm");
var file = Cc["#mozilla.org/file/directory_service;1"].
getService(Ci.nsIProperties).
get("TmpD", Ci.nsIFile);
file.append("searchEngines.sqlite");
file.createUnique(Ci.nsIFile.NORMAL_FILE_TYPE, 0666);
// Then, we need an output stream to our output file.
var ostream = Cc["#mozilla.org/network/file-output-stream;1"].createInstance(Ci.nsIFileOutputStream);
ostream.init(file, -1, -1, 0);
// Finally, we need an input stream to take data from.
var iStreamData = NetUtil.ioService.newChannel(data.url("searchEngines.sqlite"), null, null).open();
let istream = Cc["#mozilla.org/io/string-input-stream;1"].createInstance(Ci.nsIStringInputStream);
istream.setData(iStreamData, iStreamData.length);
NetUtil.asyncCopy(istream, ostream, function(aResult) {
console.log(aResult); // return 0
})
console.log(FileUtils.getFile("ProfD", ["searchEngines.sqlite"]).exists()); // return false
let dbConn = Services.storage.openDatabase(file);
The file doesn't seem to exist, since the console.log of exists() returns false, and it doesn't get populated (the console.log(aResult) returns 0).
Where is my mistake, and is there a better way to do that?
Besides that it uses sync I/O (opening the channel with .open instead of .asyncOpen), the NetUtil.asyncCopy operation is still async, meaning the code
NetUtil.asyncCopy(istream, ostream, function(aResult) {
console.log(aResult); // return 0
})
console.log(FileUtils.getFile("ProfD", ["searchEngines.sqlite"]).exists()); // return false
let dbConn = Services.storage.openDatabase(file);
will likely try to open the file before the copy finishes!
However, file.exists() will likely be true, because you already opened the file for writing. It's just that the file is still blank, because the data copy isn't done (or even started) yet. (Actually, it is false, because you're checking searchEngines.sqlite in ProfD and not TmpD, but if you correct that the previous statement would apply.)
You can only use the file when/after your callback to .asyncCopy is done, e.g.
NetUtil.asyncCopy(istream, ostream, function(aResult) {
console.log(aResult);
console.log(FileUtils.getFile("ProfD", ["searchEngines.sqlite"]).exists()); // return false
let dbConn = Services.storage.openDatabase(file);
// ...
});
PS: You might want to .asyncOpen the channel, then use NetUtil.asyncFetch and pass the resulting stream to .asyncCopy to be truly async for smallish files, since this caches the contents in memory first.
For large files you could create a variant of the NetUtil.asyncFetch implementation that feeds the .outputStream end directly to NetUtil.asyncCopy. That is a bit more complicated, so I won't write it up in detail until somebody is truly interested in this and asks the corresponding question.
Edit: here is how I'd write it:
const {Cc, Ci, Cu} = require("chrome");
const data = require('sdk/self').data;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/NetUtil.jsm");
function copyDataURLToFile(url, file, callback) {
NetUtil.asyncFetch(url, function(istream) {
var ostream = Cc["#mozilla.org/network/file-output-stream;1"].
createInstance(Ci.nsIFileOutputStream);
ostream.init(file, -1, -1, Ci.nsIFileOutputStream.DEFER_OPEN);
NetUtil.asyncCopy(istream, ostream, function(result) {
callback && callback(file, result);
});
});
}
var file = Services.dirsvc.get("TmpD", Ci.nsIFile);
file.append("searchEngines.sqlite");
copyDataURLToFile(data.url("searchEngine.sqlite"), file, function(file, result) {
console.log(result);
console.log(file.exists());
console.log(file.fileSize);
});
Try using OS.File; it's much more straightforward.
Cu.import("resource://gre/modules/FileUtils.jsm");
Cu.import("resource://gre/modules/osfile.jsm")
var fromPath = FileUtils.getFile("ProfD", ["searchEngines.sqlite"]).path;
var toPath = FileUtils.getFile("TmpD", ["searchEngines.sqlite"]).path;;
var promise = OS.File.copy(fromPath, toPath);
var dbConn;
promise.then(
function(aStat) {
alert('success will now open connection');
// openDatabase expects an nsIFile, not a path string
dbConn = Services.storage.openDatabase(new FileUtils.File(toPath));
},
function(aReason) {
console.log('promise rejected', aReason);
alert('copy failed, see console for details');
}
);
I created a plugin that writes JSON data to a JSON file.
But I don't understand why I should send my JSON object down the pipe instead of writing the file directly in my plugin.
I want to use my plugin with this syntax:
gulp.task('js-hash', function()
{
// Get all js in redis
gulp.src('./build/js/**/*.js')
.pipe(getHashFile('/build/js/'))
.pipe(gulp.dest('./build/js/hash.json'));
});
And not this:
gulp.task('js-hash', function()
{
// Get all js in redis
gulp.src('./build/js/**/*.js')
.pipe(getHashFile('./build/js/hash.json', '/build/js/'));
});
This is my plugin:
var through = require('through2');
var gutil = require('gulp-util');
var crypto = require('crypto');
var fs = require('fs');
var PluginError = gutil.PluginError;
// Consts
const PLUGIN_NAME = 'get-hash-file';
var json = {};
function getHashFile(filename, basename)
{
if (!filename) {
throw new PluginError(PLUGIN_NAME, "Missing filename!");
}
// Creating a stream through which each file will pass
var stream = through.obj(function (file, enc, callback) {
if (file.isNull()) {
this.push(file); // Do nothing if no contents
return callback();
}
if (file.isBuffer()) {
var hash = crypto.createHash('sha256').update(String(file.contents)).digest('hex');
json[file.path.replace(file.cwd+basename, '')] = hash;
return callback();
}
if (file.isStream()) {
this.emit('error', new PluginError(PLUGIN_NAME, 'Stream not supported!'));
return callback();
}
}).on('finish', function () {
fs.writeFile(filename, JSON.stringify(json), function(err) {
if (err) {
throw err;
}
});
});
// returning the file stream
return stream;
}
// Exporting the plugin main function
module.exports = getHashFile;
What are your ideas?
Nothing prevents you from doing this... besides not respecting the gulp plugin guidelines!
Users actually assume a plugin will stream files and that they can pipe them to other plugins.
If I get your code right, you're trying to generate a file that contains all sha hashes of inbound files. Why not let users take this file and pipe it to other plugins? You'd be surprised what people could do.
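To make that concrete, here is a rough, untested sketch of how the plugin could push the collected hashes downstream as a vinyl file on flush instead of writing the file with fs; gulp.dest then receives a directory, and hash.json lands inside it. The vinyl module is an assumption here (gutil.File would do the same job in older setups).

var gulp = require('gulp');
var through = require('through2');
var crypto = require('crypto');
var path = require('path');
var Vinyl = require('vinyl');

function getHashFile(basename) {
    var json = {};
    return through.obj(function (file, enc, callback) {
        if (file.isBuffer()) {
            var hash = crypto.createHash('sha256').update(String(file.contents)).digest('hex');
            json[file.path.replace(file.cwd + basename, '')] = hash;
        }
        // pass every inbound file along untouched
        callback(null, file);
    }, function (callback) {
        // flush: push one extra vinyl file holding the collected hashes
        this.push(new Vinyl({
            cwd: process.cwd(),
            base: process.cwd(),
            path: path.join(process.cwd(), 'hash.json'),
            contents: new Buffer(JSON.stringify(json))
        }));
        callback();
    });
}

// usage: gulp.dest receives a directory, and hash.json is written inside it
gulp.src('./build/js/**/*.js')
    .pipe(getHashFile('/build/js/'))
    .pipe(gulp.dest('./build/js/'));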
While this question looks a bit opinion-based, you could definitely put the focus on how to deal with files that may not belong to the main stream of files. Issues like this can be found in many plugins; for example, the gulp-uglify authors are wondering how they can add source maps without mixing JS and source maps downstream.