In Meteor I can read a file like this:
myjson = JSON.parse(Assets.getText("lib/myfile.json"))
Now I want to iterate through a folder and read all the available JSON files. What would be the best way to do this without installing extra npm packages?
Thank you for your time.
I'm not sure if this is the best way, but it is certainly an easy one:
var fs = Npm.require('fs');

fs.readdir('./assets/app/myFolder', function (err, filenames) {
    _.each(filenames, function (filename) {
        var content = Assets.getText('myFolder/' + filename);
        // do something with content
    });
});
I wrapped Hubert OG's code in a function with Meteor.bindEnvironment. I believe this is necessary because Fibers are not available outside of the Meteor environment; see https://www.eventedmind.com/feed/49CkbYeyKoa7MyH5R
Beware that external Node packages have a different document root than Meteor.
var fs = Npm.require('fs');

// readdirSync is synchronous and returns the file names directly;
// it takes no callback.
var files = fs.readdirSync('../../../../../server/collections/lib/');

var done = Meteor.bindEnvironment(function (files) {
    return _.each(files, function (filename) {
        var myjson = JSON.parse(Assets.getText('lib/' + filename));
        /* do something */
    });
}, function (e) {
    throw e;
});

done(files);
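Putting the two answers together, a minimal server-side sketch, assuming the JSON files live in private/lib/ so they end up under assets/app/lib/ at runtime (the folder name is an assumption). Since readdirSync is synchronous and the code runs inside Meteor.startup, no bindEnvironment is needed here:

Meteor.startup(function () {
    var fs = Npm.require('fs');
    // Assumption: files placed in private/lib/ are served as assets
    var filenames = fs.readdirSync('./assets/app/lib/');
    var allJson = _.map(filenames, function (filename) {
        return JSON.parse(Assets.getText('lib/' + filename));
    });
    /* do something with allJson */
});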
I am working on a WordPress plugin; I have all the files in my working directory and run gulp in that project folder. Now I'd like to have a watch task that copies all changes to my local WP installation for testing.
So I am looking for a way to sync the project folder (in one direction only) with the plugin folder of my WP installation.
I managed to get it to work with gulp-directory-sync
...
var dirSync = require("gulp-directory-sync");
var localDir = "../newDir/";
var buildDir = "./buildDir/";
...
function copy_to_local_folder() {
    return pipeline(
        gulp.src(buildDir + '**/*'),
        dirSync(buildDir, localDir, { printSummary: true })
    );
}

function watch_local() {
    gulp.watch(buildDir + '**/*', copy_to_local_folder);
}

exports.default = watch_local;
However, the plugin hasn't been updated in 4 years and, according to this answer, it doesn't follow the proper "gulp way" (e.g. it doesn't use gulp.src), and this task should be possible with other basic gulp functions.
Copying changed files is pretty easy, but keeping track of deleted files is more complicated. I would also prefer to only update changed/deleted/new files instead of clearing the folder and copying all files every time.
Starting with the updated code in the aforementioned answer, I tried to implement it and made changes to make it work.
...
var newer = require("gulp-newer");
var pipeline = require("readable-stream").pipeline;
var del = require("del");
var localDir = "../newDir/";
var buildDir = "./buildDir/";
function copy_to_local_folder() {
    return pipeline(
        gulp.src([buildDir + '**/*']),
        newer(localDir),
        gulp.dest(localDir)
    );
}

function watch_local() {
    var watcher = gulp.watch(buildDir + '**/*', copy_to_local_folder);
    watcher.on('unlink', function(path) {
        console.log(path);
        var newPath = './' + path;
        newPath = newPath.replace(buildDir, localDir);
        console.log(newPath);
        (async () => {
            const deletedPaths = await del(newPath, { dryRun: true, force: true });
            console.log('Deleted files and directories:\n', deletedPaths.join('\n'));
        })();
    });
}

exports.default = watch_local;
With this code, the folder gets updated when I change or delete files, but it is not triggered when I delete an entire folder. That is probably because I use unlink and not unlinkDir. But even if I use the version of the function below, it is not triggered by deleting a folder (with files in it).
watcher.on('unlinkDir', function(path) {
    console.log('folder deleted');
    console.log(path);
    var newPath = './' + path;
    newPath = newPath.replace(buildDir, localDir);
    console.log(newPath);
});
What am I doing wrong?
Or is there in general a better way to achieve this?
PS: I'm using
node v11.15.0
gulp v4.0.2
on Linux
deleting files and folders in VS Code
Update:
When I run it with:
watcher.on('unlink', ... and delete a file:
it works
the console.log output and the (async () => ... run,
and Starting and Finished are printed for copy_to_local_folder
watcher.on('unlinkDir', ... and delete a folder:
it does not work
nothing happens in the console output
(not even Starting)
watcher.on('unlinkDir', ... and delete a file:
Starting and Finished are printed for copy_to_local_folder,
but the console.log and the (async () => ... do not run
watcher.on('add', ... and watcher.on('addDir', ...
both work
It seems to me that watcher.on('unlinkDir', ... never gets triggered... is unlinkDir not supported by gulp.watch?
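A hedged side note: gulp 4 delegates file watching to chokidar, and gulp.watch's options.events defaults to ['add', 'change', 'unlink'], so directory events are not requested by default. One possible experiment is to watch with chokidar directly, which does emit unlinkDir. A sketch, assuming chokidar is resolvable because gulp 4 depends on it:

var chokidar = require('chokidar');

function watch_local() {
    var watcher = chokidar.watch(buildDir + '**/*', { ignoreInitial: true });
    watcher.on('all', function (event, path) {
        if (event === 'unlink' || event === 'unlinkDir') {
            // Mirror the deletion into the local WP folder
            var newPath = ('./' + path).replace(buildDir, localDir);
            del(newPath, { force: true });
        } else {
            // add/addDir/change: copy the changed files over
            copy_to_local_folder();
        }
    });
}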
I added a self-written WebSocket client library. When I require it in Node.js it works fine, just as in Node-RED's function node after registering it in settings.js and requiring it with global.get("RWSjs").
Now I have written a node myself and wanted to require this file, but it doesn't work. Node-RED always gives me the "node not deployed" error, which I think is caused by a JavaScript syntax error.
How can I require a self written module in a self written node's .js?
Thanks a lot in advance, Peter :)
Edit:
Some code:
eval-R-char.js (Code for the node)
module.exports = function(RED) {
    // doesn't work:
    var RWSjs = global.get("RWSjs");

    function EvalRCharNode(config) {
        RED.nodes.createNode(this, config);
        this.instruction = config.instruction;
        var node = this;
        this.on('input', function(msg) {
            //msg.payload = msg.payload.toLowerCase();
            msg.payload = "Instruction: " + this.instruction;
            node.send(msg);
        });
    }

    RED.nodes.registerType("eval-R-char", EvalRCharNode);
}
You shouldn't use the context to require modules when writing your own nodes; it is purely a workaround for the fact that you can't use require in the function node.
You should just require as normal in your custom node.
So in this case:
module.exports = function(RED) {
    // assuming your module is in the RWS.js file in the same directory
    var RWSjs = require('./RWS.js');

    function EvalRCharNode(config) {
        RED.nodes.createNode(this, config);
        this.instruction = config.instruction;
        var node = this;
        this.on('input', function(msg) {
            //msg.payload = msg.payload.toLowerCase();
            msg.payload = "Instruction: " + this.instruction;
            node.send(msg);
        });
    }

    RED.nodes.registerType("eval-R-char", EvalRCharNode);
}
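For completeness, a minimal sketch of what the required file could export; the function name is made up here, the real RWS.js obviously has its own API:

// RWS.js -- hypothetical shape of the self-written library
module.exports = {
    connect: function (url) {
        // open the WebSocket connection, return a client object, etc.
    }
};

If the node is packaged separately, the library can also be listed as a dependency in the node's package.json and required by name instead of by relative path.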
I have several typescript files, some of them export a const named APIS.
I'm trying to access those exports (I want to concatenate all of them into a single file), but it doesn't seem to work. I'm obviously doing something wrong, but I'm not sure what.
For example, I have a folder named services, with 2 files: service1.ts, service2.ts.
service1.ts:
...
export const APIS = [ { "field1" : "blabla" } ];
service2.ts: does not contain the APIS var.
This is my gulpfile.js:
var gulp = require('gulp');
var concat = require('gulp-concat');
var map = require('gulp-map');
gulp.task('default', function() {
    return gulp.src('.../services/*.ts')
        .pipe(map(function(file) {
            return file.APIS;
        }))
        .pipe(concat('all.js'))
        .pipe(gulp.dest('./test/'));
});
When I run this task, I get nothing. When I added console.log(file.APIS); to the map function, I got undefined for all the values (although APIS is defined in service1.ts!).
This is a follow-up to: Extracting typescript exports to json file using gulp
EDIT: OK, so I tried saving the exports in a .js file instead of a .ts file, and now I can access those vars using require:
gulp.task('default', function() {
    return gulp.src('./**/*.service.export.js')
        .pipe(map(function(file) {
            var fileObj = require(file.path);
            ...
        }));
});
Now if I try console.log(fileObj.APIS); I get the correct values. What I'm still confused about is how I can pass these values on and create a single file out of all these vars. Is it possible to push them into an array?
This will not work the way you think it would. Gulp itself knows nothing about TypeScript files; that file is a vinyl file, and gulp has no knowledge of the TypeScript code within its content.
Edit
Based on your example, you can do something like this:
var gulp = require('gulp');
var concat = require('gulp-concat');
var map = require('gulp-map');
var fs = require('fs');

gulp.task('test', function ()
{
    var allConstants = [];
    var stream = gulp.src('./**/*.service.export.js')
        .pipe(map(function(file)
        {
            var obj = require(file.path);
            if (obj.APIS != null)
                allConstants = allConstants.concat(obj.APIS);
            return file;
        }));

    // the "end" listener receives no arguments, so write the file
    // with an explicit callback instead of an undefined `cb`
    stream.on("end", function ()
    {
        // Do your own formatting here
        var content = allConstants.map(function (constants)
        {
            return Object.keys(constants).reduce(function (aggregatedString, key)
            {
                return aggregatedString + key + " : " + constants[key];
            }, "");
        }).join(", ");

        fs.writeFile('filename.txt', content, function (err)
        {
            if (err)
                throw err;
        });
    });

    return stream;
});
Suggestion
If you want to collect multiple variables into a single file, i.e. a common variables file, I suggest gulp-replace.
Steps
Create a file, require it and use tags within that file to place your variables.
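A minimal sketch of that idea; the template file name and the placeholder tag are made up here:

var gulp = require('gulp');
var replace = require('gulp-replace');

gulp.task('constants', function () {
    // constants.template.js contains a line like:  var APIS = /* @apis */;
    return gulp.src('./templates/constants.template.js')
        .pipe(replace('/* @apis */', JSON.stringify(allConstants)))
        .pipe(gulp.dest('./test/'));
});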
Advice
If you are already using services, don't create an array. Instead create an object (JSON) where every property is a constant, i.e.
var constants = {
    const_1: 0,
    const_2: 1,
    const_3: 2
};
I just have a quick question:
I am using Node.js to write a command-line tool that validates JSON files against JSON Schemas. The problem is that when I want to collect all the schemas, I always get undefined, because I'm using an async function while everything else in the tool is synchronous.
For this command-line tool async is NOT needed.
Could someone help me out and give me a hand on how to make it work?
var getJSONSchemaFiles = function (dir) {
    results2 = [];
    var recursive = require('recursive-readdir');
    recursive(dir, function (err, files) {
        // files is an array of filenames
        // console.log(files);
        files.forEach(function (entry) {
            if (entry.indexOf(".schema.json") > -1) {
                results2.push(entry);
            }
        });
        console.log(results2);
    });
    // this returns before recursive's callback has run,
    // so the caller sees an empty array
    return results2;
};
I am using the npm package "recursive-readdir", but I think I don't even need a package for this kind of thing?
Ok, this enumerates all files under the given path synchronously:
var fs = require('fs');

function recursiveReaddir(path) {
    var stat = fs.lstatSync(path);
    if (stat.isFile())
        return [path];
    if (!stat.isDirectory())
        return [];
    // Recurse into the directory and flatten the resulting arrays
    return [].concat.apply([], fs.readdirSync(path).map(function(fname) {
        return recursiveReaddir(path + '/' + fname);
    }));
}
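Used together with the original filter, the whole lookup becomes synchronous, e.g.:

var getJSONSchemaFiles = function (dir) {
    // recursiveReaddir is the synchronous helper defined above
    return recursiveReaddir(dir).filter(function (entry) {
        return entry.indexOf('.schema.json') > -1;
    });
};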
Use the glob module: https://github.com/isaacs/node-glob. There are async and sync methods, like glob.sync(pattern, [options]) and glob(pattern, [options], cb).
Example from their docs:
var glob = require("glob")
// options is optional
glob("**/*.js", options, function (er, files) {
// files is an array of filenames.
// If the `nonull` option is set, and nothing
// was found, then files is ["**/*.js"]
// er is an error object or null.
})
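For this use case the synchronous variant would be enough, e.g.:

var glob = require('glob');

// Synchronously collect all schema files below `dir`
var results = glob.sync(dir + '/**/*.schema.json');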
I created a plugin that writes JSON data to a JSON file.
But I don't understand why I should send my JSON object down the pipe instead of writing the file directly in my plugin.
I want to use my plugin with this syntax:
gulp.task('js-hash', function()
{
    // Get all js in redis
    gulp.src('./build/js/**/*.js')
        .pipe(getHashFile('/build/js/'))
        .pipe(gulp.dest('./build/js/hash.json'));
});

And not this:

gulp.task('js-hash', function()
{
    // Get all js in redis
    gulp.src('./build/js/**/*.js')
        .pipe(getHashFile('./build/js/hash.json', '/build/js/'));
});
This is my plugin:
var through = require('through2');
var gutil = require('gulp-util');
var crypto = require('crypto');
var fs = require('fs');

var PluginError = gutil.PluginError;

// Consts
const PLUGIN_NAME = 'get-hash-file';

var json = {};

function getHashFile(filename, basename)
{
    if (!filename) {
        throw new PluginError(PLUGIN_NAME, "Missing filename!");
    }

    // Creating a stream through which each file will pass
    var stream = through.obj(function (file, enc, callback) {
        if (file.isNull()) {
            this.push(file); // Do nothing if no contents
            return callback();
        }

        if (file.isBuffer()) {
            var hash = crypto.createHash('sha256').update(String(file.contents)).digest('hex');
            json[file.path.replace(file.cwd + basename, '')] = hash;
            return callback();
        }

        if (file.isStream()) {
            this.emit('error', new PluginError(PLUGIN_NAME, 'Stream not supported!'));
            return callback();
        }
    }).on('finish', function () {
        fs.writeFile(filename, JSON.stringify(json), function(err) {
            if (err) {
                throw err;
            }
        });
    });

    // returning the file stream
    return stream;
}

// Exporting the plugin main function
module.exports = getHashFile;
What are your ideas?
Nothing prevents you from doing this... besides not respecting the plugin guidelines!
Users actually assume a plugin will stream files and that they can pipe it to other plugins.
If I understand your code correctly, you're trying to generate a file that contains all the SHA hashes of the inbound files. Why not let users take this file and pipe it to other plugins? You'd be surprised what people can do.
While this question looks a bit opinion-based, you could definitely put the focus on how to deal with files that may not belong to the main stream of files. Issues like this can be found in many plugins; for example, the gulp-uglify authors are wondering how they can add source maps without mixing JS and source maps downstream.
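One way to follow the guidelines is to push the generated JSON downstream as a new file in the flush callback, so users can pipe it to gulp.dest themselves. A sketch using through2's flush argument and gulp-util's File constructor, the same modules the plugin already requires:

var through = require('through2');
var gutil = require('gulp-util');
var crypto = require('crypto');
var path = require('path');

function getHashFile(basename) {
    var json = {};
    return through.obj(function (file, enc, callback) {
        if (file.isBuffer()) {
            var hash = crypto.createHash('sha256').update(String(file.contents)).digest('hex');
            json[file.path.replace(file.cwd + basename, '')] = hash;
        }
        callback(); // consume inbound files; only the summary goes downstream
    }, function (callback) {
        // Emit hash.json as a vinyl file so users can .pipe(gulp.dest('./build/js/'))
        this.push(new gutil.File({
            cwd: process.cwd(),
            base: process.cwd(),
            path: path.join(process.cwd(), 'hash.json'),
            contents: Buffer.from(JSON.stringify(json))
        }));
        callback();
    });
}

Moving json inside the function also avoids stale entries accumulating when the task runs more than once, e.g. under a watch.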