I'm learning my way through node and gulp and trying to do something that there may already be a solution for but I'm doing it as a learning exercise. The idea is that I want to scan all the files in a directory, read the files and look for the gulp.task line, read in the task name and the comment above it. This information will be used to generate an object then sent to a file in order to make something readable by gulp-list.
I'm stuck trying to add items into the object during the reading of the file. Nothing I have tried so far enables me to add a key and value to the object.
Any help you can give would be great. Also if you know of another (potentially easier way) I would be really interested to hear. I've had a look at gulp-task-list but this does not seem to support the multiple file approach I want to use.
var gulp = require('gulp')
fs = require('fs'); // NOTE(review): missing `var`/comma — ASI ends the previous statement, so `fs` leaks as an implicit global
var path = './gulp/tasks';
var taskList = {};
// DESC: Rebuilds the list
// BUG (the asker's problem): readdir/readFile are asynchronous, so the
// console.log at the bottom runs before any callback has fired and prints {}.
// taskList IS being mutated from the callbacks — just after it was logged.
gulp.task('build:list', function() {
fs.readdir(path, function(err, files) {
if (err) throw err;
files.forEach(function (file) {
fs.readFile(path+'/'+file, function(err, data) {
if (err) throw err;
// NOTE(review): `lines` and `match` below also lack `var` — more implicit globals
lines = data.toString().split("\n");
lines.forEach(function (item, index, array) {
if (match = item.match(/^gulp\.task\(\'(.*)\'/)) {
console.log(match[1]); // fires later, after the task body has already returned
taskList[match[1]] = true; // this assignment does work — it just happens too late
}
})
});
})
})
console.log(taskList); // runs synchronously, before any file has been read → always {}
});
So I found a solution. It turns out the variable was being altered from within the async callbacks all along — the real problem was ordering: console.log ran before any of the asynchronous reads had completed, so the object still looked empty at the moment it was printed. Switching to the synchronous API sidesteps the ordering issue entirely.
My solution, in full including writing out the JSON file
// Declare each dependency explicitly: the original comma-less chain
// (`var gulp = ...` followed by bare `fs = ...`) let `fs` and `gutil`
// escape to global scope via automatic semicolon insertion.
var gulp = require('gulp');
var fs = require('fs');
var gutil = require('gulp-util');

// Directory that holds one file per gulp task.
var path = './gulp/tasks';
// Maps task name -> the comment line directly above its gulp.task() call.
var taskList = {};

// Rebuilds the task list and writes it to ./tasks.json.
gulp.task('build:list', function() {
    // Synchronous reads keep everything ordered, so taskList is fully
    // populated before it is serialized below.
    var files = fs.readdirSync(path);
    files.forEach(function (file) {
        var contents = fs.readFileSync(path + '/' + file);
        var lines = contents.toString().split("\n");
        lines.forEach(function (item, index, array) {
            // Non-greedy capture grabs the task name from `gulp.task('name'`.
            var match = item.match(/^gulp\.task\(\'(.*?)\'/);
            if (match) {
                // The line above the declaration is assumed to be its
                // descriptive comment (undefined when the task is on line 0).
                taskList[match[1]] = array[index - 1];
            }
        });
    });
    fs.writeFileSync('./tasks.json', JSON.stringify(taskList));
    gutil.log("Task list built");
});
The second solution I thought of might be a lot easier, I thought maybe I can read each file and concat all of the files into a single file which then might be able to replace the gulpfile.js file which then might allow me to use another tool to get all the task names and descriptions. Or possibly concat the files together and then instead of process multiple file I can process one. If I come up with one of these solutions I will update this.
Related
How to read two .txt files and turn these 2 files to a 2d array?
I already have a code like this :
var fs = require('fs');
// Path to the input file (declared with const — the original comma-less
// continuation made `file` an implicit global).
const file = './text1.txt';

fs.readFile(file, 'utf-8', (e, d) => {
  // Surface read errors instead of ignoring them: when the file is missing,
  // `d` is undefined and `d.split` would throw a confusing TypeError.
  if (e) throw e;
  const textByLine = d.split('\r\n'); // one entry per line (Windows line endings)
  console.log("test : " + textByLine[1]);
});
source
I succeeded to store the file in a 1d array but now I have 2 files and I want to store them in a 2d array.
How to do this?
Thank you
You can have a variable at top with an empty array, after you read the files and push that result to that variable , like this:
// Identifiers cannot start with a digit, so "2dArray" is a syntax error;
// use a valid name for the accumulator instead.
const twoDArray = [];

// Reads one file and pushes its lines onto twoDArray as a new row.
const fillArray = (path) => {
  fs.readFile(path, 'utf-8', (e, d) => {
    if (e) throw e;                  // don't silently ignore read errors
    twoDArray.push(d.split('\r\n')); // the split result is already an array
  });
}; // (the original had an extra `);` here, which was a syntax error)
after that you can call each file like this :
// read the files and push each result onto twoDArray
fillArray('./text1.txt');
fillArray('./text2.txt');
// NOTE: the reads are asynchronous — twoDArray is only populated after the
// callbacks have run, so index into it from inside a callback (or after
// the reads have been awaited), not immediately after the calls above.
// You can read the first line of your first file like this:
const firstPartOfArray = twoDArray[0][0]; // text1 first result value
If you don't need the result files in a particular order, I strongly recommend using an async function.
You can also use things like the fs-jetpack package to handle this, or glob.
I have a problem with the Mongoose Model.find() method. In the development environment it works properly, but when I deployed my app on Heroku and mLab it doesn't work properly. In my project I have a search menu with a dropdown list of items which come from the database, and I pass them to a Handlebars template. When you click on the dropdown there are items from which you can choose: link
But when i go to production doesn't work:http://s1356.photobucket.com/user/acho999/media/production%20-evironment_zpsb49om1nz.png.html
My JS file contains the code below, and I do not know where I am wrong. I read a lot of articles: I disabled autoindex in my model schemas, I read about the MongoDB driver and updated it, and I also checked the compatibility of MongoDB and Mongoose — they are the latest versions...
I have no idea what is wrong.Please help me with this.
const express = require("express");
const router = express.Router();
const Condition = require("../models/Condition");
const Manufacturer = require("../models/Manufacturer");
const Material = require("../models/Material");
const Kind = require("../models/Kind");
const Type = require("../models/Type");
const db = require("../connection/databaseConn");
// Loads the five dropdown collections and renders the admin page.
// BUG (this is the behavior the question describes): the five .find() calls
// run in parallel and nothing waits for the first four. The .then() chained
// onto the Manufacturer query alone can fire before the other callbacks have
// pushed any data, so the page may render with empty lists. Which way it
// goes is timing-dependent — presumably why it "works" locally but not in
// production, where database latency differs.
router.get("/categoriesLoad",(req,res)=>{
// Accumulators filled asynchronously by the queries below.
let types = [];
let kinds = [];
let materials = [];
let conditions = [];
let manufacturers = [];
//db.then(()=>{
Type.find({},(err,t)=>{
t.forEach(x=>types.push(x));
});
Kind.find({},(err,t)=>{
t.forEach(x=>kinds.push(x));
});
Material.find({},(err,t)=>{
t.forEach(x=>materials.push(x));
});
Condition.find({},(err,t)=>{
t.forEach(x=>conditions.push(x));
});
Manufacturer.find({},(err,t)=>{
t.forEach(x=>manufacturers.push(x));
}).then(()=>{ // NOTE(review): mixes a callback with a Promise on the same query — only awaits THIS query
let add = req.session.addMessage;
req.session.addMessage ="";
res.render("adminArea",{types,kinds,materials,conditions,manufacturers,add});
});
//})
});
module.exports = router;
A couple of things to look at:
Is your database filled with objects?
Is your code built to run synchronously?
The Manufacturer query's .then() may execute before everything else is even done.
All those .find() calls execute at the same time, so if the Manufacturer query finishes first it will run .then() and you get empty lists.
The major problem is that you cannot ensure Manufacturer.find will finish after all the other database queries are completed.
Another problem is, never mix callbacks and Promises, try to unify the coding style, while callbacks are so ES5, I suggest using Promise only.
Try this:
// Run all five queries in parallel and wait for every one of them before
// rendering, so no list can come back empty due to timing.
Promise.all([
  Type.find().exec(),
  Kind.find().exec(),
  Material.find().exec(),
  Condition.find().exec(),
  Manufacturer.find().exec()
]).then(([types, kinds, materials, conditions, manufacturers]) => {
  const add = req.session.addMessage;
  req.session.addMessage = '';
  res.render('adminArea', {
    types,
    kinds,
    materials,
    conditions,
    manufacturers,
    add
  });
}).catch((err) => {
  // Without a .catch, any failed query becomes an unhandled rejection and
  // the request hangs with no response; fail loudly instead.
  console.error(err);
  res.status(500).send('Failed to load categories');
});
I'm trying to achieve this with Gulp.
Read every .json file in a given directory including subdirectories.
Transform them in some way, for example add a new root level, etc.
Save them into a new directory keeping original structure.
The point where I'm lost is how to pipe reading/writing JSON to src.
I have the following skeleton now.
gulp.task("migratefiles", function () {
// The missing piece: a transform stream that parses, rewrites and
// re-serializes each JSON file as it flows from src to dest.
return gulp.src("files/**/*.json")
.pipe(/* WHAT HERE? */)
.pipe(gulp.dest("processed"));
});
There's a number of way you can do this:
(1) Use the gulp-json-transform plugin:
var jsonTransform = require('gulp-json-transform');

// Nest each JSON document under a fresh "newRootLevel" key on its way
// from files/ to processed/, preserving the directory structure.
gulp.task("migratefiles", function () {
  return gulp
    .src("files/**/*.json")
    .pipe(jsonTransform(function (json, file) {
      return { "newRootLevel": json };
    }))
    .pipe(gulp.dest("processed"));
});
Pros:
Easy to use
Supports asynchronous processing (if you return a Promise)
Gives access to path of each file
Cons:
Only rudimentary output formatting
(2) Use the gulp-json-editor plugin:
var jeditor = require('gulp-json-editor');

// Rewrite every JSON file so its original contents sit under "newRootLevel".
gulp.task("migratefiles", function () {
  // Named transform keeps the pipeline itself easy to scan.
  var wrapUnderNewRoot = function (json) {
    return { "newRootLevel": json };
  };
  return gulp.src("files/**/*.json")
    .pipe(jeditor(wrapUnderNewRoot))
    .pipe(gulp.dest("processed"));
});
Pros:
Easy to use
Automatically recognizes the indentation your input files use (two spaces, four spaces, tabs etc.) and formats your output files accordingly
Supports various js-beautify options
Cons:
Doesn't seem to support asynchronous processing
Doesn't seem to have a way to access path of each file
(3) Do it manually (by directly accessing the vinyl file object using map-stream):
var map = require('map-stream');

// Manual variant: directly edit each vinyl file object as it streams through.
gulp.task("migratefiles", function () {
  return gulp.src("files/**/*.json")
    .pipe(map(function (file, done) {
      // Parse the file contents, wrap them under a new root key, re-serialize.
      var json = JSON.parse(file.contents.toString());
      var transformedJson = {
        "newRootLevel": json
      };
      // Buffer.from() replaces the deprecated (and security-flagged)
      // `new Buffer()` constructor.
      file.contents = Buffer.from(JSON.stringify(transformedJson));
      done(null, file);
    }))
    .pipe(gulp.dest("processed"));
});
Pros:
Full control/access over everything
Supports asynchronous processing (through a done callback)
Cons:
Harder to use
I have a gulp task which can take parameters, specifically the parameter "--only {filename.ext}".
The problem is, if it's e.g. --only fOObaR.jade it also matches the file called foobar.jade, which is okay.
But then it passes the fOObaR through the pipe instead of the matched (and right) foobar, resulting in an HTML file also called fOObaR.html.
Is there any way I can get the filename of the match, instead of the filename which I pass through gulp.src()?
I already tried gulp-tap with no luck.
I'm thinking of something like:
Get all files from the source directory beforehand
Filter those out that don't match a certain criteria
Start the task
Start gulp-tap
Go through all files we already got beforehand
If the given file matches one of those, return file from the list to get the correct filename
Trying to get this to work now. If someone has useful tips or a better way to do this, let me know! =)
Here is my task, simplified (and hopefully helpful):
var gulp = require('gulp');
var jade = require('gulp-jade');
var args = require('yargs').argv;

// Compiles .jade views to HTML; `--only somefile.jade` narrows the glob to
// that single filename.
gulp.task('html', function () {
  var source = [];
  var sourceGlob = '*.jade';
  // yargs yields `true` (a boolean) when --only is passed without a value,
  // and a number for numeric values — coerce to a string before calling
  // string methods on it, otherwise `.indexOf` throws.
  var only = String(args.only || '');

  // Only narrow the glob when a full .jade filename was supplied.
  if (only.indexOf('.jade') > -1) {
    sourceGlob = only;
  }

  source.push(paths.src.views + '/**/' + sourceGlob);

  return gulp.src(source)
    .pipe(jade({ pretty: true }))
    .pipe(gulp.dest(paths.dest.views));
});
I have written code that parses a dictionary returned from Firebase containing images encoded using base64. I want it to simply write these images to file, and it does, but I receive the following error after my writes finish:
smalloc.cc:280: void node::smalloc::SliceOnto(const v8::FunctionCallbackInfo<v8::Value>&): Assertion `end <= source_len' failed.
This is my code:
// Iterate through each key in each page if each request
for (var key in request) {
var obj = request[key];
if (typeof (obj) == "object") {
for (var prop in obj) {
item++;
if(obj.hasOwnProperty(prop)) {
switch (prop) {
case "img":
var media = new ReceivedMedia(obj[prop]);
var filename = transaction.tid + "-" + item + "." + media.extension;
filename = filename.slice(10);
require('fs').writeFileSync(filename, media.b64, 'base64', function(err) {
if (err) throw err;
});
break;
}
}
}
}
}
My images come out fine, but the error is a little weird and I would prefer to not to occur. Would anyone have an idea as to why this is happening? That would be super helpful :)
Note: ReceivedMedia is a class I defined as:
// Plain data holder for a media item received from Firebase.
// Copies only the fields this script cares about off the raw payload:
// the base64 image data, its file extension, and its x/y position.
function ReceivedMedia(media) {
  var fields = ['b64', 'extension', 'posx', 'posy'];
  for (var i = 0; i < fields.length; i++) {
    this[fields[i]] = media[fields[i]];
  }
}
Side question: If I use writeFile instead of writeFileSync one of my images is corrupted and the other contains no data. If after that happens I run my node script again, the files save correctly. I would also like some explanation as to why that happens, from my understanding one of these is synchronous (writeFileSync I am guessing) and the other is asynchronous (writeFile I am assuming).
A Google search for your error message description found this discussion of the issue in io.js and this discussion in node.js and it sounds like it is a bug that has been fixed (not sure if the fix has been released in a full build yet).
The node.js fix is here.
If you were desparate to fix it now in your own build, you'd have to apply the fix to your own code tree and rebuild it. Otherwise, you'll have to investigate or inquire when this fix will get into an official release (I'm not personally sure how that process works for node.js).