Where does this "_.startsWith" come from? - javascript

I am trying to understand the following code from the browserify-css repo:
var gulp = require('gulp');
var gutil = require('gulp-util');
var path = require('path');
var browserify = require('browserify');
var sourceStream = require('vinyl-source-stream');
var fse = require('fs-extra');

var bundleStream = browserify()
    .add('src/index.js')
    .transform(require('browserify-css'), {
        rootDir: 'src',
        processRelativeUrl: function(relativeUrl) {
            var stripQueryStringAndHashFromPath = function(url) {
                return url.split('?')[0].split('#')[0];
            };
            var rootDir = path.resolve(process.cwd(), 'src');
            var relativePath = stripQueryStringAndHashFromPath(relativeUrl);
            var queryStringAndHash = relativeUrl.substring(relativePath.length);

            //
            // Copying files from '../node_modules/bootstrap/' to 'dist/vendor/bootstrap/'
            //
            var prefix = '../node_modules/';
            if (_.startsWith(relativePath, prefix)) {
                var vendorPath = 'vendor/' + relativePath.substring(prefix.length);
                var source = path.join(rootDir, relativePath);
                var target = path.join(rootDir, vendorPath);

                gutil.log('Copying file from ' + JSON.stringify(source) + ' to ' + JSON.stringify(target));
                fse.copySync(source, target);

                // Returns a new path string with original query string and hash fragments
                return vendorPath + queryStringAndHash;
            }

            return relativeUrl;
        }
    })
    .bundle();

bundleStream
    .pipe(sourceStream(bundleFile))
    .pipe(gulp.dest(browserifyConfig.dest));
I don't understand the part
_.startsWith(relativePath, prefix)
Where is the underscore coming from? This is supposed to be JavaScript executed by a task runner. I know that in the Node.js REPL the underscore holds the result of the last evaluated expression, but that feature can't be used inside scripts. It's also not an Underscore.js instance, because one is never declared anywhere. startsWith is a String method.
So what am I missing?

That code is using the lodash library. You can see in this section of the readme that they're importing lodash with var _ = require('lodash');
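For reference, here is a minimal sketch of what that call does (the example path is illustrative, based on the bootstrap comment in the snippet above); for this use case lodash's _.startsWith behaves the same as the native String.prototype.startsWith:
var _ = require('lodash');

var relativePath = '../node_modules/bootstrap/dist/css/bootstrap.css';
var prefix = '../node_modules/';

console.log(_.startsWith(relativePath, prefix)); // true
// Native equivalent (ES2015+), if you prefer to avoid the extra dependency:
console.log(relativePath.startsWith(prefix));    // true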

Related

Nodejs - How to get the version of a dependency

Is there a way to get the version of an external dependency in JS code, without hardcoding it?
If you wanted to get the version of express, you could do something like the following. It loops over each folder in node_modules and adds each package's name and version to an object.
const fs = require('fs');
const path = require('path');

const dirs = fs.readdirSync('node_modules');
const packages = {};

dirs.forEach(function(dir) {
    // Folders without a package.json (e.g. .bin) will throw here,
    // so you may want to wrap this in a try/catch.
    const file = path.resolve('node_modules', dir, 'package.json');
    const json = require(file);
    const name = json.name;
    const version = json.version;
    packages[name] = version;
});

console.log(packages['react-native']); // will log the version
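If you only need the version of one known dependency, a simpler sketch is to require its package.json directly (this assumes the package actually ships a package.json that require can reach, which is the case for classic CommonJS packages such as express):
// Module requests resolve through node_modules, so this reads node_modules/express/package.json
const expressVersion = require('express/package.json').version;
console.log(expressVersion); // e.g. "4.17.1"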

gulp-concat : from unknown source to the same destination

I recently started to use gulp to keep my dev project organized and I've run into a little something that I can't figure out. So this is my task:
gulp.task('jsassemble', function () {
    return gulp
        .src('vendor/proj/**/**/src/assets/js/*.js')
        .pipe(concat('all.js'))
        .pipe(gulp.dest('public/js'));
});
As you can see, it fetches every JS file in vendor/proj/anyFolder/anySubFolder/src/assets/js, concatenates them, names the newly created file 'all.js' and then puts it in public/js. The problem is that I would like gulp to keep the folder hierarchy, for example:
Source = vendor/proj/anyFolder1/anySubFolder1/src/assets/js/*.js
Destination = public/js/anyFolder1/anySubFolder1/src/assets/js/all.js
Source = vendor/proj/anyFolder1/anySubFolder2/src/assets/js/*.js
Destination = public/js/anyFolder1/anySubFolder2/src/assets/js/all.js
Instead of simply merging everything from those folders into one single public/js/all.js.
Is there any way to do it? I tried to Google it first, but I wasn't able to properly formulate my question in a few words and got irrelevant results.
You could create a function that keeps your folder hierarchy. This page (http://www.jamescrowley.co.uk/2014/02/17/using-gulp-packaging-files-by-folder/) describes the solution.
var fs = require('fs');
var path = require('path');
var es = require('event-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');

var scriptsPath = './src/scripts/';

function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function(file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

gulp.task('scripts', function() {
    var folders = getFolders(scriptsPath);

    var tasks = folders.map(function(folder) {
        return gulp.src(path.join(scriptsPath, folder, '/*.js'))
            .pipe(concat(folder + '.js'))
            .pipe(gulp.dest(scriptsPath))
            .pipe(uglify())
            .pipe(rename(folder + '.min.js'))
            .pipe(gulp.dest(scriptsPath));
    });

    return es.concat.apply(null, tasks);
});
Thanks to @caballerog who put me on the right path, here's the explained code:
var fs = require('fs');
var path = require('path');
var es = require('event-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');

// get every folder from a 'pathTo/Something'
function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function(file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

var projectsRoot = 'vendor/proj/';
var pathToJsFiles = '/src/assets/js/';
var pathToPublic = 'public/js/';

gulp.task('scripts', function() {
    var sites = [];
    var pathToProjects = [];

    // Fetching every folder in vendor/proj
    var projects = getFolders(projectsRoot);

    // Fetching every subfolder in vendor/proj/something
    for (var index in projects) {
        sites.push(getFolders(projectsRoot + '/' + projects[index]));
    }

    // Pushing every project/site that exists into an array
    for (var i = 0; i < projects.length; i++) {
        for (var j = 0; j < sites[i].length; j++) {
            if (sites[i][j] != null)
                pathToProjects.push(projects[i] + '/' + sites[i][j]);
        }
    }

    // Fetching every JS file in vendor/proj/pathToAProject/pathToJsFiles,
    // concatenating them together
    // and sending them to pathToPublic/pathToAProject/all.js
    var tasks = pathToProjects.map(function(pathToAProject) {
        return gulp.src(projectsRoot + pathToAProject + pathToJsFiles + '/*.js')
            .pipe(concat('all.js'))
            .pipe(gulp.dest(pathToPublic + pathToAProject));
    });

    return es.concat.apply(null, tasks);
});
TL;DR: get every JS file from projectsRoot/someFolder/someFolder/pathToJsFiles, concatenate them, and send the result to pathToPublic/someFolder/someFolder/all.js.

Node.js List All Modules Required in a File

I must be missing something here. I have a mocha test file that is set up like so:
require('globals');
var library1 = require('library1'),
    library2 = require('library2');

it('tests stuff', function() {
    ...
});
Where the globals file just contains a before and afterEach block, so that they apply to all my test files.
What I'm trying to do is determine, within the before and afterEach blocks, which files I have require'd in the test file those blocks are currently running in. So in the example test file I gave, I would need afterEach to output a list that contains globals, library1, and library2.
I have attempted to use Node.js's module.children property, but for some reason that is only returning me globals, and excluding library1 and library2.
Edit: What am I missing that module.children wouldn't be returning library1 and library2?
Here is an example of a script that will parse requires in a file.
var fs = require('fs');

function parseRequires(absPath) {
    var requiredFiles = [];
    var contents = fs.readFileSync(absPath, 'utf8').split('\n');

    contents.forEach(function(line) {
        var re = /(?:require\('?"?)(.*?)(?:'?"?\))/;
        var matches = re.exec(line);
        if (matches) {
            requiredFiles.push(matches[1]);
        }
    });

    return requiredFiles;
}

module.exports = parseRequires;
I made a test script, testRequires.js, in the same directory (it parses itself):
var fs = require('fs');
var os = require('os');

function A() {}

var http = require("http");
var parseRequires = require("./parseRequires");

console.log( parseRequires( __dirname + '/testRequires.js') );
results in console: [ 'fs', 'os', 'http', './parseRequires' ]
This is one solution, though I'm sure there is an easier way to do it with Node.js's built-in functionality.
Here is another solution I came up with using require.cache. It iterates over require.cache, which holds every file that has been required, and extracts the name of each required file.
var someModule = require('./someModule');
var path = require('path');

for (var p in require.cache) {
    console.log( trim(p) );
}

function trim(p) {
    var re = /(.*?)\.js/;
    var basename = path.basename(p);
    var moduleName = re.exec(basename)[1];
    return moduleName;
}
Test files:
someModule.js
var fs = require('fs');
var os = require('os');
var _ = require('lodash');
var test = require('./test');
test.js
var a = require('lodash'),
    jQuery = require('jquery');
Running getRequires.js results in
getRequires
someModule
index
test
jquery
Notice that it includes itself (getRequires), so something would have to trim that out as well.
Also notice index, which comes from lodash (its entry point is node_modules/lodash/index.js).
Here is a modification that shows the file paths as well
var someModule = require('./someModule');
var path = require('path');

for (var p in require.cache) {
    console.log( trim(p) );
}

function trim(p) {
    var re = /(.*?)\.js/;
    var basename = path.basename(p);
    var moduleName = re.exec(basename)[1];
    return [ moduleName, p ];
}
That gets us:
[ 'getRequires',
'/<myroot>/require/getRequires.js' ]
[ 'someModule',
'/<myroot>/require/someModule.js' ]
[ 'index',
'/<myroot>/require/node_modules/lodash/index.js' ]
[ 'test', '/<myroot>/require/test.js' ]
[ 'jquery',
'/<myroot>/require/node_modules/jquery/dist/jquery.js' ]
(require is the name of the directory I'm working in.)
Let me know your thoughts on these solutions. There is more to be done, but I think this can get you started.
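Building on that last point, here is a small sketch (my own addition, not part of the answer above) of how you might trim out the script itself and everything pulled in from node_modules, keeping only your project's own required files:
var path = require('path');

// Keep only the project's own files: drop this script itself and anything
// resolved from node_modules (lodash's index.js, jquery, ...).
var ownRequires = Object.keys(require.cache)
    .filter(function(p) {
        return p !== __filename && p.indexOf('node_modules') === -1;
    })
    .map(function(p) {
        return path.basename(p, path.extname(p));
    });

console.log(ownRequires); // e.g. [ 'someModule', 'test' ]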

Gulp, using current filename in `pipe()` function

I'm trying to render each file in my gulp source files with its own JSON file, but I can't figure out how to access the current filename in the pipe function.
var gulp = require('gulp');
var handlebars = require('handlebars');
var gulpHandlebars = require('gulp-compile-handlebars');

gulp.task('compile-with-sample-data', function () {
    var options = {};
    return gulp.src('./src/**/*.html')
        .pipe(gulpHandlebars({ data: require('./data/' + filename + '.json') }, options))
        .pipe(gulp.dest('./build/'));
});
Where I can get it to work with the same file each time by just using require('./data/orders-complete.json'):
var gulp = require('gulp');
var handlebars = require('handlebars');
var gulpHandlebars = require('gulp-compile-handlebars');

gulp.task('compile-with-sample-data', function () {
    var options = {};
    return gulp.src('./src/**/*.html')
        .pipe(gulpHandlebars({ data: require('./data/orders-complete.json') }, options))
        .pipe(gulp.dest('./build/'));
});
It's not clear how I would do this.
Use gulp-tap; it enables you to get the file name and even change it for downstream pipes.
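A rough sketch of how that could look here (this is an assumption on my part, not from the answer: the handlebars compilation is moved into the tap callback instead of going through gulp-compile-handlebars, and a matching ./data/<name>.json is assumed to exist for every template):
var gulp = require('gulp');
var tap = require('gulp-tap');
var path = require('path');
var handlebars = require('handlebars');

gulp.task('compile-with-sample-data', function () {
    return gulp.src('./src/**/*.html')
        .pipe(tap(function (file) {
            // file.path is the absolute path of the file currently in the stream
            var name = path.basename(file.path, '.html');
            // assumes ./data/<name>.json exists for every template (hypothetical layout)
            var data = require('./data/' + name + '.json');
            var template = handlebars.compile(file.contents.toString());
            file.contents = Buffer.from(template(data));
        }))
        .pipe(gulp.dest('./build/'));
});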

Node.js require caching

I am really stuck on the Node.js module cache system. I have this structure for my project:
Project/
    apps/
        jobs_processor/
            app.js
            processors.js
            processors/
    libs/
        queue_manager.js
queue_manager.js requires processors.js:
var processors = require("../apps/jobs_processor/processors.js");
app.js also requires processors.js:
var processors = require("./processors.js");
If I understand the documentation correctly, I must use the same path to obtain the same object, is that right? If so, how can I achieve that (have the same path)?
Thanks.
EDIT:
I found a solution to my problem.
Here is the first version of the queue_manager.js file:
var _ = require("lodash");
var Utils = require("./utilities");
var Processors = require("../apps/jobs_processor/processors");
var Logger = require("./logger");
var QUEUES_CACHE = {};
exports.createJob = createJob;
exports.getCacheObject = getCacheObject;
function createJob(name, data) {
var cacheId = name.replace(/ /g, "_");
Logger.info("Cache ID: " + cacheId);
if (!QUEUES_CACHE[ cacheId ]) {
_.each(Processors, function (processor) {
Logger.debug("PROCESSOR NAME: " + processor.name);
Logger.debug("JOB NAME: " + name);
if (processor.name === name)
QUEUES_CACHE[ cacheId ] = processor;
});
if (!QUEUES_CACHE[ cacheId ])
throw new Error("Processor for job \"" + name + "\" not found.");
}
Logger.debug(Object.keys(QUEUES_CACHE));
return QUEUES_CACHE[ cacheId ].queue.add(data);
}
function getCacheObject() {
return QUEUES_CACHE;
}
And here is the latest version of the same file:
var _ = require("lodash");
var Utils = require("./utilities");
var Logger = require("./logger");
exports.createJob = createJob;
function createJob(name, data) {
var Processors = require("../apps/jobs_processor/processors");
var processor;
_.each(Processors, function (element) {
Logger.debug("Processor name: " + element.name);
if (element.name === name)
processor = element;
});
return processor.queue.add(data);
}
Each time the createJob method is called, I require the processors module, which is an array of the job processors I have created.
Node.js will resolve the path before caching the module.
As long as your relative paths resolve to the same absolute path on disk, you're fine.
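A quick way to verify this (a sketch assuming the question's layout; the relative paths are the ones from the question): require.resolve shows the absolute path Node uses as the cache key, and any require that resolves to that path returns the very same object:
// From apps/jobs_processor/app.js
var processors = require('./processors.js');
var resolved = require.resolve('./processors.js');
console.log(resolved); // .../Project/apps/jobs_processor/processors.js

// From libs/queue_manager.js the relative path is different, but it resolves
// to the same absolute path, so the same cached module object comes back:
// require('../apps/jobs_processor/processors.js') === processors // true
console.log(require.cache[resolved].exports === processors); // true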
