I have a large number of JavaScript files split into 4 subdirectories in my app. In Grunt I grab all of them and compile them into one file. These files do not have a module.exports function.
I want to use webpack and split it into 4 parts. I don't want to manually go in and require all my files.
I would like to create a plugin that, on compilation, walks the directory trees, grabs all the .js file names and paths, then requires all files in the subdirectories and adds them to the output.
I want all the files in each directory compiled into a module that I could then require from my entry point file, or include in the assets that http://webpack.github.io/docs/plugins.html mentions.
When adding a new file I just want to drop it in the correct directory and know it will be included.
Is there a way to do this with webpack or a plugin that someone has written to do this?
This is what I did to achieve this:
function requireAll(r) { r.keys().forEach(r); }
requireAll(require.context('./modules/', true, /\.js$/));
In my app file, I ended up putting the require like this:
require.context(
"./common", // context folder
true, // include subdirectories
/.*/ // RegExp
)("./" + expr + "")
courtesy of this post: https://github.com/webpack/webpack/issues/118
It is now adding all my files. I have a loader for html and css and it seems to work great.
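If, like the original question, you have four separate subdirectories, you can simply repeat the pattern once per directory. Note that the path passed to require.context must be a literal (webpack parses it statically), so it cannot be built in a loop; the directory names below are placeholders:
function requireAll(r) { r.keys().forEach(r); }

// One context per subdirectory; each path has to be spelled out literally
requireAll(require.context('./dir1/', true, /\.js$/));
requireAll(require.context('./dir2/', true, /\.js$/));
requireAll(require.context('./dir3/', true, /\.js$/));
requireAll(require.context('./dir4/', true, /\.js$/));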
How about a map of all the files in a folder?
// {
// './image1.png': 'data:image/png;base64,iVBORw0KGgoAAAANS',
// './image2.png': 'data:image/png;base64,iVBP7aCASUUASf892',
// }
Do this:
const allFiles = (ctx => {
let keys = ctx.keys();
let values = keys.map(ctx);
return keys.reduce((o, k, i) => { o[k] = values[i]; return o; }, {});
})(require.context('./path/to/folder', true, /.*/));
Example of how to get a map of all images in the current folder.
const IMAGES_REGEX = /\.(png|gif|ico|jpg|jpeg)$/;
function mapFiles(context) {
const keys = context.keys();
const values = keys.map(context);
return keys.reduce((accumulator, key, index) => ({
...accumulator,
[key]: values[index],
}), {});
}
const allImages = mapFiles(require.context('./', true, IMAGES_REGEX));
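As a usage sketch, you can then look files up by their context key; what the values actually are (data URIs, URLs, module exports) depends on your loader configuration:
console.log(Object.keys(allImages)); // e.g. ['./image1.png', './image2.png']
const logo = allImages['./image1.png']; // a data URI or URL, per your loader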
All credit to @splintor (thanks).
But here is my own derived version.
Benefits:
What modules export is gathered under a {module_name: exports_obj} object.
module_name is built from its file name.
...without the extension, and with slashes replaced by underscores (in case of subdirectory scanning).
Added comments to ease customization.
E.g. you may not want to include files in subdirectories if, say, they are there to be manually required by root-level modules.
EDIT: If, like me, you're sure your modules won't return anything other than (at least at the root level) a regular JavaScript object, you can also "mount" them replicating their original directory structure (see the Code (Deep Version) section at the end).
Code (Original Version):
function requireAll(r) {
return Object.fromEntries(
r.keys().map(function(mpath, ...args) {
const result = r(mpath, ...args);
const name = mpath
.replace(/(?:^[.\/]*\/|\.[^.]+$)/g, '') // Trim
.replace(/\//g, '_') // Replace '/'s with '_'s
;
return [name, result];
})
);
};
const allModules = requireAll(require.context(
// Variables cannot be used here; the path must be a literal
'#models' // (Webpack based) path
, true // Use subdirectories
, /\.js$/ // File name pattern
));
Example:
Sample output of console.log(allModules):
{
main: { title: 'Webpack Express Playground' },
views_home: {
greeting: 'Welcome to Something!!',
title: 'Webpack Express Playground'
}
}
Directory tree:
models
├── main.js
└── views
└── home.js
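For completeness, module sources consistent with the sample output above would look something like this (inferred from the output, not taken from the original post):
// models/main.js
module.exports = { title: 'Webpack Express Playground' };

// models/views/home.js
module.exports = {
  greeting: 'Welcome to Something!!',
  title: 'Webpack Express Playground'
};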
Code (Deep Version):
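// jsonSet(target, path, value) writes value at a nested path inside target,
// creating intermediate objects as needed, e.g.:
// jsonSet({}, ['views', 'home'], 42) -> { views: { home: 42 } }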
function jsonSet(target, path, value) {
let current = target;
path = [...path]; // Detach
const item = path.pop();
path.forEach(function(key) {
(current[key] || (current[key] = {}));
current = current[key];
});
current[item] = value;
return target;
};
function requireAll(r) {
const gather = {};
r.keys().forEach(function(mpath, ...args) {
const result = r(mpath, ...args);
const path = mpath
.replace(/(?:^[.\/]*\/|\.[^.]+$)/g, '') // Trim
.split('/')
;
jsonSet(gather, path, result);
});
return gather;
};
const models = requireAll(require.context(
// Variables cannot be used here; the path must be a literal
'#models' // (Webpack based) path
, true // Use subdirectories
, /\.js$/ // File name pattern
));
Example:
Result of previous example using this version:
{
main: { title: 'Webpack Express Playground' },
views: {
home: {
greeting: 'Welcome to Something!!',
title: 'Webpack Express Playground'
}
}
}
This works for me:
function requireAll(r) { r.keys().forEach(r); }
requireAll(require.context('./js/', true, /\.js$/));
NOTE: this can require .js files in subdirs of ./js/ recursively.
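If you don't want the recursion, pass false as the second argument so only top-level files are included:
// Only .js files directly inside ./js/, no subdirectories
requireAll(require.context('./js/', false, /\.js$/));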
Related
I have this JavaScript data file (src/test/test.js):
module.exports = {
"title": "...",
"Number": "number1",
"Number2": ({ number1 }) => number1 / 2,
}
I want to pass this file verbatim (functions preserved) to a page, so that the page can use that data to build itself. I already have the page template and everything else sorted out; I just need to find a way to pass this into the page.
The first approach I tried was requiring this file in gatsby-node.js and then passing it as pageContext.
gatsby-node.js
const path = require('path');
exports.createPages = ({actions, graphql}) => {
const { createPage } = actions;
return graphql(`
query loadQuery {
allFile(filter: {sourceInstanceName: {eq: "test"}}) {
edges {
node {
relativePath
absolutePath
}
}
}
}
`).then(result => {
if (result.errors) {
throw result.errors;
}
for (const node of result.data.allFile.edges.map(e => e.node)) {
const data = require(node.absolutePath);
createPage({
path: node.relativePath,
component: path.resolve('./src/templates/test.js'),
context: data,
});
}
});
};
gatsby-config.js
module.exports = {
plugins: [
{
resolve: `gatsby-source-filesystem`,
options: {
name: `test`,
path: `${__dirname}/src/test/`,
},
},
],
}
src/templates/test.js
import React from 'react';
const index = ({ pageContext }) => (
<p>{pageContext.Number2()}</p>
);
export default index;
However, I get this warning when running the dev server:
warn Error persisting state: ({ number1 }) => number1 / 2 could not be cloned.
If I ignore it and try to use the function anyway, Gatsby crashes with this error:
WebpackError: TypeError: pageContext.Number2 is not a function
After searching for a while, I found this:
The pageContext was always serialized so it never worked to pass a function and hence this isn't a bug. We might have not failed before though.
- Gatsby#23675
which told me this approach wouldn't work.
How could I pass this data into a page? I've considered JSON instead, however, JSON can't contain functions.
I've also tried finding a way to register a JSX object directly, however I couldn't find a way.
Regarding the main topic: as you spotted, it can't be done that way because the data is serialized.
How could I pass this data into a page? I've considered JSON instead,
however, JSON can't contain functions.
Well, this is partially true. You can always do something like:
{"function":{"arguments":"a,b,c","body":"return a*b+c;"}}
And then:
// given that `json` is the string shown above
const parsedJson = JSON.parse(json);
const func = new Function(parsedJson.function.arguments, parsedJson.function.body);
In this case, you are (de)serializing a JSON function, creating and casting a function based on JSON parameters. This approach may work in your scenario.
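Applied to the question's scenario, a rough sketch could be to serialize Number2 in gatsby-node.js and rebuild it in the template. The shape of the serialized object here is just one possible convention, and the body string is a hand translation of the original arrow function:
// gatsby-node.js (inside the loop): replace the function with serializable parts
const data = require(node.absolutePath);
createPage({
  path: node.relativePath,
  component: path.resolve('./src/templates/test.js'),
  context: {
    ...data,
    Number2: { arguments: 'data', body: 'return data.number1 / 2;' },
  },
});

// src/templates/test.js: rebuild the function and call it
const Number2 = new Function(pageContext.Number2.arguments, pageContext.Number2.body);
const half = Number2({ number1: 42 }); // 21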
Regarding the JSX, I guess you can try something like:
for (const node of result.data.allFile.edges.map(e => e.node)) {
const data = require(node.absolutePath);
createPage({
path: node.relativePath,
component: path.resolve('./src/templates/test.js'),
context:{
someComponent: () => <h1>Hi!</h1>
},
});
}
And then:
import React from 'react';
const Index = ({ pageContext: { someComponent: SomeComponent } }) => (
<div><SomeComponent /></div>
);
export default Index;
Note: I don't know if it's a typo from the question, but index should be capitalized as Index.
In this case, you are aliasing the someComponent as SomeComponent, which is a valid React component.
In the script below I would like to read the content of config.toml; I have marked where I would like to do that. What confuses me is how to include the toml.js file, which contains the TOML reader function.
Throughout the script I need to read 3 different TOML files.
Question
How should I include the toml.js function and how to reuse the function to read 3 different files?
Disclaimer: I am sorry for this super noob question, but this is my first project, and with 6 different ways to write a function, I find it very confusing.
index.js
'use strict'
var minimist = require('minimist')
const toml = require('./src/toml')
module.exports = () => {
var argv = minimist(process.argv.slice(2), {
string: ['input', 'project'],
boolean: ['validate', 'help'],
alias: { i: 'input', v: 'validate', h: 'help', p: 'project' },
unknown: function () { console.log('Unknown argument') }
})
if (argv.help || argv.h) {
// help output goes here
}
// read config.toml
}
src/toml.js
module.exports = (filename) => {
const TOML = require('@iarna/toml')
const fs = require('fs');
return TOML.parse(fs.readFileSync(filename, 'utf-8'));
}
It looks like you have everything set up correctly. Inside index.js you should be able to use toml('filename.toml'). This function returns the contents of filename.toml as an object.
src/toml.js exports a function that parses a .toml file. When you use const toml = require('./src/toml') inside index.js, you are assigning toml to the exports of ./src/toml.js (which is the parsing function). This means that in index.js, toml represents the function in ./src/toml.js.
You can then use the toml('filename.toml') as many times as you want in index.js.
Here is your index.js code modified to read the config.toml file and store the object in config...
'use strict'
var minimist = require('minimist')
const toml = require('./src/toml')
module.exports = () => {
var argv = minimist(process.argv.slice(2), {
string: ['input', 'project'],
boolean: ['validate', 'help'],
alias: { i: 'input', v: 'validate', h: 'help', p: 'project' },
unknown: function () { console.log('Unknown argument') }
})
if (argv.help || argv.h) {
// help output goes here
}
// read config.toml
const config = toml('config.toml')
}
You should be able to call toml('path/to/config/that/you/want/to/read.toml')
You have required the module src/toml.js. This module exports a function; it doesn't really matter how that function is declared in this case. Whenever you import the module, you are given this function.
So:
const iCanCallThisReferenceHoweverIWant = require('./src/toml');
iCanCallThisReferenceHoweverIWant('path/to/a/toml/file.toml');
I think you need to require your dependencies in the toml.js outside of the function definition - I assume it may yell at you about that, but I'm not super-duper-confident about it :)
Proposition + some refactoring:
src/toml.js
const TOML = require('@iarna/toml')
const fs = require('fs');
const readTOML = (filename) => TOML.parse(fs.readFileSync(filename, 'utf-8'));
module.exports = readTOML;
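Either way, reusing it for your three files is then just three calls (the file names below are examples):
const toml = require('./src/toml');

const config = toml('config.toml');
const second = toml('second.toml'); // hypothetical second file
const third = toml('third.toml'); // hypothetical third file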
I have multiple files where I need to rename the hashed CSS from the manifest.json, but I can't seem to get it to work.
I have tried creating an array with multiple sources and destinations, but I think I'm missing something. Basically, I want it to change the values in multiple files to match the specific CSS files from the manifest.json.
import gulp from 'gulp';
import mergeStream from 'merge-stream';
import rev from 'gulp-rev';
import revRewrite from 'gulp-rev-rewrite';
import revDelete from 'gulp-rev-delete-original';
// import { hashFiles, hashReplace } from './hashTask.config';
// ^ commented out: the imported arrays are inlined below for you to see
// Paths to hash all the files to bust the cache
const hashFiles = [
'./css/**/*.css',
'../../../../com-holding-page/css/*.css',
'!./css/lib/**'
];
const hashReplace = [
{ src : '../../../../com-holding-page/index.html', dest : '../../../../com-holding-page/' },
{ src : '../../../../app/design/frontend/test/default/template/page/html/head.phtml', dest : '../../../../app/design/frontend/test/default/template/page/html/' },
{ src : '../../../../app/design/frontend/test/default/layout/local.xml', dest : '../../../../app/design/frontend/test/default/layout/' }
];
//////////
const hashTask = () => {
return gulp.src(hashFiles, {base: 'css'})
.pipe(rev())
.pipe(revDelete()) // Remove the unrevved files
.pipe(gulp.dest('./css/'))
.pipe(rev.manifest('rev-manifest.json', {
merge: true // Merge with the existing manifest if one exists
}))
.pipe(gulp.dest('./css/'))
};
const hashIncludes = () => {
const manifest = gulp.src('./css/rev-manifest.json');
return mergeStream(
hashReplace
.map( (file) => {
return gulp.src(file.src)
.pipe(revRewrite({
manifest: manifest,
replaceInExtensions: ['.html', '.phtml', '.xml']
}))
.pipe(gulp.dest(file.dest));
})
);
};
export { hashTask, hashIncludes };
Change
const manifest = gulp.src('./css/rev-manifest.json');
to this:
const { readFileSync } = require('fs'); // import this in header
// replace the below line of code with your code and make sure the rev-manifest file path is correct
const manifest = readFileSync('./rev-manifest.json'); // my rev file path points to the root of the project folder
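With that change, hashIncludes would look like the sketch below (same task as before, using the ES module imports the original file already uses; adjust the manifest path to wherever hashTask actually writes rev-manifest.json — recent versions of gulp-rev-rewrite want the manifest contents as a Buffer/string rather than a stream):
import { readFileSync } from 'fs';

const hashIncludes = () => {
  // Read the manifest contents directly instead of creating a stream
  const manifest = readFileSync('./css/rev-manifest.json');
  return mergeStream(
    hashReplace.map((file) =>
      gulp.src(file.src)
        .pipe(revRewrite({
          manifest,
          replaceInExtensions: ['.html', '.phtml', '.xml']
        }))
        .pipe(gulp.dest(file.dest))
    )
  );
};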
I'm trying to read directory tree and save it into a JSON file using node.js with the following logic:
Read specified directory
Transform (or get in the first place if possible?) the paths into relative format. Something like this:
Directory: C:/test
[
{"C:/test folder 1": [
{"folder 1": ["file1.txt", "file2.txt"]},
"file1.txt",
"file2.txt",
...
]},
{"C:/test folder 2": [
{"folder 1": ["file1.txt", "file2.txt"]},
"file1.txt",
"file2.txt",
...
]},
"file1.txt",
"file2.txt",
"file3.txt",
...
]
Stream that object into a JSON file (I need to stream it because the object is quite big)
I was trying to use different npm modules like:
walkdir, walker, node-klaw + fs-extra.writeJson, json-object-stream, etc.
But I keep getting different errors trying to stream it into a JSON file.
The following script should work for you; just replace __dirname with the absolute path of the location you want to list, and update the filename to write to.
I am not checking here whether the initial call actually gets a valid folder, and the / path separator may need adjusting depending on the OS.
The removeNamespace flag is only there so that the first level keeps the absolute path while deeper levels do not.
var fs = require("fs");
function getContentForLocation(location, removeNamespace) {
let result = [];
let content = fs.readdirSync(location);
content.forEach(c => {
if(fs.statSync(`${location}/${c}`).isFile()) result.push(c);
else {
let name = removeNamespace ? c : `${location}/${c}`;
result.push({
[`${name}`]: getContentForLocation(`${location}/${c}`, true)
})
}
});
return result;
}
let tree = JSON.stringify(getContentForLocation(__dirname))
fs.writeFileSync(`${__dirname}/test.json`, tree)
Async version:
var fs = require("fs");
function getContentForLocation(location, callback, removeNamespace) {
let result = [];
let folderNames = [];
fs.readdir(location, (err, content) => {
content.forEach(c => {
if(fs.statSync(`${location}/${c}`).isFile()) result.push(c);
else folderNames.push(c);
});
if(folderNames.length === 0) callback(result)
else folderNames.forEach(folder => {
let name = removeNamespace ? folder : `${location}/${folder}`;
getContentForLocation(`${location}/${folder}`, resolveCallback.bind(this, name, folder),true)
})
});
function resolveCallback(name, folder, content) {
result.push({ [name]: content });
folderNames = folderNames.filter(f => f !== folder);
if(folderNames.length === 0) callback(result);
}
}
getContentForLocation(__dirname, results => {
console.log("resolved", results)
fs.writeFile(`${__dirname}/test.json`, JSON.stringify(results), (err) => { console.log(err) })
});
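For reference, on newer Node versions the same recursion can be written with fs.promises and async/await, which avoids the manual callback bookkeeping (a sketch producing the same output shape):
const fsp = require('fs').promises;

async function getContentForLocation(location, removeNamespace) {
  const result = [];
  const content = await fsp.readdir(location);
  for (const c of content) {
    const stat = await fsp.stat(`${location}/${c}`);
    if (stat.isFile()) {
      result.push(c);
    } else {
      // Deeper levels drop the absolute prefix, as in the callback version
      const name = removeNamespace ? c : `${location}/${c}`;
      result.push({ [name]: await getContentForLocation(`${location}/${c}`, true) });
    }
  }
  return result;
}

getContentForLocation(__dirname)
  .then(tree => fsp.writeFile(`${__dirname}/test.json`, JSON.stringify(tree)));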