Debundle a single browser js bundle into its original files - javascript

I have a js file with the following structure:
define("example/example", ["exports", "example/something", "example/other", "ex/sthg"], function(e, t, n, a, r) {
    e["default"] = {something: 'example'}
}), define("example/module", ["exports"], function(e) {
    // ...
}), define("example/module2", ["exports"], function(e) {
    // and so on...
}), require("example/sthg")["default"].callSomething({
    //
});
I'm looking for a way to unpack this file into the original files, e.g. example/example.js, example/module.js, example/module2.js and so on.
I've tried browser-unpack, but with no effect, so I assume the file isn't bundled with Browserify. The syntax looks similar to AMD or RequireJS.
Is there any way to debundle a file like that?

A quick monkey-patch solution that was good enough for me: it creates the module tree, with each file containing its import list plus the original function(e, t, n, ...) body as the file content. Maybe it can be useful for someone. In my case the bundle can be fully debundled this way (of course only to plain JS, not JSX), but I didn't want to play with JS parsers.
var fs = require('fs');
var path = require('path');
var mkdirp = require('mkdirp');

// positional names of the define() callback arguments
const defineArgs = ['e', 't', 'n', 'a', 'r']

function parseFunction(func, deps) {
    let funcStr = func.toString(),
        depsArr = [],
        depsStr = ''
    // just to know what e, t, n, a, r refer to
    for (let i in deps) {
        if (deps[i] != 'exports') {
            depsArr.push('const ' + defineArgs[i] + ' = require("' + deps[i] + '")')
        }
    }
    depsStr = depsArr.join('\n')
    return depsStr + '\n' + funcStr
}

// override define
var define = function(filename, deps, func) {
    console.log(filename + ': ' + deps)
    let outPath = 'src/' + filename + '.js',
        outDir = path.dirname(outPath),
        out = parseFunction(func, deps)
    // create the target directory synchronously so the write below can't race it
    try {
        fs.lstatSync(outDir)
    } catch (e) {
        mkdirp.sync(outDir)
    }
    fs.writeFile(outPath, out, function(err) {
        if (err) { console.error(err) }
    })
}

// paste your bundled script below and run this via node
// define("example/example", ["exports", "example/something", .......

Related

Browserify require - inline way. How?

Is there any possibility to include one file into another with Browserify? I mean not the standard Browserify behavior, but pasting one file into another at a specific place.
file1.js
console.log("abc");
file2.js
requirePaste("file1.js");
console.log("def");
output.js
console.log("abc");
console.log("def");
I need it for spreading an ES6 class across multiple files, following this pattern:
ObjectManager.js
ObjectManager_Events.js
ObjectManager_Rendering.js
These files all describe one class, so I can do something like this:
class ObjectManager {
    constructor() {
    }
    requirePaste("./ObjectManager_Events");
    requirePaste("./ObjectManager_Rendering");
}
EDIT:
I made a simple transform plugin for Browserify and it works great. There is one problem though: it won't work with Watchify, because inline-required files aren't counted as being watched. Any idea on how to fix this?
const through = require('through2');
const fs = require('fs');
const path = require('path');

// matches require(/*inline*/ "path") occurrences
const regex = /require\([\s]*\/\*inline\*\/[\s]*"(.+)"\)/g;

function process(pathToFile, contents) {
    let occurence;
    while ((occurence = regex.exec(contents))) {
        contents = processOne(pathToFile, contents, occurence);
    }
    return contents;
}

function processOne(pathToFile, contents, occurence) {
    const dir = path.dirname(pathToFile);
    const includePath = occurence[1] + ".js";
    const range = [occurence.index, occurence.index + occurence[0].length - 1];
    const pathToInnerFile = dir + "/" + includePath;
    var innerContents = fs.readFileSync(pathToInnerFile, 'utf8');
    innerContents = process(pathToInnerFile, innerContents);
    // splice the inner file's contents in place of the require(/*inline*/ ...) call
    var output = "";
    output += contents.substring(0, range[0]);
    output += innerContents;
    output += contents.substring(range[1] + 1);
    return output;
}

module.exports = function(pathToFile) {
    return through(
        function(buf, enc, next) {
            var result = process(pathToFile, buf.toString('utf8'));
            this.push(result);
            next();
        }
    );
};
That did the trick:
Basically, we inform Watchify about each inlined file by emitting a 'file' event.
const includedFiles = [];

function process(pathToFile, contents) {
    includedFiles.push(pathToFile);
    let occurence;
    while ((occurence = regex.exec(contents))) {
        contents = processOne(pathToFile, contents, occurence);
    }
    return contents;
}

...

module.exports = function(pathToFile) {
    return through(
        function(buf, enc, next) {
            var result = process(pathToFile, buf.toString('utf8'));
            this.push(result);
            // tell Watchify to watch every file that was inlined
            includedFiles.forEach((filePath) => this.emit("file", filePath));
            includedFiles.length = 0;
            next();
        }
    );
};
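For reference, a minimal wiring sketch; the entry path src/app.js and the transform filename inline-transform.js are assumptions, not from the original post:

// build-watch.js - hedged sketch of using the transform with Browserify + Watchify
const browserify = require('browserify');
const watchify = require('watchify');
const fs = require('fs');

const b = browserify('src/app.js', { cache: {}, packageCache: {}, plugin: [watchify] })
    .transform(require('./inline-transform.js')); // the transform shown above

function bundle() {
    b.bundle()
        .on('error', (err) => console.error(err.message))
        .pipe(fs.createWriteStream('bundle.js'));
}

b.on('update', bundle); // fires again when any watched file (including inlined ones) changes
bundle();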

IPFS in Javascript 'cat' function doesn't work

I created this test case to prove that the cat method is not working for me with the IPFS JavaScript library. What am I doing wrong? My console output doesn't show anything from within the node.files.cat callback; it's as if the (err, filestream) callback is not being called at all. I know my multihash is somewhat working, because if I change it I get a fatal error. Right now, though, it seemingly just locks up and pauses after NODE READY.
const IPFS = require('ipfs')
const path = require('path')
const os = require('os')
const fs = require('fs')

console.log('ipfs test ')

var mhash = "Qmc5LfkMVAhvzip2u2RjRBRhgVthtSokHsz4Y5bgaBCW2R";

// Create the IPFS node instance
const node = new IPFS()

node.on('ready', () => {
    // Your node is now ready to use \o/
    console.log('NODE READY')

    /*
    THIS WORKS
    var test_rstream = fs.createReadStream( path.join(__dirname, '.', '/public/sample_land_file.json') )
    var wstream = fs.createWriteStream(os.tmpdir() + '/lobc_cache/' + 'Qmc5LfkMVAhvzip2u2RjRBRhgVthtSokHsz4Y5bgaBCW2R');
    wstream.on('finish', function() {
        console.log('Written ' + wstream.bytesWritten + ' ' + wstream.path);
        test_rstream.close()
    });
    test_rstream.pipe(wstream);
    */

    node.files.cat("Qmc5LfkMVAhvzip2u2RjRBRhgVthtSokHsz4Y5bgaBCW2R", function (err, filestream) {
        console.log('WHY ISNT THIS FIRING ') // i never see this logged
        console.log(filestream)
        console.log(os.tmpdir())
        if (!fs.existsSync(os.tmpdir() + '/lobc_cache')) {
            fs.mkdirSync(os.tmpdir() + '/lobc_cache');
        }
        var wstream = fs.createWriteStream(os.tmpdir() + '/lobc_cache/' + 'Qmc5LfkMVAhvzip2u2RjRBRhgVthtSokHsz4Y5bgaBCW2R');
        result = '';
        wstream.on('finish', function() {
            console.log('Written ' + wstream.bytesWritten + ' ' + wstream.path);
            filestream.close()
        });
        filestream.pipe(wstream);
        // wstream.end();
        // file will be a stream containing the data of the file requested
    })

    // stopping a node
    node.stop(() => {
        // node is now 'offline'
    })
})

node.on('start', () => {
    console.log('NODE START')
})
This looks like a bug. A quick way to solve it is to put the node.files.cat call inside the callback for .on('ready'); it seems that bitswap drops requests made before the node is online.
Let me know if this works.
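A minimal sketch of that structure, mirroring the callback-style js-ipfs API used in the question (the pipe target is illustrative):

const IPFS = require('ipfs')
const node = new IPFS()

node.on('ready', () => {
    // issue the cat only after the node reports ready
    node.files.cat('Qmc5LfkMVAhvzip2u2RjRBRhgVthtSokHsz4Y5bgaBCW2R', (err, filestream) => {
        if (err) { return console.error(err) }
        filestream.pipe(process.stdout) // or pipe into a write stream as in the question
    })
})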

Node.js require caching

I am really stuck on the Node.js cache system. I have this structure for my project:
Project/
    apps/
        jobs_processor/
            app.js
            processors.js
            processors/
    libs/
        queue_manager.js
queue_manager.js requires processors.js:
var processors = require("../apps/jobs_processor/processors.js");
app.js also requires processors.js:
var processors = require("./processors.js");
If I take the documentation into account, the path may have to be the same to obtain the same object, is that right? If so, how can I achieve that (have the same path)?
Thanks.
EDIT:
I found a solution to my problem.
Here is the first version of the queue_manager.js file:
var _ = require("lodash");
var Utils = require("./utilities");
var Processors = require("../apps/jobs_processor/processors");
var Logger = require("./logger");
var QUEUES_CACHE = {};
exports.createJob = createJob;
exports.getCacheObject = getCacheObject;
function createJob(name, data) {
var cacheId = name.replace(/ /g, "_");
Logger.info("Cache ID: " + cacheId);
if (!QUEUES_CACHE[ cacheId ]) {
_.each(Processors, function (processor) {
Logger.debug("PROCESSOR NAME: " + processor.name);
Logger.debug("JOB NAME: " + name);
if (processor.name === name)
QUEUES_CACHE[ cacheId ] = processor;
});
if (!QUEUES_CACHE[ cacheId ])
throw new Error("Processor for job \"" + name + "\" not found.");
}
Logger.debug(Object.keys(QUEUES_CACHE));
return QUEUES_CACHE[ cacheId ].queue.add(data);
}
function getCacheObject() {
return QUEUES_CACHE;
}
And here is the latest version of the same file:
var _ = require("lodash");
var Utils = require("./utilities");
var Logger = require("./logger");
exports.createJob = createJob;
function createJob(name, data) {
var Processors = require("../apps/jobs_processor/processors");
var processor;
_.each(Processors, function (element) {
Logger.debug("Processor name: " + element.name);
if (element.name === name)
processor = element;
});
return processor.queue.add(data);
}
Each time the createJob method is called, I require the processors module, which is an array of all the job processors I have created.
Node.js will resolve the path before caching the module.
As long as your relative paths resolve to the same absolute path on disk, you're fine.
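A quick way to convince yourself of this (the paths are the ones from the question; the output is whatever they resolve to on your machine):

// in libs/queue_manager.js
console.log(require.resolve("../apps/jobs_processor/processors.js"));

// in apps/jobs_processor/app.js
console.log(require.resolve("./processors.js"));

// Both print the same absolute path; that path is the key in require.cache,
// so both require calls return the very same module object.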

Node.js Combine module/exports from multiple files

I wish to split a large configuration .js file into multiple smaller files yet still combine them into the same module. Is this common practice, and what is the best approach, so that the module does not need extending when new files are added?
An example such as the following, but without needing to update math.js when a new file is added:
math
- add.js
- subtract.js
- math.js
// add.js
module.exports = function(v1, v2) {
    return v1 + v2;
}

// subtract.js
module.exports = function(v1, v2) {
    return v1 - v2;
}

// math.js
var add = require('./add');
exports.add = add;
var subtract = require('./subtract');
exports.subtract = subtract;

// app.js
var math = require('./math');
console.log('add = ' + math.add(5,5));
console.log('subtract = ' + math.subtract(5,5));
You can use the spread operator (...), or if that doesn't work, Object.assign.
module.exports = {
    ...require('./some-library'),
};
Or:
Object.assign(module.exports, require('./some-library'));
If your Node.js version supports the spread (...) operator (check it here), you can do:
module.exports = {
    ...require('./src/add'),
    ...require('./src/subtract')
}
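Note that spreading only picks up named exports; with the question's module.exports = function(v1, v2) {...} form there are no own enumerable properties to copy. A hedged sketch of the adjusted files (file names taken from the question):

// add.js - export a named function so it can be spread onto math.js
exports.add = function (v1, v2) {
    return v1 + v2;
};

// subtract.js
exports.subtract = function (v1, v2) {
    return v1 - v2;
};

// math.js
module.exports = {
    ...require('./add'),
    ...require('./subtract')
};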
You can do this:
// math.js
module.exports = function (func) {
    return require('./' + func);
}

// use it like this
// app.js
var math = require('./math');
console.log('add = ' + math('add')(5,5));
console.log('subtract = ' + math('subtract')(5,5));
You could create a subfolder and assign its functions and other objects automatically.
mymodule.js
const pluginDir = __dirname + '/plugins/';
const fs = require('fs');

module.exports = {};

// merge every plugin file's exports onto this module
fs.readdirSync(pluginDir).forEach(file => {
    Object.assign(module.exports, require(pluginDir + file));
});
plugins/mymodule.myfunction.js
module.exports = {
    myfunction: function() {
        console.log("It works!");
    }
};
index.js
const mymodule = require('./mymodule.js');
mymodule.myfunction();
// It works!

Kendo Modules and RequireJS (r.js) not playing nice. (Error on loading)

I am using Kendo UI version 2013.1.514 and RequireJS (r.js version 2.1.6)
My project runs perfectly under the standard RequireJS on-demand loading.
However, when I try to use the optimizer, none of the Kendo modules will load. Including any of them gives the famous and irritating Uncaught Error: Mismatched anonymous define() module: error.
This is my configuration:
{
    "baseUrl": "../Scripts",
    "name": "../Scripts/js_modules/base_module.js",
    "include": [],
    paths: {
        k: "Frameworks/kendo-2013.1.514-fixed",
        jquery: "Frameworks/jQuery/jquery.min",
        jplugin: "Frameworks/jQuery",
        f: "Frameworks/"
    },
    shim: {
        'jquery.dataSelector': {
            deps: ['jquery'],
            exports: 'jquery.dataSelector'
        },
    },
    "exclude": [],
    "optimize": "none",
    "out": "built-base-modules.js"
}
And base_module.js
define(function (require) {
    // Don't do anything with them.
    // Just define them.
    require("jquery");
    require("k/kendo.core.min");
    //require("k/kendo.userevents.min");
    //require("kendoize/kendoize")
});
I am not sure whether this is related, but the dependency tracing does not appear to work correctly either: it will successfully trace one level deep, but not two. I had attempted to add core.min and userevents.min manually to see if that resolved the issue.
Has anyone experienced this issue with Kendo, or perhaps something similar? I checked through a bunch of existing questions but didn't find anything connected to this setup.
I can post additional information if needed, but the detailed console log shows the crash happening somewhere inside require.js, not at a usable syntax error.
Additional Information
The HTML/Javascript on the page itself
<script src="/Business/Scripts/require.js"></script>
<script>
(function () {
"use strict";
var configObject = {
shim: {
'jquery.dataSelector': {
deps: ['jquery'],
exports: 'jquery.dataSelector'
},
},
baseUrl: "http://760.j6.local:80/Business/Scripts",
paths: {
app: "http://760.j6.local:80/Business",
k: "http://760.j6.local:80/Business" + "/Scripts/Frameworks/kendo-2013.1.514",
jquery: "http://760.j6.local:80/Business" + "/Scripts/Frameworks/jQuery/jquery.min",
jplugin: "http://760.j6.local:80/Business" + "/Scripts/Frameworks/jQuery",
f: "http://760.j6.local:80/Business" + "/Scripts/Frameworks/",
}
};
requirejs.config(configObject);
}());
</script>
<script src="/Business/_build/built-base-modules.js"></script>
I solved it.
While Kendo does support RequireJS, it uses 'require' in a dynamic way that is not very friendly to the optimizer.
My solution was to write a script that "unwraps" each Kendo file: it extracts the needed dependencies and then writes the script into a new file.
var kendoFiles = ["kendo.autocomplete.min.js", "kendo.binder.min.js",
"kendo.calendar.min.js", "kendo.colorpicker.min.js",
"kendo.columnmenu.min.js", "kendo.combobox.min.js",
"kendo.core.min.js", "kendo.data.min.js", "kendo.data.odata.min.js",
"kendo.data.xml.min.js", "kendo.dataviz.chart.min.js",
"kendo.dataviz.core.min.js", "kendo.dataviz.gauge.min.js",
"kendo.dataviz.min.js", "kendo.dataviz.sparkline.min.js",
"kendo.dataviz.stock.min.js", "kendo.dataviz.svg.min.js",
"kendo.dataviz.themes.min.js", "kendo.dataviz.vml.min.js",
"kendo.datepicker.min.js", "kendo.datetimepicker.min.js",
"kendo.draganddrop.min.js", "kendo.dropdownlist.min.js",
"kendo.editable.min.js", "kendo.editor.min.js",
"kendo.filtermenu.min.js", "kendo.fx.min.js", "kendo.grid.min.js",
"kendo.groupable.min.js", "kendo.imagebrowser.min.js",
"kendo.list.min.js", "kendo.listview.min.js", "kendo.menu.min.js",
"kendo.mobile.actionsheet.min.js",
"kendo.mobile.application.min.js", "kendo.mobile.button.min.js",
"kendo.mobile.buttongroup.min.js", "kendo.mobile.listview.min.js",
"kendo.mobile.loader.min.js", "kendo.mobile.min.js",
"kendo.mobile.modalview.min.js", "kendo.mobile.navbar.min.js",
"kendo.mobile.pane.min.js", "kendo.mobile.popover.min.js",
"kendo.mobile.scroller.min.js", "kendo.mobile.scrollview.min.js",
"kendo.mobile.shim.min.js", "kendo.mobile.splitview.min.js",
"kendo.mobile.switch.min.js", "kendo.mobile.tabstrip.min.js",
"kendo.mobile.view.min.js", "kendo.multiselect.min.js",
"kendo.numerictextbox.min.js", "kendo.pager.min.js",
"kendo.panelbar.min.js", "kendo.popup.min.js",
"kendo.reorderable.min.js", "kendo.resizable.min.js",
"kendo.router.min.js", "kendo.selectable.min.js",
"kendo.slider.min.js", "kendo.sortable.min.js",
"kendo.splitter.min.js", "kendo.tabstrip.min.js",
"kendo.timepicker.min.js", "kendo.tooltip.min.js",
"kendo.touch.min.js", "kendo.treeview.min.js",
"kendo.upload.min.js", "kendo.userevents.min.js",
"kendo.validator.min.js", "kendo.view.min.js",
"kendo.window.min.js"
];
var sourcePath = "../../Scripts/Frameworks/kendo-2013.1.514";
var destPath = "../../Scripts/kendo-rs";

function processFiles() {
    var i = -1;
    var l = kendoFiles.length;
    function nextStep() {
        i++;
        if (i < l) {
            var fileName = kendoFiles[i];
            processOne(fileName, nextStep);
        } else {
            console.log("All finished");
        }
    }
    nextStep();
}

function processOne(fileName, callback) {
    console.log("Processing: " + fileName);
    var fullName = sourcePath + "/" + fileName;
    var fs = require('fs');
    fs.readFile(fullName, 'utf8', function (err, data) {
        if (err) {
            console.log(err);
        } else {
            getFileDependencies(fileName, data);
            callback();
        }
    });
}

function saveCode(fileName, code, moduleDependencies) {
    var fs = require('fs');
    var moduleDependenciesString = '"' + moduleDependencies.join('", "') + '"';
    var newCode = "define([" + moduleDependenciesString + "]," + "\r\n" +
        code + "\r\n" +
        ");";
    fs.writeFile(destPath + "/" + fileName, newCode, function (err) {
        if (err) {
            console.log(err);
        } else {
            console.log(fileName + " was saved!");
        }
    });
}

function getFileDependencies(fileName, code) {
    // * This is where the magic happens.
    // The kendo modules call define with the dependencies and the function.
    define = function (moduleDependencies, code) {
        for (var i = 0; i < moduleDependencies.length; i++) {
            var str = moduleDependencies[i];
            str = str.replace("./", "k/");
            moduleDependencies[i] = str;
        }
        /// OPTIONAL STEP
        /// Set this to your jQuery path. If you don't include jQuery globally,
        /// you run the risk of a race condition.
        moduleDependencies.push("jquery");
        console.log("Found dependencies: [" + moduleDependencies.join(":") + "]");
        saveCode(fileName, code, moduleDependencies);
    };
    define.amd = true; // Needed to make sure define gets called
    try {
        var z = eval(code);
    } catch (e) {
        // Yes, Pokémon error handling...
        // We don't care if the code actually runs, so long as 'define' gets called.
    }
}

console.log("Starting");
processFiles();
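After the script has run, the build config's k path can be pointed at the rewritten files instead of the stock ones. A hedged sketch, based on the destPath above and the earlier build config (the exact paths are assumptions):

// r.js build config sketch - only the "k" prefix changes relative to the original config
{
    baseUrl: "../Scripts",
    name: "../Scripts/js_modules/base_module.js",
    paths: {
        k: "kendo-rs",                          // the unwrapped, define([...])-wrapped copies
        jquery: "Frameworks/jQuery/jquery.min",
        jplugin: "Frameworks/jQuery",
        f: "Frameworks/"
    },
    optimize: "none",
    out: "built-base-modules.js"
}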
We had the same problem as described: require.js + Kendo + optimization resulting in the very same error after a successful build process.
However, the solution above does not suit me well, as it deals with 3rd-party code. I looked for another solution and found that if I exclude the whole of Kendo from the build, it is still downloaded at runtime. I use kendo.mobile.min.js, which includes all dependencies and plays nicely with the rest of the project, which is optimized. As a result the whole app needs to download just 3 JS files: require.js, the optimized build, and Kendo.
In other words, if you can afford to load Kendo as a separate resource, add the following to the build config:
{
    baseUrl: "...",
    exclude: ['kendo.min'],
    include: ['jquery'],
    (...)
}
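A sketch of what the runtime side might look like under that setup (the path and module ID are assumptions; the point is only that the excluded module is fetched as its own file at runtime):

// runtime RequireJS config sketch - kendo stays outside the optimized bundle
requirejs.config({
    baseUrl: '/Business/Scripts',
    paths: {
        'kendo.min': 'Frameworks/kendo-2013.1.514/kendo.mobile.min'
    }
});

// any module that depends on 'kendo.min' triggers a separate download of that one file
require(['kendo.min'], function () {
    // kendo widgets are now available
});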
