Loading bundled AMD modules with SystemJS - javascript

I have a couple of AMD modules compiled using TypeScript's --outFile option into a single file:
define("partA", ["require", "exports"], function (require, exports) {
"use strict";
function partAFunc() {
console.log('partAFunc');
return 'partAFunc';
}
exports.partAFunc = partAFunc;
});
define("partB", ["require", "exports"], function (require, exports) {
"use strict";
exports.partB = 42;
});
define("partC", ["require", "exports"], function (require, exports) {
...
});
Now I want to load only the partA module and call its partAFunc(), so I do the following in Node.js:
SystemJS.config({
    map: {
        'main': 'my-bundle.js',
    },
});

SystemJS.import('main').then((m) => {
    SystemJS.import('partA').then((m) => {
        m.partAFunc();
    });
});
The first import, SystemJS.import('main'), just registers all the modules, and then SystemJS.import('partA') works because the partA module is already registered (or at least that's my guess as to what it does).
However, why can't I just use SystemJS.import('partA') and set the bundle as a dependency:
SystemJS.config({
    meta: {
        'partA': {
            deps: [ 'my-bundle.js' ],
        }
    }
});

SystemJS.import('partA').then((m) => {
    m.partAFunc();
});
The meta is completely ignored. The doc at https://github.com/systemjs/systemjs/blob/master/docs/config-api.md#meta says:
Dependencies to load before this module. Goes through regular paths and map normalization. Only supported for the cjs, amd and global formats.
It looks like SystemJS first checks whether the file partA exists (which it obviously doesn't) and throws an error (I tested it with an existing file and the meta config worked):
(node:60981) UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 1): Error: ENOENT: no such file or directory, open '/Users/.../partA'
Instantiating /Users/.../partA
Loading partA
I'd also expect the following to work, given that the first variant with two nested SystemJS.import calls works:
SystemJS.config({
    map: {
        'partA': 'my-bundle.js',
    },
});

SystemJS.import('partA').then((m) => {
    // m.partAFunc();
    console.log(m);
});
This prints an empty object. It looks like, when there is more than one module in a single file, SystemJS just registers them and doesn't load any of them?
I read all the documents in https://github.com/systemjs/systemjs/tree/master/docs but I think I'm still lost.

What you need to do is use the bundles setting and set your bundle like this:
bundles: {
    'my-bundle.js': ['partA', 'partB', 'partC'],
},
Roughly, this tells SystemJS "when you look for module partA, fetch and execute the module named my-bundle.js and you'll find partA there."
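Put together with the import from your question, it would look roughly like this (a minimal sketch; it assumes my-bundle.js resolves relative to SystemJS's baseURL, just as in your map example):
SystemJS.config({
    bundles: {
        // "these module names are found inside my-bundle.js"
        'my-bundle.js': ['partA', 'partB', 'partC'],
    },
});

SystemJS.import('partA').then((m) => {
    // SystemJS fetches and executes my-bundle.js first, then resolves partA from it
    m.partAFunc();
});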
Your approach using meta cannot work. Your meta setting does not say "don't try to fetch a module named partA and instead fetch my-bundle.js"; it says "when you process partA, in addition to the dependencies it already has, add my-bundle.js to the list of dependencies." SystemJS will still fetch partA. There's no reason for it to wait until it has executed my-bundle.js before it tries to fetch partA, so it launches the fetch immediately, and that fetch fails.

Related

Load ES6 module A if available, fall back to module B if not available

I'm currently working on a JavaScript library where we would like to migrate legacy AMD modules to ES6 modules.
The sample code for this library uses an AMD plugin that looks like this:
define([
    "require",
    "../../common/ModuleLoad"
], function(require,
            ModuleLoad) {
    /**
     * Dynamically load the info panel.
     * The actual implementation depends on whether the military symbology package is present.
     */
    return {
        load: function(name, parentRequire, onload, config) {
            ModuleLoad.loadConditional(
                "./SpotReportInfoPanelMilSym",
                "./SpotReportInfoPanelFallback",
                require,
                onload,
                config,
                ["MilSymMissing", "Military Symbology has not been installed.<br />This sample has additional features if Military Symbology is installed."]
            );
        }
    };
});
This plugin has the following module as a dependency:
define([
    "./toast",
    "./request"
], function(toast,
            request) {
    return {
        loadConditional: function(preferred, fallback, parentRequire, onload, config, message) {
            if (config && config.isBuild) {
                onload();
            } else {
                // do ajax call to check for presence of module.
                return request(parentRequire.toUrl(preferred + ".js")).then(function() {
                    // the module exists -> load it using the AMD module loader
                    parentRequire([preferred], function(module) {
                        onload(module);
                    });
                }).catch(function(err) {
                    // the module is not present -> return fallback
                    if (message) {
                        toast.error(message[0], message[1], err);
                    }
                    parentRequire([fallback], function(module) {
                        onload(module);
                    });
                });
            }
        }
    };
});
Basically, what this code does is check for the existence of a file named SpotReportInfoPanelMilSym.js and return its content as an AMD module. If SpotReportInfoPanelMilSym.js is not found, it falls back to SpotReportInfoPanelFallback.js.
The reason for this is that SpotReportInfoPanelMilSym.js is only shipped with an optional add-on for this library, whereas SpotReportInfoPanelFallback.js is always available. The sample is supposed to automatically pick the right SpotReportInfoPanel implementation depending on the availability of this add-on.
Is there a way to achieve this same behavior with ES6 modules?
We're planning to ship our sample code with Webpack, so a Webpack-based solution would be acceptable. However, I would prefer our sample code to work with any loader or bundler, and thus would like to avoid any Webpack-specific stuff in our sample code if at all possible.
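Not part of the original sample, but for comparison, the native ES-module counterpart of this pattern would presumably be built on dynamic import(), which returns a promise that rejects when the module cannot be loaded (a sketch only; the file names are the hypothetical ones from the AMD plugin above, and how a given bundler treats an optional file that may be missing at build time is a separate question):
// Sketch of the same conditional load with dynamic import().
async function loadInfoPanel() {
    try {
        // Optional add-on module; only present when Military Symbology is installed.
        return await import("./SpotReportInfoPanelMilSym.js");
    } catch (err) {
        // Add-on missing -> fall back to the always-available implementation.
        return await import("./SpotReportInfoPanelFallback.js");
    }
}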

require.js require a module with index.js

So I'm trying to set up Typescript and Chutzpah for testing purposes. Typescript is set up to output in this format:
define(['require', 'exports', './someModule'], function(require, exports, someModule) {
//examplecode
});
This works fine; the problem occurs when someModule is actually a directory with an index.js.
/app
    app.js
    /someModule
        index.js
require.js is unable to resolve someModule in this way and the test fails.
Is there any way to tell require.js that this is a module?
RequireJS won't automatically check for the presence of index.js and load that as your module. You need to tell RequireJS that when you want to load someModule, it should load someModule/index. I'd set a map in my call to require.config:
require.config({
    [ ... ]
    map: {
        '*': {
            someModule: 'someModule/index',
        }
    },
});
You have to adjust the name you give there so that it is a path relative to your baseUrl. It's not clear from the information you give in your question what it should be.
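For illustration only, if your baseUrl happened to be the app directory from the layout above (an assumption, not something stated in the question), the configuration might look like this:
require.config({
    baseUrl: 'app',   // assumed; wherever app.js and someModule/ live
    map: {
        '*': {
            // any module asking for 'someModule' gets 'someModule/index',
            // i.e. app/someModule/index.js
            someModule: 'someModule/index',
        }
    },
});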
(For the record, there's also a packages setting that you could probably tweak to do what you want, but putting something in packages says "this is a package", which is not what you appear to have here. So I would not use it for what you are trying to do.)
I didn't like the configuration in map either. The simplest way I accomplished this was by writing a plugin for require.
Let's name the plugin mod, so that it is used as mod!module/someModule; you could also call it index, as in index!module/someModule, whatever suits you best.
define(function(require, exports, module) {
    // loading module/someModule/index.js with `mod!`
    var someModule = require('mod!module/someModule');

    // whatever this is about ..
    module.exports = { .. };
});
So let's assume you have paths set in require's configuration with some sort of project structure:
- app
  - modules
    - someModule/index.js   // the index we want to load
    - someModule/..
    - someModule/..
    - etc
  - plugins
    - mod.js                // plugin to load a module with index.js
Require's config:
require.config({
    paths: {
        'module': 'app/modules',
        // the plugin we're going to use so
        // require knows what mod! stands for
        'mod': 'app/plugins/mod.js'
    }
});
To read all the aspects of how to write a plugin, read the docs at requirejs.org. The simplest version would be to just rewrite the name of the requested "module" you are attempting to access and pass it back to load.
app/plugins/mod.js
(function() {
    define(function () {
        function parse(name, req) {
            return req.toUrl(name + '/index.js');
        }
        return {
            normalize: function(name, normalize) {
                return normalize(name);
            },
            load: function (name, req, load) {
                req([parse(name, req)], function(o) {
                    load(o);
                });
            }
        };
    });
})();
This is not production code; it's just a simple way to demonstrate that require's config wasn't meant to solve problems like this.

Loading webpack module in a require.js based project returns null

I'm trying to load a library that is compiled with Webpack into a require.js-based project. While the library exposes an object, it returns null when required from the require.js project:
define(function(require, exports, module) {
    [...]
    require("./ext/mylib.core.js"); // -> null
})
Are there any flags I can use in Webpack to enable AMD compliance? There are some references to AMD in the generated library, but as it is, it does not seem to do anything.
The solution was in the Webpack documentation: there is an outputLibrary flag that can be set to "amd" or "umd", and in that case Webpack produces AMD-compliant modules.
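In the Webpack configuration this corresponds to the output.library / output.libraryTarget options (a minimal sketch with hypothetical file and library names; the exact option names can vary between Webpack versions):
// webpack.config.js -- sketch only
module.exports = {
    entry: './src/mylib.core.js',   // hypothetical entry point
    output: {
        filename: 'mylib.core.js',
        library: 'MyLib',           // name under which the library is exposed
        libraryTarget: 'umd'        // or 'amd': wraps the output in a define() an AMD loader understands
    }
};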
EDIT 3 / EDIT 4:
Webpack is not cooperating, it seems, so another possibility would be to expose the module with the shim config option:
require.config({
    paths: {
        // Tell require where to find the webpack thingy
        yourModule: 'path/to/the/webpack/asset'
    },
    shim: {
        // This lets require ignore that there is no define
        // call but will instead use the specified global
        // as the module export
        yourModule: {
            exports: 'theGlobalThatIsPutInPlaceByWebpack'
        }
    }
});
This obviously only works in the case that the webpack stuff is putting something in the global scope. Hope this helps!
EDIT 2:
So I got the question wrong as pointed out in the comments. I didn't find any built-in functionality to produce AMD modules from webpack - the end result seems to be a static asset js file. You could wrap the result in a
define(function () {
return /* the object that webpack produces */;
});
block, maybe with the help of some after-build event (e.g. using this after build plugin for webpack). Then you should be able to require the module with an AMD loader.
Original Answer:
require.js loads its dependencies asynchronously; you have to declare them explicitly when you're not using the r.js optimizer or the like. So if the module exposes an AMD definition, it should work like this:
// It works the way you did it ...
define(['path/to/your/module'], function (require, exports, module) {
    require('path/to/your/module'); // -> { ... }
});

// ... but I personally prefer this explicit syntax + it is
// friendlier to a code minifier
define(['path/to/your/module'], function (yourModule) {
    console.log(yourModule); // { ... }
});
Maybe you have to configure your require instance, there are docs for that.
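Such a configuration would typically start with a paths entry (a minimal sketch with hypothetical names; see the RequireJS config docs for the full set of options):
require.config({
    baseUrl: 'js',   // hypothetical base directory
    paths: {
        // module ID used in define()/require() -> file path without the .js extension
        'path/to/your/module': 'ext/mylib.core'
    }
});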
EDIT1: as pointed out, the way the module is being accessed is not wrong, but the dependencies were missing, so I added code that is closer to the original question.

Use node require inside a method called using requireJs

Is it possible to use the default node require function in a file that has been called through requirejs?
define(["require", "exports"], function(require, exports) {
//...
var Schema = require(DaoPublic._schemasDirectory + schemaFilename);
}
I always get ReferenceError: module is not defined. I also tried to load the schema using requireJs, with the same result, because the file itself is written as CommonJS and is not AMD compatible.
Any solution?
Note that the loaded schema is in CommonJS and I need to keep it that way, since it's used by several DAOs, some in AMD and others in CommonJS. (Funny part.)
Example of requested file (schema):
var userSchema = {
    /**
     * User Login, used as id to connect between all our platforms.
     */
    login: {
        type: String,
        match: /^[a-zA-Z0-9_-]+$/,
        trim: true,
        required: true,
        notEmpty: true,
        unique: true,
        check: {
            minLength: 4,
            maxLength: 16
        }
    }
};

module.exports = userSchema;
The problem is that your code is set up so that RequireJS is able to find the CommonJS module by itself. However, when RequireJS is running in Node and cannot find a module, it will call Node's require function, which is what you need. So it is possible (with RequireJS) to have an AMD module use Node's require, but the trick is getting RequireJS to not see the module in the first place.
Proof of Concept
Here's a proof of concept. The main file named test.js:
var requirejs = require("requirejs");

function myRequire(path) {
    if (path.lastIndexOf("schemas/", 0) === 0)
        path = "./" + path;
    return require(path);
}

requirejs.config({
    paths: {
        "schemas": "BOGUS"
    },
    nodeRequire: myRequire
});

requirejs(['foo'], function (foo) {
    console.log(foo);
});
The file foo.js:
define(["require", "exports"], function(require, exports) {
return require("./schemas/x") + " by way of foo";
});
The file schemas/x.js:
module.exports = "x";
If you run it with node test.js, you'll get on the console:
x by way of foo
Explanation
I'm calling this a "proof of concept" because I've not considered all eventualities.
The paths setting is there to throw RequireJS off track. BOGUS must be a non-existent directory. When RequireJS tries to load the module ./schemas/x, it tries to load the file ./BOGUS/x.js and does not find it. So it calls Node's require.
The nodeRequire setting tells RequireJS that Node's require function is myRequire. This is a useful lie.
The myRequire function changes the path to add the ./ at the start before calling Node's require. The issue here is that for some reason RequireJS transforms ./schemas/x to schemas/x before it gives the path to Node's require function, and Node will then be unable to find the module. Adding back the ./ at the start of the path name fixes this. I've tried a whole bunch of path variants but none of them worked. Some variants were such that RequireJS was able to find the module by itself and thus never tried calling Node's require or they prevented Node from finding the module. There may be a better way to fix this, which I've not found. (This is one reason why I'm calling this a "proof of concept".) Note that I've designed this function to only alter the paths that start with schemas/.
Other Possibilities
I've looked at other possibilities but they did not appear to me very promising. For instance, customizing NODE_PATH would eliminate myRequire but such customization is not always doable or desirable.

RequireJS plugin: load timeouts experienced when using plugin

Using RequireJS I'm building an app which makes extensive use of widgets. For each widget I have at least 3 separate files:
request.js containing code for setting up request/response handlers to request a widget in another part of my application
controller.js containing handling between model and view
view.js containing handling between user and controller
Module definition in request.js:
define(['common/view/widget/entity/term/list/table/controller'],
function(WidgetController) { ... });
Module definition in controller.js:
define(['common/view/widget/entity/term/list/table/view'],
function(WidgetView) { ... });
Module definition of view.js is:
define(['module', 'require'], function(module, require) {
    'use strict';
    var WidgetView = <constructor definition>;
    return WidgetView;
});
I have lots of little situations like the above in the widgets I have developed. What I dislike is using the full path every time a module requires another module and both are located in the same folder. I'd like to simply specify it as follows (assuming we have a RequireJS plugin which solves this for us):
define(['currentfolder!controller'],
function(WidgetController) { ... });
For this, I have written a small plugin, as I couldn't find it on the web:
define({
    load: function (name, parentRequire, onload, config) {
        var path = parentRequire.toUrl('.').substring(config.baseUrl.length) + '/' + name;
        parentRequire([path], function (value) {
            onload(value);
        });
    }
});
As you might notice, in its basic form it looks like the example of the RequireJS plugins documentation.
Now in some cases, the above works fine (e.g. from the request.js to the controller.js), but in other cases a load timeout occurs (from controller.js to view.js). When I look at the paths which are generated, all are proper RequireJS paths. Looking at the load timeouts, the following is logged:
Timestamp: 13-09-13 17:27:10
Error: Error: Load timeout for modules: currentfolder!view_unnormalized2,currentfolder!view
http://requirejs.org/docs/errors.html#timeout
Source File: http://localhost/app/vendor/requirejs/require.js?msv15z
Line: 159
The above log was from a test where I only loaded view.js from controller.js, using currentfolder!view in the list of modules in the define statement. Since I only requested currentfolder!view once, I'm confused as to why I see both currentfolder!view_unnormalized2 and currentfolder!view in the message.
Any idea as to why this might be happening?
My answer may not answer your primary questions, but it will help you achieve what you're trying to do with your plugin.
In fact, Require.js supports relative paths for requiring modules when using the CommonJS style, like so:
define(function( require, exports, module ) {
    var relativeModule = require("./subfolder/module");

    module.exports = function() {
        console.log( relativeModule );
    };
});
