When my server starts, I'm trying to use the RequireJS optimizer to combine all my RequireJS modules into a single file. Once the optimizer finishes, I try to use the modules, but it doesn't seem to work.
var path = require('path');
var fs = require('fs');
var requirejs = require('requirejs');
requirejs.config({
baseUrl: __dirname,
nodeRequire: require
});
requirejs.optimize({
baseUrl: path.join(__dirname, 'foo'),
dir: 'build',
modules: [{
name: 'main',
include: [ 'src/bar' ]
}]
}, function (data) {
console.log(fs.readFileSync(path.join(__dirname, 'build', 'main.js'), 'utf-8'));
var main = requirejs('/build/main.js');
var bar = requirejs('src/bar');
}, function (error) {
console.log(error);
});
The output from the console.log is the concatenated files as expected, but bar is undefined.
If I run the following script after the previous script, bar is defined.
var requirejs = require('requirejs');
requirejs.config({
baseUrl: __dirname,
nodeRequire: require
});
var main = requirejs('/build/main.js');
var bar = requirejs('src/bar');
console.log(bar);
Can anyone offer any insight into what may be preventing the first script from working?
Thanks,
Jake
Your path for baseUrl in requirejs.config was wrong. I've also added removeCombined in the optimizer configuration to prevent loading files that have been combined into main.
However, that's not all that was needed. I also save the value returned by requirejs.config in r and then use r to load the modules. This is the same thing you would do with RequireJS contexts: you'd save the return value of the requirejs.config call for each context so that you could use one function to load modules from one context and another function to load modules from the other. I have to admit I don't know why this makes the code work. It smells like a bug to me.
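For comparison, this is roughly what the contexts pattern looks like (a generic sketch with made-up context names and base URLs, unrelated to the build above):
var requirejs = require('requirejs');
// Each requirejs.config call that sets a context returns a context-scoped
// require function; you keep that return value and load modules through it.
var reqOne = requirejs.config({ context: 'one', baseUrl: __dirname + '/lib-one' });
var reqTwo = requirejs.config({ context: 'two', baseUrl: __dirname + '/lib-two' });
reqOne(['helper'], function (helper) { /* helper resolved from lib-one */ });
reqTwo(['helper'], function (helper) { /* helper resolved from lib-two */ });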
Another thing I discovered is that requirejs.optimize swallows any exception thrown in its completion callback. This is definitely a bug. If it had not been swallowing exceptions, you'd have had a better idea of what was going on.
Here is the code:
var path = require('path');
var fs = require('fs');
var requirejs = require('requirejs');
var r = requirejs.config({
baseUrl: path.join(__dirname, 'build'),
nodeRequire: require
});
requirejs.optimize({
baseUrl: path.join(__dirname, 'foo'),
dir: 'build',
// You should do this to make sure you do not accidentally load the
// separate copies of files that have already been combined into main.
removeCombined: true,
modules: [{
name: 'main',
include: [ 'src/bar' ]
}]
}, function (data) {
// console.log(fs.readFileSync(path.join(__dirname, 'build', 'main.js'), 'utf-8'));
var main = r('main');
console.log(main);
var bar = r('src/bar');
console.log(bar);
}, function (error) {
console.log(error);
});
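As a side note, until that swallowing behaviour is fixed you can surface errors yourself by guarding the completion callback. A minimal sketch of that workaround (my own suggestion, reusing path and r from the snippet above):
requirejs.optimize({
    // same optimizer options as above
    baseUrl: path.join(__dirname, 'foo'),
    dir: 'build',
    removeCombined: true,
    modules: [{ name: 'main', include: ['src/bar'] }]
}, function (data) {
    try {
        var main = r('main');
        var bar = r('src/bar');
        console.log(main, bar);
    } catch (e) {
        // Without the try/catch, requirejs.optimize would swallow this exception.
        console.error('Error in optimize callback:', e);
    }
}, function (error) {
    console.log(error);
});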
Related
I have a nw.js native application with angular.js inside. My app is bundled with webpack and contains native Node.js modules. My entry point is an index.js file that I organized like this:
var gui = require('nw.gui');
var angular = require('angular');
require('./app.css');
// other modules
var myApp = angular.module('myApp', [
'ngRaven',
'ngMaterial',
'ngMessages'
]).constant(
'fs', require('fs')
)
require('./services')(myApp);
require('./directives')(myApp);
require('./factories')(myApp);
require('./filters')(myApp);
require('./controllers')(myApp);
require('./app.js')(myApp);
My webpack config looks like this:
const path = require('path');
const config = {
entry: [
'./app/index.js'
],
output: {
path: path.resolve(__dirname, 'app'),
filename: 'bundle.js'
},
devtool: "source-map",
target: 'node-webkit',
module:{
// css, html loaders
},
node: {
os: true,
fs: true,
child_process: true,
__dirname: true,
__filename: true
}
};
module.exports = config;
So every dependency, including Node.js native modules like fs, path and child_process, is bundled into one big file, bundle.js, which I include in my HTML before packaging my nw.js app. My app structure looks like:
my_project:
--app
----controllers
------welcome
--------welcome.js // Page controller
--------welcome.html // Page HTML
------index.js // here I include each page controller
----app.js // My angular app initialization
----index.js // here I include all dependencies
I'm trying to run tests against this structure. I tried karma+jasmine and karma+mocha with different configurations; my latest one looks like this:
module.exports = function (config) {
config.set({
basePath: '',
frameworks: ['jasmine'],
files: [
'app/bundle.js',
'app/**/*spec.js'
],
exclude: [],
preprocessors: {
'app/bundle.js': ['webpack']
},
reporters: ['progress'],
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ['ChromeHeadlessNoSandbox'],
customLaunchers: {
ChromeHeadlessNoSandbox: {
base: 'ChromeHeadless',
flags: ['--no-sandbox']
}
},
singleRun: true,
webpack: {
// you don't need to specify the entry option because
// karma watches the test entry points
// webpack watches dependencies
// ... remainder of webpack configuration (or import)
},
webpackMiddleware: {
// webpack-dev-middleware configuration
// i.e.
noInfo: true,
// and use stats to turn off verbose output
stats: {
// options i.e.
chunks: false
}
}
});
};
But my tests still don't see the Angular application.
describe('Welcome page', function() {
beforeEach(angular.mock.module('WelcomePageCtrl'));
});
P.S. I don't strictly need Karma and Jasmine, so any solution would be appreciated. I just want to cover my project with tests.
I have gone through something similar myself. I don't think you need the bundle.js for your tests.
Here is how I would do it:
I assume your controller/service/etc implementation is as follows:
/* app/controller/welcome.js */
module.exports = function(app) {
return app.controller("MyCtrl", function($scope) {
$scope.name = "lebouf";
});
};
I like my test code to sit right beside the code I'm testing (Welcome.spec.js in the same directory as Welcome.js). That test code would look like so:
/* app/controller/welcome.spec.js */
beforeEach(function() {
var app = angular.module("myApp", []); //module definition
require("./welcome")(app);
});
beforeEach(mockModule("myApp"));
describe("MyCtrl", () => {
var scope = {};
beforeEach(mockInject(function($controller) {
$controller("MyCtrl", {
$scope: scope
});
}));
it("has name in its scope", function() {
expect(scope.name).to.equal("not lebouf"); //the test will fail
});
});
Except this is an Angular controller we're testing, and it's not that simple: we need the angular object itself. So let's set it up. I'll explain why it's done this way afterwards:
/* init.js */
const jsdom = require("jsdom").jsdom; //v5.6.1
const chai = require("chai");
global.document = jsdom("<html><head></head><body></body></html>", {});
global.window = document.defaultView;
global.window.mocha = true;
global.window.beforeEach = beforeEach;
global.window.afterEach = afterEach;
require("angular/angular");
require("angular-mocks");
global.angular = window.angular;
global.mockInject = angular.mock.inject;
global.mockModule = angular.mock.module;
global.expect = chai.expect;
console.log("ALL SET");
And we'll run the tests as:
node_modules/.bin/mocha ./init.js app/**/*.spec.js
#or preferably as `npm test` by copying the above statement into package.json
extra info
Here is why init.js is set up the way it is:
jsdom: if you require("angular/angular") you'll see that it needs a window instance. jsdom can create documents and windows and so on without a web browser!
window.mocha: we need angular-mocks to populate our angular with the necessary utilities. But if you look at the code you'll notice that window.mocha || window.jasmine needs to be truthy. That's why `window.mocha = true`.
window.beforeEach, window.afterEach: the same reason as above; because angular-mocks.js demands it.
I set some global variables that I plan to use commonly in my tests: angular, expect, mockInject, mockModule.
Also these may provide some additional information:
https://kasperlewau.github.io/post/angular-without-karma/
https://gist.github.com/rikukissa/dcb422eb3b464cc184ae
I'm trying to load a test script, chosen by a custom argument passed through the start command, when starting an Intern test run.
To do this I'm trying to require the specific test script inside a test, but I am getting an "Attempt to require unloaded module" error.
This is my setup. Can someone help with this or suggest an alternative workaround to make it work?
define(function (require) {
var intern = require('intern');
var AdvalentAutomationTestSuite = require('intern!object');
AdvalentAutomationTestSuite({
name: 'Advalent Automation Test',
'AdvalentTestSets': function () {
return this.remote
.then(function () {
var product = intern.args.product;
var script = 'Automation/TestScripts/FRG/' + product + '-Config';
require(script)
})
},
});
});
Update:
Here is my intern.js file:
define(function (require) {
var intern = require('intern');
console.log(intern)
return {
proxyPort: 9000,
proxyUrl: 'http://localhost:9000/',
defaultTimeout: 120000,
capabilities: {
'selenium_version': '2.48.2',
},
environments: [
{browserName: 'chrome', version: '48', platform: ['WINDOWS'], chromeOptions: {args: ['start-maximized']}},
],
maxConcurrency: 3,
tunnel: 'NullTunnel',
reporters: [
{id: 'JUnit', filename: 'test-reports/report.xml'},
{id: 'Runner'},
],
Loaders: {
'host-node': 'dojo/dojo',
'host-browser': 'node_modules/dojo/dojo.js'
},
loaderOptions: {
packages: [{name: 'intern-tutorial', location: '.'}]
},
functionalSuites: [
'Automation/TestScripts/FRG/FRG-Config',
],
defaultTimeout: 70000,
excludeInstrumentation: /^(?:tests|node_modules)\//
}
});
You should be fine with the default loader, although as #Troopers points out, it's loaders, not Loaders. The problem is that you're doing a dynamic require with a computed name:
var script = 'Automation/TestScripts/FRG/' + product + '-Config';
require(script)
AMD loaders don't completely support the require(script) syntax since they don't load modules synchronously. When a module is written in CJS-compatibility mode, the loader fakes it by scanning the module code for require calls and then preloading and caching the modules before executing the module code. When the require(script) call is eventually executed, the preloaded module is returned.
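To illustrate what that scan can and cannot see (a sketch, not your exact suite):
define(function (require) {
    // Literal id: the loader finds this string while scanning the source,
    // preloads 'intern', and can return it synchronously when this line runs.
    var intern = require('intern');

    // Computed id: invisible to that scan, so a synchronous require(script)
    // fails at runtime with "Attempt to require unloaded module".
    var script = 'Automation/TestScripts/FRG/' + intern.args.product + '-Config';
    // require(script); // <-- this call cannot be preloaded
});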
When you use a computed module name, the loader can't preload the module being required, so the synchronous require call will fail. To load a module with a computed name you'll need to use the require([ dependency ]) syntax, like:
var script = 'Automation/TestScripts/FRG/' + product + '-Config';
return new Promise(function (resolve) {
require([ script ], resolve);
});
At a higher level, though, it seems odd to be doing this in a test in the first place. It seems like something that should be handled at the module or config levels. For example, assuming 'Automation/TestScripts/FRG/' + product + '-Config' is a functional test suite, the config could simply add that suite to the functionalSuites list if the required command line argument were provided.
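A rough sketch of that config-level approach (it assumes intern.args is available inside the config module, as in the update above):
define(function (require) {
    var intern = require('intern');

    // Only add the product-specific suite when the argument was provided.
    var functionalSuites = [];
    if (intern.args.product) {
        functionalSuites.push('Automation/TestScripts/FRG/' + intern.args.product + '-Config');
    }

    return {
        // ...the rest of the configuration shown above...
        functionalSuites: functionalSuites
    };
});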
You need to specify a loader in your configuration file:
loaders: {
"host-node": "requirejs",
"host-browser": "node_modules/requirejs/require.js"
},
And install the npm package requirejs
The documentation is here
After some trial and error I managed to make it work by changing my intern.js as follows:
define(function (require) {
var intern = require('intern');
var product = intern.args.product
return {
functionalSuites: [
'Automation/TestScripts/FRG/' + product + '-Config.js',
],
// rest of config code ...
}
});
Please let me know if there's a better way to do this.
I have some repositories (A, B, C and TestRepo), all of them local, and I get Karma to load spec files from repo A, B or C with a parameter passed to a Node script.
Now I'm trying to transpile the files because some tests use import, so I need Babel. I configured Babel as a preprocessor and it seems to be working, but now I get this error:
Uncaught Error: Module name "builder" has not been loaded yet for context: _. Use require([])
I've tried every configuration I know of, but it's still not working.
My karma.conf.js is this:
module.exports = function(config) {
config.set({
basePath: '',
frameworks: ['jasmine', 'requirejs'],
files: [
'test-main.js'
],
exclude: [
],
preprocessors: {
},
reporters: ['progress'],
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ['Chrome'],
singleRun: false,
concurrency: Infinity
});
}
My test-main.js for requirejs:
var allTestFiles = []
var TEST_REGEXP = /(spec|test)\.js$/i
Object.keys(window.__karma__.files).forEach(function (file) {
if (TEST_REGEXP.test(file)) {
var normalizedTestModule = file.replace(/^\/base\/|\.js$/g, '')
allTestFiles.push(normalizedTestModule)
}
})
require.config({
baseUrl: '/base',
deps: allTestFiles,
callback: window.__karma__.start
})
And here is my Node file, where I set Karma's files, preprocessors and so on based on the repository I got from the params:
const cfg = require('karma').config;
const stopper = require('karma').stopper;
const runner = require('karma').runner;
const Server = require('karma').Server;
const path = require('path');
const REPOSITORY = process.argv[2] || '';
let karmaConfig = cfg.parseConfig(path.resolve('./karma.conf.js'), { port: 9876 } );
let BASE_PATH;
if (REPOSITORY !== 'all') {
BASE_PATH = `/Users/fernando.delolmo/${REPOSITORY}`;
karmaConfig.files.push({pattern: BASE_PATH + '/test/**/*.spec.js'});
karmaConfig.files.push({pattern: BASE_PATH + '/src/**/*.js', included: true});
karmaConfig.preprocessors[BASE_PATH + '/src/**/*.js'] = ['babel'];
karmaConfig.preprocessors[BASE_PATH + '/test/**/*.spec.js'] = ['babel'];
karmaConfig.exclude.push(BASE_PATH + '/node_modules/**/*.spec.js');
} else {
BASE_PATH = '/Users/fernando.delolmo/';
karmaConfig.files.push({pattern: BASE_PATH + '**/test/**/*.spec.js'});
karmaConfig.files.push({pattern: BASE_PATH + '**/src/**/*.js', included: true});
karmaConfig.preprocessors[BASE_PATH + '**/src/**/*.js'] = ['babel'];
karmaConfig.preprocessors[BASE_PATH + '**/test/**/*.spec.js'] = ['babel'];
karmaConfig.exclude.push(BASE_PATH + '**/node_modules/**/*.spec.js');
}
karmaConfig.basePath = BASE_PATH;
var server = new Server(karmaConfig, function(exitCode) {
process.exit(exitCode);
});
server.start();
server.on('browser_start', function() {
runner.run(karmaConfig, function(exitCode) {
process.exit(exitCode);
});
});
server.on('browser_complete', function() {
stopper.stop(karmaConfig, function(exitCode) {
process.exit(exitCode);
});
});
It's my first time configuring Karma, RequireJS and Babel all together, so any help would be great :)
Okay, I finally figured out the problem. I'm posting this in case someone else ends up here looking for a solution to a problem like this.
If you want to run tests written with ES2015 modules in Karma, you will first use Babel to transpile them into CommonJS modules. Good! But then you will get errors like "export is undefined". Why? Simple: export/import are still not supported in browsers, which means you CAN'T run CommonJS modules in the browser without a module bundler (Browserify or webpack).
So, keep this clear:
1st -> Babel only transpiles your ES2015 code and tests into CommonJS modules.
2nd -> webpack / Browserify take those CommonJS modules and bundle them into a bundle.js, which you can then run in the browser with Karma (a sketch of such a setup follows).
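Here is a minimal sketch of that setup, assuming karma-webpack, babel-loader and the usual Karma framework/launcher plugins are installed (the file patterns and the webpack 4-style loader syntax are illustrative, not taken from the configs above):
// karma.conf.js
module.exports = function (config) {
  config.set({
    frameworks: ['jasmine'],
    files: ['test/**/*.spec.js'],
    preprocessors: {
      // karma-webpack bundles each spec together with everything it imports.
      'test/**/*.spec.js': ['webpack']
    },
    webpack: {
      mode: 'development',
      module: {
        rules: [
          { test: /\.js$/, exclude: /node_modules/, use: 'babel-loader' }
        ]
      }
    },
    browsers: ['ChromeHeadless'],
    singleRun: true
  });
};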
I'm having trouble visualizing how I can leverage the DllPlugin/DllReferencePlugin with webpack while also using Grunt for the building. For those unfamiliar with them, the DllPlugin creates a separate bundle that can be shared with other bundles. It also creates a manifest file (important) to help with the linking. The DllReferencePlugin is then used by another bundle at build time to link against the previously made DllPlugin bundle, and it needs that manifest file to do so.
In Grunt, this would require the manifest file to exist before Grunt even runs, no? Here's a simplified code example:
webpack.dll.js
// My Dll Bundles, which creates
// - ./bundles/my_dll.js
// - ./bundles/my_dll-manifest.json
module.exports = {
entry: {
my_dll : './dll.js'
},
// where to send final bundle
output: {
path: './bundles',
filename: "[name].js"
},
// CREATES THE MANIFEST
plugins: [
new webpack.DllPlugin({
path: "./bundles/[name]-manifest.json",
name: "[name]_lib"
})
]
};
webpack.app.js
// My Referencing Bundle, which includes
// - ./bundles/app.js
module.exports = {
entry: {
my_app : './app.js'
},
// where to send final bundle
output: {
path: './bundles',
filename: "[name].js"
},
// SETS UP THE REFERENCE TO THE DLL
plugins: [
new webpack.DllReferencePlugin({
context: '.',
// IMPORTANT LINE, AND WHERE EVERYTHING SEEMS TO FAIL
manifest: require('./bundles/my_dll-manifest.json')
})
]
};
If you look in the second section, webpack.app.js, I've commented where everything would seem to fail in Grunt. For the DllReferencePlugin to work, it needs the manifest file from the DllPlugin. In a Grunt workflow, however, Grunt loads both of these configurations when it initializes, so the manifest: require('./bundles/my_dll-manifest.json') line fails: the earlier Grunt step that builds webpack.dll.js has not run yet, meaning the manifest does not exist.
var path = require("path");
var util = require('util')
var webpack = require("webpack");
var MyDllReferencePlugin = function(options){
webpack.DllReferencePlugin.call(this, options);
}
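// Override apply() so the manifest path is only require()d when webpack
// actually runs this plugin, i.e. after the earlier Grunt task has built
// the DLL bundle and written the manifest file.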
MyDllReferencePlugin.prototype.apply = function(compiler) {
if (typeof this.options.manifest == 'string') {
this.options.manifest = require(this.options.manifest);
}
webpack.DllReferencePlugin.prototype.apply.call(this, compiler);
};
// My Referencing Bundle, which includes
// - ./bundles/app.js
module.exports = {
entry: {
my_app : './app.js'
},
// where to send final bundle
output: {
path: './bundles',
filename: "[name].js"
},
// SETS UP THE REFERENCE TO THE DLL
plugins: [
new MyDllReferencePlugin({
context: '.',
// Pass the manifest as a path string (not require()d here); the plugin
// above resolves it lazily when webpack runs.
manifest: path.resolve('./bundles/my_dll-manifest.json')
})
]
};
I have a web app that I compile with webpack. One of the modules that my code uses is named table.js. Until recently, it's just been another module and has been compiled into my bundle.js file with everything else.
Now I need to run table.js in a Web Worker, so I need to pull it and its dependencies into a separate file that can be loaded both standalone and by my other modules.
At first I thought to include table.js in my webpack.config.js's entry.
var config = {
...
entry: {
app: [ './src/main.js', './src/classes/table.js' ],
vendors: [],
},
...
}
That didn't work. Then I thought to separate it out like my vendors bundle.
var config = {
/* for vendors (and other modules) we have a CDN for */
addExternal: function (name, globalVar) {
this.externals[name] = globalVar;
this.entry.vendors.push(name);
},
/* for vendors we don't have a CDN for */
addVendor: function (name, path) {
this.resolve.alias[name] = path;
this.entry.vendors.push(name);
},
addPlugin: function (plugin) {
this.plugins.push(plugin);
},
entry: {
app: [ './src/main.js' ],
vendors: [],
table: [ __dirname + '/src/classes/table.js' ]
},
plugins: [],
externals: { },
output: {
path: __dirname + '/public/dist/',
filename: 'bundle.js',
publicPath: '/dist/',
sourceMapFile: '[file].map'
},
resolve: {
alias: { 'table': './src/classes/table.js' },
extensions: [ '', '.js', '.jsx' ]
},
...
}
/* add vendors and externals */
...
config.addPlugin(new CommonsChunkPlugin('vendors', 'vendors.js'));
config.addPlugin(new CommonsChunkPlugin('table', 'table.js'));
This seems to pull Table and its dependencies into a chunk of bundle.js, 1.bundle.js. Unfortunately, then calling import Table from 'table' causes this error:
ERROR in CommonsChunkPlugin: While running in normal mode it's not allowed to use a non-entry chunk (table)
I also have a circular dependency between TableStore and Table. TableStore needs to stay in bundle.js because it shouldn't be loaded into the Web Worker. Previously, when I've needed to throw things into a separate chunk, I've done:
if (someThingNeedsRequiring) {
  require.ensure([], () => {
    require('something');
  });
}
With the circular dependency, this doesn't seem to work.
/* table.js */
let _inWebWorker = self instanceof Window,
TableStore = null;
if (!_inWebWorker) {
require.ensure([], function() { TableStore = require('../stores/table-store'); } );
}
/* table-store.js */
import Table from 'table';
Could someone set me straight on the correct way to have my webpack.config.js and how to use my imports in my module files?
(It's been quite a while since I figured this out, and I haven't touched the project in nearly six months, so I may have missed some of the details. Comment if it's not working, and I'll try to figure out what I'm missing.)
webpack.config
It turns out there are two handy-dandy JavaScript packages for doing what I want: worker-loader and workerjs.
npm install --save workerjs worker-loader
I added this in my webpack.config.js:
var config = {
// ...
worker: {
output: {
filename: '[name].worker.js',
chunkFilename: '[name].worker.js'
}
},
// ...
}
require()
In order to specify that I want my class to be run in a WebWorker file, my require looks like:
// ecmaScript 6
import TableWorker from 'worker?name=tableRoller!path/to/table';
// ecmaScript 5
var TableWorker = require('worker?name=tableRoller!path/to/table');
TableWorker is just the variable name I used for table.js's export default class Table {...}. The name=tableRoller part specifies the generated [name].worker.js filename. For example, I have another WebWorker named distCalc.worker.js, so my import looks like:
import DistWorker from 'worker?name=distCalc!path/to/distWorker';
Note that in this case, distWorker only ever runs in a WebWorker, while Table is used in both my main.js entry point and my tableRoller.worker.js WebWorker file.
workerjs and worker-loader generate a new entry point file and pull in all of the dependencies of those classes. Tobias Koppers (worker-loader) and Eugene Ware (workerjs) are geniuses.
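For completeness, here is a usage sketch (it assumes worker-loader's standard constructor-style API; the message shape is made up):
import TableWorker from 'worker?name=tableRoller!path/to/table';

const tableWorker = new TableWorker(); // spawns tableRoller.worker.js
tableWorker.onmessage = function (event) {
  // results posted back by table.js running inside the WebWorker
  console.log('table result:', event.data);
};
tableWorker.postMessage({ cmd: 'roll' }); // made-up message shape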
Detecting WebWorker
My _inWebWorker detection is:
let _inWebWorker = typeof Window === 'undefined';
Change the output filename in your webpack.config.js file:
output: {
path: __dirname + '/public/dist/',
filename: '[name].js',
publicPath: '/dist/',
sourceMapFile: '[file].map'
},
Then webpack can emit each entry as its own file in the dist directory, named after the entry (e.g. app.js, vendors.js, table.js).