I have a working rollup.config.js file but need to run a separate packaging script after Rollup finishes. My plan was to use Rollup's watcher via its JS API. However, I cannot get the JS API to work at all.
I am referencing this code from the Rollup site...
const loadConfigFile = require('rollup/dist/loadConfigFile');
const path = require('path');
const rollup = require('rollup');
loadConfigFile(path.resolve(__dirname, 'rollup.config.js'))
.then(async ({options, warnings}) => {
warnings.flush();
const bundle = await rollup.rollup(options);
await Promise.all(options.output.map(bundle.write));
rollup.watch(options);
})
but I keep getting an error: Unknown input options: 0.......Error: You must supply options.input to rollup
My rollup.config.js is as follows:
import svelte from 'rollup-plugin-svelte';
import resolve from '@rollup/plugin-node-resolve';
import commonjs from '@rollup/plugin-commonjs';
import livereload from 'rollup-plugin-livereload';
import { terser } from "rollup-plugin-terser";
import replace from '@rollup/plugin-replace';
import json from '@rollup/plugin-json';
const production = !process.env.ROLLUP_WATCH;
export default {
input: 'src/main.js',
output: {
sourcemap: true,
format: 'iife',
name: 'app',
file: 'public/bundle.js'
},
plugins: [
json(),
production && replace({
'eruda': ``,
exclude: 'node_modules/**',
delimiters: ['import * as eruda from \'', '\'']
}),
production && replace({
'eruda': ``,
exclude: 'node_modules/**',
delimiters: ['', '.init()']
}),
svelte({
dev: !production,
css: css => {
css.write('public/bundle.css');
}
}),
resolve({ browser: true }),
commonjs(),
!production && livereload('public'),
production && terser()
],
watch: {
clearScreen: false
}
};
Any thoughts are appreciated!
I think the example at rollupjs.org is wrong. Shouldn't it be like this instead?
const loadConfigFile = require('rollup/dist/loadConfigFile')
const path = require('path')
const rollup = require('rollup')
// load the config file next to the current script;
// the provided config object has the same effect as passing "--format es"
// on the command line and will override the format of all outputs
loadConfigFile(path.resolve(__dirname, 'rollup.config.js'), {format: 'es'})
.then(({options, warnings}) => {
// "warnings" wraps the default `onwarn` handler passed by the CLI.
// This prints all warnings up to this point:
console.log(`We currently have ${warnings.count} warnings`)
// This prints all deferred warnings
warnings.flush()
// options is an "inputOptions" object with an additional "output"
// property that contains an array of "outputOptions".
// The following will generate all outputs and write them to disk the same
// way the CLI does it:
options.map(async options => {
const bundle = await rollup.rollup(options)
await Promise.all(options.output.map(bundle.write))
// You can also pass this directly to "rollup.watch"
rollup.watch(options)
})
})
Figured it out: the options value returned from loadConfigFile is an array of configurations, so I had to use options[0] inside the async function.
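For reference, here is a minimal sketch of that fix applied to the snippet above, assuming a single configuration exported from rollup.config.js; runPackagingScript is a hypothetical placeholder for the separate packaging step:

const loadConfigFile = require('rollup/dist/loadConfigFile');
const path = require('path');
const rollup = require('rollup');

loadConfigFile(path.resolve(__dirname, 'rollup.config.js'))
  .then(async ({ options, warnings }) => {
    warnings.flush();
    // loadConfigFile resolves to an array of configurations, one per exported config
    const inputOptions = options[0];
    const bundle = await rollup.rollup(inputOptions);
    await Promise.all(inputOptions.output.map(bundle.write));
    // the watcher emits an 'END' event after every (re)build finishes,
    // which is a convenient hook for a follow-up packaging step
    const watcher = rollup.watch(inputOptions);
    watcher.on('event', (event) => {
      if (event.code === 'END') {
        // runPackagingScript(); // hypothetical
      }
    });
  });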
In my project I am using Cypress with plain JavaScript. I am facing the challenge of importing modules (page objects) via aliases instead of spaghetti paths like ../../../../folder/page.js.
I don't use TypeScript or React and don't have a src folder.
My tests run locally in the browser or via a Docker image (pipeline).
I would like to transform from this:
import { LoginPage } from "../../pages/loginPage.js";
to something like this:
import { LoginPage } from "#Pages/loginPage.js";
but I always get an error:
Error: Webpack Compilation Error
./cypress/e2e/accountOverview/accountOverviewPageTest.spec.js
Module not found: Error: Can't resolve 'Pages/loginPage.js' in 'C:\Users\User\automated_frontend_tests\automated_frontend_tests\cypress\e2e\accountOverview'
resolve 'Pages/loginPage.js' in 'C:\Users\User\automated_frontend_tests\automated_frontend_tests\cypress\e2e\accountOverview'
Parsed request is a module
using description file: C:\Users\User\automated_frontend_tests\automated_frontend_tests\package.json (relative path: ./cypress/e2e/accountOverview)
Field 'browser' doesn't contain a valid alias configuration
Looked for and couldn't find the file at the following paths:
[C:\Users\User\automated_frontend_tests\automated_frontend_tests\cypress\e2e\accountOverview\node_modules]
[C:\Users\User\automated_frontend_tests\automated_frontend_tests\cypress\e2e\node_modules]
[C:\Users\User\automated_frontend_tests\automated_frontend_tests\cypress\node_modules]
[C:\Users\node_modules]
[C:\node_modules]
[C:\Users\User\automated_frontend_tests\automated_frontend_tests\node_modules\Pages\loginPage.js]
[C:\Users\User\automated_frontend_tests\node_modules\Pages\loginPage.js]
[C:\Users\User\node_modules\Pages\loginPage.js]
[C:\Users\User\automated_frontend_tests\automated_frontend_tests\node_modules\Pages\loginPage.js.js]
[C:\Users\User\automated_frontend_tests\node_modules\Pages\loginPage.js.js]
[C:\Users\User\node_modules\Pages\loginPage.js.js]
[C:\Users\User\automated_frontend_tests\automated_frontend_tests\node_modules\Pages\loginPage.js.json]
[C:\Users\User\automated_frontend_tests\node_modules\Pages\loginPage.js.json]
[C:\Users\User\node_modules\Pages\loginPage.js.json]
[C:\Users\User\automated_frontend_tests\automated_frontend_tests\node_modules\Pages\loginPage.js.jsx]
[C:\Users\User\automated_frontend_tests\node_modules\Pages\loginPage.js.jsx]
[C:\Users\User\node_modules\Pages\loginPage.js.jsx]
[C:\Users\User\automated_frontend_tests\automated_frontend_tests\node_modules\Pages\loginPage.js.mjs]
[C:\Users\User\automated_frontend_tests\node_modules\Pages\loginPage.js.mjs]
[C:\Users\User\node_modules\Pages\loginPage.js.mjs]
[C:\Users\User\automated_frontend_tests\automated_frontend_tests\node_modules\Pages\loginPage.js.coffee]
[C:\Users\User\automated_frontend_tests\node_modules\Pages\loginPage.js.coffee]
[C:\Users\User\node_modules\Pages\loginPage.js.coffee]
@ ./cypress/e2e/accountOverview/accountOverviewPageTest.spec.js 5:17-46
I have tried several solutions, including:
//webpack.config.js
const path = require("path");

module.exports = {
  resolve: {
    alias: {
      "@pages": path.resolve(__dirname, "cypress/pages/*"),
    },
  },
};
//testspec file
import { LoginPage } from "#pages/loginPage.js";
const loginPage = new LoginPage();
@Uzair Khan:
I tried your solution, but it still didn't work. The error message remains the same. It seems that the IDE does not search in the correct folder, but only in ...\node_modules\@page\loginPage.js, which makes no sense.
If I write const loginPage = new LoginPage(), the LoginPage module cannot be found by the IDE either. Something is still wrong with the solution. Do I have to install any additional packages via npm?
In your webpack.config.js file, add a resolve.alias entry for the alias you want to create. It looks something like this:
resolve: {
alias: {
'@page': path.resolve(__dirname, '{path you want to make alias}')
}
}
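For completeness, here is a sketch of how that fragment might look as a standalone webpack.config.js; the cypress/pages folder name is an assumption, and note that there is no trailing wildcard:

// webpack.config.js (sketch)
const path = require('path');

module.exports = {
  resolve: {
    alias: {
      // the alias maps onto the folder itself; webpack appends the rest of the import path
      '@page': path.resolve(__dirname, 'cypress/pages'),
    },
  },
};

With this in place, import { LoginPage } from '@page/loginPage.js' resolves to cypress/pages/loginPage.js, provided webpack actually reads this config; with Cypress the same alias usually has to reach the file preprocessor as well, as the next answer shows.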
Since you are using Cypress, you have to set the resolve alias in cypress.config.js. Here is my cypress.config.js:
import { defineConfig } from 'cypress'
import webpack from '@cypress/webpack-preprocessor'
import preprocessor from '@badeball/cypress-cucumber-preprocessor'
import path from 'path'
export async function setupNodeEvents (on, config) {
// This is required for the preprocessor to be able to generate JSON reports after each run, among other things.
await preprocessor.addCucumberPreprocessorPlugin(on, config)
on(
'file:preprocessor',
webpack({
webpackOptions: {
resolve: {
extensions: ['.ts', '.js', '.mjs'],
alias: {
'@page': path.resolve('cypress/support/pages/')
}
},
module: {
rules: [
{
test: /\.feature$/,
use: [
{
loader: '@badeball/cypress-cucumber-preprocessor/webpack',
options: config
}
]
}
]
}
}
})
)
// Make sure to return the config object as it might have been modified by the plugin.
return config
}
Then import in other files via the alias you set in cypress.config.js. Here is mine, for example:
import page from '@page/visit.js'
const visit = new page()
When('I visit duckduckgo.com', () => {
visit.page()
})
I think both answers are nearly there; this is what I have for src files:
const webpack = require('@cypress/webpack-preprocessor')
...
module.exports = defineConfig({
...
e2e: {
setupNodeEvents(on, config) {
...
// @src alias
const options = {
webpackOptions: {
resolve: {
alias: {
'@src': path.resolve(__dirname, './src')
},
},
},
watchOptions: {},
}
on('file:preprocessor', webpack(options))
...
path.resolve() turns a relative path into an absolute one, so the 2nd param should be a plain relative path such as ./cypress/pages.
Also, don't use a wildcard * in the path; you just need the single folder that will be substituted for the alias in the import statement.
If in doubt, check the folder returned (in the terminal):
module.exports = defineConfig({
...
e2e: {
setupNodeEvents(on, config) {
const pagesFolder = path.resolve(__dirname, './cypress/pages')
console.log('pagesFolder', pagesFolder)
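Putting that together, here is a sketch of how the checked folder might then feed the preprocessor alias; the '@pages' name, the cypress/pages layout, and the use of @cypress/webpack-preprocessor are assumptions rather than part of the answers above:

// cypress.config.js (sketch)
const { defineConfig } = require('cypress')
const webpack = require('@cypress/webpack-preprocessor')
const path = require('path')

module.exports = defineConfig({
  e2e: {
    setupNodeEvents(on, config) {
      const pagesFolder = path.resolve(__dirname, './cypress/pages')
      console.log('pagesFolder', pagesFolder) // sanity-check the resolved path

      on('file:preprocessor', webpack({
        webpackOptions: {
          resolve: {
            alias: { '@pages': pagesFolder },
          },
        },
        watchOptions: {},
      }))

      return config
    },
  },
})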
I am trying to replace all the image paths in a CSS background url() with base64-encoded strings during a Rollup build.
I am using rollup-plugin-postcss for this. I am able to generate a separate .css file in the build, but I want the image paths to be replaced with base64-encoded data URLs.
Something like this:
background: url('images/sample.png');
to
background: url('data:image/png;base64,R0lGODlhyAAiALM...DfD0QAADs=');
Here is what I have been doing:
import postcss from 'rollup-plugin-postcss'
...
plugins: [
postcss({
extensions: ['.css'],
extract: path.resolve('dist/index.css'),
}),
]
A possible solution would be to use postcss-url:
import postcssurl from 'postcss-url';
postcss({
..., // postcss options
plugins: [
postcssurl({
url: 'inline',
}),
],
}),
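In the context of the question's config, here is a sketch of how postcss-url might slot into rollup-plugin-postcss; the input/output values are placeholders and maxSize is optional (it limits inlining to files up to that many kilobytes):

import path from 'path';
import postcss from 'rollup-plugin-postcss';
import postcssurl from 'postcss-url';

export default {
  input: 'src/index.js', // placeholder entry point
  output: { file: 'dist/index.js', format: 'esm' },
  plugins: [
    postcss({
      extensions: ['.css'],
      extract: path.resolve('dist/index.css'),
      plugins: [
        // rewrite url(...) references in the CSS to base64 data URIs
        postcssurl({ url: 'inline', maxSize: 10 }),
      ],
    }),
  ],
};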
I am not sure if it is possible with rollup-plugin-postcss. Try using the "image-to-base64" npm package; it's very simple.
Install command:
npm i -S image-to-base64
Example:
const imageToBase64 = require('image-to-base64');
//or
//import imageToBase64 from 'image-to-base64/browser';
imageToBase64("path/to/file.jpg") // Path to the image
.then(
(response) => {
console.log(response); // "cGF0aC90by9maWxlLmpwZw=="
}
)
.catch(
(error) => {
console.log(error); // Logs an error if there was one
}
)
There are more examples here:
https://www.npmjs.com/package/image-to-base64
If you need more help, you can reply to this comment.
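Note that this package only gives you the base64 string; you would still have to build the data URI and splice it into the CSS yourself, outside of Rollup. A rough sketch, where the file names, the MIME type and the whole post-build step are hypothetical:

const fs = require('fs');
const imageToBase64 = require('image-to-base64');

// hypothetical post-build step: rewrite one url(...) reference in the extracted CSS
async function inlineImage(cssPath, imagePath) {
  const encoded = await imageToBase64(imagePath);
  const css = fs.readFileSync(cssPath, 'utf8');
  const inlined = css.replace(
    `url('${imagePath}')`,
    `url('data:image/png;base64,${encoded}')` // assumes a PNG; adjust the MIME type per file
  );
  fs.writeFileSync(cssPath, inlined);
}

inlineImage('dist/index.css', 'images/sample.png').catch(console.error);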
You can use postcss-inline-base64.
Here is a working example based on this rollup starter:
import resolve from '@rollup/plugin-node-resolve';
import commonjs from '@rollup/plugin-commonjs';
import { terser } from 'rollup-plugin-terser';
import path from 'path'
import postcss from 'rollup-plugin-postcss'
import base64Inliner from 'postcss-inline-base64'
// `npm run build` -> `production` is true
// `npm run dev` -> `production` is false
const production = !process.env.ROLLUP_WATCH;
const __dirname = path.resolve();
const baseDir = path.join(__dirname, 'public')
export default {
input: 'src/main.js',
output: {
file: 'public/bundle.js',
format: 'iife', // immediately-invoked function expression — suitable for <script> tags
sourcemap: true
},
plugins: [
postcss({
extensions: ['.css'],
extract: path.resolve('public/styles.css'),
plugins: [base64Inliner({ baseDir })]
}),
...
]
};
I then created a css file in the src folder:
body {
background: url('b64---./images/test.png---');
}
In index.js:
import "./styles.css";
In index.html
....
<title>rollup-starter-app</title>
<link href="styles.css" rel="stylesheet" />
....
I was able to get:
I found some code on a website for converting PNG and JPG images to WebP, but it doesn't work for me. I have two JPG files in the /images/ folder.
const imagemin = require("imagemin"),
webp = require("imagemin-webp");
const outputFolder = "./images/webp";
const produceWebP = async () => {
await imagemin(["images/*.png"], {
destination: outputFolder,
plugins: [
webp({
lossless: true,
}),
],
});
console.log("PNGs processed");
await imagemin(["images/*.{jpg,jpeg}"], {
destination: outputFolder,
plugins: [
webp({
quality: 65,
}),
],
});
console.log("JPGs and JPEGs processed");
};
produceWebP();
When I run node index.js I get the message you see in the photo.
The issue here is Error [ERR_REQUIRE_ESM]: Must use import to load ES Module.
There are two types of modules in NodeJS: CommonJS and ECMAScript modules (ESM).
CommonJS uses const webp = require("imagemin-webp") syntax.
While ESM uses import webp from "imagemin-webp" syntax to achieve the same result.
Your index.js is CommonJS while the imagemin npm module is ESM, and the error occurs when you try to use a require() call to load an ESM module.
There are two possible solutions for this:
convert your index.js from CommonJS to ESM (preferred)
use an asynchronous import() call instead of require() to load the ESM module from CommonJS
First (and preferred) option is to convert your code to ESM:
rename index.js to index.mjs (.mjs extension indicates ESM syntax)
change all require() calls to import something from 'library' statements
run it as node index.mjs
index.mjs:
// using ES import syntax here
import imagemin from "imagemin";
import webp from "imagemin-webp";
// the rest of the file is unchanged
const outputFolder = "./images/webp";
const produceWebP = async () => {
await imagemin(["images/*.png"], {
destination: outputFolder,
plugins: [
webp({
lossless: true,
}),
],
});
console.log("PNGs processed");
await imagemin(["images/*.{jpg,jpeg}"], {
destination: outputFolder,
plugins: [
webp({
quality: 65,
}),
],
});
console.log("JPGs and JPEGs processed");
};
produceWebP();
The second option is to use an asynchronous import() call to load ESM modules from a CommonJS module, as described in the Node.js docs.
It is not preferred because import() is asynchronous: you have to await import() to get the result, and that in turn has to happen inside an async function.
index.js:
const outputFolder = "./images/webp";
const produceWebP = async () => {
// Load ESM modules using import(),
// it returns a Promise which resolves to
// default export as 'default' and other named exports.
// In this case we need default export.
const imagemin = (await import("imagemin")).default;
const webp = (await import("imagemin-webp")).default;
await imagemin(["images/*.png"], {
destination: outputFolder,
plugins: [
webp({
lossless: true,
}),
],
});
console.log("PNGs processed");
await imagemin(["images/*.{jpg,jpeg}"], {
destination: outputFolder,
plugins: [
webp({
quality: 65,
}),
],
});
console.log("JPGs and JPEGs processed");
};
produceWebP();
P.S.
Please note that an ESM module can expose several entries (a default export plus named exports), while a CommonJS module exports a single module.exports value, which is why the default export ends up under .default when loaded with import().
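To make that concrete, a minimal sketch with hypothetical file names:

// example.mjs: an ESM module with a default export and a named export
export default function main() { return 'main'; }
export const helper = 1;

// consumer.js (CommonJS): import() resolves to a module namespace object
(async () => {
  const mod = await import('./example.mjs');
  console.log(Object.keys(mod)); // [ 'default', 'helper' ]
  const main = mod.default;      // the default export lives under .default
  const { helper } = mod;        // named exports keep their own names
  console.log(main(), helper);   // main 1
})();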
I'm trying to set up a React component library using Rollup, which my React app then installs and uses. The consuming React application is rendered on the server. I have managed to get this setup working with Webpack, where I bundle my React component library into one single file and send it down. The problem is that this is a huge file, and it negates the point of SSR (part of it, at least) because it takes a long time to download.
So I decided to use Rollup to split the components into individual ES modules, which are then pulled in and used as required by the app. The problem is that I can't get this to work with SSR at all. This is my first time using Rollup, so I may have missed something obvious. Here is my rollup.config.js:
import babel from 'rollup-plugin-babel';
import resolve from 'rollup-plugin-node-resolve';
import commonjs from 'rollup-plugin-commonjs';
import css from 'rollup-plugin-css-only';
import alias from '@rollup/plugin-alias';
import image from '@rollup/plugin-image';
import json from '@rollup/plugin-json';
import replace from '@rollup/plugin-replace';
import namedDeps from './named-dependencies';
import stylus from './rollup-plugin-stylus-v2';
import postcssUrl from './rollup-plugin-postcss-url';
import autoExternal from 'rollup-plugin-auto-external';
import url from '@rollup/plugin-url';
import fs from 'fs';
const namedDepsObject = namedDeps
.map(depPath => ({
keys: Object.keys(require(depPath)).filter(v => v !== 'default'),
name: depPath,
}))
.reduce((acc, val) => {
acc[val.name] = val.keys;
return acc;
}, {});
let ComponentsList = require('./components-file'); // This has an array of component src and name
const rollConfigObject = fileName => {
const configOpts = {
input: `${fileName.src}`,
output: [
{
file: `dist/ssr/esm/${fileName.name}.js`,
format: 'esm',
}
],
plugins: [
autoExternal(),
url({
include: ['**/*.svg', '**/*.png', '**/*.jpg', '**/*.gif', '**/*.woff', '**/*.woff2', '**/*.eot', '**/*.ttf', ],
emitFiles: false
}),
babel({
exclude: 'node_modules/**',
presets: ['@babel/env', '@babel/react'],
plugins: ['@babel/plugin-syntax-dynamic-import'],
runtimeHelpers: true,
}),
resolve(),
commonjs({
sourceMap: false,
namedExports: {
...namedDepsObject,
},
}),
image(),
json(),
stylus(),
postcssUrl(),
css({
output: function(styles, styleNodes){
fs.appendFileSync(__dirname + '/dist/ssr/styles.css', styles);
}
}),
],
};
return { ...configOpts };
};
export default ComponentsList.map(
moduleName => {
return rollConfigObject(moduleName);
}
)
Now this creates the separate component bundles like I want, but it also seems to include code from node_modules inside createCommonjsModule wrappers, which injects browser objects like document and throws an error when I attempt to render on the server.
I am trying not to include node_modules at all, but when I run this with only the Babel plugin it throws Error: Could not resolve entry module and refuses to build. An example is Braintree; it adds a lot of code that uses document.
I'm not sure how to stop node_modules code from being injected into the components. In the app, I'm using Webpack to run these modules through Babel, but objects like document pass through and then refuse to work on the server.
Any suggestions would be great here, been struggling with this for a number of days now. Thanks!
I got the config to work after I updated my Babel settings to:
babel({
exclude: [
'../../node_modules/**',
'../node_modules/**',
'node_modules/**',
],
presets: [
[
'@babel/env',
{
modules: false,
targets: {
esmodules: true,
},
},
],
'@babel/react',
],
plugins: [
'@babel/plugin-proposal-object-rest-spread',
'@babel/plugin-transform-runtime',
'@babel/plugin-syntax-dynamic-import',
'@babel/plugin-proposal-class-properties',
],
runtimeHelpers: true,
}),
commonjs({ sourceMap: false }),
Let's say that I have a Node.js application which does NOT go through my webpack bundling:
Node App
const Html = require('./build/ssr-bundle.js');
let result = Html.ssrbundle.render();
console.log(result);
Here is my ES6/JSX file, which is getting processed by webpack and I want to be able to access that render function in my node app (you guessed right, I am trying to SSR react stuff ;) )
src/Html.js -(webpack)-> build/ssr-bundle.js
import React from 'react';
import ReactDOMServer from 'react-dom/server';
import CustomComponent from './custom-component.js';
module.exports = {
render : function () {
return ReactDOMServer.renderToString(<CustomComponent />);
} };
And here is my Webpack config
webpack.config.js
var path = require('path');
module.exports = {
entry: {
ssr: './src/Html.js',
//frontend: './src/frontend-Html.js'
},
output: {
path: path.resolve(__dirname, 'build'),
filename: 'ssr-bundle.js',
library: 'ssrbundle'
},
module: {
rules: [
{
test: /\.js$/,
loader: 'babel-loader',
query: {
presets: ['env','react'],
plugins: ["transform-es2015-destructuring", "transform-object-rest-spread"]
}
},
{
test:/\.css$/,
use:['style-loader','css-loader']
}
]
},
stats: {
colors: true
},
devtool: 'source-map'
};
Whatever I do, I cannot figure out how to properly use that exported variable "ssrbundle" and subsequently the render function. If I had my node app included in the bundle, everything would be all right, but this is not what I want to do.
As apokryfos suggested, I played around with the libraryTarget Webpack setting. You can find more info on using Webpack to author a library (what I was really trying to achieve) here:
https://webpack.js.org/guides/author-libraries/
and here are code examples:
https://github.com/kalcifer/webpack-library-example/blob/master/webpack.config.babel.js.
What did the trick for me was to set libraryTarget to "umd", which is different from the default "var" setting that is suitable, e.g., for including the script directly in an HTML file.
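For reference, a minimal sketch of the webpack config above with that change applied; globalObject is an extra safeguard (webpack 4+) so the UMD wrapper does not assume a browser window on the server:

// webpack.config.js (sketch; module.rules etc. unchanged from the config above)
var path = require('path');

module.exports = {
  entry: { ssr: './src/Html.js' },
  output: {
    path: path.resolve(__dirname, 'build'),
    filename: 'ssr-bundle.js',
    library: 'ssrbundle',
    libraryTarget: 'umd',
    globalObject: 'this' // avoids "window is not defined" when the bundle is required in Node
  }
};

In Node the UMD wrapper assigns the library straight to module.exports, so the consuming code typically becomes const Html = require('./build/ssr-bundle.js'); Html.render();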