I am trying to save form data to a spreadsheet in Next.js but I keep getting this error which appears as soon as I import google-spreadsheet
Error
./node_modules/google-spreadsheet/node_modules/google-auth-library/build/src/auth/googleauth.js:17:0
Module not found: Can't resolve 'child_process'
Below is the code that is causing the error.
// The error appears when I do this import
import { GoogleSpreadsheet } from "google-spreadsheet";
// Spreadsheet/auth settings pulled from environment variables.
// NOTE(review): the NEXT_PUBLIC_ prefix makes Next.js inline these values
// into the client-side bundle — including the service-account private key,
// which would then be visible to every visitor. Confirm whether these
// should be server-only env vars instead.
const SPREADSHEET_ID = process.env.NEXT_PUBLIC_SPREADSHEET_ID;
const SHEET_ID = process.env.NEXT_PUBLIC_SHEET_ID;
const CLIENT_EMAIL = process.env.NEXT_PUBLIC_GOOGLE_CLIENT_EMAIL;
const PRIVATE_KEY = process.env.NEXT_PUBLIC_GOOGLE_SERVICE_PRIVATE_KEY;
// Document handle; authentication happens later in appendSpreadsheet.
const doc = new GoogleSpreadsheet(SPREADSHEET_ID);
/**
 * Authenticates against the Google Sheets API and appends a single row
 * to the worksheet identified by SHEET_ID.
 * @param {Object} row - Column-name → value mapping for the new row.
 * @returns {Promise<Object|undefined>} The created row, or undefined when
 *   an error was caught and logged.
 */
const appendSpreadsheet = async (row) => {
  try {
    const credentials = {
      client_email: CLIENT_EMAIL,
      private_key: PRIVATE_KEY,
    };
    await doc.useServiceAccountAuth(credentials);
    // Document properties and worksheet metadata must be loaded before
    // any sheet can be addressed by id.
    await doc.loadInfo();
    const worksheet = doc.sheetsById[SHEET_ID];
    return await worksheet.addRow(row);
  } catch (e) {
    console.error("Error: ", e);
  }
};
I just solved it.
Please create next.config.js file in your root.
And fill it below.
// next.config.js — webpack 4 style workaround.
// `config.node` tells webpack to replace Node-only core modules with empty
// stubs in the client bundle, so imports like `child_process` resolve.
// NOTE(review): the `node: { <module>: 'empty' }` option was removed in
// webpack 5 — newer Next.js versions need `resolve.fallback` instead.
module.exports = {
webpack: config => {
config.node = {
fs: 'empty',
child_process: 'empty',
net: 'empty',
dns: 'empty',
tls: 'empty',
};
return config;
},
};
Hooray!
I was having this problem with nextjs 12. Here's what fixed it for me:
My code:
// Server-side usage: authenticate with a service account, load the
// document metadata, then read its title.
const doc = new GoogleSpreadsheet(SPREADSHEET_ID);
await doc.useServiceAccountAuth({
client_email: process.env.GOOGLE_SERVICE_ACCOUNT_EMAIL,
private_key: process.env.GOOGLE_PRIVATE_KEY,
});
// loadInfo() must run before any sheet or document property is accessed.
await doc.loadInfo();
console.log('title', doc.title);
My next.config.js:
// next.config.js for Next.js 12 (webpack 5 is the default there).
// Fixes over the original:
//  - dropped the obsolete `future.webpack5` flag and the invalid top-level
//    `fallback` key — Next.js ignores both, so they were dead config;
//  - spread-merge into `config.resolve.fallback` instead of assigning
//    individual properties on an object that may not exist yet.
const nextConfig = {
  reactStrictMode: true,
  webpack: (config, { isServer }) => {
    if (!isServer) {
      // Stub out Node-only core modules so the client bundle can resolve
      // google-spreadsheet's transitive Node imports.
      config.resolve.fallback = {
        ...config.resolve.fallback,
        fs: false,
        tls: false,
        net: false,
        child_process: false,
      };
    }
    return config;
  },
};
module.exports = nextConfig;
Took inspiration/fix from here
Found this answer due to a similar issue. I later learned that for Next.js, with some of these API libraries, you must call this type of code (server-side) in one of two contexts: getStaticProps or getServerSideProps. See this and this for more details.
Try changing the import statement to:
const { GoogleSpreadsheet } = require('google-spreadsheet');
Source: https://www.npmjs.com/package/google-spreadsheet
The reason is that the library you require uses some nodejs native modules, like path, fs or child_process.
As part of the build process nextjs will create js bundles for your client and server separately. The issue is that your client build cannot resolve those nodejs modules. As a workaround you can tell nextjs to ignore these modules for the client build only.
next.config.js
// next.config.js — tell webpack to stub Node core modules in the client
// build only, so the server bundle keeps the real implementations.
// Fix over the original: spread the existing `config.resolve.fallback`
// instead of replacing the whole object, which would silently discard any
// fallback entries Next.js itself had configured.
const nextConfig = {
  webpack: (config, { isServer }) => {
    if (!isServer) {
      config.resolve.fallback = {
        ...config.resolve.fallback,
        fs: false,
        path: false,
      };
    }
    return config;
  },
};
module.exports = nextConfig;
The library does not support ES6 module syntax yet.
If you look at the module exports you will find something like this:
// Excerpt from google-spreadsheet's own index.js: the package is published
// as CommonJS, so its classes are exposed via module.exports rather than
// ES module `export` statements.
module.exports = {
GoogleSpreadsheet,
GoogleSpreadsheetWorksheet,
GoogleSpreadsheetRow,
GoogleSpreadsheetFormulaError,
};
https://github.com/theoephraim/node-google-spreadsheet/blob/master/index.js
change the import statement to commonjs modules like this :
const { GoogleSpreadsheet } = require('google-spreadsheet');
Related
I'm trying to load locale files based on the locale code given by Next.js. But whenever I'm trying to do a dynamic import, an error happens and the import path is wrong:
Unable to load translation file "index": Error: Cannot find module './index.en.json'
Code to import:
// Dynamically import the locale JSON and merge its messages; `name` and
// `locale` come from the surrounding i18n setup.
try {
Object.assign(messages, await import(`#/locales/${name}.${locale}.json`))
} catch(err) {
// A missing translation file is logged and recorded, not fatal.
console.log(`Unable to load translation file "${name}": ${err}`)
error = err
}
tsconfig.json:
"baseUrl": ".",
"paths": {
"#/locales/*": ["locales/*"]
}
next.config.js with webpack config:
module.exports = {
reactStrictMode: true,
i18n: {
locales: ['en', 'de'],
defaultLocale: 'en',
},
webpack: (config) => {
config.resolve.alias[`#/locales`] = path.resolve(__dirname, "locales")
return config
}
}
EDIT:
Okay, I found my mistake, the file was named index.de.json instead of index.en.json. But still I want to know why the error message shows a wrong path.
You can try it this way. If it does not work, please check that the #/locales/ path was set correctly.
Example:
// Promise-chain variant of the dynamic locale import: merge the loaded
// JSON into `messages`, record any load failure in `error`.
import(`#/locales/${name}.${locale}.json`).then((json) => {
console.log(json)
Object.assign(messages, json)
}).catch((err) => {
console.log(`Unable to load translation file "${name}": ${err}`)
error = err
});
I am using vitejs to compile my react app statically, however after build .env imports become undefined which is not the case on development stage.
Reading the docs, I found out that these variables are replaced by their corresponding values, but upon looking at the source/compiled code in the dev tools after serving, it shows an empty object with the env name/key.
i might have a wrong configuration in vite.config.ts so here it is.
//vite.config.ts
import { defineConfig, loadEnv } from 'vite';
import reactRefresh from '#vitejs/plugin-react-refresh';
import { getAliases } from 'vite-aliases';
// Generate import aliases for everything under src/, prefixed with '#'.
const aliases = getAliases({
path: 'src',
prefix: '#',
});
// Export a function instead of a plain object so the current build mode
// is available while assembling the config.
export default ({ mode }) => {
// Merge this mode's .env files into process.env so they can be read here.
process.env = { ...process.env, ...loadEnv(mode, process.cwd()) };
// import.meta.env.VITE_NAME available here with: process.env.VITE_NAME
// import.meta.env.VITE_PORT available here with: process.env.VITE_PORT
// React fast-refresh is only needed during development builds.
const plugins = mode === 'development' ? [reactRefresh()] : [];
return defineConfig({
plugins,
publicDir: 'src/assets',
resolve: {
alias: aliases,
},
build: {
chunkSizeWarningLimit: 1500,
},
});
};
And also the code where I'm referencing these env var
//config.ts
/**
 * Builds API/auth endpoint helpers from Vite environment variables and
 * exposes the current build-mode flags.
 */
export const config = () => {
  const env = import.meta.env;
  const url = env.VITE_SERVER_URL;
  const api = env.VITE_API_ENDPOINT;
  const auth = env.VITE_AUTH_ENDPOINT;
  const isProd = env.MODE === 'production';
  const isDev = env.MODE === 'development';
  console.log(url, api, auth);
  // Shared builder: server url + fixed prefix + caller-supplied endpoint.
  const join = (prefix: string) => (endpoint: string) => `${url}${prefix}${endpoint}`;
  return {
    api: join(api),
    auth: join(auth),
    test: join('test'),
    isProd,
    isDev,
  };
};
I just realized what the ViteJS documentation says and I'll leave it in case someone also suffers from this.
You don't have to use VITE_. You can use any prefix you like as long as you define it in the envPrefix option on the vite config.
I'm using Puppeteer to test a client function within a react environment - the function itself doesn't use React, but is meant to be imported in es6 react modules and run inside a end user DOM environment. I need Puppeteer since this function relies on properties such as innerText, that aren't available in jsdom.
This function takes a DOM element as an argument, however I am having trouble writing test files for it. Here is a sample of my code:
import path from 'path';
import puppeteer from 'puppeteer';
import {getSelectionRange, setSelectionRange} from './selection';
// E2E test: drive the selection helpers inside a real browser page, since
// jsdom lacks properties such as innerText.
describe(
'getSelection should match setSelection',
() => {
let browser;
let page;
beforeAll(async done => {
try {
browser = await puppeteer.launch();
page = await browser.newPage();
await page.goto(
`file://${path.join(process.env.ROOT,
'testFiles/selection_range_test.html')}`
);
// NOTE(review): arguments passed from the page to exposeFunction
// callbacks are serialized, so a DOM element cannot survive the trip —
// which matches the empty-object `argEl` symptom observed below.
await page.exposeFunction(
'setSelectionRange',
(el, start, end) => setSelectionRange(el, start, end)
);
await page.exposeFunction(
'getSelectionRange',
el => getSelectionRange(el)
);
} catch(error) {
console.error(error);
}
done();
});
afterAll(async done => {
await browser.close();
done();
});
it('should match on a node with only one text node children', async () => {
const {selection, element, argEl} = await page.evaluate(async () => {
const stn = document.getElementById('single-text-node');
// Since console.log will output in the Puppeteer browser and not in node console,
// I added a line inside the selectionRange function to return the element it receives
// as an argument.
const argEl = await window.setSelectionRange(stn, 1, 10);
const selectionRange = await window.getSelectionRange(stn);
return {selection: selectionRange, element: stn, argEl};
});
// Outputs <div id="single-text-node">...</div>
// (the content is long so I skipped it, but it displays the correct value here)
console.log(element.outerHTML);
// Outputs {}
console.log(argEl);
});
}
);
As described in the comments, the element that is directly returned from page.evaluate() is correct, but when passed as an argument, the function receives an empty object. I suspect a scope issue but I am totally out of solutions here.
Sadly I couldn't find any solution that wouldn't involve transpiling my files, but hopefully I managed to make it work correctly.
The key point was to create a second transpile configuration that will generate code directly usable by a web browser, using the UMD format. Since I use rollup, here is my rollup.config.js file:
import babel from 'rollup-plugin-babel';
import commonjs from 'rollup-plugin-commonjs';
import resolve from 'rollup-plugin-node-resolve';
import pkg from './package.json';
// Both bundles share the same entry point, external list, and plugin set
// apart from the babel options, so the shared pieces are hoisted here.
const external = [...Object.keys(pkg.dependencies || {})];

// The content that is actually exported to be used within a React or Node application.
const libConfig = [
  {
    inlineDynamicImports: true,
    input: './src/index.js',
    output: [{ file: './lib/index.js', format: 'cjs' }],
    external,
    plugins: [commonjs(), resolve(), babel({ exclude: 'node_modules/**' })],
  },
];

// Used to generate a bundle that is directly executable within a browser
// environment, for E2E testing.
const testConfig = [
  {
    inlineDynamicImports: true,
    input: './src/index.js',
    output: [{ file: './dist/index.js', format: 'umd', name: 'tachyon' }],
    external,
    plugins: [commonjs(), resolve(), babel({ runtimeHelpers: true })],
  },
];

// Pick the browser bundle when building for the test run.
const config = process.env.NODE_ENV === 'test' ? testConfig : libConfig;

export default config;
I then rewrote my scripts a bit so my test bundle is generated on each test run.
package.json
{
"scripts": {
"build:test": "NODE_ENV=test rollup -c && NODE_ENV=",
"build": "rollup -c",
"test": "yarn build:test && jest"
},
}
Finally, I added the transpiled script to my selection_test.html file:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Selection range test</title>
<script src="../dist/index.js"></script>
</head>
...
Which lets me write my test file like this:
import path from 'path';
import puppeteer from 'puppeteer';
import {describe, beforeAll, afterAll, it} from '#jest/globals';
// E2E test: the page loads the pre-built UMD bundle (exposed as
// window.tachyon), so all DOM work happens inside page.evaluate and only
// serializable results cross back to Node.
describe(
'getSelection should match setSelection',
() => {
let browser;
let page;
beforeAll(async done => {
try {
browser = await puppeteer.launch({
headless: true,
// Relax cross-origin / cookie restrictions for the file:// test page.
args: ['--disable-web-security', '--disable-features=SameSiteByDefaultCookies,CookiesWithoutSameSiteMustBeSecure'],
});
page = await browser.newPage();
// networkidle0 waits until the bundled <script> has finished loading.
await page.goto(`file://${path.join(process.env.ROOT, 'tests/selection_test.html')}`, {waitUntil: 'networkidle0'});
await page.setBypassCSP(true);
} catch(error) {
console.error(error);
}
done();
});
afterAll(async done => {
await browser.close();
done();
});
it('should match on a node with only one text node children', async () => {
const data = await page.evaluate(() => {
// Fix eslint warnings.
window.tachyon = window.tachyon || null;
if (window.tachyon == null) {
return new Error(`cannot find tachyon module`);
}
const stn = document.getElementById('single-text-node');
const witnessRange = tachyon.setRange(stn, 1, 10);
const selectionRange = tachyon.getRange(stn);
// Only serializable data is returned; the element goes back as outerHTML.
return {witnessRange, selectionRange, element: stn.outerHTML};
});
console.log(data); // Outputs the correct values
/*
{
witnessRange: { start: 1, end: 10 },
selectionRange: {
absolute: { start: 1, end: 10 },
start: { container: {}, offset: 1 },
end: { container: {}, offset: 10 }
},
element: '<div id="single-text-node">Lorem ... sem.</div>'
}
*/
});
}
);
The only remaining issue is that start.container and end.container within the results of getRange are undefined, but it seems more likely an issue from puppeteer that cannot handle the Range startContainer and endContainer properties - I was able to pass DOM references between the content of page.evaluate and my module function without any issues, so it doesn't look like the problem anymore.
I'm trying to build a Dapp with Node.js and IPFS/OrbitDB. Every time I try to start my app I get the error:
this.node = new IPFS({
^
TypeError: IPFS is not a constructor
This is my basic code without a specific Swarm:
const Ipfs = require('ipfs');
const OrbitDB = require('orbit-db');
// Chat room backed by a local js-ipfs node and an OrbitDB instance.
class chatroom {
constructor(IPFS, OrbitDB) {
this.OrbitDB = OrbitDB;
// NOTE(review): this is the line that throws "IPFS is not a constructor" —
// recent js-ipfs releases (0.42.x) expose an async factory (IPFS.create)
// instead of a class constructor; confirm against the installed version.
this.node = new IPFS({
preload: {enable: false},
repo: "./ipfs",
EXPERIMENTAL: {pubsub: true},
config: {
Bootstrap: [],
Addresses: {Swarm: []}
}
});
// NOTE(review): the "error"/"ready" events belong to the older js-ipfs
// API; with the IPFS.create factory the returned promise resolves to an
// already-ready node, so these handlers would never fire.
this.node.on("error", (e) => {throw (e)});
this.node.on("ready", this._init.bind(this));
}
// Once the node is ready, create the OrbitDB instance and signal readiness.
async _init(){
this.orbitdb = await this.OrbitDB.createInstance(this.node);
// NOTE(review): `onready` is never defined in this class — presumably the
// consumer assigns it before the node becomes ready; verify.
this.onready();
}
}
module.exports = exports = new chatroom(Ipfs, OrbitDB);
I'm running on the following version of IPFS: ipfs#0.42.0
I tried it also on an empty Nodejs App and there I had the same error also when I added a specific Swarm to connect to.
I would really appreciate your help, thx for your time in advance.
Kind regards
beni
I did it now like that:
const IPFS = require('ipfs');

/**
 * Creates and starts a js-ipfs node.
 *
 * Fix over the original: the node is now returned, so callers can actually
 * use it — previously it was created and started but never handed back.
 *
 * @returns {Promise<object>} the started IPFS node
 */
async function createNode() {
  // NOTE(review): per the js-ipfs options docs, Addresses/API/Gateway are
  // expected under a `config` key; at the top level of the options object
  // they are likely ignored — confirm intent.
  const node = await IPFS.create(
    {
      // Random repo path so repeated runs don't collide on a locked repo.
      repo: (() => `repo-${Math.random()}`)(),
      "Addresses": {
        "Swarm": [
          "/ip4/0.0.0.0/tcp/4001"
        ],
        "API": "/ip4/127.0.0.1/tcp/5001",
        "Gateway": "/ip4/127.0.0.1/tcp/8080"
      }
    }
  );
  try {
    await node.start();
    console.log('Node started!');
  } catch (error) {
    console.error('Node failed to start!', error);
  }
  return node;
}
(thx #Eugene)
I've performed a git clone command with isomorphic-git on the browser side. But I don't know where those files are stored or how to read them with JavaScript.
Code snippet:
import { configure } from 'browserfs'
import { plugins, clone } from 'isomorphic-git'
import FS from '#isomorphic-git/lightning-fs'

// Back isomorphic-git with an IndexedDB filesystem, then shallow-clone the
// repository into it.
configure({ fs: "IndexedDB", options: {} }, (err) => {
  if (err) return console.log(err);
  window.fs = new FS("fs");
  plugins.set('fs', window.fs);
  (async () => {
    console.log(new Date())
    await clone({
      dir: '/',
      corsProxy: 'https://cors.isomorphic-git.org',
      url: 'https://github.com/isomorphic-git/isomorphic-git.git',
      singleBranch: true,
      depth: 1
    })
    console.log(new Date())
    // FIX: the async IIFE was a floating promise — a failed clone became an
    // unhandled rejection. Surface the error instead.
  })().catch(console.error)
})
Network activities observed by Chrome DevTools:
IndexedDB:
Detail tutorial here: https://isomorphic-git.org/docs/en/browser.html
You could try API from #isomorphic-git/lightning-fs operate files.
import LightningFS from '#isomorphic-git/lightning-fs'

// Read a file out of the browser filesystem that isomorphic-git cloned into.
const fs = new LightningFS('fs')
const pfs = fs.promises
// FIX: pfs.readFile returns a Promise — it must be awaited before the bytes
// can be decoded (the original passed the Promise object to TextDecoder).
const pck = await pfs.readFile('/package.json')
const pckContent = new TextDecoder('utf-8').decode(pck)
console.log('pckContent', pckContent)