"export '...' was not found in '...' " while webpack build - javascript

I am developing with electron-boilerplate.
I set up the same environment on my desktop as my co-worker: node 10.2.0 and npm 5.6.0; the only difference is the OS (co-worker: Windows 7 64-bit, me: Windows 10 64-bit).
However, the following warning is printed at runtime:
warning in ./src/pages/config.js
"export 'login' was not found in '../lib/hoeat-api'
The starting point of the program is as follows.
npm run build/start.js
Here is the start.js code:
const childProcess = require("child_process");
const electron = require("electron");
const webpack = require("webpack");
const config = require("./webpack.app.config");

const env = "development";
const compiler = webpack(config(env));
let electronStarted = false;

const watching = compiler.watch({}, (err, stats) => {
  if (!err && !stats.hasErrors() && !electronStarted) {
    electronStarted = true;

    childProcess
      .spawn(electron, ["."], { stdio: "inherit" })
      .on("close", () => {
        watching.close();
      });
  }
});
Why does the webpack build produce this warning?
Thanks for your help.
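In webpack this warning usually means the importing file asks for a named export that the target module does not actually provide, for example because the module only has a default export or is written as CommonJS. A minimal sketch of that mismatch; the file contents are hypothetical, since hoeat-api is not shown in the question:

// ../lib/hoeat-api.js (hypothetical) – login only exists on a default export
export default {
  login(credentials) { /* ... */ }
};

// src/pages/config.js – a named import webpack cannot match, hence the warning
import { login } from '../lib/hoeat-api';

// Either export login by name in hoeat-api ...
//   export function login(credentials) { /* ... */ }
// ... or import the default object and call it as api.login(...):
//   import api from '../lib/hoeat-api';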

Related

How can I stop getting the 'Can't resolve async_hooks' error when using npm start?

I've been following a tutorial on how to connect to a mongoDB collection. The tutorial works fine, but when I try the same code in a project I'm working on, I keep getting the following error:
./node_modules/raw-body/index.js
Module not found: Can't resolve 'async_hooks' in '*:\*\*\Desktop\Projects\testing-area\node_modules\raw-body'
I've tried:
- deleting node_modules and then running npm install
- running npm update to bring all dependencies to the latest version
- updating npm itself
I've read that async_hooks is meant for backend code and that trying to use it in the frontend can cause this issue. The problem is that I don't really know a way around it.
Here's the code I'm trying to use to connect to the mongodb collection:
// pull MongoClient in from the mongodb driver
const { MongoClient } = require('mongodb')

let dbConnection

const bark = (input) => {
  console.log(input)
}

module.exports = {
  connectToDb: (cb) => {
    MongoClient.connect("mongodb://localhost:27017/Treasures")
      .then((client) => {
        dbConnection = client.db()
        return cb()
      })
      .catch(err => {
        bark("----------")
        bark("Pants shat when trying to connect to DB:\n")
        bark(err)
        bark("----------")
        return cb(err)
      })
  },
  getDb: () => dbConnection
}
And then in a component I have this, to try and get the data from the collection:
// Imports/Requires for mongoDb connection
const { ObjectID } = require("bson");
const express = require("express");
const { connectToDb, getDb } = require("../../db")

// COMPONENT STARTS HERE:
export const TreasureGen = () => {
  //init app and middleware
  const app = express();

  //db connection
  let db
  connectToDb((err) => {
    if (!err) {
      app.listen(3000, () => {
        bark("App listening on port 3000")
      })
      db = getDb()
    }
  })
}
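Since async_hooks only exists in Node, the usual way around this is to keep the express/mongodb code out of the frontend bundle entirely: run it as a separate Node server and have the component fetch from it over HTTP. A minimal sketch under that assumption; the /api/treasures route and the collection name are made up for illustration:

// server.js – run separately with `node server.js`; never imported by the frontend bundle
const express = require('express');
const { connectToDb, getDb } = require('./db');

const app = express();

// Hypothetical route returning documents from a 'treasures' collection
app.get('/api/treasures', (req, res) => {
  getDb().collection('treasures').find().toArray()
    .then(docs => res.json(docs))
    .catch(err => res.status(500).json({ error: err.message }));
});

connectToDb((err) => {
  if (!err) {
    app.listen(3000, () => console.log('API listening on port 3000'));
  }
});

// The component then only uses fetch, so no Node-only modules end up in the bundle:
//   fetch('http://localhost:3000/api/treasures').then(r => r.json()).then(/* use the data */);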

Spawn a process inside a pkg-built executable, or detach it

I'm not sure what the problem is here: the mongod process does not spawn inside the program.exe created with pkg. Before compiling, I tested that the script can launch the mongod process. After testing the packaged build, it seems spawn can't read the pkg filesystem (the snapshot).
const { spawn } = require('child_process');
const { parse } = require('path');

let processPath = parse(process.argv[0]);
let processDir = processPath.dir;

const args = [
  '-f', `${__dirname}\\configs\\mongodb.yml`,
  '--dbpath', `${processDir}\\database\\data`,
  '--logpath', `${processDir}\\database\\log\\system.log`,
];
const options = {
  cwd: `${processDir}\\bin`
};

const mongod = spawn('mongod', args, options);

mongod.stdout.on('data', chunk => {
  console.log(chunk.toString())
});
mongod.stdout.on('error', chunk => {
  console.log(chunk.toString())
});
mongod.on('spawn', () => {
  console.log('success')
});
mongod.on('error', function(error) {
  console.log(error)
});
Build Dir
build
build/program.exe
build/bin
build/bin/mongod.exe
build/database
build/database/data
build/database/log/system.log
Package.json pkg configurations
"bin": "dist/application.js",
"pkg": {
"targets": ["node16-win-x64"],
"outputPath": "dist/build",
"assets": [
"dist/configs/*"
]
}
Here is my solution to this issue, tested on Linux Ubuntu 22.04 LTS.
Case scenario:
I needed to include an executable file, hello_world, as an asset at the /snapshot/project/bin/hello_world virtual path and, based on some conditions, execute it inside the Linux environment.
The problem:
I was getting the following error when trying to execute the command via child_process.spawn:
/bin/sh: 1: /snapshot/project/bin/hello_world: not found
So clearly my OS is trying to execute the hello_world command via /bin/sh; however, the system is unable to access the /snapshot virtual filesystem and is therefore unable to execute it.
The workaround:
Clearly the main file system is unable to access the virtual file system, but we can do the opposite: copy our executable file from the virtual file system into the main file system and execute it from there. Basically, this is what I did:
//node packages
const fs = require('fs');
const os = require('os');
const path = require('path');
const {execSync, spawn} = require('child_process');

// executable file name
const executable = 'hello_world';
// file path to the asset executable file
const remoteControlFilePath = path.join(__dirname, `../bin/${executable}`);
let executableFileFullPath = remoteControlFilePath;

// skip the workaround if the process is not the pkg-ed version
if (process.pkg) {
  // create a temporary folder for our executable file
  const destination = fs.mkdtempSync(`${os.tmpdir()}${path.sep}`);
  const destinationPath = path.join(destination, executable);
  executableFileFullPath = destinationPath;

  // copy the executable file into the temporary folder
  fs.copyFileSync(remoteControlFilePath, destinationPath);
  // on Linux systems you need to manually make the file executable
  execSync(`chmod +x ${destinationPath}`);
}

// using {detached: true}, execute the command independently of its parent process
// so the main parent process does not fail if the child process fails
const child = spawn(executableFileFullPath, {detached: true});

child.stdout.on('data', (data) => {
  console.log(`child stdout:\n${data}`);
});
child.stderr.on('data', (data) => {
  console.error(`child stderr:\n${data}`);
});
child.on('exit', (code, signal) => {
  console.log(`child process exited with code ${code} and signal ${signal}`);
});
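Applied to the question above, the same copy-out idea would also cover the bundled mongodb.yml: mongod is an external binary and cannot open snapshot paths, so the config has to be copied to the real file system before it is passed on. A rough sketch under that assumption; the temporary directory name is illustrative:

const fs = require('fs');
const os = require('os');
const path = require('path');
const { spawn } = require('child_process');

// Copy the config that pkg placed in the snapshot out to a real temporary folder,
// because mongod itself cannot read /snapshot (or C:\snapshot) paths.
const bundledConfig = path.join(__dirname, 'configs', 'mongodb.yml');
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'mongo-cfg-'));
const realConfig = path.join(tmpDir, 'mongodb.yml');
fs.copyFileSync(bundledConfig, realConfig);

// Hand mongod the real path instead of the snapshot path.
const processDir = path.parse(process.argv[0]).dir;
const mongod = spawn('mongod', ['-f', realConfig], { cwd: path.join(processDir, 'bin') });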

Solidity: How to compile multiple smart contracts in compile.js file?

I would like to compile multiple contracts in one compile.js file but I'm not sure how to do it.
My compile.js file with a single contract looks like this:
const path = require('path');
const fs = require('fs');
const solc = require('solc');
const lotteryPath = path.resolve(__dirname, 'contracts', 'Lottery.sol');
const source = fs.readFileSync(lotteryPath, 'utf8');
module.exports = solc.compile(source, 1);
How can I add more contracts to the compile.js file? I understand that the 1 must be changed to the number of contracts, but I'm not sure what else is required.
Here is an example I made. You can find it in my public repo. Briefly, I have a "build" folder where I write the output of each compiled contract to JSON files.
const path = require("path"); // nodejs 'path' module
const solc = require("solc"); // solidity compiler module
const fs = require("fs-extra"); // file system module

// Fetch path of the build folder
const buildPath = path.resolve(__dirname, "build");
const contractspath = path.resolve(__dirname, "contracts");

// Remove the build folder and every file in it
fs.removeSync(buildPath);

// Fetch all contract files in the contracts folder
const fileNames = fs.readdirSync(contractspath);

// Gather the source code of every contract into the input object
const input = fileNames.reduce(
  (input, fileName) => {
    const filePath = path.resolve(__dirname, "contracts", fileName);
    const source = fs.readFileSync(filePath, "utf8");
    return { sources: { ...input.sources, [fileName]: source } };
  },
  { sources: {} }
);

// Compile all contracts
const output = solc.compile(input, 1).contracts;

// Re-create the build folder for the output files of each contract
fs.ensureDirSync(buildPath);

// Output contains all objects from all contracts;
// write the contents of each one to a different file
for (let contract in output) {
  fs.outputJsonSync(
    path.resolve(buildPath, contract.split(":")[1] + ".json"),
    output[contract]
  );
}
Basically, if your folder structure does not match mine, you have to change this part of the above code:
// Fetch path of the build folder
const buildPath = path.resolve(__dirname, "build");
const contractspath = path.resolve(__dirname, "contracts");
The accepted solution does not work for Solidity >0.6.0 and <=0.8.1.
For those versions, I solved it as follows:
const path = require("path");
const fs = require("fs-extra");
const solc = require("solc");

const buildPath = path.resolve(__dirname, "build");
fs.removeSync(buildPath);

const contractPath = path.resolve(__dirname, "contracts");
const fileNames = fs.readdirSync(contractPath);

const compilerInput = {
  language: "Solidity",
  sources: fileNames.reduce((input, fileName) => {
    const filePath = path.resolve(contractPath, fileName);
    const source = fs.readFileSync(filePath, "utf8");
    return { ...input, [fileName]: { content: source } };
  }, {}),
  settings: {
    outputSelection: {
      "*": {
        "*": ["abi", "evm.bytecode.object"],
      },
    },
  },
};

// Compile all contracts
const compiled = JSON.parse(solc.compile(JSON.stringify(compilerInput)));

fs.ensureDirSync(buildPath);

fileNames.map((fileName) => {
  const contracts = Object.keys(compiled.contracts[fileName]);
  contracts.map((contract) => {
    fs.outputJsonSync(
      path.resolve(buildPath, contract + ".json"),
      compiled.contracts[fileName][contract]
    );
  });
});
Be sure to check that your pragma solidity x.x.x matches the version specified in your package.json. For example, if I'm using Solidity 0.6.12, my solc dependency would be:
"dependencies": {
  ...
  "solc": "^0.6.12",
  ...
}
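With that outputSelection, each JSON file written to build/ contains an abi array and an evm.bytecode.object hex string, which is what a deployment script needs to read back. A minimal sketch, assuming a compiled contract named Lottery and an already-configured web3 instance (both are assumptions, not part of the answer):

const path = require('path');

// Hypothetical compiled artifact produced by the script above
const compiled = require(path.resolve(__dirname, 'build', 'Lottery.json'));

const abi = compiled.abi;                      // from the "abi" output selection
const bytecode = compiled.evm.bytecode.object; // from "evm.bytecode.object"

// Deployment with a pre-configured web3 instance and unlocked account (illustrative only):
//   const instance = await new web3.eth.Contract(abi)
//     .deploy({ data: bytecode })
//     .send({ from: accounts[0], gas: 1500000 });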
compile.js:
const path = require('path');
const solc = require('solc');
const fs = require('fs-extra');

const buildPath = path.resolve(__dirname, 'build');
//remove the build folder and its files
fs.removeSync(buildPath);

const healthPath = path.resolve(__dirname, 'contract', 'health.sol');
//read the content of the file
console.log(healthPath);
const source = fs.readFileSync(healthPath, 'utf8');

//compile contract
const output = solc.compile(source, 1).contracts;

//create build folder
fs.ensureDirSync(buildPath);
console.log(output);

for (let contract in output) {
  fs.outputJsonSync(
    path.resolve(buildPath, contract.replace(':', '') + '.json'),
    output[contract]
  );
}

How to run(sync) multiple .js files from a directory

I'm trying to run multiple .js files from a directory:
exampleDir/test.js
console.log('hi');
run.js
const fs = require('fs');
const {execFileSync} = require('child_process');

const testsFolder = './exampleDir/';
const files = fs.readdirSync(testsFolder);
const funcs = files.map(function(file) {
  const out = execFileSync(`node ${testsFolder}${file}`);
  console.log(out.toString());
});
however I get:
> example#1.0.0 test /home/perdugames/example
> node ./run.js
child_process.js:624
throw err;
^
Error: spawnSync node ./exampleDir/test.js ENOENT
...
File paths should be specified unambiguously, preferably independently of the current working directory and relative to the current module. To create a new Node process, there are spawn and spawnSync in child_process:
...
const fs = require('fs');
const path = require('path');
const { spawnSync } = require('child_process');

const testsFolder = path.join(__dirname, './exampleDir/');
const files = fs.readdirSync(testsFolder);
const funcs = files.map(function(file) {
  const filePath = path.join(testsFolder, file);
  // run each script in a new Node process
  const out = spawnSync(process.execPath, [filePath]);
  console.log(out.stdout.toString());
});
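For what it's worth, the original execFileSync call fails with ENOENT because the whole string "node ./exampleDir/test.js" is treated as a single executable name; splitting the command from its arguments fixes that variant too. A sketch of the same idea applied to the question's code, reusing filePath from the snippet above:

const { execFileSync } = require('child_process');

// Run each script by passing it as an argument to the node executable.
const out = execFileSync(process.execPath, [filePath]);
console.log(out.toString());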

How to stop webdriver without crashing node js

Is it possible to stop the Selenium webdriver without stopping Node?
I have the following issue:
I am trying to create an API tool that does some web automation when it receives a GET request. So I am basically running the Selenium webdriver on a GET request to /start via Express. I want the tool to check for different elements, and when it fails somewhere I want it to stop Selenium but NOT the Node instance (since a new GET request could be sent).
This is my code so far:
"use strict";
const webdriver = require('selenium-webdriver'),
  Express = require('express'),
  By = webdriver.By,
  until = webdriver.until,
  Keys = webdriver.Key,
  app = new Express();

app.get("/automate", (req, res) => {
  start(res);
});

function start(res) {
  const driver = new webdriver.Builder().forBrowser('chrome').build();
  driver.get('https://www.google.com/');

  // # --- foo --- #
  let errMessage = {pos: "FOO", message: "Ooops friendly robot has some troubles!"}
  checkStep("#foo", errMessage);
  driver.findElement(By.id("foo"))
    .sendKeys("fooz");

  // # --- bar --- #
  errMessage = {pos: "BAR", message: "Ooops friendly robot has some troubles!"}
  checkStep("#bar", errMessage);
  driver.findElement(By.id("bar"))
    .sendKeys("baz");

  // etc…

  function checkStep(selector, errMessage) {
    driver.findElement(By.css(selector))
      .then(() => {
        console.log(`${selector} => found`);
      })
      .catch(err => {
        console.log(`Error: ${err}`);
        res.send(errMessage);
        driver.quit();
      });
  }
}

app.get("*", (req, res) => {
  res.send("Hello World");
});

// start the server
const port = process.env.PORT || 3000;
const env = process.env.NODE_ENV || 'production';

app.listen(port, err => {
  if (err) { return console.error(err); }
  console.info(`Server running on http://localhost:${port} [${env}]`);
});
It is actually working so far: when Selenium does not find the element, the response from the API is correct. I get back:
{
  "pos": "FOO",
  "message": "Ooops friendly robot has some troubles!"
}
So far so good.
BUT unfortunately stopping Selenium also stops Node from running.
The error I get is the following:
throw error;
^
WebDriverError: no such session
(Driver info: chromedriver=2.30.477690 (c53f4ad87510ee97b5c3425a14c0e79780cdf262),platform=Ma
c OS X 10.12.5 x86_64)
at WebDriverError
Please help, thank you!
PS: I am not using webdriverio; as you can see, I use this package: https://www.npmjs.com/package/selenium-webdriver
OK, I got this working. It's a somewhat involved solution, but it works:
Using Child Processes
Basically, every time the app gets a GET request to /automate, it now creates a child process in Node which runs the Selenium scripts (a child process is kind of like using another thread; here is a very good tutorial on child processes):
index.js
"use strict";
const Express = require('express');
const { spawn } = require('child_process');
const data = require('./data.json');

const app = new Express();

app.get("/automate", (req, res) => {
  const child = spawn(
    process.execPath,
    [`${__dirname}/test.js`, JSON.stringify(data)],
    { stdio: ['inherit', 'inherit', 'inherit', 'pipe'] }
  );

  child.stdio[3].on('data', data => {
    const response = JSON.parse(data.toString());
    res.send(response);
    console.log(response);
    child.kill();
  });
});

app.get("*", (req, res) => {
  res.send("Hello World");
});

const port = process.env.PORT || 3000;
const env = process.env.NODE_ENV || 'production';

app.listen(port, err => {
  if (err) { return console.error(err); }
  console.info(`Server running on http://localhost:${port} [${env}]`);
});
test.js
"use strict";
// hook up file descriptor 3, the extra "pipe" from the parent
const Net = require('net'),
  pipe = new Net.Socket({ fd: 3 });

const data = JSON.parse(process.argv[2]);

const webdriver = require('selenium-webdriver'),
  By = webdriver.By,
  until = webdriver.until,
  Keys = webdriver.Key;

function start() {
  const driver = new webdriver.Builder().forBrowser('chrome').build();
  driver.get('https://www.google.com/');

  // # --- foo --- #
  let errMessage = {pos: "lst-ib", message: "Ooops friendly robot has some troubles!"}
  checkStep("#lst-ib");
  driver.findElement(By.css("#lst-ib"))
    .sendKeys("fooz");

  driver.get('https://www.facebook.com/');
  driver.get('https://www.google.com/');
  driver.get('https://www.facebook.com/');

  // # --- bar --- #
  errMessage = {pos: "BAR", message: "Ooops friendly robot has some troubles!"}
  checkStep("#bar");
  driver.findElement(By.css("#bar"))
    .sendKeys("baz");

  function checkStep(selector) {
    driver.findElement(By.css(selector))
      .then(() => {
        console.log(`${selector} => found`);
      })
      .catch(err => {
        console.log(`${selector} => not found`);
        publish(errMessage);
        driver.quit();
      });
  }
}

function publish(message) {
  pipe.write(JSON.stringify(message));
}

start();
It works like a charm: each request opens a new child process, and that child process is killed once it sends a message, while the message is also sent back to the client. This way you can easily have several Selenium instances running simultaneously.
You’re welcome.
PS: If you hate all this asynchronous stuff from Selenium, webdriver-sync seems like a good choice. It basically wraps the Selenium code so that it is synchronous instead of asynchronous. That way I am able to use try {} catch {} and driver.quit(); without any errors for the code that comes later. (But this comes with a disadvantage: it actually blocks your other Node.js code.)
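A different route (not what the answer above uses) is to stay with the asynchronous selenium-webdriver API and rely on async/await, which also lets try/catch and driver.quit() compose without crashing the Node process. A minimal sketch:

const webdriver = require('selenium-webdriver');
const { By } = webdriver;

async function run() {
  const driver = new webdriver.Builder().forBrowser('chrome').build();
  try {
    await driver.get('https://www.google.com/');
    await driver.findElement(By.css('#foo')); // rejects if the element is missing
  } catch (err) {
    console.log(`Error: ${err}`); // handled here, so Node keeps running
  } finally {
    await driver.quit(); // ends the browser session cleanly
  }
}

run();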
