While working with Cypress, I was trying to download a .xlsx report and then manipulate its data for further verification. The problem I faced was that when Cypress ran the test with the Electron browser, it prompted a window-based download popup.
Moreover, when I selected the Chrome browser for running the tests, the default download directory could not be modified. Hence, manipulating the data isn't possible if the file is not present in the project directory, as that would cause failures in the CI execution.
Any workaround for this would be appreciated.
I solved it with the index.js file in the plugins folder by doing the following:
const cypressTypeScriptPreprocessor = require('./cy-ts-preprocessor');
const path = require('path');
const fs = require('fs');
const RESULT_FOLDER = 'results';
const downloadDirectory = path.join(__dirname, '..', RESULT_FOLDER);
module.exports = on => {
  on('file:preprocessor', cypressTypeScriptPreprocessor);

  on('before:browser:launch', (browser = {}, options) => {
    if (fs.existsSync(downloadDirectory)) {
      fs.rmdirSync(downloadDirectory, { recursive: true });
    }

    if (browser.family === 'chromium' && browser.name !== 'electron') {
      options.preferences.default['download'] = { default_directory: downloadDirectory };
      return options;
    }

    if (browser.family === 'firefox') {
      options.preferences['browser.download.dir'] = downloadDirectory;
      options.preferences['browser.download.folderList'] = 2;
      return options;
    }
  });
};
You will find the documentation for this here: https://docs.cypress.io/api/plugins/browser-launch-api.html#Change-download-directory
Be aware this works for Chromium browsers but currently not for the Electron browser in CI mode. Cypress knows about the issue and is currently implementing a solution for that: https://github.com/cypress-io/cypress/issues/949#issuecomment-755975882
You can change the download path in your test like below.
const downloadFolder = path.resolve(__dirname, '../users/user/source/repos/containingRoot/cypress/downloads');
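Once the file lands in a known folder, a spec can assert on it. Here is a minimal sketch, assuming the plugin configuration above writes the export to cypress/results/report.xlsx (the file name and the export-button selector are made up for illustration):

it('downloads the xlsx report', () => {
  cy.get('[data-test="export-xlsx"]').click(); // hypothetical selector for the export button
  // cy.readFile resolves relative to the project root and retries until the file exists
  cy.readFile('cypress/results/report.xlsx', 'binary', { timeout: 15000 })
    .should(content => expect(content.length).to.be.greaterThan(0));
});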
I created an Electron app that has user configuration/settings (changing the theme, tracking and persisting the window's current size and position). I saved the JSON files that contain all of these in the user AppData folder (app.getPath('userData')), and it works well in development. However, after being packaged with electron-builder, the app no longer reads or writes AppData: when I try to change the theme, the new colors are not written, and the size and position are not persisted.
What should I do next if I must use AppData?
const { app, BrowserWindow, ipcMain } = require("electron");
let isDev = require('electron-is-dev');
const path = require('path');
const fs = require('fs');
const { setColors } = require('./ipc');
const userDir = app.getPath('userData');
let isConfigDir = fs.existsSync(path.join(userDir, 'config'));
let isSizeDir = fs.existsSync(path.join(userDir, 'config/size.json'))
let isPosDir = fs.existsSync(path.join(userDir, 'config/pos.json'));
let isColorDir = fs.existsSync(path.join(userDir, 'config/colors.json'));
// create the config dir if it does not already exist
if (!isConfigDir) {
  fs.mkdirSync(path.join(userDir, '/config'));
}

// check for and create the config files if they do not already exist
let pos;
if (!isPosDir) {
  let pos_ = { "x": 636, "y": 0 };
  fs.writeFileSync(path.join(userDir, 'config/pos.json'), JSON.stringify(pos_));
  pos = pos_;
} else {
  pos = JSON.parse(fs.readFileSync(path.join(userDir, 'config/pos.json'), "utf8"));
}

let size;
if (!isSizeDir) {
  let size_ = { "width": 701, "height": 768 };
  fs.writeFileSync(path.join(userDir, 'config/size.json'), JSON.stringify(size_));
  size = size_;
} else {
  size = JSON.parse(fs.readFileSync(path.join(userDir, 'config/size.json'), "utf8"));
}

ipcMain.handle("getColors", (event, args) => {
  let colors;
  if (!isColorDir) {
    let colors_ = { "bg": "gold", "text": "#000" };
    fs.writeFileSync(path.join(userDir, 'config/colors.json'), JSON.stringify(colors_));
    colors = colors_;
    return colors;
  } else {
    colors = JSON.parse(fs.readFileSync(path.join(userDir, 'config/colors.json'), "utf8"));
    return colors;
  }
});
let win;
function createWin() {
  win = new BrowserWindow({
    width: size.width,
    height: size.height,
    x: pos.x,
    y: pos.y,
    title: 'BMS',
    webPreferences: {
      preload: path.join(__dirname, "preload.js")
    }
  });
  isDev ? win.loadURL('http://localhost:3000') : win.loadFile('views/build/index.html');
  win.on("closed", () => win = null);

  // persist the window size in size.json when the window is resized
  win.on('resized', () => {
    let size = {
      width: win.getBounds().width,
      height: win.getBounds().height,
    };
    fs.writeFileSync(path.join(userDir, 'config/size.json'), JSON.stringify(size));
  });

  // persist the window position in pos.json when the window moves
  win.on('move', () => {
    let pos = {
      x: win.getBounds().x,
      y: win.getBounds().y
    };
    fs.writeFileSync(path.join(userDir, 'config/pos.json'), JSON.stringify(pos));
  });
}
app.on("ready", ()=>{
//create window
createWin();
setColors(userDir)
})
app.on("window-all-closed", ()=>{
if(process.platform !== 'darwin'){
app.quit()
}
})
app.on('active', ()=>{
if(win===null){
createWin()
}
})
Looking at your code, I suspect that your /config folder does not exist when you run your application. So all your variables isSizeDir, isPosDir, etc. are false and stay false, even after you create the folder on line 17.
I would move the /config folder creation to after line 11.
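A minimal sketch of that reordering, reusing the names from the code above:

// create the config dir first, then check for the individual files
const configDir = path.join(userDir, 'config');
if (!fs.existsSync(configDir)) {
  fs.mkdirSync(configDir, { recursive: true });
}
let isSizeDir = fs.existsSync(path.join(configDir, 'size.json'));
let isPosDir = fs.existsSync(path.join(configDir, 'pos.json'));
let isColorDir = fs.existsSync(path.join(configDir, 'colors.json'));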
You should take a look at the electron-json-storage package. It handles where to put the persistent data depending on the user's OS, as well as the JSON parsing. You just call the package with whatever JS object you want to store.
I am using this package in my different Electron projects, and it really makes things easy. I recommend using it instead of struggling with the 'fs' Node API.
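For example, persisting the colors setting could look roughly like this (a sketch mirroring the defaults from the question, not a drop-in replacement):

const storage = require('electron-json-storage');

// store a plain JS object; the package picks an OS-appropriate data directory
storage.set('colors', { bg: 'gold', text: '#000' }, (error) => {
  if (error) throw error;
});

// read it back later
storage.get('colors', (error, data) => {
  if (error) throw error;
  console.log(data); // { bg: 'gold', text: '#000' }
});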
I got it working now.
So basically:
app.getPath('userData') gives you a path that goes down into the Electron app's own directory, so I had been writing into my Electron app directory.
But app.getPath('appData') gives the path one level up, at the app-data directory level, and does not include the app's folder. Hence it won't be packaged by electron-builder and will always stay in the PC's app-data directory.
const appDir = app.getPath('appData');
instead of
const appDir = app.getPath('userData');
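For illustration, on Windows the two paths differ roughly like this (the app folder name depends on name/productName in package.json; 'BMS' here is only a guess based on the window title):

// app.getPath('appData')  -> C:\Users\<user>\AppData\Roaming
// app.getPath('userData') -> C:\Users\<user>\AppData\Roaming\<app name>, e.g. ...\Roaming\BMS
const appDir = app.getPath('appData');
const configDir = path.join(appDir, 'BMS', 'config'); // build the app-level folder explicitly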
I got this from Jay, whom I chatted with earlier. His YouTube channel is ithinktechnologies.
Thanks #Guillaume and #Dony for your time. Means a lot to me
When building our production app in Gatsby, I see something like this:
window.___chunkMapping={
"app":[],
"component---src-templates-page-tsx":[],
"component---src-templates-pages-newsletter-tsx":[]
}
Is it possible to hash these paths instead of printing them out? We don't want to expose too much of what is happening in the back end.
I tried setting these configs in webpack:
output: {
filename: `[chunkhash:2][contenthash:5].js`,
chunkFilename: `[chunkhash:2][contenthash:5].js`,
},
And it successfully hashes .js files but not the template paths.
I upvoted this question when I first saw it; I think this should definitely be done in production builds.
Unfortunately, componentChunkName (the template path in question) is generated by Gatsby in createPage & not handled by webpack.
The code that generates componentChunkName is over here: github
I tried to modify the code as follows:
const { kebabCase } = require(`lodash`)
const path = require(`path`)
+ const uuidv5 = require(`uuid/v5`)
const { store } = require(`../redux`)
const generateComponentChunkName = componentPath => {
const program = store.getState().program
let directory = `/`
if (program && program.directory) {
directory = program.directory
}
const name = path.relative(directory, componentPath)
- return `component---${kebabCase(name)}`
+ return process.env.NODE_ENV === `production`
+ ? `component---${uuidv5(name, uuidv5.URL)}`
+ : `component---${kebabCase(name)}`
}
exports.generateComponentChunkName = generateComponentChunkName
This successfully hides all the component names in production build:
app: Array [ "/app-e593b3d93932ed3a0363.js" ]
"component---11d478fe-6a55-579c-becf-625ab1e57cf4": Array [ "/component---11d478fe-6a55-579c-becf-625ab1e57cf4-76c90ae50035c52657a0.js" ]
"component---15c76861-b723-5e0a-823c-b6832aeeb0a0": Array [ "/component---15c76861-b723-5e0a-823c-b6832aeeb0a0-18eb457ba6c147e1b31b.js" ]
...
None of the local unit tests failed, and my clicking-around-until-something-breaks testing also hasn't yielded any errors. I might submit a PR later today to see if the maintainers have some insights on why this is not a good idea.
Edit: I opened an issue instead: github, you can subscribe to the issue to see how it resolves.
I am working on a WordPress plugin and have all the files in my working directory and run gulp in that project folder. Now, I'd like to have a watch task that copies all the changes to my local WP installation for testing.
Therefore I am looking for a way to sync (only in one direction) the project folder with the plugin folder of WP.
I managed to get it to work with gulp-directory-sync
...
var dirSync = require("gulp-directory-sync");
var localDir = "../newDir/";
var buildDir = "./buildDir/";
...
function copy_to_local_folder() {
  return pipeline(
    gulp.src(buildDir + '**/*'),
    dirSync(buildDir, localDir, { printSummary: true })
  );
}
function watch_local() {
  gulp.watch(buildDir + '**/*', copy_to_local_folder);
}
exports.default = watch_local;
However, the plugin hasn't been updated in 4 years and, according to this answer, it is not doing it the proper "gulp way" (e.g. not using gulp.src), and this task should be possible with other basic gulp functions.
Copying changed files is pretty easy, but keeping track of deleted files is more complicated. I would also prefer to only update changed/deleted/new files rather than clearing the folder every time before copying all files.
Starting with the updated code in the aforementioned answer, I tried to implement it and made changes to make it work.
...
var newer = require("gulp-newer");
var pipeline = require("readable-stream").pipeline;
var del = require("del");
var localDir = "../newDir/";
var buildDir = "./buildDir/";
function copy_to_local_folder() {
  return pipeline(
    gulp.src([buildDir + '**/*']),
    newer(localDir),
    gulp.dest(localDir)
  );
}

function watch_local() {
  var watcher = gulp.watch(buildDir + '**/*', copy_to_local_folder);
  watcher.on('unlink', function(path) {
    console.log(path);
    var newPath = './' + path;
    newPath = newPath.replace(buildDir, localDir);
    console.log(newPath);
    (async () => {
      // dryRun: true only reports what would be deleted; set it to false to actually delete
      const deletedPaths = await del(newPath, { dryRun: true, force: true });
      console.log('Deleted files and directories:\n', deletedPaths.join('\n'));
    })();
  });
}
exports.default = watch_local;
With this code, the folder gets updated when I change or delete files, but it does not trigger when I delete an entire folder. This is probably because I use unlink and not unlinkDir. But even if I use the version of the function below, it doesn't get triggered when deleting a folder (together with the files it contains).
watcher.on('unlinkDir', function(path) {
  console.log('folder deleted');
  console.log(path);
  var newPath = './' + path;
  newPath = newPath.replace(buildDir, localDir);
  console.log(newPath);
});
What am I doing wrong?
Or is there in general a better way to achieve this?
PS: I'm using
node v11.15.0
gulp v4.0.2
on Linux
deleting files and folders in VS Code
Update:
When I run it with:
- watcher.on('unlink', ... and delete a file:
  it works, with the console.log output, the ( async () => ... part, and Starting and Finished for copy_to_local_folder
- watcher.on('unlinkDir', ... and delete a folder:
  it does not work; nothing happens in the console output (not even Starting)
- watcher.on('unlinkDir', ... and delete a file:
  Starting and Finished for copy_to_local_folder, but not the console.log and ( async () => ... part
- watcher.on('add', ... and watcher.on('addDir', ...:
  both work
It seems to me that watcher.on('unlinkDir', ... never gets triggered ... is unlinkDir not supported by gulp-watch?
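As one possible workaround sketch (assuming chokidar, which gulp 4's watch wraps, is installed as a direct dependency; not a confirmed fix), you could register a separate directory watcher and mirror the removals yourself:

var chokidar = require('chokidar');

function watch_deleted_dirs() {
  var dirWatcher = chokidar.watch(buildDir + '**/*', { ignoreInitial: true });
  dirWatcher.on('unlinkDir', function (dirPath) {
    // mirror the removed folder into the local WP plugin directory
    var target = ('./' + dirPath).replace(buildDir, localDir);
    del(target, { force: true }).then(function (deleted) {
      console.log('Deleted:\n', deleted.join('\n'));
    });
  });
}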
I am writing a unit test case; the question is mentioned in the link How to stub/mock submodules of a require of nodejs using sinon.
When I include a require:
const index=require('./index.js');
It has a library require inside it
const library= require('./library.js');
The library.js file has a require which reads the config.json file (this config file is also required inside the above index.js), as below:
const readConfig = require('read-config');
const config = readConfig('./config.json');
I have tried many ways as suggested in the above link, but I keep failing.
const stubs = {
  './library': function (response) {
    assert.equal(some, null);
    return 'Some ' + argument;
  },
  '../library1.js': {
    function(paths, opts) {
      var config = './config.json';
      return config;
    }
  },
}
const index = proxyquire('./index.js', stubs);
When I run my unit test case I am still getting the below error
throw configNotFound(configPath);
^
ReadConfigError: Config file not found: ./config.json
I would like to know which part of the code I am getting wrong, such that it throws this error.
I am trying to edit index.js and all the related files where the config is read, using the code below:
var fs = require('fs');
var path = require('path');
var pathToJson = path.resolve(__dirname, '../config.json');
// Load config (note: fs.readFile is asynchronous; config is only set once the callback runs)
var config = fs.readFile(pathToJson, 'utf8', function (err, data) {
  if (err) throw err;
  config = JSON.parse(data);
});
The challenge here is that I cannot change the Node code.
Your problem is likely to be path resolution. If ./config.json is relative to where you are running Node from (process.cwd()), then it'll work. If it's relative to your library module, then you can do something like:
// Works for JS and JSON
const configPath = require.resolve('./config.json');
// Works in general
const configPath = require('path').join(__dirname, 'config.json');
// Then
const readConfig = require('read-config');
const config = readConfig(configPath);
It's difficult to say if this is the case without knowing more about your project layout and how you're starting your app.
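If the goal is to keep the test from reading config.json at all, one possible sketch (not the approach above, and the stubbed config values here are made up) is to use proxyquire's global overrides so the stub also applies to the require inside library.js:

const proxyquire = require('proxyquire');

// stub the read-config module itself with made-up config values
const fakeReadConfig = () => ({ some: 'value' });
fakeReadConfig['@global'] = true; // apply the override to nested requires, e.g. inside library.js

const index = proxyquire('./index.js', { 'read-config': fakeReadConfig });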
I'm writing an isomorphic key-value store with webpack.
This is currently my approach to loading the libraries, which obviously doesn't work, because webpack wants to resolve both requires.
What's the right approach?
var db = null;

if (typeof window === 'undefined') {
  // node context
  db = require('level');
} else {
  // browser context
  db = require('gazel');
}
I know that you can provide a target to webpack, but I have no idea how to use that here.
Thanks!
I think resolve.alias would work for you. You would point a db alias at level or gazel depending on which build you are creating, as sketched below.
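A rough sketch of the alias idea for the browser build (the wrapper file db.browser.js is hypothetical, not from the answer):

// webpack.config.js (browser build); a node build would alias db to a wrapper around level instead
const path = require('path');

module.exports = {
  resolve: {
    alias: {
      db: path.resolve(__dirname, 'src/db.browser.js') // hypothetical wrapper that exports gazel
    }
  }
};

Another option is to inject a flag with DefinePlugin and branch on it at runtime, which is what the following config does; the minifier can then drop the unused require from each bundle.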
webpack.config.js
const webpack = require('webpack');

module.exports = {
  plugins: [
    new webpack.DefinePlugin({
      "process.env": {
        BROWSER: JSON.stringify(true)
      }
    })
  ]
};
your-universal.js
var db = null;

if (!process.env.BROWSER) {
  // node context
  db = require('level');
} else {
  // browser context
  db = require('gazel');
}