Using Cypress functions in different files - javascript

I am trying to use Cypress functions in files other than the main test file, and I am wondering if that is possible. Here is the code in my test.js file; note that the first call is what I'm trying to get working, while the second one works normally and gives me no problems. The reason I'm doing this is that I need to reuse the same function multiple times.
My folder tree:
static_copied
└── pages
    └── cities
        ├── Rome
        ├── New York
        ├── Bombay
        ├── Tokyo
        ├── London
        └── Moscow
test.js file:
const misc = require('./misc.help.js') // path assumed; wherever the helper file lives
const pathCities = 'static_copied/pages/cities'

it('Retrieve cities from static and divide links', () => {
  // this is what I'm trying to do: it prints undefined
  let cities1 = misc.retrieveCities()
  console.log(cities1)

  // this works
  cy.task('readFolder', pathCities).then(cities => {
    console.log('cities ', cities, typeof cities) // prints an array of cities, and 'object'
  })
})
my misc.help.js file:
const pathCities = 'static_copied/pages/cities'

module.exports = {
  retrieveCities,
  [...]
}

[...]

function retrieveCities() {
  cy.task('readFolder', pathCities).then(res => {
    console.log('here', res, typeof res)
    return res
  })
}
and finally my cypress/plugins/index.js file:
const fs = require('fs')

// opens devTools by default
module.exports = (on, config) => {
  [...]
  // reads a folder, returning both folder and file names
  on('task', {
    readFolder(path) {
      const foldersAndFiles = fs.readdirSync(path, 'utf8')
      console.log('--->', foldersAndFiles, typeof foldersAndFiles)
      // if an entry contains a dot it's a file, so exclude it from the result
      const folders = foldersAndFiles.filter(entry => entry.indexOf('.') === -1)
      return folders
    },
  })
}
What happens is that in misc.help.js the print is correct: inside retrieveCities(), the console.log('here', res, typeof res) correctly prints an array. But back in the main test file, console.log(cities1) prints undefined.
Is there a way to pass my result back to the main file?

Your retrieveCities() function never returns anything: the return res inside .then() only returns from the callback, not from retrieveCities itself, so cities1 is undefined. Instead, wrap the task in a custom command. Add this to your commands file, and you can then call cy.retrieveCities() in any test file:
const pathCities = 'static_copied/pages/cities'

Cypress.Commands.add('retrieveCities', () => {
  return cy.task('readFolder', pathCities).then(res => {
    return res
  })
})
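A minimal usage sketch (assuming the command above lives in cypress/support/commands.js so it is loaded before the tests run):
// test.js — the custom command yields the task result, so chain .then() on it
it('Retrieve cities from static and divide links', () => {
  cy.retrieveCities().then(cities => {
    console.log('cities', cities) // logs the array produced by the readFolder task
  })
})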

Related

Electron nedb-promises storage file gets replaced on every app start

I've been trying for a few hours to understand why this happens. In my Electron app I would like to use the nedb-promises package ("nedb-promises": "^6.2.1"). Installation and configuration work so far, but on every app start (dev & prod) the db file gets replaced by a new/empty one. Shouldn't the package handle that?
I took the code from this example:
https://shivekkhurana.medium.com/persist-data-in-electron-apps-using-nedb-5fa35500149a
// db.js
const {app} = require('electron');
const Datastore = require('nedb-promises');

const dbFactory = (fileName) => Datastore.create({
  filename: `${process.env.NODE_ENV === 'development' ? '.' : app.getPath('userData')}/data/${fileName}`,
  timestampData: true,
  autoload: true
});

const db = {
  customers: dbFactory('customers.db'),
  tasks: dbFactory('tasks.db')
};

module.exports = db;
import db from './db'
....

// load task; should not be important because the file is already replaced by the time we arrive here
ipcMain.handle('Elements:Get', async (event, args) => {
  // 'SELECT * FROM Customers'
  let data = await db.customers.find({});
  console.log(data);
  return data;
})

...

// Set an item
ipcMain.handle('Element:Save', async (event, data) => {
  console.log(data)
  const result = db.customers.insertOne(data.item).then((newDoc) => {
    console.log(newDoc)
    return newDoc
  }).catch((err) => {
    console.log("Error while Adding")
    console.log(err)
  });
  console.log(result);
  return result;
})
Note: after "adding", newDoc contains the new element, and when checking the file manually in the filesystem, the element is there. When I close the app and open it again, the file has been replaced.
I've checked the docs up and down; I have no clue what I'm doing wrong. Thanks for your help.
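One quick check, sketched from the db.js above: log the exact file each Datastore resolves to, because the '.' branch makes the path relative to the process working directory, which can differ between app starts and would make each start create a fresh file. The baseDir helper below is illustrative, not part of the original code:
// db.js (sketch) — compute the path once and log it
const {app} = require('electron');
const Datastore = require('nedb-promises');
const path = require('path');

const baseDir = process.env.NODE_ENV === 'development'
  ? path.resolve('.')          // resolves against the current working directory
  : app.getPath('userData');

const dbFactory = (fileName) => {
  const filename = path.join(baseDir, 'data', fileName);
  console.log('nedb file:', filename); // compare this line between app starts
  return Datastore.create({ filename, timestampData: true, autoload: true });
};

module.exports = {
  customers: dbFactory('customers.db'),
  tasks: dbFactory('tasks.db')
};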

Export logging in Node.js between files

I have two files in Node.js:
index.js
function.js
index.js is my main file, from which I call the functions inside function.js. In function.js I need to use logging; the problem is I can't figure out how to do it.
function.js
module.exports = {
  Exemplfunciton: async () => {
    app.log('#### This is just an exemple im trying to run')
  },
  checkCalcul: async (a, b) => {
    log(`The Val of A : ${a}, the Val of B: ${b}`)
    return a + b
  }
}
index.js
const functionToCall = require('./function.js')

module.exports = app => {
  functionToCall.Exemplfunciton()
  functionToCall.checkCalcul(4, 5)
}
This returns
app is not defined
and when I tried it without app in function.js, it returned
log is not defined
I only need to use app.log across these files (my main one, index.js, and function.js).
Pass it as an argument:
module.exports = app => {
  functionToCall.Exemplfunciton(app) // add here
}
Then consume it:
module.exports = {
  Exemplfunciton: async (app) => { // add here
    app.log('#### This is just an exemple im trying to run')
  }
}
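checkCalcul needs the same treatment; a sketch, assuming the same app object is what should do the logging:
// index.js
functionToCall.checkCalcul(app, 4, 5)

// function.js
checkCalcul: async (app, a, b) => {
  app.log(`The Val of A : ${a}, the Val of B: ${b}`)
  return a + b
}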
To log in Node.js, you should use console: https://nodejs.org/api/console.html
Example:
module.exports = {
  ExampleFunction: async () => {
    console.log('#### This is just an example I\'m trying to run')
  }
}

const functionToCall = require('./function.js')
functionToCall.ExampleFunction() // logs #### This is just an example I'm trying to run
Consider extracting the log functionality into its own file that can be referenced by function.js, index.js, and anything else in your app. For example:
logger.js
module.exports = {
  log: function() {
    /* aggregate logs and send to your logging service, like TrackJS.com */
  }
}
function.js
var logger = require("./logger.js");

module.exports = {
  exampleFunction: function() {
    logger.log("foo bar");
  }
};
index.js
var functions = require("./function.js");
var logger = require("./logger.js");

functions.exampleFunction();
logger.log("foo");
You should send the logs off to a service like TrackJS to aggregate, report, and alert you to production problems.

How to stop this function from executing more than once

I have this upload function. It works well, except that it uploads the files twice.
startUpload(event: HTMLInputEvent) {
  console.log(event) // logs once
  this.tasks$ = from([Array.from(event.target.files)]).pipe(
    map(files => files.map((file, index) => {
      console.log(file) // logs twice
      const path = `test/${index}_${file.name}`
      const customMetadata = { app: 'Angular!' }
      return this.afstorage.upload(path, file, { customMetadata });
    }))
  )
  this.snapshots$ = this.tasks$.pipe(
    map(files =>
      files.map(file =>
        file.snapshotChanges()
      )
    )
  )
  this.progresses$ = this.tasks$.pipe(
    map(files =>
      files.map(file =>
        file.percentageChanges()
      )
    )
  )
}
How do I prevent it from uploading more than once?
I think the problem is here:
files.map((file, index) => {
  console.log(file) // logs twice
  const path = `test/${index}_${file.name}`
  const customMetadata = { app: 'Angular!' }
  return this.afstorage.upload(path, file, { customMetadata });
})
The arrow function provided as an argument to .map will be called once for each file in the files array. So my guess is that this is some kind of file uploader, and you've specified two files to be uploaded, or possibly the same file twice.
Update
In response to your comment: if you only want to call this once, e.g. with the first file, you could replace the code above with
const index = 0;
const file = files[0];
console.log(file)
const path = `test/${index}_${file.name}`
const customMetadata = { app: 'Angular!' }
return this.afstorage.upload(path, file, { customMetadata });
This will only upload the first file, but skipping the rest of the files doesn't seem like a good idea.
I think there's a problem with your question: you've said you only want this code to execute once, but if multiple files are chosen, how will the other files be uploaded?

Adding multiple BigQuery JSON credential files in Node project

I've been working on a Node project that involves fetching some data from BigQuery. Everything has been fine so far; I have my credential.json file (from BigQuery) and the project works as expected.
However, I want to implement a new feature in the project and this would involve fetching another set of data from BigQuery. I have an entirely different credential.json file for this new dataset. My project seems to recognize only the initial credential.json file I had (I named them differently though).
Here's a snippet of how I linked my first credential.json file:
function createCredentials() {
  try {
    const encodedCredentials = process.env.GOOGLE_AUTH_KEY;
    if (typeof encodedCredentials === 'string' && encodedCredentials.length > 0) {
      const google_auth = atob(encodedCredentials);
      if (!fs.existsSync('credentials.json')) {
        fs.writeFile("credentials.json", google_auth, function (err) {
          if (err) console.log(err);
          console.log("Successfully Written to File.");
        });
      }
    }
  }
  catch (error) {
    logger.warn(`Ensure that the environment variable for GOOGLE_AUTH_KEY is set correctly: the full error is given here: ${error.message}`)
    process.kill(process.pid, 'SIGTERM')
  }
}
Is there a way to fuse my two credential.json files together? If not, how can I separately declare which credential.json file to use?
If not, how can I separately declare which credential.json file to use?
I would create a function that acts as the single exit point to BigQuery and pass it an identifier saying which credential to generate; that credential is then used when calling BigQuery.
The code below assumes you changed this:
function createCredentials() {
  try {
    const encodedCredentials = process.env.GOOGLE_AUTH_KEY;
to this:
function createCredentials(auth) {
  try {
    const encodedCredentials = auth;
And you can use it like this:
import BigQuery from '@google-cloud/bigquery';
import {GoogApi} from "../apiManager" // Private code to get the token from the client DB

if (!global._babelPolyfill) {
  var a = require("babel-polyfill")
}

describe('Check routing', async () => {
  it('Test stack', async (done, auth) => {
    // Fetch client auth from the local database
    // Replace the 2 values below with real values
    const tableName = "myTest";
    const dataset = "myDataset";
    try {
      const bigquery = new BigQuery({
        projectId: `myProject`,
        keyFilename: this.createCredentials(auth)
      });
      await bigquery.createDataset(dataset)
        .then(args => {
          console.log(`Create dataset, result is: ${args}`)
        })
        .catch(err => {
          console.log(`Error in the process: ${err.message}`)
        })
    } catch (err) {
      console.log("err", err)
    }
  })
})
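For the keyFilename above to work, createCredentials also has to return the path of the file it wrote. A minimal sketch of that change, assuming one file per credential set (the fileName parameter is illustrative, not from the original code):
const fs = require('fs');

function createCredentials(auth, fileName = 'credentials.json') {
  // decode the base64-encoded service-account JSON
  // (Node-idiomatic equivalent of the atob call above)
  const google_auth = Buffer.from(auth, 'base64').toString('utf8');
  // write synchronously so the file exists before BigQuery reads it
  if (!fs.existsSync(fileName)) {
    fs.writeFileSync(fileName, google_auth);
  }
  // return the path so it can be passed straight to keyFilename
  return fileName;
}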

How to check if each file in a path is accessible for reading or not

I am trying to read the contents of a specific path. For that purpose, I used the following code:
code1:
const contentsOfPersonalFolder = fs.readdirSync(rootPathToPersonal);
but I know in advance that I do not have permission to read some of the contents that will be returned by the previous line of code.
To check whether or not I have permission to read a file, I would use the following code:
code2:
try {
  fs.accessSync(path, fs.constants.R_OK);
  logger.info('The directory: ', path, 'can be read');
} catch (err) {
  logger.error('The directory: ', path, 'can not be read due to inaccessibility');
}
The problem now is that code1 returns an array of all available files in the specified path, and if one of those files is read-protected, the call throws and the program crashes.
What I want to achieve is to iterate through all the available files in the specified path from code1, check each item with the code in code2, and then do some logic if the file is readable and something else if it is not.
Please let me know how to achieve that.
You could use fs.access to check the user's permissions:
https://nodejs.org/api/fs.html#fs_fs_access_path_mode_callback
const fs = require('fs');
const path = require('path');

const testFolder = './tests/';

fs.readdir(testFolder, (err, files) => {
  files.forEach(file => {
    console.log(file);
    // readdir returns bare names, so resolve them against the folder
    fs.access(path.join(testFolder, file), fs.constants.R_OK, (err) => {
      if (err) {
        console.error("file is not readable");
        return;
      }
      // do your reading operations
    });
  });
})
const fs = require('fs');
const path = require('path');

const isAvailableToRead = file => {
  try {
    fs.accessSync(file, fs.constants.R_OK);
    return true;
  } catch (err) {
    return false;
  }
}

const readDirectory = dir => {
  const files = fs.readdirSync(dir);
  files.forEach(file => {
    // resolve the bare name returned by readdirSync against the directory
    const fullPath = path.join(dir, file);
    if (isAvailableToRead(fullPath)) {
      console.log(`Do some logic ${file}`);
    } else {
      console.log(`Do something else ${file}`);
    }
  });
}

readDirectory(__dirname);
