UPDATE: I believe I have found a (probably hacky) way to solve both problems, will post my updated code and solutions tomorrow for anyone interested.
I am trying to create a CLI application and decided I wanted to format the log output for a better user experience, including adding a verbose mode. I thus installed winston and managed to get it working how I wanted to. I then installed jest, as the app is getting more complex and I wanted to automate testing. I decided to make my first test for the logger as it was the last thing I was working on, and I have immediately run into problems. My plan was to count the lines of output from the console log and check they were equal to ten: this would mean I wouldn't have to hardcode any specific error messages, which may change if I decide to change the log formatting at a later date. I would also like a test that changes the environment variable from dev to prod to ensure that it works correctly in both environments: does anyone know if this is possible? I am currently using dotenv for managing my environment variables and have added the relevant code to my jest config file so it will read the variables correctly. I have been reading through various Stack Overflow posts and the jest docs about mock functions in an attempt to solve these problems, but it is all flying over my head. I don't strictly need these tests to work to get on with my app, as I'm pretty confident it is all working fine, but it is frustrating to not be able to solve these problems, and it would be useful to know in the future in case I do need to make a test that relies on the log output. Can anyone help?
set-log-to.js
const { createLogger, format, transports } = require('winston');
const { combine, printf, errors } = format;
const nodeEnvironment = process.env.NODE_ENV;
const cc = require('../constants/chalk-classes');
const {
fatal,
caveat,
victory,
error,
warn,
attempt,
success,
info,
debug,
plain,
} = cc;
// Custom winston severity levels: a lower number means a higher priority, and
// winston shows a message when its number is <= the logger's current level.
// Several names deliberately share a number so they are filtered together:
// fatal/caveat/victory are all level 0, and attempt/success/info are all level 3.
const config = {
levels: {
fatal: 0,
caveat: 0,
victory: 0,
error: 1,
warn: 2,
attempt: 3,
success: 3,
info: 3,
verbose: 4,
debug: 5,
},
};
// Winston format: attach stack traces to Error objects, then render each
// record according to its level using the chalk classes imported above.
// Unknown levels produce undefined, exactly like the original switch.
const formatting = combine(
  errors({ stack: true }),
  printf(({ level, message }) => {
    const renderers = {
      // Level-0 alerts: coloured "<level>:" prefix, plain message body.
      fatal: () => `${fatal(`${level}:`)} ${plain(message)}`,
      caveat: () => `${caveat(`${level}:`)} ${plain(message)}`,
      victory: () => `${victory(`${level}:`)} ${plain(message)}`,
      // error/warn: the entire "<level>: <message>" string is coloured.
      error: () => `${error(`${level}: ${message}`)}`,
      warn: () => `${warn(`${level}: ${message}`)}`,
      // Informational levels: message only.
      attempt: () => `${attempt(message)}`,
      success: () => `${success(message)}`,
      info: () => `${info(message)}`,
      verbose: () => `${plain(message)}`,
      debug: () => `${debug(level)}: ${plain(message)}`,
    };
    return renderers[level] && renderers[level]();
  })
);
/**
 * Resolve the log level to use.
 * @param {string} [level] - explicit level; returned unchanged when truthy.
 * @returns {string} the given level, or an environment-based default
 *   ('debug' in dev, 'warn' otherwise).
 */
function setLevel(level) {
  if (level) {
    return level;
  }
  // Bug fix: read NODE_ENV at call time rather than via the module-level
  // `nodeEnvironment` constant captured at load time — otherwise changing
  // process.env.NODE_ENV later (e.g. from a test) has no effect.
  return process.env.NODE_ENV === 'dev' ? 'debug' : 'warn';
}
/**
 * Build a winston logger wired to the console, using the custom level map
 * and per-level chalk formatting defined above.
 * @param {string} [level] - explicit level; falls back via setLevel().
 * @returns the configured winston logger.
 */
function setLogTo(level) {
  const resolvedLevel = setLevel(level);
  const consoleTransport = new transports.Console({ format: formatting });
  return createLogger({
    levels: config.levels,
    level: resolvedLevel,
    transports: [consoleTransport],
  });
}
module.exports = setLogTo;
set-log-to.test.js
const setLogTo = require('../set-log-to');
// NOTE(review): every `???` below is the question's placeholder — the author
// is asking for an expression that counts the lines winston wrote out.
test('All log levels function correctly', () => {
let log = setLogTo('debug');
log.fatal('This is fatal');
log.caveat('This is a caveat');
log.victory('This is a victory');
log.error('This is an error');
log.warn('This is a warning');
log.attempt('This is an attempt');
log.success('This is a success');
log.info('This is some info');
log.verbose('This is verbose');
log.debug('This is a debug');
expect(???).toEqual(10);
});
// Default level should be 'warn' outside dev, so only the warn line prints.
test('Logger does not print debug as standard', () => {
let log = setLogTo();
log.warn('This is a warning');
log.verbose('This is a verbose statement');
log.debug('This is a debug statement');
expect(???).toEqual(1);
});
test('Logger does not print info when set to error', () => {
let log = setLogTo('error');
log.info('This is an info statement');
log.error('This is an error')
expect(???).toEqual(1);
});
test('Dotenv works correctly', () => {
let log = setLogTo();
log.debug('This is a debug');
// NOTE(review): `nodeEnvironment` is not declared in this file (it lives
// inside set-log-to.js), so this bare assignment creates/throws rather than
// changing the logger's environment — the logger must re-read process.env.
nodeEnvironment = 'prod';
log.debug('This us a debug');
expect(???).toEqual(1)
});
Ok, so Winston doesn't send messages to the console, or if it does it's the global console. Either way, I figured that the best way around this would be to add a file transport alongside the console transport, subsequently reading the lines from the file rather than the console and then deleting the file after every test. This then caused problems, as standard Jest behaviour is to run its tests simultaneously, so each test was attempting to access the same temporary file at the same time. This was easy to fix by passing the --runInBand option to jest in my package.json file, which makes Jest run the tests sequentially.
To solve the environment variable problem, it turns out that Jest sets the NODE_ENV variable to 'test' when it starts up. I added a jest.resetModules() to the beforeEach statement allowing me to manually declare the environment at the start of each test. I stored the jest test environment in a constant at the start of the file so I could use this for the majority of the tests that did not require explicit environment variables to be set. I also created an afterAll() function to make sure that process.env was set back to the test environment for any subsequent tests.
Code:
package.json:
"logtest": "jest --runInBand ./src/helper/tests/set-log-to.test.js",
set-log-to.test.js:
const fs = require('fs');
const path = require('path');
const winston = require('winston');
let setLogTo = require('../set-log-to');
const { snipconDir } = require('../../constants/core-dirs');
// Path of the temp file the File transport writes to (read back by getLines()).
const logTests = path.join(snipconDir, './temp/log-tests.txt');
// Snapshot of the Jest environment (Jest sets NODE_ENV to 'test').
// NOTE(review): this stores a *reference* to process.env, not a copy — later
// mutations of process.env.NODE_ENV also change testEnv. Use { ...process.env }
// if a true snapshot is intended.
const testEnv = process.env;
/**
 * Create the logger under test and bolt on a File transport so the output
 * can be read back from disk instead of the console.
 * @param {string} [level] - forwarded to setLogTo().
 * @returns the logger with an extra file transport.
 */
function createTestLog(level) {
  const log = setLogTo(level);
  const fileTransport = new winston.transports.File({ filename: logTests });
  log.add(fileTransport);
  return log;
}
/**
 * Emit one message at every custom level, in priority order, so tests can
 * count how many lines actually reach the transports.
 * @param log - a logger exposing one method per custom level.
 */
function allLogLevels(log) {
  const samples = [
    ['fatal', 'This is fatal'],
    ['caveat', 'This is a caveat'],
    ['victory', 'This is a victory'],
    ['error', 'This is an error'],
    ['warn', 'This is a warning'],
    ['attempt', 'This is an attempt'],
    ['success', 'This is a success'],
    ['info', 'This is some info'],
    ['verbose', 'This is verbose'],
    ['debug', 'This is a debug'],
  ];
  for (const [levelName, message] of samples) {
    log[levelName](message);
  }
}
/**
 * Read the temp log file and return its lines.
 * @param {string} [file=logTests] - path to read; defaults to the shared temp log.
 * @returns {Promise<string[]>} the log records, one per array element.
 */
async function getLines(file = logTests) {
  const data = await fs.promises.readFile(file, 'utf8');
  const lines = data.split('\n');
  // Winston's File transport terminates every record with '\n', so the final
  // split element is normally an empty string. Bug fix: only drop it when it
  // really is empty — the old unconditional pop() discarded a genuine last
  // line whenever the file did not end with a newline.
  if (lines.length > 0 && lines[lines.length - 1] === '') {
    lines.pop();
  }
  return lines;
}
// Reset the module registry and truncate the temp log before every test.
// Bug fix: an async test/hook function must not also take the `done`
// callback (recent Jest versions reject that combination), and the old
// try/catch + console.log swallowed setup failures so the test would run
// against stale state. Just await — a rejection now fails the hook loudly.
beforeEach(async () => {
  jest.resetModules();
  await fs.promises.writeFile(logTests, '');
});
// Remove the temp log after each test so runs stay independent.
// Bug fix: dropped the `done` callback (illegal together with an async hook
// in recent Jest) and corrected the filename in the message — the file is
// log-tests.txt, not log-tests.js. Deletion stays best-effort: the file may
// legitimately be missing if the test itself already cleaned up.
afterEach(async () => {
  try {
    await fs.promises.unlink(logTests);
  } catch (err) {
    console.error('Failed to delete log-tests.txt');
  }
});
// Restore the environment captured at module load for any later test files.
// NOTE(review): testEnv holds a *reference* to process.env, not a copy, so
// NODE_ENV mutations made inside the tests persist through this "restore";
// capture { ...process.env } at the top if a true reset is intended.
afterAll(() => {
process.env = testEnv;
});
// Data-driven registration of the six level/environment scenarios.
// env === null means "restore the Jest default environment"; otherwise the
// scenario overrides NODE_ENV before the logger is created.
const scenarios = [
  { title: 'When level is explicitly set to debug, all messages show', level: 'debug', env: null, expected: 10 },
  { title: 'Standard behaviour shows messages from level 2 and below', level: undefined, env: null, expected: 5 },
  { title: 'When explicitly set to error level, loggger displays all level 0 messages', level: 'error', env: null, expected: 3 },
  { title: 'Verbose level displays messages from level 4 and below', level: 'verbose', env: null, expected: 9 },
  { title: 'Development environment displays all messages', level: undefined, env: 'dev', expected: 10 },
  { title: 'Production environment displays messages from level 2 and below', level: undefined, env: 'prod', expected: 5 },
];
for (const { title, level, env, expected } of scenarios) {
  test(title, async () => {
    if (env === null) {
      process.env = testEnv;
    } else {
      process.env.NODE_ENV = env;
    }
    const log = createTestLog(level);
    allLogLevels(log);
    const lines = await getLines();
    expect(lines.length).toEqual(expected);
  });
}
set-log-to.js:
const { createLogger, format, transports } = require('winston');
const { combine, printf, errors } = format;
const cc = require('../constants/chalk-classes');
const {
fatal,
caveat,
victory,
error,
warn,
attempt,
success,
info,
debug,
plain,
} = cc;
// Custom winston severity levels (lower number = higher priority; winston
// shows a message when its number is <= the logger's current level).
// fatal/caveat/victory intentionally share level 0, and
// attempt/success/info share level 3, so each group toggles together.
const config = {
levels: {
fatal: 0,
caveat: 0,
victory: 0,
error: 1,
warn: 2,
attempt: 3,
success: 3,
info: 3,
verbose: 4,
debug: 5,
},
};
// Winston format: capture stack traces on Error objects, then render each
// record according to its level with the chalk classes imported above.
const formatting = combine(
  errors({ stack: true }),
  printf((infoObj) => {
    const { level, message } = infoObj;
    // Level-0 alerts: coloured "<level>:" prefix plus a plain message body.
    if (level === 'fatal') return `${fatal(`${level}:`)} ${plain(message)}`;
    if (level === 'caveat') return `${caveat(`${level}:`)} ${plain(message)}`;
    if (level === 'victory') return `${victory(`${level}:`)} ${plain(message)}`;
    // error/warn: the whole "<level>: <message>" string is coloured.
    if (level === 'error') return `${error(`${level}: ${message}`)}`;
    if (level === 'warn') return `${warn(`${level}: ${message}`)}`;
    // Informational levels: message only.
    if (level === 'attempt') return `${attempt(message)}`;
    if (level === 'success') return `${success(message)}`;
    if (level === 'info') return `${info(message)}`;
    if (level === 'verbose') return `${plain(message)}`;
    if (level === 'debug') return `${debug(level)}: ${plain(message)}`;
    // Unknown level: mirror the original switch's implicit undefined.
    return undefined;
  })
);
/**
 * Resolve the log level to use: an explicit level wins; otherwise derive a
 * default from the environment read at call time ('debug' in dev, 'warn'
 * everywhere else).
 * @param {string} [level]
 * @returns {string}
 */
function setLevel(level) {
  if (level) {
    return level;
  }
  return process.env.NODE_ENV === 'dev' ? 'debug' : 'warn';
}
/**
 * Build a winston logger wired to the console with the custom level map and
 * per-level chalk formatting defined above.
 * @param {string} [level] - explicit level; falls back via setLevel().
 * @returns the configured winston logger.
 */
function setLogTo(level) {
  return createLogger({
    levels: config.levels,
    level: setLevel(level),
    transports: [new transports.Console({ format: formatting })],
  });
}
module.exports = setLogTo;
Related
I'd like to test my implementation of express-validator rules in my middleware. I understand that I shouldn't test 3rd party code, but in my (perhaps flawed) view, I'm testing my implementation rather than their code.
A cut down version would usually look like this:
// routes.js
// Bug fix: the original had an extra closing paren after .withMessage(...),
// which made the snippet unparseable.
router.post('/example',
  [
    body('email')
      .isString()
      .withMessage('Invalid characters, please use letters and numbers only')
  ],
  //// To be replaced by:
  // validateLogin(),
  controller.exampleFn());
but I need to be able to extract it for testing, which I do by running the validations imperatively:
// validation.js
// parallel processing
// Wraps an array of express-validator rules into a single middleware: runs
// every rule against the request in parallel, calls next() on success, and
// builds an error (422 with the first message per field) on failure. The
// error is both forwarded to next() and returned so tests can inspect it.
const validate = (validations) => async (req, res, next) => {
  await Promise.all(validations.map((rule) => rule.run(req)));
  const result = validationResult(req);
  if (result.isEmpty()) {
    return next();
  }
  const failure = new Error();
  failure.message = process.env.NODE_ENV === 'development' ? 'Validation Failed' : 'Error';
  // result is known non-empty here, so this always yields 422 (the 500 branch
  // is unreachable) — preserved from the original.
  failure.statusCode = !result.isEmpty() ? 422 : 500;
  failure.errors = result.array({ onlyFirstError: true });
  next(failure);
  return failure;
};
const validateLogin = () => {
const exampleValidationRules = [
body('email')
.isString()
.withMessage('Invalid characters, please use letters and numbers only')
];
return validation(exampleValidationRules);
}
module.exports = {
validateLogin
};
I can then call the middleware in my routes, and in my test files.
For example:
// auth.test.js
describe('Unit Tests', () => {
it('should return 422 if email validation fails', async() => {
const wrongEmailReq = { body: {email: 'nic#hotmail.com'} };
const notStringReq = { body: {email:1} };
const res = {
statusCode: 500,
status: (code) => {this.statusCode = code; return this},
};
// Function to be tested
const validationFn = validateLogin();
const wrongEmail = await validationFn(wrongEmailReq, res, ()=>{});
const notString = await validationFn(notStringReq, res, ()=>{});
expect(wrongEmail.statusCode).to.be.equal(422);
expect(wrongEmail.errors[0].param).to.be.equal('email');
expect(notString.statusCode).to.be.equal(422);
expect(notString.errors[0].param).to.be.equal('email');
return;
});
I'm just slightly confused about how I'd test the 'success' case. validationFn returns next() if successful. But that would just be undefined.
Should I just test expect(correct.statusCode).to.be.undefined;? That doesn't seem specific enough.
Also, is there actually any advantage to unit testing this function over using http-chai to run the requests? I thought perhaps it was more lightweight, but unsure.
I have a dependency that calls the evaluate() function. And I'm attempting to deploy it to heroku however, according to the readme file for puppeteer : https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md : at the very bottom, that calling evaluate with an async function won't work because while puppeteer uses Function.prototype.toString() to serialize functions while transpilers could be changing the output code in such a way it's incompatible with puppeteer. Which seems to be happening with my code because it's not reading or evaluating after I pass the argument ['--no-sandbox'] to the browser. Which is necessary according to the section about deploying on heroku. This is the dependency script that is the issue: And below is the whole dependency file.
// puppeteer-extra is a drop-in replacement for puppeteer,
// it augments the installed puppeteer with plugin functionality
const puppeteer = require('puppeteer-extra');
// add stealth plugin and use defaults (all evasion techniques)
const StealthPlugin = require('puppeteer-extra-plugin-stealth');
puppeteer.use(StealthPlugin());
// load helper function to detect stealth plugin
const { warnIfNotUsingStealth } = require("../helpers/helperFunctions.js");
/**
 * Scrapes all collections from the Rankings page at https://opensea.io/rankings
 * options = {
 *   nbrOfPages: number of pages that should be scraped? (defaults to 1 Page = top 100 collections)
 *   debug: [true,false] enable debugging by launching chrome locally (omit headless mode)
 *   logs: [true,false] show logs in the console
 *   browserInstance: browser instance created with puppeteer.launch() (bring your own puppeteer instance)
 * }
 */
const rankings = async (type = "total", optionsGiven = {}, chain = undefined) => {
  const optionsDefault = {
    debug: false,
    logs: false,
    browserInstance: undefined,
  };
  const options = { ...optionsDefault, ...optionsGiven };
  const { debug, logs, browserInstance } = options;
  const customPuppeteerProvided = Boolean(optionsGiven.browserInstance);
  logs && console.log(`=== OpenseaScraper.rankings() ===\n`);
  // init browser: reuse the caller's instance when provided, else launch one
  let browser = browserInstance;
  if (!customPuppeteerProvided) {
    browser = await puppeteer.launch({
      headless: !debug, // when debug is true => headless should be false
      args: ['--start-maximized', '--no-sandbox'],
    });
  }
  customPuppeteerProvided && warnIfNotUsingStealth(browser);
  const page = await browser.newPage();
  const url = getUrl(type, chain);
  logs && console.log("...opening url: " + url);
  await page.goto(url);
  logs && console.log("...🚧 waiting for cloudflare to resolve");
  await page.waitForSelector('.cf-browser-verification', { hidden: true });
  logs && console.log("extracting __NEXT_DATA variable");
  // Bug fix: the element id must be the literal string '__NEXT_DATA__'.
  // This callback is serialized and executed in the page context, so the
  // old template `${__NEXT_DATA__}` did not interpolate the intended id
  // there (it resolved against the page's globals instead), making
  // getElementById return null and the evaluate call throw.
  const __NEXT_DATA__ = await page.evaluate(() => {
    const nextDataStr = document.getElementById('__NEXT_DATA__').innerText;
    return JSON.parse(nextDataStr);
  });
  // extract relevant info
  const top100 = _parseNextDataVarible(__NEXT_DATA__);
  logs && console.log(`🥳 DONE. Total ${top100.length} Collections fetched: `);
  return top100;
}
/**
 * Extract the top-100 collection summaries out of Next.js's serialized
 * __NEXT_DATA__ payload.
 * @param {Object} __NEXT_DATA__ - parsed page state object.
 * @returns {Array<Object>} one record per collection.
 */
function _parseNextDataVarible(__NEXT_DATA__) {
  // Floor price may be missing or malformed; fall back to null rather than throw.
  const extractFloorPrice = (statsV2) => {
    try {
      return {
        amount: Number(statsV2.floorPrice.eth),
        currency: "ETH",
      };
    } catch (err) {
      return null;
    }
  };
  const extractCollection = ({ name, slug, logo, isVerified, statsV2 }) => ({
    name,
    slug,
    logo,
    isVerified,
    floorPrice: extractFloorPrice(statsV2),
    // statsV2 intentionally omitted — re-add if additional stats are needed.
  });
  const edges = __NEXT_DATA__.props.relayCache[0][1].json.data.rankings.edges;
  return edges.map((edge) => extractCollection(edge.node));
}
/**
 * Build the OpenSea rankings URL for a given time window.
 * @param {string} type - one of "24h" | "7d" | "30d" | "total".
 * @param {string} [chain] - optional chain filter appended as a query param.
 * @returns {string} the fully-qualified rankings URL.
 * @throws {Error} when type is not one of the supported values.
 */
function getUrl(type, chain) {
  // Bug fix: chainExtraQueryParameter was assigned without const/let, creating
  // an implicit global (a ReferenceError in strict mode / ES modules).
  const chainExtraQueryParameter = chain ? `&chain=${chain}` : '';
  const sortByType = {
    "24h": "one_day_volume",
    "7d": "seven_day_volume",
    "30d": "thirty_day_volume",
    "total": "total_volume",
  };
  const sortBy = sortByType[type];
  if (!sortBy) {
    throw new Error(`Invalid type provided. Expected: 24h,7d,30d,total. Got: ${type}`);
  }
  return `https://opensea.io/rankings?sortBy=${sortBy}${chainExtraQueryParameter}`;
}
module.exports = rankings;
This is why I need to know how to do it
//Require module
const express = require('express');
const { evaluate, compile, parse } = require('mathjs');
// Express Initialize
const app = express();
const port = 8000;
app.listen(port, () => {
console.log('listen port 8000');
})
//create api
// Parses a hard-coded expression with mathjs and returns the result as JSON.
app.get('/hello_world', (req, res) => {
// "A B A" has no explicit operator; mathjs treats the spaces as implicit
// multiplication (A * B * A) — the behaviour the author wants to forbid.
const expression = "A B A";
console.log(expression.length);
let response;
// Values substituted for the symbols during evaluation.
const scope = {
A: 5,
B: 4
}
try {
const parsedExp = parse(expression);
const compiled = parsedExp.compile();
const result = compiled.evaluate(scope);
response = {
"expression": parsedExp.toString(),
// NOTE(review): `args` exists only on operator/function nodes — confirm this
// is the intended way to report the expression's variables.
"variables": parsedExp.args,
"result": result
}
console.log("success");
res.send(JSON.stringify(response));
} catch (error) {
console.log(error);
// NOTE(review): JSON.stringify on an Error yields "{}" because message/stack
// are non-enumerable — send error.message if the client needs details.
res.send(JSON.stringify(error));
}
})
The code and calculation are working fine, but it's applying multiplication by default. Is there a way we can stop this default behavior and throw an error message telling the user to please enter their desired operator?
I tried even with normal javascript code by splitting with space and tried to check if of +,-,*,/,^ operator but the user can still give multiple spaces then writes another variable
Help appreciated
There is currently no option to disable implicit multiplication, but there is a (currently open) github issue for that. And in the comments of that issue there is a workaround to find any implicit multiplications and throw an error if one is found.
// Workaround: walk the parsed expression tree and reject any implicit
// multiplication — mathjs marks such OperatorNodes with `implicit: true`.
try {
const parsedExp = parse(expression);
parsedExp.traverse((node, path, parent) => {
if (node.type === 'OperatorNode' && node.op === '*' && node['implicit']) {
throw new Error('Invalid syntax: Implicit multiplication found');
}
});
// `...` is a placeholder for the rest of the original request handler.
...
} catch (error) {
console.log(error);
res.send(JSON.stringify(error));
}
The original error says: Cannot destructure property 'firstime' of 'undefined' or 'null'.
I am developing web-base desktop application for Windows pc using node.js and Electron.
I am trying to persist some data in user data directory, I found the idea and using the same approach in this link.
Writing and fetching data works fine, however the error occurred at the first time of fetching the data.
here is the code for UserPreferences class
const electron = require('electron');
const path = require('path');
const fs = require('fs');
/**
 * Small JSON-file-backed preferences store kept in Electron's userData
 * directory. The file is loaded once in the constructor and rewritten
 * synchronously on every set().
 */
class UserPreferences {
  constructor(opts) {
    const app = electron.app || electron.remote.app;
    const userDataPath = app.getPath('userData');
    this.path = path.join(userDataPath, opts.configName + '.json');
    this.data = parseDataFile(this.path, opts.defaults);
    console.log(userDataPath);
  }
  // Read a single preference from the in-memory copy.
  get(key) {
    return this.data[key];
  }
  // Update a preference and persist the whole store to disk.
  set(key, val) {
    this.data[key] = val;
    fs.writeFileSync(this.path, JSON.stringify(this.data));
  }
}
/**
 * Load and parse a JSON file, falling back to the supplied defaults when the
 * file is missing or contains invalid JSON.
 * @param {string} filePath - absolute path of the JSON file.
 * @param {Object} defaults - value returned when reading/parsing fails.
 * @returns {Object} the parsed contents or the defaults.
 */
function parseDataFile(filePath, defaults) {
  let parsed;
  try {
    parsed = JSON.parse(fs.readFileSync(filePath));
  } catch (error) {
    parsed = defaults;
  }
  return parsed;
}
module.exports = UserPreferences;
and here's the function for using the UserPreferences class
// Reads the persisted user record and branches on whether this is the first
// run of the application.
function isFirstTime() {
try{
const userAccount = new UserPreferences({
configName: 'fipes-user-preferences', // We'll call our data file 'user-preferences'
defaults: {
user: { firstime: true, accountid: 0, profileid: '' }
}
});
// NOTE(review): if the stored JSON file exists but has no 'user' key,
// get('user') returns undefined and this destructuring throws
// "Cannot destructure property 'firstime' of 'undefined'" — which matches
// the error reported above. Guard the result before destructuring.
var { firstime, accountid, profileid } = userAccount.get('user');
if (firstime === true) { //check if firstime of running application
//do something
} else {
//do something
}
}catch(err){
console.log(err.message);
}
}
the error occurred on the line where I am checking whether firstime is true or false.
First of all, do not declare an object like var { firstTime, .. }. If you do, firstTime will be a property of an anonymous object that you can never access elsewhere. Check the output of the userAccount.get('user') function; if it contains an object like { firstime: true, accountid: "test", profileid: "test" }, then try this. Hope this helps you.
var result=userAccount.get('user');
if(result.firstTime===true){
//your code
}
Here is a version of UserPreferences which will be more natural to use as you write your code. You can create it like you see in isFirstTime.
console.debug(userPreferences[accountId]);
userPreferences[accountId] = 1;
This is preferred because there is no reason for a developer not to treat UserPreferences as an object. Another good idea would be separating the writing to the file into a separate flush method, in case you are updating preferences often.
const electron = require("electron");
const fs = require("fs");
const path = require("path");
/**
 * JSON-file-backed preferences store exposed through a Proxy, so callers can
 * read/write preferences with plain property access:
 *   prefs.accountId        // read
 *   prefs.accountId = 1;   // write + synchronous flush to disk
 */
class UserPreferences {
  constructor(defaultPrefs, pathToPrefs) {
    const app = electron.app || electron.remote.app;
    this.pathToPrefs = path.join(app.getPath("userData"), pathToPrefs + ".json");
    try {
      // NOTE(review): require() caches the file after the first load — edits
      // made to the file on disk by another process are not picked up.
      this.store = require(this.pathToPrefs);
    }
    catch (error) {
      this.store = defaultPrefs;
    }
    return new Proxy(this, {
      get(target, property) {
        return target.store[property];
      },
      set(target, property, value) {
        target.store[property] = value;
        fs.writeFileSync(target.pathToPrefs, JSON.stringify(target.store));
        // Bug fix: a Proxy set trap must report success by returning true;
        // without it, assignments throw a TypeError in strict-mode code
        // (including all ES modules).
        return true;
      }
    });
  }
}
module.exports = UserPreferences;
Here is a pure version of isFirstTime, that should do what you want, while maintaining a more robust method of checking for isFirstTime. The check can also be changed so check whether lastSignIn is equal to createdAt (with appropriate defaults, of course).
// First-run check: a null lastSignIn (the default) means the account has
// never signed in, i.e. this is the first time the app has run.
function isFirstTime() {
  const defaults = {
    user: {
      accountId: 0,
      createdAt: new Date(),
      lastSignIn: null,
      profileId: ""
    }
  };
  const account = new UserPreferences(defaults, "fipes-user-preferences");
  return account.lastSignIn === null;
}
After trying all manner of methods to test a route's mongoose save() throwing, I was not really sure how it should be done. I'm aiming for 100 % coverage with istanbul. Here's the core setup:
model.js
let mongoose = require('mongoose');
let Schema = mongoose.Schema;
// Schema for a paste: message is required; marked acts as a soft-delete flag;
// createdAt defaults to insertion time; updatedAt is set by the controller on
// mutation.
let PasteSchema = new Schema(
{
message: { type: String, required: true },
tags: [String],
marked: { type: Boolean, default: false },
createdAt: { type: Date, default: Date.now },
updatedAt: Date
}
);
// Model name 'paste' maps to the 'pastes' collection.
module.exports = mongoose.model('paste', PasteSchema);
controller.js
let Paste = require('./model');
// Other stuff
// I use a bit non-standard DELETE /pastes/:id for this
const markPaste = (req, res) => {
Paste.findById({ _id: req.params.id }, (err, paste) => {
if (!paste) {
res.status(404).json({ result: 'Paste not found' });
return;
}
paste.marked = true;
paste.updatedAt = new Date();
paste.save((err) => {
err
? res.status(400).json({ result: err })
: res.json({ result: 'Paste marked' });
});
});
}
module.exports = {
markPaste,
// Other stuff
}
routes.js
const express = require('express');
const app = express();
const pastes = require('./apps/pastes/controller'); // The file above
// Non-standard use of DELETE: /pastes/:id marks (soft-deletes) the paste via
// the controller above rather than removing it.
app.route('/pastes/:id')
.delete(pastes.markPaste);
module.exports = app;
In the below test, I want to simulate an error being thrown in the paste.save((err) => { above.
process.env.NODE_ENV = 'test';
let mongoose = require('mongoose');
let Paste = require('../apps/pastes/model');
let server = require('../index');
let chai = require('chai');
chai.use(require('chai-http'));
chai.use(require('chai-date-string'));
let expect = chai.expect;
let sinon = require('sinon');
let sandbox = sinon.createSandbox();
let pastes = require('../apps/pastes/controller');
let httpMocks = require('node-mocks-http');
// Other tests
Then the test I want to test save() error in the route:
it('should handle an error during the save in the endpoint', (done) => {
  // Create a paste to be deleted
  const pasteItem = new Paste({ message: 'Test 1', tags: ['integration', 'test'] });
  pasteItem.save()
    .then((paste) => {
      // Attempt code from below goes here
      // Bug fix: done() must be signalled only after the async work finishes.
      // The original called done() synchronously after kicking off save(),
      // so the test completed before any assertion inside .then() ran.
      done();
    })
    .catch((err) => {
      // Bug fix: fail the test with the error instead of just logging,
      // which previously left the test to time out.
      done(err);
    });
});
I didn't really find any clear reference to this in various Stack questions, or online, so here's how I did it:
The secret is in using the sinon sandbox, which applies even inside the route context during tests. Here is the working test:
it('should handle an error during the save in the endpoint', (done) => {
  const pasteItem = new Paste({ message: 'Test 1', tags: ['integration', 'test'] });
  pasteItem.save()
    .then((paste) => {
      // the sandbox is defined in the imports
      // This forces calling save() to raise an error
      sandbox.stub(mongoose.Model.prototype, 'save').yields({ error: 'MongoError' });
      chai.request(server)
        .delete('/pastes/' + paste._id)
        .end((err, res) => {
          // It applies within the route handling, so we get the expected 400
          expect(res).to.have.status(400);
          // Undo the stub so later tests can call save() normally.
          sandbox.restore();
          done();
        });
    })
    .catch((err) => {
      // Bug fix: propagate setup failures to Jest/Mocha via done(err) —
      // the old console.log left a failing test to hang until timeout.
      done(err);
    });
});
If you would call it outside of the sandbox, you would break all subsequent tests that use sinon. Ie.
// This would break things unintendedly
sinon.stub(mongoose.Model.prototype, 'save').yields({ error: 'MongoError' });
// This only breaks things (on purpose) in the test we want it to break in:
sandbox.stub(mongoose.Model.prototype, 'save').yields({ error: 'MongoError' });
If you have multiple things within the particular sandbox instance, you can of course restore the "unbroken" state within the test with sandbox.restore(); after the test case.
->
=============================== Coverage summary ===============================
Statements : 100% ( 60/60 )
Branches : 100% ( 14/14 )
Functions : 100% ( 0/0 )
Lines : 100% ( 57/57 )
================================================================================
Yay!