In Mocha test scripts the try/catch block is swallowing errors

I am facing an issue with try/catch in a Mocha script. Even though some of the tests are failing, Mocha displays "All passed". Below is my test script file; please help me fix this issue. In this code, I am posting an input string from an array to a URL and expecting an XML response. I am using the xml2js module to parse the XML response to a JSON string and comparing the XML child elements with expect.
var expect = require('chai').expect;
var request = require('supertest');
var xml2js = require('xml2js');
var PrettyError = require('pretty-error');
var pe = new PrettyError();
var async = require('async');
var fs = require('fs');
var parser = new xml2js.Parser();
var build = require('./config').Build;
var build_url = build.Environment[build.Type].HostUrl;
var inputArray1 = [];

describe('Verifying Incoming post output response', function() {
    var index = 0;
    validateFields(index);

    function validateFields(index) {
        var input = inputArray1[index];
        var actualInputArray = input.split(",");
        describe(actualInputArray[0], function() {
            it(actualInputArray[1], function(done) {
                request(build_url).post('/lead')
                    .send(actualInputArray[2])
                    .end(function(err, res) {
                        var resxml = res.text;
                        console.log("resxml", resxml);
                        parser.parseString(resxml, function(err, result) {
                            try {
                                expect(result['Header']['RESULT']).to.deep.equal([actualInputArray[3]]);
                                expect(result['Header']['STATUS_CODE']).to.deep.equal([actualInputArray[4]]);
                                expect(result['Header']['MESSAGE']).to.deep.equal([actualInputArray[5]]);
                            } catch (err) {
                                console.log(pe.render(err.message));
                            }
                            index = index + 1;
                            if (index < inputArray1.length) {
                                validateFields(index);
                            }
                            done();
                        });
                    });
            }).timeout(5000);
        });
    }
});

When you catch the error, done() is still called and resolves the test, so all your tests will pass anyway.
Pass the error to done() in the catch block, for example done(new Error("Your error message")) or simply done(err), so the test fails in case of an error.
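For example, here is a minimal sketch of the try/catch from the question (the index bookkeeping stays as in the original), with the caught assertion error handed to done so Mocha reports the failure:

try {
    expect(result['Header']['RESULT']).to.deep.equal([actualInputArray[3]]);
    expect(result['Header']['STATUS_CODE']).to.deep.equal([actualInputArray[4]]);
    expect(result['Header']['MESSAGE']).to.deep.equal([actualInputArray[5]]);
    done(); // every assertion passed, report success
} catch (err) {
    console.log(pe.render(err.message));
    done(err); // fail the test and surface the assertion error
}
index = index + 1;
if (index < inputArray1.length) {
    validateFields(index);
}

Note that done is called exactly once on either path, which avoids Mocha's "done() called multiple times" error.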

Related

How to read a specific value from JSON in an archived file using JavaScript and JSZip

I am reading a JSON file from within a zip file using JSZip. I can open it and get the information that I want into the console from my function. I can't, however, get that information out of my JavaScript function. Maybe I am doing it wrong; I don't usually code in JS.
const JSZip = require("jszip");
const fs = require("fs");
var myReturn;

function readJsons(bookPath, bookName, fileName, filePath, keyOption) {
    fs.readFile(bookPath + bookName, function(err, data) {
        if (err) throw err;
        JSZip.loadAsync(data).then(function(zip) {
            // Read the contents of the '.txt' file
            zip.file(filePath + fileName).async("string").then(function(data) {
                var mydata = JSON.parse(data);
                // gets the value of the key entered
                myReturn = JSON.stringify(mydata[0][keyOption]); // value here should be "test book"
                console.log(myReturn); // printed in console is "test book", works to here
                return myReturn;
            });
        });
    });
}

console.log(readJsons('simplelbook.zip', '', 'frontMatter.json', '', 'bookName'));
The problem is that you are returning inside the callback, so you aren't returning from the actual function. The solution is to use async/await instead:
const JSZip = require("jszip");
const fs = require("fs");
const util = require("util"); // require the util module
const readFile = util.promisify(fs.readFile); // transform fs.readFile into a more compatible form

async function readJsons(bookPath, bookName, fileName, filePath, keyOption) {
    try {
        // this part does the same thing, but with different syntax
        const data = await readFile(bookPath + bookName);
        const zip = await JSZip.loadAsync(data);
        const jsonData = await zip.file(filePath + fileName).async("string");
        const mydata = JSON.parse(jsonData);
        const myReturn = JSON.stringify(mydata[0][keyOption]);
        return myReturn; // return the data, simple as that
    } catch (e) {
        console.error(e); // error handling
    }
}

(async () => { // self-executing async function so we can use await
    console.log(
        await readJsons("simplelbook.zip", "", "frontMatter.json", "", "bookName")
    );
})()
Notice I have imported the util module to turn fs.readFile into a function that is more suited for async/await :)
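As an aside (not part of the original answer), on Node 10+ the built-in fs.promises API should work here without util.promisify; a minimal variant sketch:

const JSZip = require("jszip");
const fsp = require("fs").promises; // promise-based fs API, available since Node 10

async function readJsons(bookPath, bookName, fileName, filePath, keyOption) {
    const data = await fsp.readFile(bookPath + bookName);
    const zip = await JSZip.loadAsync(data);
    const jsonData = await zip.file(filePath + fileName).async("string");
    return JSON.stringify(JSON.parse(jsonData)[0][keyOption]);
}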

How to read a file before running a set of Mocha tests

I am trying to read a file which contains all the inputs for a set of Mocha tests. While trying to read this file I just find that the inputs (a Map) are null.
Scrapper_cheerio.js is written in the async/await format; however, I don't believe that has anything to do with a file not being read.
If I understand before correctly, the stuff in the before block should happen before any tests occur. Currently this is what I have tried:
const assert = require('chai').assert;
const fs = require('fs');
const scrapper_cheerio = require('../scrapper/Scrapper_cheerio');

describe('test1', function() {
    var inputs = null;
    before(function(done) {
        fs.readFile('./inputs.txt', 'utf8',
            function(err, fileContents) {
                if (err) throw err;
                const string_data = fileContents;
                const data = eval(string_data);
                inputs = new Map(data);
                done();
            });
    });
    describe('is_null_input()', function() {
        const is_null_input = inputs.get('is_null_input');
        const json_result = scrapper_cheerio.is_null_json(is_null_input);
        it('should return a json string', function() {
            assert.isObject(json_result, 'is json object');
        });
    });
});
I am just expecting this first test to be true because it happens to be the easiest to test; however, I cannot read the file. I am very new to Mocha testing, hence I may, and probably do, have some glaring and obvious errors. Any and all help is greatly appreciated.
Edit 1:
It was pointed out (before quickly being erased; it was really helpful, thank you) that I need to make an async call, and I did. I can now read the file and I can see that I'm reading it; however, I cannot pass the value down to the describe below.
describe('test1', function(done) {
    var inputs = null;
    before('test1', function() {
        const string_data = fs.readFileSync('./inputs.txt', 'utf8');
        const data = eval(string_data);
        console.log('data uptop')
        inputs = new Map(data);
        console.log(inputs);
        done();
    });
    describe('is_null_input()', function(done) {
        const is_null_input = inputs.get('is_null_input');
        var json_result = scrapper_cheerio.is_null_json(is_null_input);
        console.log('these are json results', json_result);
        it('should return a json string', function() {
            assert.isObject(json_result, 'is json object');
        });
    });
});
So I can read the file, however the value is not reachable in the describe.
Here is a simplified working test that should get you jumpstarted:
inputs.txt
[[ 'is_null_input', true ]]
code.test.js
const fs = require('fs');
const expect = require('chai').expect;

describe('test1', function () {
    var inputs = null;
    before('test1', function (done) {
        fs.readFile('./inputs.txt', 'utf8', (err, data) => {
            if (err) throw err;
            inputs = new Map(eval(data));
            done();
        });
    });
    describe('is_null_input()', function () {
        it('should return a json string', function () {
            const is_null_input = inputs.get('is_null_input');
            expect(is_null_input).to.equal(true); // Success!
            // ...
            // var json_result = scrapper_cheerio.is_null_json(is_null_input);
            // console.log('these are json results', json_result);
            // assert.isObject(json_result, 'is json object');
        });
    });
});
Details
The Mocha run cycle runs all describe callbacks first, and describe callbacks always run synchronously (you can't use done, an async function, or return a Promise in a describe callback).
So this line:
const is_null_input = inputs.get('is_null_input');
...was running before the before, meaning inputs was still null.
Moving that code inside of the test means it will run after the before and inputs will be defined.
It is best practice to not block the JavaScript event loop with readFileSync, so the example above uses done in combination with readFile.
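To see the two phases for yourself, here is a minimal sketch (any file Mocha picks up will do); running it should log the describe body first, then the hook, then the test:

const expect = require('chai').expect;

describe('run cycle demo', function () {
    console.log('1: describe body, runs during the collection phase');
    before(function () {
        console.log('2: before hook, runs once tests start');
    });
    it('runs last', function () {
        console.log('3: test body');
        expect(true).to.equal(true);
    });
});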

Node.js - Array undefined but logging on console

What I'm trying to do is download a CSV file, read it line by line, and add each split line (on ',') to tmparray.
This code works and prints all the element in the array.
var request = require('request');
var fs = require('fs');
var readline = require('readline');

try {
    request('https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.csv').pipe(fs.createWriteStream("MyCsv.txt"));
} catch (e) {
    console.error(e);
}

var inputFile = 'MyCsv.csv';
var tmparray;

// read the file
var rd = readline.createInterface({
    input: fs.createReadStream('/home/nome/Node/MyCsv.csv')
});

try {
    // read line by line
    rd.on('line', (line) => {
        tmparray += line.split(",");
        // print the elements
        tmparray.forEach((element) => {
            console.log(element);
        }, this);
    });
} catch (e) {
    console.error(e);
}
What I want to do is print the array after I have assigned it.
I've tried this:
var request = require('request');
var fs = require('fs');
var readline = require('readline');

try {
    request('https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.csv').pipe(fs.createWriteStream("MyCsv.txt"));
} catch (e) {
    console.error(e);
}

var inputFile = 'MyCsv.csv';
var tmparray;

// read the file
var rd = readline.createInterface({
    input: fs.createReadStream('/home/nome/Node/MyCsv.csv')
});

try {
    // read line by line
    rd.on('line', (line) => {
        tmparray += line.split(",");
    });
} catch (e) {
    console.error(e);
} finally {
    console.log(tmparray); // undefined
    // or this: console.log(tmparray[0]) can't read the property '0' of undefined
}
but the array is printed as undefined.
The problem is that rd.on(...) is asynchronous.
That means that you are telling rd that when it reads a line, it should add it to tmparray; but that doesn't actually happen yet. It happens moments later, after you console.log(tmparray).
You should say rd.on('close', () => console.log(tmparray)) to tell Node "when you have finished reading rd, then log the data".
There are a couple of other issues in the code, but they should be easier to find once this is fixed. For one, you're trying to build up an array using the += operator, which doesn't work: since tmparray starts out undefined, += concatenates everything into one string. Initialize it as an array (var tmparray = [];) and push into it instead.
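Putting those fixes together, a rough sketch (assuming MyCsv.csv has already been downloaded to the working directory):

var fs = require('fs');
var readline = require('readline');

var tmparray = []; // start with a real array

var rd = readline.createInterface({
    input: fs.createReadStream('MyCsv.csv')
});

// collect the fields from each line as it arrives
rd.on('line', (line) => {
    tmparray = tmparray.concat(line.split(','));
});

// log only once the whole file has been read
rd.on('close', () => {
    console.log(tmparray);
});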
Why not use the csv package? It will give you the same result. Here is an example of transforming a CSV file into an array:
const csv = require('csv'),
    request = require('request');

var url = 'https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.csv';

request(url, function (err, response, data) {
    if (err) throw err;
    csv.parse(data, function (err, data) {
        if (err) throw err;
        // here you get your array
        console.log(data);
    });
});

Using knex to read from a file and insert into a database ends in error

I have a small piece of code which reads a line from a file, parses it, and then inserts it into my database.
However, after 10 to 12 thousand lines I always get this error:
Unhandled rejection Error: Knex: Timeout acquiring a connection. The pool is probably full. Are you missing a .transacting(trx) call?
The first file has about 150k lines.
I already tried to manipulate the transaction directly, but had no success.
Any ideas on how to manage resources correctly in order to process the whole file?
Here is the code I'm trying for now:
var fs = require('fs');
var knexfile = require('./knexfile');
var knex = require('knex')(knexfile.production);
var readline = require('readline');

var rl = readline.createInterface({
    terminal: false,
    input: fs.createReadStream('FOO_BAR.TXT') // About 150k lines
});

knex.transaction(function(tx) {
    rl.on('line', function(line) {
        knex("dadosbrutos").insert({ // this table does exist
            AA_DATA: line.substring(0, 8),
            BB_DATA: line.substring(8, 16),
            CC_DATA: line.substring(36, 44)
        }).then(function() {
            tx.commit(); // dies after 12k inserts
        });
    });
});
I created a module to promisify readline, https://www.npmjs.com/package/readline-promise, but I'll include the source in this post with my answer.
Also make sure that you are not exceeding the maximum transaction size for your database.
Is there a reason you need to use a transaction? If not, you can just do all of the inserts without one, as shown after the transacting example below.
var fs = require('fs');
var readline = require('readline'); // note: this require was missing from the original listing
var promise = require('bluebird');
var knexfile = require('./knexfile');
var knex = require('knex')(knexfile.production);

// iterates through each line, calling callback
function each(cfg) {
    return function(callback) {
        return new promise(function(resolve, reject) {
            // create an array to store callbacks
            var rl, cbs = [];
            // create an interface
            try {
                rl = readline.createInterface(cfg);
            } catch (err) {
                return reject(err);
            }
            // handle a new line
            rl.on('line', function(line) {
                cbs.push(callback(line));
            });
            // handle close
            rl.on('close', function() {
                promise.all(cbs).then(function() {
                    resolve({
                        lines: cbs.length
                    });
                })
                .caught(function(err) {
                    reject(err);
                });
            });
        });
    };
}

knex.transaction(function(tx) {
    return each({
        terminal: false,
        input: fs.createReadStream('FOO_BAR.TXT')
    })(function(line) {
        return knex("dadosbrutos").insert({ // this table does exist
            AA_DATA: line.substring(0, 8),
            BB_DATA: line.substring(8, 16),
            CC_DATA: line.substring(36, 44)
        }).transacting(tx);
    })
    .then(function() {
        tx.commit();
    });
});
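If a transaction isn't actually required, a rough alternative sketch (assuming the same fixed-width FOO_BAR.TXT layout, with an arbitrary chunk size of 500) is to buffer the rows and let knex.batchInsert chunk the inserts so the pool is never exhausted:

var fs = require('fs');
var readline = require('readline');
var knexfile = require('./knexfile');
var knex = require('knex')(knexfile.production);

var rows = [];
var rl = readline.createInterface({
    terminal: false,
    input: fs.createReadStream('FOO_BAR.TXT')
});

// parse every line into a row object first
rl.on('line', function(line) {
    rows.push({
        AA_DATA: line.substring(0, 8),
        BB_DATA: line.substring(8, 16),
        CC_DATA: line.substring(36, 44)
    });
});

// then insert in chunks once the file is fully read
rl.on('close', function() {
    knex.batchInsert('dadosbrutos', rows, 500)
        .then(function() {
            console.log('inserted', rows.length, 'rows');
            return knex.destroy();
        })
        .catch(console.error);
});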

Returning a value from a nested function using the request NPM package

I'm scraping the Ghost blogging platform and the package I'm using to do so is request, but I'm not sure how to return a value from a nested request. I've commented the area that is causing the issue. Thanks!
var express = require('express');
var fs = require('fs');
var request = require('request');
var cheerio = require('cheerio');
var app = express();

app.get('/', function(req, res) {
    var url = 'http://<subdomain>.ghost.io';
    var articles = [];
    request(url, function(error, response, html) {
        if (!error) {
            var $ = cheerio.load(html);
            var post;
            $('article').each(function(index) {
                var self = $(this);
                var article = {
                    header: self.find('h2.post-title').text(),
                    route: url + self.find('h2.post-title a').attr('href'),
                    content: '',
                    author: self.find('footer a').text(),
                    timestamp: self.find('time.post-date').text()
                };
                request(article.route, function(error, response, html) {
                    $ = cheerio.load(html);
                    post = $('section.post-content').text();
                    return post; //*** this is what I can't return ***//
                    //*** I'd like it to be the value of article.content ***//
                });
                console.log(post); //*** undefined ***//
                articles.push(article);
            });
            fs.writeFile('posts.json', JSON.stringify(articles, null, 4), function(err) {
                console.log('Posts created.');
            });
        }
    });
})

app.listen('8000');
console.log('Watching for changes.');
exports = module.exports = app;
So your problem boils down to having a list of URLs, wanting to request all of them asynchronously (because Node.js), and then knowing when they've all finished so you can do something with the collected results.
The async module (npm install async) will let you do that like this:
var request = require('request');
var async = require('async');

var urls = ["http://google.com", "http://yahoo.com"];

async.map(urls, request, function(err, results) {
    // First 100 characters of http://google.com
    console.log(results[0].body.substr(0, 100));
    // First 100 characters of http://yahoo.com
    console.log(results[1].body.substr(0, 100));
});
So you can apply this to your problem by doing the following (see the sketch after this list):
1. Synchronously create the entire articles list.
2. Use async.map on the list.
3. In the callback to async.map, you have a list of all the responses; you can process them synchronously.
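Applied to the scraper above, a rough sketch (reusing the articles array already built inside the first request callback; the selectors are taken from the question, not verified):

// inside the first request callback, after the $('article').each(...) loop
var routes = articles.map(function(a) { return a.route; });

async.map(routes, request, function(err, responses) {
    if (err) return console.error(err);
    // responses come back in the same order as routes
    responses.forEach(function(response, i) {
        var $post = cheerio.load(response.body);
        articles[i].content = $post('section.post-content').text();
    });
    fs.writeFile('posts.json', JSON.stringify(articles, null, 4), function(err) {
        console.log('Posts created.');
    });
});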
