This is my basic JS to concatenate two strings, the context.getVariable is something which I want to mock using Sinon,
//util.js
var p;
function concat(p) {
var first_name = context.getVariable('first_name');
var res = p.concat(first_name);
return res;
}
concat(p);
I have added this test.js,
var expect = require('expect.js');
var sinon = require('sinon');
var rewire = require('rewire');
var app = rewire('./util.js');
var fakeContext = {
getVariable: function(s) {}
}
var contextGetVariableMethod;
beforeEach(function () {
contextGetVariableMethod = sinon.stub(fakeContext, 'getVariable');
});
afterEach(function() {
contextGetVariableMethod.restore();
});
describe('feature: concat', function() {
it('should concat two strings', function() {
contextGetVariableMethod.withArgs('first_name').returns("SS");
app.__set__('context', fakeContext);
var concat = app.__get__('concat');
expect(concat("988")).to.equal("988SS");
});
});
I am running,
node_modules.bin> mocha R:\abc\js-unit-test\test.js
util.js:7
var first_name = context.getVariable('first_name');
^
TypeError: context.getVariable is not a function
You need to import actual context here and then use sinon.stub to mock that getVariable method so it it will get that method when your actual code is running.
var expect = require('expect.js');
var sinon = require('sinon');
var rewire = require('rewire');
var context = require('context') // give correct path here
var app = rewire('./util.js');
var fakeContext = {
getVariable: function(s) {}
}
beforeEach(function () {
contextGetVariableMethod = sinon.stub(context, 'getVariable');
});
afterEach(function() {
contextGetVariableMethod.restore();
});
describe('feature: concat', function() {
it('should concat two strings', function() {
contextGetVariableMethod.withArgs('first_name').returns("SS");
var concat = app.__get__('concat');
expect(concat("988")).to.equal("988SS");
});
});
Related
I can't get toBeCalled() working in my Jade test script.
I get the following error msg when run the Jade test:
Error: toBeCalled() should be used on a mock function or a jasmine spy
I have a call to jest.unmock('../fooey') so not sure why I'm getting the error ?
fooey.js
var foo = function(phrase) {
return bar(phrase);
}
var bar = function(greeting) {
console.log(greeting + " Watz up?");
}
foo("Hi Bob!");
module.exports.foo = foo;
module.exports.bar = bar;
fooey-test.js:
jest.unmock('../fooey'); // unmock to use the actual implementation.
describe('fooey()', () => {
const foo = require('../fooey').foo;
const bar = require('../fooey').bar;
it('bar() is called.', () => {
foo("Hi Bob!");
expect(bar).toBeCalled();
});
});
I got it working with both a mock of bar() and also using spyOn()...
fooey.js
var Fooey = function() {
// var self = this;
this.foo = function(phrase) {
// var result = self.bar(phrase);
var result = this.bar(phrase);
return result;
// return "Junky."
};
this.bar = function(greeting) {
var result = greeting + " Watz up?"
console.log(result);
return result;
};
};
module.exports = Fooey;
fooey-test.js
jest.unmock('../fooey'); // unmock to use the actual implementation.
describe('fooey()', () => {
const Fooey = require('../fooey');
const fooey = new Fooey();
it('mock: bar() is called.', () => {
var myFooey = {
foo: fooey.foo,
bar: jest.genMockFunction()
};
myFooey.foo("Hello");
expect(myFooey.bar).toBeCalledWith("Hello");
});
it('bar() is called with "Hi Bob!".', () => {
spyOn(fooey, 'bar');
fooey.foo("Hi Bob!");
expect(fooey.bar).toHaveBeenCalledWith("Hi Bob!");
});
});
Completely loosing my mind on this:
I have an express app:
here is a snapshot of a few files for this example:
app.js
-models
--Event.js
--Match.js
routes
--matches.js
app.js:
global.__base = __dirname + '/';
var MatchModel = require('./models/Match');
var EventModel = require('./models/Event');
//...
matches.js:
var EventModel = require(__base + 'models/Event');
var MatchModel = require('../models/Match.js')
router.get('/', [passport.authenticate('bearer', {session: false}), function (req, res) {
EventModel.something()
}])
Event.js:
var MatchModel = require(__base + 'models/Match')
function something() {
MatchModel.createQuery()
return "jizz"
}
module.exports = {
createQuery : createQuery,
}
Match.js:
function createQuery() {
//..
}
module.exports = {
createQuery:createQuery
}
when the GET matches/ API is called:
Inside Event.js, MatchModel.createQuery() gives the error MatchModel.createQuery() is not a function. but if I move var MatchModel = require(__base + 'models/Match') inside the something() function, it works.
You can create a primary function createQuery and define its prototypes as per requirements and then export that function to be used in you Event.js
Inside Event.js
var matchModel = require(./Match.js)
exports.getSometing = function(){
var testSomething = new matchModel();
var resultOfCreateQuery = testSomething.createQuery()
console.log('Result of createQuery : '+resultOfCreateQuery);
}
Inside Match.js
function something(){
//another logic
}
something.prototype.createQuery= function(){
//create query logic
return "jizz"
}
module.exports = something;
I hope this helps :)
You will have to require them and append ()
Event.js:
var MatchModel = require(__base + 'models/Match')()
function EventModel () {
function something() {
MatchModel.createQuery()
return "jizz"
}
}
module.exports = EventModel
Match.js:
function MatchModel () {
function createQuery () {
//Do Something
}
}
module.exports = MatchModel
See https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Object/prototype
I have the following code:
var Promise = require('bluebird');
Promise.longStackTraces();
var path = require('path');
var fs = Promise.promisifyAll(require('fs-extra'));
var clone = require('nodegit').Clone.clone;
var tar = require('tar-fs');
var zlib = require('zlib');
var gzip = zlib.createGzip();
var globAsync = Promise.promisify(require('glob'));
module.exports = Archive;
function Archive(pkg) {
var self = this;
var tmp_dir_name = '.tmp';
var code_dir_name = 'code';
var files_dir_name = 'files';
var output_dir_name = 'archives';
var coverall_docs_dir_name = 'coverall_documents';
// the archive's name (no extension):
self.name = pkg.name;
self.recipient_name = pkg.recipient_name;
// path to letter.tex:
self.tex_letter_path = path.resolve(pkg.files.letter);
// path to resume.tex:
self.tex_resume_path = path.resolve(pkg.files.resume);
// path to merged.pdf (letter.pdf + resume.pdf):
self.pdf_package_path = path.resolve(pkg.compiled_files.package);
// temp dir where the archive is assembled:
self.tmp_path = path.resolve(tmp_dir_name, pkg.name);
// path to final archive:
self.output_path = path.resolve(output_dir_name, self.name + '.tar.gz');
// where to copy files to be added to the archive:
self.files_path = path.resolve(tmp_dir_name, self.name, files_dir_name);
// where the tex files are within the archive:
self.coverall_docs_path = path.resolve(self.files_path, code_dir_name, coverall_docs_dir_name);
}
Archive.prototype.make = Promise.method(function() {
var self = this;
return self._prepareFilesDir()
.then(self._copyFiles.bind(self))
.then(self._writeArchive.bind(self))
.then(self._delTmpDir.bind(self));
});
// ********************************
// * Private functions
// ********************************
Archive.prototype._prepareFilesDir = function() {
var self = this;
return fs.emptyDirAsync(self.tmp_path);
};
Archive.prototype._copyFiles = function() {
var self = this;
var sources = {
tex_letter_path: path.resolve(self.tex_letter_path, '..'),
tex_resume_path: path.resolve(self.tex_resume_path, '..'),
tex_letter_shared_path: path.resolve(self.tex_letter_path, '../../shared'),
pdf_package_path: self.pdf_package_path
};
var destinations = {
letter_path: path.resolve(self.coverall_docs_path, 'coverletters', self.recipient_name.toLowerCase()),
resume_path: path.resolve(self.coverall_docs_path, 'resume'),
letter_shared_path: path.resolve(self.coverall_docs_path, 'coverletters/shared'),
pdf_package_path: path.resolve(self.files_path, 'pdf', self.recipient_name.toLowerCase() + '.pdf'),
coverall_repo_path: path.resolve(self.files_path, 'code/coverall')
};
var filters = {
tex: function(filename) {
var contains_dot = /\./gm;
var hidden = /\/\./gm;
var cls_or_tex_file = /\.(cls|tex)$/gm;
var is_a_dir = !contains_dot.test(filename);
var is_not_hidden = (contains_dot.test(filename) && !hidden.test(filename));
var is_cls_or_tex = cls_or_tex_file.test(filename);
// it doesn't contain a dot or it isn't a hidden file or it is a cls/tex file
var is_allowed = is_a_dir || is_not_hidden || is_cls_or_tex;
return is_allowed;
},
pdf: /[^\.].*\.pdf/
};
var copyLetter = function() {
return fs.copyAsync(sources.tex_letter_path, destinations.letter_path, { filter: filters.tex });
};
function copyShared() {
return fs.copyAsync(sources.tex_letter_shared_path, destinations.letter_shared_path, { filter: filters.tex });
}
function copyResume() {
return fs.copyAsync(sources.tex_resume_path, destinations.resume_path, { filter: filters.tex });
}
function copyPdf() {
return fs.copyAsync(sources.pdf_package_path, destinations.pdf_package_path, { filter: filters.pdf });
}
function copyJs() {
return clone('https://github.com/coaxial/coverall.git', destinations.coverall_repo_path);
}
return Promise.all([
copyLetter(),
copyShared(),
copyResume(),
copyPdf(),
copyJs()
]);
};
Archive.prototype._writeArchive = function() {
var self = this;
var archive_dir_path = path.resolve(self.output_path, '..');
var tarPromise = function() {
return new Promise(function(resolve, reject) {
tar.pack(self.files_path)
.pipe(gzip)
.pipe(fs.createWriteStream(self.output_path))
.on('error', reject)
.on('finish', resolve);
});
};
return fs.ensureDirAsync(archive_dir_path)
.then(tarPromise);
};
Archive.prototype._delTmpDir = function() {
var self = this;
return fs.removeAsync(self.tmp_path);
};
and I am testing it with:
/*eslint-env mocha */
var chai = require('chai');
var chaiAsPromised = require("chai-as-promised");
var expect = chai.expect;
var Promise = require('bluebird');
Promise.longStackTraces();
var Archive = require('../lib/archive');
var path = require('path');
var fs = Promise.promisifyAll(require('fs-extra'));
var globAsync = Promise.promisify(require('glob'));
var tar = require('tar-fs');
var zlib = Promise.promisifyAll(require('zlib'));
var _ = require('lodash');
chai.use(chaiAsPromised);
describe.only('Archive', function() {
var pkg;
beforeEach(function() {
pkg = {
name: 'test_0790feebb1',
recipient_name: 'Test',
files: {
letter: '../coverall_documents/coverletters/test/letter.tex',
resume: '../coverall_documents/resume/resume.tex'
},
compiled_files: {
package: '../coverall_documents/coverletters/test/test.pdf'
}
};
});
// after(function() {
// return Promise.all([
// 'archives/test*',
// 'test/.tmp'
// ].map(function(glob_pattern) {
// return globAsync(glob_pattern)
// .each(function(filename) {
// // make every file writeable so the git packfiles can be removed
// return fs.chmodAsync(filename, '755')
// .then(function() { fs.removeAsync(filename); });
// })
// }));
// });
describe('#make', function() {
it('creates an archive', function() {
var modified_pkg = _.cloneDeep(pkg);
modified_pkg.name = 'test_0000000001';
var archive_location = path.resolve('archives', modified_pkg.name + '.tar.gz');
var test_archive = new Archive(modified_pkg);
return test_archive.make()
.then(function() { return fs.statAsync(archive_location); })
.then(function(file) { return expect(file).to.exist; })
.catch(function(e) { return expect(e).to.not.exist; });
});
it('creates a gzip compressed archive', function() {
var modified_pkg = _.cloneDeep(pkg);
modified_pkg.name = 'test_0000000002';
var archive_location = path.resolve('archives', modified_pkg.name + '.tar.gz');
var test_archive = new Archive(modified_pkg);
// inspired from https://github.com/mafintosh/gunzip-maybe/blob/master/index.js#L6-L11
var isGzipped = function(data) {
var GZIP_MAGIC_BYTES = [0x1f, 0x8b];
var DEFLATE_COMPRESSION_METHOD = 0x08;
var buffer = data[1];
if (buffer[0] !== GZIP_MAGIC_BYTES[0] && buffer[1] !== GZIP_MAGIC_BYTES[1]) return false;
if (buffer[2] !== DEFLATE_COMPRESSION_METHOD) return false;
return true;
};
return test_archive.make()
.then(function() { return fs.openAsync(archive_location, 'r'); })
.then(function(fd) {
var buffer = new Buffer(10);
var buffer_offset = 0;
var buffer_length = 10;
var file_position = 0;
return fs.readAsync(fd, buffer, buffer_offset, buffer_length, file_position);
})
.then(function(data) { console.log('data', data); return data; })
.then(function(data) { return expect(isGzipped(data)).to.be.true; })
});
it('has the correct directory structure', function() {
var modified_pkg = _.cloneDeep(pkg);
modified_pkg.name = 'test_0000000003';
var archive_location = path.resolve('archives', modified_pkg.name + '.tar.gz');
var test_archive = new Archive(modified_pkg);
var tmp_extract_path = path.resolve('test/.tmp');
var tarPromise = function(archive_path) {
return new Promise(function(resolve, reject) {
fs.createReadStream(archive_path)
.pipe(zlib.Unzip())
.pipe(tar.extract(tmp_extract_path))
.on('error', reject)
.on('finish', resolve);
})
};
var verifyDir = function() {
return Promise.all([
'code',
'pdf',
'code/coverall',
'code/coverall_documents',
'code/coverall_documents/coverletters',
'code/coverall_documents/coverletters/test',
'code/coverall_documents/coverletters/shared',
'code/coverall_documents/resume',
'code/coverall_documents/coverletters'
].map(function(subpath) {
return expect(fs.statAsync(path.resolve(tmp_extract_path, subpath)))
.to.be.fulfilled;
}))
};
return test_archive.make()
.then(function() { return tarPromise(archive_location); })
.then(function() { return verifyDir(); });
});
it('removes the temporary dir', function() {
var modified_pkg = _.cloneDeep(pkg);
modified_pkg.name = 'test_0000000004';
var archive_location = path.resolve('archives', modified_pkg.name + '.tar.gz');
var test_archive = new Archive(modified_pkg);
var tmp_dir = path.resolve('.tmp');
return test_archive.make()
.then(function() { return expect(fs.statAsync(tmp_dir)).to.be.rejected; });
});
});
});
Which results in:
$ mocha test
Archive
#make
✓ creates an archive (644ms)
1) creates a gzip compressed archive
2) has the correct directory structure
3) removes the temporary dir
1 passing (2s)
3 failing
1) Archive #make creates a gzip compressed archive:
Uncaught Error: write after end
at writeAfterEnd (_stream_writable.js:167:12)
at Gzip.Writable.write (_stream_writable.js:214:5)
at ondata (node_modules/tar-fs/node_modules/tar-stream/node_modules/readable-stream/lib/_stream_readable.js:574:20)
at readableAddChunk (node_modules/tar-fs/node_modules/tar-stream/node_modules/readable-stream/lib/_stream_readable.js:198:16)
at Readable.push (node_modules/tar-fs/node_modules/tar-stream/node_modules/readable-stream/lib/_stream_readable.js:162:10)
at Pack._encode (node_modules/tar-fs/node_modules/tar-stream/pack.js:154:17)
at Pack.entry (node_modules/tar-fs/node_modules/tar-stream/pack.js:100:10)
at onstat (node_modules/tar-fs/index.js:108:19)
at node_modules/tar-fs/index.js:40:9
at FSReqWrap.oncomplete (fs.js:95:15)
2) Archive #make has the correct directory structure:
AssertionError: expected false to be true
at Context.<anonymous> (test/archive_spec.js:96:10)
3) Archive #make removes the temporary dir:
Uncaught Error: write after end
at writeAfterEnd (_stream_writable.js:167:12)
at Gzip.Writable.write (_stream_writable.js:214:5)
at ondata (node_modules/tar-fs/node_modules/tar-stream/node_modules/readable-stream/lib/_stream_readable.js:574:20)
at readableAddChunk (node_modules/tar-fs/node_modules/tar-stream/node_modules/readable-stream/lib/_stream_readable.js:198:16)
at Readable.push (node_modules/tar-fs/node_modules/tar-stream/node_modules/readable-stream/lib/_stream_readable.js:162:10)
at Pack._encode (node_modules/tar-fs/node_modules/tar-stream/pack.js:154:17)
at Pack.entry (node_modules/tar-fs/node_modules/tar-stream/pack.js:100:10)
at onstat (node_modules/tar-fs/index.js:108:19)
at node_modules/tar-fs/index.js:40:9
at FSReqWrap.oncomplete (fs.js:95:15)
I suspected a race condition so I commented out the after block to see if it would make any difference but it doesn't.
I do not understand what the Uncaught Error: write after end is about nor why the stacktrace is unusable, even though I am using Promise.longStackTraces(). What is causing this error?
My tests look overly complicated for what they are doing and I am repeating code several times when instantiating the different test_archive objects. How could I refactor them?
You're trying to re-use the same gzip instance, which won't work. This also explains why the first test works just fine.
So move your var gzip = zlib.createGzip(); line to right inside your Archive.prototype._writeArchive function.
Here is the code I am writing tests for:
'use strict';
var internals = {};
var _ = require('lodash');
module.exports = {
initialize: function (query) {
internals.query = query;
},
createField: function (fieldId, accountId, payload) {
function callQuery (parList) {
var query = 'INSERT into fields VALUES (:uuid, :accountId, :shortcutName, :displayName, :fieldType, :widgetType, :columnOrder, :options, :required, NULL)';
return internals.query(query, parList, function () { return fieldId; });
}
var increment = 10;
var parameterList = {
'uuid': fieldId,
'accountId': accountId,
'shortcutName': payload.shortcutName,
'displayName': payload.displayName,
'fieldType': payload.fieldType,
'widgetType': payload.widgetType,
'columnOrder': payload.columnOrder,
'options': JSON.stringify(payload.options) || null,
'required': payload.required || 'f'
};
if (!payload.columnOrder) {
var columnQuery = 'SELECT MAX(column_order) from fields';
return internals.query(columnQuery, {}, function (x) {return x; })
.then(function (results) {
var highestColumnOrder = results[0]['MAX(column_order)'];
var newHighestColumnOrder = Math.ceil(highestColumnOrder / 10) * 10;
if (newHighestColumnOrder > highestColumnOrder) {
parameterList.columnOrder = newHighestColumnOrder;
} else {
parameterList.columnOrder = newHighestColumnOrder + increment;
}
return callQuery(parameterList);
});
} else {
return callQuery(parameterList);
}
},
getFieldsByAccountId: function(accountId, showDeleted) {
var callQuery = function(paramList) {
var query = 'SELECT ' + paramList.columns.join(", ") + ' FROM fields WHERE account_id = :account_id';
if (!showDeleted) {
query += ' AND archived_at IS NULL';
}
return internals.query(query, paramList, function(rows) {
return _.each(rows, function(row) {
if(row.options) {
row.options = JSON.parse(row.options);
}
row.required = !!row.required;
});
});
};
var columnList = ["uuid", "account_id", "shortcut_name", "display_name", "field_type", "required", "column_order", "options"];
var paramList = {'account_id': accountId};
if (showDeleted) {
columnList.push("archived_at");
}
_.extend(paramList, {'columns': columnList});
return callQuery(paramList);
}
};
Here is my test:
'use strict';
var assert = require('assert');
var sinon = require('sinon');
var Promise = require('bluebird');
var proxyquire = require('proxyquire');
var returnedValues = require('../../../return_values.js');
var fieldGateway = proxyquire('../../../../src/fields/lib/gateway', {});
describe('gateway', function () {
var accountId = 100;
var fieldId = 200;
var _query, sql, mockData, rows;
describe('createField', function() {
describe('is successful with a column order value', function () {
beforeEach(function() {
sql = 'INSERT into fields VALUES (:uuid, :accountId, :shortcutName, :displayName, :fieldType, :widgetType, :columnOrder, :options, :required, NULL)';
mockData = returnedValues.getFieldInputValues();
});
it("should only insert new field", function () {
_query = sinon.spy(function() { return Promise.resolve(); });
fieldGateway.initialize(_query);
fieldGateway.createField(fieldId, accountId, mockData);
mockData.accountId = accountId;
mockData.uuid = fieldId;
mockData.options = JSON.stringify(mockData.options);
assert.equal(sql, _query.getCall(0).args[0]);
assert.deepEqual(mockData, _query.getCall(0).args[1]);
});
it.only("_query should be called with the right sql statement and parameterList", function () {
_query = sinon.stub().returns(Promise.resolve(fieldId));
// _query.onCall(0).returns(Promise.resolve([{'MAX(column_order)': 10}]));
// _query.onCall(1).returns(Promise.resolve(fieldId));
fieldGateway.initialize(_query);
delete mockData.columnOrder;
fieldGateway.createField(fieldId, accountId, mockData);
console.log(_query.args);
assert.equal(sql, _query.getCall(0).args[0]);
fieldGateway.createField.restore();
});
});
});
});
The problem is that when the test runs, the only SQL query that runs is the SELECT statement. What should happen is one SQL statement runs, then an INSERT statement runs
This happens because bluebird is a true Promise/A+ compliant library. And by definition all chained promises must be run in a different execution tick. So only the first promise is executed synchronously (in same tick).
You should tell mocha to "wait" for the rest to act. You do this by specifying a done callback in your unit test and calling it accordingly when your promises finished their job
it.only("_query should be called with the right sql statement and parameterList", function (done) {
_query = sinon.stub().returns(Promise.resolve(fieldId));
fieldGateway.initialize(_query);
delete mockData.columnOrder;
fieldGateway.createField(fieldId, accountId, mockData)
.then(function(){
/// assertion code should be adjusted here
console.log(_query.args);
assert.equal(sql, _query.getCall(0).args[0]);
fieldGateway.createField.restore();
//tell Mocha we're done, it can stop waiting
done();
})
.catch(function(error) {
//in case promise chain was rejected unexpectedly
//gracefully fail the test
done(error);
};
});
Whenever you test your promise-returning functions you should always handle result in a .then
In my main.js, I have:
var example = require('./test');
example.speak('Hello world');
While in my test.js, I have:
module.exports = function() {
this.speak = function(str) {
str += ' < you typed.';
return str;
};
this.listen = function(str) {
return str;
};
};
What exactly am I doing wrong so that I can use browserify correctly with this?
You should either export an object:
module.exports = {
speak: function () {},
// etc
}
or use
var example = new require("./test")();