I have been playing around with Chrome's FileSystem API. I have built a couple of functions that together automatically download a JSON object and store it as a string. If the last server request was made within 24 hours, I automatically use the last version of the file. I use this for managing a huge data dump that I do statistical analysis on.
The entire system only has one function that needs to be exposed. It's getData.
Currently all these functions are global variables. How should I make this contained in an orderly way.
//This file will cache serverdata every day.
// Globals shared by the caching helpers below. Only getData is meant to be
// called by consumers; everything else is internal plumbing.
var onInitFs,
errorHandler,
fileSystemInit,
saveFile,
readFile,
fileSystem,
getData;
//request rights to save files to system.
// Requests persistent storage quota, then opens the persistent filesystem.
// NOTE(review): 1048*1048*256 is presumably meant to be 1024*1024*256
// (256MB) -- confirm.
fileSystemInit = function(){
//Browser specific
window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;
navigator.webkitPersistentStorage.requestQuota(1048*1048*256, function(grantedBytes) {
//once approved (or if previously approved):
window.requestFileSystem(PERSISTENT, grantedBytes, onInitFs, errorHandler);
}, function(e) {
console.log('Error', e);
});
};
//make filesystem global.
// Success callback for requestFileSystem: stores the FileSystem handle in
// the shared `fileSystem` variable used by saveFile/readFile below.
onInitFs = function(fs) {
fileSystem = fs;
};
fileSystemInit();
// Serializes `content` to JSON and writes it to a file named after `url`.
// Invokes `callback` (if given) once the write has completed. Requires the
// shared `fileSystem` handle set up by fileSystemInit()/onInitFs.
saveFile = function(url, content, callback){
    var filename = makeFilename(url);
    if(!fileSystem){
        console.log('no filesystem registered');
        return;
    }
    fileSystem.root.getFile(filename, {create: true}, function(fileEntry) {
        fileEntry.createWriter(function(fileWriter) {
            // Attach the handlers BEFORE starting the write so no event can
            // be missed (the original attached them after calling write()).
            fileWriter.onwriteend = function(e) {
                console.debug('Write completed.', e);
                if(callback){
                    callback();
                }
            };
            fileWriter.onerror = function(e) {
                console.log('Write failed: ', e);
            };
            var blob = new Blob([JSON.stringify(content)], {type: 'application/json'});
            fileWriter.write(blob);
        }, errorHandler);
    }, errorHandler);
};
// Reads the cached JSON file for `url`, parses it, and hands the resulting
// object to `callback`. Requires the shared `fileSystem` handle.
readFile = function(url, callback){
    var filename = makeFilename(url);
    if(!fileSystem){
        console.log('no filesystem registered');
        return;
    }
    fileSystem.root.getFile(filename, {}, function(fileEntry){
        //this object reads files.
        var reader = new FileReader();
        //register callback for read files
        reader.onloadend = function(e){
            var callbackValue = JSON.parse(this.result);
            callback(callbackValue);
        };
        // Surface read failures instead of silently dropping them.
        reader.onerror = errorHandler;
        //read file-function
        fileEntry.file(function(file){
            reader.readAsText(file);
        }, errorHandler);
    }, errorHandler);
};
// Builds a filesystem-safe filename from a URL by stripping every non-word
// character, e.g. "http://a.com/b" -> "httpacomb.json".
// BUG FIX: `makeFilename` was missing from the var declaration list above,
// so this assignment created an implicit global; declare it explicitly.
var makeFilename = function(url){
    return url.replace(/\W/g, '') + '.json';
};
// Shared error callback: logs whatever error object the FileSystem API
// hands us.
errorHandler = function (err) {
    console.log('Error: ', err);
};
// Public entry point. Delivers data for `url` through one of two callbacks:
// - callbackNewData(url): fresh data was downloaded and saved to disk.
// - callbackOldData(url): the on-disk copy is under 24h old; read that one.
// The last download time is tracked per-url in localStorage.
getData = function(url, callbackNewData, callbackOldData){
var lastDownloaded = localStorage.getItem(url+'lastDownloaded'),
oneDay = 1000*60*60*24;
//update data if the data is old.
// NOTE(review): the 200ms setTimeout presumably gives fileSystemInit() time
// to finish before saveFile needs the handle -- an explicit ready callback
// would be more reliable; confirm.
window.setTimeout(function(){
if(!lastDownloaded || new Date()-new Date(lastDownloaded) > oneDay ){
console.debug('downloading '+url);
d3.json(url, function(data){
localStorage.setItem(url+'lastDownloaded',new Date());
console.debug('saving '+url);
saveFile(url, data, function(){
callbackNewData(url);
});
});
}else{
callbackOldData(url);
}
}, 200);
};
You can wrap the whole thing in an anonymous function and expose getData only. This is the easiest way to do it.
// BUG FIX: the original `var getDataFromUrl = function () { ... })();` is a
// syntax error -- the function expression must be wrapped in parentheses to
// be invoked immediately.
var getDataFromUrl = (function () {
    //This file will cache serverdata every day.
    var onInitFs,
        errorHandler,
        fileSystemInit,
        saveFile,
        readFile,
        fileSystem,
        getData;
    // Your original code here ...
    return getData; // This exposes the getData function.
})();
In this way you only expose one global function, getDataFromUrl, which is exactly the public API.
For more modern usage, you may want to check out Common JS Modules and Browserify, which let you do exports and require both in browser and NodeJS. There is also a UMD Pattern for exporting libraries.
(function(window){
    'use strict';
    // BUG FIX: `makeFilename` was assigned below without ever being
    // declared; under 'use strict' that assignment throws a ReferenceError.
    // It is now included in the declaration list with the other helpers.
    var onInitFs, errorHandler, fileSystemInit, saveFile, readFile, fileSystem, getData, makeFilename;
    fileSystemInit = function(){
        // your code
    };
    //make filesystem global.
    onInitFs = function(fs) {
        //your code
    };
    fileSystemInit();
    saveFile = function(url, content, callback){
        //your code
    };
    readFile = function(url, callback){
        //your code
    };
    makeFilename = function(url){
        //your code
    };
    errorHandler = function(e) {
        //your code
    };
    getData = function(url, callbackNewData, callbackOldData){
        //your code
    };
    // Expose only the public entry point on the global object.
    window.HimmatorsFileStorageAPI = getData; // you can change the name here
})(window);
And you can use it simply by including this script in your page and then calling
HimmatorsFileStorageAPI(url, callbackNewData, callbackOldData);
The Module pattern would be a good start: http://addyosmani.com/resources/essentialjsdesignpatterns/book/#modulepatternjavascript
Pseudo-class also would be great:
// Simple pseudo-class: remembers two values and combines them with two more.
function StorageInterface(arg1, arg2) {
    this.arg1 = arg1;
    this.arg2 = arg2;
}

// Returns the sum of both call arguments and both stored constructor values.
StorageInterface.prototype.method = function (arg3, arg4) {
    return this.arg1 + this.arg2 + arg3 + arg4;
};

var si = new StorageInterface(100, 500);
si.method(3, 4);
Just prototype the heck out of it :-) And use some scoped instances(using var that = this) to pass elements back to the parent objects from different scopes.
Now you can just start a new FileSystemInstance() to do your magic.
If you wish to make the more "arcane" methods private you could consider moving them to an object within your object, but in the end anyone with true perseverance will be able to access them. So I advise going with a public approach and naming the private methods _fileSystemInit, so people who read the code know they are internal methods.
//This file will cache serverdata every day.
// Pseudo-class wrapping the FileSystem API. Constructing it immediately
// requests quota and opens the filesystem (see fileSystemInit below).
function FileSystemInstance() {
this.fileSystem = null;
this.requestFileSystem = null;
this.fileSystemInit();
}
//request rights to save files to system.
// Picks the vendor-prefixed requestFileSystem and binds it to window
// (calling it unbound throws an "Illegal invocation" TypeError), then
// requests persistent quota and opens the filesystem. Leftover debugging
// console.log calls have been removed.
FileSystemInstance.prototype.fileSystemInit = function(){
    //Browser specific
    this.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;
    this.requestFileSystem = this.requestFileSystem.bind(window);
    var that = this;
    navigator.webkitPersistentStorage.requestQuota(1048*1048*256, function(grantedBytes) {
        //once approved (or if previously approved):
        // window.PERSISTENT spelled explicitly instead of relying on the
        // bare global PERSISTENT.
        that.requestFileSystem(window.PERSISTENT, grantedBytes, function(fs){that.onInitFs(fs)}, function(e){that.errorHandler(e)});
    }, function(e) {
        console.log('Error', e);
    });
};
//make filesystem global.
// Success callback: keeps the FileSystem handle on the instance for
// saveFile/readFile.
FileSystemInstance.prototype.onInitFs = function(fs) {
this.fileSystem = fs;
};
// Serializes `content` to JSON and writes it to a per-url file; `callback`
// fires once the write has finished.
FileSystemInstance.prototype.saveFile = function(url, content, callback){
    var that = this;
    var filename = this.makeFilename(url);
    // BUG FIX: the guard tested the bare global `fileSystem` (undefined in
    // this class-based version) instead of the instance handle.
    if(!this.fileSystem){
        console.log('no filesystem registered');
        return;
    }
    // BUG FIX: `errorHandler` was referenced as a bare (undefined) global;
    // route errors to the instance method instead.
    var onError = function(e){ that.errorHandler(e); };
    this.fileSystem.root.getFile(filename, {create: true}, function(fileEntry) {
        fileEntry.createWriter(function(fileWriter) {
            // Attach handlers before starting the write so no event is missed.
            fileWriter.onwriteend = function(e) {
                console.debug('Write completed.', e);
                if(callback){
                    callback();
                }
            };
            fileWriter.onerror = function(e) {
                console.log('Write failed: ', e);
            };
            var blob = new Blob([JSON.stringify(content)], {type: 'application/json'});
            fileWriter.write(blob);
        }, onError);
    }, onError);
};
// Reads the cached JSON file for `url`, parses it, and passes the object to
// `callback`. Throws if the filesystem handle is not ready yet.
FileSystemInstance.prototype.readFile = function(url, callback){
    var that = this;
    var filename = this.makeFilename(url);
    if(!this.fileSystem){
        throw new Error('no filesystem registered');
    }
    // BUG FIX: `errorHandler` was referenced as a bare (undefined) global;
    // route errors to the instance method instead.
    var onError = function(e){ that.errorHandler(e); };
    this.fileSystem.root.getFile(filename, {}, function(fileEntry){
        //this object reads files.
        var reader = new FileReader();
        //register callback for read files
        reader.onloadend = function(e){
            var callbackValue = JSON.parse(this.result);
            callback(callbackValue);
        };
        // Surface read failures instead of silently dropping them.
        reader.onerror = onError;
        //read file-function
        fileEntry.file(function(file){
            reader.readAsText(file);
        }, onError);
    }, onError);
};
// Strips every non-word character from the URL so it can double as a flat,
// filesystem-safe filename with a .json extension.
FileSystemInstance.prototype.makeFilename = function (url) {
    var safeName = url.replace(/\W/g, '');
    return safeName + '.json';
};
// Shared error callback for all FileSystem operations on this instance.
FileSystemInstance.prototype.errorHandler = function(e) {
console.error('Error: ', e);
};
// Public entry point. Delivers data for `url` via callbacks:
// - callbackNewData(url): fresh data was downloaded and written to disk.
// - callbackOldData(url): the on-disk copy is less than 24h old.
// Last-download timestamps are tracked per-url in localStorage. Note this
// method has no return value; results flow through the callbacks only.
FileSystemInstance.prototype.getData = function(url, callbackNewData, callbackOldData){
var that = this;
var lastDownloaded = localStorage.getItem(url+'lastDownloaded'),
oneDay = 1000*60*60*24;
//update data if the data is old.
// NOTE(review): the 200ms delay presumably lets fileSystemInit finish
// before saveFile needs the handle; an explicit ready callback would be
// more reliable -- confirm.
window.setTimeout(function(){
if(!lastDownloaded || new Date()-new Date(lastDownloaded) > oneDay ){
console.debug('downloading '+url);
d3.json(url, function(data){
localStorage.setItem(url+'lastDownloaded',new Date());
console.debug('saving '+url);
that.saveFile(url, data, function(){
callbackNewData(url);
});
});
}else{
callbackOldData(url);
}
}, 200);
};
// Demo usage. BUG FIX: `FileSystem` was an implicit global (now declared),
// and getData() has no return value -- it reports through its callbacks --
// so the original `var data = FileSystem.getData()` always logged undefined.
var FileSystem = new FileSystemInstance();
FileSystem.getData('http://example.com/data.json',
    function (url) { console.log('Downloaded fresh data for', url); },
    function (url) { console.log('Cached data is still fresh for', url); });
Related
In a previous AngularJS app that I am migrating to Angular 8, I had a function that would download functions by binding them and placing them in an array, waiting to be called using a reduce function. For example:
// Collects a download thunk (downloadFile pre-bound to the file) for every
// checked file in $scope.files.
function stageForDownload() {
    $scope.files.forEach(function (candidate) {
        if (!candidate.checked) {
            return;
        }
        $scope.downloadFunctions.push(downloadFile.bind(null, candidate));
    });
}
// Runs the staged download thunks strictly one after another by chaining
// each onto the previous promise, then continues once every file is done.
function downloadStaged() {
    var queue = Promise.resolve();
    $scope.downloadFunctions.forEach(function (nextDownload) {
        queue = queue.then(nextDownload);
    });
    queue.then( /* do something now that all files are downloaded */ );
}
}
This code would essentially resolve promises in a one by one fashion until the list of functions were empty. As in Angular 8, the structure of promises works in a different way (meaning they use the ECMA 6 implementation now) and I am unsure of how to migrate this code. Furthering my confusion, the HttpClient angular provides now returns an Observable which can be subscribed to; and while rsjx's forkJoin() method seems to support exactly what I want to do, it will not accept a list of bound functions.
I simply just need to know when all of the functions are completed, as they are voids that run an export service method to download a file. So I do not necessarily need to return / subscribe to any data from these methods that are being reduced.
Edit:
There are two more functions involved that I forgot to mention. Here is downloadFile, which is responsible for calling the exportService.
// Exports one file via exportService and returns a promise that settles when
// the export finishes. Rewritten to return the service's promise directly
// instead of wrapping it in a redundant $q deferred (the "deferred
// anti-pattern"); the rejection now also carries the original error.
function downloadFile(file) {
    $scope.date = formatDate($scope.datepicker.selectedDate);
    $scope.fileDate = dateToYMD($scope.datepicker.selectedDate);
    return exportService.exportData(
        file.FileNamePrefix + " " + $scope.fileDate + ".xlsx",
        'SOME_API_LOCATION' + $scope.date,
        file
    ).catch(function (error) {
        notificationService.displayError("Internal Error!");
        return $q.reject(error);
    });
}
And here is the exportService itself:
(function (app) {
    // BUG FIX: the directive was misspelled as 'uuse strict', which made it
    // an inert string expression instead of enabling strict mode.
    'use strict';
    app.factory('exportService', exportService);
    exportService.$inject = ['$q', '$http'];
    // Angular factory: POSTs `data` to `url` and saves the binary response
    // client-side, trying msSaveBlob, vendor saveBlob implementations, a
    // simulated click on an <a download> link, and finally window.location,
    // in that order.
    function exportService($q, $http) {
        var service = {
            exportData: exportData,
            // NOTE(review): createFilename is not defined anywhere in this
            // file; referencing it here throws unless it exists in code not
            // shown -- confirm.
            createFilename: createFilename
        };
        function exportData(filename, url, data) {
            var config = {
                responseType: 'arraybuffer'
            };
            return $http.post(url, data, config).then(
                function (response) {
                    var deferred = $q.defer();
                    var data = response.data;
                    var status = response.status;
                    var headers = response.headers();
                    var octetStreamMime = 'application/octet-stream';
                    var success = false;
                    var contentType = headers['content-type'] || octetStreamMime;
                    try {
                        // Try using msSaveBlob if supported
                        var blob = new Blob([data], { type: contentType });
                        if (navigator.msSaveBlob)
                            navigator.msSaveBlob(blob, filename);
                        else {
                            // Try using other saveBlob implementations, if available
                            var saveBlob = navigator.webkitSaveBlob || navigator.mozSaveBlob || navigator.saveBlob;
                            if (saveBlob === undefined) throw "Not supported";
                            saveBlob(blob, filename);
                        }
                        success = true;
                        deferred.resolve();
                    } catch (ex) {
                    }
                    if (!success) {
                        // Get the blob url creator
                        var urlCreator = window.URL || window.webkitURL || window.mozURL || window.msURL;
                        if (urlCreator) {
                            // Try to use a download link
                            var link = document.createElement('a');
                            if ('download' in link) {
                                // Try to simulate a click
                                try {
                                    // Prepare a blob URL
                                    var blob = new Blob([data], { type: contentType });
                                    var url = urlCreator.createObjectURL(blob);
                                    link.setAttribute('href', url);
                                    // Set the download attribute (Supported in Chrome 14+ / Firefox 20+)
                                    link.setAttribute("download", filename);
                                    // Simulate clicking the download link
                                    var event = document.createEvent('MouseEvents');
                                    event.initMouseEvent('click', true, true, window, 1, 0, 0, 0, 0, false, false, false, false, 0, null);
                                    link.dispatchEvent(event);
                                    success = true;
                                    deferred.resolve();
                                } catch (ex) {
                                }
                            }
                            if (!success) {
                                // Fallback to window.location method
                                try {
                                    var blob = new Blob([data], { type: octetStreamMime });
                                    var url = urlCreator.createObjectURL(blob);
                                    window.location = url;
                                    success = true;
                                    deferred.resolve();
                                } catch (ex) {
                                    deferred.reject();
                                }
                            }
                        }
                    }
                    return deferred.promise;
                },
                function (error) {
                    return $q.reject(error);
                });
        }
        // BUG FIX: the factory never returned the service object, so
        // injecting 'exportService' yielded undefined.
        return service;
    }
})(angular.module('app'));
So it appears my problem is not necessarily firing off a dynamically created list of http requests, but rather how to convert promises using $q into ECMA6+ promises.
You can use from to turn Promise into Observable.
const observables: Observable<any>[] = promises.map(promise => from(promise));
After that, you can unleash RxJS. You can use forkJoin to get an Observable of all your promises:
forkJoin(observables).subscribe(files => {
// do things with files
});
The way forkJoin is implemented it will only fire once the observables complete. It will not be an issue here, but if you want to batch requests that do not complete, consider zip.
If you want to do use HttpClient, you would just have a different source of your observables.
const observables: Observable<any>[] = urls.map(url => this.httpClient.get(url));
but the forkJoin would be the same. The key is that forkJoin accepts an array of Observables.
I am developing an extension for Mozilla Firefox, where I override the native listener with my own and monitor all HTTP requests, as shown in the post here:
http://www.softwareishard.com/blog/firebug/nsitraceablechannel-intercept-http-traffic/
I monitor those requests that reside under a specific domain and change their corresponding response body, with the response body I receive from my own XMLHTTPRequest. For text files, everything is working fine.
However, I face a problem while downloading images. For some reason, when I write the incoming data to the stream and then, try to open the image, I get the error that the image contains errors and cannot be displayed.
What am I possibly doing wrong?
Update: I provide some code, in order to clarify my approach.
var xmlRequest = Cc['#mozilla.org/xmlextras/xmlhttprequest;1'].createInstance(Ci.nsIXMLHttpRequest);
...
xmlRequest.open('GET', xmlRequestURL, false);
xmlRequest.send(null);
// nsITraceableChannel listener: swallows the original response body in
// onDataAvailable and substitutes the body fetched by the companion
// XMLHttpRequest in onStopRequest.
function TracingListener() {}
TracingListener.prototype = {
    originalListener: null,
    onStartRequest: function (request, context) {
        // BUG FIX: httpChannel/requestURL were implicit globals across all
        // three handlers; they are now declared locally in each.
        var httpChannel = request.QueryInterface(Ci.nsIHttpChannel);
        var requestURL = httpChannel.URI.spec;
        try {
            this.originalListener.onStartRequest(request, context);
        }
        catch (ex) {
            request.cancel(ex);
        }
    },
    onDataAvailable: function (request, context, inputStream, offset, count) {
        var httpChannel = request.QueryInterface(Ci.nsIHttpChannel);
        var requestURL = httpChannel.URI.spec;
        //Read the contents from the stream, but ignore them.
        // (The unused binaryOutputStream created here in the original has
        // been removed.)
        var binaryInputStream = CCIN('#mozilla.org/binaryinputstream;1', 'nsIBinaryInputStream');
        binaryInputStream.setInputStream(inputStream);
        var data = binaryInputStream.readBytes(count);
        //Delay the call to the original listener.
    },
    onStopRequest: function (request, context, statusCode) {
        var httpChannel = request.QueryInterface(Ci.nsIHttpChannel);
        var requestURL = httpChannel.URI.spec;
        //Check if the response is successful.
        if(xmlRequest.status == 200) {
            try {
                // NOTE(review): responseLen comes back as a string header and
                // may disagree with the actual byte length; per the accepted
                // fix, binary bodies need responseType='arraybuffer' plus a
                // Uint8Array before writeBytes -- confirm.
                var responseLen = xmlRequest.getResponseHeader("Content-Length");
                var response = xmlRequest.response;
                var storageStream = CCIN('#mozilla.org/storagestream;1', 'nsIStorageStream');
                storageStream.init(8192, responseLen, null);
                var binaryOutputStream = CCIN('#mozilla.org/binaryoutputstream;1', 'nsIBinaryOutputStream');
                binaryOutputStream.setOutputStream(storageStream.getOutputStream(0));
                binaryOutputStream.writeBytes(response, responseLen);
                //Make the call to the original listener.
                this.originalListener.onDataAvailable(request, context, storageStream.newInputStream(0), 0, responseLen);
            }
            catch (e) {
                dumpError(e);
            }
            //Pass it to the original listener
            this.originalListener.onStopRequest(request, context, statusCode);
        }
        else {
            console.log('[INFO] onStopRequest not processed, status is ' + xmlRequest.status + ', for URL: ' + requestURL);
        }
    }
};
// Observer for 'http-on-modify-request': attaches a TracingListener to every
// channel whose URL passes mustBeMonitored().
var observer = {
    httpRequestObserver: {
        observe: function (request, aTopic, aData) {
            // BUG FIX: httpChannel/requestURL were implicit globals; declare
            // them locally.
            var httpChannel = request.QueryInterface(Ci.nsIHttpChannel);
            var requestURL = httpChannel.URI.spec;
            if(mustBeMonitored(requestURL)) {
                console.log('[INFO] Observing URL: ' + requestURL);
                if (aTopic == 'http-on-modify-request') {
                    console.log('[INFO] ' + aTopic + ' for URL: ' + requestURL);
                    var newListener = new TracingListener();
                    request.QueryInterface(Ci.nsITraceableChannel);
                    // setNewListener returns the listener it replaces.
                    newListener.originalListener = request.setNewListener(newListener);
                }
            }
        },
        register: function () {
            observerService.addObserver(observer.httpRequestObserver, 'http-on-modify-request', false);
        },
        unregister: function () {
            observerService.removeObserver(observer.httpRequestObserver, 'http-on-modify-request');
        },
        QueryInterface: function (aIID) {
            // (Dead commented-out Cc/Ci fallback removed.)
            if (aIID.equals(Ci.nsIObserver) || aIID.equals(Ci.nsISupports))
                return this;
            throw components.results.NS_NOINTERFACE;
        }
    }
};
Finally, I was able to detect the problem. For the XMLHttpRequest, I had to specify its response type as follows:
xmlRequest.responseType = 'arraybuffer';
Then, the response was stored in a JavaScript ArrayBuffer, which I had to transform into a Uint8Array and then, store it into the stream.
This solution applies for both binary and text files.
I'm trying to upload a file on a server (currently on my local wamp), with a Windows 8 application running with HTML and JavaScript.
So this is my code :
(function () {
    "use strict";
    WinJS.Binding.optimizeBindingReferences = true;
    var app = WinJS.Application;
    var activation = Windows.ApplicationModel.Activation;
    app.onactivated = function (args) {
        if (args.detail.kind === activation.ActivationKind.launch) {
            if (args.detail.previousExecutionState !== activation.ApplicationExecutionState.terminated) {
            } else {
            }
            args.setPromise(WinJS.UI.processAll().then(function () {
                // Let the user pick a file and upload it when the button is
                // clicked.
                document.getElementById('boutonEnvoyer').onclick = function () {
                    var selector = Windows.Storage.Pickers.FileOpenPicker();
                    selector.fileTypeFilter.replaceAll(["*"]);
                    selector.pickSingleFileAsync().then(function (file) {
                        if (!file) {
                            console.log('No file selected');
                            return;
                        }
                        var url = 'http://192.168.10.28/videomaton/index.php';
                        // BUG FIX: the original passed `fichier`, which is
                        // never defined -- the picked file is named `file`.
                        UploadImage(url, file);
                    });
                };
                // Starts a BackgroundUploader transfer of `file` to
                // `urlString`, logging progress percentages along the way.
                var UploadImage = function (urlString, file) {
                    try {
                        var uri = Windows.Foundation.Uri(urlString);
                        var uploader = new Windows.Networking.BackgroundTransfer.BackgroundUploader();
                        //Set a header to be able to save the file
                        uploader.setRequestHeader("Filename", file.name);
                        //Create the operation
                        var upload = uploader.createUpload(uri, file);
                        upload.startAsync().then(
                            function succes(res) {
                                console.log('goodgame');
                            },
                            function error(res) {
                                console.log(res);
                            },
                            function progression(res) {
                                var pourcent = Math.round(res.progress.bytesSent * 100 / res.progress.totalBytesToSend);
                                console.log(pourcent + '%');
                            }
                        );
                    } catch (err) {
                        console.log('try and catch missed');
                    }
                };
            }));
        }
    };
    app.oncheckpoint = function (args) {
    };
    app.start();
})();
This seems to work, but here is my problem: my JavaScript console progresses until 100% and says "goodgame". But on my server side, I get nothing.
Here is my php code :
$arr = get_defined_vars();
ob_start();
var_dump($arr);
$result = ob_get_clean();
file_put_contents('fichier.txt', $result);
And when I look into this "fichier.txt" _Files is empty, but I've a line with HTTP_FILENAME => 7.jpg (which is the name of my image that I'm trying to upload).
Thanks you for your help !
H4mm3R
You need to send a multipart/form-data request.
Try (pseudocode):
// Pseudocode: build a multipart/form-data upload so PHP populates $_FILES
// instead of only receiving the raw body.
var contentPart = new Windows.Networking.BackgroundTransfer.BackgroundTransferContentPart("myField", file.name);
// NOTE(review): the WinRT JavaScript projection exposes camelCase members,
// so this is likely setFile(file) in real code -- confirm.
contentPart.SetFile(file)
var parts = [];
parts.push(contentPart);
var uploader = new Windows.Networking.BackgroundTransfer.BackgroundUploader();
// createUploadAsync (unlike createUpload) accepts the list of content parts.
uploader.createUploadAsync(uri, parts).then(function (upload) {
upload.startAsync().then( ... );
});
Then check for the file in the PHP $_FILES variable.
I want to add some add some extra logic (logging, trace stuff) into the main function of superagent: https://github.com/visionmedia/superagent/blob/master/lib/client.js#L444
So I need to extend superagent, and want to provide the same API, kind of passthrough all functions. I tried to solve it via different mechanisms: Object.create, prototype, deep copy, but I didn't get it working.
I don't want to manipulate the source code of superagent, just require it and wrap it, add my extra logic and call, passthrough the origin function. I think it's kind of aspect oriented.
// edit
So what don't work for me is to bypass the Request constructor:
// (Quoted from superagent's source.) Request constructor: stores method/url,
// mixes in Emitter, and on the 'end' event builds a Response and invokes the
// user callback, converting parser failures into an error callback.
function Request(method, url) {
var self = this;
Emitter.call(this);
this._query = this._query || [];
this.method = method;
this.url = url;
this.header = {};
this._header = {};
this.on('end', function(){
try {
var res = new Response(self);
if ('HEAD' == method) res.text = null;
self.callback(null, res);
} catch(e) {
// Parsing failed: surface a synthetic error carrying the original one.
var err = new Error('Parser is unable to parse the response');
err.parse = true;
err.original = e;
self.callback(err);
}
});
}
I got it almost working with this code:
var superagent = require('superagent');
var uuid = require('uuid');
var map = {};
// Builds a logging wrapper around superagent: every verb helper plus the
// Request end/callback prototype methods are patched to log before
// delegating to the originals.
var init = function() {
    var supderdebug = function(method, url) {
        console.log("pass through: root");
        return superagent.apply(this, arguments);
    };
    var methods = ['get', 'head', 'del', 'patch','post', 'put'];
    methods.forEach(function(method) {
        var origin = superagent[method];
        supderdebug[method] = function(url) {
            console.log("pass through: "+method+"('"+url+"')");
            var request = origin.apply(this, arguments);
            var id = uuid();
            // NOTE(review): entries are added but never read or removed, so
            // this map grows without bound -- confirm it is still needed.
            map[id] = request;
            return request;
        };
    });
    // BUG FIX: _end and _callback were implicit globals; declare them.
    var _end = superagent.Request.prototype.end;
    superagent.Request.prototype.end = function(fn) {
        console.log("pass through: end");
        return _end.apply(this, arguments);
    };
    var _callback = superagent.Request.prototype.callback;
    superagent.Request.prototype.callback = function(err, res) {
        console.log("pass through: callback");
        if (err) {
            console.log(err);
        }
        var response = _callback.apply(this, arguments);
        return response;
    };
    return supderdebug;
};
module.exports.init = init
Usage:
var sd = require("supderdebug").init();
Then I get the same API as superagent provides when I require it: var superagent = require("superagent")
But I cannot do the same with the superagent.Request and sa.Response. It doesn't work when I do:
// BUG FIX: the opening brace was missing after the parameter list. (Note
// that reassigning `constructor` does not hook instantiation -- `new
// superagent.Request(...)` still calls the original function.)
superagent.Request.prototype.constructor = function(method, url) {
// my hook
}
And there is another side effect, it would be nice if there is a solution without this side effect:
When requiring both my library and superagent, the superagent is not the origin anymore, because I overwrite the functions of superagent.
You need to send in the existing function
// Wrap Request#end so extra logic runs before and after delegating to the
// original implementation, which is captured by the IIFE parameter.
superagent.Request.prototype.end = (function (originalEnd) {
    return function () {
        console.log("before end");
        var request = originalEnd.apply(this, arguments);
        console.log("after end");
        return request;
    };
})(superagent.Request.prototype.end);
I'm trying to make this test work, but I couldn't get my head around how to write a test with FileReader. This is my code
// Holds the File object that upload() (defined on the prototype below) will
// read and POST to the server.
function Uploader(file) {
this.file = file;
}
Uploader.prototype = (function() {
    // POSTs the base64 payload extracted from the data URL to the server.
    // NOTE(review): file_data (FormData) is built but never sent -- the ajax
    // call posts `file_contents` directly; confirm which one is intended.
    function upload_file(file, file_contents) {
        var file_data = new FormData();
        file_data.append('filename', file.name);
        file_data.append('mimetype', file.type);
        file_data.append('data', file_contents);
        file_data.append('size', file.size);
        $.ajax({
            url: "/upload/file",
            type: "POST",
            data: file_contents,
            contentType: file.type,
            success: function(){
                // $("#thumbnail").attr("src", "/upload/thumbnail");
            },
            error: function(){
                alert("Failed");
            },
            xhr: function() {
                // (myXhr was an implicit global; now declared.)
                var myXhr = $.ajaxSettings.xhr();
                if(myXhr.upload){
                    myXhr.upload.addEventListener('progress', showProgress, false);
                } else {
                    console.log("Upload progress is not supported.");
                }
                return myXhr;
            }
        });
    }
    return {
        upload : function() {
            var self = this,
                reader = new FileReader(),
                file_content = {};
            reader.onload = function(e) {
                // Strip the "data:<mime>;base64," prefix from the data URL.
                file_content = e.target.result.split(',')[1];
                upload_file(self.file, file_content);
            };
            // BUG FIX: the reader was never started, so onload (and hence
            // the upload) never fired -- this is why the test "never gets to
            // reader.onload".
            reader.readAsDataURL(self.file);
        }
    };
})();
And this is my test
// Jasmine spec: stubs $.ajax and checks the upload URL. As written it fails
// because upload() depends on an asynchronous FileReader load event that
// never fires for the fake file.
describe("Uploader", function() {
it("should upload a file successfully", function() {
spyOn($, "ajax");
var fakeFile = {};
var uploader = new Uploader(fakeFile);
uploader.upload();
expect($.ajax.mostRecentCall.args[0]["url"]).toEqual("/upload/file");
})
});
But it never gets to reader.onload.
The problem here is the use of reader.onload which is hard to test. You could use reader.addEventListener instead so you can spy on the global FileReader object and return a mock:
// Jasmine 1.x: replace the global FileReader with a stub whose
// addEventListener is a spy, so the test can capture the 'load' handler.
eventListener = jasmine.createSpy();
spyOn(window, "FileReader").andReturn({
addEventListener: eventListener
})
then you can fire the onload callback by yourself:
// Verify the handler was registered for 'load', then invoke it manually
// with a fake event carrying the result under test.
expect(eventListener.mostRecentCall.args[0]).toEqual('load');
eventListener.mostRecentCall.args[1]({
target:{
result:'the result you wanna test'
}
})
This syntax changed in 2.0. Code below gives an example based on Andreas Köberle's answer but using the new syntax
// Jasmine 2.0 version of the FileReader mock: createSpy / and.returnValue /
// calls.mostRecent() replace the 1.x andReturn / mostRecentCall names.
// create a mock object, its a function with some inspection methods attached
var eventListener = jasmine.createSpy();
// this is going to be returned when FileReader is instantiated
var dummyFileReader = { addEventListener: eventListener };
// pipe the dummy FileReader to the application when FileReader is called on window
// this works because window.FileReader() is equivalent to new FileReader()
spyOn(window, "FileReader").and.returnValue(dummyFileReader)
// your application will do something like this ..
var reader = new FileReader();
// .. and attach the onload event handler
reader.addEventListener('load', function(e) {
// obviously this wouldnt be in your app - but it demonstrates that this is the
// function called by the last line - onloadHandler(event);
expect(e.target.result).toEqual('url');
// jasmine async callback
done();
});
// if addEventListener was called on the spy then mostRecent() will be an object.
// if not it will be null so careful with that. the args array contains the
// arguments that addEventListener was called with. in our case arg[0] is the event name ..
expect(eventListener.calls.mostRecent().args[0]).toEqual('load');
// .. and arg[1] is the event handler function
var onloadHandler = eventListener.calls.mostRecent().args[1];
// which means we can make a dummy event object ..
var event = { target : { result : 'url' } };
// .. and call the applications event handler with our test data as if the user had
// chosen a file via the picker
onloadHandler(event);
I also faced similar problem and was able to achieve it without use of addeventlistener. I had used onloadend, so below is what I did.
My ts file had below code:-
// Production code under test: reads a Blob as a data URL and handles the
// encoded result in onloadend.
let reader = new FileReader();
reader.onloadend = function() {
let dataUrl = reader.result;
// Some working here
};
reader.readAsDataURL(blob);
My spec file (test) case code :-
// Spec: a fake FileReader whose readAsDataURL synchronously assigns `result`
// and invokes onloadend, so the production handler runs without real I/O.
let mockFileReader = {
result:'',
readAsDataURL:(blobInput)=> {
console.log('readAsDataURL');
},
onloadend:()=> {
console.log('onloadend');
}
};
// Returned whenever production code does `new FileReader()`.
spyOn<any>(window, 'FileReader').and.returnValue(mockFileReader);
spyOn<any>(mockFileReader, 'readAsDataURL').and.callFake((blobInput)=> {
// debug your running application and assign to "encodedString" whatever
//value comes actually after using readAsDataURL for e.g.
//"data:*/*;base64,XoteIKsldk......"
mockFileReader.result = encodedString;
mockFileReader.onloadend();
});
This way you have mocked the FileReader object and returned a fake call to your own "readAsDataURL". And thus now when your actual code calls "reasAsDataURL" your fake function is called in which you are assigning an encoded string in "result" and calling "onloadend" function which you had already assigned a functionality in your code (.ts) file. And hence it gets called with expected result.
Hope it helps.
I think the best way is to use the real FileReader (don't mock it), and pass in a real File or Blob. This improves your test coverage and makes your tests less brittle.
If your tests don't run in IE, you can use the File constructor, e.g.
// A real File object -- no mocking needed; FileReader can read it directly.
const fakeFile = new File(["some contents"], "file.txt", {type: "text/plain"});
If you need to be compatible with IE, you can construct a Blob and make it look like a file:
// BUG FIX: Blob#type is a read-only accessor, so assigning it after
// construction is silently ignored -- pass it via the constructor options
// instead. `name` is not a Blob property, so a plain assignment does attach
// it as an own property.
const fakeFile = new Blob(["some contents"], { type: "text/plain" });
fakeFile.name = "file.txt";
The FileReader can read either of these objects so there is no need to mock it.
I found it easiest to do the following:
1. Mock the blob file.
2. Run reader.onload while in the test environment.
As a result, I do not mock FileReader at all.
// CONTROLLER
// Reads the first picked file, base64-encodes its contents, and publishes
// {name, size, base64} on $scope.myFile; failures set $scope.error instead.
$scope.handleFile = function (e) {
var f = e[0];
$scope.myFile = {
name: "",
size: "",
base64: ""
};
var reader = new FileReader();
reader.onload = function (e) {
try {
var buffer = e.target.result;
$scope.myFile = {
name: f.name,
size: f.size,
// NOTE(review): XLSX.arrayBufferToBase64 is an external helper not shown
// here -- presumably it base64-encodes the ArrayBuffer; confirm.
base64: XLSX.arrayBufferToBase64(buffer)
};
$scope.$apply();
} catch (error) {
$scope.error = "ERROR!";
$scope.$apply();
}
};
reader.readAsArrayBuffer(f);
//run in test env
// Deliberate test hook: under Jasmine, invoke onload synchronously so the
// spec does not have to mock FileReader or wait for async I/O.
if ( typeof jasmine == 'object') {reader.onload(e)}
}
//JASMINE TEST
// Drives handleFile with a real Blob; the `typeof jasmine` hook inside
// handleFile fires reader.onload synchronously, so FileReader is not mocked.
it('handleFile 0', function () {
var fileContentsEncodedInHex = ["\x45\x6e\x63\x6f\x64\x65\x49\x6e\x48\x65\x78\x42\x65\x63\x61\x75\x73\x65\x42\x69\x6e\x61\x72\x79\x46\x69\x6c\x65\x73\x43\x6f\x6e\x74\x61\x69\x6e\x55\x6e\x70\x72\x69\x6e\x74\x61\x62\x6c\x65\x43\x68\x61\x72\x61\x63\x74\x65\x72\x73"];
var blob = new Blob(fileContentsEncodedInHex);
// NOTE(review): Blob#type and Blob#size are read-only accessors, so these
// two assignments are likely silently ignored; only `name` sticks. Confirm
// the code under test does not depend on them.
blob.type = 'application/zip';
blob.name = 'name';
blob.size = 11111;
var e = {0: blob, target: {result: {}}};
$scope.handleFile(e);
expect($scope.error ).toEqual("");
});
I struggled to figure out how to test onloadend when it gets called from readAsDataURL.
Here is a dump of what I ended up with.
Production code:
loadFileDataIntoChargeback(tempFileList) {
var fileNamesAndData = [];
for (var i = 0, f; f = tempFileList[i]; i++) {
let theFile = tempFileList[i];
var reader = new FileReader();
reader.onloadend = ((theFile) => {
return (fileData) => {
var insertionIndex = this.chargeback.fileList.length;
this.chargeback.fileList.push({ FileName: theFile.name, Data: fileData.target.result, FileType: theFile.type });
this.loadFilePreviews(theFile, insertionIndex);
}
})(f);
reader.readAsDataURL(f);
}
this.fileInputPath = "";
}
Test code:
// Spec: fake FileReader whose readAsDataURL synchronously fires onloadend
// with a canned data URL, then asserts the preview loader was reached.
describe('when the files are loaded into the chargeback', () => {
it('loads file previews', () => {
let mockFileReader = {
target: { result: '' },
readAsDataURL: (blobInput) => {},
onloadend: () => {}
};
spyOn(chargeback, "loadFilePreviews");
// Every `new FileReader()` in production code now yields the mock.
spyOn(window, 'FileReader').and.returnValue(mockFileReader);
spyOn(mockFileReader, 'readAsDataURL').and.callFake((blobInput) => {
mockFileReader.onloadend({ target: { result: "data:image/jpeg;base64,/9j/4QAYRXh" } });
});
var readFileList = chargeback.getArrayFromFileInput([getImageFile1()]);
chargeback.loadFileDataIntoChargeback(readFileList);
expect(chargeback.loadFilePreviews).toHaveBeenCalled();
});
});