I am trying to write a newer watch module that uses the fs.watch method instead of the watchFile approach.
So far, it works beautifully, but only when I run it outside of mocha. I can't figure out why my unit test is throwing a tantrum; maybe someone here can?
Here is my class code:
/**
* requirements
*/
var fs, path, events;
fs = require('fs');
path = require('path');
events = require('events');
/**
* private
*/
var Monitor = function(directory, options) {
this.directory = directory;
this.options = options || {};
(this.options.lazy && this.empty()) || this.walk(this.directory);
this.watch(this.directory);
};
Monitor.prototype = new events.EventEmitter();
Monitor.prototype.watch = function(directory, stats) {
var stats = stats || {};
if (!this.directories[directory]) {
var w = fs.watch(directory, this.options, this.justlookatit.bind(this));
}
this.directories[directory] = { 'stats': stats, 'w': w };
};
Monitor.prototype.directories = function() {
if (!Object.keys(this.directories).length) {
this.walk(this.directory);
}
return this.directories;
};
Monitor.prototype.files = function() {
if (!Object.keys(this.files).length) {
this.walk(this.directory);
}
return this.files;
};
Monitor.prototype.unwatch = function() {
if (!Object.keys(this.directories).length) {
for (var dir in this.directories) {
dir.w.close();
}
}
};
Monitor.prototype.empty = function() {
this.unwatch();
this.files = {};
this.directories = {};
};
Monitor.prototype.walk = function(directory) {
var monitor = this;
this.empty();
fs.readdir(directory, function(err, files) {
if (err) return;
for (var file in files) {
var fullname = path.resolve(files[file]);
if (!monitor.options.filter || monitor.options.filter(fullname)) {
fs.stat(fullname, function(err, stats) {
if (err) return;
if (stats.isDirectory()) {
monitor.walk(fullname);
monitor.watch(fullname, stats);
} else {
monitor.files[fullname] = stats;
}
});
}
}
});
};
Monitor.prototype.justlookatit = function(action, file) {
var monitor = this;
var fullname = path.resolve(file);
if (this.options.filter && !this.options.filer(fullname)) return;
fs.exists(fullname, function(exists) {
if (exists) {
fs.stat(fullname, function(err, stats) {
if (stats.isDirectory()) {
monitor.watch(fullname, stats);
} else {
if (monitor.files[fullname]) {
if (stats.mtime.getTime() > monitor.files[fullname].mtime.getTime()) {
monitor.emit('modified', fullname, stats);
}
} else {
monitor.emit('added', fullname, stats);
}
monitor.files[fullname] = stats;
}
});
} else {
if (monitor.files[fullname]) {
delete monitor.files[fullname];
monitor.emit('deleted', fullname);
} else if (monitor.directories[fullname]) {
monitor.directories[fullname].w.close();
delete monitor.directories[fullname];
}
}
});
};
/**
* exports
*/
exports.start = function(directory, options) {
return new Monitor(directory, options);
};
Here is my working external test code:
var watch = require("./watch.js");
var fs = require('fs');
monitor = watch.start(__dirname);
monitor.on('added', function(file, stats) {
console.log("Caught Added: " + file);
});
monitor.on('modified', function(file, stats) {
console.log("Caught Modified: " + file);
});
monitor.on('deleted', function(file) {
console.log("Caught deleted: " + file);
});
// try creating a file immediately
fs.openSync('v.md', 'w');
The first test file runs perfectly fine, and I've tried both openSync and open. Finally, here is a version of the same test code, wrapped in a mocha unit test which is timing out:
/**
* requirements
*/
var watch, Q, fs, path, mocha, chai, assert;
watch = require('../lib/watch.js');
Q = require('q');
fs = require('fs');
path = require('path');
mocha = require('mocha');
chai = require('chai');
assert = chai.assert;
/**
* variables
*/
var watch_directory = path.join(__dirname, './watch');
/**
* tests
*/
describe('test watch', function() {
it('should create a monitor and run callbacks after fs changes', function(done) {
// I had planned to implement promises that chained the three callbacks
// but couldn't get one of them working in general
var added = function(file, stats) {
console.log("added");
done();
};
var modified = function(file, stats) {
console.log("modified");
};
var deleted = function(file, stats) {
console.log("deleted");
};
// create our service
var monitor = watch.start(watch_directory);
// assert it is defined
assert.isDefined(monitor);
// establish a listener
monitor.on('added', added);
monitor.on('modified', modified);
monitor.on('deleted', deleted);
// here is a file name using the current date to prevent duplication during tests
var file = path.join(watch_directory, (new Date()).getTime() + '.md');
// let's create the file, then delete it
fs.open(file, 'w+', function(err, fileDescriptor) {
// this prints before console output from the watch.js's `justlookatit` method
console.log(err);
console.log("writing to file");
// we probably don't want to try closing the fileDescriptor if the open failed
if (err) return;
// close the file descriptor
fs.close(fileDescriptor, function() {
// delete the file we just created
// fs.unlink(file, function() { /* not a big deal */ });
});
});
// modify a known-existing test file
fs.open('test.md', 'w+', function() {/* we don't care about this */});
})
});
I checked with console.log(fullname) inside the justlookatit method in the watch code, and it spits out the correct file name, matching the one generated by the unit test.
However, fs.exists then returns false. As I understand it, that means the file system is notifying me about a file before it exists, which doesn't really make sense. So I tried adding an additional delay by wrapping my fs.exists call in a setTimeout, and that didn't change the results. I have also tried using both openSync and existsSync, and that made no difference.
I'm stumped, does anyone have any ideas why the mocha code isn't working?
So, the solution was to go for a walk. I came back, looked at the code again and figured out the cause of the problem with mocha, and also identified many other bugs.
The problem was a lack of context. The justlookatit method does not know which directory it is watching; in the test.js scenario it is watching the current directory, while the mocha test is watching a sub-directory.
path.resolve was receiving only the bare file name, not the directory, so it resolved the name against the process's working directory (the directory test.js, or watch_test.js for mocha, was run from). It then failed to locate any of the files in the mocha test case because they were all one level below that directory.
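For illustration, here is a minimal standalone sketch (not the exact code I ended up with) of resolving the event's bare file name against the directory being watched instead of against the working directory:
var fs = require('fs');
var path = require('path');
// Bind the watched directory into the fs.watch callback so the event's bare
// file name is resolved against it rather than against process.cwd().
function watchDirectory(directory, onChange) {
  return fs.watch(directory, function(action, file) {
    // `file` is only a base name (and may be missing on some platforms)
    var fullname = path.resolve(directory, file || '');
    onChange(action, fullname);
  });
}
// usage
watchDirectory(__dirname, function(action, fullname) {
  console.log(action, fullname);
});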
I won't go into detail about all the other bugs, but I may come back and post the repository link when I get to a point that I want to push it online.
You're missing a call to the done() callback at the end of your test. Unless you call that callback, Mocha will time out every time.
I was using cpy with a globbing pattern to find and copy all the files in src/main/css and place them in ./dist.
However, now I also have subdirectories below src/main/css (for example src/main/css/margins/index.css) and cpy does not include these when copying the files.
Is there an API in Node (fs or path?) that handles this case, or does anyone know of a handy package?
Try this.
const fs = require('fs');
const path = require('path');
var mkdir = function (dir) {
// making directory without exception if exists
try {
fs.mkdirSync(dir, 0755);
} catch (e) {
if (e.code != "EEXIST") {
throw e;
}
}
};
var copy = function (src, dest) {
var readS = fs.createReadStream(src);
var writeS = fs.createWriteStream(dest);
readS.pipe(writeS);
readS.on("end", function () {
// Operation done
});
};
var copyDir = function (src, dest) {
mkdir(dest);
var files = fs.readdirSync(src);
for (var i = 0; i < files.length; i++) {
var current = fs.lstatSync(path.join(src, files[i]));
if (current.isDirectory()) {
copyDir(path.join(src, files[i]), path.join(dest, files[i]));
} else if (current.isSymbolicLink()) {
var symlink = fs.readlinkSync(path.join(src, files[i]));
fs.symlinkSync(symlink, path.join(dest, files[i]));
} else {
copy(path.join(src, files[i]), path.join(dest, files[i]));
}
}
};
copyDir('./src', './dest');
This piece of code is inspired by https://gist.github.com/tkihira/3014700. I have made some modifications to the original code to get it working, as util.pump is obsolete now.
I ended up using copy-dir:
require('copy-dir').sync(PLI.src.main.css, PLI.DIST);
If anyone has a way to do the same thing with Node while avoiding dependencies, please do tell.
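For reference, newer Node versions (v16.7 and later) ship a recursive copy in the fs module itself, which avoids the dependency; a minimal sketch, assuming such a Node version:
const fs = require('fs');
// Copy src/main/css, including its subdirectories, into ./dist.
// The { recursive: true } option makes fs.cpSync walk nested directories.
fs.cpSync('src/main/css', './dist', { recursive: true });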
I am quite new to jQuery, Node.js, PhantomJS and JavaScript in general and am having a few problems wrapping my head around all of it.
I'm working on a simple script using jquery.go.js (https://github.com/travist/jquery.go.js) that should log me in to a site, then access a specific resource and parse the response (JSON) that it returns.
So far I managed to get the login working, but am really lost now for the second part.
What I have so far is based on https://github.com/travist/makemeasandwich.js
var async = require('async'),
$ = require('jquerygo'),
path = require('path'),
fs = require('fs');
// Add some default configs.
$.config.site = 'https://host.com';
$.config.addJQuery = false;
var login = function(done) {
async.series([
$.go(false, 'visit', '/login'),
$.go(false, 'waitForElement', '#email'),
debugCapture('login1'),
$('#email').go('val', 'email@example.com'),
$('#password').go('val', 'securepassword'),
debugCapture('login2'),
$("form").go('attr','id','validForm'),
$('#validForm').go('submit'),
sleep(3000),
debugCapture('login3'),
print('Successfully logged in.'),
sleep(3000),
], done);
};
var viewJournalEntries = function(done){
$.config.addJQuery = true;
async.series([
$.go(false, 'visit', '/api/journals/show/546'),
$.getPage(function(page) {
// Is this right, what do I need to do here??
}),
debugCapture('step2'),
],done);
}
var debugCapture = function(fileName) {
// if (config.get('debug')) {
return capture(fileName);
// }
return function(done) { done(); }
};
/**
* Method to capture and ensure the screenshots directory exists.
*/
var capture = function(fileName) {
// Return the async function.
return function(done) {
// The directory to store the screenshots.
var dir = __dirname + '/screenshots-ls';
// Check that the directory exists.
fs.exists(dir, function(exists) {
if (exists) {
$.capture(dir + '/' + fileName + '.png', done);
}
else {
fs.mkdir(dir, function(err) {
if (err) return done(err);
$.capture(dir + '/' + fileName + '.png', done);
});
}
});
}
}
/**
* Helper to print something when it is executed.
*
* @param {type} text
* @returns {unresolved}
*/
var print = function(text) {
return function(done) {
console.log(text);
done();
};
};
/**
* Helper function to take a pause...
*
* @param {type} time
* @returns {unresolved}
*/
var sleep = function(time) {
return function(done) {
setTimeout(done, time);
};
};
async.series([
login,
viewJournalEntries,
], function() {
$.close();
});
The login works like a charm, and I can call the viewJournalEntries function, which requests the URL /api/journals/show/546 and returns a typical JSON string like this:
{"data":{"id":546,"user_id":1,[etc...]
The screen capture proves that it works, but I am just not sure how to proceed to get the JSON into a variable that I can parse.
Any help/directions are appreciated.
Thanks,
Ben
I've had no trouble testing my own route handlers but in this case I want to test express's static handler. I can't for the life of me figure out why it's hanging. Clearly there's some callback I'm missing or some event I need to emit.
I tried to make the smallest example I could.
var events = require('events');
var express = require('express');
var stream = require('stream');
var util = require('util');
function MockResponse(callback) {
stream.Writable.call(this);
this.headers = {};
this.statusCode = -1;
this.body = undefined;
this.setHeader = function(key, value) {
this.headers[key] = value;
}.bind(this);
this.on('finish', function() {
console.log("finished response");
callback();
});
};
util.inherits(MockResponse, stream.Writable);
MockResponse.prototype._write = function(chunk, encoding, done) {
if (this.body === undefined) {
this.body = "";
}
this.body += chunk.toString(encoding !== 'buffer' ? encoding : undefined);
done();
};
function createRequest(req) {
var emitter = new events.EventEmitter();
req.on = emitter.on.bind(emitter);
req.once = emitter.once.bind(emitter);
req.addListener = emitter.addListener.bind(emitter);
req.emit = emitter.emit.bind(emitter);
return req;
};
describe('test', function() {
var app;
before(function() {
app = express();
app.use(express.static(__dirname));
});
it('gets test.js', function(done) {
var req = createRequest({
url: "http://foo.com/test.js",
method: 'GET',
headers: {
},
});
var res = new MockResponse(responseDone);
app(req, res);
function responseDone() {
console.log("done");
done();
}
});
});
Setup:
mkdir foo
cd foo
mkdir test
cat > test/test.js # copy and paste code above
^D
npm install express
npm install mocha
node node_modules/mocha/bin/mocha --recursive
It just times out.
What am I missing?
I also tried making the request a Readable stream. No change:
var events = require('events');
var express = require('express');
var stream = require('stream');
var util = require('util');
function MockResponse(callback) {
stream.Writable.call(this);
this.headers = {};
this.statusCode = -1;
this.body = undefined;
this.setHeader = function(key, value) {
this.headers[key] = value;
}.bind(this);
this.on('finish', function() {
console.log("finished response");
callback();
});
};
util.inherits(MockResponse, stream.Writable);
MockResponse.prototype._write = function(chunk, encoding, done) {
if (this.body === undefined) {
this.body = "";
}
this.body += chunk.toString(encoding !== 'buffer' ? encoding : undefined);
done();
};
function MockMessage(req) {
stream.Readable.call(this);
var self = this;
Object.keys(req).forEach(function(key) {
self[key] = req[key];
});
}
util.inherits(MockMessage, stream.Readable);
MockMessage.prototype._read = function() {
this.push(null);
};
describe('test', function() {
var app;
before(function() {
app = express();
app.use(express.static(__dirname));
});
it('gets test.js', function(done) {
var req = new MockMessage({
url: "http://foo.com/test.js",
method: 'GET',
headers: {
},
});
var res = new MockResponse(responseDone);
app(req, res);
function responseDone() {
console.log("done");
done();
}
});
});
I've still been digging. Looking inside serve-static I see it creates a Readable stream by calling fs.createReadStream. It effectively does:
var s = fs.createReadStream(filename);
s.pipe(res);
So trying that myself works just fine
it('test stream', function(done) {
var s = fs.createReadStream(__dirname + "/test.js");
var res = new MockResponse(responseDone);
s.pipe(res);
function responseDone() {
console.log("done");
done();
}
});
I thought maybe it was something about express waiting for the input stream to finish, but that doesn't seem to be it either. If I pipe the mock input stream into the response it works just fine:
it('test msg->res', function(done) {
var req = new MockMessage({});
var res = new MockResponse(responseDone);
req.pipe(res);
function responseDone() {
console.log("done");
done();
}
});
Any insight into what I might be missing would be helpful.
Note: while suggestions for 3rd-party mocking libraries are appreciated, I'm still really looking to understand what I'm missing so I can do it myself. Even if I eventually switch to some library, I still want to know why this isn't working.
I found two issues that prevent the finish callback from being executed.
1. serve-static uses the send module to create a file read stream from the path and pipe it to the res object. But send uses the on-finished module, which checks whether the finished attribute is set to false on the response object; otherwise it destroys the file read stream, so the stream never gets a chance to emit a data event.
2. express initialization overwrites the response object's prototype, so default stream methods like end() are replaced by the http response prototype:
exports.init = function(app){
return function expressInit(req, res, next){
...
res.__proto__ = app.response;
..
};
};
To prevent this, I added another middleware right before the static middleware to reset the prototype back to MockResponse.prototype:
app.use(function(req, res, next){
res.__proto__ = MockResponse.prototype; //change it back to MockResponse prototype
next();
});
Here are the changes made to make it work with MockResponse:
...
function MockResponse(callback) {
...
this.finished = false; // so `on-finished` module doesn't emit finish event prematurely
//required because of 'send' module
this.getHeader = function(key) {
return this.headers[key];
}.bind(this);
...
};
...
describe('test', function() {
var app;
before(function() {
app = express();
//another middleware to reset the res object
app.use(function(req, res, next){
res.__proto__ = MockResponse.prototype;
next();
});
app.use(express.static(__dirname));
});
...
});
EDIT:
As @gman pointed out, it is possible to use direct properties instead of prototype methods. In that case the extra middleware to overwrite the prototype isn't necessary:
function MockResponse(callback) {
...
this.finished = false; // so `on-finished` module doesn't emit finish event prematurely
//required because of 'send' module
this.getHeader = function(key) {
return this.headers[key];
}.bind(this);
...
//using direct property for _write, write, end - since all these are changed when prototype is changed
this._write = function(chunk, encoding, done) {
if (this.body === undefined) {
this.body = "";
}
this.body += chunk.toString(encoding !== 'buffer' ? encoding : undefined);
done();
};
this.write = stream.Writable.prototype.write;
this.end = stream.Writable.prototype.end;
};
It appears my answer is not complete. For some reason the app works only if the file is not found. The first thing to do to debug is to run the following in your shell (or cmd):
export DEBUG=express:router,send
then run the test; you'll get more info.
Meanwhile I am still looking into this; for now, ignore my answer below.
----------- ignore this till I verify that it does work -----------
It seems like express static does not favor the absolute path you give it (__dirname).
Try:
app.use(express.static('.'));
and it will work. Note that your current dir for the mocha runner is 'test/'.
I have to admit this is quite a mystery. I tried giving it the full path by doing:
app.use(express.static(__dirname + '/../test'));
but still it didn't work. Even specifying a full path did not solve this. Strange.
In my routes.js file, I've this:
var pages = require('./pages')();
...
app.get('/:page', function(req, res, next) {
var p = req.params.page;
if (p in pages) {
res.render('page', pages[p]);
} else {
next();
}
});
pages.js:
module.exports = function() {
var fs = require('fs'),
ret = [],
dir = './pages',
files = fs.readdirSync(dir);
files.forEach(function(file) {
var text = fs.readFileSync(dir + '/' + file, 'utf-8'),
fileName = file.substr(0, file.lastIndexOf('.'));
ret[fileName] = {content: text};
});
return ret;
};
This code runs only once, when I run node. This is how I can make it async:
require('./pages')(function(pages) {
app.get('/:page', function(req, res, next) {
var p = req.params.page;
if (p in pages) {
res.render('page', pages[p]);
} else {
next();
}
});
});
pages.js:
module.exports = function(callback) {
var fs = require('fs'),
ret = [],
dir = './pages';
fs.readdir(dir, function(err, files) {
if (err) throw err;
files.forEach(function(file, i) {
fs.readFile(dir + '/' + file, 'utf-8', function(err, text) {
if (err) throw err;
var fileName = file.substr(0, file.lastIndexOf('.'));
ret[fileName] = {content: text};
if ( i === (files.length - 1) ) callback(ret);
});
});
});
};
Assuming the total pages will not exceed 1 MB in size, I can cache the text in memory indefinitely without node crashing due to running out of memory.
Should I be using the async code?
According to what I've learnt, the async version will make node start listening on localhost faster, but /:page URLs will only work when the files have been loaded into memory.
Is the async code in the right pattern?
What if I need to reuse the pages object in another file? Right now it is only accessible in routes.js.
Can I rewrite pages.js to execute only once like this:
var ret = [];
module.exports = function(callback) {
var fs = require('fs'),
dir = './pages';
if (ret.length < 1) {
fs.readdir(dir, function(err, files) {
if (err) throw err;
files.forEach(function(file, i) {
fs.readFile(dir + '/' + file, 'utf-8', function(err, text) {
if (err) throw err;
var fileName = file.substr(0, file.lastIndexOf('.'));
ret[fileName] = {content: text};
if ( i === (files.length - 1) ) callback(ret);
});
});
});
} else {
callback(ret);
}
};
What if require('./pages')(function(pages) {}) is called multiple times together? Is there a chance of the if condition failing? I can't wrap my mind around this.
Should I be using the async code?
If you want to, why not? But there's no real need for it; synchronous IO on startup is fine. require does it as well.
Is the async code in the right pattern?
No. It does invoke callback once for each directory. Calling app.get('/:page', …) multiple times is not what you want.
What if I need to reuse the pages object in another file? Right now it is only accessible in routes.js.
You could pass it from routes.js to the other modules. Or just rewrite pages.js to store it statically and execute the async things only once, so that you can require it multiple times.
What if require('./pages')(function(pages) {}) is called multiple times together? Is there a chance of the if condition failing?
Yes, it will most certainly fail, because you are populating ret only asynchronously.
I can't wrap my mind around this.
Use promises. They act as asynchronous, immutable values, just what you need here. They will guarantee that callbacks are only invoked once, that every callback is invoked with the same ret value, and they provide many more useful things (like managing the parallel file reads for you).
You'll want to export a promise from pages.js.
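For example, a minimal sketch of pages.js exporting a single promise (not the exact code, and assuming a Node version with fs.promises; on older Node you could build the same thing with Q):
var fs = require('fs').promises;
var path = require('path');
var dir = path.join(__dirname, 'pages');
// The promise is created once, when the module is first required, so every
// caller gets the same cached pages object and each file is read only once.
module.exports = fs.readdir(dir).then(function(files) {
  return Promise.all(files.map(function(file) {
    return fs.readFile(path.join(dir, file), 'utf-8').then(function(text) {
      return { name: file.substr(0, file.lastIndexOf('.')), content: text };
    });
  }));
}).then(function(entries) {
  var pages = {};
  entries.forEach(function(entry) {
    pages[entry.name] = { content: entry.content };
  });
  return pages;
});
routes.js would then register its route inside the resolved promise:
require('./pages').then(function(pages) {
  app.get('/:page', function(req, res, next) {
    var p = req.params.page;
    if (p in pages) {
      res.render('page', pages[p]);
    } else {
      next();
    }
  });
});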
I have a web application that my client uses as a cash register.
What I need to do is create a local file, as the cash register's software needs to read from that file in order to print.
Until now I was using this code:
netscape.security.PrivilegeManager.enablePrivilege("UniversalXPConnect");
var file = Components.classes["@mozilla.org/file/local;1"].createInstance(Components.interfaces.nsILocalFile);
file.initWithPath(filePath);
Unfortunately, with the latest version of Firefox this isn't working anymore, so I was told that I need an add-on to create the file. I've tried to develop an add-on (I don't know how successfully) and I have main.js looking like this:
var FileManager =
{
Write:
function (File, Text)
{
if (!File) return;
const unicodeConverter = Components.classes["@mozilla.org/intl/scriptableunicodeconverter"]
.createInstance(Components.interfaces.nsIScriptableUnicodeConverter);
unicodeConverter.charset = "UTF-8";
Text = unicodeConverter.ConvertFromUnicode(Text);
const os = Components.classes["@mozilla.org/network/file-output-stream;1"]
.createInstance(Components.interfaces.nsIFileOutputStream);
os.init(File, 0x02 | 0x08 | 0x20, 0700, 0);
os.write(Text, Text.length);
os.close();
},
Read:
function (File)
{
if (!File) return;
var res;
const is = Components.classes["@mozilla.org/network/file-input-stream;1"]
.createInstance(Components.interfaces.nsIFileInputStream);
const sis = Components.classes["@mozilla.org/scriptableinputstream;1"]
.createInstance(Components.interfaces.nsIScriptableInputStream);
is.init(File, 0x01, 0400, null);
sis.init(is);
res = sis.read(sis.available());
is.close();
return res;
},
};
Any ideas how I should use main.js? Where do I find it after the add-on is installed?
I need to use something like this: FileManager.Write(path, text).
Sorry about the super-late reply.
If I understand your question correctly, you have a P.O.S application that runs in Firefox talking to some sort of local webserver via HTTP. The client-side JavaScript of your application needs to be able to read & write files from the local filesystem of the browser's PC.
If that's correct, then you can do so as follows. You'll need to create a Firefox addon, the simplest kind of which is called a "bootstrapped" (or "restartless") addon.
A restartless addon consists of two files:
bootstrap.js (The JavaScript file containing your 'privileged' code)
install.rdf (an XML file describing your addon to Firefox)
To build the addon, simply place both files inside the top-level (no folders!) of a ZIP file with the file extension .xpi. To install the addon, navigate to about:addons then from the tools menu, click Install from file, find your XPI, open it, then after a short delay choose Install.
In install.rdf put something like this:
<?xml version="1.0"?>
<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
<Description about="urn:mozilla:install-manifest">
<em:id>youraddonname@yourdomain</em:id>
<em:type>2</em:type>
<em:name>Name of your addon</em:name>
<em:version>1.0</em:version>
<em:bootstrap>true</em:bootstrap>
<em:description>Describe your addon.</em:description>
<em:creator>Your name</em:creator>
<!-- Firefox Desktop -->
<em:targetApplication>
<Description>
<em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
<em:minVersion>4.0.*</em:minVersion>
<em:maxVersion>29.0.*</em:maxVersion>
</Description>
</em:targetApplication>
</Description>
</RDF>
You need to implement two mandatory JavaScript functions in the bootstrap.js:
startup() - called when you install the addon, and when your browser starts up.
shutdown() - called when you uninstall the addon, and when your browser shuts down.
You should call the rest of your 'privileged' code in startup(). For hygiene, you can (and probably should) also implement install() and uninstall() functions.
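If you do add install() and uninstall(), they take the same (data, reason) arguments as startup() and shutdown(); empty stubs like this sketch are enough when there is nothing special to do in them:
// Optional bootstrap.js entry points; Firefox calls them with (data, reason).
function install(data, reason) {
  // e.g. set up defaults on first install
}
function uninstall(data, reason) {
  // e.g. clean up anything the addon created on disk
}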
Start by implementing the following code in bootstrap.js:
const Cc = Components.classes;
const Ci = Components.interfaces;
let consoleService = Cc["@mozilla.org/consoleservice;1"]
.getService(Ci.nsIConsoleService);
let wm = Cc["@mozilla.org/appshell/window-mediator;1"]
.getService(Ci.nsIWindowMediator);
function LOG(msg) {
consoleService.logStringMessage("EXTENSION: "+msg);
}
function startup() {
try {
LOG("starting up...");
let windows = wm.getEnumerator("navigator:browser");
while (windows.hasMoreElements()) {
let chromeWindow = windows.getNext().QueryInterface(Ci.nsIDOMWindow);
WindowListener.setupBrowserUI(chromeWindow);
}
wm.addListener(WindowListener);
LOG("done startup.");
} catch (e) {
LOG("error starting up: "+e);
}
}
function shutdown() {
try {
LOG("shutting down...");
let windows = wm.getEnumerator("navigator:browser");
while (windows.hasMoreElements()) {
let chromeWindow = windows.getNext().QueryInterface(Ci.nsIDOMWindow);
WindowListener.tearDownBrowserUI(chromeWindow);
}
wm.removeListener(WindowListener);
LOG("done shutdown.");
} catch (e) {
LOG("error shutting down: "+e);
}
}
Basically, that calls WindowListener.setupBrowserUI() for each current & future window of your web-browser. WindowListener is defined as follows:
var WindowListener = {
setupBrowserUI: function(chromeWindow) {
chromeWindow.gBrowser.addEventListener('load', my_load_handler, true);
},
tearDownBrowserUI: function(chromeWindow) {
chromeWindow.gBrowser.removeEventListener('load', my_load_handler, true);
},
onOpenWindow: function(xulWindow) {
let chromeWindow = xulWindow.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDOMWindow);
chromeWindow.addEventListener("load", function listener() {
chromeWindow.removeEventListener("load", listener, false);
var domDocument = chromeWindow.document.documentElement;
var windowType = domDocument.getAttribute("windowtype");
if (windowType == "navigator:browser")
WindowListener.setupBrowserUI(chromeWindow);
}, false);
},
onCloseWindow: function(chromeWindow) { },
onWindowTitleChange: function(chromeWindow, newTitle) { }
};
That sets up an event listener for the OpenWindow event, and in turn installs an event listener for load events in the TabBrowser of each ChromeWindow. The load event handler is defined as:
var my_load_handler = function (evt) {
try {
var browserEnumerator = wm.getEnumerator("navigator:browser");
while (browserEnumerator.hasMoreElements()) {
var browserWin = browserEnumerator.getNext();
var tabbrowser = browserWin.gBrowser;
var numTabs = tabbrowser.browsers.length;
for (var index = 0; index < numTabs; index++) {
var currentBrowser = tabbrowser.getBrowserAtIndex(index);
var domWindow = currentBrowser.contentWindow.wrappedJSObject;
// identify your target page(s)...
if (domWindow.location.href == 'http://yourserver/yourpage') {
// install the privileged methods (if not already there)
if (!domWindow.hasOwnProperty('__my_priv_members__')) {
install_my_privileged_methods(browserWin, domWindow);
}
}
}
}
} catch (e) {
LOG(e);
}
}
That targets the correct page(s) (by checking window.location.href) and calls install_my_privileged_methods on their window object, which is defined as:
function install_my_privileged_methods(chromeWindow, domWindow) {
install_privileged_method(chromeWindow, domWindow, 'WriteFile',
function(priv) {
return function(File, Text, cb) {
priv.call([File, Text], function(rstatus, rdata, rerror){
if (cb) cb(rstatus, rerror);
});
};
},
function (chromeWindow, args, cb) {
var [File, Text] = args;
if (!File) return cb(0, null, "need a filename");
try {
const unicodeConverter =
Cc["#mozilla.org/intl/scriptableunicodeconverter"]
.createInstance(Ci.nsIScriptableUnicodeConverter);
unicodeConverter.charset = "UTF-8";
Text = unicodeConverter.ConvertFromUnicode(Text);
const os = Cc["@mozilla.org/network/file-output-stream;1"]
.createInstance(Ci.nsIFileOutputStream);
os.init(File, 0x02 | 0x08 | 0x20, 0700, 0);
os.write(Text, Text.length);
os.close();
cb(1, null, null);
} catch (e) {
cb(0, null, "error writing file: "+e);
}
}
);
install_privileged_method(chromeWindow, domWindow, 'ReadFile',
function(priv) {
return function(File, cb) {
priv.call([File], function(rstatus, rdata, rerror){
if (cb) cb(rstatus, rdata, rerror);
});
};
},
function (chromeWindow, args, cb) {
var [File] = args;
if (!File) return cb(0, null, "need a filename");
try {
const is = Cc["@mozilla.org/network/file-input-stream;1"]
.createInstance(Ci.nsIFileInputStream);
const sis = Cc["@mozilla.org/scriptableinputstream;1"]
.createInstance(Ci.nsIScriptableInputStream);
is.init(File, 0x01, 0400, null);
sis.init(is);
var Text = sis.read(sis.available());
is.close();
cb(1, Text, null);
} catch (e) {
cb(0, null, "error reading file: "+e);
}
}
);
}
I didn't test this code. It's a straightforward translation of what you wrote above... I'll assume that works!
That adds two special methods, WriteFile & ReadFile, to the chosen window objects. In your web application's (unprivileged) JavaScript code, use them like this:
var buffer = '...'; // the text to be written
window.WriteFile('C:\\path\\to\\file.txt', buffer, function(ok, errmsg) {
if (!ok) alert(errmsg);
});
window.ReadFile('C:\\path\\to\\file.txt', function(ok, buffer, errmsg) {
if (!ok) return alert(errmsg);
// use buffer here!
});
Finally, install_privileged_method is defined as:
var install_privileged_method = (function(){
var gensym = (function (){
var __sym = 0;
return function () { return '__sym_'+(__sym++); }
})();
return function (chromeWindow, target, slot, methodFactory, handler) {
try {
target.__pmcache__ = target.hasOwnProperty('__pmcache__')
? target.__pmcache__
: { ticket_no: 0, callbacks: {}, namespace: gensym() };
target[slot] = methodFactory({ call: function(fargs, fcb) {
try {
var ticket_no = target.__pmcache__.ticket_no++;
target.__pmcache__.callbacks[ticket_no] = fcb;
var cevent = target.document.createEvent("CustomEvent");
cevent.initCustomEvent(
target.__pmcache__.namespace+'.'+slot,
true, true, { fargs: fargs, ticket_no: ticket_no }
);
target.dispatchEvent(cevent);
} catch (ue) {
fcb(0, null, 'untrusted dispatcher error: '+ue);
}
}});
LOG("installed untrusted dispatcher for method '"+slot+"'.");
target.addEventListener(
target.__pmcache__.namespace+'.'+slot,
function(cevent){
var ticket_no = cevent.detail.ticket_no;
var fargs = cevent.detail.fargs;
var fcb = target.__pmcache__.callbacks[ticket_no];
try {
handler(chromeWindow, fargs, fcb);
} catch (pe) {
fcb(0, null, 'privileged handler error: '+pe);
}
},
false,
true
);
LOG("installed privileged handler for method '"+slot+"'.");
} catch (ie) {
LOG("ERROR installing handler/factory for privileged "+
"method '"+slot+"': "+ie);
}
};
})();