I am using the npm xlsx (lib/parserScripts/readExcel.js)
and threads module to read a large excel file.
This works fine for the first time but if I simultaneously upload another large file then I get an error
Error: channel closed
at ChildProcess.target.send (internal/child_process.js:554:16)
at Worker.send (/app/node_modules/threads/lib/worker.node/worker.js:108:16)...
This may be because the previous threads are still processing (they have not been killed), so when a new pool is created for another request, the previous threads are still busy.
How to solve this? Do I have to manually terminate the threads in the below piece of code? If so then how?
index.js
parseFile: ['fileHeaders', (results, cb) => {
const excelParserScript = __dirname + '/../lib/parserScripts/readExcel';
const worksheetIndex = 3;
const params = {
file.path,
worksheetIndex
}
// using worker process
// result will be of the type {error: false, message: '', data: {}}
lib.miniWorker.bufferedJob(excelParserScript, params, (err, result) => {
lib/miniworker.js
const Threads = require('threads');
const Pool = Threads.Pool;
const workerPool = new Pool();
module.exports = class JobManager {
static bufferedJob(pathToScript, params, callback){
workerPool
.run(pathToScript)
.send(params)
.on('done', (result, input) => {
console.log(`Worker Job done: ${pathToScript} `);
callback(null, result);
})
.on('error', (job, error) => {
console.log(`Error in executing Worker Job: ${pathToScript}`);
callback(job || error);
})
}
}
lib/parserScripts/readExcel.js
module.exports = function(input, done) {
const XLSX = require('xlsx');
let workbook;
const path = input.path;
const worksheetIndex = input.worksheetIndex;
const expectedHeaders = input.expectedHeaders || [];
const options = {};
if (expectedHeaders.length > 0) {
options.header = expectedHeaders;
}
const response = {
error: false,
message: '',
data: {}
}
try {
workbook = XLSX.readFile(path, {});
const sheet = workbook['Sheets'][workbook.SheetNames[worksheetIndex]];
const headers = getHeaders(sheet);
const fileData = XLSX.utils.sheet_to_json(workbook['Sheets'][workbook.SheetNames[worksheetIndex]], options);
response.data = fileData;
response.headers = headers;
return done(response)
} catch (err) {
response.error = true;
response.messsage = 'Error in reading the file';
return done(response);
}
function getHeaders(sheet) {
var header = 0, offset = 1;
var hdr = [];
var o = {};
if (sheet == null || sheet["!ref"] == null) return [];
var range = o.range !== undefined ? o.range : sheet["!ref"];
var r;
if (o.header === 1) header = 1;
else if (o.header === "A") header = 2;
else if (Array.isArray(o.header)) header = 3;
switch (typeof range) {
case 'string':
r = safe_decode_range(range);
break;
case 'number':
r = safe_decode_range(sheet["!ref"]);
r.s.r = range;
break;
default:
r = range;
}
if (header > 0) offset = 0;
var rr = XLSX.utils.encode_row(r.s.r);
var cols = new Array(r.e.c - r.s.c + 1);
for (var C = r.s.c; C <= r.e.c; ++C) {
cols[C] = XLSX.utils.encode_col(C);
var val = sheet[cols[C] + rr];
switch (header) {
case 1:
hdr.push(C);
break;
case 2:
hdr.push(cols[C]);
break;
case 3:
hdr.push(o.header[C - r.s.c]);
break;
default:
if (val === undefined) continue;
hdr.push(XLSX.utils.format_cell(val));
}
}
return hdr;
}
function safe_decode_range(range) {
var o = {s: {c: 0, r: 0}, e: {c: 0, r: 0}};
var idx = 0, i = 0, cc = 0;
var len = range.length;
for (idx = 0; i < len; ++i) {
if ((cc = range.charCodeAt(i) - 64) < 1 || cc > 26) break;
idx = 26 * idx + cc;
}
o.s.c = --idx;
for (idx = 0; i < len; ++i) {
if ((cc = range.charCodeAt(i) - 48) < 0 || cc > 9) break;
idx = 10 * idx + cc;
}
o.s.r = --idx;
if (i === len || range.charCodeAt(++i) === 58) {
o.e.c = o.s.c;
o.e.r = o.s.r;
return o;
}
for (idx = 0; i != len; ++i) {
if ((cc = range.charCodeAt(i) - 64) < 1 || cc > 26) break;
idx = 26 * idx + cc;
}
o.e.c = --idx;
for (idx = 0; i != len; ++i) {
if ((cc = range.charCodeAt(i) - 48) < 0 || cc > 9) break;
idx = 10 * idx + cc;
}
o.e.r = --idx;
return o;
}
}
This works fine for the first time but if I simultaneously upload another large file then I get an error.
you should upload a different file name like a final01.xlsx and then rename it to final.xlsx
The reason is that while the upload is still writing the file, readFile can't finish: the write locks the file and keeps changing its content.
If upload file means you are simultaneously reading another large file in node.js code ignore my comment.
The issue is caused by the older version of the threads module. Updating to the new version and using the updated API, which is not event-based, solves the problem.
https://github.com/andywer/threads.js/issues/164
However, if you want to correct the event-based code(from older version) this is what you need to do (kill the threads after the event gets completed).
const Threads = require('threads');
const Pool = Threads.Pool;
module.exports = class JobManager {
static bufferedJob(pathToScript, params, callback){
let workerPool = new Pool();
workerPool
.run(pathToScript)
.send(params)
.on('done', (result, input) => {
console.log(`Worker Job done: ${pathToScript} `);
callback(null, result);
workerPool.killAll();
workerPool = null ;
})
.on('error', (job, error) => {
console.log(`Error in executing Worker Job: ${pathToScript}`);
callback(job || error);
workerPool.killAll();
workerPool = null ;
}).on('abort', (job, error)=>{
console.log(`Abort Worker Job: ${pathToScript}, Error : ${error}`);
callback(job || error);
workerPool.killAll();
workerPool = null ;
}).on('finished', ()=>{
console.log('Everything done, shutting down the thread pool.');
workerPool.killAll();
});
}
}
Related
When I pass this URL I need to get all of a user's comments ---> http://localhost:3080/api/msgs/email=k1199#gmail.com but I am getting only one comment and this error: [Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client]
// GET /api/msgs/:msg — :msg looks like "email=<value>" or "youtube=<value>".
// Looks matching records up in notes1.json and returns them as JSON.
app.get('/api/msgs/:msg', (req, res) => {
    console.log('api/users called!!!!!!!')
    const databuffer = fs.readFileSync('notes1.json', 'utf8')
    const dtafil = JSON.parse(databuffer.toString());
    // req.url === '/api/msgs/email=foo' → parts[2] = 'msgs', parts[3] = 'email=foo'
    const id = req.url.split('/')[3]
    const findname = req.url.split('/')[2]
    console.log(findname)
    // Split "key=value" on the last '=' (replaces the three hand-rolled
    // character-copying loops of the original).
    const eq = id.lastIndexOf('=');
    const idegenerator = id.slice(eq + 1);      // the value
    const idegeneratorlk = id.slice(0, eq + 1); // the "key=" prefix
    console.log(idegeneratorlk, idegenerator)
    if (idegeneratorlk == 'email=') {
        // BUG FIX: res.json() was called once per matching element inside a
        // loop (and the loop ran to i <= length, overshooting the array).
        // A response can only be sent once per request — collect all matches
        // first, then answer a single time.
        const matches = dtafil.filter((item) => item.namees === idegenerator);
        res.json(matches);
    } else if (idegeneratorlk == 'youtube=') {
        const user = dtafil.find((item) => item.youtube === idegenerator)
        res.json(user);
    }
    else {
        // BUG FIX: message was misspelled "Not fount".
        res.json('404 Not found');
    }
});
In simple terms, ERR_HTTP_HEADERS_SENT means that you are sending more than one response for the same request.
In your code, see this for loop:
for(i=0;i<=dtafil.length;i++){
if(dtafil[i].namees === idegenerator){
console.log(dtafil[i])
res.json(dtafil[i]);
}
}
You are sending a JSON response in each iteration that will send the same response back to the client multiple times which you can't.
Save the data in a variable and then send the response once, outside the for loop, something like this:
if (idegeneratorlk == 'email=') {
const response = dtafil.filter(data => {
return data.namees === idegenerator;
})
res.json(response);
}
I have just written this pseudo-code for reference, which you may need to edit as per your use case.
I have a problem when using await in a for loop. Every time it hits the await function it executes fine, but it stops looping through the rest of the array. I'm using Node.js with axios to send HTTP requests to a REST API. The function in the API is big, so I thought maybe that was the problem, but it wasn't. The API also uses its own await functions, but that wasn't the problem either (as far as I know).
Here is my code for the request
var files = await globPromise("assets/series/**/*.json");
var json = null;
var file = null;
var series = [];
for (i = 0; i < files.length; i++) {
file = files[i];
var raw = fs.readFileSync(file);
json = JSON.parse(raw);
if (json.type === "series") {
try {
var userId = null;
if (req.user == null) {
userId = req.query.userid;
} else {
userId = req.user.id;
}
const response = await axios.get("http://" + host + "/series/info/" + json.id + "?userid=" + userId);
series.push(JSON.parse(response.data));
} catch (error) {
console.log(error);
series.push(json);
}
}
}
res.json(JSON.stringify(series));
});
And also the api side:
// GET /series/info/:id — builds the season/episode tree for one series and
// merges per-user watch progress from the `watched` table.
app.get('/series/info/:id', async(req, res) => {
    var files = await globPromise("assets/series/**/*.json");
    var json = null;
    var file = null;
    // BUG FIX: both outer loops used the undeclared name `i` (an implicit
    // global shared with the client-side loop in this process), corrupting
    // each other's counters across awaits. All loop variables are now declared.
    for (let i = 0; i < files.length; i++) {
        file = files[i];
        var raw = fs.readFileSync(file);
        json = JSON.parse(raw);
        if (json.type === "series" && json.id === req.params.id) {
            break;
        }
        json = null;
    }
    // BUG FIX: with no matching series, `json` stayed null and the code below
    // crashed inside the try, leaving the request hanging. Answer instead.
    if (json === null) {
        return res.status(404).json({ error: "series not found" });
    }
    let path = pathFs.dirname(file) + "/";
    try {
        var seriesFiles = await fsPromise.readdir(path);
        var latestWatchedVideo = null;
        var latestWatchedTime = null;
        var latestWatchTime = null;
        var latestWatchDuration = null;
        var seasonCount = 0;
        var seasons = [];
        for (let i = 0; i < seriesFiles.length; i++) {
            // BUG FIX: `seriesFile` was an undeclared implicit global.
            const seriesFile = seriesFiles[i];
            if (fs.lstatSync(path + "/" + seriesFile).isDirectory()) {
                // Season folders are named "s<number>...".
                if (!seriesFile.startsWith("s")) {
                    continue;
                }
                seasonCount++;
                try {
                    var videoFiles = await fsPromise.readdir(path + "/" + seriesFile + "/");
                    var videos = [];
                    for (let i = 0; i < videoFiles.length; i++) {
                        const video = videoFiles[i];
                        if (video.endsWith(".json")) {
                            var rawVideo = fs.readFileSync(path + "/" + seriesFile + "/" + video);
                            const videoJson = JSON.parse(rawVideo);
                            const query = util.promisify(con.query).bind(con);
                            var userId = null;
                            if (req.user == null) {
                                userId = req.query.userid;
                            } else {
                                userId = req.user.id;
                            }
                            // SECURITY FIX: ids came straight from the query
                            // string into the SQL text; use placeholders.
                            // NOTE(review): assumes a mysql-style driver with
                            // `?` placeholders — confirm against `con`.
                            var results = await query(
                                "SELECT * FROM watched WHERE video_id = ? AND user_id = ?",
                                [videoJson.id, userId]
                            );
                            if (results.length > 0) {
                                var updated = JSON.parse(JSON.stringify(results[0].updated));
                                var duration = JSON.parse(JSON.stringify(results[0].duration));
                                var time = JSON.parse(JSON.stringify(results[0].time));
                                // Keep the most recently updated watch record.
                                if (latestWatchedVideo == null || latestWatchedTime < updated) {
                                    latestWatchedVideo = videoJson.id;
                                    latestWatchedTime = updated;
                                    latestWatchTime = time;
                                    latestWatchDuration = duration;
                                }
                            }
                            videos.push(videoJson);
                        }
                    }
                    // Order episodes within the season.
                    function compare(a, b) {
                        if (a.episode < b.episode) {
                            return -1;
                        }
                        if (a.episode > b.episode) {
                            return 1;
                        }
                        return 0;
                    }
                    videos.sort(compare);
                    seasons.push({
                        season: seasonCount,
                        title: seriesFile.replace("s" + seasonCount, ""),
                        videos: videos
                    });
                } catch (error) {
                    console.log(error);
                }
            }
        }
        json.seasonCount = seasonCount;
        json.seasons = seasons;
        json.latestWatchDuration = latestWatchDuration;
        json.latestWatchTime = latestWatchTime;
        json.latestWatchedVideo = latestWatchedVideo;
        json.latestWatchedTime = latestWatchedTime;
        res.json(JSON.stringify(json));
    } catch (error) {
        console.log(error);
        // BUG FIX: the request used to hang forever on error.
        res.status(500).json({ error: "failed to read series info" });
    }
});
Is there something (important) about await and async that I've missed?
Edit: my problem is that is loops fine through the first item and the await is working fine too, but it stops the loop and executes the next lines of code like there are no other items in my array.
Solved: I tried using for/of and it works now. I don't know what is so different between the default for and this one, but it works!
I'm porting a CRC check function made in Python to JavaScript and I'm struggling a bit.
Here is the working Python code:
from BitVector import BitVector # BitVector==3.4.8
def compute_crc(message):
    # Work on a copy so the caller's BitVector is not mutated in place.
    message = message.deep_copy()
    # 25-bit ADS-B CRC generator polynomial.
    generator = BitVector(bitstring='1111111111111010000001001')
    crc_length = len(generator) - 1  # 24 parity bits
    content_length = len(message) - crc_length
    assert content_length >= 0
    # Binary long division: XOR the generator in at every set payload bit.
    for i in range(content_length):
        if message[i]:
            message[i:i + len(generator)] ^= generator
    # The remainder (last 24 bits) is the CRC; all-zero means the frame is intact.
    return message[-crc_length:]
adsb_hex = "8D4840D6202CC371C32CE0576098"
adsb_crc = compute_crc(BitVector(hexstring = adsb_hex))
if int(adsb_crc) != 0:
print 'Bad'
else:
print 'Good'
Here is my (bad) JavaScript code:
// Parses a string of 0/1 characters as an unsigned binary number.
// Returns false for anything that is not a non-empty binary string.
function BinToDec(binStr)
{
    if (!/^[01]+$/.test(binStr)) {
        return false;
    }
    const result = parseInt(binStr, 2);
    return isNaN(result) ? false : result;
}
// Converts a binary string into an array of 0/1 numbers.
// Any character other than '0' is treated as a 1, as in the original.
function StrToBinArray(binStr)
{
    const result = [];
    for (const ch of binStr) {
        result.push(ch === '0' ? 0 : 1);
    }
    return result;
}
// Checks the 24-bit ADS-B CRC of a binary-string message.
// Returns true when the CRC remainder is all zeros (frame intact).
// BUG FIXES vs the original: it called an undefined BinArrayToStr(); the loop
// ran one step past contentLength (Python's range() excludes the bound);
// toString(2) dropped leading zeros so bits were written back misaligned and
// as '0'/'1' characters instead of numbers; and the final slice started at
// contentLength + 2 instead of contentLength. Parsing 25-bit windows through
// floats is avoided entirely by XOR-ing bit arrays, mirroring the Python
// `message[i:i+len(generator)] ^= generator`.
function TestCRC(adsbBinStr)
{
    var genStr = '1111111111111010000001001';
    var generator = genStr.split('').map(Number);
    var crcLength = generator.length - 1;
    var contentLength = adsbBinStr.length - crcLength;
    if (contentLength < 0) {
        return false;
    }
    var msg = adsbBinStr.split('').map(Number);
    for (var i = 0; i < contentLength; i++) {
        if (msg[i]) {
            // XOR the generator into the message at this position.
            for (var j = 0; j < generator.length; j++) {
                msg[i + j] ^= generator[j];
            }
        }
    }
    // The remainder is the last crcLength bits.
    return msg.slice(contentLength).every(function(b) { return b === 0; });
}
TestCRC('1000110101001000010000001101011000100000001011001100001101110001110000110010110011100000010101110110000010011000');
If it helps here is the CRC check as pseudocode:
GENERATOR = 1111111111111010000001001
MSG = binary("8D4840D6202CC371C32CE0576098") # total 112 bits
FOR i FROM 0 TO 88: # 112 - 24 parity bits
if MSG[i] is 1:
MSG[i:i+24] = MSG[i:i+24] ^ GENERATOR
CRC = MSG[-24:] # last 24 bits
IF CRC not 0:
MSG is corrupted
TLDR I'm struggling to port message[i:i + len(generator)] ^= generator correctly from Python to JavaScript and I need help.
According to your Python code, this could be an alternative in Javascript.
// just some util functions to replace BitVector functionality
// https://stackoverflow.com/a/12987042/4209136
function checkBin(n){return/^[01]{1,64}$/.test(n)}
function checkDec(n){return/^[0-9]{1,64}$/.test(n)}
function checkHex(n){return/^[0-9A-Fa-f]{1,64}$/.test(n)}
// Binary string -> decimal string (inputs are expected to stay under 53 bits).
function Bin2Dec(n){if(!checkBin(n))return 0;return parseInt(n,2).toString(10)}
function Dec2Bin(n){if(!checkDec(n)||n<0)return 0;return n.toString(2)}
// BUG FIX: converting the whole hex string with parseInt() loses precision
// past 53 bits (a 112-bit ADS-B frame) and drops leading zero bits.
// Convert digit by digit instead, 4 bits per hex character.
function Hex2Bin(n)
{
    if (!checkHex(n)) return 0;
    var bits = '';
    for (var i = 0; i < n.length; i++) {
        bits += ZeroPad(parseInt(n[i], 16).toString(2), 4);
    }
    return bits;
}
// Left-pads `num` with zeros up to `size` characters.
function ZeroPad(num, size)
{
    var result = String(num);
    while (result.length < size) {
        result = '0' + result;
    }
    return result;
}
// Computes the 24-bit ADS-B CRC remainder of a binary-string message and
// returns it as a binary string ('000...0' means the frame is intact).
// BUG FIXES vs the original: `message.substr(i, i + generator.length)` passed
// an END INDEX where substr expects a LENGTH, so ever-growing slices were
// parsed; and parsing slices longer than 53 bits through parseInt loses
// precision. XOR-ing a numeric bit array sidesteps both, mirroring the Python
// `message[i:i+len(generator)] ^= generator`.
function computeCrc(message) {
    var generator = '1111111111111010000001001';
    var crcLength = generator.length - 1;
    var contentLength = message.length - crcLength;
    if (contentLength < 0) {
        throw 'Invalid content length.';
    }
    var bits = message.split('').map(Number);
    var gen = generator.split('').map(Number);
    for (var i = 0; i < contentLength; i++) {
        if (bits[i] === 0) continue;
        // XOR the generator into the message at this position.
        for (var j = 0; j < gen.length; j++) {
            bits[i + j] ^= gen[j];
        }
    }
    // The remainder is the last crcLength bits.
    return bits.slice(contentLength).join('');
}
var adsbCrc = computeCrc(Hex2Bin("8D4840D6202CC371C32CE0576098"));
console.log(parseInt(adsbCrc) === 0 ? 'Good' : 'Bad');
Situation:
I want to create a multithread script where I load a list of IPs + account information with a CSV.
I load the data and call a function where I open electron and run my nightmare script in combination with Vo. Inside the script I go to a site, loop through a list of links and check if someone lives in Australia.
When I have an error, for example Timeout, the browser stops working.
Error Example ->
{ message: 'navigation error',
code: -7,
details: 'Navigation timed out after 30000 ms',
url: 'https://facebook.com/login' }
Here is my Code
var fs = require('fs');
var csv = require('fast-csv');
var vo = require('vo');
var Nightmare = require('nightmare');
var count = 0;
var urls = fs.readFileSync('uniqueIds.csv').toString().split("\n");
var arrayUrls = Object.keys(urls).map(function (key) {return urls[key]});
var bloqNumber = 0;
// Generator consumed by vo(): logs one Facebook account in through `proxy`,
// then walks a 2000-id window of `arrayUrls` (selected by urlsID) checking
// each profile's "living" section for Australian locations. Matches are
// appended to pages.csv.
// NOTE(review): any rejected yield (e.g. a navigation timeout) aborts this
// generator WITHOUT calling nightmare.end() — the accepted fix is to append
// .catch(...) to the yielded chains.
function *run(proxy, user, pass, urlsID) {
    var nightmare = new Nightmare({
        webPreferences: { partition: 'your-custom-partition'},
        switches:{
            'proxy-server': proxy,
            'ignore-certificate-errors': true
        }, show: true });
    // Log in, then give Facebook time to settle.
    yield nightmare
        .goto('https://facebook.com/login')
        .wait(".inputtext._55r1.inputtext._1kbt.inputtext._1kbt")
        .type('input[name="email"]', user)
        .type('input[name="pass"]', pass)
        .click('button[name=login]')
        .wait(29000);
    // Each urlsID selects a 2000-row window of the id list.
    var range = urlsID * 2000;
    var rangeStart = range - 2000;
    var urlsarray = arrayUrls.slice(rangeStart, range);
    for (var i = 0; i < urlsarray.length; i++) {
        count++;
        console.log(count + " -> " + proxy);
        if (count > 150){
            // NOTE(review): this ends the browser but does NOT leave the loop —
            // the next yield reuses the ended instance. A `return` looks intended.
            yield nightmare.end();
        }
        // NOTE(review): "§ion" looks like a mangled "&section=" — verify the URL.
        yield nightmare
            .goto("https://www.facebook.com/profile.php?id=" + urlsarray[i] + "&sk=about§ion=living&pnref=about")
            .wait(1000);
        // Captcha interstitial / account-block detection.
        var seqCheck = yield nightmare.exists(".captcha_interstitial");
        var bloqCheck = yield nightmare.exists(".mvl.ptm.uiInterstitial.uiInterstitialLarge.uiBoxWhite");
        if (seqCheck == true) {
            console.log("Seqcheck");
            // Wait out the captcha page before continuing.
            yield nightmare.wait(29000);
        }
        if (bloqCheck == true) {
            console.log("Blocked for a week" + user + proxy);
            bloqNumber++;
            console.log(bloqNumber);
            // Too many consecutive blocks: give up on this session.
            if (bloqNumber > 6) {
                yield nightmare.end();
            }
            continue;
        }
        // Does the profile expose a "places lived" card?
        var location = yield nightmare.exists("._3pw9._2pi4._2ge8");
        bloqNumber = 0;
        console.log(location);
        if (location == true) {
            // Scrape the place names out of the living section.
            var getLocation = yield nightmare.evaluate(function() {
                var jsonObject = new Array();
                var links = document.getElementsByClassName('_3pw9 _2pi4 _2ge8');
                var numProfiles = links.length;
                for(var i = 0; i< numProfiles; i++){
                    var elem;
                    try {
                        // NOTE(review): links[0] ignores the loop index — every
                        // iteration reads the FIRST card; links[i] looks intended.
                        elem = links[0].querySelector("._50f5._50f7 a").text;
                    } catch (err) {
                        var arrr = new Array('Hello', 'world');
                        return arrr;
                    }
                    jsonObject.push(elem);
                }
                return jsonObject;
            });
            var locationString = getLocation.join(" + ");
            console.log(locationString + " -> " + urlsarray[i]);
            // Match against Australian states and major cities.
            if (locationString.indexOf("Australia") !== -1 ||
                locationString.indexOf("Queensland") !== -1 ||
                locationString.indexOf("New South Wales") !== -1 ||
                locationString.indexOf("Victoria") !== -1 ||
                locationString.indexOf("Northern Territory") !== -1 ||
                locationString.indexOf("South Australia") !== -1||
                locationString.indexOf("Tasmania") !== -1 ||
                locationString.indexOf("Sydney") !== -1 ||
                locationString.indexOf("Adelaide") !== -1 ||
                locationString.indexOf("Cairns") !== -1 ||
                locationString.indexOf("Perth") !== -1 ||
                locationString.indexOf("Melbourne") !== -1 ||
                locationString.indexOf("Brisbane") !== -1 ||
                locationString.indexOf("Bundaberg") !== -1 ||
                locationString.indexOf("Canberra") !== -1 ||
                locationString.indexOf("Newcastle") !== -1 ||
                locationString.indexOf("Western Australia") !== -1 ) {
                console.log("Im in australia");
                var stringToPrint = urlsarray[i] + ", " + locationString + "\n";
                // NOTE(review): the callback logs "a new entry" even when
                // appendFile fails — `err` is never checked.
                fs.appendFile('pages.csv', stringToPrint.replace(/(\r\n|\n|\r)/gm,"") + "\n", function (err) {
                    console.log("a new entry");
                });
            }
        } else {
            console.log("It was false");
        }
    }
    yield nightmare.end();
}
// Read proxies.csv (columns: proxy, user, pass, urlsID) and start one
// nightmare session per account, driving the run() generator with vo().
fs.createReadStream('proxies.csv')
    .pipe(csv())
    .on('data', function (data) {
        var proxy = data[0];
        var user = data[1];
        var pass = data[2];
        var urlsID = data[3];
        console.log(urlsID);
        console.log(user);
        console.log(pass);
        // NOTE(review): every row launches immediately, so all sessions run
        // concurrently; rejections are only logged, never retried.
        vo(run(proxy, user, pass, urlsID)).then(out => console.log('out', out)).catch(error => console.log(error));
    }).on('end', function (data) {
        console.log('CSV reading finished.')
    });
Desired Outcome:
I want every time i get some kind of error that my thread is closing.
Solved. Just append .catch like in the example below.
yield nightmare
.goto('https://facebook.com/login')
.wait(".inputtext._55r1.inputtext._1kbt.inputtext._1kbt")
.type('input[name="email"]', user)
.type('input[name="pass"]', pass)
.click('button[name=login]')
.wait(29000).catch(function(err){
console.dir(err);
nightmare.end();
});
I'm trying to unit test my websocket on node.js and want to mock out a websocket client. I could create a HTML file that just connects to my server but then I can't run a single test on the server.
How would I go about (using either http.Client or net.Stream) to create a websocket client and have it interact with my server.
I'm targetting the (soon to be dead) draft 76 of the websocket spec.
The server side implementation I'm using is this
Since you already know that all current WebSocket versions will be obsolete very soon, and you're using a WebSocket server that supports the old draft 75, it's fairly trivial to write a client if you already have some server code lying around — no need for the "security" header stuff from draft 76.
Disclaimer: This thing had only 5 minute of testing or so but it should work for the most part.
Epic wall of code follows
var net = require('net');
// Minimal draft-75 WebSocket client over a raw TCP socket.
// `encoder`/`decoder` translate outgoing/incoming payloads; sensible
// defaults (stringify out, pass-through in) are used when omitted.
function WebSocket(host, port, encoder, decoder) {
    const defaultEncoder = function(data) { return data.toString(); };
    const defaultDecoder = function(data) { return data; };
    this.encoder = encoder || defaultEncoder;
    this.decoder = decoder || defaultDecoder;

    // Open the TCP connection right away; handshake state starts cleared.
    this.socket = net.createConnection(port, host);
    this.connected = false;
    this.header = 0;
    this.bytesSend = 0;
    this.dataFrames = [];
    this.dataState = 0;

    // Wire up listeners and send the handshake on the next tick, giving the
    // caller a chance to override the on* hooks first.
    const self = this;
    process.nextTick(function() {
        self.init(host, port);
    });
}
// Prototype -------------------------------------------------------------------
// Prototype -------------------------------------------------------------------
WebSocket.prototype = {
    // Overridable hooks; defaults just log.
    onConnect: function() {console.log('connect');},
    onClose: function() {console.log('close');},
    onData: function(data) {console.log(data)},

    // Sends the draft-75-style HTTP Upgrade handshake and wires socket events.
    init: function(host, port) {
        var that = this;
        this.socket.addListener('connect', function() {
            var data ='GET / HTTP/1.1\r\n'
                + 'Host: ' + host + ':' + port + '\r\n'
                + 'Origin: websocket.node.js\r\n'
                + 'Connection: Upgrade\r\n'
                + 'Upgrade: WebSocket\r\n\r\n';

            that.socket.write(data, 'ascii');
            that.socket.flush();
        });
        this.socket.addListener('data', function(data) {that.read(data);});
        this.socket.addListener('end', function() {that.onClose();});
        this.socket.addListener('error', function(e) {console.log(e.message);that.close();});
    },

    // Encodes (unless pre-encoded) and writes one message; returns bytes sent,
    // or 0 when the handshake has not completed yet.
    send: function(data, encoded) {
        if (this.connected) {
            return this.write(encoded ? data : this.encoder(data));
        } else {
            return 0;
        }
    },

    // Sends an empty closing frame and tears the socket down (idempotent).
    close: function() {
        if (this.connected) {
            this.connected = false;
            this.write(null);
            this.socket.end();
            this.socket.destroy();
        }
    },

    // Incoming byte-stream state machine: first skips the HTTP handshake
    // response (scanning for \r\n\r\n), then parses 0x00 ... 0xFF text frames.
    read: function read(data) {
        for(var i = 0, l = data.length; i < l; i++) {
            var b = data[i];
            if (this.header < 4) {
                // Count the \r\n\r\n that terminates the handshake response.
                if ((this.header === 0 || this.header === 2) && b === 0x0d) {
                    this.header++;
                } else if ((this.header === 1 || this.header === 3) && b === 0x0a) {
                    this.header++;
                } else {
                    this.header = 0;
                }
                if (this.header === 4) {
                    this.connected = true;
                    this.onConnect();
                    this.header = 5;
                }
            } else {
                if (this.dataState === 0) {
                    // Frame type byte: high bit set means a length-prefixed
                    // (closing) frame, otherwise a 0xFF-terminated text frame.
                    // BUG FIX: `b & 0x80 === 0x80` evaluated as `b & (0x80 ===
                    // 0x80)` i.e. `b & 1` — `===` binds tighter than `&` — so
                    // the LOW bit was tested. Parenthesize the mask.
                    this.dataState = (b & 0x80) === 0x80 ? 2 : 1;

                // Low bit frame
                } else if (this.dataState === 1) {
                    if (b === 0xff) {
                        // 0xFF terminator: decode the buffered UTF-8 payload.
                        var buffer = new Buffer(this.dataFrames);
                        this.dataFrames = [];
                        this.dataState = 0;
                        if (!this.message(buffer.toString('utf8', 0, buffer.length))) {
                            this.send({error: 'Invalid Message.'});
                            this.close();
                            return;
                        }
                    } else {
                        this.dataFrames.push(b);
                    }

                // Unused high bit frames
                } else if (this.dataState === 2) {
                    // A 0x80 frame with zero length is the close signal.
                    if (b === 0x00) {
                        this.close();
                    }
                }
            }
        }
    },

    // Writes one 0x00 <utf8 payload> 0xFF frame; returns bytes written.
    // Errors are deliberately swallowed (best-effort write on a dying socket).
    write: function(data) {
        var bytes = 0;
        if (!this.socket.writable) {
            return bytes;
        }
        try {
            this.socket.write('\x00', 'binary');
            if (typeof data === 'string') {
                this.socket.write(data, 'utf8');
                bytes += Buffer.byteLength(data);
            }
            this.socket.write('\xff', 'binary');
            this.socket.flush();
            bytes += 2;
        } catch(e) {}
        this.bytesSend += bytes;
        return bytes;
    },

    // Runs the decoder over a raw message; a decoder throw closes the
    // connection and reports failure to read().
    message: function(msg) {
        if (this.decoder) {
            try {
                msg = this.decoder(msg);
            } catch(e) {
                this.close();
                return false;
            }
        }
        this.onData(msg);
        return true;
    }
};
And here we set it up:
var bison = require('bison');
// automatically do some encoding/decoding magic
var test = new WebSocket('localhost', 28785, bison.encode, bison.decode);
// Fires once the server's handshake response has been fully read.
test.onConnect = function() {
};
// Fires once per decoded incoming frame.
test.onData = function(data) {
};
Feel free to ask questions in the comments.
PS: For info on how it works, read the spec :P
Take a look at the modules page on the wiki, it has some modules for websocket clients available in npm.