I've written an application in node.js consisting of a server and a client for storing/uploading files.
For reproduction purposes, here's a proof of concept using a null write stream in the server and a random read stream in the client.
Using node.js 12.19.0 on Ubuntu 18.04. The client depends on node-fetch v2.6.1.
The issue I have is that after 60 seconds the connection is reset, and I haven't found a way to make this work.
Any ideas are appreciated.
Thank you.
testServer.js
// -- DevNull Start --
var util = require('util')
  , stream = require('stream')
  , Writable = stream.Writable
  , setImmediate = setImmediate || function (fn) { setTimeout(fn, 0) }
;

util.inherits(DevNull, Writable);

function DevNull (opts) {
  if (!(this instanceof DevNull)) return new DevNull(opts);
  opts = opts || {};
  Writable.call(this, opts);
}

DevNull.prototype._write = function (chunk, encoding, cb) {
  setImmediate(cb);
}
// -- DevNull End --

const http = require('http');

const server = http.createServer();

server.on('request', async (req, res) => {
  try {
    req.socket.on('end', function() {
      console.log('SOCKET END: other end of the socket sends a FIN packet');
    });

    req.socket.on('timeout', function() {
      console.log('SOCKET TIMEOUT');
    });

    req.socket.on('error', function(error) {
      console.log('SOCKET ERROR: ' + JSON.stringify(error));
    });

    req.socket.on('close', function(had_error) {
      console.log('SOCKET CLOSED. IT WAS ERROR: ' + had_error);
    });

    const writeStream = DevNull();
    const promise = new Promise((resolve, reject) => {
      req.on('end', resolve);
      req.on('error', reject);
    });
    req.pipe(writeStream);
    await promise;

    res.writeHead(200);
    res.end('OK');
  } catch (err) {
    res.writeHead(500);
    res.end(err.message);
  }
});

server.listen(8081)
  .on('listening', () => { console.log('Listening on port', server.address().port); });
testClient.js
// -- RandomStream Start --
var crypto = require('crypto');
var stream = require('stream');
var util = require('util');

var Readable = stream.Readable;

function RandomStream(length, options) {
  // allow calling with or without new
  if (!(this instanceof RandomStream)) {
    return new RandomStream(length, options);
  }
  // init Readable
  Readable.call(this, options);
  // save the length to generate
  this.lenToGenerate = length;
}

util.inherits(RandomStream, Readable);

RandomStream.prototype._read = function (size) {
  if (!size) size = 1024; // default size
  var ready = true;
  while (ready) { // only cont while push returns true
    if (size > this.lenToGenerate) { // only this left
      size = this.lenToGenerate;
    }
    if (size) {
      ready = this.push(crypto.randomBytes(size));
      this.lenToGenerate -= size;
    }
    // when done, push null and exit loop
    if (!this.lenToGenerate) {
      this.push(null);
      ready = false;
    }
  }
};
// -- RandomStream End --

const fetch = require('node-fetch');

const runSuccess = async () => { // Runs in ~35 seconds
  const t = Date.now();
  try {
    const resp = await fetch('http://localhost:8081/test', {
      method: 'PUT',
      body: new RandomStream(256e6) // new RandomStream(1024e6)
    });
    const data = await resp.text();
    console.log(Date.now() - t, data);
  } catch (err) {
    console.warn(Date.now() - t, err);
  }
};

const runFail = async () => { // Fails after 60 seconds
  const t = Date.now();
  try {
    const resp = await fetch('http://localhost:8081/test', {
      method: 'PUT',
      body: new RandomStream(1024e6)
    });
    const data = await resp.text();
    console.log(Date.now() - t, data);
  } catch (err) {
    console.warn(Date.now() - t, err);
  }
};

// runSuccess().then(() => process.exit(0));
runFail().then(() => process.exit(0));
I tried (unsuccessfully) to reproduce what you are seeing based on your code example: for me the success call does not complete in ~35 seconds, nor is the error thrown after 60 seconds.
That being said, I think what is happening here is that your client is terminating the request.
You can increase the timeout by adding an http.Agent to the fetch PUT call and setting a timeout on that agent.
const http = require('http');
...
const runFail = async () => { // Fails after 60 seconds
  const t = Date.now();
  try {
    const resp = await fetch('http://localhost:8081/test', {
      method: 'PUT',
      body: new RandomStream(1024e6),
      agent: new http.Agent({ keepAlive: true, timeout: 300000 })
    });
    const data = await resp.text();
    console.log(Date.now() - t, data);
  } catch (err) {
    console.warn(Date.now() - t, err);
  }
};
See the node-fetch docs for adding a custom http(s) agent here.
See the options for creating an http(s) agent here.
This turned out to be a bug in node.js.
Discussion here: https://github.com/nodejs/node/issues/35661
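The linked issue tracks the underlying problem. Until you are on a Node release that contains the fix, one server-side mitigation that is sometimes suggested (an assumption on my part, not the confirmed fix from the issue) is to relax the http server's built-in timeouts so a slow upload is not cut off:

// Speculative workaround, not the confirmed fix from the linked issue:
// relax the server's built-in timeouts so a long-running upload is not
// reset by the default 60 s headers timeout or the socket timeout.
const http = require('http');

const server = http.createServer();
// ... attach the 'request' handler as in testServer.js above ...

server.headersTimeout = 5 * 60 * 1000; // default is 60000 ms on Node 12
server.setTimeout(0);                  // 0 disables the per-socket inactivity timeout

server.listen(8081);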
I need your help: I am trying to use the Hubstaff API. I am working in Node.js to make the connection; I followed the documentation (the official Hubstaff API documentation) and used the methods they give as implementation examples (the Node.js implementation example).
But I get the following error:
I don't know why this happens, and I can't find more examples of how to implement this API. The openid-client library is used to make the connection through the token, and state is tracked in order to refresh the token.
To be honest, I don't understand how to implement it. Can someone who has already used this API give me a brief explanation? I attach the code.
hubstaffConnect.util
const {
  Issuer,
  TokenSet
} = require('openid-client');
const fs = require('fs');
const jose = require('jose');

// constants
const ISSUER_EXPIRE_DURATION = 7 * 24 * 60 * 60; // 1 week
const ACCESS_TOKEN_EXPIRATION_FUZZ = 30; // 30 seconds
const ISSUER_DISCOVERY_URL = 'https://account.hubstaff.com';
// API URL with trailing slash
const API_BASE_URL = 'https://api.hubstaff.com/';

let state = {
  api_base_url: API_BASE_URL,
  issuer_url: ISSUER_DISCOVERY_URL,
  issuer: {}, // The issuer discovered configuration
  issuer_expires_at: 0,
  token: {},
};

let client;

function loadState() {
  return fs.readFileSync('./configState.json', 'utf8');
}

function saveState() {
  fs.writeFileSync('./configState.json', JSON.stringify(state, null, 2), 'utf8');
  console.log('State saved');
}

function unixTimeNow() {
  return Date.now() / 1000;
}

async function checkToken() {
  if (!state.token.access_token || state.token.expires_at < (unixTimeNow() + ACCESS_TOKEN_EXPIRATION_FUZZ)) {
    console.log('Refresh token');
    state.token = await client.refresh(state.token);
    console.log('Token refreshed');
    saveState();
  }
}

async function initialize() {
  console.log('API Hubstaff API');

  let data = loadState();
  data = JSON.parse(data);

  if (data.issuer) {
    state.issuer = new Issuer(data.issuer);
    state.issuer_expires_at = data.issuer_expires_at;
  }
  if (data.token) {
    state.token = new TokenSet(data.token);
  }
  if (data.issuer_url) {
    state.issuer_url = data.issuer_url;
  }
  if (data.api_base_url) {
    state.api_base_url = data.api_base_url;
  }

  if (!state.issuer_expires_at || state.issuer_expires_at < unixTimeNow()) {
    console.log('Discovering');
    state.issuer = await Issuer.discover(state.issuer_url);
    state.issuer_expires_at = unixTimeNow() + ISSUER_EXPIRE_DURATION;
    console.log(state.issuer);
  }

  client = new state.issuer.Client({
    // For a personal access token we can use PAT/PAT.
    // This is only needed because the library requires a client_id, whereas the API endpoint does not require it
    client_id: 'PAT',
    client_secret: 'PAT',
  });

  saveState();
  console.log('API Hubstaff initialized');
}

async function request(url, options) {
  await checkToken();
  let fullUrl = state.api_base_url + url;
  return client.requestResource(fullUrl, state.token, options);
}

function tokenDetails() {
  let ret = {};
  if (state.token.access_token) {
    ret.access_token = jose.JWT.decode(state.token.access_token);
  }
  if (state.token.refresh_token) {
    ret.refresh_token = jose.JWT.decode(state.token.refresh_token);
  }
  return ret;
}

module.exports = {
  initialize,
  checkToken,
  request,
  tokenDetails
};
controller
const usersGet = async(req, res = response) => {
  const response = await api.request('v2/organizations', {
    method: 'GET',
    json: true,
  });

  const body = JSON.parse(response.body);

  res.render('organizations', {
    title: 'Organization list',
    organizations: body.organizations || []
  });
};
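For reference, here is a minimal sketch of how the wrapper above would be wired up before the controller runs; the require path and the startup wrapper are my assumptions, not part of the original code:

// Hedged sketch: initialize the wrapper once at startup, then issue requests.
// The require path is an assumption; point it at wherever hubstaffConnect.util lives.
const api = require('./hubstaffConnect.util');

(async () => {
  await api.initialize(); // discovers the issuer and loads/refreshes the stored token

  const response = await api.request('v2/organizations', {
    method: 'GET',
    json: true,
  });

  const body = JSON.parse(response.body);
  console.log(body.organizations);
})();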
I am working on a solution that lets me send desktop push notifications to subscribed clients.
I have created a basic solution where, whenever the user clicks a button, I ask the user whether they want to allow notifications for my app or not.
I am getting an error of "Registration failed - permission denied" whenever I click the button for the first time.
Because of that, I am not able to get the required endpoints to save at the backend.
Here is my code.
index.html
<html>
  <head>
    <title>PUSH NOT</title>
    <script src="index.js"></script>
  </head>
  <body>
    <button onclick="main()">Ask Permission</button>
  </body>
</html>
index.js
const check = () => {
  if (!("serviceWorker" in navigator)) {
    throw new Error("No Service Worker support!");
  } else {
    console.log("service worker supported")
  }
  if (!("PushManager" in window)) {
    throw new Error("No Push API Support!");
  } else {
    console.log("PushManager worker supported")
  }
};

const registerServiceWorker = async () => {
  const swRegistration = await navigator.serviceWorker.register("/service.js?"+Math.random());
  return swRegistration;
};

const requestNotificationPermission = async () => {
  const permission = await window.Notification.requestPermission();
  // value of permission can be 'granted', 'default', 'denied'
  // granted: user has accepted the request
  // default: user has dismissed the notification permission popup by clicking on x
  // denied: user has denied the request.
  if (permission !== "granted") {
    throw new Error("Permission not granted for Notification");
  }
};

const main = async () => {
  check();
  const swRegistration = await registerServiceWorker();
  const permission = await requestNotificationPermission();
};

// main(); we will not call main in the beginning.
service.js
// urlB64ToUint8Array is a magic function that will encode the base64 public key
// to Array buffer which is needed by the subscription option
const urlB64ToUint8Array = base64String => {
  const padding = "=".repeat((4 - (base64String.length % 4)) % 4);
  const base64 = (base64String + padding)
    .replace(/\-/g, "+")
    .replace(/_/g, "/");
  const rawData = atob(base64);
  const outputArray = new Uint8Array(rawData.length);
  for (let i = 0; i < rawData.length; ++i) {
    outputArray[i] = rawData.charCodeAt(i);
  }
  return outputArray;
};

const saveSubscription = async subscription => {
  console.log("Save Sub")
  const SERVER_URL = "http://localhost:4000/save-subscription";
  const response = await fetch(SERVER_URL, {
    method: "post",
    headers: {
      "Content-Type": "application/json"
    },
    body: JSON.stringify(subscription)
  });
  return response.json();
};

self.addEventListener("activate", async () => {
  try {
    const applicationServerKey = urlB64ToUint8Array(
      "BFPtpIVOcn2y25il322-bHQIqXXm-OACBtFLdo0EnzGfs-jIGXgAzjY6vNapPb4MM1Z1WuTBUo0wcIpQznLhVGM"
    );
    const options = { applicationServerKey, userVisibleOnly: true };
    const subscription = await self.registration.pushManager.subscribe(options);
    console.log(JSON.stringify(subscription))
    const response = await saveSubscription(subscription);
  } catch (err) {
    console.log(err.code)
    console.log(err.message)
    console.log(err.name)
    console.log('Error', err)
  }
});

self.addEventListener("push", function(event) {
  if (event.data) {
    console.log("Push event!! ", event.data.text());
  } else {
    console.log("Push event but no data");
  }
});
I have also created a bit of a backend:
const express = require("express");
const cors = require("cors");
const bodyParser = require("body-parser");
const webpush = require('web-push')

const app = express();
app.use(cors());
app.use(bodyParser.json());
const port = 4000;

app.get("/", (req, res) => res.send("Hello World!"));

const dummyDb = { subscription: null }; //dummy in memory store

const saveToDatabase = async subscription => {
  // Since this is a demo app, I am going to save this in a dummy in memory store. Do not do this in your apps.
  // Here you should be writing your db logic to save it.
  dummyDb.subscription = subscription;
};

// The new /save-subscription endpoint
app.post("/save-subscription", async (req, res) => {
  const subscription = req.body;
  await saveToDatabase(subscription); //Method to save the subscription to Database
  res.json({ message: "success" });
});

const vapidKeys = {
  publicKey:
    'BFPtpIVOcn2y25il322-bHQIqXXm-OACBtFLdo0EnzGfs-jIGXgAzjY6vNapPb4MM1Z1WuTBUo0wcIpQznLhVGM',
  privateKey: 'mHSKS-uwqAiaiOgt4NMbzYUb7bseXydmKObi4v4bN6U',
}

webpush.setVapidDetails(
  'mailto:janakprajapati90@email.com',
  vapidKeys.publicKey,
  vapidKeys.privateKey
)

const sendNotification = (subscription, dataToSend='') => {
  webpush.sendNotification(subscription, dataToSend)
}

app.get('/send-notification', (req, res) => {
  const subscription = {endpoint:"https://fcm.googleapis.com/fcm/send/dLjyDYvI8yo:APA91bErM4sn_wRIW6xCievhRZeJcIxTiH4r_oa58JG9PHUaHwX7hQlhMqp32xEKUrMFJpBTi14DeOlECrTsYduvHTTnb8lHVUv3DkS1FOT41hMK6zwMvlRvgWU_QDDS_GBYIMRbzjhg",expirationTime:null,keys:{"p256dh":"BE6kUQ4WTx6v8H-wtChgKAxh3hTiZhpfi4DqACBgNRoJHt44XymOWFkQTvRPnS_S9kmcOoDSgOVD4Wo8qDQzsS0",auth:"CfO4rOsisyA6axdxeFgI_g"}} //get subscription from your database here.
  const message = 'Hello World'
  sendNotification(subscription, message)
  res.json({ message: 'message sent' })
})

app.listen(port, () => console.log(`Example app listening on port ${port}!`));
Please help me
Try the following code:
index.js
const check = () => {
  if (!("serviceWorker" in navigator)) {
    throw new Error("No Service Worker support!");
  } else {
    console.log("service worker supported")
  }
  if (!("PushManager" in window)) {
    throw new Error("No Push API Support!");
  } else {
    console.log("PushManager worker supported")
  }
};

const saveSubscription = async subscription => {
  console.log("Save Sub")
  const SERVER_URL = "http://localhost:4000/save-subscription";
  const response = await fetch(SERVER_URL, {
    method: "post",
    headers: {
      "Content-Type": "application/json"
    },
    body: JSON.stringify(subscription)
  });
  return response.json();
};

const urlB64ToUint8Array = base64String => {
  const padding = "=".repeat((4 - (base64String.length % 4)) % 4);
  const base64 = (base64String + padding)
    .replace(/\-/g, "+")
    .replace(/_/g, "/");
  const rawData = atob(base64);
  const outputArray = new Uint8Array(rawData.length);
  for (let i = 0; i < rawData.length; ++i) {
    outputArray[i] = rawData.charCodeAt(i);
  }
  return outputArray;
};

const registerServiceWorker = async () => {
  return navigator.serviceWorker.register("service.js?"+Math.random()).then((swRegistration) => {
    console.log(swRegistration);
    return swRegistration;
  });
};

const requestNotificationPermission = async (swRegistration) => {
  return window.Notification.requestPermission().then(() => {
    const applicationServerKey = urlB64ToUint8Array(
      "BFPtpIVOcn2y25il322-bHQIqXXm-OACBtFLdo0EnzGfs-jIGXgAzjY6vNapPb4MM1Z1WuTBUo0wcIpQznLhVGM"
    );
    const options = { applicationServerKey, userVisibleOnly: true };
    return swRegistration.pushManager.subscribe(options).then((pushSubscription) => {
      console.log(pushSubscription);
      return pushSubscription;
    });
  });
};

const main = async () => {
  check();
  const swRegistration = await registerServiceWorker();
  const subscription = await requestNotificationPermission(swRegistration);
  // saveSubscription(subscription);
};
service.js
self.addEventListener("push", function(event) {
if (event.data) {
console.log("Push event!! ", event.data.text());
} else {
console.log("Push event but no data");
}
});
I can think of three reasons why the permission is denied:
1) Your site is not on HTTPS (including localhost that is not on HTTPS). As far as I know, Chrome's default behaviour is to block notifications on HTTP sites. If that's the case, click on the info icon near the URL, then click on site settings, then change notifications to "Ask".
2) If you are on Safari, note that Safari uses the deprecated interface of requestPermission, that is to say the value is not returned through a promise but through a callback, so instead of
Notification.requestPermission().then(res => console.log(res))
it is
Notification.requestPermission(res => console.log(res))
3) Your browser settings are blocking notification requests globally. To make sure that this is not your problem, run the following code in the console (on a secure HTTPS site):
Notification.requestPermission().then(res => console.log(res))
If you receive the permission prompt, then the problem is something else; if you don't, make sure that the browser is not blocking notification requests.
I'm trying to use nock in my tests to intercept the request calls I'm making with the native https module in Node.js. I'm using Promise.all to make two requests to the external server. I want my tests to intercept the calls and check some of the form fields to make sure they're filled in as I want.
I have my class set up below (I've kept the most relevant parts of the code):
const archiver = require('archiver');
const { generateKeyPairSync } = require('crypto');
const FormData = require('form-data');
const fs = require('fs');
const https = require('https');

class Platform {
  constructor() {
    this.FILESTORE_USERNAME = process.env.FILESTORE_USERNAME;
    this.FILESTORE_PASSWORD = process.env.FILESTORE_PASSWORD;
  }

  store(serviceName) {
    const { publicKey, privateKey } = this._generateKeys();
    return Promise.all([this._postKey(publicKey), this._postKey(privateKey)])
      .then(() => {
        return this._zipKeys(publicKey, privateKey, serviceName);
      })
      .catch((err) => {
        throw err;
      });
  }

  _postKey(key) {
    const options = this._getOptions();
    const keyName = (key.search(/(PUBLIC)/) !== -1) ? 'publicKey' : 'privateKey';
    const form = new FormData();
    form.append('file', key);
    form.append('Name', keyName);
    form.append('MimeMajor', 'application');
    form.append('MimeMinor', 'x-pem-file');
    form.append('Extension', (keyName == 'publicKey') ? 'pub' : '');
    form.append('FileClass', 'MFS::File');
    options.headers = form.getHeaders();
    options.headers.Authorization = 'Basic ' + Buffer.from(this.FILESTORE_USERNAME + ':' + this.FILESTORE_PASSWORD).toString('base64');

    return new Promise((resolve, reject) => {
      let post = https.request(options, (res) => {
        let data = '';
        if (res.statusCode < 200 || res.statusCode > 299) {
          reject(new Error('File Storage API returned a status code outside of acceptable range: ' + res.statusCode));
        } else {
          res.setEncoding('utf8');
          res.on('data', (chunk) => {
            data += chunk;
          });
          res.on('error', (err) => {
            reject(err);
          });
          res.on('end', () => {
            if (data) {
              resolve(JSON.parse(data));
            } else {
              resolve();
            }
          });
        }
      });
      post.on('error', (err) => {
        reject(err);
      });
      form.pipe(post);
      post.end();
    });
  }

  _getOptions() {
    return {
      hostname: 'api.example.com',
      path: '/media/files/',
      method: 'POST',
    };
  }
}

module.exports = Platform;
And then, my testing code looks like the below. I'm using mocha, sinon, chai, sinon-chai and nock.
const Platform = require('/usr/src/app/api/Services/Platform');
const crypto = require('crypto');
const fs = require('fs');
const nock = require('nock');
const yauzl = require('yauzl');

describe('Platform', function() {
  let platform;

  beforeEach(() => {
    platform = new Platform();
  });

  afterEach(() => {
    const list = fs.readdirSync('/usr/src/app/api/Services/data/');
    list.forEach((file) => {
      fs.unlink('/usr/src/app/api/Services/data/' + file, (err) => {
        if (err) throw err;
      });
    });
    nock.cleanAll();
  });

  after(() => {
    nock.restore();
  });

  describe('store', function() {
    it('should post each generated key to an external storage place', async function() {
      this.timeout(5000);
      // const stub = sinon.stub(platform, '_postKey').resolves();
      const scope = nock('https://api.example.com')
        .persist()
        .post('/media/files/', (body) => {
          // console.log(body);
        })
        .reply(200);

      let serviceName = 'test';
      let actual = await platform.store(serviceName)
        .catch((err) => {
          (() => { throw err; }).should.not.throw();
        });
      console.log(scope);
      // expect(stub.callCount).to.equal(2);
      expect(actual).to.be.a('string');
      expect(actual).to.include(serviceName + '.zip');
      // stub.reset();
    });
  });
});
The problem I am coming across is this error that is thrown when running my tests:
AssertionError: expected [Function] to not throw an error but 'Error:
Nock: No match for request {\n "method": "POST",\n "url":
"https://api.example.com/media/files/",\n "headers": {\n
"content-type": "multipart/form-data;
boundary=--------------------------363749230271182821931703",\n
"authorization": "Basic abcdef1224u38454857483hfjdhjgtuyrwyt="\n },\n
"body":
"----------------------------363749230271182821931703\r\nContent-Disposition: form-data; name=\"file\"\r\n\r\n-----BEGIN PUBLIC
KEY-----\nMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq+QnVOYVjbrHIlAEsEoF\nZ4sTvqiB3sJGwecNhmgrUp9U8oqgoB50aW6VMsL71ATRyq9b3vMQKpjbU3R2RcOF\na6mlaBtBjxDGu2nEpGX++mtPCdD9HV7idvWgJ3XS0vGaCM//8ukY+VLBc1IB8CHC\nVj+8YOD5Y9TbdpwXR+0zCaiHwwd8MHIo1kBmQulIL7Avtjh55OmQZZtjO525lbqa\nWUZ24quDp38he2GjLDeTzHm9z1RjYJG6hS+Ui0s2xRUs6VAr7KFtiJmmjxPS9/vZ\nwQyFcz/R7AJKoEH8p7NE7nn/onbybJy+SWRxjXVH8afHkVoC65BiNoMiEzk1rIsx\ns92woHnq227JzYwFYcLD0W+TYjtGCB8+ks+QRIiV0pFJ3ja5VFIxjn9MxLntWcf2\nhsiYrmfJlqmpW1DMfZrtt41cJUFQwt7CpN72aix7btmd/q0syh6VVlQEHq/0nDky\nItv7dqyqZc9NNOMqK9/kXWhbq5cwS21mm+kTGas5KSdeIR0LH7uVtivB+LKum14e\nRDGascZcXZIVTbOeCxA6BD7LyaJPzXmlMy4spXlhGoDYyVRhpvv2K03Mg7ybiB4X\nEL1oJtiCFkRX5LtRJv0PCRJjaa3UvfyIuz8bHK4ANxIZqcwZwER+g02gw8iqNfMa\nDWXpfMJUU8TQuLGWZQaGJc8CAwEAAQ==\n-----END
PUBLIC
KEY-----\n\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"Name\"\r\n\r\npublicKey\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"MimeMajor\"\r\n\r\napplication\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"MimeMinor\"\r\n\r\nx-pem-file\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"Extension\"\r\n\r\npub\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"FileClass\"\r\n\r\nMFS::File\r\n----------------------------363749230271182821931703--\r\n"\n}'
was thrown
I take it it's because nock expects me to fake out the body for the request to get a correct match? Is there a way of just looking for requests made to that address, regardless of the body, so I can do my own tests or whatever.
When the post method of a Nock Scope is passed a second argument, it is used to match against the body of the request.
Docs for specifying the request body
In your test, you're passing a function as the second argument, but it is not returning true, so Nock does not consider the request a match.
From the docs:
Function: nock will evaluate the function providing the request body
object as first argument. Return true if it should be considered a
match
Since your goal is to assert form fields on the request, your best approach would be to leave the function there, do your assertions where the // console.log(body); line is, and add return true; to the end of the function, as sketched below.
You could also return true or false depending on whether your form fields match your assertions, but in my experience that makes the error output from the test convoluted. My preference is to use standard chai expect() calls and let the assertions bubble errors before Nock continues with request matching.
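A minimal sketch of that suggestion, assuming chai's expect is available in the test file; the asserted field names are taken from the form built in _postKey():

// Sketch only: assert on the multipart body inside the matcher, then return
// true so Nock still treats the request as a match.
const scope = nock('https://api.example.com')
  .persist()
  .post('/media/files/', (body) => {
    // body arrives here as the raw multipart payload (a string)
    expect(body).to.include('name="Name"');
    expect(body).to.include('name="MimeMajor"');
    expect(body).to.include('MFS::File');
    return true; // always match; the expect() calls above do the real checking
  })
  .reply(200);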
I am building a node application and trying to neatly organize my code. I wrote a serial module that imports the serial libs and handles the connection. My intention was to write a basic module and then reuse it over and over again in different projects as needed. The only part that changes per project is how the incoming serial data is handled. For this reason I would like to pull out the following handler and redefine it per the project's needs. How can I use module exports to redefine only this section of the file?
I have tried adding myParser to the exports, but that gives me null, and it would be out of scope.
Handler to redefine/change/overload for each new project
myParser.on('data', (data) => {
  console.log(data)
  //DO SOMETHING WITH DATA
});
Example usage: main.js
const serial = require('./serial');
const dataParser = require('./dataParser');

//call connect with CL args
serial.connect(process.argv[2], Number(process.argv[3]))

serial.myParser.on('data',(data) => {
  //Do something unique with data
  if (dataParser.parse(data) == 0)
    serial.send('Error');
});
Full JS module below: serial.js
const SerialPort = require('serialport');
const ReadLine = require('@serialport/parser-readline');
const _d = String.fromCharCode(13); //char EOL

let myPort = null;
let myParser = null;

function connect(port, baud) {
  let portName = port || `COM1`;
  let baudRate = baud || 115200;
  myPort = new SerialPort(portName, {baudRate: baudRate})
  myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))

  //Handlers
  myPort.on('open', () => {
    console.log(`port ${portName} open`)
  });

  myParser.on('data', (data) => {
    console.log(data)
  });

  myPort.on('close', () => {
    console.log(`port ${portName} closed`)
  });

  myPort.on('error', (err) => {
    console.error('port error: ' + err)
  });
}

function getPorts() {
  let portlist = [];
  SerialPort.list((err, ports) => {
    ports.forEach(port => {
      portlist.push(port.comName)
    });
  })
  return portlist;
}

function send(data) {
  myPort.write(JSON.stringify(data) + _d, function (err) {
    if (err) {
      return console.log('Error on write: ', err.message);
    }
    console.log(`${data} sent`);
  });
}

function close() {
  myPort.close();
}

module.exports = {
  connect, getPorts, send, close
}
The problem is that a module is used where a class or a factory would be appropriate. myParser cannot exist until connect is called, so it doesn't make sense to expose it as a module property: it would be unavailable by default, and multiple connect calls would override it.
It can be a factory:
module.exports = function connect(port, baud) {
  let portName = port || `COM1`;
  let baudRate = baud || 115200;
  let myPort = new SerialPort(portName, {baudRate: baudRate})
  let myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))

  //Handlers
  myPort.on('open', () => {
    console.log(`port ${portName} open`)
  });

  myParser.on('data', (data) => {
    console.log(data)
  });

  myPort.on('close', () => {
    console.log(`port ${portName} closed`)
  });

  myPort.on('error', (err) => {
    console.error('port error: ' + err)
  });

  function getPorts() {
    let portlist = [];
    SerialPort.list((err, ports) => {
      ports.forEach(port => {
        portlist.push(port.comName)
      });
    })
    return portlist;
  }

  function send(data) {
    myPort.write(JSON.stringify(data) + _d, function (err) {
      if (err) {
        return console.log('Error on write: ', err.message);
      }
      console.log(`${data} sent`);
    });
  }

  function close() {
    myPort.close();
  }

  return {
    myParser, getPorts, send, close
  };
}
So it could be used like:
const serial = require('./serial');
const connection = serial(...);

connection.myParser.on('data',(data) => {
  //Do something unique with data
  if (dataParser.parse(data) == 0)
    connection.send('Error');
});
I am trying to add an option to my app to toggle whether a cache is used. For the cache I am using Redis, and I made a simple wrapper for it. The problem comes when I try to overwrite the redis.get method: it simply doesn't work, or it cannot find this.
'use strict';

const redis = require('redis');
const config = require('../config');

const REDIS_EMPTY_VALUE = 'NOT_EXIST';
const MINUTE = 60;

let client = redis.createClient({
  host: config.get('REDIS_HOST'),
  port: config.get('REDIS_PORT')
});

client.on("error", function (err) {
  logging.error('Redis Error: ' + err);
  throw new Error(err);
});

/**
 * Next are custom extensions for Redis module
 */
client.emptyValue = REDIS_EMPTY_VALUE;
client.minute = MINUTE;

client.setAndExprire = function(key, value, expire) {
  this.set(key, value);
  this.expire(key, expire);
};

// Here is the problem
client.get = function(key, cb) {
  if (config.get('disable-cache') === 'true') return cb(null, null);
  return client.get(key, cb);
}

module.exports = client;
Since you override client.get, client.get becomes the new function you defined, so you can no longer reach the original function that comes with the redis package (your new get ends up calling itself). You can use another object (custom) to forward calls to the redis functions, like below:
'use strict';

const redis = require('redis');
const config = require('../config');

const REDIS_EMPTY_VALUE = 'NOT_EXIST';
const MINUTE = 60;

const definedFunctions = [
  'hgetall', 'hexists', 'hmset', 'hmget', 'hkeys', 'hvals', 'hget', 'hset', 'hdel',
  'mget', 'mset', 'set', 'del', 'exists', 'lpush', 'lrange',
  'zrange', 'zrem', 'zadd', 'zrangebyscore', 'zrevrangebyscore',
  'expire', 'incrby'
];

let client = redis.createClient({
  host: config.get('REDIS_HOST'),
  port: config.get('REDIS_PORT')
});

client.on("error", function (err) {
  console.error('Redis Error: ' + err);
  throw new Error(err);
});

/**
 * Next are custom extensions for Redis module
 */
const custom = {
  emptyValue: REDIS_EMPTY_VALUE,
  minute: MINUTE
};

// Forward each listed command from the wrapper object to the real client
definedFunctions.map((fn) => {
  custom[fn] = (...args) => {
    return client[fn](...args);
  };
});

custom.get = function(key, cb) {
  if (config.get('disable-cache') === 'true') return cb(null, null);
  return client.get(key, cb);
}

module.exports = custom;
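A quick usage sketch for the wrapper above (the require path is an assumption):

// Hedged example: consumers use the wrapper like the plain redis client,
// but get() short-circuits to (null, null) when caching is disabled.
const cache = require('./cache'); // path to the wrapper module above

cache.get('some:key', (err, value) => {
  if (err) return console.error(err);
  console.log(value); // null when config 'disable-cache' is 'true'
});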