I've been searching for a long time for MQTT.js examples of structure and best practices and haven't found anything worthwhile. Thus, [main]: how do you structure your MQTT.js code in your Node/Express application?
[1] The mqttjs/async-mqtt libraries provide some examples of connecting and handling messages, but in a real app with lots of subscriptions and publishes, how do you structure the code so that it initializes in app.js and uses the same client (returned from mqtt.connect) for all the sub/pub in different files?
[2] Following from question [1]: should my app use only one client for all the work, or can it use multiple clients as needed across multiple files? Say I have three files: mqttInit, subscriber, and publisher. If I run the init in subscriber and get a client, should I export it, or just create a new client instance in the publisher file?
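For illustration, here is a minimal sketch of the single-shared-client approach I mean (the file name mqttClient.js and the topic strings are made up):

// mqttClient.js - connect once and export the one client
const mqtt = require("mqtt");

// Node caches this module, so every require("./mqttClient")
// returns this same client instance
const client = mqtt.connect("mqtt://xxxxxxxxxxx");

client.on("error", (err) => console.log(err));
client.once("connect", () => console.log("connected to MQTT server"));

module.exports = client;

// subscriber.js - reuses the shared client
const client = require("./mqttClient");
client.subscribe("some/topic");

// publisher.js - same instance again, no second connection
const client = require("./mqttClient");
client.publish("some/topic", "payload");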
[3] The mqttjs API provides only a single on('message') handler, so messages for all subscribed topics arrive in one place; I therefore use a switch or an if/else chain to dispatch them. If we have a lot of topics, how do you manage this?
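Concretely, that single handler ends up looking roughly like this (topic names and the handler functions are hypothetical):

// every subscribed topic funnels through this one handler
client.on("message", (topic, buffer) => {
  switch (topic) {
    case "device/status":
      handleDeviceStatus(buffer); // hypothetical handler
      break;
    case "sensor/status":
      handleSensorStatus(buffer); // hypothetical handler
      break;
    default:
      console.log("unhandled topic:", topic);
  }
});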
[4] My current setup is kind of messed up. This is the initializer file, let's say:
mqttService.js
const mqtt = require("mqtt");
const { readFileSync } = require("fs");

module.exports = class mqttService {
  constructor() {
    this.client = mqtt.connect("mqtt://xxxxxxxxxxx", {
      cert: readFileSync(process.cwd() + "/certificates/client.crt"),
      key: readFileSync(process.cwd() + "/certificates/client.key"),
      rejectUnauthorized: false,
    });
    this.client.on("error", (err) => {
      console.log(err);
    });
    this.client.once("connect", () => {
      console.log("connected to MQTT server");
    });
  }
};
subscriber.js
This is the function (subscribe()) that I call in app.js to initialize the MQTT side:
const { sendDeviceStatus, sendSensorStatus } = require("../socketApi");
const { client } = new (require("./mqttService"))();

function subscribe() {
  let state = {
    timer: false,
  };
  ...
  let topics = {
    ....
  };

  client.subscribe([...]);

  client.on("message", async (topic, buffer) => {
    if (topic) {
      ...
    }
  });
}

module.exports = {
  subscribe,
  client,
};
publish.js
const { AsyncClient } = require("async-mqtt");
const _client = require("./subscribe").client;
const client = new AsyncClient(_client);

async function sendSensorList(daqId) {
  let returnVal = await client.publish(
    `${daqId}-GSL-DFC`,
    JSON.stringify(publishObject),
    { qos: 1 }
  );
  console.log(returnVal);
  return publishObject;
}
.....

module.exports = {
  sendSensorList,
  .......
};
So, as you can see from the above code, everything is tangled together and messy, which is why I need some guidance on how you structure your code. Thanks for reading; please feel free to share any info, it is much appreciated.
I'm using SvelteKit and trying to understand all the new features added after retiring Sapper. One of those new features is hooks.js, which runs on the server and is not accessible to the frontend. That makes dealing with the db safe. So I created a connection to my MongoDB to retrieve the user's data before using the db results in my getSession function. It works, but I noticed that it accesses my database TWICE. Here is my hooks.js code:
import * as cookie from 'cookie';
import { connectToDatabase } from '$lib/mongodb.js';

export const handle = async ({ event, resolve }) => {
  const dbConnection = await connectToDatabase();
  const db = dbConnection.db;
  const userinfo = await db.collection('users').findOne({ username: "a" });
  console.log("db user is :", userinfo); // username : John
  const response = await resolve(event);
  response.headers.set(
    'set-cookie', cookie.serialize("cookiewithjwt", "sticksafterrefresh")
  );
  return response;
};

export const getSession = (event) => {
  return {
    user: {
      name: "whatever"
    }
  };
};
The console.log you see here prints the user data twice: once as soon as I fire up my app at localhost:3000 with npm run dev:

db user is : John

and then, a second later, without clicking on anything, a second console.log prints:

db user is : John

So my understanding from the SvelteKit docs is that hooks.js runs every time SvelteKit receives a request. I removed all prerender and prefetch from my code. I made sure I only have index.svelte in my app, but it still prints twice. The connection code I copied from an online post includes the following comment:
/**
* Global is used here to maintain a cached connection across hot reloads
* in development. This prevents connections growing exponentially
* during API Route usage.
*/
Here is my connection code:
import { MongoClient } from 'mongodb';

const mongoURI = "mongodb+srv://xxx:xxx#cluster0.qjeag.mongodb.net/xxxxdb?retryWrites=true&w=majority";
const mongoDB = "xxxxdb";

export const MONGODB_URI = mongoURI;
export const MONGODB_DB = mongoDB;

if (!MONGODB_URI) {
  throw new Error('Please define the mongoURI property inside config/default.json');
}
if (!MONGODB_DB) {
  throw new Error('Please define the mongoDB property inside config/default.json');
}

/**
 * Global is used here to maintain a cached connection across hot reloads
 * in development. This prevents connections growing exponentially
 * during API Route usage.
 */
let cached = global.mongo;
if (!cached) {
  cached = global.mongo = { conn: null, promise: null };
}

export const connectToDatabase = async () => {
  if (cached.conn) {
    return cached.conn;
  }
  if (!cached.promise) {
    const opts = {
      useNewUrlParser: true,
      useUnifiedTopology: true
    };
    cached.promise = MongoClient.connect(MONGODB_URI, opts).then((client) => {
      return {
        client,
        db: client.db(MONGODB_DB)
      };
    });
  }
  cached.conn = await cached.promise;
  return cached.conn;
};
So my question is: does hooks.js always run twice, once on the server and once on the front end? If not, why is hooks.js running/printing the db results twice in my case?
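One way to narrow this down would be to log which path each invocation is serving, since handle runs for every request the dev server receives (a second request, such as a favicon or a data fetch, would show up here). A minimal sketch, assuming event.url is available in your SvelteKit version:

export const handle = async ({ event, resolve }) => {
  // log the requested path to see what triggers each run of this hook
  console.log('handle called for:', event.url.pathname);
  return await resolve(event);
};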
I'll start with an example of how I set up my tests for a backend server. TL;DR at bottom.
This file represents my server:
//server.js
const express = require('express');

class BackendServer {
  constructor(backingFileDestination, database) {
    this.server = express();
    /* Set up server ... */
    /* Set up a mysql connection ... */
  }

  closeMySQLConnectionPool = () => {
    /* Close the mysql connection ... */
  };
}

module.exports = BackendServer;
In my package.json, I have the following:
"jest": {
"testEnvironment": "node",
"setupFilesAfterEnv": [
"<rootDir>/tests/setupTests.js"
]
}
Which allows me to use this setup file for my tests:
//setupTests.js
const { endpointNames } = require('../../src/common/endpointNames.js');
const BackendServer = require('../server.js');

const server = new BackendServer(
  '../backing_files_test/',
  'test',
);
const supertester = require('supertest')(server.server);

// drop, recreate, and populate the database once before any tests run
beforeAll(async () => {
  await supertester.post(endpointNames.DROP_ALL_TABLES);
  await supertester.post(endpointNames.CREATE_ALL_TABLES);
  await supertester.post(endpointNames.POPULATE_ALL_TABLES);
});

// clean up the local setupTests server instance after all the tests are done
afterAll(async () => {
  await server.closeMySQLConnectionPool();
});
Notice how I had to import the BackendServer class and instantiate it, then use that instance of it.
Now, I have other test files, for example test1.test.js:
//test1.test.js
const { endpointNames } = require('../../src/common/endpointNames.js');
const BackendServer = require('../server.js');

const server = new BackendServer(
  '../backing_files_test/',
  'test',
);
const supertester = require('supertest')(server.server);

// clean up the local server instance after all tests are done
afterAll(async () => {
  await server.closeMySQLConnectionPool();
});

test('blah blah', () => {
  /* Some test ... */
});
The problem is that when I go to write test2.test.js, it will be the same as test1.test.js. For every test file, I need to instantiate a new server and then have a separate afterAll() call to clean up that server's SQL connection. I can't put that afterAll() inside setupTests.js because it needs to operate on test1.test.js's local server instance.
TL;DR: Each of my test files instantiates a new instance of my server. What I want is to instantiate the server once in setupTests.js and then simply use that instance in all my tests. Is there a good way to share this single instance between all my test files?
I was able to figure out a way to achieve what I wanted. It involves instantiating variables in setupTests.js and then exporting getters for them.
//setupTests.js
const { endpointNames } = require('../../src/common/endpointNames.js');
const BackendServer = require('../server.js');

const server = new BackendServer(
  '../backing_files_test/',
  'test',
);
const supertester = require('supertest')(server.server);

// drop, recreate, and populate the database once before any tests run
beforeAll(async () => {
  await supertester.post(endpointNames.DROP_ALL_TABLES);
  await supertester.post(endpointNames.CREATE_ALL_TABLES);
  await supertester.post(endpointNames.POPULATE_ALL_TABLES);
});

// clean up the local setupTests server instance after all the tests are done
afterAll(async () => {
  await server.closeMySQLConnectionPool();
});

const getServer = () => { // <==== ADD THESE 2 FUNCTIONS
  return server;
};
const getSupertester = () => { // <==== ADD THESE 2 FUNCTIONS
  return supertester;
};

module.exports = { getServer, getSupertester }; // <==== EXPORT THEM
I added a couple of functions to the end of setupTests.js that, when called, return whatever the local variables point to at the time. In this case, server and supertester are declared with const, so I think I could have exported them directly, but in other cases I had variables I wanted to share that were declared with let, so I left it this way.
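To illustrate why the getters matter for let variables, here is a contrived, hypothetical module (not part of the test suite above):

// state.js - why a getter beats exporting the value directly
let counter = 0;

// copies the current value (0) at require() time
module.exports.counterSnapshot = counter;

// a getter reads whatever the variable points to when called
module.exports.getCounter = () => counter;
module.exports.bump = () => { counter += 1; };

// consumer.js
// const { counterSnapshot, getCounter, bump } = require('./state.js');
// bump();
// console.log(counterSnapshot); // still 0
// console.log(getCounter());    // 1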
Now I have functions exported from setupTests.js that I can import in my test files like this:
//test1.test.js
const { endpointNames } = require('../../src/common/endpointNames.js');
const { getServer, getSupertester } = require('./setupTests.js');

test('blah blah', () => {
  /* Some test using getSupertester().post() or getServer().someFunction() ... */
});
So now I can have local variables inside setupTests.js that are accessible in my .test.js files.
This makes my whole testing process cleaner because I only need to set up and tear down one server, which means only 1 connection pool for my SQL server and less code duplication of having to instantiate and clean up a new server in every .test.js file.
Cheers.
I'm trying to implement a queue in Node.js using BullMQ, but I have some issues in production when trying to use a remote Redis (Heroku Redis or Redis Cloud). Locally, everything works well, but when I use a REDIS_URL, a job is created but the events don't fire.
Here is the code:
// test_job.js
import { Queue, Worker, QueueEvents } from "bullmq";
import IORedis from "ioredis";
import Dotenv from "dotenv";

Dotenv.config();

// Good
const connection = new IORedis(process.env.REDIS_URL || 6379);

// Good
const queue = new Queue("Paint", { connection });

// Good
const worker = new Worker(
  "Paint",
  async job => {
    if (job.name === "cars") {
      console.log(job.data.color);
    }
  },
  { connection }
);

/**
 * BUG HERE: Events work in local but not when using a remote Redis (REDIS_URL)
 */
const queueEvents = new QueueEvents("Paint");
queueEvents.on("completed", jobId => {
  console.log("done painting");
});

queue.add("cars", { color: "blue" });
const queueEvents = new QueueEvents("Paint", { connection: connection.duplicate() });
https://github.com/taskforcesh/bullmq/issues/173
I have the Node.js module pg-promise instantiated as follows.
const pgp = require('pg-promise')();

// Database connection details;
const cn = {
  host: 'localhost', // 'localhost' is the default;
  ...
}

// Create db connection and verify it
var db = pgp(process.env.DATABASE_URL || cn);
db.one('Select version()')
  .then(data => {
    log.info('Connected: ', data);
  })
  .catch(error => {
    log.error("Error connecting to db", error);
  });

// extension methods
db.findById = function (table, id) {
  log.debug('read ', table, id);
  return db.one('Select * from ' + table + ' where id = $1', id);
};

module.exports = db;
The db object is an instance of the interface type pgPromise.IDatabase<{}, pg.IClient>. I would like to be able to call the functions provided by this lib along with my own functions:
const db = require('../db');

db.any('Select query..')
  .then(data => { res.send(data); })
  .catch(err => { log.error(err); });

db.findById('users', 1)
  .then(data => { res.send(data); })
  .catch(err => { log.error(err); });
But when I run it I get the error
TypeError: db.findById is not a function
I tried this too but with the same effect.
module.exports = db;
module.exports.findById = function()...;
The only solution I could come up with was this:
module.exports = {
  db: db,
  findById: function(){
    ...
  }
}
But now it is ugly to use in other modules, as I always need to ask specifically for the db property.
From the author of pg-promise.
The database protocol in pg-promise is extendable, supporting event extend, which lets you extend the protocol on all levels. You need this level of automation because, when it comes to the essential Tasks and Transactions, which encapsulate the allocated connection, the protocol becomes dynamic, and so you need a special provision to make the protocol extension work automatically, which is exactly what event extend does.
To help understand it better, I wrote pg-promise-demo, which shows how to do it correctly, plus some other high-level stuff that comes in useful most of the time.
pg-promise seems to use an annoying pattern where they freeze every object and make every property read-only, so you'll be unable to simply add properties to it manually like you're attempting. The library supports extensions in the extend property of initOptions like this:
const initOptions = {
  extend(obj, dc) {
    obj.findById = function() {
      ...
    }
    //add other extension methods or properties here
  }
};

const pgp = require('pg-promise')(initOptions);
//now any databases created with pgp will contain those extension methods
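Tying this back to the question's own code, a sketch of what the initialization module might then look like (connection details elided as in the original):

// db.js - initialize pg-promise with the extension hook
const initOptions = {
  extend(obj, dc) {
    // the question's findById, attached via the supported mechanism
    obj.findById = (table, id) =>
      obj.one('Select * from ' + table + ' where id = $1', id);
  }
};

const pgp = require('pg-promise')(initOptions);
const db = pgp(process.env.DATABASE_URL);
module.exports = db;

// elsewhere, both built-in and extension methods now work on one object:
// const db = require('../db');
// db.any('Select query..').then(data => { ... });
// db.findById('users', 1).then(data => { ... });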
Alternatively, you can define a Proxy over your export object that defers either to the db or to your own custom function:
const extension = {
  findById: function() {
    ...
  },
  //other functions
};

module.exports = new Proxy(extension, {
  get(target, name) {
    if (db[name] !== undefined) return db[name];
    return target[name];
  }
});
But you should prefer the natively supported way to do this using initOptions.
I am trying to trigger a side effect (sending a notification via socket.io) when a new record is added in Strapi. The socket setup is OK: emitting from the back end (Strapi API) to the front end works.
I followed the docs on customizing controllers and the recommendations in this Stack Overflow thread, but that didn't help. Nothing happens when I change the controller - I tried to break it by replacing the create function body with just return null; or a console.log(), but still nothing. Here's ../controllers/Orders.js:
'use strict';
const { parseMultipartData, sanitizeEntity } = require('strapi-utils');

module.exports = {
  async create(ctx) {
    let entity;
    if (ctx.is('multipart')) {
      const { data, files } = parseMultipartData(ctx);
      entity = await strapi.api.order.services.order.create(data, { files });
    } else {
      entity = await strapi.api.order.services.order.create(ctx.request.body);
    }
    strapi.emitToAllUsers(entity);
    return sanitizeEntity(entity, { model: strapi.query('order').model });
  },
};
strapi.emitToAllUsers() is defined in bootstrap.js. Connection-ready messages and other emitted data are received on the front end, but the code inside the controller seems never to be invoked at all. Here's the boilerplate from bootstrap.js:
'use strict';
require('dotenv').config({ path: require('find-config')('.env') });

module.exports = () => {
  var io = require('socket.io')(strapi.server);
  var users = [];

  io.on('connection', socket => {
    socket.user_id = (Math.random() * 100000000000000); // not so secure
    users.push(socket); // save the socket to use it later

    socket.on('disconnect', () => {
      users.forEach((user, i) => {
        // delete saved user when they disconnect
        if (user.user_id === socket.user_id) {
          users.splice(i, 1);
        }
      });
    });

    io.emit('emit_test');
  });

  strapi.io = io;

  // send to all users connected
  strapi.emitToAllUsers = (order) => {
    io.emit('new_order', order);
  };
};
API controllers in ./api/blabla/controllers/Blabla.js are only called by your REST API. To make sure the admin panel works in all circumstances, the Content Manager plugin uses its own functions. If you want something to apply to both the REST API and the admin panel, you will have to customize the lifecycle functions. But you will have less flexibility, because of some issues you can find in this thread: https://github.com/strapi/strapi/issues/1443
Source: Strapi
You have to do it from the lifecycles object in the model, not from the controller: the controller only covers the API layer, while the model covers the database/Strapi layer.
https://strapi.io/documentation/v3.x/concepts/models.html#concept
// in the order model (e.g. api/order/models/order.js - path assumed)
module.exports = {
  lifecycles: {
    afterCreate: async (result, data) => {
      // runs after a record is created, including from the Strapi admin
      strapi.emitToAllUsers(result);
    }
  }
}
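With the lifecycle hook in place, the emit fires for records created through both the REST API and the admin panel, which is exactly what the controller-only approach was missing.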