I have the Node.js module pg-promise instantiated as follows.
const pgp = require('pg-promise')();

// Database connection details;
const cn = {
  host: 'localhost', // 'localhost' is the default;
  ...
};

// Create db connection and verify it
var db = pgp(process.env.DATABASE_URL || cn);

db.one('Select version()')
  .then(data => {
    log.info('Connected: ', data);
  })
  .catch(error => {
    log.error("Error connecting to db", error);
  });

// extension methods
db.findById = function (table, id) {
  log.debug('read ', table, id);
  return db.one('Select * from ' + table + ' where id = $1', id);
};

module.exports = db;
The db object is an instance of the interface type pgPromise.IDatabase<{}, pg.IClient>.
I would like to be able to call the functions provided by this library alongside my own functions:
const db = require('../db');

db.any('Select query..')
  .then(data => { res.send(data); })
  .catch(err => { log.error(err); });

db.findById('users', 1)
  .then(data => { res.send(data); })
  .catch(err => { log.error(err); });
But when I run it I get the error
TypeError: db.findById is not a function
I tried this too, but with the same effect:
module.exports = db;
module.exports.findById = function()...;
The only solution I could come up with was this:
module.exports = {
  db: db,
  findById: function () {
    ...
  }
};
But it is now ugly to use in other modules, as I always need to ask specifically for the db property.
From the author of pg-promise:
Database protocol in pg-promise is extendable, supporting event extend that lets you extend the protocol on all levels. You need this level of automation, because when it comes to the essential Tasks and Transactions, which encapsulate the allocated connection, the protocol becomes dynamic, and so you need a special provision to make the protocol extension work automatically, which is exactly what event extend does.
In order to understand it better, I wrote pg-promise-demo to show how to do it correctly, plus some other high-level stuff that comes useful most of the time.
pg-promise seems to use an annoying pattern where they freeze every object and make every property read-only, so you'll be unable to simply add properties to it manually like you're attempting. The library supports extensions in the extend property of initOptions like this:
const initOptions = {
  extend(obj, dc) {
    obj.findById = function () {
      ...
    };
    // add other extension methods or properties here
  }
};
const pgp = require('pg-promise')(initOptions);
//now any databases created with pgp will contain those extension methods
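For example, once registered this way, the extension is available on the database object and, because extend fires for every protocol object, inside tasks and transactions too. A minimal sketch, assuming the findById extension above (cn stands in for your real connection settings):

const pgp = require('pg-promise')(initOptions);
const db = pgp(cn); // cn = your connection object or string

// the extension is available on the root database object...
db.findById('users', 1).then(user => console.log(user));

// ...and automatically inside tasks and transactions as well:
db.task(t => t.findById('users', 1)).then(user => console.log(user));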
Alternatively, you can define a Proxy over your export object that defers either to the db or to your own custom function:
const extension = {
  findById: function () {
    ...
  },
  // other functions
};

module.exports = new Proxy(extension, {
  get(target, name) {
    if (db[name] !== undefined) return db[name];
    return target[name];
  }
});
But you should prefer the natively supported way to do this using initOptions.
Related
Whenever I retrieve an object from my MongoDB database, it doesn't have any methods, and I am trying to figure out (1) why, and (2) a solution.
To summarise, the following sums up the problem:
(1) I can create an object and its methods work:
const newUser = new User(email, hashedPassword);
console.log(newUser.test); // correct - it’s not undefined
(2) I can insert the instance (with its methods) into the database (via saveToDb):
const insertedUser = await collection.insertOne(this);
(3) I can retrieve it from the database (via findByEmail):
const user = await collection.findOne({ email });
(4) But it doesn’t have any methods anymore:
if (!user) return; // temporary code
console.log(user); // correctly displays object keys and values
console.log('user.test', user.test); // undefined - why?
Why does this happen? I've read a few other posts about it, but they all seem to use Mongoose, which I do not use in my app (maybe I should?). This post, which I think describes a similar issue, did not get an answer.
Any insight would be appreciated, thank you.
In case it's needed, here's the class:
export class User {
  email: string;
  hashedPassword: any;
  dateCreated: Date;

  constructor(email: string, hashedPassword: any) {
    this.email = email; // make email.toLowerCase();
    this.hashedPassword = hashedPassword;
    this.dateCreated = new Date();
  }

  async saveToDb() {
    try {
      const collection = getCollection(USERS_COLLECTION_NAME);
      const sanitisedEmail = this.email.toLowerCase();
      const insertedUser = await collection.insertOne(this);
      console.log('THIS HAS BEEN INSERTED TO DB:', this);
      console.log('this.test:', this.test); // works
      this.test();
      const token = jwt.sign(insertedUser, sanitisedEmail, {
        expiresIn: 60 * 24,
      });
      return token;
    } catch (err) {
      throw err;
    }
  }

  test() {
    console.log('test() within class Fn');
    return 5;
  }

  static staticTest() {
    console.log('staticTest() within class Fn');
    return 6;
  }

  signToken() {
    const token = jwt.sign(this, this.email, {
      expiresIn: 60 * 24,
    });
    return token;
  }

  static async fetchAll() {
    try {
      const collection = getCollection(USERS_COLLECTION_NAME);
      const users = await collection.find().toArray();
      return users;
    } catch (err) {
      throw err;
    }
  }

  static async findByEmail(email: string) {
    try {
      const collection = getCollection(USERS_COLLECTION_NAME);
      const user = await collection.findOne({ email });
      if (!user) return; // temporary code
      console.log('FOUND THIS USER: ', user); // works
      console.log('user.test', user.test); // undefined - key problem lies here...
      return user;
    } catch (err) {
      throw err;
    }
  }
}
The objects you get back via query methods such as findOne will be plain objects. They are not instances of your class: the objects were sent and saved as plain data in the database, which does not include class information, prototypes, or methods.
So what you can do is change the prototype of the object you get back. Or better, create a new instance of your class with Object.create and inject the properties with Object.assign:
const user = Object.assign(
  Object.create(User.prototype),
  await collection.findOne({ email })
);
Now your user object will again have access to the prototype methods.
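With that in place, the prototype methods from the question work again:

console.log(user.test); // [Function: test] instead of undefined
user.test();            // logs 'test() within class Fn' and returns 5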
For completeness' sake, I'll also mention the alternative:
const user = await collection.findOne({ email });
Object.setPrototypeOf(user, User.prototype);
But read the disclaimer provided by Mozilla Contributors on setPrototypeOf:
Changing the [[Prototype]] of an object is, by the nature of how modern JavaScript engines optimize property accesses, currently a very slow operation in every browser and JavaScript engine. In addition, the effects of altering inheritance are subtle and far-flung, and are not limited to the time spent in the Object.setPrototypeOf(...) statement, but may extend to any code that has access to any object whose [[Prototype]] has been altered.
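If you rebuild instances like this in several places, one way to centralize it is a small static factory on the class. A sketch; fromDocument is a hypothetical helper, not part of the original code:

export class User {
  // ...existing fields, constructor, and methods...

  // hypothetical helper: rebuild a User instance from a plain MongoDB document
  static fromDocument(doc) {
    return Object.assign(Object.create(User.prototype), doc);
  }
}

// usage inside findByEmail:
const user = User.fromDocument(await collection.findOne({ email }));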
So I've been searching for a long time for mqtt.js examples of structure and best practices and haven't found anything worthwhile. Thus [main]: how do you structure your mqtt.js code in your Node/Express application?
[1] The libraries mqttjs/async-mqtt provide some examples of connecting and handling messages, but in a real app with lots of subscriptions and publishes, how do you structure the code so that it initializes in app.js and uses the same client (returned from mqtt.connect) for all the sub/pub in different files?
[2] Following on from [1]: should my app use only one client for all the work, or can it use multiple clients as needed across multiple files? (Say I have three files: mqttInit, subscriber, publisher. If I run the init in subscriber and get a client, should I export it, or just make a new client instance in the publisher file?)
[3] The mqtt.js API provides only a single message event, so messages for all subscribed topics arrive in one place. I put a switch or if/else there to manage this; if we have a lot of topics, how do you manage it?
[4] My current setup is kind of messed up.
This is the initializer file, let's say:
mqttService.js
const mqtt = require("mqtt");
const { readFileSync } = require("fs");
module.exports = class mqttService {
  constructor() {
    this.client = mqtt.connect("mqtt://xxxxxxxxxxx", {
      cert: readFileSync(process.cwd() + "/certificates/client.crt"),
      key: readFileSync(process.cwd() + "/certificates/client.key"),
      rejectUnauthorized: false,
    });
    this.client.on("error", (err) => {
      console.log(err);
    });
    this.client.once("connect", () => {
      console.log("connected to MQTT server");
    });
  }
};
subscriber.js
This is the function (subscribe()) that I call in app.js to initialize the MQTT side:
const { sendDeviceStatus, sendSensorStatus } = require("../socketApi");
const { client } = new (require("./mqttService"))();

function subscribe() {
  let state = {
    timer: false,
  };
  ...
  let topics = {
    ....
  };

  client.subscribe([...]);

  client.on("message", async (topic, buffer) => {
    if (topic) {
      ...
    }
  });
}

module.exports = {
  subscribe,
  client,
};
publish.js
const { AsyncClient } = require("async-mqtt");
const _client = require("./subscribe").client;
const client = new AsyncClient(_client);
async function sendSensorList(daqId) {
  let returnVal = await client.publish(
    `${daqId}-GSL-DFC`,
    JSON.stringify(publishObject),
    { qos: 1 }
  );
  console.log(returnVal);
  return publishObject;
}
.....
module.exports = {
  sendSensorList,
  .......
};
So, as you can see from the above code, everything is kind of linked together and messed up; thus I need some exposition on how you structure your code.
Thanks for reading; any info is much appreciated.
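There is no single canonical layout, but a common approach is to create the client exactly once in its own module and let every other file require it; Node's module cache guarantees they all get the same instance. A minimal sketch (the broker URL and file name are placeholders, not from the original post):

// mqttClient.js - create and export one shared client
const mqtt = require("mqtt");

const client = mqtt.connect("mqtt://broker.example.com");

client.on("error", (err) => console.log(err));
client.once("connect", () => console.log("connected to MQTT server"));

// registry of topic -> handler, so each module can register its own
// handler instead of growing one big switch inside on("message")
const handlers = new Map();

client.on("message", (topic, payload) => {
  const handler = handlers.get(topic);
  if (handler) handler(payload);
});

function subscribe(topic, handler) {
  handlers.set(topic, handler);
  client.subscribe(topic);
}

module.exports = { client, subscribe };

With this, subscriber.js and publish.js both require("./mqttClient") and share one connection, which addresses [2] (one client is normally enough; export it rather than re-instantiating) and [3] (per-topic handlers replace the big if/else).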
I am trying to trigger a side effect (send notification, using socket.io) when adding new record in Strapi. The socket setup is OK, successfully emitting from back-end (Strapi API) to front-end.
I followed the docs on customizing controllers and the recommendations in this Stack Overflow thread, but it didn't help. Nothing happens when I change the controller; I even tried to break it by replacing the create function body with just return null; or console.log(), but still nothing. Here's ../controllers/Orders.js:
'use strict';
const { parseMultipartData, sanitizeEntity } = require('strapi-utils');
module.exports = {
  async create(ctx) {
    let entity;
    if (ctx.is('multipart')) {
      const { data, files } = parseMultipartData(ctx);
      entity = await strapi.api.order.services.order.create(data, { files });
    } else {
      entity = await strapi.api.order.services.order.create(ctx.request.body);
    }
    strapi.emitToAllUsers(entity);
    return sanitizeEntity(entity, { model: strapi.query('order').model });
  },
};
strapi.emitToAllUsers() is defined in bootstrap.js. Connection-ready messages and other emitted data are received on the front end, but the stuff inside the controller seems not to be invoked at all. Here's the boilerplate from bootstrap.js:
'use strict';
require('dotenv').config({ path: require('find-config')('.env') });
module.exports = () => {
  var io = require('socket.io')(strapi.server);
  var users = [];

  io.on('connection', socket => {
    socket.user_id = (Math.random() * 100000000000000); // not so secure
    users.push(socket); // save the socket to use it later

    socket.on('disconnect', () => {
      users.forEach((user, i) => {
        // delete saved user when they disconnect
        if (user.user_id === socket.user_id) {
          users.splice(i, 1);
        }
      });
    });

    io.emit('emit_test');
  });

  strapi.io = io;

  // send to all connected users
  strapi.emitToAllUsers = (order) => {
    io.emit('new_order', order);
  };
};
API controllers (./api/blabla/controllers/Blabla.js) are only called by your REST API. To make sure the Admin panel works under all circumstances, the Content Manager plugin uses its own functions. If you want to apply something to both the REST API and the Admin panel, you will have to customize the lifecycle functions. But you will have less flexibility, because of some issues you can find in this thread: https://github.com/strapi/strapi/issues/1443
Source: Strapi
You have to do it from the lifecycles object in the model, not from the controller: the controller covers only the API layer, while the model covers the database/Strapi layer.
https://strapi.io/documentation/v3.x/concepts/models.html#concept
module.exports = {
  lifecycles: {
    afterCreate: async (result, data) => {
      // fires after a create, including entries created from the Strapi admin
      strapi.emitToAllUsers(result);
    },
  },
};
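(In Strapi v3, this lifecycles object goes in the model file of the API, e.g. ./api/order/models/order.js next to order.settings.json; adjust the path to your API name.)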
I've been working on a Node project that involves fetching some data from BigQuery. Everything has been fine so far; I have my credential.json file (from BigQuery) and the project works as expected.
However, I want to implement a new feature in the project and this would involve fetching another set of data from BigQuery. I have an entirely different credential.json file for this new dataset. My project seems to recognize only the initial credential.json file I had (I named them differently though).
Here's a snippet of how I linked my first credential.json file:
function createCredentials() {
  try {
    const encodedCredentials = process.env.GOOGLE_AUTH_KEY;
    if (typeof encodedCredentials === 'string' && encodedCredentials.length > 0) {
      const google_auth = atob(encodedCredentials);
      if (!fs.existsSync('credentials.json')) {
        // writeFile's callback only receives an error argument
        fs.writeFile('credentials.json', google_auth, function (err) {
          if (err) return console.log(err);
          console.log('Successfully Written to File.');
        });
      }
    }
  } catch (error) {
    logger.warn(`Ensure that the environment variable GOOGLE_AUTH_KEY is set correctly; the full error is: ${error.message}`);
    process.kill(process.pid, 'SIGTERM');
  }
}
Is there a way to fuse my two credential.json files together? If not, how can I separately declare which credential.json file to use?
If not, how can I separately declare which credential.json file to use?
What I would do is create a function that is the single exit point to BigQuery, and pass it an identifier telling it which credential to generate; that credential is then used when calling BigQuery.
The code below assumes you changed this:
function createCredentials(){
try{
const encodedCredentials = process.env.GOOGLE_AUTH_KEY;
To this:
function createCredentials(auth){
try{
const encodedCredentials = auth;
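Filled out, the modified function could look like this (a sketch: writing one file per distinct credential and returning its path are assumptions I'm adding, so the result can be passed straight to keyFilename):

const fs = require('fs');
const crypto = require('crypto');

function createCredentials(auth) {
  // one file per distinct credential; naming it by a hash of the payload
  // is an assumption - any stable unique id would do
  const hash = crypto.createHash('md5').update(auth).digest('hex');
  const fileName = `credentials-${hash}.json`;
  if (typeof auth === 'string' && auth.length > 0 && !fs.existsSync(fileName)) {
    // Buffer.from(..., 'base64') is the Node-side equivalent of atob()
    fs.writeFileSync(fileName, Buffer.from(auth, 'base64').toString('utf8'));
  }
  return fileName;
}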
And you can use it like this:
import { BigQuery } from '@google-cloud/bigquery';
import { GoogApi } from "../apiManager"; // private code to get the token from the client DB

if (!global._babelPolyfill) {
  var a = require("babel-polyfill");
}

describe('Check routing', () => {
  it('Test stack', async () => {
    // Fetch the client auth from your local database; GoogApi is the
    // private code mentioned above, so this exact call is a placeholder:
    const auth = await GoogApi.getAuth();
    // Replace the two values below with real values
    const tableName = "myTest";
    const dataset = "myDataset";
    try {
      const bigquery = new BigQuery({
        projectId: `myProject`,
        keyFilename: createCredentials(auth) // returns the path written for this credential
      });
      await bigquery.createDataset(dataset)
        .then(args => {
          console.log(`Create dataset, result is: ${args}`);
        })
        .catch(err => {
          console.log(`Error in the process: ${err.message}`);
        });
    } catch (err) {
      console.log("err", err);
    }
  });
});
How do I pass a programmatically populated array of links to a service worker script for caching?
I am generating the array in cachelist.js like this:
const fs = require('fs');
const path = require('path');
require('dotenv').config();
var cachedItems = ['/'];
function walkSync(currentDirPath, callback) {
  fs.readdirSync(currentDirPath).forEach(function (name) {
    var filePath = path.join(currentDirPath, name);
    var stat = fs.statSync(filePath);
    if (stat.isFile()) {
      callback(filePath, stat);
    } else if (stat.isDirectory()) {
      walkSync(filePath, callback);
    }
  });
}

walkSync('./pages/', function (filePath, stat) {
  // strip the leading 'pages' directory, leaving a root-relative URL
  const cachedItem = filePath.substr(5);
  if (cachedItem.indexOf('_') == -1) {
    cachedItems.push(cachedItem);
  }
});
module.exports = { cachedItems };
And then I'm trying to use this cachedItems array in /offline/serviceWorker.js as follows:
const URLSTOCACHE = require("../cachelist.js");
const CACHE_NAME = "version-0.0.46";
// Call install event
self.addEventListener("install", e => {
  e.waitUntil(
    caches
      .open(CACHE_NAME)
      .then(cache => cache.addAll(URLSTOCACHE))
      .then(() => self.skipWaiting())
  );
});

// Call fetch event
self.addEventListener("fetch", e => {
  e.respondWith(
    fetch(e.request).catch(() => caches.match(e.request))
  );
});
However, this fails with an "Uncaught ReferenceError: require is not defined." Any workaround?
require isn't a built-in browser-side utility. There are various libraries (like RequireJS) and bundlers (which rewrite the require call), but unless you're using one of those, you can't use require browser-side.
If your goal is to read that file in the service worker and add the URLs in it to the cache, use fetch to load it, then pass them to addAll.
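For instance, if you expose the generated list as JSON (a sketch; the /cachelist.json URL, and a small server route or build step that serializes cachedItems to it, are assumptions):

const CACHE_NAME = "version-0.0.46";

self.addEventListener("install", e => {
  e.waitUntil(
    // fetch the generated list instead of require()-ing it
    fetch("/cachelist.json")
      .then(response => response.json())
      .then(urlsToCache =>
        caches.open(CACHE_NAME).then(cache => cache.addAll(urlsToCache))
      )
      .then(() => self.skipWaiting())
  );
});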