I am trying to write an API to handle my IndexedDB functionality. I am having trouble storing the db object in a class, because I have to wait for the .onsuccess event from the .open() request to fire.
So I wrote a method to initialize the db:
async initializeDB() {
return new Promise((resolve, reject) => {
const {
dbVersion,
databaseName,
fieldsObjectStoreName,
filedObjectStoreKeyName
} = IndexDbParams;
// Open a connection to IndexedDB
const DbOpenRequest = window.indexedDB.open(databaseName, dbVersion);
DbOpenRequest.onsuccess = e => {
const db = DbOpenRequest.result;
// Create data stores if none exist
if (db.objectStoreNames.length < 1) {
if (!db.objectStoreNames.contains(fieldsObjectStoreName)) {
db.createObjectStore(fieldsObjectStoreName, {
keyPath: filedObjectStoreKeyName
});
}
}
// resolve with the db object; we also end up here after onupgradeneeded
resolve(db);
};
// If we need to upgrade db version
DbOpenRequest.onupgradeneeded = e => {
const db = e.target.result;
const objectStore = db.createObjectStore(fieldsObjectStoreName, {
keyPath: filedObjectStoreKeyName
});
};
});
}
Which I would then call at the beginning of all my other methods, for example:
async getData() {
return this.initializeDB().then(db => {
// do stuff with the db object
})
}
My question is: is this wasting a lot more resources than calling .open() once and then storing the connection in global state? What (if any) could be the consequences of this approach?
Definitely not wasteful.
All my projects look something like this:
function openDb(name, version) {
return new Promise(function(resolve, reject) {
var request = indexedDB.open(name, version);
request.onsuccess = () => resolve(request.result);
request.onerror = () => reject(request.error);
});
}
function doSomeDbAction() {
var db;
openDb(databaseName, dbVersion).then(function(openedDb) {
db = openedDb;
// do stuff with db
}).catch(console.warn).finally(function() {
// finally callbacks receive no arguments, so keep db in the outer scope
if (db) db.close();
});
}
function doAnotherDbAction() {
var db;
openDb(databaseName, dbVersion).then(function(openedDb) {
db = openedDb;
// do more stuff with db
}).catch(console.warn).finally(function() {
if (db) db.close();
});
}
Related
I have a function through which I would like to call specified Redis methods; it currently rejects with an error if the call times out after a specified time in ms.
const redis = require('redis');
const client = redis.createClient({ url: process.env.REDIS_URL || 'redis://localhost:6379'});
const callRedis = (fn, timeoutLimit, ...args) => new Promise((resolve, reject) => {
const timer = setTimeout(() => reject(new Error(`Command timed out after ${timeoutLimit}ms`)), timeoutLimit);
client[fn](...args, (err, res) => {
clearTimeout(timer);
if (err) {
reject(err);
} else {
resolve(res);
}
});
});
How can I adjust the above code to ensure the specified fn gets aborted once the timeoutLimit is reached?
I've found that I can create a new AbortController(), which also gives me a signal property, but I'm unsure how to use this with/in the callRedis() function.
EDIT: an example use of callRedis():
const { callRedis } = require('../redisService.js');
const ONE_SECOND = 1000;
try {
const someCachedValue = await callRedis('get', ONE_SECOND, 'someKey');
} catch (err) {
console.log('Redis call timed out!');
}
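One possible sketch, assuming the caller supplies an AbortController signal: rejecting when the signal fires stops the caller from waiting, but a command already handed to the callback-style client is not actually cancelled on the server. The callRedisAbortable name is hypothetical.

const callRedisAbortable = (fn, signal, ...args) => new Promise((resolve, reject) => {
  // Bail out immediately if the signal is already aborted
  if (signal.aborted) return reject(new Error('Aborted'));
  const onAbort = () => reject(new Error('Aborted'));
  signal.addEventListener('abort', onAbort, { once: true });
  client[fn](...args, (err, res) => {
    signal.removeEventListener('abort', onAbort);
    if (err) reject(err);
    else resolve(res);
  });
});

// Usage sketch: abort after one second
const controller = new AbortController();
setTimeout(() => controller.abort(), 1000);
// const someCachedValue = await callRedisAbortable('get', controller.signal, 'someKey');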
I came across a very complicated situation. I'll try to keep it as concise as possible.
So I have a code like this in myModule.js:
const lib = require('@third-party/lib');
const myFunction = () => {
const client = lib.createClient('foo');
return new Promise((resolve, reject) => {
client.on('error', (err) => reject(err));
client.on('success', () => {
client.as(param1).post(param2, param3, (err, data) => {
if (err) reject(err);
// Some important logical processing of data
resolve(data);
});
});
});
}
module.exports = { myFunction };
There are a few things I am able to mock, like: createClient.
What I am not able to mock is the event part (I don't even know how to approach it) and the .as().post() part.
Here's what my jest test looks like:
const myModule = require('./myModule');
const mockData = require('./mockData');
describe('myFunction', () => {
it('Should resolve promise when lib calls success event', async () => {
try {
const myData = await myModule.myFunction();
expect(myData).toMatchObject(mockData.resolvedData);
} catch (err) {
expect(err).toBeNull();
}
})
});
Any help is much appreciated.
I tried to find similar questions, but at this point my mind has just stopped working...
Please let me know if you need any more details.
Here’s what you need to do:
// EventEmitter is here to rescue you
const events = require("events");
// Mock the third party library so createClient becomes a jest mock function
jest.mock("@third-party/lib");
const lib = require("@third-party/lib");
lib.createClient.mockImplementationOnce(params => {
const self = new events.EventEmitter();
self.as = jest.fn().mockImplementation(() => {
// Since we're calling post on the same object.
return self;
});
self.post = jest.fn().mockImplementation((arg1, _cb) => {
// Can have a conditional check for arg1 if so desired
_cb(null, { data : "foo" });
});
// Finally emit the required event after a short delay,
// so the caller has time to attach its listeners first.
setTimeout(() => {
self.emit("success");
}, 10);
return self;
}).mockImplementationOnce(params => {
const self = new events.EventEmitter();
// Can also simulate an event-based error like so:
setTimeout(() => {
self.emit("error", {message: "something went wrong."});
}, 10);
return self;
}).mockImplementationOnce(params => {
const self = new events.EventEmitter();
self.as = jest.fn().mockImplementation(() => {
return self;
});
self.post = jest.fn().mockImplementation((arg1, _cb) => {
// For the negative (error) callback in post:
_cb({message: "Something went wrong"}, null);
});
setTimeout(() => {
self.emit("success");
}, 10);
return self;
});
This is only the mock object that you need to put in your test.js file.
I'm not sure this code will work exactly as is, but it shouldn't require a lot of debugging.
If you just want the positive scenario, remove the second mockImplementationOnce and replace the first mockImplementationOnce with just mockImplementation.
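For context, here is a sketch of how the test file might consume these three mocks, assuming the mock set-up above sits at the top of the same test file; the expected object shapes come straight from the mock callbacks and are illustrative only. Because mockImplementationOnce is consumed in order, the tests must run in the same order as the implementations.

const myModule = require('./myModule');

describe('myFunction', () => {
  // 1st mockImplementationOnce: 'success' event + successful post callback
  it('resolves when the client emits success and post succeeds', async () => {
    await expect(myModule.myFunction()).resolves.toMatchObject({ data: 'foo' });
  });

  // 2nd mockImplementationOnce: 'error' event
  it('rejects when the client emits error', async () => {
    await expect(myModule.myFunction()).rejects.toMatchObject({ message: 'something went wrong.' });
  });

  // 3rd mockImplementationOnce: post calls back with an error
  it('rejects when post calls back with an error', async () => {
    await expect(myModule.myFunction()).rejects.toMatchObject({ message: 'Something went wrong' });
  });
});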
I'm creating a "class" that emits events such as error, data, downloadFile and initialize. Each event is fired after a request is made, and each event is fired by a method that has the same name:
class MyClass extends EventEmitter {
constructor(data) {
super()
this.data = data
this.initialize()
.then(this.downloadFile)
.then(this.data)
.catch(this.error)
}
async initialize() {
const req = superagent.post('url...')
req.send(this.data)
const res = await req // this will actually fire the request
this.emit('initialize')
this.url = res.body
return res
}
async downloadFile() {
const req = superagent.put(this.url)
req.on('progress', (progress) => this.emit('downloadFile', progress))
const res = await req; // this will actually fire the request
//
// save to disk
//
return res
}
data() {
// Next in the sequence. And will fire the 'data' event: this.emit('data', data)
}
error(err) {
this.emit('error', err)
}
}
After that, the data method is called. My question is: is there a design pattern for firing the events in sequence without using Promises? Currently I'm using chaining, but I feel this isn't the best approach; maybe I'm wrong.
this.initialize()
.then(this.downloadFile)
.then(this.data)
.catch(this.error)
But I feel there could be a better approach.
Answers for bergi's questions:
a) Why are you using class syntax?
Because it's easier to inherit from EventEmitter, and personally I think it's more readable than using a constructor function, e.g.:
function Transformation(data) {
this.data = data
}
// Prototype stuff goes here
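For completeness, a minimal sketch of that constructor-function form with EventEmitter inheritance wired up via Node's util.inherits (method bodies omitted):

const EventEmitter = require('events');
const util = require('util');

function Transformation(data) {
  EventEmitter.call(this); // set up the emitter state on this instance
  this.data = data;
}
util.inherits(Transformation, EventEmitter);

Transformation.prototype.initialize = function() {
  // same body as the class method above
};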
b) How is this code going to be used?
I'm creating a client to interact with my API. The idea is that the user can see what is happening in the background, e.g.:
const data = {
data: {},
format: 'xls',
saveTo: 'path/to/save/xls/file.xls'
}
const transformation = new Transformation(data)
// Events
transformation.on('initialize', () => {
// Here the user knows that the transformation already started
})
transformation.on('fileDownloaded', () => {
// Here the file has been downloaded to disk
})
transformation.on('data', (data) => {
// Here the user can see details of the transformation -
// name,
// id,
// size,
// the original object,
// etc
})
transformation.on('error', () => {
// Here is self explanatory, if something bad happens, this event will be fired
})
c) What is it supposed to do?
The user will be able to transform an object with data into an Excel file.
It sounds like the transformation object you are creating is used by the caller solely for listening to the events. The user does not need a class instance with properties to get or methods to call. So don't make one. KISS (keep it super simple).
function transform(data) {
const out = new EventEmitter();
async function run() {
try {
const url = await initialise();
const data = await downloadFile(url);
out.emit('data', data);
} catch(err) {
out.emit('error', err);
}
}
async function initialise() {
const req = superagent.post('url...')
req.send(data)
const res = await req // this will actually fire the request
out.emit('initialize')
return res.body
}
async function downloadFile(url) {
const req = superagent.put(url)
req.on('progress', (progress) => out.emit('downloadFile', progress))
const res = await req; // this will actually fire the request
//
// save to disk
//
return data;
}
run();
return out;
}
It might be even simpler to leave out the (once-only?) data and error events and just return a promise, alongside the event emitter for progress notifications:
return {
promise: run(), // basically just `initialise().then(downloadFile)`
events: out
};
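A sketch of how a caller might consume that return shape (the variable names are only illustrative):

const { promise, events } = transform(data);

events.on('initialize', () => console.log('transformation started'));
events.on('downloadFile', progress => console.log('progress', progress));

promise
  .then(result => console.log('done', result))
  .catch(err => console.error('failed', err));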
If you want another way to call the events in sequence, and if you're using a Node.js version that supports async/await (ES2017), you can do the following:
class MyClass extends EventEmitter {
constructor(data) {
super();
this.data = data;
this.launcher();
}
async launcher() {
try {
await this.initialize();
await this.downloadFile();
await this.data();
}
catch(err) {
this.error(err);
}
}
initialize() {
const req = superagent.post('url...').send(this.data);
this.emit('initialize');
// req is a thenable; store the url once the response arrives, then pass it on
return req.then(res => {
this.url = res.body;
return res;
});
}
downloadFile() {
const req = superagent.put(this.url);
req.on('progress', (progress) => this.emit('downloadFile', progress))
//
// save to disk
//
return req;
}
data() {
// Next in the sequence. And will fire the 'data' event: this.emit('data', data)
}
error(err) {
this.emit('error', err)
}
}
Explanation: instead of awaiting your Promises inside each method, just return the Promises and await them at the root level, in launcher().
I have a class that loads IndexedDB. Before methods in the class can access it, IndexedDB needs to have been opened. Currently I'm using an init() method that every other method falls back to whenever this.db has not been initialized yet.
I'm looking for a cleaner way to implement what I have, which definitely isn't DRY. Essentially every method is currently implemented with the same code pattern below.
Problem points are:
1. The requirement of another method init() in order to properly handle the initialization of indexedDB.
2. The if (!this.db) { segment that ends up repeating itself later.
export default class Persist {
constructor(storage) {
if (storage) {
this.storage = storage;
}
else {
throw new Error('A storage object must be passed to new Persist()');
}
}
// needed to ensure that indexedDB is initialized before other methods can access it.
init () {
// initialize indexedDB:
const DBOpenRequest = this.storage.open('db', 1);
DBOpenRequest.onupgradeneeded = () => {
const db = DBOpenRequest.result;
db.createObjectStore('db', { keyPath: 'id', autoIncrement: true });
};
return new Promise((resolve, reject) => {
DBOpenRequest.onerror = event => {
reject(event);
};
DBOpenRequest.onsuccess = event => {
console.log(`IndexedDB successfully opened: ${event.target.result}`);
this.db = DBOpenRequest.result;
resolve(this.db);
};
});
}
toStorage(session) {
if (!this.db) {
return this.init().then(() => {
const db = this.db;
const tx = db.transaction('db', 'readwrite');
const store = tx.objectStore('db');
const putData = store.put(session.toJS());
return new Promise((resolve, reject) => {
putData.onsuccess = () => {
resolve(putData.result);
};
putData.onerror = () => {
reject(putData.error);
};
});
});
}
// basically a repeat of above
const db = this.db;
const tx = db.transaction('db', 'readwrite');
const store = tx.objectStore('db');
const putData = store.put(session.toJS());
return new Promise((resolve, reject) => {
putData.onsuccess = () => {
resolve(putData.result);
};
putData.onerror = () => {
reject(putData.error);
};
});
}
indexedDB is an asynchronous API: indexedDB.open is an asynchronous function, and it looks like you are trying to work with it in a synchronous manner. Instead of storing the IDBDatabase variable as a property of your class instance, just return it as the resolve value and manage it outside the class.
function connect(name, version) {
return new Promise((resolve, reject) => {
const request = indexedDB.open(name, version);
request.onupgradeneeded = myUpgradeHandlerFunction;
request.onsuccess = () => resolve(request.result);
request.onerror = () => reject(request.error);
request.onblocked = () => { console.log('blocked'); };
});
}
function doStuffWithConn(conn, value) {
return new Promise((resolve, reject) => {
const tx = conn.transaction(...);
const store = tx.objectStore(...);
const request = store.put(value);
request.onsuccess = () => resolve(request.result);
request.onerror = () => reject(request.error);
});
}
async function putValue(value) {
let conn;
try {
conn = await connect(...);
await doStuffWithConn(conn, value);
} catch(exception) {
console.error(exception);
} finally {
if(conn)
conn.close();
}
}
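Applied to the question's toStorage, the same helpers might collapse the repeated branches into something roughly like this sketch (the 'db', 1 arguments mirror the question's init):

async function toStorage(session) {
  let conn;
  try {
    conn = await connect('db', 1);
    // same put logic for every call; no if (!this.db) branch needed
    return await doStuffWithConn(conn, session.toJS());
  } finally {
    if (conn) conn.close();
  }
}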
Let us say I have a project with this structure:
lib/
  api.js
  thing.js
index.js
The code for each module is as follows (simplified without losing the sense of the thing).
/* lib/api.js */
"use strict";
var Api = function() {
};
Api.prototype.authenticate = function(credentials) {
var self = this;
return new Promise((resolve, reject) => {
self.token = {
id: 1,
token: "somesortoftoken"
};
resolve();
});
};
Api.prototype.request = function(data) {
var self = this;
return new Promise((resolve, reject) => {
console.log(self.token);
if (self.token) {
resolve("success");
}else{
reject("failure");
}
});
};
module.exports = new Api();
The purpose of exporting with the new keyword is to ensure the same object is always returned, because I want to authenticate once and retain the authenticated api object.
The next module calls the API to make a request. Note that I require the api and then call the function request on it.
/* lib/thing.js */
"use strict";
var api = require('./api.js');
var thing = {
storeThing: function(thingData) {
return new Promise((resolve, reject) => {
api.request(thingData).then(result => {
resolve(result);
}).catch(error => {
console.log(error);
});
});
}
};
module.exports = thing;
Finally I have an entry point which authenticates with the API and then calls the thing library.
/* index.js */
"use strict";
var api = require('./lib/api.js');
var libThing = require('./lib/thing.js');
var credentials = {
username: "test@test.com",
password: "password"
};
api.authenticate(credentials).then(result => {
return libThing.storeThing({ id: 1 });
}).then(result => {
console.log(result);
}).catch(error => {
console.log(error);
});
This works. I can require api.js from anywhere in the project and it will work as expected. However if I try the following change in lib/thing.js then it fails:
/* lib/thing.js */
"use strict";
var request = require('./api.js').request;
var thing = {
storeThing: function(thingData) {
return new Promise((resolve, reject) => {
request(thingData).then(result => {
resolve(result);
}).catch(error => {
console.log(error);
});
});
}
};
module.exports = thing;
From what I can diagnose, the request function that is returned has an undefined this, so obviously no properties can be accessed.
I feel like this is something I ought to understand, but I can't see why it is so. If require('./api.js') always returns the same object, then why require('./api.js').request does not return a function bound to that object escapes me. Is it something to do with when it is evaluated, or something more obvious?
This is because you lose the this context when you detach a method from its object and call it as a plain function.
In that case this is no longer bound to the api instance: in strict mode it is undefined, otherwise it falls back to the global object.
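For illustration, two ways to keep this pointing at the api instance in lib/thing.js (same modules as above):

/* lib/thing.js */
"use strict";
var api = require('./api.js');

// Option 1: keep the method attached to its object when calling it
var thing = {
  storeThing: function(thingData) {
    return api.request(thingData);
  }
};

// Option 2: detach it, but bind it to the instance first
var request = api.request.bind(api);
// request(thingData) now runs with `this` set to the shared api instance

module.exports = thing;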