So I have been trying to make a system with "memory". I used a JSON file for this, but it never refreshes the file. I looked it up and I got a function showing this
/**
 * Re-require a module, bypassing Node's require cache so the file on disk
 * is read again (useful for JSON files that change at runtime).
 * @param {string} module - module id or path, as accepted by require()
 * @returns {*} the freshly loaded module exports
 */
function requireUncached(module) {
  const resolved = require.resolve(module);
  delete require.cache[resolved];
  return require(module);
}
but they didn't show any syntax. Do I only put it at the top instead of const whatever = require(file)? Do I do it in every function it needs to be refreshed in? I have no idea. The reason I need this is so that it is all automatic and I don't have to do node . every time.
Warning: this approach — a Proxy combined with fs.writeFileSync() — can be slow, because every batch of mutations triggers a synchronous disk write.
Here's a convenient function that uses a Proxy to automatically write changes back to disk whenever the object in memory is updated:
const fs = require('fs');
/**
 * Loads a JSON file and returns a Proxy-wrapped object that automatically
 * schedules a write back to disk whenever it (or any nested object) mutates.
 * @param {string} path - JSON file to load and persist
 * @param {string} [encoding='utf8'] - file encoding
 * @param {number|string|null} [space=null] - JSON.stringify indentation
 * @returns {Proxy} live object mirrored to the file
 */
const storage = (path, encoding = 'utf8', space = null) => {
  const data = JSON.parse(fs.readFileSync(path, encoding));
  let pending = null;
  // Coalesce many synchronous mutations into a single write: each mutation
  // cancels the previously scheduled immediate and schedules a new one.
  const queueSave = () => {
    clearImmediate(pending);
    pending = setImmediate(() => {
      fs.writeFileSync(path, JSON.stringify(data, null, space), encoding);
    });
  };
  // Build a trap that forwards to Reflect and queues a save on success.
  const mutating = (trap) => (...args) => {
    const ok = Reflect[trap](...args);
    if (ok) {
      queueSave();
    }
    return ok;
  };
  const handler = {
    get(target, property) {
      const value = Reflect.get(target, property);
      // Primitives pass through untouched; only objects/functions get wrapped.
      if (Object(value) !== value) {
        return value;
      }
      const descriptor = Reflect.getOwnPropertyDescriptor(target, property);
      // Proxy invariants forbid wrapping non-configurable, non-writable
      // own properties, so those are returned as-is.
      if (descriptor === undefined || descriptor.configurable || descriptor.writable) {
        return new Proxy(value, handler);
      }
      return value;
    },
    set: mutating('set'),
    deleteProperty: mutating('deleteProperty'),
    defineProperty: mutating('defineProperty'),
  };
  return new Proxy(data, handler);
};
Example usage:
const settings = storage('config.json', 'utf8', 2);
...
// automatically schedules call to
// fs.writeFileSync('config.json', JSON.stringify(settings, null, 2), 'utf8')
// after updating object
settings.users[user.id].banned = true;
The advantage of using setImmediate() is that if multiple properties are updated synchronously on settings, it only schedules one call to fs.writeFileSync(), and it will occur after the event loop processes currently pending I/O events.
Because the proxy object recurses, you can treat settings exactly as a normal object, keep variable references to object or array properties, and read primitive values from it as usual.
The only restriction is that the JSON file must begin with { or [ in order for the object to be allowed as the target of a Proxy.
Related
I have 3 separate JS files; one of them has a function that I export and use in the other two. The purpose of the function is to get data from the Firebase Realtime Database only once: on the first call it fetches the data from the database and stores it in an array declared outside the function. On every later call, it simply returns that array containing the data from the database.
// Module-level cache of the user's subjects; shared with updateDisplayedSub.
let mySub = [];
// Returns the cached subject list when non-empty; otherwise starts an async
// Firebase read and returns the (still empty) array immediately.
// NOTE(review): the `return mySub` in the else branch runs BEFORE the get()
// promise resolves, so the first call always returns an empty array; the data
// only appears later, once the .then() callback has pushed into mySub.
// NOTE(review): the cache is keyed by nothing — calling with a different `id`
// still returns the previously cached data.
export function getMySub(id) { // get users subjects
if (mySub.length !== 0) {
console.log('From Array SUBJECTS');
return mySub;
} else if (mySub.length == 0) {
// `get`, `child` and `path` come from the Firebase Realtime Database SDK
// (defined elsewhere in this file) — TODO confirm.
get(child(path, "General")).then((snapshot) => {
snapshot.forEach(function(childSnapshot) {
var data = childSnapshot.val();
// Keep only rows belonging to the requested instructor id.
if (data.INS_SYS_ID == id) {
mySub.push([data.COURSE_NAME, data.SECTION, childSnapshot.key, data.ENROLLED_STUDENTS]);
}
})
}).catch((error) => {
console.error(error);
});
console.log('From DB SUBJECTS');
return mySub;
}
}
It works like this, if the mySub array length is not equal to zero the function will just return the array. if the array length is equal to zero it will get data from database and store it to mySub.
I also have a function on that same js file that updates the mySub array when i need to.
// Appends one subject tuple to the shared mySub cache so subsequent
// getMySub calls return it without another database read.
export function updateDisplayedSub(coursename, section, key, enrolledStudents) {
  const entry = [coursename, section, key, enrolledStudents];
  mySub.push(entry);
}
Now here is my problem: whenever I call the function that updates the mySub array and then call getMySub(id), it returns an array read from the database instead of returning the existing array that I updated.
updateDisplayedSub and getMySub are called from different JS files — does this affect anything? I'm using this caching approach to avoid too many reads and to conserve bandwidth on my free-tier Firebase hosting.
It seems that you are trying to invent memoization.
Your current method falls short because if you vary the id parameter, there's no way of telling if the cached data in mySub refers to that id or not. This will likely cause bugs.
Instead, we can write a higher order function that returns a function that wraps your existing function:
/**
 * Memoizes a single-argument function.
 * @param {Function} f - function of one argument to wrap
 * @returns {Function} wrapper taking (value, skipCached); passing a truthy
 *   second argument bypasses the cache, re-invokes f, and stores the result.
 */
const memoizeUnary = (f) => {
  const cache = new Map();
  return (arg, skipCached) => {
    if (skipCached || !cache.has(arg)) {
      cache.set(arg, f(arg));
    }
    return cache.get(arg);
  };
};
and use it to wrap another function:
// Demo function: logs each real invocation so the memoization demo can show
// when the wrapped function actually runs.
function foo(v) {
  console.log("foo invoked");
  const greeting = `hello ${v}`;
  return greeting;
}
// Memoized wrapper: repeated calls with the same argument reuse the cached result.
const memoizedFoo = memoizeUnary(foo);
Now, if we call
memoizedFoo("monkey")
for the first time, the function that it wraps will be invoked, its return value stored in a map and then returned to the caller.
The second time we call
memoizedFoo("monkey")
the function it wraps will not be called, and the cached return value for parameter "monkey" will be retrieved and returned to the caller.
If you want to reset the cache and fetch from the wrapped function again, you can add an optional parameter, true to skip the cache, re-fetch and store the new value in the cache:
memoizedFoo("monkey", true)
Pop open the snippet below to see this in action:
// Memoize a unary function. The returned wrapper accepts an optional second
// argument: pass true to skip the cache, re-invoke f, and refresh the entry.
const memoizeUnary = (f) => {
  const cache = new Map();
  return (arg, skipCached) => {
    const useCache = !skipCached && cache.has(arg);
    if (useCache) {
      return cache.get(arg);
    }
    const result = f(arg);
    cache.set(arg, result);
    return result;
  };
};
// Demo function for the snippet: the log line reveals every real invocation.
function foo(v) {
  console.log("foo invoked");
  return ["hello", v].join(" ");
}
// Wrap foo once; all calls below share the same cache.
const memoizedFoo = memoizeUnary(foo);
// First call misses the cache, so "foo invoked" is logged exactly once here...
memoizedFoo("a");
memoizedFoo("a");
memoizedFoo("a");
memoizedFoo("a");
memoizedFoo("a");
// ...passing true forces a re-fetch (second log)...
memoizedFoo("a", true);
// ...and these hit the refreshed cache again (no further logs).
memoizedFoo("a");
memoizedFoo("a");
memoizedFoo("a");
Notice how, even though we call memoizedFoo("a") multiple times, the function it wraps is only called for the first time or if we supply a second true parameter.
I'm having a problem understanding a line of code from Eloquent Javascript ebook, Chapter 11 (Message Routing section). In it the author tries to explain how message routing in a supposed network might work (by incorporating promises and other async concepts). He constructs different types of functions that handle different actions (sending request, receiving it, responding,...). But then there is this implementation of route finding algorithm that I think I don't quite understand.
//SECTION THAT CREATES A KIND OF NEIGHBOUR MAP THAT EVERY NEST (COMPUTER) HAS
// Handler for "connections" gossip messages: merge the sender's view of
// `name`'s neighbor list into our table and re-broadcast only if it changed.
requestType("connections", (nest, {name, neighbors},
source) => {
let connections = nest.state.connections;
// JSON comparison as a cheap deep-equality check; when nothing changed we
// stop here, which is what makes the gossip flood terminate.
if (JSON.stringify(connections.get(name)) ==
JSON.stringify(neighbors)) return;
connections.set(name, neighbors);
// Forward to every neighbor except the one we heard this from.
broadcastConnections(nest, name, source);
});
// Sends this nest's stored neighbor list for `name` to every neighbor,
// optionally skipping the nest the information arrived from.
function broadcastConnections(nest, name, exceptFor = null) {
  nest.neighbors
    .filter((neighbor) => neighbor !== exceptFor)
    .forEach((neighbor) => {
      request(nest, neighbor, "connections", {
        name,
        neighbors: nest.state.connections.get(name)
      });
    });
}
// Runs on every nest at startup: seed the connections map with our own
// neighbor list and announce it to the network.
everywhere(nest => {
nest.state.connections = new Map();
nest.state.connections.set(nest.name, nest.neighbors);
broadcastConnections(nest, nest.name);
});
//PATH FINDING FUNCTION
/**
 * Breadth-first search over `connections` (a Map of node name -> neighbor
 * array). Returns the first hop to take from `from` toward `to`, or null
 * when `to` is unreachable — and also when `to` is a direct neighbor of
 * `from` (via stays null; callers handle direct neighbors separately).
 */
function findRoute(from, to, connections) {
  const work = [{at: from, via: null}];
  let index = 0;
  while (index < work.length) {
    const {at, via} = work[index];
    index += 1;
    // `|| []` guards against nodes we have no neighbor list for (yet):
    // connections.get(at) is undefined then, and for...of would throw.
    for (const next of connections.get(at) || []) {
      if (next === to) return via;
      const alreadyQueued = work.some((entry) => entry.at === next);
      if (!alreadyQueued) {
        // Record the first hop that leads toward `next`.
        work.push({at: next, via: via || next});
      }
    }
  }
  return null;
}
//THEN THERE ARE FUNCTIONS THAT HANDLE THE ACTUAL MESSAGE SENDING/ROUTING
// Sends a request directly when `target` is a neighbor; otherwise looks up
// the first hop via findRoute and forwards a wrapped "route" message to it.
function routeRequest(nest, target, type, content) {
  if (nest.neighbors.includes(target)) {
    return request(nest, target, type, content);
  }
  const via = findRoute(nest.name, target, nest.state.connections);
  if (!via) throw new Error(`No route to ${target}`);
  return request(nest, via, "route", {target, type, content});
}
// "route" messages carry a nested request; unwrap it and forward one hop.
requestType("route", (nest, {target, type, content}) => {
return routeRequest(nest, target, type, content);
});
My question is, in the findRoute function, why is there || [] in the inner for loop? Is it there for appropriate consequent error handling (in case somehow there is no nest specified as having neighbours in the connections property, but is regardless of that listed as someones neighbouring nest)?
connections.get(at) returns undefined when there is no entry for at (a Map's get yields undefined for missing keys; other APIs may return null instead), and you can't run a for...of loop over null or undefined, so the author substitutes an empty array in that case.
I have the following code:
// Loads rows via SQL and resolves with an array of ItemVal objects.
// NOTE(review): wrapping an already-promise-returning call in `new Promise`
// is the explicit-construction anti-pattern; also every item is built from
// the same hard-coded values ("reading hw", 7121, progress.DONE), which is
// why each call appears to produce objects with "the same" properties.
const readDataFromSql = () => {
// going to have to iterate through all known activities + load them here
let sql = "[...]"
return new Promise((resolve, reject) => {
executeSqlQuery(sql).then((dict) => {
let loadedData = [];
// for...in iterates keys; `key` is unused — one ItemVal per row.
for (let key in dict) {
let newItemVal = new ItemVal("reading hw", 7121, progress.DONE);
loadedData.push(newItemVal);
}
resolve(loadedData);
});
});
}
ItemVal implementation:
// Simple record type bundling an activity's name, time, and progress type.
class ItemVal {
  constructor(name, time, type) {
    Object.assign(this, { name, time, type });
  }
}
Let's assume that newItemVal = "reading hwj", 5081, progress.PAUSED when readDataFromSql() first runs.
readDataFromSql() is then again called after some state changes -- where it repulls some information from a database and generates new values. What is perplexing, however, is that when it is called the second time, newItemVal still retains its old properties (attaching screenshot below).
Am I misusing the new keyword?
From what I can see in your example code, you are not mutating existing properties but creating new objects with the ItemVal constructor function and adding them to an array, which you then return through a resolved promise. Are you sure the examples you give are a correct representation of what you are actually doing?
Given that, I'm not sure what could be causing the issue you are having, but I would at least recommend a different structure for your code, using a simpler function for the itemVal.
Perhaps with this setup, you might get an error returned that might help you debug your issue.
// Factory returning a plain { name, time, type } record (no class needed).
const itemVal = (name, time, type) => {
  return { name, time, type };
};
// Async rewrite of the questioner's function: await the query, map each row
// to a fresh record, and return the array.
const readDataFromSql = async () => {
  try {
    const sql = "[...]"
    // assumes executeSqlQuery resolves to an array — TODO confirm; the
    // original question iterated it with for...in, which also works on
    // plain objects but not with .map().
    const dict = await executeSqlQuery(sql)
    // BUG FIX: this previously called `ItemVal(...)` — the class, invoked
    // without `new`, which throws — instead of the `itemVal` factory
    // defined just above this function.
    const loadedData = dict.map(() =>
      itemVal("reading hw", 7121, progress.DONE)
    )
    return loadedData
  } catch (error) {
    // NOTE(review): returning the error (instead of rethrowing) means
    // callers must inspect the resolved value — confirm this is intended.
    return error
  }
};
If the issue is not in the function, then I would assume that the way you handle the data, returned from the readDataFromSql function, is where the issue lies. You need to then share more details about your implementation.
const readDataFromSql = async () => {
let sql = "[...]"
------> await executeSqlQuery(sql).then((dict) => {
Use the await keyword instead of creating a new promise.
I did some modification and found that below code is working correctly, and updating the new values on each call.
// Variant used to demonstrate that a new object is created on every call:
// each invocation builds one ItemVal from fresh random values.
const readDataFromSql = () => {
return new Promise((resolve, reject) => {
let loadedData = [];
let randomVal = Math.random();
let newItemVal = new ItemVal(randomVal*10, randomVal*100, randomVal*1000);
loadedData.push(newItemVal);
resolve(loadedData);
});
}
Could you recheck if you are using below line in the code, as it will instantiate object with same properties again and again.
let newItemVal = new ItemVal("reading hw", 7121, progress.DONE);
You can modify your code as below to simplify the problem.
// Simplified async version: await the query and build one ItemVal per row.
const readDataFromSql = async () => {
  // going to have to iterate through all known activities + load them here
  let sql = "[...]" // define sql properly
  let result = await executeSqlQuery(sql);
  let loadedData = [];
  // BUG FIX: the original used `for (let row in result)`, which iterates
  // KEYS (index strings), so row.name/row.time/row.type were always
  // undefined. for...of iterates the row objects themselves.
  // assumes `result` is an iterable of row objects — TODO confirm.
  for (let row of result) {
    let newItemVal = new ItemVal(row.name, row.time, row.type);
    loadedData.push(newItemVal);
  }
  return loadedData;
}
class ItemVal {
  // Value object for an activity: name, time, and progress type.
  constructor(name, time, type) {
    const fields = { name, time, type };
    for (const [key, value] of Object.entries(fields)) {
      this[key] = value;
    }
  }
}
What you are talking about is an issue related to Object mutation in Redux, however, you didn't add any redux code. Anyway, you might be making some mistake while recreating(not mutating) the array.
General solution is the use spread operator as:
loadedData = [ ...loadedData.slice(0) , ...newloadedData]
In Dropdown.js line 188 instead of console.log-ing your variable write debugger;
This will function as a breakpoint. It will halt your code and you can inspect the value by hovering your mouse over the code BEFORE the newItemVal is changed again.
I can see in your screenshot that the newItemVal is modified again after you log it.
I am implementing a cacheing layer in NodeJS and MongoDB using Redis. I am fairly new to Redis. So I am having trouble where I am trying to automatically clear cache after a given timing. The error I am getting
ReplyError: ERR wrong number of arguments for 'hset' command
This is my code block
// Read-through Redis cache layered over mongoose's Query#exec.
mongoose.Query.prototype.exec = async function() {
// Cache field: the query conditions plus the collection name, serialized.
const key = JSON.stringify(
Object.assign({}, this.getQuery(), {collection:
this.mongooseCollection.name})
);
// Try the hash first; this.hashKey groups all cached queries for one entity.
const cachedValue = await client.hget(this.hashKey, key);
if(cachedValue) {
const parsedDoc = JSON.parse(cachedValue);
// Rehydrate plain JSON back into mongoose model instances.
return Array.isArray(parsedDoc) ? parsedDoc.map(doc => new
this.model(doc)) : new this.model(parsedDoc);
}
// NOTE(review): `exec` here must be a saved reference to the ORIGINAL
// mongoose exec, captured before this override — not shown in this snippet.
const result = await exec.apply(this, arguments);
// BUG (the error being asked about): HSET takes key/field/value only; the
// extra 'EX', 10 arguments trigger "wrong number of arguments for 'hset'".
// Redis expiry applies to whole keys — use client.expire(this.hashKey, 10).
client.hset(this.hashKey, key, JSON.stringify(result), 'EX', 10);
return result;
}
Redis HSET (in the older node_redis client) only accepts 3 arguments: the key, one field, and one value. If you want to store multiple field/value pairs in one call, you can use HMSET.
Reference:
https://redis.io/commands/hset
https://redis.io/commands/hmset
Note, however, that passing 'EX', 10 is not valid for HMSET either — it would simply be interpreted as one more field/value pair. Redis expirations apply to whole keys, not to individual hash fields, so drop those arguments and set the TTL on the hash key separately:
client.hset(this.hashKey, key, JSON.stringify(result));
client.expire(this.hashKey, 10);
I have an application communicating with a device via serial port. Every sent command is answered by a data event containing the status/answer. Basically there are commands changing the device and a command which just returns the status. Every time the last command has been answered (so upon receiving data) the app should send the next command or as a default query the status. I'm trying to model this with rxjs.
My idea here is that there is a command observable and a data observable derived from the data event. These two should be combined in such a way, that the resulting observable only emits values, when there is data and combine it with a command or the default command (request status), if no command came down the command stream.
data: ---------d---d-----d---------d------d-------
command: --c1---c2----------------------c3-----------
______________________________________________________
combined ---------c1--c2----dc--------dc-----c3
dc is the default command. Also no commands should be lost.
Currently I have an implementation with a anonymous subject, implementing the observable and observer myself. Collecting commands from the command stream in an array, subscribing to the data event, publish the data by hand with onNext and sending the next command from the array or the default. This works, but I have the feeling this could be expressed more elegantly with rxjs.
One approach was to have a separate default_command stream, repeating the default command every 100ms. This was merged with the command stream and then zipped with the data stream. The problem here was the merged command stream, because it piled up default commands, but the default command should only apply, if there is no other command.
Only thing I can think of is to:
subscribe to the command stream and queue the results (in an array)
Apply a mapping operation to the data stream which will pull from the queue (or use default if the queue is empty).
We can wrap this up into a generic observable operator. I'm bad with names, so I'll call it zipWithDefault:
// RxJS 4 operator: like zip, but never waits on `bs` — each source value is
// paired with the oldest queued value from `bs`, or with `defaultB` when the
// queue is empty, so every source emission produces exactly one output.
Rx.Observable.prototype.zipWithDefault = function(bs, defaultB, selector) {
var source = this;
return Rx.Observable.create(function(observer) {
var sourceSubscription = new Rx.SingleAssignmentDisposable(),
bSubscription = new Rx.SingleAssignmentDisposable(),
subscriptions = new Rx.CompositeDisposable(sourceSubscription, bSubscription),
bQueue = [],
mappedSource = source.map(function(value) {
// Pull the oldest queued b, or fall back to the default.
return selector(value, bQueue.length ? bQueue.shift() : defaultB);
});
// Subscribe to bs purely to accumulate its values; errors propagate.
bSubscription.setDisposable(bs.subscribe(
function(b) {
bQueue.push(b);
},
observer.onError.bind(observer)));
sourceSubscription.setDisposable(mappedSource.subscribe(observer));
// Returning the composite disposes both subscriptions on teardown.
return subscriptions;
});
};
And use it like so:
combined = dataStream
.zipWithDefault(commandStream, defaultCommand, function (data, command) {
return command;
});
I think the sample operator would be your best bet. Unfortunately, it does not come with a built in default value, so you would have to roll your own from the existing operator:
// RxJS 4 operator: like sample(), but when the source produced nothing since
// the previous sampler tick it emits `defaultValue` instead of staying silent.
Rx.Observable.prototype.sampleWithDefault = function(sampler, defaultValue){
var source = this;
return new Rx.AnonymousObservable(function (observer) {
var atEnd, value, hasValue;
// On each sampler tick: emit the latest source value (or the default)
// and clear the "fresh value" flag.
function sampleSubscribe() {
observer.onNext(hasValue ? value : defaultValue);
hasValue = false;
}
// Only complete once the source itself has completed.
function sampleComplete() {
atEnd && observer.onCompleted();
}
return new Rx.CompositeDisposable(
source.subscribe(function (newValue) {
hasValue = true;
value = newValue;
}, observer.onError.bind(observer), function () {
atEnd = true;
}),
sampler.subscribe(sampleSubscribe, observer.onError.bind(observer), sampleComplete)
);
}, source);
}
You can achieve the queuing behavior using the controlled operator. Thus your final data chain would look like so:
// Make the command source "controlled" so commands are pulled one at a time;
// after each emitted value, request the next command from the buffer.
var commands = getCommandSource().controlled();
var pipeline = commands
.sampleWithDefault(data, defaultCommand)
.tap(function() { commands.request(1); });
Here is a full example:
// Snippet copy of the operator above: sample() with a fallback value emitted
// when no source value arrived since the last sampler tick.
Rx.Observable.prototype.sampleWithDefault = function(sampler, defaultValue) {
var source = this;
return new Rx.AnonymousObservable(function(observer) {
var atEnd, value, hasValue;
// Emit the freshest value, or the default when none arrived; reset flag.
function sampleSubscribe() {
observer.onNext(hasValue ? value : defaultValue);
hasValue = false;
}
function sampleComplete() {
atEnd && observer.onCompleted();
}
return new Rx.CompositeDisposable(
source.subscribe(function(newValue) {
hasValue = true;
value = newValue;
}, observer.onError.bind(observer), function() {
atEnd = true;
}),
sampler.subscribe(sampleSubscribe, observer.onError.bind(observer), sampleComplete)
);
}, source);
}
// Demo wiring on the RxJS TestScheduler so the marble timings are explicit.
var scheduler = new Rx.TestScheduler();
var onNext = Rx.ReactiveTest.onNext;
var onCompleted = Rx.ReactiveTest.onCompleted;
// Simulated device responses (the "data" stream).
var data = scheduler.createHotObservable(onNext(210, 18),
onNext(220, 17),
onNext(230, 16),
onNext(250, 15),
onCompleted(1000));
// Simulated commands, buffered by controlled() so they are pulled on demand.
var commands = scheduler.createHotObservable(onNext(205, 'a'),
onNext(210, 'b'),
onNext(240, 'c'),
onNext(400, 'd'),
onCompleted(800))
.controlled(true, scheduler);
// Each data event samples the next buffered command (or 'default'), then
// requests one more command from the controlled buffer.
var pipeline = commands
.sampleWithDefault(data, 'default')
.tap(function() {
commands.request(1);
});
// Render each emitted command as a list item.
var output = document.getElementById("output");
pipeline.subscribe(function(x) {
var li = document.createElement("li");
var text = document.createTextNode(x);
li.appendChild(text);
output.appendChild(li);
});
// Prime the buffer with the first command, then run the virtual clock.
commands.request(1);
scheduler.start();
<script src="https://cdnjs.cloudflare.com/ajax/libs/rxjs/2.5.2/rx.all.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/rxjs/2.5.2/rx.testing.js"></script>
<div>
<ul id="output" />
</div>
This can be solved by using the scan function. In the accumulated value the commands are stored for which no data response has been received yet.
// Merge data ('d') and command events into one stream, then use scan to keep
// a queue of commands not yet answered. A data event emits the oldest queued
// command (or the default 'dc'); a command event only enqueues and emits the
// empty marker, which the filter below drops.
var result = Rx.Observable
.merge(data, command)
.scan(function (acc, x) {
if (x === 'd') {
// Data arrived: answer with the next queued command or the default.
acc.result = acc.commands.length > 0 ? acc.commands.shift() : 'dc';
} else {
// A command: queue it until the device responds with data.
acc.result = '';
acc.commands.push(x);
}
return acc;
}, {result: '', commands: []})
.map(function (x) {
return x.result;
})
.filter(function (x) {
return x !== '';
});
Please find a complete more detail here: http://jsbin.com/tubade/edit?html,js,console