After refactoring some of our scripts and moving some functions into separate custom modules in the shared folder, I encountered a very strange problem. I don't even know how to describe it properly; I think the best way is to show it with an example. I apologise in advance for the long portions of code I will post here. I just don't know how else to explain my problem.
We have several shared scripts containing some of our common functionality:
shared/commons-util.js - several convenient utility functions, among them this one:
function getErrorHandler(errMessage, callback) {
return function(err) {
console.log('inside error handler')
console.error(errMessage + '\nCause: ', err)
if (callback instanceof Function) {
callback(err)
}
}
}
exports.getErrorHandler = getErrorHandler
shared/commons-db.js - helper functions for working with database tables:
var waitingTime = +process.env.throttlingWaitTime || 0;
var retryCount = +process.env.throttlingRetryCount || 0;
function refreshThrottlingLimit() {
var throttlingLimit = +process.env.throttlingLimit || 0
if (LMD_GLOBALS.throttling.limit != throttlingLimit) {
LMD_GLOBALS.throttling.available += throttlingLimit - LMD_GLOBALS.throttling.limit
LMD_GLOBALS.throttling.limit = throttlingLimit
}
}
function throttledWrite(operation, table, data, callbacks, retriesLeft) {
refreshThrottlingLimit()
if (LMD_GLOBALS.throttling.limit && LMD_GLOBALS.throttling.available > 0) {
LMD_GLOBALS.throttling.available--
try {
console.log('Executing ' + operation + ' on table: ' + table.getTableName() + ', data: ', data, 'callbacks: ', callbacks)
table[operation](data, {
success: function(result) {
try {
console.log('throttledWrite: SUCCESS')
LMD_GLOBALS.throttling.available++
if (callbacks && callbacks.success) callbacks.success(result)
} catch (e) {
console.error('Exception in success callback', e)
}
},
error: function(err) {
try {
console.log('throttledWrite: ERROR')
LMD_GLOBALS.throttling.available++
err.table = table.getTableName()
err.operation = operation
err.data = data
if (callbacks && callbacks.error) callbacks.error(err)
} catch (e) {
console.error('Exception in error callback', e)
}
}
})
console.log(operation + ' started...')
} catch (e) {
console.error('Exception starting ' + operation, e)
}
} else if (retriesLeft > 0) {
setTimeout(throttledWrite, waitingTime, operation, table, data, callbacks, retriesLeft - 1)
} else {
if (callbacks && callbacks.error) callbacks.error(new Error('Aborting ' + operation + ' operation (waited for too long)'))
}
}
exports.throttledInsert = function(table, data, callbacks) {
throttledWrite('insert', table, data, callbacks, retryCount)
}
exports.throttledUpdate = function(table, data, callbacks) {
throttledWrite('update', table, data, callbacks, retryCount)
}
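(LMD_GLOBALS is referenced above but its definition is omitted here; for these snippets, assume it is a global object shaped roughly like this:)
var LMD_GLOBALS = {
    throttling: {
        limit: 0,     // refreshed from process.env.throttlingLimit by refreshThrottlingLimit()
        available: 0  // write "slots" currently free; decremented before each write, incremented when it finishes
    }
};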
shared/commons.js - just combining these two modules:
exports.db = require('../shared/commons-db.js')
exports.util = require('../shared/commons-util.js')
We use these modules in the scheduled script executeBackgroundJobs.js:
var commons = require('../shared/commons.js');
// Same higher-order function as in `commons-util.js`. The only difference is that it is defined locally, not in a separate module.
function getErrorHandler(errMessage, callback) {
return function(err) {
console.log('inside error handler')
console.error(errMessage + '\nCause: ', err)
if (callback instanceof Function) {
callback(err)
}
}
}
function executeBackgroundJobs() {
test();
}
function test() {
console.log('Testing paranormal activity in Azure');
var testTable = tables.getTable('test');
var callback = function(res) {
console.log('Test OK', res)
}
var errMessage = 'Could not write to `test` table';
var errHandler_1 = commons.util.getErrorHandler(errMessage, callback); // First handler, obtained from the higher-order function in the custom module
var errHandler_2 = getErrorHandler(errMessage, callback); // Second handler should be identical, just obtained from the local function rather than the external module
// These log lines just show that the two functions are identical
console.log('errHandler_1: ', errHandler_1);
console.log('errHandler_2: ', errHandler_2);
// We call `throttledUpdate` twice, with a different error handler for each invocation
commons.db.throttledUpdate(testTable, {
id: 'test-01', // Data object is intentionally illegal, we want update to fail
someColumn: 'some value #1'
}, {
success: callback,
error: errHandler_1
});
commons.db.throttledUpdate(testTable, {
id: 'test-02', // Data object is intentionally illegal, we want update to fail
someColumn: 'some value #2'
}, {
success: callback,
error: errHandler_2
});
}
Now here's the output of one invocation of this job:
1: First call of throttledWrite:
INFORMATION:
Executing update on table: test, data: { id: 'test-01', someColumn: 'some value #1' } callbacks: { success: [Function], error: [Function] }
2: Start of test(). It is already strange that the logs are out of order, because these functions are, so far, called synchronously (or at least I believe so). Does this mean that logging in Azure happens on different threads? Anyway, this is not our main problem.
INFORMATION:
Testing paranormal activity in Azure
3: Here we just show the body of the first handler
INFORMATION:
errHandler_1: function (err) {
console.log('inside error handler')
console.error(errMessage + '\nCause: ', err)
if (callback instanceof Function) {
callback(err)
}
}
4: Here we just show the body of the second handler
INFORMATION:
errHandler_2: function (err) {
console.log('inside error handler')
console.error(errMessage + '\nCause: ', err)
if (callback instanceof Function) {
callback(err)
}
}
5: The table.update method started successfully in the first throttledWrite (no exceptions)
INFORMATION:
update started...
6: Second call of throttledWrite:
INFORMATION:
Executing update on table: test, data: { id: 'test-02', someColumn: 'some value #2' } callbacks: { success: [Function], error: [Function] }
7: The table.update method started successfully in the second throttledWrite (no exceptions)
INFORMATION:
update started...
9: Error handler was called
INFORMATION:
inside error handler
10: From only the second throttledWrite
ERROR:
Could not write to `test` table
Cause: { _super: undefined,
message: 'Could not save item because it contains a column that is not in the table schema.',
code: 'BadInput',
table: 'test',
operation: 'update',
data: { id: 'test-02', someColumn: 'some value #2' } }
11: And that's all
INFORMATION:
Test OK { _super: undefined,
message: 'Could not save item because it contains a column that is not in the table schema.',
code: 'BadInput',
table: 'test',
operation: 'update',
data: { id: 'test-02', someColumn: 'some value #2' } }
First of all, the strangest thing, which I cannot understand, is why no callback was invoked in the first throttledWrite call.
And besides that, even in the second throttledWrite, where the callback was called, no log was written from table.update's error callback. Remember these lines from the throttledWrite function:
error: function(err) {
try {
console.log('throttledWrite: ERROR')
LMD_GLOBALS.throttling.available++
err.table = table.getTableName()
err.operation = operation
err.data = data
if (callbacks && callbacks.error) callbacks.error(err)
} catch (e) {
console.error('Exception in error callback', e)
}
}
I have to add that this is not the actual code where I first encountered these problems. I tried to shorten the code before posting, so you may not fully understand the purpose of some methods. It does not matter; this code is just for demonstration.
That's it. I have three mysteries that I cannot explain:
Why are the callbacks not called (in the first case)?
Why did console.log('throttledWrite: ERROR') not write anything to the log (when I am sure it was called and succeeded)?
Less critical, but still interesting: why are the logs out of order?
Related
I'm not very knowledgeable in JS, or in JS in the Node runtime, but I tried writing simple functions as arrow functions inside objects for later use. One of these arrow functions (data.volatile.modularVariables.read) calls the asynchronous readFile function from Node's native fs module and passes in the following parameters from the same object:
file path (data.persistent.paths.srcRoot)
encoding scheme (data.volatile.modularVariables.encoding.utf8)
callback function (data.volatile.modularVariables.readCB) <- the issue lies here
Relevant code (the object):
var data =
{
persistent:
{
states:
{
//exempt for question clarity
},
paths:
{
srcRoot: './vortex/root.txt'
}
},
volatile:
{
//much of the data exempt for question clarity
modularVariables:
{
encoding: {utf8:'utf8',hex:'hex',base64:'base64',BIN:(data,encoding)=>{Buffer.from(data,encoding)}},
readCB: (err,data)=>{if(err){console.log(`%c${data.volatile.debug.debugStrings.errCodes.read}: Error reading from file`,'color: red'); console.log(data);}},
writeCB: (err)=>{if(err){console.log(`%c${data.volatile.debug.debugStrings.errCodes.write}: Error writing to file`, 'color:red')}},
read: (file,encoding,cb)=>{fs.readFile(file,encoding,cb)}, //cb = readCB READ file srcRoot, pass into root(rootHash,encoding)
write: (file,data,cb)=>{fs.writeFile(file,data,cb)}, //cb = writeCB
checkInit: (symbol)=>{if(typeof symbol !== undefined){return symbol}}
},
debug:
{
functions:
{
append:
{
testProg:{program:{main:'system.node.upgrade'}}
}
},
debugStrings:
{
errCodes:
{
read: 'AFTERNET ERR 000',
write: 'AFTERNET ERR 001',
}
}
}
}
};
Aggregator Code:
testing()
{
data.volatile.modularVariables.read(data.persistent.paths.srcRoot,data.volatile.modularVariables.encoding.utf8,data.volatile.modularVariables.readCB(data));
};
Terminal Error:
readCB: (err,data)=>{if(err){console.log(`%c${data.volatile.debug.debugStrings.errCodes.read}: Error reading from file`,'color: red'); console.log(data);}},
^
TypeError: Cannot read property 'volatile' of undefined
Notes:
In the aggregator code, I tried not passing "data" into the callback
I tried passing "err", but it says it's undefined
I tried not passing anything into the callback
Conclusion:
Can someone point out what I'm doing wrong and why?
I couldn't tell from the comments if you've resolved the error, but I have a few suggestions that may help.
Aggregator Code
I noticed that in that code you are invoking the callback yourself and passing in the data object, so data gets assigned to the err parameter and the data parameter ends up undefined:
testing() {
data.volatile.modularVariables.read(
data.persistent.paths.srcRoot,
data.volatile.modularVariables.encoding.utf8,
data.volatile.modularVariables.readCB(data)
);
}
When passing in the callback function, you only need to specify the callback function name as below. NodeJS will call the callback with the appropriate err and data arguments. I believe this should resolve your problem.
testing() {
data.volatile.modularVariables.read(
data.persistent.paths.srcRoot,
data.volatile.modularVariables.encoding.utf8,
data.volatile.modularVariables.readCB
);
}
Variable Naming
One thing that could help others read your code and spot issues without needing to run it is making sure you don't have variables with the same name. I believe you are attempting to reference the data object defined outside the callback function, but inside the callback's scope data refers to the argument passed in: the parameter shadows the outer variable (see scope). When I ran your code, the debugger showed me that data was undefined before applying the fix above; afterwards, it showed that data was an empty string.
To fix this, you can rename your data object to something like myData or cypherData so it does not conflict with any of your other variables/parameters.
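A stripped-down illustration of the shadowing (a hypothetical, self-contained sketch rather than your actual code):
var data = { label: 'outer object' };
var readCB = (err, data) => {
    // Inside this arrow function, `data` is the second argument supplied by the caller,
    // not the outer object above, so the outer object is unreachable from here.
    console.log(data);
};
readCB(null, undefined); // logs undefined, even though the outer `data` object exists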
Full Solution
var cypherData = {
persistent: {
states: {
//exempt for question clarity
},
paths: {
srcRoot: "./vortex/root.txt"
}
},
volatile: {
//much of the data exempt for question clarity
modularVariables: {
encoding: {
utf8: "utf8",
hex: "hex",
base64: "base64",
BIN: (data, encoding) => {
Buffer.from(data, encoding);
}
},
readCB: (err, data) => {
console.log(data);
if (err) {
console.log(
`%c${cypherData.volatile.debug.debugStrings.errCodes.read}: Error reading from file`,
"color: red"
);
}
},
writeCB: (err) => {
if (err) {
console.log(
`%c${cypherData.volatile.debug.debugStrings.errCodes.write}: Error writing to file`,
"color:red"
);
}
},
read: (file, encoding, cb) => {
fs.readFile(file, encoding, cb);
}, //cb = readCB READ file srcRoot, pass into root(rootHash,encoding)
write: (file, data, cb) => {
fs.writeFile(file, data, cb);
}, //cb = writeCB
checkInit: (symbol) => {
if (typeof symbol !== "undefined") { // typeof returns a string, so compare against "undefined"
return symbol;
}
}
},
debug: {
functions: {
append: {
testProg: { program: { main: "system.node.upgrade" } }
}
},
debugStrings: {
errCodes: {
read: "AFTERNET ERR 000",
write: "AFTERNET ERR 001"
}
}
}
}
};
cypherData.volatile.modularVariables.read(
cypherData.persistent.paths.srcRoot,
cypherData.volatile.modularVariables.encoding.utf8,
cypherData.volatile.modularVariables.readCB
);
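One last note: both the original object and the version above call fs.readFile / fs.writeFile, so they assume fs has been required at the top of the file:
var fs = require("fs");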
async function displayEmbed(args, msg) {
var bridgeembed = await getBridgeClutcherStats(args, msg) //should set bridgeembed to an embed
var omniembed = await getOmniClutcherStats(args, msg) //should set omniembed to an embed
var extembed = await getExtClutcherStats(args, msg) //should set extembed to an embed
var desccc = "```Bridge Clutch: 🧱```\n" + "```Omni Clutch: ⚛```\n" + "```Extension Clutch: 📏```\n";
new Menu(msg.channel, msg.author.id, [{
name: "main",
content: new MessageEmbed({
title: "Please Choose a Gamemode",
description: desccc,
url: "https://3d4h.world",
color: 16711935,
author: {
name: args[1],
url: `https://namemc.com/profile/${args[1]}`,
icon_url: `https://mc-heads.net/body/${args[1]}`
}
}),
reactions: {
"🚫": "stop",
"🧱": "bridgeclutch",
"⚛": "omniclutch",
"📏": "extclutch"
}
},
{
name: "bridgeclutch",
content: bridgeembed,
reactions: {
"◀": "main"
},
name: "omniclutch",
content: omniembed,
reactions: {
"◀": "main"
},
name: "extclutch",
content: extembed,
reactions: {
"◀": "main"
}
}
]);
}
So I've been trying to have three functions run before I create an embed menu. I followed the steps in (How to synchronously call a set of functions in javascript), but bridgeembed, omniembed, and extembed still ended up undefined.
When I directly sent the embed from the functions, it worked. I also tried using callbacks like this:
getOmniClutcherStats(args, msg, function(omniclutchEmbed){
getExtClutcherStats(args, msg, function(extclutchEmbed){
getBridgeClutcherStats(args, msg, function(bridgeclutchEmbed) {
new Menu(msg.channel, msg.author.id, [{
name: "main",
content: new MessageEmbed({
title: "Please Choose a Gamemode",
description: desccc,
url: "https://3d4h.world",
color: 16711935,
author: {
name: args[1],
url: `https://namemc.com/profile/${args[1]}`,
icon_url: `https://mc-heads.net/body/${args[1]}`
}
}),
reactions: {
"🚫": "stop",
"🧱": "bridgeclutch",
"⚛": "omniclutch",
"📏": "extclutch"
}
},
{
name: "bridgeclutch",
content: bridgeembed,
reactions: {
"◀": "main"
},
name: "omniclutch",
content: omniembed,
reactions: {
"◀": "main"
},
name: "extclutch",
content: extembed,
reactions: {
"◀": "main"
}
}
]);
});
});
});
But it only worked for bridgeembed; for the others I got UnhandledPromiseRejectionWarning: TypeError: Cannot read property 'content' of undefined.
How can I save the returned (or callback-provided) embeds into variables so I can access them in the reaction menu?
Thanks in advance!
As mentioned by @o-jones and @jfriend00, you'll need to return something directly from your functions in order to have it available for use in displayEmbed. Further, as mentioned by @jfriend00, if you want to use async/await to ensure the calls happen in sequence, you'll need to ensure that each function returns a Promise that resolves with the result, or the result itself if it can be determined synchronously.
Looking over the provided code for getExtClutcherStats in that PasteBin, you are calling a few asynchronous functions:
connection.query
MojangAPI.nameToUuid
MojangAPI.profile
average
all using the Node.js-style callback-passing pattern. Additionally, on your happy path, you are returning extclutchEmbed nested inside all of these calls:
connection.query('SELECT * FROM C3', function (error, rows, fields) {
…
MojangAPI.nameToUuid(args[1], function (err, res) {
…
MojangAPI.profile(rows[playerNum].UUID, function (err, res) {
…
average(`/home/bardia/3d4hbot/${res.name}.png`, (err, color) => {
…
return extclutchEmbed
}
…
}
…
}
…
}
Given that structure, extclutchEmbed will only be returned to the immediate callback function (average in the happy path).
In this case, the quickest way to ensure that a Promise is returned would be to wrap the logic inside the function passed to the Promise constructor, and to pass the result you would like returned on promise resolution to the resolve function, like so:
function getExtClutcherStats (args, msg) {
…
return new Promise((resolve, reject) => {
…
connection.query('SELECT * FROM C3', function (error, rows, fields) {
…
MojangAPI.nameToUuid(args[1], function (err, res) {
…
MojangAPI.profile(rows[playerNum].UUID, function (err, res) {
…
average(`/home/bardia/3d4hbot/${res.name}.png`, (err, color) => {
…
resolve(extclutchEmbed)
return
}
…
}
…
}
…
}
});
}
You'll need to work through the other places where you return in that function and make sure the caller can handle them; or, if you want to push the error handling onto the caller, you can use the reject function. For example, near the top:
if (args[1].length > 16) {
msg.channel.send('Invalid Player Name')
return
}
could become:
if (args[1].length > 16) {
reject('Invalid Player Name')
return
}
and the call site could then become:
try {
var extembed = await getExtClutcherStats(args, msg)
} catch (error) {
msg.channel.send(error);
}
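As a side note, if the deeply nested callbacks become hard to follow, Node's built-in util.promisify can wrap error-first callback APIs like these so that they return Promises. A rough sketch, assuming connection.query, MojangAPI.nameToUuid and MojangAPI.profile all use the standard (err, result) callback signature (as they appear to above):
const { promisify } = require('util');

// Promise-returning wrappers around the callback-style APIs used above
const queryAsync = promisify(connection.query).bind(connection); // keep `this` bound to the connection
const nameToUuidAsync = promisify(MojangAPI.nameToUuid);
const profileAsync = promisify(MojangAPI.profile);

// Inside an async getExtClutcherStats you could then write, for example:
//   const rows = await queryAsync('SELECT * FROM C3');
//   const uuid = await nameToUuidAsync(args[1]);
// and simply return extclutchEmbed at the end instead of calling resolve().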
I've got the following.
var lookupInit = function () {
http.get('api/employmenttype', null, false)
.done(function (response) {
console.log('loaded: employmenttype');
vm.lookups.allEmploymentTypes(response);
});
http.get('api/actionlist', null, false)
.done(function (response) {
console.log('loaded: actionlist');
vm.lookups.allActionListOptions(response);
});
http.get('api/company', null, false)
.done(function (response) {
console.log('loaded: company');
vm.lookups.allCompanies(response);
});
//... x 5 more
return true;
};
// somewhere else
if (lookupInit(id)) {
vm.userInfo.BusinessUnitID('0');
vm.userInfo.BuildingCode('0');
if (id === undefined) {
console.log('api/adimport: latest');
http.json('api/adimport', { by: "latest" }, false).done(viewInit);
}
else if (id !== undefined) {
console.log('api/adimport: transaction');
http.json('api/adimport', { by: "transaction", TransactionId: id }, false).done(viewInit);
}
} else {
console.log('User info init failed!');
}
The following "http.get('api/employmenttype', null, false)" means i set async to false.
I'm aware that this is probably inefficient. And i'd like to have all the calls load simultaneously.
The only problem is if i don't have them set to async false, the second part of my code might execute before the dropdowns are populated.
I've made a couple of attempts with jQuery Deferreds, but they resulted in what I can only describe as a mess.
The only thing I'm looking to achieve is that the lookup calls finish (in any order) before the adimport/second part of my code runs. But having each call wait for the one before it to finish, i.e. async: false, seems like the only solution I'm capable of implementing decently at the moment.
Would this be an appropriate place for deferred functions, and could anyone point me in a direction where I could figure out how to implement them correctly, as I've never done this before?
You can use $.when to combine multiple promises into one that resolves when all of them have been fulfilled. If I understood you correctly, you want
function lookupInit() {
return $.when(
http.get('api/employmenttype').done(function (response) {
console.log('loaded: employmenttype');
vm.lookups.allEmploymentTypes(response);
}),
http.get('api/actionlist').done(function (response) {
console.log('loaded: actionlist');
vm.lookups.allActionListOptions(response);
}),
http.get('api/company').done(function (response) {
console.log('loaded: company');
vm.lookups.allCompanies(response);
}),
// … some more
);
}
Then somewhere else
lookupInit(id).then(function(/* all responses if you needed them */) {
vm.userInfo.BusinessUnitID('0');
vm.userInfo.BuildingCode('0');
if (id === undefined) {
console.log('api/adimport: latest');
return http.json('api/adimport', {by:"latest"})
} else {
console.log('api/adimport: transaction');
return http.json('api/adimport', {by:"transaction", TransactionId:id});
}
}, function(err) {
console.log('User info init failed!');
}).done(viewInit);
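One thing to be aware of: $.when is fail-fast, so the failure handler above fires as soon as any single lookup fails, rather than after all of them have settled. A minimal illustration (with made-up URLs):
$.when($.get('/lookup-ok'), $.get('/lookup-missing')).then(
    function () { console.log('all lookups done'); },
    function (jqXHR) { console.log('first failure:', jqXHR.statusText); } // runs on the first rejection
);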
In the jQuery API docs I've found this about resolving multiple Deferreds:
$.when($.ajax("/page1.php"), $.ajax("/page2.php")).done(function(a1, a2){
/* a1 and a2 are arguments resolved for the
page1 and page2 ajax requests, respectively.
each argument is an array with the following
structure: [ data, statusText, jqXHR ] */
var data = a1[0] + a2[0]; /* a1[0] = "Whip", a2[0] = " It" */
if ( /Whip It/.test(data) ) {
alert("We got what we came for!");
}
});
Using this with your code:
var defer = $.when(
$.get('api/employmenttype'),
$.get('api/actionlist'),
$.get('api/company'),
// ... 5 more
);
defer.done(function (arg1, arg2, arg3 /*, ... 5 more*/) {
vm.lookups.allEmploymentTypes(arg1[0]);
vm.lookups.allActionListOptions(arg2[0]);
vm.lookups.allCompanies(arg3[0]);
// .. 5 more
vm.userInfo.BusinessUnitID('0');
vm.userInfo.BuildingCode('0');
if (id === undefined) {
console.log('api/adimport: latest');
http.json('api/adimport', { by: "latest" }, false).done(viewInit);
} else if (id !== undefined) {
console.log('api/adimport: transaction');
http.json('api/adimport', { by: "transaction", TransactionId: id }, false).done(viewInit);
}
});
You can use the deferred returned by one $.when() inside another $.when(), so if the json calls are not dependent on the first calls you can add them to another deferred.
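For example, a rough sketch of that nesting (only applicable if the adimport call really does not depend on the lookup responses):
var lookups = $.when(
    $.get('api/employmenttype'),
    $.get('api/actionlist'),
    $.get('api/company')
    // ... 5 more
);
var adimport = http.json('api/adimport', { by: "latest" });

// The combined lookup deferred can itself be passed to another $.when()
$.when(lookups, adimport).done(function () {
    // everything has finished here, in whatever order the requests completed
});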
Here is my code:
db.query(str, arr, function selectCb(error, results, fields) {
if (error) {
return proceed(false, {errno:'010',message:error.message}, request);
}
var q = async.queue(function (results, callback) {
// add the gib infor
if (results.refertype=='G') {
var input={};
input.fields="*";
input.gibname=results.refername;
gib.getgibinternal(input, makeCallback(i));
function makeCallback(index) {
return function(gresult) {
results.gib=gresult.data[0];
callback(results);
}
}
// add the user info
} else if(results.refertype=='U') {
var input={};
input.username=results.refername;
input.fields="*";
user.getuserinternal(input, makeCallbackuser(i));
function makeCallbackuser(index) {
return function(gresult) {
results.user=gresult.data[0];
callback(results);
}
}
}
}, results.length);
// assign a callback
q.drain = function() {
return proceed(true, self.results, self.request);
}
self.results=results;
for (var i=0; i<results.length; i++) {
// the first console
console.log(results[i]);
// add some items to the queue
q.push(results[i], function (results) {
results[i]=results;
self.results[i]=results;
//the second console.
console.log(results);
});
}
if (results.length==0) {
return proceed(true, results, request);
}
});
The output of the above code is:
// the first console
{ cardid: 30,
cardtype: 'I',
status: 'A',
refername: 'admin',
refertype: 'U' }
// the second console
{ '1': [Circular],
cardid: 30,
cardtype: 'I',
status: 'A',
refername: 'admin',
refertype: 'U',
user:
{ name: 'admin',
username: 'admin',
deleted: 'N' } }
How did this '1': [Circular] get added?
This bit:
q.push(results[i], function (results) {
is the same as this (with some renaming to make it easier to track the scope):
q.push(self.results[i], function(r) {
r[i] = r; // <------------------- Look at me!
self.results[i] = r;
//the second console.
console.log(r);
});
The self.results[i] change just comes from the self.results=results; right above your for loop. The interesting part is this:
r[i] = r;
If i is 1, you will have added a property named 1 to r whose value is r itself, hence the [Circular]. I would hazard a guess that results.length is 2 and that your callback is acting as a closure over i, ending up using the last value i had; that's why you get a '1' property rather than a '0' one.
I see three main things that could be causing you trouble:
The classic closure problem with i.
Too many things were called results so it was easy to lose track of which one you were working with.
The circular reference: r[i] = r;.
Another possible source of confusion is that results and self.results are the same object but that might be okay.
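A quick way to rule out the closure problem (and the circular reference at the same time) is to capture the current value of i on each iteration, for example:
for (var i = 0; i < results.length; i++) {
    (function (index) {                    // freeze the current value of i
        q.push(self.results[index], function (r) {
            self.results[index] = r;       // no r[index] = r, so no [Circular]
            console.log(r);
        });
    })(i);
}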
Seems like it's a circular reference and console.log is collapsing it to [Circular]. Try console.log(Object.keys(results['1'])); for more information about what's inside the object under key '1'.
I want to implement a function which performs AJAX requests (the response is JSON) until there is no "next" property in the response. After that I need to invoke a callback function. What is the best way to do this? My code below doesn't work, seemingly because of the callback's scope, and I cannot figure out how to pass it correctly.
_requestPhotos = function(url, callback) {
getYFContent(url, function(data) {
// some actions
if (!!data.next) {
_requestPhotos(data.next, callback);
} else {
callback(smth);
}
});
};
There are no obvious errors in the script you've posted. For example, an equivalent test could look like this:
alertResult = function(text) {
console.log("Result is: " + text);
}
doRecursive = function(data, callback) {
if(!!data.next) {
doRecursive(data.next, callback);
} else {
callback(data.value);
}
}
var d = { value: 1, next: { value: 2, next: { value: 3 }}};
doRecursive(d, alertResult);
The log result is "Result is: 3", which is what you'd expect.
The error is elsewhere. How are you calling this the first time, what is the callback you're passing to it (and how is it defined) and what exactly does getYFContent do?