I'm puzzling over a weird problem I cannot replicate.
Scenario
I wrote a simple Node.js application that, after some UI interaction, appends some records to an Access 97 database. I'm using node-adodb to connect to it.
Here is the relevant piece of code.
var DBDATA = require('node-adodb'),
    dbConnection = DBDATA.open('Provider=Microsoft.Jet.OLEDB.4.0;Data Source=/path/to/my/file.mdb');
function append(data, callback)
{
    var table;
    var query;
    var array = [];

    array.push({name: "Date", value: formatDate(data.date)});
    array.push({name: "Time", value: formatTime(data.date)});
    array.push({name: "Type", value: Number(data.Type)});
    array.push({name: "Value", value: Number(exists(data.value, 0))});
    // ...other fields

    var fields = array.map(function (e) {
        return "[" + e.name + "]";
    }).join(",");

    var values = array.map(function (e) {
        return e.value;
    }).join(",");

    table = "tblData";
    query = 'INSERT INTO ' + table + '(' + fields + ') ' + 'VALUES (' + values + ')';

    dbConnection
        .execute(query)
        .on('done', function (data) {
            return callback({id: id, success: true});
        })
        .on('fail', function (data) {
            console.log(data);
            return callback({id: id, success: false});
        });
}
The issue
The above function is called whenever a new record is ready. Usually it works fine, but about once a week (among hundreds of records) I find multiple identical rows in the database.
Given the nature of the information this is impossible - I mean, it's impossible that the actual data is the same.
I suspected a bug in the caller that, for some reason, sends me the same data more than once. Hence I added a check before appending the record.
What I tried to do
function checkDuplicate(table, array, callback)
{
    var query = "SELECT * FROM " + table + " WHERE ";

    array.forEach(function(element)
    {
        query += "([" + element.name + "]=" + element.value + ") AND ";
    });
    query = query.substr(0, query.length - 4);

    dbConnection
        .query(query)
        .on("done", function (data) {
            return callback(data.records.length > 0);
        })
        .on("fail", function (data) {
            return callback(false);
        });
}
In the append function I call this check first, and if it finds a match I don't execute the INSERT, because that would mean an identical row is already there.
Testing it with fake data gave good results: no duplicate records were added.
Unfortunately, this didn't fix the issue in the real world. After 20 days I noticed that a row had been added three times.
Questions
Do you see any evidence of a major mistake in my approach?
Is there a more reliable way to avoid this problem?
Please note I cannot change the database structure because it's not mine.
UPDATE
This is the new code I'm using:
// Add only if there isn't an identical record
query = 'INSERT INTO ' + table + '(' + fields + ') ';
query += ' SELECT TOP 1 ' + values;
query += ' FROM ' + table;
query += ' WHERE NOT EXISTS ( SELECT 1 FROM ' + table + ' WHERE ';
array.forEach(function(element)
{
    query += "([" + element.name + "]=" + element.value + ") AND ";
});
query = query.substr(0, query.length - 4);
query += ' );';

dbConnection
    .execute(query)
    .on('done', function (data) {
        return callback({id: id, success: true});
    })
    .on('fail', function (data) {
        console.log(data);
        return callback({id: id, success: false});
    });
but it didn't solve the problem: sometimes I still find two or more identical records in the database.
I'm afraid it is the same behavior: the client makes multiple requests within a short interval and they are executed in parallel, so none of them finds an existing record and all of them insert it.
Hence, what is the right approach to avoid this without changing the database structure?
Is there a way to force node-adodb to execute only one query at a time?
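For illustration, here is a minimal sketch of one way to serialize the work inside the Node process itself (assuming this process is the only writer to the .mdb file; checkAndAppend is a hypothetical promise-returning wrapper around the check-then-insert above, not part of the original code):

// Push every check-then-insert through a single promise chain, so two
// records can never interleave their duplicate check and their INSERT.
var queue = Promise.resolve();

function enqueue(task) {
    // chain the task onto whatever is already queued; swallow errors in
    // the chain itself so one failure doesn't block every later task
    var run = queue.then(function () { return task(); });
    queue = run.catch(function () {});
    return run;
}

// usage: checkAndAppend (hypothetical) runs checkDuplicate and, only if
// no match is found, executes the INSERT, returning a promise
// enqueue(function () { return checkAndAppend(data); });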
Related
I am developing a Chrome extension. I need to read information from a page, insert the data into a database table, then move on to the next page and do the same thing.
The problem is that the function which inserts the data (via AJAX) only gets 6 of the 45 rows in before the script moves to the next page, which means the rest of the data is never inserted.
What I want is for it to respect the code order: insert all the rows into the database, then move on to the next page.
The code is as follows:
for (const elt of an) {
    var t = elt.getElementsByClassName('annonce_titre');
    if (elt.id !== '') {
        //console.log(().getElementsByTagName('a')[0].href);
        let titleH2 = t[0].getElementsByTagName('h2');
        let titleLink = t[0].getElementsByTagName('a');
        var url = titleLink[0].href;
        var title2 = titleH2[0].innerHTML;
        var last_item = 0;
        var w = elt.getElementsByClassName('titre_wilaya')[0].innerHTML;
        console.log(w);
        var wilaya = w.substring(w.length - 2, w.length);
        console.log("leg0 leng " + (w.length - 2) + " ** " + w.length)
        console.log("wilaya " + wilaya)
        if (isNumber(wilaya)) {
            var city = w.substring(0, w.length - 4);
        } else {
            var city = w;
            wilaya = 16;
        }
        console.log("w c " + wilaya + " ** " + city)
        var num = (elt.id).substring(4, 20)
        // ADD DELAY OF 5 SECONDS BETWEEN PAGE LOADS
        var logger = setInterval(logNextTitle, 10);
        var inserts = [
            [title2, wilaya, city, url, num]
        ];
        test = test.concat(inserts);
        console.log('test spead ');
        $.ajax({
            data: {
                link: url,
                h2: title2,
                field: "auto",
                last: last_item,
                numero: num,
                wilaya: wilaya,
                city: city,
                items: test
            },
            type: "post",
            url: "http://localhost/insert.php",
            success: function(data) {
                console.log("Data Save: " + data);
            }
        });
    }
}
//window.open(first_link ,'_SELF');
console.log("first_link " + first_link)
What this code does is loop through all the elements of an array, insert the data into the DB using AJAX, and then move to the next page.
https://imgur.com/Rw4xrMq
The console output shows that the "first_link" log comes after the insertion code in the source, but the insertion actually runs after the log. The statements execute out of order in JavaScript.
The basic fact about asynchronous calls is that they start and the code moves on. The problem with your code is that you assume they are all done when they are still queued up waiting to be made. You need to wait for them all to finish before you navigate away. Promises make that easier, so look at using jQuery's $.when:
var ajaxCalls = []
for (elt of an) {
...
ajaxCalls.push($.ajax({...}))
}
$.when.apply($, ajaxCalls).then( function () { console.log('done'); } );
// $.when(...ajaxCalls).then( function () { console.log('done'); } );
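Filled out against the loop in the question, the pattern might look like this (a sketch only, assuming the same an array, insert.php endpoint, and first_link variable as above):

var ajaxCalls = [];

for (const elt of an) {
    // ...build url, title2, wilaya, city, num exactly as in the question...
    ajaxCalls.push($.ajax({
        type: "post",
        url: "http://localhost/insert.php",
        data: { link: url, h2: title2, numero: num, wilaya: wilaya, city: city }
    }));
}

// $.when resolves once every promise passed to it has resolved
$.when.apply($, ajaxCalls).then(function () {
    console.log("all inserts finished");
    window.open(first_link, '_SELF'); // now it is safe to move on
});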
I am attempting to move data from JSON into a SharePoint list. The database I am pulling the JSON from has a limit of 500 items per request, so I am using epoch time to make multiple calls and pull all of the data. I can pull all of the data perfectly fine, but when I attempt to place it into the SharePoint list I start to encounter problems. The first 515 items are added to the SharePoint list without any issue, but no items after that make it into the list. The last epoch time that goes into the SharePoint list is 1523302307; any item with a greater epoch time is not placed into the list for some reason.
When an item does not get added to the SharePoint list, the onQueryFailed() function says:
Request failed. Column '' does not exist. It may have been deleted by another user.
A snippet of my code is below.
function testDatabase() {
    var settingObj = {
        sinceDateTime: '0',
        dynoCode: 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxx',
        custCode: 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxx',
        formGUID: 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxx',
        getJSONURL: 'https://www.xxxxxxxxxx.net/api/api/xxxx'
    };
    var columnArray = ['Quandrant', 'MapNumber', 'Picture1', 'Picture2', 'PoleOwner', 'PoleClass', 'PoleSize', 'PowerTagNumber', 'TelephoneTagNumber', 'AttachmentHeight', 'Guying', 'StreetLightAttached', 'TransformerPole', 'ExistingFiber', 'ExistingCopper', 'Grounds', 'MR1AtMidspanClearanceViolation', 'MR1ClearanceViolation', 'MR2PoleTransferRequired', 'MR3DownguyOrAnchorDeficiency', 'MR4BrokenLashingWire', 'MR5DamagedOrKinkedCoaxOrFiber', 'MR6DropTransferRequired', 'MR7TemporaryCoaxOrFiberCable', 'MR8DamagedElectronics', 'MR9PlaceOrRepairGroundOrBond', 'MR10DrpSplttrOrInsfntTapPrts', 'MR11PedOrVltOrLockbxDmged', 'PoleReplacement', 'PoleGrade', 'HouseCount', 'BuildingInformation'];

    $.getJSON(settingObj.getJSONURL, {
        sinceDateTime: settingObj.sinceDateTime,
        custCode: settingObj.custCode,
        dynoCode: settingObj.dynoCode,
        formGUID: settingObj.formGUID
    }).done(function (data) {
        var clientContext = new SP.ClientContext(siteUrl);
        var oList = clientContext.get_web().get_lists().getByTitle('testlist2');
        var itemCreateInfo = new SP.ListItemCreationInformation();

        $.each(data, function (i, item) {
            var contentArray = data[i].FormFieldsList;
            this.oListItem = oList.addItem(itemCreateInfo);
            this.oListItem.set_item('Title', data[i].resourceName);
            this.oListItem.set_item('SinceDateTime', data[i].FormUpdatedDate);
            var self = this;
            $.each(contentArray, function (i1, item2) {
                self.oListItem.set_item(columnArray[i1], contentArray[i1].FieldValue);
                console.log(contentArray[i1].FieldValue);
            });
            this.oListItem.update();
            clientContext.load(this.oListItem);
            clientContext.executeQueryAsync(onQuerySucceeded, onQueryFailed);
        });
        console.log('Rows returned: ' + data.length);
        console.log(data);
    }).fail(function (jqXHR, textStatus, err) {
        console.log('Error on web service call: ' + err + ' Text Status: ' + textStatus + ' Message: ' + jqXHR.responseXML);
    });
}

function onQuerySucceeded() {
    console.log('Item created!');
}

function onQueryFailed(sender, args) {
    console.log('Request failed. ' + args.get_message() + '\n' + args.get_stackTrace());
}
I figured out the issue! The dataset I was importing had items with 32 columns, which matched the array I created, but after the 515th item the dataset switched to 33 columns, which explains the error I was receiving. Just thought I should post my answer in case anyone hits this in the future.
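For anyone who hits the same thing, a small defensive sketch (my own addition, not part of the original fix): bound the inner loop to the known columns so an unexpected extra field logs a warning instead of failing the whole batch.

// only map fields that have a matching column; log anything unexpected
$.each(contentArray, function (i1, item2) {
    if (i1 < columnArray.length) {
        self.oListItem.set_item(columnArray[i1], contentArray[i1].FieldValue);
    } else {
        console.log('Skipping unmapped field at index ' + i1 + ': ' + contentArray[i1].FieldValue);
    }
});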
I think a Firebase function that updates a list in the Firebase database is being captured by a subscription that is subscribed to that list. From what the list output looks like on my phone (in the app), and from the way my console output repeats, it seems to be capturing the whole list and displaying it each time an item is added. So (I looked this up) I believe this equation represents what is happening:
(N(N + 1))/2
That is how you get the sum of all the numbers from 1 to N. Doing the math in my case (N = 30 or so), I get around 465 entries, so you can see it is loading a ton when I only want it to load the first 10.
To show what is happening with the output, here is a pastebin: https://pastebin.com/B7yitqvD.
In the output, pay attention to the array that appears just before "length - 1 load". You can see it rapidly returns an array with one more entry each time and adds it to the list. I did an extremely rough count of how many items are in my list and got 440, which roughly matches the 465 number.
The chain of events starts on a page other than the one with the list, with this function, which initiates the sorting on the Firebase Functions side:
let a = this.http.get('https://us-central1-mane-4152c.cloudfunctions.net/sortDistance?text='
    + resp.coords.latitude + ':' + resp.coords.longitude + ':' + this.username);

this.subscription6 = a.subscribe(res => {
    console.log(res + " response from firebase functions");
    loading.dismiss();
}, err => {
    console.log(JSON.stringify(err));
    loading.dismiss();
});
Here is the function, on the page with the list, that I think is capturing the entire sort for some reason. The subscription seems to fire repeatedly as the Firebase function sorts.
loadDistances() {
    //return new Promise((resolve, reject) => {
    let cacheKey = "distances";
    let arr = [];
    let mapped;
    console.log("IN LOADDISTANCES #$$$$$$$$$$$$$$$$$$$$$");
    console.log("IN geo get position #$$$$$$$5354554354$$$$$$$");
    this.distancelist = this.af.list('distances/' + this.username, { query: {
        orderByChild: 'distance',
        limitToFirst: 10
    }});
    this.subscription6 = this.distancelist.subscribe(items => {
        let x = 0;
        console.log(JSON.stringify(items) + " length - 1 load");
        items.forEach(item => {
            let storageRef = firebase.storage().ref().child('/settings/' + item.username + '/profilepicture.png');
            storageRef.getDownloadURL().then(url => {
                console.log(url + "in download url !!!!!!!!!!!!!!!!!!!!!!!!");
                item.picURL = url;
            }).catch((e) => {
                console.log("in caught url !!!!!!!$$$$$$$!!");
                item.picURL = 'assets/blankprof.png';
            });
            this.distances.push(item);
            if (x == items.length - 1) {
                this.startAtKey4 = items[x].distance;
            }
            x++;
        });
        //this.subscription6.unsubscribe();
    });
}
The subscription in loadDistances works fine as long as I don't update the list from the other page - another indicator that it might be capturing the whole sort and listing it repeatedly as it sorts.
I have tried everything I could think of to unsubscribe from the list after the update, so that the next time the page with the list is entered it just loads the list of 10, instead of reloading over and over right after the update. I know Firebase Functions is in beta. Could this be a bug on their side? Here is my Firebase Functions code:
exports.sortDistance = functions.https.onRequest((req, res) => {
    // Grab the text parameter.
    var array = req.query.text.split(':');
    // Push the new message into the Realtime Database using the Firebase Admin SDK.
    // Get a database reference to our posts
    var db = admin.database();
    var ref = db.ref("profiles/stylists");
    var promises = [];
    // Attach an asynchronous callback to read the data at our posts reference
    ref.on("value", function(snapshot) {
        //console.log(snapshot.val());
        var snap = snapshot.val();
        for (const user in snap) {
            promises.push(new Promise(function(resolve, reject) {
                var snapadd = snap[user].address;
                console.log(snapadd + " snap user address (((((((())))))))");
                // guard must compare typeof against the string 'undefined'
                if (snapadd != null && typeof snapadd !== 'undefined') {
                    googleMapsClient.geocode({
                        address: snapadd
                    }).asPromise()
                    .then(response => {
                        console.log(response.json.results[0].geometry.location.lat);
                        console.log(" +++ " + response.json.results[0].geometry.location.lat + ' ' + response.json.results[0].geometry.location.lng + ' ' + array[0] + ' ' + array[1]);
                        var distanceBetween = distance(response.json.results[0].geometry.location.lat, response.json.results[0].geometry.location.lng, array[0], array[1]);
                        console.log(distanceBetween + " distance between spots");
                        var refList = db.ref("distances/" + array[2]);
                        console.log(snap[user].username + " snap username");
                        refList.push({
                            username: snap[user].username,
                            distance: Math.round(distanceBetween * 100) / 100
                        });
                        resolve();
                    })
                    .catch(err => { console.log(err); resolve(); });
                }
                else {
                    resolve();
                }
            }).catch(err => console.log('error from catch ' + err)));
            //console.log(typeof user + 'type of');
        }
        var p = Promise.all(promises);
        console.log(JSON.stringify(p) + " promises logged");
        res.status(200).end();
    }, function (errorObject) {
        console.log("The read failed: " + errorObject.code);
    });
});
What is weird is that when I check the Firebase Functions logs, all of this appears to run only once, but I still think the subscription could be capturing the whole sorting process in some weird way while rapidly returning it. To be as clear as possible about what I think is going on: each stage of the sort is being captured in an (N(N + 1))/2 fashion, starting at 1 and going to roughly 30, and the sum of the sorting ends up being the length of my list (with items 1-10 repeated over and over again).
I updated to angularfire2 5.0 and angular 5.0...which took a little while, but ended up solving the problem:
this.distanceList = this.af.list('/distances/' + this.username,
ref => ref.orderByChild("distance").limitToFirst(50)).valueChanges();
In my HTML I used an async pipe, which solved the sorting problem:
...
<ion-item *ngFor="let z of (distanceList|async)" no-padding>
...
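For context on why this works (my reading, not something stated in the original answer): the async pipe subscribes once and disposes of the subscription automatically when the view is destroyed, which is roughly what managing it by hand in the page lifecycle would look like:

// A sketch of the manual equivalent (assuming Ionic lifecycle hooks;
// distanceList and subscription6 are the names used in the question).
ionViewDidLoad() {
    this.subscription6 = this.distanceList.subscribe(items => {
        this.distances = items; // replace rather than push, so re-sorts can't accumulate
    });
}

ionViewWillLeave() {
    // dispose of the subscription so later server-side re-sorts
    // can't keep pushing intermediate list states into this view
    this.subscription6.unsubscribe();
}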
I'm trying to figure out a way to wait for the first function (relatedVids(...)) to finish executing before executing the second function (relatedVidsDetails()). The code simply makes a single $.get(...) request to the YouTube API and loops through each item retrieved.
The problem, as observed in the debugger, is that execution enters the first function (up to the $.get(), without actually retrieving anything yet), then skips into the second function (again, up to its $.get()). It then goes back and finishes the first function, retrieving all the items. But when it gets to the second function again, videoIdChainStr - which holds all the video ids gathered into a string by the first function - is never used, since I suspect the second function's $.get(...) had already executed and never ran a second time once the values were available.
So my next step was to try $.Deferred(), which is supposed to let the first function resolve before the second even starts executing, guaranteeing that values from the first function are available to the second without anything being skipped. But I'm not sure I'm doing this right, because it behaves the same with or without $.Deferred().
First function (relatedVids(...)):
var relatedVidsDefer = function relatedVids(videoId)
{
    var r = $.Deferred();
    $.get( // get videos related to videoId
        "https://www.googleapis.com/youtube/v3/search",
        {
            part: 'snippet',
            maxResults: vidResults,
            relatedToVideoId: videoId,
            order: 'relevance',
            type: 'video',
            key: 'XXXXXXXXXX'
        },
        function(data)
        {
            debugger;
            $.each(data.items,
                function(i, item)
                {
                    try
                    {
                        console.log(item);
                        var vidTitle = item.snippet.title; // video title
                        var vidThumbUrl = item.snippet.thumbnails.default.url; // video thumbnail url
                        var channelTitle = item.snippet.channelTitle; // channel of uploaded video
                        var extractVideoId = null; // video id string extracted from vidThumbUrl

                        // check if vidThumbUrl is not null, empty string, or undefined
                        if (vidThumbUrl)
                        {
                            var split = vidThumbUrl.split("/"); // split string on '/'
                            extractVideoId = split[4]; // the video id sits after the fourth '/'
                        }
                        else console.error("vidThumbUrl is either undefined or null or empty string.");

                        // if video title is longer than 25 characters, append an ellipsis
                        if (vidTitle.length > 25)
                        {
                            var strNewVidTitle = vidTitle.substr(0, 25) + "...";
                            vidTitle = strNewVidTitle;
                        }

                        // check whether channelTitle is March of Dimes
                        if (channelTitle === "March of Dimes")
                        {
                            extractedVideoIdArr.push(extractVideoId); // add the extracted video id to the array
                            // check if extractedVideoIdArr is defined and not empty
                            if (typeof extractedVideoIdArr !== 'undefined' && extractedVideoIdArr.length > 0)
                            {
                                console.log("before join(): ", extractedVideoIdArr);
                                videoIdChainStr = extractedVideoIdArr.join(", "); // turn the array into a comma-separated string of video ids for relatedVidsDetails()
                                console.log("after join(): ", videoIdChainStr);
                            }
                            var vidThumbnail = '<div class="video-thumbnail"><a class="thumb-link" href="single-video.html"><div class="video-overlay"><img src="imgs/video-play-button.png"/></div><img src="' + vidThumbUrl + '" alt="No Image Available." style="width:204px;height:128px"/></a><p><a class="thumb-link" href="single-video.html">' + vidTitle + '</a><br/></div>'; //+ convert_time(vidDuration) + ' / Views: ' + viewCount + '</p></div>';
                            // print results
                            $('.thumb-related').append(vidThumbnail);
                            $(item).show(); // show current video thumbnail item
                        }
                        else $(item).hide(); // hide current video thumbnail item
                    }
                    catch(err)
                    {
                        console.error(err.message); // log error but continue operation
                    }
                }
            );
        }
    );
    return r; // note: r is returned but never resolved
};
Second function (relatedVidsDetails(...)):
var relatedVidsDetailsDefer = function relatedVidsDetails()
{
    // change extractvideoid into a string by tostring() or join() for param to recognize
    console.log("initial: ", extractedVideoIdArr);
    $.get(
        "https://www.googleapis.com/youtube/v3/videos",
        {
            part: 'snippet, contentDetails, statistics',
            id: videoIdChainStr, // chain string of video ids to be called upon in a single request
            key: 'XXXXXXXXXX',
        },
        function(data)
        {
            debugger;
            $.each(data.items,
                function(i, item)
                {
                    try
                    {
                        console.log("relatedVidsDetails()", item);
                        console.log("extractedvideoidarr: ", extractedVideoIdArr[i]);
                        var _vidDuration = item.contentDetails.duration;
                        var _viewCount = item.statistics.viewCount;
                        console.log("id: " + extractedVideoIdArr[i] + " duration: " + _vidDuration);
                        console.log("id: " + extractedVideoIdArr[i] + " viewCount: " + _viewCount);
                    }
                    catch(err)
                    {
                        console.error(err.message); // log error but continue operation
                    }
                }
            );
        }
    );
};
Here is the code that gets skipped when the second function is entered the second time:
$.each(data.items,
    function(i, item)
    {
        try
        {
            console.log("relatedVidsDetails()", item);
            console.log("extractedvideoidarr: ", extractedVideoIdArr[i]);
            var _vidDuration = item.contentDetails.duration;
            var _viewCount = item.statistics.viewCount;
            console.log("id: " + extractedVideoIdArr[i] + " duration: " + _vidDuration);
            console.log("id: " + extractedVideoIdArr[i] + " viewCount: " + _viewCount);
        }
        catch(err)
        {
            console.error(err.message); // log error but continue operation
        }
    }
);
This code is skipped because the second function is stepped into while videoIdChainStr is still empty, and it is not run again once the first function has completed and has the values ready for the second to use. I could never get it to execute with videoIdChainStr populated.
Well, the answer to the question "How do I wait for the first function to finish executing before the second is executed?" is:
USE CALLBACKS OR PROMISES.
Example:
function firstMethod() {
    // half of first method actions ...
    secondMethod(function() {
        // other half of first method actions
    });
}

function secondMethod(callBack) {
    // second method actions
    setTimeout(function() { // the timeout just simulates the time a response makes us wait
        callBack();
    }, 5000);
}
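Applied to the code in the question, the Deferred only helps if it is actually resolved; as posted, r is created and returned but never resolved, so .then() callbacks never fire. A sketch (assuming the rest of relatedVids stays as posted):

var relatedVidsDefer = function relatedVids(videoId) {
    var r = $.Deferred();
    $.get("https://www.googleapis.com/youtube/v3/search",
        { /* ...same parameters as in the question... */ },
        function (data) {
            // ...process data.items and build videoIdChainStr as before...
            r.resolve(); // signal completion only after the results are processed
        });
    return r;
};

// the second request now starts only after the first has resolved,
// so videoIdChainStr is guaranteed to be populated
relatedVidsDefer(videoId).then(relatedVidsDetailsDefer);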
I am building a Phonegap application currently targeted primarily for iOS 7+.
I have a local SQLite database that I am copying clean from the server. Several of the tables are empty at this time (they will not always be empty). When I use the code below, the result.insertId value is not populated, but only for the first row inserted into a table. After the first row, all the insertId values are correct.
db.transaction(function (tx1) {
    tx1.executeSql(insertSQL1, [arguments],
        function (t1, result1) {
            if (result1 != null && result1.rowsAffected > 0) {
                logMessage("Result1:" + JSON.stringify(result1));
                $.each(UserSelectedArrayOfItems, function (index, value) {
                    db.transaction(function (tx2) {
                        tx2.executeSql(insertSQL2, [result1.insertId, value],
                            function (t2, result2) {
                                if (result2 != null) {
                                    logMessage("Result2: " + JSON.stringify(result2));
                                }
                            },
                            function (t2, error) {
                                logMessage("There was an error: " + JSON.stringify(error));
                            });
                    });
                });
                << Do app Navigation if all was ok >>
            }
        },
        function (t, error) {
            logMessage("Error: " + JSON.stringify(error));
        });
});
While testing, both tables start empty - zero rows. Both have an ID column with properties INTEGER PRIMARY KEY. The insert does work and the row does get an ID of 1. The ID is correct in the table, but result.insertId is undefined in the transaction success callback.
The logMessage function writes the strings out to a file so I can debug/support the app. Example log messages when inserting 1 row into the parent table (always only one to parent) and 2 rows into the child table (could be 1 to n rows):
Result1: {"rows":{"length":0},"rowsAffected":1}
Result2: {"rows":{"length":0},"rowsAffected":1}
Result2: {"rows":{"length":0},"rowsAffected":1,"insertId":2}
Has anyone seen this behavior before? I am using the following plugin:
<gap:plugin name="com.millerjames01.sqlite-plugin" version="1.0.1" />
Yes. I found that when you are using a value from a result (or its rows), the best practice is to first copy the value into a new variable, then pass that variable to another function call and do the work there. As an update to my example:
db.transaction(function (tx1) {
    tx1.executeSql(insertSQL1, [arguments],
        function (t1, result1) {
            if (result1 != null && result1.rowsAffected > 0) {
                logMessage("Result1:" + JSON.stringify(result1));
                var newID = result1.insertId;
                $.each(UserSelectedArrayOfItems, function (index, value) {
                    DoWork(newID, value);
                });
                << Do app Navigation if all was ok >>
            }
        },
        function (t, error) {
            logMessage("Error: " + JSON.stringify(error));
        });
});

function DoWork(id, value) {
    db.transaction(function (tx2) {
        tx2.executeSql(insertSQL2, [id, value],
            function (t2, result2) {
                if (result2 != null) {
                    logMessage("Result2: " + JSON.stringify(result2));
                }
            },
            function (t2, error) {
                logMessage("There was an error: " + JSON.stringify(error));
            });
    });
}
You'll need to add whatever checks or returns you need to confirm everything went as expected, but that's the overall gist of what fixed it for me.
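If the very first insertId still comes back undefined (as in the logs above), a defensive fallback is possible. This is a sketch of my own, not part of the original fix, relying on SQLite's standard last_insert_rowid() function:

// prefer result.insertId, fall back to last_insert_rowid(), which returns
// the rowid of the most recent INSERT on this database connection
function getInsertId(tx, result, callback) {
    if (result && typeof result.insertId !== 'undefined') {
        callback(result.insertId);
    } else {
        tx.executeSql('SELECT last_insert_rowid() AS id', [],
            function (t, r) {
                callback(r.rows.item(0).id);
            });
    }
}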