I am working with IndexedDB and I have a database with about 4000 records. I am trying to load small chunks of records at a time, similar to the infinite scrolling you see on your Twitter feed. I Googled this and found no code examples, but I did come across the cursor's advance method. I tried to use it, but still no success: the browser loaded all the records at once. How do I make it so I am only loading small batches of records at a time?
var openRequest = indexedDB.open("USA", 1);
openRequest.onsuccess = function (e) {
var db = e.target.result;
var transaction = db.transaction(["Male"], "readonly");
var objectStore = transaction.objectStore("Male");
var cursor = objectStore.openCursor();
cursor.onsuccess = function (e) {
var res = e.target.result;
if (res) {
res.advance(25);
res.continue();
console.log(res.value);
}
}
}
The res.advance(25) call will skip 25 rows, but the cursor will still walk through all the remaining records. I changed your code and added some comments. You might want to use something like this:
var openRequest = indexedDB.open("USA", 1);
openRequest.onsuccess = function (e) {
var db = e.target.result;
var transaction = db.transaction(["Male"], "readonly");
var objectStore = transaction.objectStore("Male");
var cursor = objectStore.openCursor();
var results = null; // variable to store the results
var startIndex = 25,
maxResults = 25;
cursor.onsuccess = function (e) {
var res = e.target.result;
if (res) {
if (!results) {
results = [];
// skip rows, but only the first time
res.advance(startIndex);
} else {
results.push(res.value)
// We don't have 25 results yet, continue to load a next one
if(results.length < maxResults) {
res.continue();
}
}
}
}
transaction.oncomplete = function() {
// maxResults loaded or maybe it was the last page so it might not be full
console.log(results);
}
}
The example below is a lot simpler and works with auto-incremented keys, but the pages are only full when no records have been deleted (not the full code, but the point is the IDBKeyRange.bound(0, 25)):
var cursor = objectStore.openCursor(IDBKeyRange.bound(0, 25));
var results = [];
cursor.onsuccess = function (e) {
    var res = e.target.result;
    if (res) {
        results.push(res.value);
        res.continue();
    }
}
transaction.oncomplete = function () {
    console.log(results);
}
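For a real infinite scroll you usually don't want to re-skip rows on every page; it can be cheaper to remember the last key you saw and continue from there. The sketch below is not from the answer above, just one way to do it with a key range (the "Male" store name and the page size of 25 are taken from the question, the helper itself is hypothetical):

// Hypothetical helper: load the next "page" after the last key we already have.
function loadNextPage(db, lastKey, pageSize, callback) {
    var results = [];
    var transaction = db.transaction(["Male"], "readonly");
    var store = transaction.objectStore("Male");
    // Open the cursor just after the last key we saw (or from the start on the first page).
    var range = lastKey != null ? IDBKeyRange.lowerBound(lastKey, true) : null;
    var request = store.openCursor(range);
    request.onsuccess = function (e) {
        var cursor = e.target.result;
        if (cursor && results.length < pageSize) {
            results.push(cursor.value);
            lastKey = cursor.key; // remember where we stopped
            cursor.continue();
        }
    };
    transaction.oncomplete = function () {
        callback(results, lastKey); // pass lastKey back in for the next page
    };
}

Each call returns one page plus the key to resume from, so a scroll handler can keep calling it until results comes back shorter than pageSize.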
I have a web app with one drop down list and 2 buttons. The drop down list gets its values from a sheet, and the buttons write back to the sheet. The script I have works fine for that:
<script>
  $(function() {
    $('#txt1').val('');
    google.script.run
      .withSuccessHandler(updateSelect)
      .getSelectOptions();
  });

  function updateSelect(opt) {
    var select = document.getElementById("sel1");
    select.options.length = 0;
    for (var i = 0; i < opt.length; i++) {
      select.options[i] = new Option(opt[i], opt[i]);
    }
  }

  function listS() {
    const selectElem = document.getElementById('sel1');
    const index = selectElem.selectedIndex;
    if (index > -1) {
      const e = document.getElementById("sel1");
      const value = e.options[index].value;
      const body = { index: index, value: value };
      google.script.run.withSuccessHandler(yourCallBack).yourServerSideFunc(body);
    }
  }

  document.getElementById("but1").addEventListener("click", listS);

  function yourCallBack(response) {
  }
</script>
On the server side (Apps Script):
function getSelectOptions() {
  var ss = SpreadsheetApp.openById('1onuWoUKh1XmvEAmKktwJekD782BFIru-MDA0omqzHjw');
  var sh = ss.getSheetByName('Database');
  var rg = sh.getRange(2, 1, sh.getLastRow() - 1, 8);
  var vA = rg.getValues();
  var useremail = Session.getActiveUser().getEmail();
  var opt = [];
  for (var i = 0; i < vA.length; i++) {
    if (vA[i][1] == "Pending Approval") {
      if (vA[i][7] + "#xxx.com" == useremail || vA[i][7] + "#xxx.com" == useremail) {
        opt.push(vA[i][3] + " REQ ID: " + vA[i][0]);
      }
    }
  }
  if (opt.length == 0) { opt.push("You do not have pending requests"); }
  return opt;
}

function doGet() {
  var output = HtmlService.createHtmlOutputFromFile('list');
  return output;
}
function yourServerSideFunc(body) {
  var value = body["value"];
  var ss = SpreadsheetApp.openById('1onuWoUKh1XmvEAmKktwJekD782BFIru-MDA0omqzHjw');
  var sh = ss.getSheetByName('Database');
  var rg = sh.getRange(1, 1, sh.getLastRow() - 1, 4);
  var vA = rg.getValues();
  var str = "Approved";
  for (var i = 0; i < vA.length; i++) {
    if (vA[i][3] + " REQ ID: " + vA[i][0] == value) {
      sh.getRange(i + 1, 2).setValue(str);
    }
  }
  return ContentService.createTextOutput(JSON.stringify({ message: "ok" })).setMimeType(ContentService.MimeType.JSON);
}
Now I am trying to regenerate the drop down list values after the button is clicked. I tried to add
var output = HtmlService.createHtmlOutputFromFile('list');
return output;
in the yourServerSideFunc(body) function to regenerate the HTML, but it does not work. I have also tried to force an HTML refresh, but that did not work either.
How can I easily re-trigger the generation of the drop down list items? Worst case it is ok to refresh the whole page, but it should be simple to regenerate just the drop down list since I already have the code for it.
I ended up with this workaround.
function listS() {
  const selectElem = document.getElementById('sel1');
  const index = selectElem.selectedIndex;
  if (index > -1) {
    const e = document.getElementById("sel1");
    const value = e.options[index].value;
    const body = { index: index, value: value };
    google.script.run.withSuccessHandler(yourCallBack).yourServerSideFunc(body);
    //ADDED:
    var select = document.getElementById("sel1");
    select.options[index] = new Option("Approved! Please refresh", "Approved! Please refresh");
    selectElem.selectedIndex = index;
  }
}
It does not really meet the original goal of refreshing the list from the sheet. It would be great if someone posted a solution that calls the server function again. I tried adding google.script.run.doGet() and similar, but it seems that this does not call the server-side functions properly.
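One approach that should re-fetch the options from the sheet, not taken from the thread but only reusing the functions already shown above, is to call getSelectOptions again from the success handler once the write has finished; a minimal sketch:

// Hypothetical variant of the existing (empty) callback: once yourServerSideFunc
// has written to the sheet, ask the server for the options again and rebuild the
// <select> with the updateSelect function already defined above.
function yourCallBack(response) {
  google.script.run
    .withSuccessHandler(updateSelect)
    .getSelectOptions();
}

Because google.script.run calls are asynchronous, doing this inside the success handler (rather than right after the .yourServerSideFunc(body) call) ensures the sheet has already been updated when the options are re-read.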
I am working on a small idea to collect errors from pages, store them in a DB, and then use a graph API to display the information visually.
There are 8 sites and each of them has 100 entries, so 800 rows per run.
I loop through each site and then sub-loop through its table of errors and collect them.
I got it working by running an insert query in each of those sub-loops for all 800 entries, but I am getting some sort of memory leak from so many transactions, and after a few minutes Node crashes because it exceeds its memory limit.
So I tried queuing all 800 entries into an array of arrays and then performing a multi-insert at the end of every iteration, but I am getting ER_PARSE_ERROR.
var tabletojson = require('tabletojson');
var mysql = require("mysql");
var striptag = require("striptags");
var fs = require("fs");
var path = require('path');
var startCollector;
var iterations = 0;
var insertions = 0;
var duplicated = 0;
var datas = [];
var clients = ["ClientA", "ClientB", "ClientC", "ClientD", "ClientE", "ClientF", "ClientG", "ClientH"];
var appDir = path.dirname(require.main.filename);
var errorList = ["err1", "err2", "err3", "err4", "err5", "err6"];
var con = mysql.createPool({
    host: "localhost",
    user: "User",
    password: "Password",
    database: "errors"
});
function CollectErrors() {
    startCollector = new Date();
    for (var a = 0; a < clients.length; a++) {
        (function(a) {
            tabletojson.convertUrl("http://example.com" + clients[a] + "/page.php?limit=100", { stripHtmlFromCells: false }, function(response) {
                var rs = response[0];
                for (var l = rs.length - 1; l > -1; l--) {
                    var newDate = formatDate(striptag(rs[l]["Date"]), striptag(rs[l]["Time"]));
                    var user = getUser(striptag(rs[l]["User"]));
                    var msg = striptag(rs[l]["Error"]);
                    var splitError = rs[l]["Error"].split("<a href=\"");
                    var link = getUrl(splitError[1]);
                    var id = getId(link);
                    var type = getType(striptag(splitError[0]));
                    var temp = [newDate, link, type, user, clients[a], id, msg];
                    datas.push(temp);
                }
            });
        })(a);
    }
    con.getConnection(function(err, connection) {
        connection.query("INSERT IGNORE INTO entries (time, url, type, author, client, uid, message) VALUES ?", [datas], function(err, rows) {
            console.log(err);
        });
        connection.release();
        datas = [];
    });
    setTimeout(CollectErrors, 10000);
}
function formatDate(date, time) {
    var newdate = date.split("/").reverse().join("-");
    var newtime = time + ":00";
    return newdate + " " + newtime;
}

function getUrl(uri) {
    return "http://example.com/" + uri.split("\">Details")[0];
}

function getId(url) {
    return decodeURIComponent((new RegExp('[?|&]' + "id" + '=' + '([^&;]+?)(&|#|;|$)').exec(url) || [null, ''])[1].replace(/\+/g, '%20')) || null;
}

function getType(error) {
    for (var a = 0; a < errorList.length; a++) {
        if (error.indexOf(errorList[a]) !== -1) {
            return errorList[a];
        }
    }
    return "Other";
}

function getUser(user) {
    if (user == "" || user == " " || user == null) {
        return "System";
    }
    return user;
}

CollectErrors();
I've tried mysql.createConnection too, but that gave me the same issue.
I've been stuck for the past 12 hours and I can't see what's wrong. I've even tried populating the datas array with plain strings, but I got the same error.
I've changed your code to use ES6 features and the module APIs correctly.
Useful links: correct pooling with mysql, correct insert query, async/await, IIFE, enhanced object literals.
const tabletojson = require('tabletojson'),
    mysql = require("mysql"),
    striptag = require("striptags"),
    fs = require("fs"),
    path = require('path');
let startCollector;
let iterations = 0,
    insertions = 0,
    duplicated = 0;
let datas = [];
const clients = ["ClientA", "ClientB", "ClientC", "ClientD", "ClientE", "ClientF", "ClientG", "ClientH"];
const appDir = path.dirname(require.main.filename);
const errorList = ["err1", "err2", "err3", "err4", "err5", "err6"];
const con = mysql.createPool({
    host: "localhost",
    user: "User",
    password: "Password",
    database: "errors"
});
// We'll use async/await from ES6
const collectErrors = async () => {
    // Up to here I've only changed the syntax to ES6
    startCollector = new Date();
    // We iterate over each client with for..of, which lets us use await inside the loop
    for (let client of clients) {
        // Please check that the client value returns correct data. If not, switch back to an index-based loop and clients[a]
        const tbj = await tabletojson.convertUrl("http://example.com" + client + "/page.php?limit=100", {
            stripHtmlFromCells: false
        });
        const result = tbj[0];
        for (const rs of result) {
            // I can't check this part, but I hope your example had correct values.
            let newDate = formatDate(striptag(rs["Date"]), striptag(rs["Time"]));
            let user = getUser(striptag(rs["User"]));
            let msg = striptag(rs["Error"]);
            let splitError = rs["Error"].split("<a href=\"");
            let link = getUrl(splitError[1]);
            let id = getId(link);
            let type = getType(striptag(splitError[0]));
            // ES6 enhanced object literal syntax; temp holds the row in column order
            datas.push({
                newDate,
                user,
                msg,
                id,
                link,
                type,
                temp: [newDate, link, type, user, client, id, msg]
            });
        }
    }
    // OK, here we have the filled datas array and we want to save it.
    con.getConnection((err, connection) => {
        if (err) {
            console.log(err);
            return;
        }
        // Note: with the mysql module, a bulk insert uses "VALUES ?" with a nested
        // array of rows, so we map each queued object back to its row array here.
        const rows = datas.map(d => d.temp);
        connection.query("INSERT IGNORE INTO entries (time, url, type, author, client, uid, message) VALUES ?", [rows], (err) => {
            console.log(err);
            connection.release();
            datas = [];
        });
    });
    // I don't see why you need the timeout here, so I've left it commented out.
    // setTimeout(CollectErrors, 10000);
};
// Here your other methods go....
// And to call your async function we'll use IIFE
(async () => {
    await collectErrors();
})();
There may still be issues with the mysql insert, but I can't verify that here. If anything comes up, please write in the comments and I'll help you with it.
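For reference, here is a minimal, self-contained sketch of the bulk-insert shape the mysql module expects (the table and column names are taken from the question; the sample rows are made up):

const mysql = require("mysql");

const pool = mysql.createPool({ host: "localhost", user: "User", password: "Password", database: "errors" });

// Each row is an array in column order; the nested array passed after VALUES ?
// expands to (..),(..),... so all rows go out in a single statement.
const rows = [
    ["2020-01-01 10:00:00", "http://example.com/a", "err1", "System", "ClientA", "1", "message one"],
    ["2020-01-01 10:05:00", "http://example.com/b", "err2", "System", "ClientB", "2", "message two"]
];

pool.query(
    "INSERT IGNORE INTO entries (time, url, type, author, client, uid, message) VALUES ?",
    [rows],
    (err, result) => {
        if (err) console.error(err);
        else console.log("inserted", result.affectedRows, "rows");
        pool.end();
    }
);

Passing the nested array incorrectly (for example, without the extra wrapping array) is a common cause of the ER_PARSE_ERROR mentioned in the question.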
Below are my IndexedDB (IDB) objectStore name and the method that uses it.
What I want is to get the values from the objectStore and, for the records that match a condition, add a calculated value into a variable.
var item_id = [];
var db;

function displayNotes(filter) {
    var line = 0;
    var transaction = db.transaction(["frp_item_master"], "readonly");
    var itemsname = [];
    var itemsqty = [];
    var itemsprice = [];
    var itemsdisc = [];
    var itemstax = [];
    var handleResult = function(event) {
        var cursor = event.target.result;
        var price_with_tax;
        var i = 0;
        i++;
        if (cursor) {
            ++line;
            if (cursor.value.item_id == 20008) {
                item_id.push(event.target.result.value.item_id);
                //and other array push here
                //call function
                price_with_tax = (get_tax(itemstax) / 100) * itemsprice;
            }
            cursor.continue();
        } else {
            //error
        }
    };
Then here is my function, which is called in the above method:
function get_tax(p_tax_master_id) {
    var get_me;
    var total_tax_rate = 0;
    var transaction = db.transaction(["frp_tax_master"], "readonly");
    var objectStore = transaction.objectStore('frp_tax_master');
    objectStore.openCursor().onsuccess = function(event) {
        var cur = event.target.result;
        get_me = event.target.result.value.tax_master_id;
        console.log('get_me' + get_me);
        if (cur) {
            if (get_me == 2008) {
                total_tax_rate += event.target.result.value.rate;
                console.log("total tax" + total_tax_rate);
            }
            cur.continue();
        } else {
            console.log('else' + get_me);
        }
    };
    return total_tax_rate;
}
I am getting errors as shown in the screenshots below. The cursor keeps running even when there is no value left in the object store, and it shows "Value can not be set to null".
Can I just loop through all the records, exit the cursor once the last value has been fetched, and then assign the result to a variable?
Basically I am adding some numbers up into a variable.
screenshot-1
screenshot-2
In this line you get the cursor:
var cur = event.target.result;
And here you correctly check that the cursor is not null before using it:
if (cur) {
But this line assumes that event.target.result is not null:
get_me = event.target.result.value.tax_master_id;
So when the cursor hits the end of its range, event.target.result is null which gives you the "can't read property 'value' of null" error on the exact line the console is telling you.
Consider moving that line into the if (cur) block.
In addition to what Joshua said, the onsuccess handler fires asynchronously, so your get_tax function will always return 0 before the cursor has finished running.
Consider something like:
function get_tax(p_tax_master_id, cb) {
    var get_me;
    var total_tax_rate = 0;
    var transaction = db.transaction(["frp_tax_master"], "readonly");
    var objectStore = transaction.objectStore('frp_tax_master');
    objectStore.openCursor().onsuccess = function(event) {
        var cur = event.target.result;
        console.log('get_me' + get_me);
        if (cur) {
            get_me = event.target.result.value.tax_master_id;
            if (get_me == 2008) {
                total_tax_rate += event.target.result.value.rate;
                console.log("total tax" + total_tax_rate);
            }
            cur.continue();
        } else {
            console.log('else' + get_me);
            cb(null, total_tax_rate);
        }
    };
}
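Since get_tax now reports its result through a callback, the caller has to consume it asynchronously as well. A minimal sketch of how the call inside displayNotes might look (the surrounding variables are the ones from the question, and how the price is looked up is left open):

// Hypothetical call site: instead of using a return value, wait for the
// callback to deliver the accumulated tax rate.
get_tax(2008, function (err, rate) {
    if (err) {
        console.error(err);
        return;
    }
    // itemsprice is the array from displayNotes in the question; adapt the
    // price lookup to your own data.
    var price_with_tax = (rate / 100) * itemsprice;
    console.log(price_with_tax);
});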
You might also prefer to use a library such as ZangoDB so you could perform a query like:
var db = new zango.Db('db_name', ['frp_tax_master']);
var frp_tax_master = db.collection('frp_tax_master');
function get_tax(p_tax_master_id, cb) {
    frp_tax_master.find({
        tax_master_id: 2008
    }).group({
        total_tax_rate: { $sum: '$rate' }
    }).toArray((error, docs) => {
        if (error || !docs[0]) { cb(error); }
        else { cb(null, docs[0].total_tax_rate); }
    });
}
I'm having an issue with localStorage not clearing in any browser other than Chrome. I have a captcha using shapes that the user must draw to submit a form. There is a span the user can click to generate a new shape. To keep the information entered into the previous fields, I store the data in localStorage every time a new shape is requested. However, if the page is refreshed, I'd like the localStorage to be completely wiped out.
Here is the code for the span that the user clicks on:
<span style="color:#0000EE;cursor:pointer;" id="new-shape" onclick="window.location.reload()" title="Click for a new shape">new shape</span>
And here is the JS for the localStorage:
$('#new-shape').on('click', function () {
    var stickies = $('.sticky');
    var dropdowns = $('select');
    window.onbeforeunload = function () {
        localStorage.setItem("branch", $('#00NL0000003INTJ').val());
        localStorage.setItem("department", $('#00NL0000003I0Ux').val());
        localStorage.setItem("contact", $('#00NL0000003INUC').val());
        localStorage.setItem("company", stickies[0].value);
        localStorage.setItem("firstName", stickies[1].value);
        localStorage.setItem("lastName", stickies[2].value);
        localStorage.setItem("phone", stickies[3].value);
        localStorage.setItem("ext", stickies[4].value);
        localStorage.setItem("email", stickies[5].value);
        localStorage.setItem("help", stickies[6].value);
    }
});
window.onload = function () {
    var stickies = $('.sticky');
    var dropdowns = $('select');
    var selects = [localStorage.getItem("branch"), localStorage.getItem("department"), localStorage.getItem("contact")];
    var company = localStorage.getItem("company");
    var first = localStorage.getItem("firstName");
    var last = localStorage.getItem("lastName");
    var phone = localStorage.getItem("phone");
    var ext = localStorage.getItem("ext");
    var email = localStorage.getItem("email");
    var help = localStorage.getItem("help");
    var stickiesArr = [company, first, last, phone, ext, email, help];
    for (var i = 0; i < stickiesArr.length; i++) {
        if (stickiesArr[i] != null) {
            stickies[i].value = stickiesArr[i];
        }
    }
    for (var i = 0; i < selects.length; i++) {
        if (selects[i] != null) {
            dropdowns[i].value = selects[i];
        }
    }
    // this allows the wipe out of all data on a page refresh,
    // but clicking on "new shape" will maintain the data
    localStorage.clear();
}
This code works flawlessly in Chrome, but in IE and Firefox the page refresh fails to clear the localStorage. Am I doing something wrong that prevents localStorage from clearing across browsers?
Edit
I have tried using window.location.clear(), and for good measure, localSession.clear().
I finally managed to get it to work. Part of the problem was this call:
window.onbeforeunload = function () { (...) }
Firefox and IE11 both had trouble with this anonymous function being assigned inside the click handler. I had to change my click handler to:
$('#new-shape').on('click', function () {
    var stickies = $('.sticky');
    localStorage.setItem("branch", $('#00NL0000003INTJ').val());
    localStorage.setItem("department", $('#00NL0000003I0Ux').val());
    localStorage.setItem("contact", $('#00NL0000003INUC').val());
    localStorage.setItem("company", stickies[0].value);
    localStorage.setItem("firstName", stickies[1].value);
    localStorage.setItem("lastName", stickies[2].value);
    localStorage.setItem("phone", stickies[3].value);
    localStorage.setItem("ext", stickies[4].value);
    localStorage.setItem("email", stickies[5].value);
    localStorage.setItem("help", stickies[6].value);
});
And my window.onload anonymous function call had to be slightly reconfigured to:
window.onload = function () {
    var stickies = $('.sticky');
    var dropdowns = $('select');
    var selects = [localStorage.getItem("branch"), localStorage.getItem("department"), localStorage.getItem("contact")];
    var company = localStorage.getItem("company");
    var first = localStorage.getItem("firstName");
    var last = localStorage.getItem("lastName");
    var phone = localStorage.getItem("phone");
    var ext = localStorage.getItem("ext");
    var email = localStorage.getItem("email");
    var help = localStorage.getItem("help");
    var localStorageArr = [company, first, last, phone, ext, email, help];
    // input fields sticky
    for (var i = 0; i < localStorageArr.length; i++) {
        stickies[i].value = localStorageArr[i];
    }
    // dropdown fields sticky
    for (var i = 0; i < selects.length; i++) {
        if (selects[i] != null) {
            dropdowns[i].value = selects[i];
        } else {
            dropdowns[i].selectedIndex = 0;
        }
    }
    // fixing textarea
    if (help === null) {
        stickies[6].value = "";
    }
    localStorage.clear();
}
Some hacky workarounds, but the results are now working as desired.
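Not from the answer above, but another way to express the same intent (keep the data only across the reload triggered by the "new shape" click, never across a plain refresh) is to set a one-shot flag in sessionStorage when the span is clicked and check it on load. A rough sketch, using the same field keys as above; the flag name is made up:

// Hypothetical alternative: only restore the saved fields when the reload was
// caused by the "new shape" click, and always wipe storage afterwards.
$('#new-shape').on('click', function () {
    var stickies = $('.sticky');
    sessionStorage.setItem("restoreFields", "1"); // one-shot flag
    localStorage.setItem("company", stickies[0].value);
    localStorage.setItem("firstName", stickies[1].value);
    // ...remaining fields as in the handler above
});

window.onload = function () {
    var stickies = $('.sticky');
    if (sessionStorage.getItem("restoreFields") === "1") {
        stickies[0].value = localStorage.getItem("company") || "";
        stickies[1].value = localStorage.getItem("firstName") || "";
        // ...remaining fields as in the handler above
    }
    sessionStorage.removeItem("restoreFields");
    localStorage.clear(); // nothing survives a plain refresh
};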
I want to sort results obtained from indexedDB.
Each record has structure {id, text, date} where 'id' is the keyPath.
I want to sort the results by date.
My current code is as below:
var trans = db.transaction(['msgs'], IDBTransaction.READ);
var store = trans.objectStore('msgs');

// Get everything in the store;
var keyRange = IDBKeyRange.lowerBound("");
var cursorRequest = store.openCursor(keyRange);

cursorRequest.onsuccess = function(e) {
    var result = e.target.result;
    if (!!result == false) {
        return;
    }
    console.log(result.value);
    result.continue();
};
Actually you have to index the date field in the msgs objectStore and open an index cursor on the objectStore.
var cursorRequest = store.index('date').openCursor(null, 'next'); // or prev
This will get the sorted result. That is how indexes are supposed to be used.
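The index itself has to exist before you can open a cursor on it; it is created in the upgrade handler. A minimal sketch, assuming the 'msgs' store from the question (the database name and version number are placeholders):

// Hypothetical upgrade handler: creates the 'date' index on the 'msgs' store
// so that store.index('date').openCursor() can be used later.
var openRequest = indexedDB.open('mydb', 2); // bump the version to trigger onupgradeneeded
openRequest.onupgradeneeded = function (e) {
    var db = e.target.result;
    var store = e.target.transaction.objectStore('msgs');
    if (!store.indexNames.contains('date')) {
        store.createIndex('date', 'date', { unique: false });
    }
};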
Here's the more efficient way suggested by Josh.
Supposing you created an index on "date":
// Use the literal "readonly" instead of IDBTransaction.READ, which is deprecated:
var trans = db.transaction(['msgs'], "readonly");
var store = trans.objectStore('msgs');
var index = store.index('date');
// Get everything in the store:
var cursorRequest = index.openCursor();
// It's the same as:
// var cursorRequest = index.openCursor(null, "next");
// Or, if you want a "descendent ordering":
// var cursorRequest = index.openCursor(null, "prev");
// Note that there's no need to define a key range if you want all the objects
var res = new Array();
cursorRequest.onsuccess = function(e) {
var cursor = e.target.result;
if (cursor) {
res.push(cursor.value);
cursor.continue();
}
else {
//print res etc....
}
};
More on cursor direction here: http://www.w3.org/TR/IndexedDB/#cursor-concept
IDBIndex API is here: http://www.w3.org/TR/IndexedDB/#idl-def-IDBIndex
Thanks to zomg and hughfdjackson from the JavaScript IRC channel, I sorted the final array. Modified code below:
var trans = db.transaction(['msgs'], IDBTransaction.READ);
var store = trans.objectStore('msgs');

// Get everything in the store;
var keyRange = IDBKeyRange.lowerBound("");
var cursorRequest = store.openCursor(keyRange);
var res = new Array();

cursorRequest.onsuccess = function(e) {
    var result = e.target.result;
    if (!!result == false) {
        res.sort(function(a, b) { return Number(a.date) - Number(b.date); });
        // print res etc....
        return;
    }
    res.push(result.value);
    result.continue();
};