IDBKeyRange.only and IDBKeyRange.lowerBound together in one query - javascript

I need to convert this sql query to IndexedDB syntax.
"SELECT * FROM article WHERE userid=100 AND date_created > '2015-1-15 18:00:00'"
I'm thinking about these two solutions.
userid index
// Query via the 'userid' index: passing the bare key 100 to openCursor()
// behaves like IDBKeyRange.only(100), so the cursor yields only rows with
// userid === 100; the date condition is applied manually per record.
var articles = [];
objectStore.index('userid').openCursor(100).onsuccess = function(e){
  var cursor = e.target.result;
  if(cursor){
    var article = cursor.value;
    // NOTE(review): non-ISO strings like '2015-1-15 18:00:00' parse
    // inconsistently across engines -- prefer ISO 8601 or a timestamp.
    if(new Date(article.date_created) > new Date('2015-1-15 18:00:00')){
      articles.push(article);
    }
    cursor.continue();
  } else {  // fixed: original read 'esle', a SyntaxError
    console.log('done');
  }
};
date_created index
// Query via the 'date_created' index: the lowerBound range restricts the
// cursor to dates >= the given string, and the userid condition is applied
// manually per record.
var articles = [];
objectStore.index('date_created').openCursor(IDBKeyRange.lowerBound('2015-1-15 18:00:00')).onsuccess = function(e){
  var cursor = e.target.result;
  if(cursor){
    var article = cursor.value;
    if(article.userid === 100){
      articles.push(article);
    }
    cursor.continue();
  } else {  // fixed: original read 'esle', a SyntaxError
    console.log('done');
  }
};
How can I write a similar query with a compound index on userid and date_created?

Hmm. Try something like using IDBKeyRange.bound:
// Opens the database and streams every article whose ['date_created','userid']
// compound-index key lies between [date, id] and [date + 1ms, id] to
// handleArticle.
// NOTE(review): a compound bound like this can also match keys whose userid
// differs while the date lies strictly inside the range -- filter
// cursor.value.userid inside the callback if exact matching matters.
function queryArticlesByIdDate(id, date, handleArticle) {
  var openRequest = indexedDB.open('articles-db', 1);  // was open(...) pseudo-code
  openRequest.onupgradeneeded = function(event) {
    var db = this.result;
    // keyPath/autoIncrement omitted in the original; out-of-line keys kept.
    var articleStore = db.createObjectStore('articles');
    articleStore.createIndex('date-id', ['date_created', 'userid']);
  };
  openRequest.onsuccess = function(event) {
    var db = this.result;
    // fixed: IDBDatabase has no objectStore() method -- an object store is
    // only reachable through a transaction.
    var tx = db.transaction('articles', 'readonly');
    var articleStore = tx.objectStore('articles');
    var dateIdIndex = articleStore.index('date-id');
    var lowerDate = date;
    // fixed: Date + 1 concatenates strings and yields Invalid Date;
    // advance by one millisecond instead (or whatever window you need).
    var upperDate = new Date(date.getTime() + 1);
    var bounds = IDBKeyRange.bound([lowerDate, id], [upperDate, id]);
    var cursorRequest = dateIdIndex.openCursor(bounds);
    cursorRequest.onsuccess = function(event) {
      var cursor = this.result;
      if (!cursor) return;
      handleArticle(cursor.value);
      cursor.continue();
    };
  };
}
queryArticlesByIdDate(5, new Date(), function handler(article) {
  console.dir(article);
});

Related

How to get data from a local JSON file in Javascript

I'm trying to retrieve a specific ID from the JSON file depending on user input and then display a picture based on the ID retrieved from the JSON file
// Looks up the card typed into #un via the ygoprodeck API and points
// #chosenCard at its small image.
// Fixed: the original read an undeclared `cardName` (the input went into the
// typo'd `cardNaqme`), passed the URL string itself to JSON.parse (which
// always throws), and used the extension '.jgp'.
async function showCard(event) {
  event.preventDefault();  // moved first so the form never submits, even on error
  var cardName = document.getElementById('un').value;
  var cardNameProper = cardName.replace(/\s/g, '');
  // NOTE(review): assumes the API takes the card name as ?name= -- confirm.
  var response = await fetch("https://db.ygoprodeck.com/api/v7/cardinfo.php?name=" + cardNameProper);
  var obj = await response.json();
  var imgId = obj["data"][0]["id"];
  document.getElementById("chosenCard").src =
    "https://storage.googleapis.com/ygoprodeck.com/pics_small/" + imgId + ".jpg";
}
I think what you need is below but I am not sure I understood your json structure:
// Fetch the card info as JSON via XMLHttpRequest, then show the image.
// Fixed: the typo'd `cardNaqme` variable, a stray quote in the open() call
// (SyntaxError), a body written in Java (JSONObject/JSONArray), and the
// '.jgp' extension.
var cardName = document.getElementById('un').value;
var cardNameProper = cardName.replace(/\s/g, '');
var oReq = new XMLHttpRequest();
oReq.open("GET", "https://db.ygoprodeck.com/api/v7/cardinfo.php" + cardNameProper, true);
oReq.responseType = "json";
oReq.onload = function(e) {
  // With responseType 'json', oReq.response is already a parsed object.
  var json = oReq.response;
  var obj = json.data[0];
  var imgId = obj.id;
  document.getElementById("chosenCard").src =
    "https://storage.googleapis.com/ygoprodeck.com/pics_small/" + imgId + ".jpg";
};
oReq.send();
I tried to answer your problem after identifying your problem.
// Fixed version: preventDefault() runs first, the `cardNaqme` typo is
// corrected, and the JSON is actually fetched -- JSON.parse(url) in the
// original tried to parse the URL string itself and always threw.
async function showCard(event)
{
  event.preventDefault();
  var cardName = document.getElementById('un').value;
  var cardNameProper = cardName.replace(/\s/g, '');
  var response = await fetch("https://db.ygoprodeck.com/api/v7/cardinfo.php" + cardNameProper);
  var obj = await response.json();
  // The API payload wraps results in a `data` array (see the question's code).
  var imgId = obj.data[0].id;
  document.getElementById("chosenCard").src =
    "https://storage.googleapis.com/ygoprodeck.com/pics_small/" + imgId + ".jpg";
}

Populate XML table after filling it in JavaScript Controller

I have an empty XML table in my SAPUi5 application:
<m:Table id="testdata3"></m:Table>
In my JavaScript controller, I use the SheetJS library to upload an Excel file and then read the data into the table, using "sheet_to_html".
When debugging my code I have all the data together and also my console.log call shows me data in the innerHTML of my table. But for some reason, the table stays empty. So basically nothing happens in the front-end. I feel like I'm missing a "return" or "populate" or something in that direction to load the table with the new data.
Any ideas?
// Reads the dropped Excel file with FileReader + SheetJS and tries to inject
// the first sheet as HTML into the table.
// NOTE(review): this snippet as quoted is truncated -- the closing braces of
// the if-block and of the function are missing.
_import : function(file) {
var oTable = this.getView().byId('testdata3');
if(file && window.FileReader){
var reader = new FileReader();
var result = {}, data;
reader.readAsBinaryString(file);
// Fires asynchronously once the binary string is available.
reader.onload = function(e) {
var rawLog = reader.result;
data = e.target.result;
var wb = XLSX.read(data, {type: 'binary'});
var first_sheet_name = wb.SheetNames[0];
var worksheet = wb.Sheets[first_sheet_name];
// NOTE(review): oTable is a UI5 control, not a DOM element; assigning
// innerHTML on it renders nothing -- presumably why the table stays empty.
// Bind a model/aggregation instead (see the later attempts below).
oTable.innerHTML = XLSX.utils.sheet_to_html(worksheet);
console.log(oTable.innerHTML);
}
Update: code for model
// Second attempt: parse the sheet, build a JSONModel of column names and bind
// a Column template to the table's "columns" aggregation.
_import : function(file) {
var oTable = this.getView().byId('testdata3');
if(file && window.FileReader){
var reader = new FileReader();
var result = {}, data;
// Capture the controller for use inside the async onload callback.
var that = this;
reader.readAsBinaryString(file);
reader.onload = function(e) {
var rawLog = reader.result;
data = e.target.result;
var wb = XLSX.read(data, {type: 'binary'});
var first_sheet_name = wb.SheetNames[0];
var worksheet = wb.Sheets[first_sheet_name];
var oModel = new sap.ui.model.json.JSONModel();
sap.ui.getCore().setModel(oModel,'myResultModel');
that.getView().byId("testdata3").setModel(oModel);
var oColumns = [];
// I'm iterating over the column names and pushing them to my table works fine,
// but I'm then stuck with proceeding and pushing the rest of the data to my table.
var cells = Object.keys(worksheet);
for (var i = 0; i < Object.keys(cells).length; i++) {
// NOTE(review): indexOf('1') also matches addresses like A10/A11, not just row 1.
if( cells[i].indexOf('1') > -1)
{
oColumns.push(worksheet[cells[i]].v);
}
}
var oColumnNames = [];
$.each(oColumns, function(i, value) {
oColumnNames.push({
Text: oColumns[i]
});
});
// NOTE(review): this setProperty call is duplicated, and the data is stored
// at "/columnNames" while the aggregation below binds "/columns" -- the
// paths don't match, so the template never receives any contexts.
oModel.setProperty("/columnNames", oColumnNames);
oModel.setProperty("/columnNames", oColumnNames);
var oTemplate = new Column({
header: new Label({
text: "{Text}"
})
});
oTable.bindAggregation("columns", "/columns", oTemplate);
};
};
},
EDIT here is an other attempt I tried, when debugging everything looks fine but the table stays empty anyways..
onXLSXupload : function(e) {
this._import(e.getParameter("files") && e.getParameter("files")[0]);
},
// Third attempt: convert each sheet to JSON and push ColumnListItems onto the
// table directly via addItem().
_import : function(file) {
console.log(file);
var oTable = this.getView().byId('testdata3');
if(file && window.FileReader){
var reader = new FileReader();
var result = {}, data;
var that = this;
reader.readAsBinaryString(file);
reader.onload = function(e) {
var rawLog = reader.result;
data = e.target.result;
var wb = XLSX.read(data, {type: 'binary'});
var first_sheet_name = wb.SheetNames[0];
var worksheet = wb.Sheets[first_sheet_name];
// Collect the rows of every non-empty sheet, keyed by sheet name.
wb.SheetNames.forEach(function(first_sheet_name) {
var roa = XLSX.utils.sheet_to_json(wb.Sheets[first_sheet_name]);
if(roa.length > 0){
result[first_sheet_name] = roa;
}
});
// NOTE(review): `data` was already declared above; this var re-declares and
// reassigns the same hoisted variable.
var data = result[Object.keys(result)[0]];
for(var i=0; i<data.length; i++){
// NOTE(review): only the first two properties of each row are rendered --
// hard-coded two-column output.
var excelRows = new sap.m.ColumnListItem({cells:[
new sap.m.Text({text: data[i][Object.keys(data[i])[0]]}),
new sap.m.Text({text: data[i][Object.keys(data[i])[1]]})
]});
that.getView().byId("testdata3").addItem(excelRows );
}
};
};
},
I'm simply struggling to find the right approach of binding the items to my table.. I'm wondering if it has something to do with my XML view or the controller file..
Update with the answer from @MatthijsMennen — I'm still struggling, because the items are only populated in one column:
// Fourth attempt (after @MatthijsMennen's answer): bind both "columns" and
// "items" aggregations with factory functions against a JSONModel.
_import : function(file) {
var oTable = this.getView().byId('testdata3');
if(file && window.FileReader){
var reader = new FileReader();
var result = {}, data;
var that = this;
reader.readAsBinaryString(file);
reader.onload = function(e) {
var rawLog = reader.result;
data = e.target.result;
var wb = XLSX.read(data, {type: 'binary'});
var first_sheet_name = wb.SheetNames[0];
var worksheet = wb.Sheets[first_sheet_name];
var aColumns = that.getColumnNames(worksheet);
// NOTE(review): the alert() is leftover debugging.
var aData = that.getRowData(worksheet, result);alert(aData);
var oModel = new sap.ui.model.json.JSONModel();
oModel.setData({
columns: aColumns,
rows: aData
});
oTable.setModel(oModel);
oTable.bindAggregation("columns", "/columns", function(index, context) {
return new sap.m.Column({
header: new sap.m.Label({
text: context.getObject().columnId
})
});
});
// NOTE(review): getRowData flattens the sheet to one entry PER CELL, and the
// factory emits a ColumnListItem with a single Text -- so every cell becomes
// its own one-cell row, which is exactly the "only one column" symptom.
oTable.bindAggregation("items", "/rows", function(index, context){
return new sap.m.ColumnListItem({
cells: [
new sap.m.Text({text: context.getObject().cellId })
]
});
});
};
};
},
getColumnNames: function(worksheet) {
var oColumns = [];
var cells = Object.keys(worksheet);
for (var i = 0; i < Object.keys(cells).length; i++) {
if (cells[i].indexOf("1") > -1) {
var columnName = worksheet[cells[i]].v;
oColumns.push({
columnId: columnName
});
}
}
return oColumns;
},
getRowData: function(worksheet, result) {
var roa = XLSX.utils.sheet_to_json(worksheet);
if(roa.length > 0){
result[worksheet] = roa;
}
var data = result[Object.keys(result)[0]];
console.log(data.length);
var i; var x;
var oCells = []
for(i = 0; i < data.length; i++){
for(var x = 0; x < data.length; x ++) {
var excelRows = data[i][Object.keys(data[i])[x]];
console.log(data[i][Object.keys(data[i])[x]])
oCells.push({ cellId: excelRows});
}
}
return oCells;
},
update with for loop for columnlistitems
// NOTE(review): this factory is broken by construction -- the for-loop
// `return`s on its first iteration, so the loop is pointless, and the
// worksheet is re-parsed with sheet_to_json on EVERY item the binding creates.
oTable.bindAggregation("items", "/rows", function(index, context) {
var roa = XLSX.utils.sheet_to_json(worksheet);
if(roa.length > 0){
result[worksheet] = roa;
}
for(var i = 0; i < roa.length; i++){
return new sap.m.ColumnListItem({
cells: [
new Text({ text :context.getObject().cellId })
]
})
};
});
I created a small example here with SheetJS. The column names get extracted from the worksheet, but you still need to get the rows from the worksheet. I added some dummy data for the rows.
Hope this helps.
View
<Table id="testdata3" />
<u:FileUploader change="onChange" buttonText="Upload" />
Controller
// Answer's working example: read the uploaded workbook, derive columns/rows,
// put them in a JSONModel and bind the table's aggregations.
onChange: function(oEvent) {
var file = oEvent.getParameter("files")[0];
var oTable = this.byId("testdata3");
var reader = new FileReader();
// Capture the controller for use inside the async onload callback.
var that = this;
reader.readAsBinaryString(file);
reader.onload = function(e) {
var data = e.target.result;
var wb = XLSX.read(data, {
type: "binary"
});
var firstSheetName = wb.SheetNames[0];
var worksheet = wb.Sheets[firstSheetName];
var oModel = new sap.ui.model.json.JSONModel();
var aColumns = that.getColumnNames(worksheet);
// NOTE(review): getRowData below ignores its arguments and returns dummy
// rows -- real extraction still has to be plugged in.
var aData = that.getRowData(worksheet);
var aCells = that.getCells(aColumns);
oModel.setData({
columns: aColumns,
rows: aData
});
oTable.setModel(oModel);
// One sap.m.Column per detected header cell.
oTable.bindAggregation("columns", "/columns", function(index, context) {
return new sap.m.Column({
header: new sap.m.Label({
text: context.getObject().columnId
})
});
});
// One ColumnListItem template whose cells bind {value0}..{valueN}.
oTable.bindAggregation("items", "/rows", new sap.m.ColumnListItem({
// CHANGE ACCORDINGLY OR MAKE DYNAMIC
cells: aCells
}));
};
},
getColumnNames: function(worksheet) {
var oColumns = [];
var cells = Object.keys(worksheet);
for (var i = 0; i < Object.keys(cells).length; i++) {
if (cells[i].indexOf("1") > -1) {
var columnName = worksheet[cells[i]].v;
oColumns.push({
columnId: columnName
});
}
}
return oColumns;
},
getRowData: function() {
var aItems = [];
// DO YOUR THING HERE
aItems[0] = {
value0: "testvalue0",
value1: "testvalue1",
value2: "testvalue2",
value3: "testvalue3",
value4: "testvalue4"
};
return aItems;
},
getCells: function(aColumns) {
var cells = [];
for (var i = 0; i < aColumns.length; i++) {
cells[i] = new sap.m.Text({
text: "{value" + i + "}"
});
}
return cells;
}
Excel example
In one of my projects I needed to create table dynamically from controller.
Here is how I did hoping it helps you:
VIEW
<Table id="tableTask"
inset="false">
<headerToolbar>
<OverflowToolbar id="otbSubheader">
<ToolbarSpacer/>
<SearchField id="taskSearchBox" search="onSearchOrClearPressed" liveChange="onSearchTasks" showSearchButton="false">
<layoutData><OverflowToolbarLayoutData minWidth="200px" maxWidth="300px" shrinkable="true"/></layoutData>
</SearchField>
</OverflowToolbar>
</headerToolbar>
<columns>
<!-- Columns created in controller -->
</columns>
<items>
<ColumnListItem id="columnsListItemTask" press="onPressListItem" type="Navigation">
<cells>
<!-- Cells created in controller -->
</cells>
</ColumnListItem>
</items>
</Table>
CONTROLLER
onInit: function(){
...
// Get columns aggregation of table
let oColumns = this.getView().byId('columnsListItemTask');
// Define table cells
let cellToAdd1 = new sap.m.Text('textCell1',{
text: "{path/to/modelValue1}"
});
let cellToAdd2 = new sap.m.Text('textCell2',{
text: "{path/to/modelValue2}"
});
let cellToAdd3 = new sap.m.Text('textCell3',{
text: "{path/to/modelValue3}"
});
let cellToAdd4 = new sap.m.Text('textCell4',{
text: "{path/to/modelValue4}"
});
// Add cells (in this case 4 columns)
oColumns.addCell(cellToAdd1);
oColumns.addCell(cellToAdd2);
oColumns.addCell(cellToAdd3);
oColumns.addCell(cellToAdd4);
var jsonModel = new JSONModel(yourModel);
this.getView().setModel(jsonModel);
// Get Table by id
let oTable = this.getView().byId('tableTask');
oTable.removeAllItems(); //Remove old items if present
oTable.removeAllAggregation(); //Remove aggregations
oTable.bindAggregation('items', {
path:'/rowsData', // field name of you JSONModel containing data rows
template: oColumns, // rowTemplate of cells
});
...
}
UPDATE
You can do a for loop to add dynamically any number of columns:
onInit: function(){
...
// Get columns aggregation of table
let oColumns = this.getView().byId('columnsListItemTask');
let cellToAdd;
// This for loop in each columns I need to generate
// (in my case they are defined in a database and I get it with AJAX request)
for(let key in allCellsToGenearte){
cellToAdd = new sap.m.Text({
text: allCellsToGenearte["modelPath"]
});
oColumns.addCell(cellToAdd);
}
// Get Table by id
let oTable = this.getView().byId('tableTask');
oTable.removeAllItems(); //Remove old items if present
oTable.removeAllAggregation(); //Remove aggregations
oTable.bindAggregation('items', {
path:'/rowsData', // field name of JSON Model containing data rows
template: oColumns, // rowTemplate of cells
});
...
}

Javascript - FileReader how can I read and process each file at a time among multiple files

I am trying to let the user drop multiple Excel files, extract the desired values from each one, and upload them to the website ONE FILE AT A TIME.
My code is not working, and I am assuming this is because of the callback problem.
Could anybody help?
Edit: I also added my uploadFile function. I very much appreciate your help.
// Reads each dropped workbook, extracts EIN/engagement/GO values from the
// named tab, then uploads.
// NOTE(review): `var i` is function-scoped and shared by every async onload
// callback -- by the time any onload fires the loop has finished and
// i === fileList.length, so fileList[i] below is undefined. This is the
// "callback problem" the question describes; see the async/await rewrite
// in the answer. rABS, tabName, hasValues, isValid, ein, engCode, goLocator
// all come from the enclosing scope.
for(var i = 0; i < fileList.length; i++) {
//console.log(fileList[i]["file"]);
var reader = new FileReader();
var f = fileList[i]["file"];
//var fName = fileList[i]["fileName"];
var excelObject = fileList[i];
reader.onload = function(ev) {
var data = ev.target.result;
if(!rABS) data = new Uint8Array(data);
var wb = XLSX.read(data, {type: rABS ? 'binary' : 'array'});
var einAddress = "B3";
var engCodeAddress = "B1";
var goAddress = "B2";
var errMsg = tabName + " tab or required value is missing";
// Worksheet with the necessary info
try{
var ws = wb.Sheets[tabName];
var ein_cell = ws[einAddress];
ein = (ein_cell ? ein_cell.v.toString() : undefined);
var eng_cell = ws[engCodeAddress];
engCode = (eng_cell ? eng_cell.v.toString() : undefined);
var go_cell = ws[goAddress];
goLocator = (go_cell ? go_cell.v.toString() : undefined);
if(ein == undefined || engCode == undefined || goLocator == undefined){
hasValues = false;
}
excelObject["EngagementCode"] = engCode;
excelObject["GoSystem"] = goLocator;
excelObject["EIN"] = ein;
if(hasValues && isValid){
// NOTE(review): stale `i` -- see the note above the loop.
uploadFile(fileList[i], userInfo);
} else {
noValueErrorHandler(errMsg);
}
} catch(err){
hasValues = false;
}
};
if(rABS) reader.readAsBinaryString(f); else reader.readAsArrayBuffer(f);
}
// Uploads one workbook to SharePoint through a chain of jQuery promises
// (buffer -> add file -> fetch list item -> update metadata), then recursively
// kicks off the next file until processedCount reaches fileCount.
// NOTE(review): this quoted snippet is truncated -- the function's closing
// brace is missing. listName, fileList, fileCount, processedCount,
// getFileBuffer, addFileToFolder, getListItem, updateListItem and onError
// all come from the enclosing scope.
function uploadFile(f, userInfo) {
// Define the folder path for this example.
var serverRelativeUrlToFolder = listName;
// Get info of the file to be uploaded
var file = f;
var fileInput = file["file"];
var newName = file["fileName"];
var ein = file["EIN"];
var engCode = file["EngagementCode"];
var email = userInfo;
var goLocator = file["GoSystem"];
console.log("file: " + file);
// Get the server URL.
var serverUrl = _spPageContextInfo.siteAbsoluteUrl + "/StatusTracker";
// Initiate method calls using jQuery promises.
// Get the local file as an array buffer.
var getFile = getFileBuffer(fileInput);
getFile.done(function (arrayBuffer) {
// Add the file to the SharePoint folder.
var addFile = addFileToFolder(arrayBuffer, newName);
// NOTE(review): the callback parameter `file` shadows the outer `file`.
addFile.done(function (file, status, xhr) {
// Get the list item that corresponds to the uploaded file.
var getItem = getListItem(file.d.ListItemAllFields.__deferred.uri);
getItem.done(function (listItem, status, xhr) {
// Change the display name and title of the list item.
var changeItem = updateListItem(listItem.d.__metadata);
changeItem.done(function (data, status, xhr) {
processedCount += 1;
// Serialisation happens here: the next file starts only after this one
// has been fully uploaded and its list item updated.
if (processedCount < fileCount) {
uploadFile(fileList[processedCount], email);
} else if (processedCount == fileCount){
$("#dropbox").text("Done, drop your next file");
$("#ADMNGrid").data("kendoGrid").dataSource.read();
fileList = [];
alert("Total of " + processedCount + " items are processed!");
}
// Refresh kendo grid and change back the message and empty fileList
//$("#dropbox").text("Drag your Fund/Lower Tier workpaper here ...");
//location.reload(true);
});
changeItem.fail(onError);
});
getItem.fail(onError);
});
addFile.fail(onError);
});
getFile.fail(onError);
You might put the whole thing into an async function and await a Promise for each iteration, forcing the files to be processed in serial. You didn't post your uploadFile, but if you have it return a Promise that resolves once it's done, you could do the following:
// Processes the dropped workbooks strictly one at a time: each loop iteration
// awaits a Promise that settles only after the file has been read, validated
// and uploaded, so `i` is stable by the time each onload callback runs.
// Fixed: `async fn()` is method shorthand and invalid outside an object
// literal/class -- it needs the `function` keyword. Also pass an Error to
// reject() so failures carry a diagnosable reason.
async function fn() {
  for (var i = 0; i < fileList.length; i++) {
    await new Promise((resolve, reject) => {
      //console.log(fileList[i]["file"]);
      var reader = new FileReader();
      var f = fileList[i]["file"];
      //var fName = fileList[i]["fileName"];
      var excelObject = fileList[i];
      reader.onload = function(ev) {
        var data = ev.target.result;
        if (!rABS) data = new Uint8Array(data);
        var wb = XLSX.read(data, {
          type: rABS ? 'binary' : 'array'
        });
        var einAddress = "B3";
        var engCodeAddress = "B1";
        var goAddress = "B2";
        var errMsg = tabName + " tab or required value is missing";
        // Worksheet with the necessary info
        try {
          var ws = wb.Sheets[tabName];
          var ein_cell = ws[einAddress];
          ein = (ein_cell ? ein_cell.v.toString() : undefined);
          var eng_cell = ws[engCodeAddress];
          engCode = (eng_cell ? eng_cell.v.toString() : undefined);
          var go_cell = ws[goAddress];
          goLocator = (go_cell ? go_cell.v.toString() : undefined);
          if (ein == undefined || engCode == undefined || goLocator == undefined) {
            hasValues = false;
          }
          excelObject["EngagementCode"] = engCode;
          excelObject["GoSystem"] = goLocator;
          excelObject["EIN"] = ein;
          if (hasValues && isValid) {
            // Resolve only once the upload completes, serialising the files.
            uploadFile(fileList[i], userInfo)
              .then(resolve);
          } else {
            noValueErrorHandler(errMsg);
            // NOTE(review): rejecting aborts the remaining files unless the
            // caller catches; swap for resolve() to skip-and-continue.
            reject(new Error(errMsg));
          }
        } catch (err) {
          hasValues = false;
          reject(err);
        }
      };
      if (rABS) reader.readAsBinaryString(f);
      else reader.readAsArrayBuffer(f);
    });
  }
}

Is there a GOOD way to use Angular.js and Indexed DB?

So, I'm developing a javascript + api forum software. My point in this is so that you can read a forum while offline -- this involves HTML5's offline-storage. In particular, I would like to use IndexedDB as it seems to be the most promising for the future. I've gotten a good service/factory for fetching / temporarily storing the data, but IDDB is broken majorly. Does anybody have advice on how to go about this?
edit Also, for anybody who would like a hosted version, here it is on cloud9.
// Vendor-prefix shim: normalise the IndexedDB globals onto their standard
// names (falling back to null) and guarantee angular/localStorage exist.
var angular = angular || {};
(function(w){
  w.localStorage = w.localStorage || {};
  var pick = function(std, moz, webkit, ms){
    return std || moz || webkit || ms || null;
  };
  w.indexedDB = pick(w.indexedDB, w.mozIndexedDB, w.webkitIndexedDB, w.msIndexedDB);
  w.IDBTransaction = pick(w.IDBTransaction, null, w.webkitIDBTransaction, w.msIDBTransaction);
  w.IDBKeyRange = pick(w.IDBKeyRange, null, w.webkitIDBKeyRange, w.msIDBKeyRange);
})(this);
angular.module("JSForumServices",[],function($provide){
$provide.factory('ForumStorage',(function(){
var service = {
post:null,
thread:null,
board:null,
cache:{},
pending:{}
};
var fetch = (function(baseFunction,path,callback){
if(path in service.pending)
return service.pending[path];
var r=baseFunction();
service.pending[path] = r;
var ajaxRequest = new XMLHttpRequest();
var cancelled = false;
var dateRegex =
/^(?:(Sun|Mon|Tue|Wed|Thu|Fri|Sat),\s+)?(0[1-9]|[1-2]?[0-9]|3[01])\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+(19[0-9]{2}|[2-9][0-9]{3})\s+(2[0-3]|[0-1][0-9]):([0-5][0-9])(?::(60|[0-5][0-9]))?\s+([-\+][0-9]{2}[0-5][0-9]|(?:UT|GMT|(?:E|C|M|P)(?:ST|DT)|[A-IK-Z]))(\s+|\(([^\(\)]+|\\\(|\\\))*\))*$/;
ajaxRequest.onreadystatechange = (function(){
var readyState = ajaxRequest.readyState;
if(readyState==4&&(!cancelled)){
// Store the copy locally!
// Also, initiate the callback.
// This way if the storage fails,
// The application continues to work
// As expected.
var data = JSON.parse(ajaxRequest.responseText);
for(var k in data)
r[k] = data[k];
service.cache[path]={obj:r,modified:new Date()};
delete service.pending[path];
callback(r);
}
else if((path in service.cache)&&readyState>=2){
var oldDate = service.cache[path].modified;
console.log("Cache'd copy for",path,"exists.",oldDate.toString());
var isMoreRecent = false;//Is the server-copy more recent?
var serverModifiedString = ajaxRequest.getResponseHeader("Last-Modified");
console.log(serverModifiedString);
var match = dateRegex.exec(serverModifiedString);
var serverModified = new Date();
serverModified.setDate(parseInt(match[2],10));
serverModified.setMonth({
"jan":0,
"feb":1,
"mar":2,
"apr":3,
"may":4,
"jun":5,
"jul":6,
"aug":7,
"sep":8,
"oct":9,
"nov":10,
"dec":11
}[match[3].toLowerCase()]);
serverModified.setYear(parseInt(match[4],10));
serverModified.setHours(parseInt(match[5],10));
serverModified.setMinutes(parseInt(match[6],10));
serverModified.setMilliseconds(parseInt(match[7],10));
isMoreRecent = serverModified > oldDate;
if(!isMoreRecent&&(path in service.pending)){//sometimes this code may be slower than network speeds? Just to be safe, I guess.
cancelled=true;
var oldObject = service.cache[path].obj;
for(var key in oldObject)
r[key]=oldObject[key];
console.log("using a cache'd value.",r);
callback(r);
delete service.pending[path];
ajaxRequest.abort();//No need to waste more bandwidth!
}
}
});
ajaxRequest.open("GET",path,true);
ajaxRequest.send();
return r;
});
var ObjectFetch = (function(base,constr){
var ret = (function(id,cb){
cb = cb||(function(){});
return fetch(constr,base+id+".json",cb);
});
return ret;
});
service.post = ObjectFetch("./post/",(function(){
return {
id:"???",
author:"???",
content:"",
date:""
};
}));
service.thread = ObjectFetch("./thread/",(function(){
return {
id:"???",
title:"???",
posts:[],
tags:""
};
}));
service.board = ObjectFetch("./board/",(function(){
return {
id:"???",
title:"???",
threads:[],
tags:""
};
}));
service.forum = ObjectFetch("./",(function(){
return {
name:"Javascript Forum Software.",
boards:[]
};
}));
if(window.indexedDB!==null){
var postFetch = service.post;
var threadFetch = service.thread;
var boardFetch = service.board;
(function(dbengine){
var boardsLoaded = false;
var threadsLoaded = false;
var postsLoaded = false;
var req = dbengine.open("forum",1);
req.onupgradeneeded = (function(e){
var db = e.target.result;
db.createObjectStore("post",{
keyPath:"id"
});
db.createObjectStore("thread",{
keyPath:"id"
});
db.createObjectStore("board",{
keyPath:"id"
});
});
req.onsuccess = (function(e){
service.database = e.target.result;
var loadData = service.database.transaction([
'board',
'thread',
'post'],'readwrite');
loadData.onsuccess = (function(ee){
var transaction = ee.target.result;
transaction.objectStore("board").openCursor().onsuccess=(function(e){
var cursor = e.target.result;
if(cursor===null){
boardsLoaded = true;
return;
}
var id = cursor.key;
var obj = cursor.value;
var lastModified = new Date();
if("lastModified" in obj){
lastModified = obj.lastModified;
}
service.cache["./board/"+id.toString().toLowerCase()+".json"]={
obj:obj,
modified:lastModified
};
});
transaction.objectStore("thread").openCursor().onsuccess=(function(e){
var cursor = e.target.result;
if(cursor===null){
threadsLoaded = true;
return;
}
var id = cursor.key;
var obj = cursor.value;
var lastModified = new Date();
if("lastModified" in obj){
lastModified = obj.lastModified;
}
service.cache["./thread/"+id.toString().toLowerCase()+".json"]={
obj:obj,
modified:lastModified
};
});
transaction.objectStore("post").openCursor().onsuccess=(function(e){
var cursor = e.target.result;
if(cursor===null){
postsLoaded = true;
return;
}
var id = cursor.key;
var obj = cursor.value;
var lastModified = new Date();
if("lastModified" in obj){
lastModified = obj.lastModified;
}
service.cache["./post/"+id.toString().toLowerCase()+".json"]={
obj:obj,
modified:lastModified
};
});
service.post = (function(id,cb){
console.log("DDDDDAFF");
var trans = service.database.transaction(["post"],"readwrite");
trans.onsuccess = (function(e){
var req = e.target.result.objectStore("post").get(id);
req.onsuccess = (function(ee){
cb(req.result);
});
req.onerror = (function(ee){
console.log("HAAAA?!");
postFetch(id,(function(post){
e.target.result.objcetStore.save(post);
cb(post);
}));
});
});
trans.onerror = (function(e){
console.log("Error with IDDB:",e);
threadFetch(id,cb);
});
});
service.thread = (function(id,cb){
var trans = service.database.transaction(["thread"],"readwrite");
trans.onsuccess = (function(e){
var req = e.target.result.objectStore("thread").get(id);
req.onsuccess = (function(ee){
cb(req.result);
});
req.onerror = (function(ee){
threadFetch(id,(function(post){
e.target.result.objcetStore.save(post);
cb(post);
}));
});
});
trans.onerror = (function(e){
console.log("Error with IDDB:",e);
postFetch(id,cb);
});
});
service.board = (function(id,cb){
var trans = service.database.transaction(["board"],"readwrite");
trans.onsuccess = (function(e){
var req = e.target.result.objectStore("board").get(id);
req.onsuccess = (function(ee){
cb(req.result);
});
req.onerror = (function(ee){
boardFetch(id,(function(post){
e.target.result.objcetStore.save(post);
cb(post);
}));
});
});
trans.onerror = (function(e){
console.log("Error with IDDB:",e);
boardFetch(id,cb);
});
});
});
});
})(window.indexedDB);
}
return service;
}));
});
angular.module('JSForum',["JSForumServices"]).config(
['$routeProvider',
function($routeProvider){
$routeProvider.when('/',{
templateUrl:"forum.html",
controller:ForumController
});
$routeProvider.when('/board/:id',{
templateUrl:"board.html",
controller:BoardController
});
$routeProvider.when('/thread/:id',{
templateUrl:"thread.html",
controller:ThreadController
});
$routeProvider.otherwise({redirectTo:"/"});
}]);
// Route controller for /thread/:id -- loads the thread, then lazily resolves
// any entries in thread.posts that are still bare ids (no .id property).
function ThreadController($scope,$routeParams,ForumStorage){
$scope.id = $routeParams.id.toString().toLowerCase();
$scope.thread = null;
ForumStorage.thread($scope.id,(function(thread){
$scope.thread = thread;
$scope.$apply();
// callback(p) freezes the index so each async post fetch writes back to
// the correct slot of posts[].
var callback = (function(p){
return (function(post){
$scope.thread.posts[p] = post;
$scope.$apply();
});
});
for(var i=0;i<thread.posts.length;i++)
if(typeof(thread.posts[i].id) == 'undefined')
ForumStorage.post(thread.posts[i],callback(i));
$scope.$apply();
}));
}
// Route controller for /board/:id -- loads the board, then lazily resolves
// any entries in board.threads that are still bare ids (no .id property).
function BoardController($scope,$routeParams,ForumStorage){
$scope.id = $routeParams.id.toString().toLowerCase();
$scope.board = null;
ForumStorage.board($scope.id,(function(board){
// callback(p) freezes the index so each async thread fetch writes back to
// the correct slot of threads[].
var callback = (function(p){
return (function(thread){
$scope.board.threads[p] = thread;
$scope.$apply();
});
});
$scope.board = board;
console.log("Using board:",$scope.board);
for(var i=0;i<board.threads.length;i++)
if(typeof(board.threads[i].id)=='undefined')
ForumStorage.thread(board.threads[i],callback(i));
$scope.$apply();
}));
}
// Root controller -- loads the forum record, mirrors its name into
// localStorage/document.title, then lazily resolves unloaded boards.
function ForumController($scope,ForumStorage){
// Cached name shows immediately while the fetch is in flight.
$scope.name = localStorage.forumName||"Forum";
$scope.boards = [];
ForumStorage.forum("forum",(function(forum){
document.title = $scope.name = localStorage.forumName = forum.name;
$scope.boards = forum.boards;
var callback=(function(p){
return (function(o){
$scope.boards[p] = o;
$scope.$apply();
});
});
for(var i=0;i<$scope.boards.length;i++){
if(typeof($scope.boards[i].id) == 'undefined')
ForumStorage.board(forum.boards[i],callback(i));
}
$scope.$apply();
}));
}
If your backend is Google Cloud Storage, you can use my open-source database library http://dev.yathit.com/api-reference/ydn-db/storage.html. It caches in IndexedDB and persists to the blob store. The forum post's URI path is taken as the primary key of the record. The Last-Modified header value is persisted into IndexedDB and used for conditional HTTP requests. Since the blob store can only be queried in ascending key order (URI path), forum post URI paths are generated so that the newest post has the smallest key. In this way, we can query for new posts by giving the smallest known key as a marker.
An example can be found at https://bitbucket.org/ytkyaw/ydn-auth/src/master/examples/note-app/note-app.js (not a forum, but a simple note app). You need a backend server that generates signed URLs for posting new records.

Object has no method "open" error when using indexedDB

I'm trying to build a small class-like container that will make it a little cleaner to load and store data from the HTML5 IndexedDB. To be honest this is the first time I've ever played with this feature, so my issue could be trivial.
I'm basing my code off of this tutorial:
http://www.html5rocks.com/en/tutorials/indexeddb/todo/
function DBDictionary()
{
this.Holder = {};
this.Entries = new Array();
this.Opened = false;
this.v = "1.0";
this.Holder.indexedDB = window.indexedDB || window.webkitIndexedDB || window.mozIndexedDB;
if ('webkitIndexedDB' in window)
{
window.IDBTransaction = window.webkitIDBTransaction;
window.IDBKeyRange = window.webkitIDBKeyRange;
}
this.Holder.indexedDB = {};
this.Holder.indexedDB.db = null;
this.Holder.indexedDB.onerror = function(e)
{
console.log(e);
};
this.DownloadDB = function()
{
if(this.Opened) return;
var request = this.Holder.indexedDB.open("Storage");
request.onsuccess = function(e)
{
this.Holder.indexedDB.db = e.target.result;
var db = this.Holder.indexedDB.db;
// We can only create Object stores in a setVersion transaction;
if (v!= db.version)
{
var setVrequest = db.setVersion(v);
// onsuccess is the only place we can create Object Stores
setVrequest.onerror = this.Holder.indexedDB.onerror;
setVrequest.onsuccess = function(e)
{
if(db.objectStoreNames.contains("Storage")) db.deleteObjectStore("Storage");
var store = db.createObjectStore("Storage", {keyPath: "Key"});
this.PopulateAll();
};
}
else
{
this.PopulateAll();
}
};
request.onerror = this.Holder.indexedDB.onerror;
};
this.UploadDB = function()
{
this.DeleteAll();
this.SaveAll();
};
this.DeleteAll = function()
{
var db = this.Holder.indexedDB.db;
var trans = db.transaction(["Storage"], IDBTransaction.READ_WRITE);
var store = trans.objectStore("Storage");
Entries.forEach(function(element, index, array)
{
var request = store.delete(index);
request.onerror = function(e)
{
console.log("Error Deleting: ", e);
};
});
};
this.PopulateAll = function()
{
var db = this.Holder.indexedDB.db;
var trans = db.transaction(["Storage"], IDBTransaction.READ_WRITE);
var store = trans.objectStore("Storage");
// Get everything in the store;
var keyRange = IDBKeyRange.lowerBound(0);
var cursorRequest = store.openCursor(keyRange);
cursorRequest.onsuccess = function(e)
{
var result = e.target.result;
//No more results to load
if(!!result == false)
{
if(!this.Opened) this.Opened = true;
return;
}
this.Entries[result.Key] = result.Value;
result.continue();
};
cursorRequest.onerror = this.Holder.indexedDB.onerror;
};
this.SaveAll = function()
{
var db = this.Holder.indexedDB.db;
var trans = db.transaction(["Storage"], IDBTransaction.READ_WRITE);
var store = trans.objectStore("Storage");
Entries.forEach(function(element, index, array)
{
var data = {
"Key": index,
"Value": element,
"timeStamp": new Date().getTime()
};
var request = store.put(data);
request.onerror = function(e) {
console.log("Error Adding: ", e);
};
});
};
}
// Demo entry point: open the dictionary-backed store, stash a value and echo
// it back. Wired to run once the DOM is ready.
function main()
{
    var store = new DBDictionary();
    store.DownloadDB();
    store.Entries["hello"] = "world";
    alert(store.Entries["hello"]);
}
$(document).ready(main);
My desired implemented state should look something like this:
function main()
{
var dictionary = new DBDictionary();
dictionary.DownloadDB();
dictionary.Entries["hello"] = "world";
alert(dictionary.Entries["hello"]);
}
$(document).ready(main);
What this should do is download the data from the browser's IndexedDB object and store them into the object-housed array Entries. When I want to store the value of Entires back into the DB, I would call dictionary.UploadDB();
However, I'm getting a single JavaScript error: `Uncaught TypeError: Object #<Object> has no method 'open'`. I'm pretty much at a loss as to what I'm doing wrong. Can anyone offer me some tips?
Do a typeof check and console.log the this.Holder.indexedDB object to inspect the prototype. Does it inherit the IDBDatabase prototype? If it does, the open method would be available to you.
If your window.indexedDB did fire the on success callback, e.target.result would be the correct way to access the newly opened database via the event object. But the fact that you're not getting that far suggests that your this.Holder.indexedDB object is not actually an instance of an IDBDatabase.
EDIT: Yes, this is exactly your issue. If you console.log the this.holder.indexedDB object you get an object that looks like {"db":null}.
Swap this.Holder.indexedDB for window.webkitIndexedDB at your open invocation and you'll see that 'world' alert pops. JSFiddle here.

Categories