Is there a GOOD way to use Angular.js and IndexedDB?

So, I'm developing JavaScript + API forum software. The goal is to let people read a forum while offline, which means using HTML5 offline storage; in particular I'd like to use IndexedDB, as it seems the most promising option going forward. I've got a decent service/factory for fetching and temporarily caching the data, but my IndexedDB code is badly broken. Does anybody have advice on how to go about this?
Edit: Also, for anybody who would like a hosted version, here it is on Cloud9.
var angular = angular || {};
(function(w){
w.localStorage = w.localStorage||{};
w.indexedDB = w.indexedDB || w.mozIndexedDB || w.webkitIndexedDB || w.msIndexedDB || null;
w.IDBTransaction = w.IDBTransaction || w.webkitIDBTransaction || w.msIDBTransaction || null;
w.IDBKeyRange = w.IDBKeyRange || w.webkitIDBKeyRange || w.msIDBKeyRange || null;
})(this);
angular.module("JSForumServices",[],function($provide){
$provide.factory('ForumStorage',(function(){
var service = {
post:null,
thread:null,
board:null,
cache:{},
pending:{}
};
var fetch = (function(baseFunction,path,callback){
if(path in service.pending)
return service.pending[path];
var r=baseFunction();
service.pending[path] = r;
var ajaxRequest = new XMLHttpRequest();
var cancelled = false;
var dateRegex =
/^(?:(Sun|Mon|Tue|Wed|Thu|Fri|Sat),\s+)?(0[1-9]|[1-2]?[0-9]|3[01])\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+(19[0-9]{2}|[2-9][0-9]{3})\s+(2[0-3]|[0-1][0-9]):([0-5][0-9])(?::(60|[0-5][0-9]))?\s+([-\+][0-9]{2}[0-5][0-9]|(?:UT|GMT|(?:E|C|M|P)(?:ST|DT)|[A-IK-Z]))(\s+|\(([^\(\)]+|\\\(|\\\))*\))*$/;
ajaxRequest.onreadystatechange = (function(){
var readyState = ajaxRequest.readyState;
if(readyState==4&&(!cancelled)){
// Store the copy locally!
// Also, initiate the callback.
// This way if the storage fails,
// The application continues to work
// As expected.
var data = JSON.parse(ajaxRequest.responseText);
for(var k in data)
r[k] = data[k];
service.cache[path]={obj:r,modified:new Date()};
delete service.pending[path];
callback(r);
}
else if((path in service.cache)&&readyState>=2){
var oldDate = service.cache[path].modified;
console.log("Cache'd copy for",path,"exists.",oldDate.toString());
var isMoreRecent = false;//Is the server-copy more recent?
var serverModifiedString = ajaxRequest.getResponseHeader("Last-Modified");
console.log(serverModifiedString);
var match = dateRegex.exec(serverModifiedString);
var serverModified = new Date();
serverModified.setDate(parseInt(match[2],10));
serverModified.setMonth({
"jan":0,
"feb":1,
"mar":2,
"apr":3,
"may":4,
"jun":5,
"jul":6,
"aug":7,
"sep":8,
"oct":9,
"nov":10,
"dec":11
}[match[3].toLowerCase()]);
serverModified.setFullYear(parseInt(match[4],10));
serverModified.setHours(parseInt(match[5],10));
serverModified.setMinutes(parseInt(match[6],10));
serverModified.setSeconds(parseInt(match[7]||"0",10));//match[7] is the (optional) seconds field
isMoreRecent = serverModified > oldDate;
if(!isMoreRecent&&(path in service.pending)){//sometimes this code may be slower than network speeds? Just to be safe, I guess.
cancelled=true;
var oldObject = service.cache[path].obj;
for(var key in oldObject)
r[key]=oldObject[key];
console.log("using a cache'd value.",r);
callback(r);
delete service.pending[path];
ajaxRequest.abort();//No need to waste more bandwidth!
}
}
});
ajaxRequest.open("GET",path,true);
ajaxRequest.send();
return r;
});
var ObjectFetch = (function(base,constr){
var ret = (function(id,cb){
cb = cb||(function(){});
return fetch(constr,base+id+".json",cb);
});
return ret;
});
service.post = ObjectFetch("./post/",(function(){
return {
id:"???",
author:"???",
content:"",
date:""
};
}));
service.thread = ObjectFetch("./thread/",(function(){
return {
id:"???",
title:"???",
posts:[],
tags:""
};
}));
service.board = ObjectFetch("./board/",(function(){
return {
id:"???",
title:"???",
threads:[],
tags:""
};
}));
service.forum = ObjectFetch("./",(function(){
return {
name:"Javascript Forum Software.",
boards:[]
};
}));
if(window.indexedDB!==null){
var postFetch = service.post;
var threadFetch = service.thread;
var boardFetch = service.board;
(function(dbengine){
var boardsLoaded = false;
var threadsLoaded = false;
var postsLoaded = false;
var req = dbengine.open("forum",1);
req.onupgradeneeded = (function(e){
var db = e.target.result;
db.createObjectStore("post",{
keyPath:"id"
});
db.createObjectStore("thread",{
keyPath:"id"
});
db.createObjectStore("board",{
keyPath:"id"
});
});
req.onsuccess = (function(e){
service.database = e.target.result;
var loadData = service.database.transaction([
'board',
'thread',
'post'],'readwrite');
loadData.onsuccess = (function(ee){
var transaction = ee.target.result;
transaction.objectStore("board").openCursor().onsuccess=(function(e){
var cursor = e.target.result;
if(cursor===null){
boardsLoaded = true;
return;
}
var id = cursor.key;
var obj = cursor.value;
var lastModified = new Date();
if("lastModified" in obj){
lastModified = obj.lastModified;
}
service.cache["./board/"+id.toString().toLowerCase()+".json"]={
obj:obj,
modified:lastModified
};
});
transaction.objectStore("thread").openCursor().onsuccess=(function(e){
var cursor = e.target.result;
if(cursor===null){
threadsLoaded = true;
return;
}
var id = cursor.key;
var obj = cursor.value;
var lastModified = new Date();
if("lastModified" in obj){
lastModified = obj.lastModified;
}
service.cache["./thread/"+id.toString().toLowerCase()+".json"]={
obj:obj,
modified:lastModified
};
});
transaction.objectStore("post").openCursor().onsuccess=(function(e){
var cursor = e.target.result;
if(cursor===null){
postsLoaded = true;
return;
}
var id = cursor.key;
var obj = cursor.value;
var lastModified = new Date();
if("lastModified" in obj){
lastModified = obj.lastModified;
}
service.cache["./post/"+id.toString().toLowerCase()+".json"]={
obj:obj,
modified:lastModified
};
});
service.post = (function(id,cb){
var trans = service.database.transaction(["post"],"readwrite");
trans.onsuccess = (function(e){
var req = e.target.result.objectStore("post").get(id);
req.onsuccess = (function(ee){
cb(req.result);
});
req.onerror = (function(ee){
// Not stored locally yet: fall back to the network, then cache the result.
postFetch(id,(function(post){
trans.objectStore("post").put(post);
cb(post);
}));
});
});
trans.onerror = (function(e){
console.log("Error with IndexedDB:",e);
postFetch(id,cb);
});
});
service.thread = (function(id,cb){
var trans = service.database.transaction(["thread"],"readwrite");
trans.onsuccess = (function(e){
var req = e.target.result.objectStore("thread").get(id);
req.onsuccess = (function(ee){
cb(req.result);
});
req.onerror = (function(ee){
threadFetch(id,(function(thread){
trans.objectStore("thread").put(thread);
cb(thread);
}));
});
});
trans.onerror = (function(e){
console.log("Error with IndexedDB:",e);
threadFetch(id,cb);
});
});
service.board = (function(id,cb){
var trans = service.database.transaction(["board"],"readwrite");
trans.onsuccess = (function(e){
var req = e.target.result.objectStore("board").get(id);
req.onsuccess = (function(ee){
cb(req.result);
});
req.onerror = (function(ee){
boardFetch(id,(function(board){
trans.objectStore("board").put(board);
cb(board);
}));
});
});
trans.onerror = (function(e){
console.log("Error with IndexedDB:",e);
boardFetch(id,cb);
});
});
});
});
})(window.indexedDB);
}
return service;
}));
});
angular.module('JSForum',["JSForumServices"]).config(
['$routeProvider',
function($routeProvider){
$routeProvider.when('/',{
templateUrl:"forum.html",
controller:ForumController
});
$routeProvider.when('/board/:id',{
templateUrl:"board.html",
controller:BoardController
});
$routeProvider.when('/thread/:id',{
templateUrl:"thread.html",
controller:ThreadController
});
$routeProvider.otherwise({redirectTo:"/"});
}]);
function ThreadController($scope,$routeParams,ForumStorage){
$scope.id = $routeParams.id.toString().toLowerCase();
$scope.thread = null;
ForumStorage.thread($scope.id,(function(thread){
$scope.thread = thread;
$scope.$apply();
var callback = (function(p){
return (function(post){
$scope.thread.posts[p] = post;
$scope.$apply();
});
});
for(var i=0;i<thread.posts.length;i++)
if(typeof(thread.posts[i].id) == 'undefined')
ForumStorage.post(thread.posts[i],callback(i));
$scope.$apply();
}));
}
function BoardController($scope,$routeParams,ForumStorage){
$scope.id = $routeParams.id.toString().toLowerCase();
$scope.board = null;
ForumStorage.board($scope.id,(function(board){
var callback = (function(p){
return (function(thread){
$scope.board.threads[p] = thread;
$scope.$apply();
});
});
$scope.board = board;
console.log("Using board:",$scope.board);
for(var i=0;i<board.threads.length;i++)
if(typeof(board.threads[i].id)=='undefined')
ForumStorage.thread(board.threads[i],callback(i));
$scope.$apply();
}));
}
function ForumController($scope,ForumStorage){
$scope.name = localStorage.forumName||"Forum";
$scope.boards = [];
ForumStorage.forum("forum",(function(forum){
document.title = $scope.name = localStorage.forumName = forum.name;
$scope.boards = forum.boards;
var callback=(function(p){
return (function(o){
$scope.boards[p] = o;
$scope.$apply();
});
});
for(var i=0;i<$scope.boards.length;i++){
if(typeof($scope.boards[i].id) == 'undefined')
ForumStorage.board(forum.boards[i],callback(i));
}
$scope.$apply();
}));
}

If your backend is Google Cloud Storage, you can use my open-source database library: http://dev.yathit.com/api-reference/ydn-db/storage.html It caches in IndexedDB and persists to the blob store. The forum post URI path is taken as the primary key of the record. The Last-Modified header value is persisted into IndexedDB and used for conditional HTTP requests. Since the blob store can only be queried in ascending order of key (URI path), forum post URI paths are generated so that the latest post has the smallest key. That way we can query for new posts by giving the smallest known key as a marker.
An example can be found at https://bitbucket.org/ytkyaw/ydn-auth/src/master/examples/note-app/note-app.js (not a forum, but a simple note app). You need a backend server that generates signed URLs for posting new records.
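If you would rather stay with plain IndexedDB instead of a library, the general shape of what the question is after looks roughly like the sketch below: cache each resource keyed by its URI path, remember its Last-Modified value, and revalidate with a conditional GET. This is only a minimal sketch of that pattern (not ydn-db's API); the cachedFetch name and the "resources" store are made up for illustration, and the store is assumed to have been created with createObjectStore("resources", { keyPath: "path" }) in onupgradeneeded.
function cachedFetch(db, path, callback) {
  var tx = db.transaction("resources", "readonly");
  var getReq = tx.objectStore("resources").get(path);
  getReq.onsuccess = function () {
    var cached = getReq.result; // { path, body, lastModified } or undefined
    if (cached) callback(JSON.parse(cached.body)); // serve the stale copy immediately
    var xhr = new XMLHttpRequest();
    xhr.open("GET", path, true);
    if (cached && cached.lastModified)
      xhr.setRequestHeader("If-Modified-Since", cached.lastModified);
    xhr.onload = function () {
      if (xhr.status === 304) return; // cached copy is still current
      if (xhr.status !== 200) return; // offline or error: keep the stale copy
      var record = {
        path: path,
        body: xhr.responseText,
        lastModified: xhr.getResponseHeader("Last-Modified")
      };
      db.transaction("resources", "readwrite")
        .objectStore("resources").put(record); // refresh the local cache
      callback(JSON.parse(record.body)); // then update the UI with fresh data
    };
    xhr.send();
  };
}
The callback may fire twice (once with cached data, once with fresh data), which matches the cache-then-update behaviour the question's service is going for.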

Related

JavaScript - FileReader: how can I read and process one file at a time among multiple files?

I am trying to let the user drop multiple Excel files, extract the desired values from each one, and upload them to the website ONE FILE AT A TIME.
My code is not working, and I am assuming this is because of a callback problem.
Could anybody help?
Edit: I also added my uploadFile function. I very much appreciate your help.
for(var i = 0; i < fileList.length; i++) {
//console.log(fileList[i]["file"]);
var reader = new FileReader();
var f = fileList[i]["file"];
//var fName = fileList[i]["fileName"];
var excelObject = fileList[i];
reader.onload = function(ev) {
var data = ev.target.result;
if(!rABS) data = new Uint8Array(data);
var wb = XLSX.read(data, {type: rABS ? 'binary' : 'array'});
var einAddress = "B3";
var engCodeAddress = "B1";
var goAddress = "B2";
var errMsg = tabName + " tab or required value is missing";
// Worksheet with the necessary info
try{
var ws = wb.Sheets[tabName];
var ein_cell = ws[einAddress];
ein = (ein_cell ? ein_cell.v.toString() : undefined);
var eng_cell = ws[engCodeAddress];
engCode = (eng_cell ? eng_cell.v.toString() : undefined);
var go_cell = ws[goAddress];
goLocator = (go_cell ? go_cell.v.toString() : undefined);
if(ein == undefined || engCode == undefined || goLocator == undefined){
hasValues = false;
}
excelObject["EngagementCode"] = engCode;
excelObject["GoSystem"] = goLocator;
excelObject["EIN"] = ein;
if(hasValues && isValid){
uploadFile(fileList[i], userInfo);
} else {
noValueErrorHandler(errMsg);
}
} catch(err){
hasValues = false;
}
};
if(rABS) reader.readAsBinaryString(f); else reader.readAsArrayBuffer(f);
}
function uploadFile(f, userInfo) {
// Define the folder path for this example.
var serverRelativeUrlToFolder = listName;
// Get info of the file to be uploaded
var file = f;
var fileInput = file["file"];
var newName = file["fileName"];
var ein = file["EIN"];
var engCode = file["EngagementCode"];
var email = userInfo;
var goLocator = file["GoSystem"];
console.log("file: " + file);
// Get the server URL.
var serverUrl = _spPageContextInfo.siteAbsoluteUrl + "/StatusTracker";
// Initiate method calls using jQuery promises.
// Get the local file as an array buffer.
var getFile = getFileBuffer(fileInput);
getFile.done(function (arrayBuffer) {
// Add the file to the SharePoint folder.
var addFile = addFileToFolder(arrayBuffer, newName);
addFile.done(function (file, status, xhr) {
// Get the list item that corresponds to the uploaded file.
var getItem = getListItem(file.d.ListItemAllFields.__deferred.uri);
getItem.done(function (listItem, status, xhr) {
// Change the display name and title of the list item.
var changeItem = updateListItem(listItem.d.__metadata);
changeItem.done(function (data, status, xhr) {
processedCount += 1;
if (processedCount < fileCount) {
uploadFile(fileList[processedCount], email);
} else if (processedCount == fileCount){
$("#dropbox").text("Done, drop your next file");
$("#ADMNGrid").data("kendoGrid").dataSource.read();
fileList = [];
alert("Total of " + processedCount + " items are processed!");
}
// Refresh kendo grid and change back the message and empty fileList
//$("#dropbox").text("Drag your Fund/Lower Tier workpaper here ...");
//location.reload(true);
});
changeItem.fail(onError);
});
getItem.fail(onError);
});
addFile.fail(onError);
});
getFile.fail(onError);
You might put the whole thing into an async function and await a Promise for each iteration, forcing the files to be processed serially. If you have uploadFile return a Promise that resolves once it's done, you could do the following:
async function fn() {
for (var i = 0; i < fileList.length; i++) {
await new Promise((resolve, reject) => {
//console.log(fileList[i]["file"]);
var reader = new FileReader();
var f = fileList[i]["file"];
//var fName = fileList[i]["fileName"];
var excelObject = fileList[i];
reader.onload = function(ev) {
var data = ev.target.result;
if (!rABS) data = new Uint8Array(data);
var wb = XLSX.read(data, {
type: rABS ? 'binary' : 'array'
});
var einAddress = "B3";
var engCodeAddress = "B1";
var goAddress = "B2";
var errMsg = tabName + " tab or required value is missing";
// Worksheet with the necessary info
try {
var ws = wb.Sheets[tabName];
var ein_cell = ws[einAddress];
ein = (ein_cell ? ein_cell.v.toString() : undefined);
var eng_cell = ws[engCodeAddress];
engCode = (eng_cell ? eng_cell.v.toString() : undefined);
var go_cell = ws[goAddress];
goLocator = (go_cell ? go_cell.v.toString() : undefined);
if (ein == undefined || engCode == undefined || goLocator == undefined) {
hasValues = false;
}
excelObject["EngagementCode"] = engCode;
excelObject["GoSystem"] = goLocator;
excelObject["EIN"] = ein;
if (hasValues && isValid) {
uploadFile(fileList[i], userInfo)
.then(resolve);
} else {
noValueErrorHandler(errMsg);
reject();
}
} catch (err) {
hasValues = false;
reject();
}
};
if (rABS) reader.readAsBinaryString(f);
else reader.readAsArrayBuffer(f);
});
}
}
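For that await to work, uploadFile has to return something thenable, which the version in the question does not. One minimal way to do that, keeping the question's jQuery-deferred structure, is to wrap the existing chain in a native Promise and resolve it once the list item has been updated. This is only a sketch under that assumption; getFileBuffer, addFileToFolder, getListItem and updateListItem are the helpers from the question and are assumed unchanged:
function uploadFile(f, userInfo) {
  return new Promise(function (resolve, reject) {
    var fileInput = f["file"];
    var newName = f["fileName"];
    getFileBuffer(fileInput).done(function (arrayBuffer) {
      addFileToFolder(arrayBuffer, newName).done(function (file) {
        getListItem(file.d.ListItemAllFields.__deferred.uri).done(function (listItem) {
          updateListItem(listItem.d.__metadata)
            .done(resolve) // this upload is finished: let the awaiting loop continue
            .fail(reject);
        }).fail(reject);
      }).fail(reject);
    }).fail(reject);
  });
}
Because the loop now awaits each file, the processedCount bookkeeping and the recursive uploadFile(fileList[processedCount], email) call are no longer needed; the "done" UI update can simply run after the loop.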

How do I define and receive data from multiple WebSockets in JavaScript?

I have the following code snippet which I need to extend to define multiple WebSockets and I am clueless as to how do I go about it:
var registerWebSocketHandlers = function(webSocket) {
webSocket.onclose = function(){
setTimeout(service.reopen, reconnectTimeout *= 2);
};
webSocket.onopen = function(e) {
icc.publish('webSocket.reconnect');
reconnectTimeout = defaultReconnectTimeout; //reset this
deferredSend();
};
webSocket.onerror = function(e) {
throw new Error("[WebSocket] An error occured " + e);
};
}
var openConnection = function() {
connectionWasOpenBefore = true;
webSocket = new $window.WebSocket(xyz);
webSocket.id = uniqid();
registerWebSocketHandlers(webSocket);
};
var uniqid = function() {
return (new Date().getTime()).toString(16);
}
service.setMessageEventHandler = function(cb) {
webSocket.onmessage = function(msg) {
if(msg.data.indexOf('Status: connected') === 0)
{
return;
}
var jsonObj = JSON.parse(msg.data);
cb(jsonObj);
};
};
How do I adapt this code to support multiple WebSockets and attach the appropriate callback to each one?
Use the multiton pattern.
var socketFactory = module.factory('SocketFactory', function($rootScope, $window){
var factory = {};
var instances = {};
factory.getInstance = function(name, config){
if(!(name in instances)){
instances[name] = createNewWebSocketInstance(name, config);
}
return instances[name];
};
var createNewWebSocketInstance = function(name, config){
var webSocket = new $window.WebSocket(config.address);
webSocket.id = uniqid();
registerWebSocketHandlers(webSocket, name, config.handlers); //etc.
return webSocket;
};
var registerWebSocketHandlers = function(webSocket, name, handlers){
webSocket.onmessage = function(event){
$rootScope.$emit('SocketMessageReceived_' + name, event.data);
};
//etc...
};
return factory;
});
This will separate your different websockets by name. Use getInstance('whatever') to get a websocket labelled as 'whatever'.
var firstConfig = {url: '*****', username: '****', password: '****', etc: '****'};
// You only need to pass in the config the first time.
var firstWebSocket = SocketFactory.getInstance('firstSocket', firstConfig);
var secondConfig = {url: '####', username: '####', password: '####', etc: '####'};
var secondWebSocket = SocketFactory.getInstance('secondSocket', secondConfig);
Next, from any other area you can access the configured websockets by their instance names.
var firstWebSocket = SocketFactory.getInstance('firstSocket');
// It would probably be a good idea to add this listener in the SocketFactory instead and broadcast an event when there's a message so multiple listeners can respond to it.
firstWebSocket.onmessage = function(){...};
var secondWebSocket = SocketFactory.getInstance('secondSocket');
secondWebSocket.onmessage = function(){...};
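As the comment above suggests, it is usually cleaner to leave onmessage to the factory and listen for the event it emits instead of overwriting the handler. A sketch of that, assuming the 'SocketMessageReceived_firstSocket' event name produced by the factory above (the controller name is made up, and the socket is assumed to have been created with its config elsewhere):
module.controller('FeedController', function ($scope, $rootScope, SocketFactory) {
  SocketFactory.getInstance('firstSocket'); // assumes the socket was already configured
  // React to messages emitted by the factory's onmessage handler.
  var unbind = $rootScope.$on('SocketMessageReceived_firstSocket', function (event, data) {
    console.log('firstSocket said:', data);
  });
  // Deregister the listener when this controller's scope is destroyed.
  $scope.$on('$destroy', unbind);
});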

IDBKeyRange.only and IDBKeyRange.lowerBound together in one query

I need to convert this SQL query to IndexedDB syntax.
"SELECT * FROM article WHERE userid=100 AND date_created > '2015-1-15 18:00:00'"
I'm thinking about these two solutions.
userid index
var articles = [];
objectStore.index('userid').openCursor(100).onsuccess = function(e){
var cursor = e.target.result;
if(cursor){
var article = cursor.value;
if(new Date(article.date_created) > new Date('2015-1-15 18:00:00')){
articles.push(article);
}
cursor.continue();
} else {
console.log('done');
}
};
date_created index
var articles = [];
objectStore.index('date_created').openCursor(IDBKeyRange.lowerBound('2015-1-15 18:00:00')).onsuccess = function(e){
var cursor = e.target.result;
if(cursor){
var article = cursor.value;
if(article.userid === 100){
articles.push(article);
}
cursor.continue();
} else {
console.log('done');
}
};
How can I write a similar query with a compound index on userid and date_created?
Hmm. Try something like using IDBKeyRange.bound:
function queryArticlesByIdDate(id, date, handleArticle) {
var openRequest = indexedDB.open(...);
openRequest.onupgradeneeded = function(event) {
var db = this.result;
var articleStore = db.createObjectStore('articles');
articleStore.createIndex('date-id',['date_created','userid']);
};
openRequest.onsuccess = function(event) {
var db = this.result;
var articleStore = db.transaction('articles').objectStore('articles');
var dateIdIndex = articleStore.index('date-id');
var lowerDate = date;
var upperDate = new Date(date + 1); // or whatever
var bounds = IDBKeyRange.bound([lowerDate, id], [upperDate, id]);
var cursorRequest = dateIdIndex.openCursor(bounds);
cursorRequest.onsuccess = function(event) {
var cursor = this.result;
if(!cursor) return;
handleArticle(cursor.value);
cursor.continue();
};
};
}
queryArticlesByIdDate(5, new Date(...), function handler(article) {
console.dir(article);
});
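One caveat worth adding (my note, not part of the original answer): with the compound key ordered ['date_created','userid'], the bounds above will also match rows from other users whose keys happen to fall inside the range. For a "userid = 100 AND date_created > X" query, the equality-tested field should come first in the key so the range can pin it while only the date varies. A sketch of that variant, assuming date_created is stored as a Date object (string dates in the question's '2015-1-15 18:00:00' format compare lexicographically and would break the range):
// In onupgradeneeded: key the index as [userid, date_created].
articleStore.createIndex('userid-date', ['userid', 'date_created']);

// Later, all articles for user 100 created strictly after the given date:
var lower = [100, new Date('2015-01-15T18:00:00')];
var upper = [100, new Date(8640000000000000)]; // maximum representable Date
var range = IDBKeyRange.bound(lower, upper, true); // lowerOpen=true, i.e. strictly ">"
articleStore.index('userid-date').openCursor(range).onsuccess = function (e) {
  var cursor = e.target.result;
  if (!cursor) return; // done
  handleArticle(cursor.value); // only user 100's newer articles reach here
  cursor.continue();
};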

Node.js Q promise forEach returning undefined

Using Q for Node.js, I am promising an HTTP request and, upon fulfillment, calling another function with the response of that request; that function then iterates over a JSON array from the response, builds up a new array, and returns it.
Debugging Reddit.prototype.parseData I can see the HTTP JSON is passed in, and within the for loop I can console.log data as it is being built, but at the end of the loop I cannot console.log or return the data object; it comes back undefined.
Reddit.js
var Reddit = function(){
this.endpoint = "https://www.reddit.com/r/programming/hot.json?limit=10";
}
Reddit.prototype.parseData = function(json, q){
var dataLength = json.data.children.length,
data = [];
for(var i = 0; i <= dataLength; i++){
var post = {};
post.url = json.data.children[i].data.url;
post.title = json.data.children[i].data.title;
post.score = json.data.children[i].data.score;
console.log(data); //returns data
data.push(post);
}
console.log(data); // returns undefined
return data;
}
module.exports = Reddit;
Feeds.js
var https = require('https'),
q = require('q'),
Reddit = require('./sources/reddit');
var Feeds = function(){
this.reddit = new Reddit();
console.log(this.parseRedditData()); //undefined
}
Feeds.prototype.getData = function(endpoint){
var deferred = q.defer();
https.get(endpoint, function(res) {
var body = '';
res.on('data', function(chunk) {
body += chunk;
});
res.on('end', function() {
deferred.resolve(JSON.parse(body));
});
}).on('error', function(e) {
deferred.reject(e);
});
return deferred.promise;
}
Feeds.prototype.parseRedditData = function(){
var _this = this;
this.getData(this.reddit.endpoint).then(function(data){
return _this.reddit.parseData(data);
});
}
var fe = new Feeds()
As #sholanozie said, you aren't returning anything from parseRedditData. I'm guessing what you want is:
var Feeds = function(){
this.reddit = new Reddit();
this.parseRedditData().then(function(data) {
console.log(data);
});
};
...
Feeds.prototype.parseRedditData = function(){
var _this = this;
return this.getData(this.reddit.endpoint).then(function(data){
return _this.reddit.parseData(data);
});
}
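A side note of mine, not part of the original answer: because parseData runs inside a .then callback, any exception it throws (and the loop does run one index past the end, since it uses i <= dataLength) becomes a rejected promise that is silently swallowed unless a rejection handler is attached, which is why the final console.log never appears. Q exposes .fail for that, and .done to rethrow anything still unhandled:
this.parseRedditData()
  .then(function (data) {
    console.log(data); // the parsed posts
  })
  .fail(function (err) {
    console.error('Reddit feed failed:', err); // surfaces errors thrown inside parseData
  })
  .done(); // Q idiom: rethrow anything still unhandled so it isn't lost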

Object has no method "open" error when using indexedDB

I'm trying to build a small class-like container that will make it a little cleaner to load and store data from the HTML5 IndexedDB. To be honest this is the first time I've ever played with this feature, so my issue could be trivial.
I'm basing my code off of this tutorial:
http://www.html5rocks.com/en/tutorials/indexeddb/todo/
function DBDictionary()
{
this.Holder = {};
this.Entries = new Array();
this.Opened = false;
this.v = "1.0";
this.Holder.indexedDB = window.indexedDB || window.webkitIndexedDB || window.mozIndexedDB;
if ('webkitIndexedDB' in window)
{
window.IDBTransaction = window.webkitIDBTransaction;
window.IDBKeyRange = window.webkitIDBKeyRange;
}
this.Holder.indexedDB = {};
this.Holder.indexedDB.db = null;
this.Holder.indexedDB.onerror = function(e)
{
console.log(e);
};
this.DownloadDB = function()
{
if(this.Opened) return;
var request = this.Holder.indexedDB.open("Storage");
request.onsuccess = function(e)
{
this.Holder.indexedDB.db = e.target.result;
var db = this.Holder.indexedDB.db;
// We can only create Object stores in a setVersion transaction;
if (v!= db.version)
{
var setVrequest = db.setVersion(v);
// onsuccess is the only place we can create Object Stores
setVrequest.onerror = this.Holder.indexedDB.onerror;
setVrequest.onsuccess = function(e)
{
if(db.objectStoreNames.contains("Storage")) db.deleteObjectStore("Storage");
var store = db.createObjectStore("Storage", {keyPath: "Key"});
this.PopulateAll();
};
}
else
{
this.PopulateAll();
}
};
request.onerror = this.Holder.indexedDB.onerror;
};
this.UploadDB = function()
{
this.DeleteAll();
this.SaveAll();
};
this.DeleteAll = function()
{
var db = this.Holder.indexedDB.db;
var trans = db.transaction(["Storage"], IDBTransaction.READ_WRITE);
var store = trans.objectStore("Storage");
Entries.forEach(function(element, index, array)
{
var request = store.delete(index);
request.onerror = function(e)
{
console.log("Error Deleting: ", e);
};
});
};
this.PopulateAll = function()
{
var db = this.Holder.indexedDB.db;
var trans = db.transaction(["Storage"], IDBTransaction.READ_WRITE);
var store = trans.objectStore("Storage");
// Get everything in the store;
var keyRange = IDBKeyRange.lowerBound(0);
var cursorRequest = store.openCursor(keyRange);
cursorRequest.onsuccess = function(e)
{
var result = e.target.result;
//No more results to load
if(!!result == false)
{
if(!this.Opened) this.Opened = true;
return;
}
this.Entries[result.Key] = result.Value;
result.continue();
};
cursorRequest.onerror = this.Holder.indexedDB.onerror;
};
this.SaveAll = function()
{
var db = this.Holder.indexedDB.db;
var trans = db.transaction(["Storage"], IDBTransaction.READ_WRITE);
var store = trans.objectStore("Storage");
Entries.forEach(function(element, index, array)
{
var data = {
"Key": index,
"Value": element,
"timeStamp": new Date().getTime()
};
var request = store.put(data);
request.onerror = function(e) {
console.log("Error Adding: ", e);
};
});
};
}
function main()
{
var dictionary = new DBDictionary();
dictionary.DownloadDB();
dictionary.Entries["hello"] = "world";
alert(dictionary.Entries["hello"]);
}
$(document).ready(main);
My desired implementation looks something like this:
function main()
{
var dictionary = new DBDictionary();
dictionary.DownloadDB();
dictionary.Entries["hello"] = "world";
alert(dictionary.Entries["hello"]);
}
$(document).ready(main);
What this should do is download the data from the browser's IndexedDB store and put it into the object-housed array Entries. When I want to store the values of Entries back into the DB, I would call dictionary.UploadDB();
However, I'm getting a single JavaScript error: Uncaught TypeError: Object # has no method 'open'. I'm pretty much at a loss as to what I'm doing wrong. Can anyone offer me some tips?
Do a typeof check and console.log the this.Holder.indexedDB object to inspect its prototype. Does it inherit the IDBFactory prototype (the type of window.indexedDB)? If it does, the open method would be available to you.
If your window.indexedDB did fire the onsuccess callback, e.target.result would be the correct way to access the newly opened database via the event object. But the fact that you're not getting that far suggests that your this.Holder.indexedDB object is not actually the IndexedDB factory at all.
EDIT: Yes, this is exactly your issue. If you console.log the this.Holder.indexedDB object you get an object that looks like {"db":null}: the later this.Holder.indexedDB = {}; assignment clobbers the real factory you assigned a few lines earlier.
Swap this.Holder.indexedDB for window.webkitIndexedDB at your open invocation and you'll see that 'world' alert pops. JSFiddle here.
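A side note (my addition, not from the original answer): the setVersion flow in the HTML5 Rocks tutorial has since been removed from the spec. A minimal modern sketch of DownloadDB that keeps the factory reference intact and uses onupgradeneeded instead would look roughly like this; the store name and version number are just illustrative:
this.DownloadDB = function () {
  if (this.Opened) return;
  var self = this; // 'this' inside the handlers below is not the DBDictionary
  var idb = window.indexedDB || window.webkitIndexedDB || window.mozIndexedDB;
  // Bumping the version number triggers onupgradeneeded, replacing setVersion.
  var request = idb.open("Storage", 1);
  request.onupgradeneeded = function (e) {
    var db = e.target.result;
    if (!db.objectStoreNames.contains("Storage"))
      db.createObjectStore("Storage", { keyPath: "Key" });
  };
  request.onsuccess = function (e) {
    self.Holder.indexedDB.db = e.target.result; // store the opened database
    self.PopulateAll();
  };
  request.onerror = function (e) {
    console.log(e);
  };
};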
