pjax/ajax and browser back button issues

I use pjax to ajaxify my menu links. This works fine until I use the browser back button. In my JavaScript file I have a common script block (run when the user first hits a URL) and per-menu scripts that have to run again whenever a page is navigated to through pjax:
function myFunction() {
    /* All the script files */
}

$(document).ready(function() {
    myFunction();

    /* pjax menu loading block */
    $(document).on('click', 'a[data-pjax]', function(event) {
        $.pjax.click(event, '#pjax-container');
        $(document).on('pjax:end', function() {
            myFunction();
        });
    });
});
Now when I navigate to a menu item and then come back with the browser back button, the scripts get duplicated (e.g. slider images are duplicated and table sorting stops working). How can I overcome this issue?

You can implement URL-specific loading this way: create a queue of functions that you want to run (load) and tear down (unload) when pjax completes.
The solution is based on JavaScript prototypes.
// create the load and unload queues (PjaxExecQueue is defined below)
var onLoad = new PjaxExecQueue();
var onUnload = new PjaxExecQueue();

// add a function to the queue that runs on every pjax load
onLoad.queue(function() {
    someFunction();
});

// add a function to the queue that tears things down before the next pjax load
onUnload.queue(function() {
    someOtherFunction();
});

// run a function only if the URL contains a particular path section
onLoad.queue_for_url(function_name, 'url_section');
// check for url specific function
var URLPjaxQueueElement = function(exec_function, url) {
    this.method = exec_function;
    if (url) {
        this.url = new RegExp(url);
    } else {
        this.url = /.*/;
    }
};
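For the queues to actually run anything, each queue element also needs a fire method, and the id-based variant referenced by queue_if_id_present needs a definition. They are not shown above, so here is a minimal sketch consistent with how PjaxExecQueue calls them below:

// run the queued function if the current path matches its URL pattern
URLPjaxQueueElement.prototype.fire = function(match_loc) {
    if (this.url.test(match_loc)) {
        this.method();
    }
};

// hypothetical id-based element: run the function only if an element
// with the given id exists on the freshly loaded page
var IDPjaxQueueElement = function(exec_function, id) {
    this.method = exec_function;
    this.id = id;
};

IDPjaxQueueElement.prototype.fire = function() {
    if (document.getElementById(this.id)) {
        this.method();
    }
};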
// create a queue object
var PjaxExecQueue = function () {
    this.url_exec_queue = [];
    this.id_exec_queue = [];
    this.fired = false;
    this.indicating_loading = false;
    this.content = $('#content');
};
PjaxExecQueue.prototype = {
    queue: function (exec_function) {
        this.url_exec_queue.unshift(new URLPjaxQueueElement(exec_function));
    },
    queue_for_url: function (exec_function, url_pattern) {
        this.url_exec_queue.unshift(new URLPjaxQueueElement(exec_function, url_pattern));
    },
    queue_if_id_present: function(exec_function, id) {
        this.id_exec_queue.unshift(new IDPjaxQueueElement(exec_function, id));
    },
    fire: function () {
        if (this.indicating_loading) {
            this.content.removeClass("indicate-loading");
            this.indicating_loading = false;
        }
        if (!this.fired) {
            var match_loc = window.location.pathname;
            var i = this.url_exec_queue.length;
            while (i--) {
                this.url_exec_queue[i].fire(match_loc);
            }
            i = this.id_exec_queue.length;
            while (i--) {
                this.id_exec_queue[i].fire(match_loc);
            }
        }
        this.fired = true;
    },
    reset: function() {
        this.fired = false;
    },
    loading: function () {
        this.content.addClass("indicate-loading");
        this.indicating_loading = true;
        this.reset();
    },
    count: function () {
        return this.url_exec_queue.length;
    },
    show: function (for_url) {
        for (var i = 0; i < this.url_exec_queue.length; i++) {
            if (for_url) {
                if (this.url_exec_queue[i].url.test(for_url)) {
                    console.log("" + this.url_exec_queue[i].method);
                }
            } else {
                console.log(this.url_exec_queue[i].url + " : " + this.url_exec_queue[i].method);
            }
        }
    }
};
// before the pjax request is sent: show the loading state and run the unload queue
$(document).on('pjax:beforeSend', function() {
    onLoad.loading();
    onUnload.fire();
});

// after pjax completes: run the load queue and reset the unload queue
$(document).on('pjax:complete', function() {
    onLoad.fire();
    onUnload.reset();
});
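As a side note on the original snippet: the duplication also comes from binding a new pjax:end handler inside every click handler, so each click stacks another listener, and a back/forward navigation (which fires pjax:end as well) then runs myFunction() several times. A minimal sketch of binding everything exactly once, using jquery-pjax's $(document).pjax() helper:

$(document).ready(function() {
    myFunction();

    // delegate pjax handling for all menu links once
    $(document).pjax('a[data-pjax]', '#pjax-container');

    // bound a single time, so every pjax navigation (including the
    // browser back button) re-runs the initialisation exactly once
    $(document).on('pjax:end', function() {
        myFunction();
    });
});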

Related

A JavaScript function is repeated many times

I have two functions, print and callPrint, below. The first time I call print it works correctly, but on the second or third call, callPrint gets called two or three times.
I have debugged the attached file.
function print(url) {
    console.log('print');
    var _this = this, iframeId = 'iframeprint', $iframe = $('iframe#iframeprint');
    if ($iframe.attr('src') != url) {
        $.when(
            $iframe.attr('src', 'about:blank'),
            $iframe.load(function () {
                console.log($iframe.prop('contentWindow').document.readyState);
            })
        ).done(function () {
            $iframe.attr('src', url);
            $iframe.load(function () {
                console.log('new');
                _this.callPrint(iframeId);
            });
        });
    } else {
        console.log('old');
        _this.callPrint(iframeId);
    }
}

// initiates print once content has been loaded into iframe
function callPrint(iframeId) {
    console.log('callPrint');
    $('div.wait').hide();
    var PDF = document.getElementById(iframeId);
    PDF.focus();
    PDF.contentWindow.print();
    return false;
}
The problem is that you're attaching two new load() event handlers to the iframe every time print() is called. To fix this, add a single load() event handler and call your function from within it; the handler will be triggered whenever you update the src attribute of the element. Try this:
var $iframe = $('#iframeprint').load(function() {
    // You'll need to make sure the function is in scope of the handler
    // There's not enough information in the OP for me to show you how
    callPrint('iframeprint');
});

function print(url) {
    if ($iframe.attr('src') != url) {
        $iframe.attr('src', url);
    } else {
        callPrint('iframeprint');
    }
}
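Side note (not part of the original answer): the .load(handler) event shorthand used here was deprecated in jQuery 1.8 and removed in 3.0, so on current jQuery the same single binding would be written with .on():

// equivalent single binding; required form on jQuery 3.x
var $iframe = $('#iframeprint').on('load', function () {
    callPrint('iframeprint');
});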
Thanks "Rory McCrossan". I add setTimeout function when callPrint so dialog print will open. But I can't vote for you at the moment.
var $iframe = $('iframe#iframeprint').load(function () {
    // You'll need to make sure the function is in scope of the handler
    // There's not enough information in the OP for me to show you how
    setTimeout(function () {
        callPrint('iframeprint');
    }, 100);
});

function print(url) {
    if ($iframe.attr('src') != url) {
        $iframe.attr('src', url);
    } else {
        console.log('old');
        callPrint('iframeprint');
    }
}

// initiates print once content has been loaded into iframe
function callPrint(iframeId) {
    $('div.wait').hide();
    var PDF = document.getElementById(iframeId);
    PDF.focus();
    PDF.contentWindow.print();
}

Keep browser history with django-el-pagination (lazy pagination)

I'm using django-el-pagination to lazy-load entries.
When I click on an entry and then use the browser back button, all of the lazily loaded content is gone. I tried adding window.history.pushState(), but then when I use the browser back button I only get the current page (i.e. ?page=4), and none of the entries above it are loaded.
Is there any way to implement history correctly so that the user is back at the same place when they use the browser back button?
$.endlessPaginate({
    paginateOnScroll: true,
    paginateOnScrollMargin: 400,
    paginateOnScrollChunkSize: 2,
    onCompleted: function(context, fragment) {
        window.history.pushState(null, null, context.url);
    }
});
Edit 1
Here is the JavaScript for the .endlessPaginate function:
'use strict';

(function ($) {
    // Fix JS String.trim() function is unavailable in IE<9 #45
    if (typeof(String.prototype.trim) === "undefined") {
        String.prototype.trim = function() {
            return String(this).replace(/^\s+|\s+$/g, '');
        };
    }

    $.fn.endlessPaginate = function(options) {
        var defaults = {
                // Twitter-style pagination container selector.
                containerSelector: '.endless_container',
                // Twitter-style pagination loading selector.
                loadingSelector: '.endless_loading',
                // Twitter-style pagination link selector.
                moreSelector: 'a.endless_more',
                // Digg-style pagination page template selector.
                pageSelector: '.endless_page_template',
                // Digg-style pagination link selector.
                pagesSelector: 'a.endless_page_link',
                // Callback called when the user clicks to get another page.
                onClick: function() {},
                // Callback called when the new page is correctly displayed.
                onCompleted: function() {},
                // Set this to true to use the paginate-on-scroll feature.
                paginateOnScroll: false,
                // If paginate-on-scroll is on, this margin will be used.
                paginateOnScrollMargin : 1,
                // If paginate-on-scroll is on, it is possible to define chunks.
                paginateOnScrollChunkSize: 0
            },
            settings = $.extend(defaults, options);

        var getContext = function(link) {
            return {
                key: link.attr('rel').split(' ')[0],
                url: link.attr('href')
            };
        };

        return this.each(function() {
            var element = $(this),
                loadedPages = 1;

            // Twitter-style pagination.
            element.on('click', settings.moreSelector, function() {
                var link = $(this),
                    html_link = link.get(0),
                    container = link.closest(settings.containerSelector),
                    loading = container.find(settings.loadingSelector);
                // Avoid multiple Ajax calls.
                if (loading.is(':visible')) {
                    return false;
                }
                link.hide();
                loading.show();
                var context = getContext(link);
                // Fire onClick callback.
                if (settings.onClick.apply(html_link, [context]) !== false) {
                    var data = 'querystring_key=' + context.key;
                    // Send the Ajax request.
                    $.get(context.url, data, function(fragment) {
                        container.before(fragment);
                        container.remove();
                        // Increase the number of loaded pages.
                        loadedPages += 1;
                        // Fire onCompleted callback.
                        settings.onCompleted.apply(
                            html_link, [context, fragment.trim()]);
                    });
                }
                return false;
            });

            // On scroll pagination.
            if (settings.paginateOnScroll) {
                var win = $(window),
                    doc = $(document);
                doc.scroll(function(){
                    if (doc.height() - win.height() -
                            win.scrollTop() <= settings.paginateOnScrollMargin) {
                        // Do not paginate on scroll if chunks are used and
                        // the current chunk is complete.
                        var chunckSize = settings.paginateOnScrollChunkSize;
                        if (!chunckSize || loadedPages % chunckSize) {
                            element.find(settings.moreSelector).click();
                        } else {
                            element.find(settings.moreSelector).addClass('endless_chunk_complete');
                        }
                    }
                });
            }

            // Digg-style pagination.
            element.on('click', settings.pagesSelector, function() {
                var link = $(this),
                    html_link = link.get(0),
                    context = getContext(link);
                // Fire onClick callback.
                if (settings.onClick.apply(html_link, [context]) !== false) {
                    var page_template = link.closest(settings.pageSelector),
                        data = 'querystring_key=' + context.key;
                    // Send the Ajax request.
                    page_template.load(context.url, data, function(fragment) {
                        // Fire onCompleted callback.
                        settings.onCompleted.apply(
                            html_link, [context, fragment.trim()]);
                    });
                }
                return false;
            });
        });
    };

    $.endlessPaginate = function(options) {
        return $('body').endlessPaginate(options);
    };
})(jQuery);
short answer: no. The whole point of 'endless pagination' is to not reload a (new) page, therefore there is no history.
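If all you want is for the URL to reflect the last loaded chunk (so that a reload or a shared link starts from a sensible page) without adding one history entry per chunk, history.replaceState can be used in onCompleted instead of pushState. To be clear, this is only a sketch of that compromise and does not restore the earlier chunks when coming back:

$.endlessPaginate({
    paginateOnScroll: true,
    paginateOnScrollMargin: 400,
    paginateOnScrollChunkSize: 2,
    onCompleted: function(context, fragment) {
        // replace the current history entry instead of pushing a new one,
        // so the back button leaves the listing in a single step
        if (window.history && window.history.replaceState) {
            window.history.replaceState(null, '', context.url);
        }
    }
});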

Passing functions between RequireJS files

I've got a file which needs to run on page load (randomise_colors.js), but also needs to be called by another file as part of a callback function (in infinite_scroll.js). The randomise_colors script just loops through a list of posts on the page and assigns each one a color from an array which is used on the front-end.
Infinite Scroll loads new posts into the DOM on a button click, but because randomise_colors.js has already run on page load, the newly loaded content is not affected, so I need it to run again. I'm open to other suggestions if it sounds like I could be tackling the problem in a better way; I'm no JS expert.
Currently I'm getting Uncaught ReferenceError: randomise_colours is not defined, referring to this line of infinite_scroll.js:
randomise_colours.init();
I'm calling all files that need to be loaded on document.ready in app.js:
require(['base/randomise-colours', 'base/infinite-scroll'],
    function(randomise_colours, infinite_scroll) {
        var $ = jQuery;
        $(document).ready(function() {
            infinite_scroll.init();
            randomise_colours.init();
        });
    }
);
This is infinite_scroll.js, which initialises Infinite Scroll and contains the callback. The callback function runs whenever new items are loaded in via AJAX by the Infinite Scroll jQuery plugin. I've put asterisks around the area where I need to run the randomise_colours.init() function from randomise_colors.js.
define(['infinitescroll'], function() {
    var $ = jQuery,
        $loadMore = $('.load-more-posts a');

    function addClasses() {
        **randomise_colours.init();**
    };

    return {
        init: function() {
            if ($loadMore.length >= 1) {
                this.setUp();
            } else {
                return false;
            }
        },
        setUp: function() {
            this.initInfiniteScroll();
        },
        initInfiniteScroll: function() {
            $('.article-listing').infinitescroll({
                navSelector : '.load-more-posts',
                nextSelector : '.load-more-posts a',
                itemSelector : '.standard-post'
            }, function(newItems) {
                addClasses();
            });

            // Unbind the standard scroll-load function
            $(window).unbind('.infscr');

            // Click handler to retrieve new posts
            $loadMore.on('click', function() {
                $('.article-listing').infinitescroll('retrieve');
                return false;
            });
        }
    };
});
And this is my randomise_colors.js file, which runs fine on load but needs to be called again after new content has loaded in.
define([], function() {
    var $ = jQuery,
        $colouredSlide = $('.image-overlay'),
        colours = ['#e4cba3', '#867d75', '#e1ecb9', '#f5f08a'],
        used = [];

    function pickRandomColour() {
        if (colours.length == 0) {
            colours.push.apply(colours, used);
            used = [];
        }
        var selected = colours[Math.floor(Math.random() * colours.length)];
        var getSelectedIndex = colours.indexOf(selected);
        colours.splice(getSelectedIndex, 1);
        used.push(selected);
        return selected;
    };

    return {
        init: function() {
            if ($colouredSlide.length >= 1) {
                this.setUp();
            } else {
                return false;
            }
        },
        setUp: function() {
            this.randomiseColours();
        },
        randomiseColours: function() {
            console.log('randomise');
            $colouredSlide.each(function() {
                var newColour = pickRandomColour();
                $(this).css('background', newColour);
            });
        }
    };
});
You would have to reference randomise_colours inside the infinite_scroll file, so you need to change your define call to the following:
define(['infinitescroll', 'base/randomise-colours'], function(infinitescroll, randomise_colours)
Remember that when using RequireJS you need to pull in every module you use through the define dependency list (and receive it as a function parameter), otherwise its exports will not be recognised.
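Putting that together, infinite_scroll.js declares the module as a dependency and receives it as a parameter. The module path ('base/randomise-colours') is assumed to match the one used in app.js; adjust it to your RequireJS paths config if it differs. A sketch of just the changed parts:

define(['infinitescroll', 'base/randomise-colours'], function(infinitescroll, randomise_colours) {
    var $ = jQuery,
        $loadMore = $('.load-more-posts a');

    function addClasses() {
        // resolves now, because the module is injected by RequireJS
        randomise_colours.init();
    }

    // ...the rest of the module stays the same
});

One more thing to watch: randomise_colors.js caches $colouredSlide = $('.image-overlay') once, when the module is first defined, so calling init() again will not see posts added later. Re-querying the selector inside randomiseColours() (e.g. var $colouredSlide = $('.image-overlay');) makes the repeated call actually pick up the new content.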

jQuery Find and Replace is Hanging up the browser! Data size too big?

With a lot of help from @kalley we have found out that if I comment out the following two lines, the lag is gone!
var $tableContents = $table.find('tbody');
var $html = $('<tbody/>').html(data);
But how do I keep the code above and still get rid of the lag?
MORE INFO:
The code below works, but the problem is that the $.get is causing the browser to hang until the Ajax request completes. I need flow control (or something) that will solve this problem without locking up the browser until Ajax completes the GET request.
The biggest lag/lock-up is at $.get("updatetable.php", ...), since the other requests return seven or fewer values and this one returns a lot more (200-300 KB). I would like to implement some sort of flow control here, or make the script wait about five seconds before firing the update command for tablesorter and before showing the toast message, so that the $.get("updatetable.php", ...) request has time to fetch its data. I just don't understand why it locks up the browser while it is getting the data; is it trying to fire the other commands, and is that what's causing the lag?
Here are the steps:
1. $.get("getlastupdate.php", ...) fires every 10 seconds or so to check whether the date and time are still the same. The returned data looks like this: 20130812092636; the format is YYYYmmddHHmmss.
2. If the date and time are not the same as in the last GET, then $.get("getlastupdate2.php", ...) is triggered, and its response is placed into a toast message and displayed to the user via $().toastmessage('showNoticeToast', Vinfoo);.
3. Before or after that request, another GET fires: $.get('updatetable.php', ...). This fetches the updated table info, replaces the old table body with the new one, and then updates/re-sorts the table.
4. At the end of it all I want to $.get("ajaxcontrol.php", ...), which returns 1 or 2: if the user is logged in it returns 2; otherwise it returns 1, the session is destroyed and the user is logged out.
<script type="text/javascript" src="tablesorter/jquery-1.10.2.min.js"></script>
<script type="text/javascript" src="tablesorter/final/jquery.tablesorter.js"></script>
<script type="text/javascript" src="tablesorter/final/jquery.tablesorter.widgets.js"></script>
<script type="text/javascript" src="tablesorter/final/toastmessage/jquery.toastmessage-min.js"></script>
<script type="text/javascript" src="tablesorter/qtip/jquery.qtip.min.js"></script>
<script type="text/javascript">
var comper;
function checkSession() {
return $.get("ajaxcontrol.php", function (DblIn) {
console.log('checking for session');
if (DblIn == 1) {
window.location = 'loggedout.php';
}
}).then(updateTable);
}
function checkComper() {
var SvInfo;
var onResponse = function (comperNow) {
if (comper === undefined) {
comper = comperNow;
} else if (comper !== comperNow) {
var Vinfoo;
comper = comperNow;
// returning this $.get will make delay done until this is done.
return $.get("getlastupdate2.php", function (primaryAddType) {
Vinfoo = primaryAddType;
$().toastmessage('showNoticeToast', Vinfoo);
}).then(checkSession);
}
};
$.get('getlastupdate.php').then(onResponse).done(function () {
tid = setTimeout(checkComper, 2000);
});
}
function updateTable() {
return $.get('updatetable.php', function (data) {
console.log('update table');
var $table = $("table.tablesorter");
var $tableContents = $table.find('tbody')
var $html = $('<tbody/>').html(data);
$tableContents.replaceWith('<tbody>' + data + '</tbody>')
//$tableContents.replaceWith($html)
$table.trigger("update", [true]);
var currentUrl = document.getElementById("frmcontent").contentWindow.location.href;
var urls = ['indexTOM.php', 'index1.php'],
frame = document.getElementById('frmcontent').contentDocument;
for (var i = 0; i < urls.length; i++) {
var url = urls[i];
if (frame.location.href.indexOf(url) !== -1) {
frame.location.reload()
}
}
$('[title!=""]').qtip({});
});
};
$(function () {
var tid = setTimeout(checkComper, 2000);
$("#append").click(function (e) {
// We will assume this is a user action
e.preventDefault();
updateTable();
});
// call the tablesorter plugin
$("table.tablesorter").tablesorter({
theme: 'blue',
// hidden filter input/selects will resize the columns, so try to minimize the change
widthFixed: true,
// initialize zebra striping and filter widgets
widgets: ["saveSort", "zebra", "filter"],
headers: {
8: {
sorter: false,
filter: false
}
},
widgetOptions: {
filter_childRows: false,
filter_columnFilters: true,
filter_cssFilter: 'tablesorter-filter',
filter_filteredRow: 'filtered',
filter_formatter: null,
filter_functions: null,
filter_hideFilters: false, // true, (see note in the options section above)
filter_ignoreCase: true,
filter_liveSearch: true,
filter_reset: 'button.reset',
filter_searchDelay: 300,
filter_serversideFiltering: false,
filter_startsWith: false,
filter_useParsedData: false
}
});
// External search
$('button.search').click(function () {
var filters = [],
col = $(this).data('filter-column'), // zero-based index
txt = $(this).data('filter-text'); // text to add to filter
filters[col] = txt;
$.tablesorter.setFilters($('table.hasFilters'), filters, true); // new v2.9
return false;
});
});
</script>
Maybe instead of using setInterval, you should consider switching to setTimeout. It will give you more control over when the timer repeats:
function checkComper() {
    var SvInfo;
    var onResponse = function (comperNow) {
        if (comper === undefined) {
            comper = comperNow;
        } else if (comper !== comperNow) {
            var Vinfoo;
            comper = comperNow;
            // returning this $.get will make delay done until this is done.
            return $.get("getlastupdate2.php", function (primaryAddType) {
                Vinfoo = primaryAddType;
                $().toastmessage('showNoticeToast', Vinfoo);
            }).then(checkSession);
        }
    };
    $.get('getlastupdate.php').then(onResponse).done(function () {
        tid = setTimeout(checkComper, 10000);
    });
}

var tid = setTimeout(checkComper, 10000);
Then you can keep it async: true
Here's a fiddle showing it working using echo.jsontest.com and some fudging numbers.
Since the click event callback seems to be where the issue is, try doing this and see if it removes the lag (I removed other comments to make it more brief):
function checkSession() {
    return $.get("ajaxcontrol.php", function (DblIn) {
        console.log('checking for session');
        if (DblIn == 1) {
            window.location = 'loggedout.php';
        }
    }).then(updateTable);
}

function updateTable() {
    return $.get('updatetable.php', function (data) {
        console.log('update table');
        var $table = $("table.tablesorter");
        var $tableContents = $table.find('tbody');
        //var $html = $('<tbody/>').html(data);
        //$tableContents.replaceWith($html);
        // replaceWith text seems to be much faster:
        // http://jsperf.com/jquery-html-vs-replacewith/4
        $tableContents.replaceWith('<tbody>' + data + '</tbody>');
        //$table.trigger("update", [true]);
        var currentUrl = document.getElementById("frmcontent").contentWindow.location.href;
        var urls = ['indexTOM.php', 'index1.php'],
            frame = document.getElementById('frmcontent').contentDocument;
        for (var i = 0; i < urls.length; i++) {
            var url = urls[i];
            if (frame.location.href.indexOf(url) !== -1) {
                frame.location.reload();
            }
        }
        $('[title!=""]').qtip({});
    });
};
$("#append").click(function (e) {
// We will assume this is a user action
e.preventDefault();
updateTable();
});
I commented out $table.trigger("update", [true]) since if you sort the table on the server before you return it, you shouldn't need to run that, which I'm almost certain is where the bottleneck is.
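If the client-side re-sort is still required, one option (an assumption about the workflow, not something from the thread) is to keep the trigger but hand control back to the event loop between inserting the rows and re-sorting, so the two heavy synchronous steps don't run as one long block:

function updateTable() {
    return $.get('updatetable.php', function (data) {
        var $table = $('table.tablesorter');
        $table.find('tbody').replaceWith('<tbody>' + data + '</tbody>');
        // defer the re-sort to a separate task
        // (the second argument of "update" tells tablesorter to resort)
        setTimeout(function () {
            $table.trigger('update', [true]);
        }, 0);
    });
}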
It is really hard to untangle the mess you have, but if what you want is Ajax requests every 10 seconds, it makes sense to separate that polling logic from the business logic applied to the data from the server.
Your code would also really benefit from using promises. Consider this example:
$(document).ready(function() {
    var myData = { }
      , ajaxPromise = null

    setInterval(callServer, 1000)

    function callServer() {
        ajaxPromise = updateCall()
            .then(controlCall)
            .done(handler)
            .fail(errorHandler)
    }

    function updateCall() {
        return $.get('updatetable.php', function(data) {
            myData.update = data
        })
    }

    function controlCall() {
        return $.get('ajaxcontrol.php', function(data) {
            myData.control = data
        })
    }

    function handler() {
        console.dir(myData)
    }

    function errorHandler(err) {
        console.log(err)
        console.dir(myData)
    }
})
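One caveat with setInterval(callServer, 1000) in the example above: if a round of requests takes longer than the interval, the calls start to overlap. The recursive setTimeout pattern from the earlier answer avoids that; applied to this structure it would look like:

// schedule the next poll only after the previous round has finished
function callServer() {
    updateCall()
        .then(controlCall)
        .done(handler)
        .fail(errorHandler)
        .always(function () {
            setTimeout(callServer, 1000);
        });
}

setTimeout(callServer, 1000);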

setInterval with other jQuery events - Too many recursions

I'm trying to build a JavaScript listener for a small page that uses AJAX to load content based on the anchor in the URL. Looking online, I found and modified a script that uses setInterval() to do this, and so far it works fine. However, I have other jQuery code in $(document).ready() for special effects on the menus and content. If I use setInterval(), none of the other jQuery effects work. I finagled a way to get it to work by including the jQuery effects in the setInterval() loop, like so:
$(document).ready(function() {
    var pageScripts = function() {
        pageEffects();
        pageURL();
    }
    window.setInterval(pageScripts, 500);
});
var currentAnchor = null;

function pageEffects() {
    // Popup Menus
    $(".bannerMenu").hover(function() {
        $(this).find("ul.bannerSubmenu").slideDown(300).show();
    }, function() {
        $(this).find("ul.bannerSubmenu").slideUp(400);
    });
    $(".panel").hover(function() {
        $(this).find(".panelContent").fadeIn(200);
    }, function() {
        $(this).find(".panelContent").fadeOut(300);
    });

    // REL Links Control
    $("a[rel='_blank']").click(function() {
        this.target = "_blank";
    });
    $("a[rel='share']").click(function(event) {
        var share_url = $(this).attr("href");
        window.open(share_url, "Share", "width=768, height=450");
        event.preventDefault();
    });
}
function pageURL() {
    if (currentAnchor != document.location.hash) {
        currentAnchor = document.location.hash;
        if (!currentAnchor) {
            query = "section=home";
        } else {
            var splits = currentAnchor.substring(1).split("&");
            var section = splits[0];
            delete splits[0];
            var params = splits.join("&");
            var query = "section=" + section + params;
        }
        $.get("loader.php", query, function(data) {
            $("#load").fadeIn("fast");
            $("#content").fadeOut(100).html(data).fadeIn(500);
            $("#load").fadeOut("fast");
        });
    }
}
This works fine for a while, but after a few minutes of the page being open it drags to a near stop in IE and Firefox. I checked the Firefox error console and it reports "too many recursions". Chrome seems not to care, and the page continues to run more or less normally no matter how long it has been open.
It would seem that the pageEffects() call is causing the recursion issue; however, any attempt to move it out of the loop breaks the effects, and they stop working as soon as setInterval completes its first loop.
Any help on this would be greatly appreciated!
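For what it's worth, the slow-down and the recursion error come from re-running pageEffects() every 500 ms: each pass stacks another set of hover and click handlers on the same elements. The smallest change that avoids this (assuming the menus and panels exist at load time) is to bind the effects once and let only the hash polling repeat:

$(document).ready(function() {
    pageEffects();                     // bind the hover/click handlers exactly once
    window.setInterval(pageURL, 500);  // only the hash polling repeats
});

For elements inside the AJAX-replaced #content area this alone is not enough: their handlers have to be re-bound after each load, or attached via delegation, which is what the answers below do.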
I am guessing that the pageEffects need to be applied to the content loaded by pageURL.
At the very least this should be more efficient and prevent duplicate handlers:
$(document).ready(function() {
    pageEffects($('body'));
    (function(){
        pageURL();
        window.setTimeout(arguments.callee, 500);
    })();
});

var currentAnchor = null;

function pageEffects(parent) {
    // Popup Menus
    parent.find(".bannerMenu").each(function() {
        $(this).unbind('mouseenter mouseleave');
        var proxy = {
            subMenu: $(this).find("ul.bannerSubmenu"),
            handlerIn: function() {
                this.subMenu.slideDown(300).show();
            },
            handlerOut: function() {
                this.subMenu.slideUp(400).hide();
            }
        };
        // $.proxy keeps `this` pointing at the proxy object inside the handlers
        $(this).hover($.proxy(proxy.handlerIn, proxy), $.proxy(proxy.handlerOut, proxy));
    });
    parent.find(".panel").each(function() {
        $(this).unbind('mouseenter mouseleave');
        var proxy = {
            content: $(this).find(".panelContent"),
            handlerIn: function() {
                this.content.fadeIn(200).show();
            },
            handlerOut: function() {
                this.content.slideUp(400).hide();
            }
        };
        $(this).hover($.proxy(proxy.handlerIn, proxy), $.proxy(proxy.handlerOut, proxy));
    });

    // REL Links Control
    parent.find("a[rel='_blank']").each(function() {
        this.target = "_blank";
    });
    parent.find("a[rel='share']").click(function(event) {
        var share_url = $(this).attr("href");
        window.open(share_url, "Share", "width=768, height=450");
        event.preventDefault();
    });
}

function pageURL() {
    if (currentAnchor != document.location.hash) {
        currentAnchor = document.location.hash;
        if (!currentAnchor) {
            query = "section=home";
        } else {
            var splits = currentAnchor.substring(1).split("&");
            var section = splits[0];
            delete splits[0];
            var params = splits.join("&");
            var query = "section=" + section + params;
        }
        var content = $("#content");
        $.get("loader.php", query, function(data) {
            $("#load").fadeIn("fast");
            content.fadeOut(100).html(data).fadeIn(500);
            $("#load").fadeOut("fast");
            // rebind the effects once the new content is actually in the DOM
            pageEffects(content);
        });
    }
}
Thanks for the suggestions. I tried a few of them and they still did not produce the desired effect. After some cautious testing, I found out what was happening: with jQuery (and presumably JavaScript as a whole), elements brought in through an AJAX callback are not bound to the handlers that were originally attached on document ready; they must be rebound. You can do this either by re-running all the jQuery binding code in a successful callback, or by using .live() from jQuery's library. I opted for .live() and it works like a charm now, with no more recursion errors. :D
$(document).ready(function() {
    // Popup Menus
    $(".bannerMenu").live("hover", function(event) {
        if (event.type == "mouseover") {
            $(this).find("ul.bannerSubmenu").slideDown(300);
        } else {
            $(this).find("ul.bannerSubmenu").slideUp(400);
        }
    });

    // Rollover Content
    $(".panel").live("hover", function(event) {
        if (event.type == "mouseover") {
            $(this).find(".panelContent").fadeIn(200);
        } else {
            $(this).find(".panelContent").fadeOut(300);
        }
    });

    // HREF Events
    $("a[rel='_blank']").live("click", function(event) {
        var target = $(this).attr("href");
        window.open(target, "_blank");
        event.preventDefault();
    });
    $("a[rel='share']").live("click", function(event) {
        var share_url = $(this).attr("href");
        window.open(share_url, "Share", "width=768, height=450");
        event.preventDefault();
    });

    setInterval(checkAnchor, 500);
});

var currentAnchor = null;

function checkAnchor() {
    if (currentAnchor != document.location.hash) {
        currentAnchor = document.location.hash;
        if (!currentAnchor) {
            query = "section=home";
        } else {
            var splits = currentAnchor.substring(1).split("&");
            var section = splits[0];
            delete splits[0];
            var params = splits.join("&");
            var query = "section=" + section + params;
        }
        $.get("loader.php", query, function(data) {
            $("#load").fadeIn(200);
            $("#content").fadeOut(200).html(data).fadeIn(200);
            $("#load").fadeOut(200);
        });
    }
}
Anywho, the page works as intended even in IE (which I rarely check for compatibility). Hopefully, some other newb will learn from my mistakes :p.
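One final note for anyone reading this today: .live() was deprecated in jQuery 1.7 and removed in 1.9. The same delegated behaviour is written with .on() against a static ancestor such as document; for example, the menu hover above becomes:

// delegated binding on jQuery 1.7+ (replacement for .live("hover", ...))
$(document).on('mouseenter mouseleave', '.bannerMenu', function(event) {
    var $submenu = $(this).find('ul.bannerSubmenu');
    if (event.type === 'mouseenter') {
        $submenu.slideDown(300);
    } else {
        $submenu.slideUp(400);
    }
});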
