Loading jQuery plugins and multiple scripts best practices - javascript

I'm currently looking for a way to load multiple scripts/plugins without listing them all out in the header.
Having a single load.js that loads everything else would be much more elegant to me.
$(function() {
    var scripts = ['scripts/jquery1.5.js', 'scripts/easing.js', 'scripts/scroll.js', 'scripts/main.js'];
    for (var i = 0; i < scripts.length; i++) {
        $.getScript(scripts[i]);
    }
});
I currently have something like this but can't get it to work for some reason. Any ideas?
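Two likely culprits: $.getScript is itself a jQuery method, so jquery1.5.js has to be on the page via a normal script tag before this code can run, and $.getScript fetches each file asynchronously, so easing.js or main.js may execute before the files they depend on. A minimal sketch of loading the remaining files strictly in order by chaining the success callbacks (loadSequentially is just an illustrative name, not anything from the question):

function loadSequentially(urls, done) {
    // load the first url, then recurse into the rest once it has executed
    if (urls.length === 0) {
        if (done) { done(); }
        return;
    }
    $.getScript(urls[0], function () {
        loadSequentially(urls.slice(1), done);
    });
}

$(function () {
    loadSequentially(['scripts/easing.js', 'scripts/scroll.js', 'scripts/main.js'], function () {
        // every script has loaded and run at this point
    });
});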

Have you looked at head.js?
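For reference, a rough sketch of how head.js is typically used; treat the exact call as an assumption and check the head.js docs, but the library's point is that it downloads in parallel while preserving execution order:

head.js("scripts/jquery1.5.js", "scripts/easing.js", "scripts/scroll.js", "scripts/main.js", function () {
    // all files have loaded and executed in order
});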

Here is my conclusion about head.js; I have done some benchmarks myself:
http://blog.feronovak.com/2011/03/headjs-script-is-it-really-necessary.html
It is a subjective opinion and the benchmarks are not by any means scientific.

This is my solution: check whether the file has already been added (tracked in a string), and then load one file after another. Works perfectly!
var filesadded = ""; // list of files already added, in the form "[filename1][filename2]..."
function loadJSQueue(array, success) {
    if (array.length != 0) {
        if (filesadded.indexOf("[" + array[0] + "]") == -1) {
            filesadded += "[" + array[0] + "]";
            var oHead = document.getElementsByTagName('head')[0];
            var oScript = document.createElement('script');
            oScript.type = 'text/javascript';
            oScript.src = array[0];
            array.shift();
            var loaded = false;
            var onLoaded = function () {
                // guard so the queue only advances once per script
                if (loaded) return;
                loaded = true;
                loadJSQueue(array, success);
            };
            // modern browsers fire onload; old IE fires onreadystatechange
            oScript.onload = onLoaded;
            oScript.onreadystatechange = function () {
                if (this.readyState == 'complete' || this.readyState == 'loaded') {
                    onLoaded();
                }
            };
            oHead.appendChild(oScript);
        }
        else {
            array.shift();
            loadJSQueue(array, success);
        }
    }
    else {
        success();
    }
}
Call it with:
loadJSQueue(["../../JavaScript/plupload/js/jquery.plupload.queue/jquery.plupload.queue.js",
"../../JavaScript/plupload/js/plupload.js",
"../../JavaScript/plupload/js/plupload.html4.js"
], function(){alert("success");})

loadScripts(['script1.js','script2.js'], function(){ alert('scripts loaded'); });
function loadScripts(scripts, callback){
    var scripts = scripts || [];
    var callback = callback || function(){};
    var loaded = 0; // count completed loads, since the scripts load in parallel
    for(var i = 0; i < scripts.length; i++){
        (function(i) {
            $.getScript(scripts[i], function() {
                loaded++;
                // fire the callback only once every script has finished loading
                if(loaded === scripts.length){
                    callback();
                }
            });
        })(i);
    }
}

Related

Selecting and Manipulating Elements from Ajax

I use code to load HTML into a div container. The goal is to have a single-page application. Now I want to work with the loaded content and sometimes change text or styling. At the moment it doesn't work the way I want.
This is one of three parts, and I think it's the most important one:
Router.prototype = {
    routes: undefined,
    rootElem: undefined,
    constructor: function (routes) {
        this.routes = routes;
        this.rootElem = document.getElementById('app');
    },
    init: function () {
        var r = this.routes;
        (function(scope, r) {
            window.addEventListener('hashchange', function (e) {
                scope.hasChanged(scope, r);
            });
        })(this, r);
        this.hasChanged(this, r);
    },
    hasChanged: function(scope, r){
        if (window.location.hash.length > 0) {
            for (var i = 0, length = r.length; i < length; i++) {
                var route = r[i];
                if(route.isActiveRoute(window.location.hash.substr(1))) {
                    scope.goToRoute(route.htmlName);
                }
            }
        } else {
            for (var i = 0, length = r.length; i < length; i++) {
                var route = r[i];
                if(route.default) {
                    scope.goToRoute(route.htmlName);
                }
            }
        }
    },
    goToRoute: function (htmlName) {
        (function(scope) {
            var url = 'views/' + htmlName,
                xhttp = new XMLHttpRequest();
            xhttp.onreadystatechange = function () {
                if (this.readyState === 4 && this.status === 200) {
                    scope.rootElem.innerHTML = this.responseText;
                }
            };
            xhttp.open('GET', url, true);
            xhttp.send();
        })(this);
    }
};
At the end you can see xhttp.open('GET'...). Because this is code I took from a tutorial, I don't want to mess with it; instead I want to select the loaded elements in a separate script.
I have a page with a container that has the ID 'app'. The loaded files are HTML, for example about.php:
<div><h1 id="abt">About</h1></div>
Because I want to select them from a separate file, I would rather not use a script like the following:
$(document).on("load", function() {
ChangeTextFunction();
});
Instead, I want to learn how I can select the elements from the request mentioned above in a separate file, e.g. $('#abt').text("Hello");
Is that somehow possible? Thanks in advance.
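One possible approach, offered only as a sketch and not taken from the original thread: a MutationObserver in the separate script can react whenever the router injects new markup into #app, without touching the tutorial code:

// in a separate script, loaded after the #app element exists:
var appRoot = document.getElementById('app');
var observer = new MutationObserver(function () {
    var abt = document.getElementById('abt');
    if (abt) {
        $(abt).text("Hello"); // the loaded element is in the DOM now and can be changed
    }
});
observer.observe(appRoot, { childList: true, subtree: true });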

downloading array of urls in chrome extension

I'm creating a Chrome extension. I have an array of URLs and I want to download them all one by one, one at a time (meaning the second download will start only after the first one has finished, etc.).
I've tried this ("links" is the array):
function doDownloads(links, iterator, max) {
    chrome.downloads.download({
        url: links[iterator],
        saveAs: false
    }, function(downloadId) {
        if (iterator < max)
            chrome.downloads.onChanged.addListener(function goToNextDL(delta) {
                if (delta.id == downloadId) {
                    chrome.downloads.onChanged.removeListener(goToNextDL);
                    if (delta.state && delta.state.current === 'complete' && iterator + 1 < max)
                        doDownloads(links, iterator + 1, max);
                }
            });
    });
}
But it doesn't work ):
Any ideas?
This should work...
var Dest = '';
for (var i = 0; i < links.length; i++) {
    Dest = 'image' + i + '.jpg';
    chrome.downloads.download({url: links[i], filename: Dest, conflictAction: 'overwrite'});
}
NB: I've added simple logic for changing the destination file name.
How's this method?
var Counter = 0; // global var
function DownloadURL(URL) {
    var DL = new XMLHttpRequest();
    DL.onload = function(evt) { var arraybuffer = DL.response; };
    // on load, increment to the next file
    DL.addEventListener('load', function(evt) {
        Counter++;
        if (Counter < Links.length) {
            DownloadURL(Links[Counter]);
        }
    });
    DL.open('GET', URL, true);
    DL.responseType = 'arraybuffer';
    DL.send();
}
Call it once to get the ball rolling...
DownloadURL(Links[Counter]);
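Note that this fetches each file into an ArrayBuffer but does not by itself save anything to disk. Purely as a sketch of my own (not part of the answer above), the onload handler could hand the fetched bytes to the downloads API via a Blob object URL; this may need to run from the extension's background page:

DL.onload = function() {
    // wrap the fetched bytes in a Blob and let Chrome save it
    var blob = new Blob([DL.response]);
    chrome.downloads.download({
        url: window.URL.createObjectURL(blob), // window.URL, since the URL parameter above shadows the global
        filename: 'image' + Counter + '.jpg'
    });
};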
After a few more tries, I got the solution to my problem.
Here is the correct code:
function doDownloads(links, iterator, max) {
    chrome.downloads.download({
        url: links[iterator],
        saveAs: false
    }, function(downloadId) {
        if (iterator < max) {
            var goToNextDL = function(delta) {
                if ((delta.id == downloadId) && (delta.state) && (delta.state.current != "in_progress")) {
                    chrome.downloads.onChanged.removeListener(goToNextDL);
                    // only recurse while there are links left
                    if (delta.state.current === 'complete' && iterator + 1 < max)
                        doDownloads(links, iterator + 1, max);
                }
            };
            chrome.downloads.onChanged.addListener(goToNextDL);
        }
    });
}
var array_of_links = [...];
doDownloads(array_of_links, 0, array_of_links.length);
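One detail worth checking if the calls above fail silently: the chrome.downloads API is only available when the extension declares the "downloads" permission in its manifest. A minimal sketch (the name and version here are placeholders):

{
  "name": "My downloader extension",
  "version": "1.0",
  "manifest_version": 2,
  "permissions": ["downloads"]
}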

Function infinite loop and ignoring parts of other functions

Okay, so basically all I wrote this script to do is clear a textbox and click a button if the textbox is full, and refresh the page if it's not.
I can successfully clear the textbox when it's full and refresh the page when it's not, but as soon as I try to use my clickButton function it kicks into an infinite loop and skips the if() in clrLog.
function addFunction(func, exec) {
    var script = document.createElement('script');
    script.textContent = '-' + func + (exec ? '()' : '');
    document.body.appendChild(script);
    document.body.removeChild(script);
}
function clickButton(val) {
    buttons = document.getElementsByTagName('INPUT');
    for (var i = 0; i < buttons.length; i++) {
        if (buttons[i].type == 'submit' && buttons[i].value == val) {
            buttons[i].click();
        }
    }
}
function clrLog() {
    var elements = [];
    elements = document.getElementsByClassName('logarea');
    if (elements.log.value === '')
        setTimeout(function () {
            location.reload();
        }, 5000);
    for (var i = 0; i < elements.length; i++) {
        elements[i].value = '';
    }
    clickButton('Edit log file');
}
function main() {
    addFunction(clrLog(), true);
}
main();
main();
I found out that I could avoid using a for loop by using document.querySelector(); instead - so much easier :)
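For illustration only (this rewrite is my own sketch, not code from the answer), clickButton collapses to a single attribute selector:

function clickButton(val) {
    // select the submit button by its value attribute instead of looping over every input
    var button = document.querySelector('input[type="submit"][value="' + val + '"]');
    if (button) {
        button.click();
    }
}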

Fadein callback not working properly with ajax-loaded json data

This is a design portfolio page. On load, the JSON data is retrieved via Ajax, and one of the keys is used to generate a list of '.project-link' elements (no project is displayed on load, and the project images are loaded only when a project is selected; see the showProject function). My question concerns the fade-in/fade-out: the images are still painting onto the screen after the fade-in completes, even though the project content is defined and loaded within the fadeOut callback. Can someone please enlighten me as to how I can tweak this so that the fadeIn won't run until the project images are loaded?
Thank you, svs.
function ajaxReq() {
var request = new XMLHttpRequest();
return request;
}
function makeLinks(projects) { // result = getJsonData > request.responseText
var projectList = document.getElementById("project-list");
for (var project in projects) {
if (projects[project].status !== "DNU") {
var projectId = projects[project].id;
var listItem = "<li><a class=\"project-link\" id=\""+projects[project].project+"\" href=\"#\">" + projects[project].project + "</a></li>";
projectList.innerHTML += listItem;
} // if !DNU
}
// ADD EVENT LISTENERS
var projLink = document.getElementsByClassName("project-link");
for (var i = 0; i < projLink.length; i++) {
var projId = projLink[i].id;
//projLink[i].dataset.projIx = [i];
projLink[i].addEventListener("click", showProject, false);
}
var showNext = document.getElementById("show-next");
var showPrev = document.getElementById("show-previous");
showNext.addEventListener("click", showProject, false);
showPrev.addEventListener("click", showProject, false);
// ARROW KEYS [not invoking the showProject function]
$(document).keydown(function(e) {
if(e.which==37) { // LEFT arrow
$(showPrev).click(showProject);
console.log("previous");
} else
if(e.which==39) { // RIGHT arrow
$(showNext).click(showProject);
console.log("next");
}
})
function showProject(projId) {
var intro = document.getElementById("intro");
if (intro) {
intro.parentNode.removeChild(intro);
}
projId.preventDefault();
var projLinks = document.getElementsByClassName("project-link"); // array
var selIx = $(".selected").index();
// ###### CLICK PREVIOUS/NEXT ######
if (this.id === "show-previous" || this.id === "show-next") {
// 1a. if nothing is .selected
if (selIx < 0) {
if (this.id === "show-previous") {
var selIx = projLinks.length-1;
}
else if (this.id === "show-next") {
var selIx = 0;
}
}
// 1b. if .selected:
else if (selIx > -1) {
if (this.id === "show-previous") {
if (selIx === 0) { // if # first slide
selIx = projLinks.length-1;
}
else {
selIx --;
}
}
else if (this.id === "show-next") {
if (selIx === projLinks.length-1) { // if # last slide
selIx = 0;
}
else {
selIx ++;
}
}
}
var selProjLi = projLinks[selIx]; // => li
} // click previous/next
// ###### CLICK .project-link ######
else if (this.id !== "show-previous" && this.id !== "show-next") {
var selIx = $(this).closest("li").index();
}
// FADE OUT, CALLBACK: LOAD NEW PROJECT
$("#project-display").fadeTo(450, 0.0, function() {
// ###### ALL ######
$(".selected").removeClass("selected");
var projId = projLink[selIx].id;
var selProjLi = projLink[selIx].parentElement;
selProjLi.className = "selected";
var projectDisplay = document.getElementById("project-display");
// set vars for the project display elements:
var projName = document.getElementById("project-name"); // h3
var projTools = document.getElementById("project-tools");
var projNotes = document.getElementById("project-notes");
var projImages = document.getElementById("project-images");
// disappear the metadata elements 'cause sometimes they'll be empty
projTools.style.display = "none";
projNotes.style.display = "none";
testimonial.style.display = "none";
for (var project in projects) { // 'Projects array' -> project
if (projects[project].project === projId) {
var activeProj = projects[project];
projName.innerHTML = activeProj.project;
// maintain centered display of project-metadata: check for a value, else the element remains hidden
if(activeProj["tools used"]) {
projTools.style.display = "inline-block";
projTools.innerHTML = activeProj["tools used"];
}
if(activeProj.notes) {
projNotes.style.display = "inline-block";
projNotes.innerHTML = activeProj.notes;
}
if(activeProj.testimonial) {
testimonial.style.display = "inline-block";
testimonial.innerHTML = activeProj.testimonial;
}
// HOW TO ENSURE THESE ARE ALREADY LOADED ***BEFORE #project-display FADES IN***
projImages.innerHTML = "";
for (var i = 0; i < activeProj.images.length; i++ ) {
projImages.innerHTML += "<img src=\"" + activeProj.images[i].url + "\" />";
}
} // if project id ...
} // for (var obj in data)
}) // fade out
$("#project-display").fadeTo(600, 1.0);
} // showProject
} // makeLinks
function getJsonData() {
var request = ajaxReq();
request.open("GET", "/json/projects.json", true);
request.setRequestHeader("content-type", "application/json");
request.send(null);
request.onreadystatechange = function() {
if (request.readyState === 4) {
if (request.status === 200) {
//makeLinks(request.responseText);
var projects = JSON.parse(request.responseText);
var projects = projects["Projects"];
makeLinks(projects); // makeLinks = callback
return projects;
}
}
} // onreadystatechange
} // getJsonData
getJsonData(makeLinks);
You can add a load event to the images and run the fade-in once all of the images have loaded.
Since you need multiple images to finish loading, I chose to keep track of which loads are complete using an array of jQuery.Deferred() objects. Once all the Deferreds are resolved, you can run the fade animation.
Here's a function that should work:
function fadeWhenReady(projImages, images) {
    projImages.innerHTML = "";
    var loads = []; //create holding bin for deferred calls
    //create images and attach load events
    for (var i = 0; i < images.length; i++) {
        (function () {
            var deferred = $.Deferred(); //one deferred per image, kept in its own scope
            var img = $("<img src=\"" + images[i].url + "\" />");
            img.on("load", function() { deferred.resolve(); }); //after image load, resolve deferred
            loads.push(deferred.promise()); //add the deferred promise to the array
            img.appendTo(projImages); //append image to the page
        })();
    }
    //when all deferreds are resolved, then apply the fade
    $.when.apply($, loads).done(function() {
        $("#project-display").fadeTo(600, 1.0);
    });
}
In your showProject function, remove the call to $("#project-display").fadeTo(600, 1.0); and replace the lines below with a call to fadeWhenReady (e.g. fadeWhenReady(projImages, activeProj.images)):
projImages.innerHTML = "";
for (var i = 0; i < activeProj.images.length; i++ ) {
projImages.innerHTML += "<img src=\"" + activeProj.images[i].url + "\" />";
}
P.S. You are using a strange mix of jQuery and vanilla JavaScript. The calls to document.getElementById() don't bother me so much, but I'd certainly recommend replacing your XMLHttpRequests with jQuery.ajax().
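As a rough sketch of what that suggestion could look like (my own illustration, assuming the same /json/projects.json endpoint and response shape as above), getJsonData shrinks to a single $.getJSON call:

function getJsonData() {
    $.getJSON("/json/projects.json", function (data) {
        // same flow as before: pull out the Projects key and build the links
        makeLinks(data["Projects"]);
    });
}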

dynamic script loader in JS

How can I write a dynamic multiple-script loader with a completion handler, like Google's:
google.load("http://script1");
google.load("http://script2");
google.setOnLoadCallback(function(){});
thanks
My advice is not to bother with script loading yourself, unless you first take a look at how some frameworks do it, because there can be security risks for your application with that sort of thing. Instead, I would point you to jQuery, as it has that functionality implemented (see here).
There are open-source libraries that will ease your problem.
You can use LABjs or RequireJS.
Script loaders like LABjs and RequireJS will improve the speed and quality of your code, and they load scripts dynamically.
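For instance, a minimal RequireJS-style sketch of the pattern in the question (the URLs are the placeholders from the question, and plain non-AMD scripts may additionally need shim configuration):

require(["http://script1", "http://script2"], function () {
    // both scripts have loaded; this plays the role of setOnLoadCallback
});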
I wrote it like this:
myApp.Loader = function(){
    var queries = [];
    var q = 0;
    var p = 0;
    var started = false;
    var _callback = function(){};
    var start = function(){
        if(queries.length > 0 && !started){
            started = true;
            load(queries.shift());
        } else if(queries.length > 0 && started){
            load(queries.shift());
        } else if(queries.length == 0 && started){
            started = false;
            if(q > 0 && q == p){
                callback();
            }
        }
    };
    var load = function(fullUrl){
        $.getScript(fullUrl, function() {
            p++;
            start();
        });
    };
    var callback = function(){
        _callback();
    };
    this.setCallback = function(fnc){
        _callback = fnc;
        if(q > 0 && q == p){
            callback();
        }
    };
    this.addQuery = function(query){
        queries.push(query);
        q++;
        if(!started) {
            start();
        }
    };
    return this;
};
var Loader = new myApp.Loader();
myApp.load = function(fullUrl){
    Loader.addQuery(fullUrl);
};
myApp.setOnLoadCallback = function(fnc){
    Loader.setCallback(fnc);
};
and call it like this:
myApp.load("http://script1");
myApp.load("http://script2");
myApp.load("http://script3");
myApp.setOnLoadCallback(function(){
// complete script load handling
});
