So I am reading a local JSON file that consists of {[Object,Object,Object.....]}.
I am using the following:
$.getJSON('products.json', function (pdata) {
    for (var i = 0; i < pdata.data.length; i++) {
        AppendtoDom(pdata.data[i]);
    }
});
The above code reads the JSON objects and appends them to the DOM, but I want to load only 100 objects initially and then keep appending more on scroll.
Say there are around 1200 objects. How do I go about this?
My implementation so far:
$(function(){
    loadData();
});

function loadData(){
    $.getJSON('products.json', function (pdata) {
        var i = 0;
        function addtoDom(num){
            var limit = Math.min(i + num, pdata.data.length);
            for(; i < limit; i++){
                getInformation(pdata.data[i]);
            }
        }
        addtoDom(100);
        $('.content').jscroll({
            callback: addtoDom(100)
        });
    });
}
function getInformation(obj){
    var content = "";
    for (var i = 0; i < 4; i++) {
        content += '<li>';
        content += "<img src='" + obj.imageUrl + "' style='width:200px;height:200px'/>";
        content += '<div class="productName">' + obj.fullName + "</div>";
        content += '<div class="price">Price: ' + obj.price + "</div>";
        content += '</li>';
    }
    $("<ul class= 'view'>" + content + "</ul>").appendTo('.content');
}
A similar question I asked: How would i implement an infinite scroll in my DOM
You can put all the objects you get back from the Ajax call into a persistent variable, add the first 100 to the DOM, keep a counter of how many you've added so far, and then, upon scrolling to a certain point, add another 100, and so on.
$.getJSON('products.json', function (pdata) {
    var i = 0;
    function addMore(num) {
        var limit = Math.min(i + num, pdata.data.length);
        for (; i < limit; i++) {
            AppendtoDom(pdata.data[i]);
        }
    }
    // add the first 100
    addMore(100);
    // then set up whatever scroll detection you want here and
    // when you decide that it has scrolled enough to add some more
    // you just call addMore(100) again
});
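For example, the scroll detection could be a plain window scroll handler registered inside the getJSON callback, so that addMore stays in scope (a minimal sketch; the 200px threshold is an arbitrary choice, and a real implementation should throttle the handler):

// inside the $.getJSON callback, after the initial addMore(100):
$(window).on('scroll', function () {
    // when the viewport is within 200px of the bottom of the document, load more
    if ($(window).scrollTop() + $(window).height() >= $(document).height() - 200) {
        addMore(100);
    }
});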
In your specific implementation of the above idea, you have a mistake: you must pass a function reference as the callback. As written, callback: addtoDom(100) calls addtoDom immediately and assigns its return value (undefined) to callback. So change this:
$('.content').jscroll({
    callback: addtoDom(100)
});
to this:
$('.content').jscroll({
    callback: function() { addtoDom(100); }
});
Assign your JSON to a variable and dynamically render the objects as needed.
var json;
$.getJSON('products.json', function (pdata) {
    json = pdata;   // lowercase json, so the built-in JSON object is not overwritten
});

// Scheduling logic
AppendtoDom(json[i]);
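A slightly fuller sketch of that idea (a minimal example assuming the same pdata.data shape as in the question; the scroll wiring that calls renderNext again is left to whatever detection you use):

var json;           // filled in once the Ajax call returns
var rendered = 0;   // how many objects are already in the DOM

$.getJSON('products.json', function (pdata) {
    json = pdata.data;
    renderNext(100);        // initial batch
});

function renderNext(num) {
    if (!json) return;      // data has not arrived yet
    var limit = Math.min(rendered + num, json.length);
    for (; rendered < limit; rendered++) {
        AppendtoDom(json[rendered]);
    }
}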
Related
Can you explain the functions below:
viz = new tableau.Viz(containerDiv, url, options);

function listenToMarksSelection() {
    viz.addEventListener(tableau.TableauEventName.MARKS_SELECTION, onMarksSelection);
}

function onMarksSelection(marksEvent) {
    return marksEvent.getMarksAsync().then(reportSelectedMarks);
}

function reportSelectedMarks(marks) {
    var html = "";
    for (var markIndex = 0; markIndex < marks.length; markIndex++) {
        var pairs = marks[markIndex].getPairs();
        html += "<b>Mark " + markIndex + ":</b><ul>";
        for (var pairIndex = 0; pairIndex < pairs.length; pairIndex++) {
            var pair = pairs[pairIndex];
            html += "<li><b>Field Name:</b> " + pair.fieldName;
            html += "<br/><b>Value:</b> " + pair.formattedValue + "</li>";
        }
        html += "</ul>";  // close the list for this mark
    }
}
This function just listens for mark selection.
This one passes the selection on to reportSelectedMarks.
That one takes the marks and writes them into an HTML string as <li> elements.
The fieldName would probably be a string, and the value depends on what you are working with.
So basically these functions would be useful to dynamically print a selected mark on a graph or something like that, printing a field name and a value for it.
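For context, listenToMarksSelection() is typically called once the viz becomes interactive. A sketch of that wiring, assuming the standard onFirstInteractive option of the Tableau JavaScript API:

var options = {
    hideTabs: true,
    onFirstInteractive: function () {
        listenToMarksSelection();   // start listening once the viz is ready
    }
};
viz = new tableau.Viz(containerDiv, url, options);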
This question already has answers here:
How do I return the response from an asynchronous call?
I'm new to working with AJAX, but I've been researching it for the past two hours to help in my scenario. I haven't made any progress. :(
Regardless, my issue is that the subPages array is out of scope when I'm outside $.get(...). I've tried using when() and done() in my code, but still can't get it right.
I think the problem lies in the iterations of the for loop, since pages[i] is used in multiple sections of my code. That's why I can't use when() and done() where needed.
Here's what I have:
var subPages = [];
var containsSub = '/sites/Pages/';
var tempString = '';

// iterate through the pages array in reverse
for (var i = pages.length - 1; i >= 0; i--) {
    // grab all <a> within response text
    var getLinks = $.get(baseURL + pages[i]).then(function(responseData){
        var $response = $(responseData);
        var $links = $response.find('a');
        // push each valid link into subPages array
        $links.each(function(index, $link){
            if(this.href.indexOf(containsSub) > -1){
                subPages.push(this.href);
            }
        });
        // subPages array is loaded with the correct values
        console.log("subPages inside get: " + subPages);
    });
}

// empty here
console.log("subPages outside all: " + subPages);
Edit: with the addition of the then chain in the code below, I'm getting undefined for subPages[i]:
var subPages = [];
var containsSub = '/sites/Pages/';
var tempString = '';

// iterate through the pages array in reverse
for (var i = pages.length - 1; i >= 0; i--) {
    // grab all <a> within response text
    var getLinks = $.get(baseURL + pages[i]).then(function(responseData){
        var $response = $(responseData);
        var $links = $response.find('a');
        // push each valid link into subPages array
        $links.each(function(index, $link){
            if(this.href.indexOf(containsSub) > -1){
                subPages.push(this.href);
                //console.log("<a href='"+ this.href + "'>" + this.href + "</a>" + " <br>");
            }
        });
        console.log("subPages inside get: " + subPages);
    })
    .then(function(){
        console.log("subPages outside all: " + subPages);
        // print bold for current main page
        tempString += "<strong><a href='"+ baseURL + pages[i] + "'>" + pages[i].substr(27, pages[i].length) + "</a></strong>" + " <br>";
        for(var i = 0; i < subPages.length - 1; i++){
            console.log("<a href='"+ subPages[i] + "'>" + subPages[i] + "</a>" + " <br>");
        }
        subPages = [];
        pages.splice(i, 1);
    })
}
11/25 Edit: I fixed the issue with my answer below by removing some complications and deciding that a synchronous AJAX request was more logical.
var subPages = [];
var containsSub = '/sites/it/InfoProtect/Pages/';
var tempString = '';

// iterate through the pages array in reverse
for (var i = pages.length - 1; i >= 0; i--) {
    // grab all <a> within response text
    var getLinks = $.ajax({
        url: baseURL + pages[i],
        async: false,
        success: function(responseData){
            var $response = $(responseData);
            var $links = $response.find('a');
            // push each valid link into subPages array
            $links.each(function(index, $link){
                if(this.href.indexOf(containsSub) > -1){
                    subPages.push(this.href);
                }
            });
        }
    });
}
Your for loop immediately executes all of its iterations. The subPages array is only populated after your final console.log has already run.
$.get is asynchronous, so after calling it, the code inside .then is not immediately executed. The loop continues to its next iteration, finally exits, and shows an empty subPages array, because your data hasn't returned yet.
Here's a quick idea of how to wait for your ajax calls, prior to logging the array (untested):
var ajaxCalls = [];

// iterate through the pages array in reverse
for (var i = pages.length - 1; i >= 0; i--) {
    // grab all <a> within response text
    var getLinks = $.get(baseURL + pages[i]).then(function(responseData){
        var $response = $(responseData);
        var $links = $response.find('a');
        // push each valid link into subPages array
        $links.each(function(index, $link){
            if(this.href.indexOf(containsSub) > -1){
                subPages.push(this.href);
            }
        });
        // subPages array is loaded with the correct values
        console.log("subPages inside get: " + subPages);
    });
    ajaxCalls.push(getLinks);
}

$.when.apply(null, ajaxCalls).then(function() {
    // not empty here
    console.log("subPages outside all: " + subPages);
});
"issue is that the subPages array is out of scope when I'm outside $.get(...)"
$.get() returns an asynchronous response. Try chaining .then() to $.get() to maintain the same scope as the initial .then():
var getLinks = $.get(baseURL + pages[i]).then(function(responseData){
    // process responseData here, as in the initial .then()
})
.then(function() {
    console.log("subPages outside all: " + subPages);
})
Try creating an IIFE within the for loop to pass i, e.g.:
var pages = ["a", "b", "c"];
for(var i = pages.length -1; i >= 0; i--) {
(function(j) {
var dfd = $.Deferred(function(d) {
setTimeout(function() {
d.resolve(j)
}, Math.random() * 1000)
}).promise()
.then(function(n) {
console.log("first", n, pages[n]);
return n
}).then(function(res) {
console.log("second", res, pages[res])
})
}(i))
}
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
Hacker News recently released an API that I am using to display what the current top ten items are on Hacker News. I am running into some problems.
When I run the code below, the order of the items on the frontpage are inaccurate, jumping from the second one in the frontpage to the fourth, to the first, to the fifth, to the third and so on. Running the code again results in a slightly different order again.
$.getJSON('https://hacker-news.firebaseio.com/v0/topstories.json', function(json) {
    var convoText = '<ol>';
    for (var i = 0; i < 10; i++) {
        (function(i) {
            $.getJSON('https://hacker-news.firebaseio.com/v0/item/' + json[i] + '.json', function(json2) {
                convoText += '<li>' + json2.title + '</li>';
                if (i === 9) {
                    convoText += '</ol>';
                    addConvo(convoText);
                }
            });
        })(i);
    }
});
I understand that this is an effect of JavaScript's asynchronous nature. How can I fix it?
The knack is to create and append a <li><a></a></li> structure synchronously in the loop - thereby establishing the correct order - then populate it asynchronously with json2 data when it arrives.
$.getJSON('https://hacker-news.firebaseio.com/v0/topstories.json', function(json) {
    var $ol = $('<ol/>').appendTo(...); //wherever
    for (var i = 0; i < Math.min(json.length, 10); i++) {
        (function(i) {
            var $a = $('<li><a></a></li>').appendTo($ol).find('a');
            $.getJSON('https://hacker-news.firebaseio.com/v0/item/' + json[i] + '.json', function(json2) {
                $a.attr('href', json2.url).text(json2.title);
            });
        })(i);
    }
});
You will have to complete the .appendTo(...) line. I don't know from the question where the <ol>...</ol> is appended.
You can use jQuery's $.when for that:
$.getJSON('https://hacker-news.firebaseio.com/v0/topstories.json', function(json) {
    var requests = [];
    for (var i = 0; i < 10; i++) {
        requests.push($.getJSON('https://hacker-news.firebaseio.com/v0/item/' + json[i] + '.json'));
    }
    $.when.apply($, requests).done(function() {
        var results = [].slice.call(arguments);
        var list = results.map(function(arr) {
            return '<li>' + arr[0].title + '</li>';
        });
        var convoText = '<ol>' + list.join('') + '</ol>';
        console.log(convoText);
    });
});
There are a few ways to fix this. The easiest is, instead of appending to convoText, to use an array and set its elements by index as the data arrives, like data[i] = json2;. Then, when all your data is fetched, join the array, as sketched below.
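A minimal sketch of that indexed-array idea, reusing the question's addConvo helper (the remaining countdown is one way of detecting that all ten responses have arrived):

$.getJSON('https://hacker-news.firebaseio.com/v0/topstories.json', function(json) {
    var data = [];          // each result lands in its own slot, so order is preserved
    var remaining = 10;
    for (var i = 0; i < 10; i++) {
        (function(i) {
            $.getJSON('https://hacker-news.firebaseio.com/v0/item/' + json[i] + '.json', function(json2) {
                data[i] = '<li>' + json2.title + '</li>';
                if (--remaining === 0) {
                    addConvo('<ol>' + data.join('') + '</ol>');
                }
            });
        })(i);
    }
});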
A more structural fix would be to rearchitect your loop as a collection of promises, and construct your HTML when they have all resolved (what #xat was alluding to above).
While trying out jQuery, I've hit what is probably a newbie mistake, but I cannot seem to find the solution. This is the code:
$.get("index.html", function() {
var i = 0;
for (; i < 3; i++)
{
var lDiv = document.createElement('div');
lDiv.id = 'body-' + i;
document.getElementById('body').appendChild(lDiv);
$.get('index.html', function(data) {
lDiv.innerHTML = "<p>Hello World " + i + "</p>";
});
}
});
The output seems to be
<div id='body-0'></div>
<div id='body-1'></div>
<div id='body-2'>
<p>Hello World 3</p>
</div>
I expected the lDiv.innerHTML = ... code to be executed for each i, but apparently it only executes for the last i. What am I overlooking?
This happens because the loop completes (i is 3, and lDiv refers to the last div created) before any of the callbacks fire.
#thecodeparadox's solution works, but it serializes the HTTP requests (makes them fire one at a time). The following allows the requests to execute in parallel, and is thus quicker:
for (var i = 0; i < 3; i++)
{
    var lDiv = document.createElement('div');
    lDiv.id = 'body-' + i;
    document.getElementById('body').appendChild(lDiv);

    $.get('index.html', function(i, lDiv) { // the current iteration's `i` and `lDiv` are captured...
        return function(data) {
            lDiv.innerHTML = "<p>Hello World " + i + "</p>";
        }
    }(i, lDiv)); // ...by passing them as arguments to the self-executing function
}
As $.get() is asynchronous, you need to execute your append and the next call within $.get()'s success callback:
var i = 0;

function recursiveLoad() {
    if (i == 3) return;
    var lDiv = document.createElement('div');
    lDiv.id = 'body-' + i;
    document.getElementById('body').appendChild(lDiv);
    $.get('index.html', function(data) {
        lDiv.innerHTML = "<p>Hello World " + i + "</p>";
        i++;
        recursiveLoad();
    });
}

// initial call
recursiveLoad();
I made a web app that loads images using jQuery, Ajax, and JSON. I got it to work in Firefox, but alas, Safari and Chrome remain stubborn.
It has to do with a "race condition" where images don't load quickly enough, so I have a load event as well as a trigger event to wait until all images are loaded before appending the html to a container div.
Here is the link to the page that works in FF:
http://chereecheree.com/dagworthy/style.html
And some code:
var aSectionImages = new Array();
//count how many images are in a section:
var aImagesCount = new Array();
//count how many images of a particular section have been loaded:
var aImagesLoaded = new Array();
var htmlString;
var jsonStyleImages = "images.json";
var jsonNavImages = "imagesNav.json";
//container div:
var scrollableArea = $("#scrollableArea");

$.getJSON(jsonNavImages, getNavIcons);
$.getJSON(jsonStyleImages, makeScroller);
//trigger this function on load event:
function imageLoaded(oImg){
    //get the name of the section of images:
    var locSectionId = (imageInSection(oImg.src)).replace(/\s|'/g, "_");
    //get the file name of the current image
    var locFileName = getFileName(oImg.src);
    if (aImagesLoaded[locSectionId]===undefined){
        aImagesLoaded[locSectionId] = new Array();
    }
    //check if it has already been loaded by seeing if it exists in the array of loaded images:
    var inArray = false;
    inArray = $.inArray(locFileName, aImagesLoaded[locSectionId]);
    if (inArray == -1) {
        //array.push returns the new length of the array:
        var tempLength = aImagesLoaded[locSectionId].push(locFileName);
    }
    if (tempLength==aImagesCount[locSectionId]){
        htmlString += "</div>";
        scrollableArea.append(htmlString);
        //after the html has been appended, force it to be 1000 px -- totally unstable hack.
        scrollableArea.width(1000);
    }
}
//helper function to get section name of a loading image:
function imageInSection(src){
    var resultId = false;
    var locFileName = getFileName(src);
    for (var k = 0; k < aSectionImages.length; k++){
        for (var j = 0; j < aSectionImages[k].images.length; j++){
            tempSrc = aSectionImages[k].images[j].src.split("/");
            tempFileName = tempSrc[tempSrc.length-1];
            if (tempFileName == locFileName) {
                resultId = aSectionImages[k].id;
            }
        }
    }
    return resultId;
}

//helper function to get the file name of a loading image:
function getFileName(href){
    var resultFileName = false;
    var locSrc = href.split("/");
    resultFileName = (locSrc[locSrc.length-1]);
    return resultFileName;
}
//function called when ajax request is successful -- it puts together the html string that will be appended to the container div
function makeScroller(data){
    aSectionImages = data;
    for (ii = 0; ii < aSectionImages.length; ii++){
        var locData = aSectionImages[ii];
        var locId = locData.id.replace(/\s|'/g, "_");
        aImagesCount[locId] = locData.images.length;
        htmlString = "<div id=\"" + locId + "\">";
        for (jj = 0; jj < locData.images.length; jj++){
            var oImage = new Image();
            var locImage = locData.images[jj];
            $(oImage)
                .load(function(){
                    imageLoaded(oImage);
                })
                .attr("src", locData.images[jj].src);
            if (oImage.complete && oImage.naturalWidth !== 0){
                $(oImage).trigger("load");
            }
            //alert (oImage.width);
            locImage.id ? locImage.id = " id=\""+locImage.id+"\" " : locImage.id = "";
            htmlString += "<img height=\"" + locImage.height + "\"" + " width=\"" + oImage.width + "\"" + locImage.id + " src=\"" + locImage.src + "\" />";
        }
    }
}
But it's probably best to look at it online, as there is a plugin that's used.
Anyhow, the computed style for the container div sometimes shows up as "0px", which is why I'm forcing it to "1000px", but that hack is not very stable.
Any ideas would be greatly appreciated.
Thanks!
--Daniel.
In this section:
$(oImage)
    .load(function(){
        imageLoaded(oImage);
    })
    .attr("src", locData.images[jj].src);
if (oImage.complete && oImage.naturalWidth !== 0){
    $(oImage).trigger("load");
}
image.naturalWidth isn't available in all browsers (so undefined !== 0 won't be a correct check), but you don't need to use it; just rearrange the code, like this:
$(oImage).one('load', function(){ imageLoaded(this); })
    .attr("src", locData.images[jj].src)
    .each(function() {
        if (this.complete) $(this).trigger("load");
    });
This uses .each() to loop through after setting the src and triggers the load event in case it came from cache (this.complete being instantly true indicates this). The .one() call ensures the load event only fires once, whether from cache or a normal load.