I'm trying to avoid making two ajax calls by using .ajax.params() to get the last set of ajax parameters and returning the table data from my first call.
I then pass in my own json to datatables following this pattern
datatable.clear();
datatable.rows.add(newDataArray);
datatable.draw();
from this question.
However, my table has ajax set, so when draw() is called another ajax call is fired, which defeats the point of passing in the data myself. What I need is a way to suppress the ajax call while redrawing the table.
An alternative would be to write my own ajax handling and manually add the data into DataTables as above; however, I think I would also have to create the ajax parameters myself, which would be a pain.
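For reference, this is roughly the pattern I have in mind, and where it goes wrong (url and the response shape here are just placeholders for my real setup):
var params = table.ajax.params(); // parameters submitted with the last ajax request
$.ajax({
    url: url,          // placeholder for my real endpoint
    data: params,
    dataType: 'json'
}).done(function (json) {
    table.clear();
    table.rows.add(json.data); // assumes the rows live under json.data
    table.draw();              // this draw is what fires the extra ajax call I want to suppress
});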
I have created the following datatables plugin that makes it possible to do an ajax load with custom ajax settings for just one time.
var __reload = function ( settings, holdPosition, callback ) {
    // Use the draw event to trigger a callback
    if ( callback ) {
        var api = new jQuery.fn.dataTable.Api( settings ); // API instance for this table

        api.one( 'draw', function () {
            callback( api.ajax.json() );
        } );
    }

    if ( settings.oApi._fnDataSource( settings ) == 'ssp' ) {
        settings.oApi._fnReDraw( settings, holdPosition );
    }
    else {
        settings.oApi._fnProcessingDisplay( settings, true );

        // Cancel an existing request
        var xhr = settings.jqXHR;
        if ( xhr && xhr.readyState !== 4 ) {
            xhr.abort();
        }

        // Trigger xhr
        settings.oApi._fnBuildAjax( settings, [], function( json ) {
            settings.oApi._fnClearTable( settings );

            var data = settings.oApi._fnAjaxDataSrc( settings, json );
            for ( var i=0, ien=data.length ; i<ien ; i++ ) {
                settings.oApi._fnAddData( settings, data[i] );
            }

            settings.oApi._fnReDraw( settings, holdPosition );
            settings.oApi._fnProcessingDisplay( settings, false );
        } );
    }
};
jQuery.fn.dataTable.Api.register( 'ajax.loadOnce()', function ( ajax, callback, resetPaging ) {
    return this.iterator( 'table', function ( ctx ) {
        // temporarily swap in the custom ajax settings for this one load
        var store = ctx.ajax;
        ctx.ajax = ajax;

        __reload( ctx, resetPaging===false, callback );

        ctx.ajax = store;
    } );
} );
This makes it possible to combine the DataTables parameters with custom data and a new url, like so:
var ajax = {
    url: url,
    data: function (d) {
        d.value = value;
    }
};
table.ajax.loadOnce(ajax);
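The callback and resetPaging arguments follow the same pattern as ajax.reload(), so a one-off load that keeps the current paging position would look something like this (the handler body is just illustrative):
table.ajax.loadOnce(ajax, function (json) {
    console.log('one-off load complete', json);
}, false); // resetPaging === false keeps the current paging position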
I had a similar issue. The easiest workaround I found was to temporarily disable the AJAX and server-side processing, make your changes, and then reset the values, as shown below:
function set_datatable_ajax_processing(table, bool) {
    table.settings()[0].oFeatures.bServerSide = bool;
    table.settings()[0].ajax = bool;
}
set_datatable_ajax_processing(table, false)
// table edits and/or draw done here ...
set_datatable_ajax_processing(table, true)
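For example, applied to the clear/rows.add/draw pattern from the question (newDataArray being your own data):
set_datatable_ajax_processing(table, false);
table.clear();
table.rows.add(newDataArray); // no ajax request is fired while server-side processing is off
table.draw();
set_datatable_ajax_processing(table, true);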
I've spent quite a while trying to figure this out with various iterations of code, but with no luck. Coming from a PHP background, I am new to JavaScript.
Assume an array of three patches: patch1, patch2, patch3.
What I'm trying to achieve is:
1. an ajax call to the same php script for each patch, but each call must be made only after the previous call is completed
2. after all 3 are complete, an ajax call to a separate php script is made
Point 2 is working fine; point 1 is not.
Below is my code: the myAjaxInitialData function (and the underlying php script) is being called simultaneously for all 3 patches, rather than waiting for each to complete. myAjaxGetSRCount is, correctly, not being called until all the patches are complete.
<body onload="initialData(0)">
<script>
function initialData(i) {
    var patches = [<?php echo $jsPatchArray ?>];
    var x = patches.length - 1;
    var divId = "#initialData-patch-" + i;
    var script = "ajax_initial_data.php";
    var dataVar = "patch";
    var data = patches[i];
    if ( i != x) {
        i++;
        $.when(myAjaxInitialData(divId,script,dataVar,data)).then(initialData(i));
    } else {
        $.when(myAjaxInitialData(divId,script,dataVar,data)).then(myAjaxGetSRCount);
    }
}

function myAjaxInitialData(divId,script,dataVar,data) {
    return $.ajax({
        type: "GET",
        url: script,
        data: {patch:data},
        success: function( response ) {
            $( divId ).html( response );
        }
    });
}

function myAjaxGetSRCount() {
    document.getElementById('srCount').innerHTML="Retrieving SR Counts..";
    $.ajax({
        type: "GET",
        url: "ajax_sr_count.php",
        success: function( response ) {
            $( "#srCount" ).html( response );
        }
    });
}
</script>
Your problem seems to be here:
$.when(myAjaxInitialData(divId,script,dataVar,data)).then(initialData(i));
then() takes a callback, i.e. a function. initialData(i) is called immediately and doesn't return anything, so you are passing undefined to then(). If you mean to call initialData after this ajax request, you need to wrap it in a parameter-less function.
$.when(myAjaxInitialData(divId,script,dataVar,data)).then(function() { initialData(i); });
You should also be very aware that the value of i will be the value at the time of the callback. When closing over iterator variables, you should capture the value you expect before you create the callback. I.e.
if (i != x) {
    i++;
    var j = i;
    $.when(myAjaxInitialData(divId,script,dataVar,data)).then(function() { initialData(j); });
}
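Putting both fixes together, initialData would look something like this (an untested sketch using the same names as your code):
function initialData(i) {
    var patches = [<?php echo $jsPatchArray ?>];
    var x = patches.length - 1;
    var divId = "#initialData-patch-" + i;
    var script = "ajax_initial_data.php";
    var dataVar = "patch";
    var data = patches[i];
    if (i != x) {
        var j = i + 1; // capture the next index before the request completes
        $.when(myAjaxInitialData(divId, script, dataVar, data)).then(function() { initialData(j); });
    } else {
        $.when(myAjaxInitialData(divId, script, dataVar, data)).then(myAjaxGetSRCount);
    }
}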
I know my question is marked as a duplicate, but the given answer uses async: false. I don't want to force synchronous requests. How do I maintain the async ajax call sequence?
I don't need to replace the content. I need to append the svgs one after another, in sequence.
I am appending 5 svg elements to a div. All the svgs come from ajax calls. The issue is the order of those svgs: every time they are appended in a different order. I want to maintain their order. Please find my code below:
FlagRow.DEFAULTS = {
    flagOrder: [
        Enums.flagType.INDIA,
        Enums.flagType.USA,
        Enums.flagType.UK,
        Enums.flagType.FRANCE,
        Enums.flagType.GERMANY
    ]
}

var container = $(document.createElement("div"));
var topic = new Array();

for (var key in this.options.flagOrder) {
    topic.push(this.options.flagOrder[key]);
}

var appendFlag = function (flag) {
    console.log(flag);
    var svgDiv = $(document.createElement("div"));
    $(svgDiv).addClass('svgDiv');
    var importedSVGRootElement = document.importNode(flag.documentElement, true);
    $(importedSVGRootElement).attr('viewBox', '0 0 100 125');
    svgDiv.append(importedSVGRootElement);
    container.append(svgDiv);
}

$.each(topic, function (i, val) {
    $.when(/* ajax call to get flag svg */).done(function (flag) { appendFlag(flag); });
});
// api call to get flag svg
var deferred = $.Deferred();
$.ajax({
    url: url,
    type: 'get',
    data: '',
    dataType: 'xml',
    timeout: 300000,
    success: function (data) {
        deferred.resolve(data);
    },
    error: function (e) {
        console.log(':::error in flag:::', e);
    },
    beforeSend: function (xhr) {
        xhr.setRequestHeader("Authorization", 'myapikey');
    }
});
Here the flag svgs come back in a different order every time. I want to display them in the order of the enum, so I tried it with $.when().done(), but it's not working as per my requirement.
How do I maintain the order of the appended svgs coming via an ajax call?
You can use async: false to mimic what you tried to do with Deferred, but since you know the order at the moment you issue your ajax requests, using placeholders, as the duplicate question suggests (for some reason they re-opened this...), is your best bet.
function getAllTheFlags() {
    for( var i = 0; i < 5; i++ ) {
        insertPlaceHolder( i ); // inserts <div id="placeholder-i"></div> at desired location
        insertFlag( i );
    }
}

function insertFlag( i ) {
    $.ajax( { ... } ).success( function( data ) {
        var svgDiv = $(document.createElement("div"));
        $(svgDiv).addClass('svgDiv');
        // data is the svg document returned by the ajax call
        var importedSVGRootElement = document.importNode(data.documentElement, true);
        $(importedSVGRootElement).attr('viewBox', '0 0 100 125');
        svgDiv.append(importedSVGRootElement);
        $( '#placeholder-' + i ).replaceWith( svgDiv );
    } );
}
The separate insertFlag(..) function is mandatory, as you need to capture the value of i for each request.
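insertPlaceHolder itself can be as simple as something like this (assuming the flags end up inside the container element from your code); since the placeholders are created synchronously in loop order, each flag lands in its own slot no matter which request finishes first:
function insertPlaceHolder( i ) {
    container.append( '<div id="placeholder-' + i + '"></div>' );
}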
You cannot expect async ajax calls to finish in the order they were called. But you could wrap each call in a function that takes the target element as a parameter, which you can then access in your ajax callback.
function fetchContent(element, url){
    $.ajax({
        url: url,
        success: function(data) {
            element.whatever(...);
        }
    });
}
In your code you then create a div, or find an existing one, and call fetchContent, passing that element as a parameter. Even if your ajax calls don't finish in the order they were made, the content will be added to the right element.
I think it should work.
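For illustration, wiring it up could look something like this, reusing the container and topic variables from your code (flagUrl() is a made-up helper for the per-flag url):
$.each(topic, function (i, flagType) {
    var svgDiv = $(document.createElement("div")).addClass('svgDiv');
    container.append(svgDiv);                // the divs are appended now, in enum order
    fetchContent(svgDiv, flagUrl(flagType)); // each response fills its own div whenever it arrives
});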
I'm an old C programmer and with JavaScript I always struggle to populate my data following ajax calls; i.e. I always resort to using global references. I'd rather be able to pass in the objects I need to update. Here's one example of what I do now - 'app' is the global (I'd have used a pointer in C :))
treeMapp.login = function ( dialog_div, form_div, server_call ) {
    // validate the fields
    if ( 0 ) {
    }
    else {
        // send to server & parse JSON response (single line)
        var jqxhr =
            $.getJSON( server_call,
                $( "#" + form_div ).serialize() )
            .done( function( data, status ) {
                if( status == 'success' ) {
                    // hack!?
                    app.user.username = data.username;
                    app.user.organisation = data.organisation;
                    app.user.loggedIn = true;

                    //close the dialog
                    $( '#' + dialog_div ).dialog('close');
                }
                else {
                    // login failed
                    alert( "login failed!" );
                }
            })
            .fail( function() {
                alert( "login: server error" );
            }); // end var jqxhr =
    } // end else (field validation ok)
}; // end treeMapp.login()
What's the best way of updating passed in parameters?
thanks
mini
You can pass app as an argument to your treeMapp.login function; within its scope it would then be local.
treeMapp.login = function ( dialog_div, form_div, server_call, app )
You could assign the data object returned by your jQuery result to app.user, thus avoiding the need for element by element assignment.
i.e. app.user = data
However, normally you either ensure the global object can initialise itself through a method, or you pass a reference to the global object to the method so it can initialise it. Direct assignment to global variables is (with a few exceptions) poor programming in JavaScript, as in any other language.
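The call site then passes the object in explicitly, e.g. (the ids and url here are just placeholders):
treeMapp.login( 'loginDialog', 'loginForm', '/login.json', app );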
UPDATE: the following shows an amalgamation of the answers...
treeMapp.login = function ( dialog_div, form_div, server_call, theapp ) {
    var app = theapp; // may be needed for scope issue..

    // validate the fields
    if ( 0 ) {
    }
    else {
        // send to server & parse JSON response (single line)
        var jqxhr =
            $.getJSON( server_call,
                $( "#" + form_div ).serialize() )
            .done( function( data, status ) {
                if( status == 'success' ) {
                    // not a hack
                    $.extend( app.user, data, { loggedIn: true } );

                    //close the dialog
                    $( '#' + dialog_div ).dialog('close');
                }
                else {
                    // login failed
                    alert( "login failed!" );
                }
            })
            .fail( function() {
                alert( "login: server error" );
            }); // end var jqxhr =
    } // end else (field validation ok)
}; // end treeMapp.login()
Consider a webpage with a lot of XHR calls to the server, and an iframe which again contains a lot of XHR calls to the server.
Many of these calls are the same (redundant). I do have a single communication interface (i.e. a set of methods in a javascript object).
How can I optimize the server calls? Can we cache responses? (I can invalidate a stored response when some action happens which may change it.) The cache should also be cleared after a page refresh. Is there any such component/technique available?
Regards,
Nachiket
Some form of memoization.
NOTE: you will have to change the following code to accommodate your XHR implementation.
I had to make assumptions since you offered no code.
// keep the cache, queue and pending flags outside the wrapper so they survive between calls
var cacheXHRWrapper = (function () {
    var cache = {};
    var queued = {};
    var pending = {};

    return function ( url , handler ) {
        if ( cache[ url ] ) {
            handler( cache[ url ] );
        } else {
            queued[ url ] || ( queued[ url ] = [] ); // I know, call me lazy.
            queued[ url ].push( handler );

            if ( !pending[ url ] ) {
                // might want to adjust this to comply to your XHR implementation
                XHR_IMPL.request( url , function ( response ) {
                    // cache response
                    cache[ url ] = response;

                    // serve all queued handlers.
                    var fn = queued[ url ].shift();
                    while ( fn ) {
                        fn( response );
                        fn = queued[ url ].shift();
                    }
                } );
                pending[ url ] = true;
            }
        }
    };
})();
Bonus: it queues request handlers (by url) for requests that are already in flight.
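Usage is then just a matter of calling the wrapper instead of the raw XHR; a second call for the same url is served from the cache, or queued behind the in-flight request (the url and handlers here are only illustrative):
cacheXHRWrapper( '/api/data', function ( response ) {
    console.log( 'first call, fetched from the server', response );
} );

cacheXHRWrapper( '/api/data', function ( response ) {
    console.log( 'second call, served from the cache (or queued behind the first)', response );
} );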
As a learning exercise I've hacked together a script for an SO feature request (for the purposes of this question please ignore the merits or otherwise of that request). In the script I've encountered a technical issue that my limited javascript knowledge can't get past and I'd appreciate suggestions on how to resolve it.
To avoid spamming the server I use some search hacks to determine the number of answers and accepted answers for a tag. This involves using window.setTimeout() to callback to a function that sends a get request for each tag, increasing the timeout on each call to stagger the requests.
To get the results in a single request involves appending &pagesize=1 to the end of the url in the get request, so that the number of pages in the results gives you the total number of results without having to make any further requests.
A side effect of this approach is that subsequent page views use &pagesize=1 and I only see a single entry. I attempt to resolve this by firing another query with &pagesize=30 to reset it afterwards, but as it is all asynchronous, the timing of the last query can result in the pagesize being either 1 or 30, depending on which request completes first. I've tried adding a further timeout and callback for this "reset" query but it hasn't really helped.
Is there a means to monitor the queries, waiting until all have been completed, then once they have all completed send the reset request? Or is there another approach that I could take?
You could make a call chain
Based on my previous idea of a ParallelAjaxExecuter, here's a SerialAjaxExecuter
$(function(){
    var se = new SerialAjaxExecuter( function( results )
    {
        console.log( results );
    }, 1000 );

    se.addRequest( $.get, 'test.php', {n:1}, function( d ){ console.log( '1 done', d ); }, 'text' );
    se.addRequest( $.get, 'test.php', {n:2}, function( d ){ console.log( '2 done', d ); }, 'text' );
    se.addRequest( $.get, 'test.php', {n:3}, function( d ){ console.log( '3 done', d ); }, 'text' );
    se.addRequest( $.get, 'test.php', {n:4}, function( d ){ console.log( '4 done', d ); }, 'text' );

    se.execute();
});
var SerialAjaxExecuter = function( onComplete, delay )
{
    this.requests = [];
    this.results = [];
    this.delay = delay || 1;
    this.onComplete = onComplete;
}

SerialAjaxExecuter.prototype.addRequest = function( method, url, data, callback, format )
{
    var self = this;
    this.requests.push( {
        "method"   : method
      , "url"      : url
      , "data"     : data
      , "format"   : format
      , "callback" : callback
    } );
    var numRequests = this.requests.length;
    if ( numRequests > 1 )
    {
        this.requests[numRequests-2].callback = function( nextRequest, completionCallback )
        {
            return function( data )
            {
                completionCallback( data );
                setTimeout( function(){ self.execute( nextRequest ); }, self.delay );
            }
        }( this.requests[numRequests-1], this.requests[numRequests-2].callback )
    }
}

SerialAjaxExecuter.prototype.execute = function( request )
{
    var self = this;
    if ( 'undefined' == typeof request )
    {
        request = this.requests[0];
        var lastRequest = this.requests[this.requests.length-1];
        lastRequest.callback = function( completionCallback )
        {
            return function( data )
            {
                completionCallback( data );
                self.onComplete( self.results );
            }
        }( lastRequest.callback )
    }

    request.method( request.url, request.data, function( r )
    {
        return function( data )
        {
            self.results.push( data );
            r.callback( data );
        }
    }( request ) )
}
I didn't originally bake in a sleep period between requests, but that has since been added via the delay argument and setTimeout.
Note: this example is littered with console.log() calls for which you need firebug, or just remove them.
I'm not sure if I fully understand the problem but why not chain the requests rather than using a setTimeout? So at the end of the response handler of one request fire off the next request.
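A rough sketch of that idea (tagUrl(), resetUrl and tagList are placeholders for your own search-hack urls and tag array):
function fetchTag( tags, i ) {
    if ( i >= tags.length ) {
        // every tag query has completed, so the reset request cannot race them
        $.get( resetUrl ); // resetUrl: your &pagesize=30 reset query
        return;
    }
    $.get( tagUrl( tags[i] ), function ( data ) { // tagUrl(): your &pagesize=1 search hack
        // ... count the answers / accepted answers from data here ...
        fetchTag( tags, i + 1 ); // only start the next request once this one has returned
    } );
}

fetchTag( tagList, 0 );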
Append &pagesize= (with the pagesize you're currently using) to every link on the page that would need it.
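For instance, something along these lines with jQuery (the selector is only a placeholder for whichever links need it):
$( 'a.needs-pagesize' ).each( function () {
    var href = $( this ).attr( 'href' );
    var sep = href.indexOf( '?' ) === -1 ? '?' : '&';
    $( this ).attr( 'href', href + sep + 'pagesize=30' );
} );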