I'm having some trouble handling multiple jQuery AJAX calls. Some of them time out, but how do I know which request a given error belongs to?
$.when(ajaxDeferred1, ajaxDeferred2)
    .done(function(result1, result2) {})
    .fail(function(response, statusText) {
        // how do I know which ajax call (ajaxDeferred1 or ajaxDeferred2) this error came from?
    });
where ajaxDeferred1 is:
$.ajax({
    method: 'GET',
    url: xxx,
    timeout: 5000,
});
I know I can handle errors by putting '.fail()' on each individual request, but is there any way to handle them in the combined way shown above?
Going further, can I handle it like this?
$.when(ajaxDeferred1, ajaxDeferred2)
    .done(function(result1, result2) {})
    .fail(function(errorForRequest1, errorForRequest2) {
        // handle error
    });
EDIT:
I think I need to make my question more clear. In the above example:
$.when(ajaxDeferred1, ajaxDeferred2)
    .done(function(result1, result2) {})
    .fail(function(errorForRequest1, errorForRequest2) {
        // if all requests have finished, but one or more of them got an error, return those errors here,
        // so I can decide which parts of the UI to show and which parts to hide because of the errors.
    });
I want to get the 'done' result when everything finishes without errors, and get the errors when everything has finished but one or more requests ran into a problem, with all of those errors passed together into '.fail()'.
You can use .always() and within it iterate over each promise and check whether it resolved or failed.
var promises = [ajaxDeferred1, ajaxDeferred2];
$.when.apply($, promises)
    .always(function() {
        $.each(promises, function(i) {
            this.done(function(result) {
                console.log('promise', i, 'resolved');
            }).fail(function(error) {
                console.log('promise', i, 'failed');
            });
        });
    });
JSFiddle demo: https://jsfiddle.net/pr45eov1/3/
I like @Miguel Mota's answer. But as an alternative: the deferred object that failed is handed back to you, so you can attach some data to it beforehand:
var dfd1 = jQuery.ajax({
    method: 'GET',
    url: 'some request that fails first',
    timeout: 1
});
dfd1.id = 1;

var dfd2 = jQuery.ajax({
    method: 'GET',
    url: 'some request that fails second',
    timeout: 1000
});
dfd2.id = 2;

jQuery.when(dfd1, dfd2)
    .done(function () { })
    .fail(function (request) {
        // id: 1
        console.log('id: ' + request.id);
    });
I have an array containing data that needs to be sent as the payload of a series of POST requests. These requests have to be made sequentially, because on my page I need to display a particular payload, then its result (the response returned after making the POST request with that payload), then the next payload, then the next result, and so on. I was wondering what the best approach would be to accomplish this. This is how I'm approaching it at the moment:
for (var i = 0; i < quotesList.length; i++) {
    var response = $.ajax({
        type: "POST",
        url: url,
        data: JSON.stringify(quotesList[i]),
        success: add_to_page,
        timeout: 300000,
        error: function() {
            console.log("Some Error!");
        },
        contentType: "application/json"
    });
}
Obviously, the issue that arises here is that there's no guarantee of getting the sequence right. I also wasn't quite sure how to keep track of the payload, since referencing it inside the success callback only gives me the last element of the list (presumably from the last request that was fired).
You can try something like the following.
let quotesListIndex = 0;

function sendAjaxCall() {
    if (quotesListIndex < quotesList.length) {
        var response = $.ajax({
            type: "POST",
            url: url,
            data: JSON.stringify(quotesList[quotesListIndex++]),
            success: function () {
                //... Add code here
                sendAjaxCall(); // Call the function again for the next request
            },
            timeout: 300000,
            error: function () {
                console.log("Some Error!");
            },
            contentType: "application/json"
        });
    } else {
        return;
    }
}
Add one global variable to track the index and call the function recursively. Once the index reaches the array size, return from the function.
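To address the second part of the question (keeping each payload paired with its result), a small variation of the function above could capture the current payload in a local variable before firing the request, so the success callback closes over the right element. display_payload and display_result are hypothetical placeholders for whatever rendering the page does:
function sendAjaxCall() {
    if (quotesListIndex >= quotesList.length) {
        return; // all payloads have been sent
    }
    var payload = quotesList[quotesListIndex++]; // capture this request's payload locally
    display_payload(payload); // hypothetical: render the payload on the page
    $.ajax({
        type: "POST",
        url: url,
        data: JSON.stringify(payload),
        contentType: "application/json",
        timeout: 300000,
        success: function (result) {
            display_result(result); // hypothetical: render the response next to its payload
            sendAjaxCall();         // only then move on to the next payload
        },
        error: function () {
            console.log("Some Error!");
        }
    });
}

sendAjaxCall(); // kick off the chain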
I am testing some code using Jasmine and creating a mock for the ajax method
spyOn($, 'ajax').and.callFake(function(e) {
    console.log("is hitting");
});
to test the following piece of code
$.ajax({
    url: AppManager.defaults.contextPath + "/solutions/mcn/mcn-lookup-list",
    data: {
        mcnNumber: mcnNumberData,
        mcnCustomerName: mcnCustomerNameData
    },
    dataType: "json",
    type: "GET",
    global: false
})
.done(function(data) {
    solution.CommonObjects.theSolution.orderHandoff.mcnSearchData = self.filterMCNSearchData(data, resultObj);
    $promise.resolve();
})
.fail(function() {
    $promise.reject();
    self.displayErrorPopup('MCN Search Error', 'There is no MCN associated with MCN Number or MCN Customer Name Entered!!!');
});
},
It's throwing an error: "cannot read done of undefined". Do I need to create a spy for that as well? Please help with the code to do so.
Issues with your code:
You are spying on it correctly, but your spy needs to hand a promise object back. Basically you need to return something like this: return new $.Deferred().resolve(dummyData).promise();
There are multiple ways to create a deferred/promise object. I suggest you read up on both Promise and Deferred.
Also, could you explain where your $promise is coming from? Is this some feature of require.js?
Below is one way to fake the ajax calls.
var testObj = {
    ajaxFunction: function() {
        $.ajax({
            url: 'https://jsonplaceholder.typicode.com/posts/1'
        }).done(function(data) {
            consoleLogFunction(data);
        });
    }
};

var consoleLogFunction = function(data) {
    console.log("The length of the array is..=> " + data.length);
};

describe("ajax test suite", function() {
    it("expect true to be true", function() {
        var dummyData = ["Foo", "Boo"];
        spyOn($, 'ajax').and.callFake(function(e) {
            return new $.Deferred().resolve(dummyData).promise();
        });
        spyOn(window, 'consoleLogFunction').and.callThrough();
        testObj.ajaxFunction();
        expect(window.consoleLogFunction).toHaveBeenCalledWith(dummyData);
    });
});
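If you also want to exercise the .fail branch from the question's code (the one that calls displayErrorPopup), the spy can hand back a rejected promise instead; a minimal sketch using the same approach:
spyOn($, 'ajax').and.callFake(function () {
    // simulate the server failing so the .fail handler runs
    return new $.Deferred().reject().promise();
});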
My application has a lot of AJAX calls, each of which returns a JSON response. Instead of validating the data in each of the .done() callbacks, I'm trying to compact the code.
What we have so far
$.ajax({
    url: 'test',
    type: 'GET',
    data: {
        _token: token
    },
    dataFilter: function(jsonResponse) {
        return isValidJson(jsonResponse);
    }
}).done(function(jsonResponse) {
    // do things
});
function isValidJson(jsonResponse) {
    try {
        var parsedJson = $.parseJSON(jsonResponse);
        if (parsedJson.error == 1) {
            notificationController.handleNotification(parsedJson.message, 'error');
            return false;
        }
    } catch (err) {
        notificationController.handleNotification('A server-side error occurred. Try refreshing if the problem persists.', 'error');
        return false;
    }
    return jsonResponse; // Have to return the original data, not true
}
The expected behavior is that if dataFilter returns false it triggers .fail(), and if it returns true it continues to .done(). Instead, it just continues to .done() with whatever isValidJson() returned.
Is there also a way to make .fail() do something standard like send a notification to the user without having to put it under every AJAX call?
The easiest way is to create a shorthand for $.ajax by extending it.
Extending the AJAX call
jQuery.extend({
    myAjax: function(params) {
        // Here we can modify the parameters and override them, e.g. making 'error:' do something different
        // If we want to add a default 'error:' callback
        params.error = function() {
            console.log('its failed');
        };
        // or you can specify data parsing here
        if (params.success && typeof params.success == 'function') {
            var successCallback = params.success;
            var ourCallback = function(responseJson) {
                if (isValidJson(responseJson)) { // Validate the data
                    console.log('The json is valid');
                    successCallback(responseJson); // Continue to the original callback
                } else {
                    console.log('The json is not valid');
                }
            };
            params.success = ourCallback;
        }
        return $.ajax(params);
    }
});
Now every time you want to make an AJAX call in your application, you DO NOT use $.ajax({}). Instead, you use $.myAjax({});
Example
$.myAjax({
    url: 'domain.com',
    type: 'GET',
    success: function(data) {
        // Do what you'd do normally; the data here is definitely valid JSON.
    },
    error: function(data) {}
});
And this special function will handle all errors the same way, with no need to write those validators every time.
Try to do it like this (not tested):
var jxhr = $.ajax({
    url: 'test',
    type: 'GET',
    data: {
        _token: token
    },
    dataFilter: function(jsonResponse) {
        if (!isValidJson(jsonResponse)) {
            jxhr.abort();
        }
        return jsonResponse;
    }
}).done(function(jsonResponse) {
    // do things
});
By using this strategy you are violating the "separation of concerns" principle.
The AJAX call should resolve or reject according to its own outcome, not according to whether the response is JSON or not.
A possible solution (there are certainly other solutions as well):
function GetSanitized(d) {
    return d.then(function(a) {
        if (a.indexOf('{') > -1) // check if json (just for example)
            return $.Deferred().resolve(JSON.parse(a)); // return object
        else
            return $.Deferred().reject(a); // reject
    },
    function() {
        return $.Deferred().reject("ajax error"); // ajax failed
    });
}

var ajax = $.Deferred();
GetSanitized(ajax).then(
    function (a) { alert("Json p's value is " + a["p"]); },
    function (a) { alert("Error" + a); }
);
ajax.resolve("{\"p\":2}");   // simulate ajax ok, valid json
//ajax.resolve("\"p\":2}");  // simulate ajax ok, invalid json
//ajax.reject("\"p\":2}");   // simulate ajax bad, valid json
http://jsbin.com/vozoqonuda/2/edit
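To hook GetSanitized into the question's actual call, the jqXHR could be passed straight through; a small sketch, assuming dataType: 'text' so jQuery hands the raw response string to the filter instead of parsing it first:
var request = $.ajax({
    url: 'test',
    type: 'GET',
    data: { _token: token },
    dataType: 'text' // keep the response as a raw string for GetSanitized to inspect
});

GetSanitized(request).then(
    function (parsed) {
        // do things with the validated object
    },
    function (err) {
        notificationController.handleNotification(err, 'error');
    }
);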
I have looked at various questions that talk about synchronous requests and how to return data from an AJAX request (with promises and callbacks), but nothing seems to answer what I am after.
var FacadeApi = (function($) {
    var endpoint = '/auth/',
        self = this,
        settings = {
            isAuthenticated: false,
            authKey: null
        };

    function init() {
        $.ajax({
            url: self.endpoint,
            dataType: 'json',
            async: true
        })
        .done(function(data) {
            self.settings.authKey = data.authKey;
            self.settings.isAuthenticated = true;
        })
        .fail(function(error) {
        });
    }

    var Facade = {
        someRequest: function() {
            $.ajax({
                url: someUri,
                dataType: 'json',
                data: { authKey: authKey }
            })
            .done(function(data) {
                // does stuff.
            });
        }
    };

    init();
    return Facade;
}(jQuery));
My module depends on requesting an authentication key and then including it in each subsequent request. Until that request is resolved, the module can't be used, which led me to async: false in the init function.
In this scenario, is it recommended to 'block' until the init call is completed? Could I achieve what I am after and keep the init request asynchronous?
So if I understand correctly, you want to make sure that if someone calls someRequest() it waits until you have the token, which is obtained by a separate request.
I suggest the following pattern.
Do not call init() at the beginning.
When you need to call someRequest(), first check whether you already have a valid token value.
If the token value is not there, you can use $.when() so that someRequest() only runs once the init() AJAX call has returned successfully, as in the sketch below.
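A minimal sketch of that pattern, assuming it lives inside the question's FacadeApi closure; ensureAuth is a hypothetical helper replacing the eager init() call, and endpoint, settings, and someUri are the names from the question's code:
var authReady = null; // cached promise for the auth request

function ensureAuth() {
    if (settings.isAuthenticated) {
        return $.when(settings.authKey); // already have a token, resolve immediately
    }
    if (!authReady) {
        authReady = $.ajax({ url: endpoint, dataType: 'json' })
            .done(function(data) {
                settings.authKey = data.authKey;
                settings.isAuthenticated = true;
            });
    }
    return authReady; // every caller waits on the same in-flight request
}

var Facade = {
    someRequest: function() {
        // only fire the real request once the token is available
        return ensureAuth().then(function() {
            return $.ajax({
                url: someUri, // placeholder URL from the question
                dataType: 'json',
                data: { authKey: settings.authKey }
            });
        });
    }
};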
I have a couple of jQuery AJAX requests which have to be synchronous, but they keep locking/freezing the browser until the response is received. My main problem is that until the response is received I have to display a spinning icon, but due to the freezing the spinner is not displayed, and even if it miraculously is, it doesn't animate.
This is the event displaying the spinner and sending the request:
$(document).on('click', '#open-button', function () {
    var input = "some text";
    var wrapper = $('#wrapperWindow');
    wrapper.children().animate({
        opacity: 0
    }, 500);
    wrapper.children().remove();
    wrapper.append('<div id="loading-spinner" style="display:none;"></div>');
    var spinner = $('#loading-spinner');
    spinner.css({
        backgroundImage: 'url("img/loading.gif")',
        opacity: 0
    });
    spinner.show();
    spinner.animate({
        opacity: 1
    }, 500);
    var dataForTab = requestData(input); //<-- the request
    if (dataForTab.length > 0) {
        //do stuff
    }
});
The request:
function requestData(input) {
    var result = null;
    $.ajax({
        async: false,
        type: "POST",
        url: "/some/url?input=" + input,
        dataType: "json",
        retryLimit: 3,
        success: function (json) {
            result = json;
        },
        error: function (xhr, err) {
            console.log(xhr);
            console.log(err);
        }
    });
    return result;
}
Until the request returns the received JSON data, everything stops moving. How can I fix this please?
That's the essence of synchronous requests: they are blocking. You may want to try moving the requests to a web worker. Here's an example (not using XHR, but it can give you an implementation idea).
A web worker is implemented in a separate file; the script can look like this:
onmessage = function (e) {
    var input = e.data.input; // the input posted from the main thread (message shape assumed)
    var result = null;
    $.ajax({
        async: false,
        type: "POST",
        url: "/some/url?input=" + input,
        dataType: "json",
        retryLimit: 3,
        success: function (json) {
            result = json;
            postMessage({result: result});
        },
        error: function (xhr, err) {
            postMessage({error: err});
        }
    });
};
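Note that jQuery depends on the DOM, so $.ajax is generally not usable inside a worker. A minimal sketch of the same idea with a plain XMLHttpRequest (which does work in workers), plus the main-thread side that posts the input and reads the result; the worker.js filename and the message shape are assumptions:
// worker.js — a synchronous request here does not freeze the page, only this worker
onmessage = function (e) {
    var xhr = new XMLHttpRequest();
    xhr.open("POST", "/some/url?input=" + encodeURIComponent(e.data.input), false); // false = synchronous
    try {
        xhr.send();
        postMessage({ result: JSON.parse(xhr.responseText) });
    } catch (err) {
        postMessage({ error: String(err) });
    }
};

// main thread — the spinner keeps animating while the worker waits for the response
var worker = new Worker('worker.js');
worker.onmessage = function (e) {
    if (e.data.error) {
        console.log(e.data.error);
    } else {
        // use e.data.result, e.g. hide the spinner and render the tab data
    }
};
worker.postMessage({ input: "some text" });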
Depending on your use case, you can use something like task.js, a simplified interface for getting CPU-intensive code to run on all cores (Node.js and web).
An example would be:
// turn blocking pure function into a worker task
const syncWorkerRequest = task.wrap(function (url) {
    // sync request logic
});

// run task on an autoscaling worker pool
syncWorkerRequest('./bla').then(result => {
    // do something with result
});
You should not be doing this, though. Unless you need to do some heavy data processing, please use async requests.
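For completeness, a sketch of an async version of the question's requestData, assuming the click handler is reworked to consume a promise; the URL and the "do stuff" step are placeholders from the question:
// returns a promise instead of blocking for the result
function requestData(input) {
    return $.ajax({
        type: "POST",
        url: "/some/url?input=" + input,
        dataType: "json"
    });
}

// in the click handler: the spinner keeps animating while the request is in flight
requestData(input)
    .done(function (dataForTab) {
        spinner.remove();
        if (dataForTab.length > 0) {
            // do stuff
        }
    })
    .fail(function (xhr, err) {
        console.log(xhr, err);
    });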