Sequential execution of jQuery Deferreds - javascript

In the code below, I want sequential execution of the method saveBulkUploadSinglePacket in the while loop; that is, the next packet should be processed only after the current packet has completed. How can I achieve that?
var saveBulkUploadSinglePacket = function(){
    while (packetCount<=bulkUploadPackets.length){
        $.when(saveBulkUploadSinglePacket(modelToSave)).done(function(arguments){
            saveBulkUploadPackets.push(arguments);
            packetCount++;
        });
    }
    return saveBulkUploadPackets;
}
var saveBulkUploadSinglePacket = function(modelToSave){
    var defer = $.Deferred();
    $.when(new SaveBulkUpload().save(modelToSave)).done(function(arguments){
        defer.resolve(arguments);
    }).fail(function(errorObj){
        defer.reject(errorObj);
    });
    return defer.promise();
}

The standard way to say "perform x when promise is done" is through promise.then(). Keep track of the current promise in a var outside the loop and attach each call to the previous promise with a .then():
// driver that chains all packets; named differently from saveBulkUploadSinglePacket
// so the single-packet saver isn't shadowed
var saveAllBulkUploadPackets = function(){
    var lastPromise = $.when();
    bulkUploadPackets.forEach(function(modelToSave){
        lastPromise = lastPromise
            .then(function(){
                // make sure saveBulkUploadSinglePacket returns a promise
                return saveBulkUploadSinglePacket(modelToSave);
            })
            .then(function(result){
                saveBulkUploadPackets.push(result);
                packetCount++;
            });
    });
    // chain one final .then() that resolves to the desired return value
    return lastPromise.then(function(){
        return saveBulkUploadPackets;
    });
}
At the end of the function I chain one final .then() that resolves to the desired return value, then return that promise. This way you can call saveAllBulkUploadPackets().then(...) to wait for all the promises to complete and handle the result.
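For example, a minimal call-site sketch (assuming bulkUploadPackets, packetCount and the saveBulkUploadPackets results array exist as in the question):
saveAllBulkUploadPackets()
    .then(function (results) {
        // every packet saved; `results` is the accumulated array
        console.log("saved " + results.length + " packets");
    })
    .fail(function (errorObj) {
        // the chain stops at the first packet whose save fails
        console.error(errorObj);
    });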

To save your packets sequentially, use Array.prototype.reduce() to build a .then() chain very concisely, as follows:
var saveBulkUploadPackets = function(packets) {
    return packets.reduce(function(promise, packet) {
        return promise.then(function(results) {
            return (new SaveBulkUpload()).save(packet).then(function(res) {
                return results.concat(res);
            });
        });
    }, $.when([]));
}
And call it like this:
saveBulkUploadPackets(bulkUploadPackets).then(function(results) {
    // All done.
    // Use/log the `results` array here.
});

Related

Ensure a set of deferred.then callbacks completes

I am using Dojo 1.10, by the way, which comes with its own Deferred and Promise API. The scenario is as follows:
var deferred_1 = new Deferred();
deferred_1.then(function(value){
    // do something
    return something;
});
var deferred_2 = new Deferred();
deferred_2.then(function(value){
    // do something
    return something;
});
var completeFunc = function(value){
    console.log("done");
};
// run completeFunc after completion of both deferred_1.then and deferred_2.then
How do I ensure that the callbacks in both deferred_1.then and deferred_2.then have completed before running another function?
Use Promise.all to create a promise that resolves only after all of the promises you pass into it have resolved. Note that you have to save references to the new promises returned by .then():
const deferred_1 = new Deferred();
const d1 = deferred_1.then(function(value) {
    // do something
    return something;
});
const deferred_2 = new Deferred();
const d2 = deferred_2.then(function(value) {
    // do something
    return something;
});
const completeFunc = function(value) {
    console.log("done");
};
Promise.all([d1, d2]).then(completeFunc);
You can use the dojo/promise/all function.
Example from the documentation:
require(["dojo/promise/all"], function(all){
all([promise1, promise2]).then(function(results){
// results will be an Array
});
// -- or --
all({
promise1: promise1,
promise2: promise2
}).then(function(results){
// results will be an Object using the keys "promise1" and "promise2"
});
});
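Applied to the code in the question, that might look roughly like this (a sketch; it assumes deferred_1, deferred_2 and completeFunc are defined as above and that the dojo/promise/all module is available):
require(["dojo/promise/all"], function(all){
    // d1/d2 are the promises returned by the .then() calls in the question
    var d1 = deferred_1.then(function(value){
        // do something
        return something;
    });
    var d2 = deferred_2.then(function(value){
        // do something
        return something;
    });
    all([d1, d2]).then(completeFunc);
});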

Method chaining and promises issue in Angular app with regards to returning a compared and updated array

I am trying to filter/match a list of returned IDs to a list of JSON data records, but I am struggling with (I believe) my promises and method chaining.
I can get the functions to work, except when I add step 3 below. Then the method resolves without the matching data (the function does carry on and eventually returns the correct matching data, but by that time my method has already completed).
This is how it is supposed to work:
(getCompanyBrandProfileIDs) First my method gets a brandProfileID linked to the current user.
(getBrandProfiles) Then it takes the brandProfileID and gets all brandProfiles linked to that specific brandProfile.
(getKeywordProfiles) Then it SHOULD take the returned brandProfiles and get the matching keywordProfile for each brandProfile. The result is an array of objects, each containing a brand_profile_id and an id.
This is my main method:
this.getCompanyBrandProfileIDs = function () {
var brandProfileIDsToReturn = $q.defer();
GetUserAccessService.returnBrandProfileID().then(function (brandProfileID) {
console.log(brandProfileID);
getBrandProfiles(brandProfileID).then(function (brandProfiles) {
console.log(JSON.stringify(brandProfiles));
var keywordProfilesArray = [];
getKeywordProfiles(brandProfiles).then(function (keywordProfiles) {
keywordProfilesArray = keywordProfiles;
console.log(JSON.stringify(keywordProfilesArray));
//brandProfileIDsToReturn.resolve(keywordProfilesArray);
});
brandProfileIDsToReturn.resolve(keywordProfilesArray);
});
});
return brandProfileIDsToReturn.promise;
};
This is the getBrandProfiles method:
function getBrandProfiles(brandProfileID) {
var getBrandProfilesLinkedToCompany = $q.defer();
pullSocialMediaData('keyword_profile_brand_profiles.json?brand_profile_id=' + brandProfileID).then(function (brandProfiles) {
var brandProfilesArray = [];
brandProfiles.forEach(function (profile) {
brandProfilesArray.push({ id: profile.id, name: profile.name });
});
getBrandProfilesLinkedToCompany.resolve(brandProfilesArray);
});
return getBrandProfilesLinkedToCompany.promise;
}
This is the getKeywordProfiles method:
function getKeywordProfiles(brandProfiles) {
var keywordProfilesToReturn = $q.defer();
var brandProfilesArray = brandProfiles;
var array = [];
brandProfilesArray.forEach(function (profile) {
findKeywordProfile(profile.id).then(function (keywordID) {
array.push(keywordID);
});
keywordProfilesToReturn.resolve(array);
})
return keywordProfilesToReturn.promise;
}
This is a helper method for getKeywordProfiles:
function findKeywordProfile(brandProfileID) {
var keywordProfileID = $q.defer();
pullSocialMediaData('list_keyword_profiles.json').then(function (data) {
var keywordProfileInstance = data.filter(function (keyword) {
return keyword.brand_profile_id === brandProfileID;
});
keywordProfileID.resolve(keywordProfileInstance[0].id);
});
return keywordProfileID.promise;
}
I would appreciate your assistance!
You are resolving brandProfileIDsToReturn too soon. In the code below, at the point where you resolve the promise the inner then callback has not yet been called, so keywordProfilesArray is guaranteed to still be empty:
this.getCompanyBrandProfileIDs = function () {
var brandProfileIDsToReturn = $q.defer();
GetUserAccessService.returnBrandProfileID().then(function (brandProfileID) {
console.log(brandProfileID);
getBrandProfiles(brandProfileID).then(function (brandProfiles) {
console.log(JSON.stringify(brandProfiles));
var keywordProfilesArray = [];
getKeywordProfiles(brandProfiles).then(function (keywordProfiles) {
keywordProfilesArray = keywordProfiles;
console.log(JSON.stringify(keywordProfilesArray));
//brandProfileIDsToReturn.resolve(keywordProfilesArray);
});
brandProfileIDsToReturn.resolve(keywordProfilesArray);
});
});
return brandProfileIDsToReturn.promise;
};
Simply moving the resolve() call inside the then callback should fix it; in fact you already have that line commented out, so uncomment it and remove the other resolve:
this.getCompanyBrandProfileIDs = function () {
var brandProfileIDsToReturn = $q.defer();
GetUserAccessService.returnBrandProfileID().then(function (brandProfileID) {
console.log(brandProfileID);
getBrandProfiles(brandProfileID).then(function (brandProfiles) {
console.log(JSON.stringify(brandProfiles));
var keywordProfilesArray = [];
getKeywordProfiles(brandProfiles).then(function (keywordProfiles) {
keywordProfilesArray = keywordProfiles;
console.log(JSON.stringify(keywordProfilesArray));
brandProfileIDsToReturn.resolve(keywordProfilesArray);
});
});
});
return brandProfileIDsToReturn.promise;
};
However, you can probably simplify the code a lot if you stop using $q.defer(). Your functions already return promises, so just return those promises and stop trying to create additional ones. I think this is equivalent to the previous code except that it returns the promises directly (I also removed the log messages). The getKeywordProfiles call then simplifies down to a callback that just calls the function, so you can pass the function in directly:
this.getCompanyBrandProfileIDs = function () {
return GetUserAccessService.returnBrandProfileID().then(function (brandProfileID) {
return getBrandProfiles(brandProfileID).then(getKeywordProfiles);
});
};
and then you can simplify it further by extracting the inner .then:
this.getCompanyBrandProfileIDs = function () {
return GetUserAccessService.returnBrandProfileID()
.then(getBrandProfiles)
.then(getKeywordProfiles);
};
The getKeywordProfiles() function also needs to avoid resolving its promise until all of the findKeywordProfile() calls have resolved. Build an array of the individual promises and return $q.all() of that array; when they all resolve, the combined promise resolves to an array of their values:
function getKeywordProfiles(brandProfilesArray) {
var array = [];
brandProfilesArray.forEach(function (profile) {
array.push(findKeywordProfile(profile.id));
})
return $q.all(array);
}
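With those changes, the service method is consumed like any other promise; a minimal usage sketch (the injected service name and the logging are illustrative only, not from the question):
// Hypothetical caller, e.g. a controller that injects the service
// exposing getCompanyBrandProfileIDs() above.
someProfileService.getCompanyBrandProfileIDs().then(function (keywordProfiles) {
    // keywordProfiles is the array that $q.all() in getKeywordProfiles() resolves to
    console.log(keywordProfiles);
});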
To clarify my comments about $q: there are some cases where you need to create a promise from scratch using it, but they're fairly uncommon. Anything that happens asynchronously in Angular already returns a promise, and the great thing about promises is that they chain together: when you have one promise, calling .then() or .catch() on it returns a new one. The .then() callback can either return a value, which resolves the new promise with that value, or return a promise, in which case the new promise only resolves when the returned promise itself resolves. So just keep chaining the .then() calls together and each will wait for the previous one to complete.
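A small sketch of those two cases (the URLs here are placeholders, not from the question):
$http.get('/api/user')
    .then(function (response) {
        // returning a plain value: the next .then() receives it directly
        return response.data.id;
    })
    .then(function (userId) {
        // returning a promise: the next .then() waits for this request to finish
        return $http.get('/api/profiles/' + userId);
    })
    .then(function (profileResponse) {
        console.log(profileResponse.data);
    });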
$q is still useful though: $q.all() if you want to wait for a bunch of promises to all resolve, $q.race() if you have a bunch of promises and only one needs to resolve, and $q.resolve(value) when you just want a promise that resolves immediately so you can hang a chain of other async functions off it.
Also, it is safe to keep a promise around long after it has resolved, and you can still call .then() on it: this is useful if you have asynchronous initialisation code and events that may or may not be triggered before the initialisation has completed. There is no need for if (isInitialised) when you can just do initPromise.then(...).
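A sketch of that initialisation pattern (initPromise, loadConfig and handle are hypothetical names, not from the question):
// Run the asynchronous initialisation once and keep the promise around.
var initPromise = loadConfig(); // hypothetical: returns a promise for the config

function onSomeEvent(payload) {
    // If initialisation already finished, the callback still runs (asynchronously);
    // if not, it simply waits. No if (isInitialised) check needed.
    initPromise.then(function (config) {
        handle(payload, config);
    });
}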
In the getKeywordProfiles function you need to resolve the deferred when the array loop has finished.
function getKeywordProfiles(brandProfiles) {
var keywordProfilesToReturn = $q.defer();
var brandProfilesArray = brandProfiles;
var array = [];
brandProfilesArray.forEach(function (profile) {
findKeywordProfile(profile.id).then(function (keywordID) {
array.push(keywordID);
});
//--
//keywordProfilesToReturn.resolve(array);
})
//++
keywordProfilesToReturn.resolve(array);
return keywordProfilesToReturn.promise;
}
Info: I think you should create a profileIdArray, push all of the brandProfileIDs into it, and send that to your findKeywordProfile function. It would be more useful.
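A rough sketch of that idea, reusing the pullSocialMediaData helper from the question: fetch list_keyword_profiles.json once and match every brand profile ID against it, instead of re-requesting the file for each profile (the function name findKeywordProfiles is hypothetical):
function findKeywordProfiles(brandProfileIDs) {
    // one request for the whole list, then filter locally per id
    return pullSocialMediaData('list_keyword_profiles.json').then(function (data) {
        return brandProfileIDs.map(function (id) {
            var match = data.filter(function (keyword) {
                return keyword.brand_profile_id === id;
            })[0];
            return match ? match.id : null;
        });
    });
}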

Adding a Promise to Promise.all() [duplicate]

I've got an API call that sometimes returns paged responses. I'd like to automatically add these to my promises so I get the callback once all the data has arrived.
This is my attempt. I'd expect the new promise to be added and Promise.all to resolve once that is done.
What actually happens is that Promise.all doesn't wait for the second request. My guess is that Promise.all attaches "listeners" when it's called.
Is there a way to "reinitialize" Promise.all()?
function testCase (urls, callback) {
var promises = [];
$.each(urls, function (k, v) {
promises.push(new Promise(function(resolve, reject) {
$.get(v, function(response) {
if (response.meta && response.meta.next) {
promises.push(new Promise(function (resolve, reject) {
$.get(v + '&offset=' + response.meta.next, function (response) {
resolve(response);
});
}));
}
resolve(response);
}).fail(function(e) {reject(e)});
}));
});
Promise.all(promises).then(function (data) {
var response = {resource: []};
$.each(data, function (i, v) {
response.resource = response.resource.concat(v.resource);
});
callback(response);
}).catch(function (e) {
console.log(e);
});
}
Desired flow is something like:
Create a set of promises.
Some of the promises spawn more promises.
Once all the initial promises and spawned promises resolve, call the callback.
It looks like the overall goal is:
For each entry in urls, call $.get and wait for it to complete.
If it returns just a response without "next", keep that one response
If it returns a response with a "next," we want to request the "next" as well and then keep both of them.
Call the callback with response when all of the work is done.
I would change #2 so you just return the promise and fulfill it with response.
A key thing about promises is that then returns a new promise, which will be resolved based on what you return: if you return a non-thenable value, the promise is fulfilled with that value; if you return a thenable, the promise is resolved to the thenable you return. That means that if you have a source of promises ($.get, in this case), you almost never need to use new Promise; just use the promises you create with then. (And catch.)
(If the term "thenable" isn't familiar, or you're not clear on the distinction between "fulfill" and "resolve", I go into promise terminology in this post on my blog.)
See comments:
function testCase(urls) {
// Return a promise that will be settled when the various `$.get` calls are
// done.
return Promise.all(urls.map(function(url) {
// Return a promise for this `$.get`.
return $.get(url)
.then(function(response) {
if (response.meta && response.meta.next) {
// This `$.get` has a "next", so return a promise waiting
// for the "next" which we ultimately fulfill (via `return`)
// with an array with both the original response and the
// "next". Note that by returning a thenable, we resolve the
// promise created by `then` to the thenable we return.
return $.get(url + "&offset=" + response.meta.next)
.then(function(nextResponse) {
return [response, nextResponse];
});
} else {
// This `$.get` didn't have a "next", so resolve this promise
// directly (via `return`) with an array (to be consistent
// with the above) with just the one response in it. Since
// what we're returning isn't thenable, the promise `then`
// returns is resolved with it.
return [response];
}
});
})).then(function(responses) {
// `responses` is now an array of arrays, where some of those will be one
// entry long, and others will be two (original response and next).
// Flatten it, and return it, which will settle the overall promise with
// the flattened array.
var flat = [];
responses.forEach(function(responseArray) {
// Push all responses from `responseArray` into `flat`.
flat.push.apply(flat, responseArray);
});
return flat;
});
}
Note how we never use catch there; we defer error handling to the caller.
Usage:
testCase(["url1", "url2", "etc."])
.then(function(responses) {
// Use `responses` here
})
.catch(function(error) {
// Handle error here
});
The testCase function looks really long, but that's just because of the comments. Here it is without them:
function testCase(urls) {
return Promise.all(urls.map(function(url) {
return $.get(url)
.then(function(response) {
if (response.meta && response.meta.next) {
return $.get(url + "&offset=" + response.meta.next)
.then(function(nextResponse) {
return [response, nextResponse];
});
} else {
return [response];
}
});
})).then(function(responses) {
var flat = [];
responses.forEach(function(responseArray) {
flat.push.apply(flat, responseArray);
});
return flat;
});
}
...and it'd be even more concise if we were using ES2015's arrow functions. :-)
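For illustration, here is a sketch of the same function using arrow functions (same logic, just terser):
function testCase(urls) {
    return Promise.all(urls.map(url =>
        $.get(url).then(response =>
            response.meta && response.meta.next
                ? $.get(url + "&offset=" + response.meta.next)
                      .then(nextResponse => [response, nextResponse])
                : [response]
        )
    )).then(responses => responses.reduce((flat, arr) => flat.concat(arr), []));
}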
In a comment you've asked:
Could this handle if there was a next next? Like a page 3 of results?
We can do that by encapsulating that logic into a function we use instead of $.get, which we can use recursively:
function getToEnd(url, target, offset) {
// If we don't have a target array to fill in yet, create it
if (!target) {
target = [];
}
return $.get(url + (offset ? "&offset=" + offset : ""))
.then(function(response) {
target.push(response);
if (response.meta && response.meta.next) {
// Keep going, recursively
return getToEnd(url, target, response.meta.next);
} else {
// Done, return the target
return target;
}
});
}
Then our main testCase is simpler:
function testCase(urls) {
return Promise.all(urls.map(function(url) {
return getToEnd(url);
})).then(function(responses) {
var flat = [];
responses.forEach(function(responseArray) {
flat.push.apply(flat, responseArray);
});
return flat;
});
}
Assuming you are using jQuery v3+, you can use the promises returned by $.ajax and pass them to Promise.all().
What you are missing is returning the second request as a promise instead of trying to push it to the promises array.
Simplified example:
var promises = urls.map(function(url) {
// return promise returned by `$.ajax`
return $.get(url).then(function(response) {
if (response.meta) {
// return a new promise
return $.get('special-data.json').then(function(innerResponse) {
// return innerResponse to resolve promise chain
return innerResponse;
});
} else {
// or resolve with first response
return response;
}
});
})
Promise.all(promises).then(function(data) {
console.dir(data)
}).catch(function(e) {
console.log(e);
});

Cannot get $q.all with nested promises working - wait for all

I fully understand that similar questions have been asked before, but I can't get it running. I need a chain of promises and I need to wait until all promises are resolved.
My issue is that I need to call different promises based on the outcome of another promise :-(
So based on the outcome of promise2, my promise4 is EITHER $translate.use OR $translate.refresh.
This is what I have so far (simplified):
var promise1 = someService.get({}).$promise.then(function (result) {
// do stuff
});
var promise2 = anotherService.getPromise().then(function (result) {
var properties = result[0];
// do stuff
return properties;
});
var promise3 = promise2.then(function(properties){
// using $translate (angular-translate) here which is async as well
var userLanguage = properties.language;
if (userLanguage !== $translate.preferredLanguage()) {
// either this is the 4th promise
$translate.use(userLanguage).then(function (myVar) {
// ...
});
} else {
// or this is the 4th promise
$translate.refresh().then(function (myVar) {
// ...
});
}
});
var loadPromises = {
promise1: promise1,
promise2: promise2,
promise3: promise3,
promise4: ???
};
$q.all(loadPromises).then(function (result) {
// anything done
});
You don't really need to monitor promise3 and promise4; all you need is promise1 and promise2. promise3 becomes the next step of promise2. Just note how a new promise is returned (either return $translate.use(...) or return $translate.refresh()) from promise2's then part:
var promise1 = someService.get({}).$promise.then(function(result) {
// do stuff
});
var promise2 = anotherService.getPromise().then(function(result) {
var properties = result[0];
// do stuff
return properties;
})
.then(function(properties) {
// using $translate (angular-translate) here which is async as well
var userLanguage = properties.language;
if (userLanguage !== $translate.preferredLanguage()) {
// either this is the 4th promise
return $translate.use(userLanguage).then(function(myVar) {
// ...
});
} else {
// or this is the 4th promise
return $translate.refresh().then(function(myVar) {
// ...
});
}
});
var loadPromises = {
promise1: promise1,
promise2: promise2
};
$q.all(loadPromises).then(function(result) {
// anything done
});
If I understand your code well enough, I think you only need to return the inner promise inside of promise3. In your example, both promise1 and promise2 won't be resolved until the service is done. Since promise3 depends on promise2 it won't finish until promise2 resolved. I believe $q.all keeps going until all promises are complete, even promises returned by promises. Since promise3 returns a promise, promise3 won't be considered resolved until the inner promise is resolved. So, I think adding a couple return statements is all that you need:
var promise1 = someService.get({}).$promise.then(function (result) {
// do stuff
});
var promise2 = anotherService.getPromise().then(function (result) {
var properties = result[0];
// do stuff
return properties;
});
var promise3 = promise2.then(function(properties){
// using $translate (angular-translate) here which is async as well
var userLanguage = properties.language;
if (userLanguage !== $translate.preferredLanguage()) {
// either this is the 4th promise
//NOTE: added return here
return $translate.use(userLanguage).then(function (myVar) {
// ...
});
} else {
// or this is the 4th promise
//NOTE: added return here
return $translate.refresh().then(function (myVar) {
// ...
});
}
});
var loadPromises = {
promise1: promise1,
promise2: promise2,
promise3: promise3
};
$q.all(loadPromises).then(function (result) {
// anything done
});

Promises for promises that are yet to be created without using the deferred [anti]pattern

Problem 1: only one API request is allowed at a given time, so the real network requests are queued while one is still in flight. An app can call the API layer at any time and expects a promise in return. When the API call is queued, the promise for the network request will only be created at some point in the future - what should be returned to the app? Here is how it can be solved with a deferred "proxy" promise:
var queue = [];
function callAPI (params) {
if (API_available) {
API_available = false;
return doRealNetRequest(params).then(function(data){
API_available = true;
continueRequests();
return data;
});
} else {
var deferred = Promise.defer();
function makeRequest() {
API_available = false;
doRealNetRequest(params).then(function(data) {
deferred.resolve(data);
API_available = true;
continueRequests();
}, deferred.reject);
}
queue.push(makeRequest);
return deferred.promise;
}
}
function continueRequests() {
if (queue.length) {
var makeRequest = queue.shift();
makeRequest();
}
}
Problem 2: some API calls are debounced so that the data to be sent is accumulated over time and then is sent in a batch when a timeout is reached. The app calling the API is expecting a promise in return.
var queue = null;
var timeout = 0;
function callAPI2(data) {
if (!queue) {
queue = {data: [], deferred: Promise.defer()};
}
queue.data.push(data);
clearTimeout(timeout);
timeout = setTimeout(processData, 10);
return queue.deferred.promise;
}
function processData() {
callAPI(queue.data).then(queue.deferred.resolve, queue.deferred.reject);
queue = null;
}
Since deferred is considered an anti-pattern (see also When would someone need to create a deferred?), the question is: is it possible to achieve the same things without a deferred (or equivalent hacks like new Promise(function (resolve, reject) {outerVar = [resolve, reject]});), using the standard Promise API?
Promises for promises that are yet to be created
…are easy to build by chaining a then invocation, whose callback creates the promise, onto a promise that represents the availability to create it in the future.
If you are making a promise for a promise, you should never use the deferred pattern. You should use deferreds or the Promise constructor if and only if there is something asynchronous that you want to wait for, and it does not already involve promises. In all other cases, you should compose multiple promises.
When you say
When the API call is queued, the promise for the network request would be created at some point in the future
then you should not create a deferred that you can later resolve with the promise once it is created (or worse, resolve with the promise's results once it settles), but rather you should get a promise for the point in the future at which the network request will be made. Basically you're going to write
return waitForEndOfQueue().then(makeNetworkRequest);
and of course we're going to need to mutate the queue accordingly.
var queue_ready = Promise.resolve(true);
function callAPI(params) {
var result = queue_ready.then(function(API_available) {
return doRealNetRequest(params);
});
queue_ready = result.then(function() {
return true;
});
return result;
}
This has the additional benefit that you will need to explicitly deal with errors in the queue. Here, every call returns a rejected promise once one request failed (you'll probably want to change that) - in your original code, the queue just got stuck (and you probably didn't notice).
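If you would rather let the queue keep going after a failure, a small variation (still assuming the same doRealNetRequest) is to catch the error only on the queue-tracking promise, so the caller still sees the rejection but the next queued request is not blocked:
var queue_ready = Promise.resolve(true);
function callAPI(params) {
    var result = queue_ready.then(function () {
        return doRealNetRequest(params);
    });
    queue_ready = result.then(function () {
        return true;
    }, function () {
        // swallow the error here only so the queue can continue;
        // `result` itself still rejects for this caller
        return true;
    });
    return result;
}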
The second case is a bit more complicated, as it does involve a setTimeout call. This is an asynchronous primitive that we need to manually build a promise for - but only for the timeout, and nothing else. Again, we're going to get a promise for the timeout, and then simply chain our API call to that to get the promise that we want to return.
function TimeoutQueue(timeout) {
var data = [], timer = 0;
this.promise = new Promise(resolve => {
this.renew = () => {
clearTimeout(timer);
timer = setTimeout(resolve, timeout);
};
}).then(() => {
this.constructor(timeout); // re-initialise
return data;
});
this.add = (datum) => {
data.push(datum);
this.renew();
return this.promise;
};
}
var queue = new TimeoutQueue(10);
function callAPI2(data) {
return queue.add(data).then(callAPI);
}
You can see here a) how the debouncing logic is completely factored out of callAPI2 (which might not have been necessary, but makes a nice point) and b) how the promise constructor only concerns itself with the timeout and nothing else. It doesn't even need to "leak" the resolve function like a deferred would; the only thing it makes available to the outside is the renew function, which allows extending the timer.
When the API call is queued, the promise for the network request would
be created at some point in the future - what to return to the app?
Your first problem can be solved with promise chaining. You don't want to execute a given request until all prior requests have finished and you want to execute them serially in order. This is exactly the design pattern for promise chaining. You can solve that one like this:
var callAPI = (function() {
var p = Promise.resolve();
return function(params) {
// construct a promise that chains to prior callAPI promises
var returnP = p.then(function() {
return doRealNetRequest(params);
});
// make sure the promise we are chaining to does not abort chaining on errors
p = returnP.then(null, function(err) {
// handle rejection locally for purposes of continuing chaining
return;
});
// return the new promise
return returnP;
}
})();
In this solution, a new promise is actually created immediately with .then(), so you can return that promise immediately - there is no need to create a promise in the future. The actual call to doRealNetRequest() is chained to this returned .then() promise by returning its value in the .then() handler. This works because the callback we provide to .then() is not called until some time in the future, when the prior promises in the chain have resolved, giving us an automatic trigger to execute the next one in the chain when the prior one finishes.
This implementation assumes that you want queued API calls to continue even after one returns an error. The extra few lines of code around the handle rejection comment are there to make sure the chain continues even where a prior promise rejects. Any rejection is returned back to the caller as expected.
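A short usage sketch of that callAPI (the parameters and logging are illustrative only): each call returns its own promise, but the underlying requests run one after another:
callAPI({ id: 1 }).then(function (data) { console.log("first done", data); });
callAPI({ id: 2 }).then(function (data) { console.log("second done", data); });
callAPI({ id: 3 }).then(null, function (err) {
    // a failure here does not stop the queued calls that follow
    console.log("third failed", err);
});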
Here's a solution to your second one (what you call debounce).
the question is - is it possible to achieve the same things without a
deferred (or equivalent hacks like new Promise(function (resolve,
reject) {outerVar = [resolve, reject]});), using the standard Promise
API?
As far as I know, the debouncer type of problem requires a little bit of a hack to expose the ability to trigger the resolve/reject callbacks somehow from outside the promise executor. It can be done a little cleaner than you propose by exposing a single function that is within the promise executor function rather than directly exposing the resolve and reject handlers.
This solution creates a closure to store private state that can be used to manage things from one call to callAPI2() to the next.
To allow code at an indeterminate time in the future to trigger the final resolution, this creates a local function within the promise executor function (which has access to the resolve and reject functions) and then shares that to the higher (but still private) scope so it can be called from outside the promise executor function, but not from outside of callAPI2.
var callAPI2 = (function() {
var p, timer, trigger, queue = [];
return function(data) {
if (!p) {
p = new Promise(function(resolve) {
// share completion function to a higher scope
trigger = function() {
resolve(queue);
// reinitialize for future calls
p = null;
queue = [];
}
}).then(callAPI);
}
// save data and reset timer
queue.push(data);
clearTimeout(timer);
timer = setTimeout(trigger, 10);
return p;
}
})();
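And a usage sketch for callAPI2 (the values are illustrative): calls made within the 10 ms window share one batched request, and every caller gets the same promise for the batch result:
callAPI2("a").then(function (batchResult) {
    // resolves once with the result of callAPI(["a", "b", "c"])
    console.log(batchResult);
});
callAPI2("b");
callAPI2("c");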
You can create a queue which resolves promises in the order they were placed in the queue:
window.onload = function() {
(function(window) {
window.dfd = {};
that = window.dfd;
that.queue = queue;
function queue(message, speed, callback, done) {
if (!this.hasOwnProperty("_queue")) {
this._queue = [];
this.done = [];
this.res = [];
this.complete = false;
this.count = -1;
};
q = this._queue,
msgs = this.res;
var arr = Array.prototype.concat.apply([], arguments);
q.push(arr);
msgs.push(message);
var fn = function(m, s, cb, d) {
var j = this;
if (cb) {
j.callback = cb;
}
if (d) {
j.done.push([d, j._queue.length])
}
// alternatively `Promise.resolve(j)`, `j` : `dfd` object
// `Promise` constructor not necessary here,
// included to demonstrate asynchronous processing or
// returned results
return new Promise(function(resolve, reject) {
// do stuff
setTimeout(function() {
div.innerHTML += m + "<br>";
resolve(j)
}, s || 0)
})
// call `cb` here, interrupting queue
.then(cb ? j.callback.bind(j, j._queue.length) : j)
.then(function(el) {
console.log("queue.length:", q.length, "complete:", el.complete);
if (q.length > 1) {
q.splice(0, 1);
fn.apply(el, q[0]);
return el
} else {
el._queue = [];
console.log("queue.length:", el._queue.length
, "complete:", (el.complete = !el._queue.length));
always(promise(el), ["complete", msgs])
};
return el
});
return j
}
, promise = function(t) {
++t.count;
var len = t._queue.length,
pending = len + " pending";
return Promise.resolve(
len === 1
? fn.apply(t, t._queue[0]) && pending
: !(t.complete = len === 0) ? pending : t
)
}
, always = function(elem, args) {
if (args[0] === "start") {
console.log(elem, args[0]);
} else {
elem.then(function(_completeQueue) {
console.log(_completeQueue, args);
// call any `done` callbacks passed as parameter to `.queue()`
Promise.all(_completeQueue.done.map(function(d) {
return d[0].call(_completeQueue, d[1])
}))
.then(function() {
console.log(JSON.stringify(_completeQueue.res, null, 2))
})
})
}
};
always(promise(this), ["start", message, q.length]);
return window
};
}(window));
window
.dfd.queue("chain", 1000)
.dfd.queue("a", 1000)
.dfd.queue("b", 2000)
.dfd.queue("c", 2000, function callback(n) {
console.log("callback at queue index ", n, this);
return this
}, function done(n) {
console.log("all done callback attached at queue index " + n)
})
.dfd.queue("do", 2000)
.dfd.queue("other", 2000)
.dfd.queue("stuff", 2000);
for (var i = 0; i < 10; i++) {
window.dfd.queue(i, 1000)
};
window.dfd.queue.apply(window.dfd, ["test 1", 5000]);
window.dfd.queue(["test 2", 1000]);
var div = document.getElementsByTagName("div")[0];
var input = document.querySelector("input");
var button = document.querySelector("button");
button.onclick = function() {
window.dfd.queue(input.value, 0);
input.value = "";
}
}
<input type="text" />
<button>add message</button>
<br>
<div></div>
