How do I optimise for loops + promises? - javascript

I'm developing a SAPUI5 app deployed to a mobile device, and it is experiencing performance issues.
The function below returns a promise.
It iterates through categories and questions using underscore _.each loops,
then performs a READ on each question in turn, before finally updating the view model and resolving the promise.
Is there any issue with doing it this way, and can it be further optimised?
_getAnswers: function() {
    return new Promise(function(resolve, reject) {
        // Loop through Categories for questions.
        _.each(oViewData.categories, function(result, index) {
            // For each Category, read Answers for each Question
            _.each(result, function(resultInner, indexInner) {
                // Read AnswerSet on QuestionId
                surveyModel.read("/AnswerSet", {
                    filters: [
                        new Filter("QuestionId", FilterOperator.EQ, resultInner.QuestionId)
                    ],
                    success: function(oData) {
                        oData.results = _.sortBy(oData.results, 'AnswerId');
                        // Populate Answer Array for Question
                        var oAnswersArray = [];
                        _.each(oData.results, function(resultInnerInner, indexInnerInner) {
                            oAnswersArray.push(resultInnerInner);
                        });
                        // Check what the current Answer is for the Question.
                        _.each(oAnswersArray, function(answerData, answerIndex) {
                            if (oViewData.categories[resultInner.CategoryId].questions[a].AnswerId === oAnswersArray[answerIndex].AnswerId) {
                                oAnswersArray[answerIndex].Selected = true;
                            }
                        });
                        // Write back the Answer Array to the viewModel
                        oViewData.categories[resultInner.CategoryId].questions[a].answers = oAnswersArray;
                        oViewModel.setData(oViewData);
                        // Go to next Question in the Loop.
                        a++;
                        // Resolve the Promise and continue.
                        resolve(true);
                    },
                    error: function(oError) {}
                });
            });
        });
    });
}

In computer science there is a term called "Big O" notation. It is used to measure how the running time of an algorithm grows with the amount of data. In general, nested loops make the computation time go up: what you are doing here is O(N^2), and each additional level of nesting increments the exponent.
Please read this: https://rob-bell.net/2009/06/a-beginners-guide-to-big-o-notation/
If I were you, I would change how you are accessing the data and look into a structure that does not involve nested loops,
i.e. don't load the items in a category until that category is clicked.
EDIT:
It looks like you are calling resolve in every iteration of the second loop. A promise can only settle once, so every call after the first is ignored. This is not optimal either.
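For illustration, here is a hedged sketch of one way to restructure it: collect one promise per read and resolve only once via Promise.all. It reuses the read call from the question, but assumes each category exposes its questions as a questions array (the exact shape isn't shown in the question):
_getAnswers: function() {
    var aReadPromises = [];
    _.each(oViewData.categories, function(category) {
        _.each(category.questions, function(question) {
            // One promise per READ; nothing is resolved inside the loops.
            aReadPromises.push(new Promise(function(resolve, reject) {
                surveyModel.read("/AnswerSet", {
                    filters: [new Filter("QuestionId", FilterOperator.EQ, question.QuestionId)],
                    success: function(oData) {
                        question.answers = _.sortBy(oData.results, 'AnswerId');
                        // Flag the answer that is currently selected for this question.
                        _.each(question.answers, function(answer) {
                            answer.Selected = (answer.AnswerId === question.AnswerId);
                        });
                        resolve();
                    },
                    error: reject
                });
            }));
        });
    });
    // Update the model once, after every read has finished.
    return Promise.all(aReadPromises).then(function() {
        oViewModel.setData(oViewData);
        return true;
    });
}
This also avoids calling setData on the view model once per question, which is likely part of the slowness on a mobile device.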

Related

Promises nodejs readability improvements

I have written a few functions that return promises, to get data from the Google Analytics API. I think what I've written is called callback hell.
Can someone help me optimise this code (or give tips/best practices), so it's more readable?
var express = require('express');
var router = express.Router();
var googleAuth = require('google-oauth-jwt');
var google = require('googleapis');
var app = express();

module.exports.getGoogleData = function (jwtClient, analytics, VIEW_ID) {
    return new Promise(function (resolve, reject) {
        return getOrdersToday(jwtClient, analytics, VIEW_ID).then(function (orders) {
            return getOnlineUsersToday(jwtClient, analytics, VIEW_ID).then(function (users) {
                return getSearchedToday(jwtClient, analytics, VIEW_ID).then(function (searched) {
                    return getPageviewsTodayAndUsersToday(jwtClient, analytics, VIEW_ID).then(function (pageviews) {
                        var returndata = [
                            {
                                "Orders": orders,
                                "Onlineusers": users,
                                "searched": searched,
                                "pageviews": pageviews[0].pageviews,
                                "usersToday": pageviews[0].users
                            }
                        ];
                        resolve(returndata);
                    });
                });
            });
        });
    });
}
Example get function:
function getOrdersToday(jwtClient, analytics, view_id) {
    return new Promise(function (resolve, reject) {
        analytics.data.ga.get({
            'auth': jwtClient,
            'ids': view_id,
            'metrics': 'ga:totalEvents',
            'start-date': 'today',
            'end-date': 'today',
            filters: 'ga:eventAction==Bestelling geplaatst',
            'max-results': '1'
        }, function (err, response) {
            // handle the errors (if any)
            if (err) {
                console.log(err);
                reject(err);
            } else {
                console.log('Response:', response);
                resolve(response.totalsForAllResults["ga:totalEvents"]);
            }
        });
    });
}
There's no need for the new Promise in there at all, and in fact by using it, you leave it open to never settling if an error occurs in one of your calls. Remember that then returns a new promise. So you can just chain all those together if you want them run sequentially:
module.exports.getGoogleData = function (jwtClient, analytics, VIEW_ID) {
    var result = {};
    return getOrdersToday(jwtClient, analytics, VIEW_ID)
        .then(function (orders) {
            result.Orders = orders;
            return getOnlineUsersToday(jwtClient, analytics, VIEW_ID);
        })
        .then(function (users) {
            result.Onlineusers = users;
            return getSearchedToday(jwtClient, analytics, VIEW_ID);
        })
        .then(function (searched) {
            result.searched = searched;
            return getPageviewsTodayAndUsersToday(jwtClient, analytics, VIEW_ID);
        })
        .then(function (pageviews) {
            result.pageviews = pageviews[0].pageviews;
            result.usersToday = pageviews[0].users;
            return [result]; // (Seems a bit odd that it's wrapped in an array,
                             // but that's what the original code did...)
        });
}
But, those operations look independent of one another. If that's really true, run them in parallel with Promise.all:
module.exports.getGoogleData = function (jwtClient, analytics, VIEW_ID) {
    return Promise.all([
        getOrdersToday(jwtClient, analytics, VIEW_ID),
        getOnlineUsersToday(jwtClient, analytics, VIEW_ID),
        getSearchedToday(jwtClient, analytics, VIEW_ID),
        getPageviewsTodayAndUsersToday(jwtClient, analytics, VIEW_ID)
    ]).then(results => {
        return [{
            Orders: results[0],
            Onlineusers: results[1],
            searched: results[2],
            pageviews: results[3][0].pageviews,
            usersToday: results[3][0].users
        }];
    });
}
A clean way to fix that ugly code is to kill the promise hell. For that, you can use:
ES2017 async/await syntax
So, take a look at this new code
var express = require('express');
var router = express.Router();
var googleAuth = require('google-oauth-jwt');
var google = require('googleapis');
var app = express();

module.exports.getGoogleData = foo;

async function foo(jwtClient, analytics, VIEW_ID) {
    var orders = await getOrdersToday(jwtClient, analytics, VIEW_ID);
    var users = await getOnlineUsersToday(jwtClient, analytics, VIEW_ID);
    var searched = await getSearchedToday(jwtClient, analytics, VIEW_ID);
    var pageviews = await getPageviewsTodayAndUsersToday(jwtClient, analytics, VIEW_ID);
    return [{
        "Orders": orders,
        "Onlineusers": users,
        "searched": searched,
        "pageviews": pageviews[0].pageviews,
        "usersToday": pageviews[0].users
    }];
}
Since you have the promises created already, you can use Promise.all() and pass it the array of promises; the results come back in an array in the same order as the promises you passed in.
Refer to this for more details - https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/all
Alternatively, you can use async/await as well - https://javascript.info/async-await
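A minimal sketch combining both suggestions, assuming the same get* helpers from the question:
module.exports.getGoogleData = async function (jwtClient, analytics, VIEW_ID) {
    // The four reads are independent, so start them all at once.
    var results = await Promise.all([
        getOrdersToday(jwtClient, analytics, VIEW_ID),
        getOnlineUsersToday(jwtClient, analytics, VIEW_ID),
        getSearchedToday(jwtClient, analytics, VIEW_ID),
        getPageviewsTodayAndUsersToday(jwtClient, analytics, VIEW_ID)
    ]);
    return [{
        "Orders": results[0],
        "Onlineusers": results[1],
        "searched": results[2],
        "pageviews": results[3][0].pageviews,
        "usersToday": results[3][0].users
    }];
};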
Below is the correct and readable way to chain your promises. Correct chaining reduces nesting by expanding the chain vertically. Also, you don't need to instantiate a new Promise, because all your get methods already return promises.
function (jwtClient, analytics, VIEW_ID) {
    var returndata = [{}];
    return getOrdersToday(jwtClient, analytics, VIEW_ID).then(function (orders) {
        returndata[0].Orders = orders;
        return getOnlineUsersToday(jwtClient, analytics, VIEW_ID);
    }).then(function (users) {
        returndata[0].Onlineusers = users;
        return getSearchedToday(jwtClient, analytics, VIEW_ID);
    }).then(function (searched) {
        returndata[0].searched = searched;
        return getPageviewsTodayAndUsersToday(jwtClient, analytics, VIEW_ID);
    }).then(function (pageviews) {
        returndata[0].pageviews = pageviews[0].pageviews;
        returndata[0].usersToday = pageviews[0].users;
        return returndata;
    });
}
As others have noted, these calls do not depend on each other, which means you don't need to sequence them and can instead execute them all simultaneously. Here is what that might look like:
module.exports.getGoogleData = function (jwtClient, analytics, VIEW_ID) {
    return Promise.all([
        getOrdersToday(jwtClient, analytics, VIEW_ID),
        getOnlineUsersToday(jwtClient, analytics, VIEW_ID),
        getSearchedToday(jwtClient, analytics, VIEW_ID),
        getPageviewsTodayAndUsersToday(jwtClient, analytics, VIEW_ID)
    ])
    .then(function (results) {
        return {
            "Orders": results[0],
            "Onlineusers": results[1],
            "searched": results[2],
            "pageviews": results[3][0].pageviews,
            "usersToday": results[3][0].users
        };
    });
}
But I think your question is a really good one, and I hope your interest in good patterns doesn't end just because this specific case calls for a different solution. As I have noted elsewhere, async patterns are a complicated problem domain.
So, let's pretend you need to make 4 calls, each depending on the results of the previous one, so they must be executed in sequence. We have access to this fictional API:
function fetchOne()       // returns opaque ONE
function fetchTwo(ONE)    // must feed it ONE, returns opaque TWO
function fetchThree(TWO)  // must feed it TWO, returns opaque THREE
function fetchFour(THREE) // must feed it THREE, returns opaque FOUR
And furthermore, let's pretend that at the end of this process, you wish to have access to all four of these return values so you can craft a response with all of them. After all, what good is a pattern if it can't accommodate the kinds of complicated workflows we see in the real world?
Thus, our goal is to flesh out the implementation of this function:
function doTheWork() {
    // DO STUFF somehow, finally...
    return {
        one: ONE,
        two: TWO,
        three: THREE,
        four: FOUR,
        checksum: ONE + TWO + THREE + FOUR
    };
}
A naively straightforward attempt might look like this:
function doTheWork() {
    return fetchOne()
        .then((ONE) => {
            return fetchTwo(ONE)
        })
        .then((TWO) => {
            return fetchThree(TWO)
        })
        .then((THREE) => {
            return fetchFour(THREE)
        })
        .then((FOUR) => {
            // all work is done, let's package up the results
            return {
                one: ONE // uh-oh...
            };
        })
}
This will execute, but the problem is that we no longer have access to the earlier values in the final handler.
There are really only two ways around this: (1) declare variables with a scope that will be shared by all the callbacks, or (2) pass all data between handlers in some kind of structure. I think solution 1 is really inelegant, and I recommend against it, but let's see what that might look like:
function doTheWork() {
    var A, B, C
    return fetchOne()
        .then((ONE) => {
            A = ONE // store ONE in A
            return fetchTwo(ONE)
        })
        // ...
        .then((FOUR) => {
            // all work is done, let's package up the results
            // at this point, A & B & C contain the results of previous calls
            // and are within scope here
            return {
                one: A,
                two: B,
                three: C,
                four: FOUR,
                checksum: A + B + C + FOUR
            };
        })
}
This will work, but I think it's bad practice and I dislike it. It's bad practice because these variables are now "public" in a limited kind of way, exposing them to everything else within doTheWork. They can be clobbered by statements anywhere in this function. And unless they are scalars, they will be passed by reference, which means any mutation bugs associated with them can manifest themselves in sometimes bizarre ways.
Also, each of these "temporary" variables now competes for good names in a shared namespace (the scope of doTheWork). Ideally, you should create as few variables as possible, and each should live only as long as necessary. This might save memory, but it definitely saves your "naming pool." Naming things is mentally exhausting. I am not kidding. And every name must be good -- never name variables in a slapdash way. The names of things are often the only clues within the code about what is happening. If some of those names are throwaway garbage, you are making your life and the lives of future maintainers harder.
So, let's look at solution 2. At this point, I want to point out that this approach works best when you can use ES6 destructuring syntax. You can do this without it, but it's a bit clunkier.
We're going to construct an array that will slowly accumulate all the data fetched by each of these async calls. At the end, the array will look like this:
[ ONE , TWO , THREE , FOUR ]
By chaining promises efficiently, and by passing this array from one handler to the next, we can both avoid the Pyramid of Doom and share async results among all of these methods easily. See below:
function doTheWork() {
    return fetchOne()
        .then((ONE) => {
            return fetchTwo(ONE)
                .then((TWO) => [ ONE , TWO ])
        })
        .then(([ ONE , TWO ]) => {
            return fetchThree(TWO)
                .then((THREE) => [ ONE , TWO , THREE ])
        })
        .then(([ ONE , TWO , THREE ]) => {
            return fetchFour(THREE)
                .then((FOUR) => [ ONE , TWO , THREE , FOUR ])
        })
        .then(([ ONE , TWO , THREE , FOUR ]) => {
            return {
                one: ONE,
                two: TWO,
                three: THREE,
                four: FOUR,
                checksum: ONE + TWO + THREE + FOUR
            }
        })
}
That's the whole thing, but let's step through just the first part:
return fetchOne()
    .then((ONE) => {
        return fetchTwo(ONE)
            .then((TWO) => [ ONE , TWO ])
    })
As usual, fetchOne returns ONE -- it's the third-party API we have no control over. And as you might expect, we use ONE to make the second call, making sure to return its promise. But that last line is the real magic:
.then((TWO) => [ ONE , TWO ])
The second API call still returns just TWO, but rather than us simply returning TWO alone, we instead return an array that contains both ONE and TWO. That value -- the array -- becomes the argument to the next .then handler.
This works because nested promises are automatically unwrapped. Here's a simple example showing that at work:
function getDogName() {
    return fetchCatName()
        .then((catName) => 'Fido')
}

getDogName()
    .then((dogName) => {
        console.log(dogName);
    })
// logs 'Fido'
This illustrates that you can attach .then handlers to nested promises, and the outer promises will return the results of those handlers. This is a really common pattern when using fetch to get JSON:
function makeApiCall() {
    return fetch(api_url) // resolves with a Response object
        .then((response) => response.json()) // extracts just the JSON and returns that instead!
}
Going back to our code, since we want to avoid the Pyramid, we leave this handler alone (rather than nesting the next call inside, as you originally did). The next line looks like this:
.then(([ ONE , TWO ]) => {
This is the ES6 destructuring at work. By destructuring the array argument, we can name the elements as they enter the function. And if we're doing that, we might as well give them the names we like best, the ones they came into the world as: ONE and TWO.
We then repeat the pattern, using TWO to invoke fetchThree, making sure to return its promise, but not before tacking on a tiny .then handler that bundles the newly-available THREE into the package that we pass forward.
I hope this helps. It's a pattern I worked out to deal with some complicated branching workflows in AWS, making calls against S3 and Dynamo and other platforms with a lot of parallel conditionals, mixing blocking and non-blocking behavior.
Async patterns are a special problem domain. Even with the right tech, it can be hard to find clear ways to express behavior, especially when the underlying workflow is convoluted. This is often the case when exploring the data graphs that are so common these days.
Happy coding.

Invoking http.get sequentially when the list length is unknown

let's suppose I have the following:
var myurls = ['http://server1.com', 'http://server2.com', 'http://server3.com', etc ]
Each url is a "fallback" and should be used only if the previous one cannot be reached. In other words, this list specifies a priority. Let's also assume that this list can be of any length - I don't know it in advance and must iterate.
How do I go about writing a function, let's say "reachability", that loops through this array and returns the first reachable server?
I can't do $http.all as it is parallel. I can't run a while loop with an $http.get inside, because the result may come later, and in the meantime my UI will freeze.
Please note I am not using jQuery. I am using Ionic, which has a version of jQuery-lite in it.
Various examples I've seen talk about chaining them in .then, which is fine if you know the number of URLs beforehand, but I don't.
thanks
Just reduce over the array:
myurls.reduce((p, url) => p.catch(() => http.get(url).then(() => url)),
              Promise.reject());
Flow explained:
It's based off the perhaps more common pattern of using reduce to build a promise chain, like so: [func1, func2].reduce((p, f) => p.then(f), Promise.resolve()); is equivalent to Promise.resolve().then(func1).then(func2) (the last argument to reduce is the initial value).
In your case, since you're retrying on failure, you want to build a retry (or reject) chain, so we must start with Promise.reject() instead: Promise.reject().catch(func1).catch(func2)
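Expanded by hand for a three-element list, the reduce builds roughly this chain (assuming, as in the question, that http.get returns a promise):
// What the reduce produces for the first three urls, written out:
Promise.reject()
    .catch(() => http.get('http://server1.com').then(() => 'http://server1.com'))
    .catch(() => http.get('http://server2.com').then(() => 'http://server2.com'))
    .catch(() => http.get('http://server3.com').then(() => 'http://server3.com'));
// The first get that succeeds skips every remaining .catch handler,
// so the chain fulfills with the first reachable url.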
I guess recursion and chaining could suit your needs:
var findFirstReachableUrl = function (urls) {
    if (urls.length > 0) {
        return $http.get(urls[0]).then(function () {
            return urls[0];
        }, function () {
            return findFirstReachableUrl(urls.slice(1));
        });
    } else {
        return $q.reject("No reachable URL");
    }
}
Call:
findFirstReachableUrl(myurls).then(function (firstReachableUrl) {
    // OK: do something with firstReachableUrl
}, function () {
    // KO: no url could be reached
});

Executing and delaying array of promises in series [duplicate]

This question already has an answer here:
How to sequentially run promises with Q in Javascript?
(1 answer)
Closed 7 years ago.
I'm trying to execute a series of functions sequentially, using promises. Each function is supposed to be delayed by 3 seconds before invoking the next one.
I must be doing something wrong, because they are all invoked at the same time after 3 seconds, instead of in order.
What am I doing wrong?
var tasks = [];
allGroups.forEach(function (group) {
    tasks.push(deleteFromGroup(group));
});
tasks.reduce(function (cur, next) {
    return cur.then(next);
}, Promise.resolve()).then(function () {
    console.log("all executed");
});

function deleteFromGroup(group) {
    return new Promise(function (resolve, reject) {
        setTimeout(function () {
            console.log(group.id);
            resolve();
        }, 3000);
    });
}
The way you're creating your tasks array inevitably results in the timeouts all hitting at (about) the same time, because you're creating the tasks simultaneously in that very first .forEach loop.
To achieve the effect you require you need to actually not create the next task until the current one is resolved. Here's a pseudo-recursive way of achieving that:
return new Promise(function (resolve, reject) {
    var groups = allGroups.slice(0); // clone
    (function loop() {
        if (groups.length) {
            deleteFromGroup(groups.shift()).catch(reject).then(loop);
        } else {
            console.log("all executed");
            resolve();
        }
    })();
});
p.s. in practice you might actually want to incorporate the 3s timeout directly into the loop, instead of into deleteFromGroup - as written, the code above (and your original code) won't log "all executed" until 3s after the final delete call, but I expect it's really supposed to occur immediately after the last delete.
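One hedged way to do that, assuming deleteFromGroup no longer embeds the setTimeout, and using a small hypothetical delay helper:
function delay(ms) {
    return new Promise(function (resolve) { setTimeout(resolve, ms); });
}

return new Promise(function (resolve, reject) {
    var groups = allGroups.slice(0); // clone
    (function loop() {
        if (groups.length) {
            deleteFromGroup(groups.shift()).then(function () {
                // Only pause if there is another group left to process,
                // so "all executed" appears right after the final delete.
                return groups.length ? delay(3000) : undefined;
            }).then(loop, reject);
        } else {
            console.log("all executed");
            resolve();
        }
    })();
});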
You do not need to resort to callbacks and explicit promise construction here. You can in fact use a loop - but not over the promises themselves, since a promise represents an already-started operation.
All you need to do is merge your two loops:
allGroups.reduce(function (cur, next) {
    return cur.then(function () { return deleteFromGroup(next); });
}, Promise.resolve()).then(function () {
    console.log("all executed");
});

asynchronously iterate over massive array in JavaScript without triggering stack size exceeded

My environment is NodeJS, although this could be a web-related problem as well. I have a large set of data from a database which I am attempting to enumerate over. For the sake of argument, let's say that I have an array of 20,000 strings:
var y = 'strstrstrstrstrstrstrstrstrstr';
var x = [];
for (var i = 0; i < 20000; i++)
    x.push(y);
and I want to enumerate this list asynchronously, let's say using the async library, and let's say, because I'm super cautious, that I even limit my enumeration to 5 concurrent iterations:
var allDone = function () { console.log('done!') };
require('async').eachLimit(x, 5, function (item, cb) {
    ...
    someAsyncCall(.., cb);
}, allDone);
The expectation is that 5 items of x would be iterated concurrently, that eventually all 20,000 items would be iterated over, and that the console would print 'done!'. What actually happens is:
Uncaught exception: [RangeError: Maximum call stack size exceeded]
And at this point I assumed that this must be some sort of bug with the async library, so I wrote my own version of eachLimit which follows:
function eachLimit(data, limit, iterator, cb) {
    var consumed = 0;
    var consume;
    var finished = false;
    consume = function () {
        if (!finished && consumed >= data.length) {
            finished = true;
            cb();
        } else if (!finished) {
            return iterator(data[consumed++], consume);
        }
    };
    var concurrent = limit > data.length ? data.length : limit;
    for (var i = 0; i < concurrent; i++)
        consume();
}
and interestingly enough, this solved my problem. But when I moved my experiment from NodeJS over to Chrome, even with my solution above I still received a stack size exceeded error.
Clearly, my method does not grow the stack as much as the eachLimit method contained within async. However, I still consider my approach to be bad, because maybe not for 20k items, but for some size of array, I can still exceed the stack size using my method. I feel like I need to design some sort of solution to this problem using tail recursion, but I'm not sure if v8 will even optimize for this case, or if it's possible given the problem.
I feel like I need to design some sort of solution to this problem using tail recursion, but I'm not sure if v8 will even optimize for this case, or if it's possible given the problem.
The continuation-passing style you are using is already tail recursive (or close to it, anyway). The problem is that most JS engines tend to overflow the stack in these sorts of situations.
There are two main ways to work around this issue:
1) Force the code to be async using setTimeout.
What is happening in your code is that you are calling the callbacks before the original function returns. In some async libraries this ends up resulting in a stack overflow. One simple workaround is to force the callback to run only in the next iteration of the event loop, by wrapping it inside a setTimeout. Translate
//Turns out this was actually "someSyncCall"...
someAsyncCall(.., cb);
into
someAsyncCall(..., function () {
    setTimeout(cb, 0);
});
The main advantage here is that this is very simple to do. The disadvantage is that it adds some latency to your loop, because setTimeout is implemented so that there will always be some nonzero delay to the callback (even if you set it to zero). On the server you can use process.nextTick (or setImmediate) to do something similar as well.
That said, it's already a bit weird to have a large loop of sequential async operations. If your operations are all actually async, then it's going to take years to complete due to the network latency.
2) Use trampolining to handle the sync code.
The only way to 100% avoid a stack overflow is to use bona fide while loops. With promises, it's a bit easier to write the pseudocode for this:
// vastly incomplete pseudocode
function loopStartingFrom(array, i) {
    for (; i < array.length; i++) {
        var x = run_next_item(i);
        if (is_promise(x)) {
            return x.then(function () {
                return loopStartingFrom(array, i + 1);
            });
        }
    }
}
Basically, you run your loop in an actual loop, with some way to detect if one of your iterations is returning immediately or deferring to an async computation. When things return immediately you keep the loop running and when you finally get a real async result you stop the loop and resume it when the async iteration result completes.
The downside of using trampolining is that it's a bit more complicated. That said, there are some async libraries out there that guarantee that a stack overflow does not occur (by using one of the two tricks I mentioned under the hood).
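A slightly more concrete (still simplified) sketch of that trampoline idea, assuming each call to the iterator may return either a plain value or a promise:
function runLoop(array, iterator) {
    return new Promise(function (resolve, reject) {
        (function loopStartingFrom(i) {
            // A real while loop: synchronous iterations never grow the stack.
            while (i < array.length) {
                var x = iterator(array[i]);
                if (x && typeof x.then === 'function') {
                    // Async result: suspend the loop, resume on a fresh stack.
                    x.then(function () { loopStartingFrom(i + 1); }, reject);
                    return;
                }
                i++;
            }
            resolve();
        })(0);
    });
}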
To prevent a stack overflow, you need to keep consume from recursing into itself. You can do that using a simple flag:
function eachLimit(data, limit, iterator, cb) {
    var consumed = 0,
        running = 0,
        isAsync = true;
    function consume() {
        running--;
        if (!isAsync)
            return;
        while (running < limit && consumed < data.length) {
            isAsync = false;
            running++;
            iterator(data[consumed++], consume);
            isAsync = true;
        }
        if (running == 0)
            cb();
    }
    running++;
    consume();
}
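A hypothetical usage sketch against the 20,000-string array from the question; mixing synchronous and asynchronous completions exercises the isAsync flag:
var y = 'strstrstrstrstrstrstrstrstrstr';
var x = [];
for (var i = 0; i < 20000; i++)
    x.push(y);

eachLimit(x, 5, function (item, done) {
    // Complete half the items synchronously, half on the next tick.
    if (Math.random() < 0.5) done();
    else setTimeout(done, 0);
}, function () {
    console.log('done!');
});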
Have you considered using promises for this? They should resolve the issue of an ever-increasing stack (and you also get to use promises, which is a big plus in my book):
// Here, iterator() should take a single data value as input and return
// a promise for the asynchronous behavior (if it is asynchronous)
// or any value if it is synchronous
function eachLimit(data, limit, iterator) {
    return new Promise(function (resolve, reject) {
        var i = 0;
        var failed = false;
        function handleFailure(error) {
            failed = true;
            reject(error);
        }
        function queueAction() {
            try {
                // (standard Promise.resolve in place of the non-standard Promise.when)
                Promise.resolve(iterator(data[i]))
                    .then(handleSuccess, handleFailure);
            } catch (error) {
                reject(error);
            }
        }
        function handleSuccess() {
            if (!failed) {
                if (i < data.length) {
                    queueAction();
                    i += 1;
                } else {
                    resolve();
                }
            }
        }
        for (; i < data.length && i < limit; i += 1) {
            queueAction();
        }
    });
}

Parse SDK saveAll promise chaining

Advice from a Parse developer forum said to "limit saveAll to 75 objects unless one wants saveAll to make its own batches", which by default are 20 objects, and to put this in a promise chain.
I need to do a saveAll promise chain where I don't know how many promises I need.
How would this be done?
I have an array of arrays. The sub-arrays are all of length 75. I need every index of the master array to be passed to its own saveAll, each in its own promise.
var savePromises = []; // this will collect save promises
while ((partition = partitionedArray.pop()) != null) {
    savePromises.push(Parse.Object.saveAll(partition, {
        success: function (objs) {
            // objects have been saved...
        },
        error: function (error) {
            // an error occurred...
            status.error("something failed");
        }
    }));
}
return Parse.Promise.when(savePromises);
}).then(function () {
    // Set the job's success status
    status.success("successful everything");
A nice way to do this is to build the chain of promises recursively. If you've already batched the objects that need saving into batches, then some of the work is done already.
// assume batches is [ [ unsaved_object0 ... unsaved_object74 ], [ unsaved_object75 ... unsaved_object149 ], ... ]
function saveBatches(batches) {
    if (batches.length === 0) { return Parse.Promise.as(); }
    var nextBatch = batches[0];
    return Parse.Object.saveAll(nextBatch).then(function () {
        var remainingBatches = batches.slice(1, batches.length);
        return saveBatches(remainingBatches);
    });
}
EDIT - To call this, just call it and handle the promise it returns...
function doAllThoseSaves() {
    var batches = []; // your code to build unsaved objects
    // don't save them yet, just create (or update) e.g....
    var MyClass = Parse.Object.extend("MyClass");
    var instance = new MyClass();
    // set, etc
    batches = [ [ instance ] ]; // see? not saved
    saveBatches(batches).then(function () {
        // the saves are done
    }, function (error) {
        // handle the error
    });
}
EDIT 2 - At some point, the transactions you want to run won't fit under the burst limit of the free tier, and spreading them out (somehow) won't fit within the timeout limit.
I've struggled with a similar problem. In my case, it's a rare, admin-facing migration. Rare enough, and invisible to the end user, to have made me lazy about a solid solution. This is kind of a different question now, but a few ideas for a solid solution could be:
- see underscore.js _.throttle(), running from the client, to spread the transactions out over time (a sketch of this spacing idea follows this list)
- run your own node server that throttles calls into parse similarly (or equal) to _.throttle()
- a parse scheduled job that runs frequently, taking a small bite at a time (my case involves an import file, so I can save it quickly initially, open it in the job, count the number of objects that I've created so far, scan accordingly into the file, and do another batch)
- my current (extra dumb, but functional) solution: admin user manually requests N small batches, taking care to space those requests ("one mississippi, two mississippi, ...") between button presses
- heaven forbid - hire another back-end, remembering that we usually get what we pay for, and parse -- even at the free-tier -- is pretty nice
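Building on the saveBatches function from the answer above, here is a hedged sketch of the spacing idea: pausing between batches so the requests are spread out over time. The 1000 ms spacing is an arbitrary assumption, and this uses the same old-SDK Parse.Promise API as the answer:
function saveBatchesSlowly(batches) {
    if (batches.length === 0) { return Parse.Promise.as(); }
    return Parse.Object.saveAll(batches[0]).then(function () {
        // Wait a beat before the next batch to stay under the burst limit.
        var paused = new Parse.Promise();
        setTimeout(function () { paused.resolve(); }, 1000);
        return paused;
    }).then(function () {
        return saveBatchesSlowly(batches.slice(1));
    });
}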
