I'm trying to split an API request using an offset variable so that I can process partial results without waiting for the entire request to finish.
Basically, I make an API request for the first 100 values, then increase the offset by 100 and repeat until I reach the end. The offset is just the starting point.
/*Node.js connector to Context.io API*/
var key = "xxxxxx";
var secret = "xxxxxxxxx";
var inbox_id = "xxxxxxxxx";
var end_loop = false;
var offset = 6000;
/*global ContextIO, console*/
var ContextIO = require('contextio');
var ctxioClient = new ContextIO.Client('2.0', 'https://api.context.io', { key: key, secret: secret });
while (end_loop === false) {
    contextio_request(offset, function (response) {
        if (response.body.length < 100) {
            console.log("This is the end " + response.body.length);
            end_loop = true;
        } else {
            offset += 100;
        }
        console.log("Partial results processing");
    });
}
/* Context.io API request to access all messages for the id inbox */
function contextio_request(offset, callback) {
    ctxioClient.accounts(inbox_id).messages().get({body_type: 'text/plain', include_body: 1, limit: 100, offset: offset}, function (err, response) {
        "use strict";
        if (err) {
            return console.log(err);
        }
        callback(response);
    });
}
What I don't understand is why, if I replace the "while loop" with an "if condition", everything works, but with the "while" it enters an infinite loop. Also, is it the correct way to make a partial request -> wait for response -> process the response -> follow with next request?
The while loop will call contextio_request() almost indefinitely, because that function makes an asynchronous call which won't return immediately; the loop keeps spinning before any callback has had a chance to set end_loop.
A better way could be to write a recursive method that calls contextio_request(); inside that method you check whether the response body length is less than 100.
Basic logic:
var recursiveMethod = function(offset, partialCallback, completedCallback) {
    contextio_request(offset, function(response) {
        if (response.body.length < 100) {
            completedCallback(...);
        } else {
            partialCallback(...);
            recursiveMethod(offset + 100, partialCallback, completedCallback);
        }
    });
};
Also, is it the correct way to make a partial request -> wait for response -> process the response -> follow with next request?
I see no reason why not.
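For illustration, a minimal way to kick the recursion off (a sketch only; it assumes the two callbacks are handed the response, and what they do with it is up to you):

recursiveMethod(0, function partial(response) {
    console.log("Partial results processing: " + response.body.length + " messages");
}, function completed(response) {
    console.log("This is the end: " + response.body.length + " messages");
});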
Related
I'm sorry if this question has already been answered and I hope I'm not breaking any SO rules; if so, I apologise in advance...
I was wondering what the best way to handle a request limiter is? I've seen a bunch of throttles and rate-limiters online but I'm not sure how, or if, they could apply in my case.
I'm making a bunch of [OUTGOING] request-promises based on an Array, and on one server I can only make 90 requests per minute. My request-promises are generated by this command: return Promise.all(array.map(request)).
I was thinking to handle it like this:
var i = 0;
return rp({
url: uri,
json: true,
}).then((data) => {
if (i <=90) {
i ++;
return data;
} else {
return i;
}
});
but I'm not sure it will be a really effective way to handle it, plus I'm not sure how to handle the timing aspect yet... :S
Thanks in advance for your help and sorry I'm still a huge beginner...
You can use setInterval; check out its documentation for details.
var requestCount = 0;
setInterval(function(){
// Every 60 seconds, reset the count
requestCount = 0;
}, 60000);
// There needs to be an additional check before calling rp,
// that checks for requestCount > 90, and returns before starting the request.
rp({
url: uri,
json: true,
})
.then((data) => {
requestCount++;
return data;
});
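A minimal sketch of that additional check (untested; it delays a call instead of dropping it when the budget for the current minute is used up, and it counts a request when it starts rather than when it resolves):

function limitedRequest(uri) {
    if (requestCount >= 90) {
        // budget exhausted: retry once the interval above has reset the counter
        return new Promise(function (resolve) {
            setTimeout(function () {
                resolve(limitedRequest(uri));
            }, 1000);
        });
    }
    requestCount++;
    return rp({ url: uri, json: true });
}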
If the requests are started from different parts of the code, it might be useful to implement something like a per-server queue that holds each request back until it is allowed to run. The general handler:
var request = require("request-promise"); // assumption: any promise-returning request lib works here
var fromUrl = new Map();
function Server(url, maxPerMinute){
    // one shared queue (and rate limit) per url
    if(fromUrl.has(url)) return fromUrl.get(url);
    fromUrl.set(url, this);
    this.tld = url;
    this.maxPerMinute = maxPerMinute;
    this.queue = [];
    this.running = false;
}
Server.prototype = {
    run(d){
        if(this.running && !d) return;
        var curr = this.queue.shift();
        if(!curr){
            this.running = false;
            return;
        }
        this.running = true;
        var [url, resolve] = curr;
        Promise.all([
            request(this.tld + url),
            // space requests so we never exceed maxPerMinute
            new Promise(res => setTimeout(res, 1000*60/this.maxPerMinute))
        ]).then(([res]) => {
            resolve(res);
            this.run(true);
        });
    },
    request(url){
        return new Promise(res => {
            this.queue.push([url, res]);
            this.run();
        });
    }
};
module.exports = Server;
Usable like this:
var Server = require("./server"); // the module above
var google = new Server("http://google.com", 90);
google.request("/api/v3/hidden/service").then(res => ...);
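Note that the Map makes Server a per-URL singleton: constructing it again with the same URL returns the existing instance, so every caller shares one queue and one rate limit. The spacing comes from pairing each request with a setTimeout promise of 60000/maxPerMinute ms, so the next queued request starts no sooner than that.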
Probably just do 90 requests per minute. You could use a pseudo-recursive function that chains promises:
function multiRequest(urls, maxPerMinute){
    return new Promise(function(cb){
        var result = [];
        // iterate recursively
        (function next(i){
            // if finished, resolve the promise
            if(i >= urls.length) return cb(result);
            // start the next batch of requests
            var requests = Promise.all(urls.slice(i, i + maxPerMinute).map(request));
            // when the requests are done, add them to the result
            requests.then(data => result.push(...data));
            // when the requests AND one minute are done, continue with the next batch
            Promise.all([
                requests,
                new Promise(res => setTimeout(res, 1000*60))
            ]).then(_ => next(i + maxPerMinute));
        })(0);
    });
}
Use it like this:
multiRequest(["google.com","stackoverflow.com"], 90)
    .then(([google, so]) => ...);
I have 100 or so Word Open XML documents (.xml, not .docx, saved as "Word XML Document") stored on SharePoint as components.
I use AJAX to load these by selection, as XML, one to many at a time, into an array in which I also manage the selection sequence.
Once the user has selected the "components", they can insert them into Word. The insertion is done via an array traversal (there is probably a better way to do this, but for now it works).
writeDocSync drives the sequence; wordBuild (shown after it) does the loading:
function writeDocSync(){
    // run through nameXMLArray to find the right sequence
    var x = 0;
    var countXMLAdds = 0;
    //debugger;
    toggleWriteButton("disable");
    $('.progress-button').progressInitialize("Building Word");
    toggleProgressBar(true);
    // only run if we have data present
    if(nameXMLArray.length > 0){
        // increment through sequentially until we have all values
        while (countXMLAdds <= checkedList.length){
            // repeatedly traverse the array to get the next in sequence
            while (x < nameXMLArray.length){
                if (Number(nameXMLArray[x].position) === countXMLAdds && nameXMLArray[x].useStatus === true){
                    progHold = countXMLAdds;
                    wordBuild(nameXMLArray[x].xml, nameXMLArray[x].filename, countXMLAdds);
                }
                x++;
            }
            x = 0;
            countXMLAdds++;
        }
        document.getElementById("showCheck").className = "results";
        writeSelections("<b>You just built your proposal using<br/>the following components:</b><br/>");
        toggleWriteButton("enable");
    }
}
function wordBuild(xmlBody, nameDoc, progress){
    var aryLN = checkedList.length;
    var progPCT = (progress/aryLN)*100;
    progressMeter.progressSet(progPCT);
    Word.run(function (context) {
        var currentDoc = context.document;
        var body = currentDoc.body;
        body.insertOoxml(xmlBody, Word.InsertLocation.end);
        body.insertBreak(Word.BreakType.page, Word.InsertLocation.end);
        return context.sync().then(function () {
            showNotification("Written " + nameDoc);
        });
    })
    .catch(function (error) {
        showNotification('Error: ' + nameDoc + ' :' + JSON.stringify(error));
        if (error instanceof OfficeExtension.Error) {
            showNotification('Debug info: ' + JSON.stringify(error.debugInfo));
        }
    });
}
All the documents load fine singly, and they also load in batches of, say, 10-30 or more.
The problem comes when I load the entire set (I have a "check all" option).
Sometimes 50 will build before I get an exception, sometimes 60, rarely more than 60; very occasionally there is a gap where the exception doesn't occur, and then it continues later.
The exception (which is repeated for each file) is:
Debug info: {}
Error: componentABC.xml :{"name":"OfficeExtension.Error","code":"GeneralException","message":"An internal error has occurred.","traceMessages":[],"debugInfo":{},"stack":"GeneralException: An internal error has occurred.\n at Anonymous function (https://customerportal.sharepoint.com/sites/components/Shared%20Documents/componentAssembler/Scripts/Office/1/word-win32-16.00.js:19:150094)\n at yi (https://customerportal.sharepoint.com/sites/components/Shared%20Documents/componentAssembler/Scripts/Office/1/word-win32-16.00.js:19:163912)\n at st (https://customerportal.sharepoint.com/sites/components/Shared%20Documents/componentAssembler/Scripts/Office/1/word-win32-16.00.js:19:163999)\n at d (https://customerportal.sharepoint.com/sites/components/Shared%20Documents/componentAssembler/Scripts/Office/1/word-win32-16.00.js:19:163819)\n at c (https://customerportal.sharepoint.com/sites/components/Shared%20Documents/componentAssembler/Scripts/Office/1/word-win32-16.00.js:19:162405)"}
Any help with what might cause this would be hugely appreciated.
Oh, I should also say: the files where the exception is raised don't get inserted into Word, but in smaller batches they work without issue.
Word.run() is an asynchronous call, and there's a limit to the number of concurrent Word.run() calls you can make. Since you're executing Word.run() inside a while loop, all of them get kicked off at the same time and run simultaneously.
There are a few ways to work around this.
Put everything inside one Word.run() call. This puts everything in one giant batch, avoiding multiple roundtrip calls to Word.
if (nameXMLArray.length > 0) {
    Word.run(function(context) {
        //...
        while(...) {
            wordBuild(context, nameXMLArray[x].xml, nameXMLArray[x].filename, countXMLAdds);
            //...
        }
        return context.sync();
    });
}
function wordBuild(context, xmlBody, nameDoc, progress) {
//everything as it currently is, except without the Word.run and the context.sync
}
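Fleshed out with the structures from your writeDocSync, the single-batch version might look something like this (an untested sketch; the progress meter and per-file notifications are omitted for brevity):

if (nameXMLArray.length > 0) {
    Word.run(function (context) {
        var body = context.document.body;
        var countXMLAdds = 0;
        while (countXMLAdds <= checkedList.length) {
            var x = 0;
            while (x < nameXMLArray.length) {
                if (Number(nameXMLArray[x].position) === countXMLAdds && nameXMLArray[x].useStatus === true) {
                    body.insertOoxml(nameXMLArray[x].xml, Word.InsertLocation.end);
                    body.insertBreak(Word.BreakType.page, Word.InsertLocation.end);
                }
                x++;
            }
            countXMLAdds++;
        }
        // one context.sync() applies the whole batch in a single roundtrip
        return context.sync();
    }).catch(function (error) {
        showNotification('Error: ' + JSON.stringify(error));
    });
}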
Implement wordBuild as a promise, and use AngularJS’s $q service to chain the promises, something vaguely like this:
function wordBuild(...) {
    var deferred = $q.defer();
    Word.run(function(context) {
        // current code
        return context.sync().then(function() {
            deferred.resolve();
        });
    });
    return deferred.promise;
}
//Somewhere else
var promises = [];
for (var x…)
{
    promises.push(wordBuild(…));
}
$q.all(promises);
https://docs.angularjs.org/api/ng/service/$q
Angularjs $q.all
Chain the wordBuild calls yourself, something like this:
var x = 0;
var context;
function wordBuild() {
    if (x >= nameXMLArray.length) {
        return;
    } else {
        context.document.body.insertOoxml(ooxml, Word.InsertLocation.end);
        x++;
        return context.sync().then(wordBuild);
    }
}
Word.run(function (ctx) {
    context = ctx;
    return wordBuild();
});
This sort of approach is difficult to maintain, but it could work.
Incidentally, the progress meter in your original code only updates when the call to Word starts, not when it actually returns. You might want to move the progress meter update code into the callback.
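For example (a sketch reusing the names from your wordBuild):

return context.sync().then(function () {
    showNotification("Written " + nameDoc);
    // update the meter only after Word has actually applied this insert
    progressMeter.progressSet((progress / checkedList.length) * 100);
});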
I ended up using jQuery deferreds, I was already using jQuery for treeview and checkboxes etc. so it made sense.
This is a mix of Geoffrey's suggestions and my own! I cannot claim it to be good code, only that it does work. (Whether it is good code will take me more time to understand!)
I run batches of 49 XML doc inserts: at 51 the async Word.run call failed in tests, and inserts of 80 or so documents in one Word.run caused Word to freeze, so although not proven, 49 inserts within one Word.run seems like a good starter for ten! 50 batches of 49 pieces allows for 2450 inserts, which is way beyond anything I can see being needed, and would probably break Word!
To get the deferreds and the variables passed to them to keep their values once launched asynchronously, I had to create an object to carry both the new deferred and its values, so I could use "bind".
As the Word async call returns context.sync(), I check the count of the batch; when the batch is completed, I then call the next batch from inside the context.sync().
A sort of recursive call, still a combination of Geoffrey's suggestion and batches. This has a theoretical limit of 50 batches of 49 document sections. So far this has worked in all tests.
The progress meter exists in its own timed call, but as JavaScript prioritises code over UI it does hop. For example, with 120 documents it will hop to just below half way fairly quickly, then a while later jump to almost complete, then complete (effectively 3 hops of massively fast sequential percentage increases; the various tricks suggested have had zero effect, forceRepaint() being the latest experiment!).
function startUILock(){
// batch up in groups of 49 documents (51 and more were shown to fail, 49 gives manoeuvre room)
toggleProgressBar(true);
$('.progress-button').progressInitialize("Building Word");
progressMeter.progressSet(1);
$.blockUI({message: "Building word..."});
setTimeout(forceRepaint, 3000);
}
function forceRepaint(){
var el = document.getElementById('progDiv');
el.style.cssText += ';-webkit-transform:rotateZ(0deg)';
el.offsetHeight;
el.style.cssText += ';-webkit-transform:none';
}
function UIUnlock(insertedCount){
debugger;
var pct = (insertedCount/checkedList.length)*100
//showNotification('Progress percent is: ' + pct);
if (insertedCount !== checkedList.length ){
progressMeter.progressSet(pct);
forceRepaint();
} else {
$.unblockUI();
progressMeter.progressSet(100);
}
}
function writeDocDeffered(){
insertedCounter = 0;
var lastBatch = 0;
var x = 49;
var z = checkedList.length + 1;
if(x > z){
x=z;
}
deferreds = buildDeferredBatch(x, lastBatch);
$.when(deferreds).done(function () {
return;
})
.fail(function () {
//showNotification('One of our promises failed');
});
}
function buildDeferredBatch(batch, lastBatch) {
// this ensures the variables remain as issued - allows use of "bind"
var deferredsa = [];
var docSender = {
defr : $.Deferred(),
POSITION: batch,
LASTPOSITION: lastBatch,
runMe : function(){
this.defr.resolve(writeDocBatchedDeferred(this.POSITION, this.LASTPOSITION, this.defr));
}
}
// small timeout might not be required
deferredsa.push(setTimeout(docSender.runMe.bind(docSender), 10));
return deferredsa;
}
function writeDocBatchedDeferred(batch, lastBatch, defr){
    // write the batches using deferreds and promises
    var x = 0;
    var countXMLAdds = lastBatch;
    var fileName;
    debugger;
    // only run if we have data present
    if(nameXMLArray.length > 0){
        var aryLN = checkedList.length;
        // increment through sequentially until we have all values
        Word.run(function (context) {
            var currentDoc = context.document;
            var body = currentDoc.body;
            while (countXMLAdds <= batch){
                // repeatedly traverse the array to get the next in sequence
                while (x < nameXMLArray.length){
                    if (Number(nameXMLArray[x].position) === countXMLAdds && nameXMLArray[x].useStatus === true){
                        fileName = nameXMLArray[x].filename;
                        body.insertOoxml(nameXMLArray[x].xml, Word.InsertLocation.end);
                        body.insertBreak(Word.BreakType.page, Word.InsertLocation.end);
                        insertedCounter = countXMLAdds;
                        var latest = insertedCounter;
                        var timerIt = {
                            LATEST: latest,
                            runMe : function(){
                                UIUnlock(this.LATEST);
                            }
                        };
                        setTimeout(timerIt.runMe.bind(timerIt), 1000);
                    }
                    x++;
                }
                x = 0;
                countXMLAdds++;
            }
            return context.sync().then(function () {
                // after the loop countXMLAdds has passed batch, so this batch is done
                if(countXMLAdds > batch){
                    var lastBatch = batch + 1;
                    // set for next batch
                    var nextBatch = batch + 50;
                    var totalBatch = checkedList.length + 1;
                    // do not exceed the total batch
                    if(nextBatch > totalBatch){
                        nextBatch = totalBatch;
                    }
                    // any left to process? keep going
                    if (nextBatch <= totalBatch && lastBatch < nextBatch){
                        deferreds = deferreds.concat(buildDeferredBatch(nextBatch, lastBatch));
                    }
                    // this batch done
                    defr.resolve();
                }
            });
        })
        .catch(function (error) {
            showNotification('Error: ' + nameXMLArray[x].filename + " " + JSON.stringify(error));
            if (error instanceof OfficeExtension.Error) {
                showNotification('Debug info: ' + JSON.stringify(error.debugInfo));
            }
        });
        document.getElementById("showCheck").className = "results";
        writeSelections("<b>You just built your document using<br/>the following components:</b><br/>");
    }
    return defr.promise();
}
I use the recursive function below in order to reopen a website if the HTTP status != 200:
retryOpen = function(){
    this.thenOpen("http://www.mywebsite.com", function(response){
        utils.dump(response.status);
        var httpstatus = response.status;
        if(httpstatus != 200){
            this.echo("FAILED GET WEBSITE, RETRY");
            this.then(retryOpen);
        } else {
            var thisnow = hello[variable];
            this.evaluate(function(valueOptionSelect){
                $('select#the_id').val(valueOptionSelect);
                $('select#the_id').trigger('change');
            }, thisnow);
        }
    });
}
The problem is that sometimes the retryOpen function does not even get as far as the function(response){} callback; my script then freezes.
I wonder how one could change the function so that it recursively tries to open the website again when there is no response at all (not even an error code such as 404)? In other words, how can I rewrite retryOpen so that it reruns when the callback is not reached within a certain amount of time?
I would try something like this. Please note this is untested code, but should get you on the correct path
retryOpen = function(maxretry){
    var count = 0;
    function makeCall(url){
        this.thenOpen(url, function(response){
            utils.dump(response.status);
        });
    }
    function openIt(){
        makeCall.call(this, "http://www.mywebsite.com");
        this.waitFor(function check() {
            var res = this.status(false);
            return res.currentHTTPStatus === 200;
        }, function then() { // step to execute once the check passes
            var thisnow = hello[variable];
            this.evaluate(function(valueOptionSelect){
                $('select#the_id').val(valueOptionSelect);
                $('select#the_id').trigger('change');
            }, thisnow);
        }, function timeout() { // step to execute if check has failed
            if(count < maxretry){
                openIt.call(this);
            }
            count++;
        },
        1000 // wait 1 sec
        );
    }
    openIt.call(this);
}
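Untested wiring for a standard CasperJS script (the retry cap of 3 is arbitrary):

var casper = require('casper').create();
var utils = require('utils');

casper.start();
casper.then(function () {
    retryOpen.call(this, 3); // allow up to 3 retries before giving up
});
casper.run();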
I am writing a tool that will loop through a list of ids (represented by id in id_list). We check a cache object to see if we already have a value for the id. If we don't already have a value for the given id, we'll need to make a GET request to get the associated value and then add it to the cache.
In the time it takes to do one async GET request, the entire loop runs. This means the cache is never actually used. Is there any way I can require the GET request to finish before continuing the loop? Normally I would chain the requests through the onSuccess function of the previous one, but since there's a chance no request will be made, I can't.
cache = {};
var rating;
for (id in id_list){
    if (id in cache){
        rating = cache[id];
    } else {
        rating = $.get(~~~async get request happens here~~~);
        cache[id] = rating;
    }
    $(".result").append(rating); // display result in ui
}
You can't use a for loop if you want it to wait between each iteration. A common design pattern is to create a local function for a given iteration and then call it each time the async operation finishes.
Assuming id_list is an object with properties, you could do it like this:
var cache = {};
var ids = Object.keys(id_list);
var cntr = 0;
function next() {
var id;
if (cntr < ids.length) {
id = ids[cntr++];
// see if we can just get the value from the cache
if (id in cache) {
$(".result").append(cache[id]);
// schedule next iteration of the loop
setTimeout(next, 1);
} else {
// otherwise get rating via Ajax call
$.get(...).then(function(rating) {
$(".result").append(rating);
// put rating in the cache
cache[id] = rating;
next();
});
}
}
}
next();
Or, if id_list is an array of ids, you can change it to this:
var cache = {};
var cntr = 0;
var id_list = [...];
function next() {
var id;
if (cntr < id_list.length) {
id = id_list[cntr++];
// see if we can just get the value from the cache
if (id in cache) {
$(".result").append(cache[id]);
// schedule next iteration of the loop
setTimeout(next, 1);
} else {
// otherwise get rating via Ajax call
$.get(...).then(function(rating) {
$(".result").append(rating);
// put rating in the cache
cache[id] = rating;
next();
});
}
}
}
next();
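As a side note, since $.get already returns a promise, the array version can also be written by reducing over the ids; a sketch (assuming jQuery 3+, where jQuery promises are Promises/A+ compliant):

var cache = {};
id_list.reduce(function (chain, id) {
    return chain.then(function () {
        if (id in cache) {
            $(".result").append(cache[id]);
            return;
        }
        // "/rating/" + id is a stand-in for your real endpoint
        return $.get("/rating/" + id).then(function (rating) {
            cache[id] = rating;
            $(".result").append(rating);
        });
    });
}, $.when());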
I have a problem in my project.
To describe the issue I have written a simplified code snippet:
function waitFor(fnReady, fnCallback) {
var check = function() {
if (fnReady()) {
fnCallback();
}
else {
setTimeout(check, 100); // wait another 100ms, and try again
}
};
check();
}
var result = 0;
var flag = true;
function ajaxRequest() {
    setTimeout(function() {
        flag = false;
        console.log('ping');
    }, 3000);
}
function ajaxRequestHandler() {
    setTimeout(function() {
        flag = true;
        console.log('pong');
    }, 200);
}
for(var i = 0; i < 10; i++){
    waitFor(function() { return flag; }, ajaxRequest);
    waitFor(function() { return !flag; }, ajaxRequestHandler);
}
it returns:
ping - 10 times
pong - 10 times
desired result:
ping
3 second timeout
ping
---------------------
ping
3 second timeout
pong
--------------------
.....
Can you help correct my code?
UPDATE
Actual problem:
I have a Google map.
There are a lot of places where I need to redraw it.
For the application logic it is very important that if I send
request1
request2
request3
request4
I should handle the responses in this order:
handle response of request1
handle response of request2
handle response of request3
handle response of request4
The problem is that I don't know the order of the requests in advance.
In different places in the file I have the following lines:
google.maps.event.addListener(searchBox, 'bounds_changed', renderTerminalsOnMapAndFitBounds);
...
$.getJSON('getAllTerminals.json', renderTerminalsOnMapAndFitBounds);
.....
$.getJSON('getAllTerminalsInsideRectangle.json', renderTerminalsOnMapAndFitBounds);
...
$.getJSON('getAllTerminalsInsideCircle.json', renderTerminalsOnMapAndFitBounds);
...
$.getJSON('getBigTerminals.json', renderTerminalsOnMapAndFitBounds);
........
The renderTerminalsOnMapAndFitBounds method sends a request to the server and, in the success branch, renders the result on the map. But these events happen very often.
Try this pattern
var map = "abcdefghi".split("");
var responses = []; // collect responses
$.ajaxSetup({
    beforeSend : function(jqxhr, settings) {
        jqxhr.id = Number(settings.data.split(/id=/)[1]); // add `id` to `request`
        console.log(settings.data.split(/id=/)[1]);
    }
});
var request = function(id, data) {
    // append `id` to the request data
    return $.post("/echo/json/", {json: JSON.stringify([data]), id: id});
};
$.each(map, function(k, v) {
    setTimeout(function() {
        request(k + 1, v)
        .done(function(data) {
            // do stuff at each response
            console.log(data); // note return values
        })
        .always(function(data, textStatus, jqxhr) {
            // do stuff at each response
            responses.push([jqxhr.id, data[0]]);
            // do stuff when all requests completed; results are items in `responses`
            if (responses.length === map.length) {
                responses.sort(function(a, b) { return a[0] - b[0]; }); // sort `responses` based on `id`
                // do stuff with `responses`
                console.log(responses);
            }
        });
    }, 1 + Math.random() * 1000); // async
});
jsfiddle http://jsfiddle.net/guest271314/g254bbjg/
my variant:
var index = 0;
// callback function
function tryMe(param1) {
    waitFor(function() { return param1 == index; },
        function() {
            console.log(param1);
            index++;
        }
    );
}
// callback executer
function callbackTester(callback, i) {
    setTimeout(function() { callback(i); }, 20000 - i*1000);
}
// test function
for(var i = 0; i < 10; i++){
    callbackTester(tryMe, i);
}
function waitFor(fnReady, fnCallback) {
var check = function() {
if (fnReady()) {
fnCallback();
}
else {
setTimeout(check, 100); // wait another 100ms, and try again
}
};
check();
}
http://jsfiddle.net/x061dx75/17/
I personally would use promises for this, but you've said no promises (not sure why), so here's a generic sequencer algorithm in plain javascript (tested in the jsFiddle linked below):
function sequence(fn) {
// initialize sequence data upon first use
if (typeof sequence.low === "undefined") {
sequence.low = sequence.high = 0;
sequence.results = {};
}
// save id in local variable so we can reference it in the closure from the function below
var id = sequence.high;
// advance to next sequence number
++sequence.high;
// initialize the result value for this sequence callback
sequence.results[id] = {fn: fn, args: [], ready: false, context: null};
return function(/* args */) {
// save args and context and mark it ready
var args = Array.prototype.slice.call(arguments, 0);
// get the results object for this callback and save info in it
var thisResult = sequence.results[id];
thisResult.args = args;
thisResult.context = this;
thisResult.ready = true;
// now process any requests in order that are ready
for (var i = sequence.low; i < sequence.high; i++) {
var result = sequence.results[i];
// if this one is ready, process it
if (result.ready) {
// increment counter past this result
++sequence.low;
// remove this stored result
delete sequence.results[i];
// process this result
result.fn.apply(result.context, result.args);
} else {
// if this one not ready, then nothing to do yet
break;
}
}
};
}
// your usage:
google.maps.event.addListener(searchBox, 'bounds_changed', sequence(renderTerminalsOnMapAndFitBounds));
...
$.getJSON('getAllTerminals.json', sequence(renderTerminalsOnMapAndFitBounds));
.....
$.getJSON('getAllTerminalsInsideRectangle.json', sequence(renderTerminalsOnMapAndFitBounds));
...
$.getJSON('getAllTerminalsInsideCircle.json', sequence(renderTerminalsOnMapAndFitBounds));
...
$.getJSON('getBigTerminals.json', sequence(renderTerminalsOnMapAndFitBounds));
........
Working demo: http://jsfiddle.net/jfriend00/aqugm1fs/
Conceptually, what this does is as follows:
Pass a substitute completion handler in place of the normal completion callback.
This substitute function marks each response with a sequence id and saves the original completion handler.
If a response comes back while another response with a lower sequence id is still pending, then the result is just stored and saved for later.
As each response comes in, it processes as many responses in sequence as are ready
Note: while all the examples you have use the same callback function, this will work with any callback function so it would work with a mix of different types of operations.
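For instance (a sketch; updateSidebar is a hypothetical second handler), two different callbacks still replay in request order:

$.getJSON('getAllTerminals.json', sequence(renderTerminalsOnMapAndFitBounds));
$.getJSON('getBigTerminals.json', sequence(function (data) {
    updateSidebar(data); // hypothetical second handler
}));

Even if the second response arrives first, sequence() holds it until the first callback has run.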