I am uploading multiple files from a browser and need to do so sequentially.
So I kick off each upload from the previous upload's completion callback.
It's simple and works just fine.
During the upload I display the progress to the user along with a cancel button.
If the user hits cancel I want to stop the entire callback chain.
How would I do that? Is there some mechanism in JavaScript to halt my callback chain?
OK, here is an example of a callback chain in JavaScript. The question is: how do I break it from a "cancel" button?
https://jsfiddle.net/jq7m9beq/
var filenamesToProcessQueue = ['v.jpg', 'w.jpg', 'x.jpg', 'y.jpg', 'z.jpg']

function finishedProcessing (filename) {
  console.log('finished processing: ' + filename)
  // processing finished for this file, start again by chaining to the next one
  doProcessFiles()
}

function waitForEachFile (filename, callback) {
  // wait a couple of seconds and log the filename
  setTimeout(function () {
    console.log('Waited 2 seconds for: ' + filename)
    callback(filename)
  }, 2000)
}

function doProcessFiles() {
  // get the next file to process and remove it from the queue at the same time
  var filename = filenamesToProcessQueue.pop()
  // if the file is undefined then the queue was empty
  if (typeof filename !== 'undefined') {
    console.log('Process ' + filename)
    waitForEachFile(filename, finishedProcessing)
  }
}

doProcessFiles()
On click of the cancel button, set a flag:
var cancelFlag = false;
document.getElementById("cancelBtn").addEventListener("click", function () {
  cancelFlag = true;
  // other code
});
Then change your doProcessFiles to:
function doProcessFiles() {
  if (cancelFlag) {
    return false; // this will break the chain
  }
  // get the next file to process and remove it from the queue at the same time
  var filename = filenamesToProcessQueue.pop()
  // if the file is undefined then the queue was empty
  if (typeof filename !== 'undefined') {
    console.log('Process ' + filename)
    waitForEachFile(filename, finishedProcessing)
  }
}
You can also stop the wait step:
function waitForEachFile (filename, callback) {
  if (cancelFlag) {
    return false; // this prevents a new wait from being scheduled as well
  }
  // wait a couple of seconds and log the filename
  setTimeout(function () {
    console.log('Waited 2 seconds for: ' + filename)
    callback(filename)
  }, 2000)
}
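Note that this check only prevents a new wait from being scheduled; a setTimeout that is already pending will still fire once (the chain still stops afterwards, because doProcessFiles sees the flag). If you also want to cancel the wait that is currently in flight, here is a minimal sketch using clearTimeout, assuming a single module-level timer variable (pendingTimer is not part of the original code):

var pendingTimer = null;

function waitForEachFile (filename, callback) {
  // remember the timer id so the cancel handler can clear it
  pendingTimer = setTimeout(function () {
    console.log('Waited 2 seconds for: ' + filename)
    callback(filename)
  }, 2000)
}

document.getElementById("cancelBtn").addEventListener("click", function () {
  cancelFlag = true;
  if (pendingTimer !== null) {
    clearTimeout(pendingTimer); // cancel the wait that is currently pending
    pendingTimer = null;
  }
});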
You can also set the flag on the cancel button itself:
document.getElementById("cancelBtn").setAttribute("data-flag", "true");
and check this value:
var cancelFlag = Boolean(document.getElementById("cancelBtn").getAttribute("data-flag"));
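One caveat with reading the flag this way: Boolean() of any non-empty string is true, so Boolean("false") would also come out as true; the check above only reads as false while the attribute is absent (getAttribute returns null). Comparing against the string "true" is a more explicit test, for example:

var cancelFlag = document.getElementById("cancelBtn").getAttribute("data-flag") === "true";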
I have created an API that takes user input and processes it. However, the processing takes more than 5 seconds (the Dialogflow limit).
How can I continue with other processing until this particular process is finished?
Or is it possible to return a message to the user like "Please hold on a bit..." so that the timer restarts?
var message = "hi"; // test purpose

async function GetCertain_info(agent) {
  await uiFx();
  agent.add("Here are the information on " + message);
}

async function uiFx() {
  var { ui } = require('./uia.js');
  return new Promise(function (resolve, reject) {
    ui().then((msg) => {
      console.log("Destination Message : " + msg)
      message = msg;
      resolve(message);
    }).catch((msg) => {
      console.log(msg)
      message = msg;
      reject(message);
    })
  });
}
Appreciate your help
Yes, it is possible to return a message to the user like "Please hold on a bit…" by setting up a FollowupEvent.
You can extend the 5-second intent limit up to 15 seconds by setting up multiple follow-up events. Currently, you can only chain 3 follow-up events one after another (which extends the timeout up to 15 seconds).
Here's an example of how you can do it in the fulfillment:
function function1(agent) {
  // This function handles your intent fulfillment
  // you can initialize your db query here.
  // When data is found, store it in a separate table for quick search

  // get the current time
  var currentTime = new Date().getTime();
  while (currentTime + 4500 >= new Date().getTime()) {
    /* waits for 4.5 seconds
       You can check every second if data is available in the database;
       if not, call the next follow-up event and do the
       same while loop in the next follow-up event
       (up to 3 follow-up events)
    */
    /*
    if (data.found) {
      agent.add('your data here'); // Returns response to user
    }
    */
  }
  // add a follow-up event
  agent.setFollowupEvent('customEvent1');
  // add a default response (in case there's a problem with the follow-up event)
  agent.add("This is function1");
}

let intentMap = new Map();
intentMap.set('Your intent name here', function1);
agent.handleRequest(intentMap);
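To chain a second 5-second window, the handler for the follow-up event repeats the same pattern and, if the data still is not ready, triggers the next event. Here is a minimal sketch, where function2, the intent name, and customEvent2 are hypothetical names (only customEvent1 comes from the code above):

function function2(agent) {
  // handles the intent that is triggered by 'customEvent1'
  var currentTime = new Date().getTime();
  while (currentTime + 4500 >= new Date().getTime()) {
    // poll for the data here exactly as in function1 and
    // respond with agent.add(...) as soon as it is available
  }
  // still not ready: hand off to the next follow-up event for another window
  agent.setFollowupEvent('customEvent2');
  agent.add("This is function2");
}

// registered alongside function1, before agent.handleRequest(intentMap)
intentMap.set('Intent triggered by customEvent1', function2);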
In my application, I am trying to gather data from two different sources. So first it has to loop ($.each) through an internal JSON file and see if the data is found; if not, it has to make another $.getJSON() request to get the data from an external source.
So the second $.getJSON() is dependent on the first one and sometimes does not need to run at all if the data is already found in the first one.
First $.getJSON() call:
$.getJSON(InternalURL, function (data) {
  $.each(data.Source, function (index, value) {
    if (artist.indexOf(value.keyword) > -1) {
      image = value.image;
      $(".bg").css("background-image", "url(" + image + ")");
    }
  });
});
Second $.getJSON() call:
$.getJSON(ExternalURL, function (data) {
  image = data.artist.image;
  $(".bg").css("background-image", "url(" + image + ")");
});
The other consideration is the timing of this process. Of course it has to be done as fast as possible, so it won't be noticeable in the interface.
UPDATE
Example using Async / Await based on the answer provided by #Tiny Giant
JSFiddle
Currently this code works on JSFiddle, but in the actual application, while it works fine, it logs a console error, "Uncaught (in promise)", with an object containing methods such as always, abort, fail, etc. Any idea why this error comes up?
You could use Async / Await (initially defined in the ECMAScript® 2017 Language Specification). See caniuse.com for information on current support.
This works by pausing the current execution context and removing it from the stack once the getJSON call begins. Once the $.getJSON call returns, the paused execution context will be added back onto the stack, then execution will continue once the preceding items in the stack have been processed.
(async () => {
const post = await $.getJSON('https://jsonplaceholder.typicode.com/posts/1');
const comments = await $.getJSON('https://jsonplaceholder.typicode.com/comments');
post.comments = comments.filter(e => e.postId === post.id);
console.log(post);
})();
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
To apply this to your specific example, you could use the following code:
(async () => {
let image, data = await $.getJSON(InternalURL);
if(data) {
for(let value of data.Source) {
if(!image && artist.indexOf(value.keyword) > -1) {
image = value.image;
}
}
} else {
// request failed
}
if(!image) {
let data = await $.getJSON(ExternalURL);
if(data) {
image = data.artist.image;
} else {
// request failed
}
}
$(".bg").css("background-image", "url(" + image + ")");
})();
This requests the first resource and, once that request has completed and execution continues, checks the response as your example does. If the script doesn't find what it is looking for in the first response, it initiates the second request. Once execution continues again, it sets the background image.
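Regarding the "Uncaught (in promise)" error mentioned in the update: $.getJSON returns a jqXHR object (hence the always, abort, and fail methods on the logged object), and when a request fails the awaited promise rejects with that object. Because a failure throws rather than yielding a falsy value, the if (data) / else branches above never run for a failed request, and with no rejection handler the browser logs "Uncaught (in promise)". Here is a minimal sketch of the same logic with try/catch around each await (not part of the original answer):

(async () => {
  let image;
  try {
    const data = await $.getJSON(InternalURL);
    for (let value of data.Source) {
      if (!image && artist.indexOf(value.keyword) > -1) {
        image = value.image;
      }
    }
  } catch (jqXHR) {
    console.log('Internal request failed: ' + jqXHR.status);
  }
  if (!image) {
    try {
      const data = await $.getJSON(ExternalURL);
      image = data.artist.image;
    } catch (jqXHR) {
      console.log('External request failed: ' + jqXHR.status);
    }
  }
  if (image) {
    $(".bg").css("background-image", "url(" + image + ")");
  }
})();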
Further reading:
Async functions
Await operator
Another option would be to use callbacks. This method is more difficult to follow, but is supported everywhere.
$.getJSON('https://jsonplaceholder.typicode.com/posts/1', post => {
$.getJSON('https://jsonplaceholder.typicode.com/comments', comments => {
post.comments = comments.filter(e => e.postId === post.id);
console.log(post);
});
});
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
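Another callback-based variation that stays closer to the original code: nest the second $.getJSON inside the first so it only fires when the matched keyword entry has no image value: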
$.getJSON(InternalURL, function (data) {
  $.each(data.Source, function (index, value) {
    if (artist.includes(value.keyword)) {
      if (value.image) {
        image = value.image;
        $(".bg").css("background-image", "url(" + image + ")");
      } else {
        $.getJSON(ExternalURL, function (data) {
          image = data.artist.image;
          $(".bg").css("background-image", "url(" + image + ")");
        });
      }
    }
  });
});
We have a web application which, upon visiting the URL, prepares and generates a .zip file that is then downloaded.
I need to create a Node.js application using the request package that keeps making requests until there is an attachment header, at which point the file is downloaded.
The page which generates the .zip file contains a simple HTML message stating that the file is being prepared for download, with a JavaScript reload(true) called on load.
I'm not sure if this is the right way of doing it, but I am open to suggestions.
You could use async.until to loop through some logic until the header is available:
let async = require('async');
let request = require('request');
let fs = require('fs');

let success = false;
async.until(
  // Do this as a test for each iteration
  function() {
    return success == true;
  },
  // Function to loop through
  function(callback) {
    request(..., function(err, response, body) {
      // Header test (Node lower-cases header names in response.headers)
      if (response.headers['content-disposition'] == 'attachment;filename=...') {
        response.pipe(fs.createWriteStream('./filename.zip'));
        success = true;
      }
      // If you want to set a timeout delay
      // setTimeout(function() { callback(null) }, 3000);
      callback(null);
    });
  },
  // Called once the test passes (or an error occurred)
  function(err) {
    // Do anything after it's done
  }
)
You could do it in some other ways, such as with setInterval, but I would choose async for friendlier asynchronous handling.
EDIT: Here's another example using setTimeout (I didn't like the initial delay with setInterval).
let request = require('request');
let fs = require('fs');

let check_loop = () => {
  request('http://url-here.tld', (err, response, body) => {
    // Edit the line below to look for the specific header and value
    if (response.headers['{{HEADER_NAME_HERE}}'] == '{{EXPECTED_HEADER_VAL}}') {
      response.pipe(fs.createWriteStream('./filename.zip')); // write file to ./filename.zip
    } else {
      // Not ready yet, try again in 30s
      setTimeout(check_loop, 30 * 1000);
    }
  });
};

check_loop();
I am writing a PhantomJS program that should open a page, track its load time, and write the loading time to the console. My goal is to make this function run 5 times.
The problem is that when I write a "for" loop that executes only once, the function runs correctly, but when I try to run it 2 times, I get the error message that it failed to open the address. It seems that the address is not passed the second time. I have no idea what could be going wrong. This is my code:
var page = require('webpage').create(),
    system = require('system'),
    t;

// Opening a page and tracking page load time
var loadpage = function () {
  address = 'http://www.google.com';
  t = Date.now();
  page.open(address, function (status) {
    if (status !== 'success') {
      console.log('FAIL to load the address');
    } else {
      t = Date.now() - t;
      console.log('Loading ' + address);
      console.log('Loading time ' + t + ' msec');
    }
    phantom.exit();
  });
};

for (var i = 0; i < 2; i++) {
  loadpage(i);
}
page.open() is an asynchronous function. If you call it in a loop, then the loop will be fully executed before even the first page request is sent. The problem is that calling page.open() multiple times immediately after each other will overwrite the previous invocations. At best, you can only load the last URL.
You either need to wait for each page load or you can create multiple pages in order to request pages in parallel. Also, you should exit (phantom.exit()) PhantomJS only after you've waited for each page load.
Here are some ways to do this:
Q: Looping over urls to do the same thing
Q: loop through array of urls in phantomjs using javascript
Easy way to do it:
var loadpage = function (i, max) {
  if (i === max) {
    phantom.exit();
    return;
  }
  var address = 'http://www.google.com';
  var t = Date.now();
  page.open(address, function (status) {
    if (status !== 'success') {
      console.log('FAIL to load the address');
    } else {
      t = Date.now() - t;
      console.log('Loading ' + address);
      console.log('Loading time ' + t + ' msec');
    }
    loadpage(i + 1, max)
  });
};

loadpage(0, 5);
It seems you shouldn't call phantom.exit() halfway through your program.
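For the parallel approach mentioned above, here is a minimal sketch (not from the original answer) that creates one page object per request and exits only after every load has reported back; the URL and the count of 5 are carried over from the question:

var address = 'http://www.google.com';
var total = 5;
var finished = 0;

for (var i = 0; i < total; i++) {
  (function () {
    // each request gets its own page object, so the loads do not overwrite each other
    var page = require('webpage').create();
    var t = Date.now();
    page.open(address, function (status) {
      if (status !== 'success') {
        console.log('FAIL to load the address');
      } else {
        console.log('Loading time ' + (Date.now() - t) + ' msec');
      }
      finished++;
      if (finished === total) {
        phantom.exit(); // exit only after every page load has completed
      }
    });
  })();
}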
In the getCursor_ function below, please explain how I could determine whether IndexedDB has opened and, if not, re-run the function once it has. getCursor_ runs correctly; however, since all of these calls are asynchronous, the function fails when it executes before the database has finished opening.
This code is executed in a separate process:
var ixDb;
var ixDbRequest;

ixDbRequest = window.indexedDB.open(dbName, dbVersion);
ixDbRequest.onsuccess = function (e) {
  ixDb = ixDbRequest.result || e.result;
};
The getCursor_ function below works fine unless ixDbRequest has not completed execution. I figured out how to test for that, but I am not sure how to wait in the instance where the open database request is still running.
function getCursor_(objectStoreName) {
  if (!ixDb) {
    if (ixDbRequest) {
      // Database is still opening. Need code to wait or re-run
      // once completed here.
      // I tried the following with no luck:
      ixDbRequest.addEventListener("success",
        getCursor_(objectStoreName), false);
      return;
    }
  }
  var transaction = ixDb.transaction(objectStoreName,
    IDBTransaction.READ_ONLY);
  var objectStore = transaction.objectStore(objectStoreName);
  try {
    var request = objectStore.openCursor();
    return request;
  }
  catch (e) {
    console.log('IndexedDB Error' + '(' + e.code + '): ' + e.message);
  }
}
UPDATE BELOW:
The answer from #Dangerz definitely helped put me on the right track. However, since the function call is asynchronous, I also ended up having to add a callback so that I could actually use the cursor once the "success" event finally fired and I was able to get the requested IndexedDB cursor. The final working function is below (refactored slightly to remove the negative logic above "if (!ixDb)"). I am totally open to suggestions if anyone sees room for improvement!
//****************************************************************************
// Function getCursor_ - Returns a cursor for the requested ObjectStore
//   objectStoreName - Name of the Object Store / Table "MyOsName"
//   callback        - Name of the function to call and pass the cursor
//                     request back to upon completion.
//   Ex. getCursor_("ObjectStore_Name", MyCallBackFunction);
//       function MyCallBackFunction(CursorRequestObj) {
//         CursorRequestObj.onsuccess =
//           function() { /* do stuff here */ };
//       }
//****************************************************************************
function getCursor_(objectStoreName, callback) {
  // The openCursor call is asynchronous, so we must check to ensure a
  // database connection has been established and then provide the cursor
  // via callback.
  if (ixDb) {
    var transaction =
      ixDb.transaction(objectStoreName, IDBTransaction.READ_ONLY);
    var objectStore = transaction.objectStore(objectStoreName);
    try {
      var request = objectStore.openCursor();
      callback(request);
      console.log('ixDbEz: Getting cursor request for '
        + objectStoreName + ".");
    }
    catch (e) {
      console.log('ixDbEz Error' + ' getCursor:('
        + e.code + '): ' + e.message);
    }
  }
  else {
    if (ixDbRequest) {
      ixDbRequest.addEventListener("success",
        function () { getCursor_(objectStoreName, callback); },
        false);
    }
  }
}
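For completeness, here is a minimal sketch of a callback that consumes the cursor request returned this way, assuming a hypothetical object store named "MyOsName":

getCursor_("MyOsName", function (cursorRequest) {
  cursorRequest.onsuccess = function (e) {
    var cursor = e.target.result;
    if (cursor) {
      console.log('key: ' + cursor.key + ' value: ' + JSON.stringify(cursor.value));
      cursor.continue(); // move on to the next record
    }
  };
  cursorRequest.onerror = function () {
    console.log('Cursor request failed');
  };
});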
Change your addEventListener line to:
ixDbRequest.addEventListener("success", function() { getCursor_(objectStoreName) }, false);
The original version called getCursor_(objectStoreName) immediately and passed its return value as the listener; wrapping the call in an anonymous function passes a function reference instead, so getCursor_ only runs when the "success" event actually fires.