In my code I deal with multiple JSON requests that need to be parsed in an order.
// Orchestrates the fetch sequence: radio list -> per-station stats/history -> iTunes.
// NOTE(review): radioList_data is assigned below but never declared — implicit global.
let allRadio_data, allHistory_data, iTunes_data;
$.when(parseRadioList())
.then(function(list) {
radioList_data = list;
// BUG (the question's problem): parseEachRadioData/ParseEachHistoryData
// return undefined, so this $.when resolves immediately instead of waiting
// for the per-station requests to finish.
return $.when(
parseEachRadioData(radioList_data),
ParseEachHistoryData(radioList_data)
);
})
.then(function() {
// Runs too early — the arrays are still being filled asynchronously.
console.log(allRadio_data);
console.log(allHistory_data);
// $.when around a single promise is redundant.
return $.when(_parseiTunes());
})
.then(function(iTunesInfo) {
iTunes_data = iTunesInfo;
return _cacheOptions();
})
function _cacheOptions() {
// FINAL function
}
/////////
/**
 * Fetch the master list of radio stations as JSON.
 * @returns {jqXHR} jQuery promise-like object resolving with the parsed list.
 */
function parseRadioList() {
    var listUrl = "https://api.myjson.com/bins/xiyvr";
    return $.getJSON(listUrl);
}
/**
 * Query the iTunes search API via JSONP (term "jackson", max 10 results).
 * @returns {jqXHR} jQuery promise-like object resolving with the response.
 */
function _parseiTunes() {
    var searchUrl = "https://itunes.apple.com/search?term=jackson&limit=10&callback=?";
    return $.getJSON(searchUrl);
}
// BUG (as called out below in the answer): fires one stats request per
// station but returns undefined, so callers cannot wait for completion;
// allRadio_data fills in at some later, unknowable time.
function parseEachRadioData(radioList) {
allRadio_data = [];
$.each(radioList, function(index, radio) {
// $.when around a single promise is redundant.
$.when($.getJSON(radio.url + "/stats?sid=" + radio.stream_id + "&json=1&callback=?"))
.then(function(data) {
// Push order depends on response timing, not on list order.
allRadio_data.push(data);
});
})
}
// BUG: same shape as parseEachRadioData — returns undefined, so the caller's
// $.when cannot wait for these history requests to complete.
function ParseEachHistoryData(radioList) {
allHistory_data = [];
$.each(radioList, function(index, radio) {
$.when($.getJSON(radio.url + "/played?sid=" + radio.stream_id + "&type=json&callback=?"))
.then(function(data) {
// Push order depends on response timing, not on list order.
allHistory_data.push(data);
});
})
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
Right now the code runs, but at the point where I do console.log(allRadio_data); the array is empty. However, if I use setTimeout() to delay it for a second, the data is complete. This means .then() is not waiting for the requests to finish.
This is the structure I am looking for:
JSON file 1 is parsed. parseRadioList()
JSON1 is an array of multiple entries of JSON URLS.
Run through the URLs within JSON1 array and do getJSON for each. parseEachRadioData(radioList_data) & ParseEachHistoryData(radioList_data)
Push data of each JSON in one general Array.
Once completed, parse JSON2 _parseiTunes()
Any idea how to make this code run with the right structure?
Thanks in advance.
For a start, parseEachRadioData and ParseEachHistoryData don't return anything at all, let alone a Promise - so it's impossible to wait on them
Also, you're overusing $.when ... in fact you never need to use it, just use regular promises since jQuery $.getJSON etc return a usable Promise-like object
i.e. your code could be
// Fix: declare radioList_data as well — the original assigned it without a
// declaration, creating an implicit global (and a ReferenceError in strict mode).
let radioList_data, allRadio_data, allHistory_data, iTunes_data;

parseRadioList()
    .then(function(list) {
        radioList_data = list;
        // Fetch per-station stats and history in parallel; Promise.all
        // resolves only once every underlying request has completed.
        return Promise.all([
            parseEachRadioData(radioList_data),
            ParseEachHistoryData(radioList_data)
        ]);
    })
    .then(function(result) {
        allRadio_data = result[0];
        allHistory_data = result[1];
        console.log(allRadio_data);
        console.log(allHistory_data);
        return _parseiTunes();
    })
    .then(function(iTunesInfo) {
        iTunes_data = iTunesInfo;
        return _cacheOptions();
    })
    .catch(function(err) {
        // Surface a failure from any step instead of leaving the rejection unhandled.
        console.error(err);
    });

function _cacheOptions() {
    // FINAL function
}
/////////
// Retrieve the station list JSON from the myjson endpoint.
function parseRadioList() {
    var url = "https://api.myjson.com/bins/xiyvr";
    return $.getJSON(url);
}
// JSONP query against the iTunes search API (term "jackson", 10 results).
function _parseiTunes() {
    var url = "https://itunes.apple.com/search?term=jackson&limit=10&callback=?";
    return $.getJSON(url);
}
// Fire one /stats request per station; the returned promise resolves with
// an array of all responses once every request has finished.
function parseEachRadioData(radioList) {
    var requests = radioList.map(function(radio) {
        return $.getJSON(radio.url + "/stats?sid=" + radio.stream_id + "&json=1&callback=?");
    });
    return Promise.all(requests);
}
// Fire one /played (history) request per station; resolves with an array of
// all responses once every request has finished.
function ParseEachHistoryData(radioList) {
    var requests = radioList.map(function(radio) {
        return $.getJSON(radio.url + "/played?sid=" + radio.stream_id + "&type=json&callback=?");
    });
    return Promise.all(requests);
}
At first glance:
$.when(parseRadioList()) // your code will wait here
.then(function (list) {
radioList_data = list;
return $.when(
parseEachRadioData(radioList_data), // these two methods are void; they will complete
ParseEachHistoryData(radioList_data) // immediately without waiting for getJSON...
);
})
Related
I've been trying to code up a search engine using AngularJS, but I can't copy one array to another. When I initiate the code (in service.FoundItems, inside the $q.all then function), the new array (foundArray) shows up as an empty array. I searched up how to copy one array to another and tried that method as you can see, but it isn't working. Please help — here is the code, and thank you.
P.S. if you need the html please tell me.
(function () {
'use strict';
// Module wiring: controller + search service + result directive.
angular.module('narrowDownMenuApp', [])
.controller('narrowItDownController', narrowItDownController)
.service('MenuSearchService', MenuSearchService)
.directive('searchResult', searchResultDirective);
// Renders the found items; one-way bound via `items`.
function searchResultDirective() {
var ddo = {
templateUrl: 'searchResult.html',
scope: {
items: '<'
},
};
return ddo
}
narrowItDownController.$inject = ['MenuSearchService'];
function narrowItDownController(MenuSearchService) {
var menu = this;
menu.input = "";
menu.displayResult = [];
menu.searchX = function(name) {
menu.displayResult = MenuSearchService.FoundItems(menu.input, name);
// Logs immediately, before the async search has populated anything.
console.log(menu.displayResult);
};
}
MenuSearchService.$inject = ['$http', '$q'];
function MenuSearchService($http, $q) {
var service = this;
// Resolves with matching item names, or rejects when searchTerm is empty.
service.getMatchedMenuItems = function(name, searchTerm) {
var deferred = $q.defer();
var foundItems = [];
var result = $http({
method: "GET",
url: ('https://davids-restaurant.herokuapp.com/menu_items.json'),
params: {
category: name
}
}).then(function (result) {
var items = result.data;
for (var i = 0; i < items.menu_items.length; i++) {
if (searchTerm === ""){
deferred.reject("Please enter search term");
i = items.menu_items.length;
}
// BUG: `==! -1` parses as `== (!-1)`, i.e. `== false` — almost certainly
// `!== -1` (substring found) was intended.
else if (items.menu_items[i].name.toLowerCase().indexOf(searchTerm.toLowerCase()) ==! -1){
foundItems.push(items.menu_items[i].name)
deferred.resolve(foundItems);
}else {
console.log("doesn't match search");
}
}
});
return deferred.promise;
};
service.FoundItems = function (searchTerm, name) {
var searchResult = service.getMatchedMenuItems(name, searchTerm);
var foundArray = [];
$q.all([searchResult])
.then(function (foundItems) {
// Rebinds the LOCAL variable; the array already returned below is
// untouched — which is why the caller always sees an empty array.
foundArray = foundItems[0].slice(0);
foundArray.reverse();
})
.catch(function (errorResponse) {
foundArray.push(errorResponse);
});
// Runs before the promise settles — logs (and returns) the empty array.
console.log(foundArray);
return foundArray;
};
};
})();
If the goal of the service.FoundItems function is to return a reference to an array that is later populated with results from the server, use angular.copy to copy the new array from the server to the existing array:
// Returns an array reference immediately; the array is filled in place
// (via angular.copy) once the server responds. The in-flight request is
// exposed on the array as `$promise` for callers that need to chain on it.
service.FoundItems = function (searchTerm, name) {
    var resultArray = [];
    resultArray.$promise = service.getMatchedMenuItems(name, searchTerm)
        .then(function (foundItems) {
            // Copy into the existing array so the returned reference sees the data.
            angular.copy(foundItems, resultArray);
            resultArray.reverse();
            return resultArray;
        })
        .catch(function (errorResponse) {
            return $q.reject(errorResponse);
        })
        .finally(function () {
            console.log(resultArray);
        });
    return resultArray;
};
I recommend that the promise be attached to the array reference as a property named $promise so that it can be used to chain functions that depend on results from the server.
Frankly I don't recommend designing services that return array references that are later populated with results. If you insist on designing it that way, this is how it is done.
I tried the $promise thing that you recommended. I was wondering how you would get the value from it ie the array.
In the controller, use the .then method of the $promise to see the final value of the array:
narrowItDownController.$inject = ['MenuSearchService'];
function narrowItDownController(MenuSearchService) {
var menu = this;
menu.input = "";
menu.displayResult = [];
menu.searchX = function(name) {
menu.displayResult = MenuSearchService.FoundItems(menu.input, name);
̶c̶o̶n̶s̶o̶l̶e̶.̶l̶o̶g̶(̶m̶e̶n̶u̶.̶d̶i̶s̶p̶l̶a̶y̶R̶e̶s̶u̶l̶t̶)̶;̶
// The $promise attached to the returned array signals completion;
// log inside its .then to see the populated result.
menu.displayResult.$promise
.then(function(foundArray) {
console.log(foundArray);
console.log(menu.displayResult);
}).catch(function(errorResponse) {
console.log("ERROR");
console.log(errorResponse);
});
};
}
To see the final result, the console.log needs to be moved inside the .then block of the promise.
Titus is right. The function always immediately returns the initial value of foundArray which is an empty array. The promise is executed asynchronously so by the time you are trying to change foundArray it is too late. You need to return the promise itself and then using .then() to retrieve the value just like you are currently doing inside the method.
From just quickly looking at your code, I think you may have a simple error in there. Are you sure you want
foundArray = foundItems[0].slice(0);
instead of
foundArray = foundItems.slice(0);
I'm having trouble figuring out the best way to utilise promises. I'm trying to create a routine that will copy a folder and I find myself trying to deal with promises in sequence. The code that I've written isn't very elegant:
// Copies `source` into `target` inside the Documents library.
// NOTE(review): the parameters are named source/target but the body uses
// sourcePath/targetPath, which are not defined anywhere in this snippet.
copyFolder = function( source, target ) {
var deferred = $q.defer();
var sourceFolder = null;
var sourcePromise = Windows.Storage.KnownFolders.documentsLibrary.getFolderAsync(sourcePath)
sourcePromise.then(function(dir) {
sourceFolder = dir;
}, function(error) {
// Swallows the failure; sourceFolder stays null.
return
});
var targetFolder = null;
var targetPromise = Windows.Storage.KnownFolders.documentsLibrary.getFolderAsync(targetPath)
targetPromise.then(function(dir) {
targetFolder = dir;
// NOTE(review): this `.then(` is never closed — the snippet does not parse as posted.
}
// NOTE(review): AngularJS $q.all expects a single array of promises, not
// separate arguments.
$q.all( sourcePromise, targetPromise ).then( function() {
if ( !targetFolder ) {
var createPromise = Windows.Storage.KnownFolders.documentsLibrary.createFolderAsync(targetPath)
createPromise.then(function(dir) {
targetFolder = dir;
}
}
// NOTE(review): createPromise is only assigned inside the if above; when
// the target folder already exists this call runs on undefined.
createPromise.then( function() {
deferred.resolve( copyFolderRecursively(sourceFolder, targetFolder) );
});
});
return deferred.promise;
}
Please note that I'm copying files that are in the Documents folder, so I can't use cordova-plugin-file.
In the case of the targetFolder, I'm checking to see if it exists (targetPromise). If it doesn't, I create it (createPromise). I like the idea of the $q.all() call, unfortunately it can't be used for createPromise.
To top it all off, I'm trying to create a recursive function that will return once all the files are copied. Actually, it would be better if it could return a promise (or build a list of promises).
Can this code be written in a more elegant form? Also, is there an example I can follow with a recursive function that implements a promise?
I realised that I can create a promise that depends on another promise and wait for that:
// The first getFolderAsync acts as an "exists" probe for `target`.
var targetFolder = null;
var targetPromise = Windows.Storage.KnownFolders.documentsLibrary.getFolderAsync( target ).then( function() {
// Exists: fetch the folder object (a second lookup of the same path).
return Windows.Storage.KnownFolders.documentsLibrary.getFolderAsync( target );
}, function(error) {
// Missing: create it, so targetPromise resolves with a folder either way.
return Windows.Storage.KnownFolders.documentsLibrary.createFolderAsync( target );
});
targetPromise.then( function(folder) {
targetFolder = folder;
});
It's not ideal, but I treat the first call to getFolderAsync as an "exists" call. The success callback is then used to return the result of getFolderAsync. If the "exists" fails, then the call to createFolderAsync is made. This means that $q.all( sourcePromise, targetPromise ) will wait until the folder is retrieved (or created). To keep things simple, instead of trapping the fail callback for createFolderAsync I just check to see whether targetFolder is defined.
Though this technically answers the first part of my question. I realised the following is much more concise:
// Create-or-replace in one call, avoiding the separate "exists" probe.
// NOTE(review): replaceExisting replaces a folder that is already there —
// confirm that losing its current contents is acceptable here.
var replace = Windows.Storage.CreationCollisionOption.replaceExisting;
var targetFolder = null;
var targetPromise = Windows.Storage.KnownFolders.documentsLibrary.createFolderAsync(target, replace);
targetPromise.then( function(folder) {
targetFolder = folder;
});
What I'm not sure about is whether the targetPromise.then is executed sequentially before the $q.all (used at the end of the function). It seems to work, but I'm not sure whether it's supposed to.
I also created a function that will generate promises and wait for each copy operation (before resolving a single deferred promise). This is just the first version that worked. Any advice on how to improve it will be appreciated:
// Recursively copies sourceFolder's files and subfolders into targetFolder.
// Resolves true on success, false if any copy failed.
$scope.copyFolderRecursively = function(sourceFolder, targetFolder) {
// NOTE(review): the parameter is spelled `sourcefolder` (lower-case f) but
// the body reads `sourceFolder` — the outer function's argument via closure —
// so recursive calls ignore the folder they were given.
function CopySubFolders(sourcefolder, targetFolder) {
var deferred = $q.defer();
// NOTE(review): returns a plain boolean; the recursive caller below calls
// .then() on the result, which would throw on `false`.
if ( !sourceFolder ) return false;
var promises = [];
// Copy the files
var getFilesPromise = sourceFolder.getFilesAsync();
var copyFilesPromise = getFilesPromise.then(function (files) {
if ( files ) {
files.forEach(function (file) {
console.log("copy file: " + file.displayName);
// Bridge each UWP copy operation into an AngularJS promise.
var delayed = $q.defer();
promises.push( delayed.promise );
file.copyAsync(targetFolder).then( function(copiedfile) {
delayed.resolve( copiedfile );
}, function(error) {
delayed.reject( "Error copying file" + file.path );
});
});
}
});
// Recursively copy each subfolder
var getFoldersPromise = sourceFolder.getFoldersAsync();
var copyFoldersPromise = getFoldersPromise.then(function (folders) {
if ( folders ) {
folders.forEach(function (folder) {
var replace = Windows.Storage.CreationCollisionOption.replaceExisting;
console.log("create folder: " + folder.name);
var delayed = $q.defer();
promises.push( delayed.promise );
targetFolder.createFolderAsync(folder.name, replace).then(function (newFolder) {
CopySubFolders(folder, newFolder).then( function(success) {
delayed.resolve( newFolder );
}, function(error) {
delayed.reject( "Error copying folder " + newFolder.path );
});
});
});
}
});
// Two stages: first wait for the listing .then()s (which populate
// `promises`), then wait for the copy operations themselves.
// NOTE(review): AngularJS $q.all takes a single array — passing several
// separate arguments like this only waits on the first.
$q.all( getFilesPromise, copyFilesPromise, getFoldersPromise, copyFoldersPromise, promises ).then( function() {
return $q.all( promises );
}).then( function() {
deferred.resolve( true );
}, function(error) {
deferred.resolve( false );
});
return deferred.promise;
}
return CopySubFolders( sourceFolder, targetFolder );
}
One thing worth noting with this solution (another realisation), UWP promises are not Angularjs promises. I basically convert each UWP call into an AngularJS deferred promise - so I can use $q.all().
I'm trying to create a JSON output (tree structure) from the recursive async calls and have come up with below -
// Recursively builds a JSON tree of sites/subsites under mData.
// NOTE(review): the `callback` parameter is accepted but never invoked, and
// the recursive call below drops it — so nothing signals when the whole
// tree has been assembled (the question being asked).
$scope.processTree = function (mData, callback) {
_processTree.getWebCollection(mData.url).then(
function(_rdata){
// transform xml -> json
var x2js = new X2JS();
var json = x2js.xml_str2json(_rdata);
//console.log("XML DATA: " + _rdata);
//console.log("JSON DATA: " + JSON.stringify(json));
var _webs = json.Envelope.Body.GetWebCollectionResponse.GetWebCollectionResult.Webs.Web;
// if response has [] of webs - array of objects / sites
if ($(_webs).length > 0 && $.isArray(_webs)) {
$.each(_webs, function (key) {
// loop and build tree
// NOTE(review): children[key] below assumes mData.children was empty
// before this loop — verify against the callers.
mData.children.push({
name: "Site: " + _webs[key]._Title,
url: _webs[key]._Url,
children: []
});
// recursive loop call for each site again
// (no callback passed; nothing awaits this call's completion)
$scope.processTree(mData.children[key]);
});
}
// if response has {} of webs - single object / site
else if ($.isPlainObject(_webs)) {
mData.children.push({
name: _webs._Title,
url: _webs._Url,
children: []
});
}
// if no response or response is null, do nothing
else {
}
}, function(msg){
alert("ERROR!!! \n\nERROR DATA: " + msg[0] + " \tStatus: " + msg[1]);
});
};
// Stub: intended to run once the assembled tree (mData) is ready.
function callback(mData){
// do something - use mData, create tree html and display
}
The recursion gets all sites and subsites, if any, for each site and stores them in a variable — mData — and when done, I need to return and use this variable as JSON input to create a tree map. Each async call returns either an array of sites or a single site, if any.
How can I return mData, only after the entire recursion has completed ? How to know if recursion has ended and a call can be made to desired function ?
You can use a pattern like the following using promises and angular's $q service, assuming foo.getStuff(url) returns an angular promise.
// Sketch: recursively fetch `url`, fan out child fetches, and resolve with
// the assembled result only after every recursive call settles ($q.all).
function getRec(url) {
return foo.getStuff(url).then(function(data){
var result = {} // construct your result
var promises = [] // array of promises for recursive calls.
// NOTE(review): `x` is undeclared — implicit global; use `var x`.
// NOTE(review): recursing with the same `url` as written would never
// terminate — presumably the child URL should be derived from data[x].
for (x in data) {
promises.push(getRec(url).then(function(r){
// update result with r
}))
}
// wait for all recursive calls and then resolve the promise with the constructed result
return $q.all(promises).then(function(){return result})
})
}
getRec(ROOT_URL).then(callback)
I have an array of ids, and I want to make an api request for each id, but I want to control how many requests are made per second, or better still, have only 5 open connections at any time, and when a connection is complete, fetch the next one.
Currently I have this, which just fires off all the requests at the same time:
// Fires all ten requests immediately; nothing here limits how many
// connections are open at once (the problem being asked about).
_.each([1,2,3,4,5,6,7,8,9,10], function(issueId) {
github.fetchIssue(repo.namespace, repo.id, issueId, filters)
.then(function(response) {
console.log('Writing: ' + issueId);
writeIssueToDisk(fetchIssueCallback(response));
});
});
Personally, I'd use Bluebird's .map() with the concurrency option since I'm already using promises and Bluebird for anything async. But, if you want to see what a hand-coded counter scheme that restricts how many concurrent requests can run at once looks like, here's one:
/**
 * Run `fn` over every item in `collection` with at most `max` calls in
 * flight at once. `fn(item, callback)` must invoke its node-style callback
 * exactly once per item. `done(err)` fires exactly once: with the first
 * error, or with no argument after all items complete.
 *
 * Fix over the original: when `fn` completed its callback synchronously,
 * `done` was invoked twice — once by the recursive runMore() inside the
 * callback and again by the outer runMore()'s completion check. The
 * doneCalled guard makes `done` fire at most once.
 */
function limitEach(collection, max, fn, done) {
    var cntr = 0, index = 0, errFlag = false, doneCalled = false;

    // Invoke done at most once, regardless of sync/async callback timing.
    function finish(err) {
        if (doneCalled) return;
        doneCalled = true;
        done(err);
    }

    function runMore() {
        // Launch work until the concurrency cap or the end of the list.
        while (!errFlag && cntr < max && index < collection.length) {
            ++cntr;
            fn(collection[index++], function(err, data) {
                --cntr;
                if (errFlag) return;
                if (err) {
                    errFlag = true;
                    finish(err);
                } else {
                    runMore();
                }
            });
        }
        // Everything launched and completed — report success (also covers
        // an empty collection).
        if (!errFlag && cntr === 0 && index === collection.length) {
            finish();
        }
    }
    runMore();
}
With Bluebird:
// Simulated fetch: logs the start, waits 2s (Bluebird Promise.delay),
// then logs completion.
function fetch(id) {
    console.log("Fetching " + id);
    return Promise.delay(2000, id)
        .then(function(resolvedId) {
            console.log("  Fetched " + resolvedId);
        });
}

// Process the ids with at most 3 requests in flight (Bluebird Promise.map).
var ids = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
Promise.map(ids, fetch, { concurrency: 3 });
<script src="https://cdnjs.cloudflare.com/ajax/libs/bluebird/3.3.1/bluebird.min.js"></script>
<!-- results pane console output; see http://meta.stackexchange.com/a/242491 -->
<script src="http://gh-canon.github.io/stack-snippet-console/console.min.js"></script>
Divide your data into as many arrays as you want concurrent connections. Schedule with setTimeout, and have the completion callback handle the rest of the sub-array.
Wrap the setTimeout in a function of its own so that the variable values are frozen to their values at the time of delayed_fetch() invocation.
// Wrap setTimeout in its own function so the argument values are frozen to
// those in effect at the time of the delayed_fetch() invocation.
function delayed_fetch(delay, namespace, id, issueIds, filters) {
setTimeout(
function() {
// Take the next id; note this mutates the caller's array.
// NOTE(review): nothing stops the chain when issueIds is empty —
// fetchIssue would then be called with undefined.
var issueId=issueIds.shift();
github.fetchIssue(namespace, id, issueId, filters).then(function(response) {
console.log('Writing: ' + issueId);
writeIssueToDisk(fetchIssueCallback(response));
// Chain straight into the next item of this sub-array (delay 0).
delayed_fetch(0, namespace, id, issueIds, filters);
});
}, delay);
}
// Five sub-arrays -> five concurrent chains, started 200ms apart.
var i=0;
_.each([ [1,2] , [3,4], [5,6], [7,8], [9,10] ], function(issueIds) {
var delay=++i*200; // millisecond
delayed_fetch(delay, repo.namespace, repo.id, issueIds, filters);
});
I'd recommend using throat just for this: https://github.com/ForbesLindesay/throat
Using Bluebird
function getUserFunc(user) {
    //Get a collection of user
}
function getImageFunc(id) {
    //get a collection of image profile based on id of the user
}

/**
 * Fetch users, then their profile images with at most 5 requests in flight
 * (Bluebird Promise.map `concurrency`).
 * Fixes over the original snippet:
 *  - `items => return items.id` was a syntax error (no `return` statement
 *    in a concise arrow body) — now `items => items.id`;
 *  - the calls referenced getUsersFunc/getImagesFunc, which are not defined;
 *    they now match the declared getUserFunc/getImageFunc.
 */
function search(response) {
    return getUserFunc(response).then(users => {
        const ids = users.map(item => item.id);
        const toImage = id => getImageFunc(id).then(item => item.image);
        return Promise.map(ids, toImage, { concurrency: 5 });
    });
}
Previously I used the ES6 function Promise.all(), but it didn't work the way I was expecting. Then I went with the third-party library bluebird.js, and it works like a charm.
Scenario
I need a background upload queue that sends files to a server.
The queue should send the files in sequential order as they are pushed into the queue (FIFO).
My solution
var pendingFiles = [];
var filesOperation = null;
// NOTE(review): assigned without var/let — creates an implicit global.
uploadNextAsync = function(file) {
var next;
if (!pendingFiles.length) {
return WinJS.Promise.as();
}
// Takes the queued name out immediately, so pendingFiles empties as soon
// as this function is entered.
next = pendingFiles.shift();
fileslogger.debug("Uploading " + next);
return fileQuery.folder.getFileAsync(next).then(function(file) {
return Server.sendFileAsync(file).then(function() {
return filesOk += 1;
}, function(error) {
filesNok += 1;
return logger.error(error.message, error);
}).then(function() {
if (pendingFiles.length) {
// BUG: `inspection` is not defined anywhere in this snippet.
return uploadNextAsync(inspection);
}
});
});
};
createTaskForFile = function(file) {
if (pendingFiles.length == 0) {
// BUG (the reported glitch): uploadNextAsync() shifts the name back out
// immediately, so pendingFiles is empty again; a second call arriving
// quickly also takes this branch and two uploads run in parallel.
pendingFiles = [file.name]
filesOperation = uploadNextAsync(file);
} else {
pendingFiles.push(file.name);
return filesOperation.then(function() {
return uploadNextAsync(file);
});
}
};
Problem
It seems that sometimes if createTaskForFile is called very quickly in succession then 2 files end up being uploaded at the same time. So somewhere is a little glitch either in the createTastForFile function on how it uses the fileOperation.then construct or inside the uploadNextAsync does something wrong?
Your problem is that pendingFiles is always empty. In createTaskForFile, you would set it to an one-element array then, but immediately call uploadNextAsync() which shifts it out. I guess your script might work if you shifted the file after the file has been uploaded.
However, you actually don't need this array. You can just queue the action to filesOperation, which would be a promise representing the upload of all current files.
// Tail of the upload queue: a promise representing all uploads scheduled so far.
var filesOperation = WinJS.Promise.as();

// Append a file to the upload queue (FIFO): each upload starts only after
// every previously queued upload has settled.
function createTaskForFile(file) {
    return filesOperation = filesOperation.then(function() {
        // Fix: the helper defined below is uploadAsync — the original queued
        // a call to uploadNextAsync, which this rewrite no longer defines.
        return uploadAsync(file);
    });
}

// Upload a single file, updating the ok/failed counters.
function uploadAsync(next) {
    fileslogger.debug("Uploading " + next.name);
    return fileQuery.folder.getFileAsync(next.name).then(function(file) {
        return Server.sendFileAsync(file);
    }).then(function() {
        return filesOk += 1;
    }, function(error) {
        filesNok += 1;
        return logger.error(error.message, error);
    });
}