Iterate through appended JSON file - JavaScript

I have this code:
function heatMapRange() {
  var script1 = document.createElement('script');
  script1.src = 'allCoords.js';
  document.getElementsByTagName('head')[0].appendChild(script1);
}
which appends this allCoords.js file:
allCoords_callback({
  "coordinates": [
    [50.1729677, 12.6692243, 580000],
    [50.001168, 14.4270033, 2895000],
    [50.6988037, 13.9384015, 945000],
    [50.1218161, 14.4824004, 409900],
    [49.470061, 17.0937597, 1499000],
    [49.8509959, 18.5593087, 380000]
  ]
});
What I want is to iterate through this data with something like this:
function allCoords_callback(results1) {
  for (var i = 0; i < results1.coordinates.length; i++) {
    alert(results1.coordinates[i]);
  }
}
Is it possible?

You can iterate over an array in JavaScript with Array.map().
In your example it would be something like:
results1.coordinates.map(function(coordinate) { alert(coordinate); })
That covers the iteration part.
Then, another topic is how you get the JSON you need to process. In the example given in the Google Maps docs they use JSONP simply because that is how the real-time earthquake data is served. Another way to fetch data is XMLHttpRequest (also known as AJAX). This is more common practice and I would recommend using it if possible.
In your case I would rewrite your code to look something like this:
function heatMapRange() {
  var request = new XMLHttpRequest();
  request.open('GET', '/allCoords.json', true);
  request.onload = function () {
    if (request.status >= 200 && request.status < 400) {
      // Success!
      var data = JSON.parse(request.responseText);
      // process the data in the response, e.g.
      // iterate through the list of coordinates
      data.coordinates.map(function (coordinate) { alert(coordinate); });
    } else {
      // We reached our target server, but it returned an error
    }
  };
  request.onerror = function () {
    // There was a connection error of some sort
  };
  request.send();
}
This fetches the data from the JSON file allCoords.json:
{
  "coordinates": [
    [50.1729677,12.6692243,580000],
    [50.001168,14.4270033,2895000],
    [50.6988037,13.9384015,945000],
    [50.1218161,14.4824004,409900],
    [49.470061,17.0937597,1499000],
    [49.8509959,18.5593087,380000]
  ]
}
This way of fetching data from a server aligns better with common industry practice. This is just the straightforward example using a vanilla JavaScript XMLHttpRequest. There are plenty of libraries that simplify this, and the upcoming Fetch API tackles resource fetching directly.
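For completeness, here is a minimal sketch of the same request using the Fetch API, assuming the same allCoords.json file and a browser that supports fetch:

function heatMapRange() {
  fetch('/allCoords.json')
    .then(function (response) {
      if (!response.ok) {
        throw new Error('HTTP error ' + response.status);
      }
      return response.json();
    })
    .then(function (data) {
      // iterate through the list of coordinates
      data.coordinates.map(function (coordinate) { alert(coordinate); });
    })
    .catch(function (error) {
      // connection error or non-2xx response
      console.log('Request failed', error);
    });
}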

Well, the code at the top works; the problem was that I had disabled alerts in Google Chrome, so closing the tab and reopening the page did the trick.

Related

Chrome extension: Get the text of a web page from given url

First, I'm a complete newbie at making Chrome extensions. In part of the extension I will receive different URLs, and I want to store the text of each web page to process it later, resulting in an array of boolean variables, each associated with the given URL. Schematically it would be something like this:
var result = [];
function process(text) {
  if something -> result.push(true);
  if not -> result.push(false);
}
function main() {
  for (i...) {
    url = given[i];
    text = getHTMLText(url);
    process(text);
  }
  final(); // when the loop finishes, call another function that uses the global variable: result
}
I have problems with the main function. First I tried a synchronous XMLHttpRequest; although it works, it's very slow and Chrome always gives a warning that synchronous XMLHttpRequest is deprecated.
for (var i = 0; i < urls.length; i++) {
  url = urls[i];
  var req = new XMLHttpRequest();
  req.open('GET', url, false);
  req.send(null);
  if (req.status == 200) detecting(req.responseText);
}
Another solution I found was to use fetch(url), but I don't fully understand the code I found. The returned text works correctly, but then the process function gives different results on each page refresh.
for (var i = 0; i < urls.length; i++) {
  url = urls[i];
  fetch(url).then(function(response) {
    response.text().then(function(text) {
      detecting(text);
    });
  });
}
Another problem, probably due to my limited knowledge of fetch(), is that I can't store the text outside of the fetch(); every time I console.log it, I get undefined, which greatly complicates processing the text for me.
I have seen that maybe it can be done through Chrome's extension APIs, but I can't see how to do it.
The algorithm shown in your main pseudocode can be implemented easily by using async/await and Promise.all, without a for loop:
(async () => {
  const results = await Promise.all(urls.map(processUrl));
  console.log(results);
  // further processing must also be inside this IIFE
})();

async function processUrl(url) {
  try {
    const text = await (await fetch(url)).text();
    return {url, text, status: detecting(text)};
  } catch (error) {
    return {url, error};
  }
}
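If you specifically want the array of booleans described in the question, you can derive it from those results right after Promise.all resolves, inside the same IIFE. A small sketch, assuming detecting returns a boolean and that a failed fetch should count as false:

const booleans = results.map(r => r.error ? false : Boolean(r.status));
// booleans[i] corresponds to urls[i], since Promise.all preserves order
console.log(booleans);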

Using Ajax with Mongodb

I've been looking into this for quite a while and I have no clue.
I want to use Ajax to display information from a collection called 'reizen' in MongoDB.
I want to be able to switch through all the elements of the collection and get all the information, like the title etc. I have no idea how to get the information. I already have a bit of code, but I've been stuck for days, so I hope somebody can help me. If I only knew how to get the title of each element from the collection 'reizen', I would be able to work further. (I copied some code from someone who used CDs, but I want to do the same with 'reizen'.)
This is my code:
extends layout

block content
  #reizen.w3-content.w3-container.w3-padding-64
    h3.w3-center REIZEN
    //reizen
    input(type='button' onclick='previous()' value='<<')
    input(type='button' onclick='next()' value='>>')
    p
      |
    #showCD Here we will see all the elements of collection 'reizen'

block content
  script.
    var i = 0;
    var x;
    displayCD(i);

    function displayCD(i) {
      var xmlhttp = new XMLHttpRequest();
      xmlhttp.onreadystatechange = function() {
        if (this.readyState == 4 && this.status == 200) {
          myFunction(this, i);
        }
      };
      xmlhttp.open("GET", "cd_catalog.xml", true);
      xmlhttp.send();
    }

    function myFunction(xml, i) {
      var xmlDoc = xml.responseXML;
      x = xmlDoc.getElementsByTagName('CD');
      document.getElementById('showCD').innerHTML =
        "Title:"; // get the title?
    }

    function next() {
      if (i < x.length - 1) {
        i++;
        displayCD(i);
      }
    }

    function previous() {
      if (i > 0) {
        i--;
        displayCD(i);
      }
    }
You are trying to use the data server-side. Your AJAX will work client-side. You can use Jade/Pug to create the container to hold your CDs, but you have to actually create the DOM elements client side using createElement, or React, or whatever other DOM API/wrapper you want.
If you want all of this to be done server side, you don't want to do your AJAX in a script block that gets served to the client; you want to get the CDs programmatically within server-land and then pass the CD data to the Jade/Pug renderer server-side.
You are trying to mix server-land and client-land and you need to pick one.
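To illustrate the client-side route, here is a minimal sketch of a JSON endpoint exposing the 'reizen' collection; the database name, port, route path, and the use of Express with the official MongoDB driver are all assumptions for illustration, not part of the asker's actual app:

// server.js
const express = require('express');
const { MongoClient } = require('mongodb');

const app = express();

MongoClient.connect('mongodb://localhost:27017').then(function (client) {
  const db = client.db('mydb'); // assumed database name

  // JSON endpoint that the browser-side AJAX can call
  app.get('/reizen', function (req, res) {
    db.collection('reizen').find({}).toArray()
      .then(function (docs) { res.json(docs); })
      .catch(function (err) { res.status(500).json({ error: err.message }); });
  });

  app.listen(3000);
});

The script block in the template would then GET /reizen instead of cd_catalog.xml, JSON.parse the responseText, and read a property such as title (assuming the documents have one) from the element at the current index.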

Get number of yearly GitHub user contributions

User profiles on GitHub display the number of contributions a user has made in the past year:
I'd like to display this information on my website, preferably without having to introduce any kind of backend. I've looked for it in several places. Nothing is listed in /users/(me)/. Short of scraping the page source, is there a way to retrieve this information? I don't see anything documented...
Have you seen the GitHub API? Using it, you could run the request from your JS file without implementing any backend.
For your purpose, I think you could use the following request: https://api.github.com/users/yourUserName/events. That gives you, as a result, a list of events related to yourUserName.
e.g.
[
  {
    "id": "xxxxxxx",
    "type": "PushEvent",
    "actor": {.......},
    "repo": {......},
    "payload": {.......},
    "public": true,
    "created_at": "2016-11-17T21:33:15Z"
  },
  .....
]
You should go through the list and filter for type = PushEvent and created_at within the last year.
I hope this can help you!
Update: that endpoint only gives results for the last 3 months, so another possibility is to list the user's repositories (users/username/repos) and then check the commits in each of them (repos/username/repoName/commits).
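A rough sketch of that filtering in the browser might look like the following; note that the events endpoint is paginated and, as mentioned above, only covers roughly the last three months, and push events are only an approximation of the contribution count:

const user = 'yourUserName';
const oneYearAgo = new Date();
oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);

fetch('https://api.github.com/users/' + user + '/events')
  .then(function (response) { return response.json(); })
  .then(function (events) {
    // keep only push events newer than one year
    var pushes = events.filter(function (e) {
      return e.type === 'PushEvent' && new Date(e.created_at) > oneYearAgo;
    });
    console.log('push events: ' + pushes.length);
  });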
I've settled for parsing the HTML of my profile page using PHP. I begin by downloading the HTML for my profile page:
$githubProfile = file_get_contents('https://github.com/controversial');
Next I find the position of the phrase "contributions in the last year" on the page. The whitespace between "contributions" and "in the last year" is inconsistent, so I'm using a regex to match any amount of space.
$contributionsIndex = preg_match(
'/contributions\s+in the last year/',
$githubProfile,
$matches,
PREG_OFFSET_CAPTURE
);
$index = $matches[0][1];
Then I iterate backwards from this point until I come across a character that is neither a digit nor a comma. Once that condition fails, I know I've found the beginning of the number:
$endIndex = $index;
while (is_numeric($githubProfile[$index-2]) || $githubProfile[$index-2] == ',')
$index--;
$contributionsCount = substr($githubProfile, $index-1, $endIndex-$index);
Finally, I echo the number out (without commas):
echo(str_replace(',', '', $contributionsCount));
Lastly, I use an AJAX call from my JavaScript to get the value from the PHP script:
function get(url, callback) {
  /* eslint-disable no-console */
  const xhr = new XMLHttpRequest();
  xhr.open('GET', url);
  xhr.onload = e => (callback || console.log)(e.target.responseText);
  xhr.onerror = () => console.log('error');
  xhr.send();
  /* eslint-enable no-console */
}

// Fill in GitHub contributions count
get('contributions-count.php', (resp) => {
  document.getElementById('gh-contributions-count').innerText = resp;
});
I'm sure this will be possible to accomplish more cleanly in the future with GitHub's GraphQL API, but that's still in its preview stage.
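For reference, a sketch of what such a query could look like once the schema exposes it; the contributionsCollection and contributionCalendar field names are an assumption on my part, and the call needs a personal access token, which somewhat defeats the "no backend" goal:

// Sketch only: field names are assumed, and a token is required.
const query = `
  query {
    user(login: "controversial") {
      contributionsCollection {
        contributionCalendar { totalContributions }
      }
    }
  }`;

fetch('https://api.github.com/graphql', {
  method: 'POST',
  headers: { 'Authorization': 'bearer YOUR_TOKEN' },
  body: JSON.stringify({ query: query })
})
  .then(function (r) { return r.json(); })
  .then(function (data) {
    console.log(data.data.user.contributionsCollection.contributionCalendar.totalContributions);
  });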
It's also possible to parse the SVG calendar data with an XML parser and then sum all the data-count entries. To avoid CORS issues, you can use a proxy like urlreq living at http://urlreq.appspot.com/req:
const user = 'controversial';

fetch('https://urlreq.appspot.com/req?method=GET&url=https://github.com/users/' + user + '/contributions')
  .then(function(response) {
    return response.text();
  })
  .then(function(text) {
    var xmlDoc = new DOMParser().parseFromString(text, 'text/xml');
    var nodes = xmlDoc.getElementsByTagName('rect');
    var count = 0;
    for (var i = 0; i < nodes.length; i++) {
      count += parseInt(nodes[i].getAttribute('data-count'), 10);
    }
    console.log('contributions count : ' + count);
  })
  .catch(function(error) {
    console.log('Request failed', error);
  });
Another example using the command line with curl & xmlstarlet can be found here

How to return data from a CRM 2011 JavaScript OData function?

It seems silly that I haven't been able to accomplish this. I'm using the common code found on the web to do an OData query. The problem is that the results stay inside the getFieldData(retrieveReq) routine. I don't want to immediately set a field on the current form. How can I get my values out of it so the data can be used in other JavaScript functions? A global variable would be good, but nothing I've tried has worked. The code below displays "x".
var var1 = "x"; odataquery(); alert(var1);
The example given here has two alerts that display the data. How can Id and Name get outside of that function to be useful?
Edit 1: Below is the main part of the routine that calls getFieldData(this). I want to use OwnerBUID and OwnerBUName in other JavaScript functions.
var retrieveReq = new XMLHttpRequest();
retrieveReq.open("GET", odataSelect, false);
retrieveReq.setRequestHeader("Accept", "application/json");
retrieveReq.setRequestHeader("Content-Type", "application/json; charset=utf-8");
retrieveReq.onreadystatechange = function () {
  getFieldData(this);
};
retrieveReq.send();

function getFieldData(retrieveReq) {
  if (retrieveReq.readyState == 4 && retrieveReq.status == 200) {
    // 4 = request complete, 200 = OK
    var retrieved = this.parent.JSON.parse(retrieveReq.responseText).d;
    var retrievedValue = retrieved.results[0].BusinessUnitId;
    OwnerBUID = retrievedValue.Id;
    OwnerBUName = retrievedValue.Name;
  }
}
I guess you want the data to be the return value of a JavaScript function. Since your request is synchronous, you could have getFieldData return the values and capture them when you call it:
function getFieldData(retrieveReq)
{
  ...
  return data;
}

var returnedData = getFieldData(retrieveReq);
By the way, you could consider using the JayData, Breeze, or datajs source code packages in your client application. They implement the low-level APIs for consuming an OData service from JavaScript.
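Another option is to pass a callback into the routine so the values are handed to whichever function needs them. A sketch under that assumption (the queryOwnerBU and processOwnerBU names are hypothetical, not part of CRM's API):

function queryOwnerBU(odataSelect, callback) {
  var retrieveReq = new XMLHttpRequest();
  retrieveReq.open("GET", odataSelect, true); // asynchronous this time
  retrieveReq.setRequestHeader("Accept", "application/json");
  retrieveReq.setRequestHeader("Content-Type", "application/json; charset=utf-8");
  retrieveReq.onreadystatechange = function () {
    if (retrieveReq.readyState == 4 && retrieveReq.status == 200) {
      var businessUnit = JSON.parse(retrieveReq.responseText).d.results[0].BusinessUnitId;
      // hand the values to the caller instead of relying on globals
      callback(businessUnit.Id, businessUnit.Name);
    }
  };
  retrieveReq.send();
}

// hypothetical usage: whatever needs the values receives them here
queryOwnerBU(odataSelect, function (ownerBUID, ownerBUName) {
  processOwnerBU(ownerBUID, ownerBUName);
});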

Concept - Designing a collapsible queue for asynchronous resources

I've noticed that the size of a requested file affects how long the response takes for AJAX calls. So if I fire 3 AJAX GET requests for files of varying size, they may arrive in any order. What I want to do is guarantee the ordering when I append the files to the DOM.
How can I set up a queue system so that when I fire A1->A2->A3, I can guarantee that they are appended as A1->A2->A3, in that order?
For example, suppose A2 arrives before A1. I would want the action to wait upon the arrival and loading of A1.
One idea is to create a status checker using a timed callback, like so:
// pseudo-code
function check(ready, func) {
  // check ready somehow
  if (ready) {
    func();
  } else {
    setTimeout(function () {
      check(ready, func);
    }, 1); // check every msec
  }
}
but this seems like a resource-heavy way, as I fire the same function every millisecond until the resource is loaded.
Is this the right path to complete this problem?
status checker using a 1msec-timed callback - but this seems like a resource heavy way; Is this the right path to complete this problem?
No. You should have a look at Promises. That way, you can easily formulate it like this:
var a1 = getPromiseForAjaxResult(resource1url);
var a2 = getPromiseForAjaxResult(resource2url);
var a3 = getPromiseForAjaxResult(resource3url);

a1.then(function(res) {
  append(res);
  return a2;
}).then(function(res) {
  append(res);
  return a3;
}).then(append);
For example, jQuery's .ajax function implements this.
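getPromiseForAjaxResult is left undefined above; a minimal sketch of such a helper, wrapping XMLHttpRequest in a Promise, might look like this:

function getPromiseForAjaxResult(url) {
  return new Promise(function (resolve, reject) {
    var xhr = new XMLHttpRequest();
    xhr.open('GET', url, true);
    xhr.onload = function () {
      if (xhr.status >= 200 && xhr.status < 400) {
        resolve(xhr.responseText); // fulfil with the response body
      } else {
        reject(new Error('HTTP ' + xhr.status));
      }
    };
    xhr.onerror = function () {
      reject(new Error('Network error'));
    };
    xhr.send();
  });
}

Note that because a1, a2 and a3 are created up front, the three requests run in parallel; only the append calls are serialized.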
You can try something like this:
var resourceData = {};
var resourcesLoaded = 0;

function loadResource(resource, callback) {
  var xhr = new XMLHttpRequest();
  xhr.onload = function() {
    var state = this.readyState;
    var responseCode = this.status;
    if (state == this.DONE && responseCode == 200) {
      callback(resource, this.responseText);
    }
  };
  xhr.open("get", resource, true);
  xhr.send();
}

//Assuming that resources is an array of path names
function loadResources(resources) {
  for (var i = 0; i < resources.length; i++) {
    loadResource(resources[i], function(resource, responseText) {
      //Store the data of the resource in the resourceData map,
      //using the resource name as the key. Then increment the
      //resource counter.
      resourceData[resource] = responseText;
      resourcesLoaded++;

      //If the number of resources that we have loaded is equal
      //to the total number of resources, it means that we have
      //all our resources.
      if (resourcesLoaded === resources.length) {
        //Manipulate the data in the order that you desire.
        //Everything you need is inside resourceData, keyed
        //by the resource url.
        // ...
      }
    });
  }
}
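For instance, the body of that final if block could append everything in the original request order; the snippet below is only an illustration (the insertion target is assumed):

// hypothetical body for the "all resources loaded" branch above:
// append the responses in the original request order.
resources.forEach(function (resource) {
  document.body.insertAdjacentHTML('beforeend', resourceData[resource]);
});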
If certain components (like certain JS files) must be loaded and executed before others, you can queue up your AJAX requests like so:
function loadResource(resource, callback) {
  var xhr = new XMLHttpRequest();
  xhr.onload = function() {
    var state = this.readyState;
    var responseCode = this.status;
    if (state == this.DONE && responseCode == 200) {
      //Do whatever you need to do with this.responseText
      // ...
      callback();
    }
  };
  xhr.open("get", resource, true);
  xhr.send();
}

function run() {
  var resources = [
    "path/to/some/resource.html",
    "path/to/some/other/resource.html",
    // ...
    "http://example.org/path/to/remote/resource.html"
  ];

  //Function that sequentially loads the resources, so that the next resource
  //will not be loaded until the first one has finished loading. I accomplish
  //this by calling the function itself in the callback to the loadResource
  //function. This function is not truly recursive since the callback
  //invocation (even though it is the function itself) is an independent call
  //and therefore will not be part of the original call stack.
  function load(i) {
    if (i < resources.length) {
      loadResource(resources[i], function () {
        load(++i);
      });
    }
  }
  load(0);
}
This way, the next file will not be loaded until the previous one has finished loading.
If you cannot use any third-party libraries, you can use my solution. However, your life will probably be much easier if you do what Bergi suggested and use Promises.
There's no need to call check() every millisecond; just run it in the xhr's onreadystatechange. If you provide a bit more of your code, I can explain further.
I would have a queue of functions to execute, where each one checks that the previous result has completed before executing.
var remoteResults = [];

function requestRemoteResource(index, fetchFunction) {
  // the argument fetchFunction is a function that fetches the remote content;
  // once the content is ready it calls the passed-in function with the result.
  fetchFunction(function(result) {
    // add the remote result to the list of results
    remoteResults[index] = result;
    // write as many results as are ready.
    writeResults(index);
  });
}

function writeResults(index) {
  var i;
  // Execute all functions at least once
  for (i = 0; i < remoteResults.length; i++) {
    if (!remoteResults[i]) {
      return;
    }
    // Call the function that is the ith result.
    // This will modify the DOM.
    remoteResults[i]();
    // Blank the result to ensure we don't double execute.
    // Store a function so we can do a simple boolean check.
    remoteResults[i] = function(){};
  }
}

requestRemoteResource(0, [Function to fetch the first resource]);
requestRemoteResource(1, [Function to fetch the second resource]);
requestRemoteResource(2, [Function to fetch the third resource]);
Please note that this is currently O(n^2) for simplicity; it would be faster but more complex if you stored an object with a hasRendered property at every index of remoteResults. Then you would only need to scan back until you found a result that had not yet arrived or one that had already been rendered.
