jQuery function won't push to array or assign value to property - javascript

I have this piece of code. The problem is that push won't add anything to the words array, and assigning a value to the variable doesn't work either. The data in the collection it iterates through isn't null — I tested it, and calling other functions (for example alert) from that place in the code works.
Thanks for your help.
Here is my piece of code
var uri = 'GetTickerData';
var words = [];
var string;

// Runs once the JSON response arrives; everything in here executes
// asynchronously, after the surrounding script has already finished.
function onTickerData(data) {
    $.each(data, function (key, item) {
        words.push('Some text'); // does run — but only after the request resolves
        string = 'Another text'; // likewise assigned later, not immediately
    });
}

$.getJSON(uri).done(onTickerData);

I've not tested this, you'll have to let me know if this works for you
/**
 * Fetch the ticker JSON and hand the collected values to `callback`
 * once the (asynchronous) request has completed.
 * @param {function(Array)} callback - invoked with the array of item values.
 */
function getReturnedData(callback) {
    var uri = 'GetTickerData';
    $.getJSON(uri, function (data) {
        var words = [];
        $.each(data, function (i) {
            words.push(data[i]);
        });
        callback(words);
    }).fail(function (jqXHR, textStatus, errorThrown) {
        // The original silently never invoked the callback on failure;
        // at least surface the error so it is diagnosable.
        console.error('GetTickerData request failed:', textStatus, errorThrown);
    });
}
getReturnedData(function (returnValue) {
    console.log(returnValue);
});

Related

getting data from file.txt and returning it with an array

i have file.txt
apple <--line 1
banana <--line 2
and this is my script
var url = 'file.txt';   // `var` added: these leaked as implicit globals
var homelists = [];
$.get(url, function (data) {
    var lines = data.split("\n"); // split the file contents by line
    $.each(lines, function (n, urlRecord) {
        homelists.push(urlRecord); // add each line to the homelists array
    });
});
// These two statements run BEFORE the $.get callback above — the request
// is asynchronous — so the array has not been filled yet.
console.log(homelists);    // logs the (still-empty) array reference
console.log(homelists[0]); // undefined
My problem is that I can't read the individual values inside homelists.
How can I access homelists[0] or homelists[1]? (JavaScript or jQuery — jQuery preferred.)
JavaScript/jQuery AJAX calls are asynchronous, meaning the $.get and the console.log in your example do not wait for each other — the log runs before the response has arrived. To work with the contents of file.txt, you need to do it inside the callback function, which executes only after the AJAX call has completed.
var url = 'file.txt';   // `var` added: these leaked as implicit globals
var homelists = [];
$.get(url, function (data) {
    var lines = data.split("\n");
    $.each(lines, function (n, urlRecord) {
        homelists.push(urlRecord);
    });
    // Logging INSIDE the callback: the array is only populated once the
    // asynchronous request has completed.
    console.log(homelists);
    console.log(homelists[0]);
});
I know this is too simple answer and may sound stupid to others but i have an idea!
why not store in the session the $.get data
var url = 'file.txt';   // `var` added: this leaked as an implicit global
$.get(url, function (data) {
    // Persist the raw file contents so they can be read outside the callback.
    localStorage['homelists'] = data;
});
then assign a variable to that session
homelists = localStorage['homelists'];
then make the session = null
localStorage['homelists'] = null
when you do console.log outside
console.log(homelists);                 // a string, which you can split yourself to get an array
console.log(localStorage['homelists']); // null (the stored copy was cleared)
I dont know yet what could be the bad side/effect of this with my project.. any idea?
Since you are using jQuery, It would be better if you use AJAX. !
/**
 * Load a plain-text file via AJAX and log its lines as an array.
 * The result exists only inside the success callback, because the
 * request is asynchronous.
 * @param {string} file - URL of the text file to fetch.
 */
const ImportData = function (file) {
    $.ajax({
        url: file,
        type: 'GET',
        error: (err) => {
            // Throwing inside an async callback cannot be caught by the
            // caller of ImportData — report the failure instead.
            console.error('Failed to load file:', file, err);
        },
        success: (data) => {
            const arrayData = MakeArray(data);
            //Do whatever you want here
            console.log(arrayData);
        }
    });
};
/**
 * Split plain text into an array of trimmed lines.
 * @param {string} plaintext - raw file contents.
 * @returns {string[]} one entry per line, surrounding whitespace removed.
 */
const MakeArray = function (plaintext) {
    return plaintext.split('\n').map((line) => line.trim());
};
<!-- jQuery must be loaded before ImportData runs. -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
<script>
const file = "https://www.w3.org/TR/PNG/iso_8859-1.txt";
// Defer until the DOM is ready before kicking off the fetch.
document.addEventListener('DOMContentLoaded', function(){
ImportData( file );
});
</script>

Can I stop an event handler while it's executing?

Is it possible to stop the execution of a previous event when the event is called again?
To clarify, I have a button <button onclick='load()'>load</button> that calls a load() function which gets an array, processes each element and displays it in a list <ul id='main'></ul>
/**
 * Clear #main and repopulate it from load.php.
 * Each parsed element is handed to process(), which appends to #main.
 */
function load(event) {
    $("#main").empty(); // drop the previous list before reloading
    $.get("load.php", '', function (data) {
        var arr = JSON.parse(data);
        for (var i = 0; i < arr.length; ++i) { // `var` added: i leaked as a global
            process(arr[i]); // BUG FIX: was arr[I] — capital I is undefined
        }
    });
}
Problem is, that if I click the button again while its still putting the elements into the array, I get the new list plus the rest of the old list.
Is there a way to stop the first event while its still executing but still execute the second event?
You should try this:
// Most recent in-flight request; aborted when a new click arrives.
var xhr;

/**
 * Click handler: cancels any pending load, clears the list, and starts
 * a fresh request.
 */
function load(ev) {
    // eventPhase === 2 means the event fired on the target itself.
    if (ev.eventPhase !== 2) {
        return;
    }
    if (xhr) {
        xhr.abort(); // keep the stale request's callback from running
    }
    $('#main').empty();
    xhr = $.get('load.php', function (data) {
        var items = JSON.parse(data);
        for (var idx = 0, total = items.length; idx < total; idx++) {
            process(items[idx]);
        }
    });
}
I can be wrong, but...
// NOTE(review): pseudo-code sketch, not valid JavaScript — $.ajax takes an
// options object (statements cannot appear inside it), and jQuery objects
// use .on/.off rather than addEventListener/removeEventListener.
// Kept verbatim as the author posted it.
var req = $.ajax({
$("#main").addEventListener("click",()=>{req.abort()})
...
...
$("#main").removeEventListener("click",()=>{req.abort()})
});
As noted, you can stop the event by setting a flag and checking it, but a better approach would simply be to assign the new value directly. If your code works it means JSON.parse is returning an array already.
That means
"use strict";
(function () {
function load(event) {
$("#main").empty();
$.get("load.php", '', function (data) {
process = JSON.parse(data);
$("#main").whateverMethodFillsTheElement(process);
});
}());
Also, when writing asynchronous JavaScript code that makes HTTP requests, promises are preferred to callbacks. Since $.get returns a Promise you can write
"use strict";
(function () {
function load(event) {
$("#main").empty();
$.get("load.php")
.then(function (data) {
var items = JSON.parse(data);
$("#main").whateverMethodFillsTheElement(items);
});
}
}());
As discussed in comments, the aim is to use each item in another request which provides the actual value to add to 'main'. So loading data triggers an asynchronous call for each loaded item.
To accommodate this, we need to determine a key field that we can use to track each item so we do not append existing items to the list. We will call this field id for the sake of exposition.
"use strict";
(function () {
var allItems = [];
function load(event) {
$("#main").empty();
$.get("load.php")
.then(function (data) {
return JSON.parse(data);
})
.then(function (items) {
items.forEach(item => {
processItem(item)
.then(function (processed) {
var existingItem = allItems.filter(i => i.id === item.id)[0];
if(existingItem) {
var existingIndex = allItems.indexOf(existingItem);
allItems[existingIndex] = processed;
}
else {
allItems.push(processed);
}
});
});
});
}
}());
Ok, seems like it's not possible to stop an Ajax success function after it began executing or to stop a past event without aborting the current one.
But the following solution worked for me so I figured I'll post it here:
// Monotonic request counter: each call to load() takes a fresh number, and
// stale callbacks detect they have been superseded by comparing against it.
var num = 0;

function load() {
    var curNum = ++num; // identifies this particular request
    $("#main").empty();
    $.get("load.php", '', function (data) {
        var arr = JSON.parse(data);
        for (var i = 0; i < arr.length; ++i) { // `var` added: i leaked as a global
            process(arr[i], curNum);
        }
    });
}

/**
 * Append `item`'s data to #main unless a newer load() has started.
 * @param {*} item - element parsed from the load.php response.
 * @param {number} curNum - the request number this item belongs to.
 */
function process(item, curNum) {
    if (curNum === num) { // don't process if a new request has been made
        //get 'data' based on 'item'...
        if (curNum === num) { // check again in case a new request was made in the meantime
            $("#main").append(data);
        }
    }
}
I appreciate everyone's help.

Accessing array inside object in node js - undefined?

In javascript:
// Post an object containing an array field; jQuery serializes array
// properties with "[]" appended to the key name.
var post = { arr: ["hi", "hello"] };
$.post("http://localhost:8000/test", post);
and in node:
var body = "";
request.on('data', function (data) {
body += data
});
request.on('end', function (data) {
var post = qs.parse(body);
console.log(post); // I see { 'arr[]': ['hi', 'hello'] };
console.log(post.arr); // undefined
}
Any idea what might have caused this?
Based on your comments, it looks like somehow the map key is literally arr[]. Try console.log(post['arr[]']);
jQuery will modify the name of arrays as #MikeC pointed out. More info here: https://stackoverflow.com/a/5888057/1861459

Put XMLHttprequest results into one string

I'm trying to get my array of URL's to run through a JQuery .get function to get the site's source code into one string outside of the function. My code is below.
var URL = ["http://website.org", "http://anothersite.com"];
var array = URL.map(function (fetch) {
    var get = $.get(fetch, function (sourcecode) {
        // NOTE(review): this overwrites the response with the URL, and the
        // value is lost when the callback returns — see the answers below.
        sourcecode = fetch;
    }); // FIX: the $.get(...) call and the map callback were never closed
});
I need the sourcecode variable to be the combination of source code on all of the URLs in the array.
You need to put a variable outside of the function, something like this data variable below and append to it with +=:
var URL = ["http://website.org", "http://anothersite.com"];
// Declared OUTSIDE the callbacks (as the text above describes) so every
// request appends to the same string; the original re-declared it inside
// the map callback, resetting it per URL.
var data = '';
var array = URL.map(function (fetch) {
    var get = $.get(fetch, function (sourcecode) {
        data += sourcecode; // FIX: append the response body, not `fetch` (the URL)
    }); // FIX: the $.get(...) call and the map callback were never closed
});
Try this like,
var URL = ["http://website.org", "http://anothersite.com"];
var array = $(URL).map(function (i, fetch) {
    // BUG FIX: jQuery's .map() callback receives (index, element); the
    // original took a single parameter, so `url` was the index 0, 1, ...
    var data = '';
    $.ajax({
        url: fetch,
        // async:false blocks the UI thread and is deprecated, but it is
        // what lets this .map() return each response synchronously.
        async: false,
        success: function (d) {
            data = d;
        }
    });
    return data;
}).get();
Since you're using jQuery, I suppose that jQuery.each() may be a better way to iterate over the array.
var URL = ["http://website.org", "http://anothersite.com"];
var str = [];
$.each(URL, function (index, fetch) {
    $.get(fetch, function (sourcecode) {
        str.push(sourcecode); // collect each response as it arrives
        // Only once every URL has responded is the combined string ready;
        // the original logged (and discarded join's result) immediately,
        // before the asynchronous requests had completed.
        if (str.length === URL.length) {
            console.log(str.join('')); // the combined source of all pages
        }
    });
});
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>

How to pull JSON data from two different sources?

I was wondering if there is a way to pull and use JSON data from two different sources. Currently, the code looks like this:
//JSON1
$.getJSON('url1',function(data){
$.each(data,function(key,val){
//code — runs when url1's response arrives; locals created here are not visible in the url2 callback below
});
});
//JSON2
$.getJSON('url2',function(data){
$.each(data,function(key,val){
//code — runs independently, in whichever order the responses arrive
});
});
When I do this, it seems that variables created in one JSON callback aren't available in the other, which makes it hard to use them together.
Is there a better way to have these two work together?
This function takes an array of urls and a callback as parameters:
/**
 * Fetch several JSON URLs in parallel and invoke `callback` once every
 * request has settled, with an object mapping each URL to its data.
 * URLs that fail are simply absent from the result instead of stalling
 * the callback forever (the original never fired it on any failure).
 * @param {string[]} urlList - URLs to fetch.
 * @param {function(Object)} callback - receives { url: data, ... }.
 */
function getMultiJSON(urlList, callback) {
    var respList = {};
    var doneCount = 0;
    // Shared completion bookkeeping for success and failure alike.
    function finishOne() {
        doneCount++;
        if (doneCount === urlList.length) {
            callback(respList);
        }
    }
    urlList.forEach(function (url) {
        $.getJSON(url, function (data) {
            respList[url] = data;
        }).always(finishOne);
    });
}
You would use it like this:
// Example usage: the callback fires only after BOTH URLs have loaded.
getMultiJSON(['url1','url2'],function(response) {
// in this case response would have 2 properties,
//
// response.url1 data for url1
// response.url2 data for url2
// continue logic here
});
You might want to add a timeout as the function will never call your handler should any of the URLs fail to load
Variable declared within the functions using var (or blocks, using let) are not available outside of the functions (or blocks).
$.getJSON('url1',function(data){
$.each(data,function(key,val){
// Scoped to this inner callback: `var` makes it invisible outside.
var only_accessible_here = key;
});
});
So if you want variables that are accessible outside the scope of the function they are declared in, you need to declare them outside of the function they are used in.
// Shared accumulator: both callbacks append to it, in whichever order
// the two responses happen to arrive.
var combined_stuff = '';

//JSON1
$.getJSON('url1', function (data) {
    $.each(data, function (key, val) {
        combined_stuff += val;
    });
});

//JSON2
$.getJSON('url2', function (data) {
    $.each(data, function (key, val) {
        combined_stuff += val;
    });
});
As Marc B says, there is no way to know which order the combined_stuff variable will be updated, either by JSON1 first, or by JSON2 first, or by only one, if one of the getJSON calls fail, or by neither if both fail.
If the order of updating is important, call the one you want to use second in the function of the one you want to call first.
// Ordered version: url2 is requested from inside url1's iteration, so its
// values are appended after the corresponding url1 value.
var combined_stuff = '';
$.getJSON('url1', function (outerData) {
    $.each(outerData, function (outerKey, outerVal) {
        combined_stuff += outerVal;
        //JSON2
        $.getJSON('url2', function (innerData) {
            $.each(innerData, function (innerKey, innerVal) {
                combined_stuff += innerVal;
            });
        });
    });
});
Easily using the open source project jinqJs (http://www.jinqJs.com)
// jinqJs .from(url) with no callback performs a synchronous request, so
// both result sets are available immediately afterwards.
var data1 = jinqJs().from('http://....').select();
var data2 = jinqJs().from('http://....').select();
// Combine both result sets into a single query source.
var result = jinqJs().from(data1, data2).select();
The example does a sync call, you can do an async call by doing something like this:
var data1 = null;
// Async form: the callback receives the jinqJs instance once the URL has
// loaded; data1 is only populated after that point.
jinqJs().from('http://....', function(self){ data1 = self.select(); });
Result will contain both results combined.
If you control the endpoint, you could make it return all of the data you want in one shot. Then your data would look like:
{
"url1_data": url1_json_data,
"url2_data": url2_json_data
}
If you still have 2 endpoints you need to hit, you can pass the result of your first ajax call to the second function (but this makes your 2 ajax calls run sequentially rather than in parallel):
// Chain the two requests: url2 is fetched only after url1 has responded,
// with url1's payload handed along.
function getJson1() {
    $.getJSON('url1', (data) => getJson2(data));
}

function getJson2(json1Data) {
    $.getJSON('url2', function (data) {
        //Do stuff with json1 and json2 data
    });
}

getJson1();
I would recommend you to use $.when function available in jquery to execute both the methods in parallel and then take the action. See the code snipped below,
// Results land in these module-level arrays once the requests finish.
var json1 = [], json2 = [];

// $.when starts both requests in parallel; .always fires only after both
// have settled (success or failure).
$.when(GetJson1(), GetJson2()).always(function () {
    if (json1.length > 0) {
        $.each(json1, function (key, val) {
            //code
        });
    }
    if (json2.length > 0) {
        $.each(json2, function (key, val) {
            //code
        });
    }
});
/**
 * Start the first request; returns the jqXHR promise so $.when can wait
 * on it. On success the payload is stored in the module-level `json1`.
 */
function GetJson1() {
    return $.ajax({
        url: 'url1',
        type: 'GET',
        dataType: 'json',
        success: function (data, textStatus, xhr) {
            if (data != null) {
                json1 = data;
            }
        },
        error: function (xhr, textStatus, errorThrown) {
            json1 = []; // just initialize to avoid js error
        }
    }); // FIX: the $.ajax call and the function body were never closed
}
/**
 * Start the second request; returns the jqXHR promise so $.when can wait
 * on it. On success the payload is stored in the module-level `json2`.
 */
function GetJson2() {
    return $.ajax({
        url: 'url2',
        type: 'GET',
        dataType: 'json',
        success: function (data, textStatus, xhr) {
            if (data != null) {
                json2 = data;
            }
        },
        error: function (xhr, textStatus, errorThrown) {
            json2 = []; // just initialize to avoid js error
        }
    }); // FIX: the $.ajax call and the function body were never closed
}
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
The returned data from each AJAX call are not available outside its own callback function. I'm sure there are more elegant (complex?) solutions, but a couple of simple, Occamic, solutions include global variables, or storing the received data in hidden input elements.
Within each callback function, just loop until the data from the other call is present:
// NOTE(review): the original busy-waited in a while loop on a hidden input
// inside each callback. JavaScript is single-threaded, so that loop blocks
// the event loop and the other response can never arrive — it deadlocks.
// Chaining the requests achieves the stated goal without spinning.
function getJson1() {
    $.getJSON('url1', function (data) {
        $('#hidden1').val(data); // stash url1's data for later use
        getJson2();              // only now start the second request
    });
}

function getJson2() {
    $.getJSON('url2', function (d2) {
        $('#hidden2').val(d2);
        var d1 = $('#hidden1').val(); // url1's data, guaranteed present here
        //Do stuff with json1 and json2 data
    });
}

getJson1();

Categories