I want to get all the JSON files in my directory so I can search through them.
$('#get-data').click(function () {
    var showData = $('#show-data');

    $.getJSON('/data/**all files json**', function (data) {
        console.log(data);

        var items = data.items.map(function (item) {
            return item.key + ': ' + item.value;
        });

        showData.empty();

        if (items.length) {
            var content = '<li>' + items.join('</li><li>') + '</li>';
            var list = $('<ul />').html(content);
            showData.append(list);
        }
    });

    showData.html(data);
});
Do you think this is possible, or do I need to use another method?
Thanks
You cannot make a wildcard AJAX request that returns all the JSON files on your server. You need to know in advance the exact endpoint you will be sending your request to; it would be a security vulnerability if any client could discover what files are available on the server.
So one possible strategy here would be to have some /data/discover endpoint on your server that returns a list of all available JSON files. The client first makes an AJAX request to this endpoint, and then makes a separate AJAX request for each endpoint returned in the list:
$.getJSON('/data/discover', function (data) {
    // suppose that data looks like this:
    // ["/data/a.json", "/data/b.json", "/data/c.json"]
    for (var i = 0; i < data.length; i++) {
        // send an AJAX request to each individual JSON file
        // available on the server as returned by the discover endpoint
        $.getJSON(data[i], function (result) {
            // ...
        });
    }
});
Basically, you can't request multiple files with one request.
However, this scenario is a perfect fit for async.parallel:
var async = require('async');
var fs = require('fs');

app.get('/json', function (req, res) {
    var work = {
        file01: async.apply(fs.readFile, __dirname + '/file01.json'),
        file02: async.apply(fs.readFile, __dirname + '/file02.json')
    };

    async.parallel(work, function (error, results) {
        if (error) {
            res.status(500).send(error);
            return;
        }
        // might need string -> Object conversion here
        results['file01'] = JSON.parse(results['file01']);
        results['file02'] = JSON.parse(results['file02']);
        res.send(results);
    });
});
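On the client, a single request can then fetch everything at once. A minimal sketch, assuming the Express /json route above is served from the same origin:

$('#get-data').click(function () {
    $.getJSON('/json', function (results) {
        // results.file01 and results.file02 hold the parsed
        // contents of the two JSON files combined by the server
        console.log(results);
    });
});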
Related
I have a dictionary, chunks, on my server, which is set up with Express. I exposed it using the app.get() function and set the URL to /search.
Here is that code:
app.get('/search', function (req, res) {
    var queryString = req.query.query;
    console.log(queryString);

    var results = Object.keys(index).reduce(function (a, b) {
        if (b.toLowerCase() === queryString.toLowerCase()) {
            return a.concat(index[b]);
        } else {
            console.log("error");
        }
        return a;
    }, []);

    // *** build the chunks dictionary from the results ***
    var num_chunks = Math.round(results.length / 12);
    var chunks = [];
    for (var i = 0; i < num_chunks; i++) {
        var j = 12 * i;
        var sliced = results.slice(j, j + 12);
        chunks.push({
            key: Math.floor((Math.random() * 100000000000000000000) + 1),
            value: sliced
        });
    }

    res.header("Content-Type", 'application/json');
    res.json(results);
});
Now, on the client side of my website, I want to retrieve chunks and interact with its contents. Could anyone tell me how I would implement an Ajax/jQuery call to get the dictionary and then reference it in my client-side code?
Here is what my ajax function looks like right now, but it is missing the key elements:
$.ajax({
url: '/search',
data: _needs to be filled in_,
success: _needs to be filled in_,
dataType: "json"
});
Any help would be immensely appreciated. Thanks so much in advance.
Cheers, Theo
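A minimal sketch of the missing pieces might look like this (illustrative only: it assumes the search term sits in a query variable, and note the route above currently sends results, so the server would need res.json(chunks) for the client to see the chunked dictionary):

$.ajax({
    url: '/search',
    data: { query: query }, // becomes ?query=... i.e. req.query.query on the server
    dataType: "json",
    success: function (chunks) {
        // chunks is the parsed JSON response, e.g. chunks[0].value
        console.log(chunks);
    }
});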
I'm creating a script that will make a request twice per second to a local server of a camera network, and after it gets a positive response that a camera detected someone, I want to log three images.
In the JSON config file I have the triggerURL of the server, the interval port, the dataDir where logged images should be saved, and a track array which contains the URLs of those images and the fileName each should receive.
This is the code of the script I use after reading the JSON file:
var configGet = {
    host: config.triggerURL,
    port: config.interval,
    method: 'GET'
};

setInterval(function () {
    var request = http.request(configGet, function (response) {
        var content = "";

        // Handle data chunks
        response.on('data', function (chunk) {
            content += chunk;
        });

        // Once we're done streaming the response, parse it as json.
        response.on('end', function () {
            var data = JSON.parse(response);
            if (data.track.length > 0) {
                // log images
                var download = function (uri, filename, callback) {
                    request.head(uri, function (err, res, body) {
                        request(uri)
                            .pipe(fs.createWriteStream(filename))
                            .on('close', callback);
                    });
                };
                for (var image in data.track) {
                    var path = config.dataDir + '/' + image.fileName;
                    download(image.url, path.format(config.timestamp), function () {
                        console.log('done');
                    });
                }
            }
        });
    });

    // Report errors
    request.on('error', function (error) {
        console.log("Error while calling endpoint.", error);
    });

    request.end();
}, 500);
I have the following questions:
This method produces some kind of error in the download process of the images. Can you identify it?
Is there a better way of doing this process?
Without running the code or deeper inspection: shouldn't data = JSON.parse(response) rather be data = JSON.parse(content)? Also, if data is undefined or does not contain track, then if (data.track.length > 0) will throw an error. This can be fixed with if (data && data.track && data.track.length > 0).
I cannot think of a much better way. I would break it up into more functions to make the code clearer, though.
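Applied to the snippet above, the corrected end handler would look roughly like this (only the two suggested fixes, everything else unchanged):

response.on('end', function () {
    // parse the accumulated body, not the response object
    var data = JSON.parse(content);
    // guard against a missing or malformed track property
    if (data && data.track && data.track.length > 0) {
        // log images as before
    }
});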
Just learning Sails.js, so go easy on me.
I have queried an XML service and successfully jsonified it using xml2js:
var req = https.request(options, function (res) {
    var xml = '';
    res.on('data', function (chunk) {
        xml += chunk;
    });
    res.on('end', function () {
        var result = parseString(xml, function (err, result) {
            console.log(JSON.stringify(result)); // Position 1
        });
        return result;
    });
});

req.on('error', function (e) {
    console.log('problem with request: ' + e.message);
});

req.write(data);
var result = req.end();

console.log('Result: ' + JSON.stringify(result)); // Position 2
res.view({ message: 'hello', result: result });
The view is loading fine, and <%= message %> outputs hello. Great.
The Position 1 console.log is returning the stringified JSON object. Great.
The Position 2 console.log is returning Result: true. Not good.
I need to be able to get that JSON data to my view for parsing. How do I do this?
It looks like you're assuming that calling req.end() will give you the response from the https.request you started above. There are a couple of things wrong with that:
req.end() is used to finish writing to an open request, not to get a response. According to the docs, the return value is unspecified.
The https.request call is asynchronous; even if req.end() worked like you want it to, the response wouldn't have come in by the time you call it.
The solution is to put your response code (i.e. your res.view) inside the handler for the end event that you've already written. I'd also recommend refactoring your code to use different variable names for the remote request / response so that they don't collide with the req and res variables in your controller action. The whole thing would then be something like:
myAction: function (req, res) {
    // Not sure how you're setting options, so just an example
    var options = {url: 'http://example.com', ...}

    var request = https.request(options, function (response) {
        var xml = '';
        response.on('data', function (chunk) {
            xml += chunk;
        });
        response.on('end', function () {
            parseString(xml, function (err, result) {
                return res.view({ message: 'hello', result: JSON.stringify(result) });
            });
        });
    });

    request.on('error', function (e) {
        console.log('problem with request: ' + e.message);
        res.serverError(e);
    });

    // don't forget to finish the request so it actually gets sent
    request.end();
}
You might also look into using something like the Request module to simplify your external request; it would save you from having to write event handlers for data and end.
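For instance, a sketch of the same external call with the Request module (assuming the same URL and xml2js parseString as above; requestLib is just a name chosen to avoid clashing with the request variable in the action):

var requestLib = require('request');

// one call replaces the manual event-handler plumbing:
// the callback receives the complete response body
requestLib({ url: 'http://example.com' }, function (err, response, xml) {
    if (err) { return res.serverError(err); }
    parseString(xml, function (parseErr, result) {
        res.view({ message: 'hello', result: JSON.stringify(result) });
    });
});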
If you want to pass the JSON to some JavaScript variable in your view (note the <%- unescaped output tag; <%= would HTML-escape the quotes in the JSON):
var clientJsonVar = <%- JSON.stringify(serverSideJson) %>;
I am a rookie in Node.js and asynchronous programming. I am having a problem executing a GET request inside an asynchronous function. Here I am posting the whole code. I am trying to pull a list of all URLs, add them to a list, and send the list for processing to another function.
My problem is with processing them. In turn, for each URL I am executing a GET request to fetch the body and look for image elements in it. I then want to pass the image URL to a third-party API as a GET param. I am unable to execute that GET request, as the control doesn't seem to reach there at all.
var async = require("async"),
    request = require("request"),
    cheerio = require("cheerio");

async.waterfall([
    function (callback) {
        var url = "someSourceUrl";
        var linkList = [];
        request(url, function (err, resp, body) {
            var $ = cheerio.load(body);
            $('.list_more li').each(function () {
                // Find all urls and add them to a list
                $(this).find('a').each(function () {
                    linkList.push($(this).attr('href'));
                });
            });
            callback(null, linkList);
        });
    },
    // pass all the links as a list to callback
    function (linksListFetched, callback) {
        for (var i in linksListFetched) {
            callback(null, linksListFetched[i]);
        }
    }],
    //***********My problem is with the below code**************
    function (err, curUrl) {
        var cuResp = "";
        console.log("Currently Processing Url : " + curUrl);
        request(curUrl, function (err, resp, body) {
            var $ = cheerio.load(body);
            var article = $("article");
            var articleImage = article.find("figure").children('img').attr('src');
            var responseGrabbed = "API response : ";

            // check if there is an IMG element
            if (articleImage === undefined) {
                console.log("No Image Found.");
                articleImage = 'none';
            }
            else {
                // if there is an img element, pass this image url to an API,
                // so do a GET call by passing imageUrl to the API as a GET param
                request("http://apiurl.tld?imageurl=" + articleImage, function (error, response, resp) { // code doesn't seem to reach here
                    // I would like to grab the response and concatenate it to the responseGrabbed var.
                    console.log(resp);
                    responseGrabbed += resp;
                });
            }
            console.log(responseGrabbed); // api response never gets concatenated :(
            console.log("_=_=_=_=_=_=__=_=_=_=_=_=__=_=_=_=_=_=__=_=_=_=_=_=_");
            process.exit(0);
        });
    });
I'd appreciate it if anyone can help me understand the root cause. Thanks in advance.
request() is asynchronous, so when you console.log the string, it hasn't been built yet; you have to do the console.log inside the callback:
request("http://apiurl.tld?imageurl=" + articleImage, function(error, response, resp) {
responseGrabbed += resp;
console.log(responseGrabbed);// api response never gets concatenated :(
console.log("_=_=_=_=_=_=__=_=_=_=_=_=__=_=_=_=_=_=__=_=_=_=_=_=_");
});
The same goes for terminating the process, which should only be done once all the requests have finished.
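A sketch of that last part, using a plain counter (the imageUrls array here is illustrative):

var pending = imageUrls.length;

imageUrls.forEach(function (imageUrl) {
    request("http://apiurl.tld?imageurl=" + imageUrl, function (error, response, body) {
        responseGrabbed += body;
        pending--;
        if (pending === 0) {
            // every request has called back; safe to exit now
            process.exit(0);
        }
    });
});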
I'm trying to make a simple feed reader in Node and I'm facing a problem with multiple requests in node.js.
For example, I have a table with URLs, something like:
var urls = [
    "http://url1.com/rss.xml",
    "http://url2.com",
    "http://url3.com"];
Now I want to get the contents of each URL. My first idea was to use for (var i in urls), but it's not a good idea. The best option would be to do it asynchronously, but I don't know how.
Any ideas?
EDIT:
I got this code:
var data = [];
for (var i = 0; i < urls.length; i++) {
    http.get(urls[i], function (response) {
        console.log('Response: ', response.statusCode, ' from url: ', urls[i]);
        var body = '';
        response.on('data', function (chunk) {
            body += chunk;
        });
        response.on('end', function () {
            data.push(body);
        });
    }).on('error', function (e) {
        console.log('Error: ', e.message);
    });
}
The problem is that the http.get(...) line is called first for each element in the loop, and only afterwards do the response.on('data') events fire, followed by response.on('end'). It makes a mess and I don't know how to handle it.
I know this is an old question, but I think a better solution would be to use JavaScript's Promise.all():
const request = require('request-promise');
const urls = ["http://www.google.com", "http://www.example.com"];

const promises = urls.map(url => request(url));
Promise.all(promises).then((data) => {
    // data is an array of response bodies, in the same order as urls
});
By default, Node HTTP requests are asynchronous. You can start them sequentially in your code and call a function that runs once all requests are done. You can either do it by hand (count finished vs. started requests) or use async.js.
This is the no-dependency way (error checking omitted):
var http = require('http');
var urls = ["http://www.google.com", "http://www.example.com"];
var responses = [];
var completed_requests = 0;

for (var i in urls) {
    http.get(urls[i], function (res) {
        responses.push(res);
        completed_requests++;
        if (completed_requests == urls.length) {
            // All downloads done, process the responses array
            console.log(responses);
        }
    });
}
You need to check that the end event (data complete) has been fired exactly as many times as there are requests. Here's a working example:
var http = require('http');
var urls = ['http://adrianmejia.com/atom.xml', 'http://twitrss.me/twitter_user_to_rss/?user=amejiarosario'];
var completed_requests = 0;

urls.forEach(function (url) {
    var responses = [];
    http.get(url, function (res) {
        res.on('data', function (chunk) {
            responses.push(chunk);
        });
        res.on('end', function () {
            if (completed_requests++ == urls.length - 1) {
                // All downloads are completed
                console.log('body:', responses.join());
            }
        });
    });
});
You can use any promise library with an .all implementation. I use the RSVP library; it's simple enough.
var RSVP = require('rsvp');
var fs = require('fs');
var request = require('request');

var downloadFileList = [{url: 'http://stuff', dataname: 'filename to download'}];

var ddownload = downloadFileList.map(function (id) {
    var dataname = id.dataname;
    var url = id.url;
    return new RSVP.Promise(function (fulfill, reject) {
        var stream = fs.createWriteStream(dataname);
        stream.on('close', function () {
            console.log(dataname + ' downloaded');
            fulfill();
        });
        request(url).on('error', function (err) {
            console.log(err);
            reject();
        }).pipe(stream);
    });
});

return RSVP.allSettled(ddownload);
Promise.allSettled will not stop at the first error. It makes sure you process all responses, even if some of them fail.
Promise.allSettled(promises)
    .then((data) => {
        // do your stuff here
    })
    .catch((err) => {
        console.log(JSON.stringify(err, null, 4));
    });
The problem can easily be solved using a closure. Make a function to handle the request and call that function in the loop. Every time the function is called, it has its own lexical scope, and using closure it is able to retain the URL's value even after the loop ends. Even if the response arrives in streams, the closure handles that too.
const request = require("request");

function getTheUrl(data) {
    var options = {
        url: "https://jsonplaceholder.typicode.com/posts/" + data
    };
    return options;
}

function consoleTheResult(url) {
    request(url, function (err, res, body) {
        console.log(url);
    });
}

for (var i = 0; i < 10; i++) {
    consoleTheResult(getTheUrl(i));
}