Express node.js - Requesting JSON, processing it asynchronously - javascript

With my node.js app, I'm getting my JSON data from a spreadsheet API.
It basically returns JSON of the following.
{
  "status": 200,
  "success": true,
  "result": [
    {
      "Dribbble": "a",
      "Behance": "",
      "Blog": "http://blog.invisionapp.com/reimagine-web-design-process/",
      "Youtube": "",
      "Vimeo": ""
    },
    {
      "Dribbble": "",
      "Behance": "",
      "Blog": "http://creative.mailchimp.com/paint-drips/?_ga=1.32574201.612462484.1431430487",
      "Youtube": "",
      "Vimeo": ""
    }
  ]
}
It's just dummy data for now, but one thing is certain: I need to process the values (blog URLs) under Blog differently. For each blog URL, I need to get Open Graph data, so I'm using a module called open-graph-scraper.
With data.js I get the whole JSON, and it's available in the route index.js as data. Then I process this data by checking the Blog column. If it's a match, I loop the values (blog URLs) through the open-graph-scraper module.
This will give me open graph data of each blog url like the following example JSON.
{
  data: {
    success: 'true',
    ogImage: 'http://davidwalsh.name/wp-content/themes/punky/images/logo.png',
    ogTitle: 'David Walsh - JavaScript, HTML5 Consultant',
    ogUrl: 'http://davidwalsh.name/',
    ogSiteName: 'David Walsh Blog',
    ogDescription: 'David Walsh Blog features tutorials about MooTools, jQuery, Dojo, JavaScript, PHP, CSS, HTML5, MySQL, and more!'
  },
  success: true
}
So my goal is to pass this blog JSON separately from the main JSON and put it in the render as its own object, so that the view receives two separate JSON objects. But I'm not sure if my approach with getBlogData is correct.
I'm not even sure if processing data like this is a good thing to do in a router file. I would appreciate some direction.
index.js
var ogs = require('open-graph-scraper');
var data = require('../lib/data.js');
data(function (data) {
  var getBlogData = function (callback) {
    var blogURL = [];
    if (data.length > 0) {
      var columnsIn = data[0];
      for (var key in columnsIn) {
        if (key === 'Blog') {
          for (var i = 0; i < data.length; i++) {
            blogURL.push(data[i][key]);
          }
        }
      }
    }
    ogs({
      url: blogURL
    }, function (er, res) {
      console.log(er, res);
      callback(res);
    });
  };
  getBlogData(function (blogData) {
    // I want to make this blogData available in the render below but don't know how
  });
  router.get('/', function (req, res, next) {
    res.render('index', {
      title: 'Express',
      data: data
    });
  });
});
data.js (my module that gets JSON data)
module.exports = function (callback) {
  var request = require("request");
  var url = "http://sheetsu.com/apis/94dc0db4";
  request({
    url: url,
    json: true
  }, function (error, response, body) {
    if (!error && response.statusCode === 200) {
      var results = body["result"];
      callback(results);
    }
  });
};

The problem you'll have is that if you fetch the blog data asynchronously (and you should, since you don't want the client waiting around for all that data to return), res.render will already have been called by the time the data arrives. Since you can't call res.render again, two options come to mind:
You could query for individual blog data from the client. This results in more back-and-forth between the client and server, but it is a good strategy if you have a lot of entries in your initial data and only want to display a small number (a sketch of this approach follows below).
You could use websockets to send the data to the client as you retrieve it. Look up something like express.io for an easy way to do this.
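Here is a minimal sketch of the first option, assuming a hypothetical /og route that the client calls once per blog URL (the route name and query parameter are illustrative, not part of the original code):
var express = require('express');
var router = express.Router();
var ogs = require('open-graph-scraper');

// GET /og?url=http://blog.example.com/post
// Returns the Open Graph data for a single blog URL so the client can fetch it on demand.
router.get('/og', function (req, res) {
  ogs({ url: req.query.url }, function (err, ogData) {
    if (err) {
      return res.status(500).json({ success: false });
    }
    res.json(ogData);
  });
});

module.exports = router;
The index view then renders the spreadsheet data immediately, and each row requests its own Open Graph data only when it is actually displayed.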

Related

Node js http server accept POST and accept JSON

I am trying to create a Node.js server with the http package. I want to receive only POST requests, which I have already implemented. The problem I am facing is that I am not able to parse the JSON correctly (I am expecting a JSON body to be attached).
I tried using JSON.parse, but that doesn't seem to parse the whole JSON content. It leaves some values as [Object], which looks wrong. I saw a few packages such as JSONStream, but I am not sure how to use them in this case.
server.on('request', function (req, res) {
  if (req.method == 'POST')
  {
    var jsonString;
    req.on('data', function (data) {
      jsonString = JSON.parse(data);
    });
    req.on('end', function () {
      serverNext(req, res, jsonString);
    });
  }
  else
  {
    res.writeHead(405, {'Content-type': 'application/json'});
    res.write(JSON.stringify({error: "Method not allowed"}, 0, 4));
  }
  res.end();
});
Request example:
Here d is the JSON file content. (I made this example request in Python.)
r = requests.post('http://localhost:9001', headers = {'content-type': 'application/json'}, data = json.dumps(d))
Note: I am able to parse the JSON correctly, but there are some cases where it shows something like this:
{ 'Heading':
   { 'Content':
      { sometext: 'value',
        List: [Object],      // Wrong
        test: [Array] } } }  // Wrong
Update:
Inside serverNext() I am extracting a few values like:
var testRequest = Object.keys(jsonData)[0];
var testId = Object.keys(jsonData[testRequest])[0];
var test = jsonData[testRequest][testId];
Further, if I keep extracting values, at some point it hits an [Object] value and crashes.
I can reproduce this "problem" with data such as { "Foo": {"Bar": {"Some data": [43, 32, 44]} } } -- it displays the following result: { Foo: { Bar: { 'Some data': [Object] } } }.
As the OP mentioned in the question, the JSON is parsed correctly. The reason [Object] appears in the output is that when a JavaScript object is handed off for display, it is converted to a string first automatically (via toString()), and that conversion collapses nested objects (including arrays) to [Object] in the text.
To display the real content, JSON.stringify() needs to be invoked. In your case, the code would be:
req.on('end', function () {
  serverNext(req, res, JSON.stringify(jsonString));
});
Please note that it would be better to rename the variable jsonString to jsonObject, since it holds the parsed object rather than a string.
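A small illustration of the point above (a sketch; the exact console output depends on the Node version and its default inspection depth):
// The data itself is intact; only its printed representation collapses deep levels.
var jsonObject = { Foo: { Bar: { Baz: { deep: 'value' } } } };

console.log(jsonObject);                 // { Foo: { Bar: { Baz: [Object] } } }
console.log(JSON.stringify(jsonObject)); // {"Foo":{"Bar":{"Baz":{"deep":"value"}}}}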

Node.js backend to receive a JavaScript fetch

I want to use the JavaScript fetch API to post data to a Node.js backend. I call fetch on the client side, inside Angular 2 functions.
In my front end I have button to save data to array:
<input #data /><button (click)="addGoal(data.value)">Add</button>
<ul><li *ngFor="let goal of goals">{{goal}}</li></ul>
<br/><button (click)="saveData()">Save</button>
On the Angular 2 side I add data to the goals array and call the addGoal function from the front end:
export class AppComponent {
  title = 'Goals';
  goals = [];

  addGoal(goal: string) {
    this.goals.push(goal);
  }

  saveData() {
    fetch("http://localhost:3000/", {
      method: "POST",
      mode: "no-cors",
      body: {goals: this.goals},
    });
  }
}
I want to get the data to the backend so I can later save it to the database.
Node.js side:
var data = {};
app.post('/', function (req, res) {
  data = req.body;
});
app.get('/', function (req, res) {
  res.send(data);
});
However, this is not a correct solution. Can you help me?
Edit: the Body mixin functions (https://developer.mozilla.org/en-US/docs/Web/API/Body) do not seem to work.
In case you are using Express, you need to use the body-parser middleware. If you are dealing with HTML5 FormData / multipart form data, you need multer.
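A minimal sketch of that setup, reusing the route and port from the question (the CORS handling and the JSON response are assumptions, not part of the original code):
// server.js - Express with body-parser so req.body is populated for JSON posts
var express = require('express');
var bodyParser = require('body-parser');
var app = express();

app.use(bodyParser.json()); // parse application/json request bodies

var data = {};

app.post('/', function (req, res) {
  data = req.body; // e.g. { goals: ['goal 1', 'goal 2'] }
  res.json({ saved: true });
});

app.get('/', function (req, res) {
  res.send(data);
});

app.listen(3000);
On the client, the body has to be a JSON string with a matching Content-Type header, which also means dropping mode: "no-cors" (that mode does not allow an application/json header), so the server would need to allow CORS if the Angular app is served from a different origin:
saveData() {
  fetch("http://localhost:3000/", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ goals: this.goals })
  });
}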

Backbone - Populating multiple models from one fetch call in my controller

Say I have a collection (of search results, for example) which needs to be populated and a pagination model that needs to take values for current page, total number of pages, etc. In my controller, I make a GET call to an API which returns both search results and pagination information. How, then, can I fetch all this information and parse it into a collection and a separate model? Is this possible?
I am using AirBNB's Rendr, which allows you to use a uniform code base to run Backbone on both the server and the client. Rendr forces me to parse the API response as an array of models, keeping me from being able to access pagination information.
In Rendr, my controller would look like this:
module.exports = {
  index: function (params, callback) {
    var spec = {
      pagination: { model: 'Pagination', params: params },
      collection: { collection: 'SearchResults', params: params }
    };
    this.app.fetch(spec, function (err, result) {
      callback(err, result);
    });
  }
};
I apologize if this is not clear enough. Feel free to ask for more information!
This is super old, so you've probably figured it out by now (or abandoned it). This is as much a Backbone question as a Rendr one, since the API response is non-standard.
Backbone suggests that if you have a non-standard API response then you need to override the parse method for your exact data format.
If you really want to break it up, the way you may want to code it is:
a Pagination Model
a Search Results Collection
a Search Result Model
and most importantly a Search Model with a custom parse function
Controller:
index: function (params, callback) {
  var spec = {
    model: { model: 'Search', params: params }
  };
  this.app.fetch(spec, function (err, result) {
    callback(err, result);
  });
}
Search Model
var Base = require('./base'),
    _ = require('underscore');

module.exports = Base.extend({
  url: '/api/search',
  parse: function (data) {
    if (_.isObject(data.paginationInfo)) {
      data.paginationInfo = this.app.modelUtils.getModel('PaginationInfo', data.paginationInfo, {
        app: this.app
      });
    }
    if (_.isArray(data.results)) {
      data.results = this.app.modelUtils.getCollection('SearchResults', data.results, {
        app: this.app,
        params: {
          searchQuery: data.searchQuery // replace with real parameters for client-side caching
        }
      });
    }
    return data;
  }
});

module.exports.id = 'Search';
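For completeness, the PaginationInfo model referenced in parse() could be as small as this (a sketch following the same Base model pattern as above; its attributes would simply be whatever the API returns under paginationInfo):
// pagination_info.js
var Base = require('./base');

module.exports = Base.extend({
  // No url here: it is only instantiated from the Search model's parse(),
  // so it never fetches on its own.
});
module.exports.id = 'PaginationInfo';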

Is there a way to send an object as well as a separate variable in an ajax POST request

I have a post request I am doing like so:
var addProject = function (project, id) {
  return $.ajax(projectUrl, {
    type: "POST",
    data: { project: project, id: id }
  });
};
This is all fine, and it sends up my project object (as JSON) with no problem. What I want to do is make this request but also add a key (that does not belong in the project object) that I can pass to the server controller. Any idea if I can do this, or what a good way to do it would be? I really don't want to have to go back and change my object models to account for a key that I will only need every once in a while.
This is an ASP.NET MVC4 project, and it is going back to a web api controller if that matters.
EDIT: here is my server side stuff
public HttpResponseMessage PostNew(Project project, int id)
{
    // TODO grab the client to add the project to
    Uow.Projects.Add(project);
    Uow.Commit();
    HttpResponseMessage response = Request.CreateResponse(HttpStatusCode.Created, project);
    //response.Headers.Location = new Uri(Url.Link("ApiControllerAction", new { id = client.ID }));
    return response;
}
Maybe I am not being clear enough. I want to pass in the project object and then just an int variable. Passing project alone works, but if I try to pass the int as well, it gives me a 400 error.
var addProject = function (project) {
  return $.ajax(projectUrl, {
    type: "POST",
    data: { data1: 'Object', data2: 'variable' }
  });
};
You just have to send two data params: the object and the variable.
The easiest way to pass a complex object is to JSON-encode it:
var addProject = function (project) {
  return $.ajax(projectUrl, {
    type: "POST",
    contentType: 'application/json',
    data: JSON.stringify(project)
  });
};
To decode this on the server side, you will need a JSON decoder of some kind. I'm sure C# has some available.
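One way to combine both answers for the Web API controller shown above (a sketch; it relies on Web API's default binding, where the complex Project comes from the body and the simple int id comes from the URI):
var addProject = function (project, id) {
  // project travels as the JSON body; id travels in the query string
  return $.ajax(projectUrl + '?id=' + encodeURIComponent(id), {
    type: "POST",
    contentType: 'application/json',
    data: JSON.stringify(project)
  });
};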

Node.js - Can't post nested/escaped JSON to body using Fermata REST client

The problem may be with the actual client, but the author isn't responding on GitHub, so I'll give this a shot!
I'm trying to post, in the body, nested JSON:
{
  "rowkeys": [
    {
      "rowkey": "rk",
      "columns": [
        {
          "columnname": "cn",
          "columnvalue": "{\"date\":\"2011-06-21T00:53:10.309Z\",\"disk0\":{\"kbt\":31.55,\"tps\":6,\"mbs\":0.17},\"cpu\":{\"us\":5,\"sy\":4,\"id\":90},\"load_average\":{\"m1\":0.85,\"m5\":0.86,\"m15\":0.78}}",
          "ttl": 10000
        },
        {
          "columnname": "cn",
          "columnvalue": "cv",
          "ttl": 10000
        }
      ]
    },
    {
      "rowkey": "rk",
      "columns": [
        {
          "columnname": "cn",
          "columnvalue": "fd"
        },
        {
          "columnname": "cn",
          "columnvalue": "cv"
        }
      ]
    }
  ]
}
When I remove the columnvalue's JSON string, the POST works. Maybe there's something I'm missing regarding escaping? I've tried a few built-in escape utilities to no avail.
var jsonString = 'the json string above here';

var sys = require('sys'),
    rest = require('fermata'), // https://github.com/andyet/fermata
    stack = require('long-stack-traces');

var token = '';     // Username
var accountId = ''; // Password

var api = rest.api({
  url: 'http://url/v0.1/',
  user: token,
  password: accountId
});

var postParams = {
  body: jsonString
};

(api(postParams)).post(function (error, result) {
  if (error)
    sys.puts(error);
  sys.puts(result);
});
The API I'm posting to can't deserialize this.
{
  "rowkeys": [
    {
      "rowkey": "rk",
      "columns": [
        {
          "columnname": "cn",
          "columnvalue": {
            "date": "2011-06-21T00:53:10.309Z",
            "disk0": {
              "kbt": 31.55,
              "tps": 6,
              "mbs": 0.17
            },
            "cpu": {
              "us": 5,
              "sy": 4,
              "id": 90
            },
            "load_average": {
              "m1": 0.85,
              "m5": 0.86,
              "m15": 0.78
            }
          },
          "ttl": 10000
        },
        {
          "columnname": "cn",
          "columnvalue": "cv",
          "ttl": 10000
        }
      ]
    },
    {
      "rowkey": "rk",
      "columns": [
        {
          "columnname": "cn",
          "columnvalue": "fd"
        },
        {
          "columnname": "cn",
          "columnvalue": "cv"
        }
      ]
    }
  ]
}
Two problems occurring at the same time led me to find an issue with the fermata library handling large JSON posts. The JSON above is just fine!
I think the real problem here is that you are trying to post data via a URL parameter instead of via the request body.
You are using Fermata like this:
path = fermata.api({url: "http://example.com/path"});
data = {key1: "value1", key2: "value2"};
path(data).post(callback);
What path(data) represents is still a URL, with data showing up in the query part. So your code is posting to "http://example.com/path?key1=value1&key2=value2" with an empty body.
Since your data is large, I wouldn't be surprised if your web server looks at such a long URL and sends back a 400 instead. Assuming your API can also handle JSON data in the POST body, a better way to send a large amount of data would be to use Fermata like this instead:
path = fermata.api({url: "http://example.com/path"});
data = {key1: "value1", key2: "value2"};
path.post(data, callback);
This will post your data as a JSON string to "http://example.com/path" and you would be a lot less likely to run into data size problems.
Hope this helps! The "magic" of Fermata is that unless you pass a callback function, you are getting local URL representations, instead of calling HTTP functions on them.
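Applied to the original snippet, that second style would look roughly like this (a sketch; it assumes the target API accepts the JSON document itself as the POST body rather than wrapped in a body parameter):
// Post the data itself as the request body instead of as a URL parameter.
var payload = JSON.parse(jsonString);

api.post(payload, function (error, result) {
  if (error)
    sys.puts(error);
  sys.puts(result);
});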
