Express.js response converts snake_case keys to camelCase automatically

I'm working on a small project at work, and we have an Express.js-based Node application that sends a JSON response whose keys are in snake_case. We have another Node application that consumes this service, but there the response object's keys are accessed in camelCase. I'd like to know what happens in the background to make this work.
This is the code in the REST API
app.get('/api/customer/:id', (req, res) => {
  const data = {
    "arr": [{
      "my_key": "609968029"
    }]
  }
  res.send(data);
});
This is how it is consumed in the other node application
getData = (id) => {
  const options = {
    url: `api/customer/${id}`
  };
  return httpClient.get(options)
    .then(data => {
      const arr = data.arr.map(arrEntry => {
        return {
          myKey: arrEntry.myKey
        };
      });
      return {
        arr
      };
    });
};
Here myKey correctly has the data from the REST API, but I'm not sure how my_key is converted to myKey for this to work.

Turns out we used the humps library to convert the response object's keys from snake_case to camelCase.
I found this code in the library call:
const humps = require('humps');
...
axios(optionsObj)
  .then(response => {
    resolve(humps.camelizeKeys(response.data));
  })
  .catch(err => {
    reject(err);
  });
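For reference, humps.camelizeKeys walks plain objects and arrays recursively, which is why the nested my_key comes out as myKey. A minimal sketch of it applied to the response above:
const humps = require('humps');

const data = { arr: [{ my_key: "609968029" }] };
console.log(humps.camelizeKeys(data));
// => { arr: [ { myKey: '609968029' } ] }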

lodash can do this
_.camelCase('Foo Bar');
// => 'fooBar'
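Note that _.camelCase only converts a single string, not the keys of an object, so on its own it won't reproduce what humps does. A sketch of a recursive key camelizer built from lodash (mapKeys, mapValues, and isPlainObject are real lodash functions; the camelizeKeys wrapper is my own):
const _ = require('lodash');

// Recursively camelize the keys of plain objects, descending into arrays
function camelizeKeys(value) {
  if (Array.isArray(value)) return value.map(camelizeKeys);
  if (_.isPlainObject(value)) {
    return _.mapValues(_.mapKeys(value, (v, k) => _.camelCase(k)), camelizeKeys);
  }
  return value;
}

camelizeKeys({ arr: [{ my_key: "609968029" }] });
// => { arr: [{ myKey: '609968029' }] }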

Related

Send Map in express res.send

I'm working on a game with a friend, and we need to send a Map with some data in it, but Express only sends the user {} instead of the actual Map. The problem is with sending it, not the code itself; console.logging it does show the Map.
Code:
router.get("/list", async (req, res) => {
try {
const users = await userCollection.find();
accessedListEmbed(req);
let userData = new Map();
users.forEach((user) => userData.set(user.userName, user.status));
res.send(userData);
console.log(userData);
} catch (error) {
res.send("unknown");
}
});
Generally, you can only send serializable values over the network, and Maps aren't serializable:
const map = new Map();
map.set('key', 'value');
console.log(JSON.stringify(map)); // => '{}' (a Map has no enumerable own properties)
Either send an array of arrays that can be converted into a Map on the client side, or use another data structure, like a plain object. For example:
router.get("/list", async (req, res) => {
try {
const users = await userCollection.find();
accessedListEmbed(req);
const userDataArr = [];
users.forEach((user) => {
userDataArr.push([user.userName, user.status]);
});
res.json(userDataArr); // make sure to use .json
} catch (error) {
// send JSON in the case of an error too so it can be predictably parsed
res.json({ error: error.message });
}
});
Then on the client-side:
fetch(..)
  .then(res => res.json())
  .then((result) => {
    if ('error' in result) {
      // do something with result.error and return
    }
    const userDataMap = new Map(result);
    // ...
Or something along those lines.
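If a plain object fits better than an array of pairs, Object.fromEntries and Object.entries (available since Node 12 and in modern browsers) convert in both directions. A minimal sketch under that assumption:
// Server: convert the Map to a plain object before sending
res.json(Object.fromEntries(userData));

// Client: rebuild a Map from the parsed object if needed
const userDataMap = new Map(Object.entries(result));
This only works cleanly while the Map keys are strings, which the userName keys here are.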

GET request from a local server not working

I am trying to obtain JSON data with an axios GET request, but the information is not retrieved.
In index.js (retrieving information):
axios.get('http://localhost:1000/getpost/')
  .then((response) => {
    console.log(response);
  });
Backend endpoint getpost.js (sending information):
var router = require('express').Router();
var Posts = require('../models/post-model.js');

router.route('/').get(() => {
  Posts.find({color: "Green"})
    .then((res) => {
      return res;
    });
});
module.exports = router;
I have also tried return Posts.find({color: "Green"}); inside the router.route('/').get... function, but the value returned is different from the one in the promise, which is the one I need. I checked that the information is actually sent with console.log(res), but it is not received in the frontend; when I log the result there, it is null.
You are not doing anything with the route response. Maybe something like...
router.route('/').get((req, res1) => {
  Posts.find({color: "Green"})
    .then((res) => {
      res1.end(res);
    });
});
(assuming res is the data in plain text; if it is a JavaScript object you'll do res1.json(res) or res1.jsonp(res))
You need to map the route to getpost as:
router.route('/getpost')
So your getpost would be as:
var router = require('express').Router();
var Posts = require('../models/post-model.js');

router.route('/getpost').get((req, res) => {
  Posts.find({color: "Green"})
    .then((posts) => {
      res.send({ status: 200, message: posts });
    });
});

module.exports = router;
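Whether '/' or '/getpost' is the right path depends on how the router is mounted in the main server file, which the question doesn't show. A hypothetical mounting that would make the original router.route('/') match the URL http://localhost:1000/getpost/:
const express = require('express');
const app = express();
const getpost = require('./routes/getpost.js'); // hypothetical path
app.use('/getpost', getpost); // the router's '/' now answers at /getpost/
app.listen(1000);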
Apparently, I was not passing the result properly.
The router in getpost.js should be:
router.route('/').get((req, res) => {
  Posts.find({color: "Green"})
    .then((posts) => res.json(posts));
});
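One small addition worth making (my suggestion, not part of the original fix): a .catch so a failed query doesn't leave the request hanging:
router.route('/').get((req, res) => {
  Posts.find({color: "Green"})
    .then((posts) => res.json(posts))
    .catch((err) => res.status(500).json({ error: err.message }));
});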

Streaming JSON data to React results in unexpected end of JSON input

I'm trying to stream a lot of data from a NodeJS server that fetches the data from Mongo and sends it to React. Since it's quite a lot of data, I've decided to stream it from the server and display it in React as soon as it comes in. Here's a slightly simplified version of what I've got on the server:
const getQuery = async (req, res) => {
  const { body } = req;
  const query = mongoQueries.buildFindQuery(body);
  res.set({ 'Content-Type': 'application/octet-stream' });
  Log.find(query).cursor()
    .on('data', (doc) => {
      console.log(doc);
      const data = JSON.stringify(doc);
      res.write(`${data}\r\n`);
    })
    .on('end', () => {
      console.log('Data retrieved.');
      res.end();
    });
};
Here's the React part:
fetch(url, { // this fetch fires the getQuery function on the backend
  method: "POST",
  body: JSON.stringify(object),
  headers: {
    "Content-Type": "application/json",
  }
})
  .then(response => {
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    const pump = () =>
      reader.read().then(({ done, value }) => {
        if (done) return this.postEndHandler();
        console.log(value.length); // !!!
        const decoded = decoder.decode(value);
        this.display(decoded);
        return pump();
      });
    return pump();
  })
  .catch(err => {
    console.error(err);
    toast.error(err.message);
  });
}
display(chunk) {
  const { data } = this.state;
  try {
    const parsedChunk = chunk.split('\r\n').slice(0, -1);
    parsedChunk.forEach(e => data.push(JSON.parse(e)));
    return this.setState({ data });
  } catch (err) {
    throw err;
  }
}
It's a 50/50 whether it completes with no issues or fails on React's side of things. When it fails, it's always because of an incomplete JSON object in parsedChunk.forEach. I did some digging, and it turns out that every time it fails, the console.log I marked with three exclamation marks shows 65536: the read capped out at 64 KiB, so a JSON line can be cut in half at a chunk boundary. I'm certain it's got something to do with my streams implementation and that I'm not queuing the chunks correctly, but I'm not sure whether I should be fixing it client side or server side. Any help would be greatly appreciated.
Instead of implementing your own NDJSON-like streaming JSON protocol, which is basically what you are doing here (with all the pitfalls of the stream being split into chunks and packets at boundaries you don't control), you can take a look at some of the existing tools built for exactly this (a hand-rolled fix is also sketched after the list), e.g.:
http://oboejs.com/
http://ndjson.org/
https://www.npmjs.com/package/stream-json
https://www.npmjs.com/package/JSONStream
https://www.npmjs.com/package/clarinet
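That said, the immediate bug can also be patched by hand on the client by buffering the trailing partial line between reads. A minimal sketch of the pump function under that approach (postEndHandler and the state shape mirror the question's code):
let buffer = '';
const pump = () =>
  reader.read().then(({ done, value }) => {
    if (done) return this.postEndHandler();
    // stream: true also keeps multi-byte characters intact across chunk boundaries
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split('\r\n');
    buffer = lines.pop(); // hold back the possibly incomplete last line
    const parsed = lines.filter(Boolean).map((line) => JSON.parse(line));
    this.setState(({ data }) => ({ data: data.concat(parsed) }));
    return pump();
  });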

Send Response in 'then' after Promise resolves

I want to display the JSON I got from the search for localhost:8400/api/v1/search, but I have no idea how.
I'm using the Elasticsearch Javascript Client
my routing:
'use strict';
const express = require('express');
const elasticsearch = require('../models/elasticsearch.js');
const router = express.Router();

router.get('/api/v1/search', elasticsearch.search);
For accessing the Elasticsearch DB:
const es = require('elasticsearch');

let esClient = new es.Client({
  host: 'localhost:9200',
  log: 'info',
  apiVersion: '5.3',
  requestTimeout: 30000
})

let indexName = "randomindex";

const elasticsearch = {
  search() {
    return esClient.search({
      index: indexName,
      q: "test"
    })
      .then((body) => {
        console.log(JSON.stringify(body));
        // here I want to return a Response with the Content of the body
      })
      .catch((error) => { console.trace(error.message); });
  }
}

module.exports = elasticsearch;
Firstly, the route handlers for Express routes always have (request, response, next) as their parameters. You can use the response object to send data back to the client.
Instead of passing the elasticsearch.search method as a route handler, you can write your own route handler and call elasticsearch.search in there, so you still have access to the response object. For example:
function handleSearch(req, res, next) {
  elasticsearch.search()
    .then(function(data) {
      res.json(data)
    })
    .catch(next)
}
And structure your search function like so:
const elasticsearch = {
  search() {
    return esClient.search({
      index: indexName,
      q: "test"
    })
      .then((body) => body) // just return the body from this method
  }
}
This way you separate your concerns of querying elastic and handling the request. You also have access to the request object in case you want to pass any query string parameters from your request to your search function.
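With that in place, the route registration presumably points at the new handler instead of the search method itself:
router.get('/api/v1/search', handleSearch);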
Since you pass elasticsearch.search as the route handler, it will be invoked with Express's (req, res, next) arguments.
Change the signature of the search method to search(req, res).
Then just call res.send(JSON.stringify(body));
See https://expressjs.com/en/4x/api.html#res for more details
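A minimal sketch of that second approach (the .catch branch is my addition):
const elasticsearch = {
  search(req, res) {
    esClient.search({ index: indexName, q: "test" })
      .then((body) => res.send(JSON.stringify(body)))
      .catch((error) => {
        console.trace(error.message);
        res.status(500).send(error.message);
      });
  }
}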

How/where to convert dates from the server in a redux app?

I'm starting to study redux now. I used the real-world example as a starting point, using normalizr and reselect to handle data.
Now I need to understand the best place to convert dates coming from the server into JS Date objects. Since normalizr already takes care of some schema processing, I thought it could do that too, but I did not find it there.
Where should I convert these dates? My assumption is that I have to keep the dates already converted in the store. Is that right?
Normalizr can do this - in the current version (v3.3.0) you can achieve it like this:
import { schema } from 'normalizr';

const ReleaseSchema = new schema.Entity('releases', {}, {
  processStrategy: (obj, parent, key) => {
    return {
      ...obj,
      createdAt: new Date(obj.createdAt),
      updatedAt: new Date(obj.updatedAt),
    };
  },
});
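For context, a sketch of how that schema would be used (the response shape here is a hypothetical example):
import { normalize } from 'normalizr';

const response = [
  { id: 1, createdAt: '2017-05-01T10:00:00Z', updatedAt: '2017-05-02T11:30:00Z' }
];
const { entities } = normalize(response, [ReleaseSchema]);
// entities.releases[1].createdAt is now a Date instance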
I added a third parameter to callApi (inside api middleware):
function callApi(endpoint, schema, conversionFromServer) {
  const fullUrl = (endpoint.indexOf(API_ROOT) === -1) ? API_ROOT + endpoint : endpoint
  return fetch(fullUrl)
    .then(response =>
      response.json().then(json => ({ json, response }))
    ).then(({ json, response }) => {
      if (!response.ok) {
        return Promise.reject(json)
      }
      const camelizedJson = camelizeKeys(json)
      const nextPageUrl = getNextPageUrl(response)
      let convJson = camelizedJson;
      if (conversionFromServer) {
        convJson = conversionFromServer(convJson);
      }
      return Object.assign({},
        normalize(convJson, schema),
        { nextPageUrl }
      )
    })
}
and now I can call it like this:
return {
  [CALL_API]: {
    types: [ TIMESLOTS_REQUEST, TIMESLOTS_SUCCESS, TIMESLOTS_FAILURE ],
    endpoint: `conference/${conferenceId}/timeslots`,
    schema: Schemas.TIMESLOT_ARRAY,
    conversionFromServer: (data) => {
      data.timeslots.forEach((t) => {
        t.startTime = php2js.date(t.startTime)
        t.endTime = php2js.date(t.endTime)
      });
      return data;
    }
  }
}
This way I keep this conversion "as close to the server" as I can.
