Express not rendering my React Front End?

I have two repos for the Front End and Back End portions of my project.
The Front End is a simple create-react-app project that hits my Express Back End and receives responses from its API calls.
I ran npm run build in my Front End project and moved that build folder to the root of my express backend repo.
However, when I try to reach the root page (i.e. localhost:3001), for some reason the response only returns the static html from index.html and doesn't actually render anything.
But if I go to something that has a path like localhost:3001/pokedex/1 then at least I see a correct response coming from the API.
I have a feeling that there is something wrong with the way I'm declaring my paths.
Here is the code on the Front End that is reaching out to the Back End:
import axios from 'axios'

const baseUrl = '/'

const getAll = () => {
  const request = axios.get(baseUrl)
  return request.then(response => response.data)
}

const getPkm = (id) => {
  const request = axios.get(`${baseUrl}pokedex/${id}`)
  return request.then(response => response.data)
}

export default { getAll, getPkm }
This is the entry point of my Express Back End, index.js:
const express = require('express')
const app = express()
const cors = require('cors')
const axios = require('axios')

// Middleware
app.use(cors())
app.use(express.json())
app.use(express.static('build'))

const unknownEndpoint = (request, response) => {
  response.status(404).send({ error: 'unknown endpoint' })
}

let fullPkmList = require('./fullPkmList.json')

function ignoreFavicon(req, res, next) {
  if (req.originalUrl.includes('favicon.ico')) {
    return res.status(204).end()
  }
  next()
}

app.get('/', (req, res) => {
  axios.get(`https://pokeapi.co/api/v2/pokemon/?limit=100`)
    .then((list) => res.json(list.data.results))
})

app.get('/pokedex/:id', (request, response) => {
  const id = Number(request.params.id)
  const pokemon = fullPkmList[id - 1]
  if (pokemon) {
    axios.all([
      axios.get(`https://pokeapi.co/api/v2/pokemon/${id}`),
      axios.get(`https://pokeapi.co/api/v2/pokemon-species/${id}`)
    ])
      .then(axios.spread((pokemonResponse, speciesResponse) => {
        let pkmData = pokemonResponse.data
        let speciesData = speciesResponse.data
        response.json({ pkm: pkmData, species: speciesData })
      }))
  } else {
    response.status(404).end()
  }
})

app.use(unknownEndpoint)

const PORT = process.env.PORT || 3001
app.listen(PORT, () => {
  console.log(`this is a test ${PORT}`)
})
Code for the Front End: https://github.com/rohithpalagiri/pocketdex
Code for the Back End: https://github.com/rohithpalagiri/pocketdex-backend
To see the issue, you only need to run the backend. I console.log the response, and in it you will see the index.html markup being returned. My goal is to have all of the paths relative so that the root URL doesn't really matter; I think that is the part I'm getting stuck on.
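For context, this is the kind of layout I've been considering but haven't verified: moving the JSON API under an /api prefix so express.static can own the root and serve the React build, with the Front End service using a matching relative base URL. The /api prefix and route names here are just an idea I'm toying with, not what is in the repos right now:

// Back End (sketch): static build owns '/', JSON API lives under /api
app.use(express.static('build'))

app.get('/api/pokemon', (req, res) => {
  axios.get('https://pokeapi.co/api/v2/pokemon/?limit=100')
    .then((list) => res.json(list.data.results))
})

app.get('/api/pokedex/:id', (request, response) => {
  // ...same handler as above, just mounted under /api
})

// Front End (sketch): the service points at the relative /api base
const baseUrl = '/api/'
const getAll = () => axios.get(baseUrl + 'pokemon').then(response => response.data)
const getPkm = (id) => axios.get(`${baseUrl}pokedex/${id}`).then(response => response.data)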
I'd appreciate any help!

Related

Web scraping an HTML page, but I need to repeat it over lots of links?

I wrote the following code to parse part of the HTML for one URL, i.e. the page const url = 'https://www.example.com/1'.
Now I want to parse the next page, 'https://www.example.com/2', and so on, so I want to implement this with a for-loop.
What is the easiest way to iterate here so the URL changes automatically (covering pages 1, 2, 3, ...) and the code runs repeatedly to parse the other pages?
const PORT = 8000
const axios = require('axios')
const cheerio = require('cheerio')
const express = require('express')

const app = express()
const cors = require('cors')
app.use(cors())

const url = 'https://www.example.com/1'

app.get('/', function (req, res) {
  res.json('This is my parser')
})

app.get('/results', (req, res) => {
  axios(url)
    .then(response => {
      const html = response.data
      const $ = cheerio.load(html)
      const articles = []
      $('.fc-item__title', html).each(function () {
        const title = $(this).text()
        const url = $(this).find('a').attr('href')
        articles.push({
          title,
          url
        })
      })
      res.json(articles)
    }).catch(err => console.log(err))
})

app.listen(PORT, () => console.log(`server running on PORT ${PORT}`))
Some considerations first: if you added CORS to your app just so that you can GET data from another site, it's useless. You add CORS when you want to serve data, i.e. when your app is going to receive requests from browsers on other origins; CORS lets other people use your app, it does nothing when you are the one consuming someone else's app. CORS problems also only happen in the browser; since Node runs on the server, it will never get a CORS error.
The first problem with your code is that https://www.example.com/1, even if it works in the browser, returns a 404 Not Found error to axios, because that page doesn't really exist; only https://www.example.com would work.
I added an example using the comic site https://xkcd.com/, which accepts page numbers.
I added each axios request to an array of promises, then used Promise.all to wait for all of them.
The code gets the image link:
const PORT = 8000;
const axios = require("axios");
const cheerio = require("cheerio");
const express = require("express");
const app = express();

const url = "https://xkcd.com/";

app.get("/", function (req, res) {
  res.json("This is my parser");
});

let pagesToScrap = 50;

app.get("/results", (req, res) => {
  const promisesArray = [];
  for (let pageNumber = 1; pageNumber <= pagesToScrap; pageNumber++) {
    let promise = new Promise((resolve, reject) => {
      axios(url + pageNumber)
        .then((response) => {
          const $ = cheerio.load(response.data);
          let result = $("#transcript").prev().html();
          resolve(result);
        })
        .catch((err) => reject(err));
    });
    promisesArray.push(promise);
  }
  Promise.all(promisesArray)
    .then((result) => res.json(result))
    .catch((err) => {
      res.json(err);
    });
});

app.listen(PORT, () => console.log(`server running on PORT ${PORT}`));
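As a side note, the explicit new Promise wrapper isn't strictly needed, since axios already returns a promise. A slightly leaner, untested variant of the same /results route (reusing the url and pagesToScrap from the code above) could be:

app.get("/results", (req, res) => {
  const promisesArray = [];
  for (let pageNumber = 1; pageNumber <= pagesToScrap; pageNumber++) {
    // axios() already returns a promise, so push it directly
    promisesArray.push(
      axios(url + pageNumber).then((response) => {
        const $ = cheerio.load(response.data);
        return $("#transcript").prev().html();
      })
    );
  }
  Promise.all(promisesArray)
    .then((results) => res.json(results))
    .catch((err) => res.status(500).json({ error: err.message }));
});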

Using express-formidable to get multipart data, but it makes simple POST requests with a request body run forever

app.js
const express = require('express')
const formidableMiddleware = require('express-formidable')

const app = express()
const PORT = process.env.SERVER_PORT || 8080

app.use(express.json())
app.use(express.urlencoded({
  extended: true
}))
app.use(formidableMiddleware())

app.listen(PORT, () => {
  console.log(`The application is up and running on ${PORT}`)
})
controller.js
This contains the controller that takes a base64-encoded image in form data, which can be accessed through the filename property. (This is one controller that works fine with formidable.)
const uploadProfilePic = async (req, res) => {
  let strArr = req.fields.filename.split(',')
  let buffer = new Buffer(strArr[1], 'base64')
  let filename =
    Date.now().toString() + '' + strArr[0].split('/')[1].split(';')[0]
  try {
    req.user.profile = buffer
    req.user.filename = filename
    await req.user.save()
    return res.status(200).json(
      customMessage(true, {
        message: 'Successfully uploaded',
      }),
    )
  } catch (error) {
    return res.status(500).json(internalServerError())
  }
}
controller2.js
This controller is not working properly. It does not even run when we use express-formidable, and the POST route this controller is bound to runs forever. If we pass no request body it runs perfectly; likewise, if we comment out app.use(formidableMiddleware()) in app.js it runs properly, but then controller.js doesn't run.
const updateUserData = async (req, res) => {
  try {
    const { _id, email, name, username, bio, code, platform, languages } = req.body
    if (username === undefined || code === undefined || !platform || !languages)
      return res
        .status(400)
        .json(customMessage(false, 'Please Satisfy Validations'))
    let user = req.user
    let user1 = await UserModel.findById(_id)
    user1.username = username;
    user1.code = code;
    user1.bio = bio;
    user1.platform = platform;
    user1.languages = languages;
    if (!user) return res.status(500).json(internalServerError())
    else {
      await user1.save()
      return res
        .status(200)
        .json(customMessage(true, `user with ${email} updated`))
    }
  } catch (error) {
    console.log(error)
    return res.status(500).json(internalServerError())
  }
}
Okay, I found a way to send requests through both multipart/form-data and application/json without breaking anything. I spent two hours trying to figure out what happened and how to solve the problem. I discovered that the package "express-formidable" is no longer maintained and has been closed down. I then found another package that solved the problem: "express-formidable-v2", which I believe is the continuation of the former package.
Check out https://github.com/Abderrahman-byte/express-formidable-v2; it is a fork of the "express-formidable" package.
Now you have access to both req.fields and req.body.
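Another option that works in plain Express (not what the answer above relies on, just a common pattern) is to mount the formidable middleware only on the routes that actually receive multipart/form-data, so JSON routes keep using express.json(). A rough sketch, where the route paths are made up for illustration and the two controllers are assumed to be exported from ./controller and ./controller2:

const express = require('express')
const formidableMiddleware = require('express-formidable')
const uploadProfilePic = require('./controller')   // assumed export
const updateUserData = require('./controller2')    // assumed export

const app = express()
app.use(express.json())
app.use(express.urlencoded({ extended: true }))

// Only the upload route parses multipart/form-data via formidable (req.fields);
// plain JSON bodies elsewhere are still handled by express.json() (req.body).
// Any auth middleware that sets req.user would go before these controllers.
app.post('/profile-pic', formidableMiddleware(), uploadProfilePic)
app.post('/user', updateUserData)

app.listen(process.env.SERVER_PORT || 8080)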

Next.js grpc-node usage

I use gRPC, but I have a problem initializing the service in a Next.js app.
Goal: create the client service only once in the app and use it in getServerSideProps (the app doesn't use client-side routing).
For example, we have a service generated with grpc-tools (only available during SSR), and I just want to initialize it somewhere. At first I thought it could be done in a custom server.js:
const { credentials } = require('@grpc/grpc-js');
const express = require("express");
const next = require("next");
const { MyserviceClient } = require('./gen/myservice_grpc_pb');

const dev = process.env.NODE_ENV !== "production";
const app = next({ dev });
const handle = app.getRequestHandler();

// Init & Export
exports.myService = new MyserviceClient(
  'http://localhost:3000',
  credentials.createInsecure(),
);

(async () => {
  await app.prepare();
  const server = express();
  server.get("*", (req, res) => handle(req, res));
  server.listen(process.env.PORT, () => {
    console.log(`Listening at http://localhost:${process.env.PORT}`);
  });
})();
And then use it on the homepage, for example:
import React from 'react';
const { GetSmthRequest } = require('../gen/myservice_pb');
const { myService } = require('../server.js');

const IndexPage = () => (
  <div>
    <span>My HomePage</span>
  </div>
)

const getServerSideProps = async () => {
  const request = new GetSmthRequest();
  request.setSomeStuff('random');
  myService.getStmh(GetSmthRequest, (err, res) => {
    //...
  })
  return {
    props: {
    }
  }
}

export default IndexPage;
But for some reason it's not possible to initialize the client service in the server.js.
I also tried doing it with next.config.js:
const { credentials } = require('@grpc/grpc-js');
const { MyserviceClient } = require('./gen/myservice_grpc_pb');

module.exports = {
  serverRuntimeConfig: {
    myService: new MyserviceClient(
      'http://localhost:3000',
      credentials.createInsecure(),
    ),
  },
};
This solution works, so I can use the service through serverRuntimeConfig, thereby initializing it only once in the entire application, but when I make a request somewhere using getServerSideProps, I get an error:
Request message serialization failure: Expected argument of type ...
Error explanation: (https://stackoverflow.com/a/50845069/9464680)
That error message indicates that message serialization
(transformation of the message object passed to gRPC into binary data)
failed. This generally happens because the message object doesn't
match the expected message type or is otherwise invalid
Does anyone know why I am getting this error?
It would also be interesting to see some examples of using Next.js with grpc-node.
For such a case you can use the Node.js global object.
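For example, a minimal sketch of that idea, assuming the generated MyserviceClient from the question; the lib/grpc.js filename and the localhost:50051 target are placeholders, not taken from the question:

// lib/grpc.js (assumed filename)
const { credentials } = require('@grpc/grpc-js');
const { MyserviceClient } = require('../gen/myservice_grpc_pb');

function getMyService() {
  // Cache the client on the Node.js global so every getServerSideProps call
  // (and dev hot reload) reuses the same instance instead of creating a new one.
  if (!global.myServiceClient) {
    global.myServiceClient = new MyserviceClient(
      'localhost:50051',          // placeholder gRPC target (host:port)
      credentials.createInsecure(),
    );
  }
  return global.myServiceClient;
}

module.exports = { getMyService };

getServerSideProps can then call getMyService() directly instead of pulling the client out of serverRuntimeConfig, which avoids passing the client instance through the Next.js config at all.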

Prevent Apify from shutting down my express server

I have an express server with a POST endpoint that starts a crawler. When the crawler finishes it shuts down the whole server. Am I doing something wrong? How can I prevent it from happening?
The project looks something like this:
// server.js
const express = require('express')
const bodyParser = require('body-parser')
const startSearch = require('./crawler.js')

const app = express()
app.use(bodyParser.json())

app.post('/crawl', async (req, res) => {
  const { foo, bar } = req.body
  startSearch({ foo, bar })
  res.end()
})

app.listen(PORT, () => console.log(`listening on port ${PORT}`))
// crawler.js
const Apify = require('apify')

const startSearch = ({ foo, bar }) => {
  Apify.main(async () => {
    const sources = [{
      url: 'https://path_to_website.com',
      userData: { foo, bar }
    }]
    const requestList = await Apify.openRequestList(null, sources)
    const crawler = new Apify.PuppeteerCrawler({
      requestList,
      handlePageFunction: async ({ request, page }) => {
        // do things using puppeteer
      }
    })
    await crawler.run()
  })
}

module.exports = startSearch
Just avoid using Apify.main(). For details, see How to use Apify on Google Cloud Functions.
(I thought I was posting this as an answer, but it seems it was just a comment.)
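For illustration, here is a sketch of the question's crawler.js without the Apify.main() wrapper (same crawl logic, untested). Apify.main() is intended for standalone actors and exits the process when the wrapped function finishes, which is why the Express server dies with it; awaiting crawler.run() inside an ordinary async function leaves the process alive:

// crawler.js (sketch, no Apify.main)
const Apify = require('apify')

const startSearch = async ({ foo, bar }) => {
  const sources = [{
    url: 'https://path_to_website.com',
    userData: { foo, bar }
  }]
  const requestList = await Apify.openRequestList(null, sources)
  const crawler = new Apify.PuppeteerCrawler({
    requestList,
    handlePageFunction: async ({ request, page }) => {
      // do things using puppeteer
    }
  })
  // Awaiting the crawler directly keeps the Node process (and Express) running
  await crawler.run()
}

module.exports = startSearch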

Unit Testing Controllers Using Jest, NodeJS

I want to check that certain routes are calling the correct controller, using a Jest-specific tool (a mock or spy).
This is specifically for unit testing. Can somebody help me with how to check this using Jest? I don't need to verify anything like the status code or the res object; I need to check whether the controller has been called.
Thanks!
For instance:
// todoController.js
function todoController (req, res) {
  res.send('Hello i am todo controller')
}

// index.spec.js
const express = require('express');
const request = require('request-promise');
const todoController = require('./todoController');

jest.mock('./todoController');

const app = express();
app.get('/todo', todoController)

test('If certain routes are calling the correct controller, the controller should have been called exactly once.', async () => {
  await request({ url: 'http://127.0.0.1/todo' })
  expect(todoController).toHaveBeenCalledTimes(1);
})
Actually, if you search, there are many references out there. In the following I share a few approaches that I know.
One of the big conceptual leaps in testing Express applications with a mocked request/response is understanding how to mock a chained API, e.g. res.status(200).json({ foo: 'bar' }).
First you can make some kind of interceptor; this is achieved by returning the res instance from each of its methods:
// util/interceptor.js
module.exports = {
  mockRequest: () => {
    const req = {}
    req.body = jest.fn().mockReturnValue(req)
    req.params = jest.fn().mockReturnValue(req)
    return req
  },

  mockResponse: () => {
    const res = {}
    res.send = jest.fn().mockReturnValue(res)
    res.status = jest.fn().mockReturnValue(res)
    res.json = jest.fn().mockReturnValue(res)
    return res
  },

  // mockNext: () => jest.fn()
}
The Express user-land API is based around middleware: a middleware is a function that takes a request (usually called req), a response (usually called res), and next (which invokes the next middleware) as parameters.
And then you have controller like this :
// todoController.js
function todoController (req, res) {
  if (!req.params.id) {
    return res.status(404).json({ message: 'Not Found' });
  }
  res.send('Hello i am todo controller')
}
They are consumed by being “mounted” on an Express application (app) instance (in app.js):
// app.js
const express = require('express');
const app = express();
const todoController = require('./todoController');
app.get('/todo', todoController);
Using the mockRequest and mockResponse we defined before, we'll assert that res.send() is called with the right payload.
So in your test file:
// todo.spec.js
const { mockRequest, mockResponse } = require('util/interceptor')
const controller = require('todoController.js')

describe("Check method 'todoController'", () => {

  test('should 200 and return correct value', async () => {
    let req = mockRequest();
    req.params.id = 1;
    const res = mockResponse();
    await controller.todoController(req, res);
    expect(res.send).toHaveBeenCalledTimes(1)
    expect(res.send.mock.calls.length).toBe(1);
    expect(res.send).toHaveBeenCalledWith('Hello i am todo controller');
  });

  test('should 404 and return correct value', async () => {
    let req = mockRequest();
    req.params.id = null;
    const res = mockResponse();
    await controller.todoController(req, res);
    expect(res.status).toHaveBeenCalledWith(404);
    expect(res.json).toHaveBeenCalledWith({ message: 'Not Found' });
  });
});
This is only one approach to testing Express handlers and middleware. The alternative is to fire up the Express server and test it with real HTTP requests.
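For completeness, a minimal sketch of that alternative using supertest, assuming supertest is installed and that app.js is changed to export the app (module.exports = app) instead of calling listen:

// todo.http.spec.js (sketch)
const request = require('supertest');
const app = require('./app'); // assumes app.js exports the Express app

test('GET /todo responds with the controller output', async () => {
  const res = await request(app).get('/todo');
  expect(res.status).toBe(200);
  expect(res.text).toBe('Hello i am todo controller');
});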
