Modifying data from an API and saving it in PostgreSQL - JavaScript

I want to fetch some data from a public API and then modify it. For example, I will add a Point(long, lat) to a geography column in PostGIS. However, before I get there, I need to fetch the data from the public API. This is what I have tried so far, but the logic doesn't seem to work.
Inserting data into the database works fine, and I have set it up correctly. The problems start when I try to do the insert inside the JSON callback.
require('dotenv').config()
const express = require('express')
const fetch = require('node-fetch');
const app = express();
const db = require("./db");

app.use(express.json());

async function fetchDummyJSON() {
    fetch('https://jsonplaceholder.typicode.com/todos/1')
        .then(res => res.json())
        .then((json) => {
            await db.query("INSERT INTO logictest(userId,id,title,completed) values($1,$2,$3,$4)", [json.userId+1, json.id, json.title, json.completed])
        });
}

fetchDummyJSON()

app.get('/', (req, res) => {
    res.send('Hello World!')
});

const port = process.env.PORT || 3001
app.listen(port, () => {
    console.log(`Example app listening on port ${port}`)
})
I keep getting SyntaxError: await is only valid in async functions and the top-level bodies of modules; however, fetchDummyJSON() is an async function as far as I can tell. Is there a way to make this make more sense, or to get it working? I am going for a PERN stack.

The callback you pass to .then() is a function as well, and it is not async:
.then((json) => {
    await db.query("INSERT INTO logictest(userId,id,title,completed) values($1,$2,$3,$4)", [json.userId+1, json.id, json.title, json.completed])
});
Try marking that callback as async:
.then(async (json) => {
    await db.query("INSERT INTO logictest(userId,id,title,completed) values($1,$2,$3,$4)", [json.userId+1, json.id, json.title, json.completed])
});
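Alternatively, since fetchDummyJSON is already async, you can drop the .then() chain entirely. A minimal sketch using await throughout (same endpoint, table, and columns as the question):

async function fetchDummyJSON() {
    // Fetch the sample todo, then insert it into Postgres.
    const res = await fetch('https://jsonplaceholder.typicode.com/todos/1')
    const json = await res.json()
    await db.query(
        "INSERT INTO logictest(userId,id,title,completed) values($1,$2,$3,$4)",
        [json.userId + 1, json.id, json.title, json.completed]
    )
}

// There is no surrounding async context at the top level, so catch errors here.
fetchDummyJSON().catch(err => console.error(err))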

Related

Cookie not showing up in the browser's dev tools Application tab?

I am working on a web app project for which I need to authenticate users on some protected routes, so I am using JWT tokens.
Technologies used in the project:
frontend --> react
backend --> node, express
Node.js backend code:
const express = require('express');
const app = express();
const ProductModel = require('../Schemas/productSchema')
const product = ProductModel;

app.get('/', (req, res) => {
    try {
        product.find(function (err, data) {
            if (data) {
                res.cookie("test", "test1");
                res.send(data);
            } else {
                res.json({ message: err });
            }
        })
    } catch (error) {
        res.json({ message: error });
    }
})
React frontend code. Here I am making a GET request to the server using fetch:
useEffect(() => {
    async function fetchData() {
        await fetch('http://localhost:5000/products')
            .then(res => res.json())
            .then(data => {
                // setProducts(data.data);
            })
            .catch(err => console.log(err))
    }
    fetchData();
}, [])
In the Node.js code I am sending a cookie to the browser, and the cookie does show up in the Chrome dev tools Network tab. But the cookie is not showing up under browser -> dev tools -> Application -> Cookies. I don't know why this is happening; please post solutions with an explanation.
First, I guess that you need to import cookie-parser:
const cookieParser = require('cookie-parser')
app.use(cookieParser());
This lets you use cookieParser in your application. And finally you can use it:
res.cookie(`...`);
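Putting those pieces together, a minimal sketch (the /products route name is taken from the question's fetch call, and the cookie name and value are the question's own):

const express = require('express');
const cookieParser = require('cookie-parser');
const app = express();

// Register cookie-parser before the routes so req.cookies is populated.
app.use(cookieParser());

app.get('/products', (req, res) => {
    res.cookie('test', 'test1'); // adds a Set-Cookie header to the response
    res.json({ message: 'cookie sent' });
});

app.listen(5000);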

Web scraping an HTML page, but how to repeat it over many links?

I wrote the following code to parse part of the HTML of one URL, i.e. it parses the page const url = 'https://www.example.com/1'.
Now I want to parse the next page, 'https://www.example.com/2', and so on, so I want to implement a for-loop here.
What is the easiest way to iterate so the URL changes automatically (covering pages 1, 2, 3, ...) and the code runs repeatedly to parse the other pages?
const PORT = 8000
const axios = require('axios')
const cheerio = require('cheerio')
const express = require('express')
const app = express()
const cors = require('cors')
app.use(cors())

const url = 'https://www.example.com/1'

app.get('/', function (req, res) {
    res.json('This is my parser')
})

app.get('/results', (req, res) => {
    axios(url)
        .then(response => {
            const html = response.data
            const $ = cheerio.load(html)
            const articles = []
            $('.fc-item__title', html).each(function () {
                const title = $(this).text()
                const url = $(this).find('a').attr('href')
                articles.push({
                    title,
                    url
                })
            })
            res.json(articles)
        }).catch(err => console.log(err))
})

app.listen(PORT, () => console.log(`server running on PORT ${PORT}`))
Some considerations first: the CORS middleware you added to your app is useless here. You add CORS when you want to SEND data, i.e. when your app is going to receive requests; CORS enables other people to use your app, and does nothing when you are the one consuming someone else's. Also, CORS problems happen only in the browser; since Node runs on the server, it will never get a CORS error.
The first problem with your code is that https://www.example.com/1, even though it works in the browser, returns a 404 Not Found error to axios, because that page doesn't really exist; only https://www.example.com would work.
I added an example using the comic site https://xkcd.com/, which accepts page numbers.
I push each axios request into an array of promises, then use Promise.all to wait for all of them.
The code gets the image link:
const PORT = 8000;
const axios = require("axios");
const cheerio = require("cheerio");
const express = require("express");
const app = express();

const url = "https://xkcd.com/";

app.get("/", function (req, res) {
    res.json("This is my parser");
});

let pagesToScrap = 50;

app.get("/results", (req, res) => {
    const promisesArray = [];
    for (let pageNumber = 1; pageNumber <= pagesToScrap; pageNumber++) {
        let promise = new Promise((resolve, reject) => {
            axios(url + pageNumber)
                .then((response) => {
                    const $ = cheerio.load(response.data);
                    let result = $("#transcript").prev().html();
                    resolve(result);
                })
                .catch((err) => reject(err));
        });
        promisesArray.push(promise);
    }
    Promise.all(promisesArray)
        .then((result) => res.json(result))
        .catch((err) => {
            res.json(err);
        });
});

app.listen(PORT, () => console.log(`server running on PORT ${PORT}`));
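As a side note, the new Promise wrapper is not strictly needed, since axios already returns a promise. A more compact sketch of the same /results handler, under the same assumptions:

app.get("/results", (req, res) => {
    const promisesArray = [];
    for (let pageNumber = 1; pageNumber <= pagesToScrap; pageNumber++) {
        // axios() already returns a promise, so it can go straight into the array.
        promisesArray.push(
            axios(url + pageNumber).then((response) => {
                const $ = cheerio.load(response.data);
                return $("#transcript").prev().html();
            })
        );
    }
    Promise.all(promisesArray)
        .then((result) => res.json(result))
        .catch((err) => res.json(err));
});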

Express not rendering my React Front End?

I have two repos for the Front End and Back End portions of my project.
The Front End is a simple create-react-app project that hits my Express Back End and receives responses from API calls.
I ran npm run build in my Front End project and moved that build folder to the root of my express backend repo.
However, when I try to reach the root page (i.e. localhost:3001), for some reason the response only returns the static html from index.html and doesn't actually render anything.
But if I go to something that has a path like localhost:3001/pokedex/1 then at least I see a correct response coming from the API.
I have a feeling that there is something wrong with the way I'm declaring my paths.
Here is the code on the Front End that is reaching out to the Back End:
import axios from 'axios'

const baseUrl = '/'

const getAll = () => {
    const request = axios.get(baseUrl)
    return request.then(response => response.data)
}

const getPkm = (id) => {
    const request = axios.get(`${baseUrl}pokedex/${id}`)
    return request.then(response => response.data)
}

export default { getAll, getPkm }
This is my Express Back End entry index.js:
const express = require('express')
const app = express()
const cors = require('cors')
const axios = require('axios')

//Middleware
app.use(cors())
app.use(express.json())
app.use(express.static('build'))

const unknownEndpoint = (request, response) => {
    response.status(404).send({ error: 'unknown endpoint' })
}

let fullPkmList = require('./fullPkmList.json')

function ignoreFavicon(req, res, next) {
    if (req.originalUrl.includes('favicon.ico')) {
        res.status(204).end()
    }
    next();
}

app.get('/', (req, res) => {
    axios.get(`https://pokeapi.co/api/v2/pokemon/?limit=100`)
        .then((list) => res.json(list.data.results))
})

app.get('/pokedex/:id', (request, response) => {
    const id = Number(request.params.id)
    const pokemon = fullPkmList[id - 1]
    if (pokemon) {
        axios.all([
            axios.get(`https://pokeapi.co/api/v2/pokemon/${id}`),
            axios.get(`https://pokeapi.co/api/v2/pokemon-species/${id}`)
        ])
            .then(axios.spread((pokemonResponse, speciesReponse) => {
                let pkmResponse = pokemonResponse.data
                let speciesResponse = speciesReponse.data
                response.json({ pkm: pkmResponse, species: speciesResponse })
            }))
    } else {
        response.status(404).end()
    }
})

app.use(unknownEndpoint)

const PORT = process.env.PORT || 3001
app.listen(PORT, () => {
    console.log(`this is a test ${PORT}`)
})
Code for the Front End: https://github.com/rohithpalagiri/pocketdex
Code for the Back End: https://github.com/rohithpalagiri/pocketdex-backend
To see the issue, you only need to run the backend. If you console.log the response, you will see the index.html markup being returned. My goal is to have all of the paths relative so that the root URL doesn't really matter; I think that is the part I'm stuck on.
I'd appreciate any help!
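A guess at the cause, offered as an assumption rather than from the original thread: app.use(express.static('build')) is registered before the app.get('/') route, so Express serves build/index.html for / before the API handler is ever reached. One common layout is to keep JSON endpoints under a distinct prefix so static files and API routes don't compete; the /api prefix below is hypothetical:

// Serve the React build for page loads...
app.use(express.static('build'))

// ...and keep JSON endpoints under a separate prefix (hypothetical /api path)
app.get('/api/pokemon', (req, res) => {
    axios.get('https://pokeapi.co/api/v2/pokemon/?limit=100')
        .then((list) => res.json(list.data.results))
})

The front end's baseUrl would then point at '/api/' instead of '/'.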

Prevent Apify from shutting down my express server

I have an express server with a POST endpoint that starts a crawler. When the crawler finishes it shuts down the whole server. Am I doing something wrong? How can I prevent it from happening?
The project looks something like this:
// server.js
const express = require('express')
const bodyParser = require('body-parser')
const startSearch = require('./crawler.js')

const app = express()
app.use(bodyParser.json())

const PORT = process.env.PORT || 3000

app.post('/crawl', async (req, res) => {
    const { foo, bar } = req.body
    startSearch({ foo, bar })
    res.end()
})

app.listen(PORT, () => console.log(`listening on port ${PORT}`))
// crawler.js
const Apify = require('apify')

const startSearch = ({ foo, bar }) => {
    Apify.main(async () => {
        const sources = [{
            url: 'https://path_to_website.com',
            userData: { foo, bar }
        }]
        const requestList = await Apify.openRequestList(null, sources)
        const crawler = new Apify.PuppeteerCrawler({
            requestList,
            handlePageFunction: async ({ request, page }) => {
                // do things using puppeteer
            }
        })
        await crawler.run()
    })
}

module.exports = startSearch
Just avoid using Apify.main(). For details, see How to use Apify on Google Cloud Functions.
(I thought I was posting this as an answer, but it seems it was just a comment.)
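For illustration, a minimal sketch of crawler.js with Apify.main() removed; the wrapper exits the process when its function settles, which is what shuts the server down. This keeps the question's own options and simply runs the crawler from a plain async function:

// crawler.js
const Apify = require('apify')

const startSearch = async ({ foo, bar }) => {
    const sources = [{
        url: 'https://path_to_website.com',
        userData: { foo, bar }
    }]
    const requestList = await Apify.openRequestList(null, sources)
    const crawler = new Apify.PuppeteerCrawler({
        requestList,
        handlePageFunction: async ({ request, page }) => {
            // do things using puppeteer
        }
    })
    await crawler.run() // resolves when crawling finishes; no process.exit()
}

module.exports = startSearch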

React pending promise after fetching from Express backend

I have a simple node.js express server like so:
const express = require('express');
const app = express();
const port = process.env.PORT || 5000;
app.listen(port, () => console.log(`Listening on port ${port}`));
app.get('/express_backend', (req, res) => {
res.send({ express: 'YOUR EXPRESS BACKEND IS CONNECTED TO REACT' });
});
and have the proxy set up in the package.json like so:
"proxy": "http://localhost:5000/"
However, in my App.js, when I try to fetch the JSON from the server, it returns a pending promise (I found this out with a console.log in the .then()). How can I fix this so that it properly fetches the string in the Express JSON object?
class App extends Component {
    constructor() {
        super();
        this.state = { data: "asdf" };
    }

    componentDidMount() {
        fetch('/express_backend')
            .then(res => this.setState({ data: res.express }));
    }
    // ...
}
I am running both node server.js and npm start at this time.
Here's a good write-up on how fetch returns data.
The response of a fetch() request is a Stream object, which means that when we call the json() method, a Promise is returned since the reading of the stream will happen asynchronously.
So you need to call the json() method, which returns a promise, first.
fetch('/express_backend')
    .then(res => {
        if (!res.ok) {
            throw new Error(res.statusText);
        }
        return res.json()
    })
    .then(res => this.setState({ data: res.express }))
    .catch(err => console.log(err));
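The same logic reads a bit flatter with async/await; a sketch, assuming the component from the question:

async componentDidMount() {
    try {
        const res = await fetch('/express_backend');
        if (!res.ok) {
            throw new Error(res.statusText);
        }
        const body = await res.json(); // res.json() also returns a promise
        this.setState({ data: body.express });
    } catch (err) {
        console.log(err);
    }
}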
