I have a MongoDB-backed server set up which, on running the command below, starts on port 3000
npm run start
I also have a GraphQL server which, on running the command below, starts on port 4000
npm run start-graphql
The scripts section of my package.json is as follows:
"scripts": {
"start": "nodemon server.js",
"start-graphql": "nodemon graphqlserver.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
server.js
require('dotenv').config();
const express = require('express');
const app = express();
const mongoose = require('mongoose');
mongoose.connect(process.env.DATABASE_URL);
const db = mongoose.connection;
db.on('error', (err) => console.log(err));
db.once('open', () => {
console.log("Backend Database connected");
});
app.use(express.json({ limit: '2mb'}));
const photosRouter = require('./routes/photos');
app.use('/images', photosRouter)
app.listen(3000, () => {
console.log('Server started at port 3000');
})
graphqlserver.js
const express = require('express');
const path = require('path');
const express_graphql = require('express-graphql').graphqlHTTP;
const { loadSchemaSync } = require('@graphql-tools/load');
const { GraphQLFileLoader } = require('@graphql-tools/graphql-file-loader');
const { addResolversToSchema } = require('@graphql-tools/schema');
const getResolvers = require('./graphql/resolvers');
// GraphQL schema
const combinedSchema = loadSchemaSync(
path.join(__dirname, './graphql/schemas/*.graphql'),
{
loaders: [new GraphQLFileLoader()],
}
);
const schema = addResolversToSchema({
schema: combinedSchema,
resolvers: Object.assign({}, getResolvers())
});
// Create an express server and a GraphQL endpoint
const app = express();
app.use('/graphql', express_graphql({
schema: schema,
graphiql: true
}));
app.listen(4000, () => console.log('Express GraphQL Server Now Running On localhost:4000/graphql'));
When I call the REST APIs directly, either through Postman or curl, they return the expected responses.
For example, http://localhost:3000/images returns an array of objects.
But when I call the same endpoint (using axios) via the GraphQL server (which is running on port 4000), I get null as the response.
I have no clue why this is happening.
Please check the screenshot below for reference.
Note: for better clarity, please check the codebase at the link below:
https://github.com/yaswankar/G-photos-backend
Any help would be appreciated.
Thanks in advance
Edit:
New Error screenshot
I was able to resolve the main issue by adding a Query block inside the photos resolver:
Query: {
photo: photosContext,
},
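In other words, the photos resolver now exports a root-level Query block. A minimal sketch of the shape (assuming photosContext is the resolver function defined elsewhere in the same file):
module.exports = {
  Query: {
    // Without this block graphql-tools has no root field to resolve,
    // which is why every query came back as null.
    photo: photosContext,
  },
};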
The other error was resolved by processing the response instead of sending the raw data to the hyper class
const { GraphQLError } = require('graphql'); // needed for the error case below
async function getActivePhotos(parent, args, req) {
try {
// Process the response and return only the data array instead of the raw response object
const activePhotos = await photoService.getActivePhotos(req).then(resp => resp.data.map(item => item));
return activePhotos;
} catch (error) {
// logger.error(__filename + ': Failed to get response for getActivePhotos, err=' + JSON.stringify(error));
return new GraphQLError(JSON.stringify(error));
}
}
Related
I've been following a tutorial on how to connect to a MongoDB collection. The tutorial works fine, but I'm trying the same code in a project I'm working on and keep getting the following error:
./node_modules/raw-body/index.js
Module not found: Can't resolve 'async_hooks' in '*:\*\*\Desktop\Projects\testing-area\node_modules\raw-body'
I've tried:
- deleting node_modules and then running npm install
- running npm update to bring all dependencies to the latest version
- updating npm itself
I've read that async_hooks is used for backend work and if you try to use it in the frontend, it can cause this issue. Problem is, I don't really know a way around it.
Here's the code I'm trying to use to connect to the mongodb collection:
//give functions of mongo db to MongoClient
const { MongoClient } = require('mongodb')
let dbConnection
const bark = (input) => {
console.log(input)
}
module.exports = {
connectToDb: (cb) => {
MongoClient.connect("mongodb://localhost:27017/Treasures")
.then((client) => {
dbConnection = client.db()
return cb()
})
.catch(err => {
bark("----------")
bark("Pants shat when trying to connect to DB:\n")
bark(err)
bark("----------") // log the footer before returning, otherwise this line is unreachable
return cb(err)
})
},
getDb: () => dbConnection
}
And then in a component I have this, to try and get the data from the collection:
// Imports/Requires for mongoDb connection
const { ObjectID } = require("bson");
const express = require("express");
const { connectToDb, getDb } = require("../../db")
// COMPONENT STARTS HERE:
export const TreasureGen = () => {
//init app and middleware
const app = express();
//db connection
let db
connectToDb((err) => {
if(!err)
{
app.listen(3000, () => {
bark("App listening on port 3000")
})
db = getDb()
}
})
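From what I've read, the way around it is to keep the mongodb driver in a separate Node/Express backend and have the React component fetch from that backend over HTTP instead of requiring express and mongodb itself. A rough, untested sketch of that idea (the /treasures route and the collection name are placeholders):
// server.js (backend only -- the mongodb driver never runs in the browser)
const express = require("express")
const { connectToDb, getDb } = require("./db")

const app = express()

// Placeholder route; the React component would fetch this instead of
// talking to MongoDB directly.
app.get("/treasures", (req, res) => {
  getDb().collection("Treasures").find().toArray()
    .then(docs => res.json(docs))
    .catch(err => res.status(500).json({ error: err.message }))
})

connectToDb((err) => {
  if (!err) {
    app.listen(3000, () => console.log("App listening on port 3000"))
  }
})

// In the React component, a plain fetch replaces the express/mongodb requires:
// fetch("http://localhost:3000/treasures").then(res => res.json()).then(docs => console.log(docs))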
I've created an API endpoint that should write a file, then call a logging API, and only respond to the user after that call returns.
I've simplified the code like this:
const express = require('express');
const router = express.Router();
const fetch = require("node-fetch");
const util = require('util');
const fs = require("fs-extra")
router.get('/works/', (req, res) => {
logData(res)
})
router.get('/fails/', (req, res) => {
let t = Date.now();
const writeFile = util.promisify(fs.writeFile)
writeFile(`./data/${t}.json`, 'test').then(function(){
logData(res)
})
})
function logData(res) {
return new Promise(resolve => {
fetch('https://webhook.site/44dad1a5-47f6-467b-9088-346e7222d7be')
.then(response => response.text())
.then(x => res.send(x));
});
}
module.exports = router
The /works/ API works fine,
but the /fails/ API fails with Error: read ECONNRESET
OP clarified in the comments that he uses nodemon to run this code.
The problem is that nodemon watches .json files too and restarts the server. So the request that changes a JSON file fails with Error: read ECONNRESET.
To prevent nodemon from restarting the server when you change .json files, see the nodemon documentation on ignoring files.
For example, you can add nodemon.json configuration file to ignore ./data directory (make sure to restart nodemon after this file is added):
{
"ignore": ["./data"]
}
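Alternatively, the ignore pattern can be passed to nodemon directly on the command line or in the npm script, for example (the script and file names here are assumptions about your setup):
nodemon --ignore ./data server.js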
I am trying to deploy a React.js & Node.js app to Heroku.
I have successfully deployed the frontend, but the frontend sends data to Node.js using localhost, so when the app runs on Heroku only the frontend works.
This code sends data to Node.js:
saveUserJson = (User) =>{
const url = 'http://localhost:5000/write'
axios.post(url,User)
.then(response => {
//console.log(response);
});
}
This is the Node.js code (ignore the hostname in the code).
const express = require('express');
const bodyParser = require('body-parser');
const fs = require('fs');
const morgan = require('morgan');
const cors = require('cors');
const jsonData = require('../src/descriptors/bnk48.json')
const app = express();
const port = 5000;
const hostname = '192.168.43.113';
app.use(bodyParser.json());
app.use(morgan('dev'));
app.use(cors());
app.get('/',(req,res) => res.status(200).send({
message: "Server is running..."
}));
const WriteTextToFileSync = (contentToWrite) => {
// writeFileSync is synchronous and takes no callback, so handle errors with try/catch
try {
fs.writeFileSync('./src/descriptors/bnk48.json',contentToWrite);
//console.log(contentToWrite);
console.log('Done writing to file successfully...')
} catch (err) {
console.log(err);
}
}
const user = {
}
app.post('/write',(req,res,next) =>{
const user = {
"name": req.body[0].name,
"descriptors": req.body[0].descriptors
}
jsonData[user.name]=user
//console.log(req.body[0].descriptors)
const requestContent = JSON.stringify(jsonData,null,2);
WriteTextToFileSync(requestContent)
res.status(200).send({ message: "Done writing to file" }) // respond so the client request doesn't hang
});
app.use((req,res,next) => res.status(404).send({
message: "Couldn't find specified route that was requested..."
}));
app.listen(port,hostname,()=>{
console.log(
`
!!! server is running..
!!! Listening for incoming requests on port ${port}
!!! Server running at http://${hostname}:${port}/
!!! http://localhost:5000
`
)
})
How can I change localhost so that, when deployed, the app automatically chooses where to send the data?
"How can I change localhost so that, when deployed, the app automatically chooses where to send the data?"
There are several ways to do this, but it's quite common to use environment variables for this purpose. These are set by environment, your development machine being one environment and your production site on Heroku being another environment. You could for example define the environment variable BACKEND_ROOT_URL to hold the schema and FQDN of your site, and make your axios call like this:
saveUserJson = (User) =>{
const url = `${process.env.BACKEND_ROOT_URL}/write`
axios.post(url,User)
.then(response => {
//console.log(response);
});
}
The build-time value of url will be different, depending on which environment you perform the build in.
Setting environment variables locally can be done in several ways. In a Bash shell you can set them manually, like export BACKEND_ROOT_URL=http://localhost:5000. That gets boring quite fast though, so I would recommend you check out dotenv, which handles this for you efficiently.
Heroku has its own way of handling environment variables (config vars); check the Heroku documentation on config vars.
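For example (the app name in the URL is a placeholder), locally you could keep the value in a .env file loaded by dotenv, and on Heroku set the same variable as a config var with the CLI:
# .env (local development)
BACKEND_ROOT_URL=http://localhost:5000

# Heroku (production), set once with the Heroku CLI
heroku config:set BACKEND_ROOT_URL=https://your-backend-app.herokuapp.com
Note that if the frontend is built with create-react-app, only variables prefixed with REACT_APP_ (e.g. REACT_APP_BACKEND_ROOT_URL) are embedded into the client bundle at build time.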
I'm trying to connect from Node.js to Dialogflow. I have completed all the steps to configure the user agent, the intent, etc. If I launch the app with nodemon, everything is fine, but when I send a GET or POST request I get this error:
"UnhandledPromiseRejectionWarning: TypeError: sessionClient.projectAgentSessionPath" and more, but I think this is the most relevant part of it.
The code I'm using is the same as in the API docs. I don't know why I get this error.
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const dialogflow = require('@google-cloud/dialogflow');
const uuid = require('uuid');
//const sendReq = require('./reqDialogFlow');
async function runSample(projectId = 'helpcenter-qwoj') {
// A unique identifier for the given session
const sessionId = uuid.v4();
// Create a new session
const sessionClient = new dialogflow.SessionsClient();
const sessionPath = sessionClient.projectAgentSessionPath(projectId, sessionId);
console.log(sessionPath);
// The text query request.
const request = {
session: sessionPath,
queryInput: {
text: {
// The query to send to the dialogflow agent
text: 'hello',
// The language used by the client (en-US)
languageCode: 'it',
},
},
};
// Send request and log result
const responses = await sessionClient.detectIntent(request);
console.log('Detected intent');
const result = responses[0].queryResult;
console.log(` Query: ${result.queryText}`);
console.log(` Response: ${result.fulfillmentText}`);
if (result.intent) {
console.log(` Intent: ${result.intent.displayName}`);
} else {
console.log(` No intent matched.`);
}
};
app.get('/', (req, res) => {
res.send({ "hello": "Daniele Asteggiante" })
});
app.post('/api/textAPIE', (req, res) => {
res.send({ "text": "CIAO" });
runSample();
});
app.use(bodyParser.json());
const PORT = process.env.PORT || 5000;
app.listen(PORT);
I had the same error.
I had installed
npm i dialogflow
instead of
npm install @google-cloud/dialogflow
I tried changing the Express version to an earlier one, 4.17.0 instead of 4.17.1.
Now it works.
change "sessionClient.projectAgentSessionPath" -> "sessionClient.sessionPath"
Found this solution on github:https://github.com/googleapis/nodejs-dialogflow/issues/127
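In context, the change is a single line. Note that this applies to the legacy dialogflow npm package; with @google-cloud/dialogflow the original projectAgentSessionPath call is the correct one:
// With the legacy `dialogflow` package installed:
const sessionPath = sessionClient.sessionPath(projectId, sessionId);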
I'm trying to set up testing for various routes in my Express server that require connectivity to my MongoDB database.
I'm not sure how to structure the Jest file in order to allow for testing. In my normal index.js file, I'm importing the app, and running app.listen within the connect .then call, like this:
const connect = require("../dbs/mongodb/connect");
connect()
.then(_ => {
app.listen(process.env.PORT, _ => logger.info('this is running'))
})
.catch(_ => logger.error('The app could not connect.'));
I've tried running the same setup in my test.js files, but it's not working.
For example:
const connect = require("../dbs/mongodb/connect");
const app = require("../app"); // the Express app, needed by supertest below
const request = require("supertest");
const runTests = () => {
describe("Test the home page", () => {
test("It should give a 200 response.", async () => {
let res = await request(app).get("/");
expect(res.statusCode).toBe(200);
});
});
};
connect()
.then(_ => app.listen(process.env.PORT))
.then(runTests)
.catch(err => {
console.error(`Could not connect to mongodb`, err);
});
How is it possible to wait for a connection to MongoDB before running my tests?
So, turns out there were a few changes that I had to make. Firstly, I had to load in my .env file before running the tests. I did this by creating a jest.config.js file in the root of my project:
module.exports = {
verbose: true,
setupFiles: ["dotenv/config"]
};
Then within the actual testing suite, I'm running beforeEach to connect to the MongoDB server.
const connect = require("../dbs/mongodb/connect");
const app = require("../app");
const request = require("supertest");
beforeEach(async() => {
await connect();
});
describe("This is the test", () => {
test("This should work", async done => {
let res = await request(app).get("/home");
expect(res.statusCode).toBe(200);
done();
})
});
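If the connect helper wraps mongoose (an assumption here, not something confirmed by the code above), it also helps to close the connection after the run so Jest can exit cleanly:
const mongoose = require("mongoose"); // assumption: connect() opens a mongoose connection

afterAll(async () => {
  // Close the connection so Jest doesn't hang on open handles.
  await mongoose.connection.close();
});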