I want to use pg-promise with FeathersJS and Vue.js to display the results of a query run on a Postgres database. My sql.js looks as follows:
sql.js
const express = require('express');
const jsonexport = require('jsonexport');
const dbgeo = require('dbgeo');
const router = express.Router();
function dbGeoParse(data) {
return new Promise((resolve, reject) => {
dbgeo.parse(
data,
{
outputFormat: 'geojson',
},
(err, result) => {
if (err) {
reject(err);
} else {
resolve(result);
}
},
);
});
}
// expose sql endpoint, grab query as URL parameter and send it to the database
router.get('/sql', (req, res) => {
const { app } = req;
const sql = req.query.q;
const format = req.query.format || 'topojson';
console.log(`Executing SQL: ${sql}`, format); // eslint-disable-line
// query using pg-promise
app.db
.any(sql)
.then((data) => {
console.log('data',data)
// use dbgeo to convert the rows returned by PostGIS into GeoJSON
return dbGeoParse(data).then((geojson) => {
res.setHeader('Content-disposition', 'attachment; filename=query.geojson');
res.setHeader('Content-Type', 'application/json');
return geojson;
});
})
.then((data) => {
res.send(data);
})
.catch((err) => {
// send the error message if the query didn't work
const msg = err.message || err;
console.log('ERROR:', msg); // eslint-disable-line
res.send({
error: msg,
});
});
});
module.exports = router;
I have configured my app.js to use sql.js as a route by adding the following lines to it:
const pgp = require('pg-promise')({
query(e) {
console.log(e.query); // eslint-disable-line
},
});
const connectionString = 'postgres://abc:abc123@localhost:5678/gisdb';
// initialize database connection
const app = express(feathers());
app.db = pgp(connectionString);
app.use('/sql', require('sql'));
but when I call the handleSubmit function shown below from my Vue component,
handleSubmit() {
const SQL = "select * from spatial_search(searchmethod := 'id',sale_price :=10000,tax_ogcid :=
84678,distance := 0.5)";
const queryType = 'sql';
fetch(`/${queryType}?q=${encodeURIComponent(SQL)}`)
.then(res => {console.log('res',res.json())})}
I get the following error:
Uncaught (in promise) SyntaxError: Unexpected token < in JSON at position 0
I think I might be doing something wrong here. Can someone help me?
I'm trying to call my global variables in my controller, but I get an error saying the variable is not defined. Please see the code below for reference.
**server.js**
const serverConfig = require('./config/server.config')
const app = require('fastify')({ logger: true })
require('./models/response.model')
const mongoose = require('mongoose')
const conn = require('./config/monggo.config')
require('./routes/dbm.routes')(app)
const connect = async () => {
try {
await mongoose.connect(conn.uri)
console.log('Connected to Mongoose!')
} catch (error) {
console.log(error)
}
}
connect();
app.listen(serverConfig.port, '::', (err, address) => {
if (err) {
app.log.error(err)
process.exit(1)
}
console.log('Listening at', address)
})
response.model.js
module.exports = function () {
global.successModel = {
status: 'success',
statusCode: 0,
isSuccess: true,
message: ''
}
global.failModel = {
status: 'failed',
statusCode: 1,
isSuccess: false,
message: 'Error encountered while processing request.'
}
}
**monggo.controller.js**
exports.getProducts = async (req, res) => {
// find Products in the database
Product.find({}, (err, product) => {
//send error message if not found
if (err) {
res.send(err);
}
//else pass the Products
res.send(successModel);
})
await res;
}
Hoping to solve my problem. Thank you
The './models/response.model' file exports a function that you need to call to "install" your globals.
- require('./models/response.model')
+ require('./models/response.model')()
As a suggestion, you should avoid using globals; in Fastify you can use:
decorators to add useful data such as configs to your app instance (see the sketch after the plugin example below)
Moreover, your mongoose connection is unrelated to Fastify; you can encapsulate it in a plugin to make sure Fastify starts only after connecting to the database:
const fp = require('fastify-plugin')
app.register(fp(async function connect (instance, opts) {
await mongoose.connect(conn.uri)
}))
In an Express.js app connected to a MySQL db, I am trying to get some data from an already defined route/query:
// customers.model.js
CUSTOMERS.getAll = (result) => {
let query = "SELECT * FROM customers"
sql.query(query, (err, res) => {
if (err) {
console.log("error: ", err)
result(null, err)
return
}
result(null, res)
})
}
// customers.controller.js
// GET customers is a standalone route and should output all the customers when called.
const CUSTOMERS = require("../models/customers.model.js")
exports.findAll = (req, res) => {
return CUSTOMERS.getAll((err, data) => {
if (err)
res.status(500).send({
message: err.message ||
"Some error occurred while retrieving customers...",
})
else res.send(data)
})
}
In payments.controller.js I would first like to get all the customers so I can do something with the data:
// payments.controller.js
// GET payments is also a standalone route and should get the customers,
// do something with the data and output a calculation with the help of this data
const CUSTOMERS = require("../models/customers.model.js")
exports.calculateAll = (req, res) => {
const customers = CUSTOMERS.getAll((err, data) => {
console.log('this always has correct data', data)
if (err) return err
else return data
})
console.log('this is always undefined', customers)
...
res.send(whatEverCalculatedData)...
}
But the data here is always undefined.
What am I doing wrong in the above, and what's the correct way to call this route inside another route?
I know it has similarities to this question, but I couldn't sort it out for my particular example.
This is because your call is asynchronous.
You must wait for your data to be ready before rendering the results.
You could use Promises or async/await statements.
For example (this assumes a promise-capable client such as mysql2/promise; with the callback-based mysql driver you would need to promisify sql.query first):
CUSTOMERS.getAll = async () => {
const query = "SELECT * FROM customers";
try {
return await sql.query(query);
} catch (e) {
console.log(`An error occurred while fetching customers: ${e.message}.`);
return null;
}
}
exports.calculateAll = async (req, res) => {
try {
const data = await CUSTOMERS.getAll();
res.send(whatEverCalculatedData);
} catch (e) {
res.send(`Something went wrong: ${e.message}.`);
}
}
I've been reading about async and await in JS and tried to implement them in my code (which I totally messed up).
Here is my JS.
var express = require('express');
var router = express.Router();
var jsforce = require('jsforce');
const SEC_TOKEN = 'SEC_TOKEN';
const USER_ID = 'USER_ID';
const PASSWORD = 'PWD';
const { default: axios } = require('axios');
router.get("/", async (req, res, next) => {
await initConnect;
await soqlData;
await slackPostTest;
});
initConnect = async () => {
var conn = new jsforce.Connection({
loginUrl: 'https://login.salesforce.com'
});
await conn.login(USER_ID, PASSWORD + SEC_TOKEN, (err, userInfo) => {
if (err)
console.log(err);
else {
console.log(userInfo.Id);
}
});
}
soqlData = async () => {
await conn.query('Select Id, Name from Account LIMIT 1', (err, data) => {
if (err)
console.log(err);
else
return data.records[0];
})
}
slackPostTest = async () => {
await axios.post('SLACK_WEBHOOK', {
"text": "soqlRes"
})
}
module.exports = router;
What am I trying to achieve?
Initialize my connection by passing SEC_TOKEN, USER_ID, and PASSWORD to my initConnect function; this will give me a connection (conn).
Use this conn to query my Salesforce instance and get some other data.
Post some message (currently irrelevant, but I will hook it up with the above response later) to my Slack endpoint.
Also, can someone please give me a somewhat detailed explanation of the solution (in terms of async/await)?
Thanks
Assuming everything else about your JsForce API usage was correct (I have no experience with it, so I can't say), here's how to promisify those callback-based APIs and call them.
var express = require("express");
var router = express.Router();
var jsforce = require("jsforce");
const SEC_TOKEN = "SEC_TOKEN";
const USER_ID = "USER_ID";
const PASSWORD = "PWD";
const { default: axios } = require("axios");
router.get("/", async (req, res, next) => {
const { conn, userInfo } = await initConnect();
const data = await soqlData(
conn,
"Select Id, Name from Account LIMIT 1",
);
await slackPostTest(data.records[0]);
});
function initConnect() {
const conn = new jsforce.Connection({
loginUrl: "https://login.salesforce.com",
});
return new Promise((resolve, reject) => {
conn.login(USER_ID, PASSWORD + SEC_TOKEN, (err, userInfo) => {
if (err) return reject(err);
resolve({ conn, userInfo });
});
});
}
function soqlData(conn, query) {
return new Promise((resolve, reject) => {
conn.query(query, (err, data) => {
if (err) return reject(err);
resolve(data);
});
});
}
function slackPostTest(soqlRes) {
return axios.post("SLACK_WEBHOOK", {
text: soqlRes,
});
}
module.exports = router;
I previously had a single file upload set up and working properly. Now I need to make it handle multiple files.
Here is my code right now:
const multer = require('multer')
const { Storage } = require('@google-cloud/storage')
const storage = new Storage()
const m = multer({ storage: multer.memoryStorage() })
module.exports = app => {
app.use('/', router)
router.post(
'/reader-:shortId/file-upload',
passport.authenticate('jwt', { session: false }),
m.array('files'),
async function (req, res) {
const bucketName = req.params.shortId.toLowerCase()
await storage.createBucket(bucketName)
bucket = storage.bucket(bucketName)
let promises = []
req.files.forEach((file) => {
const blob = bucket.file(file.originalname)
const newPromise = new Promise((resolve, reject) => {
blob.createWriteStream({
metadata: { contentType: file.mimetype }
}).on('finish', async response => {
await blob.makePublic()
resolve(response)
}).on('error', err => {
reject('upload error: ', err)
}).end()
})
promises.push(newPromise)
})
Promise.all(promises).then((response) => {
// the response I get here is [undefined, undefined]
res.status(200).send(response)
}).catch((err) => {
res.status(400).send(err.message)
});
})
}
req.files does give me an array of files, with a buffer and a size that makes sense.
The promises all resolve.
But when I check the files in the Google bucket, they have the right names but don't have any content (and a size of 0).
As I said before, it was working when I was doing it with one file (using m.single('file')).
I don't want to use the bucket as the destination in the multer setup because I also have to change the file name before uploading to the Google bucket.
Edit: this is the code example given by the Google Cloud documentation for single-file uploads (https://cloud.google.com/nodejs/getting-started/using-cloud-storage):
function sendUploadToGCS (req, res, next) {
if (!req.file) {
return next();
}
const gcsname = Date.now() + req.file.originalname;
const file = bucket.file(gcsname);
const stream = file.createWriteStream({
metadata: {
contentType: req.file.mimetype
},
resumable: false
});
stream.on('error', (err) => {
req.file.cloudStorageError = err;
next(err);
});
stream.on('finish', () => {
req.file.cloudStorageObject = gcsname;
file.makePublic().then(() => {
req.file.cloudStoragePublicUrl = getPublicUrl(gcsname);
next();
});
});
stream.end(req.file.buffer);
}
I originally had something like that working, but I just don't understand where it is getting the file buffer data from. That is probably where things are different with multiple files.
I know it's late, but someone might be looking for an answer about uploading multiple files to Google Cloud Storage.
Dependencies:
Express
Google Cloud Library
Multer
Body Parser
This is the controller code.
exports.post_image_upload = async (req, res) => {
/** Check if files exist */
if (!req.files) {
res.status(400).send('No file uploaded.');
return;
}
let PublicUrls = []
req.files.forEach((file) => {
const blob = bucket.file(file.fieldname + '-' + Date.now() + path.extname(file.originalname))
const blobStream = blob.createWriteStream({
metadata: { contentType: file.mimetype }
})
blobStream.on('finish', ()=> {
blob.makePublic()
})
blobStream.on('error', err => {
//Put your error message here
})
blobStream.end(file.buffer)
const Url = `https://storage.googleapis.com/${bucket.name}/${blob.name}`
PublicUrls.push(Url)
})
res.send(PublicUrls)
}
Good Luck
Ok, turns out I had to change
.end()
to
.end(file.buffer)
Marie Pelletier, I think your approach is 100% right. I modified your code a little, trying to avoid the async response:
let promises = []
req.files.forEach((file) => {
const blob = bucket.file(file.originalname)
const newPromise = new Promise((resolve, reject) => {
blob.createWriteStream({
metadata: { contentType: file.mimetype },
resumable: false //Good for small files
}).on('finish', () => {
const Url = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
resolve({ name: file.originalname, url: Url });
}).on('error', err => {
reject('upload error: ', err);
}).end(file.buffer);
})
promises.push(newPromise);
})
Promise.all(promises).then((response) => {
res.status(200).send(response)
}).catch((err) => {
res.status(400).send(err.message)
});
This way, I didn't get 'undefined' anymore.
I'm trying to send some data from a React form to my Express back end. To do this I'm using fetch to send some variable data from React. I'm console-logging the data before running the fetch to see if it is there, and the console log can see the data.
My error states:
[0] (node:2966) UnhandledPromiseRejectionWarning: TypeError: Cannot read property 'message' of undefined
So it seems like my Express back end can't see the variable data.
How I'm sending the data from React:
handleSubmit = async e => {
e.preventDefault();
console.log("Submit was pressed!");
if (this.state.email === "") {
}
const { name } = this.state;
const query = this.state.query;
const subject = "kontakt fra nettside";
const message = { name, query };
console.log(message.name, message.text, "data is");
fetch(
"http://localhost:5000/api/email", variabler
{
method: "POST",
cache: "no-cache",
headers: {
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Credentials": true,
content_type: "application/json"
},
body: JSON.stringify(message, subject)
}
); //.then(response => response.json());
};
My file for retrieving the data from the front end in Express:
const emailConfig = require("./emailConfig")();
const mailgun = require("mailgun-js")(emailConfig);
exports.sendEmail = (recipient, message, attachment) =>
new Promise((resolve, reject) => {
const data = {
from: "Test <test#test.no>", // Real email removed from this post
to: recipient,
subject: message.subject,
text: message.query,
inline: attachment,
html: message.html
};
mailgun.messages().send(data, error => {
if (error) {
return reject(error);
}
return resolve();
});
});
and sendMail.js
const express = require("express");
const sendMail = express.Router();
const emailUtil = require("./emailUtil");
const { sendEmail } = emailUtil;
sendMail.post("/", async (req, res, next) => {
// const { recipient, message } = req.body;
console.log("Request mottatt");
const recipient = "test#test.no";
const message = req.body.message;
try {
await sendEmail(recipient, message);
res.json({ message: "Your query has been sent" });
console.log("Message has been sent");
await next();
} catch (e) {
await next(e);
console.log("nah", e);
}
});
module.exports = sendMail;
I can't figure out where the error is, any ideas? :)