write/add data in file using express js - javascript

I'm trying to write/add data to a JSON file (for example, for each request a new JSON object is added to the file) using Express.js. I am new to all of this, so I really don't know what to do. I'm using a POST request; here's what I've got so far. I know it's a big catastrophic mess: I scraped together everything that could help me and gathered all of it. I'm just SO lost. Thanks for your help!
app.post('*/', function(req, res) {
    res = {
        first_name: req.body.first_name,
        last_name: req.body.last_name,
        reponse1: req.body.reponse1,
        reponse2: req.body.reponse2,
    };
    JSON.stringify(res);
    var body = {
        table: []
    };
    body.table.push(res);
    filePath = __dirname + '/data.json';
    req.on('data', function(data) {
        body += data;
    });
    req.on('end', function() {
        fs.appendFile(filePath, body, function() {
            res.end();
        });
    });
});

In your code I see a lot of bugs. First, you should not assign res = { ... }: res is Express's response object, and overwriting it breaks the response. Second, you should stringify the JSON data as shown below. I would also suggest going through some Node.js tutorials first, for example https://www.tutorialspoint.com/nodejs/ or https://www.codementor.io/nodejs/tutorial.
For your requirement, you can simply use the following code:
const express = require('express')
const app = express()
const bodyParser = require('body-parser')
const fs = require('fs')

app.use(bodyParser.json())
app.use(bodyParser.urlencoded({ extended: false }))

app.post('/', function(req, res) {
    var body = {
        first_name: req.body.firstName,
        last_name: req.body.lastName
    }
    var filePath = __dirname + '/data.json'
    fs.appendFile(filePath, JSON.stringify(body), function(err) {
        if (err) { throw err }
        res.status(200).json({
            message: "File successfully written"
        })
    })
})

app.listen(3000, function() {
    console.log("Working on port 3000")
})
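One caveat, not part of the original answer: fs.appendFile just concatenates each stringified object onto the end of the file, so data.json will not remain a single valid JSON document. If the goal is one JSON array with an entry per request, a read-modify-write sketch could look like the following, assuming data.json already exists and starts out containing an empty array []:
// Sketch only: keep data.json as one JSON array, adding an entry per request.
app.post('/', function(req, res) {
    var filePath = __dirname + '/data.json'
    var entry = {
        first_name: req.body.firstName,
        last_name: req.body.lastName
    }
    fs.readFile(filePath, 'utf8', function(err, contents) {
        if (err) { return res.status(500).json({ message: 'Could not read file' }) }
        var table = JSON.parse(contents)   // the existing array of entries
        table.push(entry)                  // add this request's data
        fs.writeFile(filePath, JSON.stringify(table, null, 2), function(err) {
            if (err) { return res.status(500).json({ message: 'Could not write file' }) }
            res.status(200).json({ message: 'Entry added' })
        })
    })
})
Note that two overlapping read-modify-write cycles can lose an entry, so for concurrent traffic a real datastore (or newline-delimited JSON appended one line at a time) is the safer choice.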

Related

How come fetch only works here when I add an alert to the end of the line? Express + NodeJS + Fetch. What's a good fix here

I'm using Node.js with Express to create a web app that records your audio using the VMSG library and posts the audio Blob to my file system using HTTP requests and multer. It also adds that recording into a MongoDB database.
I'm having an issue with the fetch command: it doesn't work unless I put an alert right after the fetch. The way I have it set up, I have my main Express app (index.js) and a router for /recordingsDirectory (recordings.js), which is the endpoint that processes the POSTs. My main index HTML page uses Handlebars and a separate script (recorder.js) to 1) use the VMSG library and 2) fetch a POST to /recordingsDirectory once someone submits the audio file with the name and the audio Blob present. This is where I'm stuck: I can fetch in recorder.js with an alert line after the fetch, but I can't have the fetch at the end of the else if block by itself, and I'd like to do it without the alert since it's ugly. I also tried making the onsubmit function async and awaiting the fetch, thinking it might be waiting on a promise, but that didn't work.
Here are the files. I commented CRITICAL and SUPER CRITICAL on the lines of code you should check out, where I think the issues lie:
index.js
const express = require('express')
const handlebars = require('express-handlebars')
const path = require('path')
const XMLHttpRequest = require('xmlhttprequest').XMLHttpRequest
const xhr = new XMLHttpRequest()
const db = require('./db')

const app = express()
const PORT = process.env.PORT || 8000

app.set('view engine', 'hbs')
app.engine('hbs', handlebars({
    layoutsDir: path.join(__dirname, 'views', 'layouts'),
    extname: 'hbs',
    defaultLayout: 'index',
    partialsDir: path.join(__dirname, 'views', 'partials'),
}))

app.use(express.json())
app.use(express.urlencoded({ extended: false }))

app.use((err, req, res, next) => {
    if (err instanceof SyntaxError && err.status === 400 && 'body' in err) {
        return res.status(400).send({ status: 404, message: err.message })
    }
    next()
})

app.get('/', (req, res) => {
    res.render('main', {
        title: 'Main Page'
    })
})

app.get('/recordings', (req, res) => {
    var database = db.get().db('AudioJungle')
    database.collection('recordings').find().sort({ "date": -1 }).toArray(function(err, docs) {
        res.render('recordings', {
            title: 'Recordings',
            recordings: docs
        })
    })
})

// CRITICAL
app.use('/recordingsDirectory', require('./recordings/recordings'))

app.use(express.static('public'))
app.use('/scripts', express.static(path.join(__dirname, 'node_modules', 'vmsg')))

db.connect(function(err) {
    if (err) {
        console.log('Unable to connect to Mongo.')
        process.exit(1)
    } else {
        app.listen(PORT, () => console.log(`Listening on Port: ${PORT}`))
    }
})

process.on('SIGINT', function() {
    db.close(function () {
        console.log('Disconnected on app termination');
        process.exit(0);
    });
});

app.use((req, res, next) => {
    res.status(404).send({
        status: 404,
        error: 'Not found'
    })
})
recordings.js (Aka the /recordingsDirectory endpoint for a fetch POST)
const express = require('express')
const router = express.Router()
const multer = require('multer')
const fs = require('fs-extra')
const db = require('../db')
const { ObjectId } = require('bson')
const moment = require('moment')

const upload = multer({
    storage: multer.diskStorage({
        destination: (req, file, callback) => {
            let path = './public/uploads'
            fs.mkdirsSync(path)
            callback(null, path)
        },
        filename: (req, file, callback) => {
            createRecording(req).then((id) => {
                var file_name = id + '.mp3'
                callback(null, file_name)
            })
        }
    })
})

var type = upload.single('audio-file')

// CRITICAL
router.post('/', type, (req, res) => {
    console.log('made it')
    res.status(200)
    res.send('OK')
})

router.delete('/delete', (req, res) => {
    deleteRecording(req.body._id).then((dbResponse) => {
        if (dbResponse == null || dbResponse == undefined) {
            res.status(400).json({ msg: 'ID already deleted' })
        } else {
            res.status(200)
        }
    })
})

router.get('/', (req, res) => {
    var database = db.get().db('AudioJungle')
    var recordings = database.collection('recordings')
    recordings.findOne({ "_id": ObjectId(req.query.id) }, function(err, result) {
        if (err) throw err
        if (result == null || result == undefined) {
            return res.status(400).json({
                status: 404,
                error: 'Recording no longer in the database'
            })
        }
        res.status(200)
        res.json({
            name: result.name,
            date: result.date
        })
    })
})

async function createRecording(req) {
    var database = db.get().db('AudioJungle')
    var recordings = database.collection('recordings')
    var audioObject = {
        name: req.body.name,
        date: moment().format('MMMM Do YYYY, h:mm:ss a')
    }
    var dbResponse = await recordings.insertOne(audioObject)
    return dbResponse.insertedId
}

async function deleteRecording(id) {
    var database = db.get().db('AudioJungle')
    var recordings = database.collection('recordings')
    var audioToDelete = {
        _id: ObjectId(id)
    }
    var deleteResult = await recordings.deleteOne(audioToDelete)
    return deleteResult
}

module.exports = router
And below is the script that records the audio and name and tries to fetch (where I need the alert for it to actually POST to /recordingsDirectory):
recorder.js
import { record } from "/scripts/vmsg.js";

let recordButton = document.getElementById("record");
var blobObj = null

recordButton.onclick = function() {
    record({ wasmURL: "/scripts/vmsg.wasm" }).then(blob => {
        blobObj = blob
        var tag = document.createElement("p")
        tag.id = "finishedRecording"
        var text = document.createTextNode("Audio File Recorded")
        tag.appendChild(text)
        var element = document.getElementById("box")
        element.appendChild(tag)
        document.getElementById('box').appendChild(a)
    })
}

let form = document.getElementById('mp3Form');
form.addEventListener("submit", submitAudio)

function submitAudio() {
    var fileName = form.elements[0].value
    if (fileName == "") {
        alert('Please enter a name for your file')
    } else if (blobObj != null) {
        // CRITICAL
        // SUPER CRITICAL WHERE FETCH DOESN'T WORK UNLESS I PUT AN ALERT AT THE END
        const formData = new FormData()
        formData.append('name', fileName)
        formData.append('audio-file', blobObj)
        const options = {
            method: 'POST',
            body: formData
        }
        fetch('/recordingsDirectory', options);
        // If I comment an alert here, /recordingsDirectory will process the post since it console.logs 'made it'
    } else {
        alert('Record some Audio to upload')
    }
}
Here's my file system.
Also, I'd like to mention that the fetch works properly on my Windows PC without the alert, but it doesn't work without the alert on my MacBook. If anyone figures out a fix or spots an error in how I'm doing things, please let me know. I've been stuck on this problem for a day now. Thanks a bunch!
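A note added here as a sketch rather than a confirmed fix: submitAudio is registered as a submit handler but never calls event.preventDefault(), so the browser's default form submission can navigate the page and cancel the in-flight fetch; a blocking alert merely delays that navigation long enough for the request to go out. Based on the question's recorder.js, preventing the default submission might look like this:
// Sketch based on the question's recorder.js: prevent the default form
// submission so the page does not navigate away and cancel the pending fetch.
form.addEventListener("submit", submitAudio)

function submitAudio(event) {
    event.preventDefault()   // stop the browser's own submit/navigation

    var fileName = form.elements[0].value
    if (fileName == "") {
        alert('Please enter a name for your file')
    } else if (blobObj != null) {
        const formData = new FormData()
        formData.append('name', fileName)
        formData.append('audio-file', blobObj)
        fetch('/recordingsDirectory', { method: 'POST', body: formData })
            .then(res => console.log('upload status:', res.status))
            .catch(err => console.error(err))
    } else {
        alert('Record some Audio to upload')
    }
}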

How to set two root endpoints in one file Restful API with node.js & EXPRESS

I have a root endpoint that works when users enter a URL like this:
http://localhost:8000/?date=2019-10-20&station=41027&daysForward=3
I want to create a second root endpoint in the same file with a different query, but that did not work.
My code:
// Create express app
var express = require("express")
var app = express()
var mysql = require('mysql')
var express = require("express")
var cors = require('cors')

app.use(cors())

// Server port
var HTTP_PORT = 8000

// Start server
app.listen(HTTP_PORT, () => {
    console.log("Server running on port %PORT%".replace("%PORT%", HTTP_PORT))
});

var con = mysql.createConnection({
    host: "192.168.1.1",
    port: "3456",
    user: "user",
    password: "pass"
});

var con2 = mysql.createConnection({
    host: "192.168.1.1",
    port: "3456",
    user: "user",
    password: "pass"
});

let aladinModel = '';
let aladinModelStations = '';

app.route('/')
    .get(function(req, res) {
        // omitted
        res.setHeader('Access-Control-Allow-Origin', '*');
        const date = req.query.date;
        const station = req.query.station;
        const daysForward = req.query.daysForward;
        try {
            const query = `CALL aladin_surfex.Get_mod_cell_values_meteogram('${date}', ${station}, ${daysForward})`;
            con.query(query, function (err, result, fields) {
                if (err) throw err;
                aladinModel = result;
            });
            res.json({aladinModel})
        } catch(error) {
            console.log("Error query database!!!");
        }
    });

app.route('/stations')
    .get(function(req, res) {
        // omitted
        res.setHeader('Access-Control-Allow-Origin', '*');
        try {
            const query2 = `SELECT Station,Ime FROM stations_cells`;
            con2.query2(query2, function (err, result2, fields) {
                if (err) throw err;
                aladinModelStations = result2;
            });
            res.json({aladinModelStations})
        } catch(error) {
            console.log("Error query database!!!");
        }
    });

app.use(function(req, res) {
    res.status(404);
});
I guess this is not the right way to route pages but I hope someone can explain to me with an example how I could fix the code - so when a user enters:
http://localhost:3000/stations
the data should be loaded.
I see this error when I try to open this link.
[nodemon] starting `node server.js localhost:8000`
Server running on port 8000
Error query database!!!
The query on the second endpoint
SELECT station, ime
FROM stations_cells
is fine: when I run the SELECT in HeidiSQL, the database returns the data values.
So where is the problem with the second endpoint?
That might not be the actual cause, but here's a suggestion (which doesn't fit in the comment section):
app.route('/')
    .get(function(req, res) {
        // omitted
    });

app.route('/stations')
    .get(function(req, res) {
        // omitted
    });
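Beyond splitting the routes, two things in the posted /stations handler stand out: con2.query2 is not a method on a mysql connection (the method is query), and res.json() runs before the asynchronous query callback has filled aladinModelStations, so the response goes out with stale or empty data. A sketch of the handler responding from inside the callback, reusing the connection, table, and column names from the question:
// Sketch: respond from inside the query callback so the data is actually there.
app.route('/stations')
    .get(function(req, res) {
        res.setHeader('Access-Control-Allow-Origin', '*');
        const query2 = `SELECT Station, Ime FROM stations_cells`;
        con2.query(query2, function (err, result2, fields) {   // con2.query, not con2.query2
            if (err) {
                console.log("Error query database!!!", err);
                return res.status(500).json({ error: 'Database error' });
            }
            res.json({ aladinModelStations: result2 });
        });
    });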

router.post can't pass data

$.post($gameNetwork._serverURL + '/addfriend',
    { username: "r", tusername: "w" }).done(function(data) {
        console.log("finished");
});

Account.statics.friend = function(name, tname, cb) {
    return this.findOneAndUpdate(
        { 'username': name },
        { $push: { 'friendlist': tname } },
        { upsert: true, new: true },
        cb);
};
route
router.post('/addfriend', function(req, res) {
    //Account.findByName(req.body.username, function(err, account){
    Account.friend(req.body.username, req.body.tusername, function(err, account) {
        if (err) {
            return res.status(203).json({
                err: err.msg
            });
        }
        if (!account) {
            return res.status(203).json({
                err: "Invalid username"
            });
        }
        var tname = req.body.tusername;
        var profile = {
            tname: tname,
            name: account.username,
            email: account.email,
            id: account._id,
            rank: account.rank
        };
    });
This code should push "w" into the 'friendlist' field in MongoDB, but I got null instead of "w".
How can I get "w" into the 'friendlist' field in MongoDB?
Any help is appreciated. Thanks in advance.
Is router an Express.js router?
If yes, did you set the bodyParser middleware?
If not, set it like this:
const express = require('express')
const bodyParser = require('body-parser')
const app = express()

// parse application/json
app.use(bodyParser.json())
You can also try to stringify the body in the AJAX request, as described here.
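For illustration only (this snippet is not from the original answer), stringifying the body and declaring the JSON content type in the jQuery request might look like this, reusing the field names from the question:
// Sketch: send the body as a JSON string so bodyParser.json() can parse it.
$.ajax({
    url: $gameNetwork._serverURL + '/addfriend',
    type: 'POST',
    contentType: 'application/json',   // tells Express the body is JSON
    data: JSON.stringify({ username: 'r', tusername: 'w' }),
    success: function(data) {
        console.log('finished', data);
    }
});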
You can debug the data of the request like this:
router.post('/addfriend', function(req, res) {
    console.log(req.body);
    // your logic
});
Run your code again and check whether you see the data you expect. If tusername is null or undefined, the problem may be in the configuration of the module you use to parse the body, for example body-parser, busboy, etc.
All comments are welcome!
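One more sketch, a suggestion rather than a confirmed fix: even once req.body arrives correctly, the /addfriend handler above never sends a success response, so the client's .done() callback has nothing useful to report. Responding from inside the Account.friend callback (field names taken from the question) might look like this:
// Sketch: send a response in every branch so the client's .done() fires
// and you can inspect what Mongo actually returned.
router.post('/addfriend', function(req, res) {
    Account.friend(req.body.username, req.body.tusername, function(err, account) {
        if (err) {
            return res.status(203).json({ err: err.msg });
        }
        if (!account) {
            return res.status(203).json({ err: "Invalid username" });
        }
        res.json({
            name: account.username,
            friendlist: account.friendlist   // should now include "w"
        });
    });
});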

GET images from GridFs to Angular

I'm storing images from my Angular app in MongoDB using GridFS, but I can't figure out how to GET the images out of the DB and back to the app.
I'm using a custom objectId for the query.
EDIT
It looks like the GET part now works, but there was no media in the collection. I played a bit with the code, and now I can see fs.chunks and fs.files in the database. I think the problem is that I try to query for metadata in the GET request, which returns no response data. Anybody got an idea how to fix this?
var fs = require('fs');
var conn = mongoose.connection;
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;
var gfs = Grid(conn.db);
var buffer = "";

app.post('/uploads/', multer({
    upload: null,
    onFileUploadStart: function (file, req) {
        this.upload = gfs.createWriteStream({
            filename: file.originalname,
            metadata: { "objectId": req.body.project_id },
            mode: "w",
            chunkSize: 1024 * 4,
            content_type: file.mimetype,
            root: "fs",
        });
    },
    onFileUploadData: function(file, data) {
        this.upload.write(data);
    },
    onFileUploadComplete: function(file, res) {
        done = true;
    }
}), function(req, res) {
    res.status(200);
    res.send("Success!");
});

app.route('/uploads/media/:projectId').get(function (req, res) {
    var readstream = gfs.createReadStream({
        "metadata.objectId": req.params.projectId
    });
    res.set('Content-Type', 'image/jpeg');
    readstream.pipe(res);
});
You need to write the stream back out to your response. Here is another similar question. But basically you either need to pipe the stream to your response, or use the stream's end event and write the result to your response. The following code pipes to the response and sets a content-type of image/jpeg.
app.get('/uploads/:objectId', function(req, res) {
    var options = {
        _id: req.params.objectId
    };
    gfs.exist(options, function(err, exists) {
        if (!exists) {
            res.status(404);
            res.end();
        } else {
            var readstream = gfs.createReadStream(options);
            res.set('Content-Type', 'image/jpeg');
            readstream.pipe(res);
        }
    });
});
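For the alternative mentioned above, collecting the stream and writing the result once it ends, a rough sketch using the same gfs object might look like this (illustrative only, not part of the original answer):
// Sketch: buffer the GridFS read stream and send it on the 'end' event.
app.get('/uploads/buffered/:objectId', function(req, res) {
    var readstream = gfs.createReadStream({ _id: req.params.objectId });
    var chunks = [];
    readstream.on('data', function(chunk) {
        chunks.push(chunk);                  // collect each chunk as it arrives
    });
    readstream.on('end', function() {
        res.set('Content-Type', 'image/jpeg');
        res.send(Buffer.concat(chunks));     // write the whole file to the response
    });
    readstream.on('error', function(err) {
        res.status(404).end();
    });
});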
var pi_id = fields.pic_id;
gfs.findOne({ _id: pi_id }, function (err, file) {
    console.log(file);
    if (err) return res.status(400).send(err);
    if (!file) return res.status(404).send('');
    res.set('Content-Type', file.contentType);
    res.set('Content-Disposition', 'attachment; filename="' + file.filename + '"');
    var readstream = gfs.createReadStream({
        _id: file._id
    });
    readstream.on("error", function(err) {
        console.log("Got error while processing stream " + err.message);
        res.end();
    });
    readstream.pipe(res);
});

Parsing nested JSON using body-parser and express

I have an iOS app which is sending a JSON packet to a webserver. The webserver code looks like this:
var express = require('express');
var bodyParser = require('body-parser');
var mongoose = require('mongoose');

var app = express();

mongoose.connect('mongodb://localhost/test');
var db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error:'));
db.once('open', function (callback) {
    console.log("MongoDB connection is open.");
});

// Mongoose Schema definition
var Schema = mongoose.Schema;
var LocationSchema = new Schema({
    X: Number,
    Y: Number,
    Orientation: Number,
    UserID: String,
    Time: String
});

// Mongoose Model definition
var LocationsCollection = mongoose.model('locations', LocationSchema);

// create application/json parser
var jsonParser = bodyParser.json();

// URL management
app.get('/', function (req, res) {
    res.sendFile(__dirname + '/index.html');
});

app.post('/update', jsonParser, function (req, res) {
    if (!req.body) return res.sendStatus(400);
    else {
        console.log(req.body);
    }
});

// Start the server
var server = app.listen(3000, function () {
    var host = server.address().address
    var port = server.address().port
    console.log('App listening at %s:%s', host, port)
});
The key part is the app.post method, which processes the incoming HTTP request sent from my iOS app. At the moment it just prints req.body to the console, which looks like this:
{
    datapoint_1: {
        timestamp: '2015-02-06T13:02:40:361Z',
        x: 0.6164286615466197,
        y: -0.6234909703424794,
        id: 'B296DF8B-6489-420A-97B4-6F0F48052758',
        orientation: 271.3345946652066
    },
    datapoint_2: {
        timestamp: '2015-02-06T13:02:40:961Z',
        x: 0.6164286615466197,
        y: -0.6234909703424794,
        id: 'B296DF8B-6489-420A-97B4-6F0F48052758',
        orientation: 273.6719055175781
    }
}
So, you can see the request is a nested JSON object. Ideally, I'd like to loop through the request's objects (i.e. the datapoints) and insert them into the MongoDB database (via Mongoose). However, I can't seem to do much of anything with req.body: I can't figure out how to loop over the request or how to parse the nested JSON so that it matches the Mongoose schema. Can anyone provide some guidance on how to insert these datapoints into the database?
Set body-parser's extended property to true to allow parsing nested objects.
var express = require('express');
var app = express()
var bodyParser = require('body-parser');

app.use(bodyParser.urlencoded({
    extended: true
}));
Answering my own question: after figuring out how to access the key/value pairs inside the nested JSON object, the rest became relatively easy. The updated app.post function now looks like this:
app.post('/update', jsonParser, function (req, res) {
    if (!req.body) return res.sendStatus(400);
    else {
        for (var datapoint in req.body) {
            // create a new instance of the LocationsCollection document
            var point = new LocationsCollection({
                X: Number(req.body[datapoint]["x"]),
                Y: Number(req.body[datapoint]["y"]),
                Orientation: Number(req.body[datapoint]["orientation"]),
                Time: req.body[datapoint]["timestamp"],
                UserID: req.body[datapoint]["id"]
            });
            // insert the newly constructed document into the database
            point.save(function(err, point) {
                if (err) return console.error(err);
                else console.dir(point);
            });
        }
    }
});
I can test whether this worked by putting the following code inside the callback that runs once the MongoDB connection is first established:
// Find all location points and print them to the console.
console.log("Searching for all documents in Location Points Collection");
LocationsCollection.find(function(err, data) {
    if (err) console.error(err);
    else console.dir(data);
});
This will print any documents that have been previously added to the database. Hopefully this helps.
Try something like this.
var app = express();
var bodyParser = require('body-parser');

app.use(bodyParser.json({
    limit: 1024 * 1024,
    verify: function(req, res, buf) {
        try {
            JSON.parse(buf);
        } catch (e) {
            res.send({
                error: 'BROKEN_JSON'
            });
        }
    }
}));
It should be a simple for (var key in obj) loop:
app.post('/update', jsonParser, function (req, res) {
    var locationObject = req.body,   // req.body is an object, not a function
        insertObjects = [],
        key;
    // loop through each datapoint and collect it into our array of objects to insert
    for (key in locationObject) {
        insertObjects.push(locationObject[key]);
    }
    if (!insertObjects.length) {
        // if we don't have any objects to insert we still return a 200, we just don't insert anything
        return res.status(200).send({
            success: true,
            message: 'Nothing inserted, 0 locations in POST body',
            count: 0
        });
    }
    LocationsCollection.create(insertObjects, function (err, docs) {   // don't shadow the outer `res`
        if (err) {
            return res.status(400).send({
                success: false,
                message: err.message
            });
        }
        // we have successfully inserted our objects. let's tell the client.
        res.status(200).send({
            success: true,
            message: 'successfully inserted locations',
            count: insertObjects.length
        });
    });
});
Mongoose's create accepts an array, so multiple documents can be inserted with a single callback, which makes this a lot easier.
It also validates against the schema, ensuring only proper documents are created.
