Missing data when saving a model to a JSON file - javascript

I'm a beginner and I'm trying to save the model todoModel into data.json, but some fields (title, description) are not saved.
app.js
const http = require("http");
const todoRouter = require("./routes/todo.router");
const server = http.createServer(todoRouter);
const PORT = process.env.PORT || 3000;
server.listen(PORT, () =>
console.log(`Server listening on http://localhost:${PORT}`)
);
todo.router.js
const url = require("url");
const todoController = require("../controllers/todo.controller");

const todoRouter = (req, res) => {
  const urlparse = url.parse(req.url, true);
  if (urlparse.pathname == "/todos" && req.method == "POST") {
    todoController.createTodo(req, res);
  }
};

module.exports = todoRouter;
todo.controller.js
const fs = require("fs");

class todoController {
  async createTodo(req, res) {
    req.on("data", (data) => {
      if (data) {
        todos.push(todoModel);
        fs.writeFile(
          "./data/data.json",
          JSON.stringify(todos, null, 2),
          (err) => {
            if (err) throw error;
          }
        );
      }
    });
  }
}
todo.model.js
const { v4: uuidv4 } = require("uuid");
const fs = require("fs");

const data = fs.readFileSync("./data/data.json");
const jsondata = JSON.parse(data);
const title = jsondata.title;
const description = jsondata.description;

const todoModel = {
  id: uuidv4(),
  title,
  description,
  dateOfCreate: new Date(),
  lastModified: new Date(),
  check: new Boolean(false),
};

module.exports = todoModel;
The saved todoModel in data.json looks like this:
[
  {
    "id": "cb996b22-d9d8-49ee-8e35-6f8bfc005268",
    "dateOfCreate": "2021-11-06T14:53:28.608Z",
    "lastModified": "2021-11-06T14:53:28.608Z",
    "check": false
  }
]

Well, I recommend exporting only functions and constants, not variables or objects that can change. I would rather use a function here:
// todo.model.js
const { v4: uuidv4 } = require("uuid");
const fs = require("fs");

function getTodoModel() {
  const data = fs.readFileSync("./data/data.json");
  const jsondata = JSON.parse(data);
  const title = jsondata.title;
  const description = jsondata.description;
  return {
    id: uuidv4(),
    title,
    description,
    dateOfCreate: new Date(),
    lastModified: new Date(),
    check: new Boolean(false),
  };
}

module.exports = getTodoModel;
Later you simply import the function and call it:
const getTodoModel = require("./todo.model"); // adjust the path to wherever todo.model.js lives
let todoModel = getTodoModel();
The values are then evaluated each time the function is called, rather than once when the module is first required.

Your values title and description aren't assigned to keys in your todoModel. Just assign these values to the proper keys in your object:
const todoModel = {
  id: uuidv4(),
  title: title, // add a key for title
  description: description, // and add a key for description here
  dateOfCreate: new Date(),
  lastModified: new Date(),
  check: new Boolean(false),
};
But the problem doesn't stop there. You read the JSON file, load its content, and then write that same content back without any modification. In todo.controller.js you load todoModel, which is built from the content of the file, then you push it onto the todos array and write it right back to the file. Since todoModel is never modified, this accomplishes essentially nothing.
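For reference, here is a minimal sketch of a createTodo that builds the todo from the incoming request body instead of from the file. The field names, the uuid id, and the ./data/data.json path are taken from the question; exporting an instance at the end is an assumption so that the router's todoController.createTodo(req, res) call works.
// todo.controller.js
const fs = require("fs");
const { v4: uuidv4 } = require("uuid");

class todoController {
  async createTodo(req, res) {
    req.on("data", (chunk) => {
      // Build the todo from the request body, not from the file's old content
      const { title, description } = JSON.parse(chunk);
      const todos = JSON.parse(fs.readFileSync("./data/data.json"));
      todos.push({
        id: uuidv4(),
        title,
        description,
        dateOfCreate: new Date(),
        lastModified: new Date(),
        check: false,
      });
      fs.writeFile("./data/data.json", JSON.stringify(todos, null, 2), (err) => {
        if (err) throw err;
        res.end("created");
      });
    });
  }
}

module.exports = new todoController(); // assumed so the router's call works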

Related

How to make a model in another file

I'm a beginner and tried to move the model to another file, but it didn't work for me. Please suggest how to do it correctly. The question may seem silly, but if I knew the answer, I wouldn't ask it.
file todo.controller.js
const fs = require("fs");
const { v4: uuidv4 } = require("uuid");

const data = fs.readFileSync("./data/data.json");
let todos = JSON.parse(data);

class todoController {
  async createTodo(req, res) {
    req.on("data", (data) => {
      const jsondata = JSON.parse(data);
      const title = jsondata.title;
      const description = jsondata.description;
      if ((title, description)) {
        todos.push({
          id: uuidv4(),
          title,
          description,
          dateOfCreate: new Date(),
          lastModified: new Date(),
          check: new Boolean(false),
        });
        fs.writeFile(
          "./data/data.json",
          JSON.stringify(todos, null, 2),
          (err) => {
            if (err) throw error;
          }
        );
      }
    });
  }
}
file todo.router.js
const url = require("url");
const todoController = require("../controllers/todo.controller");

const todoRouter = (req, res) => {
  const urlparse = url.parse(req.url, true);
  if (urlparse.pathname == "/todos" && req.method == "POST") {
    todoController.createTodo(req, res);
  }
};

module.exports = todoRouter;
The data file is data.json.
You have two separate problems here: splitting your code into a different file, and saving (persisting) the data somewhere, in this case a file.
You have to create something like a data model and then import it in your other code.
// data.js
export const get = async () => {} // we will implement this just now
export const set = async (data) => {} // we will implement this just now
...
// controller.js
import { get, set } from './data.js' // import the methods we just created
...
const createTodo = async (req, res) => {
  req.on("data", (data) => {
    // here you can use get() if you want to read the existing data
    set(JSON.parse(data)) // parse the request body and hand it to your data model
  })
}
Then we also have to actually do something with those methods.
// data.js
import fs from 'fs/promises' // promise-based fs so the async functions can await it
export const get = async () => {
  // may need to use JSON.parse here depending on how you'll use it
  return fs.readFile('./data.json')
}
export const set = async (data) => {
  return fs.writeFile('./data.json', JSON.stringify(data))
}
So the idea is to have a model responsible for managing the data, retrieving it and saving it, and then to import and use those methods in the main controller. The code above isn't perfect; it's just to show you how to think about it.
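Since the question's code uses CommonJS, here is a hedged sketch of the same split in that style. The ./data/data.json path and the title/description fields come from the question, the helper names get and set mirror the answer above, and everything else is illustrative.
// data.js - the model is the only place that touches the file
const fs = require('fs/promises');
const FILE = './data/data.json';

const get = async () => JSON.parse(await fs.readFile(FILE, 'utf-8'));
const set = async (todos) => fs.writeFile(FILE, JSON.stringify(todos, null, 2));

module.exports = { get, set };

// todo.controller.js - the controller only coordinates
const { get, set } = require('./data'); // adjust the path to wherever data.js lives

const createTodo = (req, res) => {
  req.on('data', async (chunk) => {
    const { title, description } = JSON.parse(chunk);
    const todos = await get();          // read the current list
    todos.push({ title, description }); // add the new item
    await set(todos);                   // write it back to the file
    res.end(JSON.stringify(todos));
  });
};

module.exports = { createTodo };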

string to bufferstream not always writing data

I have a Cloud Function receiving a JSON string from a Pub/Sub topic.
The goal is to extract some data into a new JSON string, then parse it as JSONL, and finally stream it to Google Cloud Storage.
I notice that sometimes the files contain data and sometimes they do not.
The Pub/Sub side is working fine and data is coming into this Cloud Function as expected.
I tried adding async/await where it seemed to fit, but I suspect the problem has to do with the buffer stream; both are topics I have trouble getting my head around.
What could be the issue?
const stream = require('stream');
const { Storage } = require('@google-cloud/storage');

// Initiate the source
const bufferStream = new stream.PassThrough();

// Creates a client
const storage = new Storage();

// save stream to bucket
const toBucket = (message, filename) => {
  // Write your buffer
  bufferStream.end(Buffer.from(message));
  const myBucket = storage.bucket(process.env.BUCKET);
  const file = myBucket.file(filename);
  // Pipe the 'bufferStream' into a 'file.createWriteStream' method.
  bufferStream.pipe(file.createWriteStream({
    validation: 'md5',
  }))
    .on('error', (err) => { console.error(err); })
    .on('finish', () => {
      // The file upload is complete.
      console.log(`${filename} is uploaded`);
    });
};
// extract correct fields
const extract = (entry) => ({
id: entry.id,
status: entry.status,
date_created: entry.date_created,
discount_total: entry.discount_total,
discount_tax: entry.discount_tax,
shipping_total: entry.shipping_total,
shipping_tax: entry.shipping_tax,
total: entry.total,
total_tax: entry.total_tax,
customer_id: entry.customer_id,
payment_method: entry.payment_method,
payment_method_title: entry.payment_method_title,
transaction_id: entry.transaction_id,
date_completed: entry.date_completed,
billing_city: entry.billing.city,
billing_state: entry.billing.state,
billing_postcode: entry.billing.postcode,
coupon_lines_id: entry.coupon_lines.id,
coupon_lines_code: entry.coupon_lines.code,
coupon_lines_discount: entry.coupon_lines.discount,
coupon_lines_discount_tax: entry.coupon_lines.discount_tax,
});
// format json to jsonl
const format = async (message) => {
  let jsonl;
  try {
    // extract only the necessary
    const jsonMessage = await JSON.parse(message);
    const rows = await jsonMessage.map((row) => {
      const extractedRow = extract(row);
      return `${JSON.stringify(extractedRow)}\n`;
    });
    // join all lines as one string with no join symbol
    jsonl = rows.join('');
    console.log(jsonl);
  } catch (e) {
    console.error('jsonl conversion failed');
  }
  return jsonl;
};
exports.jsonToBq = async (event, context) => {
  const message = Buffer.from(event.data, 'base64').toString();
  const { filename } = event.attributes;
  console.log(filename);
  const jsonl = await format(message, filename);
  toBucket(jsonl, filename);
};
It's fixed by moving the bufferStream constant into the toBucket function.
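For clarity, here is a sketch of that change, reusing the code from the question with nothing else modified. Creating the PassThrough inside toBucket means every invocation gets its own stream; the module-level stream in the original is created once per function instance and, once ended, can't be reused for the next message, which would explain why some files came out empty.
const stream = require('stream');
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();

// save stream to bucket - the PassThrough is now created per call
const toBucket = (message, filename) => {
  // A fresh stream for every invocation, so an already-ended stream is never reused
  const bufferStream = new stream.PassThrough();
  bufferStream.end(Buffer.from(message));

  const myBucket = storage.bucket(process.env.BUCKET);
  const file = myBucket.file(filename);

  bufferStream.pipe(file.createWriteStream({ validation: 'md5' }))
    .on('error', (err) => { console.error(err); })
    .on('finish', () => { console.log(`${filename} is uploaded`); });
};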

Pass PDF as Blob from Azure to Node to React

I am attempting to grab a PDF stored in Azure Blob Storage via a Node backend and then serve that PDF file to a React frontend. I am using Microsoft's @azure/storage-blob with a BlockBlobClient, but every example I find online converts the readableStreamBody to a string. The blob has a content type of application/pdf. I've tried passing the readableStreamBody and the pure output to the frontend, but those result in broken PDFs. I also followed the documentation online and made it a string and passed that to the frontend. That produced a PDF that would open and had the proper number of pages but was completely blank.
Node.js Code on the Backend
app.get('/api/file/:company/:file', (req, res) => {
  const containerClient = blobServiceClient.getContainerClient(req.params.company);
  const blockBlobClient = containerClient.getBlockBlobClient(req.params.file);
  blockBlobClient.download(0)
    .then(blob => streamToString(blob.readableStreamBody))
    .then(response => res.send(response));
});
Front-end code
getFileBlob = (company, file) => {
  axios(`/api/file/${company}/${file}`, { method: 'GET', responseType: 'blob' })
    .then(response => {
      const file = new Blob(
        [response.data],
        { type: 'application/pdf' }
      );
      const fileURL = URL.createObjectURL(file);
      window.open(fileURL);
    })
    .catch(error => {
      console.log(error);
    });
}
This might help you; it's working for me.
Node
var express = require('express');
const { BlobServiceClient } = require('@azure/storage-blob');
var router = express.Router();

const AZURE_STORAGE_CONNECTION_STRING =
  'YOUR_STRING';

async function connectAzure() {
  // Create the BlobServiceClient object which will be used to create a container client
  const blobServiceClient = BlobServiceClient.fromConnectionString(
    AZURE_STORAGE_CONNECTION_STRING
  );
  const containerName = 'filestorage';
  const blobName = 'sample.pdf';
  console.log('\nConnecting container...');
  console.log('\t', containerName);
  // Get a reference to a container
  const containerClient = blobServiceClient.getContainerClient(containerName);
  // Get a block blob client
  const blockBlobClient = containerClient.getBlockBlobClient(blobName);
  for await (const blob of containerClient.listBlobsFlat()) {
    console.log('\t', blob.name);
  }
  const downloadBlockBlobResponse = await blockBlobClient.download(0);
  const data = await streamToString(downloadBlockBlobResponse.readableStreamBody);
  return data;
}

async function streamToString(readableStream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readableStream.on('data', data => {
      chunks.push(data.toString());
    });
    readableStream.on('end', () => {
      resolve(chunks.join(''));
    });
    readableStream.on('error', reject);
  });
}

router.get('/', async function(req, res, next) {
  const data = await connectAzure();
  res.send({ data }).status(200);
});

module.exports = router;
Front-end
function createFile() {
  fetch('/createfile').then(res => {
    res.json().then(data => {
      var blob = new Blob([data.data], { type: 'application/pdf' });
      var fileURL = URL.createObjectURL(blob);
      // 'filename' and 'a' (an anchor element) are assumed to be defined
      // elsewhere in the answer's page code; they are not shown here.
      if (filename) {
        if (typeof a.download === 'undefined') {
          window.location.href = fileURL;
        } else {
          window.open(fileURL, '_blank');
        }
      }
    });
  }).catch(err => console.log(err));
}
HTML
<body><h1>Express</h1><p>Welcome to Express</p><button onclick="createFile()">Create File</button></body>

Node.js: fs.writeFile writing data to file twice

The fs.writeFile code in the backend is running twice: the 'data appended' log appears in the console twice and the data is written to the JSON file twice.
Any idea why this happens?
Any advice is greatly appreciated.
EDIT: this seems like a front-end problem. onFavSubmit is running twice...
Front-end
constructor(props) {
  super(props);
  this.state = {
    inputOne: '',
    chosenOne: ['Favourite Movie', 'X'],
    chosenTwo: ['2nd Favourite Movie', 'X'],
    chosenThree: ['3rd Favourite Movie', 'X'],
    movies: [],
  };
  this.onFavSubmit = this.onFavSubmit.bind(this);
  this.onReset = this.onReset.bind(this);
}

onFavSubmit = event => {
  const newFav = {
    first: this.state.chosenOne[0],
    second: this.state.chosenTwo[0],
    third: this.state.chosenThree[0]
  }
  if (this.state.chosenOne[1] === 'X' || this.state.chosenTwo[1] === 'X' || this.state.chosenThree[1] === 'X') {
    alert('Need All 3 Favourite Shows')
    event.preventDefault();
  } else {
    axios.post('http://localhost:8001/fav', { newFav })
      .then(
        alert('Successfully Added'),
        this.onReset()
      )
      .catch(err => console.log(err.response))
  }
}
<button className="fav__button" type="button" onClick={this.onFavSubmit}>Click Me</button>
Back-end
const express = require("express");
const favData = require("../data/fav.json");
const fs = require('fs');
const router = express.Router();

router.get("/", (_, res) => {
  res.json(favData);
});

router.post("/", (req, res) => {
  const newFirst = req.body.newFav.first;
  const newSecond = req.body.newFav.second;
  const newThird = req.body.newFav.third;
  const newfavData = {
    First: newFirst,
    Second: newSecond,
    Third: newThird,
  };
  fs.readFile('./data/fav.json', 'utf-8', function (err, data) {
    var json = JSON.parse(data);
    json.push(newfavData);
    console.log(newfavData);
    fs.writeFile('./data/fav.json', JSON.stringify(json), function (err) {
      if (err) throw err;
      console.log('data appended');
      return;
    });
  });
});

module.exports = router;
I don't have enough reputation, so I can't comment.
Have you tried commenting out this.onReset() to see if that fixes the problem?
There have been times when I was sending a request to "/" on reload without realising it.
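If it helps, here is a minimal sketch of that test using the handler from the question, with nothing else changed: move alert and onReset into a callback so they only run after the response, and temporarily comment out this.onReset() to see whether the duplicate request disappears. Note that as written in the question, .then(alert('Successfully Added'), this.onReset()) invokes both calls immediately when the promise is created and passes onReset's return value as the rejection handler.
// Sketch for testing the suggestion above - only the success handling changes
onFavSubmit = event => {
  const newFav = {
    first: this.state.chosenOne[0],
    second: this.state.chosenTwo[0],
    third: this.state.chosenThree[0]
  };
  if (this.state.chosenOne[1] === 'X' || this.state.chosenTwo[1] === 'X' || this.state.chosenThree[1] === 'X') {
    alert('Need All 3 Favourite Shows');
    event.preventDefault();
  } else {
    axios.post('http://localhost:8001/fav', { newFav })
      .then(() => {
        alert('Successfully Added');
        // this.onReset();   // temporarily commented out while testing for the double write
      })
      .catch(err => console.log(err.response));
  }
};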

Express and Mongoose - property is not being saved

TL;DR: I'm trying to save a new object; one of the fields is not saving, while the others save fine.
I've got a Mongoose schema with a property called superPlotId:
const mongoose = require('mongoose');
const GeoJSON = require('mongoose-geojson-schema');
const Schema = mongoose.Schema;

const plotSchema = new Schema(
  {
    ...fields...
    superPlotId: String,
    ...more fields
  },
  { strict: false },
  { bufferCommands: false }
);

// create model class
const ModelClass = mongoose.model('plot', plotSchema);

// export model
module.exports = ModelClass;
I'm trying to save a new object fitting this schema with Express, like this:
exports.newPlot = async (req, res, next) => {
  const {
    ...a bunch of fields...
    superPlotId
  } = req.body.props;
  const plot = new Plot({
    ...a bunch of fields...
    superPlotId
  });
  console.log(('new plot:', JSON.stringify(plot)));
  try {
    const newPlot = await plot.save();
    res.json(newPlot);
  } catch (e) {
    console.log("couldn't save new plot", JSON.stringify(e));
    return res.status(422).send({ error: { message: e, resend: true } });
  }
};
I know that a properly formatted object is hitting the endpoint, because the console.log above shows it:
{...bunch of fields..."superPlotId":"5a9e9f9f0f8a8026005fe1e7"}
And yet the plot appears in my database without the superPlotId field.
Anyone know what I'm missing here?
Try this:
try {
  let plot = new Plot();
  plot = Object.assign(plot, req.body.props);
  const newPlot = await plot.save();
  res.json(newPlot);
} catch (e) {
  console.log("couldn't save new plot", JSON.stringify(e));
  return res.status(422).send({
    error: {
      message: e,
      resend: true
    }
  });
}
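A brief note on the design choice: Object.assign copies every enumerable own property of req.body.props onto the document, so nothing depends on keeping a manual field list in sync with the schema. A hypothetical illustration (the values and the require path are made up):
const Plot = require('../models/plot'); // hypothetical path to the schema file above

const props = { name: 'Plot A', superPlotId: 'abc123' }; // stand-in for req.body.props

// Manual destructuring: any field not listed is silently dropped
const { name } = props;            // superPlotId is never pulled out
const manual = new Plot({ name }); // saved without superPlotId

// Object.assign: every property in props is copied onto the document
const assigned = Object.assign(new Plot(), props);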
