fs.writeFile refreshing index.html whenever called (JavaScript)

I have an issue where I am writing data from an array to a JSON file every 10 seconds on an Express server, and this is causing the main page to reload whenever the writeFile function is called.
The main page makes a GET request to retrieve the Entry objects in the array, but I don't understand why it reloads: the array isn't being changed in any way, it is only being read in order to write the JSON file.
In index.js (server code):
const server = require('./app');
const { readFromFile, writeToFile } = require('./helpers/readWrite');

const port = process.env.PORT || 3000;

readFromFile();

// start the server
server.listen(port, () => {
  console.log(`Listening at http://localhost:${port}`);
  // set the server to save to file every 10 seconds
  setInterval(writeToFile, 10000);
});
In readWrite.js:
const fs = require('fs'); // needed for fs.writeFile
const Entry = require('../models/entry'); // file containing the array that the data is written from

function writeToFile() {
  const entriesDataStringified = JSON.stringify(Entry.all); // stringify the entriesData array
  // write to the json file, overwriting any data already in the file
  fs.writeFile('./data/entries.json', entriesDataStringified, (err) => {
    // check for error when writing the file
    if (err) {
      console.log(err);
    } else {
      console.log('File successfully written');
    }
  });
}
Retrieving entries on the client side:
async function getPosts(e) {
  try {
    const response = await fetch(`http://localhost:3000/search/page/${pageNum}`);
    const data = await response.json();
    console.log(data);
    data.entries.forEach(post => {
      if (!postArray.includes(post)) {
        newestArray.push(post);
        postArray.push(post);
        emojiArray.push({ id: post.id, emojis: { loveCount: false, laughCount: false, likeCount: false } });
      }
    });
    console.log(emojiArray);
    Post.drawAll();
    pageNum++;
  } catch (err) {
    console.log(err);
  }
}
Thanks.
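For what it's worth, this symptom is often caused not by the array but by a dev file watcher (nodemon, live-server, browser-sync or similar) restarting the server or reloading the page whenever ./data/entries.json changes. That is only an assumption about this setup; a quick way to test it is to temporarily point writeToFile at a location outside the watched project directory (os.tmpdir() here is just an example target) and see whether the reloads stop:

// Hypothetical diagnostic sketch for readWrite.js: write the snapshot outside
// the project directory, and therefore outside any watcher's scope. If the
// page stops reloading, a watcher on ./data/ is the likely culprit.
const fs = require('fs');
const os = require('os');
const path = require('path');
const Entry = require('../models/entry');

function writeToFile() {
  const entriesDataStringified = JSON.stringify(Entry.all);
  const target = path.join(os.tmpdir(), 'entries.json'); // e.g. /tmp/entries.json
  fs.writeFile(target, entriesDataStringified, (err) => {
    if (err) console.log(err);
    else console.log(`File successfully written to ${target}`);
  });
}

If nodemon turns out to be what restarts the server, its ignore option (for example an "ignore" entry for the data directory in nodemon.json, or the --ignore flag) would be the longer-term fix.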

Related

Electron nedb-promises storage file gets replaced on every app start

I've been trying for a few hours now to understand why this happens. In my Electron app I would like to use the nedb-promises package ("nedb-promises": "^6.2.1"). Installation and configuration work so far, but on every app start (dev & prod) the db file gets replaced by a new/empty one. Shouldn't the package handle that?
I took the code from this example:
https://shivekkhurana.medium.com/persist-data-in-electron-apps-using-nedb-5fa35500149a
// db.js
const {app} = require('electron');
const Datastore = require('nedb-promises');

const dbFactory = (fileName) => Datastore.create({
  filename: `${process.env.NODE_ENV === 'development' ? '.' : app.getPath('userData')}/data/${fileName}`,
  timestampData: true,
  autoload: true
});

const db = {
  customers: dbFactory('customers.db'),
  tasks: dbFactory('tasks.db')
};

module.exports = db;
import db from './db'
....
// load task should not be important because file is already replaced when arriving here
ipcMain.handle('Elements:Get', async (event, args) => {
  // 'Select * from Customers'
  let data = await db.customers.find({});
  console.log(data);
  return data;
})
...
// Set an item
ipcMain.handle('Element:Save', async (event, data) => {
  console.log(data)
  const result = db.customers.insertOne(data.item).then((newDoc) => {
    console.log(newDoc)
    return newDoc
  }).catch((err) => {
    console.log("Error while Adding")
    console.log(err)
  });
  console.log(result);
  return result;
})
Note: after "adding", newDoc contains the new element, and when checking the file manually in the filesystem the element is there. When I then close the app and open it again, the file has been replaced.
I've checked the docs up and down; I have no clue what I'm doing wrong. Thanks for your help.
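Not an answer, but a diagnostic sketch that may narrow this down: the filename passed to Datastore.create() depends on process.env.NODE_ENV and on the relative path '.', so if either resolves differently between two starts (a different working directory, or NODE_ENV unset), each start would quietly open a brand-new .db file rather than the old one. Logging the fully resolved path on every start makes that easy to verify (path.resolve and the console.log line are additions; the rest mirrors the db.js above):

// db.js -- same factory as above, with the resolved path logged for debugging
const {app} = require('electron');
const path = require('path');
const Datastore = require('nedb-promises');

const dbFactory = (fileName) => {
  const baseDir = process.env.NODE_ENV === 'development' ? '.' : app.getPath('userData');
  const filename = path.resolve(`${baseDir}/data/${fileName}`);
  console.log(`[nedb] NODE_ENV=${process.env.NODE_ENV || '(unset)'} -> opening ${filename}`);
  return Datastore.create({ filename, timestampData: true, autoload: true });
};

const db = {
  customers: dbFactory('customers.db'),
  tasks: dbFactory('tasks.db')
};

module.exports = db;

If the two runs print different paths, the "replacement" is really a second, empty database being created in a new location.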

How to send data in chunks from the server (Node.js) to the client (React.js)?

I ended up with a server that provides pictures in base64 format. The problem is that when I want to send an array of base64 images, it is too huge.
I want to send the array in little chunks to the frontend.
I use Sequelize to retrieve the data from the database.
Does someone have an idea how to do it?
This is how my endpoint looks so far:
router.get('/download', async (req, res, next) => {
  try {
    const getAllStoneData = await StoneData.findAll();
    const convertBlobToString = getAllStoneData.map((oneStone) => {
      const getBuffer = oneStone.dataValues.blobImage;
      const convertToString = getBuffer.toString('utf8');
      const copyOneStone = {
        ...oneStone.dataValues,
        blobImage: convertToString,
      };
      return copyOneStone;
    });
    let chunk = [];
    while (convertBlobToString.length > 0) {
      chunk = convertBlobToString.splice(0, 1);
      res.write(chunk);
    }
    res.end();
  } catch (error) {
    res.status(400).send({ error });
  }
});
When I run this I get the error:
"UnhandledPromiseRejectionWarning: Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client"

Adding multiple BigQuery JSON credential files in Node project

I've been working on a Node project that involves fetching some data from BigQuery. Everything has been fine so far; I have my credential.json file (from BigQuery) and the project works as expected.
However, I want to implement a new feature in the project and this would involve fetching another set of data from BigQuery. I have an entirely different credential.json file for this new dataset. My project seems to recognize only the initial credential.json file I had (I named them differently though).
Here's a snippet of how I linked my first credential.json file:
function createCredentials() {
  try {
    const encodedCredentials = process.env.GOOGLE_AUTH_KEY;
    if (typeof encodedCredentials === 'string' && encodedCredentials.length > 0) {
      const google_auth = atob(encodedCredentials);
      if (!fs.existsSync('credentials.json')) {
        fs.writeFile("credentials.json", google_auth, function (err) {
          if (err) console.log(err);
          else console.log("Successfully Written to File.");
        });
      }
    }
  } catch (error) {
    logger.warn(`Ensure that the environment variable for GOOGLE_AUTH_KEY is set correctly; the full error is given here: ${error.message}`);
    process.kill(process.pid, 'SIGTERM');
  }
}
Is there a way to fuse my two credential.json files together? If not, how can I separately declare which credential.json file to use?
If not, how can I separately declare which credential.json file to use?
What I would do is create a single function that is the exit point to BigQuery, and pass an identifier to that function saying which credential to generate; that credential is then used when calling BigQuery.
The code below assumes you change this:
function createCredentials(){
  try{
    const encodedCredentials = process.env.GOOGLE_AUTH_KEY;
To this:
function createCredentials(auth){
  try{
    const encodedCredentials = auth;
And you can use it like this:
import BigQuery from '@google-cloud/bigquery';
import {GoogApi} from "../apiManager" // Private code to get Token from client DB

if (!global._babelPolyfill) {
  var a = require("babel-polyfill")
}

describe('Check routing', async () => {
  it('Test stack ', async (done, auth) => {
    // Fetch client Auth from local Database
    // Replace the 2 values below with real values
    const tableName = "myTest";
    const dataset = "myDataset";
    try {
      const bigquery = new BigQuery({
        projectId: `myProject`,
        keyFilename: this.createCredentials(auth)
      });
      await bigquery.createDataset(dataset)
        .then(args => {
          console.log(`Create dataset, result is: ${args}`)
        })
        .catch(err => {
          console.log(`Error in the process: ${err.message}`)
        })
    } catch (err) {
      console.log("err", err)
    }
  })
})
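To make the "one credential per dataset" idea concrete, here is a hedged sketch (the GOOGLE_AUTH_KEY_* variable names and file names are made up for illustration, and Buffer.from is used in place of atob): the function takes an identifier, writes a dedicated credentials file for it if needed, and returns the path so it can be passed straight to keyFilename:

const fs = require('fs');

// name: an identifier for the dataset/project, e.g. 'main' or 'reports'
function createCredentials(name) {
  // hypothetical per-dataset env vars: GOOGLE_AUTH_KEY_MAIN, GOOGLE_AUTH_KEY_REPORTS, ...
  const encodedCredentials = process.env[`GOOGLE_AUTH_KEY_${name.toUpperCase()}`];
  const filePath = `credentials.${name}.json`;
  if (typeof encodedCredentials === 'string' && encodedCredentials.length > 0) {
    if (!fs.existsSync(filePath)) {
      // decode the base64 env var and persist it as its own credentials file
      fs.writeFileSync(filePath, Buffer.from(encodedCredentials, 'base64').toString('utf8'));
    }
  }
  return filePath;
}

// usage sketch:
// const bigquery = new BigQuery({ projectId: 'myProject', keyFilename: createCredentials('reports') });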

Streaming JSON data to React results in unexpected end of JSON input

I'm trying to stream a lot of data from a NodeJS server that fetches the data from Mongo and sends it to React. Since it's quite a lot of data, I've decided to stream it from the server and display it in React as soon as it comes in. Here's a slightly simplified version of what I've got on the server:
const getQuery = async (req, res) => {
  const { body } = req;
  const query = mongoQueries.buildFindQuery(body);
  res.set({ 'Content-Type': 'application/octet-stream' });
  Log.find(query).cursor()
    .on('data', (doc) => {
      console.log(doc);
      const data = JSON.stringify(doc); // stringify the current document from the cursor
      res.write(`${data}\r\n`);
    })
    .on('end', () => {
      console.log('Data retrieved.');
      res.end();
    });
};
Here's the React part:
fetch(url, { // this fetch fires the getQuery function on the backend
  method: "POST",
  body: JSON.stringify(object),
  headers: {
    "Content-Type": "application/json",
  }
})
  .then(response => {
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    const pump = () =>
      reader.read().then(({ done, value }) => {
        if (done) return this.postEndHandler();
        console.log(value.length); // !!!
        const decoded = decoder.decode(value);
        this.display(decoded);
        return pump();
      });
    return pump();
  })
  .catch(err => {
    console.error(err);
    toast.error(err.message);
  });
}

display(chunk) {
  const { data } = this.state;
  try {
    const parsedChunk = chunk.split('\r\n').slice(0, -1);
    parsedChunk.forEach(e => data.push(JSON.parse(e)));
    return this.setState({data});
  } catch (err) {
    throw err;
  }
}
It's 50/50 whether it completes with no issues or fails on React's side of things. When it fails, it's always because of an incomplete JSON object in parsedChunk.forEach. I did some digging, and it turns out that every time it fails, the console.log I marked with three exclamation marks shows 65536. I'm 100% certain it's got something to do with my streams implementation and that I'm not queuing the chunks correctly, but I'm not sure whether I should be fixing it client-side or server-side. Any help would be greatly appreciated.
Instead of implementing your own NDJSON-like streaming JSON protocol, which is basically what you are doing here (with all the pitfalls of dividing the stream into chunks and packets, which is not always under your control), you can take a look at some of the existing tools created to do what you need, e.g.:
http://oboejs.com/
http://ndjson.org/
https://www.npmjs.com/package/stream-json
https://www.npmjs.com/package/JSONStream
https://www.npmjs.com/package/clarinet
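If you do keep the hand-rolled protocol, the client-side fix is to stop assuming that one fetch chunk contains only whole lines: decode with decoder.decode(value, { stream: true }), keep a text buffer, parse only lines already terminated by \r\n, and carry the trailing partial line over to the next chunk. A sketch of what display() could look like inside the component (the buffer instance field is an addition):

// inside the React component class
buffer = ''; // holds a trailing, not-yet-complete line between chunks

display(chunk) {
  this.buffer += chunk;
  const lines = this.buffer.split('\r\n');
  this.buffer = lines.pop(); // the last piece may be half of a JSON document
  const { data } = this.state;
  lines.filter(Boolean).forEach((line) => data.push(JSON.parse(line)));
  this.setState({ data });
}

With that in place the 65536-byte read chunks stop mattering, because a line is only parsed once its terminator has actually arrived.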

ECONNRESET socket hang up

I have a function that triggers on Firebase database onWrite. The function body uses two Google Cloud APIs (DNS and Storage).
While the function is running and working as expected (mostly), the issue is that the socket hangs up more often than I'd like (about 50% of the time).
My questions are:
Is this similar to what the rest of the testers have experienced? Is it a well-known outstanding issue, or expected behavior?
The example code is as follows:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const {credentials} = functions.config().auth;
credentials.private_key = credentials.private_key.replace(/\\n/g, '\n');
const config = Object.assign({}, functions.config().firebase, {credentials});
admin.initializeApp(config);
const gcs = require('@google-cloud/storage')({credentials});
const dns = require('@google-cloud/dns')({credentials});
const zoneName = 'applambda';
const zone = dns.zone(zoneName);

exports.createDeleteDNSAndStorage = functions.database.ref('/apps/{uid}/{appid}/name')
  .onWrite(event => {
    // Only edit data when it is first created.
    const {uid, appid} = event.params;
    const name = event.data.val();
    const dbRef = admin.database().ref(`/apps/${uid}/${appid}`);
    if (event.data.previous.exists()) {
      console.log(`already exists ${uid}/${appid}`);
      return;
    }
    // Exit when the data is deleted.
    if (!event.data.exists()) {
      console.log(`data is being deleted ${uid}/${appid}`);
      return;
    }
    const url = `${name}.${zoneName}.com`;
    console.log(`data: ${uid}/${appid}/${name}\nsetting up: ${url}`);
    setupDNS({url, dbRef});
    setupStorage({url, dbRef});
    return;
  });

function setupDNS({url, dbRef}) {
  // Create an NS record.
  let cnameRecord = zone.record('cname', {
    name: `${url}.`,
    data: 'c.storage.googleapis.com.',
    ttl: 3000
  });
  zone.addRecords(cnameRecord).then(function() {
    console.log(`done setting up zonerecord for ${url}`);
    dbRef.update({dns: url}).then(res => console.log(res)).catch(err => console.log(err));
  }).catch(function(err) {
    console.error(`error setting up zonerecord for ${url}`);
    console.error(err);
  });
}

function setupStorage({url, dbRef}) {
  console.log(`setting up storage bucket for ${url}`);
  gcs.createBucket(url, {
    website: {
      mainPageSuffix: `https://${url}`,
      notFoundPage: `https://${url}/404.html`
    }
  }).then(function(res) {
    let bucket = res[0];
    console.log(`created bucket ${url}, setting it as public`);
    dbRef.update({storage: url}).then(function() {
      console.log(`done setting up bucket for ${url}`);
    }).catch(function(err) {
      console.error(`db update for storage failed ${url}`);
      console.error(err);
    });
    bucket.makePublic().then(function() {
      console.log(`bucket set as public for ${url}`);
    }).catch(function(err) {
      console.error(`setting public for storage failed ${url}`);
      console.error(err);
    });
  }).catch(function(err) {
    console.error(`creating bucket failed ${url}`);
    console.error(err);
  });
}
I'm thinking your function needs to return a promise so that all the other async work has time to complete before the function shuts down. As it's shown now, your function simply returns immediately without waiting for the work to complete.
I don't know the Cloud APIs you're using very well, but I'd guess that you should make setupDNS() and setupStorage() return the promises from the async work they're doing, then return Promise.all() passing those two promises, to let Cloud Functions know it should wait until all of that work is complete before cleaning up the container that's running the function.
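A sketch of what that advice looks like in the handler above (assuming setupDNS() and setupStorage() are changed so that they return the promise chains they already build internally):

exports.createDeleteDNSAndStorage = functions.database.ref('/apps/{uid}/{appid}/name')
  .onWrite(event => {
    const {uid, appid} = event.params;
    const name = event.data.val();
    const dbRef = admin.database().ref(`/apps/${uid}/${appid}`);
    if (event.data.previous.exists()) {
      console.log(`already exists ${uid}/${appid}`);
      return null;
    }
    if (!event.data.exists()) {
      console.log(`data is being deleted ${uid}/${appid}`);
      return null;
    }
    const url = `${name}.${zoneName}.com`;
    // Returning the combined promise keeps the function instance alive until
    // both pieces of async work have settled.
    return Promise.all([
      setupDNS({url, dbRef}),    // must now `return zone.addRecords(...)...`
      setupStorage({url, dbRef}) // must now `return gcs.createBucket(...)...`
    ]);
  });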
