Related
I am trying to use the Faker npm package to seed data into my new database so that I can properly test my filters. The issue is that I am unable to use both require and import in the same app. All of my other packages make use of require, while faker has to use import. I have come across a few suggested fixes, but none of them seem to work.
What I've done so far is to include the below lines of code at the top of my server.js, as well as adding "type": "module" to my package.json.
I think the issue may have something to do with how my routes are configured but I'm not 100% sure.
The error I am getting with this configuration is as below
node:internal/errors:465
ErrorCaptureStackTrace(err);
^
Error [ERR_REQUIRE_ESM]: require() of ES Module C:\Users\darre\Desktop\Web Development\foodappbackend\routes\subscribers.js from C:\Users\darre\Desktop\Web Development\foodappbackend\server.js not supported.
subscribers.js is treated as an ES module file as it is a .js file whose nearest parent package.json contains "type": "module" which declares all .js files in that package scope as ES modules.
Instead rename subscribers.js to end in .cjs, change the requiring code to use dynamic import() which is available in all CommonJS modules, or change "type": "module" to "type": "commonjs" in C:\Users\darre\Desktop\Web Development\foodappbackend\package.json to treat all .js files as CommonJS (using .mjs for all ES modules instead).
at file:///C:/Users/darre/Desktop/Web%20Development/foodappbackend/server.js:28:27
at async Promise.all (index 0) {
code: 'ERR_REQUIRE_ESM'
}
ADDED CODE AT TOP OF SERVER.JS
// Bridge helper: expose a CommonJS-style require() inside this ES module
// (needed because package.json declares "type": "module", which makes every
// .js file in the package an ES module by default).
import {createRequire} from "module";
const require = createRequire(
import.meta.url
);
PACKAGE.JSON
{
"name": "foodappbackend",
"version": "1.0.0",
"description": "",
"type": "module",
"main": "app.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC",
"dependencies": {
"@faker-js/faker": "^7.2.0",
"bcrypt": "^5.0.1",
"body-parser": "^1.20.0",
"cors": "^2.8.5",
"dotenv": "^16.0.1",
"express": "^4.18.1",
"express-session": "^1.17.3",
"mongoose": "^6.3.3",
"passport": "^0.5.3",
"passport-facebook": "^3.0.0",
"passport-google-oauth20": "^2.0.0",
"passport-local": "^1.0.0",
"passport-local-mongoose": "^7.1.0"
}
}
SERVER.JS
// server.js — ESM entry point ("type": "module" in package.json).
// createRequire lets this ES module keep using require() for CommonJS
// packages; note that require() of a file that is itself an ES module
// (e.g. ./routes/subscribers.js under "type": "module") still fails
// with ERR_REQUIRE_ESM.
import {createRequire} from "module";
const require = createRequire(import.meta.url);

require("dotenv").config();
const express = require("express");
const mongoose = require("mongoose");
const app = express();
const cors = require("cors");

mongoose.connect(process.env.DATABASE_URL);
const db = mongoose.connection;
// BUG FIX: the handler previously ignored its argument and referenced an
// undefined `error` variable; accept the emitted error explicitly.
db.on("error", (error) => console.error(error));
db.once("open", () => console.log("connected to database"));

app.use(express.json());
app.use(cors());

const subscribersRouter = require("./routes/subscribers");
const restaurantsRouter = require("./routes/restaurants");
const ordersRouter = require("./routes/orders");
app.use("/subscribers", subscribersRouter);
app.use("/restaurants", restaurantsRouter);
app.use("/orders", ordersRouter);

app.listen(3000, () => {
    console.log("Server has started on port 3000");
});
RESTAURANTS.JS
const express = require("express")
const router = express.Router()
const Restaurant = require("../models/restaurant")
// router.get("/test", (req, res) => {
// const randomName = faker.name.firstName()
// console.log(randomName)
// })
// FILTER OPTIONS
// FILTER OPTIONS
// Responds with [matchingMenuItems, matchingRestaurants] for the caller's
// chosen categories, optionally capped by a per-head spend limit.
// NOTE(review): reading a request body on GET is unusual; many HTTP clients
// will not send one — consider POST or query parameters.
router.get("/filter", async (req, res) => {
    // User's chosen categories / spend sent through the request body.
    // (BUG FIX: the old `await req.body.x` awaits were no-ops and removed.)
    const chosenCats = req.body.categories
    var spendPerHead = req.body.spendperhead
    const numberOfHeads = req.body.numberofheads
    // When a single spend value is supplied, repeat it once per head so the
    // later price check can index into the array uniformly.
    if (spendPerHead != null && spendPerHead.length === 1) {
        const duplicateItems = (arr, numberOfRepetitions) =>
            arr.flatMap(i => Array.from({
                length: numberOfRepetitions
            }).fill(i));
        spendPerHead = duplicateItems(spendPerHead, numberOfHeads);
    } else {
        console.log("no SPH duplication needed")
    }
    console.log(spendPerHead)
    // Restaurants whose categories overlap the requested categories.
    let foundMatch = await Restaurant.aggregate([{
        $match: {
            categories: {
                $in: chosenCats
            }
        }
    }])
    // Menu items (across all matched restaurants) matching the categories.
    let result = []
    let fullResult = []
    for (let i = 0; i < foundMatch.length; i++) {
        for (const option of foundMatch[i].menue) {
            // BUG FIX: previously only chosenCats[0]..chosenCats[6] were
            // compared (breaking for >7 categories), and an option matching
            // several categories was pushed once per match; includes() + a
            // single push handles any number of categories without duplicates.
            if (option.categories.some(cat => chosenCats.includes(cat))) {
                // BUG FIX: the undefined check must come first — indexing
                // spendPerHead[1] when spendPerHead is undefined threw a
                // TypeError before the old else-if could ever run.
                if (spendPerHead === undefined) {
                    result.push(option)
                } else if (option.price <= spendPerHead[1]) {
                    result.push(option)
                }
            }
        }
    }
    // position 0 == menu option result, position 1 == restaurant result
    fullResult.push(result)
    fullResult.push(foundMatch)
    try {
        res.json(fullResult)
    } catch (err) {
        res.status(500).json({
            message: err.message
        })
    }
})
// Getting All
// Getting All — return every restaurant document.
router.get("/", async (req, res) => {
    try {
        res.json(await Restaurant.find())
    } catch (err) {
        res.status(500).json({ message: err.message })
    }
})
// Getting One
// Getting One — getRestaurant middleware loads res.restaurant by :id.
router.get("/:id", getRestaurant, (req, res) => res.json(res.restaurant))
// Creating One
// Creating One — build a restaurant from the request body and persist it.
router.post("/createrestaurant", async (req, res) => {
    const { src, title, description, menue, rating, categories } = req.body
    const restaurant = new Restaurant({ src, title, description, menue, rating, categories })
    try {
        const newRestaurant = await restaurant.save()
        res.status(201).json(newRestaurant)
    } catch (err) {
        res.status(400).json({ message: err.message })
    }
})
// Updating One
// Updating One — patch simple fields when present; a menue value is appended
// to the existing menu rather than replacing it.
router.patch("/:id", getRestaurant, async (req, res) => {
    const { name, title, description, menue } = req.body
    if (name != null) res.restaurant.name = name
    if (title != null) res.restaurant.title = title
    if (description != null) res.restaurant.description = description
    if (menue != null) res.restaurant.menue.push(menue)
    try {
        res.json(await res.restaurant.save())
    } catch (err) {
        res.status(400).json({ message: err.message })
    }
})
// Deleting One
// Deleting One — remove the document loaded by getRestaurant.
router.delete("/:id", getRestaurant, async (req, res) => {
    try {
        await res.restaurant.remove()
        res.json({ message: "Deleted Restaurant" })
    } catch (err) {
        res.status(500).json({ message: err.message })
    }
})
// Middleware: load the restaurant with id req.params.id into res.restaurant,
// or end the request with 404 (not found) / 500 (lookup error).
async function getRestaurant(req, res, next) {
    let restaurant
    try {
        restaurant = await Restaurant.findById(req.params.id)
        if (restaurant == null) {
            return res.status(404).json({
                message: "cannot find Restaurant"
            })
        }
    } catch (err) {
        // BUG FIX: was res.status(500).jsong(...) — a typo that itself threw
        // instead of sending the error response.
        return res.status(500).json({
            message: err.message
        })
    }
    res.restaurant = restaurant
    next()
}
module.exports = router
SUBSCRIBERS.JS
const express = require("express");
const router = express.Router();
const Subscriber = require("../models/subscriber");
const passport = require("passport");
const session = require("express-session");
const Order = require("../models/order");
const GoogleStrategy = require('passport-google-oauth20').Strategy;
const facebookStrategy = require('passport-facebook').Strategy;
// Session + Passport wiring. Order matters: express-session must be
// registered before passport.session().
// SECURITY(review): the session secret is hard-coded — move it to an env var.
router.use(session({
secret: "foodsecrets",
resave: false,
saveUninitialized: false
}));
router.use(passport.initialize());
router.use(passport.session());
// passport-local-mongoose supplies the username/password strategy.
passport.use(Subscriber.createStrategy());
// Store only the user id in the session...
passport.serializeUser(function (user, done) {
done(null, user.id);
});
// ...and re-load the full subscriber from MongoDB on each request.
// NOTE(review): callback-style findById works on mongoose 6 (this project
// pins ^6.3.3) but was removed in mongoose 7.
passport.deserializeUser(function (id, done) {
Subscriber.findById(id, function (err, user) {
done(err, user);
});
});
// Google auth routes
// Google auth strategy: find-or-create a subscriber keyed by Google id.
// SECURITY(review): clientID/clientSecret are committed in source — rotate
// these credentials and load them from environment variables instead.
passport.use(new GoogleStrategy({
        clientID: "330178790432-ro0cr35k37f7kq4ln4pdq6dqdpqqtri6.apps.googleusercontent.com",
        clientSecret: "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",
        callbackURL: "http://localhost:3000/subscribers/google/callback",
    },
    // NOTE: the 4th argument (named `email` here) is the Google profile object.
    function (accessToken, refreshToken, profile, email, done) {
        const ID = JSON.stringify(email.id)
        console.log(ID)
        // Look up an existing subscriber by Google id.
        Subscriber.findOne({
            googleID: ID
        }, function (err, user) {
            if (err) {
                return done(err);
            }
            if (!user) {
                // No user found — create one from the Google profile.
                const subscriber = new Subscriber({
                    googleID: ID,
                    username: email.displayName,
                    email: email.emails[0].value,
                    provider: 'google',
                    google: profile._json
                });
                subscriber.save(function (err) {
                    if (err) console.log(err);
                    // BUG FIX: previously returned done(err, user) where user
                    // is null, so a freshly created account was never logged in.
                    return done(err, subscriber);
                });
            } else {
                // Found an existing user — log them in.
                return done(err, user);
            }
        });
    }));
// Kick off Google OAuth, then handle the provider callback.
router.get("/google", passport.authenticate("google", { scope: ["profile", "email"] }));
router.get("/google/callback", passport.authenticate("google", {
    successRedirect: "https://www.youtube.com/",
    failureRedirect: "/login/failed",
}));
// Facebook Auth Routes
// Facebook auth strategy: find-or-create a subscriber keyed by Facebook id.
// SECURITY(review): app id/secret are committed in source — rotate these
// credentials and load them from environment variables instead.
passport.use(new facebookStrategy({
        clientID: "1142565916475628",
        clientSecret: "f0c297bf99f71d090b317cdcaa5ae6d8",
        callbackURL: "http://localhost:3000/subscribers/facebook/callback",
        profileFields: ["email", "displayName", "name"]
    },
    function (accessToken, refreshToken, profile, done) {
        console.log(profile)
        const ID = JSON.stringify(profile.id)
        console.log(ID)
        // Look up an existing subscriber by Facebook id.
        Subscriber.findOne({
            facebookID: ID
        }, function (err, user) {
            if (err) {
                return done(err);
            }
            if (!user) {
                // No user found — create one from the Facebook profile.
                const subscriber = new Subscriber({
                    facebookID: ID,
                    username: profile.displayName,
                    email: profile._json.email,
                    provider: profile.provider,
                    facebook: profile._json
                });
                subscriber.save(function (err) {
                    if (err) console.log(err);
                    // BUG FIX: was done(err, user) with user === null, so new
                    // accounts were never logged in.
                    return done(err, subscriber);
                });
            } else {
                // Found an existing user — log them in.
                return done(err, user);
            }
        });
    }
));
// Start Facebook OAuth and handle its callback.
router.get("/facebook", passport.authenticate("facebook", { scope: ["email"] }));
router.get("/facebook/callback", passport.authenticate("facebook", {
    successRedirect: "https://www.youtube.com/",
    failureRedirect: "/login/failed",
}));
// Edit cart (user must be authenticated)
// Edit cart (user must be authenticated).
// req.body.addOrRemoveItem selects the operation ("add" | "remove"); it must
// sit OUTSIDE the cartItem object.
router.patch("/editcart", async (req, res) => {
    // BUG FIX: an unauthenticated request previously left `sub` undefined and
    // produced no HTTP response at all (the client hung on a caught TypeError).
    if (!req.isAuthenticated()) {
        console.log("not reading")
        return res.status(401).json({ message: "Not authenticated" })
    }
    try {
        // Load the subscriber for the session user.
        const sub = await Subscriber.findById(req.user._id).exec()
        if (req.body.addOrRemoveItem === "add") {
            // Push the item from the request into the subscriber's cart.
            sub.cart.push(req.body.cartItem)
        } else if (req.body.addOrRemoveItem === "remove") {
            // Remove one cart item by its id (must be the cart item's _id).
            await Subscriber.updateOne({
                _id: sub._id
            }, {
                $pull: {
                    cart: {
                        _id: req.body.id
                    }
                }
            })
        }
        // Persist the (possibly modified) subscriber document and echo it back.
        const updatedSubscriber = await sub.save()
        res.json(updatedSubscriber)
    } catch (err) {
        console.log(err)
        res.status(500).json({ message: err.message })
    }
})
// Create Order (user must be authenticated)
// Create Order (user must be authenticated).
router.post("/createorder", async (req, res) => {
    // BUG FIX: unauthenticated requests previously got no response at all.
    if (!req.isAuthenticated()) {
        return res.status(401).json({ message: "Not authenticated" })
    }
    try {
        // Load the subscriber for the session user.
        const sub = await Subscriber.findById(req.user._id).exec()
        const pendingOrder = sub.pendingOrder
        const cart = req.user.cart
        const subCart = sub.cart
        // BUG FIX: reduce() with no initial value throws an unhelpful error on
        // an empty array; guard the empty cart explicitly.
        if (!cart || cart.length === 0) {
            return res.status(400).json({ message: "Cart is empty" })
        }
        // Sum the prices of every item in the cart.
        const total = cart.map(item => item.price).reduce((acc, curr) => acc + curr)
        // Create the order from the gathered data.
        const order = new Order({
            userID: req.user._id,
            total: total,
            items: cart,
            confirmed: false
        })
        // Queue the order as pending and empty the subscriber's cart.
        pendingOrder.push(order)
        subCart.splice(0, subCart.length)
        // Save to the main orders collection and to the subscriber document.
        const newOrder = await order.save()
        const newPendingOrder = await sub.save()
        // Send back both the new order and the updated subscriber.
        res.status(201).send({
            newOrder,
            newPendingOrder
        })
    } catch (err) {
        res.status(400).json({
            message: err.message
        })
    }
})
// GET ONE SUBSCRIBER BASED ON REQUEST ID USING PASSPORT IDEALLY USED FOR DATA NEEDED FOR THE PAYMENT PAGE AFTER MAKING AN ORDER
// GET ONE SUBSCRIBER based on the session user id (e.g. data for the payment
// page after making an order).
router.get("/getone", async (req, res) => {
    // BUG FIX: previously an unauthenticated request got no response at all.
    if (!req.isAuthenticated()) {
        return res.status(401).json({ message: "Not authenticated" })
    }
    try {
        const sub = await Subscriber.findById(req.user._id).exec()
        res.json(sub)
    } catch (err) {
        res.status(500).json({
            message: err.message
        })
    }
})
// CONFIRMS ORDER ON POST REQUEST RESULTING FROM A PAYMENT CONFIRMATION ON THE FRONTEND
// Confirm the subscriber's pending order after a frontend payment confirmation:
// move it into order history, mark the main order confirmed, clear pending.
router.post("/confirmorder", async (req, res) => {
    // BUG FIX: the original tested `req.isAuthenticated` (the function object,
    // always truthy) instead of calling it, so the auth check never ran.
    if (!req.isAuthenticated()) {
        return res.status(401).json({ message: "Not authenticated" })
    }
    try {
        const sub = await Subscriber.findById(req.user._id).exec()
        const pendingOrder = sub.pendingOrder
        // Hardening: with no pending order, pendingOrder[0]._id would throw.
        if (!pendingOrder || pendingOrder.length === 0) {
            return res.status(400).json({ message: "No pending order" })
        }
        const subOrderHistory = sub.orderHistory
        const mainOrder = await Order.findById(pendingOrder[0]._id).exec()
        console.log(mainOrder)
        // Move the pending order into history and mark it confirmed.
        subOrderHistory.push(pendingOrder[0])
        mainOrder.confirmed = true
        pendingOrder.splice(0, pendingOrder.length)
        const updatedOrder = await mainOrder.save()
        const updatedSub = await sub.save()
        res.status(201).send({
            updatedOrder,
            updatedSub
        })
    } catch (err) {
        res.status(400).json({
            message: err.message
        })
    }
})
// GETS ALL SUBSCRIBERS
// GETS ALL SUBSCRIBERS (requires an authenticated session).
router.get("/getall", async (req, res) => {
    // BUG FIX: respond explicitly instead of hanging when unauthenticated.
    if (!req.isAuthenticated()) {
        return res.status(401).json({ message: "Not authenticated" })
    }
    try {
        const subscribers = await Subscriber.find()
        res.json(subscribers)
    } catch (err) {
        res.status(500).json({
            message: err.message
        })
    }
});
// DELIVERS ALL DATA NEEDED FOR LOGGED IN HOMEPAGE BASED ON IF THE USER IS AUTHENTICATED
// Data for the logged-in homepage: the full subscriber document for the
// session user.
router.get("/loggedin", async (req, res) => {
    // BUG FIX: respond explicitly instead of hanging when unauthenticated.
    if (!req.isAuthenticated()) {
        return res.status(401).json({ message: "Not authenticated" })
    }
    try {
        const subscribers = await Subscriber.findById(req.user._id).exec()
        res.json(subscribers)
    } catch (err) {
        res.status(500).json({
            message: err.message
        })
    }
});
// // Getting One
// router.get("/:id", getSubscriber, (req, res) => {
// res.json(res.subscriber)
// });
// LOGIN USING PASSPORT JS
// LOGIN USING PASSPORT JS
// Builds an (unsaved) Subscriber from the posted credentials, establishes a
// login session via req.login, then runs the local strategy to verify the
// password. Only the success callback answers with 201; on strategy failure
// passport ends the request itself.
router.post("/login", (req, res) => {
const subscriber = new Subscriber({
username: req.body.username,
password: req.body.password,
email: req.body.email
});
req.login(subscriber, async function (err) {
if (err) {
// NOTE(review): no response is sent on this path — the client will hang.
console.log(err)
} else {
try {
passport.authenticate("local")(req, res, function () {
console.log("Authenticated")
console.log(req)
res.status(201).json("authenticated")
})
} catch (err) {
res.status(400).json({
message: err.message
})
}
}
})
})
// REGISTER USING PASSPORT JS
// REGISTER USING PASSPORT JS
// passport-local-mongoose's register() creates and saves the subscriber with
// a hashed password; we then authenticate the new session and respond.
router.post("/register", async (req, res) => {
    Subscriber.register({
        username: req.body.username,
        email: req.body.email
    }, req.body.password, async (err, subscriber) => {
        if (err) {
            console.log(err)
            // BUG FIX: answer the client instead of hanging on a failed
            // registration (e.g. duplicate username).
            return res.status(400).json({ message: err.message })
        }
        try {
            // BUG FIX: the original referenced `newSubscriber` inside the
            // authenticate callback before the const was initialized (a
            // temporal-dead-zone ReferenceError). Save first, then respond.
            const newSubscriber = await subscriber.save()
            passport.authenticate("local")(req, res, function () {
                console.log("is authenticated")
                res.status(201).json(newSubscriber)
            })
        } catch (err) {
            res.status(400).json({
                message: err.message
            })
        }
    });
})
// UPDATES ONE SUBSCRIBER BASED ON THE SUBSCRIBERS ID
// UPDATES ONE SUBSCRIBER (loaded into res.subscriber by getSubscriber).
router.patch("/:id", getSubscriber, async (req, res) => {
    const { email, password } = req.body
    if (email != null) res.subscriber.email = email
    if (password != null) res.subscriber.password = password
    try {
        res.json(await res.subscriber.save())
    } catch (err) {
        res.status(400).json({ message: err.message })
    }
})
// DELETES ONE SUBSCRIBER BASED ON THE SUBSCRIBERS ID
// DELETES ONE SUBSCRIBER (loaded by the getSubscriber middleware).
router.delete("/:id", getSubscriber, async (req, res) => {
    try {
        await res.subscriber.remove()
        res.json({ message: "Deleted Subscriber" })
    } catch (err) {
        res.status(500).json({ message: err.message })
    }
})
// FUNCTION FOR GETTING A SPECIFIC SUBSCRIBER FROM THE SUBSCRIBERS COLLECTION BASED ON A PRIOVIDED ID IN THE REQUEST PARAMATERS
// Middleware: resolve req.params.id to a subscriber document stored on
// res.subscriber, or terminate the request with 404/500.
async function getSubscriber(req, res, next) {
    try {
        const found = await Subscriber.findById(req.params.id)
        if (found == null) {
            return res.status(404).json({ message: "cannot find subscriber" })
        }
        res.subscriber = found
    } catch (err) {
        return res.status(500).json({ message: err.message })
    }
    next()
}
module.exports = router
You can use faker with require:
// node v14.18.1
// BUG FIX: the scoped package name is @faker-js/faker — the '#' was a
// copy/paste mangling and would not resolve.
const {faker} = require('@faker-js/faker');
console.log(faker.datatype.uuid());
If you check the package you will see that inside dist folder it has both esm and cjs versions.
I've been reading the code all day and I can't figure out how to make a loading page.
What I would like to do is: once the link is created, redirect to a loading page (5 seconds for example) and then show it the link.
This is the index.js of the application
const notFoundPath = path.join(__dirname, 'public/404.html');
// Redirect /:id to the stored long URL; unknown slugs and failed lookups both
// fall back to the static 404 page.
app.get('/:id', async (req, res, next) => {
    const { id: slug } = req.params;
    try {
        const url = await urls.findOne({ slug });
        if (!url) {
            return res.status(404).sendFile(notFoundPath);
        }
        return res.redirect(url.url);
    } catch (error) {
        return res.status(404).sendFile(notFoundPath);
    }
});
// Request-body validation: slug (optional) must be word characters or dashes;
// url is required and must parse as a URL.
const schema = yup.object().shape({
slug: yup.string().trim().matches(/^[\w\-]+$/i),
url: yup.string().trim().url().required(),
});
// Create a short link. Rate-limited: after 2 requests in a 30 s window the
// responses are slowed; more than 3 are rejected outright.
app.post('/url', slowDown({
    windowMs: 30 * 1000,
    delayAfter: 2,
    delayMs: 500,
}), rateLimit({
    windowMs: 30 * 1000,
    max: 3,
}), async (req, res, next) => {
    let { slug, url } = req.body;
    try {
        await schema.validate({ slug, url });
        // Refuse to shorten links that point back at this service.
        if (url.includes('xshorturl')) {
            throw new Error('🛑 ERROR 🛑');
        }
        if (!slug) {
            // No custom alias requested — generate a 5-char id.
            slug = nanoid(5);
        } else {
            // A custom alias must not already be taken.
            const existing = await urls.findOne({ slug });
            if (existing) {
                throw new Error('Apodo en uso. 🌹');
            }
        }
        slug = slug.toLowerCase();
        const created = await urls.insert({ url, slug });
        res.json(created);
    } catch (error) {
        next(error);
    }
});
// Fallback 404 for unmatched routes.
app.use((req, res, next) => {
    res.status(404).sendFile(notFoundPath);
});

// Central error handler: honours an explicit error.status and hides stack
// traces in production.
app.use((error, req, res, next) => {
    res.status(error.status ? error.status : 500);
    res.json({
        message: error.message,
        stack: process.env.NODE_ENV === 'production' ? '🥞' : error.stack,
    });
});
This is the main html of the application
This is the index.js of the html
I hope your help and thanks in advance.
I am trying to upload multiple files to Google Cloud Storage. I am using a for loop to for each file in the list of files which I want to upload.
However, the problem is that the for loop does not pause to wait for the upload to finish before moving on to the next upload. It eventually uploads them, however, the for loop finishes earlier which then sends back to the client the empty urlList.
How do I make it pause and wait for each upload process before moving on to the next file in the for loop?
const processFile = require('../middleware');
const { format } = require('util');
let uuidv4 = require('uuid/v4');
// BUG FIX: the scoped package name is @google-cloud/storage — the '#' was a
// copy/paste mangling and would not resolve.
const Cloud = require('@google-cloud/storage');
const { Storage } = Cloud;
// SECURITY(review): keep the service-account key file out of version control.
const storage = new Storage({
    keyFilename: './xxx.json',
    projectId: 'xxx'
});
const bucket = storage.bucket('xxx');
// Upload every file in req.files to the GCS bucket.
// BUG (the subject of this question): blobStream events fire asynchronously,
// so the for loop only registers handlers and moves on; the final
// res.status(200) runs before any 'finish' handler pushes a URL, so the
// client receives an empty urlList.
exports.upload = async (req, res) => {
const urlList = [];
await processFile(req, res); //multer
for (var i = 0; i < req.files.length; i++) {
if (!req.files[i]) {
return res.status(400).send({ message: 'Please upload a file!' });
}
const { originalname, buffer } = req.files[i];
// Normalise the filename and prefix a uuid to avoid collisions.
var filename = originalname
.toLowerCase()
.split(' ')
.join('-');
filename = uuidv4() + '-' + filename;
console.log(filename);
const blob = bucket.file(filename);
const blobStream = blob.createWriteStream({
resumable: false
});
blobStream.on('error', err => {
res.status(500).send({ message: err.message });
});
// NOTE(review): this callback runs long after the loop iteration completes.
blobStream.on('finish', async data => {
const publicUrl = format(
`https://storage.googleapis.com/${bucket.name}/${blob.name}`
);
urlList.push(publicUrl);
try {
await bucket.file(filename).makePublic();
} catch (err) {
console.log('failed to make it public');
// NOTE(review): `reject` is not defined in this scope — this line would
// throw a ReferenceError if reached.
reject(err);
}
});
blobStream.end(buffer);
}
// Sent before the uploads finish — urlList is still empty at this point.
return res.status(200).send({
message: 'Uploaded the files successfully',
url: urlList
});
};
Just put your "upload" code in a Promise that you can await in the loop. Otherwise, because the code inside uses event callbacks, it will not follow the for loop — the loop just registers the handlers, moves on, and cannot wait for them to finish. This should do the trick:
// Upload a single multer file to the GCS bucket and resolve with its public
// URL. Wrapping the stream events in a Promise lets callers await each upload.
const uploadFile = (f) => {
    return new Promise((resolve, reject) => {
        const { originalname, buffer } = f;
        // Normalise the name and prefix a uuid to avoid collisions.
        var filename = originalname.toLowerCase().split(" ").join("-");
        filename = uuidv4() + "-" + filename;
        console.log(filename);
        const blob = bucket.file(filename);
        const blobStream = blob.createWriteStream({
            resumable: false,
        });
        blobStream.on("error", (err) => {
            // BUG FIX: the original also called res.status(500).send(...) here,
            // but `res` is not in scope in this module-level helper (a
            // ReferenceError) and responding belongs to the route handler.
            reject(err);
        });
        blobStream.on("finish", async (data) => {
            const publicUrl = format(
                `https://storage.googleapis.com/${bucket.name}/${blob.name}`
            );
            try {
                // The object must be made public before the URL is usable.
                await bucket.file(filename).makePublic();
                resolve(publicUrl);
            } catch (err) {
                console.log("failed to make it public");
                reject(err);
            }
        });
        blobStream.end(buffer);
    });
};
exports.upload = async (req, res) => {
const urlList = [];
await processFile(req, res); //multer
for (var i = 0; i < req.files.length; i++) {
if (!req.files[i]) {
return res.status(400).send({ message: "Please upload a file!" });
}
const publicUrl = await uploadFile(req.files[i]);
urlList.push(publicUrl);
}
return res.status(200).send({
message: "Uploaded the files successfully",
url: urlList,
});
};
I'm trying to pass a function ereaseFiles() before the upload.array() method is called but I can't figure out how to make it.
The main goal is with a put request to delete all files on disk related to that object before of uploading new ones.
I've tried to set the function in the diskStorage section as well as in the callback of the put route. I even tried handling it in the function itself before the upload.array() method was called. I've tried working with promises but that is way too hard for me.
//function to be called (this works)
// Delete every previously uploaded file whose name starts with the scheda
// number from the request. Now returns a Promise so callers can await
// completion (the original gave no way to know when deletion had finished —
// the root cause of the race discussed in this thread). Callers that ignore
// the return value behave as before.
function ereaseFiles(req) {
    return new Promise((resolve, reject) => {
        glob("uploads/" + req.body.numeroScheda + "*", function (err, files) {
            // BUG FIX: a glob error previously left `files` undefined and the
            // loop below threw; reject instead.
            if (err) return reject(err);
            let pending = files.length;
            if (pending === 0) return resolve();
            for (const file of files) {
                fs.unlink(file, err => {
                    if (err) return reject(err);
                    console.log('successfully deleted files');
                    // Resolve only after the final unlink completes.
                    if (--pending === 0) resolve();
                });
            }
        });
    });
}
//My multer setup:
// Disk storage: files land in ./uploads/ named "<numeroScheda>-<original>".
// NOTE(review): req.body.numeroScheda is only populated here if that field
// arrives before the files in the multipart stream — TODO confirm field order.
const storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, './uploads/');
},
filename: function (req, file, cb) {
cb(null, req.body.numeroScheda + "-" + file.originalname);
}
});
// Up to 30 files per request (see the route), 2 MB per-file size limit.
const upload = multer({
storage: storage, limits: {
fileSize: 1024 * 1024 * 2,
},
});
//MY EDIT PUT ROUTE
// Edit route: replace the stored image path list with the freshly uploaded
// files and update the remaining fields from the form body.
app.put("/immobili/:_id", upload.array('immaginePrincipale', 30), function (req, res) {
    const requestedId = req.params._id;
    const proprietaImmagini = req.files;
    const immagini = proprietaImmagini.map(function (immagine) {
        return immagine.path;
    });
    // BUG FIX: findOneAndUpdate expects a filter object as its first argument;
    // passing the raw id string is not a valid filter. Use {_id: ...}.
    Immobile.findOneAndUpdate({ _id: requestedId }, {
        numeroScheda: req.body.numeroScheda,
        categoria: req.body.categoria,
        titolo: req.body.titolo,
        sottotitolo: req.body.sottotitolo,
        descrizione: req.body.descrizione,
        localita: req.body.localita,
        locali: req.body.locali,
        superficie: req.body.superficie,
        camere: req.body.camere,
        bagni: req.body.bagni,
        immagini: immagini,
    }, function (err, updatedImmobile) {
        if (err) return console.error(err);
        res.redirect("/immobili/" + requestedId);
    });
});
What should happen is that all files on disk associated with the object (numeroScheda) get deleted before the new ones are uploaded to keep the storage of files automated and clean.
EDIT 1:
I've created a delete route that works:
// Delete an immobile: remove its files on disk, then the document itself.
app.delete("/immobili/:_id", (req, res) => {
    const requestedId = req.params._id;
    Immobile.findOne({ _id: requestedId }, function (err, immobile) {
        if (err) return console.error(err);
        ereaseFiles(immobile);
    });
    // BUG FIX: findOneAndRemove takes a filter object, not a bare id string.
    Immobile.findOneAndRemove({ _id: requestedId }, err => {
        if (err) console.error(err);
        else res.redirect('/immobili');
    });
});
the function ereaseFiles looks now like this:
// Delete all uploads whose name starts with the scheda number.
// NOTE(review): glob() and fs.unlink() are asynchronous — `cancellato = true`
// below runs before any file is actually removed, so the flag does not mean
// "deletion finished". This race is the subject of the discussion that follows.
ereaseFiles = immobile => {
glob("uploads/" + immobile.numeroScheda + "*", function (err, files) {
for (const file of files) {
fs.unlink(file, err => {
if (err) throw err;
});
}
});
// Set synchronously; see the note above.
cancellato = true;
}
I've tried to implement this in the edit route with the middleware as kindly suggested with this:
// Attempted middleware: poll once per second, deleting files each tick, until
// the global `cancellato` flag reads true, then let the PUT route run.
// NOTE(review): ereaseFiles sets cancellato synchronously, so the flag does
// not actually mean the unlinks finished; `this.cancellato` also relies on
// `this` resolving to the module/global object, which is fragile.
app.use("/immobili/:_id", function (req, res, next) {
const requestedId = req.params._id;
let timer = setInterval(() => {
Immobile.findOne({ _id: requestedId }, (err, immobile) => {
if (err) return console.error(err);
ereaseFiles(immobile);
console.log(this.cancellato);
if (this.cancellato) {
clearInterval(timer);
next();
}
});
}, 1000);
});
This works if the uploaded pictures are more or same than before but if less it outputs with strange behaviors (sometimes it uploads some pictures, sometimes none).
You can use a middleware for that. Just make sure that the middleware is positioned before your put request handler.
// Suggested middleware: erase first, then poll the global `erased` flag every
// 100 ms and only call next() (letting the PUT route run) once it is set.
// NOTE(review): requires `erased = false` to be initialised at module scope,
// and the interval never times out if erasure fails.
app.use("/immobili/:_id", function(req,res,next){
eraseFiles(req);
let timer = setInterval(() => {
if(erased){
clearInterval(timer);
next();
}
},100);
})
app.put("/immobili/:_id", upload.array('immaginePrincipale', 30), function (req, res) { ...
EDIT 1:
Please change your eraseFiles function to:
// Delete all uploads matching the scheda number, setting the module-level
// `erased` flag once every unlink has completed.
function ereaseFiles(req) {
    glob("uploads/" + req.body.numeroScheda + "*", function (err, files) {
        // BUG FIX: the original looped with i < files.length - 1, skipping the
        // last file, and its `i === files.length - 1` check could never be true
        // inside that loop — so `erased` was never set. Count completed unlinks
        // instead of trusting the loop index.
        let remaining = files.length;
        if (remaining === 0) {
            erased = true;
            return;
        }
        for (let i = 0; i < files.length; i++) {
            fs.unlink(files[i], err => {
                if (err) throw err;
                console.log('successfully deleted files');
                // Flag completion only after the final unlink finishes.
                if (--remaining === 0)
                    erased = true;
            });
        }
    });
}
Edit 2: Changed a lot of things. Now the approach is your route will go to the middleware first. Your eraseFiles function will be called. While the erased variable is not true, your put route will not be hit. When the erasedFiles function is complete it will set erased to true. For this to work, you'll have to set erased=false in the file before all this.
I won! The solution was to put a little timer on the next() function as it was firing too soon and the uploads and it was mixing the two. Thanks for your help everyone!
I've also added an ereased variable that turned off and on as the function completes. Thanks to Mr. Web for that.
Here's the answer if someone runs across this, there's some Italian in the code, hopefully is readable enough anyways!
// Final version from the OP: delete the photos for an immobile.
// NOTE(review): glob/unlink are asynchronous, so `cancellato = true` below
// runs before any file is actually removed; the setTimeout(next, 500) in the
// middleware is what papers over this race rather than fixing it.
cancellaFoto = immobile => {
cancellato = false;
glob("uploads/" + immobile.numeroScheda + "*", function (err, files) {
for (const file of files) {
fs.unlink(file, err => {
if (err) throw err;
});
}
});
// Set synchronously — deletion may still be in flight here.
cancellato = true;
}
// Middleware run before the PUT: wipe the stored image list and the files on
// disk, then delay next() by 500 ms so the deletions can finish.
// NOTE(review): a fixed delay is a race, not a guarantee — slow disks or many
// files can still overlap with the upload that follows.
app.use("/immobili/:_id", function (req, res, next) {
const requestedId = req.params._id;
Immobile.findOne({ _id: requestedId }, (err, immobile) => {
if (err) return console.error(err);
immobile.immagini = [];
cancellaFoto(immobile);
console.log(immobile.immagini);
if (this.cancellato) {
console.log(this.cancellato);
// Give the async unlinks a head start before the PUT route runs.
return setTimeout(next, 500);
} else {
return console.log("Aborted");
}
});
});
//EDIT PUT ROUTE
// Edit route (final version): store the new upload paths and update the
// remaining fields from the form body.
app.put("/immobili/:_id", upload.array('immaginePrincipale', 30), function (req, res) {
    const requestedId = req.params._id;
    const proprietaImmagini = req.files;
    const immagini = proprietaImmagini.map(function (immagine) {
        return immagine.path;
    });
    console.log(immagini);
    // BUG FIX: findOneAndUpdate expects a filter object as its first argument;
    // passing the raw id string is not a valid filter. Use {_id: ...}.
    Immobile.findOneAndUpdate({ _id: requestedId }, {
        numeroScheda: req.body.numeroScheda,
        categoria: req.body.categoria,
        titolo: req.body.titolo,
        sottotitolo: req.body.sottotitolo,
        descrizione: req.body.descrizione,
        localita: req.body.localita,
        locali: req.body.locali,
        superficie: req.body.superficie,
        camere: req.body.camere,
        bagni: req.body.bagni,
        immagini: immagini,
    }, function (err, updatedImmobile) {
        if (err) return console.error(err);
        res.redirect("/immobili/" + requestedId);
    });
});
I'm writing an Angular 6 + Express.JS app and now I'm stuck with the following problem: when multiple requests are made at the same time, sometimes (especially when there are more than 4 requests) all of them respond with 404 or even get cancelled. Is there a problem with the way I handle requests in Express, or should I add some tweaks for concurrent requests?
Requests:
// Build one upload observable per file; zip archives go to a dedicated
// endpoint. first() completes each stream after a single emission.
let requests = [];
files.forEach((file) => {
if (file.type.toLowerCase().includes('zip')) {
requests.push(this.imagesService.uploadArchive(file).pipe(first()));
} else {
requests.push(this.imagesService.saveImage(file).pipe(first()));
}
});
// forkJoin fires all requests in parallel and emits once every one completes.
// NOTE(review): if any single request errors, forkJoin discards the other
// results — and firing many large uploads at once is what overloads the
// remote server in this question.
forkJoin(requests).subscribe(
(res) => res.forEach(response => {
this.onSave.emit(response);
}),
(error) => {
console.error(error);
},
() => {
this.close.emit();
}
);
Express handling routes:
// Save a single uploaded image: formidable parses the multipart body into
// req.fields / req.files, then the image metadata row is inserted into the DB.
router.post('/images',
    formidable({
        encoding: 'utf-8',
        uploadDir: path.resolve(__dirname, '..', '..', 'uploads'),
        multiples: true,
        keepExtensions: true
    }),
    (req, res, next) => {
        const image = req.fields;
        const data = req.files;
        image.path = data.image.path;
        // BUG FIX: the original opened fs.createReadStream(image.path) into an
        // unused local and never consumed or closed it, leaking a file
        // descriptor on every request.
        saveImage(image).then(
            result => {
                if (result) {
                    res.status(200).send(result);
                } else {
                    console.error("Cannot save image");
                    res.status(400).send("Cannot save image");
                }
            }).catch(e => {
                // BUG FIX: on an unexpected error the client previously got no
                // response at all (the request hung).
                console.error(e.stack);
                res.status(500).send("Cannot save image");
            });
    });
Responses:
UPDATE
// Upload a zip archive: formidable stores it on disk, the images inside are
// extracted into ./uploads, and a DB row is inserted per extracted image.
router.post('/archives',
formidable({
encoding: 'utf-8',
uploadDir: path.resolve(__dirname, '..', '..', 'uploads'),
multiples: true,
keepExtensions: true
}),
(req, res, next) => {
const data = req.files;
let promises = [];
// NOTE(review): a `throw err` inside this async callback becomes an
// unhandled rejection — the client would get no response on a read failure.
fs.readFile(data.archive.path, async (err, archive) => {
if (err) throw err;
// Extraction resolves with image descriptors; the save promises are
// queued synchronously in this forEach, so Promise.all below sees them all.
await extractImagesFromZip(archive, data.archive.path).then((images) =>
images.forEach((image) => {
promises.push(
saveImage(image).then(
result => {
if (result) {
// Keep only the filename portion for the client.
result.path = result.path.split('/').pop();
return result;
} else {
// Save failed — remove the extracted file again.
console.error("Cannot save image " + image.name);
fs.unlink(image.path, () => {});
}
}).catch(e => {
fs.unlink(image.path, () => {});
console.error(e.stack)
})
);
})
);
// Respond once every save settles.
Promise.all(promises)
.then((result) => {
if (result.length > 0) {
res.status(200).send(result)
} else {
res.status(400).send("None images were saved")
}
}).catch((error) => {
console.log(error.stack);
res.status(400).send("None images were saved")
});
});
}
);
// Extract all images (.jpg/.jpeg/.png, skipping dot-files and directories)
// from a zip buffer into ./uploads under randomized names.
// Resolves with [{name, type, path}, ...]; `link` (the temp archive on disk)
// is unlinked once all extraction jobs have been queued.
export const extractImagesFromZip = (file, link) => {
let promises = [];
var zip = new JSZip();
return zip.loadAsync(file)
.then((archive) => {
Object.values(archive.files).filter(
f =>
['.jpg', '.jpeg', '.png'].some((suffix) => f.name.toLowerCase().endsWith(suffix))
&& ![...f.name.toLowerCase().split('/')].pop().startsWith('.')
&& !f.dir
).forEach(f => promises.push(zip.file(f.name).async('nodebuffer').then((content) => {
const ext = f.name.split('.').pop().toLowerCase();
// Random 32-hex-char name to avoid collisions in the uploads dir.
var dest = path.resolve(__dirname, '..', '..') + '/uploads/upload_'
+ crypto.randomBytes(Math.ceil(1322)).toString('hex').slice(0, 32).toLowerCase()
+ '.' + ext;
return new Promise((res, rej) => {
// NOTE(review): the file is written and then immediately re-read from
// disk; resolving with the in-memory `content` would avoid the extra
// round trip.
fs.writeFile(dest, content, (err) => {
if (err) rej(err);
res(new Promise((resolve, reject) => {
fs.readFile(dest, (erro, data) => {
if (erro) reject(erro);
if (data) resolve({
name: f.name,
type: 'image/' + (ext === 'jpg' ? 'jpeg' : ext),
path: dest
});
});
}));
});
});
})));
// Remove the uploaded archive itself; the per-image promises hold their own
// buffers, so this is safe to start now.
fs.unlink(link, () => {});
return Promise.all(promises);
});
}
// Insert one image row and resolve with the created record (including id).
// On failure the error is logged and undefined is returned — callers treat a
// falsy result as "not saved".
export const saveImage = (image) => {
    const insertSql =
        "INSERT INTO images (name, type, path) " +
        "VALUES (?, ?, ?) " +
        "RETURNING name, type, path, id";
    return database
        .raw(insertSql, [image.name, image.type, image.path])
        .then(data => data.rows[0])
        .catch(e => console.error(e.stack));
};
UPDATE 2
Everything works fine if user and server are on localhost (regardless server running with nginx or without it), but problem appears when server is remote
Such code worked
// Working approach: upload files strictly one at a time. concatMap/concatAll
// subscribe to each inner request only after the previous one completes,
// avoiding the burst of parallel uploads that made the remote server fail.
public async uploadFiles(files: File[]) {
of(files)
.pipe(
concatMap(files =>
files.map(file => {
return this.imagesService
.saveImage(file)
.pipe(
// NOTE(review): map() with no projection function is a no-op here.
map(),
catchError((error, caught) => {
console.error(error);
return empty();
})
);
})
),
concatAll(),
toArray(),
map(res => console.log)
)
.subscribe();
}