I have encountered a problem while following a Maximilian Schwarzmüller course, which has otherwise been great: https://www.youtube.com/watch?v=qZ1EFnFOGvE
The image shows up in the Firebase console as uploaded, and the console recognises the file type, size, etc., but the preview loads forever and the image never displays. I upload the image with a POST request in Postman.
When I upload manually through the Firebase UI, everything works fine.
My code:
const functions = require("firebase-functions");
const os = require("os");
const path = require("path");
const spawn = require("child-process-promise").spawn;
const cors = require("cors")({ origin: true });
const Busboy = require("busboy");
const fs = require("fs");
const gcconfig = {
  projectId: "REDACTED",
  keyFilename: "REDACTED"
};
const gcs = require("@google-cloud/storage")(gcconfig);
//
exports.onFileChange = functions.storage.object().onFinalize(object => {
  // Since firebase-functions v1.0, onFinalize receives the object metadata
  // directly; there is no event.data wrapper any more.
  const bucket = object.bucket;
  const contentType = object.contentType;
  const filePath = object.name;
  console.log("File change detected, function execution started");
  if (object.resourceState === "not_exists") {
    console.log("We deleted a file, exit...");
    return;
  }
  if (path.basename(filePath).startsWith("resized-")) {
    console.log("We already renamed that file!");
    return;
  }
  const destBucket = gcs.bucket(bucket);
  const tmpFilePath = path.join(os.tmpdir(), path.basename(filePath));
  const metadata = { contentType: contentType };
  return destBucket
    .file(filePath)
    .download({
      destination: tmpFilePath
    })
    .then(() => {
      // ImageMagick's convert is preinstalled in the Cloud Functions runtime
      return spawn("convert", [tmpFilePath, "-resize", "500x500", tmpFilePath]);
    })
    .then(() => {
      return destBucket.upload(tmpFilePath, {
        destination: "resized-" + path.basename(filePath),
        metadata: metadata
      });
    });
});
exports.uploadFile = functions.https.onRequest((req, res) => {
  cors(req, res, () => {
    if (req.method !== "POST") {
      return res.status(500).json({
        message: "Not allowed"
      });
    }
    const busboy = new Busboy({ headers: req.headers });
    let uploadData = null;
    busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
      const filepath = path.join(os.tmpdir(), filename);
      uploadData = { file: filepath, type: mimetype };
      file.pipe(fs.createWriteStream(filepath));
    });
    busboy.on("finish", () => {
      const bucket = gcs.bucket("REDACTED");
      bucket
        .upload(uploadData.file, {
          uploadType: "media",
          metadata: {
            metadata: {
              contentType: uploadData.type
            }
          }
        })
        .then(() => {
          res.status(200).json({
            message: "It worked!"
          });
        })
        .catch(err => {
          res.status(500).json({
            error: err
          });
        });
    });
    busboy.end(req.rawBody);
  });
});
My security rules:
rules_version = '2';
service firebase.storage {
  match /b/{bucket}/o {
    match /{allPaths=**} {
      allow read, write: if true;
    }
  }
}
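For anyone hitting the same symptom: the console's image preview is driven by a download token stored in the object's custom metadata, and uploads made through the google-cloud client don't get one automatically. Below is a minimal, hedged sketch of attaching a token during the upload, using the same firebaseStorageDownloadToken technique as the storeImage function further down this page (it assumes uuid is installed):

// Hedged sketch: attach a download token so the Firebase console
// (and token-based download URLs) can render the uploaded file.
const { v4: uuid } = require("uuid");

const token = uuid();
bucket.upload(uploadData.file, {
  metadata: {
    contentType: uploadData.type, // contentType belongs at this level
    metadata: {
      firebaseStorageDownloadToken: token // custom metadata Firebase looks for
    }
  }
});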
I have a page where I made the backend in Node.js + MongoDB and the frontend with React. In the backend I have a middleware that I use to upload images to Cloudinary. For example, one route creates a new pet, and when I make the POST request with Postman everything works: the new pet is created in the DB and the image field holds the Cloudinary URL. The problem comes when I try to do the same with a form in React. Everything seems to work there too, but the image field (which with Postman holds the Cloudinary URL) is now empty.
The node controller code:
const petCreatePost = async (req, res, next) => {
  const { type, name, avatar, age, sex, breed, size, isVaccinated, isSterilized, isDewormed, microchip, province, shelter, status } = req.body;
  try {
    const newPet = new Pet({
      type,
      name,
      avatar: req.imageUrl ? req.imageUrl : '',
      age,
      sex,
      breed,
      size,
      isVaccinated,
      isSterilized,
      isDewormed,
      microchip,
      province,
      shelter,
      status
    });
    const createdPet = await newPet.save();
    // res.json() takes a single argument, so wrap the message and pet together
    return res.status(200).json({ message: 'Mascota creada correctamente', pet: createdPet });
  } catch (error) {
    return next(error);
  }
};
Cloudinary middleware:
const multer = require('multer');
const path = require('path');
const fs = require('fs');
const cloudinary = require('cloudinary').v2;
const ACCEPTED_FILE = ['image/jpg', 'image/jpeg', 'image/png'];
const fileFilter = (req, file, cb) => {
  if (!ACCEPTED_FILE.includes(file.mimetype)) {
    const error = new Error('Extensión del archivo inválida.'); // "Invalid file extension."
    error.status = 400;
    return cb(error);
  }
  return cb(null, true);
};
const storage = multer.diskStorage({
  filename: (req, file, cb) => {
    const fileName = `${Date.now()}-${file.originalname}`;
    cb(null, fileName);
  },
  destination: (req, file, cb) => {
    const directory = path.join(__dirname, '../public/uploads');
    cb(null, directory);
  }
});
const upload = multer({
  storage,
  fileFilter,
});
const uploadToCloudinary = async (req, res, next) => {
  try {
    console.log('req', req);
    if (req.file) {
      const path = req.file.path;
      const image = await cloudinary.uploader.upload(path);
      req.imageUrl = image.secure_url;
      console.log('image url', req.imageUrl);
      return next();
    } else {
      return next();
    }
  } catch (error) {
    return next(error);
  }
};
module.exports = { upload, uploadToCloudinary };
How I use the middleware:
router.post('/new', [upload.single('avatar'), uploadToCloudinary], controller.petCreatePost);
The react component:
import React, { useContext } from 'react';
export const NewPet = () => {
  const submitForm = async (e) => {
    e.preventDefault();
    const { type, name, age, avatar, sex, breed, size, isVaccinated, isSterilized, isDewormed, microchip, province, status } = e.target;
    const form = {
      type: type.value,
      name: name.value,
      age: age.value,
      sex: sex.value,
      breed: breed.value,
      size: size.value,
      isVaccinated: isVaccinated.value,
      isSterilized: isSterilized.value,
      isDewormed: isDewormed.value,
      microchip: microchip.value,
      province: province.value,
      status: status.value
    };
    // const form = new FormData();
    // form.append('type', type.value);
    // form.append('name', name.value);
    // form.append('age', age.value);
    // form.append('sex', sex.value);
    // form.append('breed', breed.value);
    // form.append('size', size.value);
    // form.append('isVaccinated', isVaccinated.value);
    // form.append('isSterilized', isSterilized.value);
    // form.append('isDewormed', isDewormed.value);
    // form.append('microchip', microchip.value);
    // form.append('province', province.value);
    // form.append('status', status.value);
    // form.append('avatar', imagenPrueba);
    try {
      const pet = await newPet(form);
      console.log('pet', pet);
    } catch (err) {
      console.log(err);
    }
  };
  // ...the component's JSX is omitted in the post
};
The commented-out part is an alternative I tried: since I'm sending a file I have to use FormData, but that isn't working either. I also checked that the form has enctype="multipart/form-data".
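For context: as long as the request goes out as JSON, multer never sees a file, so req.file is undefined and avatar stays empty. Here is a minimal sketch of the FormData variant, assuming the /new route and newPetUrl from this post; the key detail is not setting Content-Type by hand, so the browser can add the multipart boundary itself:

// Hedged sketch, not the original poster's code.
// `newPetUrl` and the form fields are assumed from the snippets above.
const form = new FormData();
form.append('type', type.value);
form.append('name', name.value);
// ...append the remaining text fields the same way...
form.append('avatar', avatar.files[0]); // the File itself, not avatar.value

const res = await fetch(newPetUrl, {
  method: 'POST',
  credentials: 'include',
  body: form // no Content-Type header: the browser sets multipart/form-data with its boundary
});
const data = await res.json();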
And by last the "newPet" function that i use to connect to the back:
export const newPet = async (form) => {
  const req = await fetch(newPetUrl, {
    method: "POST",
    headers: {
      Accept: "application/json",
      "Content-Type": "application/json",
      "Access-Control-Allow-Origin": "*",
    },
    credentials: "include",
    body: JSON.stringify(form),
  });
  const response = await req.json(); // json() takes no arguments
  if (!req.ok) {
    throw new Error(response.message);
  }
  return response;
};
I hope someone can help me. Thanks!
You need to await the URL from Cloudinary. I had this problem too.
I am trying to upload multiple files to Google Cloud Storage, using a for loop over the list of files I want to upload.
However, the for loop does not pause to wait for each upload to finish before moving on to the next one. The files do eventually upload, but the loop finishes first, so the client is sent back the empty urlList.
How do I make the loop wait for each upload to complete before moving on to the next file?
const processFile = require('../middleware');
const { format } = require('util');
let uuidv4 = require('uuid/v4');
const Cloud = require('@google-cloud/storage');
const { Storage } = Cloud;
const storage = new Storage({
  keyFilename: './xxx.json',
  projectId: 'xxx'
});
const bucket = storage.bucket('xxx');
exports.upload = async (req, res) => {
  const urlList = [];
  await processFile(req, res); // multer
  for (var i = 0; i < req.files.length; i++) {
    if (!req.files[i]) {
      return res.status(400).send({ message: 'Please upload a file!' });
    }
    const { originalname, buffer } = req.files[i];
    var filename = originalname
      .toLowerCase()
      .split(' ')
      .join('-');
    filename = uuidv4() + '-' + filename;
    console.log(filename);
    const blob = bucket.file(filename);
    const blobStream = blob.createWriteStream({
      resumable: false
    });
    blobStream.on('error', err => {
      res.status(500).send({ message: err.message });
    });
    blobStream.on('finish', async data => {
      const publicUrl = format(
        `https://storage.googleapis.com/${bucket.name}/${blob.name}`
      );
      urlList.push(publicUrl);
      try {
        await bucket.file(filename).makePublic();
      } catch (err) {
        console.log('failed to make it public');
        reject(err);
      }
    });
    blobStream.end(buffer);
  }
  return res.status(200).send({
    message: 'Uploaded the files successfully',
    url: urlList
  });
};
Just put your "upload" code in a Promise that you can await in the loop. Othervise by using on the code inside of it will not follow the for loop. By using such event based code your for loop will just go trough it and can't await it to finish. This should do the trick:
const uploadFile = (f) => {
  return new Promise((resolve, reject) => {
    const { originalname, buffer } = f;
    var filename = originalname.toLowerCase().split(" ").join("-");
    filename = uuidv4() + "-" + filename;
    console.log(filename);
    const blob = bucket.file(filename);
    const blobStream = blob.createWriteStream({
      resumable: false,
    });
    blobStream.on("error", (err) => {
      // Only reject here; the caller sends the error response,
      // which avoids responding twice for the same request.
      reject(err);
    });
    blobStream.on("finish", async (data) => {
      const publicUrl = format(
        `https://storage.googleapis.com/${bucket.name}/${blob.name}`
      );
      try {
        await bucket.file(filename).makePublic();
        resolve(publicUrl);
      } catch (err) {
        console.log("failed to make it public");
        reject(err);
      }
    });
    blobStream.end(buffer);
  });
};
exports.upload = async (req, res) => {
  const urlList = [];
  await processFile(req, res); // multer
  try {
    for (var i = 0; i < req.files.length; i++) {
      if (!req.files[i]) {
        return res.status(400).send({ message: "Please upload a file!" });
      }
      const publicUrl = await uploadFile(req.files[i]);
      urlList.push(publicUrl);
    }
  } catch (err) {
    return res.status(500).send({ message: err.message });
  }
  return res.status(200).send({
    message: "Uploaded the files successfully",
    url: urlList,
  });
};
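A possible follow-up on the design: because each upload is now an ordinary Promise, nothing forces them to run one at a time. If sequential order doesn't matter, a minimal concurrent variant (same uploadFile helper and req.files as above) would be:

// Hedged variant of the loop above: start all uploads at once and wait
// for every Promise to settle. The order of results follows req.files.
const urls = await Promise.all(req.files.map((f) => uploadFile(f)));
return res.status(200).send({ message: "Uploaded the files successfully", url: urls });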
const functions = require('firebase-functions');
const cors = require('cors')({ origin: true });
const Busboy = require('busboy');
const os = require('os');
const path = require('path');
const fs = require('fs');
const fbAdmin = require('firebase-admin');
const uuid = require('uuid/v4');
// // Create and Deploy Your First Cloud Functions
// // https://firebase.google.com/docs/functions/write-firebase-functions
//
// exports.helloWorld = functions.https.onRequest((request, response) => {
// response.send("Hello from Firebase!");
// });
const gcconfig = {
  ' '
};
const gcs = require('@google-cloud/storage')(gcconfig);
fbAdmin.initializeApp({ credential: fbAdmin.credential.cert(require('')) });
exports.storeImage = functions.https.onRequest((req, res) => {
  return cors(req, res, () => {
    if (req.method !== 'POST') {
      return res.status(500).json({ message: 'Not allowed.' });
    }
    if (!req.headers.authorization || !req.headers.authorization.startsWith('Bearer ')) {
      return res.status(401).json({ error: 'Unauthorized.' });
    }
    let idToken;
    idToken = req.headers.authorization.split('Bearer ')[1];
    const busboy = new Busboy({ headers: req.headers });
    let uploadData;
    let oldImagePath;
    busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
      const filePath = path.join(os.tmpdir(), filename);
      uploadData = { filePath: filePath, type: mimetype, name: filename };
      file.pipe(fs.createWriteStream(filePath));
    });
    busboy.on('field', (fieldname, value) => {
      oldImagePath = decodeURIComponent(value); // use the field value, not the `path` module
    });
    busboy.on('finish', () => {
      const bucket = gcs.bucket(' ');
      const id = uuid();
      let imagePath = 'images/' + id + '-' + uploadData.name;
      if (oldImagePath) {
        imagePath = oldImagePath;
      }
      return fbAdmin.auth().verifyIdToken(idToken).then(decodedToken => {
        return bucket.upload(uploadData.filePath, {
          uploadType: 'media',
          destination: imagePath,
          metadata: {
            metadata: {
              contentType: uploadData.type,
              firebaseStorageDownloadToken: id
            }
          }
        });
      }).then(() => {
        return res.status(201).json({
          imageUrl: 'https://firebasestorage.googleapis.com/v0/b/' + bucket.name + '/o/' + encodeURIComponent(imagePath) + '?alt=media&token=' + id,
          imagePath: imagePath
        });
      }).catch(error => {
        return res.status(401).json({ error: 'Unauthorized!' });
      });
    });
    return busboy.end(req.rawBody);
  });
});
I am trying to deploy this function but I got this error (Error occurred while parsing your function triggers.
Error [ERR_PACKAGE_PATH_NOT_EXPORTED]: Package subpath './v4' is not defined by "exports" in C:\Users\ahmed aziz\AndroidStudioProjects\salers_demo\functions\node_modules\uuid\package.json). Please help me.
The error is complaining about your usage of require('uuid/v4').
The API documentation for uuid suggests that you import and use it like this:
import { v4 as uuidv4 } from 'uuid';
uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d'
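If the functions code is plain CommonJS rather than an ES module, the equivalent require form works too:

// CommonJS equivalent of the ES-module import above
const { v4: uuidv4 } = require('uuid');
uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d'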
Given a link that, when the user hits it, downloads a PDF,
I want to upload the file to S3 and then get an Amazon S3 URL that is public (I don't want the user to see the real link, which is why I'd rather upload it to S3).
Consider the code:
module.exports = class S3Uploader {
  uploadPdfFromURLToS3 = urlToUpload => {
    // `import` is only valid at module top level, so use require() here
    const aws = require("aws-sdk");
    const request = require("request");
    const config = require("config");
    var uuidv4 = require("uuid/v4");
    var filename = uuidv4() + ".pdf";
    aws.config.update({
      accessKeyId: config.get("-------------"),
      secretAccessKey: config.get("-----------------")
    });
    const s3 = new aws.S3();
    var promise = new Promise((resolve, reject) => {
      return request({ url: urlToUpload, encoding: null }, function (err, res, body) {
        if (err) return reject({ status: 500, error: err });
        return resolve({ status: 200, body: body });
      });
    });
    promise.then(pdf => {
      if (pdf.status == 200) {
        s3.putObject(
          {
            Bucket: "-----Your-Bucket-Name",
            Body: pdf.body,
            Key: filename,
            ACL: "public-read"
          },
          (err, data) => {
            if (err) console.log(err);
            else {
              console.log("uploaded");
              // Get the S3 public link ????
            }
          }
        );
      }
    });
  };
};
How can I get the link after the file has been uploaded successfully, in the callback?
You can build up the URL using string concatenation.
https://your-bucket-name.s3-eu-west-1.amazonaws.com/filename
Make sure you are using the correct region.
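A minimal sketch of that concatenation, to run in the upload callback; the bucket name and region here are assumptions, so adjust them to match your bucket:

// Hedged sketch: virtual-hosted-style S3 URL, built by hand.
const region = "eu-west-1"; // assumption: your bucket's region
const publicUrl = `https://-----Your-Bucket-Name.s3.${region}.amazonaws.com/${encodeURIComponent(filename)}`;
console.log("uploaded", publicUrl);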
You can try logging data.Location in your console.log("uploaded") line. Note that template literals need backticks and ${}, and that Location is only provided by s3.upload(), not s3.putObject():
console.log(`uploaded. ${data.Location}`);
**TRY THIS** The main change is to call s3.upload() instead of s3.putObject(), because upload() passes the object's public URL to its callback as data.Location (putObject does not return a URL):
module.exports = class S3Uploader {
  uploadPdfFromURLToS3 = urlToUpload => {
    const aws = require("aws-sdk");
    const request = require("request");
    const config = require("config");
    var uuidv4 = require("uuid/v4");
    var filename = uuidv4() + ".pdf";
    aws.config.update({
      accessKeyId: config.get("-------------"),
      secretAccessKey: config.get("-----------------")
    });
    const s3 = new aws.S3();
    var promise = new Promise((resolve, reject) => {
      return request({ url: urlToUpload, encoding: null }, function (err, res, body) {
        if (err) return reject({ status: 500, error: err });
        return resolve({ status: 200, body: body });
      });
    });
    promise.then(pdf => {
      if (pdf.status == 200) {
        s3.upload(
          {
            Bucket: "-----Your-Bucket-Name",
            Body: pdf.body,
            Key: filename,
            ACL: "public-read"
          },
          (err, data) => {
            if (err) console.log(err);
            else console.log(data.Location); // the public URL of the uploaded PDF
          }
        );
      }
    });
  };
};
I previously had a single file upload set up and working properly. Now I need to make it handle multiple files.
Here is my code right now:
const multer = require('multer')
const { Storage } = require('@google-cloud/storage')
const storage = new Storage()
const m = multer({ storage: multer.memoryStorage() })
module.exports = app => {
  app.use('/', router)
  router.post(
    '/reader-:shortId/file-upload',
    passport.authenticate('jwt', { session: false }),
    m.array('files'),
    async function (req, res) {
      const bucketName = req.params.shortId.toLowerCase()
      await storage.createBucket(bucketName)
      const bucket = storage.bucket(bucketName)
      let promises = []
      req.files.forEach((file) => {
        const blob = bucket.file(file.originalname)
        const newPromise = new Promise((resolve, reject) => {
          blob.createWriteStream({
            metadata: { contentType: file.mimetype }
          }).on('finish', async response => {
            await blob.makePublic()
            resolve(response)
          }).on('error', err => {
            reject('upload error: ', err)
          }).end()
        })
        promises.push(newPromise)
      })
      Promise.all(promises).then((response) => {
        // the response I get here is [undefined, undefined]
        res.status(200).send(response)
      }).catch((err) => {
        res.status(400).send(err.message)
      });
    })
}
req.files does give me an array of files, with a buffer and a size that make sense.
The promises all resolve.
But when I check the files in the Google bucket, they have the right names but no content (and a size of 0).
As I said before, it was working when I was doing it with one file (using m.single('file')).
I don't want to use the bucket as the destination in the multer setup, because I also have to change the file name before uploading to the Google bucket.
edit: this is the code example given by the Google Cloud documentation for single-file uploads (https://cloud.google.com/nodejs/getting-started/using-cloud-storage):
function sendUploadToGCS (req, res, next) {
  if (!req.file) {
    return next();
  }
  const gcsname = Date.now() + req.file.originalname;
  const file = bucket.file(gcsname);
  const stream = file.createWriteStream({
    metadata: {
      contentType: req.file.mimetype
    },
    resumable: false
  });
  stream.on('error', (err) => {
    req.file.cloudStorageError = err;
    next(err);
  });
  stream.on('finish', () => {
    req.file.cloudStorageObject = gcsname;
    file.makePublic().then(() => {
      req.file.cloudStoragePublicUrl = getPublicUrl(gcsname);
      next();
    });
  });
  stream.end(req.file.buffer);
}
I originally had something like that working, but I just don't understand where it gets the file buffer data from. That is probably where things differ with multiple files.
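For what it's worth, the buffer in that sample comes from multer itself: with multer.memoryStorage() the middleware parses the multipart body and attaches each file's raw bytes as a Buffer, and the mechanism is the same for one file or many:

// multer.memoryStorage() keeps uploads in memory instead of on disk:
const m = multer({ storage: multer.memoryStorage() })

// after m.single('file'):  req.file.buffer       holds the file contents
// after m.array('files'):  req.files[i].buffer   holds each file's contents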
I know it's too late, but someone might be looking for an answer for uploading multiple files to Google Cloud Storage.
Dependencies:
Express
Google Cloud Library
Multer
Body Parser
This is the controller code.
// Assumes `bucket` is initialized as in the snippets above, plus:
// const path = require('path');
exports.post_image_upload = async (req, res) => {
  /** Check if files exist */
  if (!req.files) {
    res.status(400).send('No file uploaded.');
    return;
  }
  let PublicUrls = []
  req.files.forEach((file) => {
    const blob = bucket.file(file.fieldname + '-' + Date.now() + path.extname(file.originalname))
    const blobStream = blob.createWriteStream({
      metadata: { contentType: file.mimetype }
    })
    blobStream.on('finish', () => {
      blob.makePublic()
    })
    blobStream.on('error', err => {
      // Put your error message here
    })
    blobStream.end(file.buffer)
    // Note: the URL is built immediately; the object itself finishes
    // uploading (and becomes public) asynchronously.
    const Url = `https://storage.googleapis.com/${bucket.name}/${blob.name}`
    PublicUrls.push(Url)
  })
  res.send(PublicUrls)
}
Good Luck
Ok, turns out I had to change
.end()
to
.end(file.buffer)
Marie Pelletier, I think your approach is 100% right. I modified your code a little, trying to avoid the async response:
let promises = []
req.files.forEach((file) => {
  const blob = bucket.file(file.originalname)
  const newPromise = new Promise((resolve, reject) => {
    blob.createWriteStream({
      metadata: { contentType: file.mimetype },
      resumable: false // good for small files
    }).on('finish', () => {
      const Url = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
      resolve({ name: file.originalname, url: Url });
    }).on('error', err => {
      reject('upload error: ', err);
    }).end(file.buffer);
  })
  promises.push(newPromise);
})
Promise.all(promises).then((response) => {
  res.status(200).send(response)
}).catch((err) => {
  res.status(400).send(err.message)
});
This way, I didn't get 'undefined' anymore.
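One caveat with this modification: it drops the blob.makePublic() call from the earlier answers, so the storage.googleapis.com URLs will only be readable if the bucket already allows public reads. A hedged drop-in for the 'finish' handler that restores it:

// Drop-in replacement for the .on('finish', ...) handler above,
// making each object public before resolving its URL.
.on('finish', async () => {
  await blob.makePublic();
  const Url = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
  resolve({ name: file.originalname, url: Url });
})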