I'm working on a Node.js app that creates a PDF per user id (using pdfkit), fills it with the user's data, and then uploads it to Google Drive via the Google Drive API. The client gets back the URL of this file. My problem is that the first upload works perfectly, but when I try to do it again, the file uploads but it's empty. If I restart the server it works again.
PDF creation is also fine on the second attempt; only the second create/upload fails. I have tried making the file name dynamic, but that did not help. Only a server restart works. Thank you.
Function for creating the PDF:
const createPdf = async (req, res, missions) => {
try {
const { _id } = req.user;
const pdfPath = path.join('data', 'pdf', _id + '.pdf');
let doc = new PDFDocument({ margin: 50 });
doc.pipe(fs.createWriteStream(pdfPath));
generateInnerPdf(doc, missions);
doc.end();
return { success: true };
} catch (err) {
return { success: false };
}
};
Function for uploading to Google Drive and retrieving the URL:
exports.uploads = (_id) => {
return new Promise((resolve, reject) => {
const auth = new google.auth.JWT(
credentials.client_email,
null,
credentials.private_key,
scopes
);
const drive = google.drive({ version: 'v3', auth });
var folderId = 'xxxxxxxxxxx';
const pdfPath = path.join('data', 'pdf', _id + '.pdf');
var fileMetadata = {
name: `${_id}.pdf`,
parents: [folderId],
};
var media = {
mimeType: 'application/pdf',
body: fs.createReadStream(pdfPath),
};
drive.files.create(
{
resource: fileMetadata,
media: media,
fields: 'id',
},
(error, result) => {
  if (error) return reject(error);
  resolve({
    fileUrl: `https://drive.google.com/file/d/${result.data.id}/view?usp=sharing`,
  });
}
);
});
};
My PDF controller:
exports.create = async (req, res) => {
try {
const { missions } = await getMissions(req.user._id);
const { success } = await createPdf(req, res, missions);
if (success) {
googleApi.uploads(req.user._id).then((result) => {
res.status(200).send(result);
});
} else {
res.status(422).send({ success: false });
}
} catch (err) {
console.log(err);
res.status(422).send(err.message);
}
};
EDIT: Could the problem be that I'm resolving the promise again?
It was fixed when I set a timeout:
if (success) {
  setTimeout(function () {
    googleApi.uploads(req.user._id).then((result) => {
      res.status(200).send(result);
    });
  }, 500);
}
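The timeout only papers over a race, though: createPdf returns before the write stream has flushed the PDF to disk, so the upload can read a still-empty file. A sketch of a version that resolves only once the file is fully written (assuming generateInnerPdf writes synchronously, as above), which removes the need for the timeout:

const createPdf = (req, res, missions) => {
  return new Promise((resolve) => {
    const { _id } = req.user;
    const pdfPath = path.join('data', 'pdf', _id + '.pdf');
    const doc = new PDFDocument({ margin: 50 });
    const stream = fs.createWriteStream(pdfPath);
    doc.pipe(stream);
    generateInnerPdf(doc, missions);
    doc.end();
    // 'finish' fires only after all PDF bytes have been flushed to disk,
    // so the subsequent upload never reads a half-written file
    stream.on('finish', () => resolve({ success: true }));
    stream.on('error', () => resolve({ success: false }));
  });
};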
Related
I have created a page from which I can successfully upload an image, but the image comes up with the name undefined.jpg. This project uses the express-fileupload middleware.
admin.js
var express = require("express");
const productHelpers = require("../helpers/product-helpers");
var router = express.Router();
/* GET users listing. */
router.get("/", function (req, res, next) {
productHelpers.getAllProducts().then((products) => {
console.log(products);
res.render("admin/view-products", { admin: true, products });
});
});
router.get("/add-product", function (req, res) {
res.render("admin/add-product");
});
router.post("/add-product", (req, res, next) => {
productHelpers.addProduct(req.body, (id) => {
let image = req.files.Image;
console.log(id);
image.mv("./public/product-images/" + id + ".jpg", (err) => {
if (!err) {
res.render("admin/add-product");
} else {
console.log(err);
}
});
});
});
module.exports = router;
product-helpers.js
Here I do the callback using the id:
var db = require('../config/connection')
var collection = require('../config/collections')
module.exports={
addProduct:(product,callback)=>{
db.get().collection('product').insertOne(product).then((data)=>{
callback(data._id)
})
}
}
I think you should use data.insertedId:
module.exports = {
addProduct: (product, callback) => {
db.get().collection('product').insertOne(product).then((data) => {
callback(data.insertedId)
})
}
}
In the MongoDB documentation (https://www.mongodb.com/docs/manual/reference/method/db.collection.insertOne/) it is shown that insertOne() returns:
{
"acknowledged" : true,
"insertedId" : ObjectId("56fc40f9d735c28df206d078")
}
Insert a Document without Specifying an _id Field
In the following example, the document passed to the insertOne() method does not contain the _id field:
try {
db.products.insertOne({ item: "card", qty: 15 });
} catch (e) { print(e); };
The operation returns the following document:
{
"acknowledged" : true,
"insertedId" : ObjectId("56fc40f9d735c28df206d078")
}
But anyway, you can use the name property from req.files.Image, for example:
const image = req.files.Image;
const imageName = image.name;
const imagePath = './public/product-images/' + imageName;
console.log(imagePath);
image.mv(imagePath, (error) => {
//something here
})
If you want to use more properties, look here:
https://www.npmjs.com/package/express-fileupload
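Putting the two fixes together (a sketch only, assuming the form field is still named Image as in the question), you could keep the inserted id as the file name but take the extension from the uploaded file instead of hardcoding .jpg:

// needs: const path = require("path");
router.post("/add-product", (req, res) => {
  productHelpers.addProduct(req.body, (id) => {
    const image = req.files.Image;
    const extension = path.extname(image.name); // e.g. ".jpg" or ".png"
    image.mv("./public/product-images/" + id + extension, (err) => {
      if (!err) {
        res.render("admin/add-product");
      } else {
        console.log(err);
      }
    });
  });
});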
I have a page where I made the backend in Node.js + MongoDB and the frontend with React. In the backend I have a middleware that I use to upload images to Cloudinary. For example, one route creates a new pet, and when I do the POST request with Postman everything goes well: the new pet is created in the db and the Cloudinary URL is saved in the image field. The problem comes when I try to do the same with a form in React. Everything seems to go well too, but the image field (where with Postman I get the Cloudinary URL) is now empty.
The node controller code:
const petCreatePost = async(req, res, next) => {
const { type, name, avatar, age, sex, breed, size, isVaccinated, isSterilized, isDewormed, microchip, province, shelter, status } = req.body;
try {
const newPet = new Pet({
type,
name,
avatar: req.imageUrl ? req.imageUrl : '',
age,
sex,
breed,
size,
isVaccinated,
isSterilized,
isDewormed,
microchip,
province,
shelter,
status
});
const createdPet = await newPet.save();
return res.status(200).json({ message: 'Mascota creada correctamente', pet: createdPet });
} catch (error) {
return next(error);
}
}
Cloudinary middleware:
const multer = require('multer');
const path = require('path');
const fs = require('fs');
const cloudinary = require('cloudinary').v2
const ACCEPTED_FILE = [ 'image/jpg', 'image/jpeg', 'image/png' ];
const fileFilter = (req, file, cb) => {
if(!ACCEPTED_FILE.includes(file.mimetype)) {
const error = new Error ('Extensión del archivo inválida.')
error.status = 400;
return cb(error);
}
return cb(null, true);
};
const storage = multer.diskStorage({
filename: (req, file, cb) => {
const fileName = `${Date.now()}-${file.originalname}`;
cb(null, fileName);
},
destination: (req, file, cb) => {
const directory = path.join(__dirname, '../public/uploads');
cb(null, directory);
}
});
const upload = multer({
storage,
fileFilter,
});
const uploadToCloudinary = async (req, res, next) => {
try {
console.log('req', req);
if(req.file) {
const path = req.file.path;
const image = await cloudinary.uploader.upload(path);
req.imageUrl = image.secure_url;
console.log('image url', req.imageUrl);
return next();
} else {
return next();
}
} catch (error) {
return next(error);
}
};
module.exports = { upload, uploadToCloudinary };
How i use the middleware:
router.post('/new', [upload.single('avatar'), uploadToCloudinary], controller.petCreatePost);
The react component:
import React, { useContext } from 'react';
export const NewPet = () => {
const submitForm = async (e) => {
e.preventDefault();
const { type, name, age, avatar, sex, breed, size, isVaccinated, isSterilized, isDewormed, microchip, province, status } = e.target;
const form = {
type: type.value,
name: name.value,
age: age.value,
sex: sex.value,
breed: breed.value,
size: size.value,
isVaccinated: isVaccinated.value,
isSterilized: isSterilized.value,
isDewormed: isDewormed.value,
microchip: microchip.value,
province: province.value,
status: status.value
};
// const form = new FormData();
// form.append('type', type.value);
// form.append('name', name.value);
// form.append('age', age.value);
// form.append('sex', sex.value);
// form.append('breed', breed.value);
// form.append('size', size.value);
// form.append('isVaccinated', isVaccinated.value);
// form.append('isSterilized', isSterilized.value);
// form.append('isDewormed', isDewormed.value);
// form.append('microchip', microchip.value);
// form.append('province', province.value);
// form.append('status', status.value);
// form.append('avatar', imagenPrueba);
try {
const pet = await newPet(form);
console.log('pet', pet);
} catch (err) {
console.log(err);
}
  }
  // ...the rest of the component (the form JSX) is omitted...
};
The commented-out part of the code is an alternative that I tried, because I'm sending a file and I have to use a FormData, but it is not working either. I also checked that the form has enctype="multipart/form-data".
And lastly, the newPet function that I use to connect to the backend:
export const newPet = async(form) => {
const req = await fetch(newPetUrl, {
method: "POST",
headers: {
Accept: "application/json",
"Content-Type": "application/json",
"Access-Control-Allow-Origin": "*",
},
credentials: "include",
body: JSON.stringify(form),
});
const response = await req.json();
if (!req.ok) {
throw new Error(response.message);
}
return response;
};
I hope someone can help me. Thanks!
You need to await the URL from Cloudinary. I had this problem too.
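For what it's worth, here is a sketch of how the React side usually has to send this (assuming the same newPetUrl endpoint): the body must be the FormData instance itself, and the Content-Type header must not be set by hand. A JSON body never reaches multer, and a manually set Content-Type drops the multipart boundary the browser would otherwise add:

export const newPet = async (form) => {
  // form is a FormData instance that includes the file, e.g.
  // form.append('avatar', fileInput.files[0])
  const req = await fetch(newPetUrl, {
    method: 'POST',
    // No Content-Type header here: the browser sets
    // multipart/form-data with the correct boundary itself
    credentials: 'include',
    body: form,
  });
  const response = await req.json();
  if (!req.ok) {
    throw new Error(response.message);
  }
  return response;
};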
The current URL is http://localhost:3000/products/60e03e16229da058968f33c3,
but I want to mask it as http://localhost:3000/products/product-1.
This is the getStaticPaths code that I have (it uses Apollo Client to fetch the data):
const GET_ALL_QUERY = gql`
{
products {
id
}
}
`;
export async function getStaticPaths() {
const paths = await client
.query({
query: GET_ALL_QUERY,
})
.then((data) => {
const productsData = data.data.products;
if (productsData.length) {
return productsData.map((res) => {
return { params: { id: res.id } };
});
}
})
.catch((err) => Promise.reject(new Error(err)));
return {
paths,
fallback: false,
};
}
export async function getStaticProps({ params }) {
try {
const res = await client.query({
query: GET_ONE_QUERY,
variables: { id: params.id },
});
return {
props: res.data.product,
};
} catch (err) {
throw new Error(err);
}
}
If I use
<Link href={`/products/${product_id}`} as={`/products/${product_name}`}>
</Link>
it doesn't seem to work: it looks for a /product-1 page and returns page not found.
Also, in server.js I have:
const server = express();
server.get("/products/:id", (req, res) => {
const actualPage = "/products";
const queryParams = { id: req.params.id };
console.dir("req.params.id = " + JSON.stringify(req.params.id));
app.render(req, res, actualPage, queryParams);
});
I suspect this could be affecting the dynamic path.
I am not able to figure out the solution. I searched extensively, but all resources refer to using only the id. Is this possible in Next.js?
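One approach (a sketch, not tested against this schema): Link's as prop only masks the URL during client-side navigation; on a full page load Next.js has to resolve the visible URL itself, so the product name has to be the real route parameter. Assuming every product has a unique name field such as product-1, and a hypothetical GET_ONE_BY_NAME_QUERY that looks a product up by name, a pages/products/[name].js could be built like this:

const GET_ALL_QUERY = gql`
  {
    products {
      name
    }
  }
`;

export async function getStaticPaths() {
  const { data } = await client.query({ query: GET_ALL_QUERY });
  return {
    // one static page per product name instead of per id
    paths: data.products.map((res) => ({ params: { name: res.name } })),
    fallback: false,
  };
}

export async function getStaticProps({ params }) {
  // GET_ONE_BY_NAME_QUERY is hypothetical: the backend must be able
  // to resolve a product by its name instead of its id
  const res = await client.query({
    query: GET_ONE_BY_NAME_QUERY,
    variables: { name: params.name },
  });
  return {
    props: res.data.product,
  };
}

With that in place, <Link href={`/products/${product_name}`}> works without an as prop, and the custom Express route would match on :name instead of :id.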
I am using the Node.js Google Drive client, trying to download certain files from a gdrive. When using the example provided in their GitHub I get an Uncaught (in promise) TypeError: res.data.on is not a function error. The file is still created locally, but it's just an empty file from createWriteStream().
When I log the res variable I get: ReadableStream {locked: false}.
I'm pretty new to streams so this is quite a bit over my head.
Here is my code. You'll notice it's almost exactly what the example they provide looks like.
syncFileFromDrive(fileId, filePath) {
filePath.replace(userDataPath, '');
filePath = `${userDataPath}/${filePath}`;
filePath.replaceAll('//', '/');
logger.info(`Sync file from drive: Syncing file to path: ${filePath}`);
logger.info(`Sync file from drive: File id: ${fileId}`)
const dest = fs.createWriteStream(filePath);
let progress = 0;
this.drive.files.get({fileId, alt: 'media'}, {responseType: 'stream'}).then(res => {
console.log(res)
console.log(res.data)
res.data
.on('end', () => {
console.log('Done downloading file.');
folderStructure.buildFileMenu()
resolve(dest)
})
.on('error', err => {
console.error('Error downloading file.');
reject(err);
})
.on('data', d => {
progress += d.length;
if (process.stdout.isTTY) {
process.stdout.clearLine();
process.stdout.cursorTo(0);
process.stdout.write(`Downloaded ${progress} bytes`);
}
})
.pipe(dest);
});
}
Edit: I should add that this is for an Electron application. So while Node is supported, I'm not sure whether that affects the way I can use streams.
This feels like a bit of a workaround, and I am open to any suggestions, but it solved the issue I was having.
syncFileFromDrive(fileId, filePath) {
filePath.replace(userDataPath, '');
filePath = `${userDataPath}/${filePath}`;
filePath.replaceAll('//', '/');
logger.info(`Sync file from drive: Syncing file to path: ${filePath}`);
logger.info(`Sync file from drive: File id: ${fileId}`)
this.drive.files
.get({ fileId, alt: "media"}, {responseType: 'stream'})
.then((res) => {
const dest = fs.createWriteStream(filePath);
const decoder = new TextDecoder("utf-8");
const reader = res.data.getReader()
reader.read().then(function processText({ done, value }) {
if (done) {
console.log("Stream complete");
return;
}
dest.write(decoder.decode(value))
// Read some more, and call this function again
return reader.read().then(processText);
});
})
}
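One caveat with this (my own note): decoder.decode() treats every chunk as UTF-8 text, which can corrupt binary files such as images or PDFs. Writing the raw bytes should be safer; value is a Uint8Array, so the read loop can be changed to something like:

reader.read().then(function processChunk({ done, value }) {
  if (done) {
    dest.end(); // close the file once the stream is exhausted
    return;
  }
  dest.write(Buffer.from(value)); // raw bytes, no text decoding
  return reader.read().then(processChunk);
});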
Please take a look at my implementation, which I used for downloading the file:
import { google } from 'googleapis';
const getOauth2Client = () => new google.auth.OAuth2(
process.env.GOOGLE_DRIVE_CLIENT_ID,
process.env.GOOGLE_DRIVE_CLIENT_SECRET,
process.env.GOOGLE_DRIVE_REDIRECT_URL
);
const downloadFile = ({ id, access_token, path }) => {
return new Promise((resolve, reject) => {
const dest = fs.createWriteStream(path);
const oauth2Client = getOauth2Client();
oauth2Client.setCredentials({ access_token });
const drive = google.drive({
version: 'v3',
auth: oauth2Client
});
drive.files.get(
{ fileId: id, alt: 'media' }, { responseType: 'stream' },
(err, res) => {
if (err) return reject(err);
res.data
.on('end', () => {
console.log('Done');
})
.on('error', _e => {
console.log('Error', _e);
if (_e) reject(_e);
})
.pipe(dest);
dest.on('finish', () => {
console.log('Download finished');
resolve(true);
});
}
);
});
};
This is because in the renderer process, Google's gaxios module uses the fetch API instead of Node's http. The Fetch API returns a ReadableStream, unlike http, which returns a Node.js Readable. Currently there's no way to change the default adapter. You can use this quick workaround to convert it.
// Transforms a web ReadableStream into a Node.js Readable
const { Readable } = require('stream'); // needed for the conversion below
function toNodeReadable(webStream) {
const reader = webStream.getReader();
const rs = new Readable();
rs._read = async () => {
const result = await reader.read();
if (!result.done) {
rs.push(Buffer.from(result.value));
} else {
rs.push(null);
}
};
return rs;
}
Usage with your code:
syncFileFromDrive(fileId, filePath) {
filePath.replace(userDataPath, '');
filePath = `${userDataPath}/${filePath}`;
filePath.replaceAll('//', '/');
logger.info(`Sync file from drive: Syncing file to path: ${filePath}`);
logger.info(`Sync file from drive: File id: ${fileId}`)
const dest = fs.createWriteStream(filePath);
let progress = 0;
this.drive.files.get({fileId, alt: 'media'}, {responseType: 'stream'}).then(res => {
console.log(res)
console.log(res.data)
toNodeReadable(res.data)
.on('end', () => {
console.log('Done downloading file.');
folderStructure.buildFileMenu()
resolve(dest)
})
.on('error', err => {
console.error('Error downloading file.');
reject(err);
})
.on('data', d => {
progress += d.length;
if (process.stdout.isTTY) {
process.stdout.clearLine();
process.stdout.cursorTo(0);
process.stdout.write(`Downloaded ${progress} bytes`);
}
})
.pipe(dest);
});
}
I previously had a single file upload set up and working properly. Now I need to make it handle multiple files.
Here is my code right now:
const express = require('express')
const passport = require('passport') // assumed: configured elsewhere in the original file
const multer = require('multer')
const { Storage } = require('@google-cloud/storage')
const storage = new Storage()
const m = multer({ storage: multer.memoryStorage() })
const router = express.Router()
module.exports = app => {
app.use('/', router)
router.post(
'/reader-:shortId/file-upload',
passport.authenticate('jwt', { session: false }),
m.array('files'),
async function (req, res) {
const bucketName = req.params.shortId.toLowerCase()
await storage.createBucket(bucketName)
const bucket = storage.bucket(bucketName)
let promises = []
req.files.forEach((file) => {
const blob = bucket.file(file.originalname)
const newPromise = new Promise((resolve, reject) => {
blob.createWriteStream({
metadata: { contentType: file.mimetype }
}).on('finish', async response => {
await blob.makePublic()
resolve(response)
}).on('error', err => {
reject(new Error('upload error: ' + err.message))
}).end()
})
promises.push(newPromise)
})
Promise.all(promises).then((response) => {
// the response I get here is [undefined, undefined]
res.status(200).send(response)
}).catch((err) => {
res.status(400).send(err.message)
});
})
}
req.files does give me an array of files, with a buffer and a size that make sense.
The promises all resolve.
But once I check the files in the Google bucket, they have the right name but don't have any content (and a size of 0).
As I said before, it was working when I was doing it with one file (using m.single('file')).
I don't want to use the bucket as the destination in the multer setup, because I also have to change the file name before uploading to the Google bucket.
Edit: this is the code example given by the Google Cloud documentation for single-file uploads (https://cloud.google.com/nodejs/getting-started/using-cloud-storage):
function sendUploadToGCS (req, res, next) {
if (!req.file) {
return next();
}
const gcsname = Date.now() + req.file.originalname;
const file = bucket.file(gcsname);
const stream = file.createWriteStream({
metadata: {
contentType: req.file.mimetype
},
resumable: false
});
stream.on('error', (err) => {
req.file.cloudStorageError = err;
next(err);
});
stream.on('finish', () => {
req.file.cloudStorageObject = gcsname;
file.makePublic().then(() => {
req.file.cloudStoragePublicUrl = getPublicUrl(gcsname);
next();
});
});
stream.end(req.file.buffer);
}
I originally had something like that working, but I just don't understand where it is getting the file buffer data from. That is probably where things are different with multiple files.
I know it's too late, but someone might be looking for an answer about uploading multiple files to Google Cloud Storage.
Dependencies:
Express
Google Cloud Library
Multer
Body Parser
This is the controller code.
exports.post_image_upload = async (req, res) => {
/** Check if files exist */
if (!req.files) {
res.status(400).send('No file uploaded.');
return;
}
let PublicUrls = []
req.files.forEach((file) => {
const blob = bucket.file(file.fieldname + '-' + Date.now() + path.extname(file.originalname))
const blobStream = blob.createWriteStream({
metadata: { contentType: file.mimetype }
})
blobStream.on('finish', ()=> {
blob.makePublic()
})
blobStream.on('error', err => {
//Put your error message here
})
blobStream.end(file.buffer)
const Url = `https://storage.googleapis.com/${bucket.name}/${blob.name}`
PublicUrls.push(Url)
})
res.send(PublicUrls)
}
Good Luck
Ok, turns out I had to change
.end()
to
.end(file.buffer)
With multer's memoryStorage() the uploaded bytes only exist in file.buffer, so calling end() with no argument closed each write stream after writing nothing; that is why the objects had the right names but a size of 0.
Marie Pelletier, I think your approach is 100% right. I modified your code a little, trying to avoid the async response:
let promises = []
req.files.forEach((file) => {
const blob = bucket.file(file.originalname)
const newPromise = new Promise((resolve, reject) => {
blob.createWriteStream({
metadata: { contentType: file.mimetype },
resumable: false //Good for small files
}).on('finish', () => {
const Url = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
resolve({ name: file.originalname, url: Url });
}).on('error', err => {
reject(new Error('upload error: ' + err.message));
}).end(file.buffer);
})
promises.push(newPromise);
})
Promise.all(promises).then((response) => {
res.status(200).send(response)
}).catch((err) => {
res.status(400).send(err.message)
});
This way, I didn't get 'undefined' anymore.