I am trying to download a zip file in my MERN application. I am getting the file in the response; however, the client does not download the actual file. I am using archiver to zip files and then return them in a fetch call.
Archive Service:
const archiver = require('archiver')
const zip = archiver('zip')
const path = require('path')
const fs = require('fs')

const appDir = path.dirname(require.main.filename)

exports.FileArchiver = function (feed, res) {
  // const app = this.app;
  const uploadsDir = path.join(appDir, '/uploads/');
  const templatesDir = path.join(appDir, '/templates/');
  const feedArray = feed.feed.data;
  const extensions = [".jpg", ".png", ".svg"];
  const feedArrayString = JSON.stringify(feedArray);
  const feedArrayObject = JSON.parse(feedArrayString);
  let imageArray = [];
  let templateType = 'b'; //test

  // grab image names from object
  feedArrayObject.forEach(function (x) { iterate(x) });
  // remove duplicates
  imageArray = uniq_fast(imageArray);

  // zip images
  for (let i = 0; i < imageArray.length; i++) {
    console.log(imageArray[i])
    const filePath = path.join(uploadsDir, imageArray[i]);
    zip.append(fs.createReadStream(filePath), { name: 'images/' + imageArray[i] });
  }

  res.attachment(feed.name + '.zip');
  zip.pipe(res);
  zip.append(feedArrayString, { name: 'feed.json' })
  zip.directory(templatesDir + '/' + templateType, false);

  zip.on('error', (err) => { throw err; });
  zip.on('warning', (err) => {
    if (err.code === 'ENOENT') {
      console.log('ENOENT for archive')
    } else {
      throw err;
    }
  });

  zip.finalize();
  return this;
}
Client side fetch:
export const downloadData = (url, _id, name, type) => {
  return fetch(url, { method: 'GET' })
    .then((res) => {
      console.log(res);
      return res;
    })
}
Client side Headers attached:
content-disposition: attachment; filename="a_gpo.zip"
content-type: application/zip
The network request returns a 200 status, and I can also see that the attachment response in the client contains the raw zip binary. However, the client never triggers the actual file download.
Try redirecting the browser location to the download URL instead of fetching it:
export const downloadData = (url) => {
  window.location = url;
}
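If you need to keep the fetch call (for example, to send auth headers), another common approach is to read the response as a Blob and trigger the download with a temporary anchor element. This is a minimal sketch against the same endpoint, reusing the name argument from the original downloadData signature; treat it as an outline rather than a drop-in fix:

export const downloadData = (url, name) => {
  return fetch(url, { method: 'GET' })
    .then((res) => res.blob())
    .then((blob) => {
      // Create an object URL for the zip blob and click a hidden link to save it
      const objectUrl = URL.createObjectURL(blob);
      const link = document.createElement('a');
      link.href = objectUrl;
      link.download = name + '.zip'; // e.g. "a_gpo.zip"
      document.body.appendChild(link);
      link.click();
      link.remove();
      URL.revokeObjectURL(objectUrl);
    });
}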
I am trying to make a component that takes a PDF from an input (or an already uploaded one), extracts pages from it, and uploads it again.
When choosing a file from the input (choosing a file from my computer), I am using this:
const handleFileChange = async (event) => {
  const file = event.target.files[0];
  setFiles(event.target.files[0])
  const fileName = event.target.files[0].name
  setFileName(fileName);
  const fileReader = new FileReader();
  fileReader.onload = async () => {
    const pdfBytes = new Uint8Array(fileReader.result);
    const pdfDoc = await PDFDocument.load(pdfBytes);
    setPdfDoc(pdfDoc);
    setPdfBlob(pdfBytes)
  };
  fileReader.readAsArrayBuffer(file);
  setShowPdf(true)
};
We get a pdfDoc and a Uint8Array. Then I use the pdfDoc to get pages and extract a new PDF file; this works fine.
Now, when selecting a file that was already uploaded, I use this to call the API and get the file:
const handleGetFile = async (url) => {
  const headers = {
    Authorization: "Bearer " + (localStorage.getItem("token")),
    Accept: 'application/pdf'
  }
  await axios.put(`${process.env.NEXT_PUBLIC_API_URL}getPdfFileBlob`, {
    pdfUrl: `https://handle-pdf-photos-project-through-compleated-task.s3.amazonaws.com/${url}`
  }, { responseType: 'arraybuffer', headers }).then((res) => {
    const handlePdf = async () => {
      const uint8Array = new Uint8Array(res.data);
      const pdfBlob = new Blob([uint8Array], { type: 'application/pdf' });
      setPdfBlob(uint8Array)
      // setPdfDoc(pdfBlob) .....? how do i create a pdf doc from the unit8array
    }
    handlePdf()
  }).catch((err) => {
    console.log(err)
  })
}
This is the endpoint I am calling:
app.put('/getPdfFileBlob', async function (req, res) {
  try {
    console.log(req.body.pdfUrl)
    const url = req.body.pdfUrl;
    const fileName = 'file.pdf';
    const file = fs.createWriteStream(fileName);
    https.get(url, (response) => {
      response.pipe(file);
      file.on('finish', () => {
        file.close();
        // Serve the file as a response
        const pdf = fs.readFileSync(fileName);
        res.setHeader('Content-Type', 'application/pdf');
        res.setHeader('Content-Transfer-Encoding', 'Binary');
        res.setHeader('Content-Disposition', 'inline; filename="' + fileName + '"');
        res.send(pdf);
      });
    });
  } catch (error) {
    res.status(500).json({ success: false, msg: "server side err" })
  }
})
After getting this file, here is what I am trying to do:
const handlePageSelection = (index) => {
  setSelectedPages(prevSelectedPages => {
    const newSelectedPages = [...prevSelectedPages];
    const pageIndex = newSelectedPages.indexOf(index);
    if (pageIndex === -1) {
      newSelectedPages.push(index);
    } else {
      newSelectedPages.splice(pageIndex, 1);
    }
    return newSelectedPages;
  });
};

const handleExtractPages = async () => {
  for (let i = pdfDoc.getPageCount() - 1; i >= 0; i -= 1) {
    if (!selectedPages.includes(i + 1)) {
      pdfDoc.removePage(i);
    }
  }
  await pdfDoc.save();
};
Well, in the first case, where I upload the PDF file from local storage, I get a pdfDoc.
(console output showing both pdfDoc and pdfBlob)
When I select an already existing file, I can't find a way to turn the Uint8Array buffer into a PDFDocument.
(console output showing pdfBlob but no pdfDoc)
What I want is to transform the pdfBlob into a PDFDocument, or to get the PDFDocument from the ArrayBuffer, so I can use getPages on it.
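If it helps, pdf-lib's PDFDocument.load accepts raw bytes (a Uint8Array or ArrayBuffer) directly, the same way the upload path already does, so the missing step inside handlePdf could look like the sketch below. This is an outline against the code above, not a tested drop-in:

const handlePdf = async () => {
  const uint8Array = new Uint8Array(res.data);
  // pdf-lib can load a PDFDocument straight from the raw bytes
  const loadedPdfDoc = await PDFDocument.load(uint8Array);
  setPdfDoc(loadedPdfDoc); // same state setter used in handleFileChange
  setPdfBlob(uint8Array);
};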
I want to make an API that takes a file or folder path from the user and uploads it to AWS S3. I made progress, but
when the user gives a file path, it searches for that path on the server, not on the user's PC.
I know I made a mistake, but I don't know how to connect the API to the user's PC and get access to its file system.
Here is the code for the POST route:
router.post("/create/:id", auth, async (req, res) => {
try {
let form = new multiparty.Form();
form.parse(req, async (err, fields, files) => {
console.log(fields);
console.log(files);
//check if user has access to project
const user_id = req.userId;
const project_id = req.params.id;
const user_access = await check_user_access_project(user_id, project_id);
const user = await User.findById(user_id);
const project = await Project.findById(project_id);
if (user_access === 1) {
//create version
const version = new Version({
project_id: project_id,
user_id: user_id,
versionName: fields.versionName[0],
version_description: fields.versionDescription[0],
version_file: [],
});
const version_data = await version.save();
console.log(version_data);
let version_id = version_data._id;
//sync folders to s3
const version_folder_path = fields.files_path[0];
let key = `${user.firstName}_${user_id}/${project.projectName}/${fields.versionName[0]}`;
const version_folder_list = await sync_folders(
version_folder_path,
key
);
console.log("version folder list", version_folder_list);
//update version with version folders
await Version.findByIdAndUpdate(
version_id,
{
$set: {
version_file: version_folder_list,
},
},
{ new: true }
);
//wait for version update
await version.save();
//send response
res.json({
success: true,
version: version_data,
});
} else {
res.status(401).json({
success: false,
message: "User does not have access to project",
});
}
});
} catch (error) {
res.status(400).json({ message: error.message });
}
});
Here is the folder sync code:
const sync_folders = async (folder_path, key) => {
  function getFiles(dir, files_) {
    files_ = files_ || [];
    var files = fs.readdirSync(dir);
    for (var i in files) {
      var name = dir + "/" + files[i];
      if (fs.statSync(name).isDirectory()) {
        getFiles(name, files_);
      } else {
        files_.push(name);
      }
    }
    return files_;
  }

  const files = getFiles(folder_path);
  console.log(files);
  const fileData = [];
  for (let i = 0; i < files.length; i++) {
    const file = files[i];
    console.log(file);
    const fileName = file.split("/").pop();
    const fileType = file.split(".").pop();
    const fileSize = fs.statSync(file).size;
    const filePath = file;
    const fileBuffer = fs.readFileSync(filePath);
    //folder is last part of folder path (e.g. /folder1/folder2/folder3)
    const folder = folder_path.split("/").pop();
    console.log("folder: " + folder);
    //split filepath
    const filePath_ = filePath.split(folder).pop();
    let filekey = key + "/" + folder + filePath_;
    console.log("filekey: " + filekey);
    const params = {
      Bucket: bucket,
      Key: filekey,
      Body: fileBuffer,
      ContentType: fileType,
      ContentLength: fileSize,
    };
    const data = await s3.upload(params).promise();
    console.log(data);
    fileData.push(data);
  }
  console.log("file data", fileData);
  console.log("files uploaded");
  return fileData;
};
If somebody can help me, please do; I need your help.
You need to post the file itself in a form rather than just sending the user's directory path, and then upload the result to your S3 bucket.
This might be a good start if you're new to it:
https://www.w3schools.com/nodejs/nodejs_uploadfiles.asp
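As a rough illustration of that idea (the field and variable names here are illustrative, not from the question's code): the browser sends the actual file bytes in a multipart form, and on the server you read the uploaded temp file from the files argument that multiparty already hands to form.parse, instead of reading fields.files_path. This sketch assumes it slots into the existing route and reuses its s3, fs, bucket, and key values:

// Browser side: send the file's bytes, not its path
const uploadVersion = (file, versionName, versionDescription) => {
  const formData = new FormData();
  formData.append("versionName", versionName);
  formData.append("versionDescription", versionDescription);
  formData.append("file", file); // a File object from <input type="file">
  return fetch(`/create/${projectId}`, { method: "POST", body: formData });
};

// Server side: multiparty writes the upload to a temp path you can stream to S3
form.parse(req, async (err, fields, files) => {
  const uploaded = files.file[0]; // { originalFilename, path, size, ... }
  await s3.upload({
    Bucket: bucket,
    Key: `${key}/${uploaded.originalFilename}`,
    Body: fs.createReadStream(uploaded.path),
  }).promise();
});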
I'm using Node 12.x to write my Lambda function. Here is the parsing error that I am getting. What could be the reason?
Update
const im = require("imagemagick");
const fs = require("fs");
const os = require("os");
const uuidv4 = require("uuid/v4");
const { promisify } = require("util");
const AWS = require('aws-sdk');

const resizeAsync = promisify(im.resize)
const readFileAsync = promisify(fs.readFile)
const unlinkAsync = promisify(fs.unlink)

AWS.config.update({ region: 'ap-south-1' })
const s3 = new AWS.S3();

exports.handler = async (event) => {
  let filesProcessed = event.Records.map((record) => {
    let bucket = record.s3.bucket.name;
    let filename = record.s3.object.key;

    //Fetch filename from S3
    var params = {
      Bucket: bucket,
      Key: filename
    };
    //let inputData = await s3.getObject(params).promise()
    let inputData = await s3.getObject(params).promise();

    //Resize the file
    let tempFile = os.tmpdir() + '/' + uuidv4() + '.jpg';
    let resizeArgs = {
      srcData: inputData.Body,
      dstPath: tempFile,
      width: 150
    };
    await resizeAsync(resizeArgs)

    //Read the resized File
    let resizedData = await readFileAsync(tempFile)

    //Upload the resized file to S3
    let targetFilename = filename.substring(0, filename.lastIndexOf('.') + '-small.jpg')
    var params = {
      Bucket: bucket + '-dest',
      Key: targetFilename,
      Body: new Buffer(resizedData),
      ContentType: 'image/jpeg'
    }
    await s3.putObject(params).promise();
    return await unlinkAsync(tempFile)
  })
  await Promise.all(filesProcessed)
  return "done"
}
Here is the same code. I am getting an "Unexpected token" error on the s3 call when hovering over the red mark (shown in the image).
What you can do is declare inputData as below and initialize it with the response from getObject.
let inputData;

var params = {
  Bucket: "examplebucket",
  Key: "HappyFace.jpg"
};
s3.getObject(params, function (err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else inputData = data;                // successful response
});
For more, you can refer here
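Worth noting: the parsing error itself comes from using await inside the .map callback, which is not declared async; marking that callback async (and keeping the Promise.all) is another way to make the original code compile. A minimal sketch of just that change, assuming the rest of the handler stays as posted:

exports.handler = async (event) => {
  // The callback is now async, so await is legal inside it;
  // map returns an array of promises that Promise.all awaits below.
  const filesProcessed = event.Records.map(async (record) => {
    const params = {
      Bucket: record.s3.bucket.name,
      Key: record.s3.object.key
    };
    const inputData = await s3.getObject(params).promise();
    // ...resize, upload, and clean up as in the original code...
    return inputData;
  });
  await Promise.all(filesProcessed);
  return "done";
};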
I am attempting to grab a PDF stored in Azure Blob Storage via a Node backend and then serve that PDF file to a React frontend. I am using Microsoft's @azure/storage-blob with a BlockBlobClient, but every example I find online converts the readableStreamBody to a string. The blob has a content type of application/pdf. I've tried passing the readableStreamBody and the pure output to the frontend, but those result in broken PDFs. I also followed the documentation online, made it a string, and passed that to the frontend. That produced a PDF that would open and had the proper number of pages but was completely blank.
Node.js Code on the Backend
app.get('/api/file/:company/:file', (req, res) => {
  const containerClient = blobServiceClient.getContainerClient(req.params.company);
  const blockBlobClient = containerClient.getBlockBlobClient(req.params.file);
  blockBlobClient.download(0)
    .then(blob => streamToString(blob.readableStreamBody))
    .then(response => res.send(response))
});
FrontEnd Code
getFileBlob = (company, file) => {
  axios(`/api/file/${company}/${file}`, { method: 'GET', responseType: 'blob' })
    .then(response => {
      const file = new Blob(
        [response.data],
        { type: 'application/pdf' });
      const fileURL = URL.createObjectURL(file);
      window.open(fileURL);
    })
    .catch(error => {
      console.log(error);
    });
}
This might help you; it's working for me.
Node
var express = require('express');
const { BlobServiceClient } = require('@azure/storage-blob');
var router = express.Router();

const AZURE_STORAGE_CONNECTION_STRING = 'YOUR_STRING';

async function connectAzure() {
  // Create the BlobServiceClient object which will be used to create a container client
  const blobServiceClient = BlobServiceClient.fromConnectionString(
    AZURE_STORAGE_CONNECTION_STRING
  );

  const containerName = 'filestorage';
  const blobName = 'sample.pdf';

  console.log('\nConnecting container...');
  console.log('\t', containerName);

  // Get a reference to a container
  const containerClient = blobServiceClient.getContainerClient(containerName);
  // Get a block blob client
  const blockBlobClient = containerClient.getBlockBlobClient(blobName);

  for await (const blob of containerClient.listBlobsFlat()) {
    console.log('\t', blob.name);
  }

  const downloadBlockBlobResponse = await blockBlobClient.download(0);
  const data = await streamToString(downloadBlockBlobResponse.readableStreamBody)
  return data;
}

async function streamToString(readableStream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readableStream.on('data', data => {
      chunks.push(data.toString());
    });
    readableStream.on('end', () => {
      resolve(chunks.join(''));
    });
    readableStream.on('error', reject);
  });
}

router.get('/', async function (req, res, next) {
  const data = await connectAzure();
  res.send({ data }).status(200);
});

module.exports = router;
Front-end
function createFile() {
  fetch('/createfile').then(res => {
    res.json().then(data => {
      var blob = new Blob([data.data], { type: 'application/pdf' });
      var fileURL = URL.createObjectURL(blob);
      // "filename" and "a" come from surrounding code that is not shown here
      if (filename) {
        if (typeof a.download === 'undefined') {
          window.location.href = fileURL;
        } else {
          window.open(fileURL, '_blank');
        }
      }
    })
  }).catch(err => console.log(err))
}
HTML
<body><h1>Express</h1><p>Welcome to Express</p><button onclick="createFile()">Create File</button></body>
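One caveat worth flagging, since the question mentions that string conversion produced blank pages: for binary content like a PDF it is generally safer to collect the stream chunks as Buffers and send those, rather than joining strings. A rough sketch of that variant of the helper above, with the route and return type assumed rather than taken from the answer:

// Variant of streamToString that keeps the bytes intact for binary blobs
async function streamToBuffer(readableStream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readableStream.on('data', chunk => {
      // keep raw Buffer chunks instead of converting them to strings
      chunks.push(chunk instanceof Buffer ? chunk : Buffer.from(chunk));
    });
    readableStream.on('end', () => resolve(Buffer.concat(chunks)));
    readableStream.on('error', reject);
  });
}

// Usage in a route: send the buffer with a PDF content type
router.get('/pdf', async function (req, res) {
  const data = await connectAzure(); // assuming connectAzure returns streamToBuffer(...) instead
  res.setHeader('Content-Type', 'application/pdf');
  res.send(data);
});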
So I am writing a Lambda that will take in some form data via a straight POST through API Gateway (testing with Postman for now) and then send that image to S3 for storage. Every time I run it, the image uploaded to S3 is corrupted and won't open properly. I have seen people having to decode/encode the incoming data, but I feel like I have tried everything using Buffer.from. I am only looking to store either .png or .jpg. The code below does not reflect my attempts using Base64 encoding/decoding, seeing as they all failed. Here is what I have so far:
Sample request in Postman:
{
  image: (uploaded .jpg/.png),
  metadata: { tag: 'iPhone' }
}
Lambda
const AWS = require('aws-sdk')
const multipart = require('aws-lambda-multipart-parser')
const s3 = new AWS.S3();

exports.handler = async (event) => {
  const form = multipart.parse(event, false)
  const s3_response = await upload_s3(form)
  return {
    statusCode: '200',
    body: JSON.stringify({ data: s3_response })
  }
};

const upload_s3 = async (form) => {
  const uniqueId = Math.random().toString(36).substr(2, 9);
  const key = `${uniqueId}_${form.image.filename}`
  const request = {
    Bucket: 'bucket-name',
    Key: key,
    Body: form.image.content,
    ContentType: form.image.contentType,
  }
  try {
    const data = await s3.putObject(request).promise()
    return data
  } catch (e) {
    console.log('Error uploading to S3: ', e)
    return e
  }
}
EDIT:
I am now attempting to save the image into the /tmp directory and then use a read stream to upload to S3. Here is the code for that.
s3 upload function
const AWS = require('aws-sdk')
const fs = require('fs')
const s3 = new AWS.S3()

module.exports = {
  upload: (file) => {
    return new Promise((resolve, reject) => {
      const key = `${Date.now()}.${file.extension}`
      const bodyStream = fs.createReadStream(file.path)
      const params = {
        Bucket: process.env.S3_BucketName,
        Key: key,
        Body: bodyStream,
        ContentType: file.type
      }
      s3.upload(params, (err, data) => {
        if (err) {
          return reject(err)
        }
        return resolve(data)
      })
    })
  }
}
form parser function
const busboy = require('busboy')

module.exports = {
  parse: (req, temp) => {
    const ctype = req.headers['Content-Type'] || req.headers['content-type']
    let parsed_file = {}
    return new Promise((resolve) => {
      try {
        const bb = new busboy({
          headers: { 'content-type': ctype },
          limits: {
            fileSize: 31457280,
            files: 1,
          }
        })
        bb.on('file', function (fieldname, file, filename, encoding, mimetype) {
          const stream = temp.createWriteStream()
          const ext = filename.split('.')[1]
          console.log('parser -- ext ', ext)
          parsed_file = { name: filename, path: stream.path, f: file, type: mimetype, extension: ext }
          file.pipe(stream)
        }).on('finish', () => {
          resolve(parsed_file)
        }).on('error', err => {
          console.error(err)
          resolve({ err: 'Form data is invalid: parsing error' })
        })
        if (req.end) {
          req.pipe(bb)
        } else {
          bb.write(req.body, req.isBase64Encoded ? 'base64' : 'binary')
        }
        return bb.end()
      } catch (e) {
        console.error(e)
        return resolve({ err: 'Form data is invalid: parsing error' })
      }
    })
  }
}
handler
const form_parser = require('./form-parser').parse
const s3_upload = require('./s3-upload').upload
const temp = require('temp')

exports.handler = async (event, context) => {
  temp.track()
  const parsed_file = await form_parser(event, temp)
  console.log('index -- parsed form', parsed_file)
  const result = await s3_upload(parsed_file)
  console.log('index -- s3 result', result)
  temp.cleanup()
  return {
    statusCode: '200',
    body: JSON.stringify(result)
  }
}
The above edited code is a combination of other code and a GitHub repo I found that is trying to achieve the same result. Even with this solution the file is still corrupted.
Figured out this issue. The code works perfectly fine; it was an issue with API Gateway. You need to go into the API Gateway settings, set the Binary Media Type to multipart/form-data, and then re-deploy the API. Hope this helps someone else who is banging their head against the wall trying to send images via form data to a Lambda.
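A small follow-up on why that setting matters: once multipart/form-data is registered as a binary media type, API Gateway hands the request body to the Lambda base64-encoded with event.isBase64Encoded set to true, which is exactly the case the parser above covers with bb.write. A minimal sketch of decoding it yourself, assuming the same event shape and nothing else from the code above:

exports.handler = async (event) => {
  // With a binary media type configured, the body arrives base64-encoded
  const bodyBuffer = event.isBase64Encoded
    ? Buffer.from(event.body, 'base64')
    : Buffer.from(event.body);
  console.log('raw multipart body length:', bodyBuffer.length);
  // ...feed bodyBuffer to the multipart parser as in the code above...
  return { statusCode: '200', body: JSON.stringify({ received: bodyBuffer.length }) };
};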