Upload Image from form-data to S3 using a Lambda - javascript

So I am writing a Lambda that takes in some form data via a straight POST through API Gateway (testing with Postman for now) and then sends the image to S3 for storage. Every time I run it, the image uploaded to S3 is corrupted and won't open properly. I have seen people having to decode/encode the incoming data, but I feel like I have tried everything using Buffer.from. I am only looking to store either .png or .jpg. The code below does not reflect my attempts with Base64 encoding/decoding, since they all failed. Here is what I have so far -
Sample Request in postman
{
  image: (uploaded .jpg/.png),
  metadata: { tag: 'iPhone' }
}
Lambda
const AWS = require('aws-sdk')
const multipart = require('aws-lambda-multipart-parser')

const s3 = new AWS.S3();

exports.handler = async (event) => {
  const form = multipart.parse(event, false)
  const s3_response = await upload_s3(form)
  return {
    statusCode: '200',
    body: JSON.stringify({ data: s3_response })
  }
};

const upload_s3 = async (form) => {
  const uniqueId = Math.random().toString(36).substr(2, 9);
  const key = `${uniqueId}_${form.image.filename}`
  const request = {
    Bucket: 'bucket-name',
    Key: key,
    Body: form.image.content,
    ContentType: form.image.contentType,
  }
  try {
    const data = await s3.putObject(request).promise()
    return data
  } catch (e) {
    console.log('Error uploading to S3: ', e)
    return e
  }
}
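For reference, one of my earlier decode attempts looked roughly like this — just a sketch, on the assumption that API Gateway flags binary payloads with event.isBase64Encoded:
// Sketch only: decode the body if API Gateway delivered it base64-encoded,
// then hand the decoded binary-string body to the multipart parser.
const body = event.isBase64Encoded
  ? Buffer.from(event.body, 'base64').toString('binary')
  : event.body
const form = multipart.parse({ ...event, body }, false)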
EDIT:
I am now attempting to save the image into the /tmp directory and then use a read stream to upload it to S3. Here is the code for that:
s3 upload function
const AWS = require('aws-sdk')
const fs = require('fs')

const s3 = new AWS.S3()

module.exports = {
  upload: (file) => {
    return new Promise((resolve, reject) => {
      const key = `${Date.now()}.${file.extension}`
      const bodyStream = fs.createReadStream(file.path)
      const params = {
        Bucket: process.env.S3_BucketName,
        Key: key,
        Body: bodyStream,
        ContentType: file.type
      }
      s3.upload(params, (err, data) => {
        if (err) {
          return reject(err)
        }
        return resolve(data)
      })
    })
  }
}
form parser function
const busboy = require('busboy')

module.exports = {
  parse: (req, temp) => {
    const ctype = req.headers['Content-Type'] || req.headers['content-type']
    let parsed_file = {}
    return new Promise((resolve) => {
      try {
        const bb = new busboy({
          headers: { 'content-type': ctype },
          limits: {
            fileSize: 31457280,
            files: 1,
          }
        })
        bb.on('file', function (fieldname, file, filename, encoding, mimetype) {
          const stream = temp.createWriteStream()
          const ext = filename.split('.')[1]
          console.log('parser -- ext ', ext)
          parsed_file = { name: filename, path: stream.path, f: file, type: mimetype, extension: ext }
          file.pipe(stream)
        }).on('finish', () => {
          resolve(parsed_file)
        }).on('error', err => {
          console.error(err)
          resolve({ err: 'Form data is invalid: parsing error' })
        })
        if (req.end) {
          req.pipe(bb)
        } else {
          bb.write(req.body, req.isBase64Encoded ? 'base64' : 'binary')
        }
        return bb.end()
      } catch (e) {
        console.error(e)
        return resolve({ err: 'Form data is invalid: parsing error' })
      }
    })
  }
}
handler
const form_parser = require('./form-parser').parse
const s3_upload = require('./s3-upload').upload
const temp = require('temp')

exports.handler = async (event, context) => {
  temp.track()
  const parsed_file = await form_parser(event, temp)
  console.log('index -- parsed form', parsed_file)
  const result = await s3_upload(parsed_file)
  console.log('index -- s3 result', result)
  temp.cleanup()
  return {
    statusCode: '200',
    body: JSON.stringify(result)
  }
}
The edited code above is a combination of my own code and a GitHub repo I found that tries to achieve the same result. Even with this solution the file is still corrupted.

Figured out this issue. The code works perfectly fine - it was an issue with API Gateway. You need to go into the API Gateway settings, add multipart/form-data as a Binary Media Type, and then re-deploy the API. Hope this helps someone else who is banging their head against the wall trying to send images via form data to a Lambda.
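For anyone who prefers to script that setting instead of clicking through the console, here is a rough sketch using the aws-sdk APIGateway client (the REST API id and stage name below are placeholders you would swap in):
const AWS = require('aws-sdk')
const apigateway = new AWS.APIGateway()

// Sketch: register multipart/form-data as a binary media type, then redeploy the stage.
// 'your-rest-api-id' and 'prod' are placeholders.
const enableBinaryFormData = async () => {
  await apigateway.updateRestApi({
    restApiId: 'your-rest-api-id',
    patchOperations: [
      { op: 'add', path: '/binaryMediaTypes/multipart~1form-data' } // '/' is escaped as '~1'
    ]
  }).promise()
  await apigateway.createDeployment({
    restApiId: 'your-rest-api-id',
    stageName: 'prod'
  }).promise()
}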

Related

Getting a pdf file from express and treating it as if it were from an input

I am trying to make a component that takes a PDF from an input (or an already uploaded one), extracts pages from it, and uploads it again.
When choosing a file from the input (i.e. choosing a file from my computer), I am using this:
const handleFileChange = async (event) => {
  const file = event.target.files[0];
  setFiles(event.target.files[0])
  const fileName = event.target.files[0].name
  setFileName(fileName);
  const fileReader = new FileReader();
  fileReader.onload = async () => {
    const pdfBytes = new Uint8Array(fileReader.result);
    const pdfDoc = await PDFDocument.load(pdfBytes);
    setPdfDoc(pdfDoc);
    setPdfBlob(pdfBytes)
  };
  fileReader.readAsArrayBuffer(file);
  setShowPdf(true)
};
We get a pdfDoc and a Uint8Array.
Then I use the pdfDoc to get pages and extract a new PDF file. This works fine.
Now, when selecting a file that was already uploaded, I use this to ping the API and get the file:
const handleGetFile = async (url) => {
  const headers = {
    Authorization: "Bearer " + (localStorage.getItem("token")),
    Accept: 'application/pdf'
  }
  await axios.put(`${process.env.NEXT_PUBLIC_API_URL}getPdfFileBlob`, {
    pdfUrl: `https://handle-pdf-photos-project-through-compleated-task.s3.amazonaws.com/${url}`
  }, { responseType: 'arraybuffer', headers }).then((res) => {
    const handlePdf = async () => {
      const uint8Array = new Uint8Array(res.data);
      const pdfBlob = new Blob([uint8Array], { type: 'application/pdf' });
      setPdfBlob(uint8Array)
      // setPdfDoc(pdfBlob) .....? how do i create a pdf doc from the unit8array
    }
    handlePdf()
  }).catch((err) => {
    console.log(err)
  })
}
This is the endpoint I am pinging:
app.put('/getPdfFileBlob', async function (req, res) {
  try {
    console.log(req.body.pdfUrl)
    const url = req.body.pdfUrl;
    const fileName = 'file.pdf';
    const file = fs.createWriteStream(fileName);
    https.get(url, (response) => {
      response.pipe(file);
      file.on('finish', () => {
        file.close();
        // Serve the file as a response
        const pdf = fs.readFileSync(fileName);
        res.setHeader('Content-Type', 'application/pdf');
        res.setHeader('Content-Transfer-Encoding', 'Binary');
        res.setHeader('Content-Disposition', 'inline; filename="' + fileName + '"');
        res.send(pdf);
      });
    });
  } catch (error) {
    res.status(500).json({ success: false, msg: "server side err" })
  }
})
After getting this file, here is what I am trying to do:
const handlePageSelection = (index) => {
  setSelectedPages(prevSelectedPages => {
    const newSelectedPages = [...prevSelectedPages];
    const pageIndex = newSelectedPages.indexOf(index);
    if (pageIndex === -1) {
      newSelectedPages.push(index);
    } else {
      newSelectedPages.splice(pageIndex, 1);
    }
    return newSelectedPages;
  });
};

const handleExtractPages = async () => {
  for (let i = pdfDoc.getPageCount() - 1; i >= 0; i -= 1) {
    if (!selectedPages.includes(i + 1)) {
      pdfDoc.removePage(i);
    }
  }
  await pdfDoc.save();
};
In the first case, where I upload the PDF file from local storage, I get a pdfDoc.
console of pdfDoc and pdfBlob
When I select an already existing file, I can't find a way to turn the Uint8Array buffer into a pdfDoc.
log of pdfBlob and no pdfDoc
What I want is to transform the pdfBlob into a PDFDocument, or to get the PDFDocument from the array buffer, so that I can call getPages on it.
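In case it helps, pdf-lib's PDFDocument.load also accepts the raw bytes directly, so a minimal sketch inside the axios .then handler (assuming the same responseType: 'arraybuffer' request shown above) would be:
// Sketch: build a PDFDocument straight from the downloaded bytes.
const uint8Array = new Uint8Array(res.data)
const pdfDoc = await PDFDocument.load(uint8Array) // load() accepts a Uint8Array or ArrayBuffer
setPdfDoc(pdfDoc)
setPdfBlob(uint8Array)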

Network error when downloading file from S3 - NextJs

I am trying to download a file from Amazon S3, but the call is giving me a network error. I have set up my Amazon config file according to my needs and I am using the S3 getObject() method to download the file. Unfortunately it fails. Here is my code:
export const AWSDownload = (link = '') => {
  const s3 = new AWS.S3({
    endpoint: 's3Url',
    region: 'us-east-1',
    accessKeyId: AWS_Config.AWS_ACCESS_KEY_ID,
    secretAccessKey: AWS_Config.AWS_SECRET_ACCESS_KEY
  })
  const params = {
    Bucket: AWS_Config.AWS_BUCKET,
    Key: `${link}`
  }
  const handleDownload = () => {
    return new Promise<string>((resolve, reject) => {
      console.log('Handling download called')
      s3.getObject(params, (err, data: any) => {
        console.log(data, 'response')
        if (err) {
          console.log(err, err.stack)
          reject(err)
        } else {
          if (data.Body) {
            let binary = ''
            const bytes: any = new Uint8Array(data.Body)
            const len: any = bytes.byteLength
            for (let i = 0; i < len; i++) {
              binary += String.fromCharCode(bytes[i])
            }
            const b64encoded = `data:${data.ContentType};base64,${btoa(binary)}`
            resolve(b64encoded)
          } else {
            reject('Empty body')
          }
        }
      })
    })
  }
  return {
    downloadImage: handleDownload
  }
}
export const AWS_Config = {
  FILESYSTEM_DRIVER: 's3',
  AWS_ACCESS_KEY_ID: 'KeyID',
  AWS_SECRET_ACCESS_KEY: 'Secret Access Key',
  AWS_DEFAULT_REGION: 'us-east-1',
  AWS_BUCKET: 'Bucket'
}
The result is mentioned below:
Try using the latest S3 client via:
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3'

const s3Client = new S3Client({ region: process.env.region })

try {
  const data = await s3Client.send(new PutObjectCommand(uploadParams))
  console.log('Success', data)
} catch (err) {
  console.log('Error', err)
}
or check your network connection 😉
Edit: Similarly, look for GetObjectCommand in the latest documentation and implement it.
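A rough sketch of the download side with the v3 client could look like the following; treat the bucket name as a placeholder, and note that transformToByteArray is only available on the returned Body stream in recent SDK releases:
import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3'

const s3Client = new S3Client({ region: 'us-east-1' })

// Sketch: fetch the object and turn its Body stream into raw bytes.
const download = async (key) => {
  const data = await s3Client.send(new GetObjectCommand({
    Bucket: 'your-bucket-name', // placeholder
    Key: key
  }))
  return data.Body.transformToByteArray() // recent v3 releases only
}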

I want to upload an image to Imgur using Next.js API route

I am trying to create a process that uploads an image, previews it once, and then uploads it to Imgur if the image is OK.
The code is as follows.
const [img, setImg] = useState([])

const previewImg = ({ target: { files } }) => {
  if (img.length > 5) return
  const reader = new FileReader()
  reader.onload = ({ target: { result } }) => {
    setImg((img) => [...img, { id: generateID(), src: result }])
  }
  reader.readAsDataURL(files[0])
}

const uploadImugr = async (e) => {
  e.preventDefault();
  const base64 = img[0].src.toString().replace(/data:.*\/.*;base64,/, '');
  const res = await fetch('/api/upload/', {
    method: 'POST',
    body: base64,
  });
  console.log(await res.json());
}

return (
  <>
    <input type="file" onChange={previewImg} />
    {img.length > 0 && img.map((item) => {
      return <img key={item.id} src={item.src} />
    })}
    <button onClick={uploadImugr}>Upload Imgur</button>
  </>
)
The following is the Next.js API route.
Imgur API
const uploadImugrAPI = async (req: NextApiRequest, res: NextApiResponse) => {
  const formData = new FormData();
  formData.append('image', req.body);
  const resImgur = await fetch("https://api.imgur.com/3/upload", {
    method: 'POST',
    headers: {
      Authorization: 'Client-ID MY-CLIEND-ID',
    },
    body: formData,
  })
  res.status(200).json(resImgur.json());
};

export default uploadImugrAPI;
When the above API is executed, the following error message will be displayed.
POST http://localhost:3000/api/upload 500 (Internal Server Error)
Uncaught (in promise) SyntaxError: Unexpected token I in JSON at position 0
I'm new to Next.js and external APIs, so I'm not sure what keywords to search on Google for to solve this problem.
Please help me.
Thank you.
Added:
When I tried with Postman, I was able to upload images to Imgur by passing a binary file.
Therefore, I changed the code as follows to pass a binary file instead of base64 and tried it.
const [imgArray, setImgArray] = useState([])
+ const [srcArray, setSrcArray] = useState([])

const uploadImg = ({ target: { files } }) => {
  if (imgArray.length > 5) return
+ setImgArray((imgArray) => [...imgArray, files[0]])
  const reader = new FileReader()
  reader.onload = ({ target: { result } }) => {
    const uploadImgSrc = result.toString()
    setSrcArray((srcArray) => [
      ...srcArray,
      { id: generateID(), src: uploadImgSrc.toString() },
    ])
    formRef.current.inputImg.value = ''
  }
  reader.readAsDataURL(files[0])
}

const uploadImugr = async (e) => {
  e.preventDefault();
+ const formData = new FormData();
+ formData.append("image", imgArray[0])
  const res = await fetch('/api/upload/', {
    method: 'POST',
    body: formData,
  });
  console.log(await res.json());
}
The result was that the following error was displayed in the console.
POST http://localhost:3000/api/upload 500 (Internal Server Error)
Request failed with status code 500
After 2 days of frustration, I've patched together a solution based on several answers I stumbled upon: convert the file to base64 client-side and send that as JSON to the API.
//client.tsx
async function submit(e: React.FormEvent<HTMLFormElement>) {
  e.preventDefault();
  if (!file) return;
  let base64Img = await getBase64(file);
  if (typeof base64Img == 'string') {
    base64Img = base64Img.replace(/^data:.+base64,/, '')
  }
  const result = await fetch('/api/upload', {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ image: base64Img }),
  })
  const response = await result.json() // response.data is an object containing the image URL
}

function getBase64(file: File): Promise<string | ArrayBuffer | null> {
  return new Promise((resolve, reject) => {
    const reader = new FileReader()
    reader.readAsDataURL(file)
    reader.onload = () => resolve(reader.result)
    reader.onerror = error => reject(error)
  })
}
//upload.ts
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  const fd = new FormData();
  fd.append('image', req.body.image)
  fd.append('type', 'base64')
  const response = await fetch('https://api.imgur.com/3/image', {
    method: "POST",
    headers: {
      Authorization: `Client-ID ${process.env.IMGUR_ID}`,
    },
    body: fd,
    redirect: 'follow',
  })
  const data = await response.json();
  return res.json(data)
}
Also, I found that using https://api.imgur.com/3/image instead of https://api.imgur.com/3/upload works better, as the errors are more helpful.
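On the client, the hosted image URL can then be read from Imgur's response, which the handler above simply forwards; a small sketch (base64Img as produced by getBase64 above):
// Sketch: Imgur's v3 API returns the hosted image URL under data.link.
const result = await fetch('/api/upload', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ image: base64Img }),
})
const { data, success } = await result.json()
if (success) console.log('Uploaded to', data.link)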

how to pipe an archive (zip) to an S3 bucket

I'm a bit confused about how to proceed. I am using Archiver (a Node.js module) as a means to write data to a zip file. Currently, I have my code working when I write to a file (local storage).
var fs = require('fs');
var archiver = require('archiver');

var output = fs.createWriteStream(__dirname + '/example.zip');
var archive = archiver('zip', {
  zlib: { level: 9 }
});

archive.pipe(output);
archive.append(mybuffer, { name: 'msg001.txt' });
I’d like to modify the code so that the archive target file is an AWS S3 bucket. Looking at the code examples, I can specify the bucket name and key (and body) when I create the bucket object as in:
var s3 = new AWS.S3();
var params = { Bucket: 'myBucket', Key: 'myMsgArchive.zip', Body: myStream };
s3.upload(params, function (err, data) {
  // …
});
Or
s3 = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'myMsgArchive.zip' } });
s3.upload({ Body: myStream })
  .send(function (err, data) {
    // …
  });
With regard to my S3 example(s), myStream appears to be a readable stream, and I am confused about how to make this work, since archive.pipe requires a writable stream. Is this a case where we need to use a pass-through stream? I've found an example where someone created a pass-through stream, but it is too terse to gain a proper understanding from. The specific example I am referring to is:
Pipe a stream to s3.upload()
Any help someone can give me would be greatly appreciated. Thanks.
This could be useful for anyone else wondering how to use pipe.
Since you correctly referenced the example using the pass-through stream, here's my working code:
1 - The routine itself, zipping files with node-archiver
exports.downloadFromS3AndZipToS3 = () => {
  // These are my input files I'm willing to read from S3 to ZIP them
  const files = [
    `${s3Folder}/myFile.pdf`,
    `${s3Folder}/anotherFile.xml`
  ]

  // Just in case you like to rename them as they have a different name in the final ZIP
  const fileNames = [
    'finalPDFName.pdf',
    'finalXMLName.xml'
  ]

  // Use promises to get them all
  const promises = []
  files.map((file) => {
    promises.push(s3client.getObject({
      Bucket: yourBucket,
      Key: file
    }).promise())
  })

  // Define the ZIP target archive
  let archive = archiver('zip', {
    zlib: { level: 9 } // Sets the compression level.
  })

  // Pipe!
  archive.pipe(uploadFromStream(s3client, 'someDestinationFolderPathOnS3', 'zipFileName.zip'))

  archive.on('warning', function (err) {
    if (err.code === 'ENOENT') {
      // log warning
    } else {
      // throw error
      throw err;
    }
  })

  // Good practice to catch this error explicitly
  archive.on('error', function (err) {
    throw err;
  })

  // The actual archive is populated here
  return Promise
    .all(promises)
    .then((data) => {
      data.map((thisFile, index) => {
        archive.append(thisFile.Body, { name: fileNames[index] })
      })
      archive.finalize()
    })
}
2 - The helper method
const uploadFromStream = (s3client, someFolder, aFilename) => {
  const pass = new stream.PassThrough()

  const s3params = {
    Bucket: yourBucket,
    Key: `${someFolder}/${aFilename}`,
    Body: pass,
    ContentType: 'application/zip'
  }

  s3client.upload(s3params, (err, data) => {
    if (err)
      console.log(err)
    if (data)
      console.log('Success')
  })

  return pass
}
The following example takes the accepted answer and makes it work with local files as requested.
const archiver = require("archiver")
const fs = require("fs")
const AWS = require("aws-sdk")
const s3 = new AWS.S3()
const stream = require("stream")

const zipAndUpload = async () => {
  const files = [`test1.txt`, `test2.txt`]
  const fileNames = [`test1target.txt`, `test2target.txt`]
  const archive = archiver("zip", {
    zlib: { level: 9 } // Sets the compression level.
  })
  files.map((thisFile, index) => {
    archive.append(fs.createReadStream(thisFile), { name: fileNames[index] })
  })
  const uploadStream = new stream.PassThrough()
  archive.pipe(uploadStream)
  archive.finalize()

  archive.on("warning", function (err) {
    if (err.code === "ENOENT") {
      console.log(err)
    } else {
      throw err
    }
  })
  archive.on("error", function (err) {
    throw err
  })
  archive.on("end", function () {
    console.log("archive end")
  })

  await uploadFromStream(uploadStream)
  console.log("all done")
}

const uploadFromStream = async pass => {
  const s3params = {
    Bucket: "bucket-name",
    Key: `streamtest.zip`,
    Body: pass,
    ContentType: "application/zip"
  }
  return s3.upload(s3params).promise()
}

zipAndUpload()

zip file attached but not downloading in client

I am trying to download a zip file in my MERN application. I am getting the file in the response; however, the client does not download the actual file. I am using archiver to zip the files and then return them in a fetch call.
Archive Service:
const archiver = require('archiver')
const zip = archiver('zip')
const path = require('path')
const fs = require('fs')
const appDir = path.dirname(require.main.filename)

exports.FileArchiver = function (feed, res) {
  // const app = this.app;
  const uploadsDir = path.join(appDir, '/uploads/');
  const templatesDir = path.join(appDir, '/templates/');
  const feedArray = feed.feed.data;
  const extensions = [".jpg", ".png", ".svg"];
  const feedArrayString = JSON.stringify(feedArray);
  const feedArrayObject = JSON.parse(feedArrayString);
  let imageArray = [];
  let templateType = 'b'; //test

  // grab image names from object
  feedArrayObject.forEach(function (x) { iterate(x) });
  // remove duplicates
  imageArray = uniq_fast(imageArray);
  // zip images
  for (let i = 0; i < imageArray.length; i++) {
    console.log(imageArray[i])
    const filePath = path.join(uploadsDir, imageArray[i]);
    zip.append(fs.createReadStream(filePath), { name: 'images/' + imageArray[i] });
  }

  res.attachment(feed.name + '.zip');
  zip.pipe(res);
  zip.append(feedArrayString, { name: 'feed.json' })
  zip.directory(templatesDir + '/' + templateType, false);
  zip.on('error', (err) => { throw err; });
  zip.on('warning', (err) => {
    if (err.code === 'ENOENT') {
      console.log('ENOENT for archive')
    } else {
      throw err;
    }
  });
  zip.finalize();
  return this;
}
Client side fetch:
export const downloadData = (url, _id, name, type) => {
  return fetch(url, { method: 'GET' })
    .then((res) => {
      console.log(res);
      return res;
    })
}
Client side Headers attached:
content-disposition: attachment; filename="a_gpo.zip"
content-type: application/zip
The network request returns a 200 status, and I can also see that the attachment response in the client contains the raw zip data. However, the browser never actually downloads the file.
Try redirecting the browser location to the URL:
export const downloadData = (url) => {
  window.location = url;
}
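If you'd rather keep the fetch call (for example, to send auth headers), an alternative sketch is to read the response as a Blob and trigger the download with a temporary anchor element:
export const downloadData = async (url, fileName = 'archive.zip') => {
  const res = await fetch(url, { method: 'GET' })
  const blob = await res.blob()               // read the zip bytes
  const objectUrl = URL.createObjectURL(blob) // temporary local URL for the blob
  const a = document.createElement('a')
  a.href = objectUrl
  a.download = fileName                       // hint the browser to save as a file
  document.body.appendChild(a)
  a.click()
  a.remove()
  URL.revokeObjectURL(objectUrl)              // release the blob URL
}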
