upload large javascript object as json file on aws s3 (efficient way) - javascript

Hi, I have a JSON response of roughly 150-200 MB. Because of its size, I want to save it on AWS S3 as a JSON file instead of returning it to the client.
Below is the code I am currently using:
async function uploadFileOnS3(fileData, s3Detail) {
  const params = {
    Bucket: s3Detail.Bucket,
    Key: s3Detail.Key_response,
    Body: JSON.stringify(fileData), // big fat js object
  };
  try {
    const stored = await S3.upload(params).promise();
    console.log("file uploaded successfully ", stored);
  } catch (err) {
    console.log(err);
  }
  console.log("upload exit");
}
I am concerned about the JSON.stringify(fileData) operation. Assuming this function will be part of an AWS Lambda, won't it take huge resources to serialize the object to a string?
Is there any other efficient way to save a JavaScript object as JSON in an AWS S3 bucket?

You don't really have to stringify the JSON yourself; you can pass a stream as the body:
async function uploadFileOnS3(fileData, s3Detail) {
  const params = {
    Bucket: s3Detail.Bucket,
    Key: s3Detail.Key_response,
    Body: fileData, // remove stringify from here
  };
  try {
    const stored = await S3.upload(params).promise();
    console.log("file uploaded successfully ", stored);
  } catch (err) {
    console.log(err);
  }
  console.log("upload exit");
}
const fs = require("fs");

exports.handler = async (event) => {
  // We create a stream
  const stream = fs.createReadStream("/tmp/upload.json");
  // Pass the stream to the upload function
  await uploadFileOnS3(stream, {
    Bucket: "bucket_name",
    Key_response: "upload.json"
  });
};
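The handler above assumes the JSON already exists as a file under /tmp. If the large object is built in memory as an array of records, one way to avoid a single giant JSON.stringify call is to write the file incrementally and then stream it to S3 exactly as shown above. A minimal sketch, assuming fileData is an array of plain records (the helper name and path are illustrative, not from the original answer):

const fs = require("fs");

// Write a large array of records to /tmp/upload.json piece by piece,
// so only one record is stringified at a time.
function writeJsonArrayToTmp(records, path = "/tmp/upload.json") {
  return new Promise((resolve, reject) => {
    const out = fs.createWriteStream(path);
    out.on("error", reject);
    out.on("finish", () => resolve(path));
    out.write("[");
    records.forEach((record, i) => {
      if (i > 0) out.write(",");
      out.write(JSON.stringify(record)); // one small stringify per record
    });
    out.write("]");
    out.end();
  });
}

The Lambda handler can then open a read stream on the returned path and pass that stream to uploadFileOnS3 as above.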

Related

File content missing when i download from s3

I am using the Node.js AWS SDK for the S3-related methods. I have a method to download a file from an S3 bucket.
I am downloading the file using the code below:
const downloadFileBase64 = async (payload) => {
  let params = { Bucket: s3BucketName, Key: `${payload.folderName}/${payload.fileName}` };
  try {
    const response = await s3
      .getObject(params, (err) => {
        if (err) {
          return err;
        }
      })
      .promise();
    return {
      data: response.Body.toString('base64'),
      fileName: payload.fileName
    };
  } catch (error) {
    return Boom.badRequest(error.message);
  }
};
Once I get the base64 content, I send it over email using SendGrid.
Issue: When I download small files everything works fine, but when I download large files, parts of the file are missing across multiple pages. I copy-pasted the base64 into a few online decoders and downloaded the file from there, and the same issue shows up there as well. From this I concluded that something goes wrong while returning the response from S3 itself. When I go to S3 and check the file in the bucket, it shows the proper file.
The screenshot showed the PDF with a random grey background on a few pages and some text missing.
I tried another method which just downloads the buffer, without the base64 conversion, as shown below:
const downloadFileBuffer = async (payload) => {
  let params = { Bucket: s3BucketName, Key: `${payload.folderName}/${payload.fileName}` };
  try {
    const response = await s3
      .getObject(params, (err) => {
        if (err) {
          return err;
        }
      })
      .promise();
    return {
      data: response.Body,
      fileName: payload.fileName
    };
  } catch (error) {
    return Boom.badRequest(error.message);
  }
};
Once I get the file content from this response, I store it temporarily in a folder on the server, then read it again and send it over email. But I still have the same issue.
const fileContent = await docs.downloadFileBuffer({ payload: req.payload.action.dire });
await fs.writeFileSync(`${temp}testinggg.pdf`, fileContent?.data);
const fileData = await fs.readFileSync(`${temp}testinggg.pdf`, { encoding: 'base64' });
Any help on this issue is really appreciated.
After days of research and trying different approaches, I found the issue. The problem was combining a callback with .promise() in s3.getObject(params, (err) => {}).promise(); (mixing the two appears to dispatch the same request more than once, which corrupted the streamed body). Instead, I wrapped the callback in a Promise myself as shown below. Now the file shows the full content without any missing data.
const downloadFileBuffer = async (payload) => {
  let params = { Bucket: s3BucketName, Key: `${payload.folderName}/${payload.fileName}` };
  try {
    return new Promise((resolve, reject) => {
      s3.getObject(params, (err, response) => {
        if (err) {
          return reject(err);
        }
        resolve({
          data: response.Body,
          fileName: payload.fileName
        });
      });
    });
  } catch (error) {
    return Boom.badRequest(error.message);
  }
};
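A simpler variant, assuming the AWS SDK v2 client from the question, is to drop the callback entirely and rely on .promise() alone, so the request is only driven one way. A minimal sketch:

const downloadFileBuffer = async (payload) => {
  const params = { Bucket: s3BucketName, Key: `${payload.folderName}/${payload.fileName}` };
  try {
    // No callback passed here; .promise() is the only thing dispatching the request.
    const response = await s3.getObject(params).promise();
    return {
      data: response.Body, // Buffer containing the full object
      fileName: payload.fileName
    };
  } catch (error) {
    return Boom.badRequest(error.message);
  }
};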

AWS S3 file upload with Node.js: Unsupported body payload error

I am trying to get my Node.js backend to upload a file to AWS S3 that it received in a POST request from my front-end. This is what my function looks like:
async function uploadFile(file) {
  var uploadParams = { Bucket: '<bucket-name>', Key: file.name, Body: file };
  s3.upload(uploadParams, function (err, data) {
    if (err) {
      console.log("Error", err);
    }
    if (data) {
      console.log("Upload Success", data.Location);
    }
  });
}
When I try uploading the file this way, I get an Unsupported Body Payload error.
I used fileStream.createReadStream() in the past to upload files saved in a directory on the server, but creating a file stream did not work for me here, since there is no path to pass.
EDIT:
The file object is created in the Angular front-end of my web application. This is the relevant HTML code where the file is selected by the user:
<div class="form-group">
  <label for="file">Choose File</label>
  <input type="file" id="file" (change)="handleFileInput($event.target.files)">
</div>
If the event occurs, the handleFileInput(files: FileList) method in the corresponding component is called:
handleFileInput(files: FileList) {
  // should result in array in case multiple files are uploaded
  this.fileToUpload = files.item(0);
  // actually upload the file
  this.uploadFileToActivity();
  // used to check whether we really received the file
  console.log(this.fileToUpload);
  console.log(typeof this.fileToUpload);
}

uploadFileToActivity() {
  this.fileUploadService.postFile(this.fileToUpload).subscribe(data => {
    // do something, if upload success
  }, error => {
    console.log(error);
  });
}
The postFile(fileToUpload: File) method of the file-upload service makes the POST request:
postFile(fileToUpload: File): Observable<Boolean> {
  console.log(fileToUpload.name);
  const endpoint = '/api/fileupload/single';
  const formData: FormData = new FormData();
  formData.append('fileKey', fileToUpload, fileToUpload.name);
  return this.httpClient
    .post(endpoint, formData/*, { headers: yourHeadersConfig }*/)
    .pipe(
      map(() => { return true; }),
      catchError((e) => this.handleError(e)),
    );
}
Here is the server-side code that receives the file and then calls the uploadFile(file) function:
app.post('/api/fileupload/single', async (req, res) => {
  try {
    if (!req.files) {
      res.send({
        status: false,
        message: 'No file uploaded'
      });
    } else {
      let file = req.files.fileKey;
      uploadFile(file);
      // send response
      res.send({
        status: true,
        message: 'File is uploaded',
        data: {
          name: file.name,
          mimetype: file.mimetype,
          size: file.size
        }
      });
    }
  } catch (err) {
    res.status(500).send(err);
  }
});
Thank you very much for your help in solving this!
Best regards, Samuel
The best way is to stream the file. Assuming you are reading it from disk, you could do this:
const fs = require("fs");
const aws = require("aws-sdk");
const s3Client = new aws.S3();
const Bucket = 'somebucket';
const stream = fs.createReadStream("file.pdf");
const Key = stream.path;
const response = await s3Client.upload({Bucket, Key, Body: stream}).promise();
console.log(response);
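That said, the file in this question does not come from disk; it arrives in req.files, which suggests an upload middleware such as express-fileupload (an assumption, the question does not name the middleware). With express-fileupload, the uploaded file object exposes its raw bytes as a Buffer on file.data, and passing that Buffer as the Body avoids the unsupported body payload error. A minimal sketch along those lines:

// Sketch: assumes express-fileupload, so file.data is a Buffer with the file bytes.
async function uploadFile(file) {
  const uploadParams = {
    Bucket: '<bucket-name>',
    Key: file.name,
    Body: file.data,            // Buffer, not the whole middleware file object
    ContentType: file.mimetype  // keep the original MIME type
  };
  return s3.upload(uploadParams).promise();
}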

Turning image into blob in React Native (Expo) and uploading to S3 bucket

I have a camera component which takes a picture. I store the picture using Expo's FileSystem in the local cacheDirectory. It looks something like this:
onPictureSaved = async photo => {
  await FileSystem.moveAsync({
    from: photo.uri,
    to: `${FileSystem.cacheDirectory}test.jpg`
  });
};
My next step is converting the image in the local cacheDirectory into a blob and uploading it to S3 via the aws-sdk:
var params = {
  Bucket: "my-bucket",
  Key: 'test.jpg',
  Body: blob
};
s3.upload(params, function(err, data) {
  if (err) {
    console.log(err); // an error occurred
  } else {
    console.log(data); // successful response
  }
});
However, none of the methods or modules I have installed to accomplish this tiny step in the process have worked at all. I can't use RNFS, react-native-fetch-blob or any other modules that require linking, thanks to the Expo client. I don't want to detach from Expo just for one thing. Is there any other way to accomplish this?
Take a look at https://github.com/expo/image-upload-example/issues/3#issuecomment-387263080. The latest Expo release supports blobs, so you can do something like the following:
uploadToS3 = async (fileUri, s3Bucket, s3Key) => {
  const response = await fetch(fileUri);
  const blob = await response.blob();
  return new Promise((resolve, reject) => {
    const params = {
      Bucket: s3Bucket,
      Key: s3Key,
      Body: blob,
    };
    s3.upload(params, function(err, data) {
      if (err) {
        console.log('Something went wrong');
        console.log(err);
        reject(err);
      } else {
        console.log('Successfully uploaded image');
        resolve(data);
      }
    });
  });
};
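For the camera flow in the question, a call could look like the following (a usage sketch; the bucket and key names are placeholders):

// After onPictureSaved has moved the photo into the cache directory:
const fileUri = `${FileSystem.cacheDirectory}test.jpg`;
const result = await this.uploadToS3(fileUri, 'my-bucket', 'test.jpg');
console.log('Uploaded to', result.Location);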
Hope this helps!

Node.js binary to PDF

I have an Express server which creates a PDF file.
I am trying to send this file to the client:
const fs = require('fs');

function download(req, res) {
  var filePath = '/../../myPdf.pdf';
  fs.readFile(__dirname + filePath, function(err, data) {
    if (err) throw new Error(err);
    console.log('yeyy, no errors :)');
    if (!data) throw new Error('Expected data, but got', data);
    console.log('got data', data);
    res.contentType('application/pdf');
    res.send(data);
  });
}
On the client I want to download it:
_handleDownloadAll = async () => {
  console.log('handle download all');
  const response = await request.get(
    `http://localhost:3000/download?accessToken=${localStorage.getItem(
      'accessToken'
    )}`
  );
  console.log(response);
};
I receive a body.text like
%PDF-1.4↵1 0 obj↵<<↵/Title (��)↵/Creator (��)↵/Producer (��Qt 5.5.1)↵
but I can't achieve a download.
How can I create a PDF from the data OR directly download it from the server?
I've got it working.
The answer was pretty simple: I just let the browser handle the download with an HTML anchor tag.
server:
function download(req, res) {
  const { creditor } = req.query;
  const filePath = `/../../${creditor}.pdf`;
  res.download(__dirname + filePath);
}
client:
<a href={`${BASE_URL}?accessToken=${accessToken}&creditor=${creditorId}`} download>Download</a>
The result is the binary as a string. We use base64 to convert it from binary to a PDF:
var buffer = Buffer.from(result['textBinary'], 'base64')
fs.writeFileSync('/path/to/my/file.pdf', buffer)
You can prompt the browser to download the file by setting the correct content-disposition header:
res.setHeader('Content-disposition', 'attachment; filename=myfile.pdf');
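Applied to the download handler from the question, that could look like the sketch below; it keeps the existing readFile logic and only adds the header:

function download(req, res) {
  fs.readFile(__dirname + '/../../myPdf.pdf', function(err, data) {
    if (err) throw new Error(err);
    // Ask the browser to save the response as a file instead of rendering it.
    res.setHeader('Content-disposition', 'attachment; filename=myfile.pdf');
    res.contentType('application/pdf');
    res.send(data);
  });
}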
readFile returns a Buffer, which is a wrapper around bytes. You're sending that Buffer back to the client, which is logging it to the console.
The body.text you see is to be expected.
You will need to write these bytes to a file using fs.writeFile or similar. Here's an example:
_handleDownloadAll = async () => {
  console.log('handle download all');
  const response = await request.get(
    `http://localhost:3000/download?accessToken=${localStorage.getItem(
      'accessToken'
    )}`
  );
  // load your response data into a Buffer
  let buffer = Buffer.from(response.body.text);
  // open the file in writing mode
  fs.open('/path/to/my/file.pdf', 'w', function(err, fd) {
    if (err) {
      throw 'could not open file: ' + err;
    }
    // write the contents of the buffer
    fs.write(fd, buffer, 0, buffer.length, null, function(err) {
      if (err) {
        throw 'error writing file: ' + err;
      }
      fs.close(fd, function() {
        console.log('file written successfully');
      });
    });
  });
};
You may need to experiment with the buffer encoding; it defaults to utf8.
The other option you may want to consider is generating the PDF on the server and simply sending the client a link from which it can download the file.

Uploading base64 encoded Image to Amazon S3 via Node.js

Yesterday I did a late-night coding session and created a small Node.js/JS app (well, actually CoffeeScript, but CoffeeScript is just JavaScript, so let's say JS).
What's the goal:
client sends a canvas datauri (png) to server (via socket.io)
server uploads image to amazon s3
step 1 is done.
The server now has a string à la
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAMgAAADICAYAAACt...
My question is: what are my next steps to "stream"/upload this data to Amazon S3 and create an actual image there?
knox (https://github.com/LearnBoost/knox) seems like an awesome lib to PUT something to S3, but what I'm missing is the glue between the base64-encoded image string and the actual upload action.
Any ideas, pointers and feedback are welcome.
For people who are still struggling with this issue, here is the approach I used with the native aws-sdk:
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./s3_config.json');
var s3Bucket = new AWS.S3( { params: {Bucket: 'myBucket'} } );
Inside your router method (ContentType should be set to the content type of the image file):
var buf = Buffer.from(req.body.imageBinary.replace(/^data:image\/\w+;base64,/, ""), 'base64');
var data = {
  Key: req.body.userId,
  Body: buf,
  ContentEncoding: 'base64',
  ContentType: 'image/jpeg'
};
s3Bucket.putObject(data, function(err, data) {
  if (err) {
    console.log(err);
    console.log('Error uploading data: ', data);
  } else {
    console.log('successfully uploaded the image!');
  }
});
s3_config.json file:
{
  "accessKeyId": "xxxxxxxxxxxxxxxx",
  "secretAccessKey": "xxxxxxxxxxxxxx",
  "region": "us-east-1"
}
Here's the code from one article I came across, posting below:
const imageUpload = async (base64) => {
  const AWS = require('aws-sdk');
  const { ACCESS_KEY_ID, SECRET_ACCESS_KEY, AWS_REGION, S3_BUCKET } = process.env;

  AWS.config.setPromisesDependency(require('bluebird'));
  AWS.config.update({ accessKeyId: ACCESS_KEY_ID, secretAccessKey: SECRET_ACCESS_KEY, region: AWS_REGION });
  const s3 = new AWS.S3();

  const base64Data = Buffer.from(base64.replace(/^data:image\/\w+;base64,/, ""), 'base64');
  const type = base64.split(';')[0].split('/')[1];
  const userId = 1;

  const params = {
    Bucket: S3_BUCKET,
    Key: `${userId}.${type}`, // type is not required
    Body: base64Data,
    ACL: 'public-read',
    ContentEncoding: 'base64', // required
    ContentType: `image/${type}` // required. Notice the back ticks
  };

  let location = '';
  let key = '';
  try {
    const { Location, Key } = await s3.upload(params).promise();
    location = Location;
    key = Key;
  } catch (error) {
    // location stays empty if the upload fails
    console.log(error);
  }
  console.log(location, key);
  return location;
};
module.exports = imageUpload;
Read more: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
Credits: https://medium.com/#mayneweb/upload-a-base64-image-data-from-nodejs-to-aws-s3-bucket-6c1bd945420f
OK, this one is the answer for how to save the canvas data to a file.
Basically it looks like this in my code (CoffeeScript):
buf = new Buffer(data.dataurl.replace(/^data:image\/\w+;base64,/, ""), 'base64')

req = knoxClient.put('/images/' + filename, {
  'Content-Length': buf.length,
  'Content-Type': 'image/png'
})

req.on('response', (res) ->
  if res.statusCode is 200
    console.log('saved to %s', req.url)
    socket.emit('upload success', imgurl: req.url)
  else
    console.log('error %d', req.statusCode)
)

req.end(buf)
The accepted answer works great, but if someone needs to accept any file type instead of just images, this regexp works:
/^data:.+;base64,/
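For example, stripping the prefix with that regexp before decoding (a small sketch; dataUri is a placeholder variable):

// Works for any base64 data URI, e.g. application/pdf, not just images.
var buf = Buffer.from(dataUri.replace(/^data:.+;base64,/, ""), 'base64');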
For Laravel developers, this should work:
/* upload the file */
$path = Storage::putFileAs($uploadfolder, $uploadFile, $fileName, "s3");
Make sure to set your .env file properties before calling this method.
