How to Upload File Using AWS-SDK (From Base64)?

Here is my example code:
var AWS = require('aws-sdk');
var fixNameFile = "tes.png";
let s3bucket = new AWS.S3({
  endpoint: "xxx...",
  accessKeyId: "xxx...",
  secretAccessKey: "xxx...",
});
var imgBase64 = "data:image/png;base64,xxxxxx....";
var params = {
  Bucket: 'fileupload',
  ACL: 'public-read',
  Key: fixNameFile,
  Body: imgBase64,
  ContentEncoding: 'base64'
};
s3bucket.putObject(params, function (err, data) {
  console.log(data);
});
In my case, the file was uploaded successfully. However, the result is only a blank image, like this:
https://imgur.com/a/6Eci8Pv
Does anyone have an idea what is wrong in my case? Please help me :)
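A likely cause (a guess, since no answer is quoted here): Body is the base64 string itself, and ContentEncoding: 'base64' does not make the SDK decode it, so S3 stores the literal text of the data URI rather than image bytes. A minimal sketch of the usual fix, reusing the names from the snippet above: strip the data-URI prefix, decode into a Buffer, and set a ContentType.

var base64Data = imgBase64.replace(/^data:image\/\w+;base64,/, '');
var buffer = Buffer.from(base64Data, 'base64'); // raw image bytes

var params = {
  Bucket: 'fileupload',
  ACL: 'public-read',
  Key: fixNameFile,
  Body: buffer,             // upload the decoded bytes, not the base64 string
  ContentType: 'image/png'  // so the object is served and rendered as an image
};
s3bucket.putObject(params, function (err, data) {
  if (err) console.error(err);
  else console.log(data);
});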

Related

Javascript || AWS S3 SDK & croppie file upload errors

I am trying to upload the cropped results of croppie to an S3 bucket. I am currently getting a blank (empty) error message when I successfully crop and then try to upload the cropped results.
I have followed Amazon docs including setting up the S3 bucket, identity pools, and configuring my CORS.
I believe the error has something to do with how croppie is packaging the cropped results. I have included my app.js file (where I handle the upload) and the code where the addPhoto function is being called. Resp is the response from croppie.
The expected outcome is that I can successfully crop a photo and then upload it to my S3 bucket.
$('.crop').on('click', function (ev) {
  $uploadCrop.croppie('result', {
    type: 'canvas',
    size: 'original'
  }).then(function (resp) {
    Swal.fire({
      imageUrl: resp,
      showCancelButton: true,
      confirmButtonText: "Upload",
      reverseButtons: true,
      showCloseButton: true
    }).then((result) => {
      if (result.value) {
        addPhoto(resp);
      }
    });
  });
});
app.js
var albumBucketName = "colorsort";
var bucketRegion = "xxx";
var IdentityPoolId = "xxx";

AWS.config.update({
  region: bucketRegion,
  credentials: new AWS.CognitoIdentityCredentials({
    IdentityPoolId: IdentityPoolId
  })
});

var s3 = new AWS.S3({
  apiVersion: "2006-03-01",
  params: { Bucket: albumBucketName }
});

function addPhoto(resp) {
  var file = resp;
  var fileName = file.name;
  console.log(resp.type);
  var photoKey = fileName;
  // Use the S3 ManagedUpload class as it supports multipart uploads
  var upload = new AWS.S3.ManagedUpload({
    params: {
      Bucket: albumBucketName,
      Key: photoKey,
      Body: file,
      ACL: "public-read"
    }
  });
  var promise = upload.promise();
  promise.then(
    function (data) {
      alert("Successfully uploaded photo.");
    },
    function (err) {
      return alert("There was an error uploading your photo: " + err.message);
    }
  );
}
The solution I found involved adding the following snippet to my CORS config, as well as changing the croppie result type from 'canvas' to 'base64'.
<AllowedHeader>*</AllowedHeader>
Useful resources: Upload missing ETag, Uploading base64 image to Amazon with Node.js
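For concreteness, here is a sketch of what the upload side might look like after that change (the dataURLToBlob helper and the generated key are my additions, not from the thread). croppie's 'base64' result is a data-URL string, so it has no .name property and needs to be converted to a Blob before ManagedUpload can send raw bytes:

function dataURLToBlob(dataURL) {
  var parts = dataURL.split(',');
  var mime = parts[0].match(/:(.*?);/)[1]; // e.g. "image/png"
  var binary = atob(parts[1]);             // decode base64 into a binary string
  var bytes = new Uint8Array(binary.length);
  for (var i = 0; i < binary.length; i++) bytes[i] = binary.charCodeAt(i);
  return new Blob([bytes], { type: mime });
}

function addPhoto(resp) {
  var blob = dataURLToBlob(resp);
  var upload = new AWS.S3.ManagedUpload({
    params: {
      Bucket: albumBucketName,
      Key: 'cropped-' + Date.now() + '.png', // resp is a string, so there is no file name to reuse
      Body: blob,
      ContentType: blob.type,
      ACL: 'public-read'
    }
  });
  upload.promise().then(
    function (data) { alert('Successfully uploaded photo.'); },
    function (err) { alert('There was an error uploading your photo: ' + err.message); }
  );
}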

Getting undefined filename, filetype when uploading files to AWS S3

What I'm trying to do:
Upload a file to AWS S3, then append the file name and file type to the end of the URL and save that URL in SQL, so that every time the person logs in, their picture can be pulled up by the stored image URL.
Problem:
The file is not uploading, and the file name and file type are not being recognized; both come up as undefined in the URL and the signed URL.
The code for my fileUploadService.js used in Node.js is shown below.
The getSignedURL looks like this:
https://sabio-training.s3.us-west-2.amazonaws.com/C56/filename/?AWSAccessKeyId=AKIAJF53EJKW7SJUV55Q&Content-Type=filetype&Expires=1536877443&Signature=WxSvLSzfyZKDRN9LawVOwj1ayVY%3D&x-amz-acl=public-read
The URL looks like this:
https://sabio-training.s3.amazonaws.com/C56/filename/filetype
const aws = require('aws-sdk');
aws.config.region = 'us-west-2';
aws.config.update({ accessKeyId: '', secretAccessKey: '' });
const PROFILE_S3_LINK = "https://sabio-training.s3.amazonaws.com/";

module.exports = {
  getUrl: getUrl
}

function getUrl(req, res) {
  const s3 = new aws.S3();
  const fileName = 'C56/' + "filename" + '/'; // hardcoded filename and filetype for it to work
  const fileType = "filetype"; // How to take the filename from the uploaded file to insert into fileName along with the filetype?
  const s3Params = {
    Bucket: 'sabio-training',
    Key: fileName,
    Expires: 3000,
    ContentType: fileType,
    ACL: 'public-read'
  };
  s3.getSignedUrl('putObject', s3Params, (err, data) => {
    if (err) {
      console.log(err);
      return res.end();
    }
    const returnData = {
      signedRequest: data,
      url: `${PROFILE_S3_LINK}${fileName}${fileType}` // unsigned URL
    };
    res.write(JSON.stringify(returnData));
    res.end();
  });
}
=========================================================================
fileUploadRoute.js
const router = require("express").Router();
const fileUploadController = require("../controllers/fileUploadController")
router.put("/", fileUploadController.getUrl);
module.exports = router;
==========================================================================
fileUploadController.js
const fileUploadService = require('../services/fileUploadService')
const responses = require("../models/responses");
module.exports = {
  getUrl: getUrl
}

function getUrl(req, res) {
  fileUploadService.getUrl(req, res)
    .then(response => {
      res.send(response)
    })
    .catch(error => {
      res.send(error)
    })
}
===========================================================================
index.js in node portion
const router = require("express").Router();
const pogsRoutes = require("./pogs.routes");
const userFromJWT = require("../filters/jwt.user");
const validateUser = require("../filters/validate.user");
const testRoutes = require("./test.routes");
const profileRoute = require("../profile/profileRoute");
const fileUploadRoute = require("../fileUpload/fileUploadRoute")
module.exports = router;
// router.use("/api/profilePage", profileRoute)
router.use("/api/pogs", pogsRoutes);
router.use("/api/upload", fileUploadRoute)
router.use("/api/profilePage", profileRoute)
// -----------------------------------
// Authenticated routes go below this:
// -----------------------------------
router.use(userFromJWT);
router.use(validateUser);
router.use("/api/test", testRoutes); // TODO: remove this before delivery to the client
============================================================================
USED IN REACT
Axios pulled from profile page
handleClickUpload = evt => {
  evt.preventDefault()
  console.log("RESULT : ", this.state);
  // var file = evt.target.files[0]; <-- haven't used this yet but I know it's for upload
  axios.put(`${NODE_API_URL}/api/upload`, {
    // f: file
  })
    .then(response => {
      console.log(
        response, "URL SIGNED REQUEST : ", response.data.signedRequest, " URL : ", response.data.url
      )
    })
    .catch(error => {
      console.log(error);
    })
}
Upload button and file upload portion inside profile page
<div method="post" encType="multipart/form-data" action="/">
<input type="file" name="fileName" className="btn" />
<input type="submit" value="Upload" className="btn" onClick={this.handleClickUpload}/>
Can anyone let me know what I'm doing wrong or if anything is missing?
First of all, take a look at https://s3browser.com/features-content-mime-types-editor.aspx; after that you can figure out which ContentType you have to use. I think it should be 'text/plain' or 'text/html'.
Then you have to add one more property to the s3 params, named Body (the body has to contain the entity that you want to upload to the s3 bucket).
Here is a snippet that you can use to upload your entity to s3 and then get a location:
let s3 = new AWS.S3({ region: process.env.AWS_REGION, apiVersion: '2006-03-01' });
let params = {
  Bucket: bucketName, // your bucket's name
  Key: fileName,      // a path within the bucket
  Body: fileBody,     // the entity you want to upload
  ACL: 'public-read',
  ContentType: 'text/plain'
};
let s3Response = await s3.upload(params).promise();
console.log(`Entity uploaded to S3 at ${s3Response.Bucket} bucket. Location: ${s3Response.Location}`);
return s3Response.Location;
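The undefined values in the URLs come from the fact that nothing ever sends the real file's name or type to the server; both sides use hardcoded strings. A sketch of one way to wire it up (my own wiring, not from the thread; it assumes express.json() body parsing is enabled and the file input is reachable via a ref):

// React side: send the chosen file's name and type, then PUT the bytes
// to the signed URL the server returns.
handleClickUpload = evt => {
  evt.preventDefault();
  const file = this.fileInput.files[0]; // assumes <input type="file" ref={el => this.fileInput = el} />
  axios.put(`${NODE_API_URL}/api/upload`, {
    fileName: file.name,
    fileType: file.type
  }).then(response => {
    // the Content-Type here must match the ContentType the URL was signed with
    return axios.put(response.data.signedRequest, file, {
      headers: { "Content-Type": file.type }
    });
  }).then(() => console.log("Uploaded"))
    .catch(error => console.log(error));
};

// Node side (fileUploadService.js): read the values instead of hardcoding them.
function getUrl(req, res) {
  const s3 = new aws.S3();
  const fileName = 'C56/' + req.body.fileName;
  const fileType = req.body.fileType;
  // ...build s3Params and call s3.getSignedUrl('putObject', ...) as before
}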

Strange response S3 getObject

I'm trying to get a file from S3 with JavaScript, but I'm getting a very strange response; check the image after the code.
AWS.config.update({
  'accessKeyId': 'XXX',
  'secretAccessKey': 'XXX',
  'region': 'us-east-1'
});
var s3 = new AWS.S3();
var s3Params = { Bucket: "mybucket", Key: "path/photo-515169.jpeg" };
const url = s3.getSignedUrl('getObject', s3Params);
$.ajax({
  url: url,
  success: function (url) {
    console.log(url)
  }
});
Here's the console.log:
[screenshot of the console output, not included]
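No answer is quoted here, but that symptom usually means the JPEG's raw bytes are being printed as text: jQuery's $.ajax treats the response body as a string. Two common ways around that (my suggestions, not from the thread):

// Either hand the signed URL straight to an image element and let the
// browser decode it (assumes an <img id="photo"> on the page)...
document.getElementById('photo').src = url;

// ...or read the object with getObject, which returns the bytes in
// data.Body (a Buffer in Node, a typed array in the browser) instead of
// a mangled string:
s3.getObject(s3Params, function (err, data) {
  if (err) return console.error(err);
  console.log(data.ContentType, data.Body.length);
});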

SignatureDoesNotMatch error from S3 putObject

I'm using the AWS Javascript SDK to put files to my S3. The following code is intended to upload a user avatar to the S3. I'm hardcoding the accessKeyId and secretAccessKey for now, and taking the file and key for uploading from a web form.
document.getElementById("upload-button").onclick = function () {
  const key = document.getElementById("key-text").value;
  var file = document.getElementById("file-chooser").files[0];
  const S3 = new AWS.S3({
    signatureVersion: "v4",
    apiVersion: '2006-03-01',
    accessKeyId: 'ACCESS_KEY_ID',
    secretAccessKey: 'SECRET_ACCESS_KEY',
    region: 'us-west-2'
  })
  S3.putObject({
    Key: key,
    Bucket: 'my-bucket-name',
    Body: file,
  }, (err, data) => {
    if (err) {
      alert("Error: " + err);
    } else {
      alert("Upload successful: " + data);
    }
  })
}
The code above gives me a SignatureDoesNotMatch error. I'm mystified by that, since I thought I was letting the API do the signing, and earlier versions of this code (which I unfortunately cannot reproduce) did not give me this error.
It turns out that this was pilot error. I was mismatching the ACCESS_KEY_ID and SECRET_ACCESS_KEY. Even though I checked this very thing a dozen times, I still got it wrong. Sorry about that. If you come here wondering about this, know that every programmer makes a dumb mistake every once in a while.
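For anyone landing here with the same error, a quick sanity check (my suggestion, not from the thread) is to ask STS who the key pair belongs to before debugging the signing itself, since swapped or mistyped keys fail immediately:

new AWS.STS({
  accessKeyId: 'ACCESS_KEY_ID',
  secretAccessKey: 'SECRET_ACCESS_KEY'
}).getCallerIdentity({}, function (err, data) {
  if (err) console.error('Credential problem:', err.code); // e.g. InvalidClientTokenId
  else console.log('Authenticated as', data.Arn);
});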

How to upload file to Digital Ocean Spaces using Javascript

I am looking to use Digital Oceans spaces (which seems to have an identical API to S3), and would like to try it by uploading a sample file. I am having lots of difficulty. Here's what I've done so far
{'hi' : 'world'}
is the contents of a file, hiworld.json, that I would like to upload. I understand that I need to create an AWS v4 signature before I can make this request.
var aws4 = require('aws4')
var request = require('request')
var opts = {
  'json': true,
  'body': "{'hi':'world'}",
  host: '${myspace}.nyc3.digitaloceanspaces.com',
  path: '/hiworld.json'
};
aws4.sign(opts, { accessKeyId: '${SECRET}', secretAccessKey: '${SECRET}' });
Then I send the request
request.put(opts, function (error, response) {
  if (error) {
    console.log(error);
  }
  console.log(response.body);
});
However, when I check my Digital Ocean space, I see that my file was not created. I have noticed that if I changed my PUT to GET and try to access an existing file, I have no issues.
Here's what my headers look like
headers: {
  Host: '${myspace}.nyc3.digitaloceanspaces.com',
  'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
  'Content-Length': 14,
  'X-Amz-Date': '20171008T175325Z',
  Authorization: 'AWS4-HMAC-SHA256 Credential=${mykey}/20171008/us-east-1//aws4_request, SignedHeaders=content-length;content-type;host;x-amz-date, Signature=475e691d4ddb81cca28eb0dcdc7c926359797d5e383e7bef70989656822accc0'
},
method: 'POST' }
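Two details in that dump are worth noting (my reading, not from the thread): the credential scope has an empty service segment (us-east-1//aws4_request), which suggests aws4 was not told the service and could not infer it from the custom Spaces hostname, and the request was signed as POST (aws4 defaults to POST when a body is set) even though request.put sends a PUT. A sketch with both spelled out explicitly:

var opts = {
  host: '${myspace}.nyc3.digitaloceanspaces.com',
  path: '/hiworld.json',
  method: 'PUT',   // make the signed method match request.put
  service: 's3',   // aws4 cannot guess this from a Spaces hostname
  region: 'us-east-1',
  body: JSON.stringify({ hi: 'world' }),
  headers: { 'Content-Type': 'application/json' }
};
aws4.sign(opts, { accessKeyId: '${KEY}', secretAccessKey: '${SECRET}' });
request.put(opts, function (error, response) {
  if (error) console.log(error);
  else console.log(response.statusCode);
});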
As an alternative, using aws-sdk:
// 1. Importing the SDK
import AWS from 'aws-sdk';

// 2. Configuring the S3 instance for Digital Ocean Spaces
const spacesEndpoint = new AWS.Endpoint(
  `${REGION}.digitaloceanspaces.com`
);
const url = `https://${BUCKET}.${REGION}.digitaloceanspaces.com/${file.path}`;
const S3 = new AWS.S3({
  endpoint: spacesEndpoint,
  accessKeyId: ACCESS_KEY_ID,
  secretAccessKey: SECRET_ACCESS_KEY
});

// 3. Using .putObject() to make the PUT request, S3 signs the request
const params = { Body: file.stream, Bucket: BUCKET, Key: file.path };
S3.putObject(params)
  .on('build', request => {
    request.httpRequest.headers.Host = `https://${BUCKET}.${REGION}.digitaloceanspaces.com`;
    // Note: I am assigning the size to the file Stream manually
    request.httpRequest.headers['Content-Length'] = file.size;
    request.httpRequest.headers['Content-Type'] = file.mimetype;
    request.httpRequest.headers['x-amz-acl'] = 'public-read';
  })
  .send((err, data) => {
    if (err) logger(err, err.stack);
    else logger(JSON.stringify(data, '', 2));
  });
var str = {
  'hi': 'world'
};
var c = JSON.stringify(str);
request(aws4.sign({
  'uri': 'https://${space}.nyc3.digitaloceanspaces.com/newworlds.json',
  'method': 'PUT',
  'path': '/newworlds.json',
  'headers': {
    "Cache-Control": "no-cache",
    "Content-Type": "application/x-www-form-urlencoded",
    "accept": "*/*",
    "host": "${space}.nyc3.digitaloceanspaces.com",
    "accept-encoding": "gzip, deflate",
    "content-length": c.length
  },
  body: c
}, { accessKeyId: '${secret}', secretAccessKey: '${secret}' }), function (err, res) {
  if (err) {
    console.log(err);
  } else {
    console.log(res);
  }
});
This gave me a successful PUT.
It can be done using multer and the aws-sdk; it worked for me.
const aws = require('aws-sdk');
const multer = require('multer');
const express = require('express');
const multerS3 = require('multer-s3');

const app = express();

const spacesEndpoint = new aws.Endpoint('sgp1.digitaloceanspaces.com');
const spaces = new aws.S3({
  endpoint: spacesEndpoint,
  accessKeyId: 'your_access_key_from_API',
  secretAccessKey: 'your_secret_key'
});

const upload = multer({
  storage: multerS3({
    s3: spaces,
    bucket: 'bucket-name',
    acl: 'public-read',
    key: function (request, file, cb) {
      console.log(file);
      cb(null, file.originalname);
    }
  })
}).array('upload', 1);
Now you can call this from an API route like this:
app.post('/upload', function (request, response, next) {
  upload(request, response, function (error) {
    if (error) {
      console.log(error);
    }
    console.log('File uploaded successfully.');
  });
});
The HTML would look like this:
<form method="post" enctype="multipart/form-data" action="/upload">
<label for="file">Upload a file</label>
<input type="file" name="upload">
<input type="submit" class="button">
</form>
