JavaScript || AWS S3 SDK & Croppie file upload errors - javascript

I am trying to upload the cropped results of croppie to an S3 bucket. I am currently getting a blank error when I successfully crop and then try to upload the cropped results.
I have followed Amazon docs including setting up the S3 bucket, identity pools, and configuring my CORS.
I believe the error has something to do with how croppie is packaging the cropped results. I have included my app.js file (where I handle the upload) and the code where the addPhoto function is being called. Resp is the response from croppie.
The expected outcome is that I can successfully crop a photo and then upload it to my S3 bucket.
$('.crop').on('click', function (ev) {
  $uploadCrop.croppie('result', {
    type: 'canvas',
    size: 'original'
  }).then(function (resp) {
    Swal.fire({
      imageUrl: resp,
      showCancelButton: true,
      confirmButtonText: "Upload",
      reverseButtons: true,
      showCloseButton: true
    }).then((result) => {
      if (result.value) {
        addPhoto(resp);
      }
    });
  });
});
app.js
var albumBucketName = "colorsort";
var bucketRegion = "xxx";
var IdentityPoolId = "xxx";
AWS.config.update({
region: bucketRegion,
credentials: new AWS.CognitoIdentityCredentials({
IdentityPoolId: IdentityPoolId
})
});
var s3 = new AWS.S3({
apiVersion: "2006-03-01",
params: { Bucket: albumBucketName }
});
function addPhoto(resp) {
var file = resp;
var fileName = file.name;
console.log(resp.type);
var photoKey = fileName;
// Use S3 ManagedUpload class as it supports multipart uploads
var upload = new AWS.S3.ManagedUpload({
params: {
Bucket: albumBucketName,
Key: photoKey,
Body: file,
ACL: "public-read"
}
});
var promise = upload.promise();
promise.then(
function(data) {
alert("Successfully uploaded photo.");
},
function(err) {
return alert("There was an error uploading your photo: ", err.message);
}
);
}

The solution I found involved adding the following snippet to my CORS config, as well as changing the croppie result type from 'canvas' to 'base64'.
<AllowedHeader>*</AllowedHeader>
Useful resources: Upload missing ETag, Uploading base64 image to Amazon with Node.js
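A rough sketch of the adjusted call (same $uploadCrop element and addPhoto function as above; only the result type changes):
$uploadCrop.croppie('result', {
  type: 'base64',   // changed from 'canvas'
  size: 'original'
}).then(function (resp) {
  // resp is now a base64 data URL string, which is what gets passed to addPhoto
  addPhoto(resp);
});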

Related

Upload image to AWS S3 bucket using the express-fileupload library

Can anyone please help me? Using the express-fileupload library, I want to upload my files and images to an AWS S3 bucket through a RESTful API.
Hope you found the answer. In case you haven't, here is what I just found out:
const uploadSingleImage = async (file, s3, fileName) => {
  const bucketName = process.env.S3_BUCKET_NAME;
  if (!file.mimetype.startsWith('image')) {
    return { status: false, message: 'File uploaded is not an image' };
  }
  const params = {
    Bucket: bucketName,
    Key: fileName,
    Body: file.data,
    ACL: 'public-read',
    ContentType: file.mimetype,
  };
  return s3.upload(params).promise();
};
s3.upload(params).promise()
will return an object that contains the Location of the file you uploaded. In fact, you could construct that URL yourself, but that would not cover the case where an error occurs, so I think what I posted here is a better solution.
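For illustration, a minimal sketch of how the helper above might be wired into an Express route with express-fileupload; the route path, the image field name, and the S3 client setup are assumptions here, not part of the original question:
// Illustrative sketch: wiring uploadSingleImage into an Express route (names are assumptions).
const express = require('express');
const fileUpload = require('express-fileupload');
const AWS = require('aws-sdk');

const app = express();
app.use(fileUpload());

const s3 = new AWS.S3({ region: process.env.AWS_REGION });

app.post('/upload', async (req, res) => {
  try {
    const file = req.files.image; // assumes the form field is named "image"
    const result = await uploadSingleImage(file, s3, file.name);
    if (result.status === false) {
      // The helper rejected the file because it is not an image
      return res.status(400).json(result);
    }
    // On success, result.Location holds the public URL of the uploaded object
    return res.json({ url: result.Location });
  } catch (err) {
    return res.status(500).json({ error: err.message });
  }
});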

Can I upload to Spaces using a signed url?

I'm currently developing a React web app that lets users upload images. I have it working with AWS S3, but would like to have everything on DO for simplicity's sake.
The flow goes like this:
Send image info to server and receive a signed url
Post the image to the signed url
When I try this with Spaces, I'm constantly getting a "SignatureDoesNotMatch" error.
Please help, I'm going batty!
My Node server generating the signed URL:
const aws = require('aws-sdk');

const spacesEndpoint = new aws.Endpoint(`${DO_REGION}.digitaloceanspaces.com`),
  s3 = new aws.S3({
    endpoint: spacesEndpoint,
    accessKeyId: DO_ACCESS_KEY_ID,
    secretAccessKey: DO_SECRET_ACCESS_KEY,
    region: DO_REGION,
    signatureVersion: 'v4',
  });
const s3Params = {
  Bucket: DO_SPACE,
  Expires: 60,
  Key: filePath,
  ContentType: fileType, // "image/jpeg"
  ACL: 'public-read',
};

const promise = new Promise((resolve, reject) => {
  s3.getSignedUrl('putObject', s3Params, (err, url) => {
    if (err) {
      reject(err);
    }
    resolve(url);
  });
});
My client-side JS posting the file after retrieving the signed URL:
const xhr = new XMLHttpRequest();
xhr.withCredentials = true;
xhr.open('PUT', payload.signedUrl);
xhr.setRequestHeader('Host', `${DO_SPACE}.${DO_REGION}.digitaloceanspaces.com`);
xhr.setRequestHeader('x-amz-acl', 'public-read');
xhr.setRequestHeader('Content-Type', payload.file.type);
xhr.setRequestHeader('Content-Length', payload.file.size);
xhr.send(payload.file);

Angular 4: how to upload a photo to AWS S3

I have a problem with uploading photos to my AWS S3 bucket.
My code in Angular 4 looks like this:
upload(input) {
  let AWSService = window.AWS;
  let file = input.target.files[0];
  AWSService.config.update({
    region: "us-east-1",
    credentials: new AWS.CognitoIdentityCredentials({
      IdentityPoolId: "us-east-1:XXXXXXXXXXXXXX"
    })
  });
  let bucket = new AWSService.S3({
    params: {
      Bucket: "NAME"
    }
  });
  let params = {
    Key: file.name,
    Body: file
  };
  bucket.upload(params, (res) => {
    console.log(res);
  });
}
require('aws-sdk/dist/aws-sdk');
Unfortunately, I am always receiving:
a 403 Forbidden error.
What am I doing wrong?
The 403 Forbidden is coming from AWS S3; it is not a problem with Angular.
The AWS documentation has a complete example of maintaining a photo album with S3:
http://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/s3-example-photo-album.html
Hope it helps.
In the AWS CORS configuration, add:
<AllowedHeader>*</AllowedHeader>
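For context, a minimal sketch of a complete S3 CORS configuration (legacy XML format) showing where that header sits; the origins and methods here are placeholders to adjust for your own setup:
<CORSConfiguration>
  <CORSRule>
    <AllowedOrigin>*</AllowedOrigin>
    <AllowedMethod>GET</AllowedMethod>
    <AllowedMethod>PUT</AllowedMethod>
    <AllowedMethod>POST</AllowedMethod>
    <AllowedHeader>*</AllowedHeader>
    <MaxAgeSeconds>3000</MaxAgeSeconds>
  </CORSRule>
</CORSConfiguration>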

How do I upload a base64 encoded image (string) directly to a Google Cloud Storage bucket using Node.js?

Currently, I am using the @google-cloud/storage npm package to upload a file directly to a Google Cloud Storage bucket. This requires some trickery, as I only have the image's base64-encoded string. I have to:
Decode the string
Save it as a file
Send the file path to the below script to upload to Google Cloud Storage
Delete the local file
I'd like to avoid storing the file in the filesystem altogether since I am using Google App Engine and I don't want to overload the filesystem / leave junk files there if the delete operation doesn't work for whatever reason. This is what my upload script looks like right now:
// Convert the base64 string back to an image to upload into the Google Cloud Storage bucket
var base64Img = require('base64-img');
var filePath = base64Img.imgSync(req.body.base64Image, 'user-uploads', 'image-name');

// Instantiate the GCP Storage instance
var gcs = require('@google-cloud/storage')(),
    bucket = gcs.bucket('google-cloud-storage-bucket-name');

// Upload the image to the bucket
bucket.upload(__dirname.slice(0, -15) + filePath, {
  destination: 'profile-images/576dba00c1346abe12fb502a-original.jpg',
  public: true,
  validation: 'md5'
}, function(error, file) {
  if (error) {
    sails.log.error(error);
  }
  return res.ok('Image uploaded');
});
Is there any way to directly upload the base64-encoded string of the image instead of having to convert it to a file and then upload using the path?
The solution, I believe, is to use the file.createWriteStream functionality that the bucket.upload function wraps in the Google Cloud Node SDK.
I've got very little experience with streams, so try to bear with me if this doesn't work right off.
First of all, we need to take the base64 data and drop it into a stream. For that, we're going to include the stream library, create a buffer from the base64 data, and add the buffer to the end of the stream.
var stream = require('stream');
var bufferStream = new stream.PassThrough();
bufferStream.end(Buffer.from(req.body.base64Image, 'base64'));
More on decoding base64 and creating the stream.
We're then going to pipe the stream into a write stream created by the file.createWriteStream function.
var gcs = require('@google-cloud/storage')({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});

// Define bucket.
var myBucket = gcs.bucket('my-bucket');
// Define file & file name.
var file = myBucket.file('my-file.jpg');

// Pipe the 'bufferStream' into a 'file.createWriteStream' method.
bufferStream.pipe(file.createWriteStream({
  metadata: {
    contentType: 'image/jpeg',
    metadata: {
      custom: 'metadata'
    }
  },
  public: true,
  validation: "md5"
}))
.on('error', function(err) {})
.on('finish', function() {
  // The file upload is complete.
});
Info on file.createWriteStream, File docs, bucket.upload, and the bucket.upload method code in the Node SDK.
So the way the above code works: define the bucket you want to put the file in, then define the file and the file name. We don't set upload options here. We then pipe the bufferStream variable we just created into the file.createWriteStream method we discussed before; in its options we define the metadata and any other options you want to set. It was very helpful to look directly at the Node SDK code on GitHub to figure out how they break down the bucket.upload function, and I recommend you do so as well. Finally, we attach a couple of events for when the upload finishes and when it errors out.
Posting my version of the answer in response to @krlozadan's request above:
// Convert the base64 string back to an image to upload into the Google Cloud Storage bucket
var mimeTypes = require('mimetypes');

var image = req.body.profile.image,
    mimeType = image.match(/data:([a-zA-Z0-9]+\/[a-zA-Z0-9-.+]+).*,.*/)[1],
    fileName = req.profile.id + '-original.' + mimeTypes.detectExtension(mimeType),
    base64EncodedImageString = image.replace(/^data:image\/\w+;base64,/, ''),
    imageBuffer = new Buffer(base64EncodedImageString, 'base64');

// Instantiate the GCP Storage instance
var gcs = require('@google-cloud/storage')(),
    bucket = gcs.bucket('my-bucket');

// Upload the image to the bucket
var file = bucket.file('profile-images/' + fileName);
file.save(imageBuffer, {
  metadata: { contentType: mimeType },
  public: true,
  validation: 'md5'
}, function(error) {
  if (error) {
    return res.serverError('Unable to upload the image.');
  }
  return res.ok('Uploaded');
});
This worked just fine for me. Ignore some of the additional logic in the first few lines as they are only relevant to the application I am building.
If you want to save a string as a file in Google Cloud Storage, you can do it easily using the file.save method:
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file.txt');
const contents = 'This is the contents of the file.';
file.save(contents).then(() => console.log('done'));
What an issue! I tried it and hit the same problem: the image uploaded to Firebase Storage but could not be downloaded, and the loader just kept spinning. After spending some time on it, I got the image to upload to Firebase Storage so that it can also be downloaded. The issue was the access token.
Check the screenshot: in the file location section at the bottom right there is a "create access token" option, and no access token is shown there. If you create the access token manually and refresh the page, the image shows up. So the question becomes how to create it in code.
Just use the code below to create the access token:
const uuidv4 = require('uuid/v4');
const uuid = uuidv4();
metadata: { firebaseStorageDownloadTokens: uuid }
The full code for uploading an image to Firebase Storage is given below:
const functions = require('firebase-functions');
var firebase = require('firebase');
var express = require('express');
var bodyParser = require("body-parser");
const uuidv4 = require('uuid/v4');
const uuid = uuidv4();
const os = require('os');
const path = require('path');
const cors = require('cors')({ origin: true });
const Busboy = require('busboy');
const fs = require('fs');
var admin = require("firebase-admin");

var serviceAccount = {
  "type": "service_account",
  "project_id": "xxxxxx",
  "private_key_id": "xxxxxx",
  "private_key": "-----BEGIN PRIVATE KEY-----\jr5x+4AvctKLonBafg\nElTg3Cj7pAEbUfIO9I44zZ8=\n-----END PRIVATE KEY-----\n",
  "client_email": "xxxx@xxxx.iam.gserviceaccount.com",
  "client_id": "xxxxxxxx",
  "auth_uri": "https://accounts.google.com/o/oauth2/auth",
  "token_uri": "https://oauth2.googleapis.com/token",
  "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
  "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/firebase-adminsdk-5rmdm%40xxxxx.iam.gserviceaccount.com"
};

admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  storageBucket: "xxxxx-xxxx" // use your storage bucket name
});

const app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());

app.post('/uploadFile', (req, response) => {
  response.set('Access-Control-Allow-Origin', '*');
  const busboy = new Busboy({ headers: req.headers });
  let uploadData = null;

  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    // Write the incoming file to a temporary path
    const filepath = path.join(os.tmpdir(), filename);
    uploadData = { file: filepath, type: mimetype };
    console.log("-------------->>", filepath);
    file.pipe(fs.createWriteStream(filepath));
  });

  busboy.on('finish', () => {
    const bucket = admin.storage().bucket();
    bucket.upload(uploadData.file, {
      uploadType: 'media',
      metadata: {
        metadata: {
          firebaseStorageDownloadTokens: uuid,
          contentType: uploadData.type,
        },
      },
    })
    .then(() => {
      // Respond once the upload has finished
      response.status(200).json({ message: 'File uploaded successfully' });
    })
    .catch(err => {
      response.status(500).json({
        error: err,
      });
    });
  });

  busboy.end(req.rawBody);
});

exports.widgets = functions.https.onRequest(app);
You have to convert the base64 string to an image buffer and then upload it as shown below. The image_data_from_html variable is the data you extract from the HTML event.
const base64Text = image_data_from_html.split(';base64,').pop();
const imageBuffer = Buffer.from(base64Text, 'base64');
const contentType = image_data_from_html.split(';base64,')[0].split(':')[1];
const fileName = 'myimage.png';
const imageUrl = 'https://storage.googleapis.com/bucket-url/some_path/' + fileName;

await admin.storage().bucket().file('some_path/' + fileName).save(imageBuffer, {
  public: true,
  gzip: true,
  metadata: {
    contentType,
    cacheControl: 'public, max-age=31536000',
  }
});
console.log(imageUrl);
I was able to get the base64 string over to my Cloud Storage bucket with just one line of code.
var decodedImage = new Buffer(poster64, 'base64');
// Store Poster to storage
let posterFile = await client.file(decodedImage, `poster_${path}.jpeg`, { path: 'submissions/dev/', isBuffer: true, raw: true });
let posterUpload = await client.upload(posterFile, { metadata: { cacheControl: 'max-age=604800' }, public: true, overwrite: true });
let permalink = posterUpload.permalink
Something to be aware of is that if you are inside a Node.js environment, you won't be able to use atob().
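As a small illustrative sketch (assuming UTF-8 text), the usual Node.js stand-ins for atob()/btoa() use Buffer:
// Node.js stand-ins for the browser's atob()/btoa(), assuming UTF-8 text.
const decoded = Buffer.from('aGVsbG8=', 'base64').toString('utf8'); // like atob() -> "hello"
const encoded = Buffer.from('hello', 'utf8').toString('base64');    // like btoa() -> "aGVsbG8="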
The top answer of this post showed me the errors of my ways!
NodeJS base64 image encoding/decoding not quite working

S3 node module: attempting to upload a file gives an ENETUNREACH error

I'm trying to upload a file to my Amazon S3 bucket, but I'm getting an ENETUNREACH error. I do have permissions to upload/delete files for my buckets, and I also edited the CORS configuration to allow POST/GET requests from all origins. I'm thinking it might be faulty keys that I received from someone. What is a good way to test whether the keys I have are valid, if that happens to be the issue?
Code below:
var s3 = require('s3');

/* Create a client for uploading or deleting files */
var client = s3.createClient({
  maxAsyncS3: 20, // this is the default
  s3RetryCount: 3, // this is the default
  s3RetryDelay: 1000, // this is the default
  multipartUploadThreshold: 20971520, // this is the default (20 MB)
  multipartUploadSize: 15728640, // this is the default (15 MB)
  s3Options: {
    accessKeyId: 'xxxxxxxx',
    secretAccesskey: 'xxxxxxxx',
    region: 'xxxxxxxx'
  },
});

exports.uploadFile = function(fileName, bucket) {
  console.log('Uploading File: ' + fileName + '\nBucket: ' + bucket);
  var params = {
    localFile: fileName,
    s3Params: {
      Bucket: bucket,
      Key: 'testfile',
    },
  };
  var uploader = client.uploadFile(params);
  uploader.on('error', function(err) {
    console.error("unable to upload:", err.stack);
  });
  uploader.on('progress', function() {
    console.log("progress", uploader.progressMd5Amount, uploader.progressAmount, uploader.progressTotal);
  });
  uploader.on('end', function() {
    console.log("done uploading");
  });
};
Console log when trying to upload a small txt file:
Console Log
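On the key-validity question, one way to sanity-check a key pair independently of the s3 module is to call STS GetCallerIdentity with the plain aws-sdk; a minimal sketch, assuming aws-sdk v2 and placeholder keys:
// Suggested sketch: check whether a key pair is valid using the plain aws-sdk (v2).
var AWS = require('aws-sdk');

var sts = new AWS.STS({
  accessKeyId: 'xxxxxxxx',      // the keys you want to test
  secretAccessKey: 'xxxxxxxx',
  region: 'us-east-1'
});

sts.getCallerIdentity({}, function(err, data) {
  if (err) {
    // e.g. InvalidClientTokenId or SignatureDoesNotMatch usually means the keys are bad
    console.error('Credential check failed:', err.code, err.message);
  } else {
    console.log('Credentials are valid for account:', data.Account, 'as', data.Arn);
  }
});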
I disabled IIS services, which fixed my error.
