How to upload a file to Digital Ocean Spaces using JavaScript

I am looking to use Digital Ocean Spaces (which seems to have an API identical to S3), and would like to try it by uploading a sample file. I am having lots of difficulty. Here's what I've done so far.

The file hiworld.json that I would like to upload contains:

{'hi' : 'world'}

I understand that I need to create an AWS v4 signature before I can make this request.
var aws4 = require('aws4')
var request = require('request')

var opts = {
  json: true,
  body: "{'hi':'world'}",
  host: '${myspace}.nyc3.digitaloceanspaces.com',
  path: '/hiworld.json'
}

aws4.sign(opts, { accessKeyId: '${SECRET}', secretAccessKey: '${SECRET}' })
Then I send the request
request.put(opts, function(error, response) {
  if (error) {
    console.log(error);
  }
  console.log(response.body);
});
However, when I check my Digital Ocean Space, I see that my file was not created. I have noticed that if I change the PUT to a GET and try to access an existing file, I have no issues.
Here's what my headers look like
headers:
{ Host: '${myspace}.nyc3.digitaloceanspaces.com',
'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
'Content-Length': 14,
'X-Amz-Date': '20171008T175325Z',
Authorization: 'AWS4-HMAC-SHA256 Credential=${mykey}/20171008/us-east-1//aws4_request, SignedHeaders=content-length;content-type;host;x-amz-date, Signature=475e691d4ddb81cca28eb0dcdc7c926359797d5e383e7bef70989656822accc0' },
method: 'POST' }

As an alternative, using aws-sdk:
// 1. Importing the SDK
import AWS from 'aws-sdk';

// 2. Configuring the S3 instance for Digital Ocean Spaces
const spacesEndpoint = new AWS.Endpoint(
  `${REGION}.digitaloceanspaces.com`
);
const url = `https://${BUCKET}.${REGION}.digitaloceanspaces.com/${file.path}`;
const S3 = new AWS.S3({
  endpoint: spacesEndpoint,
  accessKeyId: ACCESS_KEY_ID,
  secretAccessKey: SECRET_ACCESS_KEY
});

// 3. Using .putObject() to make the PUT request; S3 signs the request
const params = { Body: file.stream, Bucket: BUCKET, Key: file.path };

S3.putObject(params)
  .on('build', request => {
    request.httpRequest.headers.Host = `https://${BUCKET}.${REGION}.digitaloceanspaces.com`;
    // Note: I am assigning the size to the file Stream manually
    request.httpRequest.headers['Content-Length'] = file.size;
    request.httpRequest.headers['Content-Type'] = file.mimetype;
    request.httpRequest.headers['x-amz-acl'] = 'public-read';
  })
  .send((err, data) => {
    if (err) logger(err, err.stack);
    else logger(JSON.stringify(data, '', 2));
  });

var str = {
  'hi': 'world'
};
var c = JSON.stringify(str);

request(aws4.sign({
  uri: 'https://${space}.nyc3.digitaloceanspaces.com/newworlds.json',
  method: 'PUT',
  path: '/newworlds.json',
  headers: {
    'Cache-Control': 'no-cache',
    'Content-Type': 'application/x-www-form-urlencoded',
    'accept': '*/*',
    'host': '${space}.nyc3.digitaloceanspaces.com',
    'accept-encoding': 'gzip, deflate',
    'content-length': c.length
  },
  body: c
}, { accessKeyId: '${secret}', secretAccessKey: '${secret}' }), function(err, res) {
  if (err) {
    console.log(err);
  } else {
    console.log(res);
  }
});
This gave me a successful PUT

It can be done using multer and the AWS SDK. It worked for me.
const aws = require('aws-sdk');
const multer = require('multer');
const express = require('express');
const multerS3 = require('multer-s3');

const app = express();

const spacesEndpoint = new aws.Endpoint('sgp1.digitaloceanspaces.com');
const spaces = new aws.S3({
  endpoint: spacesEndpoint,
  accessKeyId: 'your_access_key_from_API',
  secretAccessKey: 'your_secret_key'
});

const upload = multer({
  storage: multerS3({
    s3: spaces,
    bucket: 'bucket-name',
    acl: 'public-read',
    key: function (request, file, cb) {
      console.log(file);
      cb(null, file.originalname);
    }
  })
}).array('upload', 1);
Now you can also call this from an API route like this:
app.post('/upload', function (request, response, next) {
  upload(request, response, function (error) {
    if (error) {
      console.log(error);
    }
    console.log('File uploaded successfully.');
  });
});
The HTML would look like this:
<form method="post" enctype="multipart/form-data" action="/upload">
  <label for="file">Upload a file</label>
  <input type="file" name="upload">
  <input type="submit" class="button">
</form>
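For completeness, the Express app above still has to be started before the /upload route is reachable; a minimal sketch, assuming port 3000:
// Start the server (the port number is an assumption; use whatever suits your setup)
app.listen(3000, function () {
  console.log('Listening on http://localhost:3000');
});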

Related

How to Upload File Using AWS-SDK (From Base64)?

Example code here:
var AWS = require('aws-sdk');

var fixNameFile = "tes.png";
let s3bucket = new AWS.S3({
  endpoint: "xxx...",
  accessKeyId: "xxx...",
  secretAccessKey: "xxx...",
});

var imgBase64 = "data:image/png;base64,xxxxxx....";
var params = {
  Bucket: 'fileupload',
  ACL: 'public-read',
  Key: fixNameFile,
  Body: imgBase64,
  ContentEncoding: 'base64'
};

s3bucket.putObject(params, function (err, data) {
  console.log(data);
});
In my case, the upload succeeds. However, the resulting image comes out blank, like this:
https://imgur.com/a/6Eci8Pv
Does anyone have an idea what is going wrong in my case? Please help me :)
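A hedged guess, since I can't reproduce the setup: putObject does not decode a data URI string, so the object ends up containing the literal "data:image/png;base64,..." text rather than image bytes, which renders as a blank/broken image. A sketch of uploading a decoded Buffer instead, reusing the names from the snippet above:
// Strip the data URI prefix and decode the base64 payload to raw bytes
var base64Data = imgBase64.replace(/^data:image\/\w+;base64,/, '');
var imgBuffer = Buffer.from(base64Data, 'base64');

var params = {
  Bucket: 'fileupload',
  ACL: 'public-read',
  Key: fixNameFile,
  Body: imgBuffer,          // raw bytes, not the data URI string
  ContentType: 'image/png'  // so browsers render it as an image
};

s3bucket.putObject(params, function (err, data) {
  if (err) console.log(err);
  else console.log(data);
});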

using multer to post files into Digital Ocean Space

Trying to post local files to Digital Ocean Space
My post request body is:
[ 'http://localhost:8090/d/3534352009.png',
'http://localhost:8090/d/3534352009-600x600.png' ]
These files are located locally, on the same machine where this API is running.
In the JS file I have this config:
// Change bucket property to your Space name
const upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'xxxx',
    acl: 'public-read',
    key: function (request, file, cb) {
      console.log(file);
      cb(null, file.originalname);
    }
  })
}).array('upload', 2);
And a route to post
app.post('/upload', function (request, response, next) {
  upload(request, response, function (error) {
    if (error) {
      console.log(error);
      return response.redirect("/error");
    }
    console.log('File uploaded successfully.');
    response.redirect("/success");
  });
});
I can't figure out how to post these two images from an object into the Digital Ocean Space.
There are plenty of tutorials on how a browser user can post multiple images from a form, but I have the files locally and want to post them via a script.
It can be done using the AWS SDK putObject API. This works for me:
const aws = require('aws-sdk');
var fs = require('fs');

const spacesEndpoint = new aws.Endpoint('sgp1.digitaloceanspaces.com');
const spaces = new aws.S3({
  endpoint: spacesEndpoint,
  accessKeyId: 'spaces_key',
  secretAccessKey: 'spaces_secret'
});

fs.readFile('file_path', function (err, data) {
  if (err) { throw err; }
  var params = { Bucket: 'bucket_name', Key: 'file_name', Body: data };
  spaces.putObject(params, function (err, data) {
    if (err) {
      console.log(err);
    } else {
      console.log("Successfully uploaded data to DO Spaces");
    }
  });
});
Include this code in an API endpoint if you want to send the file path in the request body, as sketched below.
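A rough sketch of what such an endpoint could look like, reusing the spaces client and fs from the snippet above; Express, the /upload-by-path route, and the filePath field name are assumptions, and the path should be validated before it is read:
const express = require('express');
const path = require('path');
const app = express();
app.use(express.json());

// Hypothetical endpoint: reads a local file named in the request body and
// pushes it to the Space with putObject. Not production-ready: validate
// filePath so callers cannot read arbitrary files from the server.
app.post('/upload-by-path', function (req, res) {
  const filePath = req.body.filePath; // e.g. "./d/3534352009.png"
  fs.readFile(filePath, function (err, data) {
    if (err) { return res.status(400).send(err.message); }
    const params = { Bucket: 'bucket_name', Key: path.basename(filePath), Body: data };
    spaces.putObject(params, function (uploadErr) {
      if (uploadErr) { return res.status(500).send(uploadErr.message); }
      res.send('Uploaded ' + filePath);
    });
  });
});

app.listen(8090);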

Getting undefined filename, filetype when uploading files to AWS S3

What I'm trying to do:
Upload a file to AWS S3, then take the filename and filetype and place them at the end of the URL so it can be saved in SQL; that way, every time the person logs in, the picture is pulled up by the user's image URL.
Problem:
The file is not uploading, and the filename and filetype are not being recognized; filetype and filename come up as undefined in both the URL and the signed URL.
The code for my fileUploadService.js used in Node.js is shown below.
The getSignedURL looks like this:
https://sabio-training.s3.us-west-2.amazonaws.com/C56/filename/?AWSAccessKeyId=AKIAJF53EJKW7SJUV55Q&Content-Type=filetype&Expires=1536877443&Signature=WxSvLSzfyZKDRN9LawVOwj1ayVY%3D&x-amz-acl=public-read
The URL looks like this:
https://sabio-training.s3.amazonaws.com/C56/filename/filetype
const aws = require('aws-sdk');
aws.config.region = 'us-west-2';
aws.config.update({ accessKeyId: '', secretAccessKey: '' });

const PROFILE_S3_LINK = "https://sabio-training.s3.amazonaws.com/";

module.exports = {
  getUrl: getUrl
}

function getUrl(req, res) {
  const s3 = new aws.S3();
  const fileName = 'C56/' + "filename" + '/'; // hardcoded filename and filetype for it to work
  const fileType = "filetype"; // How to take filename from uploaded file to insert into "fileName" along with the "filetype"?
  const s3Params = {
    Bucket: 'sabio-training',
    Key: fileName,
    Expires: 3000,
    ContentType: fileType,
    ACL: 'public-read'
  };
  s3.getSignedUrl('putObject', s3Params, (err, data) => {
    if (err) {
      console.log(err);
      return res.end();
    }
    const returnData = {
      signedRequest: data,
      url: `${PROFILE_S3_LINK}${fileName}${fileType}` // unsigned URL
    };
    res.write(JSON.stringify(returnData));
    res.end();
  });
}
=========================================================================
fileUploadRoute.js
const router = require("express").Router();
const fileUploadController = require("../controllers/fileUploadController")
router.put("/", fileUploadController.getUrl);
module.exports = router;
==========================================================================
fileUploadController.js
const fileUploadService = require('../services/fileUploadService')
const responses = require("../models/responses");
module.exports = {
  getUrl: getUrl
}

function getUrl(req, res) {
  fileUploadService.getUrl(req, res)
    .then(response => {
      res.send(response);
    })
    .catch(error => {
      res.send(error);
    });
}
===========================================================================
index.js in node portion
const router = require("express").Router();
const pogsRoutes = require("./pogs.routes");
const userFromJWT = require("../filters/jwt.user");
const validateUser = require("../filters/validate.user");
const testRoutes = require("./test.routes");
const profileRoute = require("../profile/profileRoute");
const fileUploadRoute = require("../fileUpload/fileUploadRoute")
module.exports = router;
// router.use("/api/profilePage", profileRoute)
router.use("/api/pogs", pogsRoutes);
router.use("/api/upload", fileUploadRoute)
router.use("/api/profilePage", profileRoute)
// -----------------------------------
// Authenticated routes go below this:
// -----------------------------------
router.use(userFromJWT);
router.use(validateUser);
router.use("/api/test", testRoutes); // TODO: remove this before delivery to the client
============================================================================
USED IN REACT
Axios pulled from profile page
handleClickUpload = evt => {
  evt.preventDefault();
  console.log("RESULT : ", this.state);
  // var file = evt.target.files[0]; <-- haven't used this yet but I know it's for the upload
  axios.put(`${NODE_API_URL}/api/upload`, {
    // f: file
  })
    .then(response => {
      console.log(
        response, "URL SIGNED REQUEST : ", response.data.signedRequest, " URL : ", response.data.url
      );
    })
    .catch(error => {
      console.log(error);
    });
}
Upload button and file upload portion inside profile page
<div method="post" encType="multipart/form-data" action="/">
  <input type="file" name="fileName" className="btn" />
  <input type="submit" value="Upload" className="btn" onClick={this.handleClickUpload} />
</div>
Can anyone let me know what I'm doing wrong or if anything is missing?
First of all, take a look at https://s3browser.com/features-content-mime-types-editor.aspx; afterwards you can figure out what kind of ContentType you have to use. I think it should be 'text/plain' or 'text/html'.
Then you have to add one more property to the S3 params named Body (the body has to contain the entity that you want to upload to the S3 bucket).
Here is a snippet that you can use to upload your entity to s3 and then get a location:
let s3 = new AWS.S3({ region: process.env.AWS_REGION, apiVersion: '2006-03-01' });

let params = {
  Bucket: bucketName, // placeholder: your bucket's name
  Key: fileName,      // a path
  Body: fileBody,     // placeholder: the entity you want to upload
  ACL: 'public-read',
  ContentType: 'text/plain'
};

let s3Response = await s3.upload(params).promise();
console.log(`Entity uploaded to S3 at ${s3Response.Bucket} bucket. Location: ${s3Response.Location}`);

return s3Response.Location;
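Because the snippet uses await, it has to run inside an async function; a minimal wrapper as a sketch, with the bucket, key, and body passed in as parameters rather than hardcoded:
const AWS = require('aws-sdk');

// Sketch: upload a piece of text and return its public URL
async function uploadText(bucket, fileName, text) {
  const s3 = new AWS.S3({ region: process.env.AWS_REGION, apiVersion: '2006-03-01' });
  const params = {
    Bucket: bucket,
    Key: fileName,
    Body: text,
    ACL: 'public-read',
    ContentType: 'text/plain'
  };
  const s3Response = await s3.upload(params).promise();
  return s3Response.Location; // public URL of the uploaded object
}

// Example call (the bucket and key are placeholders)
uploadText('sabio-training', 'C56/hello.txt', 'hello world')
  .then(url => console.log('Uploaded to', url))
  .catch(err => console.error(err));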

How do I transfer data from one method to another in Node.js?

I'm using the Telegram Bot API and AWS S3 to read data from a bucket. I need to use the data from the S3 method in the Telegraf method, but I don't know how:
'use strict'

const Telegraf = require('telegraf');
const bot = new Telegraf('TOKEN')

var AWS = require('aws-sdk')
var s3 = new AWS.S3({
  accessKeyId: 'key',
  secretAccessKey: 'secret'
})

var params = { Bucket: 'myBucket', Key: "ipsum.txt" };

var s3Promise = s3.getObject(params, function(err, data) {
  if (err) console.log(err, err.stack);
  else {
    var words = data.Body.toString(); // WHAT I WANT IN THE COMMAND METHOD
    console.log('\n' + words + '\n'); // Returns ipsum.txt as a string on the console
  }
})

bot.command('s', (ctx) => { // Bot command
  s3Promise; // Returns ipsum.txt as a string on the console
  ctx.reply('Check console') // Message in Telegram
  // ctx.reply(<I WANT data.Body.toString() HERE>)
});

const { PORT = 3000 } = process.env
bot.startWebhook('/', null, PORT)
How do I use the data from the s3.getObject method in ctx.reply() ?
If you want to send the file as an attachment, you have to use: ctx.replyWithDocument. Aside from that your problem is: How do I return the response from an asynchronous call?
In this particular case you can use s3.getObject(params).promise() in order to avoid the callback API, and use it easily inside your bot.command listener.
Using async/await (Node >= 7.6) your code can be written like this
'use strict';

const Telegraf = require('telegraf');
const bot = new Telegraf('TOKEN');

const AWS = require('aws-sdk');
const s3 = new AWS.S3({
  accessKeyId: 'key',
  secretAccessKey: 'secret'
});

const params = {
  Bucket: 'myBucket',
  Key: 'ipsum.txt'
};

bot.command('s', async ctx => { // Bot command
  try {
    // If you're always sending the same file and it won't change
    // too much, you can cache it to avoid the external call every time
    const data = await s3.getObject(params).promise();
    ctx.reply('Check console'); // Message in Telegram
    // This will send the file as an attachment
    ctx.replyWithDocument({
      source: data.Body,
      filename: params.Key
    });
    // or just as text
    ctx.reply(data.Body.toString());
  } catch (e) {
    // S3 failed
    ctx.reply('Oops');
    console.log(e);
  }
});

const { PORT = 3000 } = process.env;

bot.startWebhook('/', null, PORT);
More info on how to work with files can be found in the Telegraf docs.
PS: I tested the code and it's fully working.
While I haven't used S3, I do know that AWS services added support for Promises to their implementations to avoid using callbacks. Personally, I much prefer the use of promises as I think they lead to more readable code.
I think the following should handle the issue you're having.
'use strict'

const Telegraf = require('telegraf');
const bot = new Telegraf('TOKEN')

var AWS = require('aws-sdk')
var s3 = new AWS.S3({
  accessKeyId: 'key',
  secretAccessKey: 'secret'
})

var params = { Bucket: 'myBucket', Key: "ipsum.txt" };

bot.command('s', (ctx) => {
  s3.getObject(params).promise()
    .then(data => {
      ctx.reply('Check console');
      ctx.reply(data.Body.toString());
    }, err => console.log(err, err.stack));
})

const { PORT = 3000 } = process.env
bot.startWebhook('/', null, PORT)
As suggested by Luca, I called bot.command inside of s3.getObject and it works!
s3.getObject(params, function(err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else {
    bot.command('s', (ctx) => {
      ctx.reply('Successfully read from S3:\n\n' + data.Body.toString())
    });
  }
})

Upload file to S3 with PUT and form-data

I am trying to upload an image file (jpeg) to AWS S3 via the PUT interface, and I am getting the error SignatureDoesNotMatch.
On my server, I have an Express node.js app with an endpoint to create a signed url.
'use strict';

const express = require('express');
const bodyParser = require('body-parser');
const config = require('./config');

// Load the AWS SDK for Node.js
const AWS = require('aws-sdk');
AWS.config.update({
  accessKeyId: config.AWS_ACCESS_KEY_ID,
  secretAccessKey: config.AWS_SECRET_ACCESS_KEY,
  region: 'us-east-1'
});
const s3 = new AWS.S3();

const app = express();
const awsS3Router = express.Router();

// parse application/json
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());

// AWS S3 REST endpoints
awsS3Router.get('/getImageDrop', function(req, res) {
  if (!req.query.filename) {
    res.status(400).send('Request query is empty!');
  }
  const s3Params = {
    Bucket: config.S3_BUCKET,
    ContentType: 'image/jpeg',
    ACL: 'public-read',
    Key: req.query.filename,
    Expires: 6000
  };
  s3.getSignedUrl('putObject', s3Params, function(err, data) {
    if (err) {
      console.error('ERROR: ' + err);
      return res.end();
    }
    const returnData = {
      signedRequest: data,
      url: 'https://' + config.S3_BUCKET + '.s3.amazonaws.com/' + req.query.filename
    };
    app.locals.s3SignedUrl = returnData.signedRequest;
    res.write(JSON.stringify(returnData));
    res.end();
  });
});

app.use('/aws/s3', awsS3Router);

module.exports = app;
On the client side, I can call this endpoint and get a signed S3 url back. The response url format is:
https://[bucket name].s3.amazonaws.com/878CF5A4-D013-435F-BF7D-F45AB69E580F.jpg?AWSAccessKeyId=[AWS access key]&Content-Type=image%2Fjpeg&Expires=1521244920&Signature=[Signature]&x-amz-acl=public-read
The client code has a function to upload the file to the signed S3 url.
async uploadImageToS3BucketAsync(imageFileUri, fileSize, signedUrl) {
  const fileName = PathParse(imageFileUri).base;

  let form = new FormData();
  form.append('files[0]', {
    'uri': imageFileUri,
    'name': fileName,
    'type': 'image/jpeg'
  });
  // form.append('photo', imageFileUri);

  console.info('INFO: PUT ' + signedUrl.signedRequest + ': Request: ' + JSON.stringify(form));

  return fetch((signedUrl.signedRequest), {
    method: 'PUT',
    headers: { 'Content-Type': 'image/jpeg', 'Content-Length': fileSize },
    body: form
  })
    .then(function(res) {
      if (res.ok) {
        console.info('INFO: PUT ' + JSON.stringify(signedUrl) + ': Response: ' + JSON.stringify(res));
        return res.json();
      } else {
        console.error('Failed to upload image to S3 bucket!');
        console.error('ERROR: ' + JSON.stringify(res));
        alert('Failed to upload image to S3 bucket!!');
      }
    })
    .catch(function(err) {
      console.error('ERROR: Request failed', err);
    });
}
Unfortunately, the upload fails systematically with a 403 error:
<Error><Code>SignatureDoesNotMatch</Code><Message>The request signature we calculated does not match the signature you provided. Check your key and signing method.</Message>
I am guessing I am missing something in the request headers of the PUT call, but I am not sure what it is. Has anyone found a solution for this in node.js?
The trick is to retrieve the file from the FormData instance after appending it.
const formData = new FormData();
formData.append('File', selectedFile);

fetch(presignedPutUrl, {
  method: 'PUT',
  body: formData.get("File"),
});
Thanks @Michael - sqlbot for the comment that got me here.
You don't use PUT with a form structure. PUT expects the raw binary body.
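To illustrate that, here is a sketch of PUTting the raw file bytes to the presigned URL with no FormData wrapper at all; selectedFile is assumed to be a File or Blob from an input element, and the Content-Type has to match the one the URL was signed with:
fetch(presignedPutUrl, {
  method: 'PUT',
  headers: { 'Content-Type': 'image/jpeg' }, // must match the ContentType used when signing
  body: selectedFile // the raw File/Blob, no multipart encoding
})
  .then(res => {
    if (!res.ok) throw new Error('Upload failed: ' + res.status);
    console.log('Uploaded');
  })
  .catch(err => console.error(err));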
