I volunteer for a certain association and I wanted to create a landing page where the volunteers upload a picture, send it and it is saved in my Google Drive.
After many attempts - I get an error at the moment of sending. Brings up the code I wrote down and the error. The "---" is the details of the project.
Thanks!
```
<!DOCTYPE html>
<html>
<head>
<title>Upload Image to Google Drive</title>
<script src="https://apis.google.com/js/api.js"></script>
<script>
const CLIENT_ID = '------';
const API_KEY = '-----';
const PUBLIC_FOLDER_ID = '-----';
// Authorization scopes required by the API
const SCOPES = 'https://www.googleapis.com/auth/drive.file';
/**
* Load the API client library and authorize the user.
*/
function handleClientLoad() {
gapi.load('client:auth2', initClient);
}
function initClient() {
gapi.client.init({
apiKey: API_KEY,
clientId: CLIENT_ID,
discoveryDocs: ["https://www.googleapis.com/discovery/v1/apis/drive/v3/rest"],
scope: SCOPES,
plugin_name:'demoApp'
}).then(() => {
// Listen for form submit events
document.getElementById('upload-form').addEventListener('submit', uploadImage);
}).catch(error => {
console.error('Error initializing API client:', error);
});
}
/**
* Upload an image to Google Drive.
*/
function uploadImage(event) {
event.preventDefault();
const file = document.getElementById('file-input').files[0];
if (!file) {
console.error('No file selected.');
return;
}
const metadata = {
name: file.name,
parents: [PUBLIC_FOLDER_ID]
};
const reader = new FileReader();
reader.onload = function(e) {
const fileContent = e.target.result;
const fileData = new Blob([fileContent], {type: file.type});
const uploadRequest = gapi.client.drive.files.create({
resource: metadata,
media: {
mimeType: file.type,
body: fileData
},
fields: 'id'
});
uploadRequest.execute(response => {
console.log('Image uploaded with ID:', response.id);
});
};
reader.readAsArrayBuffer(file);
}
</script>
</head>
<body onload="handleClientLoad()">
<h1>Upload Image to Google Drive</h1>
<form id="upload-form">
<input type="file" id="file-input">
<button type="submit">Upload</button>
</form>
</body>
</html>
```
Related
I am trying to create a task on Amazon MTurk, where the workers would collect some data and upload a single file when they are ready & submit the task. When the task is submitted, I want to upload the file to my linked S3 bucket - which is mostly based on this tutorial.
However, the file is sometimes uploaded successfully, and sometimes not. Since the S3.upload function is asynchronous, it looks like the task submission is sometimes completed before the file upload is completed. I am a javascript newbie: I tried to make this happen synchronously, but it still doesn't work properly. Here is my javascript code:
<script>
let config = {
region: 'xxx',
pool: 'xxx',
bucket: 'xxx'
}
AWS.config.region = config.region;
AWS.config.credentials = new AWS.CognitoIdentityCredentials({
IdentityPoolId: config.pool,
});
var s3 = new AWS.S3({
apiVersion: '2006-03-01',
params: {Bucket: config.bucket},
});
start_upload = function (event) {
$("#status").text("Uploading...");
let file = $("#file").prop('files')[0];
if (file === null || file === undefined) {
alert("You must upload a file before submitting.");
$("#status").text("");
return false;
}
console.log('Filename: ' + file.name);
let workerId = turkGetParam('workerId');
let fileKey = '${food_name}' + '/' + workerId + '-' + file.name;
return upload_to_s3(file, fileKey);
};
upload_to_s3 = async (file, fileKey) => {
const params = {
Key: fileKey,
Body: file,
ContentType: file.type,
ACL: 'bucket-owner-full-control'
};
try {
console.log("Starting upload...");
const data = await s3.upload(params).promise();
console.log("Done uploading file");
$("#status").text("Success.");
return true;
} catch (err) {
console.log("Error uploading data. ", err);
alert("Failed to upload, please try again. If the problem persists, contact the Requester.");
$("#status").text("");
return false;
}
}
// Validate and upload file on submit
window.onload = function() {document.getElementById('submitButton').setAttribute('onclick', 'return start_upload()'); }
</script>
Here is the relevant part of the layout of this task (HIT):
How can I make sure that the file upload is completed before the task is completed? I saw that I can overwrite the default submit button added by MTurk, but I would prefer not doing that if possible.
I've found the problem: S3#upload returns a ManagedUpload object, but it doesn't mean that the file upload is completed. I am now using promises and in the callback I submit the form manually. Note that the form is provided by MTurk by default. I just find it by its ID and invoke the submit function manually.
For reference, here is the working code:
<script>
let config = {
region: 'xxx',
pool: 'xxx',
bucket: 'xxx'
}
AWS.config.region = config.region;
AWS.config.credentials = new AWS.CognitoIdentityCredentials({
IdentityPoolId: config.pool,
});
var s3 = new AWS.S3({
apiVersion: '2006-03-01',
params: {Bucket: config.bucket},
});
start_upload = function (event) {
$("#status").text("Uploading, please wait...");
let file = $("#file").prop('files')[0];
if (file === null || file === undefined) {
alert("You must choose a file before submitting.");
$("#status").text("");
return false;
}
let workerId = turkGetParam('workerId');
let fileKey = '${food_name}' + '/' + workerId + '-' + file.name;
upload_to_s3(file, fileKey);
return false;
};
upload_to_s3 = (file, fileKey) => {
const params = {
Key: fileKey,
Body: file,
ContentType: file.type,
ACL: 'bucket-owner-full-control'
};
let promise = s3.upload(params).promise();
promise.then( (data) => {
console.log("Upload completed");
$("#status").text("Success.");
const form = document.getElementById('mturk_form');
form.submit();
}, (err) => {
console.log("Upload failed!!!", err);
alert("Failed to upload, please try again. If the problem persists, contact the Requester.");
$("#status").text("");
} );
}
// Validate and upload file on submit
window.onload = function() {document.getElementById('submitButton').setAttribute('onclick', 'return start_upload()'); }
</script>
Hey guys I'm working with PouchDB and react. File uploading is working normally but when I try to convert blob into url I'm having this error in console.
"Failed to execute 'createObjectURL' on 'URL': Overload resolution
failed."
I checked the return and I'm able to retrieve all the data/images into the console and basically see the file types and so on. Anyway I heard that "createObjectURL" is deprecated or so but I followed up the tutorial provided in docs for the PouchDB. So I'm not sure now. Can someone give me any insights or help on this ? Thanks
Snippet below:
// uploading files
const uploadF = e => {
// saving chosen file
const file = e.target.files[0];
// generating random number and converting it to the string
const random_id = Math.floor(Math.random() * 10000);
const random_id_to_string = String(random_id);
console.log(file);
// insert data into local DB
db.post({
_id: random_id_to_string,
_attachments: {
fileName: {
content_type: file.type,
data: file
}
}
})
// insert data into remote db
redb.post({
_id: random_id_to_string,
_attachments: {
fileName: {
content_type: file.type,
data: file
}
}
})
// upload file to s3 bucket
S3FileUpload.uploadFile(e.target.files[0],config)
.then(data => {
console.log(data);
})
.catch(err => console.log(err))
}
// retrieve all data from db
const files = [];
db.allDocs({
include_docs: true,
attachments: true
}).then(function (result) {
return result;
})
.then(function(blob){
const url = URL.createObjectURL(blob);
console.log(url);
})
.catch(function (err) {
console.log(err);
});
return(
<section className="hero">
<nav>
<h2>Welcome</h2>
<button onClick={handleLogout}>Logout</button>
<input type="file" onChange={e => uploadF(e)} />
</nav>
</section>
);
I am trying to get my node.js backend to upload a file to AWS S3, which it got in a post request from my front-end. This is what my function looks like:
async function uploadFile(file){
var uploadParams = {Bucket: '<bucket-name>', Key: file.name, Body: file};
s3.upload (uploadParams, function (err, data) {
if (err) {
console.log("Error", err);
} if (data) {
console.log("Upload Success", data.Location);
}
});
}
When I try uploading the file this way, I get an Unsupported Body Payload Error...
I used fileStream.createReadStream() in the past to upload files saves in a directory on the server, but creating a fileStream did not work for me, since there is no path parameter to pass here.
EDIT:
The file object is created in the angular frontend of my web application. This it the relevant html code where the file is uploaded by a user:
<div class="form-group">
<label for="file">Choose File</label>
<input type="file" id="file"(change)="handleFileInput($event.target.files)">
</div>
If the event occurs, the handleFileInput(files: FileList) method in the corresponding component is called:
handleFileInput(files: FileList) {
// should result in array in case multiple files are uploaded
this.fileToUpload = files.item(0);
// actually upload the file
this.uploadFileToActivity();
// used to check whether we really received the file
console.log(this.fileToUpload);
console.log(typeof this.fileToUpload)
}
uploadFileToActivity() {
this.fileUploadService.postFile(this.fileToUpload).subscribe(data => {
// do something, if upload success
}, error => {
console.log(error);
});
}
the postFile(fileToUpload: File) method of the file-upload service is used to make the post request:
postFile(fileToUpload: File): Observable<Boolean> {
console.log(fileToUpload.name);
const endpoint = '/api/fileupload/single';
const formData: FormData = new FormData();
formData.append('fileKey', fileToUpload, fileToUpload.name);
return this.httpClient
.post(endpoint, formData/*, { headers: yourHeadersConfig }*/)
.pipe(
map(() => { return true; }),
catchError((e) => this.handleError(e)),
);
}
Here is the the server-side code that receives the file and then calls the uploadFile(file) function:
app.post('/api/fileupload/single', async (req, res) => {
try {
if(!req.files) {
res.send({
status: false,
message: 'No file uploaded'
});
} else {
let file = req.files.fileKey;
uploadFile(file);
//send response
res.send({
status: true,
message: 'File is uploaded',
data: {
name: file.name,
mimetype: file.mimetype,
size: file.size
}
});
}
} catch (err) {
res.status(500).send(err);
}
});
Thank you very much for your help in solving this!
Best regards, Samuel
Best way is stream the file. Assuming you are. reading it from disk. You could do this
const fs = require("fs");
const aws = require("aws-sdk");
const s3Client = new aws.S3();
const Bucket = 'somebucket';
const stream = fs.createReadStream("file.pdf");
const Key = stream.path;
const response = await s3Client.upload({Bucket, Key, Body: stream}).promise();
console.log(response);
How would I use client-side browser Javascript with AJAX to upload a file into Wasabi Storage?
Pankaj at Wasabi Tech Support got back to me and said that this code snippet will work just fine. They recommend one download and use the Amazon AWS SDK for S3 because Wasabi is S3 API compliant.
<!DOCTYPE html>
<html>
<head>
<script src="https://sdk.amazonaws.com/js/aws-sdk-2.619.0.min.js"></script>
</head>
<body>
<h1>Wasabi Upload Test</h1>
<input type="file" id="wasabiupload" onchange="handleFile()" />
<script>
function handleFile() {
// console.log("handle file - " + JSON.stringify(event, null, 2));
var files = document.getElementById('wasabiupload').files;
if (!files.length) {
return alert('Please choose a file to upload first.');
}
var f = files[0];
var fileName = f.name;
const s3 = new AWS.S3({
correctClockSkew: true,
endpoint: 'https://s3.wasabisys.com', //use appropriate endpoint as per region of the bucket
accessKeyId: 'Wasabi-Access-keys',
secretAccessKey: 'Wasabi-Secret-Access-key',
region: 'us-east-1'
,logger: console
});
console.log('Loaded');
const uploadRequest = new AWS.S3.ManagedUpload({
params: { Bucket: 'bucket-name', Key: 'file-name', Body: f },
service: s3
});
uploadRequest.on('httpUploadProgress', function(event) {
const progressPercentage = Math.floor(event.loaded * 100 / event.total);
console.log('Upload progress ' + progressPercentage);
});
console.log('Configed and sending');
uploadRequest.send(function(err) {
if (err) {
console.log('UPLOAD ERROR: ' + JSON.stringify(err, null, 2));
} else {
console.log('Good upload');
}
});
}
</script>
</body>
</html>
I am trying to upload an image to Firebase Storage and save several certain metadata to the Firebase Cloud.
I am coding in JavaScript.
Goal is to set also customised metadata to Firebase Cloud for example from a text input field which the user has to fill.
That's how I store images to the Firebase Storage:
storageRef.child('images/' + file.name).put(file, metadata).then(function(snapshot) {
console.log('Uploaded', snapshot.totalBytes, 'bytes.');
console.log(snapshot.metadata);
var url = snapshot.downloadURL;
console.log('File available at', url);
// [START_EXCLUDE]
document.getElementById('linkbox').innerHTML = 'Click For File';
// [END_EXCLUDE]
}).catch(function(error) {
// [START onfailure]
console.error('Upload failed:', error);
// [END onfailure]
});
// [END oncomplete]
}
I have no idea how to integrate in the upload function another task to write meta data to Firebase Cloud.
Any help will be appreciated!
#eykjs #Sam Storie: Thanks for your help.
I changed my code. Right now, there is an error which I can't figure it out, whats wrong.
Error: TypeError: undefined is not an object (evaluating 'selectedFile.name')
My code:
var selectedFile;
function handleFileSelect(event) {
//$(".upload-group").show();
selectedFile = event.target.files[0];
};
function confirmUpload() {
var metadata = {
contentType: 'image',
customMetadata: {
'dogType': 'Lab',
'title': $("#imgTitle").val(),
'caption': $("#imgDesc").val()
},
};
var uploadTask = firebase.storage().ref().child('dogImages/' + selectedFile.name).put(selectedFile, metadata);
uploadTask.on('state_changed', function(snapshot){
}, function(error) {
} );
}
What is wrong with my selectedFile definition?
Thanks a lot for help.
Maybe you could upload the data to Firestore after you finished the upload.
storageRef.child('images/' + file.name).put(file, metadata).then(function(snapshot) {
console.log('Uploaded', snapshot.totalBytes, 'bytes.');
let db = firebase.firestore();
let dbRef = db.collection("images").doc(file.name);
let setData = dbRef.set({
//yourdata here
downloadURl: snapshot.downloadURL
}).then( () => {
console.log("Data stored in Firestore!");
});
// your actions
If I understand what you're after this should easily be done with Firebase functions:
https://firebase.google.com/docs/storage/extend-with-functions
You can trigger a function when something in storage changes, and thus easily write data to either the realtime database, or the new(er) Firestore database.
Here's a simple snippet from the page I referenced to see what this might look like:
exports.generateThumbnail = functions.storage.object().onChange(event => {
// ...
});
Source Link
Image Upload in Firestore and saving meta info on Cloud Storage
import { AngularFireStorage, AngularFireUploadTask } from '#angular/fire/storage';
import { AngularFirestore, AngularFirestoreCollection } from '#angular/fire/firestore';
import { Observable } from 'rxjs';
import { finalize, tap } from 'rxjs/operators';
...
...
...
// The main task
this.task = this.storage.upload(path, file, { customMetadata });
// Get file progress percentage
this.percentage = this.task.percentageChanges();
this.snapshot = this.task.snapshotChanges().pipe(
finalize(() => {
// Get uploaded file storage path
this.UploadedFileURL = fileRef.getDownloadURL();
this.UploadedFileURL.subscribe(resp=>{
this.addImagetoDB({
name: file.name,
filepath: resp,
size: this.fileSize
});
this.isUploading = false;
this.isUploaded = true;
},error=>{
console.error(error);
})
}),
tap(snap => {
this.fileSize = snap.totalBytes;
})
)