I'm using the gcloud API on a Nodejs web server to upload files. I'd prefer the files not be uploaded on the client side and instead uploaded on the server. Currently, I am producing a blob on the client side, then converting it to text and passing that to the server through a POST request. All of the information gets successfully passed from the client to the server as expected. This data is also uploaded to gcloud, however, Gcloud does not recognize this as a valid file nor does my computer when I download it.
What is the best way to get the contents of the file to gcloud from the server side? I've tried using dataURIs and reading the original file by text and both produce similar issues. I've also explored piping a readFileStream from the blob on the server end but blobs are not natively supported by node so I have not done so yet.
Client Side
/**
 * Reads the image file selected in a file <input>, redraws it on a canvas
 * (re-encoding it as JPEG), and hands the results to the two callbacks.
 *
 * @param {Event} e - change event from the file input.
 * @param {Function} func - receives the original file as a data URI (preview).
 * @param {Function} func2 - receives the re-encoded JPEG as a base64 data URI,
 *   which survives JSON transport and can be decoded server-side with
 *   Buffer.from(payload, 'base64').
 */
function readSingleFile(e, func, func2){
  var file = e.target.files[0];
  if(!file){
    return; // Add error msg_here
  }
  var reader = new FileReader();
  reader.onload = function(e){
    let contents = e.target.result; // data URI of the original file
    let img = document.createElement('img');
    let cvs = document.createElement('canvas');
    img.onload = ()=>{
      cvs.width = img.width;
      cvs.height = img.height;
      let ctx = cvs.getContext('2d');
      ctx.drawImage(img, 0, 0);
      // BUGFIX: the original called cvs.toBlob(...) and then Blob#text(),
      // which decodes the JPEG bytes as UTF-8 and corrupts them (this is why
      // gcloud and the OS rejected the uploaded file). A base64 data URI
      // carries the bytes losslessly.
      func2(cvs.toDataURL("image/jpeg", 0.92));
    };
    img.src = contents;
    func(contents);
  };
  reader.readAsDataURL(file);
}
Server Side
/**
 * Publishes a print job: uploads the image data to Cloud Storage under a
 * freshly generated queue key, then records the job metadata in the
 * Realtime Database at queue/<key>.
 *
 * @param {Object} dataObj - request body; sockImageFile holds the image data
 *   and the remaining properties are job metadata.
 */
function publishPrintjob(dataObj){
  try{
    var newElemKey = database.ref().child('queue').push().key; // Get random Key
    // Create a new blob in the bucket and upload the file data.
    const gcloudFile = storage.file('images/' + newElemKey + '.jpg');
    gcloudFile.save(dataObj.sockImageFile, function(err) {
      if (!err) {
        console.log("File Uploaded!"); // BUGFIX: was Console.log (ReferenceError)
      } else {
        console.error(err); // BUGFIX: upload errors were silently dropped
      }
    });
    var data = {
      date: dataObj.Date,
      email: dataObj.email,
      design: dataObj.Design,
      author: dataObj.Author,
      address: dataObj.address,
      imageKey: newElemKey,
    };
    admin.database().ref('queue/' + newElemKey).set(data);
  } catch(err){
    console.log(err);
  }
}
Note: func simply shows the image on the client side, func2 just adds the contents to the POST object.
Uploading a file directly from the computer would be easiest using the storage.bucket(bucketName).upload() function from the cloud storage library. However, this uses location of a file locally and thus will not work unless a file is transferred to the server and saved first. This could be achieved using multi-part form data. Using multipart or uploading locally are better methods for uploading to google storage.
Instead, I solve this by first converting the image to a dataURI, sending the data URI to the server via the body of a POST request, and then converting it to a buffer with a readable stream that can be piped to google storage.
Client
// Collect the form fields and POST them to the /dashboard route as JSON.
let formData = getFormData('myForm');
var xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function() {
if (this.readyState == 4 && this.status == 200) {
// Typical action to be performed when the document is ready:
}
};
xhttp.open("POST", "dashboard", true);
xhttp.setRequestHeader('Content-Type', 'application/json');
xhttp.send(JSON.stringify(formData));
// NOTE(review): attaching onload after send() still works because the request
// is asynchronous, but handlers are conventionally attached before send().
xhttp.onload = ()=> {
console.log(JSON.parse(xhttp.response))
// Handle server response here
};
}
Server
// DataObject is the body of the POST request, the property imageFile is the URI from readFileAsURI
/**
 * Decodes a base64 data URI received from the client and streams the raw
 * bytes to Cloud Storage as images/<randomKey>.jpeg.
 *
 * @param {Object} dataObj - request body; ImageFile is a data URI string of
 *   the form "data:image/jpeg;base64,<payload>".
 */
function uploadImageOnServer(dataObj){
  try{
    var newElemKey = database.ref().child('queue').push().key; // Get random Key to use as filename
    // Create a new blob in the bucket and upload the file data.
    const gcloudFile = storage.file('images/' + newElemKey + '.jpeg');
    // BUGFIX: Readable was used below but never brought into scope; the
    // unused `require('fs')` has been removed.
    const { Readable } = require('stream'); // Should be required at the top of the file
    var string = dataObj.ImageFile;
    // Split the data URI into its MIME subtype and base64 payload.
    var regex = /^data:.+\/(.+);base64,(.*)$/;
    var matches = string.match(regex);
    if (!matches) {
      // BUGFIX: a malformed input used to crash on matches[1] with an
      // unhelpful TypeError.
      throw new Error('ImageFile is not a base64 data URI');
    }
    var ext = matches[1];
    var data = matches[2];
    var buffer = Buffer.from(data, 'base64');
    // Create a one-shot readable stream over the decoded bytes.
    const readableInstanceStream = new Readable({
      read() {
        this.push(buffer);
        this.push(null); // signal end-of-stream
      }
    });
    readableInstanceStream.pipe(gcloudFile.createWriteStream()) // link to gcloud storage api
      .on('error', function(err) {
        console.log('error', err); // BUGFIX: include the actual error object
      })
      .on('finish', function() {
        console.log('upload complete');
      });
  } catch(err){
    console.log(err);
  }
}
Related
Im trying to send audio from a client (javascript) to a server (java). I take the user audio from the microphone and then make a blob from it (and a url for the blob). The project is a spring boot project so i am looking for a way to send it as a parameter in a method to upload it to the server.
Was hoping that it would be possible to upload the blob to the server, but it seems to only be available locally in the browser, and since the URL for the blob starts with "blob:" before "http" it causes problems.
I have also looked at serialization but don't seem to find a way to do that with a blob in JS.
Just passing the blob url here between the client and the server
Client side in js
// Convert the audio data in to blob
// after stopping the recording
// NOTE(review): this is the problematic approach described in the question -
// the blob: URL created below only resolves inside this browser session; a
// server can never fetch it. The audio bytes themselves (the Blob) must be
// sent instead, e.g. as the body of a POST request via FormData.
mediaRecorder.onstop = function (ev) {
console.log(dataArray);
// blob of type mp3
let audioData = new Blob(dataArray,
{ 'type': 'audio/mp3;' });
// After fill up the chunk
// array make it empty
dataArray = [];
// Creating audio url with reference
// of created blob named 'audioData'
let audioSrc = window.URL
.createObjectURL(audioData);
//console.log(audioSrc);
// Pass the audio url to the 2nd video tag
playAudio.src = audioSrc;
// The blob: URL is sent as a query parameter - the server receives only the
// string, not the audio data, so transcription cannot work.
const url = "http://localhost:8080/speech?url=" + audioSrc;
console.log(url);
$.get(url, function(data) {
$("#resultat").html("transcribed tekst: " + data);
});
}
Server in Java
#GetMapping("/speech")
public String speechToText(String url) throws IOException {
try (SpeechClient speechClient = SpeechClient.create()) {
// The path to the audio file to transcribe
String gcsUri = url;
// Builds the sync recognize request
RecognitionConfig config =
RecognitionConfig.newBuilder()
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setSampleRateHertz(16000)
.setLanguageCode("en-US")
.build();
RecognitionAudio audio = RecognitionAudio.newBuilder().setUri(gcsUri).build();
// Performs speech recognition on the audio file
RecognizeResponse response = speechClient.recognize(config, audio);
List<SpeechRecognitionResult> results = response.getResultsList();
for (SpeechRecognitionResult result : results) {
// There can be several alternative transcripts for a given chunk of speech. Just use the
// first (most likely) one here.
SpeechRecognitionAlternative alternative = result.getAlternativesList().get(0);
System.out.printf("Transcription: %s%n", alternative.getTranscript());
return alternative.getTranscript();
}
return "idk";
} catch (IOException e) {
e.printStackTrace();
return "noe ble feil";
}
}
I have set up in an SPA application the ability to send files to Azure Blob Storage.
To do this I used XMLHttpRequest and FormData (my users are on computers managed by my company and all have access to HTML5).
In order to manage security, the sending of each file is preceded by a call to a method of a Web Api to obtain the shared access signature.
I forward the Content-Type of the file as well as other information to headers.
Everything happens for the better, the files are correctly sent and saved in Azure Blob Storage, but during the transfer, the image files seem to be "altered".
They are well present, I can download them and read them after the download, but I can not open them directly from an img tag.
On the other hand if I send the same image file via Microsoft Azure Storage Explorer, there is no problem, the image is well recognized in the img tag.
However, in one case as in the other, the content-type is marked as "image/jpeg". The only noticeable difference is that the MD5 is not the same between these 2 uploads, even though the original file is the same.
From my findings it seems that there is text added to the beginning and the end of the file when sending via XMLHttpRequest.
I explain my code so that you can guide me:
Note 1 : I use typescript (but a javascript solution will suit me) and Promise.
Note 2 : I have resolve all the CORS problems.
Note 3 : I'm using Azure Storage Emulator, but i try with the normal Azure service and the problem is the same.
Here is the text added in the image in Chrome:
------WebKitFormBoundaryKj5cK88faAwJd4av
Content-Disposition: form-data; name="file1"; filename="test.jpg"
Content-Type: image/jpeg
[image content]
------WebKitFormBoundaryKj5cK88faAwJd4av--
My Web Api :
/// <summary>
/// Issues a write-only shared access signature so the SPA can upload a
/// customer file directly to Blob Storage, snapshotting any existing blob
/// first so the previous version is preserved.
/// </summary>
[Route(@"api/Storage/FileSas/Customers/{id:int}")] // BUGFIX: verbatim strings are @"...", not #"..."
public async Task<IHttpActionResult> GetFileSas(int id, string fileName, long? fileSize = 0, string contentType = null)
{
if (string.IsNullOrWhiteSpace(fileName))
this.ModelState.AddModelError("fileName", "File name i"); // NOTE(review): message looks truncated - confirm intended text
if (!fileSize.HasValue || fileSize.Value > maxFileSize)
this.ModelState.AddModelError("fileSize", "File size exceeded");
if (!this.ModelState.IsValid)
return BadRequest(this.ModelState);
var serverUrl = ConfigurationManager.AppSettings[SERVER_URL];
var container = ConfigurationManager.AppSettings[CONTAINER_NAME];
// SAS valid from one hour in the past (clock-skew tolerance) to one hour ahead.
SharedAccessBlobPolicy policy = new SharedAccessBlobPolicy()
{
Permissions = SharedAccessBlobPermissions.Write,
SharedAccessStartTime = DateTime.UtcNow.AddMinutes(-60),
SharedAccessExpiryTime = DateTime.UtcNow.AddMinutes(60),
};
// NOTE(review): Path.Combine uses '\' on Windows; prefer string.Join("/", ...)
// or UriBuilder when composing blob names and URLs.
CloudBlockBlob blobFile = blobContainer.GetBlockBlobReference(Path.Combine("customers", id.ToString(), fileName));
var exists = await blobFile.ExistsAsync();
if (exists)
{
// Preserve the current contents before the client overwrites them.
await blobFile.SnapshotAsync();
}
var signature = blobFile.GetSharedAccessSignature(policy);
return Content<string>(HttpStatusCode.Created, Path.Combine(serverUrl, container, blobFile.Name + signature));
}
My TypeScript file :
// PROBLEM CASE: sending a FormData body through the SAS PUT wraps the file in
// multipart/form-data boundaries; the boundary text becomes part of the blob,
// which is why the stored image is larger than the original and unreadable.
// NOTE(review): the closing brackets at the end are unbalanced in the original post.
context.Storage.getFileSas(customerId, file)
.then((response: Interfaces.Result<string>) => {
let sasUrl = response.Data;
let formData = new FormData();
formData.append("file1", file, file.name);
var xhr = new XMLHttpRequest();
xhr.upload.onprogress = (event) => {
if (event.total > 0)
this.Progress(event.loaded * 100 / event.total);
};
xhr.onloadstart = function (e) {
}
xhr.onloadend = (e) => {
this.Progress(0);
}
xhr.open("PUT", sasUrl, true);
xhr.setRequestHeader('x-ms-blob-type', 'BlockBlob');
xhr.setRequestHeader('Content-Type', file.type);
xhr.setRequestHeader('x-ms-blob-content-type', file.type);
xhr.setRequestHeader('x-ms-version', "2016-05-31");
xhr.setRequestHeader('x-ms-meta-CustomerId', customerId);
xhr.setRequestHeader('x-ms-meta-UserId', context.User.User.Id.toString());
xhr.setRequestHeader('x-ms-meta-UserName', context.User.User.Name);
// Sending FormData is the bug: the raw file bytes should be passed to send().
xhr.send(formData);
})
})).catch((error) => {
console.log(error);
});
File come from here :
// Grab the selected File object from the <input type="file"> element.
let fileInputElement1: HTMLInputElement = <HTMLInputElement>document.getElementById("file1");
let file = fileInputElement1.files[0];
My HTML part : (i'm using knockout)
<!-- The form is only used as a file picker; the upload itself happens via
     XMLHttpRequest, so method/enctype here do not affect the PUT request. -->
<form method="put" target="_blank" enctype="multipart/form-data">
<input type="file" name="name" value="" id="file1" />
<button data-bind="click:send"> Send</button>
</form>
If someone have an idea ? ...
Thank's in advance.
PS : sasUrl is like this : http://127.0.0.1:10000/devstoreaccount1/customers/65143/test.jpg?sv=2016-05-31&sr=b&sig=s0671%2BLvCZTqyNfhlCthZW8KftjKyIMAlOT1nbsnlng%3D&st=2017-03-05T11%3A38%3A22Z&se=2017-03-06T12%3A38%3A22Z&sp=r&rsct=image%2Fjpeg
Thank's to Gaurav Mantri, he point me to the right, here my modifications (only because i use typescript) :
// WORKING VERSION: read the file into an ArrayBuffer and PUT the raw bytes
// (as a Uint8Array) to the SAS URL - no multipart boundaries are added, so
// the stored blob is byte-identical to the original file.
// NOTE(review): the closing brackets at the end are unbalanced in the original post.
context.Storage.getFileSas(customerId, file)
.then((response: Interfaces.Result<string>) => {
let sasUrl = response.Data;
var xhr = new XMLHttpRequest();
xhr.upload.onprogress = (event) => {
if (event.total > 0)
this.Progress(event.loaded * 100 / event.total);
};
xhr.onloadstart = function (e) {
}
xhr.onloadend = (e) => {
this.Progress(0);
}
// Read the raw bytes instead of wrapping the file in FormData.
let reader = new FileReader();
reader.readAsArrayBuffer(file);
reader.onloadend = (event) => {
let target = <FileReader>event.target;
if (target.readyState == reader.DONE) {
var requestData = new Uint8Array(target.result);
xhr.open("PUT", sasUrl, true);
xhr.responseType = "blob";
xhr.setRequestHeader('X-Requested-With', 'XMLHttpRequest');
xhr.setRequestHeader('X-File-Name', file.name);
xhr.setRequestHeader('x-ms-blob-type', 'BlockBlob');
xhr.setRequestHeader('Content-Type', file.type || 'application/octet-stream');
xhr.setRequestHeader('x-ms-blob-content-type', file.type || 'application/octet-stream');
xhr.setRequestHeader('x-ms-version', "2016-05-31");
xhr.setRequestHeader('x-ms-meta-CustomerId', customerId);
xhr.setRequestHeader('x-ms-meta-UserId', context.User.Id.toString());
xhr.setRequestHeader('x-ms-meta-UserName', context.User.Name);
xhr.send(requestData);
}
}
})
})).catch((error) => {
console.log(error);
});
Now i'll start to write a Promise to embedded this fonctionality.
PS : i didn't find the way to mark the Gaurav Mantri as answer so i create mine.
PS 2 : I'll like to put some +1 to Gaurav Mantri for the help... but i can't :/
Using firebase 3.0.x, is it possible to save a base64 encoded image to the new Firebase Storage service?
I am using canvas to resize images in the browser prior to uploading them, and output them as a base64 jpeg. I know that the Storage api can accept Blobs, but IE9 support is needed for my current project.
You only need to use the putString function without converting the BASE64 to blob.
// Upload the base64 payload directly; no Blob conversion is needed.
// BUGFIX: the original used typographic quotes (‘base64’, ’image/jpg’),
// which are a syntax error in JavaScript - replaced with ASCII quotes.
firebase.storage().ref('/your/path/here').child('file_name')
.putString(your_base64_image, 'base64', {contentType:'image/jpg'});
Make sure to pass the metadata {contentType:'image/jpg'} as the third parameter (optional) to the function putString in order for you to retrieve the data in an image format.
or simply put:
// Start the base64 upload and observe its progress, failure, and completion.
uploadTask = firebase.storage().ref('/your/path/here').child('file_name').putString(image, 'base64', {contentType:'image/jpg'});
uploadTask.on(firebase.storage.TaskEvent.STATE_CHANGED, // or 'state_changed'
  function (snapshot) {
    // Report how far along the transfer is (bytes sent vs. total bytes).
    var percentDone = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
    console.log('Upload is ' + percentDone + '% done');
    if (snapshot.state === firebase.storage.TaskState.PAUSED) { // or 'paused'
      console.log('Upload is paused');
    } else if (snapshot.state === firebase.storage.TaskState.RUNNING) { // or 'running'
      console.log('Upload is running');
    }
  },
  function (error) {
    console.log(error);
  },
  function () {
    // Upload completed successfully, now we can get the download URL
    var downloadURL = uploadTask.snapshot.downloadURL;
  });
You can then use the downloadURL to save to firebase.database() and/or to put as an src to an <img> tag.
The latest version of the Firebase SDK supports base64 image uploads. Simply use the putString method from Firebase Storage.
https://firebase.google.com/docs/reference/js/firebase.storage
One small caveat is that sometimes you'll have a base64 String with unnecessary whitespace. For example, I've found that the Cordova Camera plugin returns base64 with unnecessary whitespace. The Storage SDK will fail to upload this because JavaScript can't perform its native atob function - something the Firebase JS does under the hood. You'll have to strip the whitespace - see DOM Exception 5 INVALID CHARACTER error on valid base64 image string in javascript
Yes, it's possible now. You should use Firebase Storage new method called putString. You may read spec here.
So, Firebase spec says that you have now two methods to store Base64 string and Base64url string:
// Base64 formatted string
var message = '5b6p5Y+344GX44G+44GX44Gf77yB44GK44KB44Gn44Go44GG77yB';
ref.putString(message, 'base64').then(function(snapshot) {
console.log('Uploaded a base64 string!');
});
// Base64url formatted string (uses '-' and '_' in place of '+' and '/')
var message = '5b6p5Y-344GX44G-44GX44Gf77yB44GK44KB44Gn44Go44GG77yB';
ref.putString(message, 'base64url').then(function(snapshot) {
console.log('Uploaded a base64url string!');
})
From my experience, using putString(message, 'base64url') consistently returns an error about a badly formatted Base64 string: code: "storage/invalid-format", message: "Firebase Storage: String does not match format 'base64': Invalid character found". The solution is to cut off the beginning of the string, data:image/jpeg;base64,, and use the first method instead: putString(message, 'base64'). Then it works.
If you use canvas.toBlob() you'll get the byte[] that you need to pass into Firebase Storage.
Quick example:
/**
 * Exports the drawing canvas as a PNG, uploads it to Cloud Storage under
 * drawings/<id>/<timestamp>.png, and records the resulting download URL at
 * chats/<id>/drawingURL in the Realtime Database.
 */
function save() {
  const canvasEl = document.getElementById("canvas");
  canvasEl.toBlob((blob) => {
    const rootRef = firebase.app().storage().ref();
    const fileName = id + "/" + (new Date()).getTime() + ".png";
    const fileRef = rootRef.child("drawings/" + fileName);
    const task = fileRef.put(blob);
    task.on(
      'state_changed',
      (snapshot) => {},
      (error) => {
        console.error("Unable to save image.");
        console.error(error);
      },
      () => {
        const url = task.snapshot.downloadURL;
        console.log("Saved to " + url);
        const chatsRef = firebase.database().ref().child("chats");
        chatsRef.child(id).child("drawingURL").set(url);
      }
    );
  });
};
Otherwise you'll have to convert the base64 yourself, for example with atob().
Here are two values I use to help with support on .toBlob() it is less performant however, it gets the job done.
This method takes in the base64 string, the content type (IE: image/png) and a callback for when the blob has been constructed using atob()
/**
 * Decodes a base64 string into a Blob of the given MIME type. Decoding is
 * done in 512-character slices so the intermediate arrays stay small for
 * large payloads.
 *
 * @param {string} b64_data - raw base64 payload (no "data:" prefix).
 * @param {string} content_type - MIME type for the Blob; defaults to ''.
 * @param {Function} callback - invoked synchronously with the resulting Blob.
 */
var b64_to_blob = function(b64_data, content_type, callback) {
  content_type = content_type || '';
  var slice_size = 512;
  var decoded = atob(b64_data);
  var chunks = [];
  for (var start = 0; start < decoded.length; start += slice_size) {
    var piece = decoded.slice(start, start + slice_size);
    var codes = new Array(piece.length);
    for (var j = 0; j < piece.length; j++) {
      codes[j] = piece.charCodeAt(j);
    }
    chunks.push(new Uint8Array(codes));
  }
  callback(new Blob(chunks, {type: content_type}));
};
I use this method to get the base64 value of a direct link when necessary, In my case it's to download a users Facebook photo when they register on my application.
/**
 * Fetches an image from a URL, redraws it on a canvas, and passes its base64
 * payload (without the "data:<mime>;base64," prefix) to the callback.
 * Browser-only: relies on Image and <canvas>; the host must allow CORS.
 *
 * @param {string} url - image location.
 * @param {string} content_type - MIME type to encode the canvas as, e.g. 'image/png'.
 * @param {Function} callback - receives the base64 string.
 */
var image_link_to_b64 = function(url, content_type, callback) {
  var image = new Image();
  image.crossOrigin = 'Anonymous';
  image.onload = function() {
    var canvas = document.createElement('CANVAS');
    var context = canvas.getContext('2d');
    var data_url;
    canvas.height = this.height;
    canvas.width = this.width;
    context.drawImage(this, 0, 0);
    data_url = canvas.toDataURL(content_type);
    // BUGFIX: substr(22) only strips the prefix for 'image/png'
    // ("data:image/png;base64," is exactly 22 chars). Strip everything up to
    // and including the first comma so any MIME type works.
    data_url = data_url.substring(data_url.indexOf(',') + 1);
    callback(data_url);
    canvas = null;
  };
  image.src = url;
};
Below is how it looks when adding the information to firebase storage
// Chain: download the Facebook photo as base64, convert it to a Blob, then
// upload it; the innermost callback receives the storage download URL.
// NOTE(review): the closing brackets below are unbalanced in the original
// post - each callback should close with `});`.
$fileService.image_link_to_b64(facebook_info.photoURL + '?width=512&height=512', 'image/png', function(b64) {
$fileService.b64_to_blob(b64, 'image/png', function(blob) {
$fileService.upload_user_photo(blob, 'image/png', function(url) {
// Firebase storage download url here
}
}
}
Incase you're wondering upload_user_photo simply uploads to firebase storage:
/**
 * Uploads the given data to Cloud Storage at <user_photo_reference>/user_photo
 * and reports the resulting download URL.
 *
 * @param {Blob} data - the photo bytes to store.
 * @param {string} blob - unused; kept for call-site compatibility (callers
 *   pass the MIME type here).
 * @param {Function} callback - receives the download URL on success.
 */
var upload_user_photo = function(data, blob, callback) {
  var upload_task = user_photo_reference.child('user_photo').put(data);
  upload_task.on('state_changed', function(snapshot) {
  }, function(error) {
    alert('error: ' + error); // BUGFIX: alert() takes one string, not console-style varargs
  }, function() {
    callback(upload_task.snapshot.downloadURL);
  });
}; // BUGFIX: removed stray "]" that followed the closing brace
For IE9 see this polyfill: https://github.com/eligrey/Blob.js/blob/master/Blob.js
This solution works for me using the Google Cloud Storage API.
But it should work also with the Firebase one by replacing the file.save with the ref put method.
// Decode the base64 image and write it to Cloud Storage, carrying the
// content type and a Firebase download token in the metadata.
const file = storage.file(file_path_in_gs)
const contents = new Uint8Array(Buffer.from(base64ImgStr, 'base64'))
file.save(contents,
  {
    contentType: img_type,
    metadata: {
      metadata: {
        contentType: img_type,
        firebaseStorageDownloadTokens: uuid()
      }
    }
  }
  // BUGFIX: the completion callback receives an error argument; the original
  // `() => { }` silently discarded upload failures.
  , (err) => { if (err) console.error(err); })
With Firebase SDK 9 for web (as of May 2022)
import { getStorage, ref, uploadString } from "firebase/storage";
const storage = getStorage();
const storageRef = ref(storage, 'some-child');
// Data URL string
const message4 = 'data:text/plain;base64,5b6p5Y+344GX44G+44GX44Gf77yB44GK44KB44Gn44Go44GG77yB';
uploadString(storageRef, message4, 'data_url').then((snapshot) => {
console.log('Uploaded a data_url string!');
}).catch((error) => {
// BUGFIX: without a catch, a failed upload becomes an unhandled rejection.
console.error('Upload failed', error);
});
I need to upload a file reading from filesystem (not specified by the user) using xhr. Is there a way to send it via Ajax?
I understand that javascript has input type file, which gives javascript file object https://developer.mozilla.org/en-US/docs/Web/API/File.
I tried getting file descriptor using Node fs APIs (https://nodejs.org/docs/latest/api/fs.html) . But am unable to send it via xhr. Here is my code snippet.
Any help will be greatly appreciated.
// NOTE(review): this is the non-working attempt from the question -
// fs.open() yields a numeric file descriptor, which XMLHttpRequest cannot
// send. The file's *contents* must be read (e.g. fs.readFile) and passed to
// send() instead.
var req = new XMLHttpRequest();
req.open(method, url);
req.overrideMimeType('text/xml');
var progress = 0;
req.upload.addEventListener('progress', function (event) {
if (event.lengthComputable) {
progress = Math.round(event.loaded * 100 / event.total);
}
}, false);
req.onreadystatechange = function () {
// add logic for each state
};
var fs = require('fs');
if (filename) {
// get the file descriptor and send it via xhr
fs.open(filename, "r", function(error, fd) {
// -- THIS IS THE PART NOT WORKING --
// fd is just an integer handle to the open file, not the file's data.
req.send(fd);
});
} else {
console.log('no filename');
}
That's not the way to read a file. This is how you do it:
var fs = require('fs');
// NOTE(review): utf8 is fine for text files, but binary files (images,
// audio) should be read without an encoding so a Buffer is returned -
// decoding binary data as utf8 corrupts it.
fs.readFile(filename, { encoding : 'utf8' }, function(err, data){
//data holds the contents of the file.
// NOTE(review): err is unchecked here - handle it before using data.
req.send(data);
});
I send multiple files chunked into Blob's over XHR2 to a Node.js/Express server.
How can I receive them on the server while making sure they are put together correctly? In their right order and to the right file when multiple files are uploaded "at once".
Following is the code (both front- and backend) I have so far but doesn't account for multiple uploads yet.
Frontend:
// 'files' is of type FileList, directly from file input.
// 'files' is of type FileList, directly from file input.
// Slice every selected file into 50 KB chunks and send each chunk in turn.
// File#slice clamps the end index, so the final short chunk is handled
// automatically.
var chunkBytes = 51200; // 50 KB
for (var idx = 0; idx < files.length; idx++) {
  var current = files[idx];
  var total = current.size;
  var offset = 0;
  while (offset < total) {
    sendBlob(current.slice(offset, offset + chunkBytes), current.name, current.type);
    offset += chunkBytes;
  }
}
// sendBlob()
// sendBlob()
/**
 * POSTs one chunk to the server, tagging it with the file's name and type.
 * NOTE(review): the third argument `false` makes the request synchronous,
 * which blocks the UI thread and is deprecated - prefer async requests with
 * a send queue.
 */
// BUGFIX: "var sendBlob: function (...)" is a syntax error outside an object
// literal; it must be an assignment.
var sendBlob = function (data, filename, filetype) {
  var xhr = new XMLHttpRequest();
  xhr.open('POST', this.url, false);
  xhr.setRequestHeader('X_FILENAME', filename);
  xhr.setRequestHeader('Content-Type', filetype);
  xhr.send(data);
};
Backend:
/**
 * Receives one uploaded chunk and writes it to /upload/<filename>.
 *
 * BUGFIX: the 'data' events deliver Buffers; appending them to a string
 * decodes the bytes as utf-8, corrupting binary files and inflating their
 * size (the symptom described above). Collect the Buffers and concatenate
 * them instead.
 *
 * NOTE(review): each chunk-request still overwrites the whole file - to
 * reassemble multi-chunk uploads correctly the client must send a chunk
 * index and the server must append/position chunks accordingly.
 */
app.post('/', function (req, res) {
  var chunks = [];
  req.on('data', function (data) {
    chunks.push(data);
  });
  req.on('end', function () {
    var filename = req.headers['x_filename'];
    var newPath = __dirname + '/upload/' + filename;
    fs.writeFile(newPath, Buffer.concat(chunks), function (err) {
      res.send({
        filename: filename
      });
    });
  });
});
Very small text files are stored correctly but images seem to always get messed up and end up with a bigger file size. Bigger text files are written correctly but there the first chunk seems to be missing.
Your upload logic is naive. Here are some things you should do to ensure correctness :
You have to maintain and communicate the chunk id/number between client and server so that order can be maintained.
// Suggested signature: include a chunk id so the server can reassemble the
// chunks in their original order.
var sendBlob: function (data, filename, filetype, chunkid)
//set chunkid in header or in data.
In your server you are accepting any post request and appending it to the body. You should maintain variables for filename and filetype and match it with incoming request before appending it.
Files[Name] = { //Create a new Entry in The Files Variable for each new file
  Filetype: "",
  FileSize: 0, //size of Data in buffer
  Data: "", //buffer for storing data
  Downloaded: 0 //chunks received (BUGFIX: the property had no value, which is a syntax error)
}
Append to Data only when you check it. (Extra file size could be due to this)
In your fs.writeFile you should set encoding as binary, image and video files are binary encoded and writing them into default utf-8 encoding may corrupt them.
fs.writeFile(newPath, body, 'binary', function (err){...});
(optional) For each chunk received by server it should send an acknowledgement back to client so that it knows which chunk is dropped and must be sent.