Read downloaded blob from dropbox API using HTTP - javascript

Using Dropbox, you can create a shortcut by dragging and dropping a URL into your Dropbox folder. This will be saved like this:
Calling the /2/files/download HTTP API from Dropbox returns an XHR response that looks something like this:
How do you parse this response so that you can grab only the URL and turn it into a clickable link?

Here is what needs to go into an Angular 1 factory. To use it, call the downloadFile function from a controller and provide the path to the file in your Dropbox account.
function downloadFile(filePath) {
  if (!filePath) {
    console.error('Cannot download file because no file was specified.');
    return;
  }
  return $q(function(fulfill, reject) {
    $http({
      url: 'https://content.dropboxapi.com/2/files/download',
      method: 'POST',
      headers: {
        'Authorization': 'Bearer {{access-token-goes-here}}',
        'Dropbox-API-Arg': `{"path": "${filePath}"}`
      },
      responseType: 'blob'
    }).then(
      results => {
        // Data received from Dropbox is binary data saved as a blob.
        // The FileReader object lets web applications asynchronously read the contents of files.
        // https://developer.mozilla.org/en-US/docs/Web/API/FileReader
        var fileReader = new FileReader();
        // This function runs after the file has been read successfully.
        fileReader.onload = function() {
          var string = this.result;                          // store the file contents
          string = encodeURI(string);                        // encode the carriage-return characters so they can be located
          var endPosition = string.indexOf('%0D%0A', 32);    // find the end of the URL; the URL starts at position 32
          var actualURL = string.substring(32, endPosition); // grab only the characters between start and end positions
          fulfill(actualURL);
        };
        fileReader.readAsText(results.data);
      },
      error => reject(error));
  });
}
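For context, here is a minimal sketch of how such a factory might be consumed from a controller; the factory and controller names below are assumptions for illustration, not part of the original answer:

// Hypothetical wiring: 'dropboxFiles' is assumed to be the factory that exposes downloadFile.
angular.module('app').controller('LinkCtrl', ['dropboxFiles', function(dropboxFiles) {
  var vm = this;
  dropboxFiles.downloadFile('/shortcuts/example.url').then(function(actualURL) {
    // actualURL is the link extracted from the downloaded .url shortcut file
    vm.link = actualURL;
  }).catch(function(error) {
    console.error('Download failed:', error);
  });
}]);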

Related

Converting dataURI to file for upload via REST does not work but normal filefield upload works fine

I have implemented a solution that accepts a single file upload (an image for a profile) in a Django Rest Framework backend. The route api/people/id/upload_image accepts a single image parameter and can be used via HTTP POST.
Uploading via a file-input field, e.g. in Postman, in the default Django API, or via a browser fetch() in my Vue.js application, is no problem. So as long as it is a default form-upload field, it does its job.
But in my front-end (Vue.js 3) I am using an image cropper. Users can upload an image, and via the JavaScript cropper the image can be cropped. This is important for the UI because I need a square image. The cropper uses HTMLCanvasElement.toDataURL() as its export format.
What seemed straightforward has kept me stuck for days. I just can't find a way to convert and POST the cropped image in such a way that it is accepted by the upload_image API backend. I am using fetch() to send this POST call.
I am not a JavaScript expert, so I get my knowledge from the internet, and I have tried several approaches: first creating a Blob from the data URI, and also creating a File before feeding it to FormData and sending it as the body in fetch().
The data URI itself seems OK, because I am also replacing the cropped image directly in the HTML, and that looks totally fine. The API responds with 'HTTP 200 OK', but the old image is not replaced.
So my assumption is that something is wrong with the image sent to the API, because via a normal file upload everything works fine. How should I convert this data URI properly so that it can be sent and accepted by the API endpoint? And what should the API call look like: headers, body?
This is my last attempt at converting and sending the cropped image (dataURIimage is OK):
uploadPhoto(context, dataURIimage) {
  const blob = dataURItoBlob(dataURIimage);
  const resultFile = new File([blob], "picture", {
    type: "image/png"
  });
  const formData = new FormData();
  formData.append("image", resultFile);
  const headerToken = "Token" + " " + this.getters.getToken;
  const url =
    "https://workserver-7e6s4.ondigitalocean.app/api/people/" +
    this.getters.getProfiel.id +
    "/upload_image/";
  fetch(url, {
    method: "POST",
    headers: {
      'Content-Type': 'multipart/form-data',
      'Authorization': headerToken,
    },
    body: formData,
  })
    .then(function(response) {
      console.log(response.status)
      if (response.ok) {
        return response.json();
      }
    })
    .then(function(data) {
      console.log(data.image);
    })
    .catch(function(error) {
      alert(error + " " + ":ERROR");
    });
},

function dataURItoBlob(dataURI) {
  // convert base64 to raw binary data held in a string
  // doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does this
  var byteString = atob(dataURI.split(',')[1]);
  // separate out the mime component
  var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
  // write the bytes of the string to an ArrayBuffer
  var ab = new ArrayBuffer(byteString.length);
  var ia = new Uint8Array(ab);
  for (var i = 0; i < byteString.length; i++) {
    ia[i] = byteString.charCodeAt(i);
  }
  return new Blob([ab], { type: mimeString });
}
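A likely culprit in the snippet above is the manually set Content-Type header: when the body is a FormData object, the browser generates the multipart/form-data header itself, including the boundary, and setting it by hand usually leaves the server unable to parse the parts. A minimal sketch of the same call with that header dropped (everything else from the question unchanged):

fetch(url, {
  method: "POST",
  headers: {
    // no Content-Type here: the browser adds multipart/form-data with the correct boundary
    'Authorization': headerToken,
  },
  body: formData,
})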

Why is ajax not passing through image file?

For some reason, when I don't use Ajax, the image file in the form being processed comes through just fine, but not when using the Ajax script below.
Can't figure out why...
Note: Not all form fields are image files (there are some text fields).
jQuery.ajax({
  method: 'POST',
  url: 'actions/listings-add.php',
  data: $('#form').serialize(),
  dataType: 'json',
  success: function(msg) {
    if (parseInt(msg.status) == 1) {
      window.location = msg.txt;
    }
    else if (parseInt(msg.status) == 0) {
      error(1, msg.txt);
    }
  }
});
Serialized form data (and JSON) does not work with "file" inputs, because files need to be transferred as multipart/form-data.
To upload files using AJAX, you can either use FormData to transfer the file input along with the rest of the form (see the sketch after the base64 example below), or, if you have a REST API with a suitable endpoint, use JavaScript to base64-encode the image into a string and send that in your AJAX request.
To convert the image, you have to use the browser's File API:
// Convert file to base64 string
export const fileToBase64 = (filename, filepath) => {
  return new Promise(resolve => {
    var file = new File([filename], filepath);
    var reader = new FileReader();
    // Read file content on file loaded event
    reader.onload = function(event) {
      resolve(event.target.result);
    };
    // Convert data to base64
    reader.readAsDataURL(file);
  });
};

// Example call:
fileToBase64("test.pdf", "../files/test.pdf").then(result => {
  console.log(result);
  // ##### PUT YOUR AJAX CALL HERE
});
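For the FormData route mentioned above, a minimal sketch against the question's form (the key points are processData: false and contentType: false, so jQuery neither serializes the body nor overrides the multipart Content-Type):

var formData = new FormData($('#form')[0]); // includes file inputs, unlike .serialize()
jQuery.ajax({
  method: 'POST',
  url: 'actions/listings-add.php',
  data: formData,
  processData: false, // don't turn the FormData into a query string
  contentType: false, // let the browser set multipart/form-data with its boundary
  dataType: 'json',
  success: function(msg) {
    // same success handling as in the question
  }
});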

How to convert node.js array-buffer to javascript array buffer

I send audio to the client using node.js:
res.send(audio); // audio is the buffer array
On the client I get an 'arrayBuffer' object in the response data.
I convert it to a Blob and then to a file (I want to get a data URL to show it in a player):
.then(
  (result) => {
    result.arrayBuffer().then(function (data) {
      const blob = new Blob([data], { type: 'audio/wav' });
      var fileReader = new FileReader();
      fileReader.readAsDataURL(blob);
      fileReader.onload = function (evt) {
        // Read out file contents as a Data URL
        var url = evt.target.result;
        res({blob, url});
      };
    });
  }
)
And it works well. I use this url as the src of my <audio> element and it works.
But now I want to send this file from the server as part of a JSON response, so I now get the audio buffer from the server in an "audio" property. I changed the arrayBuffer method to json in fetch:
result.json().then(function (data) {
  const blob = new Blob([data.audio], { type: 'audio/wav' });
  ...
But now it doesn't work. I tried using the module www.npmjs.com/package/to-array-buffer to convert data.audio to an ArrayBuffer, but it didn't help.
Maybe you know what the problem is here?
OK, I solved the problem: I used the to-array-buffer module mentioned above, but with toArrayBuffer(data.audio.data) instead of toArrayBuffer(data.audio).
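That fix makes sense because a Node Buffer that goes through res.json() / JSON.stringify is serialized as { type: 'Buffer', data: [ ...bytes ] }, so the raw bytes end up under the data property. As a sketch, the same conversion can also be done without the extra module, assuming the response shape from the question:

result.json().then(function (data) {
  // data.audio is the JSON form of a Node Buffer: { type: 'Buffer', data: [137, 80, ...] }
  var bytes = new Uint8Array(data.audio.data);
  var blob = new Blob([bytes], { type: 'audio/wav' });
  var fileReader = new FileReader();
  fileReader.onload = function (evt) {
    var url = evt.target.result; // data URL usable as the <audio> src
    res({ blob: blob, url: url });
  };
  fileReader.readAsDataURL(blob);
});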

Retrieve image file from url using JS / Angular

I'm working on exporting data from a WordPress environment to MongoDB, using MongooseJS as the data-model bridge. I've got a JSON file with every object, including all required information.
As an example, I've got a user item including an avatarpath field pointing to the WordPress server URL (e.g. http://url/wp-content/upload/img/avatar.jpg).
What I would like to do is retrieve the image from its URL, upload it to my new storage folder, retrieve the new path, and store the new object in my MongoDB.
My issue is that I can't find a way to get the file data from an HTTP GET or any other way. Usually I have a file input in my HTML and start from the File object from that input. How should I proceed to make this work? Am I going in the wrong direction?
I've found this answer, but it seems deprecated:
how to upload image file from url using FileReader API?
Here is what I've got for now:
$scope.curateurs_data = {};
$scope.curateurs = [];

$http.get('resources/json_curateurs.json').success(function(data) {
  $scope.curateurs_data = data;
  console.log(data[0]);
  $scope.getPics();
});

// RETRIEVE IMAGE DATA
$scope.getPics = function(data) {
  console.log("RETRIEVING PICTURE");
  var uploadPlace = '/upload/user';
  var images;
  angular.forEach($scope.curateurs_data, function(item, key) {
    $scope.curitem = item;
    console.log($scope.curitem);
    $http.get(item.avatarpath, {responseType: "arraybuffer"}).success(function(data) {
      var arrayBufferView = new Uint8Array(data);
      var blob = new Blob([arrayBufferView], { type: "image/png" });
      var urlCreator = window.URL || window.webkitURL;
      var imageUrl = urlCreator.createObjectURL(blob);
      console.log(imageUrl);
      console.log(blob);
      images = blob;
      var pic = {
        images: images
      };
      Upload.upload({
        url: uploadPlace,
        arrayKey: '',
        data: pic,
      }).then(function(response) {
        // Adding data paths to formData object before creating mood
        // MUST respect images array order
        $scope.curitem.avatarpath = response.data.files[0].path;
        console.log(response.data.files[0].path);
      });
    }).error(function(err, status) {});
    $scope.curateurs.push($scope.curitem);
  });
}
I've also tried something like this, but I can't seem to make it work either.
$http.get(item.avatarpath, {responseType: "blob"}).
  success(function(data, status, headers, config) {
    // encode data to base 64 url
    fr = new FileReader();
    fr.onload = function() {
      // this variable holds your base64 image data URI (string)
      // use readAsBinary() or readAsBinaryString() below to obtain other data types
      console.log(fr.result);
    };
    fr.readAsDataURL(data);
    console.log(fr.readAsDataURL(data));
  }).
  error(function(data, status, headers, config) {
    alert("The url could not be loaded...\n (network error? non-valid url? server offline? etc?)");
  });
Use node's http object on the backend to download the image. Something like:
const http = require('http');

http.request(url, function(response) {
  // Your code to write the file out or store it in the database here.
}).end(); // .end() actually sends the request
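A slightly fuller sketch of that server-side download, assuming the goal is simply to write the image to disk before re-uploading it (the URL is the example path from the question and the output path is a placeholder):

const http = require('http');
const fs = require('fs');

http.get('http://url/wp-content/upload/img/avatar.jpg', function (response) {
  // Stream the image straight to disk instead of buffering it all in memory
  const out = fs.createWriteStream('./uploads/avatar.jpg');
  response.pipe(out);
  out.on('finish', function () {
    console.log('Image saved');
  });
}).on('error', function (err) {
  console.error('Download failed:', err);
});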

How to get a file or blob from an object URL?

I am allowing the user to load images into a page via drag&drop and other methods. When an image is dropped, I'm using URL.createObjectURL to convert to an object URL to display the image. I am not revoking the url, as I do reuse it.
So, when it comes time to create a FormData object so I can allow them to upload a form with one of those images in it, is there some way I can then reverse that Object URL back into a Blob or File so I can then append it to a FormData object?
Modern solution:
let blob = await fetch(url).then(r => r.blob());
The url can be an object url or a normal url.
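Tying that back to the question, a short sketch of getting from the object URL to a FormData entry (to be run inside an async function; the field name and filename are made up for illustration):

const blob = await fetch(objectUrl).then(r => r.blob());
const formData = new FormData();
// The third argument gives the entry a filename, like a real <input type="file"> would
formData.append('image', blob, 'upload.png');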
As gengkev alludes to in his comment above, it looks like the best/only way to do this is with an async xhr2 call:
var xhr = new XMLHttpRequest();
xhr.open('GET', 'blob:http%3A//your.blob.url.here', true);
xhr.responseType = 'blob';
xhr.onload = function(e) {
  if (this.status == 200) {
    var myBlob = this.response;
    // myBlob is now the blob that the object URL pointed to.
  }
};
xhr.send();
Update (2018): For situations where ES5 can safely be used, Joe has a simpler ES5-based answer below.
Maybe someone finds this useful when working with React/Node/Axios. I used this for my Cloudinary image upload feature with react-dropzone on the UI.
axios({
  method: 'get',
  url: file[0].preview, // blob url eg. blob:http://127.0.0.1:8000/e89c5d87-a634-4540-974c-30dc476825cc
  responseType: 'blob'
}).then(function(response) {
  var reader = new FileReader();
  reader.readAsDataURL(response.data);
  reader.onloadend = function() {
    var base64data = reader.result;
    self.props.onMainImageDrop(base64data);
  };
});
The problem with fetching the blob URL again is that this will create a full copy of the Blob's data, and so instead of having it only once in memory, you'll have it twice. With big Blobs this can blow your memory usage quite quickly.
It's rather unfortunate that the File API doesn't give us access to the currently linked Blobs; presumably the thinking was that web authors should store that Blob themselves at creation time anyway, which is true:
The best approach here is to store the object you used when creating the blob:// URL.
If you are afraid this would prevent the Blob from being garbage collected, you're right, but so does the blob:// URL in the first place, until you revoke it. So holding your own reference to that Blob doesn't change a thing.
But for those who aren't responsible for creating the blob:// URI (e.g. because a library made it), we can still fill that API hole ourselves by overriding the default URL.createObjectURL and URL.revokeObjectURL methods so that they store references to the objects passed in.
Be sure to call this function before the code that does generate the blob:// URI is called.
// Adds an URL.getFromObjectURL( <blob:// URI> ) method
// returns the original object (<Blob> or <MediaSource>) the URI points to or null
(() => {
  // overrides URL methods to be able to retrieve the original blobs later on
  const old_create = URL.createObjectURL;
  const old_revoke = URL.revokeObjectURL;
  Object.defineProperty(URL, 'createObjectURL', {
    get: () => storeAndCreate
  });
  Object.defineProperty(URL, 'revokeObjectURL', {
    get: () => forgetAndRevoke
  });
  Object.defineProperty(URL, 'getFromObjectURL', {
    get: () => getBlob
  });
  const dict = {};

  function storeAndCreate(blob) {
    const url = old_create(blob); // let it throw if it has to
    dict[url] = blob;
    return url;
  }
  function forgetAndRevoke(url) {
    old_revoke(url);
    try {
      if (new URL(url).protocol === 'blob:') {
        delete dict[url];
      }
    } catch(e) {}
  }
  function getBlob(url) {
    return dict[url] || null;
  }
})();
// Usage:
const blob = new Blob( ["foo"] );
const url = URL.createObjectURL( blob );
console.log( url );
const retrieved = URL.getFromObjectURL( url );
console.log( "retrieved Blob is Same Object?", retrieved === blob );
fetch( url ).then( (resp) => resp.blob() )
  .then( (fetched) => console.log( "fetched Blob is Same Object?", fetched === blob ) );
Another advantage is that it can even retrieve MediaSource objects, while the fetching solutions would just error out in that case.
Using fetch, for example, like below:
fetch(<"yoururl">, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + <your access token if need>
},
})
.then((response) => response.blob())
.then((blob) => {
// 2. Create blob link to download
const url = window.URL.createObjectURL(new Blob([blob]));
const link = document.createElement('a');
link.href = url;
link.setAttribute('download', `sample.xlsx`);
// 3. Append to html page
document.body.appendChild(link);
// 4. Force download
link.click();
// 5. Clean up and remove the link
link.parentNode.removeChild(link);
})
You can paste this into the Chrome console to test; the file will download as 'sample.xlsx'. Hope it helps!
See Getting BLOB data from XHR request which points out that BlobBuilder doesn't work in Chrome so you need to use:
xhr.responseType = 'arraybuffer';
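With responseType = 'arraybuffer' the raw bytes can still be wrapped in a Blob afterwards; roughly like this (the MIME type here is an assumption):

var xhr = new XMLHttpRequest();
xhr.open('GET', objectUrl, true);
xhr.responseType = 'arraybuffer';
xhr.onload = function () {
  if (this.status == 200) {
    // Wrap the ArrayBuffer in a Blob with whatever type you expect
    var myBlob = new Blob([this.response], { type: 'application/octet-stream' });
  }
};
xhr.send();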
Unfortunately @BrianFreud's answer doesn't fit my needs. I had a slightly different need, and I know this is not the answer to @BrianFreud's question, but I am leaving it here because a lot of people get here with the same need as mine. I needed something like 'How to get a file or blob from a URL?', and the currently accepted answer does not fit my needs because it is not cross-domain.
I have a website that consumes images from Amazon S3/Azure Storage, where I store objects named with unique identifiers:
sample: http://****.blob.core.windows.net/systemimages/bf142dc9-0185-4aee-a3f4-1e5e95a09bcf
Some of these images need to be downloaded from our system interface.
To avoid passing this traffic through my HTTP server, since these objects do not require any security to be accessed (except domain filtering), I decided to make a direct request from the user's browser and use local processing to give the file a real name and extension.
To accomplish that, I used this great article by Henry Algus:
http://www.henryalgus.com/reading-binary-files-using-jquery-ajax/
1. First step: Add binary support to jQuery
/**
 *
 * jquery.binarytransport.js
 *
 * @description. jQuery ajax transport for making binary data type requests.
 * @version 1.0
 * @author Henry Algus <henryalgus@gmail.com>
 *
 */

// use this transport for "binary" data type
$.ajaxTransport("+binary", function (options, originalOptions, jqXHR) {
  // check for conditions and support for blob / arraybuffer response type
  if (window.FormData && ((options.dataType && (options.dataType == 'binary')) || (options.data && ((window.ArrayBuffer && options.data instanceof ArrayBuffer) || (window.Blob && options.data instanceof Blob))))) {
    return {
      // create new XMLHttpRequest
      send: function (headers, callback) {
        // setup all variables
        var xhr = new XMLHttpRequest(),
          url = options.url,
          type = options.type,
          async = options.async || true,
          // blob or arraybuffer. Default is blob
          dataType = options.responseType || "blob",
          data = options.data || null,
          username = options.username || null,
          password = options.password || null;

        xhr.addEventListener('load', function () {
          var data = {};
          data[options.dataType] = xhr.response;
          // make callback and send data
          callback(xhr.status, xhr.statusText, data, xhr.getAllResponseHeaders());
        });

        xhr.open(type, url, async, username, password);
        // setup custom headers
        for (var i in headers) {
          xhr.setRequestHeader(i, headers[i]);
        }
        xhr.responseType = dataType;
        xhr.send(data);
      },
      abort: function () {
        jqXHR.abort();
      }
    };
  }
});
2. Second step: Make a request using this transport type.
function downloadArt(url) {
  $.ajax(url, {
    dataType: "binary",
    processData: false
  }).done(function (data) {
    // just my logic to name/create files
    var filename = url.substr(url.lastIndexOf('/') + 1) + '.png';
    var blob = new Blob([data], { type: 'image/png' });
    saveAs(blob, filename);
  });
}
Now you can use the created Blob however you want; in my case I want to save it to disk.
3. Optional: Save file on user's computer using FileSaver
I used FileSaver.js to save the downloaded file to disk; if you need to do that, use this JavaScript library:
https://github.com/eligrey/FileSaver.js/
I expect this to help others with more specific needs.
If you show the file in a canvas anyway, you can also convert the canvas content to a Blob object.
canvas.toBlob(function(my_file) {
  // .toBlob is only implemented in > FF18 but there is a polyfill
  // for other browsers: https://github.com/blueimp/JavaScript-Canvas-to-Blob
  var myBlob = (my_file);
});
Following @Kaiido's answer, another way to get this behaviour without patching the global URL object is to extend the URL class like this:
export class URLwithStore extends URL {
  static createObjectURL(blob) {
    const url = super.createObjectURL(blob);
    URLwithStore.store = { ...(URLwithStore.store ?? {}), [url]: blob };
    return url;
  }
  static getFromObjectURL(url) {
    return (URLwithStore.store ?? {})[url] ?? null;
  }
  static revokeObjectURL(url) {
    super.revokeObjectURL(url);
    if (
      new URL(url).protocol === "blob:" &&
      URLwithStore.store &&
      url in URLwithStore.store
    )
      delete URLwithStore.store[url];
  }
}
Usage
const blob = new Blob( ["foo"] );
const url = URLwithStore.createObjectURL( blob );
const retrieved = URLwithStore.getFromObjectURL( url );
console.log( "retrieved Blob is Same Object?", retrieved === blob );
