I want to process an image and make it secure by adding a watermark to it using Transloadit. But the source image I have is a base64 encoded string rather than a file the user uploads through a form. Can I pass this base64 encoded string to the form and process it to get a watermark on it?
Any help is appreciated. Thanks!
It seems like you need to convert the base64 data URI to a Blob object, then manually set the blob as a parameter in FormData, as suggested in this SO question.
function dataURItoBlob(dataURI) {
  // convert base64/URL-encoded data component to raw binary data held in a string
  var byteString;
  if (dataURI.split(',')[0].indexOf('base64') >= 0)
    byteString = atob(dataURI.split(',')[1]);
  else
    byteString = unescape(dataURI.split(',')[1]);

  // separate out the mime component
  var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];

  // write the bytes of the string to a typed array
  var ia = new Uint8Array(byteString.length);
  for (var i = 0; i < byteString.length; i++) {
    ia[i] = byteString.charCodeAt(i);
  }
  return new Blob([ia], { type: mimeString });
}
Then set it using:
var dataURL = canvas.toDataURL('image/jpeg', 0.5);
var blob = dataURItoBlob(dataURL);
var fd = new FormData(document.forms[0]);
fd.append("canvasImage", blob);
Credit to Stoive for the code.
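If you are posting programmatically rather than submitting the form itself, you can send that same FormData with XMLHttpRequest. A minimal sketch, assuming a hypothetical /process endpoint (substitute whatever URL your Transloadit assembly or back end actually expects):
// Send the FormData built above; '/process' is a placeholder URL,
// not a real Transloadit endpoint.
var xhr = new XMLHttpRequest();
xhr.open('POST', '/process');
xhr.onload = function () {
  console.log('Upload finished with status ' + xhr.status);
};
// The browser sets the multipart/form-data boundary automatically.
xhr.send(fd);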
Related
I have a large text which I want to save as a BLOB in a SQLite DB. I learned in other posts that the string needs to be converted into a byte array before being saved as a BLOB. Below is my function that converts the string into a byte array. But when I insert this ArrayBuffer into the DB, it is saved as the string "[object ArrayBuffer]". I assume that is because a JavaScript ArrayBuffer is incompatible with SQLite. Can you help me store this ArrayBuffer as a BLOB without it getting converted to a string?
function str2ab(str) {
  var buf = new ArrayBuffer(str.length * 2); // 2 bytes for each char
  var bufView = new Uint16Array(buf);
  for (var i = 0, strLen = str.length; i < strLen; i++) {
    bufView[i] = str.charCodeAt(i);
  }
  return buf; // return the underlying ArrayBuffer, not the Uint16Array view
}
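The question does not say which SQLite layer is in use, but one way to keep the data binary end to end is to bind a typed-array view of the buffer as the statement parameter instead of concatenating it into the SQL string. A minimal sketch, assuming sql.js (which accepts a Uint8Array and stores it as a real BLOB); other wrappers may need a different binding mechanism:
// Sketch assuming sql.js; str2ab is the function above, returning the ArrayBuffer.
initSqlJs().then(function (SQL) {
  var db = new SQL.Database();
  db.run('CREATE TABLE docs (id INTEGER PRIMARY KEY, body BLOB)');

  // Binding a Uint8Array view keeps the data binary; string concatenation
  // is what produces the "[object ArrayBuffer]" text in the table.
  var buf = str2ab('some large text');
  db.run('INSERT INTO docs (body) VALUES (?)', [new Uint8Array(buf)]);
});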
In my JavaScript I have a base64 encoded PKCS#12 object, which I want to provide as a download link. The PKCS#12 (pfx) file to be downloaded is binary data.
So I decoded the object and tried to create an object URL from it:
var bin = atob(pkcs12);
var blob = new Blob([bin], { type: 'application/x-pkcs12' });
$scope.pkcs12Blob = (window.URL || window.webkitURL).createObjectURL( blob );
The problem is that the downloaded file is bigger than the original binary data and is not recognized as PKCS#12. It looks as if some UTF-8/Unicode conversion was introduced into the file.
If I provide the original base64 encoded data to createObjectURL and download the base64 encoded file, I can decode the downloaded file and get a valid p12 file.
So I am wondering: How does createObjectURL work for binary data?
The culprit is not createObjectURL itself: the Blob constructor treats a plain JavaScript string as text and UTF-8 encodes it, so you have to pass it a byte array rather than a binary string. This code worked like a charm:
var bytechars = atob($scope.enrolledToken.pkcs12);
var byteNumbers = new Array(bytechars.length);
for (var i = 0; i < bytechars.length; i++) {
  byteNumbers[i] = bytechars.charCodeAt(i);
}
var byteArray = new Uint8Array(byteNumbers);
var blob = new Blob([byteArray], { type: 'application/x-pkcs12' });
$scope.pkcs12Blob = (window.URL || window.webkitURL).createObjectURL(blob);
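To actually offer the file you can point a link at that blob URL; here is a small sketch (the file name is just an example). If you bind the URL in an AngularJS template instead, note that the blob: scheme may need to be whitelisted via $compileProvider.aHrefSanitizationWhitelist, depending on your Angular version.
// Sketch: trigger the download directly from script.
// 'token.p12' is an arbitrary example file name.
var link = document.createElement('a');
link.href = $scope.pkcs12Blob;
link.download = 'token.p12';
document.body.appendChild(link);
link.click();
document.body.removeChild(link);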
I have a string that I need to turn into a UTF-16 text file. For example:
var s = "aosjdfkzlzkdoaslckjznx";
var file = "data:text/plain;base64," + btoa(s);
This results in a UTF-8 encoded text file. How can I get a UTF-16 text file containing the string s?
Related: Javascript to csv export encoding issue
This should do it:
document.getElementById('download').addEventListener('click', function () {
  downloadUtf16('Hello, World', 'myFile.csv');
});

function downloadUtf16(str, filename) {
  // ref: https://stackoverflow.com/q/6226189
  var charCode, byteArray = [];

  // BE BOM
  byteArray.push(254, 255);
  // LE BOM
  // byteArray.push(255, 254);

  for (var i = 0; i < str.length; ++i) {
    charCode = str.charCodeAt(i);

    // BE Bytes
    byteArray.push((charCode & 0xFF00) >>> 8);
    byteArray.push(charCode & 0xFF);

    // LE Bytes
    // byteArray.push(charCode & 0xff);
    // byteArray.push(charCode / 256 >>> 0);
  }

  var blob = new Blob([new Uint8Array(byteArray)], { type: 'text/plain;charset=UTF-16BE;' });
  var blobUrl = URL.createObjectURL(blob);

  // ref: https://stackoverflow.com/a/18197511
  var link = document.createElement('a');
  link.href = blobUrl;
  link.download = filename;
  if (document.createEvent) {
    var event = document.createEvent('MouseEvents');
    event.initEvent('click', true, true);
    link.dispatchEvent(event);
  } else {
    link.click();
  }
}
<button id="download">Download</button>
You can use a legacy polyfill of the native TextEncoder API to transform a JavaScript string into an ArrayBuffer. As you'll see in that documentation, UTF-16 with either endianness was supported. Libraries that provide UTF-16 support in a TextEncoder-compatible way will probably appear soon, if they haven't already. Let's assume that one such library exposes a constructor called ExtendedTextEncoder.
Then you can easily create a Blob URL to allow users to download the file, without the inefficient base-64 conversion.
Something like this:
s = "aosjdfkzlzkdoaslckjznx"
var encoder = new ExtendedTextEncoder("utf-16be")
var blob = new Blob(encoder.encode(s), "text/plain")
var url = URL.createObjectURL(blob)
Now you can use url instead of your data: URL.
I have a bunch of hex values and I have to convert them into binary data before writing them to a file.
I transformed the hex string into an array of integers, then converted each integer to a char:
// bytes contains the integers
str = String.fromCharCode.apply(String, bytes);
Now I create the blob and download it:
var blob = new Blob([str], {type: "application/octet-stream"});
saveAs(blob, "file.bin");
But something goes wrong: if I print the length of bytes and the length of str I get the same value (512), yet the file contains 684 chars, which of course is not what I expect.
So I have:
512 pairs of hex values ->
512 integers ->
512 chars ->
I save the file ->
684 chars inside the file.
What am I doing wrong? I even tried to add a charset to the Blob, i.e.:
var blob = new Blob([str], {type: "application/octet-stream;charset=UTF-8,"});
but with no success.
EDIT: (the original hex data and the resulting saved file were attached here for comparison)
Thanks to Andrey I found the solution:
I have to write in binary mode, so:
var ab = new ArrayBuffer(bytes.length); // bytes is the array with the integers
var ia = new Uint8Array(ab);
for (var i = 0; i < bytes.length; i++) {
  ia[i] = bytes[i];
}
var blob = new Blob([ia], { type: "application/octet-stream" });
saveAs(blob, id + "_<?php echo $report['md5']; ?>.bin");
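For completeness, here is one way the bytes array could be produced from the original hex string; the helper name is mine, not from the question:
// Hypothetical helper: turn a hex string such as "deadbeef" into an array
// of integers (one per byte) suitable for filling the Uint8Array above.
function hexToBytes(hex) {
  var bytes = [];
  for (var i = 0; i < hex.length; i += 2) {
    bytes.push(parseInt(hex.substr(i, 2), 16));
  }
  return bytes;
}

var bytes = hexToBytes("deadbeef"); // [222, 173, 190, 239]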
My web app receives data in the form of a base64 encoded string, which it decodes using atob and stores via URL.createObjectURL(). This data is then downloaded via the right-click save-as dialog. The downloaded file always matches the source file when the source file is ASCII encoded. However, this isn't the case when the source file is plain binary data. A diff of a non-ASCII downloaded file against its source file appears to show that the downloaded file is UTF-8 encoded. How can this problem be fixed? Please note, I'm locked into using Firefox 10.
Convert the string to an ArrayBuffer and it should work. If there is any way you can get the data into an ArrayBuffer directly without going through a string, that would be the best solution.
The following code is tested in FF10 and uses the now obsolete MozBlobBuilder.
var str="",
idx, len,
buf, view, blobbuild, blob, url,
elem;
// create a test string
for (var idx = 0; idx < 256; ++idx) {
str += String.fromCharCode(idx);
}
// create a buffer
buf = new ArrayBuffer(str.length);
view = new Uint8Array(buf);
// convert string to buffer
for (idx = 0, len = str.length; idx < len; ++idx) {
view[idx] = str.charCodeAt(idx);
}
blobbuild = new MozBlobBuilder();
blobbuild.append(buf);
blob = blobbuild.getBlob('application/octet-stream');
url = URL.createObjectURL(blob);
elem = document.createElement('a');
elem.href = url;
elem.textContent = 'Test';
document.body.appendChild(elem);
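In browsers newer than Firefox 10 the same thing no longer needs MozBlobBuilder, because the Blob constructor accepts typed arrays directly. A minimal sketch of the equivalent code, where base64Data stands for whatever base64 string your app receives:
// Equivalent without MozBlobBuilder (not available in Firefox 10):
// build the byte view and hand it straight to the Blob constructor.
var str = atob(base64Data); // base64Data: your base64 encoded input
var view = new Uint8Array(str.length);
for (var i = 0; i < str.length; ++i) {
  view[i] = str.charCodeAt(i);
}
var blob = new Blob([view], { type: 'application/octet-stream' });
var url = URL.createObjectURL(blob);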