I am trying to upload an image file from an APEX file browse item. Since image files can be huge, I am using a chunked upload technique to get the image into the database. Everything is working fine: the chunks are passed to an AJAX callback process, stored in an APEX collection, and written to the database from there. Here is my JavaScript code to upload the image in chunks:
var fileInputElem = document.getElementById('P130_FILE_UPLOAD');
var fileIndex = 0;
// splits a long string into an array of chunks of the given size
function clob2Array(clob, size, array) {
  var loopCount = Math.ceil(clob.length / size); // "var" keeps loopCount out of the global scope
  for (var i = 0; i < loopCount; i++) {
    array.push(clob.slice(size * i, size * (i + 1)));
  }
  return array;
}
// converts an ArrayBuffer to a base64 string
function binaryArray2base64(int8Array) {
  var data = "";
  var bytes = new Uint8Array(int8Array);
  var length = bytes.byteLength;
  for (var i = 0; i < length; i++) {
    data += String.fromCharCode(bytes[i]);
  }
  return btoa(data);
}
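For multi-megabyte images, appending one character per byte gets slow. A minimal alternative sketch (same base64 output; the 32k slice size is an arbitrary choice to stay under fromCharCode.apply's argument limits):

// converts an ArrayBuffer to a base64 string, 32k bytes at a time
function binaryArray2base64Chunked(arrayBuffer) {
  var bytes = new Uint8Array(arrayBuffer);
  var parts = [];
  var sliceSize = 0x8000; // 32768 arguments per fromCharCode.apply call
  for (var i = 0; i < bytes.length; i += sliceSize) {
    parts.push(String.fromCharCode.apply(null, bytes.subarray(i, i + sliceSize)));
  }
  return btoa(parts.join(''));
}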
// a recursive function that uploads the selected files one after another (sequentially)
function uploadFile(pFileIndex) {
  var file = fileInputElem.files[pFileIndex];
  var reader = new FileReader();
  reader.onload = (function(pFile) {
    return function(e) {
      if (pFile) {
        var base64 = binaryArray2base64(e.target.result);
        var f01Array = clob2Array(base64, 30000, []);
        apex.server.process(
          'UPLOAD_FILE',
          {
            x01: pFile.name,
            x02: pFile.type,
            f01: f01Array
          },
          {
            dataType: 'json',
            success: function(data) {
              if (data.result === 'success') {
                fileIndex++;
                if (fileIndex < fileInputElem.files.length) {
                  // start uploading the next file
                  uploadFile(fileIndex);
                } else {
                  // all files have been uploaded at this point
                  spinner.stop();
                  fileInputElem.value = '';
                  $('#uploadedFilesRpt').trigger('apexrefresh');
                }
              } else {
                alert('Oops! Something went terribly wrong. Please try again or contact your application administrator.');
              }
            }
          }
        );
      }
    };
  })(file);
  reader.readAsArrayBuffer(file);
}
// variables for spin.js
var spinner;
var spinTargetElem = document.getElementById('wwvFlowForm');
var spinOptions = {
  lines: 13,
  length: 28,
  width: 14,
  radius: 42,
  scale: 1,
  corners: 1,
  color: '#000',
  opacity: 0.25,
  rotate: 0,
  direction: 1,
  speed: 1,
  trail: 60,
  fps: 20,
  zIndex: 2e9,
  className: 'spinner',
  top: '50%',
  left: '50%',
  shadow: false,
  hwaccel: false,
  position: 'absolute'
};
Here is the AJAX callback process at the server level:
declare
  lco_collection_name constant apex_collections.collection_name%type := 'UPLOADED_FILES';
  l_blob      blob;
  l_filename  varchar2(200);
  l_mime_type varchar2(200);
  l_token     varchar2(32000);
begin
  l_filename  := apex_application.g_x01;
  l_mime_type := nvl(apex_application.g_x02, 'application/octet-stream');
  -- build BLOB from the f01 30k array (base64 encoded);
  -- each 30000-char token decodes independently because 30000 is a multiple of 4
  dbms_lob.createtemporary(l_blob, false, dbms_lob.session);
  for i in 1 .. apex_application.g_f01.count loop
    l_token := apex_application.g_f01(i); -- read from the same array the loop counts
    if length(l_token) > 0 then
      dbms_lob.append(
        dest_lob => l_blob,
        src_lob  => to_blob(utl_encode.base64_decode(utl_raw.cast_to_raw(l_token)))
      );
    end if;
  end loop;
  -- add collection member (only if the BLOB is not empty;
  -- getlength returns 0, not null, for an empty temporary LOB)
  if dbms_lob.getlength(l_blob) > 0 then
    apex_collection.add_member(
      p_collection_name => lco_collection_name,
      p_c001            => l_filename,
      p_c002            => l_mime_type,
      p_blob001         => l_blob
    );
  end if;
  apex_json.open_object;
  apex_json.write(
    p_name  => 'result',
    p_value => 'success'
  );
  apex_json.close_object;
exception
  when others then
    apex_json.open_object;
    apex_json.write(
      p_name  => 'result',
      p_value => 'fail'
    );
    apex_json.close_object;
end;
The only thing left: I want to resize the uploaded image on the client side, not the server side. If the image is larger than 800x600, it should be scaled down to fit within 800x600. Please suggest a good way to resize the image in my JavaScript code.
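For reference, a common client-side approach is to draw the image onto a <canvas> and read the result back as a Blob before chunking it. A minimal sketch (the JPEG output and 0.9 quality are assumptions to adjust; it preserves aspect ratio instead of stretching to exactly 800x600):

// scales an image file down to fit within pMaxWidth x pMaxHeight, then calls pCallback with a Blob
function resizeImage(pFile, pMaxWidth, pMaxHeight, pCallback) {
  var img = new Image();
  img.onload = function() {
    // never upscale: cap the scale factor at 1
    var scale = Math.min(pMaxWidth / img.width, pMaxHeight / img.height, 1);
    var canvas = document.createElement('canvas');
    canvas.width = Math.round(img.width * scale);
    canvas.height = Math.round(img.height * scale);
    canvas.getContext('2d').drawImage(img, 0, 0, canvas.width, canvas.height);
    URL.revokeObjectURL(img.src); // free the temporary object URL
    canvas.toBlob(pCallback, 'image/jpeg', 0.9);
  };
  img.src = URL.createObjectURL(pFile);
}

The resulting Blob can be passed to reader.readAsArrayBuffer() in place of the original file, so the rest of the chunked upload stays unchanged. Note that the canvas re-encodes the image, so the stored bytes will differ from the original even when no scaling was needed; skip the resize when scale is 1 if that matters.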
Related
I stored some JPEG files (exactly 350; the same file, same size; 336.14 MB in total) as Blobs in IndexedDB. The transaction took around 1 second to complete. Then I read all the data from IndexedDB into an array and stored it to IndexedDB again. This second write takes around 15 seconds, and I observed this as consistent behavior. Is anything wrong here? I used performance.now() to get the time difference.
Files: 350,
Size of each: 937 KB,
Browser: Chrome and Chromium Edge
// Open
var dbOpen = indexedDB.open(INDEXED_DB_NAME, INDEXED_DB_VERSION);
dbOpen.onupgradeneeded = function (e) {
  console.log("onupgradeneeded");
  var store = e.currentTarget.result.createObjectStore(
    IMAGE_DATA_STORE, { autoIncrement: true });
};
dbOpen.onsuccess = function (e) {
  image_data_db = dbOpen.result;
  console.log("indexed DB opened");
};
// Initial write
var fileblobs = [];
var inputFiles = document.getElementById('inputFiles');
for (var i = 0; i < inputFiles.files.length; i++) {
  let file = inputFiles.files[i];
  var b = new Blob([file], { type: file.type }); // wraps the File; no bytes are copied here
  fileblobs.push(b);
}
StoreIdb(fileblobs); // < First write
// StoreIdb()
t0 = performance.now();
var trx = image_data_db.transaction(IMAGE_DATA_STORE, 'readwrite');
var imagestore = trx.objectStore(IMAGE_DATA_STORE);
for (var i = 0; i < fileblobs.length; i++) {
  var request = imagestore.add(fileblobs[i]);
  request.onsuccess = function (e) {
    console.log('added');
  };
  request.onerror = function (e) {
    console.error("Request Error", this.error);
  };
}
trx.onabort = function (e) {
  console.error("Exception:", this.error, this.error.name);
};
trx.oncomplete = function (e) {
  console.log('completed');
  t1 = performance.now();
  timetaken = t1 - t0;
};
// Read
var objectStore = image_data_db.transaction(IMAGE_DATA_STORE).objectStore(IMAGE_DATA_STORE);
objectStore.openCursor().onsuccess = function (e) {
  var cursor = e.target.result;
  if (cursor) {
    blobArray.push(cursor.value); // the Blob itself was stored, so cursor.value is the Blob
    cursor.continue();
  } else {
    // completed
  }
};
// blobArray will be used for the second time << Second write
I figured it out. The first time, it was storing File-backed Blobs: constructing a Blob from a File does not copy any bytes, so the browser can write it cheaply, presumably as a reference to the on-disk file, while the Blobs read back from IndexedDB are in-memory data whose bytes have to be copied on the second write.
I changed the File-backed Blobs to ArrayBuffer-backed ones, just to ensure the data type is the same in both cases. Now both writes take the same time.
var blobs = []; // replaces fileblobs from the first version
for (var i = 0; i < inputFiles.files.length; i++) {
  let file = inputFiles.files[i];
  file.arrayBuffer().then((arrayBuffer) => {
    // copying through an ArrayBuffer makes the Blob in-memory, like the ones read back from IndexedDB
    let blob = new Blob([new Uint8Array(arrayBuffer)], { type: file.type });
    blobs.push(blob);
    if (blobs.length == inputFiles.files.length) {
      callback(blobs); // proceed to StoreIdb once every file has been converted
    }
  });
}
I'm trying to send multiple files to Dropbox using JavaScript and Ruby/Rails. First I have to decode an image containing a QR code and send it to my Ruby server, which then sends it to Dropbox. The problem is that when I decode the files and send them, they do not go in the right order. I'm aware that this is probably because of the reader's onload function, but how can I solve this, and why does it behave this way?
PS: My QR code has to be a number (a code from the DB); that's why I'm using 'isnum'.
PS2: I tried 10 files, and the console.log that prints 'j' produced this order: 1, 7, 7, 4, 6, 6, 6, 6, 3, and 0. How can I keep these indexes from repeating?
function setupReader(file, j) {
  var reader = new FileReader();
  reader.onload = function (e) {
    qrcode.decode(e.target.result);
    // note: qrcode.callback is a single global slot shared by every decode
    qrcode.callback = function (a) {
      console.log("callback", j);
      var isnum = /^\d+$/.test(a);
      var form_data = new FormData();
      count_send++;
      if (isnum) {
        form_data.append("code", a);
        form_data.append("file", file);
        // console.log(form_data);
        var request = new XMLHttpRequest();
        request.open('POST', '/docs/baixaponto', /* async = */ false);
        request.setRequestHeader('X-CSRF-Token', $('meta[name="csrf-token"]').attr('content'));
        request.send(form_data);
        // returnedStatus and returnedStringFinal are globals set elsewhere in the original code
        if (returnedStatus != 200) {
          error_files_dropbox_baixa_filesimgs.push(e.target.result);
          e.target.result = "";
          error_files_dropbox_baixa_arquivos.push(returnedStringFinal);
          error_files_dropbox_baixa.push(file);
          error_files_dropbox_baixacount++;
        } else {
          success_files_filesimgs.push(e.target.result);
          e.target.result = "";
          success_files_arquivos.push(returnedStringFinal);
          success_files.push(file);
          success_filescount++;
        }
      } else {
        error_files_decode_filesimgs.push(imgcoded64);
        e.target.result = "";
        error_files_decode.push(file);
        error_files_decodecount++;
      }
      if (count_send == count) {
        // function that opens a modal with file results
        setTimeout(showdetalhedfiles, 2000);
      }
    };
  };
  reader.readAsDataURL(file);
}
The other function is:
function readqrcode() {
  f = document.getElementById('myfiles');
  count = f.files.length;
  if (f.files && f.files[0]) {
    for (var i = 0; i < count; i++) {
      setupReader(f.files[i], i);
    }
  } else {
    alert("Nenhum arquivo selecionado"); // "No file selected"
  }
}
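The repeated indexes happen because qrcode.callback is a single global slot: every onload overwrites it, so whichever decode finishes next reports through whatever j was assigned last. One way around it (a sketch, assuming the same global qrcode object, with the existing handling moved inside the callback) is to process the files strictly one at a time, only starting the next read from inside the callback:

function readqrcode() {
  var f = document.getElementById('myfiles');
  if (!f.files || !f.files.length) {
    alert("Nenhum arquivo selecionado");
    return;
  }
  var files = f.files;
  (function processFile(j) {
    if (j >= files.length) {
      setTimeout(showdetalhedfiles, 2000); // all files processed
      return;
    }
    var reader = new FileReader();
    reader.onload = function(e) {
      // install the callback for this file before starting the decode
      qrcode.callback = function(a) {
        console.log("callback", j); // now fires exactly once per index, in order
        // ... the existing isnum / POST handling for files[j] goes here ...
        processFile(j + 1); // only start the next file once this one is done
      };
      qrcode.decode(e.target.result);
    };
    reader.readAsDataURL(files[j]);
  })(0);
}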
Hi, I'm trying to upload 2 or more files. My problem is that the progress bar jumps to 100% because the small file finishes uploading first, then drops back to the percentage of the large file. My question is: how can I show one combined progress when many files are being uploaded?
$('body').on('change', 'input:file.gallery_images', function(event) {
  event.preventDefault();
  var data = new FormData();
  data.append('id', $("#id").val());
  var count = $(this)[0].files.length;
  $.each($(this)[0].files, function(i, file) {
    data.append('userfile', file);
    $.ajax({
      type: "POST",
      url: href + path + "/imagens/store",
      data: data,
      mimeType: 'multipart/form-data',
      contentType: false,
      cache: false,
      processData: false,
      dataType: "json",
      xhr: function() {
        var _xhr = $.ajaxSettings.xhr();
        _xhr.addEventListener('progress', function (event) { }, false);
        if (_xhr.upload) {
          _xhr.upload.onprogress = function(event) {
            var percent = 0;
            if (event.lengthComputable) {
              var position = event.position || event.loaded;
              var total = event.totalSize || event.total;
              percent = Math.ceil(position / total * 100);
            }
            $("#progress-bar").width(percent + '%');
          };
        }
        return _xhr;
      },
      beforeSend: function() {
        $("#progress").fadeIn('slow');
        $("#progress-bar").width('0%');
      },
      success: function(data) {
        if (data.gallery) {
          if ($(".alert").length > 0) {
            $(".alert").hide('slow').remove();
            $("#droppable").show('slow');
          }
          $('.gallery').fadeTo('300', '0.5', function () {
            $(this).html($(this).html() + data.gallery).fadeTo('300', '1');
          });
        }
        $("#progress").fadeOut('slow');
      }
    });
  });
});
Ok, first thing I noticed is that you're adding the files to the 'data' variable inside your $.each... but that means the first POST contains the first image, the second POST contains the first and the second, and so on. I think you should move this part inside your $.each:
var data = new FormData();
data.append('id', $("#id").val());
Ok, so, to solve your problem: before sending anything, go through the files and sum their sizes. You'll also need to store the progress of each file individually, so start each one at zero:
var sumTotal = 0;
var loaded = [];
for (var i = 0, list = $(this)[0].files; i < list.length; i++) {
  sumTotal += list[i].size;
  loaded[i] = 0;
}
Inside your onprogress, instead of comparing event.position with event.totalSize, store this file's position in your 'loaded' array, sum the whole array, and then compare that to your sumTotal:
loaded[i] = event.position || event.loaded;
var sumLoaded = 0;
for (var j = 0; j < loaded.length; j++) sumLoaded += loaded[j];
percent = Math.ceil(sumLoaded * 100/sumTotal);
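Putting both fixes together, a sketch of the xhr factory (assuming sumTotal and loaded are built as above, and i is the $.each index captured for each file):

xhr: function() {
  var _xhr = $.ajaxSettings.xhr();
  if (_xhr.upload) {
    _xhr.upload.onprogress = function(event) {
      if (!event.lengthComputable) return;
      loaded[i] = event.position || event.loaded; // progress of this file only
      var sumLoaded = 0;
      for (var j = 0; j < loaded.length; j++) sumLoaded += loaded[j];
      // one bar over all files: bytes sent so far vs. total bytes selected
      $("#progress-bar").width(Math.ceil(sumLoaded * 100 / sumTotal) + '%');
    };
  }
  return _xhr;
},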
;)
I am working on a function that will write data to a remote server in chunks using a 3rd-party API. Through some help on Stack Overflow I was able to accomplish this, and it is now working as expected. The problem is that I can only get a single 16kb chunk to write, as I need to advance the pos of where the next bytes are written.
The initial write starts at 0 easily enough. Due to my unfamiliarity with this, though, I am unsure whether the next pos should just be 16 or what. If it helps, the API call writeFileChunk() takes 3 parameters: filepath (str), pos (int64), and data (a base64 encoded string).
reader.onload = function(evt) {
  // Get SERVER_ID from URL
  var server_id = getUrlParameter('id');
  $("#upload_status").text('Uploading File...');
  $("#upload_progress").progressbar('value', 0);
  var chunkSize = 16 << 10; // 16kb
  var buffer = evt.target.result;
  var fileSize = buffer.byteLength;
  var segments = Math.ceil(fileSize / chunkSize); // how many segments we need to divide into for upload
  var count = 0;
  // start the file upload
  (function upload() {
    var segSize = Math.min(chunkSize, fileSize - count * chunkSize);
    if (segSize > 0) {
      $("#upload_progress").progressbar('value', (count / segments));
      var chunk = new Uint8Array(buffer, count++ * chunkSize, segSize); // get a chunk
      var chunkEncoded = btoa(String.fromCharCode.apply(null, chunk));
      // Send chunk data to server
      $.ajax({
        type: "POST",
        url: "filemanagerHandler.php",
        data: { 'action': 'writeFileChunk', 'server_id': server_id, 'filepath': filepath, 'pos': 0, 'chunk': chunkEncoded },
        dataType: 'json',
        success: function(data) {
          console.log(data);
          setTimeout(upload, 100);
        },
        error: function(XMLHttpRequest, textStatus, errorThrown) {
          alert("Status: " + textStatus);
          alert("Error: " + errorThrown);
          alert("Message: " + XMLHttpRequest.responseText);
        }
      });
    } else {
      $("#upload_status").text('Finished!');
      $("#upload_progress").progressbar('value', 100);
      getDirectoryListing(curDirectory);
    }
  })();
};
The current position in the file on the client side is represented by this line, or more specifically by the second argument, before it is incremented:
var chunk = new Uint8Array(buffer, count++ * chunkSize, segSize);
though, in this case it advances (count++) before you can reuse it, so if you need the actual position (below as pos) you can extract it by simply rewriting the line into:
var pos = count++ * chunkSize; // here chunkSize = 16kb
var chunk = new Uint8Array(buffer, pos, segSize);
Here each position update will advance by 16kb, as that is the chunk size. Progress is then calculated as pos / fileSize * 100. This of course assumes the unencoded buffer size is used.
The only special case is the last chunk, but when there are no more chunks left to read, the position should equal the file length (fileSize), so it should be pretty straightforward.
When the ajax call returns, the server should be at the same position unless something went wrong (connection, write access change, disk full etc.).
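Applied to the question's code, that means computing pos right before building the chunk and sending it instead of the hard-coded 0; a sketch of just the changed part of upload():

var pos = count++ * chunkSize; // byte offset of this chunk within the file
var chunk = new Uint8Array(buffer, pos, segSize); // get a chunk
var chunkEncoded = btoa(String.fromCharCode.apply(null, chunk));
// Send chunk data to server; 'pos' now advances by chunkSize on every call
$.ajax({
  type: "POST",
  url: "filemanagerHandler.php",
  data: { 'action': 'writeFileChunk', 'server_id': server_id, 'filepath': filepath, 'pos': pos, 'chunk': chunkEncoded },
  dataType: 'json',
  success: function(data) {
    console.log(data);
    setTimeout(upload, 100);
  }
});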
You can use the FileReader API to read the chunks and send them to your remote server.
HTML
<input type="file" id="files" name="file" /> Read bytes:
<span class="readBytesButtons">
<button>Read entire file in chuncks</button>
</span>
JavaScript
// Post data to your server.
function postChunk(obj) {
  var url = "https://your.remote.server";
  return new Promise(function(resolve, reject) {
    var xhr = new XMLHttpRequest();
    xhr.open('post', url, true);
    xhr.responseType = 'json';
    xhr.onload = function() {
      var status = xhr.status;
      if (status == 200) {
        resolve(xhr.response);
      } else {
        reject(status);
      }
    };
    var params = "";
    // check that obj has the proper keys and create the url parameters
    // (the keys must be quoted strings, and the values url-encoded)
    if (obj.hasOwnProperty('action') && obj.hasOwnProperty('server_id') && obj.hasOwnProperty('filepath') && obj.hasOwnProperty('pos') && obj.hasOwnProperty('chunk')) {
      params += "action=" + encodeURIComponent(obj['action']) +
                "&server_id=" + encodeURIComponent(obj['server_id']) +
                "&filepath=" + encodeURIComponent(obj['filepath']) +
                "&pos=" + encodeURIComponent(obj['pos']) +
                "&chunk=" + encodeURIComponent(obj['chunk']);
    }
    if (params.length > 0) {
      // form-encoded header so typical servers (e.g. PHP's $_POST) parse the body
      xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
      xhr.send(params);
    } else {
      alert('Error');
    }
  });
}
// add chunk to "obj" object and post it to server
function addChunk(reader,obj,divID) {
reader.onloadend = function(evt) {
if (evt.target.readyState == FileReader.DONE) { // DONE == 2
obj.chunk = evt.target.result;
console.log(obj);
document.getElementById(divID).textContent +=
['Sending bytes: ', obj.pos*16000, ' - ', ((obj.pos*16000)+(obj.pos+1)*obj.chunk.length),
'\n'].join('');
// post data to server
postChunk(obj).then(function(data) {
if(data!=="" && data!==null && typeof data!=="undefined") {
// chunk was sent successfully
document.getElementById(divID).textContent +=
['Sent bytes: ', obj.pos*16000, ' - ', ((obj.pos*16000)+(obj.pos+1)*obj.chunk.length),'\n'].join('');
} else {
alert('Error! Empty response');
}
}, function(status) {
alert('Resolve Error');
});
}
};
}
// read and send chunks
function readChunk() {
  var files = document.getElementById('files').files;
  if (!files.length) {
    alert('Please select a file!');
    return;
  }
  var file = files[0];
  var size = parseInt(file.size);
  var chunkSize = 16000;
  var chunks = Math.ceil(size / chunkSize);
  var start, stop = 0;
  var blob = [];
  for (var i = 0; i < chunks; i++) {
    start = i * chunkSize;
    stop = Math.min((i + 1) * chunkSize, size); // slice's end is exclusive, so no "-1" here
    var reader = new FileReader();
    blob = file.slice(start, stop);
    reader.readAsBinaryString(blob);
    var obj = { action: 'writeFileChunk', server_id: 'sid', filepath: 'path', pos: i, chunk: "" };
    var div = document.createElement('div');
    div.id = "bytes" + i;
    document.body.appendChild(div);
    addChunk(reader, obj, div.id);
  }
}
// Check for the various File API support.
if (window.File && window.FileReader && window.FileList && window.Blob) {
  console.log('Great success! All the File APIs are supported.');
} else {
  alert('The File APIs are not fully supported in this browser.');
}
document.querySelector('.readBytesButtons').addEventListener('click', function(evt) {
  if (evt.target.tagName.toLowerCase() == 'button') {
    readChunk();
  }
}, false);
You can check this example in this Fiddle
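One caveat on the sketch above: the writeFileChunk() API described in the question expects the chunk as a base64 encoded string, while readAsBinaryString() yields a raw binary string. A hedged one-line adjustment inside reader.onloadend, before posting:

obj.chunk = btoa(evt.target.result); // base64-encode to match the writeFileChunk() contract

Also note that pos is sent here as a chunk index; if the API's int64 pos is a byte offset, send i * chunkSize instead.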
I would like to calculate an MD5 checksum of every image uploaded with Dropzone.js, so that the user can safely remove the correct image (I calculate the MD5 checksum in the PHP part).
I need to create the MD5 hash with another library (FastMD5 or another one), and then send it along with the data when the remove button is clicked.
For now:
Dropzone.autoDiscover = false;
// Dropzone class:
var myDropzone = new Dropzone("div#dropzonePreview", {
  maxFiles: 5,
  url: "up",
  acceptedFiles: ".png,.jpg,.gif,.jpeg",
  maxFilesize: 6,
  uploadMultiple: true,
  addRemoveLinks: true,
  removedfile: function(file) {
    var name = file.name;
    var idform = document.getElementById('idform').value; // for me
    var hash = md5(file); // not tested
    $.ajax({
      type: 'POST',
      url: 'del.php',
      data: "filename=" + name + "&idform=" + idform + "&hash=" + hash,
      dataType: 'html'
    });
    var _ref;
    return (_ref = file.previewElement) != null ? _ref.parentNode.removeChild(file.previewElement) : void 0;
  }
});
The problem is that md5(file) is not working; I guess file isn't the raw data. I tried to find the data to calculate the hash from, but found nothing.
I'm sure there is a better way to do it, but I've made this work, and it's sending the right hash to my delete page (del.php). I've just realised that I will also need the hash to avoid uploading the same file twice.
I've used SPARK-MD5.
Dropzone.autoDiscover = false;
// Dropzone class:
var myDropzone = new Dropzone("div#dropzonePreview", {
  maxFiles: 5,
  url: "upload.php",
  acceptedFiles: ".png,.jpg,.gif,.jpeg",
  maxFilesize: 6,
  uploadMultiple: true,
  addRemoveLinks: true,
  // to remove one file
  removedfile: function(file) {
    var name = file.name;
    var idform = document.getElementById('idform').value; // for me
    // START SPARKMD5
    var blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
        chunkSize = 2097152, // read in chunks of 2MB
        chunks = Math.ceil(file.size / chunkSize),
        currentChunk = 0,
        spark = new SparkMD5.ArrayBuffer(),
        fileReader = new FileReader();
    fileReader.onload = function (e) {
      console.log('read chunk nr', currentChunk + 1, 'of', chunks);
      spark.append(e.target.result); // append array buffer
      currentChunk++;
      if (currentChunk < chunks) {
        loadNext();
      } else {
        console.log('finished loading');
        // START DROPZONE PART
        $.ajax({
          type: 'POST',
          url: 'del.php',
          data: "filename=" + name + "&idform=" + idform + "&hash=" + spark.end(), // spark.end() is the MD5
          dataType: 'html'
        });
        var _ref;
        return (_ref = file.previewElement) != null ? _ref.parentNode.removeChild(file.previewElement) : void 0;
        // END DROPZONE PART
      }
    };
    fileReader.onerror = function () {
      console.warn('oops, something went wrong.');
    };
    function loadNext() {
      var start = currentChunk * chunkSize,
          end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;
      fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
    }
    loadNext();
    // END SPARKMD5
  }
});
I'm not sure whether fileReader.onerror and loadNext are strictly necessary.
Anyway, it works when the need is to send the hash when the remove button is clicked, but I'm still looking for a better way to compare MD5 hashes before uploading.
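One option for that last part, sketched under the assumption that the SparkMD5 loop above is wrapped in a helper computeMD5(file, callback): hook Dropzone's accept option, which runs before the upload starts, stash the hash on the file object, and reject duplicates there (alreadyUploaded() is a hypothetical check, e.g. an AJAX lookup against the server):

var myDropzone = new Dropzone("div#dropzonePreview", {
  url: "upload.php",
  accept: function(file, done) {
    computeMD5(file, function(hash) { // assumed wrapper around the SparkMD5 loop above
      file.md5 = hash;                // keep it around for removedfile as well
      if (alreadyUploaded(hash)) {    // hypothetical duplicate check
        done("Duplicate file.");      // rejects the file before it is uploaded
      } else {
        done();                       // accept: the upload proceeds
      }
    });
  },
  sending: function(file, xhr, formData) {
    formData.append("hash", file.md5); // send the hash along with the upload itself
  }
});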