FileReader returns undefined [duplicate] - javascript

This question already has answers here:
How do I return the response from an asynchronous call?
(41 answers)
Closed 4 years ago.
I'm trying to use FileReader() on my website, but it's returning undefined in the array. This is the first time I'm using FileReader to send encoded data from my file input. I'm using the Recruiterbox API and, as you can see in the screenshot, encoded_data is undefined. I'm stuck here and need some help.
applyOpening: function applyOpening() {
let fields = [];
let formControl = document.querySelectorAll('.form-control');
for (var i = 0; i < formControl.length; i++) {
let field = formControl[i];
let obj = {
key: field.getAttribute('name')
}
if (field.tagName.toLowerCase() === 'input' && field.getAttribute('type') === 'file' ) {
obj.value = {
'encoded_data': getBase64(field.files[0]),
'file_name': field.value
}
} else {
obj.value = field.value
}
fields.push(obj);
}
function getBase64(file) {
let reader = new FileReader();
if (file) {
return reader.readAsDataURL(file);
} else {
return false;
}
reader.onloadend = function() {
return reader.result;
};
}
$.ajax({
url: 'https://jsapi.recruiterbox.com/v1/openings/' + id + '/apply?client_name=clientname',
data: JSON.stringify({ fields: fields }),
dataType: 'json',
contentType: 'application/json',
type: 'POST',
success: function(response) {
console.log(JSON.stringify(data));
},
error: function(er) {
console.error(er);
}
});
}
Update:
applyOpening: function applyOpening() {
function getBase64(file) {
return new Promise(function(resolve) {
var reader = new FileReader();
reader.onloadend = function() {
resolve(reader);
}
reader.readAsDataURL(file);
})
}
let fields = [];
let formControl = document.querySelectorAll('.form-control');
for (var i = 0; i < formControl.length; i++) {
let field = formControl[i];
let obj = {
key: field.getAttribute('name')
}
if (field.tagName.toLowerCase() === 'input' && field.getAttribute('type') === 'file' ) {
if (field.files[0]) {
getBase64(field.files[0]).then(function(reader) {
obj.value = {
'encoded_data': reader.result,
'file_name': field.value.replace("C:\\fakepath\\", "")
};
});
}
} else {
obj.value = field.value;
}
if (obj.key !== null) {
fields.push(obj);
}
console.log(obj);
}
app.postApplyOpening(fields);
Console log: (screenshot not reproduced)
Request payload: (screenshot not reproduced)
Update 2:
applyOpening: async function applyOpening() {
function getBase64(file) {
return new Promise(function(resolve) {
var reader = new FileReader();
reader.onloadend = function() {
resolve(reader);
}
reader.readAsDataURL(file);
})
}
let fields = [];
let formControl = document.querySelectorAll('.form-control');
for (var i = 0; i < formControl.length; i++) {
let field = formControl[i];
let obj = {
key: field.getAttribute('name')
}
if (field.tagName.toLowerCase() === 'input' && field.getAttribute('type') === 'file' ) {
if (field.files[0]) {
let reader = await getBase64(field.files[0]);
obj.value = {
'encoded_data': reader.result.replace(new RegExp("^data:[A-Z]+/[A-Z]+;base64,", "gi"), ''),
'file_name': field.value.replace("C:\\fakepath\\", "")
};
}
} else {
obj.value = field.value;
}
if (obj.key !== null) {
fields.push(obj);
}
console.log(obj);
}
app.postApplyOpening(fields);
},
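A caveat with the prefix-stripping regex in Update 2: MIME types can contain digits and symbols (for example image/svg+xml or application/vnd.ms-excel), which the pattern ^data:[A-Z]+/[A-Z]+;base64, will not match even with the i flag. Splitting on the first comma avoids the problem; a minimal sketch against the reader.result from the question:
// Sketch: a data URL has the form "data:<mime>;base64,<payload>",
// so everything after the first comma is the base64 payload.
var encoded = reader.result.substring(reader.result.indexOf(',') + 1);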

FileReader delivers its result asynchronously, so use a promise to get at it. You cannot return the result from readAsDataURL; its return value is undefined, which is exactly what you are seeing.
See here: Javascript base64 encoding function returns undefined
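A minimal sketch of that approach, assuming the caller is an async function (readFileAsDataURL is an illustrative name, not from the original code):
// Sketch: wrap FileReader in a promise so the data URL can be awaited.
function readFileAsDataURL(file) {
  return new Promise(function (resolve, reject) {
    var reader = new FileReader();
    reader.onload = function () { resolve(reader.result); }; // a "data:<mime>;base64,..." string
    reader.onerror = function () { reject(reader.error); };
    reader.readAsDataURL(file);
  });
}
// Usage inside an async function:
// const dataUrl = await readFileAsDataURL(field.files[0]);
// const encodedData = dataUrl.split(',')[1]; // strip the "data:...;base64," prefix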

Related

JSON.stringify not working with nested array of objects

I know this question has been asked before, but none of the solutions are working for me and I can't figure out what's wrong. I have an object with a nested array of objects that I am stringifying, but I get a blank array in place of the nested array when I use JSON.stringify on it.
This is a simplified version of the way I'm constructing the object. The main difference is that there is a for loop iterating through all the rows; here I am just manually creating two rows.
// JAVASCRIPT
let obj = {};
obj['type'] = 'Setting';
obj['id'] = 1;
obj['import'] = parseCSV();
function parseCSV() {
let jsonData = [];
let row1 = {};
let row2 = {};
row1['date'] = '2022-01-01';
row1['amount'] = '30';
row2['date'] = '2022-01-02';
row2['amount'] = '50';
jsonData.push(row1);
jsonData.push(row2);
return jsonData;
}
console.log('RAW DATA', obj);
console.log('STRINGIFIED', JSON.stringify(obj));
The above outputs the correct stringified JSON.
But the full version of my code gives me a blank array for import.
Both objects look identical to me. The culprit is somewhere in my parseCSV function, because when I use the simplified version above I get the correct stringified data, but I can't pinpoint where I'm wrong. Below is my full function.
function parseCSV(file) {
let filename = file.name;
let extension = filename.substring(filename.lastIndexOf('.')).toUpperCase();
if(extension == '.CSV') {
try {
let reader = new FileReader();
let jsonData = [];
let headers = [];
reader.readAsBinaryString(file);
reader.onload = function(e) {
let rows = e.target.result.split('\n');
for(let i = 0; i < rows.length; i++) {
let cells = rows[i].split(',');
let rowData = {};
for(let j = 0; j < cells.length; j++) {
if(i == 0) headers.push(cells[j].trim());
else {
if(headers[j]) rowData[headers[j]] = cells[j].trim();
}
}
if(i != 0 && rowData['date'] != '') jsonData.push(rowData);
}
}
return jsonData;
} catch(err) {
console.error('!! ERROR READING CSV FILE', err);
}
} else alert('PLEASE UPLOAD A VALID CSV FILE');
}
Thanks for the help!
EDIT
When I add await before parseCSV, as BJRINT's answer suggests, I get a syntax error: await is only valid in async function.
async function submitForm(event) {
event.preventDefault();
let newItem = await gatherFormData(event.target);
return fetch('server.php', {
method: "POST",
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json'
},
body: JSON.stringify(newItem)
})
.then(checkError)
.then(data => parseData(data))
.catch(err => console.error('>> ERROR READING JSON DATA', err));
}
function gatherFormData(target) {
const inputs = target.querySelectorAll('input');
let obj = {};
inputs.forEach(function(input) {
if(intKeys.indexOf(input.name) >= 0) obj[input.name] = parseInt(input.value);
else if(curKeys.indexOf(input.name) >= 0) obj[input.name] = parseInt(parseFloat(input.value) * 100);
else if(chkKeys.indexOf(input.name) >= 0) input.checked ? obj[input.name] = 1 : obj[input.name] = 0;
else if(fileKeys.indexOf(input.name) >= 0 && input.files.length > 0) obj[input.name] = parseCSV(input.files[0]);
else obj[input.name] = input.value;
});
return obj;
}
The problem does not come from the stringify function. Since you are filling your array asynchronously (when the reader callback runs) but returning it before that happens, it is empty.
You could wrap your function with a Promise that resolves when the reader callback function is finally executed, like so:
function parseCSV(file) {
return new Promise((resolve, reject) => {
let filename = file.name;
let extension = filename.substring(filename.lastIndexOf('.')).toUpperCase();
if(extension !== '.CSV')
return reject('PLEASE UPLOAD A VALID CSV FILE')
try {
let reader = new FileReader();
let jsonData = [];
let headers = [];
reader.readAsBinaryString(file);
reader.onload = function(e) {
let rows = e.target.result.split('\n');
for(let i = 0; i < rows.length; i++) {
let cells = rows[i].split(',');
let rowData = {};
for(let j = 0; j < cells.length; j++) {
if(i == 0) headers.push(cells[j].trim());
else {
if(headers[j]) rowData[headers[j]] = cells[j].trim();
}
}
if(i != 0 && rowData['date'] != '') jsonData.push(rowData);
}
return resolve(jsonData);
}
} catch(err) {
return reject('!! ERROR READING CSV FILE', err);
}
})
}
// calling the function
const data = await parseCSV(file)
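Regarding the syntax error mentioned in the edit: await is only legal directly inside an async function, and awaiting inside a forEach callback will not pause the outer function. A sketch of how gatherFormData could be made async instead (names such as fileKeys and parseCSV are taken from the question; the other key handling is trimmed for brevity):
async function gatherFormData(target) {
  const inputs = target.querySelectorAll('input');
  const obj = {};
  for (const input of inputs) {
    // for...of lets the await actually pause the loop, unlike forEach
    if (fileKeys.indexOf(input.name) >= 0 && input.files.length > 0) {
      obj[input.name] = await parseCSV(input.files[0]); // parseCSV returns a promise (see above)
    } else {
      obj[input.name] = input.value; // other key handling from the question omitted here
    }
  }
  return obj;
}
// submitForm is already declared async and awaits gatherFormData, so no further change is needed there.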
The solution that worked for my specific case was to use a combination of BJRINT's answer and a timer that keeps checking whether the data has finished loading, which I found here.
async function parseCSV(file) {
return await new Promise((resolve, reject) => {
let extension = file.name.substring(file.name.lastIndexOf('.')).toUpperCase();
if(extension !== '.CSV') reject('PLEASE UPLOAD A VALID CSV FILE');
try {
let reader = new FileReader();
reader.readAsText(file);
reader.onload = function(e) {
let jsonData = [];
let headers = [];
let rows = e.target.result.split(/\r\n|\r|\n/);
for(let i = 0; i < rows.length; i++) {
let cells = rows[i].split(',');
let rowData = {};
for(let j = 0; j < cells.length; j++) {
if(i == 0) headers.push(cells[j].trim());
else {
if(headers[j]) rowData[headers[j]] = cells[j].trim();
}
}
if(i != 0 && rowData['date'] != '') jsonData.push(rowData);
}
resolve(jsonData);
}
} catch(err) {
reject(err);
}
});
}
function submitForm(event) {
event.preventDefault();
showForm(false);
loading.classList.remove('hidden');
let ready = true;
const inputs = event.target.querySelectorAll('input');
let newItem = {};
let check = function() {
if(ready === true) {
console.log(newItem);
console.log(JSON.stringify(newItem));
return fetch('server.php', {
method: "POST",
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json'
},
body: JSON.stringify(newItem)
})
.then(checkError)
.then(data => parseData(data))
.catch(err => console.error('>> ERROR READING JSON DATA', err));
}
setTimeout(check, 1000);
}
inputs.forEach(function(input) {
if(intKeys.indexOf(input.name) >= 0) newItem[input.name] = parseInt(input.value);
else if(curKeys.indexOf(input.name) >= 0) newItem[input.name] = parseInt(parseFloat(input.value) * 100);
else if(chkKeys.indexOf(input.name) >= 0) input.checked ? newItem[input.name] = 1 : newItem[input.name] = 0;
else if(fileKeys.indexOf(input.name) >= 0 && input.files.length > 0) {
ready = false;
parseCSV(input.files[0]).then(data => {
ready = true;
newItem[input.name] = data;
});
}
else newItem[input.name] = input.value;
});
check();
}
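The polling timer works, but the same effect can be achieved without it by collecting the pending parses and waiting on them with Promise.all; a sketch using the names from the question (inputs, newItem, fileKeys, parseCSV):
const pending = [];
inputs.forEach(function (input) {
  if (fileKeys.indexOf(input.name) >= 0 && input.files.length > 0) {
    // remember the promise so we can wait for it before posting
    pending.push(parseCSV(input.files[0]).then(function (data) { newItem[input.name] = data; }));
  } else {
    newItem[input.name] = input.value; // other key handling as in the question
  }
});
Promise.all(pending).then(function () {
  // every file field is filled in, so newItem is safe to stringify and POST
  // (headers omitted for brevity)
  return fetch('server.php', { method: 'POST', body: JSON.stringify(newItem) });
});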

Issue in uploading multiple files and only 1 file to firebase storage

I can successfully upload multiple files to Firebase Storage; the issue is that when I try to upload only one file, nothing happens.
When I have multiple files they are added to an array and then I can upload them to the Firebase database, but how do I do it with only one file?
//choose files
document.querySelector('#pictures').addEventListener('change', (e) => {
const formData = extractFormData('#statusForm');
while (fileCollection.length) {
fileCollection.pop();
console.log("test");
}
[].slice.call(formData.pictures).map(f => fileCollection.push(f));
console.log(formData.pictures);
});
//choose files
document.forms.statusForm.addEventListener('submit', (e) => {
e.preventDefault();
e.stopPropagation();
const formData = extractFormData('#statusForm');
const text = formData.status;
formData.status = '';
sendData(text, fileCollection)
setTimeout(() => {
while (fileCollection.length) {
fileCollection.pop();
}
}, 100);
});
var extractFormData = function (form) {
const formData = new FormData(document.querySelector(form));
values = {};
for(var pair of formData.entries()) {
if( values[pair[0]] ) {
if(!(values[pair[0]] instanceof Array)) {
values[pair[0]] = new Array(values[pair[0]]);
}
values[pair[0]].push(pair[1]);
} else {
values[pair[0]] = pair[1];
}
}
console.log("values: " + values);
return values;
}
This is how it was fixed: I changed the extractFormData function to
const extractFormData = function (form) {
const formData = new FormData(document.querySelector(form));
const values = {};
for (let [key, value] of formData.entries()) {
if (values[key]) {
if ( ! (values[key] instanceof Array) ) {
values[key] = new Array(values[key]);
}
values[key].push(value);
} else {
values[key] = value;
if(typeof values[key] === 'undefined') {
// does not exist
console.log("doesnt exist")
}
else {
// does exist
values[key] = new Array(values[key]);
}
}
}
return values;
}
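The key change is that a value which appears only once is now wrapped in an array, so the upload code can always iterate, whether one file or many were chosen. A shorter sketch of the same idea, assuming every field should be collected as an array, uses FormData.getAll():
const extractFormData = function (form) {
  const formData = new FormData(document.querySelector(form));
  const values = {};
  for (const key of new Set(formData.keys())) {
    values[key] = formData.getAll(key); // getAll always returns an array, even for a single value
  }
  return values;
};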

How can Actionhero receive the parameter without inputs defined?

I'm using ActionHero with Node.js and Angular.js.
I am trying to send images to ActionHero using the $http method,
but I don't know how many images there will be,
so I can't define the parameter names on the action in ActionHero.
Below is my source.
First, the images are in an object, so I turn the object into individual parameters.
insert: function (param, next) {
var url = settings.apiUrl + "/api/online/productAdd";
var vdata = {
img_objects :param.img_objects
};
angular.forEach(param.img_objects, function (v, k) {
vdata['img_file'+(k)] = v.files;
});
commonSVC.sendUrlFile("POST", url, vdata, function (state, data) {
next(state, data);
});
}
Second, I build a FormData in sendUrlFile as in the source below and then send it to ActionHero.
var promise = $http({
method: method,
url: url,
headers: {
'Content-Type': undefined
},
data: params,
transformRequest: function (data) {
var formData = new FormData();
angular.forEach(data, function (value, key) {
if(angular.isObject(value)){
if(value.lastModified > 0 && value.size > 0){
formData.append(key, value);
}else{
formData.append(key, JSON.stringify(value));
}
}else{
formData.append(key, value);
}
});
return formData;
}
});
Third, the request reaches ActionHero, but the parameters aren't defined as inputs, so ActionHero can't receive them.
exports.productAdd = {
name: 'online/productAdd',
inputs: {
// I don't know how many images there will be: 1~10? 1~100?
},
authenticate: true,
outputExample: {
'result':'success'
}
So I have two questions:
How can ActionHero receive parameters without inputs being defined?
Can I send an object with image data to ActionHero by Ajax?
Thank you.
I changed the reduceParams function in actionProcessor.js.
api.actionProcessor.prototype.reduceParams = function(){
var self = this;
var inputNames = [];
if(self.actionTemplate.inputs){
inputNames = Object.keys(self.actionTemplate.inputs);
}
// check inputs containing '*' 2017-01-20 Eddy
var multi = [];
var strArray;
for(var v in inputNames){
if(inputNames[v].indexOf("*") != -1){
strArray = inputNames[v].split('*');
multi.push(strArray[0]);
}
}
var multiLength = multi.length;
var flag;
if(api.config.general.disableParamScrubbing !== true){
for(var p in self.params){
flag = true;
if(multiLength > 0){
for(var i=0; i<multiLength; i++){
if(p.indexOf(multi[i]) != -1){
flag = false;
}
}
}
if(flag){
if(api.params.globalSafeParams.indexOf(p) < 0 && inputNames.indexOf(p) < 0){
delete self.params[p];
}
}
}
}
};
With this change I can define a wildcard input like the one below:
'img_*' : {required: false}
And then I made it into a middleware:
var actionHeroMiddleware = {
name: '-',
global: true,
priority: 1000,
preProcessor: function(data, next) {
api.actionProcessor.prototype.reduceParams = function(){
var self = this;
var inputNames = [];
if(self.actionTemplate.inputs){
inputNames = Object.keys(self.actionTemplate.inputs);
}
// check inputs containing '*' 2017-01-20 Eddy
var multi = [];
var strArray;
for(var v in inputNames){
if(inputNames[v].indexOf("*") != -1){
strArray = inputNames[v].split('*');
multi.push(strArray[0]);
}
}
var multiLength = multi.length;
var flag;
if(api.config.general.disableParamScrubbing !== true){
for(var p in self.params){
flag = true;
if(multiLength > 0){
for(var i=0; i<multiLength; i++){
if(p.indexOf(multi[i]) != -1){
flag = false;
}
}
}
if(flag){
if(api.params.globalSafeParams.indexOf(p) < 0 && inputNames.indexOf(p) < 0){
delete self.params[p];
}
}
}
}
};
next();
},
stop: function(api, next) {
next();
}
};
api.actions.addMiddleware(actionHeroMiddleware);
next();

Determining the last file chunk

I'm trying to set up a file upload over REST for large files. The function below takes care of chunking, but I need to be able to recognize the last chunk because my REST call changes to /finishUpload() in order to commit the save.
Right now I can only tell when the blob is empty, but I can't figure out how to determine the last iteration before the blob is empty.
This is the script I'm using to parse my files.
export default function parseFile(file, options) {
var opts = typeof options === 'undefined' ? {} : options;
var fileSize = file.size;
var chunkSize = typeof opts['chunk_size'] === 'undefined' ? 64 * 1024 : parseInt(opts['chunk_size']);
var binary = typeof opts['binary'] === 'undefined' ? false : opts['binary'] == true;
var offset = 0;
var self = this; // we need a reference to the current object
var readBlock = null;
var chunkReadCallback = typeof opts['chunk_read_callback'] === 'function' ? opts['chunk_read_callback'] : function() {};
var chunkErrorCallback = typeof opts['error_callback'] === 'function' ? opts['error_callback'] : function() {};
var success = typeof opts['success'] === 'function' ? opts['success'] : function() {};
var onLoadHandler = function(evt) {
if (evt.target.result == "") {
console.log('Chunk empty, call finish');
success(file);
return;
}
if (evt.target.error == null) {
chunkReadCallback(evt.target.result, offset).then(function() {
offset += evt.target.result.length;
readBlock(offset, chunkSize, file);
});
} else {
chunkErrorCallback(evt.target.error);
return;
}
if (offset >= fileSize) {
success(file);
return;
}
}
readBlock = function(_offset, _chunkSize, _file) {
var r = new FileReader();
var blob = _file.slice(_offset, _chunkSize + _offset);
console.log("blob size:", blob.size, "offset:", _offset, "C+S:",_chunkSize + _offset)
r.onload = onLoadHandler;
if (binary) {
r.readAsArrayBuffer(blob);
} else {
r.readAsText(blob);
}
}
readBlock(offset, chunkSize, file);
}
Codepen
Why not rely on the file size, i.e. check the condition _chunkSize + _offset >= fileSize?
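A sketch of that check inside the question's readBlock (the isLastChunk flag and the extra argument to onLoadHandler are illustrative additions, not part of the original code):
readBlock = function (_offset, _chunkSize, _file) {
  var r = new FileReader();
  var blob = _file.slice(_offset, _chunkSize + _offset);
  // the slice that reaches or passes the end of the file is the last chunk
  var isLastChunk = _chunkSize + _offset >= _file.size;
  r.onload = function (evt) {
    onLoadHandler(evt, isLastChunk); // let the handler decide when to call /finishUpload()
  };
  if (binary) { // binary comes from the enclosing parseFile scope
    r.readAsArrayBuffer(blob);
  } else {
    r.readAsText(blob);
  }
};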
You can use the progress and loadend events to process a File object one byte at a time; define a variable that marks where processing should be paused or stopped, whether at the nth byte, every nth byte, or any byte during processing of the file.
var str = "abcdefghijklmnopqrstuvwxyz";
var type = "application/octet-stream";
var data = new Blob([], {
type: type
});
var filename = "file.txt";
var reader = new FileReader();
var complete = false;
var beforeEnd = false;
var stopAt = str.length - 1;
function handleFile(e) {
data = new File([data.slice()
, str.slice(data.size, data.size + 1)]
, filename, {
type: type,
lastModifiedDate: new Date()
});
}
function handleRead(e) {
if (data.size <= str.length && !complete) {
if (data.size === stopAt && !beforeEnd) {
beforeEnd = true;
var r = new FileReader();
r.onloadend = function() {
alert(`stopAt: ${stopAt}\n`
+`data.size: ${data.size}\n`
+`result at stopAt: ${r.result[stopAt -1]}`);
reader.readAsArrayBuffer(data);
}
r.readAsText(data);
} else {
reader.readAsArrayBuffer(data)
}
} else {
console.log("complete")
}
}
function handleProgress(e) {
if (data.size <= str.length && !complete) {
var read = new FileReader();
read.onload = function() {
if (!complete) {
console.log(read.result);
if (read.result.length === str.length) {
complete = true;
console.log(data);
}
}
}
read.readAsText(data);
}
}
reader.addEventListener("load", handleFile);
reader.addEventListener("loadend", handleRead);
reader.addEventListener("progress", handleProgress);
reader.readAsArrayBuffer(data);

Javascript Array indexFirst

I built a prototype that handles pages. Adding pages (push) works, but when I try to get the data back, it fails:
var foundImageIndex = Pages.indexFirst(function (item) { if (item.PageID == PageID) return true; });
Here is the JavaScript page handler:
var Pages = new Array();
PageContainer = function () //constructor for the proxy
{
// this._baseURL = url;
};
PageContainer.prototype =
{
AddPage: function (data) {
if (data == null) return;
Pages.push({ PageID: data.PageID, SegmentID: data.SegmentID });
},
GetPage: function (PageID) {
alert('getPage('+PageID+')=' + JSON.stringify(Pages));
var foundImageIndex = Pages.indexFirst(function (item) { if (item.PageID == PageID) return true; });
var dt = { PageID: Pages[foundImageIndex].PageID, SegmentID: Pages[foundImageIndex].SegmentID };
return dt;
}
};
I call it from another JS file as follows:
var gPageContainer = new PageContainer();
for (var i = 0; i < SegStruct.SegmentsCount; i++) {
var segRClass = //get from webservice
gPageContainer.AddPage({ PageID: i, SegmentID: segRClass.SegmentID });
}
I'm trying to call gPageContainer.GetPage(1); but it fails in GetPage: function (PageID), which returns -1 in:
var foundImageIndex = Pages.indexFirst(function (item) { if (item.PageID == PageID) return true; });
foundImageIndex is always -1. Why?
Simply add the following before the constructor:
if (typeof Array.prototype.indexFirst == 'undefined') {
Array.prototype.indexFirst = function (validator) {
for (var i = 0; i <= this.length - 1; i++) {
if (validator(this[i])) {
return i;
}
}
return -1;
};
}
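For what it's worth, modern environments already ship Array.prototype.findIndex (ES2015), which behaves like indexFirst, so the polyfill is only needed for very old browsers. A usage sketch with the Pages array from the question:
var foundImageIndex = Pages.findIndex(function (item) {
  return item.PageID == PageID; // PageID comes from the enclosing GetPage call
});
if (foundImageIndex === -1) {
  // no matching page: handle the miss instead of reading Pages[-1]
}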
