I am using the xlsx library to parse .xls files.
I am having an issue with the date format being different after parsing the file and would like to know why, and perhaps how to fix it.
Here is an example file that I have been working with; it is only 3 lines long.
Here is the code I have so far:
import React, { Component } from 'react';
import { render } from 'react-dom';
import XLSX from "xlsx";
import './style.css';

class App extends Component {
  constructor() {
    super();
    this.state = {
      name: 'React'
    };
  }

  xslToJson = workbook => {
    var data = [];
    var sheet_name_list = workbook.SheetNames;
    return XLSX.utils.sheet_to_json(workbook.Sheets[sheet_name_list[0]], {raw: false});
  };

  handleFile = (file /*:File*/) => {
    /* Boilerplate to set up FileReader */
    const reader = new FileReader();
    const rABS = !!reader.readAsBinaryString;
    reader.onload = e => {
      /* Parse data */
      const bstr = e.target.result;
      const wb = XLSX.read(bstr, { type: rABS ? "binary" : "array" });
      /* Get first worksheet */
      let arr = this.xslToJson(wb);
      console.log(arr);
    };
    if (rABS) reader.readAsBinaryString(file);
    else reader.readAsArrayBuffer(file);
  };

  handleChange = (e) => {
    const files = e.target.files;
    if (files && files[0]) {
      this.handleFile(files[0]);
    }
  };

  render() {
    return (
      <div>
        <input
          type="file"
          onChange={this.handleChange}
          className="inputfile"
          id="embedpollfileinput"
        />
      </div>
    );
  }
}

render(<App />, document.getElementById('root'));
In my xls file the dates are in DD/MM/YYYY format, but when I parse the file they show up as MM/DD/YY.
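A possible fix (this is my assumption based on the SheetJS parse and convert options, not something confirmed for this exact file): with raw: false, sheet_to_json renders date cells with the library's default m/d/yy format, so parsing with cellDates: true and passing an explicit dateNF format string should keep DD/MM/YYYY.

xslToJson = workbook => {
  var sheet_name_list = workbook.SheetNames;
  // dateNF (assumed here) controls how date cells are rendered when raw is false
  return XLSX.utils.sheet_to_json(workbook.Sheets[sheet_name_list[0]], {
    raw: false,
    dateNF: "dd/mm/yyyy"
  });
};

// and in handleFile, parse date cells as Date objects instead of raw serial numbers:
const wb = XLSX.read(bstr, { type: rABS ? "binary" : "array", cellDates: true });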
The next function gets a file and sets it into a state object (arr: [readerEvent.target.result]).
It works fine when uploading one file,
and also with 2 and 3.
When I try to upload more than 3 files, only 3 are uploaded.
I can see via console.log that the full list of 5 files reaches the function.
Input:
<Input
  onChange={handleChange}
  type="file"
  // accept="image/png, image/jpeg"
  multiple
/>
----------------------------------------
Component:
const list = Object.keys(e.target.files).map((elm) => e.target.files[elm]);

list.map((file, index) => {
  loadFile(file, index, setImagesList);
});
---------------------------------------------------------------------------------------
Util:
export default function loadFile(file, index, setImagesList) {
  // console.log("another file ", file);
  let image = new Image();
  var reader = new FileReader();
  reader.onloadend = function (readerEvent) {
    image.src = readerEvent.target.result;
    image.onload = function () {
      setImagesList((old) => [
        ...old,
        {
          key: `${Date.now()}-${file.name}-${index}`,
          arr: [readerEvent.target.result],
          imageOriginalWidth: image.width,
          imageOriginalHeight: image.height,
        },
      ]);
    };
  };
  reader.onerror = function (event) {
    console.error("File could not be read! Code " + event.target.error.code);
  };
  reader.readAsDataURL(file);
}
OK,
I found a solution, so I will share it.
I now send the entire file list to the util function and handle it there.
In the util function I update a state that holds the candidate loaded files.
Only after a check do I set the "real" images list; that happens outside the util, inside the component:
useEffect(() => {
  uploaded.map((obj, index) => {
    if (isValid) {
      setImagesList((old) => [...old, obj]);
    }
  });
}, [uploaded]);
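For context, the state the effect relies on would look roughly like this (these declarations are my assumption and are not shown in the original post; isValid stands for whatever per-file check you run):

const [uploaded, setUploaded] = useState([]);     // raw results returned by the util
const [imagesList, setImagesList] = useState([]); // the "real" list, set only after the isValid check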
-----------------------------------
Util:
export default function loadFiles(files, setUploaded) {
  const reader = new FileReader();
  let arr = [];

  // Read the files one at a time: the next read is only started from the
  // previous image's onload, so the results never race each other.
  function readFile(index) {
    if (index >= files.length || index > 5) {
      setUploaded(arr);
      return;
    }
    const file = files[index];
    reader.onload = function (e) {
      let image = new Image();
      image.src = e.target.result;
      image.onload = function () {
        arr.push({
          key: `${Date.now()}-${file.name}-${index}`,
          name: file.name,
          arr: [e.target.result],
          imageOriginalWidth: image?.width,
          imageOriginalHeight: image?.height,
        });
        readFile(index + 1);
      };
    };
    reader.readAsDataURL(file);
  }

  readFile(0);
}
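For completeness, the component side would call it roughly like this (handleChange and setUploaded are my own illustrative names; they are not part of the original answer):

const handleChange = (e) => {
  // hand the whole file list to the util and let it drive the sequential reads
  loadFiles(Array.from(e.target.files), setUploaded);
};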
good luck!
I tried to read Excel files with Vue.js, but once I read the file the memory starts to skyrocket (around 5 GB of RAM), even though the Excel file is fairly small. Please help; I need to convert the file to JSON.
In the Vue method that handles the Excel file I tried every type option I saw in the documentation, but each one gives me a different error.
I saw a similar question here but still could not solve this.
When I tried:
base64: "TypeError: input.replace is not a function"
binary: "TypeError: x.charCodeAt is not a function"
string: "TypeError: data.match is not a function"
array: this is the one that makes the memory climb to 5 GB
Also, when I tried to use a new FileReader as shown in the documentation, the reader.onload function never ran.
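For context (this mapping is my reading of the SheetJS docs, not something stated in the thread), each type option expects a matching input shape, which is why mismatched inputs throw the errors above:

// type: 'binary' -> a binary string, e.g. from reader.readAsBinaryString(file)
// type: 'array'  -> an ArrayBuffer or Uint8Array, e.g. from reader.readAsArrayBuffer(file)
// type: 'base64' -> a base64-encoded string
// type: 'string' -> a plain JS string (text formats)
// Example of a matching pair (file is the File taken from the input's change event):
const reader = new FileReader()
reader.onload = (e) => {
  console.log(XLSX.read(new Uint8Array(e.target.result), { type: 'array' }).SheetNames)
}
reader.readAsArrayBuffer(file)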
For the actual template I tried two things.
When I use the buffer type it seems to work, but every function returns an empty array,
as if the file were empty, which it is not.
Both ways did the same thing:
<v-file-input
  v-on:change="displayFile($event)"
  v-model="file">
</v-file-input>

<input type="file" name="xlfile" id="xlf" v-on:change="displayFile($event)" />
displayFile: function (event) {
  // console.log(event.target.files[0])
  // const file = event.target.files[0]
  // const workbook = XLSX.read(file, {
  //   type: 'string'
  // })
  // console.log(workbook, workbook.SheetNames)
  // const res = XLSX.read(file)
  // console.log(res)
  // const res = XLSX.read(this.file)
  // console.log(res)
  console.log(this.file)
  this.file.text().then(text => {
    const fileType = this.file.type
    console.log(fileType)
    // this.PropceseMethod(this.file, fileType)
  })
  const reader = new FileReader()
  reader.onload = (data) => {
    console.log('HERE')
    console.log(data)
    const workbook = XLSX.read(data, {
      type: 'buffer'
    })
    console.log(workbook)
    workbook.SheetNames.forEach(function (sheetName) {
      console.log(sheetName)
      console.log(workbook.Sheets[sheetName])
      // Here is your object
      const XLRowObject = XLSX.utils.sheet_to_row_object_array(workbook.Sheets[sheetName])
      console.log(XLSX.utils.sheet_to_json(workbook.Sheets[sheetName]))
      console.log(XLRowObject)
      const jsonObject = JSON.stringify(XLRowObject)
      console.log(jsonObject)
    })
  }
  reader.onerror = function (ex) {
    console.log(ex)
  }
  reader.readAsText(this.file)
}
To manage this I had to change the way I read the file.
When I use readAsBinaryString it works, paired with type: 'binary' when calling XLSX.read.
Note that this function reads only the first sheet.
fileToJson (e) {
  const file = e.target.files[0]
  /* Boilerplate to set up FileReader */
  const reader = new FileReader()
  reader.onload = (e) => {
    /* Parse data */
    const bstr = e.target.result
    const wb = XLSX.read(bstr, { type: 'binary' })
    /* Get first worksheet */
    const wsname = wb.SheetNames[0]
    const ws = wb.Sheets[wsname]
    /* Convert array of arrays */
    const data = XLSX.utils.sheet_to_json(ws, { header: 1 })
    /* Update state */
    this.data = data
    const header = data.shift()
  }
  reader.readAsBinaryString(file)
}
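Since readAsBinaryString is marked as deprecated in some environments, an equivalent sketch using readAsArrayBuffer with type: 'array' should also work (this variant is my assumption based on the SheetJS docs, not part of the original answer):

fileToJson (e) {
  const file = e.target.files[0]
  const reader = new FileReader()
  reader.onload = (e) => {
    // hand SheetJS the raw bytes instead of a binary string
    const data = new Uint8Array(e.target.result)
    const wb = XLSX.read(data, { type: 'array' })
    const ws = wb.Sheets[wb.SheetNames[0]]
    this.data = XLSX.utils.sheet_to_json(ws, { header: 1 })
  }
  reader.readAsArrayBuffer(file)
}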
This code worked for me in a Vue CLI App:
// Important: the import statement must reference the full.min.js build specifically.
import XLSX from '../../../node_modules/xlsx/dist/xlsx.full.min.js'
var reader = new FileReader()
reader.onload = function (e) {
  var data = e.target.result
  var workbook = XLSX.read(data, { type: 'binary' })
  let sheetName = workbook.SheetNames[0]
  let worksheet = workbook.Sheets[sheetName]
  let rowObject = XLSX.utils.sheet_to_row_object_array(worksheet)
  const finalJsonData = JSON.stringify(rowObject, undefined, 4)
  console.log(finalJsonData)
}
reader.readAsBinaryString(this.excelFile)
With my final JSON Output as:
[
  {
    "email": "test5#test.com",
    "password": "password",
    "full_name": "Some Name 5",
    "mobile": 9897675463
  },
  {
    "email": "test6#test.com",
    "password": "password",
    "full_name": "Some Name 6",
    "mobile": 9897675463
  },
  ...
]
And my Excel file has the corresponding email, password, full_name, and mobile columns.
Initially, I implemented the file loading like this:
export function сonvertFilesToByteArray(e) {
  const MAX_FILE_SIZE = 1024 * 1024 * 50; // 50MB
  const files = Object.keys(e.target.files);
  const asyncReadFile = eachFile =>
    new Promise((resolve, reject) => {
      if (e.target.files[eachFile].size > MAX_FILE_SIZE) {
        return reject([{ message: `File ${e.target.files[eachFile].name} too large` }]);
      }
      const reader = new FileReader();
      const targetFileInfo = {
        contentType: e.target.files[eachFile].type,
        filename: e.target.files[eachFile].name,
      };
      reader.readAsArrayBuffer(e.target.files[eachFile]);
      reader.onload = () => {
        resolve({ ...targetFileInfo, body: Array.from(new Uint8Array(reader.result)) });
      };
      reader.onerror = error => reject(error);
    });
  return Promise.all(files.map(asyncReadFile));
}
Here, in the files constant, I take the keys of all my files and apply a function to each of them.
And then I receive my file(s) in the component:
handleFileUpload = (e) => {
  сonvertFilesToByteArray(e)
    .then((result) => {
      runInAction(() => {
        this.files = [
          ...this.files,
          ...result,
        ];
      });
    })
    .catch(err => runInAction(() => {
      this.errors = [...this.errors, err[0].message];
    }));
}
The results are put into this.files, so in the end this.files looks like [{contentType: 'plain/text', filename: 'blabla', body: [123, 456, 23, ...] }],
where [123, 456, 23, ...] is my ArrayBuffer converted to a plain array.
But with this approach, despite using Promise.all, the page freezes while loading files heavier than about 2 MB and it becomes impossible to interact with it in any way (though I can still scroll). The only idea that has come to mind to fix the situation is to split each file into chunks.
OK, I tried to rewrite the code with chunks:
export function сonvertFilesToByteArray(e) {
  const MAX_FILE_SIZE = 1024 * 1024 * 50; // 50MB
  const files = Object.keys(e.target.files);
  const asyncReadFile = eachFile =>
    new Promise((resolve, reject) => {
      if (e.target.files[eachFile].size > MAX_FILE_SIZE) {
        return reject([{ message: `File ${e.target.files[eachFile].name} too large` }]);
      }
      const file = e.target.files[eachFile];
      let offset = 0;
      console.log(offset, 'offset', file.size, 'size');
      const defaultChunkSize = 64 * 1024; // bytes
      const fileReader = new FileReader();
      const blob = file.slice(offset, offset + defaultChunkSize);
      const isEndOfFile = () => offset >= file.size;
      const testEndOfFile = () => {
        if (isEndOfFile()) {
          console.log('Done reading file');
        }
      };
      fileReader.readAsArrayBuffer(blob);
      fileReader.onloadend = (event) => {
        const target = (event.target);
        if (target.error == null) {
          const result = target.result;
          offset += result.length;
          testEndOfFile();
          console.log(result, 'result');
          resolve(result);
        } else {
          reject(target.error);
        }
      };
    });
  return Promise.all(files.map(asyncReadFile));
}
Here I receive the file and slice off a chunk of it. But the problem is that if the file is bigger than one chunk, I need to stitch the chunks back together again, and I cannot work out how to do that in my case.
Please help me :) What do I need to do to read the file in chunks and still receive it as a single ArrayBuffer?
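A minimal sketch of one way to do it (my own illustration, not a confirmed answer from the thread): read the chunks sequentially, copy each one into a preallocated Uint8Array at the current offset, and resolve with the combined buffer once the offset reaches the file size. Note that an ArrayBuffer's size is byteLength, not length.

function readFileInChunks(file, chunkSize = 64 * 1024) {
  return new Promise((resolve, reject) => {
    const combined = new Uint8Array(file.size); // will hold the whole file
    const reader = new FileReader();
    let offset = 0;

    const readNextChunk = () => {
      const blob = file.slice(offset, offset + chunkSize);
      reader.readAsArrayBuffer(blob);
    };

    reader.onload = (event) => {
      const chunk = new Uint8Array(event.target.result);
      combined.set(chunk, offset);  // copy this chunk into place
      offset += chunk.byteLength;   // byteLength, not length
      if (offset >= file.size) {
        resolve(combined.buffer);   // the reassembled ArrayBuffer
      } else {
        readNextChunk();            // keep going until the end of the file
      }
    };

    reader.onerror = () => reject(reader.error);
    readNextChunk();
  });
}

Used as readFileInChunks(e.target.files[eachFile]).then(buffer => ...), this could replace the single-chunk read inside asyncReadFile above.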
I am trying to post files (a video and a thumbnail) and an object to the server, but I have an issue because of the structure the backend expects. When I do it from Postman, this is how it looks, and it works:
{'name': ['Blabla'], 'user': ['8c3a636c-9d08-453d-9e59-7a0ec93200c4'], 'file': [<InMemoryUploadedFile: SampleVideo_1280x720_1mb.mp4 (video/mp4)>]}>
I am having trouble passing the file like this and don't know how to do it. I tried the following:
videoFile: File[] = [];
thumbnailFile: File[] = [];
files: File[] = [];

readVideoUrl(event: any) {
  this.videoFile = [];
  const eventObj: MSInputMethodContext = <MSInputMethodContext> event;
  const target: HTMLInputElement = <HTMLInputElement> eventObj.target;
  const files: FileList = target.files;
  if (files) {
    this.videoFile.push(files[0]);
    this.videoModel.name = files[0].name;
  }
  if (event.target.files && event.target.files[0]) {
    var reader = new FileReader();
    reader.onload = (event: ProgressEvent) => {
      this.videoUrl = (<FileReader>event.target).result;
    }
    reader.readAsDataURL(event.target.files[0]);
  }
}

readThumbUrl(event: any) {
  this.thumbnailFile = [];
  const eventObj: MSInputMethodContext = <MSInputMethodContext> event;
  const target: HTMLInputElement = <HTMLInputElement> eventObj.target;
  const files: FileList = target.files;
  if (files) {
    this.thumbnailFile.push(files[0]);
  }
  if (event.target.files && event.target.files[0]) {
    var reader = new FileReader();
    reader.onload = (event: ProgressEvent) => {
      this.thumbUrl = (<FileReader>event.target).result;
    }
    reader.readAsDataURL(event.target.files[0]);
  }
}
I pass the model and files:
this.campaignService.createVideo(this.videoModel, this.files)
  .subscribe(
    (response: any) => {
    },
    (error) => {
      console.log(error);
    }
  );
And here is the issue: how can I create the structure above with FormData? I used to do it like this:
postMultipart(url: string, data: any, files: File[]) {
  const formData: FormData = new FormData();
  // I understand this stringify is part of the issue,
  // just put the code as it is at the moment.
  formData.append('data', JSON.stringify(data));
  for (const file of files) {
    formData.append(file.name, file);
  }
  const result = this.http.post(url, formData)
    .pipe(
      map((response: Response) => {
        return response;
        // }
      }),
      catchError(response => this.handleError(response))
    );
  return result;
}
But this passes everything as a string, and the backend is not expecting that. How can I get this (this is just the video; the thumbnail is an image):
{'file': [<InMemoryUploadedFile: SampleVideo_1280x720_1mb.mp4 (video/mp4)>]}>
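A sketch of one way to get that shape (my own assumption based on the Postman payload above; the field names 'name', 'user' and 'file' come from that example and may need adjusting): append each plain field under its own key and append the raw File objects under the 'file' key, instead of stringifying the whole object:

postMultipart(url: string, data: any, files: File[]) {
  const formData: FormData = new FormData();
  // each scalar field becomes its own form field, matching the Postman request
  Object.keys(data).forEach(key => formData.append(key, data[key]));
  // the raw File objects go under the key the backend reads ('file' here)
  for (const file of files) {
    formData.append('file', file, file.name);
  }
  return this.http.post(url, formData)
    .pipe(catchError(response => this.handleError(response)));
}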
I am trying to load a JSON file from local disk and use its data to fill a FabricJS canvas. I am having problems getting the data out of the file.
This is what I have so far.
app.html
<input type="file" accept=".json" id="fileInput" (change)="loadFile($event)"/>
app.ts
loadFile(event) {
  const eventObj: MSInputMethodContext = <MSInputMethodContext> event;
  const target: HTMLInputElement = <HTMLInputElement> eventObj.target;
  const files: FileList = target.files;
  this.file = files[0];

  const reader = new FileReader();
  reader.readAsText(this.file, 'utf8');

  this.canvas.loadFromJSON(this.file, this.canvas.renderAll.bind(this.canvas), function (o, object) {
    console.log(o, object);
  });
}
Any thoughts on how I can make this work?
FileReader has an async API.
You must register a callback for the onload event to get the data.
loadFile(event) {
  const eventObj: MSInputMethodContext = <MSInputMethodContext> event;
  const target: HTMLInputElement = <HTMLInputElement> eventObj.target;
  const files: FileList = target.files;
  this.file = files[0];

  const reader = new FileReader();
  reader.readAsText(this.file, 'utf8');
  // arrow function so `this` still refers to the component inside the callback
  reader.onload = () => {
    this.canvas.loadFromJSON(reader.result, this.canvas.renderAll.bind(this.canvas), function (o, object) {
      console.log(o, object);
    });
  };
}