JavaScript FileReader - how to parse a long file in chunks?

Initially, I implemented the file loading like this:
export function convertFilesToByteArray(e) {
  const MAX_FILE_SIZE = 1024 * 1024 * 50; // 50MB
  const files = Object.keys(e.target.files);
  const asyncReadFile = eachFile =>
    new Promise((resolve, reject) => {
      if (e.target.files[eachFile].size > MAX_FILE_SIZE) {
        return reject([{ message: `File ${e.target.files[eachFile].name} too large` }]);
      }
      const reader = new FileReader();
      const targetFileInfo = {
        contentType: e.target.files[eachFile].type,
        filename: e.target.files[eachFile].name,
      };
      reader.readAsArrayBuffer(e.target.files[eachFile]);
      reader.onload = () => {
        resolve({ ...targetFileInfo, body: Array.from(new Uint8Array(reader.result)) });
      };
      reader.onerror = error => reject(error);
    });
  return Promise.all(files.map(asyncReadFile));
}
Here the files constant holds the keys of e.target.files, and I apply the read function to each of them. Then I receive the file(s) in the component:
handleFileUpload = (e) => {
  convertFilesToByteArray(e)
    .then((result) => {
      runInAction(() => {
        this.files = [
          ...this.files,
          ...result,
        ];
      });
    })
    .catch(err => runInAction(() => {
      this.errors = [...this.errors, err[0].message];
    }));
}
The results are appended to this.files, so in the end this.files looks like [{ contentType: 'plain/text', filename: 'blabla', body: [123, 456, 23, ...] }], where [123, 456, 23, ...] is the byte array built from the ArrayBuffer.
But with this approach, despite using Promise.all, the page freezes whenever I load files heavier than about 2 MB: it becomes impossible to interact with it in any way (although I can still scroll). The only fix I have been able to think of is splitting each file into chunks.
OK, so I tried to rewrite the code to use chunks:
export function convertFilesToByteArray(e) {
  const MAX_FILE_SIZE = 1024 * 1024 * 50; // 50MB
  const files = Object.keys(e.target.files);
  const asyncReadFile = eachFile =>
    new Promise((resolve, reject) => {
      if (e.target.files[eachFile].size > MAX_FILE_SIZE) {
        return reject([{ message: `File ${e.target.files[eachFile].name} too large` }]);
      }
      const file = e.target.files[eachFile];
      let offset = 0;
      console.log(offset, 'offset', file.size, 'size');
      const defaultChunkSize = 64 * 1024; // bytes
      const fileReader = new FileReader();
      const blob = file.slice(offset, offset + defaultChunkSize);
      const isEndOfFile = () => offset >= file.size;
      const testEndOfFile = () => {
        if (isEndOfFile()) {
          console.log('Done reading file');
        }
      };
      fileReader.readAsArrayBuffer(blob);
      fileReader.onloadend = (event) => {
        const target = (event.target);
        if (target.error == null) {
          const result = target.result;
          offset += result.length;
          testEndOfFile();
          console.log(result, 'result');
          resolve(result);
        } else {
          reject(target.error);
        }
      };
    });
  return Promise.all(files.map(asyncReadFile));
}
Here I take the file and slice one chunk off it. But the problem is that if the file is larger than a single chunk, I have to keep reading and then stitch the pieces back together, and I can't figure out how to do that in my case.
Please help me :) What do I need to do to read the file in chunks and still receive it as an ArrayBuffer?
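For reference, here is one way to finish the chunked version (a minimal sketch, not the poster's code: the function name readFileInChunks and the resolve-to-Uint8Array shape are my own choices). The idea is to read the chunks sequentially, advance the offset by each chunk's byteLength, copy every piece into a preallocated Uint8Array the size of the whole file, and only resolve once the offset reaches the end of the file:
function readFileInChunks(file, chunkSize = 64 * 1024) {
  return new Promise((resolve, reject) => {
    const result = new Uint8Array(file.size); // holds the whole file
    let offset = 0;
    const reader = new FileReader();

    const readNextChunk = () => {
      const blob = file.slice(offset, offset + chunkSize);
      reader.readAsArrayBuffer(blob);
    };

    reader.onload = () => {
      const chunk = new Uint8Array(reader.result);
      result.set(chunk, offset);    // copy the chunk into its position
      offset += chunk.byteLength;   // byteLength, not length: the result is an ArrayBuffer
      if (offset >= file.size) {
        resolve(result);            // whole file, reassembled
      } else {
        readNextChunk();            // keep reading until the end
      }
    };
    reader.onerror = () => reject(reader.error);

    readNextChunk();
  });
}
That said, chunking alone will not stop the page from freezing if the expensive step still happens in one piece: in the first snippet it is Array.from(new Uint8Array(...)) over millions of bytes that blocks the main thread, so keeping the body as a Uint8Array (or moving the conversion into a Web Worker) is usually what actually removes the freeze.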

Related

Getting a PDF file from Express and treating it as if it came from an input

I am trying to make a component that takes a PDF either from an input or from an already uploaded file, extracts pages from it, and uploads the result again.
When choosing a file from the input (i.e. choosing a file from my computer), I am using this:
const handleFileChange = async (event) => {
  const file = event.target.files[0];
  setFiles(event.target.files[0]);
  const fileName = event.target.files[0].name;
  setFileName(fileName);
  const fileReader = new FileReader();
  fileReader.onload = async () => {
    const pdfBytes = new Uint8Array(fileReader.result);
    const pdfDoc = await PDFDocument.load(pdfBytes);
    setPdfDoc(pdfDoc);
    setPdfBlob(pdfBytes);
  };
  fileReader.readAsArrayBuffer(file);
  setShowPdf(true);
};
This gives me a pdfDoc and a Uint8Array. I then use the pdfDoc to get pages and extract a new PDF file, and this works fine.
Now, when selecting a file that was already uploaded, I use this to call the API and fetch the file:
const handleGetFile = async (url) => {
  const headers = {
    Authorization: "Bearer " + (localStorage.getItem("token")),
    Accept: 'application/pdf'
  };
  await axios.put(`${process.env.NEXT_PUBLIC_API_URL}getPdfFileBlob`, {
    pdfUrl: `https://handle-pdf-photos-project-through-compleated-task.s3.amazonaws.com/${url}`
  }, { responseType: 'arraybuffer', headers }).then((res) => {
    const handlePdf = async () => {
      const uint8Array = new Uint8Array(res.data);
      const pdfBlob = new Blob([uint8Array], { type: 'application/pdf' });
      setPdfBlob(uint8Array);
      // setPdfDoc(pdfBlob) .....? how do i create a pdf doc from the unit8array
    };
    handlePdf();
  }).catch((err) => {
    console.log(err);
  });
};
This is the endpoint I am calling:
app.put('/getPdfFileBlob', async function (req, res) {
  try {
    console.log(req.body.pdfUrl);
    const url = req.body.pdfUrl;
    const fileName = 'file.pdf';
    const file = fs.createWriteStream(fileName);
    https.get(url, (response) => {
      response.pipe(file);
      file.on('finish', () => {
        file.close();
        // Serve the file as a response
        const pdf = fs.readFileSync(fileName);
        res.setHeader('Content-Type', 'application/pdf');
        res.setHeader('Content-Transfer-Encoding', 'Binary');
        res.setHeader('Content-Disposition', 'inline; filename="' + fileName + '"');
        res.send(pdf);
      });
    });
  } catch (error) {
    res.status(500).json({ success: false, msg: "server side err" });
  }
});
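As an aside (not part of the original question), the temporary file on disk is not strictly necessary here; the upstream response can be piped straight through to the client, along these lines:
app.put('/getPdfFileBlob', async function (req, res) {
  try {
    const url = req.body.pdfUrl;
    https.get(url, (response) => {
      res.setHeader('Content-Type', 'application/pdf');
      res.setHeader('Content-Disposition', 'inline; filename="file.pdf"');
      response.pipe(res); // stream the PDF straight back to the caller
    });
  } catch (error) {
    res.status(500).json({ success: false, msg: "server side err" });
  }
});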
After getting the file, here is what I am trying to do:
const handlePageSelection = (index) => {
  setSelectedPages(prevSelectedPages => {
    const newSelectedPages = [...prevSelectedPages];
    const pageIndex = newSelectedPages.indexOf(index);
    if (pageIndex === -1) {
      newSelectedPages.push(index);
    } else {
      newSelectedPages.splice(pageIndex, 1);
    }
    return newSelectedPages;
  });
};

const handleExtractPages = async () => {
  for (let i = pdfDoc.getPageCount() - 1; i >= 0; i -= 1) {
    if (!selectedPages.includes(i + 1)) {
      pdfDoc.removePage(i);
    }
  }
  await pdfDoc.save();
};
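One thing worth noting (not raised in the original post): pdf-lib's save() resolves to a Uint8Array containing the new document, so its result usually needs to be captured to do anything with the extracted pages, for example:
const extractedBytes = await pdfDoc.save(); // Uint8Array of the trimmed document
const extractedBlob = new Blob([extractedBytes], { type: 'application/pdf' });
// extractedBlob can now be previewed, downloaded, or uploaded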
In the first case, where I upload the PDF file from local storage, I get a pdfDoc (the console shows both pdfDoc and pdfBlob).
But when I select an already existing file, I can't find a way to turn the Uint8Array buffer into a PDF document (the log shows the pdfBlob and no pdfDoc).
What I want is to transform the pdfBlob into a PDFDocument, or to get the PDF document from the array buffer, so I can call getPages on it.
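Assuming the same pdf-lib PDFDocument import used in handleFileChange, its load method accepts a Uint8Array (or ArrayBuffer) directly, so the bytes returned by the endpoint can become a document without going through a Blob first. A sketch of what handlePdf could look like:
const handlePdf = async () => {
  const uint8Array = new Uint8Array(res.data);
  // PDFDocument.load accepts a Uint8Array / ArrayBuffer directly
  const pdfDoc = await PDFDocument.load(uint8Array);
  setPdfBlob(uint8Array);
  setPdfDoc(pdfDoc); // getPageCount() / getPages() now work exactly as in the upload case
};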

How to convert Excel to JSON in JS - I have a problem with split lines

I have an Excel file that I upload in React using the XLSX library (this is my original Excel file), and I am trying to produce something like this:
items = [
  {
    "CJ Order Number": 14,
    "Your Order Number": 100101,
    "SKU": {
      CJNSXZHL: 1,
      CJNSX: 1
    },
  },
  {
    "CJ Order Number": 15,
    "Your Order Number": 100102,
    "SKU": {
      CJNSXZN: 1, // 1 is the quantity
      CJNS: 1
    },
  },
  {
    "CJ Order Number": 16,
    "Your Order Number": 100103,
    "SKU": {
      CJNSX: 1,
      CJNS: 1
    },
  },
  {
    "CJ Order Number": 17,
    "Your Order Number": 100103,
    "SKU": {
      CJNSTX: 2
    },
  }
]
but I get something like this instead:
[
  {
    "CJ Order Number": '14',
    "Your Order Number": '100101',
    "SKU": 'CJNSXZHL',
    "CJ Quantity": '1'
  },
  {
    "SKU": 'CJNSXL',
    "CJ Quantity": '1'
  }
]
My code is:
const [items, setItems] = useState([]);

const readExcel = (file) => {
  const promise = new Promise((resolve, reject) => {
    const fileReader = new FileReader();
    fileReader.readAsArrayBuffer(file);
    fileReader.onload = (e) => {
      const bufferArray = e.target.result;
      const wb = XLSX.read(bufferArray, { type: "buffer" });
      const wsname = wb.SheetNames[0];
      const ws = wb.Sheets[wsname];
      const data = XLSX.utils.sheet_to_json(ws);
      resolve(data);
    };
    fileReader.onerror = (error) => {
      reject(error);
    };
  });
  promise.then((d) => {
    setItems(d);
  });
};
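For context, readExcel is typically wired to a file input along these lines (a sketch; this markup is not from the original post):
<input
  type="file"
  accept=".xlsx, .xls"
  onChange={(e) => {
    const file = e.target.files[0];
    readExcel(file);
  }}
/>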
I compare the products a customer wants to buy against my inventory. A customer may order two products under a single order number, so I need to process the Excel data in a way that assigns every duplicate line to the same person and order, instead of treating it as a separate line, which would be a mistake.
I solved the problem using the following code; I hope it helps other people who have had difficulty with this. It continues the code above:
const [items, setItems] = useState([]);
// ...
setItems(d);

const arr = []; // collects the grouped orders
for (let i = 0; i <= items.length - 1; i++) {
  try {
    if (items[i].Orders) {
      // a row with an order number starts a new entry
      arr.push({
        "OrderNumber": items[i].Orders,
        "Address": items[i].Address,
        "SKU": [items[i].SKU],
        "Quantity": [parseInt(items[i].Quantity)],
        "Province": items[i].Province,
        "Product_Name": [items[i].Product_Name],
        "Name": items[i].Name,
        "City": items[i].City,
        "ZipCode": items[i].ZipCode,
        "Phone": items[i].Phone,
      });
    } else {
      // a row without an order number belongs to the previous entry
      arr[arr.length - 1].SKU.push(items[i].SKU);
      arr[arr.length - 1].Quantity.push(parseInt(items[i].Quantity));
      arr[arr.length - 1].Product_Name.push(items[i].Product_Name);
    }
  } catch (err) {
    console.log("err", err);
  }
}
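For reference, the same grouping can also be written as a single reduce over the rows that sheet_to_json produces; this is only a sketch and assumes the same column names (Orders, SKU, Quantity) as above:
const groupOrders = (rows) =>
  rows.reduce((acc, row) => {
    if (row.Orders) {
      // a row with an order number starts a new order
      acc.push({
        OrderNumber: row.Orders,
        SKU: [row.SKU],
        Quantity: [parseInt(row.Quantity)],
      });
    } else if (acc.length > 0) {
      // a row without one belongs to the most recent order
      const last = acc[acc.length - 1];
      last.SKU.push(row.SKU);
      last.Quantity.push(parseInt(row.Quantity));
    }
    return acc;
  }, []);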

Firebase React Native Expo file upload and download - issue resolving promises

I'm having a problem where the array is not filled in; I think it's something to do with the promises resolving.
const UploadFile = async ({
  imageName = `${Date.now()}`,
  imageUris,
  imageFolder = '',
  metadata,
}: IFile) => {
  if (imageUris) {
    const promises: any[] = [];
    const imageURLs: string[] = [];
    imageUris.forEach(async (uri) => {
      const randomNumber = Randomize('0', 10);
      const finalImageName = `${Slugify(imageName)}`.toLowerCase();
      const imagePath = `${imageFolder}/${finalImageName}-${randomNumber}`;
      const imageRef = storageRef.child(imagePath);
      const blob = (await fetch(uri)).blob();
      const uploadTask = imageRef.put(await blob, metadata);
      uploadTask.on(
        firebase.storage.TaskEvent.STATE_CHANGED,
        (snapshot) => {
          const progress =
            (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
          console.log('Upload is ' + progress + '% done');
        },
        (error) => console.log('Error:', error),
        () => {
          uploadTask.snapshot.ref.getDownloadURL().then((downloadURL) => {
            console.log('File available at', downloadURL);
            imageURLs.push(downloadURL);
          });
        },
      );
      promises.push(uploadTask);
    });
    // Not sure promise is resolving
    Promise.all(promises).then((i) => {
      console.log('All files uploaded', i);
    });
    Promise.all(imageURLs).then((i) => {
      console.log('All imageURLs', i);
    });
  }
}
Output:
Retrieved listings
All files uploaded Array []
All imageURLs Array []
imageURLs Contents undefined
Upload is 0% done
Upload is 0% done
Upload is 100% done
File available at https://firebasestorage.googleapis.com/v0/b/wrecknet-ab69d.appspot.com/o/listings%2Fcar-5701393331?alt=media&token=ccfda911-36fb-4305-b6d7-0ee06fc824e1
Listing was successfully created
Upload is 100% done
File available at https://firebasestorage.googleapis.com/v0/b/wrecknet-ab69d.appspot.com/o/listings%2Fcar-4491812919?alt=media&token=03f72706-4201-4652-9172-8bcefaeb3e1f
As you can see, the "All files uploaded Array []" and "All imageURLs Array []" arrays are empty; I suspect the promises are not resolving.
As far as I know you can either listen to the on() of the UploadTask or to its then(), but not to both. Luckily you don't do anything meaningful in the on handling. Note also that because the forEach callback is async, the pushes into promises only happen after an await, so Promise.all can run over a still-empty array; collecting the upload promises with map avoids that. The entire code can then be simplified down to:
const UploadFile = async ({
  imageName = `${Date.now()}`,
  imageUris,
  imageFolder = '',
  metadata,
}: IFile) => {
  if (imageUris) {
    // map (not forEach) so the promises are all collected before Promise.all runs
    const promises = imageUris.map(async (uri) => {
      const randomNumber = Randomize('0', 10);
      const finalImageName = `${Slugify(imageName)}`.toLowerCase();
      const imagePath = `${imageFolder}/${finalImageName}-${randomNumber}`;
      const imageRef = storageRef.child(imagePath);
      const blob = await (await fetch(uri)).blob();
      const snapshot = await imageRef.put(blob, metadata);
      // resolve each promise to the file's download URL
      return snapshot.ref.getDownloadURL();
    });
    const imageURLs = await Promise.all(promises);
    console.log('All imageURLs', imageURLs);
  }
}

Node.js process exiting in the middle, with no error (using streams)

I'm writing a Lambda function which is given a list of text files on S3, and concatenates them together, and then zips that resulting file. For some reason, the function is bombing out in the middle of the process, with no errors.
The payload sent to the Lambda func looks like this:
{
  "sourceFiles": [
    "s3://bucket/largefile1.txt",
    "s3://bucket/largefile2.txt"
  ],
  "destinationFile": "s3://bucket/concat.zip",
  "compress": true,
  "omitHeader": false,
  "preserveSourceFiles": true
}
The scenarios in which this function works totally fine:
The two files are small, and compress === false
The two files are small, and compress === true
The two files are large, and compress === false
If I try to have it compress two large files, it quits in the middle. The concatenation process itself works fine, but when it tries to use zip-stream to add the stream to an archive, it fails.
The two large files together are 483,833 bytes. When the Lambda function fails, it reads either 290,229 or 306,589 bytes (it's random) then quits.
This is the main entry point of the function:
const packer = require('zip-stream');
const S3 = require('aws-sdk/clients/s3');
const s3 = new S3({ apiVersion: '2006-03-01' });
const { concatCsvFiles } = require('./csv');
const { s3UrlToParts } = require('./utils');

function addToZip(archive, stream, options) {
  return new Promise((resolve, reject) => {
    archive.entry(stream, options, (err, entry) => {
      console.log('entry done', entry);
      if (err) reject(err);
      resolve(entry);
    });
  });
}

export const handler = async event => {
  /**
   * concatCsvFiles returns a readable stream to pass to either the archiver or
   * s3.upload.
   */
  let bytesRead = 0;
  try {
    const stream = await concatCsvFiles(event.sourceFiles, {
      omitHeader: event.omitHeader,
    });
    stream.on('data', chunk => {
      bytesRead += chunk.length;
      console.log('read', bytesRead, 'bytes so far');
    });
    stream.on('end', () => {
      console.log('this is never called :(');
    });
    const dest = s3UrlToParts(event.destinationFile);
    let archive;
    if (event.compress) {
      archive = new packer();
      await addToZip(archive, stream, { name: 'concat.csv' });
      archive.finalize();
    }
    console.log('uploading');
    await s3
      .upload({
        Body: event.compress ? archive : stream,
        Bucket: dest.bucket,
        Key: dest.key,
      })
      .promise();
    console.log('done uploading');
    if (!event.preserveSourceFiles) {
      const s3Objects = event.sourceFiles.map(s3Url => {
        const { bucket, key } = s3UrlToParts(s3Url);
        return {
          bucket,
          key,
        };
      });
      await s3
        .deleteObjects({
          Bucket: s3Objects[0].bucket,
          Delete: {
            Objects: s3Objects.map(s3Obj => ({ Key: s3Obj.key })),
          },
        })
        .promise();
    }
    console.log('## Never gets here');
    // return {
    //   newFile: event.destinationFile,
    // };
  } catch (e) {
    if (e.code) {
      throw new Error(e.code);
    }
    throw e;
  }
};
And this is the concatenation code:
import MultiStream from 'multistream';
import { Readable } from 'stream';
import S3 from 'aws-sdk/clients/s3';
import { s3UrlToParts } from './utils';

const s3 = new S3({ apiVersion: '2006-03-01' });

/**
 * Takes an array of S3 URLs and returns a readable stream of the concatenated results
 * @param {string[]} s3Urls Array of S3 URLs
 * @param {object} options Options
 * @param {boolean} options.omitHeader Omit the header from the final output
 */
export async function concatCsvFiles(s3Urls, options = {}) {
  // Get the header so we can use the length to set an offset in grabbing files
  const firstFile = s3Urls[0];
  const file = s3UrlToParts(firstFile);
  const data = await s3
    .getObject({
      Bucket: file.bucket,
      Key: file.key,
      Range: 'bytes 0-512', // first 512 bytes is pretty safe for header size
    })
    .promise();
  const streams = [];
  const [header] = data.Body.toString().split('\n');
  for (const s3Url of s3Urls) {
    const { bucket, key } = s3UrlToParts(s3Url);
    const stream = s3
      .getObject({
        Bucket: bucket,
        Key: key,
        Range: `bytes=${header.length + 1}-`, // +1 for newline char
      })
      .createReadStream();
    streams.push(stream);
  }
  if (!options.omitHeader) {
    const headerStream = new Readable();
    headerStream.push(header + '\n');
    headerStream.push(null);
    streams.unshift(headerStream);
  }
  const combinedStream = new MultiStream(streams);
  return combinedStream;
}
Got it. The problem was actually with the zip-stream library. Apparently it doesn't work well with S3 + streaming. I tried yazl and it works perfectly.
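For anyone landing here with the same issue, a rough sketch of what the yazl-based replacement can look like under the question's setup (the surrounding variables such as stream and dest are the ones defined in the handler above; the exact wiring is an assumption, not the poster's final code):
const yazl = require('yazl');

// Build the zip from the concatenated CSV stream, then hand yazl's output
// stream to s3.upload in place of the zip-stream archive.
const stream = await concatCsvFiles(event.sourceFiles, { omitHeader: event.omitHeader });
const zipFile = new yazl.ZipFile();
zipFile.addReadStream(stream, 'concat.csv'); // add the readable stream as a zip entry
zipFile.end();                               // no more entries; start producing output

await s3
  .upload({
    Body: zipFile.outputStream, // the finished zip as a readable stream
    Bucket: dest.bucket,
    Key: dest.key,
  })
  .promise();
Since yazl's outputStream is an ordinary readable stream, s3.upload consumes it the same way it would have consumed the zip-stream archive.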

Angular 6 - Upload files

I am trying to post files (a video and a thumbnail) plus an object to the server, but I have an issue because of the structure the backend expects. When I do it from Postman, this is what it looks like, and it works:
{'name': ['Blabla'], 'user': ['8c3a636c-9d08-453d-9e59-7a0ec93200c4'], 'file': [<InMemoryUploadedFile: SampleVideo_1280x720_1mb.mp4 (video/mp4)>]}>
I am having trouble passing the file like this and don't know how to do it. I tried it like this:
videoFile: File[] = [];
thumbnailFile: File[] = [];
files: File[] = [];

readVideoUrl(event: any) {
  this.videoFile = [];
  const eventObj: MSInputMethodContext = <MSInputMethodContext> event;
  const target: HTMLInputElement = <HTMLInputElement> eventObj.target;
  const files: FileList = target.files;
  if (files) {
    this.videoFile.push(files[0]);
    this.videoModel.name = files[0].name;
  }
  if (event.target.files && event.target.files[0]) {
    const reader = new FileReader();
    reader.onload = (event: ProgressEvent) => {
      this.videoUrl = (<FileReader>event.target).result;
    };
    reader.readAsDataURL(event.target.files[0]);
  }
}

readThumbUrl(event: any) {
  this.thumbnailFile = [];
  const eventObj: MSInputMethodContext = <MSInputMethodContext> event;
  const target: HTMLInputElement = <HTMLInputElement> eventObj.target;
  const files: FileList = target.files;
  if (files) {
    this.thumbnailFile.push(files[0]);
  }
  if (event.target.files && event.target.files[0]) {
    const reader = new FileReader();
    reader.onload = (event: ProgressEvent) => {
      this.thumbUrl = (<FileReader>event.target).result;
    };
    reader.readAsDataURL(event.target.files[0]);
  }
}
I pass the model and files:
this.campaignService.createVideo(this.videoModel, this.files)
  .subscribe(
    (response: any) => {
    },
    (error) => {
      console.log(error);
    }
  );
And here is the issue: how can I create the structure from above with FormData? I used to do it like this:
postMultipart(url: string, data: any, files: File[]) {
  const formData: FormData = new FormData();
  // I understand this stringify is part of the issue,
  // just put the code as it is at the moment.
  formData.append('data', JSON.stringify(data));
  for (const file of files) {
    formData.append(file.name, file);
  }
  const result = this.http.post(url, formData)
    .pipe(map((response: Response) => {
      return response;
      // }
    }),
    catchError(response => this.handleError(response))
  );
  return result;
}
But this passes everything as strings, and the backend is not expecting that. How can I get this (this is just the video; the thumbnail is an image):
{'file': [<InMemoryUploadedFile: SampleVideo_1280x720_1mb.mp4 (video/mp4)>]}>
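Since, judging by the Postman dump above, the backend expects plain form fields named name and user plus the binary under file, one approach (a sketch, not the poster's code: the field name thumbnail and the helper name postVideo are assumptions) is to append each model property and each File individually instead of stringifying the whole model:
postVideo(url: string, model: any, videoFiles: File[], thumbnailFiles: File[]) {
  const formData = new FormData();
  // Append scalar fields individually so they arrive as regular form fields,
  // not as one JSON string.
  formData.append('name', model.name);
  formData.append('user', model.user);
  // Append the binaries under the field names the backend expects; the third
  // argument sets the filename sent with the multipart part.
  for (const video of videoFiles) {
    formData.append('file', video, video.name);
  }
  for (const thumb of thumbnailFiles) {
    formData.append('thumbnail', thumb, thumb.name);
  }
  // Do not set Content-Type manually; the browser adds the multipart boundary itself.
  return this.http.post(url, formData);
}
With FormData the files arrive on the backend as uploaded files rather than strings, which matches the structure shown in the Postman dump.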
