Downloading a Large Number of Files in the Frontend using jszip - javascript

Unable to zip all the files using JSZip. As the console snapshot shows, JSZip is reading all 402 files from around 143 requests, but zipping only 143 files. I am using parallelLimit to process multiple async requests simultaneously and cleanly. How can we get all 402 files in the result?
private downloadUntouchedFiles = () => {
  let requestObjectInfo = [];
  let index = 0;
  this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: true });
  this._eligibilitySubmissionInstance.getUntouchedFiles(this.state.filterObject).then((requests) => {
    debugger;
    if (!(!requests)) {
      if (requests.length > 0) {
        var zip = new JSZip();
        var zipFileName = "ES_Unviewed_Files";
        var promises = [];
        this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: true });
        const downloadSubPromises = [];
        let i = 0;
        requests.forEach((req) => {
          req.Folder.Files.forEach(f => {
            f.Name = this.state.initials + '_' + this.state.userId + '_' + f.Name;
            console.log(f.Name);
            i++;
            console.log(i);
            downloadSubPromises.push((submit: any) => {
              JSZipUtils.getBinaryContent(f.ServerRelativeUrl, (err, data) => {
                try {
                  if (err) {
                    throw err;
                  }
                  zip.file(f.Name, data, { binary: true });
                  submit(null, true);
                } catch (err) {
                  submit(err, true);
                  this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
                  this._loggerInstance.logException(Constants.SISCC_ES_EXCEPTIONS, {
                    Component: this._canonicalName,
                    Message: ErrorMessages.COM007,
                    UserName: !(!DataSingleton.getCurrentUser()) ? DataSingleton.getCurrentUser() : '',
                    Group: '',
                    Notes: err,
                    Source: Constants.EXCEPTION_UI_SOURCE,
                    ExceptionID: Guid.create().toString()
                  } as ExceptionObject).then(() => {
                  });
                }
              });
            });
          });
          requestObjectInfo.push(req);
        });
        parallelLimit(downloadSubPromises, Constants.DOWNLOAD_BATCH_MAX_FILE_LIMIT,
          (err, results) => {
            try {
              console.log(results);
              debugger;
              zip
                .generateInternalStream({ type: "blob" })
                .accumulate()
                .then((content) => {
                  saveAs(content, zipFileName + ".zip");
                });
            }
            catch (err) {
              this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
              this._loggerInstance.logException(Constants.SISCC_ES_EXCEPTIONS, {
                Component: this._canonicalName,
                Message: ErrorMessages.COM007,
                UserName: !(!DataSingleton.getCurrentUser()) ? DataSingleton.getCurrentUser() : '',
                Group: '',
                Notes: err,
                Source: Constants.EXCEPTION_UI_SOURCE,
                ExceptionID: Guid.create().toString()
              } as ExceptionObject).then(() => {
              });
            }
          });
        while (index < requestObjectInfo.length) {
          this.setState({ requestObject: requestObjectInfo[index] });
          if (this.state.requestObject.Status !== Constants.ES_DOWNLOADREQUEST_STATUS) {
            this.updateESRequestStatus(Constants.ES_DOWNLOADREQUEST_STATUS);
          }
          index++;
        }
        this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
      }
    }
  });
}
In this case, only 55-75 MB of the JS heap is used.

You can use async and await to make the code easier to understand and avoid deep nesting:
const ZIP_FILE_NAME = "ES_Unviewed_Files.zip"

const downloadUntouchedFiles = async () => {
  const zip = new JSZip()
  const untouched = await this._eligibilitySubmissionInstance.getUntouchedFiles(this.state.filterObject)
  if (!untouched?.length) return
  // Flatten every request's files into one list, prefixing each file name.
  const files = untouched.reduce((acc, { Folder: { Files } }) => {
    Files.forEach(f => acc.push({ name: `${this.state.userId}_${f.Name}`, ...f }))
    return acc
  }, [])
  // One thunk per file, so that `batched` controls how many run at once.
  const downloads = files.map(({ name, ServerRelativeUrl }) =>
    async () => ({ name, data: await fetchBinary(ServerRelativeUrl) }))
  const responses = await batched(downloads, Constants.DOWNLOAD_BATCH_MAX_FILE_LIMIT)
  // Only fulfilled downloads make it into the zip; rejected ones are skipped.
  responses.forEach(r =>
    r.status === 'fulfilled' && zip.file(r.value.name, r.value.data, { binary: true }))
  const content = await zip.generateInternalStream({ type: "blob" }).accumulate()
  saveAs(content, ZIP_FILE_NAME)
}

const fetchBinary = (url) => new Promise((resolve, reject) =>
  JSZipUtils.getBinaryContent(url, (err, data) => err ? reject(err) : resolve(data)))

// Runs the given thunks at most `batchSize` at a time.
async function batched(fns, batchSize = 2) {
  const results = []
  for (let start = 0; start < fns.length; start += batchSize) {
    const slice = fns.slice(start, start + batchSize)
    const promises = slice.map((fn) => fn())
    results.push(...await Promise.allSettled(promises))
  }
  return results
}
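A quick way to sanity-check the batching behaviour in isolation; the delayed helper below is hypothetical, purely for illustration:
// Hypothetical helper: a thunk that resolves with n after ms milliseconds.
const delayed = (n, ms) => () => new Promise(res => setTimeout(() => res(n), ms))

// At most two of the three tasks run at once; results keep input order.
batched([delayed(1, 30), delayed(2, 20), delayed(3, 10)], 2)
  .then(results => console.log(results.map(r => r.value))) // [1, 2, 3]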

This will work. The important change is that the download tasks now always call submit(null, ...), even when a file fails: if this is async's parallelLimit, passing the error to the task callback (as submit(err, true) did above) fires the final callback early and skips the remaining tasks, which would explain why only 143 of the 402 files ended up in the zip.
private downloadUntouchedFiles = () => {
  this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: true });
  let statusUpdatePromises = [];
  this._eligibilitySubmissionInstance.getUntouchedFiles(this.state.filterObject).then((requests) => {
    if (!(!requests)) {
      if (requests.length > 0) {
        var zip = new JSZip();
        var zipFileName = "ES_Unviewed_Files";
        const downloadSubPromises = [];
        requests.forEach((req: any) => {
          req.Folder.Files.forEach((f: any) => {
            f.Name = this.state.userId + '_' + f.Name;
            downloadSubPromises.push((submit: any) => {
              JSZipUtils.getBinaryContent(`${new Constants().BASE_URL}${encodeURIComponent(f.ServerRelativeUrl).replace('%2F', '/')}`, (err, data) => {
                try {
                  if (err) {
                    submit(null, true);
                  } else {
                    zip.file(f.Name, data, { binary: true });
                    submit(null, true);
                  }
                } catch (err) {
                  this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
                  this._loggerInstance.logException(Constants.SISCC_ES_EXCEPTIONS, {
                    Component: this._canonicalName,
                    Message: ErrorMessages.COM007,
                    UserName: !(!DataSingleton.getCurrentUser()) ? DataSingleton.getCurrentUser() : '',
                    Group: '',
                    Notes: err,
                    Source: Constants.EXCEPTION_UI_SOURCE,
                    ExceptionID: Guid.create().toString()
                  } as ExceptionObject).then(() => {
                  });
                  submit(null, false);
                }
              });
            });
          });
          statusUpdatePromises.push((submit: any) => {
            this.setState({ requestObject: req }, () => {
              if (this.state.requestObject.Status !== Constants.ES_DOWNLOADREQUEST_STATUS) {
                this.updateESRequestStatus(Constants.ES_DOWNLOADREQUEST_STATUS).then(res => {
                  submit(true);
                });
              } else {
                submit(true);
              }
            });
          });
        });
        parallelLimit(downloadSubPromises, Constants.UPLOAD_BATCH_MAX_FILE_LIMIT,
          (err: any, results: any) => {
            parallelLimit(statusUpdatePromises, Constants.UPLOAD_BATCH_MAX_FILE_LIMIT,
              (subErr: any, subResults: any) => {
                try {
                  zip
                    .generateInternalStream({ type: "blob" })
                    .accumulate()
                    .then((content) => {
                      this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
                      saveAs(content, zipFileName + ".zip");
                    });
                }
                catch (err) {
                  this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
                  this._loggerInstance.logException(Constants.SISCC_ES_EXCEPTIONS, {
                    Component: this._canonicalName,
                    Message: ErrorMessages.COM007,
                    UserName: !(!DataSingleton.getCurrentUser()) ? DataSingleton.getCurrentUser() : '',
                    Group: '',
                    Notes: err,
                    Source: Constants.EXCEPTION_UI_SOURCE,
                    ExceptionID: Guid.create().toString()
                  } as ExceptionObject).then(() => {
                  });
                }
              });
          });
      }
    }
  });
}
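One caveat in the URL construction above: with a string pattern, String.prototype.replace only substitutes the first occurrence, so encodeURIComponent(f.ServerRelativeUrl).replace('%2F', '/') decodes only the first path separator. For files nested more than one folder deep, a global replace is presumably needed, e.g.:
// Replace every encoded slash, not just the first one.
const url = `${new Constants().BASE_URL}${encodeURIComponent(f.ServerRelativeUrl).replace(/%2F/g, '/')}`;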

Related

Rxdb infinitely pulling in replicateRxCollection

I'm working with RxDB and I have pull and push handlers; for the backend I have used Supabase.
I have set up the code for replication as follows:
replication.ts
import { RxDatabase } from "rxdb";
import { RxReplicationPullStreamItem } from "rxdb/dist/types/types";
import { replicateRxCollection } from "rxdb/plugins/replication";
import { Subject } from "rxjs";
import { supabaseClient, SUPABASE_URL } from "src/config/supabase";
import { DbTables } from "src/constants/db";
import {
  blockPullHandler,
  blockPushHandler,
} from "./repilicationhandlers/block";
import { CheckpointType, RxBlockDocument, RxBlocksCollections } from "./types";

export async function startReplication(
  database: RxDatabase<RxBlocksCollections>
) {
  const pullStream$ = new Subject<
    RxReplicationPullStreamItem<RxBlockDocument, CheckpointType>
  >();
  supabaseClient
    .from(DbTables.Block)
    .on("*", (payload) => {
      console.log("Change received!", payload);
      const doc = payload.new;
      pullStream$.next({
        checkpoint: {
          id: doc.id,
          updated: doc.updated,
        },
        documents: [doc] as any,
      });
    })
    .subscribe((status: string) => {
      console.log("STATUS changed");
      console.dir(status);
      if (status === "SUBSCRIBED") {
        pullStream$.next("RESYNC");
      }
    });
  const replicationState = await replicateRxCollection({
    collection: database.blocks,
    replicationIdentifier: "supabase-replication-to-" + SUPABASE_URL,
    deletedField: "archived",
    pull: {
      handler: blockPullHandler as any,
      stream$: pullStream$.asObservable(),
      batchSize: 10,
    },
    push: {
      batchSize: 1,
      handler: blockPushHandler as any,
    },
  });
  replicationState.error$.subscribe((err) => {
    console.error("## replicationState.error$:");
    console.log(err);
  });
  return replicationState;
}
blockPullHandler:
export const blockPullHandler = async (
  lastCheckpoint: any,
  batchSize: number
) => {
  const minTimestamp = lastCheckpoint ? lastCheckpoint.updated : 0;
  console.log("Pulling data", batchSize, lastCheckpoint);
  const { data, error } = await supabaseClient
    .from(DbTables.Block)
    .select()
    .gt("updated", minTimestamp)
    .order("updated", { ascending: true })
    .limit(batchSize);
  if (error) {
    console.log(error);
    throw error;
  }
  const docs: Array<Block> = data;
  return {
    documents: docs,
    hasMoreDocuments: false,
    checkpoint:
      docs.length === 0
        ? lastCheckpoint
        : {
            id: lastOfArray(docs).id,
            updated: lastOfArray(docs).updated,
          },
  };
};
blockPushHandler:
export const blockPushHandler = async (
  rows: RxReplicationWriteToMasterRow<RxBlockDocumentType>[]
) => {
  if (rows.length !== 1) {
    throw new Error("# pushHandler(): too many push documents");
  }
  const row = rows[0];
  const oldDoc: any = row.assumedMasterState;
  const doc: Block = row.newDocumentState;
  console.log(row, oldDoc, doc);
  // insert
  if (!row.assumedMasterState) {
    const { error } = await supabaseClient.from(DbTables.Block).insert([doc]);
    console.log("Error 1", error);
    if (error) {
      // we have an insert conflict
      const conflictDocRes: any = await supabaseClient
        .from(DbTables.Block)
        .select()
        .eq("id", doc.id)
        .limit(1);
      return [conflictDocRes.data[0]];
    } else {
      return [];
    }
  }
  // update
  console.log("pushHandler(): is update");
  const { data, error } = await supabaseClient
    .from(DbTables.Block)
    .update(doc)
    .match({
      id: doc.id,
      replicationRevision: oldDoc.replicationRevision,
    });
  console.log("Error 2", error);
  if (error) {
    console.log("pushHandler(): error:");
    console.log(error);
    console.log(data);
    throw error;
  }
  console.log("update response:");
  console.log(data);
  if (data.length === 0) {
    // we have an update conflict
    const conflictDocRes: any = await supabaseClient
      .from(DbTables.Block)
      .select()
      .eq("id", doc.id)
      .limit(1);
    return [conflictDocRes.data[0]];
  }
  return [];
};
But the issue is that when I start the application, the pull handler is called correctly, yet it never stops: it sends continuous requests one after another, even after it has fetched the documents, and even though I set hasMoreDocuments to false it keeps sending requests and running the replicator. Is there something wrong with my configuration?
database.ts:
export const createDatabase = async () => {
  const database = await createRxDatabase({
    name: "sundaedb",
    storage: getRxStorageDexie(),
  });
  await database.addCollections({
    blocks: {
      schema: blockSchema as any,
      conflictHandler: conflictHandler as any,
    },
    documents: {
      schema: documentSchema as any,
      conflictHandler: conflictHandler as any,
    },
  });
  database.blocks.preInsert((docData) => {
    docData.replicationRevision = createRevision(
      database.hashFunction,
      docData as any
    );
    return docData;
  }, false);
  database.blocks.preRemove((docData) => {
    console.log(" PRE REMOVE !!");
    console.log(JSON.stringify(docData, null, 4));
    const oldRevHeight = parseRevision(docData.replicationRevision).height;
    docData.replicationRevision =
      oldRevHeight + 1 + "-" + database.hashFunction(JSON.stringify(docData));
    console.log(JSON.stringify(docData, null, 4));
    return docData;
  }, false);
  database.blocks.preSave((docData) => {
    const oldRevHeight = parseRevision(docData.replicationRevision).height;
    docData.replicationRevision =
      oldRevHeight + 1 + "-" + database.hashFunction(JSON.stringify(docData));
    return docData;
  }, false);
  return database;
};

Variable is not changed in local function

I am trying to get a specific string from an email body and return it. Inside the simpleParser callback I assign the output of substring to the token variable that I declared earlier. Why, when I print token in the getEmails function, do I get an empty string, while in simpleParser I get the value that I wanted?
const Imap = require('imap');
const {simpleParser} = require('mailparser');

const imapConfig = {
  user: '',
  password: '',
  host: 'imap.gmail.com',
  port: 993,
  tls: true,
  tlsOptions: {
    rejectUnauthorized: false
  }
};

let token = "";

async function getEmails() {
  try {
    const imap = new Imap(imapConfig);
    imap.once('ready', () => {
      imap.openBox('INBOX', false, () => {
        imap.search(['UNSEEN'], (err, results) => {
          const f = imap.fetch(results, {bodies: ''});
          f.on('message', msg => {
            msg.on('body', stream => {
              simpleParser(stream, async (err, parsed) => {
                // const {from, subject, textAsHtml, text} = parsed;
                // console.log(parsed);
                let tokenNr = parsed.html.search('token');
                token = parsed.html.substring(tokenNr + 6, tokenNr + 66);
                console.log(token);
              });
            });
          });
          f.once('error', ex => {
            return Promise.reject(ex);
          });
          f.once('end', () => {
            console.log('Done fetching all messages!');
            imap.end();
          });
        });
      });
    });
    imap.once('error', err => {
      console.log(err);
    });
    imap.once('end', () => {
      console.log('Connection ended');
    });
    imap.connect();
  } catch (ex) {
    console.log('an error occurred');
  }
  console.log(token);
  return token;
};
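The assignment does happen, just later: simpleParser's callback only fires once the IMAP fetch has emitted the message body, long after getEmails has already reached its final console.log(token) and returned. A minimal sketch of one way around this, reusing the same imap/mailparser setup but resolving a Promise from inside the parser callback:
// Sketch: resolve a Promise from inside the parser callback instead of
// relying on an outer variable that is read before the events fire.
function getToken(imapConfig) {
  return new Promise((resolve, reject) => {
    const imap = new Imap(imapConfig);
    imap.once('error', reject);
    imap.once('ready', () => {
      imap.openBox('INBOX', false, () => {
        imap.search(['UNSEEN'], (err, results) => {
          if (err) return reject(err);
          const f = imap.fetch(results, { bodies: '' });
          f.once('error', reject);
          f.on('message', msg => {
            msg.on('body', stream => {
              simpleParser(stream, (err, parsed) => {
                if (err) return reject(err);
                const tokenNr = parsed.html.search('token');
                resolve(parsed.html.substring(tokenNr + 6, tokenNr + 66));
                imap.end();
              });
            });
          });
        });
      });
    });
    imap.connect();
  });
}

// The caller awaits the Promise, so token has its value here.
getToken(imapConfig).then(token => console.log(token));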

Extract matching row by comparing two CSV file in NodeJs

The scenario is that I have two large CSV files, csv1.csv and csv2.csv. In both files there is an email column, and I have to read csv1.csv row by row, check whether the email exists in csv2.csv, and if it matches, write the row of csv2.csv to csv3.csv. I have tried read streams as well, but it is not working as expected. Any guidance or help is appreciated.
Thanks to all in advance.
Following are the CSV files
csv1.csv
email,header1,header2
test1@example.com,test1,test1
test2@example.com,test2,test2
test3@example.com,test3,test3
test4@example.com,test4,test4
test5@example.com,test5,test5
csv2.csv
email,header1,header2
test4@example.com,test4,test4
test5@example.com,test5,test5
test6@example.com,test6,test6
test7@example.com,test7,test7
test8@example.com,test8,test8
Following is the code that I tried
const fs = require('fs');
const csv = require('fast-csv');

class CsvHelper {
  static write(filestream, rows, options) {
    return new Promise((res, rej) => {
      csv.writeToStream(filestream, rows, options)
        .on('error', err => rej(err))
        .on('finish', () => res());
    });
  }
  constructor(opts) {
    this.headers = opts.headers;
    this.path = opts.path;
    this.writeOpts = {
      headers: this.headers,
      includeEndRowDelimiter: true
    };
  }
  create(rows) {
    return CsvHelper.write(fs.createWriteStream(this.path, { flags: 'a' }), rows, { ...this.writeOpts });
  }
  append(rows) {
    return CsvHelper.write(fs.createWriteStream(this.path, { flags: 'a' }), rows, {
      ...this.writeOpts,
      writeHeaders: false,
    });
  }
}

class Helper {
  async matchCsv(outerRow) {
    try {
      const filePath2 = "csv2.csv";
      const filePath3 = "csv3.csv";
      let row = [];
      const csvFile = new CsvHelper({
        path: filePath3,
        headers: ["Email", "Active"]
      });
      return new Promise((resolve, reject) => {
        fs.createReadStream(filePath2)
          .on("error", err => {
            reject(err);
          })
          .pipe(csv.parse({ headers: true }))
          .on("error", err => {
            reject(err);
          })
          .on("data", async innerRow => {
            if (outerRow["email"] === innerRow["email"]) {
              console.log("====================");
              console.log("match found");
              console.log(innerRow);
              console.log("====================");
              row.push([innerRow["email"], "yes"]);
              console.log("row: ", row);
            }
          })
          .on("finish", async () => {
            if (!fs.existsSync(filePath3)) {
              await csvFile.create(row).then(() => {
                resolve("Done from matchCsv");
              });
            } else {
              await csvFile.append(row).then(() => {
                resolve("Done from matchCsv");
              });
            }
          });
      });
    } catch (err) {
      throw (err);
    }
  }
  async generateCsv() {
    try {
      const filePath1 = "csv1.csv";
      return new Promise((resolve, reject) => {
        fs.createReadStream(filePath1)
          .on("error", err => {
            reject(err);
          })
          .pipe(csv.parse({ headers: true }))
          .on("error", err => {
            reject(err);
          })
          .on("data", async outerRow => {
            const result = await this.matchCsv(outerRow);
            console.log("result: ", result);
          })
          .on("finish", () => {
            resolve("Generated csv3.csv file.");
          });
      });
    } catch (err) {
      throw (err);
    }
  }
}

async function main() {
  const helper = new Helper();
  const result = await helper.generateCsv();
  console.log(result);
}
main();
So the question is a little confusing, but I think I know what you want. Here's what I would do to check whether the email exists. It adds all the rows to an array and cycles through them; if the email address matches the email you're looking for, it does something else. I think you said you wanted to write the row to a CSV file again, but that should be simple enough.
const csv = require('csv-parser');
const fs = require('fs');

const filepath = "./example_data.csv";
const emailAdd = "myemail@email.com";
var rowsArr = [];

fs.createReadStream(filepath)
  .on('error', () => {
    // handle error
  })
  .pipe(csv())
  .on('data', (row) => {
    rowsArr.push(row);
  })
  .on('end', () => {
    // note: < rather than <=, so the loop stays in bounds
    for (var i = 0; i < rowsArr.length; i++) {
      if (rowsArr[i].emailAddress == emailAdd) {
        // do something
      }
    }
  });
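Since both files are large, here is a sketch of an approach that avoids re-reading csv2.csv once per row of csv1.csv (using fast-csv, as in the question): collect csv1.csv's email column into a Set, then stream csv2.csv once and write each matching row to csv3.csv.
const fs = require('fs');
const csv = require('fast-csv');

// Collect the email column of csv1.csv into a Set for O(1) lookups.
const loadEmails = (path) => new Promise((resolve, reject) => {
  const emails = new Set();
  fs.createReadStream(path)
    .pipe(csv.parse({ headers: true }))
    .on('error', reject)
    .on('data', row => emails.add(row.email))
    .on('end', () => resolve(emails));
});

// Stream csv2.csv once, writing rows whose email appears in csv1.csv.
async function extractMatches() {
  const emails = await loadEmails('csv1.csv');
  const out = csv.format({ headers: true });
  const dest = fs.createWriteStream('csv3.csv');
  out.pipe(dest);
  await new Promise((resolve, reject) => {
    fs.createReadStream('csv2.csv')
      .pipe(csv.parse({ headers: true }))
      .on('error', reject)
      .on('data', row => { if (emails.has(row.email)) out.write(row); })
      .on('end', () => out.end());
    dest.on('finish', resolve);
    dest.on('error', reject);
  });
}

extractMatches().then(() => console.log('Generated csv3.csv'));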

How to send complex object over IPC in electron?

I'm using ipcRenderer.send() to send an array of objects back to ipcMain. Here's my code:
const loadData = async () => {
  let promises = [];
  ['stocks', 'crypto', 'vehicles', 'property'].forEach(item => {
    promises.push(getTableData(item))
  })
  let data = {}
  await Promise.allSettled(promises).then(results => {
    for (let i in results) {
      var result = results[i]
      if (result.status === 'fulfilled') {
        console.log(result.value.type)
        // result.value.data will be an array of objects
        console.log(result.value.data)
        data[result.value.type] = result.value.data
      } else {
        console.error(result)
      }
    }
  }).finally(_ => {
    console.log(data)
    ipcRenderer.send('asynchronous-message', data)
  })
}
When result.value.data is printed via console.log, it shows the correct data (comes from an SQL query):
{ stocks: [{id: 1, ticker: "BRK.B", action: "ADD", price: 173.97, shares: 6}, ...], ...}
However, when it gets printed in ipcMain.on('asynchronous-message', ...), it prints empty arrays for the values:
{ stocks: [], crypto: [], vehicles: [], property: [] }
How would I send an IPC message with a complex object? Is it not being serialized correctly?
For reference, here is my ipcMain.on('asynchronous-message', ...) code:
ipcMain.on('asynchronous-message', async (event, data) => {
  console.log(data)
})
In addition, here is getTableData():
const getTableData = (table) => {
  let toReturn = {
    type: `${table}`,
    data: []
  }
  return new Promise((resolve, reject) => {
    try {
      db.run(`PRAGMA table_info('${table}');`, err => {
        if (err) {
          reject(err)
        } else {
          db.each(`SELECT * FROM ${table}`, (err, row) => {
            if (err) {
              reject(err)
            } else {
              toReturn.data.push(row)
            }
          })
          resolve(toReturn)
        }
      })
    } catch (e) {
      reject(e)
    }
  })
}
Where each row is an object that looks like:
{id: 1, ticker: "AAPL", action: "ADD", price: 100.0,
shares: 10, datetime: "2020-05-14 23:24:50", platform: ""}
With @Estradiaz's help, I realized that the promise was being resolved before the SQL query had been processed. To fix this, I switched from using db.each() to db.all(). Here is my updated getTableData():
const getTableData = (table) => {
  let toReturn = {
    type: `${table}`,
    data: []
  }
  return new Promise((resolve, reject) => {
    try {
      DB.run(`PRAGMA table_info('${table}');`, err => {
        if (err) {
          console.error(err)
          reject(err)
        } else {
          DB.all(`SELECT * FROM ${table}`, (err, rows) => {
            if (err) {
              console.error(err)
              reject(err)
            } else {
              toReturn.data = rows
              resolve(toReturn)
            }
          })
        }
      })
    } catch (e) {
      console.error(e)
      reject(e)
    }
  })
}
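For what it's worth, node-sqlite3's db.each() also accepts an optional completion callback as its last argument, invoked after all row callbacks have run, so an each-based version could resolve there instead. A sketch under the same assumptions as above:
// Sketch: resolve in db.each's completion callback, which fires once
// every row callback has been called.
const getTableDataEach = (table) => new Promise((resolve, reject) => {
  const toReturn = { type: table, data: [] }
  DB.each(
    `SELECT * FROM ${table}`,
    (err, row) => err ? reject(err) : toReturn.data.push(row),
    (err) => err ? reject(err) : resolve(toReturn)
  )
})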

How to receive multiple requests with Express.JS

I'm writing an Angular 6 + Express.js app, and now I'm stuck on the following problem: when multiple requests are made at the same time, sometimes (especially when there are more than 4 requests) all of them respond with 404 or even get cancelled. Is there a problem with the way I handle requests in Express, or should I add some tweaks for concurrent requests?
Requests:
let requests = [];
files.forEach((file) => {
if (file.type.toLowerCase().includes('zip')) {
requests.push(this.imagesService.uploadArchive(file).pipe(first()));
} else {
requests.push(this.imagesService.saveImage(file).pipe(first()));
}
});
forkJoin(requests).subscribe(
(res) => res.forEach(response => {
this.onSave.emit(response);
}),
(error) => {
console.error(error);
},
() => {
this.close.emit();
}
);
Express route handlers:
router.post('/images',
  formidable({
    encoding: 'utf-8',
    uploadDir: path.resolve(__dirname, '..', '..', 'uploads'),
    multiples: true,
    keepExtensions: true
  }),
  (req, res, next) => {
    const image = req.fields;
    const data = req.files;
    image.path = data.image.path;
    const file = fs.createReadStream(image.path);
    saveImage(image).then(
      result => {
        if (result) {
          res.status(200).send(result);
        } else {
          console.error("Cannot save image");
          res.status(400).send("Cannot save image");
        }
      }).catch(e => console.error(e.stack));
  });
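One robustness tweak worth considering (a sketch, and not necessarily the cause of the 404s): the catch branch above only logs, so if saveImage() rejects, the request is never answered and the client sees it hang or get cancelled. Ending the response in the catch avoids that:
saveImage(image).then(result => {
  if (result) {
    res.status(200).send(result);
  } else {
    console.error("Cannot save image");
    res.status(400).send("Cannot save image");
  }
}).catch(e => {
  // Answer the request even on unexpected errors instead of only logging.
  console.error(e.stack);
  res.status(500).send("Cannot save image");
});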
UPDATE
router.post('/archives',
  formidable({
    encoding: 'utf-8',
    uploadDir: path.resolve(__dirname, '..', '..', 'uploads'),
    multiples: true,
    keepExtensions: true
  }),
  (req, res, next) => {
    const data = req.files;
    let promises = [];
    fs.readFile(data.archive.path, async (err, archive) => {
      if (err) throw err;
      await extractImagesFromZip(archive, data.archive.path).then((images) =>
        images.forEach((image) => {
          promises.push(
            saveImage(image).then(
              result => {
                if (result) {
                  result.path = result.path.split('/').pop();
                  return result;
                } else {
                  console.error("Cannot save image " + image.name);
                  fs.unlink(image.path, () => {});
                }
              }).catch(e => {
                fs.unlink(image.path, () => {});
                console.error(e.stack)
              })
          );
        })
      );
      Promise.all(promises)
        .then((result) => {
          if (result.length > 0) {
            res.status(200).send(result)
          } else {
            res.status(400).send("None images were saved")
          }
        }).catch((error) => {
          console.log(error.stack);
          res.status(400).send("None images were saved")
        });
    });
  }
);
export const extractImagesFromZip = (file, link) => {
  let promises = [];
  var zip = new JSZip();
  return zip.loadAsync(file)
    .then((archive) => {
      Object.values(archive.files).filter(
        f =>
          ['.jpg', '.jpeg', '.png'].some((suffix) => f.name.toLowerCase().endsWith(suffix))
          && ![...f.name.toLowerCase().split('/')].pop().startsWith('.')
          && !f.dir
      ).forEach(f => promises.push(zip.file(f.name).async('nodebuffer').then((content) => {
        const ext = f.name.split('.').pop().toLowerCase();
        var dest = path.resolve(__dirname, '..', '..') + '/uploads/upload_'
          + crypto.randomBytes(Math.ceil(1322)).toString('hex').slice(0, 32).toLowerCase()
          + '.' + ext;
        return new Promise((res, rej) => {
          fs.writeFile(dest, content, (err) => {
            if (err) rej(err);
            res(new Promise((resolve, reject) => {
              fs.readFile(dest, (erro, data) => {
                if (erro) reject(erro);
                if (data) resolve({
                  name: f.name,
                  type: 'image/' + (ext === 'jpg' ? 'jpeg' : ext),
                  path: dest
                });
              });
            }));
          });
        });
      })));
      fs.unlink(link, () => {});
      return Promise.all(promises);
    });
}
export const saveImage = (image) => {
  return database.raw(
    "INSERT INTO images (name, type, path) " +
    "VALUES (?, ?, ?) " +
    "RETURNING name, type, path, id",
    [image.name, image.type, image.path]
  ).then(data => data.rows[0]).catch(e => console.error(e.stack));
};
UPDATE 2
Everything works fine if the user and server are on localhost (regardless of whether the server runs with nginx or without it), but the problem appears when the server is remote.
The following code worked, presumably because concatMap/concatAll run the uploads one at a time instead of firing them all in parallel the way forkJoin does:
public async uploadFiles(files: File[]) {
  of(files)
    .pipe(
      concatMap(files =>
        files.map(file => {
          return this.imagesService
            .saveImage(file)
            .pipe(
              catchError((error, caught) => {
                console.error(error);
                return empty();
              })
            );
        })
      ),
      concatAll(),
      toArray(),
      map(res => console.log(res))
    )
    .subscribe();
}
