How to send a complex object over IPC in Electron? - javascript

I'm using ipcRenderer.send() to send an array of objects back to ipcMain. Here's my code:
const loadData = async () => {
  let promises = [];
  ['stocks', 'crypto', 'vehicles', 'property'].forEach(item => {
    promises.push(getTableData(item))
  })
  let data = {}
  await Promise.allSettled(promises).then(results => {
    for (let i in results) {
      var result = results[i]
      if (result.status === 'fulfilled') {
        console.log(result.value.type)
        // result.value.data will be an array of objects
        console.log(result.value.data)
        data[result.value.type] = result.value.data
      } else {
        console.error(result)
      }
    }
  }).finally(_ => {
    console.log(data)
    ipcRenderer.send('asynchronous-message', data)
  })
}
When result.value.data is printed via console.log, it shows the correct data (comes from an SQL query):
{ stocks: [{id: 1, ticker: "BRK.B", action: "ADD", price: 173.97, shares: 6}, ...], ...}
However, when it gets printed in ipcMain.on('asynchronous-message', ...), it prints empty arrays for the values:
{ stocks: [], crypto: [], vehicles: [], property: [] }
How would I send an IPC message with a complex object? Is it not being serialized correctly?
For reference, here is my ipcMain.on('asynchronous-message', ...) code:
ipcMain.on('asynchronous-message', async (event, data) => {
  console.log(data)
})
In addition, here is getTableData():
const getTableData = (table) => {
  let toReturn = {
    type: `${table}`,
    data: []
  }
  return new Promise((resolve, reject) => {
    try {
      db.run(`PRAGMA table_info('${table}');`, err => {
        if (err) {
          reject(err)
        } else {
          db.each(`SELECT * FROM ${table}`, (err, row) => {
            if (err) {
              reject(err)
            } else {
              toReturn.data.push(row)
            }
          })
          resolve(toReturn)
        }
      })
    } catch (e) {
      reject(e)
    }
  })
}
Where each row is an object that looks like:
{id: 1, ticker: "AAPL", action: "ADD", price: 100.0,
shares: 10, datetime: "2020-05-14 23:24:50", platform: ""}

With @Estradiaz's help, I realized that the promise was being resolved before the SQL query had been processed. To fix this, I switched from using db.each() to db.all(). Here is my updated getTableData():
const getTableData = (table) => {
  let toReturn = {
    type: `${table}`,
    data: []
  }
  return new Promise((resolve, reject) => {
    try {
      DB.run(`PRAGMA table_info('${table}');`, err => {
        if (err) {
          console.error(err)
          reject(err)
        } else {
          DB.all(`SELECT * FROM ${table}`, (err, rows) => {
            if (err) {
              console.error(err)
              reject(err)
            } else {
              toReturn.data = rows
              resolve(toReturn)
            }
          })
        }
      })
    } catch (e) {
      console.error(e)
      reject(e)
    }
  })
}
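Since db.all() buffers every row and fires its callback exactly once, the promise now resolves only after the data is complete. For reference, a more compact sketch of the same pattern (assuming the node-sqlite3 API used above; the PRAGMA call is dropped for brevity):
const getTableData = (table) =>
  new Promise((resolve, reject) => {
    // db.all() collects all rows before the callback runs, so resolve() sees complete data
    DB.all(`SELECT * FROM ${table}`, (err, rows) => {
      if (err) reject(err)
      else resolve({ type: table, data: rows })
    })
  })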

Related

Rxdb infinitely pulling in replicateRxCollection

I'm working with rxdb, and I have pull and push handlers; for the backend I have used Supabase.
I have set up the code for replication as follows:
replication.ts
import { RxDatabase } from "rxdb";
import { RxReplicationPullStreamItem } from "rxdb/dist/types/types";
import { replicateRxCollection } from "rxdb/plugins/replication";
import { Subject } from "rxjs";
import { supabaseClient, SUPABASE_URL } from "src/config/supabase";
import { DbTables } from "src/constants/db";
import {
  blockPullHandler,
  blockPushHandler,
} from "./repilicationhandlers/block";
import { CheckpointType, RxBlockDocument, RxBlocksCollections } from "./types";

export async function startReplication(
  database: RxDatabase<RxBlocksCollections>
) {
  const pullStream$ = new Subject<
    RxReplicationPullStreamItem<RxBlockDocument, CheckpointType>
  >();
  supabaseClient
    .from(DbTables.Block)
    .on("*", (payload) => {
      console.log("Change received!", payload);
      const doc = payload.new;
      pullStream$.next({
        checkpoint: {
          id: doc.id,
          updated: doc.updated,
        },
        documents: [doc] as any,
      });
    })
    .subscribe((status: string) => {
      console.log("STATUS changed");
      console.dir(status);
      if (status === "SUBSCRIBED") {
        pullStream$.next("RESYNC");
      }
    });
  const replicationState = await replicateRxCollection({
    collection: database.blocks,
    replicationIdentifier: "supabase-replication-to-" + SUPABASE_URL,
    deletedField: "archived",
    pull: {
      handler: blockPullHandler as any,
      stream$: pullStream$.asObservable(),
      batchSize: 10,
    },
    push: {
      batchSize: 1,
      handler: blockPushHandler as any,
    },
  });
  replicationState.error$.subscribe((err) => {
    console.error("## replicationState.error$:");
    console.log(err);
  });
  return replicationState;
}
blockPullHandler:
export const blockPullHandler = async (
  lastCheckpoint: any,
  batchSize: number
) => {
  const minTimestamp = lastCheckpoint ? lastCheckpoint.updated : 0;
  console.log("Pulling data", batchSize, lastCheckpoint);
  const { data, error } = await supabaseClient
    .from(DbTables.Block)
    .select()
    .gt("updated", minTimestamp)
    .order("updated", { ascending: true })
    .limit(batchSize);
  if (error) {
    console.log(error);
    throw error;
  }
  const docs: Array<Block> = data;
  return {
    documents: docs,
    hasMoreDocuments: false,
    checkpoint:
      docs.length === 0
        ? lastCheckpoint
        : {
            id: lastOfArray(docs).id,
            updated: lastOfArray(docs).updated,
          },
  };
};
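Not necessarily the cause of the loop, but worth flagging in a pull handler like this: a strictly-greater .gt("updated", minTimestamp) filter skips rows that share the checkpoint's exact updated value. A common guard is a compound (updated, id) checkpoint; a sketch assuming the Supabase query builder's .or() filter syntax:
// Compound checkpoint sketch: take rows strictly newer, plus rows with the same
// 'updated' value whose id is past the checkpoint's id.
const { data, error } = await supabaseClient
  .from(DbTables.Block)
  .select()
  .or(
    `updated.gt.${minTimestamp},` +
    `and(updated.eq.${minTimestamp},id.gt.${lastCheckpoint ? lastCheckpoint.id : ""})`
  )
  .order("updated", { ascending: true })
  .order("id", { ascending: true })
  .limit(batchSize);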
blockPushHandler:
export const blockPushHandler = async (
  rows: RxReplicationWriteToMasterRow<RxBlockDocumentType>[]
) => {
  if (rows.length !== 1) {
    throw new Error("# pushHandler(): too many push documents");
  }
  const row = rows[0];
  const oldDoc: any = row.assumedMasterState;
  const doc: Block = row.newDocumentState;
  console.log(row, oldDoc, doc);
  // insert
  if (!row.assumedMasterState) {
    const { error } = await supabaseClient.from(DbTables.Block).insert([doc]);
    console.log("Error 1", error);
    if (error) {
      // we have an insert conflict
      const conflictDocRes: any = await supabaseClient
        .from(DbTables.Block)
        .select()
        .eq("id", doc.id)
        .limit(1);
      return [conflictDocRes.data[0]];
    } else {
      return [];
    }
  }
  // update
  console.log("pushHandler(): is update");
  const { data, error } = await supabaseClient
    .from(DbTables.Block)
    .update(doc)
    .match({
      id: doc.id,
      replicationRevision: oldDoc.replicationRevision,
    });
  console.log("Error 2", error);
  if (error) {
    console.log("pushHandler(): error:");
    console.log(error);
    console.log(data);
    throw error;
  }
  console.log("update response:");
  console.log(data);
  if (data.length === 0) {
    // we have an updated conflict
    const conflictDocRes: any = await supabaseClient
      .from(DbTables.Block)
      .select()
      .eq("id", doc.id)
      .limit(1);
    return [conflictDocRes.data[0]];
  }
  return [];
};
But the issue is: when I start the application, the pull handler is called correctly, but it never stops being called. It sends continuous requests one after another even after it has fetched the documents, and even when I set hasMoreDocuments to false it keeps sending requests and running the replicator. Is there something wrong with my configuration?
database.ts:
export const createDatabase = async () => {
  const database = await createRxDatabase({
    name: "sundaedb",
    storage: getRxStorageDexie(),
  });
  await database.addCollections({
    blocks: {
      schema: blockSchema as any,
      conflictHandler: conflictHandler as any,
    },
    documents: {
      schema: documentSchema as any,
      conflictHandler: conflictHandler as any,
    },
  });
  database.blocks.preInsert((docData) => {
    docData.replicationRevision = createRevision(
      database.hashFunction,
      docData as any
    );
    return docData;
  }, false);
  database.blocks.preRemove((docData) => {
    console.log(" PRE REMOVE !!");
    console.log(JSON.stringify(docData, null, 4));
    const oldRevHeight = parseRevision(docData.replicationRevision).height;
    docData.replicationRevision =
      oldRevHeight + 1 + "-" + database.hashFunction(JSON.stringify(docData));
    console.log(JSON.stringify(docData, null, 4));
    return docData;
  }, false);
  database.blocks.preSave((docData) => {
    const oldRevHeight = parseRevision(docData.replicationRevision).height;
    docData.replicationRevision =
      oldRevHeight + 1 + "-" + database.hashFunction(JSON.stringify(docData));
    return docData;
  }, false);
  return database;
};

Promise's "resolve" with Sequelize and Node.js is not working

I have tried, without success, to execute my Promise with Sequelize on a stored procedure (MSSQL) inside the function below (when I don't use the Promise, this code runs perfectly, retrieving the JSON data):
function getData(numTravessia) {
  return new Promise((resolve, reject) => {
    return connection.query(
      'EXEC [dbo].[SP_RECUPERAINFOTRAVESSIA] #ID=:param1',
      {
        replacements: {
          param1: numTravessia,
        },
        type: QueryTypes.SELECT
      },
      (error, meta, body) => {
        if (body != undefined) {
          resolve(body.toString());
        } else {
          reject(error);
        }
      })
  });
}

async function getInfoTravessia(numTravessia) {
  try {
    var r = await getData(numTravessia);
    return JSON.parse(r);
  } catch (error) {
    console.log(error);
  }
}

app.listen(8000, () => {
  console.log("aplicativo em execução");
  getInfoTravessia(1955).then((result) => {
    console.log(result);
  }).catch((e) => {
    console.log(e);
  })
});
Below is the code snippet where I don't use the Promise, and it works:
connection.query(
  'EXEC [dbo].[SP_RECUPERAINFOTRAVESSIA] #ID=:param1',
  {
    replacements: {
      param1: 1955,
    },
    type: QueryTypes.SELECT
  }).then((result) => {
    // returns a string
    var obj = result[0];
    res.send(obj);
    // return obj;
  }).catch((e) => {
    console.log('error', e);
  });
Can anyone help me, please?
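Judging from the working snippet above, the likely culprit is that Sequelize's connection.query() returns a promise and does not take a Node-style callback as a third argument, so the (error, meta, body) callback inside getData() never fires and the promise never settles. A minimal sketch of getData() returning the query's promise directly, keeping the question's placeholder syntax:
function getData(numTravessia) {
  // connection.query() already returns a promise, so no manual wrapper is needed.
  // '#ID' is kept as written in the question; T-SQL parameters are normally spelled '@ID'.
  return connection.query(
    'EXEC [dbo].[SP_RECUPERAINFOTRAVESSIA] #ID=:param1',
    {
      replacements: { param1: numTravessia },
      type: QueryTypes.SELECT
    }
  ).then((result) => result[0]); // SELECT-type queries resolve with an array of rows
}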

Downloading Large Number of Files in Frontend using jszip

Unable to zip all the files using jszip. JSZip is reading all 402 files (as shown in the console snapshot) from around 143 requests, but it zips only 143 files. I am using parallelLimit to process multiple async requests simultaneously and cleanly. How can we get all 403 files in the result?
private downloadUntouchedFiles = () => {
  let requestObjectInfo = [];
  let index = 0;
  this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: true });
  this._eligibilitySubmissionInstance.getUntouchedFiles(this.state.filterObject).then((requests) => {
    debugger;
    if (!(!requests)) {
      if (requests.length > 0) {
        var zip = new JSZip();
        var zipFileName = "ES_Unviewed_Files";
        var promises = [];
        this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: true });
        const downloadSubPromises = [];
        let i = 0;
        requests.forEach((req) => {
          req.Folder.Files.forEach(f => {
            f.Name = this.state.initials + '_' + this.state.userId + '_' + f.Name;
            console.log(f.Name);
            i++;
            console.log(i);
            downloadSubPromises.push((submit: any) => {
              JSZipUtils.getBinaryContent(f.ServerRelativeUrl, (err, data) => {
                try {
                  if (err) {
                    throw err;
                  }
                  zip.file(f.Name, data, { binary: true });
                  submit(null, true);
                } catch (err) {
                  submit(err, true);
                  this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
                  this._loggerInstance.logException(Constants.SISCC_ES_EXCEPTIONS, {
                    Component: this._canonicalName,
                    Message: ErrorMessages.COM007,
                    UserName: !(!DataSingleton.getCurrentUser()) ? DataSingleton.getCurrentUser() : '',
                    Group: '',
                    Notes: err,
                    Source: Constants.EXCEPTION_UI_SOURCE,
                    ExceptionID: Guid.create().toString()
                  } as ExceptionObject).then(() => {
                  });
                }
              });
            });
          });
          requestObjectInfo.push(req);
        });
        parallelLimit(downloadSubPromises, Constants.DOWNLOAD_BATCH_MAX_FILE_LIMIT,
          (err, results) => {
            try {
              console.log(results);
              debugger;
              zip
                .generateInternalStream({ type: "blob" })
                .accumulate()
                .then((content) => {
                  saveAs(content, zipFileName + ".zip");
                });
            }
            catch (err) {
              this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
              this._loggerInstance.logException(Constants.SISCC_ES_EXCEPTIONS, {
                Component: this._canonicalName,
                Message: ErrorMessages.COM007,
                UserName: !(!DataSingleton.getCurrentUser()) ? DataSingleton.getCurrentUser() : '',
                Group: '',
                Notes: err,
                Source: Constants.EXCEPTION_UI_SOURCE,
                ExceptionID: Guid.create().toString()
              } as ExceptionObject).then(() => {
              });
            }
          });
        while (index < requestObjectInfo.length) {
          this.setState({ requestObject: requestObjectInfo[index] });
          if (this.state.requestObject.Status !== Constants.ES_DOWNLOADREQUEST_STATUS) {
            this.updateESRequestStatus(Constants.ES_DOWNLOADREQUEST_STATUS);
          }
          index++;
        }
        this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
      }
    }
  });
}
In this case only 55-75 MB of the JS heap is used.
You can use async and await to make the code easier to understand and avoid deep nesting:
const ZIP_FILE_NAME = "ES_Unviewed_Files.zip"

const downloadUntouchedFiles = async () => {
  const zip = new JSZip()
  const untouched = await this._eligibilitySubmissionInstance.getUntouchedFiles(this.state.filterObject)
  if (!untouched?.length) return
  // flatten every request's files into one list, prefixing each name with the user id
  const files = untouched.reduce((acc, { Folder: { Files } }) => {
    Files.forEach(f => acc.push({ name: `${this.state.userId}_${f.Name}`, ...f }))
    return acc
  }, [])
  const downloads = files.map(({ name, ServerRelativeUrl }) =>
    async () => ({ name, data: await fetchBinary(ServerRelativeUrl) }))
  const responses = await batched(downloads, Constants.DOWNLOAD_BATCH_MAX_FILE_LIMIT)
  responses.forEach((r) =>
    r.status === 'fulfilled' && zip.file(r.value.name, r.value.data, { binary: true }))
  const content = await zip.generateInternalStream({ type: "blob" }).accumulate()
  saveAs(content, ZIP_FILE_NAME)
}
const fetchBinary = (url) => new Promise((resolve, reject) =>
  JSZipUtils.getBinaryContent(url, (err, data) => err ? reject(err) : resolve(data)))
async function batched(fns, batchSize = 2) {
  const results = []
  for (let start = 0, end = batchSize; start < fns.length; start += batchSize, end = start + batchSize) {
    const slice = fns.slice(start, end)
    const promises = slice.map((fn) => fn())
    results.push([...await Promise.allSettled(promises)])
  }
  return results.flat()
}
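For clarity, a usage sketch of batched() with two hypothetical thunks (the URLs are made up):
// Each entry is a function returning a promise; at most 2 run at a time
const results = await batched([
  async () => ({ name: 'a.pdf', data: await fetchBinary('/files/a.pdf') }),
  async () => ({ name: 'b.pdf', data: await fetchBinary('/files/b.pdf') }),
], 2)
// results is a flat array of Promise.allSettled entries: { status, value } or { status, reason }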
This will work. In the original code, passing an error to the task callback (submit(err, true)) makes parallelLimit stop scheduling the remaining tasks, so only the files downloaded before the first error end up in the zip; here every per-file failure is swallowed (submit(null, true)), so all downloads are attempted before the zip is generated:
private downloadUntouchedFiles = () => {
  this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: true });
  let statusUpdatePromises = [];
  this._eligibilitySubmissionInstance.getUntouchedFiles(this.state.filterObject).then((requests) => {
    if (!(!requests)) {
      if (requests.length > 0) {
        var zip = new JSZip();
        var zipFileName = "ES_Unviewed_Files";
        const downloadSubPromises = [];
        requests.forEach((req: any) => {
          req.Folder.Files.forEach((f: any) => {
            f.Name = this.state.userId + '_' + f.Name;
            downloadSubPromises.push((submit: any) => {
              JSZipUtils.getBinaryContent(`${new Constants().BASE_URL}${encodeURIComponent(f.ServerRelativeUrl).replace('%2F', '/')}`, (err, data) => {
                try {
                  if (err) {
                    submit(null, true);
                  } else {
                    zip.file(f.Name, data, { binary: true });
                    submit(null, true);
                  }
                } catch (err) {
                  this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
                  this._loggerInstance.logException(Constants.SISCC_ES_EXCEPTIONS, {
                    Component: this._canonicalName,
                    Message: ErrorMessages.COM007,
                    UserName: !(!DataSingleton.getCurrentUser()) ? DataSingleton.getCurrentUser() : '',
                    Group: '',
                    Notes: err,
                    Source: Constants.EXCEPTION_UI_SOURCE,
                    ExceptionID: Guid.create().toString()
                  } as ExceptionObject).then(() => {
                  });
                  submit(null, false);
                }
              });
            });
          });
          statusUpdatePromises.push((submit: any) => {
            this.setState({ requestObject: req }, () => {
              if (this.state.requestObject.Status !== Constants.ES_DOWNLOADREQUEST_STATUS) {
                this.updateESRequestStatus(Constants.ES_DOWNLOADREQUEST_STATUS).then(res => {
                  submit(true);
                });
              } else {
                submit(true);
              }
            });
          });
        });
        parallelLimit(downloadSubPromises, Constants.UPLOAD_BATCH_MAX_FILE_LIMIT,
          (err: any, results: any) => {
            parallelLimit(statusUpdatePromises, Constants.UPLOAD_BATCH_MAX_FILE_LIMIT,
              (subErr: any, subResults: any) => {
                try {
                  zip
                    .generateInternalStream({ type: "blob" })
                    .accumulate()
                    .then((content) => {
                      this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
                      saveAs(content, zipFileName + ".zip");
                    });
                }
                catch (err) {
                  this._eventEmitter.emit(Constants.LOADER_CHANGE, { show: false });
                  this._loggerInstance.logException(Constants.SISCC_ES_EXCEPTIONS, {
                    Component: this._canonicalName,
                    Message: ErrorMessages.COM007,
                    UserName: !(!DataSingleton.getCurrentUser()) ? DataSingleton.getCurrentUser() : '',
                    Group: '',
                    Notes: err,
                    Source: Constants.EXCEPTION_UI_SOURCE,
                    ExceptionID: Guid.create().toString()
                  } as ExceptionObject).then(() => {
                  });
                }
              });
          });
      }
    }
  });
}

Why doesn't array.map change anything?

I'm trying to format the data I got from the YouTube Data API v3, but I'm unable to change any of it.
const videoIds = youtubeResponse.items.map(item => item.id);
VideoRepo.getById(videoIds, (err, videos) => {
  /*
   videos is an array of objects that contain youtube videos from the YT API and MongoDB (mongoose)
  */
  console.log(videos.map((v) => {
    v.time = moment(v.time).fromNow();
    v.duration = moment('1900-01-01 00:00:00').seconds(v.duration).format('HH:mm:ss');
    return v;
  }));
});
VideoRepo class:
static getById(id, callback) {
  if (Array.isArray(id)) {
    // Multiple ids were specified
    async.waterfall([
      (next) => {
        // Get existing videos' data
        Video.find({ _id: { $in: id } }).select('-__v').sort({ createdAt: 1 }).exec((err, data) => {
          if (err) return next(err);
          next(null, data);
        });
      },
      (existingData, next) => {
        if (existingData.length === 0) {
          // All videos are new, skip to the next step
          return next(null, [], id);
        }
        // Remove existing data from ID array
        const obj = existingData.map(el => el._id);
        next(null, existingData, id.filter(el => !obj.includes(el)));
      },
      (existingData, newIDs, next) => {
        if (newIDs.length === 0) {
          return next(null, existingData);
        }
        // Get new videos' data from YT API
        youtube.videos.list({ id: newIDs.join(','), part: 'snippet,contentDetails,statistics' }, (err, videoResp) => {
          if (err) return next(err);
          // Final data
          const data = id;
          // New videos' data
          const newData = videoResp.data.items.map(item => this.fixVideoData(item));
          // Add new videos to the DB
          Video.insertMany(newData, (err) => {
            if (err) return next(err);
            // Merge new data with existing data
            const merged = existingData.concat(newData);
            // Fix order
            for (let i = 0; i < merged.length; i += 1) {
              const d = merged[i];
              data[data.indexOf(d._id)] = d;
            }
            // Success!
            next(null, data);
          });
        });
      },
    ], (err, data) => callback(err, data));
  }
}
static fixVideoData(videoData) {
  const data = {
    _id: videoData.id,
    channelId: videoData.snippet.channelId,
    title: videoData.snippet.title,
    description: videoData.snippet.description,
    slug: slugify(videoData.snippet.title, { lower: true }),
    views: videoData.statistics.viewCount,
    duration: moment.duration(videoData.contentDetails.duration).asSeconds(),
    tags: videoData.snippet.tags,
    thumbnail: null,
    preThumbnail: null,
    time: videoData.snippet.publishedAt,
  };
  const possibleThumbs = ['maxres', 'standard', 'high', 'medium', 'default'];
  for (let j = 0; j < possibleThumbs.length; j += 1) {
    if (Object.prototype.hasOwnProperty.call(videoData.snippet.thumbnails, possibleThumbs[j])) {
      data.thumbnail = videoData.snippet.thumbnails[possibleThumbs[j]].url;
      break;
    }
  }
  if (videoData.snippet.thumbnails.medium) {
    data.preThumbnail = videoData.snippet.thumbnails.medium.url;
  } else if (videoData.snippet.thumbnails.high) {
    data.preThumbnail = videoData.snippet.thumbnails.high.url;
  } else {
    data.preThumbnail = data.thumbnail;
  }
  return data;
}
This is what the videos array contains:
// videoData: https://developers.google.com/youtube/v3/docs/videos#resource
{
_id: videoData.id,
channelId: videoData.snippet.channelId,
title: videoData.snippet.title,
description: videoData.snippet.description,
views: videoData.statistics.viewCount,
duration: moment.duration(videoData.contentDetails.duration).asSeconds(),
tags: videoData.snippet.tags,
thumbnail: null,
preThumbnail: null,
time: videoData.snippet.publishedAt,
};
Expected results:
[...{ [..other keys] duration: "00:05:43", time: "3 days ago" }]
Actual output (nothing is changed, exactly the same array as videos):
[...{ [..other keys] duration: 343, time: 2018-12-26T13:37:32.000Z }]
Why is it not working, and how can I fix it?
You can return a new object where you override only those two specific keys:
videos.map(v => ({
  ...v,
  duration: moment('1900-01-01 00:00:00').seconds(v.duration).format('HH:mm:ss'),
  time: moment(v.time).fromNow()
}))
So apparently Model.find() returns Mongoose documents instead of plain JavaScript objects, and I should have used Query.lean().
Video.find({ _id: { $in: id } }).lean().select('-__v').sort({ createdAt: 1 })
  .exec()
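An alternative, if Mongoose documents are needed elsewhere, is to convert each document to a plain object before formatting; a sketch using Document.toObject():
console.log(videos.map((v) => {
  const obj = v.toObject(); // plain JS object, safe to mutate
  obj.time = moment(obj.time).fromNow();
  obj.duration = moment('1900-01-01 00:00:00').seconds(obj.duration).format('HH:mm:ss');
  return obj;
}));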

How to handle an Express.js callback and update an object's property inside a function?

I have two JS files. I am able to get data from MongoDB by calling bookDao.getActiveBookByCategoryId().
My problem
In the categoryDao.js file I am trying to update resultJson.book_count inside the BookDao.getActiveBookByCategoryId() callback, but it is not updating. May I know how to fix this?
Here, the book_count property in resultJson is still 0.
categoryDao.js
module.exports.getAllActiveCategory = (callback) => {
  Category.find({
    is_delete: false
  }, (error, result) => {
    if (error) {
      console.log(error);
      callback(commonUtil.ERROR);
    }
    if (result) {
      var categoryArray = [];
      for (var i = 0; i < result.length; i++) {
        var categorySingle = result[i];
        var resultJson = {
          _id: categorySingle._id,
          category_name: categorySingle.category_name,
          created_on: categorySingle.created_on,
          book_count: 0
        }
        BookDao.getActiveBookByCategoryId(categorySingle._id, (bookResult) => {
          if (bookResult) {
            if (bookResult.length > 0) {
              resultJson.book_count = bookResult.length;
            }
          }
        });
        categoryArray.push(resultJson);
      }
      callback(categoryArray);
    }
  });
}
bookDao.js
module.exports.getActiveBookByCategoryId = (categoryId, callback) => {
  Book.find({
    is_delete: false,
    category: categoryId
  }, (error, result) => {
    if (error) {
      console.log(error);
      callback(commonUtil.ERROR);
    }
    if (result) {
      callback(result);
    }
  });
}
Try this. In your code, categoryArray.push(resultJson) does not wait for BookDao.getActiveBookByCategoryId to finish, because of its async behavior.
module.exports.getActiveBookByCategoryId = (categoryId) => {
  return Book.count({
    is_delete: false,
    category: categoryId
  });
}

module.exports.getAllActiveCategory = async () => {
  try {
    // Find all categories
    const result = await Category.find({
      is_delete: false
    });
    // Create an array of promises
    const promises = result.map(categorySingle => BookDao.getActiveBookByCategoryId(categorySingle._id));
    // Get the array of category counts
    const data = await Promise.all(promises);
    // Update the count in the result
    return result.map((categorySingle, i) => {
      categorySingle.book_count = data[i];
      return categorySingle;
    });
  } catch (error) {
    console.log(error);
  }
}
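One caveat with the snippet above: result holds Mongoose documents, so assigning book_count (a field presumably not in the schema) may be dropped when the documents are serialized. As with the .lean() fix in the previous question, plain objects avoid this; a sketch under that assumption:
module.exports.getAllActiveCategory = async () => {
  // .lean() yields plain objects, so the added book_count property survives serialization
  const result = await Category.find({ is_delete: false }).lean();
  const counts = await Promise.all(
    result.map((c) => BookDao.getActiveBookByCategoryId(c._id))
  );
  return result.map((c, i) => ({ ...c, book_count: counts[i] }));
};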
