I have a fairly straightforward Node script. It pulls in data from an API, loops through it all, and compiles two reports: one in a JSON format, the other in CSV. It then sorts the data and finally exports a JSON file and two CSV files (duplicates).
I have put logging into my script and found that the script fails to advance up to and past the part where I sort the arrays before exporting them into file formats.
What could be causing this?
** It's worth noting that if I remove the sorting and exporting at the bottom for either the JSON or the CSV, the script will run completely. It just won't do both at the same time. Stranger still, it was working last week in this exact same configuration, but suddenly breaks.
// Script setup: xMatters toolbox config/util helpers, the log directory, and
// the workflow name passed on the command line (process.argv.slice(2) is the
// array of extra CLI arguments).
const { xm, xm_env, uncaught_endpoint, prod_xm } = require('./../config');
const util = require('./../node_modules/xmtoolbox/lib/util');
const path = require('path');
const workflow = process.argv.slice(2);
const fs = require('fs')
// Log output directory (relative to this script) and log file name derived
// from this script's own filename.
const pathname = './../../../Logs/';
const filename = path.basename(__filename, '.js');
// Column order for the CSV reports produced by util.buildCSV below.
let fields = [
'sub_name',
'sub_uuid',
'sub_form_name',
'sub_form_uuid',
'workflow_name',
'workflow_uuid',
'owner_targetName',
'owner_uuid',
'owner_firstName',
'owner_lastName',
'recipient_targetName',
'recipient_uuid',
'recipient_firstName',
'recipient_lastName',
];
// Main entry point: pull every subscription from xMatters, flatten them into
// a CSV-friendly `report` array and a richer `json` array, then sort and
// export both (one JSON file plus two CSV files).
(async (env, workflow) => {
try {
const report = [];
const json = [];
const subscriptions = await xm.subscriptions.getMany(env);
util.buildLogger(pathname, filename).info('Pulled in all the subscriptions...');
// Process all subscriptions concurrently; each one may also fetch its
// recipient list from the API.
await Promise.all(subscriptions.map(async sub => {
if (sub.criteria) {
// Flat row for the CSV report (no recipient columns yet).
let sub_obj = {
sub_name : sub.name,
sub_uuid : sub.id,
sub_form_name : sub.form.name,
sub_form_uuid : sub.form.id,
workflow_name : sub.form.plan.name,
workflow_uuid : sub.form.plan.id,
owner_targetName : sub.owner.targetName,
owner_uuid : sub.owner.id,
owner_firstName : sub.owner.firstName,
owner_lastName : sub.owner.lastName
};
// Nested object for the JSON export; recipients filled in below.
let json_obj = {
recipients : [],
id : sub.id,
name : sub.name,
form : sub.form,
owner : sub.owner,
created : sub.created,
criteria : sub.criteria.data,
description : sub.description,
notificationDelay : sub.notificationDelay
};
report.push(sub_obj);
// NOTE(review): assumes the API exposes a `count` field on
// sub.recipients — confirm against the xMatters response shape.
if (sub.recipients && sub.recipients.count > 1) {
let recipients = await xm.subscriptions.getSubscribers(env, '', sub.id);
await Promise.all(recipients.map(async r => {
json_obj.recipients.push({ targetName: r.targetName, id : r.id })
// NOTE(review): recip_obj is built but never pushed anywhere, so the
// recipient_* columns of the CSV report are never populated.
let recip_obj = {
sub_name : sub.name,
sub_uuid : sub.id,
sub_form_name : sub.form.name,
sub_form_uuid : sub.form.id,
workflow_name : sub.form.plan.name,
workflow_uuid : sub.form.plan.id,
owner_targetName : sub.owner.targetName,
owner_uuid : sub.owner.id,
owner_firstName : sub.owner.firstName,
owner_lastName : sub.owner.lastName,
recipient_targetName : r.targetName,
recipient_firstName : r.firstName,
recipient_lastName : r.lastName,
recipient_uuid : r.id
};
// NOTE(review): the brace/paren balance looks wrong in this paste —
// the inner recipients.map/Promise.all is never closed with `}))`,
// so the snippet as shown is not syntactically valid.
}
json.push(json_obj);
}
}));
util.buildLogger(pathname, filename).info('Looped through all subscriptions and built report arrays...');
// -------------- LOGGING NEVER MAKES IT PAST THIS PART ---------------------- //
// NOTE(review): report rows have no 'form_name' key (they use
// 'sub_form_name'). If awaitSort calls .toLowerCase() on the resulting
// undefined it throws, control jumps to the catch below, and none of the
// later log lines run — a likely cause of the observed silent stop.
let sorted_report = await util.awaitSort(report, ['form_name', 'sub_name']);
util.buildLogger(pathname, filename).info('Built Sorted Report...');
let sorted_json = await util.awaitSort(json, ['name']);
util.buildLogger(pathname, filename).info('Built sorted_json Report...');
fs.writeFileSync('./../../../Feeds/Subs_JSON.json', JSON.stringify(sorted_json))
util.buildCSV(fields, sorted_report, 'Subscriptions');
util.buildCSV(fields, sorted_report, `${util.today()}_Subscriptions`, true);
util.buildLogger(pathname, filename).info('All files written...');
} catch (e) {
// Any failure above is reported to a webhook rather than logged locally,
// which can make errors look like a silent hang.
util.uncaught_exception_webhook(xm_env, path.basename(__filename), workflow, uncaught_endpoint, e);
}
})(prod_xm, workflow);
Await Sort Function from UTIL file
/**
 * Sort an array of objects in place by one or more string keys
 * (case-insensitive), returning the sorted array.
 *
 * Bug fixes vs. the original:
 *  - The comparator ran `keys.map(...)` and discarded its result, so the
 *    comparator itself always returned undefined and sort() never reordered
 *    anything. We now compare key-by-key and return the first non-zero result.
 *  - `a[i].toLowerCase > b[i].toLowerCase` compared function references
 *    (missing call parentheses); both sides are now called.
 *  - Missing/null key values sort as empty strings instead of throwing
 *    (e.g. when a caller passes a key absent from the row objects).
 *
 * NOTE: `async` is kept only to preserve existing awaited call sites; the
 * sort itself is synchronous.
 *
 * @param {Object[]} data  Rows to sort (mutated in place, like the original).
 * @param {string[]} keys  Property names to compare, in priority order.
 * @returns {Promise<Object[]>} The same array, sorted.
 */
async function awaitSort(data, keys) {
  return data.sort((a, b) => {
    for (const key of keys) {
      const left = String(a[key] ?? '').toLowerCase();
      const right = String(b[key] ?? '').toLowerCase();
      if (left < right) return -1;
      if (left > right) return 1;
    }
    return 0;
  });
}
Related
I am trying to create a script that pulls from the coin market cap API and displays the current price. The script is working fine on the back end when I assign the variable a value. However, when I try to run the function on sheets the returned value is null.
/**
 * Google Sheets custom function: fetches the current USD price for `ticker`
 * from the CoinMarketCap quotes API.
 *
 * Bug fix: the original only logged the price, so the calling cell showed
 * null — a custom function must `return` the value it wants displayed.
 *
 * @param {string} ticker Crypto symbol, e.g. "BTC" or "ETH".
 * @return {number} Latest USD price for the symbol.
 */
function marketview(ticker) {
  var url = "https://pro-api.coinmarketcap.com/v1/cryptocurrency/quotes/latest?CMC_PRO_API_KEY=XXX&symbol=" + ticker;
  var data = UrlFetchApp.fetch(url);
  const jsondata = JSON.parse(data);
  Logger.log(jsondata.data[ticker].quote['USD'].price)
  return jsondata.data[ticker].quote['USD'].price
}
My execution logs show that the scripts are running, but when I use the function and try to quote ETH, for example, the script runs for BTC.
When I do this on the backend and assign ETH the script works fine and returns the right quote. Any ideas on what I'm missing?
I did the same with the CoinGecko API and had an issue with all my requests being rejected with a quota-exceeded error.
I understood that Google sheets servers IPs address were already spamming coingecko server. (I was obviously not the only one to try this).
This is why I used an external service like apify.com to pull the data and re-expose data over their API.
This is my AppScripts coingecko.gs:
/**
 * Fetch the CoinGecko market-prices dataset produced by the Apify actor's
 * most recent SUCCEEDED run and parse it via ImportJSON.
 * @param {string} key   Apify API token.
 * @param {string} actor Apify actor id.
 */
async function GET_COINGECKO_PRICES(key, actor) {
  const datasetUrl = [
    `https://api.apify.com/v2/acts/${actor}/runs/last/dataset/items`,
    `?token=${key}`,
    `&status=SUCCEEDED`,
  ].join('');
  return ImportJSON(datasetUrl);
}
You need ImportJSON function, available here: https://github.com/bradjasper/ImportJSON/blob/master/ImportJSON.gs
Then in a cell I write: =GET_COINGECKO_PRICES(APIFY_API_KEY,APIFY_COINGECKO_MARKET_PRICES), you will have to create two field named APIFY_API_KEY and APIFY_COINGECKO_MARKET_PRICES in order for this to work.
Then register on apify.com, then you'll have to create an actor by forking apify-webscraper actor.
I set the StartURLs with https://api.coingecko.com/api/v3/coins/list, this will give me the total number of existing crypto (approx 11000 as of today), and number of page so I can run the request concurrently (rate limit is 10 concurrent requests on coingecko), then I just replace /list with /market and set the proper limit to get all the pages I need.
I use the following for the tasks page function:
/**
 * Apify pageFunction: scrape CoinGecko market prices.
 * Starting from the /coins/list JSON rendered in the page, computes the page
 * count, fetches /coins/markets pages (in concurrent batches or sequentially),
 * then returns the id-sorted, symbol-upper-cased price list.
 *
 * Bug fix: the sequential (non-batch) loop guarded on
 * `page <= MAX_PAGE_TO_SCRAP`, comparing the fetched ARRAY against a number
 * (always false), so it stopped after one page. It now compares `pageIndex`.
 */
async function pageFunction(context) {
  let marketPrices = [];
  const ENABLE_CONCURRENCY_BATCH = true;
  const PRICE_CHANGE_PERCENTAGE = ['1h', '24h', '7d'];
  const MAX_PAGE_TO_SCRAP = 10;
  const MAX_PER_PAGE = 250;
  const MAX_CONCURRENCY_BATCH_LIMIT = 10;
  // Give the page time to render its JSON body.
  // NOTE(review): Apify's web-scraper context usually exposes `waitFor`
  // (lower-case w) — confirm `WaitFor` is correct for this actor version.
  await context.WaitFor(5000);
  const cryptoList = readJson();
  const totalPage = Math.ceil(cryptoList.length / MAX_PER_PAGE);
  context.log.info(`[Coingecko total cryptos count: ${cryptoList.length} (${totalPage} pages)]`)
  // Parse the raw JSON shown in the <pre> element of the /coins/list page.
  function readJson() {
    try {
      const preEl = document.querySelector('body > pre');
      return JSON.parse(preEl.innerText);
    } catch (error) {
      throw Error(`Failed to read JSON: ${error.message}`)
    }
  }
  // Fetch one /coins/markets page, append its rows to marketPrices, and
  // return the page array (its length signals when to stop).
  async function loadPage($page) {
    try {
      const params = {
        vs_currency: 'usd',
        page: $page,
        per_page: MAX_PER_PAGE,
        price_change_percentage: PRICE_CHANGE_PERCENTAGE.join(','),
        sparkline: true,
      }
      // Reuse the /coins/list request URL, swapping the trailing path segment.
      let pageUrl = `${context.request.url.replace(/\/list$/, '/markets')}?`;
      pageUrl += [
        `vs_currency=${params.vs_currency}`,
        `page=${params.page}`,
        `per_page=${params.per_page}`,
        `price_change_percentage=${params.price_change_percentage}`,
      ].join('&');
      context.log.info(`GET page ${params.page} URL: ${pageUrl}`);
      const page = await fetch(pageUrl).then((response) => response.json());
      context.log.info(`Done GET page ${params.page} size ${page.length}`);
      marketPrices = [...marketPrices, ...page];
      return page
    } catch (error) {
      throw Error(`Fail to load page ${$page}: ${error.message}`)
    }
  }
  try {
    if (ENABLE_CONCURRENCY_BATCH) {
      // Build one fetcher per page (capped at MAX_PAGE_TO_SCRAP) and run
      // them in batches of MAX_CONCURRENCY_BATCH_LIMIT to respect
      // CoinGecko's 10-concurrent-request limit.
      const fetchers = Array.from({ length: totalPage }).map((_, i) => {
        const pageIndex = i + 1;
        if (pageIndex > MAX_PAGE_TO_SCRAP) {
          return null;
        }
        return () => loadPage(pageIndex);
      }).filter(Boolean);
      while (fetchers.length) {
        await Promise.all(
          fetchers.splice(0, MAX_CONCURRENCY_BATCH_LIMIT).map((f) => f())
        );
      }
    } else {
      // Sequential fallback: fetch pages one at a time until an empty page
      // or the page cap is reached (bug fix: compare pageIndex, not page).
      let pageIndex = 1
      let page = await loadPage(pageIndex)
      while (page.length !== 0 && pageIndex <= MAX_PAGE_TO_SCRAP) {
        pageIndex += 1
        page = await loadPage(pageIndex)
      }
    }
  } catch (error) {
    context.log.info(`Fetchers failed: ${error.message}`);
  }
  context.log.info(`End: Updated ${marketPrices.length} prices for ${cryptoList.length} cryptos`);
  // Sort by id (case-insensitive), log a sanity-check entry, and upper-case
  // every symbol before returning the dataset.
  const data = marketPrices.sort((a, b) => a.id.toLowerCase() > b.id.toLowerCase() ? 1 : -1);
  context.log.info(JSON.stringify(data.find((item) => item.id.toLowerCase() === 'bitcoin')));
  function sanitizer(item) {
    item.symbol = item.symbol.toUpperCase()
    return item;
  }
  return data.map(sanitizer)
}
I presume you are hitting the same issue I had with CoinMarketCap, and that you could do the same with it.
You're not returning anything to the sheet, just logging it. Return it:
return jsondata.data[ticker].quote['USD'].price
Behavior I'm getting from the code shown in the attached print screen: my FOR loop executes and then the two 'LET' (i.e. let path_get... and let get = https.get...).
Problem: when the 2nd LET is executed, the code goes back to the FOR, without ever executing the function storeCarData.
My intention: every time that the second LET executes (GET call), I want storeCarData to to be executed taking as inputs the result of this GET. This is probably an async/await question but I need your help in pointing out where to insert the asyncs/awaits if possible. I need to execute ALL of the functions in sequence (i.e one executes only after the previous one has returned).
Could you please help me here? In summary: when I execute the GET I want storeCarData to run, using the GET outputs as inputs for the function.
Thank you in advance.
Diego
PS: full code below
// KBB vehicle-price scraper: fetch one page of vehicles, then for each
// vehicle fetch C2C and C2B price tables and assemble per-car data.
// NOTE(review): `res` is never used, so this handler never sends an HTTP
// response; `kbb_vehicles` is declared but never populated. The https.get
// callbacks run asynchronously, long after the for loops finish — this is
// why storeCarData appears to run "after" the loop (see answer below).
const https = require('https');
const url = require('url');
exports.handler = (req, res) => {
// Resume pagination from req.body.state if present, else start at page 1.
var page_number = (typeof(req.body.state) == 'undefined' || typeof(req.body.state.page_number) == 'undefined') ? 1 : req.body.state.page_number+1;
var kbb_vehicles = [];
var price_type = [2,3];
var price_description = ['C2C','C2B'];
// PRICE TYPES
// "ID": 1, "Typical Dealer Asking Price"
// "ID": 2, Private Party Value (for owners)"
// "ID": 3, "Trade-In Value"
// "ID": 4, "Private Party Value (for shoppers)"
// Single timestamp shared by every record produced in this invocation.
var epoch = new Date().getTime();
console.log(`Date time is ${epoch}`);
var path_get_all_vehicles = `/Gps/GetAllVehicle?securityToken=${req.body.secrets.securityToken}&language=pt-BR&withCount=true&pageNumber=${page_number}&records=100`;
// GET ALL CARS
let get = https.get(
{
hostname: 'api.kbb.com.br',
path: path_get_all_vehicles
},
(getRes) => {
var reply = "";
// Accumulate the response body chunk by chunk.
getRes.on("data", (chunk) => {
(reply += chunk);
}
);
// "end" fires once the full body has arrived; only then is it parseable.
getRes.on("end", () => {
var obj = "";
var obj = JSON.parse(reply);
gotCars(obj.Response.VehicleList);
}
);
}
);
// Called once the vehicle list is available; issues one price request per
// car per price type (2 = C2C, 3 = C2B).
function gotCars(carsJustObtained) {
// PASS ALL CARS
for (var car of carsJustObtained) {
// NOW FOR A GIVEN CAR...
for (var i=0; i<=1; i++){
// ...GET PRICES TYPES 2 AND 3
let path_get_all_prices = `/Gps/GetAllVehiclePrices?vehicleID=${car.ID}&securityToken=${req.body.secrets.securityToken}&vehiclePriceTypeID=${price_type[i]}`;
// console.log(`path_get_all_prices is ${path_get_all_prices}`);
// console.log(`i is ${i}`);
// console.log(`price_type[i] is ${price_type[i]}`);
// Fire-and-forget request; its "end" handler runs after the loops exit.
let get = https.get(
{
hostname: 'api.kbb.com.br',
path: path_get_all_prices
},
(getRes) => {
var reply = "";
getRes.on("data", (chunk) => {
(reply += chunk);
}
);
getRes.on("end", () => {
var obj = "";
var obj = JSON.parse(reply);
// WRITE PRICES INTO OBJECT >> APPEND RESULTS
var grades_array = obj.Response;
storeCarData(car, grades_array,i);
});
}
);
// price_description[0] is equal to C2C
// price_description[1] is equal to C2B
// grades_array[0] corresponds to VehicleGrade=2, thus "Fair"
// grades_array[1] corresponds to VehicleGrade=3, thus "Good"
// grades_array[2] corresponds to VehicleGrade=2, thus "Excellent"
// NOTE(review): declared inside the inner for loop — function declarations
// hoist, so this works, but it is re-declared on every iteration.
function storeCarData(car, grades_array,i){
// Flatten the vehicle record plus the shared timestamp into one object.
var carData = {
timestamp: epoch,
ID : car.ID,
BrandID : car.BrandID,
ModelID : car.ModelID,
VersionID : car.VersionID,
BrandName : car.BrandName,
ModelName : car.ModelName,
VersionName : car.VersionName,
Year : car.Year,
MSRP : car.MSRP,
ISV : car.ISV,
TransportCost : car.TransportCost,
AdminCost : car.AdminCost,
RoadTax : car.RoadTax,
StartYearImported : car.StartYearImported,
EndYearImported : car.EndYearImported,
BodyTypeID : car.BodyTypeID,
CC : car.CC,
HP : car.HP,
KW : car.KW,
Torque : car.Torque,
NrCilinders : car.NrCilinders,
TransmissionTypeID : car.TransmissionTypeID,
Gears : car.Gears,
NrDoors : car.NrDoors,
NrSeats : car.NrSeats,
FuelTypeID : car.FuelTypeID,
EngineTypeID : car.EngineTypeID,
ExhaustTypeID : car.ExhaustTypeID,
DistanceAxis : car.DistanceAxis,
Volume : car.Volume,
DriveTrainID : car.DriveTrainID,
Weight : car.Weight,
WeightCarriage : car.WeightCarriage,
VehicleTypeID : car.VehicleTypeID,
VehicleCilindersID : car.VehicleCilindersID,
FirstMediaURL : car.FirstMediaURL,
CombinedConsumption : car.CombinedConsumption,
UrbanConsumption : car.UrbanConsumption,
ExtraUrbanConsumption : car.ExtraUrbanConsumption,
MaximumSpeed : car.MaximumSpeed,
FuelTankRangeKM : car.FuelTankRangeKM,
FuelTankCapacity : car.FuelTankCapacity,
Acceleration0to100 : car.Acceleration0to100,
EmissionsCO2 : car.EmissionsCO2,
FirstMktCategoryID : car.FirstMktCategoryID,
PriceType : car.PriceType,
Grade : car.Grade,
TAMileage : car.TAMileage,
TAAge : car.TAAge,
AVMileage : car.AVMileage,
AuctionPrice : car.AuctionPrice,
AskingPrice : car.AskingPrice,
TradeInPrice : car.TradeInPrice,
PrivatePartyPrice : car.PrivatePartyPrice,
C2CPrice : car.C2CPrice,
VersionCatalogID : car.VersionCatalogID,
ExternalReference : car.ExternalReference,
VehicleSeriesName : car.VehicleSeriesName,
VehicleVersionNameLong : car.VehicleVersionNameLong,
FirstMediaCompleteURLLarge : car.FirstMediaCompleteURLLarge,
FirstMediaCompleteURLMedium : car.FirstMediaCompleteURLMedium,
FirstMediaCompleteURLSmall : car.FirstMediaCompleteURLSmall,
BedLength : car.BedLength,
CabType : car.CabType,
}
storePrices(grades_array,carData.timestamp, carData.ID, price_description[i]);
};
// Build the per-grade price record for one car and one price description.
// NOTE(review): `prices` is constructed but never stored, returned, or
// logged — presumably the persistence call is missing or lives elsewhere.
function storePrices(grades_array, timestamp, carID, price_description){
var prices = {
timestamp:timestamp,
carID:carID,
[`PriceFPP_FairCondition_${price_description}`]: grades_array[0].VehiclePrices.FPP,
[`PriceFPP_GoodCondition_${price_description}`]: grades_array[1].VehiclePrices.FPP,
[`PriceFPP_ExcellentCondition_${price_description}`]: grades_array[2].VehiclePrices.FPP,
[`PriceLow_FairCondition_${price_description}`]: grades_array[0].VehiclePrices.PriceLow,
[`PriceLow_GoodCondition_${price_description}`]: grades_array[1].VehiclePrices.PriceLow,
[`PriceLow_ExcellentCondition_${price_description}`]: grades_array[2].VehiclePrices.PriceLow,
[`PriceHigh_FairCondition_${price_description}`]: grades_array[0].VehiclePrices.PriceHigh,
[`PriceHigh_GoodCondition_${price_description}`]: grades_array[1].VehiclePrices.PriceHigh,
[`PriceHigh_ExcellentCondition_${price_description}`]: grades_array[2].VehiclePrices.PriceHigh,
[`EquipmentAdjustedPrice_FairCondition_${price_description}`]: grades_array[0].VehiclePrices.EquipmentAdjustedPrice,
[`EquipmentAdjustedPrice_GoodCondition_${price_description}`]: grades_array[1].VehiclePrices.EquipmentAdjustedPrice,
[`EquipmentAdjustedPrice_ExcellentCondition_${price_description}`]: grades_array[2].VehiclePrices.EquipmentAdjustedPrice,
[`BaseDiscountedPrice_FairCondition_${price_description}`]: grades_array[0].VehiclePrices.BaseDiscountedPrice,
[`BaseDiscountedPrice_GoodCondition_${price_description}`]: grades_array[1].VehiclePrices.BaseDiscountedPrice,
[`BaseDiscountedPrice_ExcellentCondition_${price_description}`]: grades_array[2].VehiclePrices.BaseDiscountedPrice
}
};
};
};
};
}
Almost any time that you see a call to some .on(...), that code isn’t going to run right away, but at some future time.
To make this async, you can wrap the whole thing in a Promise() constructor, and then call resolve() when the thing that you were waiting for has actually happened.
The minimal change to your code for that would be:
// Wrap the callback-style https.get in a Promise so the caller can
// `await get` and be certain gotCars() has finished before continuing.
// Bug fix vs. the original snippet: the redundant `var obj = ""` declaration
// (immediately shadowed by the JSON.parse result) has been removed.
let get = new Promise(resolve => {
  https.get(
    {
      hostname: "api.kbb.com.br",
      path: path_get_all_vehicles,
    },
    (getRes) => {
      var reply = "";
      // Accumulate the body; "end" fires once the full response has arrived.
      getRes.on("data", (chunk) => {
        reply += chunk;
      });
      getRes.on("end", () => {
        var obj = JSON.parse(reply);
        gotCars(obj.Response.VehicleList);
        // Resolve only after the body is parsed and handed off.
        resolve();
      });
    }
  );
});
Now if you console.log() the value of get, you’ll see that it’s a Promise object. The for loop will still keep running, but now that you have a promise, you can await get on the next line. To be able to use that syntax though, you’ll have to make the containing function async, that is, change (req, res) => … to async (req, res) => ….
Some additional things to worry about later, once you get the basics working:
What if the http request fails? Then you’ll want to throw an exception or something. Look up Promise.reject() for that case.
Now that the request handler is async, express’s default error-handling stuff won’t work so well. This package might help though I personally haven’t used it.
Those are other issues for other times, but hopefully this helps you get unstuck.
I am unable to pass any object or arrays to IPCRenderer.
I am getting error when passing an object or array through ipcs, I have even tried to send by converting to string using JSON.stringify but it converts it into empty object string.
I have tried passing a FileList, an array of objects, and even a plain object — nothing passes. Only strings or handwritten objects work.
I've read that it uses Structured Clone Algorithm and fileList & Array is allowed by this algorithm
ERROR:
electron/js2c/renderer_init.js:74 Uncaught Error: An object could not be cloned.
at EventEmitter.i.send.i.send (electron/js2c/renderer_init.js:74)
at HTMLButtonElement.compressNow (ImageHandling.js:190)
I have tried many possible solutions but nothing worked
code:
const compressNow = () => {
  // Bug fix: the original line ended with a stray `.` after the call,
  // which is a syntax error. Even with that removed, this send throws
  // "An object could not be cloned." because `filess` holds DOM File
  // objects, which Electron IPC cannot structured-clone.
  ipcRenderer.send("image:compress", filess);
  // filess is a variable containing an array of selected files from an HTML input.
}
Now i have tried to send filess as JSON.stringify, i tried to send it as an object but nothing works unless i manually write a dummy object or string.
Here's My Github Repo for this project
Files With ErrorJ:-
ImageHandling.js
// Electron renderer-side setup: Node fs via window.require, the ipcRenderer
// bridge for talking to the main process, a querySelector shorthand, and the
// mutable list of currently selected File objects.
const fs = window.require('fs');
const {ipcRenderer} = require("electron")
// Tiny document.querySelector wrapper.
const SELECT = (target) => document.querySelector(`${target}`)
// Currently selected files (appended to by the file-input change handler).
var filess = []
const imgUploadInput = SELECT("#imgUploadInput")
const warning = SELECT("#warning")
// Fallback loader used when an <img> fails to load from its file-path src:
// read the file synchronously and inline it as a base64 data URI instead.
const setImgBase64 = (imgEl, file) => {
  const encoded = fs.readFileSync(file.path).toString('base64');
  imgEl.setAttribute("src", `data:image/png;base64,${encoded}`);
}
// Re-render the thumbnail grid from `filess`: toggles the compress container,
// warns on very large selections, then (deferred via setTimeout so the
// "LOADING" label paints first) rebuilds the image elements.
// NOTE(review): `files.length` would throw if `filess` were falsy, since
// `files` would then be the falsy value itself — confirm filess is always an
// array here.
const renderImages = () => {
const files = filess && Array.from(filess)
const defaultImg = SELECT("#defaultImg")
const addImgBtn = SELECT("#addImgBtn")
imgUploadInput.disabled = true;
let numOfFiles = files.length
if (numOfFiles < 1) {
SELECT("#compressContainer").style.visibility = "hidden"
} else {
SELECT("#compressContainer").style.visibility = "visible"
}
// Soft warning for very large selections (performance only, not a limit).
if (numOfFiles > 49) {
warning.innerHTML = `<b style="font-weight:bold; color:red;">WARNING:</b><br/>
<span style="padding:10px;text-align:left">
Your processor/computer may not be able to process ${numOfFiles} Images at once, We recommend selecting less than 50 Images at once for better performance.
</span>
`;
}
addImgBtn.innerHTML = `LOADING.....`
if (defaultImg && numOfFiles > 0)
defaultImg.remove();
// Deferred so the browser can paint the LOADING state before the DOM churn.
setTimeout(() => {
if (files && numOfFiles > 0) {
let displayImages = SELECT("#displayImages")
displayImages.innerHTML = ""
files ?. forEach((file, i) => {
let divEl = document.createElement("div")
let imgEl = document.createElement("img")
imgEl.src = file.path
imgEl.id = `PNG_${i}_${
btoa(file.name)
}`
divEl.className = "displayedImg"
// Clicking a thumbnail loads it into the lightbox viewer.
imgEl.setAttribute("onclick", `document.getElementById('ImageView').src=this.src`)
const a = document.createElement("a")
a.appendChild(imgEl)
a.setAttribute("href", `#ViewImage`)
a.className = "perfundo__link"
divEl.appendChild(a)
divEl.className = "displayedImg perfundo"
displayImages.appendChild(divEl)
// After the last thumbnail: clear the warning and refresh the counter.
if (i == files.length - 1) {
warning.innerHTML = "";
updateNumOfImages();
}
imgEl.onerror = () => setImgBase64(imgEl, file) // Convert to base64 only on error; converting every image up front froze the machine.
})
addImgBtn.innerHTML = "+ Add MORE"
imgUploadInput.disabled = false
findDuplicate()
}
}, 0);
}
// Report duplicate selections in `filess`: a file counts as duplicate when
// an earlier entry already used its name, so first occurrences are excluded
// from `duplicateFiles`.
const hasDuplicate = () => {
  const FileNames = filess.map((f) => f.name);
  const duplicateFiles = filess.filter(
    (file, index) => FileNames.indexOf(file.name) !== index
  );
  return { FileNames, duplicateFiles, FilesLength: duplicateFiles.length };
}
// Detect duplicate file names and either prompt the user (swal dialog) to
// remove them or, when "Never Ask" was chosen previously, just show an inline
// warning with a manual REMOVE DUPLICATE button.
// @param forceAlert when true (button click), always show the dialog and hide
// the "Never Ask" option.
const findDuplicate = (forceAlert = false) => {
if (filess && filess.length) {
// NOTE(review): hasDuplicate() is called twice for two destructured fields;
// one call destructuring both would do the same work once.
let {FileNames} = hasDuplicate()
let {duplicateFiles} = hasDuplicate()
if (duplicateFiles.length) { // alert(``)
let countFiles = duplicateFiles.length
let fileStr = countFiles > 1 ? "files" : "file"
console.log("result from removeDup=> ", filess, " \n dupfilename=> ", FileNames, " \n dupfiles=> ", duplicateFiles)
// "NeverAsk" in localStorage suppresses the dialog in favour of the
// inline warning below.
let shouldNotAsk = localStorage.getItem("NeverAsk")
let msg = `You've selected ${
countFiles > 1 ? countFiles : "a"
} duplicate ${fileStr}`
// Inline warning markup; its button re-invokes findDuplicate(true).
let duplInner = `<span style='color:red'>
<b>WARNING</b>
<p style="margin:0px;line-height:1"> ${msg} . <button onClick="findDuplicate(true)" type="button" class="btn btn-danger btn-rounded btn-sm">REMOVE DUPLICATE</button></p>
</span>`
if (! shouldNotAsk || forceAlert) {
swal("DUPLICATE FILES DETECTED", `${msg} , Would you like to un-select duplicate ${fileStr} having same name?`, {
icon: 'warning',
dangerMode: true,
buttons: {
cancel: true,
// The "Never Ask" button is omitted when the user explicitly clicked
// the REMOVE DUPLICATE button (forceAlert).
...forceAlert ? {} : {
never: "Never Ask"
},
confirm: "Yes !"
}
}).then((Yes) => {
if (Yes == "never") {
localStorage.setItem("NeverAsk", true)
warning.innerHTML=duplInner
} else if (Yes) {
removeDuplicates()
}
})
} else {
// "Never Ask" is set: show only the inline warning, no dialog.
warning.innerHTML=duplInner
}
}
}
}
// Remove duplicate file selections by name, then re-render the thumbnails.
// After the merge below, one copy of each duplicated name can survive
// (uniqueFiles excludes every name that had duplicates; duplicateFiles
// re-adds the later occurrences), so the post-alert re-check prunes any
// duplicates that remain.
const removeDuplicates = (showAlert = true) => {
  let { FileNames } = hasDuplicate()
  let { duplicateFiles } = hasDuplicate()
  let duplicateFileNames = duplicateFiles.map(f => f.name)
  let uniqueFiles = filess.filter((file) => !duplicateFileNames.includes(file.name))
  filess = [
    ...uniqueFiles,
    ...duplicateFiles
  ]
  console.log("result from removeDup=> ", filess, " \n filename=> ", FileNames, " \n dupfiles=> ", duplicateFiles, "\n unique fil=> ", uniqueFiles)
  renderImages()
  if (showAlert) {
    swal("DONE", "Removed Duplicate Files ", { icon: 'success' }).then(() => {
      renderImages()
      setTimeout(() => {
        let hasDuplicateFiles = hasDuplicate().FilesLength
        // Bug fix: the original tested `if (hasDuplicate)` — the function
        // reference itself, which is always truthy — so the cleanup pass ran
        // even when no duplicates remained. Test the computed count instead.
        if (hasDuplicateFiles) { // Re-check whether any duplicate files are left after this removal pass.
          removeDuplicates(false) // false suppresses the alert and stops the recursion from looping.
        }
        renderImages()
      }, 10);
    })
  }
}
// Show a green "Selected N Image(s)" status line in the warning area.
const updateNumOfImages = () => {
  warning.innerHTML = `
<span style="text-align:left; color:green">
Selected ${filess.length} Image(s)
</span>
`;
}
// Hand the selected files to the main process for compression.
// NOTE(review): `filess` holds DOM File objects; Electron 9+ cannot
// structured-clone these over IPC and throws "An object could not be cloned."
// (see the update below, which sends path strings instead).
const compressNow = () => {
ipcRenderer.send("image:compress", filess)
// alert("WOW")
}
// UI wiring: the compress button triggers the IPC send; changes on the file
// input append the newly chosen files to `filess` and re-render thumbnails.
CompressBtn.addEventListener("click", compressNow)
imgUploadInput.addEventListener("change", (e) => {
let SelectedFiles = e.target.files
if (SelectedFiles && SelectedFiles.length) {
// Append (not replace) so repeated selections accumulate.
filess = [
... filess,
... SelectedFiles
]
renderImages()
}
})
// SELECT("#imgUploadInput").addEventListener("drop",(e)=>console.log("DROP=> ",e))
UPDATE:-
I REPLACED THIS:
// Original (broken) version: sends the whole File array, which Electron IPC
// cannot serialize ("An object could not be cloned.").
const compressNow = () => {
ipcRenderer.send("image:compress",filess)
}
INTO THIS:-
// Working version: send each file's path individually — plain strings always
// survive Electron's structured-clone IPC serialization.
const compressNow = () => {
  for (const file of filess) {
    ipcRenderer.send("image:compress", file.path);
  }
}
Now here I am sending the files one by one via forEach — it is actually sending the string file path, which is why it works. I am still confused why I have to do this: why can't I send the whole FileList? I assume this loop approach is bad practice because it adds an extra loop and more CPU work, which wouldn't be necessary if I could send the whole array.
See Behavior Changed: Sending non-JS objects over IPC now throws an exception. DOM objects etc. are not serializable. Electron 9.0 (and newer) throws "object could not be cloned" error when unserializable objects are sent.
In your code, File and FileList are DOM objects.
If you want to avoid using forEach, try this code:
// Single-message alternative to the forEach approach: map the File objects
// down to their (cloneable) path strings and send them as one array.
const compressNow = () => {
  ipcRenderer.send("image:compress", filess.map((file) => file.path));
}
Can refer to electron github issue tracker for this issue (already closed)
Error: An object could not be cloned #26338
Docs for ipcRenderer.send(channel, ...args)
This issue mainly comes when we have non-cloneable values like function within an object in data we are sending via IPC, to avoid that we can use JSON.stringify() before sending and JSON.parse() later on receiving end, but doing so will cause to lose some of the values eg:
// Demonstration: JSON.stringify silently drops function-valued members, so
// the `foo` key vanishes from the output (this logs {"x":10}).
const obj = {
  x: 10,
  foo() {
    console.log('This is non-cloneable value')
  }
}
console.log(JSON.stringify(obj))
output:{"x":10}
Instead of sending the images save them in fs and send the path
The simplest thing that could possibly work is to use lodash cloneDeep()
// Main-process IPC handler: deep-clone the store's list (lodash cloneDeep)
// so the renderer receives a plain, serializable copy with no functions or
// prototypes attached.
ipcMain.handle('stuffgetList', async () => {
return _.cloneDeep(await stuffStore.getList())
})
in the windows JSON.stringify()
in the main.js JSON.parse()
Remove :compress from. .send method and try
I have a cloud function that is triggered when a sale/purchase is committed into firestore. This function's purpose is to update the inventory level centrally.
The function works just fine if I'm updating an item's inventory at only 1 warehouse, but doing so for multiple warehouses has unexpected behavior. I'm looping through all the warehouses that are affected to calculate the total inventory level changes, and every iteration kicks-off a javascript promise.
The problem seems to occur with the way the promises are invoked. E.g: if I want to update 3 warehouses and loop 3 times, somehow 5 promises are being kicked-off. This is visible through the logs. I've researched similar questions here, but the solutions were suggested while firestore was still in beta and might not be the right way forward. (Firestore transactions getting triggered multiple times resulting in wrong data)
Here is the code
/**
 * Firestore trigger: when a stock transaction document is created, apply its
 * line-item quantity deltas to the per-branch inventory stored on each item
 * document, one Firestore transaction per line item.
 *
 * Bug fix: `companyId` was never extracted from the trigger params, so the
 * itemRef below referenced an undefined identifier.
 *
 * NOTE: Firestore re-runs a transaction when a document it read is modified
 * concurrently, so the per-iteration logs can appear more times than the
 * loop count — retries, not extra promises.
 */
export const onTransactionCreate = functions.firestore
  .document('/companies/{companyId}/sub_transactions/{transId}')
  .onCreate(async (snapshot, context) => {
    const transId = context.params.transId
    const companyId = context.params.companyId
    const stock_transaction: IStockTransaction = <IStockTransaction>snapshot.data()
    const trans_type: TRANS_TYPE = stock_transaction.trans_type
    const promises: any[] = []
    stock_transaction.lineItems.forEach((element, index) => {
      const ITEM_GUID = element.item_guid
      // Signed quantity change for this line item.
      const is_increasing = isIncreasingTransaction(element.line_trans_type)
      const delta_stock = element.qty_transaction * (is_increasing ? 1 : -1)
      const TARGET_BRANCH_ID = element.target_branch_guid
      const itemRef = db.collection(FIRESTORE_PATHS.COL_COMPANIES).doc(companyId).
        collection(FIRESTORE_PATHS.SUB_COMPANIES_ITEMS).
        doc("" + ITEM_GUID)
      const item_promise = db.runTransaction(async t => {
        try {
          const item_doc = await t.get(itemRef)
          // Existing per-branch quantities and branch ids (default to empty).
          const item_branch_quantities: IBranchQuantity[] = (item_doc.data()!.branch_quantities || [])
          const item_branch_ids: string[] = (item_doc.data()!.available_branch_ids || [])
          const branch_index = item_branch_ids.indexOf(TARGET_BRANCH_ID)
          console.log(`${transId} Line Item ${index}, after document.get(), search branch index: ${branch_index}`)
          if (branch_index !== -1) {
            // Branch already tracked: apply the delta to its quantity.
            const prev_qty = item_branch_quantities[branch_index]
            const updated_qty = prev_qty.quantity + delta_stock
            item_branch_quantities[branch_index] = {
              item_guid: prev_qty.item_guid,
              branch_guid: prev_qty.branch_guid,
              quantity: updated_qty
            }
            console.log(`${transId} Line Item ${index} Updating qty # item ${delta_stock}, prev qty ${prev_qty.quantity}`)
          } else {
            // First movement for this branch: register it with the delta.
            item_branch_ids.push(TARGET_BRANCH_ID)
            item_branch_quantities.push({
              item_guid: element.item_guid,
              branch_guid: TARGET_BRANCH_ID,
              quantity: delta_stock
            })
            console.log(`${transId} Line Item ${index} Adding qty # item ${delta_stock}`)
          }
          t.update(itemRef, {
            branch_quantities: item_branch_quantities,
            available_branch_ids: item_branch_ids
          })
        } catch (err) {
          throw new Error(err)
        }
      })
      promises.push(item_promise)
    });
    return Promise.all(promises)
  })
we have found the solution by reading this article.
A transaction consists of any number of get() operations followed by any number of write operations such as set(), update(), or delete(). In the case of a concurrent edit, Cloud Firestore runs the entire transaction again. For example, if a transaction reads documents and another client modifies any of those documents, Cloud Firestore retries the transaction. This feature ensures that the transaction runs on up-to-date and consistent data.
// Solution snippet: one Firestore transaction per line item, reading the
// item's per-branch quantities, applying the signed delta, and writing back.
// (Relies on companyId, current_branch_id, and item_update_transactions from
// the enclosing handler, which is not shown here.)
lineItems.forEach(element => {
// Signed quantity change: positive for increasing transaction types.
const delta_transaction = element.qty * (isLineTransIncrease(element.line_trans_type) ? 1 : -1)
const itemRef = db.collection('companies').doc(companyId).collection('sub_items').doc("" + element.item_guid)
const p = db.runTransaction(t => {
return t.get(itemRef)
.then(doc => {
// Default both arrays when the item document has no branch data yet.
let item_branch_quantities: IBranchQuantity[] = doc.data()!.branch_quantities
let item_branch_ids: string[] = doc.data()!.available_branch_ids
if (!item_branch_quantities)
item_branch_quantities = new Array()
if (!item_branch_ids)
item_branch_ids = new Array()
const branch_index = item_branch_ids.indexOf(current_branch_id)
if (branch_index !== -1) {
// Branch already tracked: apply the delta to its quantity.
const prev_qty = item_branch_quantities[branch_index]
const updated_qty: number = prev_qty.quantity + delta_transaction
item_branch_quantities[branch_index] = {
item_guid: prev_qty.item_guid,
branch_guid: prev_qty.branch_guid,
quantity: updated_qty
}
} else {
// First movement for this branch: register it with the delta.
item_branch_ids.push(current_branch_id)
item_branch_quantities.push({
item_guid: element.item_guid,
branch_guid: current_branch_id,
quantity: delta_transaction
})
}
// NOTE(review): writes `branch_ids` here while the other version of this
// code writes `available_branch_ids` — confirm which field name the item
// documents actually use.
t.update(itemRef, {
branch_quantities: item_branch_quantities,
branch_ids: item_branch_ids
})
})
})
item_update_transactions.push(p)
});
return Promise.all(item_update_transactions)
})
// Transaction types 1 and 2 both represent stock increases; everything else
// is treated as a decrease.
function isLineTransIncrease(line_trans: number): boolean {
  return [1, 2].includes(line_trans)
}
I have a pretty simple firebase function :
// Realtime Database trigger: when a pending follow request is written at
// PendingRequest/{receiver_id}/{sender_id}, push an FCM data message to the
// receiver's device.
// NOTE(review): target_token reads child('sender') and sender_token reads
// child('receiver') — per the answer below this looks swapped; confirm
// whether it is an intentional work-around for the reported value swap.
// NOTE(review): this paste is truncated — the closing `});` of the onWrite
// callback is missing.
exports.sendFollowNotification = functions.database.ref('PendingRequest/{receiver_id}/{sender_id}').onWrite(requestEvent => {
const requestSnapShot = requestEvent.data;
// Ids come from the trigger path params; tokens from the written data.
const senderId = requestEvent.params.sender_id;
const targetId = requestEvent.params.receiver_id;
const target_token = requestSnapShot.child('sender').val();
const sender_token = requestSnapShot.child('receiver').val();
console.log('sender_id :'+senderId);
console.log('target_id :'+targetId);
console.log('target_token: '+ target_token);
console.log('sender_token: '+sender_token);
// Data-only FCM payload delivered to the target device.
const pendingRequestPayload = {
data: {
token_sender : sender_token,
token_target : target_token,
request_sender : senderId,
request_receiver : targetId,
my_message_id: '0'
}
};
// Only attempt delivery when a device token exists.
if(target_token != null){
// Send a message to devices subscribed to the provided topic.
return admin.messaging().sendToDevice(target_token, pendingRequestPayload)
.then(function (response) {
// See the MessagingTopicResponse reference documentation for the
// contents of response.
console.log("Successfully sent message:", response);
})
.catch(function (error) {
console.log("Error sending message:", error);
});
}
Whenever this function fires there are two values that gets swapped : senderId gets targetId value and vice versa. Both values are retrieved with the params property while nothing strange happens to the values i'm getting from requestSnapShot.child('value_name').val();
The dumb solution is just to swap the two values whenever i need them but well, that's a really dumb solution. What am I missing here ?
If "target" is "receiver", these are swapped:
const target_token = requestSnapShot.child('sender').val();
const sender_token = requestSnapShot.child('receiver').val();
Are you doing that intentionally to work around the problem?
Update:
It's hard to guess why this isn't working for you. I copied your code, eliminated your work-around, and shortened it for testing:
exports.sendFollowNotification = functions.database.ref('PendingRequest/{receiver_id}/{sender_id}')
.onWrite(requestEvent => {
const requestSnapShot = requestEvent.data;
const senderId = requestEvent.params.sender_id;
const targetId = requestEvent.params.receiver_id;
const target_token = requestSnapShot.child('receiver').val();
const sender_token = requestSnapShot.child('sender').val();
console.log('sender_id :'+senderId);
console.log('target_id :'+targetId);
console.log('target_token: '+ target_token);
console.log('sender_token: '+sender_token);
});
Ran with this data:
{
"PendingRequest" : {
"R1" : {
"S1" : {
"receiver" : "R-token",
"sender" : "S-token"
}
}
}
}
And got this log output:
sender_token: S-token
target_token: R-token
target_id :R1
sender_id :S1