I want to monitor a product on my own site for changes. I have stored all available sizes of the product in an array and I want to console.log once a new size gets added to the array.
I'm doing this just for practice, which is why I don't want to monitor the data source directly.
for (let product of products) {
main(product)
};
function main(product) {
request({
url: product.url
}, (err, res, body) => {
if (res.statusCode == 200) {
const $ = cheerio.load(body);
const availableSizes = []; //this stores all available sizes
$('.size_box').each((i, el) => {
let size = $(el).text()
availableSizes.push(size) //adds each size to availableSizes
})
} else {
console.log('Error ' + err)
console.log('Retrying in ' + (config.delay/1000) + ' second/s')
setTimeout(() => {
main(product)
}, config.delay)
};
});
};
Basically what I want to achieve/add to the code would be this:
if (<no sizes added>) {
setTimeout(() => {
main(product)
}, config.delay)
} else {
console.log('New size added ' + size)
}
But like I said, I'm not sure how to check for specific changes to the array without just spamming all available sizes all the time.
If there's a better way of monitoring sizes for changes, feel free to suggest it, just storing all sizes in an array was the first thing that came into my mind.
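To make that concrete, here is roughly what I have in mind as a sketch: keep the sizes from the previous scrape in a separate variable and diff the new list against it (previousSizes and checkForNewSizes are names I'm making up just for this sketch):
let previousSizes = []; // hypothetical: sizes seen on the last successful scrape
function checkForNewSizes(product, availableSizes) {
    // Anything present now that wasn't present last time is a newly added size.
    const addedSizes = availableSizes.filter(size => !previousSizes.includes(size));
    if (addedSizes.length === 0) {
        setTimeout(() => main(product), config.delay); // nothing new, poll again later
    } else {
        addedSizes.forEach(size => console.log('New size added ' + size));
    }
    previousSizes = availableSizes; // remember this scrape for the next comparison
}
The idea would be to call checkForNewSizes(product, availableSizes) at the end of the statusCode == 200 branch above.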
This is a bit of a hack, but unfortunately I do not know of a better way to do this. You can hook into each call to push by overriding the push method on the array when you define it. See below:
var myArr = [];
myArr.push = (...args)=>{
logOnPush(args);
Array.prototype.push.apply(myArr, args);
}
function logOnPush(res){
var container = document.getElementById("displayContainer");
container.innerText = container.innerText+'\n'+'Items added: ' + res;
console.log('Items added: ', res);
}
function addToArray(){
myArr.push(Math.round(Math.random() * 10));
var container = document.getElementById("displayContainer");
container.innerText = myArr.join(', ');
}
document.getElementById("addBtn").addEventListener('click', () => {
addToArray();
});
<button id="addBtn">Add to array</button>
<p id="displayContainer"></p>
<p id="consoleContainer"></p>
You could create a custom class whose data is updated through a custom push method. This class would act exactly like an Array object (you could even extend the Array class), but you can then add your custom behavior.
Take a look at this page for an example: https://github.com/wesbos/es6-articles/blob/master/54%20-%20Extending%20Arrays%20with%20Classes%20for%20Custom%20Collections.md
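For illustration, a minimal sketch of that idea (the SizeList class name and the onAdd callback are made up for this example, not taken from the linked article):
class SizeList extends Array {
    constructor(onAdd, ...items) {
        super(...items);
        this.onAdd = onAdd; // hypothetical callback, invoked with each newly pushed item
    }
    push(...items) {
        items.forEach(item => this.onAdd(item));
        return super.push(...items);
    }
}

const sizes = new SizeList(size => console.log('New size added ' + size));
sizes.push('S', 'M'); // logs "New size added S" and "New size added M"
Since it extends Array, the usual array methods and Array.isArray checks keep working on the instance.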
I'm working on a use case where a DynamoDB update should:
Dynamically upsert (update if present, insert if not present) an item, without hardcoding the item's components.
Use the DynamoDB Document Client for simplicity
And in the same atomic operation, update a simple counter
I started with an excellent utility method by Daniel Barrel at https://stackoverflow.com/a/63511693/15369972 that handles the update with dynamic values, but without the atomic counter.
I've attempted to add the atomic counter capability by adding the counter and its incrementor into the parameter objects after the dynamic values are loaded, but am getting a static value in the counter on update instead of a value that increments by one with each call.
Where is this going wrong? I call the modified update function with a table name, a dynamic javascript object, and an array containing the hash and sort key:
await update(tableName, jsonObjectToStore, ['myHashKey', 'mySortKey'])
And the modified update method that's not incrementing as I'd like, is:
async function update (tableName, item, idAttributeNames) {
var params = {
TableName: tableName,
Key: {},
ExpressionAttributeValues: {},
ExpressionAttributeNames: {},
UpdateExpression: "",
ReturnValues: "UPDATED_NEW"
};
for (const attname of idAttributeNames) {
params["Key"][attname] = item[attname];
}
let prefix = "set ";
let attributes = Object.keys(item);
for (let i=0; i<attributes.length; i++) {
let attribute = attributes[i];
if (!idAttributeNames.includes(attribute)) {
params["UpdateExpression"] += prefix + "#" + attribute + " = :" + attribute;
params["ExpressionAttributeValues"][":" + attribute] = item[attribute];
params["ExpressionAttributeNames"]["#" + attribute] = attribute;
prefix = ", ";
}
}
// Add the counter
params["UpdateExpression"] += ", #nImports = :nImports + :incr";
console.log(params["UpdateExpression"])
console.log(params["ExpressionAttributeValues"])
params["ExpressionAttributeValues"][":incr"] = 1;
params["ExpressionAttributeValues"][":nImports"] = 0;
console.log(params["ExpressionAttributeValues"])
console.log(params["ExpressionAttributeNames"])
params["ExpressionAttributeNames"]["#nImports"] = 'nImports'
console.log(params["ExpressionAttributeNames"])
await docClient.update
return await docClient.update(params).promise();
}
I worked with AWS Support to find a reasonable solution. They also were not sure how to do an atomic counter using the DynamoDB Document Client (as opposed to the low-level client, which has many documented examples), but they suggested the ADD action, which has the side effect of performing an atomic update on a numeric field.
So, in the example below, we construct our dynamic update from the object to be stored, then append the ADD clause to the update expression (without a comma!), and add what is in effect a numeric increment to the ExpressionAttributeValues for nImports. The following should be a complete working Lambda example; there are a few console.log statements to show what's happening:
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();
async function update (tableName, item, idAttributeNames) {
var params = {
TableName: tableName,
Key: {},
ExpressionAttributeValues: {},
ExpressionAttributeNames: {},
UpdateExpression: "",
ReturnValues: "UPDATED_NEW"
};
for (const attname of idAttributeNames) {
params["Key"][attname] = item[attname];
}
let prefix = "set ";
let attributes = Object.keys(item);
for (let i=0; i<attributes.length; i++) {
let attribute = attributes[i];
if (!idAttributeNames.includes(attribute)) {
params["UpdateExpression"] += prefix + "#" + attribute + " = :" + attribute;
params["ExpressionAttributeValues"][":" + attribute] = item[attribute];
params["ExpressionAttributeNames"]["#" + attribute] = attribute;
prefix = ", ";
}
}
console.log('params before adding atomic counter is:', params)
// Add the counter using the ADD syntax
params["UpdateExpression"] += " ADD #nImports :nImports"
params["ExpressionAttributeValues"][":nImports"] = 1;
params["ExpressionAttributeNames"]["#nImports"] = 'nImports'
console.log('params after adding atomic counter is:', params)
try {
const result = await docClient.update(params).promise();
console.log('after await, result is ', result);
return result;
} catch (err) {
console.log('err is ', err)
}
};
exports.handler = async (event) => {
const item = {title: 'sometitle', site_url: "www.amazon.com", key: "G"};
const body = await update('test_table', item, ['title', 'site_url']);
const response = {
statusCode: 200,
body: JSON.stringify(body),
};
return response;
}
The kind folks at AWS did a little more digging, and also pointed out an error in the initial code that, when corrected, should increment as desired using the SET operator.
Basically the original code referenced the value placeholder :nImports (which was set to 0) instead of the attribute name #nImports on the right-hand side of the expression, so the counter was always written with a static value. A corrected version of the section that adds the counter should be:
console.log('params before adding atomic counter is:', params)
// Add the counter
params["UpdateExpression"] += ", #nImports = #nImports + :incr";
params["ExpressionAttributeValues"][":incr"] = 1;
//params["ExpressionAttributeValues"][":nImports"] = 0;
params["ExpressionAttributeNames"]["#nImports"] = 'nImports'
console.log('params after adding atomic counter is:', params)
I'm sticking with the original ADD answer because I like how it distinguishes the counter from the properties inserted by the SET, but both approaches seem valid and I wanted to include the correction as well.
I have a block of code that calls an API and saves the results whether or not there are differences. I would like to return different values for DATA as laid out in the code, but this is obviously not working since it's returning undefined.
let compare = (term) => {
let DATA;
//declare empty array where we will push every thinkpad computer for sale.
let arrayToStore = [];
//declare page variable, that will be the amount of pages based on the primary results
let pages;
//this is the Initial get request to calculate amount of iterations depending on result quantities.
axios.get('https://api.mercadolibre.com/sites/MLA/search?q='+ term +'&condition=used&category=MLA1652&offset=' + 0)
.then(function (response) {
//begin calculation of pages
let amount = response.data.paging.primary_results;
//since we only care about the primary results, this is fine. Since there are 50 items per page, we divide
//amount by 50, and round it up, since the last page can contain less than 50 items
pages = Math.ceil(amount / 50);
//here we begin the for loop.
for(i = 0; i < pages; i++) {
// So for each page we will do an axios request in order to get results
//Since each page is 50 as offset, then i should be multiplied by 50.
axios.get('https://api.mercadolibre.com/sites/MLA/search?q='+ term +'&condition=used&category=MLA1652&offset=' + i * 50)
.then((response) => {
const cleanUp = response.data.results.map((result) => {
let image = result.thumbnail.replace("I.jpg", "O.jpg");
return importante = {
id: result.id,
title: result.title,
price: result.price,
link: result.permalink,
image: image,
state: result.address.state_name,
city: result.address.city_name
}
});
arrayToStore.push(cleanUp);
console.log(pages, i)
if (i === pages) {
let path = ('./compare/yesterday-' + term +'.json');
if (fs.existsSync(path)) {
console.log("Loop Finished. Reading data from Yesterday")
fs.readFile('./compare/yesterday-' + term +'.json', (err, data) => {
if (err) throw err;
let rawDataFromYesterday = JSON.parse(data);
// test
//first convert both items to check to JSON strings in order to check them.
if(JSON.stringify(rawDataFromYesterday) !== JSON.stringify(arrayToStore)) {
//Then Check difference using id, otherwise it did not work. Using lodash to help.
let difference = _.differenceBy(arrayToStore[0], rawDataFromYesterday[0],'id');
fs.writeFileSync('./compare/New'+ term + '.json', JSON.stringify(difference));
//if they are different save the new file.
//Then send it via mail
console.log("different entries, wrote difference to JSON");
let newMail = mail(difference, term);
fs.writeFileSync('./compare/yesterday-' + term +'.json', JSON.stringify(arrayToStore));
DATA = {
content: difference,
message: "These were the differences, items could be new or deleted.",
info: "an email was sent, details are the following:"
}
return DATA;
} else {
console.log("no new entries, cleaning up JSON");
fs.writeFileSync('./compare/New'+ term + '.json', []);
DATA = {
content: null,
message: "There were no difference from last consultation",
info: "The file" + './compare/New'+ term + '.json' + ' was cleaned'
}
return DATA;
}
});
} else {
console.error("error");
console.log("file did not exist, writing new file");
fs.writeFileSync('./compare/yesterday-' + term +'.json', JSON.stringify(arrayToStore));
DATA = {
content: arrayToStore,
message: "There were no registries of the consultation",
info: "Writing new file to ' " + path + "'"
}
return DATA;
}
}
})
}
}).catch(err => console.log(err));
}
module.exports = compare
So I export this compare function, which I call in my app.js.
What I want is to make this compare function return the DATA object, so I can display the actual messages on the front end.
My hope would be to put this compare(term) function inside a route in app.js like so:
app.get("/api/compare/:term", (req, res) => {
let {term} = req.params
let data = compare(term);
res.send(data);
})
But as I said, it's returning undefined. I tried async/await, and returning the whole first axios call, but I'm always getting undefined.
Thank you
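A rough sketch of the direction this usually has to take (not the original code; the restructuring below is only illustrative): compare needs to return a promise that resolves with DATA, which in turn means waiting for all the page requests, for example with Promise.all, instead of assigning DATA inside nested callbacks. The route handler then awaits it.
// Sketch only: assumes axios, fs, lodash (_) and mail() exactly as in the question.
async function compare(term) {
    const base = 'https://api.mercadolibre.com/sites/MLA/search?q=' + term +
        '&condition=used&category=MLA1652&offset=';
    const first = await axios.get(base + 0);
    const pages = Math.ceil(first.data.paging.primary_results / 50);
    // One request per page, all awaited together.
    const responses = await Promise.all(
        Array.from({ length: pages }, (unused, i) => axios.get(base + i * 50))
    );
    const arrayToStore = responses.map(response => response.data.results.map(result => ({
        id: result.id,
        title: result.title,
        price: result.price,
        link: result.permalink,
        image: result.thumbnail.replace("I.jpg", "O.jpg"),
        state: result.address.state_name,
        city: result.address.city_name
    })));
    // ...do the yesterday-file comparison from the question here, building DATA...
    // and finish with `return DATA;` so the caller actually receives it.
}

// In app.js the route has to await the promise that compare returns:
app.get("/api/compare/:term", async (req, res) => {
    const data = await compare(req.params.term);
    res.send(data);
});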
I am trying to get the sum of all image widths.
I tried collecting the values in an array so that I could calculate the sum of them, but it's still not working. It should be something really simple; all I want is the sum of the image width values.
componentDidMount() {
let images = document.querySelectorAll('#draggable img');
let widths = [];
images.forEach(each => {
each.addEventListener("load", () => {
widths.push(each.width)
});
});
console.log(widths); // this is logging the array!
const total = widths.reduce(function(a, b){ return a + b; });
console.log("total is : " + total ); // this is crashing!
}
Your widths array might be empty (you are populating it inside a load event handler) and you are calling reduce on it without an initialValue.
This will cause an error; see Array.reduce.
You could just do this:
widths.reduce((acc, width) => (acc + width), 0);
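For context, reduce on an empty array with no initial value throws, which is why the initialValue matters here (a quick illustration, not from the original post):
[].reduce((a, b) => a + b);    // TypeError: Reduce of empty array with no initial value
[].reduce((a, b) => a + b, 0); // 0, the initial value makes the empty case safe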
Update 1, based on your CodePen and your comments: the load event listener is not really needed. There is a compatibility issue with IE < 9, which supports attachEvent rather than addEventListener. I would suggest using a timer with a recursive function.
sumWidths = () => {
    const images = document.querySelectorAll('#draggable img');
    // A width of 0 (or undefined) means the image has not finished loading yet,
    // so delay half a second and try the whole thing again.
    const allLoaded = Array.from(images).every(({ width }) => width > 0);
    if (!allLoaded) {
        setTimeout(this.sumWidths, 500);
        return;
    }
    let sum = 0;
    images.forEach(({ width }) => { sum += width; });
    // This function can be created on the parent component,
    // or use a state management library, or whatever fits your needs.
    saveImageWidths(sum); // Create this function
    // Or save `sum` to the state of this component:
    this.setState(state => ({ ...state, widthsSum: sum }));
}
componentDidMount() {
this.sumWidths();
}
Update 2, using a load event listener.
Take a look at your forked working CodePen here.
function totalImagesWidth() {
let reportWidth = 0;
let images = document.querySelectorAll('#imagesContainer img');
let imagesWidth = [];
images.forEach(each => {
each.addEventListener("load", () => {
imagesWidth.push(each.width);
if (imagesWidth.length === images.length) {
reportWidth = (imagesWidth.reduce((a, b) => { return a + b; }, 0));
showResult(reportWidth);
}
});
});
function showResult(reportWidth){
const results = document.createElement("p");
results.innerHTML = `
Images: ${images} <br />
Total images: ${images.length} <br />
<code>imagesWidth</code> length: ${imagesWidth.length} <br />
Images widths: ${imagesWidth.toString()} <br />
<b>SUM: ${reportWidth}</b>`;
document.body.appendChild(results);
console.log(imagesWidth);
}
}
totalImagesWidth()
You're trying to read the value of a variable that will only be updated in the future. How about letting it be updated the expected number of times (the total number of images) and then, once it has, computing the total?
I didn't run my code, but I am thinking of something like this:
constructor() {
    this.widths = [];
}
componentDidMount() {
    let images = document.querySelectorAll('#draggable img');
    images.forEach(each => {
        each.addEventListener('load', () => {
            this.widths.push(each.width);
            // Only report once every image has fired its load event.
            if (this.widths.length === images.length) {
                this.reportWidth(this.widths.reduce(function (a, b) { return a + b; }, 0));
            }
        });
    });
}
reportWidth(width) {
    console.log(`Width is finally found to be ${width}`);
}
let images = Array.from(document.querySelectorAll('#draggable img'));
reduce is not available on a DOM node collection. Make it an Array first.
Update:
Using your codepen: this logs the desired answer:
const images = Array.from(document.querySelectorAll('#imagesContainer img'));
const sumOfWidths = images.reduce((a, b) => a + b.width, 0);
console.log({sumOfWidths}); // => 600
This assumes the images have already loaded.
Here is a solution for waiting for them to load with promises, which worked in your codepen: (this is not meant to be a good example of using promises - just a simplistic example... consider adding reject handlers for instance)
function totalImagesWidth() {
const images = Array.from(document.querySelectorAll('#imagesContainer img'));
const loadHandler = img => new Promise(resolve => {
img.addEventListener("load", () => resolve({}));
});
Promise.all(images.map(loadHandler)).then(results);
function results() {
const sumOfWidths = images.reduce((a, b) => a + b.width, 0);
const results = document.createElement("p");
results.innerHTML = `
Images: ${images} <br />
Total images: ${images.length} <br />
<b>SUM: ${sumOfWidths}</b>`;
document.body.appendChild(results);
console.log(sumOfWidths);
}
}
totalImagesWidth()
When your reduce runs, you cannot be sure the images are loaded, because the load event fires asynchronously from your function. You need a mechanism to make sure they were all added, such as counting until the number of items in widths equals the number of images.
function whenTotalWidthReady(cb) {
    let images = document.querySelectorAll('#draggable img');
    if (images.length == 0) cb(0);
    let widths = [];
    images.forEach(each => {
        let img = new Image();
        img.onload = function() {
            widths.push(img.width);
            // Once every image has reported in, hand the total to the callback.
            if (widths.length == images.length) {
                cb(widths.reduce(function(a, b) { return a + b; }));
            }
        };
        img.src = each.src;
    });
}
I don't know if creating intermediary Image objects is mandatory, but that is the approach I know works. Also, I do not think there is a problem with your reduce: as far as I know, the initial value is optional. But you can still pass 0 as suggested.
Now the way this version works is because it is all asynchronous, you pass a call back function as an argument. And this function will be called with the total width once images are ready.
function componentDidMount() {
whenTotalWidthReady(function(totalWidth) {
console.log("The total width of images is: " + totalWidth);
});
}
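For completeness, a small variation of the same callback idea that skips the intermediary Image objects by listening on the existing elements and checking their complete flag, so images that loaded before the function ran are still counted (my own sketch, not part of the answer above):
function whenTotalWidthReady(cb) {
    const images = Array.from(document.querySelectorAll('#draggable img'));
    let remaining = images.length;
    if (remaining === 0) return cb(0);
    const countOne = () => {
        // When the last image is accounted for, sum the rendered widths.
        if (--remaining === 0) cb(images.reduce((sum, img) => sum + img.width, 0));
    };
    images.forEach(img => {
        if (img.complete) countOne();              // already loaded before we attached the listener
        else img.addEventListener('load', countOne);
    });
}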
Use reduce like so:
const res = [...images].reduce((a, { width = 0 }) => a + width, 0);
I think a Firebase function that updates a list I have in the Firebase database is being captured by a subscription that is subscribed to that list. From what the list output looks like on my phone (in the app), and from the way my console output repeats, it seems like it is capturing the whole list and displaying it each time one item is added. So (I looked this up) I believe this equation represents what is happening:
(N(N + 1))/2
It is how you get the sum of all of the numbers from 1 to N. Doing the math in my case (N = 30 or so), I get around 465 entries...so you can see it is loading a ton, when I only want it to load the first 10.
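For reference, plugging in N = 30: (30 × 31) / 2 = 465, which matches the roughly 465 entries estimated above.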
To show what is happening with the output here is a pastebin https://pastebin.com/B7yitqvD.
In the output pay attention to the array that is above/before length - 1 load. You can see that it is rapidly returning an array with one more entry every time and adding it to the list. I did an extremely rough count of how many items are in my list too, and I got 440...so that roughly matches the 465 number.
The chain of events starts on a page other than the page with the list, with this function, which initiates the sorting on the Firebase Functions side:
let a = this.http.get('https://us-central1-mane-4152c.cloudfunctions.net/sortDistance?text='+resp.coords.latitude+':'+resp.coords.longitude+':'+this.username);
this.subscription6 = a.subscribe(res => {
console.log(res + "response from firesbase functions");
loading.dismiss();
}, err => {
console.log(JSON.stringify(err))
loading.dismiss();
})
Here is the function on the page with the list that I think is capturing the entire sort for some reason. The subscription is being repeated as the firebase function sorts, I believe.
loadDistances() {
//return new Promise((resolve, reject) => {
let cacheKey = "distances"
let arr = [];
let mapped;
console.log("IN LOADDISTANCES #$$$$$$$$$$$$$$$$$$$$$");
console.log("IN geo get position #$$$$$$$5354554354$$$$$$$");
this.distancelist = this.af.list('distances/' + this.username, { query: {
orderByChild: 'distance',
limitToFirst: 10
}});
this.subscription6 = this.distancelist.subscribe(items => {
let x = 0;
console.log(JSON.stringify(items) + " length - 1 load");
items.forEach(item => {
let storageRef = firebase.storage().ref().child('/settings/' + item.username + '/profilepicture.png');
storageRef.getDownloadURL().then(url => {
console.log(url + "in download url !!!!!!!!!!!!!!!!!!!!!!!!");
item.picURL = url;
}).catch((e) => {
console.log("in caught url !!!!!!!$$$$$$$!!");
item.picURL = 'assets/blankprof.png';
});
this.distances.push(item);
if(x == items.length - 1) {
this.startAtKey4 = items[x].distance;
}
x++;
})
//this.subscription6.unsubscribe();
})
}
The subscription in the loadDistances function works fine as long as I don't update the list from the other page, which is another indicator that it might be capturing the whole sort and listing it repeatedly as it sorts.
I have tried everything I could think of to unsubscribe from the list after I update, so that I could just load the list of 10 the next time the page with the list is entered, instead of right after the update (over and over again). I know that Firebase Functions is in beta. Could this be a bug on their side? Here is my Firebase Functions code:
exports.sortDistance = functions.https.onRequest((req, res) => {
// Grab the text parameter.
var array = req.query.text.split(':');
// Push the new message into the Realtime Database using the Firebase Admin SDK.
// Get a database reference to our posts
var db = admin.database();
var ref = db.ref("profiles/stylists");
var promises = [];
// Attach an asynchronous callback to read the data at our posts reference
ref.on("value", function(snapshot) {
//console.log(snapshot.val());
var snap = snapshot.val();
for(const user in snap) {
promises.push(new Promise(function(resolve, reject) {
var snapadd = snap[user].address;
console.log(snapadd + " snap user address (((((((())))))))");
if(snapadd != null || typeof snapadd != undefined) {
googleMapsClient.geocode({
address: snapadd
}).asPromise()
.then(response => {
console.log(response.json.results[0].geometry.location.lat);
console.log(" +++ " + response.json.results[0].geometry.location.lat + ' ' + response.json.results[0].geometry.location.lng + ' ' + array[0] + ' ' + array[1]);
var distanceBetween = distance(response.json.results[0].geometry.location.lat, response.json.results[0].geometry.location.lng, array[0], array[1]);
console.log(distanceBetween + " distance between spots");
var refList = db.ref("distances/"+array[2]);
console.log(snap[user].username + " snap username");
refList.push({
username: snap[user].username,
distance: Math.round(distanceBetween * 100) / 100
})
resolve();
})
.catch(err => { console.log(err); resolve();})
}
else {
resolve();
}
}).catch(err => console.log('error from catch ' + err)));
//console.log(typeof user + 'type of');
}
var p = Promise.all(promises);
console.log(JSON.stringify(p) + " promises logged");
res.status(200).end();
}, function (errorObject) {
console.log("The read failed: " + errorObject.code);
});
});
What is weird is that when I check the Firebase Functions logs, all of this appears to run only once, but I still think the subscription could be capturing the whole sorting process in some weird way while rapidly returning it. To be as clear as possible about what I think is going on: I think each stage of the sort is being captured in an (N(N + 1))/2 pattern, starting at 1 and going to roughly 30, and the sum of the sorting ends up being the length of my list (with 1 to 10 items repeated over and over again).
I updated to angularfire2 5.0 and angular 5.0...which took a little while, but ended up solving the problem:
this.distanceList = this.af.list('/distances/' + this.username,
ref => ref.orderByChild("distance").limitToFirst(50)).valueChanges();
In my HTML I used an async pipe, which solved the sorting problem:
...
<ion-item *ngFor="let z of (distanceList|async)" no-padding>
...
I have an array of ids, and I want to make an api request for each id, but I want to control how many requests are made per second, or better still, have only 5 open connections at any time, and when a connection is complete, fetch the next one.
Currently I have this, which just fires off all the requests at the same time:
_.each([1,2,3,4,5,6,7,8,9,10], function(issueId) {
github.fetchIssue(repo.namespace, repo.id, issueId, filters)
.then(function(response) {
console.log('Writing: ' + issueId);
writeIssueToDisk(fetchIssueCallback(response));
});
});
Personally, I'd use Bluebird's .map() with the concurrency option since I'm already using promises and Bluebird for anything async. But, if you want to see what a hand-coded counter scheme that restricts how many concurrent requests can run at once looks like, here's one:
function limitEach(collection, max, fn, done) {
    var cntr = 0,      // number of requests currently in flight
        index = 0,     // next item in the collection to process
        errFlag = false;

    function runMore() {
        // Launch new requests until we hit the concurrency limit or run out of items.
        while (!errFlag && cntr < max && index < collection.length) {
            ++cntr;
            fn(collection[index++], function(err, data) {
                --cntr;
                if (errFlag) return;
                if (err) {
                    errFlag = true;
                    done(err);
                } else {
                    // A slot freed up, so try to launch more work.
                    runMore();
                }
            });
        }
        // Everything has been launched and everything has finished: we're done.
        if (!errFlag && cntr === 0 && index === collection.length) {
            done();
        }
    }
    runMore();
}
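A possible way to call it for the question's loop (a sketch; wrapping github.fetchIssue in a node-style callback like this is my assumption, not something from the question):
limitEach([1,2,3,4,5,6,7,8,9,10], 5, function(issueId, callback) {
    github.fetchIssue(repo.namespace, repo.id, issueId, filters)
        .then(function(response) {
            console.log('Writing: ' + issueId);
            writeIssueToDisk(fetchIssueCallback(response));
            callback(null);   // success: frees a slot so the next issue can start
        }, function(err) {
            callback(err);    // failure: limitEach stops scheduling further work
        });
}, function(err) {
    if (err) console.error(err);
    else console.log('All issues written');
});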
With Bluebird:
function fetch(id) {
console.log("Fetching " + id);
return Promise.delay(2000, id)
.then(function(id) {
console.log(" Fetched " + id);
});
}
var ids = [1,2,3,4,5,6,7,8,9,10];
Promise.map(ids, fetch, { concurrency: 3 });
<script src="https://cdnjs.cloudflare.com/ajax/libs/bluebird/3.3.1/bluebird.min.js"></script>
<!-- results pane console output; see http://meta.stackexchange.com/a/242491 -->
<script src="http://gh-canon.github.io/stack-snippet-console/console.min.js"></script>
Divide your data into as many arrays as you want concurrent connections. Schedule with setTimeout, and have the completion callback handle the rest of the sub-array.
Wrap the setTimeout in a function of its own so that the variable values are frozen to their values at the time of delayed_fetch() invocation.
function delayed_fetch(delay, namespace, id, issueIds, filters) {
    if (issueIds.length === 0) return; // this sub-array is finished
    setTimeout(
        function() {
            var issueId = issueIds.shift();
            github.fetchIssue(namespace, id, issueId, filters).then(function(response) {
                console.log('Writing: ' + issueId);
                writeIssueToDisk(fetchIssueCallback(response));
                // Immediately fetch the next issue from this sub-array.
                delayed_fetch(0, namespace, id, issueIds, filters);
            });
        }, delay);
}
var i=0;
_.each([ [1,2] , [3,4], [5,6], [7,8], [9,10] ], function(issueIds) {
var delay=++i*200; // millisecond
delayed_fetch(delay, repo.namespace, repo.id, issueIds, filters);
});
I'd recommend using throat for exactly this: https://github.com/ForbesLindesay/throat
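A rough sketch of how that could look for the question's loop, assuming throat's usual form where throat(n) returns a wrapper that queues the functions you pass it:
const throat = require('throat');
const limit = throat(5); // at most 5 fetches in flight at any time

Promise.all(
    [1,2,3,4,5,6,7,8,9,10].map(issueId =>
        limit(() => github.fetchIssue(repo.namespace, repo.id, issueId, filters)
            .then(response => {
                console.log('Writing: ' + issueId);
                writeIssueToDisk(fetchIssueCallback(response));
            }))
    )
);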
Using Bluebird
function getUsersFunc(user) {
    // Get a collection of users
}
function getImagesFunc(id) {
    // Get a collection of profile images based on the id of the user
}
function search(response) {
    return getUsersFunc(response).then(response => {
        const ids = response.map(item => item.id);
        const getImage = id => getImagesFunc(id).then(items => items.image);
        // Run at most 5 image lookups at a time.
        return Promise.map(ids, getImage, { concurrency: 5 });
    });
}
Previously I used the ES6 Promise.all(), but it didn't work the way I was expecting, so I went with the third-party library bluebird.js and it worked like a charm.