How to implement Infinite Scrolling with the new Firebase (2016)? - javascript

QUESTION:
How do I implement efficient infinite scrolling in Firebase using JavaScript (and Node.js)?
WHAT I CHECKED:
Implementing Infinite Scrolling with Firebase?
Problem: uses the older Firebase API ^
Infinite scroll with AngularJs and Firebase
CODE FROM:
Infinite scroll with AngularJs and Firebase
"First, I recommend to create an Index in your Firebase. For this answer, I create this one:
{
"rules": {
".read": true,
".write": false,
"messages": {
".indexOn": "id"
}
}
}
Then, let's make some magic with Firebase:
// #fb: your Firebase.
// #data: messages, users, products... the dataset you want to do something with.
// #_start: min ID where you want to start fetching your data.
// #_end: max ID where you want to stop fetching your data.
// #_n: Step size. In other words, how much data you want to fetch from Firebase.
var fb = new Firebase('https://<YOUR-FIREBASE-APP>.firebaseio.com/');
var data = [];
var _start = 0;
var _end = 9;
var _n = 10;
var getDataset = function() {
fb.orderByChild('id').startAt(_start).endAt(_end).limitToLast(_n).on("child_added", function(dataSnapshot) {
data.push(dataSnapshot.val());
});
_start = _start + _n;
_end = _end + _n;
}
Finally, a better infinite scroll handler (without jQuery):
window.addEventListener('scroll', function() {
if (window.scrollY === document.body.scrollHeight - window.innerHeight) {
getDataset();
}
});
I'm using this approach with React and it's blazing fast no matter how big your data is."
(answered Oct 26 '15 at 15:02)
(by Jobsamuel)
PROBLEM
In that solution, n posts are loaded each time the scroll reaches the bottom of the page.
Depending on the screen size, this means more posts than needed get loaded. For example, if the visible screen only holds 2 posts and n = 5, then 3 more posts than necessary are loaded every time we hit the bottom.
That means roughly 3 * NumberOfTimesScrollHeightHasBeenPassed extra posts have been loaded by the time the bottom has been reached that many times.
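One way to avoid that over-fetch is to derive n from the viewport instead of hard-coding it; a minimal sketch (the 300px POST_HEIGHT is an assumed, roughly constant rendered height per post, not something from the original answer):
// Assumed fixed rendered height per post, in pixels.
var POST_HEIGHT = 300;
// How many posts fit in the current viewport, plus one as a small buffer.
function postsPerScreen() {
  return Math.ceil(window.innerHeight / POST_HEIGHT) + 1;
}
// e.g. inside getDataset(): var _n = postsPerScreen();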
MY CURRENT CODE (loads all posts at once, no infinite scrolling):
var express = require("express");
var router = express.Router();
var firebase = require("firebase");
router.get('/index', function(req, res, next) {
var pageRef = firebase.database().ref("posts/page");
pageRef.once('value', function(snapshot){
var page = [];
global.page_name = "page";
snapshot.forEach(function(childSnapshot){
var key = childSnapshot.key;
var childData = childSnapshot.val();
page.push({
id: key,
title: childData.title,
image: childData.image
});
});
res.render('page/index',{page: page});
});
});

Here is the full code for infinite paging.
The createPromiseCallback function supports both promises and callbacks:
function createPromiseCallback() {
var cb;
var promise = new Promise(function (resolve, reject) {
cb = function (err, data) {
if (err) return reject(err);
return resolve(data);
};
});
cb.promise = promise;
return cb;
}
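For illustration, the paging function defined below can then be consumed in either style; a sketch (the '/posts' endpoint and the page size of 10 are placeholders):
// Callback style
getPaginatedFeed('/posts', 10, null, function (err, page) { /* page.entries, page.nextPage */ });
// Promise style
getPaginatedFeed('/posts', 10).then(function (page) { /* page.entries, page.nextPage */ });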
The getPaginatedFeed function implements the actual paging:
// Note: database is assumed to be a Firebase Realtime Database reference, e.g. var database = firebase.database();
function getPaginatedFeed(endpoint, pageSize, earliestEntryId, cb) {
cb = cb || createPromiseCallback();
var ref = database.ref(endpoint);
if (earliestEntryId) {
ref = ref.orderByKey().endAt(earliestEntryId);
}
ref.limitToLast(pageSize + 1).once('value').then(data => {
var entries = data.val() || {};
var nextPage = null;
const entryIds = Object.keys(entries);
if (entryIds.length > pageSize) {
delete entries[entryIds[0]];
const nextPageStartingId = entryIds.shift();
nextPage = function (cb) {
return getPaginatedFeed(endpoint, pageSize, nextPageStartingId, cb);
};
}
var res = { entries: entries, nextPage: nextPage };
cb(null, res);
}).catch(err => cb(err));
return cb.promise;
}
And here is how to use the getPaginatedFeed function:
var endpoint = '/posts';
var pageSize = 2;
var nextPage = null;
var dataChunk = null;
getPaginatedFeed(endpoint, pageSize).then(function (data) {
dataChunk = data.entries;
nextPage = data.nextPage;
//if nextPage is null it means there are no more pages left
if (!nextPage) return;
//Getting second page
nextPage().then(function (secondPageData) {
dataChunk = secondPageData.entries;
nextPage = secondPageData.nextPage;
//Getting third page
if (!nextPage) return;
nextPage().then(function (thirdPageData) {
dataChunk = thirdPageData.entries;
nextPage = thirdPageData.nextPage;
//You can keep calling nextPage as long as it is not null, which means as long as you have data left
});
});
});
As for the question of how many items to put on the screen, one solution is to give each post a fixed height and, if it needs more space, put a "read more" link at the bottom of the post that reveals the hidden part when the user clicks it. That way you can keep a fixed count of items on the screen. Additionally, you can check the screen resolution in order to show more or fewer items.
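A minimal sketch of that idea (the 150px height and the DOM handling are assumptions, not part of the original answer):
// Assumed fixed post height in pixels; adjust to your layout.
var POST_HEIGHT = 150;
// Clamp a rendered post to the fixed height and add a "read more" toggle below it.
function clampPost(postEl) {
  postEl.style.maxHeight = POST_HEIGHT + 'px';
  postEl.style.overflow = 'hidden';
  var link = document.createElement('a');
  link.textContent = 'read more';
  link.addEventListener('click', function () {
    postEl.style.maxHeight = 'none';
    link.remove();
  });
  postEl.insertAdjacentElement('afterend', link);
}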

Related

Receiving 'events is not iterable' after updating function but the document format hasn't changed

I updated my function to paginate my index and now I'm receiving error 'event is not iterable' from the client side code. Here's the client code that's causing the error (it's at the very top of the page and required for the entire page to display).
<% for (let event of events){%>
Remember, it was working before I changed the function so here's the code prior to my recent edits:
module.exports.index = async (req, res) => {
const events = await Event.find({}).populate('artist');
res.render('events/index', { events });
};
Here's the updated code:
module.exports.index = async (req, res) => {
const page = parseInt(req.query.page);
const limit = parseInt(req.query.limit);
const startIndex = (page - 1) * limit;
const endIndex = page * limit;
const events = {}
if(endIndex < await Event.countDocuments().exec()) {
events.next = {
page: page + 1,
limit: limit
}
}
if(startIndex > 0) {
events.previous = {
page: page - 1,
limit: limit
}
}
events.events = await Event.find().limit(limit).skip(startIndex).exec()
res.paginatedResults = events;
res.render('events/index', { events });
};
The new function returns the array I'm looking for (when I console.log it), so it works from a query/pagination standpoint, but I'm apparently not sending what the client side expects after the code runs, and that's tripping up the iteration.
I'm also not populating 'artist' anymore, but I don't think that has anything to do with it.
Any ideas what I'm missing?
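For reference, a sketch of the shape change the EJS loop now sees (names from the question; the adjustment at the end is an assumption, not something the question states):
// Before: `events` was an array, so `for (let event of events)` worked.
// After: `events` is a plain object, e.g. { next: {...}, previous: {...}, events: [...] },
// and iterating the object itself throws "events is not iterable".
// One possible adjustment is to pass the inner array to the template:
// res.render('events/index', { events: events.events, pagination: { next: events.next, previous: events.previous } });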

Fastest way to send data from an injected script to a content script

I am building a Chrome extension and getting data from a webpage (a list with 20 elements and hundreds of pages) through my injected script.
This injected script sends the data via chrome.storage to the background script.
The background script calculates an array.
Then it sends the result to my content script, where a mutation observer waits for an event at which it uses the calculated array.
I am passing all of this data around with chrome.storage.local.set / get.
Because there are so many different specs around, my mutation observer has a timeout of 1 second for every loaded page / mutation, because otherwise the data loads too slowly and it still has the data from the previous page.
Is there a faster way to send this data around besides chrome.storage?
Injected.js
// Data was gathered earlier into the constant players (resource IDs and prices)
const payload = {
PlayerList: players.map(player => {
return {
ID: player.id, //resource Id
Price: player.bin // Price
};
}),
};
var payload2 = Object.values(payload);
chrome.runtime.sendMessage(extId, {type: 'GetPlayerList', data: payload2});
background.js
chrome.runtime.onMessageExternal.addListener(
function (request, sender, sendResponse) {
if (request.type === "GetPlayerList") {
var playertest = request;
var playertest2 = playertest.data[0];
var playerbase = chrome.storage.local.get("datafut", function (data) {
playerbase = data.datafut;
var merged = mergeArrays(playertest2, playerbase);
chrome.storage.local.set(
{
playerDataListCurrent: merged
});
console.log(merged);
})
}
});
function mergeArrays(playertest2, playerbase) {
// Calculates an array by filtering the IDs from a 30-element array and a 500-element array
}
The mergeArrays function returns an array.
content.js
var s = document.createElement('script');
s.src = chrome.extension.getURL('injected.js');
s.dataset.variable = JSON.stringify(chrome.runtime.id);
s.async = false;
(document.head || document.documentElement).appendChild(s);
s.onload = function () { s.remove(); };
var observeTransferList = new MutationObserver(function (mutations) {
mutations.forEach(function (mutation) {
mutation.addedNodes.forEach(function (node) {
if (node.nodeType === 1 && node.matches(".has-auction-data")) {
$(node).css("height", "28");
setTimeout(() => {
var playerDataListCurrent;
chrome.storage.sync.get(function (items) {
platform = items.platform;
discountprice = items.discountprice;
average = parseInt(items.average);
percentage = parseInt(items.percentage);
if (percentage === 0) {
//Data get
chrome.storage.local.get(function (items) {
playerDataListCurrent = items.playerDataListCurrent;
for (i = 0; i < playerDataListCurrent.length; i++) {
//DO STUFF
}
});
}
});
}, 1000); // Timeout for an unknown delay. Otherwise it sometimes still gets data from the array calculated for the previous page
}
});
});
});
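For context, a minimal sketch of passing the computed array straight to the content script with runtime messaging instead of chrome.storage (the message type name and the assumption that playerbase is already loaded are mine, not from the question):
// background.js (sketch)
chrome.runtime.onMessageExternal.addListener(function (request, sender, sendResponse) {
  if (request.type === "GetPlayerList") {
    var merged = mergeArrays(request.data[0], playerbase); // playerbase assumed already loaded
    // Send the result directly to the tab the data came from; no storage write, no polling.
    chrome.tabs.sendMessage(sender.tab.id, { type: "playerDataListCurrent", data: merged });
  }
});
// content.js (sketch)
var playerDataListCurrent = [];
chrome.runtime.onMessage.addListener(function (message) {
  if (message.type === "playerDataListCurrent") {
    playerDataListCurrent = message.data; // available to the MutationObserver without a timeout
  }
});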

How do I wait until the entire array has been populated before executing a function?

I am creating a payout API for my site, this one is set up around Monero and I am fetching the values needed to make payments and where to send them from my MongoDB database. I am using Mongoose to do this
I have everything mapped out: it grabs the total net value that needs to be paid out for the day from one array, and it grabs the latest wallet entry from another array so it knows where to send the amount.
The problem arises when I'm trying to structure a new array with this data, pushing this data into it.
Monero intrinsically only allows one transaction to be sent out at a time, locking your leftover balance and the amount sent out until that one transaction has been fully confirmed. The one transaction can be split into multiple transactions but it all has to be sent as "one" big chunk.
So when I send the request to Monero it has to include an array of every wallet and what amount each wallet is receiving.
When I'm pushing to the array, I noticed that it executes the Monero transfer function every time it pushes to the array. How do I make it so the array is filled completely first, with every value that meets my if statement (the date is today, paymentStatus is false, and the amount due is greater than 0), before handing that variable over to the Monero transfer function?
I've already tried using return functions, etc. I'm not sure how to implement async/await or Promises.
var bodyParser = require('body-parser');
var mongoose = require('mongoose');
var moment = require('moment');
var User = require('../models/user');
var moneroWallet = require('monero-nodejs');
var Wallet = new moneroWallet('127.0.0.1', 18083);
var destinations = []
var xmrnet = []
// connect to MongoDB
mongoose.connect('mongodb://...') // connection string truncated in the question
// Fetch IDs
function getID(){
var cursor = User.find({}, function (err, users) {
}).cursor();
return cursor;
}
var cursor = getID();
cursor.on('data', function(name){
var id = name.id
// Map all users using IDs
function getUsers(){
var cursor = User.findById(id, function (err, users) {
}).cursor();
return cursor;
}
var cursor = getUsers();
cursor.on('data', function(user){
net = user.netHistory.map(element => element)
order = user.orderHistory.map(element => element)
userWallets = user.userWallets.map(element => element)
var today = moment();
// date is today? validation
for (var i = 0; i < net.length; i++){
netDate = net[i].net_date_time
netStatus = net[i].paymentStatus
netUSD = net[i].current_usd_net
xmrnet = net[i].xmr_net
for (var j = 0; j < userWallets.length; j++){
xmrwalletsU = userWallets[j].xmr_wallet
if(xmrwalletsU !== ''){
xmrwallet = xmrwalletsU
}
}
var Dateistoday = moment(netDate).isSame(today, 'day');
if (Dateistoday === true && xmrnet !== 0 && netStatus === false){
console.log('its true')
netid = net[i].id
var destinationsU = [{amount: xmrnet, address: xmrwallet}]
destinations.push(destinationsU)
console.log(destinations)
// problem is right here with destinations^ it pushes one entry at a time
// problem over here as well: Wallet.transfer executes once per push, so by the time it gets to the second call the balance is already locked and it's unable to send
Wallet.transfer(destinations).then(function(transfer) {
console.log(transfer);
var payouts = {
payout_date_time: Date(),
payout_usd_amount: netUSD,
xmr_wallet: xmrwallet,
xmr_txid: transfer.tx_hash,
}
// add payout to payoutHistory array
User.findByIdAndUpdate(id, {"$addToSet": { 'payoutHistory': [payouts]}},
function(err, user) {
if(err) { console.log(err) }
else {
console.log()
};
})
// update paymentStatus of specific net entry to true
User.updateMany({
"_id": id,
"netHistory._id": netid
}, {
"$set": {
"netHistory.$.paymentStatus": true
}
}, function(error, success) {
})
})
}
}
})
})
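For reference, a minimal sketch of the "collect first, transfer once" shape the question describes, using async/await with Mongoose (field names follow the question's code; the flat destinations format and the Wallet.transfer call mirror how the question already uses monero-nodejs, and the helper name is mine):
async function payoutToday() {
  const today = moment();
  const destinations = [];
  const users = await User.find({}).exec();
  for (const user of users) {
    // Keep the latest non-empty wallet entry, as in the question's inner loop.
    const xmrwallet = user.userWallets.map(w => w.xmr_wallet).filter(w => w !== '').pop();
    for (const net of user.netHistory) {
      const isToday = moment(net.net_date_time).isSame(today, 'day');
      if (isToday && net.xmr_net !== 0 && net.paymentStatus === false) {
        destinations.push({ amount: net.xmr_net, address: xmrwallet });
      }
    }
  }
  if (destinations.length === 0) return;
  // Only now, with the array fully built, send the single transfer.
  const transfer = await Wallet.transfer(destinations);
  console.log(transfer);
}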

How should I design a pagination component with async in JavaScript?

How should I design a JavaScript pagination component around async calls?
I want to synchronize the HTTP request/response, because the pagination component needs the total data count to figure out the page count when the component is created and initialized (like in a constructor).
The total data count lives on my web server (or DB), so I request it over HTTP. But that API is async, so the code does not block while I request the total data count. It would have to block and be handled synchronously, because if the component can't know the total data count, it can't be initialized.
Below is my component code.
function ComponentPagination(paginationAreaID, pageViewDataCount, getTotalDataCountApiUrl)
{
this.OnUpdatePagingCallBack = null;
//div
var paginationArea = document.getElementById(paginationAreaID);
paginationArea.classList.add("component-pagination-area");
//prev button
var prevBtn = document.createElement("a");
prevBtn.textContent = "Prev";
paginationArea.appendChild(prevBtn);
//page buttons
var totalDataCount = getTotalDataCount();
if (totalDataCount == null) //If error occur
totalDataCount = 0;
var pageCount = (totalDataCount / pageViewDataCount);
for (var i = 0; i < pageCount; ++i) {
var pageBtn = document.createElement("a");
pageBtn.textContent = (i + 1) + "";
pageBtn.addEventListener("click", function (event) {
if (this.OnUpdatePagingCallBack == null) {
console.log("[Component Pagination Error] You must register
update paging callback function!");
return;
}
var curClickedPage = event.target.textContent
axios.get(getTotalDataCountApiUrl)
.then(function (response) {
console.log(response);
})
.catch(function (error) {
console.log("[Component Pagination Error] " + error);
});
}.bind(this));
paginationArea.appendChild(pageBtn);
}
//Next Button
var nextBtn = document.createElement("a");
nextBtn.textContent = "Next"
paginationArea.appendChild(nextBtn);
/* Private Function */
function getTotalDataCount()
{
this.totalDataCount = 0;
axios.get('/API/TestAPI/GetTotalFileCount') //hard coded for test
.then(function (response) {
this.totalDataCount = response.data;
}.bind(this))
.catch(function (error) {
console.log("[Component Pagination Error] " + error);
});
return this.totalDataCount;
}
/* Call Back */
this.registerUpdatePagingCallBack = function(onUpdatePagingMethod)
{
if (typeof onUpdatePagingMethod != "function")
{
console.log("[Component Pagination Error] You must pass only function
to registerUpdatePagingCallBack method's parameter");
return;
}
this.OnUpdatePagingCallBack = onUpdatePagingMethod;
}
}
And then I use it like below, inside window.onload = function().
var fileListPaginationer = new ComponentPagination("blocked-file-view-pagination-area", 10, "/API/TestAPI/GetTotalFileCount");
fileListPaginationer.registerUpdatePagingCallBack(function ()
{
/* This function will be called when a paging button is clicked or updated.
Parameters will be passed like (clickedPageIndex).
The user handles the real pagination data here, like a file list table... (etc.)
*/
});
JavaScript developers usually say that JavaScript events and requests must run asynchronously, but then I have no idea how to know the total data count at component initialization time.
Well... maybe showing a loading... message until the total-data-count request/response completes would be one way around this problem...
If possible, I want to initialize the pagination component at initialization time (like window.onload).
(PS: I think the loading message is a little bit burdensome.)
May I have some suggestions about how to design this pagination component?
I have never designed a library or component around async.
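For context, one common shape for this is an async factory that fetches the total count first and only then constructs the component; a minimal sketch (it assumes ComponentPagination is changed to take totalDataCount as a plain argument instead of fetching it itself, which is my assumption, not something in the question):
function createComponentPagination(paginationAreaID, pageViewDataCount, getTotalDataCountApiUrl) {
  // Fetch the total count first, then build the component with a known page count.
  return axios.get(getTotalDataCountApiUrl)
    .then(function (response) {
      var totalDataCount = response.data;
      return new ComponentPagination(paginationAreaID, pageViewDataCount, totalDataCount);
    });
}
// Usage inside window.onload: the component appears once the count has arrived.
window.onload = function () {
  createComponentPagination("blocked-file-view-pagination-area", 10, "/API/TestAPI/GetTotalFileCount")
    .then(function (fileListPaginationer) {
      fileListPaginationer.registerUpdatePagingCallBack(function (clickedPageIndex) {
        /* update the file list table, etc. */
      });
    })
    .catch(function (error) {
      console.log("[Component Pagination Error] " + error);
    });
};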

JavaScript Promise().then to prevent re-calling a function before the first call has executed

In my Node.js app, which reads data from MSSQL using tedious, I'm doing the following every 1 second:
1. Fetch the data from the server (the fetchStock function) and save it in a temporary array.
2. Send the data saved in the temporary array to the client using the Server-Sent Events (SSE) API.
It looks like 1 second is not enough for the previous fetchStock call to finish before it is called again, so I get execution errors from time to time.
I increased it to 5 seconds, but I still get the same issue every once in a while.
How can I use Promise().then to be sure the fetchStock function is not re-called before the previous call has completely executed?
var Request = require('tedious').Request;
var Connection = require('tedious').Connection;
var config = {
userName: 'sa',
password: 'pswd',
server: 'xx.xxx.xx.xxx',
options: {
database: 'DB',
rowCollectionOnRequestCompletion: 'true',
rowCollectionOnDone: 'true'
},
};
var sql = new Connection(config);
var addElem = (obj, elem)=> [].push.call(obj, elem);
var result = {}, tmpCol = {}, tmpRow = {};
module.exports = {
displayStock: function (es) {
var dloop = setInterval(function() {
if(result.error !== null)
if (es) es.send(JSON.stringify(result), {event: 'rmSoH', id: (new Date()).toLocaleTimeString()});
if(result.error === null)
if (es) es.send('connection is closed');
}, 1000);
},
fetchStock: function () {
request = new Request("SELECT ItemCode, WhsCode, OnHand FROM OITW where OnHand > 0 and (WhsCode ='RM' or WhsCode ='FG');", function(err, rowCount, rows) {
if (err) {
result = {'error': err};
console.log((new Date()).toLocaleTimeString()+' err : '+err);
}
if(rows)
rows.forEach(function(row){
row.forEach(function(column){
var colName = column.metadata.colName;
var value = column.value;
addElem(tmpCol, {colName: value})
});
addElem(tmpRow,{'item': tmpCol[0].colName, 'Whs': tmpCol[1].colName, 'Qty': tmpCol[2].colName});
tmpCol = {};
});
result = tmpRow;
tmpRow={}
});
sql.execSql(request);
}
}
I think what you need is a simple variable to check whether there's already a running request, not a Promise.
var latch = false;
// It will be called only if the previous call is completed
var doFetchStock = () => sql.execSql(new Request("SQL", (err, rowCount, rows) => {
// Your logic dealing with result
// Initializes the latch
latch = false;
}));
module.exports = {
fetchStock: function () {
// Check if the previous request is completed or not
if (!latch) {
// Sets the latch
latch = true;
// Fetches stock
doFetchStock();
}
}
};
Actually I've used this kind of pattern a lot to allow some behavior only once.
https://github.com/cettia/cettia-javascript-client/blob/1.0.0-Beta1/cettia.js#L397-L413
https://github.com/cettia/cettia-javascript-client/blob/1.0.0-Beta1/cettia.js#L775-L797
Since JavaScript is single-threaded, simple code like this should be enough on the client side:
function () {
if(currentPromise == null){ // currentPromise is defined in a closure outside; only call if no request is pending
currentPromise = [..] // call to server which returns a promise
currentPromise.then(function(){
currentPromise = null;
});
}
}
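A minimal sketch of the promise-based alternative the question title asks about: instead of setInterval, re-schedule the next fetch only after the previous one has finished (wrapping the tedious request in a Promise is my assumption; the callback signature mirrors the question's code):
function fetchStockOnce() {
  return new Promise(function (resolve, reject) {
    var request = new Request("SELECT ItemCode, WhsCode, OnHand FROM OITW where OnHand > 0 and (WhsCode ='RM' or WhsCode ='FG');", function (err, rowCount, rows) {
      if (err) return reject(err);
      resolve(rows);
    });
    sql.execSql(request);
  });
}
function pollStock() {
  fetchStockOnce()
    .then(function (rows) {
      // ...build `result` from rows as fetchStock does...
    })
    .catch(function (err) {
      result = { error: err };
    })
    .then(function () {
      // Schedule the next call only after this one has fully completed.
      setTimeout(pollStock, 1000);
    });
}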
