PouchDB pagination - JavaScript

I am looking for a way to paginate in pouchdb by specifying the number of the page that I want.
The closest example I came across is this:
var options = {limit: 5};

function fetchNextPage() {
  pouch.allDocs(options, function (err, response) {
    if (response && response.rows.length > 0) {
      options.startkey = response.rows[response.rows.length - 1].id;
      options.skip = 1;
    }
  });
}
However, it assumes that you are paginating one page after the other, calling the function several times in succession.
What I need instead is a way to retrieve, for example, page 5 with a single query.

There is no easy answer to the question. Here is a small slice from the CouchDB documentation (section 3.2.5.6, "Jump to Page"):
One drawback of the linked list style pagination is that you can’t
pre-compute the rows for a particular page from the page number and
the rows per page. Jumping to a specific page doesn’t really work. Our
gut reaction, if that concern is raised, is, “Not even Google is doing
that!” and we tend to get away with it. Google always pretends on the
first page to find 10 more pages of results. Only if you click on the
second page (something very few people actually do) might Google
display a reduced set of pages. If you page through the results, you
get links for the previous and next 10 pages, but no more.
Pre-computing the necessary startkey and startkey_docid for 20 pages
is a feasible operation and a pragmatic optimization to know the rows
for every page in a result set that is potentially tens of thousands
of rows long, or more.
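For context, the obvious single-query way to jump to an arbitrary page is limit plus skip. It answers the literal question, but CouchDB/PouchDB walks every skipped row, so it gets slower the deeper the page. A minimal sketch, assuming db is an existing PouchDB instance and 0-based page numbers:
// one call per page, but the skip cost grows linearly with the page number
async function fetchPage(page, perPage = 5) {
  const result = await db.allDocs({
    include_docs: true,
    limit: perPage,
    skip: page * perPage
  });
  return result.rows.map(row => row.doc);
}
// e.g. "page 5" of the question (the sixth page, 0-based):
// const docs = await fetchPage(5);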
If you are lucky and every document has an ordered sequence number, then a view could be constructed to easily navigate pages.
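To sketch that idea (this is not from the original answer): if each document carried a contiguous seq field starting at 0, a simple map view keyed on it would let any page's start key be computed directly from the page number.
// one-time setup: a view keyed on the assumed per-document sequence number
async function createSeqView() {
  await db.put({
    _id: '_design/pages',
    views: {
      by_seq: {
        map: "function (doc) { emit(doc.seq); }"
      }
    }
  });
}

// 0-based page numbers; assumes seq values are contiguous with no gaps
async function getPage(page, perPage = 5) {
  const result = await db.query('pages/by_seq', {
    startkey: page * perPage,
    limit: perPage,
    include_docs: true
  });
  return result.rows.map(row => row.doc);
}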
Another strategy is to precompute (preload) a range of keys; this is more generally applicable, but also more involved. The snippet below creates a trivial database which can be paged through via nav links.
There are 5 documents per page, and each "chapter" has 10 pages. computePages performs the look-ahead:
// Look ahead and cache the startkey for a range of pages.
async function computePages(startPage, perPage, lookAheadPages, startKey) {
  let options = {
    limit: perPage * lookAheadPages,
    include_docs: false,
    reduce: false
  };
  // Adjust when the requested page has no cached key yet: start from the
  // previous page's key and skip over that one page.
  if (startKey !== undefined) {
    options.startkey = startKey;
    options.skip = perPage; // not ideal, but tolerable - we only ever skip one page
  }
  const result = await db.allDocs(options);
  // Use max to prevent overrunning the result;
  // only the first key of each page is stored.
  const max = Math.min(options.limit, result.rows.length);
  for (let i = 0; i < max; i += perPage) {
    page_keys[startPage++] = result.rows[i].id;
  }
}
page_keys is a key/value store mapping page number to start key. Usually anything other than 1 for skip is a red flag, but it is reasonable here: we only ever skip a single page's worth of documents, never, say, a hundred.
I just threw this together so it is imperfect and likely buggy, but it does demonstrate page navigation generally.
function gel(id) {
  return document.getElementById(id);
}

// canned test documents
function getDocsToInstall() {
  let docs = [];
  // doc ids are a silly sequence of characters.
  for (let i = 33; i < 255; i++) {
    docs.push({
      _id: `doc-${String.fromCharCode(i)}`
    });
  }
  return docs;
}

// init db instance
let db;
async function initDb() {
  db = new PouchDB('test', {
    adapter: 'memory'
  });
  await db.bulkDocs(getDocsToInstall());
}
// documents to show per page
const rows_per_page = 5;
// how many pages to preload into the page_keys list.
const look_ahead_pages = 10;
// page key cache: key = page number, value = document key
const page_keys = {};
// the current page being viewed
let page_keys_index = 0;
// track total rows available to prevent rendering links beyond available pages.
let total_rows = undefined;
async function showPage(page) {
  // load the docs for this page
  let options = {
    limit: rows_per_page,
    include_docs: true,
    startkey: page_keys[page] // start key for this page, from the cache
  };
  let result = await db.allDocs(options);
  // see renderNav. Here there is NO accounting for live changes to the db.
  total_rows = total_rows || result.total_rows;
  // just display the doc ids.
  const view = gel('view');
  view.innerText = result.rows.map(row => row.id).join("\n");
}
// Look ahead and cache the startkey for a range of pages.
async function computePages(startPage, perPage, lookAheadPages, startKey) {
  let options = {
    limit: perPage * lookAheadPages,
    include_docs: false,
    reduce: false
  };
  // Adjust when the requested page has no cached key yet: start from the
  // previous page's key and skip over that one page.
  if (startKey !== undefined) {
    options.startkey = startKey;
    options.skip = perPage; // not ideal, but tolerable - we only ever skip one page
  }
  const result = await db.allDocs(options);
  // Use max to prevent overrunning the result;
  // only the first key of each page is stored.
  const max = Math.min(options.limit, result.rows.length);
  for (let i = 0; i < max; i += perPage) {
    page_keys[startPage++] = result.rows[i].id;
  }
}
// show page links and optional skip backward/forward links.
let last_chapter;
async function renderNav() {
  // calculate which page to start linking.
  const chapter = Math.floor(page_keys_index / look_ahead_pages);
  if (chapter !== last_chapter) {
    last_chapter = chapter;
    const start = chapter * look_ahead_pages;
    let html = "";
    // don't render more page links than possible.
    let max = Math.min(start + look_ahead_pages, total_rows / rows_per_page);
    // render the 'prev' link if nav'ed past the 1st chapter.
    if (start > 0) {
      html = `<a href="#" onclick="navTo(${start - 1})">&lt;</a> `;
    }
    for (let i = start; i < max; i++) {
      html += `<a href="#" onclick="navTo(${i})">${i + 1}</a> `;
    }
    // if more pages are available, render the 'next' link
    if (max % look_ahead_pages === 0) {
      html += ` <a href="#" onclick="navTo(${max})">&gt;</a> `;
    }
    gel("nav").innerHTML = html;
  }
}
async function navTo(page) {
  if (page_keys[page] === undefined) {
    // page key not cached - compute more page keys.
    await computePages(page, rows_per_page, look_ahead_pages, page_keys[page - 1]);
  }
  page_keys_index = page;
  await showPage(page_keys_index);
  renderNav();
}

initDb().then(async () => {
  await navTo(0);
});
<script src="https://cdn.jsdelivr.net/npm/pouchdb@7.1.1/dist/pouchdb.min.js"></script>
<script src="https://github.com/pouchdb/pouchdb/releases/download/7.1.1/pouchdb.memory.min.js"></script>
<pre id="view"></pre>
<hr/>
<div id="nav"></div>

Related

The result is not shown on the webpage, or it shows very slowly

So... the code is like this
<script>
console.log("Calculating the number of cases...");
calculate_total();
function calculate_total() {
fetch('https://covid.ourworldindata.org/data/owid-covid-data.json')
.then(res => {
return res.json()
})
.then (raw_data => {
var total_cases = 0;
var new_cases = 0;
var total_deaths = 0;
for (const key in raw_data) {
const country = raw_data[key];
const country_data = country.data;
const latest_data = country_data[country_data.length - 1];
if (country.location != "World") {
if (latest_data.total_cases != null) {
total_cases += latest_data.total_cases;
}
if (latest_data.new_cases != null) {
new_cases += latest_data.new_cases;
}
if (latest_data.total_deaths != null) {
total_deaths += latest_data.total_deaths;
}
}
}
console.log("Number of total cases:" + total_cases);
console.log("Number of new confirmed cases:" + new_cases);
console.log("Number of deaths:" + total_deaths);
document.getElementById("total_cases").innerHTML = total_cases;
document.getElementById("new_cases").innerHTML = new_cases;
document.getElementById("total_deaths").innerHTML = total_deaths;
})
}
</script>
The result actually shows what I want to see; however, it takes around 5 minutes until it appears.
What should I change in order to get the result instantly, or at least with less waiting time?
The data is around 35 MB, so downloading it and traversing the whole JSON with the for loop is expected to take a long time.
Parsing that long stringified JSON into objects also takes compute time.
You cannot do much except fetch only the fields that you require (which is possible when fetching data from a GraphQL API). That way the result contains only the fields you want to work with, which in this case should significantly reduce the compute time.
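As a rough illustration only - OWID does not expose a GraphQL endpoint for this dataset, so the URL and schema below are hypothetical - the point is that the query names exactly the three fields the loop needs, so nothing else crosses the wire:
// hypothetical GraphQL endpoint and schema, to illustrate "fetch only what you need"
const query = `
  query {
    countries {
      location
      latest { total_cases new_cases total_deaths }
    }
  }`;

fetch('https://example.org/covid/graphql', { // hypothetical URL
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ query })
})
  .then(res => res.json())
  .then(({ data }) => {
    // same summation as the original loop, over a much smaller payload
    let total_cases = 0, new_cases = 0, total_deaths = 0;
    for (const country of data.countries) {
      if (country.location === "World") continue;
      total_cases += country.latest.total_cases || 0;
      new_cases += country.latest.new_cases || 0;
      total_deaths += country.latest.total_deaths || 0;
    }
    console.log(total_cases, new_cases, total_deaths);
  });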

How to insert 10 million rows into MySQL database with Knex.js?

I'm trying to insert 10M+ rows into a MySQL database using Knex.js. Is there a way to use a for loop to insert arrays of length 10000? That seems to be the maximum size I am able to insert - anything larger than that gets "Error: ER_NET_PACKET_TOO_LARGE: Got a packet bigger than 'max_allowed_packet' bytes".
I tried using a promise chain but the chain would be very long to accommodate 10M records.
exports.seed = (knex) => {
// Deletes ALL existing entries
return knex('books').del()
.then(() => {
const fakeBooks = [];
for (let i = 0; i < 10000; i += 1) {
fakeBooks.push(createFakeBooks());
}
return knex('books').insert(fakeBooks)
.then(() => {
const fakeBooks1 = [];
for (let i = 0; i < 10000; i += 1) {
fakeBooks1.push(createFakeBooks());
}
return knex('books').insert(fakeBooks1)
.then(() => {
const fakeBooks2 = [];
for (let i = 0; i < 10000; i += 1) {
fakeBooks2.push(createFakeBooks());
}
...
It's easier if you use async and await and ditch the thens. It can then be written like this:
exports.seed = async (knex) => {
  await knex('books').del();
  let fakeBooks = [];
  for (let i = 1; i <= 10000000; i += 1) {
    fakeBooks.push(createFakeBooks());
    if (i % 1000 === 0) {
      await knex('books').insert(fakeBooks);
      fakeBooks = [];
    }
  }
};
await makes the function wait for each insert to finish before continuing, without blocking the thread. The loop will run ten million times and insert into the database after every 1000 rows. You can change it to 10000 rows, but you might as well use 1000 to be safe.
I only tried with one million rows myself, as it took too much time to insert ten million.
You can use knex.batchInsert (https://knexjs.org/#Utility-BatchInsert), which is designed for inserting large numbers of rows into the DB.
await knex.batchInsert('books', create10MFakeBooks(), 5000)
However, you might want to create those books in smaller batches as well, to avoid holding gigabytes in memory. So MikaS's answer is valid: just use async / await and it is trivial to write.
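A minimal sketch combining the two ideas (generate in chunks, let batchInsert split each chunk into smaller inserts); createFakeBooks is the asker's existing row factory and the chunk sizes are just plausible guesses:
exports.seed = async (knex) => {
  await knex('books').del();
  const total = 10000000;
  const chunkSize = 10000; // rows generated and held in memory at a time
  for (let done = 0; done < total; done += chunkSize) {
    const chunk = [];
    for (let i = 0; i < chunkSize; i += 1) {
      chunk.push(createFakeBooks());
    }
    // batchInsert splits the chunk into inserts of 1000 rows each
    await knex.batchInsert('books', chunk, 1000);
  }
};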
I would not use knex for this kind of job, but raw SQL.

Optimizing hash table implementation to accommodate large amount of elements

Consider the following scenario:
One million clients visit a store and pay an amount of money using their credit card. The credit card codes are generated using a 16-digit number, and replacing 4 of its digits (randomly) with the characters 'A', 'B', 'C', 'D'. The 16-digit number is generated randomly once, and is used for every credit card, the only change between cards being the positions in the string of the aforementioned characters (that's ~40k possible distinct codes).
I have to organize the clients in a hash table, using a hash function of my choosing and open addressing (linear probing) to deal with collisions. Once organized in the table, I have to find the client who (1) paid the most money across his purchases and (2) visited the store the most times.
My implementation of the hash table is as follows, and it seems to work correctly for a test of 1000 clients. However, once I increase the number of clients to 10,000, the page never finishes loading. This is a big issue since the total number of "shopping sessions" has to be one million, and I am not even getting close to that number.
class HashTable{
constructor(size){
this.size = size;
this.items = new Array(this.size);
this.collisions = 0;
}
put(k, v){
let hash = polynomial_evaluation(k);
//evaluating the index to the array
//using modulus a prime number (size of the array)
//This works well as long as the numbers are uniformly
//distributed and sparse.
let index = hash%this.size;
//if the array position is empty
//then fill it with the value v.
if(!this.items[index]){
this.items[index] = v;
}
//if not, search for the next available position
//and fill that with value v.
//if the card already is in the array,
//update the amount paid.
//also increment the collisions variable.
else{
this.collisions++;
let i=1, found = false;
//while the array at index is full
//check whether the card is the same,
//and if not then calculate the new index.
while(this.items[index]){
if(this.items[index] == v){
this.items[index].increaseAmount(v.getAmount());
found = true;
break;
}
index = (hash+i)%this.size;
i++;
}
if(!found){
this.items[index] = v;
}
found = false;
}
return index;
}
get(k){
let hash = polynomial_evaluation(k);
let index = hash%this.size, i=1;
while(this.items[index] != null){
if(this.items[index].getKey() == k){
return this.items[index];
}
else{
index = (hash+i)%this.size;
i++;
}
}
return null;
}
findBiggestSpender(){
let max = {getAmount: function () {
return 0;
}};
for(let item of this.items){
//checking whether the specific item is a client
//since many of the items will be null
if(item instanceof Client){
if(item.getAmount() > max.getAmount()){
max = item;
}
}
}
return max;
}
findMostFrequentBuyer(){
let max = {getTimes: function () {
return 0;
}};
for(let item of this.items){
//checking whether the specific item is a client
//since many of the items will be null
if(item instanceof Client){
if(item.getTimes() > max.getTimes()){
max = item;
}
}
}
return max;
}
}
The key I use to calculate the index into the array is a list of 4 integers ranging from 0 to 15, denoting the positions of 'A', 'B', 'C', 'D' in the string.
Here's the hash function I am using:
function polynomial_evaluation(key, a=33){
//evaluates the expression:
// x1*a^(d-1) + x2*a^(d-2) + ... + xd
//for a given key in the form of a tuple (x1,x2,...,xd)
//and for a nonzero constant "a".
//for the evaluation of the expression horner's rule is used:
// x_d + a*(x_(d-1) + a(x_(d-2) + .... + a*(x_3 + a*(x_2 + a*x1))... ))
//defining a new key with the elements of the
//old times 2,3,4 or 5 depending on the position
//this helps for "spreading" the values of the keys
let nKey = [key[0]*2, key[1]*3, key[2]*4, key[3]*5];
let sum=0;
for(let i=0; i<nKey.length; i++){
sum*=a;
sum+=nKey[i];
}
return sum;
}
The values corresponding to the keys generated by the hash function are instances of a Client class which contains the fields amount (the amount of money paid), times (the times this particular client shopped), key (the array of 4 integers mentioned above), as well as getter functions for those fields. In addition there's a method that increases the amount when the same client appears more than once.
The size of the hash table is 87383 (a prime number) and the code in my main file looks like this:
//initializing the clients array
let clients = createClients(10000);
//creating a new hash table
let ht = new HashTable(N);
for(let client of clients){
ht.put(client.getKey(), client);
}
This keeps running until Google Chrome gives a "page not responding" error. Is there any way I can make this faster? Is my approach to the problem (perhaps even my choice of language) correct?
Thanks in advance.
The page is not responding because the main (UI) thread is locked. Use a Web Worker (or Service Worker) to handle the calculations, and post the results back to the main thread as messages.
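A minimal sketch of that split; the file names and the idea of posting back only the winning key are assumptions, not part of the original code:
// main.js - the UI thread only starts the worker and receives the result
const worker = new Worker('hash-worker.js');
worker.onmessage = (e) => {
  console.log('biggest spender key:', e.data.biggestSpenderKey);
};
worker.postMessage({ clientCount: 1000000 });

// hash-worker.js - the heavy loop runs off the main thread
importScripts('hashtable.js'); // assumed file defining HashTable, Client, createClients
self.onmessage = (e) => {
  const clients = createClients(e.data.clientCount);
  const ht = new HashTable(87383);
  for (const client of clients) {
    ht.put(client.getKey(), client);
  }
  // post back plain data only; Client methods would not survive structured cloning
  self.postMessage({ biggestSpenderKey: ht.findBiggestSpender().getKey() });
};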
Regarding optimizing your code, one thing I see is in findBiggestSpender. I'll break it down line-by-line.
let max = {getAmount: function () {
return 0;
}};
This is a waste. Just assign a local variable, no need to keep calling max.getAmount() in every iteration.
for(let item of this.items){
The fastest way to iterate a list in JavaScript is a for loop with a cached length: for (let i = 0, len = this.items.length; i < len; i++)
if(item instanceof Client){
This is slower than a plain null check; just use item != null (the unused slots are undefined, so use the loose comparison to cover both).
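Putting those three points together, a sketch of the tightened method could look like this (note it returns null instead of a dummy object when the table is empty):
findBiggestSpender() {
  let max = null;
  let maxAmount = 0; // plain local instead of a dummy object with getAmount()
  for (let i = 0, len = this.items.length; i < len; i++) {
    const item = this.items[i];
    if (item != null && item.getAmount() > maxAmount) {
      maxAmount = item.getAmount();
      max = item;
    }
  }
  return max;
}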

How to paginate subsections of an array property in Vue.js?

When displaying list data, one can iterate through the items using v-for. I have some filter controls which increase or decrease the number of visible items (size, type, name - stuff like that).
Regardless of this, I'd like to limit the amount visible on page to, say, 10 items.
In other words, if the filter hides 50 out of 100 result items (which are still stored in Vuex) I still want to be able to paginate through 5 pages (10 at a time only).
There are a few plugins, such as this one, which seem to help with that.
However, I'd like to try to do it manually to understand how it's done, though I'm a bit stumped.
Since you have Vuex on board, a getter seems easiest.
export const getters = {
  pagedItems: state => {
    return pageNo => {
      const pageSize = state.pageSize || 10
      const items = state.items || []
      const start = pageNo * pageSize
      return items.slice(start, start + pageSize)
    }
  }
}
Default values (e.g. state.items || []) are there to stop the calculation erroring before the initial load completes.
Use it on the component in a computed property, which will make it reactive when pageNo changes:
computed: {
  pagedItems() {
    return this.$store.getters['pagedItems'](this.pageNo)
  },
},
It just occurred to me that if you are filtering, you probably want to apply the filter before the pagination, otherwise the pages may not be consistent in size (e.g. page 1 filters down to 4 items, page 2 to 6 items).
Depending on your exact filter, it should be easy to add a getter for filteredItems and use that as the source for pagedItems, as sketched below.
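A sketch of that chaining; the type field and state.filter are made-up stand-ins for whatever the real filter criteria are:
export const getters = {
  filteredItems: state => {
    const items = state.items || []
    // hypothetical filter: keep only items whose type matches the active filter
    return state.filter ? items.filter(item => item.type === state.filter) : items
  },
  pagedItems: (state, getters) => pageNo => {
    const pageSize = state.pageSize || 10
    const start = pageNo * pageSize
    return getters.filteredItems.slice(start, start + pageSize)
  }
}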
Well, I would just divide the number of items by the number of items I want to display per page, use the remainder operator, and create that number of pages plus one, of course with some validation for empty data and so on.
Imagine you receive an object that contains lists; these lists hold all the arrays with your data, and each array is a row.
Just get the length, divide it, and use the modulo operator to decide whether an extra page is needed. In your case, if you have 52 items and want 10 per page:
52 % 10 = 2
52 / 10 = 5 (integer part)
so you need 5 pages, plus 1 for the remaining 2 items.
So I would do something like this:
const NUMBER_ITEMS_PER_PAGE = 10;
const numberItems = list.length;
let pages = Math.floor(numberItems / NUMBER_ITEMS_PER_PAGE);
if (numberItems % NUMBER_ITEMS_PER_PAGE > 0) {
  pages++;
}

function buildPages(numberPages) {
  const pageObj = { page: {} };
  for (let i = 0; i < numberPages; i++) {
    // each page holds the slice of rows that belongs to it
    pageObj.page[i + 1] = list.slice(
      i * NUMBER_ITEMS_PER_PAGE,
      (i + 1) * NUMBER_ITEMS_PER_PAGE
    );
  }
  return pageObj;
}
Of course this is just one possible solution, but I think it can get you started; the code is just there to help. Good luck!
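A quick usage example of buildPages, assuming list holds 52 rows:
const allPages = buildPages(pages);   // pages === 6 for 52 items
console.log(allPages.page[1].length); // 10 - a full first page
console.log(allPages.page[6].length); // 2  - the leftover items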

Angular material pagination next page function

I followed this tutorial to learn how to make pagination:
https://github.com/feichao/angular-material-paging
I put in a JSON, myData, with items to see how it would work. Because I'm a junior programmer, I got blocked at gotoPage().
In my page I changed:
ctrl.total = ctrl.myData.length;
ctrl.currentPage = 1;
ctrl.step = 6;
ctrl.gotoPage = function() {
  if (ctrl.total) {
    for (var i = 0; i < ctrl.total; i++) {
      //
    }
  }
};
but I don't know what to put in this function so that it shows only, let's say, 10 items from my JSON per page.
You can see these props and methods in their demo.
Can someone please give ideas how to do this?
You need to calculate the starting index and then iterate until you hit the maximum for that page:
ctrl.total = ctrl.myData.length; // the total items in your set
ctrl.currentPage = 1;
ctrl.step = 6; // page size - this could be 10 if you want
ctrl.numpages = Math.ceil(ctrl.total / ctrl.step); // total number of pages, rounded up

// gotoPage receives the page number as an argument and walks the indexes for that page
ctrl.gotoPage = function(Page) {
  if (Page <= ctrl.numpages && Page > 0) { // check that the page is valid
    // this is virtually a skip amount: page 1 starts at index 0,
    // page 2 skips the first ctrl.step results, and so on.
    var startpos = (Page - 1) * ctrl.step;
    for (var i = startpos; i < startpos + ctrl.step; i++) {
      if (i === ctrl.total) { // if it's the last page we cut it off early
        break;
      }
      // i is the index of an item that belongs on this page
    }
  }
};
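If the goal is just to end up with the visible items, here is a shorter sketch using Array.slice; it assumes ctrl.myData is a plain array and that page numbers are 1-based:
ctrl.pageItems = [];
ctrl.gotoPage = function(page) {
  var numPages = Math.ceil(ctrl.total / ctrl.step);
  if (page < 1 || page > numPages) {
    return; // ignore out-of-range pages
  }
  ctrl.currentPage = page;
  var start = (page - 1) * ctrl.step;
  // keep only this page's items; bind pageItems in the template with ng-repeat
  ctrl.pageItems = ctrl.myData.slice(start, start + ctrl.step);
};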
