Skip to content

Commit

Permalink
Results pagination overhaul (#3)
Browse files Browse the repository at this point in the history
* Replace `offset` parameter with `page` for pagination of results

Instead of taking the offset directly from the user query, we can
compute it from the page number and the `limit` parameter set in the
query (or its default), as `offset = (page - 1) * limit`.

Not having a `page` parameter in the query is equivalent to `page=1`.

* Fix `limit <= 0` and add missing tests

* Add `parsePage` tests

* Add metadata information to API responses

The metadata contains information for the pagination of results.
The following fields are available:
- `next_page`, capped to `total_pages` on reaching the last page
- `previous_page`, set to 1 on the first page
- `total_pages`, computed from the `limit` parameter
- `total_results` returned from `rows_before_limit_at_least` of
   ClickHouse's response
   (https://clickhouse.com/docs/en/interfaces/formats#json)

* Make `limit` parameter default to `config.maxLimit`

Resolves #1
  • Loading branch information
0237h authored Apr 19, 2024
1 parent 5f356d0 commit a448010
Show file tree
Hide file tree
Showing 11 changed files with 150 additions and 36 deletions.
16 changes: 13 additions & 3 deletions src/clickhouse/makeQuery.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ export interface Query<T> {
meta: Meta[],
data: T[],
rows: number,
rows_before_limit_at_least: number,
statistics: {
elapsed: number,
rows_read: number,
Expand All @@ -21,7 +22,7 @@ export async function makeQuery<T = unknown>(query: string) {
try {
const response = await client.query({ query })
const data: Query<T> = await response.json();

prometheus.query.inc();
prometheus.bytes_read.inc(data.statistics.bytes_read);
prometheus.rows_read.inc(data.statistics.rows_read);
Expand All @@ -32,7 +33,16 @@ export async function makeQuery<T = unknown>(query: string) {
} catch (e: any) {
logger.error(e.message)

return { data: [] }
return {
meta: [],
data: [],
rows: 0,
rows_before_limit_at_least: 0,
statistics: {
elapsed: 0,
rows_read: 0,
bytes_read: 0,
}
};
}

}
12 changes: 10 additions & 2 deletions src/fetch/balance.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@ import { makeQuery } from "../clickhouse/makeQuery.js";
import { logger } from "../logger.js";
import { getBalanceChanges } from "../queries.js";
import * as prometheus from "../prometheus.js";
import { toJSON } from "./utils.js";
import { addMetadata, toJSON } from "./utils.js";
import { parseLimit, parsePage } from "../utils.js";

function verifyParams(searchParams: URLSearchParams) {
const account = searchParams.get("account");
Expand All @@ -20,7 +21,14 @@ export default async function (req: Request) {
const query = getBalanceChanges(searchParams);
const response = await makeQuery(query)

return toJSON(response.data);
return toJSON(
addMetadata(
response.data,
response.rows_before_limit_at_least,
parseLimit(searchParams.get("limit")),
parsePage(searchParams.get("page"))
)
);
} catch (e: any) {
logger.error(e);
prometheus.request_error.inc({ pathname: "/balance", status: 400 });
Expand Down
5 changes: 2 additions & 3 deletions src/fetch/openapi.ts
Original file line number Diff line number Diff line change
Expand Up @@ -49,11 +49,10 @@ const parameterLimit: ParameterObject = {
schema: { type: "number", maximum: config.maxLimit, minimum: 1 },
}

// TODO: Determine offset from `limit` and replace this with page numbers
const parameterOffset: ParameterObject = {
name: "offset",
name: "page",
in: "query",
description: "Index offset for results pagination.",
description: "Page index for results pagination.",
required: false,
schema: { type: "number", minimum: 1 },
}
Expand Down
12 changes: 10 additions & 2 deletions src/fetch/supply.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@ import { makeQuery } from "../clickhouse/makeQuery.js";
import { logger } from "../logger.js";
import { getTotalSupply } from "../queries.js";
import * as prometheus from "../prometheus.js";
import { toJSON } from "./utils.js";
import { addMetadata, toJSON } from "./utils.js";
import { parseLimit, parsePage } from "../utils.js";

function verifyParams(searchParams: URLSearchParams) {
const contract = searchParams.get("contract");
Expand All @@ -20,7 +21,14 @@ export default async function (req: Request) {
const query = getTotalSupply(searchParams);
const response = await makeQuery(query)

return toJSON(response.data);
return toJSON(
addMetadata(
response.data,
response.rows_before_limit_at_least,
parseLimit(searchParams.get("limit")),
parsePage(searchParams.get("page"))
)
);
} catch (e: any) {
logger.error(e);
prometheus.request_error.inc({ pathname: "/supply", status: 400 });
Expand Down
12 changes: 10 additions & 2 deletions src/fetch/transfers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@ import { makeQuery } from "../clickhouse/makeQuery.js";
import { logger } from "../logger.js";
import { getTransfers } from "../queries.js";
import * as prometheus from "../prometheus.js";
import { toJSON } from "./utils.js";
import { addMetadata, toJSON } from "./utils.js";
import { parseLimit, parsePage } from "../utils.js";

export default async function (req: Request) {
try {
Expand All @@ -12,7 +13,14 @@ export default async function (req: Request) {
const query = getTransfers(searchParams);
const response = await makeQuery(query)

return toJSON(response.data);
return toJSON(
addMetadata(
response.data,
response.rows_before_limit_at_least,
parseLimit(searchParams.get("limit")),
parsePage(searchParams.get("page"))
)
);
} catch (e: any) {
logger.error(e);
prometheus.request_error.inc({ pathname: "/transfers", status: 400 });
Expand Down
38 changes: 38 additions & 0 deletions src/fetch/utils.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
import { expect, test } from "bun:test";
import { addMetadata } from "./utils.js";

test("addMetadata pagination", () => {
    const limit = 5;
    // Simulate a query whose total result count (5x the limit) exceeds a
    // single page, making the pagination metadata meaningful.
    const totalResults = 5 * limit;
    const data = Array(limit);

    // [page, expected next_page, expected previous_page]
    const cases = [
        [1, 2, 1], // previous_page is clamped to 1 on the first page
        [3, 4, 2], // odd interior page
        [4, 5, 3], // even interior page
        [5, 5, 4], // next_page is capped to total_pages on the last page
    ];

    for (const [page, nextPage, previousPage] of cases) {
        const result = addMetadata(data, totalResults, limit, page);
        expect(result.meta.next_page).toBe(nextPage);
        expect(result.meta.previous_page).toBe(previousPage);
        expect(result.meta.total_pages).toBe(5);
        expect(result.meta.total_results).toBe(totalResults);
    }

    // TODO: Expect error message on beyond last page
    // const beyondLastPage = addMetadata(data, totalResults, limit, 6);
});
13 changes: 13 additions & 0 deletions src/fetch/utils.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,16 @@
// Serialize `data` as a JSON HTTP response with the given status (200 by default).
export function toJSON(data: any, status: number = 200) {
    const body = JSON.stringify(data);
    const headers = { "Content-Type": "application/json" };
    return new Response(body, { status, headers });
}

/**
 * Wraps query results with pagination metadata.
 *
 * @param data               Rows returned for the current page.
 * @param total_before_limit Total matching rows before LIMIT was applied
 *                           (ClickHouse `rows_before_limit_at_least`).
 * @param limit              Page size; `0` (or less) means "no limit".
 * @param page               Current 1-based page number.
 */
export function addMetadata(data: any[], total_before_limit: number, limit: number, page: number) {
    // TODO: Catch page number greater than total_pages and return error
    // A non-positive limit means "no limit": every result fits on a single
    // page. This also avoids a division by zero below, which would produce
    // `total_pages: Infinity` (serialized as `null` in JSON).
    const totalPages = (limit > 0) ? Math.ceil(total_before_limit / limit) : 1;
    // With no limit we are always on the (only) last page.
    const onLastPage = (limit <= 0) || (page * limit >= total_before_limit);
    return {
        data,
        meta: {
            // Cap next_page at the current page once the last page is reached
            "next_page": onLastPage ? page : page + 1,
            // Clamp previous_page to the current page on (or before) page 1
            "previous_page": (page <= 1) ? page : page - 1,
            "total_pages": totalPages,
            "total_results": total_before_limit
        }
    }
}
7 changes: 4 additions & 3 deletions src/queries.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import {
getTransfers,
addAmountFilter,
} from "./queries.js";
import { config } from "./config.js";

const contract = "eosio.token";
const account = "push.sx";
Expand Down Expand Up @@ -64,7 +65,7 @@ test("getTotalSupply", () => {
)
);
expect(query).toContain(formatSQL(`ORDER BY block_number DESC`));
expect(query).toContain(formatSQL(`LIMIT 1`));
expect(query).toContain(formatSQL(`LIMIT ${config.maxLimit}`));
});

test("getTotalSupply with options", () => {
Expand Down Expand Up @@ -98,7 +99,7 @@ test("getBalanceChange", () => {
)
);
expect(query).toContain(formatSQL(`ORDER BY timestamp DESC`));
expect(query).toContain(formatSQL(`LIMIT 1`));
expect(query).toContain(formatSQL(`LIMIT ${config.maxLimit}`));
});

test("getBalanceChanges with options", () => {
Expand Down Expand Up @@ -133,5 +134,5 @@ test("getTransfers", () => {
)
);
expect(query).toContain(formatSQL(`ORDER BY timestamp DESC`));
expect(query).toContain(formatSQL(`LIMIT 100`));
expect(query).toContain(formatSQL(`LIMIT ${config.maxLimit}`));
});
30 changes: 15 additions & 15 deletions src/queries.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { DEFAULT_SORT_BY } from "./config.js";
import { parseLimit, parseTimestamp } from "./utils.js";
import { DEFAULT_SORT_BY, config } from "./config.js";
import { parseLimit, parsePage, parseTimestamp } from "./utils.js";

// For reference on Clickhouse Database tables:
// https://raw.githubusercontent.com/pinax-network/substreams-antelope-tokens/main/schema.sql
Expand Down Expand Up @@ -82,11 +82,11 @@ export function getTotalSupply(searchParams: URLSearchParams, example?: boolean)
query += ` ORDER BY block_number ${sort_by ?? DEFAULT_SORT_BY} `;
}

const limit = parseLimit(searchParams.get("limit"));
query += ` LIMIT ${limit} `;
const offset = searchParams.get("offset");
if (offset) query += ` OFFSET ${offset} `;
const limit = parseLimit(searchParams.get("limit"), config.maxLimit);
if (limit) query += ` LIMIT ${limit}`;

const page = parsePage(searchParams.get("page"));
if (page) query += ` OFFSET ${limit * (page - 1)} `;

return query;
}
Expand Down Expand Up @@ -115,11 +115,11 @@ export function getBalanceChanges(searchParams: URLSearchParams, example?: boole
//if (contract && !account) query += `GROUP BY (contract, account) ORDER BY timestamp DESC`;
}

const limit = parseLimit(searchParams.get("limit"));
query += ` LIMIT ${limit} `;
const limit = parseLimit(searchParams.get("limit"), config.maxLimit);
if (limit) query += ` LIMIT ${limit}`;

const offset = searchParams.get("offset");
if (offset) query += ` OFFSET ${offset} `;
const page = parsePage(searchParams.get("page"));
if (page) query += ` OFFSET ${limit * (page - 1)} `;

return query;
}
Expand Down Expand Up @@ -154,11 +154,11 @@ export function getTransfers(searchParams: URLSearchParams, example?: boolean) {
query += ` ORDER BY timestamp DESC`;
}

const limit = parseLimit(searchParams.get("limit"), 100);
query += ` LIMIT ${limit} `;
const limit = parseLimit(searchParams.get("limit"), config.maxLimit);
if (limit) query += ` LIMIT ${limit}`;

const offset = searchParams.get("offset");
if (offset) query += ` OFFSET ${offset} `;
const page = parsePage(searchParams.get("page"));
if (page) query += ` OFFSET ${limit * (page - 1)} `;

return query;
}
18 changes: 16 additions & 2 deletions src/utils.spec.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,27 @@
import { expect, test } from "bun:test";
import { parseBlockId, parseTimestamp } from "./utils.js";
import { parseBlockId, parseLimit, parsePage, parseTimestamp } from "./utils.js";
import { config } from "./config.js";

test("parseBlockId", () => {
expect(parseBlockId("0x123") as string).toBe("123");
});

test("parseLimit", () => {
expect(parseLimit("1")).toBe(1);
expect(parseLimit("0")).toBe(0);
expect(parseLimit(10)).toBe(10);
expect(parseLimit(config.maxLimit + 1)).toBe(config.maxLimit);
});

test("parsePage", () => {
expect(parsePage("1")).toBe(1);
expect(parsePage("0")).toBe(1);
expect(parsePage(10)).toBe(10);
});

test("parseTimestamp", () => {
expect(parseTimestamp("1697587100")).toBe(1697587100);
expect(parseTimestamp("1697587100000")).toBe(1697587100);
expect(parseTimestamp("awdawd")).toBeNaN();
expect(parseTimestamp(null)).toBeUndefined();
});
});
23 changes: 19 additions & 4 deletions src/utils.ts
Original file line number Diff line number Diff line change
@@ -1,20 +1,35 @@
import { config } from "./config.js";

export function parseBlockId(block_id?: string | null) {
return block_id ? block_id.replace("0x", "") : undefined;
}

export function parseLimit(limit?: string | null | number, defaultLimit?: number) {
let value = 1 // default 1
let value = 0; // default 0 (no limit)
if (defaultLimit)
value = defaultLimit;
if (limit) {
if (typeof limit === "string") value = parseInt(limit);
if (typeof limit === "number") value = limit;
}
// limit must be between 1 and maxLimit
// limit must be between 0 (no limit) and maxLimit
if (value < 0) value = 0;
if (value > config.maxLimit) value = config.maxLimit;
return value;
}

export function parseBlockId(block_id?: string | null) {
return block_id ? block_id.replace("0x", "") : undefined;
/**
 * Parses a 1-based page number from a query parameter.
 * Missing, non-numeric, or non-positive values fall back to page 1.
 */
export function parsePage(page?: string | null | number) {
    let value = 1;

    if (page) {
        if (typeof page === "string") value = parseInt(page);
        if (typeof page === "number") value = page;
    }

    // parseInt yields NaN for non-numeric strings; NaN fails every `<=`
    // comparison, so it must be checked explicitly or it would leak into
    // the SQL `OFFSET` computation.
    if (Number.isNaN(value) || value <= 0)
        value = 1;

    return value;
}

export function parseTimestamp(timestamp?: string | null | number) {
Expand Down

0 comments on commit a448010

Please sign in to comment.