Instances list endpoint pagination #64

Open · wants to merge 18 commits into base: master

Changes from 7 commits
1 change: 0 additions & 1 deletion .eslintrc
@@ -51,7 +51,6 @@
"no-trailing-spaces": 2,
"no-underscore-dangle": 0,
"no-unneeded-ternary": 1,
"one-var": 2,
"quotes": [2, "single", "avoid-escape"],
"semi": [2, "always"],
"keyword-spacing": 2,
7 changes: 7 additions & 0 deletions docs/environment-setup.md
@@ -9,6 +9,7 @@ To define where Postgres and Redis clients will connect to you can define the fo
- [`CLAY_STORAGE_POSTGRES_DB`](#clay_storage_postgres_db)
- [`CLAY_STORAGE_POSTGRES_CACHE_ENABLED`](#clay_storage_postgres_cache_enabled)
- [`CLAY_STORAGE_POSTGRES_CACHE_HOST`](#clay_storage_postgres_cache_host)
- [`CLAY_STORAGE_POSTGRES_PAGE_SIZE`](#clay_storage_postgres_page_size)
Contributor:
This is referred to everywhere else as CLAY_STORAGE_PAGE_SIZE.
It seems that we already have an inconsistent naming scheme for the environment variables in this project, but maybe stick with the more generic CLAY_STORAGE_PAGE_SIZE (something that other storage back-ends could adhere to).


---
## Postgres
@@ -43,6 +44,12 @@ The port where your Postgres instance resides on its host.

The database within Postgres to connect to.

### `CLAY_STORAGE_PAGE_SIZE`

**Default:** `null` _(Number)_

Default page size for list queries. When set, list endpoints are paginated by default using this page size.

Contributor:
I think you should note in the docs here that a page size of 0 will explicitly disable pagination.
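To make the reviewer's point concrete, here is a minimal sketch (not code from this PR) of how the parsing in `services/constants.js` treats the variable; `resolvePageSize` is a hypothetical helper added only for illustration.

```js
'use strict';

// Hypothetical helper mirroring the parsing in services/constants.js:
// `parseInt(process.env.CLAY_STORAGE_PAGE_SIZE) || null`.
function resolvePageSize(raw) {
  // parseInt('0', 10) is 0, which is falsy, so `|| null` collapses an explicit
  // 0 into null, the same result as leaving the variable unset.
  return parseInt(raw, 10) || null;
}

console.log(resolvePageSize(undefined)); // null (pagination off by default)
console.log(resolvePageSize('20'));      // 20 (lists paginate in pages of 20)
console.log(resolvePageSize('0'));       // null (currently indistinguishable from unset)
```

Under the current parsing, honoring the suggestion above would mean distinguishing an explicit `0` from an unset variable.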

---

## Redis
1 change: 1 addition & 0 deletions index.js
@@ -12,3 +12,4 @@ module.exports.patchMeta = db.patchMeta;
module.exports.getMeta = db.getMeta;
module.exports.raw = db.raw;
module.exports.createReadStream = db.createReadStream;
module.exports.paginate = db.paginate;
48 changes: 46 additions & 2 deletions postgres/client.js
@@ -1,14 +1,24 @@
'use strict';

const { POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_HOST, POSTGRES_PORT, POSTGRES_DB, CONNECTION_POOL_MIN, CONNECTION_POOL_MAX } = require('../services/constants'),
const {
POSTGRES_USER,
POSTGRES_PASSWORD,
POSTGRES_HOST,
POSTGRES_PORT,
POSTGRES_DB,
CONNECTION_POOL_MIN,
CONNECTION_POOL_MAX,
PAGE_SIZE,
} = require('../services/constants'),
{ notFoundError } = require('../services/errors'),
{ parseOrNot, wrapInObject, decode } = require('../services/utils'),
{ findSchemaAndTable, wrapJSONStringInObject } = require('../services/utils'),
knexLib = require('knex'),
{ isList, isUri } = require('clayutils'),
TransformStream = require('../services/list-transform-stream'),
META_PUT_PATCH_FN = patch('meta');
var knex, log = require('../services/log').setup({ file: __filename });
var knex,
log = require('../services/log').setup({ file: __filename });

/**
* Connect to the default DB and create the Clay
@@ -253,6 +263,39 @@ function createReadStream(options) {
return transform;
}

/**
* Gets a list of components as a readable stream; supports pagination.
* @param {Object} options
* @returns {Stream}
*/
function paginate(options) {
const { prefix, values, keys, previous, size } = options;
const transform = TransformStream(options);
const selects = [];
const pageSize = size || PAGE_SIZE; // TODO add range, other checks

if (keys) selects.push('id');
if (values) selects.push('data');

const query = baseQuery(prefix);

query.select(...selects);
query.where('id', 'like', `${prefix}%`); // site

if (previous) {
query.where('id', '>', previous); // TODO validation of previous id
}

if (pageSize) {
query.limit(pageSize);
query.orderBy('id', 'asc');
}

query.pipe(transform);

return transform;
}

/**
* [putMeta description]
* @param {[type]} key [description]
@@ -342,6 +385,7 @@ module.exports.getMeta = getMeta;
module.exports.putMeta = putMeta;
module.exports.patchMeta = META_PUT_PATCH_FN;
module.exports.createReadStream = createReadStream;
module.exports.paginate = paginate;

// Knex methods
module.exports.createSchema = createSchema;
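For readers less familiar with knex, the following sketch (illustrative only, not part of the PR) approximates the query that `paginate` builds when `keys`, `values`, a page `size`, and a `previous` cursor are all supplied. The table name and connection settings are placeholders; in the real code `baseQuery(prefix)` resolves the table from the prefix.

```js
'use strict';

const knexLib = require('knex');

// Placeholder connection config; query.toString() below never hits the database.
const knex = knexLib({ client: 'pg', connection: {} });

const prefix = 'nymag.com/_components/article/instances',
  previous = 'nymag.com/_components/article/instances/abc';

const query = knex('components.article') // placeholder; baseQuery(prefix) picks the real table
  .select('id', 'data')                  // keys and values both requested
  .where('id', 'like', `${prefix}%`)     // restrict to the site/prefix
  .where('id', '>', previous)            // cursor: only rows after the last id seen
  .limit(20)                             // page size
  .orderBy('id', 'asc');                 // stable order so the cursor is meaningful

console.log(query.toString());
// roughly: select "id", "data" from "components"."article"
//   where "id" like 'nymag.com/_components/article/instances%'
//   and "id" > 'nymag.com/_components/article/instances/abc'
//   order by "id" asc limit 20
```

Ordering by `id` is what makes the `previous` cursor deterministic: each page picks up exactly where the last one left off.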
94 changes: 94 additions & 0 deletions postgres/client.test.js
@@ -488,6 +488,100 @@ describe('postgres/client', () => {
});
});

describe.only('paginate', () => {
const pipe = jest.fn(() => ({})),
where = jest.fn(() => ({})),
select = jest.fn(() => ({})),
withSchema = jest.fn(() => ({})),
limit = jest.fn(() => ({})),
orderBy = jest.fn(() => ({})),
knex = jest.fn(() => ({
withSchema,
select,
where,
limit,
orderBy,
pipe,
})),
mockedTransform = {};

beforeEach(() => {
client.setClient(knex);
});

test('creates a read stream of query results with id and data columns', () => {
TransformStream.mockReturnValueOnce(mockedTransform);

const options = {
prefix: 'nymag.com/_uris',
values: true,
keys: true,
},
transform = client.paginate(options);

expect(withSchema.mock.calls.length).toBe(0);
expect(select.mock.calls.length).toBe(1);
expect(select.mock.calls[0][0]).toBe('id');
expect(select.mock.calls[0][1]).toBe('data');
expect(where.mock.calls.length).toBe(1);
expect(where.mock.calls[0][1]).toBe('like');
expect(where.mock.calls[0][2]).toBe(`${options.prefix}%`);
expect(transform).toBe(mockedTransform);
});

test('creates a read stream of query results without id and data columns', () => {
TransformStream.mockReturnValueOnce(mockedTransform);

const options = {
prefix: 'nymag.com/_uris',
values: false,
keys: false,
},
transform = client.paginate(options);

expect(withSchema.mock.calls.length).toBe(0);
expect(select.mock.calls.length).toBe(1);
expect(select.mock.calls[0][0]).toBe(undefined);
expect(where.mock.calls.length).toBe(1);
expect(where.mock.calls[0][1]).toBe('like');
expect(where.mock.calls[0][2]).toBe(`${options.prefix}%`);
expect(transform).toBe(mockedTransform);
});

test('queries with a limit and order when page size is set', () => {
TransformStream.mockReturnValueOnce(mockedTransform);

const options = {
prefix: 'nymag.com/_uris',
values: false,
keys: false,
size: 20,
};

client.paginate(options);

expect(limit.mock.calls[0][0]).toBe(20);
expect(orderBy.mock.calls.length).toBe(1);
expect(orderBy.mock.calls[0]).toEqual(['id', 'asc']);
});

test('queries with where id > previous when previous is set', () => {
const options = {
prefix: 'nymag.com/_uris',
values: false,
keys: false,
size: 20,
previous: 'nymag.com/components/ad/instances/aaa',
};

client.paginate(options);

expect(where.mock.calls[1]).toEqual(['id', '>', options.previous]);
expect(orderBy.mock.calls.length).toBe(1);
expect(orderBy.mock.calls[0]).toEqual(['id', 'asc']);
});
});

describe('put', () => {
const update = jest.fn(() => 'update sql'),
insert = jest.fn(() => 'insert sql'),
2 changes: 2 additions & 0 deletions services/constants.js
@@ -11,6 +11,8 @@ module.exports.POSTGRES_DB = process.env.CLAY_STORAGE_POSTGRES_DB ||
module.exports.CONNECTION_POOL_MIN = parseInt(process.env.CLAY_STORAGE_CONNECTION_POOL_MIN, 10) || 2;
module.exports.CONNECTION_POOL_MAX = parseInt(process.env.CLAY_STORAGE_CONNECTION_POOL_MAX, 10) || 10;

module.exports.PAGE_SIZE = parseInt(process.env.CLAY_STORAGE_PAGE_SIZE) || null;

// Redis
module.exports.CACHE_ENABLED = process.env.CLAY_STORAGE_POSTGRES_CACHE_ENABLED || false;
module.exports.REDIS_URL = process.env.CLAY_STORAGE_POSTGRES_CACHE_HOST;
1 change: 1 addition & 0 deletions services/db.js
@@ -101,3 +101,4 @@ module.exports.putMeta = postgres.putMeta;
module.exports.getMeta = postgres.getMeta;
module.exports.patchMeta = postgres.patchMeta;
module.exports.createReadStream = postgres.createReadStream;
module.exports.paginate = postgres.paginate;
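
Putting the pieces together, here is a hedged end-to-end sketch of walking every page of a list through the new `paginate` export. It assumes that the returned stream emits row objects exposing the `id` column requested via `keys: true`; that shape comes from the list transform stream and is not spelled out in this diff, so treat it as an assumption.

```js
'use strict';

const db = require('./services/db'); // relative to the repository root

// Read one page as an array of rows. `previous` is the id of the last row on
// the prior page, matching the cursor logic in paginate().
function readPage(previous) {
  return new Promise((resolve, reject) => {
    const rows = [];

    db.paginate({ prefix: 'nymag.com/_uris', keys: true, values: true, size: 20, previous })
      .on('data', (row) => rows.push(row))
      .on('end', () => resolve(rows))
      .on('error', reject);
  });
}

// Walk pages until an empty page comes back, using the last id as the cursor.
async function readAll() {
  let rows = await readPage();

  while (rows.length) {
    rows.forEach((row) => console.log(row));
    // Assumption: each emitted row exposes the `id` column selected above.
    rows = await readPage(rows[rows.length - 1].id);
  }
}

readAll().catch((err) => console.error(err));
```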