+
+
+
+
+
diff --git a/e2e/sveltekit/src/routes/pagination/fragment/bidirectional-cursor/spec.ts b/e2e/sveltekit/src/routes/pagination/fragment/bidirectional-cursor/spec.ts
new file mode 100644
index 0000000000..966beca3ad
--- /dev/null
+++ b/e2e/sveltekit/src/routes/pagination/fragment/bidirectional-cursor/spec.ts
@@ -0,0 +1,113 @@
+import { expect, test } from '@playwright/test';
+import { routes } from '../../../../lib/utils/routes.js';
+import {
+ expect_1_gql,
+ expect_0_gql,
+ expectToBe,
+ expectToContain,
+ goto
+} from '../../../../lib/utils/testsHelper.js';
+
+test.describe('bidirectional cursor paginated fragment', () => {
+ test('backwards and then forwards', async ({ page }) => {
+ await goto(page, routes.Pagination_fragment_bidirectional_cursor);
+
+ await expectToBe(page, 'Morgan Freeman, Tom Hanks');
+
+ /// Click on the previous button
+
+ // load the previous page and wait for the response
+ await expect_1_gql(page, 'button[id=previous]');
+
+ // make sure we got the new content
+ await expectToBe(page, 'Bruce Willis, Samuel Jackson, Morgan Freeman, Tom Hanks');
+
+ // there should be a next page
+ await expectToContain(page, `"hasNextPage":true`);
+ // there should be no previous page
+ await expectToContain(page, `"hasPreviousPage":false`);
+
+ /// Click on the next button
+
+ // load the next page and wait for the response
+ await expect_1_gql(page, 'button[id=next]');
+
+ // there should be no previous page
+ await expectToContain(page, `"hasPreviousPage":false`);
+ // there should be a next page
+ await expectToContain(page, `"hasNextPage":true`);
+
+ // make sure we got the new content
+ await expectToBe(
+ page,
+ 'Bruce Willis, Samuel Jackson, Morgan Freeman, Tom Hanks, Will Smith, Harrison Ford'
+ );
+
+ /// Click on the next button
+
+ // load the next page and wait for the response
+ await expect_1_gql(page, 'button[id=next]');
+
+ // there should be no previous page
+ await expectToContain(page, `"hasPreviousPage":false`);
+ // there should be no next page
+ await expectToContain(page, `"hasNextPage":false`);
+
+ // make sure we got the new content
+ await expectToBe(
+ page,
+ 'Bruce Willis, Samuel Jackson, Morgan Freeman, Tom Hanks, Will Smith, Harrison Ford, Eddie Murphy, Clint Eastwood'
+ );
+ });
+
+ test('forwards then backwards and then forwards again', async ({ page }) => {
+ await goto(page, routes.Pagination_fragment_bidirectional_cursor);
+
+ await expectToBe(page, 'Morgan Freeman, Tom Hanks');
+
+ /// Click on the next button
+
+ // load the next page and wait for the response
+ await expect_1_gql(page, 'button[id=next]');
+
+ // there should be a previous page
+ await expectToContain(page, `"hasPreviousPage":true`);
+ // there should be a next page
+ await expectToContain(page, `"hasNextPage":true`);
+
+ // make sure we got the new content
+ await expectToBe(page, 'Morgan Freeman, Tom Hanks, Will Smith, Harrison Ford');
+
+ /// Click on the previous button
+
+ // load the previous page and wait for the response
+ await expect_1_gql(page, 'button[id=previous]');
+
+ // make sure we got the new content
+ await expectToBe(
+ page,
+ 'Bruce Willis, Samuel Jackson, Morgan Freeman, Tom Hanks, Will Smith, Harrison Ford'
+ );
+
+ // there should be a next page
+ await expectToContain(page, `"hasNextPage":true`);
+ // there should be no previous page
+ await expectToContain(page, `"hasPreviousPage":false`);
+
+ /// Click on the next button
+
+ // load the next page and wait for the response
+ await expect_1_gql(page, 'button[id=next]');
+
+ // there should be no previous page
+ await expectToContain(page, `"hasPreviousPage":false`);
+ // there should be no next page
+ await expectToContain(page, `"hasNextPage":false`);
+
+ // make sure we got the new content
+ await expectToBe(
+ page,
+ 'Bruce Willis, Samuel Jackson, Morgan Freeman, Tom Hanks, Will Smith, Harrison Ford, Eddie Murphy, Clint Eastwood'
+ );
+ });
+});
diff --git a/e2e/sveltekit/src/routes/pagination/query/bidirectional-cursor/+page.svelte b/e2e/sveltekit/src/routes/pagination/query/bidirectional-cursor/+page.svelte
new file mode 100644
index 0000000000..fda0172d1e
--- /dev/null
+++ b/e2e/sveltekit/src/routes/pagination/query/bidirectional-cursor/+page.svelte
@@ -0,0 +1,34 @@
+
+
+
+
+
+
+
+
diff --git a/e2e/sveltekit/src/routes/pagination/query/bidirectional-cursor/spec.ts b/e2e/sveltekit/src/routes/pagination/query/bidirectional-cursor/spec.ts
new file mode 100644
index 0000000000..83e1d0fb4f
--- /dev/null
+++ b/e2e/sveltekit/src/routes/pagination/query/bidirectional-cursor/spec.ts
@@ -0,0 +1,113 @@
+import { expect, test } from '@playwright/test';
+import { routes } from '../../../../lib/utils/routes.js';
+import {
+ expect_1_gql,
+ expect_0_gql,
+ expectToBe,
+ expectToContain,
+ goto
+} from '../../../../lib/utils/testsHelper.js';
+
+test.describe('bidirectional cursor paginated query', () => {
+ test('backwards and then forwards', async ({ page }) => {
+ await goto(page, routes.Pagination_query_bidirectional_cursor);
+
+ await expectToBe(page, 'Morgan Freeman, Tom Hanks');
+
+ /// Click on the previous button
+
+ // load the previous page and wait for the response
+ await expect_1_gql(page, 'button[id=previous]');
+
+ // make sure we got the new content
+ await expectToBe(page, 'Bruce Willis, Samuel Jackson, Morgan Freeman, Tom Hanks');
+
+ // there should be a next page
+ await expectToContain(page, `"hasNextPage":true`);
+ // there should be no previous page
+ await expectToContain(page, `"hasPreviousPage":false`);
+
+ /// Click on the next button
+
+ // load the next page and wait for the response
+ await expect_1_gql(page, 'button[id=next]');
+
+ // there should be no previous page
+ await expectToContain(page, `"hasPreviousPage":false`);
+ // there should be a next page
+ await expectToContain(page, `"hasNextPage":true`);
+
+ // make sure we got the new content
+ await expectToBe(
+ page,
+ 'Bruce Willis, Samuel Jackson, Morgan Freeman, Tom Hanks, Will Smith, Harrison Ford'
+ );
+
+ /// Click on the next button
+
+ // load the next page and wait for the response
+ await expect_1_gql(page, 'button[id=next]');
+
+ // there should be no previous page
+ await expectToContain(page, `"hasPreviousPage":false`);
+ // there should be no next page
+ await expectToContain(page, `"hasNextPage":false`);
+
+ // make sure we got the new content
+ await expectToBe(
+ page,
+ 'Bruce Willis, Samuel Jackson, Morgan Freeman, Tom Hanks, Will Smith, Harrison Ford, Eddie Murphy, Clint Eastwood'
+ );
+ });
+
+ test('forwards then backwards and then forwards again', async ({ page }) => {
+ await goto(page, routes.Pagination_query_bidirectional_cursor);
+
+ await expectToBe(page, 'Morgan Freeman, Tom Hanks');
+
+ /// Click on the next button
+
+ // load the next page and wait for the response
+ await expect_1_gql(page, 'button[id=next]');
+
+ // there should be a previous page
+ await expectToContain(page, `"hasPreviousPage":true`);
+ // there should be a next page
+ await expectToContain(page, `"hasNextPage":true`);
+
+ // make sure we got the new content
+ await expectToBe(page, 'Morgan Freeman, Tom Hanks, Will Smith, Harrison Ford');
+
+ /// Click on the previous button
+
+ // load the previous page and wait for the response
+ await expect_1_gql(page, 'button[id=previous]');
+
+ // make sure we got the new content
+ await expectToBe(
+ page,
+ 'Bruce Willis, Samuel Jackson, Morgan Freeman, Tom Hanks, Will Smith, Harrison Ford'
+ );
+
+ // there should be a next page
+ await expectToContain(page, `"hasNextPage":true`);
+ // there should be no previous page
+ await expectToContain(page, `"hasPreviousPage":false`);
+
+ /// Click on the next button
+
+ // load the next page and wait for the response
+ await expect_1_gql(page, 'button[id=next]');
+
+ // there should be no previous page
+ await expectToContain(page, `"hasPreviousPage":false`);
+ // there should be no next page
+ await expectToContain(page, `"hasNextPage":false`);
+
+ // make sure we got the new content
+ await expectToBe(
+ page,
+ 'Bruce Willis, Samuel Jackson, Morgan Freeman, Tom Hanks, Will Smith, Harrison Ford, Eddie Murphy, Clint Eastwood'
+ );
+ });
+});
diff --git a/e2e/sveltekit/src/routes/pagination/query/offset/+page.svelte b/e2e/sveltekit/src/routes/pagination/query/offset/+page.svelte
index f7463a4a9a..56c964012d 100644
--- a/e2e/sveltekit/src/routes/pagination/query/offset/+page.svelte
+++ b/e2e/sveltekit/src/routes/pagination/query/offset/+page.svelte
@@ -1,13 +1,13 @@
diff --git a/packages/houdini-svelte/src/plugin/codegen/stores/fragment.ts b/packages/houdini-svelte/src/plugin/codegen/stores/fragment.ts
index df14cd2789..4efac83d91 100644
--- a/packages/houdini-svelte/src/plugin/codegen/stores/fragment.ts
+++ b/packages/houdini-svelte/src/plugin/codegen/stores/fragment.ts
@@ -18,10 +18,7 @@ export async function fragmentStore(
// in order to build the store, we need to know what class we're going to import from
let which: keyof Required['customStores'] = 'fragment'
if (paginationMethod === 'cursor') {
- which =
- doc.refetch?.direction === 'forward'
- ? 'fragmentForwardsCursor'
- : 'fragmentBackwardsCursor'
+ which = 'fragmentCursor'
} else if (paginationMethod === 'offset') {
which = 'fragmentOffset'
}
diff --git a/packages/houdini-svelte/src/plugin/codegen/stores/query.test.ts b/packages/houdini-svelte/src/plugin/codegen/stores/query.test.ts
index 4b5009a49b..fd02b10144 100644
--- a/packages/houdini-svelte/src/plugin/codegen/stores/query.test.ts
+++ b/packages/houdini-svelte/src/plugin/codegen/stores/query.test.ts
@@ -214,10 +214,10 @@ test('forward cursor pagination', async function () {
// check the file contents
await expect(parsed).toMatchInlineSnapshot(`
- import { QueryStoreForwardCursor } from '$houdini/plugins/houdini-svelte/runtime/stores'
+ import { QueryStoreCursor } from '$houdini/plugins/houdini-svelte/runtime/stores'
import artifact from '$houdini/artifacts/TestQuery'
- export class TestQueryStore extends QueryStoreForwardCursor {
+ export class TestQueryStore extends QueryStoreCursor {
constructor() {
super({
artifact,
@@ -263,10 +263,10 @@ test('backwards cursor pagination', async function () {
// check the file contents
await expect(parsed).toMatchInlineSnapshot(`
- import { QueryStoreBackwardCursor } from '$houdini/plugins/houdini-svelte/runtime/stores'
+ import { QueryStoreCursor } from '$houdini/plugins/houdini-svelte/runtime/stores'
import artifact from '$houdini/artifacts/TestQuery'
- export class TestQueryStore extends QueryStoreBackwardCursor {
+ export class TestQueryStore extends QueryStoreCursor {
constructor() {
super({
artifact,
diff --git a/packages/houdini-svelte/src/plugin/codegen/stores/query.ts b/packages/houdini-svelte/src/plugin/codegen/stores/query.ts
index 0f7cbf9d93..0b4bf3e0a3 100644
--- a/packages/houdini-svelte/src/plugin/codegen/stores/query.ts
+++ b/packages/houdini-svelte/src/plugin/codegen/stores/query.ts
@@ -31,8 +31,7 @@ export async function queryStore(
// in order to build the store, we need to know what class we're going to import from
let which: keyof Required['customStores'] = 'query'
if (paginationMethod === 'cursor') {
- which =
- doc.refetch?.direction === 'forward' ? 'queryForwardsCursor' : 'queryBackwardsCursor'
+ which = 'queryCursor'
} else if (paginationMethod === 'offset') {
which = 'queryOffset'
}
diff --git a/packages/houdini-svelte/src/plugin/index.ts b/packages/houdini-svelte/src/plugin/index.ts
index 1bdc39fe36..dea304382f 100644
--- a/packages/houdini-svelte/src/plugin/index.ts
+++ b/packages/houdini-svelte/src/plugin/index.ts
@@ -242,11 +242,9 @@ export type HoudiniSvelteConfig = {
mutation?: string
subscription?: string
fragment?: string
- queryForwardsCursor?: string
- queryBackwardsCursor?: string
+ queryCursor?: string
queryOffset?: string
- fragmentForwardsCursor?: string
- fragmentBackwardsCursor?: string
+ fragmentCursor?: string
fragmentOffset?: string
}
}
diff --git a/packages/houdini-svelte/src/plugin/kit.ts b/packages/houdini-svelte/src/plugin/kit.ts
index e05b00bd04..f571099830 100644
--- a/packages/houdini-svelte/src/plugin/kit.ts
+++ b/packages/houdini-svelte/src/plugin/kit.ts
@@ -371,15 +371,9 @@ export function plugin_config(config: Config): Required {
mutation: '$houdini/plugins/houdini-svelte/runtime/stores.MutationStore',
fragment: '$houdini/plugins/houdini-svelte/runtime/stores.FragmentStore',
subscription: '$houdini/plugins/houdini-svelte/runtime/stores.SubscriptionStore',
- queryForwardsCursor:
- '$houdini/plugins/houdini-svelte/runtime/stores.QueryStoreForwardCursor',
- queryBackwardsCursor:
- '$houdini/plugins/houdini-svelte/runtime/stores.QueryStoreBackwardCursor',
+ queryCursor: '$houdini/plugins/houdini-svelte/runtime/stores.QueryStoreCursor',
queryOffset: '$houdini/plugins/houdini-svelte/runtime/stores.QueryStoreOffset',
- fragmentForwardsCursor:
- '$houdini/plugins/houdini-svelte/runtime/stores.FragmentStoreForwardCursor',
- fragmentBackwardsCursor:
- '$houdini/plugins/houdini-svelte/runtime/stores.FragmentStoreBackwardCursor',
+ fragmentCursor: '$houdini/plugins/houdini-svelte/runtime/stores.FragmentStoreCursor',
fragmentOffset: '$houdini/plugins/houdini-svelte/runtime/stores.FragmentStoreOffset',
...cfg?.customStores,
},
diff --git a/packages/houdini-svelte/src/runtime/stores/pagination/cursor.ts b/packages/houdini-svelte/src/runtime/stores/pagination/cursor.ts
index 3eeb494f03..4648b93a19 100644
--- a/packages/houdini-svelte/src/runtime/stores/pagination/cursor.ts
+++ b/packages/houdini-svelte/src/runtime/stores/pagination/cursor.ts
@@ -24,7 +24,10 @@ export function cursorHandlers<_Data extends GraphQLObject, _Input extends Recor
storeName: string
observer: DocumentStore<_Data, _Input>
fetch: FetchFn<_Data, _Input>
- fetchUpdate: FetchFn<_Data, _Input>
+ fetchUpdate: (
+ arg: Parameters>[0],
+ updates: string[]
+ ) => ReturnType>
}): CursorHandlers<_Data, _Input> {
const pageInfo = writable(extractPageInfo(get(observer).data, artifact.refetch!.path))
@@ -37,12 +40,14 @@ export function cursorHandlers<_Data extends GraphQLObject, _Input extends Recor
functionName,
metadata = {},
fetch,
+ where,
}: {
pageSizeVar: string
functionName: string
input: _Input
metadata?: {}
fetch?: typeof globalThis.fetch
+ where: 'start' | 'end'
}) => {
const config = getCurrentConfig()
@@ -58,12 +63,16 @@ export function cursorHandlers<_Data extends GraphQLObject, _Input extends Recor
}
// send the query
- const { data } = await parentFetchUpdate({
- variables: loadVariables,
- fetch,
- metadata,
- policy: CachePolicy.NetworkOnly,
- })
+ const { data } = await parentFetchUpdate(
+ {
+ variables: loadVariables,
+ fetch,
+ metadata,
+ policy: CachePolicy.NetworkOnly,
+ },
+ // if we are adding to the start of the list, prepend the result
+ [where === 'start' ? 'prepend' : 'append']
+ )
// if the query is embedded in a node field (paginated fragments)
// make sure we look down one more for the updated page info
@@ -97,6 +106,11 @@ export function cursorHandlers<_Data extends GraphQLObject, _Input extends Recor
fetch?: typeof globalThis.fetch
metadata?: {}
} = {}) => {
+ if (artifact.refetch?.direction === 'backward') {
+ console.warn(`⚠️ ${storeName}.loadNextPage was called but it does not support forwards pagination.
+If you think this is an error, please open an issue on GitHub`)
+ return
+ }
// we need to find the connection object holding the current page info
const currentPageInfo = extractPageInfo(getState().data, artifact.refetch!.path)
// if there is no next page, we're done
@@ -106,10 +120,10 @@ export function cursorHandlers<_Data extends GraphQLObject, _Input extends Recor
// only specify the page count if we're given one
const input: any = {
+ first: first ?? artifact.refetch!.pageSize,
after: after ?? currentPageInfo.endCursor,
- }
- if (first) {
- input.first = first
+ before: null,
+ last: null,
}
// load the page
@@ -119,6 +133,7 @@ export function cursorHandlers<_Data extends GraphQLObject, _Input extends Recor
input,
fetch,
metadata,
+ where: 'end',
})
},
loadPreviousPage: async ({
@@ -132,6 +147,12 @@ export function cursorHandlers<_Data extends GraphQLObject, _Input extends Recor
fetch?: typeof globalThis.fetch
metadata?: {}
} = {}) => {
+ if (artifact.refetch?.direction === 'forward') {
+ console.warn(`⚠️ ${storeName}.loadPreviousPage was called but it does not support backwards pagination.
+If you think this is an error, please open an issue on GitHub`)
+ return
+ }
+
// we need to find the connection object holding the current page info
const currentPageInfo = extractPageInfo(getState().data, artifact.refetch!.path)
@@ -143,9 +164,9 @@ export function cursorHandlers<_Data extends GraphQLObject, _Input extends Recor
// only specify the page count if we're given one
const input: any = {
before: before ?? currentPageInfo.startCursor,
- }
- if (last) {
- input.last = last
+ last: last ?? artifact.refetch!.pageSize,
+ first: null,
+ after: null,
}
// load the page
@@ -155,6 +176,7 @@ export function cursorHandlers<_Data extends GraphQLObject, _Input extends Recor
input,
fetch,
metadata,
+ where: 'start',
})
},
pageInfo,
@@ -166,21 +188,22 @@ export function cursorHandlers<_Data extends GraphQLObject, _Input extends Recor
const { variables } = params ?? {}
- // build up the variables to pass to the query
- const queryVariables: Record = {
- ...variables,
- }
-
// if the input is different than the query variables then we just do everything like normal
if (variables && !deepEquals(getState().variables, variables)) {
- return await parentFetch({
- ...params,
- then(data) {
- pageInfo.set(extractPageInfo(data, artifact.refetch!.path))
- },
- })
+ return await parentFetch(params)
+ }
+
+ // we need to find the connection object holding the current page info
+ try {
+ var currentPageInfo = extractPageInfo(getState().data, artifact.refetch!.path)
+ } catch {
+ // if there was any issue getting the page info, just fetch like normal
+ return await parentFetch(params)
}
+ // build up the variables to pass to the query
+ const queryVariables: Record = {}
+
// we are updating the current set of items, count the number of items that currently exist
// and ask for the full data set
const count =
@@ -189,10 +212,43 @@ export function cursorHandlers<_Data extends GraphQLObject, _Input extends Recor
// if there are more records than the first page, we need fetch to load everything
if (count && count > artifact.refetch!.pageSize) {
- // reverse cursors need the last entries in the list
- queryVariables[artifact.refetch!.update === 'prepend' ? 'last' : 'first'] = count
+ // if we aren't at one of the boundaries, we can't refresh the current window
+ // of a paginated field. warn the user if that's the case
+ if (
+ currentPageInfo.hasPreviousPage &&
+ currentPageInfo.hasNextPage &&
+ // only log if they haven't provided special parameters
+ !(
+ (variables?.['first'] && variables?.['after']) ||
+ (variables?.['last'] && variables?.['before'])
+ )
+ ) {
+ console.warn(`⚠️ Encountered a fetch() in the middle of the connection.
+Make sure to pass a cursor value by hand that includes the current set (ie the entry before startCursor)
+`)
+ return observer.state
+ }
+
+ // if we are loading the first boundary
+ if (!currentPageInfo.hasPreviousPage) {
+ queryVariables['first'] = count
+ queryVariables['after'] = null
+ queryVariables['last'] = null
+ queryVariables['before'] = null
+ }
+
+ // or we're loading the last boundary
+ else if (!currentPageInfo.hasNextPage) {
+ queryVariables['last'] = count
+ queryVariables['first'] = null
+ queryVariables['after'] = null
+ queryVariables['before'] = null
+ }
}
+ // let the user overwrite the variables
+ Object.assign(queryVariables, variables ?? {})
+
// send the query
const result = await parentFetch({
...params,
diff --git a/packages/houdini-svelte/src/runtime/stores/pagination/fragment.ts b/packages/houdini-svelte/src/runtime/stores/pagination/fragment.ts
index 88760a3962..6fb16c8b57 100644
--- a/packages/houdini-svelte/src/runtime/stores/pagination/fragment.ts
+++ b/packages/houdini-svelte/src/runtime/stores/pagination/fragment.ts
@@ -16,7 +16,7 @@ import type { StoreConfig } from '../query'
import type { CursorHandlers } from './cursor'
import { cursorHandlers } from './cursor'
import { offsetHandlers } from './offset'
-import type { PageInfo } from './pageInfo'
+import { extractPageInfo, type PageInfo } from './pageInfo'
type FragmentStoreConfig<_Data extends GraphQLObject, _Input> = StoreConfig<
_Data,
@@ -69,7 +69,7 @@ class BasePaginatedFragmentStore<_Data extends GraphQLObject, _Input> {
}
// both cursor paginated stores add a page info to their subscribe
-class FragmentStoreCursor<
+export class FragmentStoreCursor<
_Data extends GraphQLObject,
_Input extends Record
> extends BasePaginatedFragmentStore<_Data, _Input> {
@@ -92,11 +92,14 @@ class FragmentStoreCursor<
| undefined
): (() => void) => {
const combined = derived(
- [store, handlers.pageInfo],
- ([$parent, $pageInfo]) =>
+ [store],
+ ([$parent]) =>
({
...$parent,
- pageInfo: $pageInfo,
+ pageInfo: extractPageInfo(
+ $parent.data,
+ this.paginationArtifact.refetch!.path
+ ),
} as FragmentPaginatedResult<_Data, { pageInfo: PageInfo }>)
)
@@ -110,13 +113,17 @@ class FragmentStoreCursor<
fetching: derived(store, ($store) => $store.fetching),
fetch: handlers.fetch,
pageInfo: handlers.pageInfo,
+
+ // add the pagination handlers
+ loadNextPage: handlers.loadNextPage,
+ loadPreviousPage: handlers.loadPreviousPage,
}
}
protected storeHandlers(observer: DocumentStore<_Data, _Input>): CursorHandlers<_Data, _Input> {
return cursorHandlers<_Data, _Input>({
artifact: this.paginationArtifact,
- fetchUpdate: async (args) => {
+ fetchUpdate: async (args, updates) => {
return observer.send({
...args,
variables: {
@@ -124,7 +131,7 @@ class FragmentStoreCursor<
...this.queryVariables(observer),
},
cacheParams: {
- applyUpdates: true,
+ applyUpdates: updates,
},
})
},
@@ -143,53 +150,6 @@ class FragmentStoreCursor<
}
}
-// FragmentStoreForwardCursor adds loadNextPage to FragmentStoreCursor
-export class FragmentStoreForwardCursor<
- _Data extends GraphQLObject,
- _Input extends Record
-> extends FragmentStoreCursor<_Data, _Input> {
- get(initialValue: _Data | null) {
- // get the base class
- const parent = super.get(initialValue)
- const observer = getClient().observe<_Data, _Input>({
- artifact: this.paginationArtifact,
- initialValue,
- })
-
- // generate the pagination handlers
- const handlers = this.storeHandlers(observer)
-
- return {
- ...parent,
- // add the specific handlers for this situation
- loadNextPage: handlers.loadNextPage,
- }
- }
-}
-
-// BackwardFragmentStoreCursor adds loadPreviousPage to FragmentStoreCursor
-export class FragmentStoreBackwardCursor<
- _Data extends GraphQLObject,
- _Input extends Record
-> extends FragmentStoreCursor<_Data, _Input> {
- get(initialValue: _Data | null) {
- const parent = super.get(initialValue)
- const observer = getClient().observe<_Data, _Input>({
- artifact: this.paginationArtifact,
- initialValue,
- })
-
- // generate the pagination handlers
- const handlers = this.storeHandlers(observer)
-
- return {
- ...parent,
- // add the specific handlers for this situation
- loadPreviousPage: handlers.loadPreviousPage,
- }
- }
-}
-
export class FragmentStoreOffset<
_Data extends GraphQLObject,
_Input extends Record
@@ -220,7 +180,7 @@ export class FragmentStoreOffset<
...args?.variables,
},
cacheParams: {
- applyUpdates: true,
+ applyUpdates: ['append'],
},
})
},
diff --git a/packages/houdini-svelte/src/runtime/stores/pagination/index.ts b/packages/houdini-svelte/src/runtime/stores/pagination/index.ts
index 5dc7dd1afb..f40e9eee24 100644
--- a/packages/houdini-svelte/src/runtime/stores/pagination/index.ts
+++ b/packages/houdini-svelte/src/runtime/stores/pagination/index.ts
@@ -1,7 +1,2 @@
-export {
- FragmentStoreBackwardCursor,
- FragmentStoreForwardCursor,
- FragmentStoreOffset,
-} from './fragment'
-
-export { QueryStoreBackwardCursor, QueryStoreForwardCursor, QueryStoreOffset } from './query'
+export { FragmentStoreCursor, FragmentStoreOffset } from './fragment'
+export { QueryStoreCursor, QueryStoreOffset } from './query'
diff --git a/packages/houdini-svelte/src/runtime/stores/pagination/query.ts b/packages/houdini-svelte/src/runtime/stores/pagination/query.ts
index 96a6ddbcb4..f5f4a6da31 100644
--- a/packages/houdini-svelte/src/runtime/stores/pagination/query.ts
+++ b/packages/houdini-svelte/src/runtime/stores/pagination/query.ts
@@ -15,8 +15,7 @@ import type { CursorHandlers } from './cursor'
import { cursorHandlers } from './cursor'
import type { OffsetHandlers } from './offset'
import { offsetHandlers } from './offset'
-import type { PageInfo } from './pageInfo'
-import { nullPageInfo } from './pageInfo'
+import { extractPageInfo, type PageInfo } from './pageInfo'
export type CursorStoreResult<_Data extends GraphQLObject, _Input extends {}> = QueryResult<
_Data,
@@ -24,14 +23,14 @@ export type CursorStoreResult<_Data extends GraphQLObject, _Input extends {}> =
> & { pageInfo: PageInfo }
// both cursor paginated stores add a page info to their subscribe
-class CursorPaginatedStore<_Data extends GraphQLObject, _Input extends {}> extends QueryStore<
+export class QueryStoreCursor<_Data extends GraphQLObject, _Input extends {}> extends QueryStore<
_Data,
_Input
> {
// all paginated stores need to have a flag to distinguish from other query stores
paginated = true
- protected handlers: CursorHandlers<_Data, _Input>
+ #handlers: CursorHandlers<_Data, _Input>
constructor(config: StoreConfig<_Data, _Input, QueryArtifact>) {
super(config)
@@ -41,19 +40,19 @@ class CursorPaginatedStore<_Data extends GraphQLObject, _Input extends {}> exten
artifact: this.artifact,
})
- this.handlers = cursorHandlers<_Data, _Input>({
+ this.#handlers = cursorHandlers<_Data, _Input>({
artifact: this.artifact,
observer: this.observer,
storeName: this.name,
fetch: super.fetch.bind(this),
- fetchUpdate: async (args) => {
+ fetchUpdate: async (args, updates) => {
return paginationObserver.send({
...args,
variables: {
...args?.variables,
},
cacheParams: {
- applyUpdates: true,
+ applyUpdates: updates,
},
})
},
@@ -65,54 +64,35 @@ class CursorPaginatedStore<_Data extends GraphQLObject, _Input extends {}> exten
fetch(params?: ClientFetchParams<_Data, _Input>): Promise>
fetch(params?: QueryStoreFetchParams<_Data, _Input>): Promise>
async fetch(args?: QueryStoreFetchParams<_Data, _Input>): Promise> {
- return this.handlers!.fetch.call(this, args)
+ return this.#handlers!.fetch.call(this, args)
}
- extraFields(): { pageInfo: PageInfo } {
- return {
- pageInfo: nullPageInfo(),
- }
+ async loadPreviousPage(
+ args?: Parameters>['loadPreviousPage']>[0]
+ ) {
+ return this.#handlers.loadPreviousPage(args)
+ }
+
+ async loadNextPage(args?: Parameters['loadNextPage']>[0]) {
+ return this.#handlers.loadNextPage(args)
}
subscribe(
run: Subscriber>,
invalidate?: ((value?: CursorStoreResult<_Data, _Input> | undefined) => void) | undefined
): () => void {
- const combined = derived(
- [{ subscribe: super.subscribe.bind(this) }, this.handlers.pageInfo],
- ([$parent, $pageInfo]) => ({
+ const combined = derived([{ subscribe: super.subscribe.bind(this) }], ([$parent]) => {
+ return {
// @ts-ignore
...$parent,
- pageInfo: $pageInfo,
- })
- )
+ pageInfo: extractPageInfo($parent.data, this.artifact.refetch!.path),
+ }
+ })
return combined.subscribe(run, invalidate)
}
}
-// QueryStoreForwardCursor adds loadNextPage to CursorPaginatedQueryStore
-export class QueryStoreForwardCursor<
- _Data extends GraphQLObject,
- _Input extends {}
-> extends CursorPaginatedStore<_Data, _Input> {
- async loadNextPage(args?: Parameters['loadNextPage']>[0]) {
- return this.handlers.loadNextPage(args)
- }
-}
-
-// QueryStoreBackwardCursor adds loadPreviousPage to CursorPaginatedQueryStore
-export class QueryStoreBackwardCursor<
- _Data extends GraphQLObject,
- _Input extends {}
-> extends CursorPaginatedStore<_Data, _Input> {
- async loadPreviousPage(
- args?: Parameters>['loadPreviousPage']>[0]
- ) {
- return this.handlers.loadPreviousPage(args)
- }
-}
-
export class QueryStoreOffset<_Data extends GraphQLObject, _Input extends {}> extends QueryStore<
_Data,
_Input
@@ -142,7 +122,7 @@ export class QueryStoreOffset<_Data extends GraphQLObject, _Input extends {}> ex
...args?.variables,
},
cacheParams: {
- applyUpdates: true,
+ applyUpdates: ['append'],
},
})
},
diff --git a/packages/houdini/src/codegen/generators/artifacts/artifacts.test.ts b/packages/houdini/src/codegen/generators/artifacts/artifacts.test.ts
index 712b31c95b..534dd2b0a2 100644
--- a/packages/houdini/src/codegen/generators/artifacts/artifacts.test.ts
+++ b/packages/houdini/src/codegen/generators/artifacts/artifacts.test.ts
@@ -502,21 +502,20 @@ test('paginate over unions', async function () {
export default {
"name": "TestQuery",
"kind": "HoudiniQuery",
- "hash": "e51aa476e50a6550a2597054599ac958070848f0b5cb0301774e6b16d5ce629d",
+ "hash": "b26bb5299170a9094b17439a57a84037c883f9ddf4f61856efa5c5dd55754eb0",
"refetch": {
- "update": "append",
"path": ["entitiesByCursor"],
"method": "cursor",
"pageSize": 10,
"embedded": false,
"targetType": "Query",
"paginated": true,
- "direction": "forward"
+ "direction": "both"
},
- "raw": \`query TestQuery($first: Int = 10, $after: String) {
- entitiesByCursor(first: $first, after: $after) {
+ "raw": \`query TestQuery($first: Int = 10, $after: String, $last: Int, $before: String) {
+ entitiesByCursor(first: $first, after: $after, last: $last, before: $before) {
edges {
node {
... on User {
@@ -560,7 +559,7 @@ test('paginate over unions', async function () {
"edges": {
"type": "EntityEdge",
"keyRaw": "edges",
- "update": "append",
+ "updates": ["append", "prepend"],
"selection": {
"fields": {
@@ -615,22 +614,26 @@ test('paginate over unions', async function () {
"fields": {
"hasPreviousPage": {
"type": "Boolean",
- "keyRaw": "hasPreviousPage"
+ "keyRaw": "hasPreviousPage",
+ "updates": ["append", "prepend"]
},
"hasNextPage": {
"type": "Boolean",
- "keyRaw": "hasNextPage"
+ "keyRaw": "hasNextPage",
+ "updates": ["append", "prepend"]
},
"startCursor": {
"type": "String",
- "keyRaw": "startCursor"
+ "keyRaw": "startCursor",
+ "updates": ["append", "prepend"]
},
"endCursor": {
"type": "String",
- "keyRaw": "endCursor"
+ "keyRaw": "endCursor",
+ "updates": ["append", "prepend"]
}
}
}
@@ -647,6 +650,16 @@ test('paginate over unions', async function () {
"after": {
"kind": "Variable",
"value": "after"
+ },
+
+ "last": {
+ "kind": "Variable",
+ "value": "last"
+ },
+
+ "before": {
+ "kind": "Variable",
+ "value": "before"
}
}
}
@@ -656,7 +669,9 @@ test('paginate over unions', async function () {
"input": {
"fields": {
"first": "Int",
- "after": "String"
+ "after": "String",
+ "last": "Int",
+ "before": "String"
},
"types": {}
@@ -3327,21 +3342,20 @@ describe('mutation artifacts', function () {
export default {
"name": "TestQuery",
"kind": "HoudiniQuery",
- "hash": "ae03169e8d96702d39c54183ece747c31f4b5d1e3abf54cf3fc0706abfd597b9",
+ "hash": "6fe0aeaa708161553cd04645834b38c4ce625fce10c46056efcff9a97988d358",
"refetch": {
- "update": "append",
"path": ["usersByCursor"],
"method": "cursor",
"pageSize": 10,
"embedded": false,
"targetType": "Query",
"paginated": true,
- "direction": "forward"
+ "direction": "both"
},
- "raw": \`query TestQuery($first: Int = 10, $after: String) {
- usersByCursor(first: $first, after: $after) {
+ "raw": \`query TestQuery($first: Int = 10, $after: String, $last: Int, $before: String) {
+ usersByCursor(first: $first, after: $after, last: $last, before: $before) {
edges {
node {
firstName
@@ -3383,7 +3397,7 @@ describe('mutation artifacts', function () {
"edges": {
"type": "UserEdge",
"keyRaw": "edges",
- "update": "append",
+ "updates": ["append", "prepend"],
"selection": {
"fields": {
@@ -3428,22 +3442,26 @@ describe('mutation artifacts', function () {
"fields": {
"hasPreviousPage": {
"type": "Boolean",
- "keyRaw": "hasPreviousPage"
+ "keyRaw": "hasPreviousPage",
+ "updates": ["append", "prepend"]
},
"hasNextPage": {
"type": "Boolean",
- "keyRaw": "hasNextPage"
+ "keyRaw": "hasNextPage",
+ "updates": ["append", "prepend"]
},
"startCursor": {
"type": "String",
- "keyRaw": "startCursor"
+ "keyRaw": "startCursor",
+ "updates": ["append", "prepend"]
},
"endCursor": {
"type": "String",
- "keyRaw": "endCursor"
+ "keyRaw": "endCursor",
+ "updates": ["append", "prepend"]
}
}
}
@@ -3460,6 +3478,16 @@ describe('mutation artifacts', function () {
"after": {
"kind": "Variable",
"value": "after"
+ },
+
+ "last": {
+ "kind": "Variable",
+ "value": "last"
+ },
+
+ "before": {
+ "kind": "Variable",
+ "value": "before"
}
}
}
@@ -3469,7 +3497,9 @@ describe('mutation artifacts', function () {
"input": {
"fields": {
"first": "Int",
- "after": "String"
+ "after": "String",
+ "last": "Int",
+ "before": "String"
},
"types": {}
diff --git a/packages/houdini/src/codegen/generators/artifacts/pagination.test.ts b/packages/houdini/src/codegen/generators/artifacts/pagination.test.ts
index 196be5739b..35284de560 100644
--- a/packages/houdini/src/codegen/generators/artifacts/pagination.test.ts
+++ b/packages/houdini/src/codegen/generators/artifacts/pagination.test.ts
@@ -30,21 +30,26 @@ test('pagination arguments stripped from key', async function () {
export default {
"name": "PaginatedFragment",
"kind": "HoudiniFragment",
- "hash": "d655188329bfa82826d0e09c9b56fb90c276ed5b3b155784c3358db3cac30c87",
+ "hash": "6d646ff828f2db18e4c3565cac182da80c45967954b61a8e0dcdc3a2041724c6",
"refetch": {
- "update": "append",
"path": ["friendsByCursor"],
"method": "cursor",
"pageSize": 10,
"embedded": true,
"targetType": "Node",
"paginated": true,
- "direction": "forward"
+ "direction": "both"
},
"raw": \`fragment PaginatedFragment on User {
- friendsByCursor(first: $first, filter: "hello", after: $after) {
+ friendsByCursor(
+ first: $first
+ filter: "hello"
+ after: $after
+ last: $last
+ before: $before
+ ) {
edges {
node {
id
@@ -79,7 +84,7 @@ test('pagination arguments stripped from key', async function () {
"edges": {
"type": "UserEdge",
"keyRaw": "edges",
- "update": "append",
+ "updates": ["append", "prepend"],
"selection": {
"fields": {
@@ -119,22 +124,26 @@ test('pagination arguments stripped from key', async function () {
"fields": {
"hasPreviousPage": {
"type": "Boolean",
- "keyRaw": "hasPreviousPage"
+ "keyRaw": "hasPreviousPage",
+ "updates": ["append", "prepend"]
},
"hasNextPage": {
"type": "Boolean",
- "keyRaw": "hasNextPage"
+ "keyRaw": "hasNextPage",
+ "updates": ["append", "prepend"]
},
"startCursor": {
"type": "String",
- "keyRaw": "startCursor"
+ "keyRaw": "startCursor",
+ "updates": ["append", "prepend"]
},
"endCursor": {
"type": "String",
- "keyRaw": "endCursor"
+ "keyRaw": "endCursor",
+ "updates": ["append", "prepend"]
}
}
}
@@ -148,7 +157,9 @@ test('pagination arguments stripped from key', async function () {
"input": {
"fields": {
"first": "Int",
- "after": "String"
+ "after": "String",
+ "last": "Int",
+ "before": "String"
},
"types": {}
@@ -182,7 +193,6 @@ test('offset based pagination marks appropriate field', async function () {
"hash": "61656f834b4f2afccdd42328b499f288fc9776befbef14154133565e0ac7e8b6",
"refetch": {
- "update": "append",
"path": ["friendsByOffset"],
"method": "offset",
"pageSize": 10,
@@ -206,7 +216,7 @@ test('offset based pagination marks appropriate field', async function () {
"friendsByOffset": {
"type": "User",
"keyRaw": "friendsByOffset(filter: \\"hello\\")::paginated",
- "update": "append",
+ "updates": ["append"],
"selection": {
"fields": {
@@ -266,22 +276,27 @@ test('cursor as scalar gets the right pagination query argument types', async fu
export default {
"name": "ScalarPagination",
"kind": "HoudiniQuery",
- "hash": "09863f3b665ef14816cc6b9cc965f12bb68ea569345d3f346415ba7a8b8af71c",
+ "hash": "52d1832ab8d47b638e7f73bf90390ffd2e3a41ec194f0cc821ebe51ad792d771",
"refetch": {
- "update": "append",
"path": ["user", "friendsByCursorScalar"],
"method": "cursor",
"pageSize": 10,
"embedded": false,
"targetType": "Query",
"paginated": true,
- "direction": "forward"
+ "direction": "both"
},
- "raw": \`query ScalarPagination($first: Int = 10, $after: Cursor) {
+ "raw": \`query ScalarPagination($first: Int = 10, $after: Cursor, $last: Int, $before: Cursor) {
user {
- friendsByCursorScalar(first: $first, filter: "hello", after: $after) {
+ friendsByCursorScalar(
+ first: $first
+ filter: "hello"
+ after: $after
+ last: $last
+ before: $before
+ ) {
edges {
node {
friendsByCursor {
@@ -331,7 +346,7 @@ test('cursor as scalar gets the right pagination query argument types', async fu
"edges": {
"type": "UserEdge",
"keyRaw": "edges",
- "update": "append",
+ "updates": ["append", "prepend"],
"selection": {
"fields": {
@@ -404,22 +419,26 @@ test('cursor as scalar gets the right pagination query argument types', async fu
"fields": {
"hasPreviousPage": {
"type": "Boolean",
- "keyRaw": "hasPreviousPage"
+ "keyRaw": "hasPreviousPage",
+ "updates": ["append", "prepend"]
},
"hasNextPage": {
"type": "Boolean",
- "keyRaw": "hasNextPage"
+ "keyRaw": "hasNextPage",
+ "updates": ["append", "prepend"]
},
"startCursor": {
"type": "String",
- "keyRaw": "startCursor"
+ "keyRaw": "startCursor",
+ "updates": ["append", "prepend"]
},
"endCursor": {
"type": "String",
- "keyRaw": "endCursor"
+ "keyRaw": "endCursor",
+ "updates": ["append", "prepend"]
}
}
}
@@ -441,7 +460,9 @@ test('cursor as scalar gets the right pagination query argument types', async fu
"input": {
"fields": {
"first": "Int",
- "after": "Cursor"
+ "after": "Cursor",
+ "last": "Int",
+ "before": "Cursor"
},
"types": {}
@@ -498,21 +519,26 @@ test("sibling aliases don't get marked", async function () {
export default {
"name": "PaginatedFragment",
"kind": "HoudiniFragment",
- "hash": "1a2d87a1d79e0241ab3ebda1cd43296a631d99973bb06e4fc66becd42c4a67be",
+ "hash": "1a925fd7ed0822f150c0d9bbcdfd73b902a0551af06dc6cbe64488cc7fecf069",
"refetch": {
- "update": "append",
"path": ["friendsByCursor"],
"method": "cursor",
"pageSize": 10,
"embedded": true,
"targetType": "Node",
"paginated": true,
- "direction": "forward"
+ "direction": "both"
},
"raw": \`fragment PaginatedFragment on User {
- friendsByCursor(first: $first, filter: "hello", after: $after) {
+ friendsByCursor(
+ first: $first
+ filter: "hello"
+ after: $after
+ last: $last
+ before: $before
+ ) {
edges {
node {
friendsByCursor {
@@ -568,7 +594,7 @@ test("sibling aliases don't get marked", async function () {
"edges": {
"type": "UserEdge",
"keyRaw": "edges",
- "update": "append",
+ "updates": ["append", "prepend"],
"selection": {
"fields": {
@@ -641,22 +667,26 @@ test("sibling aliases don't get marked", async function () {
"fields": {
"hasPreviousPage": {
"type": "Boolean",
- "keyRaw": "hasPreviousPage"
+ "keyRaw": "hasPreviousPage",
+ "updates": ["append", "prepend"]
},
"hasNextPage": {
"type": "Boolean",
- "keyRaw": "hasNextPage"
+ "keyRaw": "hasNextPage",
+ "updates": ["append", "prepend"]
},
"startCursor": {
"type": "String",
- "keyRaw": "startCursor"
+ "keyRaw": "startCursor",
+ "updates": ["append", "prepend"]
},
"endCursor": {
"type": "String",
- "keyRaw": "endCursor"
+ "keyRaw": "endCursor",
+ "updates": ["append", "prepend"]
}
}
}
@@ -736,7 +766,9 @@ test("sibling aliases don't get marked", async function () {
"input": {
"fields": {
"first": "Int",
- "after": "String"
+ "after": "String",
+ "last": "Int",
+ "before": "String"
},
"types": {}
diff --git a/packages/houdini/src/codegen/generators/artifacts/selection.ts b/packages/houdini/src/codegen/generators/artifacts/selection.ts
index 5d47f7114e..bee61079e9 100644
--- a/packages/houdini/src/codegen/generators/artifacts/selection.ts
+++ b/packages/houdini/src/codegen/generators/artifacts/selection.ts
@@ -2,7 +2,11 @@ import * as graphql from 'graphql'
import type { Config, CollectedGraphQLDocument } from '../../../lib'
import { getRootType, HoudiniError } from '../../../lib'
-import type { MutationOperation, SubscriptionSelection } from '../../../runtime/lib/types'
+import {
+ type MutationOperation,
+ RefetchUpdateMode,
+ type SubscriptionSelection,
+} from '../../../runtime/lib/types'
import { connectionSelection } from '../../transforms/list'
import fieldKey from './fieldKey'
import { convertValue, deepMerge } from './utils'
@@ -16,7 +20,7 @@ export default function selection({
path = [],
includeFragments,
document,
- markEdges,
+ inConnection,
}: {
config: Config
filepath: string
@@ -26,7 +30,7 @@ export default function selection({
path?: string[]
includeFragments: boolean
document: CollectedGraphQLDocument
- markEdges?: string
+ inConnection?: boolean
}): SubscriptionSelection {
// we need to build up an object that contains every field in the selection
let object: SubscriptionSelection = {}
@@ -233,28 +237,40 @@ export default function selection({
(directive) => directive.name.value === config.paginateDirective
)
- // if the field is marked for offset pagination we need to mark this field
+ // if the field is marked for offset pagination
if (paginated && document.refetch && document.refetch.method === 'offset') {
- fieldObj.update = document.refetch.update
+ // we need to mark this field as only accepting append updates
+ fieldObj.updates = [RefetchUpdateMode.append]
}
+ let continueConnection = inConnection
// if we are looking at the edges field and we're supposed to mark it for pagination
- if (attributeName === 'edges' && markEdges && document.refetch) {
+ if (
+ [
+ 'edges',
+ // we want to include the page info fields here so that they are considered special
+ // when we apply a particular update as part of cursor pagination
+ 'endCursor',
+ 'startCursor',
+ 'hasNextPage',
+ 'hasPreviousPage',
+ ].includes(attributeName) &&
+ inConnection &&
+ document.refetch
+ ) {
// otherwise mark this field
- fieldObj.update = document.refetch.update
-
- // make sure we don't mark the children
- markEdges = ''
+ fieldObj.updates = [RefetchUpdateMode.append, RefetchUpdateMode.prepend]
+ }
+ if (attributeName === 'node' && inConnection) {
+ continueConnection = false
}
// only add the field object if there are properties in it
if (field.selectionSet) {
// if this field was marked for cursor based pagination we need to mark
// the edges field that falls underneath it
- const edgesMark =
- paginated && document.refetch?.method === 'cursor'
- ? document.refetch.update
- : markEdges
+ const connectionState =
+ (paginated && document.refetch?.method === 'cursor') || continueConnection
fieldObj.selection = selection({
config,
@@ -265,7 +281,7 @@ export default function selection({
path: pathSoFar,
includeFragments,
document,
- markEdges: edgesMark,
+ inConnection: connectionState,
})
}
diff --git a/packages/houdini/src/codegen/transforms/paginate.test.ts b/packages/houdini/src/codegen/transforms/paginate.test.ts
index 4c5d7e1dc0..00a876819a 100644
--- a/packages/houdini/src/codegen/transforms/paginate.test.ts
+++ b/packages/houdini/src/codegen/transforms/paginate.test.ts
@@ -26,8 +26,8 @@ test('adds pagination info to full', async function () {
// load the contents of the file
expect(docs[0].document).toMatchInlineSnapshot(`
- fragment UserFriends on Query @arguments(first: {type: "Int", default: 10}, after: {type: "String"}) {
- usersByCursor(first: $first, after: $after) @paginate {
+ fragment UserFriends on Query @arguments(first: {type: "Int", default: 10}, after: {type: "String"}, last: {type: "Int"}, before: {type: "String"}) {
+ usersByCursor(first: $first, after: $after, last: $last, before: $before) @paginate {
edges {
node {
id
@@ -47,7 +47,6 @@ test('adds pagination info to full', async function () {
expect(docs[0].refetch).toMatchInlineSnapshot(`
{
- "update": "append",
"path": [
"usersByCursor"
],
@@ -56,7 +55,7 @@ test('adds pagination info to full', async function () {
"embedded": false,
"targetType": "Query",
"paginated": true,
- "direction": "forward"
+ "direction": "both"
}
`)
})
@@ -84,7 +83,6 @@ test('paginated fragments on node pull data from one field deeper', async functi
expect(docs[0].refetch).toMatchInlineSnapshot(`
{
- "update": "append",
"path": [
"friendsByCursor"
],
@@ -93,7 +91,7 @@ test('paginated fragments on node pull data from one field deeper', async functi
"embedded": true,
"targetType": "Node",
"paginated": true,
- "direction": "forward"
+ "direction": "both"
}
`)
})
@@ -149,8 +147,8 @@ test('paginate adds forwards cursor args to the full cursor fragment', async fun
// load the contents of the file
expect(docs[0].document).toMatchInlineSnapshot(`
- fragment UserFriends on Query @arguments(first: {type: "Int", default: 10}, after: {type: "String"}) {
- usersByCursor(first: $first, after: $after) @paginate {
+ fragment UserFriends on Query @arguments(first: {type: "Int", default: 10}, after: {type: "String"}, last: {type: "Int"}, before: {type: "String"}) {
+ usersByCursor(first: $first, after: $after, last: $last, before: $before) @paginate {
edges {
node {
id
@@ -192,8 +190,8 @@ test('paginate adds backwards cursor args to the full cursor fragment', async fu
// load the contents of the file
expect(docs[0].document).toMatchInlineSnapshot(`
- fragment UserFriends on Query @arguments(last: {type: "Int", default: 10}, before: {type: "String"}) {
- usersByCursor(last: $last, before: $before) @paginate {
+ fragment UserFriends on Query @arguments(first: {type: "Int"}, after: {type: "String"}, last: {type: "Int", default: 10}, before: {type: "String"}) {
+ usersByCursor(last: $last, first: $first, after: $after, before: $before) @paginate {
edges {
node {
id
@@ -278,8 +276,8 @@ test('paginate adds backwards cursor args to the fragment', async function () {
// load the contents of the file
expect(docs[0].document).toMatchInlineSnapshot(`
- fragment UserFriends on Query @arguments(last: {type: "Int", default: 10}, before: {type: "String"}) {
- usersByBackwardsCursor(last: $last, before: $before) @paginate {
+ fragment UserFriends on Query @arguments {
+ usersByBackwardsCursor(last: 10) @paginate {
edges {
node {
id
@@ -322,8 +320,8 @@ test('sets before with default value', async function () {
// load the contents of the file
expect(docs[0].document).toMatchInlineSnapshot(`
- fragment UserFriends on Query @arguments(last: {type: "Int", default: 10}, before: {type: "String", default: "cursor"}) {
- usersByCursor(last: $last, before: $before) @paginate {
+ fragment UserFriends on Query @arguments(first: {type: "Int"}, after: {type: "String"}, last: {type: "Int", default: 10}, before: {type: "String", default: "cursor"}) {
+ usersByCursor(last: $last, before: $before, first: $first, after: $after) @paginate {
edges {
node {
id
@@ -418,7 +416,6 @@ test('embeds node pagination query as a separate document', async function () {
"hash": "4ff3c0d7d0bc3f812896dd71dc3ff18e3066fe2459502a99fab163508be90b7a",
"refetch": {
- "update": "append",
"path": ["friendsByForwardsCursor"],
"method": "cursor",
"pageSize": 10,
@@ -477,7 +474,7 @@ test('embeds node pagination query as a separate document', async function () {
"edges": {
"type": "UserEdge",
"keyRaw": "edges",
- "update": "append",
+ "updates": ["append", "prepend", "append", "prepend"],
"selection": {
"fields": {
@@ -517,22 +514,26 @@ test('embeds node pagination query as a separate document', async function () {
"fields": {
"hasPreviousPage": {
"type": "Boolean",
- "keyRaw": "hasPreviousPage"
+ "keyRaw": "hasPreviousPage",
+ "updates": ["append", "prepend", "append", "prepend"]
},
"hasNextPage": {
"type": "Boolean",
- "keyRaw": "hasNextPage"
+ "keyRaw": "hasNextPage",
+ "updates": ["append", "prepend", "append", "prepend"]
},
"startCursor": {
"type": "String",
- "keyRaw": "startCursor"
+ "keyRaw": "startCursor",
+ "updates": ["append", "prepend", "append", "prepend"]
},
"endCursor": {
"type": "String",
- "keyRaw": "endCursor"
+ "keyRaw": "endCursor",
+ "updates": ["append", "prepend", "append", "prepend"]
}
}
}
@@ -576,7 +577,7 @@ test('embeds node pagination query as a separate document', async function () {
"edges": {
"type": "UserEdge",
"keyRaw": "edges",
- "update": "append",
+ "updates": ["append", "prepend"],
"selection": {
"fields": {
@@ -616,22 +617,26 @@ test('embeds node pagination query as a separate document', async function () {
"fields": {
"hasPreviousPage": {
"type": "Boolean",
- "keyRaw": "hasPreviousPage"
+ "keyRaw": "hasPreviousPage",
+ "updates": ["append", "prepend"]
},
"hasNextPage": {
"type": "Boolean",
- "keyRaw": "hasNextPage"
+ "keyRaw": "hasNextPage",
+ "updates": ["append", "prepend"]
},
"startCursor": {
"type": "String",
- "keyRaw": "startCursor"
+ "keyRaw": "startCursor",
+ "updates": ["append", "prepend"]
},
"endCursor": {
"type": "String",
- "keyRaw": "endCursor"
+ "keyRaw": "endCursor",
+ "updates": ["append", "prepend"]
}
}
}
@@ -707,7 +712,6 @@ test('embeds custom pagination query as a separate document', async function ()
"hash": "c5970407ebf288fcad596b8eacf0093c3992c8b16a5044e4f38317c8d73245aa",
"refetch": {
- "update": "append",
"path": ["friendsConnection"],
"method": "cursor",
"pageSize": 10,
@@ -765,7 +769,7 @@ test('embeds custom pagination query as a separate document', async function ()
"edges": {
"type": "GhostEdge",
"keyRaw": "edges",
- "update": "append",
+ "updates": ["append", "prepend", "append", "prepend"],
"selection": {
"fields": {
@@ -810,22 +814,26 @@ test('embeds custom pagination query as a separate document', async function ()
"fields": {
"hasPreviousPage": {
"type": "Boolean",
- "keyRaw": "hasPreviousPage"
+ "keyRaw": "hasPreviousPage",
+ "updates": ["append", "prepend", "append", "prepend"]
},
"hasNextPage": {
"type": "Boolean",
- "keyRaw": "hasNextPage"
+ "keyRaw": "hasNextPage",
+ "updates": ["append", "prepend", "append", "prepend"]
},
"startCursor": {
"type": "String",
- "keyRaw": "startCursor"
+ "keyRaw": "startCursor",
+ "updates": ["append", "prepend", "append", "prepend"]
},
"endCursor": {
"type": "String",
- "keyRaw": "endCursor"
+ "keyRaw": "endCursor",
+ "updates": ["append", "prepend", "append", "prepend"]
}
}
}
@@ -982,8 +990,8 @@ test('query with backwards cursor paginate', async function () {
// load the contents of the file
expect(docs[0]?.document).toMatchInlineSnapshot(`
- query Users($last: Int = 10, $before: String) {
- usersByBackwardsCursor(last: $last, before: $before) @paginate {
+ query Users {
+ usersByBackwardsCursor(last: 10) @paginate {
edges {
node {
id
@@ -1053,8 +1061,8 @@ test('query with backwards cursor on full paginate', async function () {
// load the contents of the file
expect(docs[0]?.document).toMatchInlineSnapshot(`
- query Users($last: Int = 10, $before: String) {
- usersByCursor(last: $last, before: $before) @paginate {
+ query Users($first: Int, $after: String, $last: Int = 10, $before: String) {
+ usersByCursor(last: $last, first: $first, after: $after, before: $before) @paginate {
edges {
node {
id
@@ -1094,10 +1102,53 @@ test('query with forwards cursor on full paginate', async function () {
const config = testConfig()
await runPipeline(config, docs)
+ // load the contents of the file
+ expect(docs[0]?.document).toMatchInlineSnapshot(`
+ query Users($first: Int = 10, $after: String, $last: Int, $before: String) {
+ usersByCursor(first: $first, after: $after, last: $last, before: $before) @paginate {
+ edges {
+ node {
+ id
+ __typename
+ }
+ cursor
+ }
+ pageInfo {
+ hasPreviousPage
+ hasNextPage
+ startCursor
+ endCursor
+ }
+ }
+ }
+ `)
+})
+
+test("don't generate unsupported directions", async function () {
+ const docs = [
+ mockCollectedDoc(
+ `
+ query Users {
+ usersByForwardsCursor(first: 10) @paginate {
+ edges {
+ node {
+ id
+ }
+ }
+ }
+ }
+ `
+ ),
+ ]
+
+ // run the pipeline
+ const config = testConfig()
+ await runPipeline(config, docs)
+
// load the contents of the file
expect(docs[0]?.document).toMatchInlineSnapshot(`
query Users($first: Int = 10, $after: String) {
- usersByCursor(first: $first, after: $after) @paginate {
+ usersByForwardsCursor(first: $first, after: $after) @paginate {
edges {
node {
id
@@ -1139,8 +1190,8 @@ test("forwards cursor paginated query doesn't overlap variables", async function
// load the contents of the file
expect(docs[0]?.document).toMatchInlineSnapshot(`
- query Users($first: Int!, $after: String) {
- usersByCursor(first: $first, after: $after) @paginate {
+ query Users($first: Int!, $after: String, $last: Int, $before: String) {
+ usersByCursor(first: $first, after: $after, last: $last, before: $before) @paginate {
edges {
node {
id
@@ -1182,8 +1233,8 @@ test("backwards cursor paginated query doesn't overlap variables", async functio
// load the contents of the file
expect(docs[0]?.document).toMatchInlineSnapshot(`
- query Users($last: Int!, $before: String) {
- usersByCursor(last: $last, before: $before) @paginate {
+ query Users($last: Int!, $first: Int, $after: String, $before: String) {
+ usersByCursor(last: $last, first: $first, after: $after, before: $before) @paginate {
edges {
node {
id
@@ -1253,7 +1304,6 @@ test('refetch specification with backwards pagination', async function () {
expect(docs[0].refetch).toMatchInlineSnapshot(`
{
- "update": "prepend",
"path": [
"usersByCursor"
],
@@ -1262,7 +1312,7 @@ test('refetch specification with backwards pagination', async function () {
"embedded": false,
"targetType": "Query",
"paginated": true,
- "direction": "backwards"
+ "direction": "both"
}
`)
})
@@ -1290,7 +1340,6 @@ test('refetch entry with initial backwards', async function () {
expect(docs[0].refetch).toMatchInlineSnapshot(`
{
- "update": "prepend",
"path": [
"usersByCursor"
],
@@ -1299,7 +1348,7 @@ test('refetch entry with initial backwards', async function () {
"embedded": false,
"targetType": "Query",
"paginated": true,
- "direction": "backwards",
+ "direction": "both",
"start": "1234"
}
`)
@@ -1328,7 +1377,6 @@ test('refetch entry with initial forwards', async function () {
expect(docs[0].refetch).toMatchInlineSnapshot(`
{
- "update": "append",
"path": [
"usersByCursor"
],
@@ -1337,7 +1385,7 @@ test('refetch entry with initial forwards', async function () {
"embedded": false,
"targetType": "Query",
"paginated": true,
- "direction": "forward",
+ "direction": "both",
"start": "1234"
}
`)
@@ -1368,26 +1416,25 @@ test('generated query has same refetch spec', async function () {
export default {
"name": "UserFriends_Pagination_Query",
"kind": "HoudiniQuery",
- "hash": "1e2bc755f493a5f3c58fdb284609136e7160f1f2365fe192c49f1ae95b3ef2ee",
+ "hash": "f1eb3c2bde855b70a59c4cccd29ddf014bbd0ff8a49f214af22974d698730a31",
"refetch": {
- "update": "append",
"path": ["usersByCursor"],
"method": "cursor",
"pageSize": 10,
"embedded": false,
"targetType": "Query",
"paginated": true,
- "direction": "forward",
+ "direction": "both",
"start": "1234"
},
- "raw": \`query UserFriends_Pagination_Query($first: Int = 10, $after: String = "1234") {
- ...UserFriends_jrGTj
+ "raw": \`query UserFriends_Pagination_Query($first: Int = 10, $after: String = "1234", $last: Int, $before: String) {
+ ...UserFriends_2Bf0M6
}
- fragment UserFriends_jrGTj on Query {
- usersByCursor(first: $first, after: $after) {
+ fragment UserFriends_2Bf0M6 on Query {
+ usersByCursor(first: $first, after: $after, last: $last, before: $before) {
edges {
node {
id
@@ -1418,7 +1465,7 @@ test('generated query has same refetch spec', async function () {
"edges": {
"type": "UserEdge",
"keyRaw": "edges",
- "update": "append",
+ "updates": ["append", "prepend", "append", "prepend"],
"selection": {
"fields": {
@@ -1458,22 +1505,26 @@ test('generated query has same refetch spec', async function () {
"fields": {
"hasPreviousPage": {
"type": "Boolean",
- "keyRaw": "hasPreviousPage"
+ "keyRaw": "hasPreviousPage",
+ "updates": ["append", "prepend", "append", "prepend"]
},
"hasNextPage": {
"type": "Boolean",
- "keyRaw": "hasNextPage"
+ "keyRaw": "hasNextPage",
+ "updates": ["append", "prepend", "append", "prepend"]
},
"startCursor": {
"type": "String",
- "keyRaw": "startCursor"
+ "keyRaw": "startCursor",
+ "updates": ["append", "prepend", "append", "prepend"]
},
"endCursor": {
"type": "String",
- "keyRaw": "endCursor"
+ "keyRaw": "endCursor",
+ "updates": ["append", "prepend", "append", "prepend"]
}
}
}
@@ -1487,7 +1538,9 @@ test('generated query has same refetch spec', async function () {
"input": {
"fields": {
"first": "Int",
- "after": "String"
+ "after": "String",
+ "last": "Int",
+ "before": "String"
},
"types": {}
@@ -1520,7 +1573,6 @@ test('refetch specification with offset pagination', async function () {
expect(docs[0].refetch).toMatchInlineSnapshot(`
{
- "update": "append",
"path": [
"usersByOffset"
],
@@ -1553,7 +1605,6 @@ test('refetch specification with initial offset', async function () {
expect(docs[0].refetch).toMatchInlineSnapshot(`
{
- "update": "append",
"path": [
"usersByOffset"
],
diff --git a/packages/houdini/src/codegen/transforms/paginate.ts b/packages/houdini/src/codegen/transforms/paginate.ts
index aea913c279..ad10693bef 100644
--- a/packages/houdini/src/codegen/transforms/paginate.ts
+++ b/packages/houdini/src/codegen/transforms/paginate.ts
@@ -2,7 +2,7 @@ import * as graphql from 'graphql'
import type { Config, CollectedGraphQLDocument } from '../../lib'
import { HoudiniError, parentTypeFromAncestors, unwrapType, wrapType } from '../../lib'
-import { ArtifactKind, RefetchUpdateMode } from '../../runtime/lib/types'
+import { ArtifactKind } from '../../runtime/lib/types'
// the paginate transform is responsible for preparing a fragment marked for pagination
// to be embedded in the query that will be used to fetch additional data. That means it
@@ -99,11 +99,7 @@ export default async function paginate(
).getFields()[node.name.value]
const args = new Set(fieldTypeFields.args.map((arg) => arg.name))
- // also look to see if the user wants to do forward pagination
- const passedArgs = new Set(node.arguments?.map((arg) => arg.name.value))
- const specifiedForwards = passedArgs.has('first')
- const specifiedBackwards = passedArgs.has('last')
-
+ // find and assign the cursor type
cursorType =
(
fieldTypeFields.args?.find((arg) => ['before', 'after'].includes(arg.name))
@@ -113,21 +109,18 @@ export default async function paginate(
flags.before.type = cursorType
// figure out what kind of pagination the field supports
- const forwardPagination =
- !specifiedBackwards && args.has('first') && args.has('after')
- const backwardsPagination =
- !specifiedForwards && args.has('last') && args.has('before')
+ const passedArgs = new Set(node.arguments?.map((arg) => arg.name.value))
+ const forwards = args.has('first') && args.has('after')
+ const backwards = args.has('last') && args.has('before')
+ const cursorPagination = passedArgs.has('last') || passedArgs.has('first')
const offsetPagination =
- !forwardPagination &&
- !backwardsPagination &&
- args.has('offset') &&
- args.has('limit')
+ !cursorPagination && args.has('offset') && args.has('limit')
// update the flags based on what the tagged field supports
- flags.first.enabled = forwardPagination
- flags.after.enabled = forwardPagination
- flags.last.enabled = backwardsPagination
- flags.before.enabled = backwardsPagination
+ flags.first.enabled = forwards
+ flags.after.enabled = forwards
+ flags.last.enabled = backwards
+ flags.before.enabled = backwards
flags.offset.enabled = offsetPagination
flags.limit.enabled = offsetPagination
@@ -168,12 +161,6 @@ export default async function paginate(
// check if we have to embed the fragment in Node
let nodeQuery = false
- // figure out the right refetch
- let refetchUpdate = RefetchUpdateMode.append
- if (flags.last.enabled) {
- refetchUpdate = RefetchUpdateMode.prepend
- }
-
// remember if we found a fragment or operation
let fragment = ''
@@ -199,14 +186,14 @@ export default async function paginate(
}),
{}
) || {}
-
// figure out the variables we want on the query
let newVariables: Record =
Object.fromEntries(
Object.entries(flags)
.filter(
([, spec]) =>
- // let's tale the spec enabled AND where we don't have a dedicated variable for it
+ // use the fields from enabled pagination strategies
+ // where we don't have a dedicated variable for it already
spec.enabled && spec.variableName === undefined
)
.map(([fieldName, spec]) => [
@@ -315,28 +302,32 @@ export default async function paginate(
}
}
+ // figure out some of the refetch values early
+
+ // page size is the default value of the limit argument
+ const pageSize =
+ flags.first.defaultValue ?? flags.last.defaultValue ?? flags.limit.defaultValue
+ // start is the default value of the offset argument
+ const start =
+ flags.after.defaultValue ?? flags.before.defaultValue ?? flags.offset.defaultValue
+ // the direction is always forwards for offset but check for connections
+ let direction: 'forward' | 'backward' | 'both' = 'forward'
+ if (flags.before.enabled && flags.after.enabled) {
+ direction = 'both'
+ } else if (flags.before.enabled) {
+ direction = 'backward'
+ }
+
// add the paginate info to the collected document
doc.refetch = {
- update: refetchUpdate,
path: paginationPath,
method: flags.first.enabled || flags.last.enabled ? 'cursor' : 'offset',
- pageSize: 0,
+ pageSize,
embedded: nodeQuery,
targetType,
paginated: true,
- direction: flags.last.enabled ? 'backwards' : 'forward',
- }
-
- // add the correct default page size
- if (flags.first.enabled) {
- doc.refetch.pageSize = flags.first.defaultValue
- doc.refetch.start = flags.after.defaultValue
- } else if (flags.last.enabled) {
- doc.refetch.pageSize = flags.last.defaultValue
- doc.refetch.start = flags.before.defaultValue
- } else if (flags.limit.enabled) {
- doc.refetch.pageSize = flags.limit.defaultValue
- doc.refetch.start = flags.offset.defaultValue
+ direction,
+ start,
}
// if we're not paginating a fragment, there's nothing more to do. we mutated
@@ -574,15 +565,6 @@ function replaceArgumentsWithVariables(
continue
}
- // if we are looking at forward pagination args when backwards is enabled ignore it
- if (['first', 'after'].includes(name) && flags['before'].enabled) {
- continue
- }
- // same but opposite for backwards pagination
- if (['last', 'before'].includes(name) && flags['first'].enabled) {
- continue
- }
-
// we need to add a variable referencing the argument
newArgs.push(variableAsArgument(name))
}
diff --git a/packages/houdini/src/runtime/cache/cache.ts b/packages/houdini/src/runtime/cache/cache.ts
index a85b49c1d1..622df9a8fc 100644
--- a/packages/houdini/src/runtime/cache/cache.ts
+++ b/packages/houdini/src/runtime/cache/cache.ts
@@ -50,7 +50,7 @@ export class Cache {
variables?: {}
parent?: string
layer?: LayerID | null
- applyUpdates?: boolean
+ applyUpdates?: string[]
notifySubscribers?: SubscriptionSpec[]
forceNotify?: boolean
}): SubscriptionSpec[] {
@@ -215,7 +215,7 @@ class CacheInternal {
selection,
variables = {},
parent = rootID,
- applyUpdates = false,
+ applyUpdates,
layer,
toNotify = [],
forceNotify,
@@ -227,7 +227,7 @@ class CacheInternal {
root?: string
layer: Layer
toNotify?: FieldSelection[]
- applyUpdates?: boolean
+ applyUpdates?: string[]
forceNotify?: boolean
}): FieldSelection[] {
// if the cache is disabled, dont do anything
@@ -262,7 +262,7 @@ class CacheInternal {
selection: fieldSelection,
operations,
abstract: isAbstract,
- update,
+ updates,
nullable,
} = targetSelection[field]
const key = evaluateKey(keyRaw, variables)
@@ -299,17 +299,47 @@ class CacheInternal {
let newValue = value
// if the value is an array, we might have to apply updates
- if (Array.isArray(value) && applyUpdates && update) {
- // if we have to prepend the new value on the old one
- if (update === 'append') {
- newValue = ((previousValue as any[]) || []).concat(value)
- }
- // we might have to prepend our value onto the old one
- else if (update === 'prepend') {
- newValue = value.concat(previousValue || [])
+ if (updates && applyUpdates && Array.isArray(value)) {
+ // look at every update we were told to apply
+ for (const update of applyUpdates) {
+ // make sure the field accepts the update we're about to check
+ if (!updates.includes(update)) {
+ continue
+ }
+
+ // if we have to append the new value onto the old one
+ if (update === 'append') {
+ newValue = ((previousValue as any[]) || []).concat(value)
+ }
+ // we might have to prepend our value onto the old one
+ else if (update === 'prepend') {
+ newValue = value.concat(previousValue || [])
+ }
}
}
+ // we need to handle pageInfo's contents specially. For now, they have an
+ // update tagged on them which we will interpret here to indicate if we want the new value
+ // or the old one
+
+ // in a prepend update we want to use the old values for endCursor and hasNextPage
+ if (
+ updates &&
+ applyUpdates?.includes('prepend') &&
+ ['endCursor', 'hasNextPage'].includes(key)
+ ) {
+ newValue = previousValue
+ }
+
+ // in an append update we want to use the old values for startCursor and hasPreviousPage
+ else if (
+ updates &&
+ applyUpdates?.includes('append') &&
+ ['startCursor', 'hasPreviousPage'].includes(key)
+ ) {
+ newValue = previousValue
+ }
+
// if the value changed on a layer that impacts the current latest value
const valueChanged = !deepEquals(newValue, previousValue)
@@ -422,7 +452,7 @@ class CacheInternal {
// have already been added as part of a list operation. if that happens
// we will need to filter out ids that refer to these fake-edges which
// can be idenfitied as not having a cursor or node value
- const emptyEdges = !update
+ const emptyEdges = !updates
? []
: oldIDs.map((id) => {
if (!id) {
@@ -473,7 +503,7 @@ class CacheInternal {
})
// if we're supposed to apply this write as an update, we need to figure out how
- if (applyUpdates && update) {
+ if (applyUpdates && updates) {
// if we are updating the edges field, we might need to do a little more than just
// append/prepend to the field value. we might need to wrap the values in extra references
if (key === 'edges') {
@@ -519,17 +549,25 @@ class CacheInternal {
})
}
- // if we have to prepend it, do so
- if (update === 'prepend') {
- linkedIDs = newIDs.concat(oldIDs as (string | null)[])
- }
- // otherwise we might have to append it
- else if (update === 'append') {
- linkedIDs = oldIDs.concat(newIDs)
- }
- // if the update is a replace do the right thing
- else if (update === 'replace') {
- linkedIDs = newIDs
+ // look at every update we were told to apply
+ for (const update of applyUpdates) {
+ // make sure the field accepts the update we're about to check
+ if (update !== 'replace' && !updates.includes(update)) {
+ continue
+ }
+
+ // if we have to prepend it, do so
+ if (update === 'prepend') {
+ linkedIDs = newIDs.concat(oldIDs as (string | null)[])
+ }
+ // otherwise we might have to append it
+ else if (update === 'append') {
+ linkedIDs = oldIDs.concat(newIDs)
+ }
+ // if the update is a replace do the right thing
+ else if (update === 'replace') {
+ linkedIDs = newIDs
+ }
}
}
// we're not supposed to apply this write as an update, just use the new value
@@ -954,7 +992,7 @@ class CacheInternal {
abstract: boolean
variables: {}
specs: FieldSelection[]
- applyUpdates: boolean
+ applyUpdates?: string[]
fields: SubscriptionSelection
layer: Layer
forceNotify?: boolean
diff --git a/packages/houdini/src/runtime/cache/lists.ts b/packages/houdini/src/runtime/cache/lists.ts
index 6eb4ce885d..7c24b2d3f2 100644
--- a/packages/houdini/src/runtime/cache/lists.ts
+++ b/packages/houdini/src/runtime/cache/lists.ts
@@ -1,9 +1,4 @@
-import type {
- SubscriptionSelection,
- ListWhen,
- SubscriptionSpec,
- RefetchUpdateMode,
-} from '../lib/types'
+import type { SubscriptionSelection, ListWhen, SubscriptionSpec } from '../lib/types'
import type { Cache, LinkedList } from './cache'
import { rootID } from './cache'
import { flattenList } from './stuff'
@@ -240,9 +235,7 @@ export class List {
edges: {
keyRaw: 'edges',
type: 'ConnectionEdge',
- update: (where === 'first'
- ? 'prepend'
- : 'append') as RefetchUpdateMode,
+ updates: ['append', 'prepend'],
selection: {
fields: {
node: {
@@ -278,7 +271,7 @@ export class List {
newEntries: {
keyRaw: this.key,
type: listType,
- update: (where === 'first' ? 'prepend' : 'append') as RefetchUpdateMode,
+ updates: ['append', 'prepend'],
selection: {
...selection,
fields: {
@@ -303,7 +296,7 @@ export class List {
data: insertData,
variables,
parent: this.recordID,
- applyUpdates: true,
+ applyUpdates: [where === 'first' ? 'prepend' : 'append'],
})
}
diff --git a/packages/houdini/src/runtime/cache/tests/list.test.ts b/packages/houdini/src/runtime/cache/tests/list.test.ts
index 1e59623d87..0078dd5d1d 100644
--- a/packages/houdini/src/runtime/cache/tests/list.test.ts
+++ b/packages/houdini/src/runtime/cache/tests/list.test.ts
@@ -29,7 +29,7 @@ test('prepend linked lists update', function () {
friends: {
type: 'User',
keyRaw: 'friends',
- update: RefetchUpdateMode.prepend,
+ updates: [RefetchUpdateMode.prepend],
selection: {
fields: {
id: {
@@ -68,7 +68,7 @@ test('prepend linked lists update', function () {
],
},
},
- applyUpdates: true,
+ applyUpdates: ['prepend'],
})
// make sure we can get the linked lists back
@@ -79,7 +79,7 @@ test('prepend linked lists update', function () {
friends: {
type: 'User',
keyRaw: 'friends',
- update: RefetchUpdateMode.prepend,
+ updates: [RefetchUpdateMode.prepend],
selection: {
fields: {
id: {
@@ -128,7 +128,7 @@ test('prepend linked lists update', function () {
],
},
},
- applyUpdates: true,
+ applyUpdates: ['prepend'],
})
// make sure we can get the linked lists back
@@ -139,7 +139,7 @@ test('prepend linked lists update', function () {
friends: {
type: 'User',
keyRaw: 'friends',
- update: RefetchUpdateMode.prepend,
+ updates: [RefetchUpdateMode.prepend],
selection: {
fields: {
id: {
@@ -796,6 +796,536 @@ test('append in connection', function () {
})
})
+test("prepending update doesn't overwrite endCursor and hasNextPage", function () {
+ // instantiate a cache
+ const cache = new Cache(config)
+
+ const selection: SubscriptionSelection = {
+ fields: {
+ viewer: {
+ type: 'User',
+ keyRaw: 'viewer',
+ selection: {
+ fields: {
+ id: {
+ type: 'ID',
+ keyRaw: 'id',
+ },
+ friends: {
+ type: 'User',
+ keyRaw: 'friends',
+ list: {
+ name: 'All_Users',
+ connection: true,
+ type: 'User',
+ },
+ selection: {
+ fields: {
+ pageInfo: {
+ type: 'PageInfo',
+ keyRaw: 'pageInfo',
+ selection: {
+ fields: {
+ hasNextPage: {
+ type: 'Boolean',
+ keyRaw: 'hasNextPage',
+ updates: ['prepend'],
+ },
+ hasPreviousPage: {
+ type: 'Boolean',
+ keyRaw: 'hasPreviousPage',
+ updates: ['prepend'],
+ },
+ startCursor: {
+ type: 'String',
+ keyRaw: 'startCursor',
+ updates: ['prepend'],
+ },
+ endCursor: {
+ type: 'String',
+ keyRaw: 'endCursor',
+ updates: ['prepend'],
+ },
+ },
+ },
+ },
+ edges: {
+ type: 'UserEdge',
+ keyRaw: 'edges',
+ updates: ['prepend'],
+ selection: {
+ fields: {
+ node: {
+ type: 'Node',
+ keyRaw: 'node',
+ abstract: true,
+ selection: {
+ fields: {
+ __typename: {
+ type: 'String',
+ keyRaw: '__typename',
+ },
+ id: {
+ type: 'ID',
+ keyRaw: 'id',
+ },
+ firstName: {
+ type: 'String',
+ keyRaw: 'firstName',
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ // write the cached data once
+ cache.write({
+ selection,
+ data: {
+ viewer: {
+ id: '1',
+ friends: {
+ pageInfo: {
+ hasPreviousPage: true,
+ hasNextPage: true,
+ startCursor: 'a',
+ endCursor: 'b',
+ },
+ edges: [
+ {
+ node: {
+ __typename: 'User',
+ id: '2',
+ firstName: 'jane2',
+ },
+ },
+ {
+ node: {
+ __typename: 'User',
+ id: '3',
+ firstName: 'jane',
+ },
+ },
+ ],
+ },
+ },
+ },
+ })
+
+ // write it again with a prepend update to insert the user
+ cache.write({
+ selection,
+ applyUpdates: ['prepend'],
+ data: {
+ viewer: {
+ id: '1',
+ friends: {
+ pageInfo: {
+ // should have a different value for the initial set
+ // so we can confirm that it only picked up the starting keys
+ hasPreviousPage: false,
+ hasNextPage: false,
+ startCursor: 'aa',
+ endCursor: 'bb',
+ },
+ edges: [
+ {
+ node: {
+ __typename: 'User',
+ id: '4',
+ firstName: 'jane3',
+ },
+ },
+ ],
+ },
+ },
+ },
+ })
+
+ // make sure that the data looks good
+ expect(cache.read({ selection })).toEqual({
+ partial: false,
+ data: {
+ viewer: {
+ id: '1',
+ friends: {
+ pageInfo: {
+ hasPreviousPage: false,
+ hasNextPage: true,
+ startCursor: 'aa',
+ endCursor: 'b',
+ },
+ edges: [
+ {
+ node: {
+ __typename: 'User',
+ id: '4',
+ firstName: 'jane3',
+ },
+ },
+ {
+ node: {
+ __typename: 'User',
+ id: '2',
+ firstName: 'jane2',
+ },
+ },
+ {
+ node: {
+ __typename: 'User',
+ id: '3',
+ firstName: 'jane',
+ },
+ },
+ ],
+ },
+ },
+ },
+ })
+})
+
+test("append update doesn't overwrite startCursor and hasPreviousPage", function () {
+ // instantiate a cache
+ const cache = new Cache(config)
+
+ const selection: SubscriptionSelection = {
+ fields: {
+ viewer: {
+ type: 'User',
+ keyRaw: 'viewer',
+ selection: {
+ fields: {
+ id: {
+ type: 'ID',
+ keyRaw: 'id',
+ },
+ friends: {
+ type: 'User',
+ keyRaw: 'friends',
+ list: {
+ name: 'All_Users',
+ connection: true,
+ type: 'User',
+ },
+ selection: {
+ fields: {
+ pageInfo: {
+ type: 'PageInfo',
+ keyRaw: 'pageInfo',
+ selection: {
+ fields: {
+ hasNextPage: {
+ type: 'Boolean',
+ keyRaw: 'hasNextPage',
+ updates: ['append'],
+ },
+ hasPreviousPage: {
+ type: 'Boolean',
+ keyRaw: 'hasPreviousPage',
+ updates: ['append'],
+ },
+ startCursor: {
+ type: 'String',
+ keyRaw: 'startCursor',
+ updates: ['append'],
+ },
+ endCursor: {
+ type: 'String',
+ keyRaw: 'endCursor',
+ updates: ['append'],
+ },
+ },
+ },
+ },
+ edges: {
+ type: 'UserEdge',
+ keyRaw: 'edges',
+ updates: ['append'],
+ selection: {
+ fields: {
+ node: {
+ type: 'Node',
+ keyRaw: 'node',
+ abstract: true,
+ selection: {
+ fields: {
+ __typename: {
+ type: 'String',
+ keyRaw: '__typename',
+ },
+ id: {
+ type: 'ID',
+ keyRaw: 'id',
+ },
+ firstName: {
+ type: 'String',
+ keyRaw: 'firstName',
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ // write the cached data once
+ cache.write({
+ selection,
+ data: {
+ viewer: {
+ id: '1',
+ friends: {
+ pageInfo: {
+ hasPreviousPage: true,
+ hasNextPage: true,
+ startCursor: 'a',
+ endCursor: 'b',
+ },
+ edges: [
+ {
+ node: {
+ __typename: 'User',
+ id: '2',
+ firstName: 'jane2',
+ },
+ },
+ {
+ node: {
+ __typename: 'User',
+ id: '3',
+ firstName: 'jane',
+ },
+ },
+ ],
+ },
+ },
+ },
+ })
+
+ // write it again with an append update to insert the user
+ cache.write({
+ selection,
+ applyUpdates: ['append'],
+ data: {
+ viewer: {
+ id: '1',
+ friends: {
+ pageInfo: {
+ // should have a different value for the initial set
+ // so we can confirm that it only picked up the starting keys
+ hasPreviousPage: false,
+ hasNextPage: false,
+ startCursor: 'aa',
+ endCursor: 'bb',
+ },
+ edges: [
+ {
+ node: {
+ __typename: 'User',
+ id: '4',
+ firstName: 'jane3',
+ },
+ },
+ ],
+ },
+ },
+ },
+ })
+
+ // make sure that the data looks good
+ expect(cache.read({ selection })).toEqual({
+ partial: false,
+ data: {
+ viewer: {
+ id: '1',
+ friends: {
+ pageInfo: {
+ hasPreviousPage: true,
+ hasNextPage: false,
+ startCursor: 'a',
+ endCursor: 'bb',
+ },
+ edges: [
+ {
+ node: {
+ __typename: 'User',
+ id: '2',
+ firstName: 'jane2',
+ },
+ },
+ {
+ node: {
+ __typename: 'User',
+ id: '3',
+ firstName: 'jane',
+ },
+ },
+ {
+ node: {
+ __typename: 'User',
+ id: '4',
+ firstName: 'jane3',
+ },
+ },
+ ],
+ },
+ },
+ },
+ })
+})
+
+test('append in connection notifies subscribers', function () {
+ // instantiate a cache
+ const cache = new Cache(config)
+
+ const selection: SubscriptionSelection = {
+ fields: {
+ viewer: {
+ type: 'User',
+ keyRaw: 'viewer',
+ selection: {
+ fields: {
+ id: {
+ type: 'ID',
+ keyRaw: 'id',
+ },
+ friends: {
+ type: 'User',
+ keyRaw: 'friends',
+ list: {
+ name: 'All_Users',
+ connection: true,
+ type: 'User',
+ },
+ selection: {
+ fields: {
+ edges: {
+ type: 'UserEdge',
+ keyRaw: 'edges',
+ selection: {
+ fields: {
+ node: {
+ type: 'Node',
+ keyRaw: 'node',
+ abstract: true,
+ selection: {
+ fields: {
+ __typename: {
+ type: 'String',
+ keyRaw: '__typename',
+ },
+ id: {
+ type: 'ID',
+ keyRaw: 'id',
+ },
+ firstName: {
+ type: 'String',
+ keyRaw: 'firstName',
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ // start off associated with one object
+ cache.write({
+ selection,
+ data: {
+ viewer: {
+ id: '1',
+ friends: {
+ edges: [
+ {
+ node: {
+ __typename: 'User',
+ id: '2',
+ firstName: 'jane',
+ },
+ },
+ ],
+ },
+ },
+ },
+ })
+
+ // a function to spy on that will play the role of set
+ const set = vi.fn()
+
+ // subscribe to the fields
+ cache.subscribe({
+ rootType: 'Query',
+ set,
+ selection,
+ })
+
+ // insert an element into the list (no parent ID)
+ cache.list('All_Users').append(
+ {
+ fields: {
+ id: { type: 'ID', keyRaw: 'id' },
+ firstName: { type: 'String', keyRaw: 'firstName' },
+ },
+ },
+ {
+ id: '3',
+ firstName: 'mary',
+ }
+ )
+
+ // make sure we got the new value
+ expect(set).toHaveBeenCalledWith({
+ viewer: {
+ id: '1',
+ friends: {
+ edges: [
+ {
+ node: {
+ __typename: 'User',
+ id: '2',
+ firstName: 'jane',
+ },
+ },
+ {
+ node: {
+ __typename: 'User',
+ id: '3',
+ firstName: 'mary',
+ },
+ },
+ ],
+ },
+ },
+ })
+})
+
test('inserting data with an update overwrites a record inserted with list.append', function () {
// instantiate a cache
const cache = new Cache(config)
@@ -906,7 +1436,7 @@ test('inserting data with an update overwrites a record inserted with list.appen
// insert a record with a query update
cache.write({
- applyUpdates: true,
+ applyUpdates: [RefetchUpdateMode.append],
data: {
viewer: {
id: '1',
@@ -948,7 +1478,7 @@ test('inserting data with an update overwrites a record inserted with list.appen
edges: {
type: 'UserEdge',
keyRaw: 'edges',
- update: RefetchUpdateMode.append,
+ updates: [RefetchUpdateMode.append],
selection: {
fields: {
cursor: {
@@ -3043,7 +3573,7 @@ test('disabled linked lists update', function () {
friends: {
type: 'User',
keyRaw: 'friends',
- update: RefetchUpdateMode.append,
+ updates: [RefetchUpdateMode.append],
selection: {
fields: {
id: {
@@ -3166,7 +3696,7 @@ test('append linked lists update', function () {
friends: {
type: 'User',
keyRaw: 'friends',
- update: RefetchUpdateMode.append,
+ updates: [RefetchUpdateMode.append],
selection: {
fields: {
id: {
@@ -3245,7 +3775,7 @@ test('append linked lists update', function () {
],
},
},
- applyUpdates: true,
+ applyUpdates: [RefetchUpdateMode.append],
})
// make sure we can get the linked lists back
@@ -3298,7 +3828,7 @@ test('writing a scalar marked with a disabled update overwrites', function () {
friends: {
type: 'Int',
keyRaw: 'friends',
- update: RefetchUpdateMode.append,
+ updates: [RefetchUpdateMode.append],
},
},
},
@@ -3371,7 +3901,7 @@ test('writing a scalar marked with a prepend', function () {
friends: {
type: 'Int',
keyRaw: 'friends',
- update: RefetchUpdateMode.prepend,
+ updates: [RefetchUpdateMode.prepend],
},
},
},
@@ -3410,7 +3940,7 @@ test('writing a scalar marked with a prepend', function () {
friends: [2],
},
},
- applyUpdates: true,
+ applyUpdates: [RefetchUpdateMode.prepend],
})
// make sure we can get the updated lists back
@@ -3445,7 +3975,7 @@ test('writing a scalar marked with an append', function () {
friends: {
type: 'Int',
keyRaw: 'friends',
- update: RefetchUpdateMode.append,
+ updates: [RefetchUpdateMode.append],
},
},
},
@@ -3484,7 +4014,7 @@ test('writing a scalar marked with an append', function () {
friends: [2],
},
},
- applyUpdates: true,
+ applyUpdates: [RefetchUpdateMode.append],
})
// make sure we can get the updated lists back
diff --git a/packages/houdini/src/runtime/cache/tests/subscriptions.test.ts b/packages/houdini/src/runtime/cache/tests/subscriptions.test.ts
index 6535d744a9..e6f6b6fbc1 100644
--- a/packages/houdini/src/runtime/cache/tests/subscriptions.test.ts
+++ b/packages/houdini/src/runtime/cache/tests/subscriptions.test.ts
@@ -1769,7 +1769,7 @@ test('ensure parent type is properly passed for nested lists', function () {
connection: false,
type: 'City',
},
- update: RefetchUpdateMode.append,
+ updates: [RefetchUpdateMode.append],
selection: {
fields: {
id: {
@@ -1783,7 +1783,6 @@ test('ensure parent type is properly passed for nested lists', function () {
libraries: {
type: 'Library',
keyRaw: 'libraries',
- update: RefetchUpdateMode.append,
list: {
name: 'Library_List',
connection: false,
diff --git a/packages/houdini/src/runtime/client/documentStore.ts b/packages/houdini/src/runtime/client/documentStore.ts
index d4f2ec8dc6..db78ae97e8 100644
--- a/packages/houdini/src/runtime/client/documentStore.ts
+++ b/packages/houdini/src/runtime/client/documentStore.ts
@@ -560,7 +560,7 @@ export type ClientPluginContext = {
forceNotify?: boolean
disableWrite?: boolean
disableRead?: boolean
- applyUpdates?: boolean
+ applyUpdates?: string[]
}
stuff: App.Stuff
}
diff --git a/packages/houdini/src/runtime/client/plugins/query.ts b/packages/houdini/src/runtime/client/plugins/query.ts
index fd88b2e468..2474fff4e7 100644
--- a/packages/houdini/src/runtime/client/plugins/query.ts
+++ b/packages/houdini/src/runtime/client/plugins/query.ts
@@ -43,7 +43,6 @@ export const queryPlugin: ClientPlugin = documentPlugin(ArtifactKind.Query, func
selection: ctx.artifact.selection,
variables: () => lastVariables,
set: (newValue) => {
- console.log('setting from cache update')
resolve(ctx, {
data: newValue,
errors: null,
diff --git a/packages/houdini/src/runtime/lib/types.ts b/packages/houdini/src/runtime/lib/types.ts
index 156e88aa3b..0856525510 100644
--- a/packages/houdini/src/runtime/lib/types.ts
+++ b/packages/houdini/src/runtime/lib/types.ts
@@ -89,7 +89,6 @@ export type BaseCompiledDocument = {
rootType: string
input?: InputObject
refetch?: {
- update: RefetchUpdateMode
path: string[]
method: 'cursor' | 'offset'
pageSize: number
@@ -97,7 +96,7 @@ export type BaseCompiledDocument = {
embedded: boolean
targetType: string
paginated: boolean
- direction?: 'forward' | 'backwards'
+ direction: 'forward' | 'backward' | 'both'
}
pluginsData?: Record
}
@@ -164,7 +163,7 @@ export type SubscriptionSelection = {
connection: boolean
type: string
}
- update?: RefetchUpdateMode
+ updates?: string[]
filters?: {
[key: string]: {
kind: 'Boolean' | 'String' | 'Float' | 'Int' | 'Variable'
diff --git a/packages/houdini/src/runtime/public/tests/list.test.ts b/packages/houdini/src/runtime/public/tests/list.test.ts
index 741e3c4da7..38c7df62cf 100644
--- a/packages/houdini/src/runtime/public/tests/list.test.ts
+++ b/packages/houdini/src/runtime/public/tests/list.test.ts
@@ -30,6 +30,7 @@ test('list.append accepts record proxies', function () {
edges: {
type: 'UserEdge',
keyRaw: 'edges',
+ updates: ['append'],
selection: {
fields: {
node: {
diff --git a/site/src/routes/api/client-plugins/+page.svx b/site/src/routes/api/client-plugins/+page.svx
index 238258d14f..6520aabe88 100644
--- a/site/src/routes/api/client-plugins/+page.svx
+++ b/site/src/routes/api/client-plugins/+page.svx
@@ -94,9 +94,9 @@ const sayHello: ClientPlugin = () => {
```
One enter hook in a list _must_ use the `resolve` function to provide a value
-for the store. By default, `HoudiniClient` includes a fetch plugin that always
-resolves the pipeline with a value. If no enter hook calls `resolve`, the pipeline
-will hang forever. Here is a simplified version as an example:
+for the store. If no enter hook calls `resolve`, the pipeline
+will hang forever. By default, `HoudiniClient` includes a fetch plugin that always
+resolves the pipeline with a value. Here is a simplified version as an example:
```typescript:title=src/client.ts&typescriptToggle=true
import type { ClientPlugin } from '$houdini'
diff --git a/site/src/routes/api/query/+page.svx b/site/src/routes/api/query/+page.svx
index 03dff4fa81..a8161b9eea 100644
--- a/site/src/routes/api/query/+page.svx
+++ b/site/src/routes/api/query/+page.svx
@@ -6,7 +6,7 @@ description: Queries in Houdini