Generate per-segment responses for any static page
Originally I gated per-segment prefetch generation on the PPR flag,
because I thought the client Segment Cache would require PPR to be
enabled on the server. However, the strategy has since evolved, and I now
think we can roll out the Segment Cache independently of PPR.

Dynamic pages without PPR won't be able to take full advantage of the
Segment Cache, but if the page is fully static then there's no reason
we can't implement all the same behavior.

So during per-segment prerendering, I've changed the feature condition
to check for the `clientSegmentCache` flag instead of the PPR one.
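
For illustration only: under the new condition, a fully static page should get
per-segment prefetch responses from a config like the following (a hypothetical
minimal config; the e2e fixture in this commit enables both flags):

// next.config.js — sketch; clientSegmentCache is the experimental flag the
// new feature condition checks for.
module.exports = {
  experimental: {
    clientSegmentCache: true,
    // ppr is intentionally omitted: per this commit it is no longer required
    // for per-segment prefetch generation on static pages.
  },
}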
acdlite committed Dec 15, 2024
1 parent b6345ec commit a96d7df
Showing 6 changed files with 38 additions and 6 deletions.
1 change: 1 addition & 0 deletions packages/next/src/export/index.ts
@@ -357,6 +357,7 @@ async function exportAppImpl(
clientTraceMetadata: nextConfig.experimental.clientTraceMetadata,
expireTime: nextConfig.expireTime,
dynamicIO: nextConfig.experimental.dynamicIO ?? false,
clientSegmentCache: nextConfig.experimental.clientSegmentCache ?? false,
inlineCss: nextConfig.experimental.inlineCss ?? false,
authInterrupts: !!nextConfig.experimental.authInterrupts,
},
24 changes: 20 additions & 4 deletions packages/next/src/server/app-render/app-render.tsx
@@ -3979,10 +3979,7 @@ async function collectSegmentData(
// decomposed into a separate stream per segment.

const clientReferenceManifest = renderOpts.clientReferenceManifest
-if (
-  !clientReferenceManifest ||
-  renderOpts.experimental.isRoutePPREnabled !== true
-) {
+if (!clientReferenceManifest || !renderOpts.experimental.clientSegmentCache) {
return
}

@@ -4000,8 +3997,27 @@
serverModuleMap: null,
}

// When dynamicIO is enabled, missing data is encoded to an infinitely hanging
// promise, the absence of which we use to determine if a segment is fully
// static or partially static. However, when dynamicIO is not enabled, this
// trick doesn't work.
//
// So if PPR is enabled, and dynamicIO is not, we have to be conservative and
// assume all segments are partial.
//
// TODO: When PPR is on, we can at least optimize the case where the entire
// page is static. Either by passing that as an argument to this function, or
// by setting a header on the response like we do for full page RSC
// prefetches today. The latter approach might be simpler since it requires
// less plumbing, and the client has to check the header regardless to see if
// PPR is enabled.
const shouldAssumePartialData =
renderOpts.experimental.isRoutePPREnabled === true && // PPR is enabled
!renderOpts.experimental.dynamicIO // dynamicIO is disabled

const staleTime = prerenderStore.stale
return await ComponentMod.collectSegmentData(
shouldAssumePartialData,
fullPageDataBuffer,
staleTime,
clientReferenceManifest.clientModules as ManifestNode,
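
To restate the comment added above in code form, a condensed sketch of how the
new flag feeds into a segment's partiality bit (names taken from this commit;
isPartialRSCData is the existing helper used in collect-segment-data.tsx):

// Only assume partial data when PPR is enabled but dynamicIO is not, because
// without dynamicIO there is no hanging-promise signal to detect static data.
const shouldAssumePartialData =
  renderOpts.experimental.isRoutePPREnabled === true &&
  !renderOpts.experimental.dynamicIO

// Downstream (see collect-segment-data.tsx below), the flag short-circuits the
// per-segment and head checks:
const isPartial =
  shouldAssumePartialData || (await isPartialRSCData(rsc, clientModules))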
15 changes: 13 additions & 2 deletions packages/next/src/server/app-render/collect-segment-data.tsx
@@ -74,6 +74,7 @@ function onSegmentPrerenderError(error: unknown) {
}

export async function collectSegmentData(
shouldAssumePartialData: boolean,
fullPageDataBuffer: Buffer,
staleTime: number,
clientModules: ManifestNode,
@@ -117,6 +118,7 @@ export async function collectSegmentData(
// inside of it, the side effects are transferred to the new stream.
// @ts-expect-error
<PrefetchTreeData
shouldAssumePartialData={shouldAssumePartialData}
fullPageDataBuffer={fullPageDataBuffer}
serverConsumerManifest={serverConsumerManifest}
clientModules={clientModules}
@@ -146,13 +148,15 @@
}

async function PrefetchTreeData({
shouldAssumePartialData,
fullPageDataBuffer,
serverConsumerManifest,
clientModules,
staleTime,
segmentTasks,
onCompletedProcessingRouteTree,
}: {
shouldAssumePartialData: boolean
fullPageDataBuffer: Buffer
serverConsumerManifest: any
clientModules: ManifestNode
@@ -191,6 +195,7 @@
// walk the tree, we will also spawn a task to produce a prefetch response for
// each segment.
const tree = await collectSegmentDataImpl(
shouldAssumePartialData,
flightRouterState,
buildId,
seedData,
@@ -202,7 +207,8 @@
segmentTasks
)

-const isHeadPartial = await isPartialRSCData(head, clientModules)
+const isHeadPartial =
+  shouldAssumePartialData || (await isPartialRSCData(head, clientModules))

// Notify the abort controller that we're done processing the route tree.
// Anything async that happens after this point must be due to hanging
@@ -221,6 +227,7 @@
}

async function collectSegmentDataImpl(
shouldAssumePartialData: boolean,
route: FlightRouterState,
buildId: string,
seedData: CacheNodeSeedData | null,
@@ -253,6 +260,7 @@
parallelRouteKey
)
const childTree = await collectSegmentDataImpl(
shouldAssumePartialData,
childRoute,
buildId,
childSeedData,
@@ -276,6 +284,7 @@
// current task to escape the current rendering context.
waitAtLeastOneReactRenderTask().then(() =>
renderSegmentPrefetch(
shouldAssumePartialData,
buildId,
seedData,
segmentPathStr,
@@ -305,6 +314,7 @@
}

async function renderSegmentPrefetch(
shouldAssumePartialData: boolean,
buildId: string,
seedData: CacheNodeSeedData,
segmentPathStr: string,
@@ -320,7 +330,8 @@
buildId,
rsc,
loading,
-isPartial: await isPartialRSCData(rsc, clientModules),
+isPartial:
+  shouldAssumePartialData || (await isPartialRSCData(rsc, clientModules)),
}
// Since all we're doing is decoding and re-encoding a cached prerender, if
// it takes longer than a microtask, it must be because of hanging promises
1 change: 1 addition & 0 deletions packages/next/src/server/app-render/types.ts
@@ -183,6 +183,7 @@ export interface RenderOptsPartial {
expireTime: ExpireTime | undefined
clientTraceMetadata: string[] | undefined
dynamicIO: boolean
clientSegmentCache: boolean
inlineCss: boolean
authInterrupts: boolean
}
2 changes: 2 additions & 0 deletions packages/next/src/server/base-server.ts
@@ -592,6 +592,8 @@ export default abstract class Server<
expireTime: this.nextConfig.expireTime,
clientTraceMetadata: this.nextConfig.experimental.clientTraceMetadata,
dynamicIO: this.nextConfig.experimental.dynamicIO ?? false,
clientSegmentCache:
this.nextConfig.experimental.clientSegmentCache ?? false,
inlineCss: this.nextConfig.experimental.inlineCss ?? false,
authInterrupts: !!this.nextConfig.experimental.authInterrupts,
},
1 change: 1 addition & 0 deletions test/e2e/app-dir/ppr-navigations/simple/next.config.js
@@ -1,5 +1,6 @@
module.exports = {
experimental: {
ppr: true,
clientSegmentCache: true,
},
}
