[dynamicIO] update prerender cache scoping and cache warming for validation (vercel#71822)

Updates the heuristic for scoping prerender caches during dev. A cache
scope will be created for every request if one does not already exist,
and if the request is a server action we always create a new one.
Existing caches already expire after 30 seconds (unchanged).
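
In rough terms the new heuristic looks like this (a minimal sketch, assuming a plain Map in place of the real PrefetchCacheScopes store used in base-server.ts below):

type CacheScope = Map<string, unknown>

// Illustrative stand-in for this.prefetchCacheScopesDev (the real store also
// tracks timestamps for eviction).
const prefetchCacheScopesDev = new Map<string, CacheScope>()

function getScopeForRequest(
  urlPathname: string,
  isServerAction: boolean
): CacheScope {
  let cache = prefetchCacheScopesDev.get(urlPathname)

  // A scope is created for every request that doesn't already have one, and
  // server actions always get a fresh scope so they never reuse warmed caches.
  if (isServerAction || !cache) {
    cache = new Map()
    prefetchCacheScopesDev.set(urlPathname, cache)
  }

  return cache
}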

This updates the prefetch cache purge to 5 seconds so we don't mask
short-lifetime caches.
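
The tightened window applies both to the background eviction and to reads, roughly like this (a minimal sketch; the constant name is illustrative, the real code in prefetch-cache-scopes.ts inlines 5_000):

const SCOPE_TTL_MS = 5_000

function getFreshScope<T>(
  scopes: Map<string, { timestamp: number; cache: T }>,
  url: string
): T | undefined {
  const entry = scopes.get(url)
  // Entries older than the TTL are treated as missing, so a stale prefetch
  // scope can't serve data past a shorter cache lifetime.
  if (!entry || entry.timestamp < Date.now() - SCOPE_TTL_MS) {
    return undefined
  }
  return entry.cache
}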

This also adds cache tracking to the first RSC render of the validation
render. This means we now always need to do a second render, regardless
of whether there is a sync abort.
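
The resulting validation flow is roughly the following (a minimal sketch of spawnDynamicValidationInDev below; CacheSignal.cacheReady() is the real API, the remaining signatures here are illustrative):

interface CacheSignalLike {
  cacheReady(): Promise<void>
}

async function runValidationRenders(
  cacheSignal: CacheSignalLike,
  renderAttempt: (
    signal: AbortSignal,
    trackDynamic: boolean
  ) => ReadableStream<Uint8Array>
): Promise<ReadableStream<Uint8Array>> {
  // First attempt: used only to discover and fill caches. Cache reads report
  // to cacheSignal and dynamic tracking is disabled.
  const firstController = new AbortController()
  renderAttempt(firstController.signal, false)

  // Wait until every cache read observed by the signal has settled, then
  // abort the first attempt; its output is never used.
  await cacheSignal.cacheReady()
  firstController.abort()

  // The second attempt now always runs (previously only after a sync dynamic
  // error), rendering against warm caches with dynamic tracking enabled.
  const secondController = new AbortController()
  return renderAttempt(secondController.signal, true)
}
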
gnoff authored and stipsan committed Nov 6, 2024
1 parent 6103f1f commit 860b12a
Showing 11 changed files with 306 additions and 82 deletions.
119 changes: 55 additions & 64 deletions packages/next/src/server/app-render/app-render.tsx
@@ -1942,6 +1942,7 @@ async function spawnDynamicValidationInDev(
): Promise<void> {
const { componentMod: ComponentMod } = ctx

const cacheSignal = new CacheSignal()
const firstAttemptServerController = new AbortController()
let serverDynamicTracking = createDynamicTrackingState(false)

@@ -1950,20 +1951,20 @@ async function spawnDynamicValidationInDev(
phase: 'render',
implicitTags: [],
renderSignal: firstAttemptServerController.signal,
cacheSignal: null,
cacheSignal,
// During the prospective render we don't want to synchronously abort on dynamic access
// because it could prevent us from discovering all caches in siblings. So we omit the controller
// from the prerender store this time.
controller: firstAttemptServerController,
// With PPR during Prerender we don't need to track individual dynamic reasons
// because we will always do a final render after caches have filled and we
// will track it again there
dynamicTracking: serverDynamicTracking,
dynamicTracking: null,
revalidate: INFINITE_CACHE,
expire: INFINITE_CACHE,
stale: INFINITE_CACHE,
tags: [],
// Dev only property that allows certain logs to be supressed
// Dev only property that allows certain logs to be suppressed
validating: true,
}

@@ -1977,80 +1978,70 @@ async function spawnDynamicValidationInDev(

let reactServerStream = await workUnitAsyncStorage.run(
firstAttemptServerPrerenderStore,
ComponentMod.renderToReadableStream,
firstAttemptRSCPayload,
clientReferenceManifest.clientModules,
{
signal: firstAttemptServerController.signal,
onError: () => {},
}
)

await cacheSignal.cacheReady()
firstAttemptServerController.abort()

const secondAttemptServerController = new AbortController()
serverDynamicTracking = createDynamicTrackingState(false)

const secondAttemptServerPrerenderStore: PrerenderStore = {
type: 'prerender',
phase: 'render',
implicitTags: [],
renderSignal: secondAttemptServerController.signal,
cacheSignal: null,
// During the prospective render we don't want to synchronously abort on dynamic access
// because it could prevent us from discovering all caches in siblings. So we omit the controller
// from the prerender store this time.
controller: secondAttemptServerController,
// With PPR during Prerender we don't need to track individual dynamic reasons
// because we will always do a final render after caches have filled and we
// will track it again there
dynamicTracking: serverDynamicTracking,
revalidate: INFINITE_CACHE,
expire: INFINITE_CACHE,
stale: INFINITE_CACHE,
tags: [],
// Dev only property that allows certain logs to be suppressed
validating: true,
}

const secondAttemptRSCPayload = await workUnitAsyncStorage.run(
secondAttemptServerPrerenderStore,
getRSCPayload,
tree,
ctx,
isNotFound
)

reactServerStream = await workUnitAsyncStorage.run(
secondAttemptServerPrerenderStore,
scheduleInSequentialTasks,
() => {
const stream = ComponentMod.renderToReadableStream(
firstAttemptRSCPayload,
secondAttemptRSCPayload,
clientReferenceManifest.clientModules,
{
signal: firstAttemptServerController.signal,
signal: secondAttemptServerController.signal,
onError: () => {},
}
)
return asHaltedStream(stream, firstAttemptServerController.signal)
return asHaltedStream(stream, secondAttemptServerController.signal)
},
() => {
firstAttemptServerController.abort()
secondAttemptServerController.abort()
}
)

if (serverDynamicTracking.syncDynamicErrorWithStack) {
// If we had a sync dynamic error then we need to retry without
reactServerStream.cancel()

const secondAttemptServerController = new AbortController()
serverDynamicTracking = createDynamicTrackingState(false)

const secondAttemptServerPrerenderStore: PrerenderStore = {
type: 'prerender',
phase: 'render',
implicitTags: [],
renderSignal: secondAttemptServerController.signal,
cacheSignal: null,
// During the prospective render we don't want to synchronously abort on dynamic access
// because it could prevent us from discovering all caches in siblings. So we omit the controller
// from the prerender store this time.
controller: secondAttemptServerController,
// With PPR during Prerender we don't need to track individual dynamic reasons
// because we will always do a final render after caches have filled and we
// will track it again there
dynamicTracking: serverDynamicTracking,
revalidate: INFINITE_CACHE,
expire: INFINITE_CACHE,
stale: INFINITE_CACHE,
tags: [],
// Dev only property that allows certain logs to be supressed
validating: true,
}

const secondAttemptRSCPayload = await workUnitAsyncStorage.run(
secondAttemptServerPrerenderStore,
getRSCPayload,
tree,
ctx,
isNotFound
)

reactServerStream = await workUnitAsyncStorage.run(
secondAttemptServerPrerenderStore,
scheduleInSequentialTasks,
() => {
const stream = ComponentMod.renderToReadableStream(
secondAttemptRSCPayload,
clientReferenceManifest.clientModules,
{
signal: secondAttemptServerController.signal,
onError: () => {},
}
)
return asHaltedStream(stream, secondAttemptServerController.signal)
},
() => {
secondAttemptServerController.abort()
}
)
}

const [warmupStream, renderStream] = reactServerStream.tee()

await warmFlightResponse(warmupStream, clientReferenceManifest)
23 changes: 8 additions & 15 deletions packages/next/src/server/base-server.ts
@@ -3085,34 +3085,27 @@ export default abstract class Server<
if (this.renderOpts.dev) {
let cache = this.prefetchCacheScopesDev.get(urlPathname)

if (isServerAction || !cache) {
cache = new Map()
this.prefetchCacheScopesDev.set(urlPathname, cache)
}

// we need to seed the prefetch cache scope in dev
// since we did not have a prefetch cache available
// and this is not a prefetch request
if (
!cache &&
!isPrefetchRSCRequest &&
routeModule?.definition.kind === RouteKind.APP_PAGE &&
!isServerAction
) {
cache = new Map()

await runWithCacheScope({ cache }, () =>
originalResponseGenerator({ ...state, isDevWarmup: true })
)
this.prefetchCacheScopesDev.set(urlPathname, cache)
}

if (cache) {
return runWithCacheScope({ cache }, () =>
originalResponseGenerator(state)
).finally(() => {
if (isPrefetchRSCRequest) {
this.prefetchCacheScopesDev.set(urlPathname, cache)
} else {
this.prefetchCacheScopesDev.del(urlPathname)
}
})
}
return runWithCacheScope({ cache }, () =>
originalResponseGenerator(state)
)
}

return originalResponseGenerator(state)
11 changes: 9 additions & 2 deletions packages/next/src/server/lib/prefetch-cache-scopes.ts
@@ -13,7 +13,7 @@ export class PrefetchCacheScopes {

private evict() {
for (const [key, value] of this.cacheScopes) {
if (value.timestamp < Date.now() - 30_000) {
if (value.timestamp < Date.now() - 5_000) {
this.cacheScopes.delete(key)
}
}
@@ -23,7 +23,14 @@
// filter _rsc query
get(url: string) {
setImmediate(() => this.evict())
return this.cacheScopes.get(url)?.cache
const currentScope = this.cacheScopes.get(url)
if (currentScope) {
if (currentScope.timestamp < Date.now() - 5_000) {
return undefined
}
return currentScope.cache
}
return undefined
}

set(url: string, cache: CacheScopeStore['cache']) {
40 changes: 39 additions & 1 deletion packages/next/src/server/use-cache/use-cache-wrapper.ts
@@ -516,6 +516,14 @@ export function cache(kind: string, id: string, fn: any) {
const cacheScope: undefined | CacheScopeStore =
cacheScopeAsyncLocalStorage.getStore()
if (cacheScope) {
const cacheSignal =
workUnitStore && workUnitStore.type === 'prerender'
? workUnitStore.cacheSignal
: null

if (cacheSignal) {
cacheSignal.beginRead()
}
const cachedEntry: undefined | Promise<CacheEntry> =
cacheScope.cache.get(serializedCacheKey)
if (cachedEntry !== undefined) {
@@ -532,14 +540,44 @@ export function cache(kind: string, id: string, fn: any) {
// expire time is under 5 minutes, then we consider this cache entry dynamic
// as it's not worth generating static pages for such data. It's better to leave
// a PPR hole that can be filled in dynamically with a potentially cached entry.
if (cacheSignal) {
cacheSignal.endRead()
}
return makeHangingPromise(
workUnitStore.renderSignal,
'dynamic "use cache"'
)
}
const [streamA, streamB] = existingEntry.value.tee()
existingEntry.value = streamB
stream = streamA

if (cacheSignal) {
// When we have a cacheSignal we need to block on reading the cache
// entry before ending the read.
const buffer: any[] = []
const reader = streamA.getReader()
for (let entry; !(entry = await reader.read()).done; ) {
buffer.push(entry.value)
}

let idx = 0
stream = new ReadableStream({
pull(controller) {
if (idx < buffer.length) {
controller.enqueue(buffer[idx++])
} else {
controller.close()
}
},
})
cacheSignal.endRead()
} else {
stream = streamA
}
} else {
if (cacheSignal) {
cacheSignal.endRead()
}
}
}

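The buffering above lets the wrapper finish reading the cached entry before calling endRead(), so cacheReady() in the validation render can't resolve while an entry is still streaming. A standalone sketch of that pattern (the helper name and onDone callback are illustrative):

async function bufferThenRelease(
  source: ReadableStream<Uint8Array>,
  onDone: () => void
): Promise<ReadableStream<Uint8Array>> {
  // Drain the cached stream fully into memory first.
  const chunks: Uint8Array[] = []
  const reader = source.getReader()
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    chunks.push(value)
  }

  // Replay the buffered chunks from a fresh stream.
  let idx = 0
  const replay = new ReadableStream<Uint8Array>({
    pull(controller) {
      if (idx < chunks.length) {
        controller.enqueue(chunks[idx++])
      } else {
        controller.close()
      }
    },
  })

  // Only now mark the cache read as finished (e.g. cacheSignal.endRead()).
  onDone()
  return replay
}
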
@@ -0,0 +1,9 @@
function delay() {
return new Promise((resolve) => {
setTimeout(resolve, 100)
})
}
export async function fetchData() {
await delay()
return '' + Math.random()
}
@@ -0,0 +1,42 @@
import {
revalidateTag,
unstable_cacheLife as cacheLife,
unstable_cacheTag,
} from 'next/cache'
import { fetchData } from '../api/data'
// import { Suspense } from 'react'
// import { cookies, headers } from 'next/headers'

function InnerComponent({ children }) {
return <span id="value">{children}</span>
}

async function refresh() {
'use server'
revalidateTag('hello')
}

async function reload() {
'use server'
}

async function Component() {
'use cache'
cacheLife({ revalidate: 30 })
unstable_cacheTag('hello')
return <InnerComponent>{await fetchData()}</InnerComponent>
}

export default async function Home() {
return (
<>
<form action={refresh}>
<button id="refresh">Refresh</button>
</form>
<form action={reload}>
<button id="reload">Reload</button>
</form>
<Component />
</>
)
}
@@ -0,0 +1,9 @@
export default function Root({ children }: { children: React.ReactNode }) {
return (
<html>
<body>
<main>{children}</main>
</body>
</html>
)
}
@@ -0,0 +1,33 @@
import { fetchData } from '../api/data'
// import { Suspense } from 'react'
// import { cookies, headers } from 'next/headers'

function InnerComponent({ children }) {
return <span id="value">{children}</span>
}

async function refresh() {
'use server'
}

async function reload() {
'use server'
}

async function Component() {
return <InnerComponent>{await fetchData()}</InnerComponent>
}

export default async function Home() {
return (
<>
<form action={refresh}>
<button id="refresh">Refresh</button>
</form>
<form action={reload}>
<button id="reload">Reload</button>
</form>
<Component />
</>
)
}