Skip to content

Commit

Permalink
fix fetch lock not being consistently released (#75028)
Browse files Browse the repository at this point in the history
  • Loading branch information
huozhi authored Jan 17, 2025
1 parent 25e8443 commit 9431336
Show file tree
Hide file tree
Showing 4 changed files with 190 additions and 115 deletions.
237 changes: 122 additions & 115 deletions packages/next/src/server/lib/patch-fetch.ts
Original file line number Diff line number Diff line change
Expand Up @@ -610,132 +610,139 @@ export function createPatchedFetcher(
next: { ...init?.next, fetchType: 'origin', fetchIdx },
}

return originFetch(input, clonedInit).then(async (res) => {
if (!isStale && fetchStart) {
trackFetchMetric(workStore, {
start: fetchStart,
url: fetchUrl,
cacheReason: cacheReasonOverride || cacheReason,
cacheStatus:
finalRevalidate === 0 || cacheReasonOverride
? 'skip'
: 'miss',
cacheWarning,
status: res.status,
method: clonedInit.method || 'GET',
})
}
if (
res.status === 200 &&
incrementalCache &&
cacheKey &&
(isCacheableRevalidate || requestStore?.serverComponentsHmrCache)
) {
const normalizedRevalidate =
finalRevalidate >= INFINITE_CACHE
? CACHE_ONE_YEAR
: finalRevalidate
const externalRevalidate =
finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate

if (workUnitStore && workUnitStore.type === 'prerender') {
// We are prerendering at build time or revalidate time with dynamicIO so we need to
// buffer the response so we can guarantee it can be read in a microtask
const bodyBuffer = await res.arrayBuffer()

const fetchedData = {
headers: Object.fromEntries(res.headers.entries()),
body: Buffer.from(bodyBuffer).toString('base64'),
return originFetch(input, clonedInit)
.then(async (res) => {
if (!isStale && fetchStart) {
trackFetchMetric(workStore, {
start: fetchStart,
url: fetchUrl,
cacheReason: cacheReasonOverride || cacheReason,
cacheStatus:
finalRevalidate === 0 || cacheReasonOverride
? 'skip'
: 'miss',
cacheWarning,
status: res.status,
url: res.url,
}

// We can skip checking the serverComponentsHmrCache because we aren't in
// dev mode.

await incrementalCache.set(
cacheKey,
{
kind: CachedRouteKind.FETCH,
data: fetchedData,
revalidate: normalizedRevalidate,
},
{
fetchCache: true,
revalidate: externalRevalidate,
fetchUrl,
fetchIdx,
tags,
method: clonedInit.method || 'GET',
})
}
if (
res.status === 200 &&
incrementalCache &&
cacheKey &&
(isCacheableRevalidate ||
requestStore?.serverComponentsHmrCache)
) {
const normalizedRevalidate =
finalRevalidate >= INFINITE_CACHE
? CACHE_ONE_YEAR
: finalRevalidate
const externalRevalidate =
finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate

if (workUnitStore && workUnitStore.type === 'prerender') {
// We are prerendering at build time or revalidate time with dynamicIO so we need to
// buffer the response so we can guarantee it can be read in a microtask
const bodyBuffer = await res.arrayBuffer()

const fetchedData = {
headers: Object.fromEntries(res.headers.entries()),
body: Buffer.from(bodyBuffer).toString('base64'),
status: res.status,
url: res.url,
}
)
await handleUnlock()

// We return a new Response to the caller.
return new Response(bodyBuffer, {
headers: res.headers,
status: res.status,
statusText: res.statusText,
})
} else {
// We're cloning the response using this utility because there
// exists a bug in the undici library around response cloning.
// See the following pull request for more details:
// https://github.com/vercel/next.js/pull/73274
const [cloned1, cloned2] = cloneResponse(res)

// We are dynamically rendering including dev mode. We want to return
// the response to the caller as soon as possible because it might stream
// over a very long time.
cloned1
.arrayBuffer()
.then(async (arrayBuffer) => {
const bodyBuffer = Buffer.from(arrayBuffer)

const fetchedData = {
headers: Object.fromEntries(cloned1.headers.entries()),
body: bodyBuffer.toString('base64'),
status: cloned1.status,
url: cloned1.url,
// We can skip checking the serverComponentsHmrCache because we aren't in
// dev mode.

await incrementalCache.set(
cacheKey,
{
kind: CachedRouteKind.FETCH,
data: fetchedData,
revalidate: normalizedRevalidate,
},
{
fetchCache: true,
revalidate: externalRevalidate,
fetchUrl,
fetchIdx,
tags,
}
)
await handleUnlock()

requestStore?.serverComponentsHmrCache?.set(
cacheKey,
fetchedData
)

if (isCacheableRevalidate) {
await incrementalCache.set(
// We return a new Response to the caller.
return new Response(bodyBuffer, {
headers: res.headers,
status: res.status,
statusText: res.statusText,
})
} else {
// We're cloning the response using this utility because there
// exists a bug in the undici library around response cloning.
// See the following pull request for more details:
// https://github.com/vercel/next.js/pull/73274

const [cloned1, cloned2] = cloneResponse(res)

// We are dynamically rendering including dev mode. We want to return
// the response to the caller as soon as possible because it might stream
// over a very long time.
cloned1
.arrayBuffer()
.then(async (arrayBuffer) => {
const bodyBuffer = Buffer.from(arrayBuffer)

const fetchedData = {
headers: Object.fromEntries(cloned1.headers.entries()),
body: bodyBuffer.toString('base64'),
status: cloned1.status,
url: cloned1.url,
}

requestStore?.serverComponentsHmrCache?.set(
cacheKey,
{
kind: CachedRouteKind.FETCH,
data: fetchedData,
revalidate: normalizedRevalidate,
},
{
fetchCache: true,
revalidate: externalRevalidate,
fetchUrl,
fetchIdx,
tags,
}
fetchedData
)
}
})
.catch((error) =>
console.warn(`Failed to set fetch cache`, input, error)
)
.finally(handleUnlock)

return cloned2
if (isCacheableRevalidate) {
await incrementalCache.set(
cacheKey,
{
kind: CachedRouteKind.FETCH,
data: fetchedData,
revalidate: normalizedRevalidate,
},
{
fetchCache: true,
revalidate: externalRevalidate,
fetchUrl,
fetchIdx,
tags,
}
)
}
})
.catch((error) =>
console.warn(`Failed to set fetch cache`, input, error)
)
.finally(handleUnlock)

return cloned2
}
}
}

// we had response that we determined shouldn't be cached so we return it
// and don't cache it. This also needs to unlock the cache lock we acquired.
await handleUnlock()
// we had response that we determined shouldn't be cached so we return it
// and don't cache it. This also needs to unlock the cache lock we acquired.
await handleUnlock()

return res
})
return res
})
.catch((error) => {
handleUnlock()
throw error
})
}

let cacheReasonOverride
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import { nextTestSetup } from 'e2e-utils'

// E2E coverage for the fetch-cache lock: a page whose fetch rejects must
// still render, proving the acquired cache lock is released on error.
describe('app-fetch-errors', () => {
  const { next } = nextTestSetup({
    files: __dirname,
  })

  it('should still successfully render when a fetch request that acquires a cache lock errors', async () => {
    const browser = await next.browser('/1')

    // The page swallows the fetch error, so the body renders normally.
    const bodyText = await browser.elementByCss('body').text()
    expect(bodyText).toBe('Hello World 1')
  })
})
45 changes: 45 additions & 0 deletions test/e2e/app-dir/app-fetch-deduping-errors/app/[id]/page.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
'use server'

import { Metadata } from 'next'

/**
 * Metadata for the dynamic `[id]` segment.
 *
 * The fetch below points at an intentionally unreachable endpoint: the
 * rejection is caught and logged so the page still renders, which is the
 * scenario this fixture exists to exercise.
 */
export async function generateMetadata({
  params,
}: {
  params: Promise<{ id: string }>
}): Promise<Metadata> {
  const resolvedParams = await params

  try {
    // this fetch request will error
    await fetch('http://localhost:8000', {
      cache: 'force-cache',
      next: { tags: ['id'] },
    })
  } catch (err) {
    console.log(err)
  }

  return { title: resolvedParams.id }
}

/**
 * Page for the dynamic `[id]` segment.
 *
 * Renamed from `Error` to `Page`: the original name shadowed the global
 * `Error` constructor inside the component body, which would silently break
 * any `new Error(...)` / `instanceof Error` added here later. As a default
 * export, the local name is not part of the module's interface, so callers
 * are unaffected.
 *
 * The fetch targets an intentionally unreachable endpoint; the rejection is
 * caught and logged so rendering succeeds despite the failed request.
 */
export default async function Page({
  params,
}: {
  params: Promise<{ id: string }>
}) {
  const { id } = await params

  try {
    // this fetch request will error
    await fetch('http://localhost:8000', {
      cache: 'force-cache',
      next: { tags: ['id'] },
    })
  } catch (err) {
    console.log(err)
  }

  return <div>Hello World {id}</div>
}
10 changes: 10 additions & 0 deletions test/e2e/app-dir/app-fetch-deduping-errors/app/layout.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
// Root layout for the fixture app: minimal static document shell that
// slots the routed page into <body>.
export default function Layout({ children }) {
  const title = 'my static site'

  return (
    <html lang="en">
      <head>
        <title>{title}</title>
      </head>
      <body>{children}</body>
    </html>
  )
}

0 comments on commit 9431336

Please sign in to comment.