feat(blog): add file-based blog with dynamic slugs, MDX content and layout shell

- Introduced blog routing using Next.js App Router
- Implemented dynamic [slug] pages for blog posts
- Added MDX-based content loading via lib/posts
- Integrated shared TopBar layout with navigation
- Established a clear separation between content, lib, and components
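
A minimal, hypothetical sketch of the dynamic [slug] page described above. The app/blog/[slug]/page.tsx location and the lib/posts helpers (getAllPosts, getPostBySlug) are assumed names for illustration only, not code taken from this commit:

// app/blog/[slug]/page.tsx — illustrative sketch; helper names and paths are assumptions.
import { notFound } from 'next/navigation';
import { getAllPosts, getPostBySlug } from '@/lib/posts'; // assumed lib/posts API

export async function generateStaticParams() {
  // One statically generated route per MDX post discovered by lib/posts (assumed async).
  const posts = await getAllPosts();
  return posts.map((post) => ({ slug: post.slug }));
}

export default async function BlogPostPage({
  params,
}: {
  params: Promise<{ slug: string }>; // App Router passes params as a Promise in recent Next.js versions
}) {
  const { slug } = await params;
  const post = await getPostBySlug(slug); // assumed to load and compile the MDX file for this slug
  if (!post) notFound();
  return (
    <article>
      <h1>{post.title}</h1>
      {post.content}
    </article>
  );
}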
Author: PascalSchattenburg
Date: 2026-01-22 14:14:15 +01:00
parent b717952234
commit d147843c76
10412 changed files with 2475583 additions and 0 deletions


@@ -0,0 +1,13 @@
type Opaque<K, T> = T & {
__brand: K;
};
export type NormalizedPathname = Opaque<'NormalizedPathname', string>;
export type NormalizedSearch = Opaque<'NormalizedSearch', string>;
export type NormalizedNextUrl = Opaque<'NormalizedNextUrl', string>;
export type RouteCacheKey = Opaque<'RouteCacheKey', {
pathname: NormalizedPathname;
search: NormalizedSearch;
nextUrl: NormalizedNextUrl | null;
}>;
export declare function createCacheKey(originalHref: string, nextUrl: string | null): RouteCacheKey;
export {};
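
A brief illustration (not part of the commit) of what the Opaque branding above buys at the type level: a plain string is rejected where a branded alias is expected, so callers have to go through createCacheKey, which parses the href with new URL(...) and casts internally.

// Illustration only; the brand exists purely in the type system and is erased at runtime.
type Opaque<K, T> = T & { __brand: K };
type NormalizedPathname = Opaque<'NormalizedPathname', string>;

declare function takesPathname(pathname: NormalizedPathname): void;

const raw: string = '/blog/hello-world';
// takesPathname(raw);                     // type error: a plain string lacks the brand
takesPathname(raw as NormalizedPathname);  // an explicit cast, as done inside createCacheKey, is required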


@@ -0,0 +1,28 @@
// TypeScript trick to simulate opaque types, like in Flow.
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "createCacheKey", {
enumerable: true,
get: function() {
return createCacheKey;
}
});
function createCacheKey(originalHref, nextUrl) {
const originalUrl = new URL(originalHref);
const cacheKey = {
pathname: originalUrl.pathname,
search: originalUrl.search,
nextUrl: nextUrl
};
return cacheKey;
}
if ((typeof exports.default === 'function' || (typeof exports.default === 'object' && exports.default !== null)) && typeof exports.default.__esModule === 'undefined') {
Object.defineProperty(exports.default, '__esModule', { value: true });
Object.assign(exports.default, exports);
module.exports = exports.default;
}
//# sourceMappingURL=cache-key.js.map


@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/client/components/segment-cache/cache-key.ts"],"sourcesContent":["// TypeScript trick to simulate opaque types, like in Flow.\ntype Opaque<K, T> = T & { __brand: K }\n\n// Only functions in this module should be allowed to create CacheKeys.\nexport type NormalizedPathname = Opaque<'NormalizedPathname', string>\nexport type NormalizedSearch = Opaque<'NormalizedSearch', string>\nexport type NormalizedNextUrl = Opaque<'NormalizedNextUrl', string>\n\nexport type RouteCacheKey = Opaque<\n 'RouteCacheKey',\n {\n pathname: NormalizedPathname\n search: NormalizedSearch\n nextUrl: NormalizedNextUrl | null\n\n // TODO: Eventually the dynamic params will be added here, too.\n }\n>\n\nexport function createCacheKey(\n originalHref: string,\n nextUrl: string | null\n): RouteCacheKey {\n const originalUrl = new URL(originalHref)\n const cacheKey = {\n pathname: originalUrl.pathname as NormalizedPathname,\n search: originalUrl.search as NormalizedSearch,\n nextUrl: nextUrl as NormalizedNextUrl | null,\n } as RouteCacheKey\n return cacheKey\n}\n"],"names":["createCacheKey","originalHref","nextUrl","originalUrl","URL","cacheKey","pathname","search"],"mappings":"AAAA,2DAA2D;;;;;+BAmB3CA;;;eAAAA;;;AAAT,SAASA,eACdC,YAAoB,EACpBC,OAAsB;IAEtB,MAAMC,cAAc,IAAIC,IAAIH;IAC5B,MAAMI,WAAW;QACfC,UAAUH,YAAYG,QAAQ;QAC9BC,QAAQJ,YAAYI,MAAM;QAC1BL,SAASA;IACX;IACA,OAAOG;AACT","ignoreList":[0]}


@@ -0,0 +1,117 @@
import type { VaryPath } from './vary-path';
/**
* A specialized data type for storing multi-key cache entries.
*
* The basic structure is a map whose keys are tuples, called the keypath.
* When querying the cache, keypaths are compared per-element.
*
* Example:
* set(map, ['https://localhost', 'foo/bar/baz'], 'yay');
* get(map, ['https://localhost', 'foo/bar/baz']) -> 'yay'
*
* NOTE: Array syntax is used in these examples for illustration purposes, but
* in reality the paths are lists.
*
* The parts of the keypath represent the different inputs that contribute
* to the entry value. To illustrate, if you were to use this data type to store
* HTTP responses, the keypath would include the URL and everything listed by
* the Vary header.
*
* See vary-path.ts for more details.
*
* The order of elements in a keypath must be consistent between lookups to
* be considered the same, but besides that, the order of the keys is not
* semantically meaningful.
*
* Keypaths may include a special kind of key called Fallback. When an entry is
* stored with Fallback as part of its keypath, it means that the entry does not
* vary by that key. When querying the cache, if an exact match is not found for
* a keypath, the cache will check for a Fallback match instead. Each element of
* the keypath may have a Fallback, so retrieval is an O(n ^ 2) operation, but
* it's expected that keypaths are relatively short.
*
* Example:
* set(cacheMap, ['store', 'product', 1], PRODUCT_PAGE_1);
* set(cacheMap, ['store', 'product', Fallback], GENERIC_PRODUCT_PAGE);
*
* // Exact match
* get(cacheMap, ['store', 'product', 1]) -> PRODUCT_PAGE_1
*
* // Fallback match
* get(cacheMap, ['store', 'product', 2]) -> GENERIC_PRODUCT_PAGE
*
* Because we have the Fallback mechanism, we can impose a constraint that
* regular JS maps do not have: a value cannot be stored at multiple keypaths
* simultaneously. These cases should be expressed with Fallback keys instead.
*
* Additionally, because values only exist at a single keypath at a time, we
* can optimize successive lookups by caching the internal map entry on the
* value itself, using the `ref` field. This is especially useful because it
* lets us skip the O(n ^ 2) lookup that occurs when Fallback entries
* are present.
*
* How to decide if stuff belongs in here, or in cache.ts?
* -------------------------------------------------------
*
* Anything to do with retrieval, lifetimes, or eviction needs to go in this
* module because it affects the fallback algorithm. For example, when
* performing a lookup, if an entry is stale, it needs to be treated as
* semantically equivalent to if the entry was not present at all.
*
* If there's logic that's not related to the fallback algorithm, though, we
* should prefer to put it in cache.ts.
*/
export interface MapValue {
ref: UnknownMapEntry | null;
size: number;
staleAt: number;
version: number;
}
/**
* Represents a node in the cache map and LRU.
* MapEntry<V> structurally satisfies this interface for any V extends MapValue.
*
* The LRU can contain entries of different value types
* (e.g., both RouteCacheEntry and SegmentCacheEntry). This interface captures
* the common structure needed for cache map and LRU operations without
* requiring knowledge of the specific value type.
*/
export interface MapEntry<V extends MapValue> {
parent: MapEntry<V> | null;
key: unknown;
map: Map<unknown, MapEntry<V>> | null;
value: V | null;
prev: MapEntry<V> | null;
next: MapEntry<V> | null;
size: number;
}
/**
* A looser type for MapEntry
* This allows the LRU to work with entries of different
* value types while still providing type safety.
*
* The `map` field lets Map<unknown, MapEntry<V>> be assignable to this
* type since we're only reading from the map, not inserting into it.
*/
export type UnknownMapEntry = {
parent: UnknownMapEntry | null;
key: unknown;
map: Pick<Map<unknown, UnknownMapEntry>, 'get' | 'delete' | 'size'> | null;
value: MapValue | null;
prev: UnknownMapEntry | null;
next: UnknownMapEntry | null;
size: number;
};
export type CacheMap<V extends MapValue> = MapEntry<V>;
export type FallbackType = {
__brand: 'Fallback';
};
export declare const Fallback: FallbackType;
export declare function createCacheMap<V extends MapValue>(): CacheMap<V>;
export declare function getFromCacheMap<V extends MapValue>(now: number, currentCacheVersion: number, rootEntry: CacheMap<V>, keys: VaryPath, isRevalidation: boolean): V | null;
export declare function isValueExpired(now: number, currentCacheVersion: number, value: MapValue): boolean;
export declare function setInCacheMap<V extends MapValue>(cacheMap: CacheMap<V>, keys: VaryPath, value: V, isRevalidation: boolean): void;
export declare function deleteFromCacheMap(value: MapValue): void;
export declare function deleteMapEntry(entry: UnknownMapEntry): void;
export declare function setSizeInCacheMap<V extends MapValue>(value: V, size: number): void;
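
A hedged usage sketch of the Fallback behavior documented above. VaryPath itself is not shown in this diff; the compiled implementation only reads `value` and `parent` from each keypath node, so a parent-linked list is assumed here purely for illustration.

// Illustration only — the keypath objects below are assumptions, and values just need the MapValue fields.
import { createCacheMap, setInCacheMap, getFromCacheMap, Fallback, type MapValue } from './cache-map';

type Entry = MapValue & { data: string };
const makeEntry = (data: string): Entry => ({ ref: null, size: 0, staleAt: Infinity, version: 0, data });

// Keypath ['store', 'product', 1], threaded as a parent-linked list (assumed shape).
const productOne = { value: 1, parent: { value: 'product', parent: { value: 'store', parent: null } } } as any;
const anyProduct = { value: Fallback, parent: productOne.parent } as any;

const cacheMap = createCacheMap<Entry>();
setInCacheMap(cacheMap, productOne, makeEntry('PRODUCT_PAGE_1'), false);
setInCacheMap(cacheMap, anyProduct, makeEntry('GENERIC_PRODUCT_PAGE'), false);

const now = Date.now();
getFromCacheMap(now, 0, cacheMap, productOne, false);                                      // exact match -> PRODUCT_PAGE_1
getFromCacheMap(now, 0, cacheMap, { value: 2, parent: productOne.parent } as any, false);  // Fallback match -> GENERIC_PRODUCT_PAGE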


@@ -0,0 +1,305 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
Fallback: null,
createCacheMap: null,
deleteFromCacheMap: null,
deleteMapEntry: null,
getFromCacheMap: null,
isValueExpired: null,
setInCacheMap: null,
setSizeInCacheMap: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
Fallback: function() {
return Fallback;
},
createCacheMap: function() {
return createCacheMap;
},
deleteFromCacheMap: function() {
return deleteFromCacheMap;
},
deleteMapEntry: function() {
return deleteMapEntry;
},
getFromCacheMap: function() {
return getFromCacheMap;
},
isValueExpired: function() {
return isValueExpired;
},
setInCacheMap: function() {
return setInCacheMap;
},
setSizeInCacheMap: function() {
return setSizeInCacheMap;
}
});
const _lru = require("./lru");
const Fallback = {};
// This is a special internal key that is used for "revalidation" entries. It's
// an implementation detail that shouldn't leak outside of this module.
const Revalidation = {};
function createCacheMap() {
const cacheMap = {
parent: null,
key: null,
value: null,
map: null,
// LRU-related fields
prev: null,
next: null,
size: 0
};
return cacheMap;
}
function getOrInitialize(cacheMap, keys, isRevalidation) {
// Go through each level of keys until we find the entry that matches, or
// create a new entry if one doesn't exist.
//
// This function will only return entries that match the keypath _exactly_.
// Unlike getWithFallback, it will not access fallback entries unless it's
// explicitly part of the keypath.
let entry = cacheMap;
let remainingKeys = keys;
let key = null;
while(true){
const previousKey = key;
if (remainingKeys !== null) {
key = remainingKeys.value;
remainingKeys = remainingKeys.parent;
} else if (isRevalidation && previousKey !== Revalidation) {
// During a revalidation, we append an internal "Revalidation" key to
// the end of the keypath. The "normal" entry is its parent.
// However, if the parent entry is currently empty, we don't need to store
// this as a revalidation entry. Just insert the revalidation into the
// normal slot.
if (entry.value === null) {
return entry;
}
// Otherwise, create a child entry.
key = Revalidation;
} else {
break;
}
let map = entry.map;
if (map !== null) {
const existingEntry = map.get(key);
if (existingEntry !== undefined) {
// Found a match. Keep going.
entry = existingEntry;
continue;
}
} else {
map = new Map();
entry.map = map;
}
// No entry exists yet at this level. Create a new one.
const newEntry = {
parent: entry,
key,
value: null,
map: null,
// LRU-related fields
prev: null,
next: null,
size: 0
};
map.set(key, newEntry);
entry = newEntry;
}
return entry;
}
function getFromCacheMap(now, currentCacheVersion, rootEntry, keys, isRevalidation) {
const entry = getEntryWithFallbackImpl(now, currentCacheVersion, rootEntry, keys, isRevalidation, 0);
if (entry === null || entry.value === null) {
return null;
}
// This is an LRU access. Move the entry to the front of the list.
(0, _lru.lruPut)(entry);
return entry.value;
}
function isValueExpired(now, currentCacheVersion, value) {
return value.staleAt <= now || value.version < currentCacheVersion;
}
function lazilyEvictIfNeeded(now, currentCacheVersion, entry) {
// We have a matching entry, but before we can return it, we need to check if
// it's still fresh. Otherwise it should be treated the same as a cache miss.
if (entry.value === null) {
// This entry has no value, so there's nothing to evict.
return entry;
}
const value = entry.value;
if (isValueExpired(now, currentCacheVersion, value)) {
// The value expired. Lazily evict it from the cache, and return null. This
// is conceptually the same as a cache miss.
deleteMapEntry(entry);
return null;
}
// The matched entry has not expired. Return it.
return entry;
}
function getEntryWithFallbackImpl(now, currentCacheVersion, entry, keys, isRevalidation, previousKey) {
// This is similar to getExactEntry, but if an exact match is not found for
// a key, it will return the fallback entry instead. This is recursive at
// every level, e.g. an entry with keypath [a, Fallback, c, Fallback] is
// a valid match for [a, b, c, d].
//
// It will return the most specific match available.
let key;
let remainingKeys;
if (keys !== null) {
key = keys.value;
remainingKeys = keys.parent;
} else if (isRevalidation && previousKey !== Revalidation) {
// During a revalidation, we append an internal "Revalidation" key to
// the end of the keypath.
key = Revalidation;
remainingKeys = null;
} else {
// There are no more keys. This is the terminal entry.
// TODO: When performing a lookup during a navigation, as opposed to a
// prefetch, we may want to skip entries that are Pending if there's also
// a Fulfilled fallback entry. Tricky to say, though, since if it's
// already pending, it's likely to stream in soon. Maybe we could do this
// just on slow connections and offline mode.
return lazilyEvictIfNeeded(now, currentCacheVersion, entry);
}
const map = entry.map;
if (map !== null) {
const existingEntry = map.get(key);
if (existingEntry !== undefined) {
// Found an exact match for this key. Keep searching.
const result = getEntryWithFallbackImpl(now, currentCacheVersion, existingEntry, remainingKeys, isRevalidation, key);
if (result !== null) {
return result;
}
}
// No match found for this key. Check if there's a fallback.
const fallbackEntry = map.get(Fallback);
if (fallbackEntry !== undefined) {
// Found a fallback for this key. Keep searching.
return getEntryWithFallbackImpl(now, currentCacheVersion, fallbackEntry, remainingKeys, isRevalidation, key);
}
}
return null;
}
function setInCacheMap(cacheMap, keys, value, isRevalidation) {
// Add a value to the map at the given keypath. If the value is already
// part of the map, it's removed from its previous keypath. (NOTE: This is
// unlike a regular JS map, but the behavior is intentional.)
const entry = getOrInitialize(cacheMap, keys, isRevalidation);
setMapEntryValue(entry, value);
// This is an LRU access. Move the entry to the front of the list.
(0, _lru.lruPut)(entry);
(0, _lru.updateLruSize)(entry, value.size);
}
function setMapEntryValue(entry, value) {
if (entry.value !== null) {
// There's already a value at the given keypath. Disconnect the old value
// from the map. We're not calling `deleteMapEntry` here because the
// entry itself is still in the map. We just want to overwrite its value.
dropRef(entry.value);
entry.value = null;
}
// This value may already be in the map at a different keypath.
// Grab a reference before we overwrite it.
const oldEntry = value.ref;
entry.value = value;
value.ref = entry;
(0, _lru.updateLruSize)(entry, value.size);
if (oldEntry !== null && oldEntry !== entry && oldEntry.value === value) {
// This value is already in the map at a different keypath.
// Values only exist at a single keypath at a time. Remove it from the
// previous keypath.
//
// Note that only the internal map entry is garbage collected; we don't
// call `dropRef` here because it's still in the map, just
// at a new keypath (the one we just set, above).
deleteMapEntry(oldEntry);
}
}
function deleteFromCacheMap(value) {
const entry = value.ref;
if (entry === null) {
// This value is not a member of any map.
return;
}
dropRef(value);
deleteMapEntry(entry);
}
function dropRef(value) {
// Drop the value from the map by setting its `ref` backpointer to
// null. This is a separate operation from `deleteMapEntry` because when
// re-keying a value we need to be able to delete the old, internal map
// entry without garbage collecting the value itself.
value.ref = null;
}
function deleteMapEntry(entry) {
// Delete the entry from the cache.
entry.value = null;
(0, _lru.deleteFromLru)(entry);
// Check if we can garbage collect the entry.
const map = entry.map;
if (map === null) {
// Since this entry has no value, and also no child entries, we can
// garbage collect it. Remove it from its parent, and keep garbage
// collecting the parents until we reach a non-empty entry.
let parent = entry.parent;
let key = entry.key;
while(parent !== null){
const parentMap = parent.map;
if (parentMap !== null) {
parentMap.delete(key);
if (parentMap.size === 0) {
// We just removed the last entry in the parent map.
parent.map = null;
if (parent.value === null) {
// The parent node has no child entries, nor does it have a value
// on itself. It can be garbage collected. Keep going.
key = parent.key;
parent = parent.parent;
continue;
}
}
}
break;
}
} else {
// Check if there's a revalidating entry. If so, promote it to a
// "normal" entry, since the normal one was just deleted.
const revalidatingEntry = map.get(Revalidation);
if (revalidatingEntry !== undefined && revalidatingEntry.value !== null) {
setMapEntryValue(entry, revalidatingEntry.value);
}
}
}
function setSizeInCacheMap(value, size) {
const entry = value.ref;
if (entry === null) {
// This value is not a member of any map.
return;
}
// Except during initialization (when the size is set to 0), this is the only
// place the `size` field should be updated, to ensure it's in sync with the
// LRU.
value.size = size;
(0, _lru.updateLruSize)(entry, size);
}
if ((typeof exports.default === 'function' || (typeof exports.default === 'object' && exports.default !== null)) && typeof exports.default.__esModule === 'undefined') {
Object.defineProperty(exports.default, '__esModule', { value: true });
Object.assign(exports.default, exports);
module.exports = exports.default;
}
//# sourceMappingURL=cache-map.js.map
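
The lazy-eviction check above reduces to two conditions; a quick illustration with a MapValue-shaped object (timestamps are arbitrary):

import { isValueExpired } from './cache-map';

const value = { ref: null, size: 0, staleAt: 1_000, version: 1 };

isValueExpired(500, 1, value);   // false — not yet stale and same cache version
isValueExpired(1_000, 1, value); // true  — staleAt has been reached
isValueExpired(500, 2, value);   // true  — the whole cache was invalidated since (version bump)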

File diff suppressed because one or more lines are too long


@@ -0,0 +1,172 @@
import type { LoadingModuleData } from '../../../shared/lib/app-router-types';
import type { Segment as FlightRouterStateSegment } from '../../../shared/lib/app-router-types';
import { HasLoadingBoundary } from '../../../shared/lib/app-router-types';
import { type PrefetchTask, type PrefetchSubtaskResult } from './scheduler';
import { type SegmentVaryPath, type PageVaryPath } from './vary-path';
import type { NormalizedSearch, RouteCacheKey } from './cache-key';
import { type UnknownMapEntry } from './cache-map';
import { type SegmentRequestKey } from '../../../shared/lib/segment-cache/segment-value-encoding';
import type { FlightRouterState } from '../../../shared/lib/app-router-types';
import { FetchStrategy } from './types';
/**
* Ensures a minimum stale time of 30s to avoid issues where the server sends a
* stale time that is too short, which would prevent anything from being prefetched.
*/
export declare function getStaleTimeMs(staleTimeSeconds: number): number;
type RouteTreeShared = {
requestKey: SegmentRequestKey;
segment: FlightRouterStateSegment;
slots: null | {
[parallelRouteKey: string]: RouteTree;
};
isRootLayout: boolean;
hasLoadingBoundary: HasLoadingBoundary;
hasRuntimePrefetch: boolean;
};
type LayoutRouteTree = RouteTreeShared & {
isPage: false;
varyPath: SegmentVaryPath;
};
type PageRouteTree = RouteTreeShared & {
isPage: true;
varyPath: PageVaryPath;
};
export type RouteTree = LayoutRouteTree | PageRouteTree;
type RouteCacheEntryShared = {
couldBeIntercepted: boolean;
ref: UnknownMapEntry | null;
size: number;
staleAt: number;
version: number;
};
/**
* Tracks the status of a cache entry as it progresses from no data (Empty),
* to waiting for server data (Pending), to finished (either Fulfilled or
* Rejected, depending on the response from the server).
*/
export declare const enum EntryStatus {
Empty = 0,
Pending = 1,
Fulfilled = 2,
Rejected = 3
}
type PendingRouteCacheEntry = RouteCacheEntryShared & {
status: EntryStatus.Empty | EntryStatus.Pending;
blockedTasks: Set<PrefetchTask> | null;
canonicalUrl: null;
renderedSearch: null;
tree: null;
metadata: null;
isPPREnabled: false;
};
type RejectedRouteCacheEntry = RouteCacheEntryShared & {
status: EntryStatus.Rejected;
blockedTasks: Set<PrefetchTask> | null;
canonicalUrl: null;
renderedSearch: null;
tree: null;
metadata: null;
isPPREnabled: boolean;
};
export type FulfilledRouteCacheEntry = RouteCacheEntryShared & {
status: EntryStatus.Fulfilled;
blockedTasks: null;
canonicalUrl: string;
renderedSearch: NormalizedSearch;
tree: RouteTree;
metadata: RouteTree;
isPPREnabled: boolean;
};
export type RouteCacheEntry = PendingRouteCacheEntry | FulfilledRouteCacheEntry | RejectedRouteCacheEntry;
type SegmentCacheEntryShared = {
fetchStrategy: FetchStrategy;
ref: UnknownMapEntry | null;
size: number;
staleAt: number;
version: number;
};
export type EmptySegmentCacheEntry = SegmentCacheEntryShared & {
status: EntryStatus.Empty;
rsc: null;
loading: null;
isPartial: true;
promise: null;
};
export type PendingSegmentCacheEntry = SegmentCacheEntryShared & {
status: EntryStatus.Pending;
rsc: null;
loading: null;
isPartial: boolean;
promise: null | PromiseWithResolvers<FulfilledSegmentCacheEntry | null>;
};
type RejectedSegmentCacheEntry = SegmentCacheEntryShared & {
status: EntryStatus.Rejected;
rsc: null;
loading: null;
isPartial: true;
promise: null;
};
export type FulfilledSegmentCacheEntry = SegmentCacheEntryShared & {
status: EntryStatus.Fulfilled;
rsc: React.ReactNode | null;
loading: LoadingModuleData | Promise<LoadingModuleData>;
isPartial: boolean;
promise: null;
};
export type SegmentCacheEntry = EmptySegmentCacheEntry | PendingSegmentCacheEntry | RejectedSegmentCacheEntry | FulfilledSegmentCacheEntry;
export type NonEmptySegmentCacheEntry = Exclude<SegmentCacheEntry, EmptySegmentCacheEntry>;
export declare function getCurrentCacheVersion(): number;
/**
* Used to clear the client prefetch cache when a server action calls
* revalidatePath or revalidateTag. Eventually we will support only clearing the
* segments that were actually affected, but there's more work to be done on the
* server before the client is able to do this correctly.
*/
export declare function revalidateEntireCache(nextUrl: string | null, tree: FlightRouterState): void;
export declare function pingInvalidationListeners(nextUrl: string | null, tree: FlightRouterState): void;
export declare function readRouteCacheEntry(now: number, key: RouteCacheKey): RouteCacheEntry | null;
export declare function readSegmentCacheEntry(now: number, varyPath: SegmentVaryPath): SegmentCacheEntry | null;
export declare function waitForSegmentCacheEntry(pendingEntry: PendingSegmentCacheEntry): Promise<FulfilledSegmentCacheEntry | null>;
/**
* Checks if an entry for a route exists in the cache. If so, it returns the
* entry. If not, it adds an empty entry to the cache and returns it.
*/
export declare function readOrCreateRouteCacheEntry(now: number, task: PrefetchTask, key: RouteCacheKey): RouteCacheEntry;
export declare function requestOptimisticRouteCacheEntry(now: number, requestedUrl: URL, nextUrl: string | null): FulfilledRouteCacheEntry | null;
/**
* Checks if an entry for a segment exists in the cache. If so, it returns the
* entry. If not, it adds an empty entry to the cache and returns it.
*/
export declare function readOrCreateSegmentCacheEntry(now: number, fetchStrategy: FetchStrategy, route: FulfilledRouteCacheEntry, tree: RouteTree): SegmentCacheEntry;
export declare function readOrCreateRevalidatingSegmentEntry(now: number, fetchStrategy: FetchStrategy, route: FulfilledRouteCacheEntry, tree: RouteTree): SegmentCacheEntry;
export declare function overwriteRevalidatingSegmentCacheEntry(fetchStrategy: FetchStrategy, route: FulfilledRouteCacheEntry, tree: RouteTree): EmptySegmentCacheEntry;
export declare function upsertSegmentEntry(now: number, varyPath: SegmentVaryPath, candidateEntry: SegmentCacheEntry): SegmentCacheEntry | null;
export declare function createDetachedSegmentCacheEntry(staleAt: number): EmptySegmentCacheEntry;
export declare function upgradeToPendingSegment(emptyEntry: EmptySegmentCacheEntry, fetchStrategy: FetchStrategy): PendingSegmentCacheEntry;
export declare function convertRouteTreeToFlightRouterState(routeTree: RouteTree): FlightRouterState;
export declare function fetchRouteOnCacheMiss(entry: PendingRouteCacheEntry, task: PrefetchTask, key: RouteCacheKey): Promise<PrefetchSubtaskResult<null> | null>;
export declare function fetchSegmentOnCacheMiss(route: FulfilledRouteCacheEntry, segmentCacheEntry: PendingSegmentCacheEntry, routeKey: RouteCacheKey, tree: RouteTree): Promise<PrefetchSubtaskResult<FulfilledSegmentCacheEntry> | null>;
export declare function fetchSegmentPrefetchesUsingDynamicRequest(task: PrefetchTask, route: FulfilledRouteCacheEntry, fetchStrategy: FetchStrategy.LoadingBoundary | FetchStrategy.PPRRuntime | FetchStrategy.Full, dynamicRequestTree: FlightRouterState, spawnedEntries: Map<SegmentRequestKey, PendingSegmentCacheEntry>): Promise<PrefetchSubtaskResult<null> | null>;
/**
* Checks whether the new fetch strategy is likely to provide more content than the old one.
*
* Generally, when an app uses dynamic data, a "more specific" fetch strategy is expected to provide more content:
* - `LoadingBoundary` only provides static layouts
* - `PPR` can provide shells for each segment (even for segments that use dynamic data)
* - `PPRRuntime` can additionally include content that uses searchParams, params, or cookies
* - `Full` includes all the content, even if it uses dynamic data
*
* However, it's possible that a more specific fetch strategy *won't* give us more content if:
* - a segment is fully static
* (then, `PPR`/`PPRRuntime`/`Full` will all yield equivalent results)
* - providing searchParams/params/cookies doesn't reveal any more content, e.g. because of an `await connection()`
* (then, `PPR` and `PPRRuntime` will yield equivalent results, only `Full` will give us more)
* Because of this, when comparing two segments, we should also check if the existing segment is partial.
* If it's not partial, then there's no need to prefetch it again, even using a "more specific" strategy.
* There's currently no way to know if `PPRRuntime` will yield more data than `PPR`, so we have to assume it will.
*
* Also note that, in practice, we don't expect to be comparing `LoadingBoundary` to `PPR`/`PPRRuntime`,
* because a non-PPR-enabled route wouldn't ever use the latter strategies. It might however use `Full`.
*/
export declare function canNewFetchStrategyProvideMoreContent(currentStrategy: FetchStrategy, newStrategy: FetchStrategy): boolean;
export {};
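
The entry types above are discriminated unions on `status`; a brief sketch (illustration only, assuming the runtime module is importable as './cache') of how a consumer narrows them:

import { EntryStatus, type SegmentCacheEntry } from './cache';

function describeSegmentEntry(entry: SegmentCacheEntry): string {
  switch (entry.status) {
    case EntryStatus.Fulfilled:
      // Narrowed to FulfilledSegmentCacheEntry: rsc/loading are available here.
      return entry.isPartial ? 'fulfilled (partial shell)' : 'fulfilled (complete)';
    case EntryStatus.Pending:
      // Narrowed to PendingSegmentCacheEntry: callers can await it via waitForSegmentCacheEntry.
      return 'pending';
    case EntryStatus.Empty:
      return 'empty';
    case EntryStatus.Rejected:
      return 'rejected';
  }
}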

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long


@@ -0,0 +1,4 @@
import type { UnknownMapEntry } from './cache-map';
export declare function lruPut(node: UnknownMapEntry): void;
export declare function updateLruSize(node: UnknownMapEntry, newNodeSize: number): void;
export declare function deleteFromLru(deleted: UnknownMapEntry): void;


@@ -0,0 +1,145 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
deleteFromLru: null,
lruPut: null,
updateLruSize: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
deleteFromLru: function() {
return deleteFromLru;
},
lruPut: function() {
return lruPut;
},
updateLruSize: function() {
return updateLruSize;
}
});
const _cachemap = require("./cache-map");
// We use an LRU for memory management. We must update this whenever we add or
// remove a new cache entry, or when an entry changes size.
let head = null;
let didScheduleCleanup = false;
let lruSize = 0;
// TODO: I chose the max size somewhat arbitrarily. Consider setting this based
// on navigator.deviceMemory, or some other heuristic. We should make this
// customizable via the Next.js config, too.
const maxLruSize = 50 * 1024 * 1024; // 50 MB
function lruPut(node) {
if (head === node) {
// Already at the head
return;
}
const prev = node.prev;
const next = node.next;
if (next === null || prev === null) {
// This is an insertion
lruSize += node.size;
// Whenever we add an entry, we need to check if we've exceeded the
// max size. We don't evict entries immediately; they're evicted later in
// an asynchronous task.
ensureCleanupIsScheduled();
} else {
// This is a move. Remove from its current position.
prev.next = next;
next.prev = prev;
}
// Move to the front of the list
if (head === null) {
// This is the first entry
node.prev = node;
node.next = node;
} else {
// Add to the front of the list
const tail = head.prev;
node.prev = tail;
// In practice, this is never null, but that isn't encoded in the type
if (tail !== null) {
tail.next = node;
}
node.next = head;
head.prev = node;
}
head = node;
}
function updateLruSize(node, newNodeSize) {
// This is a separate function from `put` so that we can resize the entry
// regardless of whether it's currently being tracked by the LRU.
const prevNodeSize = node.size;
node.size = newNodeSize;
if (node.next === null) {
// This entry is not currently being tracked by the LRU.
return;
}
// Update the total LRU size
lruSize = lruSize - prevNodeSize + newNodeSize;
ensureCleanupIsScheduled();
}
function deleteFromLru(deleted) {
const next = deleted.next;
const prev = deleted.prev;
if (next !== null && prev !== null) {
lruSize -= deleted.size;
deleted.next = null;
deleted.prev = null;
// Remove from the list
if (head === deleted) {
// Update the head
if (next === head) {
// This was the last entry
head = null;
} else {
head = next;
prev.next = next;
next.prev = prev;
}
} else {
prev.next = next;
next.prev = prev;
}
} else {
// Already deleted
}
}
function ensureCleanupIsScheduled() {
if (didScheduleCleanup || lruSize <= maxLruSize) {
return;
}
didScheduleCleanup = true;
requestCleanupCallback(cleanup);
}
function cleanup() {
didScheduleCleanup = false;
// Evict entries until we're at 90% capacity. We can assume this won't
// infinite loop because even if `maxLruSize` were 0, eventually
// `deleteFromLru` sets `head` to `null` when we run out of entries.
const ninetyPercentMax = maxLruSize * 0.9;
while(lruSize > ninetyPercentMax && head !== null){
const tail = head.prev;
// In practice, this is never null, but that isn't encoded in the type
if (tail !== null) {
// Delete the entry from the map. In turn, this will remove it from
// the LRU.
(0, _cachemap.deleteMapEntry)(tail);
}
}
}
const requestCleanupCallback = typeof requestIdleCallback === 'function' ? requestIdleCallback : (cb)=>setTimeout(cb, 0);
if ((typeof exports.default === 'function' || (typeof exports.default === 'object' && exports.default !== null)) && typeof exports.default.__esModule === 'undefined') {
Object.defineProperty(exports.default, '__esModule', { value: true });
Object.assign(exports.default, exports);
module.exports = exports.default;
}
//# sourceMappingURL=lru.js.map
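
A simplified, standalone sketch (not the module's API) of the circular-list invariant used above: `head` is the most recently used node and `head.prev` is the tail that cleanup() evicts first.

// Minimal illustration mirroring the prev/next handling of lruPut; the real module also
// tracks the total size and schedules asynchronous cleanup when it exceeds maxLruSize.
type Node = { id: string; prev: Node | null; next: Node | null };

let mruHead: Node | null = null;

function put(node: Node): void {
  if (mruHead === node) return;
  if (node.prev !== null && node.next !== null) {
    // Already in the list: unlink from its current position first.
    node.prev.next = node.next;
    node.next.prev = node.prev;
  }
  if (mruHead === null) {
    node.prev = node; // a lone entry points at itself (circular list)
    node.next = node;
  } else {
    const tail = mruHead.prev!; // least recently used entry
    tail.next = node;
    node.prev = tail;
    node.next = mruHead;
    mruHead.prev = node;
  }
  mruHead = node;
}

const a: Node = { id: 'a', prev: null, next: null };
const b: Node = { id: 'b', prev: null, next: null };
put(a); put(b);
// mruHead === b, and the eviction candidate is mruHead.prev === a.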

File diff suppressed because one or more lines are too long


@@ -0,0 +1,47 @@
import type { CacheNodeSeedData, FlightRouterState, FlightSegmentPath } from '../../../shared/lib/app-router-types';
import type { CacheNode } from '../../../shared/lib/app-router-types';
import type { HeadData } from '../../../shared/lib/app-router-types';
import type { NormalizedFlightData } from '../../flight-data-helpers';
import { FreshnessPolicy } from '../router-reducer/ppr-navigations';
import { NavigationResultTag } from './types';
type MPANavigationResult = {
tag: NavigationResultTag.MPA;
data: string;
};
type SuccessfulNavigationResult = {
tag: NavigationResultTag.Success;
data: {
flightRouterState: FlightRouterState;
cacheNode: CacheNode;
canonicalUrl: string;
renderedSearch: string;
scrollableSegments: Array<FlightSegmentPath> | null;
shouldScroll: boolean;
hash: string;
};
};
type AsyncNavigationResult = {
tag: NavigationResultTag.Async;
data: Promise<MPANavigationResult | SuccessfulNavigationResult>;
};
export type NavigationResult = MPANavigationResult | SuccessfulNavigationResult | AsyncNavigationResult;
/**
* Navigate to a new URL, using the Segment Cache to construct a response.
*
* To allow for synchronous navigations whenever possible, this is not an async
* function. It returns a promise only if there's no matching prefetch in
* the cache. Otherwise it returns an immediate result and uses Suspense/RSC to
* stream in any missing data.
*/
export declare function navigate(url: URL, currentUrl: URL, currentCacheNode: CacheNode | null, currentFlightRouterState: FlightRouterState, nextUrl: string | null, freshnessPolicy: FreshnessPolicy, shouldScroll: boolean, accumulation: {
collectedDebugInfo?: Array<unknown>;
}): NavigationResult;
export declare function navigateToSeededRoute(now: number, url: URL, canonicalUrl: string, navigationSeed: NavigationSeed, currentUrl: URL, currentCacheNode: CacheNode | null, currentFlightRouterState: FlightRouterState, freshnessPolicy: FreshnessPolicy, nextUrl: string | null, shouldScroll: boolean): SuccessfulNavigationResult | MPANavigationResult;
export type NavigationSeed = {
tree: FlightRouterState;
renderedSearch: string;
data: CacheNodeSeedData | null;
head: HeadData | null;
};
export declare function convertServerPatchToFullTree(currentTree: FlightRouterState, flightData: Array<NormalizedFlightData>, renderedSearch: string): NavigationSeed;
export {};
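
A hedged sketch (the handler functions are hypothetical stand-ins) of how a caller branches on the result tags above — synchronous results apply immediately, while an Async result resolves to one of the other two:

import { NavigationResultTag } from './types';
import type { NavigationResult } from './navigation';

// Hypothetical stand-ins for the router's real reducers/handlers.
declare function applySuccessfulNavigation(result: NavigationResult & { tag: NavigationResultTag.Success }): void;
declare function hardNavigate(url: string): void;

async function handleNavigationResult(result: NavigationResult): Promise<void> {
  switch (result.tag) {
    case NavigationResultTag.Success:
      applySuccessfulNavigation(result); // SPA navigation using the prefetched/seeded tree
      return;
    case NavigationResultTag.MPA:
      hardNavigate(result.data); // fall back to a full-page navigation
      return;
    case NavigationResultTag.Async:
      // No usable prefetch: a single dynamic request resolves to one of the cases above.
      return handleNavigationResult(await result.data);
  }
}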


@@ -0,0 +1,459 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
convertServerPatchToFullTree: null,
navigate: null,
navigateToSeededRoute: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
convertServerPatchToFullTree: function() {
return convertServerPatchToFullTree;
},
navigate: function() {
return navigate;
},
navigateToSeededRoute: function() {
return navigateToSeededRoute;
}
});
const _fetchserverresponse = require("../router-reducer/fetch-server-response");
const _pprnavigations = require("../router-reducer/ppr-navigations");
const _createhreffromurl = require("../router-reducer/create-href-from-url");
const _cache = require("./cache");
const _cachekey = require("./cache-key");
const _segment = require("../../../shared/lib/segment");
const _types = require("./types");
function navigate(url, currentUrl, currentCacheNode, currentFlightRouterState, nextUrl, freshnessPolicy, shouldScroll, accumulation) {
const now = Date.now();
const href = url.href;
// We special case navigations to the exact same URL as the current location.
// It's a common UI pattern for apps to refresh when you click a link to the
// current page. So when this happens, we refresh the dynamic data in the page
// segments.
//
// Note that this does not apply if any part of the hash or search query
// has changed. This might feel a bit weird but it makes more sense when you
// consider that the way to trigger this behavior is to click the same link
// multiple times.
//
// TODO: We should probably refresh the *entire* route when this case occurs,
// not just the page segments. Essentially treating it the same as a refresh()
// triggered by an action, which is the more explicit way of modeling the UI
// pattern described above.
//
// Also note that this only refreshes the dynamic data, not static/cached
// data. If the page segment is fully static and prefetched, the request is
// skipped. (This is also how refresh() works.)
const isSamePageNavigation = href === currentUrl.href;
const cacheKey = (0, _cachekey.createCacheKey)(href, nextUrl);
const route = (0, _cache.readRouteCacheEntry)(now, cacheKey);
if (route !== null && route.status === _cache.EntryStatus.Fulfilled) {
// We have a matching prefetch.
const snapshot = readRenderSnapshotFromCache(now, route, route.tree);
const prefetchFlightRouterState = snapshot.flightRouterState;
const prefetchSeedData = snapshot.seedData;
const headSnapshot = readHeadSnapshotFromCache(now, route);
const prefetchHead = headSnapshot.rsc;
const isPrefetchHeadPartial = headSnapshot.isPartial;
// TODO: The "canonicalUrl" stored in the cache doesn't include the hash,
// because cache entries do not vary by the hash fragment. However, the one
// we set in the router state *does* include the hash, and it's used to
// sync with the actual browser location. To make this less of a refactor
// hazard, we should always track the hash separately from the rest of
// the URL.
const newCanonicalUrl = route.canonicalUrl + url.hash;
const renderedSearch = route.renderedSearch;
return navigateUsingPrefetchedRouteTree(now, url, currentUrl, nextUrl, isSamePageNavigation, currentCacheNode, currentFlightRouterState, prefetchFlightRouterState, prefetchSeedData, prefetchHead, isPrefetchHeadPartial, newCanonicalUrl, renderedSearch, freshnessPolicy, shouldScroll);
}
// There was no matching route tree in the cache. Let's see if we can
// construct an "optimistic" route tree.
//
// Do not construct an optimistic route tree if there was a cache hit, but
// the entry has a rejected status, since it may have been rejected due to a
// rewrite or redirect based on the search params.
//
// TODO: There are multiple reasons a prefetch might be rejected; we should
// track them explicitly and choose what to do here based on that.
if (route === null || route.status !== _cache.EntryStatus.Rejected) {
const optimisticRoute = (0, _cache.requestOptimisticRouteCacheEntry)(now, url, nextUrl);
if (optimisticRoute !== null) {
// We have an optimistic route tree. Proceed with the normal flow.
const snapshot = readRenderSnapshotFromCache(now, optimisticRoute, optimisticRoute.tree);
const prefetchFlightRouterState = snapshot.flightRouterState;
const prefetchSeedData = snapshot.seedData;
const headSnapshot = readHeadSnapshotFromCache(now, optimisticRoute);
const prefetchHead = headSnapshot.rsc;
const isPrefetchHeadPartial = headSnapshot.isPartial;
const newCanonicalUrl = optimisticRoute.canonicalUrl + url.hash;
const newRenderedSearch = optimisticRoute.renderedSearch;
return navigateUsingPrefetchedRouteTree(now, url, currentUrl, nextUrl, isSamePageNavigation, currentCacheNode, currentFlightRouterState, prefetchFlightRouterState, prefetchSeedData, prefetchHead, isPrefetchHeadPartial, newCanonicalUrl, newRenderedSearch, freshnessPolicy, shouldScroll);
}
}
// There's no matching prefetch for this route in the cache.
let collectedDebugInfo = accumulation.collectedDebugInfo ?? [];
if (accumulation.collectedDebugInfo === undefined) {
collectedDebugInfo = accumulation.collectedDebugInfo = [];
}
return {
tag: _types.NavigationResultTag.Async,
data: navigateDynamicallyWithNoPrefetch(now, url, currentUrl, nextUrl, currentCacheNode, currentFlightRouterState, freshnessPolicy, shouldScroll, collectedDebugInfo)
};
}
function navigateToSeededRoute(now, url, canonicalUrl, navigationSeed, currentUrl, currentCacheNode, currentFlightRouterState, freshnessPolicy, nextUrl, shouldScroll) {
// A version of navigate() that accepts the target route tree as an argument
// rather than reading it from the prefetch cache.
const accumulation = {
scrollableSegments: null,
separateRefreshUrls: null
};
const isSamePageNavigation = url.href === currentUrl.href;
const task = (0, _pprnavigations.startPPRNavigation)(now, currentUrl, currentCacheNode, currentFlightRouterState, navigationSeed.tree, freshnessPolicy, navigationSeed.data, navigationSeed.head, null, null, false, isSamePageNavigation, accumulation);
if (task !== null) {
(0, _pprnavigations.spawnDynamicRequests)(task, url, nextUrl, freshnessPolicy, accumulation);
return navigationTaskToResult(task, canonicalUrl, navigationSeed.renderedSearch, accumulation.scrollableSegments, shouldScroll, url.hash);
}
// Could not perform a SPA navigation. Revert to a full-page (MPA) navigation.
return {
tag: _types.NavigationResultTag.MPA,
data: canonicalUrl
};
}
function navigateUsingPrefetchedRouteTree(now, url, currentUrl, nextUrl, isSamePageNavigation, currentCacheNode, currentFlightRouterState, prefetchFlightRouterState, prefetchSeedData, prefetchHead, isPrefetchHeadPartial, canonicalUrl, renderedSearch, freshnessPolicy, shouldScroll) {
// Recursively construct a prefetch tree by reading from the Segment Cache. To
// maintain compatibility, we output the same data structures as the old
// prefetching implementation: FlightRouterState and CacheNodeSeedData.
// TODO: Eventually updateCacheNodeOnNavigation (or the equivalent) should
// read from the Segment Cache directly. It's only structured this way for now
// so we can share code with the old prefetching implementation.
const accumulation = {
scrollableSegments: null,
separateRefreshUrls: null
};
const seedData = null;
const seedHead = null;
const task = (0, _pprnavigations.startPPRNavigation)(now, currentUrl, currentCacheNode, currentFlightRouterState, prefetchFlightRouterState, freshnessPolicy, seedData, seedHead, prefetchSeedData, prefetchHead, isPrefetchHeadPartial, isSamePageNavigation, accumulation);
if (task !== null) {
(0, _pprnavigations.spawnDynamicRequests)(task, url, nextUrl, freshnessPolicy, accumulation);
return navigationTaskToResult(task, canonicalUrl, renderedSearch, accumulation.scrollableSegments, shouldScroll, url.hash);
}
// Could not perform a SPA navigation. Revert to a full-page (MPA) navigation.
return {
tag: _types.NavigationResultTag.MPA,
data: canonicalUrl
};
}
function navigationTaskToResult(task, canonicalUrl, renderedSearch, scrollableSegments, shouldScroll, hash) {
return {
tag: _types.NavigationResultTag.Success,
data: {
flightRouterState: task.route,
cacheNode: task.node,
canonicalUrl,
renderedSearch,
scrollableSegments,
shouldScroll,
hash
}
};
}
function readRenderSnapshotFromCache(now, route, tree) {
let childRouterStates = {};
let childSeedDatas = {};
const slots = tree.slots;
if (slots !== null) {
for(const parallelRouteKey in slots){
const childTree = slots[parallelRouteKey];
const childResult = readRenderSnapshotFromCache(now, route, childTree);
childRouterStates[parallelRouteKey] = childResult.flightRouterState;
childSeedDatas[parallelRouteKey] = childResult.seedData;
}
}
let rsc = null;
let loading = null;
let isPartial = true;
const segmentEntry = (0, _cache.readSegmentCacheEntry)(now, tree.varyPath);
if (segmentEntry !== null) {
switch(segmentEntry.status){
case _cache.EntryStatus.Fulfilled:
{
// Happy path: a cache hit
rsc = segmentEntry.rsc;
loading = segmentEntry.loading;
isPartial = segmentEntry.isPartial;
break;
}
case _cache.EntryStatus.Pending:
{
// We haven't received data for this segment yet, but there's already
// an in-progress request. Since it's extremely likely to arrive
// before the dynamic data response, we might as well use it.
const promiseForFulfilledEntry = (0, _cache.waitForSegmentCacheEntry)(segmentEntry);
rsc = promiseForFulfilledEntry.then((entry)=>entry !== null ? entry.rsc : null);
loading = promiseForFulfilledEntry.then((entry)=>entry !== null ? entry.loading : null);
// Because the request is still pending, we typically don't know yet
// whether the response will be partial. We shouldn't skip this segment
// during the dynamic navigation request. Otherwise, we might need to
// do yet another request to fill in the remaining data, creating
// a waterfall.
//
// The one exception is if this segment is being fetched via
// prefetch={true} (i.e. the "force stale" or "full" strategy). If so,
// we can assume the response will be full. This field is set to `false`
// for such segments.
isPartial = segmentEntry.isPartial;
break;
}
case _cache.EntryStatus.Empty:
case _cache.EntryStatus.Rejected:
break;
default:
segmentEntry;
}
}
// The navigation implementation expects the search params to be
// included in the segment. However, the Segment Cache tracks search
// params separately from the rest of the segment key. So we need to
// add them back here.
//
// See corresponding comment in convertFlightRouterStateToTree.
//
// TODO: What we should do instead is update the navigation diffing
// logic to compare search params explicitly. This is a temporary
// solution until more of the Segment Cache implementation has settled.
const segment = (0, _segment.addSearchParamsIfPageSegment)(tree.segment, Object.fromEntries(new URLSearchParams(route.renderedSearch)));
// We don't need this information in a render snapshot, so this can just be a placeholder.
const hasRuntimePrefetch = false;
return {
flightRouterState: [
segment,
childRouterStates,
null,
null,
tree.isRootLayout
],
seedData: [
rsc,
childSeedDatas,
loading,
isPartial,
hasRuntimePrefetch
]
};
}
function readHeadSnapshotFromCache(now, route) {
// Same as readRenderSnapshotFromCache, but for the head
let rsc = null;
let isPartial = true;
const segmentEntry = (0, _cache.readSegmentCacheEntry)(now, route.metadata.varyPath);
if (segmentEntry !== null) {
switch(segmentEntry.status){
case _cache.EntryStatus.Fulfilled:
{
rsc = segmentEntry.rsc;
isPartial = segmentEntry.isPartial;
break;
}
case _cache.EntryStatus.Pending:
{
const promiseForFulfilledEntry = (0, _cache.waitForSegmentCacheEntry)(segmentEntry);
rsc = promiseForFulfilledEntry.then((entry)=>entry !== null ? entry.rsc : null);
isPartial = segmentEntry.isPartial;
break;
}
case _cache.EntryStatus.Empty:
case _cache.EntryStatus.Rejected:
break;
default:
segmentEntry;
}
}
return {
rsc,
isPartial
};
}
// Used to request all the dynamic data for a route, rather than just a subset,
// e.g. during a refresh or a revalidation. Typically this gets constructed
// during the normal flow when diffing the route tree, but for an unprefetched
// navigation, where we don't know the structure of the target route, we use
// this instead.
const DynamicRequestTreeForEntireRoute = [
'',
{},
null,
'refetch'
];
async function navigateDynamicallyWithNoPrefetch(now, url, currentUrl, nextUrl, currentCacheNode, currentFlightRouterState, freshnessPolicy, shouldScroll, collectedDebugInfo) {
// Runs when a navigation happens but there's no cached prefetch we can use.
// Don't bother to wait for a prefetch response; go straight to a full
// navigation that contains both static and dynamic data in a single stream.
// (This is unlike the old navigation implementation, which instead blocks
// the dynamic request until a prefetch request is received.)
//
// To avoid duplication of logic, we're going to pretend that the tree
// returned by the dynamic request is, in fact, a prefetch tree. Then we can
// use the same server response to write the actual data into the CacheNode
// tree. So it's the same flow as the "happy path" (prefetch, then
// navigation), except we use a single server response for both stages.
let dynamicRequestTree;
switch(freshnessPolicy){
case _pprnavigations.FreshnessPolicy.Default:
case _pprnavigations.FreshnessPolicy.HistoryTraversal:
dynamicRequestTree = currentFlightRouterState;
break;
case _pprnavigations.FreshnessPolicy.Hydration:
case _pprnavigations.FreshnessPolicy.RefreshAll:
case _pprnavigations.FreshnessPolicy.HMRRefresh:
dynamicRequestTree = DynamicRequestTreeForEntireRoute;
break;
default:
freshnessPolicy;
dynamicRequestTree = currentFlightRouterState;
break;
}
const promiseForDynamicServerResponse = (0, _fetchserverresponse.fetchServerResponse)(url, {
flightRouterState: dynamicRequestTree,
nextUrl
});
const result = await promiseForDynamicServerResponse;
if (typeof result === 'string') {
// This is an MPA navigation.
const newUrl = result;
return {
tag: _types.NavigationResultTag.MPA,
data: newUrl
};
}
const { flightData, canonicalUrl, renderedSearch, debugInfo: debugInfoFromResponse } = result;
if (debugInfoFromResponse !== null) {
collectedDebugInfo.push(...debugInfoFromResponse);
}
// Since the response format of dynamic requests and prefetches is slightly
// different, we'll need to massage the data a bit. Create a FlightRouterState
// tree that simulates what we'd receive as the result of a prefetch.
const navigationSeed = convertServerPatchToFullTree(currentFlightRouterState, flightData, renderedSearch);
return navigateToSeededRoute(now, url, (0, _createhreffromurl.createHrefFromUrl)(canonicalUrl), navigationSeed, currentUrl, currentCacheNode, currentFlightRouterState, freshnessPolicy, nextUrl, shouldScroll);
}
function convertServerPatchToFullTree(currentTree, flightData, renderedSearch) {
// During a client navigation or prefetch, the server sends back only a patch
// for the parts of the tree that have changed.
//
// This applies the patch to the base tree to create a full representation of
// the resulting tree.
//
// The return type includes a full FlightRouterState tree and a full
// CacheNodeSeedData tree. (Conceptually these are the same tree, and should
// eventually be unified, but there's still lots of existing code that
// operates on FlightRouterState trees alone without the CacheNodeSeedData.)
//
// TODO: This is similar to what apply-router-state-patch-to-tree does. It
// will eventually fully replace it. We should get rid of all the remaining
// places where we iterate over the server patch format. This should also
// eventually replace normalizeFlightData.
let baseTree = currentTree;
let baseData = null;
let head = null;
for (const { segmentPath, tree: treePatch, seedData: dataPatch, head: headPatch } of flightData){
const result = convertServerPatchToFullTreeImpl(baseTree, baseData, treePatch, dataPatch, segmentPath, 0);
baseTree = result.tree;
baseData = result.data;
// This is the same for all patches per response, so just pick an
// arbitrary one
head = headPatch;
}
return {
tree: baseTree,
data: baseData,
renderedSearch,
head
};
}
function convertServerPatchToFullTreeImpl(baseRouterState, baseData, treePatch, dataPatch, segmentPath, index) {
if (index === segmentPath.length) {
// We reached the part of the tree that we need to patch.
return {
tree: treePatch,
data: dataPatch
};
}
// segmentPath represents the parent path of subtree. It's a repeating
// pattern of parallel route key and segment:
//
// [string, Segment, string, Segment, string, Segment, ...]
//
// This path tells us which part of the base tree to apply the tree patch.
//
// NOTE: We receive the FlightRouterState patch in the same request as the
// seed data patch. Therefore we don't need to worry about diffing the segment
// values; we can assume the server sent us a correct result.
const updatedParallelRouteKey = segmentPath[index];
// const segment: Segment = segmentPath[index + 1] <-- Not used, see note above
const baseTreeChildren = baseRouterState[1];
const baseSeedDataChildren = baseData !== null ? baseData[1] : null;
const newTreeChildren = {};
const newSeedDataChildren = {};
for(const parallelRouteKey in baseTreeChildren){
const childBaseRouterState = baseTreeChildren[parallelRouteKey];
const childBaseSeedData = baseSeedDataChildren !== null ? baseSeedDataChildren[parallelRouteKey] ?? null : null;
if (parallelRouteKey === updatedParallelRouteKey) {
const result = convertServerPatchToFullTreeImpl(childBaseRouterState, childBaseSeedData, treePatch, dataPatch, segmentPath, // Advance the index by two and keep cloning until we reach
// the end of the segment path.
index + 2);
newTreeChildren[parallelRouteKey] = result.tree;
newSeedDataChildren[parallelRouteKey] = result.data;
} else {
// This child is not being patched. Copy it over as-is.
newTreeChildren[parallelRouteKey] = childBaseRouterState;
newSeedDataChildren[parallelRouteKey] = childBaseSeedData;
}
}
let clonedTree;
let clonedSeedData;
// Clone all the fields except the children.
// Clone the FlightRouterState tree. Based on equivalent logic in
// apply-router-state-patch-to-tree, but should confirm whether we need to
// copy all of these fields. Not sure the server ever sends, e.g. the
// refetch marker.
clonedTree = [
baseRouterState[0],
newTreeChildren
];
if (2 in baseRouterState) {
clonedTree[2] = baseRouterState[2];
}
if (3 in baseRouterState) {
clonedTree[3] = baseRouterState[3];
}
if (4 in baseRouterState) {
clonedTree[4] = baseRouterState[4];
}
// Clone the CacheNodeSeedData tree.
const isEmptySeedDataPartial = true;
clonedSeedData = [
null,
newSeedDataChildren,
null,
isEmptySeedDataPartial,
false
];
return {
tree: clonedTree,
data: clonedSeedData
};
}
if ((typeof exports.default === 'function' || (typeof exports.default === 'object' && exports.default !== null)) && typeof exports.default.__esModule === 'undefined') {
Object.defineProperty(exports.default, '__esModule', { value: true });
Object.assign(exports.default, exports);
module.exports = exports.default;
}
//# sourceMappingURL=navigation.js.map
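
To make the segmentPath walk in convertServerPatchToFullTreeImpl concrete, a small hypothetical example (route keys and segment values are placeholders, not real app data):

// Hypothetical patch path: alternating parallel-route key and segment, per the comment above.
const segmentPath = ['children', 'blog', 'children', ['slug', 'hello-world', 'd']];
// convertServerPatchToFullTreeImpl(baseTree, baseData, treePatch, dataPatch, segmentPath, 0) proceeds as:
//   index 0: clone this level of the base tree and descend into its 'children' slot
//            (the segment at index 1 is not diffed — the server patch is trusted)
//   index 2: clone that level and descend into its 'children' slot again
//   index 4 === segmentPath.length: target reached — this subtree becomes treePatch / dataPatch
// Sibling parallel routes that are not on the path are copied over unchanged.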

File diff suppressed because one or more lines are too long


@@ -0,0 +1,23 @@
import type { FlightRouterState } from '../../../shared/lib/app-router-types';
import { type PrefetchTaskFetchStrategy } from './types';
/**
* Entrypoint for prefetching a URL into the Segment Cache.
* @param href - The URL to prefetch. Typically this will come from a <Link>,
* or router.prefetch. It must be validated before we attempt to prefetch it.
* @param nextUrl - A special header used by the server for interception routes.
* Roughly corresponds to the current URL.
* @param treeAtTimeOfPrefetch - The FlightRouterState at the time the prefetch
* was requested. This is only used when PPR is disabled.
* @param fetchStrategy - Whether to prefetch dynamic data, in addition to
* static data. This is used by `<Link prefetch={true}>`.
* @param onInvalidate - An optional callback tied to prefetch cache invalidation.
* When called, it signals to the listener that the data associated with the
* prefetch may have been invalidated from the cache. This is not a live
* subscription — it's called at most once per `prefetch` call. The only
* supported use case is to trigger a new prefetch inside the listener, if
* desired. It also may be called even in cases where the associated data is
* still cached. Prefetching is a poll-based (pull) operation, not an event-
* based (push) one. Rather than subscribe to specific cache entries, you
* occasionally poll the prefetch cache to check if anything is missing.
*/
export declare function prefetch(href: string, nextUrl: string | null, treeAtTimeOfPrefetch: FlightRouterState, fetchStrategy: PrefetchTaskFetchStrategy, onInvalidate: null | (() => void)): void;
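
A hedged usage sketch of the entry point above, showing the one supported use of onInvalidate (triggering a fresh prefetch); the router-provided inputs are left abstract:

import { prefetch } from './prefetch';
import type { PrefetchTaskFetchStrategy } from './types';
import type { FlightRouterState } from '../../../shared/lib/app-router-types';

// Placeholder inputs — a real caller reads these from the router's current state.
declare const currentTree: FlightRouterState;
declare const nextUrl: string | null;
declare const fetchStrategy: PrefetchTaskFetchStrategy;

function prefetchWithRepoll(href: string): void {
  prefetch(href, nextUrl, currentTree, fetchStrategy, () => {
    // Only supported use of onInvalidate: poll again, since the data for this href
    // may (or may not) have been dropped from the cache.
    prefetchWithRepoll(href);
  });
}

prefetchWithRepoll('/blog/hello-world');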


@@ -0,0 +1,31 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "prefetch", {
enumerable: true,
get: function() {
return prefetch;
}
});
const _approuterutils = require("../app-router-utils");
const _cachekey = require("./cache-key");
const _scheduler = require("./scheduler");
const _types = require("./types");
function prefetch(href, nextUrl, treeAtTimeOfPrefetch, fetchStrategy, onInvalidate) {
const url = (0, _approuterutils.createPrefetchURL)(href);
if (url === null) {
// This href should not be prefetched.
return;
}
const cacheKey = (0, _cachekey.createCacheKey)(url.href, nextUrl);
(0, _scheduler.schedulePrefetchTask)(cacheKey, treeAtTimeOfPrefetch, fetchStrategy, _types.PrefetchPriority.Default, onInvalidate);
}
if ((typeof exports.default === 'function' || (typeof exports.default === 'object' && exports.default !== null)) && typeof exports.default.__esModule === 'undefined') {
Object.defineProperty(exports.default, '__esModule', { value: true });
Object.assign(exports.default, exports);
module.exports = exports.default;
}
//# sourceMappingURL=prefetch.js.map


@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/client/components/segment-cache/prefetch.ts"],"sourcesContent":["import type { FlightRouterState } from '../../../shared/lib/app-router-types'\nimport { createPrefetchURL } from '../app-router-utils'\nimport { createCacheKey } from './cache-key'\nimport { schedulePrefetchTask } from './scheduler'\nimport { PrefetchPriority, type PrefetchTaskFetchStrategy } from './types'\n\n/**\n * Entrypoint for prefetching a URL into the Segment Cache.\n * @param href - The URL to prefetch. Typically this will come from a <Link>,\n * or router.prefetch. It must be validated before we attempt to prefetch it.\n * @param nextUrl - A special header used by the server for interception routes.\n * Roughly corresponds to the current URL.\n * @param treeAtTimeOfPrefetch - The FlightRouterState at the time the prefetch\n * was requested. This is only used when PPR is disabled.\n * @param fetchStrategy - Whether to prefetch dynamic data, in addition to\n * static data. This is used by `<Link prefetch={true}>`.\n * @param onInvalidate - A callback that will be called when the prefetch cache\n * When called, it signals to the listener that the data associated with the\n * prefetch may have been invalidated from the cache. This is not a live\n * subscription — it's called at most once per `prefetch` call. The only\n * supported use case is to trigger a new prefetch inside the listener, if\n * desired. It also may be called even in cases where the associated data is\n * still cached. Prefetching is a poll-based (pull) operation, not an event-\n * based (push) one. Rather than subscribe to specific cache entries, you\n * occasionally poll the prefetch cache to check if anything is missing.\n */\nexport function prefetch(\n href: string,\n nextUrl: string | null,\n treeAtTimeOfPrefetch: FlightRouterState,\n fetchStrategy: PrefetchTaskFetchStrategy,\n onInvalidate: null | (() => void)\n) {\n const url = createPrefetchURL(href)\n if (url === null) {\n // This href should not be prefetched.\n return\n }\n const cacheKey = createCacheKey(url.href, nextUrl)\n schedulePrefetchTask(\n cacheKey,\n treeAtTimeOfPrefetch,\n fetchStrategy,\n PrefetchPriority.Default,\n onInvalidate\n )\n}\n"],"names":["prefetch","href","nextUrl","treeAtTimeOfPrefetch","fetchStrategy","onInvalidate","url","createPrefetchURL","cacheKey","createCacheKey","schedulePrefetchTask","PrefetchPriority","Default"],"mappings":";;;;+BA0BgBA;;;eAAAA;;;gCAzBkB;0BACH;2BACM;uBAC4B;AAsB1D,SAASA,SACdC,IAAY,EACZC,OAAsB,EACtBC,oBAAuC,EACvCC,aAAwC,EACxCC,YAAiC;IAEjC,MAAMC,MAAMC,IAAAA,iCAAiB,EAACN;IAC9B,IAAIK,QAAQ,MAAM;QAChB,sCAAsC;QACtC;IACF;IACA,MAAME,WAAWC,IAAAA,wBAAc,EAACH,IAAIL,IAAI,EAAEC;IAC1CQ,IAAAA,+BAAoB,EAClBF,UACAL,sBACAC,eACAO,uBAAgB,CAACC,OAAO,EACxBP;AAEJ","ignoreList":[0]}

View File

@@ -0,0 +1,130 @@
import type { FlightRouterState } from '../../../shared/lib/app-router-types';
import type { RouteCacheKey } from './cache-key';
import { type PrefetchTaskFetchStrategy, PrefetchPriority } from './types';
import type { SegmentRequestKey } from '../../../shared/lib/segment-cache/segment-value-encoding';
export type PrefetchTask = {
key: RouteCacheKey;
/**
* The FlightRouterState at the time the task was initiated. This is needed
* when falling back to the non-PPR behavior, which only prefetches up to
* the first loading boundary.
*/
treeAtTimeOfPrefetch: FlightRouterState;
/**
* The cache version at the time the task was initiated. This is used to
* determine if the cache was invalidated since the task was initiated.
*/
cacheVersion: number;
/**
* Whether to prefetch dynamic data, in addition to static data. This is
* used by `<Link prefetch={true}>`.
*
* Note that a task with `FetchStrategy.PPR` might need to use
* `FetchStrategy.LoadingBoundary` instead if we find out that a route
* does not support PPR after doing the initial route prefetch.
*/
fetchStrategy: PrefetchTaskFetchStrategy;
/**
* sortId is an incrementing counter
*
* Newer prefetches are prioritized over older ones, so that as new links
* enter the viewport, they are not starved by older links that are no
* longer relevant. In the future, we can add additional prioritization
* heuristics, like removing prefetches once a link leaves the viewport.
*
* The sortId is assigned when the prefetch is initiated, and reassigned if
* the same task is prefetched again (effectively bumping it to the top of
* the queue).
*
* TODO: We can add additional fields here to indicate what kind of prefetch
* it is. For example, was it initiated by a link? Or was it an imperative
* call? If it was initiated by a link, we can remove it from the queue when
* the link leaves the viewport, but if it was an imperative call, then we
* should keep it in the queue until it's fulfilled.
*
* We can also add priority levels. For example, hovering over a link could
* increase the priority of its prefetch.
*/
sortId: number;
/**
* The priority of the task. Like sortId, this affects the task's position in
 * the queue, so it must never be updated without re-sifting the heap.
*/
priority: PrefetchPriority;
/**
* The phase of the task. Tasks are split into multiple phases so that their
* priority can be adjusted based on what kind of work they're doing.
* Concretely, prefetching the route tree is higher priority than prefetching
* segment data.
*/
phase: PrefetchPhase;
/**
* These fields are temporary state for tracking the currently running task.
* They are reset after each iteration of the task queue.
*/
hasBackgroundWork: boolean;
spawnedRuntimePrefetches: Set<SegmentRequestKey> | null;
/**
* True if the prefetch was cancelled.
*/
isCanceled: boolean;
/**
* The callback passed to `router.prefetch`, if given.
*/
onInvalidate: null | (() => void);
/**
* The index of the task in the heap's backing array. Used to efficiently
* change the priority of a task by re-sifting it, which requires knowing
* where it is in the array. This is only used internally by the heap
* algorithm. The naive alternative is indexOf every time a task is queued,
* which has O(n) complexity.
*
* We also use this field to check whether a task is currently in the queue.
*/
_heapIndex: number;
};
/**
* Prefetch tasks are processed in two phases: first the route tree is fetched,
 * then the segments. We use this to prioritize tasks that have not yet fetched
* the route tree.
*/
declare const enum PrefetchPhase {
RouteTree = 1,
Segments = 0
}
export type PrefetchSubtaskResult<T> = {
/**
* A promise that resolves when the network connection is closed.
*/
closed: Promise<void>;
value: T;
};
/**
* Called by the cache when revalidation occurs. Starts a cooldown period
* during which prefetch requests are blocked to allow CDN cache propagation.
*/
export declare function startRevalidationCooldown(): void;
export type IncludeDynamicData = null | 'full' | 'dynamic';
/**
* Initiates a prefetch task for the given URL. If a prefetch for the same URL
* is already in progress, this will bump it to the top of the queue.
*
* This is not a user-facing function. By the time this is called, the href is
* expected to be validated and normalized.
*
* @param key The RouteCacheKey to prefetch.
* @param treeAtTimeOfPrefetch The app's current FlightRouterState
* @param fetchStrategy Whether to prefetch dynamic data, in addition to
* static data. This is used by `<Link prefetch={true}>`.
*/
export declare function schedulePrefetchTask(key: RouteCacheKey, treeAtTimeOfPrefetch: FlightRouterState, fetchStrategy: PrefetchTaskFetchStrategy, priority: PrefetchPriority, onInvalidate: null | (() => void)): PrefetchTask;
export declare function cancelPrefetchTask(task: PrefetchTask): void;
export declare function reschedulePrefetchTask(task: PrefetchTask, treeAtTimeOfPrefetch: FlightRouterState, fetchStrategy: PrefetchTaskFetchStrategy, priority: PrefetchPriority): void;
export declare function isPrefetchTaskDirty(task: PrefetchTask, nextUrl: string | null, tree: FlightRouterState): boolean;
/**
* Notify the scheduler that we've received new data for an in-progress
* prefetch. The corresponding task will be added back to the queue (unless the
* task has been canceled in the meantime).
*/
export declare function pingPrefetchTask(task: PrefetchTask): void;
export {};
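
To make the interplay of priority, phase, and sortId concrete, here is a hedged comparator sketch for ordering tasks in a max-heap. compareTasks and OrderedTask are illustrative names, not real exports, and the actual scheduler may combine these fields differently.

import type { PrefetchTask } from './scheduler'

type OrderedTask = Pick<PrefetchTask, 'priority' | 'phase' | 'sortId'>

// A negative result means `a` should be processed before `b`.
function compareTasks(a: OrderedTask, b: OrderedTask): number {
  // Higher PrefetchPriority values run first (Intent > Default > Background).
  if (a.priority !== b.priority) return b.priority - a.priority
  // Route-tree work (phase 1) precedes segment work (phase 0).
  if (a.phase !== b.phase) return b.phase - a.phase
  // Newer tasks (larger sortId) win, so fresh links are not starved by stale ones.
  return b.sortId - a.sortId
}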

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,43 @@
/**
* Shared types and constants for the Segment Cache.
*/
export declare const enum NavigationResultTag {
MPA = 0,
Success = 1,
NoOp = 2,
Async = 3
}
/**
* The priority of the prefetch task. Higher numbers are higher priority.
*/
export declare const enum PrefetchPriority {
/**
* Assigned to the most recently hovered/touched link. Special network
* bandwidth is reserved for this task only. There's only ever one Intent-
* priority task at a time; when a new Intent task is scheduled, the previous
* one is bumped down to Default.
*/
Intent = 2,
/**
* The default priority for prefetch tasks.
*/
Default = 1,
/**
* Assigned to tasks when they spawn non-blocking background work, like
* revalidating a partially cached entry to see if more data is available.
*/
Background = 0
}
export declare const enum FetchStrategy {
LoadingBoundary = 0,
PPR = 1,
PPRRuntime = 2,
Full = 3
}
/**
* A subset of fetch strategies used for prefetch tasks.
* A prefetch task can't know if it should use `PPR` or `LoadingBoundary`
* until we complete the initial tree prefetch request, so we use `PPR` to signal both cases
* and adjust it based on the route when actually fetching.
 */
export type PrefetchTaskFetchStrategy = FetchStrategy.PPR | FetchStrategy.PPRRuntime | FetchStrategy.Full;
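
A small hedged sketch of the adjustment described in the comment above: the task is scheduled with PPR, and only once the initial tree prefetch reveals whether the route supports PPR is the strategy resolved for the actual fetch. resolveFetchStrategy and routeSupportsPPR are hypothetical names introduced for illustration.

import { FetchStrategy, type PrefetchTaskFetchStrategy } from './types'

function resolveFetchStrategy(
  requested: PrefetchTaskFetchStrategy,
  routeSupportsPPR: boolean
): FetchStrategy {
  if (requested === FetchStrategy.PPR && !routeSupportsPPR) {
    // Without PPR, only prefetch up to the first loading boundary.
    return FetchStrategy.LoadingBoundary
  }
  return requested
}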

View File

@@ -0,0 +1,69 @@
/**
* Shared types and constants for the Segment Cache.
*/ "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
FetchStrategy: null,
NavigationResultTag: null,
PrefetchPriority: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
FetchStrategy: function() {
return FetchStrategy;
},
NavigationResultTag: function() {
return NavigationResultTag;
},
PrefetchPriority: function() {
return PrefetchPriority;
}
});
var NavigationResultTag = /*#__PURE__*/ function(NavigationResultTag) {
NavigationResultTag[NavigationResultTag["MPA"] = 0] = "MPA";
NavigationResultTag[NavigationResultTag["Success"] = 1] = "Success";
NavigationResultTag[NavigationResultTag["NoOp"] = 2] = "NoOp";
NavigationResultTag[NavigationResultTag["Async"] = 3] = "Async";
return NavigationResultTag;
}({});
var PrefetchPriority = /*#__PURE__*/ function(PrefetchPriority) {
/**
* Assigned to the most recently hovered/touched link. Special network
* bandwidth is reserved for this task only. There's only ever one Intent-
* priority task at a time; when a new Intent task is scheduled, the previous
* one is bumped down to Default.
*/ PrefetchPriority[PrefetchPriority["Intent"] = 2] = "Intent";
/**
* The default priority for prefetch tasks.
*/ PrefetchPriority[PrefetchPriority["Default"] = 1] = "Default";
/**
* Assigned to tasks when they spawn non-blocking background work, like
* revalidating a partially cached entry to see if more data is available.
*/ PrefetchPriority[PrefetchPriority["Background"] = 0] = "Background";
return PrefetchPriority;
}({});
var FetchStrategy = /*#__PURE__*/ function(FetchStrategy) {
// Deliberately ordered so we can easily compare two segments
// and determine if one segment is "more specific" than another
// (i.e. if it's likely that it contains more data)
FetchStrategy[FetchStrategy["LoadingBoundary"] = 0] = "LoadingBoundary";
FetchStrategy[FetchStrategy["PPR"] = 1] = "PPR";
FetchStrategy[FetchStrategy["PPRRuntime"] = 2] = "PPRRuntime";
FetchStrategy[FetchStrategy["Full"] = 3] = "Full";
return FetchStrategy;
}({});
if ((typeof exports.default === 'function' || (typeof exports.default === 'object' && exports.default !== null)) && typeof exports.default.__esModule === 'undefined') {
Object.defineProperty(exports.default, '__esModule', { value: true });
Object.assign(exports.default, exports);
module.exports = exports.default;
}
//# sourceMappingURL=types.js.map
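
Because the FetchStrategy values are deliberately ordered by how much data a response fetched with them is likely to contain, one plausible use is deciding whether a cached entry is at least as complete as what a new request needs, as a plain numeric comparison. A hedged helper sketch; isAtLeastAsSpecific is not a real export of this module.

import { FetchStrategy } from './types'

function isAtLeastAsSpecific(cached: FetchStrategy, requested: FetchStrategy): boolean {
  // LoadingBoundary (0) < PPR (1) < PPRRuntime (2) < Full (3)
  return cached >= requested
}

console.log(isAtLeastAsSpecific(FetchStrategy.Full, FetchStrategy.PPR))            // true: a full response covers a static prefetch
console.log(isAtLeastAsSpecific(FetchStrategy.LoadingBoundary, FetchStrategy.PPR)) // false: the entry would need refetching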

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/client/components/segment-cache/types.ts"],"sourcesContent":["/**\n * Shared types and constants for the Segment Cache.\n */\n\nexport const enum NavigationResultTag {\n MPA,\n Success,\n NoOp,\n Async,\n}\n\n/**\n * The priority of the prefetch task. Higher numbers are higher priority.\n */\nexport const enum PrefetchPriority {\n /**\n * Assigned to the most recently hovered/touched link. Special network\n * bandwidth is reserved for this task only. There's only ever one Intent-\n * priority task at a time; when a new Intent task is scheduled, the previous\n * one is bumped down to Default.\n */\n Intent = 2,\n /**\n * The default priority for prefetch tasks.\n */\n Default = 1,\n /**\n * Assigned to tasks when they spawn non-blocking background work, like\n * revalidating a partially cached entry to see if more data is available.\n */\n Background = 0,\n}\n\nexport const enum FetchStrategy {\n // Deliberately ordered so we can easily compare two segments\n // and determine if one segment is \"more specific\" than another\n // (i.e. if it's likely that it contains more data)\n LoadingBoundary = 0,\n PPR = 1,\n PPRRuntime = 2,\n Full = 3,\n}\n\n/**\n * A subset of fetch strategies used for prefetch tasks.\n * A prefetch task can't know if it should use `PPR` or `LoadingBoundary`\n * until we complete the initial tree prefetch request, so we use `PPR` to signal both cases\n * and adjust it based on the route when actually fetching.\n * */\nexport type PrefetchTaskFetchStrategy =\n | FetchStrategy.PPR\n | FetchStrategy.PPRRuntime\n | FetchStrategy.Full\n"],"names":["FetchStrategy","NavigationResultTag","PrefetchPriority"],"mappings":"AAAA;;CAEC;;;;;;;;;;;;;;;;IA+BiBA,aAAa;eAAbA;;IA7BAC,mBAAmB;eAAnBA;;IAUAC,gBAAgB;eAAhBA;;;AAVX,IAAA,AAAWD,6CAAAA;;;;;WAAAA;;AAUX,IAAA,AAAWC,0CAAAA;IAChB;;;;;GAKC;IAED;;GAEC;IAED;;;GAGC;WAfeA;;AAmBX,IAAA,AAAWF,uCAAAA;IAChB,6DAA6D;IAC7D,+DAA+D;IAC/D,mDAAmD;;;;;WAHnCA","ignoreList":[0]}

View File

@@ -0,0 +1,58 @@
import { FetchStrategy } from './types';
import type { NormalizedPathname, NormalizedSearch, NormalizedNextUrl } from './cache-key';
import type { RouteTree } from './cache';
import { type FallbackType } from './cache-map';
type Opaque<T, K> = T & {
__brand: K;
};
/**
* A linked-list of all the params (or other param-like) inputs that a cache
* entry may vary by. This is used by the CacheMap module to reuse cache entries
* across different param values. If a param has a value of Fallback, it means
* the cache entry is reusable for all possible values of that param. See
* cache-map.ts for details.
*
* A segment's vary path is a pure function of a segment's position in a
* particular route tree and the (post-rewrite) URL that is being queried. More
* concretely, successive queries of the cache for the same segment always use
* the same vary path.
*
 * A route's vary path is simpler: it consists of the pathname, search
* string, and Next-URL header.
*/
export type VaryPath = {
value: string | null | FallbackType;
parent: VaryPath | null;
};
export type RouteVaryPath = Opaque<{
value: NormalizedPathname;
parent: {
value: NormalizedSearch;
parent: {
value: NormalizedNextUrl | null | FallbackType;
parent: null;
};
};
}, 'RouteVaryPath'>;
export type LayoutVaryPath = Opaque<{
value: string;
parent: PartialSegmentVaryPath | null;
}, 'LayoutVaryPath'>;
export type PageVaryPath = Opaque<{
value: string;
parent: {
value: NormalizedSearch | FallbackType;
parent: PartialSegmentVaryPath | null;
};
}, 'PageVaryPath'>;
export type SegmentVaryPath = LayoutVaryPath | PageVaryPath;
export type PartialSegmentVaryPath = Opaque<VaryPath, 'PartialSegmentVaryPath'>;
export declare function getRouteVaryPath(pathname: NormalizedPathname, search: NormalizedSearch, nextUrl: NormalizedNextUrl | null): RouteVaryPath;
export declare function getFulfilledRouteVaryPath(pathname: NormalizedPathname, search: NormalizedSearch, nextUrl: NormalizedNextUrl | null, couldBeIntercepted: boolean): RouteVaryPath;
export declare function appendLayoutVaryPath(parentPath: PartialSegmentVaryPath | null, cacheKey: string): PartialSegmentVaryPath;
export declare function finalizeLayoutVaryPath(requestKey: string, varyPath: PartialSegmentVaryPath | null): LayoutVaryPath;
export declare function finalizePageVaryPath(requestKey: string, renderedSearch: NormalizedSearch, varyPath: PartialSegmentVaryPath | null): PageVaryPath;
export declare function finalizeMetadataVaryPath(pageRequestKey: string, renderedSearch: NormalizedSearch, varyPath: PartialSegmentVaryPath | null): PageVaryPath;
export declare function getSegmentVaryPathForRequest(fetchStrategy: FetchStrategy, tree: RouteTree): SegmentVaryPath;
export declare function clonePageVaryPathWithNewSearchParams(originalVaryPath: PageVaryPath, newSearch: NormalizedSearch): PageVaryPath;
export {};
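
To make the linked-list shape concrete, here is a hedged literal for the route vary path of a hypothetical query for /blog/hello?tab=comments with no Next-URL header. The literal values and the cast exist only for illustration; real code builds this via getRouteVaryPath from already-normalized inputs.

import type { RouteVaryPath } from './vary-path'

const exampleRouteVaryPath = {
  value: '/blog/hello',       // NormalizedPathname
  parent: {
    value: '?tab=comments',   // NormalizedSearch (URL.search keeps the leading '?')
    parent: {
      value: null,            // NormalizedNextUrl | null | FallbackType
      parent: null,
    },
  },
} as unknown as RouteVaryPath

// Replacing the nextUrl link's value with Fallback would mark the entry as
// reusable for any Next-URL header, which is what getFulfilledRouteVaryPath
// does when the route could not be intercepted.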

View File

@@ -0,0 +1,216 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
appendLayoutVaryPath: null,
clonePageVaryPathWithNewSearchParams: null,
finalizeLayoutVaryPath: null,
finalizeMetadataVaryPath: null,
finalizePageVaryPath: null,
getFulfilledRouteVaryPath: null,
getRouteVaryPath: null,
getSegmentVaryPathForRequest: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
appendLayoutVaryPath: function() {
return appendLayoutVaryPath;
},
clonePageVaryPathWithNewSearchParams: function() {
return clonePageVaryPathWithNewSearchParams;
},
finalizeLayoutVaryPath: function() {
return finalizeLayoutVaryPath;
},
finalizeMetadataVaryPath: function() {
return finalizeMetadataVaryPath;
},
finalizePageVaryPath: function() {
return finalizePageVaryPath;
},
getFulfilledRouteVaryPath: function() {
return getFulfilledRouteVaryPath;
},
getRouteVaryPath: function() {
return getRouteVaryPath;
},
getSegmentVaryPathForRequest: function() {
return getSegmentVaryPathForRequest;
}
});
const _types = require("./types");
const _cachemap = require("./cache-map");
const _segmentvalueencoding = require("../../../shared/lib/segment-cache/segment-value-encoding");
function getRouteVaryPath(pathname, search, nextUrl) {
// requestKey -> searchParams -> nextUrl
const varyPath = {
value: pathname,
parent: {
value: search,
parent: {
value: nextUrl,
parent: null
}
}
};
return varyPath;
}
function getFulfilledRouteVaryPath(pathname, search, nextUrl, couldBeIntercepted) {
// This is called when a route's data is fulfilled. The cache entry will be
// re-keyed based on which inputs the response varies by.
// requestKey -> searchParams -> nextUrl
const varyPath = {
value: pathname,
parent: {
value: search,
parent: {
value: couldBeIntercepted ? nextUrl : _cachemap.Fallback,
parent: null
}
}
};
return varyPath;
}
function appendLayoutVaryPath(parentPath, cacheKey) {
const varyPathPart = {
value: cacheKey,
parent: parentPath
};
return varyPathPart;
}
function finalizeLayoutVaryPath(requestKey, varyPath) {
const layoutVaryPath = {
value: requestKey,
parent: varyPath
};
return layoutVaryPath;
}
function finalizePageVaryPath(requestKey, renderedSearch, varyPath) {
// Unlike layouts, a page segment's vary path also includes the search string.
// requestKey -> searchParams -> pathParams
const pageVaryPath = {
value: requestKey,
parent: {
value: renderedSearch,
parent: varyPath
}
};
return pageVaryPath;
}
function finalizeMetadataVaryPath(pageRequestKey, renderedSearch, varyPath) {
// The metadata "segment" is not a real segment because it doesn't exist in
// the normal structure of the route tree, but in terms of caching, it
// behaves like a page segment because it varies by all the same params as
// a page.
//
// To keep the protocol for querying the server simple, the request key for
// the metadata does not include any path information. It's unnecessary from
// the server's perspective, because unlike page segments, there's only one
// metadata response per URL, i.e. there's no need to distinguish multiple
// parallel pages.
//
// However, this means the metadata request key is insufficient for
    // caching the metadata in the client cache, because on the client we
    // use the request key to distinguish the metadata entry from all other
    // pages' metadata entries.
//
// So instead we create a simulated request key based on the page segment.
// Conceptually this is equivalent to the request key the server would have
// assigned the metadata segment if it treated it as part of the actual
// route structure.
// If there are multiple parallel pages, we use whichever is the first one.
    // This is fine because the only differences between request keys for
// different parallel pages are things like route groups and parallel
// route slots. As long as it's always the same one, it doesn't matter.
const pageVaryPath = {
// Append the actual metadata request key to the page request key. Note
// that we're not using a separate vary path part; it's unnecessary because
// these are not conceptually separate inputs.
value: pageRequestKey + _segmentvalueencoding.HEAD_REQUEST_KEY,
parent: {
value: renderedSearch,
parent: varyPath
}
};
return pageVaryPath;
}
function getSegmentVaryPathForRequest(fetchStrategy, tree) {
// This is used for storing pending requests in the cache. We want to choose
// the most generic vary path based on the strategy used to fetch it, i.e.
// static/PPR versus runtime prefetching, so that it can be reused as much
// as possible.
//
// We may be able to re-key the response to something even more generic once
// we receive it — for example, if the server tells us that the response
// doesn't vary on a particular param — but even before we send the request,
// we know some params are reusable based on the fetch strategy alone. For
// example, a static prefetch will never vary on search params.
//
// The original vary path with all the params filled in is stored on the
// route tree object. We will clone this one to create a new vary path
// where certain params are replaced with Fallback.
//
    // The result of this function is not stored anywhere. It's only used to
// access the cache a single time.
//
// TODO: Rather than create a new list object just to access the cache, the
// plan is to add the concept of a "vary mask". This will represent all the
// params that can be treated as Fallback. (Or perhaps the inverse.)
const originalVaryPath = tree.varyPath;
// Only page segments (and the special "metadata" segment, which is treated
// like a page segment for the purposes of caching) may contain search
// params. There's no reason to include them in the vary path otherwise.
if (tree.isPage) {
// Only a runtime prefetch will include search params in the vary path.
// Static prefetches never include search params, so they can be reused
// across all possible search param values.
const doesVaryOnSearchParams = fetchStrategy === _types.FetchStrategy.Full || fetchStrategy === _types.FetchStrategy.PPRRuntime;
if (!doesVaryOnSearchParams) {
            // The response from the server will not vary on search params. Clone
// the end of the original vary path to replace the search params
// with Fallback.
//
// requestKey -> searchParams -> pathParams
// ^ This part gets replaced with Fallback
const searchParamsVaryPath = originalVaryPath.parent;
const pathParamsVaryPath = searchParamsVaryPath.parent;
const patchedVaryPath = {
value: originalVaryPath.value,
parent: {
value: _cachemap.Fallback,
parent: pathParamsVaryPath
}
};
return patchedVaryPath;
}
}
// The request does vary on search params. We don't need to modify anything.
return originalVaryPath;
}
function clonePageVaryPathWithNewSearchParams(originalVaryPath, newSearch) {
// requestKey -> searchParams -> pathParams
// ^ This part gets replaced with newSearch
const searchParamsVaryPath = originalVaryPath.parent;
const clonedVaryPath = {
value: originalVaryPath.value,
parent: {
value: newSearch,
parent: searchParamsVaryPath.parent
}
};
return clonedVaryPath;
}
if ((typeof exports.default === 'function' || (typeof exports.default === 'object' && exports.default !== null)) && typeof exports.default.__esModule === 'undefined') {
Object.defineProperty(exports.default, '__esModule', { value: true });
Object.assign(exports.default, exports);
module.exports = exports.default;
}
//# sourceMappingURL=vary-path.js.map
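
A hedged sketch of the Fallback substitution performed above for a page segment fetched with the static PPR strategy. The pageTree literal and its request key are made up for illustration, and the cast exists only to satisfy the opaque types; a Full or PPRRuntime strategy would return the original vary path unchanged.

import { FetchStrategy } from './types'
import { getSegmentVaryPathForRequest } from './vary-path'
import type { RouteTree } from './cache'

const pageTree = {
  isPage: true,
  varyPath: {
    value: '/blog/[slug]/__PAGE__',                 // illustrative request key
    parent: {
      value: '?tab=comments',                       // searchParams
      parent: { value: 'hello', parent: null },     // path params
    },
  },
} as unknown as RouteTree

const staticVaryPath = getSegmentVaryPathForRequest(FetchStrategy.PPR, pageTree)
// For the static prefetch, staticVaryPath keeps the request key but its
// searchParams link becomes Fallback, so the cached entry can be reused for
// any search string. getSegmentVaryPathForRequest(FetchStrategy.Full, pageTree)
// would simply return pageTree.varyPath.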

File diff suppressed because one or more lines are too long