// NOTE(review): The lines below appear to be paste artifacts — a commit
// summary (about unrelated blog routing) plus repeated file-viewer metadata.
// They are not JavaScript; commented out so the file stays parseable.
// Verify and remove.
// - Introduced blog routing using Next.js App Router - Implemented dynamic [slug] pages for blog posts - Added MDX-based content loading via lib/posts - Integrated shared TopBar layout with navigation - Established clear content, lib and component separation
// 276 lines
// 14 KiB
// JavaScript
// 276 lines
// 14 KiB
// JavaScript
/* eslint-disable @next/internal/no-ambiguous-jsx -- Bundled in entry-base so it gets the right JSX runtime. */ "use strict";
// Transpiled CommonJS module. Marks itself as ES-module interop output and
// exposes a single named export, `collectSegmentData` (defined below).
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "collectSegmentData", {
    enumerable: true,
    get: function() {
        return collectSegmentData;
    }
});
// React Flight (RSC) encode/decode entry points plus Next.js-internal helpers
// for streams, scheduling, segment-cache key encoding, and error reporting.
const _jsxruntime = require("react/jsx-runtime");
const _client = require("react-server-dom-webpack/client");
const _static = require("react-server-dom-webpack/static");
const _nodewebstreamshelper = require("../stream-utils/node-web-streams-helper");
const _scheduler = require("../../lib/scheduler");
const _segmentvalueencoding = require("../../shared/lib/segment-cache/segment-value-encoding");
const _createerrorhandler = require("./create-error-handler");
const _prospectiverenderutils = require("./prospective-render-utils");
const _workasyncstorageexternal = require("./work-async-storage.external");
// Source-map helpers are only wired up outside production; in production both
// stay `undefined` so the Flight renderer skips the extra stack-frame work.
const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').filterStackFrameDEV : undefined;
const findSourceMapURL = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').findSourceMapURLDEV : undefined;
/**
 * onError handler shared by the Flight prerenders in this module.
 *
 * Returns a digest for well-known errors so React can tag them in the stream.
 * Other errors are not logged here by default — they were already reported
 * when the original full-page Flight stream was generated — but verbose/debug
 * builds print the thrown value with its route for diagnosis.
 */
function onSegmentPrerenderError(error) {
    const knownDigest = (0, _createerrorhandler.getDigestForWellKnownError)(error);
    if (knownDigest) {
        return knownDigest;
    }
    const verbose = process.env.NEXT_DEBUG_BUILD || process.env.__NEXT_VERBOSE_LOGGING;
    if (!verbose) {
        return;
    }
    const store = _workasyncstorageexternal.workAsyncStorage.getStore();
    const routeLabel = store?.route ?? 'unknown route';
    (0, _prospectiverenderutils.printDebugThrownValueForProspectiveRender)(error, routeLabel, _prospectiverenderutils.Phase.SegmentCollection);
}
/**
 * Traverses the prerendered page's router tree and generates an individual
 * prefetch response (Flight stream) for every segment.
 *
 * @param isCacheComponentsEnabled whether cacheComponents (client-side param
 *        parsing) is on; when true, param keys are omitted from the tree.
 * @param fullPageDataBuffer the full page's prerendered Flight data.
 * @param staleTime stale time embedded in the `/_tree` prefetch payload.
 * @param clientModules client reference manifest used for Flight encoding.
 * @param serverConsumerManifest manifest used to decode the page stream.
 * @returns a Map from segment path ('/_tree', '/_full', '/_index', or a
 *          segment request key) to that segment's encoded buffer.
 */
async function collectSegmentData(isCacheComponentsEnabled, fullPageDataBuffer, staleTime, clientModules, serverConsumerManifest) {
    // Traverse the router tree and generate a prefetch response for each segment.
    // A mutable map to collect the results as we traverse the route tree.
    const resultMap = new Map();
    // Before we start, warm up the module cache by decoding the page data once.
    // Then we can assume that any remaining async tasks that occur the next time
    // are due to hanging promises caused by dynamic data access. Note we only
    // have to do this once per page, not per individual segment.
    try {
        await (0, _client.createFromReadableStream)((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer), {
            findSourceMapURL,
            serverConsumerManifest
        });
        await (0, _scheduler.waitAtLeastOneReactRenderTask)();
    } catch {
    // Best-effort warm-up: decode errors are deliberately ignored here — any
    // real error was already surfaced when the original stream was generated.
    }
    // Create an abort controller that we'll use to stop the stream.
    const abortController = new AbortController();
    const onCompletedProcessingRouteTree = async ()=>{
        // Since all we're doing is decoding and re-encoding a cached prerender, if
        // serializing the stream takes longer than a microtask, it must be because
        // of hanging promises caused by dynamic data.
        await (0, _scheduler.waitAtLeastOneReactRenderTask)();
        abortController.abort();
    };
    // Generate a stream for the route tree prefetch. While we're walking the
    // tree, we'll also spawn additional tasks to generate the segment prefetches.
    // The promises for these tasks are pushed to a mutable array that we will
    // await once the route tree is fully rendered.
    const segmentTasks = [];
    const { prelude: treeStream } = await (0, _static.prerender)(// RootTreePrefetch is not a valid return type for a React component, but
    // we need to use a component so that when we decode the original stream
    // inside of it, the side effects are transferred to the new stream.
    // @ts-expect-error
    /*#__PURE__*/ (0, _jsxruntime.jsx)(PrefetchTreeData, {
        isClientParamParsingEnabled: isCacheComponentsEnabled,
        fullPageDataBuffer: fullPageDataBuffer,
        serverConsumerManifest: serverConsumerManifest,
        clientModules: clientModules,
        staleTime: staleTime,
        segmentTasks: segmentTasks,
        onCompletedProcessingRouteTree: onCompletedProcessingRouteTree
    }), clientModules, {
        filterStackFrame,
        signal: abortController.signal,
        onError: onSegmentPrerenderError
    });
    // Write the route tree to a special `/_tree` segment.
    const treeBuffer = await (0, _nodewebstreamshelper.streamToBuffer)(treeStream);
    resultMap.set('/_tree', treeBuffer);
    // Also output the entire full page data response under `/_full`.
    resultMap.set('/_full', fullPageDataBuffer);
    // Now that we've finished rendering the route tree, all the segment tasks
    // should have been spawned. Await them in parallel and write the segment
    // prefetches to the result map.
    for (const [segmentPath, buffer] of (await Promise.all(segmentTasks))){
        resultMap.set(segmentPath, buffer);
    }
    return resultMap;
}
/**
 * React Server Component used only to render the `/_tree` prefetch payload.
 *
 * Decoding the full-page Flight stream *inside* this component is a hack to
 * transfer the original stream's side effects (e.g. Float preloads) onto the
 * new tree-prefetch stream. While walking the decoded route tree it also
 * pushes one render task per segment (plus one for the head) onto
 * `segmentTasks`, then calls `onCompletedProcessingRouteTree` so the caller
 * can schedule the abort.
 *
 * Returns the RootTreePrefetch object ({ buildId, tree, staleTime }), or
 * null when the decoded payload does not have the expected shape.
 */
async function PrefetchTreeData({ isClientParamParsingEnabled, fullPageDataBuffer, serverConsumerManifest, clientModules, staleTime, segmentTasks, onCompletedProcessingRouteTree }) {
    // We're currently rendering a Flight response for the route tree prefetch.
    // Inside this component, decode the Flight stream for the whole page. This is
    // a hack to transfer the side effects from the original Flight stream (e.g.
    // Float preloads) onto the Flight stream for the tree prefetch.
    // TODO: React needs a better way to do this. Needed for Server Actions, too.
    const initialRSCPayload = await (0, _client.createFromReadableStream)(createUnclosingPrefetchStream((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer)), {
        findSourceMapURL,
        serverConsumerManifest
    });
    const buildId = initialRSCPayload.b;
    // FlightDataPath is an unsound type, hence the additional checks.
    // NOTE(review): with `&&` this guard only fires when BOTH shape checks
    // fail; if the intent is "exactly one path of exactly three entries",
    // the condition should use `||`. Confirm against the payload producer
    // before changing.
    const flightDataPaths = initialRSCPayload.f;
    if (flightDataPaths.length !== 1 && flightDataPaths[0].length !== 3) {
        console.error('Internal Next.js error: InitialRSCPayload does not match the expected ' + 'shape for a prerendered page during segment prefetch generation.');
        return null;
    }
    // Unpack the single FlightDataPath entry: [routerState, seedData, head].
    const flightRouterState = flightDataPaths[0][0];
    const seedData = flightDataPaths[0][1];
    const head = flightDataPaths[0][2];
    // Compute the route metadata tree by traversing the FlightRouterState. As we
    // walk the tree, we will also spawn a task to produce a prefetch response for
    // each segment.
    const tree = collectSegmentDataImpl(isClientParamParsingEnabled, flightRouterState, buildId, seedData, clientModules, _segmentvalueencoding.ROOT_SEGMENT_REQUEST_KEY, segmentTasks);
    // Also spawn a task to produce a prefetch response for the "head" segment.
    // The head contains metadata, like the title; it's not really a route
    // segment, but it contains RSC data, so it's treated like a segment by
    // the client cache.
    segmentTasks.push((0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>renderSegmentPrefetch(buildId, head, null, _segmentvalueencoding.HEAD_REQUEST_KEY, clientModules)));
    // Notify the abort controller that we're done processing the route tree.
    // Anything async that happens after this point must be due to hanging
    // promises in the original stream.
    onCompletedProcessingRouteTree();
    // Render the route tree to a special `/_tree` segment.
    const treePrefetch = {
        buildId,
        tree,
        staleTime
    };
    return treePrefetch;
}
/**
 * Recursively walks a FlightRouterState node, producing the route-tree
 * metadata sent in the `/_tree` prefetch while spawning one render task per
 * segment that has seed data.
 *
 * Side effect: pushes onto `segmentTasks` a promise per rendered segment,
 * each resolving to a [segmentPath, buffer] pair.
 *
 * Returns this segment's metadata node (name, param info, child slots,
 * root-layout flag).
 */
function collectSegmentDataImpl(isClientParamParsingEnabled, route, buildId, seedData, clientModules, requestKey, segmentTasks) {
    const children = route[1];
    const seedDataChildren = seedData !== null ? seedData[1] : null;
    // Metadata for each parallel-route slot; stays null when the segment has
    // no children.
    let slots = null;
    for (const parallelRouteKey in children) {
        const childRoute = children[parallelRouteKey];
        const childSeedData = seedDataChildren !== null ? seedDataChildren[parallelRouteKey] : null;
        const childRequestKey = (0, _segmentvalueencoding.appendSegmentRequestKeyPart)(requestKey, parallelRouteKey, (0, _segmentvalueencoding.createSegmentRequestKeyPart)(childRoute[0]));
        const childMetadata = collectSegmentDataImpl(isClientParamParsingEnabled, childRoute, buildId, childSeedData, clientModules, childRequestKey, segmentTasks);
        (slots ??= {})[parallelRouteKey] = childMetadata;
    }
    const hasRuntimePrefetch = seedData !== null ? seedData[4] : false;
    if (seedData !== null) {
        // Spawn a task to write the segment data to a new Flight stream. Wait
        // one React render task first to escape the current rendering context.
        segmentTasks.push((0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>renderSegmentPrefetch(buildId, seedData[0], seedData[2], requestKey, clientModules)));
    }
    // When there is no seed data we skip the prefetch response but still
    // include the segment in the route tree. TODO: encode "missing" in the
    // tree so the client doesn't request it; shouldn't happen in practice.
    const segment = route[0];
    let name;
    let paramKey;
    let paramType;
    if (typeof segment === 'string') {
        // Static segment: the literal name doubles as the param key.
        name = segment;
        paramKey = segment;
        paramType = null;
    } else {
        // Dynamic segment tuple: [name, paramKey, paramType].
        [name, paramKey, paramType] = segment;
    }
    // Metadata about the segment, sent to the client as part of the
    // tree prefetch.
    return {
        name,
        paramType,
        // Omitted when cacheComponents (client param parsing) is enabled.
        paramKey: isClientParamParsingEnabled ? null : paramKey,
        hasRuntimePrefetch,
        slots,
        isRootLayout: route[4] === true
    };
}
/**
 * Encodes a single segment's RSC data (plus its `loading` boundary) as its
 * own Flight prefetch stream.
 *
 * Returns a [segmentPath, buffer] tuple; the root segment is written under
 * the special '/_index' path, all others under their request key.
 */
async function renderSegmentPrefetch(buildId, rsc, loading, requestKey, clientModules) {
    // Render the segment data to a stream.
    // In the future, this is where we can include additional metadata, like the
    // stale time and cache tags.
    const segmentPrefetch = {
        buildId,
        rsc,
        loading,
        // Computed in a separate prerender pass because the flag itself must
        // be embedded inside this stream.
        isPartial: await isPartialRSCData(rsc, clientModules)
    };
    // Since all we're doing is decoding and re-encoding a cached prerender, if
    // it takes longer than a microtask, it must be because of hanging promises
    // caused by dynamic data. Abort the stream at the end of the current task.
    const abortController = new AbortController();
    (0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>abortController.abort());
    const { prelude: segmentStream } = await (0, _static.prerender)(segmentPrefetch, clientModules, {
        filterStackFrame,
        signal: abortController.signal,
        onError: onSegmentPrerenderError
    });
    const segmentBuffer = await (0, _nodewebstreamshelper.streamToBuffer)(segmentStream);
    if (requestKey === _segmentvalueencoding.ROOT_SEGMENT_REQUEST_KEY) {
        // The root segment is addressed by the well-known '/_index' path.
        return [
            '/_index',
            segmentBuffer
        ];
    } else {
        return [
            requestKey,
            segmentBuffer
        ];
    }
}
/**
 * Detects whether `rsc` contains partial (dynamic-data-dependent) data by
 * racing a throwaway Flight prerender against one React render task.
 *
 * Dynamic data is encoded as a promise that never resolves, so if encoding
 * has not finished within one task the data must be partial.
 */
async function isPartialRSCData(rsc, clientModules) {
    // We can determine if a segment contains only partial data if it takes longer
    // than a task to encode, because dynamic data is encoded as an infinite
    // promise. We must do this in a separate Flight prerender from the one that
    // actually generates the prefetch stream because we need to include
    // `isPartial` in the stream itself.
    let isPartial = false;
    const abortController = new AbortController();
    (0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>{
        // If we haven't yet finished the outer task, then it must be because we
        // accessed dynamic data.
        isPartial = true;
        abortController.abort();
    });
    await (0, _static.prerender)(rsc, clientModules, {
        filterStackFrame,
        signal: abortController.signal,
        // Errors are intentionally swallowed: this pass only measures timing;
        // real errors surface in the main prefetch prerender.
        onError () {}
    });
    return isPartial;
}
/**
 * Wraps a Flight stream in a ReadableStream that forwards every chunk but
 * never closes.
 *
 * With PPR enabled, prefetch streams may contain references that never
 * resolve — that's how dynamic data access is encoded — which the Flight
 * client reifies into hanging promises that suspend during render. The
 * Flight implementation currently errors if the server closes the response
 * while such references are still pending, so as a workaround the wrapper
 * stream simply stays open (and therefore never errors).
 */
function createUnclosingPrefetchStream(originalFlightStream) {
    const reader = originalFlightStream.getReader();
    return new ReadableStream({
        async pull(controller) {
            // Drain the source in one go, forwarding each chunk downstream.
            let result = await reader.read();
            while (!result.done) {
                controller.enqueue(result.value);
                result = await reader.read();
            }
            // Source exhausted. Deliberately do NOT call controller.close():
            // the target stream must stay open so pending Flight references
            // don't reject on the consumer side.
        }
    });
}
//# sourceMappingURL=collect-segment-data.js.map
|