feat(blog): add file-based blog with dynamic slugs, MDX content and layout shell

- Introduced blog routing using Next.js App Router
- Implemented dynamic [slug] pages for blog posts
- Added MDX-based content loading via lib/posts
- Integrated shared TopBar layout with navigation
- Established clear content, lib and component separation
This commit is contained in:
PascalSchattenburg
2026-01-22 14:14:15 +01:00
parent b717952234
commit d147843c76
10412 changed files with 2475583 additions and 0 deletions

View File

@@ -0,0 +1,4 @@
import { createAsyncLocalStorage } from './async-local-storage';
// Module-level singleton: created once at module load and shared by every
// importer, so all consumers observe the same async-local-storage instance.
export const actionAsyncStorageInstance = createAsyncLocalStorage();
//# sourceMappingURL=action-async-storage-instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/action-async-storage-instance.ts"],"sourcesContent":["import type { ActionAsyncStorage } from './action-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const actionAsyncStorageInstance: ActionAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["createAsyncLocalStorage","actionAsyncStorageInstance"],"mappings":"AACA,SAASA,uBAAuB,QAAQ,wBAAuB;AAE/D,OAAO,MAAMC,6BACXD,0BAAyB","ignoreList":[0]}

View File

@@ -0,0 +1,7 @@
// Share the instance module in the next-shared layer
// (the import attribute below instructs Turbopack to place this module in the
// 'next-shared' transition so one shared instance is used across layers).
import { actionAsyncStorageInstance } from './action-async-storage-instance' with {
    'turbopack-transition': 'next-shared'
};
// Re-export the singleton under its public name.
export { actionAsyncStorageInstance as actionAsyncStorage };
//# sourceMappingURL=action-async-storage.external.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/action-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { actionAsyncStorageInstance } from './action-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nexport interface ActionStore {\n readonly isAction?: boolean\n readonly isAppRoute?: boolean\n}\n\nexport type ActionAsyncStorage = AsyncLocalStorage<ActionStore>\n\nexport { actionAsyncStorageInstance as actionAsyncStorage }\n"],"names":["actionAsyncStorageInstance","actionAsyncStorage"],"mappings":"AAEA,qDAAqD;AACrD,SAASA,0BAA0B,QAAQ,uCAAuC;IAAE,wBAAwB;AAAc,EAAC;AAQ3H,SAASA,8BAA8BC,kBAAkB,GAAE","ignoreList":[0]}

View File

@@ -0,0 +1,925 @@
import { RSC_HEADER, RSC_CONTENT_TYPE_HEADER, NEXT_ROUTER_STATE_TREE_HEADER, ACTION_HEADER, NEXT_ACTION_NOT_FOUND_HEADER, NEXT_ROUTER_PREFETCH_HEADER, NEXT_ROUTER_SEGMENT_PREFETCH_HEADER, NEXT_URL, NEXT_ACTION_REVALIDATED_HEADER } from '../../client/components/app-router-headers';
import { getAccessFallbackHTTPStatus, isHTTPAccessFallbackError } from '../../client/components/http-access-fallback/http-access-fallback';
import { getRedirectTypeFromError, getURLFromRedirectError } from '../../client/components/redirect';
import { isRedirectError } from '../../client/components/redirect-error';
import RenderResult from '../render-result';
import { FlightRenderResult } from './flight-render-result';
import { filterReqHeaders, actionsForbiddenHeaders } from '../lib/server-ipc/utils';
import { getModifiedCookieValues } from '../web/spec-extension/adapters/request-cookies';
import { JSON_CONTENT_TYPE_HEADER, NEXT_CACHE_REVALIDATED_TAGS_HEADER, NEXT_CACHE_REVALIDATE_TAG_TOKEN_HEADER } from '../../lib/constants';
import { getServerActionRequestMetadata } from '../lib/server-action-request-meta';
import { isCsrfOriginAllowed } from './csrf-protection';
import { warn } from '../../build/output/log';
import { RequestCookies, ResponseCookies } from '../web/spec-extension/cookies';
import { HeadersAdapter } from '../web/spec-extension/adapters/headers';
import { fromNodeOutgoingHttpHeaders } from '../web/utils';
import { selectWorkerForForwarding, getServerActionsManifest, getServerModuleMap } from './manifests-singleton';
import { isNodeNextRequest, isWebNextRequest } from '../base-http/helpers';
import { RedirectStatusCode } from '../../client/components/redirect-status-code';
import { synchronizeMutableCookies } from '../async-storage/request-store';
import { workUnitAsyncStorage } from '../app-render/work-unit-async-storage.external';
import { InvariantError } from '../../shared/lib/invariant-error';
import { executeRevalidates } from '../revalidation-utils';
import { getRequestMeta } from '../request-meta';
import { setCacheBustingSearchParam } from '../../client/components/router-reducer/set-cache-busting-search-param';
import { ActionDidNotRevalidate, ActionDidRevalidateStaticAndDynamic } from '../../shared/lib/action-revalidation-kind';
/**
 * Checks if the app has any server actions defined in any runtime.
 *
 * Consults the server-actions manifest and reports whether either the Node.js
 * or the Edge section contains at least one registered action id.
 */ function hasServerActions() {
    const manifest = getServerActionsManifest();
    return [
        manifest.node,
        manifest.edge
    ].some((runtimeActions)=>Object.keys(runtimeActions).length > 0);
}
// Flattens a Node.js incoming/outgoing header object into a plain
// string-to-string record: array values are comma-joined, non-string values
// are stringified, and `undefined` entries are dropped.
function nodeHeadersToRecord(headers) {
    const record = {};
    Object.entries(headers).forEach(([name, rawValue])=>{
        if (rawValue === undefined) {
            return;
        }
        record[name] = Array.isArray(rawValue) ? rawValue.join(', ') : `${rawValue}`;
    });
    return record;
}
// Builds the header set used when re-issuing an action request to another
// worker: merges the incoming request headers with any headers already queued
// on the outgoing response (response wins), reconciles cookies so response
// cookies overwrite or delete request cookies, and strips headers that must
// not be forwarded.
function getForwardedHeaders(req, res) {
    // Incoming request headers and their cookies.
    const requestHeaders = req.headers;
    const requestCookies = new RequestCookies(HeadersAdapter.from(requestHeaders));
    // Headers (and cookies) already set on the outgoing response.
    const responseHeaders = res.getHeaders();
    const responseCookies = new ResponseCookies(fromNodeOutgoingHttpHeaders(responseHeaders));
    // Merge both sides; response entries take precedence over request entries.
    const mergedHeaders = filterReqHeaders({
        ...nodeHeadersToRecord(requestHeaders),
        ...nodeHeadersToRecord(responseHeaders)
    }, actionsForbiddenHeaders);
    // Fold response cookies into the request cookies: a valueless cookie is a
    // deletion, anything else overwrites the request's value.
    for (const cookie of responseCookies.getAll()){
        if (cookie.value === undefined) {
            requestCookies.delete(cookie.name);
        } else {
            requestCookies.set(cookie);
        }
    }
    // Serialize the reconciled cookie set back into the merged header record.
    mergedHeaders['cookie'] = requestCookies.toString();
    // Hop-by-hop header; must not be forwarded to the next worker.
    delete mergedHeaders['transfer-encoding'];
    return new Headers(mergedHeaders);
}
// Sets the action-revalidation response header that tells the client router
// how much of its cache to invalidate after a server action ran.
function addRevalidationHeader(res, { workStore, requestStore }) {
    // If a tag was revalidated, the client router needs to invalidate all the
    // client router cache as they may be stale. And if a path was revalidated, the
    // client needs to invalidate all subtrees below that path.
    // TODO: Currently we don't send the specific tags or paths to the client,
    // we just send a flag indicating that all the static data on the client
    // should be invalidated. In the future, this will likely be a Bloom filter
    // or bitmask of some kind.
    // TODO-APP: Currently the prefetch cache doesn't have subtree information,
    // so we need to invalidate the entire cache if a path was revalidated.
    // TODO-APP: Currently paths are treated as tags, so the second element of the tuple
    // is always empty.
    const hasRevalidatedTags = Boolean(workStore.pendingRevalidatedTags?.length);
    const hasModifiedCookies = getModifiedCookieValues(requestStore.mutableCookies).length > 0;
    // Tags or cookies revalidated: invalidate both static and dynamic data.
    if (hasRevalidatedTags || hasModifiedCookies) {
        res.setHeader(NEXT_ACTION_REVALIDATED_HEADER, JSON.stringify(ActionDidRevalidateStaticAndDynamic));
        return;
    }
    // Otherwise, check for refresh() actions. This will invalidate only the
    // dynamic data.
    const { pathWasRevalidated } = workStore;
    if (pathWasRevalidated !== undefined && pathWasRevalidated !== ActionDidNotRevalidate) {
        res.setHeader(NEXT_ACTION_REVALIDATED_HEADER, JSON.stringify(pathWasRevalidated));
    }
}
/**
 * Forwards a server action request to a separate worker. Used when the requested action is not available in the current worker.
 *
 * Streams the original POST body to the worker at `workerPathname` and, when
 * the worker responds with an RSC payload, relays its headers (minus the
 * forbidden ones) and body back. On any failure, or a non-RSC response, it
 * falls back to an empty `{}` JSON result.
 */ async function createForwardedActionResponse(req, res, host, workerPathname, basePath) {
    var _getRequestMeta;
    if (!host) {
        throw Object.defineProperty(new Error('Invariant: Missing `host` header from a forwarded Server Actions request.'), "__NEXT_ERROR_CODE", {
            value: "E226",
            enumerable: false,
            configurable: true
        });
    }
    const forwardedHeaders = getForwardedHeaders(req, res);
    // indicate that this action request was forwarded from another worker
    // we use this to skip rendering the flight tree so that we don't update the UI
    // with the response from the forwarded worker
    forwardedHeaders.set('x-action-forwarded', '1');
    // Protocol from request metadata with any trailing ':' stripped; defaults
    // to https when not recorded.
    const proto = ((_getRequestMeta = getRequestMeta(req, 'initProtocol')) == null ? void 0 : _getRequestMeta.replace(/:+$/, '')) || 'https';
    // For standalone or the serverful mode, use the internal origin directly
    // other than the host headers from the request.
    const origin = process.env.__NEXT_PRIVATE_ORIGIN || `${proto}://${host.value}`;
    const fetchUrl = new URL(`${origin}${basePath}${workerPathname}`);
    try {
        var _response_headers_get;
        let body;
        if (// The type check here ensures that `req` is correctly typed, and the
        // environment variable check provides dead code elimination.
        process.env.NEXT_RUNTIME === 'edge' && isWebNextRequest(req)) {
            if (!req.body) {
                throw Object.defineProperty(new Error('Invariant: missing request body.'), "__NEXT_ERROR_CODE", {
                    value: "E333",
                    enumerable: false,
                    configurable: true
                });
            }
            body = req.body;
        } else if (// The type check here ensures that `req` is correctly typed, and the
        // environment variable check provides dead code elimination.
        process.env.NEXT_RUNTIME !== 'edge' && isNodeNextRequest(req)) {
            body = req.stream();
        } else {
            throw Object.defineProperty(new Error('Invariant: Unknown request type.'), "__NEXT_ERROR_CODE", {
                value: "E114",
                enumerable: false,
                configurable: true
            });
        }
        // Forward the request to the new worker
        const response = await fetch(fetchUrl, {
            method: 'POST',
            body,
            duplex: 'half',
            headers: forwardedHeaders,
            redirect: 'manual',
            next: {
                // @ts-ignore
                internal: 1
            }
        });
        // Only relay the worker's reply when it is an RSC (flight) payload.
        if ((_response_headers_get = response.headers.get('content-type')) == null ? void 0 : _response_headers_get.startsWith(RSC_CONTENT_TYPE_HEADER)) {
            // copy the headers from the redirect response to the response we're sending
            for (const [key, value] of response.headers){
                if (!actionsForbiddenHeaders.includes(key)) {
                    res.setHeader(key, value);
                }
            }
            return new FlightRenderResult(response.body);
        } else {
            var // Since we aren't consuming the response body, we cancel it to avoid memory leaks
            _response_body;
            (_response_body = response.body) == null ? void 0 : _response_body.cancel();
        }
    } catch (err) {
        // we couldn't stream the forwarded response, so we'll just return an empty response
        console.error(`failed to forward action response`, err);
    }
    // Fallback: empty JSON body signals that no usable action response was
    // produced by the forwarded worker.
    return RenderResult.fromStatic('{}', JSON_CONTENT_TYPE_HEADER);
}
/**
 * Returns the parsed redirect URL if we deem that it is hosted by us.
 *
 * We handle both relative and absolute redirect URLs.
 *
 * In case the redirect URL is not relative to the application we return `null`.
 */ function getAppRelativeRedirectUrl(basePath, host, redirectUrl, currentPathname) {
    // Placeholder origin — only the path/search/hash of the result matter.
    const DUMMY_ORIGIN = 'http://n';
    if (redirectUrl.startsWith('/')) {
        // App-absolute path: just prefix the configured basePath.
        return new URL(`${basePath}${redirectUrl}`, DUMMY_ORIGIN);
    }
    if (redirectUrl.startsWith('.')) {
        // Relative path: resolve against the current pathname. The base must
        // end with a slash so relative resolution works correctly, e.g.
        // "./subpage" from "/subdir" resolves to "/subdir/subpage",
        // not "/subpage".
        const rawBase = currentPathname || '/';
        const base = rawBase.endsWith('/') ? rawBase : `${rawBase}/`;
        const resolved = new URL(redirectUrl, `${DUMMY_ORIGIN}${base}`);
        // Include basePath in the final URL.
        return new URL(`${basePath}${resolved.pathname}${resolved.search}${resolved.hash}`, DUMMY_ORIGIN);
    }
    // Fully-qualified URL: it is only "ours" when the host matches and the
    // path lives under the configured basePath.
    const parsedRedirectUrl = new URL(redirectUrl);
    if (host?.value !== parsedRedirectUrl.host) {
        return null;
    }
    return parsedRedirectUrl.pathname.startsWith(basePath) ? parsedRedirectUrl : null;
}
/**
 * Produces the response for a server action that ended in a redirect.
 *
 * Always sets the `x-action-redirect` header so the client router can follow
 * the redirect. When the target is app-relative, it additionally attempts to
 * fetch the RSC payload for the destination in-process and stream it back,
 * saving the client an extra roundtrip; on any failure it returns an empty
 * result and lets the client perform a normal redirect.
 */ async function createRedirectRenderResult(req, res, originalHost, redirectUrl, redirectType, basePath, workStore, currentPathname) {
    res.setHeader('x-action-redirect', `${redirectUrl};${redirectType}`);
    // If we're redirecting to another route of this Next.js application, we'll
    // try to stream the response from the other worker path. When that works,
    // we can save an extra roundtrip and avoid a full page reload.
    // When the redirect URL starts with a `/` or is to the same host, under the
    // `basePath` we treat it as an app-relative redirect;
    const appRelativeRedirectUrl = getAppRelativeRedirectUrl(basePath, originalHost, redirectUrl, currentPathname);
    if (appRelativeRedirectUrl) {
        var _getRequestMeta;
        if (!originalHost) {
            throw Object.defineProperty(new Error('Invariant: Missing `host` header from a forwarded Server Actions request.'), "__NEXT_ERROR_CODE", {
                value: "E226",
                enumerable: false,
                configurable: true
            });
        }
        const forwardedHeaders = getForwardedHeaders(req, res);
        // Mark the follow-up request as an RSC request.
        forwardedHeaders.set(RSC_HEADER, '1');
        const proto = ((_getRequestMeta = getRequestMeta(req, 'initProtocol')) == null ? void 0 : _getRequestMeta.replace(/:+$/, '')) || 'https';
        // For standalone or the serverful mode, use the internal origin directly
        // other than the host headers from the request.
        const origin = process.env.__NEXT_PRIVATE_ORIGIN || `${proto}://${originalHost.value}`;
        const fetchUrl = new URL(`${origin}${appRelativeRedirectUrl.pathname}${appRelativeRedirectUrl.search}`);
        // Propagate pending tag revalidations to the follow-up request so the
        // destination render observes the fresh data.
        if (workStore.pendingRevalidatedTags) {
            var _workStore_incrementalCache_prerenderManifest_preview, _workStore_incrementalCache_prerenderManifest, _workStore_incrementalCache;
            forwardedHeaders.set(NEXT_CACHE_REVALIDATED_TAGS_HEADER, workStore.pendingRevalidatedTags.map((item)=>item.tag).join(','));
            forwardedHeaders.set(NEXT_CACHE_REVALIDATE_TAG_TOKEN_HEADER, ((_workStore_incrementalCache = workStore.incrementalCache) == null ? void 0 : (_workStore_incrementalCache_prerenderManifest = _workStore_incrementalCache.prerenderManifest) == null ? void 0 : (_workStore_incrementalCache_prerenderManifest_preview = _workStore_incrementalCache_prerenderManifest.preview) == null ? void 0 : _workStore_incrementalCache_prerenderManifest_preview.previewModeId) || '');
        }
        // Ensures that when the path was revalidated we don't return a partial response on redirects
        forwardedHeaders.delete(NEXT_ROUTER_STATE_TREE_HEADER);
        // When an action follows a redirect, it's no longer handling an action: it's just a normal RSC request
        // to the requested URL. We should remove the `next-action` header so that it's not treated as an action
        forwardedHeaders.delete(ACTION_HEADER);
        try {
            var _response_headers_get;
            setCacheBustingSearchParam(fetchUrl, {
                [NEXT_ROUTER_PREFETCH_HEADER]: forwardedHeaders.get(NEXT_ROUTER_PREFETCH_HEADER) ? '1' : undefined,
                [NEXT_ROUTER_SEGMENT_PREFETCH_HEADER]: forwardedHeaders.get(NEXT_ROUTER_SEGMENT_PREFETCH_HEADER) ?? undefined,
                [NEXT_ROUTER_STATE_TREE_HEADER]: forwardedHeaders.get(NEXT_ROUTER_STATE_TREE_HEADER) ?? undefined,
                [NEXT_URL]: forwardedHeaders.get(NEXT_URL) ?? undefined
            });
            const response = await fetch(fetchUrl, {
                method: 'GET',
                headers: forwardedHeaders,
                next: {
                    // @ts-ignore
                    internal: 1
                }
            });
            // Only stream the destination's reply when it is an RSC payload.
            if ((_response_headers_get = response.headers.get('content-type')) == null ? void 0 : _response_headers_get.startsWith(RSC_CONTENT_TYPE_HEADER)) {
                // copy the headers from the redirect response to the response we're sending
                for (const [key, value] of response.headers){
                    if (!actionsForbiddenHeaders.includes(key)) {
                        res.setHeader(key, value);
                    }
                }
                return new FlightRenderResult(response.body);
            } else {
                var // Since we aren't consuming the response body, we cancel it to avoid memory leaks
                _response_body;
                (_response_body = response.body) == null ? void 0 : _response_body.cancel();
            }
        } catch (err) {
            // we couldn't stream the redirect response, so we'll just do a normal redirect
            console.error(`failed to get redirect response`, err);
        }
    }
    // Empty body: the client follows the `x-action-redirect` header instead.
    return RenderResult.EMPTY;
}
/**
 * Ensures the value of the header can't create long logs.
 *
 * Values longer than 100 characters are truncated and suffixed with '...'.
 */ function limitUntrustedHeaderValueForLogs(value) {
    const MAX_LOGGED_LENGTH = 100;
    if (value.length <= MAX_LOGGED_LENGTH) {
        return value;
    }
    return `${value.slice(0, MAX_LOGGED_LENGTH)}...`;
}
// Extracts the effective host from the request headers, preferring
// `x-forwarded-host` (first entry when it holds a list) over `host`.
// When `originDomain` is given, only a host exactly matching it is returned;
// returns `undefined` when no acceptable host is present.
export function parseHostHeader(headers, originDomain) {
    const rawForwardedHost = headers['x-forwarded-host'];
    let forwardedHostHeaderValue;
    if (rawForwardedHost && Array.isArray(rawForwardedHost)) {
        forwardedHostHeaderValue = rawForwardedHost[0];
    } else {
        // A single header may still contain a comma-separated list; take the
        // first entry.
        forwardedHostHeaderValue = rawForwardedHost?.split(',')?.[0]?.trim();
    }
    const hostHeader = headers['host'];
    if (originDomain) {
        // Strict mode: the host must match the origin domain exactly.
        if (forwardedHostHeaderValue === originDomain) {
            return {
                type: "x-forwarded-host",
                value: forwardedHostHeaderValue
            };
        }
        if (hostHeader === originDomain) {
            return {
                type: "host",
                value: hostHeader
            };
        }
        return undefined;
    }
    // Lenient mode: return whichever header is present, forwarded host first.
    if (forwardedHostHeaderValue) {
        return {
            type: "x-forwarded-host",
            value: forwardedHostHeaderValue
        };
    }
    if (hostHeader) {
        return {
            type: "host",
            value: hostHeader
        };
    }
    return undefined;
}
export async function handleAction({ req, res, ComponentMod, generateFlight, workStore, requestStore, serverActions, ctx, metadata }) {
const contentType = req.headers['content-type'];
const { page } = ctx.renderOpts;
const serverModuleMap = getServerModuleMap();
const { actionId, isMultipartAction, isFetchAction, isURLEncodedAction, isPossibleServerAction } = getServerActionRequestMetadata(req);
const handleUnrecognizedFetchAction = (err)=>{
// If the deployment doesn't have skew protection, this is expected to occasionally happen,
// so we use a warning instead of an error.
console.warn(err);
// Return an empty response with a header that the client router will interpret.
// We don't need to waste time encoding a flight response, and using a blank body + header
// means that unrecognized actions can also be handled at the infra level
// (i.e. without needing to invoke a lambda)
res.setHeader(NEXT_ACTION_NOT_FOUND_HEADER, '1');
res.setHeader('content-type', 'text/plain');
res.statusCode = 404;
return {
type: 'done',
result: RenderResult.fromStatic('Server action not found.', 'text/plain')
};
};
// If it can't be a Server Action, skip handling.
// Note that this can be a false positive -- any multipart/urlencoded POST can get us here,
// But won't know if it's an MPA action or not until we call `decodeAction` below.
if (!isPossibleServerAction) {
return null;
}
// We don't currently support URL encoded actions, so we bail out early.
// Depending on if it's a fetch action or an MPA, we return a different response.
if (isURLEncodedAction) {
if (isFetchAction) {
return {
type: 'not-found'
};
} else {
// This is an MPA action, so we return null
return null;
}
}
// If the app has no server actions at all, we can 404 early.
if (!hasServerActions()) {
return handleUnrecognizedFetchAction(getActionNotFoundError(actionId));
}
if (workStore.isStaticGeneration) {
throw Object.defineProperty(new Error("Invariant: server actions can't be handled during static rendering"), "__NEXT_ERROR_CODE", {
value: "E359",
enumerable: false,
configurable: true
});
}
let temporaryReferences;
// When running actions the default is no-store, you can still `cache: 'force-cache'`
workStore.fetchCache = 'default-no-store';
const originHeader = req.headers['origin'];
const originDomain = typeof originHeader === 'string' && originHeader !== 'null' ? new URL(originHeader).host : undefined;
const host = parseHostHeader(req.headers);
let warning = undefined;
function warnBadServerActionRequest() {
if (warning) {
warn(warning);
}
}
// This is to prevent CSRF attacks. If `x-forwarded-host` is set, we need to
// ensure that the request is coming from the same host.
if (!originDomain) {
// This might be an old browser that doesn't send `host` header. We ignore
// this case.
warning = 'Missing `origin` header from a forwarded Server Actions request.';
} else if (!host || originDomain !== host.value) {
// If the customer sets a list of allowed origins, we'll allow the request.
// These are considered safe but might be different from forwarded host set
// by the infra (i.e. reverse proxies).
if (isCsrfOriginAllowed(originDomain, serverActions == null ? void 0 : serverActions.allowedOrigins)) {
// Ignore it
} else {
if (host) {
// This seems to be an CSRF attack. We should not proceed the action.
console.error(`\`${host.type}\` header with value \`${limitUntrustedHeaderValueForLogs(host.value)}\` does not match \`origin\` header with value \`${limitUntrustedHeaderValueForLogs(originDomain)}\` from a forwarded Server Actions request. Aborting the action.`);
} else {
// This is an attack. We should not proceed the action.
console.error(`\`x-forwarded-host\` or \`host\` headers are not provided. One of these is needed to compare the \`origin\` header from a forwarded Server Actions request. Aborting the action.`);
}
const error = Object.defineProperty(new Error('Invalid Server Actions request.'), "__NEXT_ERROR_CODE", {
value: "E80",
enumerable: false,
configurable: true
});
if (isFetchAction) {
res.statusCode = 500;
metadata.statusCode = 500;
const promise = Promise.reject(error);
try {
// we need to await the promise to trigger the rejection early
// so that it's already handled by the time we call
// the RSC runtime. Otherwise, it will throw an unhandled
// promise rejection error in the renderer.
await promise;
} catch {
// swallow error, it's gonna be handled on the client
}
return {
type: 'done',
result: await generateFlight(req, ctx, requestStore, {
actionResult: promise,
// We didn't execute an action, so no revalidations could have
// occurred. We can skip rendering the page.
skipPageRendering: true,
temporaryReferences
})
};
}
throw error;
}
}
// ensure we avoid caching server actions unexpectedly
res.setHeader('Cache-Control', 'no-cache, no-store, max-age=0, must-revalidate');
const { actionAsyncStorage } = ComponentMod;
const actionWasForwarded = Boolean(req.headers['x-action-forwarded']);
if (actionId) {
const forwardedWorker = selectWorkerForForwarding(actionId, page);
// If forwardedWorker is truthy, it means there isn't a worker for the action
// in the current handler, so we forward the request to a worker that has the action.
if (forwardedWorker) {
return {
type: 'done',
result: await createForwardedActionResponse(req, res, host, forwardedWorker, ctx.renderOpts.basePath)
};
}
}
try {
return await actionAsyncStorage.run({
isAction: true
}, async ()=>{
// We only use these for fetch actions -- MPA actions handle them inside `decodeAction`.
let actionModId;
let boundActionArguments = [];
if (// The type check here ensures that `req` is correctly typed, and the
// environment variable check provides dead code elimination.
process.env.NEXT_RUNTIME === 'edge' && isWebNextRequest(req)) {
if (!req.body) {
throw Object.defineProperty(new Error('invariant: Missing request body.'), "__NEXT_ERROR_CODE", {
value: "E364",
enumerable: false,
configurable: true
});
}
// TODO: add body limit
// Use react-server-dom-webpack/server
const { createTemporaryReferenceSet, decodeReply, decodeAction, decodeFormState } = ComponentMod;
temporaryReferences = createTemporaryReferenceSet();
if (isMultipartAction) {
// TODO-APP: Add streaming support
const formData = await req.request.formData();
if (isFetchAction) {
// A fetch action with a multipart body.
try {
actionModId = getActionModIdOrError(actionId, serverModuleMap);
} catch (err) {
return handleUnrecognizedFetchAction(err);
}
boundActionArguments = await decodeReply(formData, serverModuleMap, {
temporaryReferences
});
} else {
// Multipart POST, but not a fetch action.
// Potentially an MPA action, we have to try decoding it to check.
if (areAllActionIdsValid(formData, serverModuleMap) === false) {
// TODO: This can be from skew or manipulated input. We should handle this case
// more gracefully but this preserves the prior behavior where decodeAction would throw instead.
throw Object.defineProperty(new Error(`Failed to find Server Action. This request might be from an older or newer deployment.\nRead more: https://nextjs.org/docs/messages/failed-to-find-server-action`), "__NEXT_ERROR_CODE", {
value: "E975",
enumerable: false,
configurable: true
});
}
const action = await decodeAction(formData, serverModuleMap);
if (typeof action === 'function') {
// an MPA action.
// Only warn if it's a server action, otherwise skip for other post requests
warnBadServerActionRequest();
const { actionResult } = await executeActionAndPrepareForRender(action, [], workStore, requestStore, actionWasForwarded);
const formState = await decodeFormState(actionResult, formData, serverModuleMap);
// Skip the fetch path.
// We need to render a full HTML version of the page for the response, we'll handle that in app-render.
return {
type: 'done',
result: undefined,
formState
};
} else {
// We couldn't decode an action, so this POST request turned out not to be a server action request.
return null;
}
}
} else {
// POST with non-multipart body.
// If it's not multipart AND not a fetch action,
// then it can't be an action request.
if (!isFetchAction) {
return null;
}
try {
actionModId = getActionModIdOrError(actionId, serverModuleMap);
} catch (err) {
return handleUnrecognizedFetchAction(err);
}
// A fetch action with a non-multipart body.
// In practice, this happens if `encodeReply` returned a string instead of FormData,
// which can happen for very simple JSON-like values that don't need multiple flight rows.
const chunks = [];
const reader = req.body.getReader();
while(true){
const { done, value } = await reader.read();
if (done) {
break;
}
chunks.push(value);
}
const actionData = Buffer.concat(chunks).toString('utf-8');
boundActionArguments = await decodeReply(actionData, serverModuleMap, {
temporaryReferences
});
}
} else if (// The type check here ensures that `req` is correctly typed, and the
// environment variable check provides dead code elimination.
process.env.NEXT_RUNTIME !== 'edge' && isNodeNextRequest(req)) {
// Use react-server-dom-webpack/server.node which supports streaming
const { createTemporaryReferenceSet, decodeReply, decodeReplyFromBusboy, decodeAction, decodeFormState } = require(`./react-server.node`);
temporaryReferences = createTemporaryReferenceSet();
const { PassThrough, Readable, Transform } = require('node:stream');
const { pipeline } = require('node:stream/promises');
const defaultBodySizeLimit = '1 MB';
const bodySizeLimit = (serverActions == null ? void 0 : serverActions.bodySizeLimit) ?? defaultBodySizeLimit;
const bodySizeLimitBytes = bodySizeLimit !== defaultBodySizeLimit ? require('next/dist/compiled/bytes').parse(bodySizeLimit) : 1024 * 1024 // 1 MB
;
let size = 0;
const sizeLimitTransform = new Transform({
transform (chunk, encoding, callback) {
size += Buffer.byteLength(chunk, encoding);
if (size > bodySizeLimitBytes) {
const { ApiError } = require('../api-utils');
callback(Object.defineProperty(new ApiError(413, `Body exceeded ${bodySizeLimit} limit.\n` + `To configure the body size limit for Server Actions, see: https://nextjs.org/docs/app/api-reference/next-config-js/serverActions#bodysizelimit`), "__NEXT_ERROR_CODE", {
value: "E394",
enumerable: false,
configurable: true
}));
return;
}
callback(null, chunk);
}
});
if (isMultipartAction) {
if (isFetchAction) {
// A fetch action with a multipart body.
try {
actionModId = getActionModIdOrError(actionId, serverModuleMap);
} catch (err) {
return handleUnrecognizedFetchAction(err);
}
const busboy = require('next/dist/compiled/busboy')({
defParamCharset: 'utf8',
headers: req.headers,
limits: {
fieldSize: bodySizeLimitBytes
}
});
const abortController = new AbortController();
try {
;
[, boundActionArguments] = await Promise.all([
pipeline(req.body, sizeLimitTransform, busboy, {
signal: abortController.signal
}),
decodeReplyFromBusboy(busboy, serverModuleMap, {
temporaryReferences
})
]);
} catch (err) {
abortController.abort();
throw err;
}
} else {
// Multipart POST, but not a fetch action.
// Potentially an MPA action, we have to try decoding it to check.
const sizeLimitedBody = new PassThrough();
// React doesn't yet publish a busboy version of decodeAction
// so we polyfill the parsing of FormData.
const fakeRequest = new Request('http://localhost', {
method: 'POST',
// @ts-expect-error
headers: {
'Content-Type': contentType
},
body: Readable.toWeb(sizeLimitedBody),
duplex: 'half'
});
let formData;
const abortController = new AbortController();
try {
;
[, formData] = await Promise.all([
pipeline(req.body, sizeLimitTransform, sizeLimitedBody, {
signal: abortController.signal
}),
fakeRequest.formData()
]);
} catch (err) {
abortController.abort();
throw err;
}
if (areAllActionIdsValid(formData, serverModuleMap) === false) {
// TODO: This can be from skew or manipulated input. We should handle this case
// more gracefully but this preserves the prior behavior where decodeAction would throw instead.
throw Object.defineProperty(new Error(`Failed to find Server Action. This request might be from an older or newer deployment.\nRead more: https://nextjs.org/docs/messages/failed-to-find-server-action`), "__NEXT_ERROR_CODE", {
value: "E975",
enumerable: false,
configurable: true
});
}
// TODO: Refactor so it is harder to accidentally decode an action before you have validated that the
// action referred to is available.
const action = await decodeAction(formData, serverModuleMap);
if (typeof action === 'function') {
// an MPA action.
// Only warn if it's a server action, otherwise skip for other post requests
warnBadServerActionRequest();
const { actionResult } = await executeActionAndPrepareForRender(action, [], workStore, requestStore, actionWasForwarded);
const formState = await decodeFormState(actionResult, formData, serverModuleMap);
// Skip the fetch path.
// We need to render a full HTML version of the page for the response, we'll handle that in app-render.
return {
type: 'done',
result: undefined,
formState
};
} else {
// We couldn't decode an action, so this POST request turned out not to be a server action request.
return null;
}
}
} else {
// POST with non-multipart body.
// If it's not multipart AND not a fetch action,
// then it can't be an action request.
if (!isFetchAction) {
return null;
}
try {
actionModId = getActionModIdOrError(actionId, serverModuleMap);
} catch (err) {
return handleUnrecognizedFetchAction(err);
}
// A fetch action with a non-multipart body.
// In practice, this happens if `encodeReply` returned a string instead of FormData,
// which can happen for very simple JSON-like values that don't need multiple flight rows.
const sizeLimitedBody = new PassThrough();
const chunks = [];
await Promise.all([
pipeline(req.body, sizeLimitTransform, sizeLimitedBody),
(async ()=>{
for await (const chunk of sizeLimitedBody){
chunks.push(Buffer.from(chunk));
}
})()
]);
const actionData = Buffer.concat(chunks).toString('utf-8');
boundActionArguments = await decodeReply(actionData, serverModuleMap, {
temporaryReferences
});
}
} else {
throw Object.defineProperty(new Error('Invariant: Unknown request type.'), "__NEXT_ERROR_CODE", {
value: "E114",
enumerable: false,
configurable: true
});
}
// actions.js
// app/page.js
// action worker1
// appRender1
// app/foo/page.js
// action worker2
// appRender
// / -> fire action -> POST / -> appRender1 -> modId for the action file
// /foo -> fire action -> POST /foo -> appRender2 -> modId for the action file
const actionMod = await ComponentMod.__next_app__.require(actionModId);
const actionHandler = actionMod[// `actionId` must exist if we got here, as otherwise we would have thrown an error above
actionId];
const { actionResult, skipPageRendering } = await executeActionAndPrepareForRender(actionHandler, boundActionArguments, workStore, requestStore, actionWasForwarded).finally(()=>{
addRevalidationHeader(res, {
workStore,
requestStore
});
});
// For form actions, we need to continue rendering the page.
if (isFetchAction) {
return {
type: 'done',
result: await generateFlight(req, ctx, requestStore, {
actionResult: Promise.resolve(actionResult),
skipPageRendering,
temporaryReferences,
// If we skip page rendering, we need to ensure pending
// revalidates are awaited before closing the response. Otherwise,
// this will be done after rendering the page.
waitUntil: skipPageRendering ? executeRevalidates(workStore) : undefined
})
};
} else {
// TODO: this shouldn't be reachable, because all non-fetch codepaths return early.
// this will be handled in a follow-up refactor PR.
return null;
}
});
} catch (err) {
if (isRedirectError(err)) {
const redirectUrl = getURLFromRedirectError(err);
const redirectType = getRedirectTypeFromError(err);
// if it's a fetch action, we'll set the status code for logging/debugging purposes
// but we won't set a Location header, as the redirect will be handled by the client router
res.statusCode = RedirectStatusCode.SeeOther;
metadata.statusCode = RedirectStatusCode.SeeOther;
if (isFetchAction) {
return {
type: 'done',
result: await createRedirectRenderResult(req, res, host, redirectUrl, redirectType, ctx.renderOpts.basePath, workStore, requestStore.url.pathname)
};
}
// For an MPA action, the redirect doesn't need a body, just a Location header.
res.setHeader('Location', redirectUrl);
return {
type: 'done',
result: RenderResult.EMPTY
};
} else if (isHTTPAccessFallbackError(err)) {
res.statusCode = getAccessFallbackHTTPStatus(err);
metadata.statusCode = res.statusCode;
if (isFetchAction) {
const promise = Promise.reject(err);
try {
// we need to await the promise to trigger the rejection early
// so that it's already handled by the time we call
// the RSC runtime. Otherwise, it will throw an unhandled
// promise rejection error in the renderer.
await promise;
} catch {
// swallow error, it's gonna be handled on the client
}
return {
type: 'done',
result: await generateFlight(req, ctx, requestStore, {
skipPageRendering: false,
actionResult: promise,
temporaryReferences
})
};
}
// For an MPA action, we need to render a HTML response. We'll handle that in app-render.
return {
type: 'not-found'
};
}
// An error that didn't come from `redirect()` or `notFound()`, likely thrown from user code
// (but it could also be a bug in our code!)
if (isFetchAction) {
// TODO: consider checking if the error is an `ApiError` and change status code
// so that we can respond with a 413 to requests that break the body size limit
// (but if we do that, we also need to make sure that whatever handles the non-fetch error path below does the same)
res.statusCode = 500;
metadata.statusCode = 500;
const promise = Promise.reject(err);
try {
// we need to await the promise to trigger the rejection early
// so that it's already handled by the time we call
// the RSC runtime. Otherwise, it will throw an unhandled
// promise rejection error in the renderer.
await promise;
} catch {
// swallow error, it's gonna be handled on the client
}
return {
type: 'done',
result: await generateFlight(req, ctx, requestStore, {
actionResult: promise,
// If the page was not revalidated, or if the action was forwarded
// from another worker, we can skip rendering the page.
skipPageRendering: workStore.pathWasRevalidated === undefined || workStore.pathWasRevalidated === ActionDidNotRevalidate || actionWasForwarded,
temporaryReferences
})
};
}
// For an MPA action, we need to render a HTML response. We'll rethrow the error and let it be handled above.
throw err;
}
}
/**
 * Runs a Server Action inside the request's work-unit context and decides
 * whether the page render that normally follows an action can be skipped.
 * When a render will follow, the request store is transitioned back into the
 * 'render' phase and pending cookie/revalidation work is flushed first.
 */ async function executeActionAndPrepareForRender(action, args, workStore, requestStore, actionWasForwarded) {
    requestStore.phase = 'action';
    // A forwarded action was already handled by another worker, so this worker
    // never re-renders the page for it.
    let canSkipRender = actionWasForwarded;
    try {
        const actionResult = await workUnitAsyncStorage.run(requestStore, ()=>action.apply(null, args));
        // If the action didn't revalidate the current path, rendering it again
        // would be wasted work.
        if (!canSkipRender) {
            canSkipRender = workStore.pathWasRevalidated === undefined || workStore.pathWasRevalidated === ActionDidNotRevalidate;
        }
        return {
            actionResult,
            skipPageRendering: canSkipRender
        };
    } finally{
        if (!canSkipRender) {
            requestStore.phase = 'render';
            // In the render phase, cookies() reads `workUnitStore.cookies` rather
            // than the mutable set, so copy any cookie writes performed during
            // the action over to the immutable cookies before rendering.
            synchronizeMutableCookies(requestStore);
            // The action may have toggled draft mode; keep the work store current.
            workStore.isDraftMode = requestStore.draftMode.isEnabled;
            // Apply any revalidateTag/revalidatePath calls so the upcoming
            // render observes fresh data.
            await executeRevalidates(workStore);
        }
    }
}
/**
 * Resolves the bundler module ID for a Server Action ID via the server module
 * map. A lookup failure can mean deployment skew (the action ID came from an
 * older or newer deployment) or simply an invalid POST request that is not a
 * server action; either way, the thrown error is handled by the caller.
 */ function getActionModIdOrError(actionId, serverModuleMap) {
    // Without the 'next-action' header there is nothing to look up.
    if (!actionId) {
        throw Object.defineProperty(new InvariantError("Missing 'next-action' header."), "__NEXT_ERROR_CODE", {
            value: "E664",
            enumerable: false,
            configurable: true
        });
    }
    const mapEntry = serverModuleMap[actionId];
    const actionModId = mapEntry == null ? undefined : mapEntry.id;
    if (!actionModId) {
        throw getActionNotFoundError(actionId);
    }
    return actionModId;
}
function getActionNotFoundError(actionId) {
return Object.defineProperty(new Error(`Failed to find Server Action${actionId ? ` "${actionId}"` : ''}. This request might be from an older or newer deployment.\nRead more: https://nextjs.org/docs/messages/failed-to-find-server-action`), "__NEXT_ERROR_CODE", {
value: "E974",
enumerable: false,
configurable: true
});
}
// Prefixes of the form-field names used by React's progressive-enhancement
// (MPA) Server Action protocol; must be kept in sync with React's
// decodeAction implementation (see areAllActionIdsValid below).
const $ACTION_ = '$ACTION_';
// Marks a field that references a bound-args action descriptor.
const $ACTION_REF_ = '$ACTION_REF_';
// Marks a field whose name embeds the action ID directly (no bound args).
const $ACTION_ID_ = '$ACTION_ID_';
// Every valid Server Action ID has exactly this length; used to cheaply
// reject malformed IDs before looking them up in the module map.
const ACTION_ID_EXPECTED_LENGTH = 42;
/**
 * Pre-validates an MPA action payload before handing it to React's
 * `decodeAction`. This mirrors logic inside React's decodeAction and must be
 * kept in sync with it: every `$ACTION_ID_*` / `$ACTION_REF_*` field must
 * resolve to an action ID known to this Next.js application.
 */ function areAllActionIdsValid(mpaFormData, serverModuleMap) {
    let foundAction = false;
    for (const fieldName of mpaFormData.keys()){
        if (!fieldName.startsWith($ACTION_)) {
            continue;
        }
        if (fieldName.startsWith($ACTION_ID_)) {
            // Unbound action: the ID is embedded in the field name itself.
            if (isInvalidActionIdFieldName(fieldName, serverModuleMap)) {
                return false;
            }
            foundAction = true;
            continue;
        }
        if (!fieldName.startsWith($ACTION_REF_)) {
            continue;
        }
        // Bound-args action: the descriptor lives in a companion
        // `$ACTION_<ref>:0` field and must be a single string value.
        const descriptorKey = $ACTION_ + fieldName.slice($ACTION_REF_.length) + ':0';
        const descriptorValues = mpaFormData.getAll(descriptorKey);
        if (descriptorValues.length !== 1) {
            return false;
        }
        const descriptor = descriptorValues[0];
        if (typeof descriptor !== 'string') {
            return false;
        }
        if (isInvalidStringActionDescriptor(descriptor, serverModuleMap)) {
            return false;
        }
        foundAction = true;
    }
    return foundAction;
}
// A serialized bound-action descriptor is JSON of the shape
// '{"id":"<actionId>",...}' (see isInvalidStringActionDescriptor below).
const ACTION_DESCRIPTOR_ID_PREFIX = '{"id":"';
/**
 * Returns true when `actionDescriptor` is NOT a well-formed bound-action
 * descriptor ('{"id":"<actionId>",...}') whose ID exists in the server
 * module map.
 */ function isInvalidStringActionDescriptor(actionDescriptor, serverModuleMap) {
    if (!actionDescriptor.startsWith(ACTION_DESCRIPTOR_ID_PREFIX)) {
        return true;
    }
    const idStart = ACTION_DESCRIPTOR_ID_PREFIX.length;
    const idEnd = idStart + ACTION_ID_EXPECTED_LENGTH;
    const candidateId = actionDescriptor.slice(idStart, idEnd);
    // The embedded ID must be exactly the expected length and must be
    // terminated by the closing quote of the JSON string.
    if (candidateId.length !== ACTION_ID_EXPECTED_LENGTH || actionDescriptor[idEnd] !== '"') {
        return true;
    }
    return serverModuleMap[candidateId] == null;
}
/**
 * Returns true when a `$ACTION_ID_<id>` field name does NOT embed a valid
 * action ID. The caller has already verified the `$ACTION_ID_` prefix, so
 * only the embedded ID needs validating here.
 */ function isInvalidActionIdFieldName(actionIdFieldName, serverModuleMap) {
    const expectedLength = $ACTION_ID_.length + ACTION_ID_EXPECTED_LENGTH;
    if (actionIdFieldName.length !== expectedLength) {
        // The embedded ID has too few or too many characters.
        return true;
    }
    const embeddedId = actionIdFieldName.slice($ACTION_ID_.length);
    return serverModuleMap[embeddedId] == null;
}
//# sourceMappingURL=action-handler.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,4 @@
import { createAsyncLocalStorage } from './async-local-storage';
// Module-level AsyncLocalStorage instance backing the after-task store
// (shared across layers via after-task-async-storage.external).
export const afterTaskAsyncStorageInstance = createAsyncLocalStorage();
//# sourceMappingURL=after-task-async-storage-instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/after-task-async-storage-instance.ts"],"sourcesContent":["import type { AfterTaskAsyncStorage } from './after-task-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const afterTaskAsyncStorageInstance: AfterTaskAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["createAsyncLocalStorage","afterTaskAsyncStorageInstance"],"mappings":"AACA,SAASA,uBAAuB,QAAQ,wBAAuB;AAE/D,OAAO,MAAMC,gCACXD,0BAAyB","ignoreList":[0]}

View File

@@ -0,0 +1,7 @@
// Share the instance module in the next-shared layer
import { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance' with {
'turbopack-transition': 'next-shared'
};
export { afterTaskAsyncStorage };
//# sourceMappingURL=after-task-async-storage.external.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/after-task-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nimport type { WorkUnitStore } from './work-unit-async-storage.external'\n\nexport interface AfterTaskStore {\n /** The phase in which the topmost `after` was called.\n *\n * NOTE: Can be undefined when running `generateStaticParams`,\n * where we only have a `workStore`, no `workUnitStore`.\n */\n readonly rootTaskSpawnPhase: WorkUnitStore['phase'] | undefined\n}\n\nexport type AfterTaskAsyncStorage = AsyncLocalStorage<AfterTaskStore>\n\nexport { afterTaskAsyncStorage }\n"],"names":["afterTaskAsyncStorageInstance","afterTaskAsyncStorage"],"mappings":"AAEA,qDAAqD;AACrD,SAASA,iCAAiCC,qBAAqB,QAAQ,2CAA2C;IAAE,wBAAwB;AAAc,EAAC;AAc3J,SAASA,qBAAqB,GAAE","ignoreList":[0]}

View File

@@ -0,0 +1,217 @@
import { InvariantError } from '../../shared/lib/invariant-error';
import { createAtomicTimerGroup } from './app-render-scheduling';
import { DANGEROUSLY_runPendingImmediatesAfterCurrentTask, expectNoPendingImmediates } from '../node-environment-extensions/fast-set-immediate.external';
/**
 * Runs `prerender` and then `abort` in two back-to-back macrotasks on the
 * same timer queue (via `createAtomicTimerGroup`) so that no other events can
 * sneak in between them. Not supported in the edge runtime, which lacks the
 * required timer behavior.
 */ export function prerenderAndAbortInSequentialTasks(prerender, abort) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new InvariantError('`prerenderAndAbortInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
            value: "E538",
            enumerable: false,
            configurable: true
        });
    }
    return new Promise((resolve, reject)=>{
        const schedule = createAtomicTimerGroup();
        let prerenderPromise;
        schedule(()=>{
            try {
                DANGEROUSLY_runPendingImmediatesAfterCurrentTask();
                prerenderPromise = prerender();
                // The caller observes failures through the promise we resolve
                // with below; suppress the interim unhandled-rejection warning.
                prerenderPromise.catch(()=>{});
            } catch (err) {
                reject(err);
            }
        });
        schedule(()=>{
            try {
                expectNoPendingImmediates();
                abort();
                resolve(prerenderPromise);
            } catch (err) {
                reject(err);
            }
        });
    });
}
/**
 * Like `prerenderAndAbortInSequentialTasks`, but inserts an extra task
 * between `prerender` and `abort` so that part of the render can run in its
 * own macrotask (via `advanceStage`).
 */ export function prerenderAndAbortInSequentialTasksWithStages(prerender, advanceStage, abort) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new InvariantError('`prerenderAndAbortInSequentialTasksWithStages` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
            value: "E778",
            enumerable: false,
            configurable: true
        });
    }
    return new Promise((resolve, reject)=>{
        const schedule = createAtomicTimerGroup();
        let prerenderPromise;
        schedule(()=>{
            try {
                DANGEROUSLY_runPendingImmediatesAfterCurrentTask();
                prerenderPromise = prerender();
                // The caller observes failures through the promise we resolve
                // with below; suppress the interim unhandled-rejection warning.
                prerenderPromise.catch(()=>{});
            } catch (err) {
                reject(err);
            }
        });
        schedule(()=>{
            try {
                DANGEROUSLY_runPendingImmediatesAfterCurrentTask();
                advanceStage();
            } catch (err) {
                reject(err);
            }
        });
        schedule(()=>{
            try {
                expectNoPendingImmediates();
                abort();
                resolve(prerenderPromise);
            } catch (err) {
                reject(err);
            }
        });
    });
}
// React's RSC prerender emits an incomplete flight stream when using
// `prerender`: if the connection closes, any hanging chunks are errored,
// because prerender (an experimental feature) has no concept of resume yet.
// This wrapper owns the underlying stream so consumers can fork (`tee`) or
// take over (`consume`) it exactly once.
export class ReactServerResult {
    constructor(stream){
        this._stream = stream;
    }
    /**
     * Splits off a copy of the underlying stream, keeping this result
     * readable. Throws if the stream was already consumed.
     */ tee() {
        if (this._stream === null) {
            throw Object.defineProperty(new Error('Cannot tee a ReactServerResult that has already been consumed'), "__NEXT_ERROR_CODE", {
                value: "E106",
                enumerable: false,
                configurable: true
            });
        }
        const [retained, forked] = this._stream.tee();
        this._stream = retained;
        return forked;
    }
    /**
     * Hands ownership of the underlying stream to the caller; afterwards any
     * further `tee()`/`consume()` throws.
     */ consume() {
        if (this._stream === null) {
            throw Object.defineProperty(new Error('Cannot consume a ReactServerResult that has already been consumed'), "__NEXT_ERROR_CODE", {
                value: "E470",
                enumerable: false,
                configurable: true
            });
        }
        const released = this._stream;
        this._stream = null;
        return released;
    }
}
/**
 * Buffers the entire prelude stream of a pending prerender into memory and
 * wraps the chunks in a `ReactServerPrerenderResult`.
 */ export async function createReactServerPrerenderResult(underlying) {
    const { prelude } = await underlying;
    const reader = prelude.getReader();
    const collected = [];
    while(true){
        const { done, value } = await reader.read();
        if (done) {
            break;
        }
        collected.push(value);
    }
    return new ReactServerPrerenderResult(collected);
}
/**
 * Buffers a render's flight stream into memory and wraps the chunks in a
 * `ReactServerPrerenderResult`.
 */ export async function createReactServerPrerenderResultFromRender(underlying) {
    const collected = [];
    const reader = underlying.getReader();
    let next = await reader.read();
    while (!next.done) {
        collected.push(next.value);
        next = await reader.read();
    }
    return new ReactServerPrerenderResult(collected);
}
/**
 * Holds the fully-buffered flight chunks of a prerender and exposes them as
 * streams — either "closing" (ends normally) or "unclosing" (never closes, to
 * simulate a paused connection; see the comment above ReactServerResult).
 * The chunk buffer can be handed out multiple times (`as*`) or exactly once
 * (`consume*`), after which any access throws.
 */ export class ReactServerPrerenderResult {
    constructor(chunks){
        this._chunks = chunks;
    }
    // Returns the buffered chunks, or throws if they were already consumed.
    // `expression` names the caller for the error message.
    assertChunks(expression) {
        const buffered = this._chunks;
        if (buffered === null) {
            throw Object.defineProperty(new InvariantError(`Cannot \`${expression}\` on a ReactServerPrerenderResult that has already been consumed.`), "__NEXT_ERROR_CODE", {
                value: "E593",
                enumerable: false,
                configurable: true
            });
        }
        return buffered;
    }
    // Like `assertChunks`, but also marks this result as consumed.
    consumeChunks(expression) {
        const buffered = this.assertChunks(expression);
        this.consume();
        return buffered;
    }
    consume() {
        this._chunks = null;
    }
    asUnclosingStream() {
        return createUnclosingStream(this.assertChunks('asUnclosingStream()'));
    }
    consumeAsUnclosingStream() {
        return createUnclosingStream(this.consumeChunks('consumeAsUnclosingStream()'));
    }
    asStream() {
        return createClosingStream(this.assertChunks('asStream()'));
    }
    consumeAsStream() {
        return createClosingStream(this.consumeChunks('consumeAsStream()'));
    }
}
/**
 * Streams the given chunks but intentionally never closes. The consumer will
 * clear out chunks once finished; the remaining memory is GC'd when this
 * object goes out of scope.
 */ function createUnclosingStream(chunks) {
    let nextIndex = 0;
    return new ReadableStream({
        async pull (controller) {
            if (nextIndex < chunks.length) {
                controller.enqueue(chunks[nextIndex]);
                nextIndex += 1;
            }
            // Deliberately no controller.close() — see the doc comment above.
        }
    });
}
/**
 * Streams the given chunks and closes once all of them have been delivered.
 */ function createClosingStream(chunks) {
    let nextIndex = 0;
    return new ReadableStream({
        async pull (controller) {
            if (nextIndex >= chunks.length) {
                controller.close();
                return;
            }
            controller.enqueue(chunks[nextIndex]);
            nextIndex += 1;
        }
    });
}
/**
 * Inspects a prerender prelude stream without consuming it.
 *
 * Tees the stream and reads one chunk off the peek branch to determine
 * whether the prelude is empty (React emitted no bytes at all), returning the
 * untouched branch for further use.
 */ export async function processPrelude(unprocessedPrelude) {
    const [prelude, peek] = unprocessedPrelude.tee();
    const reader = peek.getReader();
    const firstResult = await reader.read();
    // Release the peek branch. `cancel()` returns a promise; swallow a
    // potential rejection so it can't surface as an unhandled promise
    // rejection — we only care about the `prelude` branch from here on.
    reader.cancel().catch(()=>{});
    const preludeIsEmpty = firstResult.done === true;
    return {
        prelude,
        preludeIsEmpty
    };
}
//# sourceMappingURL=app-render-prerender-utils.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,97 @@
import { InvariantError } from '../../shared/lib/invariant-error';
import { createAtomicTimerGroup } from './app-render-scheduling';
import { DANGEROUSLY_runPendingImmediatesAfterCurrentTask, expectNoPendingImmediates } from '../node-environment-extensions/fast-set-immediate.external';
/**
 * Schedules `render` and `followup` as two consecutive macrotasks on the same
 * timer queue (via `createAtomicTimerGroup`) so that no other events can
 * sneak in between them. Not supported in the edge runtime.
 */ export function scheduleInSequentialTasks(render, followup) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new InvariantError('`scheduleInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
            value: "E591",
            enumerable: false,
            configurable: true
        });
    }
    return new Promise((resolve, reject)=>{
        const schedule = createAtomicTimerGroup();
        let renderResult;
        schedule(()=>{
            try {
                DANGEROUSLY_runPendingImmediatesAfterCurrentTask();
                renderResult = render();
            } catch (err) {
                reject(err);
            }
        });
        schedule(()=>{
            try {
                expectNoPendingImmediates();
                followup();
                resolve(renderResult);
            } catch (err) {
                reject(err);
            }
        });
    });
}
/**
* This is a utility function to make scheduling sequential tasks that run back to back easier.
* We schedule on the same queue (setTimeout) at the same time to ensure no other events can sneak in between.
* The function that runs in the second task gets access to the first tasks's result.
*/ export function pipelineInSequentialTasks(one, two, three) {
if (process.env.NEXT_RUNTIME === 'edge') {
throw Object.defineProperty(new InvariantError('`pipelineInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
value: "E875",
enumerable: false,
configurable: true
});
} else {
return new Promise((resolve, reject)=>{
const scheduleTimeout = createAtomicTimerGroup();
let oneResult;
scheduleTimeout(()=>{
try {
DANGEROUSLY_runPendingImmediatesAfterCurrentTask();
oneResult = one();
} catch (err) {
clearTimeout(twoId);
clearTimeout(threeId);
clearTimeout(fourId);
reject(err);
}
});
let twoResult;
const twoId = scheduleTimeout(()=>{
// if `one` threw, then this timeout would've been cleared,
// so if we got here, we're guaranteed to have a value.
try {
DANGEROUSLY_runPendingImmediatesAfterCurrentTask();
twoResult = two(oneResult);
} catch (err) {
clearTimeout(threeId);
clearTimeout(fourId);
reject(err);
}
});
let threeResult;
const threeId = scheduleTimeout(()=>{
// if `two` threw, then this timeout would've been cleared,
// so if we got here, we're guaranteed to have a value.
try {
expectNoPendingImmediates();
threeResult = three(twoResult);
} catch (err) {
clearTimeout(fourId);
reject(err);
}
});
// We wait a task before resolving/rejecting
const fourId = scheduleTimeout(()=>{
resolve(threeResult);
});
});
}
}
//# sourceMappingURL=app-render-render-utils.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,184 @@
import { InvariantError } from '../../shared/lib/invariant-error';
import { unpatchedSetImmediate } from '../node-environment-extensions/fast-set-immediate.external';
/*
==========================
| Background |
==========================
Node.js does not guarantee that two timers scheduled back to back will run
on the same iteration of the event loop:
```ts
setTimeout(one, 0)
setTimeout(two, 0)
```
Internally, each timer is assigned a `_idleStart` property that holds
an internal libuv timestamp in millisecond resolution.
This will be used to determine if the timer is already "expired" and should be executed.
However, even in sync code, it's possible for two timers to get different `_idleStart` values.
This can cause one of the timers to be executed, and the other to be delayed until the next timer phase.
The delaying happens [here](https://github.com/nodejs/node/blob/c208ffc66bb9418ff026c4e3fa82e5b4387bd147/lib/internal/timers.js#L556-L564).
and can be debugged by running node with `NODE_DEBUG=timer`.
The easiest way to observe it is to run this program in a loop until it exits with status 1:
```
// test.js
let immediateRan = false
const t1 = setTimeout(() => {
console.log('timeout 1')
setImmediate(() => {
console.log('immediate 1')
immediateRan = true
})
})
const t2 = setTimeout(() => {
console.log('timeout 2')
if (immediateRan) {
console.log('immediate ran before the second timeout!')
console.log(
`t1._idleStart: ${t1._idleStart}, t2_idleStart: ${t2._idleStart}`
);
process.exit(1)
}
})
```
```bash
#!/usr/bin/env bash
i=1;
while true; do
output="$(NODE_DEBUG=timer node test.js 2>&1)";
if [ "$?" -eq 1 ]; then
echo "failed after $i iterations";
echo "$output";
break;
fi;
i=$((i+1));
done
```
If `t2` is deferred to the next iteration of the event loop,
then the immediate scheduled from inside `t1` will run first.
When this occurs, `_idleStart` is reliably different between `t1` and `t2`.
==========================
| Solution |
==========================
We can guarantee that multiple timers (with the same delay, usually `0`)
run together without any delays by making sure that their `_idleStart`s are the same,
because that's what's used to determine if a timer should be deferred or not.
Luckily, this property is currently exposed to userland and mutable,
so we can patch it.
Another related trick we could potentially apply is making
a timer immediately be considered expired by doing `timer._idleStart -= 2`.
(the value must be more than `1`, the delay that actually gets set for `setTimeout(cb, 0)`).
This makes node view this timer as "a 1ms timer scheduled 2ms ago",
meaning that it should definitely run in the next timer phase.
However, I'm not confident we know all the side effects of doing this,
so for now, simply ensuring coordination is enough.
*/ let shouldAttemptPatching = true; // flipped to false (with a one-time warning) once `_idleStart` patching proves unusable in this runtime
function warnAboutTimers() {
console.warn("Next.js cannot guarantee that Cache Components will run as expected due to the current runtime's implementation of `setTimeout()`.\nPlease report a github issue here: https://github.com/vercel/next.js/issues/new/");
}
/**
 * Allows scheduling multiple timers (equivalent to `setTimeout(cb, delayMs)`)
 * that are guaranteed to run in the same iteration of the event loop.
 *
 * Works by copying the first timer's internal `_idleStart` onto every
 * subsequent timer in the group (see the explanation at the top of this
 * file). On runtimes where `_idleStart` is unavailable, falls back to plain
 * timers and emits a one-time warning.
 *
 * @param delayMs - the delay to pass to `setTimeout`. (default: 0)
 *
 * */ export function createAtomicTimerGroup(delayMs = 0) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new InvariantError('createAtomicTimerGroup cannot be called in the edge runtime'), "__NEXT_ERROR_CODE", {
            value: "E934",
            enumerable: false,
            configurable: true
        });
    } else {
        let isFirstCallback = true;
        // `_idleStart` captured from the first scheduled timer; copied onto
        // every later timer in this group.
        let firstTimerIdleStart = null;
        let didFirstTimerRun = false;
        // As a sanity check, we schedule an immediate from the first timeout
        // to check if the execution was interrupted (i.e. if it ran between the timeouts).
        // Note that we're deliberately bypassing the "fast setImmediate" patch here --
        // otherwise, this check would always fail, because the immediate
        // would always run before the second timeout.
        let didImmediateRun = false;
        // Wraps the first timer's callback to record that the group started
        // and to plant the interleaving-detection immediate.
        function runFirstCallback(callback) {
            didFirstTimerRun = true;
            if (shouldAttemptPatching) {
                unpatchedSetImmediate(()=>{
                    didImmediateRun = true;
                });
            }
            return callback();
        }
        // Wraps every later timer's callback to verify nothing ran in between.
        function runSubsequentCallback(callback) {
            if (shouldAttemptPatching) {
                if (didImmediateRun) {
                    // If the immediate managed to run between the timers, then we're not
                    // able to provide the guarantees that we're supposed to
                    shouldAttemptPatching = false;
                    warnAboutTimers();
                }
            }
            return callback();
        }
        return function scheduleTimeout(callback) {
            // Once the first timer fired, joining the group can no longer be atomic.
            if (didFirstTimerRun) {
                throw Object.defineProperty(new InvariantError('Cannot schedule more timers into a group that already executed'), "__NEXT_ERROR_CODE", {
                    value: "E935",
                    enumerable: false,
                    configurable: true
                });
            }
            const timer = setTimeout(isFirstCallback ? runFirstCallback : runSubsequentCallback, delayMs, callback);
            isFirstCallback = false;
            if (!shouldAttemptPatching) {
                // We already tried patching some timers, and it didn't work.
                // No point trying again.
                return timer;
            }
            // NodeJS timers have a `_idleStart` property, but it doesn't exist e.g. in Bun.
            // If it's not present, we'll warn and try to continue.
            try {
                if ('_idleStart' in timer && typeof timer._idleStart === 'number') {
                    // If this is the first timer that was scheduled, save its `_idleStart`.
                    // We'll copy it onto subsequent timers to guarantee that they'll all be
                    // considered expired in the same iteration of the event loop
                    // and thus will all be executed in the same timer phase.
                    if (firstTimerIdleStart === null) {
                        firstTimerIdleStart = timer._idleStart;
                    } else {
                        timer._idleStart = firstTimerIdleStart;
                    }
                } else {
                    shouldAttemptPatching = false;
                    warnAboutTimers();
                }
            } catch (err) {
                // This should never fail in current Node, but it might start failing in the future.
                // We might be okay even without tweaking the timers, so warn and try to continue.
                console.error(Object.defineProperty(new InvariantError('An unexpected error occurred while adjusting `_idleStart` on an atomic timer', {
                    cause: err
                }), "__NEXT_ERROR_CODE", {
                    value: "E933",
                    enumerable: false,
                    configurable: true
                }));
                shouldAttemptPatching = false;
                warnAboutTimers();
            }
            return timer;
        };
    }
}
//# sourceMappingURL=app-render-scheduling.js.map

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,50 @@
// Single shared error instance thrown by every context-mutating method of the
// fake AsyncLocalStorage below, so the failure mode is cheap and consistent.
const sharedAsyncLocalStorageNotAvailableError = Object.defineProperty(new Error('Invariant: AsyncLocalStorage accessed in runtime where it is not available'), "__NEXT_ERROR_CODE", {
    value: "E504",
    enumerable: false,
    configurable: true
});
/**
 * Inert stand-in for `AsyncLocalStorage` in runtimes that don't provide it.
 * Reads (`getStore`) succeed and report "no active store"; anything that
 * would actually create or switch a context throws the shared invariant
 * error above.
 */ class FakeAsyncLocalStorage {
    getStore() {
        // There is never an active store in this fake implementation.
        return undefined;
    }
    disable() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    run() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    exit() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    enterWith() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    static bind(fn) {
        // Binding is an identity operation when no real context exists.
        return fn;
    }
}
// The real AsyncLocalStorage constructor if the host runtime exposes it on
// globalThis; otherwise falsy, and the fake implementation above is used.
const maybeGlobalAsyncLocalStorage = typeof globalThis !== 'undefined' && globalThis.AsyncLocalStorage;
/**
 * Creates an AsyncLocalStorage instance, preferring the runtime-provided
 * implementation and falling back to the inert fake when unavailable.
 */ export function createAsyncLocalStorage() {
    return maybeGlobalAsyncLocalStorage ? new maybeGlobalAsyncLocalStorage() : new FakeAsyncLocalStorage();
}
/**
 * Binds `fn` to the current async context via the runtime's static
 * `AsyncLocalStorage.bind`, or returns it unchanged when no ALS exists.
 */ export function bindSnapshot(// WARNING: Don't pass a named function to this argument! See: https://github.com/facebook/react/pull/34911
fn) {
    const implementation = maybeGlobalAsyncLocalStorage || FakeAsyncLocalStorage;
    return implementation.bind(fn);
}
/**
 * Captures the current async context via `AsyncLocalStorage.snapshot()`;
 * without an ALS, the "snapshot" degenerates to a plain invoker.
 */ export function createSnapshot() {
    if (maybeGlobalAsyncLocalStorage) {
        return maybeGlobalAsyncLocalStorage.snapshot();
    }
    return (fn, ...args)=>fn(...args);
}
//# sourceMappingURL=async-local-storage.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/async-local-storage.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\nconst sharedAsyncLocalStorageNotAvailableError = new Error(\n 'Invariant: AsyncLocalStorage accessed in runtime where it is not available'\n)\n\nclass FakeAsyncLocalStorage<Store extends {}>\n implements AsyncLocalStorage<Store>\n{\n disable(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n getStore(): Store | undefined {\n // This fake implementation of AsyncLocalStorage always returns `undefined`.\n return undefined\n }\n\n run<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n exit<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n enterWith(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n static bind<T>(fn: T): T {\n return fn\n }\n}\n\nconst maybeGlobalAsyncLocalStorage =\n typeof globalThis !== 'undefined' && (globalThis as any).AsyncLocalStorage\n\nexport function createAsyncLocalStorage<\n Store extends {},\n>(): AsyncLocalStorage<Store> {\n if (maybeGlobalAsyncLocalStorage) {\n return new maybeGlobalAsyncLocalStorage()\n }\n return new FakeAsyncLocalStorage()\n}\n\nexport function bindSnapshot<T>(\n // WARNING: Don't pass a named function to this argument! 
See: https://github.com/facebook/react/pull/34911\n fn: T\n): T {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.bind(fn)\n }\n return FakeAsyncLocalStorage.bind(fn)\n}\n\nexport function createSnapshot(): <R, TArgs extends any[]>(\n fn: (...args: TArgs) => R,\n ...args: TArgs\n) => R {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.snapshot()\n }\n return function (fn: any, ...args: any[]) {\n return fn(...args)\n }\n}\n"],"names":["sharedAsyncLocalStorageNotAvailableError","Error","FakeAsyncLocalStorage","disable","getStore","undefined","run","exit","enterWith","bind","fn","maybeGlobalAsyncLocalStorage","globalThis","AsyncLocalStorage","createAsyncLocalStorage","bindSnapshot","createSnapshot","snapshot","args"],"mappings":"AAEA,MAAMA,2CAA2C,qBAEhD,CAFgD,IAAIC,MACnD,+EAD+C,qBAAA;WAAA;gBAAA;kBAAA;AAEjD;AAEA,MAAMC;IAGJC,UAAgB;QACd,MAAMH;IACR;IAEAI,WAA8B;QAC5B,4EAA4E;QAC5E,OAAOC;IACT;IAEAC,MAAY;QACV,MAAMN;IACR;IAEAO,OAAa;QACX,MAAMP;IACR;IAEAQ,YAAkB;QAChB,MAAMR;IACR;IAEA,OAAOS,KAAQC,EAAK,EAAK;QACvB,OAAOA;IACT;AACF;AAEA,MAAMC,+BACJ,OAAOC,eAAe,eAAe,AAACA,WAAmBC,iBAAiB;AAE5E,OAAO,SAASC;IAGd,IAAIH,8BAA8B;QAChC,OAAO,IAAIA;IACb;IACA,OAAO,IAAIT;AACb;AAEA,OAAO,SAASa,aACd,2GAA2G;AAC3GL,EAAK;IAEL,IAAIC,8BAA8B;QAChC,OAAOA,6BAA6BF,IAAI,CAACC;IAC3C;IACA,OAAOR,sBAAsBO,IAAI,CAACC;AACpC;AAEA,OAAO,SAASM;IAId,IAAIL,8BAA8B;QAChC,OAAOA,6BAA6BM,QAAQ;IAC9C;IACA,OAAO,SAAUP,EAAO,EAAE,GAAGQ,IAAW;QACtC,OAAOR,MAAMQ;IACf;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,171 @@
/**
* This class is used to detect when all cache reads for a given render are settled.
* We do this to allow for cache warming the prerender without having to continue rendering
* the remainder of the page. This feature is really only useful when the cacheComponents flag is on
* and should only be used in codepaths gated with this feature.
*/ import { InvariantError } from '../../shared/lib/invariant-error';
export class CacheSignal {
    /**
     * NOTE: relies on `process.nextTick`, so construction throws in the edge
     * runtime (guarded at the bottom of this constructor).
     */ constructor(){
        // Number of cache reads currently in flight (incremented by beginRead).
        this.count = 0;
        // Resolvers for inputReady(): fired as soon as no reads are pending.
        this.earlyListeners = [];
        // Resolvers for cacheReady(): fired later, after scheduled render work
        // has had a chance to start new reads (see noMorePendingCaches).
        this.listeners = [];
        // True while a nextTick flush of earlyListeners is already scheduled.
        this.tickPending = false;
        // Cleanup handle for the scheduled listeners flush, if one is outstanding.
        this.pendingTimeoutCleanup = null;
        this.subscribedSignals = null;
        // Flushes cacheReady() listeners, but only if no reads started in the
        // meantime; scheduled via noMorePendingCaches.
        this.invokeListenersIfNoPendingReads = ()=>{
            this.pendingTimeoutCleanup = null;
            if (this.count === 0) {
                for(let i = 0; i < this.listeners.length; i++){
                    this.listeners[i]();
                }
                this.listeners.length = 0;
            }
        };
        if (process.env.NEXT_RUNTIME === 'edge') {
            // we rely on `process.nextTick`, which is not supported in edge
            throw Object.defineProperty(new InvariantError('CacheSignal cannot be used in the edge runtime, because `cacheComponents` does not support it.'), "__NEXT_ERROR_CODE", {
                value: "E728",
                enumerable: false,
                configurable: true
            });
        }
    }
noMorePendingCaches() {
if (!this.tickPending) {
this.tickPending = true;
queueMicrotask(()=>process.nextTick(()=>{
this.tickPending = false;
if (this.count === 0) {
for(let i = 0; i < this.earlyListeners.length; i++){
this.earlyListeners[i]();
}
this.earlyListeners.length = 0;
}
}));
}
// After a cache resolves, React will schedule new rendering work:
// - in a microtask (when prerendering)
// - in setImmediate (when rendering)
// To cover both of these, we have to make sure that we let immediates execute at least once after each cache resolved.
// We don't know when the pending timeout was scheduled (and if it's about to resolve),
// so by scheduling a new one, we can be sure that we'll go around the event loop at least once.
if (this.pendingTimeoutCleanup) {
// We cancel the timeout in beginRead, so this shouldn't ever be active here,
// but we still cancel it defensively.
this.pendingTimeoutCleanup();
}
this.pendingTimeoutCleanup = scheduleImmediateAndTimeoutWithCleanup(this.invokeListenersIfNoPendingReads);
}
/**
* This promise waits until there are no more in progress cache reads but no later.
* This allows for adding more cache reads after to delay cacheReady.
*/ inputReady() {
return new Promise((resolve)=>{
this.earlyListeners.push(resolve);
if (this.count === 0) {
this.noMorePendingCaches();
}
});
}
/**
* If there are inflight cache reads this Promise can resolve in a microtask however
* if there are no inflight cache reads then we wait at least one task to allow initial
* cache reads to be initiated.
*/ cacheReady() {
return new Promise((resolve)=>{
this.listeners.push(resolve);
if (this.count === 0) {
this.noMorePendingCaches();
}
});
}
beginRead() {
this.count++;
// There's a new pending cache, so if there's a `noMorePendingCaches` timeout running,
// we should cancel it.
if (this.pendingTimeoutCleanup) {
this.pendingTimeoutCleanup();
this.pendingTimeoutCleanup = null;
}
if (this.subscribedSignals !== null) {
for (const subscriber of this.subscribedSignals){
subscriber.beginRead();
}
}
}
endRead() {
if (this.count === 0) {
throw Object.defineProperty(new InvariantError('CacheSignal got more endRead() calls than beginRead() calls'), "__NEXT_ERROR_CODE", {
value: "E678",
enumerable: false,
configurable: true
});
}
// If this is the last read we need to wait a task before we can claim the cache is settled.
// The cache read will likely ping a Server Component which can read from the cache again and this
// will play out in a microtask so we need to only resolve pending listeners if we're still at 0
// after at least one task.
// We only want one task scheduled at a time so when we hit count 1 we don't decrement the counter immediately.
// If intervening reads happen before the scheduled task runs they will never observe count 1 preventing reentrency
this.count--;
if (this.count === 0) {
this.noMorePendingCaches();
}
if (this.subscribedSignals !== null) {
for (const subscriber of this.subscribedSignals){
subscriber.endRead();
}
}
}
hasPendingReads() {
return this.count > 0;
}
trackRead(promise) {
this.beginRead();
// `promise.finally()` still rejects, so don't use it here to avoid unhandled rejections
const onFinally = this.endRead.bind(this);
promise.then(onFinally, onFinally);
return promise;
}
subscribeToReads(subscriber) {
if (subscriber === this) {
throw Object.defineProperty(new InvariantError('A CacheSignal cannot subscribe to itself'), "__NEXT_ERROR_CODE", {
value: "E679",
enumerable: false,
configurable: true
});
}
if (this.subscribedSignals === null) {
this.subscribedSignals = new Set();
}
this.subscribedSignals.add(subscriber);
// we'll notify the subscriber of each endRead() on this signal,
// so we need to give it a corresponding beginRead() for each read we have in flight now.
for(let i = 0; i < this.count; i++){
subscriber.beginRead();
}
return this.unsubscribeFromReads.bind(this, subscriber);
}
unsubscribeFromReads(subscriber) {
if (!this.subscribedSignals) {
return;
}
this.subscribedSignals.delete(subscriber);
// we don't need to set the set back to `null` if it's empty --
// if other signals are subscribing to this one, it'll likely get more subscriptions later,
// so we'd have to allocate a fresh set again when that happens.
}
}
function scheduleImmediateAndTimeoutWithCleanup(cb) {
// If we decide to clean up the timeout, we want to remove
// either the immediate or the timeout, whichever is still pending.
let clearPending;
const immediate = setImmediate(()=>{
const timeout = setTimeout(cb, 0);
clearPending = clearTimeout.bind(null, timeout);
});
clearPending = clearImmediate.bind(null, immediate);
return ()=>clearPending();
}
//# sourceMappingURL=cache-signal.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,268 @@
/* eslint-disable @next/internal/no-ambiguous-jsx -- Bundled in entry-base so it gets the right JSX runtime. */ import { jsx as _jsx } from "react/jsx-runtime";
// eslint-disable-next-line import/no-extraneous-dependencies
import { createFromReadableStream } from 'react-server-dom-webpack/client';
// eslint-disable-next-line import/no-extraneous-dependencies
import { prerender } from 'react-server-dom-webpack/static';
import { streamFromBuffer, streamToBuffer } from '../stream-utils/node-web-streams-helper';
import { waitAtLeastOneReactRenderTask } from '../../lib/scheduler';
import { createSegmentRequestKeyPart, appendSegmentRequestKeyPart, ROOT_SEGMENT_REQUEST_KEY, HEAD_REQUEST_KEY } from '../../shared/lib/segment-cache/segment-value-encoding';
import { getDigestForWellKnownError } from './create-error-handler';
import { Phase, printDebugThrownValueForProspectiveRender } from './prospective-render-utils';
import { workAsyncStorage } from './work-async-storage.external';
// Dev-only source-map helpers, passed as options to the Flight APIs
// (`prerender` / `createFromReadableStream`) below; in production builds both
// stay undefined so no stack-frame filtering or source-map lookup happens.
const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').filterStackFrameDEV : undefined;
const findSourceMapURL = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').findSourceMapURLDEV : undefined;
/**
 * Error handler for the per-segment Flight prerenders. Returns the digest for
 * well-known errors so React can reference them; everything else was already
 * logged when the full-page Flight stream was generated, so it is only
 * printed again in debug/verbose builds.
 */ function onSegmentPrerenderError(error) {
    const wellKnownDigest = getDigestForWellKnownError(error);
    if (wellKnownDigest) {
        return wellKnownDigest;
    }
    const shouldPrint = process.env.NEXT_DEBUG_BUILD || process.env.__NEXT_VERBOSE_LOGGING;
    if (!shouldPrint) {
        return;
    }
    const workStore = workAsyncStorage.getStore();
    const routeLabel = (workStore == null ? void 0 : workStore.route) ?? 'unknown route';
    printDebugThrownValueForProspectiveRender(error, routeLabel, Phase.SegmentCollection);
}
/**
 * Traverses the router tree of a prerendered page and produces one prefetch
 * response per segment, returned as a map keyed by segment path. The route
 * tree itself goes under the reserved `/_tree` key and the original full-page
 * response under `/_full`.
 */ export async function collectSegmentData(isCacheComponentsEnabled, fullPageDataBuffer, staleTime, clientModules, serverConsumerManifest) {
    // Mutable map collecting the per-segment results as we go.
    const resultMap = new Map();
    // Warm up the Flight module cache by decoding the page data once. After
    // this, any lingering async work can be attributed to hanging promises
    // caused by dynamic data access. Once per page, not per segment.
    try {
        await createFromReadableStream(streamFromBuffer(fullPageDataBuffer), {
            findSourceMapURL,
            serverConsumerManifest
        });
        await waitAtLeastOneReactRenderTask();
    } catch  {}
    // Used to stop the tree-prefetch stream once route processing settles.
    const abortController = new AbortController();
    const onCompletedProcessingRouteTree = async ()=>{
        // Re-encoding a cached prerender should fit in a microtask; anything
        // slower means hanging promises from dynamic data, so cut it off.
        await waitAtLeastOneReactRenderTask();
        abortController.abort();
    };
    // Walking the tree spawns one task per segment prefetch. The tasks land in
    // this mutable array and are awaited after the tree finishes rendering.
    const segmentTasks = [];
    const { prelude: treeStream } = await prerender(// RootTreePrefetch is not a valid return type for a React component, but
    // we need to use a component so that when we decode the original stream
    // inside of it, the side effects are transferred to the new stream.
    // @ts-expect-error
    _jsx(PrefetchTreeData, {
        isClientParamParsingEnabled: isCacheComponentsEnabled,
        fullPageDataBuffer: fullPageDataBuffer,
        serverConsumerManifest: serverConsumerManifest,
        clientModules: clientModules,
        staleTime: staleTime,
        segmentTasks: segmentTasks,
        onCompletedProcessingRouteTree: onCompletedProcessingRouteTree
    }), clientModules, {
        filterStackFrame,
        signal: abortController.signal,
        onError: onSegmentPrerenderError
    });
    // Reserved keys: the route tree and the entire original page response.
    resultMap.set('/_tree', await streamToBuffer(treeStream));
    resultMap.set('/_full', fullPageDataBuffer);
    // By now every segment task has been spawned; resolve them in parallel.
    const segmentEntries = await Promise.all(segmentTasks);
    for (const [segmentPath, buffer] of segmentEntries){
        resultMap.set(segmentPath, buffer);
    }
    return resultMap;
}
/**
 * Server component that renders the `/_tree` prefetch payload.
 *
 * Decoding the full-page Flight stream *inside* this component is a hack that
 * transfers the original stream's side effects (e.g. Float preloads) onto the
 * tree-prefetch stream. While traversing the router state it also spawns the
 * per-segment prefetch tasks, pushing them onto `segmentTasks`.
 * Returns the RootTreePrefetch payload, or null if the decoded payload does
 * not have the expected prerendered-page shape.
 */ async function PrefetchTreeData({ isClientParamParsingEnabled, fullPageDataBuffer, serverConsumerManifest, clientModules, staleTime, segmentTasks, onCompletedProcessingRouteTree }) {
    // TODO: React needs a better way to do this. Needed for Server Actions, too.
    const initialRSCPayload = await createFromReadableStream(createUnclosingPrefetchStream(streamFromBuffer(fullPageDataBuffer)), {
        findSourceMapURL,
        serverConsumerManifest
    });
    const buildId = initialRSCPayload.b;
    // FlightDataPath is an unsound type, hence the additional checks. The
    // expected shape for a prerendered page is exactly one path with three
    // entries, so bail out if EITHER length is wrong.
    // Fix: previously `&&` was used here, which only rejected the payload when
    // both the outer and the inner length were wrong at the same time.
    const flightDataPaths = initialRSCPayload.f;
    if (flightDataPaths.length !== 1 || flightDataPaths[0].length !== 3) {
        console.error('Internal Next.js error: InitialRSCPayload does not match the expected ' + 'shape for a prerendered page during segment prefetch generation.');
        return null;
    }
    const flightRouterState = flightDataPaths[0][0];
    const seedData = flightDataPaths[0][1];
    const head = flightDataPaths[0][2];
    // Compute the route metadata tree by traversing the FlightRouterState. As we
    // walk the tree, we will also spawn a task to produce a prefetch response for
    // each segment.
    const tree = collectSegmentDataImpl(isClientParamParsingEnabled, flightRouterState, buildId, seedData, clientModules, ROOT_SEGMENT_REQUEST_KEY, segmentTasks);
    // The head contains metadata, like the title; it's not really a route
    // segment, but it contains RSC data, so the client cache treats it
    // like one.
    segmentTasks.push(waitAtLeastOneReactRenderTask().then(()=>renderSegmentPrefetch(buildId, head, null, HEAD_REQUEST_KEY, clientModules)));
    // Notify the caller that route-tree processing is done. Anything async that
    // happens after this point must be due to hanging promises in the
    // original stream.
    onCompletedProcessingRouteTree();
    // The payload rendered into the special `/_tree` segment.
    const treePrefetch = {
        buildId,
        tree,
        staleTime
    };
    return treePrefetch;
}
/**
 * Recursively walks a FlightRouterState node, spawning a prefetch-encoding
 * task for every segment that has seed data, and returns the metadata entry
 * for this segment that is sent to the client as part of the tree prefetch.
 */ function collectSegmentDataImpl(isClientParamParsingEnabled, route, buildId, seedData, clientModules, requestKey, segmentTasks) {
    // Metadata for the child slots; stays null when there are no children.
    let slots = null;
    const parallelRoutes = route[1];
    const seedDataChildren = seedData !== null ? seedData[1] : null;
    for(const slotKey in parallelRoutes){
        const childRoute = parallelRoutes[slotKey];
        const childSeedData = seedDataChildren !== null ? seedDataChildren[slotKey] : null;
        const childRequestKey = appendSegmentRequestKeyPart(requestKey, slotKey, createSegmentRequestKeyPart(childRoute[0]));
        if (slots === null) {
            slots = {};
        }
        slots[slotKey] = collectSegmentDataImpl(isClientParamParsingEnabled, childRoute, buildId, childSeedData, clientModules, childRequestKey, segmentTasks);
    }
    const hasRuntimePrefetch = seedData !== null ? seedData[4] : false;
    if (seedData !== null) {
        // Encode this segment's data into its own Flight stream. Waiting one
        // render task first escapes the rendering context we are currently in.
        segmentTasks.push(waitAtLeastOneReactRenderTask().then(()=>renderSegmentPrefetch(buildId, seedData[0], seedData[2], requestKey, clientModules)));
    } else {
        // No seed data: skip the prefetch response but keep the segment in the
        // route tree.
        // TODO: encode missing segments in the tree so the client doesn't fetch
        // them for no reason; in practice this shouldn't happen today.
    }
    // A segment is either a plain string or a [name, paramKey, paramType]
    // tuple for dynamic params.
    const segment = route[0];
    const isStaticSegment = typeof segment === 'string';
    const name = isStaticSegment ? segment : segment[0];
    const paramKey = isStaticSegment ? segment : segment[1];
    const paramType = isStaticSegment ? null : segment[2];
    return {
        name,
        paramType,
        // Omitted from the prefetch response when client param parsing
        // (cacheComponents) is enabled.
        paramKey: isClientParamParsingEnabled ? null : paramKey,
        hasRuntimePrefetch,
        slots,
        isRootLayout: route[4] === true
    };
}
/**
 * Encodes one segment's data to a standalone Flight stream and returns a
 * `[segmentPath, buffer]` entry for the result map. The root segment is
 * written under the reserved `/_index` path. Additional metadata (e.g. stale
 * time, cache tags) could be added to the payload here in the future.
 */ async function renderSegmentPrefetch(buildId, rsc, loading, requestKey, clientModules) {
    const segmentPrefetch = {
        buildId,
        rsc,
        loading,
        isPartial: await isPartialRSCData(rsc, clientModules)
    };
    // Re-encoding a cached prerender should finish within a microtask; if it
    // takes longer, hanging promises from dynamic data are the cause, so the
    // stream is aborted at the end of the current task.
    const controller = new AbortController();
    waitAtLeastOneReactRenderTask().then(()=>controller.abort());
    const { prelude: segmentStream } = await prerender(segmentPrefetch, clientModules, {
        filterStackFrame,
        signal: controller.signal,
        onError: onSegmentPrerenderError
    });
    const segmentBuffer = await streamToBuffer(segmentStream);
    const segmentPath = requestKey === ROOT_SEGMENT_REQUEST_KEY ? '/_index' : requestKey;
    return [
        segmentPath,
        segmentBuffer
    ];
}
/**
 * Detects whether `rsc` contains only partial data. Dynamic data is encoded
 * as a never-resolving promise, so the segment is partial exactly when
 * encoding it takes longer than one render task. This must run as a separate
 * Flight prerender from the one producing the prefetch stream, because the
 * resulting `isPartial` flag is embedded in that stream.
 */ async function isPartialRSCData(rsc, clientModules) {
    let sawHangingPromise = false;
    const controller = new AbortController();
    waitAtLeastOneReactRenderTask().then(()=>{
        // Still encoding after a full task: dynamic data must have been
        // accessed somewhere in this segment.
        sawHangingPromise = true;
        controller.abort();
    });
    await prerender(rsc, clientModules, {
        filterStackFrame,
        signal: controller.signal,
        onError () {}
    });
    return sawHangingPromise;
}
/**
 * Wraps a Flight prefetch stream in a stream that never closes.
 *
 * With PPR enabled, prefetch streams may contain references that never
 * resolve — that is how dynamic data access is encoded, and the Flight client
 * reifies them into hanging promises that suspend during render, which is the
 * desired behavior (the UI later switches to the dynamic stream via
 * useDeferredValue(dynamic, static)). However, the Flight client currently
 * errors if the response closes while references are still unresolved, so as
 * a workaround we forward every chunk into a target stream that we
 * deliberately never close.
 */ function createUnclosingPrefetchStream(originalFlightStream) {
    const reader = originalFlightStream.getReader();
    return new ReadableStream({
        async pull (controller) {
            for(;;){
                const result = await reader.read();
                if (result.done) {
                    // Source finished — intentionally leave the target open.
                    return;
                }
                // Forward the chunk and keep draining the source.
                controller.enqueue(result.value);
            }
        }
    });
}
//# sourceMappingURL=collect-segment-data.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,4 @@
import { createAsyncLocalStorage } from './async-local-storage';
// Module-level singleton AsyncLocalStorage for the console store; re-exported
// through console-async-storage.external so all layers share one instance.
export const consoleAsyncStorageInstance = createAsyncLocalStorage();
//# sourceMappingURL=console-async-storage-instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/console-async-storage-instance.ts"],"sourcesContent":["import { createAsyncLocalStorage } from './async-local-storage'\nimport type { ConsoleAsyncStorage } from './console-async-storage.external'\n\nexport const consoleAsyncStorageInstance: ConsoleAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["createAsyncLocalStorage","consoleAsyncStorageInstance"],"mappings":"AAAA,SAASA,uBAAuB,QAAQ,wBAAuB;AAG/D,OAAO,MAAMC,8BACXD,0BAAyB","ignoreList":[0]}

View File

@@ -0,0 +1,7 @@
// Share the instance module in the next-shared layer: the import attribute
// routes the import through Turbopack's `next-shared` transition so every
// bundle layer resolves the same module (and therefore the same storage).
import { consoleAsyncStorageInstance } from './console-async-storage-instance' with {
    'turbopack-transition': 'next-shared'
};
export { consoleAsyncStorageInstance as consoleAsyncStorage };

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/console-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { consoleAsyncStorageInstance } from './console-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\n\nexport interface ConsoleStore {\n /**\n * if true the color of logs output will be dimmed to indicate the log is\n * from a repeat or validation render that is not typically relevant to\n * the primary action the server is taking.\n */\n readonly dim: boolean\n}\n\nexport type ConsoleAsyncStorage = AsyncLocalStorage<ConsoleStore>\n\nexport { consoleAsyncStorageInstance as consoleAsyncStorage }\n"],"names":["consoleAsyncStorageInstance","consoleAsyncStorage"],"mappings":"AAEA,qDAAqD;AACrD,SAASA,2BAA2B,QAAQ,wCAAwC;IAAE,wBAAwB;AAAc,EAAC;AAa7H,SAASA,+BAA+BC,mBAAmB,GAAE","ignoreList":[0]}

View File

@@ -0,0 +1,23 @@
import { interopDefault } from './interop-default';
import { getLinkAndScriptTags } from './get-css-inlined-link-tags';
import { getAssetQueryString } from './get-asset-query-string';
import { encodeURIPath } from '../../shared/lib/encode-uri-path';
import { renderCssResource } from './render-css-resource';
/**
 * Resolves the component for a layout/page/convention file together with the
 * CSS and JS assets associated with it that have not yet been injected higher
 * up the tree. Returns a `[Component, styles, scripts]` triple.
 */ export async function createComponentStylesAndScripts({ filePath, getComponent, injectedCSS, injectedJS, ctx }) {
    const { componentMod: { createElement } } = ctx;
    const { styles: cssFiles, scripts: scriptHrefs } = getLinkAndScriptTags(filePath, injectedCSS, injectedJS);
    const styles = renderCssResource(cssFiles, ctx);
    let scripts = null;
    if (scriptHrefs) {
        scripts = scriptHrefs.map((href, i)=>createElement('script', {
            src: `${ctx.assetPrefix}/_next/${encodeURIPath(href)}${getAssetQueryString(ctx, true)}`,
            async: true,
            key: `script-${i}`
        }));
    }
    // Unwrap the module's default export into the component to render.
    const Comp = interopDefault(await getComponent());
    return [
        Comp,
        styles,
        scripts
    ];
}
//# sourceMappingURL=create-component-styles-and-scripts.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/create-component-styles-and-scripts.tsx"],"sourcesContent":["import { interopDefault } from './interop-default'\nimport { getLinkAndScriptTags } from './get-css-inlined-link-tags'\nimport type { AppRenderContext } from './app-render'\nimport { getAssetQueryString } from './get-asset-query-string'\nimport { encodeURIPath } from '../../shared/lib/encode-uri-path'\nimport { renderCssResource } from './render-css-resource'\n\nexport async function createComponentStylesAndScripts({\n filePath,\n getComponent,\n injectedCSS,\n injectedJS,\n ctx,\n}: {\n filePath: string\n getComponent: () => any\n injectedCSS: Set<string>\n injectedJS: Set<string>\n ctx: AppRenderContext\n}): Promise<[React.ComponentType<any>, React.ReactNode, React.ReactNode]> {\n const {\n componentMod: { createElement },\n } = ctx\n const { styles: entryCssFiles, scripts: jsHrefs } = getLinkAndScriptTags(\n filePath,\n injectedCSS,\n injectedJS\n )\n\n const styles = renderCssResource(entryCssFiles, ctx)\n\n const scripts = jsHrefs\n ? 
jsHrefs.map((href, index) =>\n createElement('script', {\n src: `${ctx.assetPrefix}/_next/${encodeURIPath(href)}${getAssetQueryString(ctx, true)}`,\n async: true,\n key: `script-${index}`,\n })\n )\n : null\n\n const Comp = interopDefault(await getComponent())\n\n return [Comp, styles, scripts]\n}\n"],"names":["interopDefault","getLinkAndScriptTags","getAssetQueryString","encodeURIPath","renderCssResource","createComponentStylesAndScripts","filePath","getComponent","injectedCSS","injectedJS","ctx","componentMod","createElement","styles","entryCssFiles","scripts","jsHrefs","map","href","index","src","assetPrefix","async","key","Comp"],"mappings":"AAAA,SAASA,cAAc,QAAQ,oBAAmB;AAClD,SAASC,oBAAoB,QAAQ,8BAA6B;AAElE,SAASC,mBAAmB,QAAQ,2BAA0B;AAC9D,SAASC,aAAa,QAAQ,mCAAkC;AAChE,SAASC,iBAAiB,QAAQ,wBAAuB;AAEzD,OAAO,eAAeC,gCAAgC,EACpDC,QAAQ,EACRC,YAAY,EACZC,WAAW,EACXC,UAAU,EACVC,GAAG,EAOJ;IACC,MAAM,EACJC,cAAc,EAAEC,aAAa,EAAE,EAChC,GAAGF;IACJ,MAAM,EAAEG,QAAQC,aAAa,EAAEC,SAASC,OAAO,EAAE,GAAGf,qBAClDK,UACAE,aACAC;IAGF,MAAMI,SAAST,kBAAkBU,eAAeJ;IAEhD,MAAMK,UAAUC,UACZA,QAAQC,GAAG,CAAC,CAACC,MAAMC,QACjBP,cAAc,UAAU;YACtBQ,KAAK,GAAGV,IAAIW,WAAW,CAAC,OAAO,EAAElB,cAAce,QAAQhB,oBAAoBQ,KAAK,OAAO;YACvFY,OAAO;YACPC,KAAK,CAAC,OAAO,EAAEJ,OAAO;QACxB,MAEF;IAEJ,MAAMK,OAAOxB,eAAe,MAAMO;IAElC,OAAO;QAACiB;QAAMX;QAAQE;KAAQ;AAChC","ignoreList":[0]}

View File

@@ -0,0 +1,734 @@
import { isClientReference, isUseCacheFunction } from '../../lib/client-and-server-references';
import { getLayoutOrPageModule } from '../lib/app-dir-module';
import { interopDefault } from './interop-default';
import { parseLoaderTree } from '../../shared/lib/router/utils/parse-loader-tree';
import { createComponentStylesAndScripts } from './create-component-styles-and-scripts';
import { getLayerAssets } from './get-layer-assets';
import { hasLoadingComponentInTree } from './has-loading-component-in-tree';
import { validateRevalidate } from '../lib/patch-fetch';
import { PARALLEL_ROUTE_DEFAULT_PATH } from '../../client/components/builtin/default';
import { getTracer } from '../lib/trace/tracer';
import { NextNodeServerSpan } from '../lib/trace/constants';
import { StaticGenBailoutError } from '../../client/components/static-generation-bailout';
import { workUnitAsyncStorage } from './work-unit-async-storage.external';
import { DEFAULT_SEGMENT_KEY } from '../../shared/lib/segment';
import { BOUNDARY_PREFIX, BOUNDARY_SUFFIX, BUILTIN_PREFIX, getConventionPathByType, isNextjsBuiltinFilePath } from './segment-explorer-path';
/**
 * Use the provided loader tree to create the React Component tree.
 * Wraps the recursive construction in a tracing span for diagnostics.
 */ export function createComponentTree(props) {
    const tracer = getTracer();
    const spanOptions = {
        spanName: 'build component tree'
    };
    return tracer.trace(NextNodeServerSpan.createComponentTree, spanOptions, ()=>createComponentTreeInternal(props, true));
}
function errorMissingDefaultExport(pagePath, convention) {
const normalizedPagePath = pagePath === '/' ? '' : pagePath;
throw Object.defineProperty(new Error(`The default export is not a React Component in "${normalizedPagePath}/${convention}"`), "__NEXT_ERROR_CODE", {
value: "E45",
enumerable: false,
configurable: true
});
}
// NOTE(review): this key's consumer is outside the visible chunk — presumably
// it identifies the cache-node slot in serialized seed data; confirm its use
// in createComponentTreeInternal before changing the value.
const cacheNodeKey = 'c';
async function createComponentTreeInternal({ loaderTree: tree, parentParams, rootLayoutIncluded, injectedCSS, injectedJS, injectedFontPreloadTags, ctx, missingSlots, preloadCallbacks, authInterrupts, MetadataOutlet }, isRoot) {
const { renderOpts: { nextConfigOutput, experimental, cacheComponents }, workStore, componentMod: { createElement, Fragment, SegmentViewNode, HTTPAccessFallbackBoundary, LayoutRouter, RenderFromTemplateContext, ClientPageRoot, ClientSegmentRoot, createServerSearchParamsForServerPage, createPrerenderSearchParamsForClientPage, createServerParamsForServerSegment, createPrerenderParamsForClientSegment, serverHooks: { DynamicServerError }, Postpone }, pagePath, getDynamicParamFromSegment, isPrefetch, query } = ctx;
const { page, conventionPath, segment, modules, parallelRoutes } = parseLoaderTree(tree);
const { layout, template, error, loading, 'not-found': notFound, forbidden, unauthorized } = modules;
const injectedCSSWithCurrentLayout = new Set(injectedCSS);
const injectedJSWithCurrentLayout = new Set(injectedJS);
const injectedFontPreloadTagsWithCurrentLayout = new Set(injectedFontPreloadTags);
const layerAssets = getLayerAssets({
preloadCallbacks,
ctx,
layoutOrPagePath: conventionPath,
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout,
injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout
});
const [Template, templateStyles, templateScripts] = template ? await createComponentStylesAndScripts({
ctx,
filePath: template[1],
getComponent: template[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [
Fragment
];
const [ErrorComponent, errorStyles, errorScripts] = error ? await createComponentStylesAndScripts({
ctx,
filePath: error[1],
getComponent: error[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const [Loading, loadingStyles, loadingScripts] = loading ? await createComponentStylesAndScripts({
ctx,
filePath: loading[1],
getComponent: loading[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const isLayout = typeof layout !== 'undefined';
const isPage = typeof page !== 'undefined';
const { mod: layoutOrPageMod, modType } = await getTracer().trace(NextNodeServerSpan.getLayoutOrPageModule, {
hideSpan: !(isLayout || isPage),
spanName: 'resolve segment modules',
attributes: {
'next.segment': segment
}
}, ()=>getLayoutOrPageModule(tree));
/**
* Checks if the current segment is a root layout.
*/ const rootLayoutAtThisLevel = isLayout && !rootLayoutIncluded;
/**
* Checks if the current segment or any level above it has a root layout.
*/ const rootLayoutIncludedAtThisLevelOrAbove = rootLayoutIncluded || rootLayoutAtThisLevel;
const [NotFound, notFoundStyles] = notFound ? await createComponentStylesAndScripts({
ctx,
filePath: notFound[1],
getComponent: notFound[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const prefetchConfig = layoutOrPageMod ? layoutOrPageMod.unstable_prefetch : undefined;
/** Whether this segment should use a runtime prefetch instead of a static prefetch. */ const hasRuntimePrefetch = (prefetchConfig == null ? void 0 : prefetchConfig.mode) === 'runtime';
const [Forbidden, forbiddenStyles] = authInterrupts && forbidden ? await createComponentStylesAndScripts({
ctx,
filePath: forbidden[1],
getComponent: forbidden[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const [Unauthorized, unauthorizedStyles] = authInterrupts && unauthorized ? await createComponentStylesAndScripts({
ctx,
filePath: unauthorized[1],
getComponent: unauthorized[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
let dynamic = layoutOrPageMod == null ? void 0 : layoutOrPageMod.dynamic;
if (nextConfigOutput === 'export') {
if (!dynamic || dynamic === 'auto') {
dynamic = 'error';
} else if (dynamic === 'force-dynamic') {
// force-dynamic is always incompatible with 'export'. We must interrupt the build
throw Object.defineProperty(new StaticGenBailoutError(`Page with \`dynamic = "force-dynamic"\` couldn't be exported. \`output: "export"\` requires all pages be renderable statically because there is no runtime server to dynamically render routes in this output format. Learn more: https://nextjs.org/docs/app/building-your-application/deploying/static-exports`), "__NEXT_ERROR_CODE", {
value: "E527",
enumerable: false,
configurable: true
});
}
}
if (typeof dynamic === 'string') {
// the nested most config wins so we only force-static
// if it's configured above any parent that configured
// otherwise
if (dynamic === 'error') {
workStore.dynamicShouldError = true;
} else if (dynamic === 'force-dynamic') {
workStore.forceDynamic = true;
// TODO: (PPR) remove this bailout once PPR is the default
if (workStore.isStaticGeneration && !experimental.isRoutePPREnabled) {
// If the postpone API isn't available, we can't postpone the render and
// therefore we can't use the dynamic API.
const err = Object.defineProperty(new DynamicServerError(`Page with \`dynamic = "force-dynamic"\` won't be rendered statically.`), "__NEXT_ERROR_CODE", {
value: "E585",
enumerable: false,
configurable: true
});
workStore.dynamicUsageDescription = err.message;
workStore.dynamicUsageStack = err.stack;
throw err;
}
} else {
workStore.dynamicShouldError = false;
workStore.forceStatic = dynamic === 'force-static';
}
}
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache) === 'string') {
workStore.fetchCache = layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache;
}
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) !== 'undefined') {
validateRevalidate(layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate, workStore.route);
}
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) === 'number') {
const defaultRevalidate = layoutOrPageMod.revalidate;
const workUnitStore = workUnitAsyncStorage.getStore();
if (workUnitStore) {
switch(workUnitStore.type){
case 'prerender':
case 'prerender-runtime':
case 'prerender-legacy':
case 'prerender-ppr':
if (workUnitStore.revalidate > defaultRevalidate) {
workUnitStore.revalidate = defaultRevalidate;
}
break;
case 'request':
break;
// createComponentTree is not called for these stores:
case 'cache':
case 'private-cache':
case 'prerender-client':
case 'unstable-cache':
break;
default:
workUnitStore;
}
}
if (!workStore.forceStatic && workStore.isStaticGeneration && defaultRevalidate === 0 && // If the postpone API isn't available, we can't postpone the render and
// therefore we can't use the dynamic API.
!experimental.isRoutePPREnabled) {
const dynamicUsageDescription = `revalidate: 0 configured ${segment}`;
workStore.dynamicUsageDescription = dynamicUsageDescription;
throw Object.defineProperty(new DynamicServerError(dynamicUsageDescription), "__NEXT_ERROR_CODE", {
value: "E394",
enumerable: false,
configurable: true
});
}
}
const isStaticGeneration = workStore.isStaticGeneration;
// Assume the segment we're rendering contains only partial data if PPR is
// enabled and this is a statically generated response. This is used by the
// client Segment Cache after a prefetch to determine if it can skip the
// second request to fill in the dynamic data.
//
// It's OK for this to be `true` when the data is actually fully static, but
// it's not OK for this to be `false` when the data possibly contains holes.
// Although the value here is overly pessimistic, for prefetches, it will be
// replaced by a more specific value when the data is later processed into
// per-segment responses (see collect-segment-data.tsx)
//
// For dynamic requests, this must always be `false` because dynamic responses
// are never partial.
const isPossiblyPartialResponse = isStaticGeneration && experimental.isRoutePPREnabled === true;
const LayoutOrPage = layoutOrPageMod ? interopDefault(layoutOrPageMod) : undefined;
/**
* The React Component to render.
*/ let MaybeComponent = LayoutOrPage;
if (process.env.NODE_ENV === 'development' || isStaticGeneration) {
const { isValidElementType } = require('next/dist/compiled/react-is');
if (typeof MaybeComponent !== 'undefined' && !isValidElementType(MaybeComponent)) {
errorMissingDefaultExport(pagePath, modType ?? 'page');
}
if (typeof ErrorComponent !== 'undefined' && !isValidElementType(ErrorComponent)) {
errorMissingDefaultExport(pagePath, 'error');
}
if (typeof Loading !== 'undefined' && !isValidElementType(Loading)) {
errorMissingDefaultExport(pagePath, 'loading');
}
if (typeof NotFound !== 'undefined' && !isValidElementType(NotFound)) {
errorMissingDefaultExport(pagePath, 'not-found');
}
if (typeof Forbidden !== 'undefined' && !isValidElementType(Forbidden)) {
errorMissingDefaultExport(pagePath, 'forbidden');
}
if (typeof Unauthorized !== 'undefined' && !isValidElementType(Unauthorized)) {
errorMissingDefaultExport(pagePath, 'unauthorized');
}
}
// Handle dynamic segment params.
const segmentParam = getDynamicParamFromSegment(segment);
// Create object holding the parent params and current params
let currentParams = parentParams;
if (segmentParam && segmentParam.value !== null) {
currentParams = {
...parentParams,
[segmentParam.param]: segmentParam.value
};
}
// Resolve the segment param
const isSegmentViewEnabled = !!ctx.renderOpts.dev;
const dir = (process.env.NEXT_RUNTIME === 'edge' ? process.env.__NEXT_EDGE_PROJECT_DIR : ctx.renderOpts.dir) || '';
const [notFoundElement, notFoundFilePath] = await createBoundaryConventionElement({
ctx,
conventionName: 'not-found',
Component: NotFound,
styles: notFoundStyles,
tree
});
const [forbiddenElement] = await createBoundaryConventionElement({
ctx,
conventionName: 'forbidden',
Component: Forbidden,
styles: forbiddenStyles,
tree
});
const [unauthorizedElement] = await createBoundaryConventionElement({
ctx,
conventionName: 'unauthorized',
Component: Unauthorized,
styles: unauthorizedStyles,
tree
});
// TODO: Combine this `map` traversal with the loop below that turns the array
// into an object.
const parallelRouteMap = await Promise.all(Object.keys(parallelRoutes).map(async (parallelRouteKey)=>{
const isChildrenRouteKey = parallelRouteKey === 'children';
const parallelRoute = parallelRoutes[parallelRouteKey];
const notFoundComponent = isChildrenRouteKey ? notFoundElement : undefined;
const forbiddenComponent = isChildrenRouteKey ? forbiddenElement : undefined;
const unauthorizedComponent = isChildrenRouteKey ? unauthorizedElement : undefined;
// if we're prefetching and that there's a Loading component, we bail out
// otherwise we keep rendering for the prefetch.
// We also want to bail out if there's no Loading component in the tree.
let childCacheNodeSeedData = null;
if (// Before PPR, the way instant navigations work in Next.js is we
// prefetch everything up to the first route segment that defines a
// loading.tsx boundary. (We do the same if there's no loading
// boundary in the entire tree, because we don't want to prefetch too
// much) The rest of the tree is deferred until the actual navigation.
// It does not take into account whether the data is dynamic — even if
// the tree is completely static, it will still defer everything
// inside the loading boundary.
//
// This behavior predates PPR and is only relevant if the
// PPR flag is not enabled.
isPrefetch && (Loading || !hasLoadingComponentInTree(parallelRoute)) && // The approach with PPR is different — loading.tsx behaves like a
// regular Suspense boundary and has no special behavior.
//
// With PPR, we prefetch as deeply as possible, and only defer when
// dynamic data is accessed. If so, we only defer the nearest parent
// Suspense boundary of the dynamic data access, regardless of whether
// the boundary is defined by loading.tsx or a normal <Suspense>
// component in userspace.
//
// NOTE: In practice this usually means we'll end up prefetching more
// than we were before PPR, which may or may not be considered a
// performance regression by some apps. The plan is to address this
// before General Availability of PPR by introducing granular
// per-segment fetching, so we can reuse as much of the tree as
// possible during both prefetches and dynamic navigations. But during
// the beta period, we should be clear about this trade off in our
// communications.
!experimental.isRoutePPREnabled) {
// Don't prefetch this child. This will trigger a lazy fetch by the
// client router.
} else {
// Create the child component
if (process.env.NODE_ENV === 'development' && missingSlots) {
var _parsedTree_conventionPath;
// When we detect the default fallback (which triggers a 404), we collect the missing slots
// to provide more helpful debug information during development mode.
const parsedTree = parseLoaderTree(parallelRoute);
if ((_parsedTree_conventionPath = parsedTree.conventionPath) == null ? void 0 : _parsedTree_conventionPath.endsWith(PARALLEL_ROUTE_DEFAULT_PATH)) {
missingSlots.add(parallelRouteKey);
}
}
const seedData = await createComponentTreeInternal({
loaderTree: parallelRoute,
parentParams: currentParams,
rootLayoutIncluded: rootLayoutIncludedAtThisLevelOrAbove,
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout,
injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout,
ctx,
missingSlots,
preloadCallbacks,
authInterrupts,
// `StreamingMetadataOutlet` is used to conditionally throw. In the case of parallel routes we will have more than one page
// but we only want to throw on the first one.
MetadataOutlet: isChildrenRouteKey ? MetadataOutlet : null
}, false);
childCacheNodeSeedData = seedData;
}
const templateNode = createElement(Template, null, createElement(RenderFromTemplateContext, null));
const templateFilePath = getConventionPathByType(tree, dir, 'template');
const errorFilePath = getConventionPathByType(tree, dir, 'error');
const loadingFilePath = getConventionPathByType(tree, dir, 'loading');
const globalErrorFilePath = isRoot ? getConventionPathByType(tree, dir, 'global-error') : undefined;
const wrappedErrorStyles = isSegmentViewEnabled && errorFilePath ? createElement(SegmentViewNode, {
type: 'error',
pagePath: errorFilePath
}, errorStyles) : errorStyles;
// Add a suffix to avoid conflict with the segment view node representing rendered file.
// existence: not-found.tsx@boundary
// rendered: not-found.tsx
const fileNameSuffix = BOUNDARY_SUFFIX;
const segmentViewBoundaries = isSegmentViewEnabled ? createElement(Fragment, null, notFoundFilePath && createElement(SegmentViewNode, {
type: `${BOUNDARY_PREFIX}not-found`,
pagePath: notFoundFilePath + fileNameSuffix
}), loadingFilePath && createElement(SegmentViewNode, {
type: `${BOUNDARY_PREFIX}loading`,
pagePath: loadingFilePath + fileNameSuffix
}), errorFilePath && createElement(SegmentViewNode, {
type: `${BOUNDARY_PREFIX}error`,
pagePath: errorFilePath + fileNameSuffix
}), globalErrorFilePath && createElement(SegmentViewNode, {
type: `${BOUNDARY_PREFIX}global-error`,
pagePath: isNextjsBuiltinFilePath(globalErrorFilePath) ? `${BUILTIN_PREFIX}global-error.js${fileNameSuffix}` : globalErrorFilePath
})) : null;
return [
parallelRouteKey,
createElement(LayoutRouter, {
parallelRouterKey: parallelRouteKey,
error: ErrorComponent,
errorStyles: wrappedErrorStyles,
errorScripts: errorScripts,
template: isSegmentViewEnabled && templateFilePath ? createElement(SegmentViewNode, {
type: 'template',
pagePath: templateFilePath
}, templateNode) : templateNode,
templateStyles: templateStyles,
templateScripts: templateScripts,
notFound: notFoundComponent,
forbidden: forbiddenComponent,
unauthorized: unauthorizedComponent,
...isSegmentViewEnabled && {
segmentViewBoundaries
}
}),
childCacheNodeSeedData
];
}));
// Convert the parallel route map into an object after all promises have been resolved.
let parallelRouteProps = {};
let parallelRouteCacheNodeSeedData = {};
for (const parallelRoute of parallelRouteMap){
const [parallelRouteKey, parallelRouteProp, flightData] = parallelRoute;
parallelRouteProps[parallelRouteKey] = parallelRouteProp;
parallelRouteCacheNodeSeedData[parallelRouteKey] = flightData;
}
let loadingElement = Loading ? createElement(Loading, {
key: 'l'
}) : null;
const loadingFilePath = getConventionPathByType(tree, dir, 'loading');
if (isSegmentViewEnabled && loadingElement) {
if (loadingFilePath) {
loadingElement = createElement(SegmentViewNode, {
key: cacheNodeKey + '-loading',
type: 'loading',
pagePath: loadingFilePath
}, loadingElement);
}
}
const loadingData = loadingElement ? [
loadingElement,
loadingStyles,
loadingScripts
] : null;
// When the segment does not have a layout or page we still have to add the layout router to ensure the path holds the loading component
if (!MaybeComponent) {
return [
createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, parallelRouteProps.children),
parallelRouteCacheNodeSeedData,
loadingData,
isPossiblyPartialResponse,
hasRuntimePrefetch
];
}
const Component = MaybeComponent;
// If force-dynamic is used and the current render supports postponing, we
// replace it with a node that will postpone the render. This ensures that the
// postpone is invoked during the react render phase and not during the next
// render phase.
// @TODO this does not actually do what it seems like it would or should do. The idea is that
// if we are rendering in a force-dynamic mode and we can postpone we should only make the segments
// that ask for force-dynamic to be dynamic, allowing other segments to still prerender. However
// because this comes after the children traversal and the static generation store is mutated every segment
// along the parent path of a force-dynamic segment will hit this condition effectively making the entire
// render force-dynamic. We should refactor this function so that we can correctly track which segments
// need to be dynamic
if (workStore.isStaticGeneration && workStore.forceDynamic && experimental.isRoutePPREnabled) {
return [
createElement(Fragment, {
key: cacheNodeKey
}, createElement(Postpone, {
reason: 'dynamic = "force-dynamic" was used',
route: workStore.route
}), layerAssets),
parallelRouteCacheNodeSeedData,
loadingData,
true,
hasRuntimePrefetch
];
}
const isClientComponent = isClientReference(layoutOrPageMod);
if (process.env.NODE_ENV === 'development' && 'params' in parallelRouteProps) {
// @TODO consider making this an error and running the check in build as well
console.error(`"params" is a reserved prop in Layouts and Pages and cannot be used as the name of a parallel route in ${segment}`);
}
if (isPage) {
const PageComponent = Component;
// Assign searchParams to props if this is a page
let pageElement;
if (isClientComponent) {
if (cacheComponents) {
// Params are omitted when Cache Components is enabled
pageElement = createElement(ClientPageRoot, {
Component: PageComponent,
serverProvidedParams: null
});
} else if (isStaticGeneration) {
const promiseOfParams = createPrerenderParamsForClientSegment(currentParams);
const promiseOfSearchParams = createPrerenderSearchParamsForClientPage(workStore);
pageElement = createElement(ClientPageRoot, {
Component: PageComponent,
serverProvidedParams: {
searchParams: query,
params: currentParams,
promises: [
promiseOfSearchParams,
promiseOfParams
]
}
});
} else {
pageElement = createElement(ClientPageRoot, {
Component: PageComponent,
serverProvidedParams: {
searchParams: query,
params: currentParams,
promises: null
}
});
}
} else {
// If we are passing params to a server component Page we need to track
// their usage in case the current render mode tracks dynamic API usage.
const params = createServerParamsForServerSegment(currentParams, workStore);
// If we are passing searchParams to a server component Page we need to
// track their usage in case the current render mode tracks dynamic API
// usage.
let searchParams = createServerSearchParamsForServerPage(query, workStore);
if (isUseCacheFunction(PageComponent)) {
const UseCachePageComponent = PageComponent;
pageElement = createElement(UseCachePageComponent, {
params: params,
searchParams: searchParams,
$$isPage: true
});
} else {
pageElement = createElement(PageComponent, {
params: params,
searchParams: searchParams
});
}
}
const isDefaultSegment = segment === DEFAULT_SEGMENT_KEY;
const pageFilePath = getConventionPathByType(tree, dir, 'page') ?? getConventionPathByType(tree, dir, 'defaultPage');
const segmentType = isDefaultSegment ? 'default' : 'page';
const wrappedPageElement = isSegmentViewEnabled && pageFilePath ? createElement(SegmentViewNode, {
key: cacheNodeKey + '-' + segmentType,
type: segmentType,
pagePath: pageFilePath
}, pageElement) : pageElement;
return [
createElement(Fragment, {
key: cacheNodeKey
}, wrappedPageElement, layerAssets, MetadataOutlet ? createElement(MetadataOutlet, null) : null),
parallelRouteCacheNodeSeedData,
loadingData,
isPossiblyPartialResponse,
hasRuntimePrefetch
];
} else {
const SegmentComponent = Component;
const isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot = rootLayoutAtThisLevel && 'children' in parallelRoutes && Object.keys(parallelRoutes).length > 1;
let segmentNode;
if (isClientComponent) {
let clientSegment;
if (cacheComponents) {
// Params are omitted when Cache Components is enabled
clientSegment = createElement(ClientSegmentRoot, {
Component: SegmentComponent,
slots: parallelRouteProps,
serverProvidedParams: null
});
} else if (isStaticGeneration) {
const promiseOfParams = createPrerenderParamsForClientSegment(currentParams);
clientSegment = createElement(ClientSegmentRoot, {
Component: SegmentComponent,
slots: parallelRouteProps,
serverProvidedParams: {
params: currentParams,
promises: [
promiseOfParams
]
}
});
} else {
clientSegment = createElement(ClientSegmentRoot, {
Component: SegmentComponent,
slots: parallelRouteProps,
serverProvidedParams: {
params: currentParams,
promises: null
}
});
}
if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
let notfoundClientSegment;
let forbiddenClientSegment;
let unauthorizedClientSegment;
// TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
// This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
// but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
// We should instead look into handling the fallback behavior differently in development mode so that it doesn't
// rely on the `NotFound` behavior.
notfoundClientSegment = createErrorBoundaryClientSegmentRoot({
ctx,
ErrorBoundaryComponent: NotFound,
errorElement: notFoundElement,
ClientSegmentRoot,
layerAssets,
SegmentComponent,
currentParams
});
forbiddenClientSegment = createErrorBoundaryClientSegmentRoot({
ctx,
ErrorBoundaryComponent: Forbidden,
errorElement: forbiddenElement,
ClientSegmentRoot,
layerAssets,
SegmentComponent,
currentParams
});
unauthorizedClientSegment = createErrorBoundaryClientSegmentRoot({
ctx,
ErrorBoundaryComponent: Unauthorized,
errorElement: unauthorizedElement,
ClientSegmentRoot,
layerAssets,
SegmentComponent,
currentParams
});
if (notfoundClientSegment || forbiddenClientSegment || unauthorizedClientSegment) {
segmentNode = createElement(HTTPAccessFallbackBoundary, {
key: cacheNodeKey,
notFound: notfoundClientSegment,
forbidden: forbiddenClientSegment,
unauthorized: unauthorizedClientSegment
}, layerAssets, clientSegment);
} else {
segmentNode = createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, clientSegment);
}
} else {
segmentNode = createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, clientSegment);
}
} else {
const params = createServerParamsForServerSegment(currentParams, workStore);
let serverSegment;
if (isUseCacheFunction(SegmentComponent)) {
const UseCacheLayoutComponent = SegmentComponent;
serverSegment = createElement(UseCacheLayoutComponent, {
...parallelRouteProps,
params: params,
$$isLayout: true
}, // Force static children here so that they're validated.
// See https://github.com/facebook/react/pull/34846
parallelRouteProps.children);
} else {
serverSegment = createElement(SegmentComponent, {
...parallelRouteProps,
params: params
}, // Force static children here so that they're validated.
// See https://github.com/facebook/react/pull/34846
parallelRouteProps.children);
}
if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
// TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
// This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
// but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
// We should instead look into handling the fallback behavior differently in development mode so that it doesn't
// rely on the `NotFound` behavior.
segmentNode = createElement(HTTPAccessFallbackBoundary, {
key: cacheNodeKey,
notFound: notFoundElement ? createElement(Fragment, null, layerAssets, createElement(SegmentComponent, {
params: params
}, notFoundStyles, notFoundElement)) : undefined
}, layerAssets, serverSegment);
} else {
segmentNode = createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, serverSegment);
}
}
const layoutFilePath = getConventionPathByType(tree, dir, 'layout');
const wrappedSegmentNode = isSegmentViewEnabled && layoutFilePath ? createElement(SegmentViewNode, {
key: 'layout',
type: 'layout',
pagePath: layoutFilePath
}, segmentNode) : segmentNode;
// For layouts we just render the component
return [
wrappedSegmentNode,
parallelRouteCacheNodeSeedData,
loadingData,
isPossiblyPartialResponse,
hasRuntimePrefetch
];
}
}
function createErrorBoundaryClientSegmentRoot({ ctx, ErrorBoundaryComponent, errorElement, ClientSegmentRoot, layerAssets, SegmentComponent, currentParams }) {
    const { componentMod: { createElement, Fragment } } = ctx;
    // Without a boundary component there is nothing to wrap — signal the caller
    // to skip this fallback entirely.
    if (!ErrorBoundaryComponent) {
        return null;
    }
    // Mount the boundary's error element as the sole `children` slot of the
    // segment, alongside the layer's assets.
    const slots = {
        children: errorElement
    };
    return createElement(Fragment, null, layerAssets, createElement(ClientSegmentRoot, {
        Component: SegmentComponent,
        slots: slots,
        params: currentParams
    }));
}
/**
 * Collects all dynamic params that appear above (and including) the root
 * layout of the given loader tree.
 */ export function getRootParams(loaderTree, getDynamicParamFromSegment) {
    // Kick off the recursive walk with an empty accumulator.
    const initialParams = {};
    return getRootParamsImpl(initialParams, loaderTree, getDynamicParamFromSegment);
}
function getRootParamsImpl(parentParams, loaderTree, getDynamicParamFromSegment) {
    const { segment, modules: { layout }, parallelRoutes } = parseLoaderTree(loaderTree);
    // Fold this segment's dynamic param (if it resolved to a value) into the
    // accumulated params.
    const segmentParam = getDynamicParamFromSegment(segment);
    const currentParams = segmentParam && segmentParam.value !== null ? {
        ...parentParams,
        [segmentParam.param]: segmentParam.value
    } : parentParams;
    if (typeof layout !== 'undefined') {
        // Reached the root layout: everything accumulated so far is a root param.
        return currentParams;
    }
    if (!parallelRoutes.children) {
        // This should really be an error but there are bugs in Turbopack that cause
        // the _not-found LoaderTree to not have any layouts. For rootParams sake
        // this is somewhat irrelevant when you are not customizing the 404 page.
        // If you are customizing 404
        // TODO update rootParams to make all params optional if `/app/not-found.tsx` is defined
        return currentParams;
    }
    // We stop looking for root params as soon as we hit the first layout, and
    // parallel routes cannot exist above the root layout, so every
    // parallelRoutes object visited here necessarily has a single `children`
    // slot and no others.
    return getRootParamsImpl(currentParams, parallelRoutes.children, getDynamicParamFromSegment);
}
async function createBoundaryConventionElement({ ctx, conventionName, Component, styles, tree }) {
    const { componentMod: { createElement, Fragment } } = ctx;
    const { SegmentViewNode } = ctx.componentMod;
    // Segment view nodes are a dev-only debugging affordance.
    const isSegmentViewEnabled = !!ctx.renderOpts.dev;
    const dir = (process.env.NEXT_RUNTIME === 'edge' ? process.env.__NEXT_EDGE_PROJECT_DIR : ctx.renderOpts.dir) || '';
    // Render the convention component (if present) together with its styles.
    let element = undefined;
    if (Component) {
        element = createElement(Fragment, null, createElement(Component, null), styles);
    }
    const pagePath = getConventionPathByType(tree, dir, conventionName);
    let wrappedElement = element;
    if (isSegmentViewEnabled && element) {
        wrappedElement = createElement(SegmentViewNode, {
            key: cacheNodeKey + '-' + conventionName,
            type: conventionName,
            // TODO: Discovered when moving to `createElement`.
            // `SegmentViewNode` doesn't support undefined `pagePath`
            pagePath: pagePath
        }, element);
    }
    return [
        wrappedElement,
        pagePath
    ];
}
//# sourceMappingURL=create-component-tree.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,156 @@
import stringHash from 'next/dist/compiled/string-hash';
import { formatServerError } from '../../lib/format-server-error';
import { SpanStatusCode, getTracer } from '../lib/trace/tracer';
import { isAbortError } from '../pipe-readable';
import { isBailoutToCSRError } from '../../shared/lib/lazy-dynamic/bailout-to-csr';
import { isDynamicServerError } from '../../client/components/hooks-server-context';
import { isNextRouterError } from '../../client/components/is-next-router-error';
import { isPrerenderInterruptedError } from './dynamic-rendering';
import { getProperError } from '../../lib/is-error';
import { createDigestWithErrorCode } from '../../lib/error-telemetry-utils';
import { isReactLargeShellError } from './react-large-shell-error';
/**
 * Returns a digest for well-known Next.js errors, otherwise `undefined`. If a
 * digest is returned this also means that the error does not need to be
 * reported.
 */ export function getDigestForWellKnownError(error) {
    // All of these are expected control-flow signals rather than real
    // failures, so their digest is surfaced and the caller skips logging:
    // - bailout to client-side rendering
    // - router navigation (redirect / notFound and friends)
    // - dynamic server usage during static generation without PPR (marks the
    //   whole page as dynamic; not actionable for the user)
    // - an interrupted prerender
    if (isBailoutToCSRError(error) || isNextRouterError(error) || isDynamicServerError(error) || isPrerenderInterruptedError(error)) {
        return error.digest;
    }
    return undefined;
}
/**
 * Builds the error handler passed to the React Flight (RSC) renderer. The
 * returned callback normalizes thrown values, assigns/derives a digest,
 * records the error on a tracing span and forwards it to the provided
 * reporter. The digest is returned so React can transport it to the client.
 */ export function createReactServerErrorHandler(shouldFormatError, isNextExport, reactServerErrors, onReactServerRenderError, spanToRecordOn) {
    return (thrownValue)=>{
        if (typeof thrownValue === 'string') {
            // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            return stringHash(thrownValue).toString();
        }
        // If the response was closed, we don't need to log the error.
        if (isAbortError(thrownValue)) return;
        // Well-known framework errors carry their own digest and need no report.
        const wellKnownDigest = getDigestForWellKnownError(thrownValue);
        if (wellKnownDigest) {
            return wellKnownDigest;
        }
        if (isReactLargeShellError(thrownValue)) {
            // TODO: Aggregate
            console.error(thrownValue);
            return undefined;
        }
        let err = getProperError(thrownValue);
        let silenceLog = false;
        if (!err.digest) {
            // No digest yet: derive one from the message and stack.
            // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            err.digest = createDigestWithErrorCode(err, stringHash(err.message + (err.stack || '')).toString());
        } else if (process.env.NODE_ENV === 'production' && reactServerErrors.has(err.digest)) {
            // This error is likely an obfuscated error from another react-server
            // environment (e.g. 'use cache'). We recover the original error here
            // for reporting purposes, but don't log it again — it was already
            // logged in the original environment.
            err = reactServerErrors.get(err.digest);
            silenceLog = true;
        } else {
            // Either we're in development (where we want to keep the transported
            // error with environmentName), or the error is not in reactServerErrors
            // but has a digest from other means. Keep the error as-is.
        }
        // @TODO by putting this here and not at the top it is possible that
        // we don't error the build in places we actually expect to
        if (!reactServerErrors.has(err.digest)) {
            reactServerErrors.set(err.digest, err);
        }
        // Format server errors in development to add more helpful error messages
        if (shouldFormatError) {
            formatServerError(err);
        }
        // Don't log the suppressed error during export
        const isSuppressedExportError = isNextExport && !!(err?.message?.includes('The specific message is omitted in production builds to avoid leaking sensitive details.'));
        if (!isSuppressedExportError) {
            // Record exception on the provided span if available, otherwise try active span.
            const span = spanToRecordOn ?? getTracer().getActiveScopeSpan();
            if (span) {
                span.recordException(err);
                span.setAttribute('error.type', err.name);
                span.setStatus({
                    code: SpanStatusCode.ERROR,
                    message: err.message
                });
            }
            onReactServerRenderError(err, silenceLog);
        }
        return err.digest;
    };
}
/**
 * Builds the error handler passed to the HTML (SSR) renderer. Mirrors
 * `createReactServerErrorHandler`, but additionally collects every thrown
 * value into `allCapturedErrors` and filters out errors that were already
 * reported by the react-server render so they aren't reported twice.
 */ export function createHTMLErrorHandler(shouldFormatError, isNextExport, reactServerErrors, allCapturedErrors, onHTMLRenderSSRError, spanToRecordOn) {
    return (thrownValue, errorInfo)=>{
        if (isReactLargeShellError(thrownValue)) {
            // TODO: Aggregate
            console.error(thrownValue);
            return undefined;
        }
        let isSSRError = true;
        allCapturedErrors.push(thrownValue);
        // If the response was closed, we don't need to log the error.
        if (isAbortError(thrownValue)) return;
        // Well-known framework errors carry their own digest and need no report.
        const wellKnownDigest = getDigestForWellKnownError(thrownValue);
        if (wellKnownDigest) {
            return wellKnownDigest;
        }
        const err = getProperError(thrownValue);
        if (!err.digest) {
            // No digest yet: derive one from the message plus the component
            // stack (falling back to the error's own stack).
            err.digest = createDigestWithErrorCode(err, stringHash(err.message + (errorInfo?.componentStack || err.stack || '')).toString());
        } else if (reactServerErrors.has(err.digest)) {
            // This error is likely an obfuscated error from react-server.
            // We recover the original error here and mark it as not being an
            // SSR error so it isn't reported a second time below.
            thrownValue = reactServerErrors.get(err.digest);
            isSSRError = false;
        } else {
            // The error is not from react-server but has a digest
            // from other means so we don't need to produce a new one
        }
        // Format server errors in development to add more helpful error messages
        if (shouldFormatError) {
            formatServerError(err);
        }
        // Don't log the suppressed error during export
        const isSuppressedExportError = isNextExport && !!(err?.message?.includes('The specific message is omitted in production builds to avoid leaking sensitive details.'));
        if (!isSuppressedExportError) {
            // HTML errors contain RSC errors as well, filter them out before reporting
            if (isSSRError) {
                // Record exception on the provided span if available, otherwise try active span.
                const span = spanToRecordOn ?? getTracer().getActiveScopeSpan();
                if (span) {
                    span.recordException(err);
                    span.setAttribute('error.type', err.name);
                    span.setStatus({
                        code: SpanStatusCode.ERROR,
                        message: err.message
                    });
                }
                onHTMLRenderSSRError(err, errorInfo);
            }
        }
        return err.digest;
    };
}
/**
 * True when the error originated in user code rather than being one of the
 * framework's control-flow signals (abort, bailout-to-CSR, router navigation).
 */ export function isUserLandError(err) {
    if (isAbortError(err)) return false;
    if (isBailoutToCSRError(err)) return false;
    return !isNextRouterError(err);
}
//# sourceMappingURL=create-error-handler.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,54 @@
import { HasLoadingBoundary } from '../../shared/lib/app-router-types';
import { addSearchParamsIfPageSegment } from '../../shared/lib/segment';
function createFlightRouterStateFromLoaderTreeImpl([segment, parallelRoutes, { layout, loading }], getDynamicParamFromSegment, searchParams, includeHasLoadingBoundary, didFindRootLayout) {
    // Resolve the segment's cache key, substituting dynamic params and (for
    // page segments) folding in the search params.
    const dynamicParam = getDynamicParamFromSegment(segment);
    const treeSegment = dynamicParam ? dynamicParam.treeSegment : segment;
    const segmentTree = [
        addSearchParamsIfPageSegment(treeSegment, searchParams),
        {}
    ];
    // Mark the first segment that has a layout as the "root" layout
    if (!didFindRootLayout && typeof layout !== 'undefined') {
        didFindRootLayout = true;
        segmentTree[4] = true;
    }
    // Recurse into every parallel route, tracking whether any child subtree
    // contains a loading boundary.
    const children = {};
    let childHasLoadingBoundary = false;
    for (const parallelRouteKey of Object.keys(parallelRoutes)){
        const childTree = createFlightRouterStateFromLoaderTreeImpl(parallelRoutes[parallelRouteKey], getDynamicParamFromSegment, searchParams, includeHasLoadingBoundary, didFindRootLayout);
        if (includeHasLoadingBoundary && childTree[5] !== HasLoadingBoundary.SubtreeHasNoLoadingBoundary) {
            childHasLoadingBoundary = true;
        }
        children[parallelRouteKey] = childTree;
    }
    segmentTree[1] = children;
    if (includeHasLoadingBoundary) {
        // During a route tree prefetch, the FlightRouterState includes whether a
        // tree has a loading boundary. The client uses this to determine if it can
        // skip the data prefetch request — if `hasLoadingBoundary` is `false`, the
        // data prefetch response will be empty, so there's no reason to request it.
        // NOTE: It would be better to accumulate this while building the loader
        // tree so we don't have to keep re-deriving it, but since this won't be
        // once PPR is enabled everywhere, it's not that important.
        if (loading) {
            segmentTree[5] = HasLoadingBoundary.SegmentHasLoadingBoundary;
        } else if (childHasLoadingBoundary) {
            segmentTree[5] = HasLoadingBoundary.SubtreeHasLoadingBoundary;
        } else {
            segmentTree[5] = HasLoadingBoundary.SubtreeHasNoLoadingBoundary;
        }
    }
    return segmentTree;
}
/**
 * Builds the FlightRouterState for a regular render: no `hasLoadingBoundary`
 * info is included, and the root layout has not been located yet.
 */ export function createFlightRouterStateFromLoaderTree(loaderTree, getDynamicParamFromSegment, searchParams) {
    return createFlightRouterStateFromLoaderTreeImpl(loaderTree, getDynamicParamFromSegment, searchParams, /* includeHasLoadingBoundary */ false, /* didFindRootLayout */ false);
}
/**
 * Builds the FlightRouterState for a route tree prefetch response.
 */ export function createRouteTreePrefetch(loaderTree, getDynamicParamFromSegment) {
    // Search params should not be added to page segment's cache key during a
    // route tree prefetch request, because they do not affect the structure of
    // the route. The client cache has its own logic to handle search params.
    const emptySearchParams = {};
    // During a route tree prefetch, we include `hasLoadingBoundary` in
    // the response.
    return createFlightRouterStateFromLoaderTreeImpl(loaderTree, getDynamicParamFromSegment, emptySearchParams, /* includeHasLoadingBoundary */ true, /* didFindRootLayout */ false);
}
//# sourceMappingURL=create-flight-router-state-from-loader-tree.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,65 @@
// micromatch is only available at node runtime, so it cannot be used here since the code path that calls this function
// can be run from edge. This is a simple implementation that safely achieves the required functionality.
// the goal is to match the functionality for remotePatterns as defined here -
// https://nextjs.org/docs/app/api-reference/components/image#remotepatterns
// TODO - retrofit micromatch to work in edge and use that instead
function matchWildcardDomain(domain, pattern) {
    const domainSegments = domain.split('.');
    const patternSegments = pattern.split('.');
    if (patternSegments.length < 1) {
        // pattern is empty and therefore invalid to match against
        return false;
    }
    if (domainSegments.length < patternSegments.length) {
        // domain has too few segments and thus cannot match
        return false;
    }
    // Prevent wildcards from matching entire domains (e.g. '**' or '*.com')
    // This ensures wildcards can only match subdomains, not the main domain
    if (patternSegments.length === 1 && (patternSegments[0] === '*' || patternSegments[0] === '**')) {
        return false;
    }
    // Walk the pattern right-to-left (TLD first), consuming one segment of
    // pattern and domain per iteration.
    while(patternSegments.length > 0){
        const patternSegment = patternSegments.pop();
        const domainSegment = domainSegments.pop();
        if (patternSegment === '') {
            // invalid pattern. pattern segments must be non empty
            return false;
        }
        if (patternSegment === '*') {
            // single wildcard matches exactly one non-empty domain segment
            if (!domainSegment) {
                return false;
            }
            continue;
        }
        if (patternSegment === '**') {
            // recursive wildcard is only valid as the leftmost pattern segment
            if (patternSegments.length > 0) {
                return false;
            }
            // it matches the rest of the domain, as long as at least one
            // segment remains to be consumed
            return domainSegment !== undefined;
        }
        // literal segments must match exactly
        if (domainSegment !== patternSegment) {
            return false;
        }
    }
    // We exhausted the pattern. If we also exhausted the domain we have a match
    return domainSegments.length === 0;
}
// Returns true when `originDomain` is explicitly listed in `allowedOrigins`
// or matches one of its wildcard patterns. Falsy entries are skipped.
export const isCsrfOriginAllowed = (originDomain, allowedOrigins = [])=>{
    for (const allowedOrigin of allowedOrigins) {
        if (!allowedOrigin) continue;
        if (allowedOrigin === originDomain) return true;
        if (matchWildcardDomain(originDomain, allowedOrigin)) return true;
    }
    return false;
};
//# sourceMappingURL=csrf-protection.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/csrf-protection.ts"],"sourcesContent":["// micromatch is only available at node runtime, so it cannot be used here since the code path that calls this function\n// can be run from edge. This is a simple implementation that safely achieves the required functionality.\n// the goal is to match the functionality for remotePatterns as defined here -\n// https://nextjs.org/docs/app/api-reference/components/image#remotepatterns\n// TODO - retrofit micromatch to work in edge and use that instead\nfunction matchWildcardDomain(domain: string, pattern: string) {\n const domainParts = domain.split('.')\n const patternParts = pattern.split('.')\n\n if (patternParts.length < 1) {\n // pattern is empty and therefore invalid to match against\n return false\n }\n\n if (domainParts.length < patternParts.length) {\n // domain has too few segments and thus cannot match\n return false\n }\n\n // Prevent wildcards from matching entire domains (e.g. '**' or '*.com')\n // This ensures wildcards can only match subdomains, not the main domain\n if (\n patternParts.length === 1 &&\n (patternParts[0] === '*' || patternParts[0] === '**')\n ) {\n return false\n }\n\n while (patternParts.length) {\n const patternPart = patternParts.pop()\n const domainPart = domainParts.pop()\n\n switch (patternPart) {\n case '': {\n // invalid pattern. pattern segments must be non empty\n return false\n }\n case '*': {\n // wildcard matches anything so we continue if the domain part is non-empty\n if (domainPart) {\n continue\n } else {\n return false\n }\n }\n case '**': {\n // if this is not the last item in the pattern the pattern is invalid\n if (patternParts.length > 0) {\n return false\n }\n // recursive wildcard matches anything so we terminate here if the domain part is non empty\n return domainPart !== undefined\n }\n case undefined:\n default: {\n if (domainPart !== patternPart) {\n return false\n }\n }\n }\n }\n\n // We exhausted the pattern. 
If we also exhausted the domain we have a match\n return domainParts.length === 0\n}\n\nexport const isCsrfOriginAllowed = (\n originDomain: string,\n allowedOrigins: string[] = []\n): boolean => {\n return allowedOrigins.some(\n (allowedOrigin) =>\n allowedOrigin &&\n (allowedOrigin === originDomain ||\n matchWildcardDomain(originDomain, allowedOrigin))\n )\n}\n"],"names":["matchWildcardDomain","domain","pattern","domainParts","split","patternParts","length","patternPart","pop","domainPart","undefined","isCsrfOriginAllowed","originDomain","allowedOrigins","some","allowedOrigin"],"mappings":"AAAA,uHAAuH;AACvH,yGAAyG;AACzG,8EAA8E;AAC9E,4EAA4E;AAC5E,kEAAkE;AAClE,SAASA,oBAAoBC,MAAc,EAAEC,OAAe;IAC1D,MAAMC,cAAcF,OAAOG,KAAK,CAAC;IACjC,MAAMC,eAAeH,QAAQE,KAAK,CAAC;IAEnC,IAAIC,aAAaC,MAAM,GAAG,GAAG;QAC3B,0DAA0D;QAC1D,OAAO;IACT;IAEA,IAAIH,YAAYG,MAAM,GAAGD,aAAaC,MAAM,EAAE;QAC5C,oDAAoD;QACpD,OAAO;IACT;IAEA,wEAAwE;IACxE,wEAAwE;IACxE,IACED,aAAaC,MAAM,KAAK,KACvBD,CAAAA,YAAY,CAAC,EAAE,KAAK,OAAOA,YAAY,CAAC,EAAE,KAAK,IAAG,GACnD;QACA,OAAO;IACT;IAEA,MAAOA,aAAaC,MAAM,CAAE;QAC1B,MAAMC,cAAcF,aAAaG,GAAG;QACpC,MAAMC,aAAaN,YAAYK,GAAG;QAElC,OAAQD;YACN,KAAK;gBAAI;oBACP,sDAAsD;oBACtD,OAAO;gBACT;YACA,KAAK;gBAAK;oBACR,2EAA2E;oBAC3E,IAAIE,YAAY;wBACd;oBACF,OAAO;wBACL,OAAO;oBACT;gBACF;YACA,KAAK;gBAAM;oBACT,qEAAqE;oBACrE,IAAIJ,aAAaC,MAAM,GAAG,GAAG;wBAC3B,OAAO;oBACT;oBACA,2FAA2F;oBAC3F,OAAOG,eAAeC;gBACxB;YACA,KAAKA;YACL;gBAAS;oBACP,IAAID,eAAeF,aAAa;wBAC9B,OAAO;oBACT;gBACF;QACF;IACF;IAEA,4EAA4E;IAC5E,OAAOJ,YAAYG,MAAM,KAAK;AAChC;AAEA,OAAO,MAAMK,sBAAsB,CACjCC,cACAC,iBAA2B,EAAE;IAE7B,OAAOA,eAAeC,IAAI,CACxB,CAACC,gBACCA,iBACCA,CAAAA,kBAAkBH,gBACjBZ,oBAAoBY,cAAcG,cAAa;AAEvD,EAAC","ignoreList":[0]}

View File

@@ -0,0 +1,4 @@
import { createAsyncLocalStorage } from './async-local-storage';
// Module-scoped singleton storage instance, created once at module load.
export const dynamicAccessAsyncStorageInstance = createAsyncLocalStorage();
//# sourceMappingURL=dynamic-access-async-storage-instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/dynamic-access-async-storage-instance.ts"],"sourcesContent":["import { createAsyncLocalStorage } from './async-local-storage'\nimport type { DynamicAccessStorage } from './dynamic-access-async-storage.external'\n\nexport const dynamicAccessAsyncStorageInstance: DynamicAccessStorage =\n createAsyncLocalStorage()\n"],"names":["createAsyncLocalStorage","dynamicAccessAsyncStorageInstance"],"mappings":"AAAA,SAASA,uBAAuB,QAAQ,wBAAuB;AAG/D,OAAO,MAAMC,oCACXD,0BAAyB","ignoreList":[0]}

View File

@@ -0,0 +1,7 @@
// Share the instance module in the next-shared layer
import { dynamicAccessAsyncStorageInstance } from './dynamic-access-async-storage-instance' with {
    'turbopack-transition': 'next-shared'
};
// Re-export the shared instance under the public name consumers import.
export { dynamicAccessAsyncStorageInstance as dynamicAccessAsyncStorage };
//# sourceMappingURL=dynamic-access-async-storage.external.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/dynamic-access-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { dynamicAccessAsyncStorageInstance } from './dynamic-access-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\n\nexport interface DynamicAccessAsyncStore {\n readonly abortController: AbortController\n}\n\nexport type DynamicAccessStorage = AsyncLocalStorage<DynamicAccessAsyncStore>\nexport { dynamicAccessAsyncStorageInstance as dynamicAccessAsyncStorage }\n"],"names":["dynamicAccessAsyncStorageInstance","dynamicAccessAsyncStorage"],"mappings":"AAEA,qDAAqD;AACrD,SAASA,iCAAiC,QAAQ,+CAA+C;IAAE,wBAAwB;AAAc,EAAC;AAO1I,SAASA,qCAAqCC,yBAAyB,GAAE","ignoreList":[0]}

View File

@@ -0,0 +1,731 @@
/**
* The functions provided by this module are used to communicate certain properties
* about the currently running code so that Next.js can make decisions on how to handle
* the current execution in different rendering modes such as pre-rendering, resuming, and SSR.
*
* Today Next.js treats all code as potentially static. Certain APIs may only make sense when dynamically rendering.
* Traditionally this meant deopting the entire render to dynamic however with PPR we can now deopt parts
* of a React tree as dynamic while still keeping other parts static. There are really two different kinds of
* Dynamic indications.
*
* The first is simply an intention to be dynamic. unstable_noStore is an example of this where
* the currently executing code simply declares that the current scope is dynamic but if you use it
* inside unstable_cache it can still be cached. This type of indication can be removed if we ever
* make the default dynamic to begin with because the only way you would ever be static is inside
* a cache scope which this indication does not affect.
*
* The second is an indication that a dynamic data source was read. This is a stronger form of dynamic
* because it means that it is inappropriate to cache this at all. using a dynamic data source inside
* unstable_cache should error. If you want to use some dynamic data inside unstable_cache you should
* read that data outside the cache and pass it in as an argument to the cached function.
*/ // Once postpone is in stable we should switch to importing the postpone export directly
import React from 'react';
import { DynamicServerError } from '../../client/components/hooks-server-context';
import { StaticGenBailoutError } from '../../client/components/static-generation-bailout';
import { getRuntimeStagePromise, throwForMissingRequestStore, workUnitAsyncStorage } from './work-unit-async-storage.external';
import { workAsyncStorage } from '../app-render/work-async-storage.external';
import { makeHangingPromise } from '../dynamic-rendering-utils';
import { METADATA_BOUNDARY_NAME, VIEWPORT_BOUNDARY_NAME, OUTLET_BOUNDARY_NAME, ROOT_LAYOUT_BOUNDARY_NAME } from '../../lib/framework/boundary-constants';
import { scheduleOnNextTick } from '../../lib/scheduler';
import { BailoutToCSRError } from '../../shared/lib/lazy-dynamic/bailout-to-csr';
import { InvariantError } from '../../shared/lib/invariant-error';
// Feature-detect `React.unstable_postpone` on the loaded React build.
const hasPostpone = typeof React.unstable_postpone === 'function';
/**
 * Creates a fresh tracking record used to collect dynamic API accesses
 * observed during a prerender.
 *
 * @param isDebugDynamicAccesses - when true, accesses capture stack traces.
 */ export function createDynamicTrackingState(isDebugDynamicAccesses) {
    const trackingState = {
        isDebugDynamicAccesses,
        // Each entry records one dynamic access ({ stack, expression }).
        dynamicAccesses: [],
        // Set once by the first synchronous dynamic error (if any).
        syncDynamicErrorWithStack: null
    };
    return trackingState;
}
/**
 * Creates the mutable record used while validating dynamic holes discovered
 * during prerender validation.
 */ export function createDynamicValidationState() {
    const validationState = {
        hasSuspenseAboveBody: false,
        hasDynamicMetadata: false,
        dynamicMetadata: null,
        hasDynamicViewport: false,
        hasAllowedDynamic: false,
        // Collected errors for disallowed dynamic holes.
        dynamicErrors: []
    };
    return validationState;
}
/**
 * Returns the expression of the first recorded dynamic access, or undefined
 * when none has been recorded.
 */ export function getFirstDynamicReason(trackingState) {
    return trackingState.dynamicAccesses[0]?.expression;
}
/**
 * This function communicates that the current scope should be treated as dynamic.
 *
 * In most cases this function is a no-op but if called during
 * a PPR prerender it will postpone the current sub-tree and calling
 * it during a normal prerender will cause the entire prerender to abort
 *
 * @param store - the current work (route) store; `forceDynamic`/`forceStatic`/
 *   `dynamicShouldError` short-circuit the behavior below.
 * @param workUnitStore - the current work unit store, if any; only certain
 *   types react to being marked dynamic.
 * @param expression - description of the dynamic API used (for messages).
 */ export function markCurrentScopeAsDynamic(store, workUnitStore, expression) {
    if (workUnitStore) {
        switch(workUnitStore.type){
            case 'cache':
            case 'unstable-cache':
                // Inside cache scopes, marking a scope as dynamic has no effect,
                // because the outer cache scope creates a cache boundary. This is
                // subtly different from reading a dynamic data source, which is
                // forbidden inside a cache scope.
                return;
            case 'private-cache':
                // A private cache scope is already dynamic by definition.
                return;
            case 'prerender-legacy':
            case 'prerender-ppr':
            case 'request':
                break;
            default:
                // Exhaustiveness check (no-op at runtime).
                workUnitStore;
        }
    }
    // If we're forcing dynamic rendering or we're forcing static rendering, we
    // don't need to do anything here because the entire page is already dynamic
    // or it's static and it should not throw or postpone here.
    if (store.forceDynamic || store.forceStatic) return;
    if (store.dynamicShouldError) {
        // `dynamic = "error"` routes must fail the build when they go dynamic.
        throw Object.defineProperty(new StaticGenBailoutError(`Route ${store.route} with \`dynamic = "error"\` couldn't be rendered statically because it used \`${expression}\`. See more info here: https://nextjs.org/docs/app/building-your-application/rendering/static-and-dynamic#dynamic-rendering`), "__NEXT_ERROR_CODE", {
            value: "E553",
            enumerable: false,
            configurable: true
        });
    }
    if (workUnitStore) {
        switch(workUnitStore.type){
            case 'prerender-ppr':
                // PPR: postpone just this sub-tree instead of failing the render.
                return postponeWithTracking(store.route, expression, workUnitStore.dynamicTracking);
            case 'prerender-legacy':
                workUnitStore.revalidate = 0;
                // We aren't prerendering, but we are generating a static page. We need
                // to bail out of static generation.
                const err = Object.defineProperty(new DynamicServerError(`Route ${store.route} couldn't be rendered statically because it used ${expression}. See more info here: https://nextjs.org/docs/messages/dynamic-server-error`), "__NEXT_ERROR_CODE", {
                    value: "E550",
                    enumerable: false,
                    configurable: true
                });
                store.dynamicUsageDescription = expression;
                store.dynamicUsageStack = err.stack;
                throw err;
            case 'request':
                // Dev-only bookkeeping used for messaging about dynamic usage.
                if (process.env.NODE_ENV !== 'production') {
                    workUnitStore.usedDynamic = true;
                }
                break;
            default:
                // Exhaustiveness check (no-op at runtime).
                workUnitStore;
        }
    }
}
/**
 * This function is meant to be used when prerendering without cacheComponents or PPR.
 * When called during a build it will cause Next.js to consider the route as dynamic.
 *
 * @param expression - description of the dynamic API used (for the message)
 * @param store - the current work (route) store; receives usage metadata
 * @param prerenderStore - legacy prerender store; its revalidate is zeroed
 * @throws DynamicServerError always, interrupting static generation
 * @internal
 */ export function throwToInterruptStaticGeneration(expression, store, prerenderStore) {
    // We aren't prerendering but we are generating a static page. We need to bail out of static generation
    const err = Object.defineProperty(new DynamicServerError(`Route ${store.route} couldn't be rendered statically because it used \`${expression}\`. See more info here: https://nextjs.org/docs/messages/dynamic-server-error`), "__NEXT_ERROR_CODE", {
        value: "E558",
        enumerable: false,
        configurable: true
    });
    // Zero revalidate so the route is treated as fully dynamic.
    prerenderStore.revalidate = 0;
    // Record what was accessed (and where) for later error reporting.
    store.dynamicUsageDescription = expression;
    store.dynamicUsageStack = err.stack;
    throw err;
}
/**
 * This function should be used to track whether something dynamic happened even when
 * we are in a dynamic render. This is useful for Dev where all renders are dynamic but
 * we still track whether dynamic APIs were accessed for helpful messaging
 *
 * @param workUnitStore - the current work unit store; only `request` stores
 *   (outside production) record the usage.
 * @internal
 */ export function trackDynamicDataInDynamicRender(workUnitStore) {
    switch(workUnitStore.type){
        case 'cache':
        case 'unstable-cache':
            // Inside cache scopes, marking a scope as dynamic has no effect,
            // because the outer cache scope creates a cache boundary. This is
            // subtly different from reading a dynamic data source, which is
            // forbidden inside a cache scope.
            return;
        case 'private-cache':
            // A private cache scope is already dynamic by definition.
            return;
        case 'prerender':
        case 'prerender-runtime':
        case 'prerender-legacy':
        case 'prerender-ppr':
        case 'prerender-client':
            // Prerender stores have their own tracking; nothing to record here.
            break;
        case 'request':
            // Dev-only bookkeeping used for messaging about dynamic usage.
            if (process.env.NODE_ENV !== 'production') {
                workUnitStore.usedDynamic = true;
            }
            break;
        default:
            // Exhaustiveness check (no-op at runtime).
            workUnitStore;
    }
}
// Aborts the given prerender with a prerender-interrupted error and records
// the dynamic access on the store's tracking state (capturing a stack only
// when debug tracking is enabled).
function abortOnSynchronousDynamicDataAccess(route, expression, prerenderStore) {
    const message = `Route ${route} needs to bail out of prerendering at this point because it used ${expression}.`;
    prerenderStore.controller.abort(createPrerenderInterruptedError(message));
    const tracking = prerenderStore.dynamicTracking;
    if (tracking) {
        // When we aren't debugging, we don't need to create another error for
        // the stack trace.
        const stack = tracking.isDebugDynamicAccesses ? new Error().stack : undefined;
        tracking.dynamicAccesses.push({
            stack,
            expression
        });
    }
}
// Aborts the prerender because of synchronous platform IO and stores the
// first such error (with its stack) on the tracking state.
export function abortOnSynchronousPlatformIOAccess(route, expression, errorWithStack, prerenderStore) {
    // Capture the tracking reference before aborting (matching the original
    // read order), then abort the prerender.
    const dynamicTracking = prerenderStore.dynamicTracking;
    abortOnSynchronousDynamicDataAccess(route, expression, prerenderStore);
    // It is important that we set this tracking value after aborting. Aborts are executed
    // synchronously except for the case where you abort during render itself. By setting this
    // value late we can use it to determine if any of the aborted tasks are the task that
    // called the sync IO expression in the first place.
    if (dynamicTracking && dynamicTracking.syncDynamicErrorWithStack === null) {
        dynamicTracking.syncDynamicErrorWithStack = errorWithStack;
    }
}
/**
 * use this function when prerendering with cacheComponents. If we are doing a
 * prospective prerender we don't actually abort because we want to discover
 * all caches for the shell. If this is the actual prerender we do abort.
 *
 * This function accepts a prerenderStore but the caller should ensure we're
 * actually running in cacheComponents mode.
 *
 * @param route - the route being prerendered (for the error message)
 * @param expression - description of the sync request-data API used
 * @param errorWithStack - error capturing the user callsite's stack
 * @param prerenderStore - the active prerender store to abort
 * @throws a prerender-interrupted error, always
 * @internal
 */ export function abortAndThrowOnSynchronousRequestDataAccess(route, expression, errorWithStack, prerenderStore) {
    const prerenderSignal = prerenderStore.controller.signal;
    // Only abort once; later calls just rethrow below.
    if (prerenderSignal.aborted === false) {
        // TODO it would be better to move this aborted check into the callsite so we can avoid making
        // the error object when it isn't relevant to the aborting of the prerender however
        // since we need the throw semantics regardless of whether we abort it is easier to land
        // this way. See how this was handled with `abortOnSynchronousPlatformIOAccess` for a closer
        // to ideal implementation
        abortOnSynchronousDynamicDataAccess(route, expression, prerenderStore);
        // It is important that we set this tracking value after aborting. Aborts are executed
        // synchronously except for the case where you abort during render itself. By setting this
        // value late we can use it to determine if any of the aborted tasks are the task that
        // called the sync IO expression in the first place.
        const dynamicTracking = prerenderStore.dynamicTracking;
        if (dynamicTracking) {
            if (dynamicTracking.syncDynamicErrorWithStack === null) {
                dynamicTracking.syncDynamicErrorWithStack = errorWithStack;
            }
        }
    }
    throw createPrerenderInterruptedError(`Route ${route} needs to bail out of prerendering at this point because it used ${expression}.`);
}
// Component that triggers a postpone when rendered. Attaches dynamic
// tracking only when the current work unit is a PPR prerender.
export function Postpone({ reason, route }) {
    const prerenderStore = workUnitAsyncStorage.getStore();
    const dynamicTracking = prerenderStore && prerenderStore.type === 'prerender-ppr' ? prerenderStore.dynamicTracking : null;
    postponeWithTracking(route, reason, dynamicTracking);
}
/**
 * Records the dynamic access on `dynamicTracking` (when provided) and then
 * postpones the current sub-tree via `React.unstable_postpone` with the
 * canonical bailout message.
 *
 * @throws if the loaded React build lacks `unstable_postpone` (assertPostpone)
 */ export function postponeWithTracking(route, expression, dynamicTracking) {
    assertPostpone();
    if (dynamicTracking) {
        dynamicTracking.dynamicAccesses.push({
            // When we aren't debugging, we don't need to create another error for the
            // stack trace.
            stack: dynamicTracking.isDebugDynamicAccesses ? new Error().stack : undefined,
            expression
        });
    }
    React.unstable_postpone(createPostponeReason(route, expression));
}
// Builds the canonical postpone message used to mark a PPR bailout point.
// `isDynamicPostponeReason` relies on this exact message shape — keep them
// in sync (the module-load self-check below enforces this).
function createPostponeReason(route, expression) {
    return (
        `Route ${route} needs to bail out of prerendering at this point because it used ${expression}. ` +
        `React throws this special object to indicate where. It should not be caught by ` +
        `your own try/catch. Learn more: https://nextjs.org/docs/messages/ppr-caught-error`
    );
}
// Matches the message shape produced by `createPostponeReason`.
function isDynamicPostponeReason(reason) {
    return (
        reason.includes('needs to bail out of prerendering at this point because it used') &&
        reason.includes('Learn more: https://nextjs.org/docs/messages/ppr-caught-error')
    );
}
/**
 * Returns true when `err` looks like a postpone thrown by
 * `postponeWithTracking`, identified purely by its message shape.
 */ export function isDynamicPostpone(err) {
    if (typeof err !== 'object' || err === null) return false;
    if (typeof err.message !== 'string') return false;
    return isDynamicPostponeReason(err.message);
}
// Self-check at module load: guarantees `isDynamicPostponeReason` keeps
// recognizing the messages `createPostponeReason` produces, so the two
// cannot silently drift apart.
if (isDynamicPostponeReason(createPostponeReason('%%%', '^^^')) === false) {
    throw Object.defineProperty(new Error('Invariant: isDynamicPostpone misidentified a postpone reason. This is a bug in Next.js'), "__NEXT_ERROR_CODE", {
        value: "E296",
        enumerable: false,
        configurable: true
    });
}
const NEXT_PRERENDER_INTERRUPTED = 'NEXT_PRERENDER_INTERRUPTED';
function createPrerenderInterruptedError(message) {
const error = Object.defineProperty(new Error(message), "__NEXT_ERROR_CODE", {
value: "E394",
enumerable: false,
configurable: true
});
error.digest = NEXT_PRERENDER_INTERRUPTED;
return error;
}
/**
 * Detects errors produced by `createPrerenderInterruptedError` by checking
 * the digest plus basic Error shape.
 */ export function isPrerenderInterruptedError(error) {
    if (typeof error !== 'object' || error === null) return false;
    if (error.digest !== NEXT_PRERENDER_INTERRUPTED) return false;
    return 'name' in error && 'message' in error && error instanceof Error;
}
// True when at least one dynamic access has been recorded.
export function accessedDynamicData(dynamicAccesses) {
    return dynamicAccesses.length !== 0;
}
export function consumeDynamicAccess(serverDynamic, clientDynamic) {
// We mutate because we only call this once we are no longer writing
// to the dynamicTrackingState and it's more efficient than creating a new
// array.
serverDynamic.dynamicAccesses.push(...clientDynamic.dynamicAccesses);
return serverDynamic.dynamicAccesses;
}
/**
 * Formats the recorded dynamic API accesses that captured a stack trace into
 * human-readable debug strings, hiding Next.js and Node.js internal frames.
 */ export function formatDynamicAPIAccesses(dynamicAccesses) {
    const hasUsefulStack = (access) => typeof access.stack === 'string' && access.stack.length > 0;
    // Keep only frames that point at user code.
    const isUserFrame = (line) =>
        !line.includes('node_modules/next/') &&
        !line.includes(' (<anonymous>)') &&
        !line.includes(' (node:');
    return dynamicAccesses.filter(hasUsefulStack).map(({ expression, stack }) => {
        // Drop the "Error: " line plus the frames between user code and the
        // `new Error().stack` call (first 4 lines), then hide internals.
        const frames = stack.split('\n').slice(4).filter(isUserFrame);
        return `Dynamic API Usage Debug - ${expression}:\n${frames.join('\n')}`;
    });
}
// Throws when React.unstable_postpone is unavailable, which indicates a
// mismatched React version was loaded alongside this Next.js build.
function assertPostpone() {
    if (hasPostpone) return;
    const error = new Error(`Invariant: React.unstable_postpone is not defined. This suggests the wrong version of React was loaded. This is a bug in Next.js`);
    throw Object.defineProperty(error, "__NEXT_ERROR_CODE", {
        value: "E224",
        enumerable: false,
        configurable: true
    });
}
/**
 * This is a bit of a hack to allow us to abort a render using a Postpone instance instead of an Error which changes React's
 * abort semantics slightly.
 *
 * @returns an AbortSignal that is already aborted with a BailoutToCSRError.
 */ export function createRenderInBrowserAbortSignal() {
    const controller = new AbortController();
    // Abort immediately; consumers only ever observe the pre-aborted signal.
    controller.abort(Object.defineProperty(new BailoutToCSRError('Render in Browser'), "__NEXT_ERROR_CODE", {
        value: "E721",
        enumerable: false,
        configurable: true
    }));
    return controller.signal;
}
/**
 * In a prerender, we may end up with hanging Promises as inputs due them
 * stalling on connection() or because they're loading dynamic data. In that
 * case we need to abort the encoding of arguments since they'll never complete.
 *
 * @param workUnitStore - the current work unit; only 'prerender' and
 *   'prerender-runtime' stores produce a signal.
 * @returns an AbortSignal for those prerender types, otherwise undefined.
 */ export function createHangingInputAbortSignal(workUnitStore) {
    switch(workUnitStore.type){
        case 'prerender':
        case 'prerender-runtime':
            const controller = new AbortController();
            if (workUnitStore.cacheSignal) {
                // If we have a cacheSignal it means we're in a prospective render. If
                // the input we're waiting on is coming from another cache, we do want
                // to wait for it so that we can resolve this cache entry too.
                workUnitStore.cacheSignal.inputReady().then(()=>{
                    controller.abort();
                });
            } else {
                // Otherwise we're in the final render and we should already have all
                // our caches filled.
                // If the prerender uses stages, we have wait until the runtime stage,
                // at which point all runtime inputs will be resolved.
                // (otherwise, a runtime prerender might consider `cookies()` hanging
                // even though they'd resolve in the next task.)
                //
                // We might still be waiting on some microtasks so we
                // wait one tick before giving up. When we give up, we still want to
                // render the content of this cache as deeply as we can so that we can
                // suspend as deeply as possible in the tree or not at all if we don't
                // end up waiting for the input.
                const runtimeStagePromise = getRuntimeStagePromise(workUnitStore);
                if (runtimeStagePromise) {
                    runtimeStagePromise.then(()=>scheduleOnNextTick(()=>controller.abort()));
                } else {
                    scheduleOnNextTick(()=>controller.abort());
                }
            }
            return controller.signal;
        case 'prerender-client':
        case 'prerender-ppr':
        case 'prerender-legacy':
        case 'request':
        case 'cache':
        case 'private-cache':
        case 'unstable-cache':
            return undefined;
        default:
            // Exhaustiveness check (no-op at runtime).
            workUnitStore;
    }
}
export function annotateDynamicAccess(expression, prerenderStore) {
const dynamicTracking = prerenderStore.dynamicTracking;
if (dynamicTracking) {
dynamicTracking.dynamicAccesses.push({
stack: dynamicTracking.isDebugDynamicAccesses ? new Error().stack : undefined,
expression
});
}
}
/**
 * Marks the current render as depending on dynamic route params when the
 * route has unresolved fallback params: cacheComponents prerenders suspend
 * on a hanging promise (creating a dynamic hole), PPR prerenders postpone.
 * Cache scopes and runtime prerenders are invariant violations.
 *
 * @param expression - description of the params API being accessed
 */ export function useDynamicRouteParams(expression) {
    const workStore = workAsyncStorage.getStore();
    const workUnitStore = workUnitAsyncStorage.getStore();
    if (workStore && workUnitStore) {
        switch(workUnitStore.type){
            case 'prerender-client':
            case 'prerender':
                {
                    const fallbackParams = workUnitStore.fallbackRouteParams;
                    if (fallbackParams && fallbackParams.size > 0) {
                        // We are in a prerender with cacheComponents semantics. We are going to
                        // hang here and never resolve. This will cause the currently
                        // rendering component to effectively be a dynamic hole.
                        React.use(makeHangingPromise(workUnitStore.renderSignal, workStore.route, expression));
                    }
                    break;
                }
            case 'prerender-ppr':
                {
                    const fallbackParams = workUnitStore.fallbackRouteParams;
                    if (fallbackParams && fallbackParams.size > 0) {
                        // Postpone this sub-tree via PPR tracking.
                        return postponeWithTracking(workStore.route, expression, workUnitStore.dynamicTracking);
                    }
                    break;
                }
            case 'prerender-runtime':
                throw Object.defineProperty(new InvariantError(`\`${expression}\` was called during a runtime prerender. Next.js should be preventing ${expression} from being included in server components statically, but did not in this case.`), "__NEXT_ERROR_CODE", {
                    value: "E771",
                    enumerable: false,
                    configurable: true
                });
            case 'cache':
            case 'private-cache':
                throw Object.defineProperty(new InvariantError(`\`${expression}\` was called inside a cache scope. Next.js should be preventing ${expression} from being included in server components statically, but did not in this case.`), "__NEXT_ERROR_CODE", {
                    value: "E745",
                    enumerable: false,
                    configurable: true
                });
            case 'prerender-legacy':
            case 'request':
            case 'unstable-cache':
                // Nothing to do for these work unit types.
                break;
            default:
                // Exhaustiveness check (no-op at runtime).
                workUnitStore;
        }
    }
}
/**
 * Marks the current render as depending on search params: client prerenders
 * suspend on a hanging promise, legacy/PPR prerenders bail out to client-side
 * rendering (unless forceStatic), and cache scopes / server prerenders are
 * invariant violations. Without a work store, pages router context is assumed.
 *
 * @param expression - description of the search params API being accessed
 */ export function useDynamicSearchParams(expression) {
    const workStore = workAsyncStorage.getStore();
    const workUnitStore = workUnitAsyncStorage.getStore();
    if (!workStore) {
        // We assume pages router context and just return
        return;
    }
    if (!workUnitStore) {
        throwForMissingRequestStore(expression);
    }
    switch(workUnitStore.type){
        case 'prerender-client':
            {
                // Suspend forever, turning this component into a dynamic hole.
                React.use(makeHangingPromise(workUnitStore.renderSignal, workStore.route, expression));
                break;
            }
        case 'prerender-legacy':
        case 'prerender-ppr':
            {
                if (workStore.forceStatic) {
                    // force-static routes render search params as empty; no bailout.
                    return;
                }
                throw Object.defineProperty(new BailoutToCSRError(expression), "__NEXT_ERROR_CODE", {
                    value: "E394",
                    enumerable: false,
                    configurable: true
                });
            }
        case 'prerender':
        case 'prerender-runtime':
            throw Object.defineProperty(new InvariantError(`\`${expression}\` was called from a Server Component. Next.js should be preventing ${expression} from being included in server components statically, but did not in this case.`), "__NEXT_ERROR_CODE", {
                value: "E795",
                enumerable: false,
                configurable: true
            });
        case 'cache':
        case 'unstable-cache':
        case 'private-cache':
            throw Object.defineProperty(new InvariantError(`\`${expression}\` was called inside a cache scope. Next.js should be preventing ${expression} from being included in server components statically, but did not in this case.`), "__NEXT_ERROR_CODE", {
                value: "E745",
                enumerable: false,
                configurable: true
            });
        case 'request':
            // Requests may freely read search params.
            return;
        default:
            // Exhaustiveness check (no-op at runtime).
            workUnitStore;
    }
}
// Matches a Suspense frame anywhere in a React component stack.
const hasSuspenseRegex = /\n\s+at Suspense \(<anonymous>\)/;
// Common implicit body tags that React will treat as body when placed directly in html
const bodyAndImplicitTags = 'body|div|main|section|article|aside|header|footer|nav|form|p|span|h1|h2|h3|h4|h5|h6';
// Detects when RootLayoutBoundary (our framework marker component) appears
// after Suspense in the component stack, indicating the root layout is wrapped
// within a Suspense boundary. Ensures no body/html/implicit-body components are in between.
//
// Example matches:
//   at Suspense (<anonymous>)
//   at __next_root_layout_boundary__ (<anonymous>)
//
// Or with other components in between (but not body/html/implicit-body):
//   at Suspense (<anonymous>)
//   at SomeComponent (<anonymous>)
//   at __next_root_layout_boundary__ (<anonymous>)
const hasSuspenseBeforeRootLayoutWithoutBodyOrImplicitBodyRegex = new RegExp(`\\n\\s+at Suspense \\(<anonymous>\\)(?:(?!\\n\\s+at (?:${bodyAndImplicitTags}) \\(<anonymous>\\))[\\s\\S])*?\\n\\s+at ${ROOT_LAYOUT_BOUNDARY_NAME} \\([^\\n]*\\)`);
// Component-stack matchers for the framework's metadata, viewport, and
// outlet boundary marker components (names imported from boundary-constants).
const hasMetadataRegex = new RegExp(`\\n\\s+at ${METADATA_BOUNDARY_NAME}[\\n\\s]`);
const hasViewportRegex = new RegExp(`\\n\\s+at ${VIEWPORT_BOUNDARY_NAME}[\\n\\s]`);
const hasOutletRegex = new RegExp(`\\n\\s+at ${OUTLET_BOUNDARY_NAME}[\\n\\s]`);
/**
 * Classifies a dynamic hole found during prerender validation based on the
 * component stack it occurred under, and records the verdict on
 * `dynamicValidation`. Holes under outlet/metadata/viewport boundaries or a
 * Suspense boundary are tolerated; anything else becomes a blocking-route
 * error (or reuses the recorded sync-dynamic error when one exists).
 */ export function trackAllowedDynamicAccess(workStore, componentStack, dynamicValidation, clientDynamic) {
    if (hasOutletRegex.test(componentStack)) {
        // We don't need to track that this is dynamic. It is only so when something else is also dynamic.
        return;
    } else if (hasMetadataRegex.test(componentStack)) {
        dynamicValidation.hasDynamicMetadata = true;
        return;
    } else if (hasViewportRegex.test(componentStack)) {
        dynamicValidation.hasDynamicViewport = true;
        return;
    } else if (hasSuspenseBeforeRootLayoutWithoutBodyOrImplicitBodyRegex.test(componentStack)) {
        // For Suspense within body, the prelude wouldn't be empty so it wouldn't violate the empty static shells rule.
        // But if you have Suspense above body, the prelude is empty but we allow that because having Suspense
        // is an explicit signal from the user that they acknowledge the empty shell and want dynamic rendering.
        dynamicValidation.hasAllowedDynamic = true;
        dynamicValidation.hasSuspenseAboveBody = true;
        return;
    } else if (hasSuspenseRegex.test(componentStack)) {
        // this error had a Suspense boundary above it so we don't need to report it as a source
        // of disallowed
        dynamicValidation.hasAllowedDynamic = true;
        return;
    } else if (clientDynamic.syncDynamicErrorWithStack) {
        // This task was the task that called the sync error.
        dynamicValidation.dynamicErrors.push(clientDynamic.syncDynamicErrorWithStack);
        return;
    } else {
        const message = `Route "${workStore.route}": Uncached data was accessed outside of ` + '<Suspense>. This delays the entire page from rendering, resulting in a ' + 'slow user experience. Learn more: ' + 'https://nextjs.org/docs/messages/blocking-route';
        const error = createErrorWithComponentOrOwnerStack(message, componentStack);
        dynamicValidation.dynamicErrors.push(error);
        return;
    }
}
/**
 * Like `trackAllowedDynamicAccess`, but for dynamic holes found in the
 * runtime shell: metadata/viewport holes produce specific errors pointing at
 * `generateMetadata`/`generateViewport`, Suspense-covered holes are allowed,
 * and anything else becomes a blocking-route error on `dynamicValidation`.
 */ export function trackDynamicHoleInRuntimeShell(workStore, componentStack, dynamicValidation, clientDynamic) {
    if (hasOutletRegex.test(componentStack)) {
        // We don't need to track that this is dynamic. It is only so when something else is also dynamic.
        return;
    } else if (hasMetadataRegex.test(componentStack)) {
        const message = `Route "${workStore.route}": Uncached data or \`connection()\` was accessed inside \`generateMetadata\`. Except for this instance, the page would have been entirely prerenderable which may have been the intended behavior. See more info here: https://nextjs.org/docs/messages/next-prerender-dynamic-metadata`;
        const error = createErrorWithComponentOrOwnerStack(message, componentStack);
        dynamicValidation.dynamicMetadata = error;
        return;
    } else if (hasViewportRegex.test(componentStack)) {
        const message = `Route "${workStore.route}": Uncached data or \`connection()\` was accessed inside \`generateViewport\`. This delays the entire page from rendering, resulting in a slow user experience. Learn more: https://nextjs.org/docs/messages/next-prerender-dynamic-viewport`;
        const error = createErrorWithComponentOrOwnerStack(message, componentStack);
        dynamicValidation.dynamicErrors.push(error);
        return;
    } else if (hasSuspenseBeforeRootLayoutWithoutBodyOrImplicitBodyRegex.test(componentStack)) {
        // For Suspense within body, the prelude wouldn't be empty so it wouldn't violate the empty static shells rule.
        // But if you have Suspense above body, the prelude is empty but we allow that because having Suspense
        // is an explicit signal from the user that they acknowledge the empty shell and want dynamic rendering.
        dynamicValidation.hasAllowedDynamic = true;
        dynamicValidation.hasSuspenseAboveBody = true;
        return;
    } else if (hasSuspenseRegex.test(componentStack)) {
        // this error had a Suspense boundary above it so we don't need to report it as a source
        // of disallowed
        dynamicValidation.hasAllowedDynamic = true;
        return;
    } else if (clientDynamic.syncDynamicErrorWithStack) {
        // This task was the task that called the sync error.
        dynamicValidation.dynamicErrors.push(clientDynamic.syncDynamicErrorWithStack);
        return;
    } else {
        const message = `Route "${workStore.route}": Uncached data or \`connection()\` was accessed outside of \`<Suspense>\`. This delays the entire page from rendering, resulting in a slow user experience. Learn more: https://nextjs.org/docs/messages/blocking-route`;
        const error = createErrorWithComponentOrOwnerStack(message, componentStack);
        dynamicValidation.dynamicErrors.push(error);
        return;
    }
}
export function trackDynamicHoleInStaticShell(workStore, componentStack, dynamicValidation, clientDynamic) {
if (hasOutletRegex.test(componentStack)) {
// We don't need to track that this is dynamic. It is only so when something else is also dynamic.
return;
} else if (hasMetadataRegex.test(componentStack)) {
const message = `Route "${workStore.route}": Runtime data such as \`cookies()\`, \`headers()\`, \`params\`, or \`searchParams\` was accessed inside \`generateMetadata\` or you have file-based metadata such as icons that depend on dynamic params segments. Except for this instance, the page would have been entirely prerenderable which may have been the intended behavior. See more info here: https://nextjs.org/docs/messages/next-prerender-dynamic-metadata`;
const error = createErrorWithComponentOrOwnerStack(message, componentStack);
dynamicValidation.dynamicMetadata = error;
return;
} else if (hasViewportRegex.test(componentStack)) {
const message = `Route "${workStore.route}": Runtime data such as \`cookies()\`, \`headers()\`, \`params\`, or \`searchParams\` was accessed inside \`generateViewport\`. This delays the entire page from rendering, resulting in a slow user experience. Learn more: https://nextjs.org/docs/messages/next-prerender-dynamic-viewport`;
const error = createErrorWithComponentOrOwnerStack(message, componentStack);
dynamicValidation.dynamicErrors.push(error);
return;
} else if (hasSuspenseBeforeRootLayoutWithoutBodyOrImplicitBodyRegex.test(componentStack)) {
// For Suspense within body, the prelude wouldn't be empty so it wouldn't violate the empty static shells rule.
// But if you have Suspense above body, the prelude is empty but we allow that because having Suspense
// is an explicit signal from the user that they acknowledge the empty shell and want dynamic rendering.
dynamicValidation.hasAllowedDynamic = true;
dynamicValidation.hasSuspenseAboveBody = true;
return;
} else if (hasSuspenseRegex.test(componentStack)) {
// this error had a Suspense boundary above it so we don't need to report it as a source
// of disallowed
dynamicValidation.hasAllowedDynamic = true;
return;
} else if (clientDynamic.syncDynamicErrorWithStack) {
// This task was the task that called the sync error.
dynamicValidation.dynamicErrors.push(clientDynamic.syncDynamicErrorWithStack);
return;
} else {
const message = `Route "${workStore.route}": Runtime data such as \`cookies()\`, \`headers()\`, \`params\`, or \`searchParams\` was accessed outside of \`<Suspense>\`. This delays the entire page from rendering, resulting in a slow user experience. Learn more: https://nextjs.org/docs/messages/blocking-route`;
const error = createErrorWithComponentOrOwnerStack(message, componentStack);
dynamicValidation.dynamicErrors.push(error);
return;
}
}
/**
 * Builds a validation error whose stack is the React owner stack (preferred
 * in dev, when available) or the provided component stack.
 */ function createErrorWithComponentOrOwnerStack(message, componentStack) {
    let ownerStack = null;
    if (process.env.NODE_ENV !== 'production' && React.captureOwnerStack) {
        ownerStack = React.captureOwnerStack();
    }
    const error = Object.defineProperty(new Error(message), "__NEXT_ERROR_CODE", {
        value: "E394",
        enumerable: false,
        configurable: true
    });
    // TODO go back to owner stack here if available. This is temporarily using componentStack to get the right
    //
    error.stack = `${error.name}: ${message}${ownerStack || componentStack}`;
    return error;
}
// Bidirectionally-mapped enum (compiled TypeScript enum shape):
// PreludeState.Full === 0 and PreludeState[0] === 'Full', etc.
export var PreludeState = /*#__PURE__*/ (function() {
    const states = {};
    for (const [label, value] of [['Full', 0], ['Empty', 1], ['Errored', 2]]) {
        states[states[label] = value] = label;
    }
    return states;
})();
export function logDisallowedDynamicError(workStore, error) {
console.error(error);
if (!workStore.dev) {
if (workStore.hasReadableErrorStacks) {
console.error(`To get a more detailed stack trace and pinpoint the issue, start the app in development mode by running \`next dev\`, then open "${workStore.route}" in your browser to investigate the error.`);
} else {
console.error(`To get a more detailed stack trace and pinpoint the issue, try one of the following:
- Start the app in development mode by running \`next dev\`, then open "${workStore.route}" in your browser to investigate the error.
- Rerun the production build with \`next build --debug-prerender\` to generate better stack traces.`);
}
}
}
/**
 * Validates the result of a prerender against the "must produce a static
 * shell" rule and throws StaticGenBailoutError when dynamic access is
 * disallowed. The order of checks below is significant.
 *
 * `prelude` uses the PreludeState values defined above:
 * 0 = Full, 1 = Empty, 2 = Errored.
 */ export function throwIfDisallowedDynamic(workStore, prelude, dynamicValidation, serverDynamic) {
    // A synchronous dynamic error on the server is always fatal for
    // prerendering, regardless of the prelude state.
    if (serverDynamic.syncDynamicErrorWithStack) {
        logDisallowedDynamicError(workStore, serverDynamic.syncDynamicErrorWithStack);
        throw new StaticGenBailoutError();
    }
    // prelude !== Full: the static shell is empty or errored.
    if (prelude !== 0) {
        if (dynamicValidation.hasSuspenseAboveBody) {
            // This route has opted into allowing fully dynamic rendering
            // by including a Suspense boundary above the body. In this case
            // a lack of a shell is not considered disallowed so we simply return
            return;
        }
        // We didn't have any sync bailouts but there may be user code which
        // blocked the root. We would have captured these during the prerender
        // and can log them here and then terminate the build/validating render
        const dynamicErrors = dynamicValidation.dynamicErrors;
        if (dynamicErrors.length > 0) {
            for(let i = 0; i < dynamicErrors.length; i++){
                logDisallowedDynamicError(workStore, dynamicErrors[i]);
            }
            throw new StaticGenBailoutError();
        }
        // If we got this far then the only other thing that could be blocking
        // the root is dynamic Viewport. If this is dynamic then
        // you need to opt into that by adding a Suspense boundary above the body
        // to indicate your are ok with fully dynamic rendering.
        if (dynamicValidation.hasDynamicViewport) {
            console.error(`Route "${workStore.route}" has a \`generateViewport\` that depends on Request data (\`cookies()\`, etc...) or uncached external data (\`fetch(...)\`, etc...) without explicitly allowing fully dynamic rendering. See more info here: https://nextjs.org/docs/messages/next-prerender-dynamic-viewport`);
            throw new StaticGenBailoutError();
        }
        // prelude === Empty with no recorded cause at all.
        if (prelude === 1) {
            // If we ever get this far then we messed up the tracking of invalid dynamic.
            // We still adhere to the constraint that you must produce a shell but invite the
            // user to report this as a bug in Next.js.
            console.error(`Route "${workStore.route}" did not produce a static shell and Next.js was unable to determine a reason. This is a bug in Next.js.`);
            throw new StaticGenBailoutError();
        }
    } else {
        // prelude === Full: the only remaining violation is metadata that is
        // dynamic while nothing else on the route is allowed-dynamic.
        if (dynamicValidation.hasAllowedDynamic === false && dynamicValidation.hasDynamicMetadata) {
            console.error(`Route "${workStore.route}" has a \`generateMetadata\` that depends on Request data (\`cookies()\`, etc...) or uncached external data (\`fetch(...)\`, etc...) when the rest of the route does not. See more info here: https://nextjs.org/docs/messages/next-prerender-dynamic-metadata`);
            throw new StaticGenBailoutError();
        }
    }
}
/**
 * Returns the list of errors explaining why this route's static shell is
 * disallowed-dynamic, or an empty array when the shell is acceptable.
 *
 * `prelude` uses the PreludeState values defined above:
 * 0 = Full, 1 = Empty, 2 = Errored.
 */ export function getStaticShellDisallowedDynamicReasons(workStore, prelude, dynamicValidation) {
    // A Suspense boundary above <body> is an explicit opt-in to fully dynamic
    // rendering, so a missing shell is not a violation.
    if (dynamicValidation.hasSuspenseAboveBody) {
        return [];
    }
    if (prelude === 0) {
        // Full prelude: the only remaining concern is metadata that is dynamic
        // while nothing else on the route is.
        const onlyMetadataIsDynamic = dynamicValidation.hasAllowedDynamic === false && dynamicValidation.dynamicErrors.length === 0 && dynamicValidation.dynamicMetadata;
        return onlyMetadataIsDynamic ? [dynamicValidation.dynamicMetadata] : [];
    }
    // Empty or errored shell: surface the user-code errors captured during the
    // prerender, if any.
    if (dynamicValidation.dynamicErrors.length > 0) {
        return dynamicValidation.dynamicErrors;
    }
    if (prelude === 1) {
        // An empty shell without any recorded cause means our dynamic tracking
        // missed something; report it as a Next.js bug.
        return [
            Object.defineProperty(new InvariantError(`Route "${workStore.route}" did not produce a static shell and Next.js was unable to determine a reason.`), "__NEXT_ERROR_CODE", {
                value: "E936",
                enumerable: false,
                configurable: true
            })
        ];
    }
    return [];
}
export function delayUntilRuntimeStage(prerenderStore, result) {
if (prerenderStore.runtimeStagePromise) {
return prerenderStore.runtimeStagePromise.then(()=>result);
}
return result;
}
//# sourceMappingURL=dynamic-rendering.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,99 @@
// This file should never be bundled into application's runtime code and should
// stay in the Next.js server.
import path from 'path';
import fs from 'fs';
import { getStorageDirectory } from '../cache-dir';
import { arrayBufferToString } from './encryption-utils';
// Keep the key in memory as it should never change during the lifetime of the server in
// both development and production.
// Memoized promise for the loaded/generated key; populated once by
// generateEncryptionKeyBase64 below.
let __next_encryption_key_generation_promise = null;
// Name of the cache file (inside the dist storage directory) that persists the key.
const CONFIG_FILE = '.rscinfo';
// JSON field names used inside CONFIG_FILE.
const ENCRYPTION_KEY = 'encryption.key';
const ENCRYPTION_EXPIRE_AT = 'encryption.expire_at';
// How long a persisted key stays valid before build-time rotation.
const EXPIRATION = 1000 * 60 * 60 * 24 * 14 // 14 days
;
/**
 * Persists the encryption key (plus an expiration timestamp) to the
 * `.rscinfo` config file inside the dist storage directory. No-ops when no
 * persistent storage directory is available.
 */ async function writeCache(distDir, configValue) {
    const cacheBaseDir = getStorageDirectory(distDir);
    if (!cacheBaseDir) return;
    const configPath = path.join(cacheBaseDir, CONFIG_FILE);
    // `recursive: true` makes mkdir idempotent, so no existence pre-check is
    // needed (the previous existsSync-then-mkdir pattern was also a TOCTOU
    // race under concurrent writers).
    await fs.promises.mkdir(cacheBaseDir, {
        recursive: true
    });
    await fs.promises.writeFile(configPath, JSON.stringify({
        [ENCRYPTION_KEY]: configValue,
        [ENCRYPTION_EXPIRE_AT]: Date.now() + EXPIRATION
    }));
}
// This utility is used to get a key for the cache directory. If the
// key is not present, it will generate a new one and store it in the
// cache directory inside dist.
// The key will also expire after a certain amount of time. Once it
// expires, a new one will be generated.
// During the lifetime of the server, it will be reused and never refreshed.
async function loadOrGenerateKey(distDir, isBuild, generateKey) {
    const cacheBaseDir = getStorageDirectory(distDir);
    if (!cacheBaseDir) {
        // There's no persistent storage available. We generate a new key.
        // This also covers development time.
        return await generateKey();
    }
    const configPath = path.join(cacheBaseDir, CONFIG_FILE);
    // Returns the cached key string when the config file exists and is valid;
    // otherwise returns `false` (missing/corrupt file, wrong field types,
    // expired at build time, or mismatch with an explicitly provided env key).
    async function hasCachedKey() {
        if (!fs.existsSync(configPath)) return false;
        try {
            const config = JSON.parse(await fs.promises.readFile(configPath, 'utf8'));
            if (!config) return false;
            if (typeof config[ENCRYPTION_KEY] !== 'string' || typeof config[ENCRYPTION_EXPIRE_AT] !== 'number') {
                return false;
            }
            // For build time, we need to rotate the key if it's expired. Otherwise
            // (next start) we have to keep the key as it is so the runtime key matches
            // the build time key.
            if (isBuild && config[ENCRYPTION_EXPIRE_AT] < Date.now()) {
                return false;
            }
            const cachedKey = config[ENCRYPTION_KEY];
            // If encryption key is provided via env, and it's not same as valid cache,
            // we should not use the cached key and respect the env key.
            if (cachedKey && process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY && cachedKey !== process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY) {
                return false;
            }
            return cachedKey;
        } catch {
            // Broken config file. We should generate a new key and overwrite it.
            return false;
        }
    }
    const maybeValidKey = await hasCachedKey();
    if (typeof maybeValidKey === 'string') {
        return maybeValidKey;
    }
    // No usable cached key: generate a fresh one and persist it for reuse.
    const key = await generateKey();
    await writeCache(distDir, key);
    return key;
}
/**
 * Returns the Server Actions encryption key as base64, loading it from the
 * persistent cache or generating a fresh AES-GCM 256 key. An explicitly
 * provided NEXT_SERVER_ACTIONS_ENCRYPTION_KEY env value takes precedence.
 */ export async function generateEncryptionKeyBase64({ isBuild, distDir }) {
    // Memoize the promise so concurrent callers share a single load/generate.
    if (__next_encryption_key_generation_promise === null) {
        const createFreshKey = async () => {
            const providedKey = process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY;
            if (providedKey) {
                return providedKey;
            }
            const generated = await crypto.subtle.generateKey({
                name: 'AES-GCM',
                length: 256
            }, true, ['encrypt', 'decrypt']);
            const rawKey = await crypto.subtle.exportKey('raw', generated);
            return btoa(arrayBufferToString(rawKey));
        };
        __next_encryption_key_generation_promise = loadOrGenerateKey(distDir, isBuild, createFreshKey);
    }
    return __next_encryption_key_generation_promise;
}
//# sourceMappingURL=encryption-utils-server.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,59 @@
import { InvariantError } from '../../shared/lib/invariant-error';
import { getServerActionsManifest } from './manifests-singleton';
let __next_loaded_action_key;
/**
 * Converts a byte buffer to a binary string (one char per byte, latin1-style
 * via String.fromCharCode — intentionally NOT utf-8 decoding).
 */ export function arrayBufferToString(buffer) {
    const view = new Uint8Array(buffer);
    const size = view.byteLength;
    // @anonrig: V8 has a limit of 65535 arguments in a function.
    // For len < 65535, this is faster.
    // https://github.com/vercel/next.js/pull/56377#pullrequestreview-1656181623
    if (size < 65535) {
        return String.fromCharCode(...view);
    }
    let result = '';
    for (const byte of view) {
        result += String.fromCharCode(byte);
    }
    return result;
}
/**
 * Inverse of arrayBufferToString: converts a binary string (one char per
 * byte) back into a Uint8Array using each character's code unit.
 */ export function stringToUint8Array(binary) {
    return Uint8Array.from(binary, (ch) => ch.charCodeAt(0));
}
export function encrypt(key, iv, data) {
return crypto.subtle.encrypt({
name: 'AES-GCM',
iv
}, key, data);
}
export function decrypt(key, iv, data) {
return crypto.subtle.decrypt({
name: 'AES-GCM',
iv
}, key, data);
}
/**
 * Loads (and memoizes) the AES-GCM CryptoKey used for Server Action bound
 * args. An explicit NEXT_SERVER_ACTIONS_ENCRYPTION_KEY env value takes
 * precedence over the build manifest's key.
 */ export async function getActionEncryptionKey() {
    // The key never changes during the server's lifetime, so reuse it.
    if (__next_loaded_action_key) {
        return __next_loaded_action_key;
    }
    const manifest = getServerActionsManifest();
    const rawKey = process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY || manifest.encryptionKey;
    if (rawKey === undefined) {
        throw Object.defineProperty(new InvariantError('Missing encryption key for Server Actions'), "__NEXT_ERROR_CODE", {
            value: "E571",
            enumerable: false,
            configurable: true
        });
    }
    // The raw key is stored base64-encoded; decode it to bytes for importKey.
    const keyMaterial = stringToUint8Array(atob(rawKey));
    __next_loaded_action_key = await crypto.subtle.importKey('raw', keyMaterial, 'AES-GCM', true, ['encrypt', 'decrypt']);
    return __next_loaded_action_key;
}
//# sourceMappingURL=encryption-utils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/encryption-utils.ts"],"sourcesContent":["import { InvariantError } from '../../shared/lib/invariant-error'\nimport { getServerActionsManifest } from './manifests-singleton'\n\nlet __next_loaded_action_key: CryptoKey\n\nexport function arrayBufferToString(\n buffer: ArrayBuffer | Uint8Array<ArrayBufferLike>\n) {\n const bytes = new Uint8Array(buffer)\n const len = bytes.byteLength\n\n // @anonrig: V8 has a limit of 65535 arguments in a function.\n // For len < 65535, this is faster.\n // https://github.com/vercel/next.js/pull/56377#pullrequestreview-1656181623\n if (len < 65535) {\n return String.fromCharCode.apply(null, bytes as unknown as number[])\n }\n\n let binary = ''\n for (let i = 0; i < len; i++) {\n binary += String.fromCharCode(bytes[i])\n }\n return binary\n}\n\nexport function stringToUint8Array(binary: string) {\n const len = binary.length\n const arr = new Uint8Array(len)\n\n for (let i = 0; i < len; i++) {\n arr[i] = binary.charCodeAt(i)\n }\n\n return arr\n}\n\nexport function encrypt(\n key: CryptoKey,\n iv: Uint8Array<ArrayBuffer>,\n data: Uint8Array<ArrayBuffer>\n) {\n return crypto.subtle.encrypt(\n {\n name: 'AES-GCM',\n iv,\n },\n key,\n data\n )\n}\n\nexport function decrypt(\n key: CryptoKey,\n iv: Uint8Array<ArrayBuffer>,\n data: Uint8Array<ArrayBuffer>\n) {\n return crypto.subtle.decrypt(\n {\n name: 'AES-GCM',\n iv,\n },\n key,\n data\n )\n}\n\nexport async function getActionEncryptionKey() {\n if (__next_loaded_action_key) {\n return __next_loaded_action_key\n }\n\n const serverActionsManifest = getServerActionsManifest()\n\n const rawKey =\n process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY ||\n serverActionsManifest.encryptionKey\n\n if (rawKey === undefined) {\n throw new InvariantError('Missing encryption key for Server Actions')\n }\n\n __next_loaded_action_key = await crypto.subtle.importKey(\n 'raw',\n stringToUint8Array(atob(rawKey)),\n 'AES-GCM',\n true,\n ['encrypt', 
'decrypt']\n )\n\n return __next_loaded_action_key\n}\n"],"names":["InvariantError","getServerActionsManifest","__next_loaded_action_key","arrayBufferToString","buffer","bytes","Uint8Array","len","byteLength","String","fromCharCode","apply","binary","i","stringToUint8Array","length","arr","charCodeAt","encrypt","key","iv","data","crypto","subtle","name","decrypt","getActionEncryptionKey","serverActionsManifest","rawKey","process","env","NEXT_SERVER_ACTIONS_ENCRYPTION_KEY","encryptionKey","undefined","importKey","atob"],"mappings":"AAAA,SAASA,cAAc,QAAQ,mCAAkC;AACjE,SAASC,wBAAwB,QAAQ,wBAAuB;AAEhE,IAAIC;AAEJ,OAAO,SAASC,oBACdC,MAAiD;IAEjD,MAAMC,QAAQ,IAAIC,WAAWF;IAC7B,MAAMG,MAAMF,MAAMG,UAAU;IAE5B,6DAA6D;IAC7D,mCAAmC;IACnC,4EAA4E;IAC5E,IAAID,MAAM,OAAO;QACf,OAAOE,OAAOC,YAAY,CAACC,KAAK,CAAC,MAAMN;IACzC;IAEA,IAAIO,SAAS;IACb,IAAK,IAAIC,IAAI,GAAGA,IAAIN,KAAKM,IAAK;QAC5BD,UAAUH,OAAOC,YAAY,CAACL,KAAK,CAACQ,EAAE;IACxC;IACA,OAAOD;AACT;AAEA,OAAO,SAASE,mBAAmBF,MAAc;IAC/C,MAAML,MAAMK,OAAOG,MAAM;IACzB,MAAMC,MAAM,IAAIV,WAAWC;IAE3B,IAAK,IAAIM,IAAI,GAAGA,IAAIN,KAAKM,IAAK;QAC5BG,GAAG,CAACH,EAAE,GAAGD,OAAOK,UAAU,CAACJ;IAC7B;IAEA,OAAOG;AACT;AAEA,OAAO,SAASE,QACdC,GAAc,EACdC,EAA2B,EAC3BC,IAA6B;IAE7B,OAAOC,OAAOC,MAAM,CAACL,OAAO,CAC1B;QACEM,MAAM;QACNJ;IACF,GACAD,KACAE;AAEJ;AAEA,OAAO,SAASI,QACdN,GAAc,EACdC,EAA2B,EAC3BC,IAA6B;IAE7B,OAAOC,OAAOC,MAAM,CAACE,OAAO,CAC1B;QACED,MAAM;QACNJ;IACF,GACAD,KACAE;AAEJ;AAEA,OAAO,eAAeK;IACpB,IAAIxB,0BAA0B;QAC5B,OAAOA;IACT;IAEA,MAAMyB,wBAAwB1B;IAE9B,MAAM2B,SACJC,QAAQC,GAAG,CAACC,kCAAkC,IAC9CJ,sBAAsBK,aAAa;IAErC,IAAIJ,WAAWK,WAAW;QACxB,MAAM,qBAA+D,CAA/D,IAAIjC,eAAe,8CAAnB,qBAAA;mBAAA;wBAAA;0BAAA;QAA8D;IACtE;IAEAE,2BAA2B,MAAMoB,OAAOC,MAAM,CAACW,SAAS,CACtD,OACApB,mBAAmBqB,KAAKP,UACxB,WACA,MACA;QAAC;QAAW;KAAU;IAGxB,OAAO1B;AACT","ignoreList":[0]}

View File

@@ -0,0 +1,216 @@
/* eslint-disable import/no-extraneous-dependencies */ import 'server-only';
/* eslint-disable import/no-extraneous-dependencies */ import { renderToReadableStream } from 'react-server-dom-webpack/server';
/* eslint-disable import/no-extraneous-dependencies */ import { createFromReadableStream } from 'react-server-dom-webpack/client';
import { streamToString } from '../stream-utils/node-web-streams-helper';
import { arrayBufferToString, decrypt, encrypt, getActionEncryptionKey, stringToUint8Array } from './encryption-utils';
import { getClientReferenceManifest, getServerModuleMap } from './manifests-singleton';
import { getCacheSignal, getPrerenderResumeDataCache, getRenderResumeDataCache, workUnitAsyncStorage } from './work-unit-async-storage.external';
import { createHangingInputAbortSignal } from './dynamic-rendering';
import React from 'react';
// Selects the edge vs. node RSC module mapping in decryptActionBoundArgs below.
const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge';
// Shared text codecs reused by the encode/decode helpers in this module.
const textEncoder = new TextEncoder();
const textDecoder = new TextDecoder();
// Dev-only source-map helpers for readable Flight stack frames; undefined in production.
const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').filterStackFrameDEV : undefined;
const findSourceMapURL = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').findSourceMapURLDEV : undefined;
/**
 * Decrypts an encoded bound-args payload and returns the serialized args
 * string. The action id prefix inside the plaintext acts as a checksum.
 */ async function decodeActionBoundArg(actionId, arg) {
    const key = await getActionEncryptionKey();
    if (key === undefined) {
        throw Object.defineProperty(new Error(`Missing encryption key for Server Action. This is a bug in Next.js`), "__NEXT_ERROR_CODE", {
            value: "E65",
            enumerable: false,
            configurable: true
        });
    }
    // Payload layout: base64(16-byte iv || ciphertext).
    const decoded = atob(arg);
    const iv = stringToUint8Array(decoded.slice(0, 16));
    const ciphertext = stringToUint8Array(decoded.slice(16));
    const plaintext = textDecoder.decode(await decrypt(key, iv, ciphertext));
    // A missing action-id prefix means the payload was not produced for this
    // action (or the decryption yielded garbage).
    if (!plaintext.startsWith(actionId)) {
        throw Object.defineProperty(new Error('Invalid Server Action payload: failed to decrypt.'), "__NEXT_ERROR_CODE", {
            value: "E191",
            enumerable: false,
            configurable: true
        });
    }
    return plaintext.slice(actionId.length);
}
/**
 * Encrypts the serialized args string, prefixed with the action id so that
 * decryption can later verify the payload (checksum-style).
 * Emitted format: base64(iv || ciphertext).
 */ async function encodeActionBoundArg(actionId, arg) {
    const key = await getActionEncryptionKey();
    if (key === undefined) {
        throw Object.defineProperty(new Error(`Missing encryption key for Server Action. This is a bug in Next.js`), "__NEXT_ERROR_CODE", {
            value: "E65",
            enumerable: false,
            configurable: true
        });
    }
    // Fresh random 16-byte iv per payload. The randomness is produced inside
    // workUnitAsyncStorage.exit — presumably so work-unit-scoped tracking does
    // not observe it; confirm against the store's random handling.
    const iv = new Uint8Array(16);
    workUnitAsyncStorage.exit(() => crypto.getRandomValues(iv));
    const payload = textEncoder.encode(actionId + arg);
    const ciphertext = await encrypt(key, iv, payload);
    return btoa(arrayBufferToString(iv.buffer) + arrayBufferToString(ciphertext));
}
// Bidirectionally-mapped enum (compiled TypeScript enum shape) tracking the
// cacheSignal read lifecycle: Ready(0) -> Pending(1) -> Complete(2).
var ReadStatus = /*#__PURE__*/ (function(existing) {
    const statusEnum = existing;
    [['Ready', 0], ['Pending', 1], ['Complete', 2]].forEach(([label, value]) => {
        statusEnum[statusEnum[label] = value] = label;
    });
    return statusEnum;
})(ReadStatus || {});
// Encrypts the action's bound args into a string. For the same combination of
// actionId and args the same cached promise is returned. This ensures reference
// equality for returned objects from "use cache" functions when they're invoked
// multiple times within one render pass using the same bound args.
export const encryptActionBoundArgs = React.cache(async function encryptActionBoundArgs(actionId, ...args) {
    const workUnitStore = workUnitAsyncStorage.getStore();
    const cacheSignal = workUnitStore ? getCacheSignal(workUnitStore) : undefined;
    const { clientModules } = getClientReferenceManifest();
    // Create an error before any asynchronous calls, to capture the original
    // call stack in case we need it when the serialization errors.
    const error = new Error();
    Error.captureStackTrace(error, encryptActionBoundArgs);
    let didCatchError = false;
    const hangingInputAbortSignal = workUnitStore ? createHangingInputAbortSignal(workUnitStore) : undefined;
    // Mirrors the ReadStatus enum above: 0 = Ready, 1 = Pending, 2 = Complete.
    let readStatus = 0;
    // Begins the cacheSignal read exactly once; a no-op after the first call
    // or once endReadIfStarted has marked the read complete.
    function startReadOnce() {
        if (readStatus === 0) {
            readStatus = 1;
            cacheSignal == null ? void 0 : cacheSignal.beginRead();
        }
    }
    // Ends the read only if one was actually started, then marks the cycle
    // complete so startReadOnce cannot re-open it.
    function endReadIfStarted() {
        if (readStatus === 1) {
            cacheSignal == null ? void 0 : cacheSignal.endRead();
        }
        readStatus = 2;
    }
    // streamToString might take longer than a microtask to resolve and then other things
    // waiting on the cache signal might not realize there is another cache to fill so if
    // we are no longer waiting on the bound args serialization via the hangingInputAbortSignal
    // we should eagerly start the cache read to prevent other readers of the cache signal from
    // missing this cache fill. We use a idempotent function to only start reading once because
    // it's also possible that streamToString finishes before the hangingInputAbortSignal aborts.
    if (hangingInputAbortSignal && cacheSignal) {
        hangingInputAbortSignal.addEventListener('abort', startReadOnce, {
            once: true
        });
    }
    // Using Flight to serialize the args into a string.
    const serialized = await streamToString(renderToReadableStream(args, clientModules, {
        filterStackFrame,
        signal: hangingInputAbortSignal,
        onError (err) {
            // Errors after the signal aborted are expected (hanging inputs).
            if (hangingInputAbortSignal == null ? void 0 : hangingInputAbortSignal.aborted) {
                return;
            }
            // We're only reporting one error at a time, starting with the first.
            if (didCatchError) {
                return;
            }
            didCatchError = true;
            // Use the original error message together with the previously created
            // stack, because err.stack is a useless Flight Server call stack.
            error.message = err instanceof Error ? err.message : String(err);
        }
    }), // We pass the abort signal to `streamToString` so that no chunks are
    // included that are emitted after the signal was already aborted. This
    // ensures that we can encode hanging promises.
    hangingInputAbortSignal);
    if (didCatchError) {
        if (process.env.NODE_ENV === 'development') {
            // Logging the error is needed for server functions that are passed to the
            // client where the decryption is not done during rendering. Console
            // replaying allows us to still show the error dev overlay in this case.
            console.error(error);
        }
        endReadIfStarted();
        throw error;
    }
    if (!workUnitStore) {
        // We don't need to call cacheSignal.endRead here because we can't have a cacheSignal
        // if we do not have a workUnitStore.
        return encodeActionBoundArg(actionId, serialized);
    }
    startReadOnce();
    const prerenderResumeDataCache = getPrerenderResumeDataCache(workUnitStore);
    const renderResumeDataCache = getRenderResumeDataCache(workUnitStore);
    // The key couples the action to its serialized args so different actions
    // with identical args do not collide.
    const cacheKey = actionId + serialized;
    const cachedEncrypted = (prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.encryptedBoundArgs.get(cacheKey)) ?? (renderResumeDataCache == null ? void 0 : renderResumeDataCache.encryptedBoundArgs.get(cacheKey));
    if (cachedEncrypted) {
        return cachedEncrypted;
    }
    const encrypted = await encodeActionBoundArg(actionId, serialized);
    endReadIfStarted();
    // Only the prerender cache is written back; the render cache is read-only here.
    prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.encryptedBoundArgs.set(cacheKey, encrypted);
    return encrypted;
});
// Decrypts the action's bound args from the encrypted string.
export async function decryptActionBoundArgs(actionId, encryptedPromise) {
    const encrypted = await encryptedPromise;
    const workUnitStore = workUnitAsyncStorage.getStore();
    let decrypted;
    if (workUnitStore) {
        const cacheSignal = getCacheSignal(workUnitStore);
        const prerenderResumeDataCache = getPrerenderResumeDataCache(workUnitStore);
        const renderResumeDataCache = getRenderResumeDataCache(workUnitStore);
        // Prefer a previously decrypted value from the resume data caches
        // before doing the cryptographic work again.
        decrypted = (prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.decryptedBoundArgs.get(encrypted)) ?? (renderResumeDataCache == null ? void 0 : renderResumeDataCache.decryptedBoundArgs.get(encrypted));
        if (!decrypted) {
            // Bracket the decryption in a cacheSignal read so in-flight work is
            // visible to other readers of the signal.
            cacheSignal == null ? void 0 : cacheSignal.beginRead();
            decrypted = await decodeActionBoundArg(actionId, encrypted);
            cacheSignal == null ? void 0 : cacheSignal.endRead();
            // Only the prerender cache is written back here.
            prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.decryptedBoundArgs.set(encrypted, decrypted);
        }
    } else {
        decrypted = await decodeActionBoundArg(actionId, encrypted);
    }
    const { edgeRscModuleMapping, rscModuleMapping } = getClientReferenceManifest();
    // Using Flight to deserialize the args from the string.
    const deserialized = await createFromReadableStream(new ReadableStream({
        start (controller) {
            controller.enqueue(textEncoder.encode(decrypted));
            // Whether the stream may close depends on the work unit type: during
            // prerenders it must stay open until the render is aborted.
            switch(workUnitStore == null ? void 0 : workUnitStore.type){
                case 'prerender':
                case 'prerender-runtime':
                    // Explicitly don't close the stream here (until prerendering is
                    // complete) so that hanging promises are not rejected.
                    if (workUnitStore.renderSignal.aborted) {
                        controller.close();
                    } else {
                        workUnitStore.renderSignal.addEventListener('abort', ()=>controller.close(), {
                            once: true
                        });
                    }
                    break;
                case 'prerender-client':
                case 'prerender-ppr':
                case 'prerender-legacy':
                case 'request':
                case 'cache':
                case 'private-cache':
                case 'unstable-cache':
                case undefined:
                    return controller.close();
                default:
                    // Exhaustiveness check (compiled remnant of TypeScript's
                    // `never` pattern); unreachable for known store types.
                    workUnitStore;
            }
        }
    }), {
        findSourceMapURL,
        serverConsumerManifest: {
            // moduleLoading must be null because we don't want to trigger preloads of ClientReferences
            // to be added to the current execution. Instead, we'll wait for any ClientReference
            // to be emitted which themselves will handle the preloading.
            moduleLoading: null,
            moduleMap: isEdgeRuntime ? edgeRscModuleMapping : rscModuleMapping,
            serverModuleMap: getServerModuleMap()
        }
    });
    return deserialized;
}
//# sourceMappingURL=encryption.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,53 @@
// eslint-disable-next-line import/no-extraneous-dependencies
export { createTemporaryReferenceSet, renderToReadableStream, decodeReply, decodeAction, decodeFormState } from 'react-server-dom-webpack/server';
// eslint-disable-next-line import/no-extraneous-dependencies
export { prerender } from 'react-server-dom-webpack/static';
// TODO: Just re-export `* as ReactServer`
export { captureOwnerStack, createElement, Fragment } from 'react';
export { default as LayoutRouter } from '../../client/components/layout-router';
export { default as RenderFromTemplateContext } from '../../client/components/render-from-template-context';
export { workAsyncStorage } from '../app-render/work-async-storage.external';
export { workUnitAsyncStorage } from './work-unit-async-storage.external';
export { actionAsyncStorage } from '../app-render/action-async-storage.external';
export { ClientPageRoot } from '../../client/components/client-page';
export { ClientSegmentRoot } from '../../client/components/client-segment';
export { createServerSearchParamsForServerPage, createPrerenderSearchParamsForClientPage } from '../request/search-params';
export { createServerParamsForServerSegment, createPrerenderParamsForClientSegment } from '../request/params';
export * as serverHooks from '../../client/components/hooks-server-context';
export { HTTPAccessFallbackBoundary } from '../../client/components/http-access-fallback/error-boundary';
export { createMetadataComponents } from '../../lib/metadata/metadata';
export { RootLayoutBoundary } from '../../lib/framework/boundary-components';
export { preloadStyle, preloadFont, preconnect } from './rsc/preloads';
export { Postpone } from './rsc/postpone';
export { taintObjectReference } from './rsc/taint';
export { collectSegmentData } from './collect-segment-data';
import { workAsyncStorage } from '../app-render/work-async-storage.external';
import { workUnitAsyncStorage } from './work-unit-async-storage.external';
import { patchFetch as _patchFetch } from '../lib/patch-fetch';
// Segment-explorer devtools components. They default to no-op stubs so the
// production bundle does not pull in next-devtools; the real implementations
// are only required in development.
let SegmentViewNode = ()=>null;
let SegmentViewStateNode = ()=>null;
if (process.env.NODE_ENV === 'development') {
    const mod = require('../../next-devtools/userspace/app/segment-explorer-node');
    SegmentViewNode = mod.SegmentViewNode;
    SegmentViewStateNode = mod.SegmentViewStateNode;
}
// hot-reloader modules are not bundled so we need to inject `__next__clear_chunk_cache__`
// into globalThis from this file which is bundled.
if (process.env.TURBOPACK) {
    // `__turbopack_clear_chunk_cache__` is injected by the Turbopack runtime.
    globalThis.__next__clear_chunk_cache__ = __turbopack_clear_chunk_cache__;
} else {
    // Webpack does not have chunks on the server
    globalThis.__next__clear_chunk_cache__ = null;
}
// patchFetch makes use of APIs such as `React.unstable_postpone` which are only available
// in the experimental channel of React, so export it from here so that it comes from the bundled runtime
export function patchFetch() {
    const storages = {
        workAsyncStorage,
        workUnitAsyncStorage
    };
    return _patchFetch(storages);
}
// Development only
export { SegmentViewNode, SegmentViewStateNode };
//# sourceMappingURL=entry-base.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,15 @@
import { RSC_CONTENT_TYPE_HEADER } from '../../client/components/app-router-headers';
import RenderResult from '../render-result';
/**
 * Flight Response is always set to RSC_CONTENT_TYPE_HEADER to ensure it does not get interpreted as HTML.
 */ export class FlightRenderResult extends RenderResult {
    constructor(response, metadata = {}, waitUntil){
        // Force the RSC content type; everything else passes through.
        const options = {
            contentType: RSC_CONTENT_TYPE_HEADER,
            metadata,
            waitUntil
        };
        super(response, options);
    }
}
//# sourceMappingURL=flight-render-result.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/flight-render-result.ts"],"sourcesContent":["import { RSC_CONTENT_TYPE_HEADER } from '../../client/components/app-router-headers'\nimport RenderResult, { type RenderResultMetadata } from '../render-result'\n\n/**\n * Flight Response is always set to RSC_CONTENT_TYPE_HEADER to ensure it does not get interpreted as HTML.\n */\nexport class FlightRenderResult extends RenderResult {\n constructor(\n response: string | ReadableStream<Uint8Array>,\n metadata: RenderResultMetadata = {},\n waitUntil?: Promise<unknown>\n ) {\n super(response, {\n contentType: RSC_CONTENT_TYPE_HEADER,\n metadata,\n waitUntil,\n })\n }\n}\n"],"names":["RSC_CONTENT_TYPE_HEADER","RenderResult","FlightRenderResult","constructor","response","metadata","waitUntil","contentType"],"mappings":"AAAA,SAASA,uBAAuB,QAAQ,6CAA4C;AACpF,OAAOC,kBAAiD,mBAAkB;AAE1E;;CAEC,GACD,OAAO,MAAMC,2BAA2BD;IACtCE,YACEC,QAA6C,EAC7CC,WAAiC,CAAC,CAAC,EACnCC,SAA4B,CAC5B;QACA,KAAK,CAACF,UAAU;YACdG,aAAaP;YACbK;YACAC;QACF;IACF;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,19 @@
const isDev = process.env.NODE_ENV === 'development';
const isTurbopack = !!process.env.TURBOPACK;
export function getAssetQueryString(ctx, addTimestamp) {
let qs = '';
// In development we add the request timestamp to allow react to
// reload assets when a new RSC response is received.
// Turbopack handles HMR of assets itself and react doesn't need to reload them
// so this approach is not needed for Turbopack.
const shouldAddVersion = isDev && !isTurbopack && addTimestamp;
if (shouldAddVersion) {
qs += `?v=${ctx.requestTimestamp}`;
}
if (ctx.renderOpts.deploymentId) {
qs += `${shouldAddVersion ? '&' : '?'}dpl=${ctx.renderOpts.deploymentId}`;
}
return qs;
}
//# sourceMappingURL=get-asset-query-string.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/get-asset-query-string.ts"],"sourcesContent":["import type { AppRenderContext } from './app-render'\n\nconst isDev = process.env.NODE_ENV === 'development'\nconst isTurbopack = !!process.env.TURBOPACK\n\nexport function getAssetQueryString(\n ctx: AppRenderContext,\n addTimestamp: boolean\n) {\n let qs = ''\n\n // In development we add the request timestamp to allow react to\n // reload assets when a new RSC response is received.\n // Turbopack handles HMR of assets itself and react doesn't need to reload them\n // so this approach is not needed for Turbopack.\n const shouldAddVersion = isDev && !isTurbopack && addTimestamp\n if (shouldAddVersion) {\n qs += `?v=${ctx.requestTimestamp}`\n }\n\n if (ctx.renderOpts.deploymentId) {\n qs += `${shouldAddVersion ? '&' : '?'}dpl=${ctx.renderOpts.deploymentId}`\n }\n return qs\n}\n"],"names":["isDev","process","env","NODE_ENV","isTurbopack","TURBOPACK","getAssetQueryString","ctx","addTimestamp","qs","shouldAddVersion","requestTimestamp","renderOpts","deploymentId"],"mappings":"AAEA,MAAMA,QAAQC,QAAQC,GAAG,CAACC,QAAQ,KAAK;AACvC,MAAMC,cAAc,CAAC,CAACH,QAAQC,GAAG,CAACG,SAAS;AAE3C,OAAO,SAASC,oBACdC,GAAqB,EACrBC,YAAqB;IAErB,IAAIC,KAAK;IAET,gEAAgE;IAChE,qDAAqD;IACrD,+EAA+E;IAC/E,gDAAgD;IAChD,MAAMC,mBAAmBV,SAAS,CAACI,eAAeI;IAClD,IAAIE,kBAAkB;QACpBD,MAAM,CAAC,GAAG,EAAEF,IAAII,gBAAgB,EAAE;IACpC;IAEA,IAAIJ,IAAIK,UAAU,CAACC,YAAY,EAAE;QAC/BJ,MAAM,GAAGC,mBAAmB,MAAM,IAAI,IAAI,EAAEH,IAAIK,UAAU,CAACC,YAAY,EAAE;IAC3E;IACA,OAAOJ;AACT","ignoreList":[0]}

View File

@@ -0,0 +1,41 @@
import { getClientReferenceManifest } from './manifests-singleton';
/**
* Get external stylesheet link hrefs based on server CSS manifest.
*/ export function getLinkAndScriptTags(filePath, injectedCSS, injectedScripts, collectNewImports) {
const filePathWithoutExt = filePath.replace(/\.[^.]+$/, '');
const cssChunks = new Set();
const jsChunks = new Set();
const { entryCSSFiles, entryJSFiles } = getClientReferenceManifest();
const cssFiles = entryCSSFiles[filePathWithoutExt];
const jsFiles = entryJSFiles == null ? void 0 : entryJSFiles[filePathWithoutExt];
if (cssFiles) {
for (const css of cssFiles){
if (!injectedCSS.has(css.path)) {
if (collectNewImports) {
injectedCSS.add(css.path);
}
cssChunks.add(css);
}
}
}
if (jsFiles) {
for (const file of jsFiles){
if (!injectedScripts.has(file)) {
if (collectNewImports) {
injectedScripts.add(file);
}
jsChunks.add(file);
}
}
}
return {
styles: [
...cssChunks
],
scripts: [
...jsChunks
]
};
}
//# sourceMappingURL=get-css-inlined-link-tags.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/get-css-inlined-link-tags.tsx"],"sourcesContent":["import type { CssResource } from '../../build/webpack/plugins/flight-manifest-plugin'\nimport { getClientReferenceManifest } from './manifests-singleton'\n\n/**\n * Get external stylesheet link hrefs based on server CSS manifest.\n */\nexport function getLinkAndScriptTags(\n filePath: string,\n injectedCSS: Set<string>,\n injectedScripts: Set<string>,\n collectNewImports?: boolean\n): { styles: CssResource[]; scripts: string[] } {\n const filePathWithoutExt = filePath.replace(/\\.[^.]+$/, '')\n const cssChunks = new Set<CssResource>()\n const jsChunks = new Set<string>()\n const { entryCSSFiles, entryJSFiles } = getClientReferenceManifest()\n const cssFiles = entryCSSFiles[filePathWithoutExt]\n const jsFiles = entryJSFiles?.[filePathWithoutExt]\n\n if (cssFiles) {\n for (const css of cssFiles) {\n if (!injectedCSS.has(css.path)) {\n if (collectNewImports) {\n injectedCSS.add(css.path)\n }\n cssChunks.add(css)\n }\n }\n }\n\n if (jsFiles) {\n for (const file of jsFiles) {\n if (!injectedScripts.has(file)) {\n if (collectNewImports) {\n injectedScripts.add(file)\n }\n jsChunks.add(file)\n }\n }\n }\n\n return { styles: [...cssChunks], scripts: [...jsChunks] 
}\n}\n"],"names":["getClientReferenceManifest","getLinkAndScriptTags","filePath","injectedCSS","injectedScripts","collectNewImports","filePathWithoutExt","replace","cssChunks","Set","jsChunks","entryCSSFiles","entryJSFiles","cssFiles","jsFiles","css","has","path","add","file","styles","scripts"],"mappings":"AACA,SAASA,0BAA0B,QAAQ,wBAAuB;AAElE;;CAEC,GACD,OAAO,SAASC,qBACdC,QAAgB,EAChBC,WAAwB,EACxBC,eAA4B,EAC5BC,iBAA2B;IAE3B,MAAMC,qBAAqBJ,SAASK,OAAO,CAAC,YAAY;IACxD,MAAMC,YAAY,IAAIC;IACtB,MAAMC,WAAW,IAAID;IACrB,MAAM,EAAEE,aAAa,EAAEC,YAAY,EAAE,GAAGZ;IACxC,MAAMa,WAAWF,aAAa,CAACL,mBAAmB;IAClD,MAAMQ,UAAUF,gCAAAA,YAAc,CAACN,mBAAmB;IAElD,IAAIO,UAAU;QACZ,KAAK,MAAME,OAAOF,SAAU;YAC1B,IAAI,CAACV,YAAYa,GAAG,CAACD,IAAIE,IAAI,GAAG;gBAC9B,IAAIZ,mBAAmB;oBACrBF,YAAYe,GAAG,CAACH,IAAIE,IAAI;gBAC1B;gBACAT,UAAUU,GAAG,CAACH;YAChB;QACF;IACF;IAEA,IAAID,SAAS;QACX,KAAK,MAAMK,QAAQL,QAAS;YAC1B,IAAI,CAACV,gBAAgBY,GAAG,CAACG,OAAO;gBAC9B,IAAId,mBAAmB;oBACrBD,gBAAgBc,GAAG,CAACC;gBACtB;gBACAT,SAASQ,GAAG,CAACC;YACf;QACF;IACF;IAEA,OAAO;QAAEC,QAAQ;eAAIZ;SAAU;QAAEa,SAAS;eAAIX;SAAS;IAAC;AAC1D","ignoreList":[0]}

View File

@@ -0,0 +1,55 @@
import { getLinkAndScriptTags } from './get-css-inlined-link-tags';
import { getPreloadableFonts } from './get-preloadable-fonts';
import { getAssetQueryString } from './get-asset-query-string';
import { encodeURIPath } from '../../shared/lib/encode-uri-path';
import { renderCssResource } from './render-css-resource';
/**
 * Builds the React elements for one layout/page layer's assets: rendered CSS
 * resources and async <script> tags. Also queues preload callbacks for fonts
 * used by this layer, or a preconnect when the font manifest shows font usage
 * but nothing new to preload. Returns null when the layer contributes no
 * styles or scripts.
 */ export function getLayerAssets({ ctx, layoutOrPagePath, injectedCSS: injectedCSSWithCurrentLayout, injectedJS: injectedJSWithCurrentLayout, injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout, preloadCallbacks }) {
    const { componentMod: { createElement } } = ctx;
    // Resolve this layer's new styles/scripts, recording them in the injected
    // sets (collectNewImports=true) so deeper layers skip them.
    const { styles: styleTags, scripts: scriptTags } = layoutOrPagePath ? getLinkAndScriptTags(layoutOrPagePath, injectedCSSWithCurrentLayout, injectedJSWithCurrentLayout, true) : {
        styles: [],
        scripts: []
    };
    const preloadedFontFiles = layoutOrPagePath ? getPreloadableFonts(ctx.renderOpts.nextFontManifest, layoutOrPagePath, injectedFontPreloadTagsWithCurrentLayout) : null;
    if (preloadedFontFiles) {
        if (preloadedFontFiles.length) {
            for(let i = 0; i < preloadedFontFiles.length; i++){
                const fontFilename = preloadedFontFiles[i];
                // NOTE(review): exec() returns null for filenames outside these
                // extensions, which would throw here — presumably the font
                // manifest only ever lists these formats; confirm upstream.
                const ext = /\.(woff|woff2|eot|ttf|otf)$/.exec(fontFilename)[1];
                const type = `font/${ext}`;
                const href = `${ctx.assetPrefix}/_next/${encodeURIPath(fontFilename)}`;
                preloadCallbacks.push(()=>{
                    ctx.componentMod.preloadFont(href, type, ctx.renderOpts.crossOrigin, ctx.nonce);
                });
            }
        } else {
            // Fonts are used by this layer but none need preloading: preconnect
            // to the asset origin instead.
            try {
                let url = new URL(ctx.assetPrefix);
                preloadCallbacks.push(()=>{
                    ctx.componentMod.preconnect(url.origin, 'anonymous', ctx.nonce);
                });
            } catch (error) {
                // assetPrefix must not be a fully qualified domain name. We assume
                // we should preconnect to same origin instead
                preloadCallbacks.push(()=>{
                    ctx.componentMod.preconnect('/', 'anonymous', ctx.nonce);
                });
            }
        }
    }
    const styles = renderCssResource(styleTags, ctx, preloadCallbacks);
    // Entry scripts are emitted as async <script> tags with the asset query
    // string (timestamp/deployment id) appended.
    const scripts = scriptTags ? scriptTags.map((href, index)=>{
        const fullSrc = `${ctx.assetPrefix}/_next/${encodeURIPath(href)}${getAssetQueryString(ctx, true)}`;
        return createElement('script', {
            src: fullSrc,
            async: true,
            key: `script-${index}`,
            nonce: ctx.nonce
        });
    }) : [];
    return styles.length || scripts.length ? [
        ...styles,
        ...scripts
    ] : null;
}
//# sourceMappingURL=get-layer-assets.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,35 @@
/**
* Get hrefs for fonts to preload
* Returns null if there are no fonts at all.
* Returns string[] if there are fonts to preload (font paths)
* Returns empty string[] if there are fonts but none to preload and no other fonts have been preloaded
* Returns null if there are fonts but none to preload and at least some were previously preloaded
*/ export function getPreloadableFonts(nextFontManifest, filePath, injectedFontPreloadTags) {
if (!nextFontManifest || !filePath) {
return null;
}
const filepathWithoutExtension = filePath.replace(/\.[^.]+$/, '');
const fontFiles = new Set();
let foundFontUsage = false;
const preloadedFontFiles = nextFontManifest.app[filepathWithoutExtension];
if (preloadedFontFiles) {
foundFontUsage = true;
for (const fontFile of preloadedFontFiles){
if (!injectedFontPreloadTags.has(fontFile)) {
fontFiles.add(fontFile);
injectedFontPreloadTags.add(fontFile);
}
}
}
if (fontFiles.size) {
return [
...fontFiles
].sort();
} else if (foundFontUsage && injectedFontPreloadTags.size === 0) {
return [];
} else {
return null;
}
}
//# sourceMappingURL=get-preloadable-fonts.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/get-preloadable-fonts.tsx"],"sourcesContent":["import type { NextFontManifest } from '../../build/webpack/plugins/next-font-manifest-plugin'\nimport type { DeepReadonly } from '../../shared/lib/deep-readonly'\n\n/**\n * Get hrefs for fonts to preload\n * Returns null if there are no fonts at all.\n * Returns string[] if there are fonts to preload (font paths)\n * Returns empty string[] if there are fonts but none to preload and no other fonts have been preloaded\n * Returns null if there are fonts but none to preload and at least some were previously preloaded\n */\nexport function getPreloadableFonts(\n nextFontManifest: DeepReadonly<NextFontManifest> | undefined,\n filePath: string | undefined,\n injectedFontPreloadTags: Set<string>\n): string[] | null {\n if (!nextFontManifest || !filePath) {\n return null\n }\n const filepathWithoutExtension = filePath.replace(/\\.[^.]+$/, '')\n const fontFiles = new Set<string>()\n let foundFontUsage = false\n\n const preloadedFontFiles = nextFontManifest.app[filepathWithoutExtension]\n if (preloadedFontFiles) {\n foundFontUsage = true\n for (const fontFile of preloadedFontFiles) {\n if (!injectedFontPreloadTags.has(fontFile)) {\n fontFiles.add(fontFile)\n injectedFontPreloadTags.add(fontFile)\n }\n }\n }\n\n if (fontFiles.size) {\n return [...fontFiles].sort()\n } else if (foundFontUsage && injectedFontPreloadTags.size === 0) {\n return []\n } else {\n return null\n 
}\n}\n"],"names":["getPreloadableFonts","nextFontManifest","filePath","injectedFontPreloadTags","filepathWithoutExtension","replace","fontFiles","Set","foundFontUsage","preloadedFontFiles","app","fontFile","has","add","size","sort"],"mappings":"AAGA;;;;;;CAMC,GACD,OAAO,SAASA,oBACdC,gBAA4D,EAC5DC,QAA4B,EAC5BC,uBAAoC;IAEpC,IAAI,CAACF,oBAAoB,CAACC,UAAU;QAClC,OAAO;IACT;IACA,MAAME,2BAA2BF,SAASG,OAAO,CAAC,YAAY;IAC9D,MAAMC,YAAY,IAAIC;IACtB,IAAIC,iBAAiB;IAErB,MAAMC,qBAAqBR,iBAAiBS,GAAG,CAACN,yBAAyB;IACzE,IAAIK,oBAAoB;QACtBD,iBAAiB;QACjB,KAAK,MAAMG,YAAYF,mBAAoB;YACzC,IAAI,CAACN,wBAAwBS,GAAG,CAACD,WAAW;gBAC1CL,UAAUO,GAAG,CAACF;gBACdR,wBAAwBU,GAAG,CAACF;YAC9B;QACF;IACF;IAEA,IAAIL,UAAUQ,IAAI,EAAE;QAClB,OAAO;eAAIR;SAAU,CAACS,IAAI;IAC5B,OAAO,IAAIP,kBAAkBL,wBAAwBW,IAAI,KAAK,GAAG;QAC/D,OAAO,EAAE;IACX,OAAO;QACL,OAAO;IACT;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,34 @@
import { ESCAPE_REGEX } from '../htmlescape';
/**
 * Extracts the script nonce from a Content-Security-Policy header value.
 * Looks at the 'script-src' directive (falling back to 'default-src'),
 * returns the value inside the first `'nonce-…'` source, or undefined when no
 * applicable directive or nonce source exists. Throws when the nonce contains
 * HTML escape characters.
 */ export function getScriptNonceFromHeader(cspHeaderValue) {
    // Directives are separated by ';'.
    const directives = cspHeaderValue.split(';').map((directive)=>directive.trim());
    // Prefer 'script-src'; fall back to 'default-src'.
    const directive = directives.find((dir)=>dir.startsWith('script-src')) || directives.find((dir)=>dir.startsWith('default-src'));
    if (!directive) {
        return;
    }
    // Scan the directive's sources (skipping the directive name itself) for
    // the first `'nonce-…'` entry, and strip the quoting and prefix.
    let nonce;
    for (const rawSource of directive.split(' ').slice(1)){
        const source = rawSource.trim();
        if (source.startsWith("'nonce-") && source.length > 8 && source.endsWith("'")) {
            nonce = source.slice(7, -1);
            break;
        }
    }
    if (!nonce) {
        return;
    }
    // Don't accept the nonce value if it contains HTML escape characters.
    // Technically, the spec requires a base64'd value, but this is just an
    // extra layer.
    if (ESCAPE_REGEX.test(nonce)) {
        throw Object.defineProperty(new Error('Nonce value from Content-Security-Policy contained HTML escape characters.\nLearn more: https://nextjs.org/docs/messages/nonce-contained-invalid-characters'), "__NEXT_ERROR_CODE", {
            value: "E440",
            enumerable: false,
            configurable: true
        });
    }
    return nonce;
}
//# sourceMappingURL=get-script-nonce-from-header.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/get-script-nonce-from-header.tsx"],"sourcesContent":["import { ESCAPE_REGEX } from '../htmlescape'\n\nexport function getScriptNonceFromHeader(\n cspHeaderValue: string\n): string | undefined {\n const directives = cspHeaderValue\n // Directives are split by ';'.\n .split(';')\n .map((directive) => directive.trim())\n\n // First try to find the directive for the 'script-src', otherwise try to\n // fallback to the 'default-src'.\n const directive =\n directives.find((dir) => dir.startsWith('script-src')) ||\n directives.find((dir) => dir.startsWith('default-src'))\n\n // If no directive could be found, then we're done.\n if (!directive) {\n return\n }\n\n // Extract the nonce from the directive\n const nonce = directive\n .split(' ')\n // Remove the 'strict-src'/'default-src' string, this can't be the nonce.\n .slice(1)\n .map((source) => source.trim())\n // Find the first source with the 'nonce-' prefix.\n .find(\n (source) =>\n source.startsWith(\"'nonce-\") &&\n source.length > 8 &&\n source.endsWith(\"'\")\n )\n // Grab the nonce by trimming the 'nonce-' prefix.\n ?.slice(7, -1)\n\n // If we could't find the nonce, then we're done.\n if (!nonce) {\n return\n }\n\n // Don't accept the nonce value if it contains HTML escape characters.\n // Technically, the spec requires a base64'd value, but this is just an\n // extra layer.\n if (ESCAPE_REGEX.test(nonce)) {\n throw new Error(\n 'Nonce value from Content-Security-Policy contained HTML escape characters.\\nLearn more: https://nextjs.org/docs/messages/nonce-contained-invalid-characters'\n )\n }\n\n return 
nonce\n}\n"],"names":["ESCAPE_REGEX","getScriptNonceFromHeader","cspHeaderValue","directive","directives","split","map","trim","find","dir","startsWith","nonce","slice","source","length","endsWith","test","Error"],"mappings":"AAAA,SAASA,YAAY,QAAQ,gBAAe;AAE5C,OAAO,SAASC,yBACdC,cAAsB;QAmBRC;IAjBd,MAAMC,aAAaF,cACjB,+BAA+B;KAC9BG,KAAK,CAAC,KACNC,GAAG,CAAC,CAACH,YAAcA,UAAUI,IAAI;IAEpC,yEAAyE;IACzE,iCAAiC;IACjC,MAAMJ,YACJC,WAAWI,IAAI,CAAC,CAACC,MAAQA,IAAIC,UAAU,CAAC,kBACxCN,WAAWI,IAAI,CAAC,CAACC,MAAQA,IAAIC,UAAU,CAAC;IAE1C,mDAAmD;IACnD,IAAI,CAACP,WAAW;QACd;IACF;IAEA,uCAAuC;IACvC,MAAMQ,SAAQR,kCAAAA,UACXE,KAAK,CAAC,IACP,yEAAyE;KACxEO,KAAK,CAAC,GACNN,GAAG,CAAC,CAACO,SAAWA,OAAON,IAAI,GAC5B,kDAAkD;KACjDC,IAAI,CACH,CAACK,SACCA,OAAOH,UAAU,CAAC,cAClBG,OAAOC,MAAM,GAAG,KAChBD,OAAOE,QAAQ,CAAC,0BAVRZ,gCAaVS,KAAK,CAAC,GAAG,CAAC;IAEd,iDAAiD;IACjD,IAAI,CAACD,OAAO;QACV;IACF;IAEA,sEAAsE;IACtE,uEAAuE;IACvE,eAAe;IACf,IAAIX,aAAagB,IAAI,CAACL,QAAQ;QAC5B,MAAM,qBAEL,CAFK,IAAIM,MACR,gKADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,OAAON;AACT","ignoreList":[0]}

View File

@@ -0,0 +1,15 @@
// Maps each dynamic param type name to its short encoding:
// c = catchall, oc = optional-catchall, d = dynamic; the `i(…)` suffixes
// carry the interception marker for intercepted variants.
export const dynamicParamTypes = {
    catchall: 'c',
    'catchall-intercepted-(..)(..)': 'ci(..)(..)',
    'catchall-intercepted-(.)': 'ci(.)',
    'catchall-intercepted-(..)': 'ci(..)',
    'catchall-intercepted-(...)': 'ci(...)',
    'optional-catchall': 'oc',
    dynamic: 'd',
    'dynamic-intercepted-(..)(..)': 'di(..)(..)',
    'dynamic-intercepted-(.)': 'di(.)',
    'dynamic-intercepted-(..)': 'di(..)',
    'dynamic-intercepted-(...)': 'di(...)'
};
//# sourceMappingURL=get-short-dynamic-param-type.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/get-short-dynamic-param-type.tsx"],"sourcesContent":["import type {\n DynamicParamTypes,\n DynamicParamTypesShort,\n} from '../../shared/lib/app-router-types'\n\nexport const dynamicParamTypes: Record<\n DynamicParamTypes,\n DynamicParamTypesShort\n> = {\n catchall: 'c',\n 'catchall-intercepted-(..)(..)': 'ci(..)(..)',\n 'catchall-intercepted-(.)': 'ci(.)',\n 'catchall-intercepted-(..)': 'ci(..)',\n 'catchall-intercepted-(...)': 'ci(...)',\n 'optional-catchall': 'oc',\n dynamic: 'd',\n 'dynamic-intercepted-(..)(..)': 'di(..)(..)',\n 'dynamic-intercepted-(.)': 'di(.)',\n 'dynamic-intercepted-(..)': 'di(..)',\n 'dynamic-intercepted-(...)': 'di(...)',\n}\n"],"names":["dynamicParamTypes","catchall","dynamic"],"mappings":"AAKA,OAAO,MAAMA,oBAGT;IACFC,UAAU;IACV,iCAAiC;IACjC,4BAA4B;IAC5B,6BAA6B;IAC7B,8BAA8B;IAC9B,qBAAqB;IACrBC,SAAS;IACT,gCAAgC;IAChC,2BAA2B;IAC3B,4BAA4B;IAC5B,6BAA6B;AAC/B,EAAC","ignoreList":[0]}

View File

@@ -0,0 +1,9 @@
export function hasLoadingComponentInTree(tree) {
const [, parallelRoutes, { loading }] = tree;
if (loading) {
return true;
}
return Object.values(parallelRoutes).some((parallelRoute)=>hasLoadingComponentInTree(parallelRoute));
}
//# sourceMappingURL=has-loading-component-in-tree.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/has-loading-component-in-tree.tsx"],"sourcesContent":["import type { LoaderTree } from '../lib/app-dir-module'\n\nexport function hasLoadingComponentInTree(tree: LoaderTree): boolean {\n const [, parallelRoutes, { loading }] = tree\n\n if (loading) {\n return true\n }\n\n return Object.values(parallelRoutes).some((parallelRoute) =>\n hasLoadingComponentInTree(parallelRoute)\n ) as boolean\n}\n"],"names":["hasLoadingComponentInTree","tree","parallelRoutes","loading","Object","values","some","parallelRoute"],"mappings":"AAEA,OAAO,SAASA,0BAA0BC,IAAgB;IACxD,MAAM,GAAGC,gBAAgB,EAAEC,OAAO,EAAE,CAAC,GAAGF;IAExC,IAAIE,SAAS;QACX,OAAO;IACT;IAEA,OAAOC,OAAOC,MAAM,CAACH,gBAAgBI,IAAI,CAAC,CAACC,gBACzCP,0BAA0BO;AAE9B","ignoreList":[0]}

View File

@@ -0,0 +1,7 @@
/**
* Interop between "export default" and "module.exports".
*/ export function interopDefault(mod) {
return mod.default || mod;
}
//# sourceMappingURL=interop-default.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/interop-default.ts"],"sourcesContent":["/**\n * Interop between \"export default\" and \"module.exports\".\n */\nexport function interopDefault(mod: any) {\n return mod.default || mod\n}\n"],"names":["interopDefault","mod","default"],"mappings":"AAAA;;CAEC,GACD,OAAO,SAASA,eAAeC,GAAQ;IACrC,OAAOA,IAAIC,OAAO,IAAID;AACxB","ignoreList":[0]}

View File

@@ -0,0 +1,77 @@
/* eslint-disable @next/internal/no-ambiguous-jsx -- whole module is used in React Client */ import { jsx as _jsx, jsxs as _jsxs, Fragment as _Fragment } from "react/jsx-runtime";
import React from 'react';
import { isHTTPAccessFallbackError } from '../../client/components/http-access-fallback/http-access-fallback';
import { getURLFromRedirectError, getRedirectStatusCodeFromError } from '../../client/components/redirect';
import { isRedirectError } from '../../client/components/redirect-error';
import { renderToReadableStream } from 'react-dom/server';
import { streamToString } from '../stream-utils/node-web-streams-helper';
import { RedirectStatusCode } from '../../client/components/redirect-status-code';
import { addPathPrefix } from '../../shared/lib/router/utils/add-path-prefix';
/**
 * Creates the `getServerInsertedHTML` callback used during streaming. Each
 * invocation renders to a string the HTML to inject mid-stream: polyfill
 * scripts, the providers' server-inserted HTML, trace metadata meta tags, and
 * meta tags derived from errors captured so far (noindex for HTTP access
 * fallbacks, a meta-refresh for redirects). Polyfills and trace tags are
 * emitted only on the first call; error tags are emitted incrementally.
 */ export function makeGetServerInsertedHTML({ polyfills, renderServerInsertedHTML, serverCapturedErrors, tracingMetadata, basePath }) {
    let flushedErrorMetaTagsUntilIndex = 0;
    // These only need to be rendered once, they'll be set to empty arrays once flushed.
    let polyfillTags = polyfills.map((polyfill)=>{
        return /*#__PURE__*/ _jsx("script", {
            ...polyfill
        }, polyfill.src);
    });
    let traceMetaTags = (tracingMetadata || []).map(({ key, value }, index)=>/*#__PURE__*/ _jsx("meta", {
            name: key,
            content: value
        }, `next-trace-data-${index}`));
    return async function getServerInsertedHTML() {
        // Loop through all the errors that have been captured but not yet
        // flushed.
        const errorMetaTags = [];
        while(flushedErrorMetaTagsUntilIndex < serverCapturedErrors.length){
            const error = serverCapturedErrors[flushedErrorMetaTagsUntilIndex];
            flushedErrorMetaTagsUntilIndex++;
            if (isHTTPAccessFallbackError(error)) {
                // HTTP access fallback: keep crawlers away, and in development
                // also tag the document for tooling.
                errorMetaTags.push(/*#__PURE__*/ _jsx("meta", {
                    name: "robots",
                    content: "noindex"
                }, error.digest), process.env.NODE_ENV === 'development' ? /*#__PURE__*/ _jsx("meta", {
                    name: "next-error",
                    content: "not-found"
                }, "next-error") : null);
            } else if (isRedirectError(error)) {
                const redirectUrl = addPathPrefix(getURLFromRedirectError(error), basePath);
                const statusCode = getRedirectStatusCodeFromError(error);
                const isPermanent = statusCode === RedirectStatusCode.PermanentRedirect ? true : false;
                if (redirectUrl) {
                    // Meta-refresh redirect: 0s delay for permanent redirects,
                    // 1s otherwise.
                    errorMetaTags.push(/*#__PURE__*/ _jsx("meta", {
                        id: "__next-page-redirect",
                        httpEquiv: "refresh",
                        content: `${isPermanent ? 0 : 1};url=${redirectUrl}`
                    }, error.digest));
                }
            }
        }
        const serverInsertedHTML = renderServerInsertedHTML();
        // Skip React rendering if we know the content is empty.
        if (polyfillTags.length === 0 && traceMetaTags.length === 0 && errorMetaTags.length === 0 && Array.isArray(serverInsertedHTML) && serverInsertedHTML.length === 0) {
            return '';
        }
        const stream = await renderToReadableStream(/*#__PURE__*/ _jsxs(_Fragment, {
            children: [
                polyfillTags,
                serverInsertedHTML,
                traceMetaTags,
                errorMetaTags
            ]
        }), {
            // Larger chunk because this isn't sent over the network.
            // Let's set it to 1MB.
            progressiveChunkSize: 1024 * 1024
        });
        // The polyfills and trace metadata have been flushed, so they don't need to be rendered again
        polyfillTags = [];
        traceMetaTags = [];
        // There's no need to wait for the stream to be ready
        // e.g. calling `await stream.allReady` because `streamToString` will
        // wait and decode the stream progressively with better parallelism.
        return streamToString(stream);
    };
}
//# sourceMappingURL=make-get-server-inserted-html.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,226 @@
import { InvariantError } from '../../shared/lib/invariant-error';
import { normalizeAppPath } from '../../shared/lib/router/utils/app-paths';
import { pathHasPrefix } from '../../shared/lib/router/utils/path-has-prefix';
import { removePathPrefix } from '../../shared/lib/router/utils/remove-path-prefix';
import { workAsyncStorage } from './work-async-storage.external';
// This is a global singleton that is, among other things, also used to
// encode/decode bound args of server function closures. This can't be using a
// AsyncLocalStorage as it might happen at the module level.
// Symbol.for() reads from the process-wide symbol registry, so every copy of
// this module resolves to the same singleton key on globalThis.
const MANIFESTS_SINGLETON = Symbol.for('next.server.manifests');
const globalThisWithManifests = globalThis;
/**
 * Creates a proxy that behaves like a single client reference manifest but
 * resolves every access against the manifest registered for the current route
 * (looked up via the work async storage). Mapping-shaped properties
 * (clientModules and the module mappings) are themselves proxied per module
 * id, with fallbacks across routes as described inline.
 */ function createProxiedClientReferenceManifest(clientReferenceManifestsPerRoute) {
    const createMappingProxy = (prop)=>{
        return new Proxy({}, {
            get (_, id) {
                const workStore = workAsyncStorage.getStore();
                if (workStore) {
                    // Prefer the entry from the current route's manifest.
                    const currentManifest = clientReferenceManifestsPerRoute.get(workStore.route);
                    if (currentManifest == null ? void 0 : currentManifest[prop][id]) {
                        return currentManifest[prop][id];
                    }
                    // In development, we also check all other manifests to see if the
                    // module exists there. This is to support a scenario where React's
                    // I/O tracking (dev-only) creates a connection from one page to
                    // another through an emitted async I/O node that references client
                    // components from the other page, e.g. in owner props.
                    // TODO: Maybe we need to add a `debugBundlerConfig` option to React
                    // to avoid this workaround. The current workaround has the
                    // disadvantage that one might accidentally or intentionally share
                    // client references across pages (e.g. by storing them in a global
                    // variable), which would then only be caught in production.
                    if (process.env.NODE_ENV !== 'production') {
                        for (const [route, manifest] of clientReferenceManifestsPerRoute){
                            if (route === workStore.route) {
                                continue;
                            }
                            const entry = manifest[prop][id];
                            if (entry !== undefined) {
                                return entry;
                            }
                        }
                    }
                } else {
                    // If there's no work store defined, we can assume that a client
                    // reference manifest is needed during module evaluation, e.g. to
                    // create a server function using a higher-order function. This
                    // might also use client components which need to be serialized by
                    // Flight, and therefore client references need to be resolvable. In
                    // that case we search all page manifests to find the module.
                    for (const manifest of clientReferenceManifestsPerRoute.values()){
                        const entry = manifest[prop][id];
                        if (entry !== undefined) {
                            return entry;
                        }
                    }
                }
                return undefined;
            }
        });
    };
    // Cache one mapping proxy per property name so repeated accesses return
    // the same proxy object.
    const mappingProxies = new Map();
    return new Proxy({}, {
        get (_, prop) {
            const workStore = workAsyncStorage.getStore();
            switch(prop){
                // Plain (non-mapping) properties: read straight from the current
                // route's manifest; these require a work store.
                case 'moduleLoading':
                case 'entryCSSFiles':
                case 'entryJSFiles':
                    {
                        if (!workStore) {
                            throw Object.defineProperty(new InvariantError(`Cannot access "${prop}" without a work store.`), "__NEXT_ERROR_CODE", {
                                value: "E952",
                                enumerable: false,
                                configurable: true
                            });
                        }
                        const currentManifest = clientReferenceManifestsPerRoute.get(workStore.route);
                        if (!currentManifest) {
                            throw Object.defineProperty(new InvariantError(`The client reference manifest for route "${workStore.route}" does not exist.`), "__NEXT_ERROR_CODE", {
                                value: "E951",
                                enumerable: false,
                                configurable: true
                            });
                        }
                        return currentManifest[prop];
                    }
                // Mapping properties get the per-id fallback proxy defined above.
                case 'clientModules':
                case 'rscModuleMapping':
                case 'edgeRscModuleMapping':
                case 'ssrModuleMapping':
                case 'edgeSSRModuleMapping':
                    {
                        let proxy = mappingProxies.get(prop);
                        if (!proxy) {
                            proxy = createMappingProxy(prop);
                            mappingProxies.set(prop, proxy);
                        }
                        return proxy;
                    }
                default:
                    {
                        throw Object.defineProperty(new InvariantError(`This is a proxied client reference manifest. The property "${String(prop)}" is not handled.`), "__NEXT_ERROR_CODE", {
                            value: "E953",
                            enumerable: false,
                            configurable: true
                        });
                    }
            }
        }
    });
}
/**
 * This function creates a Flight-acceptable server module map proxy from our
 * Server Reference Manifest similar to our client module map. This is because
 * our manifest contains a lot of internal Next.js data that are relevant to the
 * runtime, workers, etc. that React doesn't need to know.
 */ function createServerModuleMap() {
    return new Proxy({}, {
        get: (_, id)=>{
            var _getServerActionsManifest__id, _getServerActionsManifest_;
            // Look up the workers registered for this action id in the manifest
            // section for the current runtime (edge vs node); the var dance is
            // compiled optional chaining.
            const workers = (_getServerActionsManifest_ = getServerActionsManifest()[process.env.NEXT_RUNTIME === 'edge' ? 'edge' : 'node']) == null ? void 0 : (_getServerActionsManifest__id = _getServerActionsManifest_[id]) == null ? void 0 : _getServerActionsManifest__id.workers;
            if (!workers) {
                return undefined;
            }
            const workStore = workAsyncStorage.getStore();
            let workerEntry;
            if (workStore) {
                // Within a request: pick the worker entry for the current page.
                workerEntry = workers[normalizeWorkerPageName(workStore.page)];
            } else {
                // If there's no work store defined, we can assume that a server
                // module map is needed during module evaluation, e.g. to create a
                // server action using a higher-order function. Therefore it should be
                // safe to return any entry from the manifest that matches the action
                // ID. They all refer to the same module ID, which must also exist in
                // the current page bundle. TODO: This is currently not guaranteed in
                // Turbopack, and needs to be fixed.
                workerEntry = Object.values(workers).at(0);
            }
            if (!workerEntry) {
                return undefined;
            }
            const { moduleId, async } = workerEntry;
            // Shape expected by Flight: module id, export name, chunks, async flag.
            return {
                id: moduleId,
                name: id,
                chunks: [],
                async
            };
        }
    });
}
/**
 * The flight entry loader keys actions by bundlePath. bundlePath corresponds
 * with the relative path (including 'app') to the page entrypoint.
 */ function normalizeWorkerPageName(pageName) {
    // Already carries the 'app' prefix — return it untouched.
    return pathHasPrefix(pageName, 'app') ? pageName : 'app' + pageName;
}
/**
 * Converts a bundlePath (relative path to the entrypoint) back to a routable
 * page name by stripping the 'app' prefix and normalizing the app path.
 */ function denormalizeWorkerPageName(bundlePath) {
    const strippedPath = removePathPrefix(bundlePath, 'app');
    return normalizeAppPath(strippedPath);
}
/**
 * Checks if the requested action has a worker for the current page.
 * If not, returns the page name of the first worker that has a handler for
 * the action; returns undefined when no forwarding is needed or possible.
 */ export function selectWorkerForForwarding(actionId, pageName) {
    const runtime = process.env.NEXT_RUNTIME === 'edge' ? 'edge' : 'node';
    const actionEntry = getServerActionsManifest()[runtime][actionId];
    const workers = actionEntry == null ? undefined : actionEntry.workers;
    // There are no workers to handle this action, nothing to forward to.
    if (!workers) {
        return;
    }
    // If there is an entry for the current page, we don't need to forward.
    if (workers[normalizeWorkerPageName(pageName)]) {
        return;
    }
    // Otherwise, grab the first worker that has a handler for this action id.
    return denormalizeWorkerPageName(Object.keys(workers)[0]);
}
/**
 * Registers a route's client reference manifest (and the shared server
 * actions manifest) on the global singleton, creating the singleton —
 * including the proxied client reference manifest and the server module map —
 * on first use.
 */ export function setManifestsSingleton({ page, clientReferenceManifest, serverActionsManifest }) {
    const route = normalizeAppPath(page);
    const singleton = globalThisWithManifests[MANIFESTS_SINGLETON];
    if (singleton) {
        // Singleton already exists: register this route's manifest and refresh
        // the shared server actions manifest.
        singleton.clientReferenceManifestsPerRoute.set(route, clientReferenceManifest);
        singleton.serverActionsManifest = serverActionsManifest;
        return;
    }
    const clientReferenceManifestsPerRoute = new Map([
        [
            route,
            clientReferenceManifest
        ]
    ]);
    globalThisWithManifests[MANIFESTS_SINGLETON] = {
        clientReferenceManifestsPerRoute,
        proxiedClientReferenceManifest: createProxiedClientReferenceManifest(clientReferenceManifestsPerRoute),
        serverActionsManifest,
        serverModuleMap: createServerModuleMap()
    };
}
/**
 * Reads the global manifests singleton; throws an invariant error when
 * setManifestsSingleton has not run yet.
 */ function getManifestsSingleton() {
    const singleton = globalThisWithManifests[MANIFESTS_SINGLETON];
    if (singleton) {
        return singleton;
    }
    throw Object.defineProperty(new InvariantError('The manifests singleton was not initialized.'), "__NEXT_ERROR_CODE", {
        value: "E950",
        enumerable: false,
        configurable: true
    });
}
// Proxied manifest that resolves client references against the current route.
export function getClientReferenceManifest() {
    return getManifestsSingleton().proxiedClientReferenceManifest;
}
// Raw server actions manifest shared across all routes.
export function getServerActionsManifest() {
    return getManifestsSingleton().serverActionsManifest;
}
// Flight-compatible module map derived from the server actions manifest.
export function getServerModuleMap() {
    return getManifestsSingleton().serverModuleMap;
}
//# sourceMappingURL=manifests-singleton.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,18 @@
/**
 * For chromium based browsers (Chrome, Edge, etc.) and Safari,
 * icons need to stay under <head> to be picked up by the browser.
 *
 */ const REINSERT_ICON_SCRIPT = `\
document.querySelectorAll('body link[rel="icon"], body link[rel="apple-touch-icon"]').forEach(el => document.head.appendChild(el))`;
/**
 * Returns a one-shot producer of the icon-reinsertion <script> tag.
 * The first call yields the tag (with the nonce, when given); every
 * subsequent call yields an empty string so the script is emitted once.
 */ export function createServerInsertedMetadata(nonce) {
    let hasEmitted = false;
    return async function getServerInsertedMetadata() {
        if (hasEmitted) {
            return '';
        }
        hasEmitted = true;
        const nonceAttribute = nonce ? ` nonce="${nonce}"` : ' ';
        return `<script${nonceAttribute}>${REINSERT_ICON_SCRIPT}</script>`;
    };
}
//# sourceMappingURL=create-server-inserted-metadata.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/app-render/metadata-insertion/create-server-inserted-metadata.tsx"],"sourcesContent":["/**\n * For chromium based browsers (Chrome, Edge, etc.) and Safari,\n * icons need to stay under <head> to be picked up by the browser.\n *\n */\nconst REINSERT_ICON_SCRIPT = `\\\ndocument.querySelectorAll('body link[rel=\"icon\"], body link[rel=\"apple-touch-icon\"]').forEach(el => document.head.appendChild(el))`\n\nexport function createServerInsertedMetadata(nonce: string | undefined) {\n let inserted = false\n\n return async function getServerInsertedMetadata(): Promise<string> {\n if (inserted) {\n return ''\n }\n\n inserted = true\n return `<script ${nonce ? `nonce=\"${nonce}\"` : ''}>${REINSERT_ICON_SCRIPT}</script>`\n }\n}\n"],"names":["REINSERT_ICON_SCRIPT","createServerInsertedMetadata","nonce","inserted","getServerInsertedMetadata"],"mappings":"AAAA;;;;CAIC,GACD,MAAMA,uBAAuB,CAAC;kIACoG,CAAC;AAEnI,OAAO,SAASC,6BAA6BC,KAAyB;IACpE,IAAIC,WAAW;IAEf,OAAO,eAAeC;QACpB,IAAID,UAAU;YACZ,OAAO;QACT;QAEAA,WAAW;QACX,OAAO,CAAC,QAAQ,EAAED,QAAQ,CAAC,OAAO,EAAEA,MAAM,CAAC,CAAC,GAAG,GAAG,CAAC,EAAEF,qBAAqB,SAAS,CAAC;IACtF;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,53 @@
import { InvariantError } from '../../../shared/lib/invariant-error';
import { isThenable } from '../../../shared/lib/is-thenable';
import { trackPendingImport } from './track-module-loading.external';
/**
 * in CacheComponents, `import(...)` will be transformed into `trackDynamicImport(import(...))`.
 * A dynamic import is essentially a cached async function, except it's cached by the module system.
 *
 * The promises are tracked globally regardless of if the `import()` happens inside a render or outside of it.
 * When rendering, we can make the `cacheSignal` wait for all pending promises via `trackPendingModules`.
 * */ export function trackDynamicImport(modulePromise) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        const error = new InvariantError("Dynamic imports should not be instrumented in the edge runtime, because `cacheComponents` doesn't support it");
        throw Object.defineProperty(error, "__NEXT_ERROR_CODE", {
            value: "E687",
            enumerable: false,
            configurable: true
        });
    }
    if (!isThenable(modulePromise)) {
        // `import()` must always produce a promise; anything else means the
        // dynamic-imports transform handed us something unexpected.
        const error = new InvariantError('`trackDynamicImport` should always receive a promise. Something went wrong in the dynamic imports transform.');
        throw Object.defineProperty(error, "__NEXT_ERROR_CODE", {
            value: "E677",
            enumerable: false,
            configurable: true
        });
    }
    // Track globally (via the shared module-loading signal inside
    // `trackPendingImport`) rather than only on the current prerender's
    // cacheSignal: the `import()` promise may be cached in userspace
    // (e.g. lazy one-time initialization that stores the promise), in which
    // case later prerenders observe the same promise without
    // `trackDynamicImport` running again in their scope. Global tracking
    // makes every prerender that depends on the import wait for it, not
    // just the first one.
    trackPendingImport(modulePromise);
    return modulePromise;
}
//# sourceMappingURL=track-dynamic-import.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/app-render/module-loading/track-dynamic-import.ts"],"sourcesContent":["import { InvariantError } from '../../../shared/lib/invariant-error'\nimport { isThenable } from '../../../shared/lib/is-thenable'\nimport { trackPendingImport } from './track-module-loading.external'\n\n/**\n * in CacheComponents, `import(...)` will be transformed into `trackDynamicImport(import(...))`.\n * A dynamic import is essentially a cached async function, except it's cached by the module system.\n *\n * The promises are tracked globally regardless of if the `import()` happens inside a render or outside of it.\n * When rendering, we can make the `cacheSignal` wait for all pending promises via `trackPendingModules`.\n * */\nexport function trackDynamicImport<TExports extends Record<string, any>>(\n modulePromise: Promise<TExports>\n): Promise<TExports> {\n if (process.env.NEXT_RUNTIME === 'edge') {\n throw new InvariantError(\n \"Dynamic imports should not be instrumented in the edge runtime, because `cacheComponents` doesn't support it\"\n )\n }\n\n if (!isThenable(modulePromise)) {\n // We're expecting `import()` to always return a promise. If it's not, something's very wrong.\n throw new InvariantError(\n '`trackDynamicImport` should always receive a promise. Something went wrong in the dynamic imports transform.'\n )\n }\n\n // Even if we're inside a prerender and have `workUnitStore.cacheSignal`, we always track the promise globally.\n // (i.e. via the global `moduleLoadingSignal` that `trackPendingImport` uses internally).\n //\n // We do this because the `import()` promise might be cached in userspace:\n // (which is quite common for e.g. 
lazy initialization in libraries)\n //\n // let promise;\n // function doDynamicImportOnce() {\n // if (!promise) {\n // promise = import(\"...\");\n // // transformed into:\n // // promise = trackDynamicImport(import(\"...\"));\n // }\n // return promise;\n // }\n //\n // If multiple prerenders (e.g. multiple pages) depend on `doDynamicImportOnce`,\n // we have to wait for the import *in all of them*.\n // If we only tracked it using `workUnitStore.cacheSignal.trackRead()`,\n // then only the first prerender to call `doDynamicImportOnce` would wait --\n // Subsequent prerenders would re-use the existing `promise`,\n // and `trackDynamicImport` wouldn't be called again in their scope,\n // so their respective CacheSignals wouldn't wait for the promise.\n trackPendingImport(modulePromise)\n\n return modulePromise\n}\n"],"names":["InvariantError","isThenable","trackPendingImport","trackDynamicImport","modulePromise","process","env","NEXT_RUNTIME"],"mappings":"AAAA,SAASA,cAAc,QAAQ,sCAAqC;AACpE,SAASC,UAAU,QAAQ,kCAAiC;AAC5D,SAASC,kBAAkB,QAAQ,kCAAiC;AAEpE;;;;;;GAMG,GACH,OAAO,SAASC,mBACdC,aAAgC;IAEhC,IAAIC,QAAQC,GAAG,CAACC,YAAY,KAAK,QAAQ;QACvC,MAAM,qBAEL,CAFK,IAAIP,eACR,iHADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,IAAI,CAACC,WAAWG,gBAAgB;QAC9B,8FAA8F;QAC9F,MAAM,qBAEL,CAFK,IAAIJ,eACR,iHADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,+GAA+G;IAC/G,yFAAyF;IACzF,EAAE;IACF,0EAA0E;IAC1E,oEAAoE;IACpE,EAAE;IACF,iBAAiB;IACjB,qCAAqC;IACrC,sBAAsB;IACtB,iCAAiC;IACjC,6BAA6B;IAC7B,wDAAwD;IACxD,QAAQ;IACR,sBAAsB;IACtB,MAAM;IACN,EAAE;IACF,gFAAgF;IAChF,mDAAmD;IACnD,uEAAuE;IACvE,4EAA4E;IAC5E,6DAA6D;IAC7D,oEAAoE;IACpE,kEAAkE;IAClEE,mBAAmBE;IAEnB,OAAOA;AACT","ignoreList":[0]}

View File

@@ -0,0 +1,9 @@
// NOTE: this is marked as shared/external because it's stateful
// and the state needs to be shared between app-render (which waits for pending imports)
// and helpers used in transformed page code (which register pending imports)
import { trackPendingChunkLoad, trackPendingImport, trackPendingModules } from './track-module-loading.instance' with {
'turbopack-transition': 'next-shared'
};
export { trackPendingChunkLoad, trackPendingImport, trackPendingModules };
//# sourceMappingURL=track-module-loading.external.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/app-render/module-loading/track-module-loading.external.ts"],"sourcesContent":["// NOTE: this is marked as shared/external because it's stateful\n// and the state needs to be shared between app-render (which waits for pending imports)\n// and helpers used in transformed page code (which register pending imports)\n\nimport {\n trackPendingChunkLoad,\n trackPendingImport,\n trackPendingModules,\n} from './track-module-loading.instance' with { 'turbopack-transition': 'next-shared' }\n\nexport { trackPendingChunkLoad, trackPendingImport, trackPendingModules }\n"],"names":["trackPendingChunkLoad","trackPendingImport","trackPendingModules"],"mappings":"AAAA,gEAAgE;AAChE,wFAAwF;AACxF,6EAA6E;AAE7E,SACEA,qBAAqB,EACrBC,kBAAkB,EAClBC,mBAAmB,QACd,uCAAuC;IAAE,wBAAwB;AAAc,EAAC;AAEvF,SAASF,qBAAqB,EAAEC,kBAAkB,EAAEC,mBAAmB,GAAE","ignoreList":[0]}

View File

@@ -0,0 +1,47 @@
import { CacheSignal } from '../cache-signal';
import { isThenable } from '../../../shared/lib/is-thenable';
/**
 * Tracks all in-flight async imports and chunk loads.
 * Created lazily so that merely loading this module cannot throw if it is
 * pulled into an edge runtime bundle.
 */ let _moduleLoadingSignal;
function getModuleLoadingSignal() {
    if (_moduleLoadingSignal == null) {
        _moduleLoadingSignal = new CacheSignal();
    }
    return _moduleLoadingSignal;
}
/** Registers a chunk-load promise with the shared module-loading signal. */
export function trackPendingChunkLoad(promise) {
    getModuleLoadingSignal().trackRead(promise);
}
/**
 * Registers an async module's pending evaluation with the shared
 * module-loading signal. Synchronous module exports need no tracking.
 */ export function trackPendingImport(exportsOrPromise) {
    // Requiring an async module returns a promise; anything non-thenable is
    // a synchronous module and there is nothing in flight to wait for.
    if (!isThenable(exportsOrPromise)) {
        return;
    }
    // A client reference proxy might look like a promise, but only `.then()`
    // can safely be called on it (not e.g. `.finally()`). Normalize it into a
    // real promise before handing it to the signal.
    getModuleLoadingSignal().trackRead(Promise.resolve(exportsOrPromise));
}
/**
 * A top-level dynamic import (or chunk load):
 *
 * 1. delays a prerender (potentially for a task or longer)
 * 2. may reveal more caches that need be filled
 *
 * So if we see one, we want to extend the duration of `cacheSignal` at least until the import/chunk-load is done.
 */ export function trackPendingModules(cacheSignal) {
    const signal = getModuleLoadingSignal();
    // Subscribing (instead of a one-shot
    // `cacheSignal.trackRead(signal.cacheReady())`) matters because multiple
    // batches of module loads may start and finish while caches are still
    // filling; `cacheReady()` would resolve after only the first batch.
    const unsubscribe = signal.subscribeToReads(cacheSignal);
    // Once the cacheSignal has no pending caches (or imports we forwarded to
    // it), stop forwarding module-load notifications.
    cacheSignal.cacheReady().then(unsubscribe);
}
//# sourceMappingURL=track-module-loading.instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/app-render/module-loading/track-module-loading.instance.ts"],"sourcesContent":["import { CacheSignal } from '../cache-signal'\nimport { isThenable } from '../../../shared/lib/is-thenable'\n\n/**\n * Tracks all in-flight async imports and chunk loads.\n * Initialized lazily, because we don't want this to error in case it gets pulled into an edge runtime module.\n */\nlet _moduleLoadingSignal: CacheSignal | null\nfunction getModuleLoadingSignal() {\n if (!_moduleLoadingSignal) {\n _moduleLoadingSignal = new CacheSignal()\n }\n return _moduleLoadingSignal\n}\n\nexport function trackPendingChunkLoad(promise: Promise<unknown>) {\n const moduleLoadingSignal = getModuleLoadingSignal()\n moduleLoadingSignal.trackRead(promise)\n}\n\nexport function trackPendingImport(exportsOrPromise: unknown) {\n const moduleLoadingSignal = getModuleLoadingSignal()\n\n // requiring an async module returns a promise.\n // if it's sync, there's nothing to track.\n if (isThenable(exportsOrPromise)) {\n // A client reference proxy might look like a promise, but we can only call `.then()` on it, not e.g. `.finally()`.\n // Turn it into a real promise to avoid issues elsewhere.\n const promise = Promise.resolve(exportsOrPromise)\n moduleLoadingSignal.trackRead(promise)\n }\n}\n\n/**\n * A top-level dynamic import (or chunk load):\n *\n * 1. delays a prerender (potentially for a task or longer)\n * 2. 
may reveal more caches that need be filled\n *\n * So if we see one, we want to extend the duration of `cacheSignal` at least until the import/chunk-load is done.\n */\nexport function trackPendingModules(cacheSignal: CacheSignal): void {\n const moduleLoadingSignal = getModuleLoadingSignal()\n\n // We can't just use `cacheSignal.trackRead(moduleLoadingSignal.cacheReady())`,\n // because we might start and finish multiple batches of module loads while waiting for caches,\n // and `moduleLoadingSignal.cacheReady()` would resolve after the first batch.\n // Instead, we'll keep notifying `cacheSignal` of each import/chunk-load.\n const unsubscribe = moduleLoadingSignal.subscribeToReads(cacheSignal)\n\n // Later, when `cacheSignal` is no longer waiting for any caches (or imports that we've notified it of),\n // we can unsubscribe it.\n cacheSignal.cacheReady().then(unsubscribe)\n}\n"],"names":["CacheSignal","isThenable","_moduleLoadingSignal","getModuleLoadingSignal","trackPendingChunkLoad","promise","moduleLoadingSignal","trackRead","trackPendingImport","exportsOrPromise","Promise","resolve","trackPendingModules","cacheSignal","unsubscribe","subscribeToReads","cacheReady","then"],"mappings":"AAAA,SAASA,WAAW,QAAQ,kBAAiB;AAC7C,SAASC,UAAU,QAAQ,kCAAiC;AAE5D;;;CAGC,GACD,IAAIC;AACJ,SAASC;IACP,IAAI,CAACD,sBAAsB;QACzBA,uBAAuB,IAAIF;IAC7B;IACA,OAAOE;AACT;AAEA,OAAO,SAASE,sBAAsBC,OAAyB;IAC7D,MAAMC,sBAAsBH;IAC5BG,oBAAoBC,SAAS,CAACF;AAChC;AAEA,OAAO,SAASG,mBAAmBC,gBAAyB;IAC1D,MAAMH,sBAAsBH;IAE5B,+CAA+C;IAC/C,0CAA0C;IAC1C,IAAIF,WAAWQ,mBAAmB;QAChC,mHAAmH;QACnH,yDAAyD;QACzD,MAAMJ,UAAUK,QAAQC,OAAO,CAACF;QAChCH,oBAAoBC,SAAS,CAACF;IAChC;AACF;AAEA;;;;;;;CAOC,GACD,OAAO,SAASO,oBAAoBC,WAAwB;IAC1D,MAAMP,sBAAsBH;IAE5B,+EAA+E;IAC/E,+FAA+F;IAC/F,8EAA8E;IAC9E,yEAAyE;IACzE,MAAMW,cAAcR,oBAAoBS,gBAAgB,CAACF;IAEzD,wGAAwG;IACxG,yBAAyB;IACzBA,YAAYG,UAAU,GAAGC,IAAI,CAACH;AAChC","ignoreList":[0]}

View File

@@ -0,0 +1,39 @@
import { flightRouterStateSchema } from './types';
import { assert } from 'next/dist/compiled/superstruct';
/**
 * Decodes and validates the flight router state header.
 * Returns undefined when no header was sent; throws tagged errors for a
 * repeated header (E418), an oversized header (E142), or an unparsable /
 * schema-invalid header (E10).
 */ export function parseAndValidateFlightRouterState(stateHeader) {
    if (stateHeader === undefined) {
        return undefined;
    }
    if (Array.isArray(stateHeader)) {
        const error = new Error('Multiple router state headers were sent. This is not allowed.');
        throw Object.defineProperty(error, "__NEXT_ERROR_CODE", {
            value: "E418",
            enumerable: false,
            configurable: true
        });
    }
    // Cap the header at ~40kb — roughly 2,000 nested or parallel route
    // segment states ('{"children":["",{}]}'.length === 20) — so a malicious
    // client cannot slow down router-state resolution with a huge header.
    const maxHeaderLength = 20 * 2000;
    if (stateHeader.length > maxHeaderLength) {
        const error = new Error('The router state header was too large.');
        throw Object.defineProperty(error, "__NEXT_ERROR_CODE", {
            value: "E142",
            enumerable: false,
            configurable: true
        });
    }
    try {
        const state = JSON.parse(decodeURIComponent(stateHeader));
        assert(state, flightRouterStateSchema);
        return state;
    } catch {
        const error = new Error('The router state header was sent but could not be parsed.');
        throw Object.defineProperty(error, "__NEXT_ERROR_CODE", {
            value: "E10",
            enumerable: false,
            configurable: true
        });
    }
}
//# sourceMappingURL=parse-and-validate-flight-router-state.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/parse-and-validate-flight-router-state.tsx"],"sourcesContent":["import type { FlightRouterState } from '../../shared/lib/app-router-types'\nimport { flightRouterStateSchema } from './types'\nimport { assert } from 'next/dist/compiled/superstruct'\n\nexport function parseAndValidateFlightRouterState(\n stateHeader: string | string[]\n): FlightRouterState\nexport function parseAndValidateFlightRouterState(\n stateHeader: undefined\n): undefined\nexport function parseAndValidateFlightRouterState(\n stateHeader: string | string[] | undefined\n): FlightRouterState | undefined\nexport function parseAndValidateFlightRouterState(\n stateHeader: string | string[] | undefined\n): FlightRouterState | undefined {\n if (typeof stateHeader === 'undefined') {\n return undefined\n }\n if (Array.isArray(stateHeader)) {\n throw new Error(\n 'Multiple router state headers were sent. This is not allowed.'\n )\n }\n\n // We limit the size of the router state header to ~40kb. 
This is to prevent\n // a malicious user from sending a very large header and slowing down the\n // resolving of the router state.\n // This is around 2,000 nested or parallel route segment states:\n // '{\"children\":[\"\",{}]}'.length === 20.\n if (stateHeader.length > 20 * 2000) {\n throw new Error('The router state header was too large.')\n }\n\n try {\n const state = JSON.parse(decodeURIComponent(stateHeader))\n assert(state, flightRouterStateSchema)\n return state\n } catch {\n throw new Error('The router state header was sent but could not be parsed.')\n }\n}\n"],"names":["flightRouterStateSchema","assert","parseAndValidateFlightRouterState","stateHeader","undefined","Array","isArray","Error","length","state","JSON","parse","decodeURIComponent"],"mappings":"AACA,SAASA,uBAAuB,QAAQ,UAAS;AACjD,SAASC,MAAM,QAAQ,iCAAgC;AAWvD,OAAO,SAASC,kCACdC,WAA0C;IAE1C,IAAI,OAAOA,gBAAgB,aAAa;QACtC,OAAOC;IACT;IACA,IAAIC,MAAMC,OAAO,CAACH,cAAc;QAC9B,MAAM,qBAEL,CAFK,IAAII,MACR,kEADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,4EAA4E;IAC5E,yEAAyE;IACzE,iCAAiC;IACjC,gEAAgE;IAChE,wCAAwC;IACxC,IAAIJ,YAAYK,MAAM,GAAG,KAAK,MAAM;QAClC,MAAM,qBAAmD,CAAnD,IAAID,MAAM,2CAAV,qBAAA;mBAAA;wBAAA;0BAAA;QAAkD;IAC1D;IAEA,IAAI;QACF,MAAME,QAAQC,KAAKC,KAAK,CAACC,mBAAmBT;QAC5CF,OAAOQ,OAAOT;QACd,OAAOS;IACT,EAAE,OAAM;QACN,MAAM,qBAAsE,CAAtE,IAAIF,MAAM,8DAAV,qBAAA;mBAAA;wBAAA;0BAAA;QAAqE;IAC7E;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,119 @@
import { getDynamicParam } from '../../shared/lib/router/utils/get-dynamic-param';
import { createPrerenderResumeDataCache, createRenderResumeDataCache } from '../resume-data-cache/resume-data-cache';
import { stringifyResumeDataCache } from '../resume-data-cache/resume-data-cache';
/**
 * Compiled numeric enum (bidirectional: name -> value and value -> name).
 */ export var DynamicState = {
    /** The dynamic access occurred during the RSC render phase. */
    DATA: 1,
    /** The dynamic access occurred during the HTML shell render phase. */
    HTML: 2,
    // Reverse mappings, matching TypeScript's numeric-enum emit.
    1: "DATA",
    2: "HTML"
};
/**
 * Compiled numeric enum (bidirectional: name -> value and value -> name)
 * describing whether the prerendered HTML prelude was empty or full.
 */ export var DynamicHTMLPreludeState = {
    Empty: 0,
    Full: 1,
    // Reverse mappings, matching TypeScript's numeric-enum emit.
    0: "Empty",
    1: "Full"
};
/**
 * Serializes the postponed HTML state as
 * `<postponedString.length>:<postponedString><resumeDataCache>`.
 * When fallback route params exist, the postponed string itself is prefixed
 * with `<replacementsJSON.length><replacementsJSON>` so placeholders can be
 * substituted once concrete params are known.
 */ export async function getDynamicHTMLPostponedState(postponed, preludeState, fallbackRouteParams, resumeDataCache, isCacheComponentsEnabled) {
    const dataString = JSON.stringify([
        preludeState,
        postponed
    ]);
    const hasFallbackParams = Boolean(fallbackRouteParams) && fallbackRouteParams.size !== 0;
    if (!hasFallbackParams) {
        // No placeholders to record: serialize the data followed by the
        // render resume data cache.
        const cacheString = await stringifyResumeDataCache(createRenderResumeDataCache(resumeDataCache), isCacheComponentsEnabled);
        return `${dataString.length}:${dataString}${cacheString}`;
    }
    const replacementsString = JSON.stringify(Array.from(fallbackRouteParams.entries()));
    // `<replacements.length><replacements><data>`
    const postponedString = `${replacementsString.length}${replacementsString}${dataString}`;
    const cacheString = await stringifyResumeDataCache(resumeDataCache, isCacheComponentsEnabled);
    return `${postponedString.length}:${postponedString}${cacheString}`;
}
/**
 * Serializes a data-only postponed state: a `null` payload (whose length is
 * the leading `4`) followed by the render resume data cache.
 */ export async function getDynamicDataPostponedState(resumeDataCache, isCacheComponentsEnabled) {
    const cacheString = await stringifyResumeDataCache(createRenderResumeDataCache(resumeDataCache), isCacheComponentsEnabled);
    return '4:null' + cacheString;
}
/**
 * Parses a postponed state string produced by `getDynamicHTMLPostponedState`
 * or `getDynamicDataPostponedState`.
 *
 * Wire format: `<postponedString.length>:<postponedString><resumeDataCache>`.
 * `postponedString` is either the literal `null` (dynamic data only) or the
 * postponed HTML data, optionally prefixed with
 * `<replacementsJSON.length><replacementsJSON>` describing fallback route
 * param placeholders to substitute using `interpolatedParams`.
 *
 * On any parse failure a type-1 (DATA) state is returned instead of throwing.
 */ export function parsePostponedState(state, interpolatedParams) {
    try {
        var _state_match;
        // The `<digits>:` prefix is the length of the postponed string that follows.
        const postponedStringLengthMatch = (_state_match = state.match(/^([0-9]*):/)) == null ? void 0 : _state_match[1];
        if (!postponedStringLengthMatch) {
            throw Object.defineProperty(new Error(`Invariant: invalid postponed state ${state}`), "__NEXT_ERROR_CODE", {
                value: "E314",
                enumerable: false,
                configurable: true
            });
        }
        const postponedStringLength = parseInt(postponedStringLengthMatch);
        // We add a `:` to the end of the length as the first character of the
        // postponed string is the length of the replacement entries.
        const postponedString = state.slice(postponedStringLengthMatch.length + 1, postponedStringLengthMatch.length + postponedStringLength + 1);
        // Everything after the postponed string is the serialized resume data cache.
        const renderResumeDataCache = createRenderResumeDataCache(state.slice(postponedStringLengthMatch.length + postponedStringLength + 1));
        try {
            // A literal `null` payload means only dynamic data was postponed.
            if (postponedString === 'null') {
                return {
                    type: 1,
                    renderResumeDataCache
                };
            }
            // A leading digit marks a fallback-param replacements prefix.
            if (/^[0-9]/.test(postponedString)) {
                var _postponedString_match;
                const match = (_postponedString_match = postponedString.match(/^([0-9]*)/)) == null ? void 0 : _postponedString_match[1];
                if (!match) {
                    throw Object.defineProperty(new Error(`Invariant: invalid postponed state ${JSON.stringify(postponedString)}`), "__NEXT_ERROR_CODE", {
                        value: "E314",
                        enumerable: false,
                        configurable: true
                    });
                }
                // This is the length of the replacements entries.
                const length = parseInt(match);
                const replacements = JSON.parse(postponedString.slice(match.length, // We then go to the end of the string.
                match.length + length));
                let postponed = postponedString.slice(match.length + length);
                // Substitute each fallback-param placeholder with its interpolated value.
                for (const [segmentKey, [searchValue, dynamicParamType]] of replacements){
                    const { treeSegment: [, // This is the same value that'll be used in the postponed state
                    // as it's part of the tree data. That's why we use it as the
                    // replacement value.
                    value] } = getDynamicParam(interpolatedParams, segmentKey, dynamicParamType, null);
                    // NOTE(review): presumably `value` is a string, since it is spliced
                    // into serialized JSON via replaceAll — confirm against getDynamicParam.
                    postponed = postponed.replaceAll(searchValue, value);
                }
                return {
                    type: 2,
                    data: JSON.parse(postponed),
                    renderResumeDataCache
                };
            }
            // No replacements prefix: the postponed string is the JSON data itself.
            return {
                type: 2,
                data: JSON.parse(postponedString),
                renderResumeDataCache
            };
        } catch (err) {
            // The payload was malformed; fall back to a DATA-type state but keep
            // the resume data cache that was already parsed successfully.
            console.error('Failed to parse postponed state', err);
            return {
                type: 1,
                renderResumeDataCache
            };
        }
    } catch (err) {
        // The length prefix itself was invalid; no cache could be recovered.
        console.error('Failed to parse postponed state', err);
        return {
            type: 1,
            renderResumeDataCache: createPrerenderResumeDataCache()
        };
    }
}
/**
 * Unpacks a parsed postponed state's `data` tuple, which is serialized as
 * `[preludeState, postponed]`.
 */ export function getPostponedFromState(state) {
    return {
        preludeState: state.data[0],
        postponed: state.data[1]
    };
}
//# sourceMappingURL=postponed-state.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,51 @@
import { getDigestForWellKnownError } from './create-error-handler';
import { isReactLargeShellError } from './react-large-shell-error';
/**
 * Compiled string enum naming the render phase for debug logging.
 * (String enums have no reverse mapping.)
 */ export var Phase = {
    ProspectiveRender: "the prospective render",
    SegmentCollection: "segment collection"
};
/**
 * Logs (via console.error) a value thrown during a prospective render or
 * segment collection when build debugging is enabled. Such errors are
 * normally ignored and may not prevent the route from prerendering.
 * Well-known Next.js control-flow errors are skipped entirely.
 */ export function printDebugThrownValueForProspectiveRender(thrownValue, route, phase) {
    // We don't need to print well-known Next.js errors.
    if (getDigestForWellKnownError(thrownValue)) {
        return;
    }
    if (isReactLargeShellError(thrownValue)) {
        // TODO: Aggregate
        console.error(thrownValue);
        return undefined;
    }
    let message;
    // Error-like objects: reuse their message and, when available, their stack.
    if (typeof thrownValue === 'object' && thrownValue !== null && typeof thrownValue.message === 'string') {
        message = thrownValue.message;
        if (typeof thrownValue.stack === 'string') {
            const originalErrorStack = thrownValue.stack;
            const stackStart = originalErrorStack.indexOf('\n');
            if (stackStart > -1) {
                // Re-wrap so the log carries route/phase context while keeping the
                // original stack frames (everything after the stack's first line).
                const error = Object.defineProperty(new Error(`Route ${route} errored during ${phase}. These errors are normally ignored and may not prevent the route from prerendering but are logged here because build debugging is enabled.
Original Error: ${message}`), "__NEXT_ERROR_CODE", {
                    value: "E949",
                    enumerable: false,
                    configurable: true
                });
                error.stack = 'Error: ' + error.message + originalErrorStack.slice(stackStart);
                console.error(error);
                return;
            }
        }
    } else if (typeof thrownValue === 'string') {
        message = thrownValue;
    }
    // A message without a usable stack.
    if (message) {
        console.error(`Route ${route} errored during ${phase}. These errors are normally ignored and may not prevent the route from prerendering but are logged here because build debugging is enabled. No stack was provided.
Original Message: ${message}`);
        return;
    }
    // Neither message nor stack: log the raw thrown value after a context line.
    console.error(`Route ${route} errored during ${phase}. These errors are normally ignored and may not prevent the route from prerendering but are logged here because build debugging is enabled. The thrown value is logged just following this message`);
    console.error(thrownValue);
    return;
}
//# sourceMappingURL=prospective-render-utils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/prospective-render-utils.ts"],"sourcesContent":["import { getDigestForWellKnownError } from './create-error-handler'\nimport { isReactLargeShellError } from './react-large-shell-error'\n\nexport enum Phase {\n ProspectiveRender = 'the prospective render',\n SegmentCollection = 'segment collection',\n}\n\nexport function printDebugThrownValueForProspectiveRender(\n thrownValue: unknown,\n route: string,\n phase: Phase\n) {\n // We don't need to print well-known Next.js errors.\n if (getDigestForWellKnownError(thrownValue)) {\n return\n }\n\n if (isReactLargeShellError(thrownValue)) {\n // TODO: Aggregate\n console.error(thrownValue)\n return undefined\n }\n\n let message: undefined | string\n if (\n typeof thrownValue === 'object' &&\n thrownValue !== null &&\n typeof (thrownValue as any).message === 'string'\n ) {\n message = (thrownValue as any).message\n if (typeof (thrownValue as any).stack === 'string') {\n const originalErrorStack: string = (thrownValue as any).stack\n const stackStart = originalErrorStack.indexOf('\\n')\n if (stackStart > -1) {\n const error = new Error(\n `Route ${route} errored during ${phase}. These errors are normally ignored and may not prevent the route from prerendering but are logged here because build debugging is enabled.\n \nOriginal Error: ${message}`\n )\n error.stack =\n 'Error: ' + error.message + originalErrorStack.slice(stackStart)\n console.error(error)\n return\n }\n }\n } else if (typeof thrownValue === 'string') {\n message = thrownValue\n }\n\n if (message) {\n console.error(`Route ${route} errored during ${phase}. These errors are normally ignored and may not prevent the route from prerendering but are logged here because build debugging is enabled. No stack was provided.\n \nOriginal Message: ${message}`)\n return\n }\n\n console.error(\n `Route ${route} errored during ${phase}. 
These errors are normally ignored and may not prevent the route from prerendering but are logged here because build debugging is enabled. The thrown value is logged just following this message`\n )\n console.error(thrownValue)\n return\n}\n"],"names":["getDigestForWellKnownError","isReactLargeShellError","Phase","printDebugThrownValueForProspectiveRender","thrownValue","route","phase","console","error","undefined","message","stack","originalErrorStack","stackStart","indexOf","Error","slice"],"mappings":"AAAA,SAASA,0BAA0B,QAAQ,yBAAwB;AACnE,SAASC,sBAAsB,QAAQ,4BAA2B;AAElE,OAAO,IAAA,AAAKC,+BAAAA;;;WAAAA;MAGX;AAED,OAAO,SAASC,0CACdC,WAAoB,EACpBC,KAAa,EACbC,KAAY;IAEZ,oDAAoD;IACpD,IAAIN,2BAA2BI,cAAc;QAC3C;IACF;IAEA,IAAIH,uBAAuBG,cAAc;QACvC,kBAAkB;QAClBG,QAAQC,KAAK,CAACJ;QACd,OAAOK;IACT;IAEA,IAAIC;IACJ,IACE,OAAON,gBAAgB,YACvBA,gBAAgB,QAChB,OAAO,AAACA,YAAoBM,OAAO,KAAK,UACxC;QACAA,UAAU,AAACN,YAAoBM,OAAO;QACtC,IAAI,OAAO,AAACN,YAAoBO,KAAK,KAAK,UAAU;YAClD,MAAMC,qBAA6B,AAACR,YAAoBO,KAAK;YAC7D,MAAME,aAAaD,mBAAmBE,OAAO,CAAC;YAC9C,IAAID,aAAa,CAAC,GAAG;gBACnB,MAAML,QAAQ,qBAIb,CAJa,IAAIO,MAChB,CAAC,MAAM,EAAEV,MAAM,gBAAgB,EAAEC,MAAM;;gBAEjC,EAAEI,SAAS,GAHL,qBAAA;2BAAA;gCAAA;kCAAA;gBAId;gBACAF,MAAMG,KAAK,GACT,YAAYH,MAAME,OAAO,GAAGE,mBAAmBI,KAAK,CAACH;gBACvDN,QAAQC,KAAK,CAACA;gBACd;YACF;QACF;IACF,OAAO,IAAI,OAAOJ,gBAAgB,UAAU;QAC1CM,UAAUN;IACZ;IAEA,IAAIM,SAAS;QACXH,QAAQC,KAAK,CAAC,CAAC,MAAM,EAAEH,MAAM,gBAAgB,EAAEC,MAAM;;kBAEvC,EAAEI,SAAS;QACzB;IACF;IAEAH,QAAQC,KAAK,CACX,CAAC,MAAM,EAAEH,MAAM,gBAAgB,EAAEC,MAAM,kMAAkM,CAAC;IAE5OC,QAAQC,KAAK,CAACJ;IACd;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,7 @@
// TODO: isWellKnownError -> isNextInternalError
// isReactLargeShellError -> isWarning
/**
 * True when the value is React's "rendered a large document" warning,
 * identified by its message prefix.
 */ export function isReactLargeShellError(error) {
    if (typeof error !== 'object' || error === null) {
        return false;
    }
    if (!('message' in error) || typeof error.message !== 'string') {
        return false;
    }
    return error.message.startsWith('This rendered a large document (>');
}
//# sourceMappingURL=react-large-shell-error.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/react-large-shell-error.ts"],"sourcesContent":["// TODO: isWellKnownError -> isNextInternalError\n// isReactLargeShellError -> isWarning\nexport function isReactLargeShellError(\n error: unknown\n): error is Error & { digest?: string } {\n return (\n typeof error === 'object' &&\n error !== null &&\n 'message' in error &&\n typeof error.message === 'string' &&\n error.message.startsWith('This rendered a large document (>')\n )\n}\n"],"names":["isReactLargeShellError","error","message","startsWith"],"mappings":"AAAA,gDAAgD;AAChD,sCAAsC;AACtC,OAAO,SAASA,uBACdC,KAAc;IAEd,OACE,OAAOA,UAAU,YACjBA,UAAU,QACV,aAAaA,SACb,OAAOA,MAAMC,OAAO,KAAK,YACzBD,MAAMC,OAAO,CAACC,UAAU,CAAC;AAE7B","ignoreList":[0]}

View File

@@ -0,0 +1,5 @@
// This file should be opted into the react-server layer
// eslint-disable-next-line import/no-extraneous-dependencies
export { createTemporaryReferenceSet, decodeReply, decodeReplyFromBusboy, decodeAction, decodeFormState } from 'react-server-dom-webpack/server.node';
//# sourceMappingURL=react-server.node.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/react-server.node.ts"],"sourcesContent":["// This file should be opted into the react-server layer\n\n// eslint-disable-next-line import/no-extraneous-dependencies\nexport {\n createTemporaryReferenceSet,\n decodeReply,\n decodeReplyFromBusboy,\n decodeAction,\n decodeFormState,\n} from 'react-server-dom-webpack/server.node'\n"],"names":["createTemporaryReferenceSet","decodeReply","decodeReplyFromBusboy","decodeAction","decodeFormState"],"mappings":"AAAA,wDAAwD;AAExD,6DAA6D;AAC7D,SACEA,2BAA2B,EAC3BC,WAAW,EACXC,qBAAqB,EACrBC,YAAY,EACZC,eAAe,QACV,uCAAsC","ignoreList":[0]}

View File

@@ -0,0 +1,46 @@
import { encodeURIPath } from '../../shared/lib/encode-uri-path';
import { getAssetQueryString } from './get-asset-query-string';
/**
 * Abstracts the rendering of CSS files based on whether they are inlined or not.
 * Inlined CSS becomes a <style> tag with the content embedded directly;
 * external CSS files become <link rel="stylesheet"> tags.
 */ export function renderCssResource(entryCssFiles, ctx, preloadCallbacks) {
    const { createElement } = ctx.componentMod;
    // React uses `precedence` as an opt-in signal for resource loading,
    // deduplication, and injection ordering
    // (https://github.com/facebook/react/pull/25060). During HMR each
    // stylesheet must get its own precedence so their order is kept.
    const isDev = process.env.NODE_ENV === 'development';
    return entryCssFiles.map((cssFile, index)=>{
        const precedence = isDev ? 'next_' + cssFile.path : 'next';
        // In dev, Safari and Firefox cache the resource during HMR
        // (vercel/next.js#5860, https://bugs.webkit.org/show_bug.cgi?id=187726),
        // so `getAssetQueryString` appends an ever-increasing `?v=` query to
        // bypass the cache.
        const fullHref = `${ctx.assetPrefix}/_next/${encodeURIPath(cssFile.path)}${getAssetQueryString(ctx, true)}`;
        if (cssFile.inlined && !ctx.parsedRequestHeaders.isRSCRequest) {
            return createElement('style', {
                key: index,
                nonce: ctx.nonce,
                precedence: precedence,
                href: fullHref
            }, cssFile.content);
        }
        preloadCallbacks?.push(()=>{
            ctx.componentMod.preloadStyle(fullHref, ctx.renderOpts.crossOrigin, ctx.nonce);
        });
        return createElement('link', {
            key: index,
            rel: 'stylesheet',
            href: fullHref,
            precedence: precedence,
            crossOrigin: ctx.renderOpts.crossOrigin,
            nonce: ctx.nonce
        });
    });
}
//# sourceMappingURL=render-css-resource.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/render-css-resource.tsx"],"sourcesContent":["import type { CssResource } from '../../build/webpack/plugins/flight-manifest-plugin'\nimport { encodeURIPath } from '../../shared/lib/encode-uri-path'\nimport type { AppRenderContext } from './app-render'\nimport { getAssetQueryString } from './get-asset-query-string'\nimport type { PreloadCallbacks } from './types'\n\n/**\n * Abstracts the rendering of CSS files based on whether they are inlined or not.\n * For inlined CSS, renders a <style> tag with the CSS content directly embedded.\n * For external CSS files, renders a <link> tag pointing to the CSS file.\n */\nexport function renderCssResource(\n entryCssFiles: CssResource[],\n ctx: AppRenderContext,\n preloadCallbacks?: PreloadCallbacks\n) {\n const {\n componentMod: { createElement },\n } = ctx\n return entryCssFiles.map((entryCssFile, index) => {\n // `Precedence` is an opt-in signal for React to handle resource\n // loading and deduplication, etc. It's also used as the key to sort\n // resources so they will be injected in the correct order.\n // During HMR, it's critical to use different `precedence` values\n // for different stylesheets, so their order will be kept.\n // https://github.com/facebook/react/pull/25060\n const precedence =\n process.env.NODE_ENV === 'development'\n ? 'next_' + entryCssFile.path\n : 'next'\n\n // In dev, Safari and Firefox will cache the resource during HMR:\n // - https://github.com/vercel/next.js/issues/5860\n // - https://bugs.webkit.org/show_bug.cgi?id=187726\n // Because of this, we add a `?v=` query to bypass the cache during\n // development. 
We need to also make sure that the number is always\n // increasing.\n const fullHref = `${ctx.assetPrefix}/_next/${encodeURIPath(\n entryCssFile.path\n )}${getAssetQueryString(ctx, true)}`\n\n if (entryCssFile.inlined && !ctx.parsedRequestHeaders.isRSCRequest) {\n return createElement(\n 'style',\n {\n key: index,\n nonce: ctx.nonce,\n precedence: precedence,\n href: fullHref,\n },\n entryCssFile.content\n )\n }\n\n preloadCallbacks?.push(() => {\n ctx.componentMod.preloadStyle(\n fullHref,\n ctx.renderOpts.crossOrigin,\n ctx.nonce\n )\n })\n\n return createElement('link', {\n key: index,\n rel: 'stylesheet',\n href: fullHref,\n precedence: precedence,\n crossOrigin: ctx.renderOpts.crossOrigin,\n nonce: ctx.nonce,\n })\n })\n}\n"],"names":["encodeURIPath","getAssetQueryString","renderCssResource","entryCssFiles","ctx","preloadCallbacks","componentMod","createElement","map","entryCssFile","index","precedence","process","env","NODE_ENV","path","fullHref","assetPrefix","inlined","parsedRequestHeaders","isRSCRequest","key","nonce","href","content","push","preloadStyle","renderOpts","crossOrigin","rel"],"mappings":"AACA,SAASA,aAAa,QAAQ,mCAAkC;AAEhE,SAASC,mBAAmB,QAAQ,2BAA0B;AAG9D;;;;CAIC,GACD,OAAO,SAASC,kBACdC,aAA4B,EAC5BC,GAAqB,EACrBC,gBAAmC;IAEnC,MAAM,EACJC,cAAc,EAAEC,aAAa,EAAE,EAChC,GAAGH;IACJ,OAAOD,cAAcK,GAAG,CAAC,CAACC,cAAcC;QACtC,gEAAgE;QAChE,oEAAoE;QACpE,2DAA2D;QAC3D,iEAAiE;QACjE,0DAA0D;QAC1D,+CAA+C;QAC/C,MAAMC,aACJC,QAAQC,GAAG,CAACC,QAAQ,KAAK,gBACrB,UAAUL,aAAaM,IAAI,GAC3B;QAEN,iEAAiE;QACjE,kDAAkD;QAClD,mDAAmD;QACnD,mEAAmE;QACnE,mEAAmE;QACnE,cAAc;QACd,MAAMC,WAAW,GAAGZ,IAAIa,WAAW,CAAC,OAAO,EAAEjB,cAC3CS,aAAaM,IAAI,IACfd,oBAAoBG,KAAK,OAAO;QAEpC,IAAIK,aAAaS,OAAO,IAAI,CAACd,IAAIe,oBAAoB,CAACC,YAAY,EAAE;YAClE,OAAOb,cACL,SACA;gBACEc,KAAKX;gBACLY,OAAOlB,IAAIkB,KAAK;gBAChBX,YAAYA;gBACZY,MAAMP;YACR,GACAP,aAAae,OAAO;QAExB;QAEAnB,oCAAAA,iBAAkBoB,IAAI,CAAC;YACrBrB,IAAIE,YAAY,CAACoB,YAAY,CAC3BV,UACAZ,IAAIuB,UAAU,CAACC,WAAW,EAC1BxB,IAAIkB,KAAK;QAEb;QAEA,OAAOf,cAAc,QAAQ;YAC3Bc,K
AAKX;YACLmB,KAAK;YACLN,MAAMP;YACNL,YAAYA;YACZiB,aAAaxB,IAAIuB,UAAU,CAACC,WAAW;YACvCN,OAAOlB,IAAIkB,KAAK;QAClB;IACF;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,61 @@
import { encodeURIPath } from '../../shared/lib/encode-uri-path';
import ReactDOM from 'react-dom';
/**
 * Resolves the bootstrap script plus the remaining root main files for a page,
 * returning a `[preinitScripts, bootstrapScript]` pair: a callback that
 * preinits every non-bootstrap script via ReactDOM, and the bootstrap script
 * descriptor ({ src, integrity?, crossOrigin }). When an SRI manifest is
 * provided, integrity hashes are attached to both.
 */ export function getRequiredScripts(buildManifest, assetPrefix, crossOrigin, SRIManifest, qs, nonce, pagePath) {
    // Prefer the per-page file tree when present; fall back to the flat list.
    const tree = buildManifest.rootMainFilesTree;
    const files = ((tree == null ? void 0 : tree[pagePath]) || buildManifest.rootMainFiles).map(encodeURIPath);
    if (files.length === 0) {
        const invariant = new Error('Invariant: missing bootstrap script. This is a bug in Next.js');
        Object.defineProperty(invariant, "__NEXT_ERROR_CODE", {
            value: "E459",
            enumerable: false,
            configurable: true
        });
        throw invariant;
    }
    const toSrc = (file)=>`${assetPrefix}/_next/` + file + qs;
    const bootstrapScript = {
        src: toSrc(files[0]),
        crossOrigin
    };
    const preinitScriptCommands = [];
    let preinitScripts;
    if (SRIManifest) {
        bootstrapScript.integrity = SRIManifest[files[0]];
        for(let i = 1; i < files.length; i++){
            preinitScriptCommands.push(toSrc(files[i]), SRIManifest[files[i]]);
        }
        preinitScripts = ()=>{
            // preinitScriptCommands is a flat array of src/integrity pairs
            for(let i = 0; i < preinitScriptCommands.length; i += 2){
                ReactDOM.preinit(preinitScriptCommands[i], {
                    as: 'script',
                    integrity: preinitScriptCommands[i + 1],
                    crossOrigin,
                    nonce
                });
            }
        };
    } else {
        for(let i = 1; i < files.length; i++){
            preinitScriptCommands.push(toSrc(files[i]));
        }
        preinitScripts = ()=>{
            // preinitScriptCommands is a singly indexed array of src values
            for(let i = 0; i < preinitScriptCommands.length; i++){
                ReactDOM.preinit(preinitScriptCommands[i], {
                    as: 'script',
                    nonce,
                    crossOrigin
                });
            }
        };
    }
    return [
        preinitScripts,
        bootstrapScript
    ];
}
//# sourceMappingURL=required-scripts.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/required-scripts.tsx"],"sourcesContent":["import { encodeURIPath } from '../../shared/lib/encode-uri-path'\nimport type { BuildManifest } from '../get-page-files'\n\nimport ReactDOM from 'react-dom'\n\nexport function getRequiredScripts(\n buildManifest: BuildManifest,\n assetPrefix: string,\n crossOrigin: undefined | '' | 'anonymous' | 'use-credentials',\n SRIManifest: undefined | Record<string, string>,\n qs: string,\n nonce: string | undefined,\n pagePath: string\n): [\n () => void,\n { src: string; integrity?: string; crossOrigin?: string | undefined },\n] {\n let preinitScripts: () => void\n let preinitScriptCommands: string[] = []\n const bootstrapScript: {\n src: string\n integrity?: string\n crossOrigin?: string | undefined\n } = {\n src: '',\n crossOrigin,\n }\n\n const files = (\n buildManifest.rootMainFilesTree?.[pagePath] || buildManifest.rootMainFiles\n ).map(encodeURIPath)\n if (files.length === 0) {\n throw new Error(\n 'Invariant: missing bootstrap script. 
This is a bug in Next.js'\n )\n }\n if (SRIManifest) {\n bootstrapScript.src = `${assetPrefix}/_next/` + files[0] + qs\n bootstrapScript.integrity = SRIManifest[files[0]]\n\n for (let i = 1; i < files.length; i++) {\n const src = `${assetPrefix}/_next/` + files[i] + qs\n const integrity = SRIManifest[files[i]]\n preinitScriptCommands.push(src, integrity)\n }\n preinitScripts = () => {\n // preinitScriptCommands is a double indexed array of src/integrity pairs\n for (let i = 0; i < preinitScriptCommands.length; i += 2) {\n ReactDOM.preinit(preinitScriptCommands[i], {\n as: 'script',\n integrity: preinitScriptCommands[i + 1],\n crossOrigin,\n nonce,\n })\n }\n }\n } else {\n bootstrapScript.src = `${assetPrefix}/_next/` + files[0] + qs\n\n for (let i = 1; i < files.length; i++) {\n const src = `${assetPrefix}/_next/` + files[i] + qs\n preinitScriptCommands.push(src)\n }\n preinitScripts = () => {\n // preinitScriptCommands is a singled indexed array of src values\n for (let i = 0; i < preinitScriptCommands.length; i++) {\n ReactDOM.preinit(preinitScriptCommands[i], {\n as: 'script',\n nonce,\n crossOrigin,\n })\n }\n }\n }\n\n return [preinitScripts, 
bootstrapScript]\n}\n"],"names":["encodeURIPath","ReactDOM","getRequiredScripts","buildManifest","assetPrefix","crossOrigin","SRIManifest","qs","nonce","pagePath","preinitScripts","preinitScriptCommands","bootstrapScript","src","files","rootMainFilesTree","rootMainFiles","map","length","Error","integrity","i","push","preinit","as"],"mappings":"AAAA,SAASA,aAAa,QAAQ,mCAAkC;AAGhE,OAAOC,cAAc,YAAW;AAEhC,OAAO,SAASC,mBACdC,aAA4B,EAC5BC,WAAmB,EACnBC,WAA6D,EAC7DC,WAA+C,EAC/CC,EAAU,EACVC,KAAyB,EACzBC,QAAgB;QAiBdN;IAZF,IAAIO;IACJ,IAAIC,wBAAkC,EAAE;IACxC,MAAMC,kBAIF;QACFC,KAAK;QACLR;IACF;IAEA,MAAMS,QAAQ,AACZX,CAAAA,EAAAA,mCAAAA,cAAcY,iBAAiB,qBAA/BZ,gCAAiC,CAACM,SAAS,KAAIN,cAAca,aAAa,AAAD,EACzEC,GAAG,CAACjB;IACN,IAAIc,MAAMI,MAAM,KAAK,GAAG;QACtB,MAAM,qBAEL,CAFK,IAAIC,MACR,kEADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IACA,IAAIb,aAAa;QACfM,gBAAgBC,GAAG,GAAG,GAAGT,YAAY,OAAO,CAAC,GAAGU,KAAK,CAAC,EAAE,GAAGP;QAC3DK,gBAAgBQ,SAAS,GAAGd,WAAW,CAACQ,KAAK,CAAC,EAAE,CAAC;QAEjD,IAAK,IAAIO,IAAI,GAAGA,IAAIP,MAAMI,MAAM,EAAEG,IAAK;YACrC,MAAMR,MAAM,GAAGT,YAAY,OAAO,CAAC,GAAGU,KAAK,CAACO,EAAE,GAAGd;YACjD,MAAMa,YAAYd,WAAW,CAACQ,KAAK,CAACO,EAAE,CAAC;YACvCV,sBAAsBW,IAAI,CAACT,KAAKO;QAClC;QACAV,iBAAiB;YACf,yEAAyE;YACzE,IAAK,IAAIW,IAAI,GAAGA,IAAIV,sBAAsBO,MAAM,EAAEG,KAAK,EAAG;gBACxDpB,SAASsB,OAAO,CAACZ,qBAAqB,CAACU,EAAE,EAAE;oBACzCG,IAAI;oBACJJ,WAAWT,qBAAqB,CAACU,IAAI,EAAE;oBACvChB;oBACAG;gBACF;YACF;QACF;IACF,OAAO;QACLI,gBAAgBC,GAAG,GAAG,GAAGT,YAAY,OAAO,CAAC,GAAGU,KAAK,CAAC,EAAE,GAAGP;QAE3D,IAAK,IAAIc,IAAI,GAAGA,IAAIP,MAAMI,MAAM,EAAEG,IAAK;YACrC,MAAMR,MAAM,GAAGT,YAAY,OAAO,CAAC,GAAGU,KAAK,CAACO,EAAE,GAAGd;YACjDI,sBAAsBW,IAAI,CAACT;QAC7B;QACAH,iBAAiB;YACf,iEAAiE;YACjE,IAAK,IAAIW,IAAI,GAAGA,IAAIV,sBAAsBO,MAAM,EAAEG,IAAK;gBACrDpB,SAASsB,OAAO,CAACZ,qBAAqB,CAACU,EAAE,EAAE;oBACzCG,IAAI;oBACJhB;oBACAH;gBACF;YACF;QACF;IACF;IAEA,OAAO;QAACK;QAAgBE;KAAgB;AAC1C","ignoreList":[0]}

View File

@@ -0,0 +1,8 @@
/*
Files in the rsc directory are meant to be packaged as part of the RSC graph using next-app-loader.
*/ // When postpone is available in canary React we can switch to importing it directly
// Until then, re-export `Postpone` from the local dynamic-rendering module.
export { Postpone } from '../dynamic-rendering';
//# sourceMappingURL=postpone.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/app-render/rsc/postpone.ts"],"sourcesContent":["/*\n\nFiles in the rsc directory are meant to be packaged as part of the RSC graph using next-app-loader.\n\n*/\n\n// When postpone is available in canary React we can switch to importing it directly\nexport { Postpone } from '../dynamic-rendering'\n"],"names":["Postpone"],"mappings":"AAAA;;;;AAIA,GAEA,oFAAoF;AACpF,SAASA,QAAQ,QAAQ,uBAAsB","ignoreList":[0]}

View File

@@ -0,0 +1,43 @@
/*
Files in the rsc directory are meant to be packaged as part of the RSC graph using next-app-loader.
*/ import ReactDOM from 'react-dom';
/**
 * Preloads a stylesheet via ReactDOM.preload, forwarding `crossOrigin` and
 * `nonce` only when they are actual strings.
 */ export function preloadStyle(href, crossOrigin, nonce) {
    const opts = {
        as: 'style'
    };
    if (typeof crossOrigin === 'string') opts.crossOrigin = crossOrigin;
    if (typeof nonce === 'string') opts.nonce = nonce;
    ReactDOM.preload(href, opts);
}
/**
 * Preloads a font file (with its MIME `type`) via ReactDOM.preload,
 * forwarding `crossOrigin` and `nonce` only when they are actual strings.
 */ export function preloadFont(href, type, crossOrigin, nonce) {
    const opts = {
        as: 'font',
        type
    };
    if (typeof crossOrigin === 'string') opts.crossOrigin = crossOrigin;
    if (typeof nonce === 'string') opts.nonce = nonce;
    ReactDOM.preload(href, opts);
}
/**
 * Opens an early connection to `href` via ReactDOM.preconnect, forwarding
 * `crossOrigin` and `nonce` only when they are actual strings.
 */ export function preconnect(href, crossOrigin, nonce) {
    const opts = {};
    if (typeof crossOrigin === 'string') opts.crossOrigin = crossOrigin;
    if (typeof nonce === 'string') opts.nonce = nonce;
    ReactDOM.preconnect(href, opts);
}
//# sourceMappingURL=preloads.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/app-render/rsc/preloads.ts"],"sourcesContent":["/*\n\nFiles in the rsc directory are meant to be packaged as part of the RSC graph using next-app-loader.\n\n*/\n\nimport ReactDOM from 'react-dom'\n\nexport function preloadStyle(\n href: string,\n crossOrigin: string | undefined,\n nonce: string | undefined\n) {\n const opts: any = { as: 'style' }\n if (typeof crossOrigin === 'string') {\n opts.crossOrigin = crossOrigin\n }\n if (typeof nonce === 'string') {\n opts.nonce = nonce\n }\n ReactDOM.preload(href, opts)\n}\n\nexport function preloadFont(\n href: string,\n type: string,\n crossOrigin: string | undefined,\n nonce: string | undefined\n) {\n const opts: any = { as: 'font', type }\n if (typeof crossOrigin === 'string') {\n opts.crossOrigin = crossOrigin\n }\n if (typeof nonce === 'string') {\n opts.nonce = nonce\n }\n ReactDOM.preload(href, opts)\n}\n\nexport function preconnect(\n href: string,\n crossOrigin: string | undefined,\n nonce: string | undefined\n) {\n const opts: any = {}\n if (typeof crossOrigin === 'string') {\n opts.crossOrigin = crossOrigin\n }\n if (typeof nonce === 'string') {\n opts.nonce = nonce\n }\n ;(ReactDOM as any).preconnect(href, 
opts)\n}\n"],"names":["ReactDOM","preloadStyle","href","crossOrigin","nonce","opts","as","preload","preloadFont","type","preconnect"],"mappings":"AAAA;;;;AAIA,GAEA,OAAOA,cAAc,YAAW;AAEhC,OAAO,SAASC,aACdC,IAAY,EACZC,WAA+B,EAC/BC,KAAyB;IAEzB,MAAMC,OAAY;QAAEC,IAAI;IAAQ;IAChC,IAAI,OAAOH,gBAAgB,UAAU;QACnCE,KAAKF,WAAW,GAAGA;IACrB;IACA,IAAI,OAAOC,UAAU,UAAU;QAC7BC,KAAKD,KAAK,GAAGA;IACf;IACAJ,SAASO,OAAO,CAACL,MAAMG;AACzB;AAEA,OAAO,SAASG,YACdN,IAAY,EACZO,IAAY,EACZN,WAA+B,EAC/BC,KAAyB;IAEzB,MAAMC,OAAY;QAAEC,IAAI;QAAQG;IAAK;IACrC,IAAI,OAAON,gBAAgB,UAAU;QACnCE,KAAKF,WAAW,GAAGA;IACrB;IACA,IAAI,OAAOC,UAAU,UAAU;QAC7BC,KAAKD,KAAK,GAAGA;IACf;IACAJ,SAASO,OAAO,CAACL,MAAMG;AACzB;AAEA,OAAO,SAASK,WACdR,IAAY,EACZC,WAA+B,EAC/BC,KAAyB;IAEzB,MAAMC,OAAY,CAAC;IACnB,IAAI,OAAOF,gBAAgB,UAAU;QACnCE,KAAKF,WAAW,GAAGA;IACrB;IACA,IAAI,OAAOC,UAAU,UAAU;QAC7BC,KAAKD,KAAK,GAAGA;IACf;;IACEJ,SAAiBU,UAAU,CAACR,MAAMG;AACtC","ignoreList":[0]}

Some files were not shown because too many files have changed in this diff Show More