feat(blog): add file-based blog with dynamic slugs, MDX content and layout shell
- Introduced blog routing using Next.js App Router
- Implemented dynamic [slug] pages for blog posts
- Added MDX-based content loading via lib/posts
- Integrated shared TopBar layout with navigation
- Established clear content, lib and component separation
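(Note: the blog routes, the lib/posts loader and the TopBar shell described above are not part of this excerpt, which only shows vendored node_modules files. For orientation, a dynamic MDX-backed [slug] page along the lines described might look like the sketch below; the getPostSlugs/getPostBySlug helpers and paths are assumptions, not taken from the actual lib/posts implementation.)

// Illustrative sketch only -- not the committed app/blog/[slug]/page.tsx.
import { notFound } from 'next/navigation';
// Hypothetical helpers; the commit only states that content is loaded via lib/posts.
import { getPostBySlug, getPostSlugs } from '@/lib/posts';

export function generateStaticParams() {
    // One statically generated page per MDX post discovered by lib/posts.
    return getPostSlugs().map((slug) => ({ slug }));
}

export default async function BlogPostPage({ params }: { params: { slug: string } }) {
    const post = await getPostBySlug(params.slug);
    if (!post) notFound();
    return (
        <article>
            <h1>{post.title}</h1>
            {post.content}
        </article>
    );
}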
apps/public-web/node_modules/next/dist/server/ReactDOMServerPages.d.ts (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
export * from 'react-dom/server.edge'
apps/public-web/node_modules/next/dist/server/ReactDOMServerPages.js (16 lines, generated, vendored, normal file)
@@ -0,0 +1,16 @@
"use strict";
let ReactDOMServer;
try {
    // TODO: Use Node.js build unless we're in an Edge runtime.
    ReactDOMServer = require('react-dom/server.edge');
} catch (error) {
    if (error.code !== 'MODULE_NOT_FOUND' && error.code !== 'ERR_PACKAGE_PATH_NOT_EXPORTED') {
        throw error;
    }
    // In React versions without react-dom/server.edge, the browser build works in Node.js.
    // The Node.js build does not support renderToReadableStream.
    ReactDOMServer = require('react-dom/server.browser');
}
module.exports = ReactDOMServer;

//# sourceMappingURL=ReactDOMServerPages.js.map
apps/public-web/node_modules/next/dist/server/ReactDOMServerPages.js.map (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/server/ReactDOMServerPages.js"],"sourcesContent":["let ReactDOMServer\n\ntry {\n // TODO: Use Node.js build unless we're in an Edge runtime.\n ReactDOMServer = require('react-dom/server.edge')\n} catch (error) {\n if (\n error.code !== 'MODULE_NOT_FOUND' &&\n error.code !== 'ERR_PACKAGE_PATH_NOT_EXPORTED'\n ) {\n throw error\n }\n // In React versions without react-dom/server.edge, the browser build works in Node.js.\n // The Node.js build does not support renderToReadableStream.\n ReactDOMServer = require('react-dom/server.browser')\n}\n\nmodule.exports = ReactDOMServer\n"],"names":["ReactDOMServer","require","error","code","module","exports"],"mappings":";AAAA,IAAIA;AAEJ,IAAI;IACF,2DAA2D;IAC3DA,iBAAiBC,QAAQ;AAC3B,EAAE,OAAOC,OAAO;IACd,IACEA,MAAMC,IAAI,KAAK,sBACfD,MAAMC,IAAI,KAAK,iCACf;QACA,MAAMD;IACR;IACA,uFAAuF;IACvF,6DAA6D;IAC7DF,iBAAiBC,QAAQ;AAC3B;AAEAG,OAAOC,OAAO,GAAGL","ignoreList":[0]}
apps/public-web/node_modules/next/dist/server/accept-header.d.ts (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
export declare function acceptLanguage(header?: string, preferences?: readonly string[]): string;
apps/public-web/node_modules/next/dist/server/accept-header.js (132 lines, generated, vendored, normal file)
@@ -0,0 +1,132 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "acceptLanguage", {
    enumerable: true,
    get: function() {
        return acceptLanguage;
    }
});
function parse(raw, preferences, options) {
    const lowers = new Map();
    const header = raw.replace(/[ \t]/g, '');
    if (preferences) {
        let pos = 0;
        for (const preference of preferences){
            const lower = preference.toLowerCase();
            lowers.set(lower, {
                orig: preference,
                pos: pos++
            });
            if (options.prefixMatch) {
                const parts = lower.split('-');
                while(parts.pop(), parts.length > 0){
                    const joined = parts.join('-');
                    if (!lowers.has(joined)) {
                        lowers.set(joined, {
                            orig: preference,
                            pos: pos++
                        });
                    }
                }
            }
        }
    }
    const parts = header.split(',');
    const selections = [];
    const map = new Set();
    for(let i = 0; i < parts.length; ++i){
        const part = parts[i];
        if (!part) {
            continue;
        }
        const params = part.split(';');
        if (params.length > 2) {
            throw Object.defineProperty(new Error(`Invalid ${options.type} header`), "__NEXT_ERROR_CODE", {
                value: "E77",
                enumerable: false,
                configurable: true
            });
        }
        let token = params[0].toLowerCase();
        if (!token) {
            throw Object.defineProperty(new Error(`Invalid ${options.type} header`), "__NEXT_ERROR_CODE", {
                value: "E77",
                enumerable: false,
                configurable: true
            });
        }
        const selection = {
            token,
            pos: i,
            q: 1
        };
        if (preferences && lowers.has(token)) {
            selection.pref = lowers.get(token).pos;
        }
        map.add(selection.token);
        if (params.length === 2) {
            const q = params[1];
            const [key, value] = q.split('=');
            if (!value || key !== 'q' && key !== 'Q') {
                throw Object.defineProperty(new Error(`Invalid ${options.type} header`), "__NEXT_ERROR_CODE", {
                    value: "E77",
                    enumerable: false,
                    configurable: true
                });
            }
            const score = parseFloat(value);
            if (score === 0) {
                continue;
            }
            if (Number.isFinite(score) && score <= 1 && score >= 0.001) {
                selection.q = score;
            }
        }
        selections.push(selection);
    }
    selections.sort((a, b)=>{
        if (b.q !== a.q) {
            return b.q - a.q;
        }
        if (b.pref !== a.pref) {
            if (a.pref === undefined) {
                return 1;
            }
            if (b.pref === undefined) {
                return -1;
            }
            return a.pref - b.pref;
        }
        return a.pos - b.pos;
    });
    const values = selections.map((selection)=>selection.token);
    if (!preferences || !preferences.length) {
        return values;
    }
    const preferred = [];
    for (const selection of values){
        if (selection === '*') {
            for (const [preference, value] of lowers){
                if (!map.has(preference)) {
                    preferred.push(value.orig);
                }
            }
        } else {
            const lower = selection.toLowerCase();
            if (lowers.has(lower)) {
                preferred.push(lowers.get(lower).orig);
            }
        }
    }
    return preferred;
}
function acceptLanguage(header = '', preferences) {
    return parse(header, preferences, {
        type: 'accept-language',
        prefixMatch: true
    })[0] || '';
}

//# sourceMappingURL=accept-header.js.map
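(Editor's note: a minimal, hedged usage sketch of the acceptLanguage helper vendored above; the header string and preference list are made-up example values.)

// Illustrative only: picks the best match from an Accept-Language header.
import { acceptLanguage } from 'next/dist/server/accept-header';

// Prefix matching lets the plain 'en' entry satisfy the 'en-US' preference.
const locale = acceptLanguage('fr-CH, fr;q=0.9, en;q=0.8', ['en-US', 'fr']);
console.log(locale); // 'fr'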
apps/public-web/node_modules/next/dist/server/accept-header.js.map (1 line, generated, vendored, normal file)
File diff suppressed because one or more lines are too long
apps/public-web/node_modules/next/dist/server/after/after-context.d.ts (21 lines, generated, vendored, normal file)
@@ -0,0 +1,21 @@
import type { RequestLifecycleOpts } from '../base-server';
import type { AfterTask } from './after';
export type AfterContextOpts = {
    waitUntil: RequestLifecycleOpts['waitUntil'] | undefined;
    onClose: RequestLifecycleOpts['onClose'];
    onTaskError: RequestLifecycleOpts['onAfterTaskError'] | undefined;
};
export declare class AfterContext {
    private waitUntil;
    private onClose;
    private onTaskError;
    private runCallbacksOnClosePromise;
    private callbackQueue;
    private workUnitStores;
    constructor({ waitUntil, onClose, onTaskError }: AfterContextOpts);
    after(task: AfterTask): void;
    private addCallback;
    private runCallbacksOnClose;
    private runCallbacks;
    private reportTaskError;
}
apps/public-web/node_modules/next/dist/server/after/after-context.js (140 lines, generated, vendored, normal file)
@@ -0,0 +1,140 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "AfterContext", {
    enumerable: true,
    get: function() {
        return AfterContext;
    }
});
const _pqueue = /*#__PURE__*/ _interop_require_default(require("next/dist/compiled/p-queue"));
const _invarianterror = require("../../shared/lib/invariant-error");
const _isthenable = require("../../shared/lib/is-thenable");
const _workasyncstorageexternal = require("../app-render/work-async-storage.external");
const _revalidationutils = require("../revalidation-utils");
const _asynclocalstorage = require("../app-render/async-local-storage");
const _workunitasyncstorageexternal = require("../app-render/work-unit-async-storage.external");
const _aftertaskasyncstorageexternal = require("../app-render/after-task-async-storage.external");
function _interop_require_default(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
class AfterContext {
    constructor({ waitUntil, onClose, onTaskError }){
        this.workUnitStores = new Set();
        this.waitUntil = waitUntil;
        this.onClose = onClose;
        this.onTaskError = onTaskError;
        this.callbackQueue = new _pqueue.default();
        this.callbackQueue.pause();
    }
    after(task) {
        if ((0, _isthenable.isThenable)(task)) {
            if (!this.waitUntil) {
                errorWaitUntilNotAvailable();
            }
            this.waitUntil(task.catch((error)=>this.reportTaskError('promise', error)));
        } else if (typeof task === 'function') {
            // TODO(after): implement tracing
            this.addCallback(task);
        } else {
            throw Object.defineProperty(new Error('`after()`: Argument must be a promise or a function'), "__NEXT_ERROR_CODE", {
                value: "E50",
                enumerable: false,
                configurable: true
            });
        }
    }
    addCallback(callback) {
        // if something is wrong, throw synchronously, bubbling up to the `after` callsite.
        if (!this.waitUntil) {
            errorWaitUntilNotAvailable();
        }
        const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
        if (workUnitStore) {
            this.workUnitStores.add(workUnitStore);
        }
        const afterTaskStore = _aftertaskasyncstorageexternal.afterTaskAsyncStorage.getStore();
        // This is used for checking if request APIs can be called inside `after`.
        // Note that we need to check the phase in which the *topmost* `after` was called (which should be "action"),
        // not the current phase (which might be "after" if we're in a nested after).
        // Otherwise, we might allow `after(() => headers())`, but not `after(() => after(() => headers()))`.
        const rootTaskSpawnPhase = afterTaskStore ? afterTaskStore.rootTaskSpawnPhase // nested after
         : workUnitStore == null ? void 0 : workUnitStore.phase // topmost after
        ;
        // this should only happen once.
        if (!this.runCallbacksOnClosePromise) {
            this.runCallbacksOnClosePromise = this.runCallbacksOnClose();
            this.waitUntil(this.runCallbacksOnClosePromise);
        }
        // Bind the callback to the current execution context (i.e. preserve all currently available ALS-es).
        // We do this because we want all of these to be equivalent in every regard except timing:
        //   after(() => x())
        //   after(x())
        //   await x()
        const wrappedCallback = (0, _asynclocalstorage.bindSnapshot)(// WARNING: Don't make this a named function. It must be anonymous.
        // See: https://github.com/facebook/react/pull/34911
        async ()=>{
            try {
                await _aftertaskasyncstorageexternal.afterTaskAsyncStorage.run({
                    rootTaskSpawnPhase
                }, ()=>callback());
            } catch (error) {
                this.reportTaskError('function', error);
            }
        });
        this.callbackQueue.add(wrappedCallback);
    }
    async runCallbacksOnClose() {
        await new Promise((resolve)=>this.onClose(resolve));
        return this.runCallbacks();
    }
    async runCallbacks() {
        if (this.callbackQueue.size === 0) return;
        for (const workUnitStore of this.workUnitStores){
            workUnitStore.phase = 'after';
        }
        const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
        if (!workStore) {
            throw Object.defineProperty(new _invarianterror.InvariantError('Missing workStore in AfterContext.runCallbacks'), "__NEXT_ERROR_CODE", {
                value: "E547",
                enumerable: false,
                configurable: true
            });
        }
        return (0, _revalidationutils.withExecuteRevalidates)(workStore, ()=>{
            this.callbackQueue.start();
            return this.callbackQueue.onIdle();
        });
    }
    reportTaskError(taskKind, error) {
        // TODO(after): this is fine for now, but will need better intergration with our error reporting.
        // TODO(after): should we log this if we have a onTaskError callback?
        console.error(taskKind === 'promise' ? `A promise passed to \`after()\` rejected:` : `An error occurred in a function passed to \`after()\`:`, error);
        if (this.onTaskError) {
            // this is very defensive, but we really don't want anything to blow up in an error handler
            try {
                this.onTaskError == null ? void 0 : this.onTaskError.call(this, error);
            } catch (handlerError) {
                console.error(Object.defineProperty(new _invarianterror.InvariantError('`onTaskError` threw while handling an error thrown from an `after` task', {
                    cause: handlerError
                }), "__NEXT_ERROR_CODE", {
                    value: "E569",
                    enumerable: false,
                    configurable: true
                }));
            }
        }
    }
}
function errorWaitUntilNotAvailable() {
    throw Object.defineProperty(new Error('`after()` will not work correctly, because `waitUntil` is not available in the current environment.'), "__NEXT_ERROR_CODE", {
        value: "E91",
        enumerable: false,
        configurable: true
    });
}

//# sourceMappingURL=after-context.js.map
apps/public-web/node_modules/next/dist/server/after/after-context.js.map (1 line, generated, vendored, normal file)
File diff suppressed because one or more lines are too long
apps/public-web/node_modules/next/dist/server/after/after.d.ts (6 lines, generated, vendored, normal file)
@@ -0,0 +1,6 @@
export type AfterTask<T = unknown> = Promise<T> | AfterCallback<T>;
export type AfterCallback<T = unknown> = () => T | Promise<T>;
/**
 * This function allows you to schedule callbacks to be executed after the current request finishes.
 */
export declare function after<T>(task: AfterTask<T>): void;
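(Editor's note: a brief, hedged sketch of how the after() API declared above is typically consumed from application code. In current Next.js releases it is re-exported from next/server; older releases exposed it as unstable_after. The logging call is an arbitrary example.)

import { after } from 'next/server';

export async function GET(request: Request) {
    // Runs after the response has been sent, without blocking it.
    after(() => {
        console.log('request finished:', request.url);
    });
    return new Response('ok');
}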
apps/public-web/node_modules/next/dist/server/after/after.js (26 lines, generated, vendored, normal file)
@@ -0,0 +1,26 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "after", {
    enumerable: true,
    get: function() {
        return after;
    }
});
const _workasyncstorageexternal = require("../app-render/work-async-storage.external");
function after(task) {
    const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
    if (!workStore) {
        // TODO(after): the linked docs page talks about *dynamic* APIs, which after soon won't be anymore
        throw Object.defineProperty(new Error('`after` was called outside a request scope. Read more: https://nextjs.org/docs/messages/next-dynamic-api-wrong-context'), "__NEXT_ERROR_CODE", {
            value: "E468",
            enumerable: false,
            configurable: true
        });
    }
    const { afterContext } = workStore;
    return afterContext.after(task);
}

//# sourceMappingURL=after.js.map
apps/public-web/node_modules/next/dist/server/after/after.js.map (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/after/after.ts"],"sourcesContent":["import { workAsyncStorage } from '../app-render/work-async-storage.external'\n\nexport type AfterTask<T = unknown> = Promise<T> | AfterCallback<T>\nexport type AfterCallback<T = unknown> = () => T | Promise<T>\n\n/**\n * This function allows you to schedule callbacks to be executed after the current request finishes.\n */\nexport function after<T>(task: AfterTask<T>): void {\n const workStore = workAsyncStorage.getStore()\n\n if (!workStore) {\n // TODO(after): the linked docs page talks about *dynamic* APIs, which after soon won't be anymore\n throw new Error(\n '`after` was called outside a request scope. Read more: https://nextjs.org/docs/messages/next-dynamic-api-wrong-context'\n )\n }\n\n const { afterContext } = workStore\n return afterContext.after(task)\n}\n"],"names":["after","task","workStore","workAsyncStorage","getStore","Error","afterContext"],"mappings":";;;;+BAQgBA;;;eAAAA;;;0CARiB;AAQ1B,SAASA,MAASC,IAAkB;IACzC,MAAMC,YAAYC,0CAAgB,CAACC,QAAQ;IAE3C,IAAI,CAACF,WAAW;QACd,kGAAkG;QAClG,MAAM,qBAEL,CAFK,IAAIG,MACR,2HADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,MAAM,EAAEC,YAAY,EAAE,GAAGJ;IACzB,OAAOI,aAAaN,KAAK,CAACC;AAC5B","ignoreList":[0]}
apps/public-web/node_modules/next/dist/server/after/awaiter.d.ts (29 lines, generated, vendored, normal file)
@@ -0,0 +1,29 @@
/**
 * Provides a `waitUntil` implementation which gathers promises to be awaited later (via {@link AwaiterMulti.awaiting}).
 * Unlike a simple `Promise.all`, {@link AwaiterMulti} works recursively --
 * if a promise passed to {@link AwaiterMulti.waitUntil} calls `waitUntil` again,
 * that second promise will also be awaited.
 */
export declare class AwaiterMulti {
    private promises;
    private onError;
    constructor({ onError }?: {
        onError?: (error: unknown) => void;
    });
    waitUntil: (promise: Promise<unknown>) => void;
    awaiting(): Promise<void>;
}
/**
 * Like {@link AwaiterMulti}, but can only be awaited once.
 * If {@link AwaiterOnce.waitUntil} is called after that, it will throw.
 */
export declare class AwaiterOnce {
    private awaiter;
    private done;
    private pending;
    constructor(options?: {
        onError?: (error: unknown) => void;
    });
    waitUntil: (promise: Promise<unknown>) => void;
    awaiting(): Promise<void>;
}
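(Editor's note: a minimal sketch of the contract documented above, i.e. that awaiting() keeps draining recursively registered promises. This is an internal Next.js module; the promises here are placeholders.)

import { AwaiterMulti } from 'next/dist/server/after/awaiter';

const awaiter = new AwaiterMulti({ onError: console.error });
awaiter.waitUntil(Promise.resolve('first'));
// A promise passed to waitUntil may itself register more work;
// awaiting() keeps looping until the pending set is empty.
awaiter.waitUntil(
    Promise.resolve().then(() => awaiter.waitUntil(Promise.resolve('nested')))
);
await awaiter.awaiting();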
apps/public-web/node_modules/next/dist/server/after/awaiter.js (74 lines, generated, vendored, normal file)
@@ -0,0 +1,74 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
0 && (module.exports = {
    AwaiterMulti: null,
    AwaiterOnce: null
});
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    AwaiterMulti: function() {
        return AwaiterMulti;
    },
    AwaiterOnce: function() {
        return AwaiterOnce;
    }
});
const _invarianterror = require("../../shared/lib/invariant-error");
class AwaiterMulti {
    constructor({ onError } = {}){
        this.promises = new Set();
        this.waitUntil = (promise)=>{
            // if a promise settles before we await it, we should drop it --
            // storing them indefinitely could result in a memory leak.
            const cleanup = ()=>{
                this.promises.delete(promise);
            };
            promise.then(cleanup, (err)=>{
                cleanup();
                this.onError(err);
            });
            this.promises.add(promise);
        };
        this.onError = onError ?? console.error;
    }
    async awaiting() {
        while(this.promises.size > 0){
            const promises = Array.from(this.promises);
            this.promises.clear();
            await Promise.allSettled(promises);
        }
    }
}
class AwaiterOnce {
    constructor(options = {}){
        this.done = false;
        this.waitUntil = (promise)=>{
            if (this.done) {
                throw Object.defineProperty(new _invarianterror.InvariantError('Cannot call waitUntil() on an AwaiterOnce that was already awaited'), "__NEXT_ERROR_CODE", {
                    value: "E563",
                    enumerable: false,
                    configurable: true
                });
            }
            return this.awaiter.waitUntil(promise);
        };
        this.awaiter = new AwaiterMulti(options);
    }
    async awaiting() {
        if (!this.pending) {
            this.pending = this.awaiter.awaiting().finally(()=>{
                this.done = true;
            });
        }
        return this.pending;
    }
}

//# sourceMappingURL=awaiter.js.map
apps/public-web/node_modules/next/dist/server/after/awaiter.js.map (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/after/awaiter.ts"],"sourcesContent":["import { InvariantError } from '../../shared/lib/invariant-error'\n\n/**\n * Provides a `waitUntil` implementation which gathers promises to be awaited later (via {@link AwaiterMulti.awaiting}).\n * Unlike a simple `Promise.all`, {@link AwaiterMulti} works recursively --\n * if a promise passed to {@link AwaiterMulti.waitUntil} calls `waitUntil` again,\n * that second promise will also be awaited.\n */\nexport class AwaiterMulti {\n private promises: Set<Promise<unknown>> = new Set()\n private onError: (error: unknown) => void\n\n constructor({ onError }: { onError?: (error: unknown) => void } = {}) {\n this.onError = onError ?? console.error\n }\n\n public waitUntil = (promise: Promise<unknown>): void => {\n // if a promise settles before we await it, we should drop it --\n // storing them indefinitely could result in a memory leak.\n const cleanup = () => {\n this.promises.delete(promise)\n }\n\n promise.then(cleanup, (err) => {\n cleanup()\n this.onError(err)\n })\n\n this.promises.add(promise)\n }\n\n public async awaiting(): Promise<void> {\n while (this.promises.size > 0) {\n const promises = Array.from(this.promises)\n this.promises.clear()\n await Promise.allSettled(promises)\n }\n }\n}\n\n/**\n * Like {@link AwaiterMulti}, but can only be awaited once.\n * If {@link AwaiterOnce.waitUntil} is called after that, it will throw.\n */\nexport class AwaiterOnce {\n private awaiter: AwaiterMulti\n private done: boolean = false\n private pending: Promise<void> | undefined\n\n constructor(options: { onError?: (error: unknown) => void } = {}) {\n this.awaiter = new AwaiterMulti(options)\n }\n\n public waitUntil = (promise: Promise<unknown>): void => {\n if (this.done) {\n throw new InvariantError(\n 'Cannot call waitUntil() on an AwaiterOnce that was already awaited'\n )\n }\n return this.awaiter.waitUntil(promise)\n }\n\n public async awaiting(): Promise<void> {\n if (!this.pending) {\n this.pending = this.awaiter.awaiting().finally(() => {\n this.done = true\n })\n }\n return this.pending\n }\n}\n"],"names":["AwaiterMulti","AwaiterOnce","constructor","onError","promises","Set","waitUntil","promise","cleanup","delete","then","err","add","console","error","awaiting","size","Array","from","clear","Promise","allSettled","options","done","InvariantError","awaiter","pending","finally"],"mappings":";;;;;;;;;;;;;;;IAQaA,YAAY;eAAZA;;IAoCAC,WAAW;eAAXA;;;gCA5CkB;AAQxB,MAAMD;IAIXE,YAAY,EAAEC,OAAO,EAA0C,GAAG,CAAC,CAAC,CAAE;aAH9DC,WAAkC,IAAIC;aAOvCC,YAAY,CAACC;YAClB,gEAAgE;YAChE,2DAA2D;YAC3D,MAAMC,UAAU;gBACd,IAAI,CAACJ,QAAQ,CAACK,MAAM,CAACF;YACvB;YAEAA,QAAQG,IAAI,CAACF,SAAS,CAACG;gBACrBH;gBACA,IAAI,CAACL,OAAO,CAACQ;YACf;YAEA,IAAI,CAACP,QAAQ,CAACQ,GAAG,CAACL;QACpB;QAhBE,IAAI,CAACJ,OAAO,GAAGA,WAAWU,QAAQC,KAAK;IACzC;IAiBA,MAAaC,WAA0B;QACrC,MAAO,IAAI,CAACX,QAAQ,CAACY,IAAI,GAAG,EAAG;YAC7B,MAAMZ,WAAWa,MAAMC,IAAI,CAAC,IAAI,CAACd,QAAQ;YACzC,IAAI,CAACA,QAAQ,CAACe,KAAK;YACnB,MAAMC,QAAQC,UAAU,CAACjB;QAC3B;IACF;AACF;AAMO,MAAMH;IAKXC,YAAYoB,UAAkD,CAAC,CAAC,CAAE;aAH1DC,OAAgB;aAOjBjB,YAAY,CAACC;YAClB,IAAI,IAAI,CAACgB,IAAI,EAAE;gBACb,MAAM,qBAEL,CAFK,IAAIC,8BAAc,CACtB,uEADI,qBAAA;2BAAA;gCAAA;kCAAA;gBAEN;YACF;YACA,OAAO,IAAI,CAACC,OAAO,CAACnB,SAAS,CAACC;QAChC;QAVE,IAAI,CAACkB,OAAO,GAAG,IAAIzB,aAAasB;IAClC;IAWA,MAAaP,WAA0B;QACrC,IAAI,CAAC,IAAI,CAACW,OAAO,EAAE;YACjB,IAAI,CAACA,OAAO,GAAG,IAAI,CAACD,OAAO,CAACV,QAAQ,GAAGY,OAAO,CAAC;gBAC7C,IAAI,CAACJ,IAAI,GAAG;YACd;QACF;QACA,OAAO,IAAI,CAACG,OAAO;IACrB;AACF","ignoreList":[0]}
apps/public-web/node_modules/next/dist/server/after/builtin-request-context.d.ts (17 lines, generated, vendored, normal file)
@@ -0,0 +1,17 @@
export declare function getBuiltinRequestContext(): BuiltinRequestContextValue | undefined;
/** A request context provided by the platform. */
export type BuiltinRequestContext = {
    get(): BuiltinRequestContextValue | undefined;
};
export type RunnableBuiltinRequestContext = BuiltinRequestContext & {
    run<T>(value: BuiltinRequestContextValue, callback: () => T): T;
};
export type BuiltinRequestContextValue = {
    waitUntil?: WaitUntil;
};
export type WaitUntil = (promise: Promise<any>) => void;
/** "@next/request-context" has a different signature from AsyncLocalStorage,
 * matching [AsyncContext.Variable](https://github.com/tc39/proposal-async-context).
 * We don't need a full AsyncContext adapter here, just having `.get()` is enough
 */
export declare function createLocalRequestContext(): RunnableBuiltinRequestContext;
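(Editor's note: a hedged sketch of the platform-side contract implied by the declarations above and by getBuiltinRequestContext() in the accompanying .js file: a global keyed by Symbol.for('@next/request-context') whose get() returns an optional waitUntil. The no-op waitUntil below is purely illustrative.)

// Sketch of what a hosting platform might install for Next.js to pick up.
const requestContext = {
    get: () => ({
        waitUntil: (promise: Promise<unknown>) => {
            // A real platform would keep the invocation alive until this settles.
            promise.catch(() => {});
        },
    }),
};
(globalThis as any)[Symbol.for('@next/request-context')] = requestContext;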
apps/public-web/node_modules/next/dist/server/after/builtin-request-context.js (38 lines, generated, vendored, normal file)
@@ -0,0 +1,38 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
0 && (module.exports = {
    createLocalRequestContext: null,
    getBuiltinRequestContext: null
});
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    createLocalRequestContext: function() {
        return createLocalRequestContext;
    },
    getBuiltinRequestContext: function() {
        return getBuiltinRequestContext;
    }
});
const _asynclocalstorage = require("../app-render/async-local-storage");
function getBuiltinRequestContext() {
    const _globalThis = globalThis;
    const ctx = _globalThis[NEXT_REQUEST_CONTEXT_SYMBOL];
    return ctx == null ? void 0 : ctx.get();
}
const NEXT_REQUEST_CONTEXT_SYMBOL = Symbol.for('@next/request-context');
function createLocalRequestContext() {
    const storage = (0, _asynclocalstorage.createAsyncLocalStorage)();
    return {
        get: ()=>storage.getStore(),
        run: (value, callback)=>storage.run(value, callback)
    };
}

//# sourceMappingURL=builtin-request-context.js.map
apps/public-web/node_modules/next/dist/server/after/builtin-request-context.js.map (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/after/builtin-request-context.ts"],"sourcesContent":["import { createAsyncLocalStorage } from '../app-render/async-local-storage'\n\nexport function getBuiltinRequestContext():\n | BuiltinRequestContextValue\n | undefined {\n const _globalThis = globalThis as GlobalThisWithRequestContext\n const ctx = _globalThis[NEXT_REQUEST_CONTEXT_SYMBOL]\n return ctx?.get()\n}\n\nconst NEXT_REQUEST_CONTEXT_SYMBOL = Symbol.for('@next/request-context')\n\ntype GlobalThisWithRequestContext = typeof globalThis & {\n [NEXT_REQUEST_CONTEXT_SYMBOL]?: BuiltinRequestContext\n}\n\n/** A request context provided by the platform. */\nexport type BuiltinRequestContext = {\n get(): BuiltinRequestContextValue | undefined\n}\n\nexport type RunnableBuiltinRequestContext = BuiltinRequestContext & {\n run<T>(value: BuiltinRequestContextValue, callback: () => T): T\n}\n\nexport type BuiltinRequestContextValue = {\n waitUntil?: WaitUntil\n}\nexport type WaitUntil = (promise: Promise<any>) => void\n\n/** \"@next/request-context\" has a different signature from AsyncLocalStorage,\n * matching [AsyncContext.Variable](https://github.com/tc39/proposal-async-context).\n * We don't need a full AsyncContext adapter here, just having `.get()` is enough\n */\nexport function createLocalRequestContext(): RunnableBuiltinRequestContext {\n const storage = createAsyncLocalStorage<BuiltinRequestContextValue>()\n return {\n get: () => storage.getStore(),\n run: (value, callback) => storage.run(value, callback),\n }\n}\n"],"names":["createLocalRequestContext","getBuiltinRequestContext","_globalThis","globalThis","ctx","NEXT_REQUEST_CONTEXT_SYMBOL","get","Symbol","for","storage","createAsyncLocalStorage","getStore","run","value","callback"],"mappings":";;;;;;;;;;;;;;;IAkCgBA,yBAAyB;eAAzBA;;IAhCAC,wBAAwB;eAAxBA;;;mCAFwB;AAEjC,SAASA;IAGd,MAAMC,cAAcC;IACpB,MAAMC,MAAMF,WAAW,CAACG,4BAA4B;IACpD,OAAOD,uBAAAA,IAAKE,GAAG;AACjB;AAEA,MAAMD,8BAA8BE,OAAOC,GAAG,CAAC;AAwBxC,SAASR;IACd,MAAMS,UAAUC,IAAAA,0CAAuB;IACvC,OAAO;QACLJ,KAAK,IAAMG,QAAQE,QAAQ;QAC3BC,KAAK,CAACC,OAAOC,WAAaL,QAAQG,GAAG,CAACC,OAAOC;IAC/C;AACF","ignoreList":[0]}
apps/public-web/node_modules/next/dist/server/after/index.d.ts (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
export * from './after';
apps/public-web/node_modules/next/dist/server/after/index.js (21 lines, generated, vendored, normal file)
@@ -0,0 +1,21 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
0 && __export(require("./after"));
_export_star(require("./after"), exports);
function _export_star(from, to) {
    Object.keys(from).forEach(function(k) {
        if (k !== "default" && !Object.prototype.hasOwnProperty.call(to, k)) {
            Object.defineProperty(to, k, {
                enumerable: true,
                get: function() {
                    return from[k];
                }
            });
        }
    });
    return from;
}

//# sourceMappingURL=index.js.map
apps/public-web/node_modules/next/dist/server/after/index.js.map (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/after/index.ts"],"sourcesContent":["export * from './after'\n"],"names":[],"mappings":";;;;;qBAAc","ignoreList":[0]}
apps/public-web/node_modules/next/dist/server/after/run-with-after.d.ts (14 lines, generated, vendored, normal file)
@@ -0,0 +1,14 @@
import type { AfterContextOpts } from './after-context';
type Ctx = {
    waitUntil: NonNullable<AfterContextOpts['waitUntil']>;
    onClose: NonNullable<AfterContextOpts['onClose']>;
    onTaskError: NonNullable<AfterContextOpts['onTaskError']>;
};
export declare class AfterRunner {
    private awaiter;
    private closeController;
    private finishedWithoutErrors;
    readonly context: Ctx;
    executeAfter(): Promise<void>;
}
export {};
apps/public-web/node_modules/next/dist/server/after/run-with-after.js (35 lines, generated, vendored, normal file)
@@ -0,0 +1,35 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "AfterRunner", {
    enumerable: true,
    get: function() {
        return AfterRunner;
    }
});
const _detachedpromise = require("../../lib/detached-promise");
const _webonclose = require("../web/web-on-close");
const _awaiter = require("./awaiter");
class AfterRunner {
    async executeAfter() {
        this.closeController.dispatchClose();
        await this.awaiter.awaiting();
        // if we got an error while running the callbacks,
        // thenthis is a noop, because the promise is already rejected
        this.finishedWithoutErrors.resolve();
        return this.finishedWithoutErrors.promise;
    }
    constructor(){
        this.awaiter = new _awaiter.AwaiterOnce();
        this.closeController = new _webonclose.CloseController();
        this.finishedWithoutErrors = new _detachedpromise.DetachedPromise();
        this.context = {
            waitUntil: this.awaiter.waitUntil.bind(this.awaiter),
            onClose: this.closeController.onClose.bind(this.closeController),
            onTaskError: (error)=>this.finishedWithoutErrors.reject(error)
        };
    }
}

//# sourceMappingURL=run-with-after.js.map
apps/public-web/node_modules/next/dist/server/after/run-with-after.js.map (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/after/run-with-after.ts"],"sourcesContent":["import { DetachedPromise } from '../../lib/detached-promise'\nimport { CloseController } from '../web/web-on-close'\nimport type { AfterContextOpts } from './after-context'\nimport { AwaiterOnce } from './awaiter'\n\ntype Ctx = {\n waitUntil: NonNullable<AfterContextOpts['waitUntil']>\n onClose: NonNullable<AfterContextOpts['onClose']>\n onTaskError: NonNullable<AfterContextOpts['onTaskError']>\n}\n\nexport class AfterRunner {\n private awaiter = new AwaiterOnce()\n private closeController = new CloseController()\n private finishedWithoutErrors = new DetachedPromise<void>()\n\n readonly context: Ctx = {\n waitUntil: this.awaiter.waitUntil.bind(this.awaiter),\n onClose: this.closeController.onClose.bind(this.closeController),\n onTaskError: (error) => this.finishedWithoutErrors.reject(error),\n }\n\n public async executeAfter() {\n this.closeController.dispatchClose()\n await this.awaiter.awaiting()\n\n // if we got an error while running the callbacks,\n // thenthis is a noop, because the promise is already rejected\n this.finishedWithoutErrors.resolve()\n\n return this.finishedWithoutErrors.promise\n }\n}\n"],"names":["AfterRunner","executeAfter","closeController","dispatchClose","awaiter","awaiting","finishedWithoutErrors","resolve","promise","AwaiterOnce","CloseController","DetachedPromise","context","waitUntil","bind","onClose","onTaskError","error","reject"],"mappings":";;;;+BAWaA;;;eAAAA;;;iCAXmB;4BACA;yBAEJ;AAQrB,MAAMA;IAWX,MAAaC,eAAe;QAC1B,IAAI,CAACC,eAAe,CAACC,aAAa;QAClC,MAAM,IAAI,CAACC,OAAO,CAACC,QAAQ;QAE3B,kDAAkD;QAClD,8DAA8D;QAC9D,IAAI,CAACC,qBAAqB,CAACC,OAAO;QAElC,OAAO,IAAI,CAACD,qBAAqB,CAACE,OAAO;IAC3C;;aAnBQJ,UAAU,IAAIK,oBAAW;aACzBP,kBAAkB,IAAIQ,2BAAe;aACrCJ,wBAAwB,IAAIK,gCAAe;aAE1CC,UAAe;YACtBC,WAAW,IAAI,CAACT,OAAO,CAACS,SAAS,CAACC,IAAI,CAAC,IAAI,CAACV,OAAO;YACnDW,SAAS,IAAI,CAACb,eAAe,CAACa,OAAO,CAACD,IAAI,CAAC,IAAI,CAACZ,eAAe;YAC/Dc,aAAa,CAACC,QAAU,IAAI,CAACX,qBAAqB,CAACY,MAAM,CAACD;QAC5D;;AAYF","ignoreList":[0]}
apps/public-web/node_modules/next/dist/server/api-utils/get-cookie-parser.d.ts (8 lines, generated, vendored, normal file)
@@ -0,0 +1,8 @@
import type { NextApiRequestCookies } from '.';
/**
 * Parse cookies from the `headers` of request
 * @param req request object
 */
export declare function getCookieParser(headers: {
    [key: string]: string | string[] | null | undefined;
}): () => NextApiRequestCookies;
apps/public-web/node_modules/next/dist/server/api-utils/get-cookie-parser.js (22 lines, generated, vendored, normal file)
@@ -0,0 +1,22 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "getCookieParser", {
    enumerable: true,
    get: function() {
        return getCookieParser;
    }
});
function getCookieParser(headers) {
    return function parseCookie() {
        const { cookie } = headers;
        if (!cookie) {
            return {};
        }
        const { parse: parseCookieFn } = require('next/dist/compiled/cookie');
        return parseCookieFn(Array.isArray(cookie) ? cookie.join('; ') : cookie);
    };
}

//# sourceMappingURL=get-cookie-parser.js.map
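(Editor's note: a small, hedged usage sketch of the getCookieParser helper vendored above; the cookie header value is a made-up example.)

import { getCookieParser } from 'next/dist/server/api-utils/get-cookie-parser';

// The parser is lazy: the cookie header is only parsed when the returned function is called.
const parseCookies = getCookieParser({ cookie: 'theme=dark; session=abc123' });
console.log(parseCookies()); // { theme: 'dark', session: 'abc123' }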
apps/public-web/node_modules/next/dist/server/api-utils/get-cookie-parser.js.map (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/api-utils/get-cookie-parser.ts"],"sourcesContent":["import type { NextApiRequestCookies } from '.'\n\n/**\n * Parse cookies from the `headers` of request\n * @param req request object\n */\n\nexport function getCookieParser(headers: {\n [key: string]: string | string[] | null | undefined\n}): () => NextApiRequestCookies {\n return function parseCookie(): NextApiRequestCookies {\n const { cookie } = headers\n\n if (!cookie) {\n return {}\n }\n\n const { parse: parseCookieFn } =\n require('next/dist/compiled/cookie') as typeof import('next/dist/compiled/cookie')\n return parseCookieFn(Array.isArray(cookie) ? cookie.join('; ') : cookie)\n }\n}\n"],"names":["getCookieParser","headers","parseCookie","cookie","parse","parseCookieFn","require","Array","isArray","join"],"mappings":";;;;+BAOgBA;;;eAAAA;;;AAAT,SAASA,gBAAgBC,OAE/B;IACC,OAAO,SAASC;QACd,MAAM,EAAEC,MAAM,EAAE,GAAGF;QAEnB,IAAI,CAACE,QAAQ;YACX,OAAO,CAAC;QACV;QAEA,MAAM,EAAEC,OAAOC,aAAa,EAAE,GAC5BC,QAAQ;QACV,OAAOD,cAAcE,MAAMC,OAAO,CAACL,UAAUA,OAAOM,IAAI,CAAC,QAAQN;IACnE;AACF","ignoreList":[0]}
apps/public-web/node_modules/next/dist/server/api-utils/index.d.ts (65 lines, generated, vendored, normal file)
@@ -0,0 +1,65 @@
import type { IncomingMessage } from 'http';
import type { BaseNextRequest } from '../base-http';
import type { NextApiResponse } from '../../shared/lib/utils';
export type NextApiRequestCookies = Partial<{
    [key: string]: string;
}>;
export type NextApiRequestQuery = Partial<{
    [key: string]: string | string[];
}>;
export type __ApiPreviewProps = {
    previewModeId: string;
    previewModeEncryptionKey: string;
    previewModeSigningKey: string;
};
export declare function wrapApiHandler<T extends (...args: any[]) => any>(page: string, handler: T): T;
/**
 *
 * @param res response object
 * @param statusCode `HTTP` status code of response
 */
export declare function sendStatusCode(res: NextApiResponse, statusCode: number): NextApiResponse<any>;
/**
 *
 * @param res response object
 * @param [statusOrUrl] `HTTP` status code of redirect
 * @param url URL of redirect
 */
export declare function redirect(res: NextApiResponse, statusOrUrl: string | number, url?: string): NextApiResponse<any>;
export declare function checkIsOnDemandRevalidate(req: Request | IncomingMessage | BaseNextRequest, previewProps: __ApiPreviewProps): {
    isOnDemandRevalidate: boolean;
    revalidateOnlyGenerated: boolean;
};
export declare const COOKIE_NAME_PRERENDER_BYPASS = "__prerender_bypass";
export declare const COOKIE_NAME_PRERENDER_DATA = "__next_preview_data";
export declare const RESPONSE_LIMIT_DEFAULT: number;
export declare const SYMBOL_PREVIEW_DATA: unique symbol;
export declare const SYMBOL_CLEARED_COOKIES: unique symbol;
export declare function clearPreviewData<T>(res: NextApiResponse<T>, options?: {
    path?: string;
}): NextApiResponse<T>;
/**
 * Custom error class
 */
export declare class ApiError extends Error {
    readonly statusCode: number;
    constructor(statusCode: number, message: string);
}
/**
 * Sends error in `response`
 * @param res response object
 * @param statusCode of response
 * @param message of response
 */
export declare function sendError(res: NextApiResponse, statusCode: number, message: string): void;
interface LazyProps {
    req: IncomingMessage;
}
/**
 * Execute getter function only if its needed
 * @param LazyProps `req` and `params` for lazyProp
 * @param prop name of property
 * @param getter function to get data
 */
export declare function setLazyProp<T>({ req }: LazyProps, prop: string, getter: () => T): void;
export {};
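(Editor's note: a hedged sketch of how the redirect and sendError helpers declared above are used from a pages API route, mirroring the call shapes shown in the implementation's own error message. The route logic itself is an arbitrary example.)

import type { NextApiRequest, NextApiResponse } from 'next';
import { redirect, sendError } from 'next/dist/server/api-utils';

export default function handler(req: NextApiRequest, res: NextApiResponse) {
    if (req.url?.includes('legacy')) {
        // Status code + URL form, equivalent to res.redirect(307, '/destination').
        return redirect(res, 307, '/destination');
    }
    // Sets the status code and message, then ends the response.
    return sendError(res, 404, 'Not Found');
}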
apps/public-web/node_modules/next/dist/server/api-utils/index.js (201 lines, generated, vendored, normal file)
@@ -0,0 +1,201 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
0 && (module.exports = {
    ApiError: null,
    COOKIE_NAME_PRERENDER_BYPASS: null,
    COOKIE_NAME_PRERENDER_DATA: null,
    RESPONSE_LIMIT_DEFAULT: null,
    SYMBOL_CLEARED_COOKIES: null,
    SYMBOL_PREVIEW_DATA: null,
    checkIsOnDemandRevalidate: null,
    clearPreviewData: null,
    redirect: null,
    sendError: null,
    sendStatusCode: null,
    setLazyProp: null,
    wrapApiHandler: null
});
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    ApiError: function() {
        return ApiError;
    },
    COOKIE_NAME_PRERENDER_BYPASS: function() {
        return COOKIE_NAME_PRERENDER_BYPASS;
    },
    COOKIE_NAME_PRERENDER_DATA: function() {
        return COOKIE_NAME_PRERENDER_DATA;
    },
    RESPONSE_LIMIT_DEFAULT: function() {
        return RESPONSE_LIMIT_DEFAULT;
    },
    SYMBOL_CLEARED_COOKIES: function() {
        return SYMBOL_CLEARED_COOKIES;
    },
    SYMBOL_PREVIEW_DATA: function() {
        return SYMBOL_PREVIEW_DATA;
    },
    checkIsOnDemandRevalidate: function() {
        return checkIsOnDemandRevalidate;
    },
    clearPreviewData: function() {
        return clearPreviewData;
    },
    redirect: function() {
        return redirect;
    },
    sendError: function() {
        return sendError;
    },
    sendStatusCode: function() {
        return sendStatusCode;
    },
    setLazyProp: function() {
        return setLazyProp;
    },
    wrapApiHandler: function() {
        return wrapApiHandler;
    }
});
const _headers = require("../web/spec-extension/adapters/headers");
const _constants = require("../../lib/constants");
const _tracer = require("../lib/trace/tracer");
const _constants1 = require("../lib/trace/constants");
function wrapApiHandler(page, handler) {
    return (...args)=>{
        (0, _tracer.getTracer)().setRootSpanAttribute('next.route', page);
        // Call API route method
        return (0, _tracer.getTracer)().trace(_constants1.NodeSpan.runHandler, {
            spanName: `executing api route (pages) ${page}`
        }, ()=>handler(...args));
    };
}
function sendStatusCode(res, statusCode) {
    res.statusCode = statusCode;
    return res;
}
function redirect(res, statusOrUrl, url) {
    if (typeof statusOrUrl === 'string') {
        url = statusOrUrl;
        statusOrUrl = 307;
    }
    if (typeof statusOrUrl !== 'number' || typeof url !== 'string') {
        throw Object.defineProperty(new Error(`Invalid redirect arguments. Please use a single argument URL, e.g. res.redirect('/destination') or use a status code and URL, e.g. res.redirect(307, '/destination').`), "__NEXT_ERROR_CODE", {
            value: "E389",
            enumerable: false,
            configurable: true
        });
    }
    res.writeHead(statusOrUrl, {
        Location: url
    });
    res.write(url);
    res.end();
    return res;
}
function checkIsOnDemandRevalidate(req, previewProps) {
    const headers = _headers.HeadersAdapter.from(req.headers);
    const previewModeId = headers.get(_constants.PRERENDER_REVALIDATE_HEADER);
    const isOnDemandRevalidate = previewModeId === previewProps.previewModeId;
    const revalidateOnlyGenerated = headers.has(_constants.PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER);
    return {
        isOnDemandRevalidate,
        revalidateOnlyGenerated
    };
}
const COOKIE_NAME_PRERENDER_BYPASS = `__prerender_bypass`;
const COOKIE_NAME_PRERENDER_DATA = `__next_preview_data`;
const RESPONSE_LIMIT_DEFAULT = 4 * 1024 * 1024;
const SYMBOL_PREVIEW_DATA = Symbol(COOKIE_NAME_PRERENDER_DATA);
const SYMBOL_CLEARED_COOKIES = Symbol(COOKIE_NAME_PRERENDER_BYPASS);
function clearPreviewData(res, options = {}) {
    if (SYMBOL_CLEARED_COOKIES in res) {
        return res;
    }
    const { serialize } = require('next/dist/compiled/cookie');
    const previous = res.getHeader('Set-Cookie');
    res.setHeader(`Set-Cookie`, [
        ...typeof previous === 'string' ? [
            previous
        ] : Array.isArray(previous) ? previous : [],
        serialize(COOKIE_NAME_PRERENDER_BYPASS, '', {
            // To delete a cookie, set `expires` to a date in the past:
            // https://tools.ietf.org/html/rfc6265#section-4.1.1
            // `Max-Age: 0` is not valid, thus ignored, and the cookie is persisted.
            expires: new Date(0),
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            ...options.path !== undefined ? {
                path: options.path
            } : undefined
        }),
        serialize(COOKIE_NAME_PRERENDER_DATA, '', {
            // To delete a cookie, set `expires` to a date in the past:
            // https://tools.ietf.org/html/rfc6265#section-4.1.1
            // `Max-Age: 0` is not valid, thus ignored, and the cookie is persisted.
            expires: new Date(0),
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            ...options.path !== undefined ? {
                path: options.path
            } : undefined
        })
    ]);
    Object.defineProperty(res, SYMBOL_CLEARED_COOKIES, {
        value: true,
        enumerable: false
    });
    return res;
}
class ApiError extends Error {
    constructor(statusCode, message){
        super(message);
        this.statusCode = statusCode;
    }
}
function sendError(res, statusCode, message) {
    res.statusCode = statusCode;
    res.statusMessage = message;
    res.end(message);
}
function setLazyProp({ req }, prop, getter) {
    const opts = {
        configurable: true,
        enumerable: true
    };
    const optsReset = {
        ...opts,
        writable: true
    };
    Object.defineProperty(req, prop, {
        ...opts,
        get: ()=>{
            const value = getter();
            // we set the property on the object to avoid recalculating it
            Object.defineProperty(req, prop, {
                ...optsReset,
                value
            });
            return value;
        },
        set: (value)=>{
            Object.defineProperty(req, prop, {
                ...optsReset,
                value
            });
        }
    });
}

//# sourceMappingURL=index.js.map
apps/public-web/node_modules/next/dist/server/api-utils/index.js.map (1 line, generated, vendored, normal file)
File diff suppressed because one or more lines are too long
apps/public-web/node_modules/next/dist/server/api-utils/node/api-resolver.d.ts (14 lines, generated, vendored, normal file)
@@ -0,0 +1,14 @@
import type { IncomingMessage, ServerResponse } from 'http';
import type { __ApiPreviewProps } from '../.';
import type { RevalidateFn } from '../../lib/router-utils/router-server-context';
import type { InstrumentationOnRequestError } from '../../instrumentation/types';
type ApiContext = __ApiPreviewProps & {
    trustHostHeader?: boolean;
    allowedRevalidateHeaderKeys?: string[];
    hostname?: string;
    multiZoneDraftMode?: boolean;
    dev: boolean;
    internalRevalidate?: RevalidateFn;
};
export declare function apiResolver(req: IncomingMessage, res: ServerResponse, query: any, resolverModule: any, apiContext: ApiContext, propagateError: boolean, dev?: boolean, page?: string, onError?: InstrumentationOnRequestError): Promise<void>;
export {};
apps/public-web/node_modules/next/dist/server/api-utils/node/api-resolver.js (395 lines, generated, vendored, normal file)
@@ -0,0 +1,395 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "apiResolver", {
    enumerable: true,
    get: function() {
        return apiResolver;
    }
});
const _bytes = /*#__PURE__*/ _interop_require_default(require("next/dist/compiled/bytes"));
const _etag = require("../../lib/etag");
const _sendpayload = require("../../send-payload");
const _stream = require("stream");
const _iserror = /*#__PURE__*/ _interop_require_default(require("../../../lib/is-error"));
const _utils = require("../../../shared/lib/utils");
const _interopdefault = require("../../../lib/interop-default");
const _index = require("./../index");
const _getcookieparser = require("./../get-cookie-parser");
const _constants = require("../../../lib/constants");
const _trygetpreviewdata = require("./try-get-preview-data");
const _parsebody = require("./parse-body");
function _interop_require_default(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
function getMaxContentLength(responseLimit) {
    if (responseLimit && typeof responseLimit !== 'boolean') {
        return _bytes.default.parse(responseLimit);
    }
    return _index.RESPONSE_LIMIT_DEFAULT;
}
/**
 * Send `any` body to response
 * @param req request object
 * @param res response object
 * @param body of response
 */ function sendData(req, res, body) {
    if (body === null || body === undefined) {
        res.end();
        return;
    }
    // strip irrelevant headers/body
    if (res.statusCode === 204 || res.statusCode === 304) {
        res.removeHeader('Content-Type');
        res.removeHeader('Content-Length');
        res.removeHeader('Transfer-Encoding');
        if (process.env.NODE_ENV === 'development' && body) {
            console.warn(`A body was attempted to be set with a 204 statusCode for ${req.url}, this is invalid and the body was ignored.\n` + `See more info here https://nextjs.org/docs/messages/invalid-api-status-body`);
        }
        res.end();
        return;
    }
    const contentType = res.getHeader('Content-Type');
    if (body instanceof _stream.Stream) {
        if (!contentType) {
            res.setHeader('Content-Type', 'application/octet-stream');
        }
        body.pipe(res);
        return;
    }
    const isJSONLike = [
        'object',
        'number',
        'boolean'
    ].includes(typeof body);
    const stringifiedBody = isJSONLike ? JSON.stringify(body) : body;
    const etag = (0, _etag.generateETag)(stringifiedBody);
    if ((0, _sendpayload.sendEtagResponse)(req, res, etag)) {
        return;
    }
    if (Buffer.isBuffer(body)) {
        if (!contentType) {
            res.setHeader('Content-Type', 'application/octet-stream');
        }
        res.setHeader('Content-Length', body.length);
        res.end(body);
        return;
    }
    if (isJSONLike) {
        res.setHeader('Content-Type', _constants.JSON_CONTENT_TYPE_HEADER);
    }
    res.setHeader('Content-Length', Buffer.byteLength(stringifiedBody));
    res.end(stringifiedBody);
}
/**
 * Send `JSON` object
 * @param res response object
 * @param jsonBody of data
 */ function sendJson(res, jsonBody) {
    // Set header to application/json
    res.setHeader('Content-Type', _constants.JSON_CONTENT_TYPE_HEADER);
    // Use send to handle request
    res.send(JSON.stringify(jsonBody));
}
function isValidData(str) {
    return typeof str === 'string' && str.length >= 16;
}
function setDraftMode(res, options) {
    if (!isValidData(options.previewModeId)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeId'), "__NEXT_ERROR_CODE", {
            value: "E169",
            enumerable: false,
            configurable: true
        });
    }
    const expires = options.enable ? undefined : new Date(0);
    // To delete a cookie, set `expires` to a date in the past:
    // https://tools.ietf.org/html/rfc6265#section-4.1.1
    // `Max-Age: 0` is not valid, thus ignored, and the cookie is persisted.
    const { serialize } = require('next/dist/compiled/cookie');
    const previous = res.getHeader('Set-Cookie');
    res.setHeader(`Set-Cookie`, [
        ...typeof previous === 'string' ? [
            previous
        ] : Array.isArray(previous) ? previous : [],
        serialize(_index.COOKIE_NAME_PRERENDER_BYPASS, options.previewModeId, {
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            expires
        })
    ]);
    return res;
}
function setPreviewData(res, data, options) {
    if (!isValidData(options.previewModeId)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeId'), "__NEXT_ERROR_CODE", {
            value: "E169",
            enumerable: false,
            configurable: true
        });
    }
    if (!isValidData(options.previewModeEncryptionKey)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeEncryptionKey'), "__NEXT_ERROR_CODE", {
            value: "E334",
            enumerable: false,
            configurable: true
        });
    }
    if (!isValidData(options.previewModeSigningKey)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeSigningKey'), "__NEXT_ERROR_CODE", {
            value: "E436",
            enumerable: false,
            configurable: true
        });
    }
    const jsonwebtoken = require('next/dist/compiled/jsonwebtoken');
    const { encryptWithSecret } = require('../../crypto-utils');
    const payload = jsonwebtoken.sign({
        data: encryptWithSecret(Buffer.from(options.previewModeEncryptionKey), JSON.stringify(data))
    }, options.previewModeSigningKey, {
        algorithm: 'HS256',
        ...options.maxAge !== undefined ? {
            expiresIn: options.maxAge
        } : undefined
    });
    // limit preview mode cookie to 2KB since we shouldn't store too much
    // data here and browsers drop cookies over 4KB
    if (payload.length > 2048) {
        throw Object.defineProperty(new Error(`Preview data is limited to 2KB currently, reduce how much data you are storing as preview data to continue`), "__NEXT_ERROR_CODE", {
            value: "E465",
            enumerable: false,
            configurable: true
        });
    }
    const { serialize } = require('next/dist/compiled/cookie');
    const previous = res.getHeader('Set-Cookie');
    res.setHeader(`Set-Cookie`, [
        ...typeof previous === 'string' ? [
            previous
        ] : Array.isArray(previous) ? previous : [],
        serialize(_index.COOKIE_NAME_PRERENDER_BYPASS, options.previewModeId, {
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            ...options.maxAge !== undefined ? {
                maxAge: options.maxAge
            } : undefined,
            ...options.path !== undefined ? {
                path: options.path
            } : undefined
        }),
        serialize(_index.COOKIE_NAME_PRERENDER_DATA, payload, {
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            ...options.maxAge !== undefined ? {
                maxAge: options.maxAge
            } : undefined,
            ...options.path !== undefined ? {
                path: options.path
            } : undefined
        })
    ]);
    return res;
}
async function revalidate(urlPath, opts, req, context) {
    if (typeof urlPath !== 'string' || !urlPath.startsWith('/')) {
        throw Object.defineProperty(new Error(`Invalid urlPath provided to revalidate(), must be a path e.g. /blog/post-1, received ${urlPath}`), "__NEXT_ERROR_CODE", {
            value: "E153",
            enumerable: false,
            configurable: true
        });
    }
    const revalidateHeaders = {
        [_constants.PRERENDER_REVALIDATE_HEADER]: context.previewModeId,
        ...opts.unstable_onlyGenerated ? {
            [_constants.PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER]: '1'
        } : {}
    };
    const allowedRevalidateHeaderKeys = [
        ...context.allowedRevalidateHeaderKeys || []
    ];
    if (context.trustHostHeader || context.dev) {
        allowedRevalidateHeaderKeys.push('cookie');
    }
    if (context.trustHostHeader) {
        allowedRevalidateHeaderKeys.push('x-vercel-protection-bypass');
    }
    for (const key of Object.keys(req.headers)){
        if (allowedRevalidateHeaderKeys.includes(key)) {
            revalidateHeaders[key] = req.headers[key];
        }
    }
    const internalRevalidate = context.internalRevalidate;
    try {
        // We use the revalidate in router-server if available.
|
||||
// If we are operating without router-server (serverless)
|
||||
// we must go through network layer with fetch request
|
||||
if (internalRevalidate) {
|
||||
return await internalRevalidate({
|
||||
urlPath,
|
||||
revalidateHeaders,
|
||||
opts
|
||||
});
|
||||
}
|
||||
if (context.trustHostHeader) {
|
||||
const res = await fetch(`https://${req.headers.host}${urlPath}`, {
|
||||
method: 'HEAD',
|
||||
headers: revalidateHeaders
|
||||
});
|
||||
// we use the cache header to determine successful revalidate as
|
||||
// a non-200 status code can be returned from a successful revalidate
|
||||
// e.g. notFound: true returns 404 status code but is successful
|
||||
const cacheHeader = res.headers.get('x-vercel-cache') || res.headers.get('x-nextjs-cache');
|
||||
if ((cacheHeader == null ? void 0 : cacheHeader.toUpperCase()) !== 'REVALIDATED' && res.status !== 200 && !(res.status === 404 && opts.unstable_onlyGenerated)) {
|
||||
throw Object.defineProperty(new Error(`Invalid response ${res.status}`), "__NEXT_ERROR_CODE", {
|
||||
value: "E175",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
} else {
|
||||
throw Object.defineProperty(new Error(`Invariant: missing internal router-server-methods this is an internal bug`), "__NEXT_ERROR_CODE", {
|
||||
value: "E676",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
throw Object.defineProperty(new Error(`Failed to revalidate ${urlPath}: ${(0, _iserror.default)(err) ? err.message : err}`), "__NEXT_ERROR_CODE", {
|
||||
value: "E240",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
async function apiResolver(req, res, query, resolverModule, apiContext, propagateError, dev, page, onError) {
|
||||
const apiReq = req;
|
||||
const apiRes = res;
|
||||
try {
|
||||
var _config_api, _config_api1, _config_api2;
|
||||
if (!resolverModule) {
|
||||
res.statusCode = 404;
|
||||
res.end('Not Found');
|
||||
return;
|
||||
}
|
||||
const config = resolverModule.config || {};
|
||||
const bodyParser = ((_config_api = config.api) == null ? void 0 : _config_api.bodyParser) !== false;
|
||||
const responseLimit = ((_config_api1 = config.api) == null ? void 0 : _config_api1.responseLimit) ?? true;
|
||||
const externalResolver = ((_config_api2 = config.api) == null ? void 0 : _config_api2.externalResolver) || false;
|
||||
// Parsing of cookies
|
||||
(0, _index.setLazyProp)({
|
||||
req: apiReq
|
||||
}, 'cookies', (0, _getcookieparser.getCookieParser)(req.headers));
|
||||
// Ensure req.query is a writable, enumerable property by using Object.defineProperty.
|
||||
// This addresses Express 5.x, which defines query as a getter only (read-only).
|
||||
Object.defineProperty(apiReq, 'query', {
|
||||
value: {
|
||||
...query
|
||||
},
|
||||
writable: true,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
// Parsing preview data
|
||||
(0, _index.setLazyProp)({
|
||||
req: apiReq
|
||||
}, 'previewData', ()=>(0, _trygetpreviewdata.tryGetPreviewData)(req, res, apiContext, !!apiContext.multiZoneDraftMode));
|
||||
// Checking if preview mode is enabled
|
||||
(0, _index.setLazyProp)({
|
||||
req: apiReq
|
||||
}, 'preview', ()=>apiReq.previewData !== false ? true : undefined);
|
||||
// Set draftMode to the same value as preview
|
||||
(0, _index.setLazyProp)({
|
||||
req: apiReq
|
||||
}, 'draftMode', ()=>apiReq.preview);
|
||||
// Parsing of body
|
||||
if (bodyParser && !apiReq.body) {
|
||||
apiReq.body = await (0, _parsebody.parseBody)(apiReq, config.api && config.api.bodyParser && config.api.bodyParser.sizeLimit ? config.api.bodyParser.sizeLimit : '1mb');
|
||||
}
|
||||
let contentLength = 0;
|
||||
const maxContentLength = getMaxContentLength(responseLimit);
|
||||
const writeData = apiRes.write;
|
||||
const endResponse = apiRes.end;
|
||||
apiRes.write = (...args)=>{
|
||||
contentLength += Buffer.byteLength(args[0] || '');
|
||||
return writeData.apply(apiRes, args);
|
||||
};
|
||||
apiRes.end = (...args)=>{
|
||||
if (args.length && typeof args[0] !== 'function') {
|
||||
contentLength += Buffer.byteLength(args[0] || '');
|
||||
}
|
||||
if (responseLimit && contentLength >= maxContentLength) {
|
||||
console.warn(`API response for ${req.url} exceeds ${_bytes.default.format(maxContentLength)}. API Routes are meant to respond quickly. https://nextjs.org/docs/messages/api-routes-response-size-limit`);
|
||||
}
|
||||
return endResponse.apply(apiRes, args);
|
||||
};
|
||||
apiRes.status = (statusCode)=>(0, _index.sendStatusCode)(apiRes, statusCode);
|
||||
apiRes.send = (data)=>sendData(apiReq, apiRes, data);
|
||||
apiRes.json = (data)=>sendJson(apiRes, data);
|
||||
apiRes.redirect = (statusOrUrl, url)=>(0, _index.redirect)(apiRes, statusOrUrl, url);
|
||||
apiRes.setDraftMode = (options = {
|
||||
enable: true
|
||||
})=>setDraftMode(apiRes, Object.assign({}, apiContext, options));
|
||||
apiRes.setPreviewData = (data, options = {})=>setPreviewData(apiRes, data, Object.assign({}, apiContext, options));
|
||||
apiRes.clearPreviewData = (options = {})=>(0, _index.clearPreviewData)(apiRes, options);
|
||||
apiRes.revalidate = (urlPath, opts)=>revalidate(urlPath, opts || {}, req, apiContext);
|
||||
const resolver = (0, _interopdefault.interopDefault)(resolverModule);
|
||||
let wasPiped = false;
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
// listen for pipe event and don't show resolve warning
|
||||
res.once('pipe', ()=>wasPiped = true);
|
||||
}
|
||||
const apiRouteResult = await resolver(req, res);
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
if (typeof apiRouteResult !== 'undefined') {
|
||||
if (apiRouteResult instanceof Response) {
|
||||
throw Object.defineProperty(new Error('API route returned a Response object in the Node.js runtime, this is not supported. Please use `runtime: "edge"` instead: https://nextjs.org/docs/api-routes/edge-api-routes'), "__NEXT_ERROR_CODE", {
|
||||
value: "E36",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
console.warn(`API handler should not return a value, received ${typeof apiRouteResult}.`);
|
||||
}
|
||||
if (!externalResolver && !(0, _utils.isResSent)(res) && !wasPiped) {
|
||||
console.warn(`API resolved without sending a response for ${req.url}, this may result in stalled requests.`);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
await (onError == null ? void 0 : onError(err, {
|
||||
method: req.method || 'GET',
|
||||
headers: req.headers,
|
||||
path: req.url || '/'
|
||||
}, {
|
||||
routerKind: 'Pages Router',
|
||||
routePath: page || '',
|
||||
routeType: 'route',
|
||||
revalidateReason: undefined
|
||||
}));
|
||||
if (err instanceof _index.ApiError) {
|
||||
(0, _index.sendError)(apiRes, err.statusCode, err.message);
|
||||
} else {
|
||||
if (dev) {
|
||||
if ((0, _iserror.default)(err)) {
|
||||
err.page = page;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
console.error(err);
|
||||
if (propagateError) {
|
||||
throw err;
|
||||
}
|
||||
(0, _index.sendError)(apiRes, 500, 'Internal Server Error');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=api-resolver.js.map
1
apps/public-web/node_modules/next/dist/server/api-utils/node/api-resolver.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7
apps/public-web/node_modules/next/dist/server/api-utils/node/parse-body.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
import type { IncomingMessage } from 'http';
import type { SizeLimit } from '../../../types';
/**
 * Parse incoming message like `json` or `urlencoded`
 * @param req request object
 */
export declare function parseBody(req: IncomingMessage, limit: SizeLimit): Promise<any>;
79
apps/public-web/node_modules/next/dist/server/api-utils/node/parse-body.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "parseBody", {
    enumerable: true,
    get: function() {
        return parseBody;
    }
});
const _contenttype = require("next/dist/compiled/content-type");
const _iserror = /*#__PURE__*/ _interop_require_default(require("../../../lib/is-error"));
const _index = require("../index");
function _interop_require_default(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
/**
 * Parse `JSON` and handles invalid `JSON` strings
 * @param str `JSON` string
 */ function parseJson(str) {
    if (str.length === 0) {
        // special-case empty json body, as it's a common client-side mistake
        return {};
    }
    try {
        return JSON.parse(str);
    } catch (e) {
        throw Object.defineProperty(new _index.ApiError(400, 'Invalid JSON'), "__NEXT_ERROR_CODE", {
            value: "E394",
            enumerable: false,
            configurable: true
        });
    }
}
async function parseBody(req, limit) {
    let contentType;
    try {
        contentType = (0, _contenttype.parse)(req.headers['content-type'] || 'text/plain');
    } catch {
        contentType = (0, _contenttype.parse)('text/plain');
    }
    const { type, parameters } = contentType;
    const encoding = parameters.charset || 'utf-8';
    let buffer;
    try {
        const getRawBody = require('next/dist/compiled/raw-body');
        buffer = await getRawBody(req, {
            encoding,
            limit
        });
    } catch (e) {
        if ((0, _iserror.default)(e) && e.type === 'entity.too.large') {
            throw Object.defineProperty(new _index.ApiError(413, `Body exceeded ${limit} limit`), "__NEXT_ERROR_CODE", {
                value: "E394",
                enumerable: false,
                configurable: true
            });
        } else {
            throw Object.defineProperty(new _index.ApiError(400, 'Invalid body'), "__NEXT_ERROR_CODE", {
                value: "E394",
                enumerable: false,
                configurable: true
            });
        }
    }
    const body = buffer.toString();
    if (type === 'application/json' || type === 'application/ld+json') {
        return parseJson(body);
    } else if (type === 'application/x-www-form-urlencoded') {
        const qs = require('querystring');
        return qs.decode(body);
    } else {
        return body;
    }
}

//# sourceMappingURL=parse-body.js.map
1
apps/public-web/node_modules/next/dist/server/api-utils/node/parse-body.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/api-utils/node/parse-body.ts"],"sourcesContent":["import type { IncomingMessage } from 'http'\n\nimport { parse } from 'next/dist/compiled/content-type'\nimport isError from '../../../lib/is-error'\nimport type { SizeLimit } from '../../../types'\nimport { ApiError } from '../index'\n\n/**\n * Parse `JSON` and handles invalid `JSON` strings\n * @param str `JSON` string\n */\nfunction parseJson(str: string): object {\n if (str.length === 0) {\n // special-case empty json body, as it's a common client-side mistake\n return {}\n }\n\n try {\n return JSON.parse(str)\n } catch (e) {\n throw new ApiError(400, 'Invalid JSON')\n }\n}\n\n/**\n * Parse incoming message like `json` or `urlencoded`\n * @param req request object\n */\nexport async function parseBody(\n req: IncomingMessage,\n limit: SizeLimit\n): Promise<any> {\n let contentType\n try {\n contentType = parse(req.headers['content-type'] || 'text/plain')\n } catch {\n contentType = parse('text/plain')\n }\n const { type, parameters } = contentType\n const encoding = parameters.charset || 'utf-8'\n\n let buffer\n\n try {\n const getRawBody =\n require('next/dist/compiled/raw-body') as typeof import('next/dist/compiled/raw-body')\n buffer = await getRawBody(req, { encoding, limit })\n } catch (e) {\n if (isError(e) && e.type === 'entity.too.large') {\n throw new ApiError(413, `Body exceeded ${limit} limit`)\n } else {\n throw new ApiError(400, 'Invalid body')\n }\n }\n\n const body = buffer.toString()\n\n if (type === 'application/json' || type === 'application/ld+json') {\n return parseJson(body)\n } else if (type === 'application/x-www-form-urlencoded') {\n const qs = require('querystring') as typeof import('querystring')\n return qs.decode(body)\n } else {\n return body\n }\n}\n"],"names":["parseBody","parseJson","str","length","JSON","parse","e","ApiError","req","limit","contentType","headers","type","parameters","encoding","charset","buffer","getRawBody","require","isError","body","toString","qs","decode"],"mappings":";;;;+BA4BsBA;;;eAAAA;;;6BA1BA;gEACF;uBAEK;;;;;;AAEzB;;;CAGC,GACD,SAASC,UAAUC,GAAW;IAC5B,IAAIA,IAAIC,MAAM,KAAK,GAAG;QACpB,qEAAqE;QACrE,OAAO,CAAC;IACV;IAEA,IAAI;QACF,OAAOC,KAAKC,KAAK,CAACH;IACpB,EAAE,OAAOI,GAAG;QACV,MAAM,qBAAiC,CAAjC,IAAIC,eAAQ,CAAC,KAAK,iBAAlB,qBAAA;mBAAA;wBAAA;0BAAA;QAAgC;IACxC;AACF;AAMO,eAAeP,UACpBQ,GAAoB,EACpBC,KAAgB;IAEhB,IAAIC;IACJ,IAAI;QACFA,cAAcL,IAAAA,kBAAK,EAACG,IAAIG,OAAO,CAAC,eAAe,IAAI;IACrD,EAAE,OAAM;QACND,cAAcL,IAAAA,kBAAK,EAAC;IACtB;IACA,MAAM,EAAEO,IAAI,EAAEC,UAAU,EAAE,GAAGH;IAC7B,MAAMI,WAAWD,WAAWE,OAAO,IAAI;IAEvC,IAAIC;IAEJ,IAAI;QACF,MAAMC,aACJC,QAAQ;QACVF,SAAS,MAAMC,WAAWT,KAAK;YAAEM;YAAUL;QAAM;IACnD,EAAE,OAAOH,GAAG;QACV,IAAIa,IAAAA,gBAAO,EAACb,MAAMA,EAAEM,IAAI,KAAK,oBAAoB;YAC/C,MAAM,qBAAiD,CAAjD,IAAIL,eAAQ,CAAC,KAAK,CAAC,cAAc,EAAEE,MAAM,MAAM,CAAC,GAAhD,qBAAA;uBAAA;4BAAA;8BAAA;YAAgD;QACxD,OAAO;YACL,MAAM,qBAAiC,CAAjC,IAAIF,eAAQ,CAAC,KAAK,iBAAlB,qBAAA;uBAAA;4BAAA;8BAAA;YAAgC;QACxC;IACF;IAEA,MAAMa,OAAOJ,OAAOK,QAAQ;IAE5B,IAAIT,SAAS,sBAAsBA,SAAS,uBAAuB;QACjE,OAAOX,UAAUmB;IACnB,OAAO,IAAIR,SAAS,qCAAqC;QACvD,MAAMU,KAAKJ,QAAQ;QACnB,OAAOI,GAAGC,MAAM,CAACH;IACnB,OAAO;QACL,OAAOA;IACT;AACF","ignoreList":[0]}
5
apps/public-web/node_modules/next/dist/server/api-utils/node/try-get-preview-data.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
import type { IncomingMessage, ServerResponse } from 'http';
import type { __ApiPreviewProps } from '../.';
import type { BaseNextRequest, BaseNextResponse } from '../../base-http';
import type { PreviewData } from '../../../types';
export declare function tryGetPreviewData(req: IncomingMessage | BaseNextRequest | Request, res: ServerResponse | BaseNextResponse, options: __ApiPreviewProps, multiZoneDraftMode: boolean): PreviewData;
86
apps/public-web/node_modules/next/dist/server/api-utils/node/try-get-preview-data.js
generated
vendored
Normal file
@@ -0,0 +1,86 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "tryGetPreviewData", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return tryGetPreviewData;
|
||||
}
|
||||
});
|
||||
const _ = require("../.");
|
||||
const _index = require("../index");
|
||||
const _cookies = require("../../web/spec-extension/cookies");
|
||||
const _headers = require("../../web/spec-extension/adapters/headers");
|
||||
function tryGetPreviewData(req, res, options, multiZoneDraftMode) {
|
||||
var _cookies_get, _cookies_get1;
|
||||
// if an On-Demand revalidation is being done preview mode
|
||||
// is disabled
|
||||
if (options && (0, _.checkIsOnDemandRevalidate)(req, options).isOnDemandRevalidate) {
|
||||
return false;
|
||||
}
|
||||
// Read cached preview data if present
|
||||
// TODO: use request metadata instead of a symbol
|
||||
if (_index.SYMBOL_PREVIEW_DATA in req) {
|
||||
return req[_index.SYMBOL_PREVIEW_DATA];
|
||||
}
|
||||
const headers = _headers.HeadersAdapter.from(req.headers);
|
||||
const cookies = new _cookies.RequestCookies(headers);
|
||||
const previewModeId = (_cookies_get = cookies.get(_index.COOKIE_NAME_PRERENDER_BYPASS)) == null ? void 0 : _cookies_get.value;
|
||||
const tokenPreviewData = (_cookies_get1 = cookies.get(_index.COOKIE_NAME_PRERENDER_DATA)) == null ? void 0 : _cookies_get1.value;
|
||||
// Case: preview mode cookie set but data cookie is not set
|
||||
if (previewModeId && !tokenPreviewData && previewModeId === options.previewModeId) {
|
||||
// This is "Draft Mode" which doesn't use
|
||||
// previewData, so we return an empty object
|
||||
// for backwards compat with "Preview Mode".
|
||||
const data = {};
|
||||
Object.defineProperty(req, _index.SYMBOL_PREVIEW_DATA, {
|
||||
value: data,
|
||||
enumerable: false
|
||||
});
|
||||
return data;
|
||||
}
|
||||
// Case: neither cookie is set.
|
||||
if (!previewModeId && !tokenPreviewData) {
|
||||
return false;
|
||||
}
|
||||
// Case: one cookie is set, but not the other.
|
||||
if (!previewModeId || !tokenPreviewData) {
|
||||
if (!multiZoneDraftMode) {
|
||||
(0, _index.clearPreviewData)(res);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
// Case: preview session is for an old build.
|
||||
if (previewModeId !== options.previewModeId) {
|
||||
if (!multiZoneDraftMode) {
|
||||
(0, _index.clearPreviewData)(res);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
let encryptedPreviewData;
|
||||
try {
|
||||
const jsonwebtoken = require('next/dist/compiled/jsonwebtoken');
|
||||
encryptedPreviewData = jsonwebtoken.verify(tokenPreviewData, options.previewModeSigningKey);
|
||||
} catch {
|
||||
// TODO: warn
|
||||
(0, _index.clearPreviewData)(res);
|
||||
return false;
|
||||
}
|
||||
const { decryptWithSecret } = require('../../crypto-utils');
|
||||
const decryptedPreviewData = decryptWithSecret(Buffer.from(options.previewModeEncryptionKey), encryptedPreviewData.data);
|
||||
try {
|
||||
// TODO: strict runtime type checking
|
||||
const data = JSON.parse(decryptedPreviewData);
|
||||
// Cache lookup
|
||||
Object.defineProperty(req, _index.SYMBOL_PREVIEW_DATA, {
|
||||
value: data,
|
||||
enumerable: false
|
||||
});
|
||||
return data;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=try-get-preview-data.js.map
|
||||
1
apps/public-web/node_modules/next/dist/server/api-utils/node/try-get-preview-data.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
apps/public-web/node_modules/next/dist/server/api-utils/web.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
export declare function byteLength(payload: string): number;
17
apps/public-web/node_modules/next/dist/server/api-utils/web.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
// Buffer.byteLength polyfill in the Edge runtime, with only utf8 strings
// supported at the moment.
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "byteLength", {
    enumerable: true,
    get: function() {
        return byteLength;
    }
});
function byteLength(payload) {
    return new TextEncoder().encode(payload).buffer.byteLength;
}

//# sourceMappingURL=web.js.map
1
apps/public-web/node_modules/next/dist/server/api-utils/web.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/api-utils/web.ts"],"sourcesContent":["// Buffer.byteLength polyfill in the Edge runtime, with only utf8 strings\n// supported at the moment.\nexport function byteLength(payload: string): number {\n return new TextEncoder().encode(payload).buffer.byteLength\n}\n"],"names":["byteLength","payload","TextEncoder","encode","buffer"],"mappings":"AAAA,yEAAyE;AACzE,2BAA2B;;;;;+BACXA;;;eAAAA;;;AAAT,SAASA,WAAWC,OAAe;IACxC,OAAO,IAAIC,cAAcC,MAAM,CAACF,SAASG,MAAM,CAACJ,UAAU;AAC5D","ignoreList":[0]}
2
apps/public-web/node_modules/next/dist/server/app-render/action-async-storage-instance.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
import type { ActionAsyncStorage } from './action-async-storage.external';
export declare const actionAsyncStorageInstance: ActionAsyncStorage;
14
apps/public-web/node_modules/next/dist/server/app-render/action-async-storage-instance.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "actionAsyncStorageInstance", {
    enumerable: true,
    get: function() {
        return actionAsyncStorageInstance;
    }
});
const _asynclocalstorage = require("./async-local-storage");
const actionAsyncStorageInstance = (0, _asynclocalstorage.createAsyncLocalStorage)();

//# sourceMappingURL=action-async-storage-instance.js.map
1
apps/public-web/node_modules/next/dist/server/app-render/action-async-storage-instance.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/action-async-storage-instance.ts"],"sourcesContent":["import type { ActionAsyncStorage } from './action-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const actionAsyncStorageInstance: ActionAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["actionAsyncStorageInstance","createAsyncLocalStorage"],"mappings":";;;;+BAGaA;;;eAAAA;;;mCAF2B;AAEjC,MAAMA,6BACXC,IAAAA,0CAAuB","ignoreList":[0]}
8
apps/public-web/node_modules/next/dist/server/app-render/action-async-storage.external.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
import type { AsyncLocalStorage } from 'async_hooks';
import { actionAsyncStorageInstance } from './action-async-storage-instance';
export interface ActionStore {
    readonly isAction?: boolean;
    readonly isAppRoute?: boolean;
}
export type ActionAsyncStorage = AsyncLocalStorage<ActionStore>;
export { actionAsyncStorageInstance as actionAsyncStorage };
13
apps/public-web/node_modules/next/dist/server/app-render/action-async-storage.external.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "actionAsyncStorage", {
    enumerable: true,
    get: function() {
        return _actionasyncstorageinstance.actionAsyncStorageInstance;
    }
});
const _actionasyncstorageinstance = require("./action-async-storage-instance");

//# sourceMappingURL=action-async-storage.external.js.map
1
apps/public-web/node_modules/next/dist/server/app-render/action-async-storage.external.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/action-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { actionAsyncStorageInstance } from './action-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nexport interface ActionStore {\n readonly isAction?: boolean\n readonly isAppRoute?: boolean\n}\n\nexport type ActionAsyncStorage = AsyncLocalStorage<ActionStore>\n\nexport { actionAsyncStorageInstance as actionAsyncStorage }\n"],"names":["actionAsyncStorage","actionAsyncStorageInstance"],"mappings":";;;;+BAWuCA;;;eAA9BC,sDAA0B;;;4CARQ","ignoreList":[0]}
42
apps/public-web/node_modules/next/dist/server/app-render/action-handler.d.ts
generated
vendored
Normal file
@@ -0,0 +1,42 @@
import type { IncomingHttpHeaders } from 'node:http';
import type { SizeLimit } from '../../types';
import type { RequestStore } from '../app-render/work-unit-async-storage.external';
import type { AppRenderContext, GenerateFlight } from './app-render';
import type { AppPageModule } from '../route-modules/app-page/module';
import type { BaseNextRequest, BaseNextResponse } from '../base-http';
import RenderResult, { type AppPageRenderResultMetadata } from '../render-result';
import type { WorkStore } from '../app-render/work-async-storage.external';
declare const enum HostType {
    XForwardedHost = "x-forwarded-host",
    Host = "host"
}
export declare function parseHostHeader(headers: IncomingHttpHeaders, originDomain?: string): {
    type: HostType;
    value: string;
} | undefined;
type ServerActionsConfig = {
    bodySizeLimit?: SizeLimit;
    allowedOrigins?: string[];
};
type HandleActionResult = {
    /** An MPA action threw notFound(), and we need to render the appropriate HTML */
    type: 'not-found';
} | {
    type: 'done';
    result: RenderResult | undefined;
    formState?: any;
}
/** The request turned out not to be a server action. */
| null;
export declare function handleAction({ req, res, ComponentMod, generateFlight, workStore, requestStore, serverActions, ctx, metadata, }: {
    req: BaseNextRequest;
    res: BaseNextResponse;
    ComponentMod: AppPageModule;
    generateFlight: GenerateFlight;
    workStore: WorkStore;
    requestStore: RequestStore;
    serverActions?: ServerActionsConfig;
    ctx: AppRenderContext;
    metadata: AppPageRenderResultMetadata;
}): Promise<HandleActionResult>;
export {};
952
apps/public-web/node_modules/next/dist/server/app-render/action-handler.js
generated
vendored
Normal file
@@ -0,0 +1,952 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
handleAction: null,
|
||||
parseHostHeader: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
handleAction: function() {
|
||||
return handleAction;
|
||||
},
|
||||
parseHostHeader: function() {
|
||||
return parseHostHeader;
|
||||
}
|
||||
});
|
||||
const _approuterheaders = require("../../client/components/app-router-headers");
|
||||
const _httpaccessfallback = require("../../client/components/http-access-fallback/http-access-fallback");
|
||||
const _redirect = require("../../client/components/redirect");
|
||||
const _redirecterror = require("../../client/components/redirect-error");
|
||||
const _renderresult = /*#__PURE__*/ _interop_require_default(require("../render-result"));
|
||||
const _flightrenderresult = require("./flight-render-result");
|
||||
const _utils = require("../lib/server-ipc/utils");
|
||||
const _requestcookies = require("../web/spec-extension/adapters/request-cookies");
|
||||
const _constants = require("../../lib/constants");
|
||||
const _serveractionrequestmeta = require("../lib/server-action-request-meta");
|
||||
const _csrfprotection = require("./csrf-protection");
|
||||
const _log = require("../../build/output/log");
|
||||
const _cookies = require("../web/spec-extension/cookies");
|
||||
const _headers = require("../web/spec-extension/adapters/headers");
|
||||
const _utils1 = require("../web/utils");
|
||||
const _manifestssingleton = require("./manifests-singleton");
|
||||
const _helpers = require("../base-http/helpers");
|
||||
const _redirectstatuscode = require("../../client/components/redirect-status-code");
|
||||
const _requeststore = require("../async-storage/request-store");
|
||||
const _workunitasyncstorageexternal = require("../app-render/work-unit-async-storage.external");
|
||||
const _invarianterror = require("../../shared/lib/invariant-error");
|
||||
const _revalidationutils = require("../revalidation-utils");
|
||||
const _requestmeta = require("../request-meta");
|
||||
const _setcachebustingsearchparam = require("../../client/components/router-reducer/set-cache-busting-search-param");
|
||||
const _actionrevalidationkind = require("../../shared/lib/action-revalidation-kind");
|
||||
function _interop_require_default(obj) {
|
||||
return obj && obj.__esModule ? obj : {
|
||||
default: obj
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Checks if the app has any server actions defined in any runtime.
|
||||
*/ function hasServerActions() {
|
||||
const serverActionsManifest = (0, _manifestssingleton.getServerActionsManifest)();
|
||||
return Object.keys(serverActionsManifest.node).length > 0 || Object.keys(serverActionsManifest.edge).length > 0;
|
||||
}
|
||||
function nodeHeadersToRecord(headers) {
|
||||
const record = {};
|
||||
for (const [key, value] of Object.entries(headers)){
|
||||
if (value !== undefined) {
|
||||
record[key] = Array.isArray(value) ? value.join(', ') : `${value}`;
|
||||
}
|
||||
}
|
||||
return record;
|
||||
}
|
||||
function getForwardedHeaders(req, res) {
|
||||
// Get request headers and cookies
|
||||
const requestHeaders = req.headers;
|
||||
const requestCookies = new _cookies.RequestCookies(_headers.HeadersAdapter.from(requestHeaders));
|
||||
// Get response headers and cookies
|
||||
const responseHeaders = res.getHeaders();
|
||||
const responseCookies = new _cookies.ResponseCookies((0, _utils1.fromNodeOutgoingHttpHeaders)(responseHeaders));
|
||||
// Merge request and response headers
|
||||
const mergedHeaders = (0, _utils.filterReqHeaders)({
|
||||
...nodeHeadersToRecord(requestHeaders),
|
||||
...nodeHeadersToRecord(responseHeaders)
|
||||
}, _utils.actionsForbiddenHeaders);
|
||||
// Merge cookies into requestCookies, so responseCookies always take precedence
|
||||
// and overwrite/delete those from requestCookies.
|
||||
responseCookies.getAll().forEach((cookie)=>{
|
||||
if (typeof cookie.value === 'undefined') {
|
||||
requestCookies.delete(cookie.name);
|
||||
} else {
|
||||
requestCookies.set(cookie);
|
||||
}
|
||||
});
|
||||
// Update the 'cookie' header with the merged cookies
|
||||
mergedHeaders['cookie'] = requestCookies.toString();
|
||||
// Remove headers that should not be forwarded
|
||||
delete mergedHeaders['transfer-encoding'];
|
||||
return new Headers(mergedHeaders);
|
||||
}
|
||||
function addRevalidationHeader(res, { workStore, requestStore }) {
|
||||
var _workStore_pendingRevalidatedTags;
|
||||
// If a tag was revalidated, the client router needs to invalidate all the
|
||||
// client router cache as they may be stale. And if a path was revalidated, the
|
||||
// client needs to invalidate all subtrees below that path.
|
||||
// TODO: Currently we don't send the specific tags or paths to the client,
|
||||
// we just send a flag indicating that all the static data on the client
|
||||
// should be invalidated. In the future, this will likely be a Bloom filter
|
||||
// or bitmask of some kind.
|
||||
// TODO-APP: Currently the prefetch cache doesn't have subtree information,
|
||||
// so we need to invalidate the entire cache if a path was revalidated.
|
||||
// TODO-APP: Currently paths are treated as tags, so the second element of the tuple
|
||||
// is always empty.
|
||||
const isTagRevalidated = ((_workStore_pendingRevalidatedTags = workStore.pendingRevalidatedTags) == null ? void 0 : _workStore_pendingRevalidatedTags.length) ? 1 : 0;
|
||||
const isCookieRevalidated = (0, _requestcookies.getModifiedCookieValues)(requestStore.mutableCookies).length ? 1 : 0;
|
||||
// First check if a tag, cookie, or path was revalidated.
|
||||
if (isTagRevalidated || isCookieRevalidated) {
|
||||
res.setHeader(_approuterheaders.NEXT_ACTION_REVALIDATED_HEADER, JSON.stringify(_actionrevalidationkind.ActionDidRevalidateStaticAndDynamic));
|
||||
} else if (// Check for refresh() actions. This will invalidate only the dynamic data.
|
||||
workStore.pathWasRevalidated !== undefined && workStore.pathWasRevalidated !== _actionrevalidationkind.ActionDidNotRevalidate) {
|
||||
res.setHeader(_approuterheaders.NEXT_ACTION_REVALIDATED_HEADER, JSON.stringify(workStore.pathWasRevalidated));
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Forwards a server action request to a separate worker. Used when the requested action is not available in the current worker.
|
||||
*/ async function createForwardedActionResponse(req, res, host, workerPathname, basePath) {
|
||||
var _getRequestMeta;
|
||||
if (!host) {
|
||||
throw Object.defineProperty(new Error('Invariant: Missing `host` header from a forwarded Server Actions request.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E226",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const forwardedHeaders = getForwardedHeaders(req, res);
|
||||
// indicate that this action request was forwarded from another worker
|
||||
// we use this to skip rendering the flight tree so that we don't update the UI
|
||||
// with the response from the forwarded worker
|
||||
forwardedHeaders.set('x-action-forwarded', '1');
|
||||
const proto = ((_getRequestMeta = (0, _requestmeta.getRequestMeta)(req, 'initProtocol')) == null ? void 0 : _getRequestMeta.replace(/:+$/, '')) || 'https';
|
||||
// For standalone or the serverful mode, use the internal origin directly
|
||||
// other than the host headers from the request.
|
||||
const origin = process.env.__NEXT_PRIVATE_ORIGIN || `${proto}://${host.value}`;
|
||||
const fetchUrl = new URL(`${origin}${basePath}${workerPathname}`);
|
||||
try {
|
||||
var _response_headers_get;
|
||||
let body;
|
||||
if (// The type check here ensures that `req` is correctly typed, and the
|
||||
// environment variable check provides dead code elimination.
|
||||
process.env.NEXT_RUNTIME === 'edge' && (0, _helpers.isWebNextRequest)(req)) {
|
||||
if (!req.body) {
|
||||
throw Object.defineProperty(new Error('Invariant: missing request body.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E333",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
body = req.body;
|
||||
} else if (// The type check here ensures that `req` is correctly typed, and the
|
||||
// environment variable check provides dead code elimination.
|
||||
process.env.NEXT_RUNTIME !== 'edge' && (0, _helpers.isNodeNextRequest)(req)) {
|
||||
body = req.stream();
|
||||
} else {
|
||||
throw Object.defineProperty(new Error('Invariant: Unknown request type.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E114",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
// Forward the request to the new worker
|
||||
const response = await fetch(fetchUrl, {
|
||||
method: 'POST',
|
||||
body,
|
||||
duplex: 'half',
|
||||
headers: forwardedHeaders,
|
||||
redirect: 'manual',
|
||||
next: {
|
||||
// @ts-ignore
|
||||
internal: 1
|
||||
}
|
||||
});
|
||||
if ((_response_headers_get = response.headers.get('content-type')) == null ? void 0 : _response_headers_get.startsWith(_approuterheaders.RSC_CONTENT_TYPE_HEADER)) {
|
||||
// copy the headers from the redirect response to the response we're sending
|
||||
for (const [key, value] of response.headers){
|
||||
if (!_utils.actionsForbiddenHeaders.includes(key)) {
|
||||
res.setHeader(key, value);
|
||||
}
|
||||
}
|
||||
return new _flightrenderresult.FlightRenderResult(response.body);
|
||||
} else {
|
||||
var // Since we aren't consuming the response body, we cancel it to avoid memory leaks
|
||||
_response_body;
|
||||
(_response_body = response.body) == null ? void 0 : _response_body.cancel();
|
||||
}
|
||||
} catch (err) {
|
||||
// we couldn't stream the forwarded response, so we'll just return an empty response
|
||||
console.error(`failed to forward action response`, err);
|
||||
}
|
||||
return _renderresult.default.fromStatic('{}', _constants.JSON_CONTENT_TYPE_HEADER);
|
||||
}
|
||||
/**
|
||||
* Returns the parsed redirect URL if we deem that it is hosted by us.
|
||||
*
|
||||
* We handle both relative and absolute redirect URLs.
|
||||
*
|
||||
* In case the redirect URL is not relative to the application we return `null`.
|
||||
*/ function getAppRelativeRedirectUrl(basePath, host, redirectUrl, currentPathname) {
|
||||
if (redirectUrl.startsWith('/')) {
|
||||
// Absolute path - just add basePath
|
||||
return new URL(`${basePath}${redirectUrl}`, 'http://n');
|
||||
} else if (redirectUrl.startsWith('.')) {
|
||||
// Relative path - resolve relative to current pathname
|
||||
let base = currentPathname || '/';
|
||||
// Ensure the base path ends with a slash so relative resolution works correctly
|
||||
// e.g., "./subpage" from "/subdir" should resolve to "/subdir/subpage"
|
||||
// not "/subpage"
|
||||
if (!base.endsWith('/')) {
|
||||
base = base + '/';
|
||||
}
|
||||
const resolved = new URL(redirectUrl, `http://n${base}`);
|
||||
// Include basePath in the final URL
|
||||
return new URL(`${basePath}${resolved.pathname}${resolved.search}${resolved.hash}`, 'http://n');
|
||||
}
|
||||
const parsedRedirectUrl = new URL(redirectUrl);
|
||||
if ((host == null ? void 0 : host.value) !== parsedRedirectUrl.host) {
|
||||
return null;
|
||||
}
|
||||
// At this point the hosts are the same, just confirm we
|
||||
// are routing to a path underneath the `basePath`
|
||||
return parsedRedirectUrl.pathname.startsWith(basePath) ? parsedRedirectUrl : null;
|
||||
}
|
||||
async function createRedirectRenderResult(req, res, originalHost, redirectUrl, redirectType, basePath, workStore, currentPathname) {
|
||||
res.setHeader('x-action-redirect', `${redirectUrl};${redirectType}`);
|
||||
// If we're redirecting to another route of this Next.js application, we'll
|
||||
// try to stream the response from the other worker path. When that works,
|
||||
// we can save an extra roundtrip and avoid a full page reload.
|
||||
// When the redirect URL starts with a `/` or is to the same host, under the
|
||||
// `basePath` we treat it as an app-relative redirect;
|
||||
const appRelativeRedirectUrl = getAppRelativeRedirectUrl(basePath, originalHost, redirectUrl, currentPathname);
|
||||
if (appRelativeRedirectUrl) {
|
||||
var _getRequestMeta;
|
||||
if (!originalHost) {
|
||||
throw Object.defineProperty(new Error('Invariant: Missing `host` header from a forwarded Server Actions request.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E226",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const forwardedHeaders = getForwardedHeaders(req, res);
|
||||
forwardedHeaders.set(_approuterheaders.RSC_HEADER, '1');
|
||||
const proto = ((_getRequestMeta = (0, _requestmeta.getRequestMeta)(req, 'initProtocol')) == null ? void 0 : _getRequestMeta.replace(/:+$/, '')) || 'https';
|
||||
// For standalone or the serverful mode, use the internal origin directly
|
||||
// other than the host headers from the request.
|
||||
const origin = process.env.__NEXT_PRIVATE_ORIGIN || `${proto}://${originalHost.value}`;
|
||||
const fetchUrl = new URL(`${origin}${appRelativeRedirectUrl.pathname}${appRelativeRedirectUrl.search}`);
|
||||
if (workStore.pendingRevalidatedTags) {
|
||||
var _workStore_incrementalCache_prerenderManifest_preview, _workStore_incrementalCache_prerenderManifest, _workStore_incrementalCache;
|
||||
forwardedHeaders.set(_constants.NEXT_CACHE_REVALIDATED_TAGS_HEADER, workStore.pendingRevalidatedTags.map((item)=>item.tag).join(','));
|
||||
forwardedHeaders.set(_constants.NEXT_CACHE_REVALIDATE_TAG_TOKEN_HEADER, ((_workStore_incrementalCache = workStore.incrementalCache) == null ? void 0 : (_workStore_incrementalCache_prerenderManifest = _workStore_incrementalCache.prerenderManifest) == null ? void 0 : (_workStore_incrementalCache_prerenderManifest_preview = _workStore_incrementalCache_prerenderManifest.preview) == null ? void 0 : _workStore_incrementalCache_prerenderManifest_preview.previewModeId) || '');
|
||||
}
|
||||
// Ensures that when the path was revalidated we don't return a partial response on redirects
|
||||
forwardedHeaders.delete(_approuterheaders.NEXT_ROUTER_STATE_TREE_HEADER);
|
||||
// When an action follows a redirect, it's no longer handling an action: it's just a normal RSC request
|
||||
// to the requested URL. We should remove the `next-action` header so that it's not treated as an action
|
||||
forwardedHeaders.delete(_approuterheaders.ACTION_HEADER);
|
||||
try {
|
||||
var _response_headers_get;
|
||||
(0, _setcachebustingsearchparam.setCacheBustingSearchParam)(fetchUrl, {
|
||||
[_approuterheaders.NEXT_ROUTER_PREFETCH_HEADER]: forwardedHeaders.get(_approuterheaders.NEXT_ROUTER_PREFETCH_HEADER) ? '1' : undefined,
|
||||
[_approuterheaders.NEXT_ROUTER_SEGMENT_PREFETCH_HEADER]: forwardedHeaders.get(_approuterheaders.NEXT_ROUTER_SEGMENT_PREFETCH_HEADER) ?? undefined,
|
||||
[_approuterheaders.NEXT_ROUTER_STATE_TREE_HEADER]: forwardedHeaders.get(_approuterheaders.NEXT_ROUTER_STATE_TREE_HEADER) ?? undefined,
|
||||
[_approuterheaders.NEXT_URL]: forwardedHeaders.get(_approuterheaders.NEXT_URL) ?? undefined
|
||||
});
|
||||
const response = await fetch(fetchUrl, {
|
||||
method: 'GET',
|
||||
headers: forwardedHeaders,
|
||||
next: {
|
||||
// @ts-ignore
|
||||
internal: 1
|
||||
}
|
||||
});
|
||||
if ((_response_headers_get = response.headers.get('content-type')) == null ? void 0 : _response_headers_get.startsWith(_approuterheaders.RSC_CONTENT_TYPE_HEADER)) {
|
||||
// copy the headers from the redirect response to the response we're sending
|
||||
for (const [key, value] of response.headers){
|
||||
if (!_utils.actionsForbiddenHeaders.includes(key)) {
|
||||
res.setHeader(key, value);
|
||||
}
|
||||
}
|
||||
return new _flightrenderresult.FlightRenderResult(response.body);
|
||||
} else {
|
||||
var // Since we aren't consuming the response body, we cancel it to avoid memory leaks
|
||||
_response_body;
|
||||
(_response_body = response.body) == null ? void 0 : _response_body.cancel();
|
||||
}
|
||||
} catch (err) {
|
||||
// we couldn't stream the redirect response, so we'll just do a normal redirect
|
||||
console.error(`failed to get redirect response`, err);
|
||||
}
|
||||
}
|
||||
return _renderresult.default.EMPTY;
|
||||
}
|
||||
/**
|
||||
* Ensures the value of the header can't create long logs.
|
||||
*/ function limitUntrustedHeaderValueForLogs(value) {
|
||||
return value.length > 100 ? value.slice(0, 100) + '...' : value;
|
||||
}
|
||||
function parseHostHeader(headers, originDomain) {
|
||||
var _forwardedHostHeader_split_, _forwardedHostHeader_split;
|
||||
const forwardedHostHeader = headers['x-forwarded-host'];
|
||||
const forwardedHostHeaderValue = forwardedHostHeader && Array.isArray(forwardedHostHeader) ? forwardedHostHeader[0] : forwardedHostHeader == null ? void 0 : (_forwardedHostHeader_split = forwardedHostHeader.split(',')) == null ? void 0 : (_forwardedHostHeader_split_ = _forwardedHostHeader_split[0]) == null ? void 0 : _forwardedHostHeader_split_.trim();
|
||||
const hostHeader = headers['host'];
|
||||
if (originDomain) {
|
||||
return forwardedHostHeaderValue === originDomain ? {
|
||||
type: "x-forwarded-host",
|
||||
value: forwardedHostHeaderValue
|
||||
} : hostHeader === originDomain ? {
|
||||
type: "host",
|
||||
value: hostHeader
|
||||
} : undefined;
|
||||
}
|
||||
return forwardedHostHeaderValue ? {
|
||||
type: "x-forwarded-host",
|
||||
value: forwardedHostHeaderValue
|
||||
} : hostHeader ? {
|
||||
type: "host",
|
||||
value: hostHeader
|
||||
} : undefined;
|
||||
}
|
||||
async function handleAction({ req, res, ComponentMod, generateFlight, workStore, requestStore, serverActions, ctx, metadata }) {
|
||||
const contentType = req.headers['content-type'];
|
||||
const { page } = ctx.renderOpts;
|
||||
const serverModuleMap = (0, _manifestssingleton.getServerModuleMap)();
|
||||
const { actionId, isMultipartAction, isFetchAction, isURLEncodedAction, isPossibleServerAction } = (0, _serveractionrequestmeta.getServerActionRequestMetadata)(req);
|
||||
const handleUnrecognizedFetchAction = (err)=>{
|
||||
// If the deployment doesn't have skew protection, this is expected to occasionally happen,
|
||||
// so we use a warning instead of an error.
|
||||
console.warn(err);
|
||||
// Return an empty response with a header that the client router will interpret.
|
||||
// We don't need to waste time encoding a flight response, and using a blank body + header
|
||||
// means that unrecognized actions can also be handled at the infra level
|
||||
// (i.e. without needing to invoke a lambda)
|
||||
res.setHeader(_approuterheaders.NEXT_ACTION_NOT_FOUND_HEADER, '1');
|
||||
res.setHeader('content-type', 'text/plain');
|
||||
res.statusCode = 404;
|
||||
return {
|
||||
type: 'done',
|
||||
result: _renderresult.default.fromStatic('Server action not found.', 'text/plain')
|
||||
};
|
||||
};
|
||||
// If it can't be a Server Action, skip handling.
|
||||
// Note that this can be a false positive -- any multipart/urlencoded POST can get us here,
|
||||
// But won't know if it's an MPA action or not until we call `decodeAction` below.
|
||||
if (!isPossibleServerAction) {
|
||||
return null;
|
||||
}
|
||||
// We don't currently support URL encoded actions, so we bail out early.
|
||||
// Depending on if it's a fetch action or an MPA, we return a different response.
|
||||
if (isURLEncodedAction) {
|
||||
if (isFetchAction) {
|
||||
return {
|
||||
type: 'not-found'
|
||||
};
|
||||
} else {
|
||||
// This is an MPA action, so we return null
|
||||
return null;
|
||||
}
|
||||
}
|
||||
// If the app has no server actions at all, we can 404 early.
|
||||
if (!hasServerActions()) {
|
||||
return handleUnrecognizedFetchAction(getActionNotFoundError(actionId));
|
||||
}
|
||||
if (workStore.isStaticGeneration) {
|
||||
throw Object.defineProperty(new Error("Invariant: server actions can't be handled during static rendering"), "__NEXT_ERROR_CODE", {
|
||||
value: "E359",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
let temporaryReferences;
|
||||
// When running actions the default is no-store, you can still `cache: 'force-cache'`
|
||||
workStore.fetchCache = 'default-no-store';
|
||||
const originHeader = req.headers['origin'];
|
||||
const originDomain = typeof originHeader === 'string' && originHeader !== 'null' ? new URL(originHeader).host : undefined;
|
||||
const host = parseHostHeader(req.headers);
|
||||
let warning = undefined;
|
||||
function warnBadServerActionRequest() {
|
||||
if (warning) {
|
||||
(0, _log.warn)(warning);
|
||||
}
|
||||
}
|
||||
// This is to prevent CSRF attacks. If `x-forwarded-host` is set, we need to
|
||||
// ensure that the request is coming from the same host.
|
||||
if (!originDomain) {
|
||||
// This might be an old browser that doesn't send `host` header. We ignore
|
||||
// this case.
|
||||
warning = 'Missing `origin` header from a forwarded Server Actions request.';
|
||||
} else if (!host || originDomain !== host.value) {
|
||||
// If the customer sets a list of allowed origins, we'll allow the request.
|
||||
// These are considered safe but might be different from forwarded host set
|
||||
// by the infra (i.e. reverse proxies).
|
||||
if ((0, _csrfprotection.isCsrfOriginAllowed)(originDomain, serverActions == null ? void 0 : serverActions.allowedOrigins)) {
|
||||
// Ignore it
|
||||
} else {
|
||||
if (host) {
|
||||
// This seems to be an CSRF attack. We should not proceed the action.
|
||||
console.error(`\`${host.type}\` header with value \`${limitUntrustedHeaderValueForLogs(host.value)}\` does not match \`origin\` header with value \`${limitUntrustedHeaderValueForLogs(originDomain)}\` from a forwarded Server Actions request. Aborting the action.`);
|
||||
} else {
|
||||
// This is an attack. We should not proceed the action.
|
||||
console.error(`\`x-forwarded-host\` or \`host\` headers are not provided. One of these is needed to compare the \`origin\` header from a forwarded Server Actions request. Aborting the action.`);
|
||||
}
|
||||
const error = Object.defineProperty(new Error('Invalid Server Actions request.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E80",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
if (isFetchAction) {
|
||||
res.statusCode = 500;
|
||||
metadata.statusCode = 500;
|
||||
const promise = Promise.reject(error);
|
||||
try {
|
||||
// we need to await the promise to trigger the rejection early
|
||||
// so that it's already handled by the time we call
|
||||
// the RSC runtime. Otherwise, it will throw an unhandled
|
||||
// promise rejection error in the renderer.
|
||||
await promise;
|
||||
} catch {
|
||||
// swallow error, it's gonna be handled on the client
|
||||
}
|
||||
return {
|
||||
type: 'done',
|
||||
result: await generateFlight(req, ctx, requestStore, {
|
||||
actionResult: promise,
|
||||
// We didn't execute an action, so no revalidations could have
|
||||
// occurred. We can skip rendering the page.
|
||||
skipPageRendering: true,
|
||||
temporaryReferences
|
||||
})
|
||||
};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// ensure we avoid caching server actions unexpectedly
|
||||
res.setHeader('Cache-Control', 'no-cache, no-store, max-age=0, must-revalidate');
|
||||
const { actionAsyncStorage } = ComponentMod;
|
||||
const actionWasForwarded = Boolean(req.headers['x-action-forwarded']);
|
||||
if (actionId) {
|
||||
const forwardedWorker = (0, _manifestssingleton.selectWorkerForForwarding)(actionId, page);
|
||||
// If forwardedWorker is truthy, it means there isn't a worker for the action
|
||||
// in the current handler, so we forward the request to a worker that has the action.
|
||||
if (forwardedWorker) {
|
||||
return {
|
||||
type: 'done',
|
||||
result: await createForwardedActionResponse(req, res, host, forwardedWorker, ctx.renderOpts.basePath)
|
||||
};
|
||||
}
|
||||
}
|
||||
try {
|
||||
return await actionAsyncStorage.run({
|
||||
isAction: true
|
||||
}, async ()=>{
|
||||
// We only use these for fetch actions -- MPA actions handle them inside `decodeAction`.
|
||||
let actionModId;
|
||||
let boundActionArguments = [];
|
||||
if (// The type check here ensures that `req` is correctly typed, and the
|
||||
// environment variable check provides dead code elimination.
|
||||
process.env.NEXT_RUNTIME === 'edge' && (0, _helpers.isWebNextRequest)(req)) {
|
||||
if (!req.body) {
|
||||
throw Object.defineProperty(new Error('invariant: Missing request body.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E364",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
// TODO: add body limit
|
||||
// Use react-server-dom-webpack/server
|
||||
const { createTemporaryReferenceSet, decodeReply, decodeAction, decodeFormState } = ComponentMod;
|
||||
temporaryReferences = createTemporaryReferenceSet();
|
||||
if (isMultipartAction) {
|
||||
// TODO-APP: Add streaming support
|
||||
const formData = await req.request.formData();
|
||||
if (isFetchAction) {
|
||||
// A fetch action with a multipart body.
|
||||
try {
|
||||
actionModId = getActionModIdOrError(actionId, serverModuleMap);
|
||||
} catch (err) {
|
||||
return handleUnrecognizedFetchAction(err);
|
||||
}
|
||||
boundActionArguments = await decodeReply(formData, serverModuleMap, {
|
||||
temporaryReferences
|
||||
});
|
||||
} else {
|
||||
// Multipart POST, but not a fetch action.
|
||||
// Potentially an MPA action, we have to try decoding it to check.
|
||||
if (areAllActionIdsValid(formData, serverModuleMap) === false) {
|
||||
// TODO: This can be from skew or manipulated input. We should handle this case
|
||||
// more gracefully but this preserves the prior behavior where decodeAction would throw instead.
|
||||
throw Object.defineProperty(new Error(`Failed to find Server Action. This request might be from an older or newer deployment.\nRead more: https://nextjs.org/docs/messages/failed-to-find-server-action`), "__NEXT_ERROR_CODE", {
|
||||
value: "E975",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const action = await decodeAction(formData, serverModuleMap);
|
||||
if (typeof action === 'function') {
|
||||
// an MPA action.
|
||||
// Only warn if it's a server action, otherwise skip for other post requests
|
||||
warnBadServerActionRequest();
|
||||
const { actionResult } = await executeActionAndPrepareForRender(action, [], workStore, requestStore, actionWasForwarded);
|
||||
const formState = await decodeFormState(actionResult, formData, serverModuleMap);
|
||||
// Skip the fetch path.
|
||||
// We need to render a full HTML version of the page for the response, we'll handle that in app-render.
|
||||
return {
|
||||
type: 'done',
|
||||
result: undefined,
|
||||
formState
|
||||
};
|
||||
} else {
|
||||
// We couldn't decode an action, so this POST request turned out not to be a server action request.
|
||||
return null;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// POST with non-multipart body.
|
||||
// If it's not multipart AND not a fetch action,
|
||||
// then it can't be an action request.
|
||||
if (!isFetchAction) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
actionModId = getActionModIdOrError(actionId, serverModuleMap);
|
||||
} catch (err) {
|
||||
return handleUnrecognizedFetchAction(err);
|
||||
}
|
||||
// A fetch action with a non-multipart body.
|
||||
// In practice, this happens if `encodeReply` returned a string instead of FormData,
|
||||
// which can happen for very simple JSON-like values that don't need multiple flight rows.
|
||||
const chunks = [];
|
||||
const reader = req.body.getReader();
|
||||
while(true){
|
||||
const { done, value } = await reader.read();
|
||||
if (done) {
|
||||
break;
|
||||
}
|
||||
chunks.push(value);
|
||||
}
|
||||
const actionData = Buffer.concat(chunks).toString('utf-8');
|
||||
boundActionArguments = await decodeReply(actionData, serverModuleMap, {
|
||||
temporaryReferences
|
||||
});
|
||||
}
|
||||
} else if (// The type check here ensures that `req` is correctly typed, and the
|
||||
// environment variable check provides dead code elimination.
|
||||
process.env.NEXT_RUNTIME !== 'edge' && (0, _helpers.isNodeNextRequest)(req)) {
|
||||
// Use react-server-dom-webpack/server.node which supports streaming
|
||||
const { createTemporaryReferenceSet, decodeReply, decodeReplyFromBusboy, decodeAction, decodeFormState } = require(`./react-server.node`);
|
||||
temporaryReferences = createTemporaryReferenceSet();
|
||||
const { PassThrough, Readable, Transform } = require('node:stream');
|
||||
const { pipeline } = require('node:stream/promises');
|
||||
const defaultBodySizeLimit = '1 MB';
|
||||
const bodySizeLimit = (serverActions == null ? void 0 : serverActions.bodySizeLimit) ?? defaultBodySizeLimit;
|
||||
const bodySizeLimitBytes = bodySizeLimit !== defaultBodySizeLimit ? require('next/dist/compiled/bytes').parse(bodySizeLimit) : 1024 * 1024 // 1 MB
|
||||
;
|
||||
let size = 0;
|
||||
const sizeLimitTransform = new Transform({
|
||||
transform (chunk, encoding, callback) {
|
||||
size += Buffer.byteLength(chunk, encoding);
|
||||
if (size > bodySizeLimitBytes) {
|
||||
const { ApiError } = require('../api-utils');
|
||||
callback(Object.defineProperty(new ApiError(413, `Body exceeded ${bodySizeLimit} limit.\n` + `To configure the body size limit for Server Actions, see: https://nextjs.org/docs/app/api-reference/next-config-js/serverActions#bodysizelimit`), "__NEXT_ERROR_CODE", {
|
||||
value: "E394",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
}));
|
||||
return;
|
||||
}
|
||||
callback(null, chunk);
|
||||
}
|
||||
});
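For context, the size cap above is plain Node stream plumbing: a Transform that counts bytes and destroys the pipeline once the budget is exceeded. The following is a minimal standalone sketch of that pattern (the 1 MB limit, the names and the error text are illustrative, not Next.js APIs):

```js
// Standalone sketch: cap the number of bytes flowing through a pipeline.
// Anything beyond `limitBytes` makes `pipeline()` reject with an error.
const { Transform, PassThrough, Readable } = require('node:stream')
const { pipeline } = require('node:stream/promises')

function createSizeLimitTransform(limitBytes) {
  let seen = 0
  return new Transform({
    transform(chunk, encoding, callback) {
      seen += Buffer.byteLength(chunk, encoding)
      if (seen > limitBytes) {
        // Erroring the callback destroys the stream and rejects the pipeline.
        callback(new Error(`Body exceeded ${limitBytes} byte limit.`))
        return
      }
      callback(null, chunk)
    },
  })
}

async function main() {
  // 512 KiB passes, the second chunk pushes the total over 1 MiB.
  const source = Readable.from([Buffer.alloc(512 * 1024), Buffer.alloc(600 * 1024)])
  const sink = new PassThrough()
  sink.resume() // discard output; we only care about the limit check
  try {
    await pipeline(source, createSizeLimitTransform(1024 * 1024), sink)
    console.log('body accepted')
  } catch (err) {
    console.error('rejected:', err.message) // rejected: Body exceeded 1048576 byte limit.
  }
}

main()
```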
|
||||
if (isMultipartAction) {
|
||||
if (isFetchAction) {
|
||||
// A fetch action with a multipart body.
|
||||
try {
|
||||
actionModId = getActionModIdOrError(actionId, serverModuleMap);
|
||||
} catch (err) {
|
||||
return handleUnrecognizedFetchAction(err);
|
||||
}
|
||||
const busboy = require('next/dist/compiled/busboy')({
|
||||
defParamCharset: 'utf8',
|
||||
headers: req.headers,
|
||||
limits: {
|
||||
fieldSize: bodySizeLimitBytes
|
||||
}
|
||||
});
|
||||
const abortController = new AbortController();
|
||||
try {
|
||||
;
|
||||
[, boundActionArguments] = await Promise.all([
|
||||
pipeline(req.body, sizeLimitTransform, busboy, {
|
||||
signal: abortController.signal
|
||||
}),
|
||||
decodeReplyFromBusboy(busboy, serverModuleMap, {
|
||||
temporaryReferences
|
||||
})
|
||||
]);
|
||||
} catch (err) {
|
||||
abortController.abort();
|
||||
throw err;
|
||||
}
|
||||
} else {
|
||||
// Multipart POST, but not a fetch action.
|
||||
// Potentially an MPA action, we have to try decoding it to check.
|
||||
const sizeLimitedBody = new PassThrough();
|
||||
// React doesn't yet publish a busboy version of decodeAction
|
||||
// so we polyfill the parsing of FormData.
|
||||
const fakeRequest = new Request('http://localhost', {
|
||||
method: 'POST',
|
||||
// @ts-expect-error
|
||||
headers: {
|
||||
'Content-Type': contentType
|
||||
},
|
||||
body: Readable.toWeb(sizeLimitedBody),
|
||||
duplex: 'half'
|
||||
});
|
||||
let formData;
|
||||
const abortController = new AbortController();
|
||||
try {
|
||||
;
|
||||
[, formData] = await Promise.all([
|
||||
pipeline(req.body, sizeLimitTransform, sizeLimitedBody, {
|
||||
signal: abortController.signal
|
||||
}),
|
||||
fakeRequest.formData()
|
||||
]);
|
||||
} catch (err) {
|
||||
abortController.abort();
|
||||
throw err;
|
||||
}
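The "fake Request" trick above (feeding a Node stream to the WHATWG `Request#formData()` parser) also works in isolation on recent Node versions, where `Request`, `Response`, `FormData` and `Readable.toWeb`/`fromWeb` are available globally. A hedged standalone sketch with an illustrative payload:

```js
// Standalone sketch: parse a multipart body arriving as a Node stream by
// handing it to the built-in `Request#formData()` parser.
const { Readable } = require('node:stream')

async function main() {
  // Build an illustrative multipart payload; `Response` serializes FormData,
  // including a content-type header with the boundary.
  const original = new FormData()
  original.set('$ACTION_ID_example', '') // illustrative key, not a real 42-char action ID
  original.set('title', 'hello')
  const serialized = new Response(original)

  // Pretend this arrived as a Node request stream plus its content-type header.
  const nodeBody = Readable.fromWeb(serialized.body)
  const contentType = serialized.headers.get('content-type')

  // Re-parse it the way the handler above does: a synthetic POST Request
  // wrapping the web stream, then `formData()`.
  const fakeRequest = new Request('http://localhost', {
    method: 'POST',
    headers: { 'content-type': contentType },
    body: Readable.toWeb(nodeBody),
    duplex: 'half', // required for stream bodies
  })
  const parsed = await fakeRequest.formData()
  console.log([...parsed.keys()]) // [ '$ACTION_ID_example', 'title' ]
}

main()
```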
|
||||
if (areAllActionIdsValid(formData, serverModuleMap) === false) {
|
||||
// TODO: This can be from skew or manipulated input. We should handle this case
|
||||
// more gracefully but this preserves the prior behavior where decodeAction would throw instead.
|
||||
throw Object.defineProperty(new Error(`Failed to find Server Action. This request might be from an older or newer deployment.\nRead more: https://nextjs.org/docs/messages/failed-to-find-server-action`), "__NEXT_ERROR_CODE", {
|
||||
value: "E975",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
// TODO: Refactor so it is harder to accidentally decode an action before you have validated that the
|
||||
// action referred to is available.
|
||||
const action = await decodeAction(formData, serverModuleMap);
|
||||
if (typeof action === 'function') {
|
||||
// an MPA action.
|
||||
// Only warn if it's a server action, otherwise skip for other post requests
|
||||
warnBadServerActionRequest();
|
||||
const { actionResult } = await executeActionAndPrepareForRender(action, [], workStore, requestStore, actionWasForwarded);
|
||||
const formState = await decodeFormState(actionResult, formData, serverModuleMap);
|
||||
// Skip the fetch path.
|
||||
// We need to render a full HTML version of the page for the response, we'll handle that in app-render.
|
||||
return {
|
||||
type: 'done',
|
||||
result: undefined,
|
||||
formState
|
||||
};
|
||||
} else {
|
||||
// We couldn't decode an action, so this POST request turned out not to be a server action request.
|
||||
return null;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// POST with non-multipart body.
|
||||
// If it's not multipart AND not a fetch action,
|
||||
// then it can't be an action request.
|
||||
if (!isFetchAction) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
actionModId = getActionModIdOrError(actionId, serverModuleMap);
|
||||
} catch (err) {
|
||||
return handleUnrecognizedFetchAction(err);
|
||||
}
|
||||
// A fetch action with a non-multipart body.
|
||||
// In practice, this happens if `encodeReply` returned a string instead of FormData,
|
||||
// which can happen for very simple JSON-like values that don't need multiple flight rows.
|
||||
const sizeLimitedBody = new PassThrough();
|
||||
const chunks = [];
|
||||
await Promise.all([
|
||||
pipeline(req.body, sizeLimitTransform, sizeLimitedBody),
|
||||
(async ()=>{
|
||||
for await (const chunk of sizeLimitedBody){
|
||||
chunks.push(Buffer.from(chunk));
|
||||
}
|
||||
})()
|
||||
]);
|
||||
const actionData = Buffer.concat(chunks).toString('utf-8');
|
||||
boundActionArguments = await decodeReply(actionData, serverModuleMap, {
|
||||
temporaryReferences
|
||||
});
|
||||
}
|
||||
} else {
|
||||
throw Object.defineProperty(new Error('Invariant: Unknown request type.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E114",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
// actions.js
|
||||
// app/page.js
|
||||
// action worker1
|
||||
// appRender1
|
||||
// app/foo/page.js
|
||||
// action worker2
|
||||
// appRender2
|
||||
// / -> fire action -> POST / -> appRender1 -> modId for the action file
|
||||
// /foo -> fire action -> POST /foo -> appRender2 -> modId for the action file
|
||||
const actionMod = await ComponentMod.__next_app__.require(actionModId);
|
||||
const actionHandler = actionMod[// `actionId` must exist if we got here, as otherwise we would have thrown an error above
|
||||
actionId];
|
||||
const { actionResult, skipPageRendering } = await executeActionAndPrepareForRender(actionHandler, boundActionArguments, workStore, requestStore, actionWasForwarded).finally(()=>{
|
||||
addRevalidationHeader(res, {
|
||||
workStore,
|
||||
requestStore
|
||||
});
|
||||
});
|
||||
// For form actions, we need to continue rendering the page.
|
||||
if (isFetchAction) {
|
||||
return {
|
||||
type: 'done',
|
||||
result: await generateFlight(req, ctx, requestStore, {
|
||||
actionResult: Promise.resolve(actionResult),
|
||||
skipPageRendering,
|
||||
temporaryReferences,
|
||||
// If we skip page rendering, we need to ensure pending
|
||||
// revalidates are awaited before closing the response. Otherwise,
|
||||
// this will be done after rendering the page.
|
||||
waitUntil: skipPageRendering ? (0, _revalidationutils.executeRevalidates)(workStore) : undefined
|
||||
})
|
||||
};
|
||||
} else {
|
||||
// TODO: this shouldn't be reachable, because all non-fetch codepaths return early.
|
||||
// this will be handled in a follow-up refactor PR.
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
if ((0, _redirecterror.isRedirectError)(err)) {
|
||||
const redirectUrl = (0, _redirect.getURLFromRedirectError)(err);
|
||||
const redirectType = (0, _redirect.getRedirectTypeFromError)(err);
|
||||
// if it's a fetch action, we'll set the status code for logging/debugging purposes
|
||||
// but we won't set a Location header, as the redirect will be handled by the client router
|
||||
res.statusCode = _redirectstatuscode.RedirectStatusCode.SeeOther;
|
||||
metadata.statusCode = _redirectstatuscode.RedirectStatusCode.SeeOther;
|
||||
if (isFetchAction) {
|
||||
return {
|
||||
type: 'done',
|
||||
result: await createRedirectRenderResult(req, res, host, redirectUrl, redirectType, ctx.renderOpts.basePath, workStore, requestStore.url.pathname)
|
||||
};
|
||||
}
|
||||
// For an MPA action, the redirect doesn't need a body, just a Location header.
|
||||
res.setHeader('Location', redirectUrl);
|
||||
return {
|
||||
type: 'done',
|
||||
result: _renderresult.default.EMPTY
|
||||
};
|
||||
} else if ((0, _httpaccessfallback.isHTTPAccessFallbackError)(err)) {
|
||||
res.statusCode = (0, _httpaccessfallback.getAccessFallbackHTTPStatus)(err);
|
||||
metadata.statusCode = res.statusCode;
|
||||
if (isFetchAction) {
|
||||
const promise = Promise.reject(err);
|
||||
try {
|
||||
// we need to await the promise to trigger the rejection early
|
||||
// so that it's already handled by the time we call
|
||||
// the RSC runtime. Otherwise, it will throw an unhandled
|
||||
// promise rejection error in the renderer.
|
||||
await promise;
|
||||
} catch {
|
||||
// swallow error, it's gonna be handled on the client
|
||||
}
|
||||
return {
|
||||
type: 'done',
|
||||
result: await generateFlight(req, ctx, requestStore, {
|
||||
skipPageRendering: false,
|
||||
actionResult: promise,
|
||||
temporaryReferences
|
||||
})
|
||||
};
|
||||
}
|
||||
// For an MPA action, we need to render a HTML response. We'll handle that in app-render.
|
||||
return {
|
||||
type: 'not-found'
|
||||
};
|
||||
}
|
||||
// An error that didn't come from `redirect()` or `notFound()`, likely thrown from user code
|
||||
// (but it could also be a bug in our code!)
|
||||
if (isFetchAction) {
|
||||
// TODO: consider checking if the error is an `ApiError` and change status code
|
||||
// so that we can respond with a 413 to requests that break the body size limit
|
||||
// (but if we do that, we also need to make sure that whatever handles the non-fetch error path below does the same)
|
||||
res.statusCode = 500;
|
||||
metadata.statusCode = 500;
|
||||
const promise = Promise.reject(err);
|
||||
try {
|
||||
// we need to await the promise to trigger the rejection early
|
||||
// so that it's already handled by the time we call
|
||||
// the RSC runtime. Otherwise, it will throw an unhandled
|
||||
// promise rejection error in the renderer.
|
||||
await promise;
|
||||
} catch {
|
||||
// swallow error, it's gonna be handled on the client
|
||||
}
|
||||
return {
|
||||
type: 'done',
|
||||
result: await generateFlight(req, ctx, requestStore, {
|
||||
actionResult: promise,
|
||||
// If the page was not revalidated, or if the action was forwarded
|
||||
// from another worker, we can skip rendering the page.
|
||||
skipPageRendering: workStore.pathWasRevalidated === undefined || workStore.pathWasRevalidated === _actionrevalidationkind.ActionDidNotRevalidate || actionWasForwarded,
|
||||
temporaryReferences
|
||||
})
|
||||
};
|
||||
}
|
||||
// For an MPA action, we need to render a HTML response. We'll rethrow the error and let it be handled above.
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
async function executeActionAndPrepareForRender(action, args, workStore, requestStore, actionWasForwarded) {
|
||||
requestStore.phase = 'action';
|
||||
let skipPageRendering = actionWasForwarded;
|
||||
try {
|
||||
const actionResult = await _workunitasyncstorageexternal.workUnitAsyncStorage.run(requestStore, ()=>action.apply(null, args));
|
||||
// If the page was not revalidated, or if the action was forwarded from
|
||||
// another worker, we can skip rendering the page.
|
||||
skipPageRendering ||= workStore.pathWasRevalidated === undefined || workStore.pathWasRevalidated === _actionrevalidationkind.ActionDidNotRevalidate;
|
||||
return {
|
||||
actionResult,
|
||||
skipPageRendering
|
||||
};
|
||||
} finally{
|
||||
if (!skipPageRendering) {
|
||||
requestStore.phase = 'render';
|
||||
// When we switch to the render phase, cookies() will return
|
||||
// `workUnitStore.cookies` instead of
|
||||
// `workUnitStore.userspaceMutableCookies`. We want the render to see any
|
||||
// cookie writes that we performed during the action, so we need to update
|
||||
// the immutable cookies to reflect the changes.
|
||||
(0, _requeststore.synchronizeMutableCookies)(requestStore);
|
||||
// The server action might have toggled draft mode, so we need to reflect
|
||||
// that in the work store to be up-to-date for subsequent rendering.
|
||||
workStore.isDraftMode = requestStore.draftMode.isEnabled;
|
||||
// If the action called revalidateTag/revalidatePath, then that might
|
||||
// affect data used by the subsequent render, so we need to make sure all
|
||||
// revalidations are applied before that.
|
||||
await (0, _revalidationutils.executeRevalidates)(workStore);
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Attempts to find the module ID for the action from the module map. When this fails, it could be a deployment skew where
|
||||
* the action came from a different deployment. It could also simply be an invalid POST request that is not a server action.
|
||||
* In either case, we'll throw an error to be handled by the caller.
|
||||
*/ function getActionModIdOrError(actionId, serverModuleMap) {
|
||||
var _serverModuleMap_actionId;
|
||||
// if we're missing the action ID header, we can't do any further processing
|
||||
if (!actionId) {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError("Missing 'next-action' header."), "__NEXT_ERROR_CODE", {
|
||||
value: "E664",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const actionModId = (_serverModuleMap_actionId = serverModuleMap[actionId]) == null ? void 0 : _serverModuleMap_actionId.id;
|
||||
if (!actionModId) {
|
||||
throw getActionNotFoundError(actionId);
|
||||
}
|
||||
return actionModId;
|
||||
}
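For illustration, the lookup above only relies on a map shaped like `{ [actionId]: { id } }`; the real map is emitted by the Next.js build and keyed by 42-character action hashes. A minimal sketch, assuming `getActionModIdOrError` from above is in scope and using made-up values:

```js
// Hypothetical map -- the hash and the module id are made up.
const actionId = 'f'.repeat(42)
const serverModuleMap = { [actionId]: { id: 'action-module-1' } }

console.log(getActionModIdOrError(actionId, serverModuleMap)) // 'action-module-1'

try {
  getActionModIdOrError('0'.repeat(42), serverModuleMap)
} catch (err) {
  console.error(err.message) // Failed to find Server Action "000...". ...
}
```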
|
||||
function getActionNotFoundError(actionId) {
|
||||
return Object.defineProperty(new Error(`Failed to find Server Action${actionId ? ` "${actionId}"` : ''}. This request might be from an older or newer deployment.\nRead more: https://nextjs.org/docs/messages/failed-to-find-server-action`), "__NEXT_ERROR_CODE", {
|
||||
value: "E974",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const $ACTION_ = '$ACTION_';
|
||||
const $ACTION_REF_ = '$ACTION_REF_';
|
||||
const $ACTION_ID_ = '$ACTION_ID_';
|
||||
const ACTION_ID_EXPECTED_LENGTH = 42;
|
||||
/**
|
||||
* This function mirrors logic inside React's decodeAction and should be kept in sync with that.
|
||||
* It pre-parses the FormData to ensure that any action IDs referred to are actual action IDs for
|
||||
* this Next.js application.
|
||||
*/ function areAllActionIdsValid(mpaFormData, serverModuleMap) {
|
||||
let hasAtLeastOneAction = false;
|
||||
// Before we attempt to decode the payload for a possible MPA action, assert that all
|
||||
// action IDs are valid IDs. If not we should disregard the payload
|
||||
for (let key of mpaFormData.keys()){
|
||||
if (!key.startsWith($ACTION_)) {
|
||||
continue;
|
||||
}
|
||||
if (key.startsWith($ACTION_ID_)) {
|
||||
// No Bound args case
|
||||
if (isInvalidActionIdFieldName(key, serverModuleMap)) {
|
||||
return false;
|
||||
}
|
||||
hasAtLeastOneAction = true;
|
||||
} else if (key.startsWith($ACTION_REF_)) {
|
||||
// Bound args case
|
||||
const actionDescriptorField = $ACTION_ + key.slice($ACTION_REF_.length) + ':0';
|
||||
const actionFields = mpaFormData.getAll(actionDescriptorField);
|
||||
if (actionFields.length !== 1) {
|
||||
return false;
|
||||
}
|
||||
const actionField = actionFields[0];
|
||||
if (typeof actionField !== 'string') {
|
||||
return false;
|
||||
}
|
||||
if (isInvalidStringActionDescriptor(actionField, serverModuleMap)) {
|
||||
return false;
|
||||
}
|
||||
hasAtLeastOneAction = true;
|
||||
}
|
||||
}
|
||||
return hasAtLeastOneAction;
|
||||
}
|
||||
const ACTION_DESCRIPTOR_ID_PREFIX = '{"id":"';
|
||||
function isInvalidStringActionDescriptor(actionDescriptor, serverModuleMap) {
|
||||
if (actionDescriptor.startsWith(ACTION_DESCRIPTOR_ID_PREFIX) === false) {
|
||||
return true;
|
||||
}
|
||||
const from = ACTION_DESCRIPTOR_ID_PREFIX.length;
|
||||
const to = from + ACTION_ID_EXPECTED_LENGTH;
|
||||
// We expect actionDescriptor to be '{"id":"<actionId>",...}'
|
||||
const actionId = actionDescriptor.slice(from, to);
|
||||
if (actionId.length !== ACTION_ID_EXPECTED_LENGTH || actionDescriptor[to] !== '"') {
|
||||
return true;
|
||||
}
|
||||
const entry = serverModuleMap[actionId];
|
||||
if (entry == null) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function isInvalidActionIdFieldName(actionIdFieldName, serverModuleMap) {
|
||||
// The field name always starts with $ACTION_ID_; the caller has already
// validated that prefix when extracting the id from the field key, so here
// we only need to check the id's length and that it exists in the module map.
|
||||
if (actionIdFieldName.length !== $ACTION_ID_.length + ACTION_ID_EXPECTED_LENGTH) {
|
||||
// this field name has too few or too many characters
|
||||
return true;
|
||||
}
|
||||
const actionId = actionIdFieldName.slice($ACTION_ID_.length);
|
||||
const entry = serverModuleMap[actionId];
|
||||
if (entry == null) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
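Taken together, these helpers validate the field-naming convention used for MPA form posts: either the action ID is embedded in the field name (`$ACTION_ID_<id>`), or a `$ACTION_REF_<n>` field points at a `$ACTION_<n>:0` descriptor whose value starts with `{"id":"<id>"`. A hedged sketch (fake IDs, a made-up module map, and it assumes `areAllActionIdsValid` from above is in scope; the `bound` property in the descriptor is only illustrative, since the validator inspects nothing past the `id` prefix):

```js
// Illustrative FormData payloads (the ID is a fake 42-character string).
const id = 'a'.repeat(42)

// Case 1: no bound arguments -- the ID lives in the field *name*.
const noBoundArgs = new FormData()
noBoundArgs.set(`$ACTION_ID_${id}`, '')

// Case 2: bound arguments -- $ACTION_REF_1 points at the descriptor field
// $ACTION_1:0, whose value is a JSON string starting with {"id":"<id>".
const withBoundArgs = new FormData()
withBoundArgs.set('$ACTION_REF_1', '')
withBoundArgs.set('$ACTION_1:0', JSON.stringify({ id, bound: null }))

// Minimal map shape implied by the lookups above; the module id is made up.
const serverModuleMap = { [id]: { id: 1234 } }

console.log(areAllActionIdsValid(noBoundArgs, serverModuleMap))   // true
console.log(areAllActionIdsValid(withBoundArgs, serverModuleMap)) // true
```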
|
||||
|
||||
//# sourceMappingURL=action-handler.js.map
|
||||
1
apps/public-web/node_modules/next/dist/server/app-render/action-handler.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
apps/public-web/node_modules/next/dist/server/app-render/after-task-async-storage-instance.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import type { AfterTaskAsyncStorage } from './after-task-async-storage.external';
|
||||
export declare const afterTaskAsyncStorageInstance: AfterTaskAsyncStorage;
|
||||
14
apps/public-web/node_modules/next/dist/server/app-render/after-task-async-storage-instance.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "afterTaskAsyncStorageInstance", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return afterTaskAsyncStorageInstance;
|
||||
}
|
||||
});
|
||||
const _asynclocalstorage = require("./async-local-storage");
|
||||
const afterTaskAsyncStorageInstance = (0, _asynclocalstorage.createAsyncLocalStorage)();
|
||||
|
||||
//# sourceMappingURL=after-task-async-storage-instance.js.map
|
||||
1
apps/public-web/node_modules/next/dist/server/app-render/after-task-async-storage-instance.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/after-task-async-storage-instance.ts"],"sourcesContent":["import type { AfterTaskAsyncStorage } from './after-task-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const afterTaskAsyncStorageInstance: AfterTaskAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["afterTaskAsyncStorageInstance","createAsyncLocalStorage"],"mappings":";;;;+BAGaA;;;eAAAA;;;mCAF2B;AAEjC,MAAMA,gCACXC,IAAAA,0CAAuB","ignoreList":[0]}
|
||||
13
apps/public-web/node_modules/next/dist/server/app-render/after-task-async-storage.external.d.ts
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
import type { AsyncLocalStorage } from 'async_hooks';
|
||||
import { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance';
|
||||
import type { WorkUnitStore } from './work-unit-async-storage.external';
|
||||
export interface AfterTaskStore {
|
||||
/** The phase in which the topmost `after` was called.
|
||||
*
|
||||
* NOTE: Can be undefined when running `generateStaticParams`,
|
||||
* where we only have a `workStore`, no `workUnitStore`.
|
||||
*/
|
||||
readonly rootTaskSpawnPhase: WorkUnitStore['phase'] | undefined;
|
||||
}
|
||||
export type AfterTaskAsyncStorage = AsyncLocalStorage<AfterTaskStore>;
|
||||
export { afterTaskAsyncStorage };
|
||||
13
apps/public-web/node_modules/next/dist/server/app-render/after-task-async-storage.external.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "afterTaskAsyncStorage", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return _aftertaskasyncstorageinstance.afterTaskAsyncStorageInstance;
|
||||
}
|
||||
});
|
||||
const _aftertaskasyncstorageinstance = require("./after-task-async-storage-instance");
|
||||
|
||||
//# sourceMappingURL=after-task-async-storage.external.js.map
|
||||
1
apps/public-web/node_modules/next/dist/server/app-render/after-task-async-storage.external.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/after-task-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nimport type { WorkUnitStore } from './work-unit-async-storage.external'\n\nexport interface AfterTaskStore {\n /** The phase in which the topmost `after` was called.\n *\n * NOTE: Can be undefined when running `generateStaticParams`,\n * where we only have a `workStore`, no `workUnitStore`.\n */\n readonly rootTaskSpawnPhase: WorkUnitStore['phase'] | undefined\n}\n\nexport type AfterTaskAsyncStorage = AsyncLocalStorage<AfterTaskStore>\n\nexport { afterTaskAsyncStorage }\n"],"names":["afterTaskAsyncStorage"],"mappings":";;;;+BAiBSA;;;eAAAA,4DAAqB;;;+CAdyC","ignoreList":[0]}
|
||||
36
apps/public-web/node_modules/next/dist/server/app-render/app-render-prerender-utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
/**
|
||||
* This is a utility function to make scheduling sequential tasks that run back to back easier.
|
||||
* We schedule on the same queue (setTimeout) at the same time to ensure no other events can sneak in between.
|
||||
*/
|
||||
export declare function prerenderAndAbortInSequentialTasks<R>(prerender: () => Promise<R>, abort: () => void): Promise<R>;
|
||||
/**
|
||||
* Like `prerenderAndAbortInSequentialTasks`, but with another task between `prerender` and `abort`,
|
||||
* which allows us to move a part of the render into a separate task.
|
||||
*/
|
||||
export declare function prerenderAndAbortInSequentialTasksWithStages<R>(prerender: () => Promise<R>, advanceStage: () => void, abort: () => void): Promise<R>;
|
||||
export declare class ReactServerResult {
|
||||
private _stream;
|
||||
constructor(stream: ReadableStream<Uint8Array>);
|
||||
tee(): ReadableStream<Uint8Array<ArrayBufferLike>>;
|
||||
consume(): ReadableStream<Uint8Array<ArrayBufferLike>>;
|
||||
}
|
||||
export type ReactServerPrerenderResolveToType = {
|
||||
prelude: ReadableStream<Uint8Array>;
|
||||
};
|
||||
export declare function createReactServerPrerenderResult(underlying: Promise<ReactServerPrerenderResolveToType>): Promise<ReactServerPrerenderResult>;
|
||||
export declare function createReactServerPrerenderResultFromRender(underlying: ReadableStream<Uint8Array>): Promise<ReactServerPrerenderResult>;
|
||||
export declare class ReactServerPrerenderResult {
|
||||
private _chunks;
|
||||
private assertChunks;
|
||||
private consumeChunks;
|
||||
consume(): void;
|
||||
constructor(chunks: Array<Uint8Array>);
|
||||
asUnclosingStream(): ReadableStream<Uint8Array>;
|
||||
consumeAsUnclosingStream(): ReadableStream<Uint8Array>;
|
||||
asStream(): ReadableStream<Uint8Array>;
|
||||
consumeAsStream(): ReadableStream<Uint8Array>;
|
||||
}
|
||||
export declare function processPrelude(unprocessedPrelude: ReadableStream<Uint8Array>): Promise<{
|
||||
prelude: ReadableStream<Uint8Array<ArrayBufferLike>>;
|
||||
preludeIsEmpty: boolean;
|
||||
}>;
|
||||
249
apps/public-web/node_modules/next/dist/server/app-render/app-render-prerender-utils.js
generated
vendored
Normal file
@@ -0,0 +1,249 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
ReactServerPrerenderResult: null,
|
||||
ReactServerResult: null,
|
||||
createReactServerPrerenderResult: null,
|
||||
createReactServerPrerenderResultFromRender: null,
|
||||
prerenderAndAbortInSequentialTasks: null,
|
||||
prerenderAndAbortInSequentialTasksWithStages: null,
|
||||
processPrelude: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
ReactServerPrerenderResult: function() {
|
||||
return ReactServerPrerenderResult;
|
||||
},
|
||||
ReactServerResult: function() {
|
||||
return ReactServerResult;
|
||||
},
|
||||
createReactServerPrerenderResult: function() {
|
||||
return createReactServerPrerenderResult;
|
||||
},
|
||||
createReactServerPrerenderResultFromRender: function() {
|
||||
return createReactServerPrerenderResultFromRender;
|
||||
},
|
||||
prerenderAndAbortInSequentialTasks: function() {
|
||||
return prerenderAndAbortInSequentialTasks;
|
||||
},
|
||||
prerenderAndAbortInSequentialTasksWithStages: function() {
|
||||
return prerenderAndAbortInSequentialTasksWithStages;
|
||||
},
|
||||
processPrelude: function() {
|
||||
return processPrelude;
|
||||
}
|
||||
});
|
||||
const _invarianterror = require("../../shared/lib/invariant-error");
|
||||
const _apprenderscheduling = require("./app-render-scheduling");
|
||||
const _fastsetimmediateexternal = require("../node-environment-extensions/fast-set-immediate.external");
|
||||
function prerenderAndAbortInSequentialTasks(prerender, abort) {
|
||||
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('`prerenderAndAbortInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E538",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else {
|
||||
return new Promise((resolve, reject)=>{
|
||||
const scheduleTimeout = (0, _apprenderscheduling.createAtomicTimerGroup)();
|
||||
let pendingResult;
|
||||
scheduleTimeout(()=>{
|
||||
try {
|
||||
(0, _fastsetimmediateexternal.DANGEROUSLY_runPendingImmediatesAfterCurrentTask)();
|
||||
pendingResult = prerender();
|
||||
pendingResult.catch(()=>{});
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
scheduleTimeout(()=>{
|
||||
try {
|
||||
(0, _fastsetimmediateexternal.expectNoPendingImmediates)();
|
||||
abort();
|
||||
resolve(pendingResult);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
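A hedged usage sketch of the two-task pattern, assuming `prerenderAndAbortInSequentialTasks` is imported from this module and we are not in the Edge runtime; the fake prerender below is made up purely to show the ordering:

```js
// The work starts in one macrotask and the abort runs in the very next
// macrotask of the same event-loop turn, so nothing can interleave between them.
const controller = new AbortController()

function fakePrerender(signal) {
  return new Promise((resolve) => {
    signal.addEventListener('abort', () => resolve('aborted prelude'), { once: true })
  })
}

prerenderAndAbortInSequentialTasks(
  () => fakePrerender(controller.signal), // first task: start rendering
  () => controller.abort()                // second task: cut it off
).then((value) => {
  console.log(value) // 'aborted prelude'
})
```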
|
||||
function prerenderAndAbortInSequentialTasksWithStages(prerender, advanceStage, abort) {
|
||||
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('`prerenderAndAbortInSequentialTasksWithStages` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E778",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else {
|
||||
return new Promise((resolve, reject)=>{
|
||||
const scheduleTimeout = (0, _apprenderscheduling.createAtomicTimerGroup)();
|
||||
let pendingResult;
|
||||
scheduleTimeout(()=>{
|
||||
try {
|
||||
(0, _fastsetimmediateexternal.DANGEROUSLY_runPendingImmediatesAfterCurrentTask)();
|
||||
pendingResult = prerender();
|
||||
pendingResult.catch(()=>{});
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
scheduleTimeout(()=>{
|
||||
try {
|
||||
(0, _fastsetimmediateexternal.DANGEROUSLY_runPendingImmediatesAfterCurrentTask)();
|
||||
advanceStage();
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
scheduleTimeout(()=>{
|
||||
try {
|
||||
(0, _fastsetimmediateexternal.expectNoPendingImmediates)();
|
||||
abort();
|
||||
resolve(pendingResult);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
class ReactServerResult {
|
||||
constructor(stream){
|
||||
this._stream = stream;
|
||||
}
|
||||
tee() {
|
||||
if (this._stream === null) {
|
||||
throw Object.defineProperty(new Error('Cannot tee a ReactServerResult that has already been consumed'), "__NEXT_ERROR_CODE", {
|
||||
value: "E106",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const tee = this._stream.tee();
|
||||
this._stream = tee[0];
|
||||
return tee[1];
|
||||
}
|
||||
consume() {
|
||||
if (this._stream === null) {
|
||||
throw Object.defineProperty(new Error('Cannot consume a ReactServerResult that has already been consumed'), "__NEXT_ERROR_CODE", {
|
||||
value: "E470",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const stream = this._stream;
|
||||
this._stream = null;
|
||||
return stream;
|
||||
}
|
||||
}
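Usage note (illustrative, assuming `ReactServerResult` from above is in scope): `tee()` splits off a copy of the RSC stream while keeping the result usable, whereas `consume()` hands over the stream and spends the result, after which both methods throw.

```js
const result = new ReactServerResult(
  new ReadableStream({
    start(controller) {
      controller.enqueue(new Uint8Array([0x68, 0x69])) // "hi"
      controller.close()
    },
  })
)

const copy = result.tee()          // a second stream; `result` still holds one internally
const remaining = result.consume() // takes the internal stream; further tee()/consume() throws
```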
|
||||
async function createReactServerPrerenderResult(underlying) {
|
||||
const chunks = [];
|
||||
const { prelude } = await underlying;
|
||||
const reader = prelude.getReader();
|
||||
while(true){
|
||||
const { done, value } = await reader.read();
|
||||
if (done) {
|
||||
return new ReactServerPrerenderResult(chunks);
|
||||
} else {
|
||||
chunks.push(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
async function createReactServerPrerenderResultFromRender(underlying) {
|
||||
const chunks = [];
|
||||
const reader = underlying.getReader();
|
||||
while(true){
|
||||
const { done, value } = await reader.read();
|
||||
if (done) {
|
||||
break;
|
||||
} else {
|
||||
chunks.push(value);
|
||||
}
|
||||
}
|
||||
return new ReactServerPrerenderResult(chunks);
|
||||
}
|
||||
class ReactServerPrerenderResult {
|
||||
assertChunks(expression) {
|
||||
if (this._chunks === null) {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError(`Cannot \`${expression}\` on a ReactServerPrerenderResult that has already been consumed.`), "__NEXT_ERROR_CODE", {
|
||||
value: "E593",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
return this._chunks;
|
||||
}
|
||||
consumeChunks(expression) {
|
||||
const chunks = this.assertChunks(expression);
|
||||
this.consume();
|
||||
return chunks;
|
||||
}
|
||||
consume() {
|
||||
this._chunks = null;
|
||||
}
|
||||
constructor(chunks){
|
||||
this._chunks = chunks;
|
||||
}
|
||||
asUnclosingStream() {
|
||||
const chunks = this.assertChunks('asUnclosingStream()');
|
||||
return createUnclosingStream(chunks);
|
||||
}
|
||||
consumeAsUnclosingStream() {
|
||||
const chunks = this.consumeChunks('consumeAsUnclosingStream()');
|
||||
return createUnclosingStream(chunks);
|
||||
}
|
||||
asStream() {
|
||||
const chunks = this.assertChunks('asStream()');
|
||||
return createClosingStream(chunks);
|
||||
}
|
||||
consumeAsStream() {
|
||||
const chunks = this.consumeChunks('consumeAsStream()');
|
||||
return createClosingStream(chunks);
|
||||
}
|
||||
}
|
||||
function createUnclosingStream(chunks) {
|
||||
let i = 0;
|
||||
return new ReadableStream({
|
||||
async pull (controller) {
|
||||
if (i < chunks.length) {
|
||||
controller.enqueue(chunks[i++]);
|
||||
}
|
||||
// we intentionally keep the stream open. The consumer will clear
|
||||
// out chunks once finished and the remaining memory will be GC'd
|
||||
// when this object goes out of scope
|
||||
}
|
||||
});
|
||||
}
|
||||
function createClosingStream(chunks) {
|
||||
let i = 0;
|
||||
return new ReadableStream({
|
||||
async pull (controller) {
|
||||
if (i < chunks.length) {
|
||||
controller.enqueue(chunks[i++]);
|
||||
} else {
|
||||
controller.close();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
async function processPrelude(unprocessedPrelude) {
|
||||
const [prelude, peek] = unprocessedPrelude.tee();
|
||||
const reader = peek.getReader();
|
||||
const firstResult = await reader.read();
|
||||
reader.cancel();
|
||||
const preludeIsEmpty = firstResult.done === true;
|
||||
return {
|
||||
prelude,
|
||||
preludeIsEmpty
|
||||
};
|
||||
}
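A small usage sketch, assuming `processPrelude` from above is in scope: it lets the caller check whether a prerender produced any prelude bytes without consuming the stream that gets passed on.

```js
async function demo() {
  // An intentionally empty prelude stream.
  const emptyPrelude = new ReadableStream({ start(controller) { controller.close() } })
  const { prelude, preludeIsEmpty } = await processPrelude(emptyPrelude)
  console.log(preludeIsEmpty) // true
  // `prelude` is an untouched (here: empty) stream, safe to hand to the renderer.
}

demo()
```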
|
||||
|
||||
//# sourceMappingURL=app-render-prerender-utils.js.map
|
||||
1
apps/public-web/node_modules/next/dist/server/app-render/app-render-prerender-utils.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
11
apps/public-web/node_modules/next/dist/server/app-render/app-render-render-utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
/**
|
||||
* This is a utility function to make scheduling sequential tasks that run back to back easier.
|
||||
* We schedule on the same queue (setTimeout) at the same time to ensure no other events can sneak in between.
|
||||
*/
|
||||
export declare function scheduleInSequentialTasks<R>(render: () => R | Promise<R>, followup: () => void): Promise<R>;
|
||||
/**
|
||||
* This is a utility function to make scheduling sequential tasks that run back to back easier.
|
||||
* We schedule on the same queue (setTimeout) at the same time to ensure no other events can sneak in between.
|
||||
* The function that runs in the second task gets access to the first tasks's result.
|
||||
*/
|
||||
export declare function pipelineInSequentialTasks<A, B, C>(one: () => A, two: (a: A) => B, three: (b: B) => C): Promise<C>;
|
||||
112
apps/public-web/node_modules/next/dist/server/app-render/app-render-render-utils.js
generated
vendored
Normal file
@@ -0,0 +1,112 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
pipelineInSequentialTasks: null,
|
||||
scheduleInSequentialTasks: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
pipelineInSequentialTasks: function() {
|
||||
return pipelineInSequentialTasks;
|
||||
},
|
||||
scheduleInSequentialTasks: function() {
|
||||
return scheduleInSequentialTasks;
|
||||
}
|
||||
});
|
||||
const _invarianterror = require("../../shared/lib/invariant-error");
|
||||
const _apprenderscheduling = require("./app-render-scheduling");
|
||||
const _fastsetimmediateexternal = require("../node-environment-extensions/fast-set-immediate.external");
|
||||
function scheduleInSequentialTasks(render, followup) {
|
||||
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('`scheduleInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E591",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else {
|
||||
return new Promise((resolve, reject)=>{
|
||||
const scheduleTimeout = (0, _apprenderscheduling.createAtomicTimerGroup)();
|
||||
let pendingResult;
|
||||
scheduleTimeout(()=>{
|
||||
try {
|
||||
(0, _fastsetimmediateexternal.DANGEROUSLY_runPendingImmediatesAfterCurrentTask)();
|
||||
pendingResult = render();
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
scheduleTimeout(()=>{
|
||||
try {
|
||||
(0, _fastsetimmediateexternal.expectNoPendingImmediates)();
|
||||
followup();
|
||||
resolve(pendingResult);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
function pipelineInSequentialTasks(one, two, three) {
|
||||
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('`pipelineInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E875",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else {
|
||||
return new Promise((resolve, reject)=>{
|
||||
const scheduleTimeout = (0, _apprenderscheduling.createAtomicTimerGroup)();
|
||||
let oneResult;
|
||||
scheduleTimeout(()=>{
|
||||
try {
|
||||
(0, _fastsetimmediateexternal.DANGEROUSLY_runPendingImmediatesAfterCurrentTask)();
|
||||
oneResult = one();
|
||||
} catch (err) {
|
||||
clearTimeout(twoId);
|
||||
clearTimeout(threeId);
|
||||
clearTimeout(fourId);
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
let twoResult;
|
||||
const twoId = scheduleTimeout(()=>{
|
||||
// if `one` threw, then this timeout would've been cleared,
|
||||
// so if we got here, we're guaranteed to have a value.
|
||||
try {
|
||||
(0, _fastsetimmediateexternal.DANGEROUSLY_runPendingImmediatesAfterCurrentTask)();
|
||||
twoResult = two(oneResult);
|
||||
} catch (err) {
|
||||
clearTimeout(threeId);
|
||||
clearTimeout(fourId);
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
let threeResult;
|
||||
const threeId = scheduleTimeout(()=>{
|
||||
// if `two` threw, then this timeout would've been cleared,
|
||||
// so if we got here, we're guaranteed to have a value.
|
||||
try {
|
||||
(0, _fastsetimmediateexternal.expectNoPendingImmediates)();
|
||||
threeResult = three(twoResult);
|
||||
} catch (err) {
|
||||
clearTimeout(fourId);
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
// We wait a task before resolving/rejecting
|
||||
const fourId = scheduleTimeout(()=>{
|
||||
resolve(threeResult);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=app-render-render-utils.js.map
|
||||
1
apps/public-web/node_modules/next/dist/server/app-render/app-render-render-utils.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
apps/public-web/node_modules/next/dist/server/app-render/app-render-scheduling.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/**
|
||||
* Allows scheduling multiple timers (equivalent to `setTimeout(cb, delayMs)`)
|
||||
* that are guaranteed to run in the same iteration of the event loop.
|
||||
*
|
||||
* @param delayMs - the delay to pass to `setTimeout`. (default: 0)
|
||||
*
|
||||
* */
|
||||
export declare function createAtomicTimerGroup(delayMs?: number): (callback: () => void) => NodeJS.Timeout;
|
||||
188
apps/public-web/node_modules/next/dist/server/app-render/app-render-scheduling.js
generated
vendored
Normal file
@@ -0,0 +1,188 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "createAtomicTimerGroup", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return createAtomicTimerGroup;
|
||||
}
|
||||
});
|
||||
const _invarianterror = require("../../shared/lib/invariant-error");
|
||||
const _fastsetimmediateexternal = require("../node-environment-extensions/fast-set-immediate.external");
|
||||
/*
|
||||
==========================
|
||||
| Background |
|
||||
==========================
|
||||
|
||||
Node.js does not guarantee that two timers scheduled back to back will run
|
||||
on the same iteration of the event loop:
|
||||
|
||||
```ts
|
||||
setTimeout(one, 0)
|
||||
setTimeout(two, 0)
|
||||
```
|
||||
|
||||
Internally, each timer is assigned a `_idleStart` property that holds
|
||||
an internal libuv timestamp in millisecond resolution.
|
||||
This will be used to determine if the timer is already "expired" and should be executed.
|
||||
However, even in sync code, it's possible for two timers to get different `_idleStart` values.
|
||||
This can cause one of the timers to be executed, and the other to be delayed until the next timer phase.
|
||||
|
||||
The delaying happens [here](https://github.com/nodejs/node/blob/c208ffc66bb9418ff026c4e3fa82e5b4387bd147/lib/internal/timers.js#L556-L564).
|
||||
and can be debugged by running node with `NODE_DEBUG=timer`.
|
||||
|
||||
The easiest way to observe it is to run this program in a loop until it exits with status 1:
|
||||
|
||||
```
|
||||
// test.js
|
||||
|
||||
let immediateRan = false
|
||||
const t1 = setTimeout(() => {
|
||||
console.log('timeout 1')
|
||||
setImmediate(() => {
|
||||
console.log('immediate 1')
|
||||
immediateRan = true
|
||||
})
|
||||
})
|
||||
|
||||
const t2 = setTimeout(() => {
|
||||
console.log('timeout 2')
|
||||
if (immediateRan) {
|
||||
console.log('immediate ran before the second timeout!')
|
||||
console.log(
|
||||
`t1._idleStart: ${t1._idleStart}, t2._idleStart: ${t2._idleStart}`
|
||||
);
|
||||
process.exit(1)
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
```bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
i=1;
|
||||
while true; do
|
||||
output="$(NODE_DEBUG=timer node test.js 2>&1)";
|
||||
if [ "$?" -eq 1 ]; then
|
||||
echo "failed after $i iterations";
|
||||
echo "$output";
|
||||
break;
|
||||
fi;
|
||||
i=$((i+1));
|
||||
done
|
||||
```
|
||||
|
||||
If `t2` is deferred to the next iteration of the event loop,
|
||||
then the immediate scheduled from inside `t1` will run first.
|
||||
When this occurs, `_idleStart` is reliably different between `t1` and `t2`.
|
||||
|
||||
==========================
|
||||
| Solution |
|
||||
==========================
|
||||
|
||||
We can guarantee that multiple timers (with the same delay, usually `0`)
|
||||
run together without any delays by making sure that their `_idleStart`s are the same,
|
||||
because that's what's used to determine if a timer should be deferred or not.
|
||||
Luckily, this property is currently exposed to userland and mutable,
|
||||
so we can patch it.
|
||||
|
||||
Another related trick we could potentially apply is making
|
||||
a timer immediately be considered expired by doing `timer._idleStart -= 2`.
|
||||
(the value must be more than `1`, the delay that actually gets set for `setTimeout(cb, 0)`).
|
||||
This makes node view this timer as "a 1ms timer scheduled 2ms ago",
|
||||
meaning that it should definitely run in the next timer phase.
|
||||
However, I'm not confident we know all the side effects of doing this,
|
||||
so for now, simply ensuring coordination is enough.
|
||||
*/ let shouldAttemptPatching = true;
|
||||
function warnAboutTimers() {
|
||||
console.warn("Next.js cannot guarantee that Cache Components will run as expected due to the current runtime's implementation of `setTimeout()`.\nPlease report a github issue here: https://github.com/vercel/next.js/issues/new/");
|
||||
}
|
||||
function createAtomicTimerGroup(delayMs = 0) {
|
||||
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('createAtomicTimerGroup cannot be called in the edge runtime'), "__NEXT_ERROR_CODE", {
|
||||
value: "E934",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else {
|
||||
let isFirstCallback = true;
|
||||
let firstTimerIdleStart = null;
|
||||
let didFirstTimerRun = false;
|
||||
// As a sanity check, we schedule an immediate from the first timeout
|
||||
// to check if the execution was interrupted (i.e. if it ran between the timeouts).
|
||||
// Note that we're deliberately bypassing the "fast setImmediate" patch here --
|
||||
// otherwise, this check would always fail, because the immediate
|
||||
// would always run before the second timeout.
|
||||
let didImmediateRun = false;
|
||||
function runFirstCallback(callback) {
|
||||
didFirstTimerRun = true;
|
||||
if (shouldAttemptPatching) {
|
||||
(0, _fastsetimmediateexternal.unpatchedSetImmediate)(()=>{
|
||||
didImmediateRun = true;
|
||||
});
|
||||
}
|
||||
return callback();
|
||||
}
|
||||
function runSubsequentCallback(callback) {
|
||||
if (shouldAttemptPatching) {
|
||||
if (didImmediateRun) {
|
||||
// If the immediate managed to run between the timers, then we're not
|
||||
// able to provide the guarantees that we're supposed to
|
||||
shouldAttemptPatching = false;
|
||||
warnAboutTimers();
|
||||
}
|
||||
}
|
||||
return callback();
|
||||
}
|
||||
return function scheduleTimeout(callback) {
|
||||
if (didFirstTimerRun) {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('Cannot schedule more timers into a group that already executed'), "__NEXT_ERROR_CODE", {
|
||||
value: "E935",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const timer = setTimeout(isFirstCallback ? runFirstCallback : runSubsequentCallback, delayMs, callback);
|
||||
isFirstCallback = false;
|
||||
if (!shouldAttemptPatching) {
|
||||
// We already tried patching some timers, and it didn't work.
|
||||
// No point trying again.
|
||||
return timer;
|
||||
}
|
||||
// NodeJS timers have a `_idleStart` property, but it doesn't exist e.g. in Bun.
|
||||
// If it's not present, we'll warn and try to continue.
|
||||
try {
|
||||
if ('_idleStart' in timer && typeof timer._idleStart === 'number') {
|
||||
// If this is the first timer that was scheduled, save its `_idleStart`.
|
||||
// We'll copy it onto subsequent timers to guarantee that they'll all be
|
||||
// considered expired in the same iteration of the event loop
|
||||
// and thus will all be executed in the same timer phase.
|
||||
if (firstTimerIdleStart === null) {
|
||||
firstTimerIdleStart = timer._idleStart;
|
||||
} else {
|
||||
timer._idleStart = firstTimerIdleStart;
|
||||
}
|
||||
} else {
|
||||
shouldAttemptPatching = false;
|
||||
warnAboutTimers();
|
||||
}
|
||||
} catch (err) {
|
||||
// This should never fail in current Node, but it might start failing in the future.
|
||||
// We might be okay even without tweaking the timers, so warn and try to continue.
|
||||
console.error(Object.defineProperty(new _invarianterror.InvariantError('An unexpected error occurred while adjusting `_idleStart` on an atomic timer', {
|
||||
cause: err
|
||||
}), "__NEXT_ERROR_CODE", {
|
||||
value: "E933",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
}));
|
||||
shouldAttemptPatching = false;
|
||||
warnAboutTimers();
|
||||
}
|
||||
return timer;
|
||||
};
|
||||
}
|
||||
}
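A hedged usage sketch, assuming `createAtomicTimerGroup` is imported from this module; the only guarantee demonstrated is that nothing can interleave between timers scheduled through the same group.

```js
const scheduleTimeout = createAtomicTimerGroup()

setImmediate(() => console.log('immediate'))
scheduleTimeout(() => console.log('timer A'))
scheduleTimeout(() => console.log('timer B'))

// 'timer A' and 'timer B' always run back to back in the same timer phase;
// the immediate may run before or after the pair, but never between them.
```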
|
||||
|
||||
//# sourceMappingURL=app-render-scheduling.js.map
|
||||
1
apps/public-web/node_modules/next/dist/server/app-render/app-render-scheduling.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
85
apps/public-web/node_modules/next/dist/server/app-render/app-render.d.ts
generated
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
import type { RenderOpts, PreloadCallbacks } from './types';
|
||||
import type { ActionResult, DynamicParamTypesShort, FlightRouterState, Segment, CacheNodeSeedData } from '../../shared/lib/app-router-types';
|
||||
import { type WorkStore } from '../app-render/work-async-storage.external';
|
||||
import type { RequestStore } from '../app-render/work-unit-async-storage.external';
|
||||
import type { NextParsedUrlQuery } from '../request-meta';
|
||||
import type { AppPageModule } from '../route-modules/app-page/module';
|
||||
import type { BaseNextRequest, BaseNextResponse } from '../base-http';
|
||||
import RenderResult, { type AppPageRenderResultMetadata } from '../render-result';
|
||||
import { type ImplicitTags } from '../lib/implicit-tags';
|
||||
import { parseRelativeUrl } from '../../shared/lib/router/utils/parse-relative-url';
|
||||
import type { ServerComponentsHmrCache } from '../response-cache';
|
||||
import type { OpaqueFallbackRouteParams } from '../request/fallback-params';
|
||||
export type GetDynamicParamFromSegment = (segment: string) => DynamicParam | null;
|
||||
export type DynamicParam = {
|
||||
param: string;
|
||||
value: string | string[] | null;
|
||||
treeSegment: Segment;
|
||||
type: DynamicParamTypesShort;
|
||||
};
|
||||
export type GenerateFlight = typeof generateDynamicFlightRenderResult;
|
||||
export type AppSharedContext = {
|
||||
buildId: string;
|
||||
};
|
||||
export type AppRenderContext = {
|
||||
sharedContext: AppSharedContext;
|
||||
workStore: WorkStore;
|
||||
url: ReturnType<typeof parseRelativeUrl>;
|
||||
componentMod: AppPageModule;
|
||||
renderOpts: RenderOpts;
|
||||
parsedRequestHeaders: ParsedRequestHeaders;
|
||||
getDynamicParamFromSegment: GetDynamicParamFromSegment;
|
||||
query: NextParsedUrlQuery;
|
||||
isPrefetch: boolean;
|
||||
isPossibleServerAction: boolean;
|
||||
requestTimestamp: number;
|
||||
appUsingSizeAdjustment: boolean;
|
||||
flightRouterState?: FlightRouterState;
|
||||
requestId: string;
|
||||
htmlRequestId: string;
|
||||
pagePath: string;
|
||||
assetPrefix: string;
|
||||
isNotFoundPath: boolean;
|
||||
nonce: string | undefined;
|
||||
res: BaseNextResponse;
|
||||
/**
|
||||
* For now, the implicit tags are common for the whole route. If we ever start
|
||||
* rendering/revalidating segments independently, they need to move to the
|
||||
* work unit store.
|
||||
*/
|
||||
implicitTags: ImplicitTags;
|
||||
};
|
||||
interface ParsedRequestHeaders {
|
||||
/**
|
||||
* Router state provided from the client-side router. Used to handle rendering
|
||||
* from the common layout down. This value will be undefined if the request is
|
||||
* not a client-side navigation request, or if the request is a prefetch
|
||||
* request.
|
||||
*/
|
||||
readonly flightRouterState: FlightRouterState | undefined;
|
||||
readonly isPrefetchRequest: boolean;
|
||||
readonly isRuntimePrefetchRequest: boolean;
|
||||
readonly isRouteTreePrefetchRequest: boolean;
|
||||
readonly isHmrRefresh: boolean;
|
||||
readonly isRSCRequest: boolean;
|
||||
readonly nonce: string | undefined;
|
||||
readonly previouslyRevalidatedTags: string[];
|
||||
readonly requestId: string | undefined;
|
||||
readonly htmlRequestId: string | undefined;
|
||||
}
|
||||
/**
|
||||
* Produces a RenderResult containing the Flight data for the given request. See
|
||||
* `generateDynamicRSCPayload` for information on the contents of the render result.
|
||||
*/
|
||||
declare function generateDynamicFlightRenderResult(req: BaseNextRequest, ctx: AppRenderContext, requestStore: RequestStore, options?: {
|
||||
actionResult: ActionResult;
|
||||
skipPageRendering: boolean;
|
||||
componentTree?: CacheNodeSeedData;
|
||||
preloadCallbacks?: PreloadCallbacks;
|
||||
temporaryReferences?: WeakMap<any, string>;
|
||||
waitUntil?: Promise<unknown>;
|
||||
}): Promise<RenderResult>;
|
||||
export type BinaryStreamOf<T> = ReadableStream<Uint8Array>;
|
||||
export type AppPageRender = (req: BaseNextRequest, res: BaseNextResponse, pagePath: string, query: NextParsedUrlQuery, fallbackRouteParams: OpaqueFallbackRouteParams | null, renderOpts: RenderOpts, serverComponentsHmrCache: ServerComponentsHmrCache | undefined, sharedContext: AppSharedContext) => Promise<RenderResult<AppPageRenderResultMetadata>>;
|
||||
export declare const renderToHTMLOrFlight: AppPageRender;
|
||||
export {};
|
||||
3526
apps/public-web/node_modules/next/dist/server/app-render/app-render.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
apps/public-web/node_modules/next/dist/server/app-render/app-render.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
apps/public-web/node_modules/next/dist/server/app-render/async-local-storage.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
import type { AsyncLocalStorage } from 'async_hooks';
export declare function createAsyncLocalStorage<Store extends {}>(): AsyncLocalStorage<Store>;
export declare function bindSnapshot<T>(fn: T): T;
export declare function createSnapshot(): <R, TArgs extends any[]>(fn: (...args: TArgs) => R, ...args: TArgs) => R;
76
apps/public-web/node_modules/next/dist/server/app-render/async-local-storage.js
generated
vendored
Normal file
@@ -0,0 +1,76 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
bindSnapshot: null,
|
||||
createAsyncLocalStorage: null,
|
||||
createSnapshot: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
bindSnapshot: function() {
|
||||
return bindSnapshot;
|
||||
},
|
||||
createAsyncLocalStorage: function() {
|
||||
return createAsyncLocalStorage;
|
||||
},
|
||||
createSnapshot: function() {
|
||||
return createSnapshot;
|
||||
}
|
||||
});
|
||||
const sharedAsyncLocalStorageNotAvailableError = Object.defineProperty(new Error('Invariant: AsyncLocalStorage accessed in runtime where it is not available'), "__NEXT_ERROR_CODE", {
|
||||
value: "E504",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
class FakeAsyncLocalStorage {
|
||||
disable() {
|
||||
throw sharedAsyncLocalStorageNotAvailableError;
|
||||
}
|
||||
getStore() {
|
||||
// This fake implementation of AsyncLocalStorage always returns `undefined`.
|
||||
return undefined;
|
||||
}
|
||||
run() {
|
||||
throw sharedAsyncLocalStorageNotAvailableError;
|
||||
}
|
||||
exit() {
|
||||
throw sharedAsyncLocalStorageNotAvailableError;
|
||||
}
|
||||
enterWith() {
|
||||
throw sharedAsyncLocalStorageNotAvailableError;
|
||||
}
|
||||
static bind(fn) {
|
||||
return fn;
|
||||
}
|
||||
}
|
||||
const maybeGlobalAsyncLocalStorage = typeof globalThis !== 'undefined' && globalThis.AsyncLocalStorage;
|
||||
function createAsyncLocalStorage() {
|
||||
if (maybeGlobalAsyncLocalStorage) {
|
||||
return new maybeGlobalAsyncLocalStorage();
|
||||
}
|
||||
return new FakeAsyncLocalStorage();
|
||||
}
|
||||
function bindSnapshot(// WARNING: Don't pass a named function to this argument! See: https://github.com/facebook/react/pull/34911
|
||||
fn) {
|
||||
if (maybeGlobalAsyncLocalStorage) {
|
||||
return maybeGlobalAsyncLocalStorage.bind(fn);
|
||||
}
|
||||
return FakeAsyncLocalStorage.bind(fn);
|
||||
}
|
||||
function createSnapshot() {
|
||||
if (maybeGlobalAsyncLocalStorage) {
|
||||
return maybeGlobalAsyncLocalStorage.snapshot();
|
||||
}
|
||||
return function(fn, ...args) {
|
||||
return fn(...args);
|
||||
};
|
||||
}
|
||||
|
||||
//# sourceMappingURL=async-local-storage.js.map
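createAsyncLocalStorage above returns the real AsyncLocalStorage when the runtime exposes it on globalThis, and otherwise a stub that throws on use. A minimal usage sketch, assuming a Node.js runtime and a hypothetical RequestScope store shape (not part of Next.js itself):

// Minimal usage sketch; RequestScope is an illustrative store type.
import { createAsyncLocalStorage } from 'next/dist/server/app-render/async-local-storage'

type RequestScope = { requestId: string }

const storage = createAsyncLocalStorage<RequestScope>()

// run() establishes the store for everything (sync or async) called inside the callback.
storage.run({ requestId: 'abc123' }, () => {
  // Anywhere down the call stack, getStore() returns the active store (or undefined).
  console.log(storage.getStore()?.requestId) // 'abc123'
})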
1
apps/public-web/node_modules/next/dist/server/app-render/async-local-storage.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/async-local-storage.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\nconst sharedAsyncLocalStorageNotAvailableError = new Error(\n 'Invariant: AsyncLocalStorage accessed in runtime where it is not available'\n)\n\nclass FakeAsyncLocalStorage<Store extends {}>\n implements AsyncLocalStorage<Store>\n{\n disable(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n getStore(): Store | undefined {\n // This fake implementation of AsyncLocalStorage always returns `undefined`.\n return undefined\n }\n\n run<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n exit<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n enterWith(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n static bind<T>(fn: T): T {\n return fn\n }\n}\n\nconst maybeGlobalAsyncLocalStorage =\n typeof globalThis !== 'undefined' && (globalThis as any).AsyncLocalStorage\n\nexport function createAsyncLocalStorage<\n Store extends {},\n>(): AsyncLocalStorage<Store> {\n if (maybeGlobalAsyncLocalStorage) {\n return new maybeGlobalAsyncLocalStorage()\n }\n return new FakeAsyncLocalStorage()\n}\n\nexport function bindSnapshot<T>(\n // WARNING: Don't pass a named function to this argument! See: https://github.com/facebook/react/pull/34911\n fn: T\n): T {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.bind(fn)\n }\n return FakeAsyncLocalStorage.bind(fn)\n}\n\nexport function createSnapshot(): <R, TArgs extends any[]>(\n fn: (...args: TArgs) => R,\n ...args: TArgs\n) => R {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.snapshot()\n }\n return function (fn: any, ...args: any[]) {\n return fn(...args)\n }\n}\n"],"names":["bindSnapshot","createAsyncLocalStorage","createSnapshot","sharedAsyncLocalStorageNotAvailableError","Error","FakeAsyncLocalStorage","disable","getStore","undefined","run","exit","enterWith","bind","fn","maybeGlobalAsyncLocalStorage","globalThis","AsyncLocalStorage","snapshot","args"],"mappings":";;;;;;;;;;;;;;;;IA+CgBA,YAAY;eAAZA;;IATAC,uBAAuB;eAAvBA;;IAmBAC,cAAc;eAAdA;;;AAvDhB,MAAMC,2CAA2C,qBAEhD,CAFgD,IAAIC,MACnD,+EAD+C,qBAAA;WAAA;gBAAA;kBAAA;AAEjD;AAEA,MAAMC;IAGJC,UAAgB;QACd,MAAMH;IACR;IAEAI,WAA8B;QAC5B,4EAA4E;QAC5E,OAAOC;IACT;IAEAC,MAAY;QACV,MAAMN;IACR;IAEAO,OAAa;QACX,MAAMP;IACR;IAEAQ,YAAkB;QAChB,MAAMR;IACR;IAEA,OAAOS,KAAQC,EAAK,EAAK;QACvB,OAAOA;IACT;AACF;AAEA,MAAMC,+BACJ,OAAOC,eAAe,eAAe,AAACA,WAAmBC,iBAAiB;AAErE,SAASf;IAGd,IAAIa,8BAA8B;QAChC,OAAO,IAAIA;IACb;IACA,OAAO,IAAIT;AACb;AAEO,SAASL,aACd,2GAA2G;AAC3Ga,EAAK;IAEL,IAAIC,8BAA8B;QAChC,OAAOA,6BAA6BF,IAAI,CAACC;IAC3C;IACA,OAAOR,sBAAsBO,IAAI,CAACC;AACpC;AAEO,SAASX;IAId,IAAIY,8BAA8B;QAChC,OAAOA,6BAA6BG,QAAQ;IAC9C;IACA,OAAO,SAAUJ,EAAO,EAAE,GAAGK,IAAW;QACtC,OAAOL,MAAMK;IACf;AACF","ignoreList":[0]}
34
apps/public-web/node_modules/next/dist/server/app-render/cache-signal.d.ts
generated
vendored
Normal file
@@ -0,0 +1,34 @@
/**
 * This class is used to detect when all cache reads for a given render are settled.
 * We do this to allow for cache warming the prerender without having to continue rendering
 * the remainder of the page. This feature is really only useful when the cacheComponents flag is on
 * and should only be used in codepaths gated with this feature.
 */
export declare class CacheSignal {
    private count;
    private earlyListeners;
    private listeners;
    private tickPending;
    private pendingTimeoutCleanup;
    private subscribedSignals;
    constructor();
    private noMorePendingCaches;
    private invokeListenersIfNoPendingReads;
    /**
     * This promise waits until there are no more in progress cache reads but no later.
     * This allows for adding more cache reads after to delay cacheReady.
     */
    inputReady(): Promise<void>;
    /**
     * If there are inflight cache reads this Promise can resolve in a microtask however
     * if there are no inflight cache reads then we wait at least one task to allow initial
     * cache reads to be initiated.
     */
    cacheReady(): Promise<void>;
    beginRead(): void;
    endRead(): void;
    hasPendingReads(): boolean;
    trackRead<T>(promise: Promise<T>): Promise<T>;
    subscribeToReads(subscriber: CacheSignal): () => void;
    unsubscribeFromReads(subscriber: CacheSignal): void;
}
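Based on these declarations, the intended pattern is to wrap each cache read in trackRead() (or pair beginRead()/endRead() manually) and await cacheReady() once every read has settled. A minimal warming-pass sketch, assuming a Node.js runtime (the implementation below throws in the edge runtime) and a hypothetical loadFromCache helper:

// Hypothetical warming pass; loadFromCache() is an assumed placeholder, not a Next.js API.
import { CacheSignal } from 'next/dist/server/app-render/cache-signal'

async function warmCaches(keys: string[], loadFromCache: (key: string) => Promise<unknown>) {
  const signal = new CacheSignal()
  for (const key of keys) {
    // trackRead() increments the pending-read count and decrements it when the promise settles.
    void signal.trackRead(loadFromCache(key))
  }
  // Resolves once no reads are in flight (after at least one task, so follow-up reads can still register).
  await signal.cacheReady()
}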
181
apps/public-web/node_modules/next/dist/server/app-render/cache-signal.js
generated
vendored
Normal file
@@ -0,0 +1,181 @@
/**
|
||||
* This class is used to detect when all cache reads for a given render are settled.
|
||||
* We do this to allow for cache warming the prerender without having to continue rendering
|
||||
* the remainder of the page. This feature is really only useful when the cacheComponents flag is on
|
||||
* and should only be used in codepaths gated with this feature.
|
||||
*/ "use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "CacheSignal", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return CacheSignal;
|
||||
}
|
||||
});
|
||||
const _invarianterror = require("../../shared/lib/invariant-error");
|
||||
class CacheSignal {
|
||||
constructor(){
|
||||
this.count = 0;
|
||||
this.earlyListeners = [];
|
||||
this.listeners = [];
|
||||
this.tickPending = false;
|
||||
this.pendingTimeoutCleanup = null;
|
||||
this.subscribedSignals = null;
|
||||
this.invokeListenersIfNoPendingReads = ()=>{
|
||||
this.pendingTimeoutCleanup = null;
|
||||
if (this.count === 0) {
|
||||
for(let i = 0; i < this.listeners.length; i++){
|
||||
this.listeners[i]();
|
||||
}
|
||||
this.listeners.length = 0;
|
||||
}
|
||||
};
|
||||
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||
// we rely on `process.nextTick`, which is not supported in edge
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('CacheSignal cannot be used in the edge runtime, because `cacheComponents` does not support it.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E728",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
noMorePendingCaches() {
|
||||
if (!this.tickPending) {
|
||||
this.tickPending = true;
|
||||
queueMicrotask(()=>process.nextTick(()=>{
|
||||
this.tickPending = false;
|
||||
if (this.count === 0) {
|
||||
for(let i = 0; i < this.earlyListeners.length; i++){
|
||||
this.earlyListeners[i]();
|
||||
}
|
||||
this.earlyListeners.length = 0;
|
||||
}
|
||||
}));
|
||||
}
|
||||
// After a cache resolves, React will schedule new rendering work:
|
||||
// - in a microtask (when prerendering)
|
||||
// - in setImmediate (when rendering)
|
||||
// To cover both of these, we have to make sure that we let immediates execute at least once after each cache resolved.
|
||||
// We don't know when the pending timeout was scheduled (and if it's about to resolve),
|
||||
// so by scheduling a new one, we can be sure that we'll go around the event loop at least once.
|
||||
if (this.pendingTimeoutCleanup) {
|
||||
// We cancel the timeout in beginRead, so this shouldn't ever be active here,
|
||||
// but we still cancel it defensively.
|
||||
this.pendingTimeoutCleanup();
|
||||
}
|
||||
this.pendingTimeoutCleanup = scheduleImmediateAndTimeoutWithCleanup(this.invokeListenersIfNoPendingReads);
|
||||
}
|
||||
/**
|
||||
* This promise waits until there are no more in progress cache reads but no later.
|
||||
* This allows for adding more cache reads after to delay cacheReady.
|
||||
*/ inputReady() {
|
||||
return new Promise((resolve)=>{
|
||||
this.earlyListeners.push(resolve);
|
||||
if (this.count === 0) {
|
||||
this.noMorePendingCaches();
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* If there are inflight cache reads this Promise can resolve in a microtask however
|
||||
* if there are no inflight cache reads then we wait at least one task to allow initial
|
||||
* cache reads to be initiated.
|
||||
*/ cacheReady() {
|
||||
return new Promise((resolve)=>{
|
||||
this.listeners.push(resolve);
|
||||
if (this.count === 0) {
|
||||
this.noMorePendingCaches();
|
||||
}
|
||||
});
|
||||
}
|
||||
beginRead() {
|
||||
this.count++;
|
||||
// There's a new pending cache, so if there's a `noMorePendingCaches` timeout running,
|
||||
// we should cancel it.
|
||||
if (this.pendingTimeoutCleanup) {
|
||||
this.pendingTimeoutCleanup();
|
||||
this.pendingTimeoutCleanup = null;
|
||||
}
|
||||
if (this.subscribedSignals !== null) {
|
||||
for (const subscriber of this.subscribedSignals){
|
||||
subscriber.beginRead();
|
||||
}
|
||||
}
|
||||
}
|
||||
endRead() {
|
||||
if (this.count === 0) {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('CacheSignal got more endRead() calls than beginRead() calls'), "__NEXT_ERROR_CODE", {
|
||||
value: "E678",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
// If this is the last read we need to wait a task before we can claim the cache is settled.
|
||||
// The cache read will likely ping a Server Component which can read from the cache again and this
|
||||
// will play out in a microtask so we need to only resolve pending listeners if we're still at 0
|
||||
// after at least one task.
|
||||
// We only want one task scheduled at a time so when we hit count 1 we don't decrement the counter immediately.
|
||||
// If intervening reads happen before the scheduled task runs they will never observe count 1 preventing reentrancy
|
||||
this.count--;
|
||||
if (this.count === 0) {
|
||||
this.noMorePendingCaches();
|
||||
}
|
||||
if (this.subscribedSignals !== null) {
|
||||
for (const subscriber of this.subscribedSignals){
|
||||
subscriber.endRead();
|
||||
}
|
||||
}
|
||||
}
|
||||
hasPendingReads() {
|
||||
return this.count > 0;
|
||||
}
|
||||
trackRead(promise) {
|
||||
this.beginRead();
|
||||
// `promise.finally()` still rejects, so don't use it here to avoid unhandled rejections
|
||||
const onFinally = this.endRead.bind(this);
|
||||
promise.then(onFinally, onFinally);
|
||||
return promise;
|
||||
}
|
||||
subscribeToReads(subscriber) {
|
||||
if (subscriber === this) {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('A CacheSignal cannot subscribe to itself'), "__NEXT_ERROR_CODE", {
|
||||
value: "E679",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
if (this.subscribedSignals === null) {
|
||||
this.subscribedSignals = new Set();
|
||||
}
|
||||
this.subscribedSignals.add(subscriber);
|
||||
// we'll notify the subscriber of each endRead() on this signal,
|
||||
// so we need to give it a corresponding beginRead() for each read we have in flight now.
|
||||
for(let i = 0; i < this.count; i++){
|
||||
subscriber.beginRead();
|
||||
}
|
||||
return this.unsubscribeFromReads.bind(this, subscriber);
|
||||
}
|
||||
unsubscribeFromReads(subscriber) {
|
||||
if (!this.subscribedSignals) {
|
||||
return;
|
||||
}
|
||||
this.subscribedSignals.delete(subscriber);
|
||||
// we don't need to set the set back to `null` if it's empty --
|
||||
// if other signals are subscribing to this one, it'll likely get more subscriptions later,
|
||||
// so we'd have to allocate a fresh set again when that happens.
|
||||
}
|
||||
}
|
||||
function scheduleImmediateAndTimeoutWithCleanup(cb) {
|
||||
// If we decide to clean up the timeout, we want to remove
|
||||
// either the immediate or the timeout, whichever is still pending.
|
||||
let clearPending;
|
||||
const immediate = setImmediate(()=>{
|
||||
const timeout = setTimeout(cb, 0);
|
||||
clearPending = clearTimeout.bind(null, timeout);
|
||||
});
|
||||
clearPending = clearImmediate.bind(null, immediate);
|
||||
return ()=>clearPending();
|
||||
}
|
||||
|
||||
//# sourceMappingURL=cache-signal.js.map
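The scheduleImmediateAndTimeoutWithCleanup helper exists because, after a cache settles, React may schedule follow-up work either in a microtask or via setImmediate, so the signal waits for an immediate followed by a zero-delay timeout to guarantee at least one full turn of the event loop before notifying listeners. The same pattern in isolation, using only plain Node.js timers (the helper name below is illustrative):

// Generic "run after at least one full event-loop turn, with cancellation" helper.
function afterOneLoopTurn(cb: () => void): () => void {
  let cancel: () => void = () => {}
  const immediate = setImmediate(() => {
    const timeout = setTimeout(cb, 0)
    cancel = () => clearTimeout(timeout)
  })
  cancel = () => clearImmediate(immediate)
  // The returned function cancels whichever stage is still pending.
  return () => cancel()
}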
1
apps/public-web/node_modules/next/dist/server/app-render/cache-signal.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
26
apps/public-web/node_modules/next/dist/server/app-render/collect-segment-data.d.ts
generated
vendored
Normal file
@@ -0,0 +1,26 @@
import type { DynamicParamTypesShort, LoadingModuleData } from '../../shared/lib/app-router-types';
import type { ManifestNode } from '../../build/webpack/plugins/flight-manifest-plugin';
import { type SegmentRequestKey } from '../../shared/lib/segment-cache/segment-value-encoding';
export type RootTreePrefetch = {
    buildId: string;
    tree: TreePrefetch;
    staleTime: number;
};
export type TreePrefetch = {
    name: string;
    paramType: DynamicParamTypesShort | null;
    paramKey: string | null;
    slots: null | {
        [parallelRouteKey: string]: TreePrefetch;
    };
    /** Whether this segment should be fetched using a runtime prefetch */
    hasRuntimePrefetch: boolean;
    isRootLayout: boolean;
};
export type SegmentPrefetch = {
    buildId: string;
    rsc: React.ReactNode | null;
    loading: LoadingModuleData | Promise<LoadingModuleData>;
    isPartial: boolean;
};
export declare function collectSegmentData(isCacheComponentsEnabled: boolean, fullPageDataBuffer: Buffer, staleTime: number, clientModules: ManifestNode, serverConsumerManifest: any): Promise<Map<SegmentRequestKey, Buffer>>;
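For orientation, a RootTreePrefetch for a simple two-segment route might look like the following hypothetical value; only the shape comes from the types above, while the segment names, buildId and staleTime are made up:

// Hypothetical example value of the tree prefetch written to the '/_tree' entry.
const exampleTree: RootTreePrefetch = {
  buildId: 'build-abc123',
  staleTime: 30,
  tree: {
    name: '',                 // assumed root layout segment name
    paramType: null,
    paramKey: null,
    hasRuntimePrefetch: false,
    isRootLayout: true,
    slots: {
      children: {
        name: 'blog',
        paramType: null,
        paramKey: null,
        hasRuntimePrefetch: false,
        isRootLayout: false,
        slots: null,
      },
    },
  },
}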
276
apps/public-web/node_modules/next/dist/server/app-render/collect-segment-data.js
generated
vendored
Normal file
@@ -0,0 +1,276 @@
/* eslint-disable @next/internal/no-ambiguous-jsx -- Bundled in entry-base so it gets the right JSX runtime. */ "use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "collectSegmentData", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return collectSegmentData;
|
||||
}
|
||||
});
|
||||
const _jsxruntime = require("react/jsx-runtime");
|
||||
const _client = require("react-server-dom-webpack/client");
|
||||
const _static = require("react-server-dom-webpack/static");
|
||||
const _nodewebstreamshelper = require("../stream-utils/node-web-streams-helper");
|
||||
const _scheduler = require("../../lib/scheduler");
|
||||
const _segmentvalueencoding = require("../../shared/lib/segment-cache/segment-value-encoding");
|
||||
const _createerrorhandler = require("./create-error-handler");
|
||||
const _prospectiverenderutils = require("./prospective-render-utils");
|
||||
const _workasyncstorageexternal = require("./work-async-storage.external");
|
||||
const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').filterStackFrameDEV : undefined;
|
||||
const findSourceMapURL = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').findSourceMapURLDEV : undefined;
|
||||
function onSegmentPrerenderError(error) {
|
||||
const digest = (0, _createerrorhandler.getDigestForWellKnownError)(error);
|
||||
if (digest) {
|
||||
return digest;
|
||||
}
|
||||
// We don't need to log the errors because we would have already done that
|
||||
// when generating the original Flight stream for the whole page.
|
||||
if (process.env.NEXT_DEBUG_BUILD || process.env.__NEXT_VERBOSE_LOGGING) {
|
||||
const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
|
||||
(0, _prospectiverenderutils.printDebugThrownValueForProspectiveRender)(error, (workStore == null ? void 0 : workStore.route) ?? 'unknown route', _prospectiverenderutils.Phase.SegmentCollection);
|
||||
}
|
||||
}
|
||||
async function collectSegmentData(isCacheComponentsEnabled, fullPageDataBuffer, staleTime, clientModules, serverConsumerManifest) {
|
||||
// Traverse the router tree and generate a prefetch response for each segment.
|
||||
// A mutable map to collect the results as we traverse the route tree.
|
||||
const resultMap = new Map();
|
||||
// Before we start, warm up the module cache by decoding the page data once.
|
||||
// Then we can assume that any remaining async tasks that occur the next time
|
||||
// are due to hanging promises caused by dynamic data access. Note we only
|
||||
// have to do this once per page, not per individual segment.
|
||||
//
|
||||
try {
|
||||
await (0, _client.createFromReadableStream)((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer), {
|
||||
findSourceMapURL,
|
||||
serverConsumerManifest
|
||||
});
|
||||
await (0, _scheduler.waitAtLeastOneReactRenderTask)();
|
||||
} catch {}
|
||||
// Create an abort controller that we'll use to stop the stream.
|
||||
const abortController = new AbortController();
|
||||
const onCompletedProcessingRouteTree = async ()=>{
|
||||
// Since all we're doing is decoding and re-encoding a cached prerender, if
|
||||
// serializing the stream takes longer than a microtask, it must be because of
|
||||
// hanging promises caused by dynamic data.
|
||||
await (0, _scheduler.waitAtLeastOneReactRenderTask)();
|
||||
abortController.abort();
|
||||
};
|
||||
// Generate a stream for the route tree prefetch. While we're walking the
|
||||
// tree, we'll also spawn additional tasks to generate the segment prefetches.
|
||||
// The promises for these tasks are pushed to a mutable array that we will
|
||||
// await once the route tree is fully rendered.
|
||||
const segmentTasks = [];
|
||||
const { prelude: treeStream } = await (0, _static.prerender)(// RootTreePrefetch is not a valid return type for a React component, but
|
||||
// we need to use a component so that when we decode the original stream
|
||||
// inside of it, the side effects are transferred to the new stream.
|
||||
// @ts-expect-error
|
||||
/*#__PURE__*/ (0, _jsxruntime.jsx)(PrefetchTreeData, {
|
||||
isClientParamParsingEnabled: isCacheComponentsEnabled,
|
||||
fullPageDataBuffer: fullPageDataBuffer,
|
||||
serverConsumerManifest: serverConsumerManifest,
|
||||
clientModules: clientModules,
|
||||
staleTime: staleTime,
|
||||
segmentTasks: segmentTasks,
|
||||
onCompletedProcessingRouteTree: onCompletedProcessingRouteTree
|
||||
}), clientModules, {
|
||||
filterStackFrame,
|
||||
signal: abortController.signal,
|
||||
onError: onSegmentPrerenderError
|
||||
});
|
||||
// Write the route tree to a special `/_tree` segment.
|
||||
const treeBuffer = await (0, _nodewebstreamshelper.streamToBuffer)(treeStream);
|
||||
resultMap.set('/_tree', treeBuffer);
|
||||
// Also output the entire full page data response
|
||||
resultMap.set('/_full', fullPageDataBuffer);
|
||||
// Now that we've finished rendering the route tree, all the segment tasks
|
||||
// should have been spawned. Await them in parallel and write the segment
|
||||
// prefetches to the result map.
|
||||
for (const [segmentPath, buffer] of (await Promise.all(segmentTasks))){
|
||||
resultMap.set(segmentPath, buffer);
|
||||
}
|
||||
return resultMap;
|
||||
}
|
||||
async function PrefetchTreeData({ isClientParamParsingEnabled, fullPageDataBuffer, serverConsumerManifest, clientModules, staleTime, segmentTasks, onCompletedProcessingRouteTree }) {
|
||||
// We're currently rendering a Flight response for the route tree prefetch.
|
||||
// Inside this component, decode the Flight stream for the whole page. This is
|
||||
// a hack to transfer the side effects from the original Flight stream (e.g.
|
||||
// Float preloads) onto the Flight stream for the tree prefetch.
|
||||
// TODO: React needs a better way to do this. Needed for Server Actions, too.
|
||||
const initialRSCPayload = await (0, _client.createFromReadableStream)(createUnclosingPrefetchStream((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer)), {
|
||||
findSourceMapURL,
|
||||
serverConsumerManifest
|
||||
});
|
||||
const buildId = initialRSCPayload.b;
|
||||
// FlightDataPath is an unsound type, hence the additional checks.
|
||||
const flightDataPaths = initialRSCPayload.f;
|
||||
if (flightDataPaths.length !== 1 && flightDataPaths[0].length !== 3) {
|
||||
console.error('Internal Next.js error: InitialRSCPayload does not match the expected ' + 'shape for a prerendered page during segment prefetch generation.');
|
||||
return null;
|
||||
}
|
||||
const flightRouterState = flightDataPaths[0][0];
|
||||
const seedData = flightDataPaths[0][1];
|
||||
const head = flightDataPaths[0][2];
|
||||
// Compute the route metadata tree by traversing the FlightRouterState. As we
|
||||
// walk the tree, we will also spawn a task to produce a prefetch response for
|
||||
// each segment.
|
||||
const tree = collectSegmentDataImpl(isClientParamParsingEnabled, flightRouterState, buildId, seedData, clientModules, _segmentvalueencoding.ROOT_SEGMENT_REQUEST_KEY, segmentTasks);
|
||||
// Also spawn a task to produce a prefetch response for the "head" segment.
|
||||
// The head contains metadata, like the title; it's not really a route
|
||||
// segment, but it contains RSC data, so it's treated like a segment by
|
||||
// the client cache.
|
||||
segmentTasks.push((0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>renderSegmentPrefetch(buildId, head, null, _segmentvalueencoding.HEAD_REQUEST_KEY, clientModules)));
|
||||
// Notify the abort controller that we're done processing the route tree.
|
||||
// Anything async that happens after this point must be due to hanging
|
||||
// promises in the original stream.
|
||||
onCompletedProcessingRouteTree();
|
||||
// Render the route tree to a special `/_tree` segment.
|
||||
const treePrefetch = {
|
||||
buildId,
|
||||
tree,
|
||||
staleTime
|
||||
};
|
||||
return treePrefetch;
|
||||
}
|
||||
function collectSegmentDataImpl(isClientParamParsingEnabled, route, buildId, seedData, clientModules, requestKey, segmentTasks) {
|
||||
// Metadata about the segment. Sent as part of the tree prefetch. Null if
|
||||
// there are no children.
|
||||
let slotMetadata = null;
|
||||
const children = route[1];
|
||||
const seedDataChildren = seedData !== null ? seedData[1] : null;
|
||||
for(const parallelRouteKey in children){
|
||||
const childRoute = children[parallelRouteKey];
|
||||
const childSegment = childRoute[0];
|
||||
const childSeedData = seedDataChildren !== null ? seedDataChildren[parallelRouteKey] : null;
|
||||
const childRequestKey = (0, _segmentvalueencoding.appendSegmentRequestKeyPart)(requestKey, parallelRouteKey, (0, _segmentvalueencoding.createSegmentRequestKeyPart)(childSegment));
|
||||
const childTree = collectSegmentDataImpl(isClientParamParsingEnabled, childRoute, buildId, childSeedData, clientModules, childRequestKey, segmentTasks);
|
||||
if (slotMetadata === null) {
|
||||
slotMetadata = {};
|
||||
}
|
||||
slotMetadata[parallelRouteKey] = childTree;
|
||||
}
|
||||
const hasRuntimePrefetch = seedData !== null ? seedData[4] : false;
|
||||
if (seedData !== null) {
|
||||
// Spawn a task to write the segment data to a new Flight stream.
|
||||
segmentTasks.push(// Since we're already in the middle of a render, wait until after the
|
||||
// current task to escape the current rendering context.
|
||||
(0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>renderSegmentPrefetch(buildId, seedData[0], seedData[2], requestKey, clientModules)));
|
||||
} else {
|
||||
// This segment does not have any seed data. Skip generating a prefetch
|
||||
// response for it. We'll still include it in the route tree, though.
|
||||
// TODO: We should encode in the route tree whether a segment is missing
|
||||
// so we don't attempt to fetch it for no reason. As of now this shouldn't
|
||||
// ever happen in practice, though.
|
||||
}
|
||||
const segment = route[0];
|
||||
let name;
|
||||
let paramType = null;
|
||||
let paramKey = null;
|
||||
if (typeof segment === 'string') {
|
||||
name = segment;
|
||||
paramKey = segment;
|
||||
paramType = null;
|
||||
} else {
|
||||
name = segment[0];
|
||||
paramKey = segment[1];
|
||||
paramType = segment[2];
|
||||
}
|
||||
// Metadata about the segment. Sent to the client as part of the
|
||||
// tree prefetch.
|
||||
return {
|
||||
name,
|
||||
paramType,
|
||||
// This value is omitted from the prefetch response when cacheComponents
|
||||
// is enabled.
|
||||
paramKey: isClientParamParsingEnabled ? null : paramKey,
|
||||
hasRuntimePrefetch,
|
||||
slots: slotMetadata,
|
||||
isRootLayout: route[4] === true
|
||||
};
|
||||
}
|
||||
async function renderSegmentPrefetch(buildId, rsc, loading, requestKey, clientModules) {
|
||||
// Render the segment data to a stream.
|
||||
// In the future, this is where we can include additional metadata, like the
|
||||
// stale time and cache tags.
|
||||
const segmentPrefetch = {
|
||||
buildId,
|
||||
rsc,
|
||||
loading,
|
||||
isPartial: await isPartialRSCData(rsc, clientModules)
|
||||
};
|
||||
// Since all we're doing is decoding and re-encoding a cached prerender, if
|
||||
// it takes longer than a microtask, it must be because of hanging promises
|
||||
// caused by dynamic data. Abort the stream at the end of the current task.
|
||||
const abortController = new AbortController();
|
||||
(0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>abortController.abort());
|
||||
const { prelude: segmentStream } = await (0, _static.prerender)(segmentPrefetch, clientModules, {
|
||||
filterStackFrame,
|
||||
signal: abortController.signal,
|
||||
onError: onSegmentPrerenderError
|
||||
});
|
||||
const segmentBuffer = await (0, _nodewebstreamshelper.streamToBuffer)(segmentStream);
|
||||
if (requestKey === _segmentvalueencoding.ROOT_SEGMENT_REQUEST_KEY) {
|
||||
return [
|
||||
'/_index',
|
||||
segmentBuffer
|
||||
];
|
||||
} else {
|
||||
return [
|
||||
requestKey,
|
||||
segmentBuffer
|
||||
];
|
||||
}
|
||||
}
|
||||
async function isPartialRSCData(rsc, clientModules) {
|
||||
// We can determine if a segment contains only partial data if it takes longer
|
||||
// than a task to encode, because dynamic data is encoded as an infinite
|
||||
// promise. We must do this in a separate Flight prerender from the one that
|
||||
// actually generates the prefetch stream because we need to include
|
||||
// `isPartial` in the stream itself.
|
||||
let isPartial = false;
|
||||
const abortController = new AbortController();
|
||||
(0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>{
|
||||
// If we haven't yet finished the outer task, then it must be because we
|
||||
// accessed dynamic data.
|
||||
isPartial = true;
|
||||
abortController.abort();
|
||||
});
|
||||
await (0, _static.prerender)(rsc, clientModules, {
|
||||
filterStackFrame,
|
||||
signal: abortController.signal,
|
||||
onError () {}
|
||||
});
|
||||
return isPartial;
|
||||
}
|
||||
function createUnclosingPrefetchStream(originalFlightStream) {
|
||||
// When PPR is enabled, prefetch streams may contain references that never
|
||||
// resolve, because that's how we encode dynamic data access. In the decoded
|
||||
// object returned by the Flight client, these are reified into hanging
|
||||
// promises that suspend during render, which is effectively what we want.
|
||||
// The UI resolves when it switches to the dynamic data stream
|
||||
// (via useDeferredValue(dynamic, static)).
|
||||
//
|
||||
// However, the Flight implementation currently errors if the server closes
|
||||
// the response before all the references are resolved. As a cheat to work
|
||||
// around this, we wrap the original stream in a new stream that never closes,
|
||||
// and therefore doesn't error.
|
||||
const reader = originalFlightStream.getReader();
|
||||
return new ReadableStream({
|
||||
async pull (controller) {
|
||||
while(true){
|
||||
const { done, value } = await reader.read();
|
||||
if (!done) {
|
||||
// Pass to the target stream and keep consuming the Flight response
|
||||
// from the server.
|
||||
controller.enqueue(value);
|
||||
continue;
|
||||
}
|
||||
// The server stream has closed. Exit, but intentionally do not close
|
||||
// the target stream.
|
||||
return;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
//# sourceMappingURL=collect-segment-data.js.map
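createUnclosingPrefetchStream above works around the Flight client erroring when a response closes while references are still unresolved: it forwards every chunk of the original stream into a new ReadableStream that is never closed. The same wrapping pattern in isolation, written against generic web streams:

// Generic "forward chunks but never close" wrapper, mirroring the pattern above.
function neverClosingStream<T>(source: ReadableStream<T>): ReadableStream<T> {
  const reader = source.getReader()
  return new ReadableStream<T>({
    async pull(controller) {
      while (true) {
        const { done, value } = await reader.read()
        if (done) {
          // Source finished; intentionally do not close the target stream.
          return
        }
        controller.enqueue(value)
      }
    },
  })
}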
1
apps/public-web/node_modules/next/dist/server/app-render/collect-segment-data.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
apps/public-web/node_modules/next/dist/server/app-render/console-async-storage-instance.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
import type { ConsoleAsyncStorage } from './console-async-storage.external';
export declare const consoleAsyncStorageInstance: ConsoleAsyncStorage;
14
apps/public-web/node_modules/next/dist/server/app-render/console-async-storage-instance.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "consoleAsyncStorageInstance", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return consoleAsyncStorageInstance;
|
||||
}
|
||||
});
|
||||
const _asynclocalstorage = require("./async-local-storage");
|
||||
const consoleAsyncStorageInstance = (0, _asynclocalstorage.createAsyncLocalStorage)();
|
||||
|
||||
//# sourceMappingURL=console-async-storage-instance.js.map
1
apps/public-web/node_modules/next/dist/server/app-render/console-async-storage-instance.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/console-async-storage-instance.ts"],"sourcesContent":["import { createAsyncLocalStorage } from './async-local-storage'\nimport type { ConsoleAsyncStorage } from './console-async-storage.external'\n\nexport const consoleAsyncStorageInstance: ConsoleAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["consoleAsyncStorageInstance","createAsyncLocalStorage"],"mappings":";;;;+BAGaA;;;eAAAA;;;mCAH2B;AAGjC,MAAMA,8BACXC,IAAAA,0CAAuB","ignoreList":[0]}
12
apps/public-web/node_modules/next/dist/server/app-render/console-async-storage.external.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
import type { AsyncLocalStorage } from 'async_hooks';
import { consoleAsyncStorageInstance } from './console-async-storage-instance';
export interface ConsoleStore {
    /**
     * if true the color of logs output will be dimmed to indicate the log is
     * from a repeat or validation render that is not typically relevant to
     * the primary action the server is taking.
     */
    readonly dim: boolean;
}
export type ConsoleAsyncStorage = AsyncLocalStorage<ConsoleStore>;
export { consoleAsyncStorageInstance as consoleAsyncStorage };
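The dim flag is consulted when the server colorizes console output for repeat or validation renders. A minimal sketch of establishing and reading such a store; runValidationRender and logDim are illustrative names, not Next.js exports:

// Illustrative only: wraps work so nested logging code can see { dim: true }.
import { consoleAsyncStorage } from 'next/dist/server/app-render/console-async-storage.external'

function runValidationRender<T>(work: () => T): T {
  return consoleAsyncStorage.run({ dim: true }, work)
}

function logDim(message: string) {
  const store = consoleAsyncStorage.getStore()
  // Dim the output (ANSI escape) when the surrounding render marked itself as a repeat pass.
  console.log(store?.dim ? `\x1b[2m${message}\x1b[0m` : message)
}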
13
apps/public-web/node_modules/next/dist/server/app-render/console-async-storage.external.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "consoleAsyncStorage", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return _consoleasyncstorageinstance.consoleAsyncStorageInstance;
|
||||
}
|
||||
});
|
||||
const _consoleasyncstorageinstance = require("./console-async-storage-instance");
|
||||
|
||||
//# sourceMappingURL=console-async-storage.external.js.map
1
apps/public-web/node_modules/next/dist/server/app-render/console-async-storage.external.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/console-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { consoleAsyncStorageInstance } from './console-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\n\nexport interface ConsoleStore {\n /**\n * if true the color of logs output will be dimmed to indicate the log is\n * from a repeat or validation render that is not typically relevant to\n * the primary action the server is taking.\n */\n readonly dim: boolean\n}\n\nexport type ConsoleAsyncStorage = AsyncLocalStorage<ConsoleStore>\n\nexport { consoleAsyncStorageInstance as consoleAsyncStorage }\n"],"names":["consoleAsyncStorage","consoleAsyncStorageInstance"],"mappings":";;;;+BAgBwCA;;;eAA/BC,wDAA2B;;;6CAbQ","ignoreList":[0]}
8
apps/public-web/node_modules/next/dist/server/app-render/create-component-styles-and-scripts.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
import type { AppRenderContext } from './app-render';
export declare function createComponentStylesAndScripts({ filePath, getComponent, injectedCSS, injectedJS, ctx, }: {
    filePath: string;
    getComponent: () => any;
    injectedCSS: Set<string>;
    injectedJS: Set<string>;
    ctx: AppRenderContext;
}): Promise<[React.ComponentType<any>, React.ReactNode, React.ReactNode]>;
33
apps/public-web/node_modules/next/dist/server/app-render/create-component-styles-and-scripts.js
generated
vendored
Normal file
@@ -0,0 +1,33 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "createComponentStylesAndScripts", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return createComponentStylesAndScripts;
|
||||
}
|
||||
});
|
||||
const _interopdefault = require("./interop-default");
|
||||
const _getcssinlinedlinktags = require("./get-css-inlined-link-tags");
|
||||
const _getassetquerystring = require("./get-asset-query-string");
|
||||
const _encodeuripath = require("../../shared/lib/encode-uri-path");
|
||||
const _rendercssresource = require("./render-css-resource");
|
||||
async function createComponentStylesAndScripts({ filePath, getComponent, injectedCSS, injectedJS, ctx }) {
|
||||
const { componentMod: { createElement } } = ctx;
|
||||
const { styles: entryCssFiles, scripts: jsHrefs } = (0, _getcssinlinedlinktags.getLinkAndScriptTags)(filePath, injectedCSS, injectedJS);
|
||||
const styles = (0, _rendercssresource.renderCssResource)(entryCssFiles, ctx);
|
||||
const scripts = jsHrefs ? jsHrefs.map((href, index)=>createElement('script', {
|
||||
src: `${ctx.assetPrefix}/_next/${(0, _encodeuripath.encodeURIPath)(href)}${(0, _getassetquerystring.getAssetQueryString)(ctx, true)}`,
|
||||
async: true,
|
||||
key: `script-${index}`
|
||||
})) : null;
|
||||
const Comp = (0, _interopdefault.interopDefault)(await getComponent());
|
||||
return [
|
||||
Comp,
|
||||
styles,
|
||||
scripts
|
||||
];
|
||||
}
|
||||
|
||||
//# sourceMappingURL=create-component-styles-and-scripts.js.map
1
apps/public-web/node_modules/next/dist/server/app-render/create-component-styles-and-scripts.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/create-component-styles-and-scripts.tsx"],"sourcesContent":["import { interopDefault } from './interop-default'\nimport { getLinkAndScriptTags } from './get-css-inlined-link-tags'\nimport type { AppRenderContext } from './app-render'\nimport { getAssetQueryString } from './get-asset-query-string'\nimport { encodeURIPath } from '../../shared/lib/encode-uri-path'\nimport { renderCssResource } from './render-css-resource'\n\nexport async function createComponentStylesAndScripts({\n filePath,\n getComponent,\n injectedCSS,\n injectedJS,\n ctx,\n}: {\n filePath: string\n getComponent: () => any\n injectedCSS: Set<string>\n injectedJS: Set<string>\n ctx: AppRenderContext\n}): Promise<[React.ComponentType<any>, React.ReactNode, React.ReactNode]> {\n const {\n componentMod: { createElement },\n } = ctx\n const { styles: entryCssFiles, scripts: jsHrefs } = getLinkAndScriptTags(\n filePath,\n injectedCSS,\n injectedJS\n )\n\n const styles = renderCssResource(entryCssFiles, ctx)\n\n const scripts = jsHrefs\n ? jsHrefs.map((href, index) =>\n createElement('script', {\n src: `${ctx.assetPrefix}/_next/${encodeURIPath(href)}${getAssetQueryString(ctx, true)}`,\n async: true,\n key: `script-${index}`,\n })\n )\n : null\n\n const Comp = interopDefault(await getComponent())\n\n return [Comp, styles, scripts]\n}\n"],"names":["createComponentStylesAndScripts","filePath","getComponent","injectedCSS","injectedJS","ctx","componentMod","createElement","styles","entryCssFiles","scripts","jsHrefs","getLinkAndScriptTags","renderCssResource","map","href","index","src","assetPrefix","encodeURIPath","getAssetQueryString","async","key","Comp","interopDefault"],"mappings":";;;;+BAOsBA;;;eAAAA;;;gCAPS;uCACM;qCAED;+BACN;mCACI;AAE3B,eAAeA,gCAAgC,EACpDC,QAAQ,EACRC,YAAY,EACZC,WAAW,EACXC,UAAU,EACVC,GAAG,EAOJ;IACC,MAAM,EACJC,cAAc,EAAEC,aAAa,EAAE,EAChC,GAAGF;IACJ,MAAM,EAAEG,QAAQC,aAAa,EAAEC,SAASC,OAAO,EAAE,GAAGC,IAAAA,2CAAoB,EACtEX,UACAE,aACAC;IAGF,MAAMI,SAASK,IAAAA,oCAAiB,EAACJ,eAAeJ;IAEhD,MAAMK,UAAUC,UACZA,QAAQG,GAAG,CAAC,CAACC,MAAMC,QACjBT,cAAc,UAAU;YACtBU,KAAK,GAAGZ,IAAIa,WAAW,CAAC,OAAO,EAAEC,IAAAA,4BAAa,EAACJ,QAAQK,IAAAA,wCAAmB,EAACf,KAAK,OAAO;YACvFgB,OAAO;YACPC,KAAK,CAAC,OAAO,EAAEN,OAAO;QACxB,MAEF;IAEJ,MAAMO,OAAOC,IAAAA,8BAAc,EAAC,MAAMtB;IAElC,OAAO;QAACqB;QAAMf;QAAQE;KAAQ;AAChC","ignoreList":[0]}
23
apps/public-web/node_modules/next/dist/server/app-render/create-component-tree.d.ts
generated
vendored
Normal file
@@ -0,0 +1,23 @@
import type { ComponentType } from 'react';
import type { CacheNodeSeedData } from '../../shared/lib/app-router-types';
import type { PreloadCallbacks } from './types';
import type { LoaderTree } from '../lib/app-dir-module';
import type { AppRenderContext, GetDynamicParamFromSegment } from './app-render';
import type { Params } from '../request/params';
/**
 * Use the provided loader tree to create the React Component tree.
 */
export declare function createComponentTree(props: {
    loaderTree: LoaderTree;
    parentParams: Params;
    rootLayoutIncluded: boolean;
    injectedCSS: Set<string>;
    injectedJS: Set<string>;
    injectedFontPreloadTags: Set<string>;
    ctx: AppRenderContext;
    missingSlots?: Set<string>;
    preloadCallbacks: PreloadCallbacks;
    authInterrupts: boolean;
    MetadataOutlet: ComponentType;
}): Promise<CacheNodeSeedData>;
export declare function getRootParams(loaderTree: LoaderTree, getDynamicParamFromSegment: GetDynamicParamFromSegment): Params;
754
apps/public-web/node_modules/next/dist/server/app-render/create-component-tree.js
generated
vendored
Normal file
@@ -0,0 +1,754 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
createComponentTree: null,
|
||||
getRootParams: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
createComponentTree: function() {
|
||||
return createComponentTree;
|
||||
},
|
||||
getRootParams: function() {
|
||||
return getRootParams;
|
||||
}
|
||||
});
|
||||
const _clientandserverreferences = require("../../lib/client-and-server-references");
|
||||
const _appdirmodule = require("../lib/app-dir-module");
|
||||
const _interopdefault = require("./interop-default");
|
||||
const _parseloadertree = require("../../shared/lib/router/utils/parse-loader-tree");
|
||||
const _createcomponentstylesandscripts = require("./create-component-styles-and-scripts");
|
||||
const _getlayerassets = require("./get-layer-assets");
|
||||
const _hasloadingcomponentintree = require("./has-loading-component-in-tree");
|
||||
const _patchfetch = require("../lib/patch-fetch");
|
||||
const _default = require("../../client/components/builtin/default");
|
||||
const _tracer = require("../lib/trace/tracer");
|
||||
const _constants = require("../lib/trace/constants");
|
||||
const _staticgenerationbailout = require("../../client/components/static-generation-bailout");
|
||||
const _workunitasyncstorageexternal = require("./work-unit-async-storage.external");
|
||||
const _segment = require("../../shared/lib/segment");
|
||||
const _segmentexplorerpath = require("./segment-explorer-path");
|
||||
function createComponentTree(props) {
|
||||
return (0, _tracer.getTracer)().trace(_constants.NextNodeServerSpan.createComponentTree, {
|
||||
spanName: 'build component tree'
|
||||
}, ()=>createComponentTreeInternal(props, true));
|
||||
}
|
||||
function errorMissingDefaultExport(pagePath, convention) {
|
||||
const normalizedPagePath = pagePath === '/' ? '' : pagePath;
|
||||
throw Object.defineProperty(new Error(`The default export is not a React Component in "${normalizedPagePath}/${convention}"`), "__NEXT_ERROR_CODE", {
|
||||
value: "E45",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const cacheNodeKey = 'c';
|
||||
async function createComponentTreeInternal({ loaderTree: tree, parentParams, rootLayoutIncluded, injectedCSS, injectedJS, injectedFontPreloadTags, ctx, missingSlots, preloadCallbacks, authInterrupts, MetadataOutlet }, isRoot) {
|
||||
const { renderOpts: { nextConfigOutput, experimental, cacheComponents }, workStore, componentMod: { createElement, Fragment, SegmentViewNode, HTTPAccessFallbackBoundary, LayoutRouter, RenderFromTemplateContext, ClientPageRoot, ClientSegmentRoot, createServerSearchParamsForServerPage, createPrerenderSearchParamsForClientPage, createServerParamsForServerSegment, createPrerenderParamsForClientSegment, serverHooks: { DynamicServerError }, Postpone }, pagePath, getDynamicParamFromSegment, isPrefetch, query } = ctx;
|
||||
const { page, conventionPath, segment, modules, parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(tree);
|
||||
const { layout, template, error, loading, 'not-found': notFound, forbidden, unauthorized } = modules;
|
||||
const injectedCSSWithCurrentLayout = new Set(injectedCSS);
|
||||
const injectedJSWithCurrentLayout = new Set(injectedJS);
|
||||
const injectedFontPreloadTagsWithCurrentLayout = new Set(injectedFontPreloadTags);
|
||||
const layerAssets = (0, _getlayerassets.getLayerAssets)({
|
||||
preloadCallbacks,
|
||||
ctx,
|
||||
layoutOrPagePath: conventionPath,
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout,
|
||||
injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout
|
||||
});
|
||||
const [Template, templateStyles, templateScripts] = template ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
|
||||
ctx,
|
||||
filePath: template[1],
|
||||
getComponent: template[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [
|
||||
Fragment
|
||||
];
|
||||
const [ErrorComponent, errorStyles, errorScripts] = error ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
|
||||
ctx,
|
||||
filePath: error[1],
|
||||
getComponent: error[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [];
|
||||
const [Loading, loadingStyles, loadingScripts] = loading ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
|
||||
ctx,
|
||||
filePath: loading[1],
|
||||
getComponent: loading[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [];
|
||||
const isLayout = typeof layout !== 'undefined';
|
||||
const isPage = typeof page !== 'undefined';
|
||||
const { mod: layoutOrPageMod, modType } = await (0, _tracer.getTracer)().trace(_constants.NextNodeServerSpan.getLayoutOrPageModule, {
|
||||
hideSpan: !(isLayout || isPage),
|
||||
spanName: 'resolve segment modules',
|
||||
attributes: {
|
||||
'next.segment': segment
|
||||
}
|
||||
}, ()=>(0, _appdirmodule.getLayoutOrPageModule)(tree));
|
||||
/**
|
||||
* Checks if the current segment is a root layout.
|
||||
*/ const rootLayoutAtThisLevel = isLayout && !rootLayoutIncluded;
|
||||
/**
|
||||
* Checks if the current segment or any level above it has a root layout.
|
||||
*/ const rootLayoutIncludedAtThisLevelOrAbove = rootLayoutIncluded || rootLayoutAtThisLevel;
|
||||
const [NotFound, notFoundStyles] = notFound ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
|
||||
ctx,
|
||||
filePath: notFound[1],
|
||||
getComponent: notFound[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [];
|
||||
const prefetchConfig = layoutOrPageMod ? layoutOrPageMod.unstable_prefetch : undefined;
|
||||
/** Whether this segment should use a runtime prefetch instead of a static prefetch. */ const hasRuntimePrefetch = (prefetchConfig == null ? void 0 : prefetchConfig.mode) === 'runtime';
|
||||
const [Forbidden, forbiddenStyles] = authInterrupts && forbidden ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
|
||||
ctx,
|
||||
filePath: forbidden[1],
|
||||
getComponent: forbidden[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [];
|
||||
const [Unauthorized, unauthorizedStyles] = authInterrupts && unauthorized ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
|
||||
ctx,
|
||||
filePath: unauthorized[1],
|
||||
getComponent: unauthorized[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [];
|
||||
let dynamic = layoutOrPageMod == null ? void 0 : layoutOrPageMod.dynamic;
|
||||
if (nextConfigOutput === 'export') {
|
||||
if (!dynamic || dynamic === 'auto') {
|
||||
dynamic = 'error';
|
||||
} else if (dynamic === 'force-dynamic') {
|
||||
// force-dynamic is always incompatible with 'export'. We must interrupt the build
|
||||
throw Object.defineProperty(new _staticgenerationbailout.StaticGenBailoutError(`Page with \`dynamic = "force-dynamic"\` couldn't be exported. \`output: "export"\` requires all pages be renderable statically because there is no runtime server to dynamically render routes in this output format. Learn more: https://nextjs.org/docs/app/building-your-application/deploying/static-exports`), "__NEXT_ERROR_CODE", {
|
||||
value: "E527",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
if (typeof dynamic === 'string') {
|
||||
// the nested most config wins so we only force-static
|
||||
// if it's configured above any parent that configured
|
||||
// otherwise
|
||||
if (dynamic === 'error') {
|
||||
workStore.dynamicShouldError = true;
|
||||
} else if (dynamic === 'force-dynamic') {
|
||||
workStore.forceDynamic = true;
|
||||
// TODO: (PPR) remove this bailout once PPR is the default
|
||||
if (workStore.isStaticGeneration && !experimental.isRoutePPREnabled) {
|
||||
// If the postpone API isn't available, we can't postpone the render and
|
||||
// therefore we can't use the dynamic API.
|
||||
const err = Object.defineProperty(new DynamicServerError(`Page with \`dynamic = "force-dynamic"\` won't be rendered statically.`), "__NEXT_ERROR_CODE", {
|
||||
value: "E585",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
workStore.dynamicUsageDescription = err.message;
|
||||
workStore.dynamicUsageStack = err.stack;
|
||||
throw err;
|
||||
}
|
||||
} else {
|
||||
workStore.dynamicShouldError = false;
|
||||
workStore.forceStatic = dynamic === 'force-static';
|
||||
}
|
||||
}
|
||||
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache) === 'string') {
|
||||
workStore.fetchCache = layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache;
|
||||
}
|
||||
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) !== 'undefined') {
|
||||
(0, _patchfetch.validateRevalidate)(layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate, workStore.route);
|
||||
}
|
||||
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) === 'number') {
|
||||
const defaultRevalidate = layoutOrPageMod.revalidate;
|
||||
const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
|
||||
if (workUnitStore) {
|
||||
switch(workUnitStore.type){
|
||||
case 'prerender':
|
||||
case 'prerender-runtime':
|
||||
case 'prerender-legacy':
|
||||
case 'prerender-ppr':
|
||||
if (workUnitStore.revalidate > defaultRevalidate) {
|
||||
workUnitStore.revalidate = defaultRevalidate;
|
||||
}
|
||||
break;
|
||||
case 'request':
|
||||
break;
|
||||
// createComponentTree is not called for these stores:
|
||||
case 'cache':
|
||||
case 'private-cache':
|
||||
case 'prerender-client':
|
||||
case 'unstable-cache':
|
||||
break;
|
||||
default:
|
||||
workUnitStore;
|
||||
}
|
||||
}
|
||||
if (!workStore.forceStatic && workStore.isStaticGeneration && defaultRevalidate === 0 && // If the postpone API isn't available, we can't postpone the render and
|
||||
// therefore we can't use the dynamic API.
|
||||
!experimental.isRoutePPREnabled) {
|
||||
const dynamicUsageDescription = `revalidate: 0 configured ${segment}`;
|
||||
workStore.dynamicUsageDescription = dynamicUsageDescription;
|
||||
throw Object.defineProperty(new DynamicServerError(dynamicUsageDescription), "__NEXT_ERROR_CODE", {
|
||||
value: "E394",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
const isStaticGeneration = workStore.isStaticGeneration;
|
||||
// Assume the segment we're rendering contains only partial data if PPR is
|
||||
// enabled and this is a statically generated response. This is used by the
|
||||
// client Segment Cache after a prefetch to determine if it can skip the
|
||||
// second request to fill in the dynamic data.
|
||||
//
|
||||
// It's OK for this to be `true` when the data is actually fully static, but
|
||||
// it's not OK for this to be `false` when the data possibly contains holes.
|
||||
// Although the value here is overly pessimistic, for prefetches, it will be
|
||||
// replaced by a more specific value when the data is later processed into
|
||||
// per-segment responses (see collect-segment-data.tsx)
|
||||
//
|
||||
// For dynamic requests, this must always be `false` because dynamic responses
|
||||
// are never partial.
|
||||
const isPossiblyPartialResponse = isStaticGeneration && experimental.isRoutePPREnabled === true;
|
||||
const LayoutOrPage = layoutOrPageMod ? (0, _interopdefault.interopDefault)(layoutOrPageMod) : undefined;
|
||||
/**
|
||||
* The React Component to render.
|
||||
*/ let MaybeComponent = LayoutOrPage;
|
||||
if (process.env.NODE_ENV === 'development' || isStaticGeneration) {
|
||||
const { isValidElementType } = require('next/dist/compiled/react-is');
|
||||
if (typeof MaybeComponent !== 'undefined' && !isValidElementType(MaybeComponent)) {
|
||||
errorMissingDefaultExport(pagePath, modType ?? 'page');
|
||||
}
|
||||
if (typeof ErrorComponent !== 'undefined' && !isValidElementType(ErrorComponent)) {
|
||||
errorMissingDefaultExport(pagePath, 'error');
|
||||
}
|
||||
if (typeof Loading !== 'undefined' && !isValidElementType(Loading)) {
|
||||
errorMissingDefaultExport(pagePath, 'loading');
|
||||
}
|
||||
if (typeof NotFound !== 'undefined' && !isValidElementType(NotFound)) {
|
||||
errorMissingDefaultExport(pagePath, 'not-found');
|
||||
}
|
||||
if (typeof Forbidden !== 'undefined' && !isValidElementType(Forbidden)) {
|
||||
errorMissingDefaultExport(pagePath, 'forbidden');
|
||||
}
|
||||
if (typeof Unauthorized !== 'undefined' && !isValidElementType(Unauthorized)) {
|
||||
errorMissingDefaultExport(pagePath, 'unauthorized');
|
||||
}
|
||||
}
|
||||
// Handle dynamic segment params.
|
||||
const segmentParam = getDynamicParamFromSegment(segment);
|
||||
// Create object holding the parent params and current params
|
||||
let currentParams = parentParams;
|
||||
if (segmentParam && segmentParam.value !== null) {
|
||||
currentParams = {
|
||||
...parentParams,
|
||||
[segmentParam.param]: segmentParam.value
|
||||
};
|
||||
}
|
||||
// Resolve the segment param
|
||||
const isSegmentViewEnabled = !!ctx.renderOpts.dev;
|
||||
const dir = (process.env.NEXT_RUNTIME === 'edge' ? process.env.__NEXT_EDGE_PROJECT_DIR : ctx.renderOpts.dir) || '';
|
||||
const [notFoundElement, notFoundFilePath] = await createBoundaryConventionElement({
|
||||
ctx,
|
||||
conventionName: 'not-found',
|
||||
Component: NotFound,
|
||||
styles: notFoundStyles,
|
||||
tree
|
||||
});
|
||||
const [forbiddenElement] = await createBoundaryConventionElement({
|
||||
ctx,
|
||||
conventionName: 'forbidden',
|
||||
Component: Forbidden,
|
||||
styles: forbiddenStyles,
|
||||
tree
|
||||
});
|
||||
const [unauthorizedElement] = await createBoundaryConventionElement({
|
||||
ctx,
|
||||
conventionName: 'unauthorized',
|
||||
Component: Unauthorized,
|
||||
styles: unauthorizedStyles,
|
||||
tree
|
||||
});
|
||||
// TODO: Combine this `map` traversal with the loop below that turns the array
|
||||
// into an object.
|
||||
const parallelRouteMap = await Promise.all(Object.keys(parallelRoutes).map(async (parallelRouteKey)=>{
|
||||
const isChildrenRouteKey = parallelRouteKey === 'children';
|
||||
const parallelRoute = parallelRoutes[parallelRouteKey];
|
||||
const notFoundComponent = isChildrenRouteKey ? notFoundElement : undefined;
|
||||
const forbiddenComponent = isChildrenRouteKey ? forbiddenElement : undefined;
|
||||
const unauthorizedComponent = isChildrenRouteKey ? unauthorizedElement : undefined;
|
||||
// If we're prefetching and there's a Loading component, we bail out;
|
||||
// otherwise we keep rendering for the prefetch.
|
||||
// We also want to bail out if there's no Loading component in the tree.
|
||||
let childCacheNodeSeedData = null;
|
||||
if (// Before PPR, the way instant navigations work in Next.js is we
|
||||
// prefetch everything up to the first route segment that defines a
|
||||
// loading.tsx boundary. (We do the same if there's no loading
|
||||
// boundary in the entire tree, because we don't want to prefetch too
|
||||
// much.) The rest of the tree is deferred until the actual navigation.
|
||||
// It does not take into account whether the data is dynamic — even if
|
||||
// the tree is completely static, it will still defer everything
|
||||
// inside the loading boundary.
|
||||
//
|
||||
// This behavior predates PPR and is only relevant if the
|
||||
// PPR flag is not enabled.
|
||||
isPrefetch && (Loading || !(0, _hasloadingcomponentintree.hasLoadingComponentInTree)(parallelRoute)) && // The approach with PPR is different — loading.tsx behaves like a
|
||||
// regular Suspense boundary and has no special behavior.
|
||||
//
|
||||
// With PPR, we prefetch as deeply as possible, and only defer when
|
||||
// dynamic data is accessed. If so, we only defer the nearest parent
|
||||
// Suspense boundary of the dynamic data access, regardless of whether
|
||||
// the boundary is defined by loading.tsx or a normal <Suspense>
|
||||
// component in userspace.
|
||||
//
|
||||
// NOTE: In practice this usually means we'll end up prefetching more
|
||||
// than we were before PPR, which may or may not be considered a
|
||||
// performance regression by some apps. The plan is to address this
|
||||
// before General Availability of PPR by introducing granular
|
||||
// per-segment fetching, so we can reuse as much of the tree as
|
||||
// possible during both prefetches and dynamic navigations. But during
|
||||
// the beta period, we should be clear about this trade off in our
|
||||
// communications.
|
||||
!experimental.isRoutePPREnabled) {
|
||||
// Don't prefetch this child. This will trigger a lazy fetch by the
|
||||
// client router.
|
||||
} else {
|
||||
// Create the child component
|
||||
if (process.env.NODE_ENV === 'development' && missingSlots) {
|
||||
var _parsedTree_conventionPath;
|
||||
// When we detect the default fallback (which triggers a 404), we collect the missing slots
|
||||
// to provide more helpful debug information during development mode.
|
||||
const parsedTree = (0, _parseloadertree.parseLoaderTree)(parallelRoute);
|
||||
if ((_parsedTree_conventionPath = parsedTree.conventionPath) == null ? void 0 : _parsedTree_conventionPath.endsWith(_default.PARALLEL_ROUTE_DEFAULT_PATH)) {
|
||||
missingSlots.add(parallelRouteKey);
|
||||
}
|
||||
}
|
||||
const seedData = await createComponentTreeInternal({
|
||||
loaderTree: parallelRoute,
|
||||
parentParams: currentParams,
|
||||
rootLayoutIncluded: rootLayoutIncludedAtThisLevelOrAbove,
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout,
|
||||
injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout,
|
||||
ctx,
|
||||
missingSlots,
|
||||
preloadCallbacks,
|
||||
authInterrupts,
|
||||
// `StreamingMetadataOutlet` is used to conditionally throw. In the case of parallel routes we will have more than one page
|
||||
// but we only want to throw on the first one.
|
||||
MetadataOutlet: isChildrenRouteKey ? MetadataOutlet : null
|
||||
}, false);
|
||||
childCacheNodeSeedData = seedData;
|
||||
}
|
||||
const templateNode = createElement(Template, null, createElement(RenderFromTemplateContext, null));
|
||||
const templateFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'template');
|
||||
const errorFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'error');
|
||||
const loadingFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'loading');
|
||||
const globalErrorFilePath = isRoot ? (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'global-error') : undefined;
|
||||
const wrappedErrorStyles = isSegmentViewEnabled && errorFilePath ? createElement(SegmentViewNode, {
|
||||
type: 'error',
|
||||
pagePath: errorFilePath
|
||||
}, errorStyles) : errorStyles;
|
||||
// Add a suffix to avoid conflict with the segment view node representing the rendered file.
|
||||
// existence: not-found.tsx@boundary
|
||||
// rendered: not-found.tsx
|
||||
const fileNameSuffix = _segmentexplorerpath.BOUNDARY_SUFFIX;
|
||||
const segmentViewBoundaries = isSegmentViewEnabled ? createElement(Fragment, null, notFoundFilePath && createElement(SegmentViewNode, {
|
||||
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}not-found`,
|
||||
pagePath: notFoundFilePath + fileNameSuffix
|
||||
}), loadingFilePath && createElement(SegmentViewNode, {
|
||||
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}loading`,
|
||||
pagePath: loadingFilePath + fileNameSuffix
|
||||
}), errorFilePath && createElement(SegmentViewNode, {
|
||||
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}error`,
|
||||
pagePath: errorFilePath + fileNameSuffix
|
||||
}), globalErrorFilePath && createElement(SegmentViewNode, {
|
||||
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}global-error`,
|
||||
pagePath: (0, _segmentexplorerpath.isNextjsBuiltinFilePath)(globalErrorFilePath) ? `${_segmentexplorerpath.BUILTIN_PREFIX}global-error.js${fileNameSuffix}` : globalErrorFilePath
|
||||
})) : null;
|
||||
return [
|
||||
parallelRouteKey,
|
||||
createElement(LayoutRouter, {
|
||||
parallelRouterKey: parallelRouteKey,
|
||||
error: ErrorComponent,
|
||||
errorStyles: wrappedErrorStyles,
|
||||
errorScripts: errorScripts,
|
||||
template: isSegmentViewEnabled && templateFilePath ? createElement(SegmentViewNode, {
|
||||
type: 'template',
|
||||
pagePath: templateFilePath
|
||||
}, templateNode) : templateNode,
|
||||
templateStyles: templateStyles,
|
||||
templateScripts: templateScripts,
|
||||
notFound: notFoundComponent,
|
||||
forbidden: forbiddenComponent,
|
||||
unauthorized: unauthorizedComponent,
|
||||
...isSegmentViewEnabled && {
|
||||
segmentViewBoundaries
|
||||
}
|
||||
}),
|
||||
childCacheNodeSeedData
|
||||
];
|
||||
}));
|
||||
// Convert the parallel route map into an object after all promises have been resolved.
|
||||
let parallelRouteProps = {};
|
||||
let parallelRouteCacheNodeSeedData = {};
|
||||
for (const parallelRoute of parallelRouteMap){
|
||||
const [parallelRouteKey, parallelRouteProp, flightData] = parallelRoute;
|
||||
parallelRouteProps[parallelRouteKey] = parallelRouteProp;
|
||||
parallelRouteCacheNodeSeedData[parallelRouteKey] = flightData;
|
||||
}
|
||||
let loadingElement = Loading ? createElement(Loading, {
|
||||
key: 'l'
|
||||
}) : null;
|
||||
const loadingFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'loading');
|
||||
if (isSegmentViewEnabled && loadingElement) {
|
||||
if (loadingFilePath) {
|
||||
loadingElement = createElement(SegmentViewNode, {
|
||||
key: cacheNodeKey + '-loading',
|
||||
type: 'loading',
|
||||
pagePath: loadingFilePath
|
||||
}, loadingElement);
|
||||
}
|
||||
}
|
||||
const loadingData = loadingElement ? [
|
||||
loadingElement,
|
||||
loadingStyles,
|
||||
loadingScripts
|
||||
] : null;
|
||||
// When the segment does not have a layout or page we still have to add the layout router to ensure the path holds the loading component
|
||||
if (!MaybeComponent) {
|
||||
return [
|
||||
createElement(Fragment, {
|
||||
key: cacheNodeKey
|
||||
}, layerAssets, parallelRouteProps.children),
|
||||
parallelRouteCacheNodeSeedData,
|
||||
loadingData,
|
||||
isPossiblyPartialResponse,
|
||||
hasRuntimePrefetch
|
||||
];
|
||||
}
|
||||
const Component = MaybeComponent;
|
||||
// If force-dynamic is used and the current render supports postponing, we
|
||||
// replace it with a node that will postpone the render. This ensures that the
|
||||
// postpone is invoked during the react render phase and not during the next
|
||||
// render phase.
|
||||
// @TODO this does not actually do what it seems like it would or should do. The idea is that
|
||||
// if we are rendering in a force-dynamic mode and we can postpone we should only make the segments
|
||||
// that ask for force-dynamic to be dynamic, allowing other segments to still prerender. However
|
||||
// because this comes after the children traversal and the static generation store is mutated every segment
|
||||
// along the parent path of a force-dynamic segment will hit this condition effectively making the entire
|
||||
// render force-dynamic. We should refactor this function so that we can correctly track which segments
|
||||
// need to be dynamic
|
||||
if (workStore.isStaticGeneration && workStore.forceDynamic && experimental.isRoutePPREnabled) {
|
||||
return [
|
||||
createElement(Fragment, {
|
||||
key: cacheNodeKey
|
||||
}, createElement(Postpone, {
|
||||
reason: 'dynamic = "force-dynamic" was used',
|
||||
route: workStore.route
|
||||
}), layerAssets),
|
||||
parallelRouteCacheNodeSeedData,
|
||||
loadingData,
|
||||
true,
|
||||
hasRuntimePrefetch
|
||||
];
|
||||
}
|
||||
const isClientComponent = (0, _clientandserverreferences.isClientReference)(layoutOrPageMod);
|
||||
if (process.env.NODE_ENV === 'development' && 'params' in parallelRouteProps) {
|
||||
// @TODO consider making this an error and running the check in build as well
|
||||
console.error(`"params" is a reserved prop in Layouts and Pages and cannot be used as the name of a parallel route in ${segment}`);
|
||||
}
|
||||
if (isPage) {
|
||||
const PageComponent = Component;
|
||||
// Assign searchParams to props if this is a page
|
||||
let pageElement;
|
||||
if (isClientComponent) {
|
||||
if (cacheComponents) {
|
||||
// Params are omitted when Cache Components is enabled
|
||||
pageElement = createElement(ClientPageRoot, {
|
||||
Component: PageComponent,
|
||||
serverProvidedParams: null
|
||||
});
|
||||
} else if (isStaticGeneration) {
|
||||
const promiseOfParams = createPrerenderParamsForClientSegment(currentParams);
|
||||
const promiseOfSearchParams = createPrerenderSearchParamsForClientPage(workStore);
|
||||
pageElement = createElement(ClientPageRoot, {
|
||||
Component: PageComponent,
|
||||
serverProvidedParams: {
|
||||
searchParams: query,
|
||||
params: currentParams,
|
||||
promises: [
|
||||
promiseOfSearchParams,
|
||||
promiseOfParams
|
||||
]
|
||||
}
|
||||
});
|
||||
} else {
|
||||
pageElement = createElement(ClientPageRoot, {
|
||||
Component: PageComponent,
|
||||
serverProvidedParams: {
|
||||
searchParams: query,
|
||||
params: currentParams,
|
||||
promises: null
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// If we are passing params to a server component Page we need to track
|
||||
// their usage in case the current render mode tracks dynamic API usage.
|
||||
const params = createServerParamsForServerSegment(currentParams, workStore);
|
||||
// If we are passing searchParams to a server component Page we need to
|
||||
// track their usage in case the current render mode tracks dynamic API
|
||||
// usage.
|
||||
let searchParams = createServerSearchParamsForServerPage(query, workStore);
|
||||
if ((0, _clientandserverreferences.isUseCacheFunction)(PageComponent)) {
|
||||
const UseCachePageComponent = PageComponent;
|
||||
pageElement = createElement(UseCachePageComponent, {
|
||||
params: params,
|
||||
searchParams: searchParams,
|
||||
$$isPage: true
|
||||
});
|
||||
} else {
|
||||
pageElement = createElement(PageComponent, {
|
||||
params: params,
|
||||
searchParams: searchParams
|
||||
});
|
||||
}
|
||||
}
|
||||
const isDefaultSegment = segment === _segment.DEFAULT_SEGMENT_KEY;
|
||||
const pageFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'page') ?? (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'defaultPage');
|
||||
const segmentType = isDefaultSegment ? 'default' : 'page';
|
||||
const wrappedPageElement = isSegmentViewEnabled && pageFilePath ? createElement(SegmentViewNode, {
|
||||
key: cacheNodeKey + '-' + segmentType,
|
||||
type: segmentType,
|
||||
pagePath: pageFilePath
|
||||
}, pageElement) : pageElement;
|
||||
return [
|
||||
createElement(Fragment, {
|
||||
key: cacheNodeKey
|
||||
}, wrappedPageElement, layerAssets, MetadataOutlet ? createElement(MetadataOutlet, null) : null),
|
||||
parallelRouteCacheNodeSeedData,
|
||||
loadingData,
|
||||
isPossiblyPartialResponse,
|
||||
hasRuntimePrefetch
|
||||
];
|
||||
} else {
|
||||
const SegmentComponent = Component;
|
||||
const isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot = rootLayoutAtThisLevel && 'children' in parallelRoutes && Object.keys(parallelRoutes).length > 1;
|
||||
let segmentNode;
|
||||
if (isClientComponent) {
|
||||
let clientSegment;
|
||||
if (cacheComponents) {
|
||||
// Params are omitted when Cache Components is enabled
|
||||
clientSegment = createElement(ClientSegmentRoot, {
|
||||
Component: SegmentComponent,
|
||||
slots: parallelRouteProps,
|
||||
serverProvidedParams: null
|
||||
});
|
||||
} else if (isStaticGeneration) {
|
||||
const promiseOfParams = createPrerenderParamsForClientSegment(currentParams);
|
||||
clientSegment = createElement(ClientSegmentRoot, {
|
||||
Component: SegmentComponent,
|
||||
slots: parallelRouteProps,
|
||||
serverProvidedParams: {
|
||||
params: currentParams,
|
||||
promises: [
|
||||
promiseOfParams
|
||||
]
|
||||
}
|
||||
});
|
||||
} else {
|
||||
clientSegment = createElement(ClientSegmentRoot, {
|
||||
Component: SegmentComponent,
|
||||
slots: parallelRouteProps,
|
||||
serverProvidedParams: {
|
||||
params: currentParams,
|
||||
promises: null
|
||||
}
|
||||
});
|
||||
}
|
||||
if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
|
||||
let notfoundClientSegment;
|
||||
let forbiddenClientSegment;
|
||||
let unauthorizedClientSegment;
|
||||
// TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
|
||||
// This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
|
||||
// but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
|
||||
// We should instead look into handling the fallback behavior differently in development mode so that it doesn't
|
||||
// rely on the `NotFound` behavior.
|
||||
notfoundClientSegment = createErrorBoundaryClientSegmentRoot({
|
||||
ctx,
|
||||
ErrorBoundaryComponent: NotFound,
|
||||
errorElement: notFoundElement,
|
||||
ClientSegmentRoot,
|
||||
layerAssets,
|
||||
SegmentComponent,
|
||||
currentParams
|
||||
});
|
||||
forbiddenClientSegment = createErrorBoundaryClientSegmentRoot({
|
||||
ctx,
|
||||
ErrorBoundaryComponent: Forbidden,
|
||||
errorElement: forbiddenElement,
|
||||
ClientSegmentRoot,
|
||||
layerAssets,
|
||||
SegmentComponent,
|
||||
currentParams
|
||||
});
|
||||
unauthorizedClientSegment = createErrorBoundaryClientSegmentRoot({
|
||||
ctx,
|
||||
ErrorBoundaryComponent: Unauthorized,
|
||||
errorElement: unauthorizedElement,
|
||||
ClientSegmentRoot,
|
||||
layerAssets,
|
||||
SegmentComponent,
|
||||
currentParams
|
||||
});
|
||||
if (notfoundClientSegment || forbiddenClientSegment || unauthorizedClientSegment) {
|
||||
segmentNode = createElement(HTTPAccessFallbackBoundary, {
|
||||
key: cacheNodeKey,
|
||||
notFound: notfoundClientSegment,
|
||||
forbidden: forbiddenClientSegment,
|
||||
unauthorized: unauthorizedClientSegment
|
||||
}, layerAssets, clientSegment);
|
||||
} else {
|
||||
segmentNode = createElement(Fragment, {
|
||||
key: cacheNodeKey
|
||||
}, layerAssets, clientSegment);
|
||||
}
|
||||
} else {
|
||||
segmentNode = createElement(Fragment, {
|
||||
key: cacheNodeKey
|
||||
}, layerAssets, clientSegment);
|
||||
}
|
||||
} else {
|
||||
const params = createServerParamsForServerSegment(currentParams, workStore);
|
||||
let serverSegment;
|
||||
if ((0, _clientandserverreferences.isUseCacheFunction)(SegmentComponent)) {
|
||||
const UseCacheLayoutComponent = SegmentComponent;
|
||||
serverSegment = createElement(UseCacheLayoutComponent, {
|
||||
...parallelRouteProps,
|
||||
params: params,
|
||||
$$isLayout: true
|
||||
}, // Force static children here so that they're validated.
|
||||
// See https://github.com/facebook/react/pull/34846
|
||||
parallelRouteProps.children);
|
||||
} else {
|
||||
serverSegment = createElement(SegmentComponent, {
|
||||
...parallelRouteProps,
|
||||
params: params
|
||||
}, // Force static children here so that they're validated.
|
||||
// See https://github.com/facebook/react/pull/34846
|
||||
parallelRouteProps.children);
|
||||
}
|
||||
if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
|
||||
// TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
|
||||
// This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
|
||||
// but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
|
||||
// We should instead look into handling the fallback behavior differently in development mode so that it doesn't
|
||||
// rely on the `NotFound` behavior.
|
||||
segmentNode = createElement(HTTPAccessFallbackBoundary, {
|
||||
key: cacheNodeKey,
|
||||
notFound: notFoundElement ? createElement(Fragment, null, layerAssets, createElement(SegmentComponent, {
|
||||
params: params
|
||||
}, notFoundStyles, notFoundElement)) : undefined
|
||||
}, layerAssets, serverSegment);
|
||||
} else {
|
||||
segmentNode = createElement(Fragment, {
|
||||
key: cacheNodeKey
|
||||
}, layerAssets, serverSegment);
|
||||
}
|
||||
}
|
||||
const layoutFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'layout');
|
||||
const wrappedSegmentNode = isSegmentViewEnabled && layoutFilePath ? createElement(SegmentViewNode, {
|
||||
key: 'layout',
|
||||
type: 'layout',
|
||||
pagePath: layoutFilePath
|
||||
}, segmentNode) : segmentNode;
|
||||
// For layouts we just render the component
|
||||
return [
|
||||
wrappedSegmentNode,
|
||||
parallelRouteCacheNodeSeedData,
|
||||
loadingData,
|
||||
isPossiblyPartialResponse,
|
||||
hasRuntimePrefetch
|
||||
];
|
||||
}
|
||||
}
|
||||
function createErrorBoundaryClientSegmentRoot({ ctx, ErrorBoundaryComponent, errorElement, ClientSegmentRoot, layerAssets, SegmentComponent, currentParams }) {
|
||||
const { componentMod: { createElement, Fragment } } = ctx;
|
||||
if (ErrorBoundaryComponent) {
|
||||
const notFoundParallelRouteProps = {
|
||||
children: errorElement
|
||||
};
|
||||
return createElement(Fragment, null, layerAssets, createElement(ClientSegmentRoot, {
|
||||
Component: SegmentComponent,
|
||||
slots: notFoundParallelRouteProps,
|
||||
params: currentParams
|
||||
}));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
function getRootParams(loaderTree, getDynamicParamFromSegment) {
|
||||
return getRootParamsImpl({}, loaderTree, getDynamicParamFromSegment);
|
||||
}
|
||||
function getRootParamsImpl(parentParams, loaderTree, getDynamicParamFromSegment) {
|
||||
const { segment, modules: { layout }, parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(loaderTree);
|
||||
const segmentParam = getDynamicParamFromSegment(segment);
|
||||
let currentParams = parentParams;
|
||||
if (segmentParam && segmentParam.value !== null) {
|
||||
currentParams = {
|
||||
...parentParams,
|
||||
[segmentParam.param]: segmentParam.value
|
||||
};
|
||||
}
|
||||
const isRootLayout = typeof layout !== 'undefined';
|
||||
if (isRootLayout) {
|
||||
return currentParams;
|
||||
} else if (!parallelRoutes.children) {
|
||||
// This should really be an error but there are bugs in Turbopack that cause
|
||||
// the _not-found LoaderTree to not have any layouts. For rootParams sake
|
||||
// this is somewhat irrelevant when you are not customizing the 404 page.
|
||||
// If you are customizing 404
|
||||
// TODO update rootParams to make all params optional if `/app/not-found.tsx` is defined
|
||||
return currentParams;
|
||||
} else {
|
||||
return getRootParamsImpl(currentParams, // We stop looking for root params as soon as we hit the first layout
|
||||
// and it is not possible to use parallel route children above the root layout
|
||||
// so every parallelRoutes object that this function can visit will necessarily
|
||||
// have a single `children` prop and no others.
|
||||
parallelRoutes.children, getDynamicParamFromSegment);
|
||||
}
|
||||
}
|
||||
async function createBoundaryConventionElement({ ctx, conventionName, Component, styles, tree }) {
|
||||
const { componentMod: { createElement, Fragment } } = ctx;
|
||||
const isSegmentViewEnabled = !!ctx.renderOpts.dev;
|
||||
const dir = (process.env.NEXT_RUNTIME === 'edge' ? process.env.__NEXT_EDGE_PROJECT_DIR : ctx.renderOpts.dir) || '';
|
||||
const { SegmentViewNode } = ctx.componentMod;
|
||||
const element = Component ? createElement(Fragment, null, createElement(Component, null), styles) : undefined;
|
||||
const pagePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, conventionName);
|
||||
const wrappedElement = isSegmentViewEnabled && element ? createElement(SegmentViewNode, {
|
||||
key: cacheNodeKey + '-' + conventionName,
|
||||
type: conventionName,
|
||||
// TODO: Discovered when moving to `createElement`.
|
||||
// `SegmentViewNode` doesn't support undefined `pagePath`
|
||||
pagePath: pagePath
|
||||
}, element) : element;
|
||||
return [
|
||||
wrappedElement,
|
||||
pagePath
|
||||
];
|
||||
}
|
||||
|
||||
//# sourceMappingURL=create-component-tree.js.map
|
||||
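For orientation, the revalidate handling at the top of createComponentTreeInternal above effectively takes the minimum revalidate across all rendered segments: a segment can only lower the route-wide value, never raise it. A simplified, hypothetical sketch of that rule (the store shape here is made up and is not the vendored workUnitStore):

// Hypothetical model: each segment's exported `revalidate` can only tighten
// (lower) the route-wide revalidate collected during a prerender.
function clampRevalidate(store, segmentRevalidate) {
  if (typeof segmentRevalidate === 'number' && segmentRevalidate < store.revalidate) {
    store.revalidate = segmentRevalidate;
  }
  return store;
}

// Example: a layout exporting `revalidate = 3600` and a page exporting
// `revalidate = 60` give the route an effective revalidate of 60.
const store = { revalidate: Infinity };
clampRevalidate(store, 3600);
clampRevalidate(store, 60);
console.log(store.revalidate); // 60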
1
apps/public-web/node_modules/next/dist/server/app-render/create-component-tree.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
20
apps/public-web/node_modules/next/dist/server/app-render/create-error-handler.d.ts
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
import type { ErrorInfo } from 'react';
|
||||
declare global {
|
||||
var __next_log_error__: undefined | ((err: unknown) => void);
|
||||
}
|
||||
type RSCErrorHandler = (err: unknown) => string | undefined;
|
||||
type SSRErrorHandler = (err: unknown, errorInfo?: ErrorInfo) => string | undefined;
|
||||
export type DigestedError = Error & {
|
||||
digest: string;
|
||||
environmentName?: string;
|
||||
};
|
||||
/**
|
||||
* Returns a digest for well-known Next.js errors, otherwise `undefined`. If a
|
||||
* digest is returned this also means that the error does not need to be
|
||||
* reported.
|
||||
*/
|
||||
export declare function getDigestForWellKnownError(error: unknown): string | undefined;
|
||||
export declare function createReactServerErrorHandler(shouldFormatError: boolean, isNextExport: boolean, reactServerErrors: Map<string, DigestedError>, onReactServerRenderError: (err: DigestedError, silenceLog: boolean) => void, spanToRecordOn?: any): RSCErrorHandler;
|
||||
export declare function createHTMLErrorHandler(shouldFormatError: boolean, isNextExport: boolean, reactServerErrors: Map<string, DigestedError>, allCapturedErrors: Array<unknown>, onHTMLRenderSSRError: (err: DigestedError, errorInfo?: ErrorInfo) => void, spanToRecordOn?: any): SSRErrorHandler;
|
||||
export declare function isUserLandError(err: any): boolean;
|
||||
export {};
|
||||
187
apps/public-web/node_modules/next/dist/server/app-render/create-error-handler.js
generated
vendored
Normal file
@@ -0,0 +1,187 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
createHTMLErrorHandler: null,
|
||||
createReactServerErrorHandler: null,
|
||||
getDigestForWellKnownError: null,
|
||||
isUserLandError: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
createHTMLErrorHandler: function() {
|
||||
return createHTMLErrorHandler;
|
||||
},
|
||||
createReactServerErrorHandler: function() {
|
||||
return createReactServerErrorHandler;
|
||||
},
|
||||
getDigestForWellKnownError: function() {
|
||||
return getDigestForWellKnownError;
|
||||
},
|
||||
isUserLandError: function() {
|
||||
return isUserLandError;
|
||||
}
|
||||
});
|
||||
const _stringhash = /*#__PURE__*/ _interop_require_default(require("next/dist/compiled/string-hash"));
|
||||
const _formatservererror = require("../../lib/format-server-error");
|
||||
const _tracer = require("../lib/trace/tracer");
|
||||
const _pipereadable = require("../pipe-readable");
|
||||
const _bailouttocsr = require("../../shared/lib/lazy-dynamic/bailout-to-csr");
|
||||
const _hooksservercontext = require("../../client/components/hooks-server-context");
|
||||
const _isnextroutererror = require("../../client/components/is-next-router-error");
|
||||
const _dynamicrendering = require("./dynamic-rendering");
|
||||
const _iserror = require("../../lib/is-error");
|
||||
const _errortelemetryutils = require("../../lib/error-telemetry-utils");
|
||||
const _reactlargeshellerror = require("./react-large-shell-error");
|
||||
function _interop_require_default(obj) {
|
||||
return obj && obj.__esModule ? obj : {
|
||||
default: obj
|
||||
};
|
||||
}
|
||||
function getDigestForWellKnownError(error) {
|
||||
// If we're bailing out to CSR, we don't need to log the error.
|
||||
if ((0, _bailouttocsr.isBailoutToCSRError)(error)) return error.digest;
|
||||
// If this is a navigation error, we don't need to log the error.
|
||||
if ((0, _isnextroutererror.isNextRouterError)(error)) return error.digest;
|
||||
// If this error occurs, we know that we should be stopping the static
|
||||
// render. This is only thrown in static generation when PPR is not enabled,
|
||||
// which causes the whole page to be marked as dynamic. We don't need to
|
||||
// tell the user about this error, as it's not actionable.
|
||||
if ((0, _hooksservercontext.isDynamicServerError)(error)) return error.digest;
|
||||
// If this is a prerender interrupted error, we don't need to log the error.
|
||||
if ((0, _dynamicrendering.isPrerenderInterruptedError)(error)) return error.digest;
|
||||
return undefined;
|
||||
}
|
||||
function createReactServerErrorHandler(shouldFormatError, isNextExport, reactServerErrors, onReactServerRenderError, spanToRecordOn) {
|
||||
return (thrownValue)=>{
|
||||
var _err_message;
|
||||
if (typeof thrownValue === 'string') {
|
||||
// TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
|
||||
return (0, _stringhash.default)(thrownValue).toString();
|
||||
}
|
||||
// If the response was closed, we don't need to log the error.
|
||||
if ((0, _pipereadable.isAbortError)(thrownValue)) return;
|
||||
const digest = getDigestForWellKnownError(thrownValue);
|
||||
if (digest) {
|
||||
return digest;
|
||||
}
|
||||
if ((0, _reactlargeshellerror.isReactLargeShellError)(thrownValue)) {
|
||||
// TODO: Aggregate
|
||||
console.error(thrownValue);
|
||||
return undefined;
|
||||
}
|
||||
let err = (0, _iserror.getProperError)(thrownValue);
|
||||
let silenceLog = false;
|
||||
// If the error already has a digest, respect the original digest,
|
||||
// so it won't get re-generated into another new error.
|
||||
if (err.digest) {
|
||||
if (process.env.NODE_ENV === 'production' && reactServerErrors.has(err.digest)) {
|
||||
// This error is likely an obfuscated error from another react-server
|
||||
// environment (e.g. 'use cache'). We recover the original error here
|
||||
// for reporting purposes.
|
||||
err = reactServerErrors.get(err.digest);
|
||||
// We don't log it again though, as it was already logged in the
|
||||
// original environment.
|
||||
silenceLog = true;
|
||||
} else {
|
||||
// Either we're in development (where we want to keep the transported
|
||||
// error with environmentName), or the error is not in reactServerErrors
|
||||
// but has a digest from other means. Keep the error as-is.
|
||||
}
|
||||
} else {
|
||||
err.digest = (0, _errortelemetryutils.createDigestWithErrorCode)(err, // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
|
||||
(0, _stringhash.default)(err.message + (err.stack || '')).toString());
|
||||
}
|
||||
// @TODO by putting this here and not at the top it is possible that
|
||||
// we don't error the build in places we actually expect to
|
||||
if (!reactServerErrors.has(err.digest)) {
|
||||
reactServerErrors.set(err.digest, err);
|
||||
}
|
||||
// Format server errors in development to add more helpful error messages
|
||||
if (shouldFormatError) {
|
||||
(0, _formatservererror.formatServerError)(err);
|
||||
}
|
||||
// Don't log the suppressed error during export
|
||||
if (!(isNextExport && (err == null ? void 0 : (_err_message = err.message) == null ? void 0 : _err_message.includes('The specific message is omitted in production builds to avoid leaking sensitive details.')))) {
|
||||
// Record exception on the provided span if available, otherwise try active span.
|
||||
const span = spanToRecordOn ?? (0, _tracer.getTracer)().getActiveScopeSpan();
|
||||
if (span) {
|
||||
span.recordException(err);
|
||||
span.setAttribute('error.type', err.name);
|
||||
span.setStatus({
|
||||
code: _tracer.SpanStatusCode.ERROR,
|
||||
message: err.message
|
||||
});
|
||||
}
|
||||
onReactServerRenderError(err, silenceLog);
|
||||
}
|
||||
return err.digest;
|
||||
};
|
||||
}
|
||||
function createHTMLErrorHandler(shouldFormatError, isNextExport, reactServerErrors, allCapturedErrors, onHTMLRenderSSRError, spanToRecordOn) {
|
||||
return (thrownValue, errorInfo)=>{
|
||||
var _err_message;
|
||||
if ((0, _reactlargeshellerror.isReactLargeShellError)(thrownValue)) {
|
||||
// TODO: Aggregate
|
||||
console.error(thrownValue);
|
||||
return undefined;
|
||||
}
|
||||
let isSSRError = true;
|
||||
allCapturedErrors.push(thrownValue);
|
||||
// If the response was closed, we don't need to log the error.
|
||||
if ((0, _pipereadable.isAbortError)(thrownValue)) return;
|
||||
const digest = getDigestForWellKnownError(thrownValue);
|
||||
if (digest) {
|
||||
return digest;
|
||||
}
|
||||
const err = (0, _iserror.getProperError)(thrownValue);
|
||||
// If the error already has a digest, respect the original digest,
|
||||
// so it won't get re-generated into another new error.
|
||||
if (err.digest) {
|
||||
if (reactServerErrors.has(err.digest)) {
|
||||
// This error is likely an obfuscated error from react-server.
|
||||
// We recover the original error here.
|
||||
thrownValue = reactServerErrors.get(err.digest);
|
||||
isSSRError = false;
|
||||
} else {
|
||||
// The error is not from react-server but has a digest
|
||||
// from other means so we don't need to produce a new one
|
||||
}
|
||||
} else {
|
||||
err.digest = (0, _errortelemetryutils.createDigestWithErrorCode)(err, (0, _stringhash.default)(err.message + ((errorInfo == null ? void 0 : errorInfo.componentStack) || err.stack || '')).toString());
|
||||
}
|
||||
// Format server errors in development to add more helpful error messages
|
||||
if (shouldFormatError) {
|
||||
(0, _formatservererror.formatServerError)(err);
|
||||
}
|
||||
// Don't log the suppressed error during export
|
||||
if (!(isNextExport && (err == null ? void 0 : (_err_message = err.message) == null ? void 0 : _err_message.includes('The specific message is omitted in production builds to avoid leaking sensitive details.')))) {
|
||||
// HTML errors contain RSC errors as well, filter them out before reporting
|
||||
if (isSSRError) {
|
||||
// Record exception on the provided span if available, otherwise try active span.
|
||||
const span = spanToRecordOn ?? (0, _tracer.getTracer)().getActiveScopeSpan();
|
||||
if (span) {
|
||||
span.recordException(err);
|
||||
span.setAttribute('error.type', err.name);
|
||||
span.setStatus({
|
||||
code: _tracer.SpanStatusCode.ERROR,
|
||||
message: err.message
|
||||
});
|
||||
}
|
||||
onHTMLRenderSSRError(err, errorInfo);
|
||||
}
|
||||
}
|
||||
return err.digest;
|
||||
};
|
||||
}
|
||||
function isUserLandError(err) {
|
||||
return !(0, _pipereadable.isAbortError)(err) && !(0, _bailouttocsr.isBailoutToCSRError)(err) && !(0, _isnextroutererror.isNextRouterError)(err);
|
||||
}
|
||||
|
||||
//# sourceMappingURL=create-error-handler.js.map
|
||||
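A rough sketch of the digest logic implemented by the two handlers above: an error that already carries a digest keeps it (and may be swapped back for the original error recorded under that digest), while an error without one gets a digest derived from hashing its message and stack. The `hash` helper below is a stand-in for the bundled string-hash package, not the real dependency:

// Illustration only; `hash` stands in for next/dist/compiled/string-hash.
function hash(input) {
  let h = 5381;
  for (let i = 0; i < input.length; i++) h = (h * 33) ^ input.charCodeAt(i);
  return (h >>> 0).toString();
}

function ensureDigest(err, knownErrors) {
  if (err.digest) {
    // Respect an existing digest and recover the original error if it was seen before.
    return knownErrors.get(err.digest) ?? err;
  }
  err.digest = hash(err.message + (err.stack || ''));
  knownErrors.set(err.digest, err);
  return err;
}

// Two renders of the same failing component produce the same digest, so an
// obfuscated client-side error can be correlated with the server-side log.
const knownErrors = new Map();
const err = ensureDigest(new Error('boom'), knownErrors);
console.log(err.digest, knownErrors.has(err.digest)); // "<some digest>" true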
1
apps/public-web/node_modules/next/dist/server/app-render/create-error-handler.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
5
apps/public-web/node_modules/next/dist/server/app-render/create-flight-router-state-from-loader-tree.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
import type { LoaderTree } from '../lib/app-dir-module';
|
||||
import { type FlightRouterState } from '../../shared/lib/app-router-types';
|
||||
import type { GetDynamicParamFromSegment } from './app-render';
|
||||
export declare function createFlightRouterStateFromLoaderTree(loaderTree: LoaderTree, getDynamicParamFromSegment: GetDynamicParamFromSegment, searchParams: any): FlightRouterState;
|
||||
export declare function createRouteTreePrefetch(loaderTree: LoaderTree, getDynamicParamFromSegment: GetDynamicParamFromSegment): FlightRouterState;
|
||||
76
apps/public-web/node_modules/next/dist/server/app-render/create-flight-router-state-from-loader-tree.js
generated
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
createFlightRouterStateFromLoaderTree: null,
|
||||
createRouteTreePrefetch: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
createFlightRouterStateFromLoaderTree: function() {
|
||||
return createFlightRouterStateFromLoaderTree;
|
||||
},
|
||||
createRouteTreePrefetch: function() {
|
||||
return createRouteTreePrefetch;
|
||||
}
|
||||
});
|
||||
const _approutertypes = require("../../shared/lib/app-router-types");
|
||||
const _segment = require("../../shared/lib/segment");
|
||||
function createFlightRouterStateFromLoaderTreeImpl([segment, parallelRoutes, { layout, loading }], getDynamicParamFromSegment, searchParams, includeHasLoadingBoundary, didFindRootLayout) {
|
||||
const dynamicParam = getDynamicParamFromSegment(segment);
|
||||
const treeSegment = dynamicParam ? dynamicParam.treeSegment : segment;
|
||||
const segmentTree = [
|
||||
(0, _segment.addSearchParamsIfPageSegment)(treeSegment, searchParams),
|
||||
{}
|
||||
];
|
||||
// Mark the first segment that has a layout as the "root" layout
|
||||
if (!didFindRootLayout && typeof layout !== 'undefined') {
|
||||
didFindRootLayout = true;
|
||||
segmentTree[4] = true;
|
||||
}
|
||||
let childHasLoadingBoundary = false;
|
||||
const children = {};
|
||||
Object.keys(parallelRoutes).forEach((parallelRouteKey)=>{
|
||||
const child = createFlightRouterStateFromLoaderTreeImpl(parallelRoutes[parallelRouteKey], getDynamicParamFromSegment, searchParams, includeHasLoadingBoundary, didFindRootLayout);
|
||||
if (includeHasLoadingBoundary && child[5] !== _approutertypes.HasLoadingBoundary.SubtreeHasNoLoadingBoundary) {
|
||||
childHasLoadingBoundary = true;
|
||||
}
|
||||
children[parallelRouteKey] = child;
|
||||
});
|
||||
segmentTree[1] = children;
|
||||
if (includeHasLoadingBoundary) {
|
||||
// During a route tree prefetch, the FlightRouterState includes whether a
|
||||
// tree has a loading boundary. The client uses this to determine if it can
|
||||
// skip the data prefetch request — if `hasLoadingBoundary` is `false`, the
|
||||
// data prefetch response will be empty, so there's no reason to request it.
|
||||
// NOTE: It would be better to accumulate this while building the loader
|
||||
// tree so we don't have to keep re-deriving it, but since this won't be
|
||||
// needed once PPR is enabled everywhere, it's not that important.
|
||||
segmentTree[5] = loading ? _approutertypes.HasLoadingBoundary.SegmentHasLoadingBoundary : childHasLoadingBoundary ? _approutertypes.HasLoadingBoundary.SubtreeHasLoadingBoundary : _approutertypes.HasLoadingBoundary.SubtreeHasNoLoadingBoundary;
|
||||
}
|
||||
return segmentTree;
|
||||
}
|
||||
function createFlightRouterStateFromLoaderTree(loaderTree, getDynamicParamFromSegment, searchParams) {
|
||||
const includeHasLoadingBoundary = false;
|
||||
const didFindRootLayout = false;
|
||||
return createFlightRouterStateFromLoaderTreeImpl(loaderTree, getDynamicParamFromSegment, searchParams, includeHasLoadingBoundary, didFindRootLayout);
|
||||
}
|
||||
function createRouteTreePrefetch(loaderTree, getDynamicParamFromSegment) {
|
||||
// Search params should not be added to page segment's cache key during a
|
||||
// route tree prefetch request, because they do not affect the structure of
|
||||
// the route. The client cache has its own logic to handle search params.
|
||||
const searchParams = {};
|
||||
// During a route tree prefetch, we include `hasLoadingBoundary` in
|
||||
// the response.
|
||||
const includeHasLoadingBoundary = true;
|
||||
const didFindRootLayout = false;
|
||||
return createFlightRouterStateFromLoaderTreeImpl(loaderTree, getDynamicParamFromSegment, searchParams, includeHasLoadingBoundary, didFindRootLayout);
|
||||
}
|
||||
|
||||
//# sourceMappingURL=create-flight-router-state-from-loader-tree.js.map
|
||||
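For reference, a hand-written sketch of the FlightRouterState tuples this builder emits. The field positions are inferred from the assignments above (index 1 holds the parallel-route children, index 4 marks the first segment that has a layout, index 5 is only populated during a route-tree prefetch); the segment names are invented for the example:

// Shape: [segment, parallelRoutes, ...two navigation slots unused here..., isRootLayout, hasLoadingBoundary?]
const exampleFlightRouterState = [
  '',                                     // root segment
  {
    children: [
      'blog',                             // static /blog segment
      {
        children: [
          ['slug', 'hello-world', 'd'],   // dynamic [slug] segment as [param, value, type]
          { children: ['__PAGE__', {}] }, // leaf page segment
        ],
      },
    ],
  },
  undefined,
  undefined,
  true,                                   // index 4: this level contains the root layout
];
console.log(exampleFlightRouterState[1].children[0]); // 'blog'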
1
apps/public-web/node_modules/next/dist/server/app-render/create-flight-router-state-from-loader-tree.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
apps/public-web/node_modules/next/dist/server/app-render/csrf-protection.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export declare const isCsrfOriginAllowed: (originDomain: string, allowedOrigins?: string[]) => boolean;
|
||||
75
apps/public-web/node_modules/next/dist/server/app-render/csrf-protection.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
// micromatch is only available at node runtime, so it cannot be used here since the code path that calls this function
|
||||
// can be run from edge. This is a simple implementation that safely achieves the required functionality.
|
||||
// the goal is to match the functionality for remotePatterns as defined here -
|
||||
// https://nextjs.org/docs/app/api-reference/components/image#remotepatterns
|
||||
// TODO - retrofit micromatch to work in edge and use that instead
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "isCsrfOriginAllowed", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return isCsrfOriginAllowed;
|
||||
}
|
||||
});
|
||||
function matchWildcardDomain(domain, pattern) {
|
||||
const domainParts = domain.split('.');
|
||||
const patternParts = pattern.split('.');
|
||||
if (patternParts.length < 1) {
|
||||
// pattern is empty and therefore invalid to match against
|
||||
return false;
|
||||
}
|
||||
if (domainParts.length < patternParts.length) {
|
||||
// domain has too few segments and thus cannot match
|
||||
return false;
|
||||
}
|
||||
// Prevent wildcards from matching entire domains (e.g. '**' or '*.com')
|
||||
// This ensures wildcards can only match subdomains, not the main domain
|
||||
if (patternParts.length === 1 && (patternParts[0] === '*' || patternParts[0] === '**')) {
|
||||
return false;
|
||||
}
|
||||
while(patternParts.length){
|
||||
const patternPart = patternParts.pop();
|
||||
const domainPart = domainParts.pop();
|
||||
switch(patternPart){
|
||||
case '':
|
||||
{
|
||||
// invalid pattern. pattern segments must be non empty
|
||||
return false;
|
||||
}
|
||||
case '*':
|
||||
{
|
||||
// wildcard matches anything so we continue if the domain part is non-empty
|
||||
if (domainPart) {
|
||||
continue;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
case '**':
|
||||
{
|
||||
// if this is not the last item in the pattern the pattern is invalid
|
||||
if (patternParts.length > 0) {
|
||||
return false;
|
||||
}
|
||||
// recursive wildcard matches anything so we terminate here if the domain part is non empty
|
||||
return domainPart !== undefined;
|
||||
}
|
||||
case undefined:
|
||||
default:
|
||||
{
|
||||
if (domainPart !== patternPart) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// We exhausted the pattern. If we also exhausted the domain we have a match
|
||||
return domainParts.length === 0;
|
||||
}
|
||||
const isCsrfOriginAllowed = (originDomain, allowedOrigins = [])=>{
|
||||
return allowedOrigins.some((allowedOrigin)=>allowedOrigin && (allowedOrigin === originDomain || matchWildcardDomain(originDomain, allowedOrigin)));
|
||||
};
|
||||
|
||||
//# sourceMappingURL=csrf-protection.js.map
|
||||
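A few worked examples of the wildcard matching implemented above. The behaviour follows directly from matchWildcardDomain; the require path is simply the vendored file from this diff:

const { isCsrfOriginAllowed } = require('next/dist/server/app-render/csrf-protection');

console.log(isCsrfOriginAllowed('app.example.com', ['app.example.com'])); // true  (exact match)
console.log(isCsrfOriginAllowed('app.example.com', ['*.example.com']));   // true  ('*' matches one subdomain level)
console.log(isCsrfOriginAllowed('a.b.example.com', ['*.example.com']));   // false (one '*' cannot span two levels)
console.log(isCsrfOriginAllowed('a.b.example.com', ['**.example.com']));  // true  ('**' matches nested subdomains)
console.log(isCsrfOriginAllowed('example.com', ['*.example.com']));       // false (wildcards never match the bare domain)
console.log(isCsrfOriginAllowed('example.com', ['*']));                   // false (a bare wildcard pattern is rejected)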
1
apps/public-web/node_modules/next/dist/server/app-render/csrf-protection.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/csrf-protection.ts"],"sourcesContent":["// micromatch is only available at node runtime, so it cannot be used here since the code path that calls this function\n// can be run from edge. This is a simple implementation that safely achieves the required functionality.\n// the goal is to match the functionality for remotePatterns as defined here -\n// https://nextjs.org/docs/app/api-reference/components/image#remotepatterns\n// TODO - retrofit micromatch to work in edge and use that instead\nfunction matchWildcardDomain(domain: string, pattern: string) {\n const domainParts = domain.split('.')\n const patternParts = pattern.split('.')\n\n if (patternParts.length < 1) {\n // pattern is empty and therefore invalid to match against\n return false\n }\n\n if (domainParts.length < patternParts.length) {\n // domain has too few segments and thus cannot match\n return false\n }\n\n // Prevent wildcards from matching entire domains (e.g. '**' or '*.com')\n // This ensures wildcards can only match subdomains, not the main domain\n if (\n patternParts.length === 1 &&\n (patternParts[0] === '*' || patternParts[0] === '**')\n ) {\n return false\n }\n\n while (patternParts.length) {\n const patternPart = patternParts.pop()\n const domainPart = domainParts.pop()\n\n switch (patternPart) {\n case '': {\n // invalid pattern. pattern segments must be non empty\n return false\n }\n case '*': {\n // wildcard matches anything so we continue if the domain part is non-empty\n if (domainPart) {\n continue\n } else {\n return false\n }\n }\n case '**': {\n // if this is not the last item in the pattern the pattern is invalid\n if (patternParts.length > 0) {\n return false\n }\n // recursive wildcard matches anything so we terminate here if the domain part is non empty\n return domainPart !== undefined\n }\n case undefined:\n default: {\n if (domainPart !== patternPart) {\n return false\n }\n }\n }\n }\n\n // We exhausted the pattern. 
If we also exhausted the domain we have a match\n return domainParts.length === 0\n}\n\nexport const isCsrfOriginAllowed = (\n originDomain: string,\n allowedOrigins: string[] = []\n): boolean => {\n return allowedOrigins.some(\n (allowedOrigin) =>\n allowedOrigin &&\n (allowedOrigin === originDomain ||\n matchWildcardDomain(originDomain, allowedOrigin))\n )\n}\n"],"names":["isCsrfOriginAllowed","matchWildcardDomain","domain","pattern","domainParts","split","patternParts","length","patternPart","pop","domainPart","undefined","originDomain","allowedOrigins","some","allowedOrigin"],"mappings":"AAAA,uHAAuH;AACvH,yGAAyG;AACzG,8EAA8E;AAC9E,4EAA4E;AAC5E,kEAAkE;;;;;+BA8DrDA;;;eAAAA;;;AA7Db,SAASC,oBAAoBC,MAAc,EAAEC,OAAe;IAC1D,MAAMC,cAAcF,OAAOG,KAAK,CAAC;IACjC,MAAMC,eAAeH,QAAQE,KAAK,CAAC;IAEnC,IAAIC,aAAaC,MAAM,GAAG,GAAG;QAC3B,0DAA0D;QAC1D,OAAO;IACT;IAEA,IAAIH,YAAYG,MAAM,GAAGD,aAAaC,MAAM,EAAE;QAC5C,oDAAoD;QACpD,OAAO;IACT;IAEA,wEAAwE;IACxE,wEAAwE;IACxE,IACED,aAAaC,MAAM,KAAK,KACvBD,CAAAA,YAAY,CAAC,EAAE,KAAK,OAAOA,YAAY,CAAC,EAAE,KAAK,IAAG,GACnD;QACA,OAAO;IACT;IAEA,MAAOA,aAAaC,MAAM,CAAE;QAC1B,MAAMC,cAAcF,aAAaG,GAAG;QACpC,MAAMC,aAAaN,YAAYK,GAAG;QAElC,OAAQD;YACN,KAAK;gBAAI;oBACP,sDAAsD;oBACtD,OAAO;gBACT;YACA,KAAK;gBAAK;oBACR,2EAA2E;oBAC3E,IAAIE,YAAY;wBACd;oBACF,OAAO;wBACL,OAAO;oBACT;gBACF;YACA,KAAK;gBAAM;oBACT,qEAAqE;oBACrE,IAAIJ,aAAaC,MAAM,GAAG,GAAG;wBAC3B,OAAO;oBACT;oBACA,2FAA2F;oBAC3F,OAAOG,eAAeC;gBACxB;YACA,KAAKA;YACL;gBAAS;oBACP,IAAID,eAAeF,aAAa;wBAC9B,OAAO;oBACT;gBACF;QACF;IACF;IAEA,4EAA4E;IAC5E,OAAOJ,YAAYG,MAAM,KAAK;AAChC;AAEO,MAAMP,sBAAsB,CACjCY,cACAC,iBAA2B,EAAE;IAE7B,OAAOA,eAAeC,IAAI,CACxB,CAACC,gBACCA,iBACCA,CAAAA,kBAAkBH,gBACjBX,oBAAoBW,cAAcG,cAAa;AAEvD","ignoreList":[0]}
|
||||
2
apps/public-web/node_modules/next/dist/server/app-render/dynamic-access-async-storage-instance.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import type { DynamicAccessStorage } from './dynamic-access-async-storage.external';
|
||||
export declare const dynamicAccessAsyncStorageInstance: DynamicAccessStorage;
|
||||
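The implementation behind this declaration is not part of this hunk; Next.js's *-async-storage-instance modules generally just export a Node AsyncLocalStorage. A minimal sketch of that pattern, with an assumed store shape, for readers unfamiliar with it:

// Sketch only: assumes DynamicAccessStorage is an AsyncLocalStorage over a
// per-request store; the { abortController } shape below is illustrative.
const { AsyncLocalStorage } = require('node:async_hooks');

const dynamicAccessAsyncStorageInstance = new AsyncLocalStorage();

dynamicAccessAsyncStorageInstance.run({ abortController: new AbortController() }, () => {
  // Anything deeper in the call stack can read the store without prop drilling.
  const store = dynamicAccessAsyncStorageInstance.getStore();
  console.log(Boolean(store && store.abortController)); // true
});

module.exports = { dynamicAccessAsyncStorageInstance };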