feat(blog): add file-based blog with dynamic slugs, MDX content and layout shell
- Introduced blog routing using the Next.js App Router
- Implemented dynamic [slug] pages for blog posts
- Added MDX-based content loading via lib/posts
- Integrated shared TopBar layout with navigation
- Established clear content, lib and component separation
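For context only: the blog route and lib/posts helpers described above are not part of the vendored diffs shown below. A minimal sketch of what such a dynamic [slug] page might look like in the App Router follows; the file path, the getAllPosts/getPostBySlug helper names, and the post shape are assumptions for illustration, not taken from this commit.

// app/blog/[slug]/page.tsx — hypothetical sketch, not part of this diff
import { notFound } from 'next/navigation';
import { getAllPosts, getPostBySlug } from '@/lib/posts'; // assumed helpers in lib/posts

// Pre-render one static page per post at build time
export async function generateStaticParams() {
  const posts = await getAllPosts();
  return posts.map((post) => ({ slug: post.slug }));
}

export default async function BlogPostPage({
  params,
}: {
  // in recent Next.js releases params is async; older versions pass a plain object
  params: Promise<{ slug: string }>;
}) {
  const { slug } = await params;
  const post = await getPostBySlug(slug); // assumed to load and compile the MDX source
  if (!post) notFound();

  return (
    <article>
      <h1>{post.title}</h1>
      {post.content}
    </article>
  );
}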
27 apps/public-web/node_modules/next/dist/server/dev/browser-logs/file-logger.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,27 @@
export interface LogEntry {
    timestamp: string;
    source: 'Server' | 'Browser';
    level: string;
    message: string;
}
export declare class FileLogger {
    private logFilePath;
    private isInitialized;
    private logQueue;
    private flushTimer;
    private mcpServerEnabled;
    initialize(distDir: string, mcpServerEnabled: boolean): void;
    private formatTimestamp;
    private formatLogEntry;
    private scheduleFlush;
    getLogQueue(): string[];
    private flush;
    private enqueueLog;
    log(source: 'Server' | 'Browser', level: string, message: string): void;
    logServer(level: string, message: string): void;
    logBrowser(level: string, message: string): void;
    forceFlush(): void;
    destroy(): void;
}
export declare function getFileLogger(): FileLogger;
export declare function test__resetFileLogger(): void;
183 apps/public-web/node_modules/next/dist/server/dev/browser-logs/file-logger.js (generated, vendored, Normal file)
@@ -0,0 +1,183 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
0 && (module.exports = {
    FileLogger: null,
    getFileLogger: null,
    test__resetFileLogger: null
});
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    FileLogger: function() {
        return FileLogger;
    },
    getFileLogger: function() {
        return getFileLogger;
    },
    test__resetFileLogger: function() {
        return test__resetFileLogger;
    }
});
const _fs = /*#__PURE__*/ _interop_require_default(require("fs"));
const _path = /*#__PURE__*/ _interop_require_default(require("path"));
function _interop_require_default(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
class FileLogger {
    initialize(distDir, mcpServerEnabled) {
        this.logFilePath = _path.default.join(distDir, 'logs', `next-development.log`);
        this.mcpServerEnabled = mcpServerEnabled;
        if (this.isInitialized) {
            return;
        }
        // Only initialize if mcpServer is enabled
        if (!this.mcpServerEnabled) {
            return;
        }
        try {
            // Clean up the log file on each initialization
            // ensure the directory exists
            _fs.default.mkdirSync(_path.default.dirname(this.logFilePath), {
                recursive: true
            });
            _fs.default.writeFileSync(this.logFilePath, '');
            this.isInitialized = true;
        } catch (error) {
            console.error(error);
        }
    }
    formatTimestamp() {
        // Use performance.now() instead of Date.now() for avoid sync IO of cache components
        const now = performance.now();
        const hours = Math.floor(now / 3600000).toString().padStart(2, '0');
        const minutes = Math.floor(now % 3600000 / 60000).toString().padStart(2, '0');
        const seconds = Math.floor(now % 60000 / 1000).toString().padStart(2, '0');
        const milliseconds = Math.floor(now % 1000).toString().padStart(3, '0');
        return `${hours}:${minutes}:${seconds}.${milliseconds}`;
    }
    formatLogEntry(entry) {
        const { timestamp, source, level, message } = entry;
        const levelPadded = level.toUpperCase().padEnd(7, ' ') // Pad level to 7 characters for alignment
        ;
        const sourcePadded = source === 'Browser' ? source : 'Server ';
        return `[${timestamp}] ${sourcePadded} ${levelPadded} ${message}\n`;
    }
    scheduleFlush() {
        // Debounce the flush
        if (this.flushTimer) {
            clearTimeout(this.flushTimer);
            this.flushTimer = null;
        }
        // Delay the log flush to ensure more logs can be batched together asynchronously
        this.flushTimer = setTimeout(()=>{
            this.flush();
        }, 100);
    }
    getLogQueue() {
        return this.logQueue;
    }
    flush() {
        if (this.logQueue.length === 0) {
            return;
        }
        // Only flush to disk if mcpServer is enabled
        if (!this.mcpServerEnabled) {
            this.logQueue = [] // Clear the queue without writing
            ;
            this.flushTimer = null;
            return;
        }
        try {
            // Ensure the directory exists before writing
            const logDir = _path.default.dirname(this.logFilePath);
            if (!_fs.default.existsSync(logDir)) {
                _fs.default.mkdirSync(logDir, {
                    recursive: true
                });
            }
            const logsToWrite = this.logQueue.join('');
            // Writing logs to files synchronously to ensure they're written before returning
            _fs.default.appendFileSync(this.logFilePath, logsToWrite);
            this.logQueue = [];
        } catch (error) {
            console.error('Failed to flush logs to file:', error);
        } finally{
            this.flushTimer = null;
        }
    }
    enqueueLog(formattedEntry) {
        this.logQueue.push(formattedEntry);
        // Cancel existing timer and start a new one to ensure all logs are flushed together
        if (this.flushTimer) {
            clearTimeout(this.flushTimer);
            this.flushTimer = null;
        }
        this.scheduleFlush();
    }
    log(source, level, message) {
        // Don't log anything if mcpServer is disabled
        if (!this.mcpServerEnabled) {
            return;
        }
        if (!this.isInitialized) {
            return;
        }
        const logEntry = {
            timestamp: this.formatTimestamp(),
            source,
            level,
            message
        };
        const formattedEntry = this.formatLogEntry(logEntry);
        this.enqueueLog(formattedEntry);
    }
    logServer(level, message) {
        this.log('Server', level, message);
    }
    logBrowser(level, message) {
        this.log('Browser', level, message);
    }
    // Force flush all queued logs immediately
    forceFlush() {
        if (this.flushTimer) {
            clearTimeout(this.flushTimer);
            this.flushTimer = null;
        }
        this.flush();
    }
    // Cleanup method to flush logs on process exit
    destroy() {
        this.forceFlush();
    }
    constructor(){
        this.logFilePath = '';
        this.isInitialized = false;
        this.logQueue = [];
        this.flushTimer = null;
        this.mcpServerEnabled = false;
    }
}
// Singleton instance
let fileLogger = null;
function getFileLogger() {
    if (!fileLogger || process.env.NODE_ENV === 'test') {
        fileLogger = new FileLogger();
    }
    return fileLogger;
}
function test__resetFileLogger() {
    if (fileLogger) {
        fileLogger.destroy();
    }
    fileLogger = null;
}

//# sourceMappingURL=file-logger.js.map
1 apps/public-web/node_modules/next/dist/server/dev/browser-logs/file-logger.js.map (generated, vendored, Normal file)
File diff suppressed because one or more lines are too long
40 apps/public-web/node_modules/next/dist/server/dev/browser-logs/receive-logs.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,40 @@
import type { Project } from '../../../build/swc/types';
import { type MappingContext } from './source-map';
import { type ServerLogEntry } from '../../../next-devtools/shared/forward-logs-shared';
export declare function restoreUndefined(x: any): any;
export declare function stripFormatSpecifiers(args: any[]): any[];
export declare function handleLog(entries: ServerLogEntry[], ctx: MappingContext, distDir: string, config: boolean | {
    logDepth?: number;
    showSourceLocation?: boolean;
}): Promise<void>;
export declare function receiveBrowserLogsWebpack(opts: {
    entries: ServerLogEntry[];
    router: 'app' | 'pages';
    sourceType?: 'server' | 'edge-server';
    clientStats: () => any;
    serverStats: () => any;
    edgeServerStats: () => any;
    rootDirectory: string;
    distDir: string;
    config: boolean | {
        logDepth?: number;
        showSourceLocation?: boolean;
    };
}): Promise<void>;
export declare function receiveBrowserLogsTurbopack(opts: {
    entries: ServerLogEntry[];
    router: 'app' | 'pages';
    sourceType?: 'server' | 'edge-server';
    project: Project;
    projectPath: string;
    distDir: string;
    config: boolean | {
        logDepth?: number;
        showSourceLocation?: boolean;
    };
}): Promise<void>;
export declare function handleClientFileLogs(logs: Array<{
    timestamp: string;
    level: string;
    message: string;
}>): Promise<void>;
512 apps/public-web/node_modules/next/dist/server/dev/browser-logs/receive-logs.js (generated, vendored, Normal file)
@@ -0,0 +1,512 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
0 && (module.exports = {
    handleClientFileLogs: null,
    handleLog: null,
    receiveBrowserLogsTurbopack: null,
    receiveBrowserLogsWebpack: null,
    restoreUndefined: null,
    stripFormatSpecifiers: null
});
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    handleClientFileLogs: function() {
        return handleClientFileLogs;
    },
    handleLog: function() {
        return handleLog;
    },
    receiveBrowserLogsTurbopack: function() {
        return receiveBrowserLogsTurbopack;
    },
    receiveBrowserLogsWebpack: function() {
        return receiveBrowserLogsWebpack;
    },
    restoreUndefined: function() {
        return restoreUndefined;
    },
    stripFormatSpecifiers: function() {
        return stripFormatSpecifiers;
    }
});
const _picocolors = require("../../../lib/picocolors");
const _util = /*#__PURE__*/ _interop_require_default(require("util"));
const _sourcemap = require("./source-map");
const _forwardlogsshared = require("../../../next-devtools/shared/forward-logs-shared");
const _console = require("../../../client/lib/console");
const _filelogger = require("./file-logger");
function _interop_require_default(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
function restoreUndefined(x) {
    if (x === _forwardlogsshared.UNDEFINED_MARKER) return undefined;
    if (Array.isArray(x)) return x.map(restoreUndefined);
    if (x && typeof x === 'object') {
        for(let k in x){
            x[k] = restoreUndefined(x[k]);
        }
    }
    return x;
}
function cleanConsoleArgsForFileLogging(args) {
    /**
     * Use formatConsoleArgs to strip out background and color format specifiers
     * and keep only the original string content for file logging
     */ try {
        return (0, _console.formatConsoleArgs)(args);
    } catch {
        // Fallback to simple string conversion if formatting fails
        return args.map((arg)=>typeof arg === 'string' ? arg : _util.default.inspect(arg, {
                depth: 2
            })).join(' ');
    }
}
const methods = [
    'log',
    'info',
    'warn',
    'debug',
    'table',
    'error',
    'assert',
    'dir',
    'dirxml',
    'group',
    'groupCollapsed',
    'groupEnd'
];
const methodsToSkipInspect = new Set([
    'table',
    'dir',
    'dirxml',
    'group',
    'groupCollapsed',
    'groupEnd'
]);
// we aren't overriding console, we're just making a (slightly convoluted) helper for replaying user console methods
const forwardConsole = {
    ...console,
    ...Object.fromEntries(methods.map((method)=>[
            method,
            (...args)=>console[method](...args.map((arg)=>methodsToSkipInspect.has(method) || typeof arg !== 'object' || arg === null ? arg : _util.default.inspect(arg, {
                        depth: Infinity,
                        colors: true
                    })))
        ]))
};
async function deserializeArgData(arg) {
    try {
        // we want undefined to be represented as it would be in the browser from the user's perspective (otherwise it would be stripped away/shown as null)
        if (arg === _forwardlogsshared.UNDEFINED_MARKER) {
            return restoreUndefined(arg);
        }
        return restoreUndefined(JSON.parse(arg));
    } catch {
        return arg;
    }
}
const colorError = (mapped, config)=>{
    const colorFn = (config == null ? void 0 : config.applyColor) === undefined || config.applyColor ? _picocolors.red : (x)=>x;
    switch(mapped.kind){
        case 'mapped-stack':
        case 'stack':
            {
                return ((config == null ? void 0 : config.prefix) ? colorFn(config == null ? void 0 : config.prefix) : '') + `\n${colorFn(mapped.stack)}`;
            }
        case 'with-frame-code':
            {
                return ((config == null ? void 0 : config.prefix) ? colorFn(config == null ? void 0 : config.prefix) : '') + `\n${colorFn(mapped.stack)}\n${mapped.frameCode}`;
            }
        // a more sophisticated version of this allows the user to config if they want ignored frames (but we need to be sure to source map them)
        case 'all-ignored':
            {
                return (config == null ? void 0 : config.prefix) ? colorFn(config == null ? void 0 : config.prefix) : '';
            }
        default:
            {}
    }
    mapped;
};
function processConsoleFormatStrings(args) {
    /**
     * this handles the case formatting is applied to the console log
     * otherwise we will see the format specifier directly in the terminal output
     */ if (args.length > 0 && typeof args[0] === 'string') {
        const formatString = args[0];
        if (formatString.includes('%s') || formatString.includes('%d') || formatString.includes('%i') || formatString.includes('%f') || formatString.includes('%o') || formatString.includes('%O') || formatString.includes('%c')) {
            try {
                const formatted = _util.default.format(...args);
                return [
                    formatted
                ];
            } catch {
                return args;
            }
        }
    }
    return args;
}
function stripFormatSpecifiers(args) {
    if (args.length === 0 || typeof args[0] !== 'string') return args;
    const fmtIn = String(args[0]);
    const rest = args.slice(1);
    if (!fmtIn.includes('%')) return args;
    let fmtOut = '';
    let argPtr = 0;
    for(let i = 0; i < fmtIn.length; i++){
        if (fmtIn[i] !== '%') {
            fmtOut += fmtIn[i];
            continue;
        }
        if (fmtIn[i + 1] === '%') {
            fmtOut += '%';
            i++;
            continue;
        }
        const token = fmtIn[++i];
        if (!token) {
            fmtOut += '%';
            continue;
        }
        if ('csdifoOj'.includes(token) || token === 'O') {
            if (argPtr < rest.length) {
                if (token === 'c') {
                    argPtr++;
                } else if (token === 'o' || token === 'O' || token === 'j') {
                    const obj = rest[argPtr++];
                    fmtOut += _util.default.inspect(obj, {
                        depth: 2,
                        colors: false
                    });
                } else {
                    // string(...) is safe for remaining specifiers
                    fmtOut += String(rest[argPtr++]);
                }
            }
            continue;
        }
        fmtOut += '%' + token;
    }
    const result = [
        fmtOut
    ];
    if (argPtr < rest.length) {
        result.push(...rest.slice(argPtr));
    }
    return result;
}
async function prepareFormattedErrorArgs(entry, ctx, distDir) {
    const mapped = await (0, _sourcemap.getSourceMappedStackFrames)(entry.stack, ctx, distDir);
    return [
        colorError(mapped, {
            prefix: entry.prefix
        })
    ];
}
async function prepareConsoleArgs(entry, ctx, distDir) {
    const deserialized = await Promise.all(entry.args.map(async (arg)=>{
        if (arg.kind === 'arg') {
            const data = await deserializeArgData(arg.data);
            if (entry.method === 'warn' && typeof data === 'string') {
                return (0, _picocolors.yellow)(data);
            }
            return data;
        }
        if (!arg.stack) return (0, _picocolors.red)(arg.prefix);
        const mapped = await (0, _sourcemap.getSourceMappedStackFrames)(arg.stack, ctx, distDir);
        return colorError(mapped, {
            prefix: arg.prefix,
            applyColor: false
        });
    }));
    return processConsoleFormatStrings(deserialized);
}
async function prepareConsoleErrorArgs(entry, ctx, distDir) {
    const deserialized = await Promise.all(entry.args.map(async (arg)=>{
        if (arg.kind === 'arg') {
            if (arg.isRejectionMessage) return (0, _picocolors.red)(arg.data);
            return deserializeArgData(arg.data);
        }
        if (!arg.stack) return (0, _picocolors.red)(arg.prefix);
        const mapped = await (0, _sourcemap.getSourceMappedStackFrames)(arg.stack, ctx, distDir);
        return colorError(mapped, {
            prefix: arg.prefix
        });
    }));
    const mappedStack = await (0, _sourcemap.getSourceMappedStackFrames)(entry.consoleErrorStack, ctx, distDir);
    /**
     * don't show the stack + codeblock when there are errors present, since:
     * - it will look overwhelming to see 2 stacks and 2 code blocks
     * - the user already knows where the console.error is at because we append the location
     */ const location = (0, _sourcemap.getConsoleLocation)(mappedStack);
    if (entry.args.some((a)=>a.kind === 'formatted-error-arg')) {
        const result = stripFormatSpecifiers(deserialized);
        if (location) {
            result.push((0, _picocolors.dim)(`(${location})`));
        }
        return result;
    }
    const result = [
        ...processConsoleFormatStrings(deserialized),
        colorError(mappedStack)
    ];
    if (location) {
        result.push((0, _picocolors.dim)(`(${location})`));
    }
    return result;
}
async function handleTable(entry, browserPrefix, ctx, distDir) {
    const deserializedArgs = await Promise.all(entry.args.map(async (arg)=>{
        if (arg.kind === 'formatted-error-arg') {
            return {
                stack: arg.stack
            };
        }
        return deserializeArgData(arg.data);
    }));
    const location = await (async ()=>{
        if (!entry.consoleMethodStack) {
            return;
        }
        const frames = await (0, _sourcemap.getSourceMappedStackFrames)(entry.consoleMethodStack, ctx, distDir);
        return (0, _sourcemap.getConsoleLocation)(frames);
    })();
    // we can't inline pass browser prefix, but it looks better multiline for table anyways
    forwardConsole.log(browserPrefix);
    forwardConsole.table(...deserializedArgs);
    if (location) {
        forwardConsole.log((0, _picocolors.dim)(`(${location})`));
    }
}
async function handleTrace(entry, browserPrefix, ctx, distDir) {
    const deserializedArgs = await Promise.all(entry.args.map(async (arg)=>{
        if (arg.kind === 'formatted-error-arg') {
            if (!arg.stack) return (0, _picocolors.red)(arg.prefix);
            const mapped = await (0, _sourcemap.getSourceMappedStackFrames)(arg.stack, ctx, distDir);
            return colorError(mapped, {
                prefix: arg.prefix
            });
        }
        return deserializeArgData(arg.data);
    }));
    if (!entry.consoleMethodStack) {
        forwardConsole.log(browserPrefix, ...deserializedArgs, '[Trace unavailable]');
        return;
    }
    // TODO(rob): refactor so we can re-use result and not re-run the entire source map to avoid trivial post processing
    const [mapped, mappedIgnored] = await Promise.all([
        (0, _sourcemap.getSourceMappedStackFrames)(entry.consoleMethodStack, ctx, distDir, false),
        (0, _sourcemap.getSourceMappedStackFrames)(entry.consoleMethodStack, ctx, distDir)
    ]);
    const location = (0, _sourcemap.getConsoleLocation)(mappedIgnored);
    forwardConsole.log(browserPrefix, ...deserializedArgs, `\n${mapped.stack}`, ...location ? [
        `\n${(0, _picocolors.dim)(`(${location})`)}`
    ] : []);
}
async function handleDir(entry, browserPrefix, ctx, distDir) {
    const loggableEntry = await prepareConsoleArgs(entry, ctx, distDir);
    const consoleMethod = forwardConsole[entry.method] || forwardConsole.log;
    if (entry.consoleMethodStack) {
        const mapped = await (0, _sourcemap.getSourceMappedStackFrames)(entry.consoleMethodStack, ctx, distDir);
        const location = (0, _picocolors.dim)(`(${(0, _sourcemap.getConsoleLocation)(mapped)})`);
        const originalWrite = process.stdout.write.bind(process.stdout);
        let captured = '';
        process.stdout.write = (chunk)=>{
            captured += chunk;
            return true;
        };
        try {
            consoleMethod(...loggableEntry);
        } finally{
            process.stdout.write = originalWrite;
        }
        const preserved = captured.replace(/\r?\n$/, '');
        originalWrite(`${browserPrefix}${preserved} ${location}\n`);
        return;
    }
    consoleMethod(browserPrefix, ...loggableEntry);
}
async function handleDefaultConsole(entry, browserPrefix, ctx, distDir, config, isServerLog) {
    const consoleArgs = await prepareConsoleArgs(entry, ctx, distDir);
    const withStackEntry = await (0, _sourcemap.withLocation)({
        original: consoleArgs,
        stack: entry.consoleMethodStack || null
    }, ctx, distDir, config);
    const consoleMethod = forwardConsole[entry.method] || forwardConsole.log;
    consoleMethod(browserPrefix, ...withStackEntry);
    // Process enqueued logs and write to file
    // Log to file with correct source based on context
    const fileLogger = (0, _filelogger.getFileLogger)();
    // Use cleaned console args to strip out background and color format specifiers
    const message = cleanConsoleArgsForFileLogging(consoleArgs);
    if (isServerLog) {
        fileLogger.logServer(entry.method.toUpperCase(), message);
    } else {
        fileLogger.logBrowser(entry.method.toUpperCase(), message);
    }
}
async function handleLog(entries, ctx, distDir, config) {
    // Determine the source based on the context
    const isServerLog = ctx.isServer || ctx.isEdgeServer;
    const browserPrefix = isServerLog ? (0, _picocolors.cyan)('[server]') : (0, _picocolors.cyan)('[browser]');
    const fileLogger = (0, _filelogger.getFileLogger)();
    for (const entry of entries){
        try {
            switch(entry.kind){
                case 'console':
                    {
                        switch(entry.method){
                            case 'table':
                                {
                                    // timeout based abort on source mapping result
                                    await handleTable(entry, browserPrefix, ctx, distDir);
                                    break;
                                }
                            // ignore frames
                            case 'trace':
                                {
                                    await handleTrace(entry, browserPrefix, ctx, distDir);
                                    break;
                                }
                            case 'dir':
                                {
                                    await handleDir(entry, browserPrefix, ctx, distDir);
                                    break;
                                }
                            case 'dirxml':
                                {
                                    // xml log thing maybe needs an impl
                                    // fallthrough
                                }
                            case 'group':
                            case 'groupCollapsed':
                            case 'groupEnd':
                                {
                                    // [browser] undefined (app/page.tsx:8:11) console.group
                                    // fallthrough
                                }
                            case 'assert':
                                {
                                    // check console assert
                                    // fallthrough
                                }
                            case 'log':
                            case 'info':
                            case 'debug':
                            case 'error':
                            case 'warn':
                                {
                                    await handleDefaultConsole(entry, browserPrefix, ctx, distDir, config, isServerLog);
                                    break;
                                }
                            default:
                                {
                                    entry;
                                }
                        }
                        break;
                    }
                // any logged errors are anything that are logged as "red" in the browser but aren't only an Error (console.error, Promise.reject(100))
                case 'any-logged-error':
                    {
                        const consoleArgs = await prepareConsoleErrorArgs(entry, ctx, distDir);
                        forwardConsole.error(browserPrefix, ...consoleArgs);
                        // Process enqueued logs and write to file
                        fileLogger.logBrowser('ERROR', cleanConsoleArgsForFileLogging(consoleArgs));
                        break;
                    }
                // formatted error is an explicit error event (rejections, uncaught errors)
                case 'formatted-error':
                    {
                        const formattedArgs = await prepareFormattedErrorArgs(entry, ctx, distDir);
                        forwardConsole.error(browserPrefix, ...formattedArgs);
                        // Process enqueued logs and write to file
                        fileLogger.logBrowser('ERROR', cleanConsoleArgsForFileLogging(formattedArgs));
                        break;
                    }
                default:
                    {}
            }
        } catch {
            switch(entry.kind){
                case 'any-logged-error':
                    {
                        const consoleArgs = await prepareConsoleErrorArgs(entry, ctx, distDir);
                        forwardConsole.error(browserPrefix, ...consoleArgs);
                        // Process enqueued logs and write to file
                        fileLogger.logBrowser('ERROR', cleanConsoleArgsForFileLogging(consoleArgs));
                        break;
                    }
                case 'console':
                    {
                        const consoleMethod = forwardConsole[entry.method] || forwardConsole.log;
                        const consoleArgs = await prepareConsoleArgs(entry, ctx, distDir);
                        consoleMethod(browserPrefix, ...consoleArgs);
                        // Process enqueued logs and write to file
                        fileLogger.logBrowser('ERROR', cleanConsoleArgsForFileLogging(consoleArgs));
                        break;
                    }
                case 'formatted-error':
                    {
                        forwardConsole.error(browserPrefix, `${entry.prefix}\n`, entry.stack);
                        // Process enqueued logs and write to file
                        fileLogger.logBrowser('ERROR', cleanConsoleArgsForFileLogging([
                            `${entry.prefix}\n${entry.stack}`
                        ]));
                        break;
                    }
                default:
                    {}
            }
        }
    }
}
async function receiveBrowserLogsWebpack(opts) {
    const { entries, router, sourceType, clientStats, serverStats, edgeServerStats, rootDirectory, distDir } = opts;
    const isAppDirectory = router === 'app';
    const isServer = sourceType === 'server';
    const isEdgeServer = sourceType === 'edge-server';
    const ctx = {
        bundler: 'webpack',
        isServer,
        isEdgeServer,
        isAppDirectory,
        clientStats,
        serverStats,
        edgeServerStats,
        rootDirectory
    };
    await handleLog(entries, ctx, distDir, opts.config);
}
async function receiveBrowserLogsTurbopack(opts) {
    const { entries, router, sourceType, project, projectPath, distDir } = opts;
    const isAppDirectory = router === 'app';
    const isServer = sourceType === 'server';
    const isEdgeServer = sourceType === 'edge-server';
    const ctx = {
        bundler: 'turbopack',
        project,
        projectPath,
        isServer,
        isEdgeServer,
        isAppDirectory
    };
    await handleLog(entries, ctx, distDir, opts.config);
}
async function handleClientFileLogs(logs) {
    const fileLogger = (0, _filelogger.getFileLogger)();
    for (const log of logs){
        fileLogger.logBrowser(log.level, log.message);
    }
}

//# sourceMappingURL=receive-logs.js.map
1 apps/public-web/node_modules/next/dist/server/dev/browser-logs/receive-logs.js.map (generated, vendored, Normal file)
File diff suppressed because one or more lines are too long
68 apps/public-web/node_modules/next/dist/server/dev/browser-logs/source-map.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,68 @@
import type { Project } from '../../../build/swc/types';
import { type StackFrame } from '../../lib/parse-stack';
type WebpackMappingContext = {
    bundler: 'webpack';
    isServer: boolean;
    isEdgeServer: boolean;
    isAppDirectory: boolean;
    clientStats: () => any;
    serverStats: () => any;
    edgeServerStats: () => any;
    rootDirectory: string;
};
type TurbopackMappingContext = {
    bundler: 'turbopack';
    isServer: boolean;
    isEdgeServer: boolean;
    isAppDirectory: boolean;
    project: Project;
    projectPath: string;
};
export type MappingContext = WebpackMappingContext | TurbopackMappingContext;
export declare function mapFramesUsingBundler(frames: StackFrame[], ctx: MappingContext): Promise<import("../../../next-devtools/server/shared").OriginalStackFramesResponse>;
export declare function getSourceMappedStackFrames(stackTrace: string, ctx: MappingContext, distDir: string, ignore?: boolean): Promise<{
    kind: "stack";
    stack: string;
    frameCode?: undefined;
    frames?: undefined;
} | {
    kind: "all-ignored";
    stack?: undefined;
    frameCode?: undefined;
    frames?: undefined;
} | {
    kind: "with-frame-code";
    frameCode: string;
    stack: string;
    frames: ({
        kind: "rejected";
        frameText: string;
        codeFrame: null;
    } | {
        kind: "success";
        frameText: string;
        codeFrame: string | null;
    })[];
} | {
    kind: "mapped-stack";
    stack: string;
    frames: ({
        kind: "rejected";
        frameText: string;
        codeFrame: null;
    } | {
        kind: "success";
        frameText: string;
        codeFrame: string | null;
    })[];
    frameCode?: undefined;
}>;
export declare const withLocation: ({ original, stack, }: {
    original: Array<any>;
    stack: string | null;
}, ctx: MappingContext, distDir: string, config: boolean | {
    logDepth?: number;
    showSourceLocation?: boolean;
}) => Promise<any[]>;
export declare const getConsoleLocation: (mapped: Awaited<ReturnType<typeof getSourceMappedStackFrames>>) => string | null;
export {};
225 apps/public-web/node_modules/next/dist/server/dev/browser-logs/source-map.js (generated, vendored, Normal file)
@@ -0,0 +1,225 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
0 && (module.exports = {
    getConsoleLocation: null,
    getSourceMappedStackFrames: null,
    mapFramesUsingBundler: null,
    withLocation: null
});
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    getConsoleLocation: function() {
        return getConsoleLocation;
    },
    getSourceMappedStackFrames: function() {
        return getSourceMappedStackFrames;
    },
    mapFramesUsingBundler: function() {
        return mapFramesUsingBundler;
    },
    withLocation: function() {
        return withLocation;
    }
});
const _middlewarewebpack = require("../middleware-webpack");
const _middlewareturbopack = require("../middleware-turbopack");
const _picocolors = require("../../../lib/picocolors");
const _parsestack = require("../../lib/parse-stack");
const _path = /*#__PURE__*/ _interop_require_default(require("path"));
const _lrucache = require("../../lib/lru-cache");
function _interop_require_default(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
async function mapFramesUsingBundler(frames, ctx) {
    switch(ctx.bundler){
        case 'webpack':
            {
                const { isServer, isEdgeServer, isAppDirectory, clientStats, serverStats, edgeServerStats, rootDirectory } = ctx;
                const res = await (0, _middlewarewebpack.getOriginalStackFrames)({
                    isServer,
                    isEdgeServer,
                    isAppDirectory,
                    frames,
                    clientStats,
                    serverStats,
                    edgeServerStats,
                    rootDirectory
                });
                return res;
            }
        case 'turbopack':
            {
                const { project, projectPath, isServer, isEdgeServer, isAppDirectory } = ctx;
                const res = await (0, _middlewareturbopack.getOriginalStackFrames)({
                    project,
                    projectPath,
                    frames,
                    isServer,
                    isEdgeServer,
                    isAppDirectory
                });
                return res;
            }
        default:
            {
                return null;
            }
    }
}
// converts _next/static/chunks/... to file:///.next/static/chunks/... for parseStack
// todo: where does next dev overlay handle this case and re-use that logic
function preprocessStackTrace(stackTrace, distDir) {
    return stackTrace.split('\n').map((line)=>{
        const match = line.match(/^(\s*at\s+.*?)\s+\(([^)]+)\)$/);
        if (match) {
            const [, prefix, location] = match;
            if (location.startsWith('_next/static/') && distDir) {
                const normalizedDistDir = distDir.replace(/\\/g, '/').replace(/\/$/, '');
                const absolutePath = normalizedDistDir + '/' + location.slice('_next/'.length);
                const fileUrl = `file://${_path.default.resolve(absolutePath)}`;
                return `${prefix} (${fileUrl})`;
            }
        }
        return line;
    }).join('\n');
}
const cache = new _lrucache.LRUCache(25);
async function getSourceMappedStackFramesInternal(stackTrace, ctx, distDir, ignore = true) {
    try {
        var _filteredFrames_find;
        const normalizedStack = preprocessStackTrace(stackTrace, distDir);
        const frames = (0, _parsestack.parseStack)(normalizedStack, distDir);
        if (frames.length === 0) {
            return {
                kind: 'stack',
                stack: stackTrace
            };
        }
        const mappingResults = await mapFramesUsingBundler(frames, ctx);
        const processedFrames = mappingResults.map((result, index)=>({
                result,
                originalFrame: frames[index]
            })).map(({ result, originalFrame })=>{
            var _originalStackFrame_file;
            if (result.status === 'rejected') {
                return {
                    kind: 'rejected',
                    frameText: formatStackFrame(originalFrame),
                    codeFrame: null
                };
            }
            const { originalStackFrame, originalCodeFrame } = result.value;
            if ((originalStackFrame == null ? void 0 : originalStackFrame.ignored) && ignore) {
                return {
                    kind: 'ignored'
                };
            }
            // should we apply this generally to dev overlay (dev overlay does not ignore chrome-extension://)
            if (originalStackFrame == null ? void 0 : (_originalStackFrame_file = originalStackFrame.file) == null ? void 0 : _originalStackFrame_file.startsWith('chrome-extension://')) {
                return {
                    kind: 'ignored'
                };
            }
            return {
                kind: 'success',
                // invariant: if result is not rejected and not ignored, then original stack frame exists
                // verifiable by tracing `getOriginalStackFrame`. The invariant exists because of bad types
                frameText: formatStackFrame(originalStackFrame),
                codeFrame: originalCodeFrame
            };
        });
        const allIgnored = processedFrames.every((frame)=>frame.kind === 'ignored');
        // we want to handle **all** ignored vs all/some rejected differently
        // if all are ignored we should show no frames
        // if all are rejected, we want to fallback to showing original stack frames
        if (allIgnored) {
            return {
                kind: 'all-ignored'
            };
        }
        const filteredFrames = processedFrames.filter((frame)=>frame.kind !== 'ignored');
        if (filteredFrames.length === 0) {
            return {
                kind: 'stack',
                stack: stackTrace
            };
        }
        const stackOutput = filteredFrames.map((frame)=>frame.frameText).join('\n');
        const firstFrameCode = (_filteredFrames_find = filteredFrames.find((frame)=>frame.codeFrame)) == null ? void 0 : _filteredFrames_find.codeFrame;
        if (firstFrameCode) {
            return {
                kind: 'with-frame-code',
                frameCode: firstFrameCode,
                stack: stackOutput,
                frames: filteredFrames
            };
        }
        // i don't think this a real case, but good for exhaustion
        return {
            kind: 'mapped-stack',
            stack: stackOutput,
            frames: filteredFrames
        };
    } catch (error) {
        return {
            kind: 'stack',
            stack: stackTrace
        };
    }
}
async function getSourceMappedStackFrames(stackTrace, ctx, distDir, ignore = true) {
    const cacheKey = `sm_${stackTrace}-${ctx.bundler}-${ctx.isAppDirectory}-${ctx.isEdgeServer}-${ctx.isServer}-${distDir}-${ignore}`;
    const cacheItem = cache.get(cacheKey);
    if (cacheItem) {
        return cacheItem;
    }
    const result = await getSourceMappedStackFramesInternal(stackTrace, ctx, distDir, ignore);
    cache.set(cacheKey, result);
    return result;
}
function formatStackFrame(frame) {
    const functionName = frame.methodName || '<anonymous>';
    const location = frame.file && frame.line1 ? `${frame.file}:${frame.line1}${frame.column1 ? `:${frame.column1}` : ''}` : frame.file || '<unknown>';
    return ` at ${functionName} (${location})`;
}
const withLocation = async ({ original, stack }, ctx, distDir, config)=>{
    if (typeof config === 'object' && config.showSourceLocation === false) {
        return original;
    }
    if (!stack) {
        return original;
    }
    const res = await getSourceMappedStackFrames(stack, ctx, distDir);
    const location = getConsoleLocation(res);
    if (!location) {
        return original;
    }
    return [
        ...original,
        (0, _picocolors.dim)(`(${location})`)
    ];
};
const getConsoleLocation = (mapped)=>{
    if (mapped.kind !== 'mapped-stack' && mapped.kind !== 'with-frame-code') {
        return null;
    }
    const first = mapped.frames.at(0);
    if (!first) {
        return null;
    }
    // we don't want to show the name of parent function (at <fn> thing in stack), just source location for minimal noise
    const match = first.frameText.match(/\(([^)]+)\)/);
    const locationText = match ? match[1] : first.frameText;
    return locationText;
};

//# sourceMappingURL=source-map.js.map
1 apps/public-web/node_modules/next/dist/server/dev/browser-logs/source-map.js.map (generated, vendored, Normal file)
File diff suppressed because one or more lines are too long