feat(blog): add file-based blog with dynamic slugs, MDX content and layout shell

- Introduced blog routing using Next.js App Router
- Implemented dynamic [slug] pages for blog posts
- Added MDX-based content loading via lib/posts
- Integrated shared TopBar layout with navigation
- Established a clear separation between content, lib, and components
PascalSchattenburg
2026-01-22 14:14:15 +01:00
parent b717952234
commit d147843c76
10412 changed files with 2475583 additions and 0 deletions


@@ -0,0 +1,5 @@
import { trace, exportTraceState, flushAllTraces, getTraceEvents, initializeTraceState, recordTraceEvents, Span, SpanStatus } from './trace';
import { setGlobal } from './shared';
import type { SpanId, TraceEvent, TraceState } from './types';
export { trace, exportTraceState, flushAllTraces, getTraceEvents, initializeTraceState, recordTraceEvents, Span, setGlobal, SpanStatus, };
export type { SpanId, TraceEvent, TraceState };

54
apps/public-web/node_modules/next/dist/trace/index.js generated vendored Normal file

@@ -0,0 +1,54 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
Span: null,
SpanStatus: null,
exportTraceState: null,
flushAllTraces: null,
getTraceEvents: null,
initializeTraceState: null,
recordTraceEvents: null,
setGlobal: null,
trace: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
Span: function() {
return _trace.Span;
},
SpanStatus: function() {
return _trace.SpanStatus;
},
exportTraceState: function() {
return _trace.exportTraceState;
},
flushAllTraces: function() {
return _trace.flushAllTraces;
},
getTraceEvents: function() {
return _trace.getTraceEvents;
},
initializeTraceState: function() {
return _trace.initializeTraceState;
},
recordTraceEvents: function() {
return _trace.recordTraceEvents;
},
setGlobal: function() {
return _shared.setGlobal;
},
trace: function() {
return _trace.trace;
}
});
const _trace = require("./trace");
const _shared = require("./shared");
//# sourceMappingURL=index.js.map
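
Taken together, this barrel module re-exports the tracing primitives used throughout the build. As a rough usage sketch (the `next/dist/trace` import path and the span names here are assumptions for illustration, not part of this diff), an internal consumer would create a root span, trace children, and flush the reporters:

import { trace, flushAllTraces } from 'next/dist/trace'

async function tracedTask() {
  // Root span; attrs become the event's `tags`.
  const root = trace('example-task', undefined, { step: 'demo' })
  try {
    // Synchronous child span, stopped when the callback returns.
    root.traceChild('sync-step').traceFn(() => 1 + 1)
    // Asynchronous child span, stopped when the promise settles.
    await root.traceChild('async-step').traceAsyncFn(async () => {
      await new Promise((resolve) => setTimeout(resolve, 10))
    })
  } finally {
    root.stop()
    // Hand buffered events to the reporters (JSON file, build JSON, telemetry).
    await flushAllTraces()
  }
}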


@@ -0,0 +1 @@
{"version":3,"sources":["../../src/trace/index.ts"],"sourcesContent":["import {\n trace,\n exportTraceState,\n flushAllTraces,\n getTraceEvents,\n initializeTraceState,\n recordTraceEvents,\n Span,\n SpanStatus,\n} from './trace'\nimport { setGlobal } from './shared'\nimport type { SpanId, TraceEvent, TraceState } from './types'\n\nexport {\n trace,\n exportTraceState,\n flushAllTraces,\n getTraceEvents,\n initializeTraceState,\n recordTraceEvents,\n Span,\n setGlobal,\n SpanStatus,\n}\nexport type { SpanId, TraceEvent, TraceState }\n"],"names":["Span","SpanStatus","exportTraceState","flushAllTraces","getTraceEvents","initializeTraceState","recordTraceEvents","setGlobal","trace"],"mappings":";;;;;;;;;;;;;;;;;;;;;;IAoBEA,IAAI;eAAJA,WAAI;;IAEJC,UAAU;eAAVA,iBAAU;;IAPVC,gBAAgB;eAAhBA,uBAAgB;;IAChBC,cAAc;eAAdA,qBAAc;;IACdC,cAAc;eAAdA,qBAAc;;IACdC,oBAAoB;eAApBA,2BAAoB;;IACpBC,iBAAiB;eAAjBA,wBAAiB;;IAEjBC,SAAS;eAATA,iBAAS;;IAPTC,KAAK;eAALA,YAAK;;;uBALA;wBACmB","ignoreList":[0]}


@@ -0,0 +1,12 @@
import type { TraceEvent } from '../types';
import type { Reporter } from './types';
declare class MultiReporter implements Reporter {
private reporters;
constructor(reporters: Reporter[]);
flushAll(opts?: {
end: boolean;
}): Promise<void>;
report(event: TraceEvent): void;
}
export declare const reporter: MultiReporter;
export {};


@@ -0,0 +1,37 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "reporter", {
enumerable: true,
get: function() {
return reporter;
}
});
const _totelemetry = /*#__PURE__*/ _interop_require_default(require("./to-telemetry"));
const _tojson = /*#__PURE__*/ _interop_require_default(require("./to-json"));
const _tojsonbuild = /*#__PURE__*/ _interop_require_default(require("./to-json-build"));
function _interop_require_default(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
class MultiReporter {
constructor(reporters){
this.reporters = [];
this.reporters = reporters;
}
async flushAll(opts) {
await Promise.all(this.reporters.map((reporter)=>reporter.flushAll(opts)));
}
report(event) {
this.reporters.forEach((reporter)=>reporter.report(event));
}
}
const reporter = new MultiReporter([
_tojson.default,
_tojsonbuild.default,
_totelemetry.default
]);
//# sourceMappingURL=index.js.map
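
The `MultiReporter` above simply fans each event out to the JSON, build-JSON, and telemetry reporters, and `flushAll` awaits all of them. A minimal sketch of the same `Reporter` contract (the console reporter is hypothetical and only illustrates the interface; the import paths assume an installed `next` package):

import type { TraceEvent } from 'next/dist/trace/types'
import type { Reporter } from 'next/dist/trace/report/types'

// A toy reporter satisfying the same interface the MultiReporter composes.
const consoleReporter: Reporter = {
  report(event: TraceEvent) {
    console.log(`[trace] ${event.name} (#${event.id}) took ${event.duration}µs`)
  },
  async flushAll() {
    // Nothing buffered here; the built-in JSON reporters flush file writes instead.
  },
}

consoleReporter.report({
  name: 'example-span',
  id: 1,
  timestamp: Date.now(),
  duration: 1234,
})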


@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/trace/report/index.ts"],"sourcesContent":["import type { TraceEvent } from '../types'\nimport reportToTelemetry from './to-telemetry'\nimport reportToJson from './to-json'\nimport reportToJsonBuild from './to-json-build'\nimport type { Reporter } from './types'\n\nclass MultiReporter implements Reporter {\n private reporters: Reporter[] = []\n\n constructor(reporters: Reporter[]) {\n this.reporters = reporters\n }\n\n async flushAll(opts?: { end: boolean }) {\n await Promise.all(this.reporters.map((reporter) => reporter.flushAll(opts)))\n }\n\n report(event: TraceEvent) {\n this.reporters.forEach((reporter) => reporter.report(event))\n }\n}\n\n// JSON is always reported to allow for diagnostics\nexport const reporter = new MultiReporter([\n reportToJson,\n reportToJsonBuild,\n reportToTelemetry,\n])\n"],"names":["reporter","MultiReporter","constructor","reporters","flushAll","opts","Promise","all","map","report","event","forEach","reportToJson","reportToJsonBuild","reportToTelemetry"],"mappings":";;;;+BAuBaA;;;eAAAA;;;oEAtBiB;+DACL;oEACK;;;;;;AAG9B,MAAMC;IAGJC,YAAYC,SAAqB,CAAE;aAF3BA,YAAwB,EAAE;QAGhC,IAAI,CAACA,SAAS,GAAGA;IACnB;IAEA,MAAMC,SAASC,IAAuB,EAAE;QACtC,MAAMC,QAAQC,GAAG,CAAC,IAAI,CAACJ,SAAS,CAACK,GAAG,CAAC,CAACR,WAAaA,SAASI,QAAQ,CAACC;IACvE;IAEAI,OAAOC,KAAiB,EAAE;QACxB,IAAI,CAACP,SAAS,CAACQ,OAAO,CAAC,CAACX,WAAaA,SAASS,MAAM,CAACC;IACvD;AACF;AAGO,MAAMV,WAAW,IAAIC,cAAc;IACxCW,eAAY;IACZC,oBAAiB;IACjBC,oBAAiB;CAClB","ignoreList":[0]}


@@ -0,0 +1,63 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
const _promises = require("fs/promises");
const _ = require(".");
const _shared = require("../shared");
const _path = require("path");
const _os = require("os");
const TRACE_EVENT = {
name: 'test-span',
duration: 321,
timestamp: Date.now(),
id: 127,
startTime: Date.now()
};
const WEBPACK_INVALIDATED_EVENT = {
name: 'webpack-invalidated',
duration: 100,
timestamp: Date.now(),
id: 112,
startTime: Date.now()
};
describe('Trace Reporter', ()=>{
describe('JSON reporter', ()=>{
it('should write the trace events to JSON file', async ()=>{
const tmpDir = await (0, _promises.mkdtemp)((0, _path.join)((0, _os.tmpdir)(), 'json-reporter'));
(0, _shared.setGlobal)('distDir', tmpDir);
(0, _shared.setGlobal)('phase', 'anything');
_.reporter.report(TRACE_EVENT);
await _.reporter.flushAll();
const traceFilename = (0, _path.join)(tmpDir, 'trace');
const traces = JSON.parse(await (0, _promises.readFile)(traceFilename, 'utf-8'));
expect(traces.length).toEqual(1);
expect(traces[0].name).toEqual('test-span');
expect(traces[0].id).toEqual(127);
expect(traces[0].duration).toEqual(321);
expect(traces[0].traceId).toBeDefined();
});
});
describe('Telemetry reporter', ()=>{
it('should record telemetry event', async ()=>{
const recordMock = jest.fn();
const telemetryMock = {
record: recordMock
};
(0, _shared.setGlobal)('telemetry', telemetryMock);
// This should be ignored.
_.reporter.report(TRACE_EVENT);
expect(recordMock).toHaveBeenCalledTimes(0);
_.reporter.report(WEBPACK_INVALIDATED_EVENT);
expect(recordMock).toHaveBeenCalledTimes(1);
expect(recordMock).toHaveBeenCalledWith({
eventName: 'WEBPACK_INVALIDATED',
payload: {
durationInMicroseconds: 100
}
});
});
});
});
//# sourceMappingURL=index.test.js.map


@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/trace/report/index.test.ts"],"sourcesContent":["import { mkdtemp, readFile } from 'fs/promises'\nimport { reporter } from '.'\nimport { setGlobal } from '../shared'\nimport { join } from 'path'\nimport { tmpdir } from 'os'\n\nconst TRACE_EVENT = {\n name: 'test-span',\n duration: 321,\n timestamp: Date.now(),\n id: 127,\n startTime: Date.now(),\n}\nconst WEBPACK_INVALIDATED_EVENT = {\n name: 'webpack-invalidated',\n duration: 100,\n timestamp: Date.now(),\n id: 112,\n startTime: Date.now(),\n}\n\ndescribe('Trace Reporter', () => {\n describe('JSON reporter', () => {\n it('should write the trace events to JSON file', async () => {\n const tmpDir = await mkdtemp(join(tmpdir(), 'json-reporter'))\n setGlobal('distDir', tmpDir)\n setGlobal('phase', 'anything')\n reporter.report(TRACE_EVENT)\n await reporter.flushAll()\n const traceFilename = join(tmpDir, 'trace')\n const traces = JSON.parse(await readFile(traceFilename, 'utf-8'))\n expect(traces.length).toEqual(1)\n expect(traces[0].name).toEqual('test-span')\n expect(traces[0].id).toEqual(127)\n expect(traces[0].duration).toEqual(321)\n expect(traces[0].traceId).toBeDefined()\n })\n })\n\n describe('Telemetry reporter', () => {\n it('should record telemetry event', async () => {\n const recordMock = jest.fn()\n const telemetryMock = {\n record: recordMock,\n }\n setGlobal('telemetry', telemetryMock)\n // This should be ignored.\n reporter.report(TRACE_EVENT)\n expect(recordMock).toHaveBeenCalledTimes(0)\n reporter.report(WEBPACK_INVALIDATED_EVENT)\n expect(recordMock).toHaveBeenCalledTimes(1)\n expect(recordMock).toHaveBeenCalledWith({\n eventName: 'WEBPACK_INVALIDATED',\n payload: {\n durationInMicroseconds: 100,\n },\n })\n })\n })\n})\n"],"names":["TRACE_EVENT","name","duration","timestamp","Date","now","id","startTime","WEBPACK_INVALIDATED_EVENT","describe","it","tmpDir","mkdtemp","join","tmpdir","setGlobal","reporter","report","flushAll","traceFilename","traces","JSON","parse","readFile","expect","length","toEqual","traceId","toBeDefined","recordMock","jest","fn","telemetryMock","record","toHaveBeenCalledTimes","toHaveBeenCalledWith","eventName","payload","durationInMicroseconds"],"mappings":";;;;0BAAkC;kBACT;wBACC;sBACL;oBACE;AAEvB,MAAMA,cAAc;IAClBC,MAAM;IACNC,UAAU;IACVC,WAAWC,KAAKC,GAAG;IACnBC,IAAI;IACJC,WAAWH,KAAKC,GAAG;AACrB;AACA,MAAMG,4BAA4B;IAChCP,MAAM;IACNC,UAAU;IACVC,WAAWC,KAAKC,GAAG;IACnBC,IAAI;IACJC,WAAWH,KAAKC,GAAG;AACrB;AAEAI,SAAS,kBAAkB;IACzBA,SAAS,iBAAiB;QACxBC,GAAG,8CAA8C;YAC/C,MAAMC,SAAS,MAAMC,IAAAA,iBAAO,EAACC,IAAAA,UAAI,EAACC,IAAAA,UAAM,KAAI;YAC5CC,IAAAA,iBAAS,EAAC,WAAWJ;YACrBI,IAAAA,iBAAS,EAAC,SAAS;YACnBC,UAAQ,CAACC,MAAM,CAACjB;YAChB,MAAMgB,UAAQ,CAACE,QAAQ;YACvB,MAAMC,gBAAgBN,IAAAA,UAAI,EAACF,QAAQ;YACnC,MAAMS,SAASC,KAAKC,KAAK,CAAC,MAAMC,IAAAA,kBAAQ,EAACJ,eAAe;YACxDK,OAAOJ,OAAOK,MAAM,EAAEC,OAAO,CAAC;YAC9BF,OAAOJ,MAAM,CAAC,EAAE,CAACnB,IAAI,EAAEyB,OAAO,CAAC;YAC/BF,OAAOJ,MAAM,CAAC,EAAE,CAACd,EAAE,EAAEoB,OAAO,CAAC;YAC7BF,OAAOJ,MAAM,CAAC,EAAE,CAAClB,QAAQ,EAAEwB,OAAO,CAAC;YACnCF,OAAOJ,MAAM,CAAC,EAAE,CAACO,OAAO,EAAEC,WAAW;QACvC;IACF;IAEAnB,SAAS,sBAAsB;QAC7BC,GAAG,iCAAiC;YAClC,MAAMmB,aAAaC,KAAKC,EAAE;YAC1B,MAAMC,gBAAgB;gBACpBC,QAAQJ;YACV;YACAd,IAAAA,iBAAS,EAAC,aAAaiB;YACvB,0BAA0B;YAC1BhB,UAAQ,CAACC,MAAM,CAACjB;YAChBwB,OAAOK,YAAYK,qBAAqB,CAAC;YACzClB,UAAQ,CAACC,MAAM,CAACT;YAChBgB,OAAOK,YAAYK,qBAAqB,CAAC;YACzCV,OAAOK,YAAYM,oBAAoB,CAAC;gBACtCC,WAAW;gBACXC,SAAS;oBACPC,wBAAwB;gBAC1B;YACF;QACF;IACF;AACF","ignoreList":[0]}


@@ -0,0 +1,9 @@
import type { TraceEvent } from '../types';
declare function reportToJsonBuild(event: TraceEvent): void;
declare const _default: {
flushAll: (opts?: {
end: boolean;
}) => Promise<void | undefined> | undefined;
report: typeof reportToJsonBuild;
};
export default _default;


@@ -0,0 +1,137 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "default", {
enumerable: true,
get: function() {
return _default;
}
});
const _shared = require("../shared");
const _fs = /*#__PURE__*/ _interop_require_default(require("fs"));
const _path = /*#__PURE__*/ _interop_require_default(require("path"));
const _constants = require("../../shared/lib/constants");
const _tojson = require("./to-json");
function _interop_require_default(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
let writeStream;
let batch;
const writeStreamOptions = {
flags: 'a',
encoding: 'utf8'
};
class RotatingWriteStream {
constructor(file, sizeLimit){
this.file = file;
this.size = 0;
this.sizeLimit = sizeLimit;
this.createWriteStream();
}
createWriteStream() {
this.writeStream = _fs.default.createWriteStream(this.file, writeStreamOptions);
}
// Recreate the file
async rotate() {
await this.end();
try {
_fs.default.unlinkSync(this.file);
} catch (err) {
// It's fine if the file does not exist yet
if (err.code !== 'ENOENT') {
throw err;
}
}
this.size = 0;
this.createWriteStream();
this.rotatePromise = undefined;
}
async write(data) {
if (this.rotatePromise) await this.rotatePromise;
this.size += data.length;
if (this.size > this.sizeLimit) {
await (this.rotatePromise = this.rotate());
}
if (!this.writeStream.write(data, 'utf8')) {
if (this.drainPromise === undefined) {
this.drainPromise = new Promise((resolve, _reject)=>{
this.writeStream.once('drain', ()=>{
this.drainPromise = undefined;
resolve();
});
});
}
await this.drainPromise;
}
}
end() {
return new Promise((resolve)=>{
this.writeStream.end(resolve);
});
}
}
const allowlistedEvents = new Set([
'next-build',
'run-turbopack',
'run-webpack',
'run-typescript',
'run-eslint',
'static-check',
'collect-build-traces',
'static-generation',
'output-export-full-static-export',
'adapter-handle-build-complete',
'output-standalone',
'telemetry-flush'
]);
function reportToJsonBuild(event) {
if (!allowlistedEvents.has(event.name)) {
return;
}
const distDir = _shared.traceGlobals.get('distDir');
const phase = _shared.traceGlobals.get('phase');
if (!distDir || !phase) {
return;
}
// Only report in production builds
if (phase !== _constants.PHASE_PRODUCTION_BUILD) {
return;
}
if (!batch) {
batch = (0, _tojson.batcher)(async (events)=>{
if (!writeStream) {
await _fs.default.promises.mkdir(distDir, {
recursive: true
});
const file = _path.default.join(distDir, 'trace-build');
writeStream = new RotatingWriteStream(file, // Development is limited to 50MB, production is unlimited
phase === _constants.PHASE_DEVELOPMENT_SERVER ? 52428800 : Infinity);
}
const eventsJson = JSON.stringify(events);
try {
await writeStream.write(eventsJson + '\n');
} catch (err) {
console.log(err);
}
});
}
batch.report({
...event,
traceId: _shared.traceId
});
}
const _default = {
flushAll: (opts)=>batch ? batch.flushAll().then(()=>{
const phase = _shared.traceGlobals.get('phase');
// Only end writeStream when manually flushing in production
if ((opts == null ? void 0 : opts.end) || phase !== _constants.PHASE_DEVELOPMENT_SERVER) {
return writeStream.end();
}
}) : undefined,
report: reportToJsonBuild
};
//# sourceMappingURL=to-json-build.js.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,13 @@
import type { TraceEvent } from '../types';
export declare function batcher(reportEvents: (evts: TraceEvent[]) => Promise<void>): {
flushAll: () => Promise<void>;
report: (event: TraceEvent) => void;
};
declare function reportToJson(event: TraceEvent): void;
declare const _default: {
flushAll: (opts?: {
end: boolean;
}) => Promise<void | undefined> | undefined;
report: typeof reportToJson;
};
export default _default;


@@ -0,0 +1,151 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
batcher: null,
default: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
batcher: function() {
return batcher;
},
default: function() {
return _default;
}
});
const _shared = require("../shared");
const _fs = /*#__PURE__*/ _interop_require_default(require("fs"));
const _path = /*#__PURE__*/ _interop_require_default(require("path"));
const _constants = require("../../shared/lib/constants");
function _interop_require_default(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
function batcher(reportEvents) {
const events = [];
// Promise queue to ensure events are always sent on flushAll
const queue = new Set();
return {
flushAll: async ()=>{
await Promise.all(queue);
if (events.length > 0) {
await reportEvents(events);
events.length = 0;
}
},
report: (event)=>{
events.push(event);
if (events.length > 100) {
const evts = events.slice();
events.length = 0;
const report = reportEvents(evts);
queue.add(report);
report.then(()=>queue.delete(report));
}
}
};
}
let writeStream;
let batch;
const writeStreamOptions = {
flags: 'a',
encoding: 'utf8'
};
class RotatingWriteStream {
constructor(file, sizeLimit){
this.file = file;
this.size = 0;
this.sizeLimit = sizeLimit;
this.createWriteStream();
}
createWriteStream() {
this.writeStream = _fs.default.createWriteStream(this.file, writeStreamOptions);
}
// Recreate the file
async rotate() {
await this.end();
try {
_fs.default.unlinkSync(this.file);
} catch (err) {
// It's fine if the file does not exist yet
if (err.code !== 'ENOENT') {
throw err;
}
}
this.size = 0;
this.createWriteStream();
this.rotatePromise = undefined;
}
async write(data) {
if (this.rotatePromise) await this.rotatePromise;
this.size += data.length;
if (this.size > this.sizeLimit) {
await (this.rotatePromise = this.rotate());
}
if (!this.writeStream.write(data, 'utf8')) {
if (this.drainPromise === undefined) {
this.drainPromise = new Promise((resolve, _reject)=>{
this.writeStream.once('drain', ()=>{
this.drainPromise = undefined;
resolve();
});
});
}
await this.drainPromise;
}
}
end() {
return new Promise((resolve)=>{
this.writeStream.end(resolve);
});
}
}
function reportToJson(event) {
const distDir = _shared.traceGlobals.get('distDir');
const phase = _shared.traceGlobals.get('phase');
if (!distDir || !phase) {
return;
}
if (!batch) {
batch = batcher(async (events)=>{
if (!writeStream) {
await _fs.default.promises.mkdir(distDir, {
recursive: true
});
const file = _path.default.join(distDir, 'trace');
writeStream = new RotatingWriteStream(file, // Development is limited to 50MB, production is unlimited
phase === _constants.PHASE_DEVELOPMENT_SERVER ? 52428800 : Infinity);
}
const eventsJson = JSON.stringify(events);
try {
await writeStream.write(eventsJson + '\n');
} catch (err) {
console.log(err);
}
});
}
batch.report({
...event,
traceId: _shared.traceId
});
}
const _default = {
flushAll: (opts)=>batch ? batch.flushAll().then(()=>{
const phase = _shared.traceGlobals.get('phase');
// Only end writeStream when manually flushing in production
if ((opts == null ? void 0 : opts.end) || phase !== _constants.PHASE_DEVELOPMENT_SERVER) {
return writeStream.end();
}
}) : undefined,
report: reportToJson
};
//# sourceMappingURL=to-json.js.map
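
The exported `batcher` is the buffering primitive behind both JSON reporters: events accumulate in memory, a batch is handed to the callback once more than 100 are queued, and `flushAll` awaits in-flight batches before draining the remainder. A small sketch (the import path assumes an installed `next` package; the callback here just counts events instead of appending to `.next/trace`):

import { batcher } from 'next/dist/trace/report/to-json'
import type { TraceEvent } from 'next/dist/trace/types'

async function demo() {
  let flushed = 0
  const batch = batcher(async (events: TraceEvent[]) => {
    // The real reporter writes `JSON.stringify(events) + '\n'` to .next/trace.
    flushed += events.length
  })

  for (let i = 1; i <= 250; i++) {
    batch.report({ name: 'evt', id: i, timestamp: Date.now(), duration: 1 })
  }
  // Two batches of 101 were already dispatched; this drains the remaining 48.
  await batch.flushAll()
  console.log(flushed) // 250
}

demo().catch(console.error)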

File diff suppressed because one or more lines are too long


@@ -0,0 +1,6 @@
import type { TraceEvent } from '../types';
declare const _default: {
flushAll: () => void;
report: ({ name, duration }: TraceEvent) => void;
};
export default _default;


@@ -0,0 +1,36 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "default", {
enumerable: true,
get: function() {
return _default;
}
});
const _shared = require("../shared");
const TRACE_EVENT_ACCESSLIST = new Map(Object.entries({
'webpack-invalidated': 'WEBPACK_INVALIDATED'
}));
const reportToTelemetry = ({ name, duration })=>{
const eventName = TRACE_EVENT_ACCESSLIST.get(name);
if (!eventName) {
return;
}
const telemetry = _shared.traceGlobals.get('telemetry');
if (!telemetry) {
return;
}
telemetry.record({
eventName,
payload: {
durationInMicroseconds: duration
}
});
};
const _default = {
flushAll: ()=>{},
report: reportToTelemetry
};
//# sourceMappingURL=to-telemetry.js.map


@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/trace/report/to-telemetry.ts"],"sourcesContent":["import type { Telemetry } from '../../telemetry/storage'\nimport { traceGlobals } from '../shared'\nimport type { TraceEvent } from '../types'\n\nconst TRACE_EVENT_ACCESSLIST = new Map(\n Object.entries({\n 'webpack-invalidated': 'WEBPACK_INVALIDATED',\n })\n)\n\nconst reportToTelemetry = ({ name, duration }: TraceEvent) => {\n const eventName = TRACE_EVENT_ACCESSLIST.get(name)\n if (!eventName) {\n return\n }\n const telemetry: Telemetry | undefined = traceGlobals.get('telemetry')\n if (!telemetry) {\n return\n }\n\n telemetry.record({\n eventName,\n payload: {\n durationInMicroseconds: duration,\n },\n })\n}\n\nexport default {\n flushAll: () => {},\n report: reportToTelemetry,\n}\n"],"names":["TRACE_EVENT_ACCESSLIST","Map","Object","entries","reportToTelemetry","name","duration","eventName","get","telemetry","traceGlobals","record","payload","durationInMicroseconds","flushAll","report"],"mappings":";;;;+BA4BA;;;eAAA;;;wBA3B6B;AAG7B,MAAMA,yBAAyB,IAAIC,IACjCC,OAAOC,OAAO,CAAC;IACb,uBAAuB;AACzB;AAGF,MAAMC,oBAAoB,CAAC,EAAEC,IAAI,EAAEC,QAAQ,EAAc;IACvD,MAAMC,YAAYP,uBAAuBQ,GAAG,CAACH;IAC7C,IAAI,CAACE,WAAW;QACd;IACF;IACA,MAAME,YAAmCC,oBAAY,CAACF,GAAG,CAAC;IAC1D,IAAI,CAACC,WAAW;QACd;IACF;IAEAA,UAAUE,MAAM,CAAC;QACfJ;QACAK,SAAS;YACPC,wBAAwBP;QAC1B;IACF;AACF;MAEA,WAAe;IACbQ,UAAU,KAAO;IACjBC,QAAQX;AACV","ignoreList":[0]}


@@ -0,0 +1,7 @@
import type { TraceEvent } from '../types';
export type Reporter = {
flushAll: (opts?: {
end: boolean;
}) => Promise<void> | void;
report: (event: TraceEvent) => void;
};


@@ -0,0 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
//# sourceMappingURL=types.js.map


@@ -0,0 +1 @@
{"version":3,"sources":[],"names":[],"mappings":"","ignoreList":[]}


@@ -0,0 +1,3 @@
export declare const traceGlobals: Map<any, any>;
export declare const setGlobal: (key: any, val: any) => void;
export declare const traceId: string;

39
apps/public-web/node_modules/next/dist/trace/shared.js generated vendored Normal file

@@ -0,0 +1,39 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
setGlobal: null,
traceGlobals: null,
traceId: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
setGlobal: function() {
return setGlobal;
},
traceGlobals: function() {
return traceGlobals;
},
traceId: function() {
return traceId;
}
});
const _nodecrypto = require("node:crypto");
let _traceGlobals = global._traceGlobals;
if (!_traceGlobals) {
_traceGlobals = new Map();
}
global._traceGlobals = _traceGlobals;
const traceGlobals = _traceGlobals;
const setGlobal = (key, val)=>{
traceGlobals.set(key, val);
};
const traceId = process.env.TRACE_ID || process.env.NEXT_PRIVATE_TRACE_ID || (0, _nodecrypto.randomBytes)(8).toString('hex');
//# sourceMappingURL=shared.js.map
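
`shared.js` is the process-wide state the reporters and tests lean on: `traceGlobals` is a map stashed on `global`, `setGlobal` writes to it, and `traceId` identifies the current session (taken from `TRACE_ID`/`NEXT_PRIVATE_TRACE_ID` or eight random bytes). A sketch of the wiring a build performs before any reporting (the dist directory path is hypothetical):

import { setGlobal, traceGlobals, traceId } from 'next/dist/trace/shared'
import { PHASE_PRODUCTION_BUILD } from 'next/constants'

// The JSON reporters refuse to write until both of these are present.
setGlobal('distDir', '/workspace/my-app/.next') // hypothetical path
setGlobal('phase', PHASE_PRODUCTION_BUILD)

console.log(traceGlobals.get('distDir')) // '/workspace/my-app/.next'
console.log(traceId) // e.g. 'a1b2c3d4e5f60718' unless TRACE_ID is set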


@@ -0,0 +1 @@
{"version":3,"sources":["../../src/trace/shared.ts"],"sourcesContent":["import { randomBytes } from 'node:crypto'\n\nlet _traceGlobals: Map<any, any> = (global as any)._traceGlobals\n\nif (!_traceGlobals) {\n _traceGlobals = new Map()\n}\n;(global as any)._traceGlobals = _traceGlobals\n\nexport const traceGlobals: Map<any, any> = _traceGlobals\nexport const setGlobal = (key: any, val: any) => {\n traceGlobals.set(key, val)\n}\n\nexport const traceId =\n process.env.TRACE_ID ||\n process.env.NEXT_PRIVATE_TRACE_ID ||\n randomBytes(8).toString('hex')\n"],"names":["setGlobal","traceGlobals","traceId","_traceGlobals","global","Map","key","val","set","process","env","TRACE_ID","NEXT_PRIVATE_TRACE_ID","randomBytes","toString"],"mappings":";;;;;;;;;;;;;;;;IAUaA,SAAS;eAATA;;IADAC,YAAY;eAAZA;;IAKAC,OAAO;eAAPA;;;4BAde;AAE5B,IAAIC,gBAA+B,AAACC,OAAeD,aAAa;AAEhE,IAAI,CAACA,eAAe;IAClBA,gBAAgB,IAAIE;AACtB;AACED,OAAeD,aAAa,GAAGA;AAE1B,MAAMF,eAA8BE;AACpC,MAAMH,YAAY,CAACM,KAAUC;IAClCN,aAAaO,GAAG,CAACF,KAAKC;AACxB;AAEO,MAAML,UACXO,QAAQC,GAAG,CAACC,QAAQ,IACpBF,QAAQC,GAAG,CAACE,qBAAqB,IACjCC,IAAAA,uBAAW,EAAC,GAAGC,QAAQ,CAAC","ignoreList":[0]}


@@ -0,0 +1 @@
export {};


@@ -0,0 +1,137 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
const _findup = /*#__PURE__*/ _interop_require_default(require("next/dist/compiled/find-up"));
const _promises = /*#__PURE__*/ _interop_require_default(require("fs/promises"));
const _child_process = /*#__PURE__*/ _interop_require_default(require("child_process"));
const _assert = /*#__PURE__*/ _interop_require_default(require("assert"));
const _os = /*#__PURE__*/ _interop_require_default(require("os"));
const _readline = require("readline");
const _fs = require("fs");
const _path = /*#__PURE__*/ _interop_require_default(require("path"));
function _interop_require_default(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
const COMMON_ALLOWED_EVENTS = [
'memory-usage'
];
// Predefined set of event names to be included in the trace.
// If the trace span's name matches one of the event names in the set,
// it'll be uploaded to the trace server.
const DEV_ALLOWED_EVENTS = new Set([
...COMMON_ALLOWED_EVENTS,
'client-hmr-latency',
'hot-reloader',
'webpack-invalid-client',
'webpack-invalidated-server',
'navigation-to-hydration',
'start-dev-server',
'compile-path',
'memory-usage',
'server-restart-close-to-memory-threshold'
]);
const BUILD_ALLOWED_EVENTS = new Set([
...COMMON_ALLOWED_EVENTS,
'next-build',
'run-turbopack',
'webpack-compilation',
'run-webpack-compiler',
'create-entrypoints',
'worker-main-edge-server',
'worker-main-client',
'worker-main-server',
'server',
'make',
'seal',
'chunk-graph',
'optimize-modules',
'optimize-chunks',
'optimize',
'optimize-tree',
'optimize-chunk-modules',
'module-hash',
'client',
'static-check',
'node-file-trace-build',
'static-generation',
'next-export',
'run-typescript',
'run-eslint'
]);
const { NEXT_TRACE_UPLOAD_DEBUG, // An external env to allow uploading the full trace without picking out only the relevant spans.
// This is mainly for debugging purposes, to allow a manual audit of the full trace for a given build.
// [NOTE] This may fail if the build is large and the generated trace is excessively large.
NEXT_TRACE_UPLOAD_FULL } = process.env;
const isDebugEnabled = !!NEXT_TRACE_UPLOAD_DEBUG || !!NEXT_TRACE_UPLOAD_FULL;
const shouldUploadFullTrace = !!NEXT_TRACE_UPLOAD_FULL;
const [, , traceUploadUrl, mode, projectDir, distDir, _isTurboSession, traceId, anonymousId, sessionId] = process.argv;
const isTurboSession = _isTurboSession === 'true';
(async function upload() {
const nextVersion = JSON.parse(await _promises.default.readFile(_path.default.resolve(__dirname, '../../package.json'), 'utf8')).version;
const projectPkgJsonPath = await (0, _findup.default)('package.json');
(0, _assert.default)(projectPkgJsonPath);
const projectPkgJson = JSON.parse(await _promises.default.readFile(projectPkgJsonPath, 'utf-8'));
const pkgName = projectPkgJson.name;
const commit = _child_process.default.spawnSync(_os.default.platform() === 'win32' ? 'git.exe' : 'git', [
'rev-parse',
'HEAD'
], {
shell: true
}).stdout.toString().trimEnd();
const readLineInterface = (0, _readline.createInterface)({
input: (0, _fs.createReadStream)(_path.default.join(projectDir, distDir, 'trace')),
crlfDelay: Infinity
});
const sessionTrace = [];
for await (const line of readLineInterface){
const lineEvents = JSON.parse(line);
for (const event of lineEvents){
if (event.traceId !== traceId) {
continue;
}
if (// Always include root spans
event.parentId === undefined || shouldUploadFullTrace || (mode === 'dev' ? DEV_ALLOWED_EVENTS.has(event.name) : BUILD_ALLOWED_EVENTS.has(event.name))) {
sessionTrace.push(event);
}
}
}
const body = {
metadata: {
anonymousId,
arch: _os.default.arch(),
commit,
cpus: _os.default.cpus().length,
isTurboSession,
mode,
nextVersion,
pkgName,
platform: _os.default.platform(),
sessionId
},
// The trace file can contain events spanning multiple sessions.
// Only submit traces for the current session, as the metadata we send is
// intended for this session only.
traces: [
sessionTrace
]
};
if (isDebugEnabled) {
console.log('Sending request with body', JSON.stringify(body, null, 2));
}
let res = await fetch(traceUploadUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'x-trace-transfer-mode': shouldUploadFullTrace ? 'full' : 'default'
},
body: JSON.stringify(body)
});
if (isDebugEnabled) {
console.log('Received response', res.status, await res.json());
}
})();
//# sourceMappingURL=trace-uploader.js.map
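
The uploader reads `<projectDir>/<distDir>/trace` line by line; each line is a JSON array of `TraceEvent` objects appended by the JSON reporter, and only events whose `traceId` matches the current session and whose names pass the mode-specific allowlist are kept. A minimal sketch of reading that file shape back, using Node built-ins only (`readTraceFile` is a hypothetical helper, not part of this diff):

import { createReadStream } from 'fs'
import { createInterface } from 'readline'
import { join } from 'path'
import type { TraceEvent } from 'next/dist/trace/types'

async function readTraceFile(distDir: string): Promise<TraceEvent[]> {
  const rl = createInterface({
    input: createReadStream(join(distDir, 'trace')),
    crlfDelay: Infinity,
  })
  const events: TraceEvent[] = []
  for await (const line of rl) {
    // Each appended batch is one JSON array per line.
    events.push(...(JSON.parse(line) as TraceEvent[]))
  }
  return events
}

readTraceFile('.next').then((events) =>
  console.log(`${events.length} events in this trace file`)
)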

File diff suppressed because one or more lines are too long


@@ -0,0 +1,42 @@
import type { SpanId, TraceEvent, TraceState } from './types';
export declare enum SpanStatus {
Started = "started",
Stopped = "stopped"
}
interface Attributes {
[key: string]: string;
}
export declare class Span {
private name;
private id;
private parentId?;
private attrs;
private status;
private now;
private _start;
constructor({ name, parentId, attrs, startTime, }: {
name: string;
parentId?: SpanId;
startTime?: bigint;
attrs?: Attributes;
});
stop(stopTime?: bigint): void;
traceChild(name: string, attrs?: Attributes): Span;
manualTraceChild(name: string, startTime?: bigint, stopTime?: bigint, attrs?: Attributes): void;
getId(): number;
setAttribute(key: string, value: string): void;
traceFn<T>(fn: (span: Span) => T): T;
traceAsyncFn<T>(fn: (span: Span) => T | Promise<T>): Promise<T>;
}
export declare const trace: (name: string, parentId?: SpanId, attrs?: {
[key: string]: string;
}) => Span;
export declare const flushAllTraces: (opts?: {
end: boolean;
}) => Promise<void>;
export declare const exportTraceState: () => TraceState;
export declare const initializeTraceState: (state: TraceState) => void;
export declare function getTraceEvents(): TraceEvent[];
export declare function recordTraceEvents(events: TraceEvent[]): void;
export declare const clearTraceEvents: () => never[];
export {};
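
Besides `traceChild`/`traceFn`, the declaration above exposes `manualTraceChild`, which records a span whose timing was measured elsewhere; its start/stop arguments are nanoseconds since the epoch, and the implementation rebases them onto `process.hrtime.bigint()`. A brief sketch (the span names and the 25ms figure are invented for illustration):

import { trace } from 'next/dist/trace'

const root = trace('example-root')

// Timing captured externally, expressed in epoch nanoseconds.
const startNs = BigInt(Date.now()) * 1000000n
const stopNs = startNs + 25000000n // 25ms later

root.manualTraceChild('external-step', startNs, stopNs, { source: 'demo' })
root.stop()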

197
apps/public-web/node_modules/next/dist/trace/trace.js generated vendored Normal file

@@ -0,0 +1,197 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
Span: null,
SpanStatus: null,
clearTraceEvents: null,
exportTraceState: null,
flushAllTraces: null,
getTraceEvents: null,
initializeTraceState: null,
recordTraceEvents: null,
trace: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
Span: function() {
return Span;
},
SpanStatus: function() {
return SpanStatus;
},
clearTraceEvents: function() {
return clearTraceEvents;
},
exportTraceState: function() {
return exportTraceState;
},
flushAllTraces: function() {
return flushAllTraces;
},
getTraceEvents: function() {
return getTraceEvents;
},
initializeTraceState: function() {
return initializeTraceState;
},
recordTraceEvents: function() {
return recordTraceEvents;
},
trace: function() {
return trace;
}
});
const _report = require("./report");
const NUM_OF_MICROSEC_IN_NANOSEC = BigInt('1000');
const NUM_OF_MILLISEC_IN_NANOSEC = BigInt('1000000');
let count = 0;
const getId = ()=>{
count++;
return count;
};
let defaultParentSpanId;
let shouldSaveTraceEvents;
let savedTraceEvents = [];
const RECORD_SPAN_THRESHOLD_MS = parseInt(process.env.NEXT_TRACE_SPAN_THRESHOLD_MS ?? '-1');
var SpanStatus = /*#__PURE__*/ function(SpanStatus) {
SpanStatus["Started"] = "started";
SpanStatus["Stopped"] = "stopped";
return SpanStatus;
}({});
class Span {
constructor({ name, parentId, attrs, startTime }){
this.name = name;
this.parentId = parentId ?? defaultParentSpanId;
this.attrs = attrs ? {
...attrs
} : {};
this.status = "started";
this.id = getId();
this._start = startTime || process.hrtime.bigint();
// hrtime cannot be used to reconstruct tracing span's actual start time
// since it does not have relation to clock time:
// `These times are relative to an arbitrary time in the past, and not related to the time of day and therefore not subject to clock drift`
// https://nodejs.org/api/process.html#processhrtimetime
// Capturing current datetime as additional metadata for external reconstruction.
this.now = Date.now();
}
// Durations are reported as microseconds. This gives 1000x the precision
// of something like Date.now(), which reports in milliseconds.
// Additionally, ~285 years can be safely represented as microseconds as
// a float64 in both JSON and JavaScript.
stop(stopTime) {
if (this.status === "stopped") {
// Don't report the same span twice.
// TODO: In the future this should throw as `.stop()` shouldn't be called multiple times.
return;
}
const end = stopTime || process.hrtime.bigint();
const duration = (end - this._start) / NUM_OF_MICROSEC_IN_NANOSEC;
this.status = "stopped";
if (duration > Number.MAX_SAFE_INTEGER) {
throw Object.defineProperty(new Error(`Duration is too long to express as float64: ${duration}`), "__NEXT_ERROR_CODE", {
value: "E513",
enumerable: false,
configurable: true
});
}
const timestamp = this._start / NUM_OF_MICROSEC_IN_NANOSEC;
const traceEvent = {
name: this.name,
duration: Number(duration),
timestamp: Number(timestamp),
id: this.id,
parentId: this.parentId,
tags: this.attrs,
startTime: this.now
};
if (duration > RECORD_SPAN_THRESHOLD_MS * 1000) {
_report.reporter.report(traceEvent);
if (shouldSaveTraceEvents) {
savedTraceEvents.push(traceEvent);
}
}
}
traceChild(name, attrs) {
return new Span({
name,
parentId: this.id,
attrs
});
}
manualTraceChild(name, // Start time in nanoseconds since epoch.
startTime, // Stop time in nanoseconds since epoch.
stopTime, attrs) {
        // We need to convert the time info to the same base as hrtime, since that is what is normally used.
const correction = process.hrtime.bigint() - BigInt(Date.now()) * NUM_OF_MILLISEC_IN_NANOSEC;
const span = new Span({
name,
parentId: this.id,
attrs,
startTime: startTime ? startTime + correction : process.hrtime.bigint()
});
span.stop(stopTime ? stopTime + correction : process.hrtime.bigint());
}
getId() {
return this.id;
}
setAttribute(key, value) {
this.attrs[key] = value;
}
traceFn(fn) {
try {
return fn(this);
} finally{
this.stop();
}
}
async traceAsyncFn(fn) {
try {
return await fn(this);
} finally{
this.stop();
}
}
}
const trace = (name, parentId, attrs)=>{
return new Span({
name,
parentId,
attrs
});
};
const flushAllTraces = (opts)=>_report.reporter.flushAll(opts);
const exportTraceState = ()=>({
defaultParentSpanId,
lastId: count,
shouldSaveTraceEvents
});
const initializeTraceState = (state)=>{
count = state.lastId;
defaultParentSpanId = state.defaultParentSpanId;
shouldSaveTraceEvents = state.shouldSaveTraceEvents;
};
function getTraceEvents() {
return savedTraceEvents;
}
function recordTraceEvents(events) {
for (const traceEvent of events){
_report.reporter.report(traceEvent);
if (traceEvent.id > count) {
count = traceEvent.id + 1;
}
}
if (shouldSaveTraceEvents) {
savedTraceEvents.push(...events);
}
}
const clearTraceEvents = ()=>savedTraceEvents = [];
//# sourceMappingURL=trace.js.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,175 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
const _promises = require("fs/promises");
const _path = require("path");
const _os = require("os");
const _shared = require("./shared");
const _trace = require("./trace");
describe('Trace', ()=>{
beforeEach(()=>{
(0, _trace.initializeTraceState)({
lastId: 0,
shouldSaveTraceEvents: true
});
(0, _trace.clearTraceEvents)();
});
describe('Tracer', ()=>{
it('traces a block of code', async ()=>{
const tmpDir = await (0, _promises.mkdtemp)((0, _path.join)((0, _os.tmpdir)(), 'json-reporter'));
(0, _shared.setGlobal)('distDir', tmpDir);
(0, _shared.setGlobal)('phase', 'anything');
const root = (0, _trace.trace)('root-span', undefined, {
'some-tag': 'some-value'
});
root.traceChild('child-span').traceFn(()=>null);
await root.traceChild('async-child-span').traceAsyncFn(async ()=>{
const delayedPromise = new Promise((resolve)=>{
setTimeout(resolve, 100);
});
await delayedPromise;
});
root.stop();
const traceEvents = (0, _trace.getTraceEvents)();
expect(traceEvents.length).toEqual(3);
expect(traceEvents[0].name).toEqual('child-span');
expect(traceEvents[1].name).toEqual('async-child-span');
expect(traceEvents[2].name).toEqual('root-span');
// Check that the serialized .next/trace file looks correct.
await (0, _trace.flushAllTraces)();
const traceFilename = (0, _path.join)(tmpDir, 'trace');
const serializedTraces = JSON.parse(await (0, _promises.readFile)(traceFilename, 'utf-8'));
expect(serializedTraces).toMatchObject([
{
id: 2,
name: 'child-span',
parentId: 1,
startTime: expect.any(Number),
timestamp: expect.any(Number),
duration: expect.any(Number),
tags: {}
},
{
id: 3,
name: 'async-child-span',
parentId: 1,
startTime: expect.any(Number),
timestamp: expect.any(Number),
duration: expect.any(Number),
tags: {}
},
{
id: 1,
name: 'root-span',
startTime: expect.any(Number),
timestamp: expect.any(Number),
duration: expect.any(Number),
tags: {
'some-tag': 'some-value'
}
}
]);
});
});
describe('Worker', ()=>{
it('exports and initializes trace state', ()=>{
const root = (0, _trace.trace)('root-span');
expect(root.getId()).toEqual(1);
const traceState = (0, _trace.exportTraceState)();
expect(traceState.lastId).toEqual(1);
(0, _trace.initializeTraceState)({
lastId: 101
});
const span = (0, _trace.trace)('another-span');
expect(span.getId()).toEqual(102);
});
it('trace data is serializable to a worker', async ()=>{
const root = (0, _trace.trace)('root-span');
root.traceChild('child-span').traceFn(()=>null);
root.stop();
const traceEvents = (0, _trace.getTraceEvents)();
expect(traceEvents.length).toEqual(2);
// This is a proxy check to make sure the object would be serializable
// to a worker. It will fail if the data contains some unserializable
// objects like BigInt.
const clone = JSON.parse(JSON.stringify(traceEvents));
expect(clone).toEqual(traceEvents);
});
it('correctly reports trace data from multiple workers', ()=>{
// This test simulates workers creating traces and propagating them
// back to the main process for recording. It doesn't use
// actual workers since they are more difficult to set up in tests.
(0, _trace.initializeTraceState)({
lastId: 5,
defaultParentSpanId: 1,
shouldSaveTraceEvents: true
});
const worker1Span = (0, _trace.trace)('worker1');
worker1Span.traceChild('webpack-compilation1').traceFn(()=>null);
worker1Span.stop();
const worker1Traces = (0, _trace.getTraceEvents)();
expect(worker1Traces.length).toEqual(2);
// Repeat for a second worker.
(0, _trace.clearTraceEvents)();
(0, _trace.initializeTraceState)({
lastId: 10,
defaultParentSpanId: 1,
shouldSaveTraceEvents: true
});
const worker2Span = (0, _trace.trace)('worker2');
worker2Span.traceChild('webpack-compilation2').traceFn(()=>null);
worker2Span.stop();
const worker2Traces = (0, _trace.getTraceEvents)();
expect(worker2Traces.length).toEqual(2);
// Now simulate the traces in the main process and record the traces
// from each worker.
(0, _trace.clearTraceEvents)();
(0, _trace.initializeTraceState)({
lastId: 0,
shouldSaveTraceEvents: true
});
const root = (0, _trace.trace)('next-build');
root.traceChild('some-child-span').traceFn(()=>null);
(0, _trace.recordTraceEvents)(worker1Traces);
expect((0, _trace.exportTraceState)().lastId).toEqual(8);
(0, _trace.recordTraceEvents)(worker2Traces);
expect((0, _trace.exportTraceState)().lastId).toEqual(13);
root.traceChild('another-child-span').traceFn(()=>null);
root.stop();
// Check that the final output looks correct.
const allTraces = (0, _trace.getTraceEvents)();
expect(allTraces.length).toEqual(7);
const firstSpan = allTraces[0];
expect(firstSpan.name).toEqual('some-child-span');
expect(firstSpan.id).toEqual(2);
expect(firstSpan.parentId).toEqual(1);
const worker1Child = allTraces[1];
expect(worker1Child.name).toEqual('webpack-compilation1');
expect(worker1Child.id).toEqual(7);
expect(worker1Child.parentId).toEqual(6);
const worker1Root = allTraces[2];
expect(worker1Root.name).toEqual('worker1');
expect(worker1Root.id).toEqual(6);
expect(worker1Root.parentId).toEqual(1);
const worker2Child = allTraces[3];
expect(worker2Child.name).toEqual('webpack-compilation2');
expect(worker2Child.id).toEqual(12);
expect(worker2Child.parentId).toEqual(11);
const worker2Root = allTraces[4];
expect(worker2Root.name).toEqual('worker2');
expect(worker2Root.id).toEqual(11);
expect(worker2Root.parentId).toEqual(1);
const lastChildSpan = allTraces[5];
expect(lastChildSpan.name).toEqual('another-child-span');
expect(lastChildSpan.id).toEqual(14);
expect(lastChildSpan.parentId).toEqual(1);
const rootSpan = allTraces[6];
expect(rootSpan.name).toEqual('next-build');
expect(rootSpan.id).toEqual(1);
expect(rootSpan.parentId).toBeUndefined();
});
});
});
//# sourceMappingURL=trace.test.js.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,16 @@
export type SpanId = number;
export interface TraceState {
lastId: number;
defaultParentSpanId?: SpanId;
shouldSaveTraceEvents?: boolean;
}
export type TraceEvent = {
traceId?: string;
parentId?: SpanId;
name: string;
id: SpanId;
timestamp: number;
duration: number;
tags?: Object;
startTime?: number;
};
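
For reference, a literal that satisfies the `TraceEvent` shape above, annotated with how each field is populated by `trace.ts` (the concrete values are invented):

import type { TraceEvent } from 'next/dist/trace/types'

const example: TraceEvent = {
  traceId: 'a1b2c3d4e5f60718', // 8 random bytes, hex-encoded (shared.ts)
  parentId: 1, // id of the enclosing span, if any
  name: 'webpack-compilation',
  id: 2, // monotonically increasing per process
  timestamp: 81273849, // span start, microseconds on the hrtime clock
  duration: 1234, // microseconds
  tags: { phase: 'client' }, // the span's attrs
  startTime: 1737554055000, // wall-clock ms captured at span creation
}

console.log(example)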


@@ -0,0 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
//# sourceMappingURL=types.js.map


@@ -0,0 +1 @@
{"version":3,"sources":[],"names":[],"mappings":"","ignoreList":[]}


@@ -0,0 +1,8 @@
export default function uploadTrace({ traceUploadUrl, mode, projectDir, distDir, isTurboSession, sync, }: {
traceUploadUrl: string;
mode: 'dev' | 'build';
projectDir: string;
distDir: string;
isTurboSession: boolean;
sync?: boolean;
}): void;


@@ -0,0 +1,44 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "default", {
enumerable: true,
get: function() {
return uploadTrace;
}
});
const _shared = require("./shared");
const _storage = require("../telemetry/storage");
function uploadTrace({ traceUploadUrl, mode, projectDir, distDir, isTurboSession, sync }) {
const { NEXT_TRACE_UPLOAD_DEBUG } = process.env;
const telemetry = new _storage.Telemetry({
distDir
});
// Note: cross-spawn is not used here as it causes
// a new command window to appear when we don't want it to
const child_process = require('child_process');
// we use spawnSync when debugging to ensure logs are piped
// correctly to stdout/stderr
const spawn = NEXT_TRACE_UPLOAD_DEBUG || sync ? child_process.spawnSync : child_process.spawn;
spawn(process.execPath, [
require.resolve('./trace-uploader'),
traceUploadUrl,
mode,
projectDir,
distDir,
String(isTurboSession),
_shared.traceId,
telemetry.anonymousId,
telemetry.sessionId
], {
detached: !NEXT_TRACE_UPLOAD_DEBUG,
windowsHide: true,
shell: false,
...NEXT_TRACE_UPLOAD_DEBUG ? {
stdio: 'inherit'
} : {}
});
}
//# sourceMappingURL=upload-trace.js.map
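
`uploadTrace` is the entry point that detaches the uploader above as a child process, forwarding the trace id plus the telemetry `anonymousId`/`sessionId` as positional arguments. A usage sketch (the URL and paths are hypothetical):

import uploadTrace from 'next/dist/trace/upload-trace'

uploadTrace({
  traceUploadUrl: 'https://traces.example.com/api/upload', // hypothetical endpoint
  mode: 'build',
  projectDir: '/workspace/my-app',
  distDir: '.next',
  isTurboSession: false,
  sync: true, // forces spawnSync, so the call blocks until the uploader exits
})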


@@ -0,0 +1 @@
{"version":3,"sources":["../../src/trace/upload-trace.ts"],"sourcesContent":["import { traceId } from './shared'\nimport { Telemetry } from '../telemetry/storage'\n\nexport default function uploadTrace({\n traceUploadUrl,\n mode,\n projectDir,\n distDir,\n isTurboSession,\n sync,\n}: {\n traceUploadUrl: string\n mode: 'dev' | 'build'\n projectDir: string\n distDir: string\n isTurboSession: boolean\n sync?: boolean\n}) {\n const { NEXT_TRACE_UPLOAD_DEBUG } = process.env\n const telemetry = new Telemetry({ distDir })\n\n // Note: cross-spawn is not used here as it causes\n // a new command window to appear when we don't want it to\n const child_process =\n require('child_process') as typeof import('child_process')\n\n // we use spawnSync when debugging to ensure logs are piped\n // correctly to stdout/stderr\n const spawn =\n NEXT_TRACE_UPLOAD_DEBUG || sync\n ? child_process.spawnSync\n : child_process.spawn\n\n spawn(\n process.execPath,\n [\n require.resolve('./trace-uploader'),\n traceUploadUrl,\n mode,\n projectDir,\n distDir,\n String(isTurboSession),\n traceId,\n telemetry.anonymousId,\n telemetry.sessionId,\n ],\n {\n detached: !NEXT_TRACE_UPLOAD_DEBUG,\n windowsHide: true,\n shell: false,\n ...(NEXT_TRACE_UPLOAD_DEBUG\n ? {\n stdio: 'inherit',\n }\n : {}),\n }\n )\n}\n"],"names":["uploadTrace","traceUploadUrl","mode","projectDir","distDir","isTurboSession","sync","NEXT_TRACE_UPLOAD_DEBUG","process","env","telemetry","Telemetry","child_process","require","spawn","spawnSync","execPath","resolve","String","traceId","anonymousId","sessionId","detached","windowsHide","shell","stdio"],"mappings":";;;;+BAGA;;;eAAwBA;;;wBAHA;yBACE;AAEX,SAASA,YAAY,EAClCC,cAAc,EACdC,IAAI,EACJC,UAAU,EACVC,OAAO,EACPC,cAAc,EACdC,IAAI,EAQL;IACC,MAAM,EAAEC,uBAAuB,EAAE,GAAGC,QAAQC,GAAG;IAC/C,MAAMC,YAAY,IAAIC,kBAAS,CAAC;QAAEP;IAAQ;IAE1C,kDAAkD;IAClD,0DAA0D;IAC1D,MAAMQ,gBACJC,QAAQ;IAEV,2DAA2D;IAC3D,6BAA6B;IAC7B,MAAMC,QACJP,2BAA2BD,OACvBM,cAAcG,SAAS,GACvBH,cAAcE,KAAK;IAEzBA,MACEN,QAAQQ,QAAQ,EAChB;QACEH,QAAQI,OAAO,CAAC;QAChBhB;QACAC;QACAC;QACAC;QACAc,OAAOb;QACPc,eAAO;QACPT,UAAUU,WAAW;QACrBV,UAAUW,SAAS;KACpB,EACD;QACEC,UAAU,CAACf;QACXgB,aAAa;QACbC,OAAO;QACP,GAAIjB,0BACA;YACEkB,OAAO;QACT,IACA,CAAC,CAAC;IACR;AAEJ","ignoreList":[0]}