"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); 0 && (module.exports = { chainStreams: null, continueDynamicHTMLResume: null, continueDynamicPrerender: null, continueFizzStream: null, continueStaticFallbackPrerender: null, continueStaticPrerender: null, createBufferedTransformStream: null, createDocumentClosingStream: null, createRootLayoutValidatorStream: null, renderToInitialFizzStream: null, streamFromBuffer: null, streamFromString: null, streamToBuffer: null, streamToString: null, streamToUint8Array: null }); function _export(target, all) { for(var name in all)Object.defineProperty(target, name, { enumerable: true, get: all[name] }); } _export(exports, { chainStreams: function() { return chainStreams; }, continueDynamicHTMLResume: function() { return continueDynamicHTMLResume; }, continueDynamicPrerender: function() { return continueDynamicPrerender; }, continueFizzStream: function() { return continueFizzStream; }, continueStaticFallbackPrerender: function() { return continueStaticFallbackPrerender; }, continueStaticPrerender: function() { return continueStaticPrerender; }, createBufferedTransformStream: function() { return createBufferedTransformStream; }, createDocumentClosingStream: function() { return createDocumentClosingStream; }, createRootLayoutValidatorStream: function() { return createRootLayoutValidatorStream; }, renderToInitialFizzStream: function() { return renderToInitialFizzStream; }, streamFromBuffer: function() { return streamFromBuffer; }, streamFromString: function() { return streamFromString; }, streamToBuffer: function() { return streamToBuffer; }, streamToString: function() { return streamToString; }, streamToUint8Array: function() { return streamToUint8Array; } }); const _tracer = require("../lib/trace/tracer"); const _constants = require("../lib/trace/constants"); const _detachedpromise = require("../../lib/detached-promise"); const _scheduler = require("../../lib/scheduler"); const _encodedtags = require("./encoded-tags"); const _uint8arrayhelpers = require("./uint8array-helpers"); const _constants1 = require("../../shared/lib/errors/constants"); const _outputexportprefetchencoding = require("../../shared/lib/segment-cache/output-export-prefetch-encoding"); const _approuterheaders = require("../../client/components/app-router-headers"); const _cachebustingsearchparam = require("../../shared/lib/router/utils/cache-busting-search-param"); function voidCatch() { // this catcher is designed to be used with pipeTo where we expect the underlying // pipe implementation to forward errors but we don't want the pipeTo promise to reject // and be unhandled } // We can share the same encoder instance everywhere // Notably we cannot do the same for TextDecoder because it is stateful // when handling streaming data const encoder = new TextEncoder(); function chainStreams(...streams) { // If we have no streams, return an empty stream. This behavior is // intentional as we're now providing the `RenderResult.EMPTY` value. if (streams.length === 0) { return new ReadableStream({ start (controller) { controller.close(); } }); } // If we only have 1 stream we fast path it by returning just this stream if (streams.length === 1) { return streams[0]; } const { readable, writable } = new TransformStream(); // We always initiate pipeTo immediately. We know we have at least 2 streams // so we need to avoid closing the writable when this one finishes. 
function streamFromString(str) {
    return new ReadableStream({
        start(controller) {
            controller.enqueue(encoder.encode(str));
            controller.close();
        }
    });
}
function streamFromBuffer(chunk) {
    return new ReadableStream({
        start(controller) {
            controller.enqueue(chunk);
            controller.close();
        }
    });
}
async function streamToChunks(stream) {
    const reader = stream.getReader();
    const chunks = [];
    while (true) {
        const { done, value } = await reader.read();
        if (done) {
            break;
        }
        chunks.push(value);
    }
    return chunks;
}
function concatUint8Arrays(chunks) {
    const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
    const result = new Uint8Array(totalLength);
    let offset = 0;
    for (const chunk of chunks) {
        result.set(chunk, offset);
        offset += chunk.length;
    }
    return result;
}
async function streamToUint8Array(stream) {
    return concatUint8Arrays(await streamToChunks(stream));
}
async function streamToBuffer(stream) {
    return Buffer.concat(await streamToChunks(stream));
}
async function streamToString(stream, signal) {
    const decoder = new TextDecoder('utf-8', { fatal: true });
    let string = '';
    for await (const chunk of stream) {
        if (signal == null ? void 0 : signal.aborted) {
            return string;
        }
        string += decoder.decode(chunk, { stream: true });
    }
    string += decoder.decode();
    return string;
}
function createBufferedTransformStream(options = {}) {
    const { maxBufferByteLength = Infinity } = options;
    let bufferedChunks = [];
    let bufferByteLength = 0;
    let pending;
    const flush = (controller) => {
        try {
            if (bufferedChunks.length === 0) {
                return;
            }
            const chunk = new Uint8Array(bufferByteLength);
            let copiedBytes = 0;
            for (let i = 0; i < bufferedChunks.length; i++) {
                const bufferedChunk = bufferedChunks[i];
                chunk.set(bufferedChunk, copiedBytes);
                copiedBytes += bufferedChunk.byteLength;
            }
            // We just wrote all the buffered chunks, so reset the bufferedChunks
            // array and bufferByteLength to prepare for the next round of
            // buffered chunks.
            bufferedChunks.length = 0;
            bufferByteLength = 0;
            controller.enqueue(chunk);
        } catch {
            // If an error occurs while enqueuing, it can't be due to this
            // transformer. It's most likely caused by the controller having been
            // errored (for example, if the stream was cancelled).
        }
    };
    const scheduleFlush = (controller) => {
        if (pending) {
            return;
        }
        const detached = new _detachedpromise.DetachedPromise();
        pending = detached;
        (0, _scheduler.scheduleImmediate)(() => {
            try {
                flush(controller);
            } finally {
                pending = undefined;
                detached.resolve();
            }
        });
    };
    return new TransformStream({
        transform(chunk, controller) {
            // Combine the previous buffer with the new chunk.
            bufferedChunks.push(chunk);
            bufferByteLength += chunk.byteLength;
            if (bufferByteLength >= maxBufferByteLength) {
                flush(controller);
            } else {
                scheduleFlush(controller);
            }
        },
        flush() {
            return pending == null ? void 0 : pending.promise;
        }
    });
}
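// Example (hypothetical sketch, not part of the original module): chunks written
// before the scheduled immediate fires are coalesced into a single chunk by the
// buffered transform; `maxBufferByteLength` forces an eager flush once the byte
// budget is exceeded. Defined but never invoked.
async function _exampleBufferedTransform() {
    const source = chainStreams(
        streamFromString('<p>a</p>'),
        streamFromString('<p>b</p>')
    );
    const buffered = source.pipeThrough(
        createBufferedTransformStream({ maxBufferByteLength: 1024 })
    );
    // Typically resolves to one merged Uint8Array rather than two small chunks.
    return streamToUint8Array(buffered);
}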
function createPrefetchCommentStream(isBuildTimePrerendering, buildId) {
    // Insert an extra comment at the beginning of the HTML document. This must
    // come after the DOCTYPE, which is inserted by React.
    //
    // The first chunk sent by React will contain the doctype. After that, we can
    // pass through the rest of the chunks as-is.
    let didTransformFirstChunk = false;
    return new TransformStream({
        transform(chunk, controller) {
            if (isBuildTimePrerendering && !didTransformFirstChunk) {
                didTransformFirstChunk = true;
                const decoder = new TextDecoder('utf-8', { fatal: true });
                const chunkStr = decoder.decode(chunk, { stream: true });
                const updatedChunkStr = (0, _outputexportprefetchencoding.insertBuildIdComment)(chunkStr, buildId);
                controller.enqueue(encoder.encode(updatedChunkStr));
                return;
            }
            controller.enqueue(chunk);
        }
    });
}
function renderToInitialFizzStream({ ReactDOMServer, element, streamOptions }) {
    return (0, _tracer.getTracer)().trace(_constants.AppRenderSpan.renderToReadableStream, async () => ReactDOMServer.renderToReadableStream(element, streamOptions));
}
function createMetadataTransformStream(insert) {
    let chunkIndex = -1;
    let isMarkRemoved = false;
    return new TransformStream({
        async transform(chunk, controller) {
            let iconMarkIndex = -1;
            let closedHeadIndex = -1;
            chunkIndex++;
            if (isMarkRemoved) {
                controller.enqueue(chunk);
                return;
            }
            let iconMarkLength = 0;
            // Locate the icon mark in this chunk; pass the chunk through untouched
            // if it isn't present.
            if (iconMarkIndex === -1) {
                iconMarkIndex = (0, _uint8arrayhelpers.indexOfUint8Array)(chunk, _encodedtags.ENCODED_TAGS.META.ICON_MARK);
                if (iconMarkIndex === -1) {
                    controller.enqueue(chunk);
                    return;
                } else {
                    // Once the mark is found, check the char that follows so we cover
                    // both possible tag endings.
                    iconMarkLength = _encodedtags.ENCODED_TAGS.META.ICON_MARK.length;
                    // Check if the next char is `/` (charCode 47): xml mode, the tag
                    // ends with `/>`.
                    if (chunk[iconMarkIndex + iconMarkLength] === 47) {
                        iconMarkLength += 2;
                    } else {
                        // The last char is `>`.
                        iconMarkLength++;
                    }
                }
            }
            // Check if the icon mark is inside the <head> tag in the first chunk.
            if (chunkIndex === 0) {
                closedHeadIndex = (0, _uint8arrayhelpers.indexOfUint8Array)(chunk, _encodedtags.ENCODED_TAGS.CLOSED.HEAD);
                if (iconMarkIndex !== -1) {
                    // The icon mark is located in the first chunk, before the closing
                    // head tag. We do not need to insert the script tag in this case
                    // because it's already in the head; just remove the icon mark from
                    // the chunk.
                    if (iconMarkIndex < closedHeadIndex) {
                        const replaced = new Uint8Array(chunk.length - iconMarkLength);
                        // Remove the icon mark from the chunk.
                        replaced.set(chunk.subarray(0, iconMarkIndex));
                        replaced.set(chunk.subarray(iconMarkIndex + iconMarkLength), iconMarkIndex);
                        chunk = replaced;
                    } else {
                        // The icon mark is after the closing head tag; replace it with
                        // the script tag insertion at that position.
                        const insertion = await insert();
                        const encodedInsertion = encoder.encode(insertion);
                        const insertionLength = encodedInsertion.length;
                        const replaced = new Uint8Array(chunk.length - iconMarkLength + insertionLength);
                        replaced.set(chunk.subarray(0, iconMarkIndex));
                        replaced.set(encodedInsertion, iconMarkIndex);
                        replaced.set(chunk.subarray(iconMarkIndex + iconMarkLength), iconMarkIndex + insertionLength);
                        chunk = replaced;
                    }
                    isMarkRemoved = true;
                }
                // If no icon mark was located, it will be handled later, if present,
                // in the following chunks.
            } else {
                // When the mark appears in a later chunk, remove it and insert the
                // script tag at that position.
                const insertion = await insert();
                const encodedInsertion = encoder.encode(insertion);
                const insertionLength = encodedInsertion.length;
                // Replace the icon mark with the hoisted script (or an empty string).
                const replaced = new Uint8Array(chunk.length - iconMarkLength + insertionLength);
                // Copy the first part of the chunk, before the icon mark.
                replaced.set(chunk.subarray(0, iconMarkIndex));
                // Copy the insertion in place of the icon mark.
                replaced.set(encodedInsertion, iconMarkIndex);
                // Copy the rest of the chunk, after the icon mark.
                replaced.set(chunk.subarray(iconMarkIndex + iconMarkLength), iconMarkIndex + insertionLength);
                chunk = replaced;
                isMarkRemoved = true;
            }
            controller.enqueue(chunk);
        }
    });
}
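// Example (hypothetical sketch, not part of the original module): the async
// `insert` callback supplies the markup that replaces the encoded icon mark
// (the concrete mark bytes live in ./encoded-tags); it is only awaited when the
// mark is actually found, and the mark is simply stripped when it already sits
// inside <head>. `htmlStream` is a placeholder argument. Defined but never
// invoked.
function _exampleMetadataTransform(htmlStream) {
    return htmlStream.pipeThrough(
        createMetadataTransformStream(async () => '<script>/* hoisted metadata */</script>')
    );
}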
function createHeadInsertionTransformStream(insert) {
    let inserted = false;
    // We need to track whether this transform saw any bytes, because if it
    // didn't we don't want to insert any server HTML at all.
    let hasBytes = false;
    return new TransformStream({
        async transform(chunk, controller) {
            hasBytes = true;
            const insertion = await insert();
            if (inserted) {
                if (insertion) {
                    const encodedInsertion = encoder.encode(insertion);
                    controller.enqueue(encodedInsertion);
                }
                controller.enqueue(chunk);
            } else {
                // TODO (@Ethan-Arrowood): Replace the generic `indexOfUint8Array`
                // method with something finely tuned for the subset of things
                // actually being checked for.
                const index = (0, _uint8arrayhelpers.indexOfUint8Array)(chunk, _encodedtags.ENCODED_TAGS.CLOSED.HEAD);
                // In fully static or non-PPR rendering, `</head>` will always be
                // found in the first chunk.
                if (index !== -1) {
                    if (insertion) {
                        const encodedInsertion = encoder.encode(insertion);
                        // Allocate the total byte count of the chunk plus the insertion,
                        // e.g.
                        //   chunk     = <head>...</head>
                        //   insertion = <script>...</script>
                        //   output    = <head>...[<script>...</script>]</head>
                        const insertedHeadContent = new Uint8Array(chunk.length + encodedInsertion.length);
                        // Copy the first part of the chunk, before the closing head tag.
                        insertedHeadContent.set(chunk.slice(0, index));
                        // Copy the server-inserted content.
                        insertedHeadContent.set(encodedInsertion, index);
                        // Copy the rest of the chunk.
                        insertedHeadContent.set(chunk.slice(index), index + encodedInsertion.length);
                        controller.enqueue(insertedHeadContent);
                    } else {
                        controller.enqueue(chunk);
                    }
                    inserted = true;
                } else {
                    // This happens in PPR rendering during `next start`, when the page
                    // is partially prerendered. When the page resumes, the closing head
                    // tag is not found in this chunk, so we just enqueue the insertion
                    // followed by the chunk onto the current stream, e.g.
                    //   PPR-static: ... [ resume content ]
                    //   PPR-resume: [ insertion ][ rest content ]
                    if (insertion) {
                        controller.enqueue(encoder.encode(insertion));
                    }
                    controller.enqueue(chunk);
                    inserted = true;
                }
            }
        },
        async flush(controller) {
            // Check before closing whether there's anything remaining to insert.
            if (hasBytes) {
                const insertion = await insert();
                if (insertion) {
                    controller.enqueue(encoder.encode(insertion));
                }
            }
        }
    });
}
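// Example (hypothetical sketch, not part of the original module): the insertion
// produced by the async callback is spliced in immediately before `</head>` when
// the closing tag is present in a chunk; otherwise it is flushed ahead of the
// chunk (the PPR resume case above). Defined but never invoked.
async function _exampleHeadInsertion() {
    const html = streamFromString('<html><head><title>x</title></head><body></body></html>');
    const out = html.pipeThrough(
        createHeadInsertionTransformStream(async () => '<meta name="demo" content="1">')
    );
    // Resolves with the <meta> tag inserted just before </head>.
    return streamToString(out);
}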
function createClientResumeScriptInsertionTransformStream() {
    const segmentPath = '/_full';
    const cacheBustingHeader = (0, _cachebustingsearchparam.computeCacheBustingSearchParam)(
        '1',
        segmentPath,
        undefined,
        undefined // headers[NEXT_URL]
    );
    const searchStr = `${_approuterheaders.NEXT_RSC_UNION_QUERY}=${cacheBustingHeader}`;
    const NEXT_CLIENT_RESUME_SCRIPT = ``;
    let didAlreadyInsert = false;
    return new TransformStream({
        transform(chunk, controller) {
            if (didAlreadyInsert) {
                // Already inserted the script into the head. Pass through.
                controller.enqueue(chunk);
                return;
            }
            // TODO (@Ethan-Arrowood): Replace the generic `indexOfUint8Array`
            // method with something finely tuned for the subset of things
            // actually being checked for.
            const headClosingTagIndex = (0, _uint8arrayhelpers.indexOfUint8Array)(chunk, _encodedtags.ENCODED_TAGS.CLOSED.HEAD);
            if (headClosingTagIndex === -1) {
                // `</head>` is not in this chunk yet; pass it through. (In fully
                // static or non-PPR rendering, `</head>` is always found in the
                // first chunk.)
                controller.enqueue(chunk);
                return;
            }
            const encodedInsertion = encoder.encode(NEXT_CLIENT_RESUME_SCRIPT);
            // Allocate the total byte count of the chunk plus the insertion,
            // e.g.
            //   chunk     = <head>...</head>
            //   insertion = <script>...</script>
            //   output    = <head>...[<script>...</script>]</head>
            const insertedHeadContent = new Uint8Array(chunk.length + encodedInsertion.length);
            // Copy the first part of the chunk, before the closing head tag.
            insertedHeadContent.set(chunk.slice(0, headClosingTagIndex));
            // Copy the server-inserted content.
            insertedHeadContent.set(encodedInsertion, headClosingTagIndex);
            // Copy the rest of the chunk.
            insertedHeadContent.set(chunk.slice(headClosingTagIndex), headClosingTagIndex + encodedInsertion.length);
            controller.enqueue(insertedHeadContent);
            didAlreadyInsert = true;
        }
    });
}
// Appends a suffix after the main body content (scripts that belong before
// `</body>`), but waits for the major chunks to be enqueued first.
function createDeferredSuffixStream(suffix) {
    let flushed = false;
    let pending;
    const flush = (controller) => {
        const detached = new _detachedpromise.DetachedPromise();
        pending = detached;
        (0, _scheduler.scheduleImmediate)(() => {
            try {
                controller.enqueue(encoder.encode(suffix));
            } catch {
                // If an error occurs while enqueuing, it can't be this
                // transformer's fault. It's likely due to the controller having
                // been errored because the stream was cancelled.
            } finally {
                pending = undefined;
                detached.resolve();
            }
        });
    };
    return new TransformStream({
        transform(chunk, controller) {
            controller.enqueue(chunk);
            // If we've already flushed, we're done.
            if (flushed) return;
            // Schedule the flush to happen.
            flushed = true;
            flush(controller);
        },
        flush(controller) {
            if (pending) return pending.promise;
            if (flushed) return;
            // Flush now.
            controller.enqueue(encoder.encode(suffix));
        }
    });
}
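// Example (hypothetical sketch, not part of the original module): the suffix is
// not appended synchronously with the first chunk; it is enqueued on a scheduled
// immediate (or in flush()) so the main body content gets a chance to go out
// first. Defined but never invoked.
async function _exampleDeferredSuffix() {
    const out = streamFromString('<div>content</div>').pipeThrough(
        createDeferredSuffixStream('<script src="/late.js"></script>')
    );
    // Resolves to '<div>content</div><script src="/late.js"></script>'.
    return streamToString(out);
}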
function createFlightDataInjectionTransformStream(stream, delayDataUntilFirstHtmlChunk) {
    let htmlStreamFinished = false;
    let pull = null;
    let donePulling = false;
    function startOrContinuePulling(controller) {
        if (!pull) {
            pull = startPulling(controller);
        }
        return pull;
    }
    async function startPulling(controller) {
        const reader = stream.getReader();
        if (delayDataUntilFirstHtmlChunk) {
            // NOTE: streaming flush
            // We buffer here for the inlined data stream because the "shell"
            // stream might be re-chunked by the underlying stream implementation,
            // e.g. with a specific high-water mark. This extra tick is necessary
            // to ensure it's safe to start piping the data stream.
            // We don't start reading until we've left the current task, to ensure
            // the data is inserted after the shell has flushed. Note that this
            // implementation might get stale if implementation details of Fizz
            // change in the future.
            await (0, _scheduler.atLeastOneTask)();
        }
        try {
            while (true) {
                const { done, value } = await reader.read();
                if (done) {
                    donePulling = true;
                    return;
                }
                // We want to prioritize HTML over RSC data. The SSR render is based
                // on the same RSC stream, so when we get a new RSC chunk we're
                // likely to produce an HTML chunk as well; give it a chance to
                // flush first.
                if (!delayDataUntilFirstHtmlChunk && !htmlStreamFinished) {
                    await (0, _scheduler.atLeastOneTask)();
                }
                controller.enqueue(value);
            }
        } catch (err) {
            controller.error(err);
        }
    }
    return new TransformStream({
        start(controller) {
            if (!delayDataUntilFirstHtmlChunk) {
                startOrContinuePulling(controller);
            }
        },
        transform(chunk, controller) {
            controller.enqueue(chunk);
            // Start the streaming if it hasn't already been started.
            if (delayDataUntilFirstHtmlChunk) {
                startOrContinuePulling(controller);
            }
        },
        flush(controller) {
            htmlStreamFinished = true;
            if (donePulling) {
                return;
            }
            return startOrContinuePulling(controller);
        }
    });
}
const CLOSE_TAG = '