Initial boilerplate project

2024-09-24 03:52:46 +00:00
parent 6120b2d6c3
commit 154b93e267
10034 changed files with 2079352 additions and 2 deletions

View File

@@ -0,0 +1,12 @@
export declare const ENCODED_TAGS: {
readonly OPENING: {
readonly HTML: Uint8Array;
readonly BODY: Uint8Array;
};
readonly CLOSED: {
readonly HEAD: Uint8Array;
readonly BODY: Uint8Array;
readonly HTML: Uint8Array;
readonly BODY_AND_HTML: Uint8Array;
};
};

View File

@@ -0,0 +1,82 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "ENCODED_TAGS", {
enumerable: true,
get: function() {
return ENCODED_TAGS;
}
});
const ENCODED_TAGS = {
// opening tags do not have the closing `>` since they can contain other attributes such as `<body className=''>`
OPENING: {
// <html
HTML: new Uint8Array([
60,
104,
116,
109,
108
]),
// <body
BODY: new Uint8Array([
60,
98,
111,
100,
121
])
},
CLOSED: {
// </head>
HEAD: new Uint8Array([
60,
47,
104,
101,
97,
100,
62
]),
// </body>
BODY: new Uint8Array([
60,
47,
98,
111,
100,
121,
62
]),
// </html>
HTML: new Uint8Array([
60,
47,
104,
116,
109,
108,
62
]),
// </body></html>
BODY_AND_HTML: new Uint8Array([
60,
47,
98,
111,
100,
121,
62,
60,
47,
104,
116,
109,
108,
62
])
}
};
//# sourceMappingURL=encodedTags.js.map
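
For reference, these byte arrays are simply the UTF-8 encodings of the tag strings, and the OPENING entries deliberately omit the trailing ">" so they also match opening tags that carry attributes. A minimal sketch, assuming the relative import path, that checks both properties:

import { ENCODED_TAGS } from "./encodedTags"; // path is an assumption

const encoder = new TextEncoder();
const decoder = new TextDecoder();

// Each entry decodes back to the literal tag text it represents.
console.log(decoder.decode(ENCODED_TAGS.OPENING.HTML)); // "<html"
console.log(decoder.decode(ENCODED_TAGS.CLOSED.BODY_AND_HTML)); // "</body></html>"

// Because OPENING.BODY omits the ">", it is a byte-prefix of a <body> tag with attributes.
const chunk = encoder.encode('<body className="dark">');
console.log(ENCODED_TAGS.OPENING.BODY.every((byte, i) => chunk[i] === byte)); // true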

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/stream-utils/encodedTags.ts"],"names":["ENCODED_TAGS","OPENING","HTML","Uint8Array","BODY","CLOSED","HEAD","BODY_AND_HTML"],"mappings":";;;;+BAAaA;;;eAAAA;;;AAAN,MAAMA,eAAe;IAC1B,iHAAiH;IACjHC,SAAS;QACP,QAAQ;QACRC,MAAM,IAAIC,WAAW;YAAC;YAAI;YAAK;YAAK;YAAK;SAAI;QAC7C,QAAQ;QACRC,MAAM,IAAID,WAAW;YAAC;YAAI;YAAI;YAAK;YAAK;SAAI;IAC9C;IACAE,QAAQ;QACN,UAAU;QACVC,MAAM,IAAIH,WAAW;YAAC;YAAI;YAAI;YAAK;YAAK;YAAI;YAAK;SAAG;QACpD,UAAU;QACVC,MAAM,IAAID,WAAW;YAAC;YAAI;YAAI;YAAI;YAAK;YAAK;YAAK;SAAG;QACpD,UAAU;QACVD,MAAM,IAAIC,WAAW;YAAC;YAAI;YAAI;YAAK;YAAK;YAAK;YAAK;SAAG;QACrD,iBAAiB;QACjBI,eAAe,IAAIJ,WAAW;YAC5B;YAAI;YAAI;YAAI;YAAK;YAAK;YAAK;YAAI;YAAI;YAAI;YAAK;YAAK;YAAK;YAAK;SAC5D;IACH;AACF"}

View File

@@ -0,0 +1,45 @@
/// <reference types="react" />
export type ReactReadableStream = ReadableStream<Uint8Array> & {
allReady?: Promise<void> | undefined;
};
export declare function chainStreams<T>(...streams: ReadableStream<T>[]): ReadableStream<T>;
export declare function streamFromString(str: string): ReadableStream<Uint8Array>;
export declare function streamToString(stream: ReadableStream<Uint8Array>): Promise<string>;
export declare function createBufferedTransformStream(): TransformStream<Uint8Array, Uint8Array>;
export declare function renderToInitialFizzStream({ ReactDOMServer, element, streamOptions, }: {
ReactDOMServer: typeof import('react-dom/server.edge');
element: React.ReactElement;
streamOptions?: any;
}): Promise<ReactReadableStream>;
export declare function createRootLayoutValidatorStream(): TransformStream<Uint8Array, Uint8Array>;
export type ContinueStreamOptions = {
inlinedDataStream: ReadableStream<Uint8Array> | undefined;
isStaticGeneration: boolean;
getServerInsertedHTML: (() => Promise<string>) | undefined;
serverInsertedHTMLToHead: boolean;
validateRootLayout?: boolean;
/**
* Suffix to inject after the buffered data, but before the close tags.
*/
suffix?: string | undefined;
};
export declare function continueFizzStream(renderStream: ReactReadableStream, { suffix, inlinedDataStream, isStaticGeneration, getServerInsertedHTML, serverInsertedHTMLToHead, validateRootLayout, }: ContinueStreamOptions): Promise<ReadableStream<Uint8Array>>;
type ContinueDynamicPrerenderOptions = {
getServerInsertedHTML: () => Promise<string>;
};
export declare function continueDynamicPrerender(prerenderStream: ReadableStream<Uint8Array>, { getServerInsertedHTML }: ContinueDynamicPrerenderOptions): Promise<ReadableStream<Uint8Array>>;
type ContinueStaticPrerenderOptions = {
inlinedDataStream: ReadableStream<Uint8Array>;
getServerInsertedHTML: () => Promise<string>;
};
export declare function continueStaticPrerender(prerenderStream: ReadableStream<Uint8Array>, { inlinedDataStream, getServerInsertedHTML }: ContinueStaticPrerenderOptions): Promise<ReadableStream<Uint8Array>>;
type ContinueResumeOptions = {
inlinedDataStream: ReadableStream<Uint8Array>;
getServerInsertedHTML: () => Promise<string>;
};
export declare function continueDynamicHTMLResume(renderStream: ReadableStream<Uint8Array>, { inlinedDataStream, getServerInsertedHTML }: ContinueResumeOptions): Promise<ReadableStream<Uint8Array>>;
type ContinueDynamicDataResumeOptions = {
inlinedDataStream: ReadableStream<Uint8Array>;
};
export declare function continueDynamicDataResume(renderStream: ReadableStream<Uint8Array>, { inlinedDataStream }: ContinueDynamicDataResumeOptions): Promise<ReadableStream<Uint8Array>>;
export {};
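
As a quick orientation to this API, here is a minimal usage sketch, assuming the relative import path and a runtime with WHATWG streams (Node 18+ or an edge runtime): it chains two string streams, buffers them, and reads the result back as a string.

import {
  chainStreams,
  streamFromString,
  streamToString,
  createBufferedTransformStream,
} from "./node-web-streams-helper"; // path is an assumption

async function demo(): Promise<void> {
  const chained = chainStreams(
    streamFromString("<p>hello</p>"),
    streamFromString("<p>world</p>")
  );
  // Coalesce buffered chunks before they are flushed downstream.
  const buffered = chained.pipeThrough(createBufferedTransformStream());
  console.log(await streamToString(buffered)); // "<p>hello</p><p>world</p>"
}

void demo();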

View File

@@ -0,0 +1,484 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
chainStreams: null,
continueDynamicDataResume: null,
continueDynamicHTMLResume: null,
continueDynamicPrerender: null,
continueFizzStream: null,
continueStaticPrerender: null,
createBufferedTransformStream: null,
createRootLayoutValidatorStream: null,
renderToInitialFizzStream: null,
streamFromString: null,
streamToString: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
chainStreams: function() {
return chainStreams;
},
continueDynamicDataResume: function() {
return continueDynamicDataResume;
},
continueDynamicHTMLResume: function() {
return continueDynamicHTMLResume;
},
continueDynamicPrerender: function() {
return continueDynamicPrerender;
},
continueFizzStream: function() {
return continueFizzStream;
},
continueStaticPrerender: function() {
return continueStaticPrerender;
},
createBufferedTransformStream: function() {
return createBufferedTransformStream;
},
createRootLayoutValidatorStream: function() {
return createRootLayoutValidatorStream;
},
renderToInitialFizzStream: function() {
return renderToInitialFizzStream;
},
streamFromString: function() {
return streamFromString;
},
streamToString: function() {
return streamToString;
}
});
const _tracer = require("../lib/trace/tracer");
const _constants = require("../lib/trace/constants");
const _detachedpromise = require("../../lib/detached-promise");
const _scheduler = require("../../lib/scheduler");
const _encodedTags = require("./encodedTags");
const _uint8arrayhelpers = require("./uint8array-helpers");
function voidCatch() {
// This catcher is designed to be used with pipeTo, where we expect the underlying
// pipe implementation to forward errors, but we don't want the pipeTo promise to
// reject and be unhandled
}
// We can share the same encoder instance everywhere
// Notably we cannot do the same for TextDecoder because it is stateful
// when handling streaming data
const encoder = new TextEncoder();
function chainStreams(...streams) {
// We could encode this invariant in the arguments, but current uses of this function
// pass a spread, so a compile-time check would be missed
if (streams.length === 0) {
throw new Error("Invariant: chainStreams requires at least one stream");
}
// If we only have 1 stream we fast path it by returning just this stream
if (streams.length === 1) {
return streams[0];
}
const { readable, writable } = new TransformStream();
// We always initiate pipeTo immediately. We know we have at least 2 streams
// so we need to avoid closing the writable when this one finishes.
let promise = streams[0].pipeTo(writable, {
preventClose: true
});
let i = 1;
for(; i < streams.length - 1; i++){
const nextStream = streams[i];
promise = promise.then(()=>nextStream.pipeTo(writable, {
preventClose: true
}));
}
// We can omit the length check because we halted before the last stream and there
// are at least two streams, so the lastStream here will always be defined
const lastStream = streams[i];
promise = promise.then(()=>lastStream.pipeTo(writable));
// Catch any errors from the streams and ignore them, they will be handled
// by whatever is consuming the readable stream.
promise.catch(voidCatch);
return readable;
}
function streamFromString(str) {
return new ReadableStream({
start (controller) {
controller.enqueue(encoder.encode(str));
controller.close();
}
});
}
async function streamToString(stream) {
const decoder = new TextDecoder("utf-8", {
fatal: true
});
let string = "";
// @ts-expect-error TypeScript gets this wrong (https://nodejs.org/api/webstreams.html#async-iteration)
for await (const chunk of stream){
string += decoder.decode(chunk, {
stream: true
});
}
string += decoder.decode();
return string;
}
function createBufferedTransformStream() {
let bufferedChunks = [];
let bufferByteLength = 0;
let pending;
const flush = (controller)=>{
// If we already have a pending flush, then return early.
if (pending) return;
const detached = new _detachedpromise.DetachedPromise();
pending = detached;
(0, _scheduler.scheduleImmediate)(()=>{
try {
const chunk = new Uint8Array(bufferByteLength);
let copiedBytes = 0;
for(let i = 0; i < bufferedChunks.length; i++){
const bufferedChunk = bufferedChunks[i];
chunk.set(bufferedChunk, copiedBytes);
copiedBytes += bufferedChunk.byteLength;
}
// We just wrote all the buffered chunks so we need to reset the bufferedChunks array
// and our bufferByteLength to prepare for the next round of buffered chunks
bufferedChunks.length = 0;
bufferByteLength = 0;
controller.enqueue(chunk);
} catch {
// If an error occurs while enqueuing, it can't be this
// transformer's fault. It's likely due to the controller having
// errored because the stream was cancelled.
} finally{
pending = undefined;
detached.resolve();
}
});
};
return new TransformStream({
transform (chunk, controller) {
// Combine the previous buffer with the new chunk.
bufferedChunks.push(chunk);
bufferByteLength += chunk.byteLength;
// Flush the buffer to the controller.
flush(controller);
},
flush () {
if (!pending) return;
return pending.promise;
}
});
}
function createInsertedHTMLStream(getServerInsertedHTML) {
return new TransformStream({
transform: async (chunk, controller)=>{
const html = await getServerInsertedHTML();
if (html) {
controller.enqueue(encoder.encode(html));
}
controller.enqueue(chunk);
}
});
}
function renderToInitialFizzStream({ ReactDOMServer, element, streamOptions }) {
return (0, _tracer.getTracer)().trace(_constants.AppRenderSpan.renderToReadableStream, async ()=>ReactDOMServer.renderToReadableStream(element, streamOptions));
}
function createHeadInsertionTransformStream(insert) {
let inserted = false;
let freezing = false;
// We need to track if this transform saw any bytes because if it didn't
// we won't want to insert any server HTML at all
let hasBytes = false;
return new TransformStream({
async transform (chunk, controller) {
hasBytes = true;
// While react is flushing chunks, we don't apply insertions
if (freezing) {
controller.enqueue(chunk);
return;
}
const insertion = await insert();
if (inserted) {
if (insertion) {
const encodedInsertion = encoder.encode(insertion);
controller.enqueue(encodedInsertion);
}
controller.enqueue(chunk);
freezing = true;
} else {
// TODO (@Ethan-Arrowood): Replace the generic `indexOfUint8Array` method with something finely tuned for the subset of things actually being checked for.
const index = (0, _uint8arrayhelpers.indexOfUint8Array)(chunk, _encodedTags.ENCODED_TAGS.CLOSED.HEAD);
if (index !== -1) {
if (insertion) {
const encodedInsertion = encoder.encode(insertion);
const insertedHeadContent = new Uint8Array(chunk.length + encodedInsertion.length);
insertedHeadContent.set(chunk.slice(0, index));
insertedHeadContent.set(encodedInsertion, index);
insertedHeadContent.set(chunk.slice(index), index + encodedInsertion.length);
controller.enqueue(insertedHeadContent);
} else {
controller.enqueue(chunk);
}
freezing = true;
inserted = true;
}
}
if (!inserted) {
controller.enqueue(chunk);
} else {
(0, _scheduler.scheduleImmediate)(()=>{
freezing = false;
});
}
},
async flush (controller) {
// Check before closing if there's anything remaining to insert.
if (hasBytes) {
const insertion = await insert();
if (insertion) {
controller.enqueue(encoder.encode(insertion));
}
}
}
});
}
// Inserts the suffix after the main body content (scripts go before </body>),
// but waits for the major chunks to be enqueued first.
function createDeferredSuffixStream(suffix) {
let flushed = false;
let pending;
const flush = (controller)=>{
const detached = new _detachedpromise.DetachedPromise();
pending = detached;
(0, _scheduler.scheduleImmediate)(()=>{
try {
controller.enqueue(encoder.encode(suffix));
} catch {
// If an error occurs while enqueuing, it can't be this
// transformer's fault. It's likely due to the controller having
// errored because the stream was cancelled.
} finally{
pending = undefined;
detached.resolve();
}
});
};
return new TransformStream({
transform (chunk, controller) {
controller.enqueue(chunk);
// If we've already flushed, we're done.
if (flushed) return;
// Schedule the flush to happen.
flushed = true;
flush(controller);
},
flush (controller) {
if (pending) return pending.promise;
if (flushed) return;
// Flush now.
controller.enqueue(encoder.encode(suffix));
}
});
}
// Merge two streams into one. Ensure the final transform stream is closed
// when both are finished.
function createMergedTransformStream(stream) {
let pull = null;
let donePulling = false;
async function startPulling(controller) {
if (pull) {
return;
}
const reader = stream.getReader();
// NOTE: streaming flush
// We are buffering here for the inlined data stream because the
// "shell" stream might be re-chunked by the underlying stream
// implementation, e.g. with a specific high-water mark. To ensure the
// data stream is piped at a safe time, this extra tick is necessary.
// We don't start reading until we've left the current task to ensure
// that the data is inserted after flushing the shell. Note that this
// implementation might get stale if implementation details of Fizz
// change in the future.
await (0, _scheduler.atLeastOneTask)();
try {
while(true){
const { done, value } = await reader.read();
if (done) {
donePulling = true;
return;
}
controller.enqueue(value);
}
} catch (err) {
controller.error(err);
}
}
return new TransformStream({
transform (chunk, controller) {
controller.enqueue(chunk);
// Start the streaming if it hasn't already been started yet.
if (!pull) {
pull = startPulling(controller);
}
},
flush (controller) {
if (donePulling) {
return;
}
return pull || startPulling(controller);
}
});
}
/**
* This transform stream moves the suffix to the end of the stream, so results
* like `</body></html><script>...</script>` will be transformed to
* `<script>...</script></body></html>`.
*/ function createMoveSuffixStream(suffix) {
let foundSuffix = false;
const encodedSuffix = encoder.encode(suffix);
return new TransformStream({
transform (chunk, controller) {
if (foundSuffix) {
return controller.enqueue(chunk);
}
const index = (0, _uint8arrayhelpers.indexOfUint8Array)(chunk, encodedSuffix);
if (index > -1) {
foundSuffix = true;
// If the whole chunk is the suffix, then don't write anything, it will
// be written in the flush.
if (chunk.length === suffix.length) {
return;
}
// Write out the part before the suffix.
const before = chunk.slice(0, index);
controller.enqueue(before);
// In the case where the suffix is in the middle of the chunk, we need
// to split the chunk into two parts.
if (chunk.length > suffix.length + index) {
// Write out the part after the suffix.
const after = chunk.slice(index + suffix.length);
controller.enqueue(after);
}
} else {
controller.enqueue(chunk);
}
},
flush (controller) {
// Even if we didn't find the suffix, the HTML is not valid if we don't
// add it, so insert it at the end.
controller.enqueue(encodedSuffix);
}
});
}
function createStripDocumentClosingTagsTransform() {
return new TransformStream({
transform (chunk, controller) {
// We rely on the assumption that chunks will never break in the middle of a code unit.
// This is reasonable because we currently concatenate all of React's output from a single
// flush into one chunk before streaming it forward, which means the chunk represents
// a single coherent UTF-8 string. This is not safe if we ever change our streaming to
// no longer produce this one large buffered chunk.
if ((0, _uint8arrayhelpers.isEquivalentUint8Arrays)(chunk, _encodedTags.ENCODED_TAGS.CLOSED.BODY_AND_HTML) || (0, _uint8arrayhelpers.isEquivalentUint8Arrays)(chunk, _encodedTags.ENCODED_TAGS.CLOSED.BODY) || (0, _uint8arrayhelpers.isEquivalentUint8Arrays)(chunk, _encodedTags.ENCODED_TAGS.CLOSED.HTML)) {
// the entire chunk is the closing tags; return without enqueueing anything.
return;
}
// We assume these tags appear together at the end of the document and that
// they won't appear anywhere else in the document. This is not really a safe assumption,
// but until we revamp our streaming infra this is a performant way to strip the tags.
chunk = (0, _uint8arrayhelpers.removeFromUint8Array)(chunk, _encodedTags.ENCODED_TAGS.CLOSED.BODY);
chunk = (0, _uint8arrayhelpers.removeFromUint8Array)(chunk, _encodedTags.ENCODED_TAGS.CLOSED.HTML);
controller.enqueue(chunk);
}
});
}
function createRootLayoutValidatorStream() {
let foundHtml = false;
let foundBody = false;
return new TransformStream({
async transform (chunk, controller) {
// Peek into the streamed chunk to see if the tags are present.
if (!foundHtml && (0, _uint8arrayhelpers.indexOfUint8Array)(chunk, _encodedTags.ENCODED_TAGS.OPENING.HTML) > -1) {
foundHtml = true;
}
if (!foundBody && (0, _uint8arrayhelpers.indexOfUint8Array)(chunk, _encodedTags.ENCODED_TAGS.OPENING.BODY) > -1) {
foundBody = true;
}
controller.enqueue(chunk);
},
flush (controller) {
const missingTags = [];
if (!foundHtml) missingTags.push("html");
if (!foundBody) missingTags.push("body");
if (!missingTags.length) return;
controller.enqueue(encoder.encode(`<script>self.__next_root_layout_missing_tags=${JSON.stringify(missingTags)}</script>`));
}
});
}
function chainTransformers(readable, transformers) {
let stream = readable;
for (const transformer of transformers){
if (!transformer) continue;
stream = stream.pipeThrough(transformer);
}
return stream;
}
async function continueFizzStream(renderStream, { suffix, inlinedDataStream, isStaticGeneration, getServerInsertedHTML, serverInsertedHTMLToHead, validateRootLayout }) {
const closeTag = "</body></html>";
// Suffix itself might contain close tags at the end, so we need to split it.
const suffixUnclosed = suffix ? suffix.split(closeTag, 1)[0] : null;
// If we're generating static HTML and there's an `allReady` promise on the
// stream, we need to wait for it to resolve before continuing.
if (isStaticGeneration && "allReady" in renderStream) {
await renderStream.allReady;
}
return chainTransformers(renderStream, [
// Buffer everything to avoid flushing too frequently
createBufferedTransformStream(),
// Insert generated tags to head
getServerInsertedHTML && !serverInsertedHTMLToHead ? createInsertedHTMLStream(getServerInsertedHTML) : null,
// Insert suffix content
suffixUnclosed != null && suffixUnclosed.length > 0 ? createDeferredSuffixStream(suffixUnclosed) : null,
// Insert the inlined data (Flight data, form state, etc.) stream into the HTML
inlinedDataStream ? createMergedTransformStream(inlinedDataStream) : null,
// Validate the root layout for missing html or body tags
validateRootLayout ? createRootLayoutValidatorStream() : null,
// Close tags should always be deferred to the end
createMoveSuffixStream(closeTag),
// Special head insertions
// TODO-APP: Insert server side html to end of head in app layout rendering, to avoid
// hydration errors. Remove this once it's ready to be handled by react itself.
getServerInsertedHTML && serverInsertedHTMLToHead ? createHeadInsertionTransformStream(getServerInsertedHTML) : null
]);
}
async function continueDynamicPrerender(prerenderStream, { getServerInsertedHTML }) {
return prerenderStream// Buffer everything to avoid flushing too frequently
.pipeThrough(createBufferedTransformStream()).pipeThrough(createStripDocumentClosingTagsTransform())// Insert generated tags to head
.pipeThrough(createHeadInsertionTransformStream(getServerInsertedHTML));
}
async function continueStaticPrerender(prerenderStream, { inlinedDataStream, getServerInsertedHTML }) {
const closeTag = "</body></html>";
return prerenderStream// Buffer everything to avoid flushing too frequently
.pipeThrough(createBufferedTransformStream())// Insert generated tags to head
.pipeThrough(createHeadInsertionTransformStream(getServerInsertedHTML))// Insert the inlined data (Flight data, form state, etc.) stream into the HTML
.pipeThrough(createMergedTransformStream(inlinedDataStream))// Close tags should always be deferred to the end
.pipeThrough(createMoveSuffixStream(closeTag));
}
async function continueDynamicHTMLResume(renderStream, { inlinedDataStream, getServerInsertedHTML }) {
const closeTag = "</body></html>";
return renderStream// Buffer everything to avoid flushing too frequently
.pipeThrough(createBufferedTransformStream())// Insert generated tags to head
.pipeThrough(createHeadInsertionTransformStream(getServerInsertedHTML))// Insert the inlined data (Flight data, form state, etc.) stream into the HTML
.pipeThrough(createMergedTransformStream(inlinedDataStream))// Close tags should always be deferred to the end
.pipeThrough(createMoveSuffixStream(closeTag));
}
async function continueDynamicDataResume(renderStream, { inlinedDataStream }) {
const closeTag = "</body></html>";
return renderStream// Insert the inlined data (Flight data, form state, etc.) stream into the HTML
.pipeThrough(createMergedTransformStream(inlinedDataStream))// Close tags should always be deferred to the end
.pipeThrough(createMoveSuffixStream(closeTag));
}
//# sourceMappingURL=node-web-streams-helper.js.map
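
To show how these pieces compose, here is a hypothetical wiring of renderToInitialFizzStream and continueFizzStream. The App component and the relative import path are assumptions; the option shape follows the ContinueStreamOptions declaration above.

import * as React from "react";
import {
  renderToInitialFizzStream,
  continueFizzStream,
} from "./node-web-streams-helper"; // path is an assumption

// App is a placeholder component supplied by the caller.
async function renderPage(App: () => React.ReactElement): Promise<ReadableStream<Uint8Array>> {
  const renderStream = await renderToInitialFizzStream({
    ReactDOMServer: await import("react-dom/server.edge"),
    element: React.createElement(App),
    streamOptions: { onError: console.error },
  });
  return continueFizzStream(renderStream, {
    suffix: undefined,
    inlinedDataStream: undefined,
    isStaticGeneration: false,
    getServerInsertedHTML: undefined,
    serverInsertedHTMLToHead: false,
    validateRootLayout: true,
  });
}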

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,16 @@
/**
* Find the starting index of Uint8Array `b` within Uint8Array `a`.
*/
export declare function indexOfUint8Array(a: Uint8Array, b: Uint8Array): number;
/**
* Check if two Uint8Arrays are strictly equivalent.
*/
export declare function isEquivalentUint8Arrays(a: Uint8Array, b: Uint8Array): boolean;
/**
* Remove Uint8Array `b` from Uint8Array `a`.
*
* If `b` is not in `a`, `a` is returned unchanged.
*
* Otherwise, the function returns a new Uint8Array instance with size `a.length - b.length`
*/
export declare function removeFromUint8Array(a: Uint8Array, b: Uint8Array): Uint8Array;
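
A few concrete values may help when reading these signatures. Assuming the relative import path, the helpers behave like this on encoded tag strings:

import {
  indexOfUint8Array,
  isEquivalentUint8Arrays,
  removeFromUint8Array,
} from "./uint8array-helpers"; // path is an assumption

const enc = new TextEncoder();
const closeTags = enc.encode("</body></html>");

indexOfUint8Array(closeTags, enc.encode("</html>")); // 7
indexOfUint8Array(closeTags, enc.encode("<head>")); // -1
isEquivalentUint8Arrays(enc.encode("</body>"), closeTags.subarray(0, 7)); // true
removeFromUint8Array(closeTags, enc.encode("</body>")); // the bytes of "</html>"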

View File

@@ -0,0 +1,69 @@
/**
* Find the starting index of Uint8Array `b` within Uint8Array `a`.
*/ "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
indexOfUint8Array: null,
isEquivalentUint8Arrays: null,
removeFromUint8Array: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
indexOfUint8Array: function() {
return indexOfUint8Array;
},
isEquivalentUint8Arrays: function() {
return isEquivalentUint8Arrays;
},
removeFromUint8Array: function() {
return removeFromUint8Array;
}
});
function indexOfUint8Array(a, b) {
if (b.length === 0) return 0;
if (a.length === 0 || b.length > a.length) return -1;
// start iterating through `a`
for(let i = 0; i <= a.length - b.length; i++){
let completeMatch = true;
// from index `i`, iterate through `b` and check for mismatch
for(let j = 0; j < b.length; j++){
// if the values do not match, then this isn't a complete match, exit `b` iteration early and iterate to next index of `a`.
if (a[i + j] !== b[j]) {
completeMatch = false;
break;
}
}
if (completeMatch) {
return i;
}
}
return -1;
}
function isEquivalentUint8Arrays(a, b) {
if (a.length !== b.length) return false;
for(let i = 0; i < a.length; i++){
if (a[i] !== b[i]) return false;
}
return true;
}
function removeFromUint8Array(a, b) {
const tagIndex = indexOfUint8Array(a, b);
if (tagIndex === 0) return a.subarray(b.length);
if (tagIndex > -1) {
const removed = new Uint8Array(a.length - b.length);
removed.set(a.slice(0, tagIndex));
removed.set(a.slice(tagIndex + b.length), tagIndex);
return removed;
} else {
return a;
}
}
//# sourceMappingURL=uint8array-helpers.js.map
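
One detail visible only in this implementation: when the match sits at index 0, removeFromUint8Array returns a subarray view over the original backing buffer rather than allocating; any later match is copied into a fresh Uint8Array. A small sketch, assuming the relative import path:

import { removeFromUint8Array } from "./uint8array-helpers"; // path is an assumption

const enc = new TextEncoder();
const closeTags = enc.encode("</body></html>");

// Match at index 0: the result is a view sharing closeTags' backing buffer.
const viaSubarray = removeFromUint8Array(closeTags, enc.encode("</body>"));
console.log(viaSubarray.buffer === closeTags.buffer); // true

// Match later in the array: a new Uint8Array is allocated and filled.
const viaCopy = removeFromUint8Array(closeTags, enc.encode("</html>"));
console.log(viaCopy.buffer === closeTags.buffer); // false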

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/stream-utils/uint8array-helpers.ts"],"names":["indexOfUint8Array","isEquivalentUint8Arrays","removeFromUint8Array","a","b","length","i","completeMatch","j","tagIndex","subarray","removed","Uint8Array","set","slice"],"mappings":"AAAA;;CAEC;;;;;;;;;;;;;;;;IACeA,iBAAiB;eAAjBA;;IA2BAC,uBAAuB;eAAvBA;;IAiBAC,oBAAoB;eAApBA;;;AA5CT,SAASF,kBAAkBG,CAAa,EAAEC,CAAa;IAC5D,IAAIA,EAAEC,MAAM,KAAK,GAAG,OAAO;IAC3B,IAAIF,EAAEE,MAAM,KAAK,KAAKD,EAAEC,MAAM,GAAGF,EAAEE,MAAM,EAAE,OAAO,CAAC;IAEnD,8BAA8B;IAC9B,IAAK,IAAIC,IAAI,GAAGA,KAAKH,EAAEE,MAAM,GAAGD,EAAEC,MAAM,EAAEC,IAAK;QAC7C,IAAIC,gBAAgB;QACpB,6DAA6D;QAC7D,IAAK,IAAIC,IAAI,GAAGA,IAAIJ,EAAEC,MAAM,EAAEG,IAAK;YACjC,2HAA2H;YAC3H,IAAIL,CAAC,CAACG,IAAIE,EAAE,KAAKJ,CAAC,CAACI,EAAE,EAAE;gBACrBD,gBAAgB;gBAChB;YACF;QACF;QAEA,IAAIA,eAAe;YACjB,OAAOD;QACT;IACF;IAEA,OAAO,CAAC;AACV;AAKO,SAASL,wBAAwBE,CAAa,EAAEC,CAAa;IAClE,IAAID,EAAEE,MAAM,KAAKD,EAAEC,MAAM,EAAE,OAAO;IAElC,IAAK,IAAIC,IAAI,GAAGA,IAAIH,EAAEE,MAAM,EAAEC,IAAK;QACjC,IAAIH,CAAC,CAACG,EAAE,KAAKF,CAAC,CAACE,EAAE,EAAE,OAAO;IAC5B;IAEA,OAAO;AACT;AASO,SAASJ,qBAAqBC,CAAa,EAAEC,CAAa;IAC/D,MAAMK,WAAWT,kBAAkBG,GAAGC;IACtC,IAAIK,aAAa,GAAG,OAAON,EAAEO,QAAQ,CAACN,EAAEC,MAAM;IAC9C,IAAII,WAAW,CAAC,GAAG;QACjB,MAAME,UAAU,IAAIC,WAAWT,EAAEE,MAAM,GAAGD,EAAEC,MAAM;QAClDM,QAAQE,GAAG,CAACV,EAAEW,KAAK,CAAC,GAAGL;QACvBE,QAAQE,GAAG,CAACV,EAAEW,KAAK,CAACL,WAAWL,EAAEC,MAAM,GAAGI;QAC1C,OAAOE;IACT,OAAO;QACL,OAAOR;IACT;AACF"}