Initial boilerplate project
32  node_modules/next/dist/esm/server/lib/app-dir-module.js  generated  vendored  Normal file
@@ -0,0 +1,32 @@
import { DEFAULT_SEGMENT_KEY } from "../../shared/lib/segment";
export async function getLayoutOrPageModule(loaderTree) {
    const { layout, page, defaultPage } = loaderTree[2];
    const isLayout = typeof layout !== "undefined";
    const isPage = typeof page !== "undefined";
    const isDefaultPage = typeof defaultPage !== "undefined" && loaderTree[0] === DEFAULT_SEGMENT_KEY;
    let value = undefined;
    let modType = undefined;
    if (isLayout) {
        value = await layout[0]();
        modType = "layout";
    } else if (isPage) {
        value = await page[0]();
        modType = "page";
    } else if (isDefaultPage) {
        value = await defaultPage[0]();
        modType = "page";
    }
    return [
        value,
        modType
    ];
}
export async function getComponentTypeModule(loaderTree, componentType) {
    const { [componentType]: component } = loaderTree[2];
    if (typeof component !== "undefined") {
        return await component[0]();
    }
    return undefined;
}

//# sourceMappingURL=app-dir-module.js.map
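// --- Illustrative usage sketch (editor's addition, not part of the vendored file above) ---
// A minimal loaderTree shaped the way getLayoutOrPageModule reads it:
// [segmentKey, parallelRoutes, { page?: [loader], layout?: [loader], ... }].
// The concrete values below are assumptions chosen only for demonstration.
import { getLayoutOrPageModule } from "./app-dir-module";

const exampleLoaderTree = [
    "dashboard",                                        // loaderTree[0]: segment key
    {},                                                 // loaderTree[1]: parallel routes (unused here)
    { page: [async () => ({ default: () => null })] }   // loaderTree[2]: module loaders
];

const [mod, modType] = await getLayoutOrPageModule(exampleLoaderTree);
console.log(modType); // "page"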
1  node_modules/next/dist/esm/server/lib/app-dir-module.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/lib/app-dir-module.ts"],"names":["DEFAULT_SEGMENT_KEY","getLayoutOrPageModule","loaderTree","layout","page","defaultPage","isLayout","isPage","isDefaultPage","value","undefined","modType","getComponentTypeModule","componentType","component"],"mappings":"AACA,SAASA,mBAAmB,QAAQ,2BAA0B;AAW9D,OAAO,eAAeC,sBAAsBC,UAAsB;IAChE,MAAM,EAAEC,MAAM,EAAEC,IAAI,EAAEC,WAAW,EAAE,GAAGH,UAAU,CAAC,EAAE;IACnD,MAAMI,WAAW,OAAOH,WAAW;IACnC,MAAMI,SAAS,OAAOH,SAAS;IAC/B,MAAMI,gBACJ,OAAOH,gBAAgB,eAAeH,UAAU,CAAC,EAAE,KAAKF;IAE1D,IAAIS,QAAQC;IACZ,IAAIC,UAAyCD;IAE7C,IAAIJ,UAAU;QACZG,QAAQ,MAAMN,MAAM,CAAC,EAAE;QACvBQ,UAAU;IACZ,OAAO,IAAIJ,QAAQ;QACjBE,QAAQ,MAAML,IAAI,CAAC,EAAE;QACrBO,UAAU;IACZ,OAAO,IAAIH,eAAe;QACxBC,QAAQ,MAAMJ,WAAW,CAAC,EAAE;QAC5BM,UAAU;IACZ;IAEA,OAAO;QAACF;QAAOE;KAAQ;AACzB;AAEA,OAAO,eAAeC,uBACpBV,UAAsB,EACtBW,aAAqC;IAErC,MAAM,EAAE,CAACA,cAAc,EAAEC,SAAS,EAAE,GAAGZ,UAAU,CAAC,EAAE;IACpD,IAAI,OAAOY,cAAc,aAAa;QACpC,OAAO,MAAMA,SAAS,CAAC,EAAE;IAC3B;IACA,OAAOJ;AACT"}
50  node_modules/next/dist/esm/server/lib/app-info-log.js  generated  vendored  Normal file
@@ -0,0 +1,50 @@
import { loadEnvConfig } from "@next/env";
import * as Log from "../../build/output/log";
import { bold, purple } from "../../lib/picocolors";
import { PHASE_DEVELOPMENT_SERVER, PHASE_PRODUCTION_BUILD } from "../../shared/lib/constants";
import loadConfig, { getEnabledExperimentalFeatures } from "../config";
export function logStartInfo({ networkUrl, appUrl, envInfo, expFeatureInfo, maxExperimentalFeatures = Infinity }) {
    Log.bootstrap(`${bold(purple(`${Log.prefixes.ready} Next.js ${"14.2.13"}`))}${process.env.TURBOPACK ? " (turbo)" : ""}`);
    if (appUrl) {
        Log.bootstrap(`- Local: ${appUrl}`);
    }
    if (networkUrl) {
        Log.bootstrap(`- Network: ${networkUrl}`);
    }
    if (envInfo == null ? void 0 : envInfo.length) Log.bootstrap(`- Environments: ${envInfo.join(", ")}`);
    if (expFeatureInfo == null ? void 0 : expFeatureInfo.length) {
        Log.bootstrap(`- Experiments (use with caution):`);
        // only show a maximum number of flags
        for (const exp of expFeatureInfo.slice(0, maxExperimentalFeatures)){
            Log.bootstrap(` · ${exp}`);
        }
        /* indicate if there are more than the maximum shown no. flags */ if (expFeatureInfo.length > maxExperimentalFeatures) {
            Log.bootstrap(` · ...`);
        }
    }
    // New line after the bootstrap info
    Log.info("");
}
export async function getStartServerInfo(dir, dev) {
    let expFeatureInfo = [];
    await loadConfig(dev ? PHASE_DEVELOPMENT_SERVER : PHASE_PRODUCTION_BUILD, dir, {
        onLoadUserConfig (userConfig) {
            const userNextConfigExperimental = getEnabledExperimentalFeatures(userConfig.experimental);
            expFeatureInfo = userNextConfigExperimental.sort((a, b)=>a.length - b.length);
        }
    });
    // we need to reset env if we are going to create
    // the worker process with the esm loader so that the
    // initial env state is correct
    let envInfo = [];
    const { loadedEnvFiles } = loadEnvConfig(dir, true, console, false);
    if (loadedEnvFiles.length > 0) {
        envInfo = loadedEnvFiles.map((f)=>f.path);
    }
    return {
        envInfo,
        expFeatureInfo
    };
}

//# sourceMappingURL=app-info-log.js.map
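// --- Illustrative usage sketch (editor's addition, not part of the vendored file above) ---
// Only the option names come from the logStartInfo signature above; the values
// here are placeholders for demonstration.
import { logStartInfo } from "./app-info-log";

logStartInfo({
    appUrl: "http://localhost:3000",          // printed as "- Local:"
    networkUrl: "http://192.168.1.10:3000",   // printed as "- Network:"
    envInfo: [".env.local"],                  // printed as "- Environments:"
    expFeatureInfo: ["ppr"],                  // printed under "- Experiments (use with caution):"
    maxExperimentalFeatures: 3
});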
1  node_modules/next/dist/esm/server/lib/app-info-log.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/lib/app-info-log.ts"],"names":["loadEnvConfig","Log","bold","purple","PHASE_DEVELOPMENT_SERVER","PHASE_PRODUCTION_BUILD","loadConfig","getEnabledExperimentalFeatures","logStartInfo","networkUrl","appUrl","envInfo","expFeatureInfo","maxExperimentalFeatures","Infinity","bootstrap","prefixes","ready","process","env","__NEXT_VERSION","TURBOPACK","length","join","exp","slice","info","getStartServerInfo","dir","dev","onLoadUserConfig","userConfig","userNextConfigExperimental","experimental","sort","a","b","loadedEnvFiles","console","map","f","path"],"mappings":"AAAA,SAASA,aAAa,QAAQ,YAAW;AACzC,YAAYC,SAAS,yBAAwB;AAC7C,SAASC,IAAI,EAAEC,MAAM,QAAQ,uBAAsB;AACnD,SACEC,wBAAwB,EACxBC,sBAAsB,QACjB,6BAA4B;AACnC,OAAOC,cAAcC,8BAA8B,QAAQ,YAAW;AAEtE,OAAO,SAASC,aAAa,EAC3BC,UAAU,EACVC,MAAM,EACNC,OAAO,EACPC,cAAc,EACdC,0BAA0BC,QAAQ,EAOnC;IACCb,IAAIc,SAAS,CACX,CAAC,EAAEb,KACDC,OAAO,CAAC,EAAEF,IAAIe,QAAQ,CAACC,KAAK,CAAC,SAAS,EAAEC,QAAQC,GAAG,CAACC,cAAc,CAAC,CAAC,GACpE,EAAEF,QAAQC,GAAG,CAACE,SAAS,GAAG,aAAa,GAAG,CAAC;IAE/C,IAAIX,QAAQ;QACVT,IAAIc,SAAS,CAAC,CAAC,gBAAgB,EAAEL,OAAO,CAAC;IAC3C;IACA,IAAID,YAAY;QACdR,IAAIc,SAAS,CAAC,CAAC,gBAAgB,EAAEN,WAAW,CAAC;IAC/C;IACA,IAAIE,2BAAAA,QAASW,MAAM,EAAErB,IAAIc,SAAS,CAAC,CAAC,gBAAgB,EAAEJ,QAAQY,IAAI,CAAC,MAAM,CAAC;IAE1E,IAAIX,kCAAAA,eAAgBU,MAAM,EAAE;QAC1BrB,IAAIc,SAAS,CAAC,CAAC,iCAAiC,CAAC;QACjD,sCAAsC;QACtC,KAAK,MAAMS,OAAOZ,eAAea,KAAK,CAAC,GAAGZ,yBAA0B;YAClEZ,IAAIc,SAAS,CAAC,CAAC,IAAI,EAAES,IAAI,CAAC;QAC5B;QACA,+DAA+D,GAC/D,IAAIZ,eAAeU,MAAM,GAAGT,yBAAyB;YACnDZ,IAAIc,SAAS,CAAC,CAAC,OAAO,CAAC;QACzB;IACF;IAEA,oCAAoC;IACpCd,IAAIyB,IAAI,CAAC;AACX;AAEA,OAAO,eAAeC,mBACpBC,GAAW,EACXC,GAAY;IAKZ,IAAIjB,iBAA2B,EAAE;IACjC,MAAMN,WACJuB,MAAMzB,2BAA2BC,wBACjCuB,KACA;QACEE,kBAAiBC,UAAU;YACzB,MAAMC,6BAA6BzB,+BACjCwB,WAAWE,YAAY;YAEzBrB,iBAAiBoB,2BAA2BE,IAAI,CAC9C,CAACC,GAAGC,IAAMD,EAAEb,MAAM,GAAGc,EAAEd,MAAM;QAEjC;IACF;IAGF,iDAAiD;IACjD,qDAAqD;IACrD,+BAA+B;IAC/B,IAAIX,UAAoB,EAAE;IAC1B,MAAM,EAAE0B,cAAc,EAAE,GAAGrC,cAAc4B,KAAK,MAAMU,SAAS;IAC7D,IAAID,eAAef,MAAM,GAAG,GAAG;QAC7BX,UAAU0B,eAAeE,GAAG,CAAC,CAACC,IAAMA,EAAEC,IAAI;IAC5C;IAEA,OAAO;QACL9B;QACAC;IACF;AACF"}
27  node_modules/next/dist/esm/server/lib/cpu-profile.js  generated  vendored  Normal file
@@ -0,0 +1,27 @@
const privateCpuProfileName = process.env.__NEXT_PRIVATE_CPU_PROFILE;
const isCpuProfileEnabled = process.env.NEXT_CPU_PROF || privateCpuProfileName;
if (isCpuProfileEnabled) {
    const { Session } = require("inspector");
    const fs = require("fs");
    const session = new Session();
    session.connect();
    session.post("Profiler.enable");
    session.post("Profiler.start");
    function saveProfile() {
        session.post("Profiler.stop", (error, param)=>{
            if (error) {
                console.error("Cannot generate CPU profiling:", error);
                return;
            }
            // Write profile to disk
            const filename = `${privateCpuProfileName || "CPU.main"}.${Date.now()}.cpuprofile`;
            fs.writeFileSync(`./${filename}`, JSON.stringify(param.profile));
            process.exit(0);
        });
    }
    process.on("SIGINT", saveProfile);
    process.on("SIGTERM", saveProfile);
    process.on("exit", saveProfile);
}

//# sourceMappingURL=cpu-profile.js.map
1  node_modules/next/dist/esm/server/lib/cpu-profile.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/lib/cpu-profile.ts"],"names":["privateCpuProfileName","process","env","__NEXT_PRIVATE_CPU_PROFILE","isCpuProfileEnabled","NEXT_CPU_PROF","Session","require","fs","session","connect","post","saveProfile","error","param","console","filename","Date","now","writeFileSync","JSON","stringify","profile","exit","on"],"mappings":"AAAA,MAAMA,wBAAwBC,QAAQC,GAAG,CAACC,0BAA0B;AACpE,MAAMC,sBAAsBH,QAAQC,GAAG,CAACG,aAAa,IAAIL;AAEzD,IAAII,qBAAqB;IACvB,MAAM,EAAEE,OAAO,EAAE,GAAGC,QAAQ;IAC5B,MAAMC,KAAKD,QAAQ;IAEnB,MAAME,UAAU,IAAIH;IACpBG,QAAQC,OAAO;IAEfD,QAAQE,IAAI,CAAC;IACbF,QAAQE,IAAI,CAAC;IAEb,SAASC;QACPH,QAAQE,IAAI,CAAC,iBAAiB,CAACE,OAAOC;YACpC,IAAID,OAAO;gBACTE,QAAQF,KAAK,CAAC,kCAAkCA;gBAChD;YACF;YAEA,wBAAwB;YACxB,MAAMG,WAAW,CAAC,EAChBhB,yBAAyB,WAC1B,CAAC,EAAEiB,KAAKC,GAAG,GAAG,WAAW,CAAC;YAC3BV,GAAGW,aAAa,CAAC,CAAC,EAAE,EAAEH,SAAS,CAAC,EAAEI,KAAKC,SAAS,CAACP,MAAMQ,OAAO;YAC9DrB,QAAQsB,IAAI,CAAC;QACf;IACF;IACAtB,QAAQuB,EAAE,CAAC,UAAUZ;IACrBX,QAAQuB,EAAE,CAAC,WAAWZ;IACtBX,QAAQuB,EAAE,CAAC,QAAQZ;AACrB"}
48  node_modules/next/dist/esm/server/lib/dev-bundler-service.js  generated  vendored  Normal file
@@ -0,0 +1,48 @@
import { createRequestResponseMocks } from "./mock-request";
/**
 * The DevBundlerService provides an interface to perform tasks with the
 * bundler while in development.
 */ export class DevBundlerService {
    constructor(bundler, handler){
        this.bundler = bundler;
        this.handler = handler;
        this.ensurePage = async (definition)=>{
            // TODO: remove after ensure is pulled out of server
            return await this.bundler.hotReloader.ensurePage(definition);
        };
        this.logErrorWithOriginalStack = async (...args)=>{
            return await this.bundler.logErrorWithOriginalStack(...args);
        };
    }
    async getFallbackErrorComponents(url) {
        await this.bundler.hotReloader.buildFallbackError();
        // Build the error page to ensure the fallback is built too.
        // TODO: See if this can be moved into hotReloader or removed.
        await this.bundler.hotReloader.ensurePage({
            page: "/_error",
            clientOnly: false,
            definition: undefined,
            url
        });
    }
    async getCompilationError(page) {
        const errors = await this.bundler.hotReloader.getCompilationErrors(page);
        if (!errors) return;
        // Return the very first error we found.
        return errors[0];
    }
    async revalidate({ urlPath, revalidateHeaders, opts: revalidateOpts }) {
        const mocked = createRequestResponseMocks({
            url: urlPath,
            headers: revalidateHeaders
        });
        await this.handler(mocked.req, mocked.res);
        await mocked.res.hasStreamed;
        if (mocked.res.getHeader("x-nextjs-cache") !== "REVALIDATED" && !(mocked.res.statusCode === 404 && revalidateOpts.unstable_onlyGenerated)) {
            throw new Error(`Invalid response ${mocked.res.statusCode}`);
        }
        return {};
    }
}

//# sourceMappingURL=dev-bundler-service.js.map
1  node_modules/next/dist/esm/server/lib/dev-bundler-service.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/lib/dev-bundler-service.ts"],"names":["createRequestResponseMocks","DevBundlerService","constructor","bundler","handler","ensurePage","definition","hotReloader","logErrorWithOriginalStack","args","getFallbackErrorComponents","url","buildFallbackError","page","clientOnly","undefined","getCompilationError","errors","getCompilationErrors","revalidate","urlPath","revalidateHeaders","opts","revalidateOpts","mocked","headers","req","res","hasStreamed","getHeader","statusCode","unstable_onlyGenerated","Error"],"mappings":"AAIA,SAASA,0BAA0B,QAAQ,iBAAgB;AAE3D;;;CAGC,GACD,OAAO,MAAMC;IACXC,YACE,AAAiBC,OAAmB,EACpC,AAAiBC,OAA6B,CAC9C;aAFiBD,UAAAA;aACAC,UAAAA;aAGZC,aAAyD,OAC9DC;YAEA,oDAAoD;YACpD,OAAO,MAAM,IAAI,CAACH,OAAO,CAACI,WAAW,CAACF,UAAU,CAACC;QACnD;aAEOE,4BACL,OAAO,GAAGC;YACR,OAAO,MAAM,IAAI,CAACN,OAAO,CAACK,yBAAyB,IAAIC;QACzD;IAZC;IAcH,MAAaC,2BAA2BC,GAAY,EAAE;QACpD,MAAM,IAAI,CAACR,OAAO,CAACI,WAAW,CAACK,kBAAkB;QACjD,4DAA4D;QAC5D,8DAA8D;QAC9D,MAAM,IAAI,CAACT,OAAO,CAACI,WAAW,CAACF,UAAU,CAAC;YACxCQ,MAAM;YACNC,YAAY;YACZR,YAAYS;YACZJ;QACF;IACF;IAEA,MAAaK,oBAAoBH,IAAY,EAAE;QAC7C,MAAMI,SAAS,MAAM,IAAI,CAACd,OAAO,CAACI,WAAW,CAACW,oBAAoB,CAACL;QACnE,IAAI,CAACI,QAAQ;QAEb,wCAAwC;QACxC,OAAOA,MAAM,CAAC,EAAE;IAClB;IAEA,MAAaE,WAAW,EACtBC,OAAO,EACPC,iBAAiB,EACjBC,MAAMC,cAAc,EAKrB,EAAE;QACD,MAAMC,SAASxB,2BAA2B;YACxCW,KAAKS;YACLK,SAASJ;QACX;QAEA,MAAM,IAAI,CAACjB,OAAO,CAACoB,OAAOE,GAAG,EAAEF,OAAOG,GAAG;QACzC,MAAMH,OAAOG,GAAG,CAACC,WAAW;QAE5B,IACEJ,OAAOG,GAAG,CAACE,SAAS,CAAC,sBAAsB,iBAC3C,CAAEL,CAAAA,OAAOG,GAAG,CAACG,UAAU,KAAK,OAAOP,eAAeQ,sBAAsB,AAAD,GACvE;YACA,MAAM,IAAIC,MAAM,CAAC,iBAAiB,EAAER,OAAOG,GAAG,CAACG,UAAU,CAAC,CAAC;QAC7D;QAEA,OAAO,CAAC;IACV;AACF"}
34  node_modules/next/dist/esm/server/lib/etag.js  generated  vendored  Normal file
@@ -0,0 +1,34 @@
/**
 * FNV-1a Hash implementation
 * @author Travis Webb (tjwebb) <me@traviswebb.com>
 *
 * Ported from https://github.com/tjwebb/fnv-plus/blob/master/index.js
 *
 * Simplified, optimized, and modified for a 52-bit output, which provides a larger hash space
 * while still making use of JavaScript's 53-bit integer space.
 */ export const fnv1a52 = (str)=>{
    const len = str.length;
    let i = 0, t0 = 0, v0 = 0x2325, t1 = 0, v1 = 0x8422, t2 = 0, v2 = 0x9ce4, t3 = 0, v3 = 0xcbf2;
    while(i < len){
        v0 ^= str.charCodeAt(i++);
        t0 = v0 * 435;
        t1 = v1 * 435;
        t2 = v2 * 435;
        t3 = v3 * 435;
        t2 += v0 << 8;
        t3 += v1 << 8;
        t1 += t0 >>> 16;
        v0 = t0 & 65535;
        t2 += t1 >>> 16;
        v1 = t1 & 65535;
        v3 = t3 + (t2 >>> 16) & 65535;
        v2 = t2 & 65535;
    }
    return (v3 & 15) * 281474976710656 + v2 * 4294967296 + v1 * 65536 + (v0 ^ v3 >> 4);
};
export const generateETag = (payload, weak = false)=>{
    const prefix = weak ? 'W/"' : '"';
    return prefix + fnv1a52(payload).toString(36) + payload.length.toString(36) + '"';
};

//# sourceMappingURL=etag.js.map
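// --- Illustrative usage sketch (editor's addition, not part of the vendored file above) ---
// generateETag hashes the payload with fnv1a52 and appends the payload length,
// both rendered in base 36; weak ETags get a W/ prefix.
import { fnv1a52, generateETag } from "./etag";

const body = "<html>hello</html>";
console.log(fnv1a52(body));             // 52-bit numeric hash of the payload
console.log(generateETag(body));        // '"<hash-base36><length-base36>"'
console.log(generateETag(body, true));  // same value with a W/ prefix (weak ETag)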
1  node_modules/next/dist/esm/server/lib/etag.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/lib/etag.ts"],"names":["fnv1a52","str","len","length","i","t0","v0","t1","v1","t2","v2","t3","v3","charCodeAt","generateETag","payload","weak","prefix","toString"],"mappings":"AAAA;;;;;;;;CAQC,GACD,OAAO,MAAMA,UAAU,CAACC;IACtB,MAAMC,MAAMD,IAAIE,MAAM;IACtB,IAAIC,IAAI,GACNC,KAAK,GACLC,KAAK,QACLC,KAAK,GACLC,KAAK,QACLC,KAAK,GACLC,KAAK,QACLC,KAAK,GACLC,KAAK;IAEP,MAAOR,IAAIF,IAAK;QACdI,MAAML,IAAIY,UAAU,CAACT;QACrBC,KAAKC,KAAK;QACVC,KAAKC,KAAK;QACVC,KAAKC,KAAK;QACVC,KAAKC,KAAK;QACVH,MAAMH,MAAM;QACZK,MAAMH,MAAM;QACZD,MAAMF,OAAO;QACbC,KAAKD,KAAK;QACVI,MAAMF,OAAO;QACbC,KAAKD,KAAK;QACVK,KAAK,AAACD,KAAMF,CAAAA,OAAO,EAAC,IAAM;QAC1BC,KAAKD,KAAK;IACZ;IAEA,OACE,AAACG,CAAAA,KAAK,EAAC,IAAK,kBACZF,KAAK,aACLF,KAAK,QACJF,CAAAA,KAAMM,MAAM,CAAC;AAElB,EAAC;AAED,OAAO,MAAME,eAAe,CAACC,SAAiBC,OAAO,KAAK;IACxD,MAAMC,SAASD,OAAO,QAAQ;IAC9B,OACEC,SAASjB,QAAQe,SAASG,QAAQ,CAAC,MAAMH,QAAQZ,MAAM,CAACe,QAAQ,CAAC,MAAM;AAE3E,EAAC"}
103  node_modules/next/dist/esm/server/lib/find-page-file.js  generated  vendored  Normal file
@@ -0,0 +1,103 @@
import { fileExists } from "../../lib/file-exists";
import { getPagePaths } from "../../shared/lib/page-path/get-page-paths";
import { nonNullable } from "../../lib/non-nullable";
import { join, sep, normalize } from "path";
import { promises as fsPromises } from "fs";
import { warn } from "../../build/output/log";
import { cyan } from "../../lib/picocolors";
import { isMetadataRouteFile } from "../../lib/metadata/is-metadata-route";
async function isTrueCasePagePath(pagePath, pagesDir) {
    const pageSegments = normalize(pagePath).split(sep).filter(Boolean);
    const segmentExistsPromises = pageSegments.map(async (segment, i)=>{
        const segmentParentDir = join(pagesDir, ...pageSegments.slice(0, i));
        const parentDirEntries = await fsPromises.readdir(segmentParentDir);
        return parentDirEntries.includes(segment);
    });
    return (await Promise.all(segmentExistsPromises)).every(Boolean);
}
/**
 * Finds a page file with the given parameters. If the page is duplicated with
 * multiple extensions a warning is logged and the first match wins; the function
 * returns the *relative* path to the page file, or null if it is not found.
 *
 * @param pagesDir Absolute path to the pages folder with trailing `/pages`.
 * @param normalizedPagePath The page normalized (it will be denormalized).
 * @param pageExtensions Array of page extensions.
 */ export async function findPageFile(pagesDir, normalizedPagePath, pageExtensions, isAppDir) {
    const pagePaths = getPagePaths(normalizedPagePath, pageExtensions, isAppDir);
    const [existingPath, ...others] = (await Promise.all(pagePaths.map(async (path)=>{
        const filePath = join(pagesDir, path);
        try {
            return await fileExists(filePath) ? path : null;
        } catch (err) {
            var _err_code;
            if (!(err == null ? void 0 : (_err_code = err.code) == null ? void 0 : _err_code.includes("ENOTDIR"))) throw err;
        }
        return null;
    }))).filter(nonNullable);
    if (!existingPath) {
        return null;
    }
    if (!await isTrueCasePagePath(existingPath, pagesDir)) {
        return null;
    }
    if (others.length > 0) {
        warn(`Duplicate page detected. ${cyan(join("pages", existingPath))} and ${cyan(join("pages", others[0]))} both resolve to ${cyan(normalizedPagePath)}.`);
    }
    return existingPath;
}
/**
 *
 * createValidFileMatcher receives configured page extensions and returns helpers to determine:
 * `isLayoutsLeafPage`: if a file is a valid page file or routes file under app directory
 * `isTrackedFiles`: if it's a tracked file for webpack watcher
 *
 */ export function createValidFileMatcher(pageExtensions, appDirPath) {
    const getExtensionRegexString = (extensions)=>`(?:${extensions.join("|")})`;
    const validExtensionFileRegex = new RegExp("\\." + getExtensionRegexString(pageExtensions) + "$");
    const leafOnlyPageFileRegex = new RegExp(`(^(page|route)|[\\\\/](page|route))\\.${getExtensionRegexString(pageExtensions)}$`);
    const rootNotFoundFileRegex = new RegExp(`^not-found\\.${getExtensionRegexString(pageExtensions)}$`);
    /** TODO-METADATA: support other metadata routes
     * regex for:
     *
     * /robots.txt|<ext>
     * /sitemap.xml|<ext>
     * /favicon.ico
     * /manifest.json|<ext>
     * <route>/icon.png|jpg|<ext>
     * <route>/apple-touch-icon.png|jpg|<ext>
     *
     */ /**
     * Match the file if it's a metadata route file, static: if the file is a static metadata file.
     * It needs to be a file which doesn't match the custom metadata routes e.g. `app/robots.txt/route.js`
     */ function isMetadataFile(filePath) {
        const appDirRelativePath = appDirPath ? filePath.replace(appDirPath, "") : filePath;
        return isMetadataRouteFile(appDirRelativePath, pageExtensions, true);
    }
    // Determine if the file is leaf node page file or route file under layouts,
    // 'page.<extension>' | 'route.<extension>'
    function isAppRouterPage(filePath) {
        return leafOnlyPageFileRegex.test(filePath) || isMetadataFile(filePath);
    }
    function isPageFile(filePath) {
        return validExtensionFileRegex.test(filePath) || isMetadataFile(filePath);
    }
    function isRootNotFound(filePath) {
        if (!appDirPath) {
            return false;
        }
        if (!filePath.startsWith(appDirPath + sep)) {
            return false;
        }
        const rest = filePath.slice(appDirPath.length + 1);
        return rootNotFoundFileRegex.test(rest);
    }
    return {
        isPageFile,
        isAppRouterPage,
        isMetadataFile,
        isRootNotFound
    };
}

//# sourceMappingURL=find-page-file.js.map
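// --- Illustrative usage sketch (editor's addition, not part of the vendored file above) ---
// The extension list and paths below are placeholders chosen for demonstration.
import { createValidFileMatcher } from "./find-page-file";

const matcher = createValidFileMatcher(["js", "jsx", "ts", "tsx"], "/project/app");
console.log(matcher.isAppRouterPage("/project/app/dashboard/page.tsx"));   // true  (leaf page file)
console.log(matcher.isAppRouterPage("/project/app/dashboard/layout.tsx")); // false (not a page/route leaf)
console.log(matcher.isPageFile("/project/app/dashboard/layout.tsx"));      // true  (valid page extension)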
1  node_modules/next/dist/esm/server/lib/find-page-file.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/lib/find-page-file.ts"],"names":["fileExists","getPagePaths","nonNullable","join","sep","normalize","promises","fsPromises","warn","cyan","isMetadataRouteFile","isTrueCasePagePath","pagePath","pagesDir","pageSegments","split","filter","Boolean","segmentExistsPromises","map","segment","i","segmentParentDir","slice","parentDirEntries","readdir","includes","Promise","all","every","findPageFile","normalizedPagePath","pageExtensions","isAppDir","pagePaths","existingPath","others","path","filePath","err","code","length","createValidFileMatcher","appDirPath","getExtensionRegexString","extensions","validExtensionFileRegex","RegExp","leafOnlyPageFileRegex","rootNotFoundFileRegex","isMetadataFile","appDirRelativePath","replace","isAppRouterPage","test","isPageFile","isRootNotFound","startsWith","rest"],"mappings":"AAAA,SAASA,UAAU,QAAQ,wBAAuB;AAClD,SAASC,YAAY,QAAQ,4CAA2C;AACxE,SAASC,WAAW,QAAQ,yBAAwB;AACpD,SAASC,IAAI,EAAEC,GAAG,EAAEC,SAAS,QAAQ,OAAM;AAC3C,SAASC,YAAYC,UAAU,QAAQ,KAAI;AAC3C,SAASC,IAAI,QAAQ,yBAAwB;AAC7C,SAASC,IAAI,QAAQ,uBAAsB;AAC3C,SAASC,mBAAmB,QAAQ,uCAAsC;AAG1E,eAAeC,mBAAmBC,QAAgB,EAAEC,QAAgB;IAClE,MAAMC,eAAeT,UAAUO,UAAUG,KAAK,CAACX,KAAKY,MAAM,CAACC;IAC3D,MAAMC,wBAAwBJ,aAAaK,GAAG,CAAC,OAAOC,SAASC;QAC7D,MAAMC,mBAAmBnB,KAAKU,aAAaC,aAAaS,KAAK,CAAC,GAAGF;QACjE,MAAMG,mBAAmB,MAAMjB,WAAWkB,OAAO,CAACH;QAClD,OAAOE,iBAAiBE,QAAQ,CAACN;IACnC;IAEA,OAAO,AAAC,CAAA,MAAMO,QAAQC,GAAG,CAACV,sBAAqB,EAAGW,KAAK,CAACZ;AAC1D;AAEA;;;;;;;;CAQC,GACD,OAAO,eAAea,aACpBjB,QAAgB,EAChBkB,kBAA0B,EAC1BC,cAA8B,EAC9BC,QAAiB;IAEjB,MAAMC,YAAYjC,aAAa8B,oBAAoBC,gBAAgBC;IACnE,MAAM,CAACE,cAAc,GAAGC,OAAO,GAAG,AAChC,CAAA,MAAMT,QAAQC,GAAG,CACfM,UAAUf,GAAG,CAAC,OAAOkB;QACnB,MAAMC,WAAWnC,KAAKU,UAAUwB;QAChC,IAAI;YACF,OAAO,AAAC,MAAMrC,WAAWsC,YAAaD,OAAO;QAC/C,EAAE,OAAOE,KAAU;gBACZA;YAAL,IAAI,EAACA,wBAAAA,YAAAA,IAAKC,IAAI,qBAATD,UAAWb,QAAQ,CAAC,aAAY,MAAMa;QAC7C;QACA,OAAO;IACT,GACF,EACAvB,MAAM,CAACd;IAET,IAAI,CAACiC,cAAc;QACjB,OAAO;IACT;IAEA,IAAI,CAAE,MAAMxB,mBAAmBwB,cAActB,WAAY;QACvD,OAAO;IACT;IAEA,IAAIuB,OAAOK,MAAM,GAAG,GAAG;QACrBjC,KACE,CAAC,yBAAyB,EAAEC,KAAKN,KAAK,SAASgC,eAAe,KAAK,EAAE1B,KACnEN,KAAK,SAASiC,MAAM,CAAC,EAAE,GACvB,iBAAiB,EAAE3B,KAAKsB,oBAAoB,CAAC,CAAC;IAEpD;IAEA,OAAOI;AACT;AAEA;;;;;;CAMC,GACD,OAAO,SAASO,uBACdV,cAA8B,EAC9BW,UAA8B;IAE9B,MAAMC,0BAA0B,CAACC,aAC/B,CAAC,GAAG,EAAEA,WAAW1C,IAAI,CAAC,KAAK,CAAC,CAAC;IAE/B,MAAM2C,0BAA0B,IAAIC,OAClC,QAAQH,wBAAwBZ,kBAAkB;IAEpD,MAAMgB,wBAAwB,IAAID,OAChC,CAAC,sCAAsC,EAAEH,wBACvCZ,gBACA,CAAC,CAAC;IAEN,MAAMiB,wBAAwB,IAAIF,OAChC,CAAC,aAAa,EAAEH,wBAAwBZ,gBAAgB,CAAC,CAAC;IAE5D;;;;;;;;;;GAUC,GAED;;;GAGC,GACD,SAASkB,eAAeZ,QAAgB;QACtC,MAAMa,qBAAqBR,aACvBL,SAASc,OAAO,CAACT,YAAY,MAC7BL;QAEJ,OAAO5B,oBAAoByC,oBAAoBnB,gBAAgB;IACjE;IAEA,4EAA4E;IAC5E,2CAA2C;IAC3C,SAASqB,gBAAgBf,QAAgB;QACvC,OAAOU,sBAAsBM,IAAI,CAAChB,aAAaY,eAAeZ;IAChE;IAEA,SAASiB,WAAWjB,QAAgB;QAClC,OAAOQ,wBAAwBQ,IAAI,CAAChB,aAAaY,eAAeZ;IAClE;IAEA,SAASkB,eAAelB,QAAgB;QACtC,IAAI,CAACK,YAAY;YACf,OAAO;QACT;QACA,IAAI,CAACL,SAASmB,UAAU,CAACd,aAAavC,MAAM;YAC1C,OAAO;QACT;QACA,MAAMsD,OAAOpB,SAASf,KAAK,CAACoB,WAAWF,MAAM,GAAG;QAChD,OAAOQ,sBAAsBK,IAAI,CAACI;IACpC;IAEA,OAAO;QACLH;QACAF;QACAH;QACAM;IACF;AACF"}
11  node_modules/next/dist/esm/server/lib/format-hostname.js  generated  vendored  Normal file
@@ -0,0 +1,11 @@
import { isIPv6 } from "./is-ipv6";
/**
 * Formats a hostname so that it is a valid host that can be fetched by wrapping
 * IPv6 hosts with brackets.
 * @param hostname
 * @returns
 */ export function formatHostname(hostname) {
    return isIPv6(hostname) ? `[${hostname}]` : hostname;
}

//# sourceMappingURL=format-hostname.js.map
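// --- Illustrative usage sketch (editor's addition, not part of the vendored file above) ---
import { formatHostname } from "./format-hostname";

console.log(formatHostname("localhost")); // "localhost" (returned unchanged)
console.log(formatHostname("::1"));       // "[::1]" (IPv6 host wrapped in brackets for URLs)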
1  node_modules/next/dist/esm/server/lib/format-hostname.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/lib/format-hostname.ts"],"names":["isIPv6","formatHostname","hostname"],"mappings":"AAAA,SAASA,MAAM,QAAQ,YAAW;AAElC;;;;;CAKC,GACD,OAAO,SAASC,eAAeC,QAAgB;IAC7C,OAAOF,OAAOE,YAAY,CAAC,CAAC,EAAEA,SAAS,CAAC,CAAC,GAAGA;AAC9C"}
29  node_modules/next/dist/esm/server/lib/incremental-cache-server.js  generated  vendored  Normal file
@@ -0,0 +1,29 @@
import { createIpcServer } from "./server-ipc";
import { IncrementalCache } from "./incremental-cache";
let initializeResult;
export async function initialize(...constructorArgs) {
    const incrementalCache = new IncrementalCache(...constructorArgs);
    const { ipcPort, ipcValidationKey } = await createIpcServer({
        async revalidateTag (...args) {
            return incrementalCache.revalidateTag(...args);
        },
        async get (...args) {
            return incrementalCache.get(...args);
        },
        async set (...args) {
            return incrementalCache.set(...args);
        },
        async lock (...args) {
            return incrementalCache.lock(...args);
        },
        async unlock (...args) {
            return incrementalCache.unlock(...args);
        }
    });
    return {
        ipcPort,
        ipcValidationKey
    };
}

//# sourceMappingURL=incremental-cache-server.js.map
1  node_modules/next/dist/esm/server/lib/incremental-cache-server.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/lib/incremental-cache-server.ts"],"names":["createIpcServer","IncrementalCache","initializeResult","initialize","constructorArgs","incrementalCache","ipcPort","ipcValidationKey","revalidateTag","args","get","set","lock","unlock"],"mappings":"AAAA,SAASA,eAAe,QAAQ,eAAc;AAC9C,SAASC,gBAAgB,QAAQ,sBAAqB;AAEtD,IAAIC;AAOJ,OAAO,eAAeC,WACpB,GAAGC,eAA+D;IAElE,MAAMC,mBAAmB,IAAIJ,oBAAoBG;IAEjD,MAAM,EAAEE,OAAO,EAAEC,gBAAgB,EAAE,GAAG,MAAMP,gBAAgB;QAC1D,MAAMQ,eACJ,GAAGC,IAAmD;YAEtD,OAAOJ,iBAAiBG,aAAa,IAAIC;QAC3C;QAEA,MAAMC,KAAI,GAAGD,IAAyC;YACpD,OAAOJ,iBAAiBK,GAAG,IAAID;QACjC;QAEA,MAAME,KAAI,GAAGF,IAAyC;YACpD,OAAOJ,iBAAiBM,GAAG,IAAIF;QACjC;QAEA,MAAMG,MAAK,GAAGH,IAA0C;YACtD,OAAOJ,iBAAiBO,IAAI,IAAIH;QAClC;QAEA,MAAMI,QAAO,GAAGJ,IAA4C;YAC1D,OAAOJ,iBAAiBQ,MAAM,IAAIJ;QACpC;IACF;IAEA,OAAO;QACLH;QACAC;IACF;AACF"}
305  node_modules/next/dist/esm/server/lib/incremental-cache/fetch-cache.js  generated  vendored  Normal file
@@ -0,0 +1,305 @@
import LRUCache from "next/dist/compiled/lru-cache";
import { CACHE_ONE_YEAR, NEXT_CACHE_SOFT_TAGS_HEADER } from "../../../lib/constants";
let rateLimitedUntil = 0;
let memoryCache;
const CACHE_TAGS_HEADER = "x-vercel-cache-tags";
const CACHE_HEADERS_HEADER = "x-vercel-sc-headers";
const CACHE_STATE_HEADER = "x-vercel-cache-state";
const CACHE_REVALIDATE_HEADER = "x-vercel-revalidate";
const CACHE_FETCH_URL_HEADER = "x-vercel-cache-item-name";
const CACHE_CONTROL_VALUE_HEADER = "x-vercel-cache-control";
const DEBUG = Boolean(process.env.NEXT_PRIVATE_DEBUG_CACHE);
async function fetchRetryWithTimeout(url, init, retryIndex = 0) {
    const controller = new AbortController();
    const timeout = setTimeout(()=>{
        controller.abort();
    }, 500);
    return fetch(url, {
        ...init || {},
        signal: controller.signal
    }).catch((err)=>{
        if (retryIndex === 3) {
            throw err;
        } else {
            if (DEBUG) {
                console.log(`Fetch failed for ${url} retry ${retryIndex}`);
            }
            return fetchRetryWithTimeout(url, init, retryIndex + 1);
        }
    }).finally(()=>{
        clearTimeout(timeout);
    });
}
export default class FetchCache {
    hasMatchingTags(arr1, arr2) {
        if (arr1.length !== arr2.length) return false;
        const set1 = new Set(arr1);
        const set2 = new Set(arr2);
        if (set1.size !== set2.size) return false;
        for (let tag of set1){
            if (!set2.has(tag)) return false;
        }
        return true;
    }
    static isAvailable(ctx) {
        return !!(ctx._requestHeaders["x-vercel-sc-host"] || process.env.SUSPENSE_CACHE_URL);
    }
    constructor(ctx){
        this.headers = {};
        this.headers["Content-Type"] = "application/json";
        if (CACHE_HEADERS_HEADER in ctx._requestHeaders) {
            const newHeaders = JSON.parse(ctx._requestHeaders[CACHE_HEADERS_HEADER]);
            for(const k in newHeaders){
                this.headers[k] = newHeaders[k];
            }
            delete ctx._requestHeaders[CACHE_HEADERS_HEADER];
        }
        const scHost = ctx._requestHeaders["x-vercel-sc-host"] || process.env.SUSPENSE_CACHE_URL;
        const scBasePath = ctx._requestHeaders["x-vercel-sc-basepath"] || process.env.SUSPENSE_CACHE_BASEPATH;
        if (process.env.SUSPENSE_CACHE_AUTH_TOKEN) {
            this.headers["Authorization"] = `Bearer ${process.env.SUSPENSE_CACHE_AUTH_TOKEN}`;
        }
        if (scHost) {
            const scProto = process.env.SUSPENSE_CACHE_PROTO || "https";
            this.cacheEndpoint = `${scProto}://${scHost}${scBasePath || ""}`;
            if (DEBUG) {
                console.log("using cache endpoint", this.cacheEndpoint);
            }
        } else if (DEBUG) {
            console.log("no cache endpoint available");
        }
        if (ctx.maxMemoryCacheSize) {
            if (!memoryCache) {
                if (DEBUG) {
                    console.log("using memory store for fetch cache");
                }
                memoryCache = new LRUCache({
                    max: ctx.maxMemoryCacheSize,
                    length ({ value }) {
                        var _JSON_stringify;
                        if (!value) {
                            return 25;
                        } else if (value.kind === "REDIRECT") {
                            return JSON.stringify(value.props).length;
                        } else if (value.kind === "IMAGE") {
                            throw new Error("invariant image should not be incremental-cache");
                        } else if (value.kind === "FETCH") {
                            return JSON.stringify(value.data || "").length;
                        } else if (value.kind === "ROUTE") {
                            return value.body.length;
                        }
                        // rough estimate of size of cache value
                        return value.html.length + (((_JSON_stringify = JSON.stringify(value.kind === "PAGE" && value.pageData)) == null ? void 0 : _JSON_stringify.length) || 0);
                    }
                });
            }
        } else {
            if (DEBUG) {
                console.log("not using memory store for fetch cache");
            }
        }
    }
    resetRequestCache() {
        memoryCache == null ? void 0 : memoryCache.reset();
    }
    async revalidateTag(...args) {
        let [tags] = args;
        tags = typeof tags === "string" ? [
            tags
        ] : tags;
        if (DEBUG) {
            console.log("revalidateTag", tags);
        }
        if (!tags.length) return;
        if (Date.now() < rateLimitedUntil) {
            if (DEBUG) {
                console.log("rate limited ", rateLimitedUntil);
            }
            return;
        }
        for(let i = 0; i < Math.ceil(tags.length / 64); i++){
            const currentTags = tags.slice(i * 64, i * 64 + 64);
            try {
                const res = await fetchRetryWithTimeout(`${this.cacheEndpoint}/v1/suspense-cache/revalidate?tags=${currentTags.map((tag)=>encodeURIComponent(tag)).join(",")}`, {
                    method: "POST",
                    headers: this.headers,
                    // @ts-expect-error not on public type
                    next: {
                        internal: true
                    }
                });
                if (res.status === 429) {
                    const retryAfter = res.headers.get("retry-after") || "60000";
                    rateLimitedUntil = Date.now() + parseInt(retryAfter);
                }
                if (!res.ok) {
                    throw new Error(`Request failed with status ${res.status}.`);
                }
            } catch (err) {
                console.warn(`Failed to revalidate tag`, currentTags, err);
            }
        }
    }
    async get(...args) {
        var _data_value;
        const [key, ctx = {}] = args;
        const { tags, softTags, kindHint, fetchIdx, fetchUrl } = ctx;
        if (kindHint !== "fetch") {
            return null;
        }
        if (Date.now() < rateLimitedUntil) {
            if (DEBUG) {
                console.log("rate limited");
            }
            return null;
        }
        // memory cache is cleared at the end of each request
        // so that revalidate events are pulled from upstream
        // on successive requests
        let data = memoryCache == null ? void 0 : memoryCache.get(key);
        const hasFetchKindAndMatchingTags = (data == null ? void 0 : (_data_value = data.value) == null ? void 0 : _data_value.kind) === "FETCH" && this.hasMatchingTags(tags ?? [], data.value.tags ?? []);
        // Get data from fetch cache. Also check if new tags have been
        // specified with the same cache key (fetch URL)
        if (this.cacheEndpoint && (!data || !hasFetchKindAndMatchingTags)) {
            try {
                const start = Date.now();
                const fetchParams = {
                    internal: true,
                    fetchType: "cache-get",
                    fetchUrl: fetchUrl,
                    fetchIdx
                };
                const res = await fetch(`${this.cacheEndpoint}/v1/suspense-cache/${key}`, {
                    method: "GET",
                    headers: {
                        ...this.headers,
                        [CACHE_FETCH_URL_HEADER]: fetchUrl,
                        [CACHE_TAGS_HEADER]: (tags == null ? void 0 : tags.join(",")) || "",
                        [NEXT_CACHE_SOFT_TAGS_HEADER]: (softTags == null ? void 0 : softTags.join(",")) || ""
                    },
                    next: fetchParams
                });
                if (res.status === 429) {
                    const retryAfter = res.headers.get("retry-after") || "60000";
                    rateLimitedUntil = Date.now() + parseInt(retryAfter);
                }
                if (res.status === 404) {
                    if (DEBUG) {
                        console.log(`no fetch cache entry for ${key}, duration: ${Date.now() - start}ms`);
                    }
                    return null;
                }
                if (!res.ok) {
                    console.error(await res.text());
                    throw new Error(`invalid response from cache ${res.status}`);
                }
                const cached = await res.json();
                if (!cached || cached.kind !== "FETCH") {
                    DEBUG && console.log({
                        cached
                    });
                    throw new Error("invalid cache value");
                }
                // if new tags were specified, merge those tags to the existing tags
                if (cached.kind === "FETCH") {
                    cached.tags ??= [];
                    for (const tag of tags ?? []){
                        if (!cached.tags.includes(tag)) {
                            cached.tags.push(tag);
                        }
                    }
                }
                const cacheState = res.headers.get(CACHE_STATE_HEADER);
                const age = res.headers.get("age");
                data = {
                    value: cached,
                    // if it's already stale set it to a time in the past
                    // if not derive last modified from age
                    lastModified: cacheState !== "fresh" ? Date.now() - CACHE_ONE_YEAR : Date.now() - parseInt(age || "0", 10) * 1000
                };
                if (DEBUG) {
                    console.log(`got fetch cache entry for ${key}, duration: ${Date.now() - start}ms, size: ${Object.keys(cached).length}, cache-state: ${cacheState} tags: ${tags == null ? void 0 : tags.join(",")} softTags: ${softTags == null ? void 0 : softTags.join(",")}`);
                }
                if (data) {
                    memoryCache == null ? void 0 : memoryCache.set(key, data);
                }
            } catch (err) {
                // unable to get data from fetch-cache
                if (DEBUG) {
                    console.error(`Failed to get from fetch-cache`, err);
                }
            }
        }
        return data || null;
    }
    async set(...args) {
        const [key, data, ctx] = args;
        const { fetchCache, fetchIdx, fetchUrl, tags } = ctx;
        if (!fetchCache) return;
        if (Date.now() < rateLimitedUntil) {
            if (DEBUG) {
                console.log("rate limited");
            }
            return;
        }
        memoryCache == null ? void 0 : memoryCache.set(key, {
            value: data,
            lastModified: Date.now()
        });
        if (this.cacheEndpoint) {
            try {
                const start = Date.now();
                if (data !== null && "revalidate" in data) {
                    this.headers[CACHE_REVALIDATE_HEADER] = data.revalidate.toString();
                }
                if (!this.headers[CACHE_REVALIDATE_HEADER] && data !== null && "data" in data) {
                    this.headers[CACHE_CONTROL_VALUE_HEADER] = data.data.headers["cache-control"];
                }
                const body = JSON.stringify({
                    ...data,
                    // we send the tags in the header instead
                    // of in the body here
                    tags: undefined
                });
                if (DEBUG) {
                    console.log("set cache", key);
                }
                const fetchParams = {
                    internal: true,
                    fetchType: "cache-set",
                    fetchUrl,
                    fetchIdx
                };
                const res = await fetch(`${this.cacheEndpoint}/v1/suspense-cache/${key}`, {
                    method: "POST",
                    headers: {
                        ...this.headers,
                        [CACHE_FETCH_URL_HEADER]: fetchUrl || "",
                        [CACHE_TAGS_HEADER]: (tags == null ? void 0 : tags.join(",")) || ""
                    },
                    body: body,
                    next: fetchParams
                });
                if (res.status === 429) {
                    const retryAfter = res.headers.get("retry-after") || "60000";
                    rateLimitedUntil = Date.now() + parseInt(retryAfter);
                }
                if (!res.ok) {
                    DEBUG && console.log(await res.text());
                    throw new Error(`invalid response ${res.status}`);
                }
                if (DEBUG) {
                    console.log(`successfully set to fetch-cache for ${key}, duration: ${Date.now() - start}ms, size: ${body.length}`);
                }
            } catch (err) {
                // unable to set to fetch-cache
                if (DEBUG) {
                    console.error(`Failed to update fetch cache`, err);
                }
            }
        }
        return;
    }
}

//# sourceMappingURL=fetch-cache.js.map
1  node_modules/next/dist/esm/server/lib/incremental-cache/fetch-cache.js.map  generated  vendored  Normal file
File diff suppressed because one or more lines are too long
307  node_modules/next/dist/esm/server/lib/incremental-cache/file-system-cache.js  generated  vendored  Normal file
@@ -0,0 +1,307 @@
import LRUCache from "next/dist/compiled/lru-cache";
import path from "../../../shared/lib/isomorphic/path";
import { NEXT_CACHE_TAGS_HEADER, NEXT_DATA_SUFFIX, NEXT_META_SUFFIX, RSC_PREFETCH_SUFFIX, RSC_SUFFIX } from "../../../lib/constants";
let memoryCache;
let tagsManifest;
export default class FileSystemCache {
    constructor(ctx){
        this.fs = ctx.fs;
        this.flushToDisk = ctx.flushToDisk;
        this.serverDistDir = ctx.serverDistDir;
        this.appDir = !!ctx._appDir;
        this.pagesDir = !!ctx._pagesDir;
        this.revalidatedTags = ctx.revalidatedTags;
        this.experimental = ctx.experimental;
        this.debug = !!process.env.NEXT_PRIVATE_DEBUG_CACHE;
        if (ctx.maxMemoryCacheSize && !memoryCache) {
            if (this.debug) {
                console.log("using memory store for fetch cache");
            }
            memoryCache = new LRUCache({
                max: ctx.maxMemoryCacheSize,
                length ({ value }) {
                    var _JSON_stringify;
                    if (!value) {
                        return 25;
                    } else if (value.kind === "REDIRECT") {
                        return JSON.stringify(value.props).length;
                    } else if (value.kind === "IMAGE") {
                        throw new Error("invariant image should not be incremental-cache");
                    } else if (value.kind === "FETCH") {
                        return JSON.stringify(value.data || "").length;
                    } else if (value.kind === "ROUTE") {
                        return value.body.length;
                    }
                    // rough estimate of size of cache value
                    return value.html.length + (((_JSON_stringify = JSON.stringify(value.pageData)) == null ? void 0 : _JSON_stringify.length) || 0);
                }
            });
        } else if (this.debug) {
            console.log("not using memory store for fetch cache");
        }
        if (this.serverDistDir && this.fs) {
            this.tagsManifestPath = path.join(this.serverDistDir, "..", "cache", "fetch-cache", "tags-manifest.json");
            this.loadTagsManifest();
        }
    }
    resetRequestCache() {}
    loadTagsManifest() {
        if (!this.tagsManifestPath || !this.fs || tagsManifest) return;
        try {
            tagsManifest = JSON.parse(this.fs.readFileSync(this.tagsManifestPath, "utf8"));
        } catch (err) {
            tagsManifest = {
                version: 1,
                items: {}
            };
        }
        if (this.debug) console.log("loadTagsManifest", tagsManifest);
    }
    async revalidateTag(...args) {
        let [tags] = args;
        tags = typeof tags === "string" ? [
            tags
        ] : tags;
        if (this.debug) {
            console.log("revalidateTag", tags);
        }
        if (tags.length === 0) {
            return;
        }
        // we need to ensure the tagsManifest is refreshed
        // since separate workers can be updating it at the same
        // time and we can't flush out of sync data
        await this.loadTagsManifest();
        if (!tagsManifest || !this.tagsManifestPath) {
            return;
        }
        for (const tag of tags){
            const data = tagsManifest.items[tag] || {};
            data.revalidatedAt = Date.now();
            tagsManifest.items[tag] = data;
        }
        try {
            await this.fs.mkdir(path.dirname(this.tagsManifestPath));
            await this.fs.writeFile(this.tagsManifestPath, JSON.stringify(tagsManifest || {}));
            if (this.debug) {
                console.log("Updated tags manifest", tagsManifest);
            }
        } catch (err) {
            console.warn("Failed to update tags manifest.", err);
        }
    }
    async get(...args) {
        var _data_value, _data_value1;
        const [key, ctx = {}] = args;
        const { tags, softTags, kindHint } = ctx;
        let data = memoryCache == null ? void 0 : memoryCache.get(key);
        if (this.debug) {
            console.log("get", key, tags, kindHint, !!data);
        }
        // let's check the disk for seed data
        if (!data && process.env.NEXT_RUNTIME !== "edge") {
            try {
                const filePath = this.getFilePath(`${key}.body`, "app");
                const fileData = await this.fs.readFile(filePath);
                const { mtime } = await this.fs.stat(filePath);
                const meta = JSON.parse(await this.fs.readFile(filePath.replace(/\.body$/, NEXT_META_SUFFIX), "utf8"));
                const cacheEntry = {
                    lastModified: mtime.getTime(),
                    value: {
                        kind: "ROUTE",
                        body: fileData,
                        headers: meta.headers,
                        status: meta.status
                    }
                };
                return cacheEntry;
            } catch (_) {
                // no .meta data for the related key
            }
            try {
                // Determine the file kind if we didn't know it already.
                let kind = kindHint;
                if (!kind) {
                    kind = this.detectFileKind(`${key}.html`);
                }
                const isAppPath = kind === "app";
                const filePath = this.getFilePath(kind === "fetch" ? key : `${key}.html`, kind);
                const fileData = await this.fs.readFile(filePath, "utf8");
                const { mtime } = await this.fs.stat(filePath);
                if (kind === "fetch" && this.flushToDisk) {
                    var _data_value2;
                    const lastModified = mtime.getTime();
                    const parsedData = JSON.parse(fileData);
                    data = {
                        lastModified,
                        value: parsedData
                    };
                    if (((_data_value2 = data.value) == null ? void 0 : _data_value2.kind) === "FETCH") {
                        var _data_value3;
                        const storedTags = (_data_value3 = data.value) == null ? void 0 : _data_value3.tags;
                        // update stored tags if a new one is being added
                        // TODO: remove this when we can send the tags
                        // via header on GET same as SET
                        if (!(tags == null ? void 0 : tags.every((tag)=>storedTags == null ? void 0 : storedTags.includes(tag)))) {
                            if (this.debug) {
                                console.log("tags vs storedTags mismatch", tags, storedTags);
                            }
                            await this.set(key, data.value, {
                                tags
                            });
                        }
                    }
                } else {
                    const pageData = isAppPath ? await this.fs.readFile(this.getFilePath(`${key}${this.experimental.ppr ? RSC_PREFETCH_SUFFIX : RSC_SUFFIX}`, "app"), "utf8") : JSON.parse(await this.fs.readFile(this.getFilePath(`${key}${NEXT_DATA_SUFFIX}`, "pages"), "utf8"));
                    let meta;
                    if (isAppPath) {
                        try {
                            meta = JSON.parse(await this.fs.readFile(filePath.replace(/\.html$/, NEXT_META_SUFFIX), "utf8"));
                        } catch {}
                    }
                    data = {
                        lastModified: mtime.getTime(),
                        value: {
                            kind: "PAGE",
                            html: fileData,
                            pageData,
                            postponed: meta == null ? void 0 : meta.postponed,
                            headers: meta == null ? void 0 : meta.headers,
                            status: meta == null ? void 0 : meta.status
                        }
                    };
                }
                if (data) {
                    memoryCache == null ? void 0 : memoryCache.set(key, data);
                }
            } catch (_) {
                // unable to get data from disk
            }
        }
        if ((data == null ? void 0 : (_data_value = data.value) == null ? void 0 : _data_value.kind) === "PAGE") {
            var _data_value_headers;
            let cacheTags;
            const tagsHeader = (_data_value_headers = data.value.headers) == null ? void 0 : _data_value_headers[NEXT_CACHE_TAGS_HEADER];
            if (typeof tagsHeader === "string") {
                cacheTags = tagsHeader.split(",");
            }
            if (cacheTags == null ? void 0 : cacheTags.length) {
                this.loadTagsManifest();
                const isStale = cacheTags.some((tag)=>{
                    var _tagsManifest_items_tag;
                    return (tagsManifest == null ? void 0 : (_tagsManifest_items_tag = tagsManifest.items[tag]) == null ? void 0 : _tagsManifest_items_tag.revalidatedAt) && (tagsManifest == null ? void 0 : tagsManifest.items[tag].revalidatedAt) >= ((data == null ? void 0 : data.lastModified) || Date.now());
                });
                // we trigger a blocking validation if an ISR page
                // had a tag revalidated, if we want to be a background
                // revalidation instead we return data.lastModified = -1
                if (isStale) {
                    data = undefined;
                }
            }
        }
        if (data && (data == null ? void 0 : (_data_value1 = data.value) == null ? void 0 : _data_value1.kind) === "FETCH") {
            this.loadTagsManifest();
            const combinedTags = [
                ...tags || [],
                ...softTags || []
            ];
            const wasRevalidated = combinedTags.some((tag)=>{
                var _tagsManifest_items_tag;
                if (this.revalidatedTags.includes(tag)) {
                    return true;
                }
                return (tagsManifest == null ? void 0 : (_tagsManifest_items_tag = tagsManifest.items[tag]) == null ? void 0 : _tagsManifest_items_tag.revalidatedAt) && (tagsManifest == null ? void 0 : tagsManifest.items[tag].revalidatedAt) >= ((data == null ? void 0 : data.lastModified) || Date.now());
            });
            // When revalidate tag is called we don't return
            // stale data so it's updated right away
            if (wasRevalidated) {
                data = undefined;
            }
        }
        return data ?? null;
    }
    async set(...args) {
        const [key, data, ctx] = args;
        memoryCache == null ? void 0 : memoryCache.set(key, {
            value: data,
            lastModified: Date.now()
        });
        if (this.debug) {
            console.log("set", key);
        }
        if (!this.flushToDisk) return;
        if ((data == null ? void 0 : data.kind) === "ROUTE") {
            const filePath = this.getFilePath(`${key}.body`, "app");
            await this.fs.mkdir(path.dirname(filePath));
            await this.fs.writeFile(filePath, data.body);
            const meta = {
                headers: data.headers,
                status: data.status,
                postponed: undefined
            };
            await this.fs.writeFile(filePath.replace(/\.body$/, NEXT_META_SUFFIX), JSON.stringify(meta, null, 2));
            return;
        }
        if ((data == null ? void 0 : data.kind) === "PAGE") {
            const isAppPath = typeof data.pageData === "string";
            const htmlPath = this.getFilePath(`${key}.html`, isAppPath ? "app" : "pages");
            await this.fs.mkdir(path.dirname(htmlPath));
            await this.fs.writeFile(htmlPath, data.html);
            await this.fs.writeFile(this.getFilePath(`${key}${isAppPath ? this.experimental.ppr ? RSC_PREFETCH_SUFFIX : RSC_SUFFIX : NEXT_DATA_SUFFIX}`, isAppPath ? "app" : "pages"), isAppPath ? data.pageData : JSON.stringify(data.pageData));
            if (data.headers || data.status) {
                const meta = {
                    headers: data.headers,
                    status: data.status,
                    postponed: data.postponed
                };
                await this.fs.writeFile(htmlPath.replace(/\.html$/, NEXT_META_SUFFIX), JSON.stringify(meta));
            }
        } else if ((data == null ? void 0 : data.kind) === "FETCH") {
            const filePath = this.getFilePath(key, "fetch");
            await this.fs.mkdir(path.dirname(filePath));
            await this.fs.writeFile(filePath, JSON.stringify({
                ...data,
                tags: ctx.tags
            }));
        }
    }
    detectFileKind(pathname) {
        if (!this.appDir && !this.pagesDir) {
            throw new Error("Invariant: Can't determine file path kind, no page directory enabled");
        }
        // If app directory isn't enabled, then assume it's pages and avoid the fs
        // hit.
        if (!this.appDir && this.pagesDir) {
            return "pages";
        } else if (this.appDir && !this.pagesDir) {
            return "app";
        }
        // If both are enabled, we need to test each in order, starting with
        // `pages`.
        let filePath = this.getFilePath(pathname, "pages");
        if (this.fs.existsSync(filePath)) {
            return "pages";
        }
        filePath = this.getFilePath(pathname, "app");
        if (this.fs.existsSync(filePath)) {
            return "app";
        }
        throw new Error(`Invariant: Unable to determine file path kind for ${pathname}`);
    }
    getFilePath(pathname, kind) {
        switch(kind){
            case "fetch":
                // we store in .next/cache/fetch-cache so it can be persisted
                // across deploys
                return path.join(this.serverDistDir, "..", "cache", "fetch-cache", pathname);
            case "pages":
                return path.join(this.serverDistDir, "pages", pathname);
            case "app":
                return path.join(this.serverDistDir, "app", pathname);
            default:
                throw new Error("Invariant: Can't determine file path kind");
        }
    }
}

//# sourceMappingURL=file-system-cache.js.map
1  node_modules/next/dist/esm/server/lib/incremental-cache/file-system-cache.js.map  generated  vendored  Normal file
File diff suppressed because one or more lines are too long
380  node_modules/next/dist/esm/server/lib/incremental-cache/index.js  generated  vendored  Normal file
@@ -0,0 +1,380 @@
import FetchCache from "./fetch-cache";
import FileSystemCache from "./file-system-cache";
import { normalizePagePath } from "../../../shared/lib/page-path/normalize-page-path";
import { CACHE_ONE_YEAR, NEXT_CACHE_REVALIDATED_TAGS_HEADER, NEXT_CACHE_REVALIDATE_TAG_TOKEN_HEADER, PRERENDER_REVALIDATE_HEADER } from "../../../lib/constants";
import { toRoute } from "../to-route";
import { SharedRevalidateTimings } from "./shared-revalidate-timings";
export class CacheHandler {
    // eslint-disable-next-line
    constructor(_ctx){}
    async get(..._args) {
        return {};
    }
    async set(..._args) {}
    async revalidateTag(..._args) {}
    resetRequestCache() {}
}
export class IncrementalCache {
    constructor({ fs, dev, appDir, pagesDir, flushToDisk, fetchCache, minimalMode, serverDistDir, requestHeaders, requestProtocol, maxMemoryCacheSize, getPrerenderManifest, fetchCacheKeyPrefix, CurCacheHandler, allowedRevalidateHeaderKeys, experimental }){
        var _this_prerenderManifest_preview, _this_prerenderManifest, _this_prerenderManifest_preview1, _this_prerenderManifest1;
        this.locks = new Map();
        this.unlocks = new Map();
        const debug = !!process.env.NEXT_PRIVATE_DEBUG_CACHE;
        this.hasCustomCacheHandler = Boolean(CurCacheHandler);
        if (!CurCacheHandler) {
            if (fs && serverDistDir) {
                if (debug) {
                    console.log("using filesystem cache handler");
                }
                CurCacheHandler = FileSystemCache;
            }
            if (FetchCache.isAvailable({
                _requestHeaders: requestHeaders
            }) && minimalMode && fetchCache) {
                if (debug) {
                    console.log("using fetch cache handler");
                }
                CurCacheHandler = FetchCache;
            }
        } else if (debug) {
            console.log("using custom cache handler", CurCacheHandler.name);
        }
        if (process.env.__NEXT_TEST_MAX_ISR_CACHE) {
            // Allow cache size to be overridden for testing purposes
            maxMemoryCacheSize = parseInt(process.env.__NEXT_TEST_MAX_ISR_CACHE, 10);
        }
        this.dev = dev;
        this.disableForTestmode = process.env.NEXT_PRIVATE_TEST_PROXY === "true";
        // this is a hack to avoid Webpack knowing this is equal to this.minimalMode
        // because we replace this.minimalMode to true in production bundles.
        const minimalModeKey = "minimalMode";
        this[minimalModeKey] = minimalMode;
        this.requestHeaders = requestHeaders;
        this.requestProtocol = requestProtocol;
        this.allowedRevalidateHeaderKeys = allowedRevalidateHeaderKeys;
        this.prerenderManifest = getPrerenderManifest();
        this.revalidateTimings = new SharedRevalidateTimings(this.prerenderManifest);
        this.fetchCacheKeyPrefix = fetchCacheKeyPrefix;
        let revalidatedTags = [];
        if (requestHeaders[PRERENDER_REVALIDATE_HEADER] === ((_this_prerenderManifest = this.prerenderManifest) == null ? void 0 : (_this_prerenderManifest_preview = _this_prerenderManifest.preview) == null ? void 0 : _this_prerenderManifest_preview.previewModeId)) {
            this.isOnDemandRevalidate = true;
        }
        if (minimalMode && typeof requestHeaders[NEXT_CACHE_REVALIDATED_TAGS_HEADER] === "string" && requestHeaders[NEXT_CACHE_REVALIDATE_TAG_TOKEN_HEADER] === ((_this_prerenderManifest1 = this.prerenderManifest) == null ? void 0 : (_this_prerenderManifest_preview1 = _this_prerenderManifest1.preview) == null ? void 0 : _this_prerenderManifest_preview1.previewModeId)) {
            revalidatedTags = requestHeaders[NEXT_CACHE_REVALIDATED_TAGS_HEADER].split(",");
        }
        if (CurCacheHandler) {
            this.cacheHandler = new CurCacheHandler({
                dev,
                fs,
                flushToDisk,
                serverDistDir,
                revalidatedTags,
                maxMemoryCacheSize,
                _pagesDir: !!pagesDir,
                _appDir: !!appDir,
                _requestHeaders: requestHeaders,
                fetchCacheKeyPrefix,
                experimental
            });
        }
    }
    calculateRevalidate(pathname, fromTime, dev) {
        // in development we don't have a prerender-manifest
        // and default to always revalidating to allow easier debugging
        if (dev) return new Date().getTime() - 1000;
        // if an entry isn't present in routes we fallback to a default
        // of revalidating after 1 second.
        const initialRevalidateSeconds = this.revalidateTimings.get(toRoute(pathname)) ?? 1;
        const revalidateAfter = typeof initialRevalidateSeconds === "number" ? initialRevalidateSeconds * 1000 + fromTime : initialRevalidateSeconds;
        return revalidateAfter;
    }
    _getPathname(pathname, fetchCache) {
        return fetchCache ? pathname : normalizePagePath(pathname);
    }
    resetRequestCache() {
        var _this_cacheHandler_resetRequestCache, _this_cacheHandler;
        (_this_cacheHandler = this.cacheHandler) == null ? void 0 : (_this_cacheHandler_resetRequestCache = _this_cacheHandler.resetRequestCache) == null ? void 0 : _this_cacheHandler_resetRequestCache.call(_this_cacheHandler);
    }
    async unlock(cacheKey) {
        const unlock = this.unlocks.get(cacheKey);
        if (unlock) {
            unlock();
            this.locks.delete(cacheKey);
            this.unlocks.delete(cacheKey);
        }
    }
    async lock(cacheKey) {
        if (process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT && process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY && process.env.NEXT_RUNTIME !== "edge") {
            const invokeIpcMethod = require("../server-ipc/request-utils").invokeIpcMethod;
            await invokeIpcMethod({
                method: "lock",
                ipcPort: process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT,
                ipcKey: process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY,
                args: [
                    cacheKey
                ]
            });
            return async ()=>{
                await invokeIpcMethod({
                    method: "unlock",
                    ipcPort: process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT,
                    ipcKey: process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY,
                    args: [
                        cacheKey
                    ]
                });
            };
        }
        let unlockNext = ()=>Promise.resolve();
        const existingLock = this.locks.get(cacheKey);
        if (existingLock) {
            await existingLock;
        } else {
            const newLock = new Promise((resolve)=>{
                unlockNext = async ()=>{
                    resolve();
                };
            });
            this.locks.set(cacheKey, newLock);
            this.unlocks.set(cacheKey, unlockNext);
        }
        return unlockNext;
    }
    async revalidateTag(tags) {
        var _this_cacheHandler_revalidateTag, _this_cacheHandler;
        if (process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT && process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY && process.env.NEXT_RUNTIME !== "edge") {
            const invokeIpcMethod = require("../server-ipc/request-utils").invokeIpcMethod;
            return invokeIpcMethod({
                method: "revalidateTag",
                ipcPort: process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT,
                ipcKey: process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY,
                args: [
                    ...arguments
                ]
            });
        }
        return (_this_cacheHandler = this.cacheHandler) == null ? void 0 : (_this_cacheHandler_revalidateTag = _this_cacheHandler.revalidateTag) == null ? void 0 : _this_cacheHandler_revalidateTag.call(_this_cacheHandler, tags);
    }
    // x-ref: https://github.com/facebook/react/blob/2655c9354d8e1c54ba888444220f63e836925caa/packages/react/src/ReactFetch.js#L23
    async fetchCacheKey(url, init = {}) {
|
||||
// this should be bumped anytime a fix is made to cache entries
|
||||
// that should bust the cache
|
||||
const MAIN_KEY_PREFIX = "v3";
|
||||
const bodyChunks = [];
|
||||
const encoder = new TextEncoder();
|
||||
const decoder = new TextDecoder();
|
||||
if (init.body) {
|
||||
// handle ReadableStream body
|
||||
if (typeof init.body.getReader === "function") {
|
||||
const readableBody = init.body;
|
||||
const chunks = [];
|
||||
try {
|
||||
await readableBody.pipeTo(new WritableStream({
|
||||
write (chunk) {
|
||||
if (typeof chunk === "string") {
|
||||
chunks.push(encoder.encode(chunk));
|
||||
bodyChunks.push(chunk);
|
||||
} else {
|
||||
chunks.push(chunk);
|
||||
bodyChunks.push(decoder.decode(chunk, {
|
||||
stream: true
|
||||
}));
|
||||
}
|
||||
}
|
||||
}));
|
||||
// Flush the decoder.
|
||||
bodyChunks.push(decoder.decode());
|
||||
// Create a new buffer with all the chunks.
|
||||
const length = chunks.reduce((total, arr)=>total + arr.length, 0);
|
||||
const arrayBuffer = new Uint8Array(length);
|
||||
// Push each of the chunks into the new array buffer.
|
||||
let offset = 0;
|
||||
for (const chunk of chunks){
|
||||
arrayBuffer.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
init._ogBody = arrayBuffer;
|
||||
} catch (err) {
|
||||
console.error("Problem reading body", err);
|
||||
}
|
||||
} else if (typeof init.body.keys === "function") {
|
||||
const formData = init.body;
|
||||
init._ogBody = init.body;
|
||||
for (const key of new Set([
|
||||
...formData.keys()
|
||||
])){
|
||||
const values = formData.getAll(key);
|
||||
bodyChunks.push(`${key}=${(await Promise.all(values.map(async (val)=>{
|
||||
if (typeof val === "string") {
|
||||
return val;
|
||||
} else {
|
||||
return await val.text();
|
||||
}
|
||||
}))).join(",")}`);
|
||||
}
|
||||
// handle blob body
|
||||
} else if (typeof init.body.arrayBuffer === "function") {
|
||||
const blob = init.body;
|
||||
const arrayBuffer = await blob.arrayBuffer();
|
||||
bodyChunks.push(await blob.text());
|
||||
init._ogBody = new Blob([
|
||||
arrayBuffer
|
||||
], {
|
||||
type: blob.type
|
||||
});
|
||||
} else if (typeof init.body === "string") {
|
||||
bodyChunks.push(init.body);
|
||||
init._ogBody = init.body;
|
||||
}
|
||||
}
|
||||
const headers = typeof (init.headers || {}).keys === "function" ? Object.fromEntries(init.headers) : Object.assign({}, init.headers);
|
||||
if ("traceparent" in headers) delete headers["traceparent"];
|
||||
const cacheString = JSON.stringify([
|
||||
MAIN_KEY_PREFIX,
|
||||
this.fetchCacheKeyPrefix || "",
|
||||
url,
|
||||
init.method,
|
||||
headers,
|
||||
init.mode,
|
||||
init.redirect,
|
||||
init.credentials,
|
||||
init.referrer,
|
||||
init.referrerPolicy,
|
||||
init.integrity,
|
||||
init.cache,
|
||||
bodyChunks
|
||||
]);
|
||||
if (process.env.NEXT_RUNTIME === "edge") {
|
||||
function bufferToHex(buffer) {
|
||||
return Array.prototype.map.call(new Uint8Array(buffer), (b)=>b.toString(16).padStart(2, "0")).join("");
|
||||
}
|
||||
const buffer = encoder.encode(cacheString);
|
||||
return bufferToHex(await crypto.subtle.digest("SHA-256", buffer));
|
||||
} else {
|
||||
const crypto1 = require("crypto");
|
||||
return crypto1.createHash("sha256").update(cacheString).digest("hex");
|
||||
}
|
||||
}
|
||||
// get data from cache if available
|
||||
async get(cacheKey, ctx = {}) {
|
||||
var _this_cacheHandler, _cacheData_value;
|
||||
if (process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT && process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY && process.env.NEXT_RUNTIME !== "edge") {
|
||||
const invokeIpcMethod = require("../server-ipc/request-utils").invokeIpcMethod;
|
||||
return invokeIpcMethod({
|
||||
method: "get",
|
||||
ipcPort: process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT,
|
||||
ipcKey: process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY,
|
||||
args: [
|
||||
...arguments
|
||||
]
|
||||
});
|
||||
}
|
||||
// we don't leverage the prerender cache in dev mode
|
||||
// so that getStaticProps is always called for easier debugging
|
||||
if (this.disableForTestmode || this.dev && (ctx.kindHint !== "fetch" || this.requestHeaders["cache-control"] === "no-cache")) {
|
||||
return null;
|
||||
}
|
||||
cacheKey = this._getPathname(cacheKey, ctx.kindHint === "fetch");
|
||||
let entry = null;
|
||||
let revalidate = ctx.revalidate;
|
||||
const cacheData = await ((_this_cacheHandler = this.cacheHandler) == null ? void 0 : _this_cacheHandler.get(cacheKey, ctx));
|
||||
if ((cacheData == null ? void 0 : (_cacheData_value = cacheData.value) == null ? void 0 : _cacheData_value.kind) === "FETCH") {
|
||||
const combinedTags = [
|
||||
...ctx.tags || [],
|
||||
...ctx.softTags || []
|
||||
];
|
||||
// if a tag was revalidated we don't return stale data
|
||||
if (combinedTags.some((tag)=>{
|
||||
var _this_revalidatedTags;
|
||||
return (_this_revalidatedTags = this.revalidatedTags) == null ? void 0 : _this_revalidatedTags.includes(tag);
|
||||
})) {
|
||||
return null;
|
||||
}
|
||||
revalidate = revalidate || cacheData.value.revalidate;
|
||||
const age = (Date.now() - (cacheData.lastModified || 0)) / 1000;
|
||||
const isStale = age > revalidate;
|
||||
const data = cacheData.value.data;
|
||||
return {
|
||||
isStale: isStale,
|
||||
value: {
|
||||
kind: "FETCH",
|
||||
data,
|
||||
revalidate: revalidate
|
||||
},
|
||||
revalidateAfter: Date.now() + revalidate * 1000
|
||||
};
|
||||
}
|
||||
const curRevalidate = this.revalidateTimings.get(toRoute(cacheKey));
|
||||
let isStale;
|
||||
let revalidateAfter;
|
||||
if ((cacheData == null ? void 0 : cacheData.lastModified) === -1) {
|
||||
isStale = -1;
|
||||
revalidateAfter = -1 * CACHE_ONE_YEAR;
|
||||
} else {
|
||||
revalidateAfter = this.calculateRevalidate(cacheKey, (cacheData == null ? void 0 : cacheData.lastModified) || Date.now(), this.dev && ctx.kindHint !== "fetch");
|
||||
isStale = revalidateAfter !== false && revalidateAfter < Date.now() ? true : undefined;
|
||||
}
|
||||
if (cacheData) {
|
||||
entry = {
|
||||
isStale,
|
||||
curRevalidate,
|
||||
revalidateAfter,
|
||||
value: cacheData.value
|
||||
};
|
||||
}
|
||||
if (!cacheData && this.prerenderManifest.notFoundRoutes.includes(cacheKey)) {
|
||||
// for the first hit after starting the server the cache
|
||||
// may not have a way to save notFound: true so if
|
||||
// the prerender-manifest marks this as notFound then we
|
||||
// return that entry and trigger a cache set to give it a
|
||||
// chance to update in-memory entries
|
||||
entry = {
|
||||
isStale,
|
||||
value: null,
|
||||
curRevalidate,
|
||||
revalidateAfter
|
||||
};
|
||||
this.set(cacheKey, entry.value, ctx);
|
||||
}
|
||||
return entry;
|
||||
}
|
||||
// populate the incremental cache with new data
|
||||
async set(pathname, data, ctx) {
|
||||
if (process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT && process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY && process.env.NEXT_RUNTIME !== "edge") {
|
||||
const invokeIpcMethod = require("../server-ipc/request-utils").invokeIpcMethod;
|
||||
return invokeIpcMethod({
|
||||
method: "set",
|
||||
ipcPort: process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT,
|
||||
ipcKey: process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY,
|
||||
args: [
|
||||
...arguments
|
||||
]
|
||||
});
|
||||
}
|
||||
if (this.disableForTestmode || this.dev && !ctx.fetchCache) return;
|
||||
// FetchCache has upper limit of 2MB per-entry currently
|
||||
const itemSize = JSON.stringify(data).length;
|
||||
if (ctx.fetchCache && // we don't show this error/warning when a custom cache handler is being used
|
||||
// as it might not have this limit
|
||||
!this.hasCustomCacheHandler && itemSize > 2 * 1024 * 1024) {
|
||||
if (this.dev) {
|
||||
throw new Error(`Failed to set Next.js data cache, items over 2MB can not be cached (${itemSize} bytes)`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
pathname = this._getPathname(pathname, ctx.fetchCache);
|
||||
try {
|
||||
var _this_cacheHandler;
|
||||
// Set the value for the revalidate seconds so if it changes we can
|
||||
// update the cache with the new value.
|
||||
if (typeof ctx.revalidate !== "undefined" && !ctx.fetchCache) {
|
||||
this.revalidateTimings.set(pathname, ctx.revalidate);
|
||||
}
|
||||
await ((_this_cacheHandler = this.cacheHandler) == null ? void 0 : _this_cacheHandler.set(pathname, data, ctx));
|
||||
} catch (error) {
|
||||
console.warn("Failed to update prerender cache for", pathname, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/incremental-cache/index.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/incremental-cache/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
51
node_modules/next/dist/esm/server/lib/incremental-cache/shared-revalidate-timings.js
generated
vendored
Normal file
51
node_modules/next/dist/esm/server/lib/incremental-cache/shared-revalidate-timings.js
generated
vendored
Normal file
@ -0,0 +1,51 @@
|
||||
/**
|
||||
* A shared cache of revalidate timings for routes. This cache is used so we
|
||||
* don't have to modify the prerender manifest when we want to update the
|
||||
* revalidate timings for a route.
|
||||
*/ export class SharedRevalidateTimings {
|
||||
static #_ = /**
|
||||
* The in-memory cache of revalidate timings for routes. This cache is
|
||||
* populated when the cache is updated with new timings.
|
||||
*/ this.timings = new Map();
|
||||
constructor(/**
|
||||
* The prerender manifest that contains the initial revalidate timings for
|
||||
* routes.
|
||||
*/ prerenderManifest){
|
||||
this.prerenderManifest = prerenderManifest;
|
||||
}
|
||||
/**
|
||||
* Try to get the revalidate timings for a route. This will first try to get
|
||||
* the timings from the in-memory cache. If the timings are not present in the
|
||||
* in-memory cache, then the timings will be sourced from the prerender
|
||||
* manifest.
|
||||
*
|
||||
* @param route the route to get the revalidate timings for
|
||||
* @returns the revalidate timings for the route, or undefined if the timings
|
||||
* are not present in the in-memory cache or the prerender manifest
|
||||
*/ get(route) {
|
||||
var _this_prerenderManifest_routes_route;
|
||||
// This is a copy on write cache that is updated when the cache is updated.
|
||||
// If the cache is never written to, then the timings will be sourced from
|
||||
// the prerender manifest.
|
||||
let revalidate = SharedRevalidateTimings.timings.get(route);
|
||||
if (typeof revalidate !== "undefined") return revalidate;
|
||||
revalidate = (_this_prerenderManifest_routes_route = this.prerenderManifest.routes[route]) == null ? void 0 : _this_prerenderManifest_routes_route.initialRevalidateSeconds;
|
||||
if (typeof revalidate !== "undefined") return revalidate;
|
||||
return undefined;
|
||||
}
|
||||
/**
|
||||
* Set the revalidate timings for a route.
|
||||
*
|
||||
* @param route the route to set the revalidate timings for
|
||||
* @param revalidate the revalidate timings for the route
|
||||
*/ set(route, revalidate) {
|
||||
SharedRevalidateTimings.timings.set(route, revalidate);
|
||||
}
|
||||
/**
|
||||
* Clear the in-memory cache of revalidate timings for routes.
|
||||
*/ clear() {
|
||||
SharedRevalidateTimings.timings.clear();
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=shared-revalidate-timings.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/incremental-cache/shared-revalidate-timings.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/incremental-cache/shared-revalidate-timings.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/incremental-cache/shared-revalidate-timings.ts"],"names":["SharedRevalidateTimings","timings","Map","constructor","prerenderManifest","get","route","revalidate","routes","initialRevalidateSeconds","undefined","set","clear"],"mappings":"AAIA;;;;CAIC,GACD,OAAO,MAAMA;gBACX;;;GAGC,QACuBC,UAAU,IAAIC;IAEtCC,YACE;;;KAGC,GACD,AAAiBC,iBAEhB,CACD;aAHiBA,oBAAAA;IAGhB;IAEH;;;;;;;;;GASC,GACD,AAAOC,IAAIC,KAAa,EAA0B;YAOnC;QANb,2EAA2E;QAC3E,0EAA0E;QAC1E,0BAA0B;QAC1B,IAAIC,aAAaP,wBAAwBC,OAAO,CAACI,GAAG,CAACC;QACrD,IAAI,OAAOC,eAAe,aAAa,OAAOA;QAE9CA,cAAa,uCAAA,IAAI,CAACH,iBAAiB,CAACI,MAAM,CAACF,MAAM,qBAApC,qCAAsCG,wBAAwB;QAC3E,IAAI,OAAOF,eAAe,aAAa,OAAOA;QAE9C,OAAOG;IACT;IAEA;;;;;GAKC,GACD,AAAOC,IAAIL,KAAa,EAAEC,UAAsB,EAAE;QAChDP,wBAAwBC,OAAO,CAACU,GAAG,CAACL,OAAOC;IAC7C;IAEA;;GAEC,GACD,AAAOK,QAAQ;QACbZ,wBAAwBC,OAAO,CAACW,KAAK;IACvC;AACF"}
|
||||
31
node_modules/next/dist/esm/server/lib/is-ipv6.js
generated
vendored
Normal file
31
node_modules/next/dist/esm/server/lib/is-ipv6.js
generated
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
// Regex from `node/lib/internal/net.js`: https://github.com/nodejs/node/blob/9fc57006c27564ed7f75eee090eca86786508f51/lib/internal/net.js#L19-L29
|
||||
// License included below:
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
const v4Seg = "(?:[0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])";
|
||||
const v4Str = `(${v4Seg}[.]){3}${v4Seg}`;
|
||||
const v6Seg = "(?:[0-9a-fA-F]{1,4})";
|
||||
const IPv6Reg = new RegExp("^(" + `(?:${v6Seg}:){7}(?:${v6Seg}|:)|` + `(?:${v6Seg}:){6}(?:${v4Str}|:${v6Seg}|:)|` + `(?:${v6Seg}:){5}(?::${v4Str}|(:${v6Seg}){1,2}|:)|` + `(?:${v6Seg}:){4}(?:(:${v6Seg}){0,1}:${v4Str}|(:${v6Seg}){1,3}|:)|` + `(?:${v6Seg}:){3}(?:(:${v6Seg}){0,2}:${v4Str}|(:${v6Seg}){1,4}|:)|` + `(?:${v6Seg}:){2}(?:(:${v6Seg}){0,3}:${v4Str}|(:${v6Seg}){1,5}|:)|` + `(?:${v6Seg}:){1}(?:(:${v6Seg}){0,4}:${v4Str}|(:${v6Seg}){1,6}|:)|` + `(?::((?::${v6Seg}){0,5}:${v4Str}|(?::${v6Seg}){1,7}|:))` + ")(%[0-9a-zA-Z-.:]{1,})?$");
|
||||
export function isIPv6(s) {
|
||||
return IPv6Reg.test(s);
|
||||
}
|
||||
|
||||
//# sourceMappingURL=is-ipv6.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/is-ipv6.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/is-ipv6.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/lib/is-ipv6.ts"],"names":["v4Seg","v4Str","v6Seg","IPv6Reg","RegExp","isIPv6","s","test"],"mappings":"AAAA,kJAAkJ;AAClJ,0BAA0B;AAC1B,sDAAsD;AACtD,EAAE;AACF,0EAA0E;AAC1E,gEAAgE;AAChE,sEAAsE;AACtE,sEAAsE;AACtE,4EAA4E;AAC5E,qEAAqE;AACrE,wBAAwB;AACxB,EAAE;AACF,0EAA0E;AAC1E,yDAAyD;AACzD,EAAE;AACF,0EAA0E;AAC1E,6DAA6D;AAC7D,4EAA4E;AAC5E,2EAA2E;AAC3E,wEAAwE;AACxE,4EAA4E;AAC5E,yCAAyC;AAEzC,MAAMA,QAAQ;AACd,MAAMC,QAAQ,CAAC,CAAC,EAAED,MAAM,OAAO,EAAEA,MAAM,CAAC;AACxC,MAAME,QAAQ;AACd,MAAMC,UAAU,IAAIC,OAClB,OACE,CAAC,GAAG,EAAEF,MAAM,QAAQ,EAAEA,MAAM,IAAI,CAAC,GACjC,CAAC,GAAG,EAAEA,MAAM,QAAQ,EAAED,MAAM,EAAE,EAAEC,MAAM,IAAI,CAAC,GAC3C,CAAC,GAAG,EAAEA,MAAM,SAAS,EAAED,MAAM,GAAG,EAAEC,MAAM,UAAU,CAAC,GACnD,CAAC,GAAG,EAAEA,MAAM,UAAU,EAAEA,MAAM,OAAO,EAAED,MAAM,GAAG,EAAEC,MAAM,UAAU,CAAC,GACnE,CAAC,GAAG,EAAEA,MAAM,UAAU,EAAEA,MAAM,OAAO,EAAED,MAAM,GAAG,EAAEC,MAAM,UAAU,CAAC,GACnE,CAAC,GAAG,EAAEA,MAAM,UAAU,EAAEA,MAAM,OAAO,EAAED,MAAM,GAAG,EAAEC,MAAM,UAAU,CAAC,GACnE,CAAC,GAAG,EAAEA,MAAM,UAAU,EAAEA,MAAM,OAAO,EAAED,MAAM,GAAG,EAAEC,MAAM,UAAU,CAAC,GACnE,CAAC,SAAS,EAAEA,MAAM,OAAO,EAAED,MAAM,KAAK,EAAEC,MAAM,UAAU,CAAC,GACzD;AAGJ,OAAO,SAASG,OAAOC,CAAS;IAC9B,OAAOH,QAAQI,IAAI,CAACD;AACtB"}
|
||||
8
node_modules/next/dist/esm/server/lib/match-next-data-pathname.js
generated
vendored
Normal file
8
node_modules/next/dist/esm/server/lib/match-next-data-pathname.js
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
import { getPathMatch } from "../../shared/lib/router/utils/path-match";
|
||||
const matcher = getPathMatch("/_next/data/:path*");
|
||||
export function matchNextDataPathname(pathname) {
|
||||
if (typeof pathname !== "string") return false;
|
||||
return matcher(pathname);
|
||||
}
|
||||
|
||||
//# sourceMappingURL=match-next-data-pathname.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/match-next-data-pathname.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/match-next-data-pathname.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/lib/match-next-data-pathname.ts"],"names":["getPathMatch","matcher","matchNextDataPathname","pathname"],"mappings":"AAAA,SAASA,YAAY,QAAQ,2CAA0C;AAEvE,MAAMC,UAAUD,aAAa;AAE7B,OAAO,SAASE,sBAAsBC,QAAmC;IACvE,IAAI,OAAOA,aAAa,UAAU,OAAO;IAEzC,OAAOF,QAAQE;AACjB"}
|
||||
298
node_modules/next/dist/esm/server/lib/mock-request.js
generated
vendored
Normal file
298
node_modules/next/dist/esm/server/lib/mock-request.js
generated
vendored
Normal file
@ -0,0 +1,298 @@
|
||||
import Stream from "stream";
|
||||
import { fromNodeOutgoingHttpHeaders, toNodeOutgoingHttpHeaders } from "../web/utils";
|
||||
export class MockedRequest extends Stream.Readable {
|
||||
constructor({ url, headers, method, socket = null, readable }){
|
||||
super();
|
||||
// This is hardcoded for now, but can be updated to be configurable if needed.
|
||||
this.httpVersion = "1.0";
|
||||
this.httpVersionMajor = 1;
|
||||
this.httpVersionMinor = 0;
|
||||
// If we don't actually have a socket, we'll just use a mock one that
|
||||
// always returns false for the `encrypted` property and undefined for the
|
||||
// `remoteAddress` property.
|
||||
this.socket = new Proxy({}, {
|
||||
get: (_target, prop)=>{
|
||||
if (prop !== "encrypted" && prop !== "remoteAddress") {
|
||||
throw new Error("Method not implemented");
|
||||
}
|
||||
if (prop === "remoteAddress") return undefined;
|
||||
// For this mock request, always ensure we just respond with the encrypted
|
||||
// set to false to ensure there's no odd leakages.
|
||||
return false;
|
||||
}
|
||||
});
|
||||
this.url = url;
|
||||
this.headers = headers;
|
||||
this.method = method;
|
||||
if (readable) {
|
||||
this.bodyReadable = readable;
|
||||
this.bodyReadable.on("end", ()=>this.emit("end"));
|
||||
this.bodyReadable.on("close", ()=>this.emit("close"));
|
||||
}
|
||||
if (socket) {
|
||||
this.socket = socket;
|
||||
}
|
||||
}
|
||||
get headersDistinct() {
|
||||
const headers = {};
|
||||
for (const [key, value] of Object.entries(this.headers)){
|
||||
if (!value) continue;
|
||||
headers[key] = Array.isArray(value) ? value : [
|
||||
value
|
||||
];
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
_read(size) {
|
||||
if (this.bodyReadable) {
|
||||
return this.bodyReadable._read(size);
|
||||
} else {
|
||||
this.emit("end");
|
||||
this.emit("close");
|
||||
}
|
||||
}
|
||||
/**
|
||||
* The `connection` property is just an alias for the `socket` property.
|
||||
*
|
||||
* @deprecated — since v13.0.0 - Use socket instead.
|
||||
*/ get connection() {
|
||||
return this.socket;
|
||||
}
|
||||
// The following methods are not implemented as they are not used in the
|
||||
// Next.js codebase.
|
||||
get aborted() {
|
||||
throw new Error("Method not implemented");
|
||||
}
|
||||
get complete() {
|
||||
throw new Error("Method not implemented");
|
||||
}
|
||||
get trailers() {
|
||||
throw new Error("Method not implemented");
|
||||
}
|
||||
get trailersDistinct() {
|
||||
throw new Error("Method not implemented");
|
||||
}
|
||||
get rawTrailers() {
|
||||
throw new Error("Method not implemented");
|
||||
}
|
||||
get rawHeaders() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
setTimeout() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
}
|
||||
export class MockedResponse extends Stream.Writable {
|
||||
constructor(res = {}){
|
||||
super();
|
||||
this.statusMessage = "";
|
||||
this.finished = false;
|
||||
this.headersSent = false;
|
||||
/**
|
||||
* A list of buffers that have been written to the response.
|
||||
*
|
||||
* @internal - used internally by Next.js
|
||||
*/ this.buffers = [];
|
||||
this.statusCode = res.statusCode ?? 200;
|
||||
this.socket = res.socket ?? null;
|
||||
this.headers = res.headers ? fromNodeOutgoingHttpHeaders(res.headers) : new Headers();
|
||||
this.headPromise = new Promise((resolve)=>{
|
||||
this.headPromiseResolve = resolve;
|
||||
});
|
||||
// Attach listeners for the `finish`, `end`, and `error` events to the
|
||||
// `MockedResponse` instance.
|
||||
this.hasStreamed = new Promise((resolve, reject)=>{
|
||||
this.on("finish", ()=>resolve(true));
|
||||
this.on("end", ()=>resolve(true));
|
||||
this.on("error", (err)=>reject(err));
|
||||
}).then((val)=>{
|
||||
this.headPromiseResolve == null ? void 0 : this.headPromiseResolve.call(this);
|
||||
return val;
|
||||
});
|
||||
if (res.resWriter) {
|
||||
this.resWriter = res.resWriter;
|
||||
}
|
||||
}
|
||||
appendHeader(name, value) {
|
||||
const values = Array.isArray(value) ? value : [
|
||||
value
|
||||
];
|
||||
for (const v of values){
|
||||
this.headers.append(name, v);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Returns true if the response has been sent, false otherwise.
|
||||
*
|
||||
* @internal - used internally by Next.js
|
||||
*/ get isSent() {
|
||||
return this.finished || this.headersSent;
|
||||
}
|
||||
/**
|
||||
* The `connection` property is just an alias for the `socket` property.
|
||||
*
|
||||
* @deprecated — since v13.0.0 - Use socket instead.
|
||||
*/ get connection() {
|
||||
return this.socket;
|
||||
}
|
||||
write(chunk) {
|
||||
if (this.resWriter) {
|
||||
return this.resWriter(chunk);
|
||||
}
|
||||
this.buffers.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
|
||||
return true;
|
||||
}
|
||||
end() {
|
||||
this.finished = true;
|
||||
return super.end(...arguments);
|
||||
}
|
||||
/**
|
||||
* This method is a no-op because the `MockedResponse` instance is not
|
||||
* actually connected to a socket. This method is not specified on the
|
||||
* interface type for `ServerResponse` but is called by Node.js.
|
||||
*
|
||||
* @see https://github.com/nodejs/node/pull/7949
|
||||
*/ _implicitHeader() {}
|
||||
_write(chunk, _encoding, callback) {
|
||||
this.write(chunk);
|
||||
// According to Node.js documentation, the callback MUST be invoked to
|
||||
// signal that the write completed successfully. If this callback is not
|
||||
// invoked, the 'finish' event will not be emitted.
|
||||
//
|
||||
// https://nodejs.org/docs/latest-v16.x/api/stream.html#writable_writechunk-encoding-callback
|
||||
callback();
|
||||
}
|
||||
writeHead(statusCode, statusMessage, headers) {
|
||||
if (!headers && typeof statusMessage !== "string") {
|
||||
headers = statusMessage;
|
||||
} else if (typeof statusMessage === "string" && statusMessage.length > 0) {
|
||||
this.statusMessage = statusMessage;
|
||||
}
|
||||
if (headers) {
|
||||
// When headers have been set with response.setHeader(), they will be
|
||||
// merged with any headers passed to response.writeHead(), with the
|
||||
// headers passed to response.writeHead() given precedence.
|
||||
//
|
||||
// https://nodejs.org/api/http.html#responsewriteheadstatuscode-statusmessage-headers
|
||||
//
|
||||
// For this reason, we need to only call `set` to ensure that this will
|
||||
// overwrite any existing headers.
|
||||
if (Array.isArray(headers)) {
|
||||
// headers may be an Array where the keys and values are in the same list.
|
||||
// It is not a list of tuples. So, the even-numbered offsets are key
|
||||
// values, and the odd-numbered offsets are the associated values. The
|
||||
// array is in the same format as request.rawHeaders.
|
||||
for(let i = 0; i < headers.length; i += 2){
|
||||
// The header key is always a string according to the spec.
|
||||
this.setHeader(headers[i], headers[i + 1]);
|
||||
}
|
||||
} else {
|
||||
for (const [key, value] of Object.entries(headers)){
|
||||
// Skip undefined values
|
||||
if (typeof value === "undefined") continue;
|
||||
this.setHeader(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
this.statusCode = statusCode;
|
||||
this.headersSent = true;
|
||||
this.headPromiseResolve == null ? void 0 : this.headPromiseResolve.call(this);
|
||||
return this;
|
||||
}
|
||||
hasHeader(name) {
|
||||
return this.headers.has(name);
|
||||
}
|
||||
getHeader(name) {
|
||||
return this.headers.get(name) ?? undefined;
|
||||
}
|
||||
getHeaders() {
|
||||
return toNodeOutgoingHttpHeaders(this.headers);
|
||||
}
|
||||
getHeaderNames() {
|
||||
return Array.from(this.headers.keys());
|
||||
}
|
||||
setHeader(name, value) {
|
||||
if (Array.isArray(value)) {
|
||||
// Because `set` here should override any existing values, we need to
|
||||
// delete the existing values before setting the new ones via `append`.
|
||||
this.headers.delete(name);
|
||||
for (const v of value){
|
||||
this.headers.append(name, v);
|
||||
}
|
||||
} else if (typeof value === "number") {
|
||||
this.headers.set(name, value.toString());
|
||||
} else {
|
||||
this.headers.set(name, value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
removeHeader(name) {
|
||||
this.headers.delete(name);
|
||||
}
|
||||
flushHeaders() {
|
||||
// This is a no-op because we don't actually have a socket to flush the
|
||||
// headers to.
|
||||
}
|
||||
// The following methods are not implemented as they are not used in the
|
||||
// Next.js codebase.
|
||||
get strictContentLength() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
writeEarlyHints() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
get req() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
assignSocket() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
detachSocket() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
writeContinue() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
writeProcessing() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
get upgrading() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
get chunkedEncoding() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
get shouldKeepAlive() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
get useChunkedEncodingByDefault() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
get sendDate() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
setTimeout() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
addTrailers() {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
}
|
||||
export function createRequestResponseMocks({ url, headers = {}, method = "GET", bodyReadable, resWriter, socket = null }) {
|
||||
return {
|
||||
req: new MockedRequest({
|
||||
url,
|
||||
headers,
|
||||
method,
|
||||
socket,
|
||||
readable: bodyReadable
|
||||
}),
|
||||
res: new MockedResponse({
|
||||
socket,
|
||||
resWriter
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
//# sourceMappingURL=mock-request.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/mock-request.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/mock-request.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
13
node_modules/next/dist/esm/server/lib/node-fs-methods.js
generated
vendored
Normal file
13
node_modules/next/dist/esm/server/lib/node-fs-methods.js
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
import fs from "fs";
|
||||
export const nodeFs = {
|
||||
existsSync: fs.existsSync,
|
||||
readFile: fs.promises.readFile,
|
||||
readFileSync: fs.readFileSync,
|
||||
writeFile: (f, d)=>fs.promises.writeFile(f, d),
|
||||
mkdir: (dir)=>fs.promises.mkdir(dir, {
|
||||
recursive: true
|
||||
}),
|
||||
stat: (f)=>fs.promises.stat(f)
|
||||
};
|
||||
|
||||
//# sourceMappingURL=node-fs-methods.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/node-fs-methods.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/node-fs-methods.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/lib/node-fs-methods.ts"],"names":["fs","nodeFs","existsSync","readFile","promises","readFileSync","writeFile","f","d","mkdir","dir","recursive","stat"],"mappings":"AAEA,OAAOA,QAAQ,KAAI;AAEnB,OAAO,MAAMC,SAAkB;IAC7BC,YAAYF,GAAGE,UAAU;IACzBC,UAAUH,GAAGI,QAAQ,CAACD,QAAQ;IAC9BE,cAAcL,GAAGK,YAAY;IAC7BC,WAAW,CAACC,GAAGC,IAAMR,GAAGI,QAAQ,CAACE,SAAS,CAACC,GAAGC;IAC9CC,OAAO,CAACC,MAAQV,GAAGI,QAAQ,CAACK,KAAK,CAACC,KAAK;YAAEC,WAAW;QAAK;IACzDC,MAAM,CAACL,IAAMP,GAAGI,QAAQ,CAACQ,IAAI,CAACL;AAChC,EAAC"}
|
||||
545
node_modules/next/dist/esm/server/lib/patch-fetch.js
generated
vendored
Normal file
545
node_modules/next/dist/esm/server/lib/patch-fetch.js
generated
vendored
Normal file
@ -0,0 +1,545 @@
|
||||
import { AppRenderSpan, NextNodeServerSpan } from "./trace/constants";
|
||||
import { getTracer, SpanKind } from "./trace/tracer";
|
||||
import { CACHE_ONE_YEAR, NEXT_CACHE_IMPLICIT_TAG_ID, NEXT_CACHE_TAG_MAX_ITEMS, NEXT_CACHE_TAG_MAX_LENGTH } from "../../lib/constants";
|
||||
import * as Log from "../../build/output/log";
|
||||
import { trackDynamicFetch } from "../app-render/dynamic-rendering";
|
||||
const isEdgeRuntime = process.env.NEXT_RUNTIME === "edge";
|
||||
function isPatchedFetch(fetch) {
|
||||
return "__nextPatched" in fetch && fetch.__nextPatched === true;
|
||||
}
|
||||
export function validateRevalidate(revalidateVal, pathname) {
|
||||
try {
|
||||
let normalizedRevalidate = undefined;
|
||||
if (revalidateVal === false) {
|
||||
normalizedRevalidate = revalidateVal;
|
||||
} else if (typeof revalidateVal === "number" && !isNaN(revalidateVal) && revalidateVal > -1) {
|
||||
normalizedRevalidate = revalidateVal;
|
||||
} else if (typeof revalidateVal !== "undefined") {
|
||||
throw new Error(`Invalid revalidate value "${revalidateVal}" on "${pathname}", must be a non-negative number or "false"`);
|
||||
}
|
||||
return normalizedRevalidate;
|
||||
} catch (err) {
|
||||
// handle client component error from attempting to check revalidate value
|
||||
if (err instanceof Error && err.message.includes("Invalid revalidate")) {
|
||||
throw err;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
export function validateTags(tags, description) {
|
||||
const validTags = [];
|
||||
const invalidTags = [];
|
||||
for(let i = 0; i < tags.length; i++){
|
||||
const tag = tags[i];
|
||||
if (typeof tag !== "string") {
|
||||
invalidTags.push({
|
||||
tag,
|
||||
reason: "invalid type, must be a string"
|
||||
});
|
||||
} else if (tag.length > NEXT_CACHE_TAG_MAX_LENGTH) {
|
||||
invalidTags.push({
|
||||
tag,
|
||||
reason: `exceeded max length of ${NEXT_CACHE_TAG_MAX_LENGTH}`
|
||||
});
|
||||
} else {
|
||||
validTags.push(tag);
|
||||
}
|
||||
if (validTags.length > NEXT_CACHE_TAG_MAX_ITEMS) {
|
||||
console.warn(`Warning: exceeded max tag count for ${description}, dropped tags:`, tags.slice(i).join(", "));
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (invalidTags.length > 0) {
|
||||
console.warn(`Warning: invalid tags passed to ${description}: `);
|
||||
for (const { tag, reason } of invalidTags){
|
||||
console.log(`tag: "${tag}" ${reason}`);
|
||||
}
|
||||
}
|
||||
return validTags;
|
||||
}
|
||||
const getDerivedTags = (pathname)=>{
|
||||
const derivedTags = [
|
||||
`/layout`
|
||||
];
|
||||
// we automatically add the current path segments as tags
|
||||
// for revalidatePath handling
|
||||
if (pathname.startsWith("/")) {
|
||||
const pathnameParts = pathname.split("/");
|
||||
for(let i = 1; i < pathnameParts.length + 1; i++){
|
||||
let curPathname = pathnameParts.slice(0, i).join("/");
|
||||
if (curPathname) {
|
||||
// all derived tags other than the page are layout tags
|
||||
if (!curPathname.endsWith("/page") && !curPathname.endsWith("/route")) {
|
||||
curPathname = `${curPathname}${!curPathname.endsWith("/") ? "/" : ""}layout`;
|
||||
}
|
||||
derivedTags.push(curPathname);
|
||||
}
|
||||
}
|
||||
}
|
||||
return derivedTags;
|
||||
};
|
||||
export function addImplicitTags(staticGenerationStore) {
|
||||
const newTags = [];
|
||||
const { pagePath, urlPathname } = staticGenerationStore;
|
||||
if (!Array.isArray(staticGenerationStore.tags)) {
|
||||
staticGenerationStore.tags = [];
|
||||
}
|
||||
if (pagePath) {
|
||||
const derivedTags = getDerivedTags(pagePath);
|
||||
for (let tag of derivedTags){
|
||||
var _staticGenerationStore_tags;
|
||||
tag = `${NEXT_CACHE_IMPLICIT_TAG_ID}${tag}`;
|
||||
if (!((_staticGenerationStore_tags = staticGenerationStore.tags) == null ? void 0 : _staticGenerationStore_tags.includes(tag))) {
|
||||
staticGenerationStore.tags.push(tag);
|
||||
}
|
||||
newTags.push(tag);
|
||||
}
|
||||
}
|
||||
if (urlPathname) {
|
||||
var _staticGenerationStore_tags1;
|
||||
const parsedPathname = new URL(urlPathname, "http://n").pathname;
|
||||
const tag = `${NEXT_CACHE_IMPLICIT_TAG_ID}${parsedPathname}`;
|
||||
if (!((_staticGenerationStore_tags1 = staticGenerationStore.tags) == null ? void 0 : _staticGenerationStore_tags1.includes(tag))) {
|
||||
staticGenerationStore.tags.push(tag);
|
||||
}
|
||||
newTags.push(tag);
|
||||
}
|
||||
return newTags;
|
||||
}
|
||||
function trackFetchMetric(staticGenerationStore, ctx) {
|
||||
var _staticGenerationStore_requestEndedState;
|
||||
if (!staticGenerationStore || ((_staticGenerationStore_requestEndedState = staticGenerationStore.requestEndedState) == null ? void 0 : _staticGenerationStore_requestEndedState.ended) || process.env.NODE_ENV !== "development") {
|
||||
return;
|
||||
}
|
||||
staticGenerationStore.fetchMetrics ??= [];
|
||||
const dedupeFields = [
|
||||
"url",
|
||||
"status",
|
||||
"method"
|
||||
];
|
||||
// don't add metric if one already exists for the fetch
|
||||
if (staticGenerationStore.fetchMetrics.some((metric)=>dedupeFields.every((field)=>metric[field] === ctx[field]))) {
|
||||
return;
|
||||
}
|
||||
staticGenerationStore.fetchMetrics.push({
|
||||
...ctx,
|
||||
end: Date.now(),
|
||||
idx: staticGenerationStore.nextFetchId || 0
|
||||
});
|
||||
// only store top 10 metrics to avoid storing too many
|
||||
if (staticGenerationStore.fetchMetrics.length > 10) {
|
||||
// sort slowest first as these should be highlighted
|
||||
staticGenerationStore.fetchMetrics.sort((a, b)=>{
|
||||
const aDur = a.end - a.start;
|
||||
const bDur = b.end - b.start;
|
||||
if (aDur < bDur) {
|
||||
return 1;
|
||||
} else if (aDur > bDur) {
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
// now grab top 10
|
||||
staticGenerationStore.fetchMetrics = staticGenerationStore.fetchMetrics.slice(0, 10);
|
||||
}
|
||||
}
|
||||
function createPatchedFetcher(originFetch, { serverHooks: { DynamicServerError }, staticGenerationAsyncStorage }) {
|
||||
// Create the patched fetch function. We don't set the type here, as it's
|
||||
// verified as the return value of this function.
|
||||
const patched = async (input, init)=>{
|
||||
var _init_method, _init_next;
|
||||
let url;
|
||||
try {
|
||||
url = new URL(input instanceof Request ? input.url : input);
|
||||
url.username = "";
|
||||
url.password = "";
|
||||
} catch {
|
||||
// Error caused by malformed URL should be handled by native fetch
|
||||
url = undefined;
|
||||
}
|
||||
const fetchUrl = (url == null ? void 0 : url.href) ?? "";
|
||||
const fetchStart = Date.now();
|
||||
const method = (init == null ? void 0 : (_init_method = init.method) == null ? void 0 : _init_method.toUpperCase()) || "GET";
|
||||
// Do create a new span trace for internal fetches in the
|
||||
// non-verbose mode.
|
||||
const isInternal = (init == null ? void 0 : (_init_next = init.next) == null ? void 0 : _init_next.internal) === true;
|
||||
const hideSpan = process.env.NEXT_OTEL_FETCH_DISABLED === "1";
|
||||
return getTracer().trace(isInternal ? NextNodeServerSpan.internalFetch : AppRenderSpan.fetch, {
|
||||
hideSpan,
|
||||
kind: SpanKind.CLIENT,
|
||||
spanName: [
|
||||
"fetch",
|
||||
method,
|
||||
fetchUrl
|
||||
].filter(Boolean).join(" "),
|
||||
attributes: {
|
||||
"http.url": fetchUrl,
|
||||
"http.method": method,
|
||||
"net.peer.name": url == null ? void 0 : url.hostname,
|
||||
"net.peer.port": (url == null ? void 0 : url.port) || undefined
|
||||
}
|
||||
}, async ()=>{
|
||||
var _getRequestMeta;
|
||||
// If this is an internal fetch, we should not do any special treatment.
|
||||
if (isInternal) return originFetch(input, init);
|
||||
const staticGenerationStore = staticGenerationAsyncStorage.getStore();
|
||||
// If the staticGenerationStore is not available, we can't do any
|
||||
// special treatment of fetch, therefore fallback to the original
|
||||
// fetch implementation.
|
||||
if (!staticGenerationStore || staticGenerationStore.isDraftMode) {
|
||||
return originFetch(input, init);
|
||||
}
|
||||
const isRequestInput = input && typeof input === "object" && typeof input.method === "string";
|
||||
const getRequestMeta = (field)=>{
|
||||
// If request input is present but init is not, retrieve from input first.
|
||||
const value = init == null ? void 0 : init[field];
|
||||
return value || (isRequestInput ? input[field] : null);
|
||||
};
|
||||
let revalidate = undefined;
|
||||
const getNextField = (field)=>{
|
||||
var _init_next, _init_next1, _input_next;
|
||||
return typeof (init == null ? void 0 : (_init_next = init.next) == null ? void 0 : _init_next[field]) !== "undefined" ? init == null ? void 0 : (_init_next1 = init.next) == null ? void 0 : _init_next1[field] : isRequestInput ? (_input_next = input.next) == null ? void 0 : _input_next[field] : undefined;
|
||||
};
|
||||
// RequestInit doesn't keep extra fields e.g. next so it's
|
||||
// only available if init is used separate
|
||||
let curRevalidate = getNextField("revalidate");
|
||||
const tags = validateTags(getNextField("tags") || [], `fetch ${input.toString()}`);
|
||||
if (Array.isArray(tags)) {
|
||||
if (!staticGenerationStore.tags) {
|
||||
staticGenerationStore.tags = [];
|
||||
}
|
||||
for (const tag of tags){
|
||||
if (!staticGenerationStore.tags.includes(tag)) {
|
||||
staticGenerationStore.tags.push(tag);
|
||||
}
|
||||
}
|
||||
}
|
||||
const implicitTags = addImplicitTags(staticGenerationStore);
|
||||
const fetchCacheMode = staticGenerationStore.fetchCache;
|
||||
const isUsingNoStore = !!staticGenerationStore.isUnstableNoStore;
|
||||
let _cache = getRequestMeta("cache");
|
||||
let cacheReason = "";
|
||||
if (typeof _cache === "string" && typeof curRevalidate !== "undefined") {
|
||||
// when providing fetch with a Request input, it'll automatically set a cache value of 'default'
|
||||
// we only want to warn if the user is explicitly setting a cache value
|
||||
if (!(isRequestInput && _cache === "default")) {
|
||||
Log.warn(`fetch for ${fetchUrl} on ${staticGenerationStore.urlPathname} specified "cache: ${_cache}" and "revalidate: ${curRevalidate}", only one should be specified.`);
|
||||
}
|
||||
_cache = undefined;
|
||||
}
|
||||
if (_cache === "force-cache") {
|
||||
curRevalidate = false;
|
||||
} else if (_cache === "no-cache" || _cache === "no-store" || fetchCacheMode === "force-no-store" || fetchCacheMode === "only-no-store") {
|
||||
curRevalidate = 0;
|
||||
}
|
||||
if (_cache === "no-cache" || _cache === "no-store") {
|
||||
cacheReason = `cache: ${_cache}`;
|
||||
}
|
||||
revalidate = validateRevalidate(curRevalidate, staticGenerationStore.urlPathname);
|
||||
const _headers = getRequestMeta("headers");
|
||||
const initHeaders = typeof (_headers == null ? void 0 : _headers.get) === "function" ? _headers : new Headers(_headers || {});
|
||||
const hasUnCacheableHeader = initHeaders.get("authorization") || initHeaders.get("cookie");
|
||||
const isUnCacheableMethod = ![
|
||||
"get",
|
||||
"head"
|
||||
].includes(((_getRequestMeta = getRequestMeta("method")) == null ? void 0 : _getRequestMeta.toLowerCase()) || "get");
|
||||
// if there are authorized headers or a POST method and
|
||||
// dynamic data usage was present above the tree we bail
|
||||
// e.g. if cookies() is used before an authed/POST fetch
|
||||
const autoNoCache = (hasUnCacheableHeader || isUnCacheableMethod) && staticGenerationStore.revalidate === 0;
|
||||
switch(fetchCacheMode){
|
||||
case "force-no-store":
|
||||
{
|
||||
cacheReason = "fetchCache = force-no-store";
|
||||
break;
|
||||
}
|
||||
case "only-no-store":
|
||||
{
|
||||
if (_cache === "force-cache" || typeof revalidate !== "undefined" && (revalidate === false || revalidate > 0)) {
|
||||
throw new Error(`cache: 'force-cache' used on fetch for ${fetchUrl} with 'export const fetchCache = 'only-no-store'`);
|
||||
}
|
||||
cacheReason = "fetchCache = only-no-store";
|
||||
break;
|
||||
}
|
||||
case "only-cache":
|
||||
{
|
||||
if (_cache === "no-store") {
|
||||
throw new Error(`cache: 'no-store' used on fetch for ${fetchUrl} with 'export const fetchCache = 'only-cache'`);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case "force-cache":
|
||||
{
|
||||
if (typeof curRevalidate === "undefined" || curRevalidate === 0) {
|
||||
cacheReason = "fetchCache = force-cache";
|
||||
revalidate = false;
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
}
|
||||
if (typeof revalidate === "undefined") {
|
||||
if (fetchCacheMode === "default-cache") {
|
||||
revalidate = false;
|
||||
cacheReason = "fetchCache = default-cache";
|
||||
} else if (autoNoCache) {
|
||||
revalidate = 0;
|
||||
cacheReason = "auto no cache";
|
||||
} else if (fetchCacheMode === "default-no-store") {
|
||||
revalidate = 0;
|
||||
cacheReason = "fetchCache = default-no-store";
|
||||
} else if (isUsingNoStore) {
|
||||
revalidate = 0;
|
||||
cacheReason = "noStore call";
|
||||
} else {
|
||||
cacheReason = "auto cache";
|
||||
revalidate = typeof staticGenerationStore.revalidate === "boolean" || typeof staticGenerationStore.revalidate === "undefined" ? false : staticGenerationStore.revalidate;
|
||||
}
|
||||
} else if (!cacheReason) {
|
||||
cacheReason = `revalidate: ${revalidate}`;
|
||||
}
|
||||
if (// when force static is configured we don't bail from
|
||||
// `revalidate: 0` values
|
||||
!(staticGenerationStore.forceStatic && revalidate === 0) && // we don't consider autoNoCache to switch to dynamic during
|
||||
// revalidate although if it occurs during build we do
|
||||
!autoNoCache && // If the revalidate value isn't currently set or the value is less
|
||||
// than the current revalidate value, we should update the revalidate
|
||||
// value.
|
||||
(typeof staticGenerationStore.revalidate === "undefined" || typeof revalidate === "number" && (staticGenerationStore.revalidate === false || typeof staticGenerationStore.revalidate === "number" && revalidate < staticGenerationStore.revalidate))) {
|
||||
// If we were setting the revalidate value to 0, we should try to
|
||||
// postpone instead first.
|
||||
if (revalidate === 0) {
|
||||
trackDynamicFetch(staticGenerationStore, "revalidate: 0");
|
||||
}
|
||||
staticGenerationStore.revalidate = revalidate;
|
||||
}
|
||||
const isCacheableRevalidate = typeof revalidate === "number" && revalidate > 0 || revalidate === false;
|
||||
let cacheKey;
|
||||
if (staticGenerationStore.incrementalCache && isCacheableRevalidate) {
|
||||
try {
|
||||
cacheKey = await staticGenerationStore.incrementalCache.fetchCacheKey(fetchUrl, isRequestInput ? input : init);
|
||||
} catch (err) {
|
||||
console.error(`Failed to generate cache key for`, input);
|
||||
}
|
||||
}
|
||||
const fetchIdx = staticGenerationStore.nextFetchId ?? 1;
|
||||
staticGenerationStore.nextFetchId = fetchIdx + 1;
|
||||
const normalizedRevalidate = typeof revalidate !== "number" ? CACHE_ONE_YEAR : revalidate;
|
||||
const doOriginalFetch = async (isStale, cacheReasonOverride)=>{
|
||||
const requestInputFields = [
|
||||
"cache",
|
||||
"credentials",
|
||||
"headers",
|
||||
"integrity",
|
||||
"keepalive",
|
||||
"method",
|
||||
"mode",
|
||||
"redirect",
|
||||
"referrer",
|
||||
"referrerPolicy",
|
||||
"window",
|
||||
"duplex",
|
||||
// don't pass through signal when revalidating
|
||||
...isStale ? [] : [
|
||||
"signal"
|
||||
]
|
||||
];
|
||||
if (isRequestInput) {
|
||||
const reqInput = input;
|
||||
const reqOptions = {
|
||||
body: reqInput._ogBody || reqInput.body
|
||||
};
|
||||
for (const field of requestInputFields){
|
||||
// @ts-expect-error custom fields
|
||||
reqOptions[field] = reqInput[field];
|
||||
}
|
||||
input = new Request(reqInput.url, reqOptions);
|
||||
} else if (init) {
|
||||
const { _ogBody, body, signal, ...otherInput } = init;
|
||||
init = {
|
||||
...otherInput,
|
||||
body: _ogBody || body,
|
||||
signal: isStale ? undefined : signal
|
||||
};
|
||||
}
|
||||
// add metadata to init without editing the original
|
||||
const clonedInit = {
|
||||
...init,
|
||||
next: {
|
||||
...init == null ? void 0 : init.next,
|
||||
fetchType: "origin",
|
||||
fetchIdx
|
||||
}
|
||||
};
|
||||
return originFetch(input, clonedInit).then(async (res)=>{
|
||||
if (!isStale) {
|
||||
trackFetchMetric(staticGenerationStore, {
|
||||
start: fetchStart,
|
||||
url: fetchUrl,
|
||||
cacheReason: cacheReasonOverride || cacheReason,
|
||||
cacheStatus: revalidate === 0 || cacheReasonOverride ? "skip" : "miss",
|
||||
status: res.status,
|
||||
method: clonedInit.method || "GET"
|
||||
});
|
||||
}
|
||||
if (res.status === 200 && staticGenerationStore.incrementalCache && cacheKey && isCacheableRevalidate) {
|
||||
const bodyBuffer = Buffer.from(await res.arrayBuffer());
|
||||
try {
|
||||
await staticGenerationStore.incrementalCache.set(cacheKey, {
|
||||
kind: "FETCH",
|
||||
data: {
|
||||
headers: Object.fromEntries(res.headers.entries()),
|
||||
body: bodyBuffer.toString("base64"),
|
||||
status: res.status,
|
||||
url: res.url
|
||||
},
|
||||
revalidate: normalizedRevalidate
|
||||
}, {
|
||||
fetchCache: true,
|
||||
revalidate,
|
||||
fetchUrl,
|
||||
fetchIdx,
|
||||
tags
|
||||
});
|
||||
} catch (err) {
|
||||
console.warn(`Failed to set fetch cache`, input, err);
|
||||
}
|
||||
const response = new Response(bodyBuffer, {
|
||||
headers: new Headers(res.headers),
|
||||
status: res.status
|
||||
});
|
||||
Object.defineProperty(response, "url", {
|
||||
value: res.url
|
||||
});
|
||||
return response;
|
||||
}
|
||||
return res;
|
||||
});
|
||||
};
|
||||
let handleUnlock = ()=>Promise.resolve();
|
||||
let cacheReasonOverride;
|
||||
let isForegroundRevalidate = false;
|
||||
if (cacheKey && staticGenerationStore.incrementalCache) {
|
||||
handleUnlock = await staticGenerationStore.incrementalCache.lock(cacheKey);
|
||||
const entry = staticGenerationStore.isOnDemandRevalidate ? null : await staticGenerationStore.incrementalCache.get(cacheKey, {
|
||||
kindHint: "fetch",
|
||||
revalidate,
|
||||
fetchUrl,
|
||||
fetchIdx,
|
||||
tags,
|
||||
softTags: implicitTags
|
||||
});
|
||||
if (entry) {
|
||||
await handleUnlock();
|
||||
} else {
|
||||
// in dev, incremental cache response will be null in case the browser adds `cache-control: no-cache` in the request headers
|
||||
cacheReasonOverride = "cache-control: no-cache (hard refresh)";
|
||||
}
|
||||
if ((entry == null ? void 0 : entry.value) && entry.value.kind === "FETCH") {
|
||||
// when stale and is revalidating we wait for fresh data
|
||||
// so the revalidated entry has the updated data
|
||||
if (staticGenerationStore.isRevalidate && entry.isStale) {
|
||||
isForegroundRevalidate = true;
|
||||
} else {
|
||||
if (entry.isStale) {
|
||||
staticGenerationStore.pendingRevalidates ??= {};
|
||||
if (!staticGenerationStore.pendingRevalidates[cacheKey]) {
|
||||
staticGenerationStore.pendingRevalidates[cacheKey] = doOriginalFetch(true).catch(console.error).finally(()=>{
|
||||
staticGenerationStore.pendingRevalidates ??= {};
|
||||
delete staticGenerationStore.pendingRevalidates[cacheKey || ""];
|
||||
});
|
||||
}
|
||||
}
|
||||
const resData = entry.value.data;
|
||||
trackFetchMetric(staticGenerationStore, {
|
||||
start: fetchStart,
|
||||
url: fetchUrl,
|
||||
cacheReason,
|
||||
cacheStatus: "hit",
|
||||
status: resData.status || 200,
|
||||
method: (init == null ? void 0 : init.method) || "GET"
|
||||
});
|
||||
const response = new Response(Buffer.from(resData.body, "base64"), {
|
||||
headers: resData.headers,
|
||||
status: resData.status
|
||||
});
|
||||
Object.defineProperty(response, "url", {
|
||||
value: entry.value.data.url
|
||||
});
|
||||
return response;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (staticGenerationStore.isStaticGeneration && init && typeof init === "object") {
|
||||
const { cache } = init;
|
||||
// Delete `cache` property as Cloudflare Workers will throw an error
|
||||
if (isEdgeRuntime) delete init.cache;
|
||||
if (!staticGenerationStore.forceStatic && cache === "no-store") {
|
||||
const dynamicUsageReason = `no-store fetch ${input}${staticGenerationStore.urlPathname ? ` ${staticGenerationStore.urlPathname}` : ""}`;
|
||||
// If enabled, we should bail out of static generation.
|
||||
trackDynamicFetch(staticGenerationStore, dynamicUsageReason);
|
||||
// PPR is not enabled, or React postpone is not available, we
|
||||
// should set the revalidate to 0.
|
||||
staticGenerationStore.revalidate = 0;
|
||||
const err = new DynamicServerError(dynamicUsageReason);
|
||||
staticGenerationStore.dynamicUsageErr = err;
|
||||
staticGenerationStore.dynamicUsageDescription = dynamicUsageReason;
|
||||
throw err;
|
||||
}
|
||||
const hasNextConfig = "next" in init;
|
||||
const { next = {} } = init;
|
||||
if (typeof next.revalidate === "number" && (typeof staticGenerationStore.revalidate === "undefined" || typeof staticGenerationStore.revalidate === "number" && next.revalidate < staticGenerationStore.revalidate)) {
|
||||
if (!staticGenerationStore.forceDynamic && !staticGenerationStore.forceStatic && next.revalidate === 0) {
|
||||
const dynamicUsageReason = `revalidate: 0 fetch ${input}${staticGenerationStore.urlPathname ? ` ${staticGenerationStore.urlPathname}` : ""}`;
|
||||
// If enabled, we should bail out of static generation.
|
||||
trackDynamicFetch(staticGenerationStore, dynamicUsageReason);
|
||||
const err = new DynamicServerError(dynamicUsageReason);
|
||||
staticGenerationStore.dynamicUsageErr = err;
|
||||
staticGenerationStore.dynamicUsageDescription = dynamicUsageReason;
|
||||
throw err;
|
||||
}
|
||||
if (!staticGenerationStore.forceStatic || next.revalidate !== 0) {
|
||||
staticGenerationStore.revalidate = next.revalidate;
|
||||
}
|
||||
}
|
||||
if (hasNextConfig) delete init.next;
|
||||
}
|
||||
// if we are revalidating the whole page via time or on-demand and
|
||||
// the fetch cache entry is stale we should still de-dupe the
|
||||
// origin hit if it's a cache-able entry
|
||||
if (cacheKey && isForegroundRevalidate) {
|
||||
staticGenerationStore.pendingRevalidates ??= {};
|
||||
const pendingRevalidate = staticGenerationStore.pendingRevalidates[cacheKey];
|
||||
if (pendingRevalidate) {
|
||||
const res = await pendingRevalidate;
|
||||
return res.clone();
|
||||
}
|
||||
return staticGenerationStore.pendingRevalidates[cacheKey] = doOriginalFetch(true, cacheReasonOverride).finally(async ()=>{
|
||||
staticGenerationStore.pendingRevalidates ??= {};
|
||||
delete staticGenerationStore.pendingRevalidates[cacheKey || ""];
|
||||
await handleUnlock();
|
||||
});
|
||||
} else {
|
||||
return doOriginalFetch(false, cacheReasonOverride).finally(handleUnlock);
|
||||
}
|
||||
});
|
||||
};
|
||||
// Attach the necessary properties to the patched fetch function.
|
||||
patched.__nextPatched = true;
|
||||
patched.__nextGetStaticStore = ()=>staticGenerationAsyncStorage;
|
||||
patched._nextOriginalFetch = originFetch;
|
||||
return patched;
|
||||
}
|
||||
// we patch fetch to collect cache information used for
|
||||
// determining if a page is static or not
|
||||
export function patchFetch(options) {
|
||||
// If we've already patched fetch, we should not patch it again.
|
||||
if (isPatchedFetch(globalThis.fetch)) return;
|
||||
// Grab the original fetch function. We'll attach this so we can use it in
|
||||
// the patched fetch function.
|
||||
const original = globalThis.fetch;
|
||||
// Set the global fetch to the patched fetch.
|
||||
globalThis.fetch = createPatchedFetcher(original, options);
|
||||
}
|
||||
|
||||
//# sourceMappingURL=patch-fetch.js.map
|
||||
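For orientation, a minimal sketch (not part of the vendored file above) of the technique patch-fetch.js implements: wrapping globalThis.fetch so each call can record cache metadata used to decide whether a page is static. The names createPatchedFetcherSketch and store are hypothetical placeholders.

const originFetch = globalThis.fetch;

function createPatchedFetcherSketch(store) {
  const patched = async (input, init = {}) => {
    // Record the smallest revalidate seen so the page's effective revalidate can be derived.
    const revalidate = init.next?.revalidate;
    if (typeof revalidate === "number") {
      store.revalidate = Math.min(store.revalidate ?? Infinity, revalidate);
    }
    return originFetch(input, init);
  };
  // Mark the wrapper so repeated patching can be detected, as in isPatchedFetch above.
  patched.__nextPatched = true;
  return patched;
}

// Example: globalThis.fetch = createPatchedFetcherSketch({ revalidate: undefined });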
1
node_modules/next/dist/esm/server/lib/patch-fetch.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/patch-fetch.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
71
node_modules/next/dist/esm/server/lib/render-server.js
generated
vendored
Normal file
71
node_modules/next/dist/esm/server/lib/render-server.js
generated
vendored
Normal file
@ -0,0 +1,71 @@
|
||||
import next from "../next";
|
||||
let initializations = {};
|
||||
let sandboxContext;
|
||||
let requireCacheHotReloader;
|
||||
if (process.env.NODE_ENV !== "production") {
|
||||
sandboxContext = require("../web/sandbox/context");
|
||||
requireCacheHotReloader = require("../../build/webpack/plugins/nextjs-require-cache-hot-reloader");
|
||||
}
|
||||
export function clearAllModuleContexts() {
|
||||
return sandboxContext == null ? void 0 : sandboxContext.clearAllModuleContexts();
|
||||
}
|
||||
export function clearModuleContext(target) {
|
||||
return sandboxContext == null ? void 0 : sandboxContext.clearModuleContext(target);
|
||||
}
|
||||
export function deleteAppClientCache() {
|
||||
return requireCacheHotReloader == null ? void 0 : requireCacheHotReloader.deleteAppClientCache();
|
||||
}
|
||||
export function deleteCache(filePaths) {
|
||||
for (const filePath of filePaths){
|
||||
requireCacheHotReloader == null ? void 0 : requireCacheHotReloader.deleteCache(filePath);
|
||||
}
|
||||
}
|
||||
export async function propagateServerField(dir, field, value) {
|
||||
const initialization = await initializations[dir];
|
||||
if (!initialization) {
|
||||
throw new Error("Invariant cant propagate server field, no app initialized");
|
||||
}
|
||||
const { app } = initialization;
|
||||
let appField = app.server;
|
||||
if (appField) {
|
||||
if (typeof appField[field] === "function") {
|
||||
await appField[field].apply(app.server, Array.isArray(value) ? value : []);
|
||||
} else {
|
||||
appField[field] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
async function initializeImpl(opts) {
|
||||
const type = process.env.__NEXT_PRIVATE_RENDER_WORKER;
|
||||
if (type) {
|
||||
process.title = "next-render-worker-" + type;
|
||||
}
|
||||
let requestHandler;
|
||||
let upgradeHandler;
|
||||
const app = next({
|
||||
...opts,
|
||||
hostname: opts.hostname || "localhost",
|
||||
customServer: false,
|
||||
httpServer: opts.server,
|
||||
port: opts.port,
|
||||
isNodeDebugging: opts.isNodeDebugging
|
||||
});
|
||||
requestHandler = app.getRequestHandler();
|
||||
upgradeHandler = app.getUpgradeHandler();
|
||||
await app.prepare(opts.serverFields);
|
||||
return {
|
||||
requestHandler,
|
||||
upgradeHandler,
|
||||
app
|
||||
};
|
||||
}
|
||||
export async function initialize(opts) {
|
||||
// if we already set up the server, return, as we only need to do
|
||||
// this on first worker boot
|
||||
if (initializations[opts.dir]) {
|
||||
return initializations[opts.dir];
|
||||
}
|
||||
return initializations[opts.dir] = initializeImpl(opts);
|
||||
}
|
||||
|
||||
//# sourceMappingURL=render-server.js.map
|
||||
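A minimal sketch (not vendored code) of the memoization pattern used by initialize above: the promise returned by the first initialization is stored per directory, so concurrent callers share a single setup. setupApp stands in for initializeImpl and is hypothetical.

const cache = {};

async function setupApp(dir) {
  // Placeholder for the real app setup work.
  return { dir, startedAt: Date.now() };
}

function initializeOnce(dir) {
  // Store the promise itself so parallel callers await the same initialization.
  if (!cache[dir]) {
    cache[dir] = setupApp(dir);
  }
  return cache[dir];
}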
1
node_modules/next/dist/esm/server/lib/render-server.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/render-server.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/lib/render-server.ts"],"names":["next","initializations","sandboxContext","requireCacheHotReloader","process","env","NODE_ENV","require","clearAllModuleContexts","clearModuleContext","target","deleteAppClientCache","deleteCache","filePaths","filePath","propagateServerField","dir","field","value","initialization","Error","app","appField","server","apply","Array","isArray","initializeImpl","opts","type","__NEXT_PRIVATE_RENDER_WORKER","title","requestHandler","upgradeHandler","hostname","customServer","httpServer","port","isNodeDebugging","getRequestHandler","getUpgradeHandler","prepare","serverFields","initialize"],"mappings":"AAIA,OAAOA,UAAU,UAAS;AAG1B,IAAIC,kBAYA,CAAC;AAEL,IAAIC;AACJ,IAAIC;AAIJ,IAAIC,QAAQC,GAAG,CAACC,QAAQ,KAAK,cAAc;IACzCJ,iBAAiBK,QAAQ;IACzBJ,0BAA0BI,QAAQ;AACpC;AAEA,OAAO,SAASC;IACd,OAAON,kCAAAA,eAAgBM,sBAAsB;AAC/C;AAEA,OAAO,SAASC,mBAAmBC,MAAc;IAC/C,OAAOR,kCAAAA,eAAgBO,kBAAkB,CAACC;AAC5C;AAEA,OAAO,SAASC;IACd,OAAOR,2CAAAA,wBAAyBQ,oBAAoB;AACtD;AAEA,OAAO,SAASC,YAAYC,SAAmB;IAC7C,KAAK,MAAMC,YAAYD,UAAW;QAChCV,2CAAAA,wBAAyBS,WAAW,CAACE;IACvC;AACF;AAEA,OAAO,eAAeC,qBACpBC,GAAW,EACXC,KAA8B,EAC9BC,KAAU;IAEV,MAAMC,iBAAiB,MAAMlB,eAAe,CAACe,IAAI;IACjD,IAAI,CAACG,gBAAgB;QACnB,MAAM,IAAIC,MAAM;IAClB;IACA,MAAM,EAAEC,GAAG,EAAE,GAAGF;IAChB,IAAIG,WAAW,AAACD,IAAYE,MAAM;IAElC,IAAID,UAAU;QACZ,IAAI,OAAOA,QAAQ,CAACL,MAAM,KAAK,YAAY;YACzC,MAAMK,QAAQ,CAACL,MAAM,CAACO,KAAK,CACzB,AAACH,IAAYE,MAAM,EACnBE,MAAMC,OAAO,CAACR,SAASA,QAAQ,EAAE;QAErC,OAAO;YACLI,QAAQ,CAACL,MAAM,GAAGC;QACpB;IACF;AACF;AAEA,eAAeS,eAAeC,IAgB7B;IACC,MAAMC,OAAOzB,QAAQC,GAAG,CAACyB,4BAA4B;IACrD,IAAID,MAAM;QACRzB,QAAQ2B,KAAK,GAAG,wBAAwBF;IAC1C;IAEA,IAAIG;IACJ,IAAIC;IAEJ,MAAMZ,MAAMrB,KAAK;QACf,GAAG4B,IAAI;QACPM,UAAUN,KAAKM,QAAQ,IAAI;QAC3BC,cAAc;QACdC,YAAYR,KAAKL,MAAM;QACvBc,MAAMT,KAAKS,IAAI;QACfC,iBAAiBV,KAAKU,eAAe;IACvC;IACAN,iBAAiBX,IAAIkB,iBAAiB;IACtCN,iBAAiBZ,IAAImB,iBAAiB;IAEtC,MAAMnB,IAAIoB,OAAO,CAACb,KAAKc,YAAY;IAEnC,OAAO;QACLV;QACAC;QACAZ;IACF;AACF;AAEA,OAAO,eAAesB,WACpBf,IAA0C;IAU1C,8DAA8D;IAC9D,4BAA4B;IAC5B,IAAI3B,eAAe,CAAC2B,KAAKZ,GAAG,CAAC,EAAE;QAC7B,OAAOf,eAAe,CAAC2B,KAAKZ,GAAG,CAAC;IAClC;IACA,OAAQf,eAAe,CAAC2B,KAAKZ,GAAG,CAAC,GAAGW,eAAeC;AACrD"}
|
||||
12
node_modules/next/dist/esm/server/lib/revalidate.js
generated
vendored
Normal file
12
node_modules/next/dist/esm/server/lib/revalidate.js
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
import { CACHE_ONE_YEAR } from "../../lib/constants";
|
||||
export function formatRevalidate({ revalidate, swrDelta }) {
|
||||
const swrHeader = swrDelta ? `stale-while-revalidate=${swrDelta}` : "stale-while-revalidate";
|
||||
if (revalidate === 0) {
|
||||
return "private, no-cache, no-store, max-age=0, must-revalidate";
|
||||
} else if (typeof revalidate === "number") {
|
||||
return `s-maxage=${revalidate}, ${swrHeader}`;
|
||||
}
|
||||
return `s-maxage=${CACHE_ONE_YEAR}, ${swrHeader}`;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=revalidate.js.map
|
||||
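For reference, a usage sketch of formatRevalidate above; the exact value of CACHE_ONE_YEAR comes from ../../lib/constants and is shown here as a placeholder.

// formatRevalidate({ revalidate: 0 })
//   -> "private, no-cache, no-store, max-age=0, must-revalidate"
// formatRevalidate({ revalidate: 60 })
//   -> "s-maxage=60, stale-while-revalidate"
// formatRevalidate({ revalidate: 60, swrDelta: 30 })
//   -> "s-maxage=60, stale-while-revalidate=30"
// formatRevalidate({ revalidate: false })
//   -> "s-maxage=<CACHE_ONE_YEAR>, stale-while-revalidate"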
1
node_modules/next/dist/esm/server/lib/revalidate.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/revalidate.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/lib/revalidate.ts"],"names":["CACHE_ONE_YEAR","formatRevalidate","revalidate","swrDelta","swrHeader"],"mappings":"AAAA,SAASA,cAAc,QAAQ,sBAAqB;AAYpD,OAAO,SAASC,iBAAiB,EAC/BC,UAAU,EACVC,QAAQ,EAIT;IACC,MAAMC,YAAYD,WACd,CAAC,uBAAuB,EAAEA,SAAS,CAAC,GACpC;IAEJ,IAAID,eAAe,GAAG;QACpB,OAAO;IACT,OAAO,IAAI,OAAOA,eAAe,UAAU;QACzC,OAAO,CAAC,SAAS,EAAEA,WAAW,EAAE,EAAEE,UAAU,CAAC;IAC/C;IAEA,OAAO,CAAC,SAAS,EAAEJ,eAAe,EAAE,EAAEI,UAAU,CAAC;AACnD"}
|
||||
476
node_modules/next/dist/esm/server/lib/router-server.js
generated
vendored
Normal file
476
node_modules/next/dist/esm/server/lib/router-server.js
generated
vendored
Normal file
@ -0,0 +1,476 @@
|
||||
// this must come first as it includes require hooks
|
||||
// This is required before other imports to ensure the require hook is set up.
|
||||
import "../node-environment";
|
||||
import "../require-hook";
|
||||
import url from "url";
|
||||
import path from "path";
|
||||
import loadConfig from "../config";
|
||||
import { serveStatic } from "../serve-static";
|
||||
import setupDebug from "next/dist/compiled/debug";
|
||||
import { DecodeError } from "../../shared/lib/utils";
|
||||
import { findPagesDir } from "../../lib/find-pages-dir";
|
||||
import { setupFsCheck } from "./router-utils/filesystem";
|
||||
import { proxyRequest } from "./router-utils/proxy-request";
|
||||
import { isAbortError, pipeToNodeResponse } from "../pipe-readable";
|
||||
import { getResolveRoutes } from "./router-utils/resolve-routes";
|
||||
import { addRequestMeta, getRequestMeta } from "../request-meta";
|
||||
import { pathHasPrefix } from "../../shared/lib/router/utils/path-has-prefix";
|
||||
import { removePathPrefix } from "../../shared/lib/router/utils/remove-path-prefix";
|
||||
import setupCompression from "next/dist/compiled/compression";
|
||||
import { NoFallbackError } from "../base-server";
|
||||
import { signalFromNodeResponse } from "../web/spec-extension/adapters/next-request";
|
||||
import { isPostpone } from "./router-utils/is-postpone";
|
||||
import { parseUrl as parseUrlUtil } from "../../shared/lib/router/utils/parse-url";
|
||||
import { PHASE_PRODUCTION_SERVER, PHASE_DEVELOPMENT_SERVER, UNDERSCORE_NOT_FOUND_ROUTE } from "../../shared/lib/constants";
|
||||
import { RedirectStatusCode } from "../../client/components/redirect-status-code";
|
||||
import { DevBundlerService } from "./dev-bundler-service";
|
||||
import { trace } from "../../trace";
|
||||
import { ensureLeadingSlash } from "../../shared/lib/page-path/ensure-leading-slash";
|
||||
import { getNextPathnameInfo } from "../../shared/lib/router/utils/get-next-pathname-info";
|
||||
import { getHostname } from "../../shared/lib/get-hostname";
|
||||
import { detectDomainLocale } from "../../shared/lib/i18n/detect-domain-locale";
|
||||
import { normalizedAssetPrefix } from "../../shared/lib/normalized-asset-prefix";
|
||||
const debug = setupDebug("next:router-server:main");
|
||||
const isNextFont = (pathname)=>pathname && /\/media\/[^/]+\.(woff|woff2|eot|ttf|otf)$/.test(pathname);
|
||||
const requestHandlers = {};
|
||||
export async function initialize(opts) {
|
||||
if (!process.env.NODE_ENV) {
|
||||
// @ts-ignore not readonly
|
||||
process.env.NODE_ENV = opts.dev ? "development" : "production";
|
||||
}
|
||||
const config = await loadConfig(opts.dev ? PHASE_DEVELOPMENT_SERVER : PHASE_PRODUCTION_SERVER, opts.dir, {
|
||||
silent: false
|
||||
});
|
||||
let compress;
|
||||
if ((config == null ? void 0 : config.compress) !== false) {
|
||||
compress = setupCompression();
|
||||
}
|
||||
const fsChecker = await setupFsCheck({
|
||||
dev: opts.dev,
|
||||
dir: opts.dir,
|
||||
config,
|
||||
minimalMode: opts.minimalMode
|
||||
});
|
||||
const renderServer = {};
|
||||
let developmentBundler;
|
||||
let devBundlerService;
|
||||
if (opts.dev) {
|
||||
const { Telemetry } = require("../../telemetry/storage");
|
||||
const telemetry = new Telemetry({
|
||||
distDir: path.join(opts.dir, config.distDir)
|
||||
});
|
||||
const { pagesDir, appDir } = findPagesDir(opts.dir);
|
||||
const { setupDevBundler } = require("./router-utils/setup-dev-bundler");
|
||||
const setupDevBundlerSpan = opts.startServerSpan ? opts.startServerSpan.traceChild("setup-dev-bundler") : trace("setup-dev-bundler");
|
||||
developmentBundler = await setupDevBundlerSpan.traceAsyncFn(()=>setupDevBundler({
|
||||
// Passed here but the initialization of this object happens below, doing the initialization before the setupDev call breaks.
|
||||
renderServer,
|
||||
appDir,
|
||||
pagesDir,
|
||||
telemetry,
|
||||
fsChecker,
|
||||
dir: opts.dir,
|
||||
nextConfig: config,
|
||||
isCustomServer: opts.customServer,
|
||||
turbo: !!process.env.TURBOPACK,
|
||||
port: opts.port
|
||||
}));
|
||||
devBundlerService = new DevBundlerService(developmentBundler, // The request handler is assigned below, this allows us to create a lazy
|
||||
// reference to it.
|
||||
(req, res)=>{
|
||||
return requestHandlers[opts.dir](req, res);
|
||||
});
|
||||
}
|
||||
renderServer.instance = require("./render-server");
|
||||
const requestHandlerImpl = async (req, res)=>{
|
||||
if (!opts.minimalMode && config.i18n && config.i18n.localeDetection !== false) {
|
||||
var _this;
|
||||
const urlParts = (req.url || "").split("?", 1);
|
||||
let urlNoQuery = urlParts[0] || "";
|
||||
if (config.basePath) {
|
||||
urlNoQuery = removePathPrefix(urlNoQuery, config.basePath);
|
||||
}
|
||||
const pathnameInfo = getNextPathnameInfo(urlNoQuery, {
|
||||
nextConfig: config
|
||||
});
|
||||
const domainLocale = detectDomainLocale(config.i18n.domains, getHostname({
|
||||
hostname: urlNoQuery
|
||||
}, req.headers));
|
||||
const defaultLocale = (domainLocale == null ? void 0 : domainLocale.defaultLocale) || config.i18n.defaultLocale;
|
||||
const { getLocaleRedirect } = require("../../shared/lib/i18n/get-locale-redirect");
|
||||
const parsedUrl = parseUrlUtil((_this = req.url || "") == null ? void 0 : _this.replace(/^\/+/, "/"));
|
||||
const redirect = getLocaleRedirect({
|
||||
defaultLocale,
|
||||
domainLocale,
|
||||
headers: req.headers,
|
||||
nextConfig: config,
|
||||
pathLocale: pathnameInfo.locale,
|
||||
urlParsed: {
|
||||
...parsedUrl,
|
||||
pathname: pathnameInfo.locale ? `/${pathnameInfo.locale}${urlNoQuery}` : urlNoQuery
|
||||
}
|
||||
});
|
||||
if (redirect) {
|
||||
res.setHeader("Location", redirect);
|
||||
res.statusCode = RedirectStatusCode.TemporaryRedirect;
|
||||
res.end(redirect);
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (compress) {
|
||||
// @ts-expect-error not express req/res
|
||||
compress(req, res, ()=>{});
|
||||
}
|
||||
req.on("error", (_err)=>{
|
||||
// TODO: log socket errors?
|
||||
});
|
||||
res.on("error", (_err)=>{
|
||||
// TODO: log socket errors?
|
||||
});
|
||||
const invokedOutputs = new Set();
|
||||
async function invokeRender(parsedUrl, invokePath, handleIndex, additionalRequestMeta) {
|
||||
var _fsChecker_getMiddlewareMatchers;
|
||||
// invokeRender expects /api routes to not be locale prefixed
|
||||
// so normalize here before continuing
|
||||
if (config.i18n && removePathPrefix(invokePath, config.basePath).startsWith(`/${parsedUrl.query.__nextLocale}/api`)) {
|
||||
invokePath = fsChecker.handleLocale(removePathPrefix(invokePath, config.basePath)).pathname;
|
||||
}
|
||||
if (req.headers["x-nextjs-data"] && ((_fsChecker_getMiddlewareMatchers = fsChecker.getMiddlewareMatchers()) == null ? void 0 : _fsChecker_getMiddlewareMatchers.length) && removePathPrefix(invokePath, config.basePath) === "/404") {
|
||||
res.setHeader("x-nextjs-matched-path", parsedUrl.pathname || "");
|
||||
res.statusCode = 404;
|
||||
res.setHeader("content-type", "application/json");
|
||||
res.end("{}");
|
||||
return null;
|
||||
}
|
||||
if (!handlers) {
|
||||
throw new Error("Failed to initialize render server");
|
||||
}
|
||||
addRequestMeta(req, "invokePath", invokePath);
|
||||
addRequestMeta(req, "invokeQuery", parsedUrl.query);
|
||||
addRequestMeta(req, "middlewareInvoke", false);
|
||||
for(const key in additionalRequestMeta || {}){
|
||||
addRequestMeta(req, key, additionalRequestMeta[key]);
|
||||
}
|
||||
debug("invokeRender", req.url, req.headers);
|
||||
try {
|
||||
var _renderServer_instance;
|
||||
const initResult = await (renderServer == null ? void 0 : (_renderServer_instance = renderServer.instance) == null ? void 0 : _renderServer_instance.initialize(renderServerOpts));
|
||||
try {
|
||||
await (initResult == null ? void 0 : initResult.requestHandler(req, res));
|
||||
} catch (err) {
|
||||
if (err instanceof NoFallbackError) {
|
||||
// eslint-disable-next-line
|
||||
await handleRequest(handleIndex + 1);
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
return;
|
||||
} catch (e) {
|
||||
// If the client aborts before we can receive a response object (when
|
||||
// the headers are flushed), then we can early exit without further
|
||||
// processing.
|
||||
if (isAbortError(e)) {
|
||||
return;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
const handleRequest = async (handleIndex)=>{
|
||||
if (handleIndex > 5) {
|
||||
throw new Error(`Attempted to handle request too many times ${req.url}`);
|
||||
}
|
||||
// handle hot-reloader first
|
||||
if (developmentBundler) {
|
||||
const origUrl = req.url || "/";
|
||||
if (config.basePath && pathHasPrefix(origUrl, config.basePath)) {
|
||||
req.url = removePathPrefix(origUrl, config.basePath);
|
||||
}
|
||||
const parsedUrl = url.parse(req.url || "/");
|
||||
const hotReloaderResult = await developmentBundler.hotReloader.run(req, res, parsedUrl);
|
||||
if (hotReloaderResult.finished) {
|
||||
return hotReloaderResult;
|
||||
}
|
||||
req.url = origUrl;
|
||||
}
|
||||
const { finished, parsedUrl, statusCode, resHeaders, bodyStream, matchedOutput } = await resolveRoutes({
|
||||
req,
|
||||
res,
|
||||
isUpgradeReq: false,
|
||||
signal: signalFromNodeResponse(res),
|
||||
invokedOutputs
|
||||
});
|
||||
if (res.closed || res.finished) {
|
||||
return;
|
||||
}
|
||||
if (developmentBundler && (matchedOutput == null ? void 0 : matchedOutput.type) === "devVirtualFsItem") {
|
||||
const origUrl = req.url || "/";
|
||||
if (config.basePath && pathHasPrefix(origUrl, config.basePath)) {
|
||||
req.url = removePathPrefix(origUrl, config.basePath);
|
||||
}
|
||||
if (resHeaders) {
|
||||
for (const key of Object.keys(resHeaders)){
|
||||
res.setHeader(key, resHeaders[key]);
|
||||
}
|
||||
}
|
||||
const result = await developmentBundler.requestHandler(req, res);
|
||||
if (result.finished) {
|
||||
return;
|
||||
}
|
||||
// TODO: throw invariant if we resolved to this but it wasn't handled?
|
||||
req.url = origUrl;
|
||||
}
|
||||
debug("requestHandler!", req.url, {
|
||||
matchedOutput,
|
||||
statusCode,
|
||||
resHeaders,
|
||||
bodyStream: !!bodyStream,
|
||||
parsedUrl: {
|
||||
pathname: parsedUrl.pathname,
|
||||
query: parsedUrl.query
|
||||
},
|
||||
finished
|
||||
});
|
||||
// apply any response headers from routing
|
||||
for (const key of Object.keys(resHeaders || {})){
|
||||
res.setHeader(key, resHeaders[key]);
|
||||
}
|
||||
// handle redirect
|
||||
if (!bodyStream && statusCode && statusCode > 300 && statusCode < 400) {
|
||||
const destination = url.format(parsedUrl);
|
||||
res.statusCode = statusCode;
|
||||
res.setHeader("location", destination);
|
||||
if (statusCode === RedirectStatusCode.PermanentRedirect) {
|
||||
res.setHeader("Refresh", `0;url=${destination}`);
|
||||
}
|
||||
return res.end(destination);
|
||||
}
|
||||
// handle middleware body response
|
||||
if (bodyStream) {
|
||||
res.statusCode = statusCode || 200;
|
||||
return await pipeToNodeResponse(bodyStream, res);
|
||||
}
|
||||
if (finished && parsedUrl.protocol) {
|
||||
var _getRequestMeta;
|
||||
return await proxyRequest(req, res, parsedUrl, undefined, (_getRequestMeta = getRequestMeta(req, "clonableBody")) == null ? void 0 : _getRequestMeta.cloneBodyStream(), config.experimental.proxyTimeout);
|
||||
}
|
||||
if ((matchedOutput == null ? void 0 : matchedOutput.fsPath) && matchedOutput.itemPath) {
|
||||
if (opts.dev && (fsChecker.appFiles.has(matchedOutput.itemPath) || fsChecker.pageFiles.has(matchedOutput.itemPath))) {
|
||||
res.statusCode = 500;
|
||||
await invokeRender(parsedUrl, "/_error", handleIndex, {
|
||||
invokeStatus: 500,
|
||||
invokeError: new Error(`A conflicting public file and page file was found for path ${matchedOutput.itemPath} https://nextjs.org/docs/messages/conflicting-public-file-page`)
|
||||
});
|
||||
return;
|
||||
}
|
||||
if (!res.getHeader("cache-control") && matchedOutput.type === "nextStaticFolder") {
|
||||
if (opts.dev && !isNextFont(parsedUrl.pathname)) {
|
||||
res.setHeader("Cache-Control", "no-store, must-revalidate");
|
||||
} else {
|
||||
res.setHeader("Cache-Control", "public, max-age=31536000, immutable");
|
||||
}
|
||||
}
|
||||
if (!(req.method === "GET" || req.method === "HEAD")) {
|
||||
res.setHeader("Allow", [
|
||||
"GET",
|
||||
"HEAD"
|
||||
]);
|
||||
res.statusCode = 405;
|
||||
return await invokeRender(url.parse("/405", true), "/405", handleIndex, {
|
||||
invokeStatus: 405
|
||||
});
|
||||
}
|
||||
try {
|
||||
return await serveStatic(req, res, matchedOutput.itemPath, {
|
||||
root: matchedOutput.itemsRoot,
|
||||
// Ensures that etags are not generated for static files when disabled.
|
||||
etag: config.generateEtags
|
||||
});
|
||||
} catch (err) {
|
||||
/**
|
||||
* Hardcoded every possible error status code that could be thrown by "serveStatic" method
|
||||
* This is done by searching "this.error" inside "send" module's source code:
|
||||
* https://github.com/pillarjs/send/blob/master/index.js
|
||||
* https://github.com/pillarjs/send/blob/develop/index.js
|
||||
*/ const POSSIBLE_ERROR_CODE_FROM_SERVE_STATIC = new Set([
|
||||
// send module will throw 500 when header is already sent or fs.stat error happens
|
||||
// https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L392
|
||||
// Note: we will use Next.js built-in 500 page to handle 500 errors
|
||||
// 500,
|
||||
// send module will throw 404 when file is missing
|
||||
// https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L421
|
||||
// Note: we will use Next.js built-in 404 page to handle 404 errors
|
||||
// 404,
|
||||
// send module will throw 403 when redirecting to a directory without enabling directory listing
|
||||
// https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L484
|
||||
// Note: Next.js throws a different error (without status code) for directory listing
|
||||
// 403,
|
||||
// send module will throw 400 when fails to normalize the path
|
||||
// https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L520
|
||||
400,
|
||||
// send module will throw 412 with conditional GET request
|
||||
// https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L632
|
||||
412,
|
||||
// send module will throw 416 when range is not satisfiable
|
||||
// https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L669
|
||||
416
|
||||
]);
|
||||
let validErrorStatus = POSSIBLE_ERROR_CODE_FROM_SERVE_STATIC.has(err.statusCode);
|
||||
// normalize non-allowed status codes
|
||||
if (!validErrorStatus) {
|
||||
err.statusCode = 400;
|
||||
}
|
||||
if (typeof err.statusCode === "number") {
|
||||
const invokePath = `/${err.statusCode}`;
|
||||
const invokeStatus = err.statusCode;
|
||||
res.statusCode = err.statusCode;
|
||||
return await invokeRender(url.parse(invokePath, true), invokePath, handleIndex, {
|
||||
invokeStatus
|
||||
});
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
if (matchedOutput) {
|
||||
invokedOutputs.add(matchedOutput.itemPath);
|
||||
return await invokeRender(parsedUrl, parsedUrl.pathname || "/", handleIndex, {
|
||||
invokeOutput: matchedOutput.itemPath
|
||||
});
|
||||
}
|
||||
// 404 case
|
||||
res.setHeader("Cache-Control", "private, no-cache, no-store, max-age=0, must-revalidate");
|
||||
// Short-circuit favicon.ico serving so that the 404 page doesn't get built, since the browser requests favicon.ico when loading any route.
|
||||
if (opts.dev && !matchedOutput && parsedUrl.pathname === "/favicon.ico") {
|
||||
res.statusCode = 404;
|
||||
res.end("");
|
||||
return null;
|
||||
}
|
||||
const appNotFound = opts.dev ? developmentBundler == null ? void 0 : developmentBundler.serverFields.hasAppNotFound : await fsChecker.getItem(UNDERSCORE_NOT_FOUND_ROUTE);
|
||||
res.statusCode = 404;
|
||||
if (appNotFound) {
|
||||
return await invokeRender(parsedUrl, UNDERSCORE_NOT_FOUND_ROUTE, handleIndex, {
|
||||
invokeStatus: 404
|
||||
});
|
||||
}
|
||||
await invokeRender(parsedUrl, "/404", handleIndex, {
|
||||
invokeStatus: 404
|
||||
});
|
||||
};
|
||||
try {
|
||||
await handleRequest(0);
|
||||
} catch (err) {
|
||||
try {
|
||||
let invokePath = "/500";
|
||||
let invokeStatus = "500";
|
||||
if (err instanceof DecodeError) {
|
||||
invokePath = "/400";
|
||||
invokeStatus = "400";
|
||||
} else {
|
||||
console.error(err);
|
||||
}
|
||||
res.statusCode = Number(invokeStatus);
|
||||
return await invokeRender(url.parse(invokePath, true), invokePath, 0, {
|
||||
invokeStatus: res.statusCode
|
||||
});
|
||||
} catch (err2) {
|
||||
console.error(err2);
|
||||
}
|
||||
res.statusCode = 500;
|
||||
res.end("Internal Server Error");
|
||||
}
|
||||
};
|
||||
let requestHandler = requestHandlerImpl;
|
||||
if (config.experimental.testProxy) {
|
||||
// Intercept fetch and other testmode apis.
|
||||
const { wrapRequestHandlerWorker, interceptTestApis } = require("next/dist/experimental/testmode/server");
|
||||
requestHandler = wrapRequestHandlerWorker(requestHandler);
|
||||
interceptTestApis();
|
||||
}
|
||||
requestHandlers[opts.dir] = requestHandler;
|
||||
const renderServerOpts = {
|
||||
port: opts.port,
|
||||
dir: opts.dir,
|
||||
hostname: opts.hostname,
|
||||
minimalMode: opts.minimalMode,
|
||||
dev: !!opts.dev,
|
||||
server: opts.server,
|
||||
isNodeDebugging: !!opts.isNodeDebugging,
|
||||
serverFields: (developmentBundler == null ? void 0 : developmentBundler.serverFields) || {},
|
||||
experimentalTestProxy: !!config.experimental.testProxy,
|
||||
experimentalHttpsServer: !!opts.experimentalHttpsServer,
|
||||
bundlerService: devBundlerService,
|
||||
startServerSpan: opts.startServerSpan
|
||||
};
|
||||
renderServerOpts.serverFields.routerServerHandler = requestHandlerImpl;
|
||||
// pre-initialize workers
|
||||
const handlers = await renderServer.instance.initialize(renderServerOpts);
|
||||
const logError = async (type, err)=>{
|
||||
if (isPostpone(err)) {
|
||||
// React postpones that are unhandled might end up logged here but they're
|
||||
// not really errors. They're just part of rendering.
|
||||
return;
|
||||
}
|
||||
await (developmentBundler == null ? void 0 : developmentBundler.logErrorWithOriginalStack(err, type));
|
||||
};
|
||||
process.on("uncaughtException", logError.bind(null, "uncaughtException"));
|
||||
process.on("unhandledRejection", logError.bind(null, "unhandledRejection"));
|
||||
const resolveRoutes = getResolveRoutes(fsChecker, config, opts, renderServer.instance, renderServerOpts, developmentBundler == null ? void 0 : developmentBundler.ensureMiddleware);
|
||||
const upgradeHandler = async (req, socket, head)=>{
|
||||
try {
|
||||
req.on("error", (_err)=>{
|
||||
// TODO: log socket errors?
|
||||
// console.error(_err);
|
||||
});
|
||||
socket.on("error", (_err)=>{
|
||||
// TODO: log socket errors?
|
||||
// console.error(_err);
|
||||
});
|
||||
if (opts.dev && developmentBundler && req.url) {
|
||||
const { basePath, assetPrefix } = config;
|
||||
let hmrPrefix = basePath;
|
||||
// assetPrefix overrides basePath for HMR path
|
||||
if (assetPrefix) {
|
||||
hmrPrefix = normalizedAssetPrefix(assetPrefix);
|
||||
if (URL.canParse(hmrPrefix)) {
|
||||
// remove trailing slash from pathname
|
||||
// return empty string if pathname is '/'
|
||||
// to avoid conflicts with '/_next' below
|
||||
hmrPrefix = new URL(hmrPrefix).pathname.replace(/\/$/, "");
|
||||
}
|
||||
}
|
||||
const isHMRRequest = req.url.startsWith(ensureLeadingSlash(`${hmrPrefix}/_next/webpack-hmr`));
|
||||
// only handle HMR requests if the basePath in the request
|
||||
// matches the basePath for the handler responding to the request
|
||||
if (isHMRRequest) {
|
||||
return developmentBundler.hotReloader.onHMR(req, socket, head);
|
||||
}
|
||||
}
|
||||
const { matchedOutput, parsedUrl } = await resolveRoutes({
|
||||
req,
|
||||
res: socket,
|
||||
isUpgradeReq: true,
|
||||
signal: signalFromNodeResponse(socket)
|
||||
});
|
||||
// TODO: allow upgrade requests to pages/app paths?
|
||||
// this was not previously supported
|
||||
if (matchedOutput) {
|
||||
return socket.end();
|
||||
}
|
||||
if (parsedUrl.protocol) {
|
||||
return await proxyRequest(req, socket, parsedUrl, head);
|
||||
}
|
||||
// If there's no matched output, we don't handle the request as user's
|
||||
// custom WS server may be listening on the same path.
|
||||
} catch (err) {
|
||||
console.error("Error handling upgrade request", err);
|
||||
socket.end();
|
||||
}
|
||||
};
|
||||
return [
|
||||
requestHandler,
|
||||
upgradeHandler,
|
||||
handlers.app
|
||||
];
|
||||
}
|
||||
|
||||
//# sourceMappingURL=router-server.js.map
|
||||
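A minimal sketch (not vendored code) of the bounded re-entry pattern used by handleRequest above, which re-invokes itself with handleIndex + 1 when a render falls through (for example on NoFallbackError) and gives up after a fixed number of attempts. The retryable flag here is a hypothetical stand-in for those specific error checks.

async function handleWithRetry(handle, maxAttempts = 5) {
  for (let attempt = 0; attempt <= maxAttempts; attempt++) {
    try {
      // handle receives the current attempt index, like handleIndex above.
      return await handle(attempt);
    } catch (err) {
      if (err && err.retryable && attempt < maxAttempts) continue;
      throw err;
    }
  }
  throw new Error("Attempted to handle request too many times");
}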
1
node_modules/next/dist/esm/server/lib/router-server.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/router-server.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
29
node_modules/next/dist/esm/server/lib/router-utils/build-data-route.js
generated
vendored
Normal file
29
node_modules/next/dist/esm/server/lib/router-utils/build-data-route.js
generated
vendored
Normal file
@ -0,0 +1,29 @@
|
||||
import path from "../../../shared/lib/isomorphic/path";
|
||||
import { normalizePagePath } from "../../../shared/lib/page-path/normalize-page-path";
|
||||
import { isDynamicRoute } from "../../../shared/lib/router/utils/is-dynamic";
|
||||
import { getNamedRouteRegex } from "../../../shared/lib/router/utils/route-regex";
|
||||
import { normalizeRouteRegex } from "../../../lib/load-custom-routes";
|
||||
import { escapeStringRegexp } from "../../../shared/lib/escape-regexp";
|
||||
export function buildDataRoute(page, buildId) {
|
||||
const pagePath = normalizePagePath(page);
|
||||
const dataRoute = path.posix.join("/_next/data", buildId, `${pagePath}.json`);
|
||||
let dataRouteRegex;
|
||||
let namedDataRouteRegex;
|
||||
let routeKeys;
|
||||
if (isDynamicRoute(page)) {
|
||||
const routeRegex = getNamedRouteRegex(dataRoute.replace(/\.json$/, ""), true);
|
||||
dataRouteRegex = normalizeRouteRegex(routeRegex.re.source.replace(/\(\?:\\\/\)\?\$$/, `\\.json$`));
|
||||
namedDataRouteRegex = routeRegex.namedRegex.replace(/\(\?:\/\)\?\$$/, `\\.json$`);
|
||||
routeKeys = routeRegex.routeKeys;
|
||||
} else {
|
||||
dataRouteRegex = normalizeRouteRegex(new RegExp(`^${path.posix.join("/_next/data", escapeStringRegexp(buildId), `${pagePath}.json`)}$`).source);
|
||||
}
|
||||
return {
|
||||
page,
|
||||
routeKeys,
|
||||
dataRouteRegex,
|
||||
namedDataRouteRegex
|
||||
};
|
||||
}
|
||||
|
||||
//# sourceMappingURL=build-data-route.js.map
|
||||
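For orientation, the rough shape of what buildDataRoute above returns for a dynamic page. The build id "abc123" is hypothetical, and the exact regex strings and route key names are produced by getNamedRouteRegex and normalizeRouteRegex, so they are only described here.

// buildDataRoute("/blog/[slug]", "abc123")
// -> {
//      page: "/blog/[slug]",
//      routeKeys: { /* named keys derived from [slug] */ },
//      dataRouteRegex: /* source string matching "/_next/data/abc123/blog/<slug>.json" */,
//      namedDataRouteRegex: /* same pattern with a named capture group for the slug */
//    }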
1
node_modules/next/dist/esm/server/lib/router-utils/build-data-route.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/router-utils/build-data-route.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/router-utils/build-data-route.ts"],"names":["path","normalizePagePath","isDynamicRoute","getNamedRouteRegex","normalizeRouteRegex","escapeStringRegexp","buildDataRoute","page","buildId","pagePath","dataRoute","posix","join","dataRouteRegex","namedDataRouteRegex","routeKeys","routeRegex","replace","re","source","namedRegex","RegExp"],"mappings":"AAAA,OAAOA,UAAU,sCAAqC;AACtD,SAASC,iBAAiB,QAAQ,oDAAmD;AACrF,SAASC,cAAc,QAAQ,8CAA6C;AAC5E,SAASC,kBAAkB,QAAQ,+CAA8C;AACjF,SAASC,mBAAmB,QAAQ,kCAAiC;AACrE,SAASC,kBAAkB,QAAQ,oCAAmC;AAEtE,OAAO,SAASC,eAAeC,IAAY,EAAEC,OAAe;IAC1D,MAAMC,WAAWR,kBAAkBM;IACnC,MAAMG,YAAYV,KAAKW,KAAK,CAACC,IAAI,CAAC,eAAeJ,SAAS,CAAC,EAAEC,SAAS,KAAK,CAAC;IAE5E,IAAII;IACJ,IAAIC;IACJ,IAAIC;IAEJ,IAAIb,eAAeK,OAAO;QACxB,MAAMS,aAAab,mBACjBO,UAAUO,OAAO,CAAC,WAAW,KAC7B;QAGFJ,iBAAiBT,oBACfY,WAAWE,EAAE,CAACC,MAAM,CAACF,OAAO,CAAC,oBAAoB,CAAC,QAAQ,CAAC;QAE7DH,sBAAsBE,WAAWI,UAAU,CAAEH,OAAO,CAClD,kBACA,CAAC,QAAQ,CAAC;QAEZF,YAAYC,WAAWD,SAAS;IAClC,OAAO;QACLF,iBAAiBT,oBACf,IAAIiB,OACF,CAAC,CAAC,EAAErB,KAAKW,KAAK,CAACC,IAAI,CACjB,eACAP,mBAAmBG,UACnB,CAAC,EAAEC,SAAS,KAAK,CAAC,EAClB,CAAC,CAAC,EACJU,MAAM;IAEZ;IAEA,OAAO;QACLZ;QACAQ;QACAF;QACAC;IACF;AACF"}
|
||||
468
node_modules/next/dist/esm/server/lib/router-utils/filesystem.js
generated
vendored
Normal file
468
node_modules/next/dist/esm/server/lib/router-utils/filesystem.js
generated
vendored
Normal file
@ -0,0 +1,468 @@
|
||||
import path from "path";
|
||||
import fs from "fs/promises";
|
||||
import * as Log from "../../../build/output/log";
|
||||
import setupDebug from "next/dist/compiled/debug";
|
||||
import LRUCache from "next/dist/compiled/lru-cache";
|
||||
import loadCustomRoutes from "../../../lib/load-custom-routes";
|
||||
import { modifyRouteRegex } from "../../../lib/redirect-status";
|
||||
import { FileType, fileExists } from "../../../lib/file-exists";
|
||||
import { recursiveReadDir } from "../../../lib/recursive-readdir";
|
||||
import { isDynamicRoute } from "../../../shared/lib/router/utils";
|
||||
import { escapeStringRegexp } from "../../../shared/lib/escape-regexp";
|
||||
import { getPathMatch } from "../../../shared/lib/router/utils/path-match";
|
||||
import { getRouteRegex } from "../../../shared/lib/router/utils/route-regex";
|
||||
import { getRouteMatcher } from "../../../shared/lib/router/utils/route-matcher";
|
||||
import { pathHasPrefix } from "../../../shared/lib/router/utils/path-has-prefix";
|
||||
import { normalizeLocalePath } from "../../../shared/lib/i18n/normalize-locale-path";
|
||||
import { removePathPrefix } from "../../../shared/lib/router/utils/remove-path-prefix";
|
||||
import { getMiddlewareRouteMatcher } from "../../../shared/lib/router/utils/middleware-route-matcher";
|
||||
import { APP_PATH_ROUTES_MANIFEST, BUILD_ID_FILE, MIDDLEWARE_MANIFEST, PAGES_MANIFEST, PRERENDER_MANIFEST, ROUTES_MANIFEST } from "../../../shared/lib/constants";
|
||||
import { normalizePathSep } from "../../../shared/lib/page-path/normalize-path-sep";
|
||||
import { normalizeMetadataRoute } from "../../../lib/metadata/get-metadata-route";
|
||||
import { RSCPathnameNormalizer } from "../../future/normalizers/request/rsc";
|
||||
import { PostponedPathnameNormalizer } from "../../future/normalizers/request/postponed";
|
||||
import { PrefetchRSCPathnameNormalizer } from "../../future/normalizers/request/prefetch-rsc";
|
||||
const debug = setupDebug("next:router-server:filesystem");
|
||||
export const buildCustomRoute = (type, item, basePath, caseSensitive)=>{
|
||||
const restrictedRedirectPaths = [
|
||||
"/_next"
|
||||
].map((p)=>basePath ? `${basePath}${p}` : p);
|
||||
const match = getPathMatch(item.source, {
|
||||
strict: true,
|
||||
removeUnnamedParams: true,
|
||||
regexModifier: !item.internal ? (regex)=>modifyRouteRegex(regex, type === "redirect" ? restrictedRedirectPaths : undefined) : undefined,
|
||||
sensitive: caseSensitive
|
||||
});
|
||||
return {
|
||||
...item,
|
||||
...type === "rewrite" ? {
|
||||
check: true
|
||||
} : {},
|
||||
match
|
||||
};
|
||||
};
|
||||
export async function setupFsCheck(opts) {
|
||||
const getItemsLru = !opts.dev ? new LRUCache({
|
||||
max: 1024 * 1024,
|
||||
length (value, key) {
|
||||
if (!value) return (key == null ? void 0 : key.length) || 0;
|
||||
return (key || "").length + (value.fsPath || "").length + value.itemPath.length + value.type.length;
|
||||
}
|
||||
}) : undefined;
|
||||
// routes that have _next/data endpoints (SSG/SSP)
|
||||
const nextDataRoutes = new Set();
|
||||
const publicFolderItems = new Set();
|
||||
const nextStaticFolderItems = new Set();
|
||||
const legacyStaticFolderItems = new Set();
|
||||
const appFiles = new Set();
|
||||
const pageFiles = new Set();
|
||||
let dynamicRoutes = [];
|
||||
let middlewareMatcher = ()=>false;
|
||||
const distDir = path.join(opts.dir, opts.config.distDir);
|
||||
const publicFolderPath = path.join(opts.dir, "public");
|
||||
const nextStaticFolderPath = path.join(distDir, "static");
|
||||
const legacyStaticFolderPath = path.join(opts.dir, "static");
|
||||
let customRoutes = {
|
||||
redirects: [],
|
||||
rewrites: {
|
||||
beforeFiles: [],
|
||||
afterFiles: [],
|
||||
fallback: []
|
||||
},
|
||||
headers: []
|
||||
};
|
||||
let buildId = "development";
|
||||
let prerenderManifest;
|
||||
if (!opts.dev) {
|
||||
var _middlewareManifest_middleware_, _middlewareManifest_middleware;
|
||||
const buildIdPath = path.join(opts.dir, opts.config.distDir, BUILD_ID_FILE);
|
||||
try {
|
||||
buildId = await fs.readFile(buildIdPath, "utf8");
|
||||
} catch (err) {
|
||||
if (err.code !== "ENOENT") throw err;
|
||||
throw new Error(`Could not find a production build in the '${opts.config.distDir}' directory. Try building your app with 'next build' before starting the production server. https://nextjs.org/docs/messages/production-start-no-build-id`);
|
||||
}
|
||||
try {
|
||||
for (const file of (await recursiveReadDir(publicFolderPath))){
|
||||
// Ensure filename is encoded and normalized.
|
||||
publicFolderItems.add(encodeURI(normalizePathSep(file)));
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.code !== "ENOENT") {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
try {
|
||||
for (const file of (await recursiveReadDir(legacyStaticFolderPath))){
|
||||
// Ensure filename is encoded and normalized.
|
||||
legacyStaticFolderItems.add(encodeURI(normalizePathSep(file)));
|
||||
}
|
||||
Log.warn(`The static directory has been deprecated in favor of the public directory. https://nextjs.org/docs/messages/static-dir-deprecated`);
|
||||
} catch (err) {
|
||||
if (err.code !== "ENOENT") {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
try {
|
||||
for (const file of (await recursiveReadDir(nextStaticFolderPath))){
|
||||
// Ensure filename is encoded and normalized.
|
||||
nextStaticFolderItems.add(path.posix.join("/_next/static", encodeURI(normalizePathSep(file))));
|
||||
}
|
||||
} catch (err) {
|
||||
if (opts.config.output !== "standalone") throw err;
|
||||
}
|
||||
const routesManifestPath = path.join(distDir, ROUTES_MANIFEST);
|
||||
const prerenderManifestPath = path.join(distDir, PRERENDER_MANIFEST);
|
||||
const middlewareManifestPath = path.join(distDir, "server", MIDDLEWARE_MANIFEST);
|
||||
const pagesManifestPath = path.join(distDir, "server", PAGES_MANIFEST);
|
||||
const appRoutesManifestPath = path.join(distDir, APP_PATH_ROUTES_MANIFEST);
|
||||
const routesManifest = JSON.parse(await fs.readFile(routesManifestPath, "utf8"));
|
||||
prerenderManifest = JSON.parse(await fs.readFile(prerenderManifestPath, "utf8"));
|
||||
const middlewareManifest = JSON.parse(await fs.readFile(middlewareManifestPath, "utf8").catch(()=>"{}"));
|
||||
const pagesManifest = JSON.parse(await fs.readFile(pagesManifestPath, "utf8"));
|
||||
const appRoutesManifest = JSON.parse(await fs.readFile(appRoutesManifestPath, "utf8").catch(()=>"{}"));
|
||||
for (const key of Object.keys(pagesManifest)){
|
||||
// ensure the non-locale version is in the set
|
||||
if (opts.config.i18n) {
|
||||
pageFiles.add(normalizeLocalePath(key, opts.config.i18n.locales).pathname);
|
||||
} else {
|
||||
pageFiles.add(key);
|
||||
}
|
||||
}
|
||||
for (const key of Object.keys(appRoutesManifest)){
|
||||
appFiles.add(appRoutesManifest[key]);
|
||||
}
|
||||
const escapedBuildId = escapeStringRegexp(buildId);
|
||||
for (const route of routesManifest.dataRoutes){
|
||||
if (isDynamicRoute(route.page)) {
|
||||
const routeRegex = getRouteRegex(route.page);
|
||||
dynamicRoutes.push({
|
||||
...route,
|
||||
regex: routeRegex.re.toString(),
|
||||
match: getRouteMatcher({
|
||||
// TODO: fix this in the manifest itself, must also be fixed in
|
||||
// upstream builder that relies on this
|
||||
re: opts.config.i18n ? new RegExp(route.dataRouteRegex.replace(`/${escapedBuildId}/`, `/${escapedBuildId}/(?<nextLocale>[^/]+?)/`)) : new RegExp(route.dataRouteRegex),
|
||||
groups: routeRegex.groups
|
||||
})
|
||||
});
|
||||
}
|
||||
nextDataRoutes.add(route.page);
|
||||
}
|
||||
for (const route of routesManifest.dynamicRoutes){
|
||||
dynamicRoutes.push({
|
||||
...route,
|
||||
match: getRouteMatcher(getRouteRegex(route.page))
|
||||
});
|
||||
}
|
||||
if ((_middlewareManifest_middleware = middlewareManifest.middleware) == null ? void 0 : (_middlewareManifest_middleware_ = _middlewareManifest_middleware["/"]) == null ? void 0 : _middlewareManifest_middleware_.matchers) {
|
||||
var _middlewareManifest_middleware_1, _middlewareManifest_middleware1;
|
||||
middlewareMatcher = getMiddlewareRouteMatcher((_middlewareManifest_middleware1 = middlewareManifest.middleware) == null ? void 0 : (_middlewareManifest_middleware_1 = _middlewareManifest_middleware1["/"]) == null ? void 0 : _middlewareManifest_middleware_1.matchers);
|
||||
}
|
||||
customRoutes = {
|
||||
redirects: routesManifest.redirects,
|
||||
rewrites: routesManifest.rewrites ? Array.isArray(routesManifest.rewrites) ? {
|
||||
beforeFiles: [],
|
||||
afterFiles: routesManifest.rewrites,
|
||||
fallback: []
|
||||
} : routesManifest.rewrites : {
|
||||
beforeFiles: [],
|
||||
afterFiles: [],
|
||||
fallback: []
|
||||
},
|
||||
headers: routesManifest.headers
|
||||
};
|
||||
} else {
|
||||
// dev handling
|
||||
customRoutes = await loadCustomRoutes(opts.config);
|
||||
prerenderManifest = {
|
||||
version: 4,
|
||||
routes: {},
|
||||
dynamicRoutes: {},
|
||||
notFoundRoutes: [],
|
||||
preview: {
|
||||
previewModeId: require("crypto").randomBytes(16).toString("hex"),
|
||||
previewModeSigningKey: require("crypto").randomBytes(32).toString("hex"),
|
||||
previewModeEncryptionKey: require("crypto").randomBytes(32).toString("hex")
|
||||
}
|
||||
};
|
||||
}
|
||||
const headers = customRoutes.headers.map((item)=>buildCustomRoute("header", item, opts.config.basePath, opts.config.experimental.caseSensitiveRoutes));
|
||||
const redirects = customRoutes.redirects.map((item)=>buildCustomRoute("redirect", item, opts.config.basePath, opts.config.experimental.caseSensitiveRoutes));
|
||||
const rewrites = {
|
||||
beforeFiles: customRoutes.rewrites.beforeFiles.map((item)=>buildCustomRoute("before_files_rewrite", item)),
|
||||
afterFiles: customRoutes.rewrites.afterFiles.map((item)=>buildCustomRoute("rewrite", item, opts.config.basePath, opts.config.experimental.caseSensitiveRoutes)),
|
||||
fallback: customRoutes.rewrites.fallback.map((item)=>buildCustomRoute("rewrite", item, opts.config.basePath, opts.config.experimental.caseSensitiveRoutes))
|
||||
};
|
||||
const { i18n } = opts.config;
|
||||
const handleLocale = (pathname, locales)=>{
|
||||
let locale;
|
||||
if (i18n) {
|
||||
const i18nResult = normalizeLocalePath(pathname, locales || i18n.locales);
|
||||
pathname = i18nResult.pathname;
|
||||
locale = i18nResult.detectedLocale;
|
||||
}
|
||||
return {
|
||||
locale,
|
||||
pathname
|
||||
};
|
||||
};
|
||||
debug("nextDataRoutes", nextDataRoutes);
|
||||
debug("dynamicRoutes", dynamicRoutes);
|
||||
debug("pageFiles", pageFiles);
|
||||
debug("appFiles", appFiles);
|
||||
let ensureFn;
|
||||
const normalizers = {
|
||||
// Because we can't know if the app directory is enabled or not at this
|
||||
// stage, we assume that it is.
|
||||
rsc: new RSCPathnameNormalizer(),
|
||||
prefetchRSC: opts.config.experimental.ppr ? new PrefetchRSCPathnameNormalizer() : undefined,
|
||||
postponed: opts.config.experimental.ppr ? new PostponedPathnameNormalizer() : undefined
|
||||
};
|
||||
return {
|
||||
headers,
|
||||
rewrites,
|
||||
redirects,
|
||||
buildId,
|
||||
handleLocale,
|
||||
appFiles,
|
||||
pageFiles,
|
||||
dynamicRoutes,
|
||||
nextDataRoutes,
|
||||
exportPathMapRoutes: undefined,
|
||||
devVirtualFsItems: new Set(),
|
||||
prerenderManifest,
|
||||
middlewareMatcher: middlewareMatcher,
|
||||
ensureCallback (fn) {
|
||||
ensureFn = fn;
|
||||
},
|
||||
async getItem (itemPath) {
|
||||
const originalItemPath = itemPath;
|
||||
const itemKey = originalItemPath;
|
||||
const lruResult = getItemsLru == null ? void 0 : getItemsLru.get(itemKey);
|
||||
if (lruResult) {
|
||||
return lruResult;
|
||||
}
|
||||
const { basePath, assetPrefix } = opts.config;
|
||||
const hasBasePath = pathHasPrefix(itemPath, basePath);
|
||||
const hasAssetPrefix = pathHasPrefix(itemPath, assetPrefix);
|
||||
// Return null if either path doesn't start with basePath or assetPrefix
|
||||
if ((basePath || assetPrefix) && !hasBasePath && !hasAssetPrefix) {
|
||||
return null;
|
||||
}
|
||||
// Either remove basePath or assetPrefix, not both (due to routes with same name as basePath)
|
||||
if (basePath && hasBasePath) {
|
||||
itemPath = removePathPrefix(itemPath, basePath) || "/";
|
||||
} else if (assetPrefix && hasAssetPrefix) {
|
||||
itemPath = removePathPrefix(itemPath, assetPrefix) || "/";
|
||||
}
|
||||
// Simulate minimal mode requests by normalizing RSC and postponed
|
||||
// requests.
|
||||
if (opts.minimalMode) {
|
||||
var _normalizers_prefetchRSC, _normalizers_postponed;
|
||||
if ((_normalizers_prefetchRSC = normalizers.prefetchRSC) == null ? void 0 : _normalizers_prefetchRSC.match(itemPath)) {
|
||||
itemPath = normalizers.prefetchRSC.normalize(itemPath, true);
|
||||
} else if (normalizers.rsc.match(itemPath)) {
|
||||
itemPath = normalizers.rsc.normalize(itemPath, true);
|
||||
} else if ((_normalizers_postponed = normalizers.postponed) == null ? void 0 : _normalizers_postponed.match(itemPath)) {
|
||||
itemPath = normalizers.postponed.normalize(itemPath, true);
|
||||
}
|
||||
}
|
||||
if (itemPath !== "/" && itemPath.endsWith("/")) {
|
||||
itemPath = itemPath.substring(0, itemPath.length - 1);
|
||||
}
|
||||
let decodedItemPath = itemPath;
|
||||
try {
|
||||
decodedItemPath = decodeURIComponent(itemPath);
|
||||
} catch {}
|
||||
if (itemPath === "/_next/image") {
|
||||
return {
|
||||
itemPath,
|
||||
type: "nextImage"
|
||||
};
|
||||
}
|
||||
const itemsToCheck = [
|
||||
[
|
||||
this.devVirtualFsItems,
|
||||
"devVirtualFsItem"
|
||||
],
|
||||
[
|
||||
nextStaticFolderItems,
|
||||
"nextStaticFolder"
|
||||
],
|
||||
[
|
||||
legacyStaticFolderItems,
|
||||
"legacyStaticFolder"
|
||||
],
|
||||
[
|
||||
publicFolderItems,
|
||||
"publicFolder"
|
||||
],
|
||||
[
|
||||
appFiles,
|
||||
"appFile"
|
||||
],
|
||||
[
|
||||
pageFiles,
|
||||
"pageFile"
|
||||
]
|
||||
];
|
||||
for (let [items, type] of itemsToCheck){
|
||||
let locale;
|
||||
let curItemPath = itemPath;
|
||||
let curDecodedItemPath = decodedItemPath;
|
||||
const isDynamicOutput = type === "pageFile" || type === "appFile";
|
||||
if (i18n) {
|
||||
var _i18n_domains;
|
||||
const localeResult = handleLocale(itemPath, // legacy behavior allows visiting static assets under
|
||||
// default locale but no other locale
|
||||
isDynamicOutput ? undefined : [
|
||||
i18n == null ? void 0 : i18n.defaultLocale,
|
||||
// default locales from domains need to be matched too
|
||||
...((_i18n_domains = i18n.domains) == null ? void 0 : _i18n_domains.map((item)=>item.defaultLocale)) || []
|
||||
]);
|
||||
if (localeResult.pathname !== curItemPath) {
|
||||
curItemPath = localeResult.pathname;
|
||||
locale = localeResult.locale;
|
||||
try {
|
||||
curDecodedItemPath = decodeURIComponent(curItemPath);
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
if (type === "legacyStaticFolder") {
|
||||
if (!pathHasPrefix(curItemPath, "/static")) {
|
||||
continue;
|
||||
}
|
||||
curItemPath = curItemPath.substring("/static".length);
|
||||
try {
|
||||
curDecodedItemPath = decodeURIComponent(curItemPath);
|
||||
} catch {}
|
||||
}
|
||||
if (type === "nextStaticFolder" && !pathHasPrefix(curItemPath, "/_next/static")) {
|
||||
continue;
|
||||
}
|
||||
const nextDataPrefix = `/_next/data/${buildId}/`;
|
||||
if (type === "pageFile" && curItemPath.startsWith(nextDataPrefix) && curItemPath.endsWith(".json")) {
|
||||
items = nextDataRoutes;
|
||||
// remove _next/data/<build-id> prefix
|
||||
curItemPath = curItemPath.substring(nextDataPrefix.length - 1);
|
||||
// remove .json postfix
|
||||
curItemPath = curItemPath.substring(0, curItemPath.length - ".json".length);
|
||||
const curLocaleResult = handleLocale(curItemPath);
|
||||
curItemPath = curLocaleResult.pathname === "/index" ? "/" : curLocaleResult.pathname;
|
||||
locale = curLocaleResult.locale;
|
||||
try {
|
||||
curDecodedItemPath = decodeURIComponent(curItemPath);
|
||||
} catch {}
|
||||
}
|
||||
let matchedItem = items.has(curItemPath);
|
||||
// check decoded variant as well
|
||||
if (!matchedItem && !opts.dev) {
|
||||
matchedItem = items.has(curDecodedItemPath);
|
||||
if (matchedItem) curItemPath = curDecodedItemPath;
|
||||
else {
|
||||
// x-ref: https://github.com/vercel/next.js/issues/54008
|
||||
// There are cases where URLs get decoded before the request reaches us, so we should support both encoded and decoded variants.
|
||||
// e.g. nginx could decode the proxy urls, the below ones should be treated as the same:
|
||||
// decoded version: `/_next/static/chunks/pages/blog/[slug]-d4858831b91b69f6.js`
|
||||
// encoded version: `/_next/static/chunks/pages/blog/%5Bslug%5D-d4858831b91b69f6.js`
|
||||
try {
|
||||
// encode the special characters in the path and retrieve again to determine if path exists.
|
||||
const encodedCurItemPath = encodeURI(curItemPath);
|
||||
matchedItem = items.has(encodedCurItemPath);
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
if (matchedItem || opts.dev) {
|
||||
let fsPath;
|
||||
let itemsRoot;
|
||||
switch(type){
|
||||
case "nextStaticFolder":
|
||||
{
|
||||
itemsRoot = nextStaticFolderPath;
|
||||
curItemPath = curItemPath.substring("/_next/static".length);
|
||||
break;
|
||||
}
|
||||
case "legacyStaticFolder":
|
||||
{
|
||||
itemsRoot = legacyStaticFolderPath;
|
||||
break;
|
||||
}
|
||||
case "publicFolder":
|
||||
{
|
||||
itemsRoot = publicFolderPath;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (itemsRoot && curItemPath) {
|
||||
fsPath = path.posix.join(itemsRoot, curItemPath);
|
||||
}
|
||||
// dynamically check fs in development so we don't
|
||||
// have to wait on the watcher
|
||||
if (!matchedItem && opts.dev) {
|
||||
const isStaticAsset = [
|
||||
"nextStaticFolder",
|
||||
"publicFolder",
|
||||
"legacyStaticFolder"
|
||||
].includes(type);
|
||||
if (isStaticAsset && itemsRoot) {
|
||||
let found = fsPath && await fileExists(fsPath, FileType.File);
|
||||
if (!found) {
|
||||
try {
|
||||
// In dev, we ensure encoded paths match
|
||||
// decoded paths on the filesystem so check
|
||||
// that variation as well
|
||||
const tempItemPath = decodeURIComponent(curItemPath);
|
||||
fsPath = path.posix.join(itemsRoot, tempItemPath);
|
||||
found = await fileExists(fsPath, FileType.File);
|
||||
} catch {}
|
||||
if (!found) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
} else if (type === "pageFile" || type === "appFile") {
|
||||
var _ensureFn;
|
||||
const isAppFile = type === "appFile";
|
||||
if (ensureFn && await ((_ensureFn = ensureFn({
|
||||
type,
|
||||
itemPath: isAppFile ? normalizeMetadataRoute(curItemPath) : curItemPath
|
||||
})) == null ? void 0 : _ensureFn.catch(()=>"ENSURE_FAILED")) === "ENSURE_FAILED") {
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// i18n locales aren't matched for app dir
|
||||
if (type === "appFile" && locale && locale !== (i18n == null ? void 0 : i18n.defaultLocale)) {
|
||||
continue;
|
||||
}
|
||||
const itemResult = {
|
||||
type,
|
||||
fsPath,
|
||||
locale,
|
||||
itemsRoot,
|
||||
itemPath: curItemPath
|
||||
};
|
||||
getItemsLru == null ? void 0 : getItemsLru.set(itemKey, itemResult);
|
||||
return itemResult;
|
||||
}
|
||||
}
|
||||
getItemsLru == null ? void 0 : getItemsLru.set(itemKey, null);
|
||||
return null;
|
||||
},
|
||||
getDynamicRoutes () {
|
||||
// this should include data routes
|
||||
return this.dynamicRoutes;
|
||||
},
|
||||
getMiddlewareMatchers () {
|
||||
return this.middlewareMatcher;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
//# sourceMappingURL=filesystem.js.map
|
||||
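A minimal sketch (not vendored code) of the lookup caching done via getItemsLru above: resolved filesystem items are cached by path, including null results for misses. A plain Map stands in for the LRU cache here, and resolve is a hypothetical callback.

const itemCache = new Map();

async function getItemCached(itemPath, resolve) {
  if (itemCache.has(itemPath)) return itemCache.get(itemPath);
  const result = await resolve(itemPath); // may be null when nothing matches
  // Cache null as well so repeated misses skip the filesystem checks.
  itemCache.set(itemPath, result);
  return result;
}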
1
node_modules/next/dist/esm/server/lib/router-utils/filesystem.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/router-utils/filesystem.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
6
node_modules/next/dist/esm/server/lib/router-utils/is-postpone.js
generated
vendored
Normal file
6
node_modules/next/dist/esm/server/lib/router-utils/is-postpone.js
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
const REACT_POSTPONE_TYPE = Symbol.for("react.postpone");
|
||||
export function isPostpone(error) {
|
||||
return typeof error === "object" && error !== null && error.$$typeof === REACT_POSTPONE_TYPE;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=is-postpone.js.map
|
||||
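A small sketch of the technique isPostpone uses above: detecting a marker object through a globally registered symbol, so the check works across module instances. The symbol name "example.marker" is illustrative.

const MARKER = Symbol.for("example.marker");

function isMarked(value) {
  // Symbol.for returns the same symbol for the same key everywhere in the realm.
  return typeof value === "object" && value !== null && value.$$typeof === MARKER;
}

// isMarked({ $$typeof: Symbol.for("example.marker") }) === true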
1
node_modules/next/dist/esm/server/lib/router-utils/is-postpone.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/router-utils/is-postpone.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/router-utils/is-postpone.ts"],"names":["REACT_POSTPONE_TYPE","Symbol","for","isPostpone","error","$$typeof"],"mappings":"AAAA,MAAMA,sBAA8BC,OAAOC,GAAG,CAAC;AAE/C,OAAO,SAASC,WAAWC,KAAU;IACnC,OACE,OAAOA,UAAU,YACjBA,UAAU,QACVA,MAAMC,QAAQ,KAAKL;AAEvB"}
|
||||
95
node_modules/next/dist/esm/server/lib/router-utils/proxy-request.js
generated
vendored
Normal file
95
node_modules/next/dist/esm/server/lib/router-utils/proxy-request.js
generated
vendored
Normal file
@ -0,0 +1,95 @@
|
||||
import url from "url";
|
||||
import { stringifyQuery } from "../../server-route-utils";
|
||||
export async function proxyRequest(req, res, parsedUrl, upgradeHead, reqBody, proxyTimeout) {
|
||||
const { query } = parsedUrl;
|
||||
delete parsedUrl.query;
|
||||
parsedUrl.search = stringifyQuery(req, query);
|
||||
const target = url.format(parsedUrl);
|
||||
const HttpProxy = require("next/dist/compiled/http-proxy");
|
||||
const proxy = new HttpProxy({
|
||||
target,
|
||||
changeOrigin: true,
|
||||
ignorePath: true,
|
||||
ws: true,
|
||||
// we limit proxy requests to 30s by default, in development
|
||||
// we don't time out WebSocket requests to allow proxying
|
||||
proxyTimeout: proxyTimeout === null ? undefined : proxyTimeout || 30000,
|
||||
headers: {
|
||||
"x-forwarded-host": req.headers.host || ""
|
||||
}
|
||||
});
|
||||
await new Promise((proxyResolve, proxyReject)=>{
|
||||
let finished = false;
|
||||
// http-proxy does not properly detect a client disconnect in newer
|
||||
// versions of Node.js. This is caused because it only listens for the
|
||||
// `aborted` event on our request object, but it also fully reads
|
||||
// and closes the request object. Node **will not** fire `aborted` when
|
||||
// the request is already closed. Listening for `close` on our response
|
||||
// object will detect the disconnect, and we can abort the proxy's
|
||||
// connection.
|
||||
proxy.on("proxyReq", (proxyReq)=>{
|
||||
res.on("close", ()=>proxyReq.destroy());
|
||||
});
|
||||
proxy.on("proxyRes", (proxyRes)=>{
|
||||
if (res.destroyed) {
|
||||
proxyRes.destroy();
|
||||
} else {
|
||||
res.on("close", ()=>proxyRes.destroy());
|
||||
}
|
||||
});
|
||||
proxy.on("proxyRes", (proxyRes, innerReq, innerRes)=>{
|
||||
const cleanup = (err)=>{
|
||||
// cleanup event listeners to allow clean garbage collection
|
||||
proxyRes.removeListener("error", cleanup);
|
||||
proxyRes.removeListener("close", cleanup);
|
||||
innerRes.removeListener("error", cleanup);
|
||||
innerRes.removeListener("close", cleanup);
|
||||
// destroy all source streams to propagate the caught event backward
|
||||
innerReq.destroy(err);
|
||||
proxyRes.destroy(err);
|
||||
};
|
||||
proxyRes.once("error", cleanup);
|
||||
proxyRes.once("close", cleanup);
|
||||
innerRes.once("error", cleanup);
|
||||
innerRes.once("close", cleanup);
|
||||
});
|
||||
proxy.on("error", (err)=>{
|
||||
console.error(`Failed to proxy ${target}`, err);
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
proxyReject(err);
|
||||
if (!res.destroyed) {
|
||||
res.statusCode = 500;
|
||||
res.end("Internal Server Error");
|
||||
}
|
||||
}
|
||||
});
|
||||
// if upgrade head is present treat as WebSocket request
|
||||
if (upgradeHead) {
|
||||
proxy.on("proxyReqWs", (proxyReq)=>{
|
||||
proxyReq.on("close", ()=>{
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
proxyResolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
proxy.ws(req, res, upgradeHead);
|
||||
proxyResolve(true);
|
||||
} else {
|
||||
proxy.on("proxyReq", (proxyReq)=>{
|
||||
proxyReq.on("close", ()=>{
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
proxyResolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
proxy.web(req, res, {
|
||||
buffer: reqBody
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
//# sourceMappingURL=proxy-request.js.map
|
||||
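A minimal sketch (not vendored code) of the symmetric listener cleanup used in the proxyRes handler above, so two paired streams do not leak listeners once either side errors or closes.

function linkStreams(a, b) {
  const cleanup = (err) => {
    // Remove all four listeners to allow clean garbage collection.
    a.removeListener("error", cleanup);
    a.removeListener("close", cleanup);
    b.removeListener("error", cleanup);
    b.removeListener("close", cleanup);
    // Destroy both streams so the caught event propagates to each side.
    a.destroy(err);
    b.destroy(err);
  };
  a.once("error", cleanup);
  a.once("close", cleanup);
  b.once("error", cleanup);
  b.once("close", cleanup);
}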
1
node_modules/next/dist/esm/server/lib/router-utils/proxy-request.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/router-utils/proxy-request.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/router-utils/proxy-request.ts"],"names":["url","stringifyQuery","proxyRequest","req","res","parsedUrl","upgradeHead","reqBody","proxyTimeout","query","search","target","format","HttpProxy","require","proxy","changeOrigin","ignorePath","ws","undefined","headers","host","Promise","proxyResolve","proxyReject","finished","on","proxyReq","destroy","proxyRes","destroyed","innerReq","innerRes","cleanup","err","removeListener","once","console","error","statusCode","end","web","buffer"],"mappings":"AAGA,OAAOA,SAAS,MAAK;AACrB,SAASC,cAAc,QAAQ,2BAA0B;AAEzD,OAAO,eAAeC,aACpBC,GAAoB,EACpBC,GAAmB,EACnBC,SAAiC,EACjCC,WAAiB,EACjBC,OAAa,EACbC,YAA4B;IAE5B,MAAM,EAAEC,KAAK,EAAE,GAAGJ;IAClB,OAAO,AAACA,UAAkBI,KAAK;IAC/BJ,UAAUK,MAAM,GAAGT,eAAeE,KAAYM;IAE9C,MAAME,SAASX,IAAIY,MAAM,CAACP;IAC1B,MAAMQ,YACJC,QAAQ;IAEV,MAAMC,QAAQ,IAAIF,UAAU;QAC1BF;QACAK,cAAc;QACdC,YAAY;QACZC,IAAI;QACJ,4DAA4D;QAC5D,yDAAyD;QACzDV,cAAcA,iBAAiB,OAAOW,YAAYX,gBAAgB;QAClEY,SAAS;YACP,oBAAoBjB,IAAIiB,OAAO,CAACC,IAAI,IAAI;QAC1C;IACF;IAEA,MAAM,IAAIC,QAAQ,CAACC,cAAcC;QAC/B,IAAIC,WAAW;QAEf,mEAAmE;QACnE,sEAAsE;QACtE,qEAAqE;QACrE,uEAAuE;QACvE,uEAAuE;QACvE,kEAAkE;QAClE,cAAc;QACdV,MAAMW,EAAE,CAAC,YAAY,CAACC;YACpBvB,IAAIsB,EAAE,CAAC,SAAS,IAAMC,SAASC,OAAO;QACxC;QACAb,MAAMW,EAAE,CAAC,YAAY,CAACG;YACpB,IAAIzB,IAAI0B,SAAS,EAAE;gBACjBD,SAASD,OAAO;YAClB,OAAO;gBACLxB,IAAIsB,EAAE,CAAC,SAAS,IAAMG,SAASD,OAAO;YACxC;QACF;QAEAb,MAAMW,EAAE,CAAC,YAAY,CAACG,UAAUE,UAAUC;YACxC,MAAMC,UAAU,CAACC;gBACf,4DAA4D;gBAC5DL,SAASM,cAAc,CAAC,SAASF;gBACjCJ,SAASM,cAAc,CAAC,SAASF;gBACjCD,SAASG,cAAc,CAAC,SAASF;gBACjCD,SAASG,cAAc,CAAC,SAASF;gBAEjC,oEAAoE;gBACpEF,SAASH,OAAO,CAACM;gBACjBL,SAASD,OAAO,CAACM;YACnB;YAEAL,SAASO,IAAI,CAAC,SAASH;YACvBJ,SAASO,IAAI,CAAC,SAASH;YACvBD,SAASI,IAAI,CAAC,SAASH;YACvBD,SAASI,IAAI,CAAC,SAASH;QACzB;QAEAlB,MAAMW,EAAE,CAAC,SAAS,CAACQ;YACjBG,QAAQC,KAAK,CAAC,CAAC,gBAAgB,EAAE3B,OAAO,CAAC,EAAEuB;YAC3C,IAAI,CAACT,UAAU;gBACbA,WAAW;gBACXD,YAAYU;gBAEZ,IAAI,CAAC9B,IAAI0B,SAAS,EAAE;oBAClB1B,IAAImC,UAAU,GAAG;oBACjBnC,IAAIoC,GAAG,CAAC;gBACV;YACF;QACF;QAEA,wDAAwD;QACxD,IAAIlC,aAAa;YACfS,MAAMW,EAAE,CAAC,cAAc,CAACC;gBACtBA,SAASD,EAAE,CAAC,SAAS;oBACnB,IAAI,CAACD,UAAU;wBACbA,WAAW;wBACXF,aAAa;oBACf;gBACF;YACF;YACAR,MAAMG,EAAE,CAACf,KAA+BC,KAAKE;YAC7CiB,aAAa;QACf,OAAO;YACLR,MAAMW,EAAE,CAAC,YAAY,CAACC;gBACpBA,SAASD,EAAE,CAAC,SAAS;oBACnB,IAAI,CAACD,UAAU;wBACbA,WAAW;wBACXF,aAAa;oBACf;gBACF;YACF;YACAR,MAAM0B,GAAG,CAACtC,KAAKC,KAAK;gBAClBsC,QAAQnC;YACV;QACF;IACF;AACF"}
525
node_modules/next/dist/esm/server/lib/router-utils/resolve-routes.js
generated
vendored
Normal file
525
node_modules/next/dist/esm/server/lib/router-utils/resolve-routes.js
generated
vendored
Normal file
@ -0,0 +1,525 @@
import url from "url";
import path from "node:path";
import setupDebug from "next/dist/compiled/debug";
import { getCloneableBody } from "../../body-streams";
import { filterReqHeaders, ipcForbiddenHeaders } from "../server-ipc/utils";
import { stringifyQuery } from "../../server-route-utils";
import { formatHostname } from "../format-hostname";
import { toNodeOutgoingHttpHeaders } from "../../web/utils";
import { isAbortError } from "../../pipe-readable";
import { getHostname } from "../../../shared/lib/get-hostname";
import { getRedirectStatus } from "../../../lib/redirect-status";
import { normalizeRepeatedSlashes } from "../../../shared/lib/utils";
import { relativizeURL } from "../../../shared/lib/router/utils/relativize-url";
import { addPathPrefix } from "../../../shared/lib/router/utils/add-path-prefix";
import { pathHasPrefix } from "../../../shared/lib/router/utils/path-has-prefix";
import { detectDomainLocale } from "../../../shared/lib/i18n/detect-domain-locale";
import { normalizeLocalePath } from "../../../shared/lib/i18n/normalize-locale-path";
import { removePathPrefix } from "../../../shared/lib/router/utils/remove-path-prefix";
import { NextDataPathnameNormalizer } from "../../future/normalizers/request/next-data";
import { BasePathPathnameNormalizer } from "../../future/normalizers/request/base-path";
import { PostponedPathnameNormalizer } from "../../future/normalizers/request/postponed";
import { addRequestMeta } from "../../request-meta";
import { compileNonPath, matchHas, prepareDestination } from "../../../shared/lib/router/utils/prepare-destination";
const debug = setupDebug("next:router-server:resolve-routes");
export function getResolveRoutes(fsChecker, config, opts, renderServer, renderServerOpts, ensureMiddleware) {
    const routes = [
        // _next/data with middleware handling
        {
            match: ()=>({}),
            name: "middleware_next_data"
        },
        ...opts.minimalMode ? [] : fsChecker.headers,
        ...opts.minimalMode ? [] : fsChecker.redirects,
        // check middleware (using matchers)
        {
            match: ()=>({}),
            name: "middleware"
        },
        ...opts.minimalMode ? [] : fsChecker.rewrites.beforeFiles,
        // check middleware (using matchers)
        {
            match: ()=>({}),
            name: "before_files_end"
        },
        // we check exact matches on fs before continuing to
        // after files rewrites
        {
            match: ()=>({}),
            name: "check_fs"
        },
        ...opts.minimalMode ? [] : fsChecker.rewrites.afterFiles,
        // we always do the check: true handling before continuing to
        // fallback rewrites
        {
            check: true,
            match: ()=>({}),
            name: "after files check: true"
        },
        ...opts.minimalMode ? [] : fsChecker.rewrites.fallback
    ];
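The routes array above fixes the resolution order: custom headers and redirects first, then middleware, beforeFiles rewrites, the filesystem check, afterFiles rewrites, a check: true pass against dynamic routes, and finally fallback rewrites (custom entries are skipped in minimal mode). A hedged next.config.js sketch showing rewrites that land in each of those phases; the paths and destinations are made-up examples, not taken from this diff.

// Illustrative next.config.js only; sources and destinations are examples.
/** @type {import('next').NextConfig} */
module.exports = {
    async rewrites() {
        return {
            // Applied before the filesystem check ("check_fs" above).
            beforeFiles: [
                { source: "/docs/:path*", destination: "/legacy-docs/:path*" }
            ],
            // Applied after the filesystem check but before dynamic routes.
            afterFiles: [
                { source: "/blog/:slug", destination: "/news/:slug" }
            ],
            // Only consulted when nothing else, including dynamic routes, matched.
            fallback: [
                { source: "/:path*", destination: "https://legacy.example.com/:path*" }
            ]
        };
    }
};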
async function resolveRoutes({ req, res, isUpgradeReq, invokedOutputs }) {
|
||||
var _req_socket, _req_headers_xforwardedproto;
|
||||
let finished = false;
|
||||
let resHeaders = {};
|
||||
let matchedOutput = null;
|
||||
let parsedUrl = url.parse(req.url || "", true);
|
||||
let didRewrite = false;
|
||||
const urlParts = (req.url || "").split("?", 1);
|
||||
const urlNoQuery = urlParts[0];
|
||||
// this normalizes repeated slashes in the path e.g. hello//world ->
|
||||
// hello/world or backslashes to forward slashes, this does not
|
||||
// handle trailing slash as that is handled the same as a next.config.js
|
||||
// redirect
|
||||
if (urlNoQuery == null ? void 0 : urlNoQuery.match(/(\\|\/\/)/)) {
|
||||
parsedUrl = url.parse(normalizeRepeatedSlashes(req.url), true);
|
||||
return {
|
||||
parsedUrl,
|
||||
resHeaders,
|
||||
finished: true,
|
||||
statusCode: 308
|
||||
};
|
||||
}
|
||||
// TODO: inherit this from higher up
|
||||
const protocol = (req == null ? void 0 : (_req_socket = req.socket) == null ? void 0 : _req_socket.encrypted) || ((_req_headers_xforwardedproto = req.headers["x-forwarded-proto"]) == null ? void 0 : _req_headers_xforwardedproto.includes("https")) ? "https" : "http";
|
||||
// When there are hostname and port we build an absolute URL
|
||||
const initUrl = config.experimental.trustHostHeader ? `https://${req.headers.host || "localhost"}${req.url}` : opts.port ? `${protocol}://${formatHostname(opts.hostname || "localhost")}:${opts.port}${req.url}` : req.url || "";
|
||||
addRequestMeta(req, "initURL", initUrl);
|
||||
addRequestMeta(req, "initQuery", {
|
||||
...parsedUrl.query
|
||||
});
|
||||
addRequestMeta(req, "initProtocol", protocol);
|
||||
if (!isUpgradeReq) {
|
||||
addRequestMeta(req, "clonableBody", getCloneableBody(req));
|
||||
}
|
||||
const maybeAddTrailingSlash = (pathname)=>{
|
||||
if (config.trailingSlash && !config.skipMiddlewareUrlNormalize && !pathname.endsWith("/")) {
|
||||
return `${pathname}/`;
|
||||
}
|
||||
return pathname;
|
||||
};
|
||||
let domainLocale;
|
||||
let defaultLocale;
|
||||
let initialLocaleResult = undefined;
|
||||
if (config.i18n) {
|
||||
var _parsedUrl_pathname;
|
||||
const hadTrailingSlash = (_parsedUrl_pathname = parsedUrl.pathname) == null ? void 0 : _parsedUrl_pathname.endsWith("/");
|
||||
const hadBasePath = pathHasPrefix(parsedUrl.pathname || "", config.basePath);
|
||||
initialLocaleResult = normalizeLocalePath(removePathPrefix(parsedUrl.pathname || "/", config.basePath), config.i18n.locales);
|
||||
domainLocale = detectDomainLocale(config.i18n.domains, getHostname(parsedUrl, req.headers));
|
||||
defaultLocale = (domainLocale == null ? void 0 : domainLocale.defaultLocale) || config.i18n.defaultLocale;
|
||||
parsedUrl.query.__nextDefaultLocale = defaultLocale;
|
||||
parsedUrl.query.__nextLocale = initialLocaleResult.detectedLocale || defaultLocale;
|
||||
// ensure locale is present for resolving routes
|
||||
if (!initialLocaleResult.detectedLocale && !initialLocaleResult.pathname.startsWith("/_next/")) {
|
||||
parsedUrl.pathname = addPathPrefix(initialLocaleResult.pathname === "/" ? `/${defaultLocale}` : addPathPrefix(initialLocaleResult.pathname || "", `/${defaultLocale}`), hadBasePath ? config.basePath : "");
|
||||
if (hadTrailingSlash) {
|
||||
parsedUrl.pathname = maybeAddTrailingSlash(parsedUrl.pathname);
|
||||
}
|
||||
}
|
||||
}
|
||||
const checkLocaleApi = (pathname)=>{
|
||||
if (config.i18n && pathname === urlNoQuery && (initialLocaleResult == null ? void 0 : initialLocaleResult.detectedLocale) && pathHasPrefix(initialLocaleResult.pathname, "/api")) {
|
||||
return true;
|
||||
}
|
||||
};
|
||||
async function checkTrue() {
|
||||
const pathname = parsedUrl.pathname || "";
|
||||
if (checkLocaleApi(pathname)) {
|
||||
return;
|
||||
}
|
||||
if (!(invokedOutputs == null ? void 0 : invokedOutputs.has(pathname))) {
|
||||
const output = await fsChecker.getItem(pathname);
|
||||
if (output) {
|
||||
if (config.useFileSystemPublicRoutes || didRewrite || output.type !== "appFile" && output.type !== "pageFile") {
|
||||
return output;
|
||||
}
|
||||
}
|
||||
}
|
||||
const dynamicRoutes = fsChecker.getDynamicRoutes();
|
||||
let curPathname = parsedUrl.pathname;
|
||||
if (config.basePath) {
|
||||
if (!pathHasPrefix(curPathname || "", config.basePath)) {
|
||||
return;
|
||||
}
|
||||
curPathname = (curPathname == null ? void 0 : curPathname.substring(config.basePath.length)) || "/";
|
||||
}
|
||||
const localeResult = fsChecker.handleLocale(curPathname || "");
|
||||
for (const route of dynamicRoutes){
|
||||
// when resolving fallback: false the
|
||||
// render worker may return a no-fallback response
|
||||
// which signals we need to continue resolving.
|
||||
// TODO: optimize this to collect static paths
|
||||
// to use at the routing layer
|
||||
if (invokedOutputs == null ? void 0 : invokedOutputs.has(route.page)) {
|
||||
continue;
|
||||
}
|
||||
const params = route.match(localeResult.pathname);
|
||||
if (params) {
|
||||
const pageOutput = await fsChecker.getItem(addPathPrefix(route.page, config.basePath || ""));
|
||||
// i18n locales aren't matched for app dir
|
||||
if ((pageOutput == null ? void 0 : pageOutput.type) === "appFile" && (initialLocaleResult == null ? void 0 : initialLocaleResult.detectedLocale)) {
|
||||
continue;
|
||||
}
|
||||
if (pageOutput && (curPathname == null ? void 0 : curPathname.startsWith("/_next/data"))) {
|
||||
parsedUrl.query.__nextDataReq = "1";
|
||||
}
|
||||
if (config.useFileSystemPublicRoutes || didRewrite) {
|
||||
return pageOutput;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const normalizers = {
|
||||
basePath: config.basePath && config.basePath !== "/" ? new BasePathPathnameNormalizer(config.basePath) : undefined,
|
||||
data: new NextDataPathnameNormalizer(fsChecker.buildId),
|
||||
postponed: config.experimental.ppr ? new PostponedPathnameNormalizer() : undefined
|
||||
};
|
||||
async function handleRoute(route) {
|
||||
let curPathname = parsedUrl.pathname || "/";
|
||||
if (config.i18n && route.internal) {
|
||||
const hadTrailingSlash = curPathname.endsWith("/");
|
||||
if (config.basePath) {
|
||||
curPathname = removePathPrefix(curPathname, config.basePath);
|
||||
}
|
||||
const hadBasePath = curPathname !== parsedUrl.pathname;
|
||||
const localeResult = normalizeLocalePath(curPathname, config.i18n.locales);
|
||||
const isDefaultLocale = localeResult.detectedLocale === defaultLocale;
|
||||
if (isDefaultLocale) {
|
||||
curPathname = localeResult.pathname === "/" && hadBasePath ? config.basePath : addPathPrefix(localeResult.pathname, hadBasePath ? config.basePath : "");
|
||||
} else if (hadBasePath) {
|
||||
curPathname = curPathname === "/" ? config.basePath : addPathPrefix(curPathname, config.basePath);
|
||||
}
|
||||
if ((isDefaultLocale || hadBasePath) && hadTrailingSlash) {
|
||||
curPathname = maybeAddTrailingSlash(curPathname);
|
||||
}
|
||||
}
|
||||
let params = route.match(curPathname);
|
||||
if ((route.has || route.missing) && params) {
|
||||
const hasParams = matchHas(req, parsedUrl.query, route.has, route.missing);
|
||||
if (hasParams) {
|
||||
Object.assign(params, hasParams);
|
||||
} else {
|
||||
params = false;
|
||||
}
|
||||
}
|
||||
if (params) {
|
||||
if (fsChecker.exportPathMapRoutes && route.name === "before_files_end") {
|
||||
for (const exportPathMapRoute of fsChecker.exportPathMapRoutes){
|
||||
const result = await handleRoute(exportPathMapRoute);
|
||||
if (result) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (route.name === "middleware_next_data" && parsedUrl.pathname) {
|
||||
var _fsChecker_getMiddlewareMatchers;
|
||||
if ((_fsChecker_getMiddlewareMatchers = fsChecker.getMiddlewareMatchers()) == null ? void 0 : _fsChecker_getMiddlewareMatchers.length) {
|
||||
var _normalizers_basePath, _normalizers_postponed;
|
||||
let normalized = parsedUrl.pathname;
|
||||
// Remove the base path if it exists.
|
||||
const hadBasePath = (_normalizers_basePath = normalizers.basePath) == null ? void 0 : _normalizers_basePath.match(parsedUrl.pathname);
|
||||
if (hadBasePath && normalizers.basePath) {
|
||||
normalized = normalizers.basePath.normalize(normalized, true);
|
||||
}
|
||||
let updated = false;
|
||||
if (normalizers.data.match(normalized)) {
|
||||
updated = true;
|
||||
parsedUrl.query.__nextDataReq = "1";
|
||||
normalized = normalizers.data.normalize(normalized, true);
|
||||
} else if ((_normalizers_postponed = normalizers.postponed) == null ? void 0 : _normalizers_postponed.match(normalized)) {
|
||||
updated = true;
|
||||
normalized = normalizers.postponed.normalize(normalized, true);
|
||||
}
|
||||
if (config.i18n) {
|
||||
const curLocaleResult = normalizeLocalePath(normalized, config.i18n.locales);
|
||||
if (curLocaleResult.detectedLocale) {
|
||||
parsedUrl.query.__nextLocale = curLocaleResult.detectedLocale;
|
||||
}
|
||||
}
|
||||
// If we updated the pathname, and it had a base path, re-add the
|
||||
// base path.
|
||||
if (updated) {
|
||||
if (hadBasePath) {
|
||||
normalized = path.posix.join(config.basePath, normalized);
|
||||
}
|
||||
// Re-add the trailing slash (if required).
|
||||
normalized = maybeAddTrailingSlash(normalized);
|
||||
parsedUrl.pathname = normalized;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (route.name === "check_fs") {
|
||||
const pathname = parsedUrl.pathname || "";
|
||||
if ((invokedOutputs == null ? void 0 : invokedOutputs.has(pathname)) || checkLocaleApi(pathname)) {
|
||||
return;
|
||||
}
|
||||
const output = await fsChecker.getItem(pathname);
|
||||
if (output && !(config.i18n && (initialLocaleResult == null ? void 0 : initialLocaleResult.detectedLocale) && pathHasPrefix(pathname, "/api"))) {
|
||||
if (config.useFileSystemPublicRoutes || didRewrite || output.type !== "appFile" && output.type !== "pageFile") {
|
||||
matchedOutput = output;
|
||||
if (output.locale) {
|
||||
parsedUrl.query.__nextLocale = output.locale;
|
||||
}
|
||||
return {
|
||||
parsedUrl,
|
||||
resHeaders,
|
||||
finished: true,
|
||||
matchedOutput
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!opts.minimalMode && route.name === "middleware") {
|
||||
const match = fsChecker.getMiddlewareMatchers();
|
||||
if (// @ts-expect-error BaseNextRequest stuff
|
||||
match == null ? void 0 : match(parsedUrl.pathname, req, parsedUrl.query)) {
|
||||
if (ensureMiddleware) {
|
||||
await ensureMiddleware(req.url);
|
||||
}
|
||||
const serverResult = await (renderServer == null ? void 0 : renderServer.initialize(renderServerOpts));
|
||||
if (!serverResult) {
|
||||
throw new Error(`Failed to initialize render server "middleware"`);
|
||||
}
|
||||
addRequestMeta(req, "invokePath", "");
|
||||
addRequestMeta(req, "invokeOutput", "");
|
||||
addRequestMeta(req, "invokeQuery", {});
|
||||
addRequestMeta(req, "middlewareInvoke", true);
|
||||
debug("invoking middleware", req.url, req.headers);
|
||||
let middlewareRes = undefined;
|
||||
let bodyStream = undefined;
|
||||
try {
|
||||
try {
|
||||
await serverResult.requestHandler(req, res, parsedUrl);
|
||||
} catch (err) {
|
||||
if (!("result" in err) || !("response" in err.result)) {
|
||||
throw err;
|
||||
}
|
||||
middlewareRes = err.result.response;
|
||||
res.statusCode = middlewareRes.status;
|
||||
if (middlewareRes.body) {
|
||||
bodyStream = middlewareRes.body;
|
||||
} else if (middlewareRes.status) {
|
||||
bodyStream = new ReadableStream({
|
||||
start (controller) {
|
||||
controller.enqueue("");
|
||||
controller.close();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
// If the client aborts before we can receive a response object
|
||||
// (when the headers are flushed), then we can early exit without
|
||||
// further processing.
|
||||
if (isAbortError(e)) {
|
||||
return {
|
||||
parsedUrl,
|
||||
resHeaders,
|
||||
finished: true
|
||||
};
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
if (res.closed || res.finished || !middlewareRes) {
|
||||
return {
|
||||
parsedUrl,
|
||||
resHeaders,
|
||||
finished: true
|
||||
};
|
||||
}
|
||||
const middlewareHeaders = toNodeOutgoingHttpHeaders(middlewareRes.headers);
|
||||
debug("middleware res", middlewareRes.status, middlewareHeaders);
|
||||
if (middlewareHeaders["x-middleware-override-headers"]) {
|
||||
const overriddenHeaders = new Set();
|
||||
let overrideHeaders = middlewareHeaders["x-middleware-override-headers"];
|
||||
if (typeof overrideHeaders === "string") {
|
||||
overrideHeaders = overrideHeaders.split(",");
|
||||
}
|
||||
for (const key of overrideHeaders){
|
||||
overriddenHeaders.add(key.trim());
|
||||
}
|
||||
delete middlewareHeaders["x-middleware-override-headers"];
|
||||
// Delete headers.
|
||||
for (const key of Object.keys(req.headers)){
|
||||
if (!overriddenHeaders.has(key)) {
|
||||
delete req.headers[key];
|
||||
}
|
||||
}
|
||||
// Update or add headers.
|
||||
for (const key of overriddenHeaders.keys()){
|
||||
const valueKey = "x-middleware-request-" + key;
|
||||
const newValue = middlewareHeaders[valueKey];
|
||||
const oldValue = req.headers[key];
|
||||
if (oldValue !== newValue) {
|
||||
req.headers[key] = newValue === null ? undefined : newValue;
|
||||
}
|
||||
delete middlewareHeaders[valueKey];
|
||||
}
|
||||
}
|
||||
if (!middlewareHeaders["x-middleware-rewrite"] && !middlewareHeaders["x-middleware-next"] && !middlewareHeaders["location"]) {
|
||||
middlewareHeaders["x-middleware-refresh"] = "1";
|
||||
}
|
||||
delete middlewareHeaders["x-middleware-next"];
|
||||
for (const [key, value] of Object.entries({
|
||||
...filterReqHeaders(middlewareHeaders, ipcForbiddenHeaders)
|
||||
})){
|
||||
if ([
|
||||
"content-length",
|
||||
"x-middleware-rewrite",
|
||||
"x-middleware-redirect",
|
||||
"x-middleware-refresh"
|
||||
].includes(key)) {
|
||||
continue;
|
||||
}
|
||||
if (value) {
|
||||
resHeaders[key] = value;
|
||||
req.headers[key] = value;
|
||||
}
|
||||
}
|
||||
if (middlewareHeaders["x-middleware-rewrite"]) {
|
||||
const value = middlewareHeaders["x-middleware-rewrite"];
|
||||
const rel = relativizeURL(value, initUrl);
|
||||
resHeaders["x-middleware-rewrite"] = rel;
|
||||
const query = parsedUrl.query;
|
||||
parsedUrl = url.parse(rel, true);
|
||||
if (parsedUrl.protocol) {
|
||||
return {
|
||||
parsedUrl,
|
||||
resHeaders,
|
||||
finished: true
|
||||
};
|
||||
}
|
||||
// keep internal query state
|
||||
for (const key of Object.keys(query)){
|
||||
if (key.startsWith("_next") || key.startsWith("__next")) {
|
||||
parsedUrl.query[key] = query[key];
|
||||
}
|
||||
}
|
||||
if (config.i18n) {
|
||||
const curLocaleResult = normalizeLocalePath(parsedUrl.pathname || "", config.i18n.locales);
|
||||
if (curLocaleResult.detectedLocale) {
|
||||
parsedUrl.query.__nextLocale = curLocaleResult.detectedLocale;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (middlewareHeaders["location"]) {
|
||||
const value = middlewareHeaders["location"];
|
||||
const rel = relativizeURL(value, initUrl);
|
||||
resHeaders["location"] = rel;
|
||||
parsedUrl = url.parse(rel, true);
|
||||
return {
|
||||
parsedUrl,
|
||||
resHeaders,
|
||||
finished: true,
|
||||
statusCode: middlewareRes.status
|
||||
};
|
||||
}
|
||||
if (middlewareHeaders["x-middleware-refresh"]) {
|
||||
return {
|
||||
parsedUrl,
|
||||
resHeaders,
|
||||
finished: true,
|
||||
bodyStream,
|
||||
statusCode: middlewareRes.status
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
// handle redirect
|
||||
if (("statusCode" in route || "permanent" in route) && route.destination) {
|
||||
const { parsedDestination } = prepareDestination({
|
||||
appendParamsToQuery: false,
|
||||
destination: route.destination,
|
||||
params: params,
|
||||
query: parsedUrl.query
|
||||
});
|
||||
const { query } = parsedDestination;
|
||||
delete parsedDestination.query;
|
||||
parsedDestination.search = stringifyQuery(req, query);
|
||||
parsedDestination.pathname = normalizeRepeatedSlashes(parsedDestination.pathname);
|
||||
return {
|
||||
finished: true,
|
||||
// @ts-expect-error custom ParsedUrl
|
||||
parsedUrl: parsedDestination,
|
||||
statusCode: getRedirectStatus(route)
|
||||
};
|
||||
}
|
||||
// handle headers
|
||||
if (route.headers) {
|
||||
const hasParams = Object.keys(params).length > 0;
|
||||
for (const header of route.headers){
|
||||
let { key, value } = header;
|
||||
if (hasParams) {
|
||||
key = compileNonPath(key, params);
|
||||
value = compileNonPath(value, params);
|
||||
}
|
||||
if (key.toLowerCase() === "set-cookie") {
|
||||
if (!Array.isArray(resHeaders[key])) {
|
||||
const val = resHeaders[key];
|
||||
resHeaders[key] = typeof val === "string" ? [
|
||||
val
|
||||
] : [];
|
||||
}
|
||||
resHeaders[key].push(value);
|
||||
} else {
|
||||
resHeaders[key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
// handle rewrite
|
||||
if (route.destination) {
|
||||
const { parsedDestination } = prepareDestination({
|
||||
appendParamsToQuery: true,
|
||||
destination: route.destination,
|
||||
params: params,
|
||||
query: parsedUrl.query
|
||||
});
|
||||
if (parsedDestination.protocol) {
|
||||
return {
|
||||
// @ts-expect-error custom ParsedUrl
|
||||
parsedUrl: parsedDestination,
|
||||
finished: true
|
||||
};
|
||||
}
|
||||
if (config.i18n) {
|
||||
const curLocaleResult = normalizeLocalePath(removePathPrefix(parsedDestination.pathname, config.basePath), config.i18n.locales);
|
||||
if (curLocaleResult.detectedLocale) {
|
||||
parsedUrl.query.__nextLocale = curLocaleResult.detectedLocale;
|
||||
}
|
||||
}
|
||||
didRewrite = true;
|
||||
parsedUrl.pathname = parsedDestination.pathname;
|
||||
Object.assign(parsedUrl.query, parsedDestination.query);
|
||||
}
|
||||
// handle check: true
|
||||
if (route.check) {
|
||||
const output = await checkTrue();
|
||||
if (output) {
|
||||
return {
|
||||
parsedUrl,
|
||||
resHeaders,
|
||||
finished: true,
|
||||
matchedOutput: output
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
        for (const route of routes){
            const result = await handleRoute(route);
            if (result) {
                return result;
            }
        }
        return {
            finished,
            parsedUrl,
            resHeaders,
            matchedOutput
        };
    }
    return resolveRoutes;
}

//# sourceMappingURL=resolve-routes.js.map
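getResolveRoutes returns the resolveRoutes closure, which the router server calls once per request and which resolves with the parsed URL, accumulated response headers, an optional matched filesystem output, and, for redirects and middleware responses, a status code and body stream. A rough sketch of that call site, with fsChecker, config, renderServer, and the request/response objects assumed to already exist:

// Illustrative only: the surrounding objects stand in for the router server's
// real fsChecker/config/opts/renderServer instances.
const resolveRoutes = getResolveRoutes(
    fsChecker,
    config,
    { minimalMode: false, port: 3000, hostname: "localhost" }, // assumed opts
    renderServer,
    renderServerOpts,
    ensureMiddleware
);

const { finished, parsedUrl, resHeaders, matchedOutput, statusCode, bodyStream } =
    await resolveRoutes({ req, res, isUpgradeReq: false, invokedOutputs: undefined });

if (matchedOutput) {
    // Hand the matched page/app file off to the render server.
}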
1
node_modules/next/dist/esm/server/lib/router-utils/resolve-routes.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/router-utils/resolve-routes.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
762
node_modules/next/dist/esm/server/lib/router-utils/setup-dev-bundler.js
generated
vendored
Normal file
762
node_modules/next/dist/esm/server/lib/router-utils/setup-dev-bundler.js
generated
vendored
Normal file
@ -0,0 +1,762 @@
import { createDefineEnv } from "../../../build/swc";
import fs from "fs";
import url from "url";
import path from "path";
import qs from "querystring";
import Watchpack from "next/dist/compiled/watchpack";
import { loadEnvConfig } from "@next/env";
import isError from "../../../lib/is-error";
import findUp from "next/dist/compiled/find-up";
import { buildCustomRoute } from "./filesystem";
import * as Log from "../../../build/output/log";
import HotReloaderWebpack from "../../dev/hot-reloader-webpack";
import { setGlobal } from "../../../trace/shared";
import loadJsConfig from "../../../build/load-jsconfig";
import { createValidFileMatcher } from "../find-page-file";
import { eventCliSession } from "../../../telemetry/events";
import { getDefineEnv } from "../../../build/webpack/plugins/define-env-plugin";
import { logAppDirError } from "../../dev/log-app-dir-error";
import { getSortedRoutes } from "../../../shared/lib/router/utils";
import { getStaticInfoIncludingLayouts, sortByPageExts } from "../../../build/entries";
import { verifyTypeScriptSetup } from "../../../lib/verify-typescript-setup";
import { verifyPartytownSetup } from "../../../lib/verify-partytown-setup";
import { getRouteRegex } from "../../../shared/lib/router/utils/route-regex";
import { normalizeAppPath } from "../../../shared/lib/router/utils/app-paths";
import { buildDataRoute } from "./build-data-route";
import { getRouteMatcher } from "../../../shared/lib/router/utils/route-matcher";
import { normalizePathSep } from "../../../shared/lib/page-path/normalize-path-sep";
import { createClientRouterFilter } from "../../../lib/create-client-router-filter";
import { absolutePathToPage } from "../../../shared/lib/page-path/absolute-path-to-page";
import { generateInterceptionRoutesRewrites } from "../../../lib/generate-interception-routes-rewrites";
import { CLIENT_STATIC_FILES_PATH, COMPILER_NAMES, DEV_CLIENT_PAGES_MANIFEST, DEV_MIDDLEWARE_MANIFEST, PHASE_DEVELOPMENT_SERVER } from "../../../shared/lib/constants";
import { getMiddlewareRouteMatcher } from "../../../shared/lib/router/utils/middleware-route-matcher";
import { isMiddlewareFile, NestedMiddlewareError, isInstrumentationHookFile, getPossibleMiddlewareFilenames, getPossibleInstrumentationHookFilenames } from "../../../build/utils";
import { createOriginalStackFrame, getSourceById, parseStack } from "../../../client/components/react-dev-overlay/server/middleware";
import { batchedTraceSource, createOriginalStackFrame as createOriginalTurboStackFrame } from "../../../client/components/react-dev-overlay/server/middleware-turbopack";
import { devPageFiles } from "../../../build/webpack/plugins/next-types-plugin/shared";
import { HMR_ACTIONS_SENT_TO_BROWSER } from "../../dev/hot-reloader-types";
import { PAGE_TYPES } from "../../../lib/page-types";
import { createHotReloaderTurbopack } from "../../dev/hot-reloader-turbopack";
import { getErrorSource } from "../../../shared/lib/error-source";
import { generateEncryptionKeyBase64 } from "../../app-render/encryption-utils";
async function verifyTypeScript(opts) {
|
||||
let usingTypeScript = false;
|
||||
const verifyResult = await verifyTypeScriptSetup({
|
||||
dir: opts.dir,
|
||||
distDir: opts.nextConfig.distDir,
|
||||
intentDirs: [
|
||||
opts.pagesDir,
|
||||
opts.appDir
|
||||
].filter(Boolean),
|
||||
typeCheckPreflight: false,
|
||||
tsconfigPath: opts.nextConfig.typescript.tsconfigPath,
|
||||
disableStaticImages: opts.nextConfig.images.disableStaticImages,
|
||||
hasAppDir: !!opts.appDir,
|
||||
hasPagesDir: !!opts.pagesDir
|
||||
});
|
||||
if (verifyResult.version) {
|
||||
usingTypeScript = true;
|
||||
}
|
||||
return usingTypeScript;
|
||||
}
|
||||
class ModuleBuildError extends Error {
|
||||
}
|
||||
export async function propagateServerField(opts, field, args) {
|
||||
var _opts_renderServer_instance, _opts_renderServer;
|
||||
await ((_opts_renderServer = opts.renderServer) == null ? void 0 : (_opts_renderServer_instance = _opts_renderServer.instance) == null ? void 0 : _opts_renderServer_instance.propagateServerField(opts.dir, field, args));
|
||||
}
|
||||
async function startWatcher(opts) {
|
||||
const { nextConfig, appDir, pagesDir, dir } = opts;
|
||||
const { useFileSystemPublicRoutes } = nextConfig;
|
||||
const usingTypeScript = await verifyTypeScript(opts);
|
||||
const distDir = path.join(opts.dir, opts.nextConfig.distDir);
|
||||
setGlobal("distDir", distDir);
|
||||
setGlobal("phase", PHASE_DEVELOPMENT_SERVER);
|
||||
const validFileMatcher = createValidFileMatcher(nextConfig.pageExtensions, appDir);
|
||||
const serverFields = {};
|
||||
const hotReloader = opts.turbo ? await createHotReloaderTurbopack(opts, serverFields, distDir) : new HotReloaderWebpack(opts.dir, {
|
||||
appDir,
|
||||
pagesDir,
|
||||
distDir: distDir,
|
||||
config: opts.nextConfig,
|
||||
buildId: "development",
|
||||
encryptionKey: await generateEncryptionKeyBase64(),
|
||||
telemetry: opts.telemetry,
|
||||
rewrites: opts.fsChecker.rewrites,
|
||||
previewProps: opts.fsChecker.prerenderManifest.preview
|
||||
});
|
||||
await hotReloader.start();
|
||||
if (opts.nextConfig.experimental.nextScriptWorkers) {
|
||||
await verifyPartytownSetup(opts.dir, path.join(distDir, CLIENT_STATIC_FILES_PATH));
|
||||
}
|
||||
opts.fsChecker.ensureCallback(async function ensure(item) {
|
||||
if (item.type === "appFile" || item.type === "pageFile") {
|
||||
await hotReloader.ensurePage({
|
||||
clientOnly: false,
|
||||
page: item.itemPath,
|
||||
isApp: item.type === "appFile",
|
||||
definition: undefined
|
||||
});
|
||||
}
|
||||
});
|
||||
let resolved = false;
|
||||
let prevSortedRoutes = [];
|
||||
await new Promise(async (resolve, reject)=>{
|
||||
if (pagesDir) {
|
||||
// Watchpack doesn't emit an event for an empty directory
|
||||
fs.readdir(pagesDir, (_, files)=>{
|
||||
if (files == null ? void 0 : files.length) {
|
||||
return;
|
||||
}
|
||||
if (!resolved) {
|
||||
resolve();
|
||||
resolved = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
const pages = pagesDir ? [
|
||||
pagesDir
|
||||
] : [];
|
||||
const app = appDir ? [
|
||||
appDir
|
||||
] : [];
|
||||
const directories = [
|
||||
...pages,
|
||||
...app
|
||||
];
|
||||
const rootDir = pagesDir || appDir;
|
||||
const files = [
|
||||
...getPossibleMiddlewareFilenames(path.join(rootDir, ".."), nextConfig.pageExtensions),
|
||||
...getPossibleInstrumentationHookFilenames(path.join(rootDir, ".."), nextConfig.pageExtensions)
|
||||
];
|
||||
let nestedMiddleware = [];
|
||||
const envFiles = [
|
||||
".env.development.local",
|
||||
".env.local",
|
||||
".env.development",
|
||||
".env"
|
||||
].map((file)=>path.join(dir, file));
|
||||
files.push(...envFiles);
|
||||
// tsconfig/jsconfig paths hot-reloading
|
||||
const tsconfigPaths = [
|
||||
path.join(dir, "tsconfig.json"),
|
||||
path.join(dir, "jsconfig.json")
|
||||
];
|
||||
files.push(...tsconfigPaths);
|
||||
const wp = new Watchpack({
|
||||
ignored: (pathname)=>{
|
||||
return !files.some((file)=>file.startsWith(pathname)) && !directories.some((d)=>pathname.startsWith(d) || d.startsWith(pathname));
|
||||
}
|
||||
});
|
||||
const fileWatchTimes = new Map();
|
||||
let enabledTypeScript = usingTypeScript;
|
||||
let previousClientRouterFilters;
|
||||
let previousConflictingPagePaths = new Set();
|
||||
wp.on("aggregated", async ()=>{
|
||||
var _serverFields_middleware, _serverFields_middleware1;
|
||||
let middlewareMatchers;
|
||||
const routedPages = [];
|
||||
const knownFiles = wp.getTimeInfoEntries();
|
||||
const appPaths = {};
|
||||
const pageNameSet = new Set();
|
||||
const conflictingAppPagePaths = new Set();
|
||||
const appPageFilePaths = new Map();
|
||||
const pagesPageFilePaths = new Map();
|
||||
let envChange = false;
|
||||
let tsconfigChange = false;
|
||||
let conflictingPageChange = 0;
|
||||
let hasRootAppNotFound = false;
|
||||
const { appFiles, pageFiles } = opts.fsChecker;
|
||||
appFiles.clear();
|
||||
pageFiles.clear();
|
||||
devPageFiles.clear();
|
||||
const sortedKnownFiles = [
|
||||
...knownFiles.keys()
|
||||
].sort(sortByPageExts(nextConfig.pageExtensions));
|
||||
for (const fileName of sortedKnownFiles){
|
||||
if (!files.includes(fileName) && !directories.some((d)=>fileName.startsWith(d))) {
|
||||
continue;
|
||||
}
|
||||
const meta = knownFiles.get(fileName);
|
||||
const watchTime = fileWatchTimes.get(fileName);
|
||||
// If the file is showing up for the first time or the meta.timestamp is changed since last time
|
||||
const watchTimeChange = watchTime === undefined || watchTime && watchTime !== (meta == null ? void 0 : meta.timestamp);
|
||||
fileWatchTimes.set(fileName, meta == null ? void 0 : meta.timestamp);
|
||||
if (envFiles.includes(fileName)) {
|
||||
if (watchTimeChange) {
|
||||
envChange = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (tsconfigPaths.includes(fileName)) {
|
||||
if (fileName.endsWith("tsconfig.json")) {
|
||||
enabledTypeScript = true;
|
||||
}
|
||||
if (watchTimeChange) {
|
||||
tsconfigChange = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if ((meta == null ? void 0 : meta.accuracy) === undefined || !validFileMatcher.isPageFile(fileName)) {
|
||||
continue;
|
||||
}
|
||||
const isAppPath = Boolean(appDir && normalizePathSep(fileName).startsWith(normalizePathSep(appDir) + "/"));
|
||||
const isPagePath = Boolean(pagesDir && normalizePathSep(fileName).startsWith(normalizePathSep(pagesDir) + "/"));
|
||||
const rootFile = absolutePathToPage(fileName, {
|
||||
dir: dir,
|
||||
extensions: nextConfig.pageExtensions,
|
||||
keepIndex: false,
|
||||
pagesType: PAGE_TYPES.ROOT
|
||||
});
|
||||
if (isMiddlewareFile(rootFile)) {
|
||||
var _staticInfo_middleware;
|
||||
const staticInfo = await getStaticInfoIncludingLayouts({
|
||||
pageFilePath: fileName,
|
||||
config: nextConfig,
|
||||
appDir: appDir,
|
||||
page: rootFile,
|
||||
isDev: true,
|
||||
isInsideAppDir: isAppPath,
|
||||
pageExtensions: nextConfig.pageExtensions
|
||||
});
|
||||
if (nextConfig.output === "export") {
|
||||
Log.error('Middleware cannot be used with "output: export". See more info here: https://nextjs.org/docs/advanced-features/static-html-export');
|
||||
continue;
|
||||
}
|
||||
serverFields.actualMiddlewareFile = rootFile;
|
||||
await propagateServerField(opts, "actualMiddlewareFile", serverFields.actualMiddlewareFile);
|
||||
middlewareMatchers = ((_staticInfo_middleware = staticInfo.middleware) == null ? void 0 : _staticInfo_middleware.matchers) || [
|
||||
{
|
||||
regexp: ".*",
|
||||
originalSource: "/:path*"
|
||||
}
|
||||
];
|
||||
continue;
|
||||
}
|
||||
if (isInstrumentationHookFile(rootFile) && nextConfig.experimental.instrumentationHook) {
|
||||
serverFields.actualInstrumentationHookFile = rootFile;
|
||||
await propagateServerField(opts, "actualInstrumentationHookFile", serverFields.actualInstrumentationHookFile);
|
||||
continue;
|
||||
}
|
||||
if (fileName.endsWith(".ts") || fileName.endsWith(".tsx")) {
|
||||
enabledTypeScript = true;
|
||||
}
|
||||
if (!(isAppPath || isPagePath)) {
|
||||
continue;
|
||||
}
|
||||
// Collect all current filenames for the TS plugin to use
|
||||
devPageFiles.add(fileName);
|
||||
let pageName = absolutePathToPage(fileName, {
|
||||
dir: isAppPath ? appDir : pagesDir,
|
||||
extensions: nextConfig.pageExtensions,
|
||||
keepIndex: isAppPath,
|
||||
pagesType: isAppPath ? PAGE_TYPES.APP : PAGE_TYPES.PAGES
|
||||
});
|
||||
if (!isAppPath && pageName.startsWith("/api/") && nextConfig.output === "export") {
|
||||
Log.error('API Routes cannot be used with "output: export". See more info here: https://nextjs.org/docs/advanced-features/static-html-export');
|
||||
continue;
|
||||
}
|
||||
if (isAppPath) {
|
||||
const isRootNotFound = validFileMatcher.isRootNotFound(fileName);
|
||||
hasRootAppNotFound = true;
|
||||
if (isRootNotFound) {
|
||||
continue;
|
||||
}
|
||||
if (!isRootNotFound && !validFileMatcher.isAppRouterPage(fileName)) {
|
||||
continue;
|
||||
}
|
||||
// Ignore files/directories starting with `_` in the app directory
|
||||
if (normalizePathSep(pageName).includes("/_")) {
|
||||
continue;
|
||||
}
|
||||
const originalPageName = pageName;
|
||||
pageName = normalizeAppPath(pageName).replace(/%5F/g, "_");
|
||||
if (!appPaths[pageName]) {
|
||||
appPaths[pageName] = [];
|
||||
}
|
||||
appPaths[pageName].push(originalPageName);
|
||||
if (useFileSystemPublicRoutes) {
|
||||
appFiles.add(pageName);
|
||||
}
|
||||
if (routedPages.includes(pageName)) {
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
if (useFileSystemPublicRoutes) {
|
||||
pageFiles.add(pageName);
|
||||
// always add to nextDataRoutes for now but in future only add
|
||||
// entries that actually use getStaticProps/getServerSideProps
|
||||
opts.fsChecker.nextDataRoutes.add(pageName);
|
||||
}
|
||||
}
|
||||
(isAppPath ? appPageFilePaths : pagesPageFilePaths).set(pageName, fileName);
|
||||
if (appDir && pageNameSet.has(pageName)) {
|
||||
conflictingAppPagePaths.add(pageName);
|
||||
} else {
|
||||
pageNameSet.add(pageName);
|
||||
}
|
||||
/**
|
||||
* If there is a middleware that is not declared in the root we will
|
||||
* warn without adding it so it doesn't make its way into the system.
|
||||
*/ if (/[\\\\/]_middleware$/.test(pageName)) {
|
||||
nestedMiddleware.push(pageName);
|
||||
continue;
|
||||
}
|
||||
routedPages.push(pageName);
|
||||
}
|
||||
const numConflicting = conflictingAppPagePaths.size;
|
||||
conflictingPageChange = numConflicting - previousConflictingPagePaths.size;
|
||||
if (conflictingPageChange !== 0) {
|
||||
if (numConflicting > 0) {
|
||||
let errorMessage = `Conflicting app and page file${numConflicting === 1 ? " was" : "s were"} found, please remove the conflicting files to continue:\n`;
|
||||
for (const p of conflictingAppPagePaths){
|
||||
const appPath = path.relative(dir, appPageFilePaths.get(p));
|
||||
const pagesPath = path.relative(dir, pagesPageFilePaths.get(p));
|
||||
errorMessage += ` "${pagesPath}" - "${appPath}"\n`;
|
||||
}
|
||||
hotReloader.setHmrServerError(new Error(errorMessage));
|
||||
} else if (numConflicting === 0) {
|
||||
hotReloader.clearHmrServerError();
|
||||
await propagateServerField(opts, "reloadMatchers", undefined);
|
||||
}
|
||||
}
|
||||
previousConflictingPagePaths = conflictingAppPagePaths;
|
||||
let clientRouterFilters;
|
||||
if (nextConfig.experimental.clientRouterFilter) {
|
||||
clientRouterFilters = createClientRouterFilter(Object.keys(appPaths), nextConfig.experimental.clientRouterFilterRedirects ? (nextConfig._originalRedirects || []).filter((r)=>!r.internal) : [], nextConfig.experimental.clientRouterFilterAllowedRate);
|
||||
if (!previousClientRouterFilters || JSON.stringify(previousClientRouterFilters) !== JSON.stringify(clientRouterFilters)) {
|
||||
envChange = true;
|
||||
previousClientRouterFilters = clientRouterFilters;
|
||||
}
|
||||
}
|
||||
if (!usingTypeScript && enabledTypeScript) {
|
||||
// we tolerate the error here as this is best effort
|
||||
// and the manual install command will be shown
|
||||
await verifyTypeScript(opts).then(()=>{
|
||||
tsconfigChange = true;
|
||||
}).catch(()=>{});
|
||||
}
|
||||
if (envChange || tsconfigChange) {
|
||||
var _hotReloader_activeWebpackConfigs;
|
||||
if (envChange) {
|
||||
// only log changes in router server
|
||||
loadEnvConfig(dir, true, Log, true, (envFilePath)=>{
|
||||
Log.info(`Reload env: ${envFilePath}`);
|
||||
});
|
||||
await propagateServerField(opts, "loadEnvConfig", [
|
||||
{
|
||||
dev: true,
|
||||
forceReload: true,
|
||||
silent: true
|
||||
}
|
||||
]);
|
||||
}
|
||||
let tsconfigResult;
|
||||
if (tsconfigChange) {
|
||||
try {
|
||||
tsconfigResult = await loadJsConfig(dir, nextConfig);
|
||||
} catch (_) {
|
||||
/* do we want to log if there are syntax errors in tsconfig while editing? */ }
|
||||
}
|
||||
if (hotReloader.turbopackProject) {
|
||||
const hasRewrites = opts.fsChecker.rewrites.afterFiles.length > 0 || opts.fsChecker.rewrites.beforeFiles.length > 0 || opts.fsChecker.rewrites.fallback.length > 0;
|
||||
await hotReloader.turbopackProject.update({
|
||||
defineEnv: createDefineEnv({
|
||||
isTurbopack: true,
|
||||
clientRouterFilters,
|
||||
config: nextConfig,
|
||||
dev: true,
|
||||
distDir,
|
||||
fetchCacheKeyPrefix: opts.nextConfig.experimental.fetchCacheKeyPrefix,
|
||||
hasRewrites,
|
||||
// TODO: Implement
|
||||
middlewareMatchers: undefined
|
||||
})
|
||||
});
|
||||
}
|
||||
(_hotReloader_activeWebpackConfigs = hotReloader.activeWebpackConfigs) == null ? void 0 : _hotReloader_activeWebpackConfigs.forEach((config, idx)=>{
|
||||
const isClient = idx === 0;
|
||||
const isNodeServer = idx === 1;
|
||||
const isEdgeServer = idx === 2;
|
||||
const hasRewrites = opts.fsChecker.rewrites.afterFiles.length > 0 || opts.fsChecker.rewrites.beforeFiles.length > 0 || opts.fsChecker.rewrites.fallback.length > 0;
|
||||
if (tsconfigChange) {
|
||||
var _config_resolve_plugins, _config_resolve;
|
||||
(_config_resolve = config.resolve) == null ? void 0 : (_config_resolve_plugins = _config_resolve.plugins) == null ? void 0 : _config_resolve_plugins.forEach((plugin)=>{
|
||||
// look for the JsConfigPathsPlugin and update with
|
||||
// the latest paths/baseUrl config
|
||||
if (plugin && plugin.jsConfigPlugin && tsconfigResult) {
|
||||
var _config_resolve_modules, _config_resolve, _jsConfig_compilerOptions;
|
||||
const { resolvedBaseUrl, jsConfig } = tsconfigResult;
|
||||
const currentResolvedBaseUrl = plugin.resolvedBaseUrl;
|
||||
const resolvedUrlIndex = (_config_resolve = config.resolve) == null ? void 0 : (_config_resolve_modules = _config_resolve.modules) == null ? void 0 : _config_resolve_modules.findIndex((item)=>item === currentResolvedBaseUrl);
|
||||
if (resolvedBaseUrl) {
|
||||
if (resolvedBaseUrl.baseUrl !== currentResolvedBaseUrl.baseUrl) {
|
||||
// remove old baseUrl and add new one
|
||||
if (resolvedUrlIndex && resolvedUrlIndex > -1) {
|
||||
var _config_resolve_modules1, _config_resolve1;
|
||||
(_config_resolve1 = config.resolve) == null ? void 0 : (_config_resolve_modules1 = _config_resolve1.modules) == null ? void 0 : _config_resolve_modules1.splice(resolvedUrlIndex, 1);
|
||||
}
|
||||
// If the resolvedBaseUrl is implicit we only remove the previous value.
|
||||
// Only add the baseUrl if it's explicitly set in tsconfig/jsconfig
|
||||
if (!resolvedBaseUrl.isImplicit) {
|
||||
var _config_resolve_modules2, _config_resolve2;
|
||||
(_config_resolve2 = config.resolve) == null ? void 0 : (_config_resolve_modules2 = _config_resolve2.modules) == null ? void 0 : _config_resolve_modules2.push(resolvedBaseUrl.baseUrl);
|
||||
}
|
||||
}
|
||||
}
|
||||
if ((jsConfig == null ? void 0 : (_jsConfig_compilerOptions = jsConfig.compilerOptions) == null ? void 0 : _jsConfig_compilerOptions.paths) && resolvedBaseUrl) {
|
||||
Object.keys(plugin.paths).forEach((key)=>{
|
||||
delete plugin.paths[key];
|
||||
});
|
||||
Object.assign(plugin.paths, jsConfig.compilerOptions.paths);
|
||||
plugin.resolvedBaseUrl = resolvedBaseUrl;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
if (envChange) {
|
||||
var _config_plugins;
|
||||
(_config_plugins = config.plugins) == null ? void 0 : _config_plugins.forEach((plugin)=>{
|
||||
// we look for the DefinePlugin definitions so we can
|
||||
// update them on the active compilers
|
||||
if (plugin && typeof plugin.definitions === "object" && plugin.definitions.__NEXT_DEFINE_ENV) {
|
||||
const newDefine = getDefineEnv({
|
||||
isTurbopack: false,
|
||||
clientRouterFilters,
|
||||
config: nextConfig,
|
||||
dev: true,
|
||||
distDir,
|
||||
fetchCacheKeyPrefix: opts.nextConfig.experimental.fetchCacheKeyPrefix,
|
||||
hasRewrites,
|
||||
isClient,
|
||||
isEdgeServer,
|
||||
isNodeOrEdgeCompilation: isNodeServer || isEdgeServer,
|
||||
isNodeServer,
|
||||
middlewareMatchers: undefined
|
||||
});
|
||||
Object.keys(plugin.definitions).forEach((key)=>{
|
||||
if (!(key in newDefine)) {
|
||||
delete plugin.definitions[key];
|
||||
}
|
||||
});
|
||||
Object.assign(plugin.definitions, newDefine);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
await hotReloader.invalidate({
|
||||
reloadAfterInvalidation: envChange
|
||||
});
|
||||
}
|
||||
if (nestedMiddleware.length > 0) {
|
||||
Log.error(new NestedMiddlewareError(nestedMiddleware, dir, pagesDir || appDir).message);
|
||||
nestedMiddleware = [];
|
||||
}
|
||||
// Make sure to sort parallel routes to make the result deterministic.
|
||||
serverFields.appPathRoutes = Object.fromEntries(Object.entries(appPaths).map(([k, v])=>[
|
||||
k,
|
||||
v.sort()
|
||||
]));
|
||||
await propagateServerField(opts, "appPathRoutes", serverFields.appPathRoutes);
|
||||
// TODO: pass this to fsChecker/next-dev-server?
|
||||
serverFields.middleware = middlewareMatchers ? {
|
||||
match: null,
|
||||
page: "/",
|
||||
matchers: middlewareMatchers
|
||||
} : undefined;
|
||||
await propagateServerField(opts, "middleware", serverFields.middleware);
|
||||
serverFields.hasAppNotFound = hasRootAppNotFound;
|
||||
opts.fsChecker.middlewareMatcher = ((_serverFields_middleware = serverFields.middleware) == null ? void 0 : _serverFields_middleware.matchers) ? getMiddlewareRouteMatcher((_serverFields_middleware1 = serverFields.middleware) == null ? void 0 : _serverFields_middleware1.matchers) : undefined;
|
||||
const interceptionRoutes = generateInterceptionRoutesRewrites(Object.keys(appPaths), opts.nextConfig.basePath).map((item)=>buildCustomRoute("before_files_rewrite", item, opts.nextConfig.basePath, opts.nextConfig.experimental.caseSensitiveRoutes));
|
||||
opts.fsChecker.rewrites.beforeFiles.push(...interceptionRoutes);
|
||||
const exportPathMap = typeof nextConfig.exportPathMap === "function" && await (nextConfig.exportPathMap == null ? void 0 : nextConfig.exportPathMap.call(nextConfig, {}, {
|
||||
dev: true,
|
||||
dir: opts.dir,
|
||||
outDir: null,
|
||||
distDir: distDir,
|
||||
buildId: "development"
|
||||
})) || {};
|
||||
const exportPathMapEntries = Object.entries(exportPathMap || {});
|
||||
if (exportPathMapEntries.length > 0) {
|
||||
opts.fsChecker.exportPathMapRoutes = exportPathMapEntries.map(([key, value])=>buildCustomRoute("before_files_rewrite", {
|
||||
source: key,
|
||||
destination: `${value.page}${value.query ? "?" : ""}${qs.stringify(value.query)}`
|
||||
}, opts.nextConfig.basePath, opts.nextConfig.experimental.caseSensitiveRoutes));
|
||||
}
|
||||
try {
|
||||
// we serve a separate manifest with all pages for the client in
|
||||
// dev mode so that we can match a page after a rewrite on the client
|
||||
// before it has been built and is populated in the _buildManifest
|
||||
const sortedRoutes = getSortedRoutes(routedPages);
|
||||
opts.fsChecker.dynamicRoutes = sortedRoutes.map((page)=>{
|
||||
const regex = getRouteRegex(page);
|
||||
return {
|
||||
regex: regex.re.toString(),
|
||||
match: getRouteMatcher(regex),
|
||||
page
|
||||
};
|
||||
});
|
||||
const dataRoutes = [];
|
||||
for (const page of sortedRoutes){
|
||||
const route = buildDataRoute(page, "development");
|
||||
const routeRegex = getRouteRegex(route.page);
|
||||
dataRoutes.push({
|
||||
...route,
|
||||
regex: routeRegex.re.toString(),
|
||||
match: getRouteMatcher({
|
||||
// TODO: fix this in the manifest itself, must also be fixed in
|
||||
// upstream builder that relies on this
|
||||
re: opts.nextConfig.i18n ? new RegExp(route.dataRouteRegex.replace(`/development/`, `/development/(?<nextLocale>[^/]+?)/`)) : new RegExp(route.dataRouteRegex),
|
||||
groups: routeRegex.groups
|
||||
})
|
||||
});
|
||||
}
|
||||
opts.fsChecker.dynamicRoutes.unshift(...dataRoutes);
|
||||
if (!(prevSortedRoutes == null ? void 0 : prevSortedRoutes.every((val, idx)=>val === sortedRoutes[idx]))) {
|
||||
const addedRoutes = sortedRoutes.filter((route)=>!prevSortedRoutes.includes(route));
|
||||
const removedRoutes = prevSortedRoutes.filter((route)=>!sortedRoutes.includes(route));
|
||||
// emit the change so clients fetch the update
|
||||
hotReloader.send({
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.DEV_PAGES_MANIFEST_UPDATE,
|
||||
data: [
|
||||
{
|
||||
devPagesManifest: true
|
||||
}
|
||||
]
|
||||
});
|
||||
addedRoutes.forEach((route)=>{
|
||||
hotReloader.send({
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.ADDED_PAGE,
|
||||
data: [
|
||||
route
|
||||
]
|
||||
});
|
||||
});
|
||||
removedRoutes.forEach((route)=>{
|
||||
hotReloader.send({
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.REMOVED_PAGE,
|
||||
data: [
|
||||
route
|
||||
]
|
||||
});
|
||||
});
|
||||
}
|
||||
prevSortedRoutes = sortedRoutes;
|
||||
if (!resolved) {
|
||||
resolve();
|
||||
resolved = true;
|
||||
}
|
||||
} catch (e) {
|
||||
if (!resolved) {
|
||||
reject(e);
|
||||
resolved = true;
|
||||
} else {
|
||||
Log.warn("Failed to reload dynamic routes:", e);
|
||||
}
|
||||
} finally{
|
||||
// Reload the matchers. The filesystem would have been written to,
|
||||
// and the matchers need to re-scan it to update the router.
|
||||
await propagateServerField(opts, "reloadMatchers", undefined);
|
||||
}
|
||||
});
|
||||
wp.watch({
|
||||
directories: [
|
||||
dir
|
||||
],
|
||||
startTime: 0
|
||||
});
|
||||
});
|
||||
const clientPagesManifestPath = `/_next/${CLIENT_STATIC_FILES_PATH}/development/${DEV_CLIENT_PAGES_MANIFEST}`;
|
||||
opts.fsChecker.devVirtualFsItems.add(clientPagesManifestPath);
|
||||
const devMiddlewareManifestPath = `/_next/${CLIENT_STATIC_FILES_PATH}/development/${DEV_MIDDLEWARE_MANIFEST}`;
|
||||
opts.fsChecker.devVirtualFsItems.add(devMiddlewareManifestPath);
|
||||
async function requestHandler(req, res) {
|
||||
var _parsedUrl_pathname, _parsedUrl_pathname1;
|
||||
const parsedUrl = url.parse(req.url || "/");
|
||||
if ((_parsedUrl_pathname = parsedUrl.pathname) == null ? void 0 : _parsedUrl_pathname.includes(clientPagesManifestPath)) {
|
||||
res.statusCode = 200;
|
||||
res.setHeader("Content-Type", "application/json; charset=utf-8");
|
||||
res.end(JSON.stringify({
|
||||
pages: prevSortedRoutes.filter((route)=>!opts.fsChecker.appFiles.has(route))
|
||||
}));
|
||||
return {
|
||||
finished: true
|
||||
};
|
||||
}
|
||||
if ((_parsedUrl_pathname1 = parsedUrl.pathname) == null ? void 0 : _parsedUrl_pathname1.includes(devMiddlewareManifestPath)) {
|
||||
var _serverFields_middleware;
|
||||
res.statusCode = 200;
|
||||
res.setHeader("Content-Type", "application/json; charset=utf-8");
|
||||
res.end(JSON.stringify(((_serverFields_middleware = serverFields.middleware) == null ? void 0 : _serverFields_middleware.matchers) || []));
|
||||
return {
|
||||
finished: true
|
||||
};
|
||||
}
|
||||
return {
|
||||
finished: false
|
||||
};
|
||||
}
|
||||
async function logErrorWithOriginalStack(err, type) {
|
||||
let usedOriginalStack = false;
|
||||
if (isError(err) && err.stack) {
|
||||
try {
|
||||
const frames = parseStack(err.stack);
|
||||
// Filter out internal edge related runtime stack
|
||||
const frame = frames.find(({ file })=>!(file == null ? void 0 : file.startsWith("eval")) && !(file == null ? void 0 : file.includes("web/adapter")) && !(file == null ? void 0 : file.includes("web/globals")) && !(file == null ? void 0 : file.includes("sandbox/context")) && !(file == null ? void 0 : file.includes("<anonymous>")));
|
||||
let originalFrame, isEdgeCompiler;
|
||||
const frameFile = frame == null ? void 0 : frame.file;
|
||||
if ((frame == null ? void 0 : frame.lineNumber) && frameFile) {
|
||||
if (hotReloader.turbopackProject) {
|
||||
try {
|
||||
originalFrame = await createOriginalTurboStackFrame(hotReloader.turbopackProject, {
|
||||
file: frameFile,
|
||||
methodName: frame.methodName,
|
||||
line: frame.lineNumber ?? 0,
|
||||
column: frame.column,
|
||||
isServer: true
|
||||
});
|
||||
} catch {}
|
||||
} else {
|
||||
var _hotReloader_edgeServerStats, _hotReloader_serverStats, _frame_file, _frame_file1;
|
||||
const moduleId = frameFile.replace(/^(webpack-internal:\/\/\/|file:\/\/)/, "");
|
||||
const modulePath = frameFile.replace(/^(webpack-internal:\/\/\/|file:\/\/)(\(.*\)\/)?/, "");
|
||||
const src = getErrorSource(err);
|
||||
isEdgeCompiler = src === COMPILER_NAMES.edgeServer;
|
||||
const compilation = isEdgeCompiler ? (_hotReloader_edgeServerStats = hotReloader.edgeServerStats) == null ? void 0 : _hotReloader_edgeServerStats.compilation : (_hotReloader_serverStats = hotReloader.serverStats) == null ? void 0 : _hotReloader_serverStats.compilation;
|
||||
const source = await getSourceById(!!((_frame_file = frame.file) == null ? void 0 : _frame_file.startsWith(path.sep)) || !!((_frame_file1 = frame.file) == null ? void 0 : _frame_file1.startsWith("file:")), moduleId, compilation);
|
||||
try {
|
||||
var _hotReloader_edgeServerStats1, _hotReloader_serverStats1;
|
||||
originalFrame = await createOriginalStackFrame({
|
||||
source,
|
||||
frame,
|
||||
moduleId,
|
||||
modulePath,
|
||||
rootDirectory: opts.dir,
|
||||
errorMessage: err.message,
|
||||
compilation: isEdgeCompiler ? (_hotReloader_edgeServerStats1 = hotReloader.edgeServerStats) == null ? void 0 : _hotReloader_edgeServerStats1.compilation : (_hotReloader_serverStats1 = hotReloader.serverStats) == null ? void 0 : _hotReloader_serverStats1.compilation
|
||||
});
|
||||
} catch {}
|
||||
}
|
||||
if ((originalFrame == null ? void 0 : originalFrame.originalCodeFrame) && originalFrame.originalStackFrame) {
|
||||
const { originalCodeFrame, originalStackFrame } = originalFrame;
|
||||
const { file, lineNumber, column, methodName } = originalStackFrame;
|
||||
Log[type === "warning" ? "warn" : "error"](`${file} (${lineNumber}:${column}) @ ${methodName}`);
|
||||
let errorToLog;
|
||||
if (isEdgeCompiler) {
|
||||
errorToLog = err.message;
|
||||
} else if (isError(err) && hotReloader.turbopackProject) {
|
||||
const stack = await traceTurbopackErrorStack(hotReloader.turbopackProject, err, frames);
|
||||
const error = new Error(err.message);
|
||||
error.stack = stack;
|
||||
error.digest = err.digest;
|
||||
errorToLog = error;
|
||||
} else {
|
||||
errorToLog = err;
|
||||
}
|
||||
if (type === "warning") {
|
||||
Log.warn(errorToLog);
|
||||
} else if (type === "app-dir") {
|
||||
logAppDirError(errorToLog);
|
||||
} else if (type) {
|
||||
Log.error(`${type}:`, errorToLog);
|
||||
} else {
|
||||
Log.error(errorToLog);
|
||||
}
|
||||
console[type === "warning" ? "warn" : "error"](originalCodeFrame);
|
||||
usedOriginalStack = true;
|
||||
}
|
||||
}
|
||||
} catch (_) {
|
||||
// failed to load original stack using source maps
|
||||
// this un-actionable by users so we don't show the
|
||||
// internal error and only show the provided stack
|
||||
}
|
||||
}
|
||||
if (!usedOriginalStack) {
|
||||
if (err instanceof ModuleBuildError) {
|
||||
Log.error(err.message);
|
||||
} else if (type === "warning") {
|
||||
Log.warn(err);
|
||||
} else if (type === "app-dir") {
|
||||
logAppDirError(err);
|
||||
} else if (type) {
|
||||
Log.error(`${type}:`, err);
|
||||
} else {
|
||||
Log.error(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
serverFields,
|
||||
hotReloader,
|
||||
requestHandler,
|
||||
logErrorWithOriginalStack,
|
||||
async ensureMiddleware (requestUrl) {
|
||||
if (!serverFields.actualMiddlewareFile) return;
|
||||
return hotReloader.ensurePage({
|
||||
page: serverFields.actualMiddlewareFile,
|
||||
clientOnly: false,
|
||||
definition: undefined,
|
||||
url: requestUrl
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
export async function setupDevBundler(opts) {
|
||||
const isSrcDir = path.relative(opts.dir, opts.pagesDir || opts.appDir || "").startsWith("src");
|
||||
const result = await startWatcher(opts);
|
||||
opts.telemetry.record(eventCliSession(path.join(opts.dir, opts.nextConfig.distDir), opts.nextConfig, {
|
||||
webpackVersion: 5,
|
||||
isSrcDir,
|
||||
turboFlag: !!opts.turbo,
|
||||
cliCommand: "dev",
|
||||
appDir: !!opts.appDir,
|
||||
pagesDir: !!opts.pagesDir,
|
||||
isCustomServer: !!opts.isCustomServer,
|
||||
hasNowJson: !!await findUp("now.json", {
|
||||
cwd: opts.dir
|
||||
})
|
||||
}));
|
||||
return result;
|
||||
}
|
||||
// Returns a trace rewritten through Turbopack's sourcemaps
|
||||
async function traceTurbopackErrorStack(project, error, frames) {
|
||||
let originalFrames = await Promise.all(frames.map(async (f)=>{
|
||||
try {
|
||||
const traced = await batchedTraceSource(project, {
|
||||
file: f.file,
|
||||
methodName: f.methodName,
|
||||
line: f.lineNumber ?? 0,
|
||||
column: f.column,
|
||||
isServer: true
|
||||
});
|
||||
return (traced == null ? void 0 : traced.frame) ?? f;
|
||||
} catch {
|
||||
return f;
|
||||
}
|
||||
}));
|
||||
return error.name + ": " + error.message + "\n" + originalFrames.map((f)=>{
|
||||
if (f == null) {
|
||||
return null;
|
||||
}
|
||||
let line = " at";
|
||||
if (f.methodName != null) {
|
||||
line += " " + f.methodName;
|
||||
}
|
||||
if (f.file != null) {
|
||||
const file = f.file.startsWith("/") || // Built-in "filenames" like `<anonymous>` shouldn't be made relative
|
||||
f.file.startsWith("<") || f.file.startsWith("node:") ? f.file : `./${f.file}`;
|
||||
line += ` (${file}`;
|
||||
if (f.lineNumber != null) {
|
||||
line += ":" + f.lineNumber;
|
||||
if (f.column != null) {
|
||||
line += ":" + f.column;
|
||||
}
|
||||
}
|
||||
line += ")";
|
||||
}
|
||||
return line;
|
||||
}).filter(Boolean).join("\n");
|
||||
}
|
||||
|
||||
//# sourceMappingURL=setup-dev-bundler.js.map
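setupDevBundler wires the file watcher, hot reloader, and telemetry together and resolves with serverFields, hotReloader, requestHandler, logErrorWithOriginalStack, and ensureMiddleware. A hedged sketch of how the dev router server might start it; the concrete option values are placeholders, not taken from this diff.

// Illustrative only: option values are placeholders.
import { setupDevBundler } from "./setup-dev-bundler";

const bundler = await setupDevBundler({
    dir: process.cwd(),
    appDir: "/abs/path/to/app",      // assumed; undefined when there is no app dir
    pagesDir: "/abs/path/to/pages",  // assumed; undefined when there is no pages dir
    nextConfig,                      // loaded next.config.js object (assumed)
    fsChecker,                       // filesystem route checker (assumed)
    telemetry,                       // Telemetry instance (assumed)
    turbo: false,                    // true when running with --turbo
    isCustomServer: false,
    renderServer: { instance: null }
});

// Later, per request, serve the dev pages/middleware manifests if they match:
const { finished } = await bundler.requestHandler(req, res);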
1
node_modules/next/dist/esm/server/lib/router-utils/setup-dev-bundler.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/router-utils/setup-dev-bundler.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
node_modules/next/dist/esm/server/lib/router-utils/types.js
generated
vendored
Normal file
3
node_modules/next/dist/esm/server/lib/router-utils/types.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
export { };

//# sourceMappingURL=types.js.map
1
node_modules/next/dist/esm/server/lib/router-utils/types.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/router-utils/types.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/router-utils/types.ts"],"names":[],"mappings":"AAAA,WAMgB"}
28
node_modules/next/dist/esm/server/lib/server-action-request-meta.js
generated
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
import { ACTION } from "../../client/components/app-router-headers";
|
||||
export function getServerActionRequestMetadata(req) {
|
||||
let actionId;
|
||||
let contentType;
|
||||
if (req.headers instanceof Headers) {
|
||||
actionId = req.headers.get(ACTION.toLowerCase()) ?? null;
|
||||
contentType = req.headers.get("content-type");
|
||||
} else {
|
||||
actionId = req.headers[ACTION.toLowerCase()] ?? null;
|
||||
contentType = req.headers["content-type"] ?? null;
|
||||
}
|
||||
const isURLEncodedAction = Boolean(req.method === "POST" && contentType === "application/x-www-form-urlencoded");
|
||||
const isMultipartAction = Boolean(req.method === "POST" && (contentType == null ? void 0 : contentType.startsWith("multipart/form-data")));
|
||||
const isFetchAction = Boolean(actionId !== undefined && typeof actionId === "string" && req.method === "POST");
|
||||
const isServerAction = Boolean(isFetchAction || isURLEncodedAction || isMultipartAction);
|
||||
return {
|
||||
actionId,
|
||||
isURLEncodedAction,
|
||||
isMultipartAction,
|
||||
isFetchAction,
|
||||
isServerAction
|
||||
};
|
||||
}
|
||||
export function getIsServerAction(req) {
|
||||
return getServerActionRequestMetadata(req).isServerAction;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=server-action-request-meta.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/server-action-request-meta.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/lib/server-action-request-meta.ts"],"names":["ACTION","getServerActionRequestMetadata","req","actionId","contentType","headers","Headers","get","toLowerCase","isURLEncodedAction","Boolean","method","isMultipartAction","startsWith","isFetchAction","undefined","isServerAction","getIsServerAction"],"mappings":"AAGA,SAASA,MAAM,QAAQ,6CAA4C;AAEnE,OAAO,SAASC,+BACdC,GAAoD;IAQpD,IAAIC;IACJ,IAAIC;IAEJ,IAAIF,IAAIG,OAAO,YAAYC,SAAS;QAClCH,WAAWD,IAAIG,OAAO,CAACE,GAAG,CAACP,OAAOQ,WAAW,OAAO;QACpDJ,cAAcF,IAAIG,OAAO,CAACE,GAAG,CAAC;IAChC,OAAO;QACLJ,WAAW,AAACD,IAAIG,OAAO,CAACL,OAAOQ,WAAW,GAAG,IAAe;QAC5DJ,cAAcF,IAAIG,OAAO,CAAC,eAAe,IAAI;IAC/C;IAEA,MAAMI,qBAAqBC,QACzBR,IAAIS,MAAM,KAAK,UAAUP,gBAAgB;IAE3C,MAAMQ,oBAAoBF,QACxBR,IAAIS,MAAM,KAAK,WAAUP,+BAAAA,YAAaS,UAAU,CAAC;IAEnD,MAAMC,gBAAgBJ,QACpBP,aAAaY,aACX,OAAOZ,aAAa,YACpBD,IAAIS,MAAM,KAAK;IAGnB,MAAMK,iBAAiBN,QACrBI,iBAAiBL,sBAAsBG;IAGzC,OAAO;QACLT;QACAM;QACAG;QACAE;QACAE;IACF;AACF;AAEA,OAAO,SAASC,kBACdf,GAAoD;IAEpD,OAAOD,+BAA+BC,KAAKc,cAAc;AAC3D"}
|
||||
64
node_modules/next/dist/esm/server/lib/server-ipc/index.js
generated
vendored
Normal file
@ -0,0 +1,64 @@
|
||||
import { errorToJSON } from "../../render";
|
||||
import crypto from "crypto";
|
||||
import isError from "../../../lib/is-error";
|
||||
import { deserializeErr } from "./request-utils";
|
||||
// we can't use process.send as jest-worker relies on
|
||||
// it already and can cause unexpected message errors
|
||||
// so we create an IPC server for communicating
|
||||
export async function createIpcServer(server) {
|
||||
// Generate a random key in memory to validate messages from other processes.
|
||||
// This is just a simple guard against other processes attempting to send
|
||||
// traffic to the IPC server.
|
||||
const ipcValidationKey = crypto.randomBytes(32).toString("hex");
|
||||
const ipcServer = require("http").createServer(async (req, res)=>{
|
||||
try {
|
||||
const url = new URL(req.url || "/", "http://n");
|
||||
const key = url.searchParams.get("key");
|
||||
if (key !== ipcValidationKey) {
|
||||
return res.end();
|
||||
}
|
||||
const method = url.searchParams.get("method");
|
||||
const args = JSON.parse(url.searchParams.get("args") || "[]");
|
||||
if (!method || !Array.isArray(args)) {
|
||||
return res.end();
|
||||
}
|
||||
if (typeof server[method] === "function") {
|
||||
var _args_;
|
||||
if (method === "logErrorWithOriginalStack" && ((_args_ = args[0]) == null ? void 0 : _args_.stack)) {
|
||||
args[0] = deserializeErr(args[0]);
|
||||
}
|
||||
let result = await server[method](...args);
|
||||
if (result && typeof result === "object" && result.stack) {
|
||||
result = errorToJSON(result);
|
||||
}
|
||||
res.end(JSON.stringify(result || ""));
|
||||
}
|
||||
} catch (err) {
|
||||
if (isError(err) && err.code !== "ENOENT") {
|
||||
console.error(err);
|
||||
}
|
||||
res.end(JSON.stringify({
|
||||
err: {
|
||||
name: err.name,
|
||||
message: err.message,
|
||||
stack: err.stack
|
||||
}
|
||||
}));
|
||||
}
|
||||
});
|
||||
const ipcPort = await new Promise((resolveIpc)=>{
|
||||
ipcServer.listen(0, server.hostname, ()=>{
|
||||
const addr = ipcServer.address();
|
||||
if (addr && typeof addr === "object") {
|
||||
resolveIpc(addr.port);
|
||||
}
|
||||
});
|
||||
});
|
||||
return {
|
||||
ipcPort,
|
||||
ipcServer,
|
||||
ipcValidationKey
|
||||
};
|
||||
}
|
||||
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/server-ipc/index.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/server-ipc/index.ts"],"names":["errorToJSON","crypto","isError","deserializeErr","createIpcServer","server","ipcValidationKey","randomBytes","toString","ipcServer","require","createServer","req","res","url","URL","key","searchParams","get","end","method","args","JSON","parse","Array","isArray","stack","result","stringify","err","code","console","error","name","message","ipcPort","Promise","resolveIpc","listen","hostname","addr","address","port"],"mappings":"AACA,SAASA,WAAW,QAAQ,eAAc;AAC1C,OAAOC,YAAY,SAAQ;AAC3B,OAAOC,aAAa,wBAAuB;AAC3C,SAASC,cAAc,QAAQ,kBAAiB;AAEhD,qDAAqD;AACrD,qDAAqD;AACrD,+CAA+C;AAC/C,OAAO,eAAeC,gBACpBC,MAAuC;IAMvC,6EAA6E;IAC7E,yEAAyE;IACzE,6BAA6B;IAC7B,MAAMC,mBAAmBL,OAAOM,WAAW,CAAC,IAAIC,QAAQ,CAAC;IAEzD,MAAMC,YAAY,AAACC,QAAQ,QAAkCC,YAAY,CACvE,OAAOC,KAAKC;QACV,IAAI;YACF,MAAMC,MAAM,IAAIC,IAAIH,IAAIE,GAAG,IAAI,KAAK;YACpC,MAAME,MAAMF,IAAIG,YAAY,CAACC,GAAG,CAAC;YAEjC,IAAIF,QAAQV,kBAAkB;gBAC5B,OAAOO,IAAIM,GAAG;YAChB;YAEA,MAAMC,SAASN,IAAIG,YAAY,CAACC,GAAG,CAAC;YACpC,MAAMG,OAAcC,KAAKC,KAAK,CAACT,IAAIG,YAAY,CAACC,GAAG,CAAC,WAAW;YAE/D,IAAI,CAACE,UAAU,CAACI,MAAMC,OAAO,CAACJ,OAAO;gBACnC,OAAOR,IAAIM,GAAG;YAChB;YAEA,IAAI,OAAO,AAACd,MAAc,CAACe,OAAO,KAAK,YAAY;oBACHC;gBAA9C,IAAID,WAAW,iCAA+BC,SAAAA,IAAI,CAAC,EAAE,qBAAPA,OAASK,KAAK,GAAE;oBAC5DL,IAAI,CAAC,EAAE,GAAGlB,eAAekB,IAAI,CAAC,EAAE;gBAClC;gBACA,IAAIM,SAAS,MAAM,AAACtB,MAAc,CAACe,OAAO,IAAIC;gBAE9C,IAAIM,UAAU,OAAOA,WAAW,YAAYA,OAAOD,KAAK,EAAE;oBACxDC,SAAS3B,YAAY2B;gBACvB;gBACAd,IAAIM,GAAG,CAACG,KAAKM,SAAS,CAACD,UAAU;YACnC;QACF,EAAE,OAAOE,KAAU;YACjB,IAAI3B,QAAQ2B,QAAQA,IAAIC,IAAI,KAAK,UAAU;gBACzCC,QAAQC,KAAK,CAACH;YAChB;YACAhB,IAAIM,GAAG,CACLG,KAAKM,SAAS,CAAC;gBACbC,KAAK;oBAAEI,MAAMJ,IAAII,IAAI;oBAAEC,SAASL,IAAIK,OAAO;oBAAER,OAAOG,IAAIH,KAAK;gBAAC;YAChE;QAEJ;IACF;IAGF,MAAMS,UAAU,MAAM,IAAIC,QAAgB,CAACC;QACzC5B,UAAU6B,MAAM,CAAC,GAAGjC,OAAOkC,QAAQ,EAAE;YACnC,MAAMC,OAAO/B,UAAUgC,OAAO;YAE9B,IAAID,QAAQ,OAAOA,SAAS,UAAU;gBACpCH,WAAWG,KAAKE,IAAI;YACtB;QACF;IACF;IAEA,OAAO;QACLP;QACA1B;QACAH;IACF;AACF"}
|
||||
23
node_modules/next/dist/esm/server/lib/server-ipc/invoke-request.js
generated
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
import { filterReqHeaders, ipcForbiddenHeaders } from "./utils";
|
||||
export const invokeRequest = async (targetUrl, requestInit, readableBody)=>{
|
||||
const invokeHeaders = filterReqHeaders({
|
||||
"cache-control": "",
|
||||
...requestInit.headers
|
||||
}, ipcForbiddenHeaders);
|
||||
return await fetch(targetUrl, {
|
||||
headers: invokeHeaders,
|
||||
method: requestInit.method,
|
||||
redirect: "manual",
|
||||
signal: requestInit.signal,
|
||||
...requestInit.method !== "GET" && requestInit.method !== "HEAD" && readableBody ? {
|
||||
body: readableBody,
|
||||
duplex: "half"
|
||||
} : {},
|
||||
next: {
|
||||
// @ts-ignore
|
||||
internal: true
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
//# sourceMappingURL=invoke-request.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/server-ipc/invoke-request.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/server-ipc/invoke-request.ts"],"names":["filterReqHeaders","ipcForbiddenHeaders","invokeRequest","targetUrl","requestInit","readableBody","invokeHeaders","headers","fetch","method","redirect","signal","body","duplex","next","internal"],"mappings":"AAEA,SAASA,gBAAgB,EAAEC,mBAAmB,QAAQ,UAAS;AAE/D,OAAO,MAAMC,gBAAgB,OAC3BC,WACAC,aAKAC;IAEA,MAAMC,gBAAgBN,iBACpB;QACE,iBAAiB;QACjB,GAAGI,YAAYG,OAAO;IACxB,GACAN;IAGF,OAAO,MAAMO,MAAML,WAAW;QAC5BI,SAASD;QACTG,QAAQL,YAAYK,MAAM;QAC1BC,UAAU;QACVC,QAAQP,YAAYO,MAAM;QAE1B,GAAIP,YAAYK,MAAM,KAAK,SAC3BL,YAAYK,MAAM,KAAK,UACvBJ,eACI;YACEO,MAAMP;YACNQ,QAAQ;QACV,IACA,CAAC,CAAC;QAENC,MAAM;YACJ,aAAa;YACbC,UAAU;QACZ;IACF;AACF,EAAC"}
|
||||
38
node_modules/next/dist/esm/server/lib/server-ipc/request-utils.js
generated
vendored
Normal file
@ -0,0 +1,38 @@
|
||||
import { decorateServerError } from "../../../shared/lib/error-source";
|
||||
import { PageNotFoundError } from "../../../shared/lib/utils";
|
||||
import { invokeRequest } from "./invoke-request";
|
||||
export const deserializeErr = (serializedErr)=>{
|
||||
if (!serializedErr || typeof serializedErr !== "object" || !serializedErr.stack) {
|
||||
return serializedErr;
|
||||
}
|
||||
let ErrorType = Error;
|
||||
if (serializedErr.name === "PageNotFoundError") {
|
||||
ErrorType = PageNotFoundError;
|
||||
}
|
||||
const err = new ErrorType(serializedErr.message);
|
||||
err.stack = serializedErr.stack;
|
||||
err.name = serializedErr.name;
|
||||
err.digest = serializedErr.digest;
|
||||
if (process.env.NODE_ENV === "development" && process.env.NEXT_RUNTIME !== "edge") {
|
||||
decorateServerError(err, serializedErr.source || "server");
|
||||
}
|
||||
return err;
|
||||
};
|
||||
export async function invokeIpcMethod({ fetchHostname = "localhost", method, args, ipcPort, ipcKey }) {
|
||||
if (ipcPort) {
|
||||
const res = await invokeRequest(`http://${fetchHostname}:${ipcPort}?key=${ipcKey}&method=${method}&args=${encodeURIComponent(JSON.stringify(args))}`, {
|
||||
method: "GET",
|
||||
headers: {}
|
||||
});
|
||||
const body = await res.text();
|
||||
if (body.startsWith("{") && body.endsWith("}")) {
|
||||
const parsedBody = JSON.parse(body);
|
||||
if (parsedBody && typeof parsedBody === "object" && "err" in parsedBody && "stack" in parsedBody.err) {
|
||||
throw deserializeErr(parsedBody.err);
|
||||
}
|
||||
return parsedBody;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=request-utils.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/server-ipc/request-utils.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/server-ipc/request-utils.ts"],"names":["decorateServerError","PageNotFoundError","invokeRequest","deserializeErr","serializedErr","stack","ErrorType","Error","name","err","message","digest","process","env","NODE_ENV","NEXT_RUNTIME","source","invokeIpcMethod","fetchHostname","method","args","ipcPort","ipcKey","res","encodeURIComponent","JSON","stringify","headers","body","text","startsWith","endsWith","parsedBody","parse"],"mappings":"AAAA,SAASA,mBAAmB,QAAQ,mCAAkC;AACtE,SAASC,iBAAiB,QAAQ,4BAA2B;AAC7D,SAASC,aAAa,QAAQ,mBAAkB;AAEhD,OAAO,MAAMC,iBAAiB,CAACC;IAC7B,IACE,CAACA,iBACD,OAAOA,kBAAkB,YACzB,CAACA,cAAcC,KAAK,EACpB;QACA,OAAOD;IACT;IACA,IAAIE,YAAiBC;IAErB,IAAIH,cAAcI,IAAI,KAAK,qBAAqB;QAC9CF,YAAYL;IACd;IAEA,MAAMQ,MAAM,IAAIH,UAAUF,cAAcM,OAAO;IAC/CD,IAAIJ,KAAK,GAAGD,cAAcC,KAAK;IAC/BI,IAAID,IAAI,GAAGJ,cAAcI,IAAI;IAC3BC,IAAYE,MAAM,GAAGP,cAAcO,MAAM;IAE3C,IACEC,QAAQC,GAAG,CAACC,QAAQ,KAAK,iBACzBF,QAAQC,GAAG,CAACE,YAAY,KAAK,QAC7B;QACAf,oBAAoBS,KAAKL,cAAcY,MAAM,IAAI;IACnD;IACA,OAAOP;AACT,EAAC;AAED,OAAO,eAAeQ,gBAAgB,EACpCC,gBAAgB,WAAW,EAC3BC,MAAM,EACNC,IAAI,EACJC,OAAO,EACPC,MAAM,EAOP;IACC,IAAID,SAAS;QACX,MAAME,MAAM,MAAMrB,cAChB,CAAC,OAAO,EAAEgB,cAAc,CAAC,EAAEG,QAAQ,KAAK,EAAEC,OAAO,QAAQ,EACvDH,OACD,MAAM,EAAEK,mBAAmBC,KAAKC,SAAS,CAACN,OAAO,CAAC,EACnD;YACED,QAAQ;YACRQ,SAAS,CAAC;QACZ;QAEF,MAAMC,OAAO,MAAML,IAAIM,IAAI;QAE3B,IAAID,KAAKE,UAAU,CAAC,QAAQF,KAAKG,QAAQ,CAAC,MAAM;YAC9C,MAAMC,aAAaP,KAAKQ,KAAK,CAACL;YAE9B,IACEI,cACA,OAAOA,eAAe,YACtB,SAASA,cACT,WAAWA,WAAWvB,GAAG,EACzB;gBACA,MAAMN,eAAe6B,WAAWvB,GAAG;YACrC;YACA,OAAOuB;QACT;IACF;AACF"}
|
||||
31
node_modules/next/dist/esm/server/lib/server-ipc/utils.js
generated
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
export const ipcForbiddenHeaders = [
|
||||
"accept-encoding",
|
||||
"keepalive",
|
||||
"keep-alive",
|
||||
"content-encoding",
|
||||
"transfer-encoding",
|
||||
// https://github.com/nodejs/undici/issues/1470
|
||||
"connection",
|
||||
// marked as unsupported by undici: https://github.com/nodejs/undici/blob/c83b084879fa0bb8e0469d31ec61428ac68160d5/lib/core/request.js#L354
|
||||
"expect"
|
||||
];
|
||||
export const actionsForbiddenHeaders = [
|
||||
...ipcForbiddenHeaders,
|
||||
"content-length",
|
||||
"set-cookie"
|
||||
];
|
||||
export const filterReqHeaders = (headers, forbiddenHeaders)=>{
|
||||
// Some browsers are not matching spec and sending Content-Length: 0. This causes issues in undici
|
||||
// https://github.com/nodejs/undici/issues/2046
|
||||
if (headers["content-length"] && headers["content-length"] === "0") {
|
||||
delete headers["content-length"];
|
||||
}
|
||||
for (const [key, value] of Object.entries(headers)){
|
||||
if (forbiddenHeaders.includes(key) || !(Array.isArray(value) || typeof value === "string")) {
|
||||
delete headers[key];
|
||||
}
|
||||
}
|
||||
return headers;
|
||||
};
|
||||
|
||||
//# sourceMappingURL=utils.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/server-ipc/utils.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/server-ipc/utils.ts"],"names":["ipcForbiddenHeaders","actionsForbiddenHeaders","filterReqHeaders","headers","forbiddenHeaders","key","value","Object","entries","includes","Array","isArray"],"mappings":"AAAA,OAAO,MAAMA,sBAAsB;IACjC;IACA;IACA;IACA;IACA;IACA,+CAA+C;IAC/C;IACA,2IAA2I;IAC3I;CACD,CAAA;AAED,OAAO,MAAMC,0BAA0B;OAClCD;IACH;IACA;CACD,CAAA;AAED,OAAO,MAAME,mBAAmB,CAC9BC,SACAC;IAEA,kGAAkG;IAClG,+CAA+C;IAC/C,IAAID,OAAO,CAAC,iBAAiB,IAAIA,OAAO,CAAC,iBAAiB,KAAK,KAAK;QAClE,OAAOA,OAAO,CAAC,iBAAiB;IAClC;IAEA,KAAK,MAAM,CAACE,KAAKC,MAAM,IAAIC,OAAOC,OAAO,CAACL,SAAU;QAClD,IACEC,iBAAiBK,QAAQ,CAACJ,QAC1B,CAAEK,CAAAA,MAAMC,OAAO,CAACL,UAAU,OAAOA,UAAU,QAAO,GAClD;YACA,OAAOH,OAAO,CAACE,IAAI;QACrB;IACF;IACA,OAAOF;AACT,EAAC"}
|
||||
32
node_modules/next/dist/esm/server/lib/squoosh/avif/avif_enc.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
// eslint-disable-next-line no-shadow
|
||||
export const enum AVIFTune {
|
||||
auto,
|
||||
psnr,
|
||||
ssim,
|
||||
}
|
||||
|
||||
export interface EncodeOptions {
|
||||
cqLevel: number
|
||||
denoiseLevel: number
|
||||
cqAlphaLevel: number
|
||||
tileRowsLog2: number
|
||||
tileColsLog2: number
|
||||
speed: number
|
||||
subsample: number
|
||||
chromaDeltaQ: boolean
|
||||
sharpness: number
|
||||
tune: AVIFTune
|
||||
}
|
||||
|
||||
export interface AVIFModule extends EmscriptenWasm.Module {
|
||||
encode(
|
||||
data: BufferSource,
|
||||
width: number,
|
||||
height: number,
|
||||
options: EncodeOptions
|
||||
): Uint8Array
|
||||
}
|
||||
|
||||
declare var moduleFactory: EmscriptenWasm.ModuleFactory<AVIFModule>
|
||||
|
||||
export default moduleFactory
|
||||
1522
node_modules/next/dist/esm/server/lib/squoosh/avif/avif_node_dec.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
node_modules/next/dist/esm/server/lib/squoosh/avif/avif_node_dec.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1711
node_modules/next/dist/esm/server/lib/squoosh/avif/avif_node_enc.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
node_modules/next/dist/esm/server/lib/squoosh/avif/avif_node_enc.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
272
node_modules/next/dist/esm/server/lib/squoosh/codecs.js
generated
vendored
Normal file
@ -0,0 +1,272 @@
|
||||
import { promises as fsp } from "fs";
|
||||
import * as path from "path";
|
||||
import { instantiateEmscriptenWasm, pathify } from "./emscripten-utils.js";
|
||||
// @ts-ignore
|
||||
import mozEnc from "./mozjpeg/mozjpeg_node_enc.js";
|
||||
const mozEncWasm = path.resolve(__dirname, "./mozjpeg/mozjpeg_node_enc.wasm");
|
||||
// @ts-ignore
|
||||
import mozDec from "./mozjpeg/mozjpeg_node_dec.js";
|
||||
const mozDecWasm = path.resolve(__dirname, "./mozjpeg/mozjpeg_node_dec.wasm");
|
||||
// @ts-ignore
|
||||
import webpEnc from "./webp/webp_node_enc.js";
|
||||
const webpEncWasm = path.resolve(__dirname, "./webp/webp_node_enc.wasm");
|
||||
// @ts-ignore
|
||||
import webpDec from "./webp/webp_node_dec.js";
|
||||
const webpDecWasm = path.resolve(__dirname, "./webp/webp_node_dec.wasm");
|
||||
// @ts-ignore
|
||||
import avifEnc from "./avif/avif_node_enc.js";
|
||||
const avifEncWasm = path.resolve(__dirname, "./avif/avif_node_enc.wasm");
|
||||
// @ts-ignore
|
||||
import avifDec from "./avif/avif_node_dec.js";
|
||||
const avifDecWasm = path.resolve(__dirname, "./avif/avif_node_dec.wasm");
|
||||
// PNG
|
||||
// @ts-ignore
|
||||
import * as pngEncDec from "./png/squoosh_png.js";
|
||||
const pngEncDecWasm = path.resolve(__dirname, "./png/squoosh_png_bg.wasm");
|
||||
const pngEncDecInit = ()=>pngEncDec.default(fsp.readFile(pathify(pngEncDecWasm)));
|
||||
// OxiPNG
|
||||
// @ts-ignore
|
||||
import * as oxipng from "./png/squoosh_oxipng.js";
|
||||
const oxipngWasm = path.resolve(__dirname, "./png/squoosh_oxipng_bg.wasm");
|
||||
const oxipngInit = ()=>oxipng.default(fsp.readFile(pathify(oxipngWasm)));
|
||||
// Resize
|
||||
// @ts-ignore
|
||||
import * as resize from "./resize/squoosh_resize.js";
|
||||
const resizeWasm = path.resolve(__dirname, "./resize/squoosh_resize_bg.wasm");
|
||||
const resizeInit = ()=>resize.default(fsp.readFile(pathify(resizeWasm)));
|
||||
// rotate
|
||||
const rotateWasm = path.resolve(__dirname, "./rotate/rotate.wasm");
|
||||
// Our decoders currently rely on a `ImageData` global.
|
||||
import ImageData from "./image_data";
|
||||
globalThis.ImageData = ImageData;
|
||||
function resizeNameToIndex(name) {
|
||||
switch(name){
|
||||
case "triangle":
|
||||
return 0;
|
||||
case "catrom":
|
||||
return 1;
|
||||
case "mitchell":
|
||||
return 2;
|
||||
case "lanczos3":
|
||||
return 3;
|
||||
default:
|
||||
throw Error(`Unknown resize algorithm "${name}"`);
|
||||
}
|
||||
}
|
||||
function resizeWithAspect({ input_width, input_height, target_width, target_height }) {
|
||||
if (!target_width && !target_height) {
|
||||
throw Error("Need to specify at least width or height when resizing");
|
||||
}
|
||||
if (target_width && target_height) {
|
||||
return {
|
||||
width: target_width,
|
||||
height: target_height
|
||||
};
|
||||
}
|
||||
if (!target_width) {
|
||||
return {
|
||||
width: Math.round(input_width / input_height * target_height),
|
||||
height: target_height
|
||||
};
|
||||
}
|
||||
return {
|
||||
width: target_width,
|
||||
height: Math.round(input_height / input_width * target_width)
|
||||
};
|
||||
}
|
||||
export const preprocessors = {
|
||||
resize: {
|
||||
name: "Resize",
|
||||
description: "Resize the image before compressing",
|
||||
instantiate: async ()=>{
|
||||
await resizeInit();
|
||||
return (buffer, input_width, input_height, { width, height, method, premultiply, linearRGB })=>{
|
||||
({ width, height } = resizeWithAspect({
|
||||
input_width,
|
||||
input_height,
|
||||
target_width: width,
|
||||
target_height: height
|
||||
}));
|
||||
const imageData = new ImageData(resize.resize(buffer, input_width, input_height, width, height, resizeNameToIndex(method), premultiply, linearRGB), width, height);
|
||||
resize.cleanup();
|
||||
return imageData;
|
||||
};
|
||||
},
|
||||
defaultOptions: {
|
||||
method: "lanczos3",
|
||||
fitMethod: "stretch",
|
||||
premultiply: true,
|
||||
linearRGB: true
|
||||
}
|
||||
},
|
||||
rotate: {
|
||||
name: "Rotate",
|
||||
description: "Rotate image",
|
||||
instantiate: async ()=>{
|
||||
return async (buffer, width, height, { numRotations })=>{
|
||||
const degrees = numRotations * 90 % 360;
|
||||
const sameDimensions = degrees === 0 || degrees === 180;
|
||||
const size = width * height * 4;
|
||||
const instance = (await WebAssembly.instantiate(await fsp.readFile(pathify(rotateWasm)))).instance;
|
||||
const { memory } = instance.exports;
|
||||
const additionalPagesNeeded = Math.ceil((size * 2 - memory.buffer.byteLength + 8) / (64 * 1024));
|
||||
if (additionalPagesNeeded > 0) {
|
||||
memory.grow(additionalPagesNeeded);
|
||||
}
|
||||
const view = new Uint8ClampedArray(memory.buffer);
|
||||
view.set(buffer, 8);
|
||||
instance.exports.rotate(width, height, degrees);
|
||||
return new ImageData(view.slice(size + 8, size * 2 + 8), sameDimensions ? width : height, sameDimensions ? height : width);
|
||||
};
|
||||
},
|
||||
defaultOptions: {
|
||||
numRotations: 0
|
||||
}
|
||||
}
|
||||
};
|
||||
export const codecs = {
|
||||
mozjpeg: {
|
||||
name: "MozJPEG",
|
||||
extension: "jpg",
|
||||
detectors: [
|
||||
/^\xFF\xD8\xFF/
|
||||
],
|
||||
dec: ()=>instantiateEmscriptenWasm(mozDec, mozDecWasm),
|
||||
enc: ()=>instantiateEmscriptenWasm(mozEnc, mozEncWasm),
|
||||
defaultEncoderOptions: {
|
||||
quality: 75,
|
||||
baseline: false,
|
||||
arithmetic: false,
|
||||
progressive: true,
|
||||
optimize_coding: true,
|
||||
smoothing: 0,
|
||||
color_space: 3 /*YCbCr*/ ,
|
||||
quant_table: 3,
|
||||
trellis_multipass: false,
|
||||
trellis_opt_zero: false,
|
||||
trellis_opt_table: false,
|
||||
trellis_loops: 1,
|
||||
auto_subsample: true,
|
||||
chroma_subsample: 2,
|
||||
separate_chroma_quality: false,
|
||||
chroma_quality: 75
|
||||
},
|
||||
autoOptimize: {
|
||||
option: "quality",
|
||||
min: 0,
|
||||
max: 100
|
||||
}
|
||||
},
|
||||
webp: {
|
||||
name: "WebP",
|
||||
extension: "webp",
|
||||
detectors: [
|
||||
/^RIFF....WEBPVP8[LX ]/s
|
||||
],
|
||||
dec: ()=>instantiateEmscriptenWasm(webpDec, webpDecWasm),
|
||||
enc: ()=>instantiateEmscriptenWasm(webpEnc, webpEncWasm),
|
||||
defaultEncoderOptions: {
|
||||
quality: 75,
|
||||
target_size: 0,
|
||||
target_PSNR: 0,
|
||||
method: 4,
|
||||
sns_strength: 50,
|
||||
filter_strength: 60,
|
||||
filter_sharpness: 0,
|
||||
filter_type: 1,
|
||||
partitions: 0,
|
||||
segments: 4,
|
||||
pass: 1,
|
||||
show_compressed: 0,
|
||||
preprocessing: 0,
|
||||
autofilter: 0,
|
||||
partition_limit: 0,
|
||||
alpha_compression: 1,
|
||||
alpha_filtering: 1,
|
||||
alpha_quality: 100,
|
||||
lossless: 0,
|
||||
exact: 0,
|
||||
image_hint: 0,
|
||||
emulate_jpeg_size: 0,
|
||||
thread_level: 0,
|
||||
low_memory: 0,
|
||||
near_lossless: 100,
|
||||
use_delta_palette: 0,
|
||||
use_sharp_yuv: 0
|
||||
},
|
||||
autoOptimize: {
|
||||
option: "quality",
|
||||
min: 0,
|
||||
max: 100
|
||||
}
|
||||
},
|
||||
avif: {
|
||||
name: "AVIF",
|
||||
extension: "avif",
|
||||
// eslint-disable-next-line no-control-regex
|
||||
detectors: [
|
||||
/^\x00\x00\x00 ftypavif\x00\x00\x00\x00/
|
||||
],
|
||||
dec: ()=>instantiateEmscriptenWasm(avifDec, avifDecWasm),
|
||||
enc: async ()=>{
|
||||
return instantiateEmscriptenWasm(avifEnc, avifEncWasm);
|
||||
},
|
||||
defaultEncoderOptions: {
|
||||
cqLevel: 33,
|
||||
cqAlphaLevel: -1,
|
||||
denoiseLevel: 0,
|
||||
tileColsLog2: 0,
|
||||
tileRowsLog2: 0,
|
||||
speed: 6,
|
||||
subsample: 1,
|
||||
chromaDeltaQ: false,
|
||||
sharpness: 0,
|
||||
tune: 0 /* AVIFTune.auto */
|
||||
},
|
||||
autoOptimize: {
|
||||
option: "cqLevel",
|
||||
min: 62,
|
||||
max: 0
|
||||
}
|
||||
},
|
||||
oxipng: {
|
||||
name: "OxiPNG",
|
||||
extension: "png",
|
||||
// eslint-disable-next-line no-control-regex
|
||||
detectors: [
|
||||
/^\x89PNG\x0D\x0A\x1A\x0A/
|
||||
],
|
||||
dec: async ()=>{
|
||||
await pngEncDecInit();
|
||||
return {
|
||||
decode: (buffer)=>{
|
||||
const imageData = pngEncDec.decode(buffer);
|
||||
pngEncDec.cleanup();
|
||||
return imageData;
|
||||
}
|
||||
};
|
||||
},
|
||||
enc: async ()=>{
|
||||
await pngEncDecInit();
|
||||
await oxipngInit();
|
||||
return {
|
||||
encode: (buffer, width, height, opts)=>{
|
||||
const simplePng = pngEncDec.encode(new Uint8Array(buffer), width, height);
|
||||
const imageData = oxipng.optimise(simplePng, opts.level, false);
|
||||
oxipng.cleanup();
|
||||
return imageData;
|
||||
}
|
||||
};
|
||||
},
|
||||
defaultEncoderOptions: {
|
||||
level: 2
|
||||
},
|
||||
autoOptimize: {
|
||||
option: "level",
|
||||
min: 6,
|
||||
max: 1
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//# sourceMappingURL=codecs.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/squoosh/codecs.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
121
node_modules/next/dist/esm/server/lib/squoosh/emscripten-types.d.ts
generated
vendored
Normal file
@ -0,0 +1,121 @@
|
||||
// These types roughly model the object that the JS files generated by Emscripten define. Copied from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/emscripten/index.d.ts and turned into a type definition rather than a global to support our way of using Emscripten.
|
||||
declare namespace EmscriptenWasm {
|
||||
type ModuleFactory<T extends Module = Module> = (
|
||||
moduleOverrides?: ModuleOpts
|
||||
) => Promise<T>
|
||||
|
||||
type EnvironmentType = 'WEB' | 'NODE' | 'SHELL' | 'WORKER'
|
||||
|
||||
// Options object for modularized Emscripten files. Shoe-horned by @surma.
|
||||
// FIXME: This an incomplete definition!
|
||||
interface ModuleOpts {
|
||||
mainScriptUrlOrBlob?: string
|
||||
noInitialRun?: boolean
|
||||
locateFile?: (url: string) => string
|
||||
onRuntimeInitialized?: () => void
|
||||
}
|
||||
|
||||
interface Module {
|
||||
print(str: string): void
|
||||
printErr(str: string): void
|
||||
arguments: string[]
|
||||
environment: EnvironmentType
|
||||
preInit: { (): void }[]
|
||||
preRun: { (): void }[]
|
||||
postRun: { (): void }[]
|
||||
preinitializedWebGLContext: WebGLRenderingContext
|
||||
noInitialRun: boolean
|
||||
noExitRuntime: boolean
|
||||
logReadFiles: boolean
|
||||
filePackagePrefixURL: string
|
||||
wasmBinary: ArrayBuffer
|
||||
|
||||
destroy(object: object): void
|
||||
getPreloadedPackage(
|
||||
remotePackageName: string,
|
||||
remotePackageSize: number
|
||||
): ArrayBuffer
|
||||
instantiateWasm(
|
||||
imports: WebAssembly.Imports,
|
||||
successCallback: (module: WebAssembly.Module) => void
|
||||
): WebAssembly.Exports
|
||||
locateFile(url: string): string
|
||||
onCustomMessage(event: MessageEvent): void
|
||||
|
||||
Runtime: any
|
||||
|
||||
ccall(
|
||||
ident: string,
|
||||
returnType: string | null,
|
||||
argTypes: string[],
|
||||
args: any[]
|
||||
): any
|
||||
cwrap(ident: string, returnType: string | null, argTypes: string[]): any
|
||||
|
||||
setValue(ptr: number, value: any, type: string, noSafe?: boolean): void
|
||||
getValue(ptr: number, type: string, noSafe?: boolean): number
|
||||
|
||||
ALLOC_NORMAL: number
|
||||
ALLOC_STACK: number
|
||||
ALLOC_STATIC: number
|
||||
ALLOC_DYNAMIC: number
|
||||
ALLOC_NONE: number
|
||||
|
||||
allocate(slab: any, types: string, allocator: number, ptr: number): number
|
||||
allocate(slab: any, types: string[], allocator: number, ptr: number): number
|
||||
|
||||
Pointer_stringify(ptr: number, length?: number): string
|
||||
UTF16ToString(ptr: number): string
|
||||
stringToUTF16(str: string, outPtr: number): void
|
||||
UTF32ToString(ptr: number): string
|
||||
stringToUTF32(str: string, outPtr: number): void
|
||||
|
||||
// USE_TYPED_ARRAYS == 1
|
||||
HEAP: Int32Array
|
||||
IHEAP: Int32Array
|
||||
FHEAP: Float64Array
|
||||
|
||||
// USE_TYPED_ARRAYS == 2
|
||||
HEAP8: Int8Array
|
||||
HEAP16: Int16Array
|
||||
HEAP32: Int32Array
|
||||
HEAPU8: Uint8Array
|
||||
HEAPU16: Uint16Array
|
||||
HEAPU32: Uint32Array
|
||||
HEAPF32: Float32Array
|
||||
HEAPF64: Float64Array
|
||||
|
||||
TOTAL_STACK: number
|
||||
TOTAL_MEMORY: number
|
||||
FAST_MEMORY: number
|
||||
|
||||
addOnPreRun(cb: () => any): void
|
||||
addOnInit(cb: () => any): void
|
||||
addOnPreMain(cb: () => any): void
|
||||
addOnExit(cb: () => any): void
|
||||
addOnPostRun(cb: () => any): void
|
||||
|
||||
// Tools
|
||||
intArrayFromString(
|
||||
stringy: string,
|
||||
dontAddNull?: boolean,
|
||||
length?: number
|
||||
): number[]
|
||||
intArrayToString(array: number[]): string
|
||||
writeStringToMemory(str: string, buffer: number, dontAddNull: boolean): void
|
||||
writeArrayToMemory(array: number[], buffer: number): void
|
||||
writeAsciiToMemory(str: string, buffer: number, dontAddNull: boolean): void
|
||||
|
||||
addRunDependency(id: any): void
|
||||
removeRunDependency(id: any): void
|
||||
|
||||
preloadedImages: any
|
||||
preloadedAudios: any
|
||||
|
||||
_malloc(size: number): number
|
||||
_free(ptr: number): void
|
||||
|
||||
// Augmentations below by @surma.
|
||||
onRuntimeInitialized: () => void | null
|
||||
}
|
||||
}
|
||||
22
node_modules/next/dist/esm/server/lib/squoosh/emscripten-utils.js
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
import { fileURLToPath } from "url";
|
||||
export function pathify(path) {
|
||||
if (path.startsWith("file://")) {
|
||||
path = fileURLToPath(path);
|
||||
}
|
||||
return path;
|
||||
}
|
||||
export function instantiateEmscriptenWasm(factory, path, workerJS = "") {
|
||||
return factory({
|
||||
locateFile (requestPath) {
|
||||
// The glue code generated by emscripten uses the original
|
||||
// file names of the worker file and the wasm binary.
|
||||
// These will have changed in the bundling process and
|
||||
// we need to inject them here.
|
||||
if (requestPath.endsWith(".wasm")) return pathify(path);
|
||||
if (requestPath.endsWith(".worker.js")) return pathify(workerJS);
|
||||
return requestPath;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
//# sourceMappingURL=emscripten-utils.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/squoosh/emscripten-utils.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/squoosh/emscripten-utils.ts"],"names":["fileURLToPath","pathify","path","startsWith","instantiateEmscriptenWasm","factory","workerJS","locateFile","requestPath","endsWith"],"mappings":"AAAA,SAASA,aAAa,QAAQ,MAAK;AAEnC,OAAO,SAASC,QAAQC,IAAY;IAClC,IAAIA,KAAKC,UAAU,CAAC,YAAY;QAC9BD,OAAOF,cAAcE;IACvB;IACA,OAAOA;AACT;AAEA,OAAO,SAASE,0BACdC,OAAwC,EACxCH,IAAY,EACZI,WAAmB,EAAE;IAErB,OAAOD,QAAQ;QACbE,YAAWC,WAAW;YACpB,0DAA0D;YAC1D,qDAAqD;YACrD,sDAAsD;YACtD,+BAA+B;YAC/B,IAAIA,YAAYC,QAAQ,CAAC,UAAU,OAAOR,QAAQC;YAClD,IAAIM,YAAYC,QAAQ,CAAC,eAAe,OAAOR,QAAQK;YACvD,OAAOE;QACT;IACF;AACF"}
|
||||
21
node_modules/next/dist/esm/server/lib/squoosh/image_data.js
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
export default class ImageData {
|
||||
static from(input) {
|
||||
return new ImageData(input.data || input._data, input.width, input.height);
|
||||
}
|
||||
get data() {
|
||||
if (Object.prototype.toString.call(this._data) === "[object Object]") {
|
||||
return Buffer.from(Object.values(this._data));
|
||||
}
|
||||
if (this._data instanceof Buffer || this._data instanceof Uint8Array || this._data instanceof Uint8ClampedArray) {
|
||||
return Buffer.from(this._data);
|
||||
}
|
||||
throw new Error("invariant");
|
||||
}
|
||||
constructor(data, width, height){
|
||||
this._data = data;
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=image_data.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/squoosh/image_data.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/squoosh/image_data.ts"],"names":["ImageData","from","input","data","_data","width","height","Object","prototype","toString","call","Buffer","values","Uint8Array","Uint8ClampedArray","Error","constructor"],"mappings":"AAAA,eAAe,MAAMA;IACnB,OAAOC,KAAKC,KAAgB,EAAa;QACvC,OAAO,IAAIF,UAAUE,MAAMC,IAAI,IAAID,MAAME,KAAK,EAAEF,MAAMG,KAAK,EAAEH,MAAMI,MAAM;IAC3E;IAMA,IAAIH,OAAe;QACjB,IAAII,OAAOC,SAAS,CAACC,QAAQ,CAACC,IAAI,CAAC,IAAI,CAACN,KAAK,MAAM,mBAAmB;YACpE,OAAOO,OAAOV,IAAI,CAACM,OAAOK,MAAM,CAAC,IAAI,CAACR,KAAK;QAC7C;QACA,IACE,IAAI,CAACA,KAAK,YAAYO,UACtB,IAAI,CAACP,KAAK,YAAYS,cACtB,IAAI,CAACT,KAAK,YAAYU,mBACtB;YACA,OAAOH,OAAOV,IAAI,CAAC,IAAI,CAACG,KAAK;QAC/B;QACA,MAAM,IAAIW,MAAM;IAClB;IAEAC,YACEb,IAA6C,EAC7CE,KAAa,EACbC,MAAc,CACd;QACA,IAAI,CAACF,KAAK,GAAGD;QACb,IAAI,CAACE,KAAK,GAAGA;QACb,IAAI,CAACC,MAAM,GAAGA;IAChB;AACF"}
|
||||
77
node_modules/next/dist/esm/server/lib/squoosh/impl.js
generated
vendored
Normal file
@ -0,0 +1,77 @@
|
||||
import { codecs as supportedFormats, preprocessors } from "./codecs";
|
||||
import ImageData from "./image_data";
|
||||
export async function decodeBuffer(_buffer) {
|
||||
var _Object_entries_find;
|
||||
const buffer = Buffer.from(_buffer);
|
||||
const firstChunk = buffer.slice(0, 16);
|
||||
const firstChunkString = Array.from(firstChunk).map((v)=>String.fromCodePoint(v)).join("");
|
||||
const key = (_Object_entries_find = Object.entries(supportedFormats).find(([, { detectors }])=>detectors.some((detector)=>detector.exec(firstChunkString)))) == null ? void 0 : _Object_entries_find[0];
|
||||
if (!key) {
|
||||
throw Error(`Buffer has an unsupported format`);
|
||||
}
|
||||
const encoder = supportedFormats[key];
|
||||
const mod = await encoder.dec();
|
||||
const rgba = mod.decode(new Uint8Array(buffer));
|
||||
return rgba;
|
||||
}
|
||||
export async function rotate(image, numRotations) {
|
||||
image = ImageData.from(image);
|
||||
const m = await preprocessors["rotate"].instantiate();
|
||||
return await m(image.data, image.width, image.height, {
|
||||
numRotations
|
||||
});
|
||||
}
|
||||
export async function resize({ image, width, height }) {
|
||||
image = ImageData.from(image);
|
||||
const p = preprocessors["resize"];
|
||||
const m = await p.instantiate();
|
||||
return await m(image.data, image.width, image.height, {
|
||||
...p.defaultOptions,
|
||||
width,
|
||||
height
|
||||
});
|
||||
}
|
||||
export async function encodeJpeg(image, { quality }) {
|
||||
image = ImageData.from(image);
|
||||
const e = supportedFormats["mozjpeg"];
|
||||
const m = await e.enc();
|
||||
const r = await m.encode(image.data, image.width, image.height, {
|
||||
...e.defaultEncoderOptions,
|
||||
quality
|
||||
});
|
||||
return Buffer.from(r);
|
||||
}
|
||||
export async function encodeWebp(image, { quality }) {
|
||||
image = ImageData.from(image);
|
||||
const e = supportedFormats["webp"];
|
||||
const m = await e.enc();
|
||||
const r = await m.encode(image.data, image.width, image.height, {
|
||||
...e.defaultEncoderOptions,
|
||||
quality
|
||||
});
|
||||
return Buffer.from(r);
|
||||
}
|
||||
export async function encodeAvif(image, { quality }) {
|
||||
image = ImageData.from(image);
|
||||
const e = supportedFormats["avif"];
|
||||
const m = await e.enc();
|
||||
const val = e.autoOptimize.min || 62;
|
||||
const r = await m.encode(image.data, image.width, image.height, {
|
||||
...e.defaultEncoderOptions,
|
||||
// Think of cqLevel as the "amount" of quantization (0 to 62),
|
||||
// so a lower cqLevel yields higher quality; the incoming quality value is 0 to 100.
|
||||
cqLevel: Math.round(val - quality / 100 * val)
|
||||
});
|
||||
return Buffer.from(r);
|
||||
}
|
||||
export async function encodePng(image) {
|
||||
image = ImageData.from(image);
|
||||
const e = supportedFormats["oxipng"];
|
||||
const m = await e.enc();
|
||||
const r = await m.encode(image.data, image.width, image.height, {
|
||||
...e.defaultEncoderOptions
|
||||
});
|
||||
return Buffer.from(r);
|
||||
}
|
||||
|
||||
//# sourceMappingURL=impl.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/squoosh/impl.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/squoosh/impl.ts"],"names":["codecs","supportedFormats","preprocessors","ImageData","decodeBuffer","_buffer","Object","buffer","Buffer","from","firstChunk","slice","firstChunkString","Array","map","v","String","fromCodePoint","join","key","entries","find","detectors","some","detector","exec","Error","encoder","mod","dec","rgba","decode","Uint8Array","rotate","image","numRotations","m","instantiate","data","width","height","resize","p","defaultOptions","encodeJpeg","quality","e","enc","r","encode","defaultEncoderOptions","encodeWebp","encodeAvif","val","autoOptimize","min","cqLevel","Math","round","encodePng"],"mappings":"AAAA,SAASA,UAAUC,gBAAgB,EAAEC,aAAa,QAAQ,WAAU;AACpE,OAAOC,eAAe,eAAc;AAIpC,OAAO,eAAeC,aACpBC,OAA4B;QAOhBC;IALZ,MAAMC,SAASC,OAAOC,IAAI,CAACJ;IAC3B,MAAMK,aAAaH,OAAOI,KAAK,CAAC,GAAG;IACnC,MAAMC,mBAAmBC,MAAMJ,IAAI,CAACC,YACjCI,GAAG,CAAC,CAACC,IAAMC,OAAOC,aAAa,CAACF,IAChCG,IAAI,CAAC;IACR,MAAMC,OAAMb,uBAAAA,OAAOc,OAAO,CAACnB,kBAAkBoB,IAAI,CAAC,CAAC,GAAG,EAAEC,SAAS,EAAE,CAAC,GAClEA,UAAUC,IAAI,CAAC,CAACC,WAAaA,SAASC,IAAI,CAACb,wCADjCN,oBAET,CAAC,EAAE;IACN,IAAI,CAACa,KAAK;QACR,MAAMO,MAAM,CAAC,gCAAgC,CAAC;IAChD;IACA,MAAMC,UAAU1B,gBAAgB,CAACkB,IAAI;IACrC,MAAMS,MAAM,MAAMD,QAAQE,GAAG;IAC7B,MAAMC,OAAOF,IAAIG,MAAM,CAAC,IAAIC,WAAWzB;IACvC,OAAOuB;AACT;AAEA,OAAO,eAAeG,OACpBC,KAAgB,EAChBC,YAAoB;IAEpBD,QAAQ/B,UAAUM,IAAI,CAACyB;IAEvB,MAAME,IAAI,MAAMlC,aAAa,CAAC,SAAS,CAACmC,WAAW;IACnD,OAAO,MAAMD,EAAEF,MAAMI,IAAI,EAAEJ,MAAMK,KAAK,EAAEL,MAAMM,MAAM,EAAE;QAAEL;IAAa;AACvE;AAQA,OAAO,eAAeM,OAAO,EAAEP,KAAK,EAAEK,KAAK,EAAEC,MAAM,EAAc;IAC/DN,QAAQ/B,UAAUM,IAAI,CAACyB;IAEvB,MAAMQ,IAAIxC,aAAa,CAAC,SAAS;IACjC,MAAMkC,IAAI,MAAMM,EAAEL,WAAW;IAC7B,OAAO,MAAMD,EAAEF,MAAMI,IAAI,EAAEJ,MAAMK,KAAK,EAAEL,MAAMM,MAAM,EAAE;QACpD,GAAGE,EAAEC,cAAc;QACnBJ;QACAC;IACF;AACF;AAEA,OAAO,eAAeI,WACpBV,KAAgB,EAChB,EAAEW,OAAO,EAAuB;IAEhCX,QAAQ/B,UAAUM,IAAI,CAACyB;IAEvB,MAAMY,IAAI7C,gBAAgB,CAAC,UAAU;IACrC,MAAMmC,IAAI,MAAMU,EAAEC,GAAG;IACrB,MAAMC,IAAI,MAAMZ,EAAEa,MAAM,CAACf,MAAMI,IAAI,EAAEJ,MAAMK,KAAK,EAAEL,MAAMM,MAAM,EAAE;QAC9D,GAAGM,EAAEI,qBAAqB;QAC1BL;IACF;IACA,OAAOrC,OAAOC,IAAI,CAACuC;AACrB;AAEA,OAAO,eAAeG,WACpBjB,KAAgB,EAChB,EAAEW,OAAO,EAAuB;IAEhCX,QAAQ/B,UAAUM,IAAI,CAACyB;IAEvB,MAAMY,IAAI7C,gBAAgB,CAAC,OAAO;IAClC,MAAMmC,IAAI,MAAMU,EAAEC,GAAG;IACrB,MAAMC,IAAI,MAAMZ,EAAEa,MAAM,CAACf,MAAMI,IAAI,EAAEJ,MAAMK,KAAK,EAAEL,MAAMM,MAAM,EAAE;QAC9D,GAAGM,EAAEI,qBAAqB;QAC1BL;IACF;IACA,OAAOrC,OAAOC,IAAI,CAACuC;AACrB;AAEA,OAAO,eAAeI,WACpBlB,KAAgB,EAChB,EAAEW,OAAO,EAAuB;IAEhCX,QAAQ/B,UAAUM,IAAI,CAACyB;IAEvB,MAAMY,IAAI7C,gBAAgB,CAAC,OAAO;IAClC,MAAMmC,IAAI,MAAMU,EAAEC,GAAG;IACrB,MAAMM,MAAMP,EAAEQ,YAAY,CAACC,GAAG,IAAI;IAClC,MAAMP,IAAI,MAAMZ,EAAEa,MAAM,CAACf,MAAMI,IAAI,EAAEJ,MAAMK,KAAK,EAAEL,MAAMM,MAAM,EAAE;QAC9D,GAAGM,EAAEI,qBAAqB;QAC1B,8DAA8D;QAC9D,qDAAqD;QACrDM,SAASC,KAAKC,KAAK,CAACL,MAAM,AAACR,UAAU,MAAOQ;IAC9C;IACA,OAAO7C,OAAOC,IAAI,CAACuC;AACrB;AAEA,OAAO,eAAeW,UACpBzB,KAAgB;IAEhBA,QAAQ/B,UAAUM,IAAI,CAACyB;IAEvB,MAAMY,IAAI7C,gBAAgB,CAAC,SAAS;IACpC,MAAMmC,IAAI,MAAMU,EAAEC,GAAG;IACrB,MAAMC,IAAI,MAAMZ,EAAEa,MAAM,CAACf,MAAMI,IAAI,EAAEJ,MAAMK,KAAK,EAAEL,MAAMM,MAAM,EAAE;QAC9D,GAAGM,EAAEI,qBAAqB;IAC5B;IACA,OAAO1C,OAAOC,IAAI,CAACuC;AACrB"}
|
||||
69
node_modules/next/dist/esm/server/lib/squoosh/main.js
generated
vendored
Normal file
@ -0,0 +1,69 @@
|
||||
import { Worker } from "next/dist/compiled/jest-worker";
|
||||
import * as path from "path";
|
||||
import { execOnce } from "../../../shared/lib/utils";
|
||||
import { cpus } from "os";
|
||||
const getWorker = execOnce(()=>new Worker(path.resolve(__dirname, "impl"), {
|
||||
enableWorkerThreads: true,
|
||||
// There will be at most 6 workers needed since each worker will take
|
||||
// at least 1 operation type.
|
||||
numWorkers: Math.max(1, Math.min(cpus().length - 1, 6)),
|
||||
computeWorkerKey: (method)=>method
|
||||
}));
|
||||
export async function getMetadata(buffer) {
|
||||
const worker = getWorker();
|
||||
const { width, height } = await worker.decodeBuffer(buffer);
|
||||
return {
|
||||
width,
|
||||
height
|
||||
};
|
||||
}
|
||||
export async function processBuffer(buffer, operations, encoding, quality) {
|
||||
const worker = getWorker();
|
||||
let imageData = await worker.decodeBuffer(buffer);
|
||||
for (const operation of operations){
|
||||
if (operation.type === "rotate") {
|
||||
imageData = await worker.rotate(imageData, operation.numRotations);
|
||||
} else if (operation.type === "resize") {
|
||||
const opt = {
|
||||
image: imageData,
|
||||
width: 0,
|
||||
height: 0
|
||||
};
|
||||
if (operation.width && imageData.width && imageData.width > operation.width) {
|
||||
opt.width = operation.width;
|
||||
}
|
||||
if (operation.height && imageData.height && imageData.height > operation.height) {
|
||||
opt.height = operation.height;
|
||||
}
|
||||
if (opt.width > 0 || opt.height > 0) {
|
||||
imageData = await worker.resize(opt);
|
||||
}
|
||||
}
|
||||
}
|
||||
switch(encoding){
|
||||
case "jpeg":
|
||||
return Buffer.from(await worker.encodeJpeg(imageData, {
|
||||
quality
|
||||
}));
|
||||
case "webp":
|
||||
return Buffer.from(await worker.encodeWebp(imageData, {
|
||||
quality
|
||||
}));
|
||||
case "avif":
|
||||
const avifQuality = quality - 20;
|
||||
return Buffer.from(await worker.encodeAvif(imageData, {
|
||||
quality: Math.max(avifQuality, 0)
|
||||
}));
|
||||
case "png":
|
||||
return Buffer.from(await worker.encodePng(imageData));
|
||||
default:
|
||||
throw Error(`Unsupported encoding format`);
|
||||
}
|
||||
}
|
||||
export async function decodeBuffer(buffer) {
|
||||
const worker = getWorker();
|
||||
const imageData = await worker.decodeBuffer(buffer);
|
||||
return imageData;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=main.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/squoosh/main.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/lib/squoosh/main.ts"],"names":["Worker","path","execOnce","cpus","getWorker","resolve","__dirname","enableWorkerThreads","numWorkers","Math","max","min","length","computeWorkerKey","method","getMetadata","buffer","worker","width","height","decodeBuffer","processBuffer","operations","encoding","quality","imageData","operation","type","rotate","numRotations","opt","image","resize","Buffer","from","encodeJpeg","encodeWebp","avifQuality","encodeAvif","encodePng","Error"],"mappings":"AAAA,SAASA,MAAM,QAAQ,iCAAgC;AACvD,YAAYC,UAAU,OAAM;AAC5B,SAASC,QAAQ,QAAQ,4BAA2B;AACpD,SAASC,IAAI,QAAQ,KAAI;AAgBzB,MAAMC,YAAYF,SAChB,IACE,IAAIF,OAAOC,KAAKI,OAAO,CAACC,WAAW,SAAS;QAC1CC,qBAAqB;QACrB,qEAAqE;QACrE,6BAA6B;QAC7BC,YAAYC,KAAKC,GAAG,CAAC,GAAGD,KAAKE,GAAG,CAACR,OAAOS,MAAM,GAAG,GAAG;QACpDC,kBAAkB,CAACC,SAAWA;IAChC;AAGJ,OAAO,eAAeC,YACpBC,MAAc;IAEd,MAAMC,SAAkCb;IACxC,MAAM,EAAEc,KAAK,EAAEC,MAAM,EAAE,GAAG,MAAMF,OAAOG,YAAY,CAACJ;IACpD,OAAO;QAAEE;QAAOC;IAAO;AACzB;AAEA,OAAO,eAAeE,cACpBL,MAAc,EACdM,UAAuB,EACvBC,QAAkB,EAClBC,OAAe;IAEf,MAAMP,SAAkCb;IAExC,IAAIqB,YAAY,MAAMR,OAAOG,YAAY,CAACJ;IAC1C,KAAK,MAAMU,aAAaJ,WAAY;QAClC,IAAII,UAAUC,IAAI,KAAK,UAAU;YAC/BF,YAAY,MAAMR,OAAOW,MAAM,CAACH,WAAWC,UAAUG,YAAY;QACnE,OAAO,IAAIH,UAAUC,IAAI,KAAK,UAAU;YACtC,MAAMG,MAAM;gBAAEC,OAAON;gBAAWP,OAAO;gBAAGC,QAAQ;YAAE;YACpD,IACEO,UAAUR,KAAK,IACfO,UAAUP,KAAK,IACfO,UAAUP,KAAK,GAAGQ,UAAUR,KAAK,EACjC;gBACAY,IAAIZ,KAAK,GAAGQ,UAAUR,KAAK;YAC7B;YACA,IACEQ,UAAUP,MAAM,IAChBM,UAAUN,MAAM,IAChBM,UAAUN,MAAM,GAAGO,UAAUP,MAAM,EACnC;gBACAW,IAAIX,MAAM,GAAGO,UAAUP,MAAM;YAC/B;YAEA,IAAIW,IAAIZ,KAAK,GAAG,KAAKY,IAAIX,MAAM,GAAG,GAAG;gBACnCM,YAAY,MAAMR,OAAOe,MAAM,CAACF;YAClC;QACF;IACF;IAEA,OAAQP;QACN,KAAK;YACH,OAAOU,OAAOC,IAAI,CAAC,MAAMjB,OAAOkB,UAAU,CAACV,WAAW;gBAAED;YAAQ;QAClE,KAAK;YACH,OAAOS,OAAOC,IAAI,CAAC,MAAMjB,OAAOmB,UAAU,CAACX,WAAW;gBAAED;YAAQ;QAClE,KAAK;YACH,MAAMa,cAAcb,UAAU;YAC9B,OAAOS,OAAOC,IAAI,CAChB,MAAMjB,OAAOqB,UAAU,CAACb,WAAW;gBACjCD,SAASf,KAAKC,GAAG,CAAC2B,aAAa;YACjC;QAEJ,KAAK;YACH,OAAOJ,OAAOC,IAAI,CAAC,MAAMjB,OAAOsB,SAAS,CAACd;QAC5C;YACE,MAAMe,MAAM,CAAC,2BAA2B,CAAC;IAC7C;AACF;AAEA,OAAO,eAAepB,aAAaJ,MAAc;IAC/C,MAAMC,SAAkCb;IACxC,MAAMqB,YAAY,MAAMR,OAAOG,YAAY,CAACJ;IAC5C,OAAOS;AACT"}
|
||||
38
node_modules/next/dist/esm/server/lib/squoosh/mozjpeg/mozjpeg_enc.d.ts
generated
vendored
Normal file
@ -0,0 +1,38 @@
|
||||
// eslint-disable-next-line no-shadow
|
||||
export const enum MozJpegColorSpace {
|
||||
GRAYSCALE = 1,
|
||||
RGB,
|
||||
YCbCr,
|
||||
}
|
||||
|
||||
export interface EncodeOptions {
|
||||
quality: number
|
||||
baseline: boolean
|
||||
arithmetic: boolean
|
||||
progressive: boolean
|
||||
optimize_coding: boolean
|
||||
smoothing: number
|
||||
color_space: MozJpegColorSpace
|
||||
quant_table: number
|
||||
trellis_multipass: boolean
|
||||
trellis_opt_zero: boolean
|
||||
trellis_opt_table: boolean
|
||||
trellis_loops: number
|
||||
auto_subsample: boolean
|
||||
chroma_subsample: number
|
||||
separate_chroma_quality: boolean
|
||||
chroma_quality: number
|
||||
}
|
||||
|
||||
export interface MozJPEGModule extends EmscriptenWasm.Module {
|
||||
encode(
|
||||
data: BufferSource,
|
||||
width: number,
|
||||
height: number,
|
||||
options: EncodeOptions
|
||||
): Uint8Array
|
||||
}
|
||||
|
||||
declare var moduleFactory: EmscriptenWasm.ModuleFactory<MozJPEGModule>
|
||||
|
||||
export default moduleFactory
|
||||
1535
node_modules/next/dist/esm/server/lib/squoosh/mozjpeg/mozjpeg_node_dec.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
node_modules/next/dist/esm/server/lib/squoosh/mozjpeg/mozjpeg_node_dec.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1625
node_modules/next/dist/esm/server/lib/squoosh/mozjpeg/mozjpeg_node_enc.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
node_modules/next/dist/esm/server/lib/squoosh/mozjpeg/mozjpeg_node_enc.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
95
node_modules/next/dist/esm/server/lib/squoosh/png/squoosh_oxipng.js
generated
vendored
Normal file
@ -0,0 +1,95 @@
|
||||
let wasm;
|
||||
let cachedTextDecoder = new TextDecoder("utf-8", {
|
||||
ignoreBOM: true,
|
||||
fatal: true
|
||||
});
|
||||
cachedTextDecoder.decode();
|
||||
let cachegetUint8Memory0 = null;
|
||||
function getUint8Memory0() {
|
||||
if (cachegetUint8Memory0 === null || cachegetUint8Memory0.buffer !== wasm.memory.buffer) {
|
||||
cachegetUint8Memory0 = new Uint8Array(wasm.memory.buffer);
|
||||
}
|
||||
return cachegetUint8Memory0;
|
||||
}
|
||||
function getStringFromWasm0(ptr, len) {
|
||||
return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len));
|
||||
}
|
||||
let WASM_VECTOR_LEN = 0;
|
||||
function passArray8ToWasm0(arg, malloc) {
|
||||
const ptr = malloc(arg.length * 1);
|
||||
getUint8Memory0().set(arg, ptr / 1);
|
||||
WASM_VECTOR_LEN = arg.length;
|
||||
return ptr;
|
||||
}
|
||||
let cachegetInt32Memory0 = null;
|
||||
function getInt32Memory0() {
|
||||
if (cachegetInt32Memory0 === null || cachegetInt32Memory0.buffer !== wasm.memory.buffer) {
|
||||
cachegetInt32Memory0 = new Int32Array(wasm.memory.buffer);
|
||||
}
|
||||
return cachegetInt32Memory0;
|
||||
}
|
||||
function getArrayU8FromWasm0(ptr, len) {
|
||||
return getUint8Memory0().subarray(ptr / 1, ptr / 1 + len);
|
||||
}
|
||||
/**
|
||||
* @param {Uint8Array} data
|
||||
* @param {number} level
|
||||
* @param {boolean} interlace
|
||||
* @returns {Uint8Array}
|
||||
*/ export function optimise(data, level, interlace) {
|
||||
try {
|
||||
const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
|
||||
var ptr0 = passArray8ToWasm0(data, wasm.__wbindgen_malloc);
|
||||
var len0 = WASM_VECTOR_LEN;
|
||||
wasm.optimise(retptr, ptr0, len0, level, interlace);
|
||||
var r0 = getInt32Memory0()[retptr / 4 + 0];
|
||||
var r1 = getInt32Memory0()[retptr / 4 + 1];
|
||||
var v1 = getArrayU8FromWasm0(r0, r1).slice();
|
||||
wasm.__wbindgen_free(r0, r1 * 1);
|
||||
return v1;
|
||||
} finally{
|
||||
wasm.__wbindgen_add_to_stack_pointer(16);
|
||||
}
|
||||
}
|
||||
async function load(module, imports) {
|
||||
if (typeof Response === "function" && module instanceof Response) {
|
||||
if (typeof WebAssembly.instantiateStreaming === "function") {
|
||||
return await WebAssembly.instantiateStreaming(module, imports);
|
||||
}
|
||||
const bytes = await module.arrayBuffer();
|
||||
return await WebAssembly.instantiate(bytes, imports);
|
||||
} else {
|
||||
const instance = await WebAssembly.instantiate(module, imports);
|
||||
if (instance instanceof WebAssembly.Instance) {
|
||||
return {
|
||||
instance,
|
||||
module
|
||||
};
|
||||
} else {
|
||||
return instance;
|
||||
}
|
||||
}
|
||||
}
|
||||
async function init(input) {
|
||||
const imports = {};
|
||||
imports.wbg = {};
|
||||
imports.wbg.__wbindgen_throw = function(arg0, arg1) {
|
||||
throw new Error(getStringFromWasm0(arg0, arg1));
|
||||
};
|
||||
if (typeof input === "string" || typeof Request === "function" && input instanceof Request || typeof URL === "function" && input instanceof URL) {
|
||||
input = fetch(input);
|
||||
}
|
||||
const { instance, module } = await load(await input, imports);
|
||||
wasm = instance.exports;
|
||||
init.__wbindgen_wasm_module = module;
|
||||
return wasm;
|
||||
}
|
||||
export default init;
|
||||
// Manually remove the wasm and memory references to trigger GC
|
||||
export function cleanup() {
|
||||
wasm = null;
|
||||
cachegetUint8Memory0 = null;
|
||||
cachegetInt32Memory0 = null;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=squoosh_oxipng.js.map
|
||||
1
node_modules/next/dist/esm/server/lib/squoosh/png/squoosh_oxipng.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../../src/server/lib/squoosh/png/squoosh_oxipng.js"],"names":["wasm","cachedTextDecoder","TextDecoder","ignoreBOM","fatal","decode","cachegetUint8Memory0","getUint8Memory0","buffer","memory","Uint8Array","getStringFromWasm0","ptr","len","subarray","WASM_VECTOR_LEN","passArray8ToWasm0","arg","malloc","length","set","cachegetInt32Memory0","getInt32Memory0","Int32Array","getArrayU8FromWasm0","optimise","data","level","interlace","retptr","__wbindgen_add_to_stack_pointer","ptr0","__wbindgen_malloc","len0","r0","r1","v1","slice","__wbindgen_free","load","module","imports","Response","WebAssembly","instantiateStreaming","bytes","arrayBuffer","instantiate","instance","Instance","init","input","wbg","__wbindgen_throw","arg0","arg1","Error","Request","URL","fetch","exports","__wbindgen_wasm_module","cleanup"],"mappings":"AAAA,IAAIA;AAEJ,IAAIC,oBAAoB,IAAIC,YAAY,SAAS;IAC/CC,WAAW;IACXC,OAAO;AACT;AAEAH,kBAAkBI,MAAM;AAExB,IAAIC,uBAAuB;AAC3B,SAASC;IACP,IACED,yBAAyB,QACzBA,qBAAqBE,MAAM,KAAKR,KAAKS,MAAM,CAACD,MAAM,EAClD;QACAF,uBAAuB,IAAII,WAAWV,KAAKS,MAAM,CAACD,MAAM;IAC1D;IACA,OAAOF;AACT;AAEA,SAASK,mBAAmBC,GAAG,EAAEC,GAAG;IAClC,OAAOZ,kBAAkBI,MAAM,CAACE,kBAAkBO,QAAQ,CAACF,KAAKA,MAAMC;AACxE;AAEA,IAAIE,kBAAkB;AAEtB,SAASC,kBAAkBC,GAAG,EAAEC,MAAM;IACpC,MAAMN,MAAMM,OAAOD,IAAIE,MAAM,GAAG;IAChCZ,kBAAkBa,GAAG,CAACH,KAAKL,MAAM;IACjCG,kBAAkBE,IAAIE,MAAM;IAC5B,OAAOP;AACT;AAEA,IAAIS,uBAAuB;AAC3B,SAASC;IACP,IACED,yBAAyB,QACzBA,qBAAqBb,MAAM,KAAKR,KAAKS,MAAM,CAACD,MAAM,EAClD;QACAa,uBAAuB,IAAIE,WAAWvB,KAAKS,MAAM,CAACD,MAAM;IAC1D;IACA,OAAOa;AACT;AAEA,SAASG,oBAAoBZ,GAAG,EAAEC,GAAG;IACnC,OAAON,kBAAkBO,QAAQ,CAACF,MAAM,GAAGA,MAAM,IAAIC;AACvD;AACA;;;;;CAKC,GACD,OAAO,SAASY,SAASC,IAAI,EAAEC,KAAK,EAAEC,SAAS;IAC7C,IAAI;QACF,MAAMC,SAAS7B,KAAK8B,+BAA+B,CAAC,CAAC;QACrD,IAAIC,OAAOf,kBAAkBU,MAAM1B,KAAKgC,iBAAiB;QACzD,IAAIC,OAAOlB;QACXf,KAAKyB,QAAQ,CAACI,QAAQE,MAAME,MAAMN,OAAOC;QACzC,IAAIM,KAAKZ,iBAAiB,CAACO,SAAS,IAAI,EAAE;QAC1C,IAAIM,KAAKb,iBAAiB,CAACO,SAAS,IAAI,EAAE;QAC1C,IAAIO,KAAKZ,oBAAoBU,IAAIC,IAAIE,KAAK;QAC1CrC,KAAKsC,eAAe,CAACJ,IAAIC,KAAK;QAC9B,OAAOC;IACT,SAAU;QACRpC,KAAK8B,+BAA+B,CAAC;IACvC;AACF;AAEA,eAAeS,KAAKC,MAAM,EAAEC,OAAO;IACjC,IAAI,OAAOC,aAAa,cAAcF,kBAAkBE,UAAU;QAChE,IAAI,OAAOC,YAAYC,oBAAoB,KAAK,YAAY;YAC1D,OAAO,MAAMD,YAAYC,oBAAoB,CAACJ,QAAQC;QACxD;QAEA,MAAMI,QAAQ,MAAML,OAAOM,WAAW;QACtC,OAAO,MAAMH,YAAYI,WAAW,CAACF,OAAOJ;IAC9C,OAAO;QACL,MAAMO,WAAW,MAAML,YAAYI,WAAW,CAACP,QAAQC;QAEvD,IAAIO,oBAAoBL,YAAYM,QAAQ,EAAE;YAC5C,OAAO;gBAAED;gBAAUR;YAAO;QAC5B,OAAO;YACL,OAAOQ;QACT;IACF;AACF;AAEA,eAAeE,KAAKC,KAAK;IACvB,MAAMV,UAAU,CAAC;IACjBA,QAAQW,GAAG,GAAG,CAAC;IACfX,QAAQW,GAAG,CAACC,gBAAgB,GAAG,SAAUC,IAAI,EAAEC,IAAI;QACjD,MAAM,IAAIC,MAAM7C,mBAAmB2C,MAAMC;IAC3C;IAEA,IACE,OAAOJ,UAAU,YAChB,OAAOM,YAAY,cAAcN,iBAAiBM,WAClD,OAAOC,QAAQ,cAAcP,iBAAiBO,KAC/C;QACAP,QAAQQ,MAAMR;IAChB;IAEA,MAAM,EAAEH,QAAQ,EAAER,MAAM,EAAE,GAAG,MAAMD,KAAK,MAAMY,OAAOV;IAErDzC,OAAOgD,SAASY,OAAO;IACvBV,KAAKW,sBAAsB,GAAGrB;IAE9B,OAAOxC;AACT;AAEA,eAAekD,KAAI;AAEnB,+DAA+D;AAC/D,OAAO,SAASY;IACd9D,OAAO;IACPM,uBAAuB;IACvBe,uBAAuB;AACzB"}
|
||||
144
node_modules/next/dist/esm/server/lib/squoosh/png/squoosh_png.js
generated
vendored
Normal file
@ -0,0 +1,144 @@
let wasm;
let cachedTextDecoder = new TextDecoder("utf-8", {
    ignoreBOM: true,
    fatal: true
});
cachedTextDecoder.decode();
let cachegetUint8Memory0 = null;
function getUint8Memory0() {
    if (cachegetUint8Memory0 === null || cachegetUint8Memory0.buffer !== wasm.memory.buffer) {
        cachegetUint8Memory0 = new Uint8Array(wasm.memory.buffer);
    }
    return cachegetUint8Memory0;
}
function getStringFromWasm0(ptr, len) {
    return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len));
}
let cachegetUint8ClampedMemory0 = null;
function getUint8ClampedMemory0() {
    if (cachegetUint8ClampedMemory0 === null || cachegetUint8ClampedMemory0.buffer !== wasm.memory.buffer) {
        cachegetUint8ClampedMemory0 = new Uint8ClampedArray(wasm.memory.buffer);
    }
    return cachegetUint8ClampedMemory0;
}
function getClampedArrayU8FromWasm0(ptr, len) {
    return getUint8ClampedMemory0().subarray(ptr / 1, ptr / 1 + len);
}
const heap = new Array(32).fill(undefined);
heap.push(undefined, null, true, false);
let heap_next = heap.length;
function addHeapObject(obj) {
    if (heap_next === heap.length) heap.push(heap.length + 1);
    const idx = heap_next;
    heap_next = heap[idx];
    heap[idx] = obj;
    return idx;
}
let WASM_VECTOR_LEN = 0;
function passArray8ToWasm0(arg, malloc) {
    const ptr = malloc(arg.length * 1);
    getUint8Memory0().set(arg, ptr / 1);
    WASM_VECTOR_LEN = arg.length;
    return ptr;
}
let cachegetInt32Memory0 = null;
function getInt32Memory0() {
    if (cachegetInt32Memory0 === null || cachegetInt32Memory0.buffer !== wasm.memory.buffer) {
        cachegetInt32Memory0 = new Int32Array(wasm.memory.buffer);
    }
    return cachegetInt32Memory0;
}
function getArrayU8FromWasm0(ptr, len) {
    return getUint8Memory0().subarray(ptr / 1, ptr / 1 + len);
}
/**
 * @param {Uint8Array} data
 * @param {number} width
 * @param {number} height
 * @returns {Uint8Array}
 */ export function encode(data, width, height) {
    try {
        const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
        var ptr0 = passArray8ToWasm0(data, wasm.__wbindgen_malloc);
        var len0 = WASM_VECTOR_LEN;
        wasm.encode(retptr, ptr0, len0, width, height);
        var r0 = getInt32Memory0()[retptr / 4 + 0];
        var r1 = getInt32Memory0()[retptr / 4 + 1];
        var v1 = getArrayU8FromWasm0(r0, r1).slice();
        wasm.__wbindgen_free(r0, r1 * 1);
        return v1;
    } finally{
        wasm.__wbindgen_add_to_stack_pointer(16);
    }
}
function getObject(idx) {
    return heap[idx];
}
function dropObject(idx) {
    if (idx < 36) return;
    heap[idx] = heap_next;
    heap_next = idx;
}
function takeObject(idx) {
    const ret = getObject(idx);
    dropObject(idx);
    return ret;
}
/**
 * @param {Uint8Array} data
 * @returns {ImageData}
 */ export function decode(data) {
    var ptr0 = passArray8ToWasm0(data, wasm.__wbindgen_malloc);
    var len0 = WASM_VECTOR_LEN;
    var ret = wasm.decode(ptr0, len0);
    return takeObject(ret);
}
async function load(module, imports) {
    if (typeof Response === "function" && module instanceof Response) {
        if (typeof WebAssembly.instantiateStreaming === "function") {
            return await WebAssembly.instantiateStreaming(module, imports);
        }
        const bytes = await module.arrayBuffer();
        return await WebAssembly.instantiate(bytes, imports);
    } else {
        const instance = await WebAssembly.instantiate(module, imports);
        if (instance instanceof WebAssembly.Instance) {
            return {
                instance,
                module
            };
        } else {
            return instance;
        }
    }
}
async function init(input) {
    const imports = {};
    imports.wbg = {};
    imports.wbg.__wbg_newwithownedu8clampedarrayandsh_787b2db8ea6bfd62 = function(arg0, arg1, arg2, arg3) {
        var v0 = getClampedArrayU8FromWasm0(arg0, arg1).slice();
        wasm.__wbindgen_free(arg0, arg1 * 1);
        var ret = new ImageData(v0, arg2 >>> 0, arg3 >>> 0);
        return addHeapObject(ret);
    };
    imports.wbg.__wbindgen_throw = function(arg0, arg1) {
        throw new Error(getStringFromWasm0(arg0, arg1));
    };
    if (typeof input === "string" || typeof Request === "function" && input instanceof Request || typeof URL === "function" && input instanceof URL) {
        input = fetch(input);
    }
    const { instance, module } = await load(await input, imports);
    wasm = instance.exports;
    init.__wbindgen_wasm_module = module;
    return wasm;
}
export default init;
// Manually remove the wasm and memory references to trigger GC
export function cleanup() {
    wasm = null;
    cachegetUint8ClampedMemory0 = null;
    cachegetUint8Memory0 = null;
    cachegetInt32Memory0 = null;
}

//# sourceMappingURL=squoosh_png.js.map
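The file above is standard wasm-bindgen glue: the default export init() instantiates the wasm (from a URL, Request, Response, raw bytes, or a precompiled module) and wires up the wbg imports, while encode()/decode() marshal byte arrays through linear memory. A minimal usage sketch follows; the wasmBytes and pngBytes names and the relative import path are illustrative, and decode() assumes an environment where ImageData exists (or is polyfilled), since the wbg import above constructs one.

import initPng, { encode, decode, cleanup } from "./squoosh_png.js"; // illustrative path

async function roundTripPng(wasmBytes, pngBytes) {
    await initPng(wasmBytes);            // wasmBytes: BufferSource holding the .wasm binary
    const image = decode(pngBytes);      // ImageData built via the wbg import above
    const reencoded = encode(new Uint8Array(image.data.buffer), image.width, image.height);
    cleanup();                           // drop cached wasm/memory references
    return reencoded;                    // Uint8Array of PNG bytes
}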
1
node_modules/next/dist/esm/server/lib/squoosh/png/squoosh_png.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/squoosh/png/squoosh_png.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/squoosh/png/squoosh_png.js"],"names":["wasm","cachedTextDecoder","TextDecoder","ignoreBOM","fatal","decode","cachegetUint8Memory0","getUint8Memory0","buffer","memory","Uint8Array","getStringFromWasm0","ptr","len","subarray","cachegetUint8ClampedMemory0","getUint8ClampedMemory0","Uint8ClampedArray","getClampedArrayU8FromWasm0","heap","Array","fill","undefined","push","heap_next","length","addHeapObject","obj","idx","WASM_VECTOR_LEN","passArray8ToWasm0","arg","malloc","set","cachegetInt32Memory0","getInt32Memory0","Int32Array","getArrayU8FromWasm0","encode","data","width","height","retptr","__wbindgen_add_to_stack_pointer","ptr0","__wbindgen_malloc","len0","r0","r1","v1","slice","__wbindgen_free","getObject","dropObject","takeObject","ret","load","module","imports","Response","WebAssembly","instantiateStreaming","bytes","arrayBuffer","instantiate","instance","Instance","init","input","wbg","__wbg_newwithownedu8clampedarrayandsh_787b2db8ea6bfd62","arg0","arg1","arg2","arg3","v0","ImageData","__wbindgen_throw","Error","Request","URL","fetch","exports","__wbindgen_wasm_module","cleanup"],"mappings":"AAAA,IAAIA;AAEJ,IAAIC,oBAAoB,IAAIC,YAAY,SAAS;IAC/CC,WAAW;IACXC,OAAO;AACT;AAEAH,kBAAkBI,MAAM;AAExB,IAAIC,uBAAuB;AAC3B,SAASC;IACP,IACED,yBAAyB,QACzBA,qBAAqBE,MAAM,KAAKR,KAAKS,MAAM,CAACD,MAAM,EAClD;QACAF,uBAAuB,IAAII,WAAWV,KAAKS,MAAM,CAACD,MAAM;IAC1D;IACA,OAAOF;AACT;AAEA,SAASK,mBAAmBC,GAAG,EAAEC,GAAG;IAClC,OAAOZ,kBAAkBI,MAAM,CAACE,kBAAkBO,QAAQ,CAACF,KAAKA,MAAMC;AACxE;AAEA,IAAIE,8BAA8B;AAClC,SAASC;IACP,IACED,gCAAgC,QAChCA,4BAA4BP,MAAM,KAAKR,KAAKS,MAAM,CAACD,MAAM,EACzD;QACAO,8BAA8B,IAAIE,kBAAkBjB,KAAKS,MAAM,CAACD,MAAM;IACxE;IACA,OAAOO;AACT;AAEA,SAASG,2BAA2BN,GAAG,EAAEC,GAAG;IAC1C,OAAOG,yBAAyBF,QAAQ,CAACF,MAAM,GAAGA,MAAM,IAAIC;AAC9D;AAEA,MAAMM,OAAO,IAAIC,MAAM,IAAIC,IAAI,CAACC;AAEhCH,KAAKI,IAAI,CAACD,WAAW,MAAM,MAAM;AAEjC,IAAIE,YAAYL,KAAKM,MAAM;AAE3B,SAASC,cAAcC,GAAG;IACxB,IAAIH,cAAcL,KAAKM,MAAM,EAAEN,KAAKI,IAAI,CAACJ,KAAKM,MAAM,GAAG;IACvD,MAAMG,MAAMJ;IACZA,YAAYL,IAAI,CAACS,IAAI;IAErBT,IAAI,CAACS,IAAI,GAAGD;IACZ,OAAOC;AACT;AAEA,IAAIC,kBAAkB;AAEtB,SAASC,kBAAkBC,GAAG,EAAEC,MAAM;IACpC,MAAMpB,MAAMoB,OAAOD,IAAIN,MAAM,GAAG;IAChClB,kBAAkB0B,GAAG,CAACF,KAAKnB,MAAM;IACjCiB,kBAAkBE,IAAIN,MAAM;IAC5B,OAAOb;AACT;AAEA,IAAIsB,uBAAuB;AAC3B,SAASC;IACP,IACED,yBAAyB,QACzBA,qBAAqB1B,MAAM,KAAKR,KAAKS,MAAM,CAACD,MAAM,EAClD;QACA0B,uBAAuB,IAAIE,WAAWpC,KAAKS,MAAM,CAACD,MAAM;IAC1D;IACA,OAAO0B;AACT;AAEA,SAASG,oBAAoBzB,GAAG,EAAEC,GAAG;IACnC,OAAON,kBAAkBO,QAAQ,CAACF,MAAM,GAAGA,MAAM,IAAIC;AACvD;AACA;;;;;CAKC,GACD,OAAO,SAASyB,OAAOC,IAAI,EAAEC,KAAK,EAAEC,MAAM;IACxC,IAAI;QACF,MAAMC,SAAS1C,KAAK2C,+BAA+B,CAAC,CAAC;QACrD,IAAIC,OAAOd,kBAAkBS,MAAMvC,KAAK6C,iBAAiB;QACzD,IAAIC,OAAOjB;QACX7B,KAAKsC,MAAM,CAACI,QAAQE,MAAME,MAAMN,OAAOC;QACvC,IAAIM,KAAKZ,iBAAiB,CAACO,SAAS,IAAI,EAAE;QAC1C,IAAIM,KAAKb,iBAAiB,CAACO,SAAS,IAAI,EAAE;QAC1C,IAAIO,KAAKZ,oBAAoBU,IAAIC,IAAIE,KAAK;QAC1ClD,KAAKmD,eAAe,CAACJ,IAAIC,KAAK;QAC9B,OAAOC;IACT,SAAU;QACRjD,KAAK2C,+BAA+B,CAAC;IACvC;AACF;AAEA,SAASS,UAAUxB,GAAG;IACpB,OAAOT,IAAI,CAACS,IAAI;AAClB;AAEA,SAASyB,WAAWzB,GAAG;IACrB,IAAIA,MAAM,IAAI;IACdT,IAAI,CAACS,IAAI,GAAGJ;IACZA,YAAYI;AACd;AAEA,SAAS0B,WAAW1B,GAAG;IACrB,MAAM2B,MAAMH,UAAUxB;IACtByB,WAAWzB;IACX,OAAO2B;AACT;AACA;;;CAGC,GACD,OAAO,SAASlD,OAAOkC,IAAI;IACzB,IAAIK,OAAOd,kBAAkBS,MAAMvC,KAAK6C,iBAAiB;IACzD,IAAIC,OAAOjB;IACX,IAAI0B,MAAMvD,KAAKK,MAAM,CAACuC,MAAME;IAC5B,OAAOQ,WAAWC;AACpB;AAEA,eAAeC,KAAKC,MAAM,EAAEC,OAAO;IACjC,IAAI,OAAOC,aAAa,cAAcF,kBAAkBE,UAAU;QAChE,IAAI,OAAOC,YAAYC,oBAAoB,KAAK,YAAY;YAC1D,OAAO,MAAMD,YAAYC,oBAA
oB,CAACJ,QAAQC;QACxD;QAEA,MAAMI,QAAQ,MAAML,OAAOM,WAAW;QACtC,OAAO,MAAMH,YAAYI,WAAW,CAACF,OAAOJ;IAC9C,OAAO;QACL,MAAMO,WAAW,MAAML,YAAYI,WAAW,CAACP,QAAQC;QAEvD,IAAIO,oBAAoBL,YAAYM,QAAQ,EAAE;YAC5C,OAAO;gBAAED;gBAAUR;YAAO;QAC5B,OAAO;YACL,OAAOQ;QACT;IACF;AACF;AAEA,eAAeE,KAAKC,KAAK;IACvB,MAAMV,UAAU,CAAC;IACjBA,QAAQW,GAAG,GAAG,CAAC;IACfX,QAAQW,GAAG,CAACC,sDAAsD,GAChE,SAAUC,IAAI,EAAEC,IAAI,EAAEC,IAAI,EAAEC,IAAI;QAC9B,IAAIC,KAAKzD,2BAA2BqD,MAAMC,MAAMtB,KAAK;QACrDlD,KAAKmD,eAAe,CAACoB,MAAMC,OAAO;QAClC,IAAIjB,MAAM,IAAIqB,UAAUD,IAAIF,SAAS,GAAGC,SAAS;QACjD,OAAOhD,cAAc6B;IACvB;IACFG,QAAQW,GAAG,CAACQ,gBAAgB,GAAG,SAAUN,IAAI,EAAEC,IAAI;QACjD,MAAM,IAAIM,MAAMnE,mBAAmB4D,MAAMC;IAC3C;IAEA,IACE,OAAOJ,UAAU,YAChB,OAAOW,YAAY,cAAcX,iBAAiBW,WAClD,OAAOC,QAAQ,cAAcZ,iBAAiBY,KAC/C;QACAZ,QAAQa,MAAMb;IAChB;IAEA,MAAM,EAAEH,QAAQ,EAAER,MAAM,EAAE,GAAG,MAAMD,KAAK,MAAMY,OAAOV;IAErD1D,OAAOiE,SAASiB,OAAO;IACvBf,KAAKgB,sBAAsB,GAAG1B;IAE9B,OAAOzD;AACT;AAEA,eAAemE,KAAI;AAEnB,+DAA+D;AAC/D,OAAO,SAASiB;IACdpF,OAAO;IACPe,8BAA8B;IAC9BT,uBAAuB;IACvB4B,uBAAuB;AACzB"}
95
node_modules/next/dist/esm/server/lib/squoosh/resize/squoosh_resize.js
generated
vendored
Normal file
95
node_modules/next/dist/esm/server/lib/squoosh/resize/squoosh_resize.js
generated
vendored
Normal file
@ -0,0 +1,95 @@
let wasm;
let cachegetUint8Memory0 = null;
function getUint8Memory0() {
    if (cachegetUint8Memory0 === null || cachegetUint8Memory0.buffer !== wasm.memory.buffer) {
        cachegetUint8Memory0 = new Uint8Array(wasm.memory.buffer);
    }
    return cachegetUint8Memory0;
}
let WASM_VECTOR_LEN = 0;
function passArray8ToWasm0(arg, malloc) {
    const ptr = malloc(arg.length * 1);
    getUint8Memory0().set(arg, ptr / 1);
    WASM_VECTOR_LEN = arg.length;
    return ptr;
}
let cachegetInt32Memory0 = null;
function getInt32Memory0() {
    if (cachegetInt32Memory0 === null || cachegetInt32Memory0.buffer !== wasm.memory.buffer) {
        cachegetInt32Memory0 = new Int32Array(wasm.memory.buffer);
    }
    return cachegetInt32Memory0;
}
let cachegetUint8ClampedMemory0 = null;
function getUint8ClampedMemory0() {
    if (cachegetUint8ClampedMemory0 === null || cachegetUint8ClampedMemory0.buffer !== wasm.memory.buffer) {
        cachegetUint8ClampedMemory0 = new Uint8ClampedArray(wasm.memory.buffer);
    }
    return cachegetUint8ClampedMemory0;
}
function getClampedArrayU8FromWasm0(ptr, len) {
    return getUint8ClampedMemory0().subarray(ptr / 1, ptr / 1 + len);
}
/**
 * @param {Uint8Array} input_image
 * @param {number} input_width
 * @param {number} input_height
 * @param {number} output_width
 * @param {number} output_height
 * @param {number} typ_idx
 * @param {boolean} premultiply
 * @param {boolean} color_space_conversion
 * @returns {Uint8ClampedArray}
 */ export function resize(input_image, input_width, input_height, output_width, output_height, typ_idx, premultiply, color_space_conversion) {
    try {
        const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
        var ptr0 = passArray8ToWasm0(input_image, wasm.__wbindgen_malloc);
        var len0 = WASM_VECTOR_LEN;
        wasm.resize(retptr, ptr0, len0, input_width, input_height, output_width, output_height, typ_idx, premultiply, color_space_conversion);
        var r0 = getInt32Memory0()[retptr / 4 + 0];
        var r1 = getInt32Memory0()[retptr / 4 + 1];
        var v1 = getClampedArrayU8FromWasm0(r0, r1).slice();
        wasm.__wbindgen_free(r0, r1 * 1);
        return v1;
    } finally{
        wasm.__wbindgen_add_to_stack_pointer(16);
    }
}
async function load(module, imports) {
    if (typeof Response === "function" && module instanceof Response) {
        if (typeof WebAssembly.instantiateStreaming === "function") {
            return await WebAssembly.instantiateStreaming(module, imports);
        }
        const bytes = await module.arrayBuffer();
        return await WebAssembly.instantiate(bytes, imports);
    } else {
        const instance = await WebAssembly.instantiate(module, imports);
        if (instance instanceof WebAssembly.Instance) {
            return {
                instance,
                module
            };
        } else {
            return instance;
        }
    }
}
async function init(input) {
    const imports = {};
    if (typeof input === "string" || typeof Request === "function" && input instanceof Request || typeof URL === "function" && input instanceof URL) {
        input = fetch(input);
    }
    const { instance, module } = await load(await input, imports);
    wasm = instance.exports;
    init.__wbindgen_wasm_module = module;
    return wasm;
}
export default init;
// Manually remove the wasm and memory references to trigger GC
export function cleanup() {
    wasm = null;
    cachegetUint8Memory0 = null;
    cachegetInt32Memory0 = null;
}

//# sourceMappingURL=squoosh_resize.js.map
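The resize glue above takes raw RGBA pixels and returns a Uint8ClampedArray of resized pixels; typ_idx selects the resize filter inside the wasm module (the valid values are not defined in this glue file), and premultiply / color_space_conversion toggle pre-processing per the JSDoc. A hedged sketch, with illustrative names and an arbitrary placeholder typ_idx value:

import initResize, { resize, cleanup } from "./squoosh_resize.js"; // illustrative path

async function downscale(wasmBytes, rgba, fromW, fromH, toW, toH) {
    await initResize(wasmBytes);
    // 3 is a placeholder filter index; the meaningful values live in the wasm module.
    const out = resize(rgba, fromW, fromH, toW, toH, 3, true, true);
    cleanup();
    return out; // Uint8ClampedArray of toW * toH * 4 bytes
}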
1
node_modules/next/dist/esm/server/lib/squoosh/resize/squoosh_resize.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/squoosh/resize/squoosh_resize.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/squoosh/resize/squoosh_resize.js"],"names":["wasm","cachegetUint8Memory0","getUint8Memory0","buffer","memory","Uint8Array","WASM_VECTOR_LEN","passArray8ToWasm0","arg","malloc","ptr","length","set","cachegetInt32Memory0","getInt32Memory0","Int32Array","cachegetUint8ClampedMemory0","getUint8ClampedMemory0","Uint8ClampedArray","getClampedArrayU8FromWasm0","len","subarray","resize","input_image","input_width","input_height","output_width","output_height","typ_idx","premultiply","color_space_conversion","retptr","__wbindgen_add_to_stack_pointer","ptr0","__wbindgen_malloc","len0","r0","r1","v1","slice","__wbindgen_free","load","module","imports","Response","WebAssembly","instantiateStreaming","bytes","arrayBuffer","instantiate","instance","Instance","init","input","Request","URL","fetch","exports","__wbindgen_wasm_module","cleanup"],"mappings":"AAAA,IAAIA;AAEJ,IAAIC,uBAAuB;AAC3B,SAASC;IACP,IACED,yBAAyB,QACzBA,qBAAqBE,MAAM,KAAKH,KAAKI,MAAM,CAACD,MAAM,EAClD;QACAF,uBAAuB,IAAII,WAAWL,KAAKI,MAAM,CAACD,MAAM;IAC1D;IACA,OAAOF;AACT;AAEA,IAAIK,kBAAkB;AAEtB,SAASC,kBAAkBC,GAAG,EAAEC,MAAM;IACpC,MAAMC,MAAMD,OAAOD,IAAIG,MAAM,GAAG;IAChCT,kBAAkBU,GAAG,CAACJ,KAAKE,MAAM;IACjCJ,kBAAkBE,IAAIG,MAAM;IAC5B,OAAOD;AACT;AAEA,IAAIG,uBAAuB;AAC3B,SAASC;IACP,IACED,yBAAyB,QACzBA,qBAAqBV,MAAM,KAAKH,KAAKI,MAAM,CAACD,MAAM,EAClD;QACAU,uBAAuB,IAAIE,WAAWf,KAAKI,MAAM,CAACD,MAAM;IAC1D;IACA,OAAOU;AACT;AAEA,IAAIG,8BAA8B;AAClC,SAASC;IACP,IACED,gCAAgC,QAChCA,4BAA4Bb,MAAM,KAAKH,KAAKI,MAAM,CAACD,MAAM,EACzD;QACAa,8BAA8B,IAAIE,kBAAkBlB,KAAKI,MAAM,CAACD,MAAM;IACxE;IACA,OAAOa;AACT;AAEA,SAASG,2BAA2BT,GAAG,EAAEU,GAAG;IAC1C,OAAOH,yBAAyBI,QAAQ,CAACX,MAAM,GAAGA,MAAM,IAAIU;AAC9D;AACA;;;;;;;;;;CAUC,GACD,OAAO,SAASE,OACdC,WAAW,EACXC,WAAW,EACXC,YAAY,EACZC,YAAY,EACZC,aAAa,EACbC,OAAO,EACPC,WAAW,EACXC,sBAAsB;IAEtB,IAAI;QACF,MAAMC,SAAS/B,KAAKgC,+BAA+B,CAAC,CAAC;QACrD,IAAIC,OAAO1B,kBAAkBgB,aAAavB,KAAKkC,iBAAiB;QAChE,IAAIC,OAAO7B;QACXN,KAAKsB,MAAM,CACTS,QACAE,MACAE,MACAX,aACAC,cACAC,cACAC,eACAC,SACAC,aACAC;QAEF,IAAIM,KAAKtB,iBAAiB,CAACiB,SAAS,IAAI,EAAE;QAC1C,IAAIM,KAAKvB,iBAAiB,CAACiB,SAAS,IAAI,EAAE;QAC1C,IAAIO,KAAKnB,2BAA2BiB,IAAIC,IAAIE,KAAK;QACjDvC,KAAKwC,eAAe,CAACJ,IAAIC,KAAK;QAC9B,OAAOC;IACT,SAAU;QACRtC,KAAKgC,+BAA+B,CAAC;IACvC;AACF;AAEA,eAAeS,KAAKC,MAAM,EAAEC,OAAO;IACjC,IAAI,OAAOC,aAAa,cAAcF,kBAAkBE,UAAU;QAChE,IAAI,OAAOC,YAAYC,oBAAoB,KAAK,YAAY;YAC1D,OAAO,MAAMD,YAAYC,oBAAoB,CAACJ,QAAQC;QACxD;QAEA,MAAMI,QAAQ,MAAML,OAAOM,WAAW;QACtC,OAAO,MAAMH,YAAYI,WAAW,CAACF,OAAOJ;IAC9C,OAAO;QACL,MAAMO,WAAW,MAAML,YAAYI,WAAW,CAACP,QAAQC;QAEvD,IAAIO,oBAAoBL,YAAYM,QAAQ,EAAE;YAC5C,OAAO;gBAAED;gBAAUR;YAAO;QAC5B,OAAO;YACL,OAAOQ;QACT;IACF;AACF;AAEA,eAAeE,KAAKC,KAAK;IACvB,MAAMV,UAAU,CAAC;IAEjB,IACE,OAAOU,UAAU,YAChB,OAAOC,YAAY,cAAcD,iBAAiBC,WAClD,OAAOC,QAAQ,cAAcF,iBAAiBE,KAC/C;QACAF,QAAQG,MAAMH;IAChB;IAEA,MAAM,EAAEH,QAAQ,EAAER,MAAM,EAAE,GAAG,MAAMD,KAAK,MAAMY,OAAOV;IAErD3C,OAAOkD,SAASO,OAAO;IACvBL,KAAKM,sBAAsB,GAAGhB;IAE9B,OAAO1C;AACT;AAEA,eAAeoD,KAAI;AAEnB,+DAA+D;AAC/D,OAAO,SAASO;IACd3D,OAAO;IACPC,uBAAuB;IACvBY,uBAAuB;AACzB"}
42
node_modules/next/dist/esm/server/lib/squoosh/webp/webp_enc.d.ts
generated
vendored
Normal file
42
node_modules/next/dist/esm/server/lib/squoosh/webp/webp_enc.d.ts
generated
vendored
Normal file
@ -0,0 +1,42 @@
export interface EncodeOptions {
  quality: number
  target_size: number
  target_PSNR: number
  method: number
  sns_strength: number
  filter_strength: number
  filter_sharpness: number
  filter_type: number
  partitions: number
  segments: number
  pass: number
  show_compressed: number
  preprocessing: number
  autofilter: number
  partition_limit: number
  alpha_compression: number
  alpha_filtering: number
  alpha_quality: number
  lossless: number
  exact: number
  image_hint: number
  emulate_jpeg_size: number
  thread_level: number
  low_memory: number
  near_lossless: number
  use_delta_palette: number
  use_sharp_yuv: number
}

export interface WebPModule extends EmscriptenWasm.Module {
  encode(
    data: BufferSource,
    width: number,
    height: number,
    options: EncodeOptions
  ): Uint8Array
}

declare var moduleFactory: EmscriptenWasm.ModuleFactory<WebPModule>

export default moduleFactory
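The declaration file above types the Emscripten-built WebP encoder: moduleFactory resolves to a WebPModule whose encode() takes raw pixels plus a fully populated EncodeOptions. A hedged sketch in plain JS of how such a module might be driven; the import path and every numeric option value are illustrative, not defaults mandated by the declarations.

import webpEncoderFactory from "./webp_enc.js"; // illustrative path to the Emscripten build

async function encodeWebP(rgba, width, height) {
    const webpModule = await webpEncoderFactory();
    // Every EncodeOptions field is required by the declaration above; values are examples only.
    const options = {
        quality: 75, target_size: 0, target_PSNR: 0, method: 4,
        sns_strength: 50, filter_strength: 60, filter_sharpness: 0, filter_type: 1,
        partitions: 0, segments: 4, pass: 1, show_compressed: 0,
        preprocessing: 0, autofilter: 0, partition_limit: 0,
        alpha_compression: 1, alpha_filtering: 1, alpha_quality: 100,
        lossless: 0, exact: 0, image_hint: 0, emulate_jpeg_size: 0,
        thread_level: 0, low_memory: 0, near_lossless: 100,
        use_delta_palette: 0, use_sharp_yuv: 0
    };
    return webpModule.encode(rgba, width, height, options); // Uint8Array of WebP bytes
}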
1384
node_modules/next/dist/esm/server/lib/squoosh/webp/webp_node_dec.js
generated
vendored
Normal file
1384
node_modules/next/dist/esm/server/lib/squoosh/webp/webp_node_dec.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
node_modules/next/dist/esm/server/lib/squoosh/webp/webp_node_dec.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/squoosh/webp/webp_node_dec.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1532
node_modules/next/dist/esm/server/lib/squoosh/webp/webp_node_enc.js
generated
vendored
Normal file
1532
node_modules/next/dist/esm/server/lib/squoosh/webp/webp_node_enc.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
node_modules/next/dist/esm/server/lib/squoosh/webp/webp_node_enc.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/squoosh/webp/webp_node_enc.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
257
node_modules/next/dist/esm/server/lib/start-server.js
generated
vendored
Normal file
257
node_modules/next/dist/esm/server/lib/start-server.js
generated
vendored
Normal file
@ -0,0 +1,257 @@
if (performance.getEntriesByName("next-start").length === 0) {
    performance.mark("next-start");
}
import "../next";
import "../require-hook";
import fs from "fs";
import v8 from "v8";
import path from "path";
import http from "http";
import https from "https";
import os from "os";
import Watchpack from "next/dist/compiled/watchpack";
import * as Log from "../../build/output/log";
import setupDebug from "next/dist/compiled/debug";
import { RESTART_EXIT_CODE, checkNodeDebugType, getDebugPort } from "./utils";
import { formatHostname } from "./format-hostname";
import { initialize } from "./router-server";
import { CONFIG_FILES } from "../../shared/lib/constants";
import { getStartServerInfo, logStartInfo } from "./app-info-log";
import { validateTurboNextConfig } from "../../lib/turbopack-warning";
import { trace, flushAllTraces } from "../../trace";
import { isPostpone } from "./router-utils/is-postpone";
const debug = setupDebug("next:start-server");
let startServerSpan;
export async function getRequestHandlers({ dir, port, isDev, server, hostname, minimalMode, isNodeDebugging, keepAliveTimeout, experimentalHttpsServer }) {
    return initialize({
        dir,
        port,
        hostname,
        dev: isDev,
        minimalMode,
        server,
        isNodeDebugging: isNodeDebugging || false,
        keepAliveTimeout,
        experimentalHttpsServer,
        startServerSpan
    });
}
export async function startServer(serverOptions) {
    const { dir, isDev, hostname, minimalMode, allowRetry, keepAliveTimeout, selfSignedCertificate } = serverOptions;
    let { port } = serverOptions;
    process.title = `next-server (v${"14.2.13"})`;
    let handlersReady = ()=>{};
    let handlersError = ()=>{};
    let handlersPromise = new Promise((resolve, reject)=>{
        handlersReady = resolve;
        handlersError = reject;
    });
    let requestHandler = async (req, res)=>{
        if (handlersPromise) {
            await handlersPromise;
            return requestHandler(req, res);
        }
        throw new Error("Invariant request handler was not setup");
    };
    let upgradeHandler = async (req, socket, head)=>{
        if (handlersPromise) {
            await handlersPromise;
            return upgradeHandler(req, socket, head);
        }
        throw new Error("Invariant upgrade handler was not setup");
    };
    // setup server listener as fast as possible
    if (selfSignedCertificate && !isDev) {
        throw new Error("Using a self signed certificate is only supported with `next dev`.");
    }
    async function requestListener(req, res) {
        try {
            if (handlersPromise) {
                await handlersPromise;
                handlersPromise = undefined;
            }
            await requestHandler(req, res);
        } catch (err) {
            res.statusCode = 500;
            res.end("Internal Server Error");
            Log.error(`Failed to handle request for ${req.url}`);
            console.error(err);
        } finally{
            if (isDev) {
                if (v8.getHeapStatistics().used_heap_size > 0.8 * v8.getHeapStatistics().heap_size_limit) {
                    Log.warn(`Server is approaching the used memory threshold, restarting...`);
                    trace("server-restart-close-to-memory-threshold", undefined, {
                        "memory.heapSizeLimit": String(v8.getHeapStatistics().heap_size_limit),
                        "memory.heapUsed": String(v8.getHeapStatistics().used_heap_size)
                    }).stop();
                    await flushAllTraces();
                    process.exit(RESTART_EXIT_CODE);
                }
            }
        }
    }
    const server = selfSignedCertificate ? https.createServer({
        key: fs.readFileSync(selfSignedCertificate.key),
        cert: fs.readFileSync(selfSignedCertificate.cert)
    }, requestListener) : http.createServer(requestListener);
    if (keepAliveTimeout) {
        server.keepAliveTimeout = keepAliveTimeout;
    }
    server.on("upgrade", async (req, socket, head)=>{
        try {
            await upgradeHandler(req, socket, head);
        } catch (err) {
            socket.destroy();
            Log.error(`Failed to handle request for ${req.url}`);
            console.error(err);
        }
    });
    let portRetryCount = 0;
    server.on("error", (err)=>{
        if (allowRetry && port && isDev && err.code === "EADDRINUSE" && portRetryCount < 10) {
            Log.warn(`Port ${port} is in use, trying ${port + 1} instead.`);
            port += 1;
            portRetryCount += 1;
            server.listen(port, hostname);
        } else {
            Log.error(`Failed to start server`);
            console.error(err);
            process.exit(1);
        }
    });
    const nodeDebugType = checkNodeDebugType();
    await new Promise((resolve)=>{
        server.on("listening", async ()=>{
            const addr = server.address();
            const actualHostname = formatHostname(typeof addr === "object" ? (addr == null ? void 0 : addr.address) || hostname || "localhost" : addr);
            const formattedHostname = !hostname || actualHostname === "0.0.0.0" ? "localhost" : actualHostname === "[::]" ? "[::1]" : formatHostname(hostname);
            port = typeof addr === "object" ? (addr == null ? void 0 : addr.port) || port : port;
            const networkUrl = hostname ? `http://${actualHostname}:${port}` : null;
            const appUrl = `${selfSignedCertificate ? "https" : "http"}://${formattedHostname}:${port}`;
            if (nodeDebugType) {
                const debugPort = getDebugPort();
                Log.info(`the --${nodeDebugType} option was detected, the Next.js router server should be inspected at port ${debugPort}.`);
            }
            // expose the main port to render workers
            process.env.PORT = port + "";
            process.env.__NEXT_PRIVATE_ORIGIN = appUrl;
            // Only load env and config in dev to for logging purposes
            let envInfo;
            let expFeatureInfo;
            if (isDev) {
                const startServerInfo = await getStartServerInfo(dir, isDev);
                envInfo = startServerInfo.envInfo;
                expFeatureInfo = startServerInfo.expFeatureInfo;
            }
            logStartInfo({
                networkUrl,
                appUrl,
                envInfo,
                expFeatureInfo,
                maxExperimentalFeatures: 3
            });
            Log.event(`Starting...`);
            try {
                const cleanup = ()=>{
                    debug("start-server process cleanup");
                    server.close(()=>process.exit(0));
                };
                const exception = (err)=>{
                    if (isPostpone(err)) {
                        // React postpones that are unhandled might end up logged here but they're
                        // not really errors. They're just part of rendering.
                        return;
                    }
                    // This is the render worker, we keep the process alive
                    console.error(err);
                };
                // Make sure commands gracefully respect termination signals (e.g. from Docker)
                // Allow the graceful termination to be manually configurable
                if (!process.env.NEXT_MANUAL_SIG_HANDLE) {
                    process.on("SIGINT", cleanup);
                    process.on("SIGTERM", cleanup);
                }
                process.on("rejectionHandled", ()=>{
                    // It is ok to await a Promise late in Next.js as it allows for better
                    // prefetching patterns to avoid waterfalls. We ignore loggining these.
                    // We should've already errored in anyway unhandledRejection.
                });
                process.on("uncaughtException", exception);
                process.on("unhandledRejection", exception);
                const initResult = await getRequestHandlers({
                    dir,
                    port,
                    isDev,
                    server,
                    hostname,
                    minimalMode,
                    isNodeDebugging: Boolean(nodeDebugType),
                    keepAliveTimeout,
                    experimentalHttpsServer: !!selfSignedCertificate
                });
                requestHandler = initResult[0];
                upgradeHandler = initResult[1];
                const startServerProcessDuration = performance.mark("next-start-end") && performance.measure("next-start-duration", "next-start", "next-start-end").duration;
                handlersReady();
                const formatDurationText = startServerProcessDuration > 2000 ? `${Math.round(startServerProcessDuration / 100) / 10}s` : `${Math.round(startServerProcessDuration)}ms`;
                Log.event(`Ready in ${formatDurationText}`);
                if (process.env.TURBOPACK) {
                    await validateTurboNextConfig({
                        dir: serverOptions.dir,
                        isDev: true
                    });
                }
            } catch (err) {
                // fatal error if we can't setup
                handlersError();
                console.error(err);
                process.exit(1);
            }
            resolve();
        });
        server.listen(port, hostname);
    });
    if (isDev) {
        function watchConfigFiles(dirToWatch, onChange) {
            const wp = new Watchpack();
            wp.watch({
                files: CONFIG_FILES.map((file)=>path.join(dirToWatch, file))
            });
            wp.on("change", onChange);
        }
        watchConfigFiles(dir, async (filename)=>{
            if (process.env.__NEXT_DISABLE_MEMORY_WATCHER) {
                Log.info(`Detected change, manual restart required due to '__NEXT_DISABLE_MEMORY_WATCHER' usage`);
                return;
            }
            Log.warn(`Found a change in ${path.basename(filename)}. Restarting the server to apply the changes...`);
            process.exit(RESTART_EXIT_CODE);
        });
    }
}
if (process.env.NEXT_PRIVATE_WORKER && process.send) {
    process.addListener("message", async (msg)=>{
        if (msg && typeof msg && msg.nextWorkerOptions && process.send) {
            startServerSpan = trace("start-dev-server", undefined, {
                cpus: String(os.cpus().length),
                platform: os.platform(),
                "memory.freeMem": String(os.freemem()),
                "memory.totalMem": String(os.totalmem()),
                "memory.heapSizeLimit": String(v8.getHeapStatistics().heap_size_limit)
            });
            await startServerSpan.traceAsyncFn(()=>startServer(msg.nextWorkerOptions));
            const memoryUsage = process.memoryUsage();
            startServerSpan.setAttribute("memory.rss", String(memoryUsage.rss));
            startServerSpan.setAttribute("memory.heapTotal", String(memoryUsage.heapTotal));
            startServerSpan.setAttribute("memory.heapUsed", String(memoryUsage.heapUsed));
            process.send({
                nextServerReady: true
            });
        }
    });
    process.send({
        nextWorkerReady: true
    });
}

//# sourceMappingURL=start-server.js.map
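start-server.js binds the HTTP or HTTPS listener immediately and parks incoming requests on handlersPromise until getRequestHandlers() has finished initializing the router server, then swaps in the real handlers and logs the startup duration. A hedged sketch of invoking it directly follows; in practice the Next.js CLI spawns this file as a worker (see the NEXT_PRIVATE_WORKER branch above), the option names mirror the destructuring in startServer, and the values shown are examples only.

import { startServer } from "./start-server.js"; // illustrative relative path

await startServer({
    dir: process.cwd(),              // project directory
    port: 3000,
    hostname: "0.0.0.0",
    isDev: true,                     // dev mode enables port retry and config watching
    allowRetry: true,                // retry the next port on EADDRINUSE (dev only)
    minimalMode: false,
    keepAliveTimeout: 5000,          // optional HTTP keep-alive override
    selfSignedCertificate: undefined // only supported together with `next dev`
});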
1
node_modules/next/dist/esm/server/lib/start-server.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/start-server.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
27
node_modules/next/dist/esm/server/lib/to-route.js
generated
vendored
Normal file
27
node_modules/next/dist/esm/server/lib/to-route.js
generated
vendored
Normal file
@ -0,0 +1,27 @@
/**
 * This transforms a URL pathname into a route. It removes any trailing slashes
 * and the `/index` suffix.
 *
 * @param {string} pathname - The URL path that needs to be optimized.
 * @returns {string} - The route
 *
 * @example
 * // returns '/example'
 * toRoute('/example/index/');
 *
 * @example
 * // returns '/example'
 * toRoute('/example/');
 *
 * @example
 * // returns '/'
 * toRoute('/index/');
 *
 * @example
 * // returns '/'
 * toRoute('/');
 */ export function toRoute(pathname) {
    return pathname.replace(/(?:\/index)?\/?$/, "") || "/";
}

//# sourceMappingURL=to-route.js.map
1
node_modules/next/dist/esm/server/lib/to-route.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/lib/to-route.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/lib/to-route.ts"],"names":["toRoute","pathname","replace"],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;CAsBC,GACD,OAAO,SAASA,QAAQC,QAAgB;IACtC,OAAOA,SAASC,OAAO,CAAC,oBAAoB,OAAO;AACrD"}
Some files were not shown because too many files have changed in this diff