Initial boilerplate project
27 node_modules/next/dist/esm/server/dev/extract-modules-from-turbopack-message.js (generated, vendored, normal file)
@@ -0,0 +1,27 @@
export function extractModulesFromTurbopackMessage(data) {
    const updatedModules = new Set();
    const updates = Array.isArray(data) ? data : [
        data
    ];
    for (const update of updates){
        // TODO this won't capture changes to CSS since they don't result in a "merged" update
        if (update.type !== "partial" || update.instruction.type !== "ChunkListUpdate" || update.instruction.merged === undefined) {
            continue;
        }
        for (const mergedUpdate of update.instruction.merged){
            for (const name of Object.keys(mergedUpdate.entries)){
                const res = /(.*)\s+\[.*/.exec(name);
                if (res === null) {
                    console.error("[Turbopack HMR] Expected module to match pattern: " + name);
                    continue;
                }
                updatedModules.add(res[1]);
            }
        }
    }
    return [
        ...updatedModules
    ];
}

//# sourceMappingURL=extract-modules-from-turbopack-message.js.map
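For reference, a minimal usage sketch (not part of the vendored file above): the shape of the Turbopack message is an assumption inferred from the checks inside extractModulesFromTurbopackMessage, and the import path simply points at the vendored module.

import { extractModulesFromTurbopackMessage } from "next/dist/esm/server/dev/extract-modules-from-turbopack-message";

// Hypothetical "merged" ChunkListUpdate message; only the fields the helper reads are shown.
const message = {
    type: "partial",
    instruction: {
        type: "ChunkListUpdate",
        merged: [
            {
                entries: {
                    // Keys follow the "<module> [<chunk info>]" pattern the regex expects.
                    "app/page.tsx [app-client] (ecmascript)": {}
                }
            }
        ]
    }
};

console.log(extractModulesFromTurbopackMessage(message)); // ["app/page.tsx"]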
1 node_modules/next/dist/esm/server/dev/extract-modules-from-turbopack-message.js.map (generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/dev/extract-modules-from-turbopack-message.ts"],"names":["extractModulesFromTurbopackMessage","data","updatedModules","Set","updates","Array","isArray","update","type","instruction","merged","undefined","mergedUpdate","name","Object","keys","entries","res","exec","console","error","add"],"mappings":"AAEA,OAAO,SAASA,mCACdC,IAAyC;IAEzC,MAAMC,iBAA8B,IAAIC;IAExC,MAAMC,UAAUC,MAAMC,OAAO,CAACL,QAAQA,OAAO;QAACA;KAAK;IACnD,KAAK,MAAMM,UAAUH,QAAS;QAC5B,sFAAsF;QACtF,IACEG,OAAOC,IAAI,KAAK,aAChBD,OAAOE,WAAW,CAACD,IAAI,KAAK,qBAC5BD,OAAOE,WAAW,CAACC,MAAM,KAAKC,WAC9B;YACA;QACF;QAEA,KAAK,MAAMC,gBAAgBL,OAAOE,WAAW,CAACC,MAAM,CAAE;YACpD,KAAK,MAAMG,QAAQC,OAAOC,IAAI,CAACH,aAAaI,OAAO,EAAG;gBACpD,MAAMC,MAAM,cAAcC,IAAI,CAACL;gBAC/B,IAAII,QAAQ,MAAM;oBAChBE,QAAQC,KAAK,CACX,uDAAuDP;oBAEzD;gBACF;gBAEAX,eAAemB,GAAG,CAACJ,GAAG,CAAC,EAAE;YAC3B;QACF;IACF;IAEA,OAAO;WAAIf;KAAe;AAC5B"}
205 node_modules/next/dist/esm/server/dev/hot-middleware.js (generated, vendored, normal file)
@@ -0,0 +1,205 @@
// Based on https://github.com/webpack-contrib/webpack-hot-middleware/blob/9708d781ae0e46179cf8ea1a94719de4679aaf53/middleware.js
// Included License below
// Copyright JS Foundation and other contributors
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// 'Software'), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import { isMiddlewareFilename } from "../../build/utils";
import { HMR_ACTIONS_SENT_TO_BROWSER } from "./hot-reloader-types";
function isMiddlewareStats(stats) {
    for (const key of stats.compilation.entrypoints.keys()){
        if (isMiddlewareFilename(key)) {
            return true;
        }
    }
    return false;
}
function statsToJson(stats) {
    if (!stats) return {};
    return stats.toJson({
        all: false,
        errors: true,
        hash: true,
        warnings: true
    });
}
function getStatsForSyncEvent(clientStats, serverStats) {
    if (!clientStats) return serverStats == null ? void 0 : serverStats.stats;
    if (!serverStats) return clientStats == null ? void 0 : clientStats.stats;
    // Prefer the server compiler stats if it has errors.
    // Otherwise we may end up in a state where the client compilation is the latest but without errors.
    // This causes the error overlay to not display the build error.
    if (serverStats.stats.hasErrors()) {
        return serverStats.stats;
    }
    // Return the latest stats
    return serverStats.ts > clientStats.ts ? serverStats.stats : clientStats.stats;
}
class EventStream {
    constructor(){
        this.clients = new Set();
    }
    everyClient(fn) {
        for (const client of this.clients){
            fn(client);
        }
    }
    close() {
        this.everyClient((client)=>{
            client.close();
        });
        this.clients.clear();
    }
    handler(client) {
        this.clients.add(client);
        client.addEventListener("close", ()=>{
            this.clients.delete(client);
        });
    }
    publish(payload) {
        this.everyClient((client)=>{
            client.send(JSON.stringify(payload));
        });
    }
}
export class WebpackHotMiddleware {
    constructor(compilers, versionInfo){
        this.onClientInvalid = ()=>{
            var _this_serverLatestStats;
            if (this.closed || ((_this_serverLatestStats = this.serverLatestStats) == null ? void 0 : _this_serverLatestStats.stats.hasErrors())) return;
            this.publish({
                action: HMR_ACTIONS_SENT_TO_BROWSER.BUILDING
            });
        };
        this.onClientDone = (statsResult)=>{
            var _this_serverLatestStats;
            this.clientLatestStats = {
                ts: Date.now(),
                stats: statsResult
            };
            if (this.closed || ((_this_serverLatestStats = this.serverLatestStats) == null ? void 0 : _this_serverLatestStats.stats.hasErrors())) return;
            this.publishStats(statsResult);
        };
        this.onServerInvalid = ()=>{
            var _this_serverLatestStats, _this_clientLatestStats;
            if (!((_this_serverLatestStats = this.serverLatestStats) == null ? void 0 : _this_serverLatestStats.stats.hasErrors())) return;
            this.serverLatestStats = null;
            if ((_this_clientLatestStats = this.clientLatestStats) == null ? void 0 : _this_clientLatestStats.stats) {
                this.publishStats(this.clientLatestStats.stats);
            }
        };
        this.onServerDone = (statsResult)=>{
            if (this.closed) return;
            if (statsResult.hasErrors()) {
                this.serverLatestStats = {
                    ts: Date.now(),
                    stats: statsResult
                };
                this.publishStats(statsResult);
            }
        };
        this.onEdgeServerInvalid = ()=>{
            var _this_middlewareLatestStats, _this_clientLatestStats;
            if (!((_this_middlewareLatestStats = this.middlewareLatestStats) == null ? void 0 : _this_middlewareLatestStats.stats.hasErrors())) return;
            this.middlewareLatestStats = null;
            if ((_this_clientLatestStats = this.clientLatestStats) == null ? void 0 : _this_clientLatestStats.stats) {
                this.publishStats(this.clientLatestStats.stats);
            }
        };
        this.onEdgeServerDone = (statsResult)=>{
            if (!isMiddlewareStats(statsResult)) {
                this.onServerInvalid();
                this.onServerDone(statsResult);
                return;
            }
            if (statsResult.hasErrors()) {
                this.middlewareLatestStats = {
                    ts: Date.now(),
                    stats: statsResult
                };
                this.publishStats(statsResult);
            }
        };
        /**
         * To sync we use the most recent stats but also we append middleware
         * errors. This is because it is possible that middleware fails to compile
         * and we still want to show the client overlay with the error while
         * the error page should be rendered just fine.
         */ this.onHMR = (client)=>{
            if (this.closed) return;
            this.eventStream.handler(client);
            const syncStats = getStatsForSyncEvent(this.clientLatestStats, this.serverLatestStats);
            if (syncStats) {
                var _this_middlewareLatestStats;
                const stats = statsToJson(syncStats);
                const middlewareStats = statsToJson((_this_middlewareLatestStats = this.middlewareLatestStats) == null ? void 0 : _this_middlewareLatestStats.stats);
                this.publish({
                    action: HMR_ACTIONS_SENT_TO_BROWSER.SYNC,
                    hash: stats.hash,
                    errors: [
                        ...stats.errors || [],
                        ...middlewareStats.errors || []
                    ],
                    warnings: [
                        ...stats.warnings || [],
                        ...middlewareStats.warnings || []
                    ],
                    versionInfo: this.versionInfo
                });
            }
        };
        this.publishStats = (statsResult)=>{
            const stats = statsResult.toJson({
                all: false,
                hash: true,
                warnings: true,
                errors: true,
                moduleTrace: true
            });
            this.publish({
                action: HMR_ACTIONS_SENT_TO_BROWSER.BUILT,
                hash: stats.hash,
                warnings: stats.warnings || [],
                errors: stats.errors || []
            });
        };
        this.publish = (payload)=>{
            if (this.closed) return;
            this.eventStream.publish(payload);
        };
        this.close = ()=>{
            if (this.closed) return;
            // Can't remove compiler plugins, so we just set a flag and noop if closed
            // https://github.com/webpack/tapable/issues/32#issuecomment-350644466
            this.closed = true;
            this.eventStream.close();
        };
        this.eventStream = new EventStream();
        this.clientLatestStats = null;
        this.middlewareLatestStats = null;
        this.serverLatestStats = null;
        this.closed = false;
        this.versionInfo = versionInfo;
        compilers[0].hooks.invalid.tap("webpack-hot-middleware", this.onClientInvalid);
        compilers[0].hooks.done.tap("webpack-hot-middleware", this.onClientDone);
        compilers[1].hooks.invalid.tap("webpack-hot-middleware", this.onServerInvalid);
        compilers[1].hooks.done.tap("webpack-hot-middleware", this.onServerDone);
        compilers[2].hooks.done.tap("webpack-hot-middleware", this.onEdgeServerDone);
        compilers[2].hooks.invalid.tap("webpack-hot-middleware", this.onEdgeServerInvalid);
    }
}

//# sourceMappingURL=hot-middleware.js.map
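As a rough orientation (not from the vendored source), this is how the class above could be wired up: the constructor taps compilers[0], [1] and [2], so the expected order is client, server and edge-server compiler. The versionInfo shape and the upgrade handling below are assumptions, not Next.js internals.

import { WebpackHotMiddleware } from "next/dist/esm/server/dev/hot-middleware";

export function attachHotMiddleware(multiCompiler, wsServer, httpServer) {
    const hotMiddleware = new WebpackHotMiddleware(
        multiCompiler.compilers, // [clientCompiler, serverCompiler, edgeServerCompiler]
        { installed: "14.2.13", staleness: "unknown" } // hypothetical versionInfo
    );
    httpServer.on("upgrade", (req, socket, head) => {
        wsServer.handleUpgrade(req, socket, head, (client) => {
            // Each connected client will receive BUILDING/BUILT/SYNC payloads as JSON strings.
            hotMiddleware.onHMR(client);
        });
    });
    return hotMiddleware;
}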
1 node_modules/next/dist/esm/server/dev/hot-middleware.js.map (generated, vendored, normal file)
File diff suppressed because one or more lines are too long
692 node_modules/next/dist/esm/server/dev/hot-reloader-turbopack.js (generated, vendored, normal file)
@@ -0,0 +1,692 @@
|
||||
import { mkdir, writeFile } from "fs/promises";
|
||||
import { join } from "path";
|
||||
import ws from "next/dist/compiled/ws";
|
||||
import { store as consoleStore } from "../../build/output/store";
|
||||
import { HMR_ACTIONS_SENT_TO_BROWSER } from "./hot-reloader-types";
|
||||
import { createDefineEnv } from "../../build/swc";
|
||||
import * as Log from "../../build/output/log";
|
||||
import { getVersionInfo, matchNextPageBundleRequest } from "./hot-reloader-webpack";
|
||||
import { BLOCKED_PAGES } from "../../shared/lib/constants";
|
||||
import { getOverlayMiddleware } from "../../client/components/react-dev-overlay/server/middleware-turbopack";
|
||||
import { PageNotFoundError } from "../../shared/lib/utils";
|
||||
import { debounce } from "../utils";
|
||||
import { deleteAppClientCache, deleteCache } from "../../build/webpack/plugins/nextjs-require-cache-hot-reloader";
|
||||
import { clearAllModuleContexts, clearModuleContext } from "../lib/render-server";
|
||||
import { denormalizePagePath } from "../../shared/lib/page-path/denormalize-page-path";
|
||||
import { trace } from "../../trace";
|
||||
import { AssetMapper, formatIssue, getTurbopackJsConfig, handleEntrypoints, handlePagesErrorRoute, handleRouteType, hasEntrypointForKey, msToNs, processIssues, renderStyledStringToErrorAnsi, processTopLevelIssues, isWellKnownError, printNonFatalIssue } from "./turbopack-utils";
|
||||
import { propagateServerField } from "../lib/router-utils/setup-dev-bundler";
|
||||
import { TurbopackManifestLoader } from "./turbopack/manifest-loader";
|
||||
import { findPagePathData } from "./on-demand-entry-handler";
|
||||
import { getEntryKey, splitEntryKey } from "./turbopack/entry-key";
|
||||
import { FAST_REFRESH_RUNTIME_RELOAD } from "./messages";
|
||||
import { generateEncryptionKeyBase64 } from "../app-render/encryption-utils";
|
||||
const wsServer = new ws.Server({
|
||||
noServer: true
|
||||
});
|
||||
const isTestMode = !!(process.env.NEXT_TEST_MODE || process.env.__NEXT_TEST_MODE || process.env.DEBUG);
|
||||
export async function createHotReloaderTurbopack(opts, serverFields, distDir) {
|
||||
const buildId = "development";
|
||||
const { nextConfig, dir } = opts;
|
||||
const { loadBindings } = require("../../build/swc");
|
||||
let bindings = await loadBindings();
|
||||
// For the debugging purpose, check if createNext or equivalent next instance setup in test cases
|
||||
// works correctly. Normally `run-test` hides output so only will be visible when `--debug` flag is used.
|
||||
if (process.env.TURBOPACK && isTestMode) {
|
||||
require("console").log("Creating turbopack project", {
|
||||
dir,
|
||||
testMode: isTestMode
|
||||
});
|
||||
}
|
||||
const hasRewrites = opts.fsChecker.rewrites.afterFiles.length > 0 || opts.fsChecker.rewrites.beforeFiles.length > 0 || opts.fsChecker.rewrites.fallback.length > 0;
|
||||
const hotReloaderSpan = trace("hot-reloader", undefined, {
|
||||
version: "14.2.13"
|
||||
});
|
||||
// Ensure the hotReloaderSpan is flushed immediately as it's the parentSpan for all processing
|
||||
// of the current `next dev` invocation.
|
||||
hotReloaderSpan.stop();
|
||||
const encryptionKey = await generateEncryptionKeyBase64(true);
|
||||
const project = await bindings.turbo.createProject({
|
||||
projectPath: dir,
|
||||
rootPath: opts.nextConfig.experimental.outputFileTracingRoot || dir,
|
||||
nextConfig: opts.nextConfig,
|
||||
jsConfig: await getTurbopackJsConfig(dir, nextConfig),
|
||||
watch: true,
|
||||
dev: true,
|
||||
env: process.env,
|
||||
defineEnv: createDefineEnv({
|
||||
isTurbopack: true,
|
||||
// TODO: Implement
|
||||
clientRouterFilters: undefined,
|
||||
config: nextConfig,
|
||||
dev: true,
|
||||
distDir,
|
||||
fetchCacheKeyPrefix: opts.nextConfig.experimental.fetchCacheKeyPrefix,
|
||||
hasRewrites,
|
||||
// TODO: Implement
|
||||
middlewareMatchers: undefined
|
||||
}),
|
||||
buildId,
|
||||
encryptionKey,
|
||||
previewProps: opts.fsChecker.prerenderManifest.preview
|
||||
});
|
||||
const entrypointsSubscription = project.entrypointsSubscribe();
|
||||
const currentEntrypoints = {
|
||||
global: {
|
||||
app: undefined,
|
||||
document: undefined,
|
||||
error: undefined,
|
||||
middleware: undefined,
|
||||
instrumentation: undefined
|
||||
},
|
||||
page: new Map(),
|
||||
app: new Map()
|
||||
};
|
||||
const currentTopLevelIssues = new Map();
|
||||
const currentEntryIssues = new Map();
|
||||
const manifestLoader = new TurbopackManifestLoader({
|
||||
buildId,
|
||||
distDir,
|
||||
encryptionKey
|
||||
});
|
||||
// Dev specific
|
||||
const changeSubscriptions = new Map();
|
||||
const serverPathState = new Map();
|
||||
const readyIds = new Set();
|
||||
let currentEntriesHandlingResolve;
|
||||
let currentEntriesHandling = new Promise((resolve)=>currentEntriesHandlingResolve = resolve);
|
||||
const assetMapper = new AssetMapper();
|
||||
function clearRequireCache(key, writtenEndpoint) {
|
||||
// Figure out if the server files have changed
|
||||
let hasChange = false;
|
||||
for (const { path, contentHash } of writtenEndpoint.serverPaths){
|
||||
// We ignore source maps
|
||||
if (path.endsWith(".map")) continue;
|
||||
const localKey = `${key}:${path}`;
|
||||
const localHash = serverPathState.get(localKey);
|
||||
const globalHash = serverPathState.get(path);
|
||||
if (localHash && localHash !== contentHash || globalHash && globalHash !== contentHash) {
|
||||
hasChange = true;
|
||||
serverPathState.set(key, contentHash);
|
||||
serverPathState.set(path, contentHash);
|
||||
} else {
|
||||
if (!localHash) {
|
||||
serverPathState.set(key, contentHash);
|
||||
}
|
||||
if (!globalHash) {
|
||||
serverPathState.set(path, contentHash);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!hasChange) {
|
||||
return;
|
||||
}
|
||||
const hasAppPaths = writtenEndpoint.serverPaths.some(({ path: p })=>p.startsWith("server/app"));
|
||||
if (hasAppPaths) {
|
||||
deleteAppClientCache();
|
||||
}
|
||||
const serverPaths = writtenEndpoint.serverPaths.map(({ path: p })=>join(distDir, p));
|
||||
for (const file of serverPaths){
|
||||
clearModuleContext(file);
|
||||
deleteCache(file);
|
||||
}
|
||||
return;
|
||||
}
|
||||
const buildingIds = new Set();
|
||||
const startBuilding = (id, requestUrl, forceRebuild)=>{
|
||||
if (!forceRebuild && readyIds.has(id)) {
|
||||
return ()=>{};
|
||||
}
|
||||
if (buildingIds.size === 0) {
|
||||
consoleStore.setState({
|
||||
loading: true,
|
||||
trigger: id,
|
||||
url: requestUrl
|
||||
}, true);
|
||||
}
|
||||
buildingIds.add(id);
|
||||
return function finishBuilding() {
|
||||
if (buildingIds.size === 0) {
|
||||
return;
|
||||
}
|
||||
readyIds.add(id);
|
||||
buildingIds.delete(id);
|
||||
if (buildingIds.size === 0) {
|
||||
hmrEventHappened = false;
|
||||
consoleStore.setState({
|
||||
loading: false
|
||||
}, true);
|
||||
}
|
||||
};
|
||||
};
|
||||
let hmrEventHappened = false;
|
||||
let hmrHash = 0;
|
||||
const clients = new Set();
|
||||
const clientStates = new WeakMap();
|
||||
function sendToClient(client, payload) {
|
||||
client.send(JSON.stringify(payload));
|
||||
}
|
||||
function sendEnqueuedMessages() {
|
||||
for (const [, issueMap] of currentEntryIssues){
|
||||
if ([
|
||||
...issueMap.values()
|
||||
].filter((i)=>i.severity !== "warning").length > 0) {
|
||||
// During compilation errors we want to delay the HMR events until errors are fixed
|
||||
return;
|
||||
}
|
||||
}
|
||||
for (const client of clients){
|
||||
const state = clientStates.get(client);
|
||||
if (!state) {
|
||||
continue;
|
||||
}
|
||||
for (const [, issueMap] of state.clientIssues){
|
||||
if ([
|
||||
...issueMap.values()
|
||||
].filter((i)=>i.severity !== "warning").length > 0) {
|
||||
// During compilation errors we want to delay the HMR events until errors are fixed
|
||||
return;
|
||||
}
|
||||
}
|
||||
for (const payload of state.hmrPayloads.values()){
|
||||
sendToClient(client, payload);
|
||||
}
|
||||
state.hmrPayloads.clear();
|
||||
if (state.turbopackUpdates.length > 0) {
|
||||
sendToClient(client, {
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.TURBOPACK_MESSAGE,
|
||||
data: state.turbopackUpdates
|
||||
});
|
||||
state.turbopackUpdates.length = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
const sendEnqueuedMessagesDebounce = debounce(sendEnqueuedMessages, 2);
|
||||
const sendHmr = (id, payload)=>{
|
||||
for (const client of clients){
|
||||
var _clientStates_get;
|
||||
(_clientStates_get = clientStates.get(client)) == null ? void 0 : _clientStates_get.hmrPayloads.set(id, payload);
|
||||
}
|
||||
hmrEventHappened = true;
|
||||
sendEnqueuedMessagesDebounce();
|
||||
};
|
||||
function sendTurbopackMessage(payload) {
|
||||
// TODO(PACK-2049): For some reason we end up emitting hundreds of issues messages on bigger apps,
|
||||
// a lot of which are duplicates.
|
||||
// They are currently not handled on the client at all, so might as well not send them for now.
|
||||
payload.diagnostics = [];
|
||||
payload.issues = [];
|
||||
for (const client of clients){
|
||||
var _clientStates_get;
|
||||
(_clientStates_get = clientStates.get(client)) == null ? void 0 : _clientStates_get.turbopackUpdates.push(payload);
|
||||
}
|
||||
hmrEventHappened = true;
|
||||
sendEnqueuedMessagesDebounce();
|
||||
}
|
||||
async function subscribeToChanges(key, includeIssues, endpoint, makePayload) {
|
||||
if (changeSubscriptions.has(key)) {
|
||||
return;
|
||||
}
|
||||
const { side } = splitEntryKey(key);
|
||||
const changedPromise = endpoint[`${side}Changed`](includeIssues);
|
||||
changeSubscriptions.set(key, changedPromise);
|
||||
const changed = await changedPromise;
|
||||
for await (const change of changed){
|
||||
processIssues(currentEntryIssues, key, change, false, true);
|
||||
const payload = await makePayload(change);
|
||||
if (payload) {
|
||||
sendHmr(key, payload);
|
||||
}
|
||||
}
|
||||
}
|
||||
async function unsubscribeFromChanges(key) {
|
||||
const subscription = await changeSubscriptions.get(key);
|
||||
if (subscription) {
|
||||
await (subscription.return == null ? void 0 : subscription.return.call(subscription));
|
||||
changeSubscriptions.delete(key);
|
||||
}
|
||||
currentEntryIssues.delete(key);
|
||||
}
|
||||
async function subscribeToHmrEvents(client, id) {
|
||||
const key = getEntryKey("assets", "client", id);
|
||||
if (!hasEntrypointForKey(currentEntrypoints, key, assetMapper)) {
|
||||
// maybe throw an error / force the client to reload?
|
||||
return;
|
||||
}
|
||||
const state = clientStates.get(client);
|
||||
if (!state || state.subscriptions.has(id)) {
|
||||
return;
|
||||
}
|
||||
const subscription = project.hmrEvents(id);
|
||||
state.subscriptions.set(id, subscription);
|
||||
// The subscription will always emit once, which is the initial
|
||||
// computation. This is not a change, so swallow it.
|
||||
try {
|
||||
await subscription.next();
|
||||
for await (const data of subscription){
|
||||
processIssues(state.clientIssues, key, data, false, true);
|
||||
if (data.type !== "issues") {
|
||||
sendTurbopackMessage(data);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
// The client might be using an HMR session from a previous server, tell them
|
||||
// to fully reload the page to resolve the issue. We can't use
|
||||
// `hotReloader.send` since that would force every connected client to
|
||||
// reload, only this client is out of date.
|
||||
const reloadAction = {
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE
|
||||
};
|
||||
sendToClient(client, reloadAction);
|
||||
client.close();
|
||||
return;
|
||||
}
|
||||
}
|
||||
function unsubscribeFromHmrEvents(client, id) {
|
||||
const state = clientStates.get(client);
|
||||
if (!state) {
|
||||
return;
|
||||
}
|
||||
const subscription = state.subscriptions.get(id);
|
||||
subscription == null ? void 0 : subscription.return();
|
||||
const key = getEntryKey("assets", "client", id);
|
||||
state.clientIssues.delete(key);
|
||||
}
|
||||
async function handleEntrypointsSubscription() {
|
||||
for await (const entrypoints of entrypointsSubscription){
|
||||
if (!currentEntriesHandlingResolve) {
|
||||
currentEntriesHandling = new Promise(// eslint-disable-next-line no-loop-func
|
||||
(resolve)=>currentEntriesHandlingResolve = resolve);
|
||||
}
|
||||
processTopLevelIssues(currentTopLevelIssues, entrypoints);
|
||||
await handleEntrypoints({
|
||||
entrypoints,
|
||||
currentEntrypoints,
|
||||
currentEntryIssues,
|
||||
manifestLoader,
|
||||
nextConfig: opts.nextConfig,
|
||||
rewrites: opts.fsChecker.rewrites,
|
||||
logErrors: true,
|
||||
dev: {
|
||||
assetMapper,
|
||||
changeSubscriptions,
|
||||
clients,
|
||||
clientStates,
|
||||
serverFields,
|
||||
hooks: {
|
||||
handleWrittenEndpoint: (id, result)=>{
|
||||
clearRequireCache(id, result);
|
||||
},
|
||||
propagateServerField: propagateServerField.bind(null, opts),
|
||||
sendHmr,
|
||||
startBuilding,
|
||||
subscribeToChanges,
|
||||
unsubscribeFromChanges,
|
||||
unsubscribeFromHmrEvents
|
||||
}
|
||||
}
|
||||
});
|
||||
currentEntriesHandlingResolve();
|
||||
currentEntriesHandlingResolve = undefined;
|
||||
}
|
||||
}
|
||||
await mkdir(join(distDir, "server"), {
|
||||
recursive: true
|
||||
});
|
||||
await mkdir(join(distDir, "static", buildId), {
|
||||
recursive: true
|
||||
});
|
||||
await writeFile(join(distDir, "package.json"), JSON.stringify({
|
||||
type: "commonjs"
|
||||
}, null, 2));
|
||||
const overlayMiddleware = getOverlayMiddleware(project);
|
||||
const versionInfoPromise = getVersionInfo(isTestMode || opts.telemetry.isEnabled);
|
||||
const hotReloader = {
|
||||
turbopackProject: project,
|
||||
activeWebpackConfigs: undefined,
|
||||
serverStats: null,
|
||||
edgeServerStats: null,
|
||||
async run (req, res, _parsedUrl) {
|
||||
var _req_url;
|
||||
// intercept page chunks request and ensure them with turbopack
|
||||
if ((_req_url = req.url) == null ? void 0 : _req_url.startsWith("/_next/static/chunks/pages/")) {
|
||||
const params = matchNextPageBundleRequest(req.url);
|
||||
if (params) {
|
||||
const decodedPagePath = `/${params.path.map((param)=>decodeURIComponent(param)).join("/")}`;
|
||||
const denormalizedPagePath = denormalizePagePath(decodedPagePath);
|
||||
await hotReloader.ensurePage({
|
||||
page: denormalizedPagePath,
|
||||
clientOnly: false,
|
||||
definition: undefined,
|
||||
url: req.url
|
||||
}).catch(console.error);
|
||||
}
|
||||
}
|
||||
await overlayMiddleware(req, res);
|
||||
// Request was not finished.
|
||||
return {
|
||||
finished: undefined
|
||||
};
|
||||
},
|
||||
// TODO: Figure out if socket type can match the NextJsHotReloaderInterface
|
||||
onHMR (req, socket, head) {
|
||||
wsServer.handleUpgrade(req, socket, head, (client)=>{
|
||||
const clientIssues = new Map();
|
||||
const subscriptions = new Map();
|
||||
clients.add(client);
|
||||
clientStates.set(client, {
|
||||
clientIssues,
|
||||
hmrPayloads: new Map(),
|
||||
turbopackUpdates: [],
|
||||
subscriptions
|
||||
});
|
||||
client.on("close", ()=>{
|
||||
// Remove active subscriptions
|
||||
for (const subscription of subscriptions.values()){
|
||||
subscription.return == null ? void 0 : subscription.return.call(subscription);
|
||||
}
|
||||
clientStates.delete(client);
|
||||
clients.delete(client);
|
||||
});
|
||||
client.addEventListener("message", ({ data })=>{
|
||||
const parsedData = JSON.parse(typeof data !== "string" ? data.toString() : data);
|
||||
// Next.js messages
|
||||
switch(parsedData.event){
|
||||
case "ping":
|
||||
break;
|
||||
case "span-end":
|
||||
{
|
||||
hotReloaderSpan.manualTraceChild(parsedData.spanName, msToNs(parsedData.startTime), msToNs(parsedData.endTime), parsedData.attributes);
|
||||
break;
|
||||
}
|
||||
case "client-hmr-latency":
|
||||
hotReloaderSpan.manualTraceChild(parsedData.event, msToNs(parsedData.startTime), msToNs(parsedData.endTime), {
|
||||
updatedModules: parsedData.updatedModules,
|
||||
page: parsedData.page,
|
||||
isPageHidden: parsedData.isPageHidden
|
||||
});
|
||||
break;
|
||||
case "client-error":
|
||||
case "client-warning":
|
||||
case "client-success":
|
||||
case "server-component-reload-page":
|
||||
case "client-reload-page":
|
||||
case "client-removed-page":
|
||||
case "client-full-reload":
|
||||
const { hadRuntimeError, dependencyChain } = parsedData;
|
||||
if (hadRuntimeError) {
|
||||
Log.warn(FAST_REFRESH_RUNTIME_RELOAD);
|
||||
}
|
||||
if (Array.isArray(dependencyChain) && typeof dependencyChain[0] === "string") {
|
||||
const cleanedModulePath = dependencyChain[0].replace(/^\[project\]/, ".").replace(/ \[.*\] \(.*\)$/, "");
|
||||
Log.warn(`Fast Refresh had to perform a full reload when ${cleanedModulePath} changed. Read more: https://nextjs.org/docs/messages/fast-refresh-reload`);
|
||||
}
|
||||
break;
|
||||
case "client-added-page":
|
||||
break;
|
||||
default:
|
||||
// Might be a Turbopack message...
|
||||
if (!parsedData.type) {
|
||||
throw new Error(`unrecognized HMR message "${data}"`);
|
||||
}
|
||||
}
|
||||
// Turbopack messages
|
||||
switch(parsedData.type){
|
||||
case "turbopack-subscribe":
|
||||
subscribeToHmrEvents(client, parsedData.path);
|
||||
break;
|
||||
case "turbopack-unsubscribe":
|
||||
unsubscribeFromHmrEvents(client, parsedData.path);
|
||||
break;
|
||||
default:
|
||||
if (!parsedData.event) {
|
||||
throw new Error(`unrecognized Turbopack HMR message "${data}"`);
|
||||
}
|
||||
}
|
||||
});
|
||||
const turbopackConnected = {
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.TURBOPACK_CONNECTED
|
||||
};
|
||||
sendToClient(client, turbopackConnected);
|
||||
const errors = [];
|
||||
for (const entryIssues of currentEntryIssues.values()){
|
||||
for (const issue of entryIssues.values()){
|
||||
if (issue.severity !== "warning") {
|
||||
errors.push({
|
||||
message: formatIssue(issue)
|
||||
});
|
||||
} else {
|
||||
printNonFatalIssue(issue);
|
||||
}
|
||||
}
|
||||
}
|
||||
(async function() {
|
||||
const versionInfo = await versionInfoPromise;
|
||||
const sync = {
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.SYNC,
|
||||
errors,
|
||||
warnings: [],
|
||||
hash: "",
|
||||
versionInfo
|
||||
};
|
||||
sendToClient(client, sync);
|
||||
})();
|
||||
});
|
||||
},
|
||||
send (action) {
|
||||
const payload = JSON.stringify(action);
|
||||
for (const client of clients){
|
||||
client.send(payload);
|
||||
}
|
||||
},
|
||||
setHmrServerError (_error) {
|
||||
// Not implemented yet.
|
||||
},
|
||||
clearHmrServerError () {
|
||||
// Not implemented yet.
|
||||
},
|
||||
async start () {},
|
||||
async stop () {
|
||||
// Not implemented yet.
|
||||
},
|
||||
async getCompilationErrors (page) {
|
||||
const appEntryKey = getEntryKey("app", "server", page);
|
||||
const pagesEntryKey = getEntryKey("pages", "server", page);
|
||||
const topLevelIssues = currentTopLevelIssues.values();
|
||||
const thisEntryIssues = currentEntryIssues.get(appEntryKey) ?? currentEntryIssues.get(pagesEntryKey);
|
||||
if (thisEntryIssues !== undefined && thisEntryIssues.size > 0) {
|
||||
// If there is an error related to the requesting page we display it instead of the first error
|
||||
return [
|
||||
...topLevelIssues,
|
||||
...thisEntryIssues.values()
|
||||
].map((issue)=>{
|
||||
const formattedIssue = formatIssue(issue);
|
||||
if (issue.severity === "warning") {
|
||||
printNonFatalIssue(issue);
|
||||
return null;
|
||||
} else if (isWellKnownError(issue)) {
|
||||
Log.error(formattedIssue);
|
||||
}
|
||||
return new Error(formattedIssue);
|
||||
}).filter((error)=>error !== null);
|
||||
}
|
||||
// Otherwise, return all errors across pages
|
||||
const errors = [];
|
||||
for (const issue of topLevelIssues){
|
||||
if (issue.severity !== "warning") {
|
||||
errors.push(new Error(formatIssue(issue)));
|
||||
}
|
||||
}
|
||||
for (const entryIssues of currentEntryIssues.values()){
|
||||
for (const issue of entryIssues.values()){
|
||||
if (issue.severity !== "warning") {
|
||||
const message = formatIssue(issue);
|
||||
errors.push(new Error(message));
|
||||
} else {
|
||||
printNonFatalIssue(issue);
|
||||
}
|
||||
}
|
||||
}
|
||||
return errors;
|
||||
},
|
||||
async invalidate ({ // .env files or tsconfig/jsconfig change
|
||||
reloadAfterInvalidation }) {
|
||||
if (reloadAfterInvalidation) {
|
||||
await clearAllModuleContexts();
|
||||
this.send({
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.SERVER_COMPONENT_CHANGES
|
||||
});
|
||||
}
|
||||
},
|
||||
async buildFallbackError () {
|
||||
// Not implemented yet.
|
||||
},
|
||||
async ensurePage ({ page: inputPage, // Unused parameters
|
||||
// clientOnly,
|
||||
// appPaths,
|
||||
definition, isApp, url: requestUrl }) {
|
||||
if (BLOCKED_PAGES.includes(inputPage) && inputPage !== "/_error") {
|
||||
return;
|
||||
}
|
||||
let routeDef = definition ?? await findPagePathData(dir, inputPage, nextConfig.pageExtensions, opts.pagesDir, opts.appDir);
|
||||
const page = routeDef.page;
|
||||
const pathname = (definition == null ? void 0 : definition.pathname) ?? inputPage;
|
||||
if (page === "/_error") {
|
||||
let finishBuilding = startBuilding(pathname, requestUrl, false);
|
||||
try {
|
||||
await handlePagesErrorRoute({
|
||||
currentEntryIssues,
|
||||
entrypoints: currentEntrypoints,
|
||||
manifestLoader,
|
||||
rewrites: opts.fsChecker.rewrites,
|
||||
logErrors: true,
|
||||
hooks: {
|
||||
subscribeToChanges,
|
||||
handleWrittenEndpoint: (id, result)=>{
|
||||
clearRequireCache(id, result);
|
||||
assetMapper.setPathsForKey(id, result.clientPaths);
|
||||
}
|
||||
}
|
||||
});
|
||||
} finally{
|
||||
finishBuilding();
|
||||
}
|
||||
return;
|
||||
}
|
||||
await currentEntriesHandling;
|
||||
const isInsideAppDir = routeDef.bundlePath.startsWith("app/");
|
||||
const route = isInsideAppDir ? currentEntrypoints.app.get(page) : currentEntrypoints.page.get(page);
|
||||
if (!route) {
|
||||
// TODO: why is this entry missing in turbopack?
|
||||
if (page === "/middleware") return;
|
||||
if (page === "/src/middleware") return;
|
||||
if (page === "/instrumentation") return;
|
||||
if (page === "/src/instrumentation") return;
|
||||
throw new PageNotFoundError(`route not found ${page}`);
|
||||
}
|
||||
// We don't throw on ensureOpts.isApp === true for page-api
|
||||
// since this can happen when app pages make
|
||||
// api requests to page API routes.
|
||||
if (isApp && route.type === "page") {
|
||||
throw new Error(`mis-matched route type: isApp && page for ${page}`);
|
||||
}
|
||||
const finishBuilding = startBuilding(pathname, requestUrl, false);
|
||||
try {
|
||||
await handleRouteType({
|
||||
dev: true,
|
||||
page,
|
||||
pathname,
|
||||
route,
|
||||
currentEntryIssues,
|
||||
entrypoints: currentEntrypoints,
|
||||
manifestLoader,
|
||||
readyIds,
|
||||
rewrites: opts.fsChecker.rewrites,
|
||||
logErrors: true,
|
||||
hooks: {
|
||||
subscribeToChanges,
|
||||
handleWrittenEndpoint: (id, result)=>{
|
||||
clearRequireCache(id, result);
|
||||
assetMapper.setPathsForKey(id, result.clientPaths);
|
||||
}
|
||||
}
|
||||
});
|
||||
} finally{
|
||||
finishBuilding();
|
||||
}
|
||||
}
|
||||
};
|
||||
handleEntrypointsSubscription().catch((err)=>{
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
// Write empty manifests
|
||||
await currentEntriesHandling;
|
||||
await manifestLoader.writeManifests({
|
||||
rewrites: opts.fsChecker.rewrites,
|
||||
pageEntrypoints: currentEntrypoints.page
|
||||
});
|
||||
async function handleProjectUpdates() {
|
||||
for await (const updateMessage of project.updateInfoSubscribe(30)){
|
||||
switch(updateMessage.updateType){
|
||||
case "start":
|
||||
{
|
||||
hotReloader.send({
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.BUILDING
|
||||
});
|
||||
break;
|
||||
}
|
||||
case "end":
|
||||
{
|
||||
sendEnqueuedMessages();
|
||||
function addErrors(errorsMap, issues) {
|
||||
for (const issueMap of issues.values()){
|
||||
for (const [key, issue] of issueMap){
|
||||
if (issue.severity === "warning") continue;
|
||||
if (errorsMap.has(key)) continue;
|
||||
const message = formatIssue(issue);
|
||||
errorsMap.set(key, {
|
||||
message,
|
||||
details: issue.detail ? renderStyledStringToErrorAnsi(issue.detail) : undefined
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
const errors = new Map();
|
||||
addErrors(errors, currentEntryIssues);
|
||||
for (const client of clients){
|
||||
const state = clientStates.get(client);
|
||||
if (!state) {
|
||||
continue;
|
||||
}
|
||||
const clientErrors = new Map(errors);
|
||||
addErrors(clientErrors, state.clientIssues);
|
||||
sendToClient(client, {
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.BUILT,
|
||||
hash: String(++hmrHash),
|
||||
errors: [
|
||||
...clientErrors.values()
|
||||
],
|
||||
warnings: []
|
||||
});
|
||||
}
|
||||
if (hmrEventHappened) {
|
||||
const time = updateMessage.value.duration;
|
||||
const timeMessage = time > 2000 ? `${Math.round(time / 100) / 10}s` : `${time}ms`;
|
||||
Log.event(`Compiled in ${timeMessage}`);
|
||||
hmrEventHappened = false;
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
}
|
||||
}
|
||||
}
|
||||
handleProjectUpdates().catch((err)=>{
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
return hotReloader;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=hot-reloader-turbopack.js.map
|
||||
1 node_modules/next/dist/esm/server/dev/hot-reloader-turbopack.js.map (generated, vendored, normal file)
File diff suppressed because one or more lines are too long
19 node_modules/next/dist/esm/server/dev/hot-reloader-types.js (generated, vendored, normal file)
@@ -0,0 +1,19 @@
export var HMR_ACTIONS_SENT_TO_BROWSER;
(function(HMR_ACTIONS_SENT_TO_BROWSER) {
    HMR_ACTIONS_SENT_TO_BROWSER["ADDED_PAGE"] = "addedPage";
    HMR_ACTIONS_SENT_TO_BROWSER["REMOVED_PAGE"] = "removedPage";
    HMR_ACTIONS_SENT_TO_BROWSER["RELOAD_PAGE"] = "reloadPage";
    HMR_ACTIONS_SENT_TO_BROWSER["SERVER_COMPONENT_CHANGES"] = "serverComponentChanges";
    HMR_ACTIONS_SENT_TO_BROWSER["MIDDLEWARE_CHANGES"] = "middlewareChanges";
    HMR_ACTIONS_SENT_TO_BROWSER["CLIENT_CHANGES"] = "clientChanges";
    HMR_ACTIONS_SENT_TO_BROWSER["SERVER_ONLY_CHANGES"] = "serverOnlyChanges";
    HMR_ACTIONS_SENT_TO_BROWSER["SYNC"] = "sync";
    HMR_ACTIONS_SENT_TO_BROWSER["BUILT"] = "built";
    HMR_ACTIONS_SENT_TO_BROWSER["BUILDING"] = "building";
    HMR_ACTIONS_SENT_TO_BROWSER["DEV_PAGES_MANIFEST_UPDATE"] = "devPagesManifestUpdate";
    HMR_ACTIONS_SENT_TO_BROWSER["TURBOPACK_MESSAGE"] = "turbopack-message";
    HMR_ACTIONS_SENT_TO_BROWSER["SERVER_ERROR"] = "serverError";
    HMR_ACTIONS_SENT_TO_BROWSER["TURBOPACK_CONNECTED"] = "turbopack-connected";
})(HMR_ACTIONS_SENT_TO_BROWSER || (HMR_ACTIONS_SENT_TO_BROWSER = {}));

//# sourceMappingURL=hot-reloader-types.js.map
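A short, hedged sketch of the consuming side (not part of the vendored file): a browser client switching on a few of the action values defined above. The payload fields (hash, errors, warnings) match what WebpackHotMiddleware.publishStats sends; the endpoint URL is a placeholder.

import { HMR_ACTIONS_SENT_TO_BROWSER } from "next/dist/esm/server/dev/hot-reloader-types";

const socket = new WebSocket("ws://localhost:3000/_next/webpack-hmr"); // placeholder endpoint
socket.addEventListener("message", (event) => {
    const payload = JSON.parse(event.data);
    switch (payload.action) {
        case HMR_ACTIONS_SENT_TO_BROWSER.BUILDING:
            console.log("[HMR] compiling...");
            break;
        case HMR_ACTIONS_SENT_TO_BROWSER.BUILT:
        case HMR_ACTIONS_SENT_TO_BROWSER.SYNC:
            if ((payload.errors || []).length > 0) console.error("[HMR] build errors:", payload.errors);
            break;
        case HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE:
            window.location.reload();
            break;
        default:
            break;
    }
});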
1 node_modules/next/dist/esm/server/dev/hot-reloader-types.js.map (generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/dev/hot-reloader-types.ts"],"names":["HMR_ACTIONS_SENT_TO_BROWSER"],"mappings":";UASkBA;;;;;;;;;;;;;;;GAAAA,gCAAAA"}
1116 node_modules/next/dist/esm/server/dev/hot-reloader-webpack.js (generated, vendored, normal file)
File diff suppressed because it is too large
1 node_modules/next/dist/esm/server/dev/hot-reloader-webpack.js.map (generated, vendored, normal file)
File diff suppressed because one or more lines are too long
24 node_modules/next/dist/esm/server/dev/log-app-dir-error.js (generated, vendored, normal file)
@@ -0,0 +1,24 @@
import isError from "../../lib/is-error";
import * as Log from "../../build/output/log";
export function logAppDirError(err) {
    if (isError(err) && (err == null ? void 0 : err.stack)) {
        const cleanedStack = err.stack.split("\n").map((line)=>// Remove 'webpack-internal:' noise from the path
            line.replace(/(webpack-internal:\/\/\/|file:\/\/)(\(.*\)\/)?/, ""));
        const filteredStack = cleanedStack// Only display stack frames from the user's code
        .filter((line)=>!/next[\\/]dist[\\/]compiled/.test(line) && !/node_modules[\\/]/.test(line) && !/node:internal[\\/]/.test(line));
        if (filteredStack.length === 1) {
            // This is an error that happened outside of user code, keep full stack
            Log.error(`Internal error: ${cleanedStack.join("\n")}`);
        } else {
            Log.error(filteredStack.join("\n"));
        }
        if (typeof err.digest !== "undefined") {
            console.error(`digest: ${JSON.stringify(err.digest)}`);
        }
        if (err.cause) console.error("Cause:", err.cause);
    } else {
        Log.error(err);
    }
}

//# sourceMappingURL=log-app-dir-error.js.map
1 node_modules/next/dist/esm/server/dev/log-app-dir-error.js.map (generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/dev/log-app-dir-error.ts"],"names":["isError","Log","logAppDirError","err","stack","cleanedStack","split","map","line","replace","filteredStack","filter","test","length","error","join","digest","console","JSON","stringify","cause"],"mappings":"AAAA,OAAOA,aAAa,qBAAoB;AACxC,YAAYC,SAAS,yBAAwB;AAE7C,OAAO,SAASC,eAAeC,GAAY;IACzC,IAAIH,QAAQG,SAAQA,uBAAAA,IAAKC,KAAK,GAAE;QAC9B,MAAMC,eAAeF,IAAIC,KAAK,CAACE,KAAK,CAAC,MAAMC,GAAG,CAAC,CAACC,OAC9C,iDAAiD;YACjDA,KAAKC,OAAO,CAAC,kDAAkD;QAEjE,MAAMC,gBAAgBL,YACpB,iDAAiD;SAChDM,MAAM,CACL,CAACH,OACC,CAAC,6BAA6BI,IAAI,CAACJ,SACnC,CAAC,oBAAoBI,IAAI,CAACJ,SAC1B,CAAC,qBAAqBI,IAAI,CAACJ;QAEjC,IAAIE,cAAcG,MAAM,KAAK,GAAG;YAC9B,uEAAuE;YACvEZ,IAAIa,KAAK,CAAC,CAAC,gBAAgB,EAAET,aAAaU,IAAI,CAAC,MAAM,CAAC;QACxD,OAAO;YACLd,IAAIa,KAAK,CAACJ,cAAcK,IAAI,CAAC;QAC/B;QACA,IAAI,OAAO,AAACZ,IAAYa,MAAM,KAAK,aAAa;YAC9CC,QAAQH,KAAK,CAAC,CAAC,QAAQ,EAAEI,KAAKC,SAAS,CAAC,AAAChB,IAAYa,MAAM,EAAE,CAAC;QAChE;QAEA,IAAIb,IAAIiB,KAAK,EAAEH,QAAQH,KAAK,CAAC,UAAUX,IAAIiB,KAAK;IAClD,OAAO;QACLnB,IAAIa,KAAK,CAACX;IACZ;AACF"}
3 node_modules/next/dist/esm/server/dev/messages.js (generated, vendored, normal file)
@@ -0,0 +1,3 @@
export const FAST_REFRESH_RUNTIME_RELOAD = "Fast Refresh had to perform a full reload due to a runtime error.";

//# sourceMappingURL=messages.js.map
1 node_modules/next/dist/esm/server/dev/messages.js.map (generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/dev/messages.ts"],"names":["FAST_REFRESH_RUNTIME_RELOAD"],"mappings":"AAAA,OAAO,MAAMA,8BACX,oEAAmE"}
545 node_modules/next/dist/esm/server/dev/next-dev-server.js (generated, vendored, normal file)
@@ -0,0 +1,545 @@
|
||||
import fs from "fs";
|
||||
import { Worker } from "next/dist/compiled/jest-worker";
|
||||
import { join as pathJoin } from "path";
|
||||
import { ampValidation } from "../../build/output";
|
||||
import { INSTRUMENTATION_HOOK_FILENAME, PUBLIC_DIR_MIDDLEWARE_CONFLICT } from "../../lib/constants";
|
||||
import { findPagesDir } from "../../lib/find-pages-dir";
|
||||
import { PHASE_DEVELOPMENT_SERVER, PAGES_MANIFEST, APP_PATHS_MANIFEST, COMPILER_NAMES } from "../../shared/lib/constants";
|
||||
import Server, { WrappedBuildError } from "../next-server";
|
||||
import { normalizePagePath } from "../../shared/lib/page-path/normalize-page-path";
|
||||
import { pathHasPrefix } from "../../shared/lib/router/utils/path-has-prefix";
|
||||
import { removePathPrefix } from "../../shared/lib/router/utils/remove-path-prefix";
|
||||
import { Telemetry } from "../../telemetry/storage";
|
||||
import { setGlobal, trace } from "../../trace";
|
||||
import { findPageFile } from "../lib/find-page-file";
|
||||
import { getNodeOptionsWithoutInspect } from "../lib/utils";
|
||||
import { withCoalescedInvoke } from "../../lib/coalesced-function";
|
||||
import { loadDefaultErrorComponents } from "../load-default-error-components";
|
||||
import { DecodeError, MiddlewareNotFoundError } from "../../shared/lib/utils";
|
||||
import * as Log from "../../build/output/log";
|
||||
import isError, { getProperError } from "../../lib/is-error";
|
||||
import { isMiddlewareFile } from "../../build/utils";
|
||||
import { formatServerError } from "../../lib/format-server-error";
|
||||
import { DevRouteMatcherManager } from "../future/route-matcher-managers/dev-route-matcher-manager";
|
||||
import { DevPagesRouteMatcherProvider } from "../future/route-matcher-providers/dev/dev-pages-route-matcher-provider";
|
||||
import { DevPagesAPIRouteMatcherProvider } from "../future/route-matcher-providers/dev/dev-pages-api-route-matcher-provider";
|
||||
import { DevAppPageRouteMatcherProvider } from "../future/route-matcher-providers/dev/dev-app-page-route-matcher-provider";
|
||||
import { DevAppRouteRouteMatcherProvider } from "../future/route-matcher-providers/dev/dev-app-route-route-matcher-provider";
|
||||
import { NodeManifestLoader } from "../future/route-matcher-providers/helpers/manifest-loaders/node-manifest-loader";
|
||||
import { BatchedFileReader } from "../future/route-matcher-providers/dev/helpers/file-reader/batched-file-reader";
|
||||
import { DefaultFileReader } from "../future/route-matcher-providers/dev/helpers/file-reader/default-file-reader";
|
||||
import LRUCache from "next/dist/compiled/lru-cache";
|
||||
import { getMiddlewareRouteMatcher } from "../../shared/lib/router/utils/middleware-route-matcher";
|
||||
import { DetachedPromise } from "../../lib/detached-promise";
|
||||
import { isPostpone } from "../lib/router-utils/is-postpone";
|
||||
import { generateInterceptionRoutesRewrites } from "../../lib/generate-interception-routes-rewrites";
|
||||
import { buildCustomRoute } from "../../lib/build-custom-route";
|
||||
import { decorateServerError } from "../../shared/lib/error-source";
|
||||
// Load ReactDevOverlay only when needed
|
||||
let ReactDevOverlayImpl;
|
||||
const ReactDevOverlay = (props)=>{
|
||||
if (ReactDevOverlayImpl === undefined) {
|
||||
ReactDevOverlayImpl = require("../../client/components/react-dev-overlay/pages/client").ReactDevOverlay;
|
||||
}
|
||||
return ReactDevOverlayImpl(props);
|
||||
};
|
||||
export default class DevServer extends Server {
|
||||
getStaticPathsWorker() {
|
||||
const worker = new Worker(require.resolve("./static-paths-worker"), {
|
||||
maxRetries: 1,
|
||||
// For dev server, it's not necessary to spin up too many workers as long as you are not doing a load test.
|
||||
// This helps reusing the memory a lot.
|
||||
numWorkers: 1,
|
||||
enableWorkerThreads: this.nextConfig.experimental.workerThreads,
|
||||
forkOptions: {
|
||||
env: {
|
||||
...process.env,
|
||||
// discard --inspect/--inspect-brk flags from process.env.NODE_OPTIONS. Otherwise multiple Node.js debuggers
|
||||
// would be started if user launch Next.js in debugging mode. The number of debuggers is linked to
|
||||
// the number of workers Next.js tries to launch. The only worker users are interested in debugging
|
||||
// is the main Next.js one
|
||||
NODE_OPTIONS: getNodeOptionsWithoutInspect()
|
||||
}
|
||||
}
|
||||
});
|
||||
worker.getStdout().pipe(process.stdout);
|
||||
worker.getStderr().pipe(process.stderr);
|
||||
return worker;
|
||||
}
|
||||
constructor(options){
|
||||
var _this_nextConfig_experimental_amp, _this_nextConfig_experimental;
|
||||
try {
|
||||
// Increase the number of stack frames on the server
|
||||
Error.stackTraceLimit = 50;
|
||||
} catch {}
|
||||
super({
|
||||
...options,
|
||||
dev: true
|
||||
});
|
||||
/**
|
||||
* The promise that resolves when the server is ready. When this is unset
|
||||
* the server is ready.
|
||||
*/ this.ready = new DetachedPromise();
|
||||
this.bundlerService = options.bundlerService;
|
||||
this.startServerSpan = options.startServerSpan ?? trace("start-next-dev-server");
|
||||
this.storeGlobals();
|
||||
this.renderOpts.dev = true;
|
||||
this.renderOpts.appDirDevErrorLogger = (err)=>this.logErrorWithOriginalStack(err, "app-dir");
|
||||
this.renderOpts.ErrorDebug = ReactDevOverlay;
|
||||
this.staticPathsCache = new LRUCache({
|
||||
// 5MB
|
||||
max: 5 * 1024 * 1024,
|
||||
length (value) {
|
||||
return JSON.stringify(value.staticPaths).length;
|
||||
}
|
||||
});
|
||||
this.renderOpts.ampSkipValidation = ((_this_nextConfig_experimental = this.nextConfig.experimental) == null ? void 0 : (_this_nextConfig_experimental_amp = _this_nextConfig_experimental.amp) == null ? void 0 : _this_nextConfig_experimental_amp.skipValidation) ?? false;
|
||||
this.renderOpts.ampValidator = (html, pathname)=>{
|
||||
const validatorPath = this.nextConfig.experimental && this.nextConfig.experimental.amp && this.nextConfig.experimental.amp.validator;
|
||||
const AmpHtmlValidator = require("next/dist/compiled/amphtml-validator");
|
||||
return AmpHtmlValidator.getInstance(validatorPath).then((validator)=>{
|
||||
const result = validator.validateString(html);
|
||||
ampValidation(pathname, result.errors.filter((e)=>e.severity === "ERROR").filter((e)=>this._filterAmpDevelopmentScript(html, e)), result.errors.filter((e)=>e.severity !== "ERROR"));
|
||||
});
|
||||
};
|
||||
const { pagesDir, appDir } = findPagesDir(this.dir);
|
||||
this.pagesDir = pagesDir;
|
||||
this.appDir = appDir;
|
||||
}
|
||||
getRouteMatchers() {
|
||||
const { pagesDir, appDir } = findPagesDir(this.dir);
|
||||
const ensurer = {
|
||||
ensure: async (match, pathname)=>{
|
||||
await this.ensurePage({
|
||||
definition: match.definition,
|
||||
page: match.definition.page,
|
||||
clientOnly: false,
|
||||
url: pathname
|
||||
});
|
||||
}
|
||||
};
|
||||
const matchers = new DevRouteMatcherManager(super.getRouteMatchers(), ensurer, this.dir);
|
||||
const extensions = this.nextConfig.pageExtensions;
|
||||
const extensionsExpression = new RegExp(`\\.(?:${extensions.join("|")})$`);
|
||||
// If the pages directory is available, then configure those matchers.
|
||||
if (pagesDir) {
|
||||
const fileReader = new BatchedFileReader(new DefaultFileReader({
|
||||
// Only allow files that have the correct extensions.
|
||||
pathnameFilter: (pathname)=>extensionsExpression.test(pathname)
|
||||
}));
|
||||
matchers.push(new DevPagesRouteMatcherProvider(pagesDir, extensions, fileReader, this.localeNormalizer));
|
||||
matchers.push(new DevPagesAPIRouteMatcherProvider(pagesDir, extensions, fileReader, this.localeNormalizer));
|
||||
}
|
||||
if (appDir) {
|
||||
// We create a new file reader for the app directory because we don't want
|
||||
// to include any folders or files starting with an underscore. This will
|
||||
// prevent the reader from wasting time reading files that we know we
|
||||
// don't care about.
|
||||
const fileReader = new BatchedFileReader(new DefaultFileReader({
|
||||
// Ignore any directory prefixed with an underscore.
|
||||
ignorePartFilter: (part)=>part.startsWith("_")
|
||||
}));
|
||||
matchers.push(new DevAppPageRouteMatcherProvider(appDir, extensions, fileReader));
|
||||
matchers.push(new DevAppRouteRouteMatcherProvider(appDir, extensions, fileReader));
|
||||
}
|
||||
return matchers;
|
||||
}
|
||||
getBuildId() {
|
||||
return "development";
|
||||
}
|
||||
async prepareImpl() {
|
||||
var _this_ready;
|
||||
setGlobal("distDir", this.distDir);
|
||||
setGlobal("phase", PHASE_DEVELOPMENT_SERVER);
|
||||
const telemetry = new Telemetry({
|
||||
distDir: this.distDir
|
||||
});
|
||||
await super.prepareImpl();
|
||||
await this.startServerSpan.traceChild("run-instrumentation-hook").traceAsyncFn(()=>this.runInstrumentationHookIfAvailable());
|
||||
await this.matchers.reload();
|
||||
// Store globals again to preserve changes made by the instrumentation hook.
|
||||
this.storeGlobals();
|
||||
(_this_ready = this.ready) == null ? void 0 : _this_ready.resolve();
|
||||
this.ready = undefined;
|
||||
// In dev, this needs to be called after prepare because the build entries won't be known in the constructor
|
||||
this.interceptionRoutePatterns = this.getinterceptionRoutePatterns();
|
||||
// This is required by the tracing subsystem.
|
||||
setGlobal("appDir", this.appDir);
|
||||
setGlobal("pagesDir", this.pagesDir);
|
||||
setGlobal("telemetry", telemetry);
|
||||
process.on("unhandledRejection", (reason)=>{
|
||||
if (isPostpone(reason)) {
|
||||
// React postpones that are unhandled might end up logged here but they're
|
||||
// not really errors. They're just part of rendering.
|
||||
return;
|
||||
}
|
||||
this.logErrorWithOriginalStack(reason, "unhandledRejection").catch(()=>{});
|
||||
});
|
||||
process.on("uncaughtException", (err)=>{
|
||||
this.logErrorWithOriginalStack(err, "uncaughtException").catch(()=>{});
|
||||
});
|
||||
}
|
||||
async close() {}
|
||||
async hasPage(pathname) {
|
||||
let normalizedPath;
|
||||
try {
|
||||
normalizedPath = normalizePagePath(pathname);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
// if normalizing the page fails it means it isn't valid
|
||||
// so it doesn't exist so don't throw and return false
|
||||
// to ensure we return 404 instead of 500
|
||||
return false;
|
||||
}
|
||||
if (isMiddlewareFile(normalizedPath)) {
|
||||
return findPageFile(this.dir, normalizedPath, this.nextConfig.pageExtensions, false).then(Boolean);
|
||||
}
|
||||
let appFile = null;
|
||||
let pagesFile = null;
|
||||
if (this.appDir) {
|
||||
appFile = await findPageFile(this.appDir, normalizedPath + "/page", this.nextConfig.pageExtensions, true);
|
||||
}
|
||||
if (this.pagesDir) {
|
||||
pagesFile = await findPageFile(this.pagesDir, normalizedPath, this.nextConfig.pageExtensions, false);
|
||||
}
|
||||
if (appFile && pagesFile) {
|
||||
return false;
|
||||
}
|
||||
return Boolean(appFile || pagesFile);
|
||||
}
|
||||
async runMiddleware(params) {
|
||||
try {
|
||||
const result = await super.runMiddleware({
|
||||
...params,
|
||||
onWarning: (warn)=>{
|
||||
this.logErrorWithOriginalStack(warn, "warning");
|
||||
}
|
||||
});
|
||||
if ("finished" in result) {
|
||||
return result;
|
||||
}
|
||||
result.waitUntil.catch((error)=>{
|
||||
this.logErrorWithOriginalStack(error, "unhandledRejection");
|
||||
});
|
||||
return result;
|
||||
} catch (error) {
|
||||
if (error instanceof DecodeError) {
|
||||
throw error;
|
||||
}
|
||||
/**
|
||||
* We only log the error when it is not a MiddlewareNotFound error as
|
||||
* in that case we should be already displaying a compilation error
|
||||
* which is what makes the module not found.
|
||||
*/ if (!(error instanceof MiddlewareNotFoundError)) {
|
||||
this.logErrorWithOriginalStack(error);
|
||||
}
|
||||
const err = getProperError(error);
|
||||
decorateServerError(err, COMPILER_NAMES.edgeServer);
|
||||
const { request, response, parsedUrl } = params;
|
||||
/**
|
||||
* When there is a failure for an internal Next.js request from
|
||||
* middleware we bypass the error without finishing the request
|
||||
* so we can serve the required chunks to render the error.
|
||||
*/ if (request.url.includes("/_next/static") || request.url.includes("/__nextjs_original-stack-frame")) {
|
||||
return {
|
||||
finished: false
|
||||
};
|
||||
}
|
||||
response.statusCode = 500;
|
||||
await this.renderError(err, request, response, parsedUrl.pathname);
|
||||
return {
|
||||
finished: true
|
||||
};
|
||||
}
|
||||
}
|
||||
async runEdgeFunction(params) {
|
||||
try {
|
||||
return super.runEdgeFunction({
|
||||
...params,
|
||||
onError: (err)=>this.logErrorWithOriginalStack(err, "app-dir"),
|
||||
onWarning: (warn)=>{
|
||||
this.logErrorWithOriginalStack(warn, "warning");
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof DecodeError) {
|
||||
throw error;
|
||||
}
|
||||
this.logErrorWithOriginalStack(error, "warning");
|
||||
const err = getProperError(error);
|
||||
const { req, res, page } = params;
|
||||
res.statusCode = 500;
|
||||
await this.renderError(err, req, res, page);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
async handleRequest(req, res, parsedUrl) {
|
||||
const span = trace("handle-request", undefined, {
|
||||
url: req.url
|
||||
});
|
||||
const result = await span.traceAsyncFn(async ()=>{
|
||||
var _this_ready;
|
||||
await ((_this_ready = this.ready) == null ? void 0 : _this_ready.promise);
|
||||
return await super.handleRequest(req, res, parsedUrl);
|
||||
});
|
||||
const memoryUsage = process.memoryUsage();
|
||||
span.traceChild("memory-usage", {
|
||||
url: req.url,
|
||||
"memory.rss": String(memoryUsage.rss),
|
||||
"memory.heapUsed": String(memoryUsage.heapUsed),
|
||||
"memory.heapTotal": String(memoryUsage.heapTotal)
|
||||
}).stop();
|
||||
return result;
|
||||
}
|
||||
async run(req, res, parsedUrl) {
|
||||
var _this_ready;
|
||||
await ((_this_ready = this.ready) == null ? void 0 : _this_ready.promise);
|
||||
const { basePath } = this.nextConfig;
|
||||
let originalPathname = null;
|
||||
// TODO: see if we can remove this in the future
|
||||
if (basePath && pathHasPrefix(parsedUrl.pathname || "/", basePath)) {
|
||||
// strip basePath before handling dev bundles
|
||||
// If replace ends up replacing the full url it'll be `undefined`, meaning we have to default it to `/`
|
||||
originalPathname = parsedUrl.pathname;
|
||||
parsedUrl.pathname = removePathPrefix(parsedUrl.pathname || "/", basePath);
|
||||
}
|
||||
const { pathname } = parsedUrl;
|
||||
if (pathname.startsWith("/_next")) {
|
||||
if (fs.existsSync(pathJoin(this.publicDir, "_next"))) {
|
||||
throw new Error(PUBLIC_DIR_MIDDLEWARE_CONFLICT);
|
||||
}
|
||||
}
|
||||
if (originalPathname) {
|
||||
// restore the path before continuing so that custom-routes can accurately determine
|
||||
// if they should match against the basePath or not
|
||||
parsedUrl.pathname = originalPathname;
|
||||
}
|
||||
try {
|
||||
return await super.run(req, res, parsedUrl);
|
||||
} catch (error) {
|
||||
const err = getProperError(error);
|
||||
formatServerError(err);
|
||||
this.logErrorWithOriginalStack(err).catch(()=>{});
|
||||
if (!res.sent) {
|
||||
res.statusCode = 500;
|
||||
try {
|
||||
return await this.renderError(err, req, res, pathname, {
|
||||
__NEXT_PAGE: isError(err) && err.page || pathname || ""
|
||||
});
|
||||
} catch (internalErr) {
|
||||
console.error(internalErr);
|
||||
res.body("Internal Server Error").send();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
async logErrorWithOriginalStack(err, type) {
|
||||
await this.bundlerService.logErrorWithOriginalStack(err, type);
|
||||
}
|
||||
getPagesManifest() {
|
||||
return NodeManifestLoader.require(pathJoin(this.serverDistDir, PAGES_MANIFEST)) ?? undefined;
|
||||
}
|
||||
getAppPathsManifest() {
|
||||
if (!this.enabledDirectories.app) return undefined;
|
||||
return NodeManifestLoader.require(pathJoin(this.serverDistDir, APP_PATHS_MANIFEST)) ?? undefined;
|
||||
}
|
||||
getinterceptionRoutePatterns() {
|
||||
const rewrites = generateInterceptionRoutesRewrites(Object.keys(this.appPathRoutes ?? {}), this.nextConfig.basePath).map((route)=>new RegExp(buildCustomRoute("rewrite", route).regex));
|
||||
return rewrites ?? [];
|
||||
}
|
||||
getMiddleware() {
|
||||
var _this_middleware;
|
||||
// We need to populate the match
|
||||
// field as it isn't serializable
|
||||
if (((_this_middleware = this.middleware) == null ? void 0 : _this_middleware.match) === null) {
|
||||
this.middleware.match = getMiddlewareRouteMatcher(this.middleware.matchers || []);
|
||||
}
|
||||
return this.middleware;
|
||||
}
|
||||
getNextFontManifest() {
|
||||
return undefined;
|
||||
}
|
||||
async hasMiddleware() {
|
||||
return this.hasPage(this.actualMiddlewareFile);
|
||||
}
|
||||
async ensureMiddleware(url) {
|
||||
return this.ensurePage({
|
||||
page: this.actualMiddlewareFile,
|
||||
clientOnly: false,
|
||||
definition: undefined,
|
||||
url
|
||||
});
|
||||
}
|
||||
async runInstrumentationHookIfAvailable() {
|
||||
if (this.actualInstrumentationHookFile && await this.ensurePage({
|
||||
page: this.actualInstrumentationHookFile,
|
||||
clientOnly: false,
|
||||
definition: undefined
|
||||
}).then(()=>true).catch(()=>false)) {
|
||||
try {
|
||||
const instrumentationHook = await require(pathJoin(this.distDir, "server", INSTRUMENTATION_HOOK_FILENAME));
|
||||
await instrumentationHook.register();
|
||||
} catch (err) {
|
||||
err.message = `An error occurred while loading instrumentation hook: ${err.message}`;
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
async ensureEdgeFunction({ page, appPaths, url }) {
|
||||
return this.ensurePage({
|
||||
page,
|
||||
appPaths,
|
||||
clientOnly: false,
|
||||
definition: undefined,
|
||||
url
|
||||
});
|
||||
}
|
||||
generateRoutes(_dev) {
|
||||
// In development we expose all compiled files for react-error-overlay's line show feature
|
||||
// We use unshift so that we're sure the routes is defined before Next's default routes
|
||||
// routes.unshift({
|
||||
// match: getPathMatch('/_next/development/:path*'),
|
||||
// type: 'route',
|
||||
// name: '_next/development catchall',
|
||||
// fn: async (req, res, params) => {
|
||||
// const p = pathJoin(this.distDir, ...(params.path || []))
|
||||
// await this.serveStatic(req, res, p)
|
||||
// return {
|
||||
// finished: true,
|
||||
// }
|
||||
// },
|
||||
// })
|
||||
}
|
||||
_filterAmpDevelopmentScript(html, event) {
|
||||
if (event.code !== "DISALLOWED_SCRIPT_TAG") {
|
||||
return true;
|
||||
}
|
||||
const snippetChunks = html.split("\n");
|
||||
let snippet;
|
||||
if (!(snippet = html.split("\n")[event.line - 1]) || !(snippet = snippet.substring(event.col))) {
|
||||
return true;
|
||||
}
|
||||
snippet = snippet + snippetChunks.slice(event.line).join("\n");
|
||||
snippet = snippet.substring(0, snippet.indexOf("</script>"));
|
||||
return !snippet.includes("data-amp-development-mode-only");
|
||||
}
|
||||
async getStaticPaths({ pathname, requestHeaders, page, isAppPath }) {
|
||||
// we lazy load the staticPaths to prevent the user
|
||||
// from waiting on them for the page to load in dev mode
|
||||
const __getStaticPaths = async ()=>{
|
||||
const { configFileName, publicRuntimeConfig, serverRuntimeConfig, httpAgentOptions } = this.nextConfig;
|
||||
const { locales, defaultLocale } = this.nextConfig.i18n || {};
|
||||
const staticPathsWorker = this.getStaticPathsWorker();
|
||||
try {
|
||||
const pathsResult = await staticPathsWorker.loadStaticPaths({
|
||||
dir: this.dir,
|
||||
distDir: this.distDir,
|
||||
pathname,
|
||||
config: {
|
||||
configFileName,
|
||||
publicRuntimeConfig,
|
||||
serverRuntimeConfig
|
||||
},
|
||||
httpAgentOptions,
|
||||
locales,
|
||||
defaultLocale,
|
||||
page,
|
||||
isAppPath,
|
||||
requestHeaders,
|
||||
cacheHandler: this.nextConfig.cacheHandler,
|
||||
fetchCacheKeyPrefix: this.nextConfig.experimental.fetchCacheKeyPrefix,
|
||||
isrFlushToDisk: this.nextConfig.experimental.isrFlushToDisk,
|
||||
maxMemoryCacheSize: this.nextConfig.cacheMaxMemorySize,
|
||||
ppr: this.nextConfig.experimental.ppr === true
|
||||
});
|
||||
return pathsResult;
|
||||
} finally{
|
||||
// we don't re-use workers so destroy the used one
|
||||
staticPathsWorker.end();
|
||||
}
|
||||
};
|
||||
const result = this.staticPathsCache.get(pathname);
|
||||
const nextInvoke = withCoalescedInvoke(__getStaticPaths)(`staticPaths-${pathname}`, []).then((res)=>{
|
||||
const { paths: staticPaths = [], fallback } = res.value;
|
||||
if (!isAppPath && this.nextConfig.output === "export") {
|
||||
if (fallback === "blocking") {
|
||||
throw new Error('getStaticPaths with "fallback: blocking" cannot be used with "output: export". See more info here: https://nextjs.org/docs/advanced-features/static-html-export');
|
||||
} else if (fallback === true) {
|
||||
throw new Error('getStaticPaths with "fallback: true" cannot be used with "output: export". See more info here: https://nextjs.org/docs/advanced-features/static-html-export');
|
||||
}
|
||||
}
|
||||
const value = {
|
||||
staticPaths,
|
||||
fallbackMode: fallback === "blocking" ? "blocking" : fallback === true ? "static" : fallback
|
||||
};
|
||||
this.staticPathsCache.set(pathname, value);
|
||||
return value;
|
||||
}).catch((err)=>{
|
||||
this.staticPathsCache.del(pathname);
|
||||
if (!result) throw err;
|
||||
Log.error(`Failed to generate static paths for ${pathname}:`);
|
||||
console.error(err);
|
||||
});
|
||||
if (result) {
|
||||
return result;
|
||||
}
|
||||
return nextInvoke;
|
||||
}
|
||||
storeGlobals() {
|
||||
this.originalFetch = global.fetch;
|
||||
}
|
||||
restorePatchedGlobals() {
|
||||
global.fetch = this.originalFetch ?? global.fetch;
|
||||
}
|
||||
async ensurePage(opts) {
|
||||
await this.bundlerService.ensurePage(opts);
|
||||
}
|
||||
async findPageComponents({ page, query, params, isAppPath, appPaths = null, shouldEnsure, url }) {
|
||||
var _this_ready;
|
||||
await ((_this_ready = this.ready) == null ? void 0 : _this_ready.promise);
|
||||
const compilationErr = await this.getCompilationError(page);
|
||||
if (compilationErr) {
|
||||
// Wrap build errors so that they don't get logged again
|
||||
throw new WrappedBuildError(compilationErr);
|
||||
}
|
||||
try {
|
||||
if (shouldEnsure || this.renderOpts.customServer) {
|
||||
await this.ensurePage({
|
||||
page,
|
||||
appPaths,
|
||||
clientOnly: false,
|
||||
definition: undefined,
|
||||
url
|
||||
});
|
||||
}
|
||||
this.nextFontManifest = super.getNextFontManifest();
|
||||
// before we re-evaluate a route module, we want to restore globals that might
|
||||
// have been patched previously to their original state so that we don't
|
||||
// patch on top of the previous patch, which would keep the context of the previous
|
||||
// patched global in memory, creating a memory leak.
|
||||
this.restorePatchedGlobals();
|
||||
return await super.findPageComponents({
|
||||
page,
|
||||
query,
|
||||
params,
|
||||
isAppPath,
|
||||
shouldEnsure,
|
||||
url
|
||||
});
|
||||
} catch (err) {
|
||||
if (err.code !== "ENOENT") {
|
||||
throw err;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
async getFallbackErrorComponents(url) {
|
||||
await this.bundlerService.getFallbackErrorComponents(url);
|
||||
return await loadDefaultErrorComponents(this.distDir);
|
||||
}
|
||||
async getCompilationError(page) {
|
||||
return await this.bundlerService.getCompilationError(page);
|
||||
}
|
||||
}
|
||||
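// Illustrative sketch (not part of the vendored file): the getStaticPaths method in the
// dev-server class above pairs a per-pathname result cache with coalesced invocation, so
// concurrent requests share one worker call and a failed call leaves nothing cached. The
// real method also refreshes stale entries in the background; the minimal
// re-implementation below (hypothetical names) only covers the coalescing and caching.
function createCoalescedCache(loader) {
    const cache = new Map(); // pathname -> resolved value
    const inflight = new Map(); // pathname -> pending promise shared by concurrent callers
    return async function get(pathname) {
        if (cache.has(pathname)) return cache.get(pathname);
        if (inflight.has(pathname)) return inflight.get(pathname);
        const promise = loader(pathname).then((value)=>{
            cache.set(pathname, value);
            return value;
        }).finally(()=>{
            // On failure nothing was cached, so the next call retries.
            inflight.delete(pathname);
        });
        inflight.set(pathname, promise);
        return promise;
    };
}
// Usage: const getPaths = createCoalescedCache(async ()=>({ paths: [], fallback: false }));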
|
||||
//# sourceMappingURL=next-dev-server.js.map
|
||||
1
node_modules/next/dist/esm/server/dev/next-dev-server.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
624
node_modules/next/dist/esm/server/dev/on-demand-entry-handler.js
generated
vendored
Normal file
@ -0,0 +1,624 @@
|
||||
import createDebug from "next/dist/compiled/debug";
|
||||
import { EventEmitter } from "events";
|
||||
import { findPageFile } from "../lib/find-page-file";
|
||||
import { getStaticInfoIncludingLayouts, runDependingOnPageType } from "../../build/entries";
|
||||
import { join, posix } from "path";
|
||||
import { normalizePathSep } from "../../shared/lib/page-path/normalize-path-sep";
|
||||
import { normalizePagePath } from "../../shared/lib/page-path/normalize-page-path";
|
||||
import { ensureLeadingSlash } from "../../shared/lib/page-path/ensure-leading-slash";
|
||||
import { removePagePathTail } from "../../shared/lib/page-path/remove-page-path-tail";
|
||||
import { reportTrigger } from "../../build/output";
|
||||
import getRouteFromEntrypoint from "../get-route-from-entrypoint";
|
||||
import { isInstrumentationHookFile, isInstrumentationHookFilename, isMiddlewareFile, isMiddlewareFilename } from "../../build/utils";
|
||||
import { PageNotFoundError, stringifyError } from "../../shared/lib/utils";
|
||||
import { COMPILER_INDEXES, COMPILER_NAMES, RSC_MODULE_TYPES, UNDERSCORE_NOT_FOUND_ROUTE_ENTRY } from "../../shared/lib/constants";
|
||||
import { PAGE_SEGMENT_KEY } from "../../shared/lib/segment";
|
||||
import { HMR_ACTIONS_SENT_TO_BROWSER } from "./hot-reloader-types";
|
||||
import { isAppPageRouteDefinition } from "../future/route-definitions/app-page-route-definition";
|
||||
import { scheduleOnNextTick } from "../../lib/scheduler";
|
||||
import { Batcher } from "../../lib/batcher";
|
||||
import { normalizeAppPath } from "../../shared/lib/router/utils/app-paths";
|
||||
import { PAGE_TYPES } from "../../lib/page-types";
|
||||
const debug = createDebug("next:on-demand-entry-handler");
|
||||
/**
|
||||
* Returns object keys with type inferred from the object key
|
||||
*/ const keys = Object.keys;
|
||||
const COMPILER_KEYS = keys(COMPILER_INDEXES);
|
||||
function treePathToEntrypoint(segmentPath, parentPath) {
|
||||
const [parallelRouteKey, segment] = segmentPath;
|
||||
// TODO-APP: modify this path to cover parallelRouteKey convention
|
||||
const path = (parentPath ? parentPath + "/" : "") + (parallelRouteKey !== "children" && !segment.startsWith("@") ? `@${parallelRouteKey}/` : "") + (segment === "" ? "page" : segment);
|
||||
// Last segment
|
||||
if (segmentPath.length === 2) {
|
||||
return path;
|
||||
}
|
||||
const childSegmentPath = segmentPath.slice(2);
|
||||
return treePathToEntrypoint(childSegmentPath, path);
|
||||
}
|
||||
function convertDynamicParamTypeToSyntax(dynamicParamTypeShort, param) {
|
||||
switch(dynamicParamTypeShort){
|
||||
case "c":
|
||||
case "ci":
|
||||
return `[...${param}]`;
|
||||
case "oc":
|
||||
return `[[...${param}]]`;
|
||||
case "d":
|
||||
case "di":
|
||||
return `[${param}]`;
|
||||
default:
|
||||
throw new Error("Unknown dynamic param type");
|
||||
}
|
||||
}
|
||||
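// Illustrative usage (not part of the vendored file) of the shorthand mapping above:
// "c"/"ci" are catch-all segments, "oc" is an optional catch-all, "d"/"di" are plain
// dynamic segments.
console.assert(convertDynamicParamTypeToSyntax("c", "slug") === "[...slug]");
console.assert(convertDynamicParamTypeToSyntax("oc", "slug") === "[[...slug]]");
console.assert(convertDynamicParamTypeToSyntax("d", "id") === "[id]");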
/**
|
||||
* format: {compiler type}@{page type}@{page path}
|
||||
* e.g. client@pages@/index
|
||||
* e.g. server@app@app/page
|
||||
*
|
||||
* This guarantees the uniqueness for each page, to avoid conflicts between app/ and pages/
|
||||
*/ export function getEntryKey(compilerType, pageBundleType, page) {
|
||||
// TODO: handle the /children slot better
|
||||
// this is a quick hack to handle when children is provided as children/page instead of /page
|
||||
const pageKey = page.replace(/(@[^/]+)\/children/g, "$1");
|
||||
return `${compilerType}@${pageBundleType}@${pageKey}`;
|
||||
}
|
||||
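// Illustrative usage (not part of the vendored file): entry keys follow the
// "{compiler}@{pageType}@{page}" format described above, and "@slot/children" paths are
// collapsed to "@slot".
console.assert(getEntryKey("client", "pages", "/index") === "client@pages@/index");
console.assert(getEntryKey("server", "app", "/@modal/children/page") === "server@app@/@modal/page");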
function getPageBundleType(pageBundlePath) {
|
||||
// Handle special case for /_error
|
||||
if (pageBundlePath === "/_error") return PAGE_TYPES.PAGES;
|
||||
if (isMiddlewareFilename(pageBundlePath)) return PAGE_TYPES.ROOT;
|
||||
return pageBundlePath.startsWith("pages/") ? PAGE_TYPES.PAGES : pageBundlePath.startsWith("app/") ? PAGE_TYPES.APP : PAGE_TYPES.ROOT;
|
||||
}
|
||||
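// Illustrative usage (not part of the vendored file) of the bundle-path classification
// above; middleware and instrumentation bundles fall into PAGE_TYPES.ROOT.
console.assert(getPageBundleType("pages/blog") === PAGE_TYPES.PAGES);
console.assert(getPageBundleType("app/blog/page") === PAGE_TYPES.APP);
console.assert(getPageBundleType("/_error") === PAGE_TYPES.PAGES);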
function getEntrypointsFromTree(tree, isFirst, parentPath = []) {
|
||||
const [segment, parallelRoutes] = tree;
|
||||
const currentSegment = Array.isArray(segment) ? convertDynamicParamTypeToSyntax(segment[2], segment[0]) : segment;
|
||||
const isPageSegment = currentSegment.startsWith(PAGE_SEGMENT_KEY);
|
||||
const currentPath = [
|
||||
...parentPath,
|
||||
isPageSegment ? "" : currentSegment
|
||||
];
|
||||
if (!isFirst && isPageSegment) {
|
||||
// TODO get rid of '' at the start of tree
|
||||
return [
|
||||
treePathToEntrypoint(currentPath.slice(1))
|
||||
];
|
||||
}
|
||||
return Object.keys(parallelRoutes).reduce((paths, key)=>{
|
||||
const childTree = parallelRoutes[key];
|
||||
const childPages = getEntrypointsFromTree(childTree, false, [
|
||||
...currentPath,
|
||||
key
|
||||
]);
|
||||
return [
|
||||
...paths,
|
||||
...childPages
|
||||
];
|
||||
}, []);
|
||||
}
|
||||
export const ADDED = Symbol("added");
|
||||
export const BUILDING = Symbol("building");
|
||||
export const BUILT = Symbol("built");
|
||||
export var EntryTypes;
|
||||
(function(EntryTypes) {
|
||||
EntryTypes[EntryTypes["ENTRY"] = 0] = "ENTRY";
|
||||
EntryTypes[EntryTypes["CHILD_ENTRY"] = 1] = "CHILD_ENTRY";
|
||||
})(EntryTypes || (EntryTypes = {}));
|
||||
const entriesMap = new Map();
|
||||
// remove /server from end of output for server compiler
|
||||
const normalizeOutputPath = (dir)=>dir.replace(/[/\\]server$/, "");
|
||||
export const getEntries = (dir)=>{
|
||||
dir = normalizeOutputPath(dir);
|
||||
const entries = entriesMap.get(dir) || {};
|
||||
entriesMap.set(dir, entries);
|
||||
return entries;
|
||||
};
|
||||
const invalidators = new Map();
|
||||
export const getInvalidator = (dir)=>{
|
||||
dir = normalizeOutputPath(dir);
|
||||
return invalidators.get(dir);
|
||||
};
|
||||
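// Illustrative usage (not part of the vendored file): entries and invalidators are keyed
// by the compiler output directory with a trailing "/server" stripped, so the server and
// client compilers of one dist directory share a single registry, e.g.
// getEntries("/project/.next/server") and getEntries("/project/.next") return the same object.
console.assert(normalizeOutputPath("/project/.next/server") === "/project/.next");
console.assert(normalizeOutputPath("/project/.next") === "/project/.next");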
const doneCallbacks = new EventEmitter();
|
||||
const lastClientAccessPages = [
|
||||
""
|
||||
];
|
||||
const lastServerAccessPagesForAppDir = [
|
||||
""
|
||||
];
|
||||
// Make sure only one invalidation happens at a time
|
||||
// Otherwise, webpack hash gets changed and it'll force the client to reload.
|
||||
class Invalidator {
|
||||
constructor(multiCompiler){
|
||||
this.building = new Set();
|
||||
this.rebuildAgain = new Set();
|
||||
this.multiCompiler = multiCompiler;
|
||||
}
|
||||
shouldRebuildAll() {
|
||||
return this.rebuildAgain.size > 0;
|
||||
}
|
||||
invalidate(compilerKeys = COMPILER_KEYS) {
|
||||
for (const key of compilerKeys){
|
||||
var _this_multiCompiler_compilers_COMPILER_INDEXES_key_watching;
|
||||
// If there's a current build processing, we won't abort it by invalidating.
|
||||
// (If aborted, it'll cause a client side hard reload)
|
||||
// But let it invalidate just after the current build completes.
|
||||
// So, it can re-build the queued pages at once.
|
||||
if (this.building.has(key)) {
|
||||
this.rebuildAgain.add(key);
|
||||
continue;
|
||||
}
|
||||
this.building.add(key);
|
||||
(_this_multiCompiler_compilers_COMPILER_INDEXES_key_watching = this.multiCompiler.compilers[COMPILER_INDEXES[key]].watching) == null ? void 0 : _this_multiCompiler_compilers_COMPILER_INDEXES_key_watching.invalidate();
|
||||
}
|
||||
}
|
||||
startBuilding(compilerKey) {
|
||||
this.building.add(compilerKey);
|
||||
}
|
||||
doneBuilding(compilerKeys = []) {
|
||||
const rebuild = [];
|
||||
for (const key of compilerKeys){
|
||||
this.building.delete(key);
|
||||
if (this.rebuildAgain.has(key)) {
|
||||
rebuild.push(key);
|
||||
this.rebuildAgain.delete(key);
|
||||
}
|
||||
}
|
||||
this.invalidate(rebuild);
|
||||
}
|
||||
willRebuild(compilerKey) {
|
||||
return this.rebuildAgain.has(compilerKey);
|
||||
}
|
||||
}
|
||||
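// Illustrative sketch (not part of the vendored file): the Invalidator above defers any
// invalidation that arrives while a compiler is still building and replays it from
// doneBuilding(), so an in-flight webpack build is never aborted (which would force a
// client hard reload). A stand-alone, per-key version of that bookkeeping with
// hypothetical names:
class MiniInvalidator {
    constructor(onInvalidate) {
        this.onInvalidate = onInvalidate; // called when it is safe to invalidate a compiler
        this.building = new Set();
        this.rebuildAgain = new Set();
    }
    invalidate(key) {
        if (this.building.has(key)) {
            // A build is running: queue the invalidation instead of aborting the build.
            this.rebuildAgain.add(key);
            return;
        }
        this.building.add(key);
        this.onInvalidate(key);
    }
    doneBuilding(key) {
        this.building.delete(key);
        if (this.rebuildAgain.delete(key)) {
            // Replay the queued invalidation now that the previous build has finished.
            this.invalidate(key);
        }
    }
}
// Usage: const inv = new MiniInvalidator((key)=>console.log("invalidate", key));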
function disposeInactiveEntries(entries, maxInactiveAge) {
|
||||
Object.keys(entries).forEach((entryKey)=>{
|
||||
const entryData = entries[entryKey];
|
||||
const { lastActiveTime, status, dispose, bundlePath } = entryData;
|
||||
// TODO-APP: implement disposing of CHILD_ENTRY
|
||||
if (entryData.type === 1) {
|
||||
return;
|
||||
}
|
||||
// For the root middleware and the instrumentation hook files,
|
||||
// we don't dispose them periodically as they're needed for every request.
|
||||
if (isMiddlewareFilename(bundlePath) || isInstrumentationHookFilename(bundlePath)) {
|
||||
return;
|
||||
}
|
||||
if (dispose) // Skip pages already scheduled for disposing
|
||||
return;
|
||||
// This means this entry is currently building or just added
|
||||
// We don't need to dispose those entries.
|
||||
if (status !== BUILT) return;
|
||||
// We should not dispose the last accessed page even if we didn't get any pings.
|
||||
// Sometimes our XHR ping may have to wait for other requests to complete.
|
||||
// In that case, we should not dispose the currently viewed page.
|
||||
if (lastClientAccessPages.includes(entryKey) || lastServerAccessPagesForAppDir.includes(entryKey)) return;
|
||||
if (lastActiveTime && Date.now() - lastActiveTime > maxInactiveAge) {
|
||||
entries[entryKey].dispose = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
// Normalize both app paths and page paths
|
||||
function tryToNormalizePagePath(page) {
|
||||
try {
|
||||
return normalizePagePath(page);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
throw new PageNotFoundError(page);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Attempts to find a page file path from the given pages absolute directory,
|
||||
* a page and allowed extensions. If the page can't be found it will throw an
|
||||
* error. It defaults the `/_error` page to Next.js internal error page.
|
||||
*
|
||||
* @param rootDir Absolute path to the project root.
|
||||
* @param page The page normalized (it will be denormalized).
|
||||
* @param extensions Array of page extensions.
|
||||
* @param pagesDir Absolute path to the pages folder with trailing `/pages`.
|
||||
* @param appDir Absolute path to the app folder with trailing `/app`.
|
||||
*/ export async function findPagePathData(rootDir, page, extensions, pagesDir, appDir) {
|
||||
const normalizedPagePath = tryToNormalizePagePath(page);
|
||||
let pagePath = null;
|
||||
const isInstrumentation = isInstrumentationHookFile(normalizedPagePath);
|
||||
if (isMiddlewareFile(normalizedPagePath) || isInstrumentation) {
|
||||
pagePath = await findPageFile(rootDir, normalizedPagePath, extensions, false);
|
||||
if (!pagePath) {
|
||||
throw new PageNotFoundError(normalizedPagePath);
|
||||
}
|
||||
const pageUrl = ensureLeadingSlash(removePagePathTail(normalizePathSep(pagePath), {
|
||||
extensions
|
||||
}));
|
||||
let bundlePath = normalizedPagePath;
|
||||
let pageKey = posix.normalize(pageUrl);
|
||||
if (isInstrumentation) {
|
||||
bundlePath = bundlePath.replace("/src", "");
|
||||
pageKey = page.replace("/src", "");
|
||||
}
|
||||
return {
|
||||
filename: join(rootDir, pagePath),
|
||||
bundlePath: bundlePath.slice(1),
|
||||
page: pageKey
|
||||
};
|
||||
}
|
||||
// Check appDir first falling back to pagesDir
|
||||
if (appDir) {
|
||||
if (page === UNDERSCORE_NOT_FOUND_ROUTE_ENTRY) {
|
||||
const notFoundPath = await findPageFile(appDir, "not-found", extensions, true);
|
||||
if (notFoundPath) {
|
||||
return {
|
||||
filename: join(appDir, notFoundPath),
|
||||
bundlePath: `app${UNDERSCORE_NOT_FOUND_ROUTE_ENTRY}`,
|
||||
page: UNDERSCORE_NOT_FOUND_ROUTE_ENTRY
|
||||
};
|
||||
}
|
||||
return {
|
||||
filename: require.resolve("next/dist/client/components/not-found-error"),
|
||||
bundlePath: `app${UNDERSCORE_NOT_FOUND_ROUTE_ENTRY}`,
|
||||
page: UNDERSCORE_NOT_FOUND_ROUTE_ENTRY
|
||||
};
|
||||
}
|
||||
pagePath = await findPageFile(appDir, normalizedPagePath, extensions, true);
|
||||
if (pagePath) {
|
||||
const pageUrl = ensureLeadingSlash(removePagePathTail(normalizePathSep(pagePath), {
|
||||
keepIndex: true,
|
||||
extensions
|
||||
}));
|
||||
return {
|
||||
filename: join(appDir, pagePath),
|
||||
bundlePath: posix.join("app", pageUrl),
|
||||
page: posix.normalize(pageUrl)
|
||||
};
|
||||
}
|
||||
}
|
||||
if (!pagePath && pagesDir) {
|
||||
pagePath = await findPageFile(pagesDir, normalizedPagePath, extensions, false);
|
||||
}
|
||||
if (pagePath !== null && pagesDir) {
|
||||
const pageUrl = ensureLeadingSlash(removePagePathTail(normalizePathSep(pagePath), {
|
||||
extensions
|
||||
}));
|
||||
return {
|
||||
filename: join(pagesDir, pagePath),
|
||||
bundlePath: posix.join("pages", normalizePagePath(pageUrl)),
|
||||
page: posix.normalize(pageUrl)
|
||||
};
|
||||
}
|
||||
if (page === "/_error") {
|
||||
return {
|
||||
filename: require.resolve("next/dist/pages/_error"),
|
||||
bundlePath: page,
|
||||
page: normalizePathSep(page)
|
||||
};
|
||||
} else {
|
||||
throw new PageNotFoundError(normalizedPagePath);
|
||||
}
|
||||
}
|
||||
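// Summary (not part of the vendored file) of the lookup order implemented by
// findPagePathData above:
//   1. middleware / instrumentation files are resolved from the project root;
//   2. app/ is checked next, with a dedicated branch for the not-found entry that falls
//      back to the built-in not-found component;
//   3. pages/ is checked after that;
//   4. "/_error" falls back to the built-in next/dist/pages/_error;
//   5. anything else throws PageNotFoundError.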
export function onDemandEntryHandler({ hotReloader, maxInactiveAge, multiCompiler, nextConfig, pagesBufferLength, pagesDir, rootDir, appDir }) {
|
||||
const hasAppDir = !!appDir;
|
||||
let curInvalidator = getInvalidator(multiCompiler.outputPath);
|
||||
const curEntries = getEntries(multiCompiler.outputPath);
|
||||
if (!curInvalidator) {
|
||||
curInvalidator = new Invalidator(multiCompiler);
|
||||
invalidators.set(multiCompiler.outputPath, curInvalidator);
|
||||
}
|
||||
const startBuilding = (compilation)=>{
|
||||
const compilationName = compilation.name;
|
||||
curInvalidator.startBuilding(compilationName);
|
||||
};
|
||||
for (const compiler of multiCompiler.compilers){
|
||||
compiler.hooks.make.tap("NextJsOnDemandEntries", startBuilding);
|
||||
}
|
||||
function getPagePathsFromEntrypoints(type, entrypoints) {
|
||||
const pagePaths = [];
|
||||
for (const entrypoint of entrypoints.values()){
|
||||
const page = getRouteFromEntrypoint(entrypoint.name, hasAppDir);
|
||||
if (page) {
|
||||
var _entrypoint_name;
|
||||
const pageBundleType = ((_entrypoint_name = entrypoint.name) == null ? void 0 : _entrypoint_name.startsWith("app/")) ? PAGE_TYPES.APP : PAGE_TYPES.PAGES;
|
||||
pagePaths.push(getEntryKey(type, pageBundleType, page));
|
||||
} else if (isMiddlewareFilename(entrypoint.name) || isInstrumentationHookFilename(entrypoint.name)) {
|
||||
pagePaths.push(getEntryKey(type, PAGE_TYPES.ROOT, `/${entrypoint.name}`));
|
||||
}
|
||||
}
|
||||
return pagePaths;
|
||||
}
|
||||
for (const compiler of multiCompiler.compilers){
|
||||
compiler.hooks.done.tap("NextJsOnDemandEntries", ()=>{
|
||||
var _getInvalidator;
|
||||
return (_getInvalidator = getInvalidator(compiler.outputPath)) == null ? void 0 : _getInvalidator.doneBuilding([
|
||||
compiler.name
|
||||
]);
|
||||
});
|
||||
}
|
||||
multiCompiler.hooks.done.tap("NextJsOnDemandEntries", (multiStats)=>{
|
||||
var _getInvalidator;
|
||||
const [clientStats, serverStats, edgeServerStats] = multiStats.stats;
|
||||
const entryNames = [
|
||||
...getPagePathsFromEntrypoints(COMPILER_NAMES.client, clientStats.compilation.entrypoints),
|
||||
...getPagePathsFromEntrypoints(COMPILER_NAMES.server, serverStats.compilation.entrypoints),
|
||||
...edgeServerStats ? getPagePathsFromEntrypoints(COMPILER_NAMES.edgeServer, edgeServerStats.compilation.entrypoints) : []
|
||||
];
|
||||
for (const name of entryNames){
|
||||
const entry = curEntries[name];
|
||||
if (!entry) {
|
||||
continue;
|
||||
}
|
||||
if (entry.status !== BUILDING) {
|
||||
continue;
|
||||
}
|
||||
entry.status = BUILT;
|
||||
doneCallbacks.emit(name);
|
||||
}
|
||||
(_getInvalidator = getInvalidator(multiCompiler.outputPath)) == null ? void 0 : _getInvalidator.doneBuilding([
|
||||
...COMPILER_KEYS
|
||||
]);
|
||||
});
|
||||
const pingIntervalTime = Math.max(1000, Math.min(5000, maxInactiveAge));
|
||||
setInterval(function() {
|
||||
disposeInactiveEntries(curEntries, maxInactiveAge);
|
||||
}, pingIntervalTime + 1000).unref();
|
||||
function handleAppDirPing(tree) {
|
||||
const pages = getEntrypointsFromTree(tree, true);
|
||||
for (const page of pages){
|
||||
for (const compilerType of [
|
||||
COMPILER_NAMES.client,
|
||||
COMPILER_NAMES.server,
|
||||
COMPILER_NAMES.edgeServer
|
||||
]){
|
||||
const entryKey = getEntryKey(compilerType, PAGE_TYPES.APP, `/${page}`);
|
||||
const entryInfo = curEntries[entryKey];
|
||||
// If there's no entry, it may have been invalidated and needs to be re-built.
|
||||
if (!entryInfo) {
|
||||
continue;
|
||||
}
|
||||
// We don't need to maintain active state of anything other than BUILT entries
|
||||
if (entryInfo.status !== BUILT) continue;
|
||||
// If there's an entryInfo
|
||||
if (!lastServerAccessPagesForAppDir.includes(entryKey)) {
|
||||
lastServerAccessPagesForAppDir.unshift(entryKey);
|
||||
// Maintain the buffer max length
|
||||
// TODO: verify that the current pageKey is not at the end of the array as multiple entrypoints can exist
|
||||
if (lastServerAccessPagesForAppDir.length > pagesBufferLength) {
|
||||
lastServerAccessPagesForAppDir.pop();
|
||||
}
|
||||
}
|
||||
entryInfo.lastActiveTime = Date.now();
|
||||
entryInfo.dispose = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
function handlePing(pg) {
|
||||
const page = normalizePathSep(pg);
|
||||
for (const compilerType of [
|
||||
COMPILER_NAMES.client,
|
||||
COMPILER_NAMES.server,
|
||||
COMPILER_NAMES.edgeServer
|
||||
]){
|
||||
const entryKey = getEntryKey(compilerType, PAGE_TYPES.PAGES, page);
|
||||
const entryInfo = curEntries[entryKey];
|
||||
// If there's no entry, it may have been invalidated and needs to be re-built.
|
||||
if (!entryInfo) {
|
||||
// if (page !== lastEntry) client pings, but there's no entry for page
|
||||
if (compilerType === COMPILER_NAMES.client) {
|
||||
return;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// We don't need to maintain active state of anything other than BUILT entries
|
||||
if (entryInfo.status !== BUILT) continue;
|
||||
// If there's an entryInfo
|
||||
if (!lastClientAccessPages.includes(entryKey)) {
|
||||
lastClientAccessPages.unshift(entryKey);
|
||||
// Maintain the buffer max length
|
||||
if (lastClientAccessPages.length > pagesBufferLength) {
|
||||
lastClientAccessPages.pop();
|
||||
}
|
||||
}
|
||||
entryInfo.lastActiveTime = Date.now();
|
||||
entryInfo.dispose = false;
|
||||
}
|
||||
return;
|
||||
}
|
||||
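// Illustrative sketch (not part of the vendored file): both ping handlers above keep a
// small most-recently-accessed buffer (unshift the key, pop once the buffer exceeds
// pagesBufferLength) and refresh lastActiveTime so disposeInactiveEntries keeps the pages
// a client is actively viewing. A stand-alone version of that bookkeeping with
// hypothetical names:
function touchEntry(buffer, maxLength, entryKey, entry) {
    if (!buffer.includes(entryKey)) {
        buffer.unshift(entryKey);
        // Drop the least recently pinged key once the buffer is full.
        if (buffer.length > maxLength) buffer.pop();
    }
    entry.lastActiveTime = Date.now();
    entry.dispose = false;
}
// Usage: const recent = [""]; touchEntry(recent, 2, "client@pages@/index", { status: "BUILT" });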
async function ensurePageImpl({ page, appPaths, definition, isApp, url }) {
|
||||
const stalledTime = 60;
|
||||
const stalledEnsureTimeout = setTimeout(()=>{
|
||||
debug(`Ensuring ${page} has taken longer than ${stalledTime}s, if this continues to stall this may be a bug`);
|
||||
}, stalledTime * 1000);
|
||||
try {
|
||||
let route;
|
||||
if (definition) {
|
||||
route = definition;
|
||||
} else {
|
||||
route = await findPagePathData(rootDir, page, nextConfig.pageExtensions, pagesDir, appDir);
|
||||
}
|
||||
const isInsideAppDir = !!appDir && route.filename.startsWith(appDir);
|
||||
if (typeof isApp === "boolean" && isApp !== isInsideAppDir) {
|
||||
Error.stackTraceLimit = 15;
|
||||
throw new Error(`Ensure bailed, found path "${route.page}" does not match ensure type (${isApp ? "app" : "pages"})`);
|
||||
}
|
||||
const pageBundleType = getPageBundleType(route.bundlePath);
|
||||
const addEntry = (compilerType)=>{
|
||||
const entryKey = getEntryKey(compilerType, pageBundleType, route.page);
|
||||
if (curEntries[entryKey] && // there can be an overlap in the entryKey for the instrumentation hook file and a page named the same
|
||||
// this is a quick fix to support this scenario by overwriting the instrumentation hook entry, since we only use it one time
|
||||
// any changes to the instrumentation hook file will require a restart of the dev server anyway
|
||||
!isInstrumentationHookFilename(curEntries[entryKey].bundlePath)) {
|
||||
curEntries[entryKey].dispose = false;
|
||||
curEntries[entryKey].lastActiveTime = Date.now();
|
||||
if (curEntries[entryKey].status === BUILT) {
|
||||
return {
|
||||
entryKey,
|
||||
newEntry: false,
|
||||
shouldInvalidate: false
|
||||
};
|
||||
}
|
||||
return {
|
||||
entryKey,
|
||||
newEntry: false,
|
||||
shouldInvalidate: true
|
||||
};
|
||||
}
|
||||
curEntries[entryKey] = {
|
||||
type: 0,
|
||||
appPaths,
|
||||
absolutePagePath: route.filename,
|
||||
request: route.filename,
|
||||
bundlePath: route.bundlePath,
|
||||
dispose: false,
|
||||
lastActiveTime: Date.now(),
|
||||
status: ADDED
|
||||
};
|
||||
return {
|
||||
entryKey: entryKey,
|
||||
newEntry: true,
|
||||
shouldInvalidate: true
|
||||
};
|
||||
};
|
||||
const staticInfo = await getStaticInfoIncludingLayouts({
|
||||
page,
|
||||
pageFilePath: route.filename,
|
||||
isInsideAppDir,
|
||||
pageExtensions: nextConfig.pageExtensions,
|
||||
isDev: true,
|
||||
config: nextConfig,
|
||||
appDir
|
||||
});
|
||||
const added = new Map();
|
||||
const isServerComponent = isInsideAppDir && staticInfo.rsc !== RSC_MODULE_TYPES.client;
|
||||
runDependingOnPageType({
|
||||
page: route.page,
|
||||
pageRuntime: staticInfo.runtime,
|
||||
pageType: pageBundleType,
|
||||
onClient: ()=>{
|
||||
// Skip adding the client entry for app / Server Components.
|
||||
if (isServerComponent || isInsideAppDir) {
|
||||
return;
|
||||
}
|
||||
added.set(COMPILER_NAMES.client, addEntry(COMPILER_NAMES.client));
|
||||
},
|
||||
onServer: ()=>{
|
||||
added.set(COMPILER_NAMES.server, addEntry(COMPILER_NAMES.server));
|
||||
const edgeServerEntry = getEntryKey(COMPILER_NAMES.edgeServer, pageBundleType, route.page);
|
||||
if (curEntries[edgeServerEntry] && !isInstrumentationHookFile(route.page)) {
|
||||
// Runtime switched from edge to server
|
||||
delete curEntries[edgeServerEntry];
|
||||
}
|
||||
},
|
||||
onEdgeServer: ()=>{
|
||||
added.set(COMPILER_NAMES.edgeServer, addEntry(COMPILER_NAMES.edgeServer));
|
||||
const serverEntry = getEntryKey(COMPILER_NAMES.server, pageBundleType, route.page);
|
||||
if (curEntries[serverEntry] && !isInstrumentationHookFile(route.page)) {
|
||||
// Runtime switched from server to edge
|
||||
delete curEntries[serverEntry];
|
||||
}
|
||||
}
|
||||
});
|
||||
const addedValues = [
|
||||
...added.values()
|
||||
];
|
||||
const entriesThatShouldBeInvalidated = [
|
||||
...added.entries()
|
||||
].filter(([, entry])=>entry.shouldInvalidate);
|
||||
const hasNewEntry = addedValues.some((entry)=>entry.newEntry);
|
||||
if (hasNewEntry) {
|
||||
const routePage = isApp ? route.page : normalizeAppPath(route.page);
|
||||
reportTrigger(routePage, url);
|
||||
}
|
||||
if (entriesThatShouldBeInvalidated.length > 0) {
|
||||
const invalidatePromise = Promise.all(entriesThatShouldBeInvalidated.map(([compilerKey, { entryKey }])=>{
|
||||
return new Promise((resolve, reject)=>{
|
||||
doneCallbacks.once(entryKey, (err)=>{
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
// If the invalidation also triggers a rebuild, we need to
|
||||
// wait for that additional build to prevent race conditions.
|
||||
const needsRebuild = curInvalidator.willRebuild(compilerKey);
|
||||
if (needsRebuild) {
|
||||
doneCallbacks.once(entryKey, (rebuildErr)=>{
|
||||
if (rebuildErr) {
|
||||
return reject(rebuildErr);
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}));
|
||||
curInvalidator.invalidate([
|
||||
...added.keys()
|
||||
]);
|
||||
await invalidatePromise;
|
||||
}
|
||||
} finally{
|
||||
clearTimeout(stalledEnsureTimeout);
|
||||
}
|
||||
}
|
||||
// Make sure that we won't have multiple invalidations ongoing concurrently.
|
||||
const batcher = Batcher.create({
|
||||
// The cache key here is composed of the elements that affect the
|
||||
// compilation, namely, the page, whether it's client only, and whether
|
||||
// it's an app page. This ensures that we don't have multiple compilations
|
||||
// for the same page happening concurrently.
|
||||
//
|
||||
// We don't include the whole match because it contains match specific
|
||||
// parameters (like route params) that would just bust this cache. Any
|
||||
// details that would possibly bust the cache should be listed here.
|
||||
cacheKeyFn: (options)=>JSON.stringify(options),
|
||||
// Schedule the invocation of the ensurePageImpl function on the next tick.
|
||||
schedulerFn: scheduleOnNextTick
|
||||
});
|
||||
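// Illustrative note (not part of the vendored file): the batcher's cache key is the
// JSON-serialized options object passed to batch(), so concurrent ensurePage() calls
// with identical options collapse into a single ensurePageImpl() invocation, while
// calls for different pages (or app/pages variants) still compile independently.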
return {
|
||||
async ensurePage ({ page, appPaths = null, definition, isApp, url }) {
|
||||
// If the route is actually an app page route, then we should have access
|
||||
// to the app route definition, and therefore, the appPaths from it.
|
||||
if (!appPaths && definition && isAppPageRouteDefinition(definition)) {
|
||||
appPaths = definition.appPaths;
|
||||
}
|
||||
// Wrap the invocation of the ensurePageImpl function in the pending
|
||||
// wrapper, which will ensure that we don't have multiple compilations
|
||||
// for the same page happening concurrently.
|
||||
return batcher.batch({
|
||||
page,
|
||||
appPaths,
|
||||
definition,
|
||||
isApp
|
||||
}, async ()=>{
|
||||
await ensurePageImpl({
|
||||
page,
|
||||
appPaths,
|
||||
definition,
|
||||
isApp,
|
||||
url
|
||||
});
|
||||
});
|
||||
},
|
||||
onHMR (client, getHmrServerError) {
|
||||
let bufferedHmrServerError = null;
|
||||
client.addEventListener("close", ()=>{
|
||||
bufferedHmrServerError = null;
|
||||
});
|
||||
client.addEventListener("message", ({ data })=>{
|
||||
try {
|
||||
const error = getHmrServerError();
|
||||
// A new error occurred: send it to the client and reset the buffered error
|
||||
if (!bufferedHmrServerError && error) {
|
||||
hotReloader.send({
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.SERVER_ERROR,
|
||||
errorJSON: stringifyError(error)
|
||||
});
|
||||
bufferedHmrServerError = null;
|
||||
}
|
||||
const parsedData = JSON.parse(typeof data !== "string" ? data.toString() : data);
|
||||
if (parsedData.event === "ping") {
|
||||
if (parsedData.appDirRoute) {
|
||||
handleAppDirPing(parsedData.tree);
|
||||
} else {
|
||||
handlePing(parsedData.page);
|
||||
}
|
||||
}
|
||||
} catch {}
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
//# sourceMappingURL=on-demand-entry-handler.js.map
|
||||
1
node_modules/next/dist/esm/server/dev/on-demand-entry-handler.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
57
node_modules/next/dist/esm/server/dev/parse-version-info.js
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
import * as semver from "next/dist/compiled/semver";
|
||||
export function parseVersionInfo(o) {
|
||||
const latest = semver.parse(o.latest);
|
||||
const canary = semver.parse(o.canary);
|
||||
const installedParsed = semver.parse(o.installed);
|
||||
const installed = o.installed;
|
||||
if (installedParsed && latest && canary) {
|
||||
if (installedParsed.major < latest.major) {
|
||||
// Old major version
|
||||
return {
|
||||
staleness: "stale-major",
|
||||
expected: latest.raw,
|
||||
installed
|
||||
};
|
||||
} else if (installedParsed.prerelease[0] === "canary" && semver.lt(installedParsed, canary)) {
|
||||
// Matching major, but old canary
|
||||
return {
|
||||
staleness: "stale-prerelease",
|
||||
expected: canary.raw,
|
||||
installed
|
||||
};
|
||||
} else if (!installedParsed.prerelease.length && semver.lt(installedParsed, latest)) {
|
||||
// Stable, but not the latest
|
||||
if (installedParsed.minor === latest.minor) {
|
||||
// Same major and minor, but not the latest patch
|
||||
return {
|
||||
staleness: "stale-patch",
|
||||
expected: latest.raw,
|
||||
installed
|
||||
};
|
||||
}
|
||||
return {
|
||||
staleness: "stale-minor",
|
||||
expected: latest.raw,
|
||||
installed
|
||||
};
|
||||
} else if (semver.gt(installedParsed, latest) && installedParsed.version !== canary.version) {
|
||||
// Newer major version
|
||||
return {
|
||||
staleness: "newer-than-npm",
|
||||
installed
|
||||
};
|
||||
} else {
|
||||
// Latest and greatest
|
||||
return {
|
||||
staleness: "fresh",
|
||||
installed
|
||||
};
|
||||
}
|
||||
}
|
||||
return {
|
||||
installed: (installedParsed == null ? void 0 : installedParsed.raw) ?? "0.0.0",
|
||||
staleness: "unknown"
|
||||
};
|
||||
}
|
||||
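// Illustrative examples (not part of the vendored file) of the staleness buckets above;
// the version numbers are made up.
console.assert(parseVersionInfo({ installed: "13.2.0", latest: "14.1.0", canary: "14.1.1-canary.3" }).staleness === "stale-major");
console.assert(parseVersionInfo({ installed: "14.1.0", latest: "14.1.2", canary: "14.1.3-canary.0" }).staleness === "stale-patch");
console.assert(parseVersionInfo({ installed: "14.1.1-canary.1", latest: "14.1.0", canary: "14.1.1-canary.3" }).staleness === "stale-prerelease");
console.assert(parseVersionInfo({ installed: "not-a-version", latest: "14.1.0", canary: "14.1.1-canary.3" }).staleness === "unknown");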
|
||||
//# sourceMappingURL=parse-version-info.js.map
|
||||
1
node_modules/next/dist/esm/server/dev/parse-version-info.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/dev/parse-version-info.ts"],"names":["semver","parseVersionInfo","o","latest","parse","canary","installedParsed","installed","major","staleness","expected","raw","prerelease","lt","length","minor","gt","version"],"mappings":"AAAA,YAAYA,YAAY,4BAA2B;AAenD,OAAO,SAASC,iBAAiBC,CAIhC;IACC,MAAMC,SAASH,OAAOI,KAAK,CAACF,EAAEC,MAAM;IACpC,MAAME,SAASL,OAAOI,KAAK,CAACF,EAAEG,MAAM;IACpC,MAAMC,kBAAkBN,OAAOI,KAAK,CAACF,EAAEK,SAAS;IAChD,MAAMA,YAAYL,EAAEK,SAAS;IAC7B,IAAID,mBAAmBH,UAAUE,QAAQ;QACvC,IAAIC,gBAAgBE,KAAK,GAAGL,OAAOK,KAAK,EAAE;YACxC,oBAAoB;YACpB,OAAO;gBAAEC,WAAW;gBAAeC,UAAUP,OAAOQ,GAAG;gBAAEJ;YAAU;QACrE,OAAO,IACLD,gBAAgBM,UAAU,CAAC,EAAE,KAAK,YAClCZ,OAAOa,EAAE,CAACP,iBAAiBD,SAC3B;YACA,iCAAiC;YACjC,OAAO;gBACLI,WAAW;gBACXC,UAAUL,OAAOM,GAAG;gBACpBJ;YACF;QACF,OAAO,IACL,CAACD,gBAAgBM,UAAU,CAACE,MAAM,IAClCd,OAAOa,EAAE,CAACP,iBAAiBH,SAC3B;YACA,6BAA6B;YAC7B,IAAIG,gBAAgBS,KAAK,KAAKZ,OAAOY,KAAK,EAAE;gBAC1C,iDAAiD;gBACjD,OAAO;oBACLN,WAAW;oBACXC,UAAUP,OAAOQ,GAAG;oBACpBJ;gBACF;YACF;YACA,OAAO;gBAAEE,WAAW;gBAAeC,UAAUP,OAAOQ,GAAG;gBAAEJ;YAAU;QACrE,OAAO,IACLP,OAAOgB,EAAE,CAACV,iBAAiBH,WAC3BG,gBAAgBW,OAAO,KAAKZ,OAAOY,OAAO,EAC1C;YACA,sBAAsB;YACtB,OAAO;gBAAER,WAAW;gBAAkBF;YAAU;QAClD,OAAO;YACL,sBAAsB;YACtB,OAAO;gBAAEE,WAAW;gBAASF;YAAU;QACzC;IACF;IAEA,OAAO;QACLA,WAAWD,CAAAA,mCAAAA,gBAAiBK,GAAG,KAAI;QACnCF,WAAW;IACb;AACF"}
|
||||
64
node_modules/next/dist/esm/server/dev/static-paths-worker.js
generated
vendored
Normal file
@ -0,0 +1,64 @@
|
||||
import "../require-hook";
|
||||
import "../node-environment";
|
||||
import { buildAppStaticPaths, buildStaticPaths, collectGenerateParams } from "../../build/utils";
|
||||
import { loadComponents } from "../load-components";
|
||||
import { setHttpClientAndAgentOptions } from "../setup-http-agent-env";
|
||||
import { isAppRouteRouteModule } from "../future/route-modules/checks";
|
||||
// we call getStaticPaths in a separate process to ensure
|
||||
// dev doesn't rely on side effects that would break
|
||||
// during a production build
|
||||
export async function loadStaticPaths({ dir, distDir, pathname, config, httpAgentOptions, locales, defaultLocale, isAppPath, page, isrFlushToDisk, fetchCacheKeyPrefix, maxMemoryCacheSize, requestHeaders, cacheHandler, ppr }) {
|
||||
// update work memory runtime-config
|
||||
require("../../shared/lib/runtime-config.external").setConfig(config);
|
||||
setHttpClientAndAgentOptions({
|
||||
httpAgentOptions
|
||||
});
|
||||
const components = await loadComponents({
|
||||
distDir,
|
||||
// In `pages/`, the page is the same as the pathname.
|
||||
page: page || pathname,
|
||||
isAppPath
|
||||
});
|
||||
if (!components.getStaticPaths && !isAppPath) {
|
||||
// we shouldn't get to this point since the worker should
|
||||
// only be called for SSG pages with getStaticPaths
|
||||
throw new Error(`Invariant: failed to load page with getStaticPaths for ${pathname}`);
|
||||
}
|
||||
if (isAppPath) {
|
||||
const { routeModule } = components;
|
||||
const generateParams = routeModule && isAppRouteRouteModule(routeModule) ? [
|
||||
{
|
||||
config: {
|
||||
revalidate: routeModule.userland.revalidate,
|
||||
dynamic: routeModule.userland.dynamic,
|
||||
dynamicParams: routeModule.userland.dynamicParams
|
||||
},
|
||||
generateStaticParams: routeModule.userland.generateStaticParams,
|
||||
segmentPath: pathname
|
||||
}
|
||||
] : await collectGenerateParams(components.ComponentMod.tree);
|
||||
return await buildAppStaticPaths({
|
||||
dir,
|
||||
page: pathname,
|
||||
generateParams,
|
||||
configFileName: config.configFileName,
|
||||
distDir,
|
||||
requestHeaders,
|
||||
cacheHandler,
|
||||
isrFlushToDisk,
|
||||
fetchCacheKeyPrefix,
|
||||
maxMemoryCacheSize,
|
||||
ppr,
|
||||
ComponentMod: components.ComponentMod
|
||||
});
|
||||
}
|
||||
return await buildStaticPaths({
|
||||
page: pathname,
|
||||
getStaticPaths: components.getStaticPaths,
|
||||
configFileName: config.configFileName,
|
||||
locales,
|
||||
defaultLocale
|
||||
});
|
||||
}
|
||||
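// Summary (not part of the vendored file): for app routes, generateParams comes either
// from the route module's userland exports (revalidate, dynamic, dynamicParams,
// generateStaticParams) or from walking the component tree, and buildAppStaticPaths does
// the work; for pages routes, buildStaticPaths runs the page's getStaticPaths directly.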
|
||||
//# sourceMappingURL=static-paths-worker.js.map
|
||||
1
node_modules/next/dist/esm/server/dev/static-paths-worker.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/dev/static-paths-worker.ts"],"names":["buildAppStaticPaths","buildStaticPaths","collectGenerateParams","loadComponents","setHttpClientAndAgentOptions","isAppRouteRouteModule","loadStaticPaths","dir","distDir","pathname","config","httpAgentOptions","locales","defaultLocale","isAppPath","page","isrFlushToDisk","fetchCacheKeyPrefix","maxMemoryCacheSize","requestHeaders","cacheHandler","ppr","require","setConfig","components","getStaticPaths","Error","routeModule","generateParams","revalidate","userland","dynamic","dynamicParams","generateStaticParams","segmentPath","ComponentMod","tree","configFileName"],"mappings":"AAEA,OAAO,kBAAiB;AACxB,OAAO,sBAAqB;AAE5B,SACEA,mBAAmB,EACnBC,gBAAgB,EAChBC,qBAAqB,QAChB,oBAAmB;AAE1B,SAASC,cAAc,QAAQ,qBAAoB;AACnD,SAASC,4BAA4B,QAAQ,0BAAyB;AAEtE,SAASC,qBAAqB,QAAQ,iCAAgC;AAQtE,yDAAyD;AACzD,uDAAuD;AACvD,4BAA4B;AAC5B,OAAO,eAAeC,gBAAgB,EACpCC,GAAG,EACHC,OAAO,EACPC,QAAQ,EACRC,MAAM,EACNC,gBAAgB,EAChBC,OAAO,EACPC,aAAa,EACbC,SAAS,EACTC,IAAI,EACJC,cAAc,EACdC,mBAAmB,EACnBC,kBAAkB,EAClBC,cAAc,EACdC,YAAY,EACZC,GAAG,EAiBJ;IAKC,oCAAoC;IACpCC,QAAQ,4CAA4CC,SAAS,CAACb;IAC9DN,6BAA6B;QAC3BO;IACF;IAEA,MAAMa,aAAa,MAAMrB,eAAe;QACtCK;QACA,qDAAqD;QACrDO,MAAMA,QAAQN;QACdK;IACF;IAEA,IAAI,CAACU,WAAWC,cAAc,IAAI,CAACX,WAAW;QAC5C,yDAAyD;QACzD,mDAAmD;QACnD,MAAM,IAAIY,MACR,CAAC,uDAAuD,EAAEjB,SAAS,CAAC;IAExE;IAEA,IAAIK,WAAW;QACb,MAAM,EAAEa,WAAW,EAAE,GAAGH;QACxB,MAAMI,iBACJD,eAAetB,sBAAsBsB,eACjC;YACE;gBACEjB,QAAQ;oBACNmB,YAAYF,YAAYG,QAAQ,CAACD,UAAU;oBAC3CE,SAASJ,YAAYG,QAAQ,CAACC,OAAO;oBACrCC,eAAeL,YAAYG,QAAQ,CAACE,aAAa;gBACnD;gBACAC,sBAAsBN,YAAYG,QAAQ,CAACG,oBAAoB;gBAC/DC,aAAazB;YACf;SACD,GACD,MAAMP,sBAAsBsB,WAAWW,YAAY,CAACC,IAAI;QAE9D,OAAO,MAAMpC,oBAAoB;YAC/BO;YACAQ,MAAMN;YACNmB;YACAS,gBAAgB3B,OAAO2B,cAAc;YACrC7B;YACAW;YACAC;YACAJ;YACAC;YACAC;YACAG;YACAc,cAAcX,WAAWW,YAAY;QACvC;IACF;IAEA,OAAO,MAAMlC,iBAAiB;QAC5Bc,MAAMN;QACNgB,gBAAgBD,WAAWC,cAAc;QACzCY,gBAAgB3B,OAAO2B,cAAc;QACrCzB;QACAC;IACF;AACF"}
|
||||
604
node_modules/next/dist/esm/server/dev/turbopack-utils.js
generated
vendored
Normal file
@ -0,0 +1,604 @@
|
||||
import loadJsConfig from "../../build/load-jsconfig";
|
||||
import { decodeMagicIdentifier, MAGIC_IDENTIFIER_REGEX } from "../../shared/lib/magic-identifier";
|
||||
import { bold, green, magenta, red } from "../../lib/picocolors";
|
||||
import { HMR_ACTIONS_SENT_TO_BROWSER } from "./hot-reloader-types";
|
||||
import * as Log from "../../build/output/log";
|
||||
import { getEntryKey, splitEntryKey } from "./turbopack/entry-key";
|
||||
export async function getTurbopackJsConfig(dir, nextConfig) {
|
||||
const { jsConfig } = await loadJsConfig(dir, nextConfig);
|
||||
return jsConfig ?? {
|
||||
compilerOptions: {}
|
||||
};
|
||||
}
|
||||
class ModuleBuildError extends Error {
|
||||
}
|
||||
/**
|
||||
* Thin stopgap workaround layer to mimic existing wellknown-errors-plugin in webpack's build
|
||||
* to emit certain type of errors into cli.
|
||||
*/ export function isWellKnownError(issue) {
|
||||
const { title } = issue;
|
||||
const formattedTitle = renderStyledStringToErrorAnsi(title);
|
||||
// TODO: add more well known errors
|
||||
if (formattedTitle.includes("Module not found") || formattedTitle.includes("Unknown module type")) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
/// Print out an issue to the console which should not block
|
||||
/// the build by throwing or by showing the blocking error overlay.
|
||||
export function printNonFatalIssue(issue) {
|
||||
if (isRelevantWarning(issue)) {
|
||||
Log.warn(formatIssue(issue));
|
||||
}
|
||||
}
|
||||
function isNodeModulesIssue(issue) {
|
||||
return issue.severity === "warning" && issue.filePath.match(/^(?:.*[\\/])?node_modules(?:[\\/].*)?$/) !== null;
|
||||
}
|
||||
export function isRelevantWarning(issue) {
|
||||
return issue.severity === "warning" && !isNodeModulesIssue(issue);
|
||||
}
|
||||
export function formatIssue(issue) {
|
||||
const { filePath, title, description, source } = issue;
|
||||
let { documentationLink } = issue;
|
||||
let formattedTitle = renderStyledStringToErrorAnsi(title).replace(/\n/g, "\n ");
|
||||
// TODO: Use error codes to identify these
|
||||
// TODO: Generalize adapting Turbopack errors to Next.js errors
|
||||
if (formattedTitle.includes("Module not found")) {
|
||||
// For compatibility with webpack
|
||||
// TODO: include columns in webpack errors.
|
||||
documentationLink = "https://nextjs.org/docs/messages/module-not-found";
|
||||
}
|
||||
let formattedFilePath = filePath.replace("[project]/", "./").replaceAll("/./", "/").replace("\\\\?\\", "");
|
||||
let message = "";
|
||||
if (source && source.range) {
|
||||
const { start } = source.range;
|
||||
message = `${formattedFilePath}:${start.line + 1}:${start.column + 1}\n${formattedTitle}`;
|
||||
} else if (formattedFilePath) {
|
||||
message = `${formattedFilePath}\n${formattedTitle}`;
|
||||
} else {
|
||||
message = formattedTitle;
|
||||
}
|
||||
message += "\n";
|
||||
if ((source == null ? void 0 : source.range) && source.source.content) {
|
||||
const { start, end } = source.range;
|
||||
const { codeFrameColumns } = require("next/dist/compiled/babel/code-frame");
|
||||
message += codeFrameColumns(source.source.content, {
|
||||
start: {
|
||||
line: start.line + 1,
|
||||
column: start.column + 1
|
||||
},
|
||||
end: {
|
||||
line: end.line + 1,
|
||||
column: end.column + 1
|
||||
}
|
||||
}, {
|
||||
forceColor: true
|
||||
}).trim() + "\n\n";
|
||||
}
|
||||
if (description) {
|
||||
message += renderStyledStringToErrorAnsi(description) + "\n\n";
|
||||
}
|
||||
// TODO: make it possible to enable this for debugging, but not in tests.
|
||||
// if (detail) {
|
||||
// message += renderStyledStringToErrorAnsi(detail) + '\n\n'
|
||||
// }
|
||||
// TODO: Include a trace from the issue.
|
||||
if (documentationLink) {
|
||||
message += documentationLink + "\n\n";
|
||||
}
|
||||
return message;
|
||||
}
|
||||
function getIssueKey(issue) {
|
||||
return `${issue.severity}-${issue.filePath}-${JSON.stringify(issue.title)}-${JSON.stringify(issue.description)}`;
|
||||
}
|
||||
export function processTopLevelIssues(currentTopLevelIssues, result) {
|
||||
currentTopLevelIssues.clear();
|
||||
for (const issue of result.issues){
|
||||
const issueKey = getIssueKey(issue);
|
||||
currentTopLevelIssues.set(issueKey, issue);
|
||||
}
|
||||
}
|
||||
export function processIssues(currentEntryIssues, key, result, throwIssue, logErrors) {
|
||||
const newIssues = new Map();
|
||||
currentEntryIssues.set(key, newIssues);
|
||||
const relevantIssues = new Set();
|
||||
for (const issue of result.issues){
|
||||
if (issue.severity !== "error" && issue.severity !== "fatal" && issue.severity !== "warning") continue;
|
||||
const issueKey = getIssueKey(issue);
|
||||
const formatted = formatIssue(issue);
|
||||
newIssues.set(issueKey, issue);
|
||||
if (issue.severity !== "warning") {
|
||||
relevantIssues.add(formatted);
|
||||
if (logErrors && isWellKnownError(issue)) {
|
||||
Log.error(formatted);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (relevantIssues.size && throwIssue) {
|
||||
throw new ModuleBuildError([
|
||||
...relevantIssues
|
||||
].join("\n\n"));
|
||||
}
|
||||
}
|
||||
export function renderStyledStringToErrorAnsi(string) {
|
||||
function decodeMagicIdentifiers(str) {
|
||||
return str.replaceAll(MAGIC_IDENTIFIER_REGEX, (ident)=>{
|
||||
try {
|
||||
return magenta(`{${decodeMagicIdentifier(ident)}}`);
|
||||
} catch (e) {
|
||||
return magenta(`{${ident} (decoding failed: ${e})}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
switch(string.type){
|
||||
case "text":
|
||||
return decodeMagicIdentifiers(string.value);
|
||||
case "strong":
|
||||
return bold(red(decodeMagicIdentifiers(string.value)));
|
||||
case "code":
|
||||
return green(decodeMagicIdentifiers(string.value));
|
||||
case "line":
|
||||
return string.value.map(renderStyledStringToErrorAnsi).join("");
|
||||
case "stack":
|
||||
return string.value.map(renderStyledStringToErrorAnsi).join("\n");
|
||||
default:
|
||||
throw new Error("Unknown StyledString type", string);
|
||||
}
|
||||
}
|
||||
const MILLISECONDS_IN_NANOSECOND = BigInt(1000000);
|
||||
export function msToNs(ms) {
|
||||
return BigInt(Math.floor(ms)) * MILLISECONDS_IN_NANOSECOND;
|
||||
}
|
||||
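// Illustrative usage (not part of the vendored file): millisecond durations are floored
// and converted to BigInt nanoseconds.
console.assert(msToNs(1.75) === 1000000n);
console.assert(msToNs(250) === 250000000n);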
export async function handleRouteType({ dev, page, pathname, route, currentEntryIssues, entrypoints, manifestLoader, readyIds, rewrites, hooks, logErrors }) {
|
||||
switch(route.type){
|
||||
case "page":
|
||||
{
|
||||
const clientKey = getEntryKey("pages", "client", page);
|
||||
const serverKey = getEntryKey("pages", "server", page);
|
||||
try {
|
||||
if (entrypoints.global.app) {
|
||||
const key = getEntryKey("pages", "server", "_app");
|
||||
const writtenEndpoint = await entrypoints.global.app.writeToDisk();
|
||||
hooks == null ? void 0 : hooks.handleWrittenEndpoint(key, writtenEndpoint);
|
||||
processIssues(currentEntryIssues, key, writtenEndpoint, false, logErrors);
|
||||
}
|
||||
await manifestLoader.loadBuildManifest("_app");
|
||||
await manifestLoader.loadPagesManifest("_app");
|
||||
if (entrypoints.global.document) {
|
||||
const key = getEntryKey("pages", "server", "_document");
|
||||
const writtenEndpoint = await entrypoints.global.document.writeToDisk();
|
||||
hooks == null ? void 0 : hooks.handleWrittenEndpoint(key, writtenEndpoint);
|
||||
processIssues(currentEntryIssues, key, writtenEndpoint, false, logErrors);
|
||||
}
|
||||
await manifestLoader.loadPagesManifest("_document");
|
||||
const writtenEndpoint = await route.htmlEndpoint.writeToDisk();
|
||||
hooks == null ? void 0 : hooks.handleWrittenEndpoint(serverKey, writtenEndpoint);
|
||||
const type = writtenEndpoint == null ? void 0 : writtenEndpoint.type;
|
||||
await manifestLoader.loadBuildManifest(page);
|
||||
await manifestLoader.loadPagesManifest(page);
|
||||
if (type === "edge") {
|
||||
await manifestLoader.loadMiddlewareManifest(page, "pages");
|
||||
} else {
|
||||
manifestLoader.deleteMiddlewareManifest(serverKey);
|
||||
}
|
||||
await manifestLoader.loadFontManifest("/_app", "pages");
|
||||
await manifestLoader.loadFontManifest(page, "pages");
|
||||
await manifestLoader.loadLoadableManifest(page, "pages");
|
||||
await manifestLoader.writeManifests({
|
||||
rewrites,
|
||||
pageEntrypoints: entrypoints.page
|
||||
});
|
||||
processIssues(currentEntryIssues, serverKey, writtenEndpoint, false, logErrors);
|
||||
} finally{
|
||||
// TODO subscriptions should only be caused by the WebSocket connections
|
||||
// otherwise we don't know when to unsubscribe and this leaks
|
||||
hooks == null ? void 0 : hooks.subscribeToChanges(serverKey, false, route.dataEndpoint, ()=>{
|
||||
// Report the next compilation again
|
||||
readyIds == null ? void 0 : readyIds.delete(pathname);
|
||||
return {
|
||||
event: HMR_ACTIONS_SENT_TO_BROWSER.SERVER_ONLY_CHANGES,
|
||||
pages: [
|
||||
page
|
||||
]
|
||||
};
|
||||
});
|
||||
hooks == null ? void 0 : hooks.subscribeToChanges(clientKey, false, route.htmlEndpoint, ()=>{
|
||||
return {
|
||||
event: HMR_ACTIONS_SENT_TO_BROWSER.CLIENT_CHANGES
|
||||
};
|
||||
});
|
||||
if (entrypoints.global.document) {
|
||||
hooks == null ? void 0 : hooks.subscribeToChanges(getEntryKey("pages", "server", "_document"), false, entrypoints.global.document, ()=>{
|
||||
return {
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
case "page-api":
|
||||
{
|
||||
const key = getEntryKey("pages", "server", page);
|
||||
const writtenEndpoint = await route.endpoint.writeToDisk();
|
||||
hooks == null ? void 0 : hooks.handleWrittenEndpoint(key, writtenEndpoint);
|
||||
const type = writtenEndpoint == null ? void 0 : writtenEndpoint.type;
|
||||
await manifestLoader.loadPagesManifest(page);
|
||||
if (type === "edge") {
|
||||
await manifestLoader.loadMiddlewareManifest(page, "pages");
|
||||
} else {
|
||||
manifestLoader.deleteMiddlewareManifest(key);
|
||||
}
|
||||
await manifestLoader.loadLoadableManifest(page, "pages");
|
||||
await manifestLoader.writeManifests({
|
||||
rewrites,
|
||||
pageEntrypoints: entrypoints.page
|
||||
});
|
||||
processIssues(currentEntryIssues, key, writtenEndpoint, true, logErrors);
|
||||
break;
|
||||
}
|
||||
case "app-page":
|
||||
{
|
||||
const key = getEntryKey("app", "server", page);
|
||||
const writtenEndpoint = await route.htmlEndpoint.writeToDisk();
|
||||
hooks == null ? void 0 : hooks.handleWrittenEndpoint(key, writtenEndpoint);
|
||||
// TODO subscriptions should only be caused by the WebSocket connections
|
||||
// otherwise we don't know when to unsubscribe and this leaks
|
||||
hooks == null ? void 0 : hooks.subscribeToChanges(key, true, route.rscEndpoint, (change)=>{
|
||||
if (change.issues.some((issue)=>issue.severity === "error")) {
|
||||
// Ignore any updates that have errors
|
||||
// There will be another update without errors eventually
|
||||
return;
|
||||
}
|
||||
// Report the next compilation again
|
||||
readyIds == null ? void 0 : readyIds.delete(pathname);
|
||||
return {
|
||||
action: HMR_ACTIONS_SENT_TO_BROWSER.SERVER_COMPONENT_CHANGES
|
||||
};
|
||||
});
|
||||
const type = writtenEndpoint == null ? void 0 : writtenEndpoint.type;
|
||||
if (type === "edge") {
|
||||
await manifestLoader.loadMiddlewareManifest(page, "app");
|
||||
} else {
|
||||
manifestLoader.deleteMiddlewareManifest(key);
|
||||
}
|
||||
await manifestLoader.loadAppBuildManifest(page);
|
||||
await manifestLoader.loadBuildManifest(page, "app");
|
||||
await manifestLoader.loadAppPathsManifest(page);
|
||||
await manifestLoader.loadActionManifest(page);
|
||||
await manifestLoader.loadLoadableManifest(page, "app");
|
||||
await manifestLoader.loadFontManifest(page, "app");
|
||||
await manifestLoader.writeManifests({
|
||||
rewrites,
|
||||
pageEntrypoints: entrypoints.page
|
||||
});
|
||||
processIssues(currentEntryIssues, key, writtenEndpoint, dev, logErrors);
|
||||
break;
|
||||
}
|
||||
case "app-route":
|
||||
{
|
||||
const key = getEntryKey("app", "server", page);
|
||||
const writtenEndpoint = await route.endpoint.writeToDisk();
|
||||
hooks == null ? void 0 : hooks.handleWrittenEndpoint(key, writtenEndpoint);
|
||||
const type = writtenEndpoint == null ? void 0 : writtenEndpoint.type;
|
||||
await manifestLoader.loadAppPathsManifest(page);
|
||||
if (type === "edge") {
|
||||
await manifestLoader.loadMiddlewareManifest(page, "app");
|
||||
} else {
|
||||
manifestLoader.deleteMiddlewareManifest(key);
|
||||
}
|
||||
await manifestLoader.writeManifests({
|
||||
rewrites,
|
||||
pageEntrypoints: entrypoints.page
|
||||
});
|
||||
processIssues(currentEntryIssues, key, writtenEndpoint, true, logErrors);
|
||||
break;
|
||||
}
|
||||
default:
{
throw new Error(`unknown route type ${route.type} for ${page}`);
}
}
}
/**
 * Maintains a mapping between entrypoints and the corresponding client asset paths.
 */ export class AssetMapper {
/**
   * Overrides asset paths for a key and updates the mapping from path to key.
   *
   * @param key
   * @param assetPaths asset paths relative to the .next directory
   */ setPathsForKey(key, assetPaths) {
this.delete(key);
const newAssetPaths = new Set(assetPaths);
this.entryMap.set(key, newAssetPaths);
for (const assetPath of newAssetPaths){
let assetPathKeys = this.assetMap.get(assetPath);
if (!assetPathKeys) {
assetPathKeys = new Set();
this.assetMap.set(assetPath, assetPathKeys);
}
assetPathKeys.add(key);
}
}
/**
   * Deletes the key and any asset only referenced by this key.
   *
   * @param key
   */ delete(key) {
for (const assetPath of this.getAssetPathsByKey(key)){
const assetPathKeys = this.assetMap.get(assetPath);
assetPathKeys == null ? void 0 : assetPathKeys.delete(key);
if (!(assetPathKeys == null ? void 0 : assetPathKeys.size)) {
this.assetMap.delete(assetPath);
}
}
this.entryMap.delete(key);
}
getAssetPathsByKey(key) {
return Array.from(this.entryMap.get(key) ?? []);
}
getKeysByAsset(path) {
return Array.from(this.assetMap.get(path) ?? []);
}
keys() {
return this.entryMap.keys();
}
constructor(){
this.entryMap = new Map();
this.assetMap = new Map();
}
}
export function hasEntrypointForKey(entrypoints, key, assetMapper) {
const { type, page } = splitEntryKey(key);
switch(type){
case "app":
return entrypoints.app.has(page);
case "pages":
switch(page){
case "_app":
return entrypoints.global.app != null;
case "_document":
return entrypoints.global.document != null;
case "_error":
return entrypoints.global.error != null;
default:
return entrypoints.page.has(page);
}
case "root":
switch(page){
case "middleware":
return entrypoints.global.middleware != null;
case "instrumentation":
return entrypoints.global.instrumentation != null;
default:
return false;
}
case "assets":
if (!assetMapper) {
return false;
}
return assetMapper.getKeysByAsset(page).some((pageKey)=>hasEntrypointForKey(entrypoints, pageKey, assetMapper));
default:
{
// validation that we covered all cases, this should never run.
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const _ = type;
return false;
}
}
}
export async function handleEntrypoints({ entrypoints, currentEntrypoints, currentEntryIssues, manifestLoader, nextConfig, rewrites, logErrors, dev }) {
currentEntrypoints.global.app = entrypoints.pagesAppEndpoint;
currentEntrypoints.global.document = entrypoints.pagesDocumentEndpoint;
currentEntrypoints.global.error = entrypoints.pagesErrorEndpoint;
currentEntrypoints.global.instrumentation = entrypoints.instrumentation;
currentEntrypoints.page.clear();
currentEntrypoints.app.clear();
for (const [pathname, route] of entrypoints.routes){
switch(route.type){
case "page":
case "page-api":
currentEntrypoints.page.set(pathname, route);
break;
case "app-page":
{
route.pages.forEach((page)=>{
currentEntrypoints.app.set(page.originalName, {
type: "app-page",
...page
});
});
break;
}
case "app-route":
{
currentEntrypoints.app.set(route.originalName, route);
break;
}
default:
Log.info(`skipping ${pathname} (${route.type})`);
break;
}
}
if (dev) {
await handleEntrypointsDevCleanup({
currentEntryIssues,
currentEntrypoints,
...dev
});
}
const { middleware, instrumentation } = entrypoints;
// We check for explicit true/false, since it's initialized to
// undefined during the first loop (middlewareChanges event is
// unnecessary during the first serve)
if (currentEntrypoints.global.middleware && !middleware) {
const key = getEntryKey("root", "server", "middleware");
// Went from middleware to no middleware
await (dev == null ? void 0 : dev.hooks.unsubscribeFromChanges(key));
currentEntryIssues.delete(key);
dev == null ? void 0 : dev.hooks.sendHmr("middleware", {
event: HMR_ACTIONS_SENT_TO_BROWSER.MIDDLEWARE_CHANGES
});
} else if (!currentEntrypoints.global.middleware && middleware) {
// Went from no middleware to middleware
dev == null ? void 0 : dev.hooks.sendHmr("middleware", {
event: HMR_ACTIONS_SENT_TO_BROWSER.MIDDLEWARE_CHANGES
});
}
currentEntrypoints.global.middleware = middleware;
if (nextConfig.experimental.instrumentationHook && instrumentation) {
const processInstrumentation = async (name, prop)=>{
const key = getEntryKey("root", "server", name);
const writtenEndpoint = await instrumentation[prop].writeToDisk();
dev == null ? void 0 : dev.hooks.handleWrittenEndpoint(key, writtenEndpoint);
processIssues(currentEntryIssues, key, writtenEndpoint, false, logErrors);
};
await processInstrumentation("instrumentation.nodeJs", "nodeJs");
await processInstrumentation("instrumentation.edge", "edge");
await manifestLoader.loadMiddlewareManifest("instrumentation", "instrumentation");
await manifestLoader.writeManifests({
rewrites: rewrites,
pageEntrypoints: currentEntrypoints.page
});
if (dev) {
dev.serverFields.actualInstrumentationHookFile = "/instrumentation";
await dev.hooks.propagateServerField("actualInstrumentationHookFile", dev.serverFields.actualInstrumentationHookFile);
}
} else {
if (dev) {
dev.serverFields.actualInstrumentationHookFile = undefined;
await dev.hooks.propagateServerField("actualInstrumentationHookFile", dev.serverFields.actualInstrumentationHookFile);
}
}
if (middleware) {
const key = getEntryKey("root", "server", "middleware");
const endpoint = middleware.endpoint;
async function processMiddleware() {
const writtenEndpoint = await endpoint.writeToDisk();
dev == null ? void 0 : dev.hooks.handleWrittenEndpoint(key, writtenEndpoint);
processIssues(currentEntryIssues, key, writtenEndpoint, false, logErrors);
await manifestLoader.loadMiddlewareManifest("middleware", "middleware");
if (dev) {
var _manifestLoader_getMiddlewareManifest;
dev.serverFields.middleware = {
match: null,
page: "/",
matchers: (_manifestLoader_getMiddlewareManifest = manifestLoader.getMiddlewareManifest(key)) == null ? void 0 : _manifestLoader_getMiddlewareManifest.middleware["/"].matchers
};
}
}
await processMiddleware();
dev == null ? void 0 : dev.hooks.subscribeToChanges(key, false, endpoint, async ()=>{
const finishBuilding = dev.hooks.startBuilding("middleware", undefined, true);
await processMiddleware();
await dev.hooks.propagateServerField("actualMiddlewareFile", dev.serverFields.actualMiddlewareFile);
await dev.hooks.propagateServerField("middleware", dev.serverFields.middleware);
await manifestLoader.writeManifests({
rewrites: rewrites,
pageEntrypoints: currentEntrypoints.page
});
finishBuilding == null ? void 0 : finishBuilding();
return {
event: HMR_ACTIONS_SENT_TO_BROWSER.MIDDLEWARE_CHANGES
};
});
} else {
manifestLoader.deleteMiddlewareManifest(getEntryKey("root", "server", "middleware"));
if (dev) {
dev.serverFields.actualMiddlewareFile = undefined;
dev.serverFields.middleware = undefined;
}
}
if (dev) {
await dev.hooks.propagateServerField("actualMiddlewareFile", dev.serverFields.actualMiddlewareFile);
await dev.hooks.propagateServerField("middleware", dev.serverFields.middleware);
}
}
async function handleEntrypointsDevCleanup({ currentEntryIssues, currentEntrypoints, assetMapper, changeSubscriptions, clients, clientStates, hooks }) {
// this needs to be first as `hasEntrypointForKey` uses the `assetMapper`
for (const key of assetMapper.keys()){
if (!hasEntrypointForKey(currentEntrypoints, key, assetMapper)) {
assetMapper.delete(key);
}
}
for (const key of changeSubscriptions.keys()){
// middleware is handled separately
if (!hasEntrypointForKey(currentEntrypoints, key, assetMapper)) {
await hooks.unsubscribeFromChanges(key);
}
}
for (const [key] of currentEntryIssues){
if (!hasEntrypointForKey(currentEntrypoints, key, assetMapper)) {
currentEntryIssues.delete(key);
}
}
for (const client of clients){
const state = clientStates.get(client);
if (!state) {
continue;
}
for (const key of state.clientIssues.keys()){
if (!hasEntrypointForKey(currentEntrypoints, key, assetMapper)) {
state.clientIssues.delete(key);
}
}
for (const id of state.subscriptions.keys()){
if (!hasEntrypointForKey(currentEntrypoints, getEntryKey("assets", "client", id), assetMapper)) {
hooks.unsubscribeFromHmrEvents(client, id);
}
}
}
}
export async function handlePagesErrorRoute({ currentEntryIssues, entrypoints, manifestLoader, rewrites, logErrors, hooks }) {
if (entrypoints.global.app) {
const key = getEntryKey("pages", "server", "_app");
const writtenEndpoint = await entrypoints.global.app.writeToDisk();
hooks == null ? void 0 : hooks.handleWrittenEndpoint(key, writtenEndpoint);
hooks == null ? void 0 : hooks.subscribeToChanges(key, false, entrypoints.global.app, ()=>{
// There's a special case for this in `../client/page-bootstrap.ts`.
// https://github.com/vercel/next.js/blob/08d7a7e5189a835f5dcb82af026174e587575c0e/packages/next/src/client/page-bootstrap.ts#L69-L71
return {
event: HMR_ACTIONS_SENT_TO_BROWSER.CLIENT_CHANGES
};
});
processIssues(currentEntryIssues, key, writtenEndpoint, false, logErrors);
}
await manifestLoader.loadBuildManifest("_app");
await manifestLoader.loadPagesManifest("_app");
await manifestLoader.loadFontManifest("_app");
if (entrypoints.global.document) {
const key = getEntryKey("pages", "server", "_document");
const writtenEndpoint = await entrypoints.global.document.writeToDisk();
hooks == null ? void 0 : hooks.handleWrittenEndpoint(key, writtenEndpoint);
hooks == null ? void 0 : hooks.subscribeToChanges(key, false, entrypoints.global.document, ()=>{
return {
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE
};
});
processIssues(currentEntryIssues, key, writtenEndpoint, false, logErrors);
}
await manifestLoader.loadPagesManifest("_document");
if (entrypoints.global.error) {
const key = getEntryKey("pages", "server", "_error");
const writtenEndpoint = await entrypoints.global.error.writeToDisk();
hooks == null ? void 0 : hooks.handleWrittenEndpoint(key, writtenEndpoint);
hooks == null ? void 0 : hooks.subscribeToChanges(key, false, entrypoints.global.error, ()=>{
// There's a special case for this in `../client/page-bootstrap.ts`.
// https://github.com/vercel/next.js/blob/08d7a7e5189a835f5dcb82af026174e587575c0e/packages/next/src/client/page-bootstrap.ts#L69-L71
return {
event: HMR_ACTIONS_SENT_TO_BROWSER.CLIENT_CHANGES
};
});
processIssues(currentEntryIssues, key, writtenEndpoint, false, logErrors);
}
await manifestLoader.loadBuildManifest("_error");
await manifestLoader.loadPagesManifest("_error");
await manifestLoader.loadFontManifest("_error");
await manifestLoader.writeManifests({
rewrites,
pageEntrypoints: entrypoints.page
});
}

//# sourceMappingURL=turbopack-utils.js.map
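For orientation, a minimal usage sketch of the AssetMapper exported above. The entry key, asset path, and import specifiers below are illustrative assumptions by the editor, not part of the vendored file:
// Editor's sketch; import paths and values are assumptions for illustration.
import { AssetMapper } from "./turbopack-utils";
import { getEntryKey } from "./turbopack/entry-key";

const mapper = new AssetMapper();
const key = getEntryKey("pages", "server", "/about"); // hypothetical page entry
// Record which client assets this entry produced (paths relative to .next).
mapper.setPathsForKey(key, ["static/chunks/pages/about.js"]);
mapper.getKeysByAsset("static/chunks/pages/about.js"); // => [key]
// Deleting the key also drops assets referenced only by this key.
mapper.delete(key);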
1
node_modules/next/dist/esm/server/dev/turbopack-utils.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/dev/turbopack-utils.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
21
node_modules/next/dist/esm/server/dev/turbopack/entry-key.js
generated
vendored
Normal file
21
node_modules/next/dist/esm/server/dev/turbopack/entry-key.js
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
/**
 * `app` -> app dir
 * `pages` -> pages dir
 * `root` -> middleware / instrumentation
 * `assets` -> assets
 */ /**
 * Get a key that's unique across all entrypoints.
 */ export function getEntryKey(type, side, page) {
return JSON.stringify({
type,
side,
page
});
}
/**
 * Split an `EntryKey` up into its components.
 */ export function splitEntryKey(key) {
return JSON.parse(key);
}

//# sourceMappingURL=entry-key.js.map
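To illustrate the helpers above: an entry key is just stable JSON, so getEntryKey and splitEntryKey round-trip. The values below are example inputs chosen by the editor, not taken from the diff:
// Editor's sketch; example values only.
const key = getEntryKey("app", "server", "/dashboard/page");
// key === '{"type":"app","side":"server","page":"/dashboard/page"}'
const { type, side, page } = splitEntryKey(key);
// type === "app", side === "server", page === "/dashboard/page"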
1
node_modules/next/dist/esm/server/dev/turbopack/entry-key.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/dev/turbopack/entry-key.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/dev/turbopack/entry-key.ts"],"names":["getEntryKey","type","side","page","JSON","stringify","splitEntryKey","key","parse"],"mappings":"AAAA;;;;;CAKC,GAQD;;CAEC,GACD,OAAO,SAASA,YACdC,IAAkB,EAClBC,IAAkB,EAClBC,IAAY;IAEZ,OAAOC,KAAKC,SAAS,CAAC;QAAEJ;QAAMC;QAAMC;IAAK;AAC3C;AAEA;;CAEC,GACD,OAAO,SAASG,cAAcC,GAAa;IAKzC,OAAOH,KAAKI,KAAK,CAACD;AACpB"}
312
node_modules/next/dist/esm/server/dev/turbopack/manifest-loader.js
generated
vendored
Normal file
312
node_modules/next/dist/esm/server/dev/turbopack/manifest-loader.js
generated
vendored
Normal file
@ -0,0 +1,312 @@
|
||||
import { pathToRegexp } from "next/dist/compiled/path-to-regexp";
import { APP_BUILD_MANIFEST, APP_PATHS_MANIFEST, AUTOMATIC_FONT_OPTIMIZATION_MANIFEST, BUILD_MANIFEST, INTERCEPTION_ROUTE_REWRITE_MANIFEST, MIDDLEWARE_BUILD_MANIFEST, MIDDLEWARE_MANIFEST, MIDDLEWARE_REACT_LOADABLE_MANIFEST, NEXT_FONT_MANIFEST, PAGES_MANIFEST, REACT_LOADABLE_MANIFEST, SERVER_REFERENCE_MANIFEST } from "../../../shared/lib/constants";
import { join, posix } from "path";
import { readFile, writeFile } from "fs/promises";
import { deleteCache } from "../../../build/webpack/plugins/nextjs-require-cache-hot-reloader";
import { writeFileAtomic } from "../../../lib/fs/write-atomic";
import { isInterceptionRouteRewrite } from "../../../lib/generate-interception-routes-rewrites";
import { normalizeRewritesForBuildManifest, srcEmptySsgManifest } from "../../../build/webpack/plugins/build-manifest-plugin";
import getAssetPathFromRoute from "../../../shared/lib/router/utils/get-asset-path-from-route";
import { getEntryKey } from "./entry-key";
async function readPartialManifest(distDir, name, pageName, type = "pages") {
const manifestPath = posix.join(distDir, `server`, type, type === "middleware" || type === "instrumentation" ? "" : type === "app" ? pageName : getAssetPathFromRoute(pageName), name);
return JSON.parse(await readFile(posix.join(manifestPath), "utf-8"));
}
export class TurbopackManifestLoader {
constructor({ distDir, buildId, encryptionKey }){
this.actionManifests = new Map();
this.appBuildManifests = new Map();
this.appPathsManifests = new Map();
this.buildManifests = new Map();
this.fontManifests = new Map();
this.loadableManifests = new Map();
this.middlewareManifests = new Map();
this.pagesManifests = new Map();
this.distDir = distDir;
this.buildId = buildId;
this.encryptionKey = encryptionKey;
}
delete(key) {
this.actionManifests.delete(key);
this.appBuildManifests.delete(key);
this.appPathsManifests.delete(key);
this.buildManifests.delete(key);
this.fontManifests.delete(key);
this.loadableManifests.delete(key);
this.middlewareManifests.delete(key);
this.pagesManifests.delete(key);
}
async loadActionManifest(pageName) {
this.actionManifests.set(getEntryKey("app", "server", pageName), await readPartialManifest(this.distDir, `${SERVER_REFERENCE_MANIFEST}.json`, pageName, "app"));
}
async mergeActionManifests(manifests) {
const manifest = {
node: {},
edge: {},
encryptionKey: this.encryptionKey
};
function mergeActionIds(actionEntries, other) {
for(const key in other){
const action = actionEntries[key] ??= {
workers: {},
layer: {}
};
Object.assign(action.workers, other[key].workers);
Object.assign(action.layer, other[key].layer);
}
}
for (const m of manifests){
mergeActionIds(manifest.node, m.node);
mergeActionIds(manifest.edge, m.edge);
}
return manifest;
}
async writeActionManifest() {
const actionManifest = await this.mergeActionManifests(this.actionManifests.values());
const actionManifestJsonPath = join(this.distDir, "server", `${SERVER_REFERENCE_MANIFEST}.json`);
const actionManifestJsPath = join(this.distDir, "server", `${SERVER_REFERENCE_MANIFEST}.js`);
const json = JSON.stringify(actionManifest, null, 2);
deleteCache(actionManifestJsonPath);
deleteCache(actionManifestJsPath);
await writeFile(actionManifestJsonPath, json, "utf-8");
await writeFile(actionManifestJsPath, `self.__RSC_SERVER_MANIFEST=${JSON.stringify(json)}`, "utf-8");
}
async loadAppBuildManifest(pageName) {
this.appBuildManifests.set(getEntryKey("app", "server", pageName), await readPartialManifest(this.distDir, APP_BUILD_MANIFEST, pageName, "app"));
}
mergeAppBuildManifests(manifests) {
const manifest = {
pages: {}
};
for (const m of manifests){
Object.assign(manifest.pages, m.pages);
}
return manifest;
}
async writeAppBuildManifest() {
const appBuildManifest = this.mergeAppBuildManifests(this.appBuildManifests.values());
const appBuildManifestPath = join(this.distDir, APP_BUILD_MANIFEST);
deleteCache(appBuildManifestPath);
await writeFileAtomic(appBuildManifestPath, JSON.stringify(appBuildManifest, null, 2));
}
async loadAppPathsManifest(pageName) {
this.appPathsManifests.set(getEntryKey("app", "server", pageName), await readPartialManifest(this.distDir, APP_PATHS_MANIFEST, pageName, "app"));
}
async writeAppPathsManifest() {
const appPathsManifest = this.mergePagesManifests(this.appPathsManifests.values());
const appPathsManifestPath = join(this.distDir, "server", APP_PATHS_MANIFEST);
deleteCache(appPathsManifestPath);
await writeFileAtomic(appPathsManifestPath, JSON.stringify(appPathsManifest, null, 2));
}
/**
   * Turbopack doesn't support this functionality, so it writes an empty manifest.
   */ async writeAutomaticFontOptimizationManifest() {
const manifestPath = join(this.distDir, "server", AUTOMATIC_FONT_OPTIMIZATION_MANIFEST);
await writeFileAtomic(manifestPath, JSON.stringify([]));
}
async loadBuildManifest(pageName, type = "pages") {
|
||||
this.buildManifests.set(getEntryKey(type, "server", pageName), await readPartialManifest(this.distDir, BUILD_MANIFEST, pageName, type));
|
||||
}
|
||||
mergeBuildManifests(manifests) {
|
||||
const manifest = {
|
||||
pages: {
|
||||
"/_app": []
|
||||
},
|
||||
// Something in next.js depends on these to exist even for app dir rendering
|
||||
devFiles: [],
|
||||
ampDevFiles: [],
|
||||
polyfillFiles: [],
|
||||
lowPriorityFiles: [
|
||||
"static/development/_ssgManifest.js",
|
||||
"static/development/_buildManifest.js"
|
||||
],
|
||||
rootMainFiles: [],
|
||||
ampFirstPages: []
|
||||
};
|
||||
for (const m of manifests){
|
||||
Object.assign(manifest.pages, m.pages);
|
||||
if (m.rootMainFiles.length) manifest.rootMainFiles = m.rootMainFiles;
|
||||
}
|
||||
return manifest;
|
||||
}
|
||||
async writeBuildManifest(pageEntrypoints, rewrites) {
|
||||
const buildManifest = this.mergeBuildManifests(this.buildManifests.values());
|
||||
const buildManifestPath = join(this.distDir, BUILD_MANIFEST);
|
||||
const middlewareBuildManifestPath = join(this.distDir, "server", `${MIDDLEWARE_BUILD_MANIFEST}.js`);
|
||||
const interceptionRewriteManifestPath = join(this.distDir, "server", `${INTERCEPTION_ROUTE_REWRITE_MANIFEST}.js`);
|
||||
deleteCache(buildManifestPath);
|
||||
deleteCache(middlewareBuildManifestPath);
|
||||
deleteCache(interceptionRewriteManifestPath);
|
||||
await writeFileAtomic(buildManifestPath, JSON.stringify(buildManifest, null, 2));
|
||||
await writeFileAtomic(middlewareBuildManifestPath, `self.__BUILD_MANIFEST=${JSON.stringify(buildManifest)};`);
|
||||
const interceptionRewrites = JSON.stringify(rewrites.beforeFiles.filter(isInterceptionRouteRewrite));
|
||||
await writeFileAtomic(interceptionRewriteManifestPath, `self.__INTERCEPTION_ROUTE_REWRITE_MANIFEST=${JSON.stringify(interceptionRewrites)};`);
|
||||
const content = {
|
||||
__rewrites: rewrites ? normalizeRewritesForBuildManifest(rewrites) : {
|
||||
afterFiles: [],
|
||||
beforeFiles: [],
|
||||
fallback: []
|
||||
},
|
||||
...Object.fromEntries([
|
||||
...pageEntrypoints.keys()
|
||||
].map((pathname)=>[
|
||||
pathname,
|
||||
`static/chunks/pages${pathname === "/" ? "/index" : pathname}.js`
|
||||
])),
|
||||
sortedPages: [
|
||||
...pageEntrypoints.keys()
|
||||
]
|
||||
};
|
||||
const buildManifestJs = `self.__BUILD_MANIFEST = ${JSON.stringify(content)};self.__BUILD_MANIFEST_CB && self.__BUILD_MANIFEST_CB()`;
|
||||
await writeFileAtomic(join(this.distDir, "static", this.buildId, "_buildManifest.js"), buildManifestJs);
|
||||
await writeFileAtomic(join(this.distDir, "static", this.buildId, "_ssgManifest.js"), srcEmptySsgManifest);
|
||||
}
|
||||
async writeFallbackBuildManifest() {
|
||||
const fallbackBuildManifest = this.mergeBuildManifests([
|
||||
this.buildManifests.get(getEntryKey("pages", "server", "_app")),
|
||||
this.buildManifests.get(getEntryKey("pages", "server", "_error"))
|
||||
].filter(Boolean));
|
||||
const fallbackBuildManifestPath = join(this.distDir, `fallback-${BUILD_MANIFEST}`);
|
||||
deleteCache(fallbackBuildManifestPath);
|
||||
await writeFileAtomic(fallbackBuildManifestPath, JSON.stringify(fallbackBuildManifest, null, 2));
|
||||
}
|
||||
async loadFontManifest(pageName, type = "pages") {
|
||||
this.fontManifests.set(getEntryKey(type, "server", pageName), await readPartialManifest(this.distDir, `${NEXT_FONT_MANIFEST}.json`, pageName, type));
|
||||
}
|
||||
mergeFontManifests(manifests) {
|
||||
const manifest = {
|
||||
app: {},
|
||||
appUsingSizeAdjust: false,
|
||||
pages: {},
|
||||
pagesUsingSizeAdjust: false
|
||||
};
|
||||
for (const m of manifests){
|
||||
Object.assign(manifest.app, m.app);
|
||||
Object.assign(manifest.pages, m.pages);
|
||||
manifest.appUsingSizeAdjust = manifest.appUsingSizeAdjust || m.appUsingSizeAdjust;
|
||||
manifest.pagesUsingSizeAdjust = manifest.pagesUsingSizeAdjust || m.pagesUsingSizeAdjust;
|
||||
}
|
||||
return manifest;
|
||||
}
|
||||
async writeNextFontManifest() {
|
||||
const fontManifest = this.mergeFontManifests(this.fontManifests.values());
|
||||
const json = JSON.stringify(fontManifest, null, 2);
|
||||
const fontManifestJsonPath = join(this.distDir, "server", `${NEXT_FONT_MANIFEST}.json`);
|
||||
const fontManifestJsPath = join(this.distDir, "server", `${NEXT_FONT_MANIFEST}.js`);
|
||||
deleteCache(fontManifestJsonPath);
|
||||
deleteCache(fontManifestJsPath);
|
||||
await writeFileAtomic(fontManifestJsonPath, json);
|
||||
await writeFileAtomic(fontManifestJsPath, `self.__NEXT_FONT_MANIFEST=${JSON.stringify(json)}`);
|
||||
}
|
||||
async loadLoadableManifest(pageName, type = "pages") {
|
||||
this.loadableManifests.set(getEntryKey(type, "server", pageName), await readPartialManifest(this.distDir, REACT_LOADABLE_MANIFEST, pageName, type));
|
||||
}
|
||||
mergeLoadableManifests(manifests) {
|
||||
const manifest = {};
|
||||
for (const m of manifests){
|
||||
Object.assign(manifest, m);
|
||||
}
|
||||
return manifest;
|
||||
}
|
||||
async writeLoadableManifest() {
|
||||
const loadableManifest = this.mergeLoadableManifests(this.loadableManifests.values());
|
||||
const loadableManifestPath = join(this.distDir, REACT_LOADABLE_MANIFEST);
|
||||
const middlewareloadableManifestPath = join(this.distDir, "server", `${MIDDLEWARE_REACT_LOADABLE_MANIFEST}.js`);
|
||||
const json = JSON.stringify(loadableManifest, null, 2);
|
||||
deleteCache(loadableManifestPath);
|
||||
deleteCache(middlewareloadableManifestPath);
|
||||
await writeFileAtomic(loadableManifestPath, json);
|
||||
await writeFileAtomic(middlewareloadableManifestPath, `self.__REACT_LOADABLE_MANIFEST=${JSON.stringify(json)}`);
|
||||
}
|
||||
async loadMiddlewareManifest(pageName, type) {
this.middlewareManifests.set(getEntryKey(type === "middleware" || type === "instrumentation" ? "root" : type, "server", pageName), await readPartialManifest(this.distDir, MIDDLEWARE_MANIFEST, pageName, type));
}
getMiddlewareManifest(key) {
return this.middlewareManifests.get(key);
}
deleteMiddlewareManifest(key) {
return this.middlewareManifests.delete(key);
}
mergeMiddlewareManifests(manifests) {
const manifest = {
version: 3,
middleware: {},
sortedMiddleware: [],
functions: {}
};
let instrumentation = undefined;
for (const m of manifests){
Object.assign(manifest.functions, m.functions);
Object.assign(manifest.middleware, m.middleware);
if (m.instrumentation) {
instrumentation = m.instrumentation;
}
}
const updateFunctionDefinition = (fun)=>{
return {
...fun,
files: [
...(instrumentation == null ? void 0 : instrumentation.files) ?? [],
...fun.files
]
};
};
for (const key of Object.keys(manifest.middleware)){
const value = manifest.middleware[key];
manifest.middleware[key] = updateFunctionDefinition(value);
}
for (const key of Object.keys(manifest.functions)){
const value = manifest.functions[key];
manifest.functions[key] = updateFunctionDefinition(value);
}
for (const fun of Object.values(manifest.functions).concat(Object.values(manifest.middleware))){
for (const matcher of fun.matchers){
if (!matcher.regexp) {
matcher.regexp = pathToRegexp(matcher.originalSource, [], {
delimiter: "/",
sensitive: false,
strict: true
}).source.replaceAll("\\/", "/");
}
}
}
manifest.sortedMiddleware = Object.keys(manifest.middleware);
return manifest;
}
async writeMiddlewareManifest() {
const middlewareManifest = this.mergeMiddlewareManifests(this.middlewareManifests.values());
const middlewareManifestPath = join(this.distDir, "server", MIDDLEWARE_MANIFEST);
deleteCache(middlewareManifestPath);
await writeFileAtomic(middlewareManifestPath, JSON.stringify(middlewareManifest, null, 2));
}
async loadPagesManifest(pageName) {
this.pagesManifests.set(getEntryKey("pages", "server", pageName), await readPartialManifest(this.distDir, PAGES_MANIFEST, pageName));
}
mergePagesManifests(manifests) {
const manifest = {};
for (const m of manifests){
Object.assign(manifest, m);
}
return manifest;
}
async writePagesManifest() {
const pagesManifest = this.mergePagesManifests(this.pagesManifests.values());
const pagesManifestPath = join(this.distDir, "server", PAGES_MANIFEST);
deleteCache(pagesManifestPath);
await writeFileAtomic(pagesManifestPath, JSON.stringify(pagesManifest, null, 2));
}
async writeManifests({ rewrites, pageEntrypoints }) {
await this.writeActionManifest();
await this.writeAppBuildManifest();
await this.writeAppPathsManifest();
await this.writeAutomaticFontOptimizationManifest();
await this.writeBuildManifest(pageEntrypoints, rewrites);
await this.writeFallbackBuildManifest();
await this.writeLoadableManifest();
await this.writeMiddlewareManifest();
await this.writeNextFontManifest();
await this.writePagesManifest();
}
}

//# sourceMappingURL=manifest-loader.js.map
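A hedged sketch of how the TurbopackManifestLoader above might be driven for a single pages route: load the partial manifests Turbopack wrote for the endpoint, then flush everything in one writeManifests call. The constructor values, route name, and empty rewrites below are editor assumptions for illustration, not taken from the diff:
// Editor's sketch; values are placeholders.
const loader = new TurbopackManifestLoader({
    distDir: ".next", // assumed output directory
    buildId: "development",
    encryptionKey: "dev-only-placeholder"
});
// Assuming Turbopack has already written the endpoint for "/about" to disk.
await loader.loadPagesManifest("/about");
await loader.loadBuildManifest("/about", "pages");
await loader.writeManifests({
    rewrites: { beforeFiles: [], afterFiles: [], fallback: [] },
    pageEntrypoints: new Map() // pathname -> route, as used by writeBuildManifest
});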
1
node_modules/next/dist/esm/server/dev/turbopack/manifest-loader.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/dev/turbopack/manifest-loader.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
node_modules/next/dist/esm/server/dev/turbopack/types.js
generated
vendored
Normal file
3
node_modules/next/dist/esm/server/dev/turbopack/types.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
export { };

//# sourceMappingURL=types.js.map
1
node_modules/next/dist/esm/server/dev/turbopack/types.js.map
generated
vendored
Normal file
1
node_modules/next/dist/esm/server/dev/turbopack/types.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/dev/turbopack/types.ts"],"names":[],"mappings":"AAuCA,WAKC"}