Initial boilerplate project
14  node_modules/next/dist/server/lib/incremental-cache/fetch-cache.d.ts  generated  vendored  Normal file
@@ -0,0 +1,14 @@
import type { CacheHandler, CacheHandlerContext, CacheHandlerValue } from './';
export default class FetchCache implements CacheHandler {
    private headers;
    private cacheEndpoint?;
    private hasMatchingTags;
    static isAvailable(ctx: {
        _requestHeaders: CacheHandlerContext['_requestHeaders'];
    }): boolean;
    constructor(ctx: CacheHandlerContext);
    resetRequestCache(): void;
    revalidateTag(...args: Parameters<CacheHandler['revalidateTag']>): Promise<void>;
    get(...args: Parameters<CacheHandler['get']>): Promise<CacheHandlerValue | null>;
    set(...args: Parameters<CacheHandler['set']>): Promise<void>;
}
320  node_modules/next/dist/server/lib/incremental-cache/fetch-cache.js  generated  vendored  Normal file
@@ -0,0 +1,320 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "default", {
    enumerable: true,
    get: function() {
        return FetchCache;
    }
});
const _lrucache = /*#__PURE__*/ _interop_require_default(require("next/dist/compiled/lru-cache"));
const _constants = require("../../../lib/constants");
function _interop_require_default(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
let rateLimitedUntil = 0;
let memoryCache;
const CACHE_TAGS_HEADER = "x-vercel-cache-tags";
const CACHE_HEADERS_HEADER = "x-vercel-sc-headers";
const CACHE_STATE_HEADER = "x-vercel-cache-state";
const CACHE_REVALIDATE_HEADER = "x-vercel-revalidate";
const CACHE_FETCH_URL_HEADER = "x-vercel-cache-item-name";
const CACHE_CONTROL_VALUE_HEADER = "x-vercel-cache-control";
const DEBUG = Boolean(process.env.NEXT_PRIVATE_DEBUG_CACHE);
async function fetchRetryWithTimeout(url, init, retryIndex = 0) {
    const controller = new AbortController();
    const timeout = setTimeout(()=>{
        controller.abort();
    }, 500);
    return fetch(url, {
        ...init || {},
        signal: controller.signal
    }).catch((err)=>{
        if (retryIndex === 3) {
            throw err;
        } else {
            if (DEBUG) {
                console.log(`Fetch failed for ${url} retry ${retryIndex}`);
            }
            return fetchRetryWithTimeout(url, init, retryIndex + 1);
        }
    }).finally(()=>{
        clearTimeout(timeout);
    });
}
class FetchCache {
    hasMatchingTags(arr1, arr2) {
        if (arr1.length !== arr2.length) return false;
        const set1 = new Set(arr1);
        const set2 = new Set(arr2);
        if (set1.size !== set2.size) return false;
        for (let tag of set1){
            if (!set2.has(tag)) return false;
        }
        return true;
    }
    static isAvailable(ctx) {
        return !!(ctx._requestHeaders["x-vercel-sc-host"] || process.env.SUSPENSE_CACHE_URL);
    }
    constructor(ctx){
        this.headers = {};
        this.headers["Content-Type"] = "application/json";
        if (CACHE_HEADERS_HEADER in ctx._requestHeaders) {
            const newHeaders = JSON.parse(ctx._requestHeaders[CACHE_HEADERS_HEADER]);
            for(const k in newHeaders){
                this.headers[k] = newHeaders[k];
            }
            delete ctx._requestHeaders[CACHE_HEADERS_HEADER];
        }
        const scHost = ctx._requestHeaders["x-vercel-sc-host"] || process.env.SUSPENSE_CACHE_URL;
        const scBasePath = ctx._requestHeaders["x-vercel-sc-basepath"] || process.env.SUSPENSE_CACHE_BASEPATH;
        if (process.env.SUSPENSE_CACHE_AUTH_TOKEN) {
            this.headers["Authorization"] = `Bearer ${process.env.SUSPENSE_CACHE_AUTH_TOKEN}`;
        }
        if (scHost) {
            const scProto = process.env.SUSPENSE_CACHE_PROTO || "https";
            this.cacheEndpoint = `${scProto}://${scHost}${scBasePath || ""}`;
            if (DEBUG) {
                console.log("using cache endpoint", this.cacheEndpoint);
            }
        } else if (DEBUG) {
            console.log("no cache endpoint available");
        }
        if (ctx.maxMemoryCacheSize) {
            if (!memoryCache) {
                if (DEBUG) {
                    console.log("using memory store for fetch cache");
                }
                memoryCache = new _lrucache.default({
                    max: ctx.maxMemoryCacheSize,
                    length ({ value }) {
                        var _JSON_stringify;
                        if (!value) {
                            return 25;
                        } else if (value.kind === "REDIRECT") {
                            return JSON.stringify(value.props).length;
                        } else if (value.kind === "IMAGE") {
                            throw new Error("invariant image should not be incremental-cache");
                        } else if (value.kind === "FETCH") {
                            return JSON.stringify(value.data || "").length;
                        } else if (value.kind === "ROUTE") {
                            return value.body.length;
                        }
                        // rough estimate of size of cache value
                        return value.html.length + (((_JSON_stringify = JSON.stringify(value.kind === "PAGE" && value.pageData)) == null ? void 0 : _JSON_stringify.length) || 0);
                    }
                });
            }
        } else {
            if (DEBUG) {
                console.log("not using memory store for fetch cache");
            }
        }
    }
    resetRequestCache() {
        memoryCache == null ? void 0 : memoryCache.reset();
    }
    async revalidateTag(...args) {
        let [tags] = args;
        tags = typeof tags === "string" ? [
            tags
        ] : tags;
        if (DEBUG) {
            console.log("revalidateTag", tags);
        }
        if (!tags.length) return;
        if (Date.now() < rateLimitedUntil) {
            if (DEBUG) {
                console.log("rate limited ", rateLimitedUntil);
            }
            return;
        }
        for(let i = 0; i < Math.ceil(tags.length / 64); i++){
            const currentTags = tags.slice(i * 64, i * 64 + 64);
            try {
                const res = await fetchRetryWithTimeout(`${this.cacheEndpoint}/v1/suspense-cache/revalidate?tags=${currentTags.map((tag)=>encodeURIComponent(tag)).join(",")}`, {
                    method: "POST",
                    headers: this.headers,
                    // @ts-expect-error not on public type
                    next: {
                        internal: true
                    }
                });
                if (res.status === 429) {
                    const retryAfter = res.headers.get("retry-after") || "60000";
                    rateLimitedUntil = Date.now() + parseInt(retryAfter);
                }
                if (!res.ok) {
                    throw new Error(`Request failed with status ${res.status}.`);
                }
            } catch (err) {
                console.warn(`Failed to revalidate tag`, currentTags, err);
            }
        }
    }
    async get(...args) {
        var _data_value;
        const [key, ctx = {}] = args;
        const { tags, softTags, kindHint, fetchIdx, fetchUrl } = ctx;
        if (kindHint !== "fetch") {
            return null;
        }
        if (Date.now() < rateLimitedUntil) {
            if (DEBUG) {
                console.log("rate limited");
            }
            return null;
        }
        // memory cache is cleared at the end of each request
        // so that revalidate events are pulled from upstream
        // on successive requests
        let data = memoryCache == null ? void 0 : memoryCache.get(key);
        const hasFetchKindAndMatchingTags = (data == null ? void 0 : (_data_value = data.value) == null ? void 0 : _data_value.kind) === "FETCH" && this.hasMatchingTags(tags ?? [], data.value.tags ?? []);
        // Get data from fetch cache. Also check if new tags have been
        // specified with the same cache key (fetch URL)
        if (this.cacheEndpoint && (!data || !hasFetchKindAndMatchingTags)) {
            try {
                const start = Date.now();
                const fetchParams = {
                    internal: true,
                    fetchType: "cache-get",
                    fetchUrl: fetchUrl,
                    fetchIdx
                };
                const res = await fetch(`${this.cacheEndpoint}/v1/suspense-cache/${key}`, {
                    method: "GET",
                    headers: {
                        ...this.headers,
                        [CACHE_FETCH_URL_HEADER]: fetchUrl,
                        [CACHE_TAGS_HEADER]: (tags == null ? void 0 : tags.join(",")) || "",
                        [_constants.NEXT_CACHE_SOFT_TAGS_HEADER]: (softTags == null ? void 0 : softTags.join(",")) || ""
                    },
                    next: fetchParams
                });
                if (res.status === 429) {
                    const retryAfter = res.headers.get("retry-after") || "60000";
                    rateLimitedUntil = Date.now() + parseInt(retryAfter);
                }
                if (res.status === 404) {
                    if (DEBUG) {
                        console.log(`no fetch cache entry for ${key}, duration: ${Date.now() - start}ms`);
                    }
                    return null;
                }
                if (!res.ok) {
                    console.error(await res.text());
                    throw new Error(`invalid response from cache ${res.status}`);
                }
                const cached = await res.json();
                if (!cached || cached.kind !== "FETCH") {
                    DEBUG && console.log({
                        cached
                    });
                    throw new Error("invalid cache value");
                }
                // if new tags were specified, merge those tags to the existing tags
                if (cached.kind === "FETCH") {
                    cached.tags ??= [];
                    for (const tag of tags ?? []){
                        if (!cached.tags.includes(tag)) {
                            cached.tags.push(tag);
                        }
                    }
                }
                const cacheState = res.headers.get(CACHE_STATE_HEADER);
                const age = res.headers.get("age");
                data = {
                    value: cached,
                    // if it's already stale set it to a time in the past
                    // if not derive last modified from age
                    lastModified: cacheState !== "fresh" ? Date.now() - _constants.CACHE_ONE_YEAR : Date.now() - parseInt(age || "0", 10) * 1000
                };
                if (DEBUG) {
                    console.log(`got fetch cache entry for ${key}, duration: ${Date.now() - start}ms, size: ${Object.keys(cached).length}, cache-state: ${cacheState} tags: ${tags == null ? void 0 : tags.join(",")} softTags: ${softTags == null ? void 0 : softTags.join(",")}`);
                }
                if (data) {
                    memoryCache == null ? void 0 : memoryCache.set(key, data);
                }
            } catch (err) {
                // unable to get data from fetch-cache
                if (DEBUG) {
                    console.error(`Failed to get from fetch-cache`, err);
                }
            }
        }
        return data || null;
    }
    async set(...args) {
        const [key, data, ctx] = args;
        const { fetchCache, fetchIdx, fetchUrl, tags } = ctx;
        if (!fetchCache) return;
        if (Date.now() < rateLimitedUntil) {
            if (DEBUG) {
                console.log("rate limited");
            }
            return;
        }
        memoryCache == null ? void 0 : memoryCache.set(key, {
            value: data,
            lastModified: Date.now()
        });
        if (this.cacheEndpoint) {
            try {
                const start = Date.now();
                if (data !== null && "revalidate" in data) {
                    this.headers[CACHE_REVALIDATE_HEADER] = data.revalidate.toString();
                }
                if (!this.headers[CACHE_REVALIDATE_HEADER] && data !== null && "data" in data) {
                    this.headers[CACHE_CONTROL_VALUE_HEADER] = data.data.headers["cache-control"];
                }
                const body = JSON.stringify({
                    ...data,
                    // we send the tags in the header instead
                    // of in the body here
                    tags: undefined
                });
                if (DEBUG) {
                    console.log("set cache", key);
                }
                const fetchParams = {
                    internal: true,
                    fetchType: "cache-set",
                    fetchUrl,
                    fetchIdx
                };
                const res = await fetch(`${this.cacheEndpoint}/v1/suspense-cache/${key}`, {
                    method: "POST",
                    headers: {
                        ...this.headers,
                        [CACHE_FETCH_URL_HEADER]: fetchUrl || "",
                        [CACHE_TAGS_HEADER]: (tags == null ? void 0 : tags.join(",")) || ""
                    },
                    body: body,
                    next: fetchParams
                });
                if (res.status === 429) {
                    const retryAfter = res.headers.get("retry-after") || "60000";
                    rateLimitedUntil = Date.now() + parseInt(retryAfter);
                }
                if (!res.ok) {
                    DEBUG && console.log(await res.text());
                    throw new Error(`invalid response ${res.status}`);
                }
                if (DEBUG) {
                    console.log(`successfully set to fetch-cache for ${key}, duration: ${Date.now() - start}ms, size: ${body.length}`);
                }
            } catch (err) {
                // unable to set to fetch-cache
                if (DEBUG) {
                    console.error(`Failed to update fetch cache`, err);
                }
            }
        }
        return;
    }
}

//# sourceMappingURL=fetch-cache.js.map
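A note on the helper at the top of this file: fetchRetryWithTimeout gives each attempt a 500 ms AbortController budget and retries a failed attempt up to three times before rethrowing. The pattern is reusable outside this file; a standalone TypeScript sketch (the function and parameter names are ours, the 500 ms / 3-retry constants mirror the vendored code):

// Fetch with a per-attempt timeout, retrying failed attempts up to maxRetries.
// Mirrors the shape of fetchRetryWithTimeout in the vendored fetch-cache.js.
async function fetchWithTimeoutAndRetry(
    url: string,
    init?: RequestInit,
    retryIndex = 0,
    maxRetries = 3,
    timeoutMs = 500
): Promise<Response> {
    const controller = new AbortController();
    // Abort the in-flight request if it exceeds the per-attempt budget.
    const timeout = setTimeout(() => controller.abort(), timeoutMs);
    try {
        return await fetch(url, { ...init, signal: controller.signal });
    } catch (err) {
        if (retryIndex >= maxRetries) throw err;
        // Each retry gets a fresh AbortController and a fresh timer.
        return fetchWithTimeoutAndRetry(url, init, retryIndex + 1, maxRetries, timeoutMs);
    } finally {
        clearTimeout(timeout);
    }
}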
1  node_modules/next/dist/server/lib/incremental-cache/fetch-cache.js.map  generated  vendored  Normal file
File diff suppressed because one or more lines are too long
29  node_modules/next/dist/server/lib/incremental-cache/file-system-cache.d.ts  generated  vendored  Normal file
@@ -0,0 +1,29 @@
import type { CacheHandler, CacheHandlerContext, CacheHandlerValue } from './';
import type { CacheFs } from '../../../shared/lib/utils';
type FileSystemCacheContext = Omit<CacheHandlerContext, 'fs' | 'serverDistDir'> & {
    fs: CacheFs;
    serverDistDir: string;
    experimental: {
        ppr: boolean;
    };
};
export default class FileSystemCache implements CacheHandler {
    private fs;
    private flushToDisk?;
    private serverDistDir;
    private appDir;
    private pagesDir;
    private tagsManifestPath?;
    private revalidatedTags;
    private readonly experimental;
    private debug;
    constructor(ctx: FileSystemCacheContext);
    resetRequestCache(): void;
    private loadTagsManifest;
    revalidateTag(...args: Parameters<CacheHandler['revalidateTag']>): Promise<void>;
    get(...args: Parameters<CacheHandler['get']>): Promise<CacheHandlerValue | null>;
    set(...args: Parameters<CacheHandler['set']>): Promise<void>;
    private detectFileKind;
    private getFilePath;
}
export {};
322  node_modules/next/dist/server/lib/incremental-cache/file-system-cache.js  generated  vendored  Normal file
@@ -0,0 +1,322 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "default", {
    enumerable: true,
    get: function() {
        return FileSystemCache;
    }
});
const _lrucache = /*#__PURE__*/ _interop_require_default(require("next/dist/compiled/lru-cache"));
const _path = /*#__PURE__*/ _interop_require_default(require("../../../shared/lib/isomorphic/path"));
const _constants = require("../../../lib/constants");
function _interop_require_default(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
let memoryCache;
let tagsManifest;
class FileSystemCache {
    constructor(ctx){
        this.fs = ctx.fs;
        this.flushToDisk = ctx.flushToDisk;
        this.serverDistDir = ctx.serverDistDir;
        this.appDir = !!ctx._appDir;
        this.pagesDir = !!ctx._pagesDir;
        this.revalidatedTags = ctx.revalidatedTags;
        this.experimental = ctx.experimental;
        this.debug = !!process.env.NEXT_PRIVATE_DEBUG_CACHE;
        if (ctx.maxMemoryCacheSize && !memoryCache) {
            if (this.debug) {
                console.log("using memory store for fetch cache");
            }
            memoryCache = new _lrucache.default({
                max: ctx.maxMemoryCacheSize,
                length ({ value }) {
                    var _JSON_stringify;
                    if (!value) {
                        return 25;
                    } else if (value.kind === "REDIRECT") {
                        return JSON.stringify(value.props).length;
                    } else if (value.kind === "IMAGE") {
                        throw new Error("invariant image should not be incremental-cache");
                    } else if (value.kind === "FETCH") {
                        return JSON.stringify(value.data || "").length;
                    } else if (value.kind === "ROUTE") {
                        return value.body.length;
                    }
                    // rough estimate of size of cache value
                    return value.html.length + (((_JSON_stringify = JSON.stringify(value.pageData)) == null ? void 0 : _JSON_stringify.length) || 0);
                }
            });
        } else if (this.debug) {
            console.log("not using memory store for fetch cache");
        }
        if (this.serverDistDir && this.fs) {
            this.tagsManifestPath = _path.default.join(this.serverDistDir, "..", "cache", "fetch-cache", "tags-manifest.json");
            this.loadTagsManifest();
        }
    }
    resetRequestCache() {}
    loadTagsManifest() {
        if (!this.tagsManifestPath || !this.fs || tagsManifest) return;
        try {
            tagsManifest = JSON.parse(this.fs.readFileSync(this.tagsManifestPath, "utf8"));
        } catch (err) {
            tagsManifest = {
                version: 1,
                items: {}
            };
        }
        if (this.debug) console.log("loadTagsManifest", tagsManifest);
    }
    async revalidateTag(...args) {
        let [tags] = args;
        tags = typeof tags === "string" ? [
            tags
        ] : tags;
        if (this.debug) {
            console.log("revalidateTag", tags);
        }
        if (tags.length === 0) {
            return;
        }
        // we need to ensure the tagsManifest is refreshed
        // since separate workers can be updating it at the same
        // time and we can't flush out of sync data
        await this.loadTagsManifest();
        if (!tagsManifest || !this.tagsManifestPath) {
            return;
        }
        for (const tag of tags){
            const data = tagsManifest.items[tag] || {};
            data.revalidatedAt = Date.now();
            tagsManifest.items[tag] = data;
        }
        try {
            await this.fs.mkdir(_path.default.dirname(this.tagsManifestPath));
            await this.fs.writeFile(this.tagsManifestPath, JSON.stringify(tagsManifest || {}));
            if (this.debug) {
                console.log("Updated tags manifest", tagsManifest);
            }
        } catch (err) {
            console.warn("Failed to update tags manifest.", err);
        }
    }
    async get(...args) {
        var _data_value, _data_value1;
        const [key, ctx = {}] = args;
        const { tags, softTags, kindHint } = ctx;
        let data = memoryCache == null ? void 0 : memoryCache.get(key);
        if (this.debug) {
            console.log("get", key, tags, kindHint, !!data);
        }
        // let's check the disk for seed data
        if (!data && process.env.NEXT_RUNTIME !== "edge") {
            try {
                const filePath = this.getFilePath(`${key}.body`, "app");
                const fileData = await this.fs.readFile(filePath);
                const { mtime } = await this.fs.stat(filePath);
                const meta = JSON.parse(await this.fs.readFile(filePath.replace(/\.body$/, _constants.NEXT_META_SUFFIX), "utf8"));
                const cacheEntry = {
                    lastModified: mtime.getTime(),
                    value: {
                        kind: "ROUTE",
                        body: fileData,
                        headers: meta.headers,
                        status: meta.status
                    }
                };
                return cacheEntry;
            } catch (_) {
            // no .meta data for the related key
            }
            try {
                // Determine the file kind if we didn't know it already.
                let kind = kindHint;
                if (!kind) {
                    kind = this.detectFileKind(`${key}.html`);
                }
                const isAppPath = kind === "app";
                const filePath = this.getFilePath(kind === "fetch" ? key : `${key}.html`, kind);
                const fileData = await this.fs.readFile(filePath, "utf8");
                const { mtime } = await this.fs.stat(filePath);
                if (kind === "fetch" && this.flushToDisk) {
                    var _data_value2;
                    const lastModified = mtime.getTime();
                    const parsedData = JSON.parse(fileData);
                    data = {
                        lastModified,
                        value: parsedData
                    };
                    if (((_data_value2 = data.value) == null ? void 0 : _data_value2.kind) === "FETCH") {
                        var _data_value3;
                        const storedTags = (_data_value3 = data.value) == null ? void 0 : _data_value3.tags;
                        // update stored tags if a new one is being added
                        // TODO: remove this when we can send the tags
                        // via header on GET same as SET
                        if (!(tags == null ? void 0 : tags.every((tag)=>storedTags == null ? void 0 : storedTags.includes(tag)))) {
                            if (this.debug) {
                                console.log("tags vs storedTags mismatch", tags, storedTags);
                            }
                            await this.set(key, data.value, {
                                tags
                            });
                        }
                    }
                } else {
                    const pageData = isAppPath ? await this.fs.readFile(this.getFilePath(`${key}${this.experimental.ppr ? _constants.RSC_PREFETCH_SUFFIX : _constants.RSC_SUFFIX}`, "app"), "utf8") : JSON.parse(await this.fs.readFile(this.getFilePath(`${key}${_constants.NEXT_DATA_SUFFIX}`, "pages"), "utf8"));
                    let meta;
                    if (isAppPath) {
                        try {
                            meta = JSON.parse(await this.fs.readFile(filePath.replace(/\.html$/, _constants.NEXT_META_SUFFIX), "utf8"));
                        } catch {}
                    }
                    data = {
                        lastModified: mtime.getTime(),
                        value: {
                            kind: "PAGE",
                            html: fileData,
                            pageData,
                            postponed: meta == null ? void 0 : meta.postponed,
                            headers: meta == null ? void 0 : meta.headers,
                            status: meta == null ? void 0 : meta.status
                        }
                    };
                }
                if (data) {
                    memoryCache == null ? void 0 : memoryCache.set(key, data);
                }
            } catch (_) {
            // unable to get data from disk
            }
        }
        if ((data == null ? void 0 : (_data_value = data.value) == null ? void 0 : _data_value.kind) === "PAGE") {
            var _data_value_headers;
            let cacheTags;
            const tagsHeader = (_data_value_headers = data.value.headers) == null ? void 0 : _data_value_headers[_constants.NEXT_CACHE_TAGS_HEADER];
            if (typeof tagsHeader === "string") {
                cacheTags = tagsHeader.split(",");
            }
            if (cacheTags == null ? void 0 : cacheTags.length) {
                this.loadTagsManifest();
                const isStale = cacheTags.some((tag)=>{
                    var _tagsManifest_items_tag;
                    return (tagsManifest == null ? void 0 : (_tagsManifest_items_tag = tagsManifest.items[tag]) == null ? void 0 : _tagsManifest_items_tag.revalidatedAt) && (tagsManifest == null ? void 0 : tagsManifest.items[tag].revalidatedAt) >= ((data == null ? void 0 : data.lastModified) || Date.now());
                });
                // we trigger a blocking validation if an ISR page
                // had a tag revalidated, if we want to be a background
                // revalidation instead we return data.lastModified = -1
                if (isStale) {
                    data = undefined;
                }
            }
        }
        if (data && (data == null ? void 0 : (_data_value1 = data.value) == null ? void 0 : _data_value1.kind) === "FETCH") {
            this.loadTagsManifest();
            const combinedTags = [
                ...tags || [],
                ...softTags || []
            ];
            const wasRevalidated = combinedTags.some((tag)=>{
                var _tagsManifest_items_tag;
                if (this.revalidatedTags.includes(tag)) {
                    return true;
                }
                return (tagsManifest == null ? void 0 : (_tagsManifest_items_tag = tagsManifest.items[tag]) == null ? void 0 : _tagsManifest_items_tag.revalidatedAt) && (tagsManifest == null ? void 0 : tagsManifest.items[tag].revalidatedAt) >= ((data == null ? void 0 : data.lastModified) || Date.now());
            });
            // When revalidate tag is called we don't return
            // stale data so it's updated right away
            if (wasRevalidated) {
                data = undefined;
            }
        }
        return data ?? null;
    }
    async set(...args) {
        const [key, data, ctx] = args;
        memoryCache == null ? void 0 : memoryCache.set(key, {
            value: data,
            lastModified: Date.now()
        });
        if (this.debug) {
            console.log("set", key);
        }
        if (!this.flushToDisk) return;
        if ((data == null ? void 0 : data.kind) === "ROUTE") {
            const filePath = this.getFilePath(`${key}.body`, "app");
            await this.fs.mkdir(_path.default.dirname(filePath));
            await this.fs.writeFile(filePath, data.body);
            const meta = {
                headers: data.headers,
                status: data.status,
                postponed: undefined
            };
            await this.fs.writeFile(filePath.replace(/\.body$/, _constants.NEXT_META_SUFFIX), JSON.stringify(meta, null, 2));
            return;
        }
        if ((data == null ? void 0 : data.kind) === "PAGE") {
            const isAppPath = typeof data.pageData === "string";
            const htmlPath = this.getFilePath(`${key}.html`, isAppPath ? "app" : "pages");
            await this.fs.mkdir(_path.default.dirname(htmlPath));
            await this.fs.writeFile(htmlPath, data.html);
            await this.fs.writeFile(this.getFilePath(`${key}${isAppPath ? this.experimental.ppr ? _constants.RSC_PREFETCH_SUFFIX : _constants.RSC_SUFFIX : _constants.NEXT_DATA_SUFFIX}`, isAppPath ? "app" : "pages"), isAppPath ? data.pageData : JSON.stringify(data.pageData));
            if (data.headers || data.status) {
                const meta = {
                    headers: data.headers,
                    status: data.status,
                    postponed: data.postponed
                };
                await this.fs.writeFile(htmlPath.replace(/\.html$/, _constants.NEXT_META_SUFFIX), JSON.stringify(meta));
            }
        } else if ((data == null ? void 0 : data.kind) === "FETCH") {
            const filePath = this.getFilePath(key, "fetch");
            await this.fs.mkdir(_path.default.dirname(filePath));
            await this.fs.writeFile(filePath, JSON.stringify({
                ...data,
                tags: ctx.tags
            }));
        }
    }
    detectFileKind(pathname) {
        if (!this.appDir && !this.pagesDir) {
            throw new Error("Invariant: Can't determine file path kind, no page directory enabled");
        }
        // If app directory isn't enabled, then assume it's pages and avoid the fs
        // hit.
        if (!this.appDir && this.pagesDir) {
            return "pages";
        } else if (this.appDir && !this.pagesDir) {
            return "app";
        }
        // If both are enabled, we need to test each in order, starting with
        // `pages`.
        let filePath = this.getFilePath(pathname, "pages");
        if (this.fs.existsSync(filePath)) {
            return "pages";
        }
        filePath = this.getFilePath(pathname, "app");
        if (this.fs.existsSync(filePath)) {
            return "app";
        }
        throw new Error(`Invariant: Unable to determine file path kind for ${pathname}`);
    }
    getFilePath(pathname, kind) {
        switch(kind){
            case "fetch":
                // we store in .next/cache/fetch-cache so it can be persisted
                // across deploys
                return _path.default.join(this.serverDistDir, "..", "cache", "fetch-cache", pathname);
            case "pages":
                return _path.default.join(this.serverDistDir, "pages", pathname);
            case "app":
                return _path.default.join(this.serverDistDir, "app", pathname);
            default:
                throw new Error("Invariant: Can't determine file path kind");
        }
    }
}

//# sourceMappingURL=file-system-cache.js.map
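One behavior of FileSystemCache worth calling out: revalidateTag never deletes cached files. It only stamps a revalidatedAt time per tag into tags-manifest.json, and get() then treats an entry as stale when any of its tags carries a stamp at or after the entry's lastModified. A reduced TypeScript sketch of that bookkeeping (the type and function names here are illustrative, not from this commit):

interface TagsManifest {
    version: number;
    items: Record<string, { revalidatedAt?: number }>;
}

// An entry is stale if any associated tag was revalidated at or after the
// time the entry was written; mirrors the check in file-system-cache.js.
function isEntryStale(
    manifest: TagsManifest,
    entryTags: string[],
    lastModified: number
): boolean {
    return entryTags.some((tag) => {
        const revalidatedAt = manifest.items[tag]?.revalidatedAt;
        return revalidatedAt !== undefined && revalidatedAt >= lastModified;
    });
}

// revalidateTag only stamps the manifest; stale files stay on disk until
// the next write replaces them.
function markRevalidated(manifest: TagsManifest, tags: string[]): void {
    for (const tag of tags) {
        manifest.items[tag] = { revalidatedAt: Date.now() };
    }
}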
1  node_modules/next/dist/server/lib/incremental-cache/file-system-cache.js.map  generated  vendored  Normal file
File diff suppressed because one or more lines are too long
96  node_modules/next/dist/server/lib/incremental-cache/index.d.ts  generated  vendored  Normal file
@@ -0,0 +1,96 @@
import type { CacheFs } from '../../../shared/lib/utils';
import type { PrerenderManifest } from '../../../build';
import type { IncrementalCacheValue, IncrementalCacheEntry, IncrementalCache as IncrementalCacheType, IncrementalCacheKindHint } from '../../response-cache';
import type { DeepReadonly } from '../../../shared/lib/deep-readonly';
export interface CacheHandlerContext {
    fs?: CacheFs;
    dev?: boolean;
    flushToDisk?: boolean;
    serverDistDir?: string;
    maxMemoryCacheSize?: number;
    fetchCacheKeyPrefix?: string;
    prerenderManifest?: PrerenderManifest;
    revalidatedTags: string[];
    experimental: {
        ppr: boolean;
    };
    _appDir: boolean;
    _pagesDir: boolean;
    _requestHeaders: IncrementalCache['requestHeaders'];
}
export interface CacheHandlerValue {
    lastModified?: number;
    age?: number;
    cacheState?: string;
    value: IncrementalCacheValue | null;
}
export declare class CacheHandler {
    constructor(_ctx: CacheHandlerContext);
    get(..._args: Parameters<IncrementalCache['get']>): Promise<CacheHandlerValue | null>;
    set(..._args: Parameters<IncrementalCache['set']>): Promise<void>;
    revalidateTag(..._args: Parameters<IncrementalCache['revalidateTag']>): Promise<void>;
    resetRequestCache(): void;
}
export declare class IncrementalCache implements IncrementalCacheType {
    readonly dev?: boolean;
    readonly disableForTestmode?: boolean;
    readonly cacheHandler?: CacheHandler;
    readonly hasCustomCacheHandler: boolean;
    readonly prerenderManifest: DeepReadonly<PrerenderManifest>;
    readonly requestHeaders: Record<string, undefined | string | string[]>;
    readonly requestProtocol?: 'http' | 'https';
    readonly allowedRevalidateHeaderKeys?: string[];
    readonly minimalMode?: boolean;
    readonly fetchCacheKeyPrefix?: string;
    readonly revalidatedTags?: string[];
    readonly isOnDemandRevalidate?: boolean;
    private readonly locks;
    private readonly unlocks;
    /**
     * The revalidate timings for routes. This will source the timings from the
     * prerender manifest until the in-memory cache is updated with new timings.
     */
    private readonly revalidateTimings;
    constructor({ fs, dev, appDir, pagesDir, flushToDisk, fetchCache, minimalMode, serverDistDir, requestHeaders, requestProtocol, maxMemoryCacheSize, getPrerenderManifest, fetchCacheKeyPrefix, CurCacheHandler, allowedRevalidateHeaderKeys, experimental, }: {
        fs?: CacheFs;
        dev: boolean;
        appDir?: boolean;
        pagesDir?: boolean;
        fetchCache?: boolean;
        minimalMode?: boolean;
        serverDistDir?: string;
        flushToDisk?: boolean;
        requestProtocol?: 'http' | 'https';
        allowedRevalidateHeaderKeys?: string[];
        requestHeaders: IncrementalCache['requestHeaders'];
        maxMemoryCacheSize?: number;
        getPrerenderManifest: () => DeepReadonly<PrerenderManifest>;
        fetchCacheKeyPrefix?: string;
        CurCacheHandler?: typeof CacheHandler;
        experimental: {
            ppr: boolean;
        };
    });
    private calculateRevalidate;
    _getPathname(pathname: string, fetchCache?: boolean): string;
    resetRequestCache(): void;
    unlock(cacheKey: string): Promise<void>;
    lock(cacheKey: string): Promise<() => Promise<void>>;
    revalidateTag(tags: string | string[]): Promise<void>;
    fetchCacheKey(url: string, init?: RequestInit | Request): Promise<string>;
    get(cacheKey: string, ctx?: {
        kindHint?: IncrementalCacheKindHint;
        revalidate?: number | false;
        fetchUrl?: string;
        fetchIdx?: number;
        tags?: string[];
        softTags?: string[];
    }): Promise<IncrementalCacheEntry | null>;
    set(pathname: string, data: IncrementalCacheValue | null, ctx: {
        revalidate?: number | false;
        fetchCache?: boolean;
        fetchUrl?: string;
        fetchIdx?: number;
        tags?: string[];
    }): Promise<any>;
}
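The CacheHandler class declared here is the pluggable seam: Next.js accepts a user-supplied subclass through its cache-handler configuration (cacheHandler in newer releases, experimental.incrementalCacheHandlerPath in older ones). A toy in-memory subclass typed against this .d.ts, as a sketch only (the Map store and the class name are ours, not part of this commit):

import {
    CacheHandler,
    CacheHandlerContext,
    CacheHandlerValue,
} from 'next/dist/server/lib/incremental-cache';

// A minimal in-memory handler: same contract, no persistence or tag handling.
export default class MemoryCacheHandler extends CacheHandler {
    private static store = new Map<string, CacheHandlerValue>();

    constructor(ctx: CacheHandlerContext) {
        super(ctx);
    }

    async get(...args: Parameters<CacheHandler['get']>) {
        const [key] = args;
        return MemoryCacheHandler.store.get(key) ?? null;
    }

    async set(...args: Parameters<CacheHandler['set']>) {
        const [key, data] = args;
        MemoryCacheHandler.store.set(key, {
            value: data,
            lastModified: Date.now(),
        });
    }

    async revalidateTag() {
        // A real handler would track tags per entry; this sketch drops them.
    }

    resetRequestCache() {}
}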
407  node_modules/next/dist/server/lib/incremental-cache/index.js  generated  vendored  Normal file
@@ -0,0 +1,407 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
0 && (module.exports = {
    CacheHandler: null,
    IncrementalCache: null
});
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    CacheHandler: function() {
        return CacheHandler;
    },
    IncrementalCache: function() {
        return IncrementalCache;
    }
});
const _fetchcache = /*#__PURE__*/ _interop_require_default(require("./fetch-cache"));
const _filesystemcache = /*#__PURE__*/ _interop_require_default(require("./file-system-cache"));
const _normalizepagepath = require("../../../shared/lib/page-path/normalize-page-path");
const _constants = require("../../../lib/constants");
const _toroute = require("../to-route");
const _sharedrevalidatetimings = require("./shared-revalidate-timings");
function _interop_require_default(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
class CacheHandler {
    // eslint-disable-next-line
    constructor(_ctx){}
    async get(..._args) {
        return {};
    }
    async set(..._args) {}
    async revalidateTag(..._args) {}
    resetRequestCache() {}
}
class IncrementalCache {
    constructor({ fs, dev, appDir, pagesDir, flushToDisk, fetchCache, minimalMode, serverDistDir, requestHeaders, requestProtocol, maxMemoryCacheSize, getPrerenderManifest, fetchCacheKeyPrefix, CurCacheHandler, allowedRevalidateHeaderKeys, experimental }){
        var _this_prerenderManifest_preview, _this_prerenderManifest, _this_prerenderManifest_preview1, _this_prerenderManifest1;
        this.locks = new Map();
        this.unlocks = new Map();
        const debug = !!process.env.NEXT_PRIVATE_DEBUG_CACHE;
        this.hasCustomCacheHandler = Boolean(CurCacheHandler);
        if (!CurCacheHandler) {
            if (fs && serverDistDir) {
                if (debug) {
                    console.log("using filesystem cache handler");
                }
                CurCacheHandler = _filesystemcache.default;
            }
            if (_fetchcache.default.isAvailable({
                _requestHeaders: requestHeaders
            }) && minimalMode && fetchCache) {
                if (debug) {
                    console.log("using fetch cache handler");
                }
                CurCacheHandler = _fetchcache.default;
            }
        } else if (debug) {
            console.log("using custom cache handler", CurCacheHandler.name);
        }
        if (process.env.__NEXT_TEST_MAX_ISR_CACHE) {
            // Allow cache size to be overridden for testing purposes
            maxMemoryCacheSize = parseInt(process.env.__NEXT_TEST_MAX_ISR_CACHE, 10);
        }
        this.dev = dev;
        this.disableForTestmode = process.env.NEXT_PRIVATE_TEST_PROXY === "true";
        // this is a hack to avoid Webpack knowing this is equal to this.minimalMode
        // because we replace this.minimalMode to true in production bundles.
        const minimalModeKey = "minimalMode";
        this[minimalModeKey] = minimalMode;
        this.requestHeaders = requestHeaders;
        this.requestProtocol = requestProtocol;
        this.allowedRevalidateHeaderKeys = allowedRevalidateHeaderKeys;
        this.prerenderManifest = getPrerenderManifest();
        this.revalidateTimings = new _sharedrevalidatetimings.SharedRevalidateTimings(this.prerenderManifest);
        this.fetchCacheKeyPrefix = fetchCacheKeyPrefix;
        let revalidatedTags = [];
        if (requestHeaders[_constants.PRERENDER_REVALIDATE_HEADER] === ((_this_prerenderManifest = this.prerenderManifest) == null ? void 0 : (_this_prerenderManifest_preview = _this_prerenderManifest.preview) == null ? void 0 : _this_prerenderManifest_preview.previewModeId)) {
            this.isOnDemandRevalidate = true;
        }
        if (minimalMode && typeof requestHeaders[_constants.NEXT_CACHE_REVALIDATED_TAGS_HEADER] === "string" && requestHeaders[_constants.NEXT_CACHE_REVALIDATE_TAG_TOKEN_HEADER] === ((_this_prerenderManifest1 = this.prerenderManifest) == null ? void 0 : (_this_prerenderManifest_preview1 = _this_prerenderManifest1.preview) == null ? void 0 : _this_prerenderManifest_preview1.previewModeId)) {
            revalidatedTags = requestHeaders[_constants.NEXT_CACHE_REVALIDATED_TAGS_HEADER].split(",");
        }
        if (CurCacheHandler) {
            this.cacheHandler = new CurCacheHandler({
                dev,
                fs,
                flushToDisk,
                serverDistDir,
                revalidatedTags,
                maxMemoryCacheSize,
                _pagesDir: !!pagesDir,
                _appDir: !!appDir,
                _requestHeaders: requestHeaders,
                fetchCacheKeyPrefix,
                experimental
            });
        }
    }
    calculateRevalidate(pathname, fromTime, dev) {
        // in development we don't have a prerender-manifest
        // and default to always revalidating to allow easier debugging
        if (dev) return new Date().getTime() - 1000;
        // if an entry isn't present in routes we fallback to a default
        // of revalidating after 1 second.
        const initialRevalidateSeconds = this.revalidateTimings.get((0, _toroute.toRoute)(pathname)) ?? 1;
        const revalidateAfter = typeof initialRevalidateSeconds === "number" ? initialRevalidateSeconds * 1000 + fromTime : initialRevalidateSeconds;
        return revalidateAfter;
    }
    _getPathname(pathname, fetchCache) {
        return fetchCache ? pathname : (0, _normalizepagepath.normalizePagePath)(pathname);
    }
    resetRequestCache() {
        var _this_cacheHandler_resetRequestCache, _this_cacheHandler;
        (_this_cacheHandler = this.cacheHandler) == null ? void 0 : (_this_cacheHandler_resetRequestCache = _this_cacheHandler.resetRequestCache) == null ? void 0 : _this_cacheHandler_resetRequestCache.call(_this_cacheHandler);
    }
    async unlock(cacheKey) {
        const unlock = this.unlocks.get(cacheKey);
        if (unlock) {
            unlock();
            this.locks.delete(cacheKey);
            this.unlocks.delete(cacheKey);
        }
    }
    async lock(cacheKey) {
        if (process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT && process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY && process.env.NEXT_RUNTIME !== "edge") {
            const invokeIpcMethod = require("../server-ipc/request-utils").invokeIpcMethod;
            await invokeIpcMethod({
                method: "lock",
                ipcPort: process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT,
                ipcKey: process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY,
                args: [
                    cacheKey
                ]
            });
            return async ()=>{
                await invokeIpcMethod({
                    method: "unlock",
                    ipcPort: process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT,
                    ipcKey: process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY,
                    args: [
                        cacheKey
                    ]
                });
            };
        }
        let unlockNext = ()=>Promise.resolve();
        const existingLock = this.locks.get(cacheKey);
        if (existingLock) {
            await existingLock;
        } else {
            const newLock = new Promise((resolve)=>{
                unlockNext = async ()=>{
                    resolve();
                };
            });
            this.locks.set(cacheKey, newLock);
            this.unlocks.set(cacheKey, unlockNext);
        }
        return unlockNext;
    }
    async revalidateTag(tags) {
        var _this_cacheHandler_revalidateTag, _this_cacheHandler;
        if (process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT && process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY && process.env.NEXT_RUNTIME !== "edge") {
            const invokeIpcMethod = require("../server-ipc/request-utils").invokeIpcMethod;
            return invokeIpcMethod({
                method: "revalidateTag",
                ipcPort: process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT,
                ipcKey: process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY,
                args: [
                    ...arguments
                ]
            });
        }
        return (_this_cacheHandler = this.cacheHandler) == null ? void 0 : (_this_cacheHandler_revalidateTag = _this_cacheHandler.revalidateTag) == null ? void 0 : _this_cacheHandler_revalidateTag.call(_this_cacheHandler, tags);
    }
    // x-ref: https://github.com/facebook/react/blob/2655c9354d8e1c54ba888444220f63e836925caa/packages/react/src/ReactFetch.js#L23
    async fetchCacheKey(url, init = {}) {
        // this should be bumped anytime a fix is made to cache entries
        // that should bust the cache
        const MAIN_KEY_PREFIX = "v3";
        const bodyChunks = [];
        const encoder = new TextEncoder();
        const decoder = new TextDecoder();
        if (init.body) {
            // handle ReadableStream body
            if (typeof init.body.getReader === "function") {
                const readableBody = init.body;
                const chunks = [];
                try {
                    await readableBody.pipeTo(new WritableStream({
                        write (chunk) {
                            if (typeof chunk === "string") {
                                chunks.push(encoder.encode(chunk));
                                bodyChunks.push(chunk);
                            } else {
                                chunks.push(chunk);
                                bodyChunks.push(decoder.decode(chunk, {
                                    stream: true
                                }));
                            }
                        }
                    }));
                    // Flush the decoder.
                    bodyChunks.push(decoder.decode());
                    // Create a new buffer with all the chunks.
                    const length = chunks.reduce((total, arr)=>total + arr.length, 0);
                    const arrayBuffer = new Uint8Array(length);
                    // Push each of the chunks into the new array buffer.
                    let offset = 0;
                    for (const chunk of chunks){
                        arrayBuffer.set(chunk, offset);
                        offset += chunk.length;
                    }
                    init._ogBody = arrayBuffer;
                } catch (err) {
                    console.error("Problem reading body", err);
                }
            } else if (typeof init.body.keys === "function") {
                const formData = init.body;
                init._ogBody = init.body;
                for (const key of new Set([
                    ...formData.keys()
                ])){
                    const values = formData.getAll(key);
                    bodyChunks.push(`${key}=${(await Promise.all(values.map(async (val)=>{
                        if (typeof val === "string") {
                            return val;
                        } else {
                            return await val.text();
                        }
                    }))).join(",")}`);
                }
            // handle blob body
            } else if (typeof init.body.arrayBuffer === "function") {
                const blob = init.body;
                const arrayBuffer = await blob.arrayBuffer();
                bodyChunks.push(await blob.text());
                init._ogBody = new Blob([
                    arrayBuffer
                ], {
                    type: blob.type
                });
            } else if (typeof init.body === "string") {
                bodyChunks.push(init.body);
                init._ogBody = init.body;
            }
        }
        const headers = typeof (init.headers || {}).keys === "function" ? Object.fromEntries(init.headers) : Object.assign({}, init.headers);
        if ("traceparent" in headers) delete headers["traceparent"];
        const cacheString = JSON.stringify([
            MAIN_KEY_PREFIX,
            this.fetchCacheKeyPrefix || "",
            url,
            init.method,
            headers,
            init.mode,
            init.redirect,
            init.credentials,
            init.referrer,
            init.referrerPolicy,
            init.integrity,
            init.cache,
            bodyChunks
        ]);
        if (process.env.NEXT_RUNTIME === "edge") {
            function bufferToHex(buffer) {
                return Array.prototype.map.call(new Uint8Array(buffer), (b)=>b.toString(16).padStart(2, "0")).join("");
            }
            const buffer = encoder.encode(cacheString);
            return bufferToHex(await crypto.subtle.digest("SHA-256", buffer));
        } else {
            const crypto1 = require("crypto");
            return crypto1.createHash("sha256").update(cacheString).digest("hex");
        }
    }
    // get data from cache if available
    async get(cacheKey, ctx = {}) {
        var _this_cacheHandler, _cacheData_value;
        if (process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT && process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY && process.env.NEXT_RUNTIME !== "edge") {
            const invokeIpcMethod = require("../server-ipc/request-utils").invokeIpcMethod;
            return invokeIpcMethod({
                method: "get",
                ipcPort: process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT,
                ipcKey: process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY,
                args: [
                    ...arguments
                ]
            });
        }
        // we don't leverage the prerender cache in dev mode
        // so that getStaticProps is always called for easier debugging
        if (this.disableForTestmode || this.dev && (ctx.kindHint !== "fetch" || this.requestHeaders["cache-control"] === "no-cache")) {
            return null;
        }
        cacheKey = this._getPathname(cacheKey, ctx.kindHint === "fetch");
        let entry = null;
        let revalidate = ctx.revalidate;
        const cacheData = await ((_this_cacheHandler = this.cacheHandler) == null ? void 0 : _this_cacheHandler.get(cacheKey, ctx));
        if ((cacheData == null ? void 0 : (_cacheData_value = cacheData.value) == null ? void 0 : _cacheData_value.kind) === "FETCH") {
            const combinedTags = [
                ...ctx.tags || [],
                ...ctx.softTags || []
            ];
            // if a tag was revalidated we don't return stale data
            if (combinedTags.some((tag)=>{
                var _this_revalidatedTags;
                return (_this_revalidatedTags = this.revalidatedTags) == null ? void 0 : _this_revalidatedTags.includes(tag);
            })) {
                return null;
            }
            revalidate = revalidate || cacheData.value.revalidate;
            const age = (Date.now() - (cacheData.lastModified || 0)) / 1000;
            const isStale = age > revalidate;
            const data = cacheData.value.data;
            return {
                isStale: isStale,
                value: {
                    kind: "FETCH",
                    data,
                    revalidate: revalidate
                },
                revalidateAfter: Date.now() + revalidate * 1000
            };
        }
        const curRevalidate = this.revalidateTimings.get((0, _toroute.toRoute)(cacheKey));
        let isStale;
        let revalidateAfter;
        if ((cacheData == null ? void 0 : cacheData.lastModified) === -1) {
            isStale = -1;
            revalidateAfter = -1 * _constants.CACHE_ONE_YEAR;
        } else {
            revalidateAfter = this.calculateRevalidate(cacheKey, (cacheData == null ? void 0 : cacheData.lastModified) || Date.now(), this.dev && ctx.kindHint !== "fetch");
            isStale = revalidateAfter !== false && revalidateAfter < Date.now() ? true : undefined;
        }
        if (cacheData) {
            entry = {
                isStale,
                curRevalidate,
                revalidateAfter,
                value: cacheData.value
            };
        }
        if (!cacheData && this.prerenderManifest.notFoundRoutes.includes(cacheKey)) {
            // for the first hit after starting the server the cache
            // may not have a way to save notFound: true so if
            // the prerender-manifest marks this as notFound then we
            // return that entry and trigger a cache set to give it a
            // chance to update in-memory entries
            entry = {
                isStale,
                value: null,
                curRevalidate,
                revalidateAfter
            };
            this.set(cacheKey, entry.value, ctx);
        }
        return entry;
    }
    // populate the incremental cache with new data
    async set(pathname, data, ctx) {
        if (process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT && process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY && process.env.NEXT_RUNTIME !== "edge") {
            const invokeIpcMethod = require("../server-ipc/request-utils").invokeIpcMethod;
            return invokeIpcMethod({
                method: "set",
                ipcPort: process.env.__NEXT_INCREMENTAL_CACHE_IPC_PORT,
                ipcKey: process.env.__NEXT_INCREMENTAL_CACHE_IPC_KEY,
                args: [
                    ...arguments
                ]
            });
        }
        if (this.disableForTestmode || this.dev && !ctx.fetchCache) return;
        // FetchCache has upper limit of 2MB per-entry currently
        const itemSize = JSON.stringify(data).length;
        if (ctx.fetchCache && // we don't show this error/warning when a custom cache handler is being used
        // as it might not have this limit
        !this.hasCustomCacheHandler && itemSize > 2 * 1024 * 1024) {
            if (this.dev) {
                throw new Error(`Failed to set Next.js data cache, items over 2MB can not be cached (${itemSize} bytes)`);
            }
            return;
        }
        pathname = this._getPathname(pathname, ctx.fetchCache);
        try {
            var _this_cacheHandler;
            // Set the value for the revalidate seconds so if it changes we can
            // update the cache with the new value.
            if (typeof ctx.revalidate !== "undefined" && !ctx.fetchCache) {
                this.revalidateTimings.set(pathname, ctx.revalidate);
            }
            await ((_this_cacheHandler = this.cacheHandler) == null ? void 0 : _this_cacheHandler.set(pathname, data, ctx));
        } catch (error) {
            console.warn("Failed to update prerender cache for", pathname, error);
        }
    }
}

//# sourceMappingURL=index.js.map
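fetchCacheKey above is how a fetch() call becomes a cache key: a version prefix ("v3"), the optional fetchCacheKeyPrefix, the URL, and the normalized init fields (headers minus traceparent, method, mode, credentials, body chunks, and so on) are JSON-serialized and SHA-256 hashed. A condensed, Node-only TypeScript sketch of the same idea, with the field list abbreviated (names are ours):

import { createHash } from 'crypto';

// Version prefix; bumped whenever the entry format changes so old keys miss.
const MAIN_KEY_PREFIX = 'v3';

// Hash a request's identity into a stable cache key. Node runtime only;
// the edge-runtime branch in the vendored code uses crypto.subtle.digest.
function fetchCacheKey(url: string, init: RequestInit = {}, keyPrefix = ''): string {
    const headers =
        init.headers instanceof Headers
            ? Object.fromEntries(init.headers)
            : { ...(init.headers as Record<string, string> | undefined) };
    // Trace ids vary per request; excluding them keeps keys deterministic.
    delete headers['traceparent'];
    const cacheString = JSON.stringify([
        MAIN_KEY_PREFIX,
        keyPrefix,
        url,
        init.method,
        headers,
        init.mode,
        init.redirect,
        init.credentials,
        typeof init.body === 'string' ? init.body : undefined,
    ]);
    return createHash('sha256').update(cacheString).digest('hex');
}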
1  node_modules/next/dist/server/lib/incremental-cache/index.js.map  generated  vendored  Normal file
File diff suppressed because one or more lines are too long
48  node_modules/next/dist/server/lib/incremental-cache/shared-revalidate-timings.d.ts  generated  vendored  Normal file
@@ -0,0 +1,48 @@
import type { PrerenderManifest } from '../../../build';
import type { DeepReadonly } from '../../../shared/lib/deep-readonly';
import type { Revalidate } from '../revalidate';
/**
 * A shared cache of revalidate timings for routes. This cache is used so we
 * don't have to modify the prerender manifest when we want to update the
 * revalidate timings for a route.
 */
export declare class SharedRevalidateTimings {
    /**
     * The prerender manifest that contains the initial revalidate timings for
     * routes.
     */
    private readonly prerenderManifest;
    /**
     * The in-memory cache of revalidate timings for routes. This cache is
     * populated when the cache is updated with new timings.
     */
    private static readonly timings;
    constructor(
    /**
     * The prerender manifest that contains the initial revalidate timings for
     * routes.
     */
    prerenderManifest: DeepReadonly<Pick<PrerenderManifest, 'routes'>>);
    /**
     * Try to get the revalidate timings for a route. This will first try to get
     * the timings from the in-memory cache. If the timings are not present in the
     * in-memory cache, then the timings will be sourced from the prerender
     * manifest.
     *
     * @param route the route to get the revalidate timings for
     * @returns the revalidate timings for the route, or undefined if the timings
     * are not present in the in-memory cache or the prerender manifest
     */
    get(route: string): Revalidate | undefined;
    /**
     * Set the revalidate timings for a route.
     *
     * @param route the route to set the revalidate timings for
     * @param revalidate the revalidate timings for the route
     */
    set(route: string, revalidate: Revalidate): void;
    /**
     * Clear the in-memory cache of revalidate timings for routes.
     */
    clear(): void;
}
57  node_modules/next/dist/server/lib/incremental-cache/shared-revalidate-timings.js  generated  vendored  Normal file
@@ -0,0 +1,57 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "SharedRevalidateTimings", {
    enumerable: true,
    get: function() {
        return SharedRevalidateTimings;
    }
});
class SharedRevalidateTimings {
    static #_ = /**
     * The in-memory cache of revalidate timings for routes. This cache is
     * populated when the cache is updated with new timings.
     */ this.timings = new Map();
    constructor(/**
     * The prerender manifest that contains the initial revalidate timings for
     * routes.
     */ prerenderManifest){
        this.prerenderManifest = prerenderManifest;
    }
    /**
     * Try to get the revalidate timings for a route. This will first try to get
     * the timings from the in-memory cache. If the timings are not present in the
     * in-memory cache, then the timings will be sourced from the prerender
     * manifest.
     *
     * @param route the route to get the revalidate timings for
     * @returns the revalidate timings for the route, or undefined if the timings
     * are not present in the in-memory cache or the prerender manifest
     */ get(route) {
        var _this_prerenderManifest_routes_route;
        // This is a copy on write cache that is updated when the cache is updated.
        // If the cache is never written to, then the timings will be sourced from
        // the prerender manifest.
        let revalidate = SharedRevalidateTimings.timings.get(route);
        if (typeof revalidate !== "undefined") return revalidate;
        revalidate = (_this_prerenderManifest_routes_route = this.prerenderManifest.routes[route]) == null ? void 0 : _this_prerenderManifest_routes_route.initialRevalidateSeconds;
        if (typeof revalidate !== "undefined") return revalidate;
        return undefined;
    }
    /**
     * Set the revalidate timings for a route.
     *
     * @param route the route to set the revalidate timings for
     * @param revalidate the revalidate timings for the route
     */ set(route, revalidate) {
        SharedRevalidateTimings.timings.set(route, revalidate);
    }
    /**
     * Clear the in-memory cache of revalidate timings for routes.
     */ clear() {
        SharedRevalidateTimings.timings.clear();
    }
}

//# sourceMappingURL=shared-revalidate-timings.js.map
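Design note on this file: timings is a static, class-level Map, so every SharedRevalidateTimings instance shares one mutable overlay on top of the read-only prerender manifest; constructing a fresh instance per request does not discard updated timings. A small usage sketch in TypeScript (the manifest literal is illustrative and reduced to the only field get() reads):

import { SharedRevalidateTimings } from 'next/dist/server/lib/incremental-cache/shared-revalidate-timings';

// Reduced manifest: get() only reads routes[route]?.initialRevalidateSeconds.
const manifest = {
    routes: {
        '/blog': { initialRevalidateSeconds: 60 },
    },
} as any;

const a = new SharedRevalidateTimings(manifest);
a.get('/blog'); // 60 - falls through to the manifest
a.set('/blog', 120); // writes the shared static Map

const b = new SharedRevalidateTimings(manifest);
b.get('/blog'); // 120 - the overlay is visible from every instance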
1  node_modules/next/dist/server/lib/incremental-cache/shared-revalidate-timings.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/incremental-cache/shared-revalidate-timings.ts"],"names":["SharedRevalidateTimings","timings","Map","constructor","prerenderManifest","get","route","revalidate","routes","initialRevalidateSeconds","undefined","set","clear"],"mappings":";;;;+BASaA;;;eAAAA;;;AAAN,MAAMA;gBACX;;;GAGC,QACuBC,UAAU,IAAIC;IAEtCC,YACE;;;KAGC,GACD,AAAiBC,iBAEhB,CACD;aAHiBA,oBAAAA;IAGhB;IAEH;;;;;;;;;GASC,GACD,AAAOC,IAAIC,KAAa,EAA0B;YAOnC;QANb,2EAA2E;QAC3E,0EAA0E;QAC1E,0BAA0B;QAC1B,IAAIC,aAAaP,wBAAwBC,OAAO,CAACI,GAAG,CAACC;QACrD,IAAI,OAAOC,eAAe,aAAa,OAAOA;QAE9CA,cAAa,uCAAA,IAAI,CAACH,iBAAiB,CAACI,MAAM,CAACF,MAAM,qBAApC,qCAAsCG,wBAAwB;QAC3E,IAAI,OAAOF,eAAe,aAAa,OAAOA;QAE9C,OAAOG;IACT;IAEA;;;;;GAKC,GACD,AAAOC,IAAIL,KAAa,EAAEC,UAAsB,EAAE;QAChDP,wBAAwBC,OAAO,CAACU,GAAG,CAACL,OAAOC;IAC7C;IAEA;;GAEC,GACD,AAAOK,QAAQ;QACbZ,wBAAwBC,OAAO,CAACW,KAAK;IACvC;AACF"}
1  node_modules/next/dist/server/lib/incremental-cache/shared-revalidate-timings.test.d.ts  generated  vendored  Normal file
@@ -0,0 +1 @@
export {};