Initial boilerplate project
This commit is contained in:
149
node_modules/next/dist/client/components/router-reducer/fill-lazy-items-till-leaf-with-head.js
generated
vendored
Normal file
149
node_modules/next/dist/client/components/router-reducer/fill-lazy-items-till-leaf-with-head.js
generated
vendored
Normal file
@ -0,0 +1,149 @@
|
||||
"use strict";
// Mark this CommonJS module as transpiled-from-ESM so interop helpers
// treat `exports` as an ES module namespace.
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Expose the single public function through a getter so the binding is
// live (resolved at access time, after the hoisted declaration below).
Object.defineProperty(exports, "fillLazyItemsTillLeafWithHead", {
    enumerable: true,
    get: function() {
        return fillLazyItemsTillLeafWithHead;
    }
});
const _createroutercachekey = require("./create-router-cache-key");
const _routerreducertypes = require("./router-reducer-types");
/**
 * Walks the router state tree and fills `newCache` with cache nodes down to
 * the leaf segment, attaching `head` at the leaf. Seed data from the server
 * (when present) populates nodes; otherwise nodes are created empty so a
 * lazy fetch is triggered during render, or — when a reusable "auto"
 * prefetch entry exists — cloned from the existing cache.
 *
 * @param {Object} newCache - Cache node being filled (mutated in place).
 * @param {Object|undefined} existingCache - Previous cache node to reuse entries from, if any.
 * @param {Array} routerState - FlightRouterState tuple; index 1 holds the parallel-route children.
 * @param {Array|null} cacheNodeSeedData - Seed data tuple from the server; index 1 holds child seed data,
 *   index 2 the RSC node, index 3 the loading node. May be a partial tree (see TODO below).
 * @param {*} head - Head node to place on the leaf cache node.
 * @param {Object|undefined} prefetchEntry - Prefetch cache entry; `kind`/`status` decide reuse.
 * @returns {undefined} Mutates `newCache`; returns nothing.
 */
function fillLazyItemsTillLeafWithHead(newCache, existingCache, routerState, cacheNodeSeedData, head, prefetchEntry) {
    // A segment with no parallel-route children is the leaf: attach the head here.
    const isLastSegment = Object.keys(routerState[1]).length === 0;
    if (isLastSegment) {
        newCache.head = head;
        return;
    }
    // Remove segment that we got data for so that it is filled in during rendering of rsc.
    for(const key in routerState[1]){
        const parallelRouteState = routerState[1][key];
        const segmentForParallelRoute = parallelRouteState[0];
        const cacheKey = (0, _createroutercachekey.createRouterCacheKey)(segmentForParallelRoute);
        // TODO: We should traverse the cacheNodeSeedData tree instead of the router
        // state tree. Ideally, they would always be the same shape, but because of
        // the loading.js pattern, cacheNodeSeedData sometimes only represents a
        // partial tree. That's why this node is sometimes null. Once PPR lands,
        // loading.js will no longer have special behavior and we can traverse the
        // data tree instead.
        //
        // We should also consider merging the router state tree and the data tree
        // in the response format, so that we don't have to send the keys twice.
        // Then the client can convert them into separate representations.
        const parallelSeedData = cacheNodeSeedData !== null && cacheNodeSeedData[1][key] !== undefined ? cacheNodeSeedData[1][key] : null;
        if (existingCache) {
            const existingParallelRoutesCacheNode = existingCache.parallelRoutes.get(key);
            if (existingParallelRoutesCacheNode) {
                const hasReusablePrefetch = (prefetchEntry == null ? void 0 : prefetchEntry.kind) === "auto" && prefetchEntry.status === _routerreducertypes.PrefetchCacheEntryStatus.reusable;
                // Copy-on-write: clone the map so the existing cache is not mutated.
                let parallelRouteCacheNode = new Map(existingParallelRoutesCacheNode);
                const existingCacheNode = parallelRouteCacheNode.get(cacheKey);
                let newCacheNode;
                if (parallelSeedData !== null) {
                    // New data was sent from the server.
                    const seedNode = parallelSeedData[2];
                    const loading = parallelSeedData[3];
                    newCacheNode = {
                        lazyData: null,
                        rsc: seedNode,
                        // This is a PPR-only field. When PPR is enabled, we shouldn't hit
                        // this path during a navigation, but until PPR is fully implemented
                        // yet it's possible the existing node does have a non-null
                        // `prefetchRsc`. As an incremental step, we'll just de-opt to the
                        // old behavior — no PPR value.
                        prefetchRsc: null,
                        head: null,
                        prefetchHead: null,
                        loading,
                        parallelRoutes: new Map(existingCacheNode == null ? void 0 : existingCacheNode.parallelRoutes),
                        lazyDataResolved: false
                    };
                } else if (hasReusablePrefetch && existingCacheNode) {
                    // No new data was sent from the server, but the existing cache node
                    // was prefetched, so we should reuse that.
                    newCacheNode = {
                        lazyData: existingCacheNode.lazyData,
                        rsc: existingCacheNode.rsc,
                        // This is a PPR-only field. Unlike the previous branch, since we're
                        // just cloning the existing cache node, we might as well keep the
                        // PPR value, if it exists.
                        prefetchRsc: existingCacheNode.prefetchRsc,
                        head: existingCacheNode.head,
                        prefetchHead: existingCacheNode.prefetchHead,
                        parallelRoutes: new Map(existingCacheNode.parallelRoutes),
                        lazyDataResolved: existingCacheNode.lazyDataResolved,
                        loading: existingCacheNode.loading
                    };
                } else {
                    // No data available for this node. This will trigger a lazy fetch
                    // during render.
                    newCacheNode = {
                        lazyData: null,
                        rsc: null,
                        prefetchRsc: null,
                        head: null,
                        prefetchHead: null,
                        parallelRoutes: new Map(existingCacheNode == null ? void 0 : existingCacheNode.parallelRoutes),
                        lazyDataResolved: false,
                        loading: null
                    };
                }
                // Overrides the cache key with the new cache node.
                parallelRouteCacheNode.set(cacheKey, newCacheNode);
                // Traverse deeper to apply the head / fill lazy items till the head.
                fillLazyItemsTillLeafWithHead(newCacheNode, existingCacheNode, parallelRouteState, parallelSeedData ? parallelSeedData : null, head, prefetchEntry);
                newCache.parallelRoutes.set(key, parallelRouteCacheNode);
                continue;
            }
        }
        // No existing cache entry for this parallel route: build a fresh node.
        let newCacheNode;
        if (parallelSeedData !== null) {
            // New data was sent from the server.
            const seedNode = parallelSeedData[2];
            const loading = parallelSeedData[3];
            newCacheNode = {
                lazyData: null,
                rsc: seedNode,
                prefetchRsc: null,
                head: null,
                prefetchHead: null,
                parallelRoutes: new Map(),
                lazyDataResolved: false,
                loading
            };
        } else {
            // No data available for this node. This will trigger a lazy fetch
            // during render.
            newCacheNode = {
                lazyData: null,
                rsc: null,
                prefetchRsc: null,
                head: null,
                prefetchHead: null,
                parallelRoutes: new Map(),
                lazyDataResolved: false,
                loading: null
            };
        }
        const existingParallelRoutes = newCache.parallelRoutes.get(key);
        if (existingParallelRoutes) {
            existingParallelRoutes.set(cacheKey, newCacheNode);
        } else {
            newCache.parallelRoutes.set(key, new Map([
                [
                    cacheKey,
                    newCacheNode
                ]
            ]));
        }
        // Recurse with no existing cache — descendants are all freshly created.
        fillLazyItemsTillLeafWithHead(newCacheNode, undefined, parallelRouteState, parallelSeedData, head, prefetchEntry);
    }
}
// Interop shim emitted by the build: when the module has a default export
// object/function that is not itself flagged as an ES module, merge the named
// exports onto it and make it the CommonJS `module.exports`, so
// `require()` consumers get the default export directly.
if ((typeof exports.default === 'function' || (typeof exports.default === 'object' && exports.default !== null)) && typeof exports.default.__esModule === 'undefined') {
    Object.defineProperty(exports.default, '__esModule', { value: true });
    Object.assign(exports.default, exports);
    module.exports = exports.default;
}
//# sourceMappingURL=fill-lazy-items-till-leaf-with-head.js.map
|
||||
Reference in New Issue
Block a user