@@ -19,6 +19,7 @@ import {isPrimaryRenderer} from './ReactFiberHostConfig';
 import {createCursor, push, pop} from './ReactFiberStack.new';
 import {pushProvider, popProvider} from './ReactFiberNewContext.new';
 import * as Scheduler from 'scheduler';
+import {getWorkInProgressRoot} from './ReactFiberWorkLoop.new';

 export type Cache = {|
   controller: AbortController,
@@ -61,13 +62,9 @@ if (__DEV__ && enableCache) {
   CacheContext._currentRenderer2 = null;
 }

-// The cache that newly mounted Cache boundaries should use. It's either
-// retrieved from the cache pool, or the result of a refresh.
-let pooledCache: Cache | null = null;
-
-// When retrying a Suspense/Offscreen boundary, we override pooledCache with the
-// cache from the render that suspended.
-const prevFreshCacheOnStack: StackCursor<Cache | null> = createCursor(null);
+// When retrying a Suspense/Offscreen boundary, we restore the cache that was
+// used during the previous render by placing it here, on the stack.
+const resumedCache: StackCursor<Cache | null> = createCursor(null);

 // Creates a new empty Cache instance with a ref-count of 0. The caller is responsible
 // for retaining the cache once it is in use (retainCache), and releasing the cache
@@ -135,56 +132,70 @@ export function popCacheProvider(workInProgress: Fiber, cache: Cache) {
   popProvider(CacheContext, workInProgress);
 }

-export function requestCacheFromPool(renderLanes: Lanes): Cache {
+function peekCacheFromPool(): Cache | null {
   if (!enableCache) {
     return (null: any);
   }
-  if (pooledCache !== null) {
-    return pooledCache;
+
+  // Check if the cache pool already has a cache we can use.
+
+  // If we're rendering inside a Suspense boundary that is currently hidden,
+  // we should use the same cache that we used during the previous render, if
+  // one exists.
+  const cacheResumedFromPreviousRender = resumedCache.current;
+  if (cacheResumedFromPreviousRender !== null) {
+    return cacheResumedFromPreviousRender;
   }
-  // Create a fresh cache. The pooled cache must be owned - it is freed
-  // in releaseRootPooledCache() - but the cache instance handed out
-  // is retained/released in the commit phase of the component that
-  // references is (ie the host root, cache boundary, suspense component)
-  // Ie, pooledCache is conceptually an Option<Arc<Cache>> (owned),
-  // whereas the return value of this function is a &Arc<Cache> (borrowed).
-  pooledCache = createCache();
-  retainCache(pooledCache);
-  return pooledCache;
+
+  // Otherwise, check the root's cache pool.
+  const root = (getWorkInProgressRoot(): any);
+  const cacheFromRootCachePool = root.pooledCache;
+
+  return cacheFromRootCachePool;
+}
+
+export function requestCacheFromPool(renderLanes: Lanes): Cache {
+  // Similar to previous function, except if there's not already a cache in the
+  // pool, we allocate a new one.
+  const cacheFromPool = peekCacheFromPool();
+  if (cacheFromPool !== null) {
+    return cacheFromPool;
+  }
+
+  // Create a fresh cache and add it to the root cache pool. A cache can have
+  // multiple owners:
+  // - A cache pool that lives on the FiberRoot. This is where all fresh caches
+  //   are originally created (TODO: except during refreshes, until we implement
+  //   this correctly). The root takes ownership immediately when the cache is
+  //   created. Conceptually, root.pooledCache is an Option<Arc<Cache>> (owned),
+  //   and the return value of this function is a &Arc<Cache> (borrowed).
+  // - One of several fiber types: host root, cache boundary, suspense
+  //   component. These retain and release in the commit phase.
+
+  const root = (getWorkInProgressRoot(): any);
+  const freshCache = createCache();
+  root.pooledCache = freshCache;
+  retainCache(freshCache);
+  if (freshCache !== null) {
+    root.pooledCacheLanes |= renderLanes;
+  }
+  return freshCache;
 }

 export function pushRootCachePool(root: FiberRoot) {
   if (!enableCache) {
     return;
   }
-  // When we start rendering a tree, read the pooled cache for this render
-  // from `root.pooledCache`. If it's currently `null`, we will lazily
-  // initialize it the first type it's requested. However, we only mutate
-  // the root itself during the complete/unwind phase of the HostRoot.
-  const rootCache = root.pooledCache;
-  if (rootCache != null) {
-    pooledCache = rootCache;
-    root.pooledCache = null;
-  } else {
-    pooledCache = null;
-  }
+  // Note: This function currently does nothing but I'll leave it here for
+  // code organization purposes in case that changes.
 }

 export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) {
   if (!enableCache) {
     return;
   }
-  // The `pooledCache` variable points to the cache that was used for new
-  // cache boundaries during this render, if any. Move ownership of the
-  // cache to the root so that parallel transitions may share the same
-  // cache. We will clear this field once all the transitions that depend
-  // on it (which we track with `pooledCacheLanes`) have committed.
-  root.pooledCache = pooledCache;
-  if (pooledCache !== null) {
-    root.pooledCacheLanes |= renderLanes;
-  }
-  // set to null, conceptually we are moving ownership to the root
-  pooledCache = null;
+  // Note: This function currently does nothing but I'll leave it here for
+  // code organization purposes in case that changes.
 }

 export function restoreSpawnedCachePool(
@@ -202,51 +213,35 @@ export function restoreSpawnedCachePool(
     // will override it.
     return null;
   } else {
-    // No refresh. Resume with the previous cache. This will override the cache
-    // pool so that any new Cache boundaries in the subtree use this one instead
-    // of requesting a fresh one.
-    push(prevFreshCacheOnStack, pooledCache, offscreenWorkInProgress);
-    pooledCache = prevCachePool.pool;
+    // No refresh. Resume with the previous cache. New Cache boundaries in the
+    // subtree use this one instead of requesting a fresh one (see
+    // peekCacheFromPool).
+    push(resumedCache, prevCachePool.pool, offscreenWorkInProgress);

     // Return the cache pool to signal that we did in fact push it. We will
     // assign this to the field on the fiber so we know to pop the context.
     return prevCachePool;
   }
 }

-// Note: Ideally, `popCachePool` would return this value, and then we would pass
-// it to `getSuspendedCachePool`. But factoring reasons, those two functions are
-// in different phases/files. They are always called in sequence, though, so we
-// can stash the value here temporarily.
-let _suspendedPooledCache: Cache | null = null;
-
 export function popCachePool(workInProgress: Fiber) {
   if (!enableCache) {
     return;
   }
-  _suspendedPooledCache = pooledCache;
-  pooledCache = prevFreshCacheOnStack.current;
-  pop(prevFreshCacheOnStack, workInProgress);
+  pop(resumedCache, workInProgress);
 }

 export function getSuspendedCachePool(): SpawnedCachePool | null {
   if (!enableCache) {
     return null;
   }
-  // We check the cache on the stack first, since that's the one any new Caches
-  // would have accessed.
-  let pool = pooledCache;
-  if (pool === null) {
-    // There's no pooled cache above us in the stack. However, a child in the
-    // suspended tree may have requested a fresh cache pool. If so, we would
-    // have unwound it with `popCachePool`.
-    if (_suspendedPooledCache !== null) {
-      pool = _suspendedPooledCache;
-      _suspendedPooledCache = null;
-    } else {
-      // There's no suspended cache pool.
-      return null;
-    }
+  // This function is called when a Suspense boundary suspends. It returns the
+  // cache that would have been used to render fresh data during this render,
+  // if there was any, so that we can resume rendering with the same cache when
+  // we receive more data.
+  const cacheFromPool = peekCacheFromPool();
+  if (cacheFromPool === null) {
+    return null;
   }

   return {
@@ -255,7 +250,7 @@ export function getSuspendedCachePool(): SpawnedCachePool | null {
     parent: isPrimaryRenderer
       ? CacheContext._currentValue
       : CacheContext._currentValue2,
-    pool,
+    pool: cacheFromPool,
   };
 }

@@ -264,8 +259,8 @@ export function getOffscreenDeferredCachePool(): SpawnedCachePool | null {
     return null;
   }

-  if (pooledCache === null) {
-    // There's no deferred cache pool.
+  const cacheFromPool = peekCacheFromPool();
+  if (cacheFromPool === null) {
     return null;
   }

@@ -275,6 +270,6 @@ export function getOffscreenDeferredCachePool(): SpawnedCachePool | null {
     parent: isPrimaryRenderer
       ? CacheContext._currentValue
       : CacheContext._currentValue2,
-    pool: pooledCache,
+    pool: cacheFromPool,
   };
 }
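
The comments added in requestCacheFromPool describe a ref-counting contract: the root's cache pool owns a freshly created cache immediately, while the fibers that commit with it (host root, Cache boundary, Suspense component) retain and release their own references during the commit phase. The sketch below is a minimal model of that contract, not the actual implementation; the helper bodies and the data/refCount fields are assumptions, with only the controller field and the "ref-count of 0" behavior taken from this file.

// Minimal ref-counting model (illustrative only, not React's real helpers).
function createCache() {
  return {
    controller: new AbortController(), // mirrors the `controller` field of the Cache type
    data: new Map(),                   // assumed storage for cached values
    refCount: 0,                       // "a ref-count of 0", per the comment above
  };
}

function retainCache(cache) {
  // Every owner (the root's pool, or a fiber that commits with this cache)
  // holds one reference.
  cache.refCount++;
}

function releaseCache(cache) {
  // When the last owner lets go, abort any requests still tied to this cache.
  cache.refCount--;
  if (cache.refCount === 0) {
    cache.controller.abort();
  }
}

In the Rust analogy from the comment, root.pooledCache holding the cache is the Option<Arc<Cache>> (an owned, ref-counted slot), while the value requestCacheFromPool hands back is the &Arc<Cache> (a borrow that only becomes another owner if the caller retains it during commit).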
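
The other half of the change replaces the module-level pooledCache variable with the resumedCache cursor plus the root's pooledCache field. The walk-through below is a hypothetical plain-JS model of the resulting lookup order (the real code uses createCursor/push/pop from ReactFiberStack and reads the work-in-progress root): while a retried Suspense/Offscreen boundary's previous cache is on the stack, nested boundaries resume with it; once it is popped, lookups fall back to the root's pooled cache.

// Hypothetical model of the resumedCache stack discipline; the names are
// invented for illustration and the objects stand in for real Cache instances.
const resumedCacheStack = [];
let resumedCacheCurrent = null;

function pushResumedCache(cache) {
  resumedCacheStack.push(resumedCacheCurrent);
  resumedCacheCurrent = cache;
}

function popResumedCache() {
  resumedCacheCurrent = resumedCacheStack.pop();
}

function peekCache(root) {
  // Mirrors peekCacheFromPool: the resumed cache wins, then the root's pool.
  return resumedCacheCurrent !== null ? resumedCacheCurrent : root.pooledCache;
}

// Retrying a suspended boundary whose previous render used `previousCache`,
// while the current render has already pooled `freshCache` on the root.
const root = {pooledCache: {id: 'freshCache'}};
const previousCache = {id: 'previousCache'};

pushResumedCache(previousCache);  // restoreSpawnedCachePool
console.log(peekCache(root).id);  // 'previousCache'
popResumedCache();                // popCachePool
console.log(peekCache(root).id);  // 'freshCache'

This single lookup path is what lets getSuspendedCachePool and getOffscreenDeferredCachePool both read from peekCacheFromPool instead of juggling pooledCache and _suspendedPooledCache.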