Fix memory cache (#762)
* Initial pass at fixing the memory cache

* Account for loading state in lru cache unload logic

* Remove nodes after the fact

* Simplify LRUCache eviction logic

* Add "loading" field for cache tiles

* Handle unloading correctly

* Remove log

* Don't try to dispose of any items above the max cache sizes unless we have tiles that are still loading

* Comment

* comments

* Comments

* Fix tests, ensure we only exit eviction loops when both size conditions are met

* Update tests

* Update LRUCache.js
gkjohnson authored Sep 30, 2024
1 parent fd64f40 commit 7cd3bb5
Showing 5 changed files with 196 additions and 47 deletions.
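The core of the change is that eviction priority now considers a tile's loading state in addition to its depth. Below is a minimal, standalone sketch of that ordering using plain objects rather than the library's tile class; the constant values and field names mirror the diff that follows, but the comparator is simplified (it omits the last-visited-frame and distance tie-breakers).

```js
// Loading-state constants as ordered by this commit; FAILED is negative so it sorts first.
const FAILED = - 1, UNLOADED = 0, LOADING = 1, PARSING = 2, LOADED = 3;

// Simplified version of lruPriorityCallback: deeper tiles and tiles that are earlier
// along in the loading process are disposed of first.
const unloadPriority = ( a, b ) => {

	if ( a.__depthFromRenderedParent !== b.__depthFromRenderedParent ) {

		// dispose of deeper tiles first
		return a.__depthFromRenderedParent > b.__depthFromRenderedParent ? 1 : - 1;

	} else if ( a.__loadingState !== b.__loadingState ) {

		// dispose of tiles that are earlier along in the loading process first
		return a.__loadingState > b.__loadingState ? - 1 : 1;

	}

	return 0;

};

const tiles = [
	{ __depthFromRenderedParent: 2, __loadingState: LOADED },
	{ __depthFromRenderedParent: 2, __loadingState: LOADING },
	{ __depthFromRenderedParent: 2, __loadingState: FAILED },
];

// LRUCache sorts by the negated priority so eviction candidates end up at the front.
tiles.sort( ( a, b ) => - unloadPriority( a, b ) );
console.log( tiles.map( t => t.__loadingState ) ); // [ -1, 1, 3 ] -> FAILED, then LOADING, then LOADED
```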
26 changes: 20 additions & 6 deletions src/base/TilesRendererBase.js
@@ -52,6 +52,11 @@ const lruPriorityCallback = ( a, b ) => {
// dispose of deeper tiles first
return a.__depthFromRenderedParent > b.__depthFromRenderedParent ? 1 : - 1;

} else if ( a.__loadingState !== b.__loadingState ) {

// dispose of tiles that are earlier along in the loading process first
return a.__loadingState > b.__loadingState ? - 1 : 1;

} else if ( a.__lastFrameVisited !== b.__lastFrameVisited ) {

// dispose of least recent tiles first
@@ -646,6 +651,7 @@ export class TilesRendererBase {
console.error( `TilesRenderer : Failed to load tile at url "${ tile.content.uri }".` );
console.error( e );
tile.__loadingState = FAILED;
lruCache.setLoaded( tile, true );

} else {

@@ -736,16 +742,24 @@

stats.parsing --;
tile.__loadingState = LOADED;
lruCache.setLoaded( tile, true );

// if the cache is full due to newly loaded memory then lets discard this tile - it will
// be loaded again later from the disk cache if needed.
if ( lruCache.isFull() ) {
// If the memory of the item hasn't been registered yet then that means the memory usage hasn't
// been accounted for by the cache yet so we need to check if it fits or if we should remove it.
if ( lruCache.getMemoryUsage( tile ) === null ) {

lruCache.remove( tile );
if ( lruCache.isFull() && lruCache.computeMemoryUsageCallback( tile ) > 0 ) {

} else {
// And if the cache is full due to newly loaded memory then lets discard this tile - it will
// be loaded again later from the disk cache if needed.
lruCache.remove( tile );

lruCache.updateMemoryUsage( tile );
} else {

// Otherwise update the item to the latest known value
lruCache.updateMemoryUsage( tile );

}

}

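Putting the TilesRendererBase changes together: once a tile finishes parsing it is flagged as loaded, and if its memory was never registered the cache decides whether the tile still fits. The sketch below rearranges the calls from the diff above into one hypothetical helper; `cache` and `tile` stand in for the real lruCache and tile objects, so treat it as an illustration rather than the library's code.

```js
// Sketch only - mirrors the calls in the diff above, not a drop-in for the library code.
function onTileParsed( cache, tile ) {

	// mark the tile as fully loaded so eviction no longer treats it as in-flight
	cache.setLoaded( tile, true );

	// a null memory usage means the byte count was unknown when the tile was added
	if ( cache.getMemoryUsage( tile ) === null ) {

		if ( cache.isFull() && cache.computeMemoryUsageCallback( tile ) > 0 ) {

			// the cache is already full and the tile takes real memory - drop it now;
			// it can be reloaded from the disk cache later if needed
			cache.remove( tile );

		} else {

			// otherwise record the now-known byte count
			cache.updateMemoryUsage( tile );

		}

	}

}
```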
3 changes: 2 additions & 1 deletion src/base/constants.js
@@ -1,8 +1,9 @@
// FAILED is negative so lru cache priority sorting will unload it first
export const FAILED = - 1;
export const UNLOADED = 0;
export const LOADING = 1;
export const PARSING = 2;
export const LOADED = 3;
export const FAILED = 4;

// https://en.wikipedia.org/wiki/World_Geodetic_System
// https://en.wikipedia.org/wiki/Flattening
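As the new comment says, moving FAILED below every other state means the `__loadingState` comparison in the priority callback treats failed tiles as the least far along, so they become the first eviction candidates. A small self-contained check of that ordering (the tile objects are made up):

```js
// Lower loading-state value = evicted sooner with the new constants.
const FAILED = - 1, LOADING = 1, LOADED = 3;
const byLoadingState = ( a, b ) => a.__loadingState - b.__loadingState;

const tiles = [
	{ uri: 'a.b3dm', __loadingState: LOADED },
	{ uri: 'b.b3dm', __loadingState: FAILED },
	{ uri: 'c.b3dm', __loadingState: LOADING },
];
console.log( tiles.sort( byLoadingState ).map( t => t.uri ) ); // [ 'b.b3dm', 'c.b3dm', 'a.b3dm' ]
```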
2 changes: 1 addition & 1 deletion src/three/TilesRenderer.js
@@ -76,7 +76,7 @@ export class TilesRenderer extends TilesRendererBase {
this._eventDispatcher = new EventDispatcher();
this._upRotationMatrix = new Matrix4();

this.lruCache.getMemoryUsageCallback = tile => tile.cached.bytesUsed || 0;
this.lruCache.computeMemoryUsageCallback = tile => tile.cached.bytesUsed ?? null;

// flag indicating whether frustum culling should be disabled
this._autoDisableRendererCulling = true;
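The rename from getMemoryUsageCallback to computeMemoryUsageCallback also changes the contract: the callback may now report that a size is simply not known yet instead of defaulting to zero. A small illustrative comparison (the tile literals are made up):

```js
// New behavior: null means "size not measured yet", a number is a real byte count.
const computeMemoryUsage = tile => tile.cached.bytesUsed ?? null;

// Old behavior for comparison: an unmeasured tile was indistinguishable from one that used 0 bytes.
const oldGetMemoryUsage = tile => tile.cached.bytesUsed || 0;

const pending = { cached: {} };
const parsed = { cached: { bytesUsed: 2048 } };

console.log( computeMemoryUsage( pending ), computeMemoryUsage( parsed ) ); // null 2048
console.log( oldGetMemoryUsage( pending ), oldGetMemoryUsage( parsed ) );   // 0 2048
```

That `null` is what the `getMemoryUsage( tile ) === null` check in TilesRendererBase keys off once parsing completes.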
131 changes: 99 additions & 32 deletions src/utilities/LRUCache.js
@@ -52,9 +52,10 @@ class LRUCache {
this.unloadingHandle = - 1;
this.cachedBytes = 0;
this.bytesMap = new Map();
this.loadedSet = new Set();

this._unloadPriorityCallback = null;
this.getMemoryUsageCallback = () => 0;
this.computeMemoryUsageCallback = () => null;

const itemSet = this.itemSet;
this.defaultPriorityCallback = item => itemSet.get( item );
@@ -68,6 +69,12 @@

}

getMemoryUsage( item ) {

return this.bytesMap.get( item ) ?? null;

}

add( item, removeCb ) {

if ( this.markUnusedQueued ) {
@@ -98,8 +105,9 @@
itemSet.set( item, Date.now() );
callbacks.set( item, removeCb );

const bytes = this.getMemoryUsageCallback( item );
this.cachedBytes += bytes;
// computeMemoryUsageCallback can return "null" if memory usage is not known, yet
const bytes = this.computeMemoryUsageCallback( item );
this.cachedBytes += bytes || 0;
bytesMap.set( item, bytes );

return true;
@@ -113,10 +121,11 @@
const itemList = this.itemList;
const bytesMap = this.bytesMap;
const callbacks = this.callbacks;
const loadedSet = this.loadedSet;

if ( itemSet.has( item ) ) {

this.cachedBytes -= bytesMap.get( item );
this.cachedBytes -= bytesMap.get( item ) || 0;
bytesMap.delete( item );

callbacks.get( item )( item );
@@ -126,6 +135,7 @@
usedSet.delete( item );
itemSet.delete( item );
callbacks.delete( item );
loadedSet.delete( item );

return true;

@@ -135,6 +145,28 @@

}

// Marks whether tiles in the cache have been completely loaded or not. Tiles that have not been completely
// loaded are subject to being disposed early if the cache is full above its max size limits, even if they
// are marked as used.
setLoaded( item, value ) {

const { itemSet, loadedSet } = this;
if ( itemSet.has( item ) ) {

if ( value === true ) {

loadedSet.add( item );

} else {

loadedSet.delete( item );

}

}

}

updateMemoryUsage( item ) {

const itemSet = this.itemSet;
@@ -145,9 +177,9 @@

}

this.cachedBytes -= bytesMap.get( item );
this.cachedBytes -= bytesMap.get( item ) || 0;

const bytes = this.getMemoryUsageCallback( item );
const bytes = this.computeMemoryUsageCallback( item );
bytesMap.set( item, bytes );
this.cachedBytes += bytes;

@@ -196,32 +228,44 @@
itemList,
itemSet,
usedSet,
loadedSet,
callbacks,
bytesMap,
minBytesSize,
maxBytesSize,
} = this;

const unused = itemList.length - usedSet.size;
const unloaded = itemList.length - loadedSet.size;
const excessNodes = Math.max( Math.min( itemList.length - minSize, unused ), 0 );
const excessBytes = this.cachedBytes - minBytesSize;
const unloadPriorityCallback = this.unloadPriorityCallback || this.defaultPriorityCallback;
let needsRerun = false;

const hasNodesToUnload = excessNodes > 0 && unused > 0 || itemList.length > maxSize;
const hasBytesToUnload = unused && this.cachedBytes > minBytesSize || this.cachedBytes > maxBytesSize;
const hasNodesToUnload = excessNodes > 0 && unused > 0 || unloaded && itemList.length > maxSize;
const hasBytesToUnload = unused && this.cachedBytes > minBytesSize || unloaded && this.cachedBytes > maxBytesSize;
if ( hasBytesToUnload || hasNodesToUnload ) {

// used items should be at the end of the array
// used items should be at the end of the array, "unloaded" items in the middle of the array
itemList.sort( ( a, b ) => {

const usedA = usedSet.has( a );
const usedB = usedSet.has( b );
if ( usedA === usedB ) {

// Use the sort function otherwise
// higher priority should be further to the left
return - unloadPriorityCallback( a, b );
const loadedA = loadedSet.has( a );
const loadedB = loadedSet.has( b );
if ( loadedA === loadedB ) {

// Use the sort function otherwise
// higher priority should be further to the left
return - unloadPriorityCallback( a, b );

} else {

return loadedA ? 1 : - 1;

}

} else {

@@ -241,27 +285,44 @@

let removedNodes = 0;
let removedBytes = 0;
while ( true ) {

// evict up to the max node or bytes size, keeping one more item over the max bytes limit
// so the "full" function behaves correctly.
while (
this.cachedBytes - removedBytes > maxBytesSize ||
itemList.length - removedNodes > maxSize
) {

const item = itemList[ removedNodes ];
const bytes = bytesMap.get( item );
const bytes = bytesMap.get( item ) || 0;
if (
usedSet.has( item ) && loadedSet.has( item ) ||
this.cachedBytes - removedBytes - bytes < maxBytesSize &&
itemList.length - removedNodes <= maxSize
) {

// note that these conditions ensure we keep one tile over the byte cap so we can
// align with what the isFull function reports.
break;

}

removedBytes += bytes;
removedNodes ++;

}

// base while condition
const doContinue =
removedNodes < nodesToUnload
|| removedBytes < bytesToUnload
|| this.cachedBytes - removedBytes - bytes > maxBytesSize
|| itemList.length - removedNodes > maxSize;
// evict up to the min node or bytes size, keeping one more item over the min bytes limit
// so we're meeting it
while (
removedBytes < bytesToUnload ||
removedNodes < nodesToUnload
) {

// don't unload any used tiles unless we're above our size cap
const item = itemList[ removedNodes ];
const bytes = bytesMap.get( item ) || 0;
if (
! doContinue
|| removedNodes >= unused
&& this.cachedBytes - removedBytes - bytes <= maxBytesSize
&& itemList.length - removedNodes <= maxSize
usedSet.has( item ) ||
this.cachedBytes - removedBytes - bytes < minBytesSize &&
removedNodes >= nodesToUnload
) {

break;
Expand All @@ -271,15 +332,21 @@ class LRUCache {
removedBytes += bytes;
removedNodes ++;

bytesMap.delete( item );
}

// remove the nodes
itemList.splice( 0, removedNodes ).forEach( item => {

this.cachedBytes -= bytesMap.get( item ) || 0;

callbacks.get( item )( item );
bytesMap.delete( item );
itemSet.delete( item );
callbacks.delete( item );
loadedSet.delete( item );
usedSet.delete( item );

}

itemList.splice( 0, removedNodes );
this.cachedBytes -= removedBytes;
} );

// if we didn't remove enough nodes or we still have excess bytes and there are nodes to remove
// then we want to fire another round of unloading
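Read as a whole, the reworked unload() is now a two-phase pass. The sketch below is a hypothetical paraphrase of that control flow, not the actual source: `isOverHardCap`, `isOverSoftTarget`, and `dispose` are invented helpers standing in for the inline byte/count arithmetic and removal callbacks shown in the diff above.

```js
// Hypothetical paraphrase of LRUCache.unload() after this change. Helper names are
// invented for readability; the real code inlines the size checks and cleanup.
function evictPass( cache ) {

	const { itemList, usedSet, loadedSet } = cache;
	let removed = 0;

	// Phase 1: enforce the hard caps (maxSize / maxBytesSize). Items that are still
	// loading may be evicted here even if they are currently marked as used.
	while ( cache.isOverHardCap( removed ) ) {

		const item = itemList[ removed ];
		if ( usedSet.has( item ) && loadedSet.has( item ) ) break;
		removed ++;

	}

	// Phase 2: trim further toward the soft targets (minSize / minBytesSize),
	// but never touch items that are currently in use.
	while ( cache.isOverSoftTarget( removed ) ) {

		const item = itemList[ removed ];
		if ( usedSet.has( item ) ) break;
		removed ++;

	}

	// Finally remove everything chosen above in a single splice, invoking the
	// per-item removal callbacks and clearing the bookkeeping maps.
	itemList.splice( 0, removed ).forEach( item => cache.dispose( item ) );

}
```

The key behavioral difference from before the commit is the first phase: tiles that have not finished loading can now be reclaimed to get back under the hard caps even while they are marked as used, which is what the "Don't try to dispose of any items above the max cache sizes unless we have tiles that are still loading" commit refers to.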