diff --git a/src/base/TilesRendererBase.js b/src/base/TilesRendererBase.js
index ffb42cf11..d1090e7b1 100644
--- a/src/base/TilesRendererBase.js
+++ b/src/base/TilesRendererBase.js
@@ -52,6 +52,11 @@ const lruPriorityCallback = ( a, b ) => {
 		// dispose of deeper tiles first
 		return a.__depthFromRenderedParent > b.__depthFromRenderedParent ? 1 : - 1;
 
+	} else if ( a.__loadingState !== b.__loadingState ) {
+
+		// dispose of tiles that are earlier along in the loading process first
+		return a.__loadingState > b.__loadingState ? - 1 : 1;
+
 	} else if ( a.__lastFrameVisited !== b.__lastFrameVisited ) {
 
 		// dispose of least recent tiles first
@@ -646,6 +651,7 @@ export class TilesRendererBase {
 			console.error( `TilesRenderer : Failed to load tile at url "${ tile.content.uri }".` );
 			console.error( e );
 			tile.__loadingState = FAILED;
+			lruCache.setLoaded( tile, true );
 
 		} else {
 
@@ -736,16 +742,24 @@ export class TilesRendererBase {
 			stats.parsing --;
 			tile.__loadingState = LOADED;
+			lruCache.setLoaded( tile, true );
 
-			// if the cache is full due to newly loaded memory then lets discard this tile - it will
-			// be loaded again later from the disk cache if needed.
-			if ( lruCache.isFull() ) {
+			// If the memory of the item hasn't been registered yet then that means the memory usage hasn't
+			// been accounted for by the cache yet so we need to check if it fits or if we should remove it.
+			if ( lruCache.getMemoryUsage( tile ) === null ) {
 
-				lruCache.remove( tile );
+				if ( lruCache.isFull() && lruCache.computeMemoryUsageCallback( tile ) > 0 ) {
 
-			} else {
+					// And if the cache is full due to newly loaded memory then lets discard this tile - it will
+					// be loaded again later from the disk cache if needed.
+					lruCache.remove( tile );
 
-				lruCache.updateMemoryUsage( tile );
+				} else {
+
+					// Otherwise update the item to the latest known value
+					lruCache.updateMemoryUsage( tile );
+
+				}
 
 			}
 
 
diff --git a/src/base/constants.js b/src/base/constants.js
index ee656c21b..043b7288b 100644
--- a/src/base/constants.js
+++ b/src/base/constants.js
@@ -1,8 +1,9 @@
+// FAILED is negative so lru cache priority sorting will unload it first
+export const FAILED = - 1;
 export const UNLOADED = 0;
 export const LOADING = 1;
 export const PARSING = 2;
 export const LOADED = 3;
-export const FAILED = 4;
 
 // https://en.wikipedia.org/wiki/World_Geodetic_System
 // https://en.wikipedia.org/wiki/Flattening
diff --git a/src/three/TilesRenderer.js b/src/three/TilesRenderer.js
index 7dfc5a818..21c283f42 100644
--- a/src/three/TilesRenderer.js
+++ b/src/three/TilesRenderer.js
@@ -76,7 +76,7 @@ export class TilesRenderer extends TilesRendererBase {
 		this._eventDispatcher = new EventDispatcher();
 		this._upRotationMatrix = new Matrix4();
-		this.lruCache.getMemoryUsageCallback = tile => tile.cached.bytesUsed || 0;
+		this.lruCache.computeMemoryUsageCallback = tile => tile.cached.bytesUsed ?? null;
 
 		// flag indicating whether frustum culling should be disabled
 		this._autoDisableRendererCulling = true;
 
diff --git a/src/utilities/LRUCache.js b/src/utilities/LRUCache.js
index bb14c1d2e..f6b13039c 100644
--- a/src/utilities/LRUCache.js
+++ b/src/utilities/LRUCache.js
@@ -52,9 +52,10 @@ class LRUCache {
 		this.unloadingHandle = - 1;
 		this.cachedBytes = 0;
 		this.bytesMap = new Map();
+		this.loadedSet = new Set();
 
 		this._unloadPriorityCallback = null;
-		this.getMemoryUsageCallback = () => 0;
+		this.computeMemoryUsageCallback = () => null;
 
 		const itemSet = this.itemSet;
 		this.defaultPriorityCallback = item => itemSet.get( item );
@@ -68,6 +69,12 @@ class LRUCache {
 
 	}
 
+	getMemoryUsage( item ) {
+
+		return this.bytesMap.get( item ) ?? null;
+
+	}
+
 	add( item, removeCb ) {
 
 		if ( this.markUnusedQueued ) {
@@ -98,8 +105,9 @@ class LRUCache {
 		itemSet.set( item, Date.now() );
 		callbacks.set( item, removeCb );
 
-		const bytes = this.getMemoryUsageCallback( item );
-		this.cachedBytes += bytes;
+		// computeMemoryUsageCallback can return "null" if memory usage is not known, yet
+		const bytes = this.computeMemoryUsageCallback( item );
+		this.cachedBytes += bytes || 0;
 		bytesMap.set( item, bytes );
 
 		return true;
@@ -113,10 +121,11 @@ class LRUCache {
 		const itemList = this.itemList;
 		const bytesMap = this.bytesMap;
 		const callbacks = this.callbacks;
+		const loadedSet = this.loadedSet;
 		if ( itemSet.has( item ) ) {
 
-			this.cachedBytes -= bytesMap.get( item );
+			this.cachedBytes -= bytesMap.get( item ) || 0;
 			bytesMap.delete( item );
 
 			callbacks.get( item )( item );
 
@@ -126,6 +135,7 @@ class LRUCache {
 			usedSet.delete( item );
 			itemSet.delete( item );
 			callbacks.delete( item );
+			loadedSet.delete( item );
 
 			return true;
 
@@ -135,6 +145,28 @@ class LRUCache {
 
 	}
 
+	// Marks whether tiles in the cache have been completely loaded or not. Tiles that have not been completely
+	// loaded are subject to being disposed early if the cache is full above its max size limits, even if they
+	// are marked as used.
+	setLoaded( item, value ) {
+
+		const { itemSet, loadedSet } = this;
+		if ( itemSet.has( item ) ) {
+
+			if ( value === true ) {
+
+				loadedSet.add( item );
+
+			} else {
+
+				loadedSet.delete( item );
+
+			}
+
+		}
+
+	}
+
 	updateMemoryUsage( item ) {
 
 		const itemSet = this.itemSet;
@@ -145,9 +177,9 @@ class LRUCache {
 
 		}
 
-		this.cachedBytes -= bytesMap.get( item );
+		this.cachedBytes -= bytesMap.get( item ) || 0;
 
-		const bytes = this.getMemoryUsageCallback( item );
+		const bytes = this.computeMemoryUsageCallback( item );
 		bytesMap.set( item, bytes );
 		this.cachedBytes += bytes;
 
@@ -196,6 +228,7 @@ class LRUCache {
 			itemList,
 			itemSet,
 			usedSet,
+			loadedSet,
 			callbacks,
 			bytesMap,
 			minBytesSize,
@@ -203,25 +236,36 @@ class LRUCache {
 		} = this;
 
 		const unused = itemList.length - usedSet.size;
+		const unloaded = itemList.length - loadedSet.size;
 		const excessNodes = Math.max( Math.min( itemList.length - minSize, unused ), 0 );
 		const excessBytes = this.cachedBytes - minBytesSize;
 		const unloadPriorityCallback = this.unloadPriorityCallback || this.defaultPriorityCallback;
 		let needsRerun = false;
 
-		const hasNodesToUnload = excessNodes > 0 && unused > 0 || itemList.length > maxSize;
-		const hasBytesToUnload = unused && this.cachedBytes > minBytesSize || this.cachedBytes > maxBytesSize;
+		const hasNodesToUnload = excessNodes > 0 && unused > 0 || unloaded && itemList.length > maxSize;
+		const hasBytesToUnload = unused && this.cachedBytes > minBytesSize || unloaded && this.cachedBytes > maxBytesSize;
 		if ( hasBytesToUnload || hasNodesToUnload ) {
 
-			// used items should be at the end of the array
+			// used items should be at the end of the array, "unloaded" items in the middle of the array
 			itemList.sort( ( a, b ) => {
 
 				const usedA = usedSet.has( a );
 				const usedB = usedSet.has( b );
 				if ( usedA === usedB ) {
 
-					// Use the sort function otherwise
-					// higher priority should be further to the left
-					return - unloadPriorityCallback( a, b );
+					const loadedA = loadedSet.has( a );
+					const loadedB = loadedSet.has( b );
+					if ( loadedA === loadedB ) {
+
+						// Use the sort function otherwise
+						// higher priority should be further to the left
+						return - unloadPriorityCallback( a, b );
+
+					} else {
+
+						return loadedA ? 1 : - 1;
+
+					}
 
 				} else {
 
@@ -241,27 +285,44 @@ class LRUCache {
 
 			let removedNodes = 0;
 			let removedBytes = 0;
-			while ( true ) {
+
+			// evict up to the max node or bytes size, keeping one more item over the max bytes limit
+			// so the "full" function behaves correctly.
+			while (
+				this.cachedBytes - removedBytes > maxBytesSize ||
+				itemList.length - removedNodes > maxSize
+			) {
 
 				const item = itemList[ removedNodes ];
-				const bytes = bytesMap.get( item );
+				const bytes = bytesMap.get( item ) || 0;
+				if (
+					usedSet.has( item ) && loadedSet.has( item ) ||
+					this.cachedBytes - removedBytes - bytes < maxBytesSize &&
+					itemList.length - removedNodes <= maxSize
+				) {
 
-				// note that these conditions ensure we keep one tile over the byte cap so we can
-				// align with the the isFull function reports.
+					break;
+
+				}
+
+				removedBytes += bytes;
+				removedNodes ++;
+
+			}
 
-				// base while condition
-				const doContinue =
-					removedNodes < nodesToUnload
-					|| removedBytes < bytesToUnload
-					|| this.cachedBytes - removedBytes - bytes > maxBytesSize
-					|| itemList.length - removedNodes > maxSize;
+			// evict up to the min node or bytes size, keeping one more item over the min bytes limit
+			// so we're meeting it
+			while (
+				removedBytes < bytesToUnload ||
+				removedNodes < nodesToUnload
+			) {
 
-				// don't unload any used tiles unless we're above our size cap
+				const item = itemList[ removedNodes ];
+				const bytes = bytesMap.get( item ) || 0;
 				if (
-					! doContinue
-					|| removedNodes >= unused
-					&& this.cachedBytes - removedBytes - bytes <= maxBytesSize
-					&& itemList.length - removedNodes <= maxSize
+					usedSet.has( item ) ||
+					this.cachedBytes - removedBytes - bytes < minBytesSize &&
+					removedNodes >= nodesToUnload
 				) {
 
 					break;
@@ -271,15 +332,21 @@ class LRUCache {
 
 				removedBytes += bytes;
 				removedNodes ++;
 
-				bytesMap.delete( item );
+			}
+
+			// remove the nodes
+			itemList.splice( 0, removedNodes ).forEach( item => {
+
+				this.cachedBytes -= bytesMap.get( item ) || 0;
+				callbacks.get( item )( item );
+				bytesMap.delete( item );
 				itemSet.delete( item );
 				callbacks.delete( item );
+				loadedSet.delete( item );
+				usedSet.delete( item );
 
-			}
-
-			itemList.splice( 0, removedNodes );
-			this.cachedBytes -= removedBytes;
+			} );
 
 			// if we didn't remove enough nodes or we still have excess bytes and there are nodes to removed
 			// then we want to fire another round of unloading
diff --git a/test/LRUCache.test.js b/test/LRUCache.test.js
index 41705847e..6bdef35c7 100644
--- a/test/LRUCache.test.js
+++ b/test/LRUCache.test.js
@@ -52,10 +52,10 @@ describe( 'LRUCache', () => {
 		cache.add( {}, () => {} );
 		expect( cache.isFull() ).toEqual( true );
 
-		cache.unloadUnusedContent( null );
+		cache.unloadUnusedContent();
 		expect( cache.isFull() ).toEqual( true );
 
 		cache.markAllUnused();
-		cache.unloadUnusedContent( null );
+		cache.unloadUnusedContent();
 		expect( cache.isFull() ).toEqual( false );
 
@@ -124,7 +124,7 @@ describe( 'LRUCache', () => {
 		cache.minBytesSize = 5;
 		cache.maxBytesSize = 25;
 		cache.unloadPercent = 1;
-		cache.getMemoryUsageCallback = () => 4;
+		cache.computeMemoryUsageCallback = () => 4;
 
 		for ( let i = 0; i < 10; i ++ ) {
 
@@ -138,8 +138,8 @@ describe( 'LRUCache', () => {
 
 		cache.markAllUnused();
 		cache.unloadUnusedContent();
-		expect( cache.itemList.length ).toEqual( 1 );
-		expect( cache.cachedBytes ).toEqual( 4 );
+		expect( cache.itemList.length ).toEqual( 2 );
+		expect( cache.cachedBytes ).toEqual( 8 );
 
 	} );
 
@@ -149,7 +149,7 @@ describe( 'LRUCache', () => {
 		cache.minBytesSize = 10;
 		cache.maxBytesSize = 25;
 		cache.unloadPercent = 1;
-		cache.getMemoryUsageCallback = () => 1;
+		cache.computeMemoryUsageCallback = () => 1;
 
 		const items = new Array( 10 ).fill().map( () => ( { priority: 1 } ) );
 		for ( let i = 0; i < 10; i ++ ) {
@@ -165,7 +165,7 @@ describe( 'LRUCache', () => {
 		expect( cache.cachedBytes ).toEqual( 10 );
 		expect( cache.itemList.length ).toEqual( 10 );
 
-		cache.getMemoryUsageCallback = () => 4;
+		cache.computeMemoryUsageCallback = () => 4;
 		for ( let i = 0; i < 10; i ++ ) {
 
 			cache.updateMemoryUsage( items[ i ] );
@@ -181,4 +181,71 @@ describe( 'LRUCache', () => {
 
 	} );
 
+	it( 'should allow for unloading "used" items if they are unloaded and above the max bytes size threshold.', () => {
+
+		const cache = new LRUCache();
+		cache.minBytesSize = 0;
+		cache.maxBytesSize = 5;
+		cache.unloadPercent = 1;
+		cache.computeMemoryUsageCallback = () => null;
+
+		// insert items with unknown memory quantity
+		const items = new Array( 10 ).fill().map( () => ( { priority: 1 } ) );
+		for ( let i = 0; i < 10; i ++ ) {
+
+			cache.add( items[ i ], () => {} );
+
+		}
+
+		expect( cache.isFull() ).toEqual( false );
+
+		// update all items to have a memory quantity that's over the cache limit and update the items
+		cache.computeMemoryUsageCallback = () => 1;
+		cache.itemList.forEach( item => cache.updateMemoryUsage( item ) );
+
+		expect( cache.isFull() ).toEqual( true );
+		expect( cache.itemList.length ).toEqual( 10 );
+
+		cache.unloadUnusedContent();
+		expect( cache.isFull() ).toEqual( true );
+		expect( cache.itemList.length ).toEqual( 5 );
+
+	} );
+
+	it( 'should not unload "used" items if they are loaded and above the max bytes size threshold.', () => {
+
+		const cache = new LRUCache();
+		cache.minBytesSize = 0;
+		cache.maxBytesSize = 5;
+		cache.unloadPercent = 1;
+		cache.computeMemoryUsageCallback = () => null;
+
+		// insert items with unknown memory quantity
+		const items = new Array( 10 ).fill().map( () => ( { priority: 1 } ) );
+		for ( let i = 0; i < 10; i ++ ) {
+
+			cache.add( items[ i ], () => {} );
+
+		}
+
+		expect( cache.isFull() ).toEqual( false );
+
+		// update all items to have a memory quantity that's over the cache limit and update the items
+		cache.computeMemoryUsageCallback = () => 1;
+		cache.itemList.forEach( item => {
+
+			cache.updateMemoryUsage( item );
+			cache.setLoaded( item, true );
+
+		} );
+
+		expect( cache.isFull() ).toEqual( true );
+		expect( cache.itemList.length ).toEqual( 10 );
+
+		cache.unloadUnusedContent();
+		expect( cache.isFull() ).toEqual( true );
+		expect( cache.itemList.length ).toEqual( 10 );
+
+	} );
+
 } );
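
For reference, a minimal sketch of how the reworked cache API is meant to be driven, mirroring the new tests and the TilesRendererBase change above. The import path and the `tile` object with its `bytesUsed` field are illustrative assumptions; only the `LRUCache` members exercised here (`computeMemoryUsageCallback`, `getMemoryUsage`, `updateMemoryUsage`, `setLoaded`, `isFull`, `remove`) come from this patch.

```js
import { LRUCache } from '3d-tiles-renderer';

// Small byte budget so the flow is easy to follow; values are illustrative.
const cache = new LRUCache();
cache.minBytesSize = 0;
cache.maxBytesSize = 5;
cache.unloadPercent = 1;

// Report "null" while an item's size is unknown so the cache defers accounting for it.
cache.computeMemoryUsageCallback = tile => tile.bytesUsed ?? null;

const tile = { bytesUsed: null };
cache.add( tile, t => console.log( 'disposed', t ) );

// ... later, once the tile has finished parsing and its size is known ...
tile.bytesUsed = 3;
cache.setLoaded( tile, true );

if ( cache.getMemoryUsage( tile ) === null ) {

	// No byte count has been registered for this tile yet: either drop it because it
	// no longer fits, or record its now-known size, as TilesRendererBase does above.
	if ( cache.isFull() && cache.computeMemoryUsageCallback( tile ) > 0 ) {

		cache.remove( tile );

	} else {

		cache.updateMemoryUsage( tile );

	}

}
```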