diff --git a/docs/docs/api/CacheStore.md b/docs/docs/api/CacheStore.md index 48e750254ac..b7e3cd192f8 100644 --- a/docs/docs/api/CacheStore.md +++ b/docs/docs/api/CacheStore.md @@ -25,13 +25,14 @@ The store must implement the following functions: Optional. This tells the cache interceptor if the store is full or not. If this is true, the cache interceptor will not attempt to cache the response. -### Function: `getValueByKey` +### Function: `get` Parameters: * **req** `Dispatcher.RequestOptions` - Incoming request -Returns: `GetValueByKeyResult | Promise | undefined` - If the request is cached, a readable for the body is returned. Otherwise, `undefined` is returned. +Returns: `GetResult | Promise | undefined` - If the request is cached, the cached response is returned. If the request's method is anything other than HEAD, the response's body is also returned. +If the request isn't cached, `undefined` is returned. Response properties: diff --git a/lib/cache/memory-cache-store.js b/lib/cache/memory-cache-store.js index 5d4ff2874e3..913139f0980 100644 --- a/lib/cache/memory-cache-store.js +++ b/lib/cache/memory-cache-store.js @@ -63,9 +63,9 @@ class MemoryCacheStore { /** * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key - * @returns {import('../../types/cache-interceptor.d.ts').default.GetValueByKeyResult | undefined} + * @returns {import('../../types/cache-interceptor.d.ts').default.GetResult | undefined} */ - getValueByKey (key) { + get (key) { if (typeof key !== 'object') { throw new TypeError(`expected key to be object, got ${typeof key}`) } @@ -121,9 +121,10 @@ class MemoryCacheStore { const values = this.#getValuesForRequest(key, true) /** - * @type {number} + * @type {(MemoryStoreValue & { index: number }) | undefined} */ let value = this.#findValue(key, values) + let valueIndex = value?.index if (!value) { // The value doesn't already exist, meaning we haven't cached this // response before. 
Let's assign it a value and insert it into our data @@ -150,9 +151,11 @@ class MemoryCacheStore { // Our value is either the only response for this path or our deleteAt // time is sooner than all the other responses values.push(value) + valueIndex = values.length - 1 } else if (opts.deleteAt >= values[0].deleteAt) { // Our deleteAt is later than everyone elses values.unshift(value) + valueIndex = 0 } else { // We're neither in the front or the end, let's just binary search to // find our stop we need to be in @@ -168,6 +171,7 @@ class MemoryCacheStore { const middleValue = values[middleIndex] if (opts.deleteAt === middleIndex) { values.splice(middleIndex, 0, value) + valueIndex = middleIndex break } else if (opts.deleteAt > middleValue.opts.deleteAt) { endIndex = middleIndex @@ -215,7 +219,7 @@ class MemoryCacheStore { if (currentSize >= maxEntrySize) { body = null this.end() - // TODO remove element from values + shiftAtIndex(values, valueIndex) return callback() } @@ -276,7 +280,7 @@ class MemoryCacheStore { * to respond with. 
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} req * @param {MemoryStoreValue[]} values - * @returns {MemoryStoreValue | undefined} + * @returns {(MemoryStoreValue & { index: number }) | undefined} */ #findValue (req, values) { /** @@ -311,7 +315,10 @@ class MemoryCacheStore { } if (matches) { - value = current + value = { + ...current, + index: i + } break } } @@ -320,4 +327,16 @@ class MemoryCacheStore { } } +/** + * @param {any[]} array Array to modify + * @param {number} idx Index to delete + */ +function shiftAtIndex (array, idx) { + for (let i = idx + 1; i < array.length; i++) { + array[i - 1] = array[i] + } + + array.length-- +} + module.exports = MemoryCacheStore diff --git a/lib/handler/cache-handler.js b/lib/handler/cache-handler.js index 5e9cde46b29..fe267cd114e 100644 --- a/lib/handler/cache-handler.js +++ b/lib/handler/cache-handler.js @@ -135,31 +135,43 @@ class CacheHandler extends DecoratorHandler { cacheControlDirectives ) - this.#writeStream = this.#store.createWriteStream(this.#cacheKey, { - statusCode, - statusMessage, - rawHeaders: strippedHeaders, - vary: varyDirectives, - cachedAt: now, - staleAt, - deleteAt - }) - - if (this.#writeStream) { - const handler = this - this.#writeStream - .on('drain', resume) - .on('error', function () { + if (this.#cacheKey.method === 'HEAD') { + this.#store.createWriteStream(this.#cacheKey, { + statusCode, + statusMessage, + rawHeaders: strippedHeaders, + vary: varyDirectives, + cachedAt: now, + staleAt, + deleteAt + }) + } else { + this.#writeStream = this.#store.createWriteStream(this.#cacheKey, { + statusCode, + statusMessage, + rawHeaders: strippedHeaders, + vary: varyDirectives, + cachedAt: now, + staleAt, + deleteAt + }) + + if (this.#writeStream) { + const handler = this + this.#writeStream + .on('drain', resume) + .on('error', function () { // TODO (fix): Make error somehow observable? 
- }) - .on('close', function () { - if (handler.#writeStream === this) { - handler.#writeStream = undefined - } - - // TODO (fix): Should we resume even if was paused downstream? - resume() - }) + }) + .on('close', function () { + if (handler.#writeStream === this) { + handler.#writeStream = undefined + } + + // TODO (fix): Should we resume even if was paused downstream? + resume() + }) + } } } @@ -175,7 +187,7 @@ class CacheHandler extends DecoratorHandler { onData (chunk) { let paused = false - if (this.#writeStream && this.#cacheKey.method !== 'HEAD') { + if (this.#writeStream) { paused ||= this.#writeStream.write(chunk) === false } @@ -234,7 +246,6 @@ function canCacheResponse (statusCode, headers, cacheControlDirectives) { } if ( - !cacheControlDirectives.public || cacheControlDirectives.private === true || cacheControlDirectives['no-cache'] === true || cacheControlDirectives['no-store'] @@ -249,7 +260,7 @@ function canCacheResponse (statusCode, headers, cacheControlDirectives) { // https://www.rfc-editor.org/rfc/rfc9111.html#name-storing-responses-to-authen if (headers.authorization) { - if (typeof headers.authorization !== 'string') { + if (!cacheControlDirectives.public || typeof headers.authorization !== 'string') { return false } @@ -299,7 +310,10 @@ function determineStaleAt (now, headers, cacheControlDirectives) { if (headers.expire && typeof headers.expire === 'string') { // https://www.rfc-editor.org/rfc/rfc9111.html#section-5.3 - return now + (Date.now() - new Date(headers.expire).getTime()) + const expiresDate = new Date(headers.expire) + if (expiresDate instanceof Date && !isNaN(expiresDate)) { + return now + (Date.now() - expiresDate.getTime()) + } } return undefined diff --git a/lib/interceptor/cache.js b/lib/interceptor/cache.js index 22238992a9e..fca9ee831e1 100644 --- a/lib/interceptor/cache.js +++ b/lib/interceptor/cache.js @@ -55,7 +55,7 @@ module.exports = (opts = {}) => { // TODO (perf): For small entries support returning a Buffer instead 
of a stream. // Maybe store should return { staleAt, headers, body, etc... } instead of a stream + stream.value? // Where body can be a Buffer, string, stream or blob? - const result = store.getValueByKey(cacheKey) + const result = store.get(cacheKey) if (!result) { // Request isn't cached return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler)) @@ -114,6 +114,8 @@ module.exports = (opts = {}) => { if (typeof handler.onComplete === 'function') { handler.onComplete([]) } + + stream?.destroy() } else { stream.on('data', function (chunk) { if (typeof handler.onData === 'function' && !handler.onData(chunk)) { @@ -127,7 +129,7 @@ module.exports = (opts = {}) => { } /** - * @param {import('../../types/cache-interceptor.d.ts').default.GetValueByKeyResult} result + * @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result */ const handleStream = (result) => { const { response: value, body: stream } = result diff --git a/lib/util/cache.js b/lib/util/cache.js index 782b9f1d039..6e3cef5978d 100644 --- a/lib/util/cache.js +++ b/lib/util/cache.js @@ -208,7 +208,7 @@ function assertCacheStore (store, name = 'CacheStore') { throw new TypeError(`expected type of ${name} to be a CacheStore, got ${store === null ? 
'null' : typeof store}`) } - for (const fn of ['getValueByKey', 'createWriteStream', 'deleteByKey']) { + for (const fn of ['get', 'createWriteStream', 'deleteByKey']) { if (typeof store[fn] !== 'function') { throw new TypeError(`${name} needs to have a \`${fn}()\` function`) } diff --git a/test/cache-interceptor/cache-stores.js b/test/cache-interceptor/cache-stores.js index 189f2b1f74d..f3ef749bbf2 100644 --- a/test/cache-interceptor/cache-stores.js +++ b/test/cache-interceptor/cache-stores.js @@ -15,7 +15,7 @@ function cacheStoreTests (CacheStore) { test('matches interface', async () => { const store = new CacheStore() equal(typeof store.isFull, 'boolean') - equal(typeof store.getValueByKey, 'function') + equal(typeof store.get, 'function') equal(typeof store.createWriteStream, 'function') equal(typeof store.deleteByKey, 'function') }) @@ -44,7 +44,7 @@ function cacheStoreTests (CacheStore) { const store = new CacheStore() // Sanity check - equal(store.getValueByKey(request), undefined) + equal(store.get(request), undefined) // Write the response to the store let writeStream = store.createWriteStream(request, requestValue) @@ -52,7 +52,7 @@ function cacheStoreTests (CacheStore) { writeResponse(writeStream, requestBody) // Now try fetching it with a deep copy of the original request - let readResult = store.getValueByKey(structuredClone(request)) + let readResult = store.get(structuredClone(request)) notEqual(readResult, undefined) deepStrictEqual(await readResponse(readResult), { @@ -79,7 +79,7 @@ function cacheStoreTests (CacheStore) { // We haven't cached this one yet, make sure it doesn't confuse it with // another request - equal(store.getValueByKey(anotherRequest), undefined) + equal(store.get(anotherRequest), undefined) // Now let's cache it writeStream = store.createWriteStream(anotherRequest, { @@ -89,7 +89,7 @@ function cacheStoreTests (CacheStore) { notEqual(writeStream, undefined) writeResponse(writeStream, anotherBody) - readResult = 
store.getValueByKey(anotherRequest) + readResult = store.get(anotherRequest) notEqual(readResult, undefined) deepStrictEqual(await readResponse(readResult), { ...anotherValue, @@ -123,7 +123,7 @@ function cacheStoreTests (CacheStore) { notEqual(writeStream, undefined) writeResponse(writeStream, requestBody) - const readResult = store.getValueByKey(request) + const readResult = store.get(request) deepStrictEqual(await readResponse(readResult), { ...requestValue, body: requestBody @@ -155,7 +155,7 @@ function cacheStoreTests (CacheStore) { notEqual(writeStream, undefined) writeResponse(writeStream, requestBody) - equal(store.getValueByKey(request), undefined) + equal(store.get(request), undefined) }) test('respects vary directives', async () => { @@ -186,13 +186,13 @@ function cacheStoreTests (CacheStore) { const store = new CacheStore() // Sanity check - equal(store.getValueByKey(request), undefined) + equal(store.get(request), undefined) const writeStream = store.createWriteStream(request, requestValue) notEqual(writeStream, undefined) writeResponse(writeStream, requestBody) - const readStream = store.getValueByKey(structuredClone(request)) + const readStream = store.get(structuredClone(request)) notEqual(readStream, undefined) deepStrictEqual(await readResponse(readStream), { ...requestValue, @@ -207,7 +207,7 @@ function cacheStoreTests (CacheStore) { 'some-header': 'another-value' } } - equal(store.getValueByKey(nonMatchingRequest), undefined) + equal(store.get(nonMatchingRequest), undefined) }) }) } @@ -236,14 +236,14 @@ test('MemoryCacheStore locks values properly', async () => { // Value should now be locked, we shouldn't be able to create a readable or // another writable to it until the first one finishes - equal(store.getValueByKey(request), undefined) + equal(store.get(request), undefined) equal(store.createWriteStream(request, requestValue), undefined) // Close the writable, this should unlock it writeResponse(writable, ['asd']) // Stream is now closed, 
let's lock any new write streams - const result = store.getValueByKey(request) + const result = store.get(request) notEqual(result, undefined) // Consume & close the result, this should lift the write lock @@ -265,8 +265,8 @@ function writeResponse (stream, body) { } /** - * @param {import('../../types/cache-interceptor.d.ts').default.GetValueByKeyResult} result - * @returns {Promise} + * @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result + * @returns {Promise} */ async function readResponse ({ response, body: stream }) { /** diff --git a/test/types/cache-interceptor.test-d.ts b/test/types/cache-interceptor.test-d.ts index 353e79794d0..33a355fe6f9 100644 --- a/test/types/cache-interceptor.test-d.ts +++ b/test/types/cache-interceptor.test-d.ts @@ -5,7 +5,7 @@ import CacheInterceptor from '../../types/cache-interceptor' const store: CacheInterceptor.CacheStore = { isFull: false, - getValue (_: CacheInterceptor.CacheKey): CacheInterceptor.GetValueByKeyResult | Promise | undefined { + get (_: CacheInterceptor.CacheKey): CacheInterceptor.GetResult | Promise | undefined { throw new Error('stub') }, diff --git a/types/cache-interceptor.d.ts b/types/cache-interceptor.d.ts index 10fbc322320..4e49814783a 100644 --- a/types/cache-interceptor.d.ts +++ b/types/cache-interceptor.d.ts @@ -31,7 +31,7 @@ declare namespace CacheHandler { path: string } - export interface GetValueByKeyResult { + export interface GetResult { response: CachedResponse body?: Readable } @@ -45,7 +45,7 @@ declare namespace CacheHandler { */ get isFull(): boolean | undefined - getValueByKey(key: CacheKey): GetValueByKeyResult | Promise | undefined + get(key: CacheKey): GetResult | Promise | undefined createWriteStream(key: CacheKey, value: CachedResponse): Writable | undefined @@ -93,7 +93,7 @@ declare namespace CacheHandler { get isFull (): boolean - getValueByKey (key: CacheKey): GetValueByKeyResult | Promise | 
undefined createWriteStream (key: CacheKey, value: CachedResponse): Writable | undefined