Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(cache): use vary headers to compare cached response with request headers #3

Closed
wants to merge 15 commits into from
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 6 additions & 23 deletions lib/interceptor/cache.js
Original file line number Diff line number Diff line change
Expand Up @@ -127,9 +127,11 @@ export class CacheStore {
#sizeQuery
#size = 0
#maxSize = 128e9
#maxTTL = Infinity

constructor(location = ':memory:', opts = {}) {
this.#maxSize = opts.maxSize ?? this.#maxSize
this.#maxTTL = opts.maxTTL ?? this.#maxTTL

this.#database = new DatabaseSync(location)

Expand Down Expand Up @@ -158,14 +160,14 @@ export class CacheStore {
}

set(key, { data, vary, size, expires }) {
if (!this.#database) {
throw new Error('Database not initialized')
}

if (expires < Date.now()) {
return
}

const maxExpires = Date.now() + this.#maxTTL

expires = expires > maxExpires ? maxExpires : expires
IsakT marked this conversation as resolved.
Show resolved Hide resolved

this.#insertquery.run(key, BJSON.stringify(data), BJSON.stringify(vary), size, expires)

this.#size += size
Expand All @@ -174,10 +176,6 @@ export class CacheStore {
}

get(key) {
if (!this.#database) {
throw new Error('Database not initialized')
}

const rows = this.#getQuery.all(key, Date.now()).map(({ data, vary, size, expires }) => {
return {
data: BJSON.parse(data),
Expand Down Expand Up @@ -223,22 +221,7 @@ export class CacheStore {
}
}
ronag marked this conversation as resolved.
Show resolved Hide resolved

/*
  Sort entries so the response with the MOST vary headers comes first,
  because the request must be matched against the most specific cached
  response before less specific ones. Otherwise a cached response with
  an empty `vary` field would win every time.
*/
function sortEntriesByVary(entries) {
  // Entries without a vary list count as zero headers.
  const varyCount = (entry) => (entry.vary ? entry.vary.length : 0)
  entries.sort((first, second) => varyCount(second) - varyCount(first))
}

function findEntryByHeaders(entries, request) {
sortEntriesByVary(entries)

return entries?.find(
(entry) =>
entry.vary?.every(([key, val]) => {
Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
"cache-control-parser": "^2.0.6",
"cacheable-lookup": "^7.0.0",
"http-errors": "^2.0.0",
"sqlite3": "^5.1.7",
"undici": "^6.19.5"
},
"devDependencies": {
Expand Down
86 changes: 83 additions & 3 deletions test/cache.js
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ function exampleEntries() {
['User-Agent', 'Mozilla/5.0'],
],
size: 100,
expires: Date.now() + 31556952001 + Math.floor(Math.random() * 100),
expires: Date.now() * 2 + Math.floor(Math.random() * 100),
},
{
data: {
Expand All @@ -56,7 +56,7 @@ function exampleEntries() {
['origin2', 'www.google.com/images'],
],
size: 100,
expires: Date.now() + 31556952002 + Math.floor(Math.random() * 100),
expires: Date.now() * 2 + Math.floor(Math.random() * 100),
},
{
data: {
Expand All @@ -73,7 +73,7 @@ function exampleEntries() {
['origin2', 'www.google.com/images'],
],
size: 100,
expires: Date.now() + 31556952003 + Math.floor(Math.random() * 100),
expires: Date.now() * 2 + Math.floor(Math.random() * 100),
},
{
data: {
Expand Down Expand Up @@ -278,3 +278,83 @@ test('Cache purging based on its maxSize', (t) => {

t.equal(totalSize, 400)
})

test('Cache #maxTTL overwriting entries ttl', (t) => {
  t.plan(1)

  const day = 1000 * 60 * 60 * 24
  const cache = new CacheStore(':memory:', { maxTTL: day })
  // Every example entry expires far beyond one day, so each one must be
  // clamped to Date.now() + maxTTL at insertion time.
  exampleEntries().forEach((i) => cache.set('GET:/', i))

  const row = cache.get('GET:/')[0]
  const maxExpires = Date.now() + day

  // Compare with a 1s tolerance instead of truncating both sides to whole
  // seconds: the truncation approach is flaky when set() and this assertion
  // straddle a second boundary.
  t.ok(
    Math.abs(row.expires - maxExpires) < 1000,
    'entry expiry is clamped to Date.now() + maxTTL'
  )
})

// test('200-OK, save to cache, fetch from cache', (t) => {
// t.plan(4)
// const server = createServer((req, res) => {
// res.writeHead(307, {
// Vary: 'Origin2, User-Agent, Accept',
// 'Cache-Control': 'public, immutable',
// 'Content-Length': 4,
// 'Content-Type': 'text/html',
// Connection: 'close',
// Location: 'http://www.google.com/',
// })
// res.end('foob')
// })

// t.teardown(server.close.bind(server))

// const cache = dbsetup()

// const cacheLength1 = cache.get('GET:/').length

// server.listen(0, async () => {
// const serverPort = server.address().port
// // response not found in cache, response should be added to cache.
// const response = await undici.request(`http://0.0.0.0:${serverPort}`, {
// dispatcher: new undici.Agent().compose(interceptors.cache()),
// cache,
// })
// let str = ''
// for await (const chunk of response.body) {
// str += chunk
// }
// const cacheLength2 = cache.get('GET:/').length

// // should return the default server response
// t.equal(str, 'foob')

// t.equal(cacheLength2, cacheLength1 + 1)

// // response found in cache, return cached response.
// const response2 = await undici.request(`http://0.0.0.0:${serverPort}`, {
// dispatcher: new undici.Agent().compose(interceptors.cache()),
// headers: {
// Accept: 'application/txt',
// 'User-Agent': 'Chrome',
// origin2: 'www.google.com/images',
// },
// cache,
// })
// let str2 = ''
// for await (const chunk of response2.body) {
// str2 += chunk
// }

// const cacheLength3 = cache.get('GET:/').length

// // should return the body from the cached entry
// t.equal(str2, 'asd2')

// // cache should still have the same number of entries before
// // and after a cached entry was used as a response.
// t.equal(cacheLength3, cacheLength2)

// cache.close()
// })
// })