Improving cache to only bust on change to SALT #3

Merged · 1 commit · Jan 4, 2024
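As the diff below shows, the shared cache settings move into app/utils/cache.server.ts: a createCache<T>() factory plus NO_CACHE and a SALT taken from CRUNCHY_CACHE_SALT (falling back to Date.now()). The per-module cache keys drop their hard-coded suffixes (2023-12-04, and v2 in versions.server.ts) in favor of ${SALT}, and the shared TTL becomes a year, so entries persist until the salt changes rather than expiring hourly. A minimal sketch of that key-salting idea, assuming a hypothetical getDocKey helper that is not part of the diff:

```ts
// Minimal sketch of the salting scheme, not the literal diff.
// If CRUNCHY_CACHE_SALT is unset, Date.now() at module load means every
// server start effectively gets a fresh salt.
const SALT = process.env.CRUNCHY_CACHE_SALT ?? Date.now()

// Hypothetical key builder mirroring the keys used in doc.server.ts.
function getDocKey(
	access: 'public' | 'private',
	product: string,
	version: string,
	slug: string,
) {
	// Changing the salt changes every key, so stale entries are simply never hit again.
	return `${access}:${product}:${version}:${slug}:${SALT}`
}
```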
app/lib/docs/doc.server.ts (12 additions, 20 deletions)

@@ -3,6 +3,7 @@ import { readFile } from 'fs/promises'
 import LRUCache from 'lru-cache'
 import path from 'path'
 import { z } from 'zod'
+import { NO_CACHE, SALT, createCache } from '~/utils/cache.server.ts'
 import { removeLastSlash } from '~/utils/removeEndSlashes.ts'
 import { type DocAttributes } from './attrs.server.ts'
 import { contentPath, getJsonFile, privateContentPath } from './fs.server.ts'
@@ -20,31 +21,22 @@ declare global {
   var docCache: LRUCache<string, Doc | undefined>
 }
 
-let NO_CACHE = process.env.NO_CACHE ?? false
-
 /**
  * While we're using HTTP caching, we have this memory cache too so that
  * document requests and data request to the same document can do less work for
  * new versions. This makes our origin server very fast, but adding HTTP caching
  * let's have simpler and faster deployments with just one origin server, but
  * still distribute the documents across the CDN.
  */
-global.docCache ??= new LRUCache<string, Doc | undefined>({
-  // let docCache = new LRUCache<string, Doc | undefined>({
-  max: 300,
-  ttl: NO_CACHE ? 1 : 1000 * 60 * 60, // 1 hour
-  allowStale: !NO_CACHE,
-  noDeleteOnFetchRejection: true,
-  fetchMethod: async key => {
-    console.log('Fetching fresh doc', key)
-    const [access, product, version, slug] = key.split(':')
-    return getFreshDoc({
-      product,
-      version,
-      slug,
-      isPrivate: access === 'private',
-    })
-  },
+global.docCache ??= createCache<Doc | undefined>(async key => {
+  console.log('Fetching fresh doc', key)
+  const [access, product, version, slug] = key.split(':')
+  return getFreshDoc({
+    product,
+    version,
+    slug,
+    isPrivate: access === 'private',
+  })
 })
@@ -63,12 +55,12 @@ export async function getDoc({
   }
 
   if (isPrivate) {
-    const key = `private:${product}:${version}:${slug}:2023-12-04`
+    const key = `private:${product}:${version}:${slug}:${SALT}`
     const doc = await docCache.fetch(key)
     if (doc) return doc
   }
 
-  const key = `public:${product}:${version}:${slug}:2023-12-04`
+  const key = `public:${product}:${version}:${slug}:${SALT}`
   const doc = await docCache.fetch(key)
   return doc
 }
app/lib/docs/menu.server.ts (5 additions, 10 deletions)

@@ -1,5 +1,6 @@
 import fs from 'fs/promises'
 import LRUCache from 'lru-cache'
+import { NO_CACHE, SALT, createCache } from '~/utils/cache.server.ts'
 import { parseAttrs } from './attrs.server.ts'
 import { contentPath, privateContentPath, walk } from './fs.server.ts'
 import { makeSlug } from './utils.ts'
@@ -26,14 +27,8 @@ declare global {
   var menuCache: LRUCache<string, NavItem[]>
 }
 
-let NO_CACHE = process.env.NO_CACHE ?? false
-
-global.menuCache ??= new LRUCache<string, NavItem[]>({
-  max: 10,
-  ttl: NO_CACHE ? 1 : 1000 * 60 * 60, // 1 hour
-  allowStale: !NO_CACHE,
-  noDeleteOnFetchRejection: true,
-  fetchMethod: async (cacheKey, _stale, { context }) => {
+global.menuCache ??= createCache<NavItem[]>(
+  async (cacheKey, _stale, { context }) => {
     let [access, product, ref] = cacheKey.split(':')
     let menu = await getMenuFromDir({
       product,
@@ -43,7 +38,7 @@ global.menuCache ??= new LRUCache<string, NavItem[]>({
     })
     return menu
   },
-})
+)
 
 export async function getMenu({
   product,
@@ -59,7 +54,7 @@ export async function getMenu({
   return NO_CACHE
     ? getMenuFromDir({ product, version, ref, isPrivate })
     : menuCache.fetch(
-        `${isPrivate ? 'private' : 'public'}:${product}:${ref}:2023-12-04`,
+        `${isPrivate ? 'private' : 'public'}:${product}:${ref}:${SALT}`,
         {
           fetchContext: { version },
         },
app/lib/docs/pdf.server.tsx (6 additions, 11 deletions)

@@ -14,6 +14,7 @@ import LRUCache from 'lru-cache'
 import { getMDXComponent } from 'mdx-bundler/client/index.js'
 import stream from 'node:stream'
 import { useMemo } from 'react'
+import { NO_CACHE, SALT, createCache } from '~/utils/cache.server.ts'
 import { removeEndSlashes } from '~/utils/removeEndSlashes.ts'
 import { getConfig, getDocFromDir } from './doc.server.ts'
 import { NavItem, getMenu } from './menu.server.ts'
@@ -74,21 +75,15 @@ declare global {
   var pdfCache: LRUCache<string, string[] | undefined>
 }
 
-let NO_CACHE = process.env.NO_CACHE ?? false
-
 /**
  * While we're using HTTP caching, we have this memory cache too so that
  * document requests and data request to the same document can do less work for
  * new versions. This makes our origin server very fast, but adding HTTP caching
  * let's have simpler and faster deployments with just one origin server, but
  * still distribute the documents across the CDN.
  */
-global.pdfCache ??= new LRUCache<string, string[] | undefined>({
-  max: 1000,
-  ttl: NO_CACHE ? 1 : 1000 * 60 * 60, // 1 hour
-  allowStale: !NO_CACHE,
-  noDeleteOnFetchRejection: true,
-  fetchMethod: async (key, _stale, { context }) => {
+global.pdfCache ??= createCache<string[] | undefined>(
+  async (key, _stale, { context }) => {
     console.log('Fetching fresh pdf', key)
     const [access, product, version] = key.split(':')
     return getFreshPDFData({
@@ -98,7 +93,7 @@ global.pdfCache ??= new LRUCache<string, string[] | undefined>({
       isPrivate: access === 'private',
     })
   },
-})
+)
 
 export async function getPDFData({
   product,
@@ -116,14 +111,14 @@ export async function getPDFData({
   }
 
   if (isPrivate) {
-    const key = `private:${product}:${ref}:2023-12-04`
+    const key = `private:${product}:${ref}:${SALT}`
     if (pdfCache.has(key)) {
       const doc = await pdfCache.fetch(key, { fetchContext: { ref } })
       return doc
     }
   }
 
-  const key = `public:${product}:${ref}:2023-12-04`
+  const key = `public:${product}:${ref}:${SALT}`
   const docs = await pdfCache.fetch(key, { fetchContext: { ref } })
   return docs
 }
app/lib/docs/search.server.ts (5 additions, 11 deletions)

@@ -3,6 +3,7 @@ import LRUCache from 'lru-cache'
 import lunr from 'lunr'
 import { remark } from 'remark'
 import strip from 'strip-markdown'
+import { NO_CACHE, SALT, createCache } from '~/utils/cache.server.ts'
 import { parseAttrs } from './attrs.server.ts'
 import { contentPath, privateContentPath, walk } from './fs.server.ts'
 import { makeSlug } from './utils.ts'
@@ -22,22 +23,15 @@ declare global {
   var searchCache: LRUCache<string, SearchCache | undefined>
 }
 
-let NO_CACHE = process.env.NO_CACHE ?? false
-
 /**
  * While we're using HTTP caching, we have this memory cache too so that
  * document requests and data request to the same document can do less work for
  * new versions. This makes our origin server very fast, but adding HTTP caching
  * let's have simpler and faster deployments with just one origin server, but
  * still distribute the documents across the CDN.
  */
-global.searchCache ??= new LRUCache<string, SearchCache | undefined>({
-  // let docCache = new LRUCache<string, Doc | undefined>({
-  max: 300,
-  ttl: NO_CACHE ? 1 : 1000 * 60 * 60, // 1 hour
-  allowStale: !NO_CACHE,
-  noDeleteOnFetchRejection: true,
-  fetchMethod: async (key, _stale, { context }) => {
+global.searchCache ??= createCache<SearchCache | undefined>(
+  async (key, _stale, { context }) => {
     console.log('Fetching fresh doc', key)
     const [access, product] = key.split(':')
     return getFreshSearch({
@@ -46,7 +40,7 @@ global.searchCache ??= new LRUCache<string, SearchCache | undefined>({
      isPrivate: access === 'private',
    })
  },
-})
+)
 
 export async function getSearch({
   product,
@@ -62,7 +56,7 @@ export async function getSearch({
   return NO_CACHE
     ? getFreshSearch({ product, isPrivate, version })
     : searchCache.fetch(
-        `${isPrivate ? 'private' : 'public'}:${product}:${ref}:2023-12-04`,
+        `${isPrivate ? 'private' : 'public'}:${product}:${ref}:${SALT}`,
         { fetchContext: { version } },
       )
 }
app/lib/docs/versions.server.ts (6 additions, 14 deletions)

@@ -3,6 +3,7 @@ import path from 'path'
 import semver from 'semver'
 import { z } from 'zod'
 import { type NavLink } from '~/types.ts'
+import { createCache, NO_CACHE, SALT } from '~/utils/cache.server.ts'
 import {
   find,
   fromArray,
@@ -33,20 +34,11 @@ declare global {
   var versionsCache: LRUCache<string, string[]>
 }
 
-let NO_CACHE = process.env.NO_CACHE ?? false
-
 // global for SS "HMR", we need a better story here
-global.versionsCache ??= new LRUCache<string, string[]>({
-  // let versionsCache = new LRUCache<string, string[]>({
-  max: 3,
-  ttl: 1000 * 60 * 60, // 5 minutes, so we can see new versions quickly
-  allowStale: true,
-  noDeleteOnFetchRejection: true,
-  fetchMethod: async key => {
-    console.log('Fetching fresh versions')
-    let [access, product] = key.split(':')
-    return getAllVersions({ product, isPrivate: access === 'private' })
-  },
+global.versionsCache ??= createCache<string[]>(async key => {
+  console.log('Fetching fresh versions')
+  let [access, product] = key.split(':')
+  return getAllVersions({ product, isPrivate: access === 'private' })
 })
 
 export async function getProductVersions({
@@ -60,7 +52,7 @@ export async function getProductVersions({
     return getAllVersions({ product, isPrivate })
   }
   return versionsCache.fetch(
-    `${isPrivate ? 'private' : 'public'}:${product}:v2`,
+    `${isPrivate ? 'private' : 'public'}:${product}:${SALT}`,
   )
 }
app/utils/cache.server.ts (new file, 14 additions)

@@ -0,0 +1,14 @@
+import LRUCache from 'lru-cache'
+
+export let NO_CACHE = process.env.NO_CACHE ?? false
+export let SALT = process.env.CRUNCHY_CACHE_SALT ?? Date.now()
+
+export function createCache<T>(fetchMethod: LRUCache.Fetcher<string, T>) {
+  return new LRUCache<string, T>({
+    max: 1000,
+    ttl: NO_CACHE ? 1 : 1000 * 60 * 60 * 24 * 365, // 1 year
+    allowStale: !NO_CACHE,
+    noDeleteOnFetchRejection: true,
+    fetchMethod,
+  })
+}
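For context, the sketch below shows roughly how the modules above consume this factory: NO_CACHE short-circuits the cache entirely, and the SALT-suffixed key keeps entries alive until the salt changes. exampleCache, getExample, and fetchFreshValue are illustrative names, not code from this repo.

```ts
import { createCache, NO_CACHE, SALT } from '~/utils/cache.server.ts'

// Hypothetical cache built with the new factory, mirroring versions.server.ts.
const exampleCache = createCache<string[]>(async key => {
	const [access, product] = key.split(':')
	return fetchFreshValue(product, access === 'private')
})

async function getExample(product: string, isPrivate: boolean) {
	// NO_CACHE bypasses the cache; otherwise the SALT-suffixed key is used, so
	// entries persist until CRUNCHY_CACHE_SALT (or the process) changes.
	return NO_CACHE
		? fetchFreshValue(product, isPrivate)
		: exampleCache.fetch(`${isPrivate ? 'private' : 'public'}:${product}:${SALT}`)
}

// Illustrative stand-in for the real data loaders (getAllVersions, getFreshDoc, ...).
async function fetchFreshValue(product: string, isPrivate: boolean): Promise<string[]> {
	return [product, isPrivate ? 'private' : 'public']
}
```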
app/utils/env.server.ts (6 additions, 1 deletion)

@@ -1,11 +1,16 @@
 import invariant from 'tiny-invariant'
 
 const requiredServerEnvs = ['NODE_ENV', 'SESSION_SECRET'] as const
+const allServerEnvs = [
+  ...requiredServerEnvs,
+  'NO_CACHE',
+  'CRUNCHY_CACHE_SALT',
+] as const
 
 declare global {
   namespace NodeJS {
     interface ProcessEnv
-      extends Record<(typeof requiredServerEnvs)[number], string> {}
+      extends Record<(typeof allServerEnvs)[number], string> {}
   }
 }
 
package.json (1 addition, 1 deletion)

@@ -10,7 +10,7 @@
     "build:server": "tsx ./scripts/build-server.ts",
     "dev": "run-p dev:*",
     "dev:remix": "remix dev -c \"npm run dev-server\" --no-restart",
-    "dev-server": "cross-env MOCKS=true tsx watch --clear-screen=false --ignore \"app/**\" --ignore \"docs-build/**\" --ignore \"node_modules/**\" ./index.js",
+    "dev-server": "cross-env MOCKS=true tsx watch --clear-screen=false --ignore \"app/**\" --ignore \"docs-build/**\" --ignore \"node_modules/**\" ./docs-index.js",
     "format": "prettier --write .",
     "lint": "eslint --cache --cache-location ./node_modules/.cache/eslint .",
     "lint:files": "ts-node --esm scripts/lint-file-case.ts",