diff --git a/.gitignore b/.gitignore
index dffc7bf..040cce3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1 @@
-TEST_DATA
+TEST_DATA*
diff --git a/all-tests.ts b/all-tests.ts
index bb19f5a..8027e2a 100644
--- a/all-tests.ts
+++ b/all-tests.ts
@@ -17,7 +17,7 @@
 const PAYLOAD_A = () => new Uint8Array([0, 1, 127, 99, 154, 235])
 const PAYLOAD_B = () => new Uint8Array([1, 76, 160, 53, 57, 10, 230])
 const PAYLOAD_C = () => new Uint8Array([2, 111, 74, 131, 236, 96, 142, 193])
 
-const LARGE_PAYLOAD = new Uint8Array(100000).map(() => Math.random() * 256)
+const LARGE_PAYLOAD = new Uint8Array(100_000).map(() => Math.random() * 256)
 
 type CleanupFunction = () => Promise<void>
@@ -62,27 +62,25 @@ export function testAll(
         assertEquals(actual, PAYLOAD_A())
     })
 
-    // TypeError: Value too large (max 65536 bytes)
-    // Values have a maximum length of 64 KiB after serialization.
-    // https://docs.deno.com/api/deno/~/Deno.Kv
-    // await t.step('should work with a large payload', async t => {
-    //     try {
-    //         await adapter.save(
-    //             ['AAAAA', 'sync-state', 'xxxxx'],
-    //             LARGE_PAYLOAD
-    //         )
-    //     } catch (e) {
-    //         console.log(e)
-    //     }
+    await t.step('should work with a large payload', async t => {
+        await cleanup()
+        try {
+            await adapter.save(
+                ['AAAAA', 'sync-state', 'xxxxx'],
+                LARGE_PAYLOAD
+            )
+        } catch (e) {
+            assertEquals(true, false, String(e)) // fail with the error message
+        }
 
-    //     const actual = await adapter.load([
-    //         'AAAAA',
-    //         'sync-state',
-    //         'xxxxx'
-    //     ])
+        const actual = await adapter.load([
+            'AAAAA',
+            'sync-state',
+            'xxxxx'
+        ])
 
-    //     assertEquals(actual, LARGE_PAYLOAD)
-    // })
+        assertEquals(actual, LARGE_PAYLOAD)
+    })
     })
 
     await t.step('loadRange', async t => {
diff --git a/deno.lock b/deno.lock
index 3923a57..a5ec4c1 100644
--- a/deno.lock
+++ b/deno.lock
@@ -2,6 +2,7 @@
   "version": "4",
   "specifiers": {
     "jsr:@std/assert@*": "1.0.6",
+    "jsr:@std/assert@1": "1.0.6",
     "jsr:@std/internal@^1.0.4": "1.0.4",
     "npm:@automerge/automerge-repo@*": "1.2.1",
     "npm:@automerge/automerge-repo@1.2.1": "1.2.1"
diff --git a/kv-storage-adapter.ts b/kv-storage-adapter.ts
index e01b5af..e7bc170 100644
--- a/kv-storage-adapter.ts
+++ b/kv-storage-adapter.ts
@@ -3,23 +3,102 @@ import type {
     Chunk,
     StorageAdapterInterface,
     StorageKey
 } from 'npm:@automerge/automerge-repo@1.2.1'
+import { getLexicalIndexAt } from './lexicalIndex.ts'
 
 type Data = Uint8Array
 
+// Values have a maximum length of 64 KiB after serialization.
+// https://docs.deno.com/api/deno/~/Deno.Kv
+const VALUE_MAX_LEN = 65536
 
 export class DenoKVStorageAdapter implements StorageAdapterInterface {
     constructor(private kv: Deno.Kv) {}
 
     async load(key: StorageKey): Promise<Data | undefined> {
         const entry = await this.kv.get<Data>(key)
-        return entry.value ?? undefined
+        if (entry.value) return entry.value
+
+        // No single value: the payload may have been chunked on save,
+        // so reassemble the chunks in lexical key order
+        const list = this.kv.list<Data>({
+            prefix: key
+        })
+        let returnData: number[] = []
+        for await (const entry of list) {
+            returnData = returnData.concat(Array.from(entry.value))
+        }
+
+        if (returnData.length === 0) return undefined
+
+        return new Uint8Array(returnData)
     }
 
     async save(key: StorageKey, data: Data): Promise<void> {
-        await this.kv.set(key, data)
+        if (data.length > VALUE_MAX_LEN) {
+            /**
+             * There might be a "single" value for this key,
+             * so clear it out
+             */
+            await this.kv.delete(key)
+
+            /**
+             * There might also be chunks left over from an earlier,
+             * larger save, so clear those out as well
+             */
+            for await (const entry of this.kv.list({ prefix: key })) {
+                await this.kv.delete(entry.key)
+            }
+
+            /**
+             * Split the value into chunks and save them with a `chunk key`
+             *
+             * The `chunk key` is constructed by suffixing the original key
+             * with the lexically ordered index of the chunk number:
+             *
+             * chunk 0 -> ['original', 'key', 'a']
+             * chunk 1 -> ['original', 'key', 'b']
+             * ...
+             * chunk 25 -> ['original', 'key', 'z']
+             * chunk 26 -> ['original', 'key', 'za']
+             * chunk 27 -> ['original', 'key', 'zb']
+             * ...
+             * chunk 51 -> ['original', 'key', 'zz']
+             * chunk 52 -> ['original', 'key', 'zza']
+             * chunk 53 -> ['original', 'key', 'zzb']
+             * ...
+             * chunk 77 -> ['original', 'key', 'zzz']
+             * ...
+             */
+            const promises: Promise<unknown>[] = []
+            let chunkNumber = 0
+            for (let i = 0; i < data.length; i = i + VALUE_MAX_LEN) {
+                const chunkKey = key.concat(getLexicalIndexAt(chunkNumber++))
+                const sliced = data.slice(
+                    i,
+                    Math.min(i + VALUE_MAX_LEN, data.length)
+                )
+
+                // Collect the writes so they can be awaited together
+                promises.push(this.kv.set(chunkKey, sliced))
+            }
+            await Promise.all(promises)
+        } else {
+            /**
+             * There might be chunked values for this key, so clear them out
+             */
+            const list = this.kv.list({
+                prefix: key
+            })
+
+            const promises = []
+            for await (const entry of list) {
+                promises.push(this.kv.delete(entry.key))
+            }
+            await Promise.all(promises)
+
+            await this.kv.set(key, data)
+        }
     }
 
-    remove(key: StorageKey): Promise<void> {
-        return this.kv.delete(key)
+    async remove(key: StorageKey) {
+        // Delete any chunk entries stored under the key prefix
+        const list = this.kv.list({
+            prefix: key
+        })
+        const promises = []
+        for await (const entry of list) {
+            promises.push(this.kv.delete(entry.key))
+        }
+        await Promise.all(promises)
+        // The prefix selector does not match the key itself,
+        // so delete any single value separately
+        await this.kv.delete(key)
     }
 
     async loadRange(keyPrefix: StorageKey): Promise<Chunk[]> {
diff --git a/lexicalIndex.ts b/lexicalIndex.ts
new file mode 100644
index 0000000..467fa64
--- /dev/null
+++ b/lexicalIndex.ts
@@ -0,0 +1,19 @@
+import { assertEquals } from '@std/assert/equals'
+
+const alphabet = 'abcdefghijklmnopqrstuvwxyz'.split('')
+
+// Maps a chunk number to a string suffix whose lexical order matches
+// numeric order: 0 -> 'a', 25 -> 'z', 26 -> 'za', 51 -> 'zz', 52 -> 'zza', ...
+export const getLexicalIndexAt = (i: number): string => {
+    return (
+        alphabet[alphabet.length - 1].repeat(Math.floor(i / alphabet.length)) +
+        alphabet[i % alphabet.length]
+    )
+}
+
+Deno.test('Lexical index', () => {
+    const actual: string[] = []
+    for (let i = 0; i < 1000; i++) {
+        actual.push(getLexicalIndexAt(i))
+    }
+
+    assertEquals(actual, actual.concat().sort())
+})
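
A minimal usage sketch for the adapter in this patch, not part of the diff itself. The database path and the document shape are illustrative assumptions; Deno.openKv, Repo, repo.create, and handle.change are the standard Deno and automerge-repo APIs:

import { Repo } from 'npm:@automerge/automerge-repo@1.2.1'
import { DenoKVStorageAdapter } from './kv-storage-adapter.ts'

// Hypothetical database path (covered by the TEST_DATA* ignore rule);
// calling Deno.openKv() with no argument uses the script's default database
const kv = await Deno.openKv('TEST_DATA_example.db')

const repo = new Repo({
    storage: new DenoKVStorageAdapter(kv),
    network: [] // no network adapters: local persistence only
})

// Payloads larger than 64 KiB are transparently chunked by save()
// and reassembled by load()
const handle = repo.create<{ bytes?: Uint8Array }>()
handle.change(doc => {
    doc.bytes = new Uint8Array(100_000)
})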