Skip to content

Commit

Permalink
NEW: start the documentation #42 📝
Browse files Browse the repository at this point in the history
  • Loading branch information
eigenein committed Jul 21, 2023
1 parent 9ce37c2 commit e1c9277
Show file tree
Hide file tree
Showing 11 changed files with 1,316 additions and 17 deletions.
57 changes: 57 additions & 0 deletions .github/workflows/docs.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
# Build the MkDocs documentation on every push/PR to main,
# and deploy it to GitHub Pages on pushes only.
name: Docs

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

# Minimal permissions required by actions/deploy-pages:
# `pages: write` to publish, `id-token: write` for OIDC verification.
permissions:
  contents: read
  pages: write
  id-token: write

jobs:
  build:
    name: "Build"
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
        name: "📥 Checkout"
        with:
          # Full history: mkdocs plugins (e.g. git-revision-date) may need it.
          fetch-depth: 0

      - name: "✨ Install Poetry"
        run: pipx install poetry

      - uses: actions/setup-python@v4
        name: "✨ Set up Python"
        id: "setup-python"
        with:
          python-version: 3.x
          cache: poetry

      - name: "📥 Install dependencies"
        run: poetry install --no-root --with=docs

      - name: "📦 Build"
        # `_site` is the default path picked up by upload-pages-artifact.
        run: poetry run mkdocs build --site-dir _site

      - name: "📤 Upload"
        uses: actions/upload-pages-artifact@v1

  deploy:
    name: "Deploy"
    runs-on: ubuntu-latest
    needs: "build"
    # Deploy only on pushes to main, never for pull requests.
    if: github.event_name == 'push'
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}

    steps:
      - name: "✨ Deploy"
        # FIX: the step id must be `deployment` so that
        # `steps.deployment.outputs.page_url` above actually resolves.
        id: "deployment"
        uses: actions/deploy-pages@v1
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ clean:

.PHONY: install
install:
poetry install --all-extras --with dev
poetry install --all-extras --with=dev --with=docs

.PHONY: lint
lint: lint/ruff lint/black lint/mypy
Expand Down
2 changes: 1 addition & 1 deletion cachetory/caches/async_.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def __init__(
Args:
serialize_executor:
If specified, underlying serializing and deserializing will be performed
using the executor (for example, ``concurrent.futures.ProcessPoolExecutor``).
using the executor (for example, `ProcessPoolExecutor`).
This may be useful to better utilize CPU when caching large blobs.
If not specified, (de)serialization is performed in the current thread.
"""
Expand Down
38 changes: 36 additions & 2 deletions cachetory/caches/sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ class Cache(AbstractContextManager, Generic[ValueT, WireT]):
__slots__ = ("_serializer", "_backend")

def __init__(self, *, serializer: Serializer[ValueT, WireT], backend: SyncBackend[WireT]) -> None:
"""Instantiate a cache."""
self._serializer = serializer
self._backend = backend

Expand All @@ -23,6 +24,14 @@ def __getitem__(self, key: str) -> ValueT:
Raises:
KeyError: the key is not found in the cache
Examples:
>>> cache["key"] = 42
>>>
>>> assert cache["key"] == 42
>>>
>>> with pytest.raises(KeyError):
...     _ = cache["missing"]
"""
return self._serializer.deserialize(self._backend.get(key))

Expand All @@ -36,6 +45,11 @@ def get(self, key: str, default: DefaultT = None) -> Union[ValueT, DefaultT]: #
Returns:
Retrieved value if present, or `default` otherwise.
Examples:
>>> cache["key"] = 42
>>> assert cache.get("key") == 42
>>> assert cache.get("missing") is None
"""
try:
return self[key]
Expand All @@ -47,15 +61,26 @@ def get_many(self, *keys: str) -> Dict[str, ValueT]:
Retrieve many values from the cache.
Returns:
Dictionary of existing values indexed by their respective keys.
Missing keys are omitted.
Dictionary of existing values indexed by their respective keys. Missing keys are omitted.
Examples:
>>> cache["key"] = 42
>>> assert cache.get_many("key", "missing") == {"key": 42}
"""
return {key: self._serializer.deserialize(data) for key, data in self._backend.get_many(*keys)}

def expire_in(self, key: str, time_to_live: Optional[timedelta] = None) -> None:
    """
    Set the expiration time for the key.

    Args:
        key: cache key
        time_to_live: time to live, or `None` to make it eternal
    """
    # Delegates straight to the backend: only the entry's TTL changes,
    # so the serializer is not involved here.
    return self._backend.expire_in(key, time_to_live)

def __setitem__(self, key: str, value: ValueT) -> None:
    """
    Set the cache item with no expiration (eternal caching).

    To customize the behavior (e.g. a time-to-live or set-if-not-exists),
    use `set()`.
    """
    # Serialize eagerly, then store forever (time_to_live=None).
    self._backend.set(key, self._serializer.serialize(value), time_to_live=None)

def set( # noqa: A003
Expand All @@ -65,6 +90,15 @@ def set( # noqa: A003
time_to_live: Optional[timedelta] = None,
if_not_exists: bool = False,
) -> None:
"""
Set the cache item.
Args:
key: cache key
value: cached value
time_to_live: time to live, or `None` for eternal caching
if_not_exists: only set the item if it does not already exist
"""
self._backend.set(
key,
self._serializer.serialize(value),
Expand Down
15 changes: 15 additions & 0 deletions docs/caches.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Caches

## Synchronous cache

::: cachetory.caches.sync.Cache
options:
heading_level: 3
show_root_heading: false

## Asynchronous cache

::: cachetory.caches.async_.Cache
options:
heading_level: 3
show_root_heading: false
59 changes: 59 additions & 0 deletions docs/decorators.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
# Decorators

`#!python @cached` performs [memoization](https://en.wikipedia.org/wiki/Memoization) of a wrapped function:

```python
from cachetory.caches.sync import Cache
from cachetory.decorators.sync import cached

cache = Cache[int, ...](backend=..., serializer=...)


@cached(cache)
def expensive_function(x: int) -> int:
return 42 * x
```

## Key functions

There are a few `make_key` functions provided by default:

- `#!python cachetory.decorators.shared.make_default_key()` builds a human-readable cache key out of decorated function fully-qualified name and stringified arguments. The length of the key depends on the argument values.
- `#!python cachetory.decorators.shared.make_default_hashed_key()` calls `make_default_key()` under the hood but hashes the key and returns a hash hex digest – making it a fixed-length key and not human-readable.

## Purging cache

Specific cached value can be deleted using the added `#!python purge()` function, which accepts the same arguments as the original wrapped callable:

```python
expensive_function(100500)
expensive_function.purge(100500) # purge cached value for this argument
```

## Synchronous `@cached`

::: cachetory.decorators.sync.cached
options:
heading_level: 3
show_root_heading: false

### Cached callable protocol

::: cachetory.decorators.sync._CachedCallable
options:
heading_level: 4
show_root_heading: false

## Asynchronous `@cached`

::: cachetory.decorators.async_.cached
options:
heading_level: 3
show_root_heading: false

### Cached callable protocol

::: cachetory.decorators.async_._CachedCallable
options:
heading_level: 4
show_root_heading: false
55 changes: 55 additions & 0 deletions docs/getting-started.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
# Getting started

## Instantiating a `Cache`

Both the sync and async `Cache` require at least these parameters to work:

- `backend`: functions as a storage
- `serializer`: is responsible for converting actual values from and to something that a backend would be able to store

`#!python Cache` may be annotated with a value type like this `#!python Cache[ValueT, WireT]`, which provides type hints for the cache methods.

## Instantiating a backend

There are a few ways to instantiate a backend:

- By **directly instantiating** a backend class via its `__init__()`
- By instantiating a specific backend class **via its `from_url()` class method**. In that case the URL is forwarded to the underlying client (if any)
- **By using `cachetory.[sync|async_].from_url()` function.** In that case specific backend class is chosen by the URL's scheme, and the URL is forwarded to its `#!python from_url()` class method. This is especially useful to configure an arbitrary backend from a single configuration option – instead of hard-coding a specific backend class.

### Examples

```python
import redis
import cachetory.backends.sync
import cachetory.backends.async_

backend = cachetory.backends.sync.from_url("memory://")
backend = cachetory.backends.async_.from_url("dummy://")
backend = cachetory.backends.sync.RedisBackend(redis.Redis(...))
backend = cachetory.backends.async_.from_url("redis://localhost:6379/1")
```

## Instantiating a serializer

Instantiating a serializer is very similar to instantiating a backend. To instantiate one from a URL, use `#!python cachetory.serializers.from_url()` – unlike the back-end case there are no separate sync and async versions.

`#!python cachetory.serializers.from_url()` supports scheme joining with `+`, as in `pickle+zlib://`. In that case multiple serializers are instantiated and applied sequentially (in the example a value would be serialized by `pickle` and the serialized value is then compressed by `zlib`). Deserialization order is, of course, the opposite.

### Examples

```python
import pickle

import cachetory.serializers

serializer = cachetory.serializers.from_url("pickle+zstd://")
serializer = cachetory.serializers.from_url(
"pickle+zstd://?pickle-protocol=4&compression-level=3",
)
serializer = cachetory.serializers.from_url("null://")
serializer = cachetory.serializers.NoopSerializer()
serializer = cachetory.serializers.PickleSerializer(
pickle_protocol=pickle.DEFAULT_PROTOCOL,
)
```
48 changes: 48 additions & 0 deletions docs/index.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# Quickstart

=== "Sync"

```python
from cachetory import serializers
from cachetory.backends import sync as sync_backends
from cachetory.caches.sync import Cache


cache = Cache[int, bytes](
serializer=serializers.from_url("pickle://"),
backend=sync_backends.from_url("redis://localhost:6379"),
)
with cache:
cache.set("foo", 42)
assert cache.get("foo") == 42
```

=== "Async"

```python
from cachetory import serializers
from cachetory.backends import async_ as async_backends
from cachetory.caches.async_ import Cache


cache = Cache[int, bytes](
serializer=serializers.from_url("pickle://?pickle-protocol=4"),
backend=async_backends.from_url("redis://localhost:6379"),
)
async with cache:
await cache.set("foo", 42)
assert await cache.get("foo") == 42
```

!!! tip

It is perfectly fine not to use the context manager if, for example, you need a cache instance to live through the entire application lifetime:

```python
# caches.py:
cache = Cache(...)

# app.py:
from caches import cache
await cache.set("foo", 42)
```
Loading

0 comments on commit e1c9277

Please sign in to comment.