Ignore function arguments when caching
rlouf committed Nov 20, 2023
1 parent 0409e15 commit acbdc4f
Showing 2 changed files with 8 additions and 3 deletions.
9 changes: 6 additions & 3 deletions outlines/caching.py
@@ -1,5 +1,5 @@
 import os
-from typing import Callable
+from typing import Callable, Optional
 
 from perscache import Cache, NoCache
 from perscache.serializers import JSONSerializer
@@ -10,8 +10,11 @@
 memory = Cache(serializer=JSONSerializer(), storage=LocalFileStorage(cache_dir))
 
 
-def cache(fn: Callable):
-    return memory.cache()(fn)
+def cache(ignore: Optional[str]):
+    def cache_fn(fn: Callable):
+        return memory.cache(ignore=ignore)(fn)
+
+    return cache_fn
 
 
 def get_cache():
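With this change, cache is a decorator factory rather than a plain decorator: it takes the name of an argument to leave out of the cache key and forwards it to perscache via memory.cache(ignore=...). A minimal sketch of the intended usage, assuming a toy function (lookup and its arguments are hypothetical, not part of the library):

from outlines.caching import cache


@cache(ignore="client")
def lookup(prompt: str, client: object) -> str:
    # Expensive work that depends on prompt but not on which client
    # instance happens to be passed in.
    return prompt.upper()


# Both calls map to the same cache entry: "client" is excluded from the key,
# so passing a different (even non-serializable) object still yields a hit.
lookup("hello", object())
lookup("hello", object())

Note that, since ignore has no default value here, the decorator now has to be written with parentheses, e.g. @cache(ignore=None) when nothing should be ignored.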
2 changes: 2 additions & 0 deletions outlines/models/openai.py
@@ -9,6 +9,7 @@
 import numpy as np
 
 import outlines
+from outlines.caching import cache
 
 __all__ = ["OpenAI", "openai"]
 
@@ -287,6 +288,7 @@ def __repr__(self):
         return str(self.config)
 
 
+@cache(ignore="client")
 @functools.partial(outlines.vectorize, signature="(),(),()->(s)")
 async def generate_chat(
     prompt: str, client: "AsyncOpenAI", config: OpenAIConfig
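In openai.py the new decorator is applied to generate_chat with ignore="client", presumably because the AsyncOpenAI client is connection state rather than part of the request: it changes between sessions and does not serialize into a stable cache key, so keying on it would defeat the cache. A hedged sketch of what this means for callers (the OpenAIConfig field name model and the dummy API key are assumptions, and the calls are left commented out so nothing is sent over the network):

from openai import AsyncOpenAI

from outlines.models.openai import OpenAIConfig, generate_chat

config = OpenAIConfig(model="gpt-3.5-turbo")  # assumed field name

# Two distinct client instances, same prompt and config: with "client"
# ignored, the second call would be answered from the on-disk cache.
# Depending on how outlines.vectorize wraps coroutines, these calls may
# need to be awaited.
# generate_chat("Hello", AsyncOpenAI(api_key="dummy"), config)
# generate_chat("Hello", AsyncOpenAI(api_key="dummy"), config)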
