Skip to content

Commit

Permalink
Second version
Browse files Browse the repository at this point in the history
  • Loading branch information
Hialus committed Jul 12, 2023
1 parent a6609cb commit 9652ada
Show file tree
Hide file tree
Showing 10 changed files with 86 additions and 85 deletions.
32 changes: 14 additions & 18 deletions app/config.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,23 @@
import os
from pydantic import BaseModel

from pyaml_env import parse_config
from pydantic import BaseModel


class Settings(BaseModel):
class PyrisSettings(BaseModel):
class APIKeyConfig(BaseModel):
token: str
comment: str
llm_access: list[str]
class LLMModelConfig(BaseModel):
name: str
description: str
llm_credentials: dict


class LLMModelConfig(BaseModel):
name: str
description: str
llm_credentials: dict
class APIKeyConfig(BaseModel):
token: str
comment: str
llm_access: list[str]


class Settings(BaseModel):
class PyrisSettings(BaseModel):
api_keys: list[APIKeyConfig]
llms: dict[str, LLMModelConfig]

Expand All @@ -31,10 +34,3 @@ def get_settings(cls):


settings = Settings.get_settings()

# get keys of settings.pyris.llms and for loop over them with print
for key in set(settings.pyris.llms.keys()):
print(key)

for key in settings.pyris.api_keys:
print(key)
10 changes: 5 additions & 5 deletions app/dependencies.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
from fastapi import Depends
from starlette.requests import Request as StarletteRequest
from app.config import settings
from app.config import settings, APIKeyConfig

from app.core.custom_exceptions import (
PermissionDeniedException,
RequiresAuthenticationException,
RequiresAuthenticationException, InvalidModelException,
)


Expand All @@ -18,10 +18,10 @@ def _get_api_key(request: StarletteRequest) -> str:


class TokenValidator:
async def __call__(self, request: StarletteRequest, api_key: str = Depends(_get_api_key)) -> str:
async def __call__(self, request: StarletteRequest, api_key: str = Depends(_get_api_key)) -> APIKeyConfig:
for key in settings.pyris.api_keys:
if key.token == api_key:
return api_key
return key
raise PermissionDeniedException


Expand All @@ -33,7 +33,7 @@ async def __call__(self, request: StarletteRequest, api_key: str = Depends(_get_
if body.get("preferredModel") in key.llm_access:
return
else:
raise PermissionDeniedException
raise InvalidModelException(str(body.get("preferredModel")))
raise PermissionDeniedException


Expand Down
2 changes: 2 additions & 0 deletions app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@
from fastapi.responses import ORJSONResponse

from app.routes.messages import router as messages_router
from app.routes.models import router as models_router

app = FastAPI(default_response_class=ORJSONResponse)

app.include_router(messages_router)
app.include_router(models_router)
12 changes: 6 additions & 6 deletions app/models/dtos.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,6 @@
from datetime import datetime


class LLMModel(str, Enum):
GPT35_TURBO = "GPT35_TURBO"
GPT35_TURBO_16K_0613 = "GPT35_TURBO_16K_0613"
GPT35_TURBO_0613 = "GPT35_TURBO_0613"


class ContentType(str, Enum):
TEXT = "text"

Expand Down Expand Up @@ -37,3 +31,9 @@ class Message(BaseModel):

used_model: str = Field(..., alias="usedModel")
message: Message


class LLMModelResponse(BaseModel):
id: str
name: str
description: str
13 changes: 0 additions & 13 deletions app/routes/llms.py

This file was deleted.

8 changes: 3 additions & 5 deletions app/routes/messages.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from fastapi import APIRouter, Depends
from datetime import datetime, timezone

from fastapi import APIRouter, Depends
from parsimonious.exceptions import IncompleteParseError

from app.core.custom_exceptions import (
Expand All @@ -9,12 +9,10 @@
InternalServerException,
InvalidModelException,
)
from app.dependencies import PermissionsValidator
from app.models.dtos import SendMessageRequest, SendMessageResponse, LLMModel
from app.core.custom_exceptions import BadDataException
from app.dependencies import TokenPermissionsValidator
from app.models.dtos import SendMessageRequest, SendMessageResponse
from app.services.guidance_wrapper import GuidanceWrapper
from config import settings

router = APIRouter(tags=["messages"])

Expand All @@ -24,7 +22,7 @@
)
def send_message(body: SendMessageRequest) -> SendMessageResponse:
try:
model = LLMModel(body.preferred_model)
model = settings.pyris.llms[body.preferred_model]
except ValueError as e:
raise InvalidModelException(str(e))

Expand Down
18 changes: 18 additions & 0 deletions app/routes/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from fastapi import APIRouter, Depends

from app.dependencies import TokenValidator
from config import settings, APIKeyConfig
from models.dtos import LLMModelResponse

router = APIRouter(tags=["models"])


@router.get("/api/v1/models")
def send_message(
    api_key_config: APIKeyConfig = Depends(TokenValidator()),
) -> list[LLMModelResponse]:
    """Return the LLM models the calling API key is allowed to access.

    The parameter-level ``Depends(TokenValidator())`` both authenticates the
    request and yields the matching ``APIKeyConfig`` (raising a
    permission-denied error for unknown tokens), so the previous duplicate
    ``dependencies=[Depends(TokenValidator())]`` on the decorator — a second,
    uncached validator instance that ran the token check twice per request —
    has been removed.

    :param api_key_config: config record resolved from the request's API key;
        its ``llm_access`` lists the model ids this key may use.
    :return: id/name/description for each accessible model. Credentials from
        ``settings.pyris.llms`` are deliberately not exposed.
    """
    # NOTE(review): handler name `send_message` looks copy-pasted from
    # routes/messages.py; `get_models` would be clearer (route path unchanged).
    allowed_ids = api_key_config.llm_access

    # Filter the configured models down to those this key can access, and
    # project only the public fields into the response DTO.
    return [
        LLMModelResponse(id=model_id, name=config.name, description=config.description)
        for model_id, config in settings.pyris.llms.items()
        if model_id in allowed_ids
    ]
8 changes: 4 additions & 4 deletions app/services/guidance_wrapper.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
import guidance

from app.config import settings
from app.models.dtos import Content, ContentType, LLMModel
from app.config import settings, LLMModelConfig
from app.models.dtos import Content, ContentType


class GuidanceWrapper:
"""A wrapper service to all guidance package's methods."""

def __init__(
self, model: LLMModel, handlebars: str, parameters=None
self, model: LLMModelConfig, handlebars: str, parameters=None
) -> None:
if parameters is None:
parameters = {}
Expand Down Expand Up @@ -39,5 +39,5 @@ def query(self) -> Content:
return Content(type=ContentType.TEXT, textContent=result["response"])

def _get_llm(self):
llm_credentials = settings.pyris.llms[self.model].llm_credentials
llm_credentials = self.model.llm_credentials
return guidance.llms.OpenAI(**llm_credentials)
34 changes: 17 additions & 17 deletions application.example.yml
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
pyris:
api_key: secret
llm:
GPT35_TURBO:
model:
token:
api_base:
api_type:
api_version:
deployment_id:
GPT35_TURBO_16K_0613:
model:
token:
chat_mode:
GPT35_TURBO_0613:
model:
token:
chat_mode:
api_keys:
- token: "secret"
comment: "DUMMY"
llm_access:
- DUMMY

llms:
DUMMY:
name: "Dummy model"
description: "Dummy model for testing"
llm_credentials:
api_base: ""
api_type: ""
api_version: ""
deployment_id: ""
model: ""
token: ""
34 changes: 17 additions & 17 deletions application.test.yml
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
pyris:
api_key: secret
llm:
GPT35_TURBO:
model:
token:
api_base:
api_type:
api_version:
deployment_id:
GPT35_TURBO_16K_0613:
model:
token:
chat_mode:
GPT35_TURBO_0613:
model:
token:
chat_mode:
api_keys:
- token: "secret"
comment: "DUMMY"
llm_access:
- DUMMY

llms:
DUMMY:
name: "Dummy model"
description: "Dummy model for testing"
llm_credentials:
api_base: ""
api_type: ""
api_version: ""
deployment_id: ""
model: ""
token: ""

0 comments on commit 9652ada

Please sign in to comment.