add: n_messages on /api/ask to set the maximum number of messages to include
glorenzo972 committed Jul 29, 2024
1 parent f1282ea commit 07ef7fa
Showing 4 changed files with 14 additions and 2 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,10 @@
*Andrea Sponziello*
### **Copyright**: *Tiledesk SRL*

## [2024-07-29]
### 0.2.9
- add: n_messages on /api/ask to set the maximum number of messages to include

## [2024-07-27]
### 0.2.8
- add: history on /api/ask
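For illustration, a minimal sketch of calling the endpoint with the new parameter. The base URL, the "question" field name, and the shape of the chat_history_dict entries are assumptions made for the example, not taken from this commit; only /api/ask, n_messages, system_context, and chat_history_dict come from the changes below.

```python
# Hypothetical call to /api/ask with the new n_messages limit.
# Assumptions: the service runs locally on port 8000, the body has a "question"
# field, and history entries follow a {"question", "answer"} shape.
import requests

payload = {
    "question": "What changed in release 0.2.9?",
    "system_context": "You are a helpful AI bot. Always reply in the same language of the question.",
    "chat_history_dict": {
        "0": {"question": "Hi", "answer": "Hello, how can I help?"},
        "1": {"question": "What is tilellm?", "answer": "Tiledesk's RAG service."},
    },
    "n_messages": 1,  # new in 0.2.9: include at most this many messages from the history
}

response = requests.post("http://localhost:8000/api/ask", json=payload, timeout=60)
print(response.status_code, response.json())
```

With n_messages set to 1, only the most recent history entry should reach the model; omitting the field keeps the previous include-everything behaviour.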
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "tilellm"
version = "0.2.8"
version = "0.2.9"
description = "tiledesk for RAG"
authors = ["Gianluca Lorenzo <[email protected]>"]
repository = "https://github.com/Tiledesk/tiledesk-llm"
2 changes: 1 addition & 1 deletion tilellm/controller/controller.py
@@ -199,7 +199,7 @@ async def ask_to_llm(question, chat_model=None):
qa_prompt = ChatPromptTemplate.from_messages(
[
("system", question.system_context),
MessagesPlaceholder("chat_history_a"),
MessagesPlaceholder("chat_history_a", n_messages=question.n_messages),
("human", "{input}"),
]
)
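As a rough sketch of what the new argument does: MessagesPlaceholder keeps only the last n_messages entries of the history it is given. This assumes a langchain-core release that supports the n_messages argument (which the change above relies on); the messages themselves are invented for the example.

```python
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

# Same structure as ask_to_llm above, with a fixed system prompt and n_messages=2.
qa_prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful AI bot."),
        MessagesPlaceholder("chat_history_a", n_messages=2),
        ("human", "{input}"),
    ]
)

history = [
    HumanMessage("Hi"),
    AIMessage("Hello! How can I help?"),
    HumanMessage("What is RAG?"),
    AIMessage("Retrieval-augmented generation."),
]

# Only the last 2 history messages survive the placeholder.
messages = qa_prompt.format_messages(chat_history_a=history, input="Summarize our chat.")
for m in messages:
    print(type(m).__name__, ":", m.content)
# Expected order: the system message, "What is RAG?",
# "Retrieval-augmented generation.", then the new human input.
```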
8 changes: 8 additions & 0 deletions tilellm/models/item_model.py
@@ -124,6 +124,7 @@ class QuestionToLLM(BaseModel):
debug: bool = Field(default_factory=lambda: False)
system_context: str = Field(default="You are a helpful AI bot. Always reply in the same language of the question.")
chat_history_dict: Optional[Dict[str, ChatEntry]] = None
n_messages: Optional[int] = Field(default=None)

@field_validator("temperature")
def temperature_range(cls, v):
@@ -132,6 +133,13 @@ def temperature_range(cls, v):
raise ValueError("Temperature must be between 0.0 and 1.0.")
return v

@field_validator("n_messages")
def n_messages_range(cls, v):
"""Ensures n_messages is within greater than 0"""
if not v > 0:
raise ValueError("n_messages must be greater than 0")
return v

@field_validator("max_tokens")
def max_tokens_range(cls, v):
"""Ensures max_tokens is a positive integer."""
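A quick, hypothetical check of the new validator. It assumes question is the only other required field on QuestionToLLM, which may not match the full model; adjust the constructor arguments accordingly.

```python
# Hypothetical usage; only n_messages and the import path are taken from this commit.
from pydantic import ValidationError

from tilellm.models.item_model import QuestionToLLM

q = QuestionToLLM(question="hello", n_messages=3)
print(q.n_messages)  # 3

try:
    QuestionToLLM(question="hello", n_messages=0)
except ValidationError as err:
    print(err)  # n_messages must be greater than 0
```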
