Merge pull request #841 from Undertone0809/v1.18.0/optimize-streamlit-chatbot

feat: add streamlit sidebar component in beta
Undertone0809 authored Aug 5, 2024
2 parents c5f3fa1 + d31fa19 commit cc66739
Showing 9 changed files with 146 additions and 71 deletions.
2 changes: 1 addition & 1 deletion docs/_sidebar.md
@@ -7,7 +7,7 @@
- [Best practices](use_cases/intro.md#use-cases)
- [Awesome chat function](use_cases/chat_usage.md#chat)
- [Build math application with agent](use_cases/build-math-application-with-agent.md#building-a-math-application-with-promptulate-agents)
- [Build chatbot with streamlit and pne.chat()](use_cases/streamlit-pne.md#build-a-simple-chatbot-using-streamlit-and-pne)
- [Build chatbot with streamlit and pne.chat()](use_cases/streamlit-chatbot#build-a-simple-chatbot-using-streamlit-and-pne)
- [Build a Simple Chatbot Using pne and gradio](use_cases/pne-gradio.md#build-a-simple-chatbot-using-pne-and-gradio)
- [Groq, llama3, Streamlit to build a application](use_cases/streamlit-groq-llama3.md#groq-llama3-streamlit-to-build-a-application)
- [Build knowledge map with streamlit and pne.chat()](use_cases/llmapper.md#llmapper)
Binary file added docs/use_cases/img/mode-sidebar.png
@@ -27,25 +27,70 @@ import streamlit as st

### Step 2

Create a sidebar to place the user parameter configuration:
Create a sidebar to hold the user's parameter configuration. Pne has a Streamlit model-configuration UI built in, so you can configure the model quickly with the following code:

> pne.beta.st.model_sidebar() is currently in beta and is expected to be promoted to the official API in v1.20.0.

```python
import pne
config = pne.beta.st.model_sidebar()
print(config)
```

model_sidebar provides a default list of model options; you can also pass your own list of model names if you want to customize the options (see the sketch at the end of this step).

Output:

```text
{'model_name': 'openai/gpt-4o', 'api_key': 'your api key', 'api_base': ''}
```

The sidebar renders like this:

![img.png](img/mode-sidebar.png)

The code above is equivalent to the following:

```python
with st.sidebar:
    model_name: str = st.selectbox(
        label="Language Model Name",
        options=[
            "openai/gpt-4o",
            "openai/gpt-4-turbo",
            "deepseek/deepseek-chat",
            "zhipu/glm-4",
            "ollama/llama2",
        ],
        help="For more details, please see"
        "[how to write model name?](https://www.promptulate.cn/#/other/how_to_write_model_name)", # noqa
    )
    api_key = st.text_input("API Key", key="provider_api_key", type="password")
    api_base = st.text_input("OpenAI Proxy URL (Optional)")


from typing import List, TypedDict


class ModelConfig(TypedDict):
    model_name: str
    api_key: str
    api_base: str


def model_sidebar(model_options: List[str] = None) -> ModelConfig:
    import streamlit as st

    model_options = model_options or [
        "Custom Model",
        "openai/gpt-4o",
        "openai/gpt-4o-mini",
        "openai/gpt-4-turbo",
        "deepseek/deepseek-chat",
        "claude-3-5-sonnet-20240620",
        "zhipu/glm-4",
        "ollama/llama2",
        "groq/llama-3.1-70b-versatile",
    ]

    with st.sidebar:
        selected_model = st.selectbox("Language Model Name", model_options)

        if selected_model == "Custom Model":
            selected_model = st.text_input(
                "Enter Custom Model Name",
                placeholder="Custom model name, eg: groq/llama3-70b-8192",
                help="For more details, please see [how to write model name?](https://www.promptulate.cn/#/other/how_to_write_model_name)", # noqa
            )

        api_key = st.text_input("API Key", key="provider_api_key", type="password")
        api_base = st.text_input("OpenAI Proxy URL (Optional)")

    return ModelConfig(model_name=selected_model, api_key=api_key, api_base=api_base)
```
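
If you want to customize the option list, you can pass your own model names to `model_sidebar`. A minimal sketch, assuming you only want to offer a few providers (the option values below are purely illustrative):

```python
import pne

# Hypothetical custom option list -- any model names supported by pne can go here.
# Keeping "Custom Model" in the list preserves the free-text model name input.
config = pne.beta.st.model_sidebar(
    model_options=[
        "Custom Model",
        "openai/gpt-4o-mini",
        "deepseek/deepseek-chat",
    ]
)
print(config)
```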
### Step 3
89 changes: 39 additions & 50 deletions example/streamlit-chatbot/app.py
@@ -2,69 +2,58 @@
 import streamlit as st


-def main():
-    with st.sidebar:
-        model_options = [
-            "openai/gpt-4o",
-            "openai/gpt-4-turbo",
-            "deepseek/deepseek-chat",
-            "zhipu/glm-4",
-            "ollama/llama2",
-            "groq/llama-3.1-70b-versatile",
-            "claude-3-5-sonnet-20240620",
-        ]
-
-        # Add a placeholder for custom model name entry
-        model_options.insert(0, "Custom Model")
-
-        selected_option = st.selectbox(
-            label="Language Model Name",
-            options=model_options,
-        )
-
-        model_name = selected_option
-        if selected_option == "Custom Model":
-            model_name = st.text_input(
-                "Enter Custom Model Name",
-                placeholder="Custom model name, eg: groq/llama3-70b-8192",
-                help=(
-                    "For more details, please see "
-                    "[how to write model name?](https://www.promptulate.cn/#/other/how_to_write_model_name)"  # noqa
-                ),
-            )
-        api_key = st.text_input("API Key", key="provider_api_key", type="password")
-        api_base = st.text_input("OpenAI Proxy URL (Optional)")
+def initialize_session_state():
+    if "messages" not in st.session_state:
+        st.session_state.messages = [
+            {"role": "assistant", "content": "How can I help you?"}
+        ]
+
+
+def render_chat_history():
+    for msg in st.session_state.messages:
+        st.chat_message(msg["role"]).write(msg["content"])
+
+
+def get_user_input() -> str:
+    return st.chat_input("How can I help you?")
+
+
+def update_chat(role: str, content: str):
+    """Update the chat history with the new message from the user or assistant."""
+    st.session_state.messages.append({"role": role, "content": content})
+    with st.chat_message(role):
+        st.markdown(content)
+
+
+def generate_response(model_name: str, api_base: str, api_key: str) -> str:
+    """Generate a response using the specified model."""
+    with st.chat_message("assistant"):
+        stream = pne.chat(
+            model=model_name,
+            stream=True,
+            messages=st.session_state.messages,
+            model_config={"api_base": api_base, "api_key": api_key},
+        )
+        response = st.write_stream(stream)
+    return response
+
+
+def main():
+    initialize_session_state()
+    config = pne.beta.st.sidebar.model_sidebar()

     st.title("💬 Chat")
     st.caption("🚀 Hi there! 👋 I am a simple chatbot by Promptulate to help you.")

-    if "messages" not in st.session_state:
-        st.session_state["messages"] = [
-            {"role": "assistant", "content": "How can I help you?"}
-        ]
-
-    for msg in st.session_state.messages:
-        st.chat_message(msg["role"]).write(msg["content"])
+    render_chat_history()

-    if prompt := st.chat_input("How can I help you?"):
-        if not api_key:
+    if prompt := get_user_input():
+        if not config["api_key"]:
             st.info("Please add your API key to continue.")
             st.stop()

-        st.session_state.messages.append({"role": "user", "content": prompt})
-
-        with st.chat_message("user"):
-            st.markdown(prompt)
-
-        with st.chat_message("assistant"):
-            stream = pne.chat(
-                model=model_name,
-                stream=True,
-                messages=st.session_state.messages,
-                model_config={"api_base": api_base, "api_key": api_key},
-            )
-            response = st.write_stream(stream)
-        st.session_state.messages.append({"role": "assistant", "content": response})
+        update_chat("user", prompt)
+        generate_response(**config)


 if __name__ == "__main__":
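One detail worth noting in the refactor above: `model_sidebar()` returns a `ModelConfig` TypedDict whose keys match `generate_response`'s parameters, so the config can be unpacked directly into the call. A minimal sketch of the pattern (the stub body below is illustrative, not the real implementation):

```python
from typing import TypedDict


class ModelConfig(TypedDict):
    model_name: str
    api_key: str
    api_base: str


def generate_response(model_name: str, api_base: str, api_key: str) -> str:
    # Stub standing in for the real pne.chat() call made in app.py.
    return f"would call {model_name} at {api_base or 'the default endpoint'}"


config: ModelConfig = {
    "model_name": "openai/gpt-4o",
    "api_key": "sk-...",
    "api_base": "",
}
print(generate_response(**config))  # dict keys unpack into keyword arguments
```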
2 changes: 1 addition & 1 deletion promptulate/agents/tool_agent/prompt.py
@@ -76,7 +76,7 @@
```json
{
    "thought": "The thought of what to do and why.",
    "analysis": "The thought of what to do and why.",
    "action": {
        "name": "finish",
        "args": {"content": "Sorry, I cannot answer your query, because (Summary all the upper steps, and explain)"}
4 changes: 2 additions & 2 deletions promptulate/beta/__init__.py
@@ -1,3 +1,3 @@
from promptulate.beta import agents, rag
from promptulate.beta import agents, rag, st

__all__ = ["agents", "rag"]
__all__ = ["agents", "rag", "st"]
3 changes: 3 additions & 0 deletions promptulate/beta/st/__init__.py
@@ -0,0 +1,3 @@
from promptulate.beta.st.sidebar import model_sidebar

__all__ = ["model_sidebar"]
38 changes: 38 additions & 0 deletions promptulate/beta/st/sidebar.py
@@ -0,0 +1,38 @@
from typing import List, TypedDict


class ModelConfig(TypedDict):
    model_name: str
    api_key: str
    api_base: str


def model_sidebar(model_options: List[str] = None) -> ModelConfig:
    import streamlit as st

    model_options = model_options or [
        "Custom Model",
        "openai/gpt-4o",
        "openai/gpt-4o-mini",
        "openai/gpt-4-turbo",
        "deepseek/deepseek-chat",
        "claude-3-5-sonnet-20240620",
        "zhipu/glm-4",
        "ollama/llama2",
        "groq/llama-3.1-70b-versatile",
    ]

    with st.sidebar:
        selected_model = st.selectbox("Language Model Name", model_options)

        if selected_model == "Custom Model":
            selected_model = st.text_input(
                "Enter Custom Model Name",
                placeholder="Custom model name, eg: groq/llama3-70b-8192",
                help="For more details, please see [how to write model name?](https://www.promptulate.cn/#/other/how_to_write_model_name)", # noqa
            )

        api_key = st.text_input("API Key", key="provider_api_key", type="password")
        api_base = st.text_input("OpenAI Proxy URL (Optional)")

    return ModelConfig(model_name=selected_model, api_key=api_key, api_base=api_base)
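
For reference, the new helper is consumed roughly the way the docs and example app above use it. A minimal sketch, assuming a plain `streamlit run` script (the page title and prompt are illustrative):

```python
import pne
import streamlit as st

# Renders the model picker in the Streamlit sidebar and returns a ModelConfig dict.
config = pne.beta.st.model_sidebar()

st.title("pne sidebar demo")  # illustrative title
if config["api_key"]:
    answer = pne.chat(
        model=config["model_name"],
        messages=[{"role": "user", "content": "Hello!"}],
        model_config={"api_base": config["api_base"], "api_key": config["api_key"]},
    )
    st.write(answer)
```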
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -9,7 +9,7 @@ name = "promptulate"
readme = "README.md"
homepage = "https://github.com/Undertone0809/promptulate"
repository = "https://github.com/Undertone0809/promptulate"
version = "1.18.0"
version = "1.18.1"
keywords = [
    "promptulate",
    "pne",
