Merge pull request #840 from Undertone0809/v1.18.0/optimize-streamlit-chatbot

perf: optimize streamlit chatbot
Undertone0809 authored Aug 4, 2024
2 parents e97567a + 1a4fe01 commit c5f3fa1
Showing 3 changed files with 74 additions and 81 deletions.
28 changes: 14 additions & 14 deletions docs/use_cases/streamlit-pne.md
@@ -1,4 +1,4 @@
# Build a simple chatbot using streamlit and pne
# Build a chatbot using streamlit

This demo shows how to use `pne.chat()` to create a simple chatbot with any model. The application frontend uses streamlit, an easy-to-use open-source Python framework.
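
Before wiring the call into a UI, it can help to see it in isolation. The following sketch is not part of the original tutorial; it assumes the `pne.chat()` parameters used later in this guide (`model`, `messages`, `model_config`) and a placeholder API key:

```python
# Minimal standalone sketch of pne.chat(), assuming the parameter names shown in
# the snippets below; replace the model name and API key with your own values.
import pne

reply = pne.chat(
    model="openai/gpt-4o",  # any provider/model string supported by pne
    messages=[{"role": "user", "content": "Hello, who are you?"}],
    model_config={"api_key": "your-api-key"},  # api_base can also be set here
)
print(reply)  # assumed to return the assistant's text when streaming is not enabled
```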

@@ -74,25 +74,25 @@ for msg in st.session_state.messages:
Set user input:
```python
if prompt := st.chat_input():
if prompt := st.chat_input("How can I help you?"):
if not api_key:
st.info("Please add your API key to continue.")
st.stop()
# Add the message entered by the user to the list of messages in the session state
st.session_state.messages.append({"role": "user", "content": prompt})
# Display in the chat interface
st.chat_message("user").write(prompt)
response: str = pne.chat(
model=model_name,
stream=True,
messages=prompt,
model_config={"api_base": api_base, "api_key": api_key},
)
st.session_state.messages.append({"role": "assistant", "content": "start"})
st.chat_message("assistant").write_stream(response)
with st.chat_message("user"):
st.markdown(prompt)
with st.chat_message("assistant"):
stream = pne.chat(
model=model_name,
stream=True,
messages=st.session_state.messages,
model_config={"api_base": api_base, "api_key": api_key},
)
response = st.write_stream(stream)
st.session_state.messages.append({"role": "assistant", "content": response})
```
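
A note on the streaming pattern above: `st.write_stream` consumes the generator returned by `pne.chat(stream=True)` and returns the full text, which is why the reply can then be appended to `st.session_state.messages`. A rough console-only sketch of the same idea, assuming the stream yields text chunks:

```python
# Sketch: consume the streamed reply manually instead of via st.write_stream.
# Assumes pne.chat(stream=True) yields text chunks; verify against your pne version.
import pne

stream = pne.chat(
    model="openai/gpt-4o",
    stream=True,
    messages=[{"role": "user", "content": "Tell me a short joke."}],
    model_config={"api_key": "your-api-key"},
)

reply = ""
for chunk in stream:
    print(chunk, end="", flush=True)  # render chunks as they arrive
    reply += str(chunk)
# reply now holds the full text, analogous to the value returned by st.write_stream.
```
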
## Final Effect
115 changes: 57 additions & 58 deletions example/streamlit-chatbot/app.py
@@ -1,72 +1,71 @@
import pne
import streamlit as st

# Create a sidebar that holds the user parameter configuration
with st.sidebar:
model_options = [
"openai/gpt-4o",
"openai/gpt-4-turbo",
"deepseek/deepseek-chat",
"zhipu/glm-4",
"ollama/llama2",
"groq/llama-3.1-70b-versatile",
"claude-3-5-sonnet-20240620",
]

# Add a placeholder for custom model name entry
model_options.insert(0, "Custom Model")
def main():
with st.sidebar:
model_options = [
"openai/gpt-4o",
"openai/gpt-4-turbo",
"deepseek/deepseek-chat",
"zhipu/glm-4",
"ollama/llama2",
"groq/llama-3.1-70b-versatile",
"claude-3-5-sonnet-20240620",
]

selected_option = st.selectbox(
label="Language Model Name",
options=model_options,
)
# Add a placeholder for custom model name entry
model_options.insert(0, "Custom Model")

model_name = selected_option
if selected_option == "Custom Model":
# Show a text input field for custom model name when "Custom Model" is selected
model_name = st.text_input(
"Enter Custom Model Name",
placeholder="Custom model name, eg: groq/llama3-70b-8192",
help=(
"For more details, please see "
"[how to write model name?](https://www.promptulate.cn/#/other/how_to_write_model_name)" # noqa
),
selected_option = st.selectbox(
label="Language Model Name",
options=model_options,
)
api_key = st.text_input("API Key", key="provider_api_key", type="password")
api_base = st.text_input("OpenAI Proxy URL (Optional)")

# Set title
st.title("💬 Chat")
st.caption("🚀 Hi there! 👋 I am a simple chatbot by Promptulate to help you.")
model_name = selected_option
if selected_option == "Custom Model":
model_name = st.text_input(
"Enter Custom Model Name",
placeholder="Custom model name, eg: groq/llama3-70b-8192",
help=(
"For more details, please see "
"[how to write model name?](https://www.promptulate.cn/#/other/how_to_write_model_name)" # noqa
),
)
api_key = st.text_input("API Key", key="provider_api_key", type="password")
api_base = st.text_input("OpenAI Proxy URL (Optional)")

# Initialize the message list in the session state if it does not exist yet
if "messages" not in st.session_state:
st.session_state["messages"] = [
{"role": "assistant", "content": "How can I help you?"}
]
st.title("💬 Chat")
st.caption("🚀 Hi there! 👋 I am a simple chatbot by Promptulate to help you.")

# Traverse messages in session state
for msg in st.session_state.messages:
st.chat_message(msg["role"]).write(msg["content"])
if "messages" not in st.session_state:
st.session_state["messages"] = [
{"role": "assistant", "content": "How can I help you?"}
]

# User input
if prompt := st.chat_input():
if not api_key:
st.info("Please add your API key to continue.")
st.stop()
for msg in st.session_state.messages:
st.chat_message(msg["role"]).write(msg["content"])

# Add the message entered by the user to the list of messages in the session state
st.session_state.messages.append({"role": "user", "content": prompt})
# Display in the chat interface
st.chat_message("user").write(prompt)
if prompt := st.chat_input("How can I help you?"):
if not api_key:
st.info("Please add your API key to continue.")
st.stop()

response: str = pne.chat(
model=model_name,
stream=True,
messages=prompt,
model_config={"api_base": api_base, "api_key": api_key},
)
st.session_state.messages.append({"role": "user", "content": prompt})

st.session_state.messages.append({"role": "assistant", "content": "start"})
st.chat_message("assistant").write_stream(response)
with st.chat_message("user"):
st.markdown(prompt)

with st.chat_message("assistant"):
stream = pne.chat(
model=model_name,
stream=True,
messages=st.session_state.messages,
model_config={"api_base": api_base, "api_key": api_key},
)
response = st.write_stream(stream)
st.session_state.messages.append({"role": "assistant", "content": response})


if __name__ == "__main__":
main()
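
With the example file in place, the app can be launched locally with `streamlit run example/streamlit-chatbot/app.py` from the repository root, assuming Streamlit and the package providing the `pne` module are installed.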
12 changes: 3 additions & 9 deletions promptulate/agents/tool_agent/agent.py
@@ -69,23 +69,17 @@ def __init__(

super().__init__(hooks=hooks, agent_type="Tool Agent", _from=_from)
self.llm: BaseLLM = llm or ChatOpenAI(model="gpt-4-1106-preview")
"""llm provider"""
self.tool_manager: ToolManager = (
tool_manager if tool_manager is not None else ToolManager(tools or [])
)
"""Used to manage all tools. A new ToolManager is created only if 'tool_manager'
is not provided."""

self.system_prompt_template: StringTemplate = REACT_SYSTEM_PROMPT_TEMPLATE
"""Preset system prompt template."""
self.prefix_prompt_template: StringTemplate = prefix_prompt_template
"""Prefix system prompt template."""
self.conversation_prompt: str = ""
"""Stores all conversation messages during the conversation. ToolAgent uses a
dynamic system prompt."""

self.max_iterations: Optional[int] = 15
"""The maximum number of executions."""
self.max_execution_time: Optional[float] = None
"""The maximum running time."""

self.enable_role: bool = enable_role
self.agent_name: str = agent_name
self.agent_identity: str = agent_identity
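
For context, a hypothetical usage sketch of this constructor follows. The import path mirrors the file location above, while the callable-as-tool style and the `run()` entry point are assumptions that this diff does not confirm:

```python
# Hypothetical ToolAgent usage based only on the __init__ shown above; the
# callable-as-tool style and agent.run() are assumptions about the promptulate API.
from promptulate.agents.tool_agent.agent import ToolAgent


def add_numbers(a: float, b: float) -> float:
    """Add two numbers together."""
    return a + b


agent = ToolAgent(tools=[add_numbers])  # ToolManager(tools or []) is built internally
result = agent.run("What is 17.5 + 24.5?")
print(result)
```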
