diff --git a/docs/_sidebar.md b/docs/_sidebar.md index dbcd042b..52014a8a 100644 --- a/docs/_sidebar.md +++ b/docs/_sidebar.md @@ -7,7 +7,7 @@ - [Best practices](use_cases/intro.md#use-cases) - [Awesome chat function](use_cases/chat_usage.md#chat) - [Build math application with agent](use_cases/build-math-application-with-agent.md#building-a-math-application-with-promptulate-agents) - - [Build chatbot with streamlit and pne.chat()](use_cases/streamlit-pne.md#build-a-simple-chatbot-using-streamlit-and-pne) + - [Build chatbot with streamlit and pne.chat()](use_cases/streamlit-chatbot.md#build-a-simple-chatbot-using-streamlit-and-pne) - [Build a Simple Chatbot Using pne and gradio](use_cases/pne-gradio.md#build-a-simple-chatbot-using-pne-and-gradio) - [Groq, llama3, Streamlit to build a application](use_cases/streamlit-groq-llama3.md#groq-llama3-streamlit-to-build-a-application) - [Build knowledge map with streamlit and pne.chat()](use_cases/llmapper.md#llmapper) diff --git a/docs/use_cases/img/mode-sidebar.png b/docs/use_cases/img/mode-sidebar.png new file mode 100644 index 00000000..ca5eee73 Binary files /dev/null and b/docs/use_cases/img/mode-sidebar.png differ diff --git a/docs/use_cases/streamlit-pne.md b/docs/use_cases/streamlit-chatbot.md similarity index 60% rename from docs/use_cases/streamlit-pne.md rename to docs/use_cases/streamlit-chatbot.md index 900c3668..235c5416 100644 --- a/docs/use_cases/streamlit-pne.md +++ b/docs/use_cases/streamlit-chatbot.md @@ -27,25 +27,70 @@ import streamlit as st ### Step 2 -Create a sidebar to place the user parameter configuration: +Create a sidebar to place the user parameter configuration. 
Pne has the streamlit model configuration UI built in; you can use it to quickly configure the model using the following code: + +> pne.beta.st.model_sidebar() is currently in beta phase and is expected to be converted to the official version in v1.20.0 + +```python +import pne + +config = pne.beta.st.model_sidebar() +print(config) +``` + +model_sidebar provides some default model options; you can also pass a list of model names to it if you want to customize the options. + +Output: + +```text +{'model_name': 'openai/gpt-4o', 'api_key': 'your api key', 'api_base': ''} +``` + +Render it like this: + +![img.png](img/mode-sidebar.png) + +The code above is equivalent to the following code: ```python -with st.sidebar: - model_name: str = st.selectbox( - label="Language Model Name", - options=[ - "openai/gpt-4o", - "openai/gpt-4-turbo", - "deepseek/deepseek-chat", - "zhipu/glm-4", - "ollama/llama2", - ], - help="For more details, please see" - "[how to write model name?](https://www.promptulate.cn/#/other/how_to_write_model_name)", # noqa - ) - api_key = st.text_input("API Key", key="provider_api_key", type="password") - api_base = st.text_input("OpenAI Proxy URL (Optional)") +from typing import List, TypedDict + + +class ModelConfig(TypedDict): + model_name: str + api_key: str + api_base: str + + +def model_sidebar(model_options: List[str] = None) -> ModelConfig: + import streamlit as st + + model_options = model_options or [ + "Custom Model", + "openai/gpt-4o", + "openai/gpt-4o-mini", + "openai/gpt-4-turbo", + "deepseek/deepseek-chat", + "claude-3-5-sonnet-20240620", + "zhipu/glm-4", + "ollama/llama2", + "groq/llama-3.1-70b-versatile", + ] + + with st.sidebar: + selected_model = st.selectbox("Language Model Name", model_options) + + if selected_model == "Custom Model": + selected_model = st.text_input( + "Enter Custom Model Name", + placeholder="Custom model name, eg: groq/llama3-70b-8192", + help="For more details, please see [how to write model 
name?](https://www.promptulate.cn/#/other/how_to_write_model_name)", # noqa + ) + + api_key = st.text_input("API Key", key="provider_api_key", type="password") + api_base = st.text_input("OpenAI Proxy URL (Optional)") + return ModelConfig(model_name=selected_model, api_key=api_key, api_base=api_base) ``` ### Step 3 diff --git a/example/streamlit-chatbot/app.py b/example/streamlit-chatbot/app.py index 7245c6c5..5d5fb48d 100644 --- a/example/streamlit-chatbot/app.py +++ b/example/streamlit-chatbot/app.py @@ -2,69 +2,58 @@ import streamlit as st -def main(): - with st.sidebar: - model_options = [ - "openai/gpt-4o", - "openai/gpt-4-turbo", - "deepseek/deepseek-chat", - "zhipu/glm-4", - "ollama/llama2", - "groq/llama-3.1-70b-versatile", - "claude-3-5-sonnet-20240620", +def initialize_session_state(): + if "messages" not in st.session_state: + st.session_state.messages = [ + {"role": "assistant", "content": "How can I help you?"} ] - # Add a placeholder for custom model name entry - model_options.insert(0, "Custom Model") - selected_option = st.selectbox( - label="Language Model Name", - options=model_options, +def render_chat_history(): + for msg in st.session_state.messages: + st.chat_message(msg["role"]).write(msg["content"]) + + +def get_user_input() -> str: + return st.chat_input("How can I help you?") + + +def update_chat(role: str, content: str): + """Update the chat history with the new message from the user or assistant.""" + st.session_state.messages.append({"role": role, "content": content}) + with st.chat_message(role): + st.markdown(content) + + +def generate_response(model_name: str, api_base: str, api_key: str) -> str: + """Generate a response using the specified model.""" + with st.chat_message("assistant"): + stream = pne.chat( + model=model_name, + stream=True, + messages=st.session_state.messages, + model_config={"api_base": api_base, "api_key": api_key}, ) + response = st.write_stream(stream) + return response - model_name = selected_option - if 
selected_option == "Custom Model": - model_name = st.text_input( - "Enter Custom Model Name", - placeholder="Custom model name, eg: groq/llama3-70b-8192", - help=( - "For more details, please see " - "[how to write model name?](https://www.promptulate.cn/#/other/how_to_write_model_name)" # noqa - ), - ) - api_key = st.text_input("API Key", key="provider_api_key", type="password") - api_base = st.text_input("OpenAI Proxy URL (Optional)") + +def main(): + initialize_session_state() + config = pne.beta.st.sidebar.model_sidebar() st.title("💬 Chat") st.caption("🚀 Hi there! 👋 I am a simple chatbot by Promptulate to help you.") - if "messages" not in st.session_state: - st.session_state["messages"] = [ - {"role": "assistant", "content": "How can I help you?"} - ] - - for msg in st.session_state.messages: - st.chat_message(msg["role"]).write(msg["content"]) + render_chat_history() - if prompt := st.chat_input("How can I help you?"): - if not api_key: + if prompt := get_user_input(): + if not config["api_key"]: st.info("Please add your API key to continue.") st.stop() - st.session_state.messages.append({"role": "user", "content": prompt}) - - with st.chat_message("user"): - st.markdown(prompt) - - with st.chat_message("assistant"): - stream = pne.chat( - model=model_name, - stream=True, - messages=st.session_state.messages, - model_config={"api_base": api_base, "api_key": api_key}, - ) - response = st.write_stream(stream) - st.session_state.messages.append({"role": "assistant", "content": response}) + update_chat("user", prompt) + generate_response(**config) if __name__ == "__main__": diff --git a/promptulate/agents/tool_agent/prompt.py b/promptulate/agents/tool_agent/prompt.py index c0f3fd92..4359f216 100644 --- a/promptulate/agents/tool_agent/prompt.py +++ b/promptulate/agents/tool_agent/prompt.py @@ -76,7 +76,7 @@ ```json { -"thought": "The thought of what to do and why.", +"analysis": "The thought of what to do and why.", "action": { "name": "finish", "args": 
{"content": "Sorry, I cannot answer your query, because (Summary all the upper steps, and explain)"} diff --git a/promptulate/beta/__init__.py b/promptulate/beta/__init__.py index 2786e4c7..423635e7 100644 --- a/promptulate/beta/__init__.py +++ b/promptulate/beta/__init__.py @@ -1,3 +1,3 @@ -from promptulate.beta import agents, rag +from promptulate.beta import agents, rag, st -__all__ = ["agents", "rag"] +__all__ = ["agents", "rag", "st"] diff --git a/promptulate/beta/st/__init__.py b/promptulate/beta/st/__init__.py new file mode 100644 index 00000000..a6729391 --- /dev/null +++ b/promptulate/beta/st/__init__.py @@ -0,0 +1,3 @@ +from promptulate.beta.st.sidebar import model_sidebar + +__all__ = ["model_sidebar"] diff --git a/promptulate/beta/st/sidebar.py b/promptulate/beta/st/sidebar.py new file mode 100644 index 00000000..2dc4c002 --- /dev/null +++ b/promptulate/beta/st/sidebar.py @@ -0,0 +1,38 @@ +from typing import List, TypedDict + + +class ModelConfig(TypedDict): + model_name: str + api_key: str + api_base: str + + +def model_sidebar(model_options: List[str] = None) -> ModelConfig: + import streamlit as st + + model_options = model_options or [ + "Custom Model", + "openai/gpt-4o", + "openai/gpt-4o-mini", + "openai/gpt-4-turbo", + "deepseek/deepseek-chat", + "claude-3-5-sonnet-20240620", + "zhipu/glm-4", + "ollama/llama2", + "groq/llama-3.1-70b-versatile", + ] + + with st.sidebar: + selected_model = st.selectbox("Language Model Name", model_options) + + if selected_model == "Custom Model": + selected_model = st.text_input( + "Enter Custom Model Name", + placeholder="Custom model name, eg: groq/llama3-70b-8192", + help="For more details, please see [how to write model name?](https://www.promptulate.cn/#/other/how_to_write_model_name)", # noqa + ) + + api_key = st.text_input("API Key", key="provider_api_key", type="password") + api_base = st.text_input("OpenAI Proxy URL (Optional)") + + return ModelConfig(model_name=selected_model, api_key=api_key, 
api_base=api_base) diff --git a/pyproject.toml b/pyproject.toml index b1fe1273..e52e4ecc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ name = "promptulate" readme = "README.md" homepage = "https://github.com/Undertone0809/promptulate" repository = "https://github.com/Undertone0809/promptulate" -version = "1.18.0" +version = "1.18.1" keywords = [ "promptulate", "pne",