diff --git a/docs/assets/thumbnails/basic_chat.png b/docs/assets/thumbnails/basic_chat.png
index f72a9bb..4893ac7 100644
Binary files a/docs/assets/thumbnails/basic_chat.png and b/docs/assets/thumbnails/basic_chat.png differ
diff --git a/docs/assets/thumbnails/basic_streaming_chat.png b/docs/assets/thumbnails/basic_streaming_chat.png
index b9b9316..1ecd575 100644
Binary files a/docs/assets/thumbnails/basic_streaming_chat.png and b/docs/assets/thumbnails/basic_streaming_chat.png differ
diff --git a/docs/assets/thumbnails/basic_streaming_chat_async.png b/docs/assets/thumbnails/basic_streaming_chat_async.png
index a970799..63fe317 100644
Binary files a/docs/assets/thumbnails/basic_streaming_chat_async.png and b/docs/assets/thumbnails/basic_streaming_chat_async.png differ
diff --git a/docs/assets/thumbnails/component_environment_widget.png b/docs/assets/thumbnails/component_environment_widget.png
index 5517526..18929c5 100644
Binary files a/docs/assets/thumbnails/component_environment_widget.png and b/docs/assets/thumbnails/component_environment_widget.png differ
diff --git a/docs/assets/thumbnails/feature_chained_response.png b/docs/assets/thumbnails/feature_chained_response.png
index 7b28abd..8261199 100644
Binary files a/docs/assets/thumbnails/feature_chained_response.png and b/docs/assets/thumbnails/feature_chained_response.png differ
diff --git a/docs/assets/thumbnails/feature_delayed_placeholder.png b/docs/assets/thumbnails/feature_delayed_placeholder.png
index 9fcb7a3..ed13245 100644
Binary files a/docs/assets/thumbnails/feature_delayed_placeholder.png and b/docs/assets/thumbnails/feature_delayed_placeholder.png differ
diff --git a/docs/assets/thumbnails/feature_replace_response.png b/docs/assets/thumbnails/feature_replace_response.png
index df49a3c..38a75cb 100644
Binary files a/docs/assets/thumbnails/feature_replace_response.png and b/docs/assets/thumbnails/feature_replace_response.png differ
diff --git a/docs/assets/thumbnails/feature_slim_interface.png b/docs/assets/thumbnails/feature_slim_interface.png
index 3ec612c..aa6e827 100644
Binary files a/docs/assets/thumbnails/feature_slim_interface.png and b/docs/assets/thumbnails/feature_slim_interface.png differ
diff --git a/docs/assets/thumbnails/langchain_llama_and_mistral.png b/docs/assets/thumbnails/langchain_llama_and_mistral.png
index 1373b3c..de3d6a9 100644
Binary files a/docs/assets/thumbnails/langchain_llama_and_mistral.png and b/docs/assets/thumbnails/langchain_llama_and_mistral.png differ
diff --git a/docs/assets/thumbnails/langchain_math_assistant.png b/docs/assets/thumbnails/langchain_math_assistant.png
index f2f02ae..36bdbc3 100644
Binary files a/docs/assets/thumbnails/langchain_math_assistant.png and b/docs/assets/thumbnails/langchain_math_assistant.png differ
diff --git a/docs/assets/thumbnails/langchain_pdf_assistant.png b/docs/assets/thumbnails/langchain_pdf_assistant.png
index 907b611..b407bd4 100644
Binary files a/docs/assets/thumbnails/langchain_pdf_assistant.png and b/docs/assets/thumbnails/langchain_pdf_assistant.png differ
diff --git a/docs/assets/thumbnails/langchain_with_memory.png b/docs/assets/thumbnails/langchain_with_memory.png
index 716a9b0..fb0d5a2 100644
Binary files a/docs/assets/thumbnails/langchain_with_memory.png and b/docs/assets/thumbnails/langchain_with_memory.png differ
diff --git a/docs/assets/thumbnails/mistral_and_llama.png b/docs/assets/thumbnails/mistral_and_llama.png
new file mode 100644
index 0000000..9bb019d
Binary files /dev/null and b/docs/assets/thumbnails/mistral_and_llama.png differ
diff --git a/docs/assets/thumbnails/mistral_chat.png b/docs/assets/thumbnails/mistral_chat.png
index b0e8b4a..44f28a3 100644
Binary files a/docs/assets/thumbnails/mistral_chat.png and b/docs/assets/thumbnails/mistral_chat.png differ
diff --git a/docs/assets/thumbnails/mistral_with_memory.png b/docs/assets/thumbnails/mistral_with_memory.png
index fb5e84c..5f9ce0e 100644
Binary files a/docs/assets/thumbnails/mistral_with_memory.png and b/docs/assets/thumbnails/mistral_with_memory.png differ
diff --git a/docs/assets/thumbnails/openai_async_chat.png b/docs/assets/thumbnails/openai_async_chat.png
index 2fb8622..f31c2ee 100644
Binary files a/docs/assets/thumbnails/openai_async_chat.png and b/docs/assets/thumbnails/openai_async_chat.png differ
diff --git a/docs/assets/thumbnails/openai_authentication.png b/docs/assets/thumbnails/openai_authentication.png
index 20eac69..a468609 100644
Binary files a/docs/assets/thumbnails/openai_authentication.png and b/docs/assets/thumbnails/openai_authentication.png differ
diff --git a/docs/assets/thumbnails/openai_chat.png b/docs/assets/thumbnails/openai_chat.png
index 557c2e1..46b6b1c 100644
Binary files a/docs/assets/thumbnails/openai_chat.png and b/docs/assets/thumbnails/openai_chat.png differ
diff --git a/docs/assets/thumbnails/openai_hvplot.png b/docs/assets/thumbnails/openai_hvplot.png
index 130b3d9..39ed56d 100644
Binary files a/docs/assets/thumbnails/openai_hvplot.png and b/docs/assets/thumbnails/openai_hvplot.png differ
diff --git a/docs/assets/thumbnails/openai_image_generation.png b/docs/assets/thumbnails/openai_image_generation.png
index 03d4178..8a71c76 100644
Binary files a/docs/assets/thumbnails/openai_image_generation.png and b/docs/assets/thumbnails/openai_image_generation.png differ
diff --git a/docs/assets/thumbnails/openai_two_bots.png b/docs/assets/thumbnails/openai_two_bots.png
index 7c197eb..a18da1d 100644
Binary files a/docs/assets/thumbnails/openai_two_bots.png and b/docs/assets/thumbnails/openai_two_bots.png differ
diff --git a/docs/assets/videos/basic_chat.mp4 b/docs/assets/videos/basic_chat.mp4
index e6596a2..b821848 100644
Binary files a/docs/assets/videos/basic_chat.mp4 and b/docs/assets/videos/basic_chat.mp4 differ
diff --git a/docs/assets/videos/basic_streaming_chat.mp4 b/docs/assets/videos/basic_streaming_chat.mp4
index 3fdb922..ed2cbf6 100644
Binary files a/docs/assets/videos/basic_streaming_chat.mp4 and b/docs/assets/videos/basic_streaming_chat.mp4 differ
diff --git a/docs/assets/videos/basic_streaming_chat_async.mp4 b/docs/assets/videos/basic_streaming_chat_async.mp4
index cf7ff2e..6cb647b 100644
Binary files a/docs/assets/videos/basic_streaming_chat_async.mp4 and b/docs/assets/videos/basic_streaming_chat_async.mp4 differ
diff --git a/docs/assets/videos/component_environment_widget.mp4 b/docs/assets/videos/component_environment_widget.mp4
index 1b64add..4e1eeb2 100644
Binary files a/docs/assets/videos/component_environment_widget.mp4 and b/docs/assets/videos/component_environment_widget.mp4 differ
diff --git a/docs/assets/videos/feature_chained_response.mp4 b/docs/assets/videos/feature_chained_response.mp4
index fac43dc..fe7ac2b 100644
Binary files a/docs/assets/videos/feature_chained_response.mp4 and b/docs/assets/videos/feature_chained_response.mp4 differ
diff --git a/docs/assets/videos/feature_delayed_placeholder.mp4 b/docs/assets/videos/feature_delayed_placeholder.mp4
index b266bc8..8453d4f 100644
Binary files a/docs/assets/videos/feature_delayed_placeholder.mp4 and b/docs/assets/videos/feature_delayed_placeholder.mp4 differ
diff --git a/docs/assets/videos/feature_replace_response.mp4 b/docs/assets/videos/feature_replace_response.mp4
index ed6b5ca..ef757d0 100644
Binary files a/docs/assets/videos/feature_replace_response.mp4 and b/docs/assets/videos/feature_replace_response.mp4 differ
diff --git a/docs/assets/videos/feature_slim_interface.mp4 b/docs/assets/videos/feature_slim_interface.mp4
index 0b87706..6597b48 100644
Binary files a/docs/assets/videos/feature_slim_interface.mp4 and b/docs/assets/videos/feature_slim_interface.mp4 differ
diff --git a/docs/assets/videos/langchain_llama_and_mistral.mp4 b/docs/assets/videos/langchain_llama_and_mistral.mp4
index 52ea084..72f4003 100644
Binary files a/docs/assets/videos/langchain_llama_and_mistral.mp4 and b/docs/assets/videos/langchain_llama_and_mistral.mp4 differ
diff --git a/docs/assets/videos/langchain_math_assistant.mp4 b/docs/assets/videos/langchain_math_assistant.mp4
index 181c8f7..053540c 100644
Binary files a/docs/assets/videos/langchain_math_assistant.mp4 and b/docs/assets/videos/langchain_math_assistant.mp4 differ
diff --git a/docs/assets/videos/langchain_pdf_assistant.mp4 b/docs/assets/videos/langchain_pdf_assistant.mp4
index fabc2cc..09675e6 100644
Binary files a/docs/assets/videos/langchain_pdf_assistant.mp4 and b/docs/assets/videos/langchain_pdf_assistant.mp4 differ
diff --git a/docs/assets/videos/langchain_with_memory.mp4 b/docs/assets/videos/langchain_with_memory.mp4
index d792011..cd82d2f 100644
Binary files a/docs/assets/videos/langchain_with_memory.mp4 and b/docs/assets/videos/langchain_with_memory.mp4 differ
diff --git a/docs/assets/videos/mistral_and_llama.mp4 b/docs/assets/videos/mistral_and_llama.mp4
new file mode 100644
index 0000000..d02c916
Binary files /dev/null and b/docs/assets/videos/mistral_and_llama.mp4 differ
diff --git a/docs/assets/videos/mistral_chat.mp4 b/docs/assets/videos/mistral_chat.mp4
index 2d2d247..3c5efc7 100644
Binary files a/docs/assets/videos/mistral_chat.mp4 and b/docs/assets/videos/mistral_chat.mp4 differ
diff --git a/docs/assets/videos/mistral_with_memory.mp4 b/docs/assets/videos/mistral_with_memory.mp4
index 681bc65..a1c55d8 100644
Binary files a/docs/assets/videos/mistral_with_memory.mp4 and b/docs/assets/videos/mistral_with_memory.mp4 differ
diff --git a/docs/assets/videos/openai_async_chat.mp4 b/docs/assets/videos/openai_async_chat.mp4
index b20d6e1..d482c11 100644
Binary files a/docs/assets/videos/openai_async_chat.mp4 and b/docs/assets/videos/openai_async_chat.mp4 differ
diff --git a/docs/assets/videos/openai_authentication.mp4 b/docs/assets/videos/openai_authentication.mp4
index 1afd7f0..99f147b 100644
Binary files a/docs/assets/videos/openai_authentication.mp4 and b/docs/assets/videos/openai_authentication.mp4 differ
diff --git a/docs/assets/videos/openai_chat.mp4 b/docs/assets/videos/openai_chat.mp4
index 49f4632..52e3f3b 100644
Binary files a/docs/assets/videos/openai_chat.mp4 and b/docs/assets/videos/openai_chat.mp4 differ
diff --git a/docs/assets/videos/openai_hvplot.mp4 b/docs/assets/videos/openai_hvplot.mp4
index 42d7e8f..2c466c9 100644
Binary files a/docs/assets/videos/openai_hvplot.mp4 and b/docs/assets/videos/openai_hvplot.mp4 differ
diff --git a/docs/assets/videos/openai_image_generation.mp4 b/docs/assets/videos/openai_image_generation.mp4
index 116e82b..1682d7c 100644
Binary files a/docs/assets/videos/openai_image_generation.mp4 and b/docs/assets/videos/openai_image_generation.mp4 differ
diff --git a/docs/assets/videos/openai_two_bots.mp4 b/docs/assets/videos/openai_two_bots.mp4
index eea2564..ff2fd3d 100644
Binary files a/docs/assets/videos/openai_two_bots.mp4 and b/docs/assets/videos/openai_two_bots.mp4 differ
diff --git a/docs/examples/features/feature_chained_response.py b/docs/examples/features/feature_chained_response.py
index e212459..c306754 100644
--- a/docs/examples/features/feature_chained_response.py
+++ b/docs/examples/features/feature_chained_response.py
@@ -18,16 +18,16 @@ async def callback(contents: str, user: str, instance: pn.chat.ChatInterface):
         yield {
             "user": ARM_BOT,
             "avatar": "🦾",
-            "value": f"Hey, {LEG_BOT}! Did you hear the user?",
+            "object": f"Hey, {LEG_BOT}! Did you hear the user?",
         }
         instance.respond()
     elif user == ARM_BOT:
-        user_message = instance.value[-2]
-        user_contents = user_message.value
+        user_message = instance.objects[-2]
+        user_contents = user_message.object
         yield {
             "user": LEG_BOT,
             "avatar": "🦿",
-            "value": f'Yeah! They said "{user_contents}".',
+            "object": f'Yeah! They said "{user_contents}".',
         }
diff --git a/docs/examples/langchain/langchain_math_assistant.py b/docs/examples/langchain/langchain_math_assistant.py
index bfd53ce..f93af3f 100644
--- a/docs/examples/langchain/langchain_math_assistant.py
+++ b/docs/examples/langchain/langchain_math_assistant.py
@@ -14,7 +14,7 @@ async def callback(contents: str, user: str, instance: pn.chat.ChatInterface):
     final_answer = await llm_math.arun(question=contents)
-    instance.stream(final_answer, message=instance.value[-1])
+    instance.stream(final_answer, message=instance.objects[-1])


 chat_interface = pn.chat.ChatInterface(callback=callback, callback_user="Langchain")
diff --git a/docs/examples/langchain/langchain_pdf_assistant.py b/docs/examples/langchain/langchain_pdf_assistant.py
index 9f9acf6..c34043f 100644
--- a/docs/examples/langchain/langchain_pdf_assistant.py
+++ b/docs/examples/langchain/langchain_pdf_assistant.py
@@ -132,7 +132,7 @@ def _send_not_ready_message(chat_interface) -> bool:
     message = _get_validation_message()
     if message:
-        chat_interface.send({"user": "System", "value": message}, respond=False)
+        chat_interface.send({"user": "System", "object": message}, respond=False)
     return bool(message)
@@ -142,14 +142,14 @@ async def respond(contents, user, chat_interface):
     if chat_interface.active == 0:
         chat_interface.active = 1
         chat_interface.active_widget.placeholder = "Ask questions here!"
- yield {"user": "OpenAI", "value": "Let's chat about the PDF!"} + yield {"user": "OpenAI", "object": "Let's chat about the PDF!"} return response, documents = _get_response(contents) pages_layout = pn.Accordion(*documents, sizing_mode="stretch_width", max_width=800) answers = pn.Column(response["result"], pages_layout) - yield {"user": "OpenAI", "value": answers} + yield {"user": "OpenAI", "object": answers} chat_interface = pn.chat.ChatInterface( diff --git a/docs/examples/mistral/mistral_with_memory.py b/docs/examples/mistral/mistral_with_memory.py index b26b1fd..b86e152 100644 --- a/docs/examples/mistral/mistral_with_memory.py +++ b/docs/examples/mistral/mistral_with_memory.py @@ -18,12 +18,12 @@ def apply_template(history): prompt = "" for i, message in enumerate(history): if i == 0: - prompt += f"[INST]{SYSTEM_INSTRUCTIONS} {message.value}[/INST]" + prompt += f"[INST]{SYSTEM_INSTRUCTIONS} {message.object}[/INST]" else: if message.user == "Mistral": - prompt += f"{message.value}" + prompt += f"{message.object}" else: - prompt += f"""[INST]{message.value}[/INST]""" + prompt += f"""[INST]{message.object}[/INST]""" return prompt @@ -42,7 +42,7 @@ async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): ) llm = llms["mistral"] - history = [message for message in instance.value] + history = [message for message in instance.objects] prompt = apply_template(history) response = llm(prompt, stream=True) message = "" diff --git a/docs/examples/openai/openai_hvplot.py b/docs/examples/openai/openai_hvplot.py index 2fc6872..1ced425 100644 --- a/docs/examples/openai/openai_hvplot.py +++ b/docs/examples/openai/openai_hvplot.py @@ -56,7 +56,7 @@ async def respond_with_executor(code: str): plot = exec_with_return(code=code, global_context=context) return { "user": "Executor", - "value": pn.Tabs( + "object": pn.Tabs( ("Plot", plot), ("Code", code_block), ), diff --git a/docs/examples/openai/openai_two_bots.py b/docs/examples/openai/openai_two_bots.py index ee88f21..792b14f 100644 --- a/docs/examples/openai/openai_two_bots.py +++ b/docs/examples/openai/openai_two_bots.py @@ -32,9 +32,9 @@ async def callback( message = "" async for chunk in response: message += chunk["choices"][0]["delta"].get("content", "") - yield {"user": callback_user, "avatar": callback_avatar, "value": message} + yield {"user": callback_user, "avatar": callback_avatar, "object": message} - if len(instance.value) % 6 == 0: # stop at every 6 messages + if len(instance.objects) % 6 == 0: # stop at every 6 messages instance.send( "That's it for now! Thanks for chatting!", user="System", respond=False ) diff --git a/docs/features.md b/docs/features.md index a6903b4..cdfdd07 100644 --- a/docs/features.md +++ b/docs/features.md @@ -37,16 +37,16 @@ async def callback(contents: str, user: str, instance: pn.widgets.ChatInterface) yield { "user": ARM_BOT, "avatar": "🦾", - "value": f"Hey, {LEG_BOT}! Did you hear the user?", + "object": f"Hey, {LEG_BOT}! Did you hear the user?", } instance.respond() elif user == ARM_BOT: - user_message = instance.value[-2] - user_contents = user_message.value + user_message = instance.objects[-2] + user_contents = user_message.object yield { "user": LEG_BOT, "avatar": "🦿", - "value": f'Yeah! They said "{user_contents}".', + "object": f'Yeah! 
They said "{user_contents}".', } diff --git a/docs/langchain.md b/docs/langchain.md index 09fb007..4563980 100644 --- a/docs/langchain.md +++ b/docs/langchain.md @@ -133,7 +133,7 @@ pn.extension(design="material") async def callback(contents: str, user: str, instance: pn.widgets.ChatInterface): final_answer = await llm_math.arun(question=contents) - instance.stream(final_answer, message=instance.value[-1]) + instance.stream(final_answer, message=instance.object[-1]) chat_interface = pn.widgets.ChatInterface(callback=callback, callback_user="Langchain") @@ -304,7 +304,7 @@ def _send_not_ready_message(chat_interface) -> bool: message = _get_validation_message() if message: - chat_interface.send({"user": "System", "value": message}, respond=False) + chat_interface.send({"user": "System", "object": message}, respond=False) return bool(message) @@ -314,14 +314,14 @@ async def respond(contents, user, chat_interface): if chat_interface.active == 0: chat_interface.active = 1 chat_interface.active_widget.placeholder = "Ask questions here!" - yield {"user": "OpenAI", "value": "Let's chat about the PDF!"} + yield {"user": "OpenAI", "object": "Let's chat about the PDF!"} return response, documents = _get_response(contents) pages_layout = pn.Accordion(*documents, sizing_mode="stretch_width", max_width=800) answers = pn.Column(response["result"], pages_layout) - yield {"user": "OpenAI", "value": answers} + yield {"user": "OpenAI", "object": answers} chat_interface = pn.widgets.ChatInterface( diff --git a/docs/mistral.md b/docs/mistral.md index 3b7591c..d228e31 100644 --- a/docs/mistral.md +++ b/docs/mistral.md @@ -115,12 +115,12 @@ def apply_template(history): prompt = "" for i, message in enumerate(history): if i == 0: - prompt += f"[INST]{SYSTEM_INSTRUCTIONS} {message.value}[/INST]" + prompt += f"[INST]{SYSTEM_INSTRUCTIONS} {message.object}[/INST]" else: if message.user == "Mistral": - prompt += f"{message.value}" + prompt += f"{message.object}" else: - prompt += f"""[INST]{message.value}[/INST]""" + prompt += f"""[INST]{message.object}[/INST]""" return prompt @@ -139,7 +139,7 @@ async def callback(contents: str, user: str, instance: pn.widgets.ChatInterface) ) llm = llms["mistral"] - history = [message for message in instance.value] + history = [message for message in instance.objects] prompt = apply_template(history) response = llm(prompt, stream=True) message = "" diff --git a/docs/openai.md b/docs/openai.md index c49960a..041870d 100644 --- a/docs/openai.md +++ b/docs/openai.md @@ -253,7 +253,7 @@ async def respond_with_openai(contents: Union[pd.DataFrame, str]): message = "" async for chunk in response: message += chunk["choices"][0]["delta"].get("content", "") - yield {"user": "ChatGPT", "value": message} + yield {"user": "ChatGPT", "object": message} async def respond_with_executor(code: str): @@ -263,7 +263,7 @@ async def respond_with_executor(code: str): plot = exec_with_return(code=code, global_context=context) return { "user": "Executor", - "value": pn.Tabs( + "object": pn.Tabs( ("Plot", plot), ("Code", code_block), ), @@ -402,9 +402,9 @@ async def callback( message = "" async for chunk in response: message += chunk["choices"][0]["delta"].get("content", "") - yield {"user": callback_user, "avatar": callback_avatar, "value": message} + yield {"user": callback_user, "avatar": callback_avatar, "object": message} - if len(instance.value) % 6 == 0: # stop at every 6 messages + if len(instance.objects) % 6 == 0: # stop at every 6 messages instance.send( "That's it for now! 
Thanks for chatting!", user="System", respond=False ) @@ -420,4 +420,4 @@ chat_interface.send( ) chat_interface.servable() ``` - \ No newline at end of file + diff --git a/tests/ui/test_all.py b/tests/ui/test_all.py index 5da29fe..cdd809b 100644 --- a/tests/ui/test_all.py +++ b/tests/ui/test_all.py @@ -87,7 +87,7 @@ def test_app(server, app_path, port, page): # zoom and run should be defined for all examples # even if we don't run the video run = ACTION[name] - zoom = ZOOM[name] + zoom = ZOOM.get(name, 1.5) # We cannot run these tests in pipelines etc. as they require models downloaded, # api keys etc. diff --git a/tests/ui/user.py b/tests/ui/user.py index 6f9792b..b0b82df 100644 --- a/tests/ui/user.py +++ b/tests/ui/user.py @@ -95,7 +95,7 @@ def langchain_llama_and_mistral(page: Page): # Could not get this working as it always starts by downloading models chat = ChatInterface(page) chat.send("Please explain what kind of model you are in one sentence") - page.wait_for_timeout(10000) + page.wait_for_timeout(15000) def langchain_with_memory(page: Page): @@ -126,6 +126,12 @@ def langchain_pdf_assistant(page: Page): page.wait_for_timeout(10000) +def mistral_and_llama(page: Page): + chat = ChatInterface(page) + chat.send("What do you think about HoloViz in a single sentence?") + page.wait_for_timeout(15000) + + def mistral_chat(page: Page): chat = ChatInterface(page) chat.send("What is HoloViz Panel in one sentence") @@ -205,6 +211,7 @@ def openai_two_bots(page: Page): "langchain_math_assistant.py": langchain_math_assistant, "langchain_pdf_assistant.py": langchain_pdf_assistant, "langchain_with_memory.py": langchain_with_memory, + "mistral_and_llama.py": mistral_and_llama, "mistral_chat.py": mistral_chat, "mistral_with_memory.py": mistral_with_memory, "openai_async_chat.py": openai_async_chat,