From 2b7a9024adac5f708dc1386ab98847050027a15a Mon Sep 17 00:00:00 2001
From: github-actions <${GITHUB_ACTOR}@users.noreply.github.com>
Date: Fri, 17 Nov 2023 08:09:11 +0000
Subject: [PATCH] Format Python code with psf/black push

---
 models/openai_model.py   | 13 +++++++++----
 services/text_service.py | 12 +++++++++---
 2 files changed, 18 insertions(+), 7 deletions(-)

diff --git a/models/openai_model.py b/models/openai_model.py
index 89ca7514..0ce6f340 100644
--- a/models/openai_model.py
+++ b/models/openai_model.py
@@ -1002,7 +1002,9 @@ async def send_chatgpt_chat_request(
                     messages.append(
                         {
                             "role": role,
-                            "name": username_clean if role == "user" else bot_name_clean,
+                            "name": username_clean
+                            if role == "user"
+                            else bot_name_clean,
                             "content": text,
                         }
                     )
@@ -1012,7 +1014,9 @@ async def send_chatgpt_chat_request(
                     messages.append(
                         {
                             "role": role,
-                            "name": username_clean if role == "user" else bot_name_clean,
+                            "name": username_clean
+                            if role == "user"
+                            else bot_name_clean,
                             "content": [
                                 {"type": "text", "text": text},
                             ],
@@ -1028,7 +1032,9 @@ async def send_chatgpt_chat_request(
                     messages.append(
                         {
                             "role": role,
-                            "name": username_clean if role == "user" else bot_name_clean,
+                            "name": username_clean
+                            if role == "user"
+                            else bot_name_clean,
                             "content": [
                                 {"type": "text", "text": text},
                             ],
@@ -1097,7 +1103,6 @@ async def send_chatgpt_chat_request(
         except Exception:
             raise ValueError("Could not decode JSON response from the API")
 
-
         return response
 
     @backoff.on_exception(
diff --git a/services/text_service.py b/services/text_service.py
index 1b8b69de..bd3bd90d 100644
--- a/services/text_service.py
+++ b/services/text_service.py
@@ -910,7 +910,9 @@ async def process_conversation_message(
             """
             last_messages = converser_cog.conversation_threads[
                 message.channel.id
-            ].history[-6:]  # Get the last 6 messages to determine context on whether we should draw
+            ].history[
+                -6:
+            ]  # Get the last 6 messages to determine context on whether we should draw
             last_messages = last_messages[1:]
             try:
                 thinking_message = await TextService.trigger_thinking(message)
@@ -927,7 +929,9 @@ async def process_conversation_message(
                 await TextService.stop_thinking(thinking_message)
                 # This validation is only until we figure out what's wrong with the json response mode for vision.
                 if response_json["intent_to_draw"]:
-                    thinking_message = await TextService.trigger_thinking(message,is_drawing=True)
+                    thinking_message = await TextService.trigger_thinking(
+                        message, is_drawing=True
+                    )
 
                     links = await converser_cog.model.send_image_request_within_conversation(
                         response_json["prompt"],
@@ -954,7 +958,9 @@ async def process_conversation_message(
                     )
                 except:
                     try:
-                        await message.reply("I encountered an error while trying to draw..")
+                        await message.reply(
+                            "I encountered an error while trying to draw.."
+                        )
                         await thinking_message.delete()
                         converser_cog.conversation_threads[
                             message.channel.id