Proper error messages on an API error
Kav-K committed Jan 5, 2023
1 parent aae6069 commit 6cea6da
Showing 3 changed files with 19 additions and 12 deletions.
8 changes: 7 additions & 1 deletion cogs/gpt_3_commands_and_converser.py
@@ -693,14 +693,20 @@ async def encapsulated_send(

         # General catch case for everything
         except Exception:
+
             message = "Something went wrong, please try again later. This may be due to upstream issues on the API, or rate limiting."
+
             await ctx.send_followup(message) if from_context else await ctx.reply(
                 message
             )
             if user_id in self.awaiting_responses:
                 self.awaiting_responses.remove(user_id)
             traceback.print_exc()
-            await self.end_conversation(ctx)
+
+            try:
+                await self.end_conversation(ctx)
+            except:
+                pass
             return
 
     @add_to_group("gpt")
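The handler above sends the user-facing message before attempting cleanup, and end_conversation is now wrapped in its own try/except so that a secondary failure (for example, if no conversation is actually open for that user) cannot raise out of the error path. A minimal sketch of the same pattern, with hypothetical do_work, reply, and cleanup callables standing in for the cog's methods:

import traceback


async def run_with_error_reply(ctx, do_work, reply, cleanup):
    """Run do_work; on any failure, notify the user first, then clean up best-effort."""
    try:
        return await do_work()
    except Exception:
        # The user always gets a reply, even when the root cause is an upstream API issue.
        await reply(ctx, "Something went wrong, please try again later.")
        traceback.print_exc()
        # Cleanup is best-effort: a second failure here must not mask the reply above.
        try:
            await cleanup(ctx)
        except Exception:
            pass
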
22 changes: 12 additions & 10 deletions models/openai_model.py
@@ -42,7 +42,7 @@ def __init__(self, usage_service):
         )
         self._frequency_penalty = 0 # Penalize new tokens based on their existing frequency in the text so far. (Higher frequency = lower probability of being chosen.)
         self._best_of = 1 # Number of responses to compare the loglikelihoods of
-        self._prompt_min_length = 12
+        self._prompt_min_length = 8
         self._max_conversation_length = 100
         self._model = Models.DAVINCI
         self._low_usage_mode = False
@@ -307,6 +307,13 @@ def prompt_min_length(self, value):
         )
         self._prompt_min_length = value
 
+    async def valid_text_request(self, response):
+        try:
+            tokens_used = int(response["usage"]["total_tokens"])
+            self.usage_service.update_usage(tokens_used)
+        except:
+            raise ValueError("The API returned an invalid response: " + str(response['error']['message']))
+
     async def send_summary_request(self, prompt):
         """
         Sends a summary request to the OpenAI API
@@ -322,9 +329,6 @@ async def send_summary_request(self, prompt):

         tokens = self.usage_service.count_tokens(summary_request_text)
 
-        print("The summary request will use " + str(tokens) + " tokens.")
-        print(f"{self.max_tokens - tokens} is the remaining that we will use.")
-
         async with aiohttp.ClientSession() as session:
             payload = {
                 "model": Models.DAVINCI,
@@ -345,10 +349,10 @@
             ) as resp:
                 response = await resp.json()
 
+                await self.valid_text_request(response)
+
                 print(response["choices"][0]["text"])
 
-                tokens_used = int(response["usage"]["total_tokens"])
-                self.usage_service.update_usage(tokens_used)
                 return response
 
     async def send_request(
@@ -369,7 +373,7 @@
         # Validate that all the parameters are in a good state before we send the request
         if len(prompt) < self.prompt_min_length:
             raise ValueError(
-                "Prompt must be greater than 12 characters, it is currently "
+                "Prompt must be greater than 8 characters, it is currently "
                 + str(len(prompt))
             )

@@ -399,8 +403,7 @@
                 response = await resp.json()
                 print(response)
                 # Parse the total tokens used for this request and response pair from the response
-                tokens_used = int(response["usage"]["total_tokens"])
-                self.usage_service.update_usage(tokens_used)
+                await self.valid_text_request(response)
 
                 return response

@@ -451,7 +454,6 @@ async def send_image_request(self, prompt, vary=None) -> tuple[File, list[Any]]:
                 response = await resp.json()
 
                 print(response)
-                print("JUST PRINTED THE RESPONSE")
 
                 image_urls = []
                 for result in response["data"]:
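Both send_summary_request and send_request now call valid_text_request right after response = await resp.json(), so a response with no usage block is reported with the API's own error message instead of an unhandled KeyError. A standalone sketch of that validation idea, assuming an OpenAI-style error body of the form {"error": {"message": ...}} (the function name and fallback text here are illustrative, not part of the commit):

def validate_completion_response(response: dict) -> int:
    """Return total_tokens from a completion response, or raise with the API's error text."""
    try:
        return int(response["usage"]["total_tokens"])
    except (KeyError, TypeError, ValueError):
        # Error payloads carry their own explanation; fall back if even that is missing.
        message = response.get("error", {}).get("message", "unknown error")
        raise ValueError("The API returned an invalid response: " + str(message))
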
1 change: 0 additions & 1 deletion models/usage_service_model.py
@@ -17,7 +17,6 @@ def __init__(self, data_dir: Path):
     def update_usage(self, tokens_used):
         tokens_used = int(tokens_used)
         price = (tokens_used / 1000) * 0.02
-        print("This request cost " + str(price) + " credits")
         usage = self.get_usage()
         print("The current usage is " + str(usage) + " credits")
         with self.usage_file_path.open("w") as f:
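The remaining print in update_usage still reports the running total; only the per-request cost line was dropped. For reference, the method prices a request at 0.02 credits per 1,000 tokens, so a 4,000-token request adds 0.08 credits. A small sketch of that arithmetic, with an in-memory total standing in for the file-backed counter:

PRICE_PER_1K_TOKENS = 0.02  # rate used by update_usage


def request_cost(tokens_used: int) -> float:
    """Credits charged for a single request."""
    return (tokens_used / 1000) * PRICE_PER_1K_TOKENS


running_total = 0.0
running_total += request_cost(4000)  # 0.08 credits
running_total += request_cost(1500)  # 0.03 credits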
