
Commit

update: remove the system parameter from the request body and add system messages into the message list, because the system parameter has recently proven useless
Maplemx committed Oct 9, 2024
1 parent 91e87b8 commit 4fd918b
Showing 2 changed files with 21 additions and 11 deletions.
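
The ERNIE plugin below switches on three message_rules (no_multi_system_messages, strict_orders, no_multi_type_messages) and routes the final list through format_request_messages. That helper lives in Agently/plugins/request/utils.py and is not part of this diff, so the sketch below is only a guess at what those rules enforce; normalize_messages and every behavioral detail here are assumptions, not the library's actual code.

# Hypothetical sketch of what the three message_rules plausibly mean.
# The real logic is in format_request_messages, which this diff does not show.
def normalize_messages(messages: list[dict]) -> list[dict]:
    # no_multi_system_messages: collapse all system messages into a single leading one
    system_parts = [m["content"] for m in messages if m["role"] == "system"]
    rest = [m for m in messages if m["role"] != "system"]
    normalized = []
    if system_parts:
        normalized.append({"role": "system", "content": "\n".join(system_parts)})
    for message in rest:
        # no_multi_type_messages: flatten list-style content into a plain string
        content = message["content"]
        if isinstance(content, list):
            content = "\n".join(
                part.get("text", "") if isinstance(part, dict) else str(part)
                for part in content
            )
        message = {"role": message["role"], "content": content}
        # strict_orders: ERNIE-style APIs expect alternating turns,
        # so merge consecutive messages that share a role
        if normalized and normalized[-1]["role"] == message["role"]:
            normalized[-1]["content"] += "\n" + message["content"]
        else:
            normalized.append(message)
    return normalized
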
20 changes: 14 additions & 6 deletions Agently/plugins/request/ERNIE.py
@@ -1,4 +1,4 @@
-from .utils import RequestABC, to_prompt_structure, to_instruction, to_json_desc
+from .utils import RequestABC, to_prompt_structure, to_instruction, to_json_desc, format_request_messages
 from Agently.utils import RuntimeCtxNamespace
 import erniebot

@@ -11,6 +11,12 @@ def __init__(self, request):
         self.request_type = "chat"
         self.model_name = "ERNIE"
         self.model_settings = RuntimeCtxNamespace(f"model.{self.model_name}", self.request.settings)
+        if not self.model_settings.get_trace_back("message_rules.no_multi_system_messages"):
+            self.model_settings.set("message_rules.no_multi_system_messages", True)
+        if not self.model_settings.get_trace_back("message_rules.strict_orders"):
+            self.model_settings.set("message_rules.strict_orders", True)
+        if not self.model_settings.get_trace_back("message_rules.no_multi_type_messages"):
+            self.model_settings.set("message_rules.no_multi_type_messages", True)

     def _create_client(self):
         if self.request_type == "chat":
@@ -101,19 +107,21 @@ def generate_request_data(self):
             erniebot.access_token = access_token[api_type]
             messages = self.construct_request_messages()
             request_messages = []
-            system_prompt = ""
+            #system_prompt = ""
             for message in messages:
                 if message["role"] == "system":
-                    system_prompt += f"{ message['content'] }\n"
+                    #system_prompt += f"{ message['content'] }\n"
+                    message["role"] = "user"
+                    request_messages.append(message)
                 else:
                     request_messages.append(message)
             request_data = {
-                "messages": request_messages,
+                "messages": format_request_messages(request_messages, self.model_settings),
                 "stream": True,
                 **options,
             }
-            if system_prompt != "" and self.request.settings.get_trace_back("retry_count", 0) > 0:
-                request_data.update({ "system": system_prompt })
+            #if system_prompt != "" and self.request.settings.get_trace_back("retry_count", 0) > 0:
+            #    request_data.update({ "system": system_prompt })
         # request type: embedding
         elif self.request_type == "embedding":
             if "model" not in options:
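
For reference, here is a minimal, self-contained rendering of what the new chat branch in ERNIE.py produces. It mirrors the loop above but stubs out format_request_messages and the real options handling, and the model name in the example is invented, so treat it as an illustration of the shape of request_data rather than the plugin's actual code path.

# Standalone illustration of the changed loop (no Agently imports).
def build_request_data(messages, options):
    request_messages = []
    for message in messages:
        message = dict(message)
        if message["role"] == "system":
            # system content is no longer collected into a "system" field;
            # it is downgraded to a user turn and kept in the message list
            message["role"] = "user"
        request_messages.append(message)
    return {"messages": request_messages, "stream": True, **options}

data = build_request_data(
    [
        {"role": "system", "content": "You are a terse assistant."},
        {"role": "user", "content": "Hi"},
    ],
    {"model": "ernie-3.5"},  # placeholder options for the demo
)
assert "system" not in data                   # the old retry-only fallback is gone
assert data["messages"][0]["role"] == "user"  # system prompt now travels as a user turn
print(data)
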
12 changes: 7 additions & 5 deletions Agently/plugins/request/QianFan.py
@@ -170,19 +170,21 @@ def generate_request_data(self):
                 options["model"] = "ERNIE-Speed-8K"
             messages = format_request_messages(self.construct_request_messages(), self.model_settings)
             request_messages = []
-            system_prompt = ""
+            #system_prompt = ""
             for message in messages:
                 if message["role"] == "system":
-                    system_prompt += f"{ message['content'] }\n"
+                    message["role"] = "user"
+                    request_messages.append(message)
+                    #system_prompt += f"{ message['content'] }\n"
                 else:
                     request_messages.append(message)
             request_data = {
-                "messages": request_messages,
+                "messages": format_request_messages(request_messages, self.model_settings),
                 "stream": True,
                 **options,
             }
-            if system_prompt != "" and self.request.settings.get_trace_back("retry_count", 0) > 0:
-                request_data.update({ "system": system_prompt })
+            #if system_prompt != "" and self.request.settings.get_trace_back("retry_count", 0) > 0:
+            #    request_data.update({ "system": system_prompt })
         elif self.request_type in ["completion", "completions"]:
             if "model" not in options:
                 options["model"] = "Yi-34B-Chat"
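
QianFan.py gets the same treatment. As a quick contrast of request shapes, the snippet below places the payload the old retry-only fallback used to build next to the one both plugins build now; the prompt strings are made up for the example.

# Before this commit, on a retry (retry_count > 0) the system text was hoisted
# out of the message list into a top-level "system" field:
old_request_data = {
    "messages": [{"role": "user", "content": "Hi"}],
    "system": "You are a terse assistant.\n",
    "stream": True,
}

# After this commit, retries or not, the system text rides along as a user turn:
new_request_data = {
    "messages": [
        {"role": "user", "content": "You are a terse assistant."},
        {"role": "user", "content": "Hi"},
    ],
    "stream": True,
}

assert "system" not in new_request_data
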
