From 4fd918b2bd4dcfd85670f8fa71d2755444ccbb0b Mon Sep 17 00:00:00 2001
From: Maplemx
Date: Wed, 9 Oct 2024 17:10:56 +0800
Subject: [PATCH] update: remove system parameter from request body

add them into message list because system parameter is useless recently

---
 Agently/plugins/request/ERNIE.py   | 20 ++++++++++++++------
 Agently/plugins/request/QianFan.py | 12 +++++++-----
 2 files changed, 21 insertions(+), 11 deletions(-)

diff --git a/Agently/plugins/request/ERNIE.py b/Agently/plugins/request/ERNIE.py
index a576a829..ea64866a 100644
--- a/Agently/plugins/request/ERNIE.py
+++ b/Agently/plugins/request/ERNIE.py
@@ -1,4 +1,4 @@
-from .utils import RequestABC, to_prompt_structure, to_instruction, to_json_desc
+from .utils import RequestABC, to_prompt_structure, to_instruction, to_json_desc, format_request_messages
 from Agently.utils import RuntimeCtxNamespace
 import erniebot
 
@@ -11,6 +11,12 @@ def __init__(self, request):
         self.request_type = "chat"
         self.model_name = "ERNIE"
         self.model_settings = RuntimeCtxNamespace(f"model.{self.model_name}", self.request.settings)
+        if not self.model_settings.get_trace_back("message_rules.no_multi_system_messages"):
+            self.model_settings.set("message_rules.no_multi_system_messages", True)
+        if not self.model_settings.get_trace_back("message_rules.strict_orders"):
+            self.model_settings.set("message_rules.strict_orders", True)
+        if not self.model_settings.get_trace_back("message_rules.no_multi_type_messages"):
+            self.model_settings.set("message_rules.no_multi_type_messages", True)
 
     def _create_client(self):
         if self.request_type == "chat":
@@ -101,19 +107,21 @@ def generate_request_data(self):
                 erniebot.access_token = access_token[api_type]
             messages = self.construct_request_messages()
             request_messages = []
-            system_prompt = ""
+            #system_prompt = ""
             for message in messages:
                 if message["role"] == "system":
-                    system_prompt += f"{ message['content'] }\n"
+                    #system_prompt += f"{ message['content'] }\n"
+                    message["role"] = "user"
+                    request_messages.append(message)
                 else:
                     request_messages.append(message)
             request_data = {
-                "messages": request_messages,
+                "messages": format_request_messages(request_messages, self.model_settings),
                 "stream": True,
                 **options,
             }
-            if system_prompt != "" and self.request.settings.get_trace_back("retry_count", 0) > 0:
-                request_data.update({ "system": system_prompt })
+            #if system_prompt != "" and self.request.settings.get_trace_back("retry_count", 0) > 0:
+            #    request_data.update({ "system": system_prompt })
         # request type: embedding
         elif self.request_type == "embedding":
             if "model" not in options:
diff --git a/Agently/plugins/request/QianFan.py b/Agently/plugins/request/QianFan.py
index 65e76c6e..1900ed5e 100644
--- a/Agently/plugins/request/QianFan.py
+++ b/Agently/plugins/request/QianFan.py
@@ -170,19 +170,21 @@ def generate_request_data(self):
                 options["model"] = "ERNIE-Speed-8K"
             messages = format_request_messages(self.construct_request_messages(), self.model_settings)
             request_messages = []
-            system_prompt = ""
+            #system_prompt = ""
             for message in messages:
                 if message["role"] == "system":
-                    system_prompt += f"{ message['content'] }\n"
+                    message["role"] = "user"
+                    request_messages.append(message)
+                    #system_prompt += f"{ message['content'] }\n"
                 else:
                     request_messages.append(message)
             request_data = {
-                "messages": request_messages,
+                "messages": format_request_messages(request_messages, self.model_settings),
                 "stream": True,
                 **options,
             }
-            if system_prompt != "" and self.request.settings.get_trace_back("retry_count", 0) > 0:
-                request_data.update({ "system": system_prompt })
+            #if system_prompt != "" and self.request.settings.get_trace_back("retry_count", 0) > 0:
+            #    request_data.update({ "system": system_prompt })
         elif self.request_type in ["completion", "completions"]:
             if "model" not in options:
                 options["model"] = "Yi-34B-Chat"