diff --git a/gpt_all_star/core/llm.py b/gpt_all_star/core/llm.py
index a3e251ac..0eb51659 100644
--- a/gpt_all_star/core/llm.py
+++ b/gpt_all_star/core/llm.py
@@ -13,6 +13,7 @@ class LLM_TYPE(str, Enum):
     AZURE = "AZURE"
     ANTHROPIC = "ANTHROPIC"
     ANTHROPIC_TOOLS = "ANTHROPIC_TOOLS"
+    APIPIE = "APIPIE"
 
 
 def create_llm(llm_name: LLM_TYPE) -> BaseChatModel:
@@ -45,6 +46,11 @@ def create_llm(llm_name: LLM_TYPE) -> BaseChatModel:
             model_name=os.getenv("ANTHROPIC_API_MODEL", "claude-3-opus-20240229"),
             temperature=0.1,
         )
+    elif llm_name == LLM_TYPE.APIPIE:
+        return _create_chat_apipie(
+            model_name=os.getenv("APIPIE_API_MODEL", "default-model"),
+            temperature=0.1,
+        )
     else:
         raise ValueError(f"Unsupported LLM type: {llm_name}")
 
@@ -76,6 +82,17 @@ def _create_azure_chat_openai(
         streaming=True,
     )
 
+def _create_chat_apipie(model_name: str, temperature: float) -> "ChatAPIpie":
+    # Imported locally so the module-level imports of llm.py stay unchanged.
+    from gpt_all_star.core.tools.chat_apipie import ChatAPIpie
+
+    return ChatAPIpie(
+        model_name=model_name,
+        temperature=temperature,
+        streaming=True,
+        api_key=os.getenv("APIPIE_API_KEY", ""),
+    )
+
 
 def _create_chat_anthropic(
     anthropic_api_key: str, model_name: str, temperature: float
diff --git a/gpt_all_star/core/tools/chat_apipie.py b/gpt_all_star/core/tools/chat_apipie.py
new file mode 100644
index 00000000..fb519d1f
--- /dev/null
+++ b/gpt_all_star/core/tools/chat_apipie.py
@@ -0,0 +1,51 @@
+import requests
+from typing import Any, List, Optional
+
+from langchain_core.callbacks import CallbackManagerForLLMRun
+from langchain_core.language_models.chat_models import BaseChatModel
+from langchain_core.messages import AIMessage, BaseMessage
+from langchain_core.outputs import ChatGeneration, ChatResult
+
+
+class ChatAPIpie(BaseChatModel):
+    # BaseChatModel is a pydantic model, so configuration is declared as
+    # fields instead of being assigned inside a custom __init__.
+    model_name: str
+    temperature: float = 0.1
+    streaming: bool = False
+    api_key: str = ""
+    api_url: str = "https://api.apipie.ai/v1/chat/completions"
+
+    @property
+    def _llm_type(self) -> str:
+        return "apipie"
+
+    def send_request(self, prompt: str) -> str:
+        headers = {"Authorization": f"Bearer {self.api_key}"}
+        data = {
+            "model": self.model_name,
+            "prompt": prompt,
+            "temperature": self.temperature,
+            "stream": self.streaming,
+        }
+        response = requests.post(self.api_url, json=data, headers=headers)
+        response.raise_for_status()
+        return self.process_response(response.json())
+
+    def process_response(self, response: dict) -> str:
+        if "choices" in response and len(response["choices"]) > 0:
+            return response["choices"][0]["text"]
+        raise ValueError("No response from APIpie API")
+
+    def _generate(
+        self,
+        messages: List[BaseMessage],
+        stop: Optional[List[str]] = None,
+        run_manager: Optional[CallbackManagerForLLMRun] = None,
+        **kwargs: Any,
+    ) -> ChatResult:
+        # Flatten the chat history into a single prompt string for the API.
+        prompt = "\n".join(str(message.content) for message in messages)
+        text = self.send_request(prompt)
+        generation = ChatGeneration(message=AIMessage(content=text))
+        return ChatResult(generations=[generation])
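
A minimal usage sketch (not part of the patch), assuming the APIPIE_API_MODEL variable read in create_llm and the APIPIE_API_KEY variable this patch introduces for authentication; "default-model" is the placeholder default from the diff, not a real APIpie model name:

    import os

    from gpt_all_star.core.llm import LLM_TYPE, create_llm

    # Hypothetical configuration for illustration only.
    os.environ["APIPIE_API_MODEL"] = "default-model"
    os.environ["APIPIE_API_KEY"] = "your-apipie-key"

    llm = create_llm(LLM_TYPE.APIPIE)
    # invoke() is BaseChatModel's standard entry point; it routes through
    # ChatAPIpie._generate and returns an AIMessage.
    result = llm.invoke("Hello, APIpie!")
    print(result.content)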