Sweep: Feature req: Please integrate apipie.ai #237

Closed
wants to merge 9 commits
14 changes: 14 additions & 0 deletions gpt_all_star/core/llm.py
@@ -13,6 +13,7 @@ class LLM_TYPE(str, Enum):
    AZURE = "AZURE"
    ANTHROPIC = "ANTHROPIC"
    ANTHROPIC_TOOLS = "ANTHROPIC_TOOLS"
    APIPIE = "APIPIE"


def create_llm(llm_name: LLM_TYPE) -> BaseChatModel:
@@ -45,6 +46,11 @@ def create_llm(llm_name: LLM_TYPE) -> BaseChatModel:
model_name=os.getenv("ANTHROPIC_API_MODEL", "claude-3-opus-20240229"),
temperature=0.1,
)
elif llm_name == LLM_TYPE.APIPIE:
return _create_chat_apipie(
model_name=os.getenv("APIPIE_API_MODEL", "default-model"),
temperature=0.1,
)
else:
raise ValueError(f"Unsupported LLM type: {llm_name}")

@@ -76,6 +82,14 @@ def _create_azure_chat_openai(
        streaming=True,
    )

def _create_chat_apipie(model_name: str, temperature: float) -> ChatAPIpie:
    return ChatAPIpie(
        model_name=model_name,
        temperature=temperature,
        streaming=True,
        # APIPIE_API_KEY is an assumed variable name, mirroring the other providers.
        api_key=os.getenv("APIPIE_API_KEY", ""),
    )


def _create_chat_anthropic(
    anthropic_api_key: str, model_name: str, temperature: float
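For context, a minimal usage sketch of the new branch in create_llm; APIPIE_API_KEY is an assumed variable name (mirroring the other providers) and the model value is only a placeholder:

import os

from gpt_all_star.core.llm import LLM_TYPE, create_llm

# Placeholder values; real model names and keys come from your APIpie account.
os.environ.setdefault("APIPIE_API_MODEL", "default-model")
os.environ.setdefault("APIPIE_API_KEY", "<your-apipie-key>")

llm = create_llm(LLM_TYPE.APIPIE)  # returns a ChatAPIpie configured from the env vars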
29 changes: 29 additions & 0 deletions gpt_all_star/core/tools/chat_apipie.py
@@ -0,0 +1,29 @@
import requests
from typing import Any, List, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.messages import AIMessage, BaseMessage
from langchain_core.outputs import ChatGeneration, ChatResult


class ChatAPIpie(BaseChatModel):
    """Minimal chat model wrapper for the APIpie chat-completions endpoint."""

    # Declared as pydantic fields because BaseChatModel is a pydantic model.
    model_name: str
    temperature: float = 0.1
    streaming: bool = False
    api_key: str
    api_url: str = "https://api.apipie.ai/v1/chat/completions"

    @property
    def _llm_type(self) -> str:
        return "apipie"

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        headers = {"Authorization": f"Bearer {self.api_key}"}
        data = {
            "model": self.model_name,
            # The endpoint is assumed to accept OpenAI-style chat messages;
            # streaming responses are not handled by this minimal wrapper.
            "messages": [
                {"role": {"human": "user", "ai": "assistant"}.get(m.type, m.type),
                 "content": m.content}
                for m in messages
            ],
            "temperature": self.temperature,
        }
        response = requests.post(self.api_url, json=data, headers=headers)
        response.raise_for_status()
        return self.process_response(response.json())

    def process_response(self, response: dict) -> ChatResult:
        if response.get("choices"):
            content = response["choices"][0]["message"]["content"]
            message = AIMessage(content=content)
            return ChatResult(generations=[ChatGeneration(message=message)])
        raise ValueError("No response from APIpie API")
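As a quick end-to-end check, a hedged sketch of calling the wrapper directly; the model name and key are placeholders, and invoke() is inherited from BaseChatModel:

from gpt_all_star.core.tools.chat_apipie import ChatAPIpie

chat = ChatAPIpie(model_name="default-model", api_key="<your-apipie-key>")
reply = chat.invoke("Say hello in one sentence.")  # returns an AIMessage
print(reply.content)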