Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

postpone the import of external different providers #1579

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions metagpt/configs/llm_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ class LLMType(Enum):
OPEN_LLM = "open_llm"
GEMINI = "gemini"
METAGPT = "metagpt"
HUMAN = "human"
AZURE = "azure"
OLLAMA = "ollama" # /chat at ollama api
OLLAMA_GENERATE = "ollama.generate" # /generate at ollama api
Expand All @@ -42,6 +43,28 @@ class LLMType(Enum):
def __missing__(self, key):
return self.OPENAI

# Maps each LLMType to the dotted path of the module implementing its provider.
# Used for lazy loading: a provider module is imported only when that provider
# is first requested, so merely importing the package does not pull in every
# provider's third-party dependencies.
# NOTE(review): each module listed here is expected to define the provider
# class referenced elsewhere (and to register itself on import) — confirm
# against the provider modules, since lazy loading defers any mismatch error
# until first use.
LLMModuleMap = {
    LLMType.OPENAI: "metagpt.provider.openai_api",
    LLMType.ANTHROPIC: "metagpt.provider.anthropic_api",
    LLMType.CLAUDE: "metagpt.provider.anthropic_api",  # Same module as Anthropic
    LLMType.SPARK: "metagpt.provider.spark_api",
    LLMType.ZHIPUAI: "metagpt.provider.zhipuai_api",
    LLMType.FIREWORKS: "metagpt.provider.fireworks_api",
    LLMType.OPEN_LLM: "metagpt.provider.open_llm_api",
    LLMType.GEMINI: "metagpt.provider.google_gemini_api",
    LLMType.METAGPT: "metagpt.provider.metagpt_api",
    LLMType.HUMAN: "metagpt.provider.human_provider",
    LLMType.AZURE: "metagpt.provider.azure_openai_api",
    LLMType.OLLAMA: "metagpt.provider.ollama_api",
    LLMType.QIANFAN: "metagpt.provider.qianfan_api",  # Baidu BCE
    LLMType.DASHSCOPE: "metagpt.provider.dashscope_api",  # Aliyun LingJi DashScope
    LLMType.MOONSHOT: "metagpt.provider.moonshot_api",
    LLMType.MISTRAL: "metagpt.provider.mistral_api",
    LLMType.YI: "metagpt.provider.yi_api",  # lingyiwanwu
    LLMType.OPENROUTER: "metagpt.provider.openrouter_api",
    LLMType.BEDROCK: "metagpt.provider.bedrock_api",
    LLMType.ARK: "metagpt.provider.ark_api",
}

class LLMConfig(YamlModel):
"""Config for LLM
Expand Down
83 changes: 56 additions & 27 deletions metagpt/provider/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,33 +5,62 @@
@Author : alexanderwu
@File : __init__.py
"""
import importlib
from metagpt.configs.llm_config import LLMType, LLMModuleMap

from metagpt.provider.google_gemini_api import GeminiLLM
from metagpt.provider.ollama_api import OllamaLLM
from metagpt.provider.openai_api import OpenAILLM
from metagpt.provider.zhipuai_api import ZhiPuAILLM
from metagpt.provider.azure_openai_api import AzureOpenAILLM
from metagpt.provider.metagpt_api import MetaGPTLLM
from metagpt.provider.human_provider import HumanProvider
from metagpt.provider.spark_api import SparkLLM
from metagpt.provider.qianfan_api import QianFanLLM
from metagpt.provider.dashscope_api import DashScopeLLM
from metagpt.provider.anthropic_api import AnthropicLLM
from metagpt.provider.bedrock_api import BedrockLLM
from metagpt.provider.ark_api import ArkLLM
class LLMFactory:
    """Lazy proxy for an LLM provider class.

    Holds the dotted module path and the class name of a provider and defers
    the (potentially expensive) import until the provider is first used, so
    importing the package does not load every provider's dependencies.
    """

    def __init__(self, module_name, instance_name):
        # Dotted path of the provider module, e.g. "metagpt.provider.openai_api".
        self.module_name = module_name
        # Name of the provider class inside that module, e.g. "OpenAILLM".
        self.instance_name = instance_name
        # Cached module object; stays None until the first real use.
        self._module = None

    def _load(self):
        """Import the target module on first use and cache it."""
        if self._module is None:
            self._module = importlib.import_module(self.module_name)
        return self._module

    def __getattr__(self, name):
        # Delegate attribute access to the real module (triggers the import).
        return getattr(self._load(), name)

    def __instancecheck__(self, instance):
        # Make isinstance(obj, factory) behave like a check against the real
        # provider class (PEP 3119 hook; invoked because the factory is used
        # in place of the class object).
        return isinstance(instance, getattr(self._load(), self.instance_name))

    def __call__(self, config):
        # Import the module when it's called for the first time, then
        # instantiate the real provider class with the given config.
        return getattr(self._load(), self.instance_name)(config)

def create_llm_symbol(llm_configurations):
    """Create a lazy ``LLMFactory`` for each (LLMType, class-name) pair.

    The factories are injected into this module's globals so that existing
    imports such as ``from metagpt.provider import OpenAILLM`` keep working,
    while the actual provider modules are imported only on first use.

    Returns a list of the created symbol names, suitable for ``__all__``.
    """
    factories = {name: LLMFactory(LLMModuleMap[llm_type], name) for llm_type, name in llm_configurations}
    # Add the factory-created llm objects to the module namespace.
    globals().update(factories)
    # Return a plain list (not a dict view) so __all__ is a list of str,
    # as expected by convention and static-analysis tools.
    return list(factories)

# List of LLM configurations: (LLMType, provider class name) pairs.
# NOTE(review): each class name must exist in the module LLMModuleMap points
# at for that type — e.g. "ClaudeLLM" is expected inside
# metagpt.provider.anthropic_api. Verify the names: lazy loading defers any
# AttributeError for a mismatched name until the provider is first used.
llm_configurations = [
    (LLMType.GEMINI, "GeminiLLM"),
    (LLMType.OLLAMA, "OllamaLLM"),
    (LLMType.OPENAI, "OpenAILLM"),
    (LLMType.ZHIPUAI, "ZhiPuAILLM"),
    (LLMType.AZURE, "AzureOpenAILLM"),
    (LLMType.METAGPT, "MetaGPTLLM"),
    (LLMType.HUMAN, "HumanProvider"),
    (LLMType.SPARK, "SparkLLM"),
    (LLMType.QIANFAN, "QianFanLLM"),
    (LLMType.DASHSCOPE, "DashScopeLLM"),
    (LLMType.ANTHROPIC, "AnthropicLLM"),
    (LLMType.BEDROCK, "BedrockLLM"),
    (LLMType.ARK, "ArkLLM"),
    (LLMType.FIREWORKS, "FireworksLLM"),
    (LLMType.OPEN_LLM, "OpenLLM"),
    (LLMType.MOONSHOT, "MoonshotLLM"),
    (LLMType.MISTRAL, "MistralLLM"),
    (LLMType.YI, "YiLLM"),
    (LLMType.OPENROUTER, "OpenRouterLLM"),
    (LLMType.CLAUDE, "ClaudeLLM"),
]

# Create all LLMFactory instances and get created symbols
__all__ = create_llm_symbol(llm_configurations)
8 changes: 6 additions & 2 deletions metagpt/provider/llm_provider_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@
@Author : alexanderwu
@File : llm_provider_registry.py
"""
from metagpt.configs.llm_config import LLMConfig, LLMType
from metagpt.configs.llm_config import LLMConfig, LLMType, LLMModuleMap
from metagpt.provider.base_llm import BaseLLM

import importlib

class LLMProviderRegistry:
def __init__(self):
Expand All @@ -18,6 +18,10 @@ def register(self, key, provider_cls):

def get_provider(self, enum: LLMType):
    """get provider instance according to the enum

    Lazily imports the provider module on first request; the import is
    expected to register the provider class into ``self.providers`` as a
    side effect (via the module's registration hook).

    Raises:
        KeyError: if the provider is still unregistered after importing
            its module (e.g. the module lacks a registration call).
    """
    if enum not in self.providers:
        # Import and register the provider if not already registered
        module_name = LLMModuleMap[enum]
        importlib.import_module(module_name)
        if enum not in self.providers:
            # Keep KeyError (what callers of dict lookup would catch) but
            # explain the likely cause instead of a bare missing-key error.
            raise KeyError(
                f"provider for {enum} was not registered by importing {module_name}"
            )
    return self.providers[enum]


Expand Down
Loading