# pyproject.toml — llama-cpp-agent (fork of Maximilian-Winter/llama-cpp-agent)
# PEP 517/518 build configuration: build with setuptools via its PEP 517 backend.
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"

# Core package metadata (PEP 621).
[project]
name = "llama-cpp-agent"
version = "0.2.35"
description = "A framework for building LLM based AI agents with llama.cpp."
readme = "ReadMe.md"
requires-python = ">=3.10"
# PEP 621 idiom: `authors` as an array of inline tables rather than a
# single-element [[project.authors]] array-of-tables.
# NOTE(review): the email below looks like a GitHub scraping placeholder —
# confirm the real address before publishing.
authors = [
    { name = "Maximilian Winter", email = "[email protected]" },
]
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]
# Runtime dependencies (PEP 508 specifiers), sorted alphabetically.
dependencies = [
    "aiohttp",
    "docstring_parser",
    "llama-cpp-python>=0.2.60",
    "pydantic>=2.5.3",
    "requests>=2.31.0",
]

# Optional extras: install with `pip install llama-cpp-agent[<extra>]`.
# Extra names are part of the public install interface — do not rename.
[project.optional-dependencies]
agent_memory = ["chromadb", "SQLAlchemy", "numpy", "scipy"]
rag = ["ragatouille"]
vllm_provider = ["openai", "transformers", "sentencepiece", "protobuf"]
groq_provider = ["groq"]
mixtral_agent = ["mistral-common"]
web_search_summarization = [
    "duckduckgo_search",
    "trafilatura",
    "lxml-html-clean",
    "lxml",
    "googlesearch-python",
    "beautifulsoup4",
    "readability-lxml",
]
# Project links rendered on the PyPI page. "Bug Tracker" must stay quoted:
# the key contains a space, so it is not a bare key.
[project.urls]
Homepage = "https://github.com/Maximilian-Winter/llama-cpp-agent"
"Bug Tracker" = "https://github.com/Maximilian-Winter/llama-cpp-agent/issues"
# setuptools automatic package discovery for the src/ layout:
# packages are found under src/ rather than the project root.
[tool.setuptools.packages.find]
where = ["src"]