Commit
feat:
- cross-config for webui: merging based on the selected services
- env manager support for arrays
- default model for vllm is now Phi-3, which supports a system prompt
- OpenAI URL/Key management + Open WebUI integration

chore:
- more ergonomic naming for default API keys: "service.key"
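
For context on the "service.key" naming above: a plausible reading (not spelled out in this commit) is that a config field such as anthropic.key resolves to the HARBOR_ANTHROPIC_KEY variable by prefixing HARBOR_, uppercasing, and turning dots into underscores. A minimal Python sketch of that assumed mapping, with illustrative names only:

def field_to_env_var(field: str) -> str:
    # Assumed convention: "anthropic.key" -> "HARBOR_ANTHROPIC_KEY".
    return "HARBOR_" + field.replace(".", "_").upper()

print(field_to_env_var("anthropic.key"))  # HARBOR_ANTHROPIC_KEY
print(field_to_env_var("webui.secret"))   # HARBOR_WEBUI_SECRET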
av committed Aug 2, 2024
1 parent 50098f0 commit b22b661
Showing 32 changed files with 729 additions and 210 deletions.
50 changes: 33 additions & 17 deletions .env
@@ -30,28 +30,44 @@ HARBOR_OLLAMA_CACHE="~/.ollama"
# These could be used by specific services,
# in which case they can be set in a centralised
# location like this.
HARBOR_ANYSCALE_API_KEY=""
HARBOR_APIPIE_API_KEY=""
HARBOR_COHERE_API_KEY=""
HARBOR_ANYSCALE_KEY=""
HARBOR_APIPIE_KEY=""
HARBOR_COHERE_KEY=""
HARBOR_FIREWORKS_API_KEY=""
HARBOR_GROQ_API_KEY=""
HARBOR_HUGGINGFACE_TOKEN=""
HARBOR_MISTRAL_API_KEY=""
HARBOR_GROQ_KEY=""
HARBOR_MISTRAL_KEY=""
HARBOR_OPENROUTER_KEY=""
HARBOR_PERPLEXITY_API_KEY=""
HARBOR_SHUTTLEAI_API_KEY=""
HARBOR_TOGETHERAI_API_KEY=""
HARBOR_ANTHROPIC_API_KEY=""
HARBOR_PERPLEXITY_KEY=""
HARBOR_SHUTTLEAI_KEY=""
HARBOR_TOGETHERAI_KEY=""
HARBOR_ANTHROPIC_KEY=""
HARBOR_BINGAI_TOKEN=""
HARBOR_GOOGLE_KEY=""
HARBOR_OPENAI_API_KEY=""
HARBOR_ASSISTANTS_API_KEY=""
HARBOR_ASSISTANTS_KEY=""

HARBOR_UI_MAIN="webui"
HARBOR_SERVICES_DEFAULT="ollama webui"
HARBOR_UI_AUTOOPEN=false
HARBOR_SERVICES_DEFAULT="ollama;webui"

# OpenAI
# ---------------------
# In the context of Harbor, this means OpenAI API-compatible
# services, such as Ollama, Llama.cpp, LiteLLM, etc.

HARBOR_OPENAI_URLS=""
HARBOR_OPENAI_KEYS=""

# These variables are derived from the first items of the lists above
HARBOR_OPENAI_KEY=""
HARBOR_OPENAI_URL=""

# webui
HARBOR_WEBUI_HOST_PORT=33801
# Persistent secret - user stays logged into
# webui between restarts
HARBOR_WEBUI_SECRET="h@rb0r"
HARBOR_WEBUI_NAME="Harbor"
HARBOR_WEBUI_LOG_LEVEL="INFO"

# llamacpp
HARBOR_LLAMACPP_HOST_PORT=33831
@@ -112,10 +128,10 @@ HARBOR_BIONICGPT_HOST_PORT=33901

# vLLM
HARBOR_VLLM_HOST_PORT=33911
HARBOR_VLLM_MODEL="google/gemma-2-2b-it"
HARBOR_VLLM_MODEL="microsoft/Phi-3-mini-4k-instruct"
HARBOR_VLLM_EXTRA_ARGS=""
HARBOR_VLLM_ATTENTION_BACKEND="FLASHINFER"
HARBOR_VLLM_MODEL_SPECIFIER="--model google/gemma-2-2b-it"
HARBOR_VLLM_ATTENTION_BACKEND="FLASH_ATTN"
HARBOR_VLLM_MODEL_SPECIFIER="--model microsoft/Phi-3-mini-4k-instruct"

# Aphrodite
HARBOR_APHRODITE_HOST_PORT=33921
@@ -130,4 +146,4 @@ HARBOR_APHRODITE_MODEL="neuralmagic/Mistral-7B-Instruct-v0.3-GPTQ-4bit"
# Open WebUI
# See https://docs.openwebui.com/getting-started/env-configuration/ for reference.
# --------------------------------------------
WEBUI_NAME=Harbor
# WEBUI_NAME=WUI
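
The comment in the OpenAI block above says HARBOR_OPENAI_KEY and HARBOR_OPENAI_URL are derived from the first items of the corresponding lists. The list delimiter is not visible in this hunk; assuming the same semicolon convention used by HARBOR_SERVICES_DEFAULT ("ollama;webui"), the derivation could look roughly like this Python sketch (helper names are illustrative, not from the repository):

import os

def env_list(name: str) -> list[str]:
    # Split a semicolon-separated Harbor variable into a list; "" -> [].
    raw = os.environ.get(name, "")
    return [item for item in raw.split(";") if item]

def first_or_empty(items: list[str]) -> str:
    return items[0] if items else ""

# Assumed derivation of the singular variables from the lists:
openai_url = first_or_empty(env_list("HARBOR_OPENAI_URLS"))
openai_key = first_or_empty(env_list("HARBOR_OPENAI_KEYS"))
print(openai_url, openai_key)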
10 changes: 8 additions & 2 deletions .gitignore
@@ -1,6 +1,9 @@
# Open WebUI
open-webui/
!open-webui/config.json
open-webui/*
open-webui/config.json

!open-webui/configs/
!open-webui/start_webui.sh

# Ollama
/ollama/ollama/
@@ -20,3 +23,6 @@ librechat/meili_data_v1.7/
librechat/logs/
librechat/vectordb/
librechat/images/

# Local scripts
scripts/
2 changes: 2 additions & 0 deletions README.md
@@ -82,6 +82,8 @@ harbor eject searxng llamacpp > docker-compose.harbor.yml
Read about supported services and the ways to configure them.
- [Harbor Compose Setup](https://github.com/av/harbor/wiki/Harbor-Compose-Setup)<br/>
Read about the way Harbor uses Docker Compose to manage services.
- [Compatibility](https://github.com/av/harbor/wiki/Compatibility)<br/>
Known compatibility issues between services and models, as well as possible workarounds.

## Why?

24 changes: 12 additions & 12 deletions compose.librechat.yml
@@ -17,22 +17,22 @@ services:
- MEILI_HOST=http://lcsearch:7700
- RAG_PORT=${HARBOR_LIBRECHAT_RAG_HOST_PORT}
- RAG_API_URL=http://lcrag:${HARBOR_LIBRECHAT_RAG_HOST_PORT}
- ANYSCALE_API_KEY=${HARBOR_ANYSCALE_API_KEY:-user_provided}
- APIPIE_API_KEY=${HARBOR_APIPIE_API_KEY:-user_provided}
- COHERE_API_KEY=${HARBOR_COHERE_API_KEY:-user_provided}
- ANYSCALE_API_KEY=${HARBOR_ANYSCALE_KEY:-user_provided}
- APIPIE_API_KEY=${HARBOR_APIPIE_KEY:-user_provided}
- COHERE_API_KEY=${HARBOR_COHERE_KEY:-user_provided}
- FIREWORKS_API_KEY=${HARBOR_FIREWORKS_API_KEY:-user_provided}
- GROQ_API_KEY=${HARBOR_GROQ_API_KEY:-user_provided}
- HUGGINGFACE_TOKEN=${HARBOR_HUGGINGFACE_TOKEN:-user_provided}
- MISTRAL_API_KEY=${HARBOR_MISTRAL_API_KEY:-user_provided}
- GROQ_API_KEY=${HARBOR_GROQ_KEY:-user_provided}
- HUGGINGFACE_TOKEN=${HARBOR_HF_HUB_TOKEN:-user_provided}
- MISTRAL_API_KEY=${HARBOR_MISTRAL_KEY:-user_provided}
- OPENROUTER_KEY=${HARBOR_OPENROUTER_KEY:-user_provided}
- PERPLEXITY_API_KEY=${HARBOR_PERPLEXITY_API_KEY:-user_provided}
- SHUTTLEAI_API_KEY=${HARBOR_SHUTTLEAI_API_KEY:-user_provided}
- TOGETHERAI_API_KEY=${HARBOR_TOGETHERAI_API_KEY:-user_provided}
- ANTHROPIC_API_KEY=${HARBOR_ANTHROPIC_API_KEY:-user_provided}
- PERPLEXITY_API_KEY=${HARBOR_PERPLEXITY_KEY:-user_provided}
- SHUTTLEAI_API_KEY=${HARBOR_SHUTTLEAI_KEY:-user_provided}
- TOGETHERAI_API_KEY=${HARBOR_TOGETHERAI_KEY:-user_provided}
- ANTHROPIC_API_KEY=${HARBOR_ANTHROPIC_KEY:-user_provided}
- BINGAI_TOKEN=${HARBOR_BINGAI_TOKEN:-user_provided}
- GOOGLE_KEY=${HARBOR_GOOGLE_KEY:-user_provided}
- OPENAI_API_KEY=${HARBOR_OPENAI_API_KEY:-user_provided}
- ASSISTANTS_API_KEY=${HARBOR_ASSISTANTS_API_KEY:-user_provided}
- OPENAI_API_KEY=${HARBOR_OPENAI_KEY:-user_provided}
- ASSISTANTS_API_KEY=${HARBOR_ASSISTANTS_KEY:-user_provided}
volumes:
- type: bind
source: ./librechat/.env
2 changes: 1 addition & 1 deletion compose.litellm.yml
@@ -8,7 +8,7 @@ services:
# LiteLLM is combined with other services
- ./litellm/litellm.config.yaml:/app/litellm/config.yaml
- ./litellm/start_litellm.sh:/app/litellm/start_litellm.sh
- ./shared/config_merger.py:/app/config_merger.py
- ./shared/yaml_config_merger.py:/app/yaml_config_merger.py
# Note that this config is "assembled" from
# the parts that implement relevant service
# compatibility, such as vllm or tgi or langfuse.
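
The "assembled" config mentioned in this comment is produced by the renamed shared/yaml_config_merger.py. Its implementation is not part of this page; a minimal sketch of such a merger, assuming list values (for example LiteLLM's model_list) are concatenated and other keys are overlaid by later fragments, could be:

import sys

import yaml  # PyYAML

def merge_yaml_configs(paths: list[str]) -> dict:
    # Concatenate list values across fragments; later fragments
    # overwrite scalar and mapping keys.
    merged: dict = {}
    for path in paths:
        with open(path) as f:
            fragment = yaml.safe_load(f) or {}
        for key, value in fragment.items():
            if isinstance(value, list):
                merged.setdefault(key, []).extend(value)
            else:
                merged[key] = value
    return merged

if __name__ == "__main__":
    # e.g. python yaml_config_merger.py vllm.yaml langfuse.yaml > config.yaml
    print(yaml.safe_dump(merge_yaml_configs(sys.argv[1:])))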
10 changes: 9 additions & 1 deletion compose.webui.yml
@@ -6,7 +6,15 @@ services:
container_name: webui
volumes:
- ./open-webui:/app/backend/data
- ./open-webui/start_webui.sh:/app/start_webui.sh
- ./shared/json_config_merger.py:/app/json_config_merger.py
- ./open-webui/configs/config.override.json:/app/configs/config.override.json
entrypoint: ["/app/start_webui.sh"]
ports:
- ${HARBOR_WEBUI_HOST_PORT}:8080
networks:
- harbor-network
- harbor-network
environment:
- WEBUI_NAME=${HARBOR_WEBUI_NAME}
- WEBUI_SECRET_KEY=${HARBOR_WEBUI_SECRET}
- GLOBAL_LOG_LEVEL=DEBUG
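
The new entrypoint and the mounted shared/json_config_merger.py implement the "cross config" merging from the commit message: per-service config.*.json fragments plus config.override.json are combined into Open WebUI's config. The merger itself is not shown on this page; a deep-merge along these lines would fit, with the file locations and the override-wins precedence being assumptions:

import json
from pathlib import Path

def deep_merge(base: dict, overlay: dict) -> dict:
    # Recursively overlay nested dicts; non-dict values from `overlay` win.
    out = dict(base)
    for key, value in overlay.items():
        if isinstance(value, dict) and isinstance(out.get(key), dict):
            out[key] = deep_merge(out[key], value)
        else:
            out[key] = value
    return out

configs_dir = Path("/app/configs")
override = configs_dir / "config.override.json"

# Merge the per-service fragments first, then the override last so it wins.
fragments = sorted(p for p in configs_dir.glob("config.*.json") if p != override)
if override.exists():
    fragments.append(override)

merged: dict = {}
for path in fragments:
    merged = deep_merge(merged, json.loads(path.read_text()))

Path("/app/backend/data/config.json").write_text(json.dumps(merged, indent=2))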
4 changes: 4 additions & 0 deletions compose.x.webui.aphrodite.yml
@@ -0,0 +1,4 @@
services:
webui:
volumes:
- ./open-webui/configs/config.aphrodite.json:/app/configs/config.aphrodite.json
4 changes: 4 additions & 0 deletions compose.x.webui.comfyui.yml
@@ -0,0 +1,4 @@
services:
webui:
volumes:
- ./open-webui/configs/config.comfyui.json:/app/configs/config.comfyui.json
4 changes: 4 additions & 0 deletions compose.x.webui.litellm.yml
@@ -0,0 +1,4 @@
services:
webui:
volumes:
- ./open-webui/configs/config.litellm.json:/app/configs/config.litellm.json
4 changes: 4 additions & 0 deletions compose.x.webui.llamacpp.yml
@@ -0,0 +1,4 @@
services:
webui:
volumes:
- ./open-webui/configs/config.llamacpp.json:/app/configs/config.llamacpp.json
4 changes: 4 additions & 0 deletions compose.x.webui.ollama.yml
@@ -0,0 +1,4 @@
services:
webui:
volumes:
- ./open-webui/configs/config.ollama.json:/app/configs/config.ollama.json
4 changes: 4 additions & 0 deletions compose.x.webui.searxng.ollama.yml
@@ -0,0 +1,4 @@
services:
webui:
volumes:
- ./open-webui/configs/config.x.searxng.ollama.json:/app/configs/config.x.searxng.ollama.json
4 changes: 4 additions & 0 deletions compose.x.webui.searxng.yml
@@ -0,0 +1,4 @@
services:
webui:
volumes:
- ./open-webui/configs/config.searxng.json:/app/configs/config.searxng.json
4 changes: 4 additions & 0 deletions compose.x.webui.tts.yml
@@ -0,0 +1,4 @@
services:
webui:
volumes:
- ./open-webui/configs/config.tts.json:/app/configs/config.tts.json
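
Each compose.x.webui.<service>.yml above only mounts one extra JSON fragment into the webui container; the fragments are then merged at startup as sketched earlier. How Harbor picks which cross-files to include is not shown on this page, but "merging based on selected services" suggests matching the filename parts against the active service list, roughly (illustrative only):

from pathlib import Path

def cross_files_for(services: set[str]) -> list[Path]:
    # Keep compose.x.<a>.<b>...yml files whose name parts are all
    # within the selected service set.
    selected = []
    for path in sorted(Path(".").glob("compose.x.*.yml")):
        parts = set(path.name.removeprefix("compose.x.").removesuffix(".yml").split("."))
        if parts <= services:
            selected.append(path)
    return selected

for path in cross_files_for({"webui", "ollama", "searxng"}):
    print(path.name)
# compose.x.webui.ollama.yml
# compose.x.webui.searxng.ollama.yml
# compose.x.webui.searxng.yml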