diff --git a/.env b/.env
index 1f9c57d..5b6af69 100644
--- a/.env
+++ b/.env
@@ -30,28 +30,44 @@ HARBOR_OLLAMA_CACHE="~/.ollama"
 # These could be used by specific services,
 # in which case they can be set in a centralised
 # location like this.
-HARBOR_ANYSCALE_API_KEY=""
-HARBOR_APIPIE_API_KEY=""
-HARBOR_COHERE_API_KEY=""
+HARBOR_ANYSCALE_KEY=""
+HARBOR_APIPIE_KEY=""
+HARBOR_COHERE_KEY=""
 HARBOR_FIREWORKS_API_KEY=""
-HARBOR_GROQ_API_KEY=""
-HARBOR_HUGGINGFACE_TOKEN=""
-HARBOR_MISTRAL_API_KEY=""
+HARBOR_GROQ_KEY=""
+HARBOR_MISTRAL_KEY=""
 HARBOR_OPENROUTER_KEY=""
-HARBOR_PERPLEXITY_API_KEY=""
-HARBOR_SHUTTLEAI_API_KEY=""
-HARBOR_TOGETHERAI_API_KEY=""
-HARBOR_ANTHROPIC_API_KEY=""
+HARBOR_PERPLEXITY_KEY=""
+HARBOR_SHUTTLEAI_KEY=""
+HARBOR_TOGETHERAI_KEY=""
+HARBOR_ANTHROPIC_KEY=""
 HARBOR_BINGAI_TOKEN=""
 HARBOR_GOOGLE_KEY=""
-HARBOR_OPENAI_API_KEY=""
-HARBOR_ASSISTANTS_API_KEY=""
+HARBOR_ASSISTANTS_KEY=""
 HARBOR_UI_MAIN="webui"
-HARBOR_SERVICES_DEFAULT="ollama webui"
+HARBOR_UI_AUTOOPEN=false
+HARBOR_SERVICES_DEFAULT="ollama;webui"
+
+# OpenAI
+# ---------------------
+# In the context of Harbor, this means OpenAI API-compatible
+# services, such as Ollama, Llama.cpp, LiteLLM, etc.
+
+HARBOR_OPENAI_URLS=""
+HARBOR_OPENAI_KEYS=""
+
+# These variables are derived from the first item
+# of each list above
+HARBOR_OPENAI_KEY=""
+HARBOR_OPENAI_URL=""

 # webui
 HARBOR_WEBUI_HOST_PORT=33801
+# Persistent secret - user stays logged into
+# webui between restarts
+HARBOR_WEBUI_SECRET="h@rb0r"
+HARBOR_WEBUI_NAME="Harbor"
+HARBOR_WEBUI_LOG_LEVEL="INFO"

 # llamacpp
 HARBOR_LLAMACPP_HOST_PORT=33831
@@ -112,10 +128,10 @@ HARBOR_BIONICGPT_HOST_PORT=33901

 # vLLM
 HARBOR_VLLM_HOST_PORT=33911
-HARBOR_VLLM_MODEL="google/gemma-2-2b-it"
+HARBOR_VLLM_MODEL="microsoft/Phi-3-mini-4k-instruct"
 HARBOR_VLLM_EXTRA_ARGS=""
-HARBOR_VLLM_ATTENTION_BACKEND="FLASHINFER"
-HARBOR_VLLM_MODEL_SPECIFIER="--model google/gemma-2-2b-it"
+HARBOR_VLLM_ATTENTION_BACKEND="FLASH_ATTN"
+HARBOR_VLLM_MODEL_SPECIFIER="--model microsoft/Phi-3-mini-4k-instruct"

 # Aphrodite
 HARBOR_APHRODITE_HOST_PORT=33921
@@ -130,4 +146,4 @@ HARBOR_APHRODITE_MODEL="neuralmagic/Mistral-7B-Instruct-v0.3-GPTQ-4bit"
 # Open WebUI
 # See https://docs.openwebui.com/getting-started/env-configuration/ for reference.
 # --------------------------------------------
-WEBUI_NAME=Harbor
+# WEBUI_NAME=WUI
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 11a3264..525e0d0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,9 @@
 # Open WebUI
-open-webui/
-!open-webui/config.json
+open-webui/*
+open-webui/config.json
+
+!open-webui/configs/
+!open-webui/start_webui.sh

 # Ollama
 /ollama/ollama/
@@ -20,3 +23,6 @@ librechat/meili_data_v1.7/
 librechat/logs/
 librechat/vectordb/
 librechat/images/
+
+# Local scripts
+scripts/
\ No newline at end of file
diff --git a/README.md b/README.md
index c8ff8cf..6b9e2df 100644
--- a/README.md
+++ b/README.md
@@ -82,6 +82,8 @@ harbor eject searxng llamacpp > docker-compose.harbor.yml
 Read about supported services and the ways to configure them.
 - [Harbor Compose Setup](https://github.com/av/harbor/wiki/Harbor-Compose-Setup)
Read about the way Harbor uses Docker Compose to manage services. +- [Compatibility](https://github.com/av/harbor/wiki/Compatibility)
+ Known compatibility issues between the services and models as well as possible workarounds. ## Why? diff --git a/compose.librechat.yml b/compose.librechat.yml index eeed1e7..2c4c8e8 100644 --- a/compose.librechat.yml +++ b/compose.librechat.yml @@ -17,22 +17,22 @@ services: - MEILI_HOST=http://lcsearch:7700 - RAG_PORT=${HARBOR_LIBRECHAT_RAG_HOST_PORT} - RAG_API_URL=http://lcrag:${HARBOR_LIBRECHAT_RAG_HOST_PORT} - - ANYSCALE_API_KEY=${HARBOR_ANYSCALE_API_KEY:-user_provided} - - APIPIE_API_KEY=${HARBOR_APIPIE_API_KEY:-user_provided} - - COHERE_API_KEY=${HARBOR_COHERE_API_KEY:-user_provided} + - ANYSCALE_API_KEY=${HARBOR_ANYSCALE_KEY:-user_provided} + - APIPIE_API_KEY=${HARBOR_APIPIE_KEY:-user_provided} + - COHERE_API_KEY=${HARBOR_COHERE_KEY:-user_provided} - FIREWORKS_API_KEY=${HARBOR_FIREWORKS_API_KEY:-user_provided} - - GROQ_API_KEY=${HARBOR_GROQ_API_KEY:-user_provided} - - HUGGINGFACE_TOKEN=${HARBOR_HUGGINGFACE_TOKEN:-user_provided} - - MISTRAL_API_KEY=${HARBOR_MISTRAL_API_KEY:-user_provided} + - GROQ_API_KEY=${HARBOR_GROQ_KEY:-user_provided} + - HUGGINGFACE_TOKEN=${HARBOR_HF_HUB_TOKEN:-user_provided} + - MISTRAL_API_KEY=${HARBOR_MISTRAL_KEY:-user_provided} - OPENROUTER_KEY=${HARBOR_OPENROUTER_KEY:-user_provided} - - PERPLEXITY_API_KEY=${HARBOR_PERPLEXITY_API_KEY:-user_provided} - - SHUTTLEAI_API_KEY=${HARBOR_SHUTTLEAI_API_KEY:-user_provided} - - TOGETHERAI_API_KEY=${HARBOR_TOGETHERAI_API_KEY:-user_provided} - - ANTHROPIC_API_KEY=${HARBOR_ANTHROPIC_API_KEY:-user_provided} + - PERPLEXITY_API_KEY=${HARBOR_PERPLEXITY_KEY:-user_provided} + - SHUTTLEAI_API_KEY=${HARBOR_SHUTTLEAI_KEY:-user_provided} + - TOGETHERAI_API_KEY=${HARBOR_TOGETHERAI_KEY:-user_provided} + - ANTHROPIC_API_KEY=${HARBOR_ANTHROPIC_KEY:-user_provided} - BINGAI_TOKEN=${HARBOR_BINGAI_TOKEN:-user_provided} - GOOGLE_KEY=${HARBOR_GOOGLE_KEY:-user_provided} - - OPENAI_API_KEY=${HARBOR_OPENAI_API_KEY:-user_provided} - - ASSISTANTS_API_KEY=${HARBOR_ASSISTANTS_API_KEY:-user_provided} + - OPENAI_API_KEY=${HARBOR_OPENAI_KEY:-user_provided} + - ASSISTANTS_API_KEY=${HARBOR_ASSISTANTS_KEY:-user_provided} volumes: - type: bind source: ./librechat/.env diff --git a/compose.litellm.yml b/compose.litellm.yml index d371054..afd4b72 100644 --- a/compose.litellm.yml +++ b/compose.litellm.yml @@ -8,7 +8,7 @@ services: # LiteLLM is combined with other services - ./litellm/litellm.config.yaml:/app/litellm/config.yaml - ./litellm/start_litellm.sh:/app/litellm/start_litellm.sh - - ./shared/config_merger.py:/app/config_merger.py + - ./shared/yaml_config_merger.py:/app/yaml_config_merger.py # Note that this config is "assembled" from # the parts that implement relevant service # compatibility, such as vllm or tgi or langfuse. 
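
The `yaml_config_merger.py` mounted above (full source at the end of this diff) assembles `/app/proxy.yaml` by deep-merging every matching fragment in `/app/litellm`: mappings merge recursively, lists concatenate, and scalar values from later files win. A minimal sketch of that merge rule (the second fragment is a hypothetical stand-in for a TGI entry, not part of this diff):

```python
import yaml  # PyYAML

# Mirrors the merge rule of merge_dicts in shared/yaml_config_merger.py
# (written here without in-place list mutation): mappings merge
# recursively, lists concatenate, scalars are overwritten by later files.
def merge_dicts(d1, d2):
    result = d1.copy()
    for key, value in d2.items():
        if key in result and isinstance(result[key], dict) and isinstance(value, dict):
            result[key] = merge_dicts(result[key], value)
        elif key in result and isinstance(result[key], list) and isinstance(value, list):
            result[key] = result[key] + value
        else:
            result[key] = value
    return result

# Fragment from this diff (litellm/litellm.vllm.yaml).
vllm = yaml.safe_load("""
model_list:
  - model_name: vllm
    litellm_params:
      model: openai/microsoft/Phi-3-mini-4k-instruct
      api_base: http://vllm:8000/v1
""")

# Hypothetical second fragment, standing in for a TGI counterpart.
tgi = yaml.safe_load("""
model_list:
  - model_name: tgi
    litellm_params:
      model: huggingface/tgi
      api_base: http://tgi:80
""")

# model_list ends up with both entries - the "assembled" proxy config.
print(yaml.dump(merge_dicts(vllm, tgi), default_flow_style=False))
```

Concatenating `model_list` entries is what lets each enabled backend contribute its own models to the shared LiteLLM proxy config.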
diff --git a/compose.webui.yml b/compose.webui.yml
index 0cc47e3..68624c8 100644
--- a/compose.webui.yml
+++ b/compose.webui.yml
@@ -6,7 +6,15 @@ services:
     container_name: webui
     volumes:
       - ./open-webui:/app/backend/data
+      - ./open-webui/start_webui.sh:/app/start_webui.sh
+      - ./shared/json_config_merger.py:/app/json_config_merger.py
+      - ./open-webui/configs/config.override.json:/app/configs/config.override.json
+    entrypoint: ["/app/start_webui.sh"]
     ports:
       - ${HARBOR_WEBUI_HOST_PORT}:8080
     networks:
-      - harbor-network
\ No newline at end of file
+      - harbor-network
+    environment:
+      - WEBUI_NAME=${HARBOR_WEBUI_NAME}
+      - WEBUI_SECRET_KEY=${HARBOR_WEBUI_SECRET}
+      - GLOBAL_LOG_LEVEL=${HARBOR_WEBUI_LOG_LEVEL}
\ No newline at end of file
diff --git a/compose.x.webui.aphrodite.yml b/compose.x.webui.aphrodite.yml
new file mode 100644
index 0000000..5a445c7
--- /dev/null
+++ b/compose.x.webui.aphrodite.yml
@@ -0,0 +1,4 @@
+services:
+  webui:
+    volumes:
+      - ./open-webui/configs/config.aphrodite.json:/app/configs/config.aphrodite.json
\ No newline at end of file
diff --git a/compose.x.webui.comfyui.yml b/compose.x.webui.comfyui.yml
new file mode 100644
index 0000000..e735251
--- /dev/null
+++ b/compose.x.webui.comfyui.yml
@@ -0,0 +1,4 @@
+services:
+  webui:
+    volumes:
+      - ./open-webui/configs/config.comfyui.json:/app/configs/config.comfyui.json
\ No newline at end of file
diff --git a/compose.x.webui.litellm.yml b/compose.x.webui.litellm.yml
new file mode 100644
index 0000000..6706e1b
--- /dev/null
+++ b/compose.x.webui.litellm.yml
@@ -0,0 +1,4 @@
+services:
+  webui:
+    volumes:
+      - ./open-webui/configs/config.litellm.json:/app/configs/config.litellm.json
\ No newline at end of file
diff --git a/compose.x.webui.llamacpp.yml b/compose.x.webui.llamacpp.yml
new file mode 100644
index 0000000..577ac9e
--- /dev/null
+++ b/compose.x.webui.llamacpp.yml
@@ -0,0 +1,4 @@
+services:
+  webui:
+    volumes:
+      - ./open-webui/configs/config.llamacpp.json:/app/configs/config.llamacpp.json
\ No newline at end of file
diff --git a/compose.x.webui.ollama.yml b/compose.x.webui.ollama.yml
new file mode 100644
index 0000000..f9a96df
--- /dev/null
+++ b/compose.x.webui.ollama.yml
@@ -0,0 +1,4 @@
+services:
+  webui:
+    volumes:
+      - ./open-webui/configs/config.ollama.json:/app/configs/config.ollama.json
\ No newline at end of file
diff --git a/compose.x.webui.searxng.ollama.yml b/compose.x.webui.searxng.ollama.yml
new file mode 100644
index 0000000..ebdc8b5
--- /dev/null
+++ b/compose.x.webui.searxng.ollama.yml
@@ -0,0 +1,4 @@
+services:
+  webui:
+    volumes:
+      - ./open-webui/configs/config.x.searxng.ollama.json:/app/configs/config.x.searxng.ollama.json
\ No newline at end of file
diff --git a/compose.x.webui.searxng.yml b/compose.x.webui.searxng.yml
new file mode 100644
index 0000000..bb34de3
--- /dev/null
+++ b/compose.x.webui.searxng.yml
@@ -0,0 +1,4 @@
+services:
+  webui:
+    volumes:
+      - ./open-webui/configs/config.searxng.json:/app/configs/config.searxng.json
\ No newline at end of file
diff --git a/compose.x.webui.tts.yml b/compose.x.webui.tts.yml
new file mode 100644
index 0000000..00a88e4
--- /dev/null
+++ b/compose.x.webui.tts.yml
@@ -0,0 +1,4 @@
+services:
+  webui:
+    volumes:
+      - ./open-webui/configs/config.tts.json:/app/configs/config.tts.json
\ No newline at end of file
diff --git a/harbor.sh b/harbor.sh
index 437092e..7fa320a 100755
--- a/harbor.sh
+++ b/harbor.sh
@@ -4,6 +4,58 @@
 # == Functions
 # ========================================================================

+show_version() {
+  echo "Harbor CLI version: $version"
+}
+
+show_help() {
+  show_version
+  echo "Usage: $0 <command> [options]"
+  echo
+  echo "Compose Setup Commands:"
+  echo "  up          - Start the containers"
+  echo "  down        - Stop and remove the containers"
+  echo "  ps          - List the running containers"
+  echo "  logs        - View the logs of the containers"
+  echo "  exec        - Execute a command in a running service"
+  echo "  pull        - Pull the latest images"
+  echo "  dive        - Run the Dive CLI to inspect Docker images"
+  echo
+  echo "Setup Management Commands:"
+  echo "  ollama      - Run Harbor's Ollama CLI. Ollama service should be running"
+  echo "  smi         - Show NVIDIA GPU information"
+  echo "  top         - Run nvtop to monitor GPU usage"
+  echo "  llamacpp    - Configure llamacpp service"
+  echo "  tgi         - Configure text-generation-inference service"
+  echo "  litellm     - Configure LiteLLM service"
+  echo "  openai      - Configure OpenAI API keys and URLs"
+  echo "  vllm        - Configure VLLM service"
+  echo "  aphrodite   - Configure Aphrodite service"
+  echo
+  echo "Huggingface CLI:"
+  echo "  hf          - Run Harbor's Huggingface CLI. Expanded with a few additional commands."
+  echo "  hf parse-url - Parse file URL from Hugging Face"
+  echo "  hf token    - Get/set the Hugging Face Hub token"
+  echo
+  echo "Harbor CLI Commands:"
+  echo "  open        - Open a service in the default browser"
+  echo "  url         - Get the URL for a service"
+  echo "  config      - Manage the Harbor environment configuration"
+  echo "  ln          - Create a symbolic link to the CLI"
+  echo "  eject       - Eject the Compose configuration, accepts same options as 'up'"
+  echo "  defaults [ls]  - List default services"
+  echo "  defaults rm    - Remove default services; no argument removes all, also accepts a handle or an index"
+  echo "  defaults add   - Add a default service"
+  echo "  help        - Show this help message"
+  echo "  version     - Show the CLI version"
+  echo "  gum         - Run the Gum terminal commands"
+  echo "  fixfs       - Fix file system ACLs for service volumes"
+  echo "  info        - Show system information for debug/issues"
+  echo
+  echo "Options:"
+  echo "  Additional options to pass to the compose_with_options function"
+}
+
 compose_with_options() {
   local base_dir="$PWD"
   local compose_files=("$base_dir/compose.yml") # Always include the base compose file
@@ -98,51 +150,6 @@ compose_with_options() {
   echo "$cmd"
 }

-show_version() {
-  echo "Harbor CLI version: $version"
-}
-
-show_help() {
-  show_version
-  echo "Usage: $0 <command> [options]"
-  echo
-  echo "Compose Setup Commands:"
-  echo "  up       - Start the containers"
-  echo "  down     - Stop and remove the containers"
-  echo "  ps       - List the running containers"
-  echo "  logs     - View the logs of the containers"
-  echo "  exec     - Execute a command in a running service"
-  echo "  pull     - Pull the latest images"
-  echo
-  echo "Setup Management Commands:"
-  echo "  ollama   - Run the Harbor's Ollama CLI. Ollama service should be running"
-  echo "  smi      - Show NVIDIA GPU information"
-  echo "  top      - Run nvtop to monitor GPU usage"
-  echo "  llamacpp - Configure llamacpp service"
-  echo "  tgi      - Configure text-generation-inference service"
-  echo "  litellm  - Configure LiteLLM service"
-  echo
-  echo "Huggingface CLI:"
-  echo "  hf       - Run the Harbor's Huggingface CLI. Expanded with a few additional commands."
- echo " hf parse-url - Parse file URL from Hugging Face" - echo " hf token - Get/set the Hugging Face Hub token" - echo - echo "Harbor CLI Commands:" - echo " open - Open a service in the default browser" - echo " url - Get the URL for a service" - echo " config - Manage the Harbor environment configuration" - echo " ln - Create a symbolic link to the CLI" - echo " eject - Eject the Compose configuration, accepts same options as 'up'" - echo " defaults - Show the default services" - echo " help - Show this help message" - echo " version - Show the CLI version" - echo " gum - Run the Gum terminal commands" - echo " fixfs - Fix file system ACLs for service volumes" - echo - echo "Options:" - echo " Additional options to pass to the compose_with_options function" -} - run_hf_cli() { case "$1" in parse-url) @@ -166,11 +173,9 @@ run_gum() { docker run --rm -it -e "TERM=xterm-256color" $gum_image $@ } -show_default_services() { - echo "Default services:" - for service in "${default_options[@]}"; do - echo " - $service" - done +run_dive() { + local dive_image=wagoodman/dive + docker run --rm -it -v /var/run/docker.sock:/var/run/docker.sock $dive_image $@ } link_cli() { @@ -233,6 +238,10 @@ get_service_url() { echo "$url" } +sys_info() { + docker info +} + sys_open() { url=$1 @@ -297,6 +306,10 @@ exec_ollama() { run_in_service ollama ollama "$@" } +# ======================================================================== +# == Env Manager +# ======================================================================== + env_manager() { local env_file=".env" local prefix="HARBOR_" @@ -330,16 +343,6 @@ env_manager() { shift 2 # Remove 'set' and the key from the arguments local value="$*" # Capture all remaining arguments as the value - if [[ "$upper_key" == "LLAMACPP_MODEL" ]]; then - local transformed_value=$(transform_llamacpp_model "$value") - if grep -q "^${prefix}LLAMACPP_MODEL_SPECIFIER=" "$env_file"; then - sed -i "s|^${prefix}LLAMACPP_MODEL_SPECIFIER=.*|${prefix}LLAMACPP_MODEL_SPECIFIER=\"$transformed_value\"|" "$env_file" - else - echo "${prefix}LLAMACPP_MODEL_SPECIFIER=\"$transformed_value\"" >> "$env_file" - fi - echo "Set ${prefix}LLAMACPP_MODEL_SPECIFIER to: \"$transformed_value\"" - fi - if grep -q "^$prefix$upper_key=" "$env_file"; then sed -i "s|^$prefix$upper_key=.*|$prefix$upper_key=\"$value\"|" "$env_file" else @@ -391,6 +394,134 @@ env_manager_alias() { fi } +env_manager_arr() { + local field=$1 + shift + local delimiter=";" + local get_command="" + local set_command="" + local add_command="" + local remove_command="" + + # Parse optional hook commands + while [[ "$1" == --* ]]; do + case "$1" in + --on-get) + get_command="$2" + shift 2 + ;; + --on-set) + set_command="$2" + shift 2 + ;; + --on-add) + add_command="$2" + shift 2 + ;; + --on-remove) + remove_command="$2" + shift 2 + ;; + esac + done + + local action=$1 + local value=$2 + + # Helper function to get the current array + get_array() { + local array_string=$(env_manager get "$field") + echo "$array_string" + } + + # Helper function to set the array + set_array() { + local new_array=$1 + env_manager set "$field" "$new_array" + if [ -n "$set_command" ]; then + eval "$set_command" + fi + } + + case "$action" in + ls|"") + # Show all values + local array=$(get_array) + if [ -z "$array" ]; then + echo "Config $field is empty" + else + echo "$array" | tr "$delimiter" "\n" + fi + if [ -n "$get_command" ]; then + eval "$get_command" + fi + ;; + rm) + if [ -z "$value" ]; then + # Remove all values + set_array "" + echo "All values 
removed from $field"
+      else
+        # Remove one value
+        local array=$(get_array)
+        if [ "$value" -eq "$value" ] 2>/dev/null; then
+          # If value is a number, treat it as an index
+          local new_array=$(echo "$array" | awk -F"$delimiter" -v idx="$value" '{
+            OFS=FS;
+            for(i=1;i<=NF;i++) {
+              if(i-1 != idx) {
+                a[++n] = $i
+              }
+            }
+            for(i=1;i<=n;i++) {
+              printf("%s%s", a[i], (i==n)?"":OFS)
+            }
+          }')
+        else
+          # Otherwise, treat it as a value to be removed
+          local new_array=$(echo "$array" | awk -F"$delimiter" -v val="$value" '{
+            OFS=FS;
+            for(i=1;i<=NF;i++) {
+              if($i != val) {
+                a[++n] = $i
+              }
+            }
+            for(i=1;i<=n;i++) {
+              printf("%s%s", a[i], (i==n)?"":OFS)
+            }
+          }')
+        fi
+        set_array "$new_array"
+        echo "Value removed from $field"
+      fi
+      if [ -n "$remove_command" ]; then
+        eval "$remove_command"
+      fi
+      ;;
+    add)
+      if [ -z "$value" ]; then
+        echo "Usage: env_manager_arr $field add <value>"
+        return 1
+      fi
+      local array=$(get_array)
+      if [ -z "$array" ]; then
+        new_array="$value"
+      else
+        new_array="${array}${delimiter}${value}"
+      fi
+      set_array "$new_array"
+      echo "Value added to $field"
+      if [ -n "$add_command" ]; then
+        eval "$add_command"
+      fi
+      ;;
+    *)
+      echo "Usage: env_manager_arr <field> [--on-get <command>] [--on-set <command>] [--on-add <command>] [--on-remove <command>] {ls|rm|add} [value]"
+      return 1
+      ;;
+  esac
+}
+
 override_yaml_value() {
   local file="$1"
   local key="$2"
@@ -442,10 +573,21 @@ hf_url_2_llama_spec() {
 }

 run_llamacpp_command() {
+  update_model_spec() {
+    local spec=""
+    local current_model=$(env_manager get llamacpp.model)
+
+    if [ -n "$current_model" ]; then
+      spec=$(hf_url_2_llama_spec $current_model)
+    fi
+
+    env_manager set llamacpp.model.specifier "$spec"
+  }
+
   case "$1" in
     model)
       shift
-      env_manager_alias llamacpp.model $@
+      env_manager_alias llamacpp.model --on-set update_model_spec $@
      ;;
    args)
      shift
@@ -530,6 +672,7 @@ fix_fs_acl() {
   docker_fsacl ./open-webui
   docker_fsacl ./tts
   docker_fsacl ./librechat
+  docker_fsacl ./searxng
 }

 run_litellm_command() {
@@ -629,6 +772,66 @@ run_aphrodite_command() {
   esac
 }

+run_open_ai_command() {
+  update_main_key() {
+    local key=$(env_manager get openai.keys | cut -d";" -f1)
+    env_manager set openai.key "$key"
+  }
+
+  update_main_url() {
+    local url=$(env_manager get openai.urls | cut -d";" -f1)
+    env_manager set openai.url "$url"
+  }
+
+  case "$1" in
+    keys)
+      shift
+      env_manager_arr openai.keys --on-set update_main_key $@
+      ;;
+    urls)
+      shift
+      env_manager_arr openai.urls --on-set update_main_url $@
+      ;;
+    *)
+      echo "Please note that this is not an OpenAI CLI, but a Harbor CLI to manage OpenAI configuration."
+      echo
+      echo "Usage: harbor openai <command>"
+      echo
+      echo "Commands:"
+      echo "  harbor openai keys [ls|rm|add] - Get/set the API Keys for the OpenAI-compatible APIs."
+      echo "  harbor openai urls [ls|rm|add] - Get/set the API URLs for the OpenAI-compatible APIs."
+      ;;
+  esac
+}
+
+run_webui_command() {
+  case "$1" in
+    secret)
+      shift
+      env_manager_alias webui.secret $@
+      ;;
+    name)
+      shift
+      env_manager_alias webui.name $@
+      ;;
+    log)
+      shift
+      env_manager_alias webui.log.level $@
+      ;;
+    *)
+      echo "Please note that this is not a WebUI CLI, but a Harbor CLI to manage the WebUI service."
+      echo
+      echo "Usage: harbor webui <command>"
+      echo
+      echo "Commands:"
+      echo "  harbor webui secret [secret] - Get/set WebUI JWT Secret"
+      echo "  harbor webui name [name] - Get/set the name WebUI will present"
+      echo "  harbor webui log [level] - Get/set WebUI log level"
+      ;;
+  esac
+
+}
+
 # ========================================================================
 # == Main script
@@ -638,16 +841,23 @@ version="0.0.10"
 delimiter="|"

 harbor_home=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
+original_dir=$PWD
+
 cd $harbor_home

-default_options=($(env_manager get services.default))
+default_options=($(env_manager get services.default | tr ';' ' '))
 default_open=$(env_manager get ui.main)
+default_autoopen=$(env_manager get ui.autoopen)

 # Main script logic
 case "$1" in
   up)
     shift
     $(compose_with_options "$@") up -d
+
+    if [ "$default_autoopen" = "true" ]; then
+      open_service $default_open
+    fi
     ;;
   down)
     shift
@@ -689,7 +899,7 @@ case "$1" in
     ;;
   defaults)
     shift
-    env_manager_alias services.default $@
+    env_manager_arr services.default $@
     ;;
   ln)
     shift
@@ -715,6 +925,10 @@ case "$1" in
     shift
     nvidia_top
     ;;
+  dive)
+    shift
+    run_dive $@
+    ;;
   eject)
     shift
     eject $@
@@ -743,6 +957,14 @@ case "$1" in
     shift
     run_aphrodite_command $@
     ;;
+  openai)
+    shift
+    run_open_ai_command $@
+    ;;
+  webui)
+    shift
+    run_webui_command $@
+    ;;
   config)
     shift
     env_manager $@
@@ -755,9 +977,13 @@ case "$1" in
     shift
     fix_fs_acl
     ;;
+  info)
+    shift
+    sys_info
+    ;;
   *)
     echo "Unknown command: $1"
     show_help
     exit 1
     ;;
-esac
\ No newline at end of file
+esac
diff --git a/litellm/litellm.vllm.yaml b/litellm/litellm.vllm.yaml
index 31cfee7..0e4b80f 100644
--- a/litellm/litellm.vllm.yaml
+++ b/litellm/litellm.vllm.yaml
@@ -1,6 +1,6 @@
 model_list:
   - model_name: vllm
     litellm_params:
-      model: openai/google/gemma-2-2b-it
+      model: openai/microsoft/Phi-3-mini-4k-instruct
       api_base: http://vllm:8000/v1
       api_key: "---"
diff --git a/litellm/start_litellm.sh b/litellm/start_litellm.sh
index 9a1074c..2c07284 100755
--- a/litellm/start_litellm.sh
+++ b/litellm/start_litellm.sh
@@ -7,7 +7,7 @@ echo "Harbor: Custom LiteLLM Entrypoint"
 python --version

 echo "YAML Merger is starting..."
-python /app/config_merger.py --pattern ".yaml" --output "/app/proxy.yaml" --directory "/app/litellm" +python /app/yaml_config_merger.py --pattern ".yaml" --output "/app/proxy.yaml" --directory "/app/litellm" echo "Merged Configs:" cat /app/proxy.yaml diff --git a/open-webui/config.json b/open-webui/config.json index 47be6c6..51b5c1c 100644 --- a/open-webui/config.json +++ b/open-webui/config.json @@ -1,65 +1,22 @@ { - "rag": { - "pdf_extract_images": false, - "youtube_loader_language": [ - "en" - ], - "enable_web_loader_ssl_verification": null, - "template": "Use the following context as your learned knowledge, inside XML tags.\n\n [context]\n\n\nWhen answer to user:\n- If you don't know, just say that you don't know.\n- If you don't know when you are not sure, ask for clarification.\nAvoid mentioning that you obtained the information from the context.\nAnd answer according to the language of the user's question.\n\nGiven the context information, answer the query.\nQuery: [query]", - "top_k": 8, - "relevance_threshold": 0.0, - "enable_hybrid_search": true, - "reranking_model": "", - "embedding_engine": "ollama", - "embedding_model": "mxbai-embed-large:latest", - "chunk_size": 1500, - "chunk_overlap": 100 - }, - "ollama": { - "base_urls": [ - "http://ollama:11434" - ] - }, - "openai": { - "api_base_urls": [ - "http://llamacpp:8080/v1", - "http://litellm:4000/v1", - "http://aphrodite:7860/v1" - ], - "api_keys": [ - "", - "sk-litellm", - "" - ], - "enabled": true - }, - "image_generation": { - "engine": "comfyui", - "enable": true, - "model": "Juggernaut_X_RunDiffusion_Hyper.safetensors", - "size": "512x512", - "steps": 50, - "comfyui": { - "base_url": "http://comfyui:8188" - } - }, - "audio": { - "tts": { - "openai": { - "api_base_url": "http://tts:8000/v1", - "api_key": "123" - }, - "engine": "openai", - "model": "tts-1", - "voice": "alloy" - }, - "stt": { - "openai": { - "api_base_url": "https://api.openai.com/v1", - "api_key": "" - }, - "engine": "", - "model": "whisper-1" - } - } + "ollama": { + "base_urls": [ + "http://ollama:11434" + ] + }, + "audio": { + "tts": { + "openai": { + "api_base_url": "http://tts:8000/v1", + "api_key": "sk-dummy-key" + }, + "engine": "openai", + "model": "tts-1", + "voice": "alloy" + }, + "stt": { + "engine": "", + "model": "whisper-1" + } + } } \ No newline at end of file diff --git a/open-webui/configs/config.aphrodite.json b/open-webui/configs/config.aphrodite.json new file mode 100644 index 0000000..817a95a --- /dev/null +++ b/open-webui/configs/config.aphrodite.json @@ -0,0 +1,11 @@ +{ + "openai": { + "api_base_urls": [ + "http://aphrodite:7860/v1" + ], + "api_keys": [ + "" + ], + "enabled": true + } +} \ No newline at end of file diff --git a/open-webui/configs/config.comfyui.json b/open-webui/configs/config.comfyui.json new file mode 100644 index 0000000..17d49ed --- /dev/null +++ b/open-webui/configs/config.comfyui.json @@ -0,0 +1,12 @@ +{ + "image_generation": { + "engine": "comfyui", + "enable": true, + "model": "Juggernaut_X_RunDiffusion_Hyper.safetensors", + "size": "512x512", + "steps": 50, + "comfyui": { + "base_url": "http://comfyui:8188" + } + } +} \ No newline at end of file diff --git a/open-webui/configs/config.json b/open-webui/configs/config.json new file mode 100644 index 0000000..9e26dfe --- /dev/null +++ b/open-webui/configs/config.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/open-webui/configs/config.litellm.json b/open-webui/configs/config.litellm.json new file mode 100644 index 0000000..c16c181 --- /dev/null +++ 
b/open-webui/configs/config.litellm.json @@ -0,0 +1,11 @@ +{ + "openai": { + "api_base_urls": [ + "http://litellm:4000/v1" + ], + "api_keys": [ + "sk-litellm" + ], + "enabled": true + } +} \ No newline at end of file diff --git a/open-webui/configs/config.llamacpp.json b/open-webui/configs/config.llamacpp.json new file mode 100644 index 0000000..2beaa91 --- /dev/null +++ b/open-webui/configs/config.llamacpp.json @@ -0,0 +1,11 @@ +{ + "openai": { + "api_base_urls": [ + "http://llamacpp:8080/v1" + ], + "api_keys": [ + "" + ], + "enabled": true + } +} \ No newline at end of file diff --git a/open-webui/configs/config.ollama.json b/open-webui/configs/config.ollama.json new file mode 100644 index 0000000..3cf92b2 --- /dev/null +++ b/open-webui/configs/config.ollama.json @@ -0,0 +1,7 @@ +{ + "ollama": { + "base_urls": [ + "http://ollama:11434" + ] + } +} \ No newline at end of file diff --git a/open-webui/configs/config.override.json b/open-webui/configs/config.override.json new file mode 100644 index 0000000..3dc6a2d --- /dev/null +++ b/open-webui/configs/config.override.json @@ -0,0 +1,11 @@ +{ + "openai": { + "api_base_urls": [ + "${...HARBOR_OPENAI_URLS}" + ], + "api_keys": [ + "${...HARBOR_OPENAI_KEYS}" + ], + "enabled": true + } +} \ No newline at end of file diff --git a/open-webui/configs/config.searxng.json b/open-webui/configs/config.searxng.json new file mode 100644 index 0000000..17c54d8 --- /dev/null +++ b/open-webui/configs/config.searxng.json @@ -0,0 +1,16 @@ +{ + "rag": { + "pdf_extract_images": false, + "youtube_loader_language": [ + "en" + ], + "enable_web_loader_ssl_verification": null, + "template": "Use the following context as your learned knowledge, inside XML tags.\n\n [context]\n\n\nWhen answer to user:\n- If you don't know, just say that you don't know.\n- If you don't know when you are not sure, ask for clarification.\nAvoid mentioning that you obtained the information from the context.\nAnd answer according to the language of the user's question.\n\nGiven the context information, answer the query.\nQuery: [query]", + "top_k": 8, + "relevance_threshold": 0.0, + "enable_hybrid_search": true, + "reranking_model": "", + "chunk_size": 1500, + "chunk_overlap": 100 + } +} \ No newline at end of file diff --git a/open-webui/configs/config.tts.json b/open-webui/configs/config.tts.json new file mode 100644 index 0000000..853d456 --- /dev/null +++ b/open-webui/configs/config.tts.json @@ -0,0 +1,17 @@ +{ + "audio": { + "tts": { + "openai": { + "api_base_url": "http://tts:8000/v1", + "api_key": "sk-dummy-key" + }, + "engine": "openai", + "model": "tts-1", + "voice": "alloy" + }, + "stt": { + "engine": "", + "model": "whisper-1" + } + } +} \ No newline at end of file diff --git a/open-webui/configs/config.x.searxng.ollama.json b/open-webui/configs/config.x.searxng.ollama.json new file mode 100644 index 0000000..f35d9a4 --- /dev/null +++ b/open-webui/configs/config.x.searxng.ollama.json @@ -0,0 +1,6 @@ +{ + "rag": { + "embedding_engine": "ollama", + "embedding_model": "mxbai-embed-large:latest" + } +} \ No newline at end of file diff --git a/open-webui/start_webui.sh b/open-webui/start_webui.sh new file mode 100755 index 0000000..4e76fc4 --- /dev/null +++ b/open-webui/start_webui.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +echo "Harbor: Custom Open WebUI Entrypoint" +python --version + +echo "JSON Merger is starting..." 
+python /app/json_config_merger.py --pattern ".json" --output "/app/backend/data/config.json" --directory "/app/configs" + +echo "Merged Configs:" +cat /app/backend/data/config.json + +echo +echo "Starting Open WebUI..." +# Original entrypoint +bash start.sh \ No newline at end of file diff --git a/shared/config_merger.py b/shared/config_merger.py deleted file mode 100644 index a8fcbcd..0000000 --- a/shared/config_merger.py +++ /dev/null @@ -1,48 +0,0 @@ -import os -import yaml -import argparse - -def read_yaml(file_path): - with open(file_path, 'r') as file: - return yaml.safe_load(file) - -def write_yaml(data, file_path): - with open(file_path, 'w') as file: - yaml.dump(data, file, default_flow_style=False) - -def merge_yaml_files(directory, pattern, output_file): - merged_data = {} - - for filename in os.listdir(directory): - if filename.endswith(pattern): - file_path = os.path.join(directory, filename) - yaml_data = read_yaml(file_path) - - # Merge the data - for key, value in yaml_data.items(): - if key in merged_data: - if isinstance(merged_data[key], dict) and isinstance(value, dict): - merged_data[key].update(value) - elif isinstance(merged_data[key], list) and isinstance(value, list): - merged_data[key].extend(value) - else: - merged_data[key] = value - else: - merged_data[key] = value - - # Write the merged data to the output file - write_yaml(merged_data, output_file) - -def main(): - parser = argparse.ArgumentParser(description='Merge YAML files in a directory.') - parser.add_argument('--pattern', default='.yaml', help='File pattern to match (default: .yaml)') - parser.add_argument('--output', default='merged_output.yaml', help='Output file name (default: merged_output.yaml)') - parser.add_argument('--directory', default='.', help='Directory to search for YAML files (default: current directory)') - - args = parser.parse_args() - - merge_yaml_files(args.directory, args.pattern, args.output) - print(f"Merged YAML files matching '{args.pattern}' into '{args.output}'") - -if __name__ == '__main__': - main() \ No newline at end of file diff --git a/shared/json_config_merger.py b/shared/json_config_merger.py new file mode 100644 index 0000000..585b50f --- /dev/null +++ b/shared/json_config_merger.py @@ -0,0 +1,125 @@ +import os +import json +import argparse +import re + +def read_json(file_path): + with open(file_path, 'r') as file: + return json.load(file) + +def write_json(data, file_path): + with open(file_path, 'w') as file: + json.dump(data, file, indent=2) + +def render_env_vars(value): + def is_section_enabled(section): + pattern = r'\$\{\.\.\.([^}]+)\}|\$\{([^}]+)\}|\$([a-zA-Z_][a-zA-Z0-9_]*)' + env_vars = re.findall(pattern, json.dumps(section)) + if not env_vars: + return True # Always include sections without env vars + return any(os.environ.get(var[0] or var[1] or var[2]) for var in env_vars) + + if isinstance(value, str): + if not value: # Return empty string as is + return value + pattern = r'\$\{\.\.\.([^}]+)\}|\$\{([^}]+)\}|\$([a-zA-Z_][a-zA-Z0-9_]*)' + + def replace_env_var(match): + spread_var = match.group(1) + normal_var = match.group(2) or match.group(3) + + if spread_var: + env_value = os.environ.get(spread_var, '') + return [v.strip() for v in env_value.split(';') if v.strip()] + else: + return os.environ.get(normal_var, match.group(0)) + + parts = re.split(pattern, value) + result = [] + for i, part in enumerate(parts): + if i % 4 == 0: # Normal text + if part: + result.append(part) + elif i % 4 == 1: # Spread variable + if part: + env_value = 
os.environ.get(part, '') + result.extend([v.strip() for v in env_value.split(';') if v.strip()]) + else: # Normal variable + if part: + env_value = os.environ.get(part, f'${{{part}}}') + if env_value: + result.append(env_value) + + if not result: # Return empty string if result is empty + return value + if len(result) == 1 and isinstance(result[0], str): + return result[0] + return result + elif isinstance(value, list): + flattened = [] + for item in value: + rendered_item = render_env_vars(item) + if isinstance(rendered_item, list): + flattened.extend(rendered_item) + else: + flattened.append(rendered_item) + return flattened + elif isinstance(value, dict): + rendered_dict = {} + for k, v in value.items(): + if isinstance(v, dict) and not is_section_enabled(v): + continue + rendered_value = render_env_vars(v) + rendered_dict[k] = rendered_value + return rendered_dict + else: + return value + +def merge_dicts(dict1, dict2): + """ + Recursively merge two dictionaries. + Lists are combined, dictionaries are recursively merged, other values are overwritten. + """ + result = dict1.copy() + for key, value in dict2.items(): + if key in result: + if isinstance(result[key], dict) and isinstance(value, dict): + result[key] = merge_dicts(result[key], value) + elif isinstance(result[key], list) and isinstance(value, list): + result[key].extend(value) + else: + result[key] = value + else: + result[key] = value + return result + +def merge_json_files(directory, pattern, output_file): + merged_data = {} + + for filename in os.listdir(directory): + if filename.endswith(pattern): + file_path = os.path.join(directory, filename) + json_data = read_json(file_path) + + # Render environment variables + json_data = render_env_vars(json_data) + + # Merge the data + merged_data = merge_dicts(merged_data, json_data) + + # Write the merged data to the output file + write_json(merged_data, output_file) + +def main(): + parser = argparse.ArgumentParser(description='Merge JSON files in a directory and render environment variables.') + parser.add_argument('--pattern', default='.json', help='File pattern to match (default: .json)') + parser.add_argument('--output', default='merged_output.json', help='Output file name (default: merged_output.json)') + parser.add_argument('--directory', default='.', help='Directory to search for JSON files (default: current directory)') + + args = parser.parse_args() + + merge_json_files(args.directory, args.pattern, args.output) + print(f"Merged JSON files matching '{args.pattern}' into '{args.output}' with environment variables rendered") + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/shared/yaml_config_merger.py b/shared/yaml_config_merger.py new file mode 100644 index 0000000..a69f40a --- /dev/null +++ b/shared/yaml_config_merger.py @@ -0,0 +1,77 @@ +import os +import yaml +import argparse +import re + +def read_yaml(file_path): + with open(file_path, 'r') as file: + return yaml.safe_load(file) + +def write_yaml(data, file_path): + with open(file_path, 'w') as file: + yaml.dump(data, file, default_flow_style=False) + +def render_env_vars(value): + if isinstance(value, str): + pattern = r'\$\{([^}]+)\}|\$([a-zA-Z_][a-zA-Z0-9_]*)' + + def replace_env_var(match): + var_name = match.group(1) or match.group(2) + return os.environ.get(var_name, match.group(0)) + + return re.sub(pattern, replace_env_var, value) + elif isinstance(value, list): + return [render_env_vars(item) for item in value] + elif isinstance(value, dict): + return {k: render_env_vars(v) for 
k, v in value.items()} + else: + return value + +def merge_dicts(dict1, dict2): + """ + Recursively merge two dictionaries. + Lists are combined, dictionaries are recursively merged, other values are overwritten. + """ + result = dict1.copy() + for key, value in dict2.items(): + if key in result: + if isinstance(result[key], dict) and isinstance(value, dict): + result[key] = merge_dicts(result[key], value) + elif isinstance(result[key], list) and isinstance(value, list): + result[key].extend(value) + else: + result[key] = value + else: + result[key] = value + return result + +def merge_yaml_files(directory, pattern, output_file): + merged_data = {} + + for filename in os.listdir(directory): + if filename.endswith(pattern): + file_path = os.path.join(directory, filename) + yaml_data = read_yaml(file_path) + + # Render environment variables + yaml_data = render_env_vars(yaml_data) + + # Merge the data + merged_data = merge_dicts(merged_data, yaml_data) + + # Write the merged data to the output file + write_yaml(merged_data, output_file) + +def main(): + parser = argparse.ArgumentParser(description='Merge YAML files in a directory and render environment variables.') + parser.add_argument('--pattern', default='.yaml', help='File pattern to match (default: .yaml)') + parser.add_argument('--output', default='merged_output.yaml', help='Output file name (default: merged_output.yaml)') + parser.add_argument('--directory', default='.', help='Directory to search for YAML files (default: current directory)') + + args = parser.parse_args() + + merge_yaml_files(args.directory, args.pattern, args.output) + print(f"Merged YAML files matching '{args.pattern}' into '{args.output}' with environment variables rendered") + +if __name__ == '__main__': + main() \ No newline at end of file
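
For the `${...VAR}` spread syntax used by `config.override.json` above: `render_env_vars` in `json_config_merger.py` splits the referenced variable on `;` and splices the items into the enclosing list, and drops a config section entirely when none of the variables it references are set. A small usage sketch, assuming it runs from the repo root so `shared/` is importable; the URL and key values are examples only:

```python
import os
import sys

sys.path.insert(0, "shared")  # assumes this runs from the repo root
from json_config_merger import render_env_vars

# Example values - in Harbor these come from .env, managed via
# `harbor openai urls add ...` and `harbor openai keys add ...`.
os.environ["HARBOR_OPENAI_URLS"] = "http://llamacpp:8080/v1;http://litellm:4000/v1"
os.environ["HARBOR_OPENAI_KEYS"] = "sk-one;sk-two"

# Same shape as open-webui/configs/config.override.json; if neither
# variable were set, the whole "openai" section would be dropped.
override = {
    "openai": {
        "api_base_urls": ["${...HARBOR_OPENAI_URLS}"],
        "api_keys": ["${...HARBOR_OPENAI_KEYS}"],
        "enabled": True,
    }
}

print(render_env_vars(override))
# {'openai': {'api_base_urls': ['http://llamacpp:8080/v1',
#   'http://litellm:4000/v1'], 'api_keys': ['sk-one', 'sk-two'],
#   'enabled': True}}
```

This is how the single `HARBOR_OPENAI_URLS`/`HARBOR_OPENAI_KEYS` pair from `.env` fans out into Open WebUI's `api_base_urls`/`api_keys` arrays when `start_webui.sh` merges the configs at container start.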