feat: multi-backend for opint, fixing webui configs without the api keys
av committed Aug 4, 2024
1 parent e417b41 commit ebef7bb
Showing 14 changed files with 89 additions and 4 deletions.
7 changes: 7 additions & 0 deletions compose.x.opint.aphrodite.yml
@@ -0,0 +1,7 @@
+services:
+  opint:
+    entrypoint: >
+      interpreter
+      --api_base http://aphrodite:7860/v1
+      --api_key sk-aphrodite
+      ${HARBOR_OPINT_CMD}
7 changes: 7 additions & 0 deletions compose.x.opint.litellm.yml
@@ -0,0 +1,7 @@
+services:
+  opint:
+    entrypoint: >
+      interpreter
+      --api_base http://litellm:4000/v1
+      --api_key ${HARBOR_LITELLM_MASTER_KEY}
+      ${HARBOR_OPINT_CMD}
7 changes: 7 additions & 0 deletions compose.x.opint.llamacpp.yml
@@ -0,0 +1,7 @@
+services:
+  opint:
+    entrypoint: >
+      interpreter
+      --api_base http://llamacpp:8080/v1
+      --api_key sk-llamacpp
+      ${HARBOR_OPINT_CMD}
7 changes: 7 additions & 0 deletions compose.x.opint.mistralrs.yml
@@ -0,0 +1,7 @@
+services:
+  opint:
+    entrypoint: >
+      interpreter
+      --api_base http://mistralrs:8021/v1
+      --api_key sk-mistralrs
+      ${HARBOR_OPINT_CMD}
7 changes: 7 additions & 0 deletions compose.x.opint.tabbyapi.yml
@@ -0,0 +1,7 @@
+services:
+  opint:
+    entrypoint: >
+      interpreter
+      --api_base http://tabbyapi:5000/v1
+      --api_key ${HARBOR_TABBYAPI_ADMIN_KEY}
+      ${HARBOR_OPINT_CMD}
7 changes: 7 additions & 0 deletions compose.x.opint.vllm.yml
@@ -0,0 +1,7 @@
+services:
+  opint:
+    entrypoint: >
+      interpreter
+      --api_base http://vllm:8000/v1
+      --api_key sk-vllm
+      ${HARBOR_OPINT_CMD}
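The six opint override files above all follow the same pattern: each one replaces the container entrypoint so Open Interpreter talks to that backend's OpenAI-compatible endpoint, using a placeholder key where the backend does not enforce one (sk-aphrodite, sk-llamacpp, sk-mistralrs, sk-vllm) and the configured key where it does (LiteLLM, TabbyAPI). As a rough sketch of what the vLLM variant resolves to at run time — the base compose.yml file name and the manual merge below are assumptions about how Harbor assembles its compose invocation, not something this diff shows:

# Hypothetical manual equivalent of running opint against the vllm backend:
docker compose -f compose.yml -f compose.x.opint.vllm.yml run opint
# ...which, with the defaults from default.env, effectively executes:
interpreter --api_base http://vllm:8000/v1 --api_key sk-vllm --model llama3.1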
4 changes: 4 additions & 0 deletions compose.x.webui.vllm.yml
@@ -0,0 +1,4 @@
+services:
+  webui:
+    volumes:
+      - ./open-webui/configs/config.vllm.json:/app/configs/config.vllm.json
1 change: 1 addition & 0 deletions default.env
@@ -171,6 +171,7 @@ HARBOR_OPINT_CONFIG_PATH="~/.config/open-interpreter"
 HARBOR_OPINT_EXTRA_ARGS=""
 HARBOR_OPINT_MODEL="llama3.1"
 HARBOR_OPINT_CMD="--model llama3.1"
+HARBOR_OPINT_BACKEND=""
 
 # ============================================
 # Service Configuration.
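The new HARBOR_OPINT_BACKEND variable defaults to empty, so existing setups keep their current behaviour until someone opts in. A minimal sketch of the opt-in, assuming the value ends up in Harbor's .env override like the other HARBOR_* settings (the chosen backend here is illustrative):

# Pin Open Interpreter to the vLLM backend regardless of which services are currently running:
HARBOR_OPINT_BACKEND="vllm"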
18 changes: 16 additions & 2 deletions harbor.sh
@@ -706,6 +706,10 @@ unsafe_update() {
   git pull
 }
 
+get_active_services() {
+  docker compose ps --format "{{.Service}}" | tr '\n' ' '
+}
+
 # ========================================================================
 # == Service CLIs
 # ========================================================================
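For reference, the new helper simply flattens the list of currently running compose services into one space-separated string so it can be passed straight to compose_with_options; a sketch with a couple of illustrative services up:

$ docker compose ps --format "{{.Service}}"
ollama
webui
$ docker compose ps --format "{{.Service}}" | tr '\n' ' '
ollama webui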
@@ -1141,6 +1145,10 @@ run_opint_command() {
   }
 
   case "$1" in
+    backend)
+      shift
+      env_manager_alias opint.backend "$@"
+      ;;
     profiles|--profiles)
       shift
      execute_and_process "env_manager get opint.config.path" "sys_open {{output}}/profiles" "No opint.config.path set"
@@ -1170,9 +1178,15 @@ run_opint_command() {
       echo "Harbor does not support Open Interpreter OS mode".
       ;;
     *)
-      # Everything is proxied to the opint container
+      # Allow permanent override of the target backend
+      local services=$(env_manager get opint.backend)
+
+      if [ -z "$services" ]; then
+        services=$(get_active_services)
+      fi
+
       # Mount the current directory and set it as the working directory
-      $(compose_with_options "opint") run -v "$original_dir:$original_dir" --workdir "$original_dir" opint $@
+      $(compose_with_options "$services" "opint") run -v "$original_dir:$original_dir" --workdir "$original_dir" opint $@
       ;;
   esac
 }
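Taken together, opint now prefers an explicitly configured backend (opint.backend / HARBOR_OPINT_BACKEND) and only falls back to whatever services are already running. A hedged usage sketch — the get/set behaviour is inferred from the env_manager_alias pattern used elsewhere in harbor.sh rather than verified here:

# Persist a backend choice (stored as HARBOR_OPINT_BACKEND):
harbor opint backend vllm
# With no argument, presumably prints the current value:
harbor opint backend
# Launch Open Interpreter; with no backend pinned, it attaches to whatever services are running:
harbor opint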
4 changes: 4 additions & 0 deletions http-catalog/vllm.http
@@ -2,6 +2,10 @@
 
 ###
 
+curl {{host}}/v1/models
+
+###
+
 curl {{host}}/v1/chat/completions -H 'Content-Type: application/json' -d '{
   "model": "google/gemma-2-2b-it",
   "messages": [
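The added request is a quick model-listing check against vLLM's OpenAI-compatible API. An in-network equivalent using the placeholder key this commit standardises on (the Authorization header is the usual OpenAI-style bearer token; whether this particular vLLM setup enforces it is not shown in the diff):

# From a container on the same compose network:
curl http://vllm:8000/v1/models -H "Authorization: Bearer sk-vllm"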
9 changes: 9 additions & 0 deletions open-webui/config.json
@@ -3,5 +3,14 @@
     "base_urls": [
       "http://ollama:11434"
     ]
   },
+  "openai": {
+    "api_base_urls": [
+      "http://vllm:8000/v1"
+    ],
+    "api_keys": [
+      "sk-vllm"
+    ],
+    "enabled": true
+  }
 }
2 changes: 1 addition & 1 deletion open-webui/configs/config.aphrodite.json
@@ -4,7 +4,7 @@
       "http://aphrodite:7860/v1"
     ],
     "api_keys": [
-      ""
+      "sk-aphrodite"
     ],
     "enabled": true
   }
2 changes: 1 addition & 1 deletion open-webui/configs/config.llamacpp.json
@@ -4,7 +4,7 @@
       "http://llamacpp:8080/v1"
     ],
     "api_keys": [
-      ""
+      "sk-llamacpp"
     ],
     "enabled": true
   }
11 changes: 11 additions & 0 deletions open-webui/configs/config.vllm.json
@@ -0,0 +1,11 @@
+{
+  "openai": {
+    "api_base_urls": [
+      "http://vllm:8000/v1"
+    ],
+    "api_keys": [
+      "sk-vllm"
+    ],
+    "enabled": true
+  }
+}
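On the Open WebUI side, the fix is the same everywhere: the OpenAI connections previously shipped with empty api_keys entries — presumably what the commit title means by "webui configs without the api keys" — and now carry placeholder keys that match the opint overrides, plus a new vllm config wired in through the compose.x.webui.vllm.yml mount above. A small sanity-check sketch, assuming jq is available (the file paths are from this repository, the check itself is illustrative):

# Flag any backend config that still ships an empty OpenAI API key:
for f in open-webui/configs/config.*.json; do
  jq -e '(.openai.api_keys // []) | index("") == null' "$f" > /dev/null || echo "empty key in $f"
done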
