From eb60c7307d101e81847b6eae5395a542070371e3 Mon Sep 17 00:00:00 2001
From: Ivan Charapanau
Date: Wed, 2 Oct 2024 22:32:13 +0200
Subject: [PATCH] feat: anythingllm integration, v0.2.4 bump

---
 .gitignore                         |  5 ++++-
 README.md                          |  2 +-
 anythingllm/override.env           |  2 ++
 app/package.json                   |  2 +-
 app/src-tauri/Cargo.lock           |  2 +-
 app/src-tauri/Cargo.toml           |  2 +-
 app/src/serviceMetadata.tsx        |  3 +++
 compose.anythingllm.yml            | 19 +++++++++++++++++++
 compose.x.anythingllm.llamacpp.yml |  6 ++++++
 compose.x.anythingllm.ollama.yml   |  7 +++++++
 compose.x.anythingllm.searxng.yml  |  4 ++++
 harbor.sh                          |  3 ++-
 package.json                       |  2 +-
 profiles/default.env               |  6 ++++++
 searxng/settings.yml.new           |  2 +-
 15 files changed, 59 insertions(+), 8 deletions(-)
 create mode 100644 anythingllm/override.env
 create mode 100644 compose.anythingllm.yml
 create mode 100644 compose.x.anythingllm.llamacpp.yml
 create mode 100644 compose.x.anythingllm.ollama.yml
 create mode 100644 compose.x.anythingllm.searxng.yml

diff --git a/.gitignore b/.gitignore
index 138c667..61e262e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -92,4 +92,7 @@ jupyter/workspace/*
 .history
 
 # Boost
-boost/src/**/__pycache__/
\ No newline at end of file
+boost/src/**/__pycache__/
+
+# AnythingLLM
+anythingllm/storage/
\ No newline at end of file
diff --git a/README.md b/README.md
index 1b4c645..64669d6 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,7 @@ Harbor is a containerized LLM toolkit that allows you to run LLMs and additional
 
 ##### UIs
 
-[Open WebUI](https://github.com/av/harbor/wiki/2.1.1-Frontend:-Open-WebUI) ⦁︎ [ComfyUI](https://github.com/av/harbor/wiki/2.1.2-Frontend:-ComfyUI) ⦁︎ [LibreChat](https://github.com/av/harbor/wiki/2.1.3-Frontend:-LibreChat) ⦁︎ [HuggingFace ChatUI](https://github.com/av/harbor/wiki/2.1.4-Frontend:-ChatUI) ⦁︎ [Lobe Chat](https://github.com/av/harbor/wiki/2.1.5-Frontend:-Lobe-Chat) ⦁︎ [Hollama](https://github.com/av/harbor/wiki/2.1.6-Frontend:-hollama) ⦁︎ [parllama](https://github.com/av/harbor/wiki/2.1.7-Frontend:-parllama) ⦁︎ [BionicGPT](https://github.com/av/harbor/wiki/2.1.8-Frontend:-BionicGPT)
+[Open WebUI](https://github.com/av/harbor/wiki/2.1.1-Frontend:-Open-WebUI) ⦁︎ [ComfyUI](https://github.com/av/harbor/wiki/2.1.2-Frontend:-ComfyUI) ⦁︎ [LibreChat](https://github.com/av/harbor/wiki/2.1.3-Frontend:-LibreChat) ⦁︎ [HuggingFace ChatUI](https://github.com/av/harbor/wiki/2.1.4-Frontend:-ChatUI) ⦁︎ [Lobe Chat](https://github.com/av/harbor/wiki/2.1.5-Frontend:-Lobe-Chat) ⦁︎ [Hollama](https://github.com/av/harbor/wiki/2.1.6-Frontend:-hollama) ⦁︎ [parllama](https://github.com/av/harbor/wiki/2.1.7-Frontend:-parllama) ⦁︎ [BionicGPT](https://github.com/av/harbor/wiki/2.1.8-Frontend:-BionicGPT) ⦁︎ [AnythingLLM](https://github.com/av/harbor/wiki/2.1.9-Frontend:-AnythingLLM)
 
 ##### Backends
 
diff --git a/anythingllm/override.env b/anythingllm/override.env
new file mode 100644
index 0000000..c6919cd
--- /dev/null
+++ b/anythingllm/override.env
@@ -0,0 +1,2 @@
+# This file can be used for additional environment
+# variables for the anythingllm service
diff --git a/app/package.json b/app/package.json
index c3a0c51..19d0446 100644
--- a/app/package.json
+++ b/app/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@av/harbor-app",
   "private": true,
-  "version": "0.2.3",
+  "version": "0.2.4",
   "type": "module",
   "scripts": {
     "dev": "vite",
diff --git a/app/src-tauri/Cargo.lock b/app/src-tauri/Cargo.lock
index 5789dc3..6992ce1 100644
--- a/app/src-tauri/Cargo.lock
+++ b/app/src-tauri/Cargo.lock
@@ -1474,7 +1474,7 @@ dependencies = [
 
 [[package]]
 name = "harbor-app"
-version = "0.2.0"
+version = "0.2.3"
 dependencies = [
  "fix-path-env",
  "serde",
diff --git a/app/src-tauri/Cargo.toml b/app/src-tauri/Cargo.toml
index 59b5dcd..ca54243 100644
--- a/app/src-tauri/Cargo.toml
+++ b/app/src-tauri/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "harbor-app"
-version = "0.2.3"
+version = "0.2.4"
 description = "A companion app for Harbor LLM toolkit"
 authors = ["av"]
 edition = "2021"
diff --git a/app/src/serviceMetadata.tsx b/app/src/serviceMetadata.tsx
index 400dba6..d3669f9 100644
--- a/app/src/serviceMetadata.tsx
+++ b/app/src/serviceMetadata.tsx
@@ -173,5 +173,8 @@ export const serviceMetadata: Record<string, ServiceMetadata> = {
   },
   litlytics: {
     tags: [HST.satellite, HST.partial],
+  },
+  anythingllm: {
+    tags: [HST.frontend, HST.partial]
   }
 };
\ No newline at end of file
diff --git a/compose.anythingllm.yml b/compose.anythingllm.yml
new file mode 100644
index 0000000..5237bcc
--- /dev/null
+++ b/compose.anythingllm.yml
@@ -0,0 +1,19 @@
+services:
+  anythingllm:
+    container_name: ${HARBOR_CONTAINER_PREFIX}.anythingllm
+    env_file:
+      - ./.env
+      - ./anythingllm/override.env
+    image: ${HARBOR_ANYTHINGLLM_IMAGE}:${HARBOR_ANYTHINGLLM_VERSION}
+    ports:
+      - ${HARBOR_ANYTHINGLLM_HOST_PORT}:3001
+    volumes:
+      - ./anythingllm/storage:/app/server/storage
+    cap_add:
+      - SYS_ADMIN
+    environment:
+      - STORAGE_DIR=/app/server/storage
+      - VECTOR_DB=lancedb
+      - PASSWORDMINCHAT=8
+    networks:
+      - harbor-network
\ No newline at end of file
diff --git a/compose.x.anythingllm.llamacpp.yml b/compose.x.anythingllm.llamacpp.yml
new file mode 100644
index 0000000..95c89c4
--- /dev/null
+++ b/compose.x.anythingllm.llamacpp.yml
@@ -0,0 +1,6 @@
+services:
+  anythingllm:
+    environment:
+      - LLM_PROVIDER=generic-openai
+      - GENERIC_OPEN_AI_BASE_PATH=http://llamacpp:8080/v1
+      - GENERIC_OPEN_AI_API_KEY=sk-llamacpp
\ No newline at end of file
diff --git a/compose.x.anythingllm.ollama.yml b/compose.x.anythingllm.ollama.yml
new file mode 100644
index 0000000..d496d12
--- /dev/null
+++ b/compose.x.anythingllm.ollama.yml
@@ -0,0 +1,7 @@
+services:
+  anythingllm:
+    environment:
+      - LLM_PROVIDER=ollama
+      - OLLAMA_BASE_PATH=${HARBOR_OLLAMA_INTERNAL_URL}
+      - EMBEDDING_ENGINE=ollama
+      - EMBEDDING_BASE_PATH=${HARBOR_OLLAMA_INTERNAL_URL}
\ No newline at end of file
diff --git a/compose.x.anythingllm.searxng.yml b/compose.x.anythingllm.searxng.yml
new file mode 100644
index 0000000..cac28b1
--- /dev/null
+++ b/compose.x.anythingllm.searxng.yml
@@ -0,0 +1,4 @@
+services:
+  anythingllm:
+    environment:
+      - AGENT_SEARXNG_API_URL=http://searxng:8080
\ No newline at end of file
diff --git a/harbor.sh b/harbor.sh
index 95263e0..138309d 100755
--- a/harbor.sh
+++ b/harbor.sh
@@ -1637,6 +1637,7 @@ fix_fs_acl() {
   docker_fsacl ./bench
   docker_fsacl ./jupyter
   docker_fsacl ./ktransformers
+  docker_fsacl ./anythingllm
 
   docker_fsacl $(eval echo "$(env_manager get hf.cache)")
   docker_fsacl $(eval echo "$(env_manager get vllm.cache)")
@@ -3340,7 +3341,7 @@ run_stt_command() {
 
 # ========================================================================
 # Globals
-version="0.2.3"
+version="0.2.4"
 harbor_repo_url="https://github.com/av/harbor.git"
 harbor_release_url="https://api.github.com/repos/av/harbor/releases/latest"
 delimiter="|"
diff --git a/package.json b/package.json
index 61dbbdf..ba1f898 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@av/harbor",
-  "version": "0.2.3",
+  "version": "0.2.4",
   "bin": {
     "harbor": "./bin/harbor"
   }
diff --git a/profiles/default.env b/profiles/default.env
index 0e53a82..a614b01 100644
--- a/profiles/default.env
+++ b/profiles/default.env
@@ -371,6 +371,12 @@ HARBOR_STT_MODEL="Systran/faster-distil-whisper-large-v3"
 HARBOR_LITLYTICS_HOST_PORT=34161
 HARBOR_LITLYTICS_VERSION="latest"
 
+# AnythingLLM
+HARBOR_ANYTHINGLLM_HOST_PORT=34171
+HARBOR_ANYTHINGLLM_IMAGE="mintplexlabs/anythingllm"
+HARBOR_ANYTHINGLLM_VERSION="latest"
+HARBOR_ANYTHINGLLM_JWT_SECRET="sk-anythingllm-jwt"
+
 # ============================================
 # Service Configuration.
 # You can specify any of the service's own environment variables here.
diff --git a/searxng/settings.yml.new b/searxng/settings.yml.new
index d1c8540..53b7c24 100644
--- a/searxng/settings.yml.new
+++ b/searxng/settings.yml.new
@@ -88,7 +88,7 @@ server:
 
   # If your instance owns a /etc/searxng/settings.yml file, then set the following
   # values there.
-  secret_key: "3af0fffe5074568b6e26828123c9595f150fa35bffaad0ac1d975475d4f508d8" # Is overwritten by ${SEARXNG_SECRET}
+  secret_key: "dee52de1cc979941035b8528ac6a3d8a26b0f5b563bfebe2a6c17fb5e35d0aff" # Is overwritten by ${SEARXNG_SECRET}
   # Proxy image results through SearXNG. Is overwritten by ${SEARXNG_IMAGE_PROXY}
   image_proxy: false
   # 1.0 and 1.1 are supported
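Usage sketch (assumptions, not part of the patch): with these files in place, AnythingLLM is wired up like the other Harbor frontends, so the existing CLI verbs should apply to it. The commands below assume that `harbor up` and `harbor open` accept the new `anythingllm` handle, that Harbor merges compose.anythingllm.yml with the matching compose.x.anythingllm.*.yml files for whichever backend is active, and that the default host port 34171 from profiles/default.env is used; exact flags may differ.

    # Start AnythingLLM alongside Ollama; the cross-file
    # compose.x.anythingllm.ollama.yml is expected to set LLM_PROVIDER=ollama
    harbor up anythingllm ollama

    # Open the UI in a browser (http://localhost:34171 by default)
    harbor open anythingllm

    # Additional AnythingLLM environment variables go into anythingllm/override.env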