diff --git a/.github/workflows/build_and_deploy.yaml b/.github/workflows/build_and_deploy.yaml
index 01616de..baf3c3e 100644
--- a/.github/workflows/build_and_deploy.yaml
+++ b/.github/workflows/build_and_deploy.yaml
@@ -1,10 +1,10 @@
 name: Build Docker Image, Push to GHCR and Deploy to GKE

 on:
+  workflow_dispatch: # allows for manual triggering
   push:
     branches:
       - main
-      - feat/deployment

 jobs:
   build:
@@ -54,7 +54,6 @@ jobs:
       - name: "Deploy Helm Chart"
        run: |
          helm upgrade --install project-llllm deployment/helm --create-namespace --namespace project-llllm \
-           --set secrets.openai_api_key=${{ secrets.OPENAI_API_KEY }} \
            --set streamlit.image.tag=${{ github.sha }}
      - name: Create contacts configmap
        run: kubectl apply -f deployment/k8s/configmap.yaml --namespace project-llllm
diff --git a/README.md b/README.md
index c8aa66e..04fa53d 100644
--- a/README.md
+++ b/README.md
@@ -5,8 +5,8 @@ A suite of tools to perform geospatial operations using Large Language Models.
 LLLLM stands for Lat-Lng-Large-Language-Model, you can call it as "el el el el emm" or "L4M".

 ## Setup
-1. Create the llllm-env - `mamba env create -f environment.yaml`
-2. Set your OpenAI API key as an environment variable - `export OPENAI_API_KEY=`
+
+Create the llllm-env - `mamba env create -f environment.yaml`

 ## Getting Started

diff --git a/deployment/helm/templates/secrets.yaml b/deployment/helm/templates/secrets.yaml
deleted file mode 100644
index 0afd7da..0000000
--- a/deployment/helm/templates/secrets.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-apiVersion: v1
-kind: Secret
-metadata:
-  name: llllm-secrets
-type: Opaque
-data:
-  openai_api_key: {{ .Values.secrets.openai_api_key | b64enc }}
diff --git a/deployment/helm/templates/streamlit.yaml b/deployment/helm/templates/streamlit.yaml
index 207116e..a0fe315 100644
--- a/deployment/helm/templates/streamlit.yaml
+++ b/deployment/helm/templates/streamlit.yaml
@@ -17,12 +17,6 @@ spec:
          image: {{ .Values.streamlit.image.repository }}:{{ .Values.streamlit.image.tag }}
          command: ["/opt/conda/envs/llllm-env/bin/streamlit"]
          args: ["run", "app.py", "--server.port=8501", "--server.address=0.0.0.0"]
-         env:
-           - name: OPENAI_API_KEY
-             valueFrom:
-               secretKeyRef:
-                 name: llllm-secrets
-                 key: openai_api_key
          ports:
            - containerPort: 8501
          volumeMounts:
diff --git a/deployment/helm/values.yaml b/deployment/helm/values.yaml
index bfe8267..d45a8a4 100644
--- a/deployment/helm/values.yaml
+++ b/deployment/helm/values.yaml
@@ -3,6 +3,3 @@ streamlit:
   image:
     repository: ghcr.io/developmentseed/llllm
     tag: latest
  host: llllm.k8s.labs.ds.io
-
-secrets:
-  openai_api_key: