From d37e8ce60b5dade9c1ea6b0b2b5e5c4c9b7b17a2 Mon Sep 17 00:00:00 2001
From: Weves
Date: Mon, 28 Aug 2023 13:21:31 -0700
Subject: [PATCH] Fix startup exception + mypy

---
 backend/danswer/background/connector_deletion.py | 2 +-
 backend/danswer/llm/azure.py                     | 6 ++++++
 backend/danswer/llm/openai.py                    | 6 ++++++
 3 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/backend/danswer/background/connector_deletion.py b/backend/danswer/background/connector_deletion.py
index f3b27c2c191..e5d0c6d2eea 100644
--- a/backend/danswer/background/connector_deletion.py
+++ b/backend/danswer/background/connector_deletion.py
@@ -90,7 +90,7 @@ def _update_multi_indexed_docs() -> None:
         def _get_user(
             credential: Credential,
         ) -> str:
-            if credential.public_doc:
+            if credential.public_doc or not credential.user:
                 return PUBLIC_DOC_PAT
 
             return str(credential.user.id)
diff --git a/backend/danswer/llm/azure.py b/backend/danswer/llm/azure.py
index 49a91afacf6..cce164466eb 100644
--- a/backend/danswer/llm/azure.py
+++ b/backend/danswer/llm/azure.py
@@ -1,3 +1,4 @@
+import os
 from typing import Any
 
 from langchain.chat_models.azure_openai import AzureChatOpenAI
@@ -22,6 +23,11 @@ def __init__(
         *args: list[Any],
         **kwargs: dict[str, Any]
     ):
+        # set a dummy API key if not specified so that LangChain doesn't throw an
+        # exception when trying to initialize the LLM which would prevent the API
+        # server from starting up
+        if not api_key:
+            api_key = os.environ.get("OPENAI_API_KEY") or "dummy_api_key"
         self._llm = AzureChatOpenAI(
             model=model_version,
             openai_api_type="azure",
diff --git a/backend/danswer/llm/openai.py b/backend/danswer/llm/openai.py
index 4aa9274a0bc..891e5258650 100644
--- a/backend/danswer/llm/openai.py
+++ b/backend/danswer/llm/openai.py
@@ -1,3 +1,4 @@
+import os
 from typing import Any
 
 from langchain.chat_models.openai import ChatOpenAI
@@ -16,6 +17,11 @@ def __init__(
         *args: list[Any],
         **kwargs: dict[str, Any]
     ):
+        # set a dummy API key if not specified so that LangChain doesn't throw an
+        # exception when trying to initialize the LLM which would prevent the API
+        # server from starting up
+        if not api_key:
+            api_key = os.environ.get("OPENAI_API_KEY") or "dummy_api_key"
         self._llm = ChatOpenAI(
             model=model_version,
             openai_api_key=api_key,
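
Reviewer note, not part of the patch: a minimal, self-contained sketch of the fallback pattern used in the two LLM hunks above. PlaceholderChatModel and build_chat_model are hypothetical stand-ins for the LangChain model classes and the Danswer constructors; the point is only that an empty key is replaced by the OPENAI_API_KEY environment variable, or a dummy value, before the underlying client is constructed, so the API server can start even when no key has been configured yet.

    import os


    class PlaceholderChatModel:
        """Hypothetical stand-in for a LangChain chat model that rejects an empty key."""

        def __init__(self, openai_api_key: str) -> None:
            if not openai_api_key:
                raise ValueError("openai_api_key must be non-empty")
            self.openai_api_key = openai_api_key


    def build_chat_model(api_key: str | None) -> PlaceholderChatModel:
        # same fallback order as the patch: explicit key -> OPENAI_API_KEY env var -> dummy
        # value, so constructing the model never raises during application startup
        if not api_key:
            api_key = os.environ.get("OPENAI_API_KEY") or "dummy_api_key"
        return PlaceholderChatModel(openai_api_key=api_key)


    if __name__ == "__main__":
        model = build_chat_model(api_key=None)
        print(model.openai_api_key)  # "dummy_api_key" unless OPENAI_API_KEY is set

The dummy key only defers the failure: a chat request made with it would still be rejected by the API, which appears to be the intent here (start up cleanly, fail at request time if no real key is configured).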