Add healthcheck endpoints for both bots #78

Merged (5 commits) on Sep 6, 2023
20 changes: 11 additions & 9 deletions langchain-llamaindex-slackbot/src/chat_lanarky.py
@@ -164,21 +164,23 @@ async def get(request: Request):
     allow_headers=["*"],
 )
 
-if __name__ == "__main__":
-    import uvicorn
-
-    uvicorn.run(app, host="0.0.0.0", port=8080)
-
 
 @app.get("/health")
 def health_check():
     return {"status": "OK"}
 
 
-def run_fastapi():
-    uvicorn.run(app, host="0.0.0.0", port=8080)
+@app.get("/bot_health")
+def bot_health_check():
+    """Checks whether the bot is able to answer questions properly"""
+    question = {"question": "What is ZenML?"}
+    output = chain(question, return_only_outputs=True)
+    answer = output.get("answer")
+    if answer and "ZenML" in answer:
+        return {"status": "OK"}
+    else:
+        return {"status": "ERROR"}
 
 
 if __name__ == "__main__":
-    fastapi_thread = Thread(target=run_fastapi)
-    fastapi_thread.start()
+    uvicorn.run(app, host="0.0.0.0", port=8080)
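
For a quick smoke test of both endpoints once the service is up, a small client script like the one below would do. This is a minimal sketch, assuming the bot is serving on localhost:8080 as configured in the diff; the check helper is illustrative and not part of the PR.

# Minimal smoke test for the new endpoints (illustrative, not part of the PR).
# Assumes the bot is already serving on localhost:8080 as in the diff.
import requests


def check(endpoint: str) -> bool:
    """Return True if the endpoint reports {"status": "OK"}."""
    # /bot_health makes a real LLM call, so allow a generous timeout.
    resp = requests.get(f"http://localhost:8080{endpoint}", timeout=60)
    resp.raise_for_status()
    return resp.json().get("status") == "OK"


if __name__ == "__main__":
    for endpoint in ("/health", "/bot_health"):
        print(endpoint, "->", "OK" if check(endpoint) else "ERROR")

Note the cost difference between the two: /health only confirms the process is serving requests, while /bot_health performs a full chain invocation, so probes against it should be kept infrequent.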
12 changes: 12 additions & 0 deletions langchain-llamaindex-slackbot/src/main.py
@@ -147,6 +147,18 @@ def health_check():
     return {"status": "OK"}
 
 
+@fast_api_app.get("/bot_health")
+def bot_health_check():
+    """Checks whether the bot is able to answer questions properly"""
+    question = {"question": "What is ZenML?", "chat_history": ""}
+    output = chatgpt_chain(question, return_only_outputs=True)
+    answer = output.get("answer")
+    if answer and "ZenML" in answer:
+        return {"status": "OK"}
+    else:
+        return {"status": "ERROR"}
+
+
 def run_fastapi():
     uvicorn.run(fast_api_app, host="0.0.0.0", port=8080)
 
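
Because /bot_health makes a real round trip through the LLM chain, a unit test would normally stub the chain out. Below is a hedged sketch using FastAPI's TestClient, assuming src/main.py exposes fast_api_app and chatgpt_chain at module level as the diff suggests; the test name and the mocked answer are illustrative.

# Illustrative unit test; assumes main.py can be imported in the test
# environment (its module-level Slack setup may require tokens or stubs).
from unittest.mock import patch

from fastapi.testclient import TestClient

import main  # the module shown in the diff

client = TestClient(main.fast_api_app)


def test_bot_health_reports_ok():
    # Replace the real chain with a mock returning a canned answer that
    # contains "ZenML", which the endpoint treats as healthy.
    with patch.object(main, "chatgpt_chain",
                      return_value={"answer": "ZenML is an MLOps framework."}):
        resp = client.get("/bot_health")
    assert resp.status_code == 200
    assert resp.json() == {"status": "OK"}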

@@ -1,13 +1,13 @@
-langchain>=0.0.125,<=0.0.211
+langchain>=0.0.125,<=0.0.263
 openai>=0.27.2,<=0.27.8
 slack-bolt==1.16.2
 slack-sdk==3.20.0
-zenml==0.41.0
+zenml==0.44.1
 fastapi
 flask
 uvicorn
 gcsfs==2023.3.0
 faiss-cpu>=1.7.3,<=1.7.4
 unstructured>=0.5.7,<=0.7.8
 lanarky==0.7.12
 tiktoken