Merge pull request #585 from openchatai/feat/follow_up
move the followup question to another thread
codebanesr authored Jan 27, 2024
2 parents 0410c3c + 75088e4 commit 4f67ca5
Showing 3 changed files with 12 additions and 8 deletions.
5 changes: 2 additions & 3 deletions llm-server/routes/chat/chat_controller.py
@@ -232,9 +232,7 @@ async def handle_chat_send_common(
         if result.error:
             logger.error("chat_conversation_error", message=result.error)
 
-        emit(session_id, "|im_end|") if is_streaming else jsonify(
-            {"type": "text", "response": {"text": result.message}}
-        )
+        return jsonify({"type": "text", "response": {"text": result.message}})
     except Exception as e:
         logger.error(
             "An exception occurred",
@@ -273,6 +271,7 @@ def m_called_actions_by_bot(bot_id: str):
     response = most_called_actions_by_bot(bot_id)
     return jsonify(response)
 
+
 @chat_workflow.route("/transcribe", methods=["POST"])
 async def transcribe_audio():
     # Check if the post request has the file part
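A note on why those three controller lines were dropped: they formed a conditional expression evaluated only for its side effects, so the non-streaming branch built a jsonify payload and then discarded it instead of returning it. A minimal sketch of the pitfall, using hypothetical stand-ins (`stream_end`, `build_response`) rather than the repo's real helpers:

```python
def handle(is_streaming: bool):
    # Buggy shape (what the deleted lines did): a conditional expression
    # used as a statement. Both branches produce a value that is thrown
    # away, so the non-streaming response never reaches the caller.
    stream_end() if is_streaming else build_response()

    # Fixed shape (what the commit switches to): always return the payload;
    # the end-of-stream emit is handled in ChainStrategy (next file).
    return build_response()

def stream_end():
    print("<stream end marker>")  # hypothetical stand-in for emit(session_id, "|im_end|")

def build_response():
    # hypothetical stand-in for jsonify({"type": "text", ...})
    return {"type": "text", "response": {"text": "..."}}
```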
11 changes: 6 additions & 5 deletions llm-server/routes/chat/implementation/chain_strategy.py
@@ -22,6 +22,7 @@
 )
 from utils.llm_consts import VectorCollections
 from models.repository.action_call_repo import add_action_call
+from utils.llm_consts import enable_followup_questions
 
 
 class ChainStrategy(ChatRequestHandler):
@@ -114,15 +115,15 @@ async def handle_request(
                 session_id=session_id,
             )
 
-        # we only support follow_up question this in streaming mode
-        if is_streaming:
-
+        emit(session_id, "|im_end|") if is_streaming else None
+
+        if enable_followup_questions:
             followup_question_list = await generate_follow_up_questions(
-                conversations_history, response.message or "", current_input=text
+                conversations_history, response.message or "", text
             )
 
-            print(followup_question_list.json())
+            if is_streaming:
                 emit(f"{session_id}_follow_qns", followup_question_list.json())
 
         response.knowledgebase_called = True
         return response
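The PR title says follow-up generation moves to another thread, but in this hunk the call is still awaited inline, gated by the new `enable_followup_questions` flag, with results pushed on the per-session `{session_id}_follow_qns` channel. Purely as an illustrative sketch (not code from this commit), decoupling it as an asyncio background task so the main response is not blocked could look like:

```python
import asyncio

async def emit_follow_ups(session_id, history, answer, user_input):
    # Hypothetical helper: generate follow-up questions off the critical
    # path, then push them on the dedicated per-session channel.
    questions = await generate_follow_up_questions(history, answer, user_input)
    emit(f"{session_id}_follow_qns", questions.json())

# Inside handle_request, after the main answer is ready:
if enable_followup_questions and is_streaming:
    # Fire-and-forget; handle_request returns without awaiting this task.
    asyncio.create_task(
        emit_follow_ups(session_id, conversations_history, response.message or "", text)
    )
```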
4 changes: 4 additions & 0 deletions llm-server/utils/llm_consts.py
@@ -78,3 +78,7 @@ def get_mysql_uri():
 
 chat_strategy = os.getenv("CHAT_STRATEGY", ChatStrategy.chain)
 max_pages_to_crawl = int(os.getenv("MAX_PAGES_TO_CRAWL", "15"))
+
+enable_followup_questions = (
+    True if os.getenv("ENABLE_FOLLOWUP_QUESTIONS", "YES") == "YES" else False
+)
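A small stylistic note on the new constant: wrapping a comparison in `True if ... else False` is redundant, since `==` already yields a bool. An equivalent one-liner, keeping the same variable name and environment key as the diff:

```python
import os

# Enabled by default; any value other than the exact string "YES" disables it.
enable_followup_questions = os.getenv("ENABLE_FOLLOWUP_QUESTIONS", "YES") == "YES"
```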
