Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Chat history is not getting retrieved on the chat resume #1499

Open
ashish6ai opened this issue Nov 5, 2024 · 1 comment
Open

Chat history is not getting retrieved on the chat resume #1499

ashish6ai opened this issue Nov 5, 2024 · 1 comment
Labels
bug Something isn't working data layer Pertains to data layers.

Comments

@ashish6ai
Copy link

Hi
I'm saving the chat history to a Postgres database through the data layer. When I resume a chat, the history is not loaded, although the chat title appears in the sidebar and the chat is also stored in the threads table.
I have provided the code snippet I'm using below — please let me know if there are any issues with the approach or if any changes need to be made.

Code Snippet -
"""
# Wire Chainlit's SQLAlchemy data layer to the Postgres instance named in the env.
db_conninfo = os.getenv('DB_CONNECTION_STRING')
cl_data._data_layer = SQLAlchemyDataLayer(conninfo=db_conninfo)

@cl.on_chat_start
async def start():
    """Initialize a new chat session.

    Shows the model selector, sends the welcome message, persists it via the
    data layer, and seeds one message-history list per supported backend.
    """
    settings = await cl.ChatSettings(
        [
            Select(
                id="model",
                label="Large Language Model",
                values=[
                    "gemini-1.5-flash",
                    "gpt-4o",
                    "llama3.2:3b",
                ],
                initial_index=0,
            ),
        ]
    ).send()
    setup_llm(settings)

    welcome_message = await cl.Message(author="Codegen", content=prompts.WELCOME_CONTENT).send()

    # Persist the welcome message. Use Chainlit's canonical step type
    # "assistant_message" — custom types such as "ai" are stored fine but are
    # NOT rendered by the UI when the thread is resumed, which is why the
    # resumed chat appeared empty.
    await cl_data._data_layer.create_step({
        "id": welcome_message.id,
        # NOTE(review): confirm "thread_id" is actually set in the session by
        # on_chat_start time — otherwise this step is saved with threadId=None.
        "threadId": cl.user_session.get("thread_id"),
        "type": "assistant_message",
        "name": "welcome",
        "output": prompts.WELCOME_CONTENT,
    })

    # One history per backend; each backend expects a different message schema.
    cl.user_session.set("message_history_openai", [{"role": "system", "content": prompts.WELCOME_CONTENT}])
    cl.user_session.set("message_history_gemini", [{"role": "model", "parts": prompts.WELCOME_CONTENT}])
    cl.user_session.set("message_history_ollama", [{"role": "system", "content": prompts.SYSTEM_PROMPT}])

@cl.on_chat_resume
async def on_chat_resume(thread: ThreadDict):
    """Rebuild the per-backend message histories from the persisted thread.

    Args:
        thread: the resumed thread dict supplied by Chainlit; only ``id`` is
            read here.
    """
    settings = await cl.ChatSettings(
        [
            Select(
                id="model",
                label="Large Language Model",
                values=[
                    "gemini-1.5-flash",
                    "gpt-4o",
                    "llama3.2:3b",
                ],
                initial_index=0,
            ),
        ]
    ).send()
    setup_llm(settings)

    thread_data = await cl_data._data_layer.get_thread(thread["id"])

    # Seed the histories unconditionally. The original code only assigned them
    # inside ``if thread_data:``, so an empty/missing thread raised NameError
    # at the cl.user_session.set(...) calls below.
    message_history_openai = [{"role": "system", "content": prompts.WELCOME_CONTENT}]
    message_history_gemini = [{"role": "model", "parts": prompts.WELCOME_CONTENT}]
    message_history_ollama = [{"role": "system", "content": prompts.SYSTEM_PROMPT}]

    if thread_data:
        for step in thread_data["steps"]:
            # Accept both the legacy custom type ("human") already stored in
            # the DB and Chainlit's canonical type ("user_message"); anything
            # else is treated as an assistant turn.
            if step["type"] in ("human", "user_message"):
                message_history_openai.append({"role": "user", "content": step["output"]})
                message_history_gemini.append({"role": "user", "parts": step["output"]})
                message_history_ollama.append({"role": "user", "content": step["output"]})
            else:
                message_history_openai.append({"role": "assistant", "content": step["output"]})
                message_history_gemini.append({"role": "model", "parts": step["output"]})
                message_history_ollama.append({"role": "assistant", "content": step["output"]})

    cl.user_session.set("message_history_openai", message_history_openai)
    cl.user_session.set("message_history_gemini", message_history_gemini)
    cl.user_session.set("message_history_ollama", message_history_ollama)

    await cl.Message(content="Welcome back! How can I assist you?").send()

@cl.on_message
async def on_message(msg: cl.Message):
    """Route an incoming user message to the selected model backend.

    Persists the user message and the assistant response via the data layer
    and keeps the per-backend history in the session up to date.
    """
    model_name = cl.user_session.get("model_name")
    model = cl.user_session.get("model")

    # Persist the user turn with Chainlit's canonical step type
    # "user_message" — custom types (the original used "human") are stored
    # but not rendered by the UI when the thread is resumed.
    await cl_data._data_layer.create_step({
        "id": msg.id,
        "threadId": cl.user_session.get("thread_id"),
        "type": "user_message",
        "name": "user_message",
        "output": msg.content,
    })

    if not msg.elements:
        if "gemini" in model_name:
            message_history_gemini = cl.user_session.get("message_history_gemini")
            message_history_gemini.append({"role": "user", "parts": msg.content})
            res = gemini_tool_chat(msg.content, model, message_history_gemini)
            message_history_gemini.append({"role": "model", "parts": res})
            cl.user_session.set("message_history_gemini", message_history_gemini)
        elif "gpt" in model_name:
            message_history_openai = cl.user_session.get("message_history_openai")
            message_history_openai.append({"role": "user", "content": msg.content})
            res = openai_tool(prompts.SYSTEM_PROMPT, str(message_history_openai), model, model_name)
            message_history_openai.append({"role": "assistant", "content": res})
            cl.user_session.set("message_history_openai", message_history_openai)
        elif "llama" in model_name:
            message_history_ollama = cl.user_session.get("message_history_ollama")
            message_history_ollama.append({"role": "user", "content": msg.content})
            res = ollama_chat_tool(OLLAMA_API_URL, model_name, message_history_ollama)
            message_history_ollama.append({"role": "assistant", "content": res})
            # Write the history back, consistent with the other branches
            # (the original omitted this set here).
            cl.user_session.set("message_history_ollama", message_history_ollama)
        else:
            # Fail loudly instead of hitting a NameError on ``res`` below.
            raise ValueError(f"Unsupported model: {model_name!r}")

        response_message = await cl.Message(author="Codegen", content=res).send()

        # Persist the assistant turn under the canonical "assistant_message"
        # type so it is rendered on chat resume.
        await cl_data._data_layer.create_step({
            "id": response_message.id,
            "threadId": cl.user_session.get("thread_id"),
            "type": "assistant_message",
            "name": "assistant_response",
            "output": res,
        })

"""

@dosubot dosubot bot added bug Something isn't working data layer Pertains to data layers. labels Nov 5, 2024
@daviddwlee84
Copy link

Same issue using Literal AI

I was able to recover chat history and continue chatting, but the history of messages won't show after resuming

  1. Select a history session
    image
  2. Click resume
    image
  3. Missing history messages
    image
  4. Create a new chat and the history session will refresh
    image

Followed the cookbook with little modification
cookbook/resume-chat at main · Chainlit/cookbook

With Python 3.11 and dependencies

chainlit==1.3.1
langchain==0.3.7
langchain-community==0.3.5
langchain-core==0.3.15
langchain-openai==0.2.3

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
bug Something isn't working data layer Pertains to data layers.
Projects
None yet
Development

No branches or pull requests

2 participants