import chainlit as cl

from legal_agent.components.full_workflow import run_user_query


async def user_query_func(user_question):
    """Run the legal-agent workflow for *user_question* without blocking.

    ``run_user_query`` is synchronous; running it directly inside an async
    handler would stall the chainlit event loop for the whole duration of
    the LLM / retrieval work. ``cl.make_async`` offloads it to a worker
    thread so other chat sessions stay responsive.

    Returns the workflow's result dict (expected to contain a
    ``"response"`` key — consumed by ``main`` below).
    """
    response = await cl.make_async(run_user_query)(user_question)
    return response


@cl.on_chat_start
def start():
    """Hook fired when a new chat session begins; logs for visibility."""
    print("Chat started!")


@cl.on_message
async def main(message: cl.Message):
    """Handle an incoming user message and reply with the workflow answer.

    Extracts the raw text from the chainlit message, runs the legal-agent
    workflow, and sends the ``"response"`` field back to the user.
    """
    user_question = message.content
    response = await user_query_func(user_question)
    # NOTE(review): assumes the workflow always returns a dict with a
    # "response" key — a KeyError here means the workflow contract changed.
    await cl.Message(content=response["response"]).send()