Add PostMessageHandler to try to fix deployment
app.py CHANGED
@@ -3,6 +3,7 @@ import pandas as pd
 import os
 from langchain.document_loaders import CSVLoader
 from langchain.text_splitter import RecursiveCharacterTextSplitter
+from langchain_core.callbacks import BaseCallbackHandler
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.runnables import RunnablePassthrough, RunnableConfig
@@ -97,3 +98,43 @@ async def on_message(message: cl.Message):
         await msg.stream_token(chunk)
 
     await msg.send()
+
+@cl.on_message
+async def on_message(message: cl.Message):
+    runnable = cl.user_session.get("runnable_chain")
+    msg = cl.Message(content="")
+
+    class PostMessageHandler(BaseCallbackHandler):
+        """
+        Callback handler for handling the retriever and LLM processes.
+        Used to post the sources of the retrieved documents as a Chainlit element.
+        """
+
+        def __init__(self, msg: cl.Message):
+            BaseCallbackHandler.__init__(self)
+            self.msg = msg
+            self.sources = set()  # To store unique pairs
+
+        def on_retriever_end(self, documents, *, run_id, parent_run_id, **kwargs):
+            for d in documents:
+                source_page_pair = (d.metadata['source'], d.metadata['page'])
+                self.sources.add(source_page_pair)  # Add unique pairs to the set
+
+        def on_llm_end(self, response, *, run_id, parent_run_id, **kwargs):
+            if len(self.sources):
+                sources_text = "\n".join([f"{source}#page={page}" for source, page in self.sources])
+                self.msg.elements.append(
+                    cl.Text(name="Sources", content=sources_text, display="inline")
+                )
+
+    async with cl.Step(type="run", name="Movie Assistant"):
+        async for chunk in runnable.astream(
+            message.content,
+            config=RunnableConfig(callbacks=[
+                cl.LangchainCallbackHandler(),
+                PostMessageHandler(msg)
+            ]),
+        ):
+            await msg.stream_token(chunk)
+
+    await msg.send()
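Note: the handler added above reads the chain from cl.user_session under the "runnable_chain" key, which this diff does not set; it is presumably populated elsewhere in app.py, typically in an @cl.on_chat_start hook. Below is a minimal sketch of what that hook could look like, assuming a ChatOpenAI model (with OPENAI_API_KEY available) and a passthrough-style chain that accepts the raw message string; the real chain presumably also wires in the CSV retriever so that on_retriever_end receives documents carrying source and page metadata.

# Sketch only: the actual chain construction in app.py is not shown in this diff.
# Assumes a string input is acceptable, since on_message streams message.content
# directly into the runnable, and that an OpenAI chat model is used.
import chainlit as cl
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI  # assumption: any LangChain chat model would work


@cl.on_chat_start
async def on_chat_start():
    prompt = ChatPromptTemplate.from_template(
        "You are a movie assistant. Answer the question:\n{question}"
    )
    chain = (
        {"question": RunnablePassthrough()}  # passes the raw user message through as the question
        | prompt
        | ChatOpenAI(streaming=True)
        | StrOutputParser()
    )
    # Store the chain under the key that on_message looks up.
    cl.user_session.set("runnable_chain", chain)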