update runnable config
app.py
CHANGED
@@ -4,7 +4,7 @@ from datasets import load_dataset
 from langchain.embeddings import CacheBackedEmbeddings
 from langchain.storage import LocalFileStore
 from langchain.text_splitter import RecursiveCharacterTextSplitter
-from langchain_core.runnables
+from langchain_core.runnables import RunnableConfig
 from langchain_core.runnables.passthrough import RunnablePassthrough
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import ChatPromptTemplate
@@ -67,10 +67,9 @@ async def on_chat_start():
 async def main(message):
     print("On Message Start")
     chain = cl.user_session.get("chain")
-    cb = cl.AsyncLangchainCallbackHandler()
     user_input = cl.Message(content="")
     await user_input.send()
 
-    async for stream in chain.astream(user_input.content,
+    async for stream in chain.astream(user_input.content, config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()])):
         print("Stream is: " + stream)
         await user_input.stream_token(stream)
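The RunnableConfig is what hands the Chainlit callback handler to LangChain for this one invocation, replacing the previously unused cb = cl.AsyncLangchainCallbackHandler() line, so intermediate chain steps are surfaced in the Chainlit UI while tokens stream. Below is a minimal sketch of the resulting handler, assuming main is registered with @cl.on_message (the decorator sits outside this diff) and that the session's chain ends in StrOutputParser so each streamed chunk is a plain string; the answer variable name, the message.content input, and the closing update() call are illustrative choices, not part of this commit.

import chainlit as cl
from langchain_core.runnables import RunnableConfig


@cl.on_message
async def main(message: cl.Message):
    # Chain built in on_chat_start and stored in the user session.
    chain = cl.user_session.get("chain")

    # Empty message that gets filled token by token in the UI.
    answer = cl.Message(content="")
    await answer.send()

    # Per-call config: the Chainlit handler receives LangChain callback
    # events (LLM/retriever starts and ends) and renders them as steps.
    config = RunnableConfig(callbacks=[cl.LangchainCallbackHandler()])

    # The diff streams user_input.content (an empty string at this point);
    # this sketch assumes the incoming message.content is the intended input.
    async for chunk in chain.astream(message.content, config=config):
        await answer.stream_token(chunk)

    await answer.update()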