André Oriani committed · Commit cacc27c · 1 Parent(s): 43b89fd

Adding Time to logs
app.py CHANGED
@@ -1,3 +1,4 @@
+import logging
 import chainlit as cl
 import os
 from datasets import load_dataset
@@ -14,7 +15,7 @@ from langchain_core.prompts import ChatPromptTemplate
 from langchain_openai import ChatOpenAI
 import asyncio
 
-
+logging.info("""
 =================================================================================
 STARTING
 =================================================================================
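The commit message says the point is adding time to the logs, but these hunks only introduce logging.info(...) calls; whether each record actually carries a timestamp depends on how the logging module is configured, which this diff does not show. A minimal sketch, assuming a plain logging.basicConfig call near the top of app.py is acceptable, of a format that prefixes every record with the wall-clock time:

import logging

# Hypothetical configuration, not part of this commit: %(asctime)s is what puts
# the time in front of every record emitted by logging.info(...).
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(message)s",
)

logging.info("STARTING")  # e.g. "2024-05-01 10:15:42,123 INFO STARTING"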
@@ -40,17 +41,17 @@ cached_embedder = CacheBackedEmbeddings.from_bytes_store(embedding_model, store,
 index_path = "faiss_index"
 if os.path.exists(index_path):
     vector_store = FAISS.load_local(index_path, cached_embedder, allow_dangerous_deserialization=True)
-
+    logging.info("Vector store loaded from saved index.")
 else:
     vector_store = FAISS.from_documents(chunked_documents, cached_embedder)
-
+    logging.info("Vector store created from documents.")
     vector_store.save_local(index_path)
-
+    logging.info("Vector store saved locally.")
 
 
 @cl.on_chat_start
 async def on_chat_start():
-
+    logging.info("""
 =================================================================================
 ON START CHAT
 =================================================================================
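For context on the code these new log lines annotate: the hunk header shows cached_embedder coming from CacheBackedEmbeddings.from_bytes_store, and the block either loads a persisted FAISS index or builds and saves one. A self-contained sketch of that load-or-build pattern, with a placeholder document list and an assumed LocalFileStore cache and OpenAI embedding model (the real store, model, and chunked_documents are defined outside this diff):

import logging
import os

from langchain.embeddings import CacheBackedEmbeddings
from langchain.storage import LocalFileStore
from langchain_community.vectorstores import FAISS
from langchain_core.documents import Document
from langchain_openai import OpenAIEmbeddings

# Placeholder input; the app builds its chunks from a dataset instead.
chunked_documents = [Document(page_content="example chunk")]

# Assumed embedding setup: cache embeddings on disk so repeated runs are cheap.
embedding_model = OpenAIEmbeddings(model="text-embedding-3-small")
store = LocalFileStore("./embedding_cache")
cached_embedder = CacheBackedEmbeddings.from_bytes_store(
    embedding_model, store, namespace=embedding_model.model
)

index_path = "faiss_index"
if os.path.exists(index_path):
    # Reuse the persisted index instead of re-embedding on every startup.
    vector_store = FAISS.load_local(
        index_path, cached_embedder, allow_dangerous_deserialization=True
    )
    logging.info("Vector store loaded from saved index.")
else:
    vector_store = FAISS.from_documents(chunked_documents, cached_embedder)
    logging.info("Vector store created from documents.")
    vector_store.save_local(index_path)
    logging.info("Vector store saved locally.")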
@@ -72,7 +73,7 @@ async def on_chat_start():
 
 @cl.on_message
 async def on_message(message: cl.Message):
-
+    logging.info(f"""
 =================================================================================
 ON MESSAGE: {message.content}
 =================================================================================
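The body of on_chat_start is outside this diff. As a purely hypothetical illustration of the usual Chainlit pattern that the on_message handler below relies on, the startup hook could build a retrieval chain from the vector store and stash it in the user session (the prompt text, model name, and the "chain" session key are assumptions, not taken from this app):

import logging

import chainlit as cl
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI


@cl.on_chat_start
async def on_chat_start():
    logging.info("ON START CHAT")
    # Hypothetical RAG wiring; the real prompt and model settings are not shown here.
    prompt = ChatPromptTemplate.from_template(
        "Answer using only this context:\n{context}\n\nQuestion: {question}"
    )
    retriever = vector_store.as_retriever()  # the module-level FAISS store built above
    chain = (
        {"context": retriever, "question": RunnablePassthrough()}
        | prompt
        | ChatOpenAI(model="gpt-4o-mini", streaming=True)
        | StrOutputParser()
    )
    cl.user_session.set("chain", chain)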
@@ -85,9 +86,9 @@ async def on_message(message: cl.Message):
         message.content,
         config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
     ):
-
+        logging.info(f"Received chunk <{chunk}>")
         await msg.stream_token(chunk)
 
-
+    logging.info(f"Sending message")
     await msg.send()
-
+    logging.info(f"Done with <{message.content}>")
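The last hunk logs every streamed chunk plus an end-of-message marker. A minimal sketch of the complete handler those lines sit in, assuming the chain comes out of the user session and ends in a string output parser so each chunk can be streamed as a token (the lines around the logging calls are not part of this hunk):

import logging

import chainlit as cl
from langchain_core.runnables import RunnableConfig


@cl.on_message
async def on_message(message: cl.Message):
    # Assumed: on_chat_start stored the chain under this session key.
    chain = cl.user_session.get("chain")
    msg = cl.Message(content="")

    # Stream the model output chunk by chunk into the Chainlit message.
    async for chunk in chain.astream(
        message.content,
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    ):
        logging.info(f"Received chunk <{chunk}>")
        await msg.stream_token(chunk)

    logging.info("Sending message")
    await msg.send()
    logging.info(f"Done with <{message.content}>")

Logging every chunk is verbose, but it helps pinpoint where streaming stalls when the Space hits a runtime error.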