xicocdi committed · Commit acff654
Parent(s): a49221e

use openai_embedding
app.py CHANGED
@@ -11,7 +11,7 @@ from langchain.prompts import PromptTemplate
 from langchain.chains import ConversationalRetrievalChain
 from langchain_community.vectorstores import Qdrant
 from langchain.memory import ConversationBufferMemory
-from
+from langchain.retrievers.multi_query import MultiQueryRetriever
 
 import chainlit as cl
 
@@ -34,7 +34,7 @@ text_splitter = RecursiveCharacterTextSplitter(
 
 docs = text_splitter.split_documents(documents)
 
-embedding =
+embedding = OpenAIEmbeddings(model="text-embedding-3-small")
 
 vectorstore = Qdrant.from_documents(
     documents=docs,
@@ -79,6 +79,11 @@ llm = ChatOpenAI(
     streaming=True,
 )
 
+retriever_llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
+multiquery_retriever = MultiQueryRetriever.from_llm(
+    retriever=retriever, llm=retriever_llm
+)
+
 
 @cl.on_chat_start
 async def start_chat():
@@ -88,7 +93,7 @@ async def start_chat():
 
     qa = ConversationalRetrievalChain.from_llm(
         llm,
-        retriever=
+        retriever=multiquery_retriever,
         memory=memory,
         combine_docs_chain_kwargs={"prompt": PROMPT},
         return_source_documents=True,
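After this change the app embeds its split documents with OpenAI's text-embedding-3-small model, stores them in Qdrant, and routes each question through a MultiQueryRetriever before it reaches the ConversationalRetrievalChain. A minimal, self-contained sketch of that retrieval stack follows. The document loading, the real PROMPT template, the answering model's configuration, and the Qdrant connection details are not part of this diff, so the sample Documents, the in-memory Qdrant location, the prompt text, the gpt-4o-mini answering model, and the langchain_openai import path below are all placeholder assumptions, not the app's actual values.

# Sketch of the post-commit retrieval stack (placeholder data and prompt).
# Requires the qdrant-client package and an OPENAI_API_KEY in the environment.
from langchain.schema import Document
from langchain.prompts import PromptTemplate
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory
from langchain.retrievers.multi_query import MultiQueryRetriever
from langchain_community.vectorstores import Qdrant
from langchain_openai import ChatOpenAI, OpenAIEmbeddings  # assumed import path

# Stand-ins for the loaded and split documents (not shown in the diff).
docs = [
    Document(page_content="Qdrant stores the embedded document chunks."),
    Document(page_content="MultiQueryRetriever rephrases a question into several variants."),
]

# Embedding model introduced by this commit.
embedding = OpenAIEmbeddings(model="text-embedding-3-small")

# In-memory Qdrant collection; app.py's real connection settings are not shown.
vectorstore = Qdrant.from_documents(
    documents=docs,
    embedding=embedding,
    location=":memory:",
    collection_name="app_docs",
)
retriever = vectorstore.as_retriever()

# Small, deterministic model used only to generate alternative query phrasings.
retriever_llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
multiquery_retriever = MultiQueryRetriever.from_llm(
    retriever=retriever, llm=retriever_llm
)

# Placeholder prompt; the real PROMPT is defined elsewhere in app.py.
PROMPT = PromptTemplate(
    input_variables=["context", "question"],
    template="Answer using only this context:\n\n{context}\n\nQuestion: {question}",
)

llm = ChatOpenAI(model="gpt-4o-mini", streaming=True)  # stand-in for the app's chat model
memory = ConversationBufferMemory(
    memory_key="chat_history", return_messages=True, output_key="answer"
)

qa = ConversationalRetrievalChain.from_llm(
    llm,
    retriever=multiquery_retriever,
    memory=memory,
    combine_docs_chain_kwargs={"prompt": PROMPT},
    return_source_documents=True,
)

result = qa.invoke({"question": "What does the multi-query retriever add?"})
print(result["answer"])

The separate temperature-0 ChatOpenAI instance exists only so MultiQueryRetriever can rewrite each user question into a few alternative phrasings; the retriever merges the Qdrant hits for all of them before the conversational chain builds its answer.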