Update app.py
app.py CHANGED
@@ -473,7 +473,7 @@ def retrieval_blocks(
         persist_directory = current_dir + "/" + vectorstore_name,
         embedding_function=embeddings
     )
-
+
 
     # 6. base retriever: Vector store-backed retriever
     base_retriever = Vectorstore_backed_retriever(
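This hunk only toggles whitespace on the blank line after the Chroma constructor. For context, below is a minimal sketch of the surrounding pattern: reopening the persisted Chroma collection with the same embedding function and building the base retriever on top of it. The Vectorstore_backed_retriever helper is defined elsewhere in app.py and is not shown in this diff, so the sketch substitutes the stock as_retriever() call; the vectorstore name and the embedding model are illustrative placeholders.

# Sketch only: reopen a persisted Chroma collection and expose it as a retriever.
# Assumes chromadb, langchain-community and sentence-transformers are installed;
# vectorstore_name and the embedding model are placeholders, not the app's values.
import os

from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma

current_dir = os.path.dirname(os.path.abspath(__file__))
vectorstore_name = "Vit_All_HF_Embeddings"  # placeholder collection directory

embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# Reopen the on-disk collection with the embedding function it was built with.
vector_store = Chroma(
    persist_directory=current_dir + "/" + vectorstore_name,
    embedding_function=embeddings,
)

# Vector store-backed retriever (stand-in for the app's Vectorstore_backed_retriever helper).
base_retriever = vector_store.as_retriever(
    search_type="similarity",
    search_kwargs={"k": 4},
)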
@@ -660,14 +660,14 @@ def answer_template(language="english"):
|
|
660 |
answer_prompt = ChatPromptTemplate.from_template(answer_template())
|
661 |
|
662 |
|
663 |
-
|
664 |
# invoke the ChatPromptTemplate
|
665 |
answer_prompt.invoke(
|
666 |
{"question":"plaese give more details about DTC, including its use cases and implementation.",
|
667 |
"context":[Document(page_content="DTC use cases include...")], # the context is a list of retrieved documents.
|
668 |
"chat_history":memory.chat_memory}
|
669 |
)
|
670 |
-
|
671 |
|
672 |
|
673 |
|
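The two added `"""` lines wrap the example answer_prompt.invoke(...) call in a string literal, so the snippet is kept for reference but no longer runs at import time. As a standalone illustration of what that call does, here is a sketch; the template string is a placeholder standing in for the app's answer_template(), and an empty list stands in for memory.chat_memory.

# Sketch only: how ChatPromptTemplate.invoke() fills the answer prompt.
# The template below is a placeholder for answer_template(); the empty chat
# history stands in for memory.chat_memory.
from langchain_core.documents import Document
from langchain_core.prompts import ChatPromptTemplate

answer_prompt = ChatPromptTemplate.from_template(
    "Answer the question using the context and the chat history.\n"
    "Context: {context}\n"
    "Chat history: {chat_history}\n"
    "Question: {question}"
)

# invoke() returns a ChatPromptValue whose messages have the variables filled in.
prompt_value = answer_prompt.invoke(
    {
        "question": "please give more details about DTC, including its use cases and implementation.",
        "context": [Document(page_content="DTC use cases include...")],  # list of retrieved documents
        "chat_history": [],
    }
)
print(prompt_value.to_string())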
@@ -778,7 +778,7 @@ def create_ConversationalRetrievalChain(
         rephrase the follow up question to be a standalone question, in its original language.\n\n
         Chat History:\n{chat_history}\n
         Follow Up Input: {question}\n
-        Standalone question:""")
+        Standalone question: {question}""")
 
     # 2. Define the answer_prompt
     # Pass the standalone question + the chat history + the context (retrieved documents) to the `LLM` wihch will answer
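After this edit the condense-question template ends with `Standalone question: {question}`, so the raw follow-up is interpolated after the label rather than the completion being left entirely to the model. Below is a self-contained sketch of that template; only its last four lines appear in the hunk, so the opening sentence is an assumption modeled on LangChain's stock condense-question prompt, and the chain wiring inside create_ConversationalRetrievalChain is not reproduced here.

# Sketch only: the standalone-question (condense-question) prompt as it reads after
# this change. The first sentence is assumed; only the last four lines are in the hunk.
from langchain_core.prompts import PromptTemplate

standalone_question_prompt = PromptTemplate(
    input_variables=["chat_history", "question"],
    template="""Given the following conversation and a follow up question,
rephrase the follow up question to be a standalone question, in its original language.

Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question: {question}""",
)

# Example rendering with a hypothetical history and follow-up.
print(
    standalone_question_prompt.format(
        chat_history="Human: What is DTC?\nAI: DTC stands for ...",
        question="What are its main use cases?",
    )
)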