Commit: e50bd38
Parent(s): 6532d26
add compare trigger
Files changed:
- app.py (+13, -1)
- chatbot/__pycache__/core.cpython-310.pyc (binary)
- chatbot/__pycache__/llm.cpython-310.pyc (binary)
- chatbot/__pycache__/memory.cpython-310.pyc (binary)
- chatbot/__pycache__/metadata_selfquery.cpython-310.pyc (binary)
- chatbot/__pycache__/prompts.cpython-310.pyc (binary)
- chatbot/__pycache__/retrieval.cpython-310.pyc (binary)
- chatbot/core.py (+6, -1)
app.py
CHANGED
@@ -1,5 +1,5 @@
 import streamlit as st
-from chatbot.core import get_chat_response
+from chatbot.core import get_chat_response, memory, cmp
 
 # Cấu hình giao diện Streamlit
 st.set_page_config(page_title="RangDong Chatbot", layout="wide")
@@ -29,8 +29,10 @@ for message in st.session_state.messages:
         st.markdown(message["content"])
 
 # Ô nhập liệu chat
+
 user_input = st.chat_input("Nhập tin nhắn của bạn...")
 
+
 if user_input:
     # Hiển thị tin nhắn của user
     st.session_state.messages.append({"role": "user", "content": user_input})
@@ -44,3 +46,13 @@ if user_input:
     st.session_state.messages.append({"role": "assistant", "content": response})
     with st.chat_message("assistant"):
         st.markdown(response)
+
+if memory.cache:
+    cmp_button = st.button("So sánh các sản phẩm tương đồng")
+
+    if cmp_button:
+        response = cmp()
+
+        st.session_state.messages.append({"role": "assistant", "content": response})
+        with st.chat_message("assistant"):
+            st.markdown(response)
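Taken together, the app.py changes wire a compare trigger into the Streamlit UI: once memory.cache is non-empty, a button labeled "So sánh các sản phẩm tương đồng" ("Compare similar products") is rendered, and clicking it appends the result of cmp() to the chat history like any assistant turn. A minimal sketch of the resulting tail of app.py after this commit, with English comments added for clarity; how memory.cache gets populated is an assumption inferred from its usage here, since chatbot/memory.py is not part of this diff:

import streamlit as st

from chatbot.core import get_chat_response, memory, cmp

# ... chat history rendering and user_input handling as in the hunks above ...

# Compare trigger: only shown once memory.cache is truthy, i.e. (assumed)
# after at least one product lookup has been cached by chatbot.core.
if memory.cache:
    cmp_button = st.button("So sánh các sản phẩm tương đồng")  # "Compare similar products"

    if cmp_button:
        # cmp() builds a comparison answer over the cached products (see chatbot/core.py below).
        response = cmp()

        # Store and render the comparison like a normal assistant message.
        st.session_state.messages.append({"role": "assistant", "content": response})
        with st.chat_message("assistant"):
            st.markdown(response)

Note that Streamlit re-executes app.py on every interaction, so this pattern relies on memory being module-level state inside chatbot.core that survives reruns within the same process.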
chatbot/__pycache__/core.cpython-310.pyc
CHANGED
Binary files a/chatbot/__pycache__/core.cpython-310.pyc and b/chatbot/__pycache__/core.cpython-310.pyc differ

chatbot/__pycache__/llm.cpython-310.pyc
CHANGED
Binary files a/chatbot/__pycache__/llm.cpython-310.pyc and b/chatbot/__pycache__/llm.cpython-310.pyc differ

chatbot/__pycache__/memory.cpython-310.pyc
CHANGED
Binary files a/chatbot/__pycache__/memory.cpython-310.pyc and b/chatbot/__pycache__/memory.cpython-310.pyc differ

chatbot/__pycache__/metadata_selfquery.cpython-310.pyc
CHANGED
Binary files a/chatbot/__pycache__/metadata_selfquery.cpython-310.pyc and b/chatbot/__pycache__/metadata_selfquery.cpython-310.pyc differ

chatbot/__pycache__/prompts.cpython-310.pyc
CHANGED
Binary files a/chatbot/__pycache__/prompts.cpython-310.pyc and b/chatbot/__pycache__/prompts.cpython-310.pyc differ

chatbot/__pycache__/retrieval.cpython-310.pyc
CHANGED
Binary files a/chatbot/__pycache__/retrieval.cpython-310.pyc and b/chatbot/__pycache__/retrieval.cpython-310.pyc differ
chatbot/core.py
CHANGED
@@ -49,7 +49,6 @@ qa_chain2 = MyCustomMemoryRetrievalChain(
 # output_key="result"
 # )
 
-
 def get_chat_response(user_input: str) -> str:
     restriction = classify_query(user_input.strip())
 
@@ -68,3 +67,9 @@ def get_chat_response(user_input: str) -> str:
     memory.add(user_input, response["result"])
     print(memory.restrict)
     return response["result"]
+
+
+def cmp():
+    prompt = "So sánh các sản phẩm trên"
+    response = qa_chain2({"question": prompt + memory.restrict , "memory": memory.get_memory_text()})
+    return response["result"]
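The new cmp() helper reuses qa_chain2 with a fixed prompt, "So sánh các sản phẩm trên" ("Compare the above products"), concatenated with memory.restrict, and passes the serialized conversation via memory.get_memory_text(). The memory module itself is not touched by this commit, so the sketch below is only an assumed shape of its interface, inferred from how memory.cache, memory.restrict, memory.add() and memory.get_memory_text() are used in app.py and core.py; the real chatbot/memory.py may differ:

# Hypothetical chatbot/memory.py shape, inferred from usage in this commit.
class ChatMemory:
    def __init__(self, max_turns: int = 5):
        self.cache: list[tuple[str, str]] = []  # recent (question, answer) pairs; truthy once anything is stored
        self.restrict = ""                      # restriction string appended to follow-up prompts
        self.max_turns = max_turns

    def add(self, question: str, answer: str) -> None:
        # Keep only the most recent turns so the prompt stays bounded.
        self.cache.append((question, answer))
        self.cache = self.cache[-self.max_turns:]

    def get_memory_text(self) -> str:
        # Flatten cached turns into a single text block for the retrieval chain.
        return "\n".join(f"User: {q}\nAssistant: {a}" for q, a in self.cache)

memory = ChatMemory()

Under that assumption, cmp() only works because memory is module-level state in chatbot.core that app.py imports and shares within a single process; after a restart the cache is empty, so the compare button stays hidden until a new query repopulates it.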