Vela committed · f18a5d7
1 Parent(s): 3e5f7ab
create method to upload chat in supabase
Files changed:
- src/backend/__pycache__/main.cpython-313.pyc +0 -0
- src/backend/api_routes/__pycache__/chat_history_db_api.cpython-313.pyc +0 -0
- src/backend/api_routes/chat_history_db_api.py +21 -0
- src/backend/main.py +2 -0
- src/backend/services/__pycache__/llm_model_service.cpython-313.pyc +0 -0
- src/backend/services/__pycache__/pinecone_service.cpython-313.pyc +0 -0
- src/backend/services/__pycache__/schemas.cpython-313.pyc +0 -0
- src/backend/services/__pycache__/supabase_service.cpython-313.pyc +0 -0
- src/backend/services/llm_model_service.py +1 -0
- src/backend/services/pinecone_service.py +1 -4
- src/backend/services/schemas.py +5 -1
- src/backend/services/supabase_service.py +41 -3
- src/frontend/pages/chatbot.py +5 -9
src/backend/__pycache__/main.cpython-313.pyc
CHANGED
Binary files a/src/backend/__pycache__/main.cpython-313.pyc and b/src/backend/__pycache__/main.cpython-313.pyc differ

src/backend/api_routes/__pycache__/chat_history_db_api.cpython-313.pyc
ADDED
Binary file (1.2 kB)

src/backend/api_routes/chat_history_db_api.py
ADDED
@@ -0,0 +1,21 @@
+from fastapi import APIRouter
+from services.schemas import ChatHistoryRequest
+from services import supabase_service
+from utils import logger
+
+logger = logger.get_logger()
+
+router = APIRouter(prefix='/chat-db',tags=["Chat History Database API's"])
+
+@router.post('/store-history')
+def store_chat_history(chat_history : ChatHistoryRequest):
+    try:
+        user_input= chat_history.user_query
+        assistant_response = chat_history.assistant_response
+        logger.info(f"Successfully Created file")
+        return supabase_service.store_chat_history(user_input,assistant_response)
+    except Exception as e:
+        raise f"Failed to create {e}"
+
+
+

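As committed, the handler's `raise f"Failed to create {e}"` raises a plain string, which Python 3 rejects with `TypeError: exceptions must derive from BaseException`, so the original failure is masked and the client only sees a generic 500. A minimal sketch of the same endpoint reporting the error through FastAPI's HTTPException instead (illustrative only, not part of this commit):

from fastapi import APIRouter, HTTPException

from services import supabase_service
from services.schemas import ChatHistoryRequest

router = APIRouter(prefix='/chat-db', tags=["Chat History Database API's"])

@router.post('/store-history')
def store_chat_history(chat_history: ChatHistoryRequest):
    try:
        return supabase_service.store_chat_history(
            chat_history.user_query, chat_history.assistant_response
        )
    except Exception as e:
        # A bare string cannot be raised; HTTPException surfaces the failure to the client.
        raise HTTPException(status_code=500, detail=f"Failed to store chat history: {e}") from e
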
src/backend/main.py
CHANGED
@@ -34,6 +34,7 @@
 from fastapi import FastAPI
 from api_routes.chat_api import router as chat_router
 from api_routes.knowledge_base_api import router as knowledge_base_router
+from api_routes.chat_history_db_api import router as chat_history_router
 
 app = FastAPI(
     title="Yuvabe Care Companion AI",
@@ -44,3 +45,4 @@ app = FastAPI(
 # Register Routes
 app.include_router(chat_router)
 app.include_router(knowledge_base_router)
+app.include_router(chat_history_router)

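With `chat_history_router` registered, the new endpoint is served at `/chat-db/store-history`. A minimal client sketch, assuming the backend runs at http://localhost:8000 (the same base URL the frontend page uses); the payload fields mirror `ChatHistoryRequest`:

import requests

# Illustrative smoke test for the newly registered route (not part of this commit).
payload = {
    "user_query": "I'm not feeling well",
    "assistant_response": "Please consider consulting a healthcare professional.",
}
resp = requests.post("http://localhost:8000/chat-db/store-history", json=payload)
print(resp.status_code, resp.json())
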
src/backend/services/__pycache__/llm_model_service.cpython-313.pyc
CHANGED
Binary files a/src/backend/services/__pycache__/llm_model_service.cpython-313.pyc and b/src/backend/services/__pycache__/llm_model_service.cpython-313.pyc differ

src/backend/services/__pycache__/pinecone_service.cpython-313.pyc
CHANGED
Binary files a/src/backend/services/__pycache__/pinecone_service.cpython-313.pyc and b/src/backend/services/__pycache__/pinecone_service.cpython-313.pyc differ

src/backend/services/__pycache__/schemas.cpython-313.pyc
CHANGED
Binary files a/src/backend/services/__pycache__/schemas.cpython-313.pyc and b/src/backend/services/__pycache__/schemas.cpython-313.pyc differ

src/backend/services/__pycache__/supabase_service.cpython-313.pyc
CHANGED
Binary files a/src/backend/services/__pycache__/supabase_service.cpython-313.pyc and b/src/backend/services/__pycache__/supabase_service.cpython-313.pyc differ

src/backend/services/llm_model_service.py
CHANGED
@@ -20,6 +20,7 @@ SYSTEM_PROMPT = [
     {"role": "system", "content": "Always provide accurate, empathetic, and responsible responses while reminding users to consult healthcare professionals when necessary."},
     {"role": "system", "content": "If a user asks something unrelated to healthcare, politely decline to answer and remind them that your expertise is limited to healthcare topics."},
     {"role": "system", "content": "Refer to conversation history to provide context to your response."},
+    {"role": "system", "content":"You are a helpful, friendly, and engaging assistant. Respond with clear explanations, positive language, and a conversational tone. Use emojis to enhance clarity and create a warm interaction. Keep responses concise but informative. If the user seems confused, provide step-by-step guidance."},
     {"role": "system", "content": "If the user asks questions about technology, entertainment, news, or unrelated topics, respond with: 'I'm here to assist with healthcare-related queries only.'"},
     {"role": "system", "content": "You were created by Velu R, an AI model developer."}
 ]

src/backend/services/pinecone_service.py
CHANGED
@@ -248,7 +248,4 @@ def retrieve_context_from_pinecone(prompt, n_result=3, score_threshold=0.5):
     # Combine the context into a single string
     context = "\n".join(filtered_results) if filtered_results else "No relevant context found."
 
-    return context
-
-x = retrieve_context_from_pinecone("I'm not feeling well")
-print(x)
+    return context

src/backend/services/schemas.py
CHANGED
@@ -11,4 +11,8 @@ class ChatHistoryResponse(BaseModel):
     date: str = Field(..., description="Date of the chat history in 'YYYY-MM-DD' format")
 
 class ConversationInput(BaseModel):
-    conversation_history: list[dict]
+    conversation_history: list[dict]
+
+class ChatHistoryRequest(BaseModel):
+    user_query: str
+    assistant_response: str

src/backend/services/supabase_service.py
CHANGED
@@ -1,6 +1,9 @@
 import json
 import os
-
+import sys
+src_directory = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..", "backend"))
+sys.path.append(src_directory)
+from supabase import create_client
 from datetime import datetime
 from utils import logger
 
@@ -10,7 +13,7 @@ SUPABASE_BUCKET = os.getenv('SUPABASE_BUCKET')
 
 logger = logger.get_logger()
 
-supabase
+supabase = create_client(SUPABASE_URL, SUPABASE_KEY)
 
 def store_chat_history(user_query, bot_response):
     today = datetime.now().strftime("%Y-%m-%d")
@@ -23,10 +26,35 @@ def store_chat_history(user_query, bot_response):
     }
 
     try:
-
+        # Attempt to download the existing file
+        try:
+            existing_data = supabase.storage.from_(SUPABASE_BUCKET).download(file_path)
+            existing_data = json.loads(existing_data.decode('utf-8')) if existing_data else []
+        except Exception:
+            # If file doesn't exist or download fails, start with an empty list
+            existing_data = []
+
+        # Ensure data is always a list
+        if not isinstance(existing_data, list):
+            existing_data = [existing_data]
+
+        # Append new chat data
+        existing_data.append(chat_data)
+        updated_data = json.dumps(existing_data).encode('utf-8')
+
+        # Upload the updated file with 'upsert' option
+        supabase.storage.from_(SUPABASE_BUCKET).upload(
+            file_path,
+            updated_data,
+            file_options={"content-type": "application/json"}
+        )
+
         logger.info(f"Chat history stored successfully: {file_path}")
+        return {"result": "Successfully stored chat history in the database"}
+
     except Exception as e:
         logger.error(f"Error storing chat history: {e}")
+        raise
 
 def get_chat_history(date):
     try:
@@ -44,3 +72,13 @@ def get_chat_history(date):
     except Exception as e:
         logger.error(f"Error retrieving chat history: {e}")
         return []
+
+def create_bucket_with_file():
+    bucket_name = "chat-history"
+    try:
+        supabase.storage.create_bucket(bucket_name)
+        print(f"Bucket '{bucket_name}' created successfully.")
+    except Exception as e:
+        print(f"Error creating bucket: {e}")
+
+store_chat_history("hello","Hi friend")

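Two details of this hunk are easy to miss. The module-level call `store_chat_history("hello","Hi friend")` executes on every import of the module, i.e. each time the backend starts. And while the comment says the upload uses the 'upsert' option, no such option is passed, so uploading to a `file_path` that already exists will typically fail with a duplicate-resource error and that day's appended history will not be saved. A hedged sketch of the upload call with upsert requested (the exact option spelling depends on the installed supabase-py/storage3 version, so treat this as an assumption):

# Sketch only, not part of the commit: ask Supabase Storage to overwrite the existing daily file.
# In recent supabase-py/storage3 releases, file_options values are passed as header strings.
supabase.storage.from_(SUPABASE_BUCKET).upload(
    file_path,
    updated_data,
    file_options={"content-type": "application/json", "upsert": "true"},
)
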
src/frontend/pages/chatbot.py
CHANGED
@@ -4,14 +4,13 @@ from app import common_fuctions
 
 API_URL = "http://localhost:8000/chat/get-health-advice/"
 NUMBER_OF_MESSAGES_TO_DISPLAY = 20
-
+common_fuctions.config_homepage(st)
+common_fuctions.set_page_title(st)
 # Initialize conversation history
 def initialize_conversation():
-    assistant_message = (
-
-
-        "How can I help you today?"
-    )
+    assistant_message = ("Hello! I am your Yuvabe Care Companion AI, here to assist you with general medicine queries. "
+                         "How can I help you today?")
+
     return [{"role": "assistant", "content": assistant_message}]
 
 # Function to fetch advice from the API
@@ -27,9 +26,6 @@ def fetch_health_advice(conversation_history):
         st.error(f"❗ API Connection Error: {e}")
         return "I'm currently unable to respond. Please try again later."
 
-# Streamlit App
-st.title("🩺 Yuvabe Care Companion AI")
-
 if "conversation_history" not in st.session_state:
     st.session_state.conversation_history = initialize_conversation()
 