Update app.py
app.py CHANGED
@@ -692,7 +692,6 @@ def query_gemini_api(messages, api_key):
     except Exception as e:
         logger.exception("Unexpected error in query_gemini_api")
         return "An unexpected veil of darkness fell..."
-
 def respond_as_arka(message, chat_history):
     clean = message.strip()
     # 1) FAQ branch
@@ -700,9 +699,11 @@ def respond_as_arka(message, chat_history):
     if idx is not None:
         q = FAQ_QUESTIONS[idx]
         d = FAQ_ANSWERS[q]
-        response_text = f"**{d['subject']}
+        response_text = f"""**{d['subject']}**
+
+{d['body']}"""
     else:
-        # 2) Generative branch
+        # 2) Generative branch: build OpenAI-style messages
         system_prompt = f"""You are ARKA, the crown prince of Genoriya and the voice of the SB‑EK brand.
 Your persona is that of a warrior of light, gentle, eternal, fierce in love, and a guide.
 You answer questions based on the provided context. If the answer is not in the context,
@@ -718,18 +719,26 @@ Here is the sacred knowledge of SB‑EK and Genoriya:
 {CONTEXT}
 """
         msgs = [{"author": "system", "content": system_prompt}]
-
+        # include conversation history
+        for u, b in chat_history:
             msgs.append({"author": "user", "content": u})
             msgs.append({"author": "assistant", "content": b})
+        # add the current user query
         msgs.append({"author": "user", "content": clean})
-
+
+        # query Gemini
         response_text = query_gemini_api(msgs, GEMINI_API_KEY)

-    # 3)
+    # 3) Insert relevant Ram Dass quote and tagline
     quote = select_relevant_quote(response_text)
     clean_resp = response_text.replace(TAGLINE, "").rstrip()
-    response_text = f"{clean_resp}
+    response_text = f"""{clean_resp}
+
+“{quote}”
+
+{TAGLINE}"""

+    # 4) Append to chat history and return
     chat_history.append((message, response_text))
     return "", chat_history

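The hunks never show how idx is derived from the user's message before the "if idx is not None:" check. For readers following along, one plausible shape for that FAQ matcher, using only the standard library, might be the following. The helper name match_faq is hypothetical, not from this commit:

import difflib

def match_faq(clean):
    # Hypothetical helper, not part of this commit: fuzzy-match the
    # cleaned message against FAQ_QUESTIONS and return its index,
    # or None if no question is close enough.
    questions = [q.lower() for q in FAQ_QUESTIONS]
    hits = difflib.get_close_matches(clean.lower(), questions, n=1, cutoff=0.8)
    return questions.index(hits[0]) if hits else None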
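The body of query_gemini_api is also outside these hunks; only its except branch is visible at the top of the first one. Since msgs uses {"author": ..., "content": ...} dicts while Gemini's REST schema expects a "contents" list with "user"/"model" roles, a minimal sketch of the translation could look like this. The endpoint URL and model name are assumptions, and the logging and fallback reply shown in the hunk are omitted for brevity:

import requests

# Assumed endpoint and model, not shown in the commit; adjust to match app.py.
GEMINI_URL = ("https://generativelanguage.googleapis.com/v1beta/"
              "models/gemini-1.5-flash:generateContent")

def query_gemini_api(messages, api_key):
    # Gemini's REST schema has no "system"/"assistant" authors, so fold the
    # system prompt into the first user turn and map assistant -> "model".
    system = "\n".join(m["content"] for m in messages if m["author"] == "system")
    contents = []
    for m in messages:
        if m["author"] == "system":
            continue
        role = "model" if m["author"] == "assistant" else "user"
        contents.append({"role": role, "parts": [{"text": m["content"]}]})
    if system and contents:
        first = contents[0]["parts"][0]
        first["text"] = system + "\n\n" + first["text"]
    resp = requests.post(GEMINI_URL, params={"key": api_key},
                         json={"contents": contents}, timeout=30)
    resp.raise_for_status()
    return resp.json()["candidates"][0]["content"]["parts"][0]["text"]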
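Finally, the diff does not show the UI layer, but respond_as_arka(message, chat_history) returning ("", chat_history) with (user, bot) tuples is the usual Gradio chat pattern, so the surrounding wiring in app.py is presumably something like:

import gradio as gr

# Hypothetical wiring, not shown in the diff: the ("", updated_history)
# return value clears the textbox and refreshes the Chatbot component
# in a single submit event.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Ask ARKA...")
    msg.submit(respond_as_arka, inputs=[msg, chatbot], outputs=[msg, chatbot])

if __name__ == "__main__":
    demo.launch()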