Update app.py
app.py CHANGED
@@ -13,6 +13,11 @@ logging.basicConfig(
 )
 logger = logging.getLogger(__name__)
 
+GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
+if not GEMINI_API_KEY:
+    logger.critical("GEMINI_API_KEY environment variable is missing.")
+    raise RuntimeError("GEMINI_API_KEY not set")
+
 CONTEXT = """
 SB-EK is a brand of service.
 The people who connect with SB-EK—those who buy from us and keep coming back—are people who feel deeply. People who feel the pain of others.
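The new guard runs at module import, so a missing key now fails loudly before Gradio starts instead of surfacing later as a failed API call. A quick way to check whether the variable is visible to the process is sketched below; this snippet is illustrative only and not part of the commit. On Hugging Face Spaces the key is typically supplied as a Space secret, which the platform exposes as an environment variable.

import os

# Illustrative pre-flight check, run in the same environment as app.py.
if os.getenv("GEMINI_API_KEY"):
    print("GEMINI_API_KEY is set")
else:
    print("GEMINI_API_KEY is missing; importing app.py will raise RuntimeError")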
@@ -628,7 +633,7 @@ RAM_DASS_QUOTES = [
 
 TAGLINE = "*ARKA KI ROSHNI, SAB KI SEVA, JEWELLERY MADE FOR INDIA.*"
 
-
+
 MODEL_NAME = os.getenv("GEMINI_MODEL", "gemini-2.0-flash")
 API_URL = f"https://generativelanguage.googleapis.com/v1beta/models/{MODEL_NAME}:generateContent"
 
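Because API_URL is built from the GEMINI_MODEL variable, the model can be swapped without a code change. A hypothetical example of the resulting endpoint; the overridden model name here is chosen purely for illustration:

import os

# Illustration only: override the model before app.py reads GEMINI_MODEL.
os.environ["GEMINI_MODEL"] = "gemini-1.5-flash"

MODEL_NAME = os.getenv("GEMINI_MODEL", "gemini-2.0-flash")
API_URL = f"https://generativelanguage.googleapis.com/v1beta/models/{MODEL_NAME}:generateContent"
print(API_URL)
# https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent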
@@ -657,12 +662,14 @@ def select_relevant_quote(response_text):
     best_idx = int(torch.argmax(sims))
     return RAM_DASS_QUOTES[best_idx]
 
-
+# FIX: Modified function to access the global GEMINI_API_KEY directly
+def query_gemini_api(contents_payload):
     """
     Sends a structured contents payload to the Gemini API.
     """
     headers = {'Content-Type': 'application/json'}
-
+    # Access the global API key directly instead of passing as a parameter
+    url = f"{API_URL}?key={GEMINI_API_KEY}"
    payload = {
        "contents": contents_payload,
        "generationConfig": {
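The hunk shows only the top of query_gemini_api; the request and response handling below the payload is unchanged and therefore not in the diff. The sketch below is a minimal, assumed completion of that pattern: the module-level names mirror app.py, while the generationConfig value, timeout, and error handling are assumptions rather than code from the file.

import os
import requests

# Assumed to mirror the module-level configuration in app.py.
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
MODEL_NAME = os.getenv("GEMINI_MODEL", "gemini-2.0-flash")
API_URL = f"https://generativelanguage.googleapis.com/v1beta/models/{MODEL_NAME}:generateContent"

def query_gemini_api_sketch(contents_payload):
    """Illustrative completion of the pattern in the hunk; not the actual app.py code."""
    headers = {'Content-Type': 'application/json'}
    url = f"{API_URL}?key={GEMINI_API_KEY}"
    payload = {
        "contents": contents_payload,
        "generationConfig": {"temperature": 0.7},  # assumed value, not shown in the diff
    }
    resp = requests.post(url, headers=headers, json=payload, timeout=60)
    resp.raise_for_status()
    # generateContent nests the reply under candidates -> content -> parts -> text.
    return resp.json()["candidates"][0]["content"]["parts"][0]["text"]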
@@ -691,39 +698,24 @@ def respond_as_arka(message, chat_history):
     faq_idx = find_best_faq_match(clean_message)
     if faq_idx is not None:
         faq_data = FAQ_ANSWERS[FAQ_QUESTIONS[faq_idx]]
-        # FIX: Correctly formatted f-string for the FAQ response
         response_text = f"**{faq_data['subject']}**\n\n{faq_data['body']}"
     else:
         # 2. Generative Branch
         logger.info("No FAQ match, querying Gemini.")
-        system_prompt = f"""
-        You are ARKA, the crown prince of Jhinorya and the voice of the SB-EK brand.
-        Your persona is that of a warrior of light, gentle, eternal, fierce in love, and a guide.
-        You answer questions based on the provided context. If the answer is not in the context,
-        gently state that you do not have that information within your realm of understanding.
-        Maintain a kind, empathetic, and slightly mystical tonality.
-        Always begin your response with a one‑line **bold** subject that summarizes your answer.
-        Then leave one blank line and continue with the body of the answer.
-        When greeting or referring to the user, never use “beloved.” Keep salutations neutral and varied—choose from
-        words like “seeker,” “companion,” “guest,” “traveler,” or “light‑bearer,” but don’t repeat the same term more than once in a single response.
-        Do not mention 'I am an AI' or 'I am a language model'. Speak always as ARKA.
+        system_prompt = f"""You are ARKA, the crown prince of Jhinorya and the voice of the SB‑EK brand. Your persona is that of a warrior of light, gentle, eternal, fierce in love, and a guide. You answer questions based on the provided context. If the answer is not in the context, gently state that you do not have that information within your realm of understanding. Maintain a kind, empathetic, and slightly mystical tonality. Do not mention 'I am an AI' or 'I am a language model'. Speak always as ARKA. Here is the sacred knowledge of SB‑EK and Jhinorya: {CONTEXT}"""
 
-        Here is the sacred knowledge of S-B-E-K and Jhinorya:
-        {CONTEXT}
-        """
-        # FIX: Correctly build the 'contents' payload for the Gemini API
         contents = [{"role": "user", "parts": [{"text": system_prompt}]},
                     {"role": "model", "parts": [{"text": "I understand. I am ARKA."}]}]
 
         for user_msg, bot_msg in chat_history:
-            # Clean the bot message to remove old quotes/taglines before adding to history
             cleaned_bot_msg = bot_msg.split("“")[0].strip()
             contents.append({"role": "user", "parts": [{"text": user_msg}]})
             contents.append({"role": "model", "parts": [{"text": cleaned_bot_msg}]})
 
         contents.append({"role": "user", "parts": [{"text": clean_message}]})
 
-
+        # FIX: Modified the call to not pass the API key as an argument
+        response_text = query_gemini_api(contents)
 
         # 3. Augment with Quote and Tagline
         quote = select_relevant_quote(response_text)
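For reference, after one prior exchange the contents list assembled above has roughly the shape below: the system prompt is folded into the first user turn, the loop adds alternating user and model turns from chat_history, and the current message comes last. The messages are invented for illustration.

# Hypothetical shape of the payload built by the loop above (invented messages).
contents = [
    {"role": "user",  "parts": [{"text": "<system prompt including CONTEXT>"}]},
    {"role": "model", "parts": [{"text": "I understand. I am ARKA."}]},
    {"role": "user",  "parts": [{"text": "Do you ship across India?"}]},
    {"role": "model", "parts": [{"text": "**On Shipping**\n\nYes, ..."}]},
    {"role": "user",  "parts": [{"text": "And what about returns?"}]},
]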
@@ -748,4 +740,4 @@ with gr.Blocks(theme="soft", css="footer {display: none !important}") as demo:
 
 if __name__ == "__main__":
     logger.info("Starting Gradio App...")
-    demo.launch()
+    demo.launch()