Update app.py
app.py CHANGED

Previous version (diff hunks; lines removed by this update are prefixed with "-"):

@@ -1,23 +1,39 @@
import os
import gradio as gr
from gtts import gTTS
from datetime import datetime
from openpyxl import Workbook, load_workbook
-import re

-#
try:
    import google.generativeai as genai
-except
    import subprocess
    subprocess.check_call(["pip", "install", "google-generativeai"])
    import google.generativeai as genai

-
-
model_name = "models/gemini-1.5-flash-latest"
-model = genai.GenerativeModel(model_name)

MENU = {
    "Cheeseburger": 5.99,
    "Fries": 2.99,
@@ -27,7 +43,6 @@ MENU = {
    "Salad": 6.99
}

-chat_history = []
order = []
customer_name = ""

@@ -53,10 +68,10 @@ def save_to_excel(name, items):
    wb.save(EXCEL_FILE)
    return order_id

-# ==========
def clean_text(text):
-    text = re.sub(r"\*\*(.*?)\*\*", r"\1", text)  #
-    text = re.sub(r"Bot\s*:\s*", "", text, flags=re.IGNORECASE)  #
    return text.strip()

def speak(text, filename="response.mp3"):
@@ -65,44 +80,15 @@ def speak(text, filename="response.mp3"):
    tts.save(filename)
    return filename

-# ========== Gemini
def generate_response(user_input):
-    global customer_name, order
-
-    menu_description = "\n".join([f"{item}: ${price}" for item, price in MENU.items()])
-    order_summary = ", ".join([f"{qty} x {item}" for item, qty in order]) if order else "No items yet"
-
-    context = f"""
-You are a friendly, helpful restaurant assistant at 'Systaurant'.
-
-MENU:
-{menu_description}
-
-Customer name: {customer_name}
-Current order: {order_summary}
-
-Instructions:
-- Ask for name if not known.
-- Show menu if requested.
-- Extract item names and quantities from messages.
-- Say 'Order summary' and ask 'Confirm?' when user is done.
-- Respond only as the bot, no need to prefix with "Bot:".
-- Keep tone human, natural, and friendly.
-
-Conversation:
-"""
-    for u, b in chat_history:
-        context += f"\nCustomer: {u}\n{b}"
-
-    context += f"\nCustomer: {user_input}\n"
-
    try:
-        response =
-        return response
    except Exception as e:
        return f"❌ Error from Gemini: {str(e)}"

-
def handle_chat(user_input):
    global customer_name, order

@@ -112,7 +98,7 @@ def handle_chat(user_input):
    if "my name is" in user_input.lower():
        customer_name = user_input.split("my name is")[-1].strip().split()[0].title()

-    #
    for item in MENU:
        if item.lower() in user_input.lower():
            qty = 1
@@ -122,17 +108,16 @@ def handle_chat(user_input):
                break
            order.append((item, qty))

-    # Confirm
    if "confirm" in user_input.lower() or "yes" in user_input.lower():
        if customer_name and order:
            order_id = save_to_excel(customer_name, order)
            bot_reply += f"\n✅ Your order ID is {order_id}. Thank you for ordering from Saad's Restaurant!"

-    chat_history.append((user_input, bot_reply))
    audio_file = speak(bot_reply)
    return bot_reply, audio_file

-
gr.Interface(
    fn=handle_chat,
    inputs=gr.Textbox(label="🎤 You", placeholder="Type your order..."),
@@ -140,9 +125,7 @@ gr.Interface(
        gr.Textbox(label="🤖 Bot Response"),
        gr.Audio(label="🔊 Speaking", autoplay=True)
    ],
-    title="🍔 SysTaurant Voice Bot",
-    description="Talk to the bot and it will handle your order.",
    theme="soft"
).launch(share=True)
-
-#hello# ###
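The generate_response body removed above rebuilt the Gemini prompt by hand on every turn: it re-serialized MENU, the running order, and the chat_history list into one context string. In the updated file that bookkeeping moves into LangChain's ConversationBufferMemory, which the ConversationChain reads before each call and appends to afterwards. A minimal sketch of that memory object on its own (assuming the langchain package is installed; the sample strings are illustrative, not taken from app.py):

    from langchain.memory import ConversationBufferMemory

    memory = ConversationBufferMemory(return_messages=True)
    # One completed turn: the chain stores both sides automatically after each predict() call.
    memory.save_context({"input": "my name is Saad"}, {"output": "Nice to meet you, Saad!"})
    print(memory.load_memory_variables({}))
    # {'history': [HumanMessage(content='my name is Saad'), AIMessage(content='Nice to meet you, Saad!')]}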
Updated version (added lines are prefixed with "+"; "..." marks unchanged lines omitted between hunks):

import os
+import re
import gradio as gr
from gtts import gTTS
from datetime import datetime
from openpyxl import Workbook, load_workbook

+# --- Install required packages on first run (optional) ---
try:
    import google.generativeai as genai
+except ImportError:
    import subprocess
    subprocess.check_call(["pip", "install", "google-generativeai"])
    import google.generativeai as genai

+try:
+    from langchain.chat_models import ChatGoogleGenerativeAI
+    from langchain.chains import ConversationChain
+    from langchain.memory import ConversationBufferMemory
+except ImportError:
+    import subprocess
+    subprocess.check_call(["pip", "install", "langchain"])
+    from langchain.chat_models import ChatGoogleGenerativeAI
+    from langchain.chains import ConversationChain
+    from langchain.memory import ConversationBufferMemory
+
+# ========== API Setup ==========
+genai.configure(api_key="AIzaSyBJFmohAmhmqXQlM3fVxj8MLegVb26kyJk")  # Replace with your key
model_name = "models/gemini-1.5-flash-latest"

+# LangChain LLM + Memory
+llm = ChatGoogleGenerativeAI(model=model_name, google_api_key=os.getenv("GOOGLE_API_KEY", "AIzaSyBJFmohAmhmqXQlM3fVxj8MLegVb26kyJk"))
+memory = ConversationBufferMemory(return_messages=True)
+conversation = ConversationChain(llm=llm, memory=memory)
+
+# ========== Menu ==========
MENU = {
    "Cheeseburger": 5.99,
    "Fries": 2.99,
    ...
    "Salad": 6.99
}

order = []
customer_name = ""

...
    wb.save(EXCEL_FILE)
    return order_id

+# ========== Voice ==========
def clean_text(text):
+    text = re.sub(r"\*\*(.*?)\*\*", r"\1", text)  # Remove bold symbols
+    text = re.sub(r"Bot\s*:\s*", "", text, flags=re.IGNORECASE)  # Remove "Bot:" label
    return text.strip()

def speak(text, filename="response.mp3"):
    ...
    tts.save(filename)
    return filename

+# ========== Gemini + LangChain Response ==========
def generate_response(user_input):
    try:
+        response = conversation.predict(input=user_input)
+        return response
    except Exception as e:
        return f"❌ Error from Gemini: {str(e)}"

+# ========== Handle Chat ==========
def handle_chat(user_input):
    global customer_name, order

    ...
    if "my name is" in user_input.lower():
        customer_name = user_input.split("my name is")[-1].strip().split()[0].title()

+    # Detect order items
    for item in MENU:
        if item.lower() in user_input.lower():
            qty = 1
            ...
                break
            order.append((item, qty))

+    # Confirm and save
    if "confirm" in user_input.lower() or "yes" in user_input.lower():
        if customer_name and order:
            order_id = save_to_excel(customer_name, order)
            bot_reply += f"\n✅ Your order ID is {order_id}. Thank you for ordering from Saad's Restaurant!"

    audio_file = speak(bot_reply)
    return bot_reply, audio_file

+# ========== Gradio UI ==========
gr.Interface(
    fn=handle_chat,
    inputs=gr.Textbox(label="🎤 You", placeholder="Type your order..."),
    ...
        gr.Textbox(label="🤖 Bot Response"),
        gr.Audio(label="🔊 Speaking", autoplay=True)
    ],
+    title="🍔 SysTaurant Voice Bot (LangChain + Gemini)",
+    description="Talk to the bot and it will handle your order with memory.",
    theme="soft"
).launch(share=True)
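Two notes on the new setup block, with a hedged alternative sketch. The commit hardcodes the Gemini API key in app.py, so it is published with the Space; reading it from an environment variable or a Space secret avoids that. Also, the added import "from langchain.chat_models import ChatGoogleGenerativeAI" may fail on current LangChain releases, where this class ships in the separate langchain-google-genai package, so the "pip install langchain" fallback above might not be enough on a fresh environment. A sketch of the same wiring under those assumptions (the package name, import path, and GOOGLE_API_KEY variable are the assumptions here, not something this commit uses):

    # pip install langchain langchain-google-genai
    import os
    from langchain_google_genai import ChatGoogleGenerativeAI
    from langchain.chains import ConversationChain
    from langchain.memory import ConversationBufferMemory

    llm = ChatGoogleGenerativeAI(
        model="models/gemini-1.5-flash-latest",        # same model name the diff uses
        google_api_key=os.environ["GOOGLE_API_KEY"],   # set as a secret instead of hardcoding
    )
    conversation = ConversationChain(llm=llm, memory=ConversationBufferMemory(return_messages=True))
    print(conversation.predict(input="Hello, what's on the menu?"))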
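The two substitutions added to clean_text do what their new comments say: strip Markdown bold markers and a leading "Bot:" label from the reply text. A quick worked example (the sample string is illustrative):

    import re

    text = "Bot: **Hello Saad!** One Cheeseburger added."
    text = re.sub(r"\*\*(.*?)\*\*", r"\1", text)                 # 'Bot: Hello Saad! One Cheeseburger added.'
    text = re.sub(r"Bot\s*:\s*", "", text, flags=re.IGNORECASE)  # 'Hello Saad! One Cheeseburger added.'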
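Throughout the file the running order stays a list of (item, qty) tuples keyed against the MENU price dictionary. None of the shown hunks total the bill, but the structures support it directly; a small sketch (the menu subset and quantities are illustrative):

    MENU = {"Cheeseburger": 5.99, "Fries": 2.99}
    order = [("Cheeseburger", 2), ("Fries", 1)]

    total = sum(MENU[item] * qty for item, qty in order)
    print(f"${total:.2f}")  # $14.97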
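save_to_excel(name, items) is called when the customer confirms and returns an order_id, but its body is unchanged by this commit, so only wb.save(EXCEL_FILE) and return order_id appear as context lines. A purely hypothetical sketch of a compatible implementation built on the already-imported openpyxl helpers (the EXCEL_FILE value, sheet layout, and order-ID scheme are all assumptions, not the real hidden code):

    import os
    from datetime import datetime
    from openpyxl import Workbook, load_workbook

    EXCEL_FILE = "orders.xlsx"  # hypothetical; the real constant lives in the unchanged lines

    def save_to_excel(name, items):
        # Reuse the existing workbook, or create one with a header row on first run.
        if os.path.exists(EXCEL_FILE):
            wb = load_workbook(EXCEL_FILE)
            ws = wb.active
        else:
            wb = Workbook()
            ws = wb.active
            ws.append(["Order ID", "Customer", "Item", "Qty", "Time"])
        order_id = datetime.now().strftime("%Y%m%d%H%M%S")  # hypothetical ID scheme
        for item, qty in items:
            ws.append([order_id, name, item, qty, datetime.now().isoformat(timespec="seconds")])
        wb.save(EXCEL_FILE)
        return order_id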
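With a valid Gemini key configured, the flow can be smoke-tested without the Gradio UI (for example in a notebook that defines the same functions), since handle_chat returns the reply text plus the path of the MP3 that speak writes:

    reply, audio_path = handle_chat("my name is Saad, one Cheeseburger and Fries please")
    print(reply)       # bot text
    print(audio_path)  # "response.mp3" by default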