Engr-Usman-Ali committed on
Commit
f80eb2f
Β·
verified Β·
1 Parent(s): 62c9e13

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +111 -71
app.py CHANGED
@@ -3,35 +3,55 @@ from groq import Groq
3
  import requests
4
 
5
  # =======================
6
- # API Clients
7
  # =======================
8
- client = Groq(api_key=st.secrets["GROQ_API_KEY"])
9
- HF_API_KEY = st.secrets["HF_API_KEY"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
 
11
  # =======================
12
  # Sidebar Settings
13
  # =======================
14
  st.sidebar.title("βš™οΈ Settings")
15
- provider_choice = st.sidebar.radio(
16
- "Choose Provider",
17
- ["Groq", "Hugging Face"]
 
 
 
 
18
  )
19
 
20
- if provider_choice == "Groq":
21
- model_choice = st.sidebar.selectbox(
22
- "Choose Groq Model",
23
- ["llama-3.1-8b-instant", "llama-3.1-70b-versatile", "mixtral-8x7b-32768"]
24
- )
25
- else:
26
- model_choice = st.sidebar.selectbox(
27
- "Choose HF Model",
28
- ["mistralai/Mixtral-8x7B-Instruct-v0.1", "tiiuae/falcon-7b-instruct"]
29
- )
30
 
31
  st.title("πŸ€– CodeCraft AI - Mini Copilot (Chat Edition)")
32
 
 
33
  # =======================
34
- # Session state for chats
35
  # =======================
36
  if "generate_chat" not in st.session_state:
37
  st.session_state.generate_chat = []
@@ -42,55 +62,80 @@ if "explain_chat" not in st.session_state:
42
 
43
 
44
  # =======================
45
- # Helper functions
46
  # =======================
47
- def call_groq(chat_history, system_prompt):
48
- response = client.chat.completions.create(
49
- model=model_choice,
50
- messages=[{"role": "system", "content": system_prompt}]
51
- + [{"role": role, "content": msg} for role, msg in chat_history],
52
- temperature=0.4
53
- )
54
- return response.choices[0].message.content
55
-
56
-
57
- def call_hf(prompt):
58
- headers = {"Authorization": f"Bearer {HF_API_KEY}"}
59
- payload = {"inputs": prompt}
60
- response = requests.post(
61
- f"https://api-inference.huggingface.co/models/{model_choice}",
62
- headers=headers,
63
- json=payload,
64
- timeout=60
65
- )
66
- if response.status_code == 200:
67
- result = response.json()
68
- if isinstance(result, list) and "generated_text" in result[0]:
69
- return result[0]["generated_text"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
70
  else:
71
- return str(result)
72
- return f"⚠️ HF Error: {response.text}"
 
 
 
73
 
74
 
75
- def get_ai_response(chat_history, system_prompt):
76
- if provider_choice == "Groq":
77
- return call_groq(chat_history, system_prompt)
78
- else:
79
- # Convert history into plain text prompt for Hugging Face
80
- prompt = system_prompt + "\n\n"
81
- for role, msg in chat_history:
82
- prompt += f"{role.upper()}: {msg}\n"
83
- return call_hf(prompt)
 
 
 
 
 
84
 
85
 
86
  # =======================
87
- # Chat UI
88
  # =======================
89
  def chat_ui(tab_name, chat_history, system_prompt, input_key):
 
 
90
  st.subheader(tab_name)
91
 
92
  # --- Chat history display ---
93
- with st.container():
 
94
  for role, msg in chat_history:
95
  if role == "user":
96
  with st.chat_message("user"):
@@ -109,26 +154,20 @@ def chat_ui(tab_name, chat_history, system_prompt, input_key):
109
  else:
110
  st.write(msg)
111
 
112
- # --- Input bar + send button in one row ---
113
- col1, col2 = st.columns([10, 1])
114
- with col1:
115
- user_input = st.text_input(
116
- "Type your message...",
117
- key=input_key,
118
- label_visibility="collapsed"
119
- )
120
- with col2:
121
- send_btn = st.button("➀", key=input_key + "_send")
122
 
123
- # --- Handle input ---
124
- if send_btn and user_input.strip():
125
- chat_history.append(("user", user_input.strip()))
 
126
 
127
- with st.spinner("Thinking..."):
128
- ai_msg = get_ai_response(chat_history, system_prompt)
 
129
 
130
  chat_history.append(("assistant", ai_msg))
131
- st.session_state[input_key] = "" # clear input
132
  st.rerun()
133
 
134
 
@@ -161,8 +200,9 @@ with tab3:
161
  input_key="explain_input"
162
  )
163
 
 
164
  # =======================
165
  # Footer
166
  # =======================
167
  st.markdown("---")
168
- st.caption("πŸš€ Built for Hackathons using Streamlit + Hugging Face + Groq")
 
3
import requests

# =======================
# API Clients Initialization
# =======================
# Probe each provider once at startup so the sidebar can show a live
# availability badge. A failed probe leaves the client unset and the
# status marker at "❌"; the app keeps running on the other provider.
groq_client = None
groq_status = "❌"
if "GROQ_API_KEY" in st.secrets:
    try:
        groq_client = Groq(api_key=st.secrets["GROQ_API_KEY"])
        # Cheap authenticated call to confirm the key actually works.
        groq_client.models.list()
        groq_status = "✅"
    except Exception:
        groq_client = None
        groq_status = "❌"

huggingface_api_key = st.secrets.get("HF_API_KEY", None)
hf_status = "❌"
if huggingface_api_key:
    try:
        headers = {"Authorization": f"Bearer {huggingface_api_key}"}
        # Fix: give the probe a timeout — requests has no default, so a
        # slow/unreachable endpoint would hang the whole Streamlit run.
        response = requests.get(
            "https://api-inference.huggingface.co/status",
            headers=headers,
            timeout=10,
        )
        # 401/403 still prove the API endpoint exists and responded.
        if response.status_code in (200, 401, 403):
            hf_status = "✅"
    except Exception:
        hf_status = "❌"
 
32
  # =======================
33
  # Sidebar Settings
34
  # =======================
35
  st.sidebar.title("βš™οΈ Settings")
36
+ st.sidebar.markdown(f"**Groq API Status:** {groq_status}")
37
+ st.sidebar.markdown(f"**HuggingFace API Status:** {hf_status}")
38
+
39
+ api_priority = st.sidebar.radio(
40
+ "Choose API Priority",
41
+ ["Groq First", "HuggingFace First"],
42
+ index=0
43
  )
44
 
45
+ model_choice = st.sidebar.selectbox(
46
+ "Choose Model",
47
+ ["llama-3.1-8b-instant", "llama-3.1-70b-versatile", "mixtral-8x7b-32768"]
48
+ )
 
 
 
 
 
 
49
 
50
st.title("🤖 CodeCraft AI - Mini Copilot (Chat Edition)")


# =======================
# Session State
# =======================
# Each tab keeps its own independent message history, created lazily.
if "generate_chat" not in st.session_state:
    st.session_state["generate_chat"] = []
 
62
 
63
 
64
# =======================
# Helper Functions
# =======================
def call_groq(system_prompt, chat_history):
    """Ask the Groq chat API for a completion.

    Returns the assistant's reply text, or None when the client is not
    configured or the request fails (a warning is shown in the UI so the
    caller can fall back to the other provider).
    """
    if not groq_client:
        return None
    try:
        # System prompt first, then the running conversation.
        conversation = [{"role": "system", "content": system_prompt}]
        conversation.extend(
            {"role": role, "content": msg} for role, msg in chat_history
        )
        completion = groq_client.chat.completions.create(
            model=model_choice,
            messages=conversation,
            temperature=0.4,
        )
        return completion.choices[0].message.content
    except Exception as e:
        st.warning(f"⚠️ Groq API error: {e}")
        return None
82
+
83
+
84
def call_huggingface(system_prompt, chat_history):
    """Call the Hugging Face Inference API with error handling.

    Returns the generated text, or None when the API key is missing or
    the request fails (a warning is shown in the UI so the caller can
    fall back to the other provider).
    """
    if not huggingface_api_key:
        return None
    try:
        headers = {"Authorization": f"Bearer {huggingface_api_key}"}
        # Fix: include the system prompt in the flattened prompt — the
        # parameter was previously accepted but silently ignored.
        prompt_lines = [system_prompt] + [msg for _, msg in chat_history]
        payload = {
            "inputs": "\n".join(prompt_lines),
            "parameters": {"temperature": 0.5, "max_new_tokens": 500}
        }
        response = requests.post(
            f"https://api-inference.huggingface.co/models/{model_choice}",
            headers=headers,
            json=payload,
            timeout=60  # fix: restore timeout so the app can't hang forever
        )
        if response.status_code == 200:
            data = response.json()
            # Typical success shape: [{"generated_text": "..."}]; guard
            # against an empty list or a non-dict first element.
            if (
                isinstance(data, list)
                and data
                and isinstance(data[0], dict)
                and "generated_text" in data[0]
            ):
                return data[0]["generated_text"]
            return str(data)
        else:
            st.warning(f"⚠️ Hugging Face error: {response.text}")
            return None
    except Exception as e:
        st.warning(f"⚠️ Hugging Face exception: {e}")
        return None
110
 
111
 
112
def get_ai_response(system_prompt, chat_history):
    """Query the preferred provider, falling back to the other on failure.

    A provider signals failure by returning None; if both fail, a
    user-facing error string is returned instead.
    """
    if api_priority == "Groq First":
        providers = (call_groq, call_huggingface)
    else:  # HuggingFace First
        providers = (call_huggingface, call_groq)

    for provider in providers:
        reply = provider(system_prompt, chat_history)
        if reply is not None:
            return reply

    return "❌ Both APIs failed. Please check your API keys or try again later."
126
 
127
 
128
  # =======================
129
+ # Chat UI Function
130
  # =======================
131
  def chat_ui(tab_name, chat_history, system_prompt, input_key):
132
+ """Reusable chat UI for each tab with fixed bottom input bar"""
133
+
134
  st.subheader(tab_name)
135
 
136
  # --- Chat history display ---
137
+ chat_container = st.container()
138
+ with chat_container:
139
  for role, msg in chat_history:
140
  if role == "user":
141
  with st.chat_message("user"):
 
154
  else:
155
  st.write(msg)
156
 
157
+ # --- Chat input ---
158
+ user_input = st.chat_input("Type your message...", key=input_key)
 
 
 
 
 
 
 
 
159
 
160
+ if user_input:
161
+ # Save user input
162
+ st.session_state[input_key + "_last"] = user_input
163
+ chat_history.append(("user", user_input))
164
 
165
+ # --- Get AI reply with fallback ---
166
+ with st.spinner("πŸ€” Thinking..."):
167
+ ai_msg = get_ai_response(system_prompt, chat_history)
168
 
169
  chat_history.append(("assistant", ai_msg))
170
+
171
  st.rerun()
172
 
173
 
 
200
  input_key="explain_input"
201
  )
202
 
203
+
204
# =======================
# Footer
# =======================
# Horizontal rule separating the footer from the chat tabs above.
st.markdown("---")
st.caption("✨ CodeCraft may make mistakes, so double-check important info.")