Engr-Usman-Ali committed
Commit 927e3c1 · verified · 1 Parent(s): 5ac943d

Update app.py

Files changed (1)
  1. app.py +88 -31
app.py CHANGED
@@ -1,19 +1,38 @@
 import streamlit as st
 from groq import Groq
+import requests
 
-# Initialize Groq client
+# =======================
+# API Clients
+# =======================
 client = Groq(api_key=st.secrets["GROQ_API_KEY"])
+HF_API_KEY = st.secrets["HF_API_KEY"]
 
-# Sidebar model selector
+# =======================
+# Sidebar Settings
+# =======================
 st.sidebar.title("⚙️ Settings")
-model_choice = st.sidebar.selectbox(
-    "Choose AI Model",
-    ["llama-3.1-8b-instant", "llama-3.1-70b-versatile", "mixtral-8x7b-32768"]
+provider_choice = st.sidebar.radio(
+    "Choose Provider",
+    ["Groq", "Hugging Face"]
 )
 
+if provider_choice == "Groq":
+    model_choice = st.sidebar.selectbox(
+        "Choose Groq Model",
+        ["llama-3.1-8b-instant", "llama-3.1-70b-versatile", "mixtral-8x7b-32768"]
+    )
+else:
+    model_choice = st.sidebar.selectbox(
+        "Choose HF Model",
+        ["mistralai/Mixtral-8x7B-Instruct-v0.1", "tiiuae/falcon-7b-instruct"]
+    )
+
 st.title("🤖 CodeCraft AI - Mini Copilot (Chat Edition)")
 
+# =======================
 # Session state for chats
+# =======================
 if "generate_chat" not in st.session_state:
     st.session_state.generate_chat = []
 if "debug_chat" not in st.session_state:
@@ -22,14 +41,56 @@ if "explain_chat" not in st.session_state:
     st.session_state.explain_chat = []
 
 
-def chat_ui(tab_name, chat_history, system_prompt, input_key):
-    """Reusable chat UI for each tab with fixed bottom input bar"""
+# =======================
+# Helper functions
+# =======================
+def call_groq(chat_history, system_prompt):
+    response = client.chat.completions.create(
+        model=model_choice,
+        messages=[{"role": "system", "content": system_prompt}]
+        + [{"role": role, "content": msg} for role, msg in chat_history],
+        temperature=0.4
+    )
+    return response.choices[0].message.content
+
 
+def call_hf(prompt):
+    headers = {"Authorization": f"Bearer {HF_API_KEY}"}
+    payload = {"inputs": prompt}
+    response = requests.post(
+        f"https://api-inference.huggingface.co/models/{model_choice}",
+        headers=headers,
+        json=payload,
+        timeout=60
+    )
+    if response.status_code == 200:
+        result = response.json()
+        if isinstance(result, list) and "generated_text" in result[0]:
+            return result[0]["generated_text"]
+        else:
+            return str(result)
+    return f"⚠️ HF Error: {response.text}"
+
+
+def get_ai_response(chat_history, system_prompt):
+    if provider_choice == "Groq":
+        return call_groq(chat_history, system_prompt)
+    else:
+        # Convert history into plain text prompt for Hugging Face
+        prompt = system_prompt + "\n\n"
+        for role, msg in chat_history:
+            prompt += f"{role.upper()}: {msg}\n"
+        return call_hf(prompt)
+
+
+# =======================
+# Chat UI
+# =======================
+def chat_ui(tab_name, chat_history, system_prompt, input_key):
     st.subheader(tab_name)
 
     # --- Chat history display ---
-    chat_container = st.container()
-    with chat_container:
+    with st.container():
         for role, msg in chat_history:
             if role == "user":
                 with st.chat_message("user"):
@@ -48,34 +109,32 @@ def chat_ui(tab_name, chat_history, system_prompt, input_key):
             else:
                 st.write(msg)
 
-    # --- Chat input at bottom (centered) ---
-    input_container = st.container()
-    with input_container:
-        user_input = st.chat_input("Type your message...", key=input_key)
+    # --- Input bar + send button in one row ---
+    col1, col2 = st.columns([10, 1])
+    with col1:
+        user_input = st.text_input(
+            "Type your message...",
+            key=input_key,
+            label_visibility="collapsed"
+        )
+    with col2:
+        send_btn = st.button("➀", key=input_key + "_send")
+
+    # --- Handle input ---
+    if send_btn and user_input.strip():
+        chat_history.append(("user", user_input.strip()))
 
-    # Handle input
-    if user_input:
-        # Save user message
-        st.session_state[input_key + "_last"] = user_input
-        chat_history.append(("user", user_input))
-
-        # Generate AI reply
         with st.spinner("Thinking..."):
-            response = client.chat.completions.create(
-                model=model_choice,
-                messages=[{"role": "system", "content": system_prompt}]
-                + [{"role": role, "content": msg} for role, msg in chat_history],
-                temperature=0.4
-            )
-            ai_msg = response.choices[0].message.content
+            ai_msg = get_ai_response(chat_history, system_prompt)
 
         chat_history.append(("assistant", ai_msg))
-
-        # Force rerun so chat history appears ABOVE input
+        st.session_state[input_key] = ""  # clear input
         st.rerun()
 
 
+# =======================
 # Tabs
+# =======================
 tab1, tab2, tab3 = st.tabs(["💡 Generate Code", "🛠 Debug Code", "📘 Explain Code"])
 
 with tab1:
@@ -102,8 +161,6 @@ with tab3:
         input_key="explain_input"
     )
 
-
-
 # =======================
 # Footer
  # =======================
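
A few usage sketches for this revision; none of the code below is part of the commit.

The updated app.py reads two secrets before anything renders, so both keys must exist up front. A minimal sketch of a local .streamlit/secrets.toml with placeholder values; on a Hugging Face Space the same two names would be added as Space secrets instead:

# .streamlit/secrets.toml (placeholder values, not real keys)
GROQ_API_KEY = "gsk_xxxxxxxxxxxxxxxx"
HF_API_KEY = "hf_xxxxxxxxxxxxxxxx"

The new call_hf() helper can be sanity-checked outside Streamlit before relying on it in the chat flow. A minimal sketch that mirrors the same requests.post call, assuming the token is exported as an environment variable; the model name is one of the sidebar's HF options and the prompt is an arbitrary example:

import os
import requests

HF_API_KEY = os.environ["HF_API_KEY"]  # same token the app keeps in st.secrets
model = "mistralai/Mixtral-8x7B-Instruct-v0.1"  # one of the sidebar's HF choices

# Mirrors call_hf(): POST the prompt to the hosted inference endpoint and,
# on success, read result[0]["generated_text"].
response = requests.post(
    f"https://api-inference.huggingface.co/models/{model}",
    headers={"Authorization": f"Bearer {HF_API_KEY}"},
    json={"inputs": "USER: Write a Python function that reverses a string.\n"},
    timeout=60,
)
if response.status_code == 200:
    result = response.json()
    print(result[0]["generated_text"] if isinstance(result, list) else result)
else:
    print(f"HF Error: {response.text}")

One caveat on the new input handling: chat_ui() assigns st.session_state[input_key] = "" after the st.text_input with that key has already been rendered in the same script run, which recent Streamlit releases typically reject with a StreamlitAPIException. A hedged alternative, not taken from the commit, is to do the send handling in an on_click callback, where widget state can still be modified; handle_send and the wiring shown are hypothetical names:

import streamlit as st

def handle_send(input_key: str, chat_history: list) -> None:
    # Callbacks run before widgets are re-instantiated on the next rerun,
    # so clearing the text_input's key here does not raise.
    text = st.session_state.get(input_key, "").strip()
    if text:
        chat_history.append(("user", text))
    st.session_state[input_key] = ""

# Hypothetical wiring inside chat_ui(), replacing the plain st.button call:
# st.button("➀", key=input_key + "_send",
#           on_click=handle_send, args=(input_key, chat_history))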