import streamlit as st
from groq import Groq
import requests

# =======================
# API Clients
# =======================
client = Groq(api_key=st.secrets["GROQ_API_KEY"])
HF_API_KEY = st.secrets["HF_API_KEY"]
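
# The two keys above are read via st.secrets. A minimal sketch of the expected
# .streamlit/secrets.toml, with placeholder values:
#
#   GROQ_API_KEY = "..."
#   HF_API_KEY = "..."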

# =======================
# Sidebar Settings
# =======================
st.sidebar.title("⚙️ Settings")

provider_choice = st.sidebar.radio(
    "Choose Provider",
    ["Groq", "Hugging Face"]
)

if provider_choice == "Groq":
    model_choice = st.sidebar.selectbox(
        "Choose Groq Model",
        ["llama-3.1-8b-instant", "llama-3.1-70b-versatile", "mixtral-8x7b-32768"]
    )
else:
    model_choice = st.sidebar.selectbox(
        "Choose HF Model",
        ["mistralai/Mixtral-8x7B-Instruct-v0.1", "tiiuae/falcon-7b-instruct"]
    )

st.title("🤖 CodeCraft AI - Mini Copilot (Chat Edition)")

# =======================
# Session state for chats
# =======================
if "generate_chat" not in st.session_state:
    st.session_state.generate_chat = []
if "debug_chat" not in st.session_state:
    st.session_state.debug_chat = []
if "explain_chat" not in st.session_state:
    st.session_state.explain_chat = []
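
# Each history entry is a (role, text) tuple, e.g. ("user", "Write a fizzbuzz in Python")
# or ("assistant", "Here is the code ..."); the helpers below rely on this shape.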

# =======================
# Helper functions
# =======================
def call_groq(chat_history, system_prompt):
    # Prepend the system prompt, then replay the whole chat history.
    response = client.chat.completions.create(
        model=model_choice,
        messages=[{"role": "system", "content": system_prompt}]
        + [{"role": role, "content": msg} for role, msg in chat_history],
        temperature=0.4
    )
    return response.choices[0].message.content
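
# Illustrative call (the prompt text is just an example):
#   call_groq([("user", "Write a haiku about Python")], "You are a helpful poet.")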

def call_hf(prompt):
    # Send the flattened prompt to the Hugging Face Inference API for the selected model.
    headers = {"Authorization": f"Bearer {HF_API_KEY}"}
    payload = {"inputs": prompt}
    response = requests.post(
        f"https://api-inference.huggingface.co/models/{model_choice}",
        headers=headers,
        json=payload,
        timeout=60
    )
    if response.status_code == 200:
        result = response.json()
        if isinstance(result, list) and "generated_text" in result[0]:
            return result[0]["generated_text"]
        else:
            return str(result)
    return f"⚠️ HF Error: {response.text}"

def get_ai_response(chat_history, system_prompt):
    if provider_choice == "Groq":
        return call_groq(chat_history, system_prompt)
    else:
        # Convert history into a plain-text prompt for Hugging Face
        prompt = system_prompt + "\n\n"
        for role, msg in chat_history:
            prompt += f"{role.upper()}: {msg}\n"
        return call_hf(prompt)
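
# For example, a history of [("user", "hi"), ("assistant", "hello")] is flattened to:
#   "<system prompt>\n\nUSER: hi\nASSISTANT: hello\n"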

# =======================
# Chat UI
# =======================
def chat_ui(tab_name, chat_history, system_prompt, input_key):
    st.subheader(tab_name)

    # --- Chat history display ---
    with st.container():
        for role, msg in chat_history:
            if role == "user":
                with st.chat_message("user"):
                    st.write(msg)
            else:
                with st.chat_message("assistant"):
                    if "```" in msg:  # detect code blocks
                        parts = msg.split("```")
                        for i, part in enumerate(parts):
                            if i % 2 == 1:  # inside a code block
                                lang, *code_lines = part.split("\n")
                                code = "\n".join(code_lines)
                                st.code(code, language=lang if lang else "python")
                            else:
                                st.write(part)
                    else:
                        st.write(msg)
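
    # Example of the split above: 'Here is code:\n```python\nprint(1)\n```\nDone.'.split("```")
    # yields ['Here is code:\n', 'python\nprint(1)\n', '\nDone.']; odd indices are code segments.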

    # --- Input bar + send button in one row ---
    # Clear the input box if the previous run asked for it; a widget's keyed value
    # can only be written before the widget is instantiated on the current run.
    if st.session_state.get(input_key + "_clear"):
        st.session_state[input_key] = ""
        st.session_state[input_key + "_clear"] = False

    col1, col2 = st.columns([10, 1])
    with col1:
        user_input = st.text_input(
            "Type your message...",
            key=input_key,
            label_visibility="collapsed"
        )
    with col2:
        send_btn = st.button("➤", key=input_key + "_send")

    # --- Handle input ---
    if send_btn and user_input.strip():
        chat_history.append(("user", user_input.strip()))
        with st.spinner("Thinking..."):
            ai_msg = get_ai_response(chat_history, system_prompt)
        chat_history.append(("assistant", ai_msg))
        st.session_state[input_key + "_clear"] = True  # clear the input on the next run
        st.rerun()

# =======================
# Tabs
# =======================
tab1, tab2, tab3 = st.tabs(["💡 Generate Code", "🐞 Debug Code", "📘 Explain Code"])

with tab1:
    chat_ui(
        "💡 Generate Code",
        st.session_state.generate_chat,
        "You are a helpful coding assistant. Generate correct code first, then a short simple explanation.",
        input_key="generate_input"
    )

with tab2:
    chat_ui(
        "🐞 Debug Code",
        st.session_state.debug_chat,
        "You are an expert code debugger. Fix errors and give corrected code, then explain what changed and why in simple terms.",
        input_key="debug_input"
    )

with tab3:
    chat_ui(
        "📘 Explain Code",
        st.session_state.explain_chat,
        "You are a teacher that explains code in simple words. The user pastes code, and you explain step by step.",
        input_key="explain_input"
    )

# =======================
# Footer
# =======================
st.markdown("---")
st.caption("🚀 Built for Hackathons using Streamlit + Hugging Face + Groq")