import streamlit as st
from groq import Groq
import requests

# =======================
# API Clients Initialization
# =======================
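# Requires API keys in .streamlit/secrets.toml (example values below are placeholders):
#   GROQ_API_KEY = "..."
#   HF_API_KEY = "..."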
groq_client = None
groq_status = "❌"
if "GROQ_API_KEY" in st.secrets:
    try:
        groq_client = Groq(api_key=st.secrets["GROQ_API_KEY"])
        groq_client.models.list()  # quick key/connectivity check
        groq_status = "✅"
    except Exception:
        groq_client = None
        groq_status = "❌"

huggingface_api_key = st.secrets.get("HF_API_KEY", None)
hf_status = "❌"
if huggingface_api_key:
    try:
        headers = {"Authorization": f"Bearer {huggingface_api_key}"}
        response = requests.get("https://api-inference.huggingface.co/status", headers=headers, timeout=10)
        # 200/401/403 all mean the endpoint is reachable; 401/403 do not prove the key is valid
        if response.status_code in [200, 401, 403]:
            hf_status = "✅"
    except Exception:
        hf_status = "❌"


# =======================
# Session State
# =======================
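# st.session_state.projects maps a chat name to {"tab": ..., "history": [(role, message), ...]}.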
if "projects" not in st.session_state:
    st.session_state.projects = {"Default Chat": {"tab": "generate", "history": []}}
if "active_project" not in st.session_state:
    st.session_state.active_project = "Default Chat"


# =======================
# Sidebar Project Manager
# =======================
st.sidebar.title("📂 Chats / Projects")

# Sidebar only scrolls when hovered
st.markdown(
    """
    <style>
    [data-testid="stSidebar"] {
        overflow-y: hidden !important;   /* No scroll by default */
        height: 100vh !important;
    }
    [data-testid="stSidebar"]:hover {
        overflow-y: auto !important;    /* Scroll only when hovered */
    }
    </style>
    """,
    unsafe_allow_html=True
)

with st.sidebar:
    for project_name in list(st.session_state.projects.keys()):
        cols = st.columns([4, 1])
        with cols[0]:
            if st.button(project_name, use_container_width=True):
                st.session_state.active_project = project_name
        with cols[1]:
            with st.popover("⋮"):
                new_name = st.text_input("Rename", value=project_name, key=f"rename_{project_name}")
                if st.button("Save", key=f"save_{project_name}"):
                    if new_name and new_name != project_name and new_name not in st.session_state.projects:
                        st.session_state.projects[new_name] = st.session_state.projects.pop(project_name)
                        if st.session_state.active_project == project_name:
                            st.session_state.active_project = new_name
                        st.rerun()
                if st.button("Delete", key=f"delete_{project_name}"):
                    if project_name in st.session_state.projects and len(st.session_state.projects) > 1:
                        st.session_state.projects.pop(project_name)
                        st.session_state.active_project = list(st.session_state.projects.keys())[0]
                        st.rerun()

    if st.button("➕ New Chat"):
        # Pick a chat name that does not collide with an existing one
        n = len(st.session_state.projects) + 1
        while f"Chat {n}" in st.session_state.projects:
            n += 1
        new_name = f"Chat {n}"
        st.session_state.projects[new_name] = {"tab": "generate", "history": []}
        st.session_state.active_project = new_name
        st.rerun()

# =======================
# Sidebar API Settings
# =======================
st.sidebar.markdown("---")
st.sidebar.title("⚙️ Settings")

st.sidebar.markdown(f"**Groq API Status:** {groq_status}")
st.sidebar.markdown(f"**HuggingFace API Status:** {hf_status}")

api_priority = st.sidebar.radio(
    "Choose API Priority",
    ["Groq First", "HuggingFace First"],
    index=0
)

model_choice = st.sidebar.selectbox(
    "Choose Model",
    ["llama-3.1-8b-instant", "llama-3.1-70b-versatile", "mixtral-8x7b-32768"]
)
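# Note: these are Groq model ids; the Hugging Face fallback passes the same string as an HF model id,
# which may not match an actual HF repo.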


# =======================
# Helper Functions
# =======================
def call_groq(system_prompt, chat_history):
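    """Send the system prompt plus chat history to Groq chat completions; return the reply text or None on failure."""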
    if not groq_client:
        return None
    try:
        response = groq_client.chat.completions.create(
            model=model_choice,
            messages=[{"role": "system", "content": system_prompt}]
                     + [{"role": role, "content": msg} for role, msg in chat_history],
            temperature=0.4
        )
        return response.choices[0].message.content
    except Exception as e:
        st.warning(f"⚠️ Groq API error: {e}")
        return None


def call_huggingface(system_prompt, chat_history):
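    """Query the Hugging Face Inference API with the same conversation; return generated text or None on failure."""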
    if not huggingface_api_key:
        return None
    try:
        headers = {"Authorization": f"Bearer {huggingface_api_key}"}
        payload = {
            "inputs": "\n".join([msg for _, msg in chat_history]),
            "parameters": {"temperature": 0.5, "max_new_tokens": 500}
        }
        response = requests.post(
            f"https://api-inference.huggingface.co/models/{model_choice}",
            headers=headers,
            json=payload
        )
        if response.status_code == 200:
            data = response.json()
            if isinstance(data, list) and "generated_text" in data[0]:
                return data[0]["generated_text"]
            return str(data)
        else:
            st.warning(f"โš ๏ธ Hugging Face error: {response.text}")
            return None
    except Exception as e:
        st.warning(f"โš ๏ธ Hugging Face exception: {e}")
        return None


def get_ai_response(system_prompt, chat_history):
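    """Try the provider selected by api_priority first, then fall back to the other; report failure if both return None."""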
    if api_priority == "Groq First":
        ai_msg = call_groq(system_prompt, chat_history)
        if ai_msg is None:
            ai_msg = call_huggingface(system_prompt, chat_history)
    else:
        ai_msg = call_huggingface(system_prompt, chat_history)
        if ai_msg is None:
            ai_msg = call_groq(system_prompt, chat_history)
    return ai_msg or "❌ Both APIs failed. Please check your API keys or try again later."


# =======================
# Main Title
# =======================
st.title("🤖 CodeCraft AI - Mini Copilot (Chat Edition)")
st.write("")  # Empty line after title


# =======================
# Chat UI
# =======================
def chat_ui(tab_name, system_prompt, input_key):
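    """Render the active project's chat history, splitting assistant replies on triple-backtick fences so
    code blocks render via st.code, then handle new user input for this tab."""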
    project = st.session_state.projects[st.session_state.active_project]
    chat_history = project["history"]

    st.subheader(f"{tab_name} ({st.session_state.active_project})")

    chat_container = st.container()
    with chat_container:
        for role, msg in chat_history:
            with st.chat_message(role):
                if role == "assistant" and "```" in msg:
                    parts = msg.split("```")
                    for i, part in enumerate(parts):
                        if i % 2 == 1:
                            lang, *code_lines = part.split("\n")
                            code = "\n".join(code_lines)
                            st.code(code, language=lang.strip() or "python")
                        else:
                            st.write(part)
                else:
                    st.write(msg)

    user_input = st.chat_input("Type your message...", key=input_key)
    if user_input:
        chat_history.append(("user", user_input))
        with st.spinner("🤔 Thinking..."):
            ai_msg = get_ai_response(system_prompt, chat_history)
        chat_history.append(("assistant", ai_msg))
        st.rerun()


# =======================
# Tabs
# =======================
tab1, tab2, tab3 = st.tabs(["💡 Generate Code", "🛠 Debug Code", "📘 Explain Code"])
with tab1:
    chat_ui("💡 Generate Code",
            "You are a helpful coding assistant. Generate correct code first, then a short simple explanation.",
            "generate_input")
with tab2:
    chat_ui("🛠 Debug Code",
            "You are an expert code debugger. Fix errors and give corrected code, then explain what changed and why.",
            "debug_input")
with tab3:
    chat_ui("📘 Explain Code",
            "You are a teacher that explains code in simple words. The user pastes code, and you explain step by step.",
            "explain_input")

# =======================
# Footer
# =======================
st.markdown("---")
st.caption("✨ CodeCraft may make mistakes. Always check important info.")