import streamlit as st

from agents import investigator, router

st.title("Cyber Hunter!")
st.caption("🚀 A Streamlit chatbot powered by an OpenAI LLM")
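
# Seed the chat history with an assistant greeting on the first run of the session.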
if "messages" not in st.session_state:
    st.session_state["messages"] = [{"role": "assistant", "content": "How can I help you?"}]
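
# Replay the conversation history on every rerun.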
for msg in st.session_state.messages:
    st.chat_message(msg["role"]).write(msg["content"])

if prompt := st.chat_input():
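    # Record and display the user's message.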
    st.session_state.messages.append({"role": "user", "content": prompt})
    st.chat_message("user").write(prompt)
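
    # Route the prompt through the agent router; the generated title is only printed to the server console.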
    response = router.invoke(prompt)
    msg = response["output"]
    print("Title : ", router.generate_title(prompt))
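
    # Overwrite a pending assistant reply if one exists, otherwise append a new one.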
    if st.session_state.messages[-1]["role"] == "assistant":
        st.session_state.messages[-1]["content"] = msg
    else:
        st.session_state.messages.append({"role": "assistant", "content": msg})

    st.chat_message("assistant").write(msg)

if st.button("Regenerate Response") and len(st.session_state.messages) > 1:
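    # Drop the last assistant reply and re-run the most recent user prompt.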
    if st.session_state.messages[-1]["role"] == "assistant":
        st.session_state.messages.pop()
    prompt = st.session_state.messages[-1]["content"]
    with st.spinner("Searching..."):
        response = router.invoke(prompt)
        msg = response["output"]

    if st.session_state.messages[-1]["role"] == "assistant":
        st.session_state.messages[-1]["content"] = msg
    else:
        st.session_state.messages.append({"role": "assistant", "content": msg})

    st.chat_message("assistant").write(msg)

if st.button("Clear Chat"):
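    # Reset the chat history and the router's conversation memory, then re-render the fresh greeting.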
    st.session_state.messages = [{"role": "assistant", "content": "How can I help you?"}]
    router.memory.clear()
    for msg in st.session_state.messages:
        st.chat_message(msg["role"]).write(msg["content"])
    st.success("Chat cleared!")