# Streamlit chat UI for a local BioMistral (llama.cpp) medical assistant.
import streamlit as st
from llama_cpp import Llama

# Page chrome must be configured before any other Streamlit call.
st.set_page_config(page_title="🩺 Medical Chatbot", layout="centered")
st.title("🩺 Medical Chatbot (Doctor Mistral)")
@st.cache_resource
def load_model():
    """Load the GGUF model once and reuse it across Streamlit reruns.

    Without ``st.cache_resource`` the 7B model would be reloaded from disk
    on every widget interaction, since Streamlit re-executes the whole
    script on each rerun.

    Returns:
        Llama: a ready-to-call llama-cpp-python model instance.
    """
    return Llama(
        model_path="./models/BioMistral-7B.Q4_K_M.gguf",  # <- Update to your actual path
        n_ctx=2048,       # context window size in tokens
        n_threads=8,      # CPU threads used for inference
        n_gpu_layers=0,   # set > 0 to offload layers to GPU if available
    )
llm = load_model()

# Chat history persists across reruns via session state; initialize once.
# Each entry is a (role, message) pair, e.g. ("You", "...") / ("Doctor", "...").
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []
prompt = st.text_input("Ask me a medical question:")

# Only run inference when the user clicked Send AND typed a non-empty question.
if st.button("Send") and prompt:
    st.session_state.chat_history.append(("You", prompt))
    # Simple single-turn prompt template; stop sequences prevent the model
    # from hallucinating further dialogue turns.
    full_prompt = f"You are a helpful medical assistant.\n\nUser: {prompt}\nDoctor:"
    output = llm(full_prompt, max_tokens=512, stop=["User:", "Doctor:"])
    reply = output["choices"][0]["text"].strip()
    st.session_state.chat_history.append(("Doctor", reply))
# Show chat history (oldest first), rendered after any new turn is appended.
for role, message in st.session_state.chat_history:
    st.markdown(f"**{role}:** {message}")