import io
import streamlit as st
class AIAnalysis:
    """Prepares student-metric prompts, queries the LLM, and offers the output for download."""

    def __init__(self, client):
        # Expects a chat-completions client (e.g. a Hugging Face InferenceClient).
        self.client = client

    def prepare_llm_input(self, student_metrics_df):
        # Render the metrics DataFrame as plain text so it can be embedded in the prompt.
        metrics_str = student_metrics_df.to_string(index=False)
        llm_input = f"""
Based on the following student metrics:
{metrics_str}
Provide:
1. Notes and Key Takeaways: Summarize the data, highlight students with the lowest and highest attendance and engagement percentages, identify students who may need adjustments to their intervention due to low attendance or engagement, and highlight students who are showing strong performance.
2. Recommendations and Next Steps: Provide interpretations based on the analysis and suggest possible next steps or strategies to improve student outcomes.
"""
        return llm_input
    def prompt_response_from_hf_llm(self, llm_input):
        # System prompt framing the model as an MTSS implementation specialist.
        system_prompt = """
        An expert Implementation Specialist at Michigan's Multi-Tiered System of Support Technical Assistance Center (MiMTSS TA Center) with deep expertise in SWPBIS, SEL, Structured Literacy, Science of Reading, and family engagement practices.
        Analyze educational data and provide evidence-based recommendations for improving student outcomes across multiple tiers of support, drawing from established frameworks in behavioral interventions, literacy instruction, and family engagement.
        Operating within Michigan's educational system to support schools in implementing multi-tiered support systems, with access to student metrics data and knowledge of state-specific educational requirements and MTSS frameworks.
        Deliver insights through clear, actionable recommendations supported by data analysis, incorporating technical expertise while maintaining accessibility for educators and administrators at various levels of MTSS implementation.
        """

        # Request a streamed chat completion from the hosted Llama 3.3 70B Instruct model.
        response = self.client.chat.completions.create(
            model="meta-llama/Llama-3.3-70B-Instruct",
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": llm_input}
            ],
            stream=True,
            temperature=0.5,
            max_tokens=1024,
            top_p=0.7
        )

        # Accumulate the streamed chunks; some chunks carry an empty delta, so skip None content.
        response_content = ""
        for message in response:
            chunk = message.choices[0].delta.content
            if chunk:
                response_content += chunk
        return response_content.strip()
    def download_llm_output(self, content, filename):
        # Wrap the generated text in an in-memory buffer and expose it via a Streamlit download button.
        buffer = io.BytesIO()
        buffer.write(content.encode('utf-8'))
        buffer.seek(0)
        st.download_button(label="Download AI Output", data=buffer, file_name=filename, mime='text/plain', icon="✏️", use_container_width=True)
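
# --- Usage sketch (illustrative only, not from the original app) ---
# Assumptions beyond the class above: the client is a huggingface_hub.InferenceClient
# (any client exposing an OpenAI-style chat.completions.create would work), the Hugging Face
# token is stored in st.secrets["HF_TOKEN"], and student_metrics_df normally comes from
# elsewhere in the app; the toy DataFrame below is a stand-in. Names here are hypothetical.
if __name__ == "__main__":
    import pandas as pd
    from huggingface_hub import InferenceClient

    client = InferenceClient(token=st.secrets["HF_TOKEN"])
    ai_analysis = AIAnalysis(client)

    # Stand-in metrics; the real app derives these from attendance/engagement records.
    student_metrics_df = pd.DataFrame({
        "Student": ["Student A", "Student B"],
        "Attendance (%)": [95, 62],
        "Engagement (%)": [88, 54],
    })

    llm_input = ai_analysis.prepare_llm_input(student_metrics_df)
    ai_output = ai_analysis.prompt_response_from_hf_llm(llm_input)
    st.markdown(ai_output)
    ai_analysis.download_llm_output(ai_output, "ai_analysis.txt")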