|
import streamlit as st |
|
import os, pickle, faiss, numpy as np |
|
from groq import Groq |
|
from sentence_transformers import SentenceTransformer |
|
from langdetect import detect |
|
import requests |
|
from datetime import datetime |
|
from rapidfuzz import process |
|
import json |
|
|
|
|
|
# Flag image displayed inline next to the "Disaster Alerts" section header.
PAK_FLAG_URL = "https://flagcdn.com/w320/pk.png"

# Groq chat-completion client. The key comes from the environment; if it is
# unset this is None and the client fails on first request, not at startup.
_groq_key = os.environ.get("GROQ_API_KEY")
client = Groq(api_key=_groq_key)
|
|
|
@st.cache_resource
def load_data():
    """Load the FAISS vector index and its pickled text chunks.

    Cached by Streamlit so the files are read only once per server process.

    Returns:
        A ``(faiss_index, chunks)`` tuple where ``chunks`` is the sequence of
        text passages whose embeddings populate the index.
    """
    faiss_index = faiss.read_index("resqbot_index.faiss")
    with open("resqbot_chunks.pkl", "rb") as handle:
        text_chunks = pickle.load(handle)
    return faiss_index, text_chunks
|
|
|
@st.cache_resource
def load_model():
    """Return the cached sentence-embedding model used to vectorise queries."""
    model_name = 'all-MiniLM-L6-v2'
    return SentenceTransformer(model_name)
|
|
|
# Load the embedding model eagerly at import time (cached via st.cache_resource).
embed_model = load_model()
|
|
|
def detect_language_fallback(text):
    """Classify ``text`` as English ("en") or Urdu ("ur").

    Tries ``langdetect`` first; when it reports a language other than en/ur
    (common for short or mixed-script input) or raises, falls back to a
    Unicode-script check: any character in the Arabic block (U+0600–U+06FF,
    which covers the Urdu alphabet) means Urdu, otherwise English.

    Args:
        text: The user's raw query string.

    Returns:
        "ur" or "en". Defaults to "en" when nothing Urdu-like is found.
    """
    def _looks_urdu(s):
        # Urdu is written in the Arabic script block.
        return any("\u0600" <= ch <= "\u06FF" for ch in s)

    try:
        lang = detect(text)
    except Exception:
        # Was a bare `except:` returning "en" unconditionally — that both
        # swallowed SystemExit/KeyboardInterrupt and misclassified Urdu text
        # whenever langdetect raised (e.g. on very short input). Apply the
        # same script-based fallback here.
        return "ur" if _looks_urdu(text) else "en"
    if lang in ("en", "ur"):
        return lang
    return "ur" if _looks_urdu(text) else "en"
|
|
|
# --- Page header and one-time data load -------------------------------------
st.title("🤖 ResQBot – Disaster QA (Urdu + English)")

# Fetch the FAISS index and text chunks (cached) behind a visible spinner.
with st.spinner("🛡️ Loading ResQBot..."):
    index, chunks = load_data()

# Responsive CSS grid used to lay out the alert cards below.
st.markdown("""
<style>
.grid-section {
    display: grid;
    grid-template-columns: repeat(auto-fit, minmax(320px, 1fr));
    gap: 1rem;
}
</style>
""", unsafe_allow_html=True)

# Section header with the Pakistan flag inlined as raw HTML.
st.markdown(f"### 🌌 <img src='{PAK_FLAG_URL}' width='30' style='vertical-align:middle;'> Disaster Alerts", unsafe_allow_html=True)

# Open the grid container; it is closed after the alert sections below.
st.markdown('<div class="grid-section">', unsafe_allow_html=True)
|
|
|
|
|
# Static earthquake feed (hard-coded placeholder data).
quakes = [
    {"mag": 5.4, "place": "Quetta, Balochistan", "time": "2025-07-27 03:45 AM"},
    {"mag": 4.8, "place": "Peshawar, KPK", "time": "2025-07-26 11:30 PM"},
]
st.markdown("#### Earthquake Alerts")
if not quakes:
    st.success("✅ No notable earthquakes in Pakistan.")
else:
    # One warning card per event, plus an escalation banner when several hit.
    for quake in quakes:
        st.warning(f"Magnitude {quake['mag']} quake in {quake['place']} at {quake['time']}")
    if len(quakes) > 1:
        st.error("⚠️ Increased seismic activity detected.")
|
|
|
|
|
# Static flood forecast: (date, discharge in m³/s, qualitative risk level).
floods = [
    ("2025-07-28", 9200, "High"),
    ("2025-07-29", 7800, "Medium"),
    ("2025-07-30", 4000, "Low"),
]
st.markdown("#### Flood Forecast")
if not floods:
    st.error("⚠️ Unable to fetch flood data at the moment.")
elif any(risk in ("High", "Medium") for _, _, risk in floods):
    # Show the full forecast whenever at least one day carries elevated risk.
    for day, discharge, risk in floods:
        st.info(f"{day}: Discharge {discharge:.1f} m³/s – Risk level: {risk}")
else:
    st.success("✅ No significant flood risk detected in Pakistan.")
|
|
|
# Close the grid container opened before the alert sections.
st.markdown('</div>', unsafe_allow_html=True)

# --- Question-answering section ----------------------------------------------
st.markdown("---")
st.markdown("### 💬 Ask About Disaster Preparedness")
st.markdown("You can ask about earthquake, flood, shelter advice or precaution/preparations etc. in English or Urdu.")
# Free-text question box; accepts English or Urdu input.
query = st.text_input("❓ Your question (English یا اردو/or):")
|
if query:
    with st.spinner("🤖 Thinking..."):
        # Retrieval parameters, named instead of scattered magic numbers.
        TOP_K = 3                 # number of nearest chunks to stuff into the prompt
        MAX_CONTEXT_CHARS = 4000  # rough cap to keep the prompt within token limits

        # Embed the query and fetch the TOP_K most similar chunks.
        emb = embed_model.encode([query])
        # Distances are unused (was bound to an unused local `D`); keep only indices.
        _, I = index.search(np.array(emb), k=TOP_K)
        context = "\n".join(chunks[i] for i in I[0])
        if len(context) > MAX_CONTEXT_CHARS:
            # NOTE(review): truncation may cut mid-sentence; acceptable for a
            # best-effort context window.
            context = context[:MAX_CONTEXT_CHARS] + "..."

        # Answer in the same language as the question (Urdu or English).
        lang = detect_language_fallback(query)
        if lang == "ur":
            prompt = f"""اس سیاق و سباق کی بنیاد پر اردو میں کم از کم 3-4 لائنوں میں جواب دیں۔:\n\n{context}\n\nسوال: {query}"""
        else:
            prompt = f"""Answer in at least 3-4 lines and to the point in English based on this context:\n\n{context}\n\nQuestion: {query}"""

        # Single-turn completion against the Groq-hosted Llama model.
        resp = client.chat.completions.create(
            messages=[{"role": "user", "content": prompt}],
            model="llama-3.1-8b-instant"
        )
    st.markdown("### 💬 ResQBot Answer:")
    st.write(resp.choices[0].message.content)
|
|