samiasohail25gmailcom committed
Commit c7de61d · verified · 1 Parent(s): b32737b

Create app.py

Files changed (1)
  1. app.py +104 -0
app.py ADDED
@@ -0,0 +1,104 @@
+ import streamlit as st
+ from transformers import pipeline
+ from sentence_transformers import SentenceTransformer
+ import faiss
+ import json
+ import random
+ import os
+
+ # st.set_page_config must be the first Streamlit call in the script,
+ # so it runs before the cached model loaders below.
+ st.set_page_config(page_title="Noor-e-Hidayat", layout="centered")
+
+ # ----------------- DATA SECTION -----------------
+
+ # Quran + Hadith sample data (you can expand this later)
+ quran_data = [
+     {
+         "source": "Surah Al-Baqarah, Ayah 2",
+         "text": "This is the Book about which there is no doubt, a guidance for those conscious of Allah."
+     },
+     {
+         "source": "Surah Al-Ikhlas, Ayah 1",
+         "text": "Say, 'He is Allah, [who is] One.'"
+     }
+ ]
+
+ hadith_data = [
+     {
+         "source": "Sahih Bukhari, Book 2, Hadith 13",
+         "text": "None of you will have faith till he wishes for his brother what he likes for himself."
+     },
+     {
+         "source": "Sahih Muslim, Book 1, Hadith 1",
+         "text": "Actions are judged by intentions."
+     }
+ ]
+
+ # ----------------- EMBEDDING + FAISS -----------------
+
+ @st.cache_resource
+ def build_index(passages):
+     model = SentenceTransformer('all-MiniLM-L6-v2')
+     texts = [p['text'] for p in passages]
+     embeddings = model.encode(texts)
+     index = faiss.IndexFlatL2(embeddings.shape[1])
+     index.add(embeddings)
+     return model, index, passages
+
+ model, index, passages = build_index(quran_data + hadith_data)
+
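+ # Note: SentenceTransformer.encode returns a float32 NumPy array, which the
+ # exact (brute-force) IndexFlatL2 above ingests directly. That is ample for
+ # this tiny sample corpus; a larger dataset would likely call for an
+ # approximate FAISS index instead.
+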
+ def retrieve_passages(query, k=3):
+     query_vec = model.encode([query])
+     scores, idxs = index.search(query_vec, k)
+     return [passages[i] for i in idxs[0]]
+
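+ # Example (hypothetical query): retrieve_passages("What does Islam teach about intentions?")
+ # returns the k nearest passages by L2 distance; index.search yields
+ # (distances, indices) arrays of shape (1, k) for a single query vector.
+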
+ # ----------------- TRANSLATION -----------------
+
+ @st.cache_resource
+ def load_translators():
+     trans_ur = pipeline("translation", model="Helsinki-NLP/opus-mt-en-ur")
+     trans_ar = pipeline("translation", model="Helsinki-NLP/opus-mt-en-ar")
+     return trans_ur, trans_ar
+
+ translator_ur, translator_ar = load_translators()
+
+ def translate(text, lang):
+     if lang == "Urdu":
+         return translator_ur(text)[0]['translation_text']
+     elif lang == "Arabic":
+         return translator_ar(text)[0]['translation_text']
+     return text
+
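+ # Each Helsinki-NLP pipeline call returns a list of dicts such as
+ # [{'translation_text': ...}]; when "English" is selected, translate() simply
+ # passes the original text through unchanged.
+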
+ # ----------------- DAILY VERSES -----------------
+
+ def get_random_ayah():
+     return random.choice(quran_data)
+
+ def get_random_hadith():
+     return random.choice(hadith_data)
+
+ # ----------------- STREAMLIT UI -----------------
+ # (Page config is set at the top of the script.)
+
+ st.title("🕊️ Noor-e-Hidayat – Your Islamic AI Assistant")
+
+ lang = st.selectbox("🌐 Choose Language", ["English", "Urdu", "Arabic"])
+ st.markdown("---")
+
+ st.subheader("🔍 Ask Noor-e-Hidayat")
+ query = st.text_input("Type your question related to Qur'an, Hadith, or Islamic guidance...")
+
+ if query:
+     results = retrieve_passages(query)
+     for r in results:
+         st.markdown(f"📖 **{r['source']}**")
+         st.write(translate(r['text'], lang))
+         st.markdown("---")
+
+ st.subheader("📜 Ayah of the Day")
+ ayah = get_random_ayah()
+ st.info(f"**{ayah['source']}**\n\n{translate(ayah['text'], lang)}")
+
+ st.subheader("📜 Hadith of the Day")
+ hadith = get_random_hadith()
+ st.success(f"**{hadith['source']}**\n\n{translate(hadith['text'], lang)}")
+
+ st.markdown("---")
+ st.caption("⚙️ Powered by Transformers, Sentence-BERT, and FAISS • Built with ❤️ using Streamlit")
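
For anyone trying the Space locally, a minimal setup sketch with the package list inferred from the imports above (the exact requirements.txt and run command are assumptions, not part of this commit):

    pip install streamlit transformers torch sentencepiece sentence-transformers faiss-cpu
    streamlit run app.py

The first run is slow while the all-MiniLM-L6-v2 encoder and the two Helsinki-NLP translation models download; st.cache_resource keeps them in memory across subsequent reruns.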