saadawaissheikh committed on
Commit 2e5ddcc · verified · 1 Parent(s): 8da37f1

Create app.py

Files changed (1)
  1. app.py +148 -0
app.py ADDED
@@ -0,0 +1,148 @@
+ import os
+ import gradio as gr
+ import pdfplumber
+ import re
+
+ from langchain.docstore.document import Document
+ from langchain.vectorstores import FAISS
+ from langchain.embeddings.base import Embeddings
+ from sklearn.feature_extraction.text import TfidfVectorizer
+ from langchain.chains import RetrievalQA
+ from langchain.prompts import PromptTemplate
+ from langchain_openai import ChatOpenAI
+ from transformers import pipeline
+
+ # Hugging Face-compatible OpenRouter setup
+ os.environ["OPENAI_API_KEY"] = os.environ.get("OPENROUTER_API_KEY", "")
+ os.environ["OPENAI_API_BASE"] = "https://openrouter.ai/api/v1"
+ os.environ["OPENAI_API_HEADERS"] = '{"HTTP-Referer":"https://huggingface.co", "X-Title":"PDF-RAG"}'
+
+ # Translation models (global)
+ translator_en2ur = None
+ translator_ur2en = None
+
+ # Load and clean the PDF
+ def extract_clean_sections(file_path):
+     with pdfplumber.open(file_path) as pdf:
+         full_text = ""
+         for page in pdf.pages:
+             text = page.extract_text()
+             if text:
+                 text = re.sub(r'Systems Campus.*?Lahore', '', text)
+                 text = re.sub(r'E-mail:.*?systemsltd\.com', '', text)
+                 full_text += text + "\n"
+
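+     # Split on capitalized heading lines ending with ":"; the capture group makes re.split
+     # keep each title, so parts alternates between section titles and their content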
+     pattern = r"(?<=\n)([A-Z][^\n]{3,50}):"
+     parts = re.split(pattern, full_text)
+
+     docs = []
+     for i in range(1, len(parts), 2):
+         title = parts[i].strip()
+         content = parts[i + 1].strip()
+         if len(content) > 20:
+             docs.append(Document(page_content=f"{title}:\n{content}", metadata={"section": title}))
+     return docs
+
+ # TF-IDF Embedding for LangChain
+ class TfidfEmbedding(Embeddings):
+     def __init__(self):
+         self.vectorizer = TfidfVectorizer()
+
+     def fit(self, texts):
+         self.vectorizer.fit(texts)
+
+     def embed_documents(self, texts):
+         return self.vectorizer.transform(texts).toarray()
+
+     def embed_query(self, text):
+         return self.vectorizer.transform([text]).toarray()[0]
+
+ # Prompt template
+ TEMPLATE = """
+ You are a strict healthcare policy checker for Systems Ltd.
+ Always begin your answer clearly:
+ - Say "Yes, ..." if the claim is valid
+ - Say "No, ..." if the claim is not valid
+ - Say "Partially, ..." if it's conditionally allowed
+ Use the following policy information to support your answer.
+ {context}
+ Question: {question}
+ Answer:
+ """
+ custom_prompt = PromptTemplate(template=TEMPLATE, input_variables=["context", "question"])
+
+ # Initialize policy + QA chain + translation models
+ qa_chain = None
+
+ def initialize_policy():
+     global qa_chain, translator_en2ur, translator_ur2en
+     docs = extract_clean_sections("healthcare_policy.pdf")
+     texts = [doc.page_content for doc in docs]
+     embedder = TfidfEmbedding()
+     embedder.fit(texts)
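+     # Index the TF-IDF vectors in FAISS; from_texts embeds each section via embed_documents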
+     vectordb = FAISS.from_texts(texts, embedder)
+     retriever = vectordb.as_retriever()
+
+     llm = ChatOpenAI(
+         model="tngtech/deepseek-r1t2-chimera:free",
+         base_url="https://openrouter.ai/api/v1",
+         api_key=os.getenv("OPENAI_API_KEY"),
+         default_headers={
+             "HTTP-Referer": "https://huggingface.co",
+             "X-Title": "PDF-RAG"
+         },
+         temperature=0.0
+     )
+
+     qa_chain = RetrievalQA.from_chain_type(
+         llm=llm,
+         chain_type="stuff",
+         retriever=retriever,
+         return_source_documents=False,
+         chain_type_kwargs={"prompt": custom_prompt}
+     )
+
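+     # Helsinki-NLP MarianMT models translate between English and Urdu in both directions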
+     translator_en2ur = pipeline("translation", model="Helsinki-NLP/opus-mt-en-ur")
+     translator_ur2en = pipeline("translation", model="Helsinki-NLP/opus-mt-ur-en")
+
+ # QA logic with bilingual support
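+ # Gradio passes each component listed in inputs=[...] as its own positional argument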
+ def ask_policy_question(question, language):
+     if qa_chain is None:
+         return "The policy is still loading. Please wait."
+     try:
+         if language == "Urdu":
+             question_en = translator_ur2en(question)[0]['translation_text']
+             answer_en = qa_chain.run(question_en)
+             answer_ur = translator_en2ur(answer_en)[0]['translation_text']
+             return answer_ur
+         else:
+             return qa_chain.run(question)
+     except Exception as e:
+         return f"Error: {str(e)}"
+
+ # Gradio UI
+ status_text = "Loading..."
+
+ with gr.Blocks() as demo:
+     gr.Markdown("## 📋 SL HealthCare Claim Checker (Bilingual: English / اردو)")
+
+     status_box = gr.Textbox(label="Status", value=status_text, interactive=False)
+
+     with gr.Row():
+         language = gr.Radio(choices=["English", "Urdu"], label="Select Language / زبان منتخب کریں", value="English")
+         question = gr.Textbox(label="Enter your claim question / اپنا سوال درج کریں")
+         ask_btn = gr.Button("Ask / پوچھیں")
+
+     answer = gr.Textbox(label="Answer / جواب", lines=6)
+     ask_btn.click(fn=ask_policy_question, inputs=[question, language], outputs=answer)
+
+     def startup():
+         global status_text
+         initialize_policy()
+         status_text = "Policy loaded. You may now ask questions."
+         return status_text
+
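+     # demo.load runs startup once when the page opens, so the index and translators
+     # are ready before any question is submitted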
+     demo.load(fn=startup, outputs=status_box)
+
+ demo.launch()