saadawaissheikh commited on
Commit
05cbe4d
·
verified ·
1 Parent(s): 6238318

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -8
app.py CHANGED
@@ -12,12 +12,12 @@ from langchain.prompts import PromptTemplate
12
  from langchain_openai import ChatOpenAI
13
 
14
 
15
- # ✅ OpenRouter API setup (use Hugging Face Secret)
16
  os.environ["OPENAI_API_KEY"] = os.environ["OPENROUTER_API_KEY"]
17
  os.environ["OPENAI_API_BASE"] = "https://openrouter.ai/api/v1"
18
  os.environ["OPENAI_API_HEADERS"] = '{"HTTP-Referer":"https://huggingface.co", "X-Title":"PDF-RAG"}'
19
 
20
- # ✅ Load and clean the policy PDF
21
  def extract_clean_sections(file_path):
22
  with pdfplumber.open(file_path) as pdf:
23
  full_text = ""
@@ -39,7 +39,7 @@ def extract_clean_sections(file_path):
39
  docs.append(Document(page_content=f"{title}:\n{content}", metadata={"section": title}))
40
  return docs
41
 
42
- # ✅ TF-IDF Embeddings
43
  class TfidfEmbedding(Embeddings):
44
  def __init__(self):
45
  self.vectorizer = TfidfVectorizer()
@@ -53,7 +53,7 @@ class TfidfEmbedding(Embeddings):
53
  def embed_query(self, text):
54
  return self.vectorizer.transform([text]).toarray()[0]
55
 
56
- # ✅ Prompt Template (no emojis, no markdown)
57
  TEMPLATE = """
58
  You are a strict healthcare policy checker for Systems Ltd.
59
 
@@ -72,7 +72,7 @@ Answer:
72
  custom_prompt = PromptTemplate(template=TEMPLATE, input_variables=["context", "question"])
73
 
74
 
75
- # ✅ Load the policy at startup
76
  def initialize_policy():
77
  global qa_chain
78
  docs = extract_clean_sections("healthcare_policy.pdf")
@@ -101,7 +101,7 @@ def initialize_policy():
101
  chain_type_kwargs={"prompt": custom_prompt}
102
  )
103
 
104
- # ✅ Run QA on user question
105
  def ask_policy_question(question):
106
  if qa_chain is None:
107
  return "The policy is still loading. Please wait."
@@ -111,9 +111,9 @@ def ask_policy_question(question):
111
  return f"Error: {str(e)}"
112
 
113
 
114
- # ✅ Gradio Interface
115
  qa_chain = None
116
- status_text = "Loading..." # Initial status
117
 
118
  with gr.Blocks() as demo:
119
  gr.Markdown("## SL HealthCare Claim Checker (RAG)")
 
12
  from langchain_openai import ChatOpenAI
13
 
14
 
15
+
16
  os.environ["OPENAI_API_KEY"] = os.environ["OPENROUTER_API_KEY"]
17
  os.environ["OPENAI_API_BASE"] = "https://openrouter.ai/api/v1"
18
  os.environ["OPENAI_API_HEADERS"] = '{"HTTP-Referer":"https://huggingface.co", "X-Title":"PDF-RAG"}'
19
 
20
+ #Load and clean the policy PDF
21
  def extract_clean_sections(file_path):
22
  with pdfplumber.open(file_path) as pdf:
23
  full_text = ""
 
39
  docs.append(Document(page_content=f"{title}:\n{content}", metadata={"section": title}))
40
  return docs
41
 
42
+ #TF-IDF Embeddings
43
  class TfidfEmbedding(Embeddings):
44
  def __init__(self):
45
  self.vectorizer = TfidfVectorizer()
 
53
  def embed_query(self, text):
54
  return self.vectorizer.transform([text]).toarray()[0]
55
 
56
+ # Prompt Template
57
  TEMPLATE = """
58
  You are a strict healthcare policy checker for Systems Ltd.
59
 
 
72
  custom_prompt = PromptTemplate(template=TEMPLATE, input_variables=["context", "question"])
73
 
74
 
75
+ # Load the policy at startup
76
  def initialize_policy():
77
  global qa_chain
78
  docs = extract_clean_sections("healthcare_policy.pdf")
 
101
  chain_type_kwargs={"prompt": custom_prompt}
102
  )
103
 
104
+ # Run QA on user question
105
  def ask_policy_question(question):
106
  if qa_chain is None:
107
  return "The policy is still loading. Please wait."
 
111
  return f"Error: {str(e)}"
112
 
113
 
114
+ # Gradio Interface
115
  qa_chain = None
116
+ status_text = "Loading..."
117
 
118
  with gr.Blocks() as demo:
119
  gr.Markdown("## SL HealthCare Claim Checker (RAG)")