dsouzaJithesh committed on
Commit
20b0da7
·
verified ·
1 Parent(s): e20cf0f

Upload indexer.py

Browse files
Files changed (1) hide show
  1. indexer.py +23 -20
indexer.py CHANGED
@@ -21,34 +21,37 @@ def index_text():
21
  return vectorstore
22
 
23
 
24
def answer_query(query, vectorstore):
    """Answer a user query using retrieval-augmented generation (RAG).

    Retrieves the most relevant documents from ``vectorstore``, builds a
    context-grounded prompt, and asks the Gemini model to answer strictly
    from that context.

    Args:
        query: The user's natural-language question.
        vectorstore: A LangChain-compatible vector store exposing
            ``as_retriever()``.

    Returns:
        The model's answer as a string; the prompt instructs the model to
        reply "I don't know" when the answer is not in the context.

    Raises:
        RuntimeError: If the ``GEMINI_API_KEY`` environment variable is
            not set.
    """
    RAG_TEMPLATE = """
    #CONTEXT:
    {context}

    QUERY:
    {query}

    Use the provided context to answer the user query. Only use the provided context to answer the query.
    If you do not know the answer, or it's not contained in the provided context, respond with "I don't know".
    """

    # Bug fix: the original did
    #   os.environ["GEMINI_API_KEY"] = os.getenv("GEMINI_API_KEY")
    # which is a no-op when the variable is set and raises an opaque
    # TypeError (environ values must be str, not None) when it is not.
    # Fail fast with a clear message instead; genai.Client() reads the
    # key from the environment on its own.
    if not os.getenv("GEMINI_API_KEY"):
        raise RuntimeError("GEMINI_API_KEY environment variable is not set")
    client = genai.Client()

    # Retrieve the documents most relevant to the query.
    # NOTE(review): passing k=2 to retriever.invoke() may be ignored by
    # some LangChain versions — confirm, or configure it via
    # vectorstore.as_retriever(search_kwargs={"k": 2}).
    retriever = vectorstore.as_retriever()
    search_results = retriever.invoke(query, k=2)

    # Flatten the retrieved documents into a single context string.
    context = " ".join(doc.page_content for doc in search_results)

    prompt = RAG_TEMPLATE.format(context=context, query=query)

    # Generate the grounded answer with Gemini.
    response = client.models.generate_content(
        model="gemini-2.5-pro",
        contents=prompt,
        config=types.GenerateContentConfig(),
    )

    return response.text
 
 
21
  return vectorstore
22
 
23
 
24
def answer_query(query, history, vectorstore):
    """Answer a user query with RAG context plus multi-turn chat history.

    Retrieves relevant documents from ``vectorstore``, injects them as a
    system instruction, replays the prior conversation to a Gemini chat
    session, and sends the current query.

    Args:
        query: The user's current natural-language question.
        history: Prior conversation turns as dicts with ``'role'``
            (``'user'`` or ``'assistant'``) and ``'content'`` keys.
        vectorstore: A LangChain-compatible vector store exposing
            ``as_retriever()``.

    Returns:
        The model's answer as a string.

    Raises:
        RuntimeError: If the ``GEMINI_API_KEY`` environment variable is
            not set.
    """
    # Bug fix: the original did
    #   os.environ["GEMINI_API_KEY"] = os.getenv("GEMINI_API_KEY")
    # which is a no-op when the variable is set and raises an opaque
    # TypeError (environ values must be str, not None) when it is not.
    # Fail fast with a clear message instead; genai.Client() reads the
    # key from the environment on its own.
    if not os.getenv("GEMINI_API_KEY"):
        raise RuntimeError("GEMINI_API_KEY environment variable is not set")
    client = genai.Client()

    RAG_TEMPLATE = """
    #CONTEXT:
    {context}
    Use the provided context to answer the user query.
    """

    # Retrieve the documents most relevant to the query.
    # NOTE(review): passing k=2 to retriever.invoke() may be ignored by
    # some LangChain versions — confirm, or configure it via
    # vectorstore.as_retriever(search_kwargs={"k": 2}).
    retriever = vectorstore.as_retriever()
    search_results = retriever.invoke(query, k=2)

    # Flatten the retrieved documents into a single context string.
    context = " ".join(doc.page_content for doc in search_results)

    # Cleanup: the template has no {query} placeholder, so the original's
    # extra query= kwarg to .format() was dead — the query is sent as the
    # chat message below instead.
    prompt = RAG_TEMPLATE.format(context=context)

    # Convert app-style history into Gemini Content objects.
    # The Gemini API uses 'model' for the assistant's role.
    gemini_history = [
        types.Content(
            role='model' if msg['role'] == 'assistant' else 'user',
            parts=[types.Part(text=msg['content'])],
        )
        for msg in history
    ]

    # Create a chat session seeded with the prior turns; the RAG prompt
    # rides along as the system instruction.
    chat = client.chats.create(
        model="gemini-2.0-flash",
        history=gemini_history,
        config=types.GenerateContentConfig(system_instruction=prompt),
    )

    response = chat.send_message(message=query)
    return response.text