Vineela Gampa committed
chatbot changes
Files changed:
- backend.py +31 -12
- data/app.db +0 -0
- web/analyzer.html +6 -6
backend.py
CHANGED
@@ -24,7 +24,7 @@ from bert import analyze_with_clinicalBert, classify_disease_and_severity, extra
 from disease_links import diseases as disease_links
 from disease_steps import disease_next_steps
 from disease_support import disease_doctor_specialty, disease_home_care
-from past_reports import router as reports_router
+from past_reports import router as reports_router, db_fetch_reports
 
 model = genai.GenerativeModel('gemini-1.5-flash')
 df = pd.read_csv("measurement.csv")
@@ -96,6 +96,7 @@ except Exception as e:
     raise RuntimeError(f"Failed to configure Firebase: {e}")
 
 class ChatRequest(BaseModel):
+    user_id: str
     question: str
 
 class ChatResponse(BaseModel):
@@ -159,16 +160,7 @@ def ocr_text_from_image(image_bytes: bytes) -> str:
     print(response_text)
 
     return response_text
-
-@app.post("/chat/", response_model=ChatResponse)
-async def chat_endpoint(request: ChatRequest):
-    """
-    Chatbot endpoint that answers questions based on the last analyzed document and user history.
-    """
-    global EXTRACTED_TEXT_CACHE
-    if not EXTRACTED_TEXT_CACHE:
-        raise HTTPException(status_code=400, detail="Please provide a document context by analyzing text first.")
-
+def get_past_reports_from_firestore(user_id: str):
     try:
         reports_ref = db.collection('users').document(request.user_id).collection('reports')
         docs = reports_ref.order_by('timestamp', direction=firestore.Query.DESCENDING).limit(10).stream()
@@ -179,8 +171,35 @@ async def chat_endpoint(request: ChatRequest):
             history_text += f"Report from {report_data.get('timestamp', 'N/A')}:\n{report_data.get('ocr_text', 'No OCR text found')}\n\n"
     except Exception as e:
         history_text = "No past reports found for this user."
-
+    return history_text
+
+def get_past_reports_from_sqllite(user_id: str):
+    try:
+        reports = db_fetch_reports(user_id=user_id, limit=10, offset=0)
+
+        history_text = ""
+        for report in reports:
+            history_text += f"Report from {report.get('report_date', 'N/A')}:\n{report.get('ocr_text', 'No OCR text found')}\n\n"
+    except Exception as e:
+        history_text = "No past reports found for this user."
+    return history_text
+
+@app.post("/chat/", response_model=ChatResponse)
+async def chat_endpoint(request: ChatRequest):
+    """
+    Chatbot endpoint that answers questions based on the last analyzed document and user history.
+    """
+    print("Received chat request for user:", request.user_id)
+    #history_text = get_past_reports_from_firestore(request.user_id)
+    history_text = get_past_reports_from_sqllite(request.user_id)
+
     full_document_text = EXTRACTED_TEXT_CACHE + "\n\n" + "PAST REPORTS:\n" + history_text
+
+    if not full_document_text:
+        raise HTTPException(status_code=400, detail="No past reports or current data exists for this user")
+
+
+
 
     try:
         full_prompt = system_prompt_chat.format(
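Note on the new import: db_fetch_reports comes from past_reports.py, which is not part of this diff. The sketch below is only a guess at what that helper might look like against the SQLite store in data/app.db, assuming a reports table keyed by user_id; the only details taken from this commit are the keyword arguments (user_id, limit, offset) and the report_date / ocr_text keys that get_past_reports_from_sqllite reads from each row.

# Hypothetical sketch of db_fetch_reports (past_reports.py is not shown in this commit).
# The table name "reports" and the DB path are assumptions; the returned keys
# ('report_date', 'ocr_text', ...) mirror how chat_endpoint's helper consumes the rows.
import sqlite3

DB_PATH = "data/app.db"  # assumed location, matching the binary file changed in this commit

def db_fetch_reports(user_id: str, limit: int = 10, offset: int = 0):
    conn = sqlite3.connect(DB_PATH)
    conn.row_factory = sqlite3.Row  # rows can be converted to dicts
    try:
        rows = conn.execute(
            "SELECT report_date, ocr_text, anomalies, measurements "
            "FROM reports WHERE user_id = ? "
            "ORDER BY report_date DESC LIMIT ? OFFSET ?",
            (user_id, limit, offset),
        ).fetchall()
        return [dict(row) for row in rows]
    finally:
        conn.close()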
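With user_id added to ChatRequest, a chat call now carries both fields. A minimal sketch of exercising the updated endpoint, assuming the FastAPI app is served locally on port 8000 (the base URL and example values are not part of the commit):

# Minimal sketch of calling the updated /chat/ endpoint; base URL and payload values are assumptions.
import requests

resp = requests.post(
    "http://localhost:8000/chat/",
    json={
        "user_id": "user@example.com",  # analyzer.html sends currentUser.email as user_id
        "question": "What did my last report say about cholesterol?",
    },
)
print(resp.status_code)
print(resp.json())  # ChatResponse body on success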
data/app.db
CHANGED
Binary files a/data/app.db and b/data/app.db differ
web/analyzer.html
CHANGED
@@ -231,11 +231,11 @@
         accept=".pdf, image/*"
         class="w-full mb-4 rounded px-3 py-2"
       />
-      <input
+      <!-- <input
         type="date"
         id="report-date"
         class="w-full mb-4 rounded px-3 py-2"
-      />
+      /> -->
       <button id="analyze-btn" class="btn-primary px-4 py-2 rounded">
         Analyze with AI
       </button>
@@ -451,15 +451,15 @@
       .getElementById("analyze-btn")
       .addEventListener("click", async () => {
         const file = document.getElementById("pdf-upload").files[0];
-        const date = document.getElementById("report-date").value;
+        //const date = document.getElementById("report-date").value;
         if (!file) {
           loadingEl.textContent = "Please upload a file first.";
           return;
         }
-        if (!date) {
+        /*if (!date) {
           loadingEl.textContent = "Please select the report date.";
           return;
-        }
+        }*/
 
         loadingEl.textContent = "Processing with AI...";
         textOutput.textContent = "";
@@ -519,7 +519,7 @@
 
         await postReportToBackend({
           user_id: currentUser.email,
-          report_date: date,
+          report_date: new Date(),
          ocr_text: extractedText,
           anomalies: JSON.stringify(recs),
           measurements: JSON.stringify(findings),
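With the date picker commented out, the page now sends report_date: new Date(), which JSON.stringify serializes to an ISO 8601 string (e.g. "2024-01-31T10:15:30.000Z"). If the backend wants a real datetime rather than that raw string, a small parser along these lines would work; the helper name is hypothetical and not part of this commit:

# Hypothetical helper: parse the ISO 8601 timestamp produced when JSON-serializing a JS Date.
from datetime import datetime, timezone

def parse_report_date(value: str) -> datetime:
    # Python < 3.11 fromisoformat() does not accept a trailing "Z", so map it to an explicit offset.
    if value.endswith("Z"):
        value = value[:-1] + "+00:00"
    dt = datetime.fromisoformat(value)
    # Normalize to timezone-aware UTC for consistent storage and comparison.
    return dt.astimezone(timezone.utc)

print(parse_report_date("2024-01-31T10:15:30.000Z"))  # 2024-01-31 10:15:30+00:00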