Spaces: Runtime error

Update app.py

app.py CHANGED
@@ -2,140 +2,113 @@ import gradio as gr
 import cv2
 import time
 import os
-import
-import matplotlib.pyplot as plt
-import numpy as np
+import json
 from datetime import datetime
-from collections import Counter
 from services.video_service import get_next_video_frame, reset_video_index, preload_video
-
-from services.
-from services.
-from services.
-
-#
-
+# Under Construction services
+from services.under_construction.earthwork_detection import process_earthwork
+from services.under_construction.culvert_check import process_culverts
+from services.under_construction.bridge_pier_check import process_bridge_piers
+# Comment out other services
+# from services.operations_maintenance.pothole_detection import process_potholes
+# from services.operations_maintenance.crack_detection import process_cracks
+# from services.operations_maintenance.signage_check import process_signages
+# from services.road_safety.barrier_check import process_barriers
+# from services.road_safety.lighting_check import process_lighting
+# from services.road_safety.accident_spot_check import process_accident_spots
+# from services.plantation.plant_count import process_plants
+# from services.plantation.plant_health import process_plant_health
+# from services.plantation.missing_patch_check import process_missing_patches
+# Original services (not used in this mode but imported for potential future use)
+from services.detection_service import process_frame as process_generic
+from services.metrics_service import compute_metrics
+from services.overlay_service import add_overlay
+from services.salesforce_dispatcher import dispatch_to_salesforce
+from services.shadow_detection import detect_shadows
+from services.thermal_service import process_thermal
+
+# Preload video
+try:
+    preload_video()
+except Exception as e:
+    print(f"Error preloading video: {str(e)}")
 
 # Globals
 paused = False
-frame_rate = 0.
+frame_rate = 0.5  # Process every 0.5 seconds for real-time feel
 frame_count = 0
 log_entries = []
-crack_counts = []
-crack_severity_all = []
 last_frame = None
-
+last_detections = {}
 last_timestamp = ""
-last_detected_cracks = []  # Store up to 100+ crack images
-last_detected_holes = []  # Store up to 100+ hole images
-gps_coordinates = []
 
 # Constants
 TEMP_IMAGE_PATH = "temp.jpg"
-
-os.makedirs(
+OUTPUT_DIR = "outputs"
+os.makedirs(OUTPUT_DIR, exist_ok=True)
 
-# Core monitor function
 def monitor_feed():
-
+    """
+    Main function to process video frames in real-time.
+    """
+    global paused, frame_count, last_frame, last_detections, last_timestamp
 
     if paused and last_frame is not None:
         frame = last_frame.copy()
-
+        detections = last_detections.copy()
     else:
         try:
             frame = get_next_video_frame()
         except RuntimeError as e:
             log_entries.append(f"Error: {str(e)}")
-            return None,
-
-
-        frame =
-
-
+            return None, json.dumps(last_detections, indent=2), "\n".join(log_entries[-10:])
+
+        # Run Under Construction detections
+        earthwork_dets, frame = process_earthwork(frame)
+        culvert_dets, frame = process_culverts(frame)
+        bridge_pier_dets, frame = process_bridge_piers(frame)
+
+        # Combine detections
+        all_detections = {
+            "earthwork": earthwork_dets,
+            "culverts": culvert_dets,
+            "bridge_piers": bridge_pier_dets,
+            "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+            "frame_count": frame_count
+        }
+
+        # Compute metrics
+        all_dets_list = earthwork_dets + culvert_dets + bridge_pier_dets
+        metrics = compute_metrics(all_dets_list)
+        all_detections["metrics"] = metrics
+
+        # Dispatch to Salesforce
+        dispatch_to_salesforce(all_detections, all_detections["timestamp"])
+
+        # Save annotated frame
+        frame_path = os.path.join(OUTPUT_DIR, f"frame_{frame_count:04d}.jpg")
+        cv2.imwrite(frame_path, frame)
 
     frame_count += 1
     last_timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-
-    gps_coordinates.append(gps_coord)
-
-    # Save detected cracks and holes separately
-    if detected_items:
-        captured_frame_path = os.path.join(CAPTURED_FRAMES_DIR, f"detected_{frame_count}.jpg")
-        cv2.imwrite(captured_frame_path, frame)
-        for item in detected_items:
-            if item['type'] == 'crack':
-                last_detected_cracks.append(captured_frame_path)
-                if len(last_detected_cracks) > 100:
-                    last_detected_cracks.pop(0)
-            elif item['type'] == 'hole':
-                last_detected_holes.append(captured_frame_path)
-                if len(last_detected_holes) > 100:
-                    last_detected_holes.pop(0)
+    log_entries.append(f"{last_timestamp} - Frame {frame_count} - Detections: {len(all_dets_list)} - Avg Conf: {metrics['avg_confidence']:.2f}")
 
     last_frame = frame.copy()
-
-
-    # Update logs and stats
-    crack_detected = len([item for item in last_metrics.get('items', []) if item['type'] == 'crack'])
-    hole_detected = len([item for item in last_metrics.get('items', []) if item['type'] == 'hole'])
-    crack_severity_all.extend([
-        item['severity']
-        for item in last_metrics.get('items', [])
-        if item['type'] in ['crack', 'hole'] and isinstance(item, dict) and 'severity' in item
-    ])
-
-    log_entries.append(f"{last_timestamp} - Frame {frame_count} - Cracks: {crack_detected} - Holes: {hole_detected} - GPS: {gps_coord}")
-    crack_counts.append(crack_detected + hole_detected)
+    last_detections = all_detections
 
     if len(log_entries) > 100:
         log_entries.pop(0)
-    if len(crack_counts) > 500:
-        crack_counts.pop(0)
-    if len(crack_severity_all) > 500:
-        crack_severity_all.pop(0)
 
+    # Add frame count and timestamp to display
     frame = cv2.resize(last_frame, (640, 480))
     cv2.putText(frame, f"Frame: {frame_count}", (10, 25), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)
     cv2.putText(frame, f"{last_timestamp}", (10, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)
 
-
-
-    return frame[:, :, ::-1], last_metrics, "\n".join(log_entries[-10:]), generate_line_chart(), generate_pie_chart(), last_detected_cracks, last_detected_holes, map_path
-
-# Line chart
-def generate_line_chart():
-    if not crack_counts:
-        return None
-    fig, ax = plt.subplots(figsize=(4, 2))
-    ax.plot(crack_counts[-50:], marker='o')
-    ax.set_title("Cracks/Holes Over Time")
-    ax.set_xlabel("Frame")
-    ax.set_ylabel("Count")
-    fig.tight_layout()
-    chart_path = "chart_temp.png"
-    fig.savefig(chart_path)
-    plt.close(fig)
-    return chart_path
-
-# Pie chart for severity
-def generate_pie_chart():
-    if not crack_severity_all:
-        return None
-    fig, ax = plt.subplots(figsize=(4, 2))
-    count = Counter(crack_severity_all[-200:])
-    labels, sizes = zip(*count.items())
-    ax.pie(sizes, labels=labels, autopct='%1.1f%%', startangle=140)
-    ax.axis('equal')
-    fig.tight_layout()
-    pie_path = "pie_temp.png"
-    fig.savefig(pie_path)
-    plt.close(fig)
-    return pie_path
+    return frame[:, :, ::-1], json.dumps(last_detections, indent=2), "\n".join(log_entries[-10:])
 
 # Gradio UI
 with gr.Blocks(theme=gr.themes.Soft()) as app:
-    gr.Markdown("# 🛡️ Drone
+    gr.Markdown("# 🛡️ NHAI Drone Analytics Monitoring System - Under Construction")
 
     status_text = gr.Markdown("**Status:** 🟢 Running")
 
@@ -143,23 +116,15 @@ with gr.Blocks(theme=gr.themes.Soft()) as app:
         with gr.Column(scale=3):
            video_output = gr.Image(label="Live Drone Feed", width=640, height=480)
         with gr.Column(scale=1):
-
+            detections_output = gr.Textbox(label="Detections", lines=10)
 
     with gr.Row():
         logs_output = gr.Textbox(label="Live Logs", lines=8)
-        chart_output = gr.Image(label="Crack/Hole Trend")
-        pie_output = gr.Image(label="Severity Distribution")
-
-    with gr.Row():
-        map_output = gr.Image(label="Crack/Hole Locations Map")
-        with gr.Column():
-            crack_images = gr.Gallery(label="Detected Cracks (Last 100+)", columns=4, rows=13)
-            hole_images = gr.Gallery(label="Detected Holes (Last 100+)", columns=4, rows=13)
 
     with gr.Row():
         pause_btn = gr.Button("⏸️ Pause")
         resume_btn = gr.Button("▶️ Resume")
-        frame_slider = gr.Slider(0.
+        frame_slider = gr.Slider(0.1, 5, value=0.5, label="Frame Interval (seconds)")
 
     def toggle_pause():
         global paused
@@ -181,14 +146,14 @@ with gr.Blocks(theme=gr.themes.Soft()) as app:
 
     def streaming_loop():
         while True:
-            frame,
+            frame, detections, logs = monitor_feed()
             if frame is None:
-                yield None,
+                yield None, detections, logs
             else:
-                yield frame,
+                yield frame, detections, logs
             time.sleep(frame_rate)
 
-    app.load(streaming_loop, outputs=[video_output,
+    app.load(streaming_loop, outputs=[video_output, detections_output, logs_output])
 
 if __name__ == "__main__":
     app.launch(share=True)
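
The rewritten `monitor_feed()` calls each Under Construction service as `dets, frame = process_*(frame)`, so every service module is expected to take a BGR frame and return a list of detections together with an annotated copy of the frame. Those modules are not part of this diff; the sketch below only illustrates that assumed contract (the return shape and field names such as `label`, `confidence` and `bbox` are guesses, not the repository's actual code).

```python
# Hypothetical sketch of the interface app.py assumes for the
# Under Construction services (earthwork / culverts / bridge piers).
from typing import Dict, List, Tuple

import numpy as np


def process_earthwork(frame: np.ndarray) -> Tuple[List[Dict], np.ndarray]:
    """Return (detections, annotated_frame) for one BGR frame.

    Matches the call site in monitor_feed():
        earthwork_dets, frame = process_earthwork(frame)
    """
    detections: List[Dict] = [
        # e.g. {"label": "earthwork", "confidence": 0.87, "bbox": [x1, y1, x2, y2]}
    ]
    annotated = frame.copy()  # a real service would draw boxes and labels here
    return detections, annotated
```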
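
Similarly, the new logging line indexes `metrics['avg_confidence']`, so `services.metrics_service.compute_metrics` is evidently expected to return a dict containing at least that key. A minimal sketch of that shape, assuming each detection dict carries a `confidence` field (an assumption, since the real module is not shown in this diff):

```python
# Hypothetical sketch of the return shape app.py relies on from
# services.metrics_service.compute_metrics; only 'avg_confidence' is required.
from typing import Dict, List


def compute_metrics(detections: List[Dict]) -> Dict:
    confidences = [d.get("confidence", 0.0) for d in detections]
    return {
        "total_detections": len(detections),
        "avg_confidence": sum(confidences) / len(confidences) if confidences else 0.0,
    }
```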
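
One detail of the committed `monitor_feed()` worth noting: `all_dets_list`, `metrics` and `all_detections` are bound only in the `else` branch, so the shared logging line and `last_detections = all_detections` would raise an `UnboundLocalError` the first time a frame is served while paused. A minimal way to give the paused path the same names is sketched below; this helper is illustrative only and not part of the commit.

```python
# Illustrative sketch (not part of the commit): defaults the paused path can
# fall back on so the logging code after the if/else has values to reference.
from typing import Dict, List, Tuple


def paused_frame_state(last_detections: Dict) -> Tuple[Dict, List, Dict]:
    """Return (all_detections, all_dets_list, metrics) for a paused frame."""
    all_detections = dict(last_detections)       # reuse the previous frame's results
    all_dets_list: List = []                     # nothing new is detected while paused
    metrics = all_detections.get("metrics", {"avg_confidence": 0.0})
    return all_detections, all_dets_list, metrics
```

The paused branch could then unpack `all_detections, all_dets_list, metrics = paused_frame_state(last_detections)` before falling through to the shared logging code.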