zolodickk committed
Commit 721cf91 · verified · 1 Parent(s): 9f76838

Update app.py

Files changed (1)
  1. app.py +20 -208
app.py CHANGED
@@ -1,220 +1,32 @@
- from ultralytics import YOLO
- import time
- import numpy as np
- import mediapipe as mp
-
+ from flask import Flask, Response, render_template
  import cv2
- from flask import Flask, render_template, request, Response, session, redirect, url_for
- from flask_socketio import SocketIO
- import yt_dlp as youtube_dl
-
- model_object_detection = YOLO("bisindov2.pt")
+ import base64

  app = Flask(__name__)
- app.config['SECRET_KEY'] = 'secret!'
- socketio = SocketIO(app, async_mode='threading')
- stop_flag = False
-
- # Translation mappings
- classes_translation = {
-     # Add your translations here
- }
-
- class VideoStreaming(object):
-     def __init__(self):
-         print ("===== Video Streaming =====")
-         self._preview = False
-         self._flipH = False
-         self._detect = False
-         self._mediaPipe = False
-         self._confidence = 75.0
-         self.mp_hands = mp.solutions.hands
-         self.hands = self.mp_hands.Hands()
-
-     @property
-     def confidence(self):
-         return self._confidence
-
-     @confidence.setter
-     def confidence(self, value):
-         self._confidence = int(value)
-
-     @property
-     def preview(self):
-         return self._preview
-
-     @preview.setter
-     def preview(self, value):
-         self._preview = bool(value)
-
-     @property
-     def flipH(self):
-         return self._flipH
-
-     @flipH.setter
-     def flipH(self, value):
-         self._flipH = bool(value)
-
-     @property
-     def detect(self):
-         return self._detect
-
-     @detect.setter
-     def detect(self, value):
-         self._detect = bool(value)
-
-     @property
-     def mediaPipe(self):
-         return self._mediaPipe
-
-     @mediaPipe.setter
-     def mediaPipe(self, value):
-         self._mediaPipe = bool(value)
-
-     def show(self, url):
-         print(url)
-         self._preview = False
-         self._flipH = False
-         self._detect = False
-         self._mediaPipe = False
-         self._confidence = 75.0
-
-         # Video capture setup
-         ydl_opts = {
-             "quiet": True,
-             "no_warnings": True,
-             "format": "best",
-             "forceurl": True,
-         }
+ camera = cv2.VideoCapture(0)

-         #cap = cv2.VideoCapture(0 if url == '0' else url)
-         print("ayoub")
-         print(url)
-         cap = cv2.VideoCapture(0)
-
-         while True:
-             if True:
-             #if self._preview:
-                 print("ayoub2")
-                 if stop_flag:
-                     print("Process Stopped")
-                     return
-
-                 grabbed, frame = cap.read()
-
-                 if not grabbed:
-                     break
+ def generate_frames():
+     while True:
+         success, frame = camera.read()
+         if not success:
+             break
+         else:
+             # Preprocess the frame here (e.g., resize, color conversion)
+             processed_frame = frame  # Placeholder for preprocessing

-                 if self.flipH:
-                     frame = cv2.flip(frame, 1)
+             _, buffer = cv2.imencode('.jpg', processed_frame)
+             frame_bytes = base64.b64encode(buffer.tobytes()).decode('utf-8')

-                 # Perform YOLO object detection
-                 if self.detect:
-                     frame_yolo = frame.copy()
-                     results_yolo = model_object_detection.predict(frame_yolo, conf=self._confidence / 100)
-                     frame_yolo, labels = results_yolo[0].plot()
-
-                     for label in labels:
-                         confidence = label.split(" ")[-1]
-                         label_name = " ".join(label.split(" ")[:-1])
-                         translated_label = classes_translation.get(label_name, label_name)
-                         list_labels = [translated_label, confidence]
-                         socketio.emit('label', list_labels)
+             yield frame_bytes

-                 # Process hand landmarks with MediaPipe
-                 if self.mediaPipe:
-                     image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
-                     results = self.hands.process(image)
-
-                     if results.multi_hand_landmarks:
-                         for hand_landmarks in results.multi_hand_landmarks:
-                             mp.solutions.drawing_utils.draw_landmarks(
-                                 frame,
-                                 hand_landmarks,
-                                 self.mp_hands.HAND_CONNECTIONS,
-                                 landmark_drawing_spec=mp.solutions.drawing_utils.DrawingSpec(color=(255, 0, 0), thickness=4, circle_radius=3),
-                                 connection_drawing_spec=mp.solutions.drawing_utils.DrawingSpec(color=(255, 255, 255), thickness=2, circle_radius=2),
-                             )
-
-                 frame = cv2.imencode(".jpg", frame)[1].tobytes()
-                 yield (b'--frame\r\nContent-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
-             else:
-                 snap = np.zeros((1000, 1000), np.uint8)
-                 label = "Streaming Off"
-                 H, W = snap.shape
-                 font = cv2.FONT_HERSHEY_PLAIN
-                 color = (255, 255, 255)
-                 cv2.putText(snap, label, (W//2 - 100, H//2), font, 2, color, 2)
-                 frame = cv2.imencode(".jpg", snap)[1].tobytes()
-                 yield (b'--frame\r\nContent-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
-
-
- # Initialize VideoStreaming object
- VIDEO = VideoStreaming()
-
- # Flask routes for the web application
- @app.route('/', methods=['GET', 'POST'])
- def homepage():
-     return render_template('hompage.html')
-
- @app.route('/index', methods=['GET', 'POST'])
+ @app.route('/')
  def index():
-     global stop_flag
-     stop_flag = False
-     if request.method == 'POST':
-         url = request.form['url']
-         session['url'] = url
-         return redirect(url_for('index'))
      return render_template('index.html')

- @app.route('/video_feed')
- def video_feed():
-     url = session.get('url', None)
-     if url is None:
-         url = "0"
-         return Response(VIDEO.show(url), mimetype='multipart/x-mixed-replace; boundary=frame')
-     return Response(VIDEO.show(url), mimetype='multipart/x-mixed-replace; boundary=frame')
-
- # Button request routes for controlling settings
- @app.route("/request_preview_switch")
- def request_preview_switch():
-     VIDEO.preview = not VIDEO.preview
-     return "nothing"
-
- @app.route("/request_flipH_switch")
- def request_flipH_switch():
-     VIDEO.flipH = not VIDEO.flipH
-     return "nothing"
-
- @app.route("/request_run_model_switch")
- def request_run_model_switch():
-     VIDEO.detect = not VIDEO.detect
-     return "nothing"
-
- @app.route("/request_mediapipe_switch")
- def request_mediapipe_switch():
-     VIDEO.mediaPipe = not VIDEO.mediaPipe
-     return "nothing"
-
- @app.route('/update_slider_value', methods=['POST'])
- def update_slider_value():
-     slider_value = request.form['sliderValue']
-     VIDEO.confidence = slider_value
-     return 'OK'
-
- @app.route('/stop_process')
- def stop_process():
-     global stop_flag
-     stop_flag = True
-     return 'Process Stop Request'
-
- @socketio.on('connect')
- def test_connect():
-     print('Connected')
-
-
-
-
+ @app.route('/capture_frame', methods=['GET', 'POST'])
+ def capture_frame():
+     frame = next(generate_frames())
+     return frame

  if __name__ == '__main__':
-     socketio.run(app, host="0.0.0.0", allow_unsafe_werkzeug=True,port=7860)
+     app.run(debug=True)
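
For context, a minimal client-side sketch (not part of this commit): it polls the new /capture_frame route, which returns one base64-encoded JPEG string per request, and writes the decoded image to disk. The localhost URL assumes Flask's default development server started by app.run(debug=True), and the requests package is an assumed client dependency.

import base64
import requests  # assumed third-party dependency on the client side

# Fetch a single frame from the running app and decode the base64 JPEG payload.
resp = requests.get("http://127.0.0.1:5000/capture_frame")
with open("frame.jpg", "wb") as f:
    f.write(base64.b64decode(resp.text))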