<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Mood Detector</title>
    <script src="https://cdn.tailwindcss.com"></script>
    <script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/face-api.min.js"></script>
    <style>
        .video-overlay {
            position: absolute;
            top: 0;
            left: 0;
            width: 100%;
            height: 100%;
        }
        .face-box {
            position: absolute;
            border: 2px solid #3B82F6;
            background-color: rgba(59, 130, 246, 0.2);
        }
        .mood-text {
            position: absolute;
            color: white;
            background-color: rgba(0, 0, 0, 0.7);
            padding: 2px 5px;
            border-radius: 4px;
            font-size: 12px;
        }
        .pulse {
            animation: pulse 2s infinite;
        }
        @keyframes pulse {
            0% {
                transform: scale(1);
                box-shadow: 0 0 0 0 rgba(59, 130, 246, 0.7);
            }
            70% {
                transform: scale(1.05);
                box-shadow: 0 0 0 10px rgba(59, 130, 246, 0);
            }
            100% {
                transform: scale(1);
                box-shadow: 0 0 0 0 rgba(59, 130, 246, 0);
            }
        }
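        /* Note: .video-overlay, .face-box, .mood-text and .pulse are provided for a
           hand-rolled DOM overlay, but the script below draws boxes and labels
           directly on the <canvas> via face-api.js helpers, so these classes are
           currently unused by the markup. */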
    </style>
</head>
<body class="bg-gray-900 text-white min-h-screen flex flex-col">
    <header class="bg-gray-800 py-6 shadow-lg">
        <div class="container mx-auto px-4">
            <h1 class="text-3xl md:text-4xl font-bold text-center text-blue-400">Mood Detector</h1>
            <p class="text-center text-gray-300 mt-2">Let your face reveal your emotions</p>
        </div>
    </header>

    <main class="relative h-screen w-full overflow-hidden">
        <div class="absolute inset-0">
            <video id="video" autoplay muted class="w-full h-full object-cover bg-gray-800"></video>
            <canvas id="canvas" class="absolute inset-0 w-full h-full"></canvas>
        </div>

        <div class="absolute bottom-4 left-0 right-0 flex justify-center gap-4">
            <button id="start-btn" class="bg-blue-600 hover:bg-blue-700 text-white font-bold py-2 px-4 rounded-lg transition duration-300">
                Start
            </button>
            <button id="stop-btn" class="bg-gray-700 hover:bg-gray-600 text-white font-bold py-2 px-4 rounded-lg transition duration-300" disabled>
                Stop
            </button>
        </div>

        <div class="absolute top-4 right-4 bg-gray-800 bg-opacity-80 rounded-lg p-4 shadow-lg">
            <h2 class="text-lg font-semibold mb-2">Mood Analysis</h2>
            <div id="mood-text" class="text-xl font-bold text-blue-400">Neutral</div>
            <div id="confidence" class="text-gray-300 text-sm">Confidence: 0%</div>
            
            <div class="mt-4 grid grid-cols-2 gap-2">
                <div>
                    <div class="font-medium">Happy</div>
                    <div id="happy-score" class="text-gray-300 text-sm">0%</div>
                </div>
                <div>
                    <div class="font-medium">Sad</div>
                    <div id="sad-score" class="text-gray-300 text-sm">0%</div>
                </div>
                <div>
                    <div class="font-medium">Angry</div>
                    <div id="angry-score" class="text-gray-300 text-sm">0%</div>
                </div>
                <div>
                    <div class="font-medium">Fearful</div>
                    <div id="fearful-score" class="text-gray-300 text-sm">0%</div>
                </div>
            </div>
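            <!-- face-api.js also reports disgusted, surprised and neutral; only these
                 four scores are broken out here, but the main label above shows
                 whichever of the seven expressions is dominant. -->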
        </div>
    </main>

    <footer class="bg-gray-800 py-4">
        <div class="container mx-auto px-4 text-center text-gray-400">
            <p>Mood Detector using face-api.js | © 2023 All Rights Reserved</p>
        </div>
    </footer>

    <script>
        // DOM Elements
        const video = document.getElementById('video');
        const canvas = document.getElementById('canvas');
        const startBtn = document.getElementById('start-btn');
        const stopBtn = document.getElementById('stop-btn');
        const moodText = document.getElementById('mood-text');
        const confidenceText = document.getElementById('confidence');
        
        // Emotion score elements
        const happyScore = document.getElementById('happy-score');
        const sadScore = document.getElementById('sad-score');
        const angryScore = document.getElementById('angry-score');
        const fearfulScore = document.getElementById('fearful-score');

        let detectionInterval;
        let isDetecting = false;
        let modelsLoaded = false; // avoid re-downloading models on every Start click

        // Load face-api.js models (detector, landmarks, expressions)
        async function loadModels() {
            if (modelsLoaded) return true;
            try {
                const MODEL_URL = 'https://justadudewhohacks.github.io/face-api.js/models';
                // faceRecognitionNet is not loaded: no face descriptors are computed here
                await Promise.all([
                    faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL),
                    faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL),
                    faceapi.nets.faceExpressionNet.loadFromUri(MODEL_URL)
                ]);
                modelsLoaded = true;
                console.log('Models loaded successfully');
                return true;
            } catch (err) {
                console.error('Failed to load models:', err);
                alert('Failed to load face detection models. Please try again later.');
                return false;
            }
        }

        // Start video stream
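        // (getUserMedia only works in a secure context: https:// or localhost)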
        async function startVideo() {
            try {
                const stream = await navigator.mediaDevices.getUserMedia({ video: {} });
                video.srcObject = stream;
                return true;
            } catch (err) {
                console.error('Error accessing camera:', err);
                alert('Could not access the camera. Please ensure you have granted camera permissions.');
                return false;
            }
        }

        // Detect faces and expressions in the current video frame
        async function detectFaces() {
            // Skip until detection is active and the video has real dimensions
            if (!isDetecting || video.videoWidth === 0) return;

            const options = new faceapi.TinyFaceDetectorOptions();
            const result = await faceapi.detectAllFaces(video, options)
                .withFaceLandmarks()
                .withFaceExpressions();

            // Match the canvas to the video's intrinsic size; assigning width/height
            // also clears the previous frame's drawings
            canvas.width = video.videoWidth;
            canvas.height = video.videoHeight;

            // Draw boxes, landmarks and expression labels onto the overlay canvas
            faceapi.draw.drawDetections(canvas, result);
            faceapi.draw.drawFaceLandmarks(canvas, result);
            faceapi.draw.drawFaceExpressions(canvas, result);

            // Update the mood panel from the first detected face
            if (result.length > 0) {
                updateMoodDisplay(result[0].expressions);
            } else {
                resetMoodDisplay();
            }
        }

        // Update mood display based on expressions
        function updateMoodDisplay(expressions) {
            // Get the dominant emotion
            const emotions = Object.entries(expressions);
            const dominantEmotion = emotions.reduce((max, emotion) => 
                emotion[1] > max[1] ? emotion : max
            );
            
            const [emotion, confidence] = dominantEmotion;
            const confidencePercent = Math.round(confidence * 100);

            // Update scores
            happyScore.textContent = `${Math.round(expressions.happy * 100)}%`;
            sadScore.textContent = `${Math.round(expressions.sad * 100)}%`;
            angryScore.textContent = `${Math.round(expressions.angry * 100)}%`;
            fearfulScore.textContent = `${Math.round(expressions.fearful * 100)}%`;

            // Update main display
            moodText.textContent = capitalizeFirstLetter(emotion);
            confidenceText.textContent = `Confidence: ${confidencePercent}%`;

            // Update mood indicator
            updateMoodIndicator(emotion, confidencePercent);
        }

        // Reset mood display when no face is detected
        function resetMoodDisplay() {
            moodText.textContent = "No face detected";
            confidenceText.textContent = "Confidence: 0%";
            
            // Reset scores
            happyScore.textContent = "0%";
            sadScore.textContent = "0%";
            angryScore.textContent = "0%";
            fearfulScore.textContent = "0%";
        }

        // Update mood indicator
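        // (These are plain Tailwind utility classes; the Play CDN loaded in <head>
        // generates them at runtime, so assigning them dynamically works here.)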
        function updateMoodIndicator(emotion, confidence) {
            // Adjust text color based on emotion
            let colorClass = "text-blue-400";
            
            switch(emotion) {
                case 'happy':
                    colorClass = "text-green-400";
                    break;
                case 'sad':
                    colorClass = "text-blue-400";
                    break;
                case 'angry':
                    colorClass = "text-red-400";
                    break;
                case 'fearful':
                    colorClass = "text-purple-400";
                    break;
                case 'disgusted':
                    colorClass = "text-yellow-400";
                    break;
                case 'surprised':
                    colorClass = "text-orange-400";
                    break;
                case 'neutral':
                    colorClass = "text-gray-400";
                    break;
            }
            
            moodText.className = `text-xl font-bold ${colorClass}`;
        }

        // Helper function to capitalize first letter
        function capitalizeFirstLetter(string) {
            return string.charAt(0).toUpperCase() + string.slice(1);
        }

        // Start detection
        async function startDetection() {
            // Load models before touching the camera so a failed download
            // doesn't leave the webcam running
            const modelsReady = await loadModels();
            if (!modelsReady) return;

            const cameraStarted = await startVideo();
            if (!cameraStarted) return;

            isDetecting = true;
            startBtn.disabled = true;
            stopBtn.disabled = false;

            // Run detection roughly twice per second
            detectionInterval = setInterval(detectFaces, 500);
        }

        // Stop detection
        function stopDetection() {
            isDetecting = false;
            clearInterval(detectionInterval);
            
            // Stop video stream
            const stream = video.srcObject;
            if (stream) {
                const tracks = stream.getTracks();
                tracks.forEach(track => track.stop());
                video.srcObject = null;
            }
            
            // Clear canvas
            const context = canvas.getContext('2d');
            context.clearRect(0, 0, canvas.width, canvas.height);
            
            // Reset UI
            startBtn.disabled = false;
            stopBtn.disabled = true;
            resetMoodDisplay();
        }

        // Event listeners
        startBtn.addEventListener('click', startDetection);
        stopBtn.addEventListener('click', stopDetection);

        // Initialize
        document.addEventListener('DOMContentLoaded', () => {
            // Set canvas size to match video (will be updated in detectFaces)
            canvas.width = 640;
            canvas.height = 480;
        });
    </script>
<p style="border-radius: 8px; text-align: center; font-size: 12px; color: #fff; margin-top: 16px;position: fixed; left: 8px; bottom: 8px; z-index: 10; background: rgba(0, 0, 0, 0.8); padding: 4px 8px;">Made with <img src="https://enzostvs-deepsite.hf.space/logo.svg" alt="DeepSite Logo" style="width: 16px; height: 16px; vertical-align: middle;display:inline-block;margin-right:3px;filter:brightness(0) invert(1);"><a href="https://enzostvs-deepsite.hf.space" style="color: #fff;text-decoration: underline;" target="_blank" >DeepSite</a> - 🧬 <a href="https://enzostvs-deepsite.hf.space?remix=Bahrudin/mood" style="color: #fff;text-decoration: underline;" target="_blank" >Remix</a></p></body>
</html>