<!--
  DINOv3-video-tracking / index.html
  Author: Xenova (HF Staff) — "Update index.html" (commit 10250fc, verified)
-->
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>DINOv3 Video Tracking</title>
<script src="https://cdn.tailwindcss.com"></script>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap" rel="stylesheet" />
<style>
body {
font-family:
"Inter",
-apple-system,
BlinkMacSystemFont,
"Segoe UI",
Roboto,
Helvetica,
Arial,
sans-serif;
}
#video-container {
position: relative;
background-color: #111827;
border-radius: 0.5rem;
overflow: hidden;
cursor: crosshair;
}
#video-player,
#overlay-canvas {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
border-radius: 0.5rem;
}
#overlay-canvas {
z-index: 10;
/* Allow clicks to pass through to the video player before processing */
pointer-events: none;
}
#video-player {
z-index: 5;
}
/* When processing is done, the canvas handles interaction */
#video-container.processed #overlay-canvas {
pointer-events: auto;
}
button:disabled,
input:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.dragover {
border-color: #22d3ee;
background-color: #374151;
}
input[type="range"] {
-webkit-appearance: none;
appearance: none;
width: 100%;
height: 0.5rem;
background: #4a5568; /* Default gray track for sliders */
border-radius: 0.25rem;
outline: none;
}
input[type="range"]::-webkit-slider-thumb {
-webkit-appearance: none;
appearance: none;
width: 1.25rem;
height: 1.25rem;
background: #38bdf8;
cursor: pointer;
border-radius: 50%;
}
input[type="range"]:disabled::-webkit-slider-thumb {
background: #6b7280;
}
#slider-container {
position: relative;
display: flex;
align-items: center;
background-color: #4a5568;
border-radius: 0.25rem;
height: 0.5rem;
}
#slider-progress {
position: absolute;
top: 0;
left: 0;
height: 100%;
background-color: #22d3ee;
border-radius: 0.25rem;
z-index: 1;
width: 0%;
}
#frame-slider {
background: transparent;
position: relative;
z-index: 2;
}
video::-webkit-media-controls {
display: none !important;
}
video::-webkit-media-controls-enclosure {
display: none !important;
}
video::-webkit-media-controls-panel {
display: none !important;
}
.example-video-card {
transition: all 0.2s ease-in-out;
}
.example-video-card:hover {
transform: scale(1.05);
box-shadow: 0 0 15px rgba(34, 211, 238, 0.5);
}
</style>
</head>
<body class="bg-gray-900 text-gray-300 flex flex-col items-center justify-center min-h-screen p-4 sm:p-6 lg:p-8">
<div class="w-full max-w-4xl bg-gray-800/50 backdrop-blur-sm rounded-2xl shadow-2xl shadow-black/30 border border-gray-700 p-6 sm:p-8 text-center">
<h1 class="text-3xl sm:text-4xl font-bold text-transparent bg-clip-text bg-gradient-to-r from-teal-400 to-cyan-500 mb-2">DINOv3 Video Tracking</h1>
<p class="text-gray-400 mb-6 max-w-2xl mx-auto">In-browser video tracking powered by Transformers.js</p>
<div class="space-y-4">
<!-- Video Uploader -->
<div id="drop-zone" class="flex items-center justify-center w-full">
<label
for="video-uploader"
class="flex flex-col items-center justify-center w-full h-40 border-2 border-gray-600 border-dashed rounded-lg cursor-pointer bg-gray-900/50 hover:bg-gray-700/50 transition-colors"
>
<div class="flex flex-col items-center justify-center pt-5 pb-6">
<svg class="w-10 h-10 mb-2 text-gray-500" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
<path
d="M9.5 9V15M6.5 12H12.5M16 10L18.5768 8.45392C19.3699 7.97803 19.7665 7.74009 20.0928 7.77051C20.3773 7.79703 20.6369 7.944 20.806 8.17433C21 8.43848 21 8.90095 21 9.8259V14.1741C21 15.099 21 15.5615 20.806 15.8257C20.6369 16.056 20.3773 16.203 20.0928 16.2295C19.7665 16.2599 19.3699 16.022 18.5768 15.5461L16 14M6.2 18H12.8C13.9201 18 14.4802 18 14.908 17.782C15.2843 17.5903 15.5903 17.2843 15.782 16.908C16 16.4802 16 15.9201 16 14.8V9.2C16 8.0799 16 7.51984 15.782 7.09202C15.5903 6.71569 15.2843 6.40973 14.908 6.21799C14.4802 6 13.9201 6 12.8 6H6.2C5.0799 6 4.51984 6 4.09202 6.21799C3.71569 6.40973 3.40973 6.71569 3.21799 7.09202C3 7.51984 3 8.07989 3 9.2V14.8C3 15.9201 3 16.4802 3.21799 16.908C3.40973 17.2843 3.71569 17.5903 4.09202 17.782C4.51984 18 5.07989 18 6.2 18Z"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
></path>
</svg>
<p class="mb-2 text-sm text-gray-400"><span class="font-semibold text-cyan-400">Select a video</span><br />or drag and drop</p>
</div>
<input id="video-uploader" type="file" class="hidden" accept="video/*" />
</label>
</div>
<!-- Status Display -->
<div id="status" role="status" aria-live="polite" class="flex items-center justify-center w-full font-medium text-gray-400 h-6">
<svg id="spinner" class="animate-spin mr-3 h-5 w-5 text-cyan-400 hidden" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
<circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4"></circle>
<path
class="opacity-75"
fill="currentColor"
d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
></path>
</svg>
<span id="status-text"></span>
</div>
<!-- Examples Section -->
<div id="examples-container" class="space-y-3 pt-2">
<h2 class="text-sm font-semibold text-gray-400">OR TRY AN EXAMPLE</h2>
<div id="examples-grid" class="grid grid-cols-2 md:grid-cols-4 gap-4">
<!-- Example videos will be injected here -->
</div>
</div>
<!-- Video Player and Canvas Container -->
<div id="video-container" class="w-full aspect-video hidden">
<video id="video-player" playsinline muted></video>
<canvas id="overlay-canvas"></canvas>
</div>
<!-- Controls -->
<div id="controls" class="space-y-4 pt-2 hidden">
<!-- Row 1: Frame Slider & Counter -->
<div class="flex items-center space-x-4">
<button
id="play-btn"
class="w-20 px-4 py-2 text-sm font-medium text-white bg-gray-700 rounded-lg hover:bg-gray-600 focus:ring-4 focus:outline-none focus:ring-gray-600 transition-colors"
disabled
>
Play
</button>
<div id="slider-container" class="w-full">
<div id="slider-progress"></div>
<input id="frame-slider" type="range" min="0" max="100" step="1" value="0" class="w-full" disabled />
</div>
<div class="flex items-center space-x-1">
<button id="prev-frame-btn" type="button" aria-label="Previous frame" class="p-2 text-sm font-medium text-white bg-gray-700 rounded-lg hover:bg-gray-600" disabled>
<svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"></path>
</svg>
</button>
<span id="frame-counter" class="text-sm font-medium text-gray-400 w-28 text-center">Frame 0 / 0</span>
<button id="next-frame-btn" type="button" aria-label="Next frame" class="p-2 text-sm font-medium text-white bg-gray-700 rounded-lg hover:bg-gray-600" disabled>
<svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 5l7 7-7 7"></path>
</svg>
</button>
</div>
</div>
<!-- Row 2: Main Actions & FPS -->
<div class="flex justify-center items-center space-x-4">
<button
id="process-btn"
class="px-5 py-2.5 text-sm font-medium text-white bg-cyan-600 rounded-lg hover:bg-cyan-700 focus:ring-4 focus:outline-none focus:ring-cyan-800 transition-colors"
disabled
>
Process Video
</button>
<div class="flex items-center space-x-2 text-sm">
<label for="fps-input" class="font-medium">at</label>
<input
type="number"
id="fps-input"
value="20"
step="1"
min="1"
max="30"
class="bg-gray-700 border border-gray-600 text-white text-sm rounded-lg focus:ring-cyan-500 focus:border-cyan-500 p-1.5 w-20 text-center"
/>
<label for="fps-input" class="font-medium">fps</label>
</div>
<button
id="clear-btn"
class="px-5 py-2.5 text-sm font-medium text-gray-300 bg-gray-700 rounded-lg hover:bg-gray-600 focus:ring-4 focus:outline-none focus:ring-gray-600 transition-colors"
>
Clear Selections
</button>
<button
id="start-over-btn"
class="px-5 py-2.5 text-sm font-medium text-gray-300 bg-gray-700 rounded-lg hover:bg-gray-600 focus:ring-4 focus:outline-none focus:ring-gray-600 transition-colors"
>
Start Over
</button>
</div>
<!-- Row 3: Fine-tuning controls -->
<div class="grid grid-cols-1 md:grid-cols-2 gap-6 items-center pt-2">
<div class="flex items-center space-x-3">
<label for="scale-slider" class="text-sm font-medium text-gray-400 whitespace-nowrap">Scale:</label>
<input id="scale-slider" type="range" min="262144" max="2097152" step="10000" value="500000" class="w-full" />
<span id="scale-value" class="text-sm font-medium text-gray-400 w-28 text-right">0.50 Mpx</span>
</div>
<div class="flex items-center space-x-3">
<label for="threshold-slider" class="text-sm font-medium text-gray-400 whitespace-nowrap">Similarity:</label>
<input id="threshold-slider" type="range" min="0" max="1" step="0.01" value="0.75" class="w-full" />
<span id="threshold-value" class="text-sm font-medium text-gray-400 w-12 text-right">0.75</span>
</div>
</div>
</div>
</div>
</div>
<script type="module">
// --- 1. Imports and Configuration ---
import { AutoProcessor, AutoModel, RawImage, matmul, Tensor, PreTrainedModel } from "https://cdn.jsdelivr.net/npm/@huggingface/[email protected]";
// DINOv3 ViT-S/16 ONNX export (MHA variant that also exposes attention scores).
const MODEL_ID = "onnx-community/dinov3-vits16-pretrain-lvd1689m-ONNX-MHA-scores";
// Pixel-budget bounds for the processing-scale slider (512^2 .. 2048^2 px).
// NOTE(review): MAX_PIXELS mirrors the slider `max` in the markup and is not
// referenced elsewhere in this script.
const MIN_PIXELS = 262144;
const MAX_PIXELS = 2097152;
// Fallback sampling rate when the FPS input holds an unparsable value.
const DEFAULT_FPS = 20;
const BASE_URL = "https://huggingface.co/spaces/webml-community/DINOv3-video-tracking/resolve/main/examples/";
const EXAMPLE_VIDEOS = [
"12927739_60fps.mp4",
"12927746_60fps.mp4",
"4411457_25fps.mp4",
"5191588_30fps.mp4",
"6298719_30fps.mp4",
"8624901_30fps.mp4",
"9517738_24fps.mp4",
"9517745_24fps.mp4",
].map((video) => BASE_URL + video);
// --- 2. DOM Element References ---
const dropZone = document.getElementById("drop-zone");
const videoUploader = document.getElementById("video-uploader");
const statusText = document.getElementById("status-text");
const spinner = document.getElementById("spinner");
const examplesContainer = document.getElementById("examples-container");
const examplesGrid = document.getElementById("examples-grid");
const videoContainer = document.getElementById("video-container");
const videoPlayer = document.getElementById("video-player");
const overlayCanvas = document.getElementById("overlay-canvas");
const controls = document.getElementById("controls");
const processBtn = document.getElementById("process-btn");
const clearBtn = document.getElementById("clear-btn");
const startOverBtn = document.getElementById("start-over-btn");
const playBtn = document.getElementById("play-btn");
const frameSlider = document.getElementById("frame-slider");
const frameCounter = document.getElementById("frame-counter");
const sliderProgress = document.getElementById("slider-progress");
const prevFrameBtn = document.getElementById("prev-frame-btn");
const nextFrameBtn = document.getElementById("next-frame-btn");
const thresholdSlider = document.getElementById("threshold-slider");
const thresholdValue = document.getElementById("threshold-value");
const scaleSlider = document.getElementById("scale-slider");
const scaleValue = document.getElementById("scale-value");
const fpsInput = document.getElementById("fps-input");
// --- 3. Application State ---
// Model pipeline handles; patchSize is read from the model config after load.
let processor, model, postprocessorModel, patchSize;
// Blob/object URL of the currently loaded video (revoked on reset).
let currentVideoUrl = null;
// frameIndex -> { canvas, features } for each processed frame.
let frameCache = new Map();
let totalFrames = 0;
// Patch-grid geometry { width, height, numPatches }; set on first processed frame.
let gridInfo = null;
// True while processAndCacheAllFrames() is running; cleared to request a stop.
let isCachingFrames = false;
let isPlaying = false;
let animationFrameId = null;
// Debounce handle for slider scrubbing.
let seekTimeoutId = null;
// frameIndex -> { positive: Set<patchIndex> } of user click selections.
let multiFrameSelections = new Map();
let lastFrameTime = 0;
let processingStartTime = 0;
// Offscreen canvas used to downscale frames to the model's input resolution.
const offscreenCanvas = document.createElement("canvas");
const offscreenCtx = offscreenCanvas.getContext("2d", {
willReadFrequently: true,
});
const overlayCtx = overlayCanvas.getContext("2d");
// --- 4. Core Functions ---
function setAllControlsDisabled(disabled) {
  // Toggle every playback/tuning control in one place so processing can lock
  // the UI and later unlock it symmetrically.
  const interactiveControls = [playBtn, frameSlider, prevFrameBtn, nextFrameBtn, clearBtn, thresholdSlider, scaleSlider, fpsInput];
  for (const control of interactiveControls) {
    control.disabled = disabled;
  }
}
function updateStatus(text, isLoading = false) {
  // Update the status line; the spinner is only visible while loading.
  statusText.textContent = text;
  if (isLoading) {
    spinner.style.display = "block";
  } else {
    spinner.style.display = "none";
  }
}
async function initialize() {
  // Build the example gallery first so the page is usable while the
  // (potentially large) model files download.
  populateExamples();
  updateStatus("Loading model...", true);
  try {
    // Image pre-processor. Resizing is handled manually via the offscreen
    // canvas, so the processor's own resize step is disabled.
    processor = await AutoProcessor.from_pretrained(MODEL_ID);
    processor.image_processor.do_resize = false;

    // DINOv3 backbone on WebGPU. Keeping last_hidden_state as a gpu-buffer
    // avoids a GPU->CPU round-trip before post-processing.
    model = await AutoModel.from_pretrained(MODEL_ID, {
      device: "webgpu",
      dtype: "fp32",
      session_options: {
        preferredOutputLocation: { last_hidden_state: "gpu-buffer" },
      },
    });

    // Companion graph that extracts and normalizes the patch features.
    postprocessorModel = await PreTrainedModel.from_pretrained(MODEL_ID, {
      model_file_name: "postprocess",
      dtype: "fp32",
      device: "webgpu",
    });

    patchSize = model.config.patch_size;
    updateStatus("Models loaded. Please select a video to begin.");
  } catch (error) {
    console.error("Model initialization failed:", error);
    updateStatus("Error: Could not load models. " + (error.message ?? error.toString()));
  }
}
function populateExamples() {
  // Fill the examples grid with hover-to-preview video cards.
  for (const url of EXAMPLE_VIDEOS) {
    const card = document.createElement("div");
    card.className = "example-video-card aspect-video bg-gray-900 rounded-lg overflow-hidden cursor-pointer border-2 border-gray-700";
    const video = document.createElement("video");
    video.src = url;
    video.muted = true;
    video.loop = true;
    video.playsInline = true;
    // Fix: avoid eagerly downloading all eight example videos on page load;
    // full data is fetched lazily when the user hovers to preview.
    video.preload = "metadata";
    video.className = "w-full h-full object-cover";
    // Fix: play() returns a promise that rejects when the browser blocks
    // playback (autoplay policy, fast hover-out); swallow it to avoid
    // unhandled-rejection console noise.
    card.addEventListener("mouseenter", () => video.play().catch(() => {}));
    card.addEventListener("mouseleave", () => {
      video.pause();
      video.currentTime = 0;
    });
    card.addEventListener("click", () => loadExampleVideo(url));
    card.appendChild(video);
    examplesGrid.appendChild(card);
  }
}
async function loadExampleVideo(url) {
  // Download an example video and hand it to the regular upload path.
  updateStatus("Loading example video...", true);
  try {
    const response = await fetch(url);
    if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
    const blob = await response.blob();
    const fileName = url.split("/").pop();
    loadVideoFile(new File([blob], fileName, { type: blob.type }));
  } catch (error) {
    console.error("Failed to load example video:", error);
    updateStatus("Error: Could not load example video.", false);
  }
}
function loadVideoFile(file) {
  // Accept a user-provided (or example) video file and begin loading it.
  // Non-video files (e.g. a stray drag-and-drop) are ignored.
  if (!file || !file.type.startsWith("video/")) return;
  dropZone.classList.add("hidden");
  examplesContainer.classList.add("hidden");
  // Fix: resetAppState() -> resetVideoState() already revokes the previous
  // object URL, so the extra revokeObjectURL call that used to live here was
  // redundant (double revoke of the same URL).
  resetAppState();
  currentVideoUrl = URL.createObjectURL(file);
  videoPlayer.src = currentVideoUrl;
  // Fix: show the spinner while metadata loads, consistent with the other
  // loading states; setupAndValidateVideo() hides it again.
  updateStatus("Loading video metadata...", true);
}
function setupAndValidateVideo() {
  // `loadedmetadata` handler: size the overlay canvas, clamp the scale slider
  // to the source resolution, and reveal the player UI.
  const { videoWidth, videoHeight } = videoPlayer;
  const originalPixels = videoWidth * videoHeight;

  scaleSlider.min = Math.min(originalPixels, MIN_PIXELS);
  scaleSlider.value = Math.min(originalPixels, 500000);

  overlayCanvas.width = videoWidth;
  overlayCanvas.height = videoHeight;

  updateFrameCount();
  updateProcessingDimensions();

  // Paint the first frame onto the overlay once the seek to t=0 completes.
  videoPlayer.addEventListener(
    "seeked",
    () => overlayCtx.drawImage(videoPlayer, 0, 0, overlayCanvas.width, overlayCanvas.height),
    { once: true },
  );
  videoPlayer.currentTime = 0;

  videoContainer.classList.remove("hidden");
  controls.classList.remove("hidden");
  processBtn.disabled = false;
  updateStatus("Video loaded. Press 'Process Video' to begin analysis.");
}
function updateProcessingDimensions() {
  // Derive the model-input size from the target pixel budget, preserving the
  // source aspect ratio and snapping each side to a multiple of patchSize.
  const { videoWidth, videoHeight } = videoPlayer;
  const aspectRatio = videoWidth / videoHeight;
  const targetPixels = parseFloat(scaleSlider.value);

  const idealHeight = Math.sqrt(targetPixels / aspectRatio);
  const idealWidth = idealHeight * aspectRatio;
  offscreenCanvas.width = Math.round(idealWidth / patchSize) * patchSize;
  offscreenCanvas.height = Math.round(idealHeight / patchSize) * patchSize;

  scaleValue.textContent = `${(targetPixels / 1_000_000).toFixed(2)} Mpx`;

  // Any cached features were computed at the old scale and are now stale.
  if (frameCache.size > 0) {
    resetProcessingState();
    updateStatus("Scale changed. Please process the video again.");
  }
}
function updateFrameCount() {
  // Recompute the total frame count from the duration and sampling FPS.
  const fps = parseInt(fpsInput.value, 10) || DEFAULT_FPS;
  totalFrames = Math.floor(videoPlayer.duration * fps);

  frameSlider.max = Math.max(totalFrames - 1, 0);
  const noFrames = totalFrames === 0;
  frameSlider.disabled = noFrames;
  playBtn.disabled = noFrames;
  prevFrameBtn.disabled = noFrames;
  nextFrameBtn.disabled = noFrames;

  updateFrameCounter();
}
async function handleProcessButtonClick() {
  if (isCachingFrames) {
    // Second click while running: request a graceful stop. The processing
    // loop checks this flag between frames.
    isCachingFrames = false;
    updateStatus("Stopping processing...", true);
    processBtn.disabled = true;
    return;
  }
  // Start a processing run: halt playback, clear the overlay, lock the UI.
  if (!videoPlayer.paused) togglePlayback();
  overlayCtx.clearRect(0, 0, overlayCanvas.width, overlayCanvas.height);
  isCachingFrames = true;
  processingStartTime = performance.now();
  setAllControlsDisabled(true);
  processBtn.textContent = "Stop Processing";
  await processAndCacheAllFrames();
}
async function processAndCacheAllFrames() {
  // Seek through the video frame by frame, running the model on every frame
  // not already cached. Interruptible by clearing `isCachingFrames`
  // (see handleProcessButtonClick).
  const fps = parseInt(fpsInput.value, 10);
  // Fix: count only frames processed in THIS run. The old ETA divided the
  // elapsed time by the loop index, so cache hits (skipped frames) inflated
  // the denominator and made the estimate far too optimistic on resumed runs;
  // it also divided at i=0 before any frame had finished.
  let processedThisRun = 0;
  for (let i = 0; i < totalFrames; i++) {
    if (!isCachingFrames) {
      updateStatus(`Processing stopped at frame ${i}.`, false);
      break;
    }
    if (frameCache.has(i)) {
      updateProgressBar(i);
      continue;
    }
    let etaString = "";
    if (processedThisRun > 0) {
      const elapsedTime = performance.now() - processingStartTime;
      const avgTimePerFrame = elapsedTime / processedThisRun;
      const framesRemaining = totalFrames - (i + 1);
      const etaSeconds = Math.ceil((framesRemaining * avgTimePerFrame) / 1000);
      if (etaSeconds > 0) {
        const minutes = Math.floor(etaSeconds / 60);
        const seconds = etaSeconds % 60;
        etaString = ` (ETA: ${minutes}m ${seconds.toString().padStart(2, "0")}s)`;
      }
    }
    updateStatus(`Processing frame ${i + 1} of ${totalFrames}...${etaString}`, true);
    // Seek to the frame's timestamp and process it once the seek lands.
    const time = i / fps;
    await new Promise((resolve) => {
      videoPlayer.addEventListener(
        "seeked",
        async function onSeek() {
          videoPlayer.removeEventListener("seeked", onSeek);
          await processSingleFrame(i);
          updateProgressBar(i);
          resolve();
        },
        { once: true },
      );
      videoPlayer.currentTime = time;
    });
    processedThisRun++;
  }
  // Restore the UI whether we finished or were stopped early.
  isCachingFrames = false;
  processBtn.textContent = "Process Video";
  processBtn.disabled = false;
  setAllControlsDisabled(false);
  videoContainer.classList.add("processed");
  if (frameCache.size > 0) {
    frameSlider.value = 0;
    await drawHighlightsForCurrentFrame();
  }
  if (frameCache.size === totalFrames) {
    updateStatus("Processing complete. Click on an object to select it.", false);
  }
}
function updateProgressBar(frameIndex) {
  // Fill the slider track up to (and including) the given frame index.
  let progress = 0;
  if (totalFrames > 0) {
    progress = ((frameIndex + 1) / totalFrames) * 100;
  }
  sliderProgress.style.width = `${progress}%`;
}
// Run inference on the video element's CURRENT frame (the caller is
// responsible for seeking first) and cache the result under `frameIndex`:
// a display-resolution snapshot plus the normalized patch features.
async function processSingleFrame(frameIndex) {
try {
// Full-resolution snapshot used for display while scrubbing frames.
const frameCanvas = document.createElement("canvas");
frameCanvas.width = overlayCanvas.width;
frameCanvas.height = overlayCanvas.height;
frameCanvas.getContext("2d").drawImage(videoPlayer, 0, 0, frameCanvas.width, frameCanvas.height);
// Downscaled copy at the model input size (sides are multiples of patchSize,
// see updateProcessingDimensions), so no processor-side resize is needed.
offscreenCtx.drawImage(videoPlayer, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
const image = await RawImage.fromCanvas(offscreenCanvas);
const inputs = await processor(image);
// The patch grid is the same for every frame, so derive it once from the
// first processed frame's pixel dimensions.
if (!gridInfo) {
const [_, __, h, w] = inputs.pixel_values.dims;
gridInfo = {
width: w / patchSize,
height: h / patchSize,
numPatches: (w / patchSize) * (h / patchSize),
};
}
const { last_hidden_state } = await model(inputs);
// Number of leading non-patch tokens the postprocessor should skip:
// the CLS token plus any register tokens.
const offset = new Tensor("int64", [1 + (model.config.num_register_tokens ?? 0)], []);
const postprocessInputs = { last_hidden_state, offset };
const { normalized_features } = await postprocessorModel(postprocessInputs);
frameCache.set(frameIndex, {
canvas: frameCanvas,
features: normalized_features,
});
// last_hidden_state lives in a GPU buffer (see preferredOutputLocation in
// initialize); release it now that the postprocessor has consumed it.
last_hidden_state.dispose();
} catch (error) {
// NOTE(review): a failed frame is simply skipped (left out of the cache);
// tensors created before the throw are not explicitly disposed here.
console.error(`Failed to process frame ${frameIndex}:`, error);
}
}
async function handleUserSelection(event) {
  // Map a click on the overlay canvas to a patch-grid cell and record it as a
  // positive example for the similarity search.
  event.preventDefault();
  if (isCachingFrames) return;
  if (!frameCache.size || !gridInfo) {
    updateStatus("Please process the video before making a selection.");
    return;
  }
  // Fix: only the primary (left) button selects. Bail out before touching
  // state so other buttons no longer create empty `{ positive: Set() }`
  // entries in `multiFrameSelections`.
  if (event.button !== 0) return;
  const currentFrame = parseInt(frameSlider.value, 10);
  if (!multiFrameSelections.has(currentFrame)) {
    multiFrameSelections.set(currentFrame, { positive: new Set() });
  }
  const frameSelection = multiFrameSelections.get(currentFrame);
  // Convert viewport coordinates to patch-grid coordinates.
  const rect = videoContainer.getBoundingClientRect();
  const gridX = Math.floor(((event.clientX - rect.left) / rect.width) * gridInfo.width);
  const gridY = Math.floor(((event.clientY - rect.top) / rect.height) * gridInfo.height);
  frameSelection.positive.add(gridY * gridInfo.width + gridX);
  await drawHighlightsForCurrentFrame();
}
async function drawHighlightsForCurrentFrame() {
  // Redraw the overlay: cached frame (or live video), then similarity
  // highlights, then the user's own selections on top.
  const currentFrame = parseInt(frameSlider.value, 10);
  const frameData = frameCache.get(currentFrame);

  overlayCtx.clearRect(0, 0, overlayCanvas.width, overlayCanvas.height);
  if (!frameData) {
    // Frame not processed yet: mirror the raw video frame and stop.
    overlayCtx.drawImage(videoPlayer, 0, 0, overlayCanvas.width, overlayCanvas.height);
    return;
  }
  overlayCtx.drawImage(frameData.canvas, 0, 0, overlayCanvas.width, overlayCanvas.height);
  if (!gridInfo) return;

  const cellW = overlayCanvas.width / gridInfo.width;
  const cellH = overlayCanvas.height / gridInfo.height;
  const paintCells = (indices, color) => {
    overlayCtx.fillStyle = color;
    for (const idx of indices) {
      overlayCtx.fillRect((idx % gridInfo.width) * cellW, Math.floor(idx / gridInfo.width) * cellH, cellW, cellH);
    }
  };

  // Cyan: patches similar to the selections (model output).
  const highlighted = await getSimilarPatches();
  if (highlighted) paintCells(highlighted, "rgba(56, 189, 248, 0.6)");

  // Orange: the user's clicked patches on this frame.
  const frameSelection = multiFrameSelections.get(currentFrame);
  if (frameSelection) paintCells(frameSelection.positive, "rgba(251, 146, 60, 0.7)");
}
async function getSimilarPatches() {
  // Compare every patch of the current frame against all positively-selected
  // patches (from any frame) and return the set of patch indices whose best
  // similarity score exceeds the threshold. Returns null when there is no
  // current-frame data or no selections to compare against.
  const currentFrame = parseInt(frameSlider.value, 10);
  const currentFrameData = frameCache.get(currentFrame);

  // Pass 1: collect (frameData, patchIndex) pairs so the query buffer can be
  // pre-allocated in one piece.
  const selectedPatches = [];
  for (const [frameIdx, selection] of multiFrameSelections.entries()) {
    if (selection.positive.size === 0) continue;
    const sourceFrameData = frameCache.get(frameIdx);
    if (!sourceFrameData) continue;
    for (const patchIndex of selection.positive) {
      selectedPatches.push([sourceFrameData, patchIndex]);
    }
  }
  if (!currentFrameData || selectedPatches.length === 0) {
    return null;
  }

  const [batch, , hiddenSize] = currentFrameData.features.dims;

  // Pass 2: copy each selected patch embedding into one contiguous buffer.
  // Fix: the previous `push(...features.slice(...))` spreads hiddenSize
  // arguments per patch onto the call stack, which can exceed the engine's
  // argument limit (stack overflow) once many patches are selected.
  const queryData = new Float32Array(selectedPatches.length * hiddenSize);
  selectedPatches.forEach(([frameData, patchIndex], i) => {
    const start = patchIndex * hiddenSize;
    queryData.set(frameData.features.data.slice(start, start + hiddenSize), i * hiddenSize);
  });

  const queryTensor = new Tensor("float32", queryData, [batch, selectedPatches.length, hiddenSize]);
  // Features come from the "postprocess" graph as `normalized_features`, so
  // this matmul should yield cosine similarities — confirm against the export.
  const scoresTensor = await matmul(queryTensor, currentFrameData.features.permute(0, 2, 1));
  const maxScores = scoresTensor.max(1); // best score per current-frame patch
  const scoresData = maxScores.data;

  const threshold = parseFloat(thresholdSlider.value);
  const highlighted = new Set();
  for (let i = 0; i < scoresData.length; ++i) {
    if (scoresData[i] > threshold) highlighted.add(i);
  }

  queryTensor.dispose();
  scoresTensor.dispose();
  maxScores.dispose();
  return highlighted;
}
// Debounced scrub handler for the frame slider. After processing, frames come
// from the cache (fast, ~16ms debounce); before processing, the video element
// must actually seek (slower, 50ms debounce + a `seeked` wait).
function handleSliderInput() {
// Scrubbing always interrupts playback (both rAF-based and native).
if (isPlaying) togglePlayback();
if (!videoPlayer.paused) videoPlayer.pause();
// Collapse rapid slider events into a single redraw/seek.
clearTimeout(seekTimeoutId);
if (frameCache.size > 0) {
seekTimeoutId = setTimeout(async () => {
updateFrameCounter();
await drawHighlightsForCurrentFrame();
}, 16);
} else {
seekTimeoutId = setTimeout(() => {
const fps = parseInt(fpsInput.value, 10);
const frame = parseInt(frameSlider.value, 10);
const time = frame / fps;
videoPlayer.addEventListener(
"seeked",
() => {
// Wait one paint so the seeked frame is actually decoded before drawing.
requestAnimationFrame(async () => {
updateFrameCounter();
await drawHighlightsForCurrentFrame();
});
},
{ once: true },
);
videoPlayer.currentTime = time;
}, 50);
}
}
function updateFrameCounter() {
  // Counter is 1-based for display; shows 0 when no frames are loaded.
  const display = totalFrames > 0 ? parseInt(frameSlider.value, 10) + 1 : 0;
  frameCounter.textContent = `Frame ${display} / ${totalFrames}`;
}
function navigateFrames(direction) {
  // Step one frame forward (+1) or backward (-1), staying within bounds.
  const target = parseInt(frameSlider.value, 10) + direction;
  if (target < 0 || target >= totalFrames) return;
  frameSlider.value = target;
  handleSliderInput();
}
function playbackLoop(currentTime) {
  // requestAnimationFrame-driven playback over CACHED frames, throttled to
  // the user-selected FPS.
  if (!isPlaying) return;
  animationFrameId = requestAnimationFrame(playbackLoop);

  const interval = 1000 / parseInt(fpsInput.value, 10);
  const elapsed = currentTime - lastFrameTime;
  if (elapsed < interval) return;

  // Carry the remainder forward so the average frame rate stays accurate.
  lastFrameTime = currentTime - (elapsed % interval);

  const currentFrame = parseInt(frameSlider.value, 10);
  if (currentFrame >= totalFrames - 1) {
    // Last frame reached: stop playback.
    togglePlayback();
    return;
  }
  frameSlider.value = currentFrame + 1;
  updateFrameCounter();
  drawHighlightsForCurrentFrame();
}
function togglePlayback() {
  // Before any frames are processed, defer to the native <video> element.
  if (frameCache.size === 0) {
    if (videoPlayer.paused) {
      videoPlayer.play();
      playBtn.textContent = "Pause";
    } else {
      videoPlayer.pause();
      playBtn.textContent = "Play";
    }
    return;
  }
  // After processing, playback is a rAF loop over cached frames.
  isPlaying = !isPlaying;
  playBtn.textContent = isPlaying ? "Pause" : "Play";
  if (!isPlaying) {
    cancelAnimationFrame(animationFrameId);
    return;
  }
  // Restart from the beginning when play is pressed at the end of the clip.
  if (parseInt(frameSlider.value, 10) >= totalFrames - 1) {
    frameSlider.value = 0;
  }
  lastFrameTime = performance.now();
  animationFrameId = requestAnimationFrame(playbackLoop);
}
function handleVideoTimeUpdate() {
  // Keep slider/counter/overlay in sync during NATIVE playback, i.e. only
  // before any frames have been processed.
  if (frameCache.size > 0 || videoPlayer.paused) return;
  const fps = parseInt(fpsInput.value, 10);
  const currentFrame = Math.floor(videoPlayer.currentTime * fps);
  if (parseInt(frameSlider.value, 10) === currentFrame) return;
  frameSlider.value = currentFrame;
  updateFrameCounter();
  overlayCtx.drawImage(videoPlayer, 0, 0, overlayCanvas.width, overlayCanvas.height);
}
async function clearSelection(updateUI = true) {
  // Drop all per-frame selections and redraw the overlay without highlights.
  multiFrameSelections.clear();
  await drawHighlightsForCurrentFrame();
  if (updateUI) {
    updateStatus("All selections cleared.");
  }
}
function resetProcessingState() {
  // Free every cached feature tensor and return to the "not processed" state.
  for (const frameData of frameCache.values()) {
    frameData.features.dispose();
  }
  frameCache.clear();
  gridInfo = null;
  processBtn.disabled = false;
  sliderProgress.style.width = "0%";
  videoContainer.classList.remove("processed");
  clearSelection(false);
}
function resetAppState() {
  // Full reset: tear down the video element first, then the processing
  // cache, then the frame UI.
  resetVideoState();
  resetProcessingState();
  totalFrames = 0;
  frameSlider.value = 0;
  updateFrameCounter();
}
function resetVideoState() {
  if (isPlaying) togglePlayback();
  if (videoPlayer.src) {
    videoPlayer.pause();
    // Detach the source and force the element to release it.
    videoPlayer.removeAttribute("src");
    videoPlayer.load();
  }
  // Release the blob URL backing the previous video, if any.
  if (currentVideoUrl) URL.revokeObjectURL(currentVideoUrl);
  currentVideoUrl = null;
  overlayCtx.clearRect(0, 0, overlayCanvas.width, overlayCanvas.height);
  videoContainer.classList.add("hidden");
  controls.classList.add("hidden");
}
function startOver() {
  // Return to the initial "pick a video" screen.
  resetAppState();
  dropZone.classList.remove("hidden");
  examplesContainer.classList.remove("hidden");
  updateStatus("Select a video or try an example.");
}
// --- 5. Event Listeners ---
// File selection and drag-and-drop onto the upload zone.
videoUploader.addEventListener("change", (e) => loadVideoFile(e.target.files[0]));
videoPlayer.addEventListener("loadedmetadata", setupAndValidateVideo);
videoPlayer.addEventListener("timeupdate", handleVideoTimeUpdate);
dropZone.addEventListener("dragover", (e) => {
e.preventDefault();
dropZone.firstElementChild.classList.add("dragover");
});
dropZone.addEventListener("dragleave", () => dropZone.firstElementChild.classList.remove("dragover"));
dropZone.addEventListener("drop", (e) => {
e.preventDefault();
dropZone.firstElementChild.classList.remove("dragover");
loadVideoFile(e.dataTransfer.files[0]);
});
// Processing and playback controls.
processBtn.addEventListener("click", handleProcessButtonClick);
clearBtn.addEventListener("click", () => clearSelection(true));
startOverBtn.addEventListener("click", startOver);
playBtn.addEventListener("click", togglePlayback);
prevFrameBtn.addEventListener("click", () => navigateFrames(-1));
nextFrameBtn.addEventListener("click", () => navigateFrames(1));
frameSlider.addEventListener("input", handleSliderInput);
// Patch selection on the overlay canvas; the native context menu is
// suppressed so right-clicks can be intercepted cleanly.
overlayCanvas.addEventListener("mousedown", handleUserSelection);
overlayCanvas.addEventListener("contextmenu", (e) => e.preventDefault());
// Similarity threshold: live redraw when any selections exist.
thresholdSlider.addEventListener("input", (e) => {
thresholdValue.textContent = parseFloat(e.target.value).toFixed(2);
if (multiFrameSelections.size > 0) drawHighlightsForCurrentFrame();
});
scaleSlider.addEventListener("input", updateProcessingDimensions);
// Changing the FPS invalidates the frame cache (frame timestamps shift).
fpsInput.addEventListener("change", () => {
if (videoPlayer.src) {
resetProcessingState();
updateFrameCount();
updateStatus("FPS changed. Please process the video again.");
}
});
// --- 6. Initial Load ---
initialize();
</script>
</body>
</html>