Update app.py
app.py CHANGED
@@ -14,8 +14,8 @@ from transformers import AutoImageProcessor, AutoModel
 DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
 
 MODEL_MAP = {
-    "DINOv3 ViT-L/16 Satellite
-    "DINOv3 ViT-L/16 LVD (
+    "DINOv3 ViT-L/16 Satellite": "facebook/dinov3-vitl16-pretrain-sat493m",
+    "DINOv3 ViT-L/16 LVD (web data)": "facebook/dinov3-vitl16-pretrain-lvd1689m",
     "⚠️ DINOv3 ViT-7B/16 Satellite": "facebook/dinov3-vit7b16-pretrain-sat493m",
 }
 
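The two ViT-L/16 entries now carry full repo ids alongside the existing ViT-7B/16 checkpoint. These resolve to ordinary transformers checkpoints, so they can also be exercised outside the Space. A standalone sketch, assuming access to the `facebook/dinov3-vitl16-pretrain-sat493m` checkpoint from the map and using a dummy RGB image (the image and the printed shape are purely illustrative, not part of the app):

```python
import numpy as np
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModel

repo_id = "facebook/dinov3-vitl16-pretrain-sat493m"  # one of the MODEL_MAP values above

processor = AutoImageProcessor.from_pretrained(repo_id)
model = AutoModel.from_pretrained(repo_id, torch_dtype="auto").eval()

# Dummy 224x224 RGB image standing in for a real satellite tile.
image = Image.fromarray(np.random.randint(0, 255, (224, 224, 3), dtype=np.uint8))

inputs = processor(images=image, return_tensors="pt")
# Cast to the model's (possibly half-precision) dtype to avoid a dtype mismatch.
pixel_values = inputs["pixel_values"].to(model.dtype)

with torch.inference_mode():
    outputs = model(pixel_values=pixel_values)

print(outputs.last_hidden_state.shape)  # (1, num_tokens, hidden_dim)
```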
@@ -49,33 +49,28 @@ def load_model(name):
     """Load model with proper memory management and dtype handling"""
     global processor, model
 
-    cleanup_memory()
-            torch_dtype="auto",
-        )
-        .to(DEVICE)
-        .eval()
-    )
-    except Exception as e:
-        cleanup_memory()
-        return f"Failed to load {name}: {str(e)}"
+    # Clean up existing model
+    cleanup_memory()
+
+    model_id = MODEL_MAP[name]
+
+    # Load processor
+    processor = AutoImageProcessor.from_pretrained(model_id)
+
+    model = (
+        AutoModel.from_pretrained(
+            model_id,
+            torch_dtype="auto",
+        )
+        .to(DEVICE)
+        .eval()
+    )
+
+    # Get model info
+    param_count = sum(p.numel() for p in model.parameters()) / 1e9
+
+    return f"Loaded: {name} | {param_count:.1f}B params | {DEVICE.upper()}"
 
 # Initialize default model
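`cleanup_memory()` is called in both the old and new versions of `load_model`, but its definition is not part of this diff. A minimal sketch of such a helper, assuming it simply drops the module-level `processor`/`model` globals and clears the CUDA cache before the next checkpoint is loaded:

```python
import gc
import torch

def cleanup_memory():
    """Release the previously loaded model before swapping checkpoints."""
    global processor, model
    # Drop references so the old weights become garbage-collectable.
    processor = None
    model = None
    gc.collect()
    if torch.cuda.is_available():
        torch.cuda.empty_cache()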
@@ -226,4 +221,5 @@ with gr.Blocks() as demo:
     cache_examples=False,
     )
 
-
+if __name__ == "__main__":
+    demo.launch(share=False, debug=True)
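The new guard makes the Space launch only when app.py is executed directly. How the MODEL_MAP keys are exposed in the UI and wired to `load_model` is not visible in this diff; a hypothetical sketch of that Blocks wiring (the component names, default choice, and change handler below are assumptions, not the app's actual layout, and `MODEL_MAP`/`load_model` are taken from app.py):

```python
import gradio as gr

# Hypothetical wiring; assumes MODEL_MAP and load_model from app.py are in scope.
with gr.Blocks() as demo:
    backbone = gr.Dropdown(
        choices=list(MODEL_MAP.keys()),     # display names -> repo ids
        value="DINOv3 ViT-L/16 Satellite",  # default to the lighter ViT-L checkpoint
        label="DINOv3 backbone",
    )
    status = gr.Textbox(label="Status", interactive=False)

    # load_model returns the "Loaded: ... | ...B params | CUDA/CPU" status string.
    backbone.change(fn=load_model, inputs=backbone, outputs=status)
```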