Update app.py
Key Changes:
Removed all of the logic that used get_from_cache, along with the get_from_cache import. The download_model function now relies on hf_hub_download for Hugging Face repositories; hf_hub_download always checks the local cache first and only downloads when necessary, which is the intended behavior (a short illustration follows below). Plain URLs are still downloaded with requests and placed into the HF cache.
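
As a quick illustration of that cache-first behavior (the repo id and filename here are placeholders, not anything from this Space): calling hf_hub_download twice for the same file returns the same local path, and the second call is resolved from the local cache rather than re-downloaded.

from huggingface_hub import hf_hub_download

# First call downloads the file into the HF cache (if it is not already there).
path_first = hf_hub_download(repo_id="some-org/some-model", filename="model.safetensors")

# Second call finds the file in the cache and returns the same local path.
path_second = hf_hub_download(repo_id="some-org/some-model", filename="model.safetensors")
assert path_first == path_second
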
Corrective Actions:
Update download_model: Replace your download_model function with the corrected version (reflected in the diff below).
Update Imports: Replace your imports with the corrected ones.
Rebuild/Restart: Rebuild or restart your Gradio Space.
app.py CHANGED

@@ -7,23 +7,16 @@ from safetensors.torch import load_file
 from collections import OrderedDict
 import re
 import json
-
-import requests # Re-added for URL handling
+import requests
 import subprocess
 from urllib.parse import urlparse, unquote
 from pathlib import Path
-import tempfile
-# from tqdm import tqdm # Removed: not crucial and can break display in gradio.
-import psutil
-import math
-import shutil
 import hashlib
 from datetime import datetime
 from typing import Dict, List, Optional
-from huggingface_hub import login, HfApi, hf_hub_download
-from huggingface_hub.utils import validate_repo_id, HFValidationError
-from huggingface_hub.
-from huggingface_hub.constants import HUGGINGFACE_HUB_CACHE # Import HUGGINGFACE_HUB_CACHE
+from huggingface_hub import login, HfApi, hf_hub_download
+from huggingface_hub.utils import validate_repo_id, HFValidationError  # Removed get_from_cache
+from huggingface_hub.constants import HUGGINGFACE_HUB_CACHE


 # ---------------------- DEPENDENCIES ----------------------

@@ -95,12 +88,8 @@ def download_model(model_path_or_url):
     if model_path_or_url.startswith("http://") or model_path_or_url.startswith(
         "https://"
     ):
-        #
-        cache_path = get_from_cache(model_path_or_url)  # Use get_from_cache
-        if cache_path is not None:
-            return cache_path
+        # It's a URL: download and put into HF cache

-        # It's a URL and not in cache: download manually and put into HF cache
         response = requests.get(model_path_or_url, stream=True)
         response.raise_for_status()  # Raise HTTPError for bad requests (4xx or 5xx)

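
Putting the two hunks together, here is a minimal sketch of what the corrected download_model could look like. It is an illustration only: the filename handling, the URL hashing, and the repo_id/filename split are assumptions made for this sketch, not the Space's actual code; only the overall flow (requests for plain URLs, hf_hub_download for Hub repositories) comes from the diff above.

import hashlib
import os
from urllib.parse import urlparse, unquote

import requests
from huggingface_hub import hf_hub_download
from huggingface_hub.constants import HUGGINGFACE_HUB_CACHE
from huggingface_hub.utils import validate_repo_id, HFValidationError


def download_model(model_path_or_url: str) -> str:
    if model_path_or_url.startswith(("http://", "https://")):
        # Plain URL: download with requests and store the file under the HF cache dir.
        # (Directory layout and hashing are illustrative, not the Space's real scheme.)
        filename = unquote(os.path.basename(urlparse(model_path_or_url).path)) or "model.bin"
        url_hash = hashlib.sha256(model_path_or_url.encode()).hexdigest()[:16]
        target_dir = os.path.join(HUGGINGFACE_HUB_CACHE, "url_downloads", url_hash)
        os.makedirs(target_dir, exist_ok=True)
        target_path = os.path.join(target_dir, filename)
        if os.path.exists(target_path):
            # Crude cache check for plain URLs, since hf_hub_download is not used here.
            return target_path
        response = requests.get(model_path_or_url, stream=True)
        response.raise_for_status()  # Raise HTTPError for bad requests (4xx or 5xx)
        with open(target_path, "wb") as f:
            for chunk in response.iter_content(chunk_size=1024 * 1024):
                f.write(chunk)
        return target_path

    # Otherwise assume "<repo_id>/<filename>" on the Hub; hf_hub_download checks the
    # local cache first and only downloads when the file is missing.
    repo_id, _, filename = model_path_or_url.rpartition("/")
    try:
        validate_repo_id(repo_id)
    except HFValidationError as err:
        raise ValueError(f"Not a URL and not a valid Hub repo id: {model_path_or_url!r}") from err
    return hf_hub_download(repo_id=repo_id, filename=filename)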