Spaces:
Running
on
Zero
Running
on
Zero
Commit
·
22bfe4c
1
Parent(s):
1fd5aaa
Add new model BiRefNet_HR to the model list.
Browse files
- app.py +8 -2
- app_local.py +8 -2
- requirements.txt +12 -12
app.py
CHANGED
|
@@ -73,6 +73,7 @@ class ImagePreprocessor():
|
|
| 73 |
|
| 74 |
usage_to_weights_file = {
|
| 75 |
'General': 'BiRefNet',
|
|
|
|
| 76 |
'General-Lite': 'BiRefNet_lite',
|
| 77 |
'General-Lite-2K': 'BiRefNet_lite-2K',
|
| 78 |
'Matting': 'BiRefNet-matting',
|
|
@@ -104,8 +105,13 @@ def predict(images, resolution, weights_file):
|
|
| 104 |
try:
|
| 105 |
resolution = [int(int(reso)//32*32) for reso in resolution.strip().split('x')]
|
| 106 |
except:
|
| 107 |
-
|
| 108 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 109 |
|
| 110 |
if isinstance(images, list):
|
| 111 |
# For tab_batch
|
|
|
|
| 73 |
|
| 74 |
usage_to_weights_file = {
|
| 75 |
'General': 'BiRefNet',
|
| 76 |
+
'General-HR': 'BiRefNet_HR',
|
| 77 |
'General-Lite': 'BiRefNet_lite',
|
| 78 |
'General-Lite-2K': 'BiRefNet_lite-2K',
|
| 79 |
'Matting': 'BiRefNet-matting',
|
|
|
|
| 105 |
try:
|
| 106 |
resolution = [int(int(reso)//32*32) for reso in resolution.strip().split('x')]
|
| 107 |
except:
|
| 108 |
+
if weights_file == 'General-HR':
|
| 109 |
+
resolution = (2048, 2048)
|
| 110 |
+
elif weights_file == 'General-Lite-2K':
|
| 111 |
+
resolution = (2560, 1440)
|
| 112 |
+
else:
|
| 113 |
+
resolution = (1024, 1024)
|
| 114 |
+
print('Invalid resolution input. Automatically changed to 1024x1024 / 2048x2048 / 2560x1440.')
|
| 115 |
|
| 116 |
if isinstance(images, list):
|
| 117 |
# For tab_batch
|
app_local.py
CHANGED
|
@@ -73,6 +73,7 @@ class ImagePreprocessor():
|
|
| 73 |
|
| 74 |
usage_to_weights_file = {
|
| 75 |
'General': 'BiRefNet',
|
|
|
|
| 76 |
'General-Lite': 'BiRefNet_lite',
|
| 77 |
'General-Lite-2K': 'BiRefNet_lite-2K',
|
| 78 |
'Matting': 'BiRefNet-matting',
|
|
@@ -104,8 +105,13 @@ def predict(images, resolution, weights_file):
|
|
| 104 |
try:
|
| 105 |
resolution = [int(int(reso)//32*32) for reso in resolution.strip().split('x')]
|
| 106 |
except:
|
| 107 |
-
|
| 108 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 109 |
|
| 110 |
if isinstance(images, list):
|
| 111 |
# For tab_batch
|
|
|
|
| 73 |
|
| 74 |
usage_to_weights_file = {
|
| 75 |
'General': 'BiRefNet',
|
| 76 |
+
'General-HR': 'BiRefNet_HR',
|
| 77 |
'General-Lite': 'BiRefNet_lite',
|
| 78 |
'General-Lite-2K': 'BiRefNet_lite-2K',
|
| 79 |
'Matting': 'BiRefNet-matting',
|
|
|
|
| 105 |
try:
|
| 106 |
resolution = [int(int(reso)//32*32) for reso in resolution.strip().split('x')]
|
| 107 |
except:
|
| 108 |
+
if weights_file == 'General-HR':
|
| 109 |
+
resolution = (2048, 2048)
|
| 110 |
+
elif weights_file == 'General-Lite-2K':
|
| 111 |
+
resolution = (2560, 1440)
|
| 112 |
+
else:
|
| 113 |
+
resolution = (1024, 1024)
|
| 114 |
+
print('Invalid resolution input. Automatically changed to 1024x1024 / 2048x2048 / 2560x1440.')
|
| 115 |
|
| 116 |
if isinstance(images, list):
|
| 117 |
# For tab_batch
|
requirements.txt
CHANGED
|
@@ -1,13 +1,13 @@
|
|
| 1 |
-
torch==2.
|
| 2 |
-
torchvision==0.
|
| 3 |
-
opencv-python
|
| 4 |
-
tqdm
|
| 5 |
-
timm
|
| 6 |
-
prettytable
|
| 7 |
-
scipy
|
| 8 |
-
scikit-image
|
| 9 |
-
kornia
|
| 10 |
-
gradio_imageslider
|
| 11 |
-
transformers
|
| 12 |
-
huggingface_hub
|
| 13 |
einops
|
|
|
|
| 1 |
+
torch==2.5.1
|
| 2 |
+
torchvision==0.20.1
|
| 3 |
+
opencv-python
|
| 4 |
+
tqdm
|
| 5 |
+
timm
|
| 6 |
+
prettytable
|
| 7 |
+
scipy
|
| 8 |
+
scikit-image
|
| 9 |
+
kornia
|
| 10 |
+
gradio_imageslider>=0.0.18
|
| 11 |
+
transformers>=4.42.4
|
| 12 |
+
huggingface_hub>=0.25
|
| 13 |
einops
|