Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -38,7 +38,6 @@ with open("flux_loras.json", "r") as file:
 ]
 print(f"Loaded {len(flux_loras_raw)} LoRAs from JSON")
 # Global variables for LoRA management
-current_lora = None
 lora_cache = {}

 def load_lora_weights(repo_id, weights_filename):
@@ -144,7 +143,7 @@ def infer_with_lora_wrapper(input_image, prompt, selected_index, lora_state, cus
 @spaces.GPU
 def infer_with_lora(input_image, prompt, selected_index, lora_state, custom_lora, seed=42, randomize_seed=False, guidance_scale=2.5, lora_scale=1.0, portrait_mode=False, flux_loras=None, progress=gr.Progress(track_tqdm=True)):
     """Generate image with selected LoRA"""
-    global
+    global pipe

     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
@@ -157,10 +156,10 @@ def infer_with_lora(input_image, prompt, selected_index, lora_state, custom_lora
     lora_to_use = flux_loras[selected_index]
     print(f"Loaded {len(flux_loras)} LoRAs from JSON")
     # Load LoRA if needed
-    print(f"LoRA to use: {lora_to_use}
-    if lora_to_use
+    print(f"LoRA to use: {lora_to_use}")
+    if lora_to_use:
         try:
-            if
+            if "selected_lora" in pipe.get_active_adapters():
                 pipe.unload_lora_weights()

             lora_path = load_lora_weights(lora_to_use["repo"], lora_to_use["weights"])
@@ -168,14 +167,9 @@ def infer_with_lora(input_image, prompt, selected_index, lora_state, custom_lora
             pipe.load_lora_weights(lora_path, adapter_name="selected_lora")
             pipe.set_adapters(["selected_lora"], adapter_weights=[lora_scale])
             print(f"loaded: {lora_path} with scale {lora_scale}")
-            current_lora = lora_to_use

         except Exception as e:
             print(f"Error loading LoRA: {e}")
-            # Continue without LoRA
-    elif lora_scale != lora_state:
-        print(f"using already loaded lora: {lora_to_use}, udpated {lora_scale} based on user preference")
-        pipe.set_adapters(["selected_lora"], adapter_weights=[lora_scale])


     input_image = input_image.convert("RGB")
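For readers skimming the diff: this commit drops the module-level current_lora bookkeeping and instead asks the pipeline which adapters are currently attached before unloading. Below is a minimal standalone sketch of the resulting pattern, not code from the commit itself; it assumes pipe is a diffusers pipeline with PEFT-backed LoRA support (as in this Space) and that load_lora_weights(repo_id, weights_filename) is the Space's own download helper returning a local file path. The wrapper name apply_selected_lora is illustrative.

    def apply_selected_lora(pipe, lora_to_use, lora_scale, load_lora_weights):
        """Attach the chosen LoRA to `pipe`, replacing any previously attached one.

        Illustrative wrapper. Assumes `pipe` supports diffusers' PEFT LoRA API
        (load_lora_weights / set_adapters / get_active_adapters) and that the
        `load_lora_weights` argument is the Space's helper which downloads the
        .safetensors file and returns its local path.
        """
        if not lora_to_use:
            return
        try:
            # Query the pipeline for attached adapters instead of tracking them
            # in a module-level variable; unload before attaching the new one.
            if "selected_lora" in pipe.get_active_adapters():
                pipe.unload_lora_weights()

            lora_path = load_lora_weights(lora_to_use["repo"], lora_to_use["weights"])
            pipe.load_lora_weights(lora_path, adapter_name="selected_lora")
            pipe.set_adapters(["selected_lora"], adapter_weights=[lora_scale])
            print(f"loaded: {lora_path} with scale {lora_scale}")
        except Exception as e:
            print(f"Error loading LoRA: {e}")

Querying get_active_adapters() keeps the unload decision tied to the pipeline's own state, so the @spaces.GPU-decorated inference function no longer depends on a global flag that can drift out of sync between requests.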