Update app.py
app.py CHANGED
@@ -204,36 +204,43 @@ def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_ind
     selected_loras = [loras[idx] for idx in selected_indices]
 
     # Build the prompt with trigger words
-
+    prepends = []
+    appends = []
     for lora in selected_loras:
         trigger_word = lora.get('trigger_word', '')
         if trigger_word:
             if lora.get("trigger_position") == "prepend":
-
+                prepends.append(trigger_word)
             else:
-
-
+                appends.append(trigger_word)
+    prompt_mash = " ".join(prepends + [prompt] + appends)
+
     # Unload previous LoRA weights
     with calculateDuration("Unloading LoRA"):
         pipe.unload_lora_weights()
         pipe_i2i.unload_lora_weights()
 
     # Load LoRA weights with respective scales
+    lora_names = []
     with calculateDuration("Loading LoRA weights"):
         for idx, lora in enumerate(selected_loras):
+            lora_name = f"lora_{idx}"
+            lora_names.append(lora_name)
             lora_path = lora['repo']
             scale = lora_scale_1 if idx == 0 else lora_scale_2
             if image_input is not None:
                 if "weights" in lora:
-                    pipe_i2i.load_lora_weights(lora_path, weight_name=lora["weights"],
+                    pipe_i2i.load_lora_weights(lora_path, weight_name=lora["weights"], low_cpu_mem_usage=True, adapter_name=lora_name)
                 else:
-                    pipe_i2i.load_lora_weights(lora_path,
+                    pipe_i2i.load_lora_weights(lora_path, low_cpu_mem_usage=True, adapter_name=lora_name)
             else:
                 if "weights" in lora:
-                    pipe.load_lora_weights(lora_path, weight_name=lora["weights"],
+                    pipe.load_lora_weights(lora_path, weight_name=lora["weights"], low_cpu_mem_usage=True, adapter_name=lora_name)
                 else:
-                    pipe.load_lora_weights(lora_path,
+                    pipe.load_lora_weights(lora_path, low_cpu_mem_usage=True, adapter_name=lora_name)
 
+    pipeline.set_adapters(lora_names, adapter_weights=[lora_scale_1, lora_scale_2])
+
     # Set random seed for reproducibility
     with calculateDuration("Randomizing seed"):
         if randomize_seed:
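For reference, below is a minimal standalone sketch of the pattern this hunk adopts: gather trigger words around the user prompt, load each LoRA under its own adapter_name, then weight the adapters together with diffusers' set_adapters. The loras entry shape, pipe, prompt_mash, and the two scale variables mirror the diff; the base model id, the example LoRA repos, and the generation settings are assumptions for illustration only. The added pipeline.set_adapters(...) line references a name that does not appear elsewhere in this hunk, so the sketch assumes the adapters are set on the same pipeline object the weights were loaded into.

# Minimal sketch of the new multi-LoRA flow (assumed setup, not the Space's exact code).
import torch
from diffusers import DiffusionPipeline

# Assumption: a FLUX base pipeline; the Space's actual base model may differ.
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
).to("cuda")

# Hypothetical LoRA metadata in the same shape as the Space's loras list.
selected_loras = [
    {"repo": "user/lora-one", "weights": "lora_one.safetensors",
     "trigger_word": "STYLE_ONE", "trigger_position": "prepend"},
    {"repo": "user/lora-two", "trigger_word": "STYLE_TWO"},  # appended by default
]
lora_scale_1, lora_scale_2 = 1.0, 0.8
prompt = "a portrait of an astronaut"

# Build the prompt with trigger words (prepend vs. append), as in the new hunk.
prepends, appends = [], []
for lora in selected_loras:
    trigger_word = lora.get("trigger_word", "")
    if trigger_word:
        if lora.get("trigger_position") == "prepend":
            prepends.append(trigger_word)
        else:
            appends.append(trigger_word)
prompt_mash = " ".join(prepends + [prompt] + appends)

# Load each LoRA under a distinct adapter name, then weight them jointly.
pipe.unload_lora_weights()
lora_names = []
for idx, lora in enumerate(selected_loras):
    lora_name = f"lora_{idx}"
    lora_names.append(lora_name)
    kwargs = {"low_cpu_mem_usage": True, "adapter_name": lora_name}
    if "weights" in lora:
        kwargs["weight_name"] = lora["weights"]
    pipe.load_lora_weights(lora["repo"], **kwargs)

pipe.set_adapters(lora_names, adapter_weights=[lora_scale_1, lora_scale_2])

image = pipe(prompt_mash, num_inference_steps=28, guidance_scale=3.5).images[0]

With set_adapters, both LoRAs stay loaded and the adapter_weights list controls their relative strength at inference time, which is why the scales no longer need to be applied while loading the weights.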