multimodalart committed
Commit 56469e7 · verified · 1 Parent(s): 0f8720a

Update app.py

Files changed (1)
  1. app.py +3 -1
app.py CHANGED
@@ -157,6 +157,7 @@ def infer_with_lora(input_image, prompt, selected_index, lora_state, custom_lora
     lora_to_use = flux_loras[selected_index]
     print(f"Loaded {len(flux_loras)} LoRAs from JSON")
     # Load LoRA if needed
+    print(f"LoRA to use: {lora_to_use}, Current LoRA: {current_lora}")
     if lora_to_use and lora_to_use != current_lora:
         try:
             if current_lora:
@@ -173,8 +174,9 @@ def infer_with_lora(input_image, prompt, selected_index, lora_state, custom_lora
             print(f"Error loading LoRA: {e}")
             # Continue without LoRA
     elif lora_scale != lora_state:
-        pipe.set_adapters(["selected_lora"], adapter_weights=[lora_scale])
         print(f"using already loaded lora: {lora_to_use}, udpated {lora_scale} based on user preference")
+        pipe.set_adapters(["selected_lora"], adapter_weights=[lora_scale])
+

     input_image = input_image.convert("RGB")
     # Add trigger word to prompt
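For context: the change adds a debug print of the selected versus currently loaded LoRA, and in the branch where the LoRA is already loaded and only its scale changed, pipe.set_adapters is now called after the log line. Below is a minimal sketch of that load-once / re-weight pattern, assuming a diffusers pipeline with PEFT-backed LoRA support; the model id, LoRA repo, and scale are illustrative placeholders, not values taken from this Space.

import torch
from diffusers import DiffusionPipeline

# Placeholder ids; the Space resolves its own model and LoRA repos.
pipe = DiffusionPipeline.from_pretrained("some-org/some-flux-model", torch_dtype=torch.bfloat16)

# Load the LoRA once under a named adapter...
pipe.load_lora_weights("some-user/some-flux-lora", adapter_name="selected_lora")

# ...then, on later calls that reuse the same LoRA, only adjust its weight
# instead of reloading it from disk.
pipe.set_adapters(["selected_lora"], adapter_weights=[0.9])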