Hugging Face Space — Running on Zero
Commit: "Update app.py" (Browse files)
File changed: app.py
@@ -18,8 +18,11 @@ DEFAULT_INFERENCE_STEPS = 1
 dtype = torch.float16
 pipe = FLUXPipelineWithIntermediateOutputs.from_pretrained(
     "ostris/OpenFLUX.1", torch_dtype=dtype
-)
-pipe.vae = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=torch.float16)
+)
+pipe.vae = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=torch.float16)
+pipe.load_lora_weights("ostris/OpenFLUX.1", weight_name="openflux1-v0.1.0-fast-lora.safetensors", adapter_name="fast")
+pipe.set_adapters("fast")
+pipe.to("cuda")
 # pipe.transformer.to(memory_format=torch.channels_last)
 # pipe.transformer = torch.compile(
 #    pipe.transformer, mode="max-autotune", fullgraph=True