Update app.py
app.py CHANGED
@@ -7,7 +7,7 @@ import numpy as np
 import PIL.Image
 import torch
 import torchvision.transforms.functional as TF
-from diffusers import
+from diffusers import EulerAncestralDiscreteScheduler, StableDiffusionXLAdapterPipeline, T2IAdapter

 DESCRIPTION = "# T2I-Adapter-SDXL Sketch"

@@ -65,7 +65,7 @@ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 if torch.cuda.is_available():
     model_id = "stabilityai/stable-diffusion-xl-base-1.0"
     adapter = T2IAdapter.from_pretrained("TencentARC/t2i-adapter-sketch-sdxl-1.0", torch_dtype=torch.float16, variant="fp16")
-    scheduler =
+    scheduler = EulerAncestralDiscreteScheduler.from_pretrained(model_id, subfolder="scheduler")
     pipe = StableDiffusionXLAdapterPipeline.from_pretrained(
         model_id,
         adapter=adapter,
@@ -115,7 +115,7 @@ def run(
         adapter_conditioning_scale=adapter_conditioning_scale,
         cond_tau=cond_tau,
     ).images[0]
-    return out
+    return out, image


 with gr.Blocks() as demo:
@@ -141,7 +141,7 @@ with gr.Blocks() as demo:
                     label="Style"
                 )
                 negative_prompt = gr.Textbox(
-                    label="Negative prompt", value="
+                    label="Negative prompt", value=""
                 )
                 num_steps = gr.Slider(
                     label="Number of steps",
@@ -180,7 +180,8 @@ with gr.Blocks() as demo:
                 )
                 randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
         with gr.Column():
-            result = gr.Image(label="Result", height=600)
+            # result = gr.Image(label="Result", height=600)
+            result = gr.Gallery(label="Result").style(grid=(1,2))

    inputs = [
        image,