Spaces: Running on Zero
yisol committed
Commit ab2e314 · 1 Parent(s): c123434
add auto crop
app.py
CHANGED
@@ -122,7 +122,7 @@ pipe = TryonPipeline.from_pretrained(
 pipe.unet_encoder = UNet_Encoder
 
 @spaces.GPU
-def start_tryon(dict,garm_img,garment_des,is_checked,denoise_steps,seed):
+def start_tryon(dict,garm_img,garment_des,is_checked,is_checked_crop,denoise_steps,seed):
     device = "cuda"
 
     openpose_model.preprocessor.body_estimation.model.to(device)
@@ -130,8 +130,23 @@ def start_tryon(dict,garm_img,garment_des,is_checked,denoise_steps,seed):
     pipe.unet_encoder.to(device)
 
     garm_img= garm_img.convert("RGB").resize((768,1024))
-    human_img = dict["background"].resize((768,1024)).convert("RGB")
+    human_img_orig = dict["background"].resize((768,1024)).convert("RGB")
 
+    if is_checked_crop:
+        width, height = human_img_orig.size
+        target_width = int(min(width, height * (3 / 4)))
+        target_height = int(min(height, width * (4 / 3)))
+        left = (width - target_width) / 2
+        top = (height - target_height) / 2
+        right = (width + target_width) / 2
+        bottom = (height + target_height) / 2
+        cropped_img = human_img_orig.crop((left, top, right, bottom))
+        crop_size = cropped_img.size
+        human_img = cropped_img.resize((768,1024))
+    else:
+        human_img = human_img_orig.resize((768,1024))
+
+
     if is_checked:
         keypoints = openpose_model(human_img.resize((384,512)))
         model_parse, _ = parsing_model(human_img.resize((384,512)))
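Why 3:4: the pipeline consumes 768×1024 inputs, so the auto-crop keeps the largest centered window with that width:height ratio and only then resizes. A minimal standalone sketch of the same arithmetic, assuming PIL; center_crop_3x4 is a hypothetical helper name, not part of the Space:

from PIL import Image

def center_crop_3x4(img):
    # Largest centered 3:4 (width:height) window that fits inside img,
    # mirroring the is_checked_crop branch in the hunk above.
    width, height = img.size
    target_width = int(min(width, height * (3 / 4)))
    target_height = int(min(height, width * (4 / 3)))
    left = (width - target_width) / 2
    top = (height - target_height) / 2
    box = (left, top, left + target_width, top + target_height)
    return img.crop(box), box

# Worked example: a square 1000x1000 photo keeps its full height and
# loses 125 px on each side: box == (125.0, 0.0, 875.0, 1000.0).
cropped, box = center_crop_3x4(Image.new("RGB", (1000, 1000)))
assert cropped.size == (750, 1000)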
@@ -217,7 +232,14 @@ def start_tryon(dict,garm_img,garment_des,is_checked,denoise_steps,seed):
                     ip_adapter_image = garm_img.resize((768,1024)),
                     guidance_scale=2.0,
                 )[0]
-    return images[0], mask_gray
+
+    if is_checked_crop:
+        out_img = images[0].resize(crop_size)
+        human_img_orig.paste(out_img, (int(left), int(top)))
+        return human_img_orig, mask_gray
+    else:
+        return images[0], mask_gray
+    # return images[0], mask_gray
 
 garm_list = os.listdir(os.path.join(example_path,"cloth"))
 garm_list_path = [os.path.join(example_path,"cloth",garm) for garm in garm_list]
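The second half of the change undoes the crop: the (768, 1024) result is scaled back to the crop's original size and pasted into the uncropped photo, so pixels outside the 3:4 window are left untouched. Continuing the sketch above, with result as a stand-in for the pipeline output images[0]:

from PIL import Image

original = Image.new("RGB", (1000, 1000))   # uncropped human photo
cropped, box = center_crop_3x4(original)    # helper from the sketch above
result = Image.new("RGB", (768, 1024))      # stand-in for images[0]

restored = original.copy()
restored.paste(result.resize(cropped.size), (int(box[0]), int(box[1])))
assert restored.size == original.size       # borders keep the original pixels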
@@ -241,7 +263,10 @@ with image_blocks as demo:
         with gr.Column():
             imgs = gr.ImageEditor(sources='upload', type="pil", label='Human. Mask with pen or use auto-masking', interactive=True)
             with gr.Row():
-                is_checked = gr.Checkbox(label="Yes", info="Use auto-generated mask (Takes 5 seconds)",value=True)
+                is_checked = gr.Checkbox(label="Yes", info="Use auto-generated mask (Takes 5 seconds)",value=True)
+            with gr.Row():
+                is_checked_crop = gr.Checkbox(label="Yes", info="Use auto-crop & resizing",value=False)
+
             example = gr.Examples(
                 inputs=imgs,
                 examples_per_page=10,
@@ -255,7 +280,7 @@ with image_blocks as demo:
             prompt = gr.Textbox(placeholder="Description of garment ex) Short Sleeve Round Neck T-shirts", show_label=False, elem_id="prompt")
             example = gr.Examples(
                 inputs=garm_img,
-                examples_per_page=
+                examples_per_page=8,
                 examples=garm_list_path)
         with gr.Column():
             # image_out = gr.Image(label="Output", elem_id="output-img", height=400)
@@ -275,7 +300,7 @@ with image_blocks as demo:
             seed = gr.Number(label="Seed", minimum=-1, maximum=2147483647, step=1, value=42)
 
 
-    try_button.click(fn=start_tryon, inputs=[imgs, garm_img, prompt, is_checked, denoise_steps, seed], outputs=[image_out,masked_img], api_name='tryon')
+    try_button.click(fn=start_tryon, inputs=[imgs, garm_img, prompt, is_checked,is_checked_crop, denoise_steps, seed], outputs=[image_out,masked_img], api_name='tryon')
 
 
 
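One thing worth double-checking when replicating this commit: Gradio binds the inputs list to the handler's parameters purely by position, so is_checked_crop must occupy the same slot in the click() call as in start_tryon's signature, or the checkbox value silently lands on the wrong argument. A trimmed-down, hypothetical wiring (names are illustrative, not the Space's):

import gradio as gr

def tryon_stub(img, use_mask, use_crop, steps, seed):
    # parameter order here must match the inputs list below, one-to-one
    return img

with gr.Blocks() as demo:
    img = gr.Image(type="pil")
    use_mask = gr.Checkbox(label="auto mask", value=True)
    use_crop = gr.Checkbox(label="auto crop", value=False)
    steps = gr.Slider(minimum=20, maximum=40, value=30, step=1)
    seed = gr.Number(value=42)
    out = gr.Image()
    gr.Button("Try on").click(
        fn=tryon_stub,
        inputs=[img, use_mask, use_crop, steps, seed],  # positional mapping
        outputs=[out],
    )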