Commit 3ce6bf0 (parent: a0d75fa): Update app.py
app.py (CHANGED)

@@ -5,6 +5,7 @@ os.system("pip install huggingface_hub==0.24.7")
 os.system("pip install gradio accelerate==0.25.0 torchmetrics==1.2.1 tqdm==4.66.1 transformers==4.36.2 diffusers==0.25 einops==0.7.0 bitsandbytes==0.39.0 scipy==1.11.1 opencv-python gradio==4.24.0 fvcore cloudpickle omegaconf pycocotools basicsr av onnxruntime==1.16.2 peft==0.11.1 huggingface_hub==0.24.7 --no-deps")
 import gradio as gr
 import torch
+import spaces
 from PIL import Image
 import torch.nn.functional as F
 from transformers import CLIPImageProcessor
@@ -130,7 +131,7 @@ pipe = TryonPipeline.from_pretrained(
     unet_encoder = unet_encoder,
     torch_dtype=torch.float16,
 )
-
+@spaces.GPU
 def generate_virtual_try_on(person_image, cloth_image, mask_image, pose_image,cloth_des):
     pipe.to(device)
     # Prepare the input images as tensors
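For context: `spaces` is the helper package available inside Hugging Face ZeroGPU Spaces, and decorating a function with `@spaces.GPU` asks the platform to attach a GPU only while that call is running, which is why the decorator goes directly on `generate_virtual_try_on` and why `pipe.to(device)` sits inside the function rather than at import time. Below is a minimal sketch of the same pattern under those assumptions; `describe_gpu` and the toy Gradio interface are illustrative placeholders rather than code from this Space, and `import spaces` only resolves when the app actually runs on ZeroGPU hardware.

# Minimal ZeroGPU sketch: `describe_gpu` is a hypothetical placeholder,
# not a function from this Space's app.py.
import gradio as gr
import spaces   # resolves only inside a Hugging Face ZeroGPU Space
import torch

@spaces.GPU     # a GPU is attached only for the duration of this call
def describe_gpu(prompt):
    # Heavy CUDA work belongs here; app.py moves its pipeline onto the GPU
    # with pipe.to(device) at the equivalent point for the same reason.
    return f"{prompt}: cuda available = {torch.cuda.is_available()}"

demo = gr.Interface(fn=describe_gpu, inputs="text", outputs="text")
demo.launch()

The design point the commit reflects: on ZeroGPU no GPU exists outside decorated calls, so the pipeline is built at import time (presumably on CPU) and only moved to `device` inside the decorated `generate_virtual_try_on`.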