Yaron Koresh
committed on
Update app.py
app.py
CHANGED
@@ -10,8 +10,9 @@ import numpy as np
 from lxml.html import fromstring
 #from transformers import pipeline
 from torch import multiprocessing as mp
+from torch.multiprocessing import Pool
 #from pathos.multiprocessing import ProcessPool as Pool
-from pathos.threading import ThreadPool as Pool
+#from pathos.threading import ThreadPool as Pool
 #from diffusers.pipelines.flux import FluxPipeline
 #from diffusers.utils import export_to_gif
 #from huggingface_hub import hf_hub_download
@@ -35,10 +36,6 @@ def init_pool(_1,_2):
     infer2 = _2
     port_inc()
 
-#pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16, token=os.getenv("hf_token")).to(device)
-#pipe2 = StableDiffusionXLImg2ImgPipeline.from_pretrained("stabilityai/stable-diffusion-xl-refiner-1.0", torch_dtype=torch.float16, variant="fp16", use_safetensors=True).to(device)
-#pipe2.unet = torch.compile(pipe2.unet, mode="reduce-overhead", fullgraph=True)
-
 def pipe_t2i():
     PIPE = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16, token=os.getenv("hf_token"), device=-1)
     return PIPE
@@ -214,7 +211,7 @@ def main():
         return list(out)
     port_inc()
 
-
+    mp.set_start_method("spawn", force=True)
 
     with gr.Blocks(theme=gr.themes.Soft(),css=css,js=js) as demo:
         with gr.Column(elem_id="col-container"):
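
Note on the change: the commit swaps the pathos ThreadPool for torch.multiprocessing's Pool and forces the "spawn" start method in main(). Spawn is the usual choice when worker processes may touch CUDA, since CUDA contexts cannot be safely shared across fork()ed children. The snippet below is a minimal sketch of that pattern, not the Space's actual code; the init_pool argument, the infer helper, and the MODEL_NAME global are illustrative stand-ins.

    # Sketch only: per-worker setup via a pool initializer, with the "spawn" start method.
    from torch import multiprocessing as mp
    from torch.multiprocessing import Pool

    def init_pool(model_name):
        # Runs once in each worker process; stores per-worker state in a global.
        global MODEL_NAME
        MODEL_NAME = model_name  # illustrative placeholder for a loaded pipeline

    def infer(prompt):
        # Placeholder for the real per-worker inference call.
        return f"{MODEL_NAME}: {prompt}"

    if __name__ == "__main__":
        mp.set_start_method("spawn", force=True)  # required before creating the pool
        with Pool(processes=2, initializer=init_pool, initargs=("FLUX.1-schnell",)) as pool:
            print(pool.map(infer, ["a cat", "a dog"]))

With spawn, each worker re-imports the main module and then runs the initializer, so any heavyweight state (such as a diffusion pipeline) is constructed per worker rather than inherited from the parent.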