improve
Commit cbd23ec • committed by patrickvonplaten
1 Parent(s): 44f3bce
app.py CHANGED
@@ -9,11 +9,14 @@ from PIL import Image
 import time
 import psutil
 import random
+from diffusers.pipelines.stable_diffusion.safety_checker import StableDiffusionSafetyChecker
 
 
 start_time = time.time()
 current_steps = 25
 
+SAFETY_CHECKER = StableDiffusionSafetyChecker.from_pretrained("CompVis/stable-diffusion-safety-checker", torch_dtype=torch.float16)
+
 
 class Model:
     def __init__(self, name, path=""):
@@ -22,12 +25,12 @@ class Model:
 
         if path != "":
             self.pipe_t2i = StableDiffusionPipeline.from_pretrained(
-                path, torch_dtype=torch.float16
+                path, torch_dtype=torch.float16, safety_checker=SAFETY_CHECKER
             )
             self.pipe_t2i.scheduler = DPMSolverMultistepScheduler.from_config(
                 self.pipe_t2i.scheduler.config
             )
-            self.pipe_i2i = StableDiffusionImg2ImgPipeline(**self.pipe_t2i.components)
+            self.pipe_i2i = StableDiffusionImg2ImgPipeline(**self.pipe_t2i.components, safety_checker=SAFETY_CHECKER)
         else:
             self.pipe_t2i = None
             self.pipe_i2i = None
@@ -36,9 +39,9 @@
 models = [
     Model("2.2", "darkstorm2150/Protogen_v2.2_Official_Release"),
     Model("3.4", "darkstorm2150/Protogen_x3.4_Official_Release"),
-
-
-
+    Model("5.3", "darkstorm2150/Protogen_v5.3_Official_Release"),
+    Model("5.8", "darkstorm2150/Protogen_x5.8_Official_Release"),
+    Model("Dragon", "darkstorm2150/Protogen_Dragon_Official_Release"),
 ]
 
 MODELS = {m.name: m for m in models}
@@ -46,11 +49,6 @@ MODELS = {m.name: m for m in models}
 device = "GPU 🔥" if torch.cuda.is_available() else "CPU 🥶"
 
 
-# if torch.cuda.is_available():
-# pipe = pipe.to("cuda")
-# pipe.enable_xformers_memory_efficient_attention()
-
-
 def error_str(error, title="Error"):
     return (
         f"""#### {title}
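What the commit does, in short: a single StableDiffusionSafetyChecker is loaded once and handed to every per-model pipeline, the img2img pipeline is built from the text-to-image pipeline's already-loaded components instead of calling from_pretrained a second time, and three more Protogen checkpoints are registered. A minimal sketch of that loading pattern, assuming a diffusers version that exposes the pipeline .components property (the repo ids are the ones appearing in the diff):

import torch
from diffusers import (
    DPMSolverMultistepScheduler,
    StableDiffusionImg2ImgPipeline,
    StableDiffusionPipeline,
)
from diffusers.pipelines.stable_diffusion.safety_checker import StableDiffusionSafetyChecker

# Load the safety checker once; every model variant shares these weights.
safety_checker = StableDiffusionSafetyChecker.from_pretrained(
    "CompVis/stable-diffusion-safety-checker", torch_dtype=torch.float16
)

# Text-to-image pipeline: the preloaded checker is injected instead of the
# per-repo copy being loaded again.
pipe_t2i = StableDiffusionPipeline.from_pretrained(
    "darkstorm2150/Protogen_v2.2_Official_Release",
    torch_dtype=torch.float16,
    safety_checker=safety_checker,
)
pipe_t2i.scheduler = DPMSolverMultistepScheduler.from_config(pipe_t2i.scheduler.config)

# Image-to-image pipeline reuses the already-loaded components (UNet, VAE,
# text encoder, tokenizer, scheduler, safety checker), so no second copy of
# the weights is created.
pipe_i2i = StableDiffusionImg2ImgPipeline(**pipe_t2i.components)

Note that pipe_t2i.components already carries the safety checker once it has been passed to from_pretrained, so the sketch does not pass safety_checker to the img2img constructor a second time.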