Commit c128306 · Parent(s): 0728ac1
Upload app.py
app.py
ADDED
@@ -0,0 +1,202 @@
import gradio
import subprocess
from PIL import Image
import torch, torch.backends.cudnn, torch.backends.cuda
from min_dalle import MinDalle
from emoji import demojize
import string

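# Derive a filesystem-safe filename from the prompt: demojize, drop
# non-ASCII characters, keep lowercase letters, and join words with hyphens.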
def filename_from_text(text: str) -> str:
    text = demojize(text, delimiters=['', ''])
    text = text.lower().encode('ascii', errors='ignore').decode()
    allowed_chars = string.ascii_lowercase + ' '
    text = ''.join(i for i in text.lower() if i in allowed_chars)
    text = text[:64]
    text = '-'.join(text.strip().split())
    if len(text) == 0: text = 'blank'
    return text

def log_gpu_memory():
    # nvidia-smi only exists on GPU hosts; guard the call so it does not
    # crash when the Space runs on CPU-only hardware (device='cpu' below).
    if torch.cuda.is_available():
        print(subprocess.check_output('nvidia-smi').decode('utf-8'))

# log_gpu_memory()

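# Load the mega variant of the model once at startup and keep it resident
# (is_reusable=True) so repeated requests reuse the same weights.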
model = MinDalle(
    is_mega=True,
    is_reusable=True,
    device='cpu',
    # dtype=torch.float32
)

# log_gpu_memory()

def run_model(
    text: str,
    grid_size: int,
    is_seamless: bool,
    save_as_png: bool,
    temperature: float,
    supercondition: str,
    top_k: str
) -> str:
    torch.set_grad_enabled(False)
    torch.backends.cudnn.enabled = True
    torch.backends.cudnn.deterministic = False
    torch.backends.cudnn.benchmark = True
    torch.backends.cuda.matmul.allow_tf32 = True
    torch.backends.cuda.matmul.allow_fp16_reduced_precision_reduction = True

    print('text:', text)
    print('grid_size:', grid_size)
    print('is_seamless:', is_seamless)
    print('temperature:', temperature)
    print('supercondition:', supercondition)
    print('top_k:', top_k)

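    # Validate the raw UI values (numbers and dropdown strings) and surface a
    # readable error message instead of a bare stack trace.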
    try:
        temperature = float(temperature)
        assert(temperature > 1e-6)
    except:
        raise Exception('Temperature must be a positive nonzero number')
    try:
        grid_size = int(grid_size)
        assert(grid_size <= 5)
        assert(grid_size >= 1)
    except:
        raise Exception('Grid size must be between 1 and 5')
    try:
        top_k = int(top_k)
        assert(top_k <= 16384)
        assert(top_k >= 1)
    except:
        raise Exception('Top k must be between 1 and 16384')

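    # Run the sampler; generate_image returns a single PIL image containing
    # the grid_size x grid_size grid of generated tiles.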
    with torch.no_grad():
        image = model.generate_image(
            text = text,
            seed = -1,
            grid_size = grid_size,
            is_seamless = bool(is_seamless),
            temperature = temperature,
            supercondition_factor = float(supercondition),
            top_k = top_k,
            is_verbose = True
        )

    log_gpu_memory()

    ext = 'png' if bool(save_as_png) else 'jpg'
    filename = filename_from_text(text)
    image_path = '{}.{}'.format(filename, ext)
    image.save(image_path)

    return image_path

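# Gradio Blocks layout: prompt and output image in the left column,
# generation settings in the right column.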
demo = gradio.Blocks(analytics_enabled=True)

with demo:
    with gradio.Row():
        with gradio.Column():
            input_text = gradio.Textbox(
                label='Input Text',
                value='Rusty Iron Man suit found abandoned in the woods being reclaimed by nature',
                lines=3
            )
            run_button = gradio.Button(value='Generate Image').style(full_width=True)
            output_image = gradio.Image(
                value='example/8k dog.png',
                label='Output Image',
                type='file',
                interactive=False
            )

        with gradio.Column():
            gradio.Markdown('## Settings')
            with gradio.Row():
                grid_size = gradio.Slider(
                    label='Grid Size',
                    value=3,
                    minimum=1,
                    maximum=5,
                    step=1
                )
                save_as_png = gradio.Checkbox(
                    label='Output PNG',
                    value=False
                )
                is_seamless = gradio.Checkbox(
                    label='Seamless',
                    value=False
                )
            gradio.Markdown('#### Advanced')
            with gradio.Row():
                temperature = gradio.Number(
                    label='Temperature',
                    value=1
                )
                top_k = gradio.Dropdown(
                    label='Top-k',
                    choices=[str(2 ** i) for i in range(15)],
                    value='128'
                )
                supercondition = gradio.Dropdown(
                    label='Super Condition',
                    choices=[str(2 ** i) for i in range(2, 7)],
                    value='16'
                )

            gradio.Markdown(
                """
                #### Parameters
                - **Input Text**: For long prompts, only the first 64 text tokens are used to generate the image.
                - **Grid Size**: Size of the image grid; a 3x3 grid takes about 15 seconds.
                - **Seamless**: Tile images in image token space instead of pixel space.
                - **Temperature**: A higher temperature increases the probability of sampling low-scoring image tokens.
                - **Top-k**: Each image token is sampled from the top-k highest-scoring tokens.
                - **Super Condition**: Higher values can result in better agreement with the text.
                """
            )

    gradio.Examples(
        examples=[
            ['Portrait of a basset hound, 8k, photograph', 3, 'example/8k dog.png'],
            ['A diorama of Puppy cloud ,8k, photograph', 3, 'example/puppy.png'],
            ['A dragon that looks like a cream', 3, 'example/cream.png'],
            ['A photo of a sleeping orange tabby cat', 3, 'example/tabby.png'],
            ['A diorama of a bunny family sitting around the table having dinner ,8k, photograph', 3, 'example/table.png'],
            ['A white cat with golden sunglasses on, pink background, studio lighting, 4k, award winning photography', 2, 'example/cat.png'],
            ['an astronaut dancing on the moon’s surface, close-up photo', 2, 'example/astronaut.png'],
            ['A photo of a Samoyed dog with its tongue out hugging a white Siamese cat', 5, 'example/dog.png'],
            ['Dragons of Earth, Wind, Fire, powering up a huge sphere of compressed energy, digital art', 2, 'example/dragon.png'],
            ['A snowboarder jumping in the air while coming down a ski mountain, concept art, artstation, unreal engine, 3d render, HD, Bokeh', 3, 'example/snow.png'],
            ['Antique photo of a dragon fire', 3, 'example/fire.png'],
            ['A space parrot flying through the cosmos, digital art', 3, 'example/parrot.png'],
        ],
        inputs=[
            input_text,
            grid_size,
            output_image
        ],
        examples_per_page=20
    )

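    # Wire the Generate button to run_model; the returned file path feeds
    # the output image component.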
    run_button.click(
        fn=run_model,
        inputs=[
            input_text,
            grid_size,
            is_seamless,
            save_as_png,
            temperature,
            supercondition,
            top_k
        ],
        outputs=[
            output_image
        ]
    )


demo.launch()
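For reference, a minimal standalone sketch of the generation call that run_model wraps, using only arguments that already appear above; the prompt and output filename are illustrative placeholders, and generation on CPU is slow.

# Standalone sketch: the same min_dalle call run_model makes, with the
# UI's default settings (3x3 grid, temperature 1, top_k 128,
# supercondition factor 16). Prompt and filename are placeholders.
from min_dalle import MinDalle

model = MinDalle(is_mega=True, is_reusable=True, device='cpu')
image = model.generate_image(
    text='A photo of a sleeping orange tabby cat',
    seed=-1,
    grid_size=3,
    is_seamless=False,
    temperature=1.0,
    supercondition_factor=16.0,
    top_k=128,
    is_verbose=True
)
image.save('tabby.jpg')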