Spaces:
Running
on
Zero
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
@@ -93,7 +93,51 @@ examples = [
|
|
93 |
]
|
94 |
|
95 |
def generate_examples(input_image, prompt):
|
96 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
97 |
|
98 |
|
99 |
@torch.no_grad()
|
|
|
93 |
]
|
94 |
|
95 |
def generate_examples(input_image, prompt):
    """Run example video generation for a (image, prompt) gallery entry.

    Generator intended as a Gradio event handler: each ``yield`` is a
    6-tuple of updates — (result_video, preview_image, description_text,
    progress_html, start_button_update, end_button_update) — streamed to
    the UI while a background worker produces the video.

    Args:
        input_image: the example input image (numpy array expected by
            ``worker`` — TODO confirm exact shape/dtype against worker).
        prompt: text prompt forwarded unchanged to ``worker``.
    """
    # Fixed generation settings for the examples gallery.
    # NOTE(review): presumably these mirror the defaults of the main
    # interactive handler — confirm against the rest of app.py.
    t2v=False                     # text-to-video mode off for examples
    n_prompt=""                   # negative prompt (empty)
    seed=31337
    total_second_length=5         # target video length in seconds
    latent_window_size=9
    steps=25                      # sampling steps
    cfg=1.0                       # CFG scale
    gs=10.0                       # distilled guidance scale — TODO confirm meaning
    rs=0.0                        # guidance rescale — TODO confirm meaning
    gpu_memory_preservation=6     # presumably GB to keep free — verify against worker
    use_teacache=True
    mp4_crf=16                    # H.264 CRF for the output mp4

    # Worker results arrive through this module-level stream object.
    global stream

    # assert input_image is not None, 'No input image!'
    if t2v:
        # Text-to-video path: substitute a blank white canvas for the
        # image. Dead code here since t2v is hard-coded False above.
        default_height, default_width = 640, 640
        input_image = np.ones((default_height, default_width, 3), dtype=np.uint8) * 255
        print("No input image provided. Using a blank white image.")

    # Initial yield: clear outputs and disable start / enable end buttons.
    yield None, None, '', '', gr.update(interactive=False), gr.update(interactive=True)

    stream = AsyncStream()

    # Launch generation in the background; results flow into stream.output_queue.
    async_run(worker, input_image, prompt, n_prompt, seed, total_second_length, latent_window_size, steps, cfg, gs, rs, gpu_memory_preservation, use_teacache, mp4_crf)

    output_filename = None

    # Relay worker events to the UI until the 'end' sentinel arrives.
    while True:
        flag, data = stream.output_queue.next()

        if flag == 'file':
            # A (partial) video file is ready — show it in the player.
            output_filename = data
            yield output_filename, gr.update(), gr.update(), gr.update(), gr.update(interactive=False), gr.update(interactive=True)

        if flag == 'progress':
            # Intermediate preview frame plus textual/HTML progress info.
            preview, desc, html = data
            yield gr.update(), gr.update(visible=True, value=preview), desc, html, gr.update(interactive=False), gr.update(interactive=True)

        if flag == 'end':
            # Final yield: hide the preview and restore button states.
            yield output_filename, gr.update(visible=False), gr.update(), '', gr.update(interactive=True), gr.update(interactive=False)
            break
141 |
|
142 |
|
143 |
@torch.no_grad()
|