Spaces: Running on Zero
RageshAntony committed on
added common ProgressPipeline
check_app.py CHANGED  (+12 -14)
@@ -180,22 +180,20 @@ def create_pipeline_logic(prompt_text, model_name):
     config = MODEL_CONFIGS[model_name]
     pipe_class = config["pipeline_class"]
     pipe = None
-
-
-
-
-
-
-
-
+    b_pipe = AutoPipelineForText2Image.from_pretrained(
+        config["repo_id"],
+        #variant="fp16",
+        #cache_dir=config["cache_dir"],
+        torch_dtype=torch.bfloat16
+    ).to("cuda")
+    pipe_signature = signature(b_pipe)
+    # Check for the presence of "callback_on_step_end" in the signature
+    has_callback_on_step_end = "callback_on_step_end" in pipe_signature.parameters
+    if not has_callback_on_step_end:
         pipe = ProgressPipeline(b_pipe)
+        print("ProgressPipeline special")
     else:
-        pipe =
-        config["repo_id"],
-        #variant="fp16",
-        #cache_dir=config["cache_dir"],
-        torch_dtype=torch.bfloat16
-        ).to("cuda")
+        pipe = b_pipe
 
     image = generate_image_with_progress(
         model_name,pipe, prompt_text, num_steps=num_steps, guidance_scale=guidance_scale, seed=seed, progress=progress
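The committed check works because inspect.signature() on a callable instance such as a diffusers pipeline resolves to its __call__ method, so a pipeline whose __call__ does not accept callback_on_step_end is wrapped in ProgressPipeline, while any other pipeline is used directly. Below is a minimal sketch of that check using only the standard library; FakePipeline and supports_step_callback are illustrative names and are not part of check_app.py.

from inspect import signature

class FakePipeline:
    """Stand-in for a diffusers pipeline; real pipelines are invoked via __call__."""

    def __call__(self, prompt, num_inference_steps=30, callback_on_step_end=None):
        # A real pipeline would run its denoising loop and invoke
        # callback_on_step_end after every step; this stub just returns a string.
        return f"image for {prompt!r}"

def supports_step_callback(pipe) -> bool:
    # signature() on a callable instance inspects its __call__ method,
    # which is what the commit relies on when it calls signature(b_pipe).
    return "callback_on_step_end" in signature(pipe).parameters

pipe = FakePipeline()
print(supports_step_callback(pipe))  # True

In diffusers, newer pipelines expose callback_on_step_end while older ones only take the legacy callback/callback_steps arguments, which is presumably what the ProgressPipeline wrapper papers over.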