RageshAntony committed on
Commit fbcf80b · verified · 1 Parent(s): cdd3493

added AuraFlow AutoPipeline

Files changed (1): check_app.py (+8 -5)
check_app.py CHANGED
@@ -91,18 +91,21 @@ def generate_image_with_progress(pipe, prompt, num_steps, guidance_scale=None, s
 @spaces.GPU(duration=170)
 def create_pipeline_logic(prompt_text, model_name):
     print(f"starting {model_name}")
-    progress = gr.Progress()
+    progress = gr.Progress(track_tqdm=True)
     num_steps = 30
     guidance_scale = 7.5  # Example guidance scale, can be adjusted per model
     seed = 42
     config = MODEL_CONFIGS[model_name]
     pipe_class = config["pipeline_class"]
     pipe = None
-    if model_name == "Kandinsky":
-        print("Kandinsky Special")
+    if model_name == "Kandinsky" or model_name == "AuraFlow":
+        print("Kandinsky or AuraFlow Special")
         pipe = AutoPipelineForText2Image.from_pretrained(
-            "kandinsky-community/kandinsky-3", variant="fp16", torch_dtype=torch.float16
-        )
+            config["repo_id"],
+            variant="fp16",
+            #cache_dir=config["cache_dir"],
+            torch_dtype=torch.bfloat16
+        ).to("cuda")
     else:
         pipe = pipe_class.from_pretrained(
             config["repo_id"],
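For reference, below is a minimal standalone sketch of what the new AutoPipelineForText2Image branch does for AuraFlow, run outside the Space. The repo id "fal/AuraFlow" and the MODEL_CONFIGS entry shown here are assumptions for illustration (the commit reads the id from config["repo_id"], which is defined elsewhere in check_app.py), and it presumes a diffusers release that registers AuraFlow with AutoPipelineForText2Image.

import torch
from diffusers import AutoPipelineForText2Image

# Assumed config entry; the real MODEL_CONFIGS lives elsewhere in check_app.py.
MODEL_CONFIGS = {
    "AuraFlow": {
        "repo_id": "fal/AuraFlow",   # assumed Hub repo id for AuraFlow
        "pipeline_class": None,      # not used on the AutoPipeline branch
    },
}

config = MODEL_CONFIGS["AuraFlow"]

# Same pattern as the new branch: let AutoPipelineForText2Image resolve the
# concrete pipeline class from the repo, load weights in bfloat16, move to GPU.
pipe = AutoPipelineForText2Image.from_pretrained(
    config["repo_id"],
    torch_dtype=torch.bfloat16,
).to("cuda")

image = pipe(
    "a photo of an astronaut riding a horse on the moon",
    num_inference_steps=30,
    guidance_scale=7.5,
).images[0]
image.save("auraflow_test.png")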