RageshAntony committed
Commit b4f7f07 · verified · 1 Parent(s): 1b07bac

fixed bugs

Files changed (1)
  1. check_app.py +9 -5
check_app.py CHANGED
@@ -116,10 +116,14 @@ MODEL_CONFIGS = {
     }
 }
 
-def generate_image_with_progress(model_name,pipe, prompt, num_steps, guidance_scale=None, seed=None,negative_prompt, randomize_seed, width, height, guidance_scale, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
+def generate_image_with_progress(model_name,pipe, prompt, num_steps, guidance_scale=3.5, seed=None,negative_prompt=None, randomize_seed=None, width=1024, height=1024, num_inference_steps=40, progress=gr.Progress(track_tqdm=True)):
     generator = None
+    if randomize_seed:
+        seed = random.randint(0, MAX_SEED)
     if seed is not None:
         generator = torch.Generator("cuda").manual_seed(seed)
+    else:
+        generator = torch.Generator("cuda")
 
     def callback(pipe, step_index, timestep, callback_kwargs):
         print(f" callback => {step_index}, {timestep}")
@@ -160,7 +164,7 @@ def generate_image_with_progress(model_name,pipe, prompt, num_steps, guidance_sc
     # Generate image
     image = pipe(**common_args).images[0]
 
-    return image
+    return seed, image
 
 @spaces.GPU(duration=170)
 def create_pipeline_logic(prompt_text, model_name, negative_prompt="", seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=4.5, num_inference_steps=40,):
@@ -184,10 +188,10 @@ def create_pipeline_logic(prompt_text, model_name, negative_prompt="", seed=42,
     else:
         pipe = b_pipe
 
-    image = generate_image_with_progress(
-        model_name,pipe, prompt_text, num_steps=num_inference_steps, guidance_scale=guidance_scale, seed=seed,negative_prompt, randomize_seed, width, height, guidance_scale, num_inference_steps, progress=progress
+    gen_seed,image = generate_image_with_progress(
+        model_name,pipe, prompt_text, num_steps=num_inference_steps, guidance_scale=guidance_scale, seed=seed,negative_prompt = negative_prompt, randomize_seed = randomize_seed, width = width, height = height, progress=progress
     )
-    return f"Seed: {seed}", image
+    return f"Seed: {gen_seed}", image
 
 def main():
     with gr.Blocks() as app:
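The change follows a common Gradio + diffusers seed-handling pattern: draw a random seed when randomize_seed is set, seed a torch.Generator so the run is reproducible, and return the seed together with the image so the caller can show it in the UI. Below is a minimal standalone sketch of that pattern, not the app's exact code; MAX_SEED, the device string, and the pipeline object are assumptions here.

import random
import torch

MAX_SEED = 2**31 - 1  # assumed bound; check_app.py defines its own MAX_SEED

def resolve_seed_and_generator(seed=None, randomize_seed=False, device="cuda"):
    # Draw a fresh seed when the UI asked for a random one.
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    # Seed the generator for reproducibility; fall back to an unseeded one.
    if seed is not None:
        generator = torch.Generator(device).manual_seed(seed)
    else:
        generator = torch.Generator(device)
    return seed, generator

def generate(pipe, prompt, seed=None, randomize_seed=False, **pipe_kwargs):
    # `pipe` is assumed to be an already-loaded diffusers pipeline on `device`.
    seed, generator = resolve_seed_and_generator(seed, randomize_seed)
    image = pipe(prompt=prompt, generator=generator, **pipe_kwargs).images[0]
    # Return the seed too, so the caller can report "Seed: <n>" alongside the image.
    return seed, image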