charliebaby2023 committed on
Commit
4ceba74
·
verified ·
1 Parent(s): 9df39b1

Update app_demo.py

Browse files
Files changed (1) hide show
  1. app_demo.py +3 -15
app_demo.py CHANGED
@@ -27,12 +27,6 @@ executor = ThreadPoolExecutor()
27
  model_cache = {}
28
 
29
  model_id = "Lykon/dreamshaper-xl-v2-turbo"
30
- #custom_pipe = DiffusionPipeline.from_pretrained(model_id, custom_pipeline="latent_consistency_txt2img", custom_revision="main")
31
- pipe = DiffusionPipeline.from_pretrained("SimianLuo/LCM_Dreamshaper_v7", custom_pipeline="latent_consistency_txt2img", custom_revision="main")
32
- pipe.to(torch_device="cpu", torch_dtype=DTYPE)
33
- pipe.safety_checker = None
34
- #pipe = StableDiffusionPipeline.from_pretrained( model_id, safety_checker=None, torch_dtype=DTYPE, use_safetensors=True).to("cpu")
35
-
36
  custom_pipe = DiffusionPipeline.from_pretrained(
37
  model_id,
38
  custom_pipeline="latent_consistency_txt2img",
@@ -40,23 +34,22 @@ custom_pipe = DiffusionPipeline.from_pretrained(
40
  safety_checker=None,
41
  feature_extractor=None
42
  )
 
 
43
 
44
def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
    """Return a fresh random seed in [0, MAX_SEED] when *randomize_seed* is set;
    otherwise pass the caller-supplied *seed* through unchanged."""
    if randomize_seed:
        return random.randint(0, MAX_SEED)
    return seed
46
 
47
-
48
def save_image(img, profile: gr.OAuthProfile | None, metadata: dict):
    """Persist *img* to a uniquely named PNG in the working directory,
    record it in the Gradio user history, and return the filename."""
    unique_name = f"{uuid.uuid4()}.png"
    img.save(unique_name)
    # History label comes from the prompt used to generate the image.
    gr_user_history.save_image(label=metadata["prompt"], image=img, profile=profile, metadata=metadata)
    return unique_name
53
 
54
-
55
def save_images(image_array, profile: gr.OAuthProfile | None, metadata: dict):
    """Save every image in *image_array* concurrently via save_image and
    return the list of saved filenames, preserving input order."""
    with ThreadPoolExecutor() as pool:
        futures = [pool.submit(save_image, img, profile, metadata) for img in image_array]
        return [future.result() for future in futures]
58
 
59
-
60
  def generate(prompt: str, seed: int = 0, width: int = 512, height: int = 512,
61
  guidance_scale: float = 8.0, num_inference_steps: int = 4,
62
  num_images: int = 1, randomize_seed: bool = False,
@@ -75,7 +68,6 @@ def generate(prompt: str, seed: int = 0, width: int = 512, height: int = 512,
75
  "num_inference_steps": num_inference_steps})
76
  return paths, seed
77
 
78
-
79
  def validate_and_list_models(hfuser):
80
  try:
81
  models = api.list_models(author=hfuser)
@@ -83,7 +75,6 @@ def validate_and_list_models(hfuser):
83
  except Exception:
84
  return []
85
 
86
-
87
  def parse_user_model_dict(user_model_dict_str):
88
  try:
89
  data = ast.literal_eval(user_model_dict_str)
@@ -93,7 +84,6 @@ def parse_user_model_dict(user_model_dict_str):
93
  except Exception:
94
  return {}
95
 
96
-
97
  def load_model(model_id):
98
  if model_id in model_cache:
99
  return f"{model_id} loaded from cache"
@@ -104,14 +94,12 @@ def load_model(model_id):
104
  except Exception as e:
105
  return f"{model_id} failed to load: {str(e)}"
106
 
107
-
108
def run_models(models, parallel):
    """Load each model id in *models*, either sequentially or fanned out on
    the module-level executor, and return the per-model status strings in order."""
    if not parallel:
        return [load_model(model_id) for model_id in models]
    pending = [executor.submit(load_model, model_id) for model_id in models]
    return [task.result() for task in pending]
113
 
114
-
115
  with gr.Blocks() as demo:
116
  with gr.Row():
117
  gr.HTML("""
@@ -175,4 +163,4 @@ with gr.Blocks() as demo:
175
  fn=generate,
176
  inputs=[prompt, seed, width, height, guidance_scale, num_inference_steps, num_images, randomize_seed],
177
  outputs=[gallery, seed]
178
- )
 
27
  model_cache = {}
28
 
29
  model_id = "Lykon/dreamshaper-xl-v2-turbo"
 
 
 
 
 
 
30
  custom_pipe = DiffusionPipeline.from_pretrained(
31
  model_id,
32
  custom_pipeline="latent_consistency_txt2img",
 
34
  safety_checker=None,
35
  feature_extractor=None
36
  )
37
+ custom_pipe.to(torch_device="cpu", torch_dtype=DTYPE)
38
+ pipe = custom_pipe
39
 
40
def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
    """Pick a uniformly random seed if requested; otherwise return *seed* as-is."""
    return seed if not randomize_seed else random.randint(0, MAX_SEED)
42
 
 
43
def save_image(img, profile: gr.OAuthProfile | None, metadata: dict):
    """Write *img* to disk under a UUID-based PNG name, log it to the
    user's Gradio history, and return the generated filename."""
    filename = str(uuid.uuid4()) + '.png'
    img.save(filename)
    gr_user_history.save_image(
        label=metadata["prompt"],  # prompt text doubles as the history label
        image=img,
        profile=profile,
        metadata=metadata,
    )
    return filename
48
 
 
49
def save_images(image_array, profile: gr.OAuthProfile | None, metadata: dict):
    """Concurrently save all images in *image_array*; returns the filenames in input order."""
    count = len(image_array)
    with ThreadPoolExecutor() as pool:
        # map preserves ordering; profile/metadata are broadcast to every call.
        results = pool.map(save_image, image_array, [profile] * count, [metadata] * count)
        return list(results)
52
 
 
53
  def generate(prompt: str, seed: int = 0, width: int = 512, height: int = 512,
54
  guidance_scale: float = 8.0, num_inference_steps: int = 4,
55
  num_images: int = 1, randomize_seed: bool = False,
 
68
  "num_inference_steps": num_inference_steps})
69
  return paths, seed
70
 
 
71
  def validate_and_list_models(hfuser):
72
  try:
73
  models = api.list_models(author=hfuser)
 
75
  except Exception:
76
  return []
77
 
 
78
  def parse_user_model_dict(user_model_dict_str):
79
  try:
80
  data = ast.literal_eval(user_model_dict_str)
 
84
  except Exception:
85
  return {}
86
 
 
87
  def load_model(model_id):
88
  if model_id in model_cache:
89
  return f"{model_id} loaded from cache"
 
94
  except Exception as e:
95
  return f"{model_id} failed to load: {str(e)}"
96
 
 
97
def run_models(models, parallel):
    """Load the given model ids and return their status messages.

    When *parallel* is truthy the loads are dispatched on the shared
    module-level executor; otherwise they run one after another.
    """
    if parallel:
        submitted = [executor.submit(load_model, name) for name in models]
        return [job.result() for job in submitted]
    return [load_model(name) for name in models]
102
 
 
103
  with gr.Blocks() as demo:
104
  with gr.Row():
105
  gr.HTML("""
 
163
  fn=generate,
164
  inputs=[prompt, seed, width, height, guidance_scale, num_inference_steps, num_images, randomize_seed],
165
  outputs=[gallery, seed]
166
+ )