John6666 committed on
Commit 61db31a · verified · 1 Parent(s): f22289a

Delete convert_url_to_diffusers_sdxl_gr.py

Files changed (1)
  1. convert_url_to_diffusers_sdxl_gr.py +0 -291
convert_url_to_diffusers_sdxl_gr.py DELETED
@@ -1,291 +0,0 @@
- import os
- if os.environ.get("SPACES_ZERO_GPU") is not None:
-     import spaces
- else:
-     # Minimal stand-in so @spaces.GPU is a no-op outside ZeroGPU Spaces.
-     class spaces:
-         @staticmethod
-         def GPU(func):
-             def wrapper(*args, **kwargs):
-                 return func(*args, **kwargs)
-             return wrapper
- import argparse
- from pathlib import Path
- import torch
- from diffusers import StableDiffusionXLPipeline, AutoencoderKL
- from transformers import CLIPTokenizer, CLIPTextModel
- import gradio as gr
- import shutil
- import gc
- # also requires aria, gdown, peft, huggingface_hub, safetensors, transformers, accelerate, pytorch_lightning
- from utils import (set_token, is_repo_exists, is_repo_name, get_download_file, upload_repo)
-
-
- @spaces.GPU
- def fake_gpu():
-     pass
-
-
- TEMP_DIR = "."
-
-
- DTYPE_DICT = {
-     "fp16": torch.float16,
-     "bf16": torch.bfloat16,
-     "fp32": torch.float32,
-     "fp8": torch.float8_e4m3fn
- }
-
-
- def get_dtype(dtype: str):
-     return DTYPE_DICT.get(dtype, torch.float16)
-
-
- from diffusers import (
-     DPMSolverMultistepScheduler,
-     DPMSolverSinglestepScheduler,
-     KDPM2DiscreteScheduler,
-     EulerDiscreteScheduler,
-     EulerAncestralDiscreteScheduler,
-     HeunDiscreteScheduler,
-     LMSDiscreteScheduler,
-     DDIMScheduler,
-     DEISMultistepScheduler,
-     UniPCMultistepScheduler,
-     LCMScheduler,
-     PNDMScheduler,
-     KDPM2AncestralDiscreteScheduler,
-     DPMSolverSDEScheduler,
-     EDMDPMSolverMultistepScheduler,
-     DDPMScheduler,
-     EDMEulerScheduler,
-     TCDScheduler,
- )
-
-
- SCHEDULER_CONFIG_MAP = {
-     "DPM++ 2M": (DPMSolverMultistepScheduler, {"use_karras_sigmas": False}),
-     "DPM++ 2M Karras": (DPMSolverMultistepScheduler, {"use_karras_sigmas": True}),
-     "DPM++ 2M SDE": (DPMSolverMultistepScheduler, {"use_karras_sigmas": False, "algorithm_type": "sde-dpmsolver++"}),
-     "DPM++ 2M SDE Karras": (DPMSolverMultistepScheduler, {"use_karras_sigmas": True, "algorithm_type": "sde-dpmsolver++"}),
-     "DPM++ 2S": (DPMSolverSinglestepScheduler, {"use_karras_sigmas": False}),
-     "DPM++ 2S Karras": (DPMSolverSinglestepScheduler, {"use_karras_sigmas": True}),
-     "DPM++ 1S": (DPMSolverMultistepScheduler, {"solver_order": 1}),
-     "DPM++ 1S Karras": (DPMSolverMultistepScheduler, {"solver_order": 1, "use_karras_sigmas": True}),
-     "DPM++ 3M": (DPMSolverMultistepScheduler, {"solver_order": 3}),
-     "DPM++ 3M Karras": (DPMSolverMultistepScheduler, {"solver_order": 3, "use_karras_sigmas": True}),
-     "DPM++ SDE": (DPMSolverSDEScheduler, {"use_karras_sigmas": False}),
-     "DPM++ SDE Karras": (DPMSolverSDEScheduler, {"use_karras_sigmas": True}),
-     "DPM2": (KDPM2DiscreteScheduler, {}),
-     "DPM2 Karras": (KDPM2DiscreteScheduler, {"use_karras_sigmas": True}),
-     "DPM2 a": (KDPM2AncestralDiscreteScheduler, {}),
-     "DPM2 a Karras": (KDPM2AncestralDiscreteScheduler, {"use_karras_sigmas": True}),
-     "Euler": (EulerDiscreteScheduler, {}),
-     "Euler a": (EulerAncestralDiscreteScheduler, {}),
-     "Euler trailing": (EulerDiscreteScheduler, {"timestep_spacing": "trailing", "prediction_type": "sample"}),
-     "Euler a trailing": (EulerAncestralDiscreteScheduler, {"timestep_spacing": "trailing"}),
-     "Heun": (HeunDiscreteScheduler, {}),
-     "Heun Karras": (HeunDiscreteScheduler, {"use_karras_sigmas": True}),
-     "LMS": (LMSDiscreteScheduler, {}),
-     "LMS Karras": (LMSDiscreteScheduler, {"use_karras_sigmas": True}),
-     "DDIM": (DDIMScheduler, {}),
-     "DDIM trailing": (DDIMScheduler, {"timestep_spacing": "trailing"}),
-     "DEIS": (DEISMultistepScheduler, {}),
-     "UniPC": (UniPCMultistepScheduler, {}),
-     "UniPC Karras": (UniPCMultistepScheduler, {"use_karras_sigmas": True}),
-     "PNDM": (PNDMScheduler, {}),
-     "Euler EDM": (EDMEulerScheduler, {}),
-     "Euler EDM Karras": (EDMEulerScheduler, {"use_karras_sigmas": True}),
-     "DPM++ 2M EDM": (EDMDPMSolverMultistepScheduler, {"solver_order": 2, "solver_type": "midpoint", "final_sigmas_type": "zero", "algorithm_type": "dpmsolver++"}),
-     "DPM++ 2M EDM Karras": (EDMDPMSolverMultistepScheduler, {"use_karras_sigmas": True, "solver_order": 2, "solver_type": "midpoint", "final_sigmas_type": "zero", "algorithm_type": "dpmsolver++"}),
-     "DDPM": (DDPMScheduler, {}),
-
-     "DPM++ 2M Lu": (DPMSolverMultistepScheduler, {"use_lu_lambdas": True}),
-     "DPM++ 2M Ef": (DPMSolverMultistepScheduler, {"euler_at_final": True}),
-     "DPM++ 2M SDE Lu": (DPMSolverMultistepScheduler, {"use_lu_lambdas": True, "algorithm_type": "sde-dpmsolver++"}),
-     "DPM++ 2M SDE Ef": (DPMSolverMultistepScheduler, {"algorithm_type": "sde-dpmsolver++", "euler_at_final": True}),
-
-     "LCM": (LCMScheduler, {}),
-     "TCD": (TCDScheduler, {}),
-     "LCM trailing": (LCMScheduler, {"timestep_spacing": "trailing"}),
-     "TCD trailing": (TCDScheduler, {"timestep_spacing": "trailing"}),
-     "LCM Auto-Loader": (LCMScheduler, {}),
-     "TCD Auto-Loader": (TCDScheduler, {}),
- }
-
-
- def get_scheduler_config(name):
-     # Fall back to "Euler a" for unknown scheduler names.
-     if name not in SCHEDULER_CONFIG_MAP: return SCHEDULER_CONFIG_MAP["Euler a"]
-     return SCHEDULER_CONFIG_MAP[name]
-
-
- def save_readme_md(dir, url):
-     orig_url = ""
-     orig_name = ""
-     if is_repo_name(url):
-         orig_name = url
-         orig_url = f"https://huggingface.co/{url}/"
-     elif "http" in url:
-         orig_name = url
-         orig_url = url
-     if orig_name and orig_url:
-         md = f"""---
- license: other
- language:
- - en
- library_name: diffusers
- pipeline_tag: text-to-image
- tags:
- - text-to-image
- ---
- Converted from [{orig_name}]({orig_url}).
- """
-     else:
-         md = f"""---
- license: other
- language:
- - en
- library_name: diffusers
- pipeline_tag: text-to-image
- tags:
- - text-to-image
- ---
- """
-     path = str(Path(dir, "README.md"))
-     with open(path, mode='w', encoding="utf-8") as f:
-         f.write(md)
-
-
- def fuse_loras(pipe, lora_dict={}, temp_dir=TEMP_DIR, civitai_key=""):
-     # lora_dict maps a LoRA URL (or local path) to its adapter weight.
-     if not lora_dict or not isinstance(lora_dict, dict): return pipe
-     a_list = []
-     w_list = []
-     for k, v in lora_dict.items():
-         if not k: continue
-         new_lora_file = get_download_file(temp_dir, k, civitai_key)
-         if not new_lora_file or not Path(new_lora_file).exists():
-             print(f"LoRA not found: {k}")
-             continue
-         w_name = Path(new_lora_file).name
-         a_name = Path(new_lora_file).stem
-         pipe.load_lora_weights(new_lora_file, weight_name=w_name, adapter_name=a_name)
-         a_list.append(a_name)
-         w_list.append(v)
-     if not a_list: return pipe
-     pipe.set_adapters(a_list, adapter_weights=w_list)
-     pipe.fuse_lora(adapter_names=a_list, lora_scale=1.0)
-     pipe.unload_lora_weights()
-     return pipe
-
-
- def convert_url_to_diffusers_sdxl(url, civitai_key="", is_upload_sf=False, dtype="fp16", vae="", clip="",
-                                   scheduler="Euler a", lora_dict={}, is_local=True, progress=gr.Progress(track_tqdm=True)):
-     progress(0, desc="Start converting...")
-     temp_dir = TEMP_DIR
-     new_file = get_download_file(temp_dir, url, civitai_key)
-     if not new_file:
-         print(f"Not found: {url}")
-         return ""
-     new_dir = Path(new_file).stem.replace(" ", "_").replace(",", "_").replace(".", "_")
-
-     kwargs = {}
-     type_kwargs = {}
-     if dtype != "default": type_kwargs["torch_dtype"] = get_dtype(dtype)
-
-     new_vae_file = ""
-     if vae:
-         if is_repo_name(vae): my_vae = AutoencoderKL.from_pretrained(vae, **type_kwargs)
-         else:
-             new_vae_file = get_download_file(temp_dir, vae, civitai_key)
-             my_vae = AutoencoderKL.from_single_file(new_vae_file, **type_kwargs) if new_vae_file else None
-         if my_vae: kwargs["vae"] = my_vae
-
-     if clip:
-         my_tokenizer = CLIPTokenizer.from_pretrained(clip)
-         if my_tokenizer: kwargs["tokenizer"] = my_tokenizer
-         my_text_encoder = CLIPTextModel.from_pretrained(clip, **type_kwargs)
-         if my_text_encoder: kwargs["text_encoder"] = my_text_encoder
-
-     pipe = None
-     if is_repo_name(url): pipe = StableDiffusionXLPipeline.from_pretrained(new_file, use_safetensors=True, **kwargs, **type_kwargs)
-     else: pipe = StableDiffusionXLPipeline.from_single_file(new_file, use_safetensors=True, **kwargs, **type_kwargs)
-
-     pipe = fuse_loras(pipe, lora_dict, temp_dir, civitai_key)
-
-     sconf = get_scheduler_config(scheduler)
-     pipe.scheduler = sconf[0].from_config(pipe.scheduler.config, **sconf[1])
-
-     pipe.save_pretrained(new_dir, safe_serialization=True, use_safetensors=True)
-
-     if Path(new_dir).exists(): save_readme_md(new_dir, url)
-
-     if not is_local:
-         if not is_repo_name(new_file) and is_upload_sf: shutil.move(str(Path(new_file).resolve()), str(Path(new_dir, Path(new_file).name).resolve()))
-         else: os.remove(new_file)
-     del pipe
-     torch.cuda.empty_cache()
-     gc.collect()
-
-     progress(1, desc="Converted.")
-     return new_dir
-
-
- def convert_url_to_diffusers_repo(dl_url, hf_user, hf_repo, hf_token, civitai_key="", is_private=True, is_overwrite=False, is_upload_sf=False,
-                                   urls=[], dtype="fp16", vae="", clip="", scheduler="Euler a",
-                                   lora1=None, lora1s=1.0, lora2=None, lora2s=1.0, lora3=None, lora3s=1.0,
-                                   lora4=None, lora4s=1.0, lora5=None, lora5s=1.0, progress=gr.Progress(track_tqdm=True)):
-     is_local = False
-     if not civitai_key and os.environ.get("CIVITAI_API_KEY"): civitai_key = os.environ.get("CIVITAI_API_KEY") # default Civitai API key
-     if not hf_token and os.environ.get("HF_TOKEN"): hf_token = os.environ.get("HF_TOKEN") # default HF write token
-     if not hf_user and os.environ.get("HF_USER"): hf_user = os.environ.get("HF_USER") # default username
-     if not hf_user: raise gr.Error(f"Invalid user name: {hf_user}")
-     if not hf_repo and os.environ.get("HF_REPO"): hf_repo = os.environ.get("HF_REPO") # default repo name
-     set_token(hf_token)
-     lora_dict = {lora1: lora1s, lora2: lora2s, lora3: lora3s, lora4: lora4s, lora5: lora5s}
-     new_path = convert_url_to_diffusers_sdxl(dl_url, civitai_key, is_upload_sf, dtype, vae, clip, scheduler, lora_dict, is_local)
-     if not new_path: return ""
-     new_repo_id = f"{hf_user}/{Path(new_path).stem}"
-     if hf_repo != "": new_repo_id = f"{hf_user}/{hf_repo}"
-     if not is_repo_name(new_repo_id): raise gr.Error(f"Invalid repo name: {new_repo_id}")
-     if not is_overwrite and is_repo_exists(new_repo_id): raise gr.Error(f"Repo already exists: {new_repo_id}")
-     repo_url = upload_repo(new_repo_id, new_path, is_private)
-     shutil.rmtree(new_path)
-     if not urls: urls = []
-     urls.append(repo_url)
-     md = "### Your new repo:\n"
-     for u in urls:
-         md += f"[{str(u).split('/')[-2]}/{str(u).split('/')[-1]}]({str(u)})<br>"
-     return gr.update(value=urls, choices=urls), gr.update(value=md)
-
-
- if __name__ == "__main__":
-     parser = argparse.ArgumentParser()
-
-     parser.add_argument("--url", default=None, type=str, required=True, help="URL of the model to convert.")
-     parser.add_argument("--dtype", default="fp16", type=str, choices=["fp16", "fp32", "bf16", "fp8", "default"], help='Output data type. (Default: "fp16")')
-     parser.add_argument("--scheduler", default="Euler a", type=str, choices=list(SCHEDULER_CONFIG_MAP.keys()), required=False, help="Scheduler name to use.")
-     parser.add_argument("--vae", default=None, type=str, required=False, help="URL of the VAE to use.")
-     parser.add_argument("--civitai_key", default=None, type=str, required=False, help="Civitai API key (needed to download files from Civitai).")
-     parser.add_argument("--lora1", default=None, type=str, required=False, help="URL of the LoRA to use.")
-     parser.add_argument("--lora1s", default=1.0, type=float, required=False, help="LoRA weight scale of --lora1.")
-     parser.add_argument("--lora2", default=None, type=str, required=False, help="URL of the LoRA to use.")
-     parser.add_argument("--lora2s", default=1.0, type=float, required=False, help="LoRA weight scale of --lora2.")
-     parser.add_argument("--lora3", default=None, type=str, required=False, help="URL of the LoRA to use.")
-     parser.add_argument("--lora3s", default=1.0, type=float, required=False, help="LoRA weight scale of --lora3.")
-     parser.add_argument("--lora4", default=None, type=str, required=False, help="URL of the LoRA to use.")
-     parser.add_argument("--lora4s", default=1.0, type=float, required=False, help="LoRA weight scale of --lora4.")
-     parser.add_argument("--lora5", default=None, type=str, required=False, help="URL of the LoRA to use.")
-     parser.add_argument("--lora5s", default=1.0, type=float, required=False, help="LoRA weight scale of --lora5.")
-     parser.add_argument("--loras", default=None, type=str, required=False, help="Folder of LoRAs to use.")
-
-     args = parser.parse_args()
-     assert args.url is not None, "Must provide a URL!"
-
-     is_local = True
-     lora_dict = {args.lora1: args.lora1s, args.lora2: args.lora2s, args.lora3: args.lora3s, args.lora4: args.lora4s, args.lora5: args.lora5s}
-     if args.loras and Path(args.loras).exists():
-         for p in Path(args.loras).glob('**/*.safetensors'):
-             lora_dict[str(p)] = 1.0
-     clip = ""
-
-     # Pass by keyword: the signature places is_upload_sf before dtype, so a
-     # purely positional call would shift every argument by one slot.
-     convert_url_to_diffusers_sdxl(args.url, civitai_key=args.civitai_key, dtype=args.dtype, vae=args.vae,
-                                   clip=clip, scheduler=args.scheduler, lora_dict=lora_dict, is_local=is_local)
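
For reference, a minimal sketch of driving the deleted converter from Python rather than the CLI. Everything below is illustrative: the download URL is a placeholder, and it assumes this module and its `utils` helpers are still importable.

    from convert_url_to_diffusers_sdxl_gr import convert_url_to_diffusers_sdxl

    # Download a single-file SDXL checkpoint and save it as a Diffusers folder.
    out_dir = convert_url_to_diffusers_sdxl(
        "https://example.com/model.safetensors",  # placeholder URL
        civitai_key="",               # only needed for Civitai downloads
        dtype="fp16",
        scheduler="Euler a",
        lora_dict={},                 # optional: {lora_url_or_path: weight}
        is_local=True,                # keep the converted folder on disk
    )
    print(out_dir)                    # directory created next to the script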