John6666 committed on
Commit
e1e9e3d
·
verified ·
1 Parent(s): 9b522d4

Upload 6 files

Browse files
Files changed (6) hide show
  1. README.md +6 -5
  2. app.py +66 -0
  3. packages.txt +1 -0
  4. pre-requirements.txt +1 -0
  5. requirements.txt +46 -0
  6. utils.py +229 -0
README.md CHANGED
@@ -1,12 +1,13 @@
1
  ---
2
- title: Webui Test2
3
- emoji: 🏢
4
- colorFrom: gray
5
- colorTo: red
6
  sdk: gradio
7
- sdk_version: 5.1.0
8
  app_file: app.py
9
  pinned: false
 
10
  ---
11
 
12
  Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
  ---
2
+ title: SuperMerger test
3
+ emoji: 🐶
4
+ colorFrom: yellow
5
+ colorTo: blue
6
  sdk: gradio
7
+ sdk_version: 4.41.0
8
  app_file: app.py
9
  pinned: false
10
+ license: mit
11
  ---
12
 
13
  Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
if os.environ.get("SPACES_ZERO_GPU") is not None:
    # Running on a ZeroGPU Space: use the real decorator package.
    import spaces
else:
    # Fallback for non-ZeroGPU environments: a stub class whose GPU
    # decorator simply forwards the call unchanged.
    class spaces:
        @staticmethod
        def GPU(func):
            def passthrough(*args, **kwargs):
                return func(*args, **kwargs)
            return passthrough
11
+ import subprocess
12
+ import gc
13
+ import sys
14
+ from pathlib import Path
15
+ from utils import set_token, get_download_file
16
+
17
@spaces.GPU
def fake_gpu():
    """Do nothing; presumably exists so a @spaces.GPU function is registered
    at import time — TODO confirm against Spaces startup requirements."""
    return None
20
+
21
# Secrets read from the environment; either may be None when unset.
HF_TOKEN = os.environ.get("HF_TOKEN")
CIVITAI_API_KEY = os.environ.get("CIVITAI_API_KEY")
# Persist the HF token to the local cache so later hub calls pick it up.
set_token(HF_TOKEN)
# Absolute path of this file's directory and of the webui checkout under it.
BASE_DIR = str(Path(__file__).resolve().parent.resolve())
WEBUI_DIR = str(Path(BASE_DIR, "stable-diffusion-webui").resolve())
#os.environ["PYTHONPATH"] = os.environ.get("PYTHONPATH") + f":{WEBUI_DIR}:."
27
+
28
def get_file(url: str, path: str):
    """Fetch *url* into directory *path* via the shared downloader,
    passing the Civitai API key taken from the environment."""
    print(f"Downloading {url} to {path}...")
    get_download_file(path, url, CIVITAI_API_KEY)
31
+
32
def git_clone(url: str, path: str, pip: bool=False, addcmd: str=""):
    """Clone a git repository under BASE_DIR/path, then optionally install
    its requirements and/or run one extra shell command inside the clone.

    Args:
        url: Git repository URL (may end in ".git").
        path: Directory relative to BASE_DIR to clone into.
        pip: If True, run `pip install -r requirements.txt` in the clone.
        addcmd: Optional extra shell command to run in the clone.

    Failures are printed, not raised (best-effort launcher style).
    """
    def _sh(cmd: str):
        # Run one shell command in the current directory, reporting failures.
        print(f'Running {cmd} at {Path.cwd()}')
        if subprocess.run(cmd, shell=True).returncode != 0:
            print(f'Error occurred at running {cmd}')

    os.makedirs(str(Path(BASE_DIR, path)), exist_ok=True)
    os.chdir(Path(BASE_DIR, path))
    print(f"Cloning {url} to {path}...")
    _sh(f'git clone {url}')
    # git names the checkout without a trailing ".git", so strip it here.
    # Bug fix: the original tested the bound method `Path(p).exists` (no
    # call), which is always truthy, so the missing-clone guard never fired.
    p = url.split("/")[-1].removesuffix(".git")
    if not Path(p).exists(): return
    if pip:
        os.chdir(Path(BASE_DIR, path, p))
        _sh('pip install -r requirements.txt')
    if addcmd:
        os.chdir(Path(BASE_DIR, path, p))
        _sh(addcmd)
54
+
55
def run(cmd: str):
    """Run *cmd* through the shell in the current working directory.

    Non-zero exit codes are printed rather than raised, matching the
    best-effort style of the rest of this launcher.
    """
    print(f'Running {cmd} at {Path.cwd()}')
    i = subprocess.run(cmd, shell=True).returncode
    if i != 0: print(f'Error occurred at running {cmd}')  # typo fix: "occured"
59
+
60
# Clone the AUTOMATIC1111 webui into BASE_DIR, then launch it from its
# own directory with CPU-tolerant flags (--skip-torch-cuda-test).
git_clone("https://github.com/AUTOMATIC1111/stable-diffusion-webui.git", BASE_DIR)

os.chdir(WEBUI_DIR)
#sys.path.insert(0, WEBUI_DIR)
#run(f"python3 launch.py --precision full --no-half --no-half-vae --enable-insecure-extension-access --medvram --always-cpu --skip-torch-cuda-test --disable-gpu-warning --enable-console-prompts")
run(f"python3 launch.py --precision full --no-half --no-half-vae --enable-insecure-extension-access --medvram --skip-torch-cuda-test --enable-console-prompts")
66
+
packages.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ git-lfs aria2
pre-requirements.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ pip>=23.0.0
requirements.txt ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ huggingface-hub
2
+ gdown
3
+ diffusers
4
+ scikit-learn
5
+ setuptools==69.5.1 # temp fix for compatibility with some old packages
6
+ GitPython==3.1.32
7
+ Pillow==9.5.0
8
+ accelerate
9
+ blendmodes==2022
10
+ clean-fid==0.1.35
11
+ diskcache==5.6.3
12
+ einops==0.4.1
13
+ facexlib==0.3.0
14
+ fastapi
15
+ httpcore==0.15
16
+ inflection==0.5.1
17
+ jsonmerge==1.8.0
18
+ kornia==0.6.7
19
+ lark==1.1.2
20
+ numpy==1.26.2
21
+ omegaconf==2.2.3
22
+ open-clip-torch==2.20.0
23
+ piexif==1.1.3
24
+ protobuf==3.20.0
25
+ psutil==5.9.5
26
+ pytorch_lightning==1.9.4
27
+ resize-right==0.0.2
28
+ safetensors==0.4.2
29
+ scikit-image==0.21.0
30
+ spandrel==0.3.4
31
+ spandrel-extra-arches==0.1.1
32
+ tomesd==0.1.3
33
+ torch
34
+ torchdiffeq==0.2.3
35
+ torchsde==0.2.6
36
+ transformers==4.44.0
37
+ httpx==0.24.1
38
+ pillow-avif-plugin==1.4.3
39
+ diffusers>=0.29.2
40
+ gradio_rangeslider==0.0.6
41
+ gradio_imageslider==0.0.20
42
+ loadimg==0.1.2
43
+ tqdm==4.66.1
44
+ peft
45
+ pydantic==2.8.2
46
+ huggingface-hub
utils.py ADDED
@@ -0,0 +1,229 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from huggingface_hub import HfApi, HfFolder, hf_hub_download, snapshot_download
3
+ import os
4
+ from pathlib import Path
5
+ import shutil
6
+ import gc
7
+ import re
8
+ import urllib.parse
9
+
10
+
11
def get_token():
    """Return the Hugging Face token cached on disk, or "" on any error."""
    try:
        return HfFolder.get_token()
    except Exception:
        return ""
17
+
18
+
19
def set_token(token):
    """Persist *token* to the local Hugging Face cache (best effort)."""
    try:
        HfFolder.save_token(token)
    except Exception:
        # Swallow failures deliberately: a missing token only degrades later calls.
        print(f"Error: Failed to save token.")
24
+
25
+
26
def get_user_agent():
    """Return a fixed, browser-like User-Agent string for HTTP requests."""
    ua = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:127.0) Gecko/20100101 Firefox/127.0'
    return ua
28
+
29
+
30
def is_repo_exists(repo_id: str, repo_type: str="model"):
    """Return True if the Hub repo exists.

    On API/connection errors this deliberately returns True (fail-safe:
    callers treat "exists" as the cautious answer).
    """
    hf_token = get_token()
    api = HfApi(token=hf_token)
    try:
        return bool(api.repo_exists(repo_id=repo_id, repo_type=repo_type, token=hf_token))
    except Exception as e:
        print(f"Error: Failed to connect {repo_id} ({repo_type}). {e}")
        return True # for safe
39
+
40
+
41
# Maps diffusers pipeline tags (as they appear in HF model metadata `tags`)
# to the short model-class labels used by get_model_type().
MODEL_TYPE_CLASS = {
    "diffusers:StableDiffusionPipeline": "SD 1.5",
    "diffusers:StableDiffusionXLPipeline": "SDXL",
    "diffusers:FluxPipeline": "FLUX",
}
46
+
47
+
48
def get_model_type(repo_id: str):
    """Classify a Hub repo as "LoRA", "SD 1.5", "SDXL", "FLUX", or "None".

    "None" means the repo has neither LoRA weights nor a diffusers
    model_index.json. Any API failure falls back to "SDXL".
    """
    hf_token = get_token()
    api = HfApi(token=hf_token)
    default = "SDXL"
    try:
        # LoRA repos carry a canonical weights filename.
        if api.file_exists(repo_id=repo_id, filename="pytorch_lora_weights.safetensors", token=hf_token):
            return "LoRA"
        # Non-diffusers repos are out of scope.
        if not api.file_exists(repo_id=repo_id, filename="model_index.json", token=hf_token):
            return "None"
        info = api.model_info(repo_id=repo_id, token=hf_token)
        for tag in info.tags:
            if tag in MODEL_TYPE_CLASS.keys():
                return MODEL_TYPE_CLASS.get(tag, default)
    except Exception:
        return default
    return default
64
+
65
+
66
def list_uniq(l):
    """Deduplicate *l*, keeping first-occurrence order."""
    return list(dict.fromkeys(l))
68
+
69
+
70
def list_sub(a, b):
    """Return the elements of *a* that do not appear in *b*, order preserved."""
    result = []
    for item in a:
        if item not in b:
            result.append(item)
    return result
72
+
73
+
74
def is_repo_name(s):
    """Return a Match (truthy) when *s* looks like an "owner/name" repo id.

    Note: re.fullmatch already anchors at both ends, so the original
    pattern's '^'/'$' anchors were redundant, as were '_' (covered by \\w)
    and the escapes inside the character class; the simplified pattern
    accepts exactly the same strings.
    """
    return re.fullmatch(r'[\w.-]+/[\w.-]+', s)
76
+
77
+
78
def get_hf_url(repo_id: str, repo_type: str="model"):
    """Build the canonical huggingface.co URL for a repo of the given type."""
    prefix = {"dataset": "datasets/", "space": "spaces/"}.get(repo_type, "")
    return f"https://huggingface.co/{prefix}{repo_id}"
83
+
84
+
85
def split_hf_url(url: str):
    """Split a huggingface.co file URL into (repo_id, filename, subfolder, repo_type).

    subfolder is None when the file sits at the branch root; repo_type is
    "model", "dataset", or "space". Returns ("", "", "", "") when the URL
    cannot be parsed.

    Bug fix: the original except branch only printed the error and fell
    off the end, implicitly returning None — any caller unpacking four
    values then raised TypeError. It now returns the same empty 4-tuple
    as the length-check path.
    """
    try:
        s = list(re.findall(r'^(?:https?://huggingface.co/)(?:(datasets|spaces)/)?(.+?/.+?)/\w+?/.+?/(?:(.+)/)?(.+?.\w+)(?:\?download=true)?$', url)[0])
        if len(s) < 4: return "", "", "", ""
        repo_id = s[1]
        if s[0] == "datasets": repo_type = "dataset"
        elif s[0] == "spaces": repo_type = "space"
        else: repo_type = "model"
        # Path pieces arrive percent-encoded; decode for local use.
        subfolder = urllib.parse.unquote(s[2]) if s[2] else None
        filename = urllib.parse.unquote(s[3])
        return repo_id, filename, subfolder, repo_type
    except Exception as e:
        print(e)
        return "", "", "", ""
98
+
99
+
100
def download_hf_file(directory, url, progress=gr.Progress(track_tqdm=True)):
    """Download one file from a huggingface.co URL into *directory*.

    Returns the local path on success, None on failure.
    """
    hf_token = get_token()
    repo_id, filename, subfolder, repo_type = split_hf_url(url)
    try:
        print(f"Downloading {url} to {directory}")
        kwargs = dict(repo_id=repo_id, filename=filename, repo_type=repo_type,
                      local_dir=directory, token=hf_token)
        if subfolder is not None:
            kwargs["subfolder"] = subfolder
        return hf_hub_download(**kwargs)
    except Exception as e:
        print(f"Failed to download: {e}")
        return None
111
+
112
+
113
def download_thing(directory, url, civitai_api_key="", progress=gr.Progress(track_tqdm=True)): # requires aria2, gdown
    """Download *url* into *directory*, dispatching on the host.

    Google Drive links go through gdown, huggingface.co links through
    download_hf_file(), Civitai links (API key required) and everything
    else through aria2c.

    Security/robustness fix: URLs and the target directory are now passed
    through shlex.quote() before interpolation into os.system() shell
    strings — the appended Civitai '?token=...' query (and '&' in Drive
    URLs) are shell metacharacters that previously globbed/backgrounded.
    """
    import shlex  # local import: only needed for shell-arg quoting here
    url = url.strip()
    if "drive.google.com" in url:
        original_dir = os.getcwd()
        os.chdir(directory)
        os.system(f"gdown --fuzzy {shlex.quote(url)}")
        os.chdir(original_dir)
    elif "huggingface.co" in url:
        url = url.replace("?download=true", "")
        if "/blob/" in url: url = url.replace("/blob/", "/resolve/")
        download_hf_file(directory, url)
    elif "civitai.com" in url:
        if "?" in url:
            url = url.split("?")[0]
        if civitai_api_key:
            url = url + f"?token={civitai_api_key}"
            os.system(f"aria2c --console-log-level=error --summary-interval=10 -c -x 16 -k 1M -s 16 -d {shlex.quote(directory)} {shlex.quote(url)}")
        else:
            print("You need an API key to download Civitai models.")
    else:
        os.system(f"aria2c --console-log-level=error --summary-interval=10 -c -x 16 -k 1M -s 16 -d {shlex.quote(directory)} {shlex.quote(url)}")
134
+
135
+
136
def get_local_file_list(dir_path):
    """Recursively list files under *dir_path* as path strings.

    Note: the "**/*.*" glob only matches names containing a dot
    (extension-less files are skipped), preserved from the original.
    """
    return [str(entry) for entry in Path(dir_path).glob("**/*.*") if entry.is_file()]
143
+
144
+
145
def get_download_file(temp_dir, url, civitai_key, progress=gr.Progress(track_tqdm=True)):
    """Resolve *url* to something usable locally, downloading when needed.

    Returns, in order of preference: the url itself when it is an HF repo
    id or an existing local path; the already-downloaded file in
    *temp_dir*; otherwise the path of a freshly downloaded file. Returns
    "" on download failure.

    Fixes vs. original: "alreday" typo in the cached-file message, and the
    after/before diff is computed once instead of twice.
    """
    if not "http" in url and is_repo_name(url) and not Path(url).exists():
        print(f"Use HF Repo: {url}")
        new_file = url
    elif not "http" in url and Path(url).exists():
        print(f"Use local file: {url}")
        new_file = url
    elif Path(f"{temp_dir}/{url.split('/')[-1]}").exists():
        print(f"File to download already exists: {url}")
        new_file = f"{temp_dir}/{url.split('/')[-1]}"
    else:
        print(f"Start downloading: {url}")
        # Diff the directory listing to discover what the downloader wrote.
        before = get_local_file_list(temp_dir)
        try:
            download_thing(temp_dir, url.strip(), civitai_key)
        except Exception:
            print(f"Download failed: {url}")
            return ""
        after = get_local_file_list(temp_dir)
        new_files = list_sub(after, before)
        new_file = new_files[0] if new_files else ""
        if not new_file:
            print(f"Download failed: {url}")
            return ""
        print(f"Download completed: {url}")
    return new_file
170
+
171
+
172
def download_repo(repo_id: str, dir_path: str, progress=gr.Progress(track_tqdm=True)): # for diffusers repo
    """Snapshot-download a diffusers repo's weight files into *dir_path*.

    Only *.safetensors / *.bin are fetched; fp16 variants and root-level
    weight files are skipped. Returns True on success, False on failure
    (logged and surfaced as a Gradio warning).
    """
    hf_token = get_token()
    try:
        snapshot_download(repo_id=repo_id, local_dir=dir_path, token=hf_token,
                          allow_patterns=["*.safetensors", "*.bin"],
                          ignore_patterns=["*.fp16.*", "/*.safetensors", "/*.bin"],
                          force_download=True)
        return True
    except Exception as e:
        print(f"Error: Failed to download {repo_id}. {e}")
        gr.Warning(f"Error: Failed to download {repo_id}. {e}")
        return False
182
+
183
+
184
def upload_repo(repo_id: str, dir_path: str, is_private: bool, progress=gr.Progress(track_tqdm=True)): # for diffusers repo
    """Upload every top-level entry of *dir_path* to an HF model repo.

    Creates the repo if missing. Returns the repo URL on success, "" on
    failure.
    """
    hf_token = get_token()
    api = HfApi(token=hf_token)
    try:
        progress(0, desc="Start uploading...")
        api.create_repo(repo_id=repo_id, token=hf_token, private=is_private, exist_ok=True)
        for entry in Path(dir_path).glob("*"):
            if entry.is_dir():
                api.upload_folder(repo_id=repo_id, folder_path=str(entry), path_in_repo=entry.name, token=hf_token)
            elif entry.is_file():
                api.upload_file(repo_id=repo_id, path_or_fileobj=str(entry), path_in_repo=entry.name, token=hf_token)
        progress(1, desc="Uploaded.")
        return get_hf_url(repo_id, "model")
    except Exception as e:
        print(f"Error: Failed to upload to {repo_id}. {e}")
        return ""
200
+
201
+
202
# Destination layout choices for duplicate_hf_repo():
# "None" = copy into the repo root, "user_repo" = into a subfolder named
# after the source repo ("owner_name").
HF_SUBFOLDER_NAME = ["None", "user_repo"]
203
+
204
+
205
def duplicate_hf_repo(src_repo: str, dst_repo: str, src_repo_type: str, dst_repo_type: str,
                      is_private: bool, subfolder_type: str=HF_SUBFOLDER_NAME[1], progress=gr.Progress(track_tqdm=True)):
    """Copy src_repo's files into dst_repo one file at a time.

    Each file is downloaded to the local HF cache, re-uploaded to
    dst_repo (optionally under a "owner_name" subfolder when
    subfolder_type == "user_repo"), then deleted locally. Creates the
    destination repo if missing. Returns the destination URL (pointing at
    the subfolder when one was used) or "" on any failure.
    """
    hf_token = get_token()
    api = HfApi(token=hf_token)
    try:
        if subfolder_type == "user_repo": subfolder = src_repo.replace("/", "_")
        else: subfolder = ""
        progress(0, desc="Start duplicating...")
        api.create_repo(repo_id=dst_repo, repo_type=dst_repo_type, private=is_private, exist_ok=True, token=hf_token)
        for path in api.list_repo_files(repo_id=src_repo, repo_type=src_repo_type, token=hf_token):
            # Download into the local cache; skip entries that did not materialize.
            file = hf_hub_download(repo_id=src_repo, filename=path, repo_type=src_repo_type, token=hf_token)
            if not Path(file).exists(): continue
            if Path(file).is_dir(): # unused for now
                api.upload_folder(repo_id=dst_repo, folder_path=file, path_in_repo=f"{subfolder}/{path}" if subfolder else path,
                                  repo_type=dst_repo_type, token=hf_token)
            elif Path(file).is_file():
                api.upload_file(repo_id=dst_repo, path_or_fileobj=file, path_in_repo=f"{subfolder}/{path}" if subfolder else path,
                                repo_type=dst_repo_type, token=hf_token)
            # Free local disk immediately after each upload.
            if Path(file).exists(): Path(file).unlink()
        progress(1, desc="Duplicated.")
        return f"{get_hf_url(dst_repo, dst_repo_type)}/tree/main/{subfolder}" if subfolder else get_hf_url(dst_repo, dst_repo_type)
    except Exception as e:
        print(f"Error: Failed to duplicate repo {src_repo} to {dst_repo}. {e}")
        return ""
229
+