Add the ability to set the --trust_remote_code flag to true #19
by 1TSnakers · opened
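For reference, the whole feature reduces to forwarding one extra flag to the existing conversion entry point. Below is a minimal sketch of the argument list that ends up being built when the toggle is on, using the module and flag names from the diff that follows (illustrative only, not code from this PR):

    import sys

    input_model_id = "someone/custom-model"  # hypothetical model id
    # Roughly what _run_conversion_subprocess assembles when trust_remote_code is
    # enabled; the extra flag is simply appended via extra_args.
    cmd = [
        sys.executable, "-m", "scripts.convert",
        "--quantize", "--model_id", input_model_id,
        "--trust_remote_code",
    ]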
app.py CHANGED

@@ -2,9 +2,12 @@ import logging
 import os
 import subprocess
 import sys
+import tempfile
+import tarfile
+import shutil
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Optional, Tuple
+from typing import Dict, List, Optional, Tuple
 from urllib.request import urlopen, urlretrieve
 
 import streamlit as st
@@ -20,6 +23,7 @@ class Config:
 
     hf_token: str
     hf_username: str
+    is_using_user_token: bool
     transformers_version: str = "3.5.0"
     hf_base_url: str = "https://huggingface.co"
     transformers_base_url: str = (
@@ -32,18 +36,26 @@ class Config:
         """Create config from environment variables and secrets."""
         system_token = st.secrets.get("HF_TOKEN")
         user_token = st.session_state.get("user_hf_token")
+
         if user_token:
             hf_username = whoami(token=user_token)["name"]
         else:
             hf_username = (
                 os.getenv("SPACE_AUTHOR_NAME") or whoami(token=system_token)["name"]
             )
+
         hf_token = user_token or system_token
 
         if not hf_token:
-            raise ValueError(
+            raise ValueError(
+                "When the user token is not provided, the system token must be set."
+            )
 
-        return cls(
+        return cls(
+            hf_token=hf_token,
+            hf_username=hf_username,
+            is_using_user_token=bool(user_token),
+        )
 
 
 class ModelConverter:
@@ -82,9 +94,6 @@ class ModelConverter:
 
     def _extract_archive(self, archive_path: Path) -> None:
         """Extract the downloaded archive."""
-        import tarfile
-        import tempfile
-
         with tempfile.TemporaryDirectory() as tmp_dir:
             with tarfile.open(archive_path, "r:gz") as tar:
                 tar.extractall(tmp_dir)
@@ -92,23 +101,48 @@ class ModelConverter:
             extracted_folder = next(Path(tmp_dir).iterdir())
             extracted_folder.rename(self.config.repo_path)
 
-    def convert_model(self, input_model_id: str) -> Tuple[bool, Optional[str]]:
+    def _run_conversion_subprocess(
+        self, input_model_id: str, extra_args: List[str] = None
+    ) -> subprocess.CompletedProcess:
+        """Run the conversion subprocess with the given arguments."""
+        cmd = [
+            sys.executable,
+            "-m",
+            "scripts.convert",
+            "--quantize",
+            "--model_id",
+            input_model_id,
+        ]
+
+        if extra_args:
+            cmd.extend(extra_args)
+
+        return subprocess.run(
+            cmd,
+            cwd=self.config.repo_path,
+            capture_output=True,
+            text=True,
+            env={
+                "HF_TOKEN": self.config.hf_token,
+            },
+        )
+
+    def convert_model(
+        self, input_model_id: str, trust_remote_code=False
+    ) -> Tuple[bool, Optional[str]]:
         """Convert the model to ONNX format."""
         try:
-            result = subprocess.run(
-                [
-                    sys.executable,
-                    "-m",
-                    "scripts.convert",
-                    "--quantize",
-                    "--model_id",
-                    input_model_id,
-                ],
-                cwd=self.config.repo_path,
-                capture_output=True,
-                text=True,
-                env={},
-            )
+            if trust_remote_code:
+                if not self.config.is_using_user_token:
+                    raise Exception(
+                        "Trust Remote Code requires your own HuggingFace token."
+                    )
+
+                result = self._run_conversion_subprocess(
+                    input_model_id, extra_args=["--trust_remote_code"]
+                )
+            else:
+                result = self._run_conversion_subprocess(input_model_id)
 
             if result.returncode != 0:
                 return False, result.stderr
@@ -138,8 +172,6 @@ class ModelConverter:
         except Exception as e:
             return str(e)
         finally:
-            import shutil
-
             shutil.rmtree(model_folder_path, ignore_errors=True)
 
     def generate_readme(self, imi: str):
@@ -177,6 +209,11 @@ def main():
         type="password",
         key="user_hf_token",
    )
+    trust_remote_code = st.toggle("Optional: Trust Remote Code.")
+    if trust_remote_code:
+        st.warning(
+            "This option should only be enabled for repositories you trust and in which you have read the code, as it will execute arbitrary code present in the model repository. When this option is enabled, you must use your own Hugging Face write token."
+        )
 
    if config.hf_username == input_model_id.split("/")[0]:
         same_repo = st.checkbox(
@@ -206,7 +243,9 @@ def main():
             return
 
         with st.spinner("Converting model..."):
-            success, stderr = converter.convert_model(input_model_id)
+            success, stderr = converter.convert_model(
+                input_model_id, trust_remote_code=trust_remote_code
+            )
             if not success:
                 st.error(f"Conversion failed: {stderr}")
                 return
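For completeness, a rough caller-side sketch of the new parameter, assuming the converter is constructed the way main() uses it (the constructor call and model id below are illustrative, and how the raised Exception surfaces to the caller depends on convert_model's except clause, which this diff does not touch):

    # Illustrative sketch only -- not code from this PR.
    config = Config.from_env()
    converter = ModelConverter(config)  # assumed constructor, mirroring main()

    # With the shared system token (is_using_user_token == False), this call raises
    # "Trust Remote Code requires your own HuggingFace token." inside convert_model.
    success, stderr = converter.convert_model(
        "someone/custom-model",      # hypothetical model id
        trust_remote_code=True,      # appends --trust_remote_code to the subprocess call
    )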