# mergekit-gui / app.py
import os
import pathlib
import random
import string
import tempfile
import time
from concurrent.futures import ThreadPoolExecutor
from typing import Iterable, List
import gradio as gr
import huggingface_hub
import torch
import base64
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives import padding as sym_padding # Use symmetric padding
from cryptography.hazmat.backends import default_backend
import yaml
from gradio_logsview.logsview import Log, LogsView, LogsViewRunner
from mergekit.config import MergeConfiguration
from clean_community_org import garbage_collect_empty_models
from apscheduler.schedulers.background import BackgroundScheduler
from datetime import datetime, timezone
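# Build the mergekit-yaml invocation up front: GPU runs add --cuda and --low-cpu-memory,
# CPU-only runs shard the output (1B per shard) and use lazy unpickling to limit memory use.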
has_gpu = torch.cuda.is_available()
cli = "mergekit-yaml config.yaml merge --copy-tokenizer" + (
" --cuda --low-cpu-memory --allow-crimes" if has_gpu else " --allow-crimes --out-shard-size 1B --lazy-unpickle"
)
MARKDOWN_DESCRIPTION = """
# mergekit-gui
The fastest way to perform a model merge πŸ”₯
Specify a YAML configuration file (see examples below) and an HF token, and this app will perform the merge and upload the merged model to your user profile. The generated `mergekit_config.yml` is encrypted before upload, and a decryption tool is included.
"""
MARKDOWN_ARTICLE = """
___
## Merge Configuration
[Mergekit](https://github.com/arcee-ai/mergekit) configurations are YAML documents specifying the operations to perform in order to produce your merged model.
Below are the primary elements of a configuration file (a minimal example follows the list):
- `merge_method`: Specifies the method to use for merging models. See [Merge Methods](https://github.com/arcee-ai/mergekit#merge-methods) for a list.
- `slices`: Defines slices of layers from different models to be used. This field is mutually exclusive with `models`.
- `models`: Defines entire models to be used for merging. This field is mutually exclusive with `slices`.
- `base_model`: Specifies the base model used in some merging methods.
- `parameters`: Holds various parameters such as weights and densities, which can also be specified at different levels of the configuration.
- `dtype`: Specifies the data type used for the merging operation.
- `tokenizer_source`: Determines how to construct a tokenizer for the merged model.
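For illustration, a minimal SLERP configuration might look like this (the model names and `layer_range` are placeholders and should match the models you actually merge):
```yaml
slices:
  - sources:
      - model: your-org/model-a
        layer_range: [0, 32]
      - model: your-org/model-b
        layer_range: [0, 32]
merge_method: slerp
base_model: your-org/model-a
parameters:
  t: 0.5
dtype: bfloat16
```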
## Merge Methods
A quick overview of the currently supported merge methods:
| Method | `merge_method` value | Multi-Model | Uses base model |
| -------------------------------------------------------------------------------------------- | -------------------- | ----------- | --------------- |
| Linear ([Model Soups](https://arxiv.org/abs/2203.05482)) | `linear` | βœ… | ❌ |
| SLERP | `slerp` | ❌ | βœ… |
| [Task Arithmetic](https://arxiv.org/abs/2212.04089) | `task_arithmetic` | βœ… | βœ… |
| [TIES](https://arxiv.org/abs/2306.01708) | `ties` | βœ… | βœ… |
| [DARE](https://arxiv.org/abs/2311.03099) [TIES](https://arxiv.org/abs/2306.01708) | `dare_ties` | βœ… | βœ… |
| [DARE](https://arxiv.org/abs/2311.03099) [Task Arithmetic](https://arxiv.org/abs/2212.04089) | `dare_linear` | βœ… | βœ… |
| Passthrough | `passthrough` | ❌ | ❌ |
| [Model Stock](https://arxiv.org/abs/2403.19522) | `model_stock` | βœ… | βœ… |
This Space is heavily inspired by LazyMergeKit by Maxime Labonne (see [Colab](https://colab.research.google.com/drive/1obulZ1ROXHjYLn6PPZJwRR6GzgQogxxb)).
"""
examples = [[str(f)] for f in pathlib.Path("examples").glob("*.yaml")]
def _prepare_key(key: str) -> bytes:
"""Pads or truncates the key to 32 bytes (256 bits) for AES."""
key_bytes = key.encode('utf-8')
if len(key_bytes) < 32:
return key_bytes + b'\0' * (32 - len(key_bytes))
else:
return key_bytes[:32]
def encrypt_file(file_path, key: str) -> bool:
"""
Encrypt the contents of a file using AES-256-CBC encryption with the provided key.
The output is Base64 encoded.
Args:
file_path: Path to the file to encrypt (pathlib.Path or string)
key: Encryption key string.
Returns:
bool: True if encryption was successful, False otherwise
"""
try:
file_path = pathlib.Path(file_path)
if not file_path.exists():
print(f"Encryption error: File not found at {file_path}")
return False
key_bytes = _prepare_key(key)
# Generate a random IV (Initialization Vector) - 16 bytes for AES
iv = os.urandom(16)
# Create an AES cipher instance with CBC mode
cipher = Cipher(algorithms.AES(key_bytes), modes.CBC(iv), backend=default_backend())
encryptor = cipher.encryptor()
# Use PKCS7 padding
padder = sym_padding.PKCS7(algorithms.AES.block_size).padder()
with open(file_path, 'rb') as f:
plaintext = f.read()
# Pad the data
padded_data = padder.update(plaintext) + padder.finalize()
# Encrypt the padded data
ciphertext = encryptor.update(padded_data) + encryptor.finalize()
# Prepend the IV to the ciphertext and base64 encode the result
encrypted_data_with_iv = base64.b64encode(iv + ciphertext)
# Write the base64 encoded encrypted data back to the file
with open(file_path, 'wb') as f:
f.write(encrypted_data_with_iv)
return True
except Exception as e:
print(f"Encryption error: {e}")
return False
def decrypt_file_content(file_input, key: str) -> str:
"""
Decrypts the content of an uploaded file using AES-256-CBC and returns the result.
Assumes the file content is Base64 encoded IV + ciphertext.
Args:
file_input: Gradio File component output (temporary file object).
key: Decryption key string.
Returns:
str: Decrypted content as a UTF-8 string, or an error message.
"""
if file_input is None:
return "Error: No file provided for decryption."
if not key:
return "Error: Decryption key cannot be empty."
try:
file_path = file_input.name # Get the temporary file path from Gradio
key_bytes = _prepare_key(key)
with open(file_path, 'rb') as f:
base64_encoded_data = f.read()
# Decode from Base64
encrypted_data_with_iv = base64.b64decode(base64_encoded_data)
# Extract the IV (first 16 bytes)
iv = encrypted_data_with_iv[:16]
# Extract the ciphertext (the rest)
ciphertext = encrypted_data_with_iv[16:]
# Create an AES cipher instance with CBC mode for decryption
cipher = Cipher(algorithms.AES(key_bytes), modes.CBC(iv), backend=default_backend())
decryptor = cipher.decryptor()
# Decrypt the data
padded_plaintext = decryptor.update(ciphertext) + decryptor.finalize()
# Unpad the data using PKCS7
unpadder = sym_padding.PKCS7(algorithms.AES.block_size).unpadder()
plaintext = unpadder.update(padded_plaintext) + unpadder.finalize()
# Decode the plaintext from bytes to string (assuming UTF-8)
return plaintext.decode('utf-8')
except (ValueError, TypeError) as e:
# Catches Base64 decoding errors, incorrect key type errors
return f"Decryption Error: Invalid input data or key format. ({e})"
except Exception as e:
# Catches padding errors (often due to wrong key), or other crypto issues
print(f"Decryption error details: {e}")
return f"Decryption Failed: Likely incorrect key or corrupted file. Error: {type(e).__name__}"
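# Main pipeline behind the "Merge and Upload" button: validate the YAML config and HF token,
# run mergekit-yaml in a temporary directory, encrypt mergekit_config.yml, and upload the
# merged model to the user's profile (deleting the repo again if the merge fails).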
def merge(yaml_config: str, hf_token: str, repo_name: str, cipher_key: str) -> Iterable[List[Log]]:
runner = LogsViewRunner()
if not yaml_config:
        yield runner.log("Empty YAML; pick an example below", level="ERROR")
return
try:
merge_config = MergeConfiguration.model_validate(yaml.safe_load(yaml_config))
except Exception as e:
        yield runner.log(f"Invalid YAML: {e}", level="ERROR")
return
# Check if HF token is provided
if not hf_token:
yield runner.log("No HF token provided. A valid token is required for uploading.", level="ERROR")
return
# Validate that the token works by trying to get user info
try:
api = huggingface_hub.HfApi(token=hf_token)
me = api.whoami()
yield runner.log(f"Authenticated as: {me['name']} ({me.get('fullname', '')})")
except Exception as e:
yield runner.log(f"Invalid HF token: {e}", level="ERROR")
return
# Use default key if none provided, but log a warning
if not cipher_key:
        cipher_key = "default_insecure_key"  # Make the default explicitly insecure-sounding
yield runner.log("No cipher key provided. Using a default, insecure key. Please provide your own key for security.", level="WARNING")
elif cipher_key == "Default": # Check against the placeholder value
cipher_key = "default_insecure_key" # Treat placeholder as no key provided
yield runner.log("Default placeholder key detected. Using an insecure key. Please provide your own key.", level="WARNING")
with tempfile.TemporaryDirectory(ignore_cleanup_errors=True) as tmpdirname:
tmpdir = pathlib.Path(tmpdirname)
merged_path = tmpdir / "merged"
merged_path.mkdir(parents=True, exist_ok=True)
config_path = merged_path / "config.yaml"
config_path.write_text(yaml_config)
yield runner.log(f"Merge configuration saved in {config_path}")
if not repo_name:
yield runner.log("No repo name provided. Generating a random one.")
repo_name = f"mergekit-{merge_config.merge_method}"
# Make repo_name "unique" (no need to be extra careful on uniqueness)
repo_name += "-" + "".join(random.choices(string.ascii_lowercase, k=7))
repo_name = repo_name.replace("/", "-").strip("-")
try:
yield runner.log(f"Creating repo {repo_name}")
repo_url = api.create_repo(repo_name, exist_ok=True)
yield runner.log(f"Repo created: {repo_url}")
except Exception as e:
yield runner.log(f"Error creating repo {e}", level="ERROR")
return
        # Use a temporary HF_HOME so model downloads don't fill up the Space's disk
tmp_env = os.environ.copy() # taken from https://stackoverflow.com/a/4453495
tmp_env["HF_HOME"] = f"{tmpdirname}/.cache"
full_cli = cli + f" --lora-merge-cache {tmpdirname}/.lora_cache"
yield from runner.run_command(full_cli.split(), cwd=merged_path, env=tmp_env)
if runner.exit_code != 0:
yield runner.log("Merge failed. Deleting repo as no model is uploaded.", level="ERROR")
try:
api.delete_repo(repo_url.repo_id)
yield runner.log(f"Repo {repo_url.repo_id} deleted.")
except Exception as delete_e:
yield runner.log(f"Failed to delete repo {repo_url.repo_id}: {delete_e}", level="WARNING")
return
yield runner.log("Model merged successfully. Preparing for upload.")
# ---- Encryption Step ----
merge_dir = merged_path / "merge"
config_yml_path = merge_dir / "mergekit_config.yml"
if config_yml_path.exists():
yield runner.log(f"Found {config_yml_path.name}. Encrypting...")
if encrypt_file(config_yml_path, cipher_key):
yield runner.log(f"Successfully encrypted {config_yml_path.name} with provided key.")
else:
yield runner.log(f"Failed to encrypt {config_yml_path.name}. Uploading unencrypted.", level="ERROR")
else:
yield runner.log(f"{config_yml_path.name} not found in merge output, nothing to encrypt.", level="INFO")
# ---- End Encryption Step ----
# Delete Readme.md if it exists (case-insensitive check) before upload
readme_deleted = False
try:
for file in merge_dir.glob("*"):
if file.name.lower() == "readme.md":
file.unlink()
readme_deleted = True
yield runner.log(f"Deleted {file.name} file before upload")
break # Assume only one readme
except Exception as e:
yield runner.log(f"Error deleting Readme.md: {e}", level="WARNING")
if not readme_deleted:
yield runner.log("No Readme.md file found to delete.", level="INFO")
yield runner.log("Uploading merged model files to HF.")
yield from runner.run_python(
api.upload_folder,
repo_id=repo_url.repo_id,
folder_path=merge_dir, # Upload from the 'merge' subdirectory
)
yield runner.log(f"Model successfully uploaded to HF: {repo_url.repo_id}")
# Run garbage collection every hour to keep the community org clean.
# Empty models might exist if a merge fails abruptly (e.g. if the user leaves the Space).
def _garbage_remover():
try:
garbage_collect_empty_models(token=os.getenv("COMMUNITY_HF_TOKEN"))
except Exception as e:
print("Error running garbage collection", e)
scheduler = BackgroundScheduler()
garbage_remover_job = scheduler.add_job(_garbage_remover, "interval", seconds=3600)
scheduler.start()
next_run_time_utc = garbage_remover_job.next_run_time.astimezone(timezone.utc)
NEXT_RESTART = f"Next Restart: {next_run_time_utc.strftime('%Y-%m-%d %H:%M:%S')} (UTC)"
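# Gradio UI: a "Merge Model" tab that runs the merge/upload pipeline and a
# "Decrypt Configuration" tab for recovering an encrypted mergekit_config.yml.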
with gr.Blocks() as demo:
gr.Markdown(MARKDOWN_DESCRIPTION)
gr.Markdown(NEXT_RESTART)
with gr.Tabs():
with gr.TabItem("Merge Model"):
with gr.Row():
filename = gr.Textbox(visible=False, label="filename")
config = gr.Code(language="yaml", lines=10, label="config.yaml")
with gr.Column():
token = gr.Textbox(
lines=1,
label="HF Write Token",
info="https://hf.co/settings/token",
type="password",
placeholder="Required for model upload.",
)
repo_name = gr.Textbox(
lines=1,
label="Repo name",
placeholder="Optional. Will create a random name if empty.",
)
cipher_key = gr.Textbox(
lines=1,
label="Encryption Key",
type="password",
                        info="Key used to encrypt the generated mergekit_config.yml file before upload. If left blank or set to 'Default', an insecure default key is used instead.",
placeholder="Enter your secret key here",
value="Default" # Set a default placeholder
)
button = gr.Button("Merge and Upload", variant="primary")
logs = LogsView(label="Merge Progress / Terminal output")
gr.Examples(
examples,
fn=lambda s: (s,),
run_on_click=True,
label="Merge Examples",
inputs=[filename],
outputs=[config],
)
gr.Markdown(MARKDOWN_ARTICLE)
button.click(fn=merge, inputs=[config, token, repo_name, cipher_key], outputs=[logs])
with gr.TabItem("Decrypt Configuration"):
gr.Markdown("Upload an encrypted `mergekit_config.yml` file and provide the key to decrypt it.")
with gr.Row():
decrypt_file_input = gr.File(label="Upload Encrypted mergekit_config.yml")
decrypt_key_input = gr.Textbox(
lines=1,
label="Decryption Key",
type="password",
placeholder="Enter the key used for encryption",
)
decrypt_button = gr.Button("Decrypt File", variant="secondary")
decrypted_output = gr.Code(language="yaml", label="Decrypted Configuration", lines=15, interactive=False)
decrypt_button.click(
fn=decrypt_file_content,
inputs=[decrypt_file_input, decrypt_key_input],
outputs=[decrypted_output]
)
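# Queue requests with a concurrency limit of 1 so only one merge runs at a time.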
demo.queue(default_concurrency_limit=1).launch()