File size: 602 Bytes
c38e5cb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
# Merge a LoRA adapter into its base model and upload the merged weights
# to the Hugging Face Hub.
#
# Fix: ``PeftModel`` is defined in the ``peft`` package, not in
# ``transformers`` — the original import raised ImportError at startup.
from transformers import AutoModelForCausalLM
from peft import PeftModel
from huggingface_hub import HfApi

BASE_MODEL_ID = "microsoft/phi-4"
ADAPTER_ID = "ivxxdegen/mibera-v1"
OUTPUT_DIR = "merged_model"


def main() -> None:
    """Load base model + LoRA adapter, merge them, save locally, upload."""
    # Load the full base model, then attach the LoRA adapter on top of it.
    base_model = AutoModelForCausalLM.from_pretrained(BASE_MODEL_ID)
    adapter_model = PeftModel.from_pretrained(base_model, ADAPTER_ID)

    # Fold the LoRA deltas into the base weights and strip the PEFT
    # wrappers, leaving a plain transformers model on disk.
    merged_model = adapter_model.merge_and_unload()
    merged_model.save_pretrained(OUTPUT_DIR)

    # NOTE(review): this uploads the merged *full* model into the same repo
    # that holds the adapter, mixing/overwriting its contents — confirm
    # that is intended (a separate "<name>-merged" repo is more common).
    # NOTE(review): no tokenizer is saved or uploaded alongside the merged
    # weights; downstream consumers may need one — confirm.
    api = HfApi()
    api.upload_folder(folder_path=OUTPUT_DIR, repo_id=ADAPTER_ID)

    print("✅ Merge and upload completed successfully.")


if __name__ == "__main__":
    main()