from transformers import AutoModelForCausalLM
from peft import PeftModel  # PeftModel comes from the peft package, not transformers
from huggingface_hub import HfApi

# Load the base model and wrap it with the LoRA adapter
base_model = AutoModelForCausalLM.from_pretrained("microsoft/phi-4")
adapter_model = PeftModel.from_pretrained(base_model, "ivxxdegen/mibera-v1")

# Merge the LoRA adapter weights into the base model and drop the adapter layers
merged_model = adapter_model.merge_and_unload()
merged_model.save_pretrained("merged_model")

# Upload the merged model folder to the existing repo on the Hugging Face Hub
api = HfApi()
api.upload_folder(folder_path="merged_model", repo_id="ivxxdegen/mibera-v1")

print("✅ Merge and upload completed successfully.")
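# Optional sanity check (a sketch, not part of the original snippet): assuming the
# upload finished and you are authenticated for this repo, the merged weights can be
# reloaded straight from the Hub without any PeftModel wrapper, since the adapter
# has already been folded into the base weights.
reloaded = AutoModelForCausalLM.from_pretrained("ivxxdegen/mibera-v1")
print(reloaded.config.architectures)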