Save adapter for lora
scripts/finetune.py +3 -0
scripts/finetune.py
CHANGED
@@ -230,6 +230,9 @@ def train(
     )
     # TODO do we need this fix? https://huggingface.co/docs/accelerate/usage_guides/fsdp#saving-and-loading
     trainer.save_model(cfg.output_dir)
+
+    if cfg.adapter == 'lora':
+        trainer.save_pretrained(cfg.output_dir)


 if __name__ == "__main__":
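One caveat on this hunk: transformers.Trainer does not define a save_pretrained method, so trainer.save_pretrained(cfg.output_dir) would raise an AttributeError at runtime; LoRA adapters are normally written out through the wrapped peft.PeftModel instead. Below is a minimal sketch of what this save step typically looks like, assuming model is the PeftModel that was handed to the Trainer; save_lora_adapter is a hypothetical helper, not part of this commit.

    from peft import PeftModel

    def save_lora_adapter(trainer, model, cfg):
        # Save the trainer's view of the model (weights, config) as usual.
        trainer.save_model(cfg.output_dir)
        # For a LoRA run, also write the adapter on its own
        # (adapter_config.json plus the adapter weights) so it can be
        # reloaded independently of the base model.
        if cfg.adapter == "lora" and isinstance(model, PeftModel):
            model.save_pretrained(cfg.output_dir)

Saving through the model keeps the adapter checkpoint small, since only the LoRA matrices are serialized rather than the full base-model weights.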