xhedit committed
Fix typo (#1231) [skip ci]
src/axolotl/utils/models.py
CHANGED
@@ -756,7 +756,7 @@ def load_llama_adapter(model, cfg):
     )
 
     if cfg.lora_model_dir:
-        LOG.debug("Loading pretained PEFT - llama_adapter")
+        LOG.debug("Loading pretrained PEFT - llama_adapter")
         model = PeftModel.from_pretrained(
             model,
             cfg.lora_model_dir,
@@ -825,7 +825,7 @@ def load_lora(model, cfg, inference=False, config_only=False):
         return None, lora_config
 
     if cfg.lora_model_dir:
-        LOG.debug("Loading pretained PEFT - LoRA")
+        LOG.debug("Loading pretrained PEFT - LoRA")
         model_kwargs: Any = {}
         if cfg.lora_on_cpu:
             model_kwargs["max_memory"] = {"cpu": "256GiB"}
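For context, both LOG.debug calls sit immediately before PEFT loads adapter weights from cfg.lora_model_dir. Below is a minimal, self-contained sketch of that loading pattern; the base-model id and checkpoint path are placeholders, and the lora_on_cpu flag stands in for axolotl's cfg.lora_on_cpu. Only the max_memory kwarg comes straight from this diff, not the surrounding code.

# Sketch of the adapter-loading path these debug messages belong to.
# Assumes the transformers and peft packages; the model id and the
# "outputs/lora-checkpoint" path are illustrative, not from this commit.
from peft import PeftModel
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained("huggyllama/llama-7b")

model_kwargs = {}
lora_on_cpu = True  # stand-in for cfg.lora_on_cpu
if lora_on_cpu:
    # Same setting as the diff: a generous CPU memory cap so accelerate
    # can keep the adapter weights on CPU when a device map is in play.
    model_kwargs["max_memory"] = {"cpu": "256GiB"}

# PeftModel.from_pretrained wraps the base model and loads the LoRA
# adapter weights from the checkpoint directory.
model = PeftModel.from_pretrained(
    base,
    "outputs/lora-checkpoint",  # cfg.lora_model_dir in axolotl
    **model_kwargs,
)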