Don't use the LlamaForCausalLM path when trust_remote_code is set, since that case needs to go through the AutoModel path instead
Browse files
src/axolotl/utils/models.py
CHANGED
|
@@ -202,7 +202,7 @@ def load_model(
|
|
| 202 |
else True,
|
| 203 |
)
|
| 204 |
load_in_8bit = False
|
| 205 |
-
elif cfg.is_llama_derived_model:
|
| 206 |
from transformers import LlamaForCausalLM
|
| 207 |
|
| 208 |
config = LlamaConfig.from_pretrained(base_model_config)
|
|
|
|
| 202 |
else True,
|
| 203 |
)
|
| 204 |
load_in_8bit = False
|
| 205 |
+
elif cfg.is_llama_derived_model and not cfg.trust_remote_code:
|
| 206 |
from transformers import LlamaForCausalLM
|
| 207 |
|
| 208 |
config = LlamaConfig.from_pretrained(base_model_config)
|