{
    "base_model_name_or_path": "meta-llama/Llama-3.2-3B-Instruct",
    "peft_type": "LORA",
    "r": 8,
    "lora_alpha": 16,
    "lora_dropout": 0.0,
    "task_type": "CAUSAL_LM"
}
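
This is a PEFT LoRA adapter configuration: it attaches rank-8 LoRA matrices (scaled by lora_alpha / r = 16 / 8 = 2) to the meta-llama/Llama-3.2-3B-Instruct base model for causal language modeling. Below is a minimal sketch of how such a config is typically consumed, assuming the adapter weights sit next to this adapter_config.json in a local directory; the directory name "adapter_dir" is a hypothetical placeholder, not something named in this repo.

from transformers import AutoModelForCausalLM
from peft import LoraConfig, PeftModel

# Load the base model referenced by base_model_name_or_path.
base = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-3.2-3B-Instruct")

# Attach the trained LoRA adapter; "adapter_dir" is a placeholder for the
# directory containing this adapter_config.json and the adapter weights.
model = PeftModel.from_pretrained(base, "adapter_dir")

# Equivalently, the same hyperparameters expressed as a peft LoraConfig,
# e.g. when creating a fresh adapter for training rather than loading one:
config = LoraConfig(r=8, lora_alpha=16, lora_dropout=0.0, task_type="CAUSAL_LM")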