Alexis-Az committed
Commit 22f91af · verified · 1 parent: 7428fb3

Update adapter_config.json


Removed modules_to_save, changed task_type to CAUSAL_LM, set auto_mapping to null.

Files changed (1)
  1. adapter_config.json +3 -5
adapter_config.json CHANGED
@@ -1,5 +1,6 @@
 {
   "alpha_pattern": {},
+  "auto_mapping": null,
   "base_model_name_or_path": "meta-llama/Meta-Llama-3-8B-Instruct",
   "bias": "none",
   "fan_in_fan_out": false,
@@ -13,10 +14,7 @@
   "lora_dropout": 0.05,
   "megatron_config": null,
   "megatron_core": "megatron.core",
-  "modules_to_save": [
-    "lm_head",
-    "embed_tokens"
-  ],
+  "modules_to_save": null,
   "peft_type": "LORA",
   "r": 48,
   "rank_pattern": {},
@@ -30,7 +28,7 @@
     "k_proj",
     "gate_proj"
   ],
-  "task_type": null,
+  "task_type": "CAUSAL_LM",
   "use_dora": false,
   "use_rslora": false
 }
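
For context, a minimal sketch of how PEFT reads the three fields this commit touches. The adapter repo id below is a placeholder (an assumption, not taken from the commit); substitute the actual Hub repo this file lives in.

# A minimal sketch, assuming a hypothetical adapter repo id.
from peft import PeftConfig, PeftModel
from transformers import AutoModelForCausalLM

ADAPTER_ID = "Alexis-Az/my-lora-adapter"  # hypothetical repo id (assumption)

# PeftConfig.from_pretrained parses adapter_config.json. After this commit,
# task_type resolves to TaskType.CAUSAL_LM (a str enum, so it compares equal
# to "CAUSAL_LM") instead of None, and modules_to_save is null, so full
# lm_head/embed_tokens weights are no longer expected in the adapter checkpoint.
config = PeftConfig.from_pretrained(ADAPTER_ID)
assert config.task_type == "CAUSAL_LM"
assert config.modules_to_save is None

# Load the base model named in the config and attach the LoRA adapter.
base = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path)
model = PeftModel.from_pretrained(base, ADAPTER_ID)

With task_type set, PeftModel.from_pretrained dispatches to the causal-LM PEFT wrapper rather than the generic PeftModel, which is what makes generation work out of the box with the adapter attached.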