Delta-Vector committed on
Commit
1fad9e1
·
verified ·
1 Parent(s): 38d830f

Update kto-4b.yml

Browse files
Files changed (1) hide show
  1. kto-4b.yml +26 -12
kto-4b.yml CHANGED
@@ -1,12 +1,24 @@
1
- base_model: NewEden/Hamanasu-4B-R2
2
  model_type: AutoModelForCausalLM
3
  tokenizer_type: AutoTokenizer
4
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  load_in_8bit: false
6
  load_in_4bit: false
7
  strict: false
8
 
9
- hub_model_id: NewEden/KTO-4B
10
  hub_strategy: "all_checkpoints"
11
  push_dataset_to_hub:
12
  hf_use_auth_token: true
@@ -14,17 +26,19 @@ hf_use_auth_token: true
14
  chat_template: chatml
15
 
16
  rl: kto
17
- rl_beta: 0.2
18
- kto_desirable_weight: 0.2
19
 
20
  datasets:
21
- - path: NewEden/KTO-IF-Dans
 
 
 
22
  split: train
23
  type: chatml.argilla
24
  dataset_prepared_path: last_run_prepared
25
 
26
  shuffle_merged_datasets: true
27
- val_set_size: 0.0
28
  output_dir: ./outputs/out
29
 
30
  sequence_len: 8192
@@ -35,22 +49,22 @@ pad_to_sequence_len: false
35
  wandb_project: tavbussy
36
  wandb_entity:
37
  wandb_watch:
38
- wandb_name: kto-1
39
  wandb_log_model:
40
 
41
- gradient_accumulation_steps: 16
42
  micro_batch_size: 2
43
  num_epochs: 1
44
- optimizer: paged_adamw_8bit
45
- learning_rate: 1e-6
46
  max_grad_norm: 0.001
47
- lr_scheduler: cosine
48
  weight_decay: 0.02
49
 
50
 
51
  lora_r: 64
52
  lora_alpha: 32
53
- lora_dropout: 0.0
54
  lora_target_linear: true
55
  lora_fan_in_fan_out:
56
  lora_target_modules:
 
1
+ base_model: ./model
2
  model_type: AutoModelForCausalLM
3
  tokenizer_type: AutoTokenizer
4
 
5
+
6
+ plugins:
7
+ - axolotl.integrations.liger.LigerPlugin
8
+ - axolotl.integrations.cut_cross_entropy.CutCrossEntropyPlugin
9
+ liger_rope: true
10
+ liger_rms_norm: true
11
+ liger_layer_norm: true
12
+ liger_glu_activation: true
13
+ liger_fused_linear_cross_entropy: false
14
+ cut_cross_entropy: true
15
+
16
+
17
  load_in_8bit: false
18
  load_in_4bit: false
19
  strict: false
20
 
21
+ hub_model_id: NewEden/Hamanasu-KTO-2
22
  hub_strategy: "all_checkpoints"
23
  push_dataset_to_hub:
24
  hf_use_auth_token: true
 
26
  chat_template: chatml
27
 
28
  rl: kto
29
+ kto_undesirable_weight: 1.0
 
30
 
31
  datasets:
32
+ - path: NewEden/Opus-accepted-hermes-rejected-shuffled
33
+ split: train
34
+ type: chatml.argilla
35
+ - path: NewEden/KTO-Instruct-Mix
36
  split: train
37
  type: chatml.argilla
38
  dataset_prepared_path: last_run_prepared
39
 
40
  shuffle_merged_datasets: true
41
+ val_set_size: 0.01
42
  output_dir: ./outputs/out
43
 
44
  sequence_len: 8192
 
49
  wandb_project: tavbussy
50
  wandb_entity:
51
  wandb_watch:
52
+ wandb_name: kto-chat-2
53
  wandb_log_model:
54
 
55
+ gradient_accumulation_steps: 4
56
  micro_batch_size: 2
57
  num_epochs: 1
58
+ optimizer: adamw_bnb_8bit
59
+ learning_rate: 5e-7
60
  max_grad_norm: 0.001
61
+ lr_scheduler: constant_with_warmup
62
  weight_decay: 0.02
63
 
64
 
65
  lora_r: 64
66
  lora_alpha: 32
67
+ lora_dropout: 0.1
68
  lora_target_linear: true
69
  lora_fan_in_fan_out:
70
  lora_target_modules: