hardlyworking committed
Commit 7219902 · verified · 1 Parent(s): ead9d21

Upload folder using huggingface_hub

Files changed (46)
  1. .gitattributes +3 -0
  2. README.md +175 -0
  3. checkpoint-301/config.json +28 -0
  4. checkpoint-301/generation_config.json +9 -0
  5. checkpoint-301/model-00001-of-00005.safetensors +3 -0
  6. checkpoint-301/model-00002-of-00005.safetensors +3 -0
  7. checkpoint-301/model-00003-of-00005.safetensors +3 -0
  8. checkpoint-301/model-00004-of-00005.safetensors +3 -0
  9. checkpoint-301/model-00005-of-00005.safetensors +3 -0
  10. checkpoint-301/model.safetensors.index.json +370 -0
  11. checkpoint-301/optimizer.pt +3 -0
  12. checkpoint-301/rng_state.pth +3 -0
  13. checkpoint-301/scheduler.pt +3 -0
  14. checkpoint-301/special_tokens_map.json +30 -0
  15. checkpoint-301/tokenizer.json +3 -0
  16. checkpoint-301/tokenizer_config.json +0 -0
  17. checkpoint-301/trainer_state.json +2205 -0
  18. checkpoint-301/training_args.bin +3 -0
  19. checkpoint-602/config.json +28 -0
  20. checkpoint-602/generation_config.json +9 -0
  21. checkpoint-602/model-00001-of-00005.safetensors +3 -0
  22. checkpoint-602/model-00002-of-00005.safetensors +3 -0
  23. checkpoint-602/model-00003-of-00005.safetensors +3 -0
  24. checkpoint-602/model-00004-of-00005.safetensors +3 -0
  25. checkpoint-602/model-00005-of-00005.safetensors +3 -0
  26. checkpoint-602/model.safetensors.index.json +370 -0
  27. checkpoint-602/optimizer.pt +3 -0
  28. checkpoint-602/rng_state.pth +3 -0
  29. checkpoint-602/scheduler.pt +3 -0
  30. checkpoint-602/special_tokens_map.json +30 -0
  31. checkpoint-602/tokenizer.json +3 -0
  32. checkpoint-602/tokenizer_config.json +0 -0
  33. checkpoint-602/trainer_state.json +0 -0
  34. checkpoint-602/training_args.bin +3 -0
  35. config.json +28 -0
  36. generation_config.json +9 -0
  37. model-00001-of-00005.safetensors +3 -0
  38. model-00002-of-00005.safetensors +3 -0
  39. model-00003-of-00005.safetensors +3 -0
  40. model-00004-of-00005.safetensors +3 -0
  41. model-00005-of-00005.safetensors +3 -0
  42. model.safetensors.index.json +370 -0
  43. special_tokens_map.json +30 -0
  44. tokenizer.json +3 -0
  45. tokenizer_config.json +0 -0
  46. training_args.bin +3 -0
.gitattributes CHANGED
@@ -33,3 +33,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ checkpoint-301/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoint-602/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,175 @@
---
library_name: transformers
license: apache-2.0
base_model: NewEden/MistralAI-Nemo-Instruct-ChatML
tags:
- axolotl
- generated_from_trainer
datasets:
- hardlyworking/HardlyRP
- jeiku/Writing
model-index:
- name: Sapphire-12B
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

[<img src="https://raw.githubusercontent.com/axolotl-ai-cloud/axolotl/main/image/axolotl-badge-web.png" alt="Built with Axolotl" width="200" height="32"/>](https://github.com/axolotl-ai-cloud/axolotl)
<details><summary>See axolotl config</summary>

axolotl version: `0.8.0`
```yaml
base_model: NewEden/MistralAI-Nemo-Instruct-ChatML
model_type: AutoModelForCausalLM
tokenizer_type: AutoTokenizer

load_in_8bit: false
load_in_4bit: false
strict: false

datasets:
  - path: hardlyworking/HardlyRP
    type: chat_template
    chat_template: chatml
    roles_to_train: ["gpt"]
    field_messages: conversations
    message_field_role: from
    message_field_content: value
    train_on_eos: turn
  - path: jeiku/Writing
    type: completion
    field: text

shuffle_merged_datasets: true
dataset_prepared_path: dataset_preparedss
val_set_size: 0.0025
output_dir: 12b-out-0001-max_grad_norm

hub_model_id: hardlyworking/Sapphire-12B
hub_strategy: "all_checkpoints"
push_dataset_to_hub:
hf_use_auth_token: true

plugins:
  - axolotl.integrations.liger.LigerPlugin
liger_rope: true
liger_rms_norm: true
liger_layer_norm: true
liger_glu_activation: true
liger_fused_linear_cross_entropy: true

sequence_len: 8192
sample_packing: true
eval_sample_packing: false
pad_to_sequence_len: true

max_grad_norm: 0.001

wandb_project: Sapphire
wandb_entity:
wandb_watch:
wandb_name: Sapphire
wandb_log_model:

evals_per_epoch: 8
eval_table_size:
eval_max_new_tokens: 128

gradient_accumulation_steps: 8
micro_batch_size: 2
num_epochs: 2
optimizer: adamw_bnb_8bit
lr_scheduler: cosine
learning_rate: 2e-6

train_on_inputs: false
group_by_length: false
bf16: auto
fp16:
tf32: false

gradient_checkpointing: true
early_stopping_patience:
resume_from_checkpoint:
local_rank:
logging_steps: 1
xformers_attention:
flash_attention: true
s2_attention:

warmup_ratio: 0.05
saves_per_epoch: 1
debug:
weight_decay: 0.0001
fsdp:
fsdp_config:
special_tokens:
  pad_token: <pad>
```

</details><br>

# Sapphire-12B

This model is a fine-tuned version of [NewEden/MistralAI-Nemo-Instruct-ChatML](https://huggingface.co/NewEden/MistralAI-Nemo-Instruct-ChatML) on the hardlyworking/HardlyRP and the jeiku/Writing datasets.
It achieves the following results on the evaluation set:
- Loss: 1.6799

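As a quick start, here is a minimal inference sketch (untested; it assumes the uploaded tokenizer ships the ChatML chat template configured above, and that a GPU with bf16 support is available):

```python
# Minimal sketch: load Sapphire-12B and run one ChatML-formatted turn.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "hardlyworking/Sapphire-12B"  # hub_model_id from the axolotl config
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [{"role": "user", "content": "Write a short scene set in a rainy city."}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

outputs = model.generate(inputs, max_new_tokens=256, do_sample=True, temperature=0.8)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```
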
## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training (the learning-rate schedule they imply is sketched after this list):
- learning_rate: 2e-06
- train_batch_size: 2
- eval_batch_size: 2
- seed: 42
- gradient_accumulation_steps: 8
- total_train_batch_size: 16
- optimizer: ADAMW_BNB (8-bit AdamW via bitsandbytes) with betas=(0.9, 0.999) and epsilon=1e-08; no additional optimizer arguments
- lr_scheduler_type: cosine
- lr_scheduler_warmup_steps: 30
- num_epochs: 2.0

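These bullets pin down a concrete learning-rate curve: 30 linear warmup steps up to 2e-06 (warmup_ratio 0.05 of roughly 602 optimizer steps, i.e. 2 epochs × 301 steps per epoch, matching the saved checkpoints), then cosine decay. A sketch that reproduces the values logged in trainer_state.json, assuming transformers' standard cosine-with-warmup schedule:

```python
import math

max_lr, warmup_steps, total_steps = 2e-06, 30, 602  # 602 = 2 epochs x 301 steps

def lr_before_step(step: int) -> float:
    """LR in effect at optimizer step `step` (0-indexed), matching
    transformers' get_cosine_schedule_with_warmup."""
    if step < warmup_steps:
        return max_lr * step / warmup_steps
    progress = (step - warmup_steps) / (total_steps - warmup_steps)
    return max_lr * 0.5 * (1.0 + math.cos(math.pi * progress))

print(lr_before_step(1))   # 6.67e-08       -> logged at trainer step 2
print(lr_before_step(30))  # 2e-06          -> logged at step 31 (warmup done)
print(lr_before_step(31))  # ~1.9999849e-06 -> logged at step 32
```
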
### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:------:|:----:|:---------------:|
| 1.8932 | 0.0033 | 1 | 1.9155 |
| 1.7729 | 0.1262 | 38 | 1.7802 |
| 1.7163 | 0.2525 | 76 | 1.7111 |
| 1.6484 | 0.3787 | 114 | 1.6970 |
| 1.7006 | 0.5050 | 152 | 1.6907 |
| 1.7276 | 0.6312 | 190 | 1.6874 |
| 1.7042 | 0.7575 | 228 | 1.6847 |
| 1.5575 | 0.8837 | 266 | 1.6825 |
| 1.5451 | 1.0100 | 304 | 1.6816 |
| 1.6592 | 1.1362 | 342 | 1.6807 |
| 1.7344 | 1.2625 | 380 | 1.6805 |
| 1.6953 | 1.3887 | 418 | 1.6798 |
| 1.5799 | 1.5150 | 456 | 1.6799 |
| 1.5241 | 1.6412 | 494 | 1.6799 |
| 1.548 | 1.7674 | 532 | 1.6797 |
| 1.6254 | 1.8937 | 570 | 1.6799 |

### Framework versions

- Transformers 4.51.0
- Pytorch 2.6.0+cu124
- Datasets 3.5.0
- Tokenizers 0.21.1
checkpoint-301/config.json ADDED
@@ -0,0 +1,28 @@
{
  "architectures": [
    "MistralForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "max_position_embeddings": 131072,
  "model_type": "mistral",
  "num_attention_heads": 32,
  "num_hidden_layers": 40,
  "num_key_value_heads": 8,
  "pad_token_id": 10,
  "rms_norm_eps": 1e-05,
  "rope_theta": 1000000.0,
  "sliding_window": null,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.51.0",
  "unsloth_version": "2024.9",
  "use_cache": false,
  "vocab_size": 131072
}
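A note on the attention shapes this config implies: with grouped-query attention, the 32 query heads (head_dim 128) share 8 key/value heads, so q_proj maps 5120 → 4096 while k_proj/v_proj map 5120 → 1024. A quick arithmetic check (values copied from the config above):

```python
hidden_size, head_dim = 5120, 128
n_heads, n_kv_heads = 32, 8

q_out = n_heads * head_dim       # 4096: q_proj output width
kv_out = n_kv_heads * head_dim   # 1024: k_proj / v_proj output width
print(q_out, kv_out, n_heads // n_kv_heads)  # 4096 1024 4 (query heads per KV head)
```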
checkpoint-301/generation_config.json ADDED
@@ -0,0 +1,9 @@
{
  "_from_model_config": true,
  "bos_token_id": 1,
  "do_sample": true,
  "eos_token_id": 2,
  "max_length": 1024000,
  "pad_token_id": 10,
  "transformers_version": "4.51.0"
}
checkpoint-301/model-00001-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:575a9c002d7e1c713a5dd1c2a2cb9babae9a45fc4ac37929322415b9a8b61a3a
size 4865522496
checkpoint-301/model-00002-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:20b5fb3bb1d4acbf5b58e36649d3b16bdf56131dc9e77019d76f881c8725119d
size 4907529424
checkpoint-301/model-00003-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3c47e0c72aeb47fdce0b2b811f43cc8b27a78a0f7d07c675c8fdbcb5cfc92401
size 4907529456
checkpoint-301/model-00004-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f4c045ee8362dd2002a0c8e7567a5529ad84178a61968fe8e5472d1164387dfa
size 4907529456
checkpoint-301/model-00005-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5876a0acdaf1bbbeda1517093207c9fb9952f08168ff32ca6803883c47e741c6
size 4907496272
checkpoint-301/model.safetensors.index.json ADDED
@@ -0,0 +1,370 @@
{
  "metadata": {
    "total_size": 24495564800
  },
  "weight_map": {
    "lm_head.weight": "model-00005-of-00005.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.15.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.16.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.20.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.24.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.25.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.30.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.33.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.34.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.norm.weight": "model-00005-of-00005.safetensors"
  }
}
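The index metadata also gives a quick size sanity check: the model is saved in bfloat16 (two bytes per parameter), so the total shard size works out to roughly 12.25B parameters, consistent with a 12B model split across five shards.

```python
total_bytes = 24_495_564_800   # "total_size" from the index above
params = total_bytes / 2       # torch_dtype is bfloat16: 2 bytes per parameter
print(f"~{params / 1e9:.2f}B parameters")  # ~12.25B
```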
checkpoint-301/optimizer.pt ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:131e50e7b14a29622d115df889446c34f2380aa8467f50afceccd200202917fc
size 24878749708
checkpoint-301/rng_state.pth ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f9a6944c405a002fce05f295d08ea6650e2e2ad6dbf5d6da1e9053f7bf7f5827
size 14244
checkpoint-301/scheduler.pt ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2484557978a2fe6c6378261a6c7d3f6207df09e4f40e7670ffa3875d12d2508d
size 1064
checkpoint-301/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<pad>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
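One detail worth noting: the tokenizer's eos_token here is <|im_end|> (the ChatML end-of-turn marker), while config.json and generation_config.json set eos_token_id to 2. If generations run past the end of a turn, explicitly stopping on the ChatML token is a common workaround (a sketch; `model` and `inputs` as in the README example above):

```python
# Resolve the ChatML end-of-turn id from the uploaded tokenizer and pass it
# as the stop token, since the model config's eos_token_id is 2 instead.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("hardlyworking/Sapphire-12B")
im_end_id = tokenizer.convert_tokens_to_ids("<|im_end|>")
outputs = model.generate(inputs, max_new_tokens=256, eos_token_id=im_end_id)
```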
checkpoint-301/tokenizer.json ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bc07f4f61632a89d8248b43f25649d6cc45200f8709e9d9bcd0414b00a4064e2
size 17078342
checkpoint-301/tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-301/trainer_state.json ADDED
@@ -0,0 +1,2205 @@
{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 38,
  "global_step": 301,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0033222591362126247,
      "grad_norm": 103.0,
      "learning_rate": 0.0,
      "loss": 1.8932,
      "step": 1
    },
    {
      "epoch": 0.0033222591362126247,
      "eval_loss": 1.915544867515564,
      "eval_runtime": 18.4554,
      "eval_samples_per_second": 3.034,
      "eval_steps_per_second": 1.517,
      "step": 1
    },
    {
      "epoch": 0.006644518272425249,
      "grad_norm": 50.5,
      "learning_rate": 6.666666666666667e-08,
      "loss": 1.9696,
      "step": 2
    },
    {
      "epoch": 0.009966777408637873,
      "grad_norm": 120.5,
      "learning_rate": 1.3333333333333334e-07,
      "loss": 1.8563,
      "step": 3
    },
    {
      "epoch": 0.013289036544850499,
      "grad_norm": 133.0,
      "learning_rate": 2e-07,
      "loss": 1.6208,
      "step": 4
    },
    {
      "epoch": 0.016611295681063124,
      "grad_norm": 115.0,
      "learning_rate": 2.6666666666666667e-07,
      "loss": 1.8607,
      "step": 5
    },
    {
      "epoch": 0.019933554817275746,
      "grad_norm": 42.0,
      "learning_rate": 3.333333333333333e-07,
      "loss": 1.9029,
      "step": 6
    },
    {
      "epoch": 0.023255813953488372,
      "grad_norm": 112.0,
      "learning_rate": 4e-07,
      "loss": 1.8704,
      "step": 7
    },
    {
      "epoch": 0.026578073089700997,
      "grad_norm": 84.0,
      "learning_rate": 4.6666666666666666e-07,
      "loss": 1.9463,
      "step": 8
    },
    {
      "epoch": 0.029900332225913623,
      "grad_norm": 87.5,
      "learning_rate": 5.333333333333333e-07,
      "loss": 1.9199,
      "step": 9
    },
    {
      "epoch": 0.03322259136212625,
      "grad_norm": 98.0,
      "learning_rate": 6e-07,
      "loss": 1.7595,
      "step": 10
    },
    {
      "epoch": 0.036544850498338874,
      "grad_norm": 74.5,
      "learning_rate": 6.666666666666666e-07,
      "loss": 1.8111,
      "step": 11
    },
    {
      "epoch": 0.03986710963455149,
      "grad_norm": 38.0,
      "learning_rate": 7.333333333333332e-07,
      "loss": 2.0669,
      "step": 12
    },
    {
      "epoch": 0.04318936877076412,
      "grad_norm": 17.875,
      "learning_rate": 8e-07,
      "loss": 1.9348,
      "step": 13
    },
    {
      "epoch": 0.046511627906976744,
      "grad_norm": 27.125,
      "learning_rate": 8.666666666666667e-07,
      "loss": 1.7987,
      "step": 14
    },
    {
      "epoch": 0.04983388704318937,
      "grad_norm": 15.75,
      "learning_rate": 9.333333333333333e-07,
      "loss": 1.7805,
      "step": 15
    },
    {
      "epoch": 0.053156146179401995,
      "grad_norm": 22.375,
      "learning_rate": 1e-06,
      "loss": 1.8799,
      "step": 16
    },
    {
      "epoch": 0.05647840531561462,
      "grad_norm": 14.5,
      "learning_rate": 1.0666666666666667e-06,
      "loss": 1.7922,
      "step": 17
    },
    {
      "epoch": 0.059800664451827246,
      "grad_norm": 18.5,
      "learning_rate": 1.1333333333333332e-06,
      "loss": 1.6291,
      "step": 18
    },
    {
      "epoch": 0.06312292358803986,
      "grad_norm": 32.75,
      "learning_rate": 1.2e-06,
      "loss": 1.8288,
      "step": 19
    },
    {
      "epoch": 0.0664451827242525,
      "grad_norm": 35.0,
      "learning_rate": 1.2666666666666665e-06,
      "loss": 1.8509,
      "step": 20
    },
    {
      "epoch": 0.06976744186046512,
      "grad_norm": 21.25,
      "learning_rate": 1.3333333333333332e-06,
      "loss": 1.7168,
      "step": 21
    },
    {
      "epoch": 0.07308970099667775,
      "grad_norm": 20.375,
      "learning_rate": 1.4e-06,
      "loss": 1.6161,
      "step": 22
    },
    {
      "epoch": 0.07641196013289037,
      "grad_norm": 18.75,
      "learning_rate": 1.4666666666666665e-06,
      "loss": 1.8325,
      "step": 23
    },
    {
      "epoch": 0.07973421926910298,
      "grad_norm": 31.875,
      "learning_rate": 1.5333333333333334e-06,
      "loss": 1.7156,
      "step": 24
    },
    {
      "epoch": 0.08305647840531562,
      "grad_norm": 29.75,
      "learning_rate": 1.6e-06,
      "loss": 1.7127,
      "step": 25
    },
    {
      "epoch": 0.08637873754152824,
      "grad_norm": 19.5,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.7001,
      "step": 26
    },
    {
      "epoch": 0.08970099667774087,
      "grad_norm": 15.0,
      "learning_rate": 1.7333333333333334e-06,
      "loss": 1.9355,
      "step": 27
    },
    {
      "epoch": 0.09302325581395349,
      "grad_norm": 11.125,
      "learning_rate": 1.8e-06,
      "loss": 1.8541,
      "step": 28
    },
    {
      "epoch": 0.09634551495016612,
      "grad_norm": 11.5,
      "learning_rate": 1.8666666666666667e-06,
      "loss": 1.7974,
      "step": 29
    },
    {
      "epoch": 0.09966777408637874,
      "grad_norm": 11.8125,
      "learning_rate": 1.933333333333333e-06,
      "loss": 1.7858,
      "step": 30
    },
    {
      "epoch": 0.10299003322259136,
      "grad_norm": 24.5,
      "learning_rate": 2e-06,
      "loss": 1.8134,
      "step": 31
    },
    {
      "epoch": 0.10631229235880399,
      "grad_norm": 17.75,
      "learning_rate": 1.9999849173865606e-06,
      "loss": 1.8065,
      "step": 32
    },
    {
      "epoch": 0.10963455149501661,
      "grad_norm": 20.625,
      "learning_rate": 1.9999396700012128e-06,
      "loss": 1.7911,
      "step": 33
    },
    {
      "epoch": 0.11295681063122924,
      "grad_norm": 17.0,
      "learning_rate": 1.999864259208854e-06,
      "loss": 1.8103,
      "step": 34
    },
    {
      "epoch": 0.11627906976744186,
      "grad_norm": 12.0,
      "learning_rate": 1.999758687284268e-06,
      "loss": 1.8241,
      "step": 35
    },
    {
      "epoch": 0.11960132890365449,
      "grad_norm": 6.875,
      "learning_rate": 1.999622957412056e-06,
      "loss": 1.6427,
      "step": 36
    },
    {
      "epoch": 0.12292358803986711,
      "grad_norm": 11.0,
      "learning_rate": 1.9994570736865402e-06,
      "loss": 1.781,
      "step": 37
    },
    {
      "epoch": 0.12624584717607973,
      "grad_norm": 6.09375,
      "learning_rate": 1.999261041111641e-06,
      "loss": 1.7729,
      "step": 38
    },
    {
      "epoch": 0.12624584717607973,
      "eval_loss": 1.7802449464797974,
      "eval_runtime": 18.2451,
      "eval_samples_per_second": 3.069,
      "eval_steps_per_second": 1.535,
      "step": 38
    },
    {
      "epoch": 0.12956810631229235,
      "grad_norm": 6.875,
      "learning_rate": 1.999034865600726e-06,
      "loss": 1.6201,
      "step": 39
    },
    {
      "epoch": 0.132890365448505,
      "grad_norm": 10.1875,
      "learning_rate": 1.9987785539764298e-06,
      "loss": 1.6546,
      "step": 40
    },
    {
      "epoch": 0.1362126245847176,
      "grad_norm": 7.8125,
      "learning_rate": 1.998492113970451e-06,
      "loss": 1.7503,
      "step": 41
    },
    {
      "epoch": 0.13953488372093023,
      "grad_norm": 5.8125,
      "learning_rate": 1.9981755542233175e-06,
      "loss": 1.608,
      "step": 42
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 4.875,
      "learning_rate": 1.9978288842841257e-06,
      "loss": 1.7755,
      "step": 43
    },
    {
      "epoch": 0.1461794019933555,
      "grad_norm": 4.09375,
      "learning_rate": 1.9974521146102534e-06,
      "loss": 1.7116,
      "step": 44
    },
    {
      "epoch": 0.14950166112956811,
      "grad_norm": 5.625,
      "learning_rate": 1.997045256567043e-06,
      "loss": 1.9239,
      "step": 45
    },
    {
      "epoch": 0.15282392026578073,
      "grad_norm": 5.59375,
      "learning_rate": 1.99660832242746e-06,
      "loss": 1.7939,
      "step": 46
    },
    {
      "epoch": 0.15614617940199335,
      "grad_norm": 6.75,
      "learning_rate": 1.9961413253717214e-06,
      "loss": 1.7379,
      "step": 47
    },
    {
      "epoch": 0.15946843853820597,
      "grad_norm": 7.09375,
      "learning_rate": 1.995644279486899e-06,
      "loss": 1.6323,
      "step": 48
    },
    {
      "epoch": 0.16279069767441862,
      "grad_norm": 9.4375,
      "learning_rate": 1.9951171997664958e-06,
      "loss": 1.4941,
      "step": 49
    },
    {
      "epoch": 0.16611295681063123,
      "grad_norm": 6.90625,
      "learning_rate": 1.99456010210999e-06,
      "loss": 1.7755,
      "step": 50
    },
    {
      "epoch": 0.16943521594684385,
      "grad_norm": 6.5,
      "learning_rate": 1.99397300332236e-06,
      "loss": 1.725,
      "step": 51
    },
    {
      "epoch": 0.17275747508305647,
      "grad_norm": 4.1875,
      "learning_rate": 1.993355921113573e-06,
      "loss": 1.8318,
      "step": 52
    },
    {
      "epoch": 0.1760797342192691,
      "grad_norm": 3.78125,
      "learning_rate": 1.9927088740980536e-06,
      "loss": 1.6346,
      "step": 53
    },
    {
      "epoch": 0.17940199335548174,
      "grad_norm": 3.625,
      "learning_rate": 1.992031881794123e-06,
      "loss": 1.6807,
      "step": 54
    },
    {
      "epoch": 0.18272425249169436,
      "grad_norm": 3.5625,
      "learning_rate": 1.9913249646234067e-06,
      "loss": 1.8396,
      "step": 55
    },
    {
      "epoch": 0.18604651162790697,
      "grad_norm": 6.875,
      "learning_rate": 1.990588143910222e-06,
      "loss": 1.5897,
      "step": 56
    },
    {
      "epoch": 0.1893687707641196,
      "grad_norm": 6.0,
      "learning_rate": 1.9898214418809326e-06,
      "loss": 1.7284,
      "step": 57
    },
    {
      "epoch": 0.19269102990033224,
      "grad_norm": 5.90625,
      "learning_rate": 1.989024881663279e-06,
      "loss": 1.6945,
      "step": 58
    },
    {
      "epoch": 0.19601328903654486,
      "grad_norm": 4.28125,
      "learning_rate": 1.9881984872856818e-06,
      "loss": 1.6839,
      "step": 59
    },
    {
      "epoch": 0.19933554817275748,
      "grad_norm": 4.15625,
      "learning_rate": 1.9873422836765135e-06,
      "loss": 1.6582,
      "step": 60
    },
    {
      "epoch": 0.2026578073089701,
      "grad_norm": 3.8125,
      "learning_rate": 1.9864562966633515e-06,
      "loss": 1.8207,
      "step": 61
    },
    {
      "epoch": 0.2059800664451827,
      "grad_norm": 4.78125,
      "learning_rate": 1.9855405529721944e-06,
      "loss": 1.6543,
      "step": 62
    },
    {
      "epoch": 0.20930232558139536,
      "grad_norm": 3.546875,
      "learning_rate": 1.9845950802266584e-06,
      "loss": 1.817,
      "step": 63
    },
    {
      "epoch": 0.21262458471760798,
      "grad_norm": 4.03125,
      "learning_rate": 1.9836199069471437e-06,
      "loss": 1.5228,
      "step": 64
    },
    {
      "epoch": 0.2159468438538206,
      "grad_norm": 3.328125,
      "learning_rate": 1.982615062549973e-06,
      "loss": 1.6324,
      "step": 65
    },
    {
      "epoch": 0.21926910299003322,
      "grad_norm": 3.25,
      "learning_rate": 1.9815805773465063e-06,
      "loss": 1.7938,
      "step": 66
    },
    {
      "epoch": 0.22259136212624583,
      "grad_norm": 3.265625,
      "learning_rate": 1.9805164825422237e-06,
      "loss": 1.7176,
      "step": 67
    },
    {
      "epoch": 0.22591362126245848,
      "grad_norm": 3.6875,
      "learning_rate": 1.9794228102357864e-06,
      "loss": 1.8079,
      "step": 68
    },
    {
      "epoch": 0.2292358803986711,
      "grad_norm": 2.953125,
      "learning_rate": 1.9782995934180687e-06,
      "loss": 1.6981,
      "step": 69
    },
    {
      "epoch": 0.23255813953488372,
      "grad_norm": 3.046875,
      "learning_rate": 1.9771468659711594e-06,
      "loss": 1.6665,
      "step": 70
    },
    {
      "epoch": 0.23588039867109634,
      "grad_norm": 3.296875,
      "learning_rate": 1.9759646626673444e-06,
      "loss": 1.7792,
      "step": 71
    },
    {
      "epoch": 0.23920265780730898,
      "grad_norm": 3.515625,
      "learning_rate": 1.974753019168054e-06,
      "loss": 1.7666,
      "step": 72
    },
    {
      "epoch": 0.2425249169435216,
      "grad_norm": 3.234375,
      "learning_rate": 1.97351197202279e-06,
      "loss": 1.7061,
      "step": 73
    },
    {
      "epoch": 0.24584717607973422,
541
+ "grad_norm": 2.8125,
542
+ "learning_rate": 1.9722415586680203e-06,
543
+ "loss": 1.6216,
544
+ "step": 74
545
+ },
546
+ {
547
+ "epoch": 0.24916943521594684,
548
+ "grad_norm": 6.6875,
549
+ "learning_rate": 1.970941817426052e-06,
550
+ "loss": 1.6851,
551
+ "step": 75
552
+ },
553
+ {
554
+ "epoch": 0.25249169435215946,
555
+ "grad_norm": 2.84375,
556
+ "learning_rate": 1.969612787503875e-06,
557
+ "loss": 1.7163,
558
+ "step": 76
559
+ },
560
+ {
561
+ "epoch": 0.25249169435215946,
562
+ "eval_loss": 1.7110573053359985,
563
+ "eval_runtime": 18.248,
564
+ "eval_samples_per_second": 3.069,
565
+ "eval_steps_per_second": 1.534,
566
+ "step": 76
567
+ },
568
+ {
569
+ "epoch": 0.2558139534883721,
570
+ "grad_norm": 3.484375,
571
+ "learning_rate": 1.968254508991978e-06,
572
+ "loss": 1.6136,
573
+ "step": 77
574
+ },
575
+ {
576
+ "epoch": 0.2591362126245847,
577
+ "grad_norm": 2.84375,
578
+ "learning_rate": 1.9668670228631416e-06,
579
+ "loss": 1.6798,
580
+ "step": 78
581
+ },
582
+ {
583
+ "epoch": 0.26245847176079734,
584
+ "grad_norm": 2.6875,
585
+ "learning_rate": 1.965450370971198e-06,
586
+ "loss": 1.6868,
587
+ "step": 79
588
+ },
589
+ {
590
+ "epoch": 0.26578073089701,
591
+ "grad_norm": 2.828125,
592
+ "learning_rate": 1.964004596049774e-06,
593
+ "loss": 1.8666,
594
+ "step": 80
595
+ },
596
+ {
597
+ "epoch": 0.2691029900332226,
598
+ "grad_norm": 2.640625,
599
+ "learning_rate": 1.962529741710998e-06,
600
+ "loss": 1.5132,
601
+ "step": 81
602
+ },
603
+ {
604
+ "epoch": 0.2724252491694352,
605
+ "grad_norm": 4.125,
606
+ "learning_rate": 1.961025852444185e-06,
607
+ "loss": 1.7368,
608
+ "step": 82
609
+ },
610
+ {
611
+ "epoch": 0.2757475083056478,
612
+ "grad_norm": 2.953125,
613
+ "learning_rate": 1.9594929736144973e-06,
614
+ "loss": 1.766,
615
+ "step": 83
616
+ },
617
+ {
618
+ "epoch": 0.27906976744186046,
619
+ "grad_norm": 2.71875,
620
+ "learning_rate": 1.9579311514615715e-06,
621
+ "loss": 1.742,
622
+ "step": 84
623
+ },
624
+ {
625
+ "epoch": 0.2823920265780731,
626
+ "grad_norm": 5.25,
627
+ "learning_rate": 1.9563404330981273e-06,
628
+ "loss": 1.6776,
629
+ "step": 85
630
+ },
631
+ {
632
+ "epoch": 0.2857142857142857,
633
+ "grad_norm": 2.609375,
634
+ "learning_rate": 1.9547208665085457e-06,
635
+ "loss": 1.665,
636
+ "step": 86
637
+ },
638
+ {
639
+ "epoch": 0.28903654485049834,
640
+ "grad_norm": 2.96875,
641
+ "learning_rate": 1.9530725005474194e-06,
642
+ "loss": 1.7081,
643
+ "step": 87
644
+ },
645
+ {
646
+ "epoch": 0.292358803986711,
647
+ "grad_norm": 2.890625,
648
+ "learning_rate": 1.951395384938082e-06,
649
+ "loss": 1.6911,
650
+ "step": 88
651
+ },
652
+ {
653
+ "epoch": 0.2956810631229236,
654
+ "grad_norm": 2.59375,
655
+ "learning_rate": 1.9496895702711067e-06,
656
+ "loss": 1.722,
657
+ "step": 89
658
+ },
659
+ {
660
+ "epoch": 0.29900332225913623,
661
+ "grad_norm": 2.8125,
662
+ "learning_rate": 1.9479551080027796e-06,
663
+ "loss": 1.5948,
664
+ "step": 90
665
+ },
666
+ {
667
+ "epoch": 0.3023255813953488,
668
+ "grad_norm": 2.71875,
669
+ "learning_rate": 1.9461920504535483e-06,
670
+ "loss": 1.6996,
671
+ "step": 91
672
+ },
673
+ {
674
+ "epoch": 0.30564784053156147,
675
+ "grad_norm": 2.6875,
676
+ "learning_rate": 1.9444004508064445e-06,
677
+ "loss": 1.8464,
678
+ "step": 92
679
+ },
680
+ {
681
+ "epoch": 0.3089700996677741,
682
+ "grad_norm": 2.890625,
683
+ "learning_rate": 1.942580363105477e-06,
684
+ "loss": 1.6863,
685
+ "step": 93
686
+ },
687
+ {
688
+ "epoch": 0.3122923588039867,
689
+ "grad_norm": 3.453125,
690
+ "learning_rate": 1.9407318422540057e-06,
691
+ "loss": 1.6463,
692
+ "step": 94
693
+ },
694
+ {
695
+ "epoch": 0.31561461794019935,
696
+ "grad_norm": 11.4375,
697
+ "learning_rate": 1.9388549440130797e-06,
698
+ "loss": 1.7122,
699
+ "step": 95
700
+ },
701
+ {
702
+ "epoch": 0.31893687707641194,
703
+ "grad_norm": 3.046875,
704
+ "learning_rate": 1.936949724999762e-06,
705
+ "loss": 1.6855,
706
+ "step": 96
707
+ },
708
+ {
709
+ "epoch": 0.3222591362126246,
710
+ "grad_norm": 2.53125,
711
+ "learning_rate": 1.9350162426854148e-06,
712
+ "loss": 1.6138,
713
+ "step": 97
714
+ },
715
+ {
716
+ "epoch": 0.32558139534883723,
717
+ "grad_norm": 2.59375,
718
+ "learning_rate": 1.9330545553939717e-06,
719
+ "loss": 1.6139,
720
+ "step": 98
721
+ },
722
+ {
723
+ "epoch": 0.3289036544850498,
724
+ "grad_norm": 3.046875,
725
+ "learning_rate": 1.9310647223001747e-06,
726
+ "loss": 1.5904,
727
+ "step": 99
728
+ },
729
+ {
730
+ "epoch": 0.33222591362126247,
731
+ "grad_norm": 2.515625,
732
+ "learning_rate": 1.9290468034277907e-06,
733
+ "loss": 1.5604,
734
+ "step": 100
735
+ },
736
+ {
737
+ "epoch": 0.33554817275747506,
738
+ "grad_norm": 3.234375,
739
+ "learning_rate": 1.9270008596478004e-06,
740
+ "loss": 1.6564,
741
+ "step": 101
742
+ },
743
+ {
744
+ "epoch": 0.3388704318936877,
745
+ "grad_norm": 2.53125,
746
+ "learning_rate": 1.9249269526765616e-06,
747
+ "loss": 1.6987,
748
+ "step": 102
749
+ },
750
+ {
751
+ "epoch": 0.34219269102990035,
752
+ "grad_norm": 2.46875,
753
+ "learning_rate": 1.922825145073949e-06,
754
+ "loss": 1.7714,
755
+ "step": 103
756
+ },
757
+ {
758
+ "epoch": 0.34551495016611294,
759
+ "grad_norm": 2.453125,
760
+ "learning_rate": 1.9206955002414657e-06,
761
+ "loss": 1.7206,
762
+ "step": 104
763
+ },
764
+ {
765
+ "epoch": 0.3488372093023256,
766
+ "grad_norm": 2.890625,
767
+ "learning_rate": 1.9185380824203314e-06,
768
+ "loss": 1.8164,
769
+ "step": 105
770
+ },
771
+ {
772
+ "epoch": 0.3521594684385382,
773
+ "grad_norm": 2.546875,
774
+ "learning_rate": 1.9163529566895437e-06,
775
+ "loss": 1.7213,
776
+ "step": 106
777
+ },
778
+ {
779
+ "epoch": 0.3554817275747508,
780
+ "grad_norm": 2.65625,
781
+ "learning_rate": 1.9141401889639164e-06,
782
+ "loss": 1.7515,
783
+ "step": 107
784
+ },
785
+ {
786
+ "epoch": 0.3588039867109635,
787
+ "grad_norm": 2.796875,
788
+ "learning_rate": 1.91189984599209e-06,
789
+ "loss": 1.7709,
790
+ "step": 108
791
+ },
792
+ {
793
+ "epoch": 0.36212624584717606,
794
+ "grad_norm": 2.75,
795
+ "learning_rate": 1.9096319953545185e-06,
796
+ "loss": 1.7297,
797
+ "step": 109
798
+ },
799
+ {
800
+ "epoch": 0.3654485049833887,
801
+ "grad_norm": 2.546875,
802
+ "learning_rate": 1.9073367054614305e-06,
803
+ "loss": 1.709,
804
+ "step": 110
805
+ },
806
+ {
807
+ "epoch": 0.3687707641196013,
808
+ "grad_norm": 2.453125,
809
+ "learning_rate": 1.9050140455507666e-06,
810
+ "loss": 1.6388,
811
+ "step": 111
812
+ },
813
+ {
814
+ "epoch": 0.37209302325581395,
815
+ "grad_norm": 2.640625,
816
+ "learning_rate": 1.9026640856860902e-06,
817
+ "loss": 1.7441,
818
+ "step": 112
819
+ },
820
+ {
821
+ "epoch": 0.3754152823920266,
822
+ "grad_norm": 2.65625,
823
+ "learning_rate": 1.900286896754474e-06,
824
+ "loss": 1.6313,
825
+ "step": 113
826
+ },
827
+ {
828
+ "epoch": 0.3787375415282392,
829
+ "grad_norm": 2.484375,
830
+ "learning_rate": 1.8978825504643607e-06,
831
+ "loss": 1.6484,
832
+ "step": 114
833
+ },
834
+ {
835
+ "epoch": 0.3787375415282392,
836
+ "eval_loss": 1.697015404701233,
837
+ "eval_runtime": 18.2245,
838
+ "eval_samples_per_second": 3.073,
839
+ "eval_steps_per_second": 1.536,
840
+ "step": 114
841
+ },
842
+ {
843
+ "epoch": 0.38205980066445183,
844
+ "grad_norm": 2.59375,
845
+ "learning_rate": 1.8954511193434021e-06,
846
+ "loss": 1.5279,
847
+ "step": 115
848
+ },
849
+ {
850
+ "epoch": 0.3853820598006645,
851
+ "grad_norm": 2.859375,
852
+ "learning_rate": 1.8929926767362697e-06,
853
+ "loss": 1.7743,
854
+ "step": 116
855
+ },
856
+ {
857
+ "epoch": 0.38870431893687707,
858
+ "grad_norm": 2.640625,
859
+ "learning_rate": 1.8905072968024423e-06,
860
+ "loss": 1.9903,
861
+ "step": 117
862
+ },
863
+ {
864
+ "epoch": 0.3920265780730897,
865
+ "grad_norm": 2.765625,
866
+ "learning_rate": 1.8879950545139692e-06,
867
+ "loss": 1.6696,
868
+ "step": 118
869
+ },
870
+ {
871
+ "epoch": 0.3953488372093023,
872
+ "grad_norm": 3.6875,
873
+ "learning_rate": 1.8854560256532098e-06,
874
+ "loss": 1.5742,
875
+ "step": 119
876
+ },
877
+ {
878
+ "epoch": 0.39867109634551495,
879
+ "grad_norm": 2.59375,
880
+ "learning_rate": 1.8828902868105452e-06,
881
+ "loss": 1.6362,
882
+ "step": 120
883
+ },
884
+ {
885
+ "epoch": 0.4019933554817276,
886
+ "grad_norm": 3.21875,
887
+ "learning_rate": 1.8802979153820696e-06,
888
+ "loss": 1.7119,
889
+ "step": 121
890
+ },
891
+ {
892
+ "epoch": 0.4053156146179402,
893
+ "grad_norm": 2.296875,
894
+ "learning_rate": 1.8776789895672556e-06,
895
+ "loss": 1.6249,
896
+ "step": 122
897
+ },
898
+ {
899
+ "epoch": 0.40863787375415284,
900
+ "grad_norm": 2.828125,
901
+ "learning_rate": 1.8750335883665945e-06,
902
+ "loss": 1.7243,
903
+ "step": 123
904
+ },
905
+ {
906
+ "epoch": 0.4119601328903654,
907
+ "grad_norm": 2.671875,
908
+ "learning_rate": 1.8723617915792135e-06,
909
+ "loss": 1.7061,
910
+ "step": 124
911
+ },
912
+ {
913
+ "epoch": 0.4152823920265781,
914
+ "grad_norm": 2.5,
915
+ "learning_rate": 1.869663679800469e-06,
916
+ "loss": 1.7035,
917
+ "step": 125
918
+ },
919
+ {
920
+ "epoch": 0.4186046511627907,
921
+ "grad_norm": 2.5,
922
+ "learning_rate": 1.866939334419515e-06,
923
+ "loss": 1.6081,
924
+ "step": 126
925
+ },
926
+ {
927
+ "epoch": 0.4219269102990033,
928
+ "grad_norm": 2.640625,
929
+ "learning_rate": 1.8641888376168483e-06,
930
+ "loss": 1.5159,
931
+ "step": 127
932
+ },
933
+ {
934
+ "epoch": 0.42524916943521596,
935
+ "grad_norm": 3.3125,
936
+ "learning_rate": 1.8614122723618283e-06,
937
+ "loss": 1.6889,
938
+ "step": 128
939
+ },
940
+ {
941
+ "epoch": 0.42857142857142855,
942
+ "grad_norm": 2.65625,
943
+ "learning_rate": 1.8586097224101765e-06,
944
+ "loss": 1.8138,
945
+ "step": 129
946
+ },
947
+ {
948
+ "epoch": 0.4318936877076412,
949
+ "grad_norm": 2.546875,
950
+ "learning_rate": 1.8557812723014475e-06,
951
+ "loss": 1.6101,
952
+ "step": 130
953
+ },
954
+ {
955
+ "epoch": 0.43521594684385384,
956
+ "grad_norm": 2.6875,
957
+ "learning_rate": 1.852927007356481e-06,
958
+ "loss": 1.7078,
959
+ "step": 131
960
+ },
961
+ {
962
+ "epoch": 0.43853820598006643,
963
+ "grad_norm": 2.5625,
964
+ "learning_rate": 1.8500470136748263e-06,
965
+ "loss": 1.7445,
966
+ "step": 132
967
+ },
968
+ {
969
+ "epoch": 0.4418604651162791,
970
+ "grad_norm": 2.640625,
971
+ "learning_rate": 1.8471413781321465e-06,
972
+ "loss": 1.7156,
973
+ "step": 133
974
+ },
975
+ {
976
+ "epoch": 0.44518272425249167,
977
+ "grad_norm": 4.4375,
978
+ "learning_rate": 1.8442101883775967e-06,
979
+ "loss": 1.6427,
980
+ "step": 134
981
+ },
982
+ {
983
+ "epoch": 0.4485049833887043,
984
+ "grad_norm": 2.59375,
985
+ "learning_rate": 1.8412535328311812e-06,
986
+ "loss": 1.7093,
987
+ "step": 135
988
+ },
989
+ {
990
+ "epoch": 0.45182724252491696,
991
+ "grad_norm": 3.484375,
992
+ "learning_rate": 1.838271500681085e-06,
993
+ "loss": 1.5621,
994
+ "step": 136
995
+ },
996
+ {
997
+ "epoch": 0.45514950166112955,
998
+ "grad_norm": 2.953125,
999
+ "learning_rate": 1.8352641818809846e-06,
1000
+ "loss": 1.8257,
1001
+ "step": 137
1002
+ },
1003
+ {
1004
+ "epoch": 0.4584717607973422,
1005
+ "grad_norm": 2.65625,
1006
+ "learning_rate": 1.8322316671473342e-06,
1007
+ "loss": 1.6519,
1008
+ "step": 138
1009
+ },
1010
+ {
1011
+ "epoch": 0.46179401993355484,
1012
+ "grad_norm": 2.578125,
1013
+ "learning_rate": 1.8291740479566282e-06,
1014
+ "loss": 1.7275,
1015
+ "step": 139
1016
+ },
1017
+ {
1018
+ "epoch": 0.46511627906976744,
1019
+ "grad_norm": 2.53125,
1020
+ "learning_rate": 1.8260914165426438e-06,
1021
+ "loss": 1.6894,
1022
+ "step": 140
1023
+ },
1024
+ {
1025
+ "epoch": 0.4684385382059801,
1026
+ "grad_norm": 2.40625,
1027
+ "learning_rate": 1.8229838658936564e-06,
1028
+ "loss": 1.6601,
1029
+ "step": 141
1030
+ },
1031
+ {
1032
+ "epoch": 0.4717607973421927,
1033
+ "grad_norm": 2.640625,
1034
+ "learning_rate": 1.8198514897496367e-06,
1035
+ "loss": 1.6062,
1036
+ "step": 142
1037
+ },
1038
+ {
1039
+ "epoch": 0.4750830564784053,
1040
+ "grad_norm": 2.5,
1041
+ "learning_rate": 1.8166943825994216e-06,
1042
+ "loss": 1.5786,
1043
+ "step": 143
1044
+ },
1045
+ {
1046
+ "epoch": 0.47840531561461797,
1047
+ "grad_norm": 2.875,
1048
+ "learning_rate": 1.813512639677865e-06,
1049
+ "loss": 1.7378,
1050
+ "step": 144
1051
+ },
1052
+ {
1053
+ "epoch": 0.48172757475083056,
1054
+ "grad_norm": 3.390625,
1055
+ "learning_rate": 1.8103063569629632e-06,
1056
+ "loss": 1.6687,
1057
+ "step": 145
1058
+ },
1059
+ {
1060
+ "epoch": 0.4850498338870432,
1061
+ "grad_norm": 2.53125,
1062
+ "learning_rate": 1.8070756311729625e-06,
1063
+ "loss": 1.714,
1064
+ "step": 146
1065
+ },
1066
+ {
1067
+ "epoch": 0.4883720930232558,
1068
+ "grad_norm": 2.375,
1069
+ "learning_rate": 1.803820559763439e-06,
1070
+ "loss": 1.7529,
1071
+ "step": 147
1072
+ },
1073
+ {
1074
+ "epoch": 0.49169435215946844,
1075
+ "grad_norm": 2.859375,
1076
+ "learning_rate": 1.8005412409243603e-06,
1077
+ "loss": 1.7381,
1078
+ "step": 148
1079
+ },
1080
+ {
1081
+ "epoch": 0.4950166112956811,
1082
+ "grad_norm": 2.71875,
1083
+ "learning_rate": 1.7972377735771232e-06,
1084
+ "loss": 1.6673,
1085
+ "step": 149
1086
+ },
1087
+ {
1088
+ "epoch": 0.4983388704318937,
1089
+ "grad_norm": 2.5,
1090
+ "learning_rate": 1.7939102573715696e-06,
1091
+ "loss": 1.7715,
1092
+ "step": 150
1093
+ },
1094
+ {
1095
+ "epoch": 0.5016611295681063,
1096
+ "grad_norm": 2.359375,
1097
+ "learning_rate": 1.7905587926829812e-06,
1098
+ "loss": 1.7926,
1099
+ "step": 151
1100
+ },
1101
+ {
1102
+ "epoch": 0.5049833887043189,
1103
+ "grad_norm": 2.4375,
1104
+ "learning_rate": 1.78718348060905e-06,
1105
+ "loss": 1.7006,
1106
+ "step": 152
1107
+ },
1108
+ {
1109
+ "epoch": 0.5049833887043189,
1110
+ "eval_loss": 1.690727949142456,
1111
+ "eval_runtime": 18.2527,
1112
+ "eval_samples_per_second": 3.068,
1113
+ "eval_steps_per_second": 1.534,
1114
+ "step": 152
1115
+ },
1116
+ {
1117
+ "epoch": 0.5083056478405316,
1118
+ "grad_norm": 2.375,
1119
+ "learning_rate": 1.7837844229668311e-06,
1120
+ "loss": 1.6887,
1121
+ "step": 153
1122
+ },
1123
+ {
1124
+ "epoch": 0.5116279069767442,
1125
+ "grad_norm": 3.1875,
1126
+ "learning_rate": 1.7803617222896694e-06,
1127
+ "loss": 1.6259,
1128
+ "step": 154
1129
+ },
1130
+ {
1131
+ "epoch": 0.5149501661129569,
1132
+ "grad_norm": 3.0,
1133
+ "learning_rate": 1.776915481824107e-06,
1134
+ "loss": 1.7458,
1135
+ "step": 155
1136
+ },
1137
+ {
1138
+ "epoch": 0.5182724252491694,
1139
+ "grad_norm": 3.359375,
1140
+ "learning_rate": 1.7734458055267699e-06,
1141
+ "loss": 1.5833,
1142
+ "step": 156
1143
+ },
1144
+ {
1145
+ "epoch": 0.521594684385382,
1146
+ "grad_norm": 2.3125,
1147
+ "learning_rate": 1.7699527980612304e-06,
1148
+ "loss": 1.6552,
1149
+ "step": 157
1150
+ },
1151
+ {
1152
+ "epoch": 0.5249169435215947,
1153
+ "grad_norm": 2.34375,
1154
+ "learning_rate": 1.766436564794851e-06,
1155
+ "loss": 1.697,
1156
+ "step": 158
1157
+ },
1158
+ {
1159
+ "epoch": 0.5282392026578073,
1160
+ "grad_norm": 2.3125,
1161
+ "learning_rate": 1.7628972117956066e-06,
1162
+ "loss": 1.6248,
1163
+ "step": 159
1164
+ },
1165
+ {
1166
+ "epoch": 0.53156146179402,
1167
+ "grad_norm": 2.640625,
1168
+ "learning_rate": 1.7593348458288833e-06,
1169
+ "loss": 1.7622,
1170
+ "step": 160
1171
+ },
1172
+ {
1173
+ "epoch": 0.5348837209302325,
1174
+ "grad_norm": 3.296875,
1175
+ "learning_rate": 1.7557495743542582e-06,
1176
+ "loss": 1.7791,
1177
+ "step": 161
1178
+ },
1179
+ {
1180
+ "epoch": 0.5382059800664452,
1181
+ "grad_norm": 2.84375,
1182
+ "learning_rate": 1.7521415055222592e-06,
1183
+ "loss": 1.7372,
1184
+ "step": 162
1185
+ },
1186
+ {
1187
+ "epoch": 0.5415282392026578,
1188
+ "grad_norm": 6.625,
1189
+ "learning_rate": 1.748510748171101e-06,
1190
+ "loss": 1.6665,
1191
+ "step": 163
1192
+ },
1193
+ {
1194
+ "epoch": 0.5448504983388704,
1195
+ "grad_norm": 2.953125,
1196
+ "learning_rate": 1.744857411823403e-06,
1197
+ "loss": 1.5668,
1198
+ "step": 164
1199
+ },
1200
+ {
1201
+ "epoch": 0.5481727574750831,
1202
+ "grad_norm": 3.546875,
1203
+ "learning_rate": 1.7411816066828849e-06,
1204
+ "loss": 1.5494,
1205
+ "step": 165
1206
+ },
1207
+ {
1208
+ "epoch": 0.5514950166112956,
1209
+ "grad_norm": 3.578125,
1210
+ "learning_rate": 1.7374834436310426e-06,
1211
+ "loss": 1.6968,
1212
+ "step": 166
1213
+ },
1214
+ {
1215
+ "epoch": 0.5548172757475083,
1216
+ "grad_norm": 2.671875,
1217
+ "learning_rate": 1.7337630342238039e-06,
1218
+ "loss": 1.5636,
1219
+ "step": 167
1220
+ },
1221
+ {
1222
+ "epoch": 0.5581395348837209,
1223
+ "grad_norm": 2.78125,
1224
+ "learning_rate": 1.7300204906881626e-06,
1225
+ "loss": 1.7675,
1226
+ "step": 168
1227
+ },
1228
+ {
1229
+ "epoch": 0.5614617940199336,
1230
+ "grad_norm": 2.34375,
1231
+ "learning_rate": 1.7262559259187931e-06,
1232
+ "loss": 1.7314,
1233
+ "step": 169
1234
+ },
1235
+ {
1236
+ "epoch": 0.5647840531561462,
1237
+ "grad_norm": 4.25,
1238
+ "learning_rate": 1.7224694534746464e-06,
1239
+ "loss": 1.8208,
1240
+ "step": 170
1241
+ },
1242
+ {
1243
+ "epoch": 0.5681063122923588,
1244
+ "grad_norm": 2.484375,
1245
+ "learning_rate": 1.7186611875755225e-06,
1246
+ "loss": 1.7366,
1247
+ "step": 171
1248
+ },
1249
+ {
1250
+ "epoch": 0.5714285714285714,
1251
+ "grad_norm": 2.515625,
1252
+ "learning_rate": 1.7148312430986262e-06,
1253
+ "loss": 1.6048,
1254
+ "step": 172
1255
+ },
1256
+ {
1257
+ "epoch": 0.574750830564784,
1258
+ "grad_norm": 2.671875,
1259
+ "learning_rate": 1.7109797355751016e-06,
1260
+ "loss": 1.7103,
1261
+ "step": 173
1262
+ },
1263
+ {
1264
+ "epoch": 0.5780730897009967,
1265
+ "grad_norm": 2.65625,
1266
+ "learning_rate": 1.7071067811865474e-06,
1267
+ "loss": 1.6318,
1268
+ "step": 174
1269
+ },
1270
+ {
1271
+ "epoch": 0.5813953488372093,
1272
+ "grad_norm": 2.21875,
1273
+ "learning_rate": 1.7032124967615109e-06,
1274
+ "loss": 1.5906,
1275
+ "step": 175
1276
+ },
1277
+ {
1278
+ "epoch": 0.584717607973422,
1279
+ "grad_norm": 2.4375,
1280
+ "learning_rate": 1.6992969997719657e-06,
1281
+ "loss": 1.5532,
1282
+ "step": 176
1283
+ },
1284
+ {
1285
+ "epoch": 0.5880398671096345,
1286
+ "grad_norm": 2.453125,
1287
+ "learning_rate": 1.6953604083297663e-06,
1288
+ "loss": 1.6565,
1289
+ "step": 177
1290
+ },
1291
+ {
1292
+ "epoch": 0.5913621262458472,
1293
+ "grad_norm": 3.03125,
1294
+ "learning_rate": 1.6914028411830877e-06,
1295
+ "loss": 1.6321,
1296
+ "step": 178
1297
+ },
1298
+ {
1299
+ "epoch": 0.5946843853820598,
1300
+ "grad_norm": 3.484375,
1301
+ "learning_rate": 1.6874244177128393e-06,
1302
+ "loss": 1.63,
1303
+ "step": 179
1304
+ },
1305
+ {
1306
+ "epoch": 0.5980066445182725,
1307
+ "grad_norm": 2.421875,
1308
+ "learning_rate": 1.6834252579290688e-06,
1309
+ "loss": 1.7393,
1310
+ "step": 180
1311
+ },
1312
+ {
1313
+ "epoch": 0.6013289036544851,
1314
+ "grad_norm": 2.578125,
1315
+ "learning_rate": 1.679405482467338e-06,
1316
+ "loss": 1.6178,
1317
+ "step": 181
1318
+ },
1319
+ {
1320
+ "epoch": 0.6046511627906976,
1321
+ "grad_norm": 2.9375,
1322
+ "learning_rate": 1.6753652125850862e-06,
1323
+ "loss": 1.7313,
1324
+ "step": 182
1325
+ },
1326
+ {
1327
+ "epoch": 0.6079734219269103,
1328
+ "grad_norm": 4.53125,
1329
+ "learning_rate": 1.6713045701579702e-06,
1330
+ "loss": 1.5631,
1331
+ "step": 183
1332
+ },
1333
+ {
1334
+ "epoch": 0.6112956810631229,
1335
+ "grad_norm": 2.640625,
1336
+ "learning_rate": 1.6672236776761906e-06,
1337
+ "loss": 1.5669,
1338
+ "step": 184
1339
+ },
1340
+ {
1341
+ "epoch": 0.6146179401993356,
1342
+ "grad_norm": 2.59375,
1343
+ "learning_rate": 1.6631226582407952e-06,
1344
+ "loss": 1.753,
1345
+ "step": 185
1346
+ },
1347
+ {
1348
+ "epoch": 0.6179401993355482,
1349
+ "grad_norm": 2.546875,
1350
+ "learning_rate": 1.6590016355599652e-06,
1351
+ "loss": 1.5346,
1352
+ "step": 186
1353
+ },
1354
+ {
1355
+ "epoch": 0.6212624584717608,
1356
+ "grad_norm": 2.203125,
1357
+ "learning_rate": 1.6548607339452852e-06,
1358
+ "loss": 1.6781,
1359
+ "step": 187
1360
+ },
1361
+ {
1362
+ "epoch": 0.6245847176079734,
1363
+ "grad_norm": 2.3125,
1364
+ "learning_rate": 1.6507000783079911e-06,
1365
+ "loss": 1.5406,
1366
+ "step": 188
1367
+ },
1368
+ {
1369
+ "epoch": 0.627906976744186,
1370
+ "grad_norm": 3.09375,
1371
+ "learning_rate": 1.6465197941552053e-06,
1372
+ "loss": 1.6581,
1373
+ "step": 189
1374
+ },
1375
+ {
1376
+ "epoch": 0.6312292358803987,
1377
+ "grad_norm": 3.140625,
1378
+ "learning_rate": 1.642320007586147e-06,
1379
+ "loss": 1.7276,
1380
+ "step": 190
1381
+ },
1382
+ {
1383
+ "epoch": 0.6312292358803987,
1384
+ "eval_loss": 1.6873770952224731,
1385
+ "eval_runtime": 18.1896,
1386
+ "eval_samples_per_second": 3.079,
1387
+ "eval_steps_per_second": 1.539,
1388
+ "step": 190
1389
+ },
1390
+ {
1391
+ "epoch": 0.6345514950166113,
1392
+ "grad_norm": 2.671875,
1393
+ "learning_rate": 1.6381008452883307e-06,
1394
+ "loss": 1.7267,
1395
+ "step": 191
1396
+ },
1397
+ {
1398
+ "epoch": 0.6378737541528239,
1399
+ "grad_norm": 2.4375,
1400
+ "learning_rate": 1.6338624345337451e-06,
1401
+ "loss": 1.7332,
1402
+ "step": 192
1403
+ },
1404
+ {
1405
+ "epoch": 0.6411960132890365,
1406
+ "grad_norm": 2.4375,
1407
+ "learning_rate": 1.6296049031750111e-06,
1408
+ "loss": 1.8304,
1409
+ "step": 193
1410
+ },
1411
+ {
1412
+ "epoch": 0.6445182724252492,
1413
+ "grad_norm": 2.34375,
1414
+ "learning_rate": 1.6253283796415293e-06,
1415
+ "loss": 1.659,
1416
+ "step": 194
1417
+ },
1418
+ {
1419
+ "epoch": 0.6478405315614618,
1420
+ "grad_norm": 2.546875,
1421
+ "learning_rate": 1.6210329929356017e-06,
1422
+ "loss": 1.5788,
1423
+ "step": 195
1424
+ },
1425
+ {
1426
+ "epoch": 0.6511627906976745,
1427
+ "grad_norm": 2.34375,
1428
+ "learning_rate": 1.6167188726285433e-06,
1429
+ "loss": 1.6042,
1430
+ "step": 196
1431
+ },
1432
+ {
1433
+ "epoch": 0.654485049833887,
1434
+ "grad_norm": 2.46875,
1435
+ "learning_rate": 1.6123861488567708e-06,
1436
+ "loss": 1.5796,
1437
+ "step": 197
1438
+ },
1439
+ {
1440
+ "epoch": 0.6578073089700996,
1441
+ "grad_norm": 2.4375,
1442
+ "learning_rate": 1.608034952317881e-06,
1443
+ "loss": 1.7463,
1444
+ "step": 198
1445
+ },
1446
+ {
1447
+ "epoch": 0.6611295681063123,
1448
+ "grad_norm": 2.3125,
1449
+ "learning_rate": 1.603665414266704e-06,
1450
+ "loss": 1.563,
1451
+ "step": 199
1452
+ },
1453
+ {
1454
+ "epoch": 0.6644518272425249,
1455
+ "grad_norm": 2.25,
1456
+ "learning_rate": 1.5992776665113468e-06,
1457
+ "loss": 1.6348,
1458
+ "step": 200
1459
+ },
1460
+ {
1461
+ "epoch": 0.6677740863787376,
1462
+ "grad_norm": 2.21875,
1463
+ "learning_rate": 1.594871841409216e-06,
1464
+ "loss": 1.6304,
1465
+ "step": 201
1466
+ },
1467
+ {
1468
+ "epoch": 0.6710963455149501,
1469
+ "grad_norm": 2.328125,
1470
+ "learning_rate": 1.5904480718630252e-06,
1471
+ "loss": 1.7017,
1472
+ "step": 202
1473
+ },
1474
+ {
1475
+ "epoch": 0.6744186046511628,
1476
+ "grad_norm": 2.328125,
1477
+ "learning_rate": 1.5860064913167862e-06,
1478
+ "loss": 1.5088,
1479
+ "step": 203
1480
+ },
1481
+ {
1482
+ "epoch": 0.6777408637873754,
1483
+ "grad_norm": 2.46875,
1484
+ "learning_rate": 1.581547233751784e-06,
1485
+ "loss": 1.6744,
1486
+ "step": 204
1487
+ },
1488
+ {
1489
+ "epoch": 0.6810631229235881,
1490
+ "grad_norm": 2.421875,
1491
+ "learning_rate": 1.577070433682535e-06,
1492
+ "loss": 1.7003,
1493
+ "step": 205
1494
+ },
1495
+ {
1496
+ "epoch": 0.6843853820598007,
1497
+ "grad_norm": 2.34375,
1498
+ "learning_rate": 1.572576226152729e-06,
1499
+ "loss": 1.6027,
1500
+ "step": 206
1501
+ },
1502
+ {
1503
+ "epoch": 0.6877076411960132,
1504
+ "grad_norm": 2.828125,
1505
+ "learning_rate": 1.5680647467311555e-06,
1506
+ "loss": 1.6389,
1507
+ "step": 207
1508
+ },
1509
+ {
1510
+ "epoch": 0.6910299003322259,
1511
+ "grad_norm": 2.703125,
1512
+ "learning_rate": 1.5635361315076154e-06,
1513
+ "loss": 1.5774,
1514
+ "step": 208
1515
+ },
1516
+ {
1517
+ "epoch": 0.6943521594684385,
1518
+ "grad_norm": 2.453125,
1519
+ "learning_rate": 1.5589905170888133e-06,
1520
+ "loss": 1.5609,
1521
+ "step": 209
1522
+ },
1523
+ {
1524
+ "epoch": 0.6976744186046512,
1525
+ "grad_norm": 3.90625,
1526
+ "learning_rate": 1.5544280405942406e-06,
1527
+ "loss": 1.6494,
1528
+ "step": 210
1529
+ },
1530
+ {
1531
+ "epoch": 0.7009966777408638,
1532
+ "grad_norm": 2.25,
1533
+ "learning_rate": 1.5498488396520347e-06,
1534
+ "loss": 1.7061,
1535
+ "step": 211
1536
+ },
1537
+ {
1538
+ "epoch": 0.7043189368770764,
1539
+ "grad_norm": 2.96875,
1540
+ "learning_rate": 1.5452530523948321e-06,
1541
+ "loss": 1.6194,
1542
+ "step": 212
1543
+ },
1544
+ {
1545
+ "epoch": 0.707641196013289,
1546
+ "grad_norm": 3.28125,
1547
+ "learning_rate": 1.5406408174555977e-06,
1548
+ "loss": 1.8289,
1549
+ "step": 213
1550
+ },
1551
+ {
1552
+ "epoch": 0.7109634551495017,
1553
+ "grad_norm": 2.421875,
1554
+ "learning_rate": 1.5360122739634442e-06,
1555
+ "loss": 1.793,
1556
+ "step": 214
1557
+ },
1558
+ {
1559
+ "epoch": 0.7142857142857143,
1560
+ "grad_norm": 2.21875,
1561
+ "learning_rate": 1.5313675615394372e-06,
1562
+ "loss": 1.7017,
1563
+ "step": 215
1564
+ },
1565
+ {
1566
+ "epoch": 0.717607973421927,
1567
+ "grad_norm": 2.78125,
1568
+ "learning_rate": 1.52670682029238e-06,
1569
+ "loss": 1.5668,
1570
+ "step": 216
1571
+ },
1572
+ {
1573
+ "epoch": 0.7209302325581395,
1574
+ "grad_norm": 2.46875,
1575
+ "learning_rate": 1.5220301908145903e-06,
1576
+ "loss": 1.6355,
1577
+ "step": 217
1578
+ },
1579
+ {
1580
+ "epoch": 0.7242524916943521,
1581
+ "grad_norm": 2.265625,
1582
+ "learning_rate": 1.5173378141776567e-06,
1583
+ "loss": 1.5903,
1584
+ "step": 218
1585
+ },
1586
+ {
1587
+ "epoch": 0.7275747508305648,
1588
+ "grad_norm": 2.328125,
1589
+ "learning_rate": 1.5126298319281855e-06,
1590
+ "loss": 1.6904,
1591
+ "step": 219
1592
+ },
1593
+ {
1594
+ "epoch": 0.7308970099667774,
1595
+ "grad_norm": 2.59375,
1596
+ "learning_rate": 1.5079063860835292e-06,
1597
+ "loss": 1.603,
1598
+ "step": 220
1599
+ },
1600
+ {
1601
+ "epoch": 0.7342192691029901,
1602
+ "grad_norm": 2.453125,
1603
+ "learning_rate": 1.5031676191275037e-06,
1604
+ "loss": 1.5458,
1605
+ "step": 221
1606
+ },
1607
+ {
1608
+ "epoch": 0.7375415282392026,
1609
+ "grad_norm": 3.578125,
1610
+ "learning_rate": 1.498413674006089e-06,
1611
+ "loss": 1.8769,
1612
+ "step": 222
1613
+ },
1614
+ {
1615
+ "epoch": 0.7408637873754153,
1616
+ "grad_norm": 2.90625,
1617
+ "learning_rate": 1.4936446941231185e-06,
1618
+ "loss": 1.6354,
1619
+ "step": 223
1620
+ },
1621
+ {
1622
+ "epoch": 0.7441860465116279,
1623
+ "grad_norm": 2.28125,
1624
+ "learning_rate": 1.4888608233359518e-06,
1625
+ "loss": 1.6826,
1626
+ "step": 224
1627
+ },
1628
+ {
1629
+ "epoch": 0.7475083056478405,
1630
+ "grad_norm": 2.796875,
1631
+ "learning_rate": 1.4840622059511374e-06,
1632
+ "loss": 1.7523,
1633
+ "step": 225
1634
+ },
1635
+ {
1636
+ "epoch": 0.7508305647840532,
1637
+ "grad_norm": 2.484375,
1638
+ "learning_rate": 1.4792489867200568e-06,
1639
+ "loss": 1.7502,
1640
+ "step": 226
1641
+ },
1642
+ {
1643
+ "epoch": 0.7541528239202658,
1644
+ "grad_norm": 2.421875,
1645
+ "learning_rate": 1.4744213108345602e-06,
1646
+ "loss": 1.6862,
1647
+ "step": 227
1648
+ },
1649
+ {
1650
+ "epoch": 0.7574750830564784,
1651
+ "grad_norm": 3.140625,
1652
+ "learning_rate": 1.4695793239225863e-06,
1653
+ "loss": 1.7042,
1654
+ "step": 228
1655
+ },
1656
+ {
1657
+ "epoch": 0.7574750830564784,
1658
+ "eval_loss": 1.6847137212753296,
1659
+ "eval_runtime": 18.2782,
1660
+ "eval_samples_per_second": 3.064,
1661
+ "eval_steps_per_second": 1.532,
1662
+ "step": 228
1663
+ },
1664
+ {
1665
+ "epoch": 0.760797342192691,
1666
+ "grad_norm": 2.578125,
1667
+ "learning_rate": 1.4647231720437684e-06,
1668
+ "loss": 1.7811,
1669
+ "step": 229
1670
+ },
1671
+ {
1672
+ "epoch": 0.7641196013289037,
1673
+ "grad_norm": 2.1875,
1674
+ "learning_rate": 1.4598530016850301e-06,
1675
+ "loss": 1.6068,
1676
+ "step": 230
1677
+ },
1678
+ {
1679
+ "epoch": 0.7674418604651163,
1680
+ "grad_norm": 2.546875,
1681
+ "learning_rate": 1.454968959756165e-06,
1682
+ "loss": 1.7465,
1683
+ "step": 231
1684
+ },
1685
+ {
1686
+ "epoch": 0.770764119601329,
1687
+ "grad_norm": 3.125,
1688
+ "learning_rate": 1.450071193585406e-06,
1689
+ "loss": 1.5262,
1690
+ "step": 232
1691
+ },
1692
+ {
1693
+ "epoch": 0.7740863787375415,
1694
+ "grad_norm": 2.4375,
1695
+ "learning_rate": 1.4451598509149806e-06,
1696
+ "loss": 1.6686,
1697
+ "step": 233
1698
+ },
1699
+ {
1700
+ "epoch": 0.7774086378737541,
1701
+ "grad_norm": 2.328125,
1702
+ "learning_rate": 1.4402350798966554e-06,
1703
+ "loss": 1.6948,
1704
+ "step": 234
1705
+ },
1706
+ {
1707
+ "epoch": 0.7807308970099668,
1708
+ "grad_norm": 2.609375,
1709
+ "learning_rate": 1.435297029087265e-06,
1710
+ "loss": 1.7236,
1711
+ "step": 235
1712
+ },
1713
+ {
1714
+ "epoch": 0.7840531561461794,
1715
+ "grad_norm": 2.796875,
1716
+ "learning_rate": 1.4303458474442322e-06,
1717
+ "loss": 1.8507,
1718
+ "step": 236
1719
+ },
1720
+ {
1721
+ "epoch": 0.7873754152823921,
1722
+ "grad_norm": 2.796875,
1723
+ "learning_rate": 1.4253816843210748e-06,
1724
+ "loss": 1.6888,
1725
+ "step": 237
1726
+ },
1727
+ {
1728
+ "epoch": 0.7906976744186046,
1729
+ "grad_norm": 2.3125,
1730
+ "learning_rate": 1.4204046894629e-06,
1731
+ "loss": 1.6293,
1732
+ "step": 238
1733
+ },
1734
+ {
1735
+ "epoch": 0.7940199335548173,
1736
+ "grad_norm": 2.53125,
1737
+ "learning_rate": 1.4154150130018865e-06,
1738
+ "loss": 1.6534,
1739
+ "step": 239
1740
+ },
1741
+ {
1742
+ "epoch": 0.7973421926910299,
1743
+ "grad_norm": 2.65625,
1744
+ "learning_rate": 1.410412805452757e-06,
1745
+ "loss": 1.6228,
1746
+ "step": 240
1747
+ },
1748
+ {
1749
+ "epoch": 0.8006644518272426,
1750
+ "grad_norm": 2.34375,
1751
+ "learning_rate": 1.4053982177082366e-06,
1752
+ "loss": 1.7322,
1753
+ "step": 241
1754
+ },
1755
+ {
1756
+ "epoch": 0.8039867109634552,
1757
+ "grad_norm": 3.1875,
1758
+ "learning_rate": 1.400371401034503e-06,
1759
+ "loss": 1.6871,
1760
+ "step": 242
1761
+ },
1762
+ {
1763
+ "epoch": 0.8073089700996677,
1764
+ "grad_norm": 2.640625,
1765
+ "learning_rate": 1.3953325070666213e-06,
1766
+ "loss": 1.6272,
1767
+ "step": 243
1768
+ },
1769
+ {
1770
+ "epoch": 0.8106312292358804,
1771
+ "grad_norm": 2.578125,
1772
+ "learning_rate": 1.3902816878039713e-06,
1773
+ "loss": 1.5389,
1774
+ "step": 244
1775
+ },
1776
+ {
1777
+ "epoch": 0.813953488372093,
1778
+ "grad_norm": 2.375,
1779
+ "learning_rate": 1.3852190956056622e-06,
1780
+ "loss": 1.6966,
1781
+ "step": 245
1782
+ },
1783
+ {
1784
+ "epoch": 0.8172757475083057,
1785
+ "grad_norm": 2.203125,
1786
+ "learning_rate": 1.3801448831859362e-06,
1787
+ "loss": 1.7712,
1788
+ "step": 246
1789
+ },
1790
+ {
1791
+ "epoch": 0.8205980066445183,
1792
+ "grad_norm": 2.3125,
1793
+ "learning_rate": 1.3750592036095619e-06,
1794
+ "loss": 1.6111,
1795
+ "step": 247
1796
+ },
1797
+ {
1798
+ "epoch": 0.8239202657807309,
1799
+ "grad_norm": 2.75,
1800
+ "learning_rate": 1.3699622102872174e-06,
1801
+ "loss": 1.6272,
1802
+ "step": 248
1803
+ },
1804
+ {
1805
+ "epoch": 0.8272425249169435,
1806
+ "grad_norm": 2.25,
1807
+ "learning_rate": 1.3648540569708635e-06,
1808
+ "loss": 1.8452,
1809
+ "step": 249
1810
+ },
1811
+ {
1812
+ "epoch": 0.8305647840531561,
1813
+ "grad_norm": 2.328125,
1814
+ "learning_rate": 1.359734897749103e-06,
1815
+ "loss": 1.6621,
1816
+ "step": 250
1817
+ },
1818
+ {
1819
+ "epoch": 0.8338870431893688,
1820
+ "grad_norm": 2.296875,
1821
+ "learning_rate": 1.3546048870425354e-06,
1822
+ "loss": 1.6064,
1823
+ "step": 251
1824
+ },
1825
+ {
1826
+ "epoch": 0.8372093023255814,
1827
+ "grad_norm": 3.53125,
1828
+ "learning_rate": 1.3494641795990985e-06,
1829
+ "loss": 1.6591,
1830
+ "step": 252
1831
+ },
1832
+ {
1833
+ "epoch": 0.840531561461794,
1834
+ "grad_norm": 2.296875,
1835
+ "learning_rate": 1.3443129304893973e-06,
1836
+ "loss": 1.6282,
1837
+ "step": 253
1838
+ },
1839
+ {
1840
+ "epoch": 0.8438538205980066,
1841
+ "grad_norm": 2.34375,
1842
+ "learning_rate": 1.339151295102031e-06,
1843
+ "loss": 1.5982,
1844
+ "step": 254
1845
+ },
1846
+ {
1847
+ "epoch": 0.8471760797342193,
1848
+ "grad_norm": 2.4375,
1849
+ "learning_rate": 1.3339794291389012e-06,
1850
+ "loss": 1.6695,
1851
+ "step": 255
1852
+ },
1853
+ {
1854
+ "epoch": 0.8504983388704319,
1855
+ "grad_norm": 2.234375,
1856
+ "learning_rate": 1.3287974886105188e-06,
1857
+ "loss": 1.6515,
1858
+ "step": 256
1859
+ },
1860
+ {
1861
+ "epoch": 0.8538205980066446,
1862
+ "grad_norm": 2.453125,
1863
+ "learning_rate": 1.3236056298312956e-06,
1864
+ "loss": 1.595,
1865
+ "step": 257
1866
+ },
1867
+ {
1868
+ "epoch": 0.8571428571428571,
1869
+ "grad_norm": 2.515625,
1870
+ "learning_rate": 1.3184040094148289e-06,
1871
+ "loss": 1.653,
1872
+ "step": 258
1873
+ },
1874
+ {
1875
+ "epoch": 0.8604651162790697,
1876
+ "grad_norm": 3.0625,
1877
+ "learning_rate": 1.313192784269179e-06,
1878
+ "loss": 1.6397,
1879
+ "step": 259
1880
+ },
1881
+ {
1882
+ "epoch": 0.8637873754152824,
1883
+ "grad_norm": 2.359375,
1884
+ "learning_rate": 1.3079721115921346e-06,
1885
+ "loss": 1.7935,
1886
+ "step": 260
1887
+ },
1888
+ {
1889
+ "epoch": 0.867109634551495,
1890
+ "grad_norm": 2.75,
1891
+ "learning_rate": 1.302742148866472e-06,
1892
+ "loss": 1.6791,
1893
+ "step": 261
1894
+ },
1895
+ {
1896
+ "epoch": 0.8704318936877077,
1897
+ "grad_norm": 2.546875,
1898
+ "learning_rate": 1.297503053855203e-06,
1899
+ "loss": 1.7556,
1900
+ "step": 262
1901
+ },
1902
+ {
1903
+ "epoch": 0.8737541528239202,
1904
+ "grad_norm": 2.671875,
1905
+ "learning_rate": 1.2922549845968172e-06,
1906
+ "loss": 1.6982,
1907
+ "step": 263
1908
+ },
1909
+ {
1910
+ "epoch": 0.8770764119601329,
1911
+ "grad_norm": 2.390625,
1912
+ "learning_rate": 1.2869980994005146e-06,
1913
+ "loss": 1.8154,
1914
+ "step": 264
1915
+ },
1916
+ {
1917
+ "epoch": 0.8803986710963455,
1918
+ "grad_norm": 2.421875,
1919
+ "learning_rate": 1.2817325568414297e-06,
1920
+ "loss": 1.6978,
1921
+ "step": 265
1922
+ },
1923
+ {
1924
+ "epoch": 0.8837209302325582,
1925
+ "grad_norm": 2.859375,
1926
+ "learning_rate": 1.2764585157558485e-06,
1927
+ "loss": 1.5575,
1928
+ "step": 266
1929
+ },
1930
+ {
1931
+ "epoch": 0.8837209302325582,
1932
+ "eval_loss": 1.6824702024459839,
1933
+ "eval_runtime": 18.5874,
1934
+ "eval_samples_per_second": 3.013,
1935
+ "eval_steps_per_second": 1.506,
1936
+ "step": 266
1937
+ },
1938
+ {
1939
+ "epoch": 0.8870431893687708,
1940
+ "grad_norm": 2.203125,
1941
+ "learning_rate": 1.271176135236417e-06,
1942
+ "loss": 1.7344,
1943
+ "step": 267
1944
+ },
1945
+ {
1946
+ "epoch": 0.8903654485049833,
1947
+ "grad_norm": 3.984375,
1948
+ "learning_rate": 1.2658855746273418e-06,
1949
+ "loss": 1.6611,
1950
+ "step": 268
1951
+ },
1952
+ {
1953
+ "epoch": 0.893687707641196,
1954
+ "grad_norm": 2.828125,
1955
+ "learning_rate": 1.2605869935195844e-06,
1956
+ "loss": 1.8148,
1957
+ "step": 269
1958
+ },
1959
+ {
1960
+ "epoch": 0.8970099667774086,
1961
+ "grad_norm": 3.3125,
1962
+ "learning_rate": 1.2552805517460456e-06,
1963
+ "loss": 1.7148,
1964
+ "step": 270
1965
+ },
1966
+ {
1967
+ "epoch": 0.9003322259136213,
1968
+ "grad_norm": 3.453125,
1969
+ "learning_rate": 1.2499664093767458e-06,
1970
+ "loss": 1.7084,
1971
+ "step": 271
1972
+ },
1973
+ {
1974
+ "epoch": 0.9036544850498339,
1975
+ "grad_norm": 2.546875,
1976
+ "learning_rate": 1.2446447267139945e-06,
1977
+ "loss": 1.6583,
1978
+ "step": 272
1979
+ },
1980
+ {
1981
+ "epoch": 0.9069767441860465,
1982
+ "grad_norm": 2.515625,
1983
+ "learning_rate": 1.2393156642875577e-06,
1984
+ "loss": 1.6741,
1985
+ "step": 273
1986
+ },
1987
+ {
1988
+ "epoch": 0.9102990033222591,
1989
+ "grad_norm": 2.390625,
1990
+ "learning_rate": 1.2339793828498118e-06,
1991
+ "loss": 1.6266,
1992
+ "step": 274
1993
+ },
1994
+ {
1995
+ "epoch": 0.9136212624584718,
1996
+ "grad_norm": 2.375,
1997
+ "learning_rate": 1.2286360433708973e-06,
1998
+ "loss": 1.7373,
1999
+ "step": 275
2000
+ },
2001
+ {
2002
+ "epoch": 0.9169435215946844,
2003
+ "grad_norm": 3.03125,
2004
+ "learning_rate": 1.2232858070338617e-06,
2005
+ "loss": 1.7677,
2006
+ "step": 276
2007
+ },
2008
+ {
2009
+ "epoch": 0.920265780730897,
2010
+ "grad_norm": 2.546875,
2011
+ "learning_rate": 1.2179288352297982e-06,
2012
+ "loss": 1.5966,
2013
+ "step": 277
2014
+ },
2015
+ {
2016
+ "epoch": 0.9235880398671097,
2017
+ "grad_norm": 2.65625,
2018
+ "learning_rate": 1.2125652895529766e-06,
2019
+ "loss": 1.7274,
2020
+ "step": 278
2021
+ },
2022
+ {
2023
+ "epoch": 0.9269102990033222,
2024
+ "grad_norm": 3.265625,
2025
+ "learning_rate": 1.207195331795969e-06,
2026
+ "loss": 1.7407,
2027
+ "step": 279
2028
+ },
2029
+ {
2030
+ "epoch": 0.9302325581395349,
2031
+ "grad_norm": 3.859375,
2032
+ "learning_rate": 1.2018191239447696e-06,
2033
+ "loss": 1.7359,
2034
+ "step": 280
2035
+ },
2036
+ {
2037
+ "epoch": 0.9335548172757475,
2038
+ "grad_norm": 3.203125,
2039
+ "learning_rate": 1.1964368281739076e-06,
2040
+ "loss": 1.6033,
2041
+ "step": 281
2042
+ },
2043
+ {
2044
+ "epoch": 0.9368770764119602,
2045
+ "grad_norm": 2.515625,
2046
+ "learning_rate": 1.191048606841557e-06,
2047
+ "loss": 1.6048,
2048
+ "step": 282
2049
+ },
2050
+ {
2051
+ "epoch": 0.9401993355481728,
2052
+ "grad_norm": 2.875,
2053
+ "learning_rate": 1.1856546224846353e-06,
2054
+ "loss": 1.6088,
2055
+ "step": 283
2056
+ },
2057
+ {
2058
+ "epoch": 0.9435215946843853,
2059
+ "grad_norm": 2.875,
2060
+ "learning_rate": 1.1802550378139057e-06,
2061
+ "loss": 1.7292,
2062
+ "step": 284
2063
+ },
2064
+ {
2065
+ "epoch": 0.946843853820598,
2066
+ "grad_norm": 2.3125,
2067
+ "learning_rate": 1.1748500157090644e-06,
2068
+ "loss": 1.6852,
2069
+ "step": 285
2070
+ },
2071
+ {
2072
+ "epoch": 0.9501661129568106,
2073
+ "grad_norm": 2.703125,
2074
+ "learning_rate": 1.1694397192138294e-06,
2075
+ "loss": 1.6983,
2076
+ "step": 286
2077
+ },
2078
+ {
2079
+ "epoch": 0.9534883720930233,
2080
+ "grad_norm": 2.53125,
2081
+ "learning_rate": 1.1640243115310217e-06,
2082
+ "loss": 1.6619,
2083
+ "step": 287
2084
+ },
2085
+ {
2086
+ "epoch": 0.9568106312292359,
2087
+ "grad_norm": 2.21875,
2088
+ "learning_rate": 1.1586039560176432e-06,
2089
+ "loss": 1.7379,
2090
+ "step": 288
2091
+ },
2092
+ {
2093
+ "epoch": 0.9601328903654485,
2094
+ "grad_norm": 2.53125,
2095
+ "learning_rate": 1.1531788161799478e-06,
2096
+ "loss": 1.6973,
2097
+ "step": 289
2098
+ },
2099
+ {
2100
+ "epoch": 0.9634551495016611,
2101
+ "grad_norm": 2.390625,
2102
+ "learning_rate": 1.1477490556685091e-06,
2103
+ "loss": 1.66,
2104
+ "step": 290
2105
+ },
2106
+ {
2107
+ "epoch": 0.9667774086378738,
2108
+ "grad_norm": 2.65625,
2109
+ "learning_rate": 1.1423148382732853e-06,
2110
+ "loss": 1.6192,
2111
+ "step": 291
2112
+ },
2113
+ {
2114
+ "epoch": 0.9700996677740864,
2115
+ "grad_norm": 2.3125,
2116
+ "learning_rate": 1.1368763279186763e-06,
2117
+ "loss": 1.5784,
2118
+ "step": 292
2119
+ },
2120
+ {
2121
+ "epoch": 0.973421926910299,
2122
+ "grad_norm": 2.421875,
2123
+ "learning_rate": 1.1314336886585816e-06,
2124
+ "loss": 1.6717,
2125
+ "step": 293
2126
+ },
2127
+ {
2128
+ "epoch": 0.9767441860465116,
2129
+ "grad_norm": 2.40625,
2130
+ "learning_rate": 1.1259870846714488e-06,
2131
+ "loss": 1.7301,
2132
+ "step": 294
2133
+ },
2134
+ {
2135
+ "epoch": 0.9800664451827242,
2136
+ "grad_norm": 2.25,
2137
+ "learning_rate": 1.1205366802553228e-06,
2138
+ "loss": 1.6053,
2139
+ "step": 295
2140
+ },
2141
+ {
2142
+ "epoch": 0.9833887043189369,
2143
+ "grad_norm": 2.359375,
2144
+ "learning_rate": 1.1150826398228903e-06,
2145
+ "loss": 1.5935,
2146
+ "step": 296
2147
+ },
2148
+ {
2149
+ "epoch": 0.9867109634551495,
2150
+ "grad_norm": 2.34375,
2151
+ "learning_rate": 1.1096251278965172e-06,
2152
+ "loss": 1.6469,
2153
+ "step": 297
2154
+ },
2155
+ {
2156
+ "epoch": 0.9900332225913622,
2157
+ "grad_norm": 2.203125,
2158
+ "learning_rate": 1.10416430910329e-06,
2159
+ "loss": 1.5799,
2160
+ "step": 298
2161
+ },
2162
+ {
2163
+ "epoch": 0.9933554817275747,
2164
+ "grad_norm": 2.578125,
2165
+ "learning_rate": 1.0987003481700454e-06,
2166
+ "loss": 1.5128,
2167
+ "step": 299
2168
+ },
2169
+ {
2170
+ "epoch": 0.9966777408637874,
2171
+ "grad_norm": 2.453125,
2172
+ "learning_rate": 1.0932334099184058e-06,
2173
+ "loss": 1.6065,
2174
+ "step": 300
2175
+ },
2176
+ {
2177
+ "epoch": 1.0,
2178
+ "grad_norm": 2.171875,
2179
+ "learning_rate": 1.087763659259803e-06,
2180
+ "loss": 1.6959,
2181
+ "step": 301
2182
+ }
2183
+ ],
2184
+ "logging_steps": 1,
2185
+ "max_steps": 602,
2186
+ "num_input_tokens_seen": 0,
2187
+ "num_train_epochs": 2,
2188
+ "save_steps": 301,
2189
+ "stateful_callbacks": {
2190
+ "TrainerControl": {
2191
+ "args": {
2192
+ "should_epoch_stop": false,
2193
+ "should_evaluate": false,
2194
+ "should_log": false,
2195
+ "should_save": true,
2196
+ "should_training_stop": false
2197
+ },
2198
+ "attributes": {}
2199
+ }
2200
+ },
2201
+ "total_flos": 2.7403890105463603e+18,
2202
+ "train_batch_size": 2,
2203
+ "trial_name": null,
2204
+ "trial_params": null
2205
+ }
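The file above is the standard state dump written by the transformers Trainer: an array of per-step records (loss, grad_norm, learning_rate, plus periodic eval_loss entries), followed by run-level metadata such as max_steps and save_steps. A minimal sketch of pulling the loss curves back out, assuming the standard log_history key used by the Trainer and a locally downloaded copy of the checkpoint directory:

import json

# Load the Trainer state saved alongside the checkpoint.
with open("checkpoint-301/trainer_state.json") as f:
    state = json.load(f)

# Training records carry "loss"; periodic eval records carry "eval_loss".
train = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
evals = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

print(len(train), "train records,", len(evals), "eval records")
if evals:
    print("last eval_loss:", evals[-1][1])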
checkpoint-301/training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e6f068bf39065e2d2447ecd75968d1cdd55febe6373ac5d659a8ba8da543e600
3
+ size 6648
checkpoint-602/config.json ADDED
@@ -0,0 +1,28 @@
1
+ {
2
+ "architectures": [
3
+ "MistralForCausalLM"
4
+ ],
5
+ "attention_dropout": 0.0,
6
+ "bos_token_id": 1,
7
+ "eos_token_id": 2,
8
+ "head_dim": 128,
9
+ "hidden_act": "silu",
10
+ "hidden_size": 5120,
11
+ "initializer_range": 0.02,
12
+ "intermediate_size": 14336,
13
+ "max_position_embeddings": 131072,
14
+ "model_type": "mistral",
15
+ "num_attention_heads": 32,
16
+ "num_hidden_layers": 40,
17
+ "num_key_value_heads": 8,
18
+ "pad_token_id": 10,
19
+ "rms_norm_eps": 1e-05,
20
+ "rope_theta": 1000000.0,
21
+ "sliding_window": null,
22
+ "tie_word_embeddings": false,
23
+ "torch_dtype": "bfloat16",
24
+ "transformers_version": "4.51.0",
25
+ "unsloth_version": "2024.9",
26
+ "use_cache": false,
27
+ "vocab_size": 131072
28
+ }
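This config.json pins the architecture: a 40-layer Mistral-style causal LM with hidden size 5120, grouped-query attention (32 query heads, 8 KV heads), a 131072-token position limit, and bfloat16 weights. A minimal sketch of reading it back with transformers' AutoConfig, assuming the checkpoint directory has been downloaded locally:

from transformers import AutoConfig

# Parse checkpoint-602/config.json into a MistralConfig object.
config = AutoConfig.from_pretrained("checkpoint-602")

print(config.model_type)           # "mistral"
print(config.num_hidden_layers)    # 40
print(config.num_key_value_heads)  # 8 (grouped-query attention)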
checkpoint-602/generation_config.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 1,
4
+ "do_sample": true,
5
+ "eos_token_id": 2,
6
+ "max_length": 1024000,
7
+ "pad_token_id": 10,
8
+ "transformers_version": "4.51.0"
9
+ }
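generation_config.json carries only the default decoding settings (sampling enabled, max_length 1024000, pad token id 10) and can be inspected without loading the weights. A sketch under the same local-download assumption:

from transformers import GenerationConfig

# Read the default decoding settings independently of the model weights.
gen = GenerationConfig.from_pretrained("checkpoint-602")
print(gen.do_sample, gen.max_length, gen.pad_token_id)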
checkpoint-602/model-00001-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:acfa7e0d5b706fc4a78bde2f2330febe655d0ab2db4f78dbfecee20c356e808a
3
+ size 4865522496
checkpoint-602/model-00002-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7591fae00e44857bdbaf90f4a72a0957de341df5cc8ddcfbf77d94e9c0c339eb
3
+ size 4907529424
checkpoint-602/model-00003-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:90546050eacb77f0ff852f7ce903d03205662330c899bdaff306f5e11191223f
3
+ size 4907529456
checkpoint-602/model-00004-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6fe924de0c8131926cfa602d5b59ef587fa79787dc9225e301c43d2b17e21bc1
3
+ size 4907529456
checkpoint-602/model-00005-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d623a801b0b8bdd10b1905236a05766be4d51ad366ea7d9e9c2265cac894ebaf
3
+ size 4907496272
checkpoint-602/model.safetensors.index.json ADDED
@@ -0,0 +1,370 @@
1
+ {
2
+ "metadata": {
3
+ "total_size": 24495564800
4
+ },
5
+ "weight_map": {
6
+ "lm_head.weight": "model-00005-of-00005.safetensors",
7
+ "model.embed_tokens.weight": "model-00001-of-00005.safetensors",
8
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors",
9
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
10
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
11
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
12
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
13
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
14
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
15
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
16
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
17
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors",
18
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
19
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
20
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
21
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
22
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
23
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
24
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
25
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
26
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors",
27
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
28
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
29
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
30
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
31
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
32
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
33
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
34
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
35
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors",
36
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
37
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
38
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
39
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
40
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
41
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
42
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
43
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
44
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors",
45
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
46
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
47
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
48
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
49
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
50
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
51
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
52
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
53
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors",
54
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
55
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
56
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
57
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
58
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
59
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
60
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
61
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
62
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors",
63
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
64
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
65
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
66
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
67
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
68
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
69
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
70
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
71
+ "model.layers.15.input_layernorm.weight": "model-00003-of-00005.safetensors",
72
+ "model.layers.15.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
73
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
74
+ "model.layers.15.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
75
+ "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
76
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
77
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
78
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
79
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
80
+ "model.layers.16.input_layernorm.weight": "model-00003-of-00005.safetensors",
81
+ "model.layers.16.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
82
+ "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
83
+ "model.layers.16.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
84
+ "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
85
+ "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
86
+ "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
87
+ "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
88
+ "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
89
+ "model.layers.17.input_layernorm.weight": "model-00003-of-00005.safetensors",
90
+ "model.layers.17.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
91
+ "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
92
+ "model.layers.17.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
93
+ "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
94
+ "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
95
+ "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
96
+ "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
97
+ "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
98
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00005.safetensors",
99
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
100
+ "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
101
+ "model.layers.18.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
102
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
103
+ "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
104
+ "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
105
+ "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
106
+ "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
107
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00005.safetensors",
108
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
109
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
110
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
111
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
112
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
113
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
114
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
115
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
116
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors",
117
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
118
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
119
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
120
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
121
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
122
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
123
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
124
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
125
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00005.safetensors",
126
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
127
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
128
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
129
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
130
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
131
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
132
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
133
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
134
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00005.safetensors",
135
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
136
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
137
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
138
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
139
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
140
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
141
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
142
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
143
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors",
144
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
145
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
146
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
147
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
148
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
149
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
150
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
151
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
152
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors",
153
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
154
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
155
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
156
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
157
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
158
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
159
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
160
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
161
+ "model.layers.24.input_layernorm.weight": "model-00004-of-00005.safetensors",
162
+ "model.layers.24.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
163
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
164
+ "model.layers.24.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
165
+ "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
166
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
167
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
168
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
169
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
170
+ "model.layers.25.input_layernorm.weight": "model-00004-of-00005.safetensors",
171
+ "model.layers.25.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
172
+ "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
173
+ "model.layers.25.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
174
+ "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
175
+ "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
176
+ "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
177
+ "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
178
+ "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
179
+ "model.layers.26.input_layernorm.weight": "model-00004-of-00005.safetensors",
180
+ "model.layers.26.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
181
+ "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
182
+ "model.layers.26.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
183
+ "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
184
+ "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
185
+ "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
186
+ "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
187
+ "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
188
+ "model.layers.27.input_layernorm.weight": "model-00004-of-00005.safetensors",
189
+ "model.layers.27.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
190
+ "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
191
+ "model.layers.27.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
192
+ "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
193
+ "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
194
+ "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
195
+ "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
196
+ "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
197
+ "model.layers.28.input_layernorm.weight": "model-00004-of-00005.safetensors",
198
+ "model.layers.28.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
199
+ "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
200
+ "model.layers.28.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
201
+ "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
202
+ "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
203
+ "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
204
+ "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
205
+ "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
206
+ "model.layers.29.input_layernorm.weight": "model-00004-of-00005.safetensors",
207
+ "model.layers.29.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
208
+ "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
209
+ "model.layers.29.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
210
+ "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
211
+ "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
212
+ "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
213
+ "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
214
+ "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
215
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors",
216
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
217
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
218
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
219
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
220
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
221
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
222
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
223
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
224
+ "model.layers.30.input_layernorm.weight": "model-00004-of-00005.safetensors",
225
+ "model.layers.30.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
226
+ "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
227
+ "model.layers.30.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
228
+ "model.layers.30.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
229
+ "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
230
+ "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
231
+ "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
232
+ "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
233
+ "model.layers.31.input_layernorm.weight": "model-00004-of-00005.safetensors",
234
+ "model.layers.31.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
235
+ "model.layers.31.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
236
+ "model.layers.31.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
237
+ "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
238
+ "model.layers.31.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
239
+ "model.layers.31.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
240
+ "model.layers.31.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
241
+ "model.layers.31.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
242
+ "model.layers.32.input_layernorm.weight": "model-00004-of-00005.safetensors",
243
+ "model.layers.32.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
244
+ "model.layers.32.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
245
+ "model.layers.32.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
246
+ "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
247
+ "model.layers.32.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
248
+ "model.layers.32.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
249
+ "model.layers.32.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
250
+ "model.layers.32.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
251
+ "model.layers.33.input_layernorm.weight": "model-00005-of-00005.safetensors",
252
+ "model.layers.33.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
253
+ "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
254
+ "model.layers.33.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
255
+ "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
256
+ "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
257
+ "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
258
+ "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
259
+ "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
260
+ "model.layers.34.input_layernorm.weight": "model-00005-of-00005.safetensors",
261
+ "model.layers.34.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
262
+ "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
263
+ "model.layers.34.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
264
+ "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
265
+ "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
266
+ "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
267
+ "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
268
+ "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
269
+ "model.layers.35.input_layernorm.weight": "model-00005-of-00005.safetensors",
270
+ "model.layers.35.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
271
+ "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
272
+ "model.layers.35.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
273
+ "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
274
+ "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
275
+ "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
276
+ "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
277
+ "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
278
+ "model.layers.36.input_layernorm.weight": "model-00005-of-00005.safetensors",
279
+ "model.layers.36.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
280
+ "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
281
+ "model.layers.36.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
282
+ "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
283
+ "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
284
+ "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
285
+ "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
286
+ "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
287
+ "model.layers.37.input_layernorm.weight": "model-00005-of-00005.safetensors",
288
+ "model.layers.37.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
289
+ "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
290
+ "model.layers.37.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
291
+ "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
292
+ "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
293
+ "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
294
+ "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
295
+ "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
296
+ "model.layers.38.input_layernorm.weight": "model-00005-of-00005.safetensors",
297
+ "model.layers.38.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
298
+ "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
299
+ "model.layers.38.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
300
+ "model.layers.38.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
301
+ "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
302
+ "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
303
+ "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
304
+ "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
305
+ "model.layers.39.input_layernorm.weight": "model-00005-of-00005.safetensors",
306
+ "model.layers.39.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
307
+ "model.layers.39.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
308
+ "model.layers.39.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
309
+ "model.layers.39.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
310
+ "model.layers.39.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
311
+ "model.layers.39.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
312
+ "model.layers.39.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
313
+ "model.layers.39.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
314
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors",
315
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
316
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
317
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
318
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
319
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
320
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
321
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
322
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
323
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors",
324
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
325
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
326
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
327
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
328
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
329
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
330
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
331
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
332
+ "model.layers.6.input_layernorm.weight": "model-00002-of-00005.safetensors",
333
+ "model.layers.6.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
334
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
335
+ "model.layers.6.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
336
+ "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
337
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
338
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
339
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
340
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
341
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00005.safetensors",
342
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
343
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
344
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
345
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
346
+ "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
347
+ "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
348
+ "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
349
+ "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
350
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00005.safetensors",
351
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
352
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
353
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
354
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
355
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
356
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
357
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
358
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
359
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00005.safetensors",
360
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
361
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
362
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
363
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
364
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
365
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
366
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
367
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
368
+ "model.norm.weight": "model-00005-of-00005.safetensors"
369
+ }
370
+ }
checkpoint-602/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c7b3c5dba03346c7a72772a53e17467aab96b2c6401e7063f5da10e66b39a6f
+ size 24878749708
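Each of the large binary artifacts in this commit is tracked with Git LFS, so the diff shows only a three-line pointer file (spec version, SHA-256 object id, byte size) rather than the blob itself. As a minimal sketch, assuming only the standard library, a pointer like the one above can be parsed as follows:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Parse a git-lfs pointer file (https://git-lfs.github.com/spec/v1)
    into its key/value fields: version, oid, size."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:7c7b3c5dba03346c7a72772a53e17467aab96b2c6401e7063f5da10e66b39a6f
size 24878749708
"""
info = parse_lfs_pointer(pointer)
print(info["oid"])   # sha256:7c7b... -- identifies the blob in LFS storage
print(info["size"])  # 24878749708 -- the optimizer state is roughly 24.9 GB
```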
checkpoint-602/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9213080fe2b45399b87036ca9ff9164533abe6b368e5c828136ee184486749d4
+ size 14244
checkpoint-602/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:34a41fce2fdc6ca92fcca972f29cb3d0dcf70f9a63c920ef9b54688eec81f6fb
+ size 1064
checkpoint-602/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
checkpoint-602/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc07f4f61632a89d8248b43f25649d6cc45200f8709e9d9bcd0414b00a4064e2
+ size 17078342
checkpoint-602/tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
checkpoint-602/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
checkpoint-602/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e6f068bf39065e2d2447ecd75968d1cdd55febe6373ac5d659a8ba8da543e600
+ size 6648
config.json ADDED
@@ -0,0 +1,28 @@
+ {
+ "architectures": [
+ "MistralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "intermediate_size": 14336,
+ "max_position_embeddings": 131072,
+ "model_type": "mistral",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 40,
+ "num_key_value_heads": 8,
+ "pad_token_id": 10,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.51.0",
+ "unsloth_version": "2024.9",
+ "use_cache": false,
+ "vocab_size": 131072
+ }
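For readers who want to sanity-check these values locally, here is a minimal sketch (assuming the `transformers` library and a local clone of this repository; the path "." is a placeholder) that reads the config and echoes the architecture implied above:

```python
from transformers import AutoConfig

# "." assumes you are running inside a local clone of this repo.
config = AutoConfig.from_pretrained(".")

# Values from config.json: 40 decoder layers, 5120 hidden size, and
# 32 attention heads sharing 8 key/value heads (grouped-query attention).
print(config.num_hidden_layers)    # 40
print(config.hidden_size)          # 5120
print(config.num_attention_heads)  # 32
print(config.num_key_value_heads)  # 8
```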
generation_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "do_sample": true,
+ "eos_token_id": 2,
+ "max_length": 1024000,
+ "pad_token_id": 10,
+ "transformers_version": "4.51.0"
+ }
model-00001-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:acfa7e0d5b706fc4a78bde2f2330febe655d0ab2db4f78dbfecee20c356e808a
+ size 4865522496
model-00002-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7591fae00e44857bdbaf90f4a72a0957de341df5cc8ddcfbf77d94e9c0c339eb
+ size 4907529424
model-00003-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:90546050eacb77f0ff852f7ce903d03205662330c899bdaff306f5e11191223f
+ size 4907529456
model-00004-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6fe924de0c8131926cfa602d5b59ef587fa79787dc9225e301c43d2b17e21bc1
+ size 4907529456
model-00005-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d623a801b0b8bdd10b1905236a05766be4d51ad366ea7d9e9c2265cac894ebaf
+ size 4907496272
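The five shard sizes above nearly add up to the `total_size` recorded in the index that follows; a quick sanity check in plain Python (sizes copied verbatim from the pointers):

```python
# Byte sizes of the five LFS-tracked shards, from the pointer files above.
shard_sizes = [4865522496, 4907529424, 4907529456, 4907529456, 4907496272]

total_on_disk = sum(shard_sizes)
print(total_on_disk)  # 24495607104, about 24.5 GB in bf16

# model.safetensors.index.json reports total_size = 24495564800, the sum of
# raw tensor bytes; the ~42 KB gap is plausibly per-shard safetensors
# header overhead.
print(total_on_disk - 24495564800)  # 42304
```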
model.safetensors.index.json ADDED
@@ -0,0 +1,370 @@
+ {
+ "metadata": {
+ "total_size": 24495564800
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00005-of-00005.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00005.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.32.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.32.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.32.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.32.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "model.layers.32.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.32.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.32.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.32.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.33.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.33.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.33.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "model.layers.34.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.34.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.34.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.35.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.35.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.35.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.36.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.36.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.36.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.37.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.37.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.37.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.38.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.38.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.38.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.38.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.39.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.39.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.39.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.39.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.39.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "model.layers.39.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.39.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.39.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.39.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "model.norm.weight": "model-00005-of-00005.safetensors"
+ }
+ }
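The `weight_map` above routes each tensor name to the shard that stores it, and `total_size` is the summed byte size of all tensors. As a minimal sketch, assuming a local clone with all five shards present and the `safetensors` library installed, the index can be cross-checked against the shard headers:

```python
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

# Group tensor names by the shard the index assigns them to.
by_shard = {}
for name, shard in index["weight_map"].items():
    by_shard.setdefault(shard, set()).add(name)

# Verify each shard contains exactly the tensors the index claims.
for shard, expected in sorted(by_shard.items()):
    with safe_open(shard, framework="pt") as f:
        actual = set(f.keys())
    assert actual == expected, f"{shard}: index/shard mismatch"
    print(f"{shard}: {len(actual)} tensors OK")
```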
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc07f4f61632a89d8248b43f25649d6cc45200f8709e9d9bcd0414b00a4064e2
+ size 17078342
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e6f068bf39065e2d2447ecd75968d1cdd55febe6373ac5d659a8ba8da543e600
+ size 6648
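Putting the pieces together, here is a minimal usage sketch. The repo id `hardlyworking/Sapphire-12B` is inferred from this upload and may differ; the eos token `<|im_end|>` in special_tokens_map.json indicates ChatML-style turns, and the bf16 shards need roughly 24.5 GB of memory:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "hardlyworking/Sapphire-12B"  # assumed repo id for this upload

tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(
    repo, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [{"role": "user", "content": "Hello!"}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

# do_sample=True mirrors the generation_config.json shipped above.
out = model.generate(inputs, max_new_tokens=64, do_sample=True)
print(tokenizer.decode(out[0][inputs.shape[-1]:], skip_special_tokens=True))
```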