hardlyworking committed on
Commit 8cd7712 · verified · 1 Parent(s): d90754e

Delete checkpoint-282

checkpoint-282/added_tokens.json DELETED
@@ -1,5 +0,0 @@
- {
-   "<|endofprompt|>": 100276,
-   "<|im_end|>": 100265,
-   "<|im_start|>": 100264
- }
 
checkpoint-282/chat_template.jinja DELETED
@@ -1,4 +0,0 @@
- {% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '
- ' + message['content'] + '<|im_end|>' + '
- '}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant
- ' }}{% endif %}
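
This is the standard ChatML template: every message is wrapped in <|im_start|>{role} ... <|im_end|>, and add_generation_prompt appends an opening assistant header. A minimal sketch of what it rendered, with the template reassembled as one Python string (its literal newlines written as \n) and an illustrative message list; jinja2 is assumed available:

    from jinja2 import Template

    # The deleted template, verbatim, with literal newlines escaped as \n.
    CHAT_TEMPLATE = (
        "{% if not add_generation_prompt is defined %}"
        "{% set add_generation_prompt = false %}{% endif %}"
        "{% for message in messages %}"
        "{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}"
        "{% endfor %}"
        "{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"
    )

    messages = [{"role": "user", "content": "Hello"}]  # illustrative input
    print(Template(CHAT_TEMPLATE).render(messages=messages, add_generation_prompt=True))
    # <|im_start|>user
    # Hello<|im_end|>
    # <|im_start|>assistant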
 
checkpoint-282/config.json DELETED
@@ -1,32 +0,0 @@
- {
-   "architectures": [
-     "LlamaForCausalLM"
-   ],
-   "attention_bias": false,
-   "attention_dropout": 0.0,
-   "bos_token_id": 100257,
-   "embd_pdrop": 0.0,
-   "eos_token_id": 100265,
-   "head_dim": 128,
-   "hidden_act": "silu",
-   "hidden_size": 4096,
-   "initializer_range": 0.02,
-   "intermediate_size": 8192,
-   "max_position_embeddings": 262144,
-   "mlp_bias": false,
-   "model_type": "llama",
-   "num_attention_heads": 32,
-   "num_hidden_layers": 28,
-   "num_key_value_heads": 8,
-   "pad_token_id": 100257,
-   "pretraining_tp": 1,
-   "resid_pdrop": 0.0,
-   "rms_norm_eps": 1e-06,
-   "rope_scaling": null,
-   "rope_theta": 128000000,
-   "tie_word_embeddings": true,
-   "torch_dtype": "bfloat16",
-   "transformers_version": "4.52.4",
-   "use_cache": false,
-   "vocab_size": 102400
- }
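
The deleted config describes a ~4.4B-parameter Llama-architecture model: 28 layers, hidden size 4096, grouped-query attention (32 query heads sharing 8 KV heads), tied embeddings, and a 262144-token context window via rope_theta = 1.28e8. A sketch that rebuilds the config in memory and checks the implied parameter count against the safetensors index further down (transformers assumed installed; nothing is downloaded):

    from transformers import LlamaConfig

    cfg = LlamaConfig(
        vocab_size=102400,
        hidden_size=4096,
        intermediate_size=8192,
        num_hidden_layers=28,
        num_attention_heads=32,
        num_key_value_heads=8,      # GQA: 4 query heads per KV head
        head_dim=128,
        max_position_embeddings=262144,
        rope_theta=128000000,
        rms_norm_eps=1e-06,
        tie_word_embeddings=True,
        bos_token_id=100257,
        eos_token_id=100265,
        pad_token_id=100257,
    )

    kv_dim = cfg.num_key_value_heads * cfg.head_dim    # 1024
    per_layer = (
        2 * cfg.hidden_size ** 2                       # q_proj, o_proj
        + 2 * cfg.hidden_size * kv_dim                 # k_proj, v_proj
        + 3 * cfg.hidden_size * cfg.intermediate_size  # gate/up/down projections
        + 2 * cfg.hidden_size                          # two RMSNorm weights per layer
    )
    total = cfg.vocab_size * cfg.hidden_size + cfg.num_hidden_layers * per_layer + cfg.hidden_size
    print(total * 2)  # 8825282560 bytes in bf16, matching the index's total_size exactly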
 
checkpoint-282/generation_config.json DELETED
@@ -1,9 +0,0 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 100257,
-   "do_sample": true,
-   "eos_token_id": 100265,
-   "pad_token_id": 100257,
-   "transformers_version": "4.52.4",
-   "use_cache": false
- }
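
The same defaults expressed through the transformers API, as a sketch: do_sample=True with no temperature/top_p overrides means sampling at the library defaults, and use_cache=False is a training-time setting that would normally be re-enabled for inference.

    from transformers import GenerationConfig

    gen_cfg = GenerationConfig(
        do_sample=True,           # sample rather than greedy-decode
        bos_token_id=100257,
        eos_token_id=100265,      # <|im_end|> terminates generation
        pad_token_id=100257,
    )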
 
checkpoint-282/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-282/model-00001-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:99044790e2d064bf8559a16d99a20473efab74cb5a047fc445e142b91f1d4d24
- size 4983077832
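
These three lines are a Git LFS pointer, not the weights themselves: the repo tracks only the SHA-256 and byte size, and the ~5.0 GB shard lives in LFS storage (the other pointer files below work the same way). A sketch of the integrity check LFS performs, assuming the shard has been downloaded to a local path:

    import hashlib

    def sha256_of(path: str, chunk: int = 1 << 20) -> str:
        # Hash the file in 1 MiB chunks so multi-GB shards never sit in memory.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            while block := f.read(chunk):
                h.update(block)
        return h.hexdigest()

    expected = "99044790e2d064bf8559a16d99a20473efab74cb5a047fc445e142b91f1d4d24"
    assert sha256_of("model-00001-of-00002.safetensors") == expected  # hypothetical local path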
 
checkpoint-282/model-00002-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:18db60ae16f6f83b77c2bb84a7550f81fe0607f1fbb3761f2940c96b2084298c
- size 3842234168
 
checkpoint-282/model.safetensors.index.json DELETED
@@ -1,261 +0,0 @@
- {
-   "metadata": {
-     "total_size": 8825282560
-   },
-   "weight_map": {
-     "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.14.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.norm.weight": "model-00002-of-00002.safetensors"
-   }
- }
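
The weight_map is what sharded loaders consult to find each tensor: layers 0-13 sit in shard 1, layers 15-27 and the final norm in shard 2, and layer 14 straddles the boundary (its attention projections and gate_proj in shard 1, the rest in shard 2). A sketch of the lookup, assuming the deleted index has been saved locally under its original name:

    import json
    from collections import Counter

    with open("model.safetensors.index.json") as f:
        index = json.load(f)

    wmap = index["weight_map"]
    print(wmap["model.layers.14.mlp.gate_proj.weight"])  # model-00001-of-00002.safetensors
    print(wmap["model.layers.14.mlp.up_proj.weight"])    # model-00002-of-00002.safetensors

    # 254 tensors total across the two shards; total_size counts tensor bytes only
    # (the shard files are slightly larger because of their safetensors headers).
    print(Counter(wmap.values()))
    print(index["metadata"]["total_size"])  # 8825282560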
 
checkpoint-282/optimizer.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:d0985d2fee9eaf37acf0486d816ea2576408d6e44e654694fecb6e5603a8b736
- size 11466963134
 
checkpoint-282/rng_state.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:b1a97db8e41139aa1239ba7fb79ddeb0af5998c6305a440c1fe182e6ad02f2f5
- size 14244
 
checkpoint-282/scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:0e6574c450b3dc19a94f903b026d84caab58438abeecc47a4b4bbb8468d394fc
- size 1064
 
checkpoint-282/special_tokens_map.json DELETED
@@ -1,30 +0,0 @@
- {
-   "bos_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "eos_token": {
-     "content": "<|im_end|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "pad_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "unk_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   }
- }
 
checkpoint-282/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-282/tokenizer_config.json DELETED
@@ -1,69 +0,0 @@
- {
-   "add_prefix_space": false,
-   "added_tokens_decoder": {
-     "100257": {
-       "content": "<|endoftext|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "100258": {
-       "content": "<|fim_prefix|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "100259": {
-       "content": "<|fim_middle|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "100260": {
-       "content": "<|fim_suffix|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "100264": {
-       "content": "<|im_start|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "100265": {
-       "content": "<|im_end|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "100276": {
-       "content": "<|endofprompt|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "bos_token": "<|endoftext|>",
-   "clean_up_tokenization_spaces": false,
-   "eos_token": "<|im_end|>",
-   "extra_special_tokens": {},
-   "model_max_length": 262144,
-   "pad_token": "<|endoftext|>",
-   "tokenizer_class": "GPT2Tokenizer",
-   "unk_token": "<|endoftext|>"
- }
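
Note the mixed "special" flags: <|im_end|> (100265) is special and doubles as EOS, while <|im_start|> (100264) is added but not special, so it survives decoding with skip_special_tokens=True. A plain-Python sketch cross-checking this config against the deleted added_tokens.json above (both dicts transcribed from the diffs; no files are read):

    added_tokens = {"<|endofprompt|>": 100276, "<|im_end|>": 100265, "<|im_start|>": 100264}
    decoder = {
        100264: ("<|im_start|>", False),   # special=False: kept by skip_special_tokens
        100265: ("<|im_end|>", True),      # special=True: this is the EOS token
        100276: ("<|endofprompt|>", True),
    }

    for token, token_id in added_tokens.items():
        content, _special = decoder[token_id]
        assert content == token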
 
checkpoint-282/trainer_state.json DELETED
@@ -1,2008 +0,0 @@
- {
-   "best_global_step": null,
-   "best_metric": null,
-   "best_model_checkpoint": null,
-   "epoch": 1.989399293286219,
-   "eval_steps": 500,
-   "global_step": 282,
-   "is_hyper_param_search": false,
-   "is_local_process_zero": true,
-   "is_world_process_zero": true,
-   "log_history": [
-     {"epoch": 0.007067137809187279, "grad_norm": 1.2265625, "learning_rate": 0.0, "loss": 1.9174, "step": 1},
-     {"epoch": 0.014134275618374558, "grad_norm": 1.0234375, "learning_rate": 3.5714285714285716e-07, "loss": 2.2074, "step": 2},
-     {"epoch": 0.02120141342756184, "grad_norm": 1.2421875, "learning_rate": 7.142857142857143e-07, "loss": 1.7854, "step": 3},
-     {"epoch": 0.028268551236749116, "grad_norm": 1.1484375, "learning_rate": 1.0714285714285714e-06, "loss": 2.1768, "step": 4},
-     {"epoch": 0.0353356890459364, "grad_norm": 0.9375, "learning_rate": 1.4285714285714286e-06, "loss": 2.1438, "step": 5},
-     {"epoch": 0.04240282685512368, "grad_norm": 3.484375, "learning_rate": 1.7857142857142859e-06, "loss": 2.5976, "step": 6},
-     {"epoch": 0.04946996466431095, "grad_norm": 1.2265625, "learning_rate": 2.1428571428571427e-06, "loss": 2.2351, "step": 7},
-     {"epoch": 0.05653710247349823, "grad_norm": 0.984375, "learning_rate": 2.5e-06, "loss": 2.0092, "step": 8},
-     {"epoch": 0.0636042402826855, "grad_norm": 1.1015625, "learning_rate": 2.8571428571428573e-06, "loss": 2.1347, "step": 9},
-     {"epoch": 0.0706713780918728, "grad_norm": 1.03125, "learning_rate": 3.2142857142857147e-06, "loss": 2.0748, "step": 10},
-     {"epoch": 0.07773851590106007, "grad_norm": 0.9140625, "learning_rate": 3.5714285714285718e-06, "loss": 1.7444, "step": 11},
-     {"epoch": 0.08480565371024736, "grad_norm": 0.83984375, "learning_rate": 3.928571428571429e-06, "loss": 1.5372, "step": 12},
-     {"epoch": 0.09187279151943463, "grad_norm": 0.71484375, "learning_rate": 4.2857142857142855e-06, "loss": 1.4944, "step": 13},
-     {"epoch": 0.0989399293286219, "grad_norm": 0.8046875, "learning_rate": 4.642857142857144e-06, "loss": 1.8555, "step": 14},
-     {"epoch": 0.10600706713780919, "grad_norm": 0.92578125, "learning_rate": 5e-06, "loss": 1.5569, "step": 15},
-     {"epoch": 0.11307420494699646, "grad_norm": 1.34375, "learning_rate": 5.357142857142857e-06, "loss": 2.1602, "step": 16},
-     {"epoch": 0.12014134275618374, "grad_norm": 0.84375, "learning_rate": 5.7142857142857145e-06, "loss": 2.0495, "step": 17},
-     {"epoch": 0.127208480565371, "grad_norm": 2.9375, "learning_rate": 6.071428571428571e-06, "loss": 2.2094, "step": 18},
-     {"epoch": 0.13427561837455831, "grad_norm": 0.8046875, "learning_rate": 6.4285714285714295e-06, "loss": 1.3931, "step": 19},
-     {"epoch": 0.1413427561837456, "grad_norm": 0.71875, "learning_rate": 6.785714285714287e-06, "loss": 1.7009, "step": 20},
-     {"epoch": 0.14840989399293286, "grad_norm": 0.859375, "learning_rate": 7.1428571428571436e-06, "loss": 1.8362, "step": 21},
-     {"epoch": 0.15547703180212014, "grad_norm": 0.67578125, "learning_rate": 7.500000000000001e-06, "loss": 1.9375, "step": 22},
-     {"epoch": 0.1625441696113074, "grad_norm": 0.9765625, "learning_rate": 7.857142857142858e-06, "loss": 2.1988, "step": 23},
-     {"epoch": 0.1696113074204947, "grad_norm": 0.71484375, "learning_rate": 8.214285714285714e-06, "loss": 1.7918, "step": 24},
-     {"epoch": 0.17667844522968199, "grad_norm": 1.0234375, "learning_rate": 8.571428571428571e-06, "loss": 2.1915, "step": 25},
-     {"epoch": 0.18374558303886926, "grad_norm": 0.796875, "learning_rate": 8.92857142857143e-06, "loss": 2.3101, "step": 26},
-     {"epoch": 0.19081272084805653, "grad_norm": 0.71875, "learning_rate": 9.285714285714288e-06, "loss": 1.9351, "step": 27},
-     {"epoch": 0.1978798586572438, "grad_norm": 1.4296875, "learning_rate": 9.642857142857144e-06, "loss": 1.7059, "step": 28},
-     {"epoch": 0.2049469964664311, "grad_norm": 0.7734375, "learning_rate": 1e-05, "loss": 2.1424, "step": 29},
-     {"epoch": 0.21201413427561838, "grad_norm": 0.6328125, "learning_rate": 9.999914116658896e-06, "loss": 1.7643, "step": 30},
-     {"epoch": 0.21908127208480566, "grad_norm": 0.79296875, "learning_rate": 9.999656469585957e-06, "loss": 1.8743, "step": 31},
-     {"epoch": 0.22614840989399293, "grad_norm": 0.74609375, "learning_rate": 9.999227067632223e-06, "loss": 2.0273, "step": 32},
-     {"epoch": 0.2332155477031802, "grad_norm": 0.8359375, "learning_rate": 9.99862592554908e-06, "loss": 1.8072, "step": 33},
-     {"epoch": 0.24028268551236748, "grad_norm": 0.82421875, "learning_rate": 9.997853063987768e-06, "loss": 2.0589, "step": 34},
-     {"epoch": 0.24734982332155478, "grad_norm": 0.66796875, "learning_rate": 9.996908509498662e-06, "loss": 1.9282, "step": 35},
-     {"epoch": 0.254416961130742, "grad_norm": 1.046875, "learning_rate": 9.995792294530356e-06, "loss": 1.538, "step": 36},
-     {"epoch": 0.26148409893992935, "grad_norm": 0.81640625, "learning_rate": 9.994504457428557e-06, "loss": 1.9843, "step": 37},
-     {"epoch": 0.26855123674911663, "grad_norm": 1.84375, "learning_rate": 9.993045042434772e-06, "loss": 2.7539, "step": 38},
-     {"epoch": 0.2756183745583039, "grad_norm": 0.66796875, "learning_rate": 9.99141409968477e-06, "loss": 1.7211, "step": 39},
-     {"epoch": 0.2826855123674912, "grad_norm": 0.71875, "learning_rate": 9.989611685206881e-06, "loss": 2.1478, "step": 40},
-     {"epoch": 0.28975265017667845, "grad_norm": 0.65234375, "learning_rate": 9.987637860920053e-06, "loss": 2.0103, "step": 41},
-     {"epoch": 0.2968197879858657, "grad_norm": 0.63671875, "learning_rate": 9.985492694631733e-06, "loss": 1.4481, "step": 42},
-     {"epoch": 0.303886925795053, "grad_norm": 0.80078125, "learning_rate": 9.983176260035546e-06, "loss": 1.7512, "step": 43},
-     {"epoch": 0.31095406360424027, "grad_norm": 0.71875, "learning_rate": 9.980688636708744e-06, "loss": 2.061, "step": 44},
-     {"epoch": 0.31802120141342755, "grad_norm": 0.59765625, "learning_rate": 9.978029910109491e-06, "loss": 1.572, "step": 45},
-     {"epoch": 0.3250883392226148, "grad_norm": 0.66015625, "learning_rate": 9.975200171573917e-06, "loss": 2.0828, "step": 46},
-     {"epoch": 0.3321554770318021, "grad_norm": 0.63671875, "learning_rate": 9.972199518312979e-06, "loss": 1.8459, "step": 47},
-     {"epoch": 0.3392226148409894, "grad_norm": 0.6796875, "learning_rate": 9.969028053409131e-06, "loss": 2.2307, "step": 48},
-     {"epoch": 0.3462897526501767, "grad_norm": 0.65625, "learning_rate": 9.965685885812773e-06, "loss": 1.5793, "step": 49},
-     {"epoch": 0.35335689045936397, "grad_norm": 0.75, "learning_rate": 9.96217313033851e-06, "loss": 2.1297, "step": 50},
-     {"epoch": 0.36042402826855124, "grad_norm": 1.0625, "learning_rate": 9.958489907661217e-06, "loss": 1.912, "step": 51},
-     {"epoch": 0.3674911660777385, "grad_norm": 0.72265625, "learning_rate": 9.95463634431188e-06, "loss": 2.122, "step": 52},
-     {"epoch": 0.3745583038869258, "grad_norm": 0.59375, "learning_rate": 9.950612572673255e-06, "loss": 1.9928, "step": 53},
-     {"epoch": 0.38162544169611307, "grad_norm": 0.8984375, "learning_rate": 9.946418730975326e-06, "loss": 1.9401, "step": 54},
-     {"epoch": 0.38869257950530034, "grad_norm": 0.578125, "learning_rate": 9.942054963290549e-06, "loss": 1.742, "step": 55},
-     {"epoch": 0.3957597173144876, "grad_norm": 0.70703125, "learning_rate": 9.9375214195289e-06, "loss": 1.775, "step": 56},
-     {"epoch": 0.4028268551236749, "grad_norm": 0.64453125, "learning_rate": 9.932818255432733e-06, "loss": 1.9486, "step": 57},
-     {"epoch": 0.4098939929328622, "grad_norm": 0.7265625, "learning_rate": 9.92794563257143e-06, "loss": 1.7127, "step": 58},
-     {"epoch": 0.4169611307420495, "grad_norm": 0.66796875, "learning_rate": 9.92290371833584e-06, "loss": 2.1642, "step": 59},
-     {"epoch": 0.42402826855123676, "grad_norm": 0.83984375, "learning_rate": 9.91769268593254e-06, "loss": 1.7884, "step": 60},
-     {"epoch": 0.43109540636042404, "grad_norm": 0.68359375, "learning_rate": 9.91231271437788e-06, "loss": 2.1369, "step": 61},
-     {"epoch": 0.4381625441696113, "grad_norm": 0.75390625, "learning_rate": 9.906763988491834e-06, "loss": 2.2394, "step": 62},
-     {"epoch": 0.4452296819787986, "grad_norm": 0.5859375, "learning_rate": 9.901046698891648e-06, "loss": 1.4972, "step": 63},
-     {"epoch": 0.45229681978798586, "grad_norm": 0.96875, "learning_rate": 9.895161041985295e-06, "loss": 1.8555, "step": 64},
-     {"epoch": 0.45936395759717313, "grad_norm": 0.74609375, "learning_rate": 9.889107219964726e-06, "loss": 2.2285, "step": 65},
-     {"epoch": 0.4664310954063604, "grad_norm": 0.6015625, "learning_rate": 9.882885440798928e-06, "loss": 1.6138, "step": 66},
-     {"epoch": 0.4734982332155477, "grad_norm": 0.6171875, "learning_rate": 9.876495918226772e-06, "loss": 1.7915, "step": 67},
-     {"epoch": 0.48056537102473496, "grad_norm": 0.56640625, "learning_rate": 9.869938871749676e-06, "loss": 1.6075, "step": 68},
-     {"epoch": 0.4876325088339223, "grad_norm": 0.6640625, "learning_rate": 9.863214526624065e-06, "loss": 1.9207, "step": 69},
-     {"epoch": 0.49469964664310956, "grad_norm": 0.703125, "learning_rate": 9.856323113853632e-06, "loss": 1.9285, "step": 70},
-     {"epoch": 0.5017667844522968, "grad_norm": 0.68359375, "learning_rate": 9.849264870181393e-06, "loss": 1.9508, "step": 71},
-     {"epoch": 0.508833922261484, "grad_norm": 0.6640625, "learning_rate": 9.842040038081572e-06, "loss": 1.9983, "step": 72},
-     {"epoch": 0.5159010600706714, "grad_norm": 0.5703125, "learning_rate": 9.834648865751254e-06, "loss": 1.6503, "step": 73},
-     {"epoch": 0.5229681978798587, "grad_norm": 1.046875, "learning_rate": 9.827091607101871e-06, "loss": 1.8562, "step": 74},
-     {"epoch": 0.5300353356890459, "grad_norm": 0.78515625, "learning_rate": 9.81936852175047e-06, "loss": 1.5611, "step": 75},
-     {"epoch": 0.5371024734982333, "grad_norm": 0.61328125, "learning_rate": 9.811479875010801e-06, "loss": 1.7828, "step": 76},
-     {"epoch": 0.5441696113074205, "grad_norm": 0.6015625, "learning_rate": 9.803425937884202e-06, "loss": 1.5878, "step": 77},
-     {"epoch": 0.5512367491166078, "grad_norm": 0.76171875, "learning_rate": 9.79520698705028e-06, "loss": 2.0218, "step": 78},
-     {"epoch": 0.558303886925795, "grad_norm": 0.640625, "learning_rate": 9.786823304857424e-06, "loss": 1.4419, "step": 79},
-     {"epoch": 0.5653710247349824, "grad_norm": 1.2265625, "learning_rate": 9.778275179313084e-06, "loss": 1.9871, "step": 80},
-     {"epoch": 0.5724381625441696, "grad_norm": 0.71875, "learning_rate": 9.769562904073896e-06, "loss": 1.9268, "step": 81},
-     {"epoch": 0.5795053003533569, "grad_norm": 0.57421875, "learning_rate": 9.76068677843558e-06, "loss": 1.4848, "step": 82},
-     {"epoch": 0.5865724381625441, "grad_norm": 0.65234375, "learning_rate": 9.751647107322668e-06, "loss": 2.0959, "step": 83},
-     {"epoch": 0.5936395759717314, "grad_norm": 0.74609375, "learning_rate": 9.742444201278022e-06, "loss": 2.3728, "step": 84},
-     {"epoch": 0.6007067137809188, "grad_norm": 0.671875, "learning_rate": 9.733078376452172e-06, "loss": 1.3637, "step": 85},
-     {"epoch": 0.607773851590106, "grad_norm": 0.671875, "learning_rate": 9.723549954592447e-06, "loss": 1.8211, "step": 86},
-     {"epoch": 0.6148409893992933, "grad_norm": 0.64453125, "learning_rate": 9.713859263031928e-06, "loss": 1.9919, "step": 87},
-     {"epoch": 0.6219081272084805, "grad_norm": 0.6015625, "learning_rate": 9.704006634678205e-06, "loss": 1.5786, "step": 88},
-     {"epoch": 0.6289752650176679, "grad_norm": 0.6640625, "learning_rate": 9.693992408001934e-06, "loss": 1.9939, "step": 89},
-     {"epoch": 0.6360424028268551, "grad_norm": 0.60546875, "learning_rate": 9.683816927025212e-06, "loss": 2.1518, "step": 90},
-     {"epoch": 0.6431095406360424, "grad_norm": 0.7578125, "learning_rate": 9.673480541309761e-06, "loss": 2.2444, "step": 91},
-     {"epoch": 0.6501766784452296, "grad_norm": 0.609375, "learning_rate": 9.662983605944918e-06, "loss": 1.9037, "step": 92},
-     {"epoch": 0.657243816254417, "grad_norm": 1.5625, "learning_rate": 9.652326481535434e-06, "loss": 2.3459, "step": 93},
-     {"epoch": 0.6643109540636042, "grad_norm": 0.609375, "learning_rate": 9.64150953418909e-06, "loss": 1.7578, "step": 94},
-     {"epoch": 0.6713780918727915, "grad_norm": 0.6328125, "learning_rate": 9.630533135504118e-06, "loss": 1.5717, "step": 95},
-     {"epoch": 0.6784452296819788, "grad_norm": 0.7109375, "learning_rate": 9.619397662556434e-06, "loss": 1.7722, "step": 96},
-     {"epoch": 0.6855123674911661, "grad_norm": 0.6796875, "learning_rate": 9.608103497886687e-06, "loss": 1.8594, "step": 97},
-     {"epoch": 0.6925795053003534, "grad_norm": 0.66796875, "learning_rate": 9.596651029487116e-06, "loss": 2.1158, "step": 98},
-     {"epoch": 0.6996466431095406, "grad_norm": 0.75, "learning_rate": 9.585040650788222e-06, "loss": 1.2966, "step": 99},
-     {"epoch": 0.7067137809187279, "grad_norm": 1.2890625, "learning_rate": 9.573272760645248e-06, "loss": 2.3551, "step": 100},
-     {"epoch": 0.7137809187279152, "grad_norm": 0.61328125, "learning_rate": 9.561347763324484e-06, "loss": 2.0097, "step": 101},
-     {"epoch": 0.7208480565371025, "grad_norm": 0.6640625, "learning_rate": 9.549266068489377e-06, "loss": 1.7785, "step": 102},
-     {"epoch": 0.7279151943462897, "grad_norm": 0.59765625, "learning_rate": 9.537028091186453e-06, "loss": 1.7432, "step": 103},
-     {"epoch": 0.734982332155477, "grad_norm": 0.57421875, "learning_rate": 9.524634251831064e-06, "loss": 1.9585, "step": 104},
-     {"epoch": 0.7420494699646644, "grad_norm": 0.7890625, "learning_rate": 9.512084976192944e-06, "loss": 1.8571, "step": 105},
-     {"epoch": 0.7491166077738516, "grad_norm": 0.62890625, "learning_rate": 9.499380695381577e-06, "loss": 1.8799, "step": 106},
-     {"epoch": 0.7561837455830389, "grad_norm": 0.7421875, "learning_rate": 9.486521845831403e-06, "loss": 1.8795, "step": 107},
-     {"epoch": 0.7632508833922261, "grad_norm": 0.69140625, "learning_rate": 9.4735088692868e-06, "loss": 1.8043, "step": 108},
-     {"epoch": 0.7703180212014135, "grad_norm": 0.8125, "learning_rate": 9.460342212786933e-06, "loss": 2.1479, "step": 109},
-     {"epoch": 0.7773851590106007, "grad_norm": 0.7890625, "learning_rate": 9.447022328650382e-06, "loss": 1.7199, "step": 110},
-     {"epoch": 0.784452296819788, "grad_norm": 0.80078125, "learning_rate": 9.43354967445961e-06, "loss": 2.1058, "step": 111},
-     {"epoch": 0.7915194346289752, "grad_norm": 0.828125, "learning_rate": 9.419924713045234e-06, "loss": 1.9554, "step": 112},
-     {"epoch": 0.7985865724381626, "grad_norm": 0.64453125, "learning_rate": 9.406147912470142e-06, "loss": 1.3284, "step": 113},
-     {"epoch": 0.8056537102473498, "grad_norm": 0.66015625, "learning_rate": 9.392219746013399e-06, "loss": 1.8721, "step": 114},
-     {"epoch": 0.8127208480565371, "grad_norm": 0.5703125, "learning_rate": 9.378140692153991e-06, "loss": 1.8162, "step": 115},
-     {"epoch": 0.8197879858657244, "grad_norm": 0.6171875, "learning_rate": 9.363911234554394e-06, "loss": 2.1554, "step": 116},
-     {"epoch": 0.8268551236749117, "grad_norm": 0.703125, "learning_rate": 9.349531862043952e-06, "loss": 2.1176, "step": 117},
-     {"epoch": 0.833922261484099, "grad_norm": 0.609375, "learning_rate": 9.335003068602087e-06, "loss": 1.7494, "step": 118},
-     {"epoch": 0.8409893992932862, "grad_norm": 0.62109375, "learning_rate": 9.320325353341325e-06, "loss": 1.8479, "step": 119},
-     {"epoch": 0.8480565371024735, "grad_norm": 0.6328125, "learning_rate": 9.305499220490162e-06, "loss": 1.5816, "step": 120},
-     {"epoch": 0.8551236749116607, "grad_norm": 0.70703125, "learning_rate": 9.290525179375722e-06, "loss": 2.1121, "step": 121},
-     {"epoch": 0.8621908127208481, "grad_norm": 0.67578125, "learning_rate": 9.275403744406282e-06, "loss": 1.9516, "step": 122},
-     {"epoch": 0.8692579505300353, "grad_norm": 0.609375, "learning_rate": 9.260135435053583e-06, "loss": 1.8081, "step": 123},
-     {"epoch": 0.8763250883392226, "grad_norm": 0.62890625, "learning_rate": 9.244720775834993e-06, "loss": 2.2066, "step": 124},
-     {"epoch": 0.8833922261484098, "grad_norm": 0.80859375, "learning_rate": 9.229160296295488e-06, "loss": 2.0408, "step": 125},
-     {"epoch": 0.8904593639575972, "grad_norm": 0.6875, "learning_rate": 9.213454530989454e-06, "loss": 1.7211, "step": 126},
-     {"epoch": 0.8975265017667845, "grad_norm": 0.58984375, "learning_rate": 9.197604019462334e-06, "loss": 0.905, "step": 127},
-     {"epoch": 0.9045936395759717, "grad_norm": 0.65234375, "learning_rate": 9.18160930623208e-06, "loss": 1.4443, "step": 128},
-     {"epoch": 0.911660777385159, "grad_norm": 0.72265625, "learning_rate": 9.165470940770458e-06, "loss": 1.8498, "step": 129},
-     {"epoch": 0.9187279151943463, "grad_norm": 0.6875, "learning_rate": 9.149189477484169e-06, "loss": 1.7645, "step": 130},
-     {"epoch": 0.9257950530035336, "grad_norm": 0.59375, "learning_rate": 9.132765475695795e-06, "loss": 1.7509, "step": 131},
-     {"epoch": 0.9328621908127208, "grad_norm": 0.61328125, "learning_rate": 9.116199499624596e-06, "loss": 2.2509, "step": 132},
-     {"epoch": 0.9399293286219081, "grad_norm": 0.765625, "learning_rate": 9.099492118367123e-06, "loss": 1.5412, "step": 133},
-     {"epoch": 0.9469964664310954, "grad_norm": 0.609375, "learning_rate": 9.082643905877665e-06, "loss": 1.5065, "step": 134},
-     {"epoch": 0.9540636042402827, "grad_norm": 0.625, "learning_rate": 9.065655440948536e-06, "loss": 1.8347, "step": 135},
-     {"epoch": 0.9611307420494699, "grad_norm": 0.765625, "learning_rate": 9.048527307190182e-06, "loss": 2.1884, "step": 136},
-     {"epoch": 0.9681978798586572, "grad_norm": 0.62109375, "learning_rate": 9.03126009301115e-06, "loss": 1.7727, "step": 137},
-     {"epoch": 0.9752650176678446, "grad_norm": 0.61328125, "learning_rate": 9.013854391597856e-06, "loss": 1.7151, "step": 138},
-     {"epoch": 0.9823321554770318, "grad_norm": 0.66796875, "learning_rate": 8.996310800894215e-06, "loss": 1.992, "step": 139},
-     {"epoch": 0.9893992932862191, "grad_norm": 0.69140625, "learning_rate": 8.978629923581104e-06, "loss": 2.2281, "step": 140},
-     {"epoch": 0.9964664310954063, "grad_norm": 1.15625, "learning_rate": 8.960812367055646e-06, "loss": 2.874, "step": 141},
-     {"epoch": 1.0, "grad_norm": 1.0625, "learning_rate": 8.94285874341036e-06, "loss": 2.0683, "step": 142},
-     {"epoch": 1.0070671378091873, "grad_norm": 0.59375, "learning_rate": 8.924769669412117e-06, "loss": 1.8785, "step": 143},
-     {"epoch": 1.0141342756183747, "grad_norm": 0.46484375, "learning_rate": 8.906545766480961e-06, "loss": 1.1151, "step": 144},
-     {"epoch": 1.0212014134275618, "grad_norm": 0.55859375, "learning_rate": 8.888187660668762e-06, "loss": 1.6426, "step": 145},
-     {"epoch": 1.028268551236749, "grad_norm": 0.57421875, "learning_rate": 8.869695982637703e-06, "loss": 1.5826, "step": 146},
-     {"epoch": 1.0353356890459364, "grad_norm": 0.6171875, "learning_rate": 8.851071367638625e-06, "loss": 1.844, "step": 147},
-     {"epoch": 1.0424028268551238, "grad_norm": 1.03125, "learning_rate": 8.832314455489188e-06, "loss": 1.8547, "step": 148},
-     {"epoch": 1.0494699646643109, "grad_norm": 0.6796875, "learning_rate": 8.81342589055191e-06, "loss": 1.834, "step": 149},
-     {"epoch": 1.0565371024734982, "grad_norm": 0.64453125, "learning_rate": 8.794406321712017e-06, "loss": 1.7167, "step": 150},
-     {"epoch": 1.0636042402826855, "grad_norm": 0.55859375, "learning_rate": 8.775256402355155e-06, "loss": 1.6672, "step": 151},
-     {"epoch": 1.0706713780918728, "grad_norm": 0.68359375, "learning_rate": 8.755976790344945e-06, "loss": 1.7684, "step": 152},
-     {"epoch": 1.0777385159010602, "grad_norm": 0.609375, "learning_rate": 8.736568148000386e-06, "loss": 2.1258, "step": 153},
-     {"epoch": 1.0848056537102473, "grad_norm": 0.65234375, "learning_rate": 8.7170311420731e-06, "loss": 1.8917, "step": 154},
-     {"epoch": 1.0918727915194346, "grad_norm": 0.93359375, "learning_rate": 8.697366443724424e-06, "loss": 2.4136, "step": 155},
-     {"epoch": 1.098939929328622, "grad_norm": 0.61328125, "learning_rate": 8.677574728502355e-06, "loss": 1.9583, "step": 156},
-     {"epoch": 1.1060070671378093, "grad_norm": 0.62109375, "learning_rate": 8.657656676318346e-06, "loss": 1.974, "step": 157},
-     {"epoch": 1.1130742049469964, "grad_norm": 0.58203125, "learning_rate": 8.637612971423945e-06, "loss": 1.4882, "step": 158},
-     {"epoch": 1.1201413427561837, "grad_norm": 0.61328125, "learning_rate": 8.617444302387288e-06, "loss": 2.2903, "step": 159},
-     {"epoch": 1.127208480565371, "grad_norm": 0.55078125, "learning_rate": 8.597151362069452e-06, "loss": 1.8166, "step": 160},
-     {"epoch": 1.1342756183745584, "grad_norm": 0.578125, "learning_rate": 8.576734847600639e-06, "loss": 1.9671, "step": 161},
-     {"epoch": 1.1413427561837457, "grad_norm": 0.53125, "learning_rate": 8.55619546035624e-06, "loss": 1.6708, "step": 162},
-     {"epoch": 1.1484098939929328, "grad_norm": 1.2109375, "learning_rate": 8.535533905932739e-06, "loss": 2.8918, "step": 163},
-     {"epoch": 1.1554770318021201, "grad_norm": 0.57421875, "learning_rate": 8.514750894123463e-06, "loss": 1.6384, "step": 164},
-     {"epoch": 1.1625441696113075, "grad_norm": 0.62109375, "learning_rate": 8.49384713889421e-06, "loss": 1.8558, "step": 165},
-     {"epoch": 1.1696113074204948, "grad_norm": 0.5, "learning_rate": 8.472823358358716e-06, "loss": 1.5123, "step": 166},
-     {"epoch": 1.176678445229682, "grad_norm": 0.6171875, "learning_rate": 8.451680274753986e-06, "loss": 2.0303, "step": 167},
-     {"epoch": 1.1837455830388692, "grad_norm": 0.640625, "learning_rate": 8.430418614415488e-06, "loss": 1.8954, "step": 168},
-     {"epoch": 1.1908127208480566, "grad_norm": 0.609375, "learning_rate": 8.40903910775219e-06, "loss": 2.1229, "step": 169},
-     {"epoch": 1.197879858657244, "grad_norm": 0.6796875, "learning_rate": 8.387542489221477e-06, "loss": 1.6479, "step": 170},
-     {"epoch": 1.2049469964664312, "grad_norm": 0.68359375, "learning_rate": 8.36592949730392e-06, "loss": 1.9559, "step": 171},
-     {"epoch": 1.2120141342756183, "grad_norm": 0.921875, "learning_rate": 8.344200874477901e-06, "loss": 2.436, "step": 172},
-     {"epoch": 1.2190812720848057, "grad_norm": 0.64453125, "learning_rate": 8.32235736719411e-06, "loss": 1.8343, "step": 173},
-     {"epoch": 1.226148409893993, "grad_norm": 0.61328125, "learning_rate": 8.300399725849902e-06, "loss": 2.1196, "step": 174},
-     {"epoch": 1.23321554770318, "grad_norm": 0.6953125, "learning_rate": 8.278328704763516e-06, "loss": 2.2121, "step": 175},
-     {"epoch": 1.2402826855123674, "grad_norm": 0.703125, "learning_rate": 8.256145062148168e-06, "loss": 1.8644, "step": 176},
-     {"epoch": 1.2473498233215548, "grad_norm": 0.67578125, "learning_rate": 8.233849560085994e-06, "loss": 2.0178, "step": 177},
-     {"epoch": 1.254416961130742, "grad_norm": 0.6640625, "learning_rate": 8.211442964501879e-06, "loss": 2.1933, "step": 178},
-     {"epoch": 1.2614840989399294, "grad_norm": 0.63671875, "learning_rate": 8.188926045137139e-06, "loss": 1.9427, "step": 179},
-     {"epoch": 1.2685512367491167, "grad_norm": 0.6640625, "learning_rate": 8.166299575523081e-06, "loss": 1.9772, "step": 180},
-     {"epoch": 1.2756183745583038, "grad_norm": 0.6484375, "learning_rate": 8.143564332954426e-06, "loss": 1.6406, "step": 181},
-     {"epoch": 1.2826855123674912, "grad_norm": 0.60546875, "learning_rate": 8.120721098462612e-06, "loss": 1.9615, "step": 182},
-     {"epoch": 1.2897526501766785, "grad_norm": 0.68359375, "learning_rate": 8.097770656788961e-06, "loss": 1.9663, "step": 183},
-     {"epoch": 1.2968197879858656, "grad_norm": 0.62109375, "learning_rate": 8.074713796357717e-06, "loss": 1.756, "step": 184},
-     {"epoch": 1.303886925795053, "grad_norm": 0.69140625, "learning_rate": 8.051551309248961e-06, "loss": 1.8254, "step": 185},
-     {"epoch": 1.3109540636042403, "grad_norm": 0.66015625, "learning_rate": 8.028283991171408e-06, "loss": 1.9618, "step": 186},
-     {"epoch": 1.3180212014134276, "grad_norm": 0.6484375, "learning_rate": 8.004912641435064e-06, "loss": 1.9003, "step": 187},
-     {"epoch": 1.325088339222615, "grad_norm": 0.6640625, "learning_rate": 7.981438062923767e-06, "loss": 1.9284, "step": 188},
-     {"epoch": 1.332155477031802, "grad_norm": 0.71484375, "learning_rate": 7.957861062067614e-06, "loss": 1.9229, "step": 189},
-     {"epoch": 1.3392226148409894, "grad_norm": 0.703125, "learning_rate": 7.934182448815244e-06, "loss": 1.5828, "step": 190},
-     {"epoch": 1.3462897526501767, "grad_norm": 0.58203125, "learning_rate": 7.910403036606028e-06, "loss": 2.1854, "step": 191},
-     {"epoch": 1.353356890459364, "grad_norm": 0.55859375, "learning_rate": 7.88652364234211e-06, "loss": 1.492, "step": 192},
-     {"epoch": 1.3604240282685511, "grad_norm": 0.625,
- "learning_rate": 7.86254508636036e-06,
1360
- "loss": 1.6163,
1361
- "step": 193
1362
- },
1363
- {
1364
- "epoch": 1.3674911660777385,
1365
- "grad_norm": 0.73046875,
1366
- "learning_rate": 7.838468192404176e-06,
1367
- "loss": 1.8491,
1368
- "step": 194
1369
- },
1370
- {
1371
- "epoch": 1.3745583038869258,
1372
- "grad_norm": 0.59765625,
1373
- "learning_rate": 7.814293787595197e-06,
1374
- "loss": 1.472,
1375
- "step": 195
1376
- },
1377
- {
1378
- "epoch": 1.3816254416961131,
1379
- "grad_norm": 0.62890625,
1380
- "learning_rate": 7.790022702404887e-06,
1381
- "loss": 2.0314,
1382
- "step": 196
1383
- },
1384
- {
1385
- "epoch": 1.3886925795053005,
1386
- "grad_norm": 0.64453125,
1387
- "learning_rate": 7.765655770625997e-06,
1388
- "loss": 1.7422,
1389
- "step": 197
1390
- },
1391
- {
1392
- "epoch": 1.3957597173144876,
1393
- "grad_norm": 0.625,
1394
- "learning_rate": 7.741193829343937e-06,
1395
- "loss": 1.1329,
1396
- "step": 198
1397
- },
1398
- {
1399
- "epoch": 1.4028268551236749,
1400
- "grad_norm": 0.9140625,
1401
- "learning_rate": 7.716637718908002e-06,
1402
- "loss": 1.4366,
1403
- "step": 199
1404
- },
1405
- {
1406
- "epoch": 1.4098939929328622,
1407
- "grad_norm": 0.56640625,
1408
- "learning_rate": 7.691988282902519e-06,
1409
- "loss": 1.4888,
1410
- "step": 200
1411
- },
1412
- {
1413
- "epoch": 1.4169611307420495,
1414
- "grad_norm": 0.69140625,
1415
- "learning_rate": 7.667246368117852e-06,
1416
- "loss": 1.8835,
1417
- "step": 201
1418
- },
1419
- {
1420
- "epoch": 1.4240282685512367,
1421
- "grad_norm": 0.60546875,
1422
- "learning_rate": 7.642412824521328e-06,
1423
- "loss": 1.6722,
1424
- "step": 202
1425
- },
1426
- {
1427
- "epoch": 1.431095406360424,
1428
- "grad_norm": 0.6328125,
1429
- "learning_rate": 7.617488505228023e-06,
1430
- "loss": 2.0471,
1431
- "step": 203
1432
- },
1433
- {
1434
- "epoch": 1.4381625441696113,
1435
- "grad_norm": 0.58203125,
1436
- "learning_rate": 7.592474266471464e-06,
1437
- "loss": 1.944,
1438
- "step": 204
1439
- },
1440
- {
1441
- "epoch": 1.4452296819787986,
1442
- "grad_norm": 0.6171875,
1443
- "learning_rate": 7.56737096757421e-06,
1444
- "loss": 1.6996,
1445
- "step": 205
1446
- },
1447
- {
1448
- "epoch": 1.452296819787986,
1449
- "grad_norm": 0.7109375,
1450
- "learning_rate": 7.542179470918336e-06,
1451
- "loss": 1.898,
1452
- "step": 206
1453
- },
1454
- {
1455
- "epoch": 1.459363957597173,
1456
- "grad_norm": 0.62890625,
1457
- "learning_rate": 7.5169006419157985e-06,
1458
- "loss": 1.6575,
1459
- "step": 207
1460
- },
1461
- {
1462
- "epoch": 1.4664310954063604,
1463
- "grad_norm": 0.6328125,
1464
- "learning_rate": 7.491535348978719e-06,
1465
- "loss": 1.6194,
1466
- "step": 208
1467
- },
1468
- {
1469
- "epoch": 1.4734982332155477,
1470
- "grad_norm": 0.79296875,
1471
- "learning_rate": 7.466084463489537e-06,
1472
- "loss": 2.1004,
1473
- "step": 209
1474
- },
1475
- {
1476
- "epoch": 1.4805653710247348,
1477
- "grad_norm": 0.8828125,
1478
- "learning_rate": 7.440548859771086e-06,
1479
- "loss": 2.1903,
1480
- "step": 210
1481
- },
1482
- {
1483
- "epoch": 1.4876325088339222,
1484
- "grad_norm": 0.73046875,
1485
- "learning_rate": 7.414929415056551e-06,
1486
- "loss": 2.0521,
1487
- "step": 211
1488
- },
1489
- {
1490
- "epoch": 1.4946996466431095,
1491
- "grad_norm": 0.6484375,
1492
- "learning_rate": 7.389227009459335e-06,
1493
- "loss": 1.8102,
1494
- "step": 212
1495
- },
1496
- {
1497
- "epoch": 1.5017667844522968,
1498
- "grad_norm": 0.56640625,
1499
- "learning_rate": 7.363442525942827e-06,
1500
- "loss": 1.8402,
1501
- "step": 213
1502
- },
1503
- {
1504
- "epoch": 1.5088339222614842,
1505
- "grad_norm": 0.65234375,
1506
- "learning_rate": 7.3375768502900626e-06,
1507
- "loss": 1.949,
1508
- "step": 214
1509
- },
1510
- {
1511
- "epoch": 1.5159010600706715,
1512
- "grad_norm": 0.65625,
1513
- "learning_rate": 7.311630871073301e-06,
1514
- "loss": 1.6098,
1515
- "step": 215
1516
- },
1517
- {
1518
- "epoch": 1.5229681978798588,
1519
- "grad_norm": 0.62890625,
1520
- "learning_rate": 7.2856054796234944e-06,
1521
- "loss": 1.4942,
1522
- "step": 216
1523
- },
1524
- {
1525
- "epoch": 1.530035335689046,
1526
- "grad_norm": 0.6328125,
1527
- "learning_rate": 7.25950156999967e-06,
1528
- "loss": 1.9404,
1529
- "step": 217
1530
- },
1531
- {
1532
- "epoch": 1.5371024734982333,
1533
- "grad_norm": 0.64453125,
1534
- "learning_rate": 7.23332003895822e-06,
1535
- "loss": 1.8286,
1536
- "step": 218
1537
- },
1538
- {
1539
- "epoch": 1.5441696113074204,
1540
- "grad_norm": 0.75390625,
1541
- "learning_rate": 7.207061785922089e-06,
1542
- "loss": 2.0283,
1543
- "step": 219
1544
- },
1545
- {
1546
- "epoch": 1.5512367491166077,
1547
- "grad_norm": 0.6953125,
1548
- "learning_rate": 7.1807277129498774e-06,
1549
- "loss": 1.8301,
1550
- "step": 220
1551
- },
1552
- {
1553
- "epoch": 1.558303886925795,
1554
- "grad_norm": 0.62109375,
1555
- "learning_rate": 7.1543187247048525e-06,
1556
- "loss": 2.0651,
1557
- "step": 221
1558
- },
1559
- {
1560
- "epoch": 1.5653710247349824,
1561
- "grad_norm": 0.6015625,
1562
- "learning_rate": 7.1278357284238745e-06,
1563
- "loss": 1.6387,
1564
- "step": 222
1565
- },
1566
- {
1567
- "epoch": 1.5724381625441697,
1568
- "grad_norm": 0.64453125,
1569
- "learning_rate": 7.101279633886222e-06,
1570
- "loss": 1.5276,
1571
- "step": 223
1572
- },
1573
- {
1574
- "epoch": 1.579505300353357,
1575
- "grad_norm": 0.609375,
1576
- "learning_rate": 7.074651353382349e-06,
1577
- "loss": 1.8464,
1578
- "step": 224
1579
- },
1580
- {
1581
- "epoch": 1.5865724381625441,
1582
- "grad_norm": 0.65625,
1583
- "learning_rate": 7.047951801682533e-06,
1584
- "loss": 2.1482,
1585
- "step": 225
1586
- },
1587
- {
1588
- "epoch": 1.5936395759717314,
1589
- "grad_norm": 0.63671875,
1590
- "learning_rate": 7.021181896005456e-06,
1591
- "loss": 2.2091,
1592
- "step": 226
1593
- },
1594
- {
1595
- "epoch": 1.6007067137809188,
1596
- "grad_norm": 0.59765625,
1597
- "learning_rate": 6.994342555986692e-06,
1598
- "loss": 1.707,
1599
- "step": 227
1600
- },
1601
- {
1602
- "epoch": 1.6077738515901059,
1603
- "grad_norm": 0.6015625,
1604
- "learning_rate": 6.967434703647123e-06,
1605
- "loss": 1.7147,
1606
- "step": 228
1607
- },
1608
- {
1609
- "epoch": 1.6148409893992932,
1610
- "grad_norm": 0.59375,
1611
- "learning_rate": 6.9404592633612486e-06,
1612
- "loss": 1.5438,
1613
- "step": 229
1614
- },
1615
- {
1616
- "epoch": 1.6219081272084805,
1617
- "grad_norm": 0.640625,
1618
- "learning_rate": 6.913417161825449e-06,
1619
- "loss": 1.8477,
1620
- "step": 230
1621
- },
1622
- {
1623
- "epoch": 1.6289752650176679,
1624
- "grad_norm": 0.55859375,
1625
- "learning_rate": 6.886309328026135e-06,
1626
- "loss": 1.4667,
1627
- "step": 231
1628
- },
1629
- {
1630
- "epoch": 1.6360424028268552,
1631
- "grad_norm": 0.5859375,
1632
- "learning_rate": 6.85913669320784e-06,
1633
- "loss": 1.4187,
1634
- "step": 232
1635
- },
1636
- {
1637
- "epoch": 1.6431095406360425,
1638
- "grad_norm": 0.59765625,
1639
- "learning_rate": 6.831900190841232e-06,
1640
- "loss": 2.2355,
1641
- "step": 233
1642
- },
1643
- {
1644
- "epoch": 1.6501766784452296,
1645
- "grad_norm": 0.5703125,
1646
- "learning_rate": 6.804600756591037e-06,
1647
- "loss": 1.9369,
1648
- "step": 234
1649
- },
1650
- {
1651
- "epoch": 1.657243816254417,
1652
- "grad_norm": 0.671875,
1653
- "learning_rate": 6.777239328283909e-06,
1654
- "loss": 1.8834,
1655
- "step": 235
1656
- },
1657
- {
1658
- "epoch": 1.664310954063604,
1659
- "grad_norm": 0.65625,
1660
- "learning_rate": 6.749816845876196e-06,
1661
- "loss": 1.8316,
1662
- "step": 236
1663
- },
1664
- {
1665
- "epoch": 1.6713780918727914,
1666
- "grad_norm": 0.5859375,
1667
- "learning_rate": 6.722334251421665e-06,
1668
- "loss": 1.5946,
1669
- "step": 237
1670
- },
1671
- {
1672
- "epoch": 1.6784452296819787,
1673
- "grad_norm": 0.625,
1674
- "learning_rate": 6.6947924890391295e-06,
1675
- "loss": 1.6439,
1676
- "step": 238
1677
- },
1678
- {
1679
- "epoch": 1.685512367491166,
1680
- "grad_norm": 0.625,
1681
- "learning_rate": 6.667192504880016e-06,
1682
- "loss": 2.0241,
1683
- "step": 239
1684
- },
1685
- {
1686
- "epoch": 1.6925795053003534,
1687
- "grad_norm": 0.59375,
1688
- "learning_rate": 6.639535247095868e-06,
1689
- "loss": 1.5223,
1690
- "step": 240
1691
- },
1692
- {
1693
- "epoch": 1.6996466431095407,
1694
- "grad_norm": 0.6796875,
1695
- "learning_rate": 6.611821665805769e-06,
1696
- "loss": 2.1195,
1697
- "step": 241
1698
- },
1699
- {
1700
- "epoch": 1.706713780918728,
1701
- "grad_norm": 0.51953125,
1702
- "learning_rate": 6.5840527130637e-06,
1703
- "loss": 1.4851,
1704
- "step": 242
1705
- },
1706
- {
1707
- "epoch": 1.7137809187279152,
1708
- "grad_norm": 0.63671875,
1709
- "learning_rate": 6.556229342825835e-06,
1710
- "loss": 1.8208,
1711
- "step": 243
1712
- },
1713
- {
1714
- "epoch": 1.7208480565371025,
1715
- "grad_norm": 0.59765625,
1716
- "learning_rate": 6.528352510917774e-06,
1717
- "loss": 1.5442,
1718
- "step": 244
1719
- },
1720
- {
1721
- "epoch": 1.7279151943462896,
1722
- "grad_norm": 0.66015625,
1723
- "learning_rate": 6.500423175001705e-06,
1724
- "loss": 1.8611,
1725
- "step": 245
1726
- },
1727
- {
1728
- "epoch": 1.734982332155477,
1729
- "grad_norm": 0.953125,
1730
- "learning_rate": 6.472442294543497e-06,
1731
- "loss": 2.7323,
1732
- "step": 246
1733
- },
1734
- {
1735
- "epoch": 1.7420494699646643,
1736
- "grad_norm": 0.83203125,
1737
- "learning_rate": 6.444410830779753e-06,
1738
- "loss": 1.4034,
1739
- "step": 247
1740
- },
1741
- {
1742
- "epoch": 1.7491166077738516,
1743
- "grad_norm": 0.609375,
1744
- "learning_rate": 6.4163297466847795e-06,
1745
- "loss": 2.1511,
1746
- "step": 248
1747
- },
1748
- {
1749
- "epoch": 1.756183745583039,
1750
- "grad_norm": 0.60546875,
1751
- "learning_rate": 6.388200006937503e-06,
1752
- "loss": 1.6779,
1753
- "step": 249
1754
- },
1755
- {
1756
- "epoch": 1.7632508833922262,
1757
- "grad_norm": 0.5703125,
1758
- "learning_rate": 6.3600225778883395e-06,
1759
- "loss": 1.3638,
1760
- "step": 250
1761
- },
1762
- {
1763
- "epoch": 1.7703180212014136,
1764
- "grad_norm": 0.625,
1765
- "learning_rate": 6.33179842752599e-06,
1766
- "loss": 1.8632,
1767
- "step": 251
1768
- },
1769
- {
1770
- "epoch": 1.7773851590106007,
1771
- "grad_norm": 0.59765625,
1772
- "learning_rate": 6.303528525444185e-06,
1773
- "loss": 2.0674,
1774
- "step": 252
1775
- },
1776
- {
1777
- "epoch": 1.784452296819788,
1778
- "grad_norm": 0.80078125,
1779
- "learning_rate": 6.275213842808383e-06,
1780
- "loss": 2.6682,
1781
- "step": 253
1782
- },
1783
- {
1784
- "epoch": 1.7915194346289751,
1785
- "grad_norm": 0.90234375,
1786
- "learning_rate": 6.246855352322403e-06,
1787
- "loss": 3.1173,
1788
- "step": 254
1789
- },
1790
- {
1791
- "epoch": 1.7985865724381624,
1792
- "grad_norm": 0.6171875,
1793
- "learning_rate": 6.21845402819501e-06,
1794
- "loss": 2.3359,
1795
- "step": 255
1796
- },
1797
- {
1798
- "epoch": 1.8056537102473498,
1799
- "grad_norm": 0.59765625,
1800
- "learning_rate": 6.190010846106446e-06,
1801
- "loss": 1.6528,
1802
- "step": 256
1803
- },
1804
- {
1805
- "epoch": 1.812720848056537,
1806
- "grad_norm": 0.69140625,
1807
- "learning_rate": 6.161526783174917e-06,
1808
- "loss": 2.0144,
1809
- "step": 257
1810
- },
1811
- {
1812
- "epoch": 1.8197879858657244,
1813
- "grad_norm": 0.6796875,
1814
- "learning_rate": 6.1330028179230185e-06,
1815
- "loss": 2.078,
1816
- "step": 258
1817
- },
1818
- {
1819
- "epoch": 1.8268551236749118,
1820
- "grad_norm": 0.6640625,
1821
- "learning_rate": 6.104439930244125e-06,
1822
- "loss": 2.0176,
1823
- "step": 259
1824
- },
1825
- {
1826
- "epoch": 1.833922261484099,
1827
- "grad_norm": 0.95703125,
1828
- "learning_rate": 6.075839101368728e-06,
1829
- "loss": 1.7709,
1830
- "step": 260
1831
- },
1832
- {
1833
- "epoch": 1.8409893992932862,
1834
- "grad_norm": 0.6484375,
1835
- "learning_rate": 6.047201313830724e-06,
1836
- "loss": 1.7037,
1837
- "step": 261
1838
- },
1839
- {
1840
- "epoch": 1.8480565371024735,
1841
- "grad_norm": 0.6796875,
1842
- "learning_rate": 6.01852755143366e-06,
1843
- "loss": 1.8551,
1844
- "step": 262
1845
- },
1846
- {
1847
- "epoch": 1.8551236749116606,
1848
- "grad_norm": 0.640625,
1849
- "learning_rate": 5.98981879921695e-06,
1850
- "loss": 1.7625,
1851
- "step": 263
1852
- },
1853
- {
1854
- "epoch": 1.862190812720848,
1855
- "grad_norm": 0.59765625,
1856
- "learning_rate": 5.961076043422011e-06,
1857
- "loss": 2.0804,
1858
- "step": 264
1859
- },
1860
- {
1861
- "epoch": 1.8692579505300353,
1862
- "grad_norm": 0.625,
1863
- "learning_rate": 5.932300271458406e-06,
1864
- "loss": 1.7134,
1865
- "step": 265
1866
- },
1867
- {
1868
- "epoch": 1.8763250883392226,
1869
- "grad_norm": 0.57421875,
1870
- "learning_rate": 5.90349247186991e-06,
1871
- "loss": 1.9591,
1872
- "step": 266
1873
- },
1874
- {
1875
- "epoch": 1.88339222614841,
1876
- "grad_norm": 0.71875,
1877
- "learning_rate": 5.874653634300555e-06,
1878
- "loss": 1.8638,
1879
- "step": 267
1880
- },
1881
- {
1882
- "epoch": 1.8904593639575973,
1883
- "grad_norm": 0.65625,
1884
- "learning_rate": 5.845784749460632e-06,
1885
- "loss": 1.9674,
1886
- "step": 268
1887
- },
1888
- {
1889
- "epoch": 1.8975265017667846,
1890
- "grad_norm": 0.60546875,
1891
- "learning_rate": 5.816886809092651e-06,
1892
- "loss": 1.8133,
1893
- "step": 269
1894
- },
1895
- {
1896
- "epoch": 1.9045936395759717,
1897
- "grad_norm": 0.71484375,
1898
- "learning_rate": 5.787960805937283e-06,
1899
- "loss": 2.166,
1900
- "step": 270
1901
- },
1902
- {
1903
- "epoch": 1.911660777385159,
1904
- "grad_norm": 0.61328125,
1905
- "learning_rate": 5.759007733699245e-06,
1906
- "loss": 1.818,
1907
- "step": 271
1908
- },
1909
- {
1910
- "epoch": 1.9187279151943462,
1911
- "grad_norm": 0.58203125,
1912
- "learning_rate": 5.730028587013168e-06,
1913
- "loss": 1.4026,
1914
- "step": 272
1915
- },
1916
- {
1917
- "epoch": 1.9257950530035335,
1918
- "grad_norm": 0.69921875,
1919
- "learning_rate": 5.701024361409431e-06,
1920
- "loss": 2.0534,
1921
- "step": 273
1922
- },
1923
- {
1924
- "epoch": 1.9328621908127208,
1925
- "grad_norm": 0.734375,
1926
- "learning_rate": 5.671996053279949e-06,
1927
- "loss": 1.7194,
1928
- "step": 274
1929
- },
1930
- {
1931
- "epoch": 1.9399293286219081,
1932
- "grad_norm": 0.69140625,
1933
- "learning_rate": 5.642944659843962e-06,
1934
- "loss": 2.1351,
1935
- "step": 275
1936
- },
1937
- {
1938
- "epoch": 1.9469964664310955,
1939
- "grad_norm": 0.6796875,
1940
- "learning_rate": 5.613871179113761e-06,
1941
- "loss": 1.8866,
1942
- "step": 276
1943
- },
1944
- {
1945
- "epoch": 1.9540636042402828,
1946
- "grad_norm": 0.69921875,
1947
- "learning_rate": 5.584776609860414e-06,
1948
- "loss": 2.0398,
1949
- "step": 277
1950
- },
1951
- {
1952
- "epoch": 1.96113074204947,
1953
- "grad_norm": 0.703125,
1954
- "learning_rate": 5.555661951579443e-06,
1955
- "loss": 1.4643,
1956
- "step": 278
1957
- },
1958
- {
1959
- "epoch": 1.9681978798586572,
1960
- "grad_norm": 0.58984375,
1961
- "learning_rate": 5.5265282044565005e-06,
1962
- "loss": 1.4311,
1963
- "step": 279
1964
- },
1965
- {
1966
- "epoch": 1.9752650176678446,
1967
- "grad_norm": 0.69921875,
1968
- "learning_rate": 5.497376369333005e-06,
1969
- "loss": 1.8944,
1970
- "step": 280
1971
- },
1972
- {
1973
- "epoch": 1.9823321554770317,
1974
- "grad_norm": 0.62109375,
1975
- "learning_rate": 5.468207447671755e-06,
1976
- "loss": 2.3742,
1977
- "step": 281
1978
- },
1979
- {
1980
- "epoch": 1.989399293286219,
1981
- "grad_norm": 0.625,
1982
- "learning_rate": 5.43902244152253e-06,
1983
- "loss": 1.7038,
1984
- "step": 282
1985
- }
1986
- ],
1987
- "logging_steps": 1,
1988
- "max_steps": 564,
1989
- "num_input_tokens_seen": 0,
1990
- "num_train_epochs": 4,
1991
- "save_steps": 141,
1992
- "stateful_callbacks": {
1993
- "TrainerControl": {
1994
- "args": {
1995
- "should_epoch_stop": false,
1996
- "should_evaluate": false,
1997
- "should_log": false,
1998
- "should_save": true,
1999
- "should_training_stop": false
2000
- },
2001
- "attributes": {}
2002
- }
2003
- },
2004
- "total_flos": 8.84019452486615e+17,
2005
- "train_batch_size": 2,
2006
- "trial_name": null,
2007
- "trial_params": null
2008
- }
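
The deleted log above is plain JSON, so the per-step metrics survive in any revision where checkpoint-282 still exists. A minimal sketch for reading them back, assuming a locally restored copy of trainer_state.json and assuming the list shown above is the Trainer's standard log_history field (the path below is illustrative):

import json

# Assumes an earlier revision of the repo (with checkpoint-282) is checked out.
with open("checkpoint-282/trainer_state.json") as f:
    state = json.load(f)

# Each entry mirrors one logged step from the diff above:
# {"epoch": ..., "grad_norm": ..., "learning_rate": ..., "loss": ..., "step": ...}
for entry in state["log_history"]:
    print(f'step {entry["step"]:>3}  loss {entry["loss"]:.4f}  lr {entry["learning_rate"]:.3e}')
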
checkpoint-282/training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:406982d99a22da9752c26535cc9f5894b29792d701bc3fe309a0db5d133444df
- size 6840
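
The three deleted lines above are a Git LFS pointer rather than the file itself: the real 6840-byte training_args.bin is addressed by its sha256 oid and must be fetched (for example with git lfs pull) before it can be read. A minimal sketch for loading it, assuming an earlier revision is checked out with the LFS object present; Trainer writes this file with torch.save, so it unpickles to a TrainingArguments object:

import torch

# weights_only=False is required because the file holds a pickled
# TrainingArguments object, not a plain tensor state dict.
args = torch.load("checkpoint-282/training_args.bin", weights_only=False)
print(args.num_train_epochs, args.save_steps)
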
checkpoint-282/vocab.json DELETED
The diff for this file is too large to render. See raw diff
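
Although the viewer will not render vocab.json because of its size, for a GPT-2-style BPE tokenizer it is typically an ordinary token-string-to-id JSON map, so it can still be inspected locally. A minimal sketch under the same checked-out-revision assumption:

import json

with open("checkpoint-282/vocab.json") as f:
    vocab = json.load(f)

print(len(vocab))           # number of vocabulary entries on disk
print(max(vocab.values()))  # highest token id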