Pclanglais committed on
Commit 367679c · verified · 1 Parent(s): 60a5eed

Upload folder using huggingface_hub

config.json ADDED
@@ -0,0 +1,59 @@
+ {
+   "architectures": [
+     "ModernBertForSequenceClassification"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 50281,
+   "classifier_activation": "gelu",
+   "classifier_bias": false,
+   "classifier_dropout": 0.0,
+   "classifier_pooling": "mean",
+   "cls_token_id": 50281,
+   "decoder_bias": true,
+   "deterministic_flash_attn": false,
+   "embedding_dropout": 0.0,
+   "eos_token_id": 50282,
+   "global_attn_every_n_layers": 3,
+   "global_rope_theta": 160000.0,
+   "gradient_checkpointing": false,
+   "hidden_activation": "gelu",
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "inheritance_inpassing",
+     "1": "inheritance_nonexistent",
+     "2": "inheritance_main_topic",
+     "3": "inheritance_topic"
+   },
+   "initializer_cutoff_factor": 2.0,
+   "initializer_range": 0.02,
+   "intermediate_size": 2624,
+   "label2id": {
+     "inheritance_inpassing": 0,
+     "inheritance_main_topic": 2,
+     "inheritance_nonexistent": 1,
+     "inheritance_topic": 3
+   },
+   "layer_norm_eps": 1e-05,
+   "local_attention": 128,
+   "local_rope_theta": 10000.0,
+   "max_position_embeddings": 8192,
+   "mlp_bias": false,
+   "mlp_dropout": 0.0,
+   "model_type": "modernbert",
+   "norm_bias": false,
+   "norm_eps": 1e-05,
+   "num_attention_heads": 16,
+   "num_hidden_layers": 28,
+   "pad_token_id": 50283,
+   "position_embedding_type": "absolute",
+   "problem_type": "multi_label_classification",
+   "reference_compile": true,
+   "repad_logits_with_grad": false,
+   "sep_token_id": 50282,
+   "sparse_pred_ignore_index": -100,
+   "sparse_prediction": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.50.0",
+   "vocab_size": 50368
+ }
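
The config above describes a ModernBERT encoder (28 layers, hidden size 1024, 8192-token context) fine-tuned as a four-label saliency classifier with problem_type set to multi_label_classification. A minimal sketch of how such a checkpoint can be loaded and queried with transformers; the local path "./checkpoint" is an assumption standing in for the folder that holds the files added in this commit:

```python
# Minimal sketch, assuming "./checkpoint" contains this commit's config.json,
# model.safetensors and tokenizer files, and a transformers version that ships
# ModernBERT (the config records 4.50.0).
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

tokenizer = AutoTokenizer.from_pretrained("./checkpoint")
model = AutoModelForSequenceClassification.from_pretrained("./checkpoint")
model.eval()

text = "Example passage to score."  # placeholder input
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=8192)
with torch.no_grad():
    logits = model(**inputs).logits

# problem_type is multi_label_classification, so each label gets an independent
# sigmoid score rather than a softmax over the four classes.
scores = torch.sigmoid(logits)[0]
for idx, score in enumerate(scores.tolist()):
    print(model.config.id2label[idx], round(score, 3))
```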
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d2d5c085f440e7bcf318e5b4cb09b1352f2834702eeef32ea5541b1b43bbd822
+ size 1583359840
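
The three lines above are a Git LFS pointer, not the weights themselves: the real model.safetensors is addressed by its sha256 and size. A small sketch, assuming the file has already been downloaded to the working directory, that checks the download against the pointer:

```python
# Minimal sketch: verify a downloaded model.safetensors against the LFS pointer.
# The filename is an assumption; adjust it to wherever the weights were saved.
import hashlib

EXPECTED_OID = "d2d5c085f440e7bcf318e5b4cb09b1352f2834702eeef32ea5541b1b43bbd822"
EXPECTED_SIZE = 1_583_359_840

digest = hashlib.sha256()
size = 0
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"unexpected size: {size}"
assert digest.hexdigest() == EXPECTED_OID, "sha256 does not match the pointer"
print("model.safetensors matches the LFS pointer")
```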
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2147b708f3ce1eaa4c8700d166073f07c8fe9c20d6214d53783e32b400686cd7
+ size 3166829562
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c062f7f375beded48b5337f5a3f3a5cb38807fa3e85dbf3e294c0ab6b627bfc2
+ size 14244
scaler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cbebdcc5894ea8cff9f58bb2595e1204adaaf872a439b9ba78f956d9339b0a52
+ size 988
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a2652cf357cd7393c76836bde50fd1ceb04db4dc509376dd9574f6ca3a96a12
+ size 1064
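
optimizer.pt, rng_state.pth, scaler.pt and scheduler.pt are the extra state that the Trainer writes next to the weights so a run can be resumed from this exact step. A minimal sketch for inspecting them locally, assuming the files sit in the current directory; the key names noted in the comments are the usual PyTorch state-dict layout, not something recorded in this commit:

```python
# Minimal sketch: peek at the resume artifacts saved alongside the weights.
# Loaded read-only on CPU; filenames match the files added in this commit.
import torch

optimizer_state = torch.load("optimizer.pt", map_location="cpu")
scheduler_state = torch.load("scheduler.pt", map_location="cpu")
scaler_state = torch.load("scaler.pt", map_location="cpu")

# An optimizer state dict normally carries "state" (per-parameter buffers) and
# "param_groups" (hyperparameters such as the learning rate).
print(sorted(optimizer_state.keys()))
print(scheduler_state)  # learning-rate scheduler state (e.g. last_epoch)
print(scaler_state)     # mixed-precision GradScaler state
```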
special_tokens_map.json ADDED
@@ -0,0 +1,37 @@
+ {
+   "cls_token": {
+     "content": "[CLS]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "mask_token": {
+     "content": "[MASK]",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "[PAD]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "sep_token": {
+     "content": "[SEP]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "[UNK]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
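
These special tokens line up with the ids declared in config.json ([CLS] 50281, [SEP] 50282, [PAD] 50283) and with the added_tokens_decoder entries in tokenizer_config.json below. A quick sketch, again assuming a local "./checkpoint" folder with this commit's tokenizer files, that confirms the mapping after loading:

```python
# Minimal sketch: check that the special tokens resolve to the expected ids.
# "./checkpoint" is a placeholder for the folder holding this commit's files.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")
print(tok.cls_token, tok.cls_token_id)    # [CLS] 50281
print(tok.sep_token, tok.sep_token_id)    # [SEP] 50282
print(tok.pad_token, tok.pad_token_id)    # [PAD] 50283
print(tok.mask_token, tok.mask_token_id)  # [MASK] 50284
print(tok.unk_token, tok.unk_token_id)    # [UNK] 50280
```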
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,945 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "|||IP_ADDRESS|||",
5
+ "lstrip": false,
6
+ "normalized": true,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": false
10
+ },
11
+ "1": {
12
+ "content": "<|padding|>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "50254": {
20
+ "content": " ",
21
+ "lstrip": false,
22
+ "normalized": true,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": false
26
+ },
27
+ "50255": {
28
+ "content": " ",
29
+ "lstrip": false,
30
+ "normalized": true,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": false
34
+ },
35
+ "50256": {
36
+ "content": " ",
37
+ "lstrip": false,
38
+ "normalized": true,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": false
42
+ },
43
+ "50257": {
44
+ "content": " ",
45
+ "lstrip": false,
46
+ "normalized": true,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": false
50
+ },
51
+ "50258": {
52
+ "content": " ",
53
+ "lstrip": false,
54
+ "normalized": true,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": false
58
+ },
59
+ "50259": {
60
+ "content": " ",
61
+ "lstrip": false,
62
+ "normalized": true,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": false
66
+ },
67
+ "50260": {
68
+ "content": " ",
69
+ "lstrip": false,
70
+ "normalized": true,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": false
74
+ },
75
+ "50261": {
76
+ "content": " ",
77
+ "lstrip": false,
78
+ "normalized": true,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": false
82
+ },
83
+ "50262": {
84
+ "content": " ",
85
+ "lstrip": false,
86
+ "normalized": true,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": false
90
+ },
91
+ "50263": {
92
+ "content": " ",
93
+ "lstrip": false,
94
+ "normalized": true,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": false
98
+ },
99
+ "50264": {
100
+ "content": " ",
101
+ "lstrip": false,
102
+ "normalized": true,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": false
106
+ },
107
+ "50265": {
108
+ "content": " ",
109
+ "lstrip": false,
110
+ "normalized": true,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": false
114
+ },
115
+ "50266": {
116
+ "content": " ",
117
+ "lstrip": false,
118
+ "normalized": true,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": false
122
+ },
123
+ "50267": {
124
+ "content": " ",
125
+ "lstrip": false,
126
+ "normalized": true,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": false
130
+ },
131
+ "50268": {
132
+ "content": " ",
133
+ "lstrip": false,
134
+ "normalized": true,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": false
138
+ },
139
+ "50269": {
140
+ "content": " ",
141
+ "lstrip": false,
142
+ "normalized": true,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": false
146
+ },
147
+ "50270": {
148
+ "content": " ",
149
+ "lstrip": false,
150
+ "normalized": true,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": false
154
+ },
155
+ "50271": {
156
+ "content": " ",
157
+ "lstrip": false,
158
+ "normalized": true,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": false
162
+ },
163
+ "50272": {
164
+ "content": " ",
165
+ "lstrip": false,
166
+ "normalized": true,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": false
170
+ },
171
+ "50273": {
172
+ "content": " ",
173
+ "lstrip": false,
174
+ "normalized": true,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": false
178
+ },
179
+ "50274": {
180
+ "content": " ",
181
+ "lstrip": false,
182
+ "normalized": true,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": false
186
+ },
187
+ "50275": {
188
+ "content": " ",
189
+ "lstrip": false,
190
+ "normalized": true,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": false
194
+ },
195
+ "50276": {
196
+ "content": " ",
197
+ "lstrip": false,
198
+ "normalized": true,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": false
202
+ },
203
+ "50277": {
204
+ "content": "|||EMAIL_ADDRESS|||",
205
+ "lstrip": false,
206
+ "normalized": true,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": false
210
+ },
211
+ "50278": {
212
+ "content": "|||PHONE_NUMBER|||",
213
+ "lstrip": false,
214
+ "normalized": true,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": false
218
+ },
219
+ "50279": {
220
+ "content": "<|endoftext|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "50280": {
228
+ "content": "[UNK]",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "50281": {
236
+ "content": "[CLS]",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "50282": {
244
+ "content": "[SEP]",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "50283": {
252
+ "content": "[PAD]",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "50284": {
260
+ "content": "[MASK]",
261
+ "lstrip": true,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "50285": {
268
+ "content": "[unused0]",
269
+ "lstrip": false,
270
+ "normalized": true,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": false
274
+ },
275
+ "50286": {
276
+ "content": "[unused1]",
277
+ "lstrip": false,
278
+ "normalized": true,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": false
282
+ },
283
+ "50287": {
284
+ "content": "[unused2]",
285
+ "lstrip": false,
286
+ "normalized": true,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": false
290
+ },
291
+ "50288": {
292
+ "content": "[unused3]",
293
+ "lstrip": false,
294
+ "normalized": true,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": false
298
+ },
299
+ "50289": {
300
+ "content": "[unused4]",
301
+ "lstrip": false,
302
+ "normalized": true,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": false
306
+ },
307
+ "50290": {
308
+ "content": "[unused5]",
309
+ "lstrip": false,
310
+ "normalized": true,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": false
314
+ },
315
+ "50291": {
316
+ "content": "[unused6]",
317
+ "lstrip": false,
318
+ "normalized": true,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": false
322
+ },
323
+ "50292": {
324
+ "content": "[unused7]",
325
+ "lstrip": false,
326
+ "normalized": true,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": false
330
+ },
331
+ "50293": {
332
+ "content": "[unused8]",
333
+ "lstrip": false,
334
+ "normalized": true,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": false
338
+ },
339
+ "50294": {
340
+ "content": "[unused9]",
341
+ "lstrip": false,
342
+ "normalized": true,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": false
346
+ },
347
+ "50295": {
348
+ "content": "[unused10]",
349
+ "lstrip": false,
350
+ "normalized": true,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": false
354
+ },
355
+ "50296": {
356
+ "content": "[unused11]",
357
+ "lstrip": false,
358
+ "normalized": true,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": false
362
+ },
363
+ "50297": {
364
+ "content": "[unused12]",
365
+ "lstrip": false,
366
+ "normalized": true,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": false
370
+ },
371
+ "50298": {
372
+ "content": "[unused13]",
373
+ "lstrip": false,
374
+ "normalized": true,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": false
378
+ },
379
+ "50299": {
380
+ "content": "[unused14]",
381
+ "lstrip": false,
382
+ "normalized": true,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": false
386
+ },
387
+ "50300": {
388
+ "content": "[unused15]",
389
+ "lstrip": false,
390
+ "normalized": true,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": false
394
+ },
395
+ "50301": {
396
+ "content": "[unused16]",
397
+ "lstrip": false,
398
+ "normalized": true,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": false
402
+ },
403
+ "50302": {
404
+ "content": "[unused17]",
405
+ "lstrip": false,
406
+ "normalized": true,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": false
410
+ },
411
+ "50303": {
412
+ "content": "[unused18]",
413
+ "lstrip": false,
414
+ "normalized": true,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": false
418
+ },
419
+ "50304": {
420
+ "content": "[unused19]",
421
+ "lstrip": false,
422
+ "normalized": true,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": false
426
+ },
427
+ "50305": {
428
+ "content": "[unused20]",
429
+ "lstrip": false,
430
+ "normalized": true,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": false
434
+ },
435
+ "50306": {
436
+ "content": "[unused21]",
437
+ "lstrip": false,
438
+ "normalized": true,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": false
442
+ },
443
+ "50307": {
444
+ "content": "[unused22]",
445
+ "lstrip": false,
446
+ "normalized": true,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": false
450
+ },
451
+ "50308": {
452
+ "content": "[unused23]",
453
+ "lstrip": false,
454
+ "normalized": true,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": false
458
+ },
459
+ "50309": {
460
+ "content": "[unused24]",
461
+ "lstrip": false,
462
+ "normalized": true,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": false
466
+ },
467
+ "50310": {
468
+ "content": "[unused25]",
469
+ "lstrip": false,
470
+ "normalized": true,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": false
474
+ },
475
+ "50311": {
476
+ "content": "[unused26]",
477
+ "lstrip": false,
478
+ "normalized": true,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": false
482
+ },
483
+ "50312": {
484
+ "content": "[unused27]",
485
+ "lstrip": false,
486
+ "normalized": true,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": false
490
+ },
491
+ "50313": {
492
+ "content": "[unused28]",
493
+ "lstrip": false,
494
+ "normalized": true,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": false
498
+ },
499
+ "50314": {
500
+ "content": "[unused29]",
501
+ "lstrip": false,
502
+ "normalized": true,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": false
506
+ },
507
+ "50315": {
508
+ "content": "[unused30]",
509
+ "lstrip": false,
510
+ "normalized": true,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": false
514
+ },
515
+ "50316": {
516
+ "content": "[unused31]",
517
+ "lstrip": false,
518
+ "normalized": true,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": false
522
+ },
523
+ "50317": {
524
+ "content": "[unused32]",
525
+ "lstrip": false,
526
+ "normalized": true,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": false
530
+ },
531
+ "50318": {
532
+ "content": "[unused33]",
533
+ "lstrip": false,
534
+ "normalized": true,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": false
538
+ },
539
+ "50319": {
540
+ "content": "[unused34]",
541
+ "lstrip": false,
542
+ "normalized": true,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": false
546
+ },
547
+ "50320": {
548
+ "content": "[unused35]",
549
+ "lstrip": false,
550
+ "normalized": true,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": false
554
+ },
555
+ "50321": {
556
+ "content": "[unused36]",
557
+ "lstrip": false,
558
+ "normalized": true,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": false
562
+ },
563
+ "50322": {
564
+ "content": "[unused37]",
565
+ "lstrip": false,
566
+ "normalized": true,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": false
570
+ },
571
+ "50323": {
572
+ "content": "[unused38]",
573
+ "lstrip": false,
574
+ "normalized": true,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": false
578
+ },
579
+ "50324": {
580
+ "content": "[unused39]",
581
+ "lstrip": false,
582
+ "normalized": true,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": false
586
+ },
587
+ "50325": {
588
+ "content": "[unused40]",
589
+ "lstrip": false,
590
+ "normalized": true,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": false
594
+ },
595
+ "50326": {
596
+ "content": "[unused41]",
597
+ "lstrip": false,
598
+ "normalized": true,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": false
602
+ },
603
+ "50327": {
604
+ "content": "[unused42]",
605
+ "lstrip": false,
606
+ "normalized": true,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": false
610
+ },
611
+ "50328": {
612
+ "content": "[unused43]",
613
+ "lstrip": false,
614
+ "normalized": true,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": false
618
+ },
619
+ "50329": {
620
+ "content": "[unused44]",
621
+ "lstrip": false,
622
+ "normalized": true,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": false
626
+ },
627
+ "50330": {
628
+ "content": "[unused45]",
629
+ "lstrip": false,
630
+ "normalized": true,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": false
634
+ },
635
+ "50331": {
636
+ "content": "[unused46]",
637
+ "lstrip": false,
638
+ "normalized": true,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": false
642
+ },
643
+ "50332": {
644
+ "content": "[unused47]",
645
+ "lstrip": false,
646
+ "normalized": true,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": false
650
+ },
651
+ "50333": {
652
+ "content": "[unused48]",
653
+ "lstrip": false,
654
+ "normalized": true,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": false
658
+ },
659
+ "50334": {
660
+ "content": "[unused49]",
661
+ "lstrip": false,
662
+ "normalized": true,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": false
666
+ },
667
+ "50335": {
668
+ "content": "[unused50]",
669
+ "lstrip": false,
670
+ "normalized": true,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": false
674
+ },
675
+ "50336": {
676
+ "content": "[unused51]",
677
+ "lstrip": false,
678
+ "normalized": true,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": false
682
+ },
683
+ "50337": {
684
+ "content": "[unused52]",
685
+ "lstrip": false,
686
+ "normalized": true,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": false
690
+ },
691
+ "50338": {
692
+ "content": "[unused53]",
693
+ "lstrip": false,
694
+ "normalized": true,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": false
698
+ },
699
+ "50339": {
700
+ "content": "[unused54]",
701
+ "lstrip": false,
702
+ "normalized": true,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": false
706
+ },
707
+ "50340": {
708
+ "content": "[unused55]",
709
+ "lstrip": false,
710
+ "normalized": true,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": false
714
+ },
715
+ "50341": {
716
+ "content": "[unused56]",
717
+ "lstrip": false,
718
+ "normalized": true,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": false
722
+ },
723
+ "50342": {
724
+ "content": "[unused57]",
725
+ "lstrip": false,
726
+ "normalized": true,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": false
730
+ },
731
+ "50343": {
732
+ "content": "[unused58]",
733
+ "lstrip": false,
734
+ "normalized": true,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": false
738
+ },
739
+ "50344": {
740
+ "content": "[unused59]",
741
+ "lstrip": false,
742
+ "normalized": true,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": false
746
+ },
747
+ "50345": {
748
+ "content": "[unused60]",
749
+ "lstrip": false,
750
+ "normalized": true,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": false
754
+ },
755
+ "50346": {
756
+ "content": "[unused61]",
757
+ "lstrip": false,
758
+ "normalized": true,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": false
762
+ },
763
+ "50347": {
764
+ "content": "[unused62]",
765
+ "lstrip": false,
766
+ "normalized": true,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": false
770
+ },
771
+ "50348": {
772
+ "content": "[unused63]",
773
+ "lstrip": false,
774
+ "normalized": true,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": false
778
+ },
779
+ "50349": {
780
+ "content": "[unused64]",
781
+ "lstrip": false,
782
+ "normalized": true,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": false
786
+ },
787
+ "50350": {
788
+ "content": "[unused65]",
789
+ "lstrip": false,
790
+ "normalized": true,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": false
794
+ },
795
+ "50351": {
796
+ "content": "[unused66]",
797
+ "lstrip": false,
798
+ "normalized": true,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": false
802
+ },
803
+ "50352": {
804
+ "content": "[unused67]",
805
+ "lstrip": false,
806
+ "normalized": true,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": false
810
+ },
811
+ "50353": {
812
+ "content": "[unused68]",
813
+ "lstrip": false,
814
+ "normalized": true,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": false
818
+ },
819
+ "50354": {
820
+ "content": "[unused69]",
821
+ "lstrip": false,
822
+ "normalized": true,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": false
826
+ },
827
+ "50355": {
828
+ "content": "[unused70]",
829
+ "lstrip": false,
830
+ "normalized": true,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": false
834
+ },
835
+ "50356": {
836
+ "content": "[unused71]",
837
+ "lstrip": false,
838
+ "normalized": true,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": false
842
+ },
843
+ "50357": {
844
+ "content": "[unused72]",
845
+ "lstrip": false,
846
+ "normalized": true,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": false
850
+ },
851
+ "50358": {
852
+ "content": "[unused73]",
853
+ "lstrip": false,
854
+ "normalized": true,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": false
858
+ },
859
+ "50359": {
860
+ "content": "[unused74]",
861
+ "lstrip": false,
862
+ "normalized": true,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": false
866
+ },
867
+ "50360": {
868
+ "content": "[unused75]",
869
+ "lstrip": false,
870
+ "normalized": true,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": false
874
+ },
875
+ "50361": {
876
+ "content": "[unused76]",
877
+ "lstrip": false,
878
+ "normalized": true,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": false
882
+ },
883
+ "50362": {
884
+ "content": "[unused77]",
885
+ "lstrip": false,
886
+ "normalized": true,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": false
890
+ },
891
+ "50363": {
892
+ "content": "[unused78]",
893
+ "lstrip": false,
894
+ "normalized": true,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": false
898
+ },
899
+ "50364": {
900
+ "content": "[unused79]",
901
+ "lstrip": false,
902
+ "normalized": true,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": false
906
+ },
907
+ "50365": {
908
+ "content": "[unused80]",
909
+ "lstrip": false,
910
+ "normalized": true,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": false
914
+ },
915
+ "50366": {
916
+ "content": "[unused81]",
917
+ "lstrip": false,
918
+ "normalized": true,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": false
922
+ },
923
+ "50367": {
924
+ "content": "[unused82]",
925
+ "lstrip": false,
926
+ "normalized": true,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": false
930
+ }
931
+ },
932
+ "clean_up_tokenization_spaces": true,
933
+ "cls_token": "[CLS]",
934
+ "extra_special_tokens": {},
935
+ "mask_token": "[MASK]",
936
+ "model_input_names": [
937
+ "input_ids",
938
+ "attention_mask"
939
+ ],
940
+ "model_max_length": 8192,
941
+ "pad_token": "[PAD]",
942
+ "sep_token": "[SEP]",
943
+ "tokenizer_class": "PreTrainedTokenizer",
944
+ "unk_token": "[UNK]"
945
+ }
trainer_state.json ADDED
@@ -0,0 +1,3623 @@
1
+ {
2
+ "best_global_step": 169336,
3
+ "best_metric": 0.5241418199760344,
4
+ "best_model_checkpoint": "modernbert-heritage-saliency/checkpoint-169336",
5
+ "epoch": 3.0,
6
+ "eval_steps": 500,
7
+ "global_step": 254004,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.005905418812302168,
14
+ "grad_norm": 6.885669708251953,
15
+ "learning_rate": 1.9960866758003812e-05,
16
+ "loss": 0.778,
17
+ "step": 500
18
+ },
19
+ {
20
+ "epoch": 0.011810837624604336,
21
+ "grad_norm": 7.079469203948975,
22
+ "learning_rate": 1.992149729925513e-05,
23
+ "loss": 0.663,
24
+ "step": 1000
25
+ },
26
+ {
27
+ "epoch": 0.017716256436906504,
28
+ "grad_norm": 6.217828273773193,
29
+ "learning_rate": 1.988212784050645e-05,
30
+ "loss": 0.6988,
31
+ "step": 1500
32
+ },
33
+ {
34
+ "epoch": 0.023621675249208673,
35
+ "grad_norm": 3.856297016143799,
36
+ "learning_rate": 1.984275838175777e-05,
37
+ "loss": 0.6711,
38
+ "step": 2000
39
+ },
40
+ {
41
+ "epoch": 0.029527094061510842,
42
+ "grad_norm": 3.044447660446167,
43
+ "learning_rate": 1.9803388923009087e-05,
44
+ "loss": 0.6287,
45
+ "step": 2500
46
+ },
47
+ {
48
+ "epoch": 0.03543251287381301,
49
+ "grad_norm": 2.8974101543426514,
50
+ "learning_rate": 1.9764019464260406e-05,
51
+ "loss": 0.6121,
52
+ "step": 3000
53
+ },
54
+ {
55
+ "epoch": 0.04133793168611518,
56
+ "grad_norm": 11.309990882873535,
57
+ "learning_rate": 1.972488622226422e-05,
58
+ "loss": 0.6392,
59
+ "step": 3500
60
+ },
61
+ {
62
+ "epoch": 0.047243350498417346,
63
+ "grad_norm": 3.9195759296417236,
64
+ "learning_rate": 1.9685516763515536e-05,
65
+ "loss": 0.6841,
66
+ "step": 4000
67
+ },
68
+ {
69
+ "epoch": 0.05314876931071952,
70
+ "grad_norm": 0.9045548439025879,
71
+ "learning_rate": 1.9646147304766856e-05,
72
+ "loss": 0.6066,
73
+ "step": 4500
74
+ },
75
+ {
76
+ "epoch": 0.059054188123021684,
77
+ "grad_norm": 2.262979030609131,
78
+ "learning_rate": 1.9606777846018175e-05,
79
+ "loss": 0.5993,
80
+ "step": 5000
81
+ },
82
+ {
83
+ "epoch": 0.06495960693532385,
84
+ "grad_norm": 3.5913796424865723,
85
+ "learning_rate": 1.9567408387269495e-05,
86
+ "loss": 0.5916,
87
+ "step": 5500
88
+ },
89
+ {
90
+ "epoch": 0.07086502574762601,
91
+ "grad_norm": 0.9315924048423767,
92
+ "learning_rate": 1.952803892852081e-05,
93
+ "loss": 0.6104,
94
+ "step": 6000
95
+ },
96
+ {
97
+ "epoch": 0.0767704445599282,
98
+ "grad_norm": 3.631446123123169,
99
+ "learning_rate": 1.948866946977213e-05,
100
+ "loss": 0.5821,
101
+ "step": 6500
102
+ },
103
+ {
104
+ "epoch": 0.08267586337223036,
105
+ "grad_norm": 3.5221781730651855,
106
+ "learning_rate": 1.944930001102345e-05,
107
+ "loss": 0.5843,
108
+ "step": 7000
109
+ },
110
+ {
111
+ "epoch": 0.08858128218453253,
112
+ "grad_norm": 1.012416958808899,
113
+ "learning_rate": 1.940993055227477e-05,
114
+ "loss": 0.6374,
115
+ "step": 7500
116
+ },
117
+ {
118
+ "epoch": 0.09448670099683469,
119
+ "grad_norm": 1.7101526260375977,
120
+ "learning_rate": 1.9370718571361084e-05,
121
+ "loss": 0.5932,
122
+ "step": 8000
123
+ },
124
+ {
125
+ "epoch": 0.10039211980913687,
126
+ "grad_norm": 3.8092918395996094,
127
+ "learning_rate": 1.93313491126124e-05,
128
+ "loss": 0.5771,
129
+ "step": 8500
130
+ },
131
+ {
132
+ "epoch": 0.10629753862143904,
133
+ "grad_norm": 1.8706109523773193,
134
+ "learning_rate": 1.929197965386372e-05,
135
+ "loss": 0.6001,
136
+ "step": 9000
137
+ },
138
+ {
139
+ "epoch": 0.1122029574337412,
140
+ "grad_norm": 10.529691696166992,
141
+ "learning_rate": 1.925261019511504e-05,
142
+ "loss": 0.5962,
143
+ "step": 9500
144
+ },
145
+ {
146
+ "epoch": 0.11810837624604337,
147
+ "grad_norm": 1.8136720657348633,
148
+ "learning_rate": 1.921324073636636e-05,
149
+ "loss": 0.6957,
150
+ "step": 10000
151
+ },
152
+ {
153
+ "epoch": 0.12401379505834553,
154
+ "grad_norm": 0.7268733382225037,
155
+ "learning_rate": 1.9173871277617678e-05,
156
+ "loss": 0.7129,
157
+ "step": 10500
158
+ },
159
+ {
160
+ "epoch": 0.1299192138706477,
161
+ "grad_norm": 3.032724142074585,
162
+ "learning_rate": 1.9134501818868997e-05,
163
+ "loss": 0.6435,
164
+ "step": 11000
165
+ },
166
+ {
167
+ "epoch": 0.13582463268294986,
168
+ "grad_norm": 1.1200387477874756,
169
+ "learning_rate": 1.9095132360120313e-05,
170
+ "loss": 0.6221,
171
+ "step": 11500
172
+ },
173
+ {
174
+ "epoch": 0.14173005149525203,
175
+ "grad_norm": 2.7086682319641113,
176
+ "learning_rate": 1.9055762901371633e-05,
177
+ "loss": 0.6087,
178
+ "step": 12000
179
+ },
180
+ {
181
+ "epoch": 0.14763547030755422,
182
+ "grad_norm": 1.4301010370254517,
183
+ "learning_rate": 1.9016393442622952e-05,
184
+ "loss": 0.5724,
185
+ "step": 12500
186
+ },
187
+ {
188
+ "epoch": 0.1535408891198564,
189
+ "grad_norm": 3.6675240993499756,
190
+ "learning_rate": 1.8977023983874272e-05,
191
+ "loss": 0.5667,
192
+ "step": 13000
193
+ },
194
+ {
195
+ "epoch": 0.15944630793215855,
196
+ "grad_norm": 0.9690624475479126,
197
+ "learning_rate": 1.893765452512559e-05,
198
+ "loss": 0.5825,
199
+ "step": 13500
200
+ },
201
+ {
202
+ "epoch": 0.16535172674446072,
203
+ "grad_norm": 21.52179527282715,
204
+ "learning_rate": 1.8898285066376907e-05,
205
+ "loss": 0.5963,
206
+ "step": 14000
207
+ },
208
+ {
209
+ "epoch": 0.17125714555676289,
210
+ "grad_norm": 2.331463575363159,
211
+ "learning_rate": 1.8858915607628227e-05,
212
+ "loss": 0.5802,
213
+ "step": 14500
214
+ },
215
+ {
216
+ "epoch": 0.17716256436906505,
217
+ "grad_norm": 11.624436378479004,
218
+ "learning_rate": 1.8819546148879546e-05,
219
+ "loss": 0.5931,
220
+ "step": 15000
221
+ },
222
+ {
223
+ "epoch": 0.18306798318136722,
224
+ "grad_norm": 10.969076156616211,
225
+ "learning_rate": 1.8780176690130866e-05,
226
+ "loss": 0.5779,
227
+ "step": 15500
228
+ },
229
+ {
230
+ "epoch": 0.18897340199366938,
231
+ "grad_norm": 17.681808471679688,
232
+ "learning_rate": 1.874088597029968e-05,
233
+ "loss": 0.5822,
234
+ "step": 16000
235
+ },
236
+ {
237
+ "epoch": 0.19487882080597155,
238
+ "grad_norm": 3.5414836406707764,
239
+ "learning_rate": 1.8701516511551e-05,
240
+ "loss": 0.5749,
241
+ "step": 16500
242
+ },
243
+ {
244
+ "epoch": 0.20078423961827374,
245
+ "grad_norm": 13.94709587097168,
246
+ "learning_rate": 1.866214705280232e-05,
247
+ "loss": 0.6799,
248
+ "step": 17000
249
+ },
250
+ {
251
+ "epoch": 0.2066896584305759,
252
+ "grad_norm": 1.3140314817428589,
253
+ "learning_rate": 1.8622856332971136e-05,
254
+ "loss": 0.6906,
255
+ "step": 17500
256
+ },
257
+ {
258
+ "epoch": 0.21259507724287807,
259
+ "grad_norm": 0.7483753561973572,
260
+ "learning_rate": 1.858356561313995e-05,
261
+ "loss": 0.7139,
262
+ "step": 18000
263
+ },
264
+ {
265
+ "epoch": 0.21850049605518024,
266
+ "grad_norm": 6.807902812957764,
267
+ "learning_rate": 1.854419615439127e-05,
268
+ "loss": 0.6635,
269
+ "step": 18500
270
+ },
271
+ {
272
+ "epoch": 0.2244059148674824,
273
+ "grad_norm": 21.158859252929688,
274
+ "learning_rate": 1.850482669564259e-05,
275
+ "loss": 0.7046,
276
+ "step": 19000
277
+ },
278
+ {
279
+ "epoch": 0.23031133367978457,
280
+ "grad_norm": 0.47579389810562134,
281
+ "learning_rate": 1.846545723689391e-05,
282
+ "loss": 0.6757,
283
+ "step": 19500
284
+ },
285
+ {
286
+ "epoch": 0.23621675249208673,
287
+ "grad_norm": 14.090885162353516,
288
+ "learning_rate": 1.842608777814523e-05,
289
+ "loss": 0.7157,
290
+ "step": 20000
291
+ },
292
+ {
293
+ "epoch": 0.2421221713043889,
294
+ "grad_norm": 5.890013217926025,
295
+ "learning_rate": 1.8386797058314044e-05,
296
+ "loss": 0.6787,
297
+ "step": 20500
298
+ },
299
+ {
300
+ "epoch": 0.24802759011669107,
301
+ "grad_norm": 17.978776931762695,
302
+ "learning_rate": 1.834742759956536e-05,
303
+ "loss": 0.6719,
304
+ "step": 21000
305
+ },
306
+ {
307
+ "epoch": 0.25393300892899323,
308
+ "grad_norm": 19.15386962890625,
309
+ "learning_rate": 1.8308058140816683e-05,
310
+ "loss": 0.6684,
311
+ "step": 21500
312
+ },
313
+ {
314
+ "epoch": 0.2598384277412954,
315
+ "grad_norm": 14.03354549407959,
316
+ "learning_rate": 1.8268688682068e-05,
317
+ "loss": 0.7036,
318
+ "step": 22000
319
+ },
320
+ {
321
+ "epoch": 0.26574384655359756,
322
+ "grad_norm": 1.1214927434921265,
323
+ "learning_rate": 1.8229397962236818e-05,
324
+ "loss": 0.6637,
325
+ "step": 22500
326
+ },
327
+ {
328
+ "epoch": 0.27164926536589973,
329
+ "grad_norm": 0.4743577241897583,
330
+ "learning_rate": 1.8190028503488134e-05,
331
+ "loss": 0.6966,
332
+ "step": 23000
333
+ },
334
+ {
335
+ "epoch": 0.2775546841782019,
336
+ "grad_norm": 14.990901947021484,
337
+ "learning_rate": 1.815073778365695e-05,
338
+ "loss": 0.6642,
339
+ "step": 23500
340
+ },
341
+ {
342
+ "epoch": 0.28346010299050406,
343
+ "grad_norm": 1.3651882410049438,
344
+ "learning_rate": 1.8111368324908272e-05,
345
+ "loss": 0.6331,
346
+ "step": 24000
347
+ },
348
+ {
349
+ "epoch": 0.2893655218028063,
350
+ "grad_norm": 5.043566703796387,
351
+ "learning_rate": 1.8071998866159592e-05,
352
+ "loss": 0.657,
353
+ "step": 24500
354
+ },
355
+ {
356
+ "epoch": 0.29527094061510845,
357
+ "grad_norm": 17.113506317138672,
358
+ "learning_rate": 1.8032629407410908e-05,
359
+ "loss": 0.6101,
360
+ "step": 25000
361
+ },
362
+ {
363
+ "epoch": 0.3011763594274106,
364
+ "grad_norm": 12.973406791687012,
365
+ "learning_rate": 1.7993259948662227e-05,
366
+ "loss": 0.648,
367
+ "step": 25500
368
+ },
369
+ {
370
+ "epoch": 0.3070817782397128,
371
+ "grad_norm": 1.021547555923462,
372
+ "learning_rate": 1.7953890489913547e-05,
373
+ "loss": 0.609,
374
+ "step": 26000
375
+ },
376
+ {
377
+ "epoch": 0.31298719705201494,
378
+ "grad_norm": 1.919687271118164,
379
+ "learning_rate": 1.7914521031164863e-05,
380
+ "loss": 0.5473,
381
+ "step": 26500
382
+ },
383
+ {
384
+ "epoch": 0.3188926158643171,
385
+ "grad_norm": 1.6815394163131714,
386
+ "learning_rate": 1.7875151572416182e-05,
387
+ "loss": 0.5526,
388
+ "step": 27000
389
+ },
390
+ {
391
+ "epoch": 0.3247980346766193,
392
+ "grad_norm": 11.55897045135498,
393
+ "learning_rate": 1.7835782113667502e-05,
394
+ "loss": 0.5738,
395
+ "step": 27500
396
+ },
397
+ {
398
+ "epoch": 0.33070345348892144,
399
+ "grad_norm": 5.713680267333984,
400
+ "learning_rate": 1.779641265491882e-05,
401
+ "loss": 0.5765,
402
+ "step": 28000
403
+ },
404
+ {
405
+ "epoch": 0.3366088723012236,
406
+ "grad_norm": 6.03076696395874,
407
+ "learning_rate": 1.775704319617014e-05,
408
+ "loss": 0.622,
409
+ "step": 28500
410
+ },
411
+ {
412
+ "epoch": 0.34251429111352577,
413
+ "grad_norm": 3.5472631454467773,
414
+ "learning_rate": 1.771767373742146e-05,
415
+ "loss": 0.5828,
416
+ "step": 29000
417
+ },
418
+ {
419
+ "epoch": 0.34841970992582794,
420
+ "grad_norm": 5.590924263000488,
421
+ "learning_rate": 1.7678383017590276e-05,
422
+ "loss": 0.595,
423
+ "step": 29500
424
+ },
425
+ {
426
+ "epoch": 0.3543251287381301,
427
+ "grad_norm": 7.295816421508789,
428
+ "learning_rate": 1.7639013558841592e-05,
429
+ "loss": 0.5718,
430
+ "step": 30000
431
+ },
432
+ {
433
+ "epoch": 0.36023054755043227,
434
+ "grad_norm": 0.5159268379211426,
435
+ "learning_rate": 1.7599644100092915e-05,
436
+ "loss": 0.5478,
437
+ "step": 30500
438
+ },
439
+ {
440
+ "epoch": 0.36613596636273443,
441
+ "grad_norm": 116.56455993652344,
442
+ "learning_rate": 1.7560274641344234e-05,
443
+ "loss": 0.5919,
444
+ "step": 31000
445
+ },
446
+ {
447
+ "epoch": 0.3720413851750366,
448
+ "grad_norm": 13.48291301727295,
449
+ "learning_rate": 1.752090518259555e-05,
450
+ "loss": 0.5718,
451
+ "step": 31500
452
+ },
453
+ {
454
+ "epoch": 0.37794680398733876,
455
+ "grad_norm": 11.179370880126953,
456
+ "learning_rate": 1.748153572384687e-05,
457
+ "loss": 0.6018,
458
+ "step": 32000
459
+ },
460
+ {
461
+ "epoch": 0.38385222279964093,
462
+ "grad_norm": 13.461835861206055,
463
+ "learning_rate": 1.744216626509819e-05,
464
+ "loss": 0.5671,
465
+ "step": 32500
466
+ },
467
+ {
468
+ "epoch": 0.3897576416119431,
469
+ "grad_norm": 3.035508394241333,
470
+ "learning_rate": 1.7402875545267005e-05,
471
+ "loss": 0.6002,
472
+ "step": 33000
473
+ },
474
+ {
475
+ "epoch": 0.39566306042424526,
476
+ "grad_norm": 0.8200084567070007,
477
+ "learning_rate": 1.7363506086518324e-05,
478
+ "loss": 0.5577,
479
+ "step": 33500
480
+ },
481
+ {
482
+ "epoch": 0.4015684792365475,
483
+ "grad_norm": 0.5664367079734802,
484
+ "learning_rate": 1.7324136627769644e-05,
485
+ "loss": 0.5948,
486
+ "step": 34000
487
+ },
488
+ {
489
+ "epoch": 0.40747389804884965,
490
+ "grad_norm": 1.102321982383728,
491
+ "learning_rate": 1.7284767169020963e-05,
492
+ "loss": 0.5755,
493
+ "step": 34500
494
+ },
495
+ {
496
+ "epoch": 0.4133793168611518,
497
+ "grad_norm": 0.8719478845596313,
498
+ "learning_rate": 1.724539771027228e-05,
499
+ "loss": 0.5754,
500
+ "step": 35000
501
+ },
502
+ {
503
+ "epoch": 0.419284735673454,
504
+ "grad_norm": 4.414661407470703,
505
+ "learning_rate": 1.7206106990441095e-05,
506
+ "loss": 0.5768,
507
+ "step": 35500
508
+ },
509
+ {
510
+ "epoch": 0.42519015448575614,
511
+ "grad_norm": 8.893802642822266,
512
+ "learning_rate": 1.7166737531692417e-05,
513
+ "loss": 0.558,
514
+ "step": 36000
515
+ },
516
+ {
517
+ "epoch": 0.4310955732980583,
518
+ "grad_norm": 1.9466989040374756,
519
+ "learning_rate": 1.7127368072943737e-05,
520
+ "loss": 0.6029,
521
+ "step": 36500
522
+ },
523
+ {
524
+ "epoch": 0.4370009921103605,
525
+ "grad_norm": 0.6918290853500366,
526
+ "learning_rate": 1.7087998614195053e-05,
527
+ "loss": 0.5743,
528
+ "step": 37000
529
+ },
530
+ {
531
+ "epoch": 0.44290641092266264,
532
+ "grad_norm": 0.16082176566123962,
533
+ "learning_rate": 1.7048629155446373e-05,
534
+ "loss": 0.5468,
535
+ "step": 37500
536
+ },
537
+ {
538
+ "epoch": 0.4488118297349648,
539
+ "grad_norm": 3.5709893703460693,
540
+ "learning_rate": 1.7009259696697692e-05,
541
+ "loss": 0.5936,
542
+ "step": 38000
543
+ },
544
+ {
545
+ "epoch": 0.454717248547267,
546
+ "grad_norm": 4.856642723083496,
547
+ "learning_rate": 1.6969890237949008e-05,
548
+ "loss": 0.5805,
549
+ "step": 38500
550
+ },
551
+ {
552
+ "epoch": 0.46062266735956914,
553
+ "grad_norm": 4.781003952026367,
554
+ "learning_rate": 1.6930520779200328e-05,
555
+ "loss": 0.5854,
556
+ "step": 39000
557
+ },
558
+ {
559
+ "epoch": 0.4665280861718713,
560
+ "grad_norm": 19.200620651245117,
561
+ "learning_rate": 1.6891151320451647e-05,
562
+ "loss": 0.591,
563
+ "step": 39500
564
+ },
565
+ {
566
+ "epoch": 0.47243350498417347,
567
+ "grad_norm": 0.6837007999420166,
568
+ "learning_rate": 1.6851781861702967e-05,
569
+ "loss": 0.5812,
570
+ "step": 40000
571
+ },
572
+ {
573
+ "epoch": 0.47833892379647563,
574
+ "grad_norm": 0.7044087648391724,
575
+ "learning_rate": 1.6812491141871782e-05,
576
+ "loss": 0.5772,
577
+ "step": 40500
578
+ },
579
+ {
580
+ "epoch": 0.4842443426087778,
581
+ "grad_norm": 8.194652557373047,
582
+ "learning_rate": 1.67731216831231e-05,
583
+ "loss": 0.5556,
584
+ "step": 41000
585
+ },
586
+ {
587
+ "epoch": 0.49014976142107997,
588
+ "grad_norm": 4.029700756072998,
589
+ "learning_rate": 1.673375222437442e-05,
590
+ "loss": 0.5656,
591
+ "step": 41500
592
+ },
593
+ {
594
+ "epoch": 0.49605518023338213,
595
+ "grad_norm": 2.9741246700286865,
596
+ "learning_rate": 1.6694382765625737e-05,
597
+ "loss": 0.6014,
598
+ "step": 42000
599
+ },
600
+ {
601
+ "epoch": 0.5019605990456844,
602
+ "grad_norm": 3.5334889888763428,
603
+ "learning_rate": 1.665501330687706e-05,
604
+ "loss": 0.5648,
605
+ "step": 42500
606
+ },
607
+ {
608
+ "epoch": 0.5078660178579865,
609
+ "grad_norm": 0.5247611403465271,
610
+ "learning_rate": 1.6615722587045875e-05,
611
+ "loss": 0.5546,
612
+ "step": 43000
613
+ },
614
+ {
615
+ "epoch": 0.5137714366702887,
616
+ "grad_norm": 3.2951791286468506,
617
+ "learning_rate": 1.6576353128297195e-05,
618
+ "loss": 0.5764,
619
+ "step": 43500
620
+ },
621
+ {
622
+ "epoch": 0.5196768554825908,
623
+ "grad_norm": 0.4771710932254791,
624
+ "learning_rate": 1.653698366954851e-05,
625
+ "loss": 0.5742,
626
+ "step": 44000
627
+ },
628
+ {
629
+ "epoch": 0.525582274294893,
630
+ "grad_norm": 5.949660301208496,
631
+ "learning_rate": 1.649761421079983e-05,
632
+ "loss": 0.5664,
633
+ "step": 44500
634
+ },
635
+ {
636
+ "epoch": 0.5314876931071951,
637
+ "grad_norm": 1.1648470163345337,
638
+ "learning_rate": 1.645824475205115e-05,
639
+ "loss": 0.5612,
640
+ "step": 45000
641
+ },
642
+ {
643
+ "epoch": 0.5373931119194973,
644
+ "grad_norm": 4.647109031677246,
645
+ "learning_rate": 1.641887529330247e-05,
646
+ "loss": 0.5693,
647
+ "step": 45500
648
+ },
649
+ {
650
+ "epoch": 0.5432985307317995,
651
+ "grad_norm": 3.4232962131500244,
652
+ "learning_rate": 1.6379663312388784e-05,
653
+ "loss": 0.5884,
654
+ "step": 46000
655
+ },
656
+ {
657
+ "epoch": 0.5492039495441017,
658
+ "grad_norm": 0.6715309619903564,
659
+ "learning_rate": 1.63402938536401e-05,
660
+ "loss": 0.5734,
661
+ "step": 46500
662
+ },
663
+ {
664
+ "epoch": 0.5551093683564038,
665
+ "grad_norm": 10.699589729309082,
666
+ "learning_rate": 1.630092439489142e-05,
667
+ "loss": 0.5842,
668
+ "step": 47000
669
+ },
670
+ {
671
+ "epoch": 0.561014787168706,
672
+ "grad_norm": 4.567357540130615,
673
+ "learning_rate": 1.626155493614274e-05,
674
+ "loss": 0.5356,
675
+ "step": 47500
676
+ },
677
+ {
678
+ "epoch": 0.5669202059810081,
679
+ "grad_norm": 0.851794421672821,
680
+ "learning_rate": 1.622218547739406e-05,
681
+ "loss": 0.6188,
682
+ "step": 48000
683
+ },
684
+ {
685
+ "epoch": 0.5728256247933103,
686
+ "grad_norm": 0.710367739200592,
687
+ "learning_rate": 1.6182816018645378e-05,
688
+ "loss": 0.586,
689
+ "step": 48500
690
+ },
691
+ {
692
+ "epoch": 0.5787310436056126,
693
+ "grad_norm": 16.63396644592285,
694
+ "learning_rate": 1.6143446559896697e-05,
695
+ "loss": 0.5638,
696
+ "step": 49000
697
+ },
698
+ {
699
+ "epoch": 0.5846364624179147,
700
+ "grad_norm": 14.063051223754883,
701
+ "learning_rate": 1.6104155840065513e-05,
702
+ "loss": 0.5631,
703
+ "step": 49500
704
+ },
705
+ {
706
+ "epoch": 0.5905418812302169,
707
+ "grad_norm": 2.5714404582977295,
708
+ "learning_rate": 1.606478638131683e-05,
709
+ "loss": 0.5947,
710
+ "step": 50000
711
+ },
712
+ {
713
+ "epoch": 0.596447300042519,
714
+ "grad_norm": 2.1997830867767334,
715
+ "learning_rate": 1.6025416922568148e-05,
716
+ "loss": 0.5288,
717
+ "step": 50500
718
+ },
719
+ {
720
+ "epoch": 0.6023527188548212,
721
+ "grad_norm": 0.6119571328163147,
722
+ "learning_rate": 1.598604746381947e-05,
723
+ "loss": 0.6011,
724
+ "step": 51000
725
+ },
726
+ {
727
+ "epoch": 0.6082581376671233,
728
+ "grad_norm": 13.032819747924805,
729
+ "learning_rate": 1.5946678005070787e-05,
730
+ "loss": 0.5671,
731
+ "step": 51500
732
+ },
733
+ {
734
+ "epoch": 0.6141635564794256,
735
+ "grad_norm": 5.521139621734619,
736
+ "learning_rate": 1.5907308546322107e-05,
737
+ "loss": 0.5946,
738
+ "step": 52000
739
+ },
740
+ {
741
+ "epoch": 0.6200689752917277,
742
+ "grad_norm": 29.162424087524414,
743
+ "learning_rate": 1.5867939087573426e-05,
744
+ "loss": 0.5604,
745
+ "step": 52500
746
+ },
747
+ {
748
+ "epoch": 0.6259743941040299,
749
+ "grad_norm": 4.117544651031494,
750
+ "learning_rate": 1.5828569628824742e-05,
751
+ "loss": 0.5436,
752
+ "step": 53000
753
+ },
754
+ {
755
+ "epoch": 0.631879812916332,
756
+ "grad_norm": 0.4661092758178711,
757
+ "learning_rate": 1.578927890899356e-05,
758
+ "loss": 0.5801,
759
+ "step": 53500
760
+ },
761
+ {
762
+ "epoch": 0.6377852317286342,
763
+ "grad_norm": 5.056973457336426,
764
+ "learning_rate": 1.574990945024488e-05,
765
+ "loss": 0.5693,
766
+ "step": 54000
767
+ },
768
+ {
769
+ "epoch": 0.6436906505409363,
770
+ "grad_norm": 7.182854652404785,
771
+ "learning_rate": 1.57105399914962e-05,
772
+ "loss": 0.5936,
773
+ "step": 54500
774
+ },
775
+ {
776
+ "epoch": 0.6495960693532385,
777
+ "grad_norm": 6.386303901672363,
778
+ "learning_rate": 1.5671170532747516e-05,
779
+ "loss": 0.5751,
780
+ "step": 55000
781
+ },
782
+ {
783
+ "epoch": 0.6555014881655407,
784
+ "grad_norm": 4.122045040130615,
785
+ "learning_rate": 1.5631801073998836e-05,
786
+ "loss": 0.5348,
787
+ "step": 55500
788
+ },
789
+ {
790
+ "epoch": 0.6614069069778429,
791
+ "grad_norm": 0.5306248068809509,
792
+ "learning_rate": 1.559251035416765e-05,
793
+ "loss": 0.5793,
794
+ "step": 56000
795
+ },
796
+ {
797
+ "epoch": 0.667312325790145,
798
+ "grad_norm": 5.596012115478516,
799
+ "learning_rate": 1.555314089541897e-05,
800
+ "loss": 0.5481,
801
+ "step": 56500
802
+ },
803
+ {
804
+ "epoch": 0.6732177446024472,
805
+ "grad_norm": 0.6556316614151001,
806
+ "learning_rate": 1.551377143667029e-05,
807
+ "loss": 0.5549,
808
+ "step": 57000
809
+ },
810
+ {
811
+ "epoch": 0.6791231634147493,
812
+ "grad_norm": 1.0770095586776733,
813
+ "learning_rate": 1.547440197792161e-05,
814
+ "loss": 0.5888,
815
+ "step": 57500
816
+ },
817
+ {
818
+ "epoch": 0.6850285822270515,
819
+ "grad_norm": 0.6506383419036865,
820
+ "learning_rate": 1.543503251917293e-05,
821
+ "loss": 0.5782,
822
+ "step": 58000
823
+ },
824
+ {
825
+ "epoch": 0.6909340010393538,
826
+ "grad_norm": 5.457004547119141,
827
+ "learning_rate": 1.5395663060424245e-05,
828
+ "loss": 0.5687,
829
+ "step": 58500
830
+ },
831
+ {
832
+ "epoch": 0.6968394198516559,
833
+ "grad_norm": 1.7497150897979736,
834
+ "learning_rate": 1.5356293601675564e-05,
835
+ "loss": 0.5685,
836
+ "step": 59000
837
+ },
838
+ {
839
+ "epoch": 0.7027448386639581,
840
+ "grad_norm": 2.551649570465088,
841
+ "learning_rate": 1.5316924142926884e-05,
842
+ "loss": 0.5457,
843
+ "step": 59500
844
+ },
845
+ {
846
+ "epoch": 0.7086502574762602,
847
+ "grad_norm": 1.2479901313781738,
848
+ "learning_rate": 1.5277554684178203e-05,
849
+ "loss": 0.5814,
850
+ "step": 60000
851
+ },
852
+ {
853
+ "epoch": 0.7145556762885624,
854
+ "grad_norm": 2.57185697555542,
855
+ "learning_rate": 1.5238185225429523e-05,
856
+ "loss": 0.556,
857
+ "step": 60500
858
+ },
859
+ {
860
+ "epoch": 0.7204610951008645,
861
+ "grad_norm": 3.0134148597717285,
862
+ "learning_rate": 1.5198894505598338e-05,
863
+ "loss": 0.5646,
864
+ "step": 61000
865
+ },
866
+ {
867
+ "epoch": 0.7263665139131668,
868
+ "grad_norm": 3.8693532943725586,
869
+ "learning_rate": 1.5159525046849656e-05,
870
+ "loss": 0.5655,
871
+ "step": 61500
872
+ },
873
+ {
874
+ "epoch": 0.7322719327254689,
875
+ "grad_norm": 56.192054748535156,
876
+ "learning_rate": 1.5120234327018473e-05,
877
+ "loss": 0.5617,
878
+ "step": 62000
879
+ },
880
+ {
881
+ "epoch": 0.7381773515377711,
882
+ "grad_norm": 1.0195658206939697,
883
+ "learning_rate": 1.5080864868269793e-05,
884
+ "loss": 0.5524,
885
+ "step": 62500
886
+ },
887
+ {
888
+ "epoch": 0.7440827703500732,
889
+ "grad_norm": 2.2935259342193604,
890
+ "learning_rate": 1.5041495409521112e-05,
891
+ "loss": 0.5626,
892
+ "step": 63000
893
+ },
894
+ {
895
+ "epoch": 0.7499881891623754,
896
+ "grad_norm": 46.69443893432617,
897
+ "learning_rate": 1.500212595077243e-05,
898
+ "loss": 0.5839,
899
+ "step": 63500
900
+ },
901
+ {
902
+ "epoch": 0.7558936079746775,
903
+ "grad_norm": 0.6499876976013184,
904
+ "learning_rate": 1.496275649202375e-05,
905
+ "loss": 0.5501,
906
+ "step": 64000
907
+ },
908
+ {
909
+ "epoch": 0.7617990267869797,
910
+ "grad_norm": 6.221270561218262,
911
+ "learning_rate": 1.4923387033275067e-05,
912
+ "loss": 0.5691,
913
+ "step": 64500
914
+ },
915
+ {
916
+ "epoch": 0.7677044455992819,
917
+ "grad_norm": 4.556798458099365,
918
+ "learning_rate": 1.4884017574526387e-05,
919
+ "loss": 0.5731,
920
+ "step": 65000
921
+ },
922
+ {
923
+ "epoch": 0.7736098644115841,
924
+ "grad_norm": 3.3265926837921143,
925
+ "learning_rate": 1.4844648115777704e-05,
926
+ "loss": 0.5697,
927
+ "step": 65500
928
+ },
929
+ {
930
+ "epoch": 0.7795152832238862,
931
+ "grad_norm": 4.640722274780273,
932
+ "learning_rate": 1.4805278657029026e-05,
933
+ "loss": 0.5832,
934
+ "step": 66000
935
+ },
936
+ {
937
+ "epoch": 0.7854207020361884,
938
+ "grad_norm": 4.069241046905518,
939
+ "learning_rate": 1.4765909198280343e-05,
940
+ "loss": 0.5997,
941
+ "step": 66500
942
+ },
943
+ {
944
+ "epoch": 0.7913261208484905,
945
+ "grad_norm": 4.073843479156494,
946
+ "learning_rate": 1.4726539739531663e-05,
947
+ "loss": 0.5475,
948
+ "step": 67000
949
+ },
950
+ {
951
+ "epoch": 0.7972315396607927,
952
+ "grad_norm": 3.5267622470855713,
953
+ "learning_rate": 1.468717028078298e-05,
954
+ "loss": 0.551,
955
+ "step": 67500
956
+ },
957
+ {
958
+ "epoch": 0.803136958473095,
959
+ "grad_norm": 3.477355480194092,
960
+ "learning_rate": 1.46478008220343e-05,
961
+ "loss": 0.5237,
962
+ "step": 68000
963
+ },
964
+ {
965
+ "epoch": 0.8090423772853971,
966
+ "grad_norm": 2.0641283988952637,
967
+ "learning_rate": 1.4608431363285618e-05,
968
+ "loss": 0.5747,
969
+ "step": 68500
970
+ },
971
+ {
972
+ "epoch": 0.8149477960976993,
973
+ "grad_norm": 9.778463363647461,
974
+ "learning_rate": 1.4569140643454435e-05,
975
+ "loss": 0.5741,
976
+ "step": 69000
977
+ },
978
+ {
979
+ "epoch": 0.8208532149100014,
980
+ "grad_norm": 10.189579010009766,
981
+ "learning_rate": 1.4529849923623252e-05,
982
+ "loss": 0.5655,
983
+ "step": 69500
984
+ },
985
+ {
986
+ "epoch": 0.8267586337223036,
987
+ "grad_norm": 7.845795631408691,
988
+ "learning_rate": 1.449048046487457e-05,
989
+ "loss": 0.558,
990
+ "step": 70000
991
+ },
992
+ {
993
+ "epoch": 0.8326640525346057,
994
+ "grad_norm": 4.319666862487793,
995
+ "learning_rate": 1.445111100612589e-05,
996
+ "loss": 0.5814,
997
+ "step": 70500
998
+ },
999
+ {
1000
+ "epoch": 0.838569471346908,
1001
+ "grad_norm": 7.940633296966553,
1002
+ "learning_rate": 1.4411741547377207e-05,
1003
+ "loss": 0.5671,
1004
+ "step": 71000
1005
+ },
1006
+ {
1007
+ "epoch": 0.8444748901592101,
1008
+ "grad_norm": 2.4097514152526855,
1009
+ "learning_rate": 1.4372372088628525e-05,
1010
+ "loss": 0.5851,
1011
+ "step": 71500
1012
+ },
1013
+ {
1014
+ "epoch": 0.8503803089715123,
1015
+ "grad_norm": 4.664578437805176,
1016
+ "learning_rate": 1.4333081368797344e-05,
1017
+ "loss": 0.5454,
1018
+ "step": 72000
1019
+ },
1020
+ {
1021
+ "epoch": 0.8562857277838144,
1022
+ "grad_norm": 3.250767230987549,
1023
+ "learning_rate": 1.4293711910048661e-05,
1024
+ "loss": 0.5581,
1025
+ "step": 72500
1026
+ },
1027
+ {
1028
+ "epoch": 0.8621911465961166,
1029
+ "grad_norm": 11.224815368652344,
1030
+ "learning_rate": 1.4254342451299981e-05,
1031
+ "loss": 0.5398,
1032
+ "step": 73000
1033
+ },
1034
+ {
1035
+ "epoch": 0.8680965654084187,
1036
+ "grad_norm": 0.5496272444725037,
1037
+ "learning_rate": 1.4214972992551299e-05,
1038
+ "loss": 0.5797,
1039
+ "step": 73500
1040
+ },
1041
+ {
1042
+ "epoch": 0.874001984220721,
1043
+ "grad_norm": 3.5080580711364746,
1044
+ "learning_rate": 1.4175603533802618e-05,
1045
+ "loss": 0.5421,
1046
+ "step": 74000
1047
+ },
1048
+ {
1049
+ "epoch": 0.8799074030330231,
1050
+ "grad_norm": 0.6545143723487854,
1051
+ "learning_rate": 1.4136312813971435e-05,
1052
+ "loss": 0.5369,
1053
+ "step": 74500
1054
+ },
1055
+ {
1056
+ "epoch": 0.8858128218453253,
1057
+ "grad_norm": 0.41881951689720154,
1058
+ "learning_rate": 1.4096943355222755e-05,
1059
+ "loss": 0.5392,
1060
+ "step": 75000
1061
+ },
1062
+ {
1063
+ "epoch": 0.8917182406576274,
1064
+ "grad_norm": 0.576518177986145,
1065
+ "learning_rate": 1.4057573896474073e-05,
1066
+ "loss": 0.5325,
1067
+ "step": 75500
1068
+ },
1069
+ {
1070
+ "epoch": 0.8976236594699296,
1071
+ "grad_norm": 3.194174289703369,
1072
+ "learning_rate": 1.4018204437725392e-05,
1073
+ "loss": 0.6139,
1074
+ "step": 76000
1075
+ },
1076
+ {
1077
+ "epoch": 0.9035290782822317,
1078
+ "grad_norm": 5.220823287963867,
1079
+ "learning_rate": 1.3978913717894207e-05,
1080
+ "loss": 0.5577,
1081
+ "step": 76500
1082
+ },
1083
+ {
1084
+ "epoch": 0.909434497094534,
1085
+ "grad_norm": 11.443092346191406,
1086
+ "learning_rate": 1.3939544259145525e-05,
1087
+ "loss": 0.5547,
1088
+ "step": 77000
1089
+ },
1090
+ {
1091
+ "epoch": 0.9153399159068362,
1092
+ "grad_norm": 0.620442807674408,
1093
+ "learning_rate": 1.3900253539314344e-05,
1094
+ "loss": 0.5679,
1095
+ "step": 77500
1096
+ },
1097
+ {
1098
+ "epoch": 0.9212453347191383,
1099
+ "grad_norm": 0.49119341373443604,
1100
+ "learning_rate": 1.3860884080565662e-05,
1101
+ "loss": 0.5166,
1102
+ "step": 78000
1103
+ },
1104
+ {
1105
+ "epoch": 0.9271507535314405,
1106
+ "grad_norm": 15.640247344970703,
1107
+ "learning_rate": 1.3821514621816981e-05,
1108
+ "loss": 0.58,
1109
+ "step": 78500
1110
+ },
1111
+ {
1112
+ "epoch": 0.9330561723437426,
1113
+ "grad_norm": 2.5728659629821777,
1114
+ "learning_rate": 1.3782145163068299e-05,
1115
+ "loss": 0.5688,
1116
+ "step": 79000
1117
+ },
1118
+ {
1119
+ "epoch": 0.9389615911560448,
1120
+ "grad_norm": 0.5715045928955078,
1121
+ "learning_rate": 1.3742775704319617e-05,
1122
+ "loss": 0.5345,
1123
+ "step": 79500
1124
+ },
1125
+ {
1126
+ "epoch": 0.9448670099683469,
1127
+ "grad_norm": 0.5369954109191895,
1128
+ "learning_rate": 1.3703406245570936e-05,
1129
+ "loss": 0.5259,
1130
+ "step": 80000
1131
+ },
1132
+ {
1133
+ "epoch": 0.9507724287806492,
1134
+ "grad_norm": 14.094406127929688,
1135
+ "learning_rate": 1.3664036786822257e-05,
1136
+ "loss": 0.5841,
1137
+ "step": 80500
1138
+ },
1139
+ {
1140
+ "epoch": 0.9566778475929513,
1141
+ "grad_norm": 45.60070037841797,
1142
+ "learning_rate": 1.3624667328073575e-05,
1143
+ "loss": 0.524,
1144
+ "step": 81000
1145
+ },
1146
+ {
1147
+ "epoch": 0.9625832664052535,
1148
+ "grad_norm": 0.13448235392570496,
1149
+ "learning_rate": 1.3585297869324893e-05,
1150
+ "loss": 0.5377,
1151
+ "step": 81500
1152
+ },
1153
+ {
1154
+ "epoch": 0.9684886852175556,
1155
+ "grad_norm": 41.70237731933594,
1156
+ "learning_rate": 1.3545928410576212e-05,
1157
+ "loss": 0.562,
1158
+ "step": 82000
1159
+ },
1160
+ {
1161
+ "epoch": 0.9743941040298578,
1162
+ "grad_norm": 4.349485874176025,
1163
+ "learning_rate": 1.350655895182753e-05,
1164
+ "loss": 0.5193,
1165
+ "step": 82500
1166
+ },
1167
+ {
1168
+ "epoch": 0.9802995228421599,
1169
+ "grad_norm": 0.3272392749786377,
1170
+ "learning_rate": 1.346718949307885e-05,
1171
+ "loss": 0.5396,
1172
+ "step": 83000
1173
+ },
1174
+ {
1175
+ "epoch": 0.9862049416544622,
1176
+ "grad_norm": 0.6910300254821777,
1177
+ "learning_rate": 1.3427820034330171e-05,
1178
+ "loss": 0.5163,
1179
+ "step": 83500
1180
+ },
1181
+ {
1182
+ "epoch": 0.9921103604667643,
1183
+ "grad_norm": 1.2478801012039185,
1184
+ "learning_rate": 1.3388450575581489e-05,
1185
+ "loss": 0.5474,
1186
+ "step": 84000
1187
+ },
1188
+ {
1189
+ "epoch": 0.9980157792790665,
1190
+ "grad_norm": 26.1578369140625,
1191
+ "learning_rate": 1.3349159855750304e-05,
1192
+ "loss": 0.5688,
1193
+ "step": 84500
1194
+ },
1195
+ {
1196
+ "epoch": 1.0,
1197
+ "eval_accuracy": 0.492972828269308,
1198
+ "eval_f1": 0.5198082616664317,
1199
+ "eval_loss": 0.5405558347702026,
1200
+ "eval_roc_auc": 0.6791354970679471,
1201
+ "eval_runtime": 111.677,
1202
+ "eval_samples_per_second": 66.898,
1203
+ "eval_steps_per_second": 66.898,
1204
+ "step": 84668
1205
+ },
1206
+ {
1207
+ "epoch": 1.0039211980913687,
1208
+ "grad_norm": 16.267274856567383,
1209
+ "learning_rate": 1.3309790397001624e-05,
1210
+ "loss": 0.5494,
1211
+ "step": 85000
1212
+ },
1213
+ {
1214
+ "epoch": 1.0098266169036707,
1215
+ "grad_norm": 3.8913004398345947,
1216
+ "learning_rate": 1.3270420938252941e-05,
1217
+ "loss": 0.5073,
1218
+ "step": 85500
1219
+ },
1220
+ {
1221
+ "epoch": 1.015732035715973,
1222
+ "grad_norm": 4.086480617523193,
1223
+ "learning_rate": 1.323105147950426e-05,
1224
+ "loss": 0.5299,
1225
+ "step": 86000
1226
+ },
1227
+ {
1228
+ "epoch": 1.0216374545282751,
1229
+ "grad_norm": 0.4327312707901001,
1230
+ "learning_rate": 1.3191760759673078e-05,
1231
+ "loss": 0.5854,
1232
+ "step": 86500
1233
+ },
1234
+ {
1235
+ "epoch": 1.0275428733405774,
1236
+ "grad_norm": 4.263016223907471,
1237
+ "learning_rate": 1.3152391300924396e-05,
1238
+ "loss": 0.5211,
1239
+ "step": 87000
1240
+ },
1241
+ {
1242
+ "epoch": 1.0334482921528796,
1243
+ "grad_norm": 18.525869369506836,
1244
+ "learning_rate": 1.3113021842175715e-05,
1245
+ "loss": 0.5393,
1246
+ "step": 87500
1247
+ },
1248
+ {
1249
+ "epoch": 1.0393537109651816,
1250
+ "grad_norm": 15.347341537475586,
1251
+ "learning_rate": 1.3073652383427033e-05,
1252
+ "loss": 0.5444,
1253
+ "step": 88000
1254
+ },
1255
+ {
1256
+ "epoch": 1.0452591297774838,
1257
+ "grad_norm": 0.81917804479599,
1258
+ "learning_rate": 1.3034282924678352e-05,
1259
+ "loss": 0.5622,
1260
+ "step": 88500
1261
+ },
1262
+ {
1263
+ "epoch": 1.051164548589786,
1264
+ "grad_norm": 0.8243115544319153,
1265
+ "learning_rate": 1.299491346592967e-05,
1266
+ "loss": 0.5485,
1267
+ "step": 89000
1268
+ },
1269
+ {
1270
+ "epoch": 1.0570699674020883,
1271
+ "grad_norm": 0.9479914903640747,
1272
+ "learning_rate": 1.2955544007180991e-05,
1273
+ "loss": 0.5653,
1274
+ "step": 89500
1275
+ },
1276
+ {
1277
+ "epoch": 1.0629753862143902,
1278
+ "grad_norm": 11.97670841217041,
1279
+ "learning_rate": 1.2916253287349807e-05,
1280
+ "loss": 0.5698,
1281
+ "step": 90000
1282
+ },
1283
+ {
1284
+ "epoch": 1.0688808050266925,
1285
+ "grad_norm": 6.69007682800293,
1286
+ "learning_rate": 1.2876883828601126e-05,
1287
+ "loss": 0.5151,
1288
+ "step": 90500
1289
+ },
1290
+ {
1291
+ "epoch": 1.0747862238389947,
1292
+ "grad_norm": 17.800846099853516,
1293
+ "learning_rate": 1.2837514369852444e-05,
1294
+ "loss": 0.5431,
1295
+ "step": 91000
1296
+ },
1297
+ {
1298
+ "epoch": 1.080691642651297,
1299
+ "grad_norm": 6.926564693450928,
1300
+ "learning_rate": 1.2798144911103762e-05,
1301
+ "loss": 0.5562,
1302
+ "step": 91500
1303
+ },
1304
+ {
1305
+ "epoch": 1.086597061463599,
1306
+ "grad_norm": 3.1776883602142334,
1307
+ "learning_rate": 1.275885419127258e-05,
1308
+ "loss": 0.5498,
1309
+ "step": 92000
1310
+ },
1311
+ {
1312
+ "epoch": 1.0925024802759011,
1313
+ "grad_norm": 25.895919799804688,
1314
+ "learning_rate": 1.2719484732523898e-05,
1315
+ "loss": 0.5481,
1316
+ "step": 92500
1317
+ },
1318
+ {
1319
+ "epoch": 1.0984078990882034,
1320
+ "grad_norm": 0.28274840116500854,
1321
+ "learning_rate": 1.2680194012692715e-05,
1322
+ "loss": 0.5394,
1323
+ "step": 93000
1324
+ },
1325
+ {
1326
+ "epoch": 1.1043133179005056,
1327
+ "grad_norm": 0.2646944224834442,
1328
+ "learning_rate": 1.2640824553944033e-05,
1329
+ "loss": 0.5297,
1330
+ "step": 93500
1331
+ },
1332
+ {
1333
+ "epoch": 1.1102187367128076,
1334
+ "grad_norm": 0.24462255835533142,
1335
+ "learning_rate": 1.2601533834112849e-05,
1336
+ "loss": 0.5158,
1337
+ "step": 94000
1338
+ },
1339
+ {
1340
+ "epoch": 1.1161241555251098,
1341
+ "grad_norm": 10.8624267578125,
1342
+ "learning_rate": 1.2562164375364168e-05,
1343
+ "loss": 0.5647,
1344
+ "step": 94500
1345
+ },
1346
+ {
1347
+ "epoch": 1.122029574337412,
1348
+ "grad_norm": 4.669233798980713,
1349
+ "learning_rate": 1.2522794916615488e-05,
1350
+ "loss": 0.5599,
1351
+ "step": 95000
1352
+ },
1353
+ {
1354
+ "epoch": 1.1279349931497142,
1355
+ "grad_norm": 7.873256206512451,
1356
+ "learning_rate": 1.2483425457866807e-05,
1357
+ "loss": 0.555,
1358
+ "step": 95500
1359
+ },
1360
+ {
1361
+ "epoch": 1.1338404119620162,
1362
+ "grad_norm": 0.3633436858654022,
1363
+ "learning_rate": 1.2444055999118125e-05,
1364
+ "loss": 0.5347,
1365
+ "step": 96000
1366
+ },
1367
+ {
1368
+ "epoch": 1.1397458307743185,
1369
+ "grad_norm": 63.884368896484375,
1370
+ "learning_rate": 1.2404686540369444e-05,
1371
+ "loss": 0.5518,
1372
+ "step": 96500
1373
+ },
1374
+ {
1375
+ "epoch": 1.1456512495866207,
1376
+ "grad_norm": 9.837042808532715,
1377
+ "learning_rate": 1.2365317081620762e-05,
1378
+ "loss": 0.5211,
1379
+ "step": 97000
1380
+ },
1381
+ {
1382
+ "epoch": 1.151556668398923,
1383
+ "grad_norm": 0.36800965666770935,
1384
+ "learning_rate": 1.2325947622872082e-05,
1385
+ "loss": 0.5463,
1386
+ "step": 97500
1387
+ },
1388
+ {
1389
+ "epoch": 1.1574620872112251,
1390
+ "grad_norm": 11.544560432434082,
1391
+ "learning_rate": 1.2286578164123401e-05,
1392
+ "loss": 0.54,
1393
+ "step": 98000
1394
+ },
1395
+ {
1396
+ "epoch": 1.1633675060235271,
1397
+ "grad_norm": 3.7216811180114746,
1398
+ "learning_rate": 1.224720870537472e-05,
1399
+ "loss": 0.5314,
1400
+ "step": 98500
1401
+ },
1402
+ {
1403
+ "epoch": 1.1692729248358293,
1404
+ "grad_norm": 12.869535446166992,
1405
+ "learning_rate": 1.2207917985543536e-05,
1406
+ "loss": 0.5655,
1407
+ "step": 99000
1408
+ },
1409
+ {
1410
+ "epoch": 1.1751783436481316,
1411
+ "grad_norm": 1.4455419778823853,
1412
+ "learning_rate": 1.2168548526794854e-05,
1413
+ "loss": 0.5208,
1414
+ "step": 99500
1415
+ },
1416
+ {
1417
+ "epoch": 1.1810837624604338,
1418
+ "grad_norm": 0.6675042510032654,
1419
+ "learning_rate": 1.2129179068046173e-05,
1420
+ "loss": 0.5584,
1421
+ "step": 100000
1422
+ },
1423
+ {
1424
+ "epoch": 1.1869891812727358,
1425
+ "grad_norm": 0.5745788812637329,
1426
+ "learning_rate": 1.2089888348214989e-05,
1427
+ "loss": 0.5369,
1428
+ "step": 100500
1429
+ },
1430
+ {
1431
+ "epoch": 1.192894600085038,
1432
+ "grad_norm": 0.47152823209762573,
1433
+ "learning_rate": 1.205051888946631e-05,
1434
+ "loss": 0.5601,
1435
+ "step": 101000
1436
+ },
1437
+ {
1438
+ "epoch": 1.1988000188973402,
1439
+ "grad_norm": 0.3635103702545166,
1440
+ "learning_rate": 1.2011149430717628e-05,
1441
+ "loss": 0.5322,
1442
+ "step": 101500
1443
+ },
1444
+ {
1445
+ "epoch": 1.2047054377096424,
1446
+ "grad_norm": 6.082094192504883,
1447
+ "learning_rate": 1.1971779971968947e-05,
1448
+ "loss": 0.5548,
1449
+ "step": 102000
1450
+ },
1451
+ {
1452
+ "epoch": 1.2106108565219444,
1453
+ "grad_norm": 5.332359313964844,
1454
+ "learning_rate": 1.1932489252137762e-05,
1455
+ "loss": 0.5641,
1456
+ "step": 102500
1457
+ },
1458
+ {
1459
+ "epoch": 1.2165162753342467,
1460
+ "grad_norm": 0.7001695036888123,
1461
+ "learning_rate": 1.189311979338908e-05,
1462
+ "loss": 0.55,
1463
+ "step": 103000
1464
+ },
1465
+ {
1466
+ "epoch": 1.2224216941465489,
1467
+ "grad_norm": 0.35426071286201477,
1468
+ "learning_rate": 1.18537503346404e-05,
1469
+ "loss": 0.544,
1470
+ "step": 103500
1471
+ },
1472
+ {
1473
+ "epoch": 1.228327112958851,
1474
+ "grad_norm": 10.50427532196045,
1475
+ "learning_rate": 1.181438087589172e-05,
1476
+ "loss": 0.567,
1477
+ "step": 104000
1478
+ },
1479
+ {
1480
+ "epoch": 1.2342325317711533,
1481
+ "grad_norm": 0.4111456871032715,
1482
+ "learning_rate": 1.1775011417143039e-05,
1483
+ "loss": 0.5346,
1484
+ "step": 104500
1485
+ },
1486
+ {
1487
+ "epoch": 1.2401379505834553,
1488
+ "grad_norm": 3.6128838062286377,
1489
+ "learning_rate": 1.1735720697311854e-05,
1490
+ "loss": 0.5173,
1491
+ "step": 105000
1492
+ },
1493
+ {
1494
+ "epoch": 1.2460433693957575,
1495
+ "grad_norm": 0.2788158059120178,
1496
+ "learning_rate": 1.1696351238563173e-05,
1497
+ "loss": 0.5486,
1498
+ "step": 105500
1499
+ },
1500
+ {
1501
+ "epoch": 1.2519487882080598,
1502
+ "grad_norm": 2.4274394512176514,
1503
+ "learning_rate": 1.1656981779814491e-05,
1504
+ "loss": 0.573,
1505
+ "step": 106000
1506
+ },
1507
+ {
1508
+ "epoch": 1.2578542070203618,
1509
+ "grad_norm": 26.823347091674805,
1510
+ "learning_rate": 1.1617612321065809e-05,
1511
+ "loss": 0.5389,
1512
+ "step": 106500
1513
+ },
1514
+ {
1515
+ "epoch": 1.263759625832664,
1516
+ "grad_norm": 5.76626443862915,
1517
+ "learning_rate": 1.157824286231713e-05,
1518
+ "loss": 0.5697,
1519
+ "step": 107000
1520
+ },
1521
+ {
1522
+ "epoch": 1.2696650446449662,
1523
+ "grad_norm": 6.441776752471924,
1524
+ "learning_rate": 1.153887340356845e-05,
1525
+ "loss": 0.5172,
1526
+ "step": 107500
1527
+ },
1528
+ {
1529
+ "epoch": 1.2755704634572684,
1530
+ "grad_norm": 4.22221040725708,
1531
+ "learning_rate": 1.1499503944819767e-05,
1532
+ "loss": 0.5803,
1533
+ "step": 108000
1534
+ },
1535
+ {
1536
+ "epoch": 1.2814758822695707,
1537
+ "grad_norm": 0.2600265145301819,
1538
+ "learning_rate": 1.1460213224988583e-05,
1539
+ "loss": 0.5235,
1540
+ "step": 108500
1541
+ },
1542
+ {
1543
+ "epoch": 1.2873813010818727,
1544
+ "grad_norm": 3.5664291381835938,
1545
+ "learning_rate": 1.1420843766239902e-05,
1546
+ "loss": 0.5762,
1547
+ "step": 109000
1548
+ },
1549
+ {
1550
+ "epoch": 1.2932867198941749,
1551
+ "grad_norm": 4.590908050537109,
1552
+ "learning_rate": 1.1381474307491222e-05,
1553
+ "loss": 0.554,
1554
+ "step": 109500
1555
+ },
1556
+ {
1557
+ "epoch": 1.299192138706477,
1558
+ "grad_norm": 0.40613120794296265,
1559
+ "learning_rate": 1.1342183587660039e-05,
1560
+ "loss": 0.5396,
1561
+ "step": 110000
1562
+ },
1563
+ {
1564
+ "epoch": 1.305097557518779,
1565
+ "grad_norm": 0.6809604167938232,
1566
+ "learning_rate": 1.1302814128911357e-05,
1567
+ "loss": 0.5567,
1568
+ "step": 110500
1569
+ },
1570
+ {
1571
+ "epoch": 1.3110029763310815,
1572
+ "grad_norm": 5.76853084564209,
1573
+ "learning_rate": 1.1263444670162676e-05,
1574
+ "loss": 0.5376,
1575
+ "step": 111000
1576
+ },
1577
+ {
1578
+ "epoch": 1.3169083951433835,
1579
+ "grad_norm": 8.591459274291992,
1580
+ "learning_rate": 1.1224075211413994e-05,
1581
+ "loss": 0.5463,
1582
+ "step": 111500
1583
+ },
1584
+ {
1585
+ "epoch": 1.3228138139556858,
1586
+ "grad_norm": 9.847731590270996,
1587
+ "learning_rate": 1.1184705752665312e-05,
1588
+ "loss": 0.5211,
1589
+ "step": 112000
1590
+ },
1591
+ {
1592
+ "epoch": 1.328719232767988,
1593
+ "grad_norm": 2.084632158279419,
1594
+ "learning_rate": 1.1145336293916633e-05,
1595
+ "loss": 0.5687,
1596
+ "step": 112500
1597
+ },
1598
+ {
1599
+ "epoch": 1.33462465158029,
1600
+ "grad_norm": 0.2582547962665558,
1601
+ "learning_rate": 1.1105966835167952e-05,
1602
+ "loss": 0.5258,
1603
+ "step": 113000
1604
+ },
1605
+ {
1606
+ "epoch": 1.3405300703925922,
1607
+ "grad_norm": 4.441883563995361,
1608
+ "learning_rate": 1.106659737641927e-05,
1609
+ "loss": 0.5789,
1610
+ "step": 113500
1611
+ },
1612
+ {
1613
+ "epoch": 1.3464354892048944,
1614
+ "grad_norm": 3.4951181411743164,
1615
+ "learning_rate": 1.1027227917670588e-05,
1616
+ "loss": 0.5395,
1617
+ "step": 114000
1618
+ },
1619
+ {
1620
+ "epoch": 1.3523409080171966,
1621
+ "grad_norm": 7.546202659606934,
1622
+ "learning_rate": 1.0987858458921907e-05,
1623
+ "loss": 0.5546,
1624
+ "step": 114500
1625
+ },
1626
+ {
1627
+ "epoch": 1.3582463268294989,
1628
+ "grad_norm": 27.956619262695312,
1629
+ "learning_rate": 1.0948489000173225e-05,
1630
+ "loss": 0.5503,
1631
+ "step": 115000
1632
+ },
1633
+ {
1634
+ "epoch": 1.3641517456418009,
1635
+ "grad_norm": 14.313876152038574,
1636
+ "learning_rate": 1.0909119541424545e-05,
1637
+ "loss": 0.5552,
1638
+ "step": 115500
1639
+ },
1640
+ {
1641
+ "epoch": 1.370057164454103,
1642
+ "grad_norm": 3.3627312183380127,
1643
+ "learning_rate": 1.0869750082675866e-05,
1644
+ "loss": 0.562,
1645
+ "step": 116000
1646
+ },
1647
+ {
1648
+ "epoch": 1.3759625832664053,
1649
+ "grad_norm": 4.319688320159912,
1650
+ "learning_rate": 1.0830380623927184e-05,
1651
+ "loss": 0.513,
1652
+ "step": 116500
1653
+ },
1654
+ {
1655
+ "epoch": 1.3818680020787073,
1656
+ "grad_norm": 0.45721712708473206,
1657
+ "learning_rate": 1.0791089904095999e-05,
1658
+ "loss": 0.5349,
1659
+ "step": 117000
1660
+ },
1661
+ {
1662
+ "epoch": 1.3877734208910095,
1663
+ "grad_norm": 0.3523353934288025,
1664
+ "learning_rate": 1.0751720445347319e-05,
1665
+ "loss": 0.5367,
1666
+ "step": 117500
1667
+ },
1668
+ {
1669
+ "epoch": 1.3936788397033117,
1670
+ "grad_norm": 5.773875713348389,
1671
+ "learning_rate": 1.0712350986598636e-05,
1672
+ "loss": 0.5093,
1673
+ "step": 118000
1674
+ },
1675
+ {
1676
+ "epoch": 1.399584258515614,
1677
+ "grad_norm": 0.3608294725418091,
1678
+ "learning_rate": 1.0672981527849954e-05,
1679
+ "loss": 0.5124,
1680
+ "step": 118500
1681
+ },
1682
+ {
1683
+ "epoch": 1.4054896773279162,
1684
+ "grad_norm": 0.5569049119949341,
1685
+ "learning_rate": 1.0633612069101275e-05,
1686
+ "loss": 0.5796,
1687
+ "step": 119000
1688
+ },
1689
+ {
1690
+ "epoch": 1.4113950961402182,
1691
+ "grad_norm": 0.26475992798805237,
1692
+ "learning_rate": 1.0594242610352595e-05,
1693
+ "loss": 0.5263,
1694
+ "step": 119500
1695
+ },
1696
+ {
1697
+ "epoch": 1.4173005149525204,
1698
+ "grad_norm": 5.134099960327148,
1699
+ "learning_rate": 1.0555030629438908e-05,
1700
+ "loss": 0.498,
1701
+ "step": 120000
1702
+ },
1703
+ {
1704
+ "epoch": 1.4232059337648226,
1705
+ "grad_norm": 4.049677848815918,
1706
+ "learning_rate": 1.0515661170690225e-05,
1707
+ "loss": 0.5565,
1708
+ "step": 120500
1709
+ },
1710
+ {
1711
+ "epoch": 1.4291113525771248,
1712
+ "grad_norm": 4.562811851501465,
1713
+ "learning_rate": 1.0476291711941545e-05,
1714
+ "loss": 0.5173,
1715
+ "step": 121000
1716
+ },
1717
+ {
1718
+ "epoch": 1.435016771389427,
1719
+ "grad_norm": 0.3501911759376526,
1720
+ "learning_rate": 1.0436922253192864e-05,
1721
+ "loss": 0.5658,
1722
+ "step": 121500
1723
+ },
1724
+ {
1725
+ "epoch": 1.440922190201729,
1726
+ "grad_norm": 0.5130248665809631,
1727
+ "learning_rate": 1.0397552794444184e-05,
1728
+ "loss": 0.5157,
1729
+ "step": 122000
1730
+ },
1731
+ {
1732
+ "epoch": 1.4468276090140313,
1733
+ "grad_norm": 3.659543037414551,
1734
+ "learning_rate": 1.0358183335695502e-05,
1735
+ "loss": 0.5515,
1736
+ "step": 122500
1737
+ },
1738
+ {
1739
+ "epoch": 1.4527330278263335,
1740
+ "grad_norm": 0.807367742061615,
1741
+ "learning_rate": 1.0318892615864317e-05,
1742
+ "loss": 0.5411,
1743
+ "step": 123000
1744
+ },
1745
+ {
1746
+ "epoch": 1.4586384466386355,
1747
+ "grad_norm": 4.063622951507568,
1748
+ "learning_rate": 1.0279523157115637e-05,
1749
+ "loss": 0.5604,
1750
+ "step": 123500
1751
+ },
1752
+ {
1753
+ "epoch": 1.4645438654509377,
1754
+ "grad_norm": 0.5569178462028503,
1755
+ "learning_rate": 1.0240153698366954e-05,
1756
+ "loss": 0.546,
1757
+ "step": 124000
1758
+ },
1759
+ {
1760
+ "epoch": 1.47044928426324,
1761
+ "grad_norm": 0.4748586416244507,
1762
+ "learning_rate": 1.0200784239618276e-05,
1763
+ "loss": 0.5357,
1764
+ "step": 124500
1765
+ },
1766
+ {
1767
+ "epoch": 1.4763547030755422,
1768
+ "grad_norm": 0.5214936137199402,
1769
+ "learning_rate": 1.0161414780869593e-05,
1770
+ "loss": 0.5815,
1771
+ "step": 125000
1772
+ },
1773
+ {
1774
+ "epoch": 1.4822601218878444,
1775
+ "grad_norm": 0.48534107208251953,
1776
+ "learning_rate": 1.0122045322120913e-05,
1777
+ "loss": 0.5601,
1778
+ "step": 125500
1779
+ },
1780
+ {
1781
+ "epoch": 1.4881655407001464,
1782
+ "grad_norm": 4.443326473236084,
1783
+ "learning_rate": 1.008267586337223e-05,
1784
+ "loss": 0.5422,
1785
+ "step": 126000
1786
+ },
1787
+ {
1788
+ "epoch": 1.4940709595124486,
1789
+ "grad_norm": 0.4154174327850342,
1790
+ "learning_rate": 1.004330640462355e-05,
1791
+ "loss": 0.532,
1792
+ "step": 126500
1793
+ },
1794
+ {
1795
+ "epoch": 1.4999763783247508,
1796
+ "grad_norm": 0.4233492612838745,
1797
+ "learning_rate": 1.0003936945874868e-05,
1798
+ "loss": 0.5632,
1799
+ "step": 127000
1800
+ },
1801
+ {
1802
+ "epoch": 1.5058817971370528,
1803
+ "grad_norm": 4.048283576965332,
1804
+ "learning_rate": 9.964646226043685e-06,
1805
+ "loss": 0.5167,
1806
+ "step": 127500
1807
+ },
1808
+ {
1809
+ "epoch": 1.5117872159493553,
1810
+ "grad_norm": 18.454174041748047,
1811
+ "learning_rate": 9.925355506212502e-06,
1812
+ "loss": 0.5577,
1813
+ "step": 128000
1814
+ },
1815
+ {
1816
+ "epoch": 1.5176926347616573,
1817
+ "grad_norm": 6.030921459197998,
1818
+ "learning_rate": 9.88598604746382e-06,
1819
+ "loss": 0.5741,
1820
+ "step": 128500
1821
+ },
1822
+ {
1823
+ "epoch": 1.5235980535739595,
1824
+ "grad_norm": 0.2656106948852539,
1825
+ "learning_rate": 9.84661658871514e-06,
1826
+ "loss": 0.5258,
1827
+ "step": 129000
1828
+ },
1829
+ {
1830
+ "epoch": 1.5295034723862617,
1831
+ "grad_norm": 13.179415702819824,
1832
+ "learning_rate": 9.807247129966459e-06,
1833
+ "loss": 0.5633,
1834
+ "step": 129500
1835
+ },
1836
+ {
1837
+ "epoch": 1.5354088911985637,
1838
+ "grad_norm": 9.94273567199707,
1839
+ "learning_rate": 9.767877671217777e-06,
1840
+ "loss": 0.5204,
1841
+ "step": 130000
1842
+ },
1843
+ {
1844
+ "epoch": 1.541314310010866,
1845
+ "grad_norm": 0.41358762979507446,
1846
+ "learning_rate": 9.728586951386592e-06,
1847
+ "loss": 0.5533,
1848
+ "step": 130500
1849
+ },
1850
+ {
1851
+ "epoch": 1.5472197288231682,
1852
+ "grad_norm": 10.123603820800781,
1853
+ "learning_rate": 9.689217492637913e-06,
1854
+ "loss": 0.5438,
1855
+ "step": 131000
1856
+ },
1857
+ {
1858
+ "epoch": 1.5531251476354702,
1859
+ "grad_norm": 0.499338299036026,
1860
+ "learning_rate": 9.64984803388923e-06,
1861
+ "loss": 0.5479,
1862
+ "step": 131500
1863
+ },
1864
+ {
1865
+ "epoch": 1.5590305664477726,
1866
+ "grad_norm": 0.45118188858032227,
1867
+ "learning_rate": 9.610478575140549e-06,
1868
+ "loss": 0.5273,
1869
+ "step": 132000
1870
+ },
1871
+ {
1872
+ "epoch": 1.5649359852600746,
1873
+ "grad_norm": 7.0303473472595215,
1874
+ "learning_rate": 9.57110911639187e-06,
1875
+ "loss": 0.5668,
1876
+ "step": 132500
1877
+ },
1878
+ {
1879
+ "epoch": 1.5708414040723768,
1880
+ "grad_norm": 3.8048393726348877,
1881
+ "learning_rate": 9.531739657643188e-06,
1882
+ "loss": 0.5587,
1883
+ "step": 133000
1884
+ },
1885
+ {
1886
+ "epoch": 1.576746822884679,
1887
+ "grad_norm": 4.167747974395752,
1888
+ "learning_rate": 9.492370198894505e-06,
1889
+ "loss": 0.5506,
1890
+ "step": 133500
1891
+ },
1892
+ {
1893
+ "epoch": 1.582652241696981,
1894
+ "grad_norm": 0.5552203059196472,
1895
+ "learning_rate": 9.453000740145825e-06,
1896
+ "loss": 0.5672,
1897
+ "step": 134000
1898
+ },
1899
+ {
1900
+ "epoch": 1.5885576605092833,
1901
+ "grad_norm": 14.452466011047363,
1902
+ "learning_rate": 9.413631281397144e-06,
1903
+ "loss": 0.5626,
1904
+ "step": 134500
1905
+ },
1906
+ {
1907
+ "epoch": 1.5944630793215855,
1908
+ "grad_norm": 3.762688636779785,
1909
+ "learning_rate": 9.374261822648462e-06,
1910
+ "loss": 0.5351,
1911
+ "step": 135000
1912
+ },
1913
+ {
1914
+ "epoch": 1.6003684981338877,
1915
+ "grad_norm": 5.434040069580078,
1916
+ "learning_rate": 9.33497110281728e-06,
1917
+ "loss": 0.5493,
1918
+ "step": 135500
1919
+ },
1920
+ {
1921
+ "epoch": 1.60627391694619,
1922
+ "grad_norm": 0.42822033166885376,
1923
+ "learning_rate": 9.295601644068599e-06,
1924
+ "loss": 0.5373,
1925
+ "step": 136000
1926
+ },
1927
+ {
1928
+ "epoch": 1.612179335758492,
1929
+ "grad_norm": 5.176085472106934,
1930
+ "learning_rate": 9.256232185319916e-06,
1931
+ "loss": 0.5474,
1932
+ "step": 136500
1933
+ },
1934
+ {
1935
+ "epoch": 1.6180847545707941,
1936
+ "grad_norm": 0.9219861626625061,
1937
+ "learning_rate": 9.216862726571236e-06,
1938
+ "loss": 0.5238,
1939
+ "step": 137000
1940
+ },
1941
+ {
1942
+ "epoch": 1.6239901733830964,
1943
+ "grad_norm": 0.42050594091415405,
1944
+ "learning_rate": 9.177572006740051e-06,
1945
+ "loss": 0.5089,
1946
+ "step": 137500
1947
+ },
1948
+ {
1949
+ "epoch": 1.6298955921953984,
1950
+ "grad_norm": 0.4675411880016327,
1951
+ "learning_rate": 9.13820254799137e-06,
1952
+ "loss": 0.5347,
1953
+ "step": 138000
1954
+ },
1955
+ {
1956
+ "epoch": 1.6358010110077008,
1957
+ "grad_norm": 0.4429144561290741,
1958
+ "learning_rate": 9.098911828160188e-06,
1959
+ "loss": 0.53,
1960
+ "step": 138500
1961
+ },
1962
+ {
1963
+ "epoch": 1.6417064298200028,
1964
+ "grad_norm": 0.5213695168495178,
1965
+ "learning_rate": 9.059542369411506e-06,
1966
+ "loss": 0.5393,
1967
+ "step": 139000
1968
+ },
1969
+ {
1970
+ "epoch": 1.647611848632305,
1971
+ "grad_norm": 5.133869171142578,
1972
+ "learning_rate": 9.020172910662825e-06,
1973
+ "loss": 0.5551,
1974
+ "step": 139500
1975
+ },
1976
+ {
1977
+ "epoch": 1.6535172674446073,
1978
+ "grad_norm": 0.7379022836685181,
1979
+ "learning_rate": 8.980803451914145e-06,
1980
+ "loss": 0.5823,
1981
+ "step": 140000
1982
+ },
1983
+ {
1984
+ "epoch": 1.6594226862569093,
1985
+ "grad_norm": 9.675186157226562,
1986
+ "learning_rate": 8.941433993165462e-06,
1987
+ "loss": 0.5478,
1988
+ "step": 140500
1989
+ },
1990
+ {
1991
+ "epoch": 1.6653281050692115,
1992
+ "grad_norm": 8.614265441894531,
1993
+ "learning_rate": 8.902064534416782e-06,
1994
+ "loss": 0.5602,
1995
+ "step": 141000
1996
+ },
1997
+ {
1998
+ "epoch": 1.6712335238815137,
1999
+ "grad_norm": 5.278205871582031,
2000
+ "learning_rate": 8.862695075668101e-06,
2001
+ "loss": 0.5239,
2002
+ "step": 141500
2003
+ },
2004
+ {
2005
+ "epoch": 1.6771389426938157,
2006
+ "grad_norm": 0.5190747380256653,
2007
+ "learning_rate": 8.82332561691942e-06,
2008
+ "loss": 0.5538,
2009
+ "step": 142000
2010
+ },
2011
+ {
2012
+ "epoch": 1.6830443615061181,
2013
+ "grad_norm": 5.418485641479492,
2014
+ "learning_rate": 8.784034897088236e-06,
2015
+ "loss": 0.5344,
2016
+ "step": 142500
2017
+ },
2018
+ {
2019
+ "epoch": 1.6889497803184201,
2020
+ "grad_norm": 11.25378704071045,
2021
+ "learning_rate": 8.744665438339554e-06,
2022
+ "loss": 0.5454,
2023
+ "step": 143000
2024
+ },
2025
+ {
2026
+ "epoch": 1.6948551991307224,
2027
+ "grad_norm": 0.3652142584323883,
2028
+ "learning_rate": 8.705295979590873e-06,
2029
+ "loss": 0.5576,
2030
+ "step": 143500
2031
+ },
2032
+ {
2033
+ "epoch": 1.7007606179430246,
2034
+ "grad_norm": 5.730474472045898,
2035
+ "learning_rate": 8.665926520842191e-06,
2036
+ "loss": 0.5605,
2037
+ "step": 144000
2038
+ },
2039
+ {
2040
+ "epoch": 1.7066660367553266,
2041
+ "grad_norm": 3.815854072570801,
2042
+ "learning_rate": 8.62655706209351e-06,
2043
+ "loss": 0.5292,
2044
+ "step": 144500
2045
+ },
2046
+ {
2047
+ "epoch": 1.712571455567629,
2048
+ "grad_norm": 4.446095943450928,
2049
+ "learning_rate": 8.587266342262328e-06,
2050
+ "loss": 0.5489,
2051
+ "step": 145000
2052
+ },
2053
+ {
2054
+ "epoch": 1.718476874379931,
2055
+ "grad_norm": 14.915255546569824,
2056
+ "learning_rate": 8.547896883513647e-06,
2057
+ "loss": 0.5334,
2058
+ "step": 145500
2059
+ },
2060
+ {
2061
+ "epoch": 1.7243822931922332,
2062
+ "grad_norm": 4.688992977142334,
2063
+ "learning_rate": 8.508606163682463e-06,
2064
+ "loss": 0.5645,
2065
+ "step": 146000
2066
+ },
2067
+ {
2068
+ "epoch": 1.7302877120045355,
2069
+ "grad_norm": 1.7567044496536255,
2070
+ "learning_rate": 8.46923670493378e-06,
2071
+ "loss": 0.5774,
2072
+ "step": 146500
2073
+ },
2074
+ {
2075
+ "epoch": 1.7361931308168375,
2076
+ "grad_norm": 3.2886345386505127,
2077
+ "learning_rate": 8.4298672461851e-06,
2078
+ "loss": 0.582,
2079
+ "step": 147000
2080
+ },
2081
+ {
2082
+ "epoch": 1.7420985496291397,
2083
+ "grad_norm": 16.65200424194336,
2084
+ "learning_rate": 8.39049778743642e-06,
2085
+ "loss": 0.5144,
2086
+ "step": 147500
2087
+ },
2088
+ {
2089
+ "epoch": 1.748003968441442,
2090
+ "grad_norm": 4.4966301918029785,
2091
+ "learning_rate": 8.351128328687737e-06,
2092
+ "loss": 0.5518,
2093
+ "step": 148000
2094
+ },
2095
+ {
2096
+ "epoch": 1.753909387253744,
2097
+ "grad_norm": 6.592241287231445,
2098
+ "learning_rate": 8.311758869939057e-06,
2099
+ "loss": 0.5226,
2100
+ "step": 148500
2101
+ },
2102
+ {
2103
+ "epoch": 1.7598148060660463,
2104
+ "grad_norm": 2.397592782974243,
2105
+ "learning_rate": 8.272389411190376e-06,
2106
+ "loss": 0.5193,
2107
+ "step": 149000
2108
+ },
2109
+ {
2110
+ "epoch": 1.7657202248783483,
2111
+ "grad_norm": 3.1350808143615723,
2112
+ "learning_rate": 8.233019952441694e-06,
2113
+ "loss": 0.5367,
2114
+ "step": 149500
2115
+ },
2116
+ {
2117
+ "epoch": 1.7716256436906506,
2118
+ "grad_norm": 0.9412761330604553,
2119
+ "learning_rate": 8.193650493693013e-06,
2120
+ "loss": 0.5596,
2121
+ "step": 150000
2122
+ },
2123
+ {
2124
+ "epoch": 1.7775310625029528,
2125
+ "grad_norm": 1.5991017818450928,
2126
+ "learning_rate": 8.154359773861829e-06,
2127
+ "loss": 0.5282,
2128
+ "step": 150500
2129
+ },
2130
+ {
2131
+ "epoch": 1.7834364813152548,
2132
+ "grad_norm": 0.35781508684158325,
2133
+ "learning_rate": 8.115069054030646e-06,
2134
+ "loss": 0.5356,
2135
+ "step": 151000
2136
+ },
2137
+ {
2138
+ "epoch": 1.789341900127557,
2139
+ "grad_norm": 0.567905068397522,
2140
+ "learning_rate": 8.075699595281965e-06,
2141
+ "loss": 0.5814,
2142
+ "step": 151500
2143
+ },
2144
+ {
2145
+ "epoch": 1.7952473189398592,
2146
+ "grad_norm": 0.3652840256690979,
2147
+ "learning_rate": 8.036330136533283e-06,
2148
+ "loss": 0.5148,
2149
+ "step": 152000
2150
+ },
2151
+ {
2152
+ "epoch": 1.8011527377521612,
2153
+ "grad_norm": 8.219497680664062,
2154
+ "learning_rate": 7.996960677784603e-06,
2155
+ "loss": 0.5564,
2156
+ "step": 152500
2157
+ },
2158
+ {
2159
+ "epoch": 1.8070581565644637,
2160
+ "grad_norm": 11.082978248596191,
2161
+ "learning_rate": 7.957591219035922e-06,
2162
+ "loss": 0.5772,
2163
+ "step": 153000
2164
+ },
2165
+ {
2166
+ "epoch": 1.8129635753767657,
2167
+ "grad_norm": 9.251150131225586,
2168
+ "learning_rate": 7.91822176028724e-06,
2169
+ "loss": 0.5433,
2170
+ "step": 153500
2171
+ },
2172
+ {
2173
+ "epoch": 1.818868994189068,
2174
+ "grad_norm": 25.916170120239258,
2175
+ "learning_rate": 7.87885230153856e-06,
2176
+ "loss": 0.5636,
2177
+ "step": 154000
2178
+ },
2179
+ {
2180
+ "epoch": 1.82477441300137,
2181
+ "grad_norm": 21.578245162963867,
2182
+ "learning_rate": 7.839482842789879e-06,
2183
+ "loss": 0.5564,
2184
+ "step": 154500
2185
+ },
2186
+ {
2187
+ "epoch": 1.830679831813672,
2188
+ "grad_norm": 25.095073699951172,
2189
+ "learning_rate": 7.800192122958694e-06,
2190
+ "loss": 0.5253,
2191
+ "step": 155000
2192
+ },
2193
+ {
2194
+ "epoch": 1.8365852506259746,
2195
+ "grad_norm": 6.516243934631348,
2196
+ "learning_rate": 7.760822664210012e-06,
2197
+ "loss": 0.4997,
2198
+ "step": 155500
2199
+ },
2200
+ {
2201
+ "epoch": 1.8424906694382766,
2202
+ "grad_norm": 0.37588614225387573,
2203
+ "learning_rate": 7.721531944378829e-06,
2204
+ "loss": 0.5575,
2205
+ "step": 156000
2206
+ },
2207
+ {
2208
+ "epoch": 1.8483960882505788,
2209
+ "grad_norm": 0.3492718040943146,
2210
+ "learning_rate": 7.682162485630149e-06,
2211
+ "loss": 0.5366,
2212
+ "step": 156500
2213
+ },
2214
+ {
2215
+ "epoch": 1.854301507062881,
2216
+ "grad_norm": 4.217477321624756,
2217
+ "learning_rate": 7.642793026881466e-06,
2218
+ "loss": 0.5493,
2219
+ "step": 157000
2220
+ },
2221
+ {
2222
+ "epoch": 1.860206925875183,
2223
+ "grad_norm": 4.931800842285156,
2224
+ "learning_rate": 7.603423568132786e-06,
2225
+ "loss": 0.5619,
2226
+ "step": 157500
2227
+ },
2228
+ {
2229
+ "epoch": 1.8661123446874852,
2230
+ "grad_norm": 4.721713066101074,
2231
+ "learning_rate": 7.5640541093841045e-06,
2232
+ "loss": 0.5369,
2233
+ "step": 158000
2234
+ },
2235
+ {
2236
+ "epoch": 1.8720177634997874,
2237
+ "grad_norm": 0.4199652671813965,
2238
+ "learning_rate": 7.524684650635423e-06,
2239
+ "loss": 0.554,
2240
+ "step": 158500
2241
+ },
2242
+ {
2243
+ "epoch": 1.8779231823120894,
2244
+ "grad_norm": 18.513507843017578,
2245
+ "learning_rate": 7.4853151918867426e-06,
2246
+ "loss": 0.5413,
2247
+ "step": 159000
2248
+ },
2249
+ {
2250
+ "epoch": 1.8838286011243919,
2251
+ "grad_norm": 19.6885929107666,
2252
+ "learning_rate": 7.445945733138061e-06,
2253
+ "loss": 0.5472,
2254
+ "step": 159500
2255
+ },
2256
+ {
2257
+ "epoch": 1.8897340199366939,
2258
+ "grad_norm": 9.544459342956543,
2259
+ "learning_rate": 7.406655013306878e-06,
2260
+ "loss": 0.5185,
2261
+ "step": 160000
2262
+ },
2263
+ {
2264
+ "epoch": 1.895639438748996,
2265
+ "grad_norm": 4.329341411590576,
2266
+ "learning_rate": 7.367285554558197e-06,
2267
+ "loss": 0.488,
2268
+ "step": 160500
2269
+ },
2270
+ {
2271
+ "epoch": 1.9015448575612983,
2272
+ "grad_norm": 0.7727178931236267,
2273
+ "learning_rate": 7.3279160958095155e-06,
2274
+ "loss": 0.5912,
2275
+ "step": 161000
2276
+ },
2277
+ {
2278
+ "epoch": 1.9074502763736003,
2279
+ "grad_norm": 15.250825881958008,
2280
+ "learning_rate": 7.288546637060834e-06,
2281
+ "loss": 0.4984,
2282
+ "step": 161500
2283
+ },
2284
+ {
2285
+ "epoch": 1.9133556951859025,
2286
+ "grad_norm": 0.4790278673171997,
2287
+ "learning_rate": 7.249177178312154e-06,
2288
+ "loss": 0.5443,
2289
+ "step": 162000
2290
+ },
2291
+ {
2292
+ "epoch": 1.9192611139982048,
2293
+ "grad_norm": 13.285900115966797,
2294
+ "learning_rate": 7.209886458480969e-06,
2295
+ "loss": 0.5444,
2296
+ "step": 162500
2297
+ },
2298
+ {
2299
+ "epoch": 1.9251665328105068,
2300
+ "grad_norm": 7.882303237915039,
2301
+ "learning_rate": 7.1705169997322885e-06,
2302
+ "loss": 0.5251,
2303
+ "step": 163000
2304
+ },
2305
+ {
2306
+ "epoch": 1.9310719516228092,
2307
+ "grad_norm": 6.691035270690918,
2308
+ "learning_rate": 7.131147540983607e-06,
2309
+ "loss": 0.5672,
2310
+ "step": 163500
2311
+ },
2312
+ {
2313
+ "epoch": 1.9369773704351112,
2314
+ "grad_norm": 14.164164543151855,
2315
+ "learning_rate": 7.091778082234926e-06,
2316
+ "loss": 0.4994,
2317
+ "step": 164000
2318
+ },
2319
+ {
2320
+ "epoch": 1.9428827892474134,
2321
+ "grad_norm": 0.4879821538925171,
2322
+ "learning_rate": 7.0525661013212396e-06,
2323
+ "loss": 0.5328,
2324
+ "step": 164500
2325
+ },
2326
+ {
2327
+ "epoch": 1.9487882080597156,
2328
+ "grad_norm": 7.822917461395264,
2329
+ "learning_rate": 7.013196642572558e-06,
2330
+ "loss": 0.5718,
2331
+ "step": 165000
2332
+ },
2333
+ {
2334
+ "epoch": 1.9546936268720176,
2335
+ "grad_norm": 0.2471114546060562,
2336
+ "learning_rate": 6.973827183823877e-06,
2337
+ "loss": 0.5178,
2338
+ "step": 165500
2339
+ },
2340
+ {
2341
+ "epoch": 1.96059904568432,
2342
+ "grad_norm": 1.3693519830703735,
2343
+ "learning_rate": 6.934457725075196e-06,
2344
+ "loss": 0.5818,
2345
+ "step": 166000
2346
+ },
2347
+ {
2348
+ "epoch": 1.966504464496622,
2349
+ "grad_norm": 0.4519917070865631,
2350
+ "learning_rate": 6.895088266326515e-06,
2351
+ "loss": 0.5191,
2352
+ "step": 166500
2353
+ },
2354
+ {
2355
+ "epoch": 1.9724098833089243,
2356
+ "grad_norm": 4.211235046386719,
2357
+ "learning_rate": 6.855718807577834e-06,
2358
+ "loss": 0.536,
2359
+ "step": 167000
2360
+ },
2361
+ {
2362
+ "epoch": 1.9783153021212265,
2363
+ "grad_norm": 13.497679710388184,
2364
+ "learning_rate": 6.816349348829153e-06,
2365
+ "loss": 0.5625,
2366
+ "step": 167500
2367
+ },
2368
+ {
2369
+ "epoch": 1.9842207209335285,
2370
+ "grad_norm": 3.9584550857543945,
2371
+ "learning_rate": 6.776979890080472e-06,
2372
+ "loss": 0.5704,
2373
+ "step": 168000
2374
+ },
2375
+ {
2376
+ "epoch": 1.9901261397458307,
2377
+ "grad_norm": 15.123173713684082,
2378
+ "learning_rate": 6.73761043133179e-06,
2379
+ "loss": 0.5284,
2380
+ "step": 168500
2381
+ },
2382
+ {
2383
+ "epoch": 1.996031558558133,
2384
+ "grad_norm": 8.3848876953125,
2385
+ "learning_rate": 6.698319711500607e-06,
2386
+ "loss": 0.5433,
2387
+ "step": 169000
2388
+ },
2389
+ {
2390
+ "epoch": 2.0,
2391
+ "eval_accuracy": 0.497122205862669,
2392
+ "eval_f1": 0.5241418199760344,
2393
+ "eval_loss": 0.5884916186332703,
2394
+ "eval_roc_auc": 0.6818782020829143,
2395
+ "eval_runtime": 112.692,
2396
+ "eval_samples_per_second": 66.296,
2397
+ "eval_steps_per_second": 66.296,
2398
+ "step": 169336
2399
+ },
2400
+ {
2401
+ "epoch": 2.001936977370435,
2402
+ "grad_norm": 0.37359023094177246,
2403
+ "learning_rate": 6.659028991669423e-06,
2404
+ "loss": 0.5203,
2405
+ "step": 169500
2406
+ },
2407
+ {
2408
+ "epoch": 2.0078423961827374,
2409
+ "grad_norm": 0.13417910039424896,
2410
+ "learning_rate": 6.619659532920742e-06,
2411
+ "loss": 0.4761,
2412
+ "step": 170000
2413
+ },
2414
+ {
2415
+ "epoch": 2.0137478149950394,
2416
+ "grad_norm": 0.16143397986888885,
2417
+ "learning_rate": 6.580290074172061e-06,
2418
+ "loss": 0.5239,
2419
+ "step": 170500
2420
+ },
2421
+ {
2422
+ "epoch": 2.0196532338073414,
2423
+ "grad_norm": 5.961240291595459,
2424
+ "learning_rate": 6.5409206154233795e-06,
2425
+ "loss": 0.5321,
2426
+ "step": 171000
2427
+ },
2428
+ {
2429
+ "epoch": 2.025558652619644,
2430
+ "grad_norm": 10.15018367767334,
2431
+ "learning_rate": 6.501551156674698e-06,
2432
+ "loss": 0.4946,
2433
+ "step": 171500
2434
+ },
2435
+ {
2436
+ "epoch": 2.031464071431946,
2437
+ "grad_norm": 13.941854476928711,
2438
+ "learning_rate": 6.462260436843515e-06,
2439
+ "loss": 0.5375,
2440
+ "step": 172000
2441
+ },
2442
+ {
2443
+ "epoch": 2.0373694902442483,
2444
+ "grad_norm": 90.04341888427734,
2445
+ "learning_rate": 6.422890978094833e-06,
2446
+ "loss": 0.558,
2447
+ "step": 172500
2448
+ },
2449
+ {
2450
+ "epoch": 2.0432749090565503,
2451
+ "grad_norm": 0.3982486128807068,
2452
+ "learning_rate": 6.383521519346153e-06,
2453
+ "loss": 0.4909,
2454
+ "step": 173000
2455
+ },
2456
+ {
2457
+ "epoch": 2.0491803278688523,
2458
+ "grad_norm": 14.257329940795898,
2459
+ "learning_rate": 6.344152060597472e-06,
2460
+ "loss": 0.5097,
2461
+ "step": 173500
2462
+ },
2463
+ {
2464
+ "epoch": 2.0550857466811547,
2465
+ "grad_norm": 0.2994830310344696,
2466
+ "learning_rate": 6.304861340766287e-06,
2467
+ "loss": 0.523,
2468
+ "step": 174000
2469
+ },
2470
+ {
2471
+ "epoch": 2.0609911654934567,
2472
+ "grad_norm": 21.691524505615234,
2473
+ "learning_rate": 6.265491882017607e-06,
2474
+ "loss": 0.5254,
2475
+ "step": 174500
2476
+ },
2477
+ {
2478
+ "epoch": 2.066896584305759,
2479
+ "grad_norm": 7.8108134269714355,
2480
+ "learning_rate": 6.2261224232689255e-06,
2481
+ "loss": 0.548,
2482
+ "step": 175000
2483
+ },
2484
+ {
2485
+ "epoch": 2.072802003118061,
2486
+ "grad_norm": 6.7383928298950195,
2487
+ "learning_rate": 6.186752964520244e-06,
2488
+ "loss": 0.5098,
2489
+ "step": 175500
2490
+ },
2491
+ {
2492
+ "epoch": 2.078707421930363,
2493
+ "grad_norm": 5.449107646942139,
2494
+ "learning_rate": 6.1473835057715636e-06,
2495
+ "loss": 0.5506,
2496
+ "step": 176000
2497
+ },
2498
+ {
2499
+ "epoch": 2.0846128407426656,
2500
+ "grad_norm": 35.49790573120117,
2501
+ "learning_rate": 6.108014047022882e-06,
2502
+ "loss": 0.5142,
2503
+ "step": 176500
2504
+ },
2505
+ {
2506
+ "epoch": 2.0905182595549676,
2507
+ "grad_norm": 7.552992820739746,
2508
+ "learning_rate": 6.068644588274201e-06,
2509
+ "loss": 0.5508,
2510
+ "step": 177000
2511
+ },
2512
+ {
2513
+ "epoch": 2.0964236783672696,
2514
+ "grad_norm": 128.05538940429688,
2515
+ "learning_rate": 6.029353868443018e-06,
2516
+ "loss": 0.4966,
2517
+ "step": 177500
2518
+ },
2519
+ {
2520
+ "epoch": 2.102329097179572,
2521
+ "grad_norm": 8.854266166687012,
2522
+ "learning_rate": 5.989984409694336e-06,
2523
+ "loss": 0.4974,
2524
+ "step": 178000
2525
+ },
2526
+ {
2527
+ "epoch": 2.108234515991874,
2528
+ "grad_norm": 0.3475770950317383,
2529
+ "learning_rate": 5.950614950945654e-06,
2530
+ "loss": 0.5212,
2531
+ "step": 178500
2532
+ },
2533
+ {
2534
+ "epoch": 2.1141399348041765,
2535
+ "grad_norm": 2.3008108139038086,
2536
+ "learning_rate": 5.911245492196974e-06,
2537
+ "loss": 0.5311,
2538
+ "step": 179000
2539
+ },
2540
+ {
2541
+ "epoch": 2.1200453536164785,
2542
+ "grad_norm": 8.415946960449219,
2543
+ "learning_rate": 5.87195477236579e-06,
2544
+ "loss": 0.5654,
2545
+ "step": 179500
2546
+ },
2547
+ {
2548
+ "epoch": 2.1259507724287805,
2549
+ "grad_norm": 6.0155863761901855,
2550
+ "learning_rate": 5.832585313617109e-06,
2551
+ "loss": 0.5129,
2552
+ "step": 180000
2553
+ },
2554
+ {
2555
+ "epoch": 2.131856191241083,
2556
+ "grad_norm": 0.5401498079299927,
2557
+ "learning_rate": 5.793215854868428e-06,
2558
+ "loss": 0.5199,
2559
+ "step": 180500
2560
+ },
2561
+ {
2562
+ "epoch": 2.137761610053385,
2563
+ "grad_norm": 0.5212739109992981,
2564
+ "learning_rate": 5.753846396119747e-06,
2565
+ "loss": 0.5369,
2566
+ "step": 181000
2567
+ },
2568
+ {
2569
+ "epoch": 2.1436670288656874,
2570
+ "grad_norm": 15.587518692016602,
2571
+ "learning_rate": 5.714476937371065e-06,
2572
+ "loss": 0.5185,
2573
+ "step": 181500
2574
+ },
2575
+ {
2576
+ "epoch": 2.1495724476779894,
2577
+ "grad_norm": 0.30678337812423706,
2578
+ "learning_rate": 5.675186217539882e-06,
2579
+ "loss": 0.5435,
2580
+ "step": 182000
2581
+ },
2582
+ {
2583
+ "epoch": 2.1554778664902914,
2584
+ "grad_norm": 0.4348577558994293,
2585
+ "learning_rate": 5.6358167587912e-06,
2586
+ "loss": 0.5337,
2587
+ "step": 182500
2588
+ },
2589
+ {
2590
+ "epoch": 2.161383285302594,
2591
+ "grad_norm": 5.63192892074585,
2592
+ "learning_rate": 5.596526038960017e-06,
2593
+ "loss": 0.4621,
2594
+ "step": 183000
2595
+ },
2596
+ {
2597
+ "epoch": 2.167288704114896,
2598
+ "grad_norm": 0.7454834580421448,
2599
+ "learning_rate": 5.557156580211336e-06,
2600
+ "loss": 0.5674,
2601
+ "step": 183500
2602
+ },
2603
+ {
2604
+ "epoch": 2.173194122927198,
2605
+ "grad_norm": 0.37728533148765564,
2606
+ "learning_rate": 5.517787121462655e-06,
2607
+ "loss": 0.5372,
2608
+ "step": 184000
2609
+ },
2610
+ {
2611
+ "epoch": 2.1790995417395003,
2612
+ "grad_norm": 5.926390171051025,
2613
+ "learning_rate": 5.478417662713973e-06,
2614
+ "loss": 0.5288,
2615
+ "step": 184500
2616
+ },
2617
+ {
2618
+ "epoch": 2.1850049605518023,
2619
+ "grad_norm": 13.641364097595215,
2620
+ "learning_rate": 5.4391269428827894e-06,
2621
+ "loss": 0.5299,
2622
+ "step": 185000
2623
+ },
2624
+ {
2625
+ "epoch": 2.1909103793641047,
2626
+ "grad_norm": 24.8594970703125,
2627
+ "learning_rate": 5.399757484134108e-06,
2628
+ "loss": 0.5262,
2629
+ "step": 185500
2630
+ },
2631
+ {
2632
+ "epoch": 2.1968157981764067,
2633
+ "grad_norm": 8.509349822998047,
2634
+ "learning_rate": 5.3603880253854276e-06,
2635
+ "loss": 0.5463,
2636
+ "step": 186000
2637
+ },
2638
+ {
2639
+ "epoch": 2.2027212169887087,
2640
+ "grad_norm": 7.8245415687561035,
2641
+ "learning_rate": 5.321018566636746e-06,
2642
+ "loss": 0.5001,
2643
+ "step": 186500
2644
+ },
2645
+ {
2646
+ "epoch": 2.208626635801011,
2647
+ "grad_norm": 0.7356006503105164,
2648
+ "learning_rate": 5.281649107888065e-06,
2649
+ "loss": 0.4916,
2650
+ "step": 187000
2651
+ },
2652
+ {
2653
+ "epoch": 2.214532054613313,
2654
+ "grad_norm": 3.2764933109283447,
2655
+ "learning_rate": 5.242279649139384e-06,
2656
+ "loss": 0.5417,
2657
+ "step": 187500
2658
+ },
2659
+ {
2660
+ "epoch": 2.220437473425615,
2661
+ "grad_norm": 9.338927268981934,
2662
+ "learning_rate": 5.202910190390703e-06,
2663
+ "loss": 0.5484,
2664
+ "step": 188000
2665
+ },
2666
+ {
2667
+ "epoch": 2.2263428922379176,
2668
+ "grad_norm": 104.39839172363281,
2669
+ "learning_rate": 5.163540731642022e-06,
2670
+ "loss": 0.496,
2671
+ "step": 188500
2672
+ },
2673
+ {
2674
+ "epoch": 2.2322483110502196,
2675
+ "grad_norm": 0.22737614810466766,
2676
+ "learning_rate": 5.12417127289334e-06,
2677
+ "loss": 0.5135,
2678
+ "step": 189000
2679
+ },
2680
+ {
2681
+ "epoch": 2.238153729862522,
2682
+ "grad_norm": 7.401103973388672,
2683
+ "learning_rate": 5.084880553062157e-06,
2684
+ "loss": 0.4895,
2685
+ "step": 189500
2686
+ },
2687
+ {
2688
+ "epoch": 2.244059148674824,
2689
+ "grad_norm": 179.85498046875,
2690
+ "learning_rate": 5.045511094313476e-06,
2691
+ "loss": 0.4943,
2692
+ "step": 190000
2693
+ },
2694
+ {
2695
+ "epoch": 2.249964567487126,
2696
+ "grad_norm": 0.13216468691825867,
2697
+ "learning_rate": 5.006141635564795e-06,
2698
+ "loss": 0.5314,
2699
+ "step": 190500
2700
+ },
2701
+ {
2702
+ "epoch": 2.2558699862994285,
2703
+ "grad_norm": 58.944786071777344,
2704
+ "learning_rate": 4.966772176816114e-06,
2705
+ "loss": 0.5789,
2706
+ "step": 191000
2707
+ },
2708
+ {
2709
+ "epoch": 2.2617754051117305,
2710
+ "grad_norm": 6.750619411468506,
2711
+ "learning_rate": 4.92748145698493e-06,
2712
+ "loss": 0.512,
2713
+ "step": 191500
2714
+ },
2715
+ {
2716
+ "epoch": 2.2676808239240325,
2717
+ "grad_norm": 0.23294350504875183,
2718
+ "learning_rate": 4.888111998236249e-06,
2719
+ "loss": 0.5074,
2720
+ "step": 192000
2721
+ },
2722
+ {
2723
+ "epoch": 2.273586242736335,
2724
+ "grad_norm": 4.00797176361084,
2725
+ "learning_rate": 4.8487425394875675e-06,
2726
+ "loss": 0.4682,
2727
+ "step": 192500
2728
+ },
2729
+ {
2730
+ "epoch": 2.279491661548637,
2731
+ "grad_norm": 11.367417335510254,
2732
+ "learning_rate": 4.809373080738887e-06,
2733
+ "loss": 0.5287,
2734
+ "step": 193000
2735
+ },
2736
+ {
2737
+ "epoch": 2.2853970803609394,
2738
+ "grad_norm": 0.20395159721374512,
2739
+ "learning_rate": 4.770082360907702e-06,
2740
+ "loss": 0.5177,
2741
+ "step": 193500
2742
+ },
2743
+ {
2744
+ "epoch": 2.2913024991732414,
2745
+ "grad_norm": 0.48907470703125,
2746
+ "learning_rate": 4.730712902159022e-06,
2747
+ "loss": 0.5235,
2748
+ "step": 194000
2749
+ },
2750
+ {
2751
+ "epoch": 2.2972079179855434,
2752
+ "grad_norm": 1.3965227603912354,
2753
+ "learning_rate": 4.6913434434103405e-06,
2754
+ "loss": 0.5368,
2755
+ "step": 194500
2756
+ },
2757
+ {
2758
+ "epoch": 2.303113336797846,
2759
+ "grad_norm": 0.20722346007823944,
2760
+ "learning_rate": 4.651973984661659e-06,
2761
+ "loss": 0.5241,
2762
+ "step": 195000
2763
+ },
2764
+ {
2765
+ "epoch": 2.309018755610148,
2766
+ "grad_norm": 0.56623375415802,
2767
+ "learning_rate": 4.612604525912978e-06,
2768
+ "loss": 0.5127,
2769
+ "step": 195500
2770
+ },
2771
+ {
2772
+ "epoch": 2.3149241744224502,
2773
+ "grad_norm": 14.25666618347168,
2774
+ "learning_rate": 4.573313806081795e-06,
2775
+ "loss": 0.5303,
2776
+ "step": 196000
2777
+ },
2778
+ {
2779
+ "epoch": 2.3208295932347522,
2780
+ "grad_norm": 0.31170445680618286,
2781
+ "learning_rate": 4.5339443473331134e-06,
2782
+ "loss": 0.5233,
2783
+ "step": 196500
2784
+ },
2785
+ {
2786
+ "epoch": 2.3267350120470542,
2787
+ "grad_norm": 0.19042448699474335,
2788
+ "learning_rate": 4.494574888584432e-06,
2789
+ "loss": 0.5049,
2790
+ "step": 197000
2791
+ },
2792
+ {
2793
+ "epoch": 2.3326404308593567,
2794
+ "grad_norm": 16.922496795654297,
2795
+ "learning_rate": 4.455284168753248e-06,
2796
+ "loss": 0.5722,
2797
+ "step": 197500
2798
+ },
2799
+ {
2800
+ "epoch": 2.3385458496716587,
2801
+ "grad_norm": 9.51486587524414,
2802
+ "learning_rate": 4.415914710004568e-06,
2803
+ "loss": 0.502,
2804
+ "step": 198000
2805
+ },
2806
+ {
2807
+ "epoch": 2.3444512684839607,
2808
+ "grad_norm": 12.042534828186035,
2809
+ "learning_rate": 4.3765452512558856e-06,
2810
+ "loss": 0.5382,
2811
+ "step": 198500
2812
+ },
2813
+ {
2814
+ "epoch": 2.350356687296263,
2815
+ "grad_norm": 0.15318746864795685,
2816
+ "learning_rate": 4.337175792507205e-06,
2817
+ "loss": 0.4805,
2818
+ "step": 199000
2819
+ },
2820
+ {
2821
+ "epoch": 2.356262106108565,
2822
+ "grad_norm": 6.209574222564697,
2823
+ "learning_rate": 4.297885072676021e-06,
2824
+ "loss": 0.5378,
2825
+ "step": 199500
2826
+ },
2827
+ {
2828
+ "epoch": 2.3621675249208676,
2829
+ "grad_norm": 7.745547294616699,
2830
+ "learning_rate": 4.25851561392734e-06,
2831
+ "loss": 0.528,
2832
+ "step": 200000
2833
+ },
2834
+ {
2835
+ "epoch": 2.3680729437331696,
2836
+ "grad_norm": 51.523094177246094,
2837
+ "learning_rate": 4.2191461551786585e-06,
2838
+ "loss": 0.5285,
2839
+ "step": 200500
2840
+ },
2841
+ {
2842
+ "epoch": 2.3739783625454716,
2843
+ "grad_norm": 6.393804550170898,
2844
+ "learning_rate": 4.179776696429978e-06,
2845
+ "loss": 0.5711,
2846
+ "step": 201000
2847
+ },
2848
+ {
2849
+ "epoch": 2.379883781357774,
2850
+ "grad_norm": 0.2673965394496918,
2851
+ "learning_rate": 4.140407237681297e-06,
2852
+ "loss": 0.534,
2853
+ "step": 201500
2854
+ },
2855
+ {
2856
+ "epoch": 2.385789200170076,
2857
+ "grad_norm": 7.325765609741211,
2858
+ "learning_rate": 4.101037778932615e-06,
2859
+ "loss": 0.5324,
2860
+ "step": 202000
2861
+ },
2862
+ {
2863
+ "epoch": 2.3916946189823785,
2864
+ "grad_norm": 0.23673997819423676,
2865
+ "learning_rate": 4.061668320183935e-06,
2866
+ "loss": 0.5753,
2867
+ "step": 202500
2868
+ },
2869
+ {
2870
+ "epoch": 2.3976000377946804,
2871
+ "grad_norm": 47.96986389160156,
2872
+ "learning_rate": 4.022298861435253e-06,
2873
+ "loss": 0.4933,
2874
+ "step": 203000
2875
+ },
2876
+ {
2877
+ "epoch": 2.4035054566069824,
2878
+ "grad_norm": 7.979691505432129,
2879
+ "learning_rate": 3.982929402686572e-06,
2880
+ "loss": 0.5321,
2881
+ "step": 203500
2882
+ },
2883
+ {
2884
+ "epoch": 2.409410875419285,
2885
+ "grad_norm": 0.8339338302612305,
2886
+ "learning_rate": 3.943638682855388e-06,
2887
+ "loss": 0.5473,
2888
+ "step": 204000
2889
+ },
2890
+ {
2891
+ "epoch": 2.415316294231587,
2892
+ "grad_norm": 47.454368591308594,
2893
+ "learning_rate": 3.904269224106707e-06,
2894
+ "loss": 0.559,
2895
+ "step": 204500
2896
+ },
2897
+ {
2898
+ "epoch": 2.421221713043889,
2899
+ "grad_norm": 0.33361056447029114,
2900
+ "learning_rate": 3.864978504275523e-06,
2901
+ "loss": 0.5517,
2902
+ "step": 205000
2903
+ },
2904
+ {
2905
+ "epoch": 2.4271271318561913,
2906
+ "grad_norm": 17.990787506103516,
2907
+ "learning_rate": 3.8256090455268426e-06,
2908
+ "loss": 0.511,
2909
+ "step": 205500
2910
+ },
2911
+ {
2912
+ "epoch": 2.4330325506684933,
2913
+ "grad_norm": 9.788211822509766,
2914
+ "learning_rate": 3.7862395867781616e-06,
2915
+ "loss": 0.5394,
2916
+ "step": 206000
2917
+ },
2918
+ {
2919
+ "epoch": 2.4389379694807953,
2920
+ "grad_norm": 0.29322516918182373,
2921
+ "learning_rate": 3.74687012802948e-06,
2922
+ "loss": 0.5919,
2923
+ "step": 206500
2924
+ },
2925
+ {
2926
+ "epoch": 2.4448433882930978,
2927
+ "grad_norm": 81.08358764648438,
2928
+ "learning_rate": 3.707500669280799e-06,
2929
+ "loss": 0.5396,
2930
+ "step": 207000
2931
+ },
2932
+ {
2933
+ "epoch": 2.4507488071053998,
2934
+ "grad_norm": 0.4258115887641907,
2935
+ "learning_rate": 3.6682099494496155e-06,
2936
+ "loss": 0.472,
2937
+ "step": 207500
2938
+ },
2939
+ {
2940
+ "epoch": 2.456654225917702,
2941
+ "grad_norm": 6.371994972229004,
2942
+ "learning_rate": 3.6288404907009338e-06,
2943
+ "loss": 0.5882,
2944
+ "step": 208000
2945
+ },
2946
+ {
2947
+ "epoch": 2.462559644730004,
2948
+ "grad_norm": 11.043325424194336,
2949
+ "learning_rate": 3.589471031952253e-06,
2950
+ "loss": 0.5457,
2951
+ "step": 208500
2952
+ },
2953
+ {
2954
+ "epoch": 2.4684650635423067,
2955
+ "grad_norm": 9.996238708496094,
2956
+ "learning_rate": 3.550101573203572e-06,
2957
+ "loss": 0.5346,
2958
+ "step": 209000
2959
+ },
2960
+ {
2961
+ "epoch": 2.4743704823546087,
2962
+ "grad_norm": 0.32652753591537476,
2963
+ "learning_rate": 3.5107321144548905e-06,
2964
+ "loss": 0.5461,
2965
+ "step": 209500
2966
+ },
2967
+ {
2968
+ "epoch": 2.4802759011669107,
2969
+ "grad_norm": 0.2573184370994568,
2970
+ "learning_rate": 3.4713626557062096e-06,
2971
+ "loss": 0.5025,
2972
+ "step": 210000
2973
+ },
2974
+ {
2975
+ "epoch": 2.486181319979213,
2976
+ "grad_norm": 9.490194320678711,
2977
+ "learning_rate": 3.4319931969575286e-06,
2978
+ "loss": 0.5443,
2979
+ "step": 210500
2980
+ },
2981
+ {
2982
+ "epoch": 2.492086738791515,
2983
+ "grad_norm": 1.0135701894760132,
2984
+ "learning_rate": 3.3926237382088473e-06,
2985
+ "loss": 0.5176,
2986
+ "step": 211000
2987
+ },
2988
+ {
2989
+ "epoch": 2.497992157603817,
2990
+ "grad_norm": 0.3167944848537445,
2991
+ "learning_rate": 3.3533330183776635e-06,
2992
+ "loss": 0.542,
2993
+ "step": 211500
2994
+ },
2995
+ {
2996
+ "epoch": 2.5038975764161195,
2997
+ "grad_norm": 0.47053876519203186,
2998
+ "learning_rate": 3.3140422985464797e-06,
2999
+ "loss": 0.4944,
3000
+ "step": 212000
3001
+ },
3002
+ {
3003
+ "epoch": 2.5098029952284215,
3004
+ "grad_norm": 0.2583182752132416,
3005
+ "learning_rate": 3.2746728397977987e-06,
3006
+ "loss": 0.5167,
3007
+ "step": 212500
3008
+ },
3009
+ {
3010
+ "epoch": 2.5157084140407235,
3011
+ "grad_norm": 11.087862968444824,
3012
+ "learning_rate": 3.2353033810491174e-06,
3013
+ "loss": 0.5117,
3014
+ "step": 213000
3015
+ },
3016
+ {
3017
+ "epoch": 2.521613832853026,
3018
+ "grad_norm": 0.197793111205101,
3019
+ "learning_rate": 3.1959339223004364e-06,
3020
+ "loss": 0.5265,
3021
+ "step": 213500
3022
+ },
3023
+ {
3024
+ "epoch": 2.527519251665328,
3025
+ "grad_norm": 0.09116879105567932,
3026
+ "learning_rate": 3.1565644635517555e-06,
3027
+ "loss": 0.4583,
3028
+ "step": 214000
3029
+ },
3030
+ {
3031
+ "epoch": 2.5334246704776304,
3032
+ "grad_norm": 0.3125598728656769,
3033
+ "learning_rate": 3.1172737437205713e-06,
3034
+ "loss": 0.5588,
3035
+ "step": 214500
3036
+ },
3037
+ {
3038
+ "epoch": 2.5393300892899324,
3039
+ "grad_norm": 0.20921526849269867,
3040
+ "learning_rate": 3.0779042849718903e-06,
3041
+ "loss": 0.5181,
3042
+ "step": 215000
3043
+ },
3044
+ {
3045
+ "epoch": 2.545235508102235,
3046
+ "grad_norm": 0.8386293649673462,
3047
+ "learning_rate": 3.0385348262232094e-06,
3048
+ "loss": 0.5606,
3049
+ "step": 215500
3050
+ },
3051
+ {
3052
+ "epoch": 2.551140926914537,
3053
+ "grad_norm": 1.4247181415557861,
3054
+ "learning_rate": 2.9992441063920256e-06,
3055
+ "loss": 0.5632,
3056
+ "step": 216000
3057
+ },
3058
+ {
3059
+ "epoch": 2.557046345726839,
3060
+ "grad_norm": 56.29065704345703,
3061
+ "learning_rate": 2.9598746476433443e-06,
3062
+ "loss": 0.536,
3063
+ "step": 216500
3064
+ },
3065
+ {
3066
+ "epoch": 2.5629517645391413,
3067
+ "grad_norm": 0.20264124870300293,
3068
+ "learning_rate": 2.9205051888946633e-06,
3069
+ "loss": 0.5904,
3070
+ "step": 217000
3071
+ },
3072
+ {
3073
+ "epoch": 2.5688571833514433,
3074
+ "grad_norm": 5.4820733070373535,
3075
+ "learning_rate": 2.881135730145982e-06,
3076
+ "loss": 0.5075,
3077
+ "step": 217500
3078
+ },
3079
+ {
3080
+ "epoch": 2.5747626021637453,
3081
+ "grad_norm": 0.5356336832046509,
3082
+ "learning_rate": 2.841766271397301e-06,
3083
+ "loss": 0.5345,
3084
+ "step": 218000
3085
+ },
3086
+ {
3087
+ "epoch": 2.5806680209760477,
3088
+ "grad_norm": 11.729897499084473,
3089
+ "learning_rate": 2.80239681264862e-06,
3090
+ "loss": 0.5263,
3091
+ "step": 218500
3092
+ },
3093
+ {
3094
+ "epoch": 2.5865734397883497,
3095
+ "grad_norm": 0.6651723384857178,
3096
+ "learning_rate": 2.7630273538999387e-06,
3097
+ "loss": 0.5669,
3098
+ "step": 219000
3099
+ },
3100
+ {
3101
+ "epoch": 2.5924788586006517,
3102
+ "grad_norm": 0.3053622245788574,
3103
+ "learning_rate": 2.7236578951512578e-06,
3104
+ "loss": 0.5336,
3105
+ "step": 219500
3106
+ },
3107
+ {
3108
+ "epoch": 2.598384277412954,
3109
+ "grad_norm": 0.8806591033935547,
3110
+ "learning_rate": 2.684288436402577e-06,
3111
+ "loss": 0.5252,
3112
+ "step": 220000
3113
+ },
3114
+ {
3115
+ "epoch": 2.604289696225256,
3116
+ "grad_norm": 0.2266666144132614,
3117
+ "learning_rate": 2.6449189776538955e-06,
3118
+ "loss": 0.5234,
3119
+ "step": 220500
3120
+ },
3121
+ {
3122
+ "epoch": 2.610195115037558,
3123
+ "grad_norm": 16.65863800048828,
3124
+ "learning_rate": 2.6055495189052145e-06,
3125
+ "loss": 0.5589,
3126
+ "step": 221000
3127
+ },
3128
+ {
3129
+ "epoch": 2.6161005338498606,
3130
+ "grad_norm": 3.0743777751922607,
3131
+ "learning_rate": 2.5662587990740307e-06,
3132
+ "loss": 0.5399,
3133
+ "step": 221500
3134
+ },
3135
+ {
3136
+ "epoch": 2.622005952662163,
3137
+ "grad_norm": 3.1333186626434326,
3138
+ "learning_rate": 2.5268893403253494e-06,
3139
+ "loss": 0.4949,
3140
+ "step": 222000
3141
+ },
3142
+ {
3143
+ "epoch": 2.627911371474465,
3144
+ "grad_norm": 20.595041275024414,
3145
+ "learning_rate": 2.4875198815766684e-06,
3146
+ "loss": 0.5801,
3147
+ "step": 222500
3148
+ },
3149
+ {
3150
+ "epoch": 2.633816790286767,
3151
+ "grad_norm": 0.6876373887062073,
3152
+ "learning_rate": 2.448150422827987e-06,
3153
+ "loss": 0.4822,
3154
+ "step": 223000
3155
+ },
3156
+ {
3157
+ "epoch": 2.6397222090990695,
3158
+ "grad_norm": 0.30532965064048767,
3159
+ "learning_rate": 2.4088597029968033e-06,
3160
+ "loss": 0.496,
3161
+ "step": 223500
3162
+ },
3163
+ {
3164
+ "epoch": 2.6456276279113715,
3165
+ "grad_norm": 0.18945026397705078,
3166
+ "learning_rate": 2.3694902442481223e-06,
3167
+ "loss": 0.5391,
3168
+ "step": 224000
3169
+ },
3170
+ {
3171
+ "epoch": 2.6515330467236735,
3172
+ "grad_norm": 0.17213118076324463,
3173
+ "learning_rate": 2.3301207854994414e-06,
3174
+ "loss": 0.5181,
3175
+ "step": 224500
3176
+ },
3177
+ {
3178
+ "epoch": 2.657438465535976,
3179
+ "grad_norm": 0.49905282258987427,
3180
+ "learning_rate": 2.2908300656682576e-06,
3181
+ "loss": 0.5248,
3182
+ "step": 225000
3183
+ },
3184
+ {
3185
+ "epoch": 2.663343884348278,
3186
+ "grad_norm": 5.821164608001709,
3187
+ "learning_rate": 2.2514606069195762e-06,
3188
+ "loss": 0.5082,
3189
+ "step": 225500
3190
+ },
3191
+ {
3192
+ "epoch": 2.66924930316058,
3193
+ "grad_norm": 0.1486668437719345,
3194
+ "learning_rate": 2.2120911481708953e-06,
3195
+ "loss": 0.5384,
3196
+ "step": 226000
3197
+ },
3198
+ {
3199
+ "epoch": 2.6751547219728824,
3200
+ "grad_norm": 51.314842224121094,
3201
+ "learning_rate": 2.172721689422214e-06,
3202
+ "loss": 0.5064,
3203
+ "step": 226500
3204
+ },
3205
+ {
3206
+ "epoch": 2.6810601407851844,
3207
+ "grad_norm": 27.27803611755371,
3208
+ "learning_rate": 2.133352230673533e-06,
3209
+ "loss": 0.5766,
3210
+ "step": 227000
3211
+ },
3212
+ {
3213
+ "epoch": 2.6869655595974864,
3214
+ "grad_norm": 8.757097244262695,
3215
+ "learning_rate": 2.0939827719248516e-06,
3216
+ "loss": 0.5541,
3217
+ "step": 227500
3218
+ },
3219
+ {
3220
+ "epoch": 2.692870978409789,
3221
+ "grad_norm": 7.907627582550049,
3222
+ "learning_rate": 2.0546133131761707e-06,
3223
+ "loss": 0.5248,
3224
+ "step": 228000
3225
+ },
3226
+ {
3227
+ "epoch": 2.698776397222091,
3228
+ "grad_norm": 8.180453300476074,
3229
+ "learning_rate": 2.0152438544274893e-06,
3230
+ "loss": 0.4975,
3231
+ "step": 228500
3232
+ },
3233
+ {
3234
+ "epoch": 2.7046818160343933,
3235
+ "grad_norm": 0.3046265244483948,
3236
+ "learning_rate": 1.976031873513803e-06,
3237
+ "loss": 0.5666,
3238
+ "step": 229000
3239
+ },
3240
+ {
3241
+ "epoch": 2.7105872348466953,
3242
+ "grad_norm": 0.474902480840683,
3243
+ "learning_rate": 1.936662414765122e-06,
3244
+ "loss": 0.5491,
3245
+ "step": 229500
3246
+ },
3247
+ {
3248
+ "epoch": 2.7164926536589977,
3249
+ "grad_norm": 9.716066360473633,
3250
+ "learning_rate": 1.8972929560164408e-06,
3251
+ "loss": 0.5345,
3252
+ "step": 230000
3253
+ },
3254
+ {
3255
+ "epoch": 2.7223980724712997,
3256
+ "grad_norm": 0.4300796687602997,
3257
+ "learning_rate": 1.8579234972677599e-06,
3258
+ "loss": 0.565,
3259
+ "step": 230500
3260
+ },
3261
+ {
3262
+ "epoch": 2.7283034912836017,
3263
+ "grad_norm": 0.3592308759689331,
3264
+ "learning_rate": 1.8185540385190787e-06,
3265
+ "loss": 0.514,
3266
+ "step": 231000
3267
+ },
3268
+ {
3269
+ "epoch": 2.734208910095904,
3270
+ "grad_norm": 0.587499737739563,
3271
+ "learning_rate": 1.7791845797703973e-06,
3272
+ "loss": 0.5698,
3273
+ "step": 231500
3274
+ },
3275
+ {
3276
+ "epoch": 2.740114328908206,
3277
+ "grad_norm": 38.904014587402344,
3278
+ "learning_rate": 1.7398151210217162e-06,
3279
+ "loss": 0.5504,
3280
+ "step": 232000
3281
+ },
3282
+ {
3283
+ "epoch": 2.746019747720508,
3284
+ "grad_norm": 0.6263485550880432,
3285
+ "learning_rate": 1.7004456622730352e-06,
3286
+ "loss": 0.5276,
3287
+ "step": 232500
3288
+ },
3289
+ {
3290
+ "epoch": 2.7519251665328106,
3291
+ "grad_norm": 0.22008846700191498,
3292
+ "learning_rate": 1.661076203524354e-06,
3293
+ "loss": 0.5478,
3294
+ "step": 233000
3295
+ },
3296
+ {
3297
+ "epoch": 2.7578305853451126,
3298
+ "grad_norm": 0.35815027356147766,
3299
+ "learning_rate": 1.621706744775673e-06,
3300
+ "loss": 0.5106,
3301
+ "step": 233500
3302
+ },
3303
+ {
3304
+ "epoch": 2.7637360041574146,
3305
+ "grad_norm": 0.17706379294395447,
3306
+ "learning_rate": 1.5824160249444892e-06,
3307
+ "loss": 0.5848,
3308
+ "step": 234000
3309
+ },
3310
+ {
3311
+ "epoch": 2.769641422969717,
3312
+ "grad_norm": 0.4346071481704712,
3313
+ "learning_rate": 1.543046566195808e-06,
3314
+ "loss": 0.5091,
3315
+ "step": 234500
3316
+ },
3317
+ {
3318
+ "epoch": 2.775546841782019,
3319
+ "grad_norm": 2.6666038036346436,
3320
+ "learning_rate": 1.5036771074471268e-06,
3321
+ "loss": 0.5276,
3322
+ "step": 235000
3323
+ },
3324
+ {
3325
+ "epoch": 2.7814522605943215,
3326
+ "grad_norm": 0.46572089195251465,
3327
+ "learning_rate": 1.464307648698446e-06,
3328
+ "loss": 0.5342,
3329
+ "step": 235500
3330
+ },
3331
+ {
3332
+ "epoch": 2.7873576794066235,
3333
+ "grad_norm": 0.4890391528606415,
3334
+ "learning_rate": 1.4249381899497648e-06,
3335
+ "loss": 0.5455,
3336
+ "step": 236000
3337
+ },
3338
+ {
3339
+ "epoch": 2.793263098218926,
3340
+ "grad_norm": 0.31877875328063965,
3341
+ "learning_rate": 1.3855687312010834e-06,
3342
+ "loss": 0.5748,
3343
+ "step": 236500
3344
+ },
3345
+ {
3346
+ "epoch": 2.799168517031228,
3347
+ "grad_norm": 6.273202896118164,
3348
+ "learning_rate": 1.3462780113698998e-06,
3349
+ "loss": 0.5051,
3350
+ "step": 237000
3351
+ },
3352
+ {
3353
+ "epoch": 2.80507393584353,
3354
+ "grad_norm": 0.2578466534614563,
3355
+ "learning_rate": 1.3069085526212187e-06,
3356
+ "loss": 0.5167,
3357
+ "step": 237500
3358
+ },
3359
+ {
3360
+ "epoch": 2.8109793546558324,
3361
+ "grad_norm": 0.690581738948822,
3362
+ "learning_rate": 1.2675390938725377e-06,
3363
+ "loss": 0.54,
3364
+ "step": 238000
3365
+ },
3366
+ {
3367
+ "epoch": 2.8168847734681344,
3368
+ "grad_norm": 101.09083557128906,
3369
+ "learning_rate": 1.2281696351238564e-06,
3370
+ "loss": 0.5451,
3371
+ "step": 238500
3372
+ },
3373
+ {
3374
+ "epoch": 2.8227901922804364,
3375
+ "grad_norm": 66.44615936279297,
3376
+ "learning_rate": 1.1888001763751752e-06,
3377
+ "loss": 0.5273,
3378
+ "step": 239000
3379
+ },
3380
+ {
3381
+ "epoch": 2.828695611092739,
3382
+ "grad_norm": 10.100146293640137,
3383
+ "learning_rate": 1.1495094565439916e-06,
3384
+ "loss": 0.5677,
3385
+ "step": 239500
3386
+ },
3387
+ {
3388
+ "epoch": 2.834601029905041,
3389
+ "grad_norm": 10.71104621887207,
3390
+ "learning_rate": 1.1101399977953103e-06,
3391
+ "loss": 0.53,
3392
+ "step": 240000
3393
+ },
3394
+ {
3395
+ "epoch": 2.840506448717343,
3396
+ "grad_norm": 0.15605445206165314,
3397
+ "learning_rate": 1.0707705390466293e-06,
3398
+ "loss": 0.5373,
3399
+ "step": 240500
3400
+ },
3401
+ {
3402
+ "epoch": 2.8464118675296453,
3403
+ "grad_norm": 0.21397539973258972,
3404
+ "learning_rate": 1.0314010802979482e-06,
3405
+ "loss": 0.5476,
3406
+ "step": 241000
3407
+ },
3408
+ {
3409
+ "epoch": 2.8523172863419473,
3410
+ "grad_norm": 10.296300888061523,
3411
+ "learning_rate": 9.92031621549267e-07,
3412
+ "loss": 0.4866,
3413
+ "step": 241500
3414
+ },
3415
+ {
3416
+ "epoch": 2.8582227051542497,
3417
+ "grad_norm": 52.738101959228516,
3418
+ "learning_rate": 9.52662162800586e-07,
3419
+ "loss": 0.5803,
3420
+ "step": 242000
3421
+ },
3422
+ {
3423
+ "epoch": 2.8641281239665517,
3424
+ "grad_norm": 0.5352908968925476,
3425
+ "learning_rate": 9.133714429694021e-07,
3426
+ "loss": 0.5345,
3427
+ "step": 242500
3428
+ },
3429
+ {
3430
+ "epoch": 2.870033542778854,
3431
+ "grad_norm": 9.421175956726074,
3432
+ "learning_rate": 8.74001984220721e-07,
3433
+ "loss": 0.4852,
3434
+ "step": 243000
3435
+ },
3436
+ {
3437
+ "epoch": 2.875938961591156,
3438
+ "grad_norm": 5.814650535583496,
3439
+ "learning_rate": 8.346325254720399e-07,
3440
+ "loss": 0.5233,
3441
+ "step": 243500
3442
+ },
3443
+ {
3444
+ "epoch": 2.881844380403458,
3445
+ "grad_norm": 0.7075567245483398,
3446
+ "learning_rate": 7.952630667233588e-07,
3447
+ "loss": 0.5545,
3448
+ "step": 244000
3449
+ },
3450
+ {
3451
+ "epoch": 2.8877497992157606,
3452
+ "grad_norm": 0.2958831787109375,
3453
+ "learning_rate": 7.558936079746776e-07,
3454
+ "loss": 0.5806,
3455
+ "step": 244500
3456
+ },
3457
+ {
3458
+ "epoch": 2.8936552180280626,
3459
+ "grad_norm": 0.37747666239738464,
3460
+ "learning_rate": 7.166028881434939e-07,
3461
+ "loss": 0.495,
3462
+ "step": 245000
3463
+ },
3464
+ {
3465
+ "epoch": 2.8995606368403646,
3466
+ "grad_norm": 0.3326238989830017,
3467
+ "learning_rate": 6.772334293948127e-07,
3468
+ "loss": 0.5034,
3469
+ "step": 245500
3470
+ },
3471
+ {
3472
+ "epoch": 2.905466055652667,
3473
+ "grad_norm": 7.006785869598389,
3474
+ "learning_rate": 6.378639706461316e-07,
3475
+ "loss": 0.5441,
3476
+ "step": 246000
3477
+ },
3478
+ {
3479
+ "epoch": 2.911371474464969,
3480
+ "grad_norm": 0.21972544491291046,
3481
+ "learning_rate": 5.984945118974505e-07,
3482
+ "loss": 0.5684,
3483
+ "step": 246500
3484
+ },
3485
+ {
3486
+ "epoch": 2.917276893277271,
3487
+ "grad_norm": 5.747308254241943,
3488
+ "learning_rate": 5.591250531487694e-07,
3489
+ "loss": 0.5395,
3490
+ "step": 247000
3491
+ },
3492
+ {
3493
+ "epoch": 2.9231823120895735,
3494
+ "grad_norm": 0.17187917232513428,
3495
+ "learning_rate": 5.197555944000882e-07,
3496
+ "loss": 0.5133,
3497
+ "step": 247500
3498
+ },
3499
+ {
3500
+ "epoch": 2.9290877309018755,
3501
+ "grad_norm": 119.28266906738281,
3502
+ "learning_rate": 4.803861356514071e-07,
3503
+ "loss": 0.4941,
3504
+ "step": 248000
3505
+ },
3506
+ {
3507
+ "epoch": 2.9349931497141775,
3508
+ "grad_norm": 6.082529544830322,
3509
+ "learning_rate": 4.4109541582022334e-07,
3510
+ "loss": 0.6088,
3511
+ "step": 248500
3512
+ },
3513
+ {
3514
+ "epoch": 2.94089856852648,
3515
+ "grad_norm": 0.683964192867279,
3516
+ "learning_rate": 4.017259570715422e-07,
3517
+ "loss": 0.5156,
3518
+ "step": 249000
3519
+ },
3520
+ {
3521
+ "epoch": 2.946803987338782,
3522
+ "grad_norm": 7.52407693862915,
3523
+ "learning_rate": 3.6235649832286103e-07,
3524
+ "loss": 0.4735,
3525
+ "step": 249500
3526
+ },
3527
+ {
3528
+ "epoch": 2.9527094061510843,
3529
+ "grad_norm": 66.87844848632812,
3530
+ "learning_rate": 3.2298703957417993e-07,
3531
+ "loss": 0.5418,
3532
+ "step": 250000
3533
+ },
3534
+ {
3535
+ "epoch": 2.9586148249633863,
3536
+ "grad_norm": 21.71000862121582,
3537
+ "learning_rate": 2.8361758082549883e-07,
3538
+ "loss": 0.5636,
3539
+ "step": 250500
3540
+ },
3541
+ {
3542
+ "epoch": 2.964520243775689,
3543
+ "grad_norm": 0.41531023383140564,
3544
+ "learning_rate": 2.442481220768177e-07,
3545
+ "loss": 0.5394,
3546
+ "step": 251000
3547
+ },
3548
+ {
3549
+ "epoch": 2.970425662587991,
3550
+ "grad_norm": 10.309664726257324,
3551
+ "learning_rate": 2.0487866332813658e-07,
3552
+ "loss": 0.5248,
3553
+ "step": 251500
3554
+ },
3555
+ {
3556
+ "epoch": 2.976331081400293,
3557
+ "grad_norm": 10.109683990478516,
3558
+ "learning_rate": 1.6566668241445019e-07,
3559
+ "loss": 0.5491,
3560
+ "step": 252000
3561
+ },
3562
+ {
3563
+ "epoch": 2.9822365002125952,
3564
+ "grad_norm": 12.154350280761719,
3565
+ "learning_rate": 1.2629722366576906e-07,
3566
+ "loss": 0.5542,
3567
+ "step": 252500
3568
+ },
3569
+ {
3570
+ "epoch": 2.9881419190248972,
3571
+ "grad_norm": 22.466415405273438,
3572
+ "learning_rate": 8.692776491708793e-08,
3573
+ "loss": 0.5347,
3574
+ "step": 253000
3575
+ },
3576
+ {
3577
+ "epoch": 2.9940473378371992,
3578
+ "grad_norm": 0.20442518591880798,
3579
+ "learning_rate": 4.7558306168406806e-08,
3580
+ "loss": 0.5602,
3581
+ "step": 253500
3582
+ },
3583
+ {
3584
+ "epoch": 2.9999527566495017,
3585
+ "grad_norm": 0.14298155903816223,
3586
+ "learning_rate": 8.188847419725674e-09,
3587
+ "loss": 0.5458,
3588
+ "step": 254000
3589
+ },
3590
+ {
3591
+ "epoch": 3.0,
3592
+ "eval_accuracy": 0.4869495382144291,
3593
+ "eval_f1": 0.51451578724306,
3594
+ "eval_loss": 0.713314414024353,
3595
+ "eval_roc_auc": 0.6757688508976673,
3596
+ "eval_runtime": 112.702,
3597
+ "eval_samples_per_second": 66.29,
3598
+ "eval_steps_per_second": 66.29,
3599
+ "step": 254004
3600
+ }
3601
+ ],
3602
+ "logging_steps": 500,
3603
+ "max_steps": 254004,
3604
+ "num_input_tokens_seen": 0,
3605
+ "num_train_epochs": 3,
3606
+ "save_steps": 500,
3607
+ "stateful_callbacks": {
3608
+ "TrainerControl": {
3609
+ "args": {
3610
+ "should_epoch_stop": false,
3611
+ "should_evaluate": false,
3612
+ "should_log": false,
3613
+ "should_save": true,
3614
+ "should_training_stop": true
3615
+ },
3616
+ "attributes": {}
3617
+ }
3618
+ },
3619
+ "total_flos": 2.6862505663566643e+17,
3620
+ "train_batch_size": 1,
3621
+ "trial_name": null,
3622
+ "trial_params": null
3623
+ }
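The tail of trainer_state.json above closes out three epochs of training: loss, learning rate, and gradient norm were logged every 500 steps up to max_steps = 254004, and the final entry records the epoch-3 evaluation (eval_accuracy ≈ 0.487, eval_f1 ≈ 0.515, eval_roc_auc ≈ 0.676, eval_loss ≈ 0.713). Below is a minimal sketch of how one might read this state file and plot the loss curve after downloading it; the local path and the matplotlib dependency are assumptions, not part of this repository.

import json
import matplotlib.pyplot as plt  # assumed plotting dependency

# Hypothetical local path to the trainer state file from this upload.
with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry a "loss" key; the end-of-epoch evaluation entries carry "eval_*" keys.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

plt.plot([e["step"] for e in train_logs], [e["loss"] for e in train_logs])
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("loss_curve.png")

print("final eval:", eval_logs[-1])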
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:545d36916e1f2bbe18dbb49265acc11dc63792eda420c211df59503b028b766c
3
+ size 5368
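training_args.bin is stored as a Git LFS pointer (5,368 bytes per the pointer) and holds the pickled TrainingArguments used for this run. As a rough sketch, assuming the actual blob has been fetched from LFS and a compatible transformers version is installed, it can be inspected with torch.load; weights_only=False is needed on recent PyTorch releases because the file is a full pickle rather than a plain tensor archive.

import torch

# Assumes the LFS blob has been downloaded locally and transformers is importable,
# since unpickling reconstructs a transformers.TrainingArguments instance.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)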