dakwi committed
Commit 5d9f826
1 Parent(s): 538b501

Training in progress, step 21000, checkpoint

checkpoint-21000/config.json ADDED
@@ -0,0 +1,42 @@
+ {
+ "_name_or_path": "gpt2-medium",
+ "activation_function": "gelu_new",
+ "architectures": [
+ "GPT2LMHeadModel"
+ ],
+ "attn_pdrop": 0.1,
+ "bos_token_id": 0,
+ "embd_pdrop": 0.1,
+ "eos_token_id": 1,
+ "initializer_range": 0.02,
+ "layer_norm_epsilon": 1e-05,
+ "model_type": "gpt2",
+ "n_ctx": 512,
+ "n_embd": 1024,
+ "n_head": 16,
+ "n_inner": null,
+ "n_layer": 24,
+ "n_positions": 512,
+ "n_special": 0,
+ "pad_token_id": 2,
+ "predict_special_tokens": true,
+ "reorder_and_upcast_attn": false,
+ "resid_pdrop": 0.1,
+ "scale_attn_by_inverse_layer_idx": false,
+ "scale_attn_weights": true,
+ "summary_activation": null,
+ "summary_first_dropout": 0.1,
+ "summary_proj_to_labels": true,
+ "summary_type": "cls_index",
+ "summary_use_proj": true,
+ "task_specific_params": {
+ "text-generation": {
+ "do_sample": true,
+ "max_length": 50
+ }
+ },
+ "torch_dtype": "float32",
+ "transformers_version": "4.44.2",
+ "use_cache": true,
+ "vocab_size": 256
+ }
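config.json above describes a GPT-2-medium-sized causal LM (24 layers, 16 heads, 1024-dim embeddings) with a 512-token context and a 256-entry vocabulary. A minimal loading sketch, assuming this checkpoint directory has been downloaded locally and a transformers version close to the 4.44.2 that wrote the config is installed; the local path is illustrative:

```python
from transformers import AutoConfig, AutoModelForCausalLM

ckpt_dir = "checkpoint-21000"  # illustrative local path to this checkpoint

config = AutoConfig.from_pretrained(ckpt_dir)
print(config.n_layer, config.n_head, config.n_embd, config.vocab_size)  # 24 16 1024 256

model = AutoModelForCausalLM.from_pretrained(ckpt_dir)  # weights come from model.safetensors
model.eval()
print(sum(p.numel() for p in model.parameters()) / 1e6, "M parameters")
```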
checkpoint-21000/generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "eos_token_id": 1,
+ "transformers_version": "4.44.2"
+ }
checkpoint-21000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5544915fd64f995caa96caddf67361ed0bb4034a4a43b28ddb9be3578de7d15b
+ size 1212421632
checkpoint-21000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7491ab34e89dffe455ea3a57cc2a723e2eef5b04dd8ec82478ecd00c91ae8fe1
+ size 2425026746
checkpoint-21000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cee22defc241950aea149faffbb3b1d903db6813a0cb249cd1cb3b56bb8ab3b6
+ size 14244
checkpoint-21000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8b91ef81fddc70fa35dbdb0a20666678108baa40b6214068a8d278b6104acb3
+ size 1064
checkpoint-21000/special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+ "bos_token": {
+ "content": "<bos>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<eos>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
checkpoint-21000/tokenizer.json ADDED
@@ -0,0 +1,553 @@
1
+ {
2
+ "version": "1.0",
3
+ "truncation": {
4
+ "direction": "Right",
5
+ "max_length": 512,
6
+ "strategy": "LongestFirst",
7
+ "stride": 0
8
+ },
9
+ "padding": null,
10
+ "added_tokens": [
11
+ {
12
+ "id": 0,
13
+ "content": "<bos>",
14
+ "single_word": false,
15
+ "lstrip": false,
16
+ "rstrip": false,
17
+ "normalized": true,
18
+ "special": true
19
+ },
20
+ {
21
+ "id": 1,
22
+ "content": "<eos>",
23
+ "single_word": false,
24
+ "lstrip": false,
25
+ "rstrip": false,
26
+ "normalized": true,
27
+ "special": true
28
+ },
29
+ {
30
+ "id": 2,
31
+ "content": "<pad>",
32
+ "single_word": false,
33
+ "lstrip": false,
34
+ "rstrip": false,
35
+ "normalized": true,
36
+ "special": true
37
+ }
38
+ ],
39
+ "normalizer": null,
40
+ "pre_tokenizer": {
41
+ "type": "ByteLevel",
42
+ "add_prefix_space": false,
43
+ "trim_offsets": true,
44
+ "use_regex": true
45
+ },
46
+ "post_processor": {
47
+ "type": "ByteLevel",
48
+ "add_prefix_space": true,
49
+ "trim_offsets": false,
50
+ "use_regex": true
51
+ },
52
+ "decoder": {
53
+ "type": "ByteLevel",
54
+ "add_prefix_space": true,
55
+ "trim_offsets": true,
56
+ "use_regex": true
57
+ },
58
+ "model": {
59
+ "type": "BPE",
60
+ "dropout": null,
61
+ "unk_token": null,
62
+ "continuing_subword_prefix": null,
63
+ "end_of_word_suffix": null,
64
+ "fuse_unk": false,
65
+ "byte_fallback": false,
66
+ "ignore_merges": false,
67
+ "vocab": {
68
+ "<bos>": 0,
69
+ "<eos>": 1,
70
+ "<pad>": 2,
71
+ "#": 3,
72
+ "+": 4,
73
+ "-": 5,
74
+ "1": 6,
75
+ "2": 7,
76
+ "3": 8,
77
+ "4": 9,
78
+ "5": 10,
79
+ "6": 11,
80
+ "7": 12,
81
+ "8": 13,
82
+ "=": 14,
83
+ "B": 15,
84
+ "K": 16,
85
+ "N": 17,
86
+ "O": 18,
87
+ "Q": 19,
88
+ "R": 20,
89
+ "a": 21,
90
+ "b": 22,
91
+ "c": 23,
92
+ "d": 24,
93
+ "e": 25,
94
+ "f": 26,
95
+ "g": 27,
96
+ "h": 28,
97
+ "x": 29,
98
+ "Ġ": 30,
99
+ "ĠN": 31,
100
+ "ĠR": 32,
101
+ "ĠB": 33,
102
+ "ĠQ": 34,
103
+ "ĠK": 35,
104
+ "xd": 36,
105
+ "xe": 37,
106
+ "Ġe": 38,
107
+ "Ġd": 39,
108
+ "Ġc": 40,
109
+ "xf": 41,
110
+ "ĠNf": 42,
111
+ "xc": 43,
112
+ "Ġh": 44,
113
+ "Ġg": 45,
114
+ "ĠNc": 46,
115
+ "Ġf": 47,
116
+ "Ġb": 48,
117
+ "Ġa": 49,
118
+ "ĠO": 50,
119
+ "ĠBe": 51,
120
+ "xg": 52,
121
+ "ĠBg": 53,
122
+ "ĠKf": 54,
123
+ "ĠNd": 55,
124
+ "ĠNe": 56,
125
+ "xb": 57,
126
+ "ĠBd": 58,
127
+ "ĠKg": 59,
128
+ "ĠRe": 60,
129
+ "ĠRa": 61,
130
+ "ĠRd": 62,
131
+ "ĠRc": 63,
132
+ "ĠRf": 64,
133
+ "ĠKe": 65,
134
+ "ĠQd": 66,
135
+ "ĠQe": 67,
136
+ "ĠBf": 68,
137
+ "ĠKh": 69,
138
+ "ĠBb": 70,
139
+ "ĠQc": 71,
140
+ "ĠNb": 72,
141
+ "ĠKd": 73,
142
+ "ĠRb": 74,
143
+ "xa": 75,
144
+ "ĠBc": 76,
145
+ "xh": 77,
146
+ "ĠQf": 78,
147
+ "ĠNg": 79,
148
+ "Ġcxd": 80,
149
+ "ĠNxe": 81,
150
+ "ĠQb": 82,
151
+ "ĠNxd": 83,
152
+ "ĠKc": 84,
153
+ "ĠBxf": 85,
154
+ "ĠQxd": 86,
155
+ "ĠQg": 87,
156
+ "ĠRxd": 88,
157
+ "ĠRh": 89,
158
+ "ĠKb": 90,
159
+ "Ġexd": 91,
160
+ "ĠRg": 92,
161
+ "ĠBxe": 93,
162
+ "ĠBxd": 94,
163
+ "Ġdxe": 95,
164
+ "ĠRxe": 96,
165
+ "ĠBxc": 97,
166
+ "ĠQh": 98,
167
+ "ĠQa": 99,
168
+ "ĠQxe": 100,
169
+ "ĠRxc": 101,
170
+ "Ġbxc": 102,
171
+ "ĠRxf": 103,
172
+ "ĠNbd": 104,
173
+ "ĠNxc": 105,
174
+ "ĠQxf": 106,
175
+ "ĠBh": 107,
176
+ "Ġhxg": 108,
177
+ "Ġfxe": 109,
178
+ "ĠNxf": 110,
179
+ "Ġaxb": 111,
180
+ "Ġdxc": 112,
181
+ "ĠNh": 113,
182
+ "ĠQxc": 114,
183
+ "Ġgxf": 115,
184
+ "Ġexf": 116,
185
+ "ĠRxa": 117,
186
+ "ĠRxb": 118,
187
+ "ĠNa": 119,
188
+ "ĠBxg": 120,
189
+ "ĠBa": 121,
190
+ "ĠKa": 122,
191
+ "ĠQxb": 123,
192
+ "ĠRad": 124,
193
+ "ĠBxb": 125,
194
+ "ĠQxg": 126,
195
+ "ĠKxg": 127,
196
+ "ĠRxg": 128,
197
+ "Ġfxg": 129,
198
+ "ĠRxh": 130,
199
+ "ĠKxf": 131,
200
+ "ĠQxa": 132,
201
+ "ĠRfd": 133,
202
+ "ĠNxg": 134,
203
+ "ĠRac": 135,
204
+ "ĠRfe": 136,
205
+ "ĠNxb": 137,
206
+ "ĠQxh": 138,
207
+ "ĠBxh": 139,
208
+ "ĠBxa": 140,
209
+ "Ġcxb": 141,
210
+ "Ġgxh": 142,
211
+ "Nf": 143,
212
+ "ĠRae": 144,
213
+ "ĠKxd": 145,
214
+ "ĠKxe": 146,
215
+ "Ġbxa": 147,
216
+ "ĠNge": 148,
217
+ "ĠKxh": 149,
218
+ "ĠNxa": 150,
219
+ "ĠNxh": 151,
220
+ "ĠRab": 152,
221
+ "ĠRfc": 153,
222
+ "ĠNgf": 154,
223
+ "ĠKxc": 155,
224
+ "ĠNfd": 156,
225
+ "ĠKxb": 157,
226
+ "ĠRhe": 158,
227
+ "ĠKxa": 159,
228
+ "ĠRed": 160,
229
+ "ĠNce": 161,
230
+ "ĠRde": 162,
231
+ "ĠRcd": 163,
232
+ "ĠRhd": 164,
233
+ "ĠRaf": 165,
234
+ "ĠNdf": 166,
235
+ "ĠNbc": 167,
236
+ "ĠRhg": 168,
237
+ "ĠRfb": 169,
238
+ "ĠRhf": 170,
239
+ "ĠRdd": 171,
240
+ "ĠRbc": 172,
241
+ "ĠRee": 173,
242
+ "ĠRec": 174,
243
+ "ĠNde": 175,
244
+ "ĠRbd": 176,
245
+ "ĠRdc": 177,
246
+ "ĠRcc": 178,
247
+ "ĠRdf": 179,
248
+ "ĠRce": 180,
249
+ "ĠRdg": 181,
250
+ "ĠNed": 182,
251
+ "ĠNcd": 183,
252
+ "ĠNdb": 184,
253
+ "ĠNfe": 185,
254
+ "ĠRbe": 186,
255
+ "ĠRhc": 187,
256
+ "ĠNdxe": 188,
257
+ "ĠRbb": 189,
258
+ "ĠRff": 190,
259
+ "ĠRef": 191,
260
+ "Nc": 192,
261
+ "ĠRaa": 193,
262
+ "ĠNbxd": 194,
263
+ "ĠNhf": 195,
264
+ "ĠNcxe": 196,
265
+ "ĠRag": 197,
266
+ "ĠNeg": 198,
267
+ "ĠNgxe": 199,
268
+ "ĠNec": 200,
269
+ "ĠReb": 201,
270
+ "ĠNef": 202,
271
+ "ĠRaxd": 203,
272
+ "ĠRfa": 204,
273
+ "ĠRgf": 205,
274
+ "ĠRfxd": 206,
275
+ "ĠNdc": 207,
276
+ "ĠNac": 208,
277
+ "ĠRcf": 209,
278
+ "ĠNfxd": 210,
279
+ "ĠRcb": 211,
280
+ "ĠNfg": 212,
281
+ "ĠRdh": 213,
282
+ "ĠRdb": 214,
283
+ "ĠRaxc": 215,
284
+ "ĠRge": 216,
285
+ "ĠRbf": 217,
286
+ "ĠNcb": 218,
287
+ "ĠNfxe": 219,
288
+ "ĠRhb": 220,
289
+ "ĠNexd": 221,
290
+ "ĠRfg": 222,
291
+ "ĠRea": 223,
292
+ "ĠRgg": 224,
293
+ "ĠRhh": 225,
294
+ "ĠNcxd": 226,
295
+ "ĠRgd": 227,
296
+ "ĠRfxf": 228,
297
+ "ĠRah": 229,
298
+ "ĠQff": 230,
299
+ "ĠQgg": 231,
300
+ "ĠRcg": 232,
301
+ "ĠReg": 233,
302
+ "ĠQee": 234,
303
+ "ĠQbb": 235,
304
+ "ĠRca": 236,
305
+ "ĠRdxd": 237,
306
+ "ĠRexe": 238,
307
+ "ĠQcc": 239,
308
+ "ĠNab": 240,
309
+ "ĠQdd": 241,
310
+ "ĠRba": 242,
311
+ "ĠRcxc": 243,
312
+ "ĠRda": 244,
313
+ "ĠRbg": 245,
314
+ "ĠRexd": 246,
315
+ "ĠQca": 247,
316
+ "ĠRgc": 248,
317
+ "ĠRbxb": 249,
318
+ "ĠRgh": 250,
319
+ "ĠRcxd": 251,
320
+ "ĠQhh": 252,
321
+ "ĠNdxc": 253,
322
+ "ĠQdb": 254,
323
+ "ĠQaa": 255
324
+ },
325
+ "merges": [
326
+ "Ġ N",
327
+ "Ġ R",
328
+ "Ġ B",
329
+ "Ġ Q",
330
+ "Ġ K",
331
+ "x d",
332
+ "x e",
333
+ "Ġ e",
334
+ "Ġ d",
335
+ "Ġ c",
336
+ "x f",
337
+ "ĠN f",
338
+ "x c",
339
+ "Ġ h",
340
+ "Ġ g",
341
+ "ĠN c",
342
+ "Ġ f",
343
+ "Ġ b",
344
+ "Ġ a",
345
+ "Ġ O",
346
+ "ĠB e",
347
+ "x g",
348
+ "ĠB g",
349
+ "ĠK f",
350
+ "ĠN d",
351
+ "ĠN e",
352
+ "x b",
353
+ "ĠB d",
354
+ "ĠK g",
355
+ "ĠR e",
356
+ "ĠR a",
357
+ "ĠR d",
358
+ "ĠR c",
359
+ "ĠR f",
360
+ "ĠK e",
361
+ "ĠQ d",
362
+ "ĠQ e",
363
+ "ĠB f",
364
+ "ĠK h",
365
+ "ĠB b",
366
+ "ĠQ c",
367
+ "ĠN b",
368
+ "ĠK d",
369
+ "ĠR b",
370
+ "x a",
371
+ "ĠB c",
372
+ "x h",
373
+ "ĠQ f",
374
+ "ĠN g",
375
+ "Ġc xd",
376
+ "ĠN xe",
377
+ "ĠQ b",
378
+ "ĠN xd",
379
+ "ĠK c",
380
+ "ĠB xf",
381
+ "ĠQ xd",
382
+ "ĠQ g",
383
+ "ĠR xd",
384
+ "ĠR h",
385
+ "ĠK b",
386
+ "Ġe xd",
387
+ "ĠR g",
388
+ "ĠB xe",
389
+ "ĠB xd",
390
+ "Ġd xe",
391
+ "ĠR xe",
392
+ "ĠB xc",
393
+ "ĠQ h",
394
+ "ĠQ a",
395
+ "ĠQ xe",
396
+ "ĠR xc",
397
+ "Ġb xc",
398
+ "ĠR xf",
399
+ "ĠNb d",
400
+ "ĠN xc",
401
+ "ĠQ xf",
402
+ "ĠB h",
403
+ "Ġh xg",
404
+ "Ġf xe",
405
+ "ĠN xf",
406
+ "Ġa xb",
407
+ "Ġd xc",
408
+ "ĠN h",
409
+ "ĠQ xc",
410
+ "Ġg xf",
411
+ "Ġe xf",
412
+ "ĠR xa",
413
+ "ĠR xb",
414
+ "ĠN a",
415
+ "ĠB xg",
416
+ "ĠB a",
417
+ "ĠK a",
418
+ "ĠQ xb",
419
+ "ĠRa d",
420
+ "ĠB xb",
421
+ "ĠQ xg",
422
+ "ĠK xg",
423
+ "ĠR xg",
424
+ "Ġf xg",
425
+ "ĠR xh",
426
+ "ĠK xf",
427
+ "ĠQ xa",
428
+ "ĠRf d",
429
+ "ĠN xg",
430
+ "ĠRa c",
431
+ "ĠRf e",
432
+ "ĠN xb",
433
+ "ĠQ xh",
434
+ "ĠB xh",
435
+ "ĠB xa",
436
+ "Ġc xb",
437
+ "Ġg xh",
438
+ "N f",
439
+ "ĠRa e",
440
+ "ĠK xd",
441
+ "ĠK xe",
442
+ "Ġb xa",
443
+ "ĠNg e",
444
+ "ĠK xh",
445
+ "ĠN xa",
446
+ "ĠN xh",
447
+ "ĠRa b",
448
+ "ĠRf c",
449
+ "ĠNg f",
450
+ "ĠK xc",
451
+ "ĠNf d",
452
+ "ĠK xb",
453
+ "ĠRh e",
454
+ "ĠK xa",
455
+ "ĠRe d",
456
+ "ĠNc e",
457
+ "ĠRd e",
458
+ "ĠRc d",
459
+ "ĠRh d",
460
+ "ĠRa f",
461
+ "ĠNd f",
462
+ "ĠNb c",
463
+ "ĠRh g",
464
+ "ĠRf b",
465
+ "ĠRh f",
466
+ "ĠRd d",
467
+ "ĠRb c",
468
+ "ĠRe e",
469
+ "ĠRe c",
470
+ "ĠNd e",
471
+ "ĠRb d",
472
+ "ĠRd c",
473
+ "ĠRc c",
474
+ "ĠRd f",
475
+ "ĠRc e",
476
+ "ĠRd g",
477
+ "ĠNe d",
478
+ "ĠNc d",
479
+ "ĠNd b",
480
+ "ĠNf e",
481
+ "ĠRb e",
482
+ "ĠRh c",
483
+ "ĠNd xe",
484
+ "ĠRb b",
485
+ "ĠRf f",
486
+ "ĠRe f",
487
+ "N c",
488
+ "ĠRa a",
489
+ "ĠNb xd",
490
+ "ĠNh f",
491
+ "ĠNc xe",
492
+ "ĠRa g",
493
+ "ĠNe g",
494
+ "ĠNg xe",
495
+ "ĠNe c",
496
+ "ĠRe b",
497
+ "ĠNe f",
498
+ "ĠRa xd",
499
+ "ĠRf a",
500
+ "ĠRg f",
501
+ "ĠRf xd",
502
+ "ĠNd c",
503
+ "ĠNa c",
504
+ "ĠRc f",
505
+ "ĠNf xd",
506
+ "ĠRc b",
507
+ "ĠNf g",
508
+ "ĠRd h",
509
+ "ĠRd b",
510
+ "ĠRa xc",
511
+ "ĠRg e",
512
+ "ĠRb f",
513
+ "ĠNc b",
514
+ "ĠNf xe",
515
+ "ĠRh b",
516
+ "ĠNe xd",
517
+ "ĠRf g",
518
+ "ĠRe a",
519
+ "ĠRg g",
520
+ "ĠRh h",
521
+ "ĠNc xd",
522
+ "ĠRg d",
523
+ "ĠRf xf",
524
+ "ĠRa h",
525
+ "ĠQf f",
526
+ "ĠQg g",
527
+ "ĠRc g",
528
+ "ĠRe g",
529
+ "ĠQe e",
530
+ "ĠQb b",
531
+ "ĠRc a",
532
+ "ĠRd xd",
533
+ "ĠRe xe",
534
+ "ĠQc c",
535
+ "ĠNa b",
536
+ "ĠQd d",
537
+ "ĠRb a",
538
+ "ĠRc xc",
539
+ "ĠRd a",
540
+ "ĠRb g",
541
+ "ĠRe xd",
542
+ "ĠQc a",
543
+ "ĠRg c",
544
+ "ĠRb xb",
545
+ "ĠRg h",
546
+ "ĠRc xd",
547
+ "ĠQh h",
548
+ "ĠNd xc",
549
+ "ĠQd b",
550
+ "ĠQa a"
551
+ ]
552
+ }
553
+ }
checkpoint-21000/tokenizer_config.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<bos>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<eos>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<bos>",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<eos>",
+ "model_max_length": 512,
+ "pad_token": "<pad>",
+ "tokenizer_class": "PreTrainedTokenizerFast"
+ }
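The vocabulary and merge rules in tokenizer.json consist almost entirely of SAN chess-move fragments ("ĠNf", "ĠQxd", "xe", ...), so the tokenizer is evidently built for move sequences rather than natural language. A minimal usage sketch, assuming the checkpoint directory is available locally; the sample move string is a hypothetical opening (not taken from the training data), and the sampling settings mirror the do_sample/max_length hints in config.json:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

ckpt_dir = "checkpoint-21000"                  # illustrative local path
tok = AutoTokenizer.from_pretrained(ckpt_dir)  # PreTrainedTokenizerFast, byte-level BPE
model = AutoModelForCausalLM.from_pretrained(ckpt_dir)
model.eval()

prompt = "e4 e5 Nf3 Nc6"                       # hypothetical SAN prefix for illustration
inputs = tok(prompt, return_tensors="pt")
out = model.generate(
    **inputs,
    do_sample=True,                 # mirrors task_specific_params in config.json
    max_length=50,
    pad_token_id=tok.pad_token_id,  # <pad> (id 2)
    eos_token_id=tok.eos_token_id,  # <eos> (id 1)
)
print(tok.decode(out[0], skip_special_tokens=True))
```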
checkpoint-21000/trainer_state.json ADDED
@@ -0,0 +1,357 @@
1
+ {
2
+ "best_metric": 0.967136025428772,
3
+ "best_model_checkpoint": "chessgpt2-medium-m/checkpoint-21000",
4
+ "epoch": 2.68748400307141,
5
+ "eval_steps": 1000,
6
+ "global_step": 21000,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.1279754287176862,
13
+ "grad_norm": 1.6428743600845337,
14
+ "learning_rate": 4.9959725443266765e-05,
15
+ "loss": 2.4607,
16
+ "step": 1000
17
+ },
18
+ {
19
+ "epoch": 0.1279754287176862,
20
+ "eval_loss": 1.8105345964431763,
21
+ "eval_runtime": 376.5583,
22
+ "eval_samples_per_second": 73.781,
23
+ "eval_steps_per_second": 9.223,
24
+ "step": 1000
25
+ },
26
+ {
27
+ "epoch": 0.2559508574353724,
28
+ "grad_norm": 1.0423442125320435,
29
+ "learning_rate": 4.952992634500169e-05,
30
+ "loss": 1.6726,
31
+ "step": 2000
32
+ },
33
+ {
34
+ "epoch": 0.2559508574353724,
35
+ "eval_loss": 1.4938699007034302,
36
+ "eval_runtime": 376.7393,
37
+ "eval_samples_per_second": 73.746,
38
+ "eval_steps_per_second": 9.219,
39
+ "step": 2000
40
+ },
41
+ {
42
+ "epoch": 0.3839262861530586,
43
+ "grad_norm": 1.234433889389038,
44
+ "learning_rate": 4.863737385256343e-05,
45
+ "loss": 1.4686,
46
+ "step": 3000
47
+ },
48
+ {
49
+ "epoch": 0.3839262861530586,
50
+ "eval_loss": 1.3617429733276367,
51
+ "eval_runtime": 379.1693,
52
+ "eval_samples_per_second": 73.273,
53
+ "eval_steps_per_second": 9.159,
54
+ "step": 3000
55
+ },
56
+ {
57
+ "epoch": 0.5119017148707448,
58
+ "grad_norm": 1.1198837757110596,
59
+ "learning_rate": 4.729890583531792e-05,
60
+ "loss": 1.3601,
61
+ "step": 4000
62
+ },
63
+ {
64
+ "epoch": 0.5119017148707448,
65
+ "eval_loss": 1.2697532176971436,
66
+ "eval_runtime": 376.7562,
67
+ "eval_samples_per_second": 73.743,
68
+ "eval_steps_per_second": 9.218,
69
+ "step": 4000
70
+ },
71
+ {
72
+ "epoch": 0.6398771435884311,
73
+ "grad_norm": 0.9552892446517944,
74
+ "learning_rate": 4.5539772292358576e-05,
75
+ "loss": 1.2875,
76
+ "step": 5000
77
+ },
78
+ {
79
+ "epoch": 0.6398771435884311,
80
+ "eval_loss": 1.2163749933242798,
81
+ "eval_runtime": 378.6404,
82
+ "eval_samples_per_second": 73.376,
83
+ "eval_steps_per_second": 9.172,
84
+ "step": 5000
85
+ },
86
+ {
87
+ "epoch": 0.7678525723061173,
88
+ "grad_norm": 1.001042127609253,
89
+ "learning_rate": 4.3393159015047314e-05,
90
+ "loss": 1.2366,
91
+ "step": 6000
92
+ },
93
+ {
94
+ "epoch": 0.7678525723061173,
95
+ "eval_loss": 1.1716755628585815,
96
+ "eval_runtime": 376.0934,
97
+ "eval_samples_per_second": 73.873,
98
+ "eval_steps_per_second": 9.234,
99
+ "step": 6000
100
+ },
101
+ {
102
+ "epoch": 0.8958280010238034,
103
+ "grad_norm": 1.0304712057113647,
104
+ "learning_rate": 4.089956154202057e-05,
105
+ "loss": 1.1992,
106
+ "step": 7000
107
+ },
108
+ {
109
+ "epoch": 0.8958280010238034,
110
+ "eval_loss": 1.1404258012771606,
111
+ "eval_runtime": 377.955,
112
+ "eval_samples_per_second": 73.509,
113
+ "eval_steps_per_second": 9.189,
114
+ "step": 7000
115
+ },
116
+ {
117
+ "epoch": 1.0238034297414895,
118
+ "grad_norm": 0.7832671403884888,
119
+ "learning_rate": 3.810602121690553e-05,
120
+ "loss": 1.1646,
121
+ "step": 8000
122
+ },
123
+ {
124
+ "epoch": 1.0238034297414895,
125
+ "eval_loss": 1.1096745729446411,
126
+ "eval_runtime": 377.602,
127
+ "eval_samples_per_second": 73.577,
128
+ "eval_steps_per_second": 9.198,
129
+ "step": 8000
130
+ },
131
+ {
132
+ "epoch": 1.151778858459176,
133
+ "grad_norm": 0.7852849364280701,
134
+ "learning_rate": 3.5065237760403566e-05,
135
+ "loss": 1.126,
136
+ "step": 9000
137
+ },
138
+ {
139
+ "epoch": 1.151778858459176,
140
+ "eval_loss": 1.0933990478515625,
141
+ "eval_runtime": 378.2151,
142
+ "eval_samples_per_second": 73.458,
143
+ "eval_steps_per_second": 9.183,
144
+ "step": 9000
145
+ },
146
+ {
147
+ "epoch": 1.279754287176862,
148
+ "grad_norm": 0.7889285087585449,
149
+ "learning_rate": 3.183457509793587e-05,
150
+ "loss": 1.1085,
151
+ "step": 10000
152
+ },
153
+ {
154
+ "epoch": 1.279754287176862,
155
+ "eval_loss": 1.0705418586730957,
156
+ "eval_runtime": 376.2258,
157
+ "eval_samples_per_second": 73.847,
158
+ "eval_steps_per_second": 9.231,
159
+ "step": 10000
160
+ },
161
+ {
162
+ "epoch": 1.4077297158945483,
163
+ "grad_norm": 0.8414849638938904,
164
+ "learning_rate": 2.8478382244815133e-05,
165
+ "loss": 1.0875,
166
+ "step": 11000
167
+ },
168
+ {
169
+ "epoch": 1.4077297158945483,
170
+ "eval_loss": 1.0541125535964966,
171
+ "eval_runtime": 378.6403,
172
+ "eval_samples_per_second": 73.376,
173
+ "eval_steps_per_second": 9.172,
174
+ "step": 11000
175
+ },
176
+ {
177
+ "epoch": 1.5357051446122343,
178
+ "grad_norm": 0.8442362546920776,
179
+ "learning_rate": 2.5053264766232426e-05,
180
+ "loss": 1.0733,
181
+ "step": 12000
182
+ },
183
+ {
184
+ "epoch": 1.5357051446122343,
185
+ "eval_loss": 1.0388243198394775,
186
+ "eval_runtime": 376.572,
187
+ "eval_samples_per_second": 73.779,
188
+ "eval_steps_per_second": 9.223,
189
+ "step": 12000
190
+ },
191
+ {
192
+ "epoch": 1.6636805733299207,
193
+ "grad_norm": 0.9030175805091858,
194
+ "learning_rate": 2.1627142455795886e-05,
195
+ "loss": 1.059,
196
+ "step": 13000
197
+ },
198
+ {
199
+ "epoch": 1.6636805733299207,
200
+ "eval_loss": 1.0245987176895142,
201
+ "eval_runtime": 378.3137,
202
+ "eval_samples_per_second": 73.439,
203
+ "eval_steps_per_second": 9.18,
204
+ "step": 13000
205
+ },
206
+ {
207
+ "epoch": 1.7916560020476069,
208
+ "grad_norm": 0.8050616979598999,
209
+ "learning_rate": 1.8264648598812123e-05,
210
+ "loss": 1.0451,
211
+ "step": 14000
212
+ },
213
+ {
214
+ "epoch": 1.7916560020476069,
215
+ "eval_loss": 1.0108946561813354,
216
+ "eval_runtime": 376.6418,
217
+ "eval_samples_per_second": 73.765,
218
+ "eval_steps_per_second": 9.221,
219
+ "step": 14000
220
+ },
221
+ {
222
+ "epoch": 1.919631430765293,
223
+ "grad_norm": 0.7604677677154541,
224
+ "learning_rate": 1.503551911234875e-05,
225
+ "loss": 1.0327,
226
+ "step": 15000
227
+ },
228
+ {
229
+ "epoch": 1.919631430765293,
230
+ "eval_loss": 1.0017954111099243,
231
+ "eval_runtime": 378.423,
232
+ "eval_samples_per_second": 73.418,
233
+ "eval_steps_per_second": 9.178,
234
+ "step": 15000
235
+ },
236
+ {
237
+ "epoch": 2.047606859482979,
238
+ "grad_norm": 0.8110759258270264,
239
+ "learning_rate": 1.1987749064346765e-05,
240
+ "loss": 1.0118,
241
+ "step": 16000
242
+ },
243
+ {
244
+ "epoch": 2.047606859482979,
245
+ "eval_loss": 0.991495668888092,
246
+ "eval_runtime": 377.8976,
247
+ "eval_samples_per_second": 73.52,
248
+ "eval_steps_per_second": 9.19,
249
+ "step": 16000
250
+ },
251
+ {
252
+ "epoch": 2.1755822882006655,
253
+ "grad_norm": 0.8434863090515137,
254
+ "learning_rate": 9.185453185391116e-06,
255
+ "loss": 0.9862,
256
+ "step": 17000
257
+ },
258
+ {
259
+ "epoch": 2.1755822882006655,
260
+ "eval_loss": 0.9861236810684204,
261
+ "eval_runtime": 378.3771,
262
+ "eval_samples_per_second": 73.427,
263
+ "eval_steps_per_second": 9.179,
264
+ "step": 17000
265
+ },
266
+ {
267
+ "epoch": 2.303557716918352,
268
+ "grad_norm": 0.8797647356987,
269
+ "learning_rate": 6.681496368397716e-06,
270
+ "loss": 0.9806,
271
+ "step": 18000
272
+ },
273
+ {
274
+ "epoch": 2.303557716918352,
275
+ "eval_loss": 0.9783245325088501,
276
+ "eval_runtime": 376.5913,
277
+ "eval_samples_per_second": 73.775,
278
+ "eval_steps_per_second": 9.222,
279
+ "step": 18000
280
+ },
281
+ {
282
+ "epoch": 2.431533145636038,
283
+ "grad_norm": 0.8292227983474731,
284
+ "learning_rate": 4.5231153844693594e-06,
285
+ "loss": 0.9757,
286
+ "step": 19000
287
+ },
288
+ {
289
+ "epoch": 2.431533145636038,
290
+ "eval_loss": 0.9735883474349976,
291
+ "eval_runtime": 372.1389,
292
+ "eval_samples_per_second": 74.658,
293
+ "eval_steps_per_second": 9.333,
294
+ "step": 19000
295
+ },
296
+ {
297
+ "epoch": 2.559508574353724,
298
+ "grad_norm": 0.8722350597381592,
299
+ "learning_rate": 2.75102776826896e-06,
300
+ "loss": 0.9713,
301
+ "step": 20000
302
+ },
303
+ {
304
+ "epoch": 2.559508574353724,
305
+ "eval_loss": 0.969468891620636,
306
+ "eval_runtime": 372.7114,
307
+ "eval_samples_per_second": 74.543,
308
+ "eval_steps_per_second": 9.318,
309
+ "step": 20000
310
+ },
311
+ {
312
+ "epoch": 2.68748400307141,
313
+ "grad_norm": 0.8890399932861328,
314
+ "learning_rate": 1.3986636878139808e-06,
315
+ "loss": 0.9675,
316
+ "step": 21000
317
+ },
318
+ {
319
+ "epoch": 2.68748400307141,
320
+ "eval_loss": 0.967136025428772,
321
+ "eval_runtime": 373.3491,
322
+ "eval_samples_per_second": 74.416,
323
+ "eval_steps_per_second": 9.302,
324
+ "step": 21000
325
+ }
326
+ ],
327
+ "logging_steps": 1000,
328
+ "max_steps": 23442,
329
+ "num_input_tokens_seen": 0,
330
+ "num_train_epochs": 3,
331
+ "save_steps": 1000,
332
+ "stateful_callbacks": {
333
+ "EarlyStoppingCallback": {
334
+ "args": {
335
+ "early_stopping_patience": 3,
336
+ "early_stopping_threshold": 0.0
337
+ },
338
+ "attributes": {
339
+ "early_stopping_patience_counter": 0
340
+ }
341
+ },
342
+ "TrainerControl": {
343
+ "args": {
344
+ "should_epoch_stop": false,
345
+ "should_evaluate": false,
346
+ "should_log": false,
347
+ "should_save": true,
348
+ "should_training_stop": false
349
+ },
350
+ "attributes": {}
351
+ }
352
+ },
353
+ "total_flos": 8.263893377270415e+17,
354
+ "train_batch_size": 32,
355
+ "trial_name": null,
356
+ "trial_params": null
357
+ }
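trainer_state.json records eval_loss falling from 1.81 at step 1000 to 0.9671 at step 21000, which is also the best_model_checkpoint so far. As a small illustration (not part of the checkpoint itself), the corresponding token-level perplexities follow directly from the logged losses:

```python
import math

# Eval losses copied (rounded) from log_history above.
eval_losses = {1000: 1.8105, 11000: 1.0541, 21000: 0.967136}
for step, loss in eval_losses.items():
    # Perplexity is exp(cross-entropy loss); step 21000 comes out around 2.63
    # over the 256-token vocabulary.
    print(step, round(math.exp(loss), 3))
```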
checkpoint-21000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4be634794f762eec900845d9a15b90be09108788836f54018376246e20eedf5d
+ size 5240