Xristo committed on
Commit 0d944f9 · verified · 1 Parent(s): 14ade55

Upload 13 files

added_tokens.json ADDED
@@ -0,0 +1,102 @@
+ {
+ "<extra_id_0>": 32099,
+ "<extra_id_10>": 32089,
+ "<extra_id_11>": 32088,
+ "<extra_id_12>": 32087,
+ "<extra_id_13>": 32086,
+ "<extra_id_14>": 32085,
+ "<extra_id_15>": 32084,
+ "<extra_id_16>": 32083,
+ "<extra_id_17>": 32082,
+ "<extra_id_18>": 32081,
+ "<extra_id_19>": 32080,
+ "<extra_id_1>": 32098,
+ "<extra_id_20>": 32079,
+ "<extra_id_21>": 32078,
+ "<extra_id_22>": 32077,
+ "<extra_id_23>": 32076,
+ "<extra_id_24>": 32075,
+ "<extra_id_25>": 32074,
+ "<extra_id_26>": 32073,
+ "<extra_id_27>": 32072,
+ "<extra_id_28>": 32071,
+ "<extra_id_29>": 32070,
+ "<extra_id_2>": 32097,
+ "<extra_id_30>": 32069,
+ "<extra_id_31>": 32068,
+ "<extra_id_32>": 32067,
+ "<extra_id_33>": 32066,
+ "<extra_id_34>": 32065,
+ "<extra_id_35>": 32064,
+ "<extra_id_36>": 32063,
+ "<extra_id_37>": 32062,
+ "<extra_id_38>": 32061,
+ "<extra_id_39>": 32060,
+ "<extra_id_3>": 32096,
+ "<extra_id_40>": 32059,
+ "<extra_id_41>": 32058,
+ "<extra_id_42>": 32057,
+ "<extra_id_43>": 32056,
+ "<extra_id_44>": 32055,
+ "<extra_id_45>": 32054,
+ "<extra_id_46>": 32053,
+ "<extra_id_47>": 32052,
+ "<extra_id_48>": 32051,
+ "<extra_id_49>": 32050,
+ "<extra_id_4>": 32095,
+ "<extra_id_50>": 32049,
+ "<extra_id_51>": 32048,
+ "<extra_id_52>": 32047,
+ "<extra_id_53>": 32046,
+ "<extra_id_54>": 32045,
+ "<extra_id_55>": 32044,
+ "<extra_id_56>": 32043,
+ "<extra_id_57>": 32042,
+ "<extra_id_58>": 32041,
+ "<extra_id_59>": 32040,
+ "<extra_id_5>": 32094,
+ "<extra_id_60>": 32039,
+ "<extra_id_61>": 32038,
+ "<extra_id_62>": 32037,
+ "<extra_id_63>": 32036,
+ "<extra_id_64>": 32035,
+ "<extra_id_65>": 32034,
+ "<extra_id_66>": 32033,
+ "<extra_id_67>": 32032,
+ "<extra_id_68>": 32031,
+ "<extra_id_69>": 32030,
+ "<extra_id_6>": 32093,
+ "<extra_id_70>": 32029,
+ "<extra_id_71>": 32028,
+ "<extra_id_72>": 32027,
+ "<extra_id_73>": 32026,
+ "<extra_id_74>": 32025,
+ "<extra_id_75>": 32024,
+ "<extra_id_76>": 32023,
+ "<extra_id_77>": 32022,
+ "<extra_id_78>": 32021,
+ "<extra_id_79>": 32020,
+ "<extra_id_7>": 32092,
+ "<extra_id_80>": 32019,
+ "<extra_id_81>": 32018,
+ "<extra_id_82>": 32017,
+ "<extra_id_83>": 32016,
+ "<extra_id_84>": 32015,
+ "<extra_id_85>": 32014,
+ "<extra_id_86>": 32013,
+ "<extra_id_87>": 32012,
+ "<extra_id_88>": 32011,
+ "<extra_id_89>": 32010,
+ "<extra_id_8>": 32091,
+ "<extra_id_90>": 32009,
+ "<extra_id_91>": 32008,
+ "<extra_id_92>": 32007,
+ "<extra_id_93>": 32006,
+ "<extra_id_94>": 32005,
+ "<extra_id_95>": 32004,
+ "<extra_id_96>": 32003,
+ "<extra_id_97>": 32002,
+ "<extra_id_98>": 32001,
+ "<extra_id_99>": 32000,
+ "<extra_id_9>": 32090
+ }
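
added_tokens.json maps the 100 T5 sentinel tokens (<extra_id_0> … <extra_id_99>) to IDs 32099 down to 32000, i.e. they count down from the end of the 32128-entry vocabulary. A minimal sketch of verifying that mapping after downloading the files in this commit; the local path "./checkpoint" is a placeholder, not part of the commit:

```python
from transformers import T5Tokenizer

# Placeholder path: a local copy of the files uploaded in this commit (assumption).
tokenizer = T5Tokenizer.from_pretrained("./checkpoint")

# Sentinel IDs count down from the end of the vocabulary, matching added_tokens.json.
assert tokenizer.convert_tokens_to_ids("<extra_id_0>") == 32099
assert tokenizer.convert_tokens_to_ids("<extra_id_99>") == 32000
```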
config.json ADDED
@@ -0,0 +1,61 @@
+ {
+ "_num_labels": 2,
+ "architectures": [
+ "T5ForConditionalGeneration"
+ ],
+ "classifier_dropout": 0.0,
+ "d_ff": 3072,
+ "d_kv": 64,
+ "d_model": 768,
+ "decoder_start_token_id": 0,
+ "dense_act_fn": "relu",
+ "dropout_rate": 0.1,
+ "eos_token_id": 2,
+ "feed_forward_proj": "relu",
+ "initializer_factor": 1.0,
+ "is_encoder_decoder": true,
+ "is_gated_act": false,
+ "layer_norm_epsilon": 1e-06,
+ "model_type": "t5",
+ "n_positions": 512,
+ "num_decoder_layers": 12,
+ "num_heads": 12,
+ "num_layers": 12,
+ "output_past": true,
+ "pad_token_id": 0,
+ "relative_attention_max_distance": 128,
+ "relative_attention_num_buckets": 32,
+ "task_specific_params": {
+ "summarization": {
+ "early_stopping": true,
+ "length_penalty": 2.0,
+ "max_length": 200,
+ "min_length": 30,
+ "no_repeat_ngram_size": 3,
+ "num_beams": 4,
+ "prefix": "summarize: "
+ },
+ "translation_en_to_de": {
+ "early_stopping": true,
+ "max_length": 300,
+ "num_beams": 4,
+ "prefix": "translate English to German: "
+ },
+ "translation_en_to_fr": {
+ "early_stopping": true,
+ "max_length": 300,
+ "num_beams": 4,
+ "prefix": "translate English to French: "
+ },
+ "translation_en_to_ro": {
+ "early_stopping": true,
+ "max_length": 300,
+ "num_beams": 4,
+ "prefix": "translate English to Romanian: "
+ }
+ },
+ "torch_dtype": "float32",
+ "transformers_version": "4.51.3",
+ "use_cache": true,
+ "vocab_size": 32128
+ }
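
config.json describes a t5-base-sized T5ForConditionalGeneration (12 encoder/decoder layers, d_model 768) and carries task_specific_params with the standard T5 task prefixes. A minimal sketch of loading the uploaded checkpoint and running the summarization settings from that block; "./checkpoint" and the input text are placeholders, not part of the commit:

```python
import torch
from transformers import T5ForConditionalGeneration, T5Tokenizer

# Placeholder path: a local copy of the files uploaded in this commit (assumption).
model = T5ForConditionalGeneration.from_pretrained("./checkpoint")
tokenizer = T5Tokenizer.from_pretrained("./checkpoint")

# Prefix and generation settings taken from task_specific_params["summarization"].
text = "summarize: " + "Some long input text ..."
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
with torch.no_grad():
    ids = model.generate(
        **inputs,
        num_beams=4,
        max_length=200,
        min_length=30,
        length_penalty=2.0,
        no_repeat_ngram_size=3,
        early_stopping=True,
    )
print(tokenizer.decode(ids[0], skip_special_tokens=True))
```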
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "_from_model_config": true,
+ "decoder_start_token_id": 0,
+ "eos_token_id": 2,
+ "pad_token_id": 0,
+ "transformers_version": "4.51.3"
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e6282d81308a4c20f381a6c67b6fbbe1ae4af5e5ecbb68e502ead3c0e2ffaa30
+ size 891644712
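
model.safetensors is tracked with Git LFS, so the diff shows only the pointer file; the sha256 and size describe the actual ~892 MB weights blob. A sketch of resolving the real file with huggingface_hub; the repo id "user/repo" is a placeholder assumption, since the repository name is not shown in this commit:

```python
from huggingface_hub import hf_hub_download

# "user/repo" is a placeholder for this model repository's id (assumption).
path = hf_hub_download(repo_id="user/repo", filename="model.safetensors")
print(path)  # local cache path of the resolved LFS object
```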
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:74bea9a94b6f03823c7da7d884048e24f54f99c71b6ad4185af42c96c3eb0f24
+ size 1783445259
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:49a85e4ea0dabf583c084cfb7053d3b461fd5a45bc525dffc02a2ebaa8d96f97
+ size 14645
scaler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:104398ff0996f21172574347c3f37cabce8d2845ea276c31d216e4233b8835e6
+ size 1383
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47ac71d6a40c55dd69ede47858c8d98a3b6ecca3725727071949ec3bc2b94e14
+ size 1465
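
optimizer.pt, scheduler.pt, scaler.pt and rng_state.pth are the Trainer's resume state, so this upload is a full training checkpoint rather than just inference weights. A rough sketch of resuming from it with transformers' Trainer, under clearly labeled assumptions: the path "./checkpoint", the TrainingArguments, and the one-example dummy dataset are placeholders, since the real training setup is not part of this commit:

```python
from transformers import (T5ForConditionalGeneration, T5Tokenizer,
                          Trainer, TrainingArguments)

# Placeholder path: a local copy of the files uploaded in this commit (assumption).
model = T5ForConditionalGeneration.from_pretrained("./checkpoint")
tokenizer = T5Tokenizer.from_pretrained("./checkpoint")

# Dummy one-example dataset only to make the sketch runnable; the real data is not in the commit.
enc = tokenizer(["summarize: hello world"], text_target=["hello"], return_tensors="pt")
train_dataset = [{k: v[0] for k, v in enc.items()}]

trainer = Trainer(
    model=model,
    args=TrainingArguments(output_dir="out", per_device_train_batch_size=1),
    train_dataset=train_dataset,
)
# Restores optimizer.pt, scheduler.pt, scaler.pt, rng_state.pth and trainer_state.json.
trainer.train(resume_from_checkpoint="./checkpoint")
```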
special_tokens_map.json ADDED
@@ -0,0 +1,125 @@
+ {
+ "additional_special_tokens": [
+ "<extra_id_0>",
+ "<extra_id_1>",
+ "<extra_id_2>",
+ "<extra_id_3>",
+ "<extra_id_4>",
+ "<extra_id_5>",
+ "<extra_id_6>",
+ "<extra_id_7>",
+ "<extra_id_8>",
+ "<extra_id_9>",
+ "<extra_id_10>",
+ "<extra_id_11>",
+ "<extra_id_12>",
+ "<extra_id_13>",
+ "<extra_id_14>",
+ "<extra_id_15>",
+ "<extra_id_16>",
+ "<extra_id_17>",
+ "<extra_id_18>",
+ "<extra_id_19>",
+ "<extra_id_20>",
+ "<extra_id_21>",
+ "<extra_id_22>",
+ "<extra_id_23>",
+ "<extra_id_24>",
+ "<extra_id_25>",
+ "<extra_id_26>",
+ "<extra_id_27>",
+ "<extra_id_28>",
+ "<extra_id_29>",
+ "<extra_id_30>",
+ "<extra_id_31>",
+ "<extra_id_32>",
+ "<extra_id_33>",
+ "<extra_id_34>",
+ "<extra_id_35>",
+ "<extra_id_36>",
+ "<extra_id_37>",
+ "<extra_id_38>",
+ "<extra_id_39>",
+ "<extra_id_40>",
+ "<extra_id_41>",
+ "<extra_id_42>",
+ "<extra_id_43>",
+ "<extra_id_44>",
+ "<extra_id_45>",
+ "<extra_id_46>",
+ "<extra_id_47>",
+ "<extra_id_48>",
+ "<extra_id_49>",
+ "<extra_id_50>",
+ "<extra_id_51>",
+ "<extra_id_52>",
+ "<extra_id_53>",
+ "<extra_id_54>",
+ "<extra_id_55>",
+ "<extra_id_56>",
+ "<extra_id_57>",
+ "<extra_id_58>",
+ "<extra_id_59>",
+ "<extra_id_60>",
+ "<extra_id_61>",
+ "<extra_id_62>",
+ "<extra_id_63>",
+ "<extra_id_64>",
+ "<extra_id_65>",
+ "<extra_id_66>",
+ "<extra_id_67>",
+ "<extra_id_68>",
+ "<extra_id_69>",
+ "<extra_id_70>",
+ "<extra_id_71>",
+ "<extra_id_72>",
+ "<extra_id_73>",
+ "<extra_id_74>",
+ "<extra_id_75>",
+ "<extra_id_76>",
+ "<extra_id_77>",
+ "<extra_id_78>",
+ "<extra_id_79>",
+ "<extra_id_80>",
+ "<extra_id_81>",
+ "<extra_id_82>",
+ "<extra_id_83>",
+ "<extra_id_84>",
+ "<extra_id_85>",
+ "<extra_id_86>",
+ "<extra_id_87>",
+ "<extra_id_88>",
+ "<extra_id_89>",
+ "<extra_id_90>",
+ "<extra_id_91>",
+ "<extra_id_92>",
+ "<extra_id_93>",
+ "<extra_id_94>",
+ "<extra_id_95>",
+ "<extra_id_96>",
+ "<extra_id_97>",
+ "<extra_id_98>",
+ "<extra_id_99>"
+ ],
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
spiece.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a4eb87011448a4564a3144979384da51eee1da95e554feb22ccc85529535dd5
+ size 1003118
tokenizer_config.json ADDED
@@ -0,0 +1,941 @@
1
+ {
2
+ "add_prefix_space": true,
3
+ "added_tokens_decoder": {
4
+ "0": {
5
+ "content": "<pad>",
6
+ "lstrip": false,
7
+ "normalized": false,
8
+ "rstrip": false,
9
+ "single_word": false,
10
+ "special": true
11
+ },
12
+ "1": {
13
+ "content": "<unk>",
14
+ "lstrip": false,
15
+ "normalized": false,
16
+ "rstrip": false,
17
+ "single_word": false,
18
+ "special": true
19
+ },
20
+ "2": {
21
+ "content": "</s>",
22
+ "lstrip": false,
23
+ "normalized": false,
24
+ "rstrip": false,
25
+ "single_word": false,
26
+ "special": true
27
+ },
28
+ "32000": {
29
+ "content": "<extra_id_99>",
30
+ "lstrip": true,
31
+ "normalized": false,
32
+ "rstrip": true,
33
+ "single_word": true,
34
+ "special": true
35
+ },
36
+ "32001": {
37
+ "content": "<extra_id_98>",
38
+ "lstrip": true,
39
+ "normalized": false,
40
+ "rstrip": true,
41
+ "single_word": true,
42
+ "special": true
43
+ },
44
+ "32002": {
45
+ "content": "<extra_id_97>",
46
+ "lstrip": true,
47
+ "normalized": false,
48
+ "rstrip": true,
49
+ "single_word": true,
50
+ "special": true
51
+ },
52
+ "32003": {
53
+ "content": "<extra_id_96>",
54
+ "lstrip": true,
55
+ "normalized": false,
56
+ "rstrip": true,
57
+ "single_word": true,
58
+ "special": true
59
+ },
60
+ "32004": {
61
+ "content": "<extra_id_95>",
62
+ "lstrip": true,
63
+ "normalized": false,
64
+ "rstrip": true,
65
+ "single_word": true,
66
+ "special": true
67
+ },
68
+ "32005": {
69
+ "content": "<extra_id_94>",
70
+ "lstrip": true,
71
+ "normalized": false,
72
+ "rstrip": true,
73
+ "single_word": true,
74
+ "special": true
75
+ },
76
+ "32006": {
77
+ "content": "<extra_id_93>",
78
+ "lstrip": true,
79
+ "normalized": false,
80
+ "rstrip": true,
81
+ "single_word": true,
82
+ "special": true
83
+ },
84
+ "32007": {
85
+ "content": "<extra_id_92>",
86
+ "lstrip": true,
87
+ "normalized": false,
88
+ "rstrip": true,
89
+ "single_word": true,
90
+ "special": true
91
+ },
92
+ "32008": {
93
+ "content": "<extra_id_91>",
94
+ "lstrip": true,
95
+ "normalized": false,
96
+ "rstrip": true,
97
+ "single_word": true,
98
+ "special": true
99
+ },
100
+ "32009": {
101
+ "content": "<extra_id_90>",
102
+ "lstrip": true,
103
+ "normalized": false,
104
+ "rstrip": true,
105
+ "single_word": true,
106
+ "special": true
107
+ },
108
+ "32010": {
109
+ "content": "<extra_id_89>",
110
+ "lstrip": true,
111
+ "normalized": false,
112
+ "rstrip": true,
113
+ "single_word": true,
114
+ "special": true
115
+ },
116
+ "32011": {
117
+ "content": "<extra_id_88>",
118
+ "lstrip": true,
119
+ "normalized": false,
120
+ "rstrip": true,
121
+ "single_word": true,
122
+ "special": true
123
+ },
124
+ "32012": {
125
+ "content": "<extra_id_87>",
126
+ "lstrip": true,
127
+ "normalized": false,
128
+ "rstrip": true,
129
+ "single_word": true,
130
+ "special": true
131
+ },
132
+ "32013": {
133
+ "content": "<extra_id_86>",
134
+ "lstrip": true,
135
+ "normalized": false,
136
+ "rstrip": true,
137
+ "single_word": true,
138
+ "special": true
139
+ },
140
+ "32014": {
141
+ "content": "<extra_id_85>",
142
+ "lstrip": true,
143
+ "normalized": false,
144
+ "rstrip": true,
145
+ "single_word": true,
146
+ "special": true
147
+ },
148
+ "32015": {
149
+ "content": "<extra_id_84>",
150
+ "lstrip": true,
151
+ "normalized": false,
152
+ "rstrip": true,
153
+ "single_word": true,
154
+ "special": true
155
+ },
156
+ "32016": {
157
+ "content": "<extra_id_83>",
158
+ "lstrip": true,
159
+ "normalized": false,
160
+ "rstrip": true,
161
+ "single_word": true,
162
+ "special": true
163
+ },
164
+ "32017": {
165
+ "content": "<extra_id_82>",
166
+ "lstrip": true,
167
+ "normalized": false,
168
+ "rstrip": true,
169
+ "single_word": true,
170
+ "special": true
171
+ },
172
+ "32018": {
173
+ "content": "<extra_id_81>",
174
+ "lstrip": true,
175
+ "normalized": false,
176
+ "rstrip": true,
177
+ "single_word": true,
178
+ "special": true
179
+ },
180
+ "32019": {
181
+ "content": "<extra_id_80>",
182
+ "lstrip": true,
183
+ "normalized": false,
184
+ "rstrip": true,
185
+ "single_word": true,
186
+ "special": true
187
+ },
188
+ "32020": {
189
+ "content": "<extra_id_79>",
190
+ "lstrip": true,
191
+ "normalized": false,
192
+ "rstrip": true,
193
+ "single_word": true,
194
+ "special": true
195
+ },
196
+ "32021": {
197
+ "content": "<extra_id_78>",
198
+ "lstrip": true,
199
+ "normalized": false,
200
+ "rstrip": true,
201
+ "single_word": true,
202
+ "special": true
203
+ },
204
+ "32022": {
205
+ "content": "<extra_id_77>",
206
+ "lstrip": true,
207
+ "normalized": false,
208
+ "rstrip": true,
209
+ "single_word": true,
210
+ "special": true
211
+ },
212
+ "32023": {
213
+ "content": "<extra_id_76>",
214
+ "lstrip": true,
215
+ "normalized": false,
216
+ "rstrip": true,
217
+ "single_word": true,
218
+ "special": true
219
+ },
220
+ "32024": {
221
+ "content": "<extra_id_75>",
222
+ "lstrip": true,
223
+ "normalized": false,
224
+ "rstrip": true,
225
+ "single_word": true,
226
+ "special": true
227
+ },
228
+ "32025": {
229
+ "content": "<extra_id_74>",
230
+ "lstrip": true,
231
+ "normalized": false,
232
+ "rstrip": true,
233
+ "single_word": true,
234
+ "special": true
235
+ },
236
+ "32026": {
237
+ "content": "<extra_id_73>",
238
+ "lstrip": true,
239
+ "normalized": false,
240
+ "rstrip": true,
241
+ "single_word": true,
242
+ "special": true
243
+ },
244
+ "32027": {
245
+ "content": "<extra_id_72>",
246
+ "lstrip": true,
247
+ "normalized": false,
248
+ "rstrip": true,
249
+ "single_word": true,
250
+ "special": true
251
+ },
252
+ "32028": {
253
+ "content": "<extra_id_71>",
254
+ "lstrip": true,
255
+ "normalized": false,
256
+ "rstrip": true,
257
+ "single_word": true,
258
+ "special": true
259
+ },
260
+ "32029": {
261
+ "content": "<extra_id_70>",
262
+ "lstrip": true,
263
+ "normalized": false,
264
+ "rstrip": true,
265
+ "single_word": true,
266
+ "special": true
267
+ },
268
+ "32030": {
269
+ "content": "<extra_id_69>",
270
+ "lstrip": true,
271
+ "normalized": false,
272
+ "rstrip": true,
273
+ "single_word": true,
274
+ "special": true
275
+ },
276
+ "32031": {
277
+ "content": "<extra_id_68>",
278
+ "lstrip": true,
279
+ "normalized": false,
280
+ "rstrip": true,
281
+ "single_word": true,
282
+ "special": true
283
+ },
284
+ "32032": {
285
+ "content": "<extra_id_67>",
286
+ "lstrip": true,
287
+ "normalized": false,
288
+ "rstrip": true,
289
+ "single_word": true,
290
+ "special": true
291
+ },
292
+ "32033": {
293
+ "content": "<extra_id_66>",
294
+ "lstrip": true,
295
+ "normalized": false,
296
+ "rstrip": true,
297
+ "single_word": true,
298
+ "special": true
299
+ },
300
+ "32034": {
301
+ "content": "<extra_id_65>",
302
+ "lstrip": true,
303
+ "normalized": false,
304
+ "rstrip": true,
305
+ "single_word": true,
306
+ "special": true
307
+ },
308
+ "32035": {
309
+ "content": "<extra_id_64>",
310
+ "lstrip": true,
311
+ "normalized": false,
312
+ "rstrip": true,
313
+ "single_word": true,
314
+ "special": true
315
+ },
316
+ "32036": {
317
+ "content": "<extra_id_63>",
318
+ "lstrip": true,
319
+ "normalized": false,
320
+ "rstrip": true,
321
+ "single_word": true,
322
+ "special": true
323
+ },
324
+ "32037": {
325
+ "content": "<extra_id_62>",
326
+ "lstrip": true,
327
+ "normalized": false,
328
+ "rstrip": true,
329
+ "single_word": true,
330
+ "special": true
331
+ },
332
+ "32038": {
333
+ "content": "<extra_id_61>",
334
+ "lstrip": true,
335
+ "normalized": false,
336
+ "rstrip": true,
337
+ "single_word": true,
338
+ "special": true
339
+ },
340
+ "32039": {
341
+ "content": "<extra_id_60>",
342
+ "lstrip": true,
343
+ "normalized": false,
344
+ "rstrip": true,
345
+ "single_word": true,
346
+ "special": true
347
+ },
348
+ "32040": {
349
+ "content": "<extra_id_59>",
350
+ "lstrip": true,
351
+ "normalized": false,
352
+ "rstrip": true,
353
+ "single_word": true,
354
+ "special": true
355
+ },
356
+ "32041": {
357
+ "content": "<extra_id_58>",
358
+ "lstrip": true,
359
+ "normalized": false,
360
+ "rstrip": true,
361
+ "single_word": true,
362
+ "special": true
363
+ },
364
+ "32042": {
365
+ "content": "<extra_id_57>",
366
+ "lstrip": true,
367
+ "normalized": false,
368
+ "rstrip": true,
369
+ "single_word": true,
370
+ "special": true
371
+ },
372
+ "32043": {
373
+ "content": "<extra_id_56>",
374
+ "lstrip": true,
375
+ "normalized": false,
376
+ "rstrip": true,
377
+ "single_word": true,
378
+ "special": true
379
+ },
380
+ "32044": {
381
+ "content": "<extra_id_55>",
382
+ "lstrip": true,
383
+ "normalized": false,
384
+ "rstrip": true,
385
+ "single_word": true,
386
+ "special": true
387
+ },
388
+ "32045": {
389
+ "content": "<extra_id_54>",
390
+ "lstrip": true,
391
+ "normalized": false,
392
+ "rstrip": true,
393
+ "single_word": true,
394
+ "special": true
395
+ },
396
+ "32046": {
397
+ "content": "<extra_id_53>",
398
+ "lstrip": true,
399
+ "normalized": false,
400
+ "rstrip": true,
401
+ "single_word": true,
402
+ "special": true
403
+ },
404
+ "32047": {
405
+ "content": "<extra_id_52>",
406
+ "lstrip": true,
407
+ "normalized": false,
408
+ "rstrip": true,
409
+ "single_word": true,
410
+ "special": true
411
+ },
412
+ "32048": {
413
+ "content": "<extra_id_51>",
414
+ "lstrip": true,
415
+ "normalized": false,
416
+ "rstrip": true,
417
+ "single_word": true,
418
+ "special": true
419
+ },
420
+ "32049": {
421
+ "content": "<extra_id_50>",
422
+ "lstrip": true,
423
+ "normalized": false,
424
+ "rstrip": true,
425
+ "single_word": true,
426
+ "special": true
427
+ },
428
+ "32050": {
429
+ "content": "<extra_id_49>",
430
+ "lstrip": true,
431
+ "normalized": false,
432
+ "rstrip": true,
433
+ "single_word": true,
434
+ "special": true
435
+ },
436
+ "32051": {
437
+ "content": "<extra_id_48>",
438
+ "lstrip": true,
439
+ "normalized": false,
440
+ "rstrip": true,
441
+ "single_word": true,
442
+ "special": true
443
+ },
444
+ "32052": {
445
+ "content": "<extra_id_47>",
446
+ "lstrip": true,
447
+ "normalized": false,
448
+ "rstrip": true,
449
+ "single_word": true,
450
+ "special": true
451
+ },
452
+ "32053": {
453
+ "content": "<extra_id_46>",
454
+ "lstrip": true,
455
+ "normalized": false,
456
+ "rstrip": true,
457
+ "single_word": true,
458
+ "special": true
459
+ },
460
+ "32054": {
461
+ "content": "<extra_id_45>",
462
+ "lstrip": true,
463
+ "normalized": false,
464
+ "rstrip": true,
465
+ "single_word": true,
466
+ "special": true
467
+ },
468
+ "32055": {
469
+ "content": "<extra_id_44>",
470
+ "lstrip": true,
471
+ "normalized": false,
472
+ "rstrip": true,
473
+ "single_word": true,
474
+ "special": true
475
+ },
476
+ "32056": {
477
+ "content": "<extra_id_43>",
478
+ "lstrip": true,
479
+ "normalized": false,
480
+ "rstrip": true,
481
+ "single_word": true,
482
+ "special": true
483
+ },
484
+ "32057": {
485
+ "content": "<extra_id_42>",
486
+ "lstrip": true,
487
+ "normalized": false,
488
+ "rstrip": true,
489
+ "single_word": true,
490
+ "special": true
491
+ },
492
+ "32058": {
493
+ "content": "<extra_id_41>",
494
+ "lstrip": true,
495
+ "normalized": false,
496
+ "rstrip": true,
497
+ "single_word": true,
498
+ "special": true
499
+ },
500
+ "32059": {
501
+ "content": "<extra_id_40>",
502
+ "lstrip": true,
503
+ "normalized": false,
504
+ "rstrip": true,
505
+ "single_word": true,
506
+ "special": true
507
+ },
508
+ "32060": {
509
+ "content": "<extra_id_39>",
510
+ "lstrip": true,
511
+ "normalized": false,
512
+ "rstrip": true,
513
+ "single_word": true,
514
+ "special": true
515
+ },
516
+ "32061": {
517
+ "content": "<extra_id_38>",
518
+ "lstrip": true,
519
+ "normalized": false,
520
+ "rstrip": true,
521
+ "single_word": true,
522
+ "special": true
523
+ },
524
+ "32062": {
525
+ "content": "<extra_id_37>",
526
+ "lstrip": true,
527
+ "normalized": false,
528
+ "rstrip": true,
529
+ "single_word": true,
530
+ "special": true
531
+ },
532
+ "32063": {
533
+ "content": "<extra_id_36>",
534
+ "lstrip": true,
535
+ "normalized": false,
536
+ "rstrip": true,
537
+ "single_word": true,
538
+ "special": true
539
+ },
540
+ "32064": {
541
+ "content": "<extra_id_35>",
542
+ "lstrip": true,
543
+ "normalized": false,
544
+ "rstrip": true,
545
+ "single_word": true,
546
+ "special": true
547
+ },
548
+ "32065": {
549
+ "content": "<extra_id_34>",
550
+ "lstrip": true,
551
+ "normalized": false,
552
+ "rstrip": true,
553
+ "single_word": true,
554
+ "special": true
555
+ },
556
+ "32066": {
557
+ "content": "<extra_id_33>",
558
+ "lstrip": true,
559
+ "normalized": false,
560
+ "rstrip": true,
561
+ "single_word": true,
562
+ "special": true
563
+ },
564
+ "32067": {
565
+ "content": "<extra_id_32>",
566
+ "lstrip": true,
567
+ "normalized": false,
568
+ "rstrip": true,
569
+ "single_word": true,
570
+ "special": true
571
+ },
572
+ "32068": {
573
+ "content": "<extra_id_31>",
574
+ "lstrip": true,
575
+ "normalized": false,
576
+ "rstrip": true,
577
+ "single_word": true,
578
+ "special": true
579
+ },
580
+ "32069": {
581
+ "content": "<extra_id_30>",
582
+ "lstrip": true,
583
+ "normalized": false,
584
+ "rstrip": true,
585
+ "single_word": true,
586
+ "special": true
587
+ },
588
+ "32070": {
589
+ "content": "<extra_id_29>",
590
+ "lstrip": true,
591
+ "normalized": false,
592
+ "rstrip": true,
593
+ "single_word": true,
594
+ "special": true
595
+ },
596
+ "32071": {
597
+ "content": "<extra_id_28>",
598
+ "lstrip": true,
599
+ "normalized": false,
600
+ "rstrip": true,
601
+ "single_word": true,
602
+ "special": true
603
+ },
604
+ "32072": {
605
+ "content": "<extra_id_27>",
606
+ "lstrip": true,
607
+ "normalized": false,
608
+ "rstrip": true,
609
+ "single_word": true,
610
+ "special": true
611
+ },
612
+ "32073": {
613
+ "content": "<extra_id_26>",
614
+ "lstrip": true,
615
+ "normalized": false,
616
+ "rstrip": true,
617
+ "single_word": true,
618
+ "special": true
619
+ },
620
+ "32074": {
621
+ "content": "<extra_id_25>",
622
+ "lstrip": true,
623
+ "normalized": false,
624
+ "rstrip": true,
625
+ "single_word": true,
626
+ "special": true
627
+ },
628
+ "32075": {
629
+ "content": "<extra_id_24>",
630
+ "lstrip": true,
631
+ "normalized": false,
632
+ "rstrip": true,
633
+ "single_word": true,
634
+ "special": true
635
+ },
636
+ "32076": {
637
+ "content": "<extra_id_23>",
638
+ "lstrip": true,
639
+ "normalized": false,
640
+ "rstrip": true,
641
+ "single_word": true,
642
+ "special": true
643
+ },
644
+ "32077": {
645
+ "content": "<extra_id_22>",
646
+ "lstrip": true,
647
+ "normalized": false,
648
+ "rstrip": true,
649
+ "single_word": true,
650
+ "special": true
651
+ },
652
+ "32078": {
653
+ "content": "<extra_id_21>",
654
+ "lstrip": true,
655
+ "normalized": false,
656
+ "rstrip": true,
657
+ "single_word": true,
658
+ "special": true
659
+ },
660
+ "32079": {
661
+ "content": "<extra_id_20>",
662
+ "lstrip": true,
663
+ "normalized": false,
664
+ "rstrip": true,
665
+ "single_word": true,
666
+ "special": true
667
+ },
668
+ "32080": {
669
+ "content": "<extra_id_19>",
670
+ "lstrip": true,
671
+ "normalized": false,
672
+ "rstrip": true,
673
+ "single_word": true,
674
+ "special": true
675
+ },
676
+ "32081": {
677
+ "content": "<extra_id_18>",
678
+ "lstrip": true,
679
+ "normalized": false,
680
+ "rstrip": true,
681
+ "single_word": true,
682
+ "special": true
683
+ },
684
+ "32082": {
685
+ "content": "<extra_id_17>",
686
+ "lstrip": true,
687
+ "normalized": false,
688
+ "rstrip": true,
689
+ "single_word": true,
690
+ "special": true
691
+ },
692
+ "32083": {
693
+ "content": "<extra_id_16>",
694
+ "lstrip": true,
695
+ "normalized": false,
696
+ "rstrip": true,
697
+ "single_word": true,
698
+ "special": true
699
+ },
700
+ "32084": {
701
+ "content": "<extra_id_15>",
702
+ "lstrip": true,
703
+ "normalized": false,
704
+ "rstrip": true,
705
+ "single_word": true,
706
+ "special": true
707
+ },
708
+ "32085": {
709
+ "content": "<extra_id_14>",
710
+ "lstrip": true,
711
+ "normalized": false,
712
+ "rstrip": true,
713
+ "single_word": true,
714
+ "special": true
715
+ },
716
+ "32086": {
717
+ "content": "<extra_id_13>",
718
+ "lstrip": true,
719
+ "normalized": false,
720
+ "rstrip": true,
721
+ "single_word": true,
722
+ "special": true
723
+ },
724
+ "32087": {
725
+ "content": "<extra_id_12>",
726
+ "lstrip": true,
727
+ "normalized": false,
728
+ "rstrip": true,
729
+ "single_word": true,
730
+ "special": true
731
+ },
732
+ "32088": {
733
+ "content": "<extra_id_11>",
734
+ "lstrip": true,
735
+ "normalized": false,
736
+ "rstrip": true,
737
+ "single_word": true,
738
+ "special": true
739
+ },
740
+ "32089": {
741
+ "content": "<extra_id_10>",
742
+ "lstrip": true,
743
+ "normalized": false,
744
+ "rstrip": true,
745
+ "single_word": true,
746
+ "special": true
747
+ },
748
+ "32090": {
749
+ "content": "<extra_id_9>",
750
+ "lstrip": true,
751
+ "normalized": false,
752
+ "rstrip": true,
753
+ "single_word": true,
754
+ "special": true
755
+ },
756
+ "32091": {
757
+ "content": "<extra_id_8>",
758
+ "lstrip": true,
759
+ "normalized": false,
760
+ "rstrip": true,
761
+ "single_word": true,
762
+ "special": true
763
+ },
764
+ "32092": {
765
+ "content": "<extra_id_7>",
766
+ "lstrip": true,
767
+ "normalized": false,
768
+ "rstrip": true,
769
+ "single_word": true,
770
+ "special": true
771
+ },
772
+ "32093": {
773
+ "content": "<extra_id_6>",
774
+ "lstrip": true,
775
+ "normalized": false,
776
+ "rstrip": true,
777
+ "single_word": true,
778
+ "special": true
779
+ },
780
+ "32094": {
781
+ "content": "<extra_id_5>",
782
+ "lstrip": true,
783
+ "normalized": false,
784
+ "rstrip": true,
785
+ "single_word": true,
786
+ "special": true
787
+ },
788
+ "32095": {
789
+ "content": "<extra_id_4>",
790
+ "lstrip": true,
791
+ "normalized": false,
792
+ "rstrip": true,
793
+ "single_word": true,
794
+ "special": true
795
+ },
796
+ "32096": {
797
+ "content": "<extra_id_3>",
798
+ "lstrip": true,
799
+ "normalized": false,
800
+ "rstrip": true,
801
+ "single_word": true,
802
+ "special": true
803
+ },
804
+ "32097": {
805
+ "content": "<extra_id_2>",
806
+ "lstrip": true,
807
+ "normalized": false,
808
+ "rstrip": true,
809
+ "single_word": true,
810
+ "special": true
811
+ },
812
+ "32098": {
813
+ "content": "<extra_id_1>",
814
+ "lstrip": true,
815
+ "normalized": false,
816
+ "rstrip": true,
817
+ "single_word": true,
818
+ "special": true
819
+ },
820
+ "32099": {
821
+ "content": "<extra_id_0>",
822
+ "lstrip": true,
823
+ "normalized": false,
824
+ "rstrip": true,
825
+ "single_word": true,
826
+ "special": true
827
+ }
828
+ },
829
+ "additional_special_tokens": [
830
+ "<extra_id_0>",
831
+ "<extra_id_1>",
832
+ "<extra_id_2>",
833
+ "<extra_id_3>",
834
+ "<extra_id_4>",
835
+ "<extra_id_5>",
836
+ "<extra_id_6>",
837
+ "<extra_id_7>",
838
+ "<extra_id_8>",
839
+ "<extra_id_9>",
840
+ "<extra_id_10>",
841
+ "<extra_id_11>",
842
+ "<extra_id_12>",
843
+ "<extra_id_13>",
844
+ "<extra_id_14>",
845
+ "<extra_id_15>",
846
+ "<extra_id_16>",
847
+ "<extra_id_17>",
848
+ "<extra_id_18>",
849
+ "<extra_id_19>",
850
+ "<extra_id_20>",
851
+ "<extra_id_21>",
852
+ "<extra_id_22>",
853
+ "<extra_id_23>",
854
+ "<extra_id_24>",
855
+ "<extra_id_25>",
856
+ "<extra_id_26>",
857
+ "<extra_id_27>",
858
+ "<extra_id_28>",
859
+ "<extra_id_29>",
860
+ "<extra_id_30>",
861
+ "<extra_id_31>",
862
+ "<extra_id_32>",
863
+ "<extra_id_33>",
864
+ "<extra_id_34>",
865
+ "<extra_id_35>",
866
+ "<extra_id_36>",
867
+ "<extra_id_37>",
868
+ "<extra_id_38>",
869
+ "<extra_id_39>",
870
+ "<extra_id_40>",
871
+ "<extra_id_41>",
872
+ "<extra_id_42>",
873
+ "<extra_id_43>",
874
+ "<extra_id_44>",
875
+ "<extra_id_45>",
876
+ "<extra_id_46>",
877
+ "<extra_id_47>",
878
+ "<extra_id_48>",
879
+ "<extra_id_49>",
880
+ "<extra_id_50>",
881
+ "<extra_id_51>",
882
+ "<extra_id_52>",
883
+ "<extra_id_53>",
884
+ "<extra_id_54>",
885
+ "<extra_id_55>",
886
+ "<extra_id_56>",
887
+ "<extra_id_57>",
888
+ "<extra_id_58>",
889
+ "<extra_id_59>",
890
+ "<extra_id_60>",
891
+ "<extra_id_61>",
892
+ "<extra_id_62>",
893
+ "<extra_id_63>",
894
+ "<extra_id_64>",
895
+ "<extra_id_65>",
896
+ "<extra_id_66>",
897
+ "<extra_id_67>",
898
+ "<extra_id_68>",
899
+ "<extra_id_69>",
900
+ "<extra_id_70>",
901
+ "<extra_id_71>",
902
+ "<extra_id_72>",
903
+ "<extra_id_73>",
904
+ "<extra_id_74>",
905
+ "<extra_id_75>",
906
+ "<extra_id_76>",
907
+ "<extra_id_77>",
908
+ "<extra_id_78>",
909
+ "<extra_id_79>",
910
+ "<extra_id_80>",
911
+ "<extra_id_81>",
912
+ "<extra_id_82>",
913
+ "<extra_id_83>",
914
+ "<extra_id_84>",
915
+ "<extra_id_85>",
916
+ "<extra_id_86>",
917
+ "<extra_id_87>",
918
+ "<extra_id_88>",
919
+ "<extra_id_89>",
920
+ "<extra_id_90>",
921
+ "<extra_id_91>",
922
+ "<extra_id_92>",
923
+ "<extra_id_93>",
924
+ "<extra_id_94>",
925
+ "<extra_id_95>",
926
+ "<extra_id_96>",
927
+ "<extra_id_97>",
928
+ "<extra_id_98>",
929
+ "<extra_id_99>"
930
+ ],
931
+ "clean_up_tokenization_spaces": false,
932
+ "eos_token": "</s>",
933
+ "extra_ids": 100,
934
+ "extra_special_tokens": {},
935
+ "legacy": true,
936
+ "model_max_length": 1000000000000000019884624838656,
937
+ "pad_token": "<pad>",
938
+ "sp_model_kwargs": {},
939
+ "tokenizer_class": "T5Tokenizer",
940
+ "unk_token": "<unk>"
941
+ }
trainer_state.json ADDED
@@ -0,0 +1,2448 @@
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 19.999535243996903,
6
+ "eval_steps": 500,
7
+ "global_step": 32260,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.061967467079783116,
14
+ "grad_norm": 20.777441024780273,
15
+ "learning_rate": 9.97179169249845e-06,
16
+ "loss": 5.1314,
17
+ "step": 100
18
+ },
19
+ {
20
+ "epoch": 0.12393493415956623,
21
+ "grad_norm": 7.313554763793945,
22
+ "learning_rate": 9.940793552386857e-06,
23
+ "loss": 2.5079,
24
+ "step": 200
25
+ },
26
+ {
27
+ "epoch": 0.18590240123934934,
28
+ "grad_norm": 6.049108982086182,
29
+ "learning_rate": 9.909795412275263e-06,
30
+ "loss": 2.262,
31
+ "step": 300
32
+ },
33
+ {
34
+ "epoch": 0.24786986831913246,
35
+ "grad_norm": 7.533471584320068,
36
+ "learning_rate": 9.878797272163671e-06,
37
+ "loss": 2.1505,
38
+ "step": 400
39
+ },
40
+ {
41
+ "epoch": 0.30983733539891556,
42
+ "grad_norm": 7.321562767028809,
43
+ "learning_rate": 9.847799132052077e-06,
44
+ "loss": 2.0719,
45
+ "step": 500
46
+ },
47
+ {
48
+ "epoch": 0.3718048024786987,
49
+ "grad_norm": 9.049120903015137,
50
+ "learning_rate": 9.816800991940483e-06,
51
+ "loss": 2.0476,
52
+ "step": 600
53
+ },
54
+ {
55
+ "epoch": 0.4337722695584818,
56
+ "grad_norm": 4.262355804443359,
57
+ "learning_rate": 9.785802851828891e-06,
58
+ "loss": 2.0214,
59
+ "step": 700
60
+ },
61
+ {
62
+ "epoch": 0.4957397366382649,
63
+ "grad_norm": 4.2409491539001465,
64
+ "learning_rate": 9.754804711717297e-06,
65
+ "loss": 2.0108,
66
+ "step": 800
67
+ },
68
+ {
69
+ "epoch": 0.557707203718048,
70
+ "grad_norm": 4.823696136474609,
71
+ "learning_rate": 9.723806571605703e-06,
72
+ "loss": 1.9636,
73
+ "step": 900
74
+ },
75
+ {
76
+ "epoch": 0.6196746707978311,
77
+ "grad_norm": 16.929576873779297,
78
+ "learning_rate": 9.692808431494111e-06,
79
+ "loss": 1.9787,
80
+ "step": 1000
81
+ },
82
+ {
83
+ "epoch": 0.6816421378776143,
84
+ "grad_norm": 4.100649833679199,
85
+ "learning_rate": 9.661810291382517e-06,
86
+ "loss": 1.9076,
87
+ "step": 1100
88
+ },
89
+ {
90
+ "epoch": 0.7436096049573974,
91
+ "grad_norm": 8.428403854370117,
92
+ "learning_rate": 9.630812151270923e-06,
93
+ "loss": 1.9312,
94
+ "step": 1200
95
+ },
96
+ {
97
+ "epoch": 0.8055770720371804,
98
+ "grad_norm": 21.134408950805664,
99
+ "learning_rate": 9.599814011159331e-06,
100
+ "loss": 1.9262,
101
+ "step": 1300
102
+ },
103
+ {
104
+ "epoch": 0.8675445391169636,
105
+ "grad_norm": 4.445871829986572,
106
+ "learning_rate": 9.568815871047737e-06,
107
+ "loss": 1.9038,
108
+ "step": 1400
109
+ },
110
+ {
111
+ "epoch": 0.9295120061967467,
112
+ "grad_norm": 10.70151424407959,
113
+ "learning_rate": 9.537817730936143e-06,
114
+ "loss": 1.909,
115
+ "step": 1500
116
+ },
117
+ {
118
+ "epoch": 0.9914794732765299,
119
+ "grad_norm": 4.220454692840576,
120
+ "learning_rate": 9.506819590824551e-06,
121
+ "loss": 1.9011,
122
+ "step": 1600
123
+ },
124
+ {
125
+ "epoch": 0.9995352439969016,
126
+ "eval_loss": 1.478387713432312,
127
+ "eval_runtime": 71.2201,
128
+ "eval_samples_per_second": 45.31,
129
+ "eval_steps_per_second": 22.662,
130
+ "step": 1613
131
+ },
132
+ {
133
+ "epoch": 1.0539116963594113,
134
+ "grad_norm": 5.241837024688721,
135
+ "learning_rate": 9.475821450712957e-06,
136
+ "loss": 1.8493,
137
+ "step": 1700
138
+ },
139
+ {
140
+ "epoch": 1.1158791634391945,
141
+ "grad_norm": 8.490580558776855,
142
+ "learning_rate": 9.444823310601364e-06,
143
+ "loss": 1.8844,
144
+ "step": 1800
145
+ },
146
+ {
147
+ "epoch": 1.1778466305189776,
148
+ "grad_norm": 4.5867743492126465,
149
+ "learning_rate": 9.413825170489771e-06,
150
+ "loss": 1.8588,
151
+ "step": 1900
152
+ },
153
+ {
154
+ "epoch": 1.2398140975987606,
155
+ "grad_norm": 5.750781536102295,
156
+ "learning_rate": 9.382827030378178e-06,
157
+ "loss": 1.8462,
158
+ "step": 2000
159
+ },
160
+ {
161
+ "epoch": 1.3017815646785438,
162
+ "grad_norm": 3.4414772987365723,
163
+ "learning_rate": 9.351828890266584e-06,
164
+ "loss": 1.8476,
165
+ "step": 2100
166
+ },
167
+ {
168
+ "epoch": 1.363749031758327,
169
+ "grad_norm": 3.991649627685547,
170
+ "learning_rate": 9.320830750154991e-06,
171
+ "loss": 1.8111,
172
+ "step": 2200
173
+ },
174
+ {
175
+ "epoch": 1.42571649883811,
176
+ "grad_norm": 3.774946689605713,
177
+ "learning_rate": 9.289832610043398e-06,
178
+ "loss": 1.8224,
179
+ "step": 2300
180
+ },
181
+ {
182
+ "epoch": 1.487683965917893,
183
+ "grad_norm": 11.512676239013672,
184
+ "learning_rate": 9.258834469931804e-06,
185
+ "loss": 1.8164,
186
+ "step": 2400
187
+ },
188
+ {
189
+ "epoch": 1.5496514329976763,
190
+ "grad_norm": 4.229243755340576,
191
+ "learning_rate": 9.227836329820212e-06,
192
+ "loss": 1.8052,
193
+ "step": 2500
194
+ },
195
+ {
196
+ "epoch": 1.6116189000774592,
197
+ "grad_norm": 3.6015431880950928,
198
+ "learning_rate": 9.196838189708618e-06,
199
+ "loss": 1.8253,
200
+ "step": 2600
201
+ },
202
+ {
203
+ "epoch": 1.6735863671572424,
204
+ "grad_norm": 3.8703105449676514,
205
+ "learning_rate": 9.165840049597024e-06,
206
+ "loss": 1.7702,
207
+ "step": 2700
208
+ },
209
+ {
210
+ "epoch": 1.7355538342370256,
211
+ "grad_norm": 3.6859114170074463,
212
+ "learning_rate": 9.134841909485432e-06,
213
+ "loss": 1.7925,
214
+ "step": 2800
215
+ },
216
+ {
217
+ "epoch": 1.7975213013168085,
218
+ "grad_norm": 6.744151592254639,
219
+ "learning_rate": 9.103843769373838e-06,
220
+ "loss": 1.7995,
221
+ "step": 2900
222
+ },
223
+ {
224
+ "epoch": 1.859488768396592,
225
+ "grad_norm": 5.791450500488281,
226
+ "learning_rate": 9.072845629262244e-06,
227
+ "loss": 1.7894,
228
+ "step": 3000
229
+ },
230
+ {
231
+ "epoch": 1.921456235476375,
232
+ "grad_norm": 5.028300762176514,
233
+ "learning_rate": 9.041847489150652e-06,
234
+ "loss": 1.7955,
235
+ "step": 3100
236
+ },
237
+ {
238
+ "epoch": 1.9834237025561579,
239
+ "grad_norm": 8.930970191955566,
240
+ "learning_rate": 9.010849349039058e-06,
241
+ "loss": 1.7856,
242
+ "step": 3200
243
+ },
244
+ {
245
+ "epoch": 1.9995352439969016,
246
+ "eval_loss": 1.438106894493103,
247
+ "eval_runtime": 71.4662,
248
+ "eval_samples_per_second": 45.154,
249
+ "eval_steps_per_second": 22.584,
250
+ "step": 3226
251
+ },
252
+ {
253
+ "epoch": 2.0458559256390396,
254
+ "grad_norm": 4.4157915115356445,
255
+ "learning_rate": 8.979851208927464e-06,
256
+ "loss": 1.7661,
257
+ "step": 3300
258
+ },
259
+ {
260
+ "epoch": 2.1078233927188226,
261
+ "grad_norm": 5.970052242279053,
262
+ "learning_rate": 8.948853068815872e-06,
263
+ "loss": 1.7539,
264
+ "step": 3400
265
+ },
266
+ {
267
+ "epoch": 2.1697908597986055,
268
+ "grad_norm": 6.610071659088135,
269
+ "learning_rate": 8.917854928704278e-06,
270
+ "loss": 1.7629,
271
+ "step": 3500
272
+ },
273
+ {
274
+ "epoch": 2.231758326878389,
275
+ "grad_norm": 5.124378204345703,
276
+ "learning_rate": 8.886856788592684e-06,
277
+ "loss": 1.7512,
278
+ "step": 3600
279
+ },
280
+ {
281
+ "epoch": 2.293725793958172,
282
+ "grad_norm": 3.7913496494293213,
283
+ "learning_rate": 8.855858648481092e-06,
284
+ "loss": 1.7574,
285
+ "step": 3700
286
+ },
287
+ {
288
+ "epoch": 2.3556932610379553,
289
+ "grad_norm": 3.5346639156341553,
290
+ "learning_rate": 8.824860508369498e-06,
291
+ "loss": 1.7298,
292
+ "step": 3800
293
+ },
294
+ {
295
+ "epoch": 2.4176607281177382,
296
+ "grad_norm": 3.6782634258270264,
297
+ "learning_rate": 8.793862368257904e-06,
298
+ "loss": 1.756,
299
+ "step": 3900
300
+ },
301
+ {
302
+ "epoch": 2.479628195197521,
303
+ "grad_norm": 3.087649345397949,
304
+ "learning_rate": 8.762864228146312e-06,
305
+ "loss": 1.7557,
306
+ "step": 4000
307
+ },
308
+ {
309
+ "epoch": 2.541595662277304,
310
+ "grad_norm": 3.854729413986206,
311
+ "learning_rate": 8.731866088034718e-06,
312
+ "loss": 1.7425,
313
+ "step": 4100
314
+ },
315
+ {
316
+ "epoch": 2.6035631293570876,
317
+ "grad_norm": 3.7198565006256104,
318
+ "learning_rate": 8.700867947923124e-06,
319
+ "loss": 1.7379,
320
+ "step": 4200
321
+ },
322
+ {
323
+ "epoch": 2.6655305964368705,
324
+ "grad_norm": 4.5739593505859375,
325
+ "learning_rate": 8.669869807811532e-06,
326
+ "loss": 1.7379,
327
+ "step": 4300
328
+ },
329
+ {
330
+ "epoch": 2.727498063516654,
331
+ "grad_norm": 4.533520698547363,
332
+ "learning_rate": 8.638871667699938e-06,
333
+ "loss": 1.7475,
334
+ "step": 4400
335
+ },
336
+ {
337
+ "epoch": 2.789465530596437,
338
+ "grad_norm": 3.113757610321045,
339
+ "learning_rate": 8.607873527588344e-06,
340
+ "loss": 1.7243,
341
+ "step": 4500
342
+ },
343
+ {
344
+ "epoch": 2.85143299767622,
345
+ "grad_norm": 3.061246871948242,
346
+ "learning_rate": 8.576875387476752e-06,
347
+ "loss": 1.7114,
348
+ "step": 4600
349
+ },
350
+ {
351
+ "epoch": 2.9134004647560032,
352
+ "grad_norm": 9.025867462158203,
353
+ "learning_rate": 8.545877247365158e-06,
354
+ "loss": 1.7184,
355
+ "step": 4700
356
+ },
357
+ {
358
+ "epoch": 2.975367931835786,
359
+ "grad_norm": 18.252887725830078,
360
+ "learning_rate": 8.514879107253565e-06,
361
+ "loss": 1.7165,
362
+ "step": 4800
363
+ },
364
+ {
365
+ "epoch": 2.9995352439969016,
366
+ "eval_loss": 1.4203578233718872,
367
+ "eval_runtime": 71.3513,
368
+ "eval_samples_per_second": 45.227,
369
+ "eval_steps_per_second": 22.62,
370
+ "step": 4839
371
+ },
372
+ {
373
+ "epoch": 3.0378001549186675,
374
+ "grad_norm": 3.237386703491211,
375
+ "learning_rate": 8.483880967141972e-06,
376
+ "loss": 1.7036,
377
+ "step": 4900
378
+ },
379
+ {
380
+ "epoch": 3.099767621998451,
381
+ "grad_norm": 3.4343390464782715,
382
+ "learning_rate": 8.452882827030378e-06,
383
+ "loss": 1.6937,
384
+ "step": 5000
385
+ },
386
+ {
387
+ "epoch": 3.161735089078234,
388
+ "grad_norm": 4.684260845184326,
389
+ "learning_rate": 8.421884686918785e-06,
390
+ "loss": 1.6825,
391
+ "step": 5100
392
+ },
393
+ {
394
+ "epoch": 3.223702556158017,
395
+ "grad_norm": 4.4700236320495605,
396
+ "learning_rate": 8.390886546807192e-06,
397
+ "loss": 1.7013,
398
+ "step": 5200
399
+ },
400
+ {
401
+ "epoch": 3.2856700232378,
402
+ "grad_norm": 3.5046918392181396,
403
+ "learning_rate": 8.359888406695599e-06,
404
+ "loss": 1.6819,
405
+ "step": 5300
406
+ },
407
+ {
408
+ "epoch": 3.347637490317583,
409
+ "grad_norm": 3.9169533252716064,
410
+ "learning_rate": 8.328890266584005e-06,
411
+ "loss": 1.6939,
412
+ "step": 5400
413
+ },
414
+ {
415
+ "epoch": 3.4096049573973666,
416
+ "grad_norm": 3.2398970127105713,
417
+ "learning_rate": 8.297892126472413e-06,
418
+ "loss": 1.689,
419
+ "step": 5500
420
+ },
421
+ {
422
+ "epoch": 3.4715724244771495,
423
+ "grad_norm": 5.543476104736328,
424
+ "learning_rate": 8.266893986360819e-06,
425
+ "loss": 1.6819,
426
+ "step": 5600
427
+ },
428
+ {
429
+ "epoch": 3.5335398915569325,
430
+ "grad_norm": 3.900956153869629,
431
+ "learning_rate": 8.235895846249225e-06,
432
+ "loss": 1.7153,
433
+ "step": 5700
434
+ },
435
+ {
436
+ "epoch": 3.595507358636716,
437
+ "grad_norm": 3.7085001468658447,
438
+ "learning_rate": 8.204897706137633e-06,
439
+ "loss": 1.6663,
440
+ "step": 5800
441
+ },
442
+ {
443
+ "epoch": 3.657474825716499,
444
+ "grad_norm": 3.672489881515503,
445
+ "learning_rate": 8.173899566026039e-06,
446
+ "loss": 1.6961,
447
+ "step": 5900
448
+ },
449
+ {
450
+ "epoch": 3.719442292796282,
451
+ "grad_norm": 5.308550834655762,
452
+ "learning_rate": 8.142901425914445e-06,
453
+ "loss": 1.6702,
454
+ "step": 6000
455
+ },
456
+ {
457
+ "epoch": 3.781409759876065,
458
+ "grad_norm": 6.25948429107666,
459
+ "learning_rate": 8.111903285802853e-06,
460
+ "loss": 1.7,
461
+ "step": 6100
462
+ },
463
+ {
464
+ "epoch": 3.843377226955848,
465
+ "grad_norm": 4.889005661010742,
466
+ "learning_rate": 8.080905145691259e-06,
467
+ "loss": 1.6783,
468
+ "step": 6200
469
+ },
470
+ {
471
+ "epoch": 3.905344694035631,
472
+ "grad_norm": 3.8252604007720947,
473
+ "learning_rate": 8.049907005579665e-06,
474
+ "loss": 1.6679,
475
+ "step": 6300
476
+ },
477
+ {
478
+ "epoch": 3.9673121611154145,
479
+ "grad_norm": 5.56712532043457,
480
+ "learning_rate": 8.018908865468073e-06,
481
+ "loss": 1.6778,
482
+ "step": 6400
483
+ },
484
+ {
485
+ "epoch": 3.9995352439969016,
486
+ "eval_loss": 1.4068138599395752,
487
+ "eval_runtime": 71.8216,
488
+ "eval_samples_per_second": 44.931,
489
+ "eval_steps_per_second": 22.472,
490
+ "step": 6452
491
+ },
492
+ {
493
+ "epoch": 4.029744384198296,
494
+ "grad_norm": 4.865719318389893,
495
+ "learning_rate": 7.987910725356479e-06,
496
+ "loss": 1.6718,
497
+ "step": 6500
498
+ },
499
+ {
500
+ "epoch": 4.091711851278079,
501
+ "grad_norm": 3.5504400730133057,
502
+ "learning_rate": 7.956912585244885e-06,
503
+ "loss": 1.6252,
504
+ "step": 6600
505
+ },
506
+ {
507
+ "epoch": 4.153679318357862,
508
+ "grad_norm": 2.8009276390075684,
509
+ "learning_rate": 7.925914445133293e-06,
510
+ "loss": 1.6529,
511
+ "step": 6700
512
+ },
513
+ {
514
+ "epoch": 4.215646785437645,
515
+ "grad_norm": 3.3487560749053955,
516
+ "learning_rate": 7.894916305021699e-06,
517
+ "loss": 1.6402,
518
+ "step": 6800
519
+ },
520
+ {
521
+ "epoch": 4.277614252517428,
522
+ "grad_norm": 3.037754535675049,
523
+ "learning_rate": 7.863918164910105e-06,
524
+ "loss": 1.6659,
525
+ "step": 6900
526
+ },
527
+ {
528
+ "epoch": 4.339581719597211,
529
+ "grad_norm": 3.340179204940796,
530
+ "learning_rate": 7.832920024798513e-06,
531
+ "loss": 1.646,
532
+ "step": 7000
533
+ },
534
+ {
535
+ "epoch": 4.401549186676995,
536
+ "grad_norm": 4.411381244659424,
537
+ "learning_rate": 7.801921884686919e-06,
538
+ "loss": 1.6583,
539
+ "step": 7100
540
+ },
541
+ {
542
+ "epoch": 4.463516653756778,
543
+ "grad_norm": 4.7763352394104,
544
+ "learning_rate": 7.770923744575325e-06,
545
+ "loss": 1.6524,
546
+ "step": 7200
547
+ },
548
+ {
549
+ "epoch": 4.525484120836561,
550
+ "grad_norm": 3.343606472015381,
551
+ "learning_rate": 7.739925604463733e-06,
552
+ "loss": 1.6454,
553
+ "step": 7300
554
+ },
555
+ {
556
+ "epoch": 4.587451587916344,
557
+ "grad_norm": 5.252141952514648,
558
+ "learning_rate": 7.70892746435214e-06,
559
+ "loss": 1.6555,
560
+ "step": 7400
561
+ },
562
+ {
563
+ "epoch": 4.649419054996127,
564
+ "grad_norm": 2.8249170780181885,
565
+ "learning_rate": 7.677929324240545e-06,
566
+ "loss": 1.6506,
567
+ "step": 7500
568
+ },
569
+ {
570
+ "epoch": 4.711386522075911,
571
+ "grad_norm": 6.525687217712402,
572
+ "learning_rate": 7.646931184128953e-06,
573
+ "loss": 1.6215,
574
+ "step": 7600
575
+ },
576
+ {
577
+ "epoch": 4.7733539891556935,
578
+ "grad_norm": 4.455771446228027,
579
+ "learning_rate": 7.615933044017359e-06,
580
+ "loss": 1.6465,
581
+ "step": 7700
582
+ },
583
+ {
584
+ "epoch": 4.8353214562354765,
585
+ "grad_norm": 2.8847739696502686,
586
+ "learning_rate": 7.584934903905766e-06,
587
+ "loss": 1.6411,
588
+ "step": 7800
589
+ },
590
+ {
591
+ "epoch": 4.8972889233152594,
592
+ "grad_norm": 2.8771958351135254,
593
+ "learning_rate": 7.553936763794173e-06,
594
+ "loss": 1.6353,
595
+ "step": 7900
596
+ },
597
+ {
598
+ "epoch": 4.959256390395042,
599
+ "grad_norm": 3.5082240104675293,
600
+ "learning_rate": 7.522938623682579e-06,
601
+ "loss": 1.6679,
602
+ "step": 8000
603
+ },
604
+ {
605
+ "epoch": 4.999535243996902,
606
+ "eval_loss": 1.3975656032562256,
607
+ "eval_runtime": 71.9374,
608
+ "eval_samples_per_second": 44.858,
609
+ "eval_steps_per_second": 22.436,
610
+ "step": 8065
611
+ },
612
+ {
613
+ "epoch": 5.021688613477924,
614
+ "grad_norm": 3.450159788131714,
615
+ "learning_rate": 7.491940483570986e-06,
616
+ "loss": 1.6245,
617
+ "step": 8100
618
+ },
619
+ {
620
+ "epoch": 5.083656080557708,
621
+ "grad_norm": 3.078734874725342,
622
+ "learning_rate": 7.460942343459393e-06,
623
+ "loss": 1.5949,
624
+ "step": 8200
625
+ },
626
+ {
627
+ "epoch": 5.1456235476374905,
628
+ "grad_norm": 3.123811721801758,
629
+ "learning_rate": 7.4299442033477995e-06,
630
+ "loss": 1.5804,
631
+ "step": 8300
632
+ },
633
+ {
634
+ "epoch": 5.2075910147172735,
635
+ "grad_norm": 2.746354341506958,
636
+ "learning_rate": 7.3989460632362065e-06,
637
+ "loss": 1.6264,
638
+ "step": 8400
639
+ },
640
+ {
641
+ "epoch": 5.269558481797056,
642
+ "grad_norm": 3.969360113143921,
643
+ "learning_rate": 7.3679479231246135e-06,
644
+ "loss": 1.6285,
645
+ "step": 8500
646
+ },
647
+ {
648
+ "epoch": 5.331525948876839,
649
+ "grad_norm": 11.645624160766602,
650
+ "learning_rate": 7.33694978301302e-06,
651
+ "loss": 1.6196,
652
+ "step": 8600
653
+ },
654
+ {
655
+ "epoch": 5.393493415956623,
656
+ "grad_norm": 4.237279415130615,
657
+ "learning_rate": 7.305951642901427e-06,
658
+ "loss": 1.6271,
659
+ "step": 8700
660
+ },
661
+ {
662
+ "epoch": 5.455460883036406,
663
+ "grad_norm": 5.038291931152344,
664
+ "learning_rate": 7.2749535027898336e-06,
665
+ "loss": 1.6097,
666
+ "step": 8800
667
+ },
668
+ {
669
+ "epoch": 5.517428350116189,
670
+ "grad_norm": 6.474031448364258,
671
+ "learning_rate": 7.24395536267824e-06,
672
+ "loss": 1.6148,
673
+ "step": 8900
674
+ },
675
+ {
676
+ "epoch": 5.579395817195972,
677
+ "grad_norm": 3.735469341278076,
678
+ "learning_rate": 7.212957222566647e-06,
679
+ "loss": 1.608,
680
+ "step": 9000
681
+ },
682
+ {
683
+ "epoch": 5.641363284275755,
684
+ "grad_norm": 4.774326801300049,
685
+ "learning_rate": 7.181959082455054e-06,
686
+ "loss": 1.6362,
687
+ "step": 9100
688
+ },
689
+ {
690
+ "epoch": 5.703330751355538,
691
+ "grad_norm": 6.094056606292725,
692
+ "learning_rate": 7.15096094234346e-06,
693
+ "loss": 1.5946,
694
+ "step": 9200
695
+ },
696
+ {
697
+ "epoch": 5.765298218435321,
698
+ "grad_norm": 2.613210439682007,
699
+ "learning_rate": 7.119962802231867e-06,
700
+ "loss": 1.6237,
701
+ "step": 9300
702
+ },
703
+ {
704
+ "epoch": 5.827265685515105,
705
+ "grad_norm": 5.291254043579102,
706
+ "learning_rate": 7.088964662120274e-06,
707
+ "loss": 1.6225,
708
+ "step": 9400
709
+ },
710
+ {
711
+ "epoch": 5.889233152594888,
712
+ "grad_norm": 3.139016628265381,
713
+ "learning_rate": 7.05796652200868e-06,
714
+ "loss": 1.6229,
715
+ "step": 9500
716
+ },
717
+ {
718
+ "epoch": 5.951200619674671,
719
+ "grad_norm": 2.9346985816955566,
720
+ "learning_rate": 7.026968381897087e-06,
721
+ "loss": 1.6091,
722
+ "step": 9600
723
+ },
724
+ {
725
+ "epoch": 5.999535243996902,
726
+ "eval_loss": 1.3893319368362427,
727
+ "eval_runtime": 71.9002,
728
+ "eval_samples_per_second": 44.882,
729
+ "eval_steps_per_second": 22.448,
730
+ "step": 9678
731
+ },
732
+ {
733
+ "epoch": 6.013632842757552,
734
+ "grad_norm": 2.6308019161224365,
735
+ "learning_rate": 6.995970241785494e-06,
736
+ "loss": 1.6145,
737
+ "step": 9700
738
+ },
739
+ {
740
+ "epoch": 6.075600309837335,
741
+ "grad_norm": 6.885835647583008,
742
+ "learning_rate": 6.9649721016739e-06,
743
+ "loss": 1.575,
744
+ "step": 9800
745
+ },
746
+ {
747
+ "epoch": 6.137567776917119,
748
+ "grad_norm": 8.84401798248291,
749
+ "learning_rate": 6.933973961562307e-06,
750
+ "loss": 1.5861,
751
+ "step": 9900
752
+ },
753
+ {
754
+ "epoch": 6.199535243996902,
755
+ "grad_norm": 3.830970287322998,
756
+ "learning_rate": 6.902975821450714e-06,
757
+ "loss": 1.6048,
758
+ "step": 10000
759
+ },
760
+ {
761
+ "epoch": 6.261502711076685,
762
+ "grad_norm": 2.8187596797943115,
763
+ "learning_rate": 6.87197768133912e-06,
764
+ "loss": 1.5923,
765
+ "step": 10100
766
+ },
767
+ {
768
+ "epoch": 6.323470178156468,
769
+ "grad_norm": 3.0165576934814453,
770
+ "learning_rate": 6.840979541227527e-06,
771
+ "loss": 1.5773,
772
+ "step": 10200
773
+ },
774
+ {
775
+ "epoch": 6.385437645236251,
776
+ "grad_norm": 8.133129119873047,
777
+ "learning_rate": 6.809981401115934e-06,
778
+ "loss": 1.576,
779
+ "step": 10300
780
+ },
781
+ {
782
+ "epoch": 6.447405112316034,
783
+ "grad_norm": 3.153198480606079,
784
+ "learning_rate": 6.77898326100434e-06,
785
+ "loss": 1.5922,
786
+ "step": 10400
787
+ },
788
+ {
789
+ "epoch": 6.5093725793958175,
790
+ "grad_norm": 3.5953714847564697,
791
+ "learning_rate": 6.7482951022938625e-06,
792
+ "loss": 1.5954,
793
+ "step": 10500
794
+ },
795
+ {
796
+ "epoch": 6.5713400464756,
797
+ "grad_norm": 3.0549609661102295,
798
+ "learning_rate": 6.7172969621822695e-06,
799
+ "loss": 1.5751,
800
+ "step": 10600
801
+ },
802
+ {
803
+ "epoch": 6.633307513555383,
804
+ "grad_norm": 3.2885892391204834,
805
+ "learning_rate": 6.686298822070676e-06,
806
+ "loss": 1.6103,
807
+ "step": 10700
808
+ },
809
+ {
810
+ "epoch": 6.695274980635166,
811
+ "grad_norm": 3.2449259757995605,
812
+ "learning_rate": 6.6553006819590826e-06,
813
+ "loss": 1.5879,
814
+ "step": 10800
815
+ },
816
+ {
817
+ "epoch": 6.757242447714949,
818
+ "grad_norm": 3.855558395385742,
819
+ "learning_rate": 6.6243025418474895e-06,
820
+ "loss": 1.5863,
821
+ "step": 10900
822
+ },
823
+ {
824
+ "epoch": 6.819209914794733,
825
+ "grad_norm": 7.185797214508057,
826
+ "learning_rate": 6.593304401735896e-06,
827
+ "loss": 1.5594,
828
+ "step": 11000
829
+ },
830
+ {
831
+ "epoch": 6.881177381874516,
832
+ "grad_norm": 5.2492804527282715,
833
+ "learning_rate": 6.562306261624303e-06,
834
+ "loss": 1.5716,
835
+ "step": 11100
836
+ },
837
+ {
838
+ "epoch": 6.943144848954299,
839
+ "grad_norm": 3.938108444213867,
840
+ "learning_rate": 6.53130812151271e-06,
841
+ "loss": 1.6033,
842
+ "step": 11200
843
+ },
844
+ {
845
+ "epoch": 6.999535243996902,
846
+ "eval_loss": 1.3868727684020996,
847
+ "eval_runtime": 71.7471,
848
+ "eval_samples_per_second": 44.977,
849
+ "eval_steps_per_second": 22.496,
850
+ "step": 11291
851
+ },
852
+ {
853
+ "epoch": 7.00557707203718,
854
+ "grad_norm": 3.1100411415100098,
855
+ "learning_rate": 6.500309981401116e-06,
856
+ "loss": 1.5961,
857
+ "step": 11300
858
+ },
859
+ {
860
+ "epoch": 7.067544539116963,
861
+ "grad_norm": 4.577524185180664,
862
+ "learning_rate": 6.469311841289523e-06,
863
+ "loss": 1.5562,
864
+ "step": 11400
865
+ },
866
+ {
867
+ "epoch": 7.129512006196746,
868
+ "grad_norm": 3.86492919921875,
869
+ "learning_rate": 6.43831370117793e-06,
870
+ "loss": 1.5757,
871
+ "step": 11500
872
+ },
873
+ {
874
+ "epoch": 7.19147947327653,
875
+ "grad_norm": 6.450658321380615,
876
+ "learning_rate": 6.407315561066336e-06,
877
+ "loss": 1.5467,
878
+ "step": 11600
879
+ },
880
+ {
881
+ "epoch": 7.253446940356313,
882
+ "grad_norm": 3.469329357147217,
883
+ "learning_rate": 6.376317420954743e-06,
884
+ "loss": 1.5676,
885
+ "step": 11700
886
+ },
887
+ {
888
+ "epoch": 7.315414407436096,
889
+ "grad_norm": 3.4291296005249023,
890
+ "learning_rate": 6.34531928084315e-06,
891
+ "loss": 1.5693,
892
+ "step": 11800
893
+ },
894
+ {
895
+ "epoch": 7.377381874515879,
896
+ "grad_norm": 2.776810884475708,
897
+ "learning_rate": 6.314321140731556e-06,
898
+ "loss": 1.5761,
899
+ "step": 11900
900
+ },
901
+ {
902
+ "epoch": 7.439349341595662,
903
+ "grad_norm": 3.687276840209961,
904
+ "learning_rate": 6.283323000619963e-06,
905
+ "loss": 1.5582,
906
+ "step": 12000
907
+ },
908
+ {
909
+ "epoch": 7.501316808675446,
910
+ "grad_norm": 3.623704195022583,
911
+ "learning_rate": 6.25232486050837e-06,
912
+ "loss": 1.557,
913
+ "step": 12100
914
+ },
915
+ {
916
+ "epoch": 7.563284275755229,
917
+ "grad_norm": 2.607255220413208,
918
+ "learning_rate": 6.221326720396776e-06,
919
+ "loss": 1.5467,
920
+ "step": 12200
921
+ },
922
+ {
923
+ "epoch": 7.625251742835012,
924
+ "grad_norm": 4.738313674926758,
925
+ "learning_rate": 6.190328580285183e-06,
926
+ "loss": 1.5648,
927
+ "step": 12300
928
+ },
929
+ {
930
+ "epoch": 7.687219209914795,
931
+ "grad_norm": 3.4459781646728516,
932
+ "learning_rate": 6.15933044017359e-06,
933
+ "loss": 1.57,
934
+ "step": 12400
935
+ },
936
+ {
937
+ "epoch": 7.749186676994578,
938
+ "grad_norm": 2.6644325256347656,
939
+ "learning_rate": 6.128332300061996e-06,
940
+ "loss": 1.5757,
941
+ "step": 12500
942
+ },
943
+ {
944
+ "epoch": 7.811154144074361,
945
+ "grad_norm": 6.151153564453125,
946
+ "learning_rate": 6.097334159950403e-06,
947
+ "loss": 1.5653,
948
+ "step": 12600
949
+ },
950
+ {
951
+ "epoch": 7.873121611154144,
952
+ "grad_norm": 4.14183235168457,
953
+ "learning_rate": 6.06633601983881e-06,
954
+ "loss": 1.5708,
955
+ "step": 12700
956
+ },
957
+ {
958
+ "epoch": 7.935089078233927,
959
+ "grad_norm": 11.848958015441895,
960
+ "learning_rate": 6.035337879727216e-06,
961
+ "loss": 1.5633,
962
+ "step": 12800
963
+ },
964
+ {
965
+ "epoch": 7.99705654531371,
966
+ "grad_norm": 3.1628572940826416,
967
+ "learning_rate": 6.004339739615623e-06,
968
+ "loss": 1.548,
969
+ "step": 12900
970
+ },
971
+ {
972
+ "epoch": 7.999535243996902,
973
+ "eval_loss": 1.3799071311950684,
974
+ "eval_runtime": 71.5231,
975
+ "eval_samples_per_second": 45.118,
976
+ "eval_steps_per_second": 22.566,
977
+ "step": 12904
978
+ },
979
+ {
980
+ "epoch": 8.059488768396593,
981
+ "grad_norm": 2.565005302429199,
982
+ "learning_rate": 5.97334159950403e-06,
983
+ "loss": 1.5559,
984
+ "step": 13000
985
+ },
986
+ {
987
+ "epoch": 8.121456235476375,
988
+ "grad_norm": 3.772648334503174,
989
+ "learning_rate": 5.942343459392436e-06,
990
+ "loss": 1.5516,
991
+ "step": 13100
992
+ },
993
+ {
994
+ "epoch": 8.183423702556158,
995
+ "grad_norm": 3.1010353565216064,
996
+ "learning_rate": 5.911345319280843e-06,
997
+ "loss": 1.5494,
998
+ "step": 13200
999
+ },
1000
+ {
1001
+ "epoch": 8.24539116963594,
1002
+ "grad_norm": 4.525482177734375,
1003
+ "learning_rate": 5.88034717916925e-06,
1004
+ "loss": 1.5391,
1005
+ "step": 13300
1006
+ },
1007
+ {
1008
+ "epoch": 8.307358636715724,
1009
+ "grad_norm": 4.108111381530762,
1010
+ "learning_rate": 5.8493490390576564e-06,
1011
+ "loss": 1.5798,
1012
+ "step": 13400
1013
+ },
1014
+ {
1015
+ "epoch": 8.369326103795508,
1016
+ "grad_norm": 3.5569262504577637,
1017
+ "learning_rate": 5.818350898946063e-06,
1018
+ "loss": 1.5332,
1019
+ "step": 13500
1020
+ },
1021
+ {
1022
+ "epoch": 8.43129357087529,
1023
+ "grad_norm": 2.829259157180786,
1024
+ "learning_rate": 5.78735275883447e-06,
1025
+ "loss": 1.5564,
1026
+ "step": 13600
1027
+ },
1028
+ {
1029
+ "epoch": 8.493261037955074,
1030
+ "grad_norm": 9.748035430908203,
1031
+ "learning_rate": 5.7563546187228765e-06,
1032
+ "loss": 1.5082,
1033
+ "step": 13700
1034
+ },
1035
+ {
1036
+ "epoch": 8.555228505034856,
1037
+ "grad_norm": 5.017675399780273,
1038
+ "learning_rate": 5.7253564786112835e-06,
1039
+ "loss": 1.5086,
1040
+ "step": 13800
1041
+ },
1042
+ {
1043
+ "epoch": 8.61719597211464,
1044
+ "grad_norm": 3.3384227752685547,
1045
+ "learning_rate": 5.6943583384996905e-06,
1046
+ "loss": 1.5682,
1047
+ "step": 13900
1048
+ },
1049
+ {
1050
+ "epoch": 8.679163439194422,
1051
+ "grad_norm": 3.0591983795166016,
1052
+ "learning_rate": 5.663670179789214e-06,
1053
+ "loss": 1.5359,
1054
+ "step": 14000
1055
+ },
1056
+ {
1057
+ "epoch": 8.741130906274206,
1058
+ "grad_norm": 3.4246435165405273,
1059
+ "learning_rate": 5.632672039677621e-06,
1060
+ "loss": 1.5611,
1061
+ "step": 14100
1062
+ },
1063
+ {
1064
+ "epoch": 8.80309837335399,
1065
+ "grad_norm": 3.443187713623047,
1066
+ "learning_rate": 5.601673899566027e-06,
1067
+ "loss": 1.5592,
1068
+ "step": 14200
1069
+ },
1070
+ {
1071
+ "epoch": 8.865065840433772,
1072
+ "grad_norm": 2.6613450050354004,
1073
+ "learning_rate": 5.570675759454434e-06,
1074
+ "loss": 1.5484,
1075
+ "step": 14300
1076
+ },
1077
+ {
1078
+ "epoch": 8.927033307513556,
1079
+ "grad_norm": 3.1063573360443115,
1080
+ "learning_rate": 5.539677619342841e-06,
1081
+ "loss": 1.5146,
1082
+ "step": 14400
1083
+ },
1084
+ {
1085
+ "epoch": 8.989000774593338,
1086
+ "grad_norm": 3.6589224338531494,
1087
+ "learning_rate": 5.508679479231247e-06,
1088
+ "loss": 1.5641,
1089
+ "step": 14500
1090
+ },
1091
+ {
1092
+ "epoch": 8.999535243996903,
1093
+ "eval_loss": 1.3773841857910156,
1094
+ "eval_runtime": 71.387,
1095
+ "eval_samples_per_second": 45.204,
1096
+ "eval_steps_per_second": 22.609,
1097
+ "step": 14517
1098
+ },
1099
+ {
1100
+ "epoch": 9.05143299767622,
1101
+ "grad_norm": 3.909100294113159,
1102
+ "learning_rate": 5.477681339119654e-06,
1103
+ "loss": 1.5137,
1104
+ "step": 14600
1105
+ },
1106
+ {
1107
+ "epoch": 9.113400464756003,
1108
+ "grad_norm": 2.70833683013916,
1109
+ "learning_rate": 5.446683199008061e-06,
1110
+ "loss": 1.519,
1111
+ "step": 14700
1112
+ },
1113
+ {
1114
+ "epoch": 9.175367931835787,
1115
+ "grad_norm": 2.6168839931488037,
1116
+ "learning_rate": 5.415685058896467e-06,
1117
+ "loss": 1.5082,
1118
+ "step": 14800
1119
+ },
1120
+ {
1121
+ "epoch": 9.237335398915569,
1122
+ "grad_norm": 2.4784512519836426,
1123
+ "learning_rate": 5.384686918784874e-06,
1124
+ "loss": 1.5098,
1125
+ "step": 14900
1126
+ },
1127
+ {
1128
+ "epoch": 9.299302865995353,
1129
+ "grad_norm": 2.7963085174560547,
1130
+ "learning_rate": 5.353688778673281e-06,
1131
+ "loss": 1.5257,
1132
+ "step": 15000
1133
+ },
1134
+ {
1135
+ "epoch": 9.361270333075135,
1136
+ "grad_norm": 2.6264543533325195,
1137
+ "learning_rate": 5.322690638561687e-06,
1138
+ "loss": 1.5122,
1139
+ "step": 15100
1140
+ },
1141
+ {
1142
+ "epoch": 9.423237800154919,
1143
+ "grad_norm": 2.796173095703125,
1144
+ "learning_rate": 5.291692498450094e-06,
1145
+ "loss": 1.5629,
1146
+ "step": 15200
1147
+ },
1148
+ {
1149
+ "epoch": 9.485205267234702,
1150
+ "grad_norm": 2.661559820175171,
1151
+ "learning_rate": 5.260694358338501e-06,
1152
+ "loss": 1.5337,
1153
+ "step": 15300
1154
+ },
1155
+ {
1156
+ "epoch": 9.547172734314485,
1157
+ "grad_norm": 2.721785068511963,
1158
+ "learning_rate": 5.229696218226907e-06,
1159
+ "loss": 1.5443,
1160
+ "step": 15400
1161
+ },
1162
+ {
1163
+ "epoch": 9.609140201394268,
1164
+ "grad_norm": 3.5480453968048096,
1165
+ "learning_rate": 5.198698078115314e-06,
1166
+ "loss": 1.5475,
1167
+ "step": 15500
1168
+ },
1169
+ {
1170
+ "epoch": 9.67110766847405,
1171
+ "grad_norm": 4.556975841522217,
1172
+ "learning_rate": 5.167699938003721e-06,
1173
+ "loss": 1.5032,
1174
+ "step": 15600
1175
+ },
1176
+ {
1177
+ "epoch": 9.733075135553834,
1178
+ "grad_norm": 3.4796106815338135,
1179
+ "learning_rate": 5.136701797892127e-06,
1180
+ "loss": 1.5328,
1181
+ "step": 15700
1182
+ },
1183
+ {
1184
+ "epoch": 9.795042602633618,
1185
+ "grad_norm": 2.3589675426483154,
1186
+ "learning_rate": 5.105703657780534e-06,
1187
+ "loss": 1.5333,
1188
+ "step": 15800
1189
+ },
1190
+ {
1191
+ "epoch": 9.8570100697134,
1192
+ "grad_norm": 8.50188159942627,
1193
+ "learning_rate": 5.074705517668941e-06,
1194
+ "loss": 1.5401,
1195
+ "step": 15900
1196
+ },
1197
+ {
1198
+ "epoch": 9.918977536793184,
1199
+ "grad_norm": 2.818558931350708,
1200
+ "learning_rate": 5.043707377557347e-06,
1201
+ "loss": 1.5305,
1202
+ "step": 16000
1203
+ },
1204
+ {
1205
+ "epoch": 9.980945003872966,
1206
+ "grad_norm": 7.929882526397705,
1207
+ "learning_rate": 5.01301921884687e-06,
1208
+ "loss": 1.5037,
1209
+ "step": 16100
1210
+ },
1211
+ {
1212
+ "epoch": 9.999535243996903,
1213
+ "eval_loss": 1.3749291896820068,
1214
+ "eval_runtime": 71.4165,
1215
+ "eval_samples_per_second": 45.186,
1216
+ "eval_steps_per_second": 22.6,
1217
+ "step": 16130
1218
+ },
1219
+ {
1220
+ "epoch": 10.043377226955847,
1221
+ "grad_norm": 2.9387943744659424,
1222
+ "learning_rate": 4.982021078735277e-06,
1223
+ "loss": 1.5233,
1224
+ "step": 16200
1225
+ },
1226
+ {
1227
+ "epoch": 10.105344694035631,
1228
+ "grad_norm": 3.994661331176758,
1229
+ "learning_rate": 4.951022938623683e-06,
1230
+ "loss": 1.4957,
1231
+ "step": 16300
1232
+ },
1233
+ {
1234
+ "epoch": 10.167312161115415,
1235
+ "grad_norm": 5.1318840980529785,
1236
+ "learning_rate": 4.92002479851209e-06,
1237
+ "loss": 1.5198,
1238
+ "step": 16400
1239
+ },
1240
+ {
1241
+ "epoch": 10.229279628195197,
1242
+ "grad_norm": 5.07450008392334,
1243
+ "learning_rate": 4.889026658400497e-06,
1244
+ "loss": 1.5168,
1245
+ "step": 16500
1246
+ },
1247
+ {
1248
+ "epoch": 10.291247095274981,
1249
+ "grad_norm": 3.5865488052368164,
1250
+ "learning_rate": 4.858028518288903e-06,
1251
+ "loss": 1.5136,
1252
+ "step": 16600
1253
+ },
1254
+ {
1255
+ "epoch": 10.353214562354763,
1256
+ "grad_norm": 3.8123815059661865,
1257
+ "learning_rate": 4.82703037817731e-06,
1258
+ "loss": 1.5095,
1259
+ "step": 16700
1260
+ },
1261
+ {
1262
+ "epoch": 10.415182029434547,
1263
+ "grad_norm": 3.0475172996520996,
1264
+ "learning_rate": 4.796032238065717e-06,
1265
+ "loss": 1.5005,
1266
+ "step": 16800
1267
+ },
1268
+ {
1269
+ "epoch": 10.47714949651433,
1270
+ "grad_norm": 3.0361108779907227,
1271
+ "learning_rate": 4.765034097954123e-06,
1272
+ "loss": 1.5169,
1273
+ "step": 16900
1274
+ },
1275
+ {
1276
+ "epoch": 10.539116963594113,
1277
+ "grad_norm": 2.7775065898895264,
1278
+ "learning_rate": 4.73403595784253e-06,
1279
+ "loss": 1.5025,
1280
+ "step": 17000
1281
+ },
1282
+ {
1283
+ "epoch": 10.601084430673897,
1284
+ "grad_norm": 4.8608598709106445,
1285
+ "learning_rate": 4.703037817730937e-06,
1286
+ "loss": 1.5216,
1287
+ "step": 17100
1288
+ },
1289
+ {
1290
+ "epoch": 10.663051897753679,
1291
+ "grad_norm": 3.024165630340576,
1292
+ "learning_rate": 4.672039677619343e-06,
1293
+ "loss": 1.5107,
1294
+ "step": 17200
1295
+ },
1296
+ {
1297
+ "epoch": 10.725019364833463,
1298
+ "grad_norm": 3.055216073989868,
1299
+ "learning_rate": 4.64104153750775e-06,
1300
+ "loss": 1.5083,
1301
+ "step": 17300
1302
+ },
1303
+ {
1304
+ "epoch": 10.786986831913246,
1305
+ "grad_norm": 2.7835144996643066,
1306
+ "learning_rate": 4.610043397396157e-06,
1307
+ "loss": 1.501,
1308
+ "step": 17400
1309
+ },
1310
+ {
1311
+ "epoch": 10.848954298993029,
1312
+ "grad_norm": 3.1248793601989746,
1313
+ "learning_rate": 4.579045257284563e-06,
1314
+ "loss": 1.519,
1315
+ "step": 17500
1316
+ },
1317
+ {
1318
+ "epoch": 10.910921766072812,
1319
+ "grad_norm": 3.8737070560455322,
1320
+ "learning_rate": 4.54804711717297e-06,
1321
+ "loss": 1.5087,
1322
+ "step": 17600
1323
+ },
1324
+ {
1325
+ "epoch": 10.972889233152594,
1326
+ "grad_norm": 2.474802017211914,
1327
+ "learning_rate": 4.517048977061377e-06,
1328
+ "loss": 1.4803,
1329
+ "step": 17700
1330
+ },
1331
+ {
1332
+ "epoch": 10.999535243996903,
1333
+ "eval_loss": 1.3729863166809082,
1334
+ "eval_runtime": 71.8366,
1335
+ "eval_samples_per_second": 44.921,
1336
+ "eval_steps_per_second": 22.468,
1337
+ "step": 17743
1338
+ },
1339
+ {
1340
+ "epoch": 11.035321456235476,
1341
+ "grad_norm": 2.705595016479492,
1342
+ "learning_rate": 4.486050836949783e-06,
1343
+ "loss": 1.5091,
1344
+ "step": 17800
1345
+ },
1346
+ {
1347
+ "epoch": 11.09728892331526,
1348
+ "grad_norm": 2.9070913791656494,
1349
+ "learning_rate": 4.45505269683819e-06,
1350
+ "loss": 1.5065,
1351
+ "step": 17900
1352
+ },
1353
+ {
1354
+ "epoch": 11.159256390395043,
1355
+ "grad_norm": 2.360722780227661,
1356
+ "learning_rate": 4.424054556726597e-06,
1357
+ "loss": 1.4999,
1358
+ "step": 18000
1359
+ },
1360
+ {
1361
+ "epoch": 11.221223857474826,
1362
+ "grad_norm": 2.7821319103240967,
1363
+ "learning_rate": 4.393056416615003e-06,
1364
+ "loss": 1.4863,
1365
+ "step": 18100
1366
+ },
1367
+ {
1368
+ "epoch": 11.28319132455461,
1369
+ "grad_norm": 3.3408045768737793,
1370
+ "learning_rate": 4.36205827650341e-06,
1371
+ "loss": 1.4846,
1372
+ "step": 18200
1373
+ },
1374
+ {
1375
+ "epoch": 11.345158791634391,
1376
+ "grad_norm": 2.658116579055786,
1377
+ "learning_rate": 4.331060136391817e-06,
1378
+ "loss": 1.4911,
1379
+ "step": 18300
1380
+ },
1381
+ {
1382
+ "epoch": 11.407126258714175,
1383
+ "grad_norm": 7.110032081604004,
1384
+ "learning_rate": 4.300061996280223e-06,
1385
+ "loss": 1.5037,
1386
+ "step": 18400
1387
+ },
1388
+ {
1389
+ "epoch": 11.469093725793957,
1390
+ "grad_norm": 3.009939193725586,
1391
+ "learning_rate": 4.26906385616863e-06,
1392
+ "loss": 1.481,
1393
+ "step": 18500
1394
+ },
1395
+ {
1396
+ "epoch": 11.531061192873741,
1397
+ "grad_norm": 3.2190310955047607,
1398
+ "learning_rate": 4.238065716057037e-06,
1399
+ "loss": 1.5163,
1400
+ "step": 18600
1401
+ },
1402
+ {
1403
+ "epoch": 11.593028659953525,
1404
+ "grad_norm": 3.240391969680786,
1405
+ "learning_rate": 4.2070675759454435e-06,
1406
+ "loss": 1.4934,
1407
+ "step": 18700
1408
+ },
1409
+ {
1410
+ "epoch": 11.654996127033307,
1411
+ "grad_norm": 2.4078099727630615,
1412
+ "learning_rate": 4.1760694358338505e-06,
1413
+ "loss": 1.5076,
1414
+ "step": 18800
1415
+ },
1416
+ {
1417
+ "epoch": 11.716963594113091,
1418
+ "grad_norm": 2.757641077041626,
1419
+ "learning_rate": 4.1450712957222575e-06,
1420
+ "loss": 1.5011,
1421
+ "step": 18900
1422
+ },
1423
+ {
1424
+ "epoch": 11.778931061192873,
1425
+ "grad_norm": 3.859056234359741,
1426
+ "learning_rate": 4.11438313701178e-06,
1427
+ "loss": 1.5009,
1428
+ "step": 19000
1429
+ },
1430
+ {
1431
+ "epoch": 11.840898528272657,
1432
+ "grad_norm": 2.826043128967285,
1433
+ "learning_rate": 4.083384996900187e-06,
1434
+ "loss": 1.5103,
1435
+ "step": 19100
1436
+ },
1437
+ {
1438
+ "epoch": 11.90286599535244,
1439
+ "grad_norm": 3.0263473987579346,
1440
+ "learning_rate": 4.052386856788593e-06,
1441
+ "loss": 1.4945,
1442
+ "step": 19200
1443
+ },
1444
+ {
1445
+ "epoch": 11.964833462432223,
1446
+ "grad_norm": 3.322974920272827,
1447
+ "learning_rate": 4.021388716677e-06,
1448
+ "loss": 1.4653,
1449
+ "step": 19300
1450
+ },
1451
+ {
1452
+ "epoch": 11.999535243996903,
1453
+ "eval_loss": 1.3713266849517822,
1454
+ "eval_runtime": 71.4255,
1455
+ "eval_samples_per_second": 45.18,
1456
+ "eval_steps_per_second": 22.597,
1457
+ "step": 19356
1458
+ },
1459
+ {
1460
+ "epoch": 12.027265685515104,
1461
+ "grad_norm": 3.6630640029907227,
1462
+ "learning_rate": 3.990390576565407e-06,
1463
+ "loss": 1.5061,
1464
+ "step": 19400
1465
+ },
1466
+ {
1467
+ "epoch": 12.089233152594888,
1468
+ "grad_norm": 2.810619831085205,
1469
+ "learning_rate": 3.959392436453813e-06,
1470
+ "loss": 1.5,
1471
+ "step": 19500
1472
+ },
1473
+ {
1474
+ "epoch": 12.15120061967467,
1475
+ "grad_norm": 2.4999682903289795,
1476
+ "learning_rate": 3.92839429634222e-06,
1477
+ "loss": 1.4863,
1478
+ "step": 19600
1479
+ },
1480
+ {
1481
+ "epoch": 12.213168086754454,
1482
+ "grad_norm": 3.3250415325164795,
1483
+ "learning_rate": 3.897396156230627e-06,
1484
+ "loss": 1.4719,
1485
+ "step": 19700
1486
+ },
1487
+ {
1488
+ "epoch": 12.275135553834238,
1489
+ "grad_norm": 3.2047810554504395,
1490
+ "learning_rate": 3.866398016119033e-06,
1491
+ "loss": 1.492,
1492
+ "step": 19800
1493
+ },
1494
+ {
1495
+ "epoch": 12.33710302091402,
1496
+ "grad_norm": 2.8707590103149414,
1497
+ "learning_rate": 3.83539987600744e-06,
1498
+ "loss": 1.4772,
1499
+ "step": 19900
1500
+ },
1501
+ {
1502
+ "epoch": 12.399070487993804,
1503
+ "grad_norm": 5.695312023162842,
1504
+ "learning_rate": 3.804401735895847e-06,
1505
+ "loss": 1.4785,
1506
+ "step": 20000
1507
+ },
1508
+ {
1509
+ "epoch": 12.461037955073586,
1510
+ "grad_norm": 3.48818039894104,
1511
+ "learning_rate": 3.7734035957842536e-06,
1512
+ "loss": 1.4789,
1513
+ "step": 20100
1514
+ },
1515
+ {
1516
+ "epoch": 12.52300542215337,
1517
+ "grad_norm": 2.8195247650146484,
1518
+ "learning_rate": 3.7424054556726597e-06,
1519
+ "loss": 1.4859,
1520
+ "step": 20200
1521
+ },
1522
+ {
1523
+ "epoch": 12.584972889233153,
1524
+ "grad_norm": 3.1309738159179688,
1525
+ "learning_rate": 3.7114073155610663e-06,
1526
+ "loss": 1.4914,
1527
+ "step": 20300
1528
+ },
1529
+ {
1530
+ "epoch": 12.646940356312935,
1531
+ "grad_norm": 2.7474892139434814,
1532
+ "learning_rate": 3.6804091754494733e-06,
1533
+ "loss": 1.4776,
1534
+ "step": 20400
1535
+ },
1536
+ {
1537
+ "epoch": 12.70890782339272,
1538
+ "grad_norm": 3.022172451019287,
1539
+ "learning_rate": 3.64941103533788e-06,
1540
+ "loss": 1.498,
1541
+ "step": 20500
1542
+ },
1543
+ {
1544
+ "epoch": 12.770875290472501,
1545
+ "grad_norm": 2.747607946395874,
1546
+ "learning_rate": 3.6184128952262864e-06,
1547
+ "loss": 1.4901,
1548
+ "step": 20600
1549
+ },
1550
+ {
1551
+ "epoch": 12.832842757552285,
1552
+ "grad_norm": 3.077296733856201,
1553
+ "learning_rate": 3.5874147551146933e-06,
1554
+ "loss": 1.4773,
1555
+ "step": 20700
1556
+ },
1557
+ {
1558
+ "epoch": 12.894810224632067,
1559
+ "grad_norm": 4.3411078453063965,
1560
+ "learning_rate": 3.5564166150031e-06,
1561
+ "loss": 1.4909,
1562
+ "step": 20800
1563
+ },
1564
+ {
1565
+ "epoch": 12.956777691711851,
1566
+ "grad_norm": 3.112104654312134,
1567
+ "learning_rate": 3.5254184748915065e-06,
1568
+ "loss": 1.5027,
1569
+ "step": 20900
1570
+ },
1571
+ {
1572
+ "epoch": 12.999535243996903,
1573
+ "eval_loss": 1.3685630559921265,
1574
+ "eval_runtime": 70.8923,
1575
+ "eval_samples_per_second": 45.52,
1576
+ "eval_steps_per_second": 22.767,
1577
+ "step": 20969
1578
+ },
1579
+ {
1580
+ "epoch": 13.019209914794732,
1581
+ "grad_norm": 2.8162424564361572,
1582
+ "learning_rate": 3.4944203347799134e-06,
1583
+ "loss": 1.4868,
1584
+ "step": 21000
1585
+ },
1586
+ {
1587
+ "epoch": 13.081177381874516,
1588
+ "grad_norm": 2.862400770187378,
1589
+ "learning_rate": 3.4637321760694358e-06,
1590
+ "loss": 1.4565,
1591
+ "step": 21100
1592
+ },
1593
+ {
1594
+ "epoch": 13.143144848954298,
1595
+ "grad_norm": 9.166791915893555,
1596
+ "learning_rate": 3.4327340359578427e-06,
1597
+ "loss": 1.4968,
1598
+ "step": 21200
1599
+ },
1600
+ {
1601
+ "epoch": 13.205112316034082,
1602
+ "grad_norm": 2.9333505630493164,
1603
+ "learning_rate": 3.4017358958462493e-06,
1604
+ "loss": 1.4707,
1605
+ "step": 21300
1606
+ },
1607
+ {
1608
+ "epoch": 13.267079783113866,
1609
+ "grad_norm": 3.7271652221679688,
1610
+ "learning_rate": 3.370737755734656e-06,
1611
+ "loss": 1.4769,
1612
+ "step": 21400
1613
+ },
1614
+ {
1615
+ "epoch": 13.329047250193648,
1616
+ "grad_norm": 4.895938396453857,
1617
+ "learning_rate": 3.339739615623063e-06,
1618
+ "loss": 1.4825,
1619
+ "step": 21500
1620
+ },
1621
+ {
1622
+ "epoch": 13.391014717273432,
1623
+ "grad_norm": 4.098718643188477,
1624
+ "learning_rate": 3.3087414755114694e-06,
1625
+ "loss": 1.4473,
1626
+ "step": 21600
1627
+ },
1628
+ {
1629
+ "epoch": 13.452982184353214,
1630
+ "grad_norm": 2.393143892288208,
1631
+ "learning_rate": 3.277743335399876e-06,
1632
+ "loss": 1.4547,
1633
+ "step": 21700
1634
+ },
1635
+ {
1636
+ "epoch": 13.514949651432998,
1637
+ "grad_norm": 2.8273305892944336,
1638
+ "learning_rate": 3.246745195288283e-06,
1639
+ "loss": 1.4937,
1640
+ "step": 21800
1641
+ },
1642
+ {
1643
+ "epoch": 13.576917118512782,
1644
+ "grad_norm": 2.560944080352783,
1645
+ "learning_rate": 3.2157470551766895e-06,
1646
+ "loss": 1.4617,
1647
+ "step": 21900
1648
+ },
1649
+ {
1650
+ "epoch": 13.638884585592564,
1651
+ "grad_norm": 2.7508301734924316,
1652
+ "learning_rate": 3.184748915065096e-06,
1653
+ "loss": 1.4681,
1654
+ "step": 22000
1655
+ },
1656
+ {
1657
+ "epoch": 13.700852052672348,
1658
+ "grad_norm": 3.0896778106689453,
1659
+ "learning_rate": 3.153750774953503e-06,
1660
+ "loss": 1.4863,
1661
+ "step": 22100
1662
+ },
1663
+ {
1664
+ "epoch": 13.76281951975213,
1665
+ "grad_norm": 3.0587971210479736,
1666
+ "learning_rate": 3.1227526348419096e-06,
1667
+ "loss": 1.4872,
1668
+ "step": 22200
1669
+ },
1670
+ {
1671
+ "epoch": 13.824786986831914,
1672
+ "grad_norm": 5.594135284423828,
1673
+ "learning_rate": 3.091754494730316e-06,
1674
+ "loss": 1.4551,
1675
+ "step": 22300
1676
+ },
1677
+ {
1678
+ "epoch": 13.886754453911696,
1679
+ "grad_norm": 4.74953031539917,
1680
+ "learning_rate": 3.060756354618723e-06,
1681
+ "loss": 1.458,
1682
+ "step": 22400
1683
+ },
1684
+ {
1685
+ "epoch": 13.94872192099148,
1686
+ "grad_norm": 2.3097078800201416,
1687
+ "learning_rate": 3.0297582145071297e-06,
1688
+ "loss": 1.4707,
1689
+ "step": 22500
1690
+ },
1691
+ {
1692
+ "epoch": 13.999535243996903,
1693
+ "eval_loss": 1.367042899131775,
1694
+ "eval_runtime": 71.4468,
1695
+ "eval_samples_per_second": 45.167,
1696
+ "eval_steps_per_second": 22.59,
1697
+ "step": 22582
1698
+ },
1699
+ {
1700
+ "epoch": 14.01115414407436,
1701
+ "grad_norm": 2.73579478263855,
1702
+ "learning_rate": 2.9987600743955362e-06,
1703
+ "loss": 1.4815,
1704
+ "step": 22600
1705
+ },
1706
+ {
1707
+ "epoch": 14.073121611154145,
1708
+ "grad_norm": 2.643583297729492,
1709
+ "learning_rate": 2.967761934283943e-06,
1710
+ "loss": 1.4623,
1711
+ "step": 22700
1712
+ },
1713
+ {
1714
+ "epoch": 14.135089078233927,
1715
+ "grad_norm": 4.030912399291992,
1716
+ "learning_rate": 2.9367637941723498e-06,
1717
+ "loss": 1.4406,
1718
+ "step": 22800
1719
+ },
1720
+ {
1721
+ "epoch": 14.19705654531371,
1722
+ "grad_norm": 3.95310378074646,
1723
+ "learning_rate": 2.9057656540607563e-06,
1724
+ "loss": 1.4533,
1725
+ "step": 22900
1726
+ },
1727
+ {
1728
+ "epoch": 14.259024012393493,
1729
+ "grad_norm": 2.382369041442871,
1730
+ "learning_rate": 2.8747675139491633e-06,
1731
+ "loss": 1.4729,
1732
+ "step": 23000
1733
+ },
1734
+ {
1735
+ "epoch": 14.320991479473276,
1736
+ "grad_norm": 3.3369767665863037,
1737
+ "learning_rate": 2.84376937383757e-06,
1738
+ "loss": 1.447,
1739
+ "step": 23100
1740
+ },
1741
+ {
1742
+ "epoch": 14.38295894655306,
1743
+ "grad_norm": 3.532909393310547,
1744
+ "learning_rate": 2.8127712337259764e-06,
1745
+ "loss": 1.4646,
1746
+ "step": 23200
1747
+ },
1748
+ {
1749
+ "epoch": 14.444926413632842,
1750
+ "grad_norm": 2.707247257232666,
1751
+ "learning_rate": 2.7817730936143834e-06,
1752
+ "loss": 1.4782,
1753
+ "step": 23300
1754
+ },
1755
+ {
1756
+ "epoch": 14.506893880712626,
1757
+ "grad_norm": 2.511441707611084,
1758
+ "learning_rate": 2.75077495350279e-06,
1759
+ "loss": 1.4732,
1760
+ "step": 23400
1761
+ },
1762
+ {
1763
+ "epoch": 14.568861347792408,
1764
+ "grad_norm": 6.0035576820373535,
1765
+ "learning_rate": 2.7197768133911965e-06,
1766
+ "loss": 1.4625,
1767
+ "step": 23500
1768
+ },
1769
+ {
1770
+ "epoch": 14.630828814872192,
1771
+ "grad_norm": 3.396369695663452,
1772
+ "learning_rate": 2.6887786732796035e-06,
1773
+ "loss": 1.4794,
1774
+ "step": 23600
1775
+ },
1776
+ {
1777
+ "epoch": 14.692796281951976,
1778
+ "grad_norm": 2.5729544162750244,
1779
+ "learning_rate": 2.65778053316801e-06,
1780
+ "loss": 1.4666,
1781
+ "step": 23700
1782
+ },
1783
+ {
1784
+ "epoch": 14.754763749031758,
1785
+ "grad_norm": 2.8480477333068848,
1786
+ "learning_rate": 2.6270923744575328e-06,
1787
+ "loss": 1.4644,
1788
+ "step": 23800
1789
+ },
1790
+ {
1791
+ "epoch": 14.816731216111542,
1792
+ "grad_norm": 2.4807331562042236,
1793
+ "learning_rate": 2.5960942343459393e-06,
1794
+ "loss": 1.4752,
1795
+ "step": 23900
1796
+ },
1797
+ {
1798
+ "epoch": 14.878698683191324,
1799
+ "grad_norm": 2.850637435913086,
1800
+ "learning_rate": 2.5650960942343463e-06,
1801
+ "loss": 1.4769,
1802
+ "step": 24000
1803
+ },
1804
+ {
1805
+ "epoch": 14.940666150271108,
1806
+ "grad_norm": 2.480095148086548,
1807
+ "learning_rate": 2.534097954122753e-06,
1808
+ "loss": 1.4791,
1809
+ "step": 24100
1810
+ },
1811
+ {
1812
+ "epoch": 14.999535243996903,
1813
+ "eval_loss": 1.3670425415039062,
1814
+ "eval_runtime": 71.2802,
1815
+ "eval_samples_per_second": 45.272,
1816
+ "eval_steps_per_second": 22.643,
1817
+ "step": 24195
1818
+ },
1819
+ {
1820
+ "epoch": 15.003098373353989,
1821
+ "grad_norm": 2.5988998413085938,
1822
+ "learning_rate": 2.5030998140111594e-06,
1823
+ "loss": 1.4693,
1824
+ "step": 24200
1825
+ },
1826
+ {
1827
+ "epoch": 15.065065840433773,
1828
+ "grad_norm": 2.674956798553467,
1829
+ "learning_rate": 2.4721016738995664e-06,
1830
+ "loss": 1.4584,
1831
+ "step": 24300
1832
+ },
1833
+ {
1834
+ "epoch": 15.127033307513555,
1835
+ "grad_norm": 20.858081817626953,
1836
+ "learning_rate": 2.441103533787973e-06,
1837
+ "loss": 1.4593,
1838
+ "step": 24400
1839
+ },
1840
+ {
1841
+ "epoch": 15.189000774593339,
1842
+ "grad_norm": 2.9068284034729004,
1843
+ "learning_rate": 2.4101053936763795e-06,
1844
+ "loss": 1.437,
1845
+ "step": 24500
1846
+ },
1847
+ {
1848
+ "epoch": 15.250968241673121,
1849
+ "grad_norm": 2.5827035903930664,
1850
+ "learning_rate": 2.3791072535647865e-06,
1851
+ "loss": 1.4371,
1852
+ "step": 24600
1853
+ },
1854
+ {
1855
+ "epoch": 15.312935708752905,
1856
+ "grad_norm": 2.852088451385498,
1857
+ "learning_rate": 2.348109113453193e-06,
1858
+ "loss": 1.4434,
1859
+ "step": 24700
1860
+ },
1861
+ {
1862
+ "epoch": 15.374903175832689,
1863
+ "grad_norm": 3.28821063041687,
1864
+ "learning_rate": 2.3171109733415996e-06,
1865
+ "loss": 1.4606,
1866
+ "step": 24800
1867
+ },
1868
+ {
1869
+ "epoch": 15.43687064291247,
1870
+ "grad_norm": 3.762103319168091,
1871
+ "learning_rate": 2.2861128332300066e-06,
1872
+ "loss": 1.4584,
1873
+ "step": 24900
1874
+ },
1875
+ {
1876
+ "epoch": 15.498838109992255,
1877
+ "grad_norm": 2.845301866531372,
1878
+ "learning_rate": 2.255114693118413e-06,
1879
+ "loss": 1.4781,
1880
+ "step": 25000
1881
+ },
1882
+ {
1883
+ "epoch": 15.560805577072037,
1884
+ "grad_norm": 3.243618965148926,
1885
+ "learning_rate": 2.2241165530068197e-06,
1886
+ "loss": 1.4489,
1887
+ "step": 25100
1888
+ },
1889
+ {
1890
+ "epoch": 15.62277304415182,
1891
+ "grad_norm": 2.8567991256713867,
1892
+ "learning_rate": 2.1931184128952267e-06,
1893
+ "loss": 1.4768,
1894
+ "step": 25200
1895
+ },
1896
+ {
1897
+ "epoch": 15.684740511231603,
1898
+ "grad_norm": 2.683873414993286,
1899
+ "learning_rate": 2.162430254184749e-06,
1900
+ "loss": 1.4578,
1901
+ "step": 25300
1902
+ },
1903
+ {
1904
+ "epoch": 15.746707978311386,
1905
+ "grad_norm": 3.3914294242858887,
1906
+ "learning_rate": 2.131432114073156e-06,
1907
+ "loss": 1.4564,
1908
+ "step": 25400
1909
+ },
1910
+ {
1911
+ "epoch": 15.80867544539117,
1912
+ "grad_norm": 2.8761868476867676,
1913
+ "learning_rate": 2.1004339739615625e-06,
1914
+ "loss": 1.4648,
1915
+ "step": 25500
1916
+ },
1917
+ {
1918
+ "epoch": 15.870642912470952,
1919
+ "grad_norm": 3.788562059402466,
1920
+ "learning_rate": 2.069435833849969e-06,
1921
+ "loss": 1.4568,
1922
+ "step": 25600
1923
+ },
1924
+ {
1925
+ "epoch": 15.932610379550736,
1926
+ "grad_norm": 3.973054885864258,
1927
+ "learning_rate": 2.038437693738376e-06,
1928
+ "loss": 1.4785,
1929
+ "step": 25700
1930
+ },
1931
+ {
1932
+ "epoch": 15.994577846630518,
1933
+ "grad_norm": 4.635127544403076,
1934
+ "learning_rate": 2.0074395536267826e-06,
1935
+ "loss": 1.4741,
1936
+ "step": 25800
1937
+ },
1938
+ {
1939
+ "epoch": 15.999535243996903,
1940
+ "eval_loss": 1.3650240898132324,
1941
+ "eval_runtime": 71.1811,
1942
+ "eval_samples_per_second": 45.335,
1943
+ "eval_steps_per_second": 22.675,
1944
+ "step": 25808
1945
+ },
1946
+ {
1947
+ "epoch": 16.0570100697134,
1948
+ "grad_norm": 3.8094494342803955,
1949
+ "learning_rate": 1.976441413515189e-06,
1950
+ "loss": 1.4585,
1951
+ "step": 25900
1952
+ },
1953
+ {
1954
+ "epoch": 16.118977536793185,
1955
+ "grad_norm": 2.649723529815674,
1956
+ "learning_rate": 1.945443273403596e-06,
1957
+ "loss": 1.4624,
1958
+ "step": 26000
1959
+ },
1960
+ {
1961
+ "epoch": 16.180945003872967,
1962
+ "grad_norm": 2.537461757659912,
1963
+ "learning_rate": 1.9144451332920027e-06,
1964
+ "loss": 1.4576,
1965
+ "step": 26100
1966
+ },
1967
+ {
1968
+ "epoch": 16.24291247095275,
1969
+ "grad_norm": 3.1259541511535645,
1970
+ "learning_rate": 1.8834469931804095e-06,
1971
+ "loss": 1.4657,
1972
+ "step": 26200
1973
+ },
1974
+ {
1975
+ "epoch": 16.30487993803253,
1976
+ "grad_norm": 3.396057367324829,
1977
+ "learning_rate": 1.8524488530688158e-06,
1978
+ "loss": 1.4532,
1979
+ "step": 26300
1980
+ },
1981
+ {
1982
+ "epoch": 16.366847405112317,
1983
+ "grad_norm": 2.7809932231903076,
1984
+ "learning_rate": 1.8214507129572226e-06,
1985
+ "loss": 1.4483,
1986
+ "step": 26400
1987
+ },
1988
+ {
1989
+ "epoch": 16.4288148721921,
1990
+ "grad_norm": 4.369870662689209,
1991
+ "learning_rate": 1.7904525728456294e-06,
1992
+ "loss": 1.4457,
1993
+ "step": 26500
1994
+ },
1995
+ {
1996
+ "epoch": 16.49078233927188,
1997
+ "grad_norm": 2.7700088024139404,
1998
+ "learning_rate": 1.759454432734036e-06,
1999
+ "loss": 1.4722,
2000
+ "step": 26600
2001
+ },
2002
+ {
2003
+ "epoch": 16.552749806351667,
2004
+ "grad_norm": 2.55792236328125,
2005
+ "learning_rate": 1.7284562926224427e-06,
2006
+ "loss": 1.4242,
2007
+ "step": 26700
2008
+ },
2009
+ {
2010
+ "epoch": 16.61471727343145,
2011
+ "grad_norm": 3.0416712760925293,
2012
+ "learning_rate": 1.6974581525108495e-06,
2013
+ "loss": 1.4516,
2014
+ "step": 26800
2015
+ },
2016
+ {
2017
+ "epoch": 16.67668474051123,
2018
+ "grad_norm": 2.6362993717193604,
2019
+ "learning_rate": 1.666460012399256e-06,
2020
+ "loss": 1.4263,
2021
+ "step": 26900
2022
+ },
2023
+ {
2024
+ "epoch": 16.738652207591016,
2025
+ "grad_norm": 2.5275797843933105,
2026
+ "learning_rate": 1.6354618722876628e-06,
2027
+ "loss": 1.4513,
2028
+ "step": 27000
2029
+ },
2030
+ {
2031
+ "epoch": 16.8006196746708,
2032
+ "grad_norm": 3.758063793182373,
2033
+ "learning_rate": 1.6044637321760696e-06,
2034
+ "loss": 1.4629,
2035
+ "step": 27100
2036
+ },
2037
+ {
2038
+ "epoch": 16.86258714175058,
2039
+ "grad_norm": 2.653982162475586,
2040
+ "learning_rate": 1.5734655920644761e-06,
2041
+ "loss": 1.4549,
2042
+ "step": 27200
2043
+ },
2044
+ {
2045
+ "epoch": 16.924554608830363,
2046
+ "grad_norm": 3.276362180709839,
2047
+ "learning_rate": 1.5424674519528829e-06,
2048
+ "loss": 1.4645,
2049
+ "step": 27300
2050
+ },
2051
+ {
2052
+ "epoch": 16.98652207591015,
2053
+ "grad_norm": 4.8964056968688965,
2054
+ "learning_rate": 1.5114693118412897e-06,
2055
+ "loss": 1.4292,
2056
+ "step": 27400
2057
+ },
2058
+ {
2059
+ "epoch": 16.999535243996903,
2060
+ "eval_loss": 1.3658363819122314,
2061
+ "eval_runtime": 71.5249,
2062
+ "eval_samples_per_second": 45.117,
2063
+ "eval_steps_per_second": 22.566,
2064
+ "step": 27421
2065
+ },
2066
+ {
2067
+ "epoch": 17.048954298993028,
2068
+ "grad_norm": 2.632962703704834,
2069
+ "learning_rate": 1.4804711717296962e-06,
2070
+ "loss": 1.4485,
2071
+ "step": 27500
2072
+ },
2073
+ {
2074
+ "epoch": 17.110921766072813,
2075
+ "grad_norm": 2.65366792678833,
2076
+ "learning_rate": 1.449473031618103e-06,
2077
+ "loss": 1.4335,
2078
+ "step": 27600
2079
+ },
2080
+ {
2081
+ "epoch": 17.172889233152596,
2082
+ "grad_norm": 7.086144924163818,
2083
+ "learning_rate": 1.4184748915065097e-06,
2084
+ "loss": 1.4483,
2085
+ "step": 27700
2086
+ },
2087
+ {
2088
+ "epoch": 17.234856700232378,
2089
+ "grad_norm": 4.501598834991455,
2090
+ "learning_rate": 1.3874767513949163e-06,
2091
+ "loss": 1.4328,
2092
+ "step": 27800
2093
+ },
2094
+ {
2095
+ "epoch": 17.29682416731216,
2096
+ "grad_norm": 3.042269706726074,
2097
+ "learning_rate": 1.356478611283323e-06,
2098
+ "loss": 1.4476,
2099
+ "step": 27900
2100
+ },
2101
+ {
2102
+ "epoch": 17.358791634391945,
2103
+ "grad_norm": 2.902914047241211,
2104
+ "learning_rate": 1.3254804711717298e-06,
2105
+ "loss": 1.4551,
2106
+ "step": 28000
2107
+ },
2108
+ {
2109
+ "epoch": 17.420759101471727,
2110
+ "grad_norm": 4.015540599822998,
2111
+ "learning_rate": 1.2944823310601364e-06,
2112
+ "loss": 1.4408,
2113
+ "step": 28100
2114
+ },
2115
+ {
2116
+ "epoch": 17.48272656855151,
2117
+ "grad_norm": 2.1433181762695312,
2118
+ "learning_rate": 1.2634841909485432e-06,
2119
+ "loss": 1.4363,
2120
+ "step": 28200
2121
+ },
2122
+ {
2123
+ "epoch": 17.544694035631295,
2124
+ "grad_norm": 3.0657026767730713,
2125
+ "learning_rate": 1.23248605083695e-06,
2126
+ "loss": 1.4519,
2127
+ "step": 28300
2128
+ },
2129
+ {
2130
+ "epoch": 17.606661502711077,
2131
+ "grad_norm": 2.260270357131958,
2132
+ "learning_rate": 1.2014879107253565e-06,
2133
+ "loss": 1.4609,
2134
+ "step": 28400
2135
+ },
2136
+ {
2137
+ "epoch": 17.66862896979086,
2138
+ "grad_norm": 3.2962841987609863,
2139
+ "learning_rate": 1.1704897706137633e-06,
2140
+ "loss": 1.4267,
2141
+ "step": 28500
2142
+ },
2143
+ {
2144
+ "epoch": 17.73059643687064,
2145
+ "grad_norm": 2.8596572875976562,
2146
+ "learning_rate": 1.13949163050217e-06,
2147
+ "loss": 1.4485,
2148
+ "step": 28600
2149
+ },
2150
+ {
2151
+ "epoch": 17.792563903950427,
2152
+ "grad_norm": 2.8049371242523193,
2153
+ "learning_rate": 1.1084934903905766e-06,
2154
+ "loss": 1.4272,
2155
+ "step": 28700
2156
+ },
2157
+ {
2158
+ "epoch": 17.85453137103021,
2159
+ "grad_norm": 3.8641581535339355,
2160
+ "learning_rate": 1.0774953502789834e-06,
2161
+ "loss": 1.4426,
2162
+ "step": 28800
2163
+ },
2164
+ {
2165
+ "epoch": 17.91649883810999,
2166
+ "grad_norm": 2.718754529953003,
2167
+ "learning_rate": 1.0464972101673901e-06,
2168
+ "loss": 1.4696,
2169
+ "step": 28900
2170
+ },
2171
+ {
2172
+ "epoch": 17.978466305189777,
2173
+ "grad_norm": 2.8387906551361084,
2174
+ "learning_rate": 1.0154990700557969e-06,
2175
+ "loss": 1.437,
2176
+ "step": 29000
2177
+ },
2178
+ {
2179
+ "epoch": 17.999535243996903,
2180
+ "eval_loss": 1.3650579452514648,
2181
+ "eval_runtime": 71.088,
2182
+ "eval_samples_per_second": 45.394,
2183
+ "eval_steps_per_second": 22.704,
2184
+ "step": 29034
2185
+ },
2186
+ {
2187
+ "epoch": 18.040898528272656,
2188
+ "grad_norm": 7.15321159362793,
2189
+ "learning_rate": 9.845009299442034e-07,
2190
+ "loss": 1.4728,
2191
+ "step": 29100
2192
+ },
2193
+ {
2194
+ "epoch": 18.10286599535244,
2195
+ "grad_norm": 3.0295169353485107,
2196
+ "learning_rate": 9.535027898326102e-07,
2197
+ "loss": 1.444,
2198
+ "step": 29200
2199
+ },
2200
+ {
2201
+ "epoch": 18.164833462432224,
2202
+ "grad_norm": 4.30358362197876,
2203
+ "learning_rate": 9.225046497210168e-07,
2204
+ "loss": 1.4373,
2205
+ "step": 29300
2206
+ },
2207
+ {
2208
+ "epoch": 18.226800929512006,
2209
+ "grad_norm": 2.6451058387756348,
2210
+ "learning_rate": 8.915065096094234e-07,
2211
+ "loss": 1.4633,
2212
+ "step": 29400
2213
+ },
2214
+ {
2215
+ "epoch": 18.288768396591788,
2216
+ "grad_norm": 2.5222158432006836,
2217
+ "learning_rate": 8.605083694978301e-07,
2218
+ "loss": 1.4361,
2219
+ "step": 29500
2220
+ },
2221
+ {
2222
+ "epoch": 18.350735863671574,
2223
+ "grad_norm": 3.099586009979248,
2224
+ "learning_rate": 8.295102293862369e-07,
2225
+ "loss": 1.4451,
2226
+ "step": 29600
2227
+ },
2228
+ {
2229
+ "epoch": 18.412703330751356,
2230
+ "grad_norm": 2.569441795349121,
2231
+ "learning_rate": 7.985120892746435e-07,
2232
+ "loss": 1.4167,
2233
+ "step": 29700
2234
+ },
2235
+ {
2236
+ "epoch": 18.474670797831138,
2237
+ "grad_norm": 3.3484814167022705,
2238
+ "learning_rate": 7.678239305641662e-07,
2239
+ "loss": 1.4573,
2240
+ "step": 29800
2241
+ },
2242
+ {
2243
+ "epoch": 18.536638264910923,
2244
+ "grad_norm": 3.530923843383789,
2245
+ "learning_rate": 7.368257904525729e-07,
2246
+ "loss": 1.4653,
2247
+ "step": 29900
2248
+ },
2249
+ {
2250
+ "epoch": 18.598605731990705,
2251
+ "grad_norm": 2.9912829399108887,
2252
+ "learning_rate": 7.058276503409796e-07,
2253
+ "loss": 1.4376,
2254
+ "step": 30000
2255
+ },
2256
+ {
2257
+ "epoch": 18.660573199070488,
2258
+ "grad_norm": 4.254590034484863,
2259
+ "learning_rate": 6.748295102293862e-07,
2260
+ "loss": 1.4159,
2261
+ "step": 30100
2262
+ },
2263
+ {
2264
+ "epoch": 18.72254066615027,
2265
+ "grad_norm": 2.5701520442962646,
2266
+ "learning_rate": 6.43831370117793e-07,
2267
+ "loss": 1.4456,
2268
+ "step": 30200
2269
+ },
2270
+ {
2271
+ "epoch": 18.784508133230055,
2272
+ "grad_norm": 2.5587334632873535,
2273
+ "learning_rate": 6.131432114073156e-07,
2274
+ "loss": 1.4398,
2275
+ "step": 30300
2276
+ },
2277
+ {
2278
+ "epoch": 18.846475600309837,
2279
+ "grad_norm": 2.7531826496124268,
2280
+ "learning_rate": 5.821450712957223e-07,
2281
+ "loss": 1.4373,
2282
+ "step": 30400
2283
+ },
2284
+ {
2285
+ "epoch": 18.90844306738962,
2286
+ "grad_norm": 2.606565237045288,
2287
+ "learning_rate": 5.51146931184129e-07,
2288
+ "loss": 1.4322,
2289
+ "step": 30500
2290
+ },
2291
+ {
2292
+ "epoch": 18.970410534469405,
2293
+ "grad_norm": 2.6392128467559814,
2294
+ "learning_rate": 5.201487910725357e-07,
2295
+ "loss": 1.4442,
2296
+ "step": 30600
2297
+ },
2298
+ {
2299
+ "epoch": 18.999535243996903,
2300
+ "eval_loss": 1.3643269538879395,
2301
+ "eval_runtime": 71.2663,
2302
+ "eval_samples_per_second": 45.281,
2303
+ "eval_steps_per_second": 22.647,
2304
+ "step": 30647
2305
+ },
2306
+ {
2307
+ "epoch": 19.032842757552284,
2308
+ "grad_norm": 2.5503151416778564,
2309
+ "learning_rate": 4.891506509609424e-07,
2310
+ "loss": 1.4546,
2311
+ "step": 30700
2312
+ },
2313
+ {
2314
+ "epoch": 19.094810224632067,
2315
+ "grad_norm": 3.5481629371643066,
2316
+ "learning_rate": 4.5815251084934906e-07,
2317
+ "loss": 1.4389,
2318
+ "step": 30800
2319
+ },
2320
+ {
2321
+ "epoch": 19.156777691711852,
2322
+ "grad_norm": 2.849660634994507,
2323
+ "learning_rate": 4.271543707377557e-07,
2324
+ "loss": 1.4311,
2325
+ "step": 30900
2326
+ },
2327
+ {
2328
+ "epoch": 19.218745158791634,
2329
+ "grad_norm": 2.581972599029541,
2330
+ "learning_rate": 3.9615623062616244e-07,
2331
+ "loss": 1.4772,
2332
+ "step": 31000
2333
+ },
2334
+ {
2335
+ "epoch": 19.280712625871416,
2336
+ "grad_norm": 4.163836479187012,
2337
+ "learning_rate": 3.6515809051456916e-07,
2338
+ "loss": 1.4237,
2339
+ "step": 31100
2340
+ },
2341
+ {
2342
+ "epoch": 19.342680092951202,
2343
+ "grad_norm": 2.7274580001831055,
2344
+ "learning_rate": 3.341599504029758e-07,
2345
+ "loss": 1.4404,
2346
+ "step": 31200
2347
+ },
2348
+ {
2349
+ "epoch": 19.404647560030984,
2350
+ "grad_norm": 3.983724355697632,
2351
+ "learning_rate": 3.0316181029138253e-07,
2352
+ "loss": 1.4473,
2353
+ "step": 31300
2354
+ },
2355
+ {
2356
+ "epoch": 19.466615027110766,
2357
+ "grad_norm": 2.6508560180664062,
2358
+ "learning_rate": 2.7216367017978925e-07,
2359
+ "loss": 1.4563,
2360
+ "step": 31400
2361
+ },
2362
+ {
2363
+ "epoch": 19.52858249419055,
2364
+ "grad_norm": 2.608508348464966,
2365
+ "learning_rate": 2.4116553006819597e-07,
2366
+ "loss": 1.4381,
2367
+ "step": 31500
2368
+ },
2369
+ {
2370
+ "epoch": 19.590549961270334,
2371
+ "grad_norm": 3.2333600521087646,
2372
+ "learning_rate": 2.101673899566026e-07,
2373
+ "loss": 1.4418,
2374
+ "step": 31600
2375
+ },
2376
+ {
2377
+ "epoch": 19.652517428350116,
2378
+ "grad_norm": 5.008152008056641,
2379
+ "learning_rate": 1.7916924984500932e-07,
2380
+ "loss": 1.447,
2381
+ "step": 31700
2382
+ },
2383
+ {
2384
+ "epoch": 19.714484895429898,
2385
+ "grad_norm": 3.073624610900879,
2386
+ "learning_rate": 1.48171109733416e-07,
2387
+ "loss": 1.4241,
2388
+ "step": 31800
2389
+ },
2390
+ {
2391
+ "epoch": 19.776452362509684,
2392
+ "grad_norm": 5.159261226654053,
2393
+ "learning_rate": 1.171729696218227e-07,
2394
+ "loss": 1.424,
2395
+ "step": 31900
2396
+ },
2397
+ {
2398
+ "epoch": 19.838419829589466,
2399
+ "grad_norm": 3.225008249282837,
2400
+ "learning_rate": 8.61748295102294e-08,
2401
+ "loss": 1.4327,
2402
+ "step": 32000
2403
+ },
2404
+ {
2405
+ "epoch": 19.900387296669248,
2406
+ "grad_norm": 3.629281520843506,
2407
+ "learning_rate": 5.517668939863608e-08,
2408
+ "loss": 1.437,
2409
+ "step": 32100
2410
+ },
2411
+ {
2412
+ "epoch": 19.962354763749033,
2413
+ "grad_norm": 2.323373556137085,
2414
+ "learning_rate": 2.417854928704278e-08,
2415
+ "loss": 1.4345,
2416
+ "step": 32200
2417
+ },
2418
+ {
2419
+ "epoch": 19.999535243996903,
2420
+ "eval_loss": 1.3642308712005615,
2421
+ "eval_runtime": 70.2626,
2422
+ "eval_samples_per_second": 45.928,
2423
+ "eval_steps_per_second": 22.971,
2424
+ "step": 32260
2425
+ }
2426
+ ],
2427
+ "logging_steps": 100,
2428
+ "max_steps": 32260,
2429
+ "num_input_tokens_seen": 0,
2430
+ "num_train_epochs": 20,
2431
+ "save_steps": 500,
2432
+ "stateful_callbacks": {
2433
+ "TrainerControl": {
2434
+ "args": {
2435
+ "should_epoch_stop": false,
2436
+ "should_evaluate": false,
2437
+ "should_log": false,
2438
+ "should_save": true,
2439
+ "should_training_stop": true
2440
+ },
2441
+ "attributes": {}
2442
+ }
2443
+ },
2444
+ "total_flos": 2.2226940169519104e+17,
2445
+ "train_batch_size": 2,
2446
+ "trial_name": null,
2447
+ "trial_params": null
2448
+ }
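The JSON above (presumably the Trainer's trainer_state.json; its file header falls outside this excerpt) is a flat log: each "log_history" entry records either a training step (step, loss, grad_norm, learning_rate) or an end-of-epoch evaluation (eval_loss, eval_runtime, eval_samples_per_second). A minimal sketch for splitting it back into training and evaluation curves, assuming a local copy named trainer_state.json with the standard Hugging Face Trainer layout shown here:

import json

# Assumed local copy of the uploaded state file.
with open("trainer_state.json") as f:
    state = json.load(f)

# Entries carrying "loss" are the periodic training logs; entries carrying
# "eval_loss" are the end-of-epoch evaluations.
train = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
evals = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

print(f"{len(train)} training entries, {len(evals)} evaluation entries")
print("best eval_loss:", min(v for _, v in evals))

The trailing fields of the file match the cadence visible in the log: logging_steps of 100 gives one training entry every 100 optimizer steps, evaluations land once per epoch, and max_steps of 32260 covers the 20 configured epochs.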
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:481df95b3a0fad34cab9c2f1194c98d273531df33b717401035b0db384ea9ddc
3
+ size 5841
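training_args.bin is stored through Git LFS, so the three lines above are only a pointer: the spec version, the sha256 object id, and the size in bytes of the real binary. A minimal sketch (assuming the actual file has already been fetched, for example with git lfs pull or the huggingface_hub download helpers) for checking a local copy against this pointer:

import hashlib
from pathlib import Path

# Values copied from the LFS pointer above.
EXPECTED_OID = "481df95b3a0fad34cab9c2f1194c98d273531df33b717401035b0db384ea9ddc"
EXPECTED_SIZE = 5841

data = Path("training_args.bin").read_bytes()
assert len(data) == EXPECTED_SIZE, f"unexpected size: {len(data)} bytes"
assert hashlib.sha256(data).hexdigest() == EXPECTED_OID, "sha256 does not match the LFS pointer"
print("training_args.bin matches the pointer")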