musabg committed
Commit c774912
1 Parent(s): 6925ee7

Upload folder using huggingface_hub

config.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "_name_or_path": "huggyllama/llama-7b",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "max_sequence_length": 2048,
+ "model_type": "llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "pad_token_id": 0,
+ "rms_norm_eps": 1e-06,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.28.1",
+ "use_cache": false,
+ "vocab_size": 32000
+ }
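
The config above describes a standard 32-layer LLaMA-7B architecture, so the uploaded shards should load through the regular transformers API. A minimal sketch, assuming the folder has been downloaded locally (the "./checkpoint" path is a placeholder, not part of this upload):

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

checkpoint_dir = "./checkpoint"  # placeholder: local copy of this uploaded folder

# Tokenizer files: tokenizer.model, tokenizer_config.json, special_tokens_map.json
tokenizer = AutoTokenizer.from_pretrained(checkpoint_dir)

# The three pytorch_model-*.bin shards are resolved via pytorch_model.bin.index.json
model = AutoModelForCausalLM.from_pretrained(
    checkpoint_dir,
    torch_dtype=torch.float16,  # weights are stored as float32; halve memory if desired
    device_map="auto",          # requires accelerate; drop for plain CPU loading
)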
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "pad_token_id": 0,
+ "transformers_version": "4.28.1"
+ }
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a0e58fa21b802c1a0b2a0c75f37c266463af58b130387c328056d45fb5d21c2b
+ size 13476859646
pytorch_model-00001-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65b9ad91fdc9f51cd72e45ae3f5d5a2cec0ce89f50b969fac93276b158b69886
+ size 9877989650
pytorch_model-00002-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ffa84aab9ba936e1d2ad2bfb3ab68920e0a803b0c7b7f6d4a7c7659a65967abb
+ size 9894801206
pytorch_model-00003-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8658ec14f5fd9db2c4ae9773f8acd5078a09602a15a5484c91a7c83423099458
+ size 7180990841
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,330 @@
+ {
+ "metadata": {
+ "total_size": 26953666560
+ },
+ "weight_map": {
+ "lm_head.weight": "pytorch_model-00003-of-00003.bin",
+ "model.embed_tokens.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.11.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.12.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.20.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.24.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.30.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.norm.weight": "pytorch_model-00003-of-00003.bin"
+ }
+ }
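
Because the checkpoint is split across three shards, the index above is what from_pretrained uses to resolve each tensor to a file. A small sketch of inspecting it directly (the file path is a placeholder for a local copy):

import json
from collections import Counter

# placeholder: local copy of the index file from this upload
with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # 26953666560 bytes of float32 weights
print(index["weight_map"]["lm_head.weight"])  # which shard holds a given tensor

# number of tensors stored in each shard
print(Counter(index["weight_map"].values()))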
rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e3c5cb412e12159a59afe5657ce4b5e0a06e7fb420bedbb5228fe1245702762
+ size 14583
rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:741230672078323886b763e522c728741456a587860909fc529ce815a7aca5ec
+ size 14583
rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ea587886b41579993bb5d20c79047b968ae2d71d22ba4c739b07ce31d7486a6
+ size 14583
rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ab727740f74dd67e60283d27b4339609a1dda888b067cc06520e2f1d7dc17db
+ size 14583
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e43b372746f1c43e6b8e3b0d9739066714de8bc4930ffc8a745bcec6595d691
+ size 627
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<unk>",
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+ size 499723
tokenizer_config.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "bos_token": {
+ "__type": "AddedToken",
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "clean_up_tokenization_spaces": false,
+ "eos_token": {
+ "__type": "AddedToken",
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "model_max_length": 2048,
+ "pad_token": null,
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": {
+ "__type": "AddedToken",
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
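
One detail when reusing these tokenizer files: tokenizer_config.json leaves pad_token null, while special_tokens_map.json maps it to "<unk>" (matching pad_token_id 0 in config.json). If the tokenizer loads without a pad token, a hedged workaround for batched tokenization is to set it explicitly ("./checkpoint" is again a placeholder path):

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./checkpoint")  # placeholder path

# Fall back to the unk token as padding, mirroring special_tokens_map.json / pad_token_id 0
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.unk_token

batch = tokenizer(["first prompt", "a somewhat longer second prompt"],
                  padding=True, return_tensors="pt")
print(batch["input_ids"].shape)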
trainer_state.json ADDED
@@ -0,0 +1,2904 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 1.6783216783216783,
5
+ "global_step": 480,
6
+ "is_hyper_param_search": false,
7
+ "is_local_process_zero": true,
8
+ "is_world_process_zero": true,
9
+ "log_history": [
10
+ {
11
+ "epoch": 0.0,
12
+ "learning_rate": 7.692307692307694e-07,
13
+ "loss": 6.3448,
14
+ "step": 1
15
+ },
16
+ {
17
+ "epoch": 0.01,
18
+ "learning_rate": 1.5384615384615387e-06,
19
+ "loss": 6.3545,
20
+ "step": 2
21
+ },
22
+ {
23
+ "epoch": 0.01,
24
+ "learning_rate": 2.307692307692308e-06,
25
+ "loss": 5.8293,
26
+ "step": 3
27
+ },
28
+ {
29
+ "epoch": 0.01,
30
+ "learning_rate": 3.0769230769230774e-06,
31
+ "loss": 5.7241,
32
+ "step": 4
33
+ },
34
+ {
35
+ "epoch": 0.02,
36
+ "learning_rate": 3.846153846153847e-06,
37
+ "loss": 5.7151,
38
+ "step": 5
39
+ },
40
+ {
41
+ "epoch": 0.02,
42
+ "learning_rate": 4.615384615384616e-06,
43
+ "loss": 5.759,
44
+ "step": 6
45
+ },
46
+ {
47
+ "epoch": 0.02,
48
+ "learning_rate": 5.384615384615385e-06,
49
+ "loss": 5.7633,
50
+ "step": 7
51
+ },
52
+ {
53
+ "epoch": 0.03,
54
+ "learning_rate": 6.153846153846155e-06,
55
+ "loss": 5.5305,
56
+ "step": 8
57
+ },
58
+ {
59
+ "epoch": 0.03,
60
+ "learning_rate": 6.923076923076923e-06,
61
+ "loss": 5.9615,
62
+ "step": 9
63
+ },
64
+ {
65
+ "epoch": 0.03,
66
+ "learning_rate": 7.692307692307694e-06,
67
+ "loss": 5.8986,
68
+ "step": 10
69
+ },
70
+ {
71
+ "epoch": 0.04,
72
+ "learning_rate": 8.461538461538462e-06,
73
+ "loss": 5.9003,
74
+ "step": 11
75
+ },
76
+ {
77
+ "epoch": 0.04,
78
+ "learning_rate": 9.230769230769232e-06,
79
+ "loss": 5.4062,
80
+ "step": 12
81
+ },
82
+ {
83
+ "epoch": 0.05,
84
+ "learning_rate": 1e-05,
85
+ "loss": 5.8606,
86
+ "step": 13
87
+ },
88
+ {
89
+ "epoch": 0.05,
90
+ "learning_rate": 1.076923076923077e-05,
91
+ "loss": 5.4724,
92
+ "step": 14
93
+ },
94
+ {
95
+ "epoch": 0.05,
96
+ "learning_rate": 1.1538461538461538e-05,
97
+ "loss": 5.4155,
98
+ "step": 15
99
+ },
100
+ {
101
+ "epoch": 0.06,
102
+ "learning_rate": 1.230769230769231e-05,
103
+ "loss": 5.1616,
104
+ "step": 16
105
+ },
106
+ {
107
+ "epoch": 0.06,
108
+ "learning_rate": 1.3076923076923078e-05,
109
+ "loss": 5.0633,
110
+ "step": 17
111
+ },
112
+ {
113
+ "epoch": 0.06,
114
+ "learning_rate": 1.3846153846153847e-05,
115
+ "loss": 5.918,
116
+ "step": 18
117
+ },
118
+ {
119
+ "epoch": 0.07,
120
+ "learning_rate": 1.4615384615384615e-05,
121
+ "loss": 5.4574,
122
+ "step": 19
123
+ },
124
+ {
125
+ "epoch": 0.07,
126
+ "learning_rate": 1.5384615384615387e-05,
127
+ "loss": 4.5698,
128
+ "step": 20
129
+ },
130
+ {
131
+ "epoch": 0.07,
132
+ "learning_rate": 1.6153846153846154e-05,
133
+ "loss": 4.9255,
134
+ "step": 21
135
+ },
136
+ {
137
+ "epoch": 0.08,
138
+ "learning_rate": 1.6923076923076924e-05,
139
+ "loss": 4.7709,
140
+ "step": 22
141
+ },
142
+ {
143
+ "epoch": 0.08,
144
+ "learning_rate": 1.7692307692307694e-05,
145
+ "loss": 4.5418,
146
+ "step": 23
147
+ },
148
+ {
149
+ "epoch": 0.08,
150
+ "learning_rate": 1.8461538461538465e-05,
151
+ "loss": 4.3907,
152
+ "step": 24
153
+ },
154
+ {
155
+ "epoch": 0.09,
156
+ "learning_rate": 1.923076923076923e-05,
157
+ "loss": 3.796,
158
+ "step": 25
159
+ },
160
+ {
161
+ "epoch": 0.09,
162
+ "learning_rate": 2e-05,
163
+ "loss": 3.9047,
164
+ "step": 26
165
+ },
166
+ {
167
+ "epoch": 0.09,
168
+ "learning_rate": 1.9999928710990414e-05,
169
+ "loss": 3.6515,
170
+ "step": 27
171
+ },
172
+ {
173
+ "epoch": 0.1,
174
+ "learning_rate": 1.999971484497808e-05,
175
+ "loss": 3.4483,
176
+ "step": 28
177
+ },
178
+ {
179
+ "epoch": 0.1,
180
+ "learning_rate": 1.999935840501225e-05,
181
+ "loss": 2.9432,
182
+ "step": 29
183
+ },
184
+ {
185
+ "epoch": 0.1,
186
+ "learning_rate": 1.9998859396174982e-05,
187
+ "loss": 2.694,
188
+ "step": 30
189
+ },
190
+ {
191
+ "epoch": 0.11,
192
+ "learning_rate": 1.9998217825581043e-05,
193
+ "loss": 2.9966,
194
+ "step": 31
195
+ },
196
+ {
197
+ "epoch": 0.11,
198
+ "learning_rate": 1.999743370237782e-05,
199
+ "loss": 2.437,
200
+ "step": 32
201
+ },
202
+ {
203
+ "epoch": 0.12,
204
+ "learning_rate": 1.9996507037745184e-05,
205
+ "loss": 2.3988,
206
+ "step": 33
207
+ },
208
+ {
209
+ "epoch": 0.12,
210
+ "learning_rate": 1.9995437844895337e-05,
211
+ "loss": 2.1954,
212
+ "step": 34
213
+ },
214
+ {
215
+ "epoch": 0.12,
216
+ "learning_rate": 1.999422613907262e-05,
217
+ "loss": 2.0812,
218
+ "step": 35
219
+ },
220
+ {
221
+ "epoch": 0.13,
222
+ "learning_rate": 1.9992871937553292e-05,
223
+ "loss": 1.884,
224
+ "step": 36
225
+ },
226
+ {
227
+ "epoch": 0.13,
228
+ "learning_rate": 1.9991375259645293e-05,
229
+ "loss": 1.8956,
230
+ "step": 37
231
+ },
232
+ {
233
+ "epoch": 0.13,
234
+ "learning_rate": 1.998973612668796e-05,
235
+ "loss": 1.8294,
236
+ "step": 38
237
+ },
238
+ {
239
+ "epoch": 0.14,
240
+ "learning_rate": 1.9987954562051724e-05,
241
+ "loss": 1.5839,
242
+ "step": 39
243
+ },
244
+ {
245
+ "epoch": 0.14,
246
+ "learning_rate": 1.9986030591137785e-05,
247
+ "loss": 1.5102,
248
+ "step": 40
249
+ },
250
+ {
251
+ "epoch": 0.14,
252
+ "learning_rate": 1.998396424137773e-05,
253
+ "loss": 1.3267,
254
+ "step": 41
255
+ },
256
+ {
257
+ "epoch": 0.15,
258
+ "learning_rate": 1.9981755542233175e-05,
259
+ "loss": 1.1175,
260
+ "step": 42
261
+ },
262
+ {
263
+ "epoch": 0.15,
264
+ "learning_rate": 1.9979404525195313e-05,
265
+ "loss": 1.0045,
266
+ "step": 43
267
+ },
268
+ {
269
+ "epoch": 0.15,
270
+ "learning_rate": 1.9976911223784473e-05,
271
+ "loss": 0.846,
272
+ "step": 44
273
+ },
274
+ {
275
+ "epoch": 0.16,
276
+ "learning_rate": 1.9974275673549654e-05,
277
+ "loss": 0.7292,
278
+ "step": 45
279
+ },
280
+ {
281
+ "epoch": 0.16,
282
+ "learning_rate": 1.9971497912068014e-05,
283
+ "loss": 0.613,
284
+ "step": 46
285
+ },
286
+ {
287
+ "epoch": 0.16,
288
+ "learning_rate": 1.9968577978944323e-05,
289
+ "loss": 0.5431,
290
+ "step": 47
291
+ },
292
+ {
293
+ "epoch": 0.17,
294
+ "learning_rate": 1.9965515915810408e-05,
295
+ "loss": 0.4976,
296
+ "step": 48
297
+ },
298
+ {
299
+ "epoch": 0.17,
300
+ "learning_rate": 1.9962311766324562e-05,
301
+ "loss": 0.4278,
302
+ "step": 49
303
+ },
304
+ {
305
+ "epoch": 0.17,
306
+ "learning_rate": 1.995896557617091e-05,
307
+ "loss": 0.4254,
308
+ "step": 50
309
+ },
310
+ {
311
+ "epoch": 0.18,
312
+ "learning_rate": 1.9955477393058774e-05,
313
+ "loss": 0.4062,
314
+ "step": 51
315
+ },
316
+ {
317
+ "epoch": 0.18,
318
+ "learning_rate": 1.995184726672197e-05,
319
+ "loss": 0.3502,
320
+ "step": 52
321
+ },
322
+ {
323
+ "epoch": 0.19,
324
+ "learning_rate": 1.9948075248918126e-05,
325
+ "loss": 0.3671,
326
+ "step": 53
327
+ },
328
+ {
329
+ "epoch": 0.19,
330
+ "learning_rate": 1.9944161393427923e-05,
331
+ "loss": 0.3904,
332
+ "step": 54
333
+ },
334
+ {
335
+ "epoch": 0.19,
336
+ "learning_rate": 1.9940105756054337e-05,
337
+ "loss": 0.3622,
338
+ "step": 55
339
+ },
340
+ {
341
+ "epoch": 0.2,
342
+ "learning_rate": 1.9935908394621844e-05,
343
+ "loss": 0.3523,
344
+ "step": 56
345
+ },
346
+ {
347
+ "epoch": 0.2,
348
+ "learning_rate": 1.9931569368975588e-05,
349
+ "loss": 0.3344,
350
+ "step": 57
351
+ },
352
+ {
353
+ "epoch": 0.2,
354
+ "learning_rate": 1.992708874098054e-05,
355
+ "loss": 0.3586,
356
+ "step": 58
357
+ },
358
+ {
359
+ "epoch": 0.21,
360
+ "learning_rate": 1.992246657452061e-05,
361
+ "loss": 0.3559,
362
+ "step": 59
363
+ },
364
+ {
365
+ "epoch": 0.21,
366
+ "learning_rate": 1.9917702935497725e-05,
367
+ "loss": 0.3494,
368
+ "step": 60
369
+ },
370
+ {
371
+ "epoch": 0.21,
372
+ "learning_rate": 1.991279789183091e-05,
373
+ "loss": 0.3458,
374
+ "step": 61
375
+ },
376
+ {
377
+ "epoch": 0.22,
378
+ "learning_rate": 1.99077515134553e-05,
379
+ "loss": 0.3344,
380
+ "step": 62
381
+ },
382
+ {
383
+ "epoch": 0.22,
384
+ "learning_rate": 1.9902563872321174e-05,
385
+ "loss": 0.3354,
386
+ "step": 63
387
+ },
388
+ {
389
+ "epoch": 0.22,
390
+ "learning_rate": 1.9897235042392876e-05,
391
+ "loss": 0.3404,
392
+ "step": 64
393
+ },
394
+ {
395
+ "epoch": 0.23,
396
+ "learning_rate": 1.989176509964781e-05,
397
+ "loss": 0.3622,
398
+ "step": 65
399
+ },
400
+ {
401
+ "epoch": 0.23,
402
+ "learning_rate": 1.9886154122075344e-05,
403
+ "loss": 0.3428,
404
+ "step": 66
405
+ },
406
+ {
407
+ "epoch": 0.23,
408
+ "learning_rate": 1.9880402189675677e-05,
409
+ "loss": 0.3557,
410
+ "step": 67
411
+ },
412
+ {
413
+ "epoch": 0.24,
414
+ "learning_rate": 1.9874509384458726e-05,
415
+ "loss": 0.3492,
416
+ "step": 68
417
+ },
418
+ {
419
+ "epoch": 0.24,
420
+ "learning_rate": 1.986847579044294e-05,
421
+ "loss": 0.3463,
422
+ "step": 69
423
+ },
424
+ {
425
+ "epoch": 0.24,
426
+ "learning_rate": 1.986230149365411e-05,
427
+ "loss": 0.3409,
428
+ "step": 70
429
+ },
430
+ {
431
+ "epoch": 0.25,
432
+ "learning_rate": 1.9855986582124128e-05,
433
+ "loss": 0.344,
434
+ "step": 71
435
+ },
436
+ {
437
+ "epoch": 0.25,
438
+ "learning_rate": 1.9849531145889758e-05,
439
+ "loss": 0.3085,
440
+ "step": 72
441
+ },
442
+ {
443
+ "epoch": 0.26,
444
+ "learning_rate": 1.9842935276991332e-05,
445
+ "loss": 0.332,
446
+ "step": 73
447
+ },
448
+ {
449
+ "epoch": 0.26,
450
+ "learning_rate": 1.983619906947144e-05,
451
+ "loss": 0.3198,
452
+ "step": 74
453
+ },
454
+ {
455
+ "epoch": 0.26,
456
+ "learning_rate": 1.982932261937359e-05,
457
+ "loss": 0.331,
458
+ "step": 75
459
+ },
460
+ {
461
+ "epoch": 0.27,
462
+ "learning_rate": 1.9822306024740855e-05,
463
+ "loss": 0.3298,
464
+ "step": 76
465
+ },
466
+ {
467
+ "epoch": 0.27,
468
+ "learning_rate": 1.9815149385614446e-05,
469
+ "loss": 0.3223,
470
+ "step": 77
471
+ },
472
+ {
473
+ "epoch": 0.27,
474
+ "learning_rate": 1.9807852804032306e-05,
475
+ "loss": 0.3082,
476
+ "step": 78
477
+ },
478
+ {
479
+ "epoch": 0.28,
480
+ "learning_rate": 1.980041638402765e-05,
481
+ "loss": 0.342,
482
+ "step": 79
483
+ },
484
+ {
485
+ "epoch": 0.28,
486
+ "learning_rate": 1.9792840231627482e-05,
487
+ "loss": 0.3217,
488
+ "step": 80
489
+ },
490
+ {
491
+ "epoch": 0.28,
492
+ "learning_rate": 1.9785124454851082e-05,
493
+ "loss": 0.3222,
494
+ "step": 81
495
+ },
496
+ {
497
+ "epoch": 0.29,
498
+ "learning_rate": 1.977726916370847e-05,
499
+ "loss": 0.3533,
500
+ "step": 82
501
+ },
502
+ {
503
+ "epoch": 0.29,
504
+ "learning_rate": 1.9769274470198827e-05,
505
+ "loss": 0.3269,
506
+ "step": 83
507
+ },
508
+ {
509
+ "epoch": 0.29,
510
+ "learning_rate": 1.976114048830891e-05,
511
+ "loss": 0.3448,
512
+ "step": 84
513
+ },
514
+ {
515
+ "epoch": 0.3,
516
+ "learning_rate": 1.9752867334011422e-05,
517
+ "loss": 0.3524,
518
+ "step": 85
519
+ },
520
+ {
521
+ "epoch": 0.3,
522
+ "learning_rate": 1.974445512526336e-05,
523
+ "loss": 0.338,
524
+ "step": 86
525
+ },
526
+ {
527
+ "epoch": 0.3,
528
+ "learning_rate": 1.9735903982004324e-05,
529
+ "loss": 0.3146,
530
+ "step": 87
531
+ },
532
+ {
533
+ "epoch": 0.31,
534
+ "learning_rate": 1.9727214026154827e-05,
535
+ "loss": 0.3093,
536
+ "step": 88
537
+ },
538
+ {
539
+ "epoch": 0.31,
540
+ "learning_rate": 1.971838538161454e-05,
541
+ "loss": 0.3247,
542
+ "step": 89
543
+ },
544
+ {
545
+ "epoch": 0.31,
546
+ "learning_rate": 1.9709418174260523e-05,
547
+ "loss": 0.3215,
548
+ "step": 90
549
+ },
550
+ {
551
+ "epoch": 0.32,
552
+ "learning_rate": 1.9700312531945444e-05,
553
+ "loss": 0.324,
554
+ "step": 91
555
+ },
556
+ {
557
+ "epoch": 0.32,
558
+ "learning_rate": 1.9691068584495744e-05,
559
+ "loss": 0.3608,
560
+ "step": 92
561
+ },
562
+ {
563
+ "epoch": 0.33,
564
+ "learning_rate": 1.96816864637098e-05,
565
+ "loss": 0.3412,
566
+ "step": 93
567
+ },
568
+ {
569
+ "epoch": 0.33,
570
+ "learning_rate": 1.967216630335603e-05,
571
+ "loss": 0.3461,
572
+ "step": 94
573
+ },
574
+ {
575
+ "epoch": 0.33,
576
+ "learning_rate": 1.9662508239170993e-05,
577
+ "loss": 0.3355,
578
+ "step": 95
579
+ },
580
+ {
581
+ "epoch": 0.34,
582
+ "learning_rate": 1.9652712408857452e-05,
583
+ "loss": 0.3223,
584
+ "step": 96
585
+ },
586
+ {
587
+ "epoch": 0.34,
588
+ "learning_rate": 1.9642778952082425e-05,
589
+ "loss": 0.3196,
590
+ "step": 97
591
+ },
592
+ {
593
+ "epoch": 0.34,
594
+ "learning_rate": 1.9632708010475166e-05,
595
+ "loss": 0.3386,
596
+ "step": 98
597
+ },
598
+ {
599
+ "epoch": 0.35,
600
+ "learning_rate": 1.9622499727625162e-05,
601
+ "loss": 0.3265,
602
+ "step": 99
603
+ },
604
+ {
605
+ "epoch": 0.35,
606
+ "learning_rate": 1.961215424908009e-05,
607
+ "loss": 0.3304,
608
+ "step": 100
609
+ },
610
+ {
611
+ "epoch": 0.35,
612
+ "learning_rate": 1.9601671722343737e-05,
613
+ "loss": 0.33,
614
+ "step": 101
615
+ },
616
+ {
617
+ "epoch": 0.36,
618
+ "learning_rate": 1.959105229687389e-05,
619
+ "loss": 0.3344,
620
+ "step": 102
621
+ },
622
+ {
623
+ "epoch": 0.36,
624
+ "learning_rate": 1.9580296124080215e-05,
625
+ "loss": 0.3304,
626
+ "step": 103
627
+ },
628
+ {
629
+ "epoch": 0.36,
630
+ "learning_rate": 1.956940335732209e-05,
631
+ "loss": 0.3467,
632
+ "step": 104
633
+ },
634
+ {
635
+ "epoch": 0.37,
636
+ "learning_rate": 1.955837415190643e-05,
637
+ "loss": 0.3317,
638
+ "step": 105
639
+ },
640
+ {
641
+ "epoch": 0.37,
642
+ "learning_rate": 1.954720866508546e-05,
643
+ "loss": 0.309,
644
+ "step": 106
645
+ },
646
+ {
647
+ "epoch": 0.37,
648
+ "learning_rate": 1.9535907056054475e-05,
649
+ "loss": 0.328,
650
+ "step": 107
651
+ },
652
+ {
653
+ "epoch": 0.38,
654
+ "learning_rate": 1.9524469485949586e-05,
655
+ "loss": 0.3443,
656
+ "step": 108
657
+ },
658
+ {
659
+ "epoch": 0.38,
660
+ "learning_rate": 1.9512896117845393e-05,
661
+ "loss": 0.3142,
662
+ "step": 109
663
+ },
664
+ {
665
+ "epoch": 0.38,
666
+ "learning_rate": 1.9501187116752694e-05,
667
+ "loss": 0.3182,
668
+ "step": 110
669
+ },
670
+ {
671
+ "epoch": 0.39,
672
+ "learning_rate": 1.94893426496161e-05,
673
+ "loss": 0.3485,
674
+ "step": 111
675
+ },
676
+ {
677
+ "epoch": 0.39,
678
+ "learning_rate": 1.9477362885311684e-05,
679
+ "loss": 0.328,
680
+ "step": 112
681
+ },
682
+ {
683
+ "epoch": 0.4,
684
+ "learning_rate": 1.946524799464455e-05,
685
+ "loss": 0.321,
686
+ "step": 113
687
+ },
688
+ {
689
+ "epoch": 0.4,
690
+ "learning_rate": 1.9452998150346403e-05,
691
+ "loss": 0.3118,
692
+ "step": 114
693
+ },
694
+ {
695
+ "epoch": 0.4,
696
+ "learning_rate": 1.9440613527073106e-05,
697
+ "loss": 0.3362,
698
+ "step": 115
699
+ },
700
+ {
701
+ "epoch": 0.41,
702
+ "learning_rate": 1.9428094301402164e-05,
703
+ "loss": 0.3339,
704
+ "step": 116
705
+ },
706
+ {
707
+ "epoch": 0.41,
708
+ "learning_rate": 1.941544065183021e-05,
709
+ "loss": 0.328,
710
+ "step": 117
711
+ },
712
+ {
713
+ "epoch": 0.41,
714
+ "learning_rate": 1.9402652758770476e-05,
715
+ "loss": 0.3226,
716
+ "step": 118
717
+ },
718
+ {
719
+ "epoch": 0.42,
720
+ "learning_rate": 1.938973080455021e-05,
721
+ "loss": 0.3083,
722
+ "step": 119
723
+ },
724
+ {
725
+ "epoch": 0.42,
726
+ "learning_rate": 1.9376674973408077e-05,
727
+ "loss": 0.3247,
728
+ "step": 120
729
+ },
730
+ {
731
+ "epoch": 0.42,
732
+ "learning_rate": 1.9363485451491523e-05,
733
+ "loss": 0.3234,
734
+ "step": 121
735
+ },
736
+ {
737
+ "epoch": 0.43,
738
+ "learning_rate": 1.9350162426854152e-05,
739
+ "loss": 0.3004,
740
+ "step": 122
741
+ },
742
+ {
743
+ "epoch": 0.43,
744
+ "learning_rate": 1.9336706089452995e-05,
745
+ "loss": 0.3346,
746
+ "step": 123
747
+ },
748
+ {
749
+ "epoch": 0.43,
750
+ "learning_rate": 1.932311663114586e-05,
751
+ "loss": 0.3407,
752
+ "step": 124
753
+ },
754
+ {
755
+ "epoch": 0.44,
756
+ "learning_rate": 1.930939424568854e-05,
757
+ "loss": 0.3201,
758
+ "step": 125
759
+ },
760
+ {
761
+ "epoch": 0.44,
762
+ "learning_rate": 1.9295539128732096e-05,
763
+ "loss": 0.3362,
764
+ "step": 126
765
+ },
766
+ {
767
+ "epoch": 0.44,
768
+ "learning_rate": 1.9281551477820038e-05,
769
+ "loss": 0.3295,
770
+ "step": 127
771
+ },
772
+ {
773
+ "epoch": 0.45,
774
+ "learning_rate": 1.9267431492385524e-05,
775
+ "loss": 0.3507,
776
+ "step": 128
777
+ },
778
+ {
779
+ "epoch": 0.45,
780
+ "learning_rate": 1.9253179373748504e-05,
781
+ "loss": 0.3482,
782
+ "step": 129
783
+ },
784
+ {
785
+ "epoch": 0.45,
786
+ "learning_rate": 1.9238795325112867e-05,
787
+ "loss": 0.3215,
788
+ "step": 130
789
+ },
790
+ {
791
+ "epoch": 0.46,
792
+ "learning_rate": 1.9224279551563533e-05,
793
+ "loss": 0.3517,
794
+ "step": 131
795
+ },
796
+ {
797
+ "epoch": 0.46,
798
+ "learning_rate": 1.920963226006352e-05,
799
+ "loss": 0.3089,
800
+ "step": 132
801
+ },
802
+ {
803
+ "epoch": 0.47,
804
+ "learning_rate": 1.919485365945101e-05,
805
+ "loss": 0.3251,
806
+ "step": 133
807
+ },
808
+ {
809
+ "epoch": 0.47,
810
+ "learning_rate": 1.917994396043636e-05,
811
+ "loss": 0.3404,
812
+ "step": 134
813
+ },
814
+ {
815
+ "epoch": 0.47,
816
+ "learning_rate": 1.9164903375599113e-05,
817
+ "loss": 0.3213,
818
+ "step": 135
819
+ },
820
+ {
821
+ "epoch": 0.48,
822
+ "learning_rate": 1.9149732119384942e-05,
823
+ "loss": 0.3384,
824
+ "step": 136
825
+ },
826
+ {
827
+ "epoch": 0.48,
828
+ "learning_rate": 1.9134430408102615e-05,
829
+ "loss": 0.3019,
830
+ "step": 137
831
+ },
832
+ {
833
+ "epoch": 0.48,
834
+ "learning_rate": 1.91189984599209e-05,
835
+ "loss": 0.3008,
836
+ "step": 138
837
+ },
838
+ {
839
+ "epoch": 0.49,
840
+ "learning_rate": 1.9103436494865463e-05,
841
+ "loss": 0.3201,
842
+ "step": 139
843
+ },
844
+ {
845
+ "epoch": 0.49,
846
+ "learning_rate": 1.908774473481571e-05,
847
+ "loss": 0.3281,
848
+ "step": 140
849
+ },
850
+ {
851
+ "epoch": 0.49,
852
+ "learning_rate": 1.907192340350165e-05,
853
+ "loss": 0.3266,
854
+ "step": 141
855
+ },
856
+ {
857
+ "epoch": 0.5,
858
+ "learning_rate": 1.9055972726500696e-05,
859
+ "loss": 0.3099,
860
+ "step": 142
861
+ },
862
+ {
863
+ "epoch": 0.5,
864
+ "learning_rate": 1.9039892931234434e-05,
865
+ "loss": 0.3302,
866
+ "step": 143
867
+ },
868
+ {
869
+ "epoch": 0.5,
870
+ "learning_rate": 1.9023684246965407e-05,
871
+ "loss": 0.3064,
872
+ "step": 144
873
+ },
874
+ {
875
+ "epoch": 0.51,
876
+ "learning_rate": 1.9007346904793817e-05,
877
+ "loss": 0.3015,
878
+ "step": 145
879
+ },
880
+ {
881
+ "epoch": 0.51,
882
+ "learning_rate": 1.899088113765426e-05,
883
+ "loss": 0.3221,
884
+ "step": 146
885
+ },
886
+ {
887
+ "epoch": 0.51,
888
+ "learning_rate": 1.897428718031238e-05,
889
+ "loss": 0.2858,
890
+ "step": 147
891
+ },
892
+ {
893
+ "epoch": 0.52,
894
+ "learning_rate": 1.895756526936153e-05,
895
+ "loss": 0.3266,
896
+ "step": 148
897
+ },
898
+ {
899
+ "epoch": 0.52,
900
+ "learning_rate": 1.8940715643219406e-05,
901
+ "loss": 0.3145,
902
+ "step": 149
903
+ },
904
+ {
905
+ "epoch": 0.52,
906
+ "learning_rate": 1.8923738542124644e-05,
907
+ "loss": 0.3107,
908
+ "step": 150
909
+ },
910
+ {
911
+ "epoch": 0.53,
912
+ "learning_rate": 1.8906634208133386e-05,
913
+ "loss": 0.3132,
914
+ "step": 151
915
+ },
916
+ {
917
+ "epoch": 0.53,
918
+ "learning_rate": 1.8889402885115834e-05,
919
+ "loss": 0.3041,
920
+ "step": 152
921
+ },
922
+ {
923
+ "epoch": 0.53,
924
+ "learning_rate": 1.8872044818752782e-05,
925
+ "loss": 0.3207,
926
+ "step": 153
927
+ },
928
+ {
929
+ "epoch": 0.54,
930
+ "learning_rate": 1.8854560256532098e-05,
931
+ "loss": 0.3468,
932
+ "step": 154
933
+ },
934
+ {
935
+ "epoch": 0.54,
936
+ "learning_rate": 1.8836949447745217e-05,
937
+ "loss": 0.3361,
938
+ "step": 155
939
+ },
940
+ {
941
+ "epoch": 0.55,
942
+ "learning_rate": 1.881921264348355e-05,
943
+ "loss": 0.3132,
944
+ "step": 156
945
+ },
946
+ {
947
+ "epoch": 0.55,
948
+ "learning_rate": 1.8801350096634946e-05,
949
+ "loss": 0.3392,
950
+ "step": 157
951
+ },
952
+ {
953
+ "epoch": 0.55,
954
+ "learning_rate": 1.8783362061880063e-05,
955
+ "loss": 0.2692,
956
+ "step": 158
957
+ },
958
+ {
959
+ "epoch": 0.56,
960
+ "learning_rate": 1.8765248795688726e-05,
961
+ "loss": 0.2989,
962
+ "step": 159
963
+ },
964
+ {
965
+ "epoch": 0.56,
966
+ "learning_rate": 1.8747010556316304e-05,
967
+ "loss": 0.3209,
968
+ "step": 160
969
+ },
970
+ {
971
+ "epoch": 0.56,
972
+ "learning_rate": 1.8728647603800004e-05,
973
+ "loss": 0.3125,
974
+ "step": 161
975
+ },
976
+ {
977
+ "epoch": 0.57,
978
+ "learning_rate": 1.8710160199955158e-05,
979
+ "loss": 0.2904,
980
+ "step": 162
981
+ },
982
+ {
983
+ "epoch": 0.57,
984
+ "learning_rate": 1.869154860837151e-05,
985
+ "loss": 0.3057,
986
+ "step": 163
987
+ },
988
+ {
989
+ "epoch": 0.57,
990
+ "learning_rate": 1.8672813094409453e-05,
991
+ "loss": 0.3337,
992
+ "step": 164
993
+ },
994
+ {
995
+ "epoch": 0.58,
996
+ "learning_rate": 1.8653953925196225e-05,
997
+ "loss": 0.3283,
998
+ "step": 165
999
+ },
1000
+ {
1001
+ "epoch": 0.58,
1002
+ "learning_rate": 1.863497136962213e-05,
1003
+ "loss": 0.3011,
1004
+ "step": 166
1005
+ },
1006
+ {
1007
+ "epoch": 0.58,
1008
+ "learning_rate": 1.8615865698336683e-05,
1009
+ "loss": 0.3222,
1010
+ "step": 167
1011
+ },
1012
+ {
1013
+ "epoch": 0.59,
1014
+ "learning_rate": 1.8596637183744762e-05,
1015
+ "loss": 0.3322,
1016
+ "step": 168
1017
+ },
1018
+ {
1019
+ "epoch": 0.59,
1020
+ "learning_rate": 1.8577286100002723e-05,
1021
+ "loss": 0.3321,
1022
+ "step": 169
1023
+ },
1024
+ {
1025
+ "epoch": 0.59,
1026
+ "learning_rate": 1.8557812723014476e-05,
1027
+ "loss": 0.3174,
1028
+ "step": 170
1029
+ },
1030
+ {
1031
+ "epoch": 0.6,
1032
+ "learning_rate": 1.853821733042758e-05,
1033
+ "loss": 0.2946,
1034
+ "step": 171
1035
+ },
1036
+ {
1037
+ "epoch": 0.6,
1038
+ "learning_rate": 1.851850020162926e-05,
1039
+ "loss": 0.2985,
1040
+ "step": 172
1041
+ },
1042
+ {
1043
+ "epoch": 0.6,
1044
+ "learning_rate": 1.8498661617742426e-05,
1045
+ "loss": 0.3402,
1046
+ "step": 173
1047
+ },
1048
+ {
1049
+ "epoch": 0.61,
1050
+ "learning_rate": 1.8478701861621686e-05,
1051
+ "loss": 0.3113,
1052
+ "step": 174
1053
+ },
1054
+ {
1055
+ "epoch": 0.61,
1056
+ "learning_rate": 1.8458621217849285e-05,
1057
+ "loss": 0.3225,
1058
+ "step": 175
1059
+ },
1060
+ {
1061
+ "epoch": 0.62,
1062
+ "learning_rate": 1.8438419972731066e-05,
1063
+ "loss": 0.3239,
1064
+ "step": 176
1065
+ },
1066
+ {
1067
+ "epoch": 0.62,
1068
+ "learning_rate": 1.841809841429238e-05,
1069
+ "loss": 0.3042,
1070
+ "step": 177
1071
+ },
1072
+ {
1073
+ "epoch": 0.62,
1074
+ "learning_rate": 1.8397656832273982e-05,
1075
+ "loss": 0.3129,
1076
+ "step": 178
1077
+ },
1078
+ {
1079
+ "epoch": 0.63,
1080
+ "learning_rate": 1.8377095518127896e-05,
1081
+ "loss": 0.3148,
1082
+ "step": 179
1083
+ },
1084
+ {
1085
+ "epoch": 0.63,
1086
+ "learning_rate": 1.8356414765013267e-05,
1087
+ "loss": 0.3144,
1088
+ "step": 180
1089
+ },
1090
+ {
1091
+ "epoch": 0.63,
1092
+ "learning_rate": 1.8335614867792183e-05,
1093
+ "loss": 0.3121,
1094
+ "step": 181
1095
+ },
1096
+ {
1097
+ "epoch": 0.64,
1098
+ "learning_rate": 1.8314696123025456e-05,
1099
+ "loss": 0.3249,
1100
+ "step": 182
1101
+ },
1102
+ {
1103
+ "epoch": 0.64,
1104
+ "learning_rate": 1.8293658828968397e-05,
1105
+ "loss": 0.2985,
1106
+ "step": 183
1107
+ },
1108
+ {
1109
+ "epoch": 0.64,
1110
+ "learning_rate": 1.8272503285566587e-05,
1111
+ "loss": 0.3251,
1112
+ "step": 184
1113
+ },
1114
+ {
1115
+ "epoch": 0.65,
1116
+ "learning_rate": 1.825122979445157e-05,
1117
+ "loss": 0.3253,
1118
+ "step": 185
1119
+ },
1120
+ {
1121
+ "epoch": 0.65,
1122
+ "learning_rate": 1.8229838658936566e-05,
1123
+ "loss": 0.3189,
1124
+ "step": 186
1125
+ },
1126
+ {
1127
+ "epoch": 0.65,
1128
+ "learning_rate": 1.820833018401215e-05,
1129
+ "loss": 0.3205,
1130
+ "step": 187
1131
+ },
1132
+ {
1133
+ "epoch": 0.66,
1134
+ "learning_rate": 1.81867046763419e-05,
1135
+ "loss": 0.2983,
1136
+ "step": 188
1137
+ },
1138
+ {
1139
+ "epoch": 0.66,
1140
+ "learning_rate": 1.8164962444258016e-05,
1141
+ "loss": 0.2948,
1142
+ "step": 189
1143
+ },
1144
+ {
1145
+ "epoch": 0.66,
1146
+ "learning_rate": 1.8143103797756942e-05,
1147
+ "loss": 0.2961,
1148
+ "step": 190
1149
+ },
1150
+ {
1151
+ "epoch": 0.67,
1152
+ "learning_rate": 1.812112904849492e-05,
1153
+ "loss": 0.2914,
1154
+ "step": 191
1155
+ },
1156
+ {
1157
+ "epoch": 0.67,
1158
+ "learning_rate": 1.8099038509783586e-05,
1159
+ "loss": 0.3055,
1160
+ "step": 192
1161
+ },
1162
+ {
1163
+ "epoch": 0.67,
1164
+ "learning_rate": 1.807683249658545e-05,
1165
+ "loss": 0.306,
1166
+ "step": 193
1167
+ },
1168
+ {
1169
+ "epoch": 0.68,
1170
+ "learning_rate": 1.805451132550946e-05,
1171
+ "loss": 0.3117,
1172
+ "step": 194
1173
+ },
1174
+ {
1175
+ "epoch": 0.68,
1176
+ "learning_rate": 1.803207531480645e-05,
1177
+ "loss": 0.3164,
1178
+ "step": 195
1179
+ },
1180
+ {
1181
+ "epoch": 0.69,
1182
+ "learning_rate": 1.8009524784364615e-05,
1183
+ "loss": 0.322,
1184
+ "step": 196
1185
+ },
1186
+ {
1187
+ "epoch": 0.69,
1188
+ "learning_rate": 1.7986860055704952e-05,
1189
+ "loss": 0.3073,
1190
+ "step": 197
1191
+ },
1192
+ {
1193
+ "epoch": 0.69,
1194
+ "learning_rate": 1.7964081451976673e-05,
1195
+ "loss": 0.3275,
1196
+ "step": 198
1197
+ },
1198
+ {
1199
+ "epoch": 0.7,
1200
+ "learning_rate": 1.7941189297952598e-05,
1201
+ "loss": 0.3176,
1202
+ "step": 199
1203
+ },
1204
+ {
1205
+ "epoch": 0.7,
1206
+ "learning_rate": 1.791818392002452e-05,
1207
+ "loss": 0.3147,
1208
+ "step": 200
1209
+ },
1210
+ {
1211
+ "epoch": 0.7,
1212
+ "learning_rate": 1.7895065646198567e-05,
1213
+ "loss": 0.2904,
1214
+ "step": 201
1215
+ },
1216
+ {
1217
+ "epoch": 0.71,
1218
+ "learning_rate": 1.7871834806090502e-05,
1219
+ "loss": 0.3099,
1220
+ "step": 202
1221
+ },
1222
+ {
1223
+ "epoch": 0.71,
1224
+ "learning_rate": 1.7848491730921046e-05,
1225
+ "loss": 0.2985,
1226
+ "step": 203
1227
+ },
1228
+ {
1229
+ "epoch": 0.71,
1230
+ "learning_rate": 1.7825036753511143e-05,
1231
+ "loss": 0.32,
1232
+ "step": 204
1233
+ },
1234
+ {
1235
+ "epoch": 0.72,
1236
+ "learning_rate": 1.780147020827721e-05,
1237
+ "loss": 0.3185,
1238
+ "step": 205
1239
+ },
1240
+ {
1241
+ "epoch": 0.72,
1242
+ "learning_rate": 1.7777792431226384e-05,
1243
+ "loss": 0.3074,
1244
+ "step": 206
1245
+ },
1246
+ {
1247
+ "epoch": 0.72,
1248
+ "learning_rate": 1.7754003759951714e-05,
1249
+ "loss": 0.3202,
1250
+ "step": 207
1251
+ },
1252
+ {
1253
+ "epoch": 0.73,
1254
+ "learning_rate": 1.773010453362737e-05,
1255
+ "loss": 0.3039,
1256
+ "step": 208
1257
+ },
1258
+ {
1259
+ "epoch": 0.73,
1260
+ "learning_rate": 1.7706095093003787e-05,
1261
+ "loss": 0.3006,
1262
+ "step": 209
1263
+ },
1264
+ {
1265
+ "epoch": 0.73,
1266
+ "learning_rate": 1.7681975780402807e-05,
1267
+ "loss": 0.3183,
1268
+ "step": 210
1269
+ },
1270
+ {
1271
+ "epoch": 0.74,
1272
+ "learning_rate": 1.7657746939712817e-05,
1273
+ "loss": 0.3519,
1274
+ "step": 211
1275
+ },
1276
+ {
1277
+ "epoch": 0.74,
1278
+ "learning_rate": 1.7633408916383826e-05,
1279
+ "loss": 0.3174,
1280
+ "step": 212
1281
+ },
1282
+ {
1283
+ "epoch": 0.74,
1284
+ "learning_rate": 1.760896205742255e-05,
1285
+ "loss": 0.2977,
1286
+ "step": 213
1287
+ },
1288
+ {
1289
+ "epoch": 0.75,
1290
+ "learning_rate": 1.7584406711387462e-05,
1291
+ "loss": 0.2996,
1292
+ "step": 214
1293
+ },
1294
+ {
1295
+ "epoch": 0.75,
1296
+ "learning_rate": 1.755974322838382e-05,
1297
+ "loss": 0.313,
1298
+ "step": 215
1299
+ },
1300
+ {
1301
+ "epoch": 0.76,
1302
+ "learning_rate": 1.7534971960058684e-05,
1303
+ "loss": 0.304,
1304
+ "step": 216
1305
+ },
1306
+ {
1307
+ "epoch": 0.76,
1308
+ "learning_rate": 1.7510093259595887e-05,
1309
+ "loss": 0.2892,
1310
+ "step": 217
1311
+ },
1312
+ {
1313
+ "epoch": 0.76,
1314
+ "learning_rate": 1.7485107481711014e-05,
1315
+ "loss": 0.3173,
1316
+ "step": 218
1317
+ },
1318
+ {
1319
+ "epoch": 0.77,
1320
+ "learning_rate": 1.7460014982646334e-05,
1321
+ "loss": 0.3111,
1322
+ "step": 219
1323
+ },
1324
+ {
1325
+ "epoch": 0.77,
1326
+ "learning_rate": 1.743481612016573e-05,
1327
+ "loss": 0.3033,
1328
+ "step": 220
1329
+ },
1330
+ {
1331
+ "epoch": 0.77,
1332
+ "learning_rate": 1.7409511253549592e-05,
1333
+ "loss": 0.3089,
1334
+ "step": 221
1335
+ },
1336
+ {
1337
+ "epoch": 0.78,
1338
+ "learning_rate": 1.7384100743589698e-05,
1339
+ "loss": 0.306,
1340
+ "step": 222
1341
+ },
1342
+ {
1343
+ "epoch": 0.78,
1344
+ "learning_rate": 1.735858495258406e-05,
1345
+ "loss": 0.3195,
1346
+ "step": 223
1347
+ },
1348
+ {
1349
+ "epoch": 0.78,
1350
+ "learning_rate": 1.733296424433178e-05,
1351
+ "loss": 0.3039,
1352
+ "step": 224
1353
+ },
1354
+ {
1355
+ "epoch": 0.79,
1356
+ "learning_rate": 1.7307238984127832e-05,
1357
+ "loss": 0.3149,
1358
+ "step": 225
1359
+ },
1360
+ {
1361
+ "epoch": 0.79,
1362
+ "learning_rate": 1.7281409538757886e-05,
1363
+ "loss": 0.329,
1364
+ "step": 226
1365
+ },
1366
+ {
1367
+ "epoch": 0.79,
1368
+ "learning_rate": 1.7255476276493057e-05,
1369
+ "loss": 0.2977,
1370
+ "step": 227
1371
+ },
1372
+ {
1373
+ "epoch": 0.8,
1374
+ "learning_rate": 1.722943956708466e-05,
1375
+ "loss": 0.3077,
1376
+ "step": 228
1377
+ },
1378
+ {
1379
+ "epoch": 0.8,
1380
+ "learning_rate": 1.720329978175894e-05,
1381
+ "loss": 0.3393,
1382
+ "step": 229
1383
+ },
1384
+ {
1385
+ "epoch": 0.8,
1386
+ "learning_rate": 1.7177057293211786e-05,
1387
+ "loss": 0.3094,
1388
+ "step": 230
1389
+ },
1390
+ {
1391
+ "epoch": 0.81,
1392
+ "learning_rate": 1.715071247560339e-05,
1393
+ "loss": 0.303,
1394
+ "step": 231
1395
+ },
1396
+ {
1397
+ "epoch": 0.81,
1398
+ "learning_rate": 1.7124265704552948e-05,
1399
+ "loss": 0.301,
1400
+ "step": 232
1401
+ },
1402
+ {
1403
+ "epoch": 0.81,
1404
+ "learning_rate": 1.7097717357133286e-05,
1405
+ "loss": 0.3235,
1406
+ "step": 233
1407
+ },
1408
+ {
1409
+ "epoch": 0.82,
1410
+ "learning_rate": 1.7071067811865477e-05,
1411
+ "loss": 0.3173,
1412
+ "step": 234
1413
+ },
1414
+ {
1415
+ "epoch": 0.82,
1416
+ "learning_rate": 1.704431744871346e-05,
1417
+ "loss": 0.2904,
1418
+ "step": 235
1419
+ },
1420
+ {
1421
+ "epoch": 0.83,
1422
+ "learning_rate": 1.701746664907862e-05,
1423
+ "loss": 0.305,
1424
+ "step": 236
1425
+ },
1426
+ {
1427
+ "epoch": 0.83,
1428
+ "learning_rate": 1.6990515795794332e-05,
1429
+ "loss": 0.3235,
1430
+ "step": 237
1431
+ },
1432
+ {
1433
+ "epoch": 0.83,
1434
+ "learning_rate": 1.696346527312053e-05,
1435
+ "loss": 0.3007,
1436
+ "step": 238
1437
+ },
1438
+ {
1439
+ "epoch": 0.84,
1440
+ "learning_rate": 1.6936315466738204e-05,
1441
+ "loss": 0.3199,
1442
+ "step": 239
1443
+ },
1444
+ {
1445
+ "epoch": 0.84,
1446
+ "learning_rate": 1.6909066763743914e-05,
1447
+ "loss": 0.3113,
1448
+ "step": 240
1449
+ },
1450
+ {
1451
+ "epoch": 0.84,
1452
+ "learning_rate": 1.6881719552644275e-05,
1453
+ "loss": 0.2918,
1454
+ "step": 241
1455
+ },
1456
+ {
1457
+ "epoch": 0.85,
1458
+ "learning_rate": 1.68542742233504e-05,
1459
+ "loss": 0.2935,
1460
+ "step": 242
1461
+ },
1462
+ {
1463
+ "epoch": 0.85,
1464
+ "learning_rate": 1.682673116717236e-05,
1465
+ "loss": 0.2945,
1466
+ "step": 243
1467
+ },
1468
+ {
1469
+ "epoch": 0.85,
1470
+ "learning_rate": 1.6799090776813597e-05,
1471
+ "loss": 0.3153,
1472
+ "step": 244
1473
+ },
1474
+ {
1475
+ "epoch": 0.86,
1476
+ "learning_rate": 1.677135344636532e-05,
1477
+ "loss": 0.3163,
1478
+ "step": 245
1479
+ },
1480
+ {
1481
+ "epoch": 0.86,
1482
+ "learning_rate": 1.674351957130089e-05,
1483
+ "loss": 0.2929,
1484
+ "step": 246
1485
+ },
1486
+ {
1487
+ "epoch": 0.86,
1488
+ "learning_rate": 1.6715589548470187e-05,
1489
+ "loss": 0.3227,
1490
+ "step": 247
1491
+ },
1492
+ {
1493
+ "epoch": 0.87,
1494
+ "learning_rate": 1.6687563776093943e-05,
1495
+ "loss": 0.2947,
1496
+ "step": 248
1497
+ },
1498
+ {
1499
+ "epoch": 0.87,
1500
+ "learning_rate": 1.6659442653758064e-05,
1501
+ "loss": 0.3437,
1502
+ "step": 249
1503
+ },
1504
+ {
1505
+ "epoch": 0.87,
1506
+ "learning_rate": 1.6631226582407954e-05,
1507
+ "loss": 0.3386,
1508
+ "step": 250
1509
+ },
1510
+ {
1511
+ "epoch": 0.88,
1512
+ "learning_rate": 1.660291596434276e-05,
1513
+ "loss": 0.3186,
1514
+ "step": 251
1515
+ },
1516
+ {
1517
+ "epoch": 0.88,
1518
+ "learning_rate": 1.6574511203209667e-05,
1519
+ "loss": 0.3302,
1520
+ "step": 252
1521
+ },
1522
+ {
1523
+ "epoch": 0.88,
1524
+ "learning_rate": 1.654601270399814e-05,
1525
+ "loss": 0.3249,
1526
+ "step": 253
1527
+ },
1528
+ {
1529
+ "epoch": 0.89,
1530
+ "learning_rate": 1.651742087303412e-05,
1531
+ "loss": 0.3012,
1532
+ "step": 254
1533
+ },
1534
+ {
1535
+ "epoch": 0.89,
1536
+ "learning_rate": 1.648873611797429e-05,
1537
+ "loss": 0.2925,
1538
+ "step": 255
1539
+ },
1540
+ {
1541
+ "epoch": 0.9,
1542
+ "learning_rate": 1.645995884780019e-05,
1543
+ "loss": 0.3111,
1544
+ "step": 256
1545
+ },
1546
+ {
1547
+ "epoch": 0.9,
1548
+ "learning_rate": 1.6431089472812445e-05,
1549
+ "loss": 0.3074,
1550
+ "step": 257
1551
+ },
1552
+ {
1553
+ "epoch": 0.9,
1554
+ "learning_rate": 1.640212840462488e-05,
1555
+ "loss": 0.3157,
1556
+ "step": 258
1557
+ },
1558
+ {
1559
+ "epoch": 0.91,
1560
+ "learning_rate": 1.6373076056158676e-05,
1561
+ "loss": 0.3237,
1562
+ "step": 259
1563
+ },
1564
+ {
1565
+ "epoch": 0.91,
1566
+ "learning_rate": 1.6343932841636455e-05,
1567
+ "loss": 0.2834,
1568
+ "step": 260
1569
+ },
1570
+ {
1571
+ "epoch": 0.91,
1572
+ "learning_rate": 1.6314699176576404e-05,
1573
+ "loss": 0.3072,
1574
+ "step": 261
1575
+ },
1576
+ {
1577
+ "epoch": 0.92,
1578
+ "learning_rate": 1.6285375477786322e-05,
1579
+ "loss": 0.2928,
1580
+ "step": 262
1581
+ },
1582
+ {
1583
+ "epoch": 0.92,
1584
+ "learning_rate": 1.62559621633577e-05,
1585
+ "loss": 0.3005,
1586
+ "step": 263
1587
+ },
1588
+ {
1589
+ "epoch": 0.92,
1590
+ "learning_rate": 1.6226459652659752e-05,
1591
+ "loss": 0.3014,
1592
+ "step": 264
1593
+ },
1594
+ {
1595
+ "epoch": 0.93,
1596
+ "learning_rate": 1.619686836633343e-05,
1597
+ "loss": 0.3053,
1598
+ "step": 265
1599
+ },
1600
+ {
1601
+ "epoch": 0.93,
1602
+ "learning_rate": 1.6167188726285433e-05,
1603
+ "loss": 0.3024,
1604
+ "step": 266
1605
+ },
1606
+ {
1607
+ "epoch": 0.93,
1608
+ "learning_rate": 1.6137421155682186e-05,
1609
+ "loss": 0.3018,
1610
+ "step": 267
1611
+ },
1612
+ {
1613
+ "epoch": 0.94,
1614
+ "learning_rate": 1.6107566078943818e-05,
1615
+ "loss": 0.3139,
1616
+ "step": 268
1617
+ },
1618
+ {
1619
+ "epoch": 0.94,
1620
+ "learning_rate": 1.6077623921738102e-05,
1621
+ "loss": 0.2872,
1622
+ "step": 269
1623
+ },
1624
+ {
1625
+ "epoch": 0.94,
1626
+ "learning_rate": 1.6047595110974376e-05,
1627
+ "loss": 0.2917,
1628
+ "step": 270
1629
+ },
1630
+ {
1631
+ "epoch": 0.95,
1632
+ "learning_rate": 1.6017480074797484e-05,
1633
+ "loss": 0.2947,
1634
+ "step": 271
1635
+ },
1636
+ {
1637
+ "epoch": 0.95,
1638
+ "learning_rate": 1.598727924258164e-05,
1639
+ "loss": 0.2684,
1640
+ "step": 272
1641
+ },
1642
+ {
1643
+ "epoch": 0.95,
1644
+ "learning_rate": 1.5956993044924334e-05,
1645
+ "loss": 0.3185,
1646
+ "step": 273
1647
+ },
1648
+ {
1649
+ "epoch": 0.96,
1650
+ "learning_rate": 1.592662191364017e-05,
1651
+ "loss": 0.3205,
1652
+ "step": 274
1653
+ },
1654
+ {
1655
+ "epoch": 0.96,
1656
+ "learning_rate": 1.589616628175472e-05,
1657
+ "loss": 0.304,
1658
+ "step": 275
1659
+ },
1660
+ {
1661
+ "epoch": 0.97,
1662
+ "learning_rate": 1.5865626583498355e-05,
1663
+ "loss": 0.2973,
1664
+ "step": 276
1665
+ },
1666
+ {
1667
+ "epoch": 0.97,
1668
+ "learning_rate": 1.5835003254300038e-05,
1669
+ "loss": 0.3054,
1670
+ "step": 277
1671
+ },
1672
+ {
1673
+ "epoch": 0.97,
1674
+ "learning_rate": 1.5804296730781134e-05,
1675
+ "loss": 0.3002,
1676
+ "step": 278
1677
+ },
1678
+ {
1679
+ "epoch": 0.98,
1680
+ "learning_rate": 1.5773507450749172e-05,
1681
+ "loss": 0.3246,
1682
+ "step": 279
1683
+ },
1684
+ {
1685
+ "epoch": 0.98,
1686
+ "learning_rate": 1.574263585319161e-05,
1687
+ "loss": 0.2929,
1688
+ "step": 280
1689
+ },
1690
+ {
1691
+ "epoch": 0.98,
1692
+ "learning_rate": 1.5711682378269567e-05,
1693
+ "loss": 0.3271,
1694
+ "step": 281
1695
+ },
1696
+ {
1697
+ "epoch": 0.99,
1698
+ "learning_rate": 1.568064746731156e-05,
1699
+ "loss": 0.3068,
1700
+ "step": 282
1701
+ },
1702
+ {
1703
+ "epoch": 0.99,
1704
+ "learning_rate": 1.56495315628072e-05,
1705
+ "loss": 0.2965,
1706
+ "step": 283
1707
+ },
1708
+ {
1709
+ "epoch": 0.99,
1710
+ "learning_rate": 1.5618335108400893e-05,
1711
+ "loss": 0.3142,
1712
+ "step": 284
1713
+ },
1714
+ {
1715
+ "epoch": 1.0,
1716
+ "learning_rate": 1.5587058548885505e-05,
1717
+ "loss": 0.3324,
1718
+ "step": 285
1719
+ },
1720
+ {
1721
+ "epoch": 1.0,
1722
+ "learning_rate": 1.5555702330196024e-05,
1723
+ "loss": 0.3296,
1724
+ "step": 286
1725
+ },
1726
+ {
1727
+ "epoch": 1.0,
1728
+ "eval_loss": 0.301243394613266,
1729
+ "eval_runtime": 40.9753,
1730
+ "eval_samples_per_second": 18.231,
1731
+ "eval_steps_per_second": 0.586,
1732
+ "step": 286
1733
+ },
1734
+ {
1735
+ "epoch": 1.0,
1736
+ "learning_rate": 1.5524266899403206e-05,
1737
+ "loss": 0.256,
1738
+ "step": 287
1739
+ },
1740
+ {
1741
+ "epoch": 1.01,
1742
+ "learning_rate": 1.5492752704707198e-05,
1743
+ "loss": 0.2517,
1744
+ "step": 288
1745
+ },
1746
+ {
1747
+ "epoch": 1.01,
1748
+ "learning_rate": 1.546116019543115e-05,
1749
+ "loss": 0.2426,
1750
+ "step": 289
1751
+ },
1752
+ {
1753
+ "epoch": 1.01,
1754
+ "learning_rate": 1.542948982201479e-05,
1755
+ "loss": 0.236,
1756
+ "step": 290
1757
+ },
1758
+ {
1759
+ "epoch": 1.02,
1760
+ "learning_rate": 1.5397742036008033e-05,
1761
+ "loss": 0.2415,
1762
+ "step": 291
1763
+ },
1764
+ {
1765
+ "epoch": 1.02,
1766
+ "learning_rate": 1.536591729006453e-05,
1767
+ "loss": 0.2506,
1768
+ "step": 292
1769
+ },
1770
+ {
1771
+ "epoch": 1.02,
1772
+ "learning_rate": 1.5334016037935197e-05,
1773
+ "loss": 0.2427,
1774
+ "step": 293
1775
+ },
1776
+ {
1777
+ "epoch": 1.03,
1778
+ "learning_rate": 1.530203873446177e-05,
1779
+ "loss": 0.2154,
1780
+ "step": 294
1781
+ },
1782
+ {
1783
+ "epoch": 1.03,
1784
+ "learning_rate": 1.526998583557031e-05,
1785
+ "loss": 0.2462,
1786
+ "step": 295
1787
+ },
1788
+ {
1789
+ "epoch": 1.03,
1790
+ "learning_rate": 1.5237857798264701e-05,
1791
+ "loss": 0.2327,
1792
+ "step": 296
1793
+ },
1794
+ {
1795
+ "epoch": 1.04,
1796
+ "learning_rate": 1.520565508062013e-05,
1797
+ "loss": 0.2352,
1798
+ "step": 297
1799
+ },
1800
+ {
1801
+ "epoch": 1.04,
1802
+ "learning_rate": 1.5173378141776569e-05,
1803
+ "loss": 0.2478,
1804
+ "step": 298
1805
+ },
1806
+ {
1807
+ "epoch": 1.05,
1808
+ "learning_rate": 1.5141027441932217e-05,
1809
+ "loss": 0.2364,
1810
+ "step": 299
1811
+ },
1812
+ {
1813
+ "epoch": 1.05,
1814
+ "learning_rate": 1.5108603442336949e-05,
1815
+ "loss": 0.2393,
1816
+ "step": 300
1817
+ },
1818
+ {
1819
+ "epoch": 1.05,
1820
+ "learning_rate": 1.5076106605285725e-05,
1821
+ "loss": 0.25,
1822
+ "step": 301
1823
+ },
1824
+ {
1825
+ "epoch": 1.06,
1826
+ "learning_rate": 1.5043537394112008e-05,
1827
+ "loss": 0.2348,
1828
+ "step": 302
1829
+ },
1830
+ {
1831
+ "epoch": 1.06,
1832
+ "learning_rate": 1.5010896273181166e-05,
1833
+ "loss": 0.2279,
1834
+ "step": 303
1835
+ },
1836
+ {
1837
+ "epoch": 1.06,
1838
+ "learning_rate": 1.4978183707883828e-05,
1839
+ "loss": 0.2225,
1840
+ "step": 304
1841
+ },
1842
+ {
1843
+ "epoch": 1.07,
1844
+ "learning_rate": 1.4945400164629277e-05,
1845
+ "loss": 0.2349,
1846
+ "step": 305
1847
+ },
1848
+ {
1849
+ "epoch": 1.07,
1850
+ "learning_rate": 1.4912546110838775e-05,
1851
+ "loss": 0.24,
1852
+ "step": 306
1853
+ },
1854
+ {
1855
+ "epoch": 1.07,
1856
+ "learning_rate": 1.4879622014938914e-05,
1857
+ "loss": 0.2254,
1858
+ "step": 307
1859
+ },
1860
+ {
1861
+ "epoch": 1.08,
1862
+ "learning_rate": 1.4846628346354934e-05,
1863
+ "loss": 0.2321,
1864
+ "step": 308
1865
+ },
1866
+ {
1867
+ "epoch": 1.08,
1868
+ "learning_rate": 1.4813565575504023e-05,
1869
+ "loss": 0.2375,
1870
+ "step": 309
1871
+ },
1872
+ {
1873
+ "epoch": 1.08,
1874
+ "learning_rate": 1.4780434173788617e-05,
1875
+ "loss": 0.2294,
1876
+ "step": 310
1877
+ },
1878
+ {
1879
+ "epoch": 1.09,
1880
+ "learning_rate": 1.4747234613589688e-05,
1881
+ "loss": 0.2424,
1882
+ "step": 311
1883
+ },
1884
+ {
1885
+ "epoch": 1.09,
1886
+ "learning_rate": 1.4713967368259981e-05,
1887
+ "loss": 0.2402,
1888
+ "step": 312
1889
+ },
1890
+ {
1891
+ "epoch": 1.09,
1892
+ "learning_rate": 1.4680632912117287e-05,
1893
+ "loss": 0.2424,
1894
+ "step": 313
1895
+ },
1896
+ {
1897
+ "epoch": 1.1,
1898
+ "learning_rate": 1.4647231720437687e-05,
1899
+ "loss": 0.2394,
1900
+ "step": 314
1901
+ },
1902
+ {
1903
+ "epoch": 1.1,
1904
+ "learning_rate": 1.4613764269448752e-05,
1905
+ "loss": 0.254,
1906
+ "step": 315
1907
+ },
1908
+ {
1909
+ "epoch": 1.1,
1910
+ "learning_rate": 1.458023103632277e-05,
1911
+ "loss": 0.2367,
1912
+ "step": 316
1913
+ },
1914
+ {
1915
+ "epoch": 1.11,
1916
+ "learning_rate": 1.4546632499169938e-05,
1917
+ "loss": 0.2392,
1918
+ "step": 317
1919
+ },
1920
+ {
1921
+ "epoch": 1.11,
1922
+ "learning_rate": 1.4512969137031538e-05,
1923
+ "loss": 0.2214,
1924
+ "step": 318
1925
+ },
1926
+ {
1927
+ "epoch": 1.12,
1928
+ "learning_rate": 1.4479241429873121e-05,
1929
+ "loss": 0.2439,
1930
+ "step": 319
1931
+ },
1932
+ {
1933
+ "epoch": 1.12,
1934
+ "learning_rate": 1.444544985857766e-05,
1935
+ "loss": 0.2445,
1936
+ "step": 320
1937
+ },
1938
+ {
1939
+ "epoch": 1.12,
1940
+ "learning_rate": 1.4411594904938682e-05,
1941
+ "loss": 0.241,
1942
+ "step": 321
1943
+ },
1944
+ {
1945
+ "epoch": 1.13,
1946
+ "learning_rate": 1.4377677051653404e-05,
1947
+ "loss": 0.2244,
1948
+ "step": 322
1949
+ },
1950
+ {
1951
+ "epoch": 1.13,
1952
+ "learning_rate": 1.434369678231587e-05,
1953
+ "loss": 0.245,
1954
+ "step": 323
1955
+ },
1956
+ {
1957
+ "epoch": 1.13,
1958
+ "learning_rate": 1.4309654581410024e-05,
1959
+ "loss": 0.2158,
1960
+ "step": 324
1961
+ },
1962
+ {
1963
+ "epoch": 1.14,
1964
+ "learning_rate": 1.4275550934302822e-05,
1965
+ "loss": 0.2378,
1966
+ "step": 325
1967
+ },
1968
+ {
1969
+ "epoch": 1.14,
1970
+ "learning_rate": 1.4241386327237312e-05,
1971
+ "loss": 0.2303,
1972
+ "step": 326
1973
+ },
1974
+ {
1975
+ "epoch": 1.14,
1976
+ "learning_rate": 1.420716124732569e-05,
1977
+ "loss": 0.2329,
1978
+ "step": 327
1979
+ },
1980
+ {
1981
+ "epoch": 1.15,
1982
+ "learning_rate": 1.4172876182542372e-05,
1983
+ "loss": 0.2388,
1984
+ "step": 328
1985
+ },
1986
+ {
1987
+ "epoch": 1.15,
1988
+ "learning_rate": 1.4138531621717018e-05,
1989
+ "loss": 0.2242,
1990
+ "step": 329
1991
+ },
1992
+ {
1993
+ "epoch": 1.15,
1994
+ "learning_rate": 1.410412805452757e-05,
1995
+ "loss": 0.2396,
1996
+ "step": 330
1997
+ },
1998
+ {
1999
+ "epoch": 1.16,
2000
+ "learning_rate": 1.4069665971493276e-05,
2001
+ "loss": 0.244,
2002
+ "step": 331
2003
+ },
2004
+ {
2005
+ "epoch": 1.16,
2006
+ "learning_rate": 1.4035145863967692e-05,
2007
+ "loss": 0.2335,
2008
+ "step": 332
2009
+ },
2010
+ {
2011
+ "epoch": 1.16,
2012
+ "learning_rate": 1.4000568224131672e-05,
2013
+ "loss": 0.2326,
2014
+ "step": 333
2015
+ },
2016
+ {
2017
+ "epoch": 1.17,
2018
+ "learning_rate": 1.3965933544986351e-05,
2019
+ "loss": 0.2299,
2020
+ "step": 334
2021
+ },
2022
+ {
2023
+ "epoch": 1.17,
2024
+ "learning_rate": 1.3931242320346132e-05,
2025
+ "loss": 0.2377,
2026
+ "step": 335
2027
+ },
2028
+ {
2029
+ "epoch": 1.17,
2030
+ "learning_rate": 1.3896495044831622e-05,
2031
+ "loss": 0.2347,
2032
+ "step": 336
2033
+ },
2034
+ {
2035
+ "epoch": 1.18,
2036
+ "learning_rate": 1.3861692213862585e-05,
2037
+ "loss": 0.23,
2038
+ "step": 337
2039
+ },
2040
+ {
2041
+ "epoch": 1.18,
2042
+ "learning_rate": 1.3826834323650899e-05,
2043
+ "loss": 0.2336,
2044
+ "step": 338
2045
+ },
2046
+ {
2047
+ "epoch": 1.19,
2048
+ "learning_rate": 1.3791921871193456e-05,
2049
+ "loss": 0.2382,
2050
+ "step": 339
2051
+ },
2052
+ {
2053
+ "epoch": 1.19,
2054
+ "learning_rate": 1.3756955354265085e-05,
2055
+ "loss": 0.2595,
2056
+ "step": 340
2057
+ },
2058
+ {
2059
+ "epoch": 1.19,
2060
+ "learning_rate": 1.3721935271411464e-05,
2061
+ "loss": 0.2497,
2062
+ "step": 341
2063
+ },
2064
+ {
2065
+ "epoch": 1.2,
2066
+ "learning_rate": 1.368686212194199e-05,
2067
+ "loss": 0.2245,
2068
+ "step": 342
2069
+ },
2070
+ {
2071
+ "epoch": 1.2,
2072
+ "learning_rate": 1.3651736405922686e-05,
2073
+ "loss": 0.2213,
2074
+ "step": 343
2075
+ },
2076
+ {
2077
+ "epoch": 1.2,
2078
+ "learning_rate": 1.361655862416905e-05,
2079
+ "loss": 0.2231,
2080
+ "step": 344
2081
+ },
2082
+ {
2083
+ "epoch": 1.21,
2084
+ "learning_rate": 1.3581329278238928e-05,
2085
+ "loss": 0.2354,
2086
+ "step": 345
2087
+ },
2088
+ {
2089
+ "epoch": 1.21,
2090
+ "learning_rate": 1.3546048870425356e-05,
2091
+ "loss": 0.2328,
2092
+ "step": 346
2093
+ },
2094
+ {
2095
+ "epoch": 1.21,
2096
+ "learning_rate": 1.3510717903749402e-05,
2097
+ "loss": 0.2189,
2098
+ "step": 347
2099
+ },
2100
+ {
2101
+ "epoch": 1.22,
2102
+ "learning_rate": 1.3475336881952988e-05,
2103
+ "loss": 0.248,
2104
+ "step": 348
2105
+ },
2106
+ {
2107
+ "epoch": 1.22,
2108
+ "learning_rate": 1.3439906309491713e-05,
2109
+ "loss": 0.2427,
2110
+ "step": 349
2111
+ },
2112
+ {
2113
+ "epoch": 1.22,
2114
+ "learning_rate": 1.340442669152766e-05,
2115
+ "loss": 0.2249,
2116
+ "step": 350
2117
+ },
2118
+ {
2119
+ "epoch": 1.23,
2120
+ "learning_rate": 1.3368898533922202e-05,
2121
+ "loss": 0.2513,
2122
+ "step": 351
2123
+ },
2124
+ {
2125
+ "epoch": 1.23,
2126
+ "learning_rate": 1.3333322343228763e-05,
2127
+ "loss": 0.2323,
2128
+ "step": 352
2129
+ },
2130
+ {
2131
+ "epoch": 1.23,
2132
+ "learning_rate": 1.3297698626685631e-05,
2133
+ "loss": 0.231,
2134
+ "step": 353
2135
+ },
2136
+ {
2137
+ "epoch": 1.24,
2138
+ "learning_rate": 1.3262027892208696e-05,
2139
+ "loss": 0.24,
2140
+ "step": 354
2141
+ },
2142
+ {
2143
+ "epoch": 1.24,
2144
+ "learning_rate": 1.3226310648384222e-05,
2145
+ "loss": 0.2334,
2146
+ "step": 355
2147
+ },
2148
+ {
2149
+ "epoch": 1.24,
2150
+ "learning_rate": 1.31905474044616e-05,
2151
+ "loss": 0.246,
2152
+ "step": 356
2153
+ },
2154
+ {
2155
+ "epoch": 1.25,
2156
+ "learning_rate": 1.315473867034608e-05,
2157
+ "loss": 0.2309,
2158
+ "step": 357
2159
+ },
2160
+ {
2161
+ "epoch": 1.25,
2162
+ "learning_rate": 1.311888495659149e-05,
2163
+ "loss": 0.2425,
2164
+ "step": 358
2165
+ },
2166
+ {
2167
+ "epoch": 1.26,
2168
+ "learning_rate": 1.3082986774392992e-05,
2169
+ "loss": 0.2362,
2170
+ "step": 359
2171
+ },
2172
+ {
2173
+ "epoch": 1.26,
2174
+ "learning_rate": 1.3047044635579748e-05,
2175
+ "loss": 0.2358,
2176
+ "step": 360
2177
+ },
2178
+ {
2179
+ "epoch": 1.26,
2180
+ "learning_rate": 1.3011059052607657e-05,
2181
+ "loss": 0.2245,
2182
+ "step": 361
2183
+ },
2184
+ {
2185
+ "epoch": 1.27,
2186
+ "learning_rate": 1.297503053855203e-05,
2187
+ "loss": 0.2274,
2188
+ "step": 362
2189
+ },
2190
+ {
2191
+ "epoch": 1.27,
2192
+ "learning_rate": 1.2938959607100288e-05,
2193
+ "loss": 0.2404,
2194
+ "step": 363
2195
+ },
2196
+ {
2197
+ "epoch": 1.27,
2198
+ "learning_rate": 1.2902846772544625e-05,
2199
+ "loss": 0.2283,
2200
+ "step": 364
2201
+ },
2202
+ {
2203
+ "epoch": 1.28,
2204
+ "learning_rate": 1.2866692549774683e-05,
2205
+ "loss": 0.2378,
2206
+ "step": 365
2207
+ },
2208
+ {
2209
+ "epoch": 1.28,
2210
+ "learning_rate": 1.2830497454270206e-05,
2211
+ "loss": 0.2299,
2212
+ "step": 366
2213
+ },
2214
+ {
2215
+ "epoch": 1.28,
2216
+ "learning_rate": 1.2794262002093698e-05,
2217
+ "loss": 0.2334,
2218
+ "step": 367
2219
+ },
2220
+ {
2221
+ "epoch": 1.29,
2222
+ "learning_rate": 1.2757986709883059e-05,
2223
+ "loss": 0.2416,
2224
+ "step": 368
2225
+ },
2226
+ {
2227
+ "epoch": 1.29,
2228
+ "learning_rate": 1.2721672094844221e-05,
2229
+ "loss": 0.2301,
2230
+ "step": 369
2231
+ },
2232
+ {
2233
+ "epoch": 1.29,
2234
+ "learning_rate": 1.2685318674743769e-05,
2235
+ "loss": 0.2263,
2236
+ "step": 370
2237
+ },
2238
+ {
2239
+ "epoch": 1.3,
2240
+ "learning_rate": 1.2648926967901567e-05,
2241
+ "loss": 0.231,
2242
+ "step": 371
2243
+ },
2244
+ {
2245
+ "epoch": 1.3,
2246
+ "learning_rate": 1.2612497493183365e-05,
2247
+ "loss": 0.2387,
2248
+ "step": 372
2249
+ },
2250
+ {
2251
+ "epoch": 1.3,
2252
+ "learning_rate": 1.2576030769993393e-05,
2253
+ "loss": 0.2397,
2254
+ "step": 373
2255
+ },
2256
+ {
2257
+ "epoch": 1.31,
2258
+ "learning_rate": 1.2539527318266971e-05,
2259
+ "loss": 0.2244,
2260
+ "step": 374
2261
+ },
2262
+ {
2263
+ "epoch": 1.31,
2264
+ "learning_rate": 1.2502987658463077e-05,
2265
+ "loss": 0.2308,
2266
+ "step": 375
2267
+ },
2268
+ {
2269
+ "epoch": 1.31,
2270
+ "learning_rate": 1.2466412311556952e-05,
2271
+ "loss": 0.238,
2272
+ "step": 376
2273
+ },
2274
+ {
2275
+ "epoch": 1.32,
2276
+ "learning_rate": 1.242980179903264e-05,
2277
+ "loss": 0.2455,
2278
+ "step": 377
2279
+ },
2280
+ {
2281
+ "epoch": 1.32,
2282
+ "learning_rate": 1.2393156642875579e-05,
2283
+ "loss": 0.2362,
2284
+ "step": 378
2285
+ },
2286
+ {
2287
+ "epoch": 1.33,
2288
+ "learning_rate": 1.2356477365565147e-05,
2289
+ "loss": 0.2286,
2290
+ "step": 379
2291
+ },
2292
+ {
2293
+ "epoch": 1.33,
2294
+ "learning_rate": 1.2319764490067212e-05,
2295
+ "loss": 0.2353,
2296
+ "step": 380
2297
+ },
2298
+ {
2299
+ "epoch": 1.33,
2300
+ "learning_rate": 1.2283018539826686e-05,
2301
+ "loss": 0.2215,
2302
+ "step": 381
2303
+ },
2304
+ {
2305
+ "epoch": 1.34,
2306
+ "learning_rate": 1.2246240038760042e-05,
2307
+ "loss": 0.2211,
2308
+ "step": 382
2309
+ },
2310
+ {
2311
+ "epoch": 1.34,
2312
+ "learning_rate": 1.2209429511247865e-05,
2313
+ "loss": 0.226,
2314
+ "step": 383
2315
+ },
2316
+ {
2317
+ "epoch": 1.34,
2318
+ "learning_rate": 1.217258748212737e-05,
2319
+ "loss": 0.2359,
2320
+ "step": 384
2321
+ },
2322
+ {
2323
+ "epoch": 1.35,
2324
+ "learning_rate": 1.2135714476684902e-05,
2325
+ "loss": 0.2251,
2326
+ "step": 385
2327
+ },
2328
+ {
2329
+ "epoch": 1.35,
2330
+ "learning_rate": 1.2098811020648475e-05,
2331
+ "loss": 0.2377,
2332
+ "step": 386
2333
+ },
2334
+ {
2335
+ "epoch": 1.35,
2336
+ "learning_rate": 1.2061877640180255e-05,
2337
+ "loss": 0.2235,
2338
+ "step": 387
2339
+ },
2340
+ {
2341
+ "epoch": 1.36,
2342
+ "learning_rate": 1.2024914861869064e-05,
2343
+ "loss": 0.2436,
2344
+ "step": 388
2345
+ },
2346
+ {
2347
+ "epoch": 1.36,
2348
+ "learning_rate": 1.1987923212722872e-05,
2349
+ "loss": 0.2262,
2350
+ "step": 389
2351
+ },
2352
+ {
2353
+ "epoch": 1.36,
2354
+ "learning_rate": 1.1950903220161286e-05,
2355
+ "loss": 0.2168,
2356
+ "step": 390
2357
+ },
2358
+ {
2359
+ "epoch": 1.37,
2360
+ "learning_rate": 1.1913855412008025e-05,
2361
+ "loss": 0.2366,
2362
+ "step": 391
2363
+ },
2364
+ {
2365
+ "epoch": 1.37,
2366
+ "learning_rate": 1.1876780316483401e-05,
2367
+ "loss": 0.2478,
2368
+ "step": 392
2369
+ },
2370
+ {
2371
+ "epoch": 1.37,
2372
+ "learning_rate": 1.1839678462196785e-05,
2373
+ "loss": 0.2349,
2374
+ "step": 393
2375
+ },
2376
+ {
2377
+ "epoch": 1.38,
2378
+ "learning_rate": 1.180255037813906e-05,
2379
+ "loss": 0.2293,
2380
+ "step": 394
2381
+ },
2382
+ {
2383
+ "epoch": 1.38,
2384
+ "learning_rate": 1.1765396593675098e-05,
2385
+ "loss": 0.2381,
2386
+ "step": 395
2387
+ },
2388
+ {
2389
+ "epoch": 1.38,
2390
+ "learning_rate": 1.1728217638536196e-05,
2391
+ "loss": 0.2567,
2392
+ "step": 396
2393
+ },
2394
+ {
2395
+ "epoch": 1.39,
2396
+ "learning_rate": 1.1691014042812537e-05,
2397
+ "loss": 0.2463,
2398
+ "step": 397
2399
+ },
2400
+ {
2401
+ "epoch": 1.39,
2402
+ "learning_rate": 1.1653786336945614e-05,
2403
+ "loss": 0.2293,
2404
+ "step": 398
2405
+ },
2406
+ {
2407
+ "epoch": 1.4,
2408
+ "learning_rate": 1.1616535051720686e-05,
2409
+ "loss": 0.2422,
2410
+ "step": 399
2411
+ },
2412
+ {
2413
+ "epoch": 1.4,
2414
+ "learning_rate": 1.1579260718259197e-05,
2415
+ "loss": 0.2443,
2416
+ "step": 400
2417
+ },
2418
+ {
2419
+ "epoch": 1.4,
2420
+ "learning_rate": 1.1541963868011212e-05,
2421
+ "loss": 0.2423,
2422
+ "step": 401
2423
+ },
2424
+ {
2425
+ "epoch": 1.41,
2426
+ "learning_rate": 1.1504645032747832e-05,
2427
+ "loss": 0.2475,
2428
+ "step": 402
2429
+ },
2430
+ {
2431
+ "epoch": 1.41,
2432
+ "learning_rate": 1.1467304744553618e-05,
2433
+ "loss": 0.2264,
2434
+ "step": 403
2435
+ },
2436
+ {
2437
+ "epoch": 1.41,
2438
+ "learning_rate": 1.1429943535819005e-05,
2439
+ "loss": 0.2289,
2440
+ "step": 404
2441
+ },
2442
+ {
2443
+ "epoch": 1.42,
2444
+ "learning_rate": 1.1392561939232707e-05,
2445
+ "loss": 0.2275,
2446
+ "step": 405
2447
+ },
2448
+ {
2449
+ "epoch": 1.42,
2450
+ "learning_rate": 1.1355160487774119e-05,
2451
+ "loss": 0.2271,
2452
+ "step": 406
2453
+ },
2454
+ {
2455
+ "epoch": 1.42,
2456
+ "learning_rate": 1.1317739714705732e-05,
2457
+ "loss": 0.2301,
2458
+ "step": 407
2459
+ },
2460
+ {
2461
+ "epoch": 1.43,
2462
+ "learning_rate": 1.128030015356551e-05,
2463
+ "loss": 0.2282,
2464
+ "step": 408
2465
+ },
2466
+ {
2467
+ "epoch": 1.43,
2468
+ "learning_rate": 1.124284233815931e-05,
2469
+ "loss": 0.2382,
2470
+ "step": 409
2471
+ },
2472
+ {
2473
+ "epoch": 1.43,
2474
+ "learning_rate": 1.1205366802553231e-05,
2475
+ "loss": 0.245,
2476
+ "step": 410
2477
+ },
2478
+ {
2479
+ "epoch": 1.44,
2480
+ "learning_rate": 1.1167874081066046e-05,
2481
+ "loss": 0.231,
2482
+ "step": 411
2483
+ },
2484
+ {
2485
+ "epoch": 1.44,
2486
+ "learning_rate": 1.1130364708261552e-05,
2487
+ "loss": 0.2351,
2488
+ "step": 412
2489
+ },
2490
+ {
2491
+ "epoch": 1.44,
2492
+ "learning_rate": 1.1092839218940949e-05,
2493
+ "loss": 0.2352,
2494
+ "step": 413
2495
+ },
2496
+ {
2497
+ "epoch": 1.45,
2498
+ "learning_rate": 1.1055298148135236e-05,
2499
+ "loss": 0.2297,
2500
+ "step": 414
2501
+ },
2502
+ {
2503
+ "epoch": 1.45,
2504
+ "learning_rate": 1.1017742031097562e-05,
2505
+ "loss": 0.225,
2506
+ "step": 415
2507
+ },
2508
+ {
2509
+ "epoch": 1.45,
2510
+ "learning_rate": 1.098017140329561e-05,
2511
+ "loss": 0.2324,
2512
+ "step": 416
2513
+ },
2514
+ {
2515
+ "epoch": 1.46,
2516
+ "learning_rate": 1.094258680040394e-05,
2517
+ "loss": 0.2435,
2518
+ "step": 417
2519
+ },
2520
+ {
2521
+ "epoch": 1.46,
2522
+ "learning_rate": 1.090498875829638e-05,
2523
+ "loss": 0.2341,
2524
+ "step": 418
2525
+ },
2526
+ {
2527
+ "epoch": 1.47,
2528
+ "learning_rate": 1.0867377813038367e-05,
2529
+ "loss": 0.2323,
2530
+ "step": 419
2531
+ },
2532
+ {
2533
+ "epoch": 1.47,
2534
+ "learning_rate": 1.0829754500879308e-05,
2535
+ "loss": 0.2324,
2536
+ "step": 420
2537
+ },
2538
+ {
2539
+ "epoch": 1.47,
2540
+ "learning_rate": 1.079211935824494e-05,
2541
+ "loss": 0.2287,
2542
+ "step": 421
2543
+ },
2544
+ {
2545
+ "epoch": 1.48,
2546
+ "learning_rate": 1.0754472921729661e-05,
2547
+ "loss": 0.2339,
2548
+ "step": 422
2549
+ },
2550
+ {
2551
+ "epoch": 1.48,
2552
+ "learning_rate": 1.0716815728088911e-05,
2553
+ "loss": 0.2426,
2554
+ "step": 423
2555
+ },
2556
+ {
2557
+ "epoch": 1.48,
2558
+ "learning_rate": 1.0679148314231504e-05,
2559
+ "loss": 0.2523,
2560
+ "step": 424
2561
+ },
2562
+ {
2563
+ "epoch": 1.49,
2564
+ "learning_rate": 1.0641471217211959e-05,
2565
+ "loss": 0.2467,
2566
+ "step": 425
2567
+ },
2568
+ {
2569
+ "epoch": 1.49,
2570
+ "learning_rate": 1.0603784974222862e-05,
2571
+ "loss": 0.2384,
2572
+ "step": 426
2573
+ },
2574
+ {
2575
+ "epoch": 1.49,
2576
+ "learning_rate": 1.05660901225872e-05,
2577
+ "loss": 0.2311,
2578
+ "step": 427
2579
+ },
2580
+ {
2581
+ "epoch": 1.5,
2582
+ "learning_rate": 1.0528387199750706e-05,
2583
+ "loss": 0.2375,
2584
+ "step": 428
2585
+ },
2586
+ {
2587
+ "epoch": 1.5,
2588
+ "learning_rate": 1.0490676743274181e-05,
2589
+ "loss": 0.2319,
2590
+ "step": 429
2591
+ },
2592
+ {
2593
+ "epoch": 1.5,
2594
+ "learning_rate": 1.0452959290825846e-05,
2595
+ "loss": 0.223,
2596
+ "step": 430
2597
+ },
2598
+ {
2599
+ "epoch": 1.51,
2600
+ "learning_rate": 1.0415235380173663e-05,
2601
+ "loss": 0.2497,
2602
+ "step": 431
2603
+ },
2604
+ {
2605
+ "epoch": 1.51,
2606
+ "learning_rate": 1.0377505549177683e-05,
2607
+ "loss": 0.2572,
2608
+ "step": 432
2609
+ },
2610
+ {
2611
+ "epoch": 1.51,
2612
+ "learning_rate": 1.033977033578236e-05,
2613
+ "loss": 0.2269,
2614
+ "step": 433
2615
+ },
2616
+ {
2617
+ "epoch": 1.52,
2618
+ "learning_rate": 1.030203027800889e-05,
2619
+ "loss": 0.2383,
2620
+ "step": 434
2621
+ },
2622
+ {
2623
+ "epoch": 1.52,
2624
+ "learning_rate": 1.0264285913947545e-05,
2625
+ "loss": 0.2434,
2626
+ "step": 435
2627
+ },
2628
+ {
2629
+ "epoch": 1.52,
2630
+ "learning_rate": 1.0226537781749988e-05,
2631
+ "loss": 0.2184,
2632
+ "step": 436
2633
+ },
2634
+ {
2635
+ "epoch": 1.53,
2636
+ "learning_rate": 1.0188786419621613e-05,
2637
+ "loss": 0.2272,
2638
+ "step": 437
2639
+ },
2640
+ {
2641
+ "epoch": 1.53,
2642
+ "learning_rate": 1.015103236581386e-05,
2643
+ "loss": 0.2362,
2644
+ "step": 438
2645
+ },
2646
+ {
2647
+ "epoch": 1.53,
2648
+ "learning_rate": 1.0113276158616555e-05,
2649
+ "loss": 0.2336,
2650
+ "step": 439
2651
+ },
2652
+ {
2653
+ "epoch": 1.54,
2654
+ "learning_rate": 1.0075518336350218e-05,
2655
+ "loss": 0.2342,
2656
+ "step": 440
2657
+ },
2658
+ {
2659
+ "epoch": 1.54,
2660
+ "learning_rate": 1.0037759437358398e-05,
2661
+ "loss": 0.2301,
2662
+ "step": 441
2663
+ },
2664
+ {
2665
+ "epoch": 1.55,
2666
+ "learning_rate": 1e-05,
2667
+ "loss": 0.2445,
2668
+ "step": 442
2669
+ },
2670
+ {
2671
+ "epoch": 1.55,
2672
+ "learning_rate": 9.962240562641602e-06,
2673
+ "loss": 0.2292,
2674
+ "step": 443
2675
+ },
2676
+ {
2677
+ "epoch": 1.55,
2678
+ "learning_rate": 9.924481663649785e-06,
2679
+ "loss": 0.236,
2680
+ "step": 444
2681
+ },
2682
+ {
2683
+ "epoch": 1.56,
2684
+ "learning_rate": 9.886723841383447e-06,
2685
+ "loss": 0.236,
2686
+ "step": 445
2687
+ },
2688
+ {
2689
+ "epoch": 1.56,
2690
+ "learning_rate": 9.848967634186142e-06,
2691
+ "loss": 0.2292,
2692
+ "step": 446
2693
+ },
2694
+ {
2695
+ "epoch": 1.56,
2696
+ "learning_rate": 9.811213580378389e-06,
2697
+ "loss": 0.2351,
2698
+ "step": 447
2699
+ },
2700
+ {
2701
+ "epoch": 1.57,
2702
+ "learning_rate": 9.773462218250014e-06,
2703
+ "loss": 0.2272,
2704
+ "step": 448
2705
+ },
2706
+ {
2707
+ "epoch": 1.57,
2708
+ "learning_rate": 9.735714086052458e-06,
2709
+ "loss": 0.248,
2710
+ "step": 449
2711
+ },
2712
+ {
2713
+ "epoch": 1.57,
2714
+ "learning_rate": 9.697969721991114e-06,
2715
+ "loss": 0.2421,
2716
+ "step": 450
2717
+ },
2718
+ {
2719
+ "epoch": 1.58,
2720
+ "learning_rate": 9.660229664217644e-06,
2721
+ "loss": 0.2322,
2722
+ "step": 451
2723
+ },
2724
+ {
2725
+ "epoch": 1.58,
2726
+ "learning_rate": 9.62249445082232e-06,
2727
+ "loss": 0.2393,
2728
+ "step": 452
2729
+ },
2730
+ {
2731
+ "epoch": 1.58,
2732
+ "learning_rate": 9.584764619826339e-06,
2733
+ "loss": 0.2368,
2734
+ "step": 453
2735
+ },
2736
+ {
2737
+ "epoch": 1.59,
2738
+ "learning_rate": 9.547040709174159e-06,
2739
+ "loss": 0.2274,
2740
+ "step": 454
2741
+ },
2742
+ {
2743
+ "epoch": 1.59,
2744
+ "learning_rate": 9.50932325672582e-06,
2745
+ "loss": 0.228,
2746
+ "step": 455
2747
+ },
2748
+ {
2749
+ "epoch": 1.59,
2750
+ "learning_rate": 9.471612800249295e-06,
2751
+ "loss": 0.2388,
2752
+ "step": 456
2753
+ },
2754
+ {
2755
+ "epoch": 1.6,
2756
+ "learning_rate": 9.433909877412801e-06,
2757
+ "loss": 0.2501,
2758
+ "step": 457
2759
+ },
2760
+ {
2761
+ "epoch": 1.6,
2762
+ "learning_rate": 9.39621502577714e-06,
2763
+ "loss": 0.2353,
2764
+ "step": 458
2765
+ },
2766
+ {
2767
+ "epoch": 1.6,
2768
+ "learning_rate": 9.358528782788045e-06,
2769
+ "loss": 0.2394,
2770
+ "step": 459
2771
+ },
2772
+ {
2773
+ "epoch": 1.61,
2774
+ "learning_rate": 9.320851685768498e-06,
2775
+ "loss": 0.2354,
2776
+ "step": 460
2777
+ },
2778
+ {
2779
+ "epoch": 1.61,
2780
+ "learning_rate": 9.28318427191109e-06,
2781
+ "loss": 0.251,
2782
+ "step": 461
2783
+ },
2784
+ {
2785
+ "epoch": 1.62,
2786
+ "learning_rate": 9.24552707827034e-06,
2787
+ "loss": 0.2306,
2788
+ "step": 462
2789
+ },
2790
+ {
2791
+ "epoch": 1.62,
2792
+ "learning_rate": 9.207880641755065e-06,
2793
+ "loss": 0.2397,
2794
+ "step": 463
2795
+ },
2796
+ {
2797
+ "epoch": 1.62,
2798
+ "learning_rate": 9.170245499120694e-06,
2799
+ "loss": 0.2174,
2800
+ "step": 464
2801
+ },
2802
+ {
2803
+ "epoch": 1.63,
2804
+ "learning_rate": 9.132622186961637e-06,
2805
+ "loss": 0.2045,
2806
+ "step": 465
2807
+ },
2808
+ {
2809
+ "epoch": 1.63,
2810
+ "learning_rate": 9.095011241703623e-06,
2811
+ "loss": 0.2372,
2812
+ "step": 466
2813
+ },
2814
+ {
2815
+ "epoch": 1.63,
2816
+ "learning_rate": 9.057413199596066e-06,
2817
+ "loss": 0.2527,
2818
+ "step": 467
2819
+ },
2820
+ {
2821
+ "epoch": 1.64,
2822
+ "learning_rate": 9.019828596704394e-06,
2823
+ "loss": 0.2497,
2824
+ "step": 468
2825
+ },
2826
+ {
2827
+ "epoch": 1.64,
2828
+ "learning_rate": 8.982257968902438e-06,
2829
+ "loss": 0.2511,
2830
+ "step": 469
2831
+ },
2832
+ {
2833
+ "epoch": 1.64,
2834
+ "learning_rate": 8.944701851864767e-06,
2835
+ "loss": 0.2455,
2836
+ "step": 470
2837
+ },
2838
+ {
2839
+ "epoch": 1.65,
2840
+ "learning_rate": 8.907160781059053e-06,
2841
+ "loss": 0.2305,
2842
+ "step": 471
2843
+ },
2844
+ {
2845
+ "epoch": 1.65,
2846
+ "learning_rate": 8.869635291738452e-06,
2847
+ "loss": 0.2327,
2848
+ "step": 472
2849
+ },
2850
+ {
2851
+ "epoch": 1.65,
2852
+ "learning_rate": 8.832125918933955e-06,
2853
+ "loss": 0.2469,
2854
+ "step": 473
2855
+ },
2856
+ {
2857
+ "epoch": 1.66,
2858
+ "learning_rate": 8.79463319744677e-06,
2859
+ "loss": 0.2562,
2860
+ "step": 474
2861
+ },
2862
+ {
2863
+ "epoch": 1.66,
2864
+ "learning_rate": 8.757157661840693e-06,
2865
+ "loss": 0.2452,
2866
+ "step": 475
2867
+ },
2868
+ {
2869
+ "epoch": 1.66,
2870
+ "learning_rate": 8.719699846434493e-06,
2871
+ "loss": 0.226,
2872
+ "step": 476
2873
+ },
2874
+ {
2875
+ "epoch": 1.67,
2876
+ "learning_rate": 8.682260285294272e-06,
2877
+ "loss": 0.2434,
2878
+ "step": 477
2879
+ },
2880
+ {
2881
+ "epoch": 1.67,
2882
+ "learning_rate": 8.644839512225886e-06,
2883
+ "loss": 0.2321,
2884
+ "step": 478
2885
+ },
2886
+ {
2887
+ "epoch": 1.67,
2888
+ "learning_rate": 8.607438060767296e-06,
2889
+ "loss": 0.2233,
2890
+ "step": 479
2891
+ },
2892
+ {
2893
+ "epoch": 1.68,
2894
+ "learning_rate": 8.570056464180998e-06,
2895
+ "loss": 0.2311,
2896
+ "step": 480
2897
+ }
2898
+ ],
2899
+ "max_steps": 858,
2900
+ "num_train_epochs": 3,
2901
+ "total_flos": 1.2217975017622733e+18,
2902
+ "trial_name": null,
2903
+ "trial_params": null
2904
+ }
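The log above ends the trainer state shipped with this checkpoint, and its numbers are internally consistent: the evaluation record at epoch 1.0 falls on step 286, so "max_steps": 858 is exactly "num_train_epochs": 3 times 286 optimizer steps per epoch. A minimal sketch for inspecting the file offline follows; it assumes the file has been downloaded as trainer_state.json and that the array shown above is the standard log_history list written by transformers' Trainer, with the field names ("step", "epoch", "loss", "eval_loss") taken from the entries in this diff.

import json

# Sketch, not part of this repository: split the logged entries into
# per-step training records and per-epoch evaluation records.
with open("trainer_state.json") as f:
    state = json.load(f)

logs = state["log_history"]                    # assumed standard Trainer layout
train = [e for e in logs if "loss" in e]       # training records ("loss", "learning_rate", ...)
evals = [e for e in logs if "eval_loss" in e]  # evaluation records

print("logged training steps:", len(train))
print("last training loss:", train[-1]["loss"])
for e in evals:
    print(f'step {e["step"]} (epoch {e["epoch"]}): eval_loss = {e["eval_loss"]}')

The same list can be handed to pandas or matplotlib to plot the loss and learning-rate curves; nothing in the sketch depends on this particular run beyond those field names.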
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6ad37d6021448042f80ac382a39a9060255e5455ef072913943d8baae0d450b7
3
+ size 3707
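training_args.bin is stored through Git LFS, so the pointer above records only the object hash and its size (3707 bytes); the file itself is the pickled TrainingArguments object that the Trainer saves alongside a checkpoint. A hedged sketch for retrieving and reading it is below; the repository id is a hypothetical placeholder, and unpickling requires a transformers version compatible with the one that wrote the file.

# Sketch only; "<user>/<repo>" is a hypothetical repository id, not taken from this page.
import torch
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="<user>/<repo>", filename="training_args.bin")
# weights_only=False because this is a full pickle, not a tensor state dict
# (the keyword exists in torch >= 1.13; older versions load pickles by default).
args = torch.load(path, weights_only=False)
print(args)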