Santipab committed
Commit 2f03517 · verified · 1 Parent(s): 64b4e90

Upload 8 files

config.json ADDED
@@ -0,0 +1,313 @@
+ {
+ "_name_or_path": "microsoft/swin-base-patch4-window7-224-in22k",
+ "architectures": [
+ "SwinForImageClassification"
+ ],
+ "attention_probs_dropout_prob": 0.0,
+ "depths": [
+ 2,
+ 2,
+ 18,
+ 2
+ ],
+ "drop_path_rate": 0.1,
+ "embed_dim": 128,
+ "encoder_stride": 32,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.0,
+ "hidden_size": 1024,
+ "id2label": {
+ "0": "001",
+ "1": "001_forg",
+ "2": "002",
+ "3": "002_forg",
+ "4": "003",
+ "5": "003_forg",
+ "6": "004",
+ "7": "004_forg",
+ "8": "006",
+ "9": "006_forg",
+ "10": "009",
+ "11": "009_forg",
+ "12": "012",
+ "13": "012_forg",
+ "14": "013",
+ "15": "013_forg",
+ "16": "014",
+ "17": "014_forg",
+ "18": "015",
+ "19": "015_forg",
+ "20": "016",
+ "21": "016_forg",
+ "22": "017",
+ "23": "017_forg",
+ "24": "018",
+ "25": "018_forg",
+ "26": "019",
+ "27": "019_forg",
+ "28": "020",
+ "29": "020_forg",
+ "30": "021",
+ "31": "021_forg",
+ "32": "022",
+ "33": "022_forg",
+ "34": "023",
+ "35": "023_forg",
+ "36": "024",
+ "37": "024_forg",
+ "38": "025",
+ "39": "025_forg",
+ "40": "026",
+ "41": "026_forg",
+ "42": "027",
+ "43": "027_forg",
+ "44": "028",
+ "45": "028_forg",
+ "46": "029",
+ "47": "029_forg",
+ "48": "030",
+ "49": "030_forg",
+ "50": "031",
+ "51": "031_forg",
+ "52": "032",
+ "53": "032_forg",
+ "54": "033",
+ "55": "033_forg",
+ "56": "034",
+ "57": "034_forg",
+ "58": "035",
+ "59": "035_forg",
+ "60": "036",
+ "61": "036_forg",
+ "62": "037",
+ "63": "037_forg",
+ "64": "038",
+ "65": "038_forg",
+ "66": "039",
+ "67": "039_forg",
+ "68": "040",
+ "69": "040_forg",
+ "70": "041",
+ "71": "041_forg",
+ "72": "042",
+ "73": "042_forg",
+ "74": "043",
+ "75": "043_forg",
+ "76": "044",
+ "77": "044_forg",
+ "78": "045",
+ "79": "045_forg",
+ "80": "046",
+ "81": "046_forg",
+ "82": "047",
+ "83": "047_forg",
+ "84": "048",
+ "85": "048_forg",
+ "86": "049",
+ "87": "049_forg",
+ "88": "050",
+ "89": "050_forg",
+ "90": "051",
+ "91": "051_forg",
+ "92": "052",
+ "93": "052_forg",
+ "94": "053",
+ "95": "053_forg",
+ "96": "054",
+ "97": "054_forg",
+ "98": "055",
+ "99": "055_forg",
+ "100": "056",
+ "101": "056_forg",
+ "102": "057",
+ "103": "057_forg",
+ "104": "058",
+ "105": "058_forg",
+ "106": "059",
+ "107": "059_forg",
+ "108": "060",
+ "109": "060_forg",
+ "110": "061",
+ "111": "061_forg",
+ "112": "062",
+ "113": "062_forg",
+ "114": "063",
+ "115": "063_forg",
+ "116": "064",
+ "117": "064_forg",
+ "118": "065",
+ "119": "065_forg",
+ "120": "066",
+ "121": "066_forg",
+ "122": "067",
+ "123": "067_forg",
+ "124": "068",
+ "125": "068_forg",
+ "126": "069",
+ "127": "069_forg"
+ },
+ "image_size": 224,
+ "initializer_range": 0.02,
+ "label2id": {
+ "001": 0,
+ "001_forg": 1,
+ "002": 2,
+ "002_forg": 3,
+ "003": 4,
+ "003_forg": 5,
+ "004": 6,
+ "004_forg": 7,
+ "006": 8,
+ "006_forg": 9,
+ "009": 10,
+ "009_forg": 11,
+ "012": 12,
+ "012_forg": 13,
+ "013": 14,
+ "013_forg": 15,
+ "014": 16,
+ "014_forg": 17,
+ "015": 18,
+ "015_forg": 19,
+ "016": 20,
+ "016_forg": 21,
+ "017": 22,
+ "017_forg": 23,
+ "018": 24,
+ "018_forg": 25,
+ "019": 26,
+ "019_forg": 27,
+ "020": 28,
+ "020_forg": 29,
+ "021": 30,
+ "021_forg": 31,
+ "022": 32,
+ "022_forg": 33,
+ "023": 34,
+ "023_forg": 35,
+ "024": 36,
+ "024_forg": 37,
+ "025": 38,
+ "025_forg": 39,
+ "026": 40,
+ "026_forg": 41,
+ "027": 42,
+ "027_forg": 43,
+ "028": 44,
+ "028_forg": 45,
+ "029": 46,
+ "029_forg": 47,
+ "030": 48,
+ "030_forg": 49,
+ "031": 50,
+ "031_forg": 51,
+ "032": 52,
+ "032_forg": 53,
+ "033": 54,
+ "033_forg": 55,
+ "034": 56,
+ "034_forg": 57,
+ "035": 58,
+ "035_forg": 59,
+ "036": 60,
+ "036_forg": 61,
+ "037": 62,
+ "037_forg": 63,
+ "038": 64,
+ "038_forg": 65,
+ "039": 66,
+ "039_forg": 67,
+ "040": 68,
+ "040_forg": 69,
+ "041": 70,
+ "041_forg": 71,
+ "042": 72,
+ "042_forg": 73,
+ "043": 74,
+ "043_forg": 75,
+ "044": 76,
+ "044_forg": 77,
+ "045": 78,
+ "045_forg": 79,
+ "046": 80,
+ "046_forg": 81,
+ "047": 82,
+ "047_forg": 83,
+ "048": 84,
+ "048_forg": 85,
+ "049": 86,
+ "049_forg": 87,
+ "050": 88,
+ "050_forg": 89,
+ "051": 90,
+ "051_forg": 91,
+ "052": 92,
+ "052_forg": 93,
+ "053": 94,
+ "053_forg": 95,
+ "054": 96,
+ "054_forg": 97,
+ "055": 98,
+ "055_forg": 99,
+ "056": 100,
+ "056_forg": 101,
+ "057": 102,
+ "057_forg": 103,
+ "058": 104,
+ "058_forg": 105,
+ "059": 106,
+ "059_forg": 107,
+ "060": 108,
+ "060_forg": 109,
+ "061": 110,
+ "061_forg": 111,
+ "062": 112,
+ "062_forg": 113,
+ "063": 114,
+ "063_forg": 115,
+ "064": 116,
+ "064_forg": 117,
+ "065": 118,
+ "065_forg": 119,
+ "066": 120,
+ "066_forg": 121,
+ "067": 122,
+ "067_forg": 123,
+ "068": 124,
+ "068_forg": 125,
+ "069": 126,
+ "069_forg": 127
+ },
+ "layer_norm_eps": 1e-05,
+ "mlp_ratio": 4.0,
+ "model_type": "swin",
+ "num_channels": 3,
+ "num_heads": [
+ 4,
+ 8,
+ 16,
+ 32
+ ],
+ "num_layers": 4,
+ "out_features": [
+ "stage4"
+ ],
+ "out_indices": [
+ 4
+ ],
+ "patch_size": 4,
+ "path_norm": true,
+ "problem_type": "single_label_classification",
+ "qkv_bias": true,
+ "stage_names": [
+ "stem",
+ "stage1",
+ "stage2",
+ "stage3",
+ "stage4"
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.35.2",
+ "use_absolute_embeddings": false,
+ "window_size": 7
+ }
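
The config above takes microsoft/swin-base-patch4-window7-224-in22k (Swin-Base layout: depths [2, 2, 18, 2], embed_dim 128, hidden_size 1024) and attaches a SwinForImageClassification head with 128 labels: 64 numeric writer IDs, each paired with a "_forg" counterpart, presumably the forged samples for that writer. A minimal sketch of inspecting the file with the transformers library; "./checkpoint" is a placeholder for wherever this upload is downloaded:

from transformers import SwinConfig

# Read the uploaded config.json from a local copy of this repo (placeholder path).
config = SwinConfig.from_pretrained("./checkpoint")
print(config.num_labels)   # 128, derived from id2label
print(config.id2label[1])  # "001_forg"
print(config.depths)       # [2, 2, 18, 2]
print(config.num_heads)    # [4, 8, 16, 32]
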
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8115312f2670ec77211477998b55437119809de3fe5ccc67917d435a16cc1824
+ size 348015440
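
The three lines above are a Git LFS pointer, not the weights themselves; the real model.safetensors (348015440 bytes, about 348 MB) lives in LFS storage and should hash to the recorded oid. A small sketch, assuming the resolved file has been downloaded to the working directory, of verifying it against the pointer:

import hashlib
import os

path = "model.safetensors"  # placeholder: the resolved LFS file, not the pointer text
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)
assert os.path.getsize(path) == 348015440
assert sha.hexdigest() == "8115312f2670ec77211477998b55437119809de3fe5ccc67917d435a16cc1824"
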
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:72cb32b24fc71fa251272bacf2bb056b165134474fe5e215b8ff9a6473792915
+ size 695350861
preprocessor_config.json ADDED
@@ -0,0 +1,22 @@
+ {
+ "do_normalize": true,
+ "do_rescale": true,
+ "do_resize": true,
+ "image_mean": [
+ 0.485,
+ 0.456,
+ 0.406
+ ],
+ "image_processor_type": "ViTImageProcessor",
+ "image_std": [
+ 0.229,
+ 0.224,
+ 0.225
+ ],
+ "resample": 3,
+ "rescale_factor": 0.00392156862745098,
+ "size": {
+ "height": 224,
+ "width": 224
+ }
+ }
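
The preprocessor config resizes inputs to 224x224, rescales by 1/255 and normalizes with ImageNet mean/std, matching the image_size in config.json. With the weights and these two JSON files in one local folder, inference is a few lines; a minimal sketch, assuming transformers, torch and Pillow are installed and using placeholder paths:

import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

checkpoint = "./checkpoint"  # placeholder: local copy of this upload
processor = AutoImageProcessor.from_pretrained(checkpoint)           # applies the config above
model = AutoModelForImageClassification.from_pretrained(checkpoint).eval()

image = Image.open("example.png").convert("RGB")                     # placeholder input image
inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(model.config.id2label[logits.argmax(-1).item()])               # e.g. "001" or "001_forg"
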
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5377541be1b7ecfe570d045b0d869ff8d9ed32ed19459d310e152ba43d45d889
+ size 14244
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c5196e8b6283bd73106905dbe14b86ceb960ef41a766cd494be7b8cb7159902e
+ size 1064
trainer_state.json ADDED
@@ -0,0 +1,439 @@
+ {
+ "best_metric": 0.98828125,
+ "best_model_checkpoint": "./model/checkpoint-140",
+ "epoch": 20.0,
+ "eval_steps": 500,
+ "global_step": 400,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.5,
+ "learning_rate": 5e-06,
+ "loss": 0.0465,
+ "step": 10
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1e-05,
+ "loss": 0.041,
+ "step": 20
+ },
+ {
+ "epoch": 1.0,
+ "eval_accuracy": 0.984375,
+ "eval_loss": 0.11365039646625519,
+ "eval_runtime": 4.8919,
+ "eval_samples_per_second": 52.332,
+ "eval_steps_per_second": 6.541,
+ "step": 20
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 0.0333,
+ "step": 30
+ },
+ {
+ "epoch": 2.0,
+ "learning_rate": 2e-05,
+ "loss": 0.037,
+ "step": 40
+ },
+ {
+ "epoch": 2.0,
+ "eval_accuracy": 0.98046875,
+ "eval_loss": 0.12528091669082642,
+ "eval_runtime": 3.9769,
+ "eval_samples_per_second": 64.372,
+ "eval_steps_per_second": 8.046,
+ "step": 40
+ },
+ {
+ "epoch": 2.5,
+ "learning_rate": 1.9444444444444445e-05,
+ "loss": 0.0342,
+ "step": 50
+ },
+ {
+ "epoch": 3.0,
+ "learning_rate": 1.888888888888889e-05,
+ "loss": 0.0367,
+ "step": 60
+ },
+ {
+ "epoch": 3.0,
+ "eval_accuracy": 0.98046875,
+ "eval_loss": 0.12140656262636185,
+ "eval_runtime": 4.3164,
+ "eval_samples_per_second": 59.309,
+ "eval_steps_per_second": 7.414,
+ "step": 60
+ },
+ {
+ "epoch": 3.5,
+ "learning_rate": 1.8333333333333333e-05,
+ "loss": 0.0301,
+ "step": 70
+ },
+ {
+ "epoch": 4.0,
+ "learning_rate": 1.7777777777777777e-05,
+ "loss": 0.0298,
+ "step": 80
+ },
+ {
+ "epoch": 4.0,
+ "eval_accuracy": 0.98046875,
+ "eval_loss": 0.08230899274349213,
+ "eval_runtime": 4.114,
+ "eval_samples_per_second": 62.227,
+ "eval_steps_per_second": 7.778,
+ "step": 80
+ },
+ {
+ "epoch": 4.5,
+ "learning_rate": 1.7222222222222224e-05,
+ "loss": 0.0186,
+ "step": 90
+ },
+ {
+ "epoch": 5.0,
+ "learning_rate": 1.6666666666666667e-05,
+ "loss": 0.0165,
+ "step": 100
+ },
+ {
+ "epoch": 5.0,
+ "eval_accuracy": 0.98046875,
+ "eval_loss": 0.08846013247966766,
+ "eval_runtime": 4.7826,
+ "eval_samples_per_second": 53.527,
+ "eval_steps_per_second": 6.691,
+ "step": 100
+ },
+ {
+ "epoch": 5.5,
+ "learning_rate": 1.6111111111111115e-05,
+ "loss": 0.0212,
+ "step": 110
+ },
+ {
+ "epoch": 6.0,
+ "learning_rate": 1.555555555555556e-05,
+ "loss": 0.0142,
+ "step": 120
+ },
+ {
+ "epoch": 6.0,
+ "eval_accuracy": 0.9765625,
+ "eval_loss": 0.07800224423408508,
+ "eval_runtime": 3.9449,
+ "eval_samples_per_second": 64.894,
+ "eval_steps_per_second": 8.112,
+ "step": 120
+ },
+ {
+ "epoch": 6.5,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 0.0122,
+ "step": 130
+ },
+ {
+ "epoch": 7.0,
+ "learning_rate": 1.4444444444444446e-05,
+ "loss": 0.0145,
+ "step": 140
+ },
+ {
+ "epoch": 7.0,
+ "eval_accuracy": 0.98828125,
+ "eval_loss": 0.06642253696918488,
+ "eval_runtime": 4.2638,
+ "eval_samples_per_second": 60.04,
+ "eval_steps_per_second": 7.505,
+ "step": 140
+ },
+ {
+ "epoch": 7.5,
+ "learning_rate": 1.388888888888889e-05,
+ "loss": 0.0165,
+ "step": 150
+ },
+ {
+ "epoch": 8.0,
+ "learning_rate": 1.3333333333333333e-05,
+ "loss": 0.0105,
+ "step": 160
+ },
+ {
+ "epoch": 8.0,
+ "eval_accuracy": 0.984375,
+ "eval_loss": 0.05820595473051071,
+ "eval_runtime": 4.7261,
+ "eval_samples_per_second": 54.167,
+ "eval_steps_per_second": 6.771,
+ "step": 160
+ },
+ {
+ "epoch": 8.5,
+ "learning_rate": 1.2777777777777777e-05,
+ "loss": 0.009,
+ "step": 170
+ },
+ {
+ "epoch": 9.0,
+ "learning_rate": 1.2222222222222224e-05,
+ "loss": 0.0136,
+ "step": 180
+ },
+ {
+ "epoch": 9.0,
+ "eval_accuracy": 0.984375,
+ "eval_loss": 0.06151523441076279,
+ "eval_runtime": 4.7568,
+ "eval_samples_per_second": 53.818,
+ "eval_steps_per_second": 6.727,
+ "step": 180
+ },
+ {
+ "epoch": 9.5,
+ "learning_rate": 1.1666666666666668e-05,
+ "loss": 0.0095,
+ "step": 190
+ },
+ {
+ "epoch": 10.0,
+ "learning_rate": 1.1111111111111113e-05,
+ "loss": 0.0088,
+ "step": 200
+ },
+ {
+ "epoch": 10.0,
+ "eval_accuracy": 0.984375,
+ "eval_loss": 0.051664501428604126,
+ "eval_runtime": 4.0071,
+ "eval_samples_per_second": 63.886,
+ "eval_steps_per_second": 7.986,
+ "step": 200
+ },
+ {
+ "epoch": 10.5,
+ "learning_rate": 1.0555555555555557e-05,
+ "loss": 0.0087,
+ "step": 210
+ },
+ {
+ "epoch": 11.0,
+ "learning_rate": 1e-05,
+ "loss": 0.0048,
+ "step": 220
+ },
+ {
+ "epoch": 11.0,
+ "eval_accuracy": 0.98828125,
+ "eval_loss": 0.04740528017282486,
+ "eval_runtime": 4.776,
+ "eval_samples_per_second": 53.601,
+ "eval_steps_per_second": 6.7,
+ "step": 220
+ },
+ {
+ "epoch": 11.5,
+ "learning_rate": 9.444444444444445e-06,
+ "loss": 0.0056,
+ "step": 230
+ },
+ {
+ "epoch": 12.0,
+ "learning_rate": 8.888888888888888e-06,
+ "loss": 0.004,
+ "step": 240
+ },
+ {
+ "epoch": 12.0,
+ "eval_accuracy": 0.984375,
+ "eval_loss": 0.04649289697408676,
+ "eval_runtime": 3.9995,
+ "eval_samples_per_second": 64.008,
+ "eval_steps_per_second": 8.001,
+ "step": 240
+ },
+ {
+ "epoch": 12.5,
+ "learning_rate": 8.333333333333334e-06,
+ "loss": 0.007,
+ "step": 250
+ },
+ {
+ "epoch": 13.0,
+ "learning_rate": 7.77777777777778e-06,
+ "loss": 0.004,
+ "step": 260
+ },
+ {
+ "epoch": 13.0,
+ "eval_accuracy": 0.984375,
+ "eval_loss": 0.05019224062561989,
+ "eval_runtime": 4.2397,
+ "eval_samples_per_second": 60.382,
+ "eval_steps_per_second": 7.548,
+ "step": 260
+ },
+ {
+ "epoch": 13.5,
+ "learning_rate": 7.222222222222223e-06,
+ "loss": 0.0039,
+ "step": 270
+ },
+ {
+ "epoch": 14.0,
+ "learning_rate": 6.666666666666667e-06,
+ "loss": 0.0041,
+ "step": 280
+ },
+ {
+ "epoch": 14.0,
+ "eval_accuracy": 0.98828125,
+ "eval_loss": 0.03817906230688095,
+ "eval_runtime": 4.303,
+ "eval_samples_per_second": 59.493,
+ "eval_steps_per_second": 7.437,
+ "step": 280
+ },
+ {
+ "epoch": 14.5,
+ "learning_rate": 6.111111111111112e-06,
+ "loss": 0.0041,
+ "step": 290
+ },
+ {
+ "epoch": 15.0,
+ "learning_rate": 5.555555555555557e-06,
+ "loss": 0.0031,
+ "step": 300
+ },
+ {
+ "epoch": 15.0,
+ "eval_accuracy": 0.984375,
+ "eval_loss": 0.04823793098330498,
+ "eval_runtime": 4.2376,
+ "eval_samples_per_second": 60.411,
+ "eval_steps_per_second": 7.551,
+ "step": 300
+ },
+ {
+ "epoch": 15.5,
+ "learning_rate": 5e-06,
+ "loss": 0.0061,
+ "step": 310
+ },
+ {
+ "epoch": 16.0,
+ "learning_rate": 4.444444444444444e-06,
+ "loss": 0.0072,
+ "step": 320
+ },
+ {
+ "epoch": 16.0,
+ "eval_accuracy": 0.98828125,
+ "eval_loss": 0.04395627975463867,
+ "eval_runtime": 4.863,
+ "eval_samples_per_second": 52.642,
+ "eval_steps_per_second": 6.58,
+ "step": 320
+ },
+ {
+ "epoch": 16.5,
+ "learning_rate": 3.88888888888889e-06,
+ "loss": 0.0032,
+ "step": 330
+ },
+ {
+ "epoch": 17.0,
+ "learning_rate": 3.3333333333333333e-06,
+ "loss": 0.004,
+ "step": 340
+ },
+ {
+ "epoch": 17.0,
+ "eval_accuracy": 0.984375,
+ "eval_loss": 0.04199722409248352,
+ "eval_runtime": 4.0297,
+ "eval_samples_per_second": 63.528,
+ "eval_steps_per_second": 7.941,
+ "step": 340
+ },
+ {
+ "epoch": 17.5,
+ "learning_rate": 2.7777777777777783e-06,
+ "loss": 0.0033,
+ "step": 350
+ },
+ {
+ "epoch": 18.0,
+ "learning_rate": 2.222222222222222e-06,
+ "loss": 0.0026,
+ "step": 360
+ },
+ {
+ "epoch": 18.0,
+ "eval_accuracy": 0.98828125,
+ "eval_loss": 0.03974057734012604,
+ "eval_runtime": 4.1185,
+ "eval_samples_per_second": 62.158,
+ "eval_steps_per_second": 7.77,
+ "step": 360
+ },
+ {
+ "epoch": 18.5,
+ "learning_rate": 1.6666666666666667e-06,
+ "loss": 0.0043,
+ "step": 370
+ },
+ {
+ "epoch": 19.0,
+ "learning_rate": 1.111111111111111e-06,
+ "loss": 0.0041,
+ "step": 380
+ },
+ {
+ "epoch": 19.0,
+ "eval_accuracy": 0.98828125,
+ "eval_loss": 0.04067450016736984,
+ "eval_runtime": 4.7682,
+ "eval_samples_per_second": 53.688,
+ "eval_steps_per_second": 6.711,
+ "step": 380
+ },
+ {
+ "epoch": 19.5,
+ "learning_rate": 5.555555555555555e-07,
+ "loss": 0.0025,
+ "step": 390
+ },
+ {
+ "epoch": 20.0,
+ "learning_rate": 0.0,
+ "loss": 0.004,
+ "step": 400
+ },
+ {
+ "epoch": 20.0,
+ "eval_accuracy": 0.98828125,
+ "eval_loss": 0.040177565068006516,
+ "eval_runtime": 3.9732,
+ "eval_samples_per_second": 64.432,
+ "eval_steps_per_second": 8.054,
+ "step": 400
+ }
+ ],
+ "logging_steps": 10,
+ "max_steps": 400,
+ "num_train_epochs": 20,
+ "save_steps": 500,
+ "total_flos": 2.1859568857379635e+18,
+ "trial_name": null,
+ "trial_params": null
+ }
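
The trainer state above logs the loss every 10 steps and evaluates once per epoch over 20 epochs (400 steps total), with the learning rate warming up to 2e-05 by step 40 and then decaying linearly to 0. The best eval_accuracy of 0.98828125 is first reached at step 140, which is why best_model_checkpoint points at ./model/checkpoint-140. A short sketch of pulling the evaluation curve back out of this file:

import json

with open("trainer_state.json") as f:  # the file added in this commit
    state = json.load(f)

evals = [e for e in state["log_history"] if "eval_accuracy" in e]
best = max(evals, key=lambda e: e["eval_accuracy"])  # ties resolve to the earliest entry
print(state["best_model_checkpoint"])                # ./model/checkpoint-140
print(best["step"], best["eval_accuracy"])           # 140 0.98828125
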
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aae1c9dc7358ab0ae97794c0422bfc7121545390864859b67b28acbaa39a1771
+ size 4536
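
training_args.bin is the pickled TrainingArguments object that the Trainer saves alongside each checkpoint. It can be inspected with torch.load; weights_only=False is needed on recent PyTorch versions because the file holds a full Python object, so only unpickle it from a source you trust:

import torch

args = torch.load("training_args.bin", weights_only=False)  # TrainingArguments for this run
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
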