{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8313847752663029,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "grad_norm": 24.954891204833984, "learning_rate": 5e-06, "loss": 9.5118, "step": 5},
    {"epoch": 0.0, "grad_norm": 14.123294830322266, "learning_rate": 1e-05, "loss": 9.3162, "step": 10},
    {"epoch": 0.0, "grad_norm": 11.853605270385742, "learning_rate": 1.5e-05, "loss": 8.9519, "step": 15},
    {"epoch": 0.0, "grad_norm": 8.794404983520508, "learning_rate": 2e-05, "loss": 8.511, "step": 20},
    {"epoch": 0.01, "grad_norm": 4.945346355438232, "learning_rate": 2.5e-05, "loss": 8.1291, "step": 25},
    {"epoch": 0.01, "grad_norm": 3.806889057159424, "learning_rate": 3e-05, "loss": 7.8939, "step": 30},
    {"epoch": 0.01, "grad_norm": 2.950843572616577, "learning_rate": 3.5e-05, "loss": 7.808, "step": 35},
    {"epoch": 0.01, "grad_norm": 1.9586803913116455, "learning_rate": 4e-05, "loss": 7.7356, "step": 40},
    {"epoch": 0.01, "grad_norm": 1.5460443496704102, "learning_rate": 4.5e-05, "loss": 7.6671, "step": 45},
    {"epoch": 0.01, "grad_norm": 1.6627991199493408, "learning_rate": 5e-05, "loss": 7.5727, "step": 50},
    {"epoch": 0.01, "grad_norm": 1.6151450872421265, "learning_rate": 5.500000000000001e-05, "loss": 7.4721, "step": 55},
    {"epoch": 0.01, "grad_norm": 2.017178773880005, "learning_rate": 6e-05, "loss": 7.306, "step": 60},
    {"epoch": 0.01, "grad_norm": 3.126413345336914, "learning_rate": 6.500000000000001e-05, "loss": 7.1504, "step": 65},
    {"epoch": 0.01, "grad_norm": 2.792532444000244, "learning_rate": 7e-05, "loss": 6.9702, "step": 70},
    {"epoch": 0.02, "grad_norm": 1.8019553422927856, "learning_rate": 7.500000000000001e-05, "loss": 6.7947, "step": 75},
    {"epoch": 0.02, "grad_norm": 4.513380527496338, "learning_rate": 8e-05, "loss": 6.678, "step": 80},
    {"epoch": 0.02, "grad_norm": 4.021646022796631, "learning_rate": 8.5e-05, "loss": 6.5002, "step": 85},
    {"epoch": 0.02, "grad_norm": 5.583535194396973, "learning_rate": 9e-05, "loss": 6.4036, "step": 90},
    {"epoch": 0.02, "grad_norm": 3.9732987880706787, "learning_rate": 9.5e-05, "loss": 6.3155, "step": 95},
    {"epoch": 0.02, "grad_norm": 3.5459728240966797, "learning_rate": 0.0001, "loss": 6.1436, "step": 100},
    {"epoch": 0.02, "grad_norm": 2.8032758235931396, "learning_rate": 9.999972205865686e-05, "loss": 6.099, "step": 105},
    {"epoch": 0.02, "grad_norm": 3.432061195373535, "learning_rate": 9.999888823771751e-05, "loss": 5.9576, "step": 110},
    {"epoch": 0.02, "grad_norm": 3.0502891540527344, "learning_rate": 9.999749854645204e-05, "loss": 5.8158, "step": 115},
    {"epoch": 0.02, "grad_norm": 2.0243988037109375, "learning_rate": 9.99955530003106e-05, "loss": 5.7212, "step": 120},
    {"epoch": 0.03, "grad_norm": 3.176409959793091, "learning_rate": 9.99930516209231e-05, "loss": 5.6199, "step": 125},
    {"epoch": 0.03, "grad_norm": 3.457683563232422, "learning_rate": 9.998999443609897e-05, "loss": 5.5896, "step": 130},
    {"epoch": 0.03, "grad_norm": 3.6271471977233887, "learning_rate": 9.998638147982696e-05, "loss": 5.3765, "step": 135},
    {"epoch": 0.03, "grad_norm": 2.7946786880493164, "learning_rate": 9.998221279227467e-05, "loss": 5.3568, "step": 140},
    {"epoch": 0.03, "grad_norm": 3.4872028827667236, "learning_rate": 9.997748841978812e-05, "loss": 5.3366, "step": 145},
    {"epoch": 0.03, "grad_norm": 3.0820538997650146, "learning_rate": 9.997220841489122e-05, "loss": 5.2398, "step": 150},
    {"epoch": 0.03, "grad_norm": 3.47347354888916, "learning_rate": 9.996637283628528e-05, "loss": 5.1045, "step": 155},
    {"epoch": 0.03, "grad_norm": 2.871932029724121, "learning_rate": 9.995998174884821e-05, "loss": 5.09, "step": 160},
    {"epoch": 0.03, "grad_norm": 2.723886251449585, "learning_rate": 9.995303522363394e-05, "loss": 5.0264, "step": 165},
    {"epoch": 0.04, "grad_norm": 2.1860122680664062, "learning_rate": 9.99455333378715e-05, "loss": 4.9358, "step": 170},
    {"epoch": 0.04, "grad_norm": 3.0296273231506348, "learning_rate": 9.993747617496428e-05, "loss": 4.9147, "step": 175},
    {"epoch": 0.04, "grad_norm": 3.251325845718384, "learning_rate": 9.9928863824489e-05, "loss": 4.8911, "step": 180},
    {"epoch": 0.04, "grad_norm": 2.8915772438049316, "learning_rate": 9.99196963821948e-05, "loss": 4.8019, "step": 185},
    {"epoch": 0.04, "grad_norm": 2.635601758956909, "learning_rate": 9.990997395000217e-05, "loss": 4.7388, "step": 190},
    {"epoch": 0.04, "grad_norm": 2.413235664367676, "learning_rate": 9.989969663600169e-05, "loss": 4.6891, "step": 195},
    {"epoch": 0.04, "grad_norm": 2.7400410175323486, "learning_rate": 9.9888864554453e-05, "loss": 4.6709, "step": 200},
    {"epoch": 0.04, "grad_norm": 2.8295705318450928, "learning_rate": 9.987747782578342e-05, "loss": 4.5952, "step": 205},
    {"epoch": 0.04, "grad_norm": 2.3502957820892334, "learning_rate": 9.986553657658668e-05, "loss": 4.5546, "step": 210},
    {"epoch": 0.04, "grad_norm": 3.5216689109802246, "learning_rate": 9.985304093962145e-05, "loss": 4.5998, "step": 215},
    {"epoch": 0.05, "grad_norm": 3.301525115966797, "learning_rate": 9.983999105380988e-05, "loss": 4.5215, "step": 220},
    {"epoch": 0.05, "grad_norm": 2.87137508392334, "learning_rate": 9.982638706423608e-05, "loss": 4.5889, "step": 225},
    {"epoch": 0.05, "grad_norm": 2.8169140815734863, "learning_rate": 9.98122291221445e-05, "loss": 4.4965, "step": 230},
    {"epoch": 0.05, "grad_norm": 1.8885302543640137, "learning_rate": 9.979751738493826e-05, "loss": 4.4749, "step": 235},
    {"epoch": 0.05, "grad_norm": 2.511671304702759, "learning_rate": 9.978225201617732e-05, "loss": 4.4188, "step": 240},
    {"epoch": 0.05, "grad_norm": 2.7718353271484375, "learning_rate": 9.976643318557678e-05, "loss": 4.4041, "step": 245},
    {"epoch": 0.05, "grad_norm": 3.0946099758148193, "learning_rate": 9.975006106900495e-05, "loss": 4.4226, "step": 250},
    {"epoch": 0.05, "grad_norm": 2.0956268310546875, "learning_rate": 9.973313584848132e-05, "loss": 4.3768, "step": 255},
    {"epoch": 0.05, "grad_norm": 1.8771021366119385, "learning_rate": 9.971565771217464e-05, "loss": 4.3758, "step": 260},
    {"epoch": 0.06, "grad_norm": 2.6710236072540283, "learning_rate": 9.969762685440076e-05, "loss": 4.2965, "step": 265},
    {"epoch": 0.06, "grad_norm": 1.7884382009506226, "learning_rate": 9.967904347562054e-05, "loss": 4.2645, "step": 270},
    {"epoch": 0.06, "grad_norm": 1.9744821786880493, "learning_rate": 9.965990778243755e-05, "loss": 4.3013, "step": 275},
    {"epoch": 0.06, "grad_norm": 1.9575978517532349, "learning_rate": 9.964021998759577e-05, "loss": 4.2548, "step": 280},
    {"epoch": 0.06, "grad_norm": 1.7722605466842651, "learning_rate": 9.961998030997733e-05, "loss": 4.1621, "step": 285},
    {"epoch": 0.06, "grad_norm": 2.2758901119232178, "learning_rate": 9.95991889745999e-05, "loss": 4.2537, "step": 290},
    {"epoch": 0.06, "grad_norm": 2.0251128673553467, "learning_rate": 9.957784621261441e-05, "loss": 4.1987, "step": 295},
    {"epoch": 0.06, "grad_norm": 2.0095252990722656, "learning_rate": 9.955595226130226e-05, "loss": 4.2274, "step": 300},
    {"epoch": 0.06, "grad_norm": 1.6288301944732666, "learning_rate": 9.953350736407282e-05, "loss": 4.1804, "step": 305},
    {"epoch": 0.06, "grad_norm": 2.0467007160186768, "learning_rate": 9.951051177046069e-05, "loss": 4.1055, "step": 310},
    {"epoch": 0.07, "grad_norm": 1.819460391998291, "learning_rate": 9.948696573612292e-05, "loss": 4.1044, "step": 315},
    {"epoch": 0.07, "grad_norm": 2.1974942684173584, "learning_rate": 9.946286952283618e-05, "loss": 4.1271, "step": 320},
    {"epoch": 0.07, "grad_norm": 2.006293773651123, "learning_rate": 9.943822339849381e-05, "loss": 4.0771, "step": 325},
    {"epoch": 0.07, "grad_norm": 1.8218380212783813, "learning_rate": 9.941302763710288e-05, "loss": 3.9985, "step": 330},
    {"epoch": 0.07, "grad_norm": 2.2746903896331787, "learning_rate": 9.938728251878116e-05, "loss": 4.1088, "step": 335},
    {"epoch": 0.07, "grad_norm": 1.773939609527588, "learning_rate": 9.936098832975393e-05, "loss": 4.0133, "step": 340},
    {"epoch": 0.07, "grad_norm": 1.7421215772628784, "learning_rate": 9.933414536235091e-05, "loss": 3.9569, "step": 345},
    {"epoch": 0.07, "grad_norm": 1.555828332901001, "learning_rate": 9.93067539150029e-05, "loss": 3.9792, "step": 350},
    {"epoch": 0.07, "grad_norm": 1.7069153785705566, "learning_rate": 9.927881429223853e-05, "loss": 3.9499, "step": 355},
    {"epoch": 0.07, "grad_norm": 1.7734919786453247, "learning_rate": 9.925032680468085e-05, "loss": 3.9715, "step": 360},
    {"epoch": 0.08, "grad_norm": 1.539474606513977, "learning_rate": 9.922129176904388e-05, "loss": 3.9238, "step": 365},
    {"epoch": 0.08, "grad_norm": 1.5540482997894287, "learning_rate": 9.919170950812911e-05, "loss": 3.8796, "step": 370},
    {"epoch": 0.08, "grad_norm": 1.549526333808899, "learning_rate": 9.916158035082184e-05, "loss": 3.8584, "step": 375},
    {"epoch": 0.08, "grad_norm": 1.7199825048446655, "learning_rate": 9.913090463208763e-05, "loss": 3.8513, "step": 380},
    {"epoch": 0.08, "grad_norm": 1.535899043083191, "learning_rate": 9.90996826929685e-05, "loss": 3.873, "step": 385},
    {"epoch": 0.08, "grad_norm": 1.3458640575408936, "learning_rate": 9.906791488057916e-05, "loss": 3.8087, "step": 390},
    {"epoch": 0.08, "grad_norm": 1.6906614303588867, "learning_rate": 9.903560154810313e-05, "loss": 3.8095, "step": 395},
    {"epoch": 0.08, "grad_norm": 1.5479685068130493, "learning_rate": 9.900274305478887e-05, "loss": 3.8874, "step": 400},
    {"epoch": 0.08, "grad_norm": 1.3803184032440186, "learning_rate": 9.896933976594572e-05, "loss": 3.8076, "step": 405},
    {"epoch": 0.09, "grad_norm": 1.563531517982483, "learning_rate": 9.893539205293989e-05, "loss": 3.8089, "step": 410},
    {"epoch": 0.09, "grad_norm": 1.5819116830825806, "learning_rate": 9.890090029319028e-05, "loss": 3.7708, "step": 415},
    {"epoch": 0.09, "grad_norm": 1.441551923751831, "learning_rate": 9.886586487016433e-05, "loss": 3.641, "step": 420},
    {"epoch": 0.09, "grad_norm": 2.2000863552093506, "learning_rate": 9.883028617337378e-05, "loss": 3.8062, "step": 425},
    {"epoch": 0.09, "grad_norm": 1.5903886556625366, "learning_rate": 9.879416459837022e-05, "loss": 3.6873, "step": 430},
    {"epoch": 0.09, "grad_norm": 1.3820626735687256, "learning_rate": 9.875750054674082e-05, "loss": 3.6398, "step": 435},
    {"epoch": 0.09, "grad_norm": 1.3108272552490234, "learning_rate": 9.872029442610382e-05, "loss": 3.6804, "step": 440},
    {"epoch": 0.09, "grad_norm": 1.4747930765151978, "learning_rate": 9.8682546650104e-05, "loss": 3.693, "step": 445},
    {"epoch": 0.09, "grad_norm": 1.3883565664291382, "learning_rate": 9.864425763840802e-05, "loss": 3.6498, "step": 450},
    {"epoch": 0.09, "grad_norm": 1.3403358459472656, "learning_rate": 9.860542781669988e-05, "loss": 3.6682, "step": 455},
    {"epoch": 0.1, "grad_norm": 1.2842109203338623, "learning_rate": 9.85660576166761e-05, "loss": 3.649, "step": 460},
    {"epoch": 0.1, "grad_norm": 1.319282054901123, "learning_rate": 9.852614747604093e-05, "loss": 3.6335, "step": 465},
    {"epoch": 0.1, "grad_norm": 1.3801344633102417, "learning_rate": 9.848569783850145e-05, "loss": 3.6318, "step": 470},
    {"epoch": 0.1, "grad_norm": 1.342846393585205, "learning_rate": 9.844470915376278e-05, "loss": 3.604, "step": 475},
    {"epoch": 0.1, "grad_norm": 1.452356219291687, "learning_rate": 9.840318187752292e-05, "loss": 3.599, "step": 480},
    {"epoch": 0.1, "grad_norm": 1.323322057723999, "learning_rate": 9.836111647146771e-05, "loss": 3.5778, "step": 485},
    {"epoch": 0.1, "grad_norm": 1.1092493534088135, "learning_rate": 9.831851340326577e-05, "loss": 3.6146, "step": 490},
    {"epoch": 0.1, "grad_norm": 1.1545424461364746, "learning_rate": 9.82753731465633e-05, "loss": 3.6122, "step": 495},
    {"epoch": 0.1, "grad_norm": 1.1281070709228516, "learning_rate": 9.823169618097871e-05, "loss": 3.6009, "step": 500},
    {"epoch": 0.1, "grad_norm": 1.1365258693695068, "learning_rate": 9.81874829920974e-05, "loss": 3.5677, "step": 505},
    {"epoch": 0.11, "grad_norm": 1.1423096656799316, "learning_rate": 9.814273407146623e-05, "loss": 3.5811, "step": 510},
    {"epoch": 0.11, "grad_norm": 1.1574337482452393, "learning_rate": 9.809744991658829e-05, "loss": 3.5232, "step": 515},
    {"epoch": 0.11, "grad_norm": 1.2431848049163818, "learning_rate": 9.805163103091708e-05, "loss": 3.5447, "step": 520},
    {"epoch": 0.11, "grad_norm": 1.1095994710922241, "learning_rate": 9.800527792385112e-05, "loss": 3.4747, "step": 525},
    {"epoch": 0.11, "grad_norm": 1.259093165397644, "learning_rate": 9.79583911107282e-05, "loss": 3.4696, "step": 530},
    {"epoch": 0.11, "grad_norm": 1.3247113227844238, "learning_rate": 9.791097111281968e-05, "loss": 3.4693, "step": 535},
    {"epoch": 0.11, "grad_norm": 1.1355781555175781, "learning_rate": 9.786301845732467e-05, "loss": 3.4687, "step": 540},
    {"epoch": 0.11, "grad_norm": 0.9854279160499573, "learning_rate": 9.781453367736418e-05, "loss": 3.4761, "step": 545},
    {"epoch": 0.11, "grad_norm": 0.998266339302063, "learning_rate": 9.776551731197524e-05, "loss": 3.4733, "step": 550},
    {"epoch": 0.12, "grad_norm": 1.0570634603500366, "learning_rate": 9.771596990610478e-05, "loss": 3.483, "step": 555},
    {"epoch": 0.12, "grad_norm": 1.182489037513733, "learning_rate": 9.766589201060372e-05, "loss": 3.4698, "step": 560},
    {"epoch": 0.12, "grad_norm": 3.718491554260254, "learning_rate": 9.761528418222077e-05, "loss": 3.4412, "step": 565},
    {"epoch": 0.12, "grad_norm": 1.1469898223876953, "learning_rate": 9.756414698359624e-05, "loss": 3.3577, "step": 570},
    {"epoch": 0.12, "grad_norm": 1.014533281326294, "learning_rate": 9.75124809832558e-05, "loss": 3.3677, "step": 575},
    {"epoch": 0.12, "grad_norm": 1.0099555253982544, "learning_rate": 9.746028675560413e-05, "loss": 3.4261, "step": 580},
    {"epoch": 0.12, "grad_norm": 1.081205129623413, "learning_rate": 9.740756488091861e-05, "loss": 3.3498, "step": 585},
    {"epoch": 0.12, "grad_norm": 1.2028361558914185, "learning_rate": 9.735431594534277e-05, "loss": 3.4458, "step": 590},
    {"epoch": 0.12, "grad_norm": 1.1756874322891235, "learning_rate": 9.730054054087983e-05, "loss": 3.3875, "step": 595},
    {"epoch": 0.12, "grad_norm": 1.140699028968811, "learning_rate": 9.724623926538612e-05, "loss": 3.3088, "step": 600},
    {"epoch": 0.13, "grad_norm": 0.9892724752426147, "learning_rate": 9.719141272256443e-05, "loss": 3.3534, "step": 605},
    {"epoch": 0.13, "grad_norm": 1.045665979385376, "learning_rate": 9.713606152195726e-05, "loss": 3.4133, "step": 610},
    {"epoch": 0.13, "grad_norm": 1.0393067598342896, "learning_rate": 9.708018627894011e-05, "loss": 3.4175, "step": 615},
    {"epoch": 0.13, "grad_norm": 2.099212169647217, "learning_rate": 9.702378761471456e-05, "loss": 3.4136, "step": 620},
    {"epoch": 0.13, "grad_norm": 0.9463175535202026, "learning_rate": 9.696686615630146e-05, "loss": 3.3175, "step": 625},
    {"epoch": 0.13, "grad_norm": 0.9630415439605713, "learning_rate": 9.690942253653385e-05, "loss": 3.343, "step": 630},
    {"epoch": 0.13, "grad_norm": 0.9103713035583496, "learning_rate": 9.685145739405002e-05, "loss": 3.3291, "step": 635},
    {"epoch": 0.13, "grad_norm": 1.01008939743042, "learning_rate": 9.679297137328634e-05, "loss": 3.3743, "step": 640},
    {"epoch": 0.13, "grad_norm": 1.0484918355941772, "learning_rate": 9.673396512447013e-05, "loss": 3.2935, "step": 645},
    {"epoch": 0.14, "grad_norm": 0.9054334759712219, "learning_rate": 9.667443930361247e-05, "loss": 3.2916, "step": 650},
    {"epoch": 0.14, "grad_norm": 1.2290186882019043, "learning_rate": 9.661439457250076e-05, "loss": 3.3105, "step": 655},
    {"epoch": 0.14, "grad_norm": 0.8617240786552429, "learning_rate": 9.655383159869158e-05, "loss": 3.2651, "step": 660},
    {"epoch": 0.14, "grad_norm": 0.8485407829284668, "learning_rate": 9.649275105550309e-05, "loss": 3.1712, "step": 665},
    {"epoch": 0.14, "grad_norm": 0.8689857125282288, "learning_rate": 9.643115362200762e-05, "loss": 3.2274, "step": 670},
    {"epoch": 0.14, "grad_norm": 0.8368767499923706, "learning_rate": 9.636903998302409e-05, "loss": 3.264, "step": 675},
    {"epoch": 0.14, "grad_norm": 0.9838525652885437, "learning_rate": 9.630641082911045e-05, "loss": 3.2652, "step": 680},
    {"epoch": 0.14, "grad_norm": 0.9460552334785461, "learning_rate": 9.624326685655593e-05, "loss": 3.3197, "step": 685},
    {"epoch": 0.14, "grad_norm": 0.9261606931686401, "learning_rate": 9.617960876737337e-05, "loss": 3.3094, "step": 690},
    {"epoch": 0.14, "grad_norm": 0.8362047672271729, "learning_rate": 9.611543726929134e-05, "loss": 3.2742, "step": 695},
    {"epoch": 0.15, "grad_norm": 0.8609175086021423, "learning_rate": 9.605075307574635e-05, "loss": 3.2256, "step": 700},
    {"epoch": 0.15, "grad_norm": 0.8735069036483765, "learning_rate": 9.598555690587487e-05, "loss": 3.3234, "step": 705},
    {"epoch": 0.15, "grad_norm": 0.8475310206413269, "learning_rate": 9.591984948450532e-05, "loss": 3.2328, "step": 710},
    {"epoch": 0.15, "grad_norm": 0.831333577632904, "learning_rate": 9.585363154215008e-05, "loss": 3.1681, "step": 715},
    {"epoch": 0.15, "grad_norm": 0.7871400713920593, "learning_rate": 9.578690381499728e-05, "loss": 3.2017, "step": 720},
    {"epoch": 0.15, "grad_norm": 0.8291134238243103, "learning_rate": 9.571966704490271e-05, "loss": 3.2111, "step": 725},
    {"epoch": 0.15, "grad_norm": 0.8258323669433594, "learning_rate": 9.565192197938148e-05, "loss": 3.213, "step": 730},
    {"epoch": 0.15, "grad_norm": 0.8596241474151611, "learning_rate": 9.558366937159977e-05, "loss": 3.1932, "step": 735},
    {"epoch": 0.15, "grad_norm": 0.8011757135391235, "learning_rate": 9.551490998036646e-05, "loss": 3.2353, "step": 740},
    {"epoch": 0.15, "grad_norm": 0.8874036073684692, "learning_rate": 9.544564457012463e-05, "loss": 3.1661, "step": 745},
    {"epoch": 0.16, "grad_norm": 0.9632325768470764, "learning_rate": 9.537587391094314e-05, "loss": 3.1521, "step": 750},
    {"epoch": 0.16, "grad_norm": 0.7759344577789307, "learning_rate": 9.5305598778508e-05, "loss": 3.209, "step": 755},
    {"epoch": 0.16, "grad_norm": 0.8687903881072998, "learning_rate": 9.52348199541138e-05, "loss": 3.1717, "step": 760},
    {"epoch": 0.16, "grad_norm": 0.7881929278373718, "learning_rate": 9.516353822465504e-05, "loss": 3.203, "step": 765},
    {"epoch": 0.16, "grad_norm": 0.8782910704612732, "learning_rate": 9.509175438261726e-05, "loss": 3.1721, "step": 770},
    {"epoch": 0.16, "grad_norm": 0.8087301850318909, "learning_rate": 9.501946922606838e-05, "loss": 3.1684, "step": 775},
    {"epoch": 0.16, "grad_norm": 1.737407922744751, "learning_rate": 9.494668355864973e-05, "loss": 3.0995, "step": 780},
    {"epoch": 0.16, "grad_norm": 0.8467393517494202, "learning_rate": 9.487339818956716e-05, "loss": 3.1303, "step": 785},
    {"epoch": 0.16, "grad_norm": 0.7596563100814819, "learning_rate": 9.479961393358203e-05, "loss": 3.1472, "step": 790},
    {"epoch": 0.17, "grad_norm": 0.8828564882278442, "learning_rate": 9.472533161100215e-05, "loss": 3.166, "step": 795},
    {"epoch": 0.17, "grad_norm": 0.787274956703186, "learning_rate": 9.465055204767265e-05, "loss": 3.2096, "step": 800},
    {"epoch": 0.17, "grad_norm": 0.7758961915969849, "learning_rate": 9.457527607496685e-05, "loss": 3.1402, "step": 805},
    {"epoch": 0.17, "grad_norm": 0.8374167680740356, "learning_rate": 9.44995045297769e-05, "loss": 3.13, "step": 810},
    {"epoch": 0.17, "grad_norm": 0.9025399088859558, "learning_rate": 9.442323825450464e-05, "loss": 3.0811, "step": 815},
    {"epoch": 0.17, "grad_norm": 0.7858222723007202, "learning_rate": 9.43464780970521e-05, "loss": 3.1224, "step": 820},
    {"epoch": 0.17, "grad_norm": 0.8241803646087646, "learning_rate": 9.426922491081212e-05, "loss": 3.1728, "step": 825},
    {"epoch": 0.17, "grad_norm": 0.8176282048225403, "learning_rate": 9.419147955465888e-05, "loss": 3.1275, "step": 830},
    {"epoch": 0.17, "grad_norm": 0.7991018295288086, "learning_rate": 9.411324289293832e-05, "loss": 3.0839, "step": 835},
    {"epoch": 0.17, "grad_norm": 0.8595659732818604, "learning_rate": 9.403451579545859e-05, "loss": 3.1161, "step": 840},
    {"epoch": 0.18, "grad_norm": 0.8479962944984436, "learning_rate": 9.395529913748025e-05, "loss": 3.0946, "step": 845},
    {"epoch": 0.18, "grad_norm": 0.775111198425293, "learning_rate": 9.387559379970672e-05, "loss": 3.0524, "step": 850},
    {"epoch": 0.18, "grad_norm": 0.7162839770317078, "learning_rate": 9.379540066827431e-05, "loss": 3.0587, "step": 855},
    {"epoch": 0.18, "grad_norm": 0.787935197353363, "learning_rate": 9.371472063474248e-05, "loss": 3.0199, "step": 860},
    {"epoch": 0.18, "grad_norm": 0.7565710544586182, "learning_rate": 9.363355459608394e-05, "loss": 3.0524, "step": 865},
    {"epoch": 0.18, "grad_norm": 0.7326767444610596, "learning_rate": 9.355190345467457e-05, "loss": 3.1168, "step": 870},
    {"epoch": 0.18, "grad_norm": 0.773398220539093, "learning_rate": 9.346976811828352e-05, "loss": 3.0634, "step": 875},
    {"epoch": 0.18, "grad_norm": 0.733391523361206, "learning_rate": 9.338714950006297e-05, "loss": 3.082, "step": 880},
    {"epoch": 0.18, "grad_norm": 0.767120897769928, "learning_rate": 9.330404851853817e-05, "loss": 3.0013, "step": 885},
    {"epoch": 0.18, "grad_norm": 0.738296627998352, "learning_rate": 9.3220466097597e-05, "loss": 3.1004, "step": 890},
    {"epoch": 0.19, "grad_norm": 0.7394375205039978, "learning_rate": 9.313640316647991e-05, "loss": 3.0697, "step": 895},
    {"epoch": 0.19, "grad_norm": 0.7176501154899597, "learning_rate": 9.305186065976945e-05, "loss": 3.1356, "step": 900},
    {"epoch": 0.19, "grad_norm": 0.7473928928375244, "learning_rate": 9.296683951737993e-05, "loss": 3.0234, "step": 905},
    {"epoch": 0.19, "grad_norm": 0.8314266800880432, "learning_rate": 9.288134068454697e-05, "loss": 3.0555, "step": 910},
    {"epoch": 0.19, "grad_norm": 0.6912246942520142, "learning_rate": 9.2795365111817e-05, "loss": 3.0148, "step": 915},
    {"epoch": 0.19, "grad_norm": 0.7679839730262756, "learning_rate": 9.270891375503665e-05, "loss": 3.0237, "step": 920},
    {"epoch": 0.19, "grad_norm": 0.7263458371162415, "learning_rate": 9.262198757534218e-05, "loss": 3.1093, "step": 925},
    {"epoch": 0.19, "grad_norm": 0.7467177510261536, "learning_rate": 9.253458753914874e-05, "loss": 3.0606, "step": 930},
    {"epoch": 0.19, "grad_norm": 0.7480751872062683, "learning_rate": 9.244671461813969e-05, "loss": 3.1225, "step": 935},
    {"epoch": 0.2, "grad_norm": 0.7728322148323059, "learning_rate": 9.235836978925572e-05, "loss": 3.0865, "step": 940},
    {"epoch": 0.2, "grad_norm": 0.7638208866119385, "learning_rate": 9.226955403468406e-05, "loss": 3.0544, "step": 945},
    {"epoch": 0.2, "grad_norm": 0.7154102921485901, "learning_rate": 9.21802683418475e-05, "loss": 2.9843, "step": 950},
    {"epoch": 0.2, "grad_norm": 0.6908529996871948, "learning_rate": 9.209051370339347e-05, "loss": 2.9612, "step": 955},
    {"epoch": 0.2, "grad_norm": 0.7261789441108704, "learning_rate": 9.200029111718295e-05, "loss": 3.0185, "step": 960},
    {"epoch": 0.2, "grad_norm": 0.6965435147285461, "learning_rate": 9.190960158627941e-05, "loss": 3.0435, "step": 965},
    {"epoch": 0.2, "grad_norm": 0.7368409037590027, "learning_rate": 9.181844611893766e-05, "loss": 2.9824, "step": 970},
    {"epoch": 0.2, "grad_norm": 0.8360817432403564, "learning_rate": 9.172682572859261e-05, "loss": 3.0772, "step": 975},
    {"epoch": 0.2, "grad_norm": 0.7241373658180237, "learning_rate": 9.163474143384806e-05, "loss": 3.015, "step": 980},
    {"epoch": 0.2, "grad_norm": 0.7527982592582703, "learning_rate": 9.154219425846528e-05, "loss": 3.0295, "step": 985},
    {"epoch": 0.21, "grad_norm": 0.7530322074890137, "learning_rate": 9.144918523135175e-05, "loss": 3.0634, "step": 990},
    {"epoch": 0.21, "grad_norm": 0.7819015979766846, "learning_rate": 9.13557153865496e-05, "loss": 3.057, "step": 995},
    {"epoch": 0.21, "grad_norm": 0.7606942653656006, "learning_rate": 9.12617857632242e-05, "loss": 2.9505, "step": 1000},
    {"epoch": 0.21, "grad_norm": 0.7142955660820007, "learning_rate": 9.116739740565259e-05, "loss": 2.9851, "step": 1005},
    {"epoch": 0.21, "grad_norm": 0.6874934434890747, "learning_rate": 9.107255136321184e-05, "loss": 2.9952, "step": 1010},
    {"epoch": 0.21, "grad_norm": 0.6932684779167175, "learning_rate": 9.09772486903674e-05, "loss": 2.9791, "step": 1015},
    {"epoch": 0.21, "grad_norm": 0.7075040936470032, "learning_rate": 9.08814904466614e-05, "loss": 3.0384, "step": 1020},
    {"epoch": 0.21, "grad_norm": 0.7663232684135437, "learning_rate": 9.078527769670085e-05, "loss": 2.979, "step": 1025},
    {"epoch": 0.21, "grad_norm": 0.7197521924972534, "learning_rate": 9.068861151014575e-05, "loss": 2.9968, "step": 1030},
    {"epoch": 0.22, "grad_norm": 0.7028963565826416, "learning_rate": 9.05914929616973e-05, "loss": 3.0382, "step": 1035},
    {"epoch": 0.22, "grad_norm": 1.5042543411254883, "learning_rate": 9.04939231310859e-05, "loss": 3.0451, "step": 1040},
    {"epoch": 0.22, "grad_norm": 0.7034931182861328, "learning_rate": 9.039590310305914e-05, "loss": 3.0157, "step": 1045},
    {"epoch": 0.22, "grad_norm": 0.6842995882034302, "learning_rate": 9.029743396736974e-05, "loss": 2.9701, "step": 1050},
    {"epoch": 0.22, "grad_norm": 0.6938946843147278, "learning_rate": 9.019851681876348e-05, "loss": 3.0551, "step": 1055},
    {"epoch": 0.22, "grad_norm": 0.7267816662788391, "learning_rate": 9.009915275696693e-05, "loss": 2.9805, "step": 1060},
    {"epoch": 0.22, "grad_norm": 1.1045072078704834, "learning_rate": 8.999934288667534e-05, "loss": 3.0012, "step": 1065},
    {"epoch": 0.22, "grad_norm": 0.7654233574867249, "learning_rate": 8.989908831754028e-05, "loss": 2.8833, "step": 1070},
    {"epoch": 0.22, "grad_norm": 0.7957326769828796, "learning_rate": 8.979839016415735e-05, "loss": 2.8604, "step": 1075},
    {"epoch": 0.22, "grad_norm": 0.7361999154090881, "learning_rate": 8.969724954605373e-05, "loss": 3.0241, "step": 1080},
    {"epoch": 0.23, "grad_norm": 0.6803563833236694, "learning_rate": 8.959566758767581e-05, "loss": 3.0452, "step": 1085},
    {"epoch": 0.23, "grad_norm": 0.733856737613678, "learning_rate": 8.949364541837661e-05, "loss": 3.0064, "step": 1090},
    {"epoch": 0.23, "grad_norm": 0.7043154239654541, "learning_rate": 8.939118417240329e-05, "loss": 2.9622, "step": 1095},
    {"epoch": 0.23, "grad_norm": 0.7002702355384827, "learning_rate": 8.92882849888845e-05, "loss": 2.9593, "step": 1100},
    {"epoch": 0.23, "grad_norm": 0.6819352507591248, "learning_rate": 8.918494901181773e-05, "loss": 2.921, "step": 1105},
    {"epoch": 0.23, "grad_norm": 0.6527635455131531, "learning_rate": 8.908117739005659e-05, "loss": 2.9374, "step": 1110},
    {"epoch": 0.23, "grad_norm": 0.6832794547080994, "learning_rate": 8.897697127729805e-05, "loss": 2.9854, "step": 1115},
    {"epoch": 0.23, "grad_norm": 0.6709647178649902, "learning_rate": 8.887233183206957e-05, "loss": 2.9673, "step": 1120},
    {"epoch": 0.23, "grad_norm": 0.6724316477775574, "learning_rate": 8.876726021771627e-05, "loss": 2.9038, "step": 1125},
    {"epoch": 0.23, "grad_norm": 0.6602643132209778, "learning_rate": 8.866175760238798e-05, "loss": 2.8952, "step": 1130},
    {"epoch": 0.24, "grad_norm": 0.7111104130744934, "learning_rate": 8.855582515902625e-05, "loss": 2.9609, "step": 1135},
    {"epoch": 0.24, "grad_norm": 0.7529614567756653, "learning_rate": 8.844946406535131e-05, "loss": 2.9714, "step": 1140},
    {"epoch": 0.24, "grad_norm": 0.6821660399436951, "learning_rate": 8.834267550384893e-05, "loss": 2.925, "step": 1145},
    {"epoch": 0.24, "grad_norm": 0.6634496450424194, "learning_rate": 8.823546066175741e-05, "loss": 2.8966, "step": 1150},
    {"epoch": 0.24, "grad_norm": 0.7059546709060669, "learning_rate": 8.81278207310542e-05, "loss": 2.9919, "step": 1155},
    {"epoch": 0.24, "grad_norm": 0.7027754187583923, "learning_rate": 8.801975690844278e-05, "loss": 2.9371, "step": 1160},
    {"epoch": 0.24, "grad_norm": 0.7034463286399841, "learning_rate": 8.791127039533934e-05, "loss": 2.9084, "step": 1165},
    {"epoch": 0.24, "grad_norm": 0.6744077801704407, "learning_rate": 8.780236239785935e-05, "loss": 2.9022, "step": 1170},
    {"epoch": 0.24, "grad_norm": 0.6845873594284058, "learning_rate": 8.76930341268042e-05, "loss": 2.9669, "step": 1175},
    {"epoch": 0.25, "grad_norm": 0.7137369513511658, "learning_rate": 8.758328679764776e-05, "loss": 2.9028, "step": 1180},
    {"epoch": 0.25, "grad_norm": 0.7238683104515076, "learning_rate": 8.747312163052284e-05, "loss": 2.8834, "step": 1185},
    {"epoch": 0.25, "grad_norm": 0.6741464734077454, "learning_rate": 8.736253985020761e-05, "loss": 2.8644, "step": 1190},
    {"epoch": 0.25, "grad_norm": 0.657223641872406, "learning_rate": 8.725154268611203e-05, "loss": 2.8719, "step": 1195},
    {"epoch": 0.25, "grad_norm": 0.6820774674415588, "learning_rate": 8.714013137226411e-05, "loss": 3.0099, "step": 1200},
    {"epoch": 0.25, "grad_norm": 0.6592907905578613, "learning_rate": 8.702830714729628e-05, "loss": 2.9659, "step": 1205},
    {"epoch": 0.25, "grad_norm": 0.6504175066947937, "learning_rate": 8.691607125443153e-05, "loss": 2.9834, "step": 1210},
    {"epoch": 0.25, "grad_norm": 0.655095636844635, "learning_rate": 8.680342494146967e-05, "loss": 2.9123, "step": 1215},
    {"epoch": 0.25, "grad_norm": 0.64875328540802, "learning_rate": 8.66903694607734e-05, "loss": 2.891, "step": 1220},
    {"epoch": 0.25, "grad_norm": 0.6668553352355957, "learning_rate": 8.65769060692544e-05, "loss": 3.0055, "step": 1225},
    {"epoch": 0.26, "grad_norm": 0.6586368680000305, "learning_rate": 8.646303602835936e-05, "loss": 2.9121, "step": 1230},
    {"epoch": 0.26, "grad_norm": 0.649715006351471, "learning_rate": 8.634876060405597e-05, "loss": 3.0043, "step": 1235},
    {"epoch": 0.26, "grad_norm": 0.6875714659690857, "learning_rate": 8.623408106681884e-05, "loss": 2.8886, "step": 1240},
    {"epoch": 0.26, "grad_norm": 0.6406840682029724, "learning_rate": 8.611899869161535e-05, "loss": 2.9263, "step": 1245},
    {"epoch": 0.26, "grad_norm": 0.6714261770248413, "learning_rate": 8.600351475789147e-05, "loss": 2.9036, "step": 1250},
    {"epoch": 0.26, "grad_norm": 0.6701438426971436, "learning_rate": 8.588763054955764e-05, "loss": 2.9502, "step": 1255},
    {"epoch": 0.26, "grad_norm": 0.6833099722862244, "learning_rate": 8.57713473549743e-05, "loss": 2.8397, "step": 1260},
    {"epoch": 0.26, "grad_norm": 0.652505099773407, "learning_rate": 8.565466646693778e-05, "loss": 2.9765, "step": 1265},
    {"epoch": 0.26, "grad_norm": 0.6483748555183411, "learning_rate": 8.553758918266578e-05, "loss": 2.8748, "step": 1270},
    {"epoch": 0.27, "grad_norm": 0.6926413774490356, "learning_rate": 8.5420116803783e-05, "loss": 2.952, "step": 1275},
    {"epoch": 0.27, "grad_norm": 0.661097526550293, "learning_rate": 8.530225063630668e-05, "loss": 2.9308, "step": 1280},
    {"epoch": 0.27, "grad_norm": 0.7419840693473816, "learning_rate": 8.518399199063205e-05, "loss": 2.9051, "step": 1285},
    {"epoch": 0.27, "grad_norm": 0.707297682762146, "learning_rate": 8.50653421815178e-05, "loss": 2.9121, "step": 1290},
    {"epoch": 0.27, "grad_norm": 0.66552734375, "learning_rate": 8.494630252807138e-05, "loss": 2.88, "step": 1295},
    {"epoch": 0.27, "grad_norm": 3.651979923248291, "learning_rate": 8.482687435373449e-05, "loss": 2.9026, "step": 1300},
    {"epoch": 0.27, "grad_norm": 0.6851710081100464, "learning_rate": 8.470705898626817e-05, "loss": 2.8257, "step": 1305},
    {"epoch": 0.27, "grad_norm": 0.6279444694519043, "learning_rate": 8.458685775773822e-05, "loss": 2.8641, "step": 1310},
    {"epoch": 0.27, "grad_norm": 0.6904920339584351, "learning_rate": 8.446627200450025e-05, "loss": 2.9092, "step": 1315},
    {"epoch": 0.27, "grad_norm": 0.6325361728668213, "learning_rate": 8.434530306718493e-05, "loss": 2.8976, "step": 1320},
    {"epoch": 0.28, "grad_norm": 0.6655163168907166, "learning_rate": 8.4223952290683e-05, "loss": 2.8918, "step": 1325},
    {"epoch": 0.28, "grad_norm": 0.6457725763320923, "learning_rate": 8.41022210241304e-05, "loss": 2.8592, "step": 1330},
    {"epoch": 0.28, "grad_norm": 0.6553127765655518, "learning_rate": 8.398011062089316e-05, "loss": 2.9329, "step": 1335},
    {"epoch": 0.28, "grad_norm": 0.6595431566238403, "learning_rate": 8.385762243855249e-05, "loss": 2.8627, "step": 1340},
    {"epoch": 0.28, "grad_norm": 0.6419198513031006, "learning_rate": 8.373475783888958e-05, "loss": 2.8726, "step": 1345},
    {"epoch": 0.28, "grad_norm": 0.6688870787620544, "learning_rate": 8.36115181878705e-05, "loss": 2.8129, "step": 1350},
    {"epoch": 0.28, "grad_norm": 0.6275585889816284, "learning_rate": 8.348790485563101e-05, "loss": 2.8333, "step": 1355},
    {"epoch": 0.28, "grad_norm": 0.658042848110199, "learning_rate": 8.336391921646134e-05, "loss": 2.7352, "step": 1360},
    {"epoch": 0.28, "grad_norm": 0.6524467468261719, "learning_rate": 8.323956264879089e-05, "loss": 2.9171, "step": 1365},
    {"epoch": 0.28, "grad_norm": 0.6397958993911743, "learning_rate": 8.311483653517294e-05, "loss": 2.8176, "step": 1370},
    {"epoch": 0.29, "grad_norm": 0.6147693395614624, "learning_rate": 8.298974226226919e-05, "loss": 2.8676, "step": 1375},
    {"epoch": 0.29, "grad_norm": 0.6508784294128418, "learning_rate": 8.28642812208345e-05, "loss": 2.842, "step": 1380},
    {"epoch": 0.29, "grad_norm": 0.6087005734443665, "learning_rate": 8.273845480570123e-05, "loss": 2.8126, "step": 1385},
    {"epoch": 0.29, "grad_norm": 0.7282595634460449, "learning_rate": 8.26122644157639e-05, "loss": 2.9029, "step": 1390},
    {"epoch": 0.29, "grad_norm": 0.619777500629425, "learning_rate": 8.248571145396362e-05, "loss": 2.8496, "step": 1395},
    {"epoch": 0.29, "grad_norm": 0.6248059868812561, "learning_rate": 8.235879732727236e-05, "loss": 2.8447, "step": 1400},
    {"epoch": 0.29, "grad_norm": 0.6536071300506592, "learning_rate": 8.223152344667745e-05, "loss": 2.831, "step": 1405},
    {"epoch": 0.29, "grad_norm": 0.6239855289459229, "learning_rate": 8.21038912271658e-05, "loss": 2.8611, "step": 1410},
    {"epoch": 0.29, "grad_norm": 0.6541547775268555, "learning_rate": 8.197590208770824e-05, "loss": 2.8558, "step": 1415},
    {"epoch": 0.3, "grad_norm": 0.6467083692550659, "learning_rate": 8.184755745124371e-05, "loss": 2.8409, "step": 1420},
    {"epoch": 0.3, "grad_norm": 0.6722072958946228, "learning_rate": 8.171885874466342e-05, "loss": 2.8381, "step": 1425},
    {"epoch": 0.3, "grad_norm": 0.6703723669052124, "learning_rate": 8.158980739879507e-05, "loss": 2.8365, "step": 1430},
    {"epoch": 0.3, "grad_norm": 0.6237878799438477, "learning_rate": 8.146040484838677e-05, "loss": 2.8091, "step": 1435},
    {"epoch": 0.3, "grad_norm": 0.6343963146209717, "learning_rate": 8.133065253209132e-05, "loss": 2.8389, "step": 1440},
    {"epoch": 0.3, "grad_norm": 0.6207173466682434, "learning_rate": 8.120055189245e-05, "loss": 2.8748, "step": 1445},
    {"epoch": 0.3, "grad_norm": 0.6682604551315308, "learning_rate": 8.10701043758767e-05, "loss": 2.8611, "step": 1450},
    {"epoch": 0.3, "grad_norm": 0.6844489574432373, "learning_rate": 8.093931143264174e-05, "loss": 2.8423, "step": 1455},
    {"epoch": 0.3, "grad_norm": 0.6315253376960754, "learning_rate": 8.080817451685576e-05, "loss": 2.7389, "step": 1460},
    {"epoch": 0.3, "grad_norm": 0.6357865929603577, "learning_rate": 8.067669508645356e-05, "loss": 2.8418, "step": 1465},
    {"epoch": 0.31, "grad_norm": 0.6491623520851135, "learning_rate": 8.054487460317797e-05, "loss": 2.8975, "step": 1470},
    {"epoch": 0.31, "grad_norm": 0.6408995985984802, "learning_rate": 8.041271453256345e-05, "loss": 2.782, "step": 1475},
    {"epoch": 0.31, "grad_norm": 0.6269375085830688, "learning_rate": 8.02802163439199e-05, "loss": 2.8292, "step": 1480},
    {"epoch": 0.31, "grad_norm": 0.7113586068153381, "learning_rate": 8.01473815103163e-05, "loss": 2.8488, "step": 1485},
    {"epoch": 0.31, "grad_norm": 0.6262251138687134, "learning_rate": 8.001421150856434e-05, "loss": 2.8008, "step": 1490},
    {"epoch": 0.31, "grad_norm": 0.6527858972549438, "learning_rate": 7.988070781920197e-05, "loss": 2.8618, "step": 1495},
    {"epoch": 0.31, "grad_norm": 0.6287201642990112, "learning_rate": 7.9746871926477e-05, "loss": 2.8064, "step": 1500},
    {"epoch": 0.31, "grad_norm": 0.6469153165817261, "learning_rate": 7.961270531833052e-05, "loss": 2.8638, "step": 1505},
    {"epoch": 0.31, "grad_norm": 0.6609588265419006, "learning_rate": 7.947820948638045e-05, "loss": 2.8039, "step": 1510},
    {"epoch": 0.31, "grad_norm": 0.7042279243469238, "learning_rate": 7.934338592590486e-05, "loss": 2.9047, "step": 1515},
    {"epoch": 0.32, "grad_norm": 0.6446994543075562, "learning_rate": 7.92082361358254e-05, "loss": 2.8422, "step": 1520},
    {"epoch": 0.32, "grad_norm": 0.9173874258995056, "learning_rate": 7.907276161869065e-05, "loss": 2.8219, "step": 1525},
    {"epoch": 0.32, "grad_norm": 0.6591891646385193, "learning_rate": 7.893696388065936e-05, "loss": 2.7361, "step": 1530},
    {"epoch": 0.32, "grad_norm": 0.6696944832801819, "learning_rate": 7.88008444314838e-05, "loss": 2.8942, "step": 1535},
    {"epoch": 0.32, "grad_norm": 0.6576460003852844, "learning_rate": 7.866440478449283e-05, "loss": 2.8264, "step": 1540},
    {"epoch": 0.32, "grad_norm": 0.6158410906791687, "learning_rate": 7.852764645657522e-05, "loss": 2.7558, "step": 1545},
    {"epoch": 0.32, "grad_norm": 0.6890105605125427, "learning_rate": 7.839057096816271e-05, "loss": 2.8061, "step": 1550},
    {"epoch": 0.32, "grad_norm": 1.1392295360565186, "learning_rate": 7.82531798432131e-05, "loss": 2.8412, "step": 1555},
    {"epoch": 0.32, "grad_norm": 0.6717019081115723, "learning_rate": 7.811547460919333e-05, "loss": 2.781, "step": 1560},
    {"epoch": 0.33, "grad_norm": 0.6458261609077454, "learning_rate": 7.797745679706254e-05, "loss": 2.8394, "step": 1565},
    {"epoch": 0.33, "grad_norm": 0.6580215096473694, "learning_rate": 7.783912794125496e-05, "loss": 2.8779, "step": 1570},
    {"epoch": 0.33, "grad_norm": 0.6356989741325378, "learning_rate": 7.770048957966291e-05, "loss": 2.828, "step": 1575},
    {"epoch": 0.33, "grad_norm": 0.6654439568519592, "learning_rate": 7.756154325361967e-05, "loss": 2.8222, "step": 1580},
    {"epoch": 0.33, "grad_norm": 0.6044449806213379, "learning_rate": 7.74222905078824e-05, "loss": 2.8407, "step": 1585},
    {"epoch": 0.33, "grad_norm": 0.6402572393417358, "learning_rate": 7.728273289061489e-05, "loss": 2.7892, "step": 1590},
    {"epoch": 0.33, "grad_norm": 2.9658334255218506, "learning_rate": 7.714287195337044e-05, "loss": 2.8384, "step": 1595},
    {"epoch": 0.33, "grad_norm": 0.6388287544250488, "learning_rate": 7.700270925107448e-05, "loss": 2.8379, "step": 1600},
    {"epoch": 0.33, "grad_norm": 0.6642169952392578, "learning_rate": 7.686224634200742e-05, "loss": 2.8276, "step": 1605},
    {"epoch": 0.33, "grad_norm": 0.6531822681427002, "learning_rate": 7.672148478778722e-05, "loss": 2.8161, "step": 1610},
    {"epoch": 0.34, "grad_norm": 0.6064309477806091, "learning_rate": 7.658042615335212e-05, "loss": 2.736, "step": 1615},
    {"epoch": 0.34, "grad_norm": 0.6005924344062805, "learning_rate": 7.643907200694318e-05, "loss": 2.8041, "step": 1620},
    {"epoch": 0.34, "grad_norm": 0.596092939376831, "learning_rate": 7.629742392008684e-05, "loss": 2.7951, "step": 1625},
    {"epoch": 0.34, "grad_norm": 0.6009310483932495, "learning_rate": 7.615548346757749e-05, "loss": 2.8565, "step": 1630},
    {"epoch": 0.34, "grad_norm": 0.6022703051567078, "learning_rate": 7.60132522274599e-05, "loss": 2.8197, "step": 1635},
    {"epoch": 0.34, "grad_norm": 0.5911956429481506, "learning_rate": 7.587073178101178e-05, "loss": 2.7829, "step": 1640},
    {"epoch": 0.34, "grad_norm": 0.6114310026168823, "learning_rate": 7.572792371272609e-05, "loss": 2.7378, "step": 1645},
    {"epoch": 0.34, "grad_norm": 0.6166604161262512, "learning_rate": 7.55848296102935e-05, "loss": 2.8319, "step": 1650},
    {"epoch": 0.34, "grad_norm": 0.596598744392395, "learning_rate": 7.544145106458465e-05, "loss": 2.7407, "step": 1655},
    {"epoch": 0.35, "grad_norm": 0.6231933236122131, "learning_rate": 7.529778966963259e-05, "loss": 2.7703, "step": 1660},
    {"epoch": 0.35, "grad_norm": 0.5864799618721008, "learning_rate": 7.515384702261496e-05, "loss": 2.792, "step": 1665},
    {"epoch": 0.35, "grad_norm": 0.629953145980835, "learning_rate": 7.500962472383627e-05, "loss": 2.7735, "step": 1670},
    {"epoch": 0.35, "grad_norm": 0.6393805742263794, "learning_rate": 7.486512437671011e-05, "loss": 2.8102, "step": 1675},
    {"epoch": 0.35, "grad_norm": 0.6531060934066772, "learning_rate": 7.472034758774128e-05, "loss": 2.7946, "step": 1680},
    {"epoch": 0.35, "grad_norm": 0.6248769760131836, "learning_rate": 7.457529596650797e-05, "loss": 2.815, "step": 1685},
    {"epoch": 0.35, "grad_norm": 0.6464139223098755, "learning_rate": 7.442997112564392e-05, "loss": 2.7105, "step": 1690},
    {"epoch": 0.35, "grad_norm": 0.6017792224884033, "learning_rate": 7.428437468082037e-05, "loss": 2.7475, "step": 1695},
    {"epoch": 0.35, "grad_norm": 0.6099601984024048, "learning_rate": 7.413850825072817e-05, "loss": 2.7917, "step": 1700},
    {"epoch": 0.35, "grad_norm": 0.5857229828834534, "learning_rate": 7.39923734570598e-05, "loss": 2.7858, "step": 1705},
    {"epoch": 0.36, "grad_norm": 2.5941388607025146, "learning_rate": 7.384597192449126e-05, "loss": 2.8045, "step": 1710},
    {"epoch": 0.36, "grad_norm": 0.6371538639068604, "learning_rate": 7.369930528066412e-05, "loss": 2.8803, "step": 1715},
    {"epoch": 0.36, "grad_norm": 0.6334919333457947, "learning_rate": 7.355237515616732e-05, "loss": 2.8386, "step": 1720},
    {"epoch": 0.36, "grad_norm": 0.6036331057548523, "learning_rate": 7.340518318451914e-05, "loss": 2.7614, "step": 1725},
    {"epoch": 0.36, "grad_norm": 0.5964059233665466, "learning_rate": 7.325773100214893e-05, "loss": 2.7165, "step": 1730},
    {"epoch": 0.36, "grad_norm": 0.6223915815353394, "learning_rate": 7.311002024837899e-05, "loss": 2.7807, "step": 1735},
    {"epoch": 0.36, "grad_norm": 0.5919489860534668, "learning_rate": 7.296205256540633e-05, "loss": 2.8301, "step": 1740},
    {"epoch": 0.36, "grad_norm": 0.6090957522392273, "learning_rate": 7.281382959828443e-05, "loss": 2.8146, "step": 1745},
    {"epoch": 0.36, "grad_norm": 0.5976056456565857, "learning_rate": 7.26653529949049e-05, "loss": 2.7906, "step": 1750},
    {"epoch": 0.36, "grad_norm": 0.6359770894050598, "learning_rate": 7.25166244059792e-05, "loss": 2.7609, "step": 1755},
    {"epoch": 0.37, "grad_norm": 0.6062914133071899, "learning_rate": 7.236764548502029e-05, "loss": 2.7974, "step": 1760},
    {"epoch": 0.37, "grad_norm": 0.6405194401741028, "learning_rate": 7.221841788832421e-05, "loss": 2.8288, "step": 1765},
    {"epoch": 0.37, "grad_norm": 0.6273607015609741, "learning_rate": 7.206894327495173e-05, "loss": 2.8076, "step": 1770},
    {"epoch": 0.37, "grad_norm": 0.5930814743041992, "learning_rate": 7.191922330670982e-05, "loss": 2.8182, "step": 1775},
    {"epoch": 0.37, "grad_norm": 0.5818787217140198, "learning_rate": 7.176925964813326e-05, "loss": 2.709, "step": 1780},
    {"epoch": 0.37, "grad_norm": 0.6052417755126953, "learning_rate": 7.161905396646607e-05, "loss": 2.8197, "step": 1785},
    {"epoch": 0.37, "grad_norm": 0.6246456503868103, "learning_rate": 7.146860793164299e-05, "loss": 2.7605, "step": 1790},
    {"epoch": 0.37, "grad_norm": 0.6177834868431091, "learning_rate": 7.131792321627098e-05, "loss": 2.8023, "step": 1795},
    {"epoch": 0.37, "grad_norm": 0.6127104163169861, "learning_rate": 7.116700149561048e-05, "loss": 2.8207, "step": 1800},
    {"epoch": 0.38, "grad_norm": 0.6975888013839722, "learning_rate": 7.101584444755696e-05, "loss": 2.7521, "step": 1805},
    {"epoch": 0.38, "grad_norm": 0.6271196603775024, "learning_rate": 7.086445375262212e-05, "loss": 2.771, "step": 1810},
    {"epoch": 0.38, "grad_norm": 0.6058980822563171, "learning_rate": 7.071283109391528e-05, "loss": 2.7284, "step": 1815},
    {"epoch": 0.38, "grad_norm": 0.6291877031326294, "learning_rate": 7.056097815712466e-05, "loss": 2.8608, "step": 1820},
    {"epoch": 0.38, "grad_norm": 0.6075469851493835, "learning_rate": 7.040889663049862e-05, "loss": 2.7491, "step": 1825},
    {"epoch": 0.38, "grad_norm": 0.6630153656005859, "learning_rate": 7.025658820482693e-05, "loss": 2.8098, "step": 1830},
    {"epoch": 0.38, "grad_norm": 0.6005200743675232, "learning_rate": 7.010405457342192e-05, "loss": 2.79, "step": 1835},
    {"epoch": 0.38, "grad_norm": 0.6074357032775879, "learning_rate": 6.995129743209967e-05, "loss": 2.7717, "step": 1840},
    {"epoch": 0.38, "grad_norm": 0.6157153248786926, "learning_rate": 6.97983184791612e-05, "loss": 2.7526, "step": 1845},
    {"epoch": 0.38, "grad_norm": 0.5990545749664307, "learning_rate": 6.964511941537355e-05, "loss": 2.7891, "step": 1850},
    {"epoch": 0.39, "grad_norm": 0.5907087326049805, "learning_rate": 6.949170194395083e-05, "loss": 2.728, "step": 1855},
    {"epoch": 0.39, "grad_norm": 0.5958890318870544, "learning_rate": 6.933806777053536e-05, "loss": 2.7638, "step": 1860},
    {"epoch": 0.39, "grad_norm": 0.6144511103630066, "learning_rate": 6.918421860317872e-05, "loss": 2.785, "step": 1865},
    {"epoch": 0.39, "grad_norm": 0.595562756061554, "learning_rate": 6.903015615232263e-05, "loss": 2.7708, "step": 1870},
    {"epoch": 0.39, "grad_norm": 0.6055405139923096, "learning_rate": 6.887588213078012e-05, "loss": 2.7603, "step": 1875},
    {"epoch": 0.39, "grad_norm": 0.6407721638679504, "learning_rate": 6.87213982537163e-05, "loss": 2.7656, "step": 1880},
    {"epoch": 0.39, "grad_norm": 0.5973005294799805, "learning_rate": 6.856670623862943e-05, "loss": 2.74, "step": 1885},
    {"epoch": 0.39, "grad_norm": 0.6174443960189819, "learning_rate": 6.841180780533179e-05, "loss": 2.737, "step": 1890},
    {"epoch": 0.39, "grad_norm": 0.6259763836860657, "learning_rate": 6.82567046759305e-05, "loss": 2.7718, "step": 1895},
    {"epoch": 0.39, "grad_norm": 0.619278609752655, "learning_rate": 6.810139857480844e-05, "loss": 2.7495, "step": 1900},
    {"epoch": 0.4, "grad_norm": 0.6113199591636658, "learning_rate": 6.794589122860509e-05, "loss": 2.7093, "step": 1905},
    {"epoch": 0.4, "grad_norm": 0.6377850770950317, "learning_rate": 6.779018436619725e-05, "loss": 2.7287, "step": 1910},
    {"epoch": 0.4, "grad_norm": 0.5981298089027405, "learning_rate": 6.763427971867992e-05, "loss": 2.7516, "step": 1915},
    {"epoch": 0.4, "grad_norm": 0.6440865993499756, "learning_rate": 6.747817901934699e-05, "loss": 2.7265, "step": 1920},
    {"epoch": 0.4, "grad_norm": 0.6392199993133545, "learning_rate": 6.732188400367197e-05, "loss": 2.7461, "step": 1925},
    {"epoch": 0.4, "grad_norm": 0.5952336192131042, "learning_rate": 6.716539640928871e-05, "loss": 2.7049, "step": 1930},
    {"epoch": 0.4, "grad_norm": 0.5947147011756897, "learning_rate": 6.70087179759721e-05, "loss": 2.6609, "step": 1935},
    {"epoch": 0.4, "grad_norm": 0.5876834392547607, "learning_rate": 6.685185044561874e-05, "loss": 2.7298, "step": 1940},
    {"epoch": 0.4, "grad_norm": 0.6168666481971741, "learning_rate": 6.669479556222747e-05, "loss": 2.7995, "step": 1945},
    {"epoch": 0.41, "grad_norm": 0.5867111682891846, "learning_rate": 6.653755507188013e-05, "loss": 2.7873, "step": 1950},
    {"epoch": 0.41, "grad_norm": 0.6168385148048401, "learning_rate": 6.638013072272205e-05, "loss": 2.7616, "step": 1955},
    {"epoch": 0.41, "grad_norm": 0.6352885365486145, "learning_rate": 6.622252426494259e-05, "loss": 2.7235, "step": 1960},
    {"epoch": 0.41, "grad_norm": 0.6029419302940369, "learning_rate": 6.606473745075581e-05, "loss": 2.755, "step": 1965},
    {"epoch": 0.41, "grad_norm": 0.6367367506027222, "learning_rate": 6.590677203438084e-05, "loss": 2.7381, "step": 1970},
    {"epoch": 0.41, "grad_norm": 0.5971164107322693, "learning_rate": 6.574862977202252e-05, "loss": 2.7641, "step": 1975},
    {"epoch": 0.41, "grad_norm": 0.6724832653999329, "learning_rate": 6.559031242185174e-05, "loss": 2.7543, "step": 1980},
    {"epoch": 0.41, "grad_norm": 0.6365452408790588, "learning_rate": 6.543182174398597e-05, "loss": 2.7156, "step": 1985},
    {"epoch": 0.41, "grad_norm": 0.5962063670158386, "learning_rate": 6.52731595004697e-05, "loss": 2.6921, "step": 1990},
    {"epoch": 0.41, "grad_norm": 0.6351775527000427, "learning_rate": 6.51143274552548e-05, "loss": 2.6404, "step": 1995},
    {"epoch": 0.42, "grad_norm": 0.6487362384796143, "learning_rate": 6.495532737418098e-05, "loss": 2.6959, "step": 2000},
    {"epoch": 0.42, "grad_norm": 0.6513057351112366, "learning_rate": 6.479616102495605e-05, "loss": 2.7508, "step": 2005},
    {"epoch": 0.42, "grad_norm": 0.5903010368347168, "learning_rate": 6.463683017713638e-05, "loss": 2.7474, "step": 2010},
    {"epoch": 0.42, "grad_norm": 0.6075739860534668, "learning_rate": 6.447733660210715e-05, "loss": 2.7462, "step": 2015},
    {"epoch": 0.42, "grad_norm": 0.6202053427696228, "learning_rate": 6.431768207306272e-05, "loss": 2.8086, "step": 2020},
    {"epoch": 0.42, "grad_norm": 0.6007124185562134, "learning_rate": 6.415786836498684e-05, "loss": 2.7325, "step": 2025},
    {"epoch": 0.42, "grad_norm": 0.6256109476089478, "learning_rate": 6.399789725463298e-05, "loss": 2.7049, "step": 2030},
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.6217005252838135, |
|
"learning_rate": 6.383777052050458e-05, |
|
"loss": 2.7153, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.6184681057929993, |
|
"learning_rate": 6.367748994283518e-05, |
|
"loss": 2.694, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.5928269624710083, |
|
"learning_rate": 6.351705730356877e-05, |
|
"loss": 2.7265, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.6197706460952759, |
|
"learning_rate": 6.335647438633987e-05, |
|
"loss": 2.7359, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.6162838339805603, |
|
"learning_rate": 6.319574297645374e-05, |
|
"loss": 2.7239, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.5909050107002258, |
|
"learning_rate": 6.303486486086654e-05, |
|
"loss": 2.751, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.5814822912216187, |
|
"learning_rate": 6.287384182816546e-05, |
|
"loss": 2.7583, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.6004829406738281, |
|
"learning_rate": 6.271267566854883e-05, |
|
"loss": 2.7153, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.5877543687820435, |
|
"learning_rate": 6.255136817380618e-05, |
|
"loss": 2.6557, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.5787215828895569, |
|
"learning_rate": 6.23899211372984e-05, |
|
"loss": 2.7686, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.5978643298149109, |
|
"learning_rate": 6.222833635393772e-05, |
|
"loss": 2.7421, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.5708004832267761, |
|
"learning_rate": 6.206661562016782e-05, |
|
"loss": 2.7306, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.6145750284194946, |
|
"learning_rate": 6.190476073394382e-05, |
|
"loss": 2.7589, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.6016147136688232, |
|
"learning_rate": 6.17427734947123e-05, |
|
"loss": 2.6672, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.5984389185905457, |
|
"learning_rate": 6.158065570339127e-05, |
|
"loss": 2.7528, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.609862208366394, |
|
"learning_rate": 6.141840916235021e-05, |
|
"loss": 2.8206, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.5888879299163818, |
|
"learning_rate": 6.125603567539001e-05, |
|
"loss": 2.6867, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.5528971552848816, |
|
"learning_rate": 6.109353704772284e-05, |
|
"loss": 2.711, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.580680787563324, |
|
"learning_rate": 6.0930915085952164e-05, |
|
"loss": 2.6565, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.5929208993911743, |
|
"learning_rate": 6.076817159805267e-05, |
|
"loss": 2.681, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.5872501134872437, |
|
"learning_rate": 6.06053083933501e-05, |
|
"loss": 2.7475, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.5946284532546997, |
|
"learning_rate": 6.044232728250116e-05, |
|
"loss": 2.6727, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.5818427205085754, |
|
"learning_rate": 6.027923007747339e-05, |
|
"loss": 2.639, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.6036926507949829, |
|
"learning_rate": 6.011601859152506e-05, |
|
"loss": 2.7188, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.5915615558624268, |
|
"learning_rate": 5.995269463918495e-05, |
|
"loss": 2.6782, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.589364767074585, |
|
"learning_rate": 5.97892600362322e-05, |
|
"loss": 2.6975, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.570540189743042, |
|
"learning_rate": 5.962571659967614e-05, |
|
"loss": 2.7128, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.6136625409126282, |
|
"learning_rate": 5.946206614773606e-05, |
|
"loss": 2.6649, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.5848395228385925, |
|
"learning_rate": 5.929831049982103e-05, |
|
"loss": 2.7049, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.589984118938446, |
|
"learning_rate": 5.9134451476509633e-05, |
|
"loss": 2.7139, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.59041827917099, |
|
"learning_rate": 5.897049089952974e-05, |
|
"loss": 2.7045, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5757389068603516, |
|
"learning_rate": 5.880643059173826e-05, |
|
"loss": 2.6226, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.592380702495575, |
|
"learning_rate": 5.864227237710093e-05, |
|
"loss": 2.7305, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.6019942760467529, |
|
"learning_rate": 5.847801808067189e-05, |
|
"loss": 2.6937, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.6186248660087585, |
|
"learning_rate": 5.831366952857357e-05, |
|
"loss": 2.7107, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5778764486312866, |
|
"learning_rate": 5.814922854797622e-05, |
|
"loss": 2.7006, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5956502556800842, |
|
"learning_rate": 5.798469696707775e-05, |
|
"loss": 2.7031, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.6054814457893372, |
|
"learning_rate": 5.782007661508331e-05, |
|
"loss": 2.7445, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5853110551834106, |
|
"learning_rate": 5.765536932218495e-05, |
|
"loss": 2.6657, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.6011916399002075, |
|
"learning_rate": 5.7490576919541315e-05, |
|
"loss": 2.6646, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5917201638221741, |
|
"learning_rate": 5.732570123925729e-05, |
|
"loss": 2.6769, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.6263756155967712, |
|
"learning_rate": 5.7160744114363593e-05, |
|
"loss": 2.6539, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.5738804340362549, |
|
"learning_rate": 5.699570737879641e-05, |
|
"loss": 2.6446, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.5955730080604553, |
|
"learning_rate": 5.683059286737702e-05, |
|
"loss": 2.7087, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.5945442914962769, |
|
"learning_rate": 5.666540241579139e-05, |
|
"loss": 2.6942, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.5907345414161682, |
|
"learning_rate": 5.6500137860569766e-05, |
|
"loss": 2.7556, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.5916737914085388, |
|
"learning_rate": 5.633480103906624e-05, |
|
"loss": 2.6653, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.5896185636520386, |
|
"learning_rate": 5.616939378943834e-05, |
|
"loss": 2.6891, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.6207943558692932, |
|
"learning_rate": 5.6003917950626595e-05, |
|
"loss": 2.7149, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.6069183945655823, |
|
"learning_rate": 5.583837536233407e-05, |
|
"loss": 2.7647, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.5955156087875366, |
|
"learning_rate": 5.567276786500596e-05, |
|
"loss": 2.6789, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5711756944656372, |
|
"learning_rate": 5.5507097299809054e-05, |
|
"loss": 2.7099, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5811170339584351, |
|
"learning_rate": 5.534136550861133e-05, |
|
"loss": 2.7018, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.6151323318481445, |
|
"learning_rate": 5.5175574333961465e-05, |
|
"loss": 2.6619, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.6350491642951965, |
|
"learning_rate": 5.500972561906832e-05, |
|
"loss": 2.6722, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.6088038682937622, |
|
"learning_rate": 5.484382120778048e-05, |
|
"loss": 2.6838, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5795965790748596, |
|
"learning_rate": 5.467786294456575e-05, |
|
"loss": 2.7021, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.6044985055923462, |
|
"learning_rate": 5.451185267449061e-05, |
|
"loss": 2.648, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5968245267868042, |
|
"learning_rate": 5.43457922431998e-05, |
|
"loss": 2.7042, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5967344045639038, |
|
"learning_rate": 5.417968349689566e-05, |
|
"loss": 2.7427, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.6051455736160278, |
|
"learning_rate": 5.401352828231772e-05, |
|
"loss": 2.6792, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.6021824479103088, |
|
"learning_rate": 5.384732844672211e-05, |
|
"loss": 2.7174, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5897824764251709, |
|
"learning_rate": 5.368108583786107e-05, |
|
"loss": 2.7425, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5941648483276367, |
|
"learning_rate": 5.3514802303962344e-05, |
|
"loss": 2.7718, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.6310714483261108, |
|
"learning_rate": 5.334847969370868e-05, |
|
"loss": 2.7228, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5781346559524536, |
|
"learning_rate": 5.3182119856217284e-05, |
|
"loss": 2.6547, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5885051488876343, |
|
"learning_rate": 5.3015724641019214e-05, |
|
"loss": 2.6163, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5877886414527893, |
|
"learning_rate": 5.284929589803884e-05, |
|
"loss": 2.6974, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5883300304412842, |
|
"learning_rate": 5.2682835477573336e-05, |
|
"loss": 2.7209, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.6138569712638855, |
|
"learning_rate": 5.2516345230271965e-05, |
|
"loss": 2.6766, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5742478966712952, |
|
"learning_rate": 5.234982700711569e-05, |
|
"loss": 2.6928, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5935817360877991, |
|
"learning_rate": 5.218328265939643e-05, |
|
"loss": 2.6688, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.579456090927124, |
|
"learning_rate": 5.201671403869657e-05, |
|
"loss": 2.7296, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.6063280701637268, |
|
"learning_rate": 5.1850122996868366e-05, |
|
"loss": 2.6807, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5990563035011292, |
|
"learning_rate": 5.168351138601334e-05, |
|
"loss": 2.6674, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5913801193237305, |
|
"learning_rate": 5.1516881058461675e-05, |
|
"loss": 2.6167, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5955457091331482, |
|
"learning_rate": 5.135023386675166e-05, |
|
"loss": 2.6794, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5914067625999451, |
|
"learning_rate": 5.118357166360906e-05, |
|
"loss": 2.6578, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.6297724843025208, |
|
"learning_rate": 5.101689630192655e-05, |
|
"loss": 2.6634, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.6354207992553711, |
|
"learning_rate": 5.085020963474307e-05, |
|
"loss": 2.6888, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.6159840226173401, |
|
"learning_rate": 5.068351351522329e-05, |
|
"loss": 2.6681, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.5876420736312866, |
|
"learning_rate": 5.0516809796636935e-05, |
|
"loss": 2.6068, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.5940358638763428, |
|
"learning_rate": 5.035010033233821e-05, |
|
"loss": 2.5744, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.5878875851631165, |
|
"learning_rate": 5.018338697574523e-05, |
|
"loss": 2.6835, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.5701265335083008, |
|
"learning_rate": 5.0016671580319354e-05, |
|
"loss": 2.6463, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.6023485660552979, |
|
"learning_rate": 4.984995599954461e-05, |
|
"loss": 2.6397, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.6002631187438965, |
|
"learning_rate": 4.968324208690712e-05, |
|
"loss": 2.5806, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.5934431552886963, |
|
"learning_rate": 4.951653169587441e-05, |
|
"loss": 2.7039, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.5772780179977417, |
|
"learning_rate": 4.93498266798749e-05, |
|
"loss": 2.5367, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 2.262930393218994, |
|
"learning_rate": 4.918312889227722e-05, |
|
"loss": 2.6517, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.6189587712287903, |
|
"learning_rate": 4.901644018636966e-05, |
|
"loss": 2.6813, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.5724838376045227, |
|
"learning_rate": 4.8849762415339526e-05, |
|
"loss": 2.6501, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.5820484161376953, |
|
"learning_rate": 4.868309743225256e-05, |
|
"loss": 2.7631, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.5822151303291321, |
|
"learning_rate": 4.851644709003233e-05, |
|
"loss": 2.6944, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.5823991894721985, |
|
"learning_rate": 4.834981324143964e-05, |
|
"loss": 2.6122, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.5901163220405579, |
|
"learning_rate": 4.818319773905191e-05, |
|
"loss": 2.6254, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.5650957822799683, |
|
"learning_rate": 4.801660243524261e-05, |
|
"loss": 2.6358, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.5851700901985168, |
|
"learning_rate": 4.7850029182160626e-05, |
|
"loss": 2.6393, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.581257164478302, |
|
"learning_rate": 4.768347983170973e-05, |
|
"loss": 2.5892, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.5895279049873352, |
|
"learning_rate": 4.7516956235527884e-05, |
|
"loss": 2.6424, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.7180891036987305, |
|
"learning_rate": 4.735046024496682e-05, |
|
"loss": 2.6909, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.5922880172729492, |
|
"learning_rate": 4.7183993711071286e-05, |
|
"loss": 2.71, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.5666274428367615, |
|
"learning_rate": 4.7017558484558554e-05, |
|
"loss": 2.7223, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.561943531036377, |
|
"learning_rate": 4.6851156415797844e-05, |
|
"loss": 2.6269, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.5793047547340393, |
|
"learning_rate": 4.6684789354789746e-05, |
|
"loss": 2.6433, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.6017892956733704, |
|
"learning_rate": 4.651845915114563e-05, |
|
"loss": 2.6989, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.575771689414978, |
|
"learning_rate": 4.6352167654067095e-05, |
|
"loss": 2.6372, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.579129695892334, |
|
"learning_rate": 4.618591671232544e-05, |
|
"loss": 2.6722, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.5836328268051147, |
|
"learning_rate": 4.601970817424106e-05, |
|
"loss": 2.6702, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.598733127117157, |
|
"learning_rate": 4.585354388766292e-05, |
|
"loss": 2.697, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.5923112630844116, |
|
"learning_rate": 4.568742569994802e-05, |
|
"loss": 2.6181, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.5779056549072266, |
|
"learning_rate": 4.552135545794086e-05, |
|
"loss": 2.6333, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.5684216618537903, |
|
"learning_rate": 4.535533500795288e-05, |
|
"loss": 2.6568, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.6125906109809875, |
|
"learning_rate": 4.5189366195741953e-05, |
|
"loss": 2.7044, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.5921022295951843, |
|
"learning_rate": 4.502345086649186e-05, |
|
"loss": 2.565, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.6385989785194397, |
|
"learning_rate": 4.485759086479179e-05, |
|
"loss": 2.6301, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.5802872180938721, |
|
"learning_rate": 4.469178803461579e-05, |
|
"loss": 2.5987, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.5786648392677307, |
|
"learning_rate": 4.4526044219302326e-05, |
|
"loss": 2.6468, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.5831460356712341, |
|
"learning_rate": 4.4360361261533745e-05, |
|
"loss": 2.6621, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.5802498459815979, |
|
"learning_rate": 4.419474100331579e-05, |
|
"loss": 2.6547, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.5955533981323242, |
|
"learning_rate": 4.402918528595715e-05, |
|
"loss": 2.6076, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.5763300061225891, |
|
"learning_rate": 4.386369595004896e-05, |
|
"loss": 2.6596, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.6035252213478088, |
|
"learning_rate": 4.3698274835444354e-05, |
|
"loss": 2.6467, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.5766133069992065, |
|
"learning_rate": 4.3532923781238e-05, |
|
"loss": 2.6246, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.5641186833381653, |
|
"learning_rate": 4.336764462574566e-05, |
|
"loss": 2.6427, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.5621420741081238, |
|
"learning_rate": 4.320243920648376e-05, |
|
"loss": 2.7131, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.587009847164154, |
|
"learning_rate": 4.303730936014894e-05, |
|
"loss": 2.5924, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.5865349173545837, |
|
"learning_rate": 4.287225692259765e-05, |
|
"loss": 2.6606, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.5877777338027954, |
|
"learning_rate": 4.270728372882575e-05, |
|
"loss": 2.5986, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.6098527908325195, |
|
"learning_rate": 4.254239161294804e-05, |
|
"loss": 2.7315, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.5707640051841736, |
|
"learning_rate": 4.237758240817802e-05, |
|
"loss": 2.6216, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.7130870819091797, |
|
"learning_rate": 4.2212857946807336e-05, |
|
"loss": 2.6666, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.5846860408782959, |
|
"learning_rate": 4.2048220060185516e-05, |
|
"loss": 2.6045, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.6190792322158813, |
|
"learning_rate": 4.188367057869957e-05, |
|
"loss": 2.6988, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.5834519863128662, |
|
"learning_rate": 4.171921133175365e-05, |
|
"loss": 2.6965, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.5982712507247925, |
|
"learning_rate": 4.155484414774872e-05, |
|
"loss": 2.5919, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.5789735317230225, |
|
"learning_rate": 4.139057085406221e-05, |
|
"loss": 2.6139, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.5745024681091309, |
|
"learning_rate": 4.1226393277027726e-05, |
|
"loss": 2.6619, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.5738227963447571, |
|
"learning_rate": 4.106231324191471e-05, |
|
"loss": 2.6202, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.5885658860206604, |
|
"learning_rate": 4.089833257290817e-05, |
|
"loss": 2.7186, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.5927093029022217, |
|
"learning_rate": 4.073445309308842e-05, |
|
"loss": 2.6959, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.5532147288322449, |
|
"learning_rate": 4.0570676624410756e-05, |
|
"loss": 2.5826, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.571595311164856, |
|
"learning_rate": 4.040700498768525e-05, |
|
"loss": 2.5981, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.6024319529533386, |
|
"learning_rate": 4.024344000255648e-05, |
|
"loss": 2.714, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.5821074843406677, |
|
"learning_rate": 4.0079983487483313e-05, |
|
"loss": 2.5985, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.5916585922241211, |
|
"learning_rate": 3.9916637259718683e-05, |
|
"loss": 2.6529, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.5751479864120483, |
|
"learning_rate": 3.9753403135289396e-05, |
|
"loss": 2.5818, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.5622469782829285, |
|
"learning_rate": 3.9590282928975914e-05, |
|
"loss": 2.545, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.5675203800201416, |
|
"learning_rate": 3.942727845429221e-05, |
|
"loss": 2.5872, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.5867101550102234, |
|
"learning_rate": 3.926439152346558e-05, |
|
"loss": 2.5448, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.5699689984321594, |
|
"learning_rate": 3.910162394741653e-05, |
|
"loss": 2.5741, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.5863639712333679, |
|
"learning_rate": 3.893897753573861e-05, |
|
"loss": 2.6459, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.5991702675819397, |
|
"learning_rate": 3.877645409667829e-05, |
|
"loss": 2.6341, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.6305561065673828, |
|
"learning_rate": 3.861405543711491e-05, |
|
"loss": 2.6204, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.5843254923820496, |
|
"learning_rate": 3.8451783362540507e-05, |
|
"loss": 2.6726, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.5682271122932434, |
|
"learning_rate": 3.828963967703983e-05, |
|
"loss": 2.6089, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.5877279043197632, |
|
"learning_rate": 3.8127626183270223e-05, |
|
"loss": 2.6249, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.5976167321205139, |
|
"learning_rate": 3.796574468244161e-05, |
|
"loss": 2.6543, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.5947586297988892, |
|
"learning_rate": 3.7803996974296444e-05, |
|
"loss": 2.6448, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.5799654722213745, |
|
"learning_rate": 3.7642384857089776e-05, |
|
"loss": 2.6903, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.5786498785018921, |
|
"learning_rate": 3.748091012756915e-05, |
|
"loss": 2.6172, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.5935390591621399, |
|
"learning_rate": 3.731957458095467e-05, |
|
"loss": 2.6681, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.5944668054580688, |
|
"learning_rate": 3.71583800109191e-05, |
|
"loss": 2.6853, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.5829348564147949, |
|
"learning_rate": 3.699732820956784e-05, |
|
"loss": 2.6258, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.5773081183433533, |
|
"learning_rate": 3.6836420967419057e-05, |
|
"loss": 2.6407, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.5687635540962219, |
|
"learning_rate": 3.6675660073383745e-05, |
|
"loss": 2.6407, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.5697055459022522, |
|
"learning_rate": 3.6515047314745856e-05, |
|
"loss": 2.651, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.574481189250946, |
|
"learning_rate": 3.6354584477142437e-05, |
|
"loss": 2.603, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.5846320390701294, |
|
"learning_rate": 3.6194273344543736e-05, |
|
"loss": 2.6002, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.5774704813957214, |
|
"learning_rate": 3.6034115699233425e-05, |
|
"loss": 2.6297, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.5734532475471497, |
|
"learning_rate": 3.5874113321788736e-05, |
|
"loss": 2.6309, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.5819912552833557, |
|
"learning_rate": 3.571426799106071e-05, |
|
"loss": 2.6724, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.5800731182098389, |
|
"learning_rate": 3.555458148415437e-05, |
|
"loss": 2.6073, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.6030119061470032, |
|
"learning_rate": 3.539505557640901e-05, |
|
"loss": 2.6547, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.5793662071228027, |
|
"learning_rate": 3.523569204137843e-05, |
|
"loss": 2.6946, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.5904523730278015, |
|
"learning_rate": 3.5076492650811246e-05, |
|
"loss": 2.6513, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.5770894289016724, |
|
"learning_rate": 3.491745917463113e-05, |
|
"loss": 2.6559, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.5736362338066101, |
|
"learning_rate": 3.475859338091721e-05, |
|
"loss": 2.5941, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.585515022277832, |
|
"learning_rate": 3.4599897035884374e-05, |
|
"loss": 2.6083, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.5967217683792114, |
|
"learning_rate": 3.444137190386363e-05, |
|
"loss": 2.629, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.5730523467063904, |
|
"learning_rate": 3.4283019747282514e-05, |
|
"loss": 2.6124, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.5762849450111389, |
|
"learning_rate": 3.412484232664545e-05, |
|
"loss": 2.6663, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.5997823476791382, |
|
"learning_rate": 3.396684140051424e-05, |
|
"loss": 2.5798, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.591468334197998, |
|
"learning_rate": 3.3809018725488466e-05, |
|
"loss": 2.6309, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.576914370059967, |
|
"learning_rate": 3.365137605618598e-05, |
|
"loss": 2.628, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.5974778532981873, |
|
"learning_rate": 3.3493915145223395e-05, |
|
"loss": 2.652, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.5637148022651672, |
|
"learning_rate": 3.3336637743196584e-05, |
|
"loss": 2.5695, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.5815153121948242, |
|
"learning_rate": 3.317954559866126e-05, |
|
"loss": 2.578, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.5781580209732056, |
|
"learning_rate": 3.302264045811344e-05, |
|
"loss": 2.65, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.5772373080253601, |
|
"learning_rate": 3.286592406597021e-05, |
|
"loss": 2.647, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.5850008130073547, |
|
"learning_rate": 3.270939816455012e-05, |
|
"loss": 2.5886, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.5770871639251709, |
|
"learning_rate": 3.255306449405395e-05, |
|
"loss": 2.6366, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.6196701526641846, |
|
"learning_rate": 3.2396924792545304e-05, |
|
"loss": 2.6013, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.5672704577445984, |
|
"learning_rate": 3.224098079593132e-05, |
|
"loss": 2.5439, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.5793458223342896, |
|
"learning_rate": 3.2085234237943354e-05, |
|
"loss": 2.5838, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.5920185446739197, |
|
"learning_rate": 3.19296868501177e-05, |
|
"loss": 2.7513, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.5743845105171204, |
|
"learning_rate": 3.177434036177636e-05, |
|
"loss": 2.5961, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.5579830408096313, |
|
"learning_rate": 3.1619196500007804e-05, |
|
"loss": 2.5869, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.5578731894493103, |
|
"learning_rate": 3.146425698964776e-05, |
|
"loss": 2.6034, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.5633421540260315, |
|
"learning_rate": 3.1309523553260046e-05, |
|
"loss": 2.6938, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.5821592211723328, |
|
"learning_rate": 3.115499791111743e-05, |
|
"loss": 2.5988, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.568416953086853, |
|
"learning_rate": 3.10006817811825e-05, |
|
"loss": 2.6686, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.6258422136306763, |
|
"learning_rate": 3.084657687908855e-05, |
|
"loss": 2.6229, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.5760878324508667, |
|
"learning_rate": 3.069268491812052e-05, |
|
"loss": 2.5593, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.7546426057815552, |
|
"learning_rate": 3.0539007609195934e-05, |
|
"loss": 2.6005, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.5613300204277039, |
|
"learning_rate": 3.0385546660845908e-05, |
|
"loss": 2.58, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.5683727264404297, |
|
"learning_rate": 3.0232303779196132e-05, |
|
"loss": 2.6769, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.5826250910758972, |
|
"learning_rate": 3.0079280667947885e-05, |
|
"loss": 2.6511, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.6152758598327637, |
|
"learning_rate": 2.9926479028359132e-05, |
|
"loss": 2.6877, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.6035573482513428, |
|
"learning_rate": 2.97739005592256e-05, |
|
"loss": 2.6807, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.5841193795204163, |
|
"learning_rate": 2.962154695686187e-05, |
|
"loss": 2.6925, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.6009371280670166, |
|
"learning_rate": 2.9469419915082536e-05, |
|
"loss": 2.6164, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.566608190536499, |
|
"learning_rate": 2.9317521125183368e-05, |
|
"loss": 2.5822, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.5858410000801086, |
|
"learning_rate": 2.9165852275922524e-05, |
|
"loss": 2.6449, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.5713921189308167, |
|
"learning_rate": 2.901441505350174e-05, |
|
"loss": 2.5967, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.5789456367492676, |
|
"learning_rate": 2.886321114154762e-05, |
|
"loss": 2.6496, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.5742778182029724, |
|
"learning_rate": 2.87122422210929e-05, |
|
"loss": 2.6082, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.5781303644180298, |
|
"learning_rate": 2.8561509970557736e-05, |
|
"loss": 2.6587, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.5684245228767395, |
|
"learning_rate": 2.8411016065731146e-05, |
|
"loss": 2.6136, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.5729696750640869, |
|
"learning_rate": 2.826076217975222e-05, |
|
"loss": 2.6092, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.5810202360153198, |
|
"learning_rate": 2.8110749983091632e-05, |
|
"loss": 2.5651, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.5778184533119202, |
|
"learning_rate": 2.7960981143533053e-05, |
|
"loss": 2.654, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.5724374055862427, |
|
"learning_rate": 2.781145732615457e-05, |
|
"loss": 2.6412, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.5706700682640076, |
|
"learning_rate": 2.7662180193310218e-05, |
|
"loss": 2.6387, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.5863929986953735, |
|
"learning_rate": 2.751315140461145e-05, |
|
"loss": 2.5676, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.5613924860954285, |
|
"learning_rate": 2.7364372616908744e-05, |
|
"loss": 2.5772, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.6091950535774231, |
|
"learning_rate": 2.7215845484273152e-05, |
|
"loss": 2.7069, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.5914263129234314, |
|
"learning_rate": 2.7067571657977893e-05, |
|
"loss": 2.6698, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.5723788142204285, |
|
"learning_rate": 2.691955278648003e-05, |
|
"loss": 2.5516, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.5798773169517517, |
|
"learning_rate": 2.6771790515402112e-05, |
|
"loss": 2.605, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.5647338032722473, |
|
"learning_rate": 2.6624286487513916e-05, |
|
"loss": 2.6944, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.5808343291282654, |
|
"learning_rate": 2.6477042342714137e-05, |
|
"loss": 2.6245, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.6048632264137268, |
|
"learning_rate": 2.633005971801219e-05, |
|
"loss": 2.6853, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.566261351108551, |
|
"learning_rate": 2.6183340247510013e-05, |
|
"loss": 2.6689, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.5952037572860718, |
|
"learning_rate": 2.6036885562383856e-05, |
|
"loss": 2.6039, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.588478684425354, |
|
"learning_rate": 2.5890697290866206e-05, |
|
"loss": 2.6306, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.5835679769515991, |
|
"learning_rate": 2.5744777058227642e-05, |
|
"loss": 2.6297, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.5727607011795044, |
|
"learning_rate": 2.5599126486758777e-05, |
|
"loss": 2.5732, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.578033447265625, |
|
"learning_rate": 2.5453747195752243e-05, |
|
"loss": 2.6064, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8947706818580627, |
|
"learning_rate": 2.530864080148464e-05, |
|
"loss": 2.614, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.5751860737800598, |
|
"learning_rate": 2.5163808917198615e-05, |
|
"loss": 2.6096, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.5922190546989441, |
|
"learning_rate": 2.501925315308492e-05, |
|
"loss": 2.5628, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.6162046194076538, |
|
"learning_rate": 2.4874975116264477e-05, |
|
"loss": 2.5608, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.5621728897094727, |
|
"learning_rate": 2.4730976410770534e-05, |
|
"loss": 2.5841, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.5650796294212341, |
|
"learning_rate": 2.458725863753084e-05, |
|
"loss": 2.5644, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.5697013139724731, |
|
"learning_rate": 2.4443823394349834e-05, |
|
"loss": 2.5701, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.5562264323234558, |
|
"learning_rate": 2.430067227589088e-05, |
|
"loss": 2.5848, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.5883675813674927, |
|
"learning_rate": 2.4157806873658517e-05, |
|
"loss": 2.6811, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.5807322859764099, |
|
"learning_rate": 2.401522877598087e-05, |
|
"loss": 2.5286, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.5690186023712158, |
|
"learning_rate": 2.3872939567991827e-05, |
|
"loss": 2.5263, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.5782911777496338, |
|
"learning_rate": 2.373094083161353e-05, |
|
"loss": 2.6415, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.566866934299469, |
|
"learning_rate": 2.358923414553877e-05, |
|
"loss": 2.5329, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.5811752080917358, |
|
"learning_rate": 2.3447821085213405e-05, |
|
"loss": 2.5956, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.6118722558021545, |
|
"learning_rate": 2.3306703222818878e-05, |
|
"loss": 2.597, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.5883946418762207, |
|
"learning_rate": 2.3165882127254705e-05, |
|
"loss": 2.6247, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.563856840133667, |
|
"learning_rate": 2.302535936412108e-05, |
|
"loss": 2.5334, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.7375384569168091, |
|
"learning_rate": 2.2885136495701415e-05, |
|
"loss": 2.7001, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.5989105105400085, |
|
"learning_rate": 2.274521508094501e-05, |
|
"loss": 2.6305, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.5700932145118713, |
|
"learning_rate": 2.2605596675449698e-05, |
|
"loss": 2.6133, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.5767858028411865, |
|
"learning_rate": 2.246628283144457e-05, |
|
"loss": 2.5287, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.5744432210922241, |
|
"learning_rate": 2.232727509777269e-05, |
|
"loss": 2.5014, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.5841879844665527, |
|
"learning_rate": 2.2188575019873932e-05, |
|
"loss": 2.643, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5745596289634705, |
|
"learning_rate": 2.2050184139767704e-05, |
|
"loss": 2.5714, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5794277191162109, |
|
"learning_rate": 2.191210399603591e-05, |
|
"loss": 2.5296, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5828683376312256, |
|
"learning_rate": 2.1774336123805772e-05, |
|
"loss": 2.6062, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5689579844474792, |
|
"learning_rate": 2.1636882054732776e-05, |
|
"loss": 2.571, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5862215757369995, |
|
"learning_rate": 2.1499743316983684e-05, |
|
"loss": 2.6261, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5827880501747131, |
|
"learning_rate": 2.1362921435219473e-05, |
|
"loss": 2.5678, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.573703944683075, |
|
"learning_rate": 2.1226417930578464e-05, |
|
"loss": 2.5523, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5824171304702759, |
|
"learning_rate": 2.109023432065935e-05, |
|
"loss": 2.5961, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5649211406707764, |
|
"learning_rate": 2.095437211950434e-05, |
|
"loss": 2.5863, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5749054551124573, |
|
"learning_rate": 2.0818832837582352e-05, |
|
"loss": 2.6205, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5825015902519226, |
|
"learning_rate": 2.068361798177218e-05, |
|
"loss": 2.6257, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5777032971382141, |
|
"learning_rate": 2.0548729055345778e-05, |
|
"loss": 2.5669, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5922849774360657, |
|
"learning_rate": 2.0414167557951514e-05, |
|
"loss": 2.6121, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5713364481925964, |
|
"learning_rate": 2.0279934985597527e-05, |
|
"loss": 2.5375, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5662713050842285, |
|
"learning_rate": 2.0146032830635054e-05, |
|
"loss": 2.5945, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5747870206832886, |
|
"learning_rate": 2.001246258174192e-05, |
|
"loss": 2.5869, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5959161520004272, |
|
"learning_rate": 1.9879225723905886e-05, |
|
"loss": 2.543, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5774676203727722, |
|
"learning_rate": 1.9746323738408203e-05, |
|
"loss": 2.5901, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5725018978118896, |
|
"learning_rate": 1.9613758102807117e-05, |
|
"loss": 2.584, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5607089996337891, |
|
"learning_rate": 1.9481530290921474e-05, |
|
"loss": 2.5761, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.577150285243988, |
|
"learning_rate": 1.934964177281428e-05, |
|
"loss": 2.6497, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.5869901776313782, |
|
"learning_rate": 1.9218094014776434e-05, |
|
"loss": 2.5564, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.5867902636528015, |
|
"learning_rate": 1.9086888479310333e-05, |
|
"loss": 2.6265, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.5620519518852234, |
|
"learning_rate": 1.895602662511371e-05, |
|
"loss": 2.5874, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.5845962762832642, |
|
"learning_rate": 1.8825509907063327e-05, |
|
"loss": 2.5383, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.5725237131118774, |
|
"learning_rate": 1.8695339776198872e-05, |
|
"loss": 2.5967, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.58806312084198, |
|
"learning_rate": 1.8565517679706783e-05, |
|
"loss": 2.5904, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.5862602591514587, |
|
"learning_rate": 1.8436045060904174e-05, |
|
"loss": 2.5578, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.5779770612716675, |
|
"learning_rate": 1.830692335922279e-05, |
|
"loss": 2.6801, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.5615940690040588, |
|
"learning_rate": 1.8178154010192994e-05, |
|
"loss": 2.5917, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.5655120015144348, |
|
"learning_rate": 1.8049738445427822e-05, |
|
"loss": 2.541, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.5825996398925781, |
|
"learning_rate": 1.7921678092607052e-05, |
|
"loss": 2.6431, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.5582937002182007, |
|
"learning_rate": 1.7793974375461352e-05, |
|
"loss": 2.5971, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.5537931323051453, |
|
"learning_rate": 1.7666628713756417e-05, |
|
"loss": 2.5717, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.5590839982032776, |
|
"learning_rate": 1.7539642523277228e-05, |
|
"loss": 2.6129, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.003821611404419, |
|
"learning_rate": 1.7413017215812273e-05, |
|
"loss": 2.5358, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.5575367212295532, |
|
"learning_rate": 1.728675419913788e-05, |
|
"loss": 2.5917, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.5800896286964417, |
|
"learning_rate": 1.716085487700253e-05, |
|
"loss": 2.5974, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.5730501413345337, |
|
"learning_rate": 1.703532064911131e-05, |
|
"loss": 2.6395, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.552326500415802, |
|
"learning_rate": 1.6910152911110283e-05, |
|
"loss": 2.5972, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5704326033592224, |
|
"learning_rate": 1.6785353054571024e-05, |
|
"loss": 2.6062, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.58367919921875, |
|
"learning_rate": 1.666092246697512e-05, |
|
"loss": 2.5066, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5703946352005005, |
|
"learning_rate": 1.6536862531698766e-05, |
|
"loss": 2.6073, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5820434093475342, |
|
"learning_rate": 1.6413174627997328e-05, |
|
"loss": 2.6337, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5775151252746582, |
|
"learning_rate": 1.6289860130990147e-05, |
|
"loss": 2.6206, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5769268870353699, |
|
"learning_rate": 1.6166920411645064e-05, |
|
"loss": 2.5696, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5777645111083984, |
|
"learning_rate": 1.6044356836763315e-05, |
|
"loss": 2.511, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5798133611679077, |
|
"learning_rate": 1.5922170768964285e-05, |
|
"loss": 2.5312, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5712454915046692, |
|
"learning_rate": 1.5800363566670362e-05, |
|
"loss": 2.5797, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5668451189994812, |
|
"learning_rate": 1.5678936584091852e-05, |
|
"loss": 2.604, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5877907872200012, |
|
"learning_rate": 1.5557891171211892e-05, |
|
"loss": 2.6074, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5750396847724915, |
|
"learning_rate": 1.5437228673771465e-05, |
|
"loss": 2.6123, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5754581689834595, |
|
"learning_rate": 1.5316950433254445e-05, |
|
"loss": 2.5287, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.567247211933136, |
|
"learning_rate": 1.5197057786872649e-05, |
|
"loss": 2.5492, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5782046914100647, |
|
"learning_rate": 1.5077552067551015e-05, |
|
"loss": 2.5554, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5800824165344238, |
|
"learning_rate": 1.4958434603912747e-05, |
|
"loss": 2.6436, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5893679857254028, |
|
"learning_rate": 1.4839706720264546e-05, |
|
"loss": 2.5866, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5560811161994934, |
|
"learning_rate": 1.4721369736581924e-05, |
|
"loss": 2.5058, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.6068143844604492, |
|
"learning_rate": 1.4603424968494484e-05, |
|
"loss": 2.6076, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.5767700672149658, |
|
"learning_rate": 1.448587372727132e-05, |
|
"loss": 2.5211, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.5597636103630066, |
|
"learning_rate": 1.4368717319806419e-05, |
|
"loss": 2.6155, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.5742618441581726, |
|
"learning_rate": 1.4251957048604152e-05, |
|
"loss": 2.5888, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.6016988158226013, |
|
"learning_rate": 1.413559421176479e-05, |
|
"loss": 2.5928, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.6035110354423523, |
|
"learning_rate": 1.4019630102970056e-05, |
|
"loss": 2.589, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.5645198822021484, |
|
"learning_rate": 1.3904066011468753e-05, |
|
"loss": 2.5914, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.5627663135528564, |
|
"learning_rate": 1.3788903222062433e-05, |
|
"loss": 2.5587, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.5593175292015076, |
|
"learning_rate": 1.3674143015091118e-05, |
|
"loss": 2.5776, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.5947297215461731, |
|
"learning_rate": 1.355978666641905e-05, |
|
"loss": 2.606, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.5737603306770325, |
|
"learning_rate": 1.3445835447420507e-05, |
|
"loss": 2.6054, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.5874439477920532, |
|
"learning_rate": 1.3332290624965688e-05, |
|
"loss": 2.5885, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.5544983744621277, |
|
"learning_rate": 1.3219153461406609e-05, |
|
"loss": 2.4788, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.5916979908943176, |
|
"learning_rate": 1.3106425214563078e-05, |
|
"loss": 2.5666, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.5403845310211182, |
|
"learning_rate": 1.2994107137708716e-05, |
|
"loss": 2.5592, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.5737205147743225, |
|
"learning_rate": 1.2882200479556988e-05, |
|
"loss": 2.5363, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.5680940747261047, |
|
"learning_rate": 1.2770706484247397e-05, |
|
"loss": 2.531, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.5880336165428162, |
|
"learning_rate": 1.2659626391331564e-05, |
|
"loss": 2.5294, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.5889505743980408, |
|
"learning_rate": 1.2548961435759493e-05, |
|
"loss": 2.5716, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5553359389305115, |
|
"learning_rate": 1.2438712847865846e-05, |
|
"loss": 2.5521, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5884903073310852, |
|
"learning_rate": 1.2328881853356244e-05, |
|
"loss": 2.5846, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5808243155479431, |
|
"learning_rate": 1.221946967329365e-05, |
|
"loss": 2.6001, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5739181637763977, |
|
"learning_rate": 1.2110477524084796e-05, |
|
"loss": 2.6288, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5804181694984436, |
|
"learning_rate": 1.2001906617466657e-05, |
|
"loss": 2.5253, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5736623406410217, |
|
"learning_rate": 1.1893758160492978e-05, |
|
"loss": 2.6306, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5596362352371216, |
|
"learning_rate": 1.1786033355520859e-05, |
|
"loss": 2.5859, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5757764577865601, |
|
"learning_rate": 1.1678733400197373e-05, |
|
"loss": 2.5413, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5746675729751587, |
|
"learning_rate": 1.1571859487446263e-05, |
|
"loss": 2.5768, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.599814772605896, |
|
"learning_rate": 1.1465412805454695e-05, |
|
"loss": 2.6325, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5795137882232666, |
|
"learning_rate": 1.1359394537660011e-05, |
|
"loss": 2.6282, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5906032919883728, |
|
"learning_rate": 1.125380586273661e-05, |
|
"loss": 2.5769, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5713207721710205, |
|
"learning_rate": 1.1148647954582808e-05, |
|
"loss": 2.5704, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5629163980484009, |
|
"learning_rate": 1.1043921982307819e-05, |
|
"loss": 2.5626, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.583859384059906, |
|
"learning_rate": 1.0939629110218735e-05, |
|
"loss": 2.5515, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5773959159851074, |
|
"learning_rate": 1.0835770497807596e-05, |
|
"loss": 2.5416, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5668864250183105, |
|
"learning_rate": 1.0732347299738493e-05, |
|
"loss": 2.5416, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5780789256095886, |
|
"learning_rate": 1.0629360665834732e-05, |
|
"loss": 2.5834, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5882496237754822, |
|
"learning_rate": 1.052681174106604e-05, |
|
"loss": 2.623, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5597360134124756, |
|
"learning_rate": 1.0424701665535852e-05, |
|
"loss": 2.5469, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5949521660804749, |
|
"learning_rate": 1.0323031574468638e-05, |
|
"loss": 2.5835, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5551010370254517, |
|
"learning_rate": 1.0221802598197261e-05, |
|
"loss": 2.605, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5903000831604004, |
|
"learning_rate": 1.0121015862150423e-05, |
|
"loss": 2.5859, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5738072395324707, |
|
"learning_rate": 1.0020672486840154e-05, |
|
"loss": 2.599, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5860092639923096, |
|
"learning_rate": 9.920773587849364e-06, |
|
"loss": 2.5565, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5723434090614319, |
|
"learning_rate": 9.821320275819401e-06, |
|
"loss": 2.5936, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.56388920545578, |
|
"learning_rate": 9.72231365643777e-06, |
|
"loss": 2.5033, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5733988285064697, |
|
"learning_rate": 9.623754830425779e-06, |
|
"loss": 2.5936, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5776930451393127, |
|
"learning_rate": 9.52564489352632e-06, |
|
"loss": 2.5885, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5818698406219482, |
|
"learning_rate": 9.427984936491702e-06, |
|
"loss": 2.6229, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5616958141326904, |
|
"learning_rate": 9.330776045071509e-06, |
|
"loss": 2.5224, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5539332032203674, |
|
"learning_rate": 9.23401930000054e-06, |
|
"loss": 2.5469, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5630046129226685, |
|
"learning_rate": 9.137715776986772e-06, |
|
"loss": 2.5892, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5861016511917114, |
|
"learning_rate": 9.041866546699434e-06, |
|
"loss": 2.5578, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5587556958198547, |
|
"learning_rate": 8.946472674757078e-06, |
|
"loss": 2.5814, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.560431957244873, |
|
"learning_rate": 8.851535221715735e-06, |
|
"loss": 2.5942, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.571217954158783, |
|
"learning_rate": 8.757055243057132e-06, |
|
"loss": 2.6199, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.6178560853004456, |
|
"learning_rate": 8.663033789176967e-06, |
|
"loss": 2.5234, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5733814835548401, |
|
"learning_rate": 8.5694719053732e-06, |
|
"loss": 2.5297, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5747708082199097, |
|
"learning_rate": 8.476370631834458e-06, |
|
"loss": 2.5832, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5666419267654419, |
|
"learning_rate": 8.383731003628452e-06, |
|
"loss": 2.524, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5650913119316101, |
|
"learning_rate": 8.291554050690508e-06, |
|
"loss": 2.5187, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5808721780776978, |
|
"learning_rate": 8.199840797812058e-06, |
|
"loss": 2.5505, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.566299557685852, |
|
"learning_rate": 8.108592264629295e-06, |
|
"loss": 2.5946, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5880241394042969, |
|
"learning_rate": 8.017809465611803e-06, |
|
"loss": 2.589, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5790772438049316, |
|
"learning_rate": 7.927493410051324e-06, |
|
"loss": 2.5619, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5659160614013672, |
|
"learning_rate": 7.837645102050473e-06, |
|
"loss": 2.562, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5623981356620789, |
|
"learning_rate": 7.748265540511635e-06, |
|
"loss": 2.5778, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5713520050048828, |
|
"learning_rate": 7.65935571912582e-06, |
|
"loss": 2.5206, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5754117965698242, |
|
"learning_rate": 7.5709166263616405e-06, |
|
"loss": 2.5908, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5733134746551514, |
|
"learning_rate": 7.482949245454302e-06, |
|
"loss": 2.5512, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5658825635910034, |
|
"learning_rate": 7.3954545543946876e-06, |
|
"loss": 2.5486, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5804485082626343, |
|
"learning_rate": 7.308433525918468e-06, |
|
"loss": 2.5343, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.559240460395813, |
|
"learning_rate": 7.221887127495313e-06, |
|
"loss": 2.5867, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5767809152603149, |
|
"learning_rate": 7.1358163213181114e-06, |
|
"loss": 2.6351, |
|
"step": 4000 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 4811, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000, |
|
"total_flos": 1.0865380348133376e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|