|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8313847752663029,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
    {"epoch": 0.0, "grad_norm": 18.487945556640625, "learning_rate": 5e-06, "loss": 13.0056, "step": 5},
    {"epoch": 0.0, "grad_norm": 14.99580192565918, "learning_rate": 1e-05, "loss": 12.7549, "step": 10},
    {"epoch": 0.0, "grad_norm": 12.252545356750488, "learning_rate": 1.5e-05, "loss": 12.1667, "step": 15},
    {"epoch": 0.0, "grad_norm": 11.030613899230957, "learning_rate": 2e-05, "loss": 11.3488, "step": 20},
    {"epoch": 0.01, "grad_norm": 5.9062275886535645, "learning_rate": 2.5e-05, "loss": 10.3979, "step": 25},
    {"epoch": 0.01, "grad_norm": 4.439232349395752, "learning_rate": 3e-05, "loss": 9.5814, "step": 30},
    {"epoch": 0.01, "grad_norm": 4.189538955688477, "learning_rate": 3.5e-05, "loss": 8.9368, "step": 35},
    {"epoch": 0.01, "grad_norm": 2.1056129932403564, "learning_rate": 4e-05, "loss": 8.4454, "step": 40},
    {"epoch": 0.01, "grad_norm": 1.7891372442245483, "learning_rate": 4.5e-05, "loss": 8.0365, "step": 45},
    {"epoch": 0.01, "grad_norm": 1.1064869165420532, "learning_rate": 5e-05, "loss": 7.8555, "step": 50},
    {"epoch": 0.01, "grad_norm": 0.9185166954994202, "learning_rate": 5.500000000000001e-05, "loss": 7.7611, "step": 55},
    {"epoch": 0.01, "grad_norm": 0.8066293001174927, "learning_rate": 6e-05, "loss": 7.6786, "step": 60},
    {"epoch": 0.01, "grad_norm": 0.7840200662612915, "learning_rate": 6.500000000000001e-05, "loss": 7.5377, "step": 65},
    {"epoch": 0.01, "grad_norm": 0.6963502168655396, "learning_rate": 7e-05, "loss": 7.4308, "step": 70},
    {"epoch": 0.02, "grad_norm": 0.6896659731864929, "learning_rate": 7.500000000000001e-05, "loss": 7.2941, "step": 75},
    {"epoch": 0.02, "grad_norm": 0.9641470909118652, "learning_rate": 8e-05, "loss": 7.113, "step": 80},
    {"epoch": 0.02, "grad_norm": 0.6882058382034302, "learning_rate": 8.5e-05, "loss": 6.9274, "step": 85},
    {"epoch": 0.02, "grad_norm": 0.9275090098381042, "learning_rate": 9e-05, "loss": 6.7585, "step": 90},
    {"epoch": 0.02, "grad_norm": 0.9500148892402649, "learning_rate": 9.5e-05, "loss": 6.6093, "step": 95},
    {"epoch": 0.02, "grad_norm": 1.4086371660232544, "learning_rate": 0.0001, "loss": 6.4294, "step": 100},
    {"epoch": 0.02, "grad_norm": 1.023192286491394, "learning_rate": 9.999972205865686e-05, "loss": 6.3935, "step": 105},
    {"epoch": 0.02, "grad_norm": 1.651755928993225, "learning_rate": 9.999888823771751e-05, "loss": 6.1994, "step": 110},
    {"epoch": 0.02, "grad_norm": 1.9642592668533325, "learning_rate": 9.999749854645204e-05, "loss": 6.1148, "step": 115},
    {"epoch": 0.02, "grad_norm": 1.4170254468917847, "learning_rate": 9.99955530003106e-05, "loss": 6.017, "step": 120},
    {"epoch": 0.03, "grad_norm": 1.4251655340194702, "learning_rate": 9.99930516209231e-05, "loss": 5.8718, "step": 125},
    {"epoch": 0.03, "grad_norm": 1.2349293231964111, "learning_rate": 9.998999443609897e-05, "loss": 5.8388, "step": 130},
    {"epoch": 0.03, "grad_norm": 1.8177589178085327, "learning_rate": 9.998638147982696e-05, "loss": 5.8051, "step": 135},
    {"epoch": 0.03, "grad_norm": 1.7111365795135498, "learning_rate": 9.998221279227467e-05, "loss": 5.7493, "step": 140},
    {"epoch": 0.03, "grad_norm": 1.7238991260528564, "learning_rate": 9.997748841978812e-05, "loss": 5.677, "step": 145},
    {"epoch": 0.03, "grad_norm": 1.7837049961090088, "learning_rate": 9.997220841489122e-05, "loss": 5.5433, "step": 150},
    {"epoch": 0.03, "grad_norm": 1.4730719327926636, "learning_rate": 9.996637283628528e-05, "loss": 5.6268, "step": 155},
    {"epoch": 0.03, "grad_norm": 1.5730408430099487, "learning_rate": 9.995998174884821e-05, "loss": 5.412, "step": 160},
    {"epoch": 0.03, "grad_norm": 1.4034667015075684, "learning_rate": 9.995303522363394e-05, "loss": 5.4502, "step": 165},
    {"epoch": 0.04, "grad_norm": 1.4483870267868042, "learning_rate": 9.99455333378715e-05, "loss": 5.4505, "step": 170},
    {"epoch": 0.04, "grad_norm": 1.6795587539672852, "learning_rate": 9.993747617496428e-05, "loss": 5.3595, "step": 175},
    {"epoch": 0.04, "grad_norm": 1.2084821462631226, "learning_rate": 9.9928863824489e-05, "loss": 5.3636, "step": 180},
    {"epoch": 0.04, "grad_norm": 1.4595478773117065, "learning_rate": 9.99196963821948e-05, "loss": 5.2336, "step": 185},
    {"epoch": 0.04, "grad_norm": 1.4178177118301392, "learning_rate": 9.990997395000217e-05, "loss": 5.2247, "step": 190},
    {"epoch": 0.04, "grad_norm": 1.4685578346252441, "learning_rate": 9.989969663600169e-05, "loss": 5.1575, "step": 195},
    {"epoch": 0.04, "grad_norm": 1.5389108657836914, "learning_rate": 9.9888864554453e-05, "loss": 5.1734, "step": 200},
    {"epoch": 0.04, "grad_norm": 1.5792580842971802, "learning_rate": 9.987747782578342e-05, "loss": 5.1492, "step": 205},
    {"epoch": 0.04, "grad_norm": 1.6819140911102295, "learning_rate": 9.986553657658668e-05, "loss": 5.1638, "step": 210},
    {"epoch": 0.04, "grad_norm": 1.5122543573379517, "learning_rate": 9.985304093962145e-05, "loss": 5.1114, "step": 215},
    {"epoch": 0.05, "grad_norm": 1.5873609781265259, "learning_rate": 9.983999105380988e-05, "loss": 5.0102, "step": 220},
    {"epoch": 0.05, "grad_norm": 1.7786037921905518, "learning_rate": 9.982638706423608e-05, "loss": 4.9825, "step": 225},
    {"epoch": 0.05, "grad_norm": 1.9327354431152344, "learning_rate": 9.98122291221445e-05, "loss": 5.0193, "step": 230},
    {"epoch": 0.05, "grad_norm": 1.6568500995635986, "learning_rate": 9.979751738493826e-05, "loss": 4.9165, "step": 235},
    {"epoch": 0.05, "grad_norm": 1.7649105787277222, "learning_rate": 9.978225201617732e-05, "loss": 4.9153, "step": 240},
    {"epoch": 0.05, "grad_norm": 1.2607721090316772, "learning_rate": 9.976643318557678e-05, "loss": 4.9862, "step": 245},
    {"epoch": 0.05, "grad_norm": 1.7145535945892334, "learning_rate": 9.975006106900495e-05, "loss": 4.8554, "step": 250},
    {"epoch": 0.05, "grad_norm": 2.1020963191986084, "learning_rate": 9.973313584848132e-05, "loss": 4.8396, "step": 255},
    {"epoch": 0.05, "grad_norm": 1.910445213317871, "learning_rate": 9.971565771217464e-05, "loss": 4.8259, "step": 260},
    {"epoch": 0.06, "grad_norm": 1.9731266498565674, "learning_rate": 9.969762685440076e-05, "loss": 4.7914, "step": 265},
    {"epoch": 0.06, "grad_norm": 1.589390754699707, "learning_rate": 9.967904347562054e-05, "loss": 4.8357, "step": 270},
    {"epoch": 0.06, "grad_norm": 1.2861696481704712, "learning_rate": 9.965990778243755e-05, "loss": 4.7907, "step": 275},
    {"epoch": 0.06, "grad_norm": 1.6012290716171265, "learning_rate": 9.964021998759577e-05, "loss": 4.7597, "step": 280},
    {"epoch": 0.06, "grad_norm": 1.6387654542922974, "learning_rate": 9.961998030997733e-05, "loss": 4.6971, "step": 285},
    {"epoch": 0.06, "grad_norm": 1.5079236030578613, "learning_rate": 9.95991889745999e-05, "loss": 4.7803, "step": 290},
    {"epoch": 0.06, "grad_norm": 1.6518876552581787, "learning_rate": 9.957784621261441e-05, "loss": 4.7649, "step": 295},
    {"epoch": 0.06, "grad_norm": 1.4536391496658325, "learning_rate": 9.955595226130226e-05, "loss": 4.7011, "step": 300},
    {"epoch": 0.06, "grad_norm": 2.4148342609405518, "learning_rate": 9.953350736407282e-05, "loss": 4.7163, "step": 305},
    {"epoch": 0.06, "grad_norm": 1.705102562904358, "learning_rate": 9.951051177046069e-05, "loss": 4.6227, "step": 310},
    {"epoch": 0.07, "grad_norm": 2.2245330810546875, "learning_rate": 9.948696573612292e-05, "loss": 4.6069, "step": 315},
    {"epoch": 0.07, "grad_norm": 1.7924284934997559, "learning_rate": 9.946286952283618e-05, "loss": 4.6406, "step": 320},
    {"epoch": 0.07, "grad_norm": 1.903428554534912, "learning_rate": 9.943822339849381e-05, "loss": 4.6441, "step": 325},
    {"epoch": 0.07, "grad_norm": 1.7252001762390137, "learning_rate": 9.941302763710288e-05, "loss": 4.5764, "step": 330},
    {"epoch": 0.07, "grad_norm": 1.652077078819275, "learning_rate": 9.938728251878116e-05, "loss": 4.5748, "step": 335},
    {"epoch": 0.07, "grad_norm": 1.6563273668289185, "learning_rate": 9.936098832975393e-05, "loss": 4.6605, "step": 340},
    {"epoch": 0.07, "grad_norm": 2.196190357208252, "learning_rate": 9.933414536235091e-05, "loss": 4.5116, "step": 345},
    {"epoch": 0.07, "grad_norm": 1.7059569358825684, "learning_rate": 9.93067539150029e-05, "loss": 4.545, "step": 350},
    {"epoch": 0.07, "grad_norm": 1.919015645980835, "learning_rate": 9.927881429223853e-05, "loss": 4.4465, "step": 355},
    {"epoch": 0.07, "grad_norm": 2.4034647941589355, "learning_rate": 9.925032680468085e-05, "loss": 4.5262, "step": 360},
    {"epoch": 0.08, "grad_norm": 2.4941623210906982, "learning_rate": 9.922129176904388e-05, "loss": 4.5184, "step": 365},
    {"epoch": 0.08, "grad_norm": 1.7192869186401367, "learning_rate": 9.919170950812911e-05, "loss": 4.5419, "step": 370},
    {"epoch": 0.08, "grad_norm": 1.8791921138763428, "learning_rate": 9.916158035082184e-05, "loss": 4.4524, "step": 375},
    {"epoch": 0.08, "grad_norm": 1.7824347019195557, "learning_rate": 9.913090463208763e-05, "loss": 4.5193, "step": 380},
    {"epoch": 0.08, "grad_norm": 1.8421388864517212, "learning_rate": 9.90996826929685e-05, "loss": 4.4552, "step": 385},
    {"epoch": 0.08, "grad_norm": 2.1372251510620117, "learning_rate": 9.906791488057916e-05, "loss": 4.5304, "step": 390},
    {"epoch": 0.08, "grad_norm": 1.8615418672561646, "learning_rate": 9.903560154810313e-05, "loss": 4.4751, "step": 395},
    {"epoch": 0.08, "grad_norm": 1.6029841899871826, "learning_rate": 9.900274305478887e-05, "loss": 4.3688, "step": 400},
    {"epoch": 0.08, "grad_norm": 1.8817858695983887, "learning_rate": 9.896933976594572e-05, "loss": 4.4703, "step": 405},
    {"epoch": 0.09, "grad_norm": 1.6392571926116943, "learning_rate": 9.893539205293989e-05, "loss": 4.4149, "step": 410},
    {"epoch": 0.09, "grad_norm": 2.00087308883667, "learning_rate": 9.890090029319028e-05, "loss": 4.4234, "step": 415},
    {"epoch": 0.09, "grad_norm": 1.6066954135894775, "learning_rate": 9.886586487016433e-05, "loss": 4.4217, "step": 420},
    {"epoch": 0.09, "grad_norm": 1.6025770902633667, "learning_rate": 9.883028617337378e-05, "loss": 4.4763, "step": 425},
    {"epoch": 0.09, "grad_norm": 1.266241431236267, "learning_rate": 9.879416459837022e-05, "loss": 4.4888, "step": 430},
    {"epoch": 0.09, "grad_norm": 1.8539241552352905, "learning_rate": 9.875750054674082e-05, "loss": 4.3752, "step": 435},
    {"epoch": 0.09, "grad_norm": 1.6009455919265747, "learning_rate": 9.872029442610382e-05, "loss": 4.4046, "step": 440},
    {"epoch": 0.09, "grad_norm": 2.015383243560791, "learning_rate": 9.8682546650104e-05, "loss": 4.261, "step": 445},
    {"epoch": 0.09, "grad_norm": 1.6920061111450195, "learning_rate": 9.864425763840802e-05, "loss": 4.2679, "step": 450},
    {"epoch": 0.09, "grad_norm": 1.5430705547332764, "learning_rate": 9.860542781669988e-05, "loss": 4.2743, "step": 455},
    {"epoch": 0.1, "grad_norm": 1.410170555114746, "learning_rate": 9.85660576166761e-05, "loss": 4.2691, "step": 460},
    {"epoch": 0.1, "grad_norm": 1.5226540565490723, "learning_rate": 9.852614747604093e-05, "loss": 4.2518, "step": 465},
    {"epoch": 0.1, "grad_norm": 1.3666799068450928, "learning_rate": 9.848569783850145e-05, "loss": 4.2504, "step": 470},
    {"epoch": 0.1, "grad_norm": 1.4098422527313232, "learning_rate": 9.844470915376278e-05, "loss": 4.274, "step": 475},
    {"epoch": 0.1, "grad_norm": 1.5550212860107422, "learning_rate": 9.840318187752292e-05, "loss": 4.2033, "step": 480},
    {"epoch": 0.1, "grad_norm": 1.451125144958496, "learning_rate": 9.836111647146771e-05, "loss": 4.2993, "step": 485},
    {"epoch": 0.1, "grad_norm": 1.3934297561645508, "learning_rate": 9.831851340326577e-05, "loss": 4.2035, "step": 490},
    {"epoch": 0.1, "grad_norm": 1.2739324569702148, "learning_rate": 9.82753731465633e-05, "loss": 4.2577, "step": 495},
    {"epoch": 0.1, "grad_norm": 2.430321455001831, "learning_rate": 9.823169618097871e-05, "loss": 4.2677, "step": 500},
    {"epoch": 0.1, "grad_norm": 1.8821338415145874, "learning_rate": 9.81874829920974e-05, "loss": 4.2304, "step": 505},
    {"epoch": 0.11, "grad_norm": 1.5086530447006226, "learning_rate": 9.814273407146623e-05, "loss": 4.2587, "step": 510},
    {"epoch": 0.11, "grad_norm": 1.5528761148452759, "learning_rate": 9.809744991658829e-05, "loss": 4.1734, "step": 515},
    {"epoch": 0.11, "grad_norm": 1.376980185508728, "learning_rate": 9.805163103091708e-05, "loss": 4.2477, "step": 520},
    {"epoch": 0.11, "grad_norm": 1.3953189849853516, "learning_rate": 9.800527792385112e-05, "loss": 4.2327, "step": 525},
    {"epoch": 0.11, "grad_norm": 1.5399726629257202, "learning_rate": 9.79583911107282e-05, "loss": 4.313, "step": 530},
    {"epoch": 0.11, "grad_norm": 1.4923655986785889, "learning_rate": 9.791097111281968e-05, "loss": 4.2384, "step": 535},
    {"epoch": 0.11, "grad_norm": 1.3612940311431885, "learning_rate": 9.786301845732467e-05, "loss": 4.1915, "step": 540},
    {"epoch": 0.11, "grad_norm": 1.4748578071594238, "learning_rate": 9.781453367736418e-05, "loss": 4.2318, "step": 545},
    {"epoch": 0.11, "grad_norm": 2.0609843730926514, "learning_rate": 9.776551731197524e-05, "loss": 4.2361, "step": 550},
    {"epoch": 0.12, "grad_norm": 1.8525745868682861, "learning_rate": 9.771596990610478e-05, "loss": 4.1553, "step": 555},
    {"epoch": 0.12, "grad_norm": 1.5976070165634155, "learning_rate": 9.766589201060372e-05, "loss": 4.1892, "step": 560},
    {"epoch": 0.12, "grad_norm": 1.5703035593032837, "learning_rate": 9.761528418222077e-05, "loss": 4.1657, "step": 565},
    {"epoch": 0.12, "grad_norm": 1.308478593826294, "learning_rate": 9.756414698359624e-05, "loss": 4.1322, "step": 570},
    {"epoch": 0.12, "grad_norm": 1.4874699115753174, "learning_rate": 9.75124809832558e-05, "loss": 4.0567, "step": 575},
    {"epoch": 0.12, "grad_norm": 1.1793978214263916, "learning_rate": 9.746028675560413e-05, "loss": 4.171, "step": 580},
    {"epoch": 0.12, "grad_norm": 1.6352083683013916, "learning_rate": 9.740756488091861e-05, "loss": 4.0955, "step": 585},
    {"epoch": 0.12, "grad_norm": 2.008838176727295, "learning_rate": 9.735431594534277e-05, "loss": 4.1521, "step": 590},
    {"epoch": 0.12, "grad_norm": 1.4547291994094849, "learning_rate": 9.730054054087983e-05, "loss": 4.098, "step": 595},
    {"epoch": 0.12, "grad_norm": 1.298056960105896, "learning_rate": 9.724623926538612e-05, "loss": 4.161, "step": 600},
    {"epoch": 0.13, "grad_norm": 1.6623826026916504, "learning_rate": 9.719141272256443e-05, "loss": 4.1901, "step": 605},
    {"epoch": 0.13, "grad_norm": 1.4018698930740356, "learning_rate": 9.713606152195726e-05, "loss": 4.1762, "step": 610},
    {"epoch": 0.13, "grad_norm": 1.5026352405548096, "learning_rate": 9.708018627894011e-05, "loss": 4.182, "step": 615},
    {"epoch": 0.13, "grad_norm": 1.8811168670654297, "learning_rate": 9.702378761471456e-05, "loss": 4.0757, "step": 620},
    {"epoch": 0.13, "grad_norm": 1.5124515295028687, "learning_rate": 9.696686615630146e-05, "loss": 4.0314, "step": 625},
    {"epoch": 0.13, "grad_norm": 1.6260911226272583, "learning_rate": 9.690942253653385e-05, "loss": 4.0444, "step": 630},
    {"epoch": 0.13, "grad_norm": 1.3121532201766968, "learning_rate": 9.685145739405002e-05, "loss": 4.0608, "step": 635},
    {"epoch": 0.13, "grad_norm": 1.7652568817138672, "learning_rate": 9.679297137328634e-05, "loss": 4.0556, "step": 640},
    {"epoch": 0.13, "grad_norm": 1.2742689847946167, "learning_rate": 9.673396512447013e-05, "loss": 4.0923, "step": 645},
    {"epoch": 0.14, "grad_norm": 1.4735512733459473, "learning_rate": 9.667443930361247e-05, "loss": 4.0726, "step": 650},
    {"epoch": 0.14, "grad_norm": 1.2956384420394897, "learning_rate": 9.661439457250076e-05, "loss": 4.076, "step": 655},
    {"epoch": 0.14, "grad_norm": 1.651337742805481, "learning_rate": 9.655383159869158e-05, "loss": 4.0232, "step": 660},
    {"epoch": 0.14, "grad_norm": 1.560766577720642, "learning_rate": 9.649275105550309e-05, "loss": 4.1068, "step": 665},
    {"epoch": 0.14, "grad_norm": 1.3690272569656372, "learning_rate": 9.643115362200762e-05, "loss": 4.1346, "step": 670},
    {"epoch": 0.14, "grad_norm": 1.3641835451126099, "learning_rate": 9.636903998302409e-05, "loss": 4.0315, "step": 675},
    {"epoch": 0.14, "grad_norm": 1.258906364440918, "learning_rate": 9.630641082911045e-05, "loss": 3.9937, "step": 680},
    {"epoch": 0.14, "grad_norm": 1.234425663948059, "learning_rate": 9.624326685655593e-05, "loss": 4.0275, "step": 685},
    {"epoch": 0.14, "grad_norm": 1.208127737045288, "learning_rate": 9.617960876737337e-05, "loss": 4.0103, "step": 690},
    {"epoch": 0.14, "grad_norm": 1.3401658535003662, "learning_rate": 9.611543726929134e-05, "loss": 4.1086, "step": 695},
    {"epoch": 0.15, "grad_norm": 1.2559030055999756, "learning_rate": 9.605075307574635e-05, "loss": 4.0595, "step": 700},
    {"epoch": 0.15, "grad_norm": 1.3014733791351318, "learning_rate": 9.598555690587487e-05, "loss": 3.9846, "step": 705},
    {"epoch": 0.15, "grad_norm": 1.2272580862045288, "learning_rate": 9.591984948450532e-05, "loss": 3.9744, "step": 710},
    {"epoch": 0.15, "grad_norm": 1.804677963256836, "learning_rate": 9.585363154215008e-05, "loss": 4.0576, "step": 715},
    {"epoch": 0.15, "grad_norm": 1.5206153392791748, "learning_rate": 9.578690381499728e-05, "loss": 4.0031, "step": 720},
    {"epoch": 0.15, "grad_norm": 1.6624362468719482, "learning_rate": 9.571966704490271e-05, "loss": 3.9699, "step": 725},
    {"epoch": 0.15, "grad_norm": 1.380760908126831, "learning_rate": 9.565192197938148e-05, "loss": 4.0038, "step": 730},
    {"epoch": 0.15, "grad_norm": 1.5826762914657593, "learning_rate": 9.558366937159977e-05, "loss": 4.0484, "step": 735},
    {"epoch": 0.15, "grad_norm": 1.5320595502853394, "learning_rate": 9.551490998036646e-05, "loss": 3.9104, "step": 740},
    {"epoch": 0.15, "grad_norm": 1.4461925029754639, "learning_rate": 9.544564457012463e-05, "loss": 3.9483, "step": 745},
    {"epoch": 0.16, "grad_norm": 1.2367421388626099, "learning_rate": 9.537587391094314e-05, "loss": 4.0613, "step": 750},
    {"epoch": 0.16, "grad_norm": 1.4271812438964844, "learning_rate": 9.5305598778508e-05, "loss": 3.9746, "step": 755},
    {"epoch": 0.16, "grad_norm": 1.7837293148040771, "learning_rate": 9.52348199541138e-05, "loss": 3.9011, "step": 760},
    {"epoch": 0.16, "grad_norm": 1.224382996559143, "learning_rate": 9.516353822465504e-05, "loss": 3.9156, "step": 765},
    {"epoch": 0.16, "grad_norm": 1.3299424648284912, "learning_rate": 9.509175438261726e-05, "loss": 3.9682, "step": 770},
    {"epoch": 0.16, "grad_norm": 1.3308087587356567, "learning_rate": 9.501946922606838e-05, "loss": 3.9638, "step": 775},
    {"epoch": 0.16, "grad_norm": 1.463572382926941, "learning_rate": 9.494668355864973e-05, "loss": 3.9593, "step": 780},
    {"epoch": 0.16, "grad_norm": 1.3762937784194946, "learning_rate": 9.487339818956716e-05, "loss": 3.8413, "step": 785},
    {"epoch": 0.16, "grad_norm": 1.4410449266433716, "learning_rate": 9.479961393358203e-05, "loss": 3.9589, "step": 790},
    {"epoch": 0.17, "grad_norm": 1.4337133169174194, "learning_rate": 9.472533161100215e-05, "loss": 3.9466, "step": 795},
    {"epoch": 0.17, "grad_norm": 1.4152320623397827, "learning_rate": 9.465055204767265e-05, "loss": 4.0096, "step": 800},
    {"epoch": 0.17, "grad_norm": 1.2973538637161255, "learning_rate": 9.457527607496685e-05, "loss": 3.9461, "step": 805},
    {"epoch": 0.17, "grad_norm": 1.2745497226715088, "learning_rate": 9.44995045297769e-05, "loss": 3.91, "step": 810},
    {"epoch": 0.17, "grad_norm": 1.2977548837661743, "learning_rate": 9.442323825450464e-05, "loss": 4.0103, "step": 815},
    {"epoch": 0.17, "grad_norm": 1.2384387254714966, "learning_rate": 9.43464780970521e-05, "loss": 3.9053, "step": 820},
    {"epoch": 0.17, "grad_norm": 1.2694612741470337, "learning_rate": 9.426922491081212e-05, "loss": 3.9256, "step": 825},
    {"epoch": 0.17, "grad_norm": 1.1504689455032349, "learning_rate": 9.419147955465888e-05, "loss": 3.9034, "step": 830},
    {"epoch": 0.17, "grad_norm": 1.1995341777801514, "learning_rate": 9.411324289293832e-05, "loss": 3.9751, "step": 835},
    {"epoch": 0.17, "grad_norm": 1.3309929370880127, "learning_rate": 9.403451579545859e-05, "loss": 3.8875, "step": 840},
    {"epoch": 0.18, "grad_norm": 1.1273236274719238, "learning_rate": 9.395529913748025e-05, "loss": 3.8719, "step": 845},
    {"epoch": 0.18, "grad_norm": 1.4124709367752075, "learning_rate": 9.387559379970672e-05, "loss": 3.9124, "step": 850},
    {"epoch": 0.18, "grad_norm": 1.2471351623535156, "learning_rate": 9.379540066827431e-05, "loss": 3.8352, "step": 855},
    {"epoch": 0.18, "grad_norm": 1.2889108657836914, "learning_rate": 9.371472063474248e-05, "loss": 3.878, "step": 860},
    {"epoch": 0.18, "grad_norm": 1.1750242710113525, "learning_rate": 9.363355459608394e-05, "loss": 3.9879, "step": 865},
    {"epoch": 0.18, "grad_norm": 1.1907912492752075, "learning_rate": 9.355190345467457e-05, "loss": 3.9429, "step": 870},
    {"epoch": 0.18, "grad_norm": 1.1586787700653076, "learning_rate": 9.346976811828352e-05, "loss": 3.9224, "step": 875},
    {"epoch": 0.18, "grad_norm": 1.3565126657485962, "learning_rate": 9.338714950006297e-05, "loss": 3.836, "step": 880},
    {"epoch": 0.18, "grad_norm": 1.2921334505081177, "learning_rate": 9.330404851853817e-05, "loss": 3.9068, "step": 885},
    {"epoch": 0.18, "grad_norm": 1.2521257400512695, "learning_rate": 9.3220466097597e-05, "loss": 3.8314, "step": 890},
    {"epoch": 0.19, "grad_norm": 1.3298193216323853, "learning_rate": 9.313640316647991e-05, "loss": 3.7788, "step": 895},
    {"epoch": 0.19, "grad_norm": 1.3024024963378906, "learning_rate": 9.305186065976945e-05, "loss": 3.8647, "step": 900},
    {"epoch": 0.19, "grad_norm": 1.1826115846633911, "learning_rate": 9.296683951737993e-05, "loss": 3.7856, "step": 905},
    {"epoch": 0.19, "grad_norm": 1.1611673831939697, "learning_rate": 9.288134068454697e-05, "loss": 3.8901, "step": 910},
    {"epoch": 0.19, "grad_norm": 1.5218857526779175, "learning_rate": 9.2795365111817e-05, "loss": 3.8341, "step": 915},
    {"epoch": 0.19, "grad_norm": 1.2008612155914307, "learning_rate": 9.270891375503665e-05, "loss": 3.8005, "step": 920},
    {"epoch": 0.19, "grad_norm": 1.2103996276855469, "learning_rate": 9.262198757534218e-05, "loss": 3.8177, "step": 925},
    {"epoch": 0.19, "grad_norm": 1.1930011510849, "learning_rate": 9.253458753914874e-05, "loss": 3.7792, "step": 930},
    {"epoch": 0.19, "grad_norm": 1.2458661794662476, "learning_rate": 9.244671461813969e-05, "loss": 3.8579, "step": 935},
    {"epoch": 0.2, "grad_norm": 1.2035701274871826, "learning_rate": 9.235836978925572e-05, "loss": 3.7795, "step": 940},
    {"epoch": 0.2, "grad_norm": 1.2439464330673218, "learning_rate": 9.226955403468406e-05, "loss": 3.8381, "step": 945},
    {"epoch": 0.2, "grad_norm": 1.3288521766662598, "learning_rate": 9.21802683418475e-05, "loss": 3.8374, "step": 950},
    {"epoch": 0.2, "grad_norm": 1.4271641969680786, "learning_rate": 9.209051370339347e-05, "loss": 3.8206, "step": 955},
    {"epoch": 0.2, "grad_norm": 1.034192442893982, "learning_rate": 9.200029111718295e-05, "loss": 3.9133, "step": 960},
    {"epoch": 0.2, "grad_norm": 1.2292518615722656, "learning_rate": 9.190960158627941e-05, "loss": 3.8224, "step": 965},
    {"epoch": 0.2, "grad_norm": 1.3718514442443848, "learning_rate": 9.181844611893766e-05, "loss": 3.8073, "step": 970},
    {"epoch": 0.2, "grad_norm": 1.1448020935058594, "learning_rate": 9.172682572859261e-05, "loss": 3.7932, "step": 975},
    {"epoch": 0.2, "grad_norm": 1.2375144958496094, "learning_rate": 9.163474143384806e-05, "loss": 3.8355, "step": 980},
    {"epoch": 0.2, "grad_norm": 1.2168779373168945, "learning_rate": 9.154219425846528e-05, "loss": 3.7932, "step": 985},
    {"epoch": 0.21, "grad_norm": 1.285558819770813, "learning_rate": 9.144918523135175e-05, "loss": 3.7946, "step": 990},
    {"epoch": 0.21, "grad_norm": 1.4132548570632935, "learning_rate": 9.13557153865496e-05, "loss": 3.7393, "step": 995},
    {"epoch": 0.21, "grad_norm": 1.2185122966766357, "learning_rate": 9.12617857632242e-05, "loss": 3.8195, "step": 1000},
    {"epoch": 0.21, "grad_norm": 1.0606962442398071, "learning_rate": 9.116739740565259e-05, "loss": 3.6942, "step": 1005},
    {"epoch": 0.21, "grad_norm": 1.2439757585525513, "learning_rate": 9.107255136321184e-05, "loss": 3.7296, "step": 1010},
    {"epoch": 0.21, "grad_norm": 1.2272452116012573, "learning_rate": 9.09772486903674e-05, "loss": 3.7419, "step": 1015},
    {"epoch": 0.21, "grad_norm": 1.1247087717056274, "learning_rate": 9.08814904466614e-05, "loss": 3.7929, "step": 1020},
    {"epoch": 0.21, "grad_norm": 1.1768749952316284, "learning_rate": 9.078527769670085e-05, "loss": 3.7367, "step": 1025},
    {"epoch": 0.21, "grad_norm": 1.0592442750930786, "learning_rate": 9.068861151014575e-05, "loss": 3.8446, "step": 1030},
    {"epoch": 0.22, "grad_norm": 1.0224711894989014, "learning_rate": 9.05914929616973e-05, "loss": 3.7538, "step": 1035},
    {"epoch": 0.22, "grad_norm": 1.0650990009307861, "learning_rate": 9.04939231310859e-05, "loss": 3.747, "step": 1040},
    {"epoch": 0.22, "grad_norm": 1.1367437839508057, "learning_rate": 9.039590310305914e-05, "loss": 3.7478, "step": 1045},
    {"epoch": 0.22, "grad_norm": 1.111429214477539, "learning_rate": 9.029743396736974e-05, "loss": 3.6822, "step": 1050},
    {"epoch": 0.22, "grad_norm": 1.1658196449279785, "learning_rate": 9.019851681876348e-05, "loss": 3.7084, "step": 1055},
    {"epoch": 0.22, "grad_norm": 1.2753981351852417, "learning_rate": 9.009915275696693e-05, "loss": 3.678, "step": 1060},
    {"epoch": 0.22, "grad_norm": 1.1910932064056396, "learning_rate": 8.999934288667534e-05, "loss": 3.7276, "step": 1065},
    {"epoch": 0.22, "grad_norm": 1.3303883075714111, "learning_rate": 8.989908831754028e-05, "loss": 3.7377, "step": 1070},
    {"epoch": 0.22, "grad_norm": 1.1198793649673462, "learning_rate": 8.979839016415735e-05, "loss": 3.7212, "step": 1075},
    {"epoch": 0.22, "grad_norm": 1.1606683731079102, "learning_rate": 8.969724954605373e-05, "loss": 3.7697, "step": 1080},
    {"epoch": 0.23, "grad_norm": 1.1115267276763916, "learning_rate": 8.959566758767581e-05, "loss": 3.6644, "step": 1085},
    {"epoch": 0.23, "grad_norm": 1.1623845100402832, "learning_rate": 8.949364541837661e-05, "loss": 3.7393, "step": 1090},
    {"epoch": 0.23, "grad_norm": 1.153632640838623, "learning_rate": 8.939118417240329e-05, "loss": 3.7721, "step": 1095},
    {"epoch": 0.23, "grad_norm": 1.0939881801605225, "learning_rate": 8.92882849888845e-05, "loss": 3.7285, "step": 1100},
    {"epoch": 0.23, "grad_norm": 1.1292450428009033, "learning_rate": 8.918494901181773e-05, "loss": 3.6388, "step": 1105},
    {"epoch": 0.23, "grad_norm": 1.1940792798995972, "learning_rate": 8.908117739005659e-05, "loss": 3.7399, "step": 1110},
    {"epoch": 0.23, "grad_norm": 1.0818376541137695, "learning_rate": 8.897697127729805e-05, "loss": 3.7775, "step": 1115},
    {"epoch": 0.23, "grad_norm": 1.0121492147445679, "learning_rate": 8.887233183206957e-05, "loss": 3.727, "step": 1120},
    {"epoch": 0.23, "grad_norm": 1.1409603357315063, "learning_rate": 8.876726021771627e-05, "loss": 3.6582, "step": 1125},
    {"epoch": 0.23, "grad_norm": 1.3128355741500854, "learning_rate": 8.866175760238798e-05, "loss": 3.7046, "step": 1130},
    {"epoch": 0.24, "grad_norm": 1.2662190198898315, "learning_rate": 8.855582515902625e-05, "loss": 3.7284, "step": 1135},
    {"epoch": 0.24, "grad_norm": 1.3230217695236206, "learning_rate": 8.844946406535131e-05, "loss": 3.6778, "step": 1140},
    {"epoch": 0.24, "grad_norm": 1.2414748668670654, "learning_rate": 8.834267550384893e-05, "loss": 3.7084, "step": 1145},
    {"epoch": 0.24, "grad_norm": 1.0208975076675415, "learning_rate": 8.823546066175741e-05, "loss": 3.6613, "step": 1150},
    {"epoch": 0.24, "grad_norm": 1.1987378597259521, "learning_rate": 8.81278207310542e-05, "loss": 3.7607, "step": 1155},
    {"epoch": 0.24, "grad_norm": 1.0733369588851929, "learning_rate": 8.801975690844278e-05, "loss": 3.6559, "step": 1160},
    {"epoch": 0.24, "grad_norm": 1.1883203983306885, "learning_rate": 8.791127039533934e-05, "loss": 3.7051, "step": 1165},
    {"epoch": 0.24, "grad_norm": 1.13148832321167, "learning_rate": 8.780236239785935e-05, "loss": 3.6763, "step": 1170},
    {"epoch": 0.24, "grad_norm": 0.9961485266685486, "learning_rate": 8.76930341268042e-05, "loss": 3.6938, "step": 1175},
    {"epoch": 0.25, "grad_norm": 1.0561619997024536, "learning_rate": 8.758328679764776e-05, "loss": 3.5663, "step": 1180},
    {"epoch": 0.25, "grad_norm": 1.127403736114502, "learning_rate": 8.747312163052284e-05, "loss": 3.679, "step": 1185},
    {"epoch": 0.25, "grad_norm": 1.0956592559814453, "learning_rate": 8.736253985020761e-05, "loss": 3.6786, "step": 1190},
    {"epoch": 0.25, "grad_norm": 1.08185613155365, "learning_rate": 8.725154268611203e-05, "loss": 3.757, "step": 1195},
    {"epoch": 0.25, "grad_norm": 1.0391801595687866, "learning_rate": 8.714013137226411e-05, "loss": 3.7139, "step": 1200},
    {"epoch": 0.25, "grad_norm": 1.1036536693572998, "learning_rate": 8.702830714729628e-05, "loss": 3.7101, "step": 1205},
    {"epoch": 0.25, "grad_norm": 1.2152525186538696, "learning_rate": 8.691607125443153e-05, "loss": 3.6212, "step": 1210},
    {"epoch": 0.25, "grad_norm": 1.1887563467025757, "learning_rate": 8.680342494146967e-05, "loss": 3.6976, "step": 1215},
    {"epoch": 0.25, "grad_norm": 1.0755887031555176, "learning_rate": 8.66903694607734e-05, "loss": 3.678, "step": 1220},
    {"epoch": 0.25, "grad_norm": 0.9953941106796265, "learning_rate": 8.65769060692544e-05, "loss": 3.6319, "step": 1225},
    {"epoch": 0.26, "grad_norm": 1.09784996509552, "learning_rate": 8.646303602835936e-05, "loss": 3.7209, "step": 1230},
    {"epoch": 0.26, "grad_norm": 1.162729263305664, "learning_rate": 8.634876060405597e-05, "loss": 3.6444, "step": 1235},
    {"epoch": 0.26, "grad_norm": 1.3574235439300537, "learning_rate": 8.623408106681884e-05, "loss": 3.5982, "step": 1240},
    {"epoch": 0.26, "grad_norm": 1.187386393547058, "learning_rate": 8.611899869161535e-05, "loss": 3.6177, "step": 1245},
    {"epoch": 0.26, "grad_norm": 1.1473116874694824, "learning_rate": 8.600351475789147e-05, "loss": 3.6681, "step": 1250},
    {"epoch": 0.26, "grad_norm": 1.0066689252853394, "learning_rate": 8.588763054955764e-05, "loss": 3.6256, "step": 1255},
    {"epoch": 0.26, "grad_norm": 1.1282976865768433, "learning_rate": 8.57713473549743e-05, "loss": 3.5512, "step": 1260},
    {"epoch": 0.26, "grad_norm": 1.0864704847335815, "learning_rate": 8.565466646693778e-05, "loss": 3.6497, "step": 1265},
    {"epoch": 0.26, "grad_norm": 1.0346888303756714, "learning_rate": 8.553758918266578e-05, "loss": 3.6071, "step": 1270},
    {"epoch": 0.27, "grad_norm": 1.0757558345794678, "learning_rate": 8.5420116803783e-05, "loss": 3.61, "step": 1275},
    {"epoch": 0.27, "grad_norm": 1.1552907228469849, "learning_rate": 8.530225063630668e-05, "loss": 3.6771, "step": 1280},
    {"epoch": 0.27, "grad_norm": 0.9322082996368408, "learning_rate": 8.518399199063205e-05, "loss": 3.6004, "step": 1285},
    {"epoch": 0.27, "grad_norm": 1.185263752937317, "learning_rate": 8.50653421815178e-05, "loss": 3.6133, "step": 1290},
    {"epoch": 0.27, "grad_norm": 1.0670915842056274, "learning_rate": 8.494630252807138e-05, "loss": 3.6131, "step": 1295},
    {"epoch": 0.27, "grad_norm": 1.0210850238800049, "learning_rate": 8.482687435373449e-05, "loss": 3.6778, "step": 1300},
    {"epoch": 0.27, "grad_norm": 1.0529567003250122, "learning_rate": 8.470705898626817e-05, "loss": 3.621, "step": 1305},
    {"epoch": 0.27, "grad_norm": 1.0043034553527832, "learning_rate": 8.458685775773822e-05, "loss": 3.6038, "step": 1310},
    {"epoch": 0.27, "grad_norm": 1.008782982826233, "learning_rate": 8.446627200450025e-05, "loss": 3.6061, "step": 1315},
    {"epoch": 0.27, "grad_norm": 1.0716972351074219, "learning_rate": 8.434530306718493e-05, "loss": 3.5565, "step": 1320},
    {"epoch": 0.28, "grad_norm": 0.9667292833328247, "learning_rate": 8.4223952290683e-05, "loss": 3.6784, "step": 1325},
    {"epoch": 0.28, "grad_norm": 1.0869324207305908, "learning_rate": 8.41022210241304e-05, "loss": 3.6062, "step": 1330},
    {"epoch": 0.28, "grad_norm": 1.1732205152511597, "learning_rate": 8.398011062089316e-05, "loss": 3.6649, "step": 1335},
    {"epoch": 0.28, "grad_norm": 0.9114173650741577, "learning_rate": 8.385762243855249e-05, "loss": 3.5658, "step": 1340},
    {"epoch": 0.28, "grad_norm": 1.0308265686035156, "learning_rate": 8.373475783888958e-05, "loss": 3.548, "step": 1345},
    {"epoch": 0.28, "grad_norm": 1.0726767778396606, "learning_rate": 8.36115181878705e-05, "loss": 3.6422, "step": 1350},
    {"epoch": 0.28, "grad_norm": 1.0571999549865723, "learning_rate": 8.348790485563101e-05, "loss": 3.5881, "step": 1355},
    {"epoch": 0.28, "grad_norm": 1.1550967693328857, "learning_rate": 8.336391921646134e-05, "loss": 3.5942, "step": 1360},
    {"epoch": 0.28, "grad_norm": 1.0572580099105835, "learning_rate": 8.323956264879089e-05, "loss": 3.641, "step": 1365},
    {"epoch": 0.28, "grad_norm": 0.9506217241287231, "learning_rate": 8.311483653517294e-05, "loss": 3.6348, "step": 1370},
    {"epoch": 0.29, "grad_norm": 1.0256168842315674, "learning_rate": 8.298974226226919e-05, "loss": 3.5617, "step": 1375},
    {"epoch": 0.29, "grad_norm": 0.9493563771247864, "learning_rate": 8.28642812208345e-05, "loss": 3.6105, "step": 1380},
    {"epoch": 0.29, "grad_norm": 0.9935876727104187, "learning_rate": 8.273845480570123e-05, "loss": 3.631, "step": 1385},
    {"epoch": 0.29, "grad_norm": 1.040040135383606, "learning_rate": 8.26122644157639e-05, "loss": 3.6058, "step": 1390},
    {"epoch": 0.29, "grad_norm": 1.1051440238952637, "learning_rate": 8.248571145396362e-05, "loss": 3.6573, "step": 1395},
    {"epoch": 0.29, "grad_norm": 0.9562785029411316, "learning_rate": 8.235879732727236e-05, "loss": 3.6642, "step": 1400},
    {"epoch": 0.29, "grad_norm": 0.9978268146514893, "learning_rate": 8.223152344667745e-05, "loss": 3.6837, "step": 1405},
    {"epoch": 0.29, "grad_norm": 1.0548397302627563, "learning_rate": 8.21038912271658e-05, "loss": 3.6198, "step": 1410},
    {"epoch": 0.29, "grad_norm": 0.9698254466056824, "learning_rate": 8.197590208770824e-05, "loss": 3.6531, "step": 1415},
    {"epoch": 0.3, "grad_norm": 0.8963156342506409, "learning_rate": 8.184755745124371e-05, "loss": 3.6352, "step": 1420},
    {"epoch": 0.3, "grad_norm": 0.9984579682350159, "learning_rate": 8.171885874466342e-05, "loss": 3.5373, "step": 1425},
    {"epoch": 0.3, "grad_norm": 0.9025471210479736, "learning_rate": 8.158980739879507e-05, "loss": 3.5601, "step": 1430},
    {"epoch": 0.3, "grad_norm": 1.0891411304473877, "learning_rate": 8.146040484838677e-05, "loss": 3.6682, "step": 1435},
    {"epoch": 0.3, "grad_norm": 1.0604552030563354, "learning_rate": 8.133065253209132e-05, "loss": 3.5918, "step": 1440},
    {"epoch": 0.3, "grad_norm": 1.0379425287246704, "learning_rate": 8.120055189245e-05, "loss": 3.5252, "step": 1445},
    {"epoch": 0.3, "grad_norm": 0.9820531010627747, "learning_rate": 8.10701043758767e-05, "loss": 3.5687, "step": 1450},
    {"epoch": 0.3, "grad_norm": 0.9933525919914246, "learning_rate": 8.093931143264174e-05, "loss": 3.6001, "step": 1455},
    {"epoch": 0.3, "grad_norm": 1.0662941932678223, "learning_rate": 8.080817451685576e-05, "loss": 3.5142, "step": 1460},
    {"epoch": 0.3, "grad_norm": 1.0238711833953857, "learning_rate": 8.067669508645356e-05, "loss": 3.6646, "step": 1465},
    {"epoch": 0.31, "grad_norm": 0.9672027826309204, "learning_rate": 8.054487460317797e-05, "loss": 3.6427, "step": 1470},
    {"epoch": 0.31, "grad_norm": 0.914375364780426, "learning_rate": 8.041271453256345e-05, "loss": 3.4607, "step": 1475},
    {"epoch": 0.31, "grad_norm": 1.0790928602218628, "learning_rate": 8.02802163439199e-05, "loss": 3.6118, "step": 1480},
    {"epoch": 0.31, "grad_norm": 1.070357322692871, "learning_rate": 8.01473815103163e-05, "loss": 3.5799, "step": 1485},
    {"epoch": 0.31, "grad_norm": 1.1024534702301025, "learning_rate": 8.001421150856434e-05, "loss": 3.4861, "step": 1490},
    {"epoch": 0.31, "grad_norm": 1.0195724964141846, "learning_rate": 7.988070781920197e-05, "loss": 3.6483, "step": 1495},
    {"epoch": 0.31, "grad_norm": 0.9426557421684265, "learning_rate": 7.9746871926477e-05, "loss": 3.547, "step": 1500},
    {"epoch": 0.31, "grad_norm": 1.0127331018447876, "learning_rate": 7.961270531833052e-05, "loss": 3.6059, "step": 1505},
    {"epoch": 0.31, "grad_norm": 0.9816798567771912, "learning_rate": 7.947820948638045e-05, "loss": 3.5799, "step": 1510},
    {"epoch": 0.31, "grad_norm": 1.1183470487594604, "learning_rate": 7.934338592590486e-05, "loss": 3.6131, "step": 1515},
    {"epoch": 0.32, "grad_norm": 0.9896053671836853, "learning_rate": 7.92082361358254e-05, "loss": 3.5447, "step": 1520},
    {"epoch": 0.32, "grad_norm": 1.1003063917160034, "learning_rate": 7.907276161869065e-05, "loss": 3.5735, "step": 1525},
    {"epoch": 0.32, "grad_norm": 0.9578513503074646, "learning_rate": 7.893696388065936e-05, "loss": 3.5188, "step": 1530},
    {"epoch": 0.32, "grad_norm": 1.0876209735870361, "learning_rate": 7.88008444314838e-05, "loss": 3.505, "step": 1535},
    {"epoch": 0.32, "grad_norm": 0.95606529712677, "learning_rate": 7.866440478449283e-05, "loss": 3.5618, "step": 1540},
    {"epoch": 0.32, "grad_norm": 1.0113919973373413, "learning_rate": 7.852764645657522e-05, "loss": 3.5032, "step": 1545},
    {"epoch": 0.32, "grad_norm": 1.0219696760177612, "learning_rate": 7.839057096816271e-05, "loss": 3.4406, "step": 1550},
    {"epoch": 0.32, "grad_norm": 0.9677157998085022, "learning_rate": 7.82531798432131e-05, "loss": 3.4055, "step": 1555},
    {"epoch": 0.32, "grad_norm": 0.9238067269325256, "learning_rate": 7.811547460919333e-05, "loss": 3.5264, "step": 1560},
    {"epoch": 0.33, "grad_norm": 1.071569800376892, "learning_rate": 7.797745679706254e-05, "loss": 3.5282, "step": 1565},
    {"epoch": 0.33, "grad_norm": 1.0753663778305054, "learning_rate": 7.783912794125496e-05, "loss": 3.4538, "step": 1570},
    {"epoch": 0.33, "grad_norm": 1.0286469459533691, "learning_rate": 7.770048957966291e-05, "loss": 3.4562, "step": 1575},
    {"epoch": 0.33, "grad_norm": 1.0505075454711914, "learning_rate": 7.756154325361967e-05, "loss": 3.6064, "step": 1580},
    {"epoch": 0.33, "grad_norm": 0.976128876209259, "learning_rate": 7.74222905078824e-05, "loss": 3.4972, "step": 1585},
    {"epoch": 0.33, "grad_norm": 0.98134446144104, "learning_rate": 7.728273289061489e-05, "loss": 3.5518, "step": 1590},
    {"epoch": 0.33, "grad_norm": 0.9614392518997192, "learning_rate": 7.714287195337044e-05, "loss": 3.5749, "step": 1595},
    {"epoch": 0.33, "grad_norm": 0.9301189184188843, "learning_rate": 7.700270925107448e-05, "loss": 3.541, "step": 1600},
    {"epoch": 0.33, "grad_norm": 1.1225268840789795, "learning_rate": 7.686224634200742e-05, "loss": 3.5555, "step": 1605},
    {"epoch": 0.33, "grad_norm": 0.9103389978408813, "learning_rate": 7.672148478778722e-05, "loss": 3.5353, "step": 1610},
    {"epoch": 0.34, "grad_norm": 0.9482797384262085, "learning_rate": 7.658042615335212e-05, "loss": 3.4771, "step": 1615},
    {"epoch": 0.34, "grad_norm": 0.9419737458229065, "learning_rate": 7.643907200694318e-05, "loss": 3.5093, "step": 1620},
    {"epoch": 0.34, "grad_norm": 0.9081383347511292, "learning_rate": 7.629742392008684e-05, "loss": 3.5949, "step": 1625},
    {"epoch": 0.34, "grad_norm": 0.9159099459648132, "learning_rate": 7.615548346757749e-05, "loss": 3.4818, "step": 1630},
    {"epoch": 0.34, "grad_norm": 0.8918059468269348, "learning_rate": 7.60132522274599e-05, "loss": 3.6026, "step": 1635},
    {"epoch": 0.34, "grad_norm": 1.0188435316085815, "learning_rate": 7.587073178101178e-05, "loss": 3.5687, "step": 1640},
    {"epoch": 0.34, "grad_norm": 0.8577747941017151, "learning_rate": 7.572792371272609e-05, "loss": 3.5082, "step": 1645},
    {"epoch": 0.34, "grad_norm": 1.1805169582366943, "learning_rate": 7.55848296102935e-05, "loss": 3.5336, "step": 1650},
    {"epoch": 0.34, "grad_norm": 0.9859539270401001, "learning_rate": 7.544145106458465e-05, "loss": 3.5325, "step": 1655},
    {"epoch": 0.35, "grad_norm": 0.9058620929718018, "learning_rate": 7.529778966963259e-05, "loss": 3.5395, "step": 1660},
    {"epoch": 0.35, "grad_norm": 0.9886941909790039, "learning_rate": 7.515384702261496e-05, "loss": 3.5447, "step": 1665},
    {"epoch": 0.35, "grad_norm": 0.8978294134140015, "learning_rate": 7.500962472383627e-05, "loss": 3.4647, "step": 1670},
    {"epoch": 0.35, "grad_norm": 0.9597351551055908, "learning_rate": 7.486512437671011e-05, "loss": 3.4347, "step": 1675},
    {"epoch": 0.35, "grad_norm": 0.912813127040863, "learning_rate": 7.472034758774128e-05, "loss": 3.3937, "step": 1680},
    {"epoch": 0.35, "grad_norm": 0.9774367213249207, "learning_rate": 7.457529596650797e-05, "loss": 3.4253, "step": 1685},
    {"epoch": 0.35, "grad_norm": 0.9628680348396301, "learning_rate": 7.442997112564392e-05, "loss": 3.4534, "step": 1690},
    {"epoch": 0.35, "grad_norm": 1.0099488496780396, "learning_rate": 7.428437468082037e-05, "loss": 3.4865, "step": 1695},
    {"epoch": 0.35, "grad_norm": 0.9738776683807373, "learning_rate": 7.413850825072817e-05, "loss": 3.3938, "step": 1700},
    {"epoch": 0.35, "grad_norm": 0.8624957799911499, "learning_rate": 7.39923734570598e-05, "loss": 3.4803, "step": 1705},
    {"epoch": 0.36, "grad_norm": 0.8737362623214722, "learning_rate": 7.384597192449126e-05, "loss": 3.4579, "step": 1710},
    {"epoch": 0.36, "grad_norm": 0.9167929291725159, "learning_rate": 7.369930528066412e-05, "loss": 3.5313, "step": 1715},
    {"epoch": 0.36, "grad_norm": 0.9283750653266907, "learning_rate": 7.355237515616732e-05, "loss": 3.3903, "step": 1720},
    {"epoch": 0.36, "grad_norm": 1.0188536643981934, "learning_rate": 7.340518318451914e-05, "loss": 3.4892, "step": 1725},
    {"epoch": 0.36, "grad_norm": 0.9486240744590759, "learning_rate": 7.325773100214893e-05, "loss": 3.5478, "step": 1730},
    {"epoch": 0.36, "grad_norm": 0.8894400596618652, "learning_rate": 7.311002024837899e-05, "loss": 3.4897, "step": 1735},
    {"epoch": 0.36, "grad_norm": 1.0249364376068115, "learning_rate": 7.296205256540633e-05, "loss": 3.395, "step": 1740},
    {"epoch": 0.36, "grad_norm": 0.9660959839820862, "learning_rate": 7.281382959828443e-05, "loss": 3.4896, "step": 1745},
    {"epoch": 0.36, "grad_norm": 0.8965651392936707, "learning_rate": 7.26653529949049e-05, "loss": 3.4113, "step": 1750},
    {"epoch": 0.36, "grad_norm": 0.916313886642456, "learning_rate": 7.25166244059792e-05, "loss": 3.4342, "step": 1755},
    {"epoch": 0.37, "grad_norm": 0.918171226978302, "learning_rate": 7.236764548502029e-05, "loss": 3.4073, "step": 1760},
    {"epoch": 0.37, "grad_norm": 0.9626057147979736, "learning_rate": 7.221841788832421e-05, "loss": 3.5081, "step": 1765},
    {"epoch": 0.37, "grad_norm": 0.8918098211288452, "learning_rate": 7.206894327495173e-05, "loss": 3.4852, "step": 1770},
    {"epoch": 0.37, "grad_norm": 0.9311957359313965, "learning_rate": 7.191922330670982e-05, "loss": 3.4472, "step": 1775},
    {"epoch": 0.37, "grad_norm": 0.917389988899231, "learning_rate": 7.176925964813326e-05, "loss": 3.5054, "step": 1780},
    {"epoch": 0.37, "grad_norm": 0.9579627513885498, "learning_rate": 7.161905396646607e-05, "loss": 3.5408, "step": 1785},
    {"epoch": 0.37, "grad_norm": 0.9102108478546143, "learning_rate": 7.146860793164299e-05, "loss": 3.453, "step": 1790},
    {"epoch": 0.37, "grad_norm": 0.8844248652458191, "learning_rate": 7.131792321627098e-05, "loss": 3.4266, "step": 1795},
    {"epoch": 0.37, "grad_norm": 0.8853104114532471, "learning_rate": 7.116700149561048e-05, "loss": 3.4452, "step": 1800},
    {"epoch": 0.38, "grad_norm": 0.905546247959137, "learning_rate": 7.101584444755696e-05, "loss": 3.5243, "step": 1805},
    {"epoch": 0.38, "grad_norm": 1.021131157875061, "learning_rate": 7.086445375262212e-05, "loss": 3.5432, "step": 1810},
    {"epoch": 0.38, "grad_norm": 0.9376331567764282, "learning_rate": 7.071283109391528e-05, "loss": 3.4106, "step": 1815},
    {"epoch": 0.38, "grad_norm": 0.8878265023231506, "learning_rate": 7.056097815712466e-05, "loss": 3.4425, "step": 1820},
    {"epoch": 0.38, "grad_norm": 0.9961305260658264, "learning_rate": 7.040889663049862e-05, "loss": 3.4492, "step": 1825},
    {"epoch": 0.38, "grad_norm": 0.8520357608795166, "learning_rate": 7.025658820482693e-05, "loss": 3.4443, "step": 1830},
    {"epoch": 0.38, "grad_norm": 0.9509548544883728, "learning_rate": 7.010405457342192e-05, "loss": 3.4016, "step": 1835},
    {"epoch": 0.38, "grad_norm": 0.9308119416236877, "learning_rate": 6.995129743209967e-05, "loss": 3.4314, "step": 1840},
    {"epoch": 0.38, "grad_norm": 0.8559417724609375, "learning_rate": 6.97983184791612e-05, "loss": 3.4727, "step": 1845},
    {"epoch": 0.38, "grad_norm": 0.8854596614837646, "learning_rate": 6.964511941537355e-05, "loss": 3.3541, "step": 1850},
    {"epoch": 0.39, "grad_norm": 0.8448160886764526, "learning_rate": 6.949170194395083e-05, "loss": 3.4456, "step": 1855},
    {"epoch": 0.39, "grad_norm": 0.885260283946991, "learning_rate": 6.933806777053536e-05, "loss": 3.4313, "step": 1860},
    {"epoch": 0.39, "grad_norm": 0.8721843957901001, "learning_rate": 6.918421860317872e-05, "loss": 3.4689, "step": 1865},
    {"epoch": 0.39, "grad_norm": 0.830544114112854, "learning_rate": 6.903015615232263e-05, "loss": 3.4405, "step": 1870},
    {"epoch": 0.39, "grad_norm": 0.8577764630317688, "learning_rate": 6.887588213078012e-05, "loss": 3.4241, "step": 1875},
    {"epoch": 0.39, "grad_norm": 0.9850843548774719, "learning_rate": 6.87213982537163e-05, "loss": 3.3351, "step": 1880},
    {"epoch": 0.39, "grad_norm": 0.9125412106513977, "learning_rate": 6.856670623862943e-05, "loss": 3.4431, "step": 1885},
    {"epoch": 0.39, "grad_norm": 0.8923556208610535, "learning_rate": 6.841180780533179e-05, "loss": 3.4541, "step": 1890},
    {"epoch": 0.39, "grad_norm": 0.9279801249504089, "learning_rate": 6.82567046759305e-05, "loss": 3.4647, "step": 1895},
    {"epoch": 0.39, "grad_norm": 0.9199705123901367, "learning_rate": 6.810139857480844e-05, "loss": 3.4653, "step": 1900},
    {"epoch": 0.4, "grad_norm": 1.0211433172225952, "learning_rate": 6.794589122860509e-05, "loss": 3.4191, "step": 1905},
    {"epoch": 0.4, "grad_norm": 1.0141521692276, "learning_rate": 6.779018436619725e-05, "loss": 3.4482, "step": 1910},
    {"epoch": 0.4, "grad_norm": 1.0456479787826538, "learning_rate": 6.763427971867992e-05, "loss": 3.4434, "step": 1915},
    {"epoch": 0.4, "grad_norm": 0.862886905670166, "learning_rate": 6.747817901934699e-05, "loss": 3.3638, "step": 1920},
    {"epoch": 0.4, "grad_norm": 0.9483063817024231, "learning_rate": 6.732188400367197e-05, "loss": 3.4011, "step": 1925},
    {"epoch": 0.4, "grad_norm": 0.8820633888244629, "learning_rate": 6.716539640928871e-05, "loss": 3.4679, "step": 1930},
    {"epoch": 0.4, "grad_norm": 0.8584756851196289, "learning_rate": 6.70087179759721e-05, "loss": 3.3725, "step": 1935},
    {"epoch": 0.4, "grad_norm": 0.8979827761650085, "learning_rate": 6.685185044561874e-05, "loss": 3.4385, "step": 1940},
    {"epoch": 0.4, "grad_norm": 0.8375410437583923, "learning_rate": 6.669479556222747e-05, "loss": 3.4367, "step": 1945},
    {"epoch": 0.41, "grad_norm": 0.8942875266075134, "learning_rate": 6.653755507188013e-05, "loss": 3.4236, "step": 1950},
    {"epoch": 0.41, "grad_norm": 0.966738224029541, "learning_rate": 6.638013072272205e-05, "loss": 3.328, "step": 1955},
    {"epoch": 0.41, "grad_norm": 0.9720861315727234, "learning_rate": 6.622252426494259e-05, "loss": 3.4291, "step": 1960},
    {"epoch": 0.41, "grad_norm": 0.92670738697052, "learning_rate": 6.606473745075581e-05, "loss": 3.4255, "step": 1965},
    {"epoch": 0.41, "grad_norm": 0.8368898630142212, "learning_rate": 6.590677203438084e-05, "loss": 3.4261, "step": 1970},
    {"epoch": 0.41, "grad_norm": 0.9312162399291992, "learning_rate": 6.574862977202252e-05, "loss": 3.3538, "step": 1975},
    {"epoch": 0.41, "grad_norm": 0.9195160269737244, "learning_rate": 6.559031242185174e-05, "loss": 3.4648, "step": 1980},
    {"epoch": 0.41, "grad_norm": 0.8352183699607849, "learning_rate": 6.543182174398597e-05, "loss": 3.4051, "step": 1985},
    {"epoch": 0.41, "grad_norm": 0.8951199650764465, "learning_rate": 6.52731595004697e-05, "loss": 3.4039, "step": 1990},
    {"epoch": 0.41, "grad_norm": 0.9216262698173523, "learning_rate": 6.51143274552548e-05, "loss": 3.3785, "step": 1995},
    {"epoch": 0.42, "grad_norm": 1.011792778968811, "learning_rate": 6.495532737418098e-05, "loss": 3.4595, "step": 2000},
    {"epoch": 0.42, "grad_norm": 0.8370119333267212, "learning_rate": 6.479616102495605e-05, "loss": 3.4424, "step": 2005},
    {"epoch": 0.42, "grad_norm": 0.8701230883598328, "learning_rate": 6.463683017713638e-05, "loss": 3.4074, "step": 2010},
    {"epoch": 0.42, "grad_norm": 0.8995570540428162, "learning_rate": 6.447733660210715e-05, "loss": 3.3802, "step": 2015},
    {"epoch": 0.42, "grad_norm": 0.8354151248931885, "learning_rate": 6.431768207306272e-05, "loss": 3.3636, "step": 2020},
    {"epoch": 0.42, "grad_norm": 0.857018232345581, "learning_rate": 6.415786836498684e-05, "loss": 3.3024, "step": 2025},
    {"epoch": 0.42, "grad_norm": 0.9143330454826355, "learning_rate": 6.399789725463298e-05, "loss": 3.4235,
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.8741365671157837, |
|
"learning_rate": 6.383777052050458e-05, |
|
"loss": 3.4025, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.823043942451477, |
|
"learning_rate": 6.367748994283518e-05, |
|
"loss": 3.3333, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8715491890907288, |
|
"learning_rate": 6.351705730356877e-05, |
|
"loss": 3.3439, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8645880222320557, |
|
"learning_rate": 6.335647438633987e-05, |
|
"loss": 3.3991, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.91361403465271, |
|
"learning_rate": 6.319574297645374e-05, |
|
"loss": 3.3544, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.904449462890625, |
|
"learning_rate": 6.303486486086654e-05, |
|
"loss": 3.4209, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9177110195159912, |
|
"learning_rate": 6.287384182816546e-05, |
|
"loss": 3.3704, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8553934097290039, |
|
"learning_rate": 6.271267566854883e-05, |
|
"loss": 3.423, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8918532133102417, |
|
"learning_rate": 6.255136817380618e-05, |
|
"loss": 3.3414, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8273350596427917, |
|
"learning_rate": 6.23899211372984e-05, |
|
"loss": 3.3959, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8525533080101013, |
|
"learning_rate": 6.222833635393772e-05, |
|
"loss": 3.3691, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8700625896453857, |
|
"learning_rate": 6.206661562016782e-05, |
|
"loss": 3.4058, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.8108021020889282, |
|
"learning_rate": 6.190476073394382e-05, |
|
"loss": 3.387, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.8776712417602539, |
|
"learning_rate": 6.17427734947123e-05, |
|
"loss": 3.3676, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.9267117381095886, |
|
"learning_rate": 6.158065570339127e-05, |
|
"loss": 3.3566, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0094754695892334, |
|
"learning_rate": 6.141840916235021e-05, |
|
"loss": 3.367, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.1877857446670532, |
|
"learning_rate": 6.125603567539001e-05, |
|
"loss": 3.3563, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.8413233757019043, |
|
"learning_rate": 6.109353704772284e-05, |
|
"loss": 3.2802, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.9599123001098633, |
|
"learning_rate": 6.0930915085952164e-05, |
|
"loss": 3.452, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.8594855666160583, |
|
"learning_rate": 6.076817159805267e-05, |
|
"loss": 3.2819, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.8785787224769592, |
|
"learning_rate": 6.06053083933501e-05, |
|
"loss": 3.3245, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.8580414652824402, |
|
"learning_rate": 6.044232728250116e-05, |
|
"loss": 3.2992, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.8790245652198792, |
|
"learning_rate": 6.027923007747339e-05, |
|
"loss": 3.3385, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.8154867887496948, |
|
"learning_rate": 6.011601859152506e-05, |
|
"loss": 3.3793, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.8092056512832642, |
|
"learning_rate": 5.995269463918495e-05, |
|
"loss": 3.3127, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.855643630027771, |
|
"learning_rate": 5.97892600362322e-05, |
|
"loss": 3.3378, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.847649872303009, |
|
"learning_rate": 5.962571659967614e-05, |
|
"loss": 3.3644, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9203967452049255, |
|
"learning_rate": 5.946206614773606e-05, |
|
"loss": 3.3344, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9248597025871277, |
|
"learning_rate": 5.929831049982103e-05, |
|
"loss": 3.3794, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.8023829460144043, |
|
"learning_rate": 5.9134451476509633e-05, |
|
"loss": 3.4343, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.8594465851783752, |
|
"learning_rate": 5.897049089952974e-05, |
|
"loss": 3.4572, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.8430312871932983, |
|
"learning_rate": 5.880643059173826e-05, |
|
"loss": 3.3531, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9222793579101562, |
|
"learning_rate": 5.864227237710093e-05, |
|
"loss": 3.3767, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.89219069480896, |
|
"learning_rate": 5.847801808067189e-05, |
|
"loss": 3.3839, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9887323975563049, |
|
"learning_rate": 5.831366952857357e-05, |
|
"loss": 3.3403, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.7898027300834656, |
|
"learning_rate": 5.814922854797622e-05, |
|
"loss": 3.3232, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9959586262702942, |
|
"learning_rate": 5.798469696707775e-05, |
|
"loss": 3.3977, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9313060641288757, |
|
"learning_rate": 5.782007661508331e-05, |
|
"loss": 3.3479, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.8173262476921082, |
|
"learning_rate": 5.765536932218495e-05, |
|
"loss": 3.3965, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.8597461581230164, |
|
"learning_rate": 5.7490576919541315e-05, |
|
"loss": 3.3328, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.8330791592597961, |
|
"learning_rate": 5.732570123925729e-05, |
|
"loss": 3.404, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.8566576242446899, |
|
"learning_rate": 5.7160744114363593e-05, |
|
"loss": 3.4414, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.141120433807373, |
|
"learning_rate": 5.699570737879641e-05, |
|
"loss": 3.2698, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.8277701735496521, |
|
"learning_rate": 5.683059286737702e-05, |
|
"loss": 3.3993, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.8419466614723206, |
|
"learning_rate": 5.666540241579139e-05, |
|
"loss": 3.334, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.8160429000854492, |
|
"learning_rate": 5.6500137860569766e-05, |
|
"loss": 3.3605, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9068493843078613, |
|
"learning_rate": 5.633480103906624e-05, |
|
"loss": 3.3465, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.8464723229408264, |
|
"learning_rate": 5.616939378943834e-05, |
|
"loss": 3.3015, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.8291537761688232, |
|
"learning_rate": 5.6003917950626595e-05, |
|
"loss": 3.3815, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.8295892477035522, |
|
"learning_rate": 5.583837536233407e-05, |
|
"loss": 3.3028, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.8144384026527405, |
|
"learning_rate": 5.567276786500596e-05, |
|
"loss": 3.2974, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.8660650253295898, |
|
"learning_rate": 5.5507097299809054e-05, |
|
"loss": 3.3467, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.7973735332489014, |
|
"learning_rate": 5.534136550861133e-05, |
|
"loss": 3.3473, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.7773838639259338, |
|
"learning_rate": 5.5175574333961465e-05, |
|
"loss": 3.3118, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.8147760629653931, |
|
"learning_rate": 5.500972561906832e-05, |
|
"loss": 3.3131, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.8618188500404358, |
|
"learning_rate": 5.484382120778048e-05, |
|
"loss": 3.3739, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.8593544960021973, |
|
"learning_rate": 5.467786294456575e-05, |
|
"loss": 3.3894, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.8763341903686523, |
|
"learning_rate": 5.451185267449061e-05, |
|
"loss": 3.4009, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.09891939163208, |
|
"learning_rate": 5.43457922431998e-05, |
|
"loss": 3.3172, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.7956613898277283, |
|
"learning_rate": 5.417968349689566e-05, |
|
"loss": 3.1964, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8214170932769775, |
|
"learning_rate": 5.401352828231772e-05, |
|
"loss": 3.3333, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8590761423110962, |
|
"learning_rate": 5.384732844672211e-05, |
|
"loss": 3.3274, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8010850548744202, |
|
"learning_rate": 5.368108583786107e-05, |
|
"loss": 3.3073, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8691250681877136, |
|
"learning_rate": 5.3514802303962344e-05, |
|
"loss": 3.3787, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9491872191429138, |
|
"learning_rate": 5.334847969370868e-05, |
|
"loss": 3.3521, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8348716497421265, |
|
"learning_rate": 5.3182119856217284e-05, |
|
"loss": 3.3439, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.874321699142456, |
|
"learning_rate": 5.3015724641019214e-05, |
|
"loss": 3.2895, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8504311442375183, |
|
"learning_rate": 5.284929589803884e-05, |
|
"loss": 3.2222, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.860460102558136, |
|
"learning_rate": 5.2682835477573336e-05, |
|
"loss": 3.2721, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8044112920761108, |
|
"learning_rate": 5.2516345230271965e-05, |
|
"loss": 3.3099, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8295807242393494, |
|
"learning_rate": 5.234982700711569e-05, |
|
"loss": 3.3845, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8794777393341064, |
|
"learning_rate": 5.218328265939643e-05, |
|
"loss": 3.3037, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8412184715270996, |
|
"learning_rate": 5.201671403869657e-05, |
|
"loss": 3.2339, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8259831070899963, |
|
"learning_rate": 5.1850122996868366e-05, |
|
"loss": 3.3287, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8123874068260193, |
|
"learning_rate": 5.168351138601334e-05, |
|
"loss": 3.2838, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8798362016677856, |
|
"learning_rate": 5.1516881058461675e-05, |
|
"loss": 3.3506, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8345399498939514, |
|
"learning_rate": 5.135023386675166e-05, |
|
"loss": 3.2879, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8196049332618713, |
|
"learning_rate": 5.118357166360906e-05, |
|
"loss": 3.3534, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8316165804862976, |
|
"learning_rate": 5.101689630192655e-05, |
|
"loss": 3.3233, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8012190461158752, |
|
"learning_rate": 5.085020963474307e-05, |
|
"loss": 3.3246, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8753196597099304, |
|
"learning_rate": 5.068351351522329e-05, |
|
"loss": 3.3063, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8270070552825928, |
|
"learning_rate": 5.0516809796636935e-05, |
|
"loss": 3.2703, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9107401371002197, |
|
"learning_rate": 5.035010033233821e-05, |
|
"loss": 3.3625, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9239689707756042, |
|
"learning_rate": 5.018338697574523e-05, |
|
"loss": 3.2771, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.7925752997398376, |
|
"learning_rate": 5.0016671580319354e-05, |
|
"loss": 3.2949, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.7893439531326294, |
|
"learning_rate": 4.984995599954461e-05, |
|
"loss": 3.315, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8512730598449707, |
|
"learning_rate": 4.968324208690712e-05, |
|
"loss": 3.3766, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.793056309223175, |
|
"learning_rate": 4.951653169587441e-05, |
|
"loss": 3.2775, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8076874613761902, |
|
"learning_rate": 4.93498266798749e-05, |
|
"loss": 3.2826, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.7923920154571533, |
|
"learning_rate": 4.918312889227722e-05, |
|
"loss": 3.3037, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8230359554290771, |
|
"learning_rate": 4.901644018636966e-05, |
|
"loss": 3.2542, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.801092267036438, |
|
"learning_rate": 4.8849762415339526e-05, |
|
"loss": 3.3019, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8467086553573608, |
|
"learning_rate": 4.868309743225256e-05, |
|
"loss": 3.3187, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8135638236999512, |
|
"learning_rate": 4.851644709003233e-05, |
|
"loss": 3.2581, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8063775300979614, |
|
"learning_rate": 4.834981324143964e-05, |
|
"loss": 3.3253, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8042765855789185, |
|
"learning_rate": 4.818319773905191e-05, |
|
"loss": 3.3437, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.7903698682785034, |
|
"learning_rate": 4.801660243524261e-05, |
|
"loss": 3.2981, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9083207249641418, |
|
"learning_rate": 4.7850029182160626e-05, |
|
"loss": 3.3335, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8911808133125305, |
|
"learning_rate": 4.768347983170973e-05, |
|
"loss": 3.281, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8923286199569702, |
|
"learning_rate": 4.7516956235527884e-05, |
|
"loss": 3.3236, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8573455214500427, |
|
"learning_rate": 4.735046024496682e-05, |
|
"loss": 3.3264, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.7962297201156616, |
|
"learning_rate": 4.7183993711071286e-05, |
|
"loss": 3.2597, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8235583305358887, |
|
"learning_rate": 4.7017558484558554e-05, |
|
"loss": 3.3059, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.7752333879470825, |
|
"learning_rate": 4.6851156415797844e-05, |
|
"loss": 3.2725, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8234376907348633, |
|
"learning_rate": 4.6684789354789746e-05, |
|
"loss": 3.2395, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8423200249671936, |
|
"learning_rate": 4.651845915114563e-05, |
|
"loss": 3.2337, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8219478726387024, |
|
"learning_rate": 4.6352167654067095e-05, |
|
"loss": 3.2808, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8123370409011841, |
|
"learning_rate": 4.618591671232544e-05, |
|
"loss": 3.3276, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8480009436607361, |
|
"learning_rate": 4.601970817424106e-05, |
|
"loss": 3.3076, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8245264291763306, |
|
"learning_rate": 4.585354388766292e-05, |
|
"loss": 3.2825, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8287261128425598, |
|
"learning_rate": 4.568742569994802e-05, |
|
"loss": 3.2332, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.9042434096336365, |
|
"learning_rate": 4.552135545794086e-05, |
|
"loss": 3.3015, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8049812316894531, |
|
"learning_rate": 4.535533500795288e-05, |
|
"loss": 3.2932, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8048073649406433, |
|
"learning_rate": 4.5189366195741953e-05, |
|
"loss": 3.2472, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8750132322311401, |
|
"learning_rate": 4.502345086649186e-05, |
|
"loss": 3.3242, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.7771335244178772, |
|
"learning_rate": 4.485759086479179e-05, |
|
"loss": 3.2696, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8382053375244141, |
|
"learning_rate": 4.469178803461579e-05, |
|
"loss": 3.2338, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8180707097053528, |
|
"learning_rate": 4.4526044219302326e-05, |
|
"loss": 3.2866, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.847093403339386, |
|
"learning_rate": 4.4360361261533745e-05, |
|
"loss": 3.2347, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.7841566801071167, |
|
"learning_rate": 4.419474100331579e-05, |
|
"loss": 3.2444, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.7948038578033447, |
|
"learning_rate": 4.402918528595715e-05, |
|
"loss": 3.2591, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8009411096572876, |
|
"learning_rate": 4.386369595004896e-05, |
|
"loss": 3.2533, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8153753876686096, |
|
"learning_rate": 4.3698274835444354e-05, |
|
"loss": 3.2453, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8134960532188416, |
|
"learning_rate": 4.3532923781238e-05, |
|
"loss": 3.2249, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8260480165481567, |
|
"learning_rate": 4.336764462574566e-05, |
|
"loss": 3.2123, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8051034808158875, |
|
"learning_rate": 4.320243920648376e-05, |
|
"loss": 3.2513, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8382444381713867, |
|
"learning_rate": 4.303730936014894e-05, |
|
"loss": 3.2913, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8280849456787109, |
|
"learning_rate": 4.287225692259765e-05, |
|
"loss": 3.2828, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.8976969718933105, |
|
"learning_rate": 4.270728372882575e-05, |
|
"loss": 3.2859, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.8455418944358826, |
|
"learning_rate": 4.254239161294804e-05, |
|
"loss": 3.1686, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9356967806816101, |
|
"learning_rate": 4.237758240817802e-05, |
|
"loss": 3.2828, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.8209810256958008, |
|
"learning_rate": 4.2212857946807336e-05, |
|
"loss": 3.1977, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.8756744265556335, |
|
"learning_rate": 4.2048220060185516e-05, |
|
"loss": 3.2809, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.8613160252571106, |
|
"learning_rate": 4.188367057869957e-05, |
|
"loss": 3.2154, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.8148147463798523, |
|
"learning_rate": 4.171921133175365e-05, |
|
"loss": 3.2539, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.899475634098053, |
|
"learning_rate": 4.155484414774872e-05, |
|
"loss": 3.208, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.7842448949813843, |
|
"learning_rate": 4.139057085406221e-05, |
|
"loss": 3.2039, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8090781569480896, |
|
"learning_rate": 4.1226393277027726e-05, |
|
"loss": 3.3005, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8377348184585571, |
|
"learning_rate": 4.106231324191471e-05, |
|
"loss": 3.3182, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8332144618034363, |
|
"learning_rate": 4.089833257290817e-05, |
|
"loss": 3.258, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8233649134635925, |
|
"learning_rate": 4.073445309308842e-05, |
|
"loss": 3.3097, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8969298005104065, |
|
"learning_rate": 4.0570676624410756e-05, |
|
"loss": 3.2348, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.823154091835022, |
|
"learning_rate": 4.040700498768525e-05, |
|
"loss": 3.2481, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8446685075759888, |
|
"learning_rate": 4.024344000255648e-05, |
|
"loss": 3.2539, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.77149498462677, |
|
"learning_rate": 4.0079983487483313e-05, |
|
"loss": 3.2859, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8215575814247131, |
|
"learning_rate": 3.9916637259718683e-05, |
|
"loss": 3.3178, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.7828111052513123, |
|
"learning_rate": 3.9753403135289396e-05, |
|
"loss": 3.3045, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8224146962165833, |
|
"learning_rate": 3.9590282928975914e-05, |
|
"loss": 3.2101, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.7711217999458313, |
|
"learning_rate": 3.942727845429221e-05, |
|
"loss": 3.2301, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.7876419425010681, |
|
"learning_rate": 3.926439152346558e-05, |
|
"loss": 3.1997, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8607501983642578, |
|
"learning_rate": 3.910162394741653e-05, |
|
"loss": 3.289, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8112248182296753, |
|
"learning_rate": 3.893897753573861e-05, |
|
"loss": 3.284, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8297057747840881, |
|
"learning_rate": 3.877645409667829e-05, |
|
"loss": 3.1787, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8352723121643066, |
|
"learning_rate": 3.861405543711491e-05, |
|
"loss": 3.2283, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8486620783805847, |
|
"learning_rate": 3.8451783362540507e-05, |
|
"loss": 3.3196, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8207599520683289, |
|
"learning_rate": 3.828963967703983e-05, |
|
"loss": 3.2317, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8350016474723816, |
|
"learning_rate": 3.8127626183270223e-05, |
|
"loss": 3.2321, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8970246911048889, |
|
"learning_rate": 3.796574468244161e-05, |
|
"loss": 3.1897, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8711051940917969, |
|
"learning_rate": 3.7803996974296444e-05, |
|
"loss": 3.2698, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8610997200012207, |
|
"learning_rate": 3.7642384857089776e-05, |
|
"loss": 3.2274, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.7913540601730347, |
|
"learning_rate": 3.748091012756915e-05, |
|
"loss": 3.2757, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.7936437726020813, |
|
"learning_rate": 3.731957458095467e-05, |
|
"loss": 3.2132, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8374015092849731, |
|
"learning_rate": 3.71583800109191e-05, |
|
"loss": 3.2772, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8306594491004944, |
|
"learning_rate": 3.699732820956784e-05, |
|
"loss": 3.2765, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9261174201965332, |
|
"learning_rate": 3.6836420967419057e-05, |
|
"loss": 3.1874, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.7884053587913513, |
|
"learning_rate": 3.6675660073383745e-05, |
|
"loss": 3.2205, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8375781774520874, |
|
"learning_rate": 3.6515047314745856e-05, |
|
"loss": 3.2302, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8226302862167358, |
|
"learning_rate": 3.6354584477142437e-05, |
|
"loss": 3.3306, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8241636157035828, |
|
"learning_rate": 3.6194273344543736e-05, |
|
"loss": 3.2692, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8509169220924377, |
|
"learning_rate": 3.6034115699233425e-05, |
|
"loss": 3.2319, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8542621731758118, |
|
"learning_rate": 3.5874113321788736e-05, |
|
"loss": 3.1808, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7962069511413574, |
|
"learning_rate": 3.571426799106071e-05, |
|
"loss": 3.2203, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8124511241912842, |
|
"learning_rate": 3.555458148415437e-05, |
|
"loss": 3.2235, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7960987687110901, |
|
"learning_rate": 3.539505557640901e-05, |
|
"loss": 3.1952, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7768739461898804, |
|
"learning_rate": 3.523569204137843e-05, |
|
"loss": 3.2864, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8407379388809204, |
|
"learning_rate": 3.5076492650811246e-05, |
|
"loss": 3.2544, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8421855568885803, |
|
"learning_rate": 3.491745917463113e-05, |
|
"loss": 3.1736, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8619421720504761, |
|
"learning_rate": 3.475859338091721e-05, |
|
"loss": 3.2242, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8203493356704712, |
|
"learning_rate": 3.4599897035884374e-05, |
|
"loss": 3.2295, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.7925184965133667, |
|
"learning_rate": 3.444137190386363e-05, |
|
"loss": 3.1904, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9476016759872437, |
|
"learning_rate": 3.4283019747282514e-05, |
|
"loss": 3.2148, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8012914657592773, |
|
"learning_rate": 3.412484232664545e-05, |
|
"loss": 3.2104, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8429858684539795, |
|
"learning_rate": 3.396684140051424e-05, |
|
"loss": 3.1776, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8614286184310913, |
|
"learning_rate": 3.3809018725488466e-05, |
|
"loss": 3.2487, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8177609443664551, |
|
"learning_rate": 3.365137605618598e-05, |
|
"loss": 3.3414, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.7997239232063293, |
|
"learning_rate": 3.3493915145223395e-05, |
|
"loss": 3.1999, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.7842835187911987, |
|
"learning_rate": 3.3336637743196584e-05, |
|
"loss": 3.1948, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8240490555763245, |
|
"learning_rate": 3.317954559866126e-05, |
|
"loss": 3.2576, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8564560413360596, |
|
"learning_rate": 3.302264045811344e-05, |
|
"loss": 3.1953, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8260619044303894, |
|
"learning_rate": 3.286592406597021e-05, |
|
"loss": 3.2912, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8823931813240051, |
|
"learning_rate": 3.270939816455012e-05, |
|
"loss": 3.1892, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.829565703868866, |
|
"learning_rate": 3.255306449405395e-05, |
|
"loss": 3.2287, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8487275838851929, |
|
"learning_rate": 3.2396924792545304e-05, |
|
"loss": 3.2321, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8765036463737488, |
|
"learning_rate": 3.224098079593132e-05, |
|
"loss": 3.2274, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8339765071868896, |
|
"learning_rate": 3.2085234237943354e-05, |
|
"loss": 3.1882, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8532525897026062, |
|
"learning_rate": 3.19296868501177e-05, |
|
"loss": 3.3398, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.7803841233253479, |
|
"learning_rate": 3.177434036177636e-05, |
|
"loss": 3.204, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.849578320980072, |
|
"learning_rate": 3.1619196500007804e-05, |
|
"loss": 3.2008, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9354525208473206, |
|
"learning_rate": 3.146425698964776e-05, |
|
"loss": 3.1633, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.750751256942749, |
|
"learning_rate": 3.1309523553260046e-05, |
|
"loss": 3.2002, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8016756772994995, |
|
"learning_rate": 3.115499791111743e-05, |
|
"loss": 3.2153, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.7778719663619995, |
|
"learning_rate": 3.10006817811825e-05, |
|
"loss": 3.1818, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.7951830625534058, |
|
"learning_rate": 3.084657687908855e-05, |
|
"loss": 3.1904, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8168771862983704, |
|
"learning_rate": 3.069268491812052e-05, |
|
"loss": 3.177, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8381624817848206, |
|
"learning_rate": 3.0539007609195934e-05, |
|
"loss": 3.2262, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8873036503791809, |
|
"learning_rate": 3.0385546660845908e-05, |
|
"loss": 3.0912, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9288731813430786, |
|
"learning_rate": 3.0232303779196132e-05, |
|
"loss": 3.2078, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8773919343948364, |
|
"learning_rate": 3.0079280667947885e-05, |
|
"loss": 3.1059, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.823991596698761, |
|
"learning_rate": 2.9926479028359132e-05, |
|
"loss": 3.2226, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.7931019067764282, |
|
"learning_rate": 2.97739005592256e-05, |
|
"loss": 3.1763, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.7744354009628296, |
|
"learning_rate": 2.962154695686187e-05, |
|
"loss": 3.146, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8355756402015686, |
|
"learning_rate": 2.9469419915082536e-05, |
|
"loss": 3.1799, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8058030009269714, |
|
"learning_rate": 2.9317521125183368e-05, |
|
"loss": 3.2193, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8332851529121399, |
|
"learning_rate": 2.9165852275922524e-05, |
|
"loss": 3.2367, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8169847726821899, |
|
"learning_rate": 2.901441505350174e-05, |
|
"loss": 3.2343, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8637244701385498, |
|
"learning_rate": 2.886321114154762e-05, |
|
"loss": 3.3044, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.7913427948951721, |
|
"learning_rate": 2.87122422210929e-05, |
|
"loss": 3.2248, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8639921545982361, |
|
"learning_rate": 2.8561509970557736e-05, |
|
"loss": 3.2188, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.801016092300415, |
|
"learning_rate": 2.8411016065731146e-05, |
|
"loss": 3.2085, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.7943453788757324, |
|
"learning_rate": 2.826076217975222e-05, |
|
"loss": 3.1371, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.7858323454856873, |
|
"learning_rate": 2.8110749983091632e-05, |
|
"loss": 3.1532, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8061541318893433, |
|
"learning_rate": 2.7960981143533053e-05, |
|
"loss": 3.1412, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.7628043293952942, |
|
"learning_rate": 2.781145732615457e-05, |
|
"loss": 3.1437, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8349359035491943, |
|
"learning_rate": 2.7662180193310218e-05, |
|
"loss": 3.1472, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8226621150970459, |
|
"learning_rate": 2.751315140461145e-05, |
|
"loss": 3.1925, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8105551600456238, |
|
"learning_rate": 2.7364372616908744e-05, |
|
"loss": 3.1886, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8078845143318176, |
|
"learning_rate": 2.7215845484273152e-05, |
|
"loss": 3.2221, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8181054592132568, |
|
"learning_rate": 2.7067571657977893e-05, |
|
"loss": 3.1477, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.7936989068984985, |
|
"learning_rate": 2.691955278648003e-05, |
|
"loss": 3.1467, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.7987035512924194, |
|
"learning_rate": 2.6771790515402112e-05, |
|
"loss": 3.155, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8322426080703735, |
|
"learning_rate": 2.6624286487513916e-05, |
|
"loss": 3.2001, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8069536685943604, |
|
"learning_rate": 2.6477042342714137e-05, |
|
"loss": 3.2272, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8421223163604736, |
|
"learning_rate": 2.633005971801219e-05, |
|
"loss": 3.1936, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8561714887619019, |
|
"learning_rate": 2.6183340247510013e-05, |
|
"loss": 3.12, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8508149981498718, |
|
"learning_rate": 2.6036885562383856e-05, |
|
"loss": 3.1496, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8741605281829834, |
|
"learning_rate": 2.5890697290866206e-05, |
|
"loss": 3.2406, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.7964335083961487, |
|
"learning_rate": 2.5744777058227642e-05, |
|
"loss": 3.1666, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8591488003730774, |
|
"learning_rate": 2.5599126486758777e-05, |
|
"loss": 3.2426, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8198850154876709, |
|
"learning_rate": 2.5453747195752243e-05, |
|
"loss": 3.1771, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8103959560394287, |
|
"learning_rate": 2.530864080148464e-05, |
|
"loss": 3.229, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8377269506454468, |
|
"learning_rate": 2.5163808917198615e-05, |
|
"loss": 3.139, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.7943127155303955, |
|
"learning_rate": 2.501925315308492e-05, |
|
"loss": 3.2239, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.7915096879005432, |
|
"learning_rate": 2.4874975116264477e-05, |
|
"loss": 3.2592, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.791080117225647, |
|
"learning_rate": 2.4730976410770534e-05, |
|
"loss": 3.1514, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8079975247383118, |
|
"learning_rate": 2.458725863753084e-05, |
|
"loss": 3.1366, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.7989469766616821, |
|
"learning_rate": 2.4443823394349834e-05, |
|
"loss": 3.2059, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.7954628467559814, |
|
"learning_rate": 2.430067227589088e-05, |
|
"loss": 3.2649, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.7923185229301453, |
|
"learning_rate": 2.4157806873658517e-05, |
|
"loss": 3.14, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.7876720428466797, |
|
"learning_rate": 2.401522877598087e-05, |
|
"loss": 3.154, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8176397681236267, |
|
"learning_rate": 2.3872939567991827e-05, |
|
"loss": 3.2069, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.7727697491645813, |
|
"learning_rate": 2.373094083161353e-05, |
|
"loss": 3.099, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.7817246913909912, |
|
"learning_rate": 2.358923414553877e-05, |
|
"loss": 3.1946, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.7991405725479126, |
|
"learning_rate": 2.3447821085213405e-05, |
|
"loss": 3.1937, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8209265470504761, |
|
"learning_rate": 2.3306703222818878e-05, |
|
"loss": 3.233, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8106775283813477, |
|
"learning_rate": 2.3165882127254705e-05, |
|
"loss": 3.2158, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8128020167350769, |
|
"learning_rate": 2.302535936412108e-05, |
|
"loss": 3.2527, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8070123791694641, |
|
"learning_rate": 2.2885136495701415e-05, |
|
"loss": 3.206, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8676778674125671, |
|
"learning_rate": 2.274521508094501e-05, |
|
"loss": 3.1197, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8604967594146729, |
|
"learning_rate": 2.2605596675449698e-05, |
|
"loss": 3.2578, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.836823582649231, |
|
"learning_rate": 2.246628283144457e-05, |
|
"loss": 3.1424, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.7955985069274902, |
|
"learning_rate": 2.232727509777269e-05, |
|
"loss": 3.2224, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.777150571346283, |
|
"learning_rate": 2.2188575019873932e-05, |
|
"loss": 3.2291, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8456872701644897, |
|
"learning_rate": 2.2050184139767704e-05, |
|
"loss": 3.1922, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.7916918396949768, |
|
"learning_rate": 2.191210399603591e-05, |
|
"loss": 3.1397, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8304871320724487, |
|
"learning_rate": 2.1774336123805772e-05, |
|
"loss": 3.106, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8173920512199402, |
|
"learning_rate": 2.1636882054732776e-05, |
|
"loss": 3.1667, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8554221987724304, |
|
"learning_rate": 2.1499743316983684e-05, |
|
"loss": 3.2484, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8113805651664734, |
|
"learning_rate": 2.1362921435219473e-05, |
|
"loss": 3.1443, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8250212669372559, |
|
"learning_rate": 2.1226417930578464e-05, |
|
"loss": 3.2164, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.804509699344635, |
|
"learning_rate": 2.109023432065935e-05, |
|
"loss": 3.1842, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8785647749900818, |
|
"learning_rate": 2.095437211950434e-05, |
|
"loss": 3.1888, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.7957813739776611, |
|
"learning_rate": 2.0818832837582352e-05, |
|
"loss": 3.1795, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8057026267051697, |
|
"learning_rate": 2.068361798177218e-05, |
|
"loss": 3.2627, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.7766295671463013, |
|
"learning_rate": 2.0548729055345778e-05, |
|
"loss": 3.2093, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.785518229007721, |
|
"learning_rate": 2.0414167557951514e-05, |
|
"loss": 3.2074, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.840319812297821, |
|
"learning_rate": 2.0279934985597527e-05, |
|
"loss": 3.2023, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8056073188781738, |
|
"learning_rate": 2.0146032830635054e-05, |
|
"loss": 3.2114, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.167738676071167, |
|
"learning_rate": 2.001246258174192e-05, |
|
"loss": 3.2406, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.7821819186210632, |
|
"learning_rate": 1.9879225723905886e-05, |
|
"loss": 3.1548, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.7894221544265747, |
|
"learning_rate": 1.9746323738408203e-05, |
|
"loss": 3.1988, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8630125522613525, |
|
"learning_rate": 1.9613758102807117e-05, |
|
"loss": 3.1896, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8015197515487671, |
|
"learning_rate": 1.9481530290921474e-05, |
|
"loss": 3.1377, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8257816433906555, |
|
"learning_rate": 1.934964177281428e-05, |
|
"loss": 3.094, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8061752319335938, |
|
"learning_rate": 1.9218094014776434e-05, |
|
"loss": 3.1906, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8156401515007019, |
|
"learning_rate": 1.9086888479310333e-05, |
|
"loss": 3.1326, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8027080297470093, |
|
"learning_rate": 1.895602662511371e-05, |
|
"loss": 3.242, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8097110986709595, |
|
"learning_rate": 1.8825509907063327e-05, |
|
"loss": 3.2284, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.7945130467414856, |
|
"learning_rate": 1.8695339776198872e-05, |
|
"loss": 3.138, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8243803977966309, |
|
"learning_rate": 1.8565517679706783e-05, |
|
"loss": 3.1851, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8031255602836609, |
|
"learning_rate": 1.8436045060904174e-05, |
|
"loss": 3.2587, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.7686798572540283, |
|
"learning_rate": 1.830692335922279e-05, |
|
"loss": 3.1487, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8455352783203125, |
|
"learning_rate": 1.8178154010192994e-05, |
|
"loss": 3.2103, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.7771708965301514, |
|
"learning_rate": 1.8049738445427822e-05, |
|
"loss": 3.0721, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.7743687033653259, |
|
"learning_rate": 1.7921678092607052e-05, |
|
"loss": 3.0943, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.7884378433227539, |
|
"learning_rate": 1.7793974375461352e-05, |
|
"loss": 3.1499, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8018225431442261, |
|
"learning_rate": 1.7666628713756417e-05, |
|
"loss": 3.2109, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.7938441038131714, |
|
"learning_rate": 1.7539642523277228e-05, |
|
"loss": 3.1008, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8159660696983337, |
|
"learning_rate": 1.7413017215812273e-05, |
|
"loss": 3.1646, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.791012704372406, |
|
"learning_rate": 1.728675419913788e-05, |
|
"loss": 3.3161, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.79207843542099, |
|
"learning_rate": 1.716085487700253e-05, |
|
"loss": 3.1227, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8134340643882751, |
|
"learning_rate": 1.703532064911131e-05, |
|
"loss": 3.1784, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.7827357649803162, |
|
"learning_rate": 1.6910152911110283e-05, |
|
"loss": 3.158, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8012112379074097, |
|
"learning_rate": 1.6785353054571024e-05, |
|
"loss": 3.1053, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.802773118019104, |
|
"learning_rate": 1.666092246697512e-05, |
|
"loss": 3.1735, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.7863219380378723, |
|
"learning_rate": 1.6536862531698766e-05, |
|
"loss": 3.0726, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.7989824414253235, |
|
"learning_rate": 1.6413174627997328e-05, |
|
"loss": 3.2088, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.7984068989753723, |
|
"learning_rate": 1.6289860130990147e-05, |
|
"loss": 3.1004, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.7902229428291321, |
|
"learning_rate": 1.6166920411645064e-05, |
|
"loss": 3.125, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8097918033599854, |
|
"learning_rate": 1.6044356836763315e-05, |
|
"loss": 3.1523, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.7726302742958069, |
|
"learning_rate": 1.5922170768964285e-05, |
|
"loss": 3.1466, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.7708826065063477, |
|
"learning_rate": 1.5800363566670362e-05, |
|
"loss": 3.1728, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.786941409111023, |
|
"learning_rate": 1.5678936584091852e-05, |
|
"loss": 3.183, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.7763702869415283, |
|
"learning_rate": 1.5557891171211892e-05, |
|
"loss": 3.1687, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.7879136204719543, |
|
"learning_rate": 1.5437228673771465e-05, |
|
"loss": 3.1369, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.816377580165863, |
|
"learning_rate": 1.5316950433254445e-05, |
|
"loss": 3.1376, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.7834891080856323, |
|
"learning_rate": 1.5197057786872649e-05, |
|
"loss": 3.1835, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8588930368423462, |
|
"learning_rate": 1.5077552067551015e-05, |
|
"loss": 3.0885, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.9164984226226807, |
|
"learning_rate": 1.4958434603912747e-05, |
|
"loss": 3.2133, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.7704552412033081, |
|
"learning_rate": 1.4839706720264546e-05, |
|
"loss": 3.1169, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8061482906341553, |
|
"learning_rate": 1.4721369736581924e-05, |
|
"loss": 3.2268, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.7758923768997192, |
|
"learning_rate": 1.4603424968494484e-05, |
|
"loss": 3.1401, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.7879301905632019, |
|
"learning_rate": 1.448587372727132e-05, |
|
"loss": 3.2194, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8021620512008667, |
|
"learning_rate": 1.4368717319806419e-05, |
|
"loss": 3.1025, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.794487476348877, |
|
"learning_rate": 1.4251957048604152e-05, |
|
"loss": 3.1316, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.7669387459754944, |
|
"learning_rate": 1.413559421176479e-05, |
|
"loss": 3.1092, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8293997645378113, |
|
"learning_rate": 1.4019630102970056e-05, |
|
"loss": 3.167, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8108026385307312, |
|
"learning_rate": 1.3904066011468753e-05, |
|
"loss": 3.2, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.7944249510765076, |
|
"learning_rate": 1.3788903222062433e-05, |
|
"loss": 3.0714, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.799651563167572, |
|
"learning_rate": 1.3674143015091118e-05, |
|
"loss": 3.1009, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.7957543134689331, |
|
"learning_rate": 1.355978666641905e-05, |
|
"loss": 3.0966, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8025516271591187, |
|
"learning_rate": 1.3445835447420507e-05, |
|
"loss": 3.0909, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8256009817123413, |
|
"learning_rate": 1.3332290624965688e-05, |
|
"loss": 3.2392, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.7947035431861877, |
|
"learning_rate": 1.3219153461406609e-05, |
|
"loss": 3.097, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.7943867444992065, |
|
"learning_rate": 1.3106425214563078e-05, |
|
"loss": 3.1614, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8204525113105774, |
|
"learning_rate": 1.2994107137708716e-05, |
|
"loss": 3.2037, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8015938997268677, |
|
"learning_rate": 1.2882200479556988e-05, |
|
"loss": 3.1579, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.7741564512252808, |
|
"learning_rate": 1.2770706484247397e-05, |
|
"loss": 3.1801, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.7824724316596985, |
|
"learning_rate": 1.2659626391331564e-05, |
|
"loss": 3.1861, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.769964337348938, |
|
"learning_rate": 1.2548961435759493e-05, |
|
"loss": 3.14, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8480700850486755, |
|
"learning_rate": 1.2438712847865846e-05, |
|
"loss": 3.1436, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8069185018539429, |
|
"learning_rate": 1.2328881853356244e-05, |
|
"loss": 3.1581, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.7940760254859924, |
|
"learning_rate": 1.221946967329365e-05, |
|
"loss": 3.132, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8028521537780762, |
|
"learning_rate": 1.2110477524084796e-05, |
|
"loss": 3.1812, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.7855331301689148, |
|
"learning_rate": 1.2001906617466657e-05, |
|
"loss": 3.1165, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.7715919017791748, |
|
"learning_rate": 1.1893758160492978e-05, |
|
"loss": 3.1442, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.855614423751831, |
|
"learning_rate": 1.1786033355520859e-05, |
|
"loss": 3.0849, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.787558913230896, |
|
"learning_rate": 1.1678733400197373e-05, |
|
"loss": 3.1168, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8070486783981323, |
|
"learning_rate": 1.1571859487446263e-05, |
|
"loss": 3.1315, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8083584308624268, |
|
"learning_rate": 1.1465412805454695e-05, |
|
"loss": 3.1808, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8175447583198547, |
|
"learning_rate": 1.1359394537660011e-05, |
|
"loss": 3.1449, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.7884967923164368, |
|
"learning_rate": 1.125380586273661e-05, |
|
"loss": 3.2144, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.7838531732559204, |
|
"learning_rate": 1.1148647954582808e-05, |
|
"loss": 3.1226, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8267455697059631, |
|
"learning_rate": 1.1043921982307819e-05, |
|
"loss": 3.1714, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.7771685123443604, |
|
"learning_rate": 1.0939629110218735e-05, |
|
"loss": 3.1241, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.7959226965904236, |
|
"learning_rate": 1.0835770497807596e-05, |
|
"loss": 3.1452, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8620918393135071, |
|
"learning_rate": 1.0732347299738493e-05, |
|
"loss": 3.2259, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.7999968528747559, |
|
"learning_rate": 1.0629360665834732e-05, |
|
"loss": 3.1147, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.7933318614959717, |
|
"learning_rate": 1.052681174106604e-05, |
|
"loss": 3.0951, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8061344027519226, |
|
"learning_rate": 1.0424701665535852e-05, |
|
"loss": 3.2327, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8072717785835266, |
|
"learning_rate": 1.0323031574468638e-05, |
|
"loss": 3.1211, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8158341646194458, |
|
"learning_rate": 1.0221802598197261e-05, |
|
"loss": 3.1877, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.7635796070098877, |
|
"learning_rate": 1.0121015862150423e-05, |
|
"loss": 3.1272, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.7936083078384399, |
|
"learning_rate": 1.0020672486840154e-05, |
|
"loss": 3.1304, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.7961178421974182, |
|
"learning_rate": 9.920773587849364e-06, |
|
"loss": 3.152, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.7673249840736389, |
|
"learning_rate": 9.821320275819401e-06, |
|
"loss": 3.233, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8225710391998291, |
|
"learning_rate": 9.72231365643777e-06, |
|
"loss": 3.1436, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8114515542984009, |
|
"learning_rate": 9.623754830425779e-06, |
|
"loss": 3.1356, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.7966388463973999, |
|
"learning_rate": 9.52564489352632e-06, |
|
"loss": 3.2398, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8004997372627258, |
|
"learning_rate": 9.427984936491702e-06, |
|
"loss": 3.1899, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.0184743404388428, |
|
"learning_rate": 9.330776045071509e-06, |
|
"loss": 3.1287, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.7918827533721924, |
|
"learning_rate": 9.23401930000054e-06, |
|
"loss": 3.1601, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8677778840065002, |
|
"learning_rate": 9.137715776986772e-06, |
|
"loss": 3.0695, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8687372207641602, |
|
"learning_rate": 9.041866546699434e-06, |
|
"loss": 3.1383, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8154995441436768, |
|
"learning_rate": 8.946472674757078e-06, |
|
"loss": 3.1618, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.7793681025505066, |
|
"learning_rate": 8.851535221715735e-06, |
|
"loss": 3.0916, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.7917068004608154, |
|
"learning_rate": 8.757055243057132e-06, |
|
"loss": 3.1318, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8099787831306458, |
|
"learning_rate": 8.663033789176967e-06, |
|
"loss": 3.1848, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.7744637727737427, |
|
"learning_rate": 8.5694719053732e-06, |
|
"loss": 3.1937, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8059993386268616, |
|
"learning_rate": 8.476370631834458e-06, |
|
"loss": 3.1326, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8357394933700562, |
|
"learning_rate": 8.383731003628452e-06, |
|
"loss": 3.1179, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8141809105873108, |
|
"learning_rate": 8.291554050690508e-06, |
|
"loss": 3.1621, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8160678148269653, |
|
"learning_rate": 8.199840797812058e-06, |
|
"loss": 3.1565, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.7965174317359924, |
|
"learning_rate": 8.108592264629295e-06, |
|
"loss": 3.127, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.7999559044837952, |
|
"learning_rate": 8.017809465611803e-06, |
|
"loss": 3.0942, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8185849189758301, |
|
"learning_rate": 7.927493410051324e-06, |
|
"loss": 3.0918, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8670766353607178, |
|
"learning_rate": 7.837645102050473e-06, |
|
"loss": 3.1102, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.7667662501335144, |
|
"learning_rate": 7.748265540511635e-06, |
|
"loss": 3.0881, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8150936365127563, |
|
"learning_rate": 7.65935571912582e-06, |
|
"loss": 3.1349, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.7920727729797363, |
|
"learning_rate": 7.5709166263616405e-06, |
|
"loss": 3.152, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.7608092427253723, |
|
"learning_rate": 7.482949245454302e-06, |
|
"loss": 3.1649, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.7877283692359924, |
|
"learning_rate": 7.3954545543946876e-06, |
|
"loss": 3.1612, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.7629894614219666, |
|
"learning_rate": 7.308433525918468e-06, |
|
"loss": 3.0384, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8076894879341125, |
|
"learning_rate": 7.221887127495313e-06, |
|
"loss": 3.1539, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.7971545457839966, |
|
"learning_rate": 7.1358163213181114e-06, |
|
"loss": 3.1739, |
|
"step": 4000 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 4811, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000, |
|
"total_flos": 1.0865380348133376e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |