|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.0526225042242836,
  "eval_steps": 500,
  "global_step": 6104,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0002672505948480982, "grad_norm": 3.2076423168182373, "learning_rate": 1.0157273918741808e-06, "loss": 1.4511, "step": 31 },
    { "epoch": 0.0005345011896961964, "grad_norm": 2.221529006958008, "learning_rate": 2.0314547837483616e-06, "loss": 1.3342, "step": 62 },
    { "epoch": 0.0008017517845442946, "grad_norm": 2.961315393447876, "learning_rate": 3.0471821756225426e-06, "loss": 1.2649, "step": 93 },
    { "epoch": 0.0010690023793923928, "grad_norm": 2.311617612838745, "learning_rate": 4.062909567496723e-06, "loss": 1.2278, "step": 124 },
    { "epoch": 0.001336252974240491, "grad_norm": 3.4343724250793457, "learning_rate": 5.078636959370905e-06, "loss": 1.2061, "step": 155 },
    { "epoch": 0.0016035035690885892, "grad_norm": 2.421396017074585, "learning_rate": 6.094364351245085e-06, "loss": 1.1848, "step": 186 },
    { "epoch": 0.0018707541639366874, "grad_norm": 2.8230600357055664, "learning_rate": 7.110091743119267e-06, "loss": 1.1586, "step": 217 },
    { "epoch": 0.0021380047587847856, "grad_norm": 2.021296501159668, "learning_rate": 8.125819134993446e-06, "loss": 1.1552, "step": 248 },
    { "epoch": 0.002405255353632884, "grad_norm": 2.2329111099243164, "learning_rate": 9.141546526867629e-06, "loss": 1.1342, "step": 279 },
    { "epoch": 0.002672505948480982, "grad_norm": 2.8933441638946533, "learning_rate": 1.015727391874181e-05, "loss": 1.1092, "step": 310 },
    { "epoch": 0.0029397565433290805, "grad_norm": 2.510772705078125, "learning_rate": 1.117300131061599e-05, "loss": 1.108, "step": 341 },
    { "epoch": 0.0032070071381771785, "grad_norm": 2.170926094055176, "learning_rate": 1.218872870249017e-05, "loss": 1.0982, "step": 372 },
    { "epoch": 0.003474257733025277, "grad_norm": 2.1116831302642822, "learning_rate": 1.3204456094364351e-05, "loss": 1.0977, "step": 403 },
    { "epoch": 0.003741508327873375, "grad_norm": 2.5462026596069336, "learning_rate": 1.4220183486238533e-05, "loss": 1.0797, "step": 434 },
    { "epoch": 0.004008758922721473, "grad_norm": 2.5239830017089844, "learning_rate": 1.5235910878112714e-05, "loss": 1.0691, "step": 465 },
    { "epoch": 0.004276009517569571, "grad_norm": 2.640591859817505, "learning_rate": 1.6251638269986893e-05, "loss": 1.0547, "step": 496 },
    { "epoch": 0.004543260112417669, "grad_norm": 1.9566192626953125, "learning_rate": 1.7267365661861077e-05, "loss": 1.0526, "step": 527 },
    { "epoch": 0.004810510707265768, "grad_norm": 2.515118360519409, "learning_rate": 1.8283093053735257e-05, "loss": 1.0614, "step": 558 },
    { "epoch": 0.005077761302113866, "grad_norm": 2.1588094234466553, "learning_rate": 1.9298820445609438e-05, "loss": 1.0461, "step": 589 },
    { "epoch": 0.005345011896961964, "grad_norm": 2.448280096054077, "learning_rate": 2.031454783748362e-05, "loss": 1.0333, "step": 620 },
    { "epoch": 0.005612262491810062, "grad_norm": 2.9539694786071777, "learning_rate": 2.13302752293578e-05, "loss": 1.0224, "step": 651 },
    { "epoch": 0.005879513086658161, "grad_norm": 2.6804676055908203, "learning_rate": 2.234600262123198e-05, "loss": 1.0145, "step": 682 },
    { "epoch": 0.006146763681506259, "grad_norm": 2.176100730895996, "learning_rate": 2.336173001310616e-05, "loss": 1.0164, "step": 713 },
    { "epoch": 0.006414014276354357, "grad_norm": 1.9107022285461426, "learning_rate": 2.437745740498034e-05, "loss": 1.004, "step": 744 },
    { "epoch": 0.006681264871202455, "grad_norm": 3.175410270690918, "learning_rate": 2.5393184796854525e-05, "loss": 0.9986, "step": 775 },
    { "epoch": 0.006948515466050554, "grad_norm": 1.7941040992736816, "learning_rate": 2.6408912188728702e-05, "loss": 0.9979, "step": 806 },
    { "epoch": 0.007215766060898652, "grad_norm": 2.428041458129883, "learning_rate": 2.7424639580602886e-05, "loss": 1.0056, "step": 837 },
    { "epoch": 0.00748301665574675, "grad_norm": 2.1440467834472656, "learning_rate": 2.8440366972477066e-05, "loss": 0.9908, "step": 868 },
    { "epoch": 0.007750267250594848, "grad_norm": 2.138157844543457, "learning_rate": 2.9456094364351244e-05, "loss": 0.986, "step": 899 },
    { "epoch": 0.008017517845442947, "grad_norm": 2.144076108932495, "learning_rate": 3.0471821756225428e-05, "loss": 0.9768, "step": 930 },
    { "epoch": 0.008284768440291045, "grad_norm": 2.116927146911621, "learning_rate": 3.148754914809961e-05, "loss": 0.975, "step": 961 },
    { "epoch": 0.008552019035139143, "grad_norm": 2.0087051391601562, "learning_rate": 3.2503276539973785e-05, "loss": 0.9746, "step": 992 },
    { "epoch": 0.00881926962998724, "grad_norm": 2.008014440536499, "learning_rate": 3.351900393184797e-05, "loss": 0.9612, "step": 1023 },
    { "epoch": 0.009086520224835339, "grad_norm": 2.5932323932647705, "learning_rate": 3.453473132372215e-05, "loss": 0.9611, "step": 1054 },
    { "epoch": 0.009353770819683437, "grad_norm": 1.7766073942184448, "learning_rate": 3.555045871559633e-05, "loss": 0.9529, "step": 1085 },
    { "epoch": 0.009621021414531536, "grad_norm": 1.699992299079895, "learning_rate": 3.6566186107470514e-05, "loss": 0.9638, "step": 1116 },
    { "epoch": 0.009888272009379634, "grad_norm": 2.139831304550171, "learning_rate": 3.7581913499344695e-05, "loss": 0.9518, "step": 1147 },
    { "epoch": 0.010155522604227732, "grad_norm": 2.510404109954834, "learning_rate": 3.8597640891218876e-05, "loss": 0.9557, "step": 1178 },
    { "epoch": 0.01042277319907583, "grad_norm": 1.7539429664611816, "learning_rate": 3.9613368283093056e-05, "loss": 0.949, "step": 1209 },
    { "epoch": 0.010690023793923928, "grad_norm": 1.7679015398025513, "learning_rate": 4.062909567496724e-05, "loss": 0.9285, "step": 1240 },
    { "epoch": 0.010957274388772026, "grad_norm": 1.6701771020889282, "learning_rate": 4.164482306684142e-05, "loss": 0.9391, "step": 1271 },
    { "epoch": 0.011224524983620124, "grad_norm": 1.7283812761306763, "learning_rate": 4.26605504587156e-05, "loss": 0.9328, "step": 1302 },
    { "epoch": 0.011491775578468222, "grad_norm": 1.363319754600525, "learning_rate": 4.367627785058978e-05, "loss": 0.9353, "step": 1333 },
    { "epoch": 0.011759026173316322, "grad_norm": 1.847582459449768, "learning_rate": 4.469200524246396e-05, "loss": 0.9272, "step": 1364 },
    { "epoch": 0.01202627676816442, "grad_norm": 1.6437714099884033, "learning_rate": 4.570773263433814e-05, "loss": 0.9219, "step": 1395 },
    { "epoch": 0.012293527363012518, "grad_norm": 1.979443073272705, "learning_rate": 4.672346002621232e-05, "loss": 0.91, "step": 1426 },
    { "epoch": 0.012560777957860616, "grad_norm": 1.601218342781067, "learning_rate": 4.77391874180865e-05, "loss": 0.9229, "step": 1457 },
    { "epoch": 0.012828028552708714, "grad_norm": 2.36148738861084, "learning_rate": 4.875491480996068e-05, "loss": 0.9199, "step": 1488 },
    { "epoch": 0.013095279147556812, "grad_norm": 4.211123466491699, "learning_rate": 4.977064220183487e-05, "loss": 1.0419, "step": 1519 },
    { "epoch": 0.01336252974240491, "grad_norm": 1.620786428451538, "learning_rate": 4.9999915451558777e-05, "loss": 0.954, "step": 1550 },
    { "epoch": 0.01362978033725301, "grad_norm": 3.2490246295928955, "learning_rate": 4.999955597496219e-05, "loss": 0.9464, "step": 1581 },
    { "epoch": 0.013897030932101108, "grad_norm": 1.9302624464035034, "learning_rate": 4.9998914381774255e-05, "loss": 0.9294, "step": 1612 },
    { "epoch": 0.014164281526949206, "grad_norm": 1.5750601291656494, "learning_rate": 4.999799067923527e-05, "loss": 0.922, "step": 1643 },
    { "epoch": 0.014431532121797304, "grad_norm": 53.0409049987793, "learning_rate": 4.999678487776908e-05, "loss": 1.0611, "step": 1674 },
    { "epoch": 0.014698782716645402, "grad_norm": 1.841700553894043, "learning_rate": 4.9995296990983006e-05, "loss": 1.0363, "step": 1705 },
    { "epoch": 0.0149660333114935, "grad_norm": 1.2549737691879272, "learning_rate": 4.999352703566763e-05, "loss": 0.938, "step": 1736 },
    { "epoch": 0.015233283906341597, "grad_norm": 1.3289740085601807, "learning_rate": 4.999147503179668e-05, "loss": 0.9113, "step": 1767 },
    { "epoch": 0.015500534501189695, "grad_norm": 1.5228811502456665, "learning_rate": 4.998914100252672e-05, "loss": 0.921, "step": 1798 },
    { "epoch": 0.015767785096037795, "grad_norm": 1.5058027505874634, "learning_rate": 4.998652497419696e-05, "loss": 0.9214, "step": 1829 },
    { "epoch": 0.016035035690885893, "grad_norm": 9.535687446594238, "learning_rate": 4.9983626976328927e-05, "loss": 1.3068, "step": 1860 },
    { "epoch": 0.01630228628573399, "grad_norm": 2.5820651054382324, "learning_rate": 4.998044704162613e-05, "loss": 1.7033, "step": 1891 },
    { "epoch": 0.01656953688058209, "grad_norm": 148.47412109375, "learning_rate": 4.9976985205973705e-05, "loss": 1.6935, "step": 1922 },
    { "epoch": 0.016836787475430187, "grad_norm": 8.416805267333984, "learning_rate": 4.997324150843799e-05, "loss": 1.6607, "step": 1953 },
    { "epoch": 0.017104038070278285, "grad_norm": 5.2713470458984375, "learning_rate": 4.99692159912661e-05, "loss": 1.1301, "step": 1984 },
    { "epoch": 0.017371288665126383, "grad_norm": 2.096306800842285, "learning_rate": 4.996490869988546e-05, "loss": 0.9931, "step": 2015 },
    { "epoch": 0.01763853925997448, "grad_norm": 1.5476640462875366, "learning_rate": 4.996031968290326e-05, "loss": 0.9514, "step": 2046 },
    { "epoch": 0.01790578985482258, "grad_norm": 1.727232813835144, "learning_rate": 4.995544899210594e-05, "loss": 0.9409, "step": 2077 },
    { "epoch": 0.018173040449670677, "grad_norm": 26.08816909790039, "learning_rate": 4.9950296682458583e-05, "loss": 0.9368, "step": 2108 },
    { "epoch": 0.018440291044518775, "grad_norm": 1.7082202434539795, "learning_rate": 4.994486281210429e-05, "loss": 0.9155, "step": 2139 },
    { "epoch": 0.018707541639366873, "grad_norm": 1.5962992906570435, "learning_rate": 4.9939147442363566e-05, "loss": 0.9233, "step": 2170 },
    { "epoch": 0.018974792234214975, "grad_norm": 2.024268627166748, "learning_rate": 4.9933150637733574e-05, "loss": 0.9128, "step": 2201 },
    { "epoch": 0.019242042829063073, "grad_norm": 1.4828873872756958, "learning_rate": 4.992687246588743e-05, "loss": 0.8957, "step": 2232 },
    { "epoch": 0.01950929342391117, "grad_norm": 1.745612382888794, "learning_rate": 4.992031299767347e-05, "loss": 0.9005, "step": 2263 },
    { "epoch": 0.01977654401875927, "grad_norm": 1.354095458984375, "learning_rate": 4.9913472307114386e-05, "loss": 0.9002, "step": 2294 },
    { "epoch": 0.020043794613607367, "grad_norm": 2.6227681636810303, "learning_rate": 4.9906350471406446e-05, "loss": 0.9004, "step": 2325 },
    { "epoch": 0.020311045208455464, "grad_norm": 1.3870893716812134, "learning_rate": 4.989894757091861e-05, "loss": 0.9018, "step": 2356 },
    { "epoch": 0.020578295803303562, "grad_norm": 1.2884349822998047, "learning_rate": 4.989126368919158e-05, "loss": 0.8993, "step": 2387 },
    { "epoch": 0.02084554639815166, "grad_norm": 1.215851068496704, "learning_rate": 4.988329891293693e-05, "loss": 0.8925, "step": 2418 },
    { "epoch": 0.02111279699299976, "grad_norm": 1.4246628284454346, "learning_rate": 4.987505333203608e-05, "loss": 0.8893, "step": 2449 },
    { "epoch": 0.021380047587847856, "grad_norm": 1.5495259761810303, "learning_rate": 4.9866527039539276e-05, "loss": 0.8749, "step": 2480 },
    { "epoch": 0.021647298182695954, "grad_norm": 1.0121793746948242, "learning_rate": 4.9857720131664594e-05, "loss": 0.8803, "step": 2511 },
    { "epoch": 0.021914548777544052, "grad_norm": 1.1507561206817627, "learning_rate": 4.9848632707796773e-05, "loss": 0.8843, "step": 2542 },
    { "epoch": 0.02218179937239215, "grad_norm": 1.612584114074707, "learning_rate": 4.9839264870486155e-05, "loss": 0.8733, "step": 2573 },
    { "epoch": 0.02244904996724025, "grad_norm": 1.4475393295288086, "learning_rate": 4.9829616725447526e-05, "loss": 0.8669, "step": 2604 },
    { "epoch": 0.022716300562088346, "grad_norm": 1.9612644910812378, "learning_rate": 4.981968838155888e-05, "loss": 0.8791, "step": 2635 },
    { "epoch": 0.022983551156936444, "grad_norm": 1.6032893657684326, "learning_rate": 4.980947995086024e-05, "loss": 0.8646, "step": 2666 },
    { "epoch": 0.023250801751784546, "grad_norm": 1.6368708610534668, "learning_rate": 4.979899154855234e-05, "loss": 0.871, "step": 2697 },
    { "epoch": 0.023518052346632644, "grad_norm": 1.0245656967163086, "learning_rate": 4.9788223292995386e-05, "loss": 0.8534, "step": 2728 },
    { "epoch": 0.023785302941480742, "grad_norm": 0.9378401041030884, "learning_rate": 4.977717530570768e-05, "loss": 0.8478, "step": 2759 },
    { "epoch": 0.02405255353632884, "grad_norm": 1.4262951612472534, "learning_rate": 4.976584771136425e-05, "loss": 0.8634, "step": 2790 },
    { "epoch": 0.024319804131176938, "grad_norm": 1.3320890665054321, "learning_rate": 4.975424063779547e-05, "loss": 0.8505, "step": 2821 },
    { "epoch": 0.024587054726025036, "grad_norm": 1.043971061706543, "learning_rate": 4.974235421598557e-05, "loss": 0.863, "step": 2852 },
    { "epoch": 0.024854305320873134, "grad_norm": 1.387088418006897, "learning_rate": 4.973018858007122e-05, "loss": 0.8505, "step": 2883 },
    { "epoch": 0.025121555915721232, "grad_norm": 1.1267746686935425, "learning_rate": 4.9717743867339963e-05, "loss": 0.8528, "step": 2914 },
    { "epoch": 0.02538880651056933, "grad_norm": 0.9636305570602417, "learning_rate": 4.9705020218228695e-05, "loss": 0.8453, "step": 2945 },
    { "epoch": 0.025656057105417428, "grad_norm": 1.1140860319137573, "learning_rate": 4.969201777632205e-05, "loss": 0.8664, "step": 2976 },
    { "epoch": 0.025923307700265526, "grad_norm": 1.1886147260665894, "learning_rate": 4.9678736688350846e-05, "loss": 0.8569, "step": 3007 },
    { "epoch": 0.026190558295113624, "grad_norm": 1.0308467149734497, "learning_rate": 4.966517710419033e-05, "loss": 0.85, "step": 3038 },
    { "epoch": 0.02645780888996172, "grad_norm": 1.6353480815887451, "learning_rate": 4.965133917685858e-05, "loss": 0.8414, "step": 3069 },
    { "epoch": 0.02672505948480982, "grad_norm": 1.2031575441360474, "learning_rate": 4.9637223062514714e-05, "loss": 0.8435, "step": 3100 },
    { "epoch": 0.026992310079657918, "grad_norm": 1.2575979232788086, "learning_rate": 4.962282892045718e-05, "loss": 0.8415, "step": 3131 },
    { "epoch": 0.02725956067450602, "grad_norm": 1.338584303855896, "learning_rate": 4.9608156913121904e-05, "loss": 0.8414, "step": 3162 },
    { "epoch": 0.027526811269354117, "grad_norm": 1.3699461221694946, "learning_rate": 4.959320720608049e-05, "loss": 0.8397, "step": 3193 },
    { "epoch": 0.027794061864202215, "grad_norm": 1.1471112966537476, "learning_rate": 4.9577979968038354e-05, "loss": 0.8304, "step": 3224 },
    { "epoch": 0.028061312459050313, "grad_norm": 1.1752680540084839, "learning_rate": 4.956247537083282e-05, "loss": 0.8408, "step": 3255 },
    { "epoch": 0.02832856305389841, "grad_norm": 1.089552879333496, "learning_rate": 4.9546693589431145e-05, "loss": 0.8286, "step": 3286 },
    { "epoch": 0.02859581364874651, "grad_norm": 1.0688341856002808, "learning_rate": 4.9530634801928595e-05, "loss": 0.8341, "step": 3317 },
    { "epoch": 0.028863064243594607, "grad_norm": 0.9290914535522461, "learning_rate": 4.9514299189546395e-05, "loss": 0.8307, "step": 3348 },
    { "epoch": 0.029130314838442705, "grad_norm": 1.3211214542388916, "learning_rate": 4.949768693662973e-05, "loss": 0.8349, "step": 3379 },
    { "epoch": 0.029397565433290803, "grad_norm": 1.8893773555755615, "learning_rate": 4.948079823064559e-05, "loss": 0.8289, "step": 3410 },
    { "epoch": 0.0296648160281389, "grad_norm": 1.3086936473846436, "learning_rate": 4.946363326218074e-05, "loss": 0.8231, "step": 3441 },
    { "epoch": 0.029932066622987, "grad_norm": 1.356311559677124, "learning_rate": 4.9446192224939525e-05, "loss": 0.8343, "step": 3472 },
    { "epoch": 0.030199317217835097, "grad_norm": 1.13872492313385, "learning_rate": 4.942847531574167e-05, "loss": 0.828, "step": 3503 },
    { "epoch": 0.030466567812683195, "grad_norm": 1.1912773847579956, "learning_rate": 4.941048273452008e-05, "loss": 0.8236, "step": 3534 },
    { "epoch": 0.030733818407531293, "grad_norm": 0.9231215715408325, "learning_rate": 4.9392214684318605e-05, "loss": 0.8226, "step": 3565 },
    { "epoch": 0.03100106900237939, "grad_norm": 1.0268129110336304, "learning_rate": 4.93736713712897e-05, "loss": 0.8218, "step": 3596 },
    { "epoch": 0.03126831959722749, "grad_norm": 0.974876344203949, "learning_rate": 4.9354853004692124e-05, "loss": 0.8149, "step": 3627 },
    { "epoch": 0.03153557019207559, "grad_norm": 1.2175925970077515, "learning_rate": 4.93357597968886e-05, "loss": 0.8152, "step": 3658 },
    { "epoch": 0.03180282078692369, "grad_norm": 1.2338181734085083, "learning_rate": 4.931639196334338e-05, "loss": 0.8089, "step": 3689 },
    { "epoch": 0.032070071381771786, "grad_norm": 1.1462433338165283, "learning_rate": 4.9296749722619826e-05, "loss": 0.8216, "step": 3720 },
    { "epoch": 0.032337321976619884, "grad_norm": 1.1631278991699219, "learning_rate": 4.9276833296377966e-05, "loss": 0.8272, "step": 3751 },
    { "epoch": 0.03260457257146798, "grad_norm": 0.903836727142334, "learning_rate": 4.925664290937196e-05, "loss": 0.816, "step": 3782 },
    { "epoch": 0.03287182316631608, "grad_norm": 0.944284975528717, "learning_rate": 4.9236178789447576e-05, "loss": 0.8031, "step": 3813 },
    { "epoch": 0.03313907376116418, "grad_norm": 1.1210039854049683, "learning_rate": 4.921544116753962e-05, "loss": 0.8096, "step": 3844 },
    { "epoch": 0.033406324356012276, "grad_norm": 1.0448529720306396, "learning_rate": 4.919443027766935e-05, "loss": 0.8059, "step": 3875 },
    { "epoch": 0.033673574950860374, "grad_norm": 1.0655525922775269, "learning_rate": 4.91731463569418e-05, "loss": 0.81, "step": 3906 },
    { "epoch": 0.03394082554570847, "grad_norm": 1.223928451538086, "learning_rate": 4.915158964554312e-05, "loss": 0.8175, "step": 3937 },
    { "epoch": 0.03420807614055657, "grad_norm": 1.0895342826843262, "learning_rate": 4.912976038673786e-05, "loss": 0.7992, "step": 3968 },
    { "epoch": 0.03447532673540467, "grad_norm": 1.1117792129516602, "learning_rate": 4.9107658826866254e-05, "loss": 0.8122, "step": 3999 },
    { "epoch": 0.034742577330252766, "grad_norm": 1.0806598663330078, "learning_rate": 4.908528521534139e-05, "loss": 0.811, "step": 4030 },
    { "epoch": 0.035009827925100864, "grad_norm": 0.7528677582740784, "learning_rate": 4.906263980464644e-05, "loss": 0.8036, "step": 4061 },
    { "epoch": 0.03527707851994896, "grad_norm": 1.1509230136871338, "learning_rate": 4.903972285033178e-05, "loss": 0.8047, "step": 4092 },
    { "epoch": 0.03554432911479706, "grad_norm": 1.1190931797027588, "learning_rate": 4.901653461101213e-05, "loss": 0.8123, "step": 4123 },
    { "epoch": 0.03581157970964516, "grad_norm": 0.8739308714866638, "learning_rate": 4.8993075348363626e-05, "loss": 0.8015, "step": 4154 },
    { "epoch": 0.036078830304493256, "grad_norm": 0.9889201521873474, "learning_rate": 4.896934532712084e-05, "loss": 0.8056, "step": 4185 },
    { "epoch": 0.036346080899341354, "grad_norm": 1.1964142322540283, "learning_rate": 4.8945344815073846e-05, "loss": 0.8083, "step": 4216 },
    { "epoch": 0.03661333149418945, "grad_norm": 1.0793588161468506, "learning_rate": 4.892107408306516e-05, "loss": 0.7976, "step": 4247 },
    { "epoch": 0.03688058208903755, "grad_norm": 1.2313084602355957, "learning_rate": 4.889653340498669e-05, "loss": 0.7954, "step": 4278 },
    { "epoch": 0.03714783268388565, "grad_norm": 1.1145700216293335, "learning_rate": 4.8871723057776664e-05, "loss": 0.7947, "step": 4309 },
    { "epoch": 0.037415083278733746, "grad_norm": 0.9714220762252808, "learning_rate": 4.8846643321416476e-05, "loss": 0.7973, "step": 4340 },
    { "epoch": 0.03768233387358185, "grad_norm": 1.0865004062652588, "learning_rate": 4.882129447892753e-05, "loss": 0.7918, "step": 4371 },
    { "epoch": 0.03794958446842995, "grad_norm": 0.8420351147651672, "learning_rate": 4.8795676816368076e-05, "loss": 0.805, "step": 4402 },
    { "epoch": 0.03821683506327805, "grad_norm": 1.0622302293777466, "learning_rate": 4.876979062282995e-05, "loss": 0.7977, "step": 4433 },
    { "epoch": 0.038484085658126145, "grad_norm": 0.8536683320999146, "learning_rate": 4.8743636190435325e-05, "loss": 0.7946, "step": 4464 },
    { "epoch": 0.03875133625297424, "grad_norm": 0.9905999302864075, "learning_rate": 4.871721381433344e-05, "loss": 0.7932, "step": 4495 },
    { "epoch": 0.03901858684782234, "grad_norm": 1.005560278892517, "learning_rate": 4.869052379269719e-05, "loss": 0.7956, "step": 4526 },
    { "epoch": 0.03928583744267044, "grad_norm": 0.8386696577072144, "learning_rate": 4.866356642671985e-05, "loss": 0.7865, "step": 4557 },
    { "epoch": 0.03955308803751854, "grad_norm": 0.8390759229660034, "learning_rate": 4.8636342020611634e-05, "loss": 0.7925, "step": 4588 },
    { "epoch": 0.039820338632366635, "grad_norm": 0.7457160949707031, "learning_rate": 4.860885088159626e-05, "loss": 0.7911, "step": 4619 },
    { "epoch": 0.04008758922721473, "grad_norm": 0.9717801213264465, "learning_rate": 4.858109331990751e-05, "loss": 0.7917, "step": 4650 },
    { "epoch": 0.04035483982206283, "grad_norm": 0.8517304062843323, "learning_rate": 4.855306964878567e-05, "loss": 0.7966, "step": 4681 },
    { "epoch": 0.04062209041691093, "grad_norm": 0.7597792744636536, "learning_rate": 4.8524780184474084e-05, "loss": 0.7798, "step": 4712 },
    { "epoch": 0.04088934101175903, "grad_norm": 0.9633410573005676, "learning_rate": 4.8496225246215496e-05, "loss": 0.784, "step": 4743 },
    { "epoch": 0.041156591606607125, "grad_norm": 0.8668884634971619, "learning_rate": 4.8467405156248505e-05, "loss": 0.7925, "step": 4774 },
    { "epoch": 0.04142384220145522, "grad_norm": 1.348170280456543, "learning_rate": 4.843832023980392e-05, "loss": 0.7882, "step": 4805 },
    { "epoch": 0.04169109279630332, "grad_norm": 0.9085477590560913, "learning_rate": 4.840897082510106e-05, "loss": 0.7834, "step": 4836 },
    { "epoch": 0.04195834339115142, "grad_norm": 1.2533594369888306, "learning_rate": 4.8379357243344084e-05, "loss": 0.7968, "step": 4867 },
    { "epoch": 0.04222559398599952, "grad_norm": 1.0250332355499268, "learning_rate": 4.8349479828718236e-05, "loss": 0.7755, "step": 4898 },
    { "epoch": 0.042492844580847615, "grad_norm": 0.8610180616378784, "learning_rate": 4.8319338918386075e-05, "loss": 0.7819, "step": 4929 },
    { "epoch": 0.04276009517569571, "grad_norm": 0.8235286474227905, "learning_rate": 4.828893485248369e-05, "loss": 0.7834, "step": 4960 },
    { "epoch": 0.04302734577054381, "grad_norm": 0.95796799659729, "learning_rate": 4.825826797411682e-05, "loss": 0.7822, "step": 4991 },
    { "epoch": 0.04329459636539191, "grad_norm": 1.022430419921875, "learning_rate": 4.822733862935702e-05, "loss": 0.781, "step": 5022 },
    { "epoch": 0.04356184696024001, "grad_norm": 0.9080584645271301, "learning_rate": 4.819614716723775e-05, "loss": 0.783, "step": 5053 },
    { "epoch": 0.043829097555088105, "grad_norm": 0.9808719158172607, "learning_rate": 4.8164693939750425e-05, "loss": 0.7838, "step": 5084 },
    { "epoch": 0.0440963481499362, "grad_norm": 0.8997575640678406, "learning_rate": 4.813297930184042e-05, "loss": 0.7816, "step": 5115 },
    { "epoch": 0.0443635987447843, "grad_norm": 0.8737884163856506, "learning_rate": 4.810100361140314e-05, "loss": 0.7785, "step": 5146 },
    { "epoch": 0.0446308493396324, "grad_norm": 0.7924982309341431, "learning_rate": 4.8068767229279885e-05, "loss": 0.7752, "step": 5177 },
    { "epoch": 0.0448980999344805, "grad_norm": 1.1685878038406372, "learning_rate": 4.8036270519253854e-05, "loss": 0.7881, "step": 5208 },
    { "epoch": 0.045165350529328595, "grad_norm": 1.0199975967407227, "learning_rate": 4.8003513848046e-05, "loss": 0.7663, "step": 5239 },
    { "epoch": 0.04543260112417669, "grad_norm": 0.8155254125595093, "learning_rate": 4.79704975853109e-05, "loss": 0.7775, "step": 5270 },
    { "epoch": 0.04569985171902479, "grad_norm": 0.976006805896759, "learning_rate": 4.793722210363262e-05, "loss": 0.7797, "step": 5301 },
    { "epoch": 0.04596710231387289, "grad_norm": 1.1252824068069458, "learning_rate": 4.7903687778520414e-05, "loss": 0.7824, "step": 5332 },
    { "epoch": 0.046234352908720994, "grad_norm": 0.7603053450584412, "learning_rate": 4.7869894988404593e-05, "loss": 0.7759, "step": 5363 },
    { "epoch": 0.04650160350356909, "grad_norm": 1.007767915725708, "learning_rate": 4.783584411463221e-05, "loss": 0.7832, "step": 5394 },
    { "epoch": 0.04676885409841719, "grad_norm": 1.1207255125045776, "learning_rate": 4.780153554146274e-05, "loss": 0.7849, "step": 5425 },
    { "epoch": 0.04703610469326529, "grad_norm": 1.0699808597564697, "learning_rate": 4.7766969656063766e-05, "loss": 0.7814, "step": 5456 },
    { "epoch": 0.047303355288113386, "grad_norm": 0.8154749274253845, "learning_rate": 4.773214684850662e-05, "loss": 0.7806, "step": 5487 },
    { "epoch": 0.047570605882961484, "grad_norm": 0.7383331060409546, "learning_rate": 4.769706751176193e-05, "loss": 0.7765, "step": 5518 },
    { "epoch": 0.04783785647780958, "grad_norm": 0.9729552865028381, "learning_rate": 4.7661732041695264e-05, "loss": 0.7754, "step": 5549 },
    { "epoch": 0.04810510707265768, "grad_norm": 0.9568310379981995, "learning_rate": 4.762614083706258e-05, "loss": 0.7733, "step": 5580 },
    { "epoch": 0.04837235766750578, "grad_norm": 1.0747005939483643, "learning_rate": 4.759029429950581e-05, "loss": 0.7764, "step": 5611 },
    { "epoch": 0.048639608262353876, "grad_norm": 1.2053782939910889, "learning_rate": 4.7554192833548235e-05, "loss": 0.7725, "step": 5642 },
    { "epoch": 0.048906858857201974, "grad_norm": 0.8168059587478638, "learning_rate": 4.751783684659e-05, "loss": 0.7779, "step": 5673 },
    { "epoch": 0.04917410945205007, "grad_norm": 0.812278151512146, "learning_rate": 4.748122674890348e-05, "loss": 0.7884, "step": 5704 },
    { "epoch": 0.04944136004689817, "grad_norm": 0.9640679359436035, "learning_rate": 4.7444362953628654e-05, "loss": 0.7794, "step": 5735 },
    { "epoch": 0.04970861064174627, "grad_norm": 0.8488368988037109, "learning_rate": 4.7407245876768424e-05, "loss": 0.7705, "step": 5766 },
    { "epoch": 0.049975861236594366, "grad_norm": 0.7826496362686157, "learning_rate": 4.736987593718397e-05, "loss": 0.7665, "step": 5797 },
    { "epoch": 0.050243111831442464, "grad_norm": 0.7388876676559448, "learning_rate": 4.733225355658999e-05, "loss": 0.7715, "step": 5828 },
    { "epoch": 0.05051036242629056, "grad_norm": 0.7776780128479004, "learning_rate": 4.7294379159549926e-05, "loss": 0.7777, "step": 5859 },
    { "epoch": 0.05077761302113866, "grad_norm": 1.1687864065170288, "learning_rate": 4.725625317347119e-05, "loss": 0.7698, "step": 5890 },
    { "epoch": 0.05104486361598676, "grad_norm": 1.069354772567749, "learning_rate": 4.7217876028600374e-05, "loss": 0.7671, "step": 5921 },
    { "epoch": 0.051312114210834855, "grad_norm": 0.955895185470581, "learning_rate": 4.717924815801832e-05, "loss": 0.7641, "step": 5952 },
    { "epoch": 0.05157936480568295, "grad_norm": 0.8063961267471313, "learning_rate": 4.714036999763532e-05, "loss": 0.772, "step": 5983 },
    { "epoch": 0.05184661540053105, "grad_norm": 0.9584303498268127, "learning_rate": 4.7101241986186116e-05, "loss": 0.762, "step": 6014 },
    { "epoch": 0.05211386599537915, "grad_norm": 0.9711636900901794, "learning_rate": 4.7061864565225e-05, "loss": 0.7742, "step": 6045 },
    { "epoch": 0.05238111659022725, "grad_norm": 0.8177659511566162, "learning_rate": 4.702223817912081e-05, "loss": 0.7532, "step": 6076 }
  ],
  "logging_steps": 31,
  "max_steps": 30517,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 3052,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.24270815620078e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}