|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.1052450084485672, |
|
"eval_steps": 500, |
|
"global_step": 12208, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0002672505948480982, |
|
"grad_norm": 3.2076423168182373, |
|
"learning_rate": 1.0157273918741808e-06, |
|
"loss": 1.4511, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.0005345011896961964, |
|
"grad_norm": 2.221529006958008, |
|
"learning_rate": 2.0314547837483616e-06, |
|
"loss": 1.3342, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.0008017517845442946, |
|
"grad_norm": 2.961315393447876, |
|
"learning_rate": 3.0471821756225426e-06, |
|
"loss": 1.2649, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0010690023793923928, |
|
"grad_norm": 2.311617612838745, |
|
"learning_rate": 4.062909567496723e-06, |
|
"loss": 1.2278, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.001336252974240491, |
|
"grad_norm": 3.4343724250793457, |
|
"learning_rate": 5.078636959370905e-06, |
|
"loss": 1.2061, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.0016035035690885892, |
|
"grad_norm": 2.421396017074585, |
|
"learning_rate": 6.094364351245085e-06, |
|
"loss": 1.1848, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.0018707541639366874, |
|
"grad_norm": 2.8230600357055664, |
|
"learning_rate": 7.110091743119267e-06, |
|
"loss": 1.1586, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.0021380047587847856, |
|
"grad_norm": 2.021296501159668, |
|
"learning_rate": 8.125819134993446e-06, |
|
"loss": 1.1552, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.002405255353632884, |
|
"grad_norm": 2.2329111099243164, |
|
"learning_rate": 9.141546526867629e-06, |
|
"loss": 1.1342, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.002672505948480982, |
|
"grad_norm": 2.8933441638946533, |
|
"learning_rate": 1.015727391874181e-05, |
|
"loss": 1.1092, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.0029397565433290805, |
|
"grad_norm": 2.510772705078125, |
|
"learning_rate": 1.117300131061599e-05, |
|
"loss": 1.108, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.0032070071381771785, |
|
"grad_norm": 2.170926094055176, |
|
"learning_rate": 1.218872870249017e-05, |
|
"loss": 1.0982, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.003474257733025277, |
|
"grad_norm": 2.1116831302642822, |
|
"learning_rate": 1.3204456094364351e-05, |
|
"loss": 1.0977, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.003741508327873375, |
|
"grad_norm": 2.5462026596069336, |
|
"learning_rate": 1.4220183486238533e-05, |
|
"loss": 1.0797, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.004008758922721473, |
|
"grad_norm": 2.5239830017089844, |
|
"learning_rate": 1.5235910878112714e-05, |
|
"loss": 1.0691, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.004276009517569571, |
|
"grad_norm": 2.640591859817505, |
|
"learning_rate": 1.6251638269986893e-05, |
|
"loss": 1.0547, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.004543260112417669, |
|
"grad_norm": 1.9566192626953125, |
|
"learning_rate": 1.7267365661861077e-05, |
|
"loss": 1.0526, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.004810510707265768, |
|
"grad_norm": 2.515118360519409, |
|
"learning_rate": 1.8283093053735257e-05, |
|
"loss": 1.0614, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.005077761302113866, |
|
"grad_norm": 2.1588094234466553, |
|
"learning_rate": 1.9298820445609438e-05, |
|
"loss": 1.0461, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.005345011896961964, |
|
"grad_norm": 2.448280096054077, |
|
"learning_rate": 2.031454783748362e-05, |
|
"loss": 1.0333, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.005612262491810062, |
|
"grad_norm": 2.9539694786071777, |
|
"learning_rate": 2.13302752293578e-05, |
|
"loss": 1.0224, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.005879513086658161, |
|
"grad_norm": 2.6804676055908203, |
|
"learning_rate": 2.234600262123198e-05, |
|
"loss": 1.0145, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.006146763681506259, |
|
"grad_norm": 2.176100730895996, |
|
"learning_rate": 2.336173001310616e-05, |
|
"loss": 1.0164, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.006414014276354357, |
|
"grad_norm": 1.9107022285461426, |
|
"learning_rate": 2.437745740498034e-05, |
|
"loss": 1.004, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.006681264871202455, |
|
"grad_norm": 3.175410270690918, |
|
"learning_rate": 2.5393184796854525e-05, |
|
"loss": 0.9986, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.006948515466050554, |
|
"grad_norm": 1.7941040992736816, |
|
"learning_rate": 2.6408912188728702e-05, |
|
"loss": 0.9979, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.007215766060898652, |
|
"grad_norm": 2.428041458129883, |
|
"learning_rate": 2.7424639580602886e-05, |
|
"loss": 1.0056, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.00748301665574675, |
|
"grad_norm": 2.1440467834472656, |
|
"learning_rate": 2.8440366972477066e-05, |
|
"loss": 0.9908, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.007750267250594848, |
|
"grad_norm": 2.138157844543457, |
|
"learning_rate": 2.9456094364351244e-05, |
|
"loss": 0.986, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.008017517845442947, |
|
"grad_norm": 2.144076108932495, |
|
"learning_rate": 3.0471821756225428e-05, |
|
"loss": 0.9768, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.008284768440291045, |
|
"grad_norm": 2.116927146911621, |
|
"learning_rate": 3.148754914809961e-05, |
|
"loss": 0.975, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.008552019035139143, |
|
"grad_norm": 2.0087051391601562, |
|
"learning_rate": 3.2503276539973785e-05, |
|
"loss": 0.9746, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.00881926962998724, |
|
"grad_norm": 2.008014440536499, |
|
"learning_rate": 3.351900393184797e-05, |
|
"loss": 0.9612, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.009086520224835339, |
|
"grad_norm": 2.5932323932647705, |
|
"learning_rate": 3.453473132372215e-05, |
|
"loss": 0.9611, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.009353770819683437, |
|
"grad_norm": 1.7766073942184448, |
|
"learning_rate": 3.555045871559633e-05, |
|
"loss": 0.9529, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.009621021414531536, |
|
"grad_norm": 1.699992299079895, |
|
"learning_rate": 3.6566186107470514e-05, |
|
"loss": 0.9638, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.009888272009379634, |
|
"grad_norm": 2.139831304550171, |
|
"learning_rate": 3.7581913499344695e-05, |
|
"loss": 0.9518, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.010155522604227732, |
|
"grad_norm": 2.510404109954834, |
|
"learning_rate": 3.8597640891218876e-05, |
|
"loss": 0.9557, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.01042277319907583, |
|
"grad_norm": 1.7539429664611816, |
|
"learning_rate": 3.9613368283093056e-05, |
|
"loss": 0.949, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.010690023793923928, |
|
"grad_norm": 1.7679015398025513, |
|
"learning_rate": 4.062909567496724e-05, |
|
"loss": 0.9285, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.010957274388772026, |
|
"grad_norm": 1.6701771020889282, |
|
"learning_rate": 4.164482306684142e-05, |
|
"loss": 0.9391, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.011224524983620124, |
|
"grad_norm": 1.7283812761306763, |
|
"learning_rate": 4.26605504587156e-05, |
|
"loss": 0.9328, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.011491775578468222, |
|
"grad_norm": 1.363319754600525, |
|
"learning_rate": 4.367627785058978e-05, |
|
"loss": 0.9353, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.011759026173316322, |
|
"grad_norm": 1.847582459449768, |
|
"learning_rate": 4.469200524246396e-05, |
|
"loss": 0.9272, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.01202627676816442, |
|
"grad_norm": 1.6437714099884033, |
|
"learning_rate": 4.570773263433814e-05, |
|
"loss": 0.9219, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.012293527363012518, |
|
"grad_norm": 1.979443073272705, |
|
"learning_rate": 4.672346002621232e-05, |
|
"loss": 0.91, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.012560777957860616, |
|
"grad_norm": 1.601218342781067, |
|
"learning_rate": 4.77391874180865e-05, |
|
"loss": 0.9229, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.012828028552708714, |
|
"grad_norm": 2.36148738861084, |
|
"learning_rate": 4.875491480996068e-05, |
|
"loss": 0.9199, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.013095279147556812, |
|
"grad_norm": 4.211123466491699, |
|
"learning_rate": 4.977064220183487e-05, |
|
"loss": 1.0419, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.01336252974240491, |
|
"grad_norm": 1.620786428451538, |
|
"learning_rate": 4.9999915451558777e-05, |
|
"loss": 0.954, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.01362978033725301, |
|
"grad_norm": 3.2490246295928955, |
|
"learning_rate": 4.999955597496219e-05, |
|
"loss": 0.9464, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.013897030932101108, |
|
"grad_norm": 1.9302624464035034, |
|
"learning_rate": 4.9998914381774255e-05, |
|
"loss": 0.9294, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.014164281526949206, |
|
"grad_norm": 1.5750601291656494, |
|
"learning_rate": 4.999799067923527e-05, |
|
"loss": 0.922, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.014431532121797304, |
|
"grad_norm": 53.0409049987793, |
|
"learning_rate": 4.999678487776908e-05, |
|
"loss": 1.0611, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.014698782716645402, |
|
"grad_norm": 1.841700553894043, |
|
"learning_rate": 4.9995296990983006e-05, |
|
"loss": 1.0363, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.0149660333114935, |
|
"grad_norm": 1.2549737691879272, |
|
"learning_rate": 4.999352703566763e-05, |
|
"loss": 0.938, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.015233283906341597, |
|
"grad_norm": 1.3289740085601807, |
|
"learning_rate": 4.999147503179668e-05, |
|
"loss": 0.9113, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.015500534501189695, |
|
"grad_norm": 1.5228811502456665, |
|
"learning_rate": 4.998914100252672e-05, |
|
"loss": 0.921, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 0.015767785096037795, |
|
"grad_norm": 1.5058027505874634, |
|
"learning_rate": 4.998652497419696e-05, |
|
"loss": 0.9214, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 0.016035035690885893, |
|
"grad_norm": 9.535687446594238, |
|
"learning_rate": 4.9983626976328927e-05, |
|
"loss": 1.3068, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.01630228628573399, |
|
"grad_norm": 2.5820651054382324, |
|
"learning_rate": 4.998044704162613e-05, |
|
"loss": 1.7033, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 0.01656953688058209, |
|
"grad_norm": 148.47412109375, |
|
"learning_rate": 4.9976985205973705e-05, |
|
"loss": 1.6935, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 0.016836787475430187, |
|
"grad_norm": 8.416805267333984, |
|
"learning_rate": 4.997324150843799e-05, |
|
"loss": 1.6607, |
|
"step": 1953 |
|
}, |
|
{ |
|
"epoch": 0.017104038070278285, |
|
"grad_norm": 5.2713470458984375, |
|
"learning_rate": 4.99692159912661e-05, |
|
"loss": 1.1301, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 0.017371288665126383, |
|
"grad_norm": 2.096306800842285, |
|
"learning_rate": 4.996490869988546e-05, |
|
"loss": 0.9931, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.01763853925997448, |
|
"grad_norm": 1.5476640462875366, |
|
"learning_rate": 4.996031968290326e-05, |
|
"loss": 0.9514, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 0.01790578985482258, |
|
"grad_norm": 1.727232813835144, |
|
"learning_rate": 4.995544899210594e-05, |
|
"loss": 0.9409, |
|
"step": 2077 |
|
}, |
|
{ |
|
"epoch": 0.018173040449670677, |
|
"grad_norm": 26.08816909790039, |
|
"learning_rate": 4.9950296682458583e-05, |
|
"loss": 0.9368, |
|
"step": 2108 |
|
}, |
|
{ |
|
"epoch": 0.018440291044518775, |
|
"grad_norm": 1.7082202434539795, |
|
"learning_rate": 4.994486281210429e-05, |
|
"loss": 0.9155, |
|
"step": 2139 |
|
}, |
|
{ |
|
"epoch": 0.018707541639366873, |
|
"grad_norm": 1.5962992906570435, |
|
"learning_rate": 4.9939147442363566e-05, |
|
"loss": 0.9233, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.018974792234214975, |
|
"grad_norm": 2.024268627166748, |
|
"learning_rate": 4.9933150637733574e-05, |
|
"loss": 0.9128, |
|
"step": 2201 |
|
}, |
|
{ |
|
"epoch": 0.019242042829063073, |
|
"grad_norm": 1.4828873872756958, |
|
"learning_rate": 4.992687246588743e-05, |
|
"loss": 0.8957, |
|
"step": 2232 |
|
}, |
|
{ |
|
"epoch": 0.01950929342391117, |
|
"grad_norm": 1.745612382888794, |
|
"learning_rate": 4.992031299767347e-05, |
|
"loss": 0.9005, |
|
"step": 2263 |
|
}, |
|
{ |
|
"epoch": 0.01977654401875927, |
|
"grad_norm": 1.354095458984375, |
|
"learning_rate": 4.9913472307114386e-05, |
|
"loss": 0.9002, |
|
"step": 2294 |
|
}, |
|
{ |
|
"epoch": 0.020043794613607367, |
|
"grad_norm": 2.6227681636810303, |
|
"learning_rate": 4.9906350471406446e-05, |
|
"loss": 0.9004, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.020311045208455464, |
|
"grad_norm": 1.3870893716812134, |
|
"learning_rate": 4.989894757091861e-05, |
|
"loss": 0.9018, |
|
"step": 2356 |
|
}, |
|
{ |
|
"epoch": 0.020578295803303562, |
|
"grad_norm": 1.2884349822998047, |
|
"learning_rate": 4.989126368919158e-05, |
|
"loss": 0.8993, |
|
"step": 2387 |
|
}, |
|
{ |
|
"epoch": 0.02084554639815166, |
|
"grad_norm": 1.215851068496704, |
|
"learning_rate": 4.988329891293693e-05, |
|
"loss": 0.8925, |
|
"step": 2418 |
|
}, |
|
{ |
|
"epoch": 0.02111279699299976, |
|
"grad_norm": 1.4246628284454346, |
|
"learning_rate": 4.987505333203608e-05, |
|
"loss": 0.8893, |
|
"step": 2449 |
|
}, |
|
{ |
|
"epoch": 0.021380047587847856, |
|
"grad_norm": 1.5495259761810303, |
|
"learning_rate": 4.9866527039539276e-05, |
|
"loss": 0.8749, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.021647298182695954, |
|
"grad_norm": 1.0121793746948242, |
|
"learning_rate": 4.9857720131664594e-05, |
|
"loss": 0.8803, |
|
"step": 2511 |
|
}, |
|
{ |
|
"epoch": 0.021914548777544052, |
|
"grad_norm": 1.1507561206817627, |
|
"learning_rate": 4.9848632707796773e-05, |
|
"loss": 0.8843, |
|
"step": 2542 |
|
}, |
|
{ |
|
"epoch": 0.02218179937239215, |
|
"grad_norm": 1.612584114074707, |
|
"learning_rate": 4.9839264870486155e-05, |
|
"loss": 0.8733, |
|
"step": 2573 |
|
}, |
|
{ |
|
"epoch": 0.02244904996724025, |
|
"grad_norm": 1.4475393295288086, |
|
"learning_rate": 4.9829616725447526e-05, |
|
"loss": 0.8669, |
|
"step": 2604 |
|
}, |
|
{ |
|
"epoch": 0.022716300562088346, |
|
"grad_norm": 1.9612644910812378, |
|
"learning_rate": 4.981968838155888e-05, |
|
"loss": 0.8791, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.022983551156936444, |
|
"grad_norm": 1.6032893657684326, |
|
"learning_rate": 4.980947995086024e-05, |
|
"loss": 0.8646, |
|
"step": 2666 |
|
}, |
|
{ |
|
"epoch": 0.023250801751784546, |
|
"grad_norm": 1.6368708610534668, |
|
"learning_rate": 4.979899154855234e-05, |
|
"loss": 0.871, |
|
"step": 2697 |
|
}, |
|
{ |
|
"epoch": 0.023518052346632644, |
|
"grad_norm": 1.0245656967163086, |
|
"learning_rate": 4.9788223292995386e-05, |
|
"loss": 0.8534, |
|
"step": 2728 |
|
}, |
|
{ |
|
"epoch": 0.023785302941480742, |
|
"grad_norm": 0.9378401041030884, |
|
"learning_rate": 4.977717530570768e-05, |
|
"loss": 0.8478, |
|
"step": 2759 |
|
}, |
|
{ |
|
"epoch": 0.02405255353632884, |
|
"grad_norm": 1.4262951612472534, |
|
"learning_rate": 4.976584771136425e-05, |
|
"loss": 0.8634, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.024319804131176938, |
|
"grad_norm": 1.3320890665054321, |
|
"learning_rate": 4.975424063779547e-05, |
|
"loss": 0.8505, |
|
"step": 2821 |
|
}, |
|
{ |
|
"epoch": 0.024587054726025036, |
|
"grad_norm": 1.043971061706543, |
|
"learning_rate": 4.974235421598557e-05, |
|
"loss": 0.863, |
|
"step": 2852 |
|
}, |
|
{ |
|
"epoch": 0.024854305320873134, |
|
"grad_norm": 1.387088418006897, |
|
"learning_rate": 4.973018858007122e-05, |
|
"loss": 0.8505, |
|
"step": 2883 |
|
}, |
|
{ |
|
"epoch": 0.025121555915721232, |
|
"grad_norm": 1.1267746686935425, |
|
"learning_rate": 4.9717743867339963e-05, |
|
"loss": 0.8528, |
|
"step": 2914 |
|
}, |
|
{ |
|
"epoch": 0.02538880651056933, |
|
"grad_norm": 0.9636305570602417, |
|
"learning_rate": 4.9705020218228695e-05, |
|
"loss": 0.8453, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.025656057105417428, |
|
"grad_norm": 1.1140860319137573, |
|
"learning_rate": 4.969201777632205e-05, |
|
"loss": 0.8664, |
|
"step": 2976 |
|
}, |
|
{ |
|
"epoch": 0.025923307700265526, |
|
"grad_norm": 1.1886147260665894, |
|
"learning_rate": 4.9678736688350846e-05, |
|
"loss": 0.8569, |
|
"step": 3007 |
|
}, |
|
{ |
|
"epoch": 0.026190558295113624, |
|
"grad_norm": 1.0308467149734497, |
|
"learning_rate": 4.966517710419033e-05, |
|
"loss": 0.85, |
|
"step": 3038 |
|
}, |
|
{ |
|
"epoch": 0.02645780888996172, |
|
"grad_norm": 1.6353480815887451, |
|
"learning_rate": 4.965133917685858e-05, |
|
"loss": 0.8414, |
|
"step": 3069 |
|
}, |
|
{ |
|
"epoch": 0.02672505948480982, |
|
"grad_norm": 1.2031575441360474, |
|
"learning_rate": 4.9637223062514714e-05, |
|
"loss": 0.8435, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.026992310079657918, |
|
"grad_norm": 1.2575979232788086, |
|
"learning_rate": 4.962282892045718e-05, |
|
"loss": 0.8415, |
|
"step": 3131 |
|
}, |
|
{ |
|
"epoch": 0.02725956067450602, |
|
"grad_norm": 1.338584303855896, |
|
"learning_rate": 4.9608156913121904e-05, |
|
"loss": 0.8414, |
|
"step": 3162 |
|
}, |
|
{ |
|
"epoch": 0.027526811269354117, |
|
"grad_norm": 1.3699461221694946, |
|
"learning_rate": 4.959320720608049e-05, |
|
"loss": 0.8397, |
|
"step": 3193 |
|
}, |
|
{ |
|
"epoch": 0.027794061864202215, |
|
"grad_norm": 1.1471112966537476, |
|
"learning_rate": 4.9577979968038354e-05, |
|
"loss": 0.8304, |
|
"step": 3224 |
|
}, |
|
{ |
|
"epoch": 0.028061312459050313, |
|
"grad_norm": 1.1752680540084839, |
|
"learning_rate": 4.956247537083282e-05, |
|
"loss": 0.8408, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.02832856305389841, |
|
"grad_norm": 1.089552879333496, |
|
"learning_rate": 4.9546693589431145e-05, |
|
"loss": 0.8286, |
|
"step": 3286 |
|
}, |
|
{ |
|
"epoch": 0.02859581364874651, |
|
"grad_norm": 1.0688341856002808, |
|
"learning_rate": 4.9530634801928595e-05, |
|
"loss": 0.8341, |
|
"step": 3317 |
|
}, |
|
{ |
|
"epoch": 0.028863064243594607, |
|
"grad_norm": 0.9290914535522461, |
|
"learning_rate": 4.9514299189546395e-05, |
|
"loss": 0.8307, |
|
"step": 3348 |
|
}, |
|
{ |
|
"epoch": 0.029130314838442705, |
|
"grad_norm": 1.3211214542388916, |
|
"learning_rate": 4.949768693662973e-05, |
|
"loss": 0.8349, |
|
"step": 3379 |
|
}, |
|
{ |
|
"epoch": 0.029397565433290803, |
|
"grad_norm": 1.8893773555755615, |
|
"learning_rate": 4.948079823064559e-05, |
|
"loss": 0.8289, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.0296648160281389, |
|
"grad_norm": 1.3086936473846436, |
|
"learning_rate": 4.946363326218074e-05, |
|
"loss": 0.8231, |
|
"step": 3441 |
|
}, |
|
{ |
|
"epoch": 0.029932066622987, |
|
"grad_norm": 1.356311559677124, |
|
"learning_rate": 4.9446192224939525e-05, |
|
"loss": 0.8343, |
|
"step": 3472 |
|
}, |
|
{ |
|
"epoch": 0.030199317217835097, |
|
"grad_norm": 1.13872492313385, |
|
"learning_rate": 4.942847531574167e-05, |
|
"loss": 0.828, |
|
"step": 3503 |
|
}, |
|
{ |
|
"epoch": 0.030466567812683195, |
|
"grad_norm": 1.1912773847579956, |
|
"learning_rate": 4.941048273452008e-05, |
|
"loss": 0.8236, |
|
"step": 3534 |
|
}, |
|
{ |
|
"epoch": 0.030733818407531293, |
|
"grad_norm": 0.9231215715408325, |
|
"learning_rate": 4.9392214684318605e-05, |
|
"loss": 0.8226, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.03100106900237939, |
|
"grad_norm": 1.0268129110336304, |
|
"learning_rate": 4.93736713712897e-05, |
|
"loss": 0.8218, |
|
"step": 3596 |
|
}, |
|
{ |
|
"epoch": 0.03126831959722749, |
|
"grad_norm": 0.974876344203949, |
|
"learning_rate": 4.9354853004692124e-05, |
|
"loss": 0.8149, |
|
"step": 3627 |
|
}, |
|
{ |
|
"epoch": 0.03153557019207559, |
|
"grad_norm": 1.2175925970077515, |
|
"learning_rate": 4.93357597968886e-05, |
|
"loss": 0.8152, |
|
"step": 3658 |
|
}, |
|
{ |
|
"epoch": 0.03180282078692369, |
|
"grad_norm": 1.2338181734085083, |
|
"learning_rate": 4.931639196334338e-05, |
|
"loss": 0.8089, |
|
"step": 3689 |
|
}, |
|
{ |
|
"epoch": 0.032070071381771786, |
|
"grad_norm": 1.1462433338165283, |
|
"learning_rate": 4.9296749722619826e-05, |
|
"loss": 0.8216, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.032337321976619884, |
|
"grad_norm": 1.1631278991699219, |
|
"learning_rate": 4.9276833296377966e-05, |
|
"loss": 0.8272, |
|
"step": 3751 |
|
}, |
|
{ |
|
"epoch": 0.03260457257146798, |
|
"grad_norm": 0.903836727142334, |
|
"learning_rate": 4.925664290937196e-05, |
|
"loss": 0.816, |
|
"step": 3782 |
|
}, |
|
{ |
|
"epoch": 0.03287182316631608, |
|
"grad_norm": 0.944284975528717, |
|
"learning_rate": 4.9236178789447576e-05, |
|
"loss": 0.8031, |
|
"step": 3813 |
|
}, |
|
{ |
|
"epoch": 0.03313907376116418, |
|
"grad_norm": 1.1210039854049683, |
|
"learning_rate": 4.921544116753962e-05, |
|
"loss": 0.8096, |
|
"step": 3844 |
|
}, |
|
{ |
|
"epoch": 0.033406324356012276, |
|
"grad_norm": 1.0448529720306396, |
|
"learning_rate": 4.919443027766935e-05, |
|
"loss": 0.8059, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.033673574950860374, |
|
"grad_norm": 1.0655525922775269, |
|
"learning_rate": 4.91731463569418e-05, |
|
"loss": 0.81, |
|
"step": 3906 |
|
}, |
|
{ |
|
"epoch": 0.03394082554570847, |
|
"grad_norm": 1.223928451538086, |
|
"learning_rate": 4.915158964554312e-05, |
|
"loss": 0.8175, |
|
"step": 3937 |
|
}, |
|
{ |
|
"epoch": 0.03420807614055657, |
|
"grad_norm": 1.0895342826843262, |
|
"learning_rate": 4.912976038673786e-05, |
|
"loss": 0.7992, |
|
"step": 3968 |
|
}, |
|
{ |
|
"epoch": 0.03447532673540467, |
|
"grad_norm": 1.1117792129516602, |
|
"learning_rate": 4.9107658826866254e-05, |
|
"loss": 0.8122, |
|
"step": 3999 |
|
}, |
|
{ |
|
"epoch": 0.034742577330252766, |
|
"grad_norm": 1.0806598663330078, |
|
"learning_rate": 4.908528521534139e-05, |
|
"loss": 0.811, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.035009827925100864, |
|
"grad_norm": 0.7528677582740784, |
|
"learning_rate": 4.906263980464644e-05, |
|
"loss": 0.8036, |
|
"step": 4061 |
|
}, |
|
{ |
|
"epoch": 0.03527707851994896, |
|
"grad_norm": 1.1509230136871338, |
|
"learning_rate": 4.903972285033178e-05, |
|
"loss": 0.8047, |
|
"step": 4092 |
|
}, |
|
{ |
|
"epoch": 0.03554432911479706, |
|
"grad_norm": 1.1190931797027588, |
|
"learning_rate": 4.901653461101213e-05, |
|
"loss": 0.8123, |
|
"step": 4123 |
|
}, |
|
{ |
|
"epoch": 0.03581157970964516, |
|
"grad_norm": 0.8739308714866638, |
|
"learning_rate": 4.8993075348363626e-05, |
|
"loss": 0.8015, |
|
"step": 4154 |
|
}, |
|
{ |
|
"epoch": 0.036078830304493256, |
|
"grad_norm": 0.9889201521873474, |
|
"learning_rate": 4.896934532712084e-05, |
|
"loss": 0.8056, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 0.036346080899341354, |
|
"grad_norm": 1.1964142322540283, |
|
"learning_rate": 4.8945344815073846e-05, |
|
"loss": 0.8083, |
|
"step": 4216 |
|
}, |
|
{ |
|
"epoch": 0.03661333149418945, |
|
"grad_norm": 1.0793588161468506, |
|
"learning_rate": 4.892107408306516e-05, |
|
"loss": 0.7976, |
|
"step": 4247 |
|
}, |
|
{ |
|
"epoch": 0.03688058208903755, |
|
"grad_norm": 1.2313084602355957, |
|
"learning_rate": 4.889653340498669e-05, |
|
"loss": 0.7954, |
|
"step": 4278 |
|
}, |
|
{ |
|
"epoch": 0.03714783268388565, |
|
"grad_norm": 1.1145700216293335, |
|
"learning_rate": 4.8871723057776664e-05, |
|
"loss": 0.7947, |
|
"step": 4309 |
|
}, |
|
{ |
|
"epoch": 0.037415083278733746, |
|
"grad_norm": 0.9714220762252808, |
|
"learning_rate": 4.8846643321416476e-05, |
|
"loss": 0.7973, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.03768233387358185, |
|
"grad_norm": 1.0865004062652588, |
|
"learning_rate": 4.882129447892753e-05, |
|
"loss": 0.7918, |
|
"step": 4371 |
|
}, |
|
{ |
|
"epoch": 0.03794958446842995, |
|
"grad_norm": 0.8420351147651672, |
|
"learning_rate": 4.8795676816368076e-05, |
|
"loss": 0.805, |
|
"step": 4402 |
|
}, |
|
{ |
|
"epoch": 0.03821683506327805, |
|
"grad_norm": 1.0622302293777466, |
|
"learning_rate": 4.876979062282995e-05, |
|
"loss": 0.7977, |
|
"step": 4433 |
|
}, |
|
{ |
|
"epoch": 0.038484085658126145, |
|
"grad_norm": 0.8536683320999146, |
|
"learning_rate": 4.8743636190435325e-05, |
|
"loss": 0.7946, |
|
"step": 4464 |
|
}, |
|
{ |
|
"epoch": 0.03875133625297424, |
|
"grad_norm": 0.9905999302864075, |
|
"learning_rate": 4.871721381433344e-05, |
|
"loss": 0.7932, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 0.03901858684782234, |
|
"grad_norm": 1.005560278892517, |
|
"learning_rate": 4.869052379269719e-05, |
|
"loss": 0.7956, |
|
"step": 4526 |
|
}, |
|
{ |
|
"epoch": 0.03928583744267044, |
|
"grad_norm": 0.8386696577072144, |
|
"learning_rate": 4.866356642671985e-05, |
|
"loss": 0.7865, |
|
"step": 4557 |
|
}, |
|
{ |
|
"epoch": 0.03955308803751854, |
|
"grad_norm": 0.8390759229660034, |
|
"learning_rate": 4.8636342020611634e-05, |
|
"loss": 0.7925, |
|
"step": 4588 |
|
}, |
|
{ |
|
"epoch": 0.039820338632366635, |
|
"grad_norm": 0.7457160949707031, |
|
"learning_rate": 4.860885088159626e-05, |
|
"loss": 0.7911, |
|
"step": 4619 |
|
}, |
|
{ |
|
"epoch": 0.04008758922721473, |
|
"grad_norm": 0.9717801213264465, |
|
"learning_rate": 4.858109331990751e-05, |
|
"loss": 0.7917, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.04035483982206283, |
|
"grad_norm": 0.8517304062843323, |
|
"learning_rate": 4.855306964878567e-05, |
|
"loss": 0.7966, |
|
"step": 4681 |
|
}, |
|
{ |
|
"epoch": 0.04062209041691093, |
|
"grad_norm": 0.7597792744636536, |
|
"learning_rate": 4.8524780184474084e-05, |
|
"loss": 0.7798, |
|
"step": 4712 |
|
}, |
|
{ |
|
"epoch": 0.04088934101175903, |
|
"grad_norm": 0.9633410573005676, |
|
"learning_rate": 4.8496225246215496e-05, |
|
"loss": 0.784, |
|
"step": 4743 |
|
}, |
|
{ |
|
"epoch": 0.041156591606607125, |
|
"grad_norm": 0.8668884634971619, |
|
"learning_rate": 4.8467405156248505e-05, |
|
"loss": 0.7925, |
|
"step": 4774 |
|
}, |
|
{ |
|
"epoch": 0.04142384220145522, |
|
"grad_norm": 1.348170280456543, |
|
"learning_rate": 4.843832023980392e-05, |
|
"loss": 0.7882, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 0.04169109279630332, |
|
"grad_norm": 0.9085477590560913, |
|
"learning_rate": 4.840897082510106e-05, |
|
"loss": 0.7834, |
|
"step": 4836 |
|
}, |
|
{ |
|
"epoch": 0.04195834339115142, |
|
"grad_norm": 1.2533594369888306, |
|
"learning_rate": 4.8379357243344084e-05, |
|
"loss": 0.7968, |
|
"step": 4867 |
|
}, |
|
{ |
|
"epoch": 0.04222559398599952, |
|
"grad_norm": 1.0250332355499268, |
|
"learning_rate": 4.8349479828718236e-05, |
|
"loss": 0.7755, |
|
"step": 4898 |
|
}, |
|
{ |
|
"epoch": 0.042492844580847615, |
|
"grad_norm": 0.8610180616378784, |
|
"learning_rate": 4.8319338918386075e-05, |
|
"loss": 0.7819, |
|
"step": 4929 |
|
}, |
|
{ |
|
"epoch": 0.04276009517569571, |
|
"grad_norm": 0.8235286474227905, |
|
"learning_rate": 4.828893485248369e-05, |
|
"loss": 0.7834, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.04302734577054381, |
|
"grad_norm": 0.95796799659729, |
|
"learning_rate": 4.825826797411682e-05, |
|
"loss": 0.7822, |
|
"step": 4991 |
|
}, |
|
{ |
|
"epoch": 0.04329459636539191, |
|
"grad_norm": 1.022430419921875, |
|
"learning_rate": 4.822733862935702e-05, |
|
"loss": 0.781, |
|
"step": 5022 |
|
}, |
|
{ |
|
"epoch": 0.04356184696024001, |
|
"grad_norm": 0.9080584645271301, |
|
"learning_rate": 4.819614716723775e-05, |
|
"loss": 0.783, |
|
"step": 5053 |
|
}, |
|
{ |
|
"epoch": 0.043829097555088105, |
|
"grad_norm": 0.9808719158172607, |
|
"learning_rate": 4.8164693939750425e-05, |
|
"loss": 0.7838, |
|
"step": 5084 |
|
}, |
|
{ |
|
"epoch": 0.0440963481499362, |
|
"grad_norm": 0.8997575640678406, |
|
"learning_rate": 4.813297930184042e-05, |
|
"loss": 0.7816, |
|
"step": 5115 |
|
}, |
|
{ |
|
"epoch": 0.0443635987447843, |
|
"grad_norm": 0.8737884163856506, |
|
"learning_rate": 4.810100361140314e-05, |
|
"loss": 0.7785, |
|
"step": 5146 |
|
}, |
|
{ |
|
"epoch": 0.0446308493396324, |
|
"grad_norm": 0.7924982309341431, |
|
"learning_rate": 4.8068767229279885e-05, |
|
"loss": 0.7752, |
|
"step": 5177 |
|
}, |
|
{ |
|
"epoch": 0.0448980999344805, |
|
"grad_norm": 1.1685878038406372, |
|
"learning_rate": 4.8036270519253854e-05, |
|
"loss": 0.7881, |
|
"step": 5208 |
|
}, |
|
{ |
|
"epoch": 0.045165350529328595, |
|
"grad_norm": 1.0199975967407227, |
|
"learning_rate": 4.8003513848046e-05, |
|
"loss": 0.7663, |
|
"step": 5239 |
|
}, |
|
{ |
|
"epoch": 0.04543260112417669, |
|
"grad_norm": 0.8155254125595093, |
|
"learning_rate": 4.79704975853109e-05, |
|
"loss": 0.7775, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.04569985171902479, |
|
"grad_norm": 0.976006805896759, |
|
"learning_rate": 4.793722210363262e-05, |
|
"loss": 0.7797, |
|
"step": 5301 |
|
}, |
|
{ |
|
"epoch": 0.04596710231387289, |
|
"grad_norm": 1.1252824068069458, |
|
"learning_rate": 4.7903687778520414e-05, |
|
"loss": 0.7824, |
|
"step": 5332 |
|
}, |
|
{ |
|
"epoch": 0.046234352908720994, |
|
"grad_norm": 0.7603053450584412, |
|
"learning_rate": 4.7869894988404593e-05, |
|
"loss": 0.7759, |
|
"step": 5363 |
|
}, |
|
{ |
|
"epoch": 0.04650160350356909, |
|
"grad_norm": 1.007767915725708, |
|
"learning_rate": 4.783584411463221e-05, |
|
"loss": 0.7832, |
|
"step": 5394 |
|
}, |
|
{ |
|
"epoch": 0.04676885409841719, |
|
"grad_norm": 1.1207255125045776, |
|
"learning_rate": 4.780153554146274e-05, |
|
"loss": 0.7849, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 0.04703610469326529, |
|
"grad_norm": 1.0699808597564697, |
|
"learning_rate": 4.7766969656063766e-05, |
|
"loss": 0.7814, |
|
"step": 5456 |
|
}, |
|
{ |
|
"epoch": 0.047303355288113386, |
|
"grad_norm": 0.8154749274253845, |
|
"learning_rate": 4.773214684850662e-05, |
|
"loss": 0.7806, |
|
"step": 5487 |
|
}, |
|
{ |
|
"epoch": 0.047570605882961484, |
|
"grad_norm": 0.7383331060409546, |
|
"learning_rate": 4.769706751176193e-05, |
|
"loss": 0.7765, |
|
"step": 5518 |
|
}, |
|
{ |
|
"epoch": 0.04783785647780958, |
|
"grad_norm": 0.9729552865028381, |
|
"learning_rate": 4.7661732041695264e-05, |
|
"loss": 0.7754, |
|
"step": 5549 |
|
}, |
|
{ |
|
"epoch": 0.04810510707265768, |
|
"grad_norm": 0.9568310379981995, |
|
"learning_rate": 4.762614083706258e-05, |
|
"loss": 0.7733, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.04837235766750578, |
|
"grad_norm": 1.0747005939483643, |
|
"learning_rate": 4.759029429950581e-05, |
|
"loss": 0.7764, |
|
"step": 5611 |
|
}, |
|
{ |
|
"epoch": 0.048639608262353876, |
|
"grad_norm": 1.2053782939910889, |
|
"learning_rate": 4.7554192833548235e-05, |
|
"loss": 0.7725, |
|
"step": 5642 |
|
}, |
|
{ |
|
"epoch": 0.048906858857201974, |
|
"grad_norm": 0.8168059587478638, |
|
"learning_rate": 4.751783684659e-05, |
|
"loss": 0.7779, |
|
"step": 5673 |
|
}, |
|
{ |
|
"epoch": 0.04917410945205007, |
|
"grad_norm": 0.812278151512146, |
|
"learning_rate": 4.748122674890348e-05, |
|
"loss": 0.7884, |
|
"step": 5704 |
|
}, |
|
{ |
|
"epoch": 0.04944136004689817, |
|
"grad_norm": 0.9640679359436035, |
|
"learning_rate": 4.7444362953628654e-05, |
|
"loss": 0.7794, |
|
"step": 5735 |
|
}, |
|
{ |
|
"epoch": 0.04970861064174627, |
|
"grad_norm": 0.8488368988037109, |
|
"learning_rate": 4.7407245876768424e-05, |
|
"loss": 0.7705, |
|
"step": 5766 |
|
}, |
|
{ |
|
"epoch": 0.049975861236594366, |
|
"grad_norm": 0.7826496362686157, |
|
"learning_rate": 4.736987593718397e-05, |
|
"loss": 0.7665, |
|
"step": 5797 |
|
}, |
|
{ |
|
"epoch": 0.050243111831442464, |
|
"grad_norm": 0.7388876676559448, |
|
"learning_rate": 4.733225355658999e-05, |
|
"loss": 0.7715, |
|
"step": 5828 |
|
}, |
|
{ |
|
"epoch": 0.05051036242629056, |
|
"grad_norm": 0.7776780128479004, |
|
"learning_rate": 4.7294379159549926e-05, |
|
"loss": 0.7777, |
|
"step": 5859 |
|
}, |
|
{ |
|
"epoch": 0.05077761302113866, |
|
"grad_norm": 1.1687864065170288, |
|
"learning_rate": 4.725625317347119e-05, |
|
"loss": 0.7698, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.05104486361598676, |
|
"grad_norm": 1.069354772567749, |
|
"learning_rate": 4.7217876028600374e-05, |
|
"loss": 0.7671, |
|
"step": 5921 |
|
}, |
|
{ |
|
"epoch": 0.051312114210834855, |
|
"grad_norm": 0.955895185470581, |
|
"learning_rate": 4.717924815801832e-05, |
|
"loss": 0.7641, |
|
"step": 5952 |
|
}, |
|
{ |
|
"epoch": 0.05157936480568295, |
|
"grad_norm": 0.8063961267471313, |
|
"learning_rate": 4.714036999763532e-05, |
|
"loss": 0.772, |
|
"step": 5983 |
|
}, |
|
{ |
|
"epoch": 0.05184661540053105, |
|
"grad_norm": 0.9584303498268127, |
|
"learning_rate": 4.7101241986186116e-05, |
|
"loss": 0.762, |
|
"step": 6014 |
|
}, |
|
{ |
|
"epoch": 0.05211386599537915, |
|
"grad_norm": 0.9711636900901794, |
|
"learning_rate": 4.7061864565225e-05, |
|
"loss": 0.7742, |
|
"step": 6045 |
|
}, |
|
{ |
|
"epoch": 0.05238111659022725, |
|
"grad_norm": 0.8177659511566162, |
|
"learning_rate": 4.702223817912081e-05, |
|
"loss": 0.7532, |
|
"step": 6076 |
|
}, |
|
{ |
|
"epoch": 0.052648367185075345, |
|
"grad_norm": 0.7204889059066772, |
|
"learning_rate": 4.698236327505195e-05, |
|
"loss": 0.7674, |
|
"step": 6107 |
|
}, |
|
{ |
|
"epoch": 0.05291561777992344, |
|
"grad_norm": 0.7740707993507385, |
|
"learning_rate": 4.694224030300127e-05, |
|
"loss": 0.759, |
|
"step": 6138 |
|
}, |
|
{ |
|
"epoch": 0.05318286837477154, |
|
"grad_norm": 0.9027112126350403, |
|
"learning_rate": 4.690186971575107e-05, |
|
"loss": 0.7606, |
|
"step": 6169 |
|
}, |
|
{ |
|
"epoch": 0.05345011896961964, |
|
"grad_norm": 0.8888375759124756, |
|
"learning_rate": 4.6861251968877916e-05, |
|
"loss": 0.7668, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.05371736956446774, |
|
"grad_norm": 0.7049738764762878, |
|
"learning_rate": 4.68203875207476e-05, |
|
"loss": 0.7678, |
|
"step": 6231 |
|
}, |
|
{ |
|
"epoch": 0.053984620159315835, |
|
"grad_norm": 0.8254877924919128, |
|
"learning_rate": 4.677927683250983e-05, |
|
"loss": 0.766, |
|
"step": 6262 |
|
}, |
|
{ |
|
"epoch": 0.05425187075416393, |
|
"grad_norm": 0.8231219053268433, |
|
"learning_rate": 4.6737920368093156e-05, |
|
"loss": 0.7577, |
|
"step": 6293 |
|
}, |
|
{ |
|
"epoch": 0.05451912134901204, |
|
"grad_norm": 0.859380841255188, |
|
"learning_rate": 4.669631859419965e-05, |
|
"loss": 0.7613, |
|
"step": 6324 |
|
}, |
|
{ |
|
"epoch": 0.054786371943860136, |
|
"grad_norm": 0.7607110142707825, |
|
"learning_rate": 4.6654471980299676e-05, |
|
"loss": 0.7622, |
|
"step": 6355 |
|
}, |
|
{ |
|
"epoch": 0.055053622538708234, |
|
"grad_norm": 0.8904584646224976, |
|
"learning_rate": 4.661238099862658e-05, |
|
"loss": 0.7622, |
|
"step": 6386 |
|
}, |
|
{ |
|
"epoch": 0.05532087313355633, |
|
"grad_norm": 0.8488460183143616, |
|
"learning_rate": 4.657004612417138e-05, |
|
"loss": 0.7588, |
|
"step": 6417 |
|
}, |
|
{ |
|
"epoch": 0.05558812372840443, |
|
"grad_norm": 0.7917813062667847, |
|
"learning_rate": 4.6527467834677374e-05, |
|
"loss": 0.7712, |
|
"step": 6448 |
|
}, |
|
{ |
|
"epoch": 0.05585537432325253, |
|
"grad_norm": 0.830741822719574, |
|
"learning_rate": 4.648464661063478e-05, |
|
"loss": 0.7615, |
|
"step": 6479 |
|
}, |
|
{ |
|
"epoch": 0.056122624918100626, |
|
"grad_norm": 0.9170836806297302, |
|
"learning_rate": 4.6441582935275264e-05, |
|
"loss": 0.7572, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.056389875512948724, |
|
"grad_norm": 0.8450523614883423, |
|
"learning_rate": 4.6398277294566586e-05, |
|
"loss": 0.7497, |
|
"step": 6541 |
|
}, |
|
{ |
|
"epoch": 0.05665712610779682, |
|
"grad_norm": 0.8187207579612732, |
|
"learning_rate": 4.6354730177207e-05, |
|
"loss": 0.7684, |
|
"step": 6572 |
|
}, |
|
{ |
|
"epoch": 0.05692437670264492, |
|
"grad_norm": 0.8315821886062622, |
|
"learning_rate": 4.6310942074619787e-05, |
|
"loss": 0.7685, |
|
"step": 6603 |
|
}, |
|
{ |
|
"epoch": 0.05719162729749302, |
|
"grad_norm": 1.0876758098602295, |
|
"learning_rate": 4.626691348094777e-05, |
|
"loss": 0.7617, |
|
"step": 6634 |
|
}, |
|
{ |
|
"epoch": 0.057458877892341116, |
|
"grad_norm": 0.9182093143463135, |
|
"learning_rate": 4.622264489304762e-05, |
|
"loss": 0.7621, |
|
"step": 6665 |
|
}, |
|
{ |
|
"epoch": 0.057726128487189214, |
|
"grad_norm": 0.6726477742195129, |
|
"learning_rate": 4.617813681048434e-05, |
|
"loss": 0.7637, |
|
"step": 6696 |
|
}, |
|
{ |
|
"epoch": 0.05799337908203731, |
|
"grad_norm": 0.8932876586914062, |
|
"learning_rate": 4.61333897355256e-05, |
|
"loss": 0.7643, |
|
"step": 6727 |
|
}, |
|
{ |
|
"epoch": 0.05826062967688541, |
|
"grad_norm": 0.7158175706863403, |
|
"learning_rate": 4.608840417313604e-05, |
|
"loss": 0.7585, |
|
"step": 6758 |
|
}, |
|
{ |
|
"epoch": 0.05852788027173351, |
|
"grad_norm": 0.7973946928977966, |
|
"learning_rate": 4.6043180630971646e-05, |
|
"loss": 0.7594, |
|
"step": 6789 |
|
}, |
|
{ |
|
"epoch": 0.058795130866581606, |
|
"grad_norm": 0.8311556577682495, |
|
"learning_rate": 4.599771961937391e-05, |
|
"loss": 0.7646, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.059062381461429704, |
|
"grad_norm": 1.623862385749817, |
|
"learning_rate": 4.5952021651364204e-05, |
|
"loss": 0.7528, |
|
"step": 6851 |
|
}, |
|
{ |
|
"epoch": 0.0593296320562778, |
|
"grad_norm": 0.8134210109710693, |
|
"learning_rate": 4.590608724263786e-05, |
|
"loss": 0.752, |
|
"step": 6882 |
|
}, |
|
{ |
|
"epoch": 0.0595968826511259, |
|
"grad_norm": 0.8656156063079834, |
|
"learning_rate": 4.585991691155845e-05, |
|
"loss": 0.7504, |
|
"step": 6913 |
|
}, |
|
{ |
|
"epoch": 0.059864133245974, |
|
"grad_norm": 0.7220660448074341, |
|
"learning_rate": 4.581351117915188e-05, |
|
"loss": 0.7574, |
|
"step": 6944 |
|
}, |
|
{ |
|
"epoch": 0.060131383840822096, |
|
"grad_norm": 0.8750848174095154, |
|
"learning_rate": 4.5766870569100534e-05, |
|
"loss": 0.7442, |
|
"step": 6975 |
|
}, |
|
{ |
|
"epoch": 0.060398634435670194, |
|
"grad_norm": 0.9824672937393188, |
|
"learning_rate": 4.571999560773736e-05, |
|
"loss": 0.7493, |
|
"step": 7006 |
|
}, |
|
{ |
|
"epoch": 0.06066588503051829, |
|
"grad_norm": 1.1746909618377686, |
|
"learning_rate": 4.5672886824039915e-05, |
|
"loss": 0.7615, |
|
"step": 7037 |
|
}, |
|
{ |
|
"epoch": 0.06093313562536639, |
|
"grad_norm": 0.7957736849784851, |
|
"learning_rate": 4.5625544749624435e-05, |
|
"loss": 0.7567, |
|
"step": 7068 |
|
}, |
|
{ |
|
"epoch": 0.06120038622021449, |
|
"grad_norm": 0.7611691355705261, |
|
"learning_rate": 4.5577969918739794e-05, |
|
"loss": 0.7539, |
|
"step": 7099 |
|
}, |
|
{ |
|
"epoch": 0.061467636815062586, |
|
"grad_norm": 0.7961061596870422, |
|
"learning_rate": 4.5530162868261486e-05, |
|
"loss": 0.7549, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.061734887409910684, |
|
"grad_norm": 0.7554512023925781, |
|
"learning_rate": 4.548212413768558e-05, |
|
"loss": 0.7541, |
|
"step": 7161 |
|
}, |
|
{ |
|
"epoch": 0.06200213800475878, |
|
"grad_norm": 1.0506978034973145, |
|
"learning_rate": 4.543385426912261e-05, |
|
"loss": 0.7477, |
|
"step": 7192 |
|
}, |
|
{ |
|
"epoch": 0.06226938859960688, |
|
"grad_norm": 0.8036066889762878, |
|
"learning_rate": 4.53853538072915e-05, |
|
"loss": 0.7455, |
|
"step": 7223 |
|
}, |
|
{ |
|
"epoch": 0.06253663919445498, |
|
"grad_norm": 0.8787280917167664, |
|
"learning_rate": 4.533662329951336e-05, |
|
"loss": 0.747, |
|
"step": 7254 |
|
}, |
|
{ |
|
"epoch": 0.06280388978930308, |
|
"grad_norm": 0.7274677753448486, |
|
"learning_rate": 4.528766329570536e-05, |
|
"loss": 0.7568, |
|
"step": 7285 |
|
}, |
|
{ |
|
"epoch": 0.06307114038415118, |
|
"grad_norm": 0.7349788546562195, |
|
"learning_rate": 4.523847434837447e-05, |
|
"loss": 0.7472, |
|
"step": 7316 |
|
}, |
|
{ |
|
"epoch": 0.06333839097899928, |
|
"grad_norm": 0.7945473790168762, |
|
"learning_rate": 4.518905701261128e-05, |
|
"loss": 0.7581, |
|
"step": 7347 |
|
}, |
|
{ |
|
"epoch": 0.06360564157384738, |
|
"grad_norm": 0.7640956044197083, |
|
"learning_rate": 4.5139411846083715e-05, |
|
"loss": 0.7451, |
|
"step": 7378 |
|
}, |
|
{ |
|
"epoch": 0.06387289216869547, |
|
"grad_norm": 0.7245631217956543, |
|
"learning_rate": 4.508953940903073e-05, |
|
"loss": 0.7437, |
|
"step": 7409 |
|
}, |
|
{ |
|
"epoch": 0.06414014276354357, |
|
"grad_norm": 0.7420781254768372, |
|
"learning_rate": 4.5039440264255994e-05, |
|
"loss": 0.7626, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.06440739335839167, |
|
"grad_norm": 0.774943470954895, |
|
"learning_rate": 4.498911497712155e-05, |
|
"loss": 0.7608, |
|
"step": 7471 |
|
}, |
|
{ |
|
"epoch": 0.06467464395323977, |
|
"grad_norm": 0.7587719559669495, |
|
"learning_rate": 4.493856411554142e-05, |
|
"loss": 0.7631, |
|
"step": 7502 |
|
}, |
|
{ |
|
"epoch": 0.06494189454808787, |
|
"grad_norm": 0.7052931785583496, |
|
"learning_rate": 4.4887788249975206e-05, |
|
"loss": 0.7437, |
|
"step": 7533 |
|
}, |
|
{ |
|
"epoch": 0.06520914514293596, |
|
"grad_norm": 0.8177131414413452, |
|
"learning_rate": 4.4836787953421656e-05, |
|
"loss": 0.75, |
|
"step": 7564 |
|
}, |
|
{ |
|
"epoch": 0.06547639573778406, |
|
"grad_norm": 0.8247653245925903, |
|
"learning_rate": 4.478556380141218e-05, |
|
"loss": 0.7446, |
|
"step": 7595 |
|
}, |
|
{ |
|
"epoch": 0.06574364633263216, |
|
"grad_norm": 0.6302705407142639, |
|
"learning_rate": 4.4734116372004375e-05, |
|
"loss": 0.7459, |
|
"step": 7626 |
|
}, |
|
{ |
|
"epoch": 0.06601089692748026, |
|
"grad_norm": 0.645069420337677, |
|
"learning_rate": 4.4682446245775477e-05, |
|
"loss": 0.7545, |
|
"step": 7657 |
|
}, |
|
{ |
|
"epoch": 0.06627814752232836, |
|
"grad_norm": 0.6515899300575256, |
|
"learning_rate": 4.463055400581586e-05, |
|
"loss": 0.7439, |
|
"step": 7688 |
|
}, |
|
{ |
|
"epoch": 0.06654539811717645, |
|
"grad_norm": 0.8527146577835083, |
|
"learning_rate": 4.4578440237722374e-05, |
|
"loss": 0.7491, |
|
"step": 7719 |
|
}, |
|
{ |
|
"epoch": 0.06681264871202455, |
|
"grad_norm": 0.6546400785446167, |
|
"learning_rate": 4.452610552959183e-05, |
|
"loss": 0.7459, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.06707989930687265, |
|
"grad_norm": 1.2422056198120117, |
|
"learning_rate": 4.447355047201428e-05, |
|
"loss": 0.747, |
|
"step": 7781 |
|
}, |
|
{ |
|
"epoch": 0.06734714990172075, |
|
"grad_norm": 0.7785826325416565, |
|
"learning_rate": 4.4420775658066414e-05, |
|
"loss": 0.7454, |
|
"step": 7812 |
|
}, |
|
{ |
|
"epoch": 0.06761440049656885, |
|
"grad_norm": 0.7051584720611572, |
|
"learning_rate": 4.436778168330484e-05, |
|
"loss": 0.748, |
|
"step": 7843 |
|
}, |
|
{ |
|
"epoch": 0.06788165109141694, |
|
"grad_norm": 0.6893256306648254, |
|
"learning_rate": 4.4314569145759353e-05, |
|
"loss": 0.7466, |
|
"step": 7874 |
|
}, |
|
{ |
|
"epoch": 0.06814890168626504, |
|
"grad_norm": 0.6570892930030823, |
|
"learning_rate": 4.42611386459262e-05, |
|
"loss": 0.7404, |
|
"step": 7905 |
|
}, |
|
{ |
|
"epoch": 0.06841615228111314, |
|
"grad_norm": 1.0755282640457153, |
|
"learning_rate": 4.420749078676133e-05, |
|
"loss": 0.7462, |
|
"step": 7936 |
|
}, |
|
{ |
|
"epoch": 0.06868340287596124, |
|
"grad_norm": 0.9138249158859253, |
|
"learning_rate": 4.4153626173673516e-05, |
|
"loss": 0.7425, |
|
"step": 7967 |
|
}, |
|
{ |
|
"epoch": 0.06895065347080934, |
|
"grad_norm": 0.8147562146186829, |
|
"learning_rate": 4.409954541451762e-05, |
|
"loss": 0.7473, |
|
"step": 7998 |
|
}, |
|
{ |
|
"epoch": 0.06921790406565743, |
|
"grad_norm": 0.9558604955673218, |
|
"learning_rate": 4.404524911958764e-05, |
|
"loss": 0.7372, |
|
"step": 8029 |
|
}, |
|
{ |
|
"epoch": 0.06948515466050553, |
|
"grad_norm": 0.6429873704910278, |
|
"learning_rate": 4.399073790160989e-05, |
|
"loss": 0.7397, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.06975240525535363, |
|
"grad_norm": 0.840124249458313, |
|
"learning_rate": 4.393601237573607e-05, |
|
"loss": 0.7462, |
|
"step": 8091 |
|
}, |
|
{ |
|
"epoch": 0.07001965585020173, |
|
"grad_norm": 0.8520084023475647, |
|
"learning_rate": 4.388107315953628e-05, |
|
"loss": 0.746, |
|
"step": 8122 |
|
}, |
|
{ |
|
"epoch": 0.07028690644504983, |
|
"grad_norm": 0.9427956938743591, |
|
"learning_rate": 4.382592087299212e-05, |
|
"loss": 0.7628, |
|
"step": 8153 |
|
}, |
|
{ |
|
"epoch": 0.07055415703989792, |
|
"grad_norm": 0.6754634380340576, |
|
"learning_rate": 4.377055613848964e-05, |
|
"loss": 0.7387, |
|
"step": 8184 |
|
}, |
|
{ |
|
"epoch": 0.07082140763474602, |
|
"grad_norm": 0.7089795470237732, |
|
"learning_rate": 4.3714979580812355e-05, |
|
"loss": 0.7462, |
|
"step": 8215 |
|
}, |
|
{ |
|
"epoch": 0.07108865822959412, |
|
"grad_norm": 0.6123281717300415, |
|
"learning_rate": 4.365919182713416e-05, |
|
"loss": 0.7363, |
|
"step": 8246 |
|
}, |
|
{ |
|
"epoch": 0.07135590882444222, |
|
"grad_norm": 0.7722983360290527, |
|
"learning_rate": 4.360319350701226e-05, |
|
"loss": 0.7489, |
|
"step": 8277 |
|
}, |
|
{ |
|
"epoch": 0.07162315941929032, |
|
"grad_norm": 0.7888938784599304, |
|
"learning_rate": 4.3546985252380115e-05, |
|
"loss": 0.7407, |
|
"step": 8308 |
|
}, |
|
{ |
|
"epoch": 0.07189041001413841, |
|
"grad_norm": 0.778743326663971, |
|
"learning_rate": 4.349056769754021e-05, |
|
"loss": 0.7412, |
|
"step": 8339 |
|
}, |
|
{ |
|
"epoch": 0.07215766060898651, |
|
"grad_norm": 0.7543514966964722, |
|
"learning_rate": 4.3433941479156994e-05, |
|
"loss": 0.7408, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.07242491120383461, |
|
"grad_norm": 0.8665831089019775, |
|
"learning_rate": 4.3377107236249647e-05, |
|
"loss": 0.7332, |
|
"step": 8401 |
|
}, |
|
{ |
|
"epoch": 0.07269216179868271, |
|
"grad_norm": 0.8533165454864502, |
|
"learning_rate": 4.332006561018488e-05, |
|
"loss": 0.742, |
|
"step": 8432 |
|
}, |
|
{ |
|
"epoch": 0.0729594123935308, |
|
"grad_norm": 0.7969794869422913, |
|
"learning_rate": 4.3262817244669683e-05, |
|
"loss": 0.7412, |
|
"step": 8463 |
|
}, |
|
{ |
|
"epoch": 0.0732266629883789, |
|
"grad_norm": 0.6950485706329346, |
|
"learning_rate": 4.3205362785744083e-05, |
|
"loss": 0.748, |
|
"step": 8494 |
|
}, |
|
{ |
|
"epoch": 0.073493913583227, |
|
"grad_norm": 0.7154302000999451, |
|
"learning_rate": 4.314770288177384e-05, |
|
"loss": 0.7377, |
|
"step": 8525 |
|
}, |
|
{ |
|
"epoch": 0.0737611641780751, |
|
"grad_norm": 0.8694846630096436, |
|
"learning_rate": 4.308983818344313e-05, |
|
"loss": 0.7391, |
|
"step": 8556 |
|
}, |
|
{ |
|
"epoch": 0.0740284147729232, |
|
"grad_norm": 0.6354366540908813, |
|
"learning_rate": 4.3031769343747206e-05, |
|
"loss": 0.7397, |
|
"step": 8587 |
|
}, |
|
{ |
|
"epoch": 0.0742956653677713, |
|
"grad_norm": 0.8266158699989319, |
|
"learning_rate": 4.297349701798505e-05, |
|
"loss": 0.7294, |
|
"step": 8618 |
|
}, |
|
{ |
|
"epoch": 0.0745629159626194, |
|
"grad_norm": 0.8990344405174255, |
|
"learning_rate": 4.2915021863751916e-05, |
|
"loss": 0.7364, |
|
"step": 8649 |
|
}, |
|
{ |
|
"epoch": 0.07483016655746749, |
|
"grad_norm": 0.6664773225784302, |
|
"learning_rate": 4.285634454093198e-05, |
|
"loss": 0.7453, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 0.07509741715231559, |
|
"grad_norm": 0.7253682017326355, |
|
"learning_rate": 4.279746571169086e-05, |
|
"loss": 0.7383, |
|
"step": 8711 |
|
}, |
|
{ |
|
"epoch": 0.0753646677471637, |
|
"grad_norm": 0.7291272878646851, |
|
"learning_rate": 4.2738386040468136e-05, |
|
"loss": 0.7469, |
|
"step": 8742 |
|
}, |
|
{ |
|
"epoch": 0.0756319183420118, |
|
"grad_norm": 0.7150599956512451, |
|
"learning_rate": 4.2679106193969866e-05, |
|
"loss": 0.7365, |
|
"step": 8773 |
|
}, |
|
{ |
|
"epoch": 0.0758991689368599, |
|
"grad_norm": 0.6730013489723206, |
|
"learning_rate": 4.261962684116106e-05, |
|
"loss": 0.7244, |
|
"step": 8804 |
|
}, |
|
{ |
|
"epoch": 0.076166419531708, |
|
"grad_norm": 0.8112632632255554, |
|
"learning_rate": 4.2559948653258145e-05, |
|
"loss": 0.733, |
|
"step": 8835 |
|
}, |
|
{ |
|
"epoch": 0.0764336701265561, |
|
"grad_norm": 0.8358544707298279, |
|
"learning_rate": 4.250007230372134e-05, |
|
"loss": 0.7423, |
|
"step": 8866 |
|
}, |
|
{ |
|
"epoch": 0.07670092072140419, |
|
"grad_norm": 1.0025874376296997, |
|
"learning_rate": 4.2439998468247126e-05, |
|
"loss": 0.7393, |
|
"step": 8897 |
|
}, |
|
{ |
|
"epoch": 0.07696817131625229, |
|
"grad_norm": 0.7373897433280945, |
|
"learning_rate": 4.2379727824760566e-05, |
|
"loss": 0.7343, |
|
"step": 8928 |
|
}, |
|
{ |
|
"epoch": 0.07723542191110039, |
|
"grad_norm": 0.7370135188102722, |
|
"learning_rate": 4.231926105340768e-05, |
|
"loss": 0.7394, |
|
"step": 8959 |
|
}, |
|
{ |
|
"epoch": 0.07750267250594849, |
|
"grad_norm": 0.7160916924476624, |
|
"learning_rate": 4.225859883654776e-05, |
|
"loss": 0.7426, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 0.07776992310079658, |
|
"grad_norm": 0.688937783241272, |
|
"learning_rate": 4.219774185874569e-05, |
|
"loss": 0.7345, |
|
"step": 9021 |
|
}, |
|
{ |
|
"epoch": 0.07803717369564468, |
|
"grad_norm": 0.6157529950141907, |
|
"learning_rate": 4.213669080676418e-05, |
|
"loss": 0.7437, |
|
"step": 9052 |
|
}, |
|
{ |
|
"epoch": 0.07830442429049278, |
|
"grad_norm": 0.7043218612670898, |
|
"learning_rate": 4.2075446369556056e-05, |
|
"loss": 0.7346, |
|
"step": 9083 |
|
}, |
|
{ |
|
"epoch": 0.07857167488534088, |
|
"grad_norm": 0.7695103883743286, |
|
"learning_rate": 4.201400923825648e-05, |
|
"loss": 0.7243, |
|
"step": 9114 |
|
}, |
|
{ |
|
"epoch": 0.07883892548018898, |
|
"grad_norm": 0.7572926878929138, |
|
"learning_rate": 4.195238010617511e-05, |
|
"loss": 0.7307, |
|
"step": 9145 |
|
}, |
|
{ |
|
"epoch": 0.07910617607503707, |
|
"grad_norm": 0.7854665517807007, |
|
"learning_rate": 4.1890559668788344e-05, |
|
"loss": 0.7317, |
|
"step": 9176 |
|
}, |
|
{ |
|
"epoch": 0.07937342666988517, |
|
"grad_norm": 0.8939421772956848, |
|
"learning_rate": 4.1828548623731405e-05, |
|
"loss": 0.731, |
|
"step": 9207 |
|
}, |
|
{ |
|
"epoch": 0.07964067726473327, |
|
"grad_norm": 0.799430251121521, |
|
"learning_rate": 4.1766347670790506e-05, |
|
"loss": 0.7322, |
|
"step": 9238 |
|
}, |
|
{ |
|
"epoch": 0.07990792785958137, |
|
"grad_norm": 0.7589206099510193, |
|
"learning_rate": 4.170395751189495e-05, |
|
"loss": 0.7273, |
|
"step": 9269 |
|
}, |
|
{ |
|
"epoch": 0.08017517845442947, |
|
"grad_norm": 0.5817182064056396, |
|
"learning_rate": 4.164137885110921e-05, |
|
"loss": 0.7357, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 0.08044242904927756, |
|
"grad_norm": 1.0037076473236084, |
|
"learning_rate": 4.157861239462495e-05, |
|
"loss": 0.7296, |
|
"step": 9331 |
|
}, |
|
{ |
|
"epoch": 0.08070967964412566, |
|
"grad_norm": 0.8309381604194641, |
|
"learning_rate": 4.1515658850753114e-05, |
|
"loss": 0.7324, |
|
"step": 9362 |
|
}, |
|
{ |
|
"epoch": 0.08097693023897376, |
|
"grad_norm": 0.7340037226676941, |
|
"learning_rate": 4.145251892991588e-05, |
|
"loss": 0.7343, |
|
"step": 9393 |
|
}, |
|
{ |
|
"epoch": 0.08124418083382186, |
|
"grad_norm": 0.6870521903038025, |
|
"learning_rate": 4.138919334463868e-05, |
|
"loss": 0.7314, |
|
"step": 9424 |
|
}, |
|
{ |
|
"epoch": 0.08151143142866996, |
|
"grad_norm": 0.6890373826026917, |
|
"learning_rate": 4.1325682809542124e-05, |
|
"loss": 0.7316, |
|
"step": 9455 |
|
}, |
|
{ |
|
"epoch": 0.08177868202351805, |
|
"grad_norm": 0.7693931460380554, |
|
"learning_rate": 4.126198804133398e-05, |
|
"loss": 0.7332, |
|
"step": 9486 |
|
}, |
|
{ |
|
"epoch": 0.08204593261836615, |
|
"grad_norm": 0.8764196634292603, |
|
"learning_rate": 4.1198109758801055e-05, |
|
"loss": 0.7267, |
|
"step": 9517 |
|
}, |
|
{ |
|
"epoch": 0.08231318321321425, |
|
"grad_norm": 0.7419659495353699, |
|
"learning_rate": 4.113404868280107e-05, |
|
"loss": 0.7344, |
|
"step": 9548 |
|
}, |
|
{ |
|
"epoch": 0.08258043380806235, |
|
"grad_norm": 0.754749596118927, |
|
"learning_rate": 4.106980553625457e-05, |
|
"loss": 0.7312, |
|
"step": 9579 |
|
}, |
|
{ |
|
"epoch": 0.08284768440291045, |
|
"grad_norm": 0.9303844571113586, |
|
"learning_rate": 4.100538104413674e-05, |
|
"loss": 0.7359, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 0.08311493499775854, |
|
"grad_norm": 0.8202078342437744, |
|
"learning_rate": 4.09407759334692e-05, |
|
"loss": 0.7362, |
|
"step": 9641 |
|
}, |
|
{ |
|
"epoch": 0.08338218559260664, |
|
"grad_norm": 0.8347437977790833, |
|
"learning_rate": 4.087599093331186e-05, |
|
"loss": 0.7188, |
|
"step": 9672 |
|
}, |
|
{ |
|
"epoch": 0.08364943618745474, |
|
"grad_norm": 0.8407759070396423, |
|
"learning_rate": 4.081102677475462e-05, |
|
"loss": 0.7326, |
|
"step": 9703 |
|
}, |
|
{ |
|
"epoch": 0.08391668678230284, |
|
"grad_norm": 0.745712399482727, |
|
"learning_rate": 4.0745884190909194e-05, |
|
"loss": 0.7286, |
|
"step": 9734 |
|
}, |
|
{ |
|
"epoch": 0.08418393737715094, |
|
"grad_norm": 0.630031168460846, |
|
"learning_rate": 4.0680563916900796e-05, |
|
"loss": 0.7262, |
|
"step": 9765 |
|
}, |
|
{ |
|
"epoch": 0.08445118797199903, |
|
"grad_norm": 0.8924230933189392, |
|
"learning_rate": 4.0615066689859815e-05, |
|
"loss": 0.7315, |
|
"step": 9796 |
|
}, |
|
{ |
|
"epoch": 0.08471843856684713, |
|
"grad_norm": 0.7842046022415161, |
|
"learning_rate": 4.0549393248913584e-05, |
|
"loss": 0.7309, |
|
"step": 9827 |
|
}, |
|
{ |
|
"epoch": 0.08498568916169523, |
|
"grad_norm": 0.783723771572113, |
|
"learning_rate": 4.048354433517794e-05, |
|
"loss": 0.7307, |
|
"step": 9858 |
|
}, |
|
{ |
|
"epoch": 0.08525293975654333, |
|
"grad_norm": 0.6454151272773743, |
|
"learning_rate": 4.0417520691748916e-05, |
|
"loss": 0.7277, |
|
"step": 9889 |
|
}, |
|
{ |
|
"epoch": 0.08552019035139143, |
|
"grad_norm": 0.781913697719574, |
|
"learning_rate": 4.035132306369438e-05, |
|
"loss": 0.7267, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 0.08578744094623952, |
|
"grad_norm": 0.7176892161369324, |
|
"learning_rate": 4.028495219804555e-05, |
|
"loss": 0.7309, |
|
"step": 9951 |
|
}, |
|
{ |
|
"epoch": 0.08605469154108762, |
|
"grad_norm": 0.9282059669494629, |
|
"learning_rate": 4.021840884378864e-05, |
|
"loss": 0.7483, |
|
"step": 9982 |
|
}, |
|
{ |
|
"epoch": 0.08632194213593572, |
|
"grad_norm": 1.0737770795822144, |
|
"learning_rate": 4.015169375185633e-05, |
|
"loss": 0.7262, |
|
"step": 10013 |
|
}, |
|
{ |
|
"epoch": 0.08658919273078382, |
|
"grad_norm": 0.7671861052513123, |
|
"learning_rate": 4.0084807675119396e-05, |
|
"loss": 0.7257, |
|
"step": 10044 |
|
}, |
|
{ |
|
"epoch": 0.08685644332563192, |
|
"grad_norm": 0.8341594934463501, |
|
"learning_rate": 4.0017751368378106e-05, |
|
"loss": 0.72, |
|
"step": 10075 |
|
}, |
|
{ |
|
"epoch": 0.08712369392048001, |
|
"grad_norm": 0.9360256195068359, |
|
"learning_rate": 3.995052558835377e-05, |
|
"loss": 0.7277, |
|
"step": 10106 |
|
}, |
|
{ |
|
"epoch": 0.08739094451532811, |
|
"grad_norm": 0.6352283358573914, |
|
"learning_rate": 3.988313109368017e-05, |
|
"loss": 0.7258, |
|
"step": 10137 |
|
}, |
|
{ |
|
"epoch": 0.08765819511017621, |
|
"grad_norm": 0.6871002912521362, |
|
"learning_rate": 3.981556864489504e-05, |
|
"loss": 0.7184, |
|
"step": 10168 |
|
}, |
|
{ |
|
"epoch": 0.08792544570502431, |
|
"grad_norm": 0.6639994382858276, |
|
"learning_rate": 3.974783900443142e-05, |
|
"loss": 0.727, |
|
"step": 10199 |
|
}, |
|
{ |
|
"epoch": 0.0881926962998724, |
|
"grad_norm": 0.7937990427017212, |
|
"learning_rate": 3.9679942936609095e-05, |
|
"loss": 0.7286, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 0.0884599468947205, |
|
"grad_norm": 0.6229277849197388, |
|
"learning_rate": 3.961188120762596e-05, |
|
"loss": 0.7223, |
|
"step": 10261 |
|
}, |
|
{ |
|
"epoch": 0.0887271974895686, |
|
"grad_norm": 0.6489992737770081, |
|
"learning_rate": 3.954365458554938e-05, |
|
"loss": 0.7247, |
|
"step": 10292 |
|
}, |
|
{ |
|
"epoch": 0.0889944480844167, |
|
"grad_norm": 0.7326925992965698, |
|
"learning_rate": 3.947526384030751e-05, |
|
"loss": 0.7334, |
|
"step": 10323 |
|
}, |
|
{ |
|
"epoch": 0.0892616986792648, |
|
"grad_norm": 0.7646430730819702, |
|
"learning_rate": 3.9406709743680624e-05, |
|
"loss": 0.7278, |
|
"step": 10354 |
|
}, |
|
{ |
|
"epoch": 0.0895289492741129, |
|
"grad_norm": 0.7933580279350281, |
|
"learning_rate": 3.9337993069292366e-05, |
|
"loss": 0.7205, |
|
"step": 10385 |
|
}, |
|
{ |
|
"epoch": 0.089796199868961, |
|
"grad_norm": 1.0031253099441528, |
|
"learning_rate": 3.926911459260109e-05, |
|
"loss": 0.7224, |
|
"step": 10416 |
|
}, |
|
{ |
|
"epoch": 0.09006345046380909, |
|
"grad_norm": 0.9162613153457642, |
|
"learning_rate": 3.920007509089102e-05, |
|
"loss": 0.7382, |
|
"step": 10447 |
|
}, |
|
{ |
|
"epoch": 0.09033070105865719, |
|
"grad_norm": 0.8875689506530762, |
|
"learning_rate": 3.913087534326357e-05, |
|
"loss": 0.7224, |
|
"step": 10478 |
|
}, |
|
{ |
|
"epoch": 0.09059795165350529, |
|
"grad_norm": 0.8086528778076172, |
|
"learning_rate": 3.9061516130628475e-05, |
|
"loss": 0.7278, |
|
"step": 10509 |
|
}, |
|
{ |
|
"epoch": 0.09086520224835339, |
|
"grad_norm": 0.6289557218551636, |
|
"learning_rate": 3.8991998235695025e-05, |
|
"loss": 0.722, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 0.09113245284320148, |
|
"grad_norm": 0.7295290231704712, |
|
"learning_rate": 3.8922322442963224e-05, |
|
"loss": 0.7188, |
|
"step": 10571 |
|
}, |
|
{ |
|
"epoch": 0.09139970343804958, |
|
"grad_norm": 0.7659655809402466, |
|
"learning_rate": 3.885248953871491e-05, |
|
"loss": 0.7259, |
|
"step": 10602 |
|
}, |
|
{ |
|
"epoch": 0.09166695403289768, |
|
"grad_norm": 0.7740941047668457, |
|
"learning_rate": 3.8782500311004915e-05, |
|
"loss": 0.7271, |
|
"step": 10633 |
|
}, |
|
{ |
|
"epoch": 0.09193420462774578, |
|
"grad_norm": 0.6929652094841003, |
|
"learning_rate": 3.871235554965218e-05, |
|
"loss": 0.7222, |
|
"step": 10664 |
|
}, |
|
{ |
|
"epoch": 0.09220145522259389, |
|
"grad_norm": 0.7807513475418091, |
|
"learning_rate": 3.864205604623078e-05, |
|
"loss": 0.7106, |
|
"step": 10695 |
|
}, |
|
{ |
|
"epoch": 0.09246870581744199, |
|
"grad_norm": 0.6159222722053528, |
|
"learning_rate": 3.857160259406107e-05, |
|
"loss": 0.7198, |
|
"step": 10726 |
|
}, |
|
{ |
|
"epoch": 0.09273595641229009, |
|
"grad_norm": 0.6694709658622742, |
|
"learning_rate": 3.8500995988200674e-05, |
|
"loss": 0.7087, |
|
"step": 10757 |
|
}, |
|
{ |
|
"epoch": 0.09300320700713818, |
|
"grad_norm": 0.7234062552452087, |
|
"learning_rate": 3.843023702543556e-05, |
|
"loss": 0.7267, |
|
"step": 10788 |
|
}, |
|
{ |
|
"epoch": 0.09327045760198628, |
|
"grad_norm": 0.7624971270561218, |
|
"learning_rate": 3.8359326504270984e-05, |
|
"loss": 0.725, |
|
"step": 10819 |
|
}, |
|
{ |
|
"epoch": 0.09353770819683438, |
|
"grad_norm": 0.7923869490623474, |
|
"learning_rate": 3.828826522492255e-05, |
|
"loss": 0.7226, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 0.09380495879168248, |
|
"grad_norm": 0.7016533613204956, |
|
"learning_rate": 3.821705398930713e-05, |
|
"loss": 0.7295, |
|
"step": 10881 |
|
}, |
|
{ |
|
"epoch": 0.09407220938653058, |
|
"grad_norm": 0.6347571015357971, |
|
"learning_rate": 3.814569360103385e-05, |
|
"loss": 0.7129, |
|
"step": 10912 |
|
}, |
|
{ |
|
"epoch": 0.09433945998137867, |
|
"grad_norm": 0.7254231572151184, |
|
"learning_rate": 3.807418486539499e-05, |
|
"loss": 0.7181, |
|
"step": 10943 |
|
}, |
|
{ |
|
"epoch": 0.09460671057622677, |
|
"grad_norm": 0.7181808948516846, |
|
"learning_rate": 3.80025285893569e-05, |
|
"loss": 0.7179, |
|
"step": 10974 |
|
}, |
|
{ |
|
"epoch": 0.09487396117107487, |
|
"grad_norm": 0.7308523058891296, |
|
"learning_rate": 3.793072558155093e-05, |
|
"loss": 0.7243, |
|
"step": 11005 |
|
}, |
|
{ |
|
"epoch": 0.09514121176592297, |
|
"grad_norm": 0.8678077459335327, |
|
"learning_rate": 3.785877665226426e-05, |
|
"loss": 0.7254, |
|
"step": 11036 |
|
}, |
|
{ |
|
"epoch": 0.09540846236077107, |
|
"grad_norm": 0.6927953958511353, |
|
"learning_rate": 3.778668261343079e-05, |
|
"loss": 0.7242, |
|
"step": 11067 |
|
}, |
|
{ |
|
"epoch": 0.09567571295561916, |
|
"grad_norm": 0.5659973621368408, |
|
"learning_rate": 3.771444427862192e-05, |
|
"loss": 0.7157, |
|
"step": 11098 |
|
}, |
|
{ |
|
"epoch": 0.09594296355046726, |
|
"grad_norm": 0.8592713475227356, |
|
"learning_rate": 3.7642062463037465e-05, |
|
"loss": 0.7117, |
|
"step": 11129 |
|
}, |
|
{ |
|
"epoch": 0.09621021414531536, |
|
"grad_norm": 0.6449655294418335, |
|
"learning_rate": 3.7569537983496373e-05, |
|
"loss": 0.7197, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 0.09647746474016346, |
|
"grad_norm": 0.7201160192489624, |
|
"learning_rate": 3.749687165842753e-05, |
|
"loss": 0.7189, |
|
"step": 11191 |
|
}, |
|
{ |
|
"epoch": 0.09674471533501156, |
|
"grad_norm": 0.6577209830284119, |
|
"learning_rate": 3.7424064307860536e-05, |
|
"loss": 0.7235, |
|
"step": 11222 |
|
}, |
|
{ |
|
"epoch": 0.09701196592985965, |
|
"grad_norm": 0.7682704329490662, |
|
"learning_rate": 3.735111675341645e-05, |
|
"loss": 0.7183, |
|
"step": 11253 |
|
}, |
|
{ |
|
"epoch": 0.09727921652470775, |
|
"grad_norm": 0.6187945604324341, |
|
"learning_rate": 3.7278029818298524e-05, |
|
"loss": 0.7046, |
|
"step": 11284 |
|
}, |
|
{ |
|
"epoch": 0.09754646711955585, |
|
"grad_norm": 0.6507108807563782, |
|
"learning_rate": 3.720480432728287e-05, |
|
"loss": 0.7253, |
|
"step": 11315 |
|
}, |
|
{ |
|
"epoch": 0.09781371771440395, |
|
"grad_norm": 0.6345822215080261, |
|
"learning_rate": 3.71314411067092e-05, |
|
"loss": 0.719, |
|
"step": 11346 |
|
}, |
|
{ |
|
"epoch": 0.09808096830925205, |
|
"grad_norm": 0.6644492149353027, |
|
"learning_rate": 3.70579409844715e-05, |
|
"loss": 0.7241, |
|
"step": 11377 |
|
}, |
|
{ |
|
"epoch": 0.09834821890410014, |
|
"grad_norm": 0.6215960383415222, |
|
"learning_rate": 3.698430479000865e-05, |
|
"loss": 0.7196, |
|
"step": 11408 |
|
}, |
|
{ |
|
"epoch": 0.09861546949894824, |
|
"grad_norm": 0.8039011359214783, |
|
"learning_rate": 3.691053335429509e-05, |
|
"loss": 0.7212, |
|
"step": 11439 |
|
}, |
|
{ |
|
"epoch": 0.09888272009379634, |
|
"grad_norm": 0.9103333353996277, |
|
"learning_rate": 3.683662750983147e-05, |
|
"loss": 0.7245, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 0.09914997068864444, |
|
"grad_norm": 0.7850680947303772, |
|
"learning_rate": 3.676258809063518e-05, |
|
"loss": 0.7249, |
|
"step": 11501 |
|
}, |
|
{ |
|
"epoch": 0.09941722128349254, |
|
"grad_norm": 0.6277598738670349, |
|
"learning_rate": 3.6688415932231004e-05, |
|
"loss": 0.7234, |
|
"step": 11532 |
|
}, |
|
{ |
|
"epoch": 0.09968447187834063, |
|
"grad_norm": 0.6920484900474548, |
|
"learning_rate": 3.661411187164166e-05, |
|
"loss": 0.7126, |
|
"step": 11563 |
|
}, |
|
{ |
|
"epoch": 0.09995172247318873, |
|
"grad_norm": 0.6442444324493408, |
|
"learning_rate": 3.65396767473784e-05, |
|
"loss": 0.7148, |
|
"step": 11594 |
|
}, |
|
{ |
|
"epoch": 0.10021897306803683, |
|
"grad_norm": 0.741570770740509, |
|
"learning_rate": 3.6465111399431465e-05, |
|
"loss": 0.7216, |
|
"step": 11625 |
|
}, |
|
{ |
|
"epoch": 0.10048622366288493, |
|
"grad_norm": 0.7592952251434326, |
|
"learning_rate": 3.6390416669260674e-05, |
|
"loss": 0.7143, |
|
"step": 11656 |
|
}, |
|
{ |
|
"epoch": 0.10075347425773303, |
|
"grad_norm": 0.6915357708930969, |
|
"learning_rate": 3.63155933997859e-05, |
|
"loss": 0.7166, |
|
"step": 11687 |
|
}, |
|
{ |
|
"epoch": 0.10102072485258112, |
|
"grad_norm": 0.8318819403648376, |
|
"learning_rate": 3.624064243537758e-05, |
|
"loss": 0.7092, |
|
"step": 11718 |
|
}, |
|
{ |
|
"epoch": 0.10128797544742922, |
|
"grad_norm": 0.8602069616317749, |
|
"learning_rate": 3.616556462184716e-05, |
|
"loss": 0.7196, |
|
"step": 11749 |
|
}, |
|
{ |
|
"epoch": 0.10155522604227732, |
|
"grad_norm": 0.7524039149284363, |
|
"learning_rate": 3.609036080643755e-05, |
|
"loss": 0.7115, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 0.10182247663712542, |
|
"grad_norm": 0.6467952728271484, |
|
"learning_rate": 3.60150318378136e-05, |
|
"loss": 0.722, |
|
"step": 11811 |
|
}, |
|
{ |
|
"epoch": 0.10208972723197351, |
|
"grad_norm": 0.6989237070083618, |
|
"learning_rate": 3.5939578566052465e-05, |
|
"loss": 0.7221, |
|
"step": 11842 |
|
}, |
|
{ |
|
"epoch": 0.10235697782682161, |
|
"grad_norm": 0.5162470936775208, |
|
"learning_rate": 3.586400184263408e-05, |
|
"loss": 0.7134, |
|
"step": 11873 |
|
}, |
|
{ |
|
"epoch": 0.10262422842166971, |
|
"grad_norm": 0.7181512713432312, |
|
"learning_rate": 3.578830252043148e-05, |
|
"loss": 0.7187, |
|
"step": 11904 |
|
}, |
|
{ |
|
"epoch": 0.10289147901651781, |
|
"grad_norm": 0.6398524641990662, |
|
"learning_rate": 3.571248145370125e-05, |
|
"loss": 0.7089, |
|
"step": 11935 |
|
}, |
|
{ |
|
"epoch": 0.1031587296113659, |
|
"grad_norm": 0.7089547514915466, |
|
"learning_rate": 3.5636539498073794e-05, |
|
"loss": 0.7192, |
|
"step": 11966 |
|
}, |
|
{ |
|
"epoch": 0.103425980206214, |
|
"grad_norm": 0.8613171577453613, |
|
"learning_rate": 3.556047751054378e-05, |
|
"loss": 0.7192, |
|
"step": 11997 |
|
}, |
|
{ |
|
"epoch": 0.1036932308010621, |
|
"grad_norm": 0.6494088768959045, |
|
"learning_rate": 3.548429634946039e-05, |
|
"loss": 0.7162, |
|
"step": 12028 |
|
}, |
|
{ |
|
"epoch": 0.1039604813959102, |
|
"grad_norm": 0.6777063012123108, |
|
"learning_rate": 3.540799687451768e-05, |
|
"loss": 0.7252, |
|
"step": 12059 |
|
}, |
|
{ |
|
"epoch": 0.1042277319907583, |
|
"grad_norm": 0.7733666300773621, |
|
"learning_rate": 3.533157994674485e-05, |
|
"loss": 0.7196, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 0.1044949825856064, |
|
"grad_norm": 0.6725175976753235, |
|
"learning_rate": 3.5255046428496546e-05, |
|
"loss": 0.7191, |
|
"step": 12121 |
|
}, |
|
{ |
|
"epoch": 0.1047622331804545, |
|
"grad_norm": 0.6679649353027344, |
|
"learning_rate": 3.517839718344311e-05, |
|
"loss": 0.7207, |
|
"step": 12152 |
|
}, |
|
{ |
|
"epoch": 0.10502948377530259, |
|
"grad_norm": 0.7206579446792603, |
|
"learning_rate": 3.510163307656086e-05, |
|
"loss": 0.7166, |
|
"step": 12183 |
|
} |
|
], |
|
"logging_steps": 31, |
|
"max_steps": 30517, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 3052, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 8.48541631240156e+18, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|
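The state above is the trainer_state.json layout that the Hugging Face transformers Trainer writes alongside each checkpoint: "log_history" holds one record per logging event (every 31 optimizer steps here, per "logging_steps"), and the trailing fields record the run configuration ("max_steps": 30517, "save_steps": 3052, "train_batch_size": 16). What follows is a minimal sketch of how such a state file can be inspected, assuming it is saved locally as trainer_state.json; the file path, output filename, and plotting choices are illustrative assumptions, not part of the original run.

import json
import matplotlib.pyplot as plt

# Load the serialized trainer state (hypothetical local path).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only records that carry a training loss; each of those
# also has "step", "learning_rate", and "grad_norm".
records = [r for r in state["log_history"] if "loss" in r]
steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]

# Plot the training-loss curve over optimizer steps.
plt.plot(steps, losses)
plt.xlabel("optimizer step")
plt.ylabel("training loss")
plt.title(f"progress: {state['epoch']:.4f} of {state['num_train_epochs']} epoch(s)")
plt.savefig("loss_curve.png")  # illustrative output name

Run against the section above, this would trace the loss drifting from roughly 0.75 at step 9982 down to about 0.72 by step 12183; substituting "learning_rate" or "grad_norm" for "loss" gives the schedule and gradient-norm curves from the same records.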