{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6438003628232714,
  "eval_steps": 500,
  "global_step": 15260,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0013078513268362654,
      "grad_norm": 31.18772315979004,
      "learning_rate": 1.0157273918741808e-06,
      "loss": 9.3832,
      "step": 31
    },
    {
      "epoch": 0.0026157026536725307,
      "grad_norm": 24.045747756958008,
      "learning_rate": 2.0314547837483616e-06,
      "loss": 7.9011,
      "step": 62
    },
    {
      "epoch": 0.003923553980508796,
      "grad_norm": 16.459999084472656,
      "learning_rate": 3.0471821756225426e-06,
      "loss": 6.3655,
      "step": 93
    },
    {
      "epoch": 0.0052314053073450615,
      "grad_norm": 5.892439365386963,
      "learning_rate": 4.062909567496723e-06,
      "loss": 5.1577,
      "step": 124
    },
    {
      "epoch": 0.0065392566341813275,
      "grad_norm": 7.445889949798584,
      "learning_rate": 5.078636959370905e-06,
      "loss": 4.4767,
      "step": 155
    },
    {
      "epoch": 0.007847107961017593,
      "grad_norm": 8.36017894744873,
      "learning_rate": 6.094364351245085e-06,
      "loss": 4.0756,
      "step": 186
    },
    {
      "epoch": 0.009154959287853858,
      "grad_norm": 13.23757266998291,
      "learning_rate": 7.110091743119267e-06,
      "loss": 3.7893,
      "step": 217
    },
    {
      "epoch": 0.010462810614690123,
      "grad_norm": 10.12263011932373,
      "learning_rate": 8.125819134993446e-06,
      "loss": 3.6154,
      "step": 248
    },
    {
      "epoch": 0.01177066194152639,
      "grad_norm": 6.559150218963623,
      "learning_rate": 9.141546526867629e-06,
      "loss": 3.4391,
      "step": 279
    },
    {
      "epoch": 0.013078513268362655,
      "grad_norm": 6.898993015289307,
      "learning_rate": 1.015727391874181e-05,
      "loss": 3.3229,
      "step": 310
    },
    {
      "epoch": 0.01438636459519892,
      "grad_norm": 8.434200286865234,
      "learning_rate": 1.117300131061599e-05,
      "loss": 3.225,
      "step": 341
    },
    {
      "epoch": 0.015694215922035185,
      "grad_norm": 7.735746383666992,
      "learning_rate": 1.218872870249017e-05,
      "loss": 3.1381,
      "step": 372
    },
    {
      "epoch": 0.01700206724887145,
      "grad_norm": 7.964824676513672,
      "learning_rate": 1.3204456094364351e-05,
      "loss": 3.06,
      "step": 403
    },
    {
      "epoch": 0.018309918575707716,
      "grad_norm": 6.081613063812256,
      "learning_rate": 1.4220183486238533e-05,
      "loss": 3.0052,
      "step": 434
    },
    {
      "epoch": 0.01961776990254398,
      "grad_norm": 6.740341663360596,
      "learning_rate": 1.5235910878112714e-05,
      "loss": 2.9035,
      "step": 465
    },
    {
      "epoch": 0.020925621229380246,
      "grad_norm": 4.908828258514404,
      "learning_rate": 1.6251638269986893e-05,
      "loss": 2.8602,
      "step": 496
    },
    {
      "epoch": 0.02223347255621651,
      "grad_norm": 5.6150360107421875,
      "learning_rate": 1.7267365661861077e-05,
      "loss": 2.8223,
      "step": 527
    },
    {
      "epoch": 0.02354132388305278,
      "grad_norm": 3.5981647968292236,
      "learning_rate": 1.8283093053735257e-05,
      "loss": 2.7488,
      "step": 558
    },
    {
      "epoch": 0.024849175209889045,
      "grad_norm": 4.921584606170654,
      "learning_rate": 1.9298820445609438e-05,
      "loss": 2.741,
      "step": 589
    },
    {
      "epoch": 0.02615702653672531,
      "grad_norm": 3.8303065299987793,
      "learning_rate": 2.031454783748362e-05,
      "loss": 2.6923,
      "step": 620
    },
    {
      "epoch": 0.027464877863561575,
      "grad_norm": 3.3674638271331787,
      "learning_rate": 2.13302752293578e-05,
      "loss": 2.6402,
      "step": 651
    },
    {
      "epoch": 0.02877272919039784,
      "grad_norm": 4.096519947052002,
      "learning_rate": 2.234600262123198e-05,
      "loss": 2.6372,
      "step": 682
    },
    {
      "epoch": 0.030080580517234105,
      "grad_norm": 3.10866641998291,
      "learning_rate": 2.336173001310616e-05,
      "loss": 2.5979,
      "step": 713
    },
    {
      "epoch": 0.03138843184407037,
      "grad_norm": 3.3147943019866943,
      "learning_rate": 2.437745740498034e-05,
      "loss": 2.549,
      "step": 744
    },
    {
      "epoch": 0.032696283170906636,
      "grad_norm": 3.045280933380127,
      "learning_rate": 2.5393184796854525e-05,
      "loss": 2.5487,
      "step": 775
    },
    {
      "epoch": 0.0340041344977429,
      "grad_norm": 3.2536299228668213,
      "learning_rate": 2.6408912188728702e-05,
      "loss": 2.5138,
      "step": 806
    },
    {
      "epoch": 0.035311985824579166,
      "grad_norm": 3.108548641204834,
      "learning_rate": 2.7424639580602886e-05,
      "loss": 2.5006,
      "step": 837
    },
    {
      "epoch": 0.03661983715141543,
      "grad_norm": 3.048689603805542,
      "learning_rate": 2.8440366972477066e-05,
      "loss": 2.4434,
      "step": 868
    },
    {
      "epoch": 0.037927688478251696,
      "grad_norm": 2.8378429412841797,
      "learning_rate": 2.9456094364351244e-05,
      "loss": 2.4403,
      "step": 899
    },
    {
      "epoch": 0.03923553980508796,
      "grad_norm": 2.731419324874878,
      "learning_rate": 3.0471821756225428e-05,
      "loss": 2.4028,
      "step": 930
    },
    {
      "epoch": 0.04054339113192423,
      "grad_norm": 2.4825587272644043,
      "learning_rate": 3.148754914809961e-05,
      "loss": 2.3695,
      "step": 961
    },
    {
      "epoch": 0.04185124245876049,
      "grad_norm": 2.5020129680633545,
      "learning_rate": 3.2503276539973785e-05,
      "loss": 2.3513,
      "step": 992
    },
    {
      "epoch": 0.04315909378559676,
      "grad_norm": 2.374310255050659,
      "learning_rate": 3.351900393184797e-05,
      "loss": 2.3221,
      "step": 1023
    },
    {
      "epoch": 0.04446694511243302,
      "grad_norm": 2.4097862243652344,
      "learning_rate": 3.453473132372215e-05,
      "loss": 2.3366,
      "step": 1054
    },
    {
      "epoch": 0.045774796439269294,
      "grad_norm": 2.573655128479004,
      "learning_rate": 3.555045871559633e-05,
      "loss": 2.2997,
      "step": 1085
    },
    {
      "epoch": 0.04708264776610556,
      "grad_norm": 2.5408434867858887,
      "learning_rate": 3.6566186107470514e-05,
      "loss": 2.2729,
      "step": 1116
    },
    {
      "epoch": 0.048390499092941824,
      "grad_norm": 2.222527265548706,
      "learning_rate": 3.7581913499344695e-05,
      "loss": 2.2716,
      "step": 1147
    },
    {
      "epoch": 0.04969835041977809,
      "grad_norm": 2.342715263366699,
      "learning_rate": 3.8597640891218876e-05,
      "loss": 2.2661,
      "step": 1178
    },
    {
      "epoch": 0.051006201746614355,
      "grad_norm": 2.134208917617798,
      "learning_rate": 3.9613368283093056e-05,
      "loss": 2.2206,
      "step": 1209
    },
    {
      "epoch": 0.05231405307345062,
      "grad_norm": 2.2482688426971436,
      "learning_rate": 4.062909567496724e-05,
      "loss": 2.2207,
      "step": 1240
    },
    {
      "epoch": 0.053621904400286885,
      "grad_norm": 2.2316832542419434,
      "learning_rate": 4.164482306684142e-05,
      "loss": 2.1847,
      "step": 1271
    },
    {
      "epoch": 0.05492975572712315,
      "grad_norm": 2.2321085929870605,
      "learning_rate": 4.26605504587156e-05,
      "loss": 2.2079,
      "step": 1302
    },
    {
      "epoch": 0.056237607053959415,
      "grad_norm": 2.115349054336548,
      "learning_rate": 4.367627785058978e-05,
      "loss": 2.1458,
      "step": 1333
    },
    {
      "epoch": 0.05754545838079568,
      "grad_norm": 2.210914373397827,
      "learning_rate": 4.469200524246396e-05,
      "loss": 2.1435,
      "step": 1364
    },
    {
      "epoch": 0.058853309707631946,
      "grad_norm": 2.1882164478302,
      "learning_rate": 4.570773263433814e-05,
      "loss": 2.1389,
      "step": 1395
    },
    {
      "epoch": 0.06016116103446821,
      "grad_norm": 2.01705265045166,
      "learning_rate": 4.672346002621232e-05,
      "loss": 2.1599,
      "step": 1426
    },
    {
      "epoch": 0.061469012361304476,
      "grad_norm": 1.9447391033172607,
      "learning_rate": 4.77391874180865e-05,
      "loss": 2.1254,
      "step": 1457
    },
    {
      "epoch": 0.06277686368814074,
      "grad_norm": 1.9278348684310913,
      "learning_rate": 4.875491480996068e-05,
      "loss": 2.1109,
      "step": 1488
    },
    {
      "epoch": 0.064084715014977,
      "grad_norm": 167.2142791748047,
      "learning_rate": 4.977064220183487e-05,
      "loss": 2.12,
      "step": 1519
    },
    {
      "epoch": 0.06539256634181327,
      "grad_norm": 15.058513641357422,
      "learning_rate": 4.9999915451558777e-05,
      "loss": 3.2174,
      "step": 1550
    },
    {
      "epoch": 0.06670041766864954,
      "grad_norm": 2.8855061531066895,
      "learning_rate": 4.999955597496219e-05,
      "loss": 2.191,
      "step": 1581
    },
    {
      "epoch": 0.0680082689954858,
      "grad_norm": 1.9484020471572876,
      "learning_rate": 4.9998914381774255e-05,
      "loss": 2.1053,
      "step": 1612
    },
    {
      "epoch": 0.06931612032232207,
      "grad_norm": 5.792853832244873,
      "learning_rate": 4.999799067923527e-05,
      "loss": 2.1058,
      "step": 1643
    },
    {
      "epoch": 0.07062397164915833,
      "grad_norm": 5.8536553382873535,
      "learning_rate": 4.999678487776908e-05,
      "loss": 2.0906,
      "step": 1674
    },
    {
      "epoch": 0.0719318229759946,
      "grad_norm": 2.8606152534484863,
      "learning_rate": 4.9995296990983006e-05,
      "loss": 2.1356,
      "step": 1705
    },
    {
      "epoch": 0.07323967430283086,
      "grad_norm": 2.423034191131592,
      "learning_rate": 4.999352703566763e-05,
      "loss": 2.0468,
      "step": 1736
    },
    {
      "epoch": 0.07454752562966713,
      "grad_norm": 1.7297399044036865,
      "learning_rate": 4.999147503179668e-05,
      "loss": 2.0687,
      "step": 1767
    },
    {
      "epoch": 0.07585537695650339,
      "grad_norm": 4.29679536819458,
      "learning_rate": 4.998914100252672e-05,
      "loss": 2.026,
      "step": 1798
    },
    {
      "epoch": 0.07716322828333966,
      "grad_norm": 1.942807674407959,
      "learning_rate": 4.998652497419696e-05,
      "loss": 2.034,
      "step": 1829
    },
    {
      "epoch": 0.07847107961017592,
      "grad_norm": 1.8823152780532837,
      "learning_rate": 4.9983626976328927e-05,
      "loss": 2.0186,
      "step": 1860
    },
    {
      "epoch": 0.07977893093701219,
      "grad_norm": 2.135873556137085,
      "learning_rate": 4.998044704162613e-05,
      "loss": 2.0047,
      "step": 1891
    },
    {
      "epoch": 0.08108678226384845,
      "grad_norm": 3.113102436065674,
      "learning_rate": 4.9976985205973705e-05,
      "loss": 1.9954,
      "step": 1922
    },
    {
      "epoch": 0.08239463359068472,
      "grad_norm": 2.253596305847168,
      "learning_rate": 4.997324150843799e-05,
      "loss": 1.9744,
      "step": 1953
    },
    {
      "epoch": 0.08370248491752098,
      "grad_norm": 5.765998363494873,
      "learning_rate": 4.99692159912661e-05,
      "loss": 1.9908,
      "step": 1984
    },
    {
      "epoch": 0.08501033624435725,
      "grad_norm": 2.8442440032958984,
      "learning_rate": 4.996490869988546e-05,
      "loss": 1.9803,
      "step": 2015
    },
    {
      "epoch": 0.08631818757119351,
      "grad_norm": 1.7528963088989258,
      "learning_rate": 4.996031968290326e-05,
      "loss": 1.9419,
      "step": 2046
    },
    {
      "epoch": 0.08762603889802978,
      "grad_norm": 1.7754414081573486,
      "learning_rate": 4.995544899210594e-05,
      "loss": 1.9485,
      "step": 2077
    },
    {
      "epoch": 0.08893389022486604,
      "grad_norm": 1.6553430557250977,
      "learning_rate": 4.9950296682458583e-05,
      "loss": 1.9469,
      "step": 2108
    },
    {
      "epoch": 0.09024174155170232,
      "grad_norm": 1.7296561002731323,
      "learning_rate": 4.994486281210429e-05,
      "loss": 1.9195,
      "step": 2139
    },
    {
      "epoch": 0.09154959287853859,
      "grad_norm": 1.7001283168792725,
      "learning_rate": 4.9939147442363566e-05,
      "loss": 1.9324,
      "step": 2170
    },
    {
      "epoch": 0.09285744420537485,
      "grad_norm": 7.332984447479248,
      "learning_rate": 4.9933150637733574e-05,
      "loss": 1.9171,
      "step": 2201
    },
    {
      "epoch": 0.09416529553221112,
      "grad_norm": 1.6546969413757324,
      "learning_rate": 4.992687246588743e-05,
      "loss": 1.9152,
      "step": 2232
    },
    {
      "epoch": 0.09547314685904738,
      "grad_norm": 1.843203067779541,
      "learning_rate": 4.992031299767347e-05,
      "loss": 1.9015,
      "step": 2263
    },
    {
      "epoch": 0.09678099818588365,
      "grad_norm": 4.51094388961792,
      "learning_rate": 4.9913472307114386e-05,
      "loss": 1.917,
      "step": 2294
    },
    {
      "epoch": 0.09808884951271991,
      "grad_norm": 1.5267099142074585,
      "learning_rate": 4.9906350471406446e-05,
      "loss": 1.8985,
      "step": 2325
    },
    {
      "epoch": 0.09939670083955618,
      "grad_norm": 2.0826663970947266,
      "learning_rate": 4.989894757091861e-05,
      "loss": 1.8968,
      "step": 2356
    },
    {
      "epoch": 0.10070455216639244,
      "grad_norm": 1.4929964542388916,
      "learning_rate": 4.989126368919158e-05,
      "loss": 1.8711,
      "step": 2387
    },
    {
      "epoch": 0.10201240349322871,
      "grad_norm": 1.478498935699463,
      "learning_rate": 4.988329891293693e-05,
      "loss": 1.8672,
      "step": 2418
    },
    {
      "epoch": 0.10332025482006497,
      "grad_norm": 1.4785969257354736,
      "learning_rate": 4.987505333203608e-05,
      "loss": 1.8435,
      "step": 2449
    },
    {
      "epoch": 0.10462810614690124,
      "grad_norm": 1.524807333946228,
      "learning_rate": 4.9866527039539276e-05,
      "loss": 1.8543,
      "step": 2480
    },
    {
      "epoch": 0.1059359574737375,
      "grad_norm": 1.4232748746871948,
      "learning_rate": 4.9857720131664594e-05,
      "loss": 1.8463,
      "step": 2511
    },
    {
      "epoch": 0.10724380880057377,
      "grad_norm": 1.7178603410720825,
      "learning_rate": 4.9848632707796773e-05,
      "loss": 1.8512,
      "step": 2542
    },
    {
      "epoch": 0.10855166012741004,
      "grad_norm": 1.4624053239822388,
      "learning_rate": 4.9839264870486155e-05,
      "loss": 1.8421,
      "step": 2573
    },
    {
      "epoch": 0.1098595114542463,
      "grad_norm": 2.0191211700439453,
      "learning_rate": 4.9829616725447526e-05,
      "loss": 1.8393,
      "step": 2604
    },
    {
      "epoch": 0.11116736278108257,
      "grad_norm": 1.4346847534179688,
      "learning_rate": 4.981968838155888e-05,
      "loss": 1.8149,
      "step": 2635
    },
    {
      "epoch": 0.11247521410791883,
      "grad_norm": 1.4674941301345825,
      "learning_rate": 4.980947995086024e-05,
      "loss": 1.8215,
      "step": 2666
    },
    {
      "epoch": 0.1137830654347551,
      "grad_norm": 1.4122453927993774,
      "learning_rate": 4.979899154855234e-05,
      "loss": 1.8265,
      "step": 2697
    },
    {
      "epoch": 0.11509091676159136,
      "grad_norm": 1.4748708009719849,
      "learning_rate": 4.9788223292995386e-05,
      "loss": 1.8149,
      "step": 2728
    },
    {
      "epoch": 0.11639876808842763,
      "grad_norm": 1.40476655960083,
      "learning_rate": 4.977717530570768e-05,
      "loss": 1.8316,
      "step": 2759
    },
    {
      "epoch": 0.11770661941526389,
      "grad_norm": 1.4909706115722656,
      "learning_rate": 4.976584771136425e-05,
      "loss": 1.8208,
      "step": 2790
    },
    {
      "epoch": 0.11901447074210016,
      "grad_norm": 1.3661363124847412,
      "learning_rate": 4.975424063779547e-05,
      "loss": 1.8038,
      "step": 2821
    },
    {
      "epoch": 0.12032232206893642,
      "grad_norm": 1.422453761100769,
      "learning_rate": 4.974235421598557e-05,
      "loss": 1.7926,
      "step": 2852
    },
    {
      "epoch": 0.12163017339577269,
      "grad_norm": 1.3766013383865356,
      "learning_rate": 4.973018858007122e-05,
      "loss": 1.8192,
      "step": 2883
    },
    {
      "epoch": 0.12293802472260895,
      "grad_norm": 1.3904545307159424,
      "learning_rate": 4.9717743867339963e-05,
      "loss": 1.7653,
      "step": 2914
    },
    {
      "epoch": 0.12424587604944522,
      "grad_norm": 1.369018316268921,
      "learning_rate": 4.9705020218228695e-05,
      "loss": 1.7879,
      "step": 2945
    },
    {
      "epoch": 0.12555372737628148,
      "grad_norm": 1.5040982961654663,
      "learning_rate": 4.969201777632205e-05,
      "loss": 1.7777,
      "step": 2976
    },
    {
      "epoch": 0.12686157870311776,
      "grad_norm": 1.3161810636520386,
      "learning_rate": 4.9678736688350846e-05,
      "loss": 1.7654,
      "step": 3007
    },
    {
      "epoch": 0.128169430029954,
      "grad_norm": 1.3379234075546265,
      "learning_rate": 4.966517710419033e-05,
      "loss": 1.7699,
      "step": 3038
    },
    {
      "epoch": 0.1294772813567903,
      "grad_norm": 1.3384974002838135,
      "learning_rate": 4.965133917685858e-05,
      "loss": 1.7915,
      "step": 3069
    },
    {
      "epoch": 0.13078513268362654,
      "grad_norm": 1.2991316318511963,
      "learning_rate": 4.9637223062514714e-05,
      "loss": 1.7636,
      "step": 3100
    },
    {
      "epoch": 0.13209298401046282,
      "grad_norm": 2.500105857849121,
      "learning_rate": 4.962282892045718e-05,
      "loss": 1.7573,
      "step": 3131
    },
    {
      "epoch": 0.13340083533729907,
      "grad_norm": 1.2635610103607178,
      "learning_rate": 4.9608156913121904e-05,
      "loss": 1.7537,
      "step": 3162
    },
    {
      "epoch": 0.13470868666413535,
      "grad_norm": 1.2823940515518188,
      "learning_rate": 4.959320720608049e-05,
      "loss": 1.7709,
      "step": 3193
    },
    {
      "epoch": 0.1360165379909716,
      "grad_norm": 1.2606983184814453,
      "learning_rate": 4.9577979968038354e-05,
      "loss": 1.7611,
      "step": 3224
    },
    {
      "epoch": 0.13732438931780788,
      "grad_norm": 1.305782437324524,
      "learning_rate": 4.956247537083282e-05,
      "loss": 1.7547,
      "step": 3255
    },
    {
      "epoch": 0.13863224064464413,
      "grad_norm": 1.2986756563186646,
      "learning_rate": 4.9546693589431145e-05,
      "loss": 1.7241,
      "step": 3286
    },
    {
      "epoch": 0.1399400919714804,
      "grad_norm": 1.3163083791732788,
      "learning_rate": 4.9530634801928595e-05,
      "loss": 1.7352,
      "step": 3317
    },
    {
      "epoch": 0.14124794329831666,
      "grad_norm": 1.2844524383544922,
      "learning_rate": 4.9514299189546395e-05,
      "loss": 1.7316,
      "step": 3348
    },
    {
      "epoch": 0.14255579462515294,
      "grad_norm": 1.2861442565917969,
      "learning_rate": 4.949768693662973e-05,
      "loss": 1.7233,
      "step": 3379
    },
    {
      "epoch": 0.1438636459519892,
      "grad_norm": 1.2419766187667847,
      "learning_rate": 4.948079823064559e-05,
      "loss": 1.7278,
      "step": 3410
    },
    {
      "epoch": 0.14517149727882547,
      "grad_norm": 1.2518367767333984,
      "learning_rate": 4.946363326218074e-05,
      "loss": 1.7399,
      "step": 3441
    },
    {
      "epoch": 0.14647934860566172,
      "grad_norm": 1.217373251914978,
      "learning_rate": 4.9446192224939525e-05,
      "loss": 1.7386,
      "step": 3472
    },
    {
      "epoch": 0.147787199932498,
      "grad_norm": 1.2229036092758179,
      "learning_rate": 4.942847531574167e-05,
      "loss": 1.7096,
      "step": 3503
    },
    {
      "epoch": 0.14909505125933425,
      "grad_norm": 1.2012135982513428,
      "learning_rate": 4.941048273452008e-05,
      "loss": 1.7195,
      "step": 3534
    },
    {
      "epoch": 0.15040290258617053,
      "grad_norm": 1.2632161378860474,
      "learning_rate": 4.9392214684318605e-05,
      "loss": 1.7377,
      "step": 3565
    },
    {
      "epoch": 0.15171075391300679,
      "grad_norm": 1.2119094133377075,
      "learning_rate": 4.93736713712897e-05,
      "loss": 1.7276,
      "step": 3596
    },
    {
      "epoch": 0.15301860523984306,
      "grad_norm": 1.2516822814941406,
      "learning_rate": 4.9354853004692124e-05,
      "loss": 1.7224,
      "step": 3627
    },
    {
      "epoch": 0.15432645656667932,
      "grad_norm": 1.2437561750411987,
      "learning_rate": 4.93357597968886e-05,
      "loss": 1.703,
      "step": 3658
    },
    {
      "epoch": 0.1556343078935156,
      "grad_norm": 1.4856292009353638,
      "learning_rate": 4.931639196334338e-05,
      "loss": 1.7139,
      "step": 3689
    },
    {
      "epoch": 0.15694215922035185,
      "grad_norm": 1.202919840812683,
      "learning_rate": 4.9296749722619826e-05,
      "loss": 1.7081,
      "step": 3720
    },
    {
      "epoch": 0.15825001054718812,
      "grad_norm": 1.1720086336135864,
      "learning_rate": 4.9276833296377966e-05,
      "loss": 1.7086,
      "step": 3751
    },
    {
      "epoch": 0.15955786187402438,
      "grad_norm": 1.2187249660491943,
      "learning_rate": 4.925664290937196e-05,
      "loss": 1.6871,
      "step": 3782
    },
    {
      "epoch": 0.16086571320086065,
      "grad_norm": 1.1918401718139648,
      "learning_rate": 4.9236178789447576e-05,
      "loss": 1.7058,
      "step": 3813
    },
    {
      "epoch": 0.1621735645276969,
      "grad_norm": 1.1956909894943237,
      "learning_rate": 4.921544116753962e-05,
      "loss": 1.6986,
      "step": 3844
    },
    {
      "epoch": 0.16348141585453319,
      "grad_norm": 1.1646822690963745,
      "learning_rate": 4.919443027766935e-05,
      "loss": 1.6797,
      "step": 3875
    },
    {
      "epoch": 0.16478926718136944,
      "grad_norm": 1.2145137786865234,
      "learning_rate": 4.91731463569418e-05,
      "loss": 1.678,
      "step": 3906
    },
    {
      "epoch": 0.16609711850820572,
      "grad_norm": 1.2347453832626343,
      "learning_rate": 4.915158964554312e-05,
      "loss": 1.672,
      "step": 3937
    },
    {
      "epoch": 0.16740496983504197,
      "grad_norm": 1.1764047145843506,
      "learning_rate": 4.912976038673786e-05,
      "loss": 1.6864,
      "step": 3968
    },
    {
      "epoch": 0.16871282116187825,
      "grad_norm": 1.1730256080627441,
      "learning_rate": 4.9107658826866254e-05,
      "loss": 1.7021,
      "step": 3999
    },
    {
      "epoch": 0.1700206724887145,
      "grad_norm": 1.1880205869674683,
      "learning_rate": 4.908528521534139e-05,
      "loss": 1.6832,
      "step": 4030
    },
    {
      "epoch": 0.17132852381555078,
      "grad_norm": 1.1810024976730347,
      "learning_rate": 4.906263980464644e-05,
      "loss": 1.6674,
      "step": 4061
    },
    {
      "epoch": 0.17263637514238703,
      "grad_norm": 1.2133933305740356,
      "learning_rate": 4.903972285033178e-05,
      "loss": 1.6887,
      "step": 4092
    },
    {
      "epoch": 0.1739442264692233,
      "grad_norm": 1.1253775358200073,
      "learning_rate": 4.901653461101213e-05,
      "loss": 1.6884,
      "step": 4123
    },
    {
      "epoch": 0.17525207779605956,
      "grad_norm": 1.215759038925171,
      "learning_rate": 4.8993075348363626e-05,
      "loss": 1.7013,
      "step": 4154
    },
    {
      "epoch": 0.17655992912289584,
      "grad_norm": 1.1305285692214966,
      "learning_rate": 4.896934532712084e-05,
      "loss": 1.6573,
      "step": 4185
    },
    {
      "epoch": 0.1778677804497321,
      "grad_norm": 1.127212405204773,
      "learning_rate": 4.8945344815073846e-05,
      "loss": 1.6769,
      "step": 4216
    },
    {
      "epoch": 0.17917563177656837,
      "grad_norm": 1.1379367113113403,
      "learning_rate": 4.892107408306516e-05,
      "loss": 1.6583,
      "step": 4247
    },
    {
      "epoch": 0.18048348310340465,
      "grad_norm": 1.0926482677459717,
      "learning_rate": 4.889653340498669e-05,
      "loss": 1.6674,
      "step": 4278
    },
    {
      "epoch": 0.1817913344302409,
      "grad_norm": 1.1098601818084717,
      "learning_rate": 4.8871723057776664e-05,
      "loss": 1.6598,
      "step": 4309
    },
    {
      "epoch": 0.18309918575707718,
      "grad_norm": 1.129784345626831,
      "learning_rate": 4.8846643321416476e-05,
      "loss": 1.6786,
      "step": 4340
    },
    {
      "epoch": 0.18440703708391343,
      "grad_norm": 1.1000723838806152,
      "learning_rate": 4.882129447892753e-05,
      "loss": 1.6623,
      "step": 4371
    },
    {
      "epoch": 0.1857148884107497,
      "grad_norm": 1.2229663133621216,
      "learning_rate": 4.8795676816368076e-05,
      "loss": 1.6675,
      "step": 4402
    },
    {
      "epoch": 0.18702273973758596,
      "grad_norm": 1.1608648300170898,
      "learning_rate": 4.876979062282995e-05,
      "loss": 1.6499,
      "step": 4433
    },
    {
      "epoch": 0.18833059106442224,
      "grad_norm": 1.1060220003128052,
      "learning_rate": 4.8743636190435325e-05,
      "loss": 1.6453,
      "step": 4464
    },
    {
      "epoch": 0.1896384423912585,
      "grad_norm": 1.1028501987457275,
      "learning_rate": 4.871721381433344e-05,
      "loss": 1.6393,
      "step": 4495
    },
    {
      "epoch": 0.19094629371809477,
      "grad_norm": 1.1736187934875488,
      "learning_rate": 4.869052379269719e-05,
      "loss": 1.6563,
      "step": 4526
    },
    {
      "epoch": 0.19225414504493102,
      "grad_norm": 1.1284164190292358,
      "learning_rate": 4.866356642671985e-05,
      "loss": 1.6535,
      "step": 4557
    },
    {
      "epoch": 0.1935619963717673,
      "grad_norm": 1.1155307292938232,
      "learning_rate": 4.8636342020611634e-05,
      "loss": 1.6417,
      "step": 4588
    },
    {
      "epoch": 0.19486984769860355,
      "grad_norm": 1.1379483938217163,
      "learning_rate": 4.860885088159626e-05,
      "loss": 1.6432,
      "step": 4619
    },
    {
      "epoch": 0.19617769902543983,
      "grad_norm": 1.1166200637817383,
      "learning_rate": 4.858109331990751e-05,
      "loss": 1.6545,
      "step": 4650
    },
    {
      "epoch": 0.19748555035227608,
      "grad_norm": 1.081626296043396,
      "learning_rate": 4.855306964878567e-05,
      "loss": 1.6402,
      "step": 4681
    },
    {
      "epoch": 0.19879340167911236,
      "grad_norm": 1.0875532627105713,
      "learning_rate": 4.8524780184474084e-05,
      "loss": 1.6498,
      "step": 4712
    },
    {
      "epoch": 0.2001012530059486,
      "grad_norm": 1.0928492546081543,
      "learning_rate": 4.8496225246215496e-05,
      "loss": 1.6341,
      "step": 4743
    },
    {
      "epoch": 0.2014091043327849,
      "grad_norm": 1.1116279363632202,
      "learning_rate": 4.8467405156248505e-05,
      "loss": 1.6389,
      "step": 4774
    },
    {
      "epoch": 0.20271695565962114,
      "grad_norm": 1.0984513759613037,
      "learning_rate": 4.843832023980392e-05,
      "loss": 1.6277,
      "step": 4805
    },
    {
      "epoch": 0.20402480698645742,
      "grad_norm": 1.09713876247406,
      "learning_rate": 4.840897082510106e-05,
      "loss": 1.6244,
      "step": 4836
    },
    {
      "epoch": 0.20533265831329367,
      "grad_norm": 1.089237928390503,
      "learning_rate": 4.8379357243344084e-05,
      "loss": 1.6136,
      "step": 4867
    },
    {
      "epoch": 0.20664050964012995,
      "grad_norm": 1.1337491273880005,
      "learning_rate": 4.8349479828718236e-05,
      "loss": 1.641,
      "step": 4898
    },
    {
      "epoch": 0.2079483609669662,
      "grad_norm": 1.1337246894836426,
      "learning_rate": 4.8319338918386075e-05,
      "loss": 1.6163,
      "step": 4929
    },
    {
      "epoch": 0.20925621229380248,
      "grad_norm": 1.112273097038269,
      "learning_rate": 4.828893485248369e-05,
      "loss": 1.6384,
      "step": 4960
    },
    {
      "epoch": 0.21056406362063873,
      "grad_norm": 1.049856424331665,
      "learning_rate": 4.825826797411682e-05,
      "loss": 1.6245,
      "step": 4991
    },
    {
      "epoch": 0.211871914947475,
      "grad_norm": 2.468637704849243,
      "learning_rate": 4.822733862935702e-05,
      "loss": 1.6165,
      "step": 5022
    },
    {
      "epoch": 0.21317976627431126,
      "grad_norm": 1.0854454040527344,
      "learning_rate": 4.819614716723775e-05,
      "loss": 1.6252,
      "step": 5053
    },
    {
      "epoch": 0.21448761760114754,
      "grad_norm": 1.0815082788467407,
      "learning_rate": 4.8164693939750425e-05,
      "loss": 1.6096,
      "step": 5084
    },
    {
      "epoch": 0.2157954689279838,
      "grad_norm": 1.053945541381836,
      "learning_rate": 4.813297930184042e-05,
      "loss": 1.6296,
      "step": 5115
    },
    {
      "epoch": 0.21710332025482007,
      "grad_norm": 1.06475031375885,
      "learning_rate": 4.810100361140314e-05,
      "loss": 1.629,
      "step": 5146
    },
    {
      "epoch": 0.21841117158165632,
      "grad_norm": 1.1072969436645508,
      "learning_rate": 4.8068767229279885e-05,
      "loss": 1.6244,
      "step": 5177
    },
    {
      "epoch": 0.2197190229084926,
      "grad_norm": 1.0536466836929321,
      "learning_rate": 4.8036270519253854e-05,
      "loss": 1.6474,
      "step": 5208
    },
    {
      "epoch": 0.22102687423532885,
      "grad_norm": 1.1039215326309204,
      "learning_rate": 4.8003513848046e-05,
      "loss": 1.6354,
      "step": 5239
    },
    {
      "epoch": 0.22233472556216513,
      "grad_norm": 1.0463244915008545,
      "learning_rate": 4.79704975853109e-05,
      "loss": 1.6154,
      "step": 5270
    },
    {
      "epoch": 0.22364257688900138,
      "grad_norm": 1.0931519269943237,
      "learning_rate": 4.793722210363262e-05,
      "loss": 1.6037,
      "step": 5301
    },
    {
      "epoch": 0.22495042821583766,
      "grad_norm": 1.0838814973831177,
      "learning_rate": 4.7903687778520414e-05,
      "loss": 1.6352,
      "step": 5332
    },
    {
      "epoch": 0.2262582795426739,
      "grad_norm": 1.0400842428207397,
      "learning_rate": 4.7869894988404593e-05,
      "loss": 1.5924,
      "step": 5363
    },
    {
      "epoch": 0.2275661308695102,
      "grad_norm": 1.0707345008850098,
      "learning_rate": 4.783584411463221e-05,
      "loss": 1.6211,
      "step": 5394
    },
    {
      "epoch": 0.22887398219634644,
      "grad_norm": 1.0422611236572266,
      "learning_rate": 4.780153554146274e-05,
      "loss": 1.5969,
      "step": 5425
    },
    {
      "epoch": 0.23018183352318272,
      "grad_norm": 1.0401543378829956,
      "learning_rate": 4.7766969656063766e-05,
      "loss": 1.5974,
      "step": 5456
    },
    {
      "epoch": 0.23148968485001897,
      "grad_norm": 1.0429288148880005,
      "learning_rate": 4.773214684850662e-05,
      "loss": 1.6043,
      "step": 5487
    },
    {
      "epoch": 0.23279753617685525,
      "grad_norm": 1.098827600479126,
      "learning_rate": 4.769706751176193e-05,
      "loss": 1.5984,
      "step": 5518
    },
    {
      "epoch": 0.2341053875036915,
      "grad_norm": 1.0090724229812622,
      "learning_rate": 4.7661732041695264e-05,
      "loss": 1.6012,
      "step": 5549
    },
    {
      "epoch": 0.23541323883052778,
      "grad_norm": 1.0732818841934204,
      "learning_rate": 4.762614083706258e-05,
      "loss": 1.5996,
      "step": 5580
    },
    {
      "epoch": 0.23672109015736403,
      "grad_norm": 1.082144856452942,
      "learning_rate": 4.759029429950581e-05,
      "loss": 1.6127,
      "step": 5611
    },
    {
      "epoch": 0.2380289414842003,
      "grad_norm": 1.0642670392990112,
      "learning_rate": 4.7554192833548235e-05,
      "loss": 1.5962,
      "step": 5642
    },
    {
      "epoch": 0.23933679281103656,
      "grad_norm": 1.1351134777069092,
      "learning_rate": 4.751783684659e-05,
      "loss": 1.5983,
      "step": 5673
    },
    {
      "epoch": 0.24064464413787284,
      "grad_norm": 1.09076726436615,
      "learning_rate": 4.748122674890348e-05,
      "loss": 1.6162,
      "step": 5704
    },
    {
      "epoch": 0.24195249546470912,
      "grad_norm": 1.0234689712524414,
      "learning_rate": 4.7444362953628654e-05,
      "loss": 1.596,
      "step": 5735
    },
    {
      "epoch": 0.24326034679154537,
      "grad_norm": 1.1209464073181152,
      "learning_rate": 4.7407245876768424e-05,
      "loss": 1.602,
      "step": 5766
    },
    {
      "epoch": 0.24456819811838165,
      "grad_norm": 1.0940710306167603,
      "learning_rate": 4.736987593718397e-05,
      "loss": 1.5992,
      "step": 5797
    },
    {
      "epoch": 0.2458760494452179,
      "grad_norm": 1.0460273027420044,
      "learning_rate": 4.733225355658999e-05,
      "loss": 1.5881,
      "step": 5828
    },
    {
      "epoch": 0.24718390077205418,
      "grad_norm": 1.0842856168746948,
      "learning_rate": 4.7294379159549926e-05,
      "loss": 1.594,
      "step": 5859
    },
    {
      "epoch": 0.24849175209889043,
      "grad_norm": 1.0244876146316528,
      "learning_rate": 4.725625317347119e-05,
      "loss": 1.583,
      "step": 5890
    },
    {
      "epoch": 0.2497996034257267,
      "grad_norm": 1.0182656049728394,
      "learning_rate": 4.7217876028600374e-05,
      "loss": 1.5839,
      "step": 5921
    },
    {
      "epoch": 0.25110745475256296,
      "grad_norm": 1.0451878309249878,
      "learning_rate": 4.717924815801832e-05,
      "loss": 1.6051,
      "step": 5952
    },
    {
      "epoch": 0.2524153060793992,
      "grad_norm": 1.0340505838394165,
      "learning_rate": 4.714036999763532e-05,
      "loss": 1.5969,
      "step": 5983
    },
    {
      "epoch": 0.2537231574062355,
      "grad_norm": 1.0457735061645508,
      "learning_rate": 4.7101241986186116e-05,
      "loss": 1.5971,
      "step": 6014
    },
    {
      "epoch": 0.2550310087330718,
      "grad_norm": 1.034166693687439,
      "learning_rate": 4.7061864565225e-05,
      "loss": 1.5982,
      "step": 6045
    },
    {
      "epoch": 0.256338860059908,
      "grad_norm": 1.0326176881790161,
      "learning_rate": 4.702223817912081e-05,
      "loss": 1.5691,
      "step": 6076
    },
    {
      "epoch": 0.2576467113867443,
      "grad_norm": 1.0339311361312866,
      "learning_rate": 4.698236327505195e-05,
      "loss": 1.5726,
      "step": 6107
    },
    {
      "epoch": 0.2589545627135806,
      "grad_norm": 1.022299885749817,
      "learning_rate": 4.694224030300127e-05,
      "loss": 1.5935,
      "step": 6138
    },
    {
      "epoch": 0.26026241404041683,
      "grad_norm": 1.009255051612854,
      "learning_rate": 4.690186971575107e-05,
      "loss": 1.574,
      "step": 6169
    },
    {
      "epoch": 0.2615702653672531,
      "grad_norm": 1.0181437730789185,
      "learning_rate": 4.6861251968877916e-05,
      "loss": 1.57,
      "step": 6200
    },
    {
      "epoch": 0.26287811669408934,
      "grad_norm": 1.0302594900131226,
      "learning_rate": 4.68203875207476e-05,
      "loss": 1.5785,
      "step": 6231
    },
    {
      "epoch": 0.26418596802092564,
      "grad_norm": 1.006404161453247,
      "learning_rate": 4.677927683250983e-05,
      "loss": 1.5798,
      "step": 6262
    },
    {
      "epoch": 0.2654938193477619,
      "grad_norm": 0.993584394454956,
      "learning_rate": 4.6737920368093156e-05,
      "loss": 1.5558,
      "step": 6293
    },
    {
      "epoch": 0.26680167067459815,
      "grad_norm": 0.9635722637176514,
      "learning_rate": 4.669631859419965e-05,
      "loss": 1.5716,
      "step": 6324
    },
    {
      "epoch": 0.2681095220014344,
      "grad_norm": 0.9815465211868286,
      "learning_rate": 4.6654471980299676e-05,
      "loss": 1.5472,
      "step": 6355
    },
    {
      "epoch": 0.2694173733282707,
      "grad_norm": 0.9921912550926208,
      "learning_rate": 4.661238099862658e-05,
      "loss": 1.5786,
      "step": 6386
    },
    {
      "epoch": 0.27072522465510696,
      "grad_norm": 1.0193076133728027,
      "learning_rate": 4.657004612417138e-05,
      "loss": 1.5672,
      "step": 6417
    },
    {
      "epoch": 0.2720330759819432,
      "grad_norm": 0.9979498982429504,
      "learning_rate": 4.6527467834677374e-05,
      "loss": 1.5549,
      "step": 6448
    },
    {
      "epoch": 0.27334092730877946,
      "grad_norm": 1.0342402458190918,
      "learning_rate": 4.648464661063478e-05,
      "loss": 1.5808,
      "step": 6479
    },
    {
      "epoch": 0.27464877863561576,
      "grad_norm": 0.9821639060974121,
      "learning_rate": 4.6441582935275264e-05,
      "loss": 1.5642,
      "step": 6510
    },
    {
      "epoch": 0.275956629962452,
      "grad_norm": 1.002773404121399,
      "learning_rate": 4.6398277294566586e-05,
      "loss": 1.5646,
      "step": 6541
    },
    {
      "epoch": 0.27726448128928827,
      "grad_norm": 1.0241411924362183,
      "learning_rate": 4.6354730177207e-05,
      "loss": 1.5744,
      "step": 6572
    },
    {
      "epoch": 0.2785723326161245,
      "grad_norm": 1.0101004838943481,
      "learning_rate": 4.6310942074619787e-05,
      "loss": 1.5905,
      "step": 6603
    },
    {
      "epoch": 0.2798801839429608,
      "grad_norm": 1.0355374813079834,
      "learning_rate": 4.626691348094777e-05,
      "loss": 1.5713,
      "step": 6634
    },
    {
      "epoch": 0.2811880352697971,
      "grad_norm": 1.0100233554840088,
      "learning_rate": 4.622264489304762e-05,
      "loss": 1.5491,
      "step": 6665
    },
    {
      "epoch": 0.2824958865966333,
      "grad_norm": 1.0172356367111206,
      "learning_rate": 4.617813681048434e-05,
      "loss": 1.562,
      "step": 6696
    },
    {
      "epoch": 0.2838037379234696,
      "grad_norm": 0.994799017906189,
      "learning_rate": 4.61333897355256e-05,
      "loss": 1.5526,
      "step": 6727
    },
    {
      "epoch": 0.2851115892503059,
      "grad_norm": 1.1320250034332275,
      "learning_rate": 4.608840417313604e-05,
      "loss": 1.5616,
      "step": 6758
    },
    {
      "epoch": 0.28641944057714214,
      "grad_norm": 1.013725996017456,
      "learning_rate": 4.6043180630971646e-05,
      "loss": 1.5345,
      "step": 6789
    },
    {
      "epoch": 0.2877272919039784,
      "grad_norm": 1.045462727546692,
      "learning_rate": 4.599771961937391e-05,
      "loss": 1.56,
      "step": 6820
    },
    {
      "epoch": 0.28903514323081464,
      "grad_norm": 1.0075098276138306,
      "learning_rate": 4.5952021651364204e-05,
      "loss": 1.5825,
      "step": 6851
    },
    {
      "epoch": 0.29034299455765095,
      "grad_norm": 0.9952128529548645,
      "learning_rate": 4.590608724263786e-05,
      "loss": 1.5581,
      "step": 6882
    },
    {
      "epoch": 0.2916508458844872,
      "grad_norm": 0.9492574334144592,
      "learning_rate": 4.585991691155845e-05,
      "loss": 1.554,
      "step": 6913
    },
    {
      "epoch": 0.29295869721132345,
      "grad_norm": 0.9850384593009949,
      "learning_rate": 4.581351117915188e-05,
      "loss": 1.5614,
      "step": 6944
    },
    {
      "epoch": 0.2942665485381597,
      "grad_norm": 0.9840113520622253,
      "learning_rate": 4.5766870569100534e-05,
      "loss": 1.5631,
      "step": 6975
    },
    {
      "epoch": 0.295574399864996,
      "grad_norm": 0.9830542802810669,
      "learning_rate": 4.571999560773736e-05,
      "loss": 1.5597,
      "step": 7006
    },
    {
      "epoch": 0.29688225119183226,
      "grad_norm": 0.9737412929534912,
      "learning_rate": 4.5672886824039915e-05,
      "loss": 1.5412,
      "step": 7037
    },
    {
      "epoch": 0.2981901025186685,
      "grad_norm": 1.0272278785705566,
      "learning_rate": 4.5625544749624435e-05,
      "loss": 1.5724,
      "step": 7068
    },
    {
      "epoch": 0.29949795384550476,
      "grad_norm": 0.973682165145874,
      "learning_rate": 4.5577969918739794e-05,
      "loss": 1.5739,
      "step": 7099
    },
    {
      "epoch": 0.30080580517234107,
      "grad_norm": 0.9709253907203674,
      "learning_rate": 4.5530162868261486e-05,
      "loss": 1.5666,
      "step": 7130
    },
    {
      "epoch": 0.3021136564991773,
      "grad_norm": 0.9426018595695496,
      "learning_rate": 4.548212413768558e-05,
      "loss": 1.5542,
      "step": 7161
    },
    {
      "epoch": 0.30342150782601357,
      "grad_norm": 1.026181697845459,
      "learning_rate": 4.543385426912261e-05,
      "loss": 1.5235,
      "step": 7192
    },
    {
      "epoch": 0.3047293591528499,
      "grad_norm": 0.9783868193626404,
      "learning_rate": 4.53853538072915e-05,
      "loss": 1.5591,
      "step": 7223
    },
    {
      "epoch": 0.30603721047968613,
      "grad_norm": 0.9816028475761414,
      "learning_rate": 4.533662329951336e-05,
      "loss": 1.5484,
      "step": 7254
    },
    {
      "epoch": 0.3073450618065224,
      "grad_norm": 0.9516949653625488,
      "learning_rate": 4.528766329570536e-05,
      "loss": 1.5251,
      "step": 7285
    },
    {
      "epoch": 0.30865291313335863,
      "grad_norm": 0.9601557850837708,
      "learning_rate": 4.523847434837447e-05,
      "loss": 1.5668,
      "step": 7316
    },
    {
      "epoch": 0.30996076446019494,
      "grad_norm": 0.9607630372047424,
      "learning_rate": 4.518905701261128e-05,
      "loss": 1.5576,
      "step": 7347
    },
    {
      "epoch": 0.3112686157870312,
      "grad_norm": 0.9606896638870239,
      "learning_rate": 4.5139411846083715e-05,
      "loss": 1.5441,
      "step": 7378
    },
    {
      "epoch": 0.31257646711386744,
      "grad_norm": 1.0111651420593262,
      "learning_rate": 4.508953940903073e-05,
      "loss": 1.5586,
      "step": 7409
    },
    {
      "epoch": 0.3138843184407037,
      "grad_norm": 0.9388339519500732,
      "learning_rate": 4.5039440264255994e-05,
      "loss": 1.5472,
      "step": 7440
    },
    {
      "epoch": 0.31519216976754,
      "grad_norm": 0.9507540464401245,
      "learning_rate": 4.498911497712155e-05,
      "loss": 1.5579,
      "step": 7471
    },
    {
      "epoch": 0.31650002109437625,
      "grad_norm": 0.9572455286979675,
      "learning_rate": 4.493856411554142e-05,
      "loss": 1.5433,
      "step": 7502
    },
    {
      "epoch": 0.3178078724212125,
      "grad_norm": 0.9485601782798767,
      "learning_rate": 4.4887788249975206e-05,
      "loss": 1.5513,
      "step": 7533
    },
    {
      "epoch": 0.31911572374804875,
      "grad_norm": 0.9995166063308716,
      "learning_rate": 4.4836787953421656e-05,
      "loss": 1.5289,
      "step": 7564
    },
    {
      "epoch": 0.32042357507488506,
      "grad_norm": 1.1004894971847534,
      "learning_rate": 4.478556380141218e-05,
      "loss": 1.5306,
      "step": 7595
    },
    {
      "epoch": 0.3217314264017213,
      "grad_norm": 0.9642030000686646,
      "learning_rate": 4.4734116372004375e-05,
      "loss": 1.5497,
      "step": 7626
    },
    {
      "epoch": 0.32303927772855756,
      "grad_norm": 0.9486343860626221,
      "learning_rate": 4.4682446245775477e-05,
      "loss": 1.5458,
      "step": 7657
    },
    {
      "epoch": 0.3243471290553938,
      "grad_norm": 0.942319929599762,
      "learning_rate": 4.463055400581586e-05,
      "loss": 1.5306,
      "step": 7688
    },
    {
      "epoch": 0.3256549803822301,
      "grad_norm": 0.9433887004852295,
      "learning_rate": 4.4578440237722374e-05,
      "loss": 1.5394,
      "step": 7719
    },
    {
      "epoch": 0.32696283170906637,
      "grad_norm": 0.9954056739807129,
      "learning_rate": 4.452610552959183e-05,
      "loss": 1.551,
      "step": 7750
    },
    {
      "epoch": 0.3282706830359026,
      "grad_norm": 0.9406622052192688,
      "learning_rate": 4.447355047201428e-05,
      "loss": 1.5481,
      "step": 7781
    },
    {
      "epoch": 0.3295785343627389,
      "grad_norm": 0.9743403792381287,
      "learning_rate": 4.4420775658066414e-05,
      "loss": 1.533,
      "step": 7812
    },
    {
      "epoch": 0.3308863856895752,
      "grad_norm": 0.9561812877655029,
      "learning_rate": 4.436778168330484e-05,
      "loss": 1.5615,
      "step": 7843
    },
    {
      "epoch": 0.33219423701641143,
      "grad_norm": 0.9623595476150513,
      "learning_rate": 4.4314569145759353e-05,
      "loss": 1.5463,
      "step": 7874
    },
    {
      "epoch": 0.3335020883432477,
      "grad_norm": 0.9475456476211548,
      "learning_rate": 4.42611386459262e-05,
      "loss": 1.558,
      "step": 7905
    },
    {
      "epoch": 0.33480993967008393,
      "grad_norm": 0.9283867478370667,
      "learning_rate": 4.420749078676133e-05,
      "loss": 1.5237,
      "step": 7936
    },
    {
      "epoch": 0.33611779099692024,
      "grad_norm": 0.9494192004203796,
      "learning_rate": 4.4153626173673516e-05,
      "loss": 1.5315,
      "step": 7967
    },
    {
      "epoch": 0.3374256423237565,
      "grad_norm": 0.9378790855407715,
      "learning_rate": 4.409954541451762e-05,
      "loss": 1.5378,
      "step": 7998
    },
    {
      "epoch": 0.33873349365059274,
      "grad_norm": 0.9318397045135498,
      "learning_rate": 4.404524911958764e-05,
      "loss": 1.5199,
      "step": 8029
    },
    {
      "epoch": 0.340041344977429,
      "grad_norm": 0.9440187215805054,
      "learning_rate": 4.399073790160989e-05,
      "loss": 1.5313,
      "step": 8060
    },
    {
      "epoch": 0.3413491963042653,
      "grad_norm": 0.9643428921699524,
      "learning_rate": 4.393601237573607e-05,
      "loss": 1.5267,
      "step": 8091
    },
    {
      "epoch": 0.34265704763110155,
      "grad_norm": 0.9626632928848267,
      "learning_rate": 4.388107315953628e-05,
      "loss": 1.5312,
      "step": 8122
    },
    {
      "epoch": 0.3439648989579378,
      "grad_norm": 0.9867373704910278,
      "learning_rate": 4.382592087299212e-05,
      "loss": 1.5162,
      "step": 8153
    },
    {
      "epoch": 0.34527275028477405,
      "grad_norm": 0.9584757685661316,
      "learning_rate": 4.377055613848964e-05,
      "loss": 1.5201,
      "step": 8184
    },
    {
      "epoch": 0.34658060161161036,
      "grad_norm": 0.9587807655334473,
      "learning_rate": 4.3714979580812355e-05,
      "loss": 1.55,
      "step": 8215
    },
    {
      "epoch": 0.3478884529384466,
      "grad_norm": 0.948785662651062,
      "learning_rate": 4.365919182713416e-05,
      "loss": 1.5392,
      "step": 8246
    },
    {
      "epoch": 0.34919630426528286,
      "grad_norm": 0.9717795252799988,
      "learning_rate": 4.360319350701226e-05,
      "loss": 1.5112,
      "step": 8277
    },
    {
      "epoch": 0.3505041555921191,
      "grad_norm": 0.9338726997375488,
      "learning_rate": 4.3546985252380115e-05,
      "loss": 1.5371,
      "step": 8308
    },
    {
      "epoch": 0.3518120069189554,
      "grad_norm": 0.9878547191619873,
      "learning_rate": 4.349056769754021e-05,
      "loss": 1.5349,
      "step": 8339
    },
    {
      "epoch": 0.3531198582457917,
      "grad_norm": 1.0211338996887207,
      "learning_rate": 4.3433941479156994e-05,
      "loss": 1.5234,
      "step": 8370
    },
    {
      "epoch": 0.3544277095726279,
      "grad_norm": 0.944474458694458,
      "learning_rate": 4.3377107236249647e-05,
      "loss": 1.5306,
      "step": 8401
    },
    {
      "epoch": 0.3557355608994642,
      "grad_norm": 0.9218102097511292,
      "learning_rate": 4.332006561018488e-05,
      "loss": 1.5113,
      "step": 8432
    },
    {
      "epoch": 0.3570434122263005,
      "grad_norm": 0.9429653286933899,
      "learning_rate": 4.3262817244669683e-05,
      "loss": 1.5095,
      "step": 8463
    },
    {
      "epoch": 0.35835126355313673,
      "grad_norm": 0.8979545831680298,
      "learning_rate": 4.3205362785744083e-05,
      "loss": 1.522,
      "step": 8494
    },
    {
      "epoch": 0.359659114879973,
      "grad_norm": 0.9040619134902954,
      "learning_rate": 4.314770288177384e-05,
      "loss": 1.5197,
      "step": 8525
    },
    {
      "epoch": 0.3609669662068093,
      "grad_norm": 0.9038030505180359,
      "learning_rate": 4.308983818344313e-05,
      "loss": 1.5218,
      "step": 8556
    },
    {
      "epoch": 0.36227481753364554,
      "grad_norm": 0.9493038654327393,
      "learning_rate": 4.3031769343747206e-05,
      "loss": 1.5306,
      "step": 8587
    },
    {
      "epoch": 0.3635826688604818,
      "grad_norm": 0.9084991812705994,
      "learning_rate": 4.297349701798505e-05,
      "loss": 1.5146,
      "step": 8618
    },
    {
      "epoch": 0.36489052018731805,
      "grad_norm": 0.9597145915031433,
      "learning_rate": 4.2915021863751916e-05,
      "loss": 1.5202,
      "step": 8649
    },
    {
      "epoch": 0.36619837151415435,
      "grad_norm": 0.9139339923858643,
      "learning_rate": 4.285634454093198e-05,
      "loss": 1.5082,
      "step": 8680
    },
    {
      "epoch": 0.3675062228409906,
      "grad_norm": 0.955664336681366,
      "learning_rate": 4.279746571169086e-05,
      "loss": 1.5073,
      "step": 8711
    },
    {
      "epoch": 0.36881407416782686,
      "grad_norm": 0.975068986415863,
      "learning_rate": 4.2738386040468136e-05,
      "loss": 1.5086,
      "step": 8742
    },
    {
      "epoch": 0.3701219254946631,
      "grad_norm": 0.9280393719673157,
      "learning_rate": 4.2679106193969866e-05,
      "loss": 1.5342,
      "step": 8773
    },
    {
      "epoch": 0.3714297768214994,
      "grad_norm": 0.9098469614982605,
      "learning_rate": 4.261962684116106e-05,
      "loss": 1.5305,
      "step": 8804
    },
    {
      "epoch": 0.37273762814833566,
      "grad_norm": 0.9359835386276245,
      "learning_rate": 4.2559948653258145e-05,
      "loss": 1.5172,
      "step": 8835
    },
    {
      "epoch": 0.3740454794751719,
      "grad_norm": 0.9431541562080383,
      "learning_rate": 4.250007230372134e-05,
      "loss": 1.5128,
      "step": 8866
    },
    {
      "epoch": 0.37535333080200817,
      "grad_norm": 0.8999470472335815,
      "learning_rate": 4.2439998468247126e-05,
      "loss": 1.5025,
      "step": 8897
    },
    {
      "epoch": 0.3766611821288445,
      "grad_norm": 0.9414424300193787,
      "learning_rate": 4.2379727824760566e-05,
      "loss": 1.5135,
      "step": 8928
    },
    {
      "epoch": 0.3779690334556807,
      "grad_norm": 0.9347614049911499,
      "learning_rate": 4.231926105340768e-05,
      "loss": 1.5252,
      "step": 8959
    },
    {
      "epoch": 0.379276884782517,
      "grad_norm": 0.9166226983070374,
      "learning_rate": 4.225859883654776e-05,
      "loss": 1.519,
      "step": 8990
    },
    {
      "epoch": 0.3805847361093532,
      "grad_norm": 0.9614241719245911,
      "learning_rate": 4.219774185874569e-05,
      "loss": 1.494,
      "step": 9021
    },
    {
      "epoch": 0.38189258743618953,
      "grad_norm": 0.9002947211265564,
      "learning_rate": 4.213669080676418e-05,
      "loss": 1.5093,
      "step": 9052
    },
    {
      "epoch": 0.3832004387630258,
      "grad_norm": 0.8749356269836426,
      "learning_rate": 4.2075446369556056e-05,
      "loss": 1.512,
      "step": 9083
    },
    {
      "epoch": 0.38450829008986204,
      "grad_norm": 0.954454243183136,
      "learning_rate": 4.201400923825648e-05,
      "loss": 1.4987,
      "step": 9114
    },
    {
      "epoch": 0.3858161414166983,
      "grad_norm": 0.9184893369674683,
      "learning_rate": 4.195238010617511e-05,
      "loss": 1.5195,
      "step": 9145
    },
    {
      "epoch": 0.3871239927435346,
      "grad_norm": 0.9145151376724243,
      "learning_rate": 4.1890559668788344e-05,
      "loss": 1.5149,
      "step": 9176
    },
    {
      "epoch": 0.38843184407037085,
      "grad_norm": 0.9118244051933289,
      "learning_rate": 4.1828548623731405e-05,
      "loss": 1.5179,
      "step": 9207
    },
    {
      "epoch": 0.3897396953972071,
      "grad_norm": 0.918662428855896,
      "learning_rate": 4.1766347670790506e-05,
      "loss": 1.5023,
      "step": 9238
    },
    {
      "epoch": 0.39104754672404335,
      "grad_norm": 0.9427615404129028,
      "learning_rate": 4.170395751189495e-05,
      "loss": 1.5093,
      "step": 9269
    },
    {
      "epoch": 0.39235539805087966,
      "grad_norm": 0.8825695514678955,
      "learning_rate": 4.164137885110921e-05,
      "loss": 1.5056,
      "step": 9300
    },
    {
      "epoch": 0.3936632493777159,
      "grad_norm": 0.9850104451179504,
      "learning_rate": 4.157861239462495e-05,
      "loss": 1.5308,
      "step": 9331
    },
    {
      "epoch": 0.39497110070455216,
      "grad_norm": 0.9645696878433228,
      "learning_rate": 4.1515658850753114e-05,
      "loss": 1.4933,
      "step": 9362
    },
    {
      "epoch": 0.3962789520313884,
      "grad_norm": 0.9052213430404663,
      "learning_rate": 4.145251892991588e-05,
      "loss": 1.5072,
      "step": 9393
    },
    {
      "epoch": 0.3975868033582247,
      "grad_norm": 0.9212966561317444,
      "learning_rate": 4.138919334463868e-05,
      "loss": 1.5041,
      "step": 9424
    },
    {
      "epoch": 0.39889465468506097,
      "grad_norm": 0.8963611721992493,
      "learning_rate": 4.1325682809542124e-05,
      "loss": 1.5121,
      "step": 9455
    },
    {
      "epoch": 0.4002025060118972,
      "grad_norm": 0.9025225639343262,
      "learning_rate": 4.126198804133398e-05,
      "loss": 1.5018,
      "step": 9486
    },
    {
      "epoch": 0.40151035733873347,
      "grad_norm": 0.9104238748550415,
      "learning_rate": 4.1198109758801055e-05,
      "loss": 1.5107,
      "step": 9517
    },
    {
      "epoch": 0.4028182086655698,
      "grad_norm": 0.9045152068138123,
      "learning_rate": 4.113404868280107e-05,
      "loss": 1.4841,
      "step": 9548
    },
    {
      "epoch": 0.40412605999240603,
      "grad_norm": 0.950362503528595,
      "learning_rate": 4.106980553625457e-05,
      "loss": 1.5274,
      "step": 9579
    },
    {
      "epoch": 0.4054339113192423,
      "grad_norm": 0.8988200426101685,
      "learning_rate": 4.100538104413674e-05,
      "loss": 1.5144,
      "step": 9610
    },
    {
      "epoch": 0.40674176264607853,
      "grad_norm": 0.9193787574768066,
      "learning_rate": 4.09407759334692e-05,
      "loss": 1.5083,
      "step": 9641
    },
    {
      "epoch": 0.40804961397291484,
      "grad_norm": 0.885677695274353,
      "learning_rate": 4.087599093331186e-05,
      "loss": 1.4969,
      "step": 9672
    },
    {
      "epoch": 0.4093574652997511,
      "grad_norm": 0.9060222506523132,
      "learning_rate": 4.081102677475462e-05,
      "loss": 1.4821,
      "step": 9703
    },
    {
      "epoch": 0.41066531662658734,
      "grad_norm": 0.9060747623443604,
      "learning_rate": 4.0745884190909194e-05,
      "loss": 1.4851,
      "step": 9734
    },
    {
      "epoch": 0.4119731679534236,
      "grad_norm": 0.8934387564659119,
      "learning_rate": 4.0680563916900796e-05,
      "loss": 1.4874,
      "step": 9765
    },
    {
      "epoch": 0.4132810192802599,
      "grad_norm": 0.8983092904090881,
      "learning_rate": 4.0615066689859815e-05,
      "loss": 1.503,
      "step": 9796
    },
    {
      "epoch": 0.41458887060709615,
      "grad_norm": 0.914784848690033,
      "learning_rate": 4.0549393248913584e-05,
      "loss": 1.4986,
      "step": 9827
    },
    {
      "epoch": 0.4158967219339324,
      "grad_norm": 0.9157377481460571,
      "learning_rate": 4.048354433517794e-05,
      "loss": 1.5072,
      "step": 9858
    },
    {
      "epoch": 0.41720457326076865,
      "grad_norm": 0.8943291306495667,
      "learning_rate": 4.0417520691748916e-05,
      "loss": 1.4853,
      "step": 9889
    },
    {
      "epoch": 0.41851242458760496,
      "grad_norm": 0.9054378271102905,
      "learning_rate": 4.035132306369438e-05,
      "loss": 1.5004,
      "step": 9920
    },
    {
      "epoch": 0.4198202759144412,
      "grad_norm": 0.9269044399261475,
      "learning_rate": 4.028495219804555e-05,
      "loss": 1.4741,
      "step": 9951
    },
    {
      "epoch": 0.42112812724127746,
      "grad_norm": 0.8953903913497925,
      "learning_rate": 4.021840884378864e-05,
      "loss": 1.4843,
      "step": 9982
    },
    {
      "epoch": 0.42243597856811377,
      "grad_norm": 0.8900305032730103,
      "learning_rate": 4.015169375185633e-05,
      "loss": 1.5267,
      "step": 10013
    },
    {
      "epoch": 0.42374382989495,
      "grad_norm": 0.9487763047218323,
      "learning_rate": 4.0084807675119396e-05,
      "loss": 1.5108,
      "step": 10044
    },
    {
      "epoch": 0.42505168122178627,
      "grad_norm": 0.9137413501739502,
      "learning_rate": 4.0017751368378106e-05,
      "loss": 1.4882,
      "step": 10075
    },
    {
      "epoch": 0.4263595325486225,
      "grad_norm": 0.9609214067459106,
      "learning_rate": 3.995052558835377e-05,
      "loss": 1.4845,
      "step": 10106
    },
    {
      "epoch": 0.42766738387545883,
      "grad_norm": 0.8875072002410889,
      "learning_rate": 3.988313109368017e-05,
      "loss": 1.5042,
      "step": 10137
    },
    {
      "epoch": 0.4289752352022951,
      "grad_norm": 0.8940466642379761,
      "learning_rate": 3.981556864489504e-05,
      "loss": 1.4848,
      "step": 10168
    },
    {
      "epoch": 0.43028308652913133,
      "grad_norm": 0.9151347279548645,
      "learning_rate": 3.974783900443142e-05,
      "loss": 1.5091,
      "step": 10199
    },
    {
      "epoch": 0.4315909378559676,
      "grad_norm": 0.8956405520439148,
      "learning_rate": 3.9679942936609095e-05,
      "loss": 1.4925,
      "step": 10230
    },
    {
      "epoch": 0.4328987891828039,
      "grad_norm": 0.8801243305206299,
      "learning_rate": 3.961188120762596e-05,
      "loss": 1.4992,
      "step": 10261
    },
    {
      "epoch": 0.43420664050964014,
      "grad_norm": 0.9045059084892273,
      "learning_rate": 3.954365458554938e-05,
      "loss": 1.4873,
      "step": 10292
    },
    {
      "epoch": 0.4355144918364764,
      "grad_norm": 0.8852213025093079,
      "learning_rate": 3.947526384030751e-05,
      "loss": 1.4907,
      "step": 10323
    },
    {
      "epoch": 0.43682234316331264,
      "grad_norm": 0.9119607210159302,
      "learning_rate": 3.9406709743680624e-05,
      "loss": 1.4879,
      "step": 10354
    },
    {
      "epoch": 0.43813019449014895,
      "grad_norm": 0.8808435201644897,
      "learning_rate": 3.9337993069292366e-05,
      "loss": 1.473,
      "step": 10385
    },
    {
      "epoch": 0.4394380458169852,
      "grad_norm": 0.8821302056312561,
      "learning_rate": 3.926911459260109e-05,
      "loss": 1.4883,
      "step": 10416
    },
    {
      "epoch": 0.44074589714382145,
      "grad_norm": 0.8727222084999084,
      "learning_rate": 3.920007509089102e-05,
      "loss": 1.4802,
      "step": 10447
    },
    {
      "epoch": 0.4420537484706577,
      "grad_norm": 0.8907620310783386,
      "learning_rate": 3.913087534326357e-05,
      "loss": 1.4947,
      "step": 10478
    },
    {
      "epoch": 0.443361599797494,
      "grad_norm": 0.9233911633491516,
      "learning_rate": 3.9061516130628475e-05,
      "loss": 1.4844,
      "step": 10509
    },
    {
      "epoch": 0.44466945112433026,
      "grad_norm": 0.8893357515335083,
      "learning_rate": 3.8991998235695025e-05,
      "loss": 1.4696,
      "step": 10540
    },
    {
      "epoch": 0.4459773024511665,
      "grad_norm": 0.8262650966644287,
      "learning_rate": 3.8922322442963224e-05,
      "loss": 1.4867,
      "step": 10571
    },
    {
      "epoch": 0.44728515377800276,
      "grad_norm": 0.8931017518043518,
      "learning_rate": 3.885248953871491e-05,
      "loss": 1.4655,
      "step": 10602
    },
    {
      "epoch": 0.44859300510483907,
      "grad_norm": 0.8849929571151733,
      "learning_rate": 3.8782500311004915e-05,
      "loss": 1.4905,
      "step": 10633
    },
    {
      "epoch": 0.4499008564316753,
      "grad_norm": 0.8966437578201294,
      "learning_rate": 3.871235554965218e-05,
      "loss": 1.4861,
      "step": 10664
    },
    {
      "epoch": 0.4512087077585116,
      "grad_norm": 0.931607723236084,
      "learning_rate": 3.864205604623078e-05,
      "loss": 1.4816,
      "step": 10695
    },
    {
      "epoch": 0.4525165590853478,
      "grad_norm": 0.9525107741355896,
      "learning_rate": 3.857160259406107e-05,
      "loss": 1.4758,
      "step": 10726
    },
    {
      "epoch": 0.45382441041218413,
      "grad_norm": 0.9055147767066956,
      "learning_rate": 3.8500995988200674e-05,
      "loss": 1.4869,
      "step": 10757
    },
    {
      "epoch": 0.4551322617390204,
      "grad_norm": 0.8673259019851685,
      "learning_rate": 3.843023702543556e-05,
      "loss": 1.482,
      "step": 10788
    },
    {
      "epoch": 0.45644011306585663,
      "grad_norm": 0.845866858959198,
      "learning_rate": 3.8359326504270984e-05,
      "loss": 1.4733,
      "step": 10819
    },
    {
      "epoch": 0.4577479643926929,
      "grad_norm": 0.8884342908859253,
      "learning_rate": 3.828826522492255e-05,
      "loss": 1.4711,
      "step": 10850
    },
    {
      "epoch": 0.4590558157195292,
      "grad_norm": 0.8943782448768616,
      "learning_rate": 3.821705398930713e-05,
      "loss": 1.4538,
      "step": 10881
    },
    {
      "epoch": 0.46036366704636544,
      "grad_norm": 0.8772262930870056,
      "learning_rate": 3.814569360103385e-05,
      "loss": 1.4771,
      "step": 10912
    },
    {
      "epoch": 0.4616715183732017,
      "grad_norm": 0.9197453856468201,
      "learning_rate": 3.807418486539499e-05,
      "loss": 1.4648,
      "step": 10943
    },
    {
      "epoch": 0.46297936970003795,
      "grad_norm": 0.8835433125495911,
      "learning_rate": 3.80025285893569e-05,
      "loss": 1.4677,
      "step": 10974
    },
    {
      "epoch": 0.46428722102687425,
      "grad_norm": 0.955093264579773,
      "learning_rate": 3.793072558155093e-05,
      "loss": 1.4821,
      "step": 11005
    },
    {
      "epoch": 0.4655950723537105,
      "grad_norm": 0.9170013070106506,
      "learning_rate": 3.785877665226426e-05,
      "loss": 1.481,
      "step": 11036
    },
    {
      "epoch": 0.46690292368054676,
      "grad_norm": 0.8826711177825928,
      "learning_rate": 3.778668261343079e-05,
      "loss": 1.4713,
      "step": 11067
    },
    {
      "epoch": 0.468210775007383,
      "grad_norm": 0.9213606715202332,
      "learning_rate": 3.771444427862192e-05,
      "loss": 1.4898,
      "step": 11098
    },
    {
      "epoch": 0.4695186263342193,
      "grad_norm": 1.0346989631652832,
      "learning_rate": 3.7642062463037465e-05,
      "loss": 1.4768,
      "step": 11129
    },
    {
      "epoch": 0.47082647766105556,
      "grad_norm": 0.8965749740600586,
      "learning_rate": 3.7569537983496373e-05,
      "loss": 1.4714,
      "step": 11160
    },
    {
      "epoch": 0.4721343289878918,
      "grad_norm": 0.88069748878479,
      "learning_rate": 3.749687165842753e-05,
      "loss": 1.4938,
      "step": 11191
    },
    {
      "epoch": 0.47344218031472807,
      "grad_norm": 0.8329574465751648,
      "learning_rate": 3.7424064307860536e-05,
      "loss": 1.4581,
      "step": 11222
    },
    {
      "epoch": 0.4747500316415644,
      "grad_norm": 0.8511537313461304,
      "learning_rate": 3.735111675341645e-05,
      "loss": 1.4688,
      "step": 11253
    },
    {
      "epoch": 0.4760578829684006,
      "grad_norm": 0.8722901344299316,
      "learning_rate": 3.7278029818298524e-05,
      "loss": 1.5016,
      "step": 11284
    },
    {
      "epoch": 0.4773657342952369,
      "grad_norm": 0.8820895552635193,
      "learning_rate": 3.720480432728287e-05,
      "loss": 1.4775,
      "step": 11315
    },
    {
      "epoch": 0.47867358562207313,
      "grad_norm": 0.8825634717941284,
"learning_rate": 3.71314411067092e-05, |
|
"loss": 1.4997, |
|
"step": 11346 |
|
}, |
|
{ |
|
"epoch": 0.47998143694890943, |
|
"grad_norm": 0.8426871299743652, |
|
"learning_rate": 3.70579409844715e-05, |
|
"loss": 1.4549, |
|
"step": 11377 |
|
}, |
|
{ |
|
"epoch": 0.4812892882757457, |
|
"grad_norm": 0.8519338965415955, |
|
"learning_rate": 3.698430479000865e-05, |
|
"loss": 1.4788, |
|
"step": 11408 |
|
}, |
|
{ |
|
"epoch": 0.48259713960258194, |
|
"grad_norm": 0.8908571600914001, |
|
"learning_rate": 3.691053335429509e-05, |
|
"loss": 1.4775, |
|
"step": 11439 |
|
}, |
|
{ |
|
"epoch": 0.48390499092941824, |
|
"grad_norm": 0.8275105357170105, |
|
"learning_rate": 3.683662750983147e-05, |
|
"loss": 1.4569, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 0.4852128422562545, |
|
"grad_norm": 0.8812130689620972, |
|
"learning_rate": 3.676258809063518e-05, |
|
"loss": 1.4706, |
|
"step": 11501 |
|
}, |
|
{ |
|
"epoch": 0.48652069358309075, |
|
"grad_norm": 0.9266872406005859, |
|
"learning_rate": 3.6688415932231004e-05, |
|
"loss": 1.4699, |
|
"step": 11532 |
|
}, |
|
{ |
|
"epoch": 0.487828544909927, |
|
"grad_norm": 0.9029215574264526, |
|
"learning_rate": 3.661411187164166e-05, |
|
"loss": 1.4807, |
|
"step": 11563 |
|
}, |
|
{ |
|
"epoch": 0.4891363962367633, |
|
"grad_norm": 0.8788023591041565, |
|
"learning_rate": 3.65396767473784e-05, |
|
"loss": 1.4622, |
|
"step": 11594 |
|
}, |
|
{ |
|
"epoch": 0.49044424756359956, |
|
"grad_norm": 0.8894121050834656, |
|
"learning_rate": 3.6465111399431465e-05, |
|
"loss": 1.4748, |
|
"step": 11625 |
|
}, |
|
{ |
|
"epoch": 0.4917520988904358, |
|
"grad_norm": 0.9032202363014221, |
|
"learning_rate": 3.6390416669260674e-05, |
|
"loss": 1.4621, |
|
"step": 11656 |
|
}, |
|
{ |
|
"epoch": 0.49305995021727206, |
|
"grad_norm": 0.9159075021743774, |
|
"learning_rate": 3.63155933997859e-05, |
|
"loss": 1.4837, |
|
"step": 11687 |
|
}, |
|
{ |
|
"epoch": 0.49436780154410837, |
|
"grad_norm": 0.850325345993042, |
|
"learning_rate": 3.624064243537758e-05, |
|
"loss": 1.4703, |
|
"step": 11718 |
|
}, |
|
{ |
|
"epoch": 0.4956756528709446, |
|
"grad_norm": 0.9051500558853149, |
|
"learning_rate": 3.616556462184716e-05, |
|
"loss": 1.4905, |
|
"step": 11749 |
|
}, |
|
{ |
|
"epoch": 0.49698350419778087, |
|
"grad_norm": 0.9070841073989868, |
|
"learning_rate": 3.609036080643755e-05, |
|
"loss": 1.4626, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 0.4982913555246171, |
|
"grad_norm": 0.8970009088516235, |
|
"learning_rate": 3.60150318378136e-05, |
|
"loss": 1.4704, |
|
"step": 11811 |
|
}, |
|
{ |
|
"epoch": 0.4995992068514534, |
|
"grad_norm": 0.8439539074897766, |
|
"learning_rate": 3.5939578566052465e-05, |
|
"loss": 1.4686, |
|
"step": 11842 |
|
}, |
|
{ |
|
"epoch": 0.5009070581782896, |
|
"grad_norm": 0.8568843603134155, |
|
"learning_rate": 3.586400184263408e-05, |
|
"loss": 1.451, |
|
"step": 11873 |
|
}, |
|
{ |
|
"epoch": 0.5022149095051259, |
|
"grad_norm": 0.8979809284210205, |
|
"learning_rate": 3.578830252043148e-05, |
|
"loss": 1.4808, |
|
"step": 11904 |
|
}, |
|
{ |
|
"epoch": 0.5035227608319622, |
|
"grad_norm": 0.8958112001419067, |
|
"learning_rate": 3.571248145370125e-05, |
|
"loss": 1.4836, |
|
"step": 11935 |
|
}, |
|
{ |
|
"epoch": 0.5048306121587984, |
|
"grad_norm": 0.8810004591941833, |
|
"learning_rate": 3.5636539498073794e-05, |
|
"loss": 1.4666, |
|
"step": 11966 |
|
}, |
|
{ |
|
"epoch": 0.5061384634856347, |
|
"grad_norm": 0.855351984500885, |
|
"learning_rate": 3.556047751054378e-05, |
|
"loss": 1.4653, |
|
"step": 11997 |
|
}, |
|
{ |
|
"epoch": 0.507446314812471, |
|
"grad_norm": 0.8665292263031006, |
|
"learning_rate": 3.548429634946039e-05, |
|
"loss": 1.4737, |
|
"step": 12028 |
|
}, |
|
{ |
|
"epoch": 0.5087541661393072, |
|
"grad_norm": 0.8537725806236267, |
|
"learning_rate": 3.540799687451768e-05, |
|
"loss": 1.4517, |
|
"step": 12059 |
|
}, |
|
{ |
|
"epoch": 0.5100620174661435, |
|
"grad_norm": 0.8791072964668274, |
|
"learning_rate": 3.533157994674485e-05, |
|
"loss": 1.4504, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 0.5113698687929797, |
|
"grad_norm": 0.8664337396621704, |
|
"learning_rate": 3.5255046428496546e-05, |
|
"loss": 1.4733, |
|
"step": 12121 |
|
}, |
|
{ |
|
"epoch": 0.512677720119816, |
|
"grad_norm": 0.8952924609184265, |
|
"learning_rate": 3.517839718344311e-05, |
|
"loss": 1.472, |
|
"step": 12152 |
|
}, |
|
{ |
|
"epoch": 0.5139855714466524, |
|
"grad_norm": 0.8985433578491211, |
|
"learning_rate": 3.510163307656086e-05, |
|
"loss": 1.452, |
|
"step": 12183 |
|
}, |
|
{ |
|
"epoch": 0.5152934227734886, |
|
"grad_norm": 0.8967877626419067, |
|
"learning_rate": 3.5024754974122324e-05, |
|
"loss": 1.4516, |
|
"step": 12214 |
|
}, |
|
{ |
|
"epoch": 0.5166012741003249, |
|
"grad_norm": 0.8831754922866821, |
|
"learning_rate": 3.494776374368643e-05, |
|
"loss": 1.467, |
|
"step": 12245 |
|
}, |
|
{ |
|
"epoch": 0.5179091254271612, |
|
"grad_norm": 0.8881738781929016, |
|
"learning_rate": 3.4870660254088724e-05, |
|
"loss": 1.444, |
|
"step": 12276 |
|
}, |
|
{ |
|
"epoch": 0.5192169767539974, |
|
"grad_norm": 0.8571346998214722, |
|
"learning_rate": 3.479344537543164e-05, |
|
"loss": 1.4695, |
|
"step": 12307 |
|
}, |
|
{ |
|
"epoch": 0.5205248280808337, |
|
"grad_norm": 0.8962191343307495, |
|
"learning_rate": 3.4716119979074565e-05, |
|
"loss": 1.4539, |
|
"step": 12338 |
|
}, |
|
{ |
|
"epoch": 0.5218326794076699, |
|
"grad_norm": 0.8515344262123108, |
|
"learning_rate": 3.463868493762412e-05, |
|
"loss": 1.4691, |
|
"step": 12369 |
|
}, |
|
{ |
|
"epoch": 0.5231405307345062, |
|
"grad_norm": 0.8616427183151245, |
|
"learning_rate": 3.456114112492418e-05, |
|
"loss": 1.4655, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 0.5244483820613425, |
|
"grad_norm": 0.8810769319534302, |
|
"learning_rate": 3.4483489416046164e-05, |
|
"loss": 1.4662, |
|
"step": 12431 |
|
}, |
|
{ |
|
"epoch": 0.5257562333881787, |
|
"grad_norm": 0.8603881001472473, |
|
"learning_rate": 3.440573068727905e-05, |
|
"loss": 1.4863, |
|
"step": 12462 |
|
}, |
|
{ |
|
"epoch": 0.527064084715015, |
|
"grad_norm": 0.8843648433685303, |
|
"learning_rate": 3.4327865816119495e-05, |
|
"loss": 1.4665, |
|
"step": 12493 |
|
}, |
|
{ |
|
"epoch": 0.5283719360418513, |
|
"grad_norm": 0.8736210465431213, |
|
"learning_rate": 3.4249895681262025e-05, |
|
"loss": 1.4626, |
|
"step": 12524 |
|
}, |
|
{ |
|
"epoch": 0.5296797873686875, |
|
"grad_norm": 0.8971989750862122, |
|
"learning_rate": 3.417182116258899e-05, |
|
"loss": 1.4413, |
|
"step": 12555 |
|
}, |
|
{ |
|
"epoch": 0.5309876386955238, |
|
"grad_norm": 0.855297327041626, |
|
"learning_rate": 3.409364314116074e-05, |
|
"loss": 1.4792, |
|
"step": 12586 |
|
}, |
|
{ |
|
"epoch": 0.53229549002236, |
|
"grad_norm": 0.8786769509315491, |
|
"learning_rate": 3.401536249920559e-05, |
|
"loss": 1.4526, |
|
"step": 12617 |
|
}, |
|
{ |
|
"epoch": 0.5336033413491963, |
|
"grad_norm": 0.8810843229293823, |
|
"learning_rate": 3.393698012010998e-05, |
|
"loss": 1.4836, |
|
"step": 12648 |
|
}, |
|
{ |
|
"epoch": 0.5349111926760326, |
|
"grad_norm": 0.8931439518928528, |
|
"learning_rate": 3.385849688840839e-05, |
|
"loss": 1.4375, |
|
"step": 12679 |
|
}, |
|
{ |
|
"epoch": 0.5362190440028688, |
|
"grad_norm": 0.8540153503417969, |
|
"learning_rate": 3.3779913689773414e-05, |
|
"loss": 1.4493, |
|
"step": 12710 |
|
}, |
|
{ |
|
"epoch": 0.5375268953297051, |
|
"grad_norm": 0.8924010992050171, |
|
"learning_rate": 3.370123141100578e-05, |
|
"loss": 1.4416, |
|
"step": 12741 |
|
}, |
|
{ |
|
"epoch": 0.5388347466565414, |
|
"grad_norm": 0.8814260959625244, |
|
"learning_rate": 3.3622450940024305e-05, |
|
"loss": 1.4619, |
|
"step": 12772 |
|
}, |
|
{ |
|
"epoch": 0.5401425979833776, |
|
"grad_norm": 0.8524183630943298, |
|
"learning_rate": 3.35435731658559e-05, |
|
"loss": 1.4469, |
|
"step": 12803 |
|
}, |
|
{ |
|
"epoch": 0.5414504493102139, |
|
"grad_norm": 0.860457718372345, |
|
"learning_rate": 3.346459897862552e-05, |
|
"loss": 1.4552, |
|
"step": 12834 |
|
}, |
|
{ |
|
"epoch": 0.5427583006370502, |
|
"grad_norm": 0.859676718711853, |
|
"learning_rate": 3.338552926954613e-05, |
|
"loss": 1.4585, |
|
"step": 12865 |
|
}, |
|
{ |
|
"epoch": 0.5440661519638864, |
|
"grad_norm": 0.8739502429962158, |
|
"learning_rate": 3.330636493090868e-05, |
|
"loss": 1.4506, |
|
"step": 12896 |
|
}, |
|
{ |
|
"epoch": 0.5453740032907227, |
|
"grad_norm": 0.8808159828186035, |
|
"learning_rate": 3.322710685607193e-05, |
|
"loss": 1.4612, |
|
"step": 12927 |
|
}, |
|
{ |
|
"epoch": 0.5466818546175589, |
|
"grad_norm": 0.8579108119010925, |
|
"learning_rate": 3.314775593945251e-05, |
|
"loss": 1.4462, |
|
"step": 12958 |
|
}, |
|
{ |
|
"epoch": 0.5479897059443952, |
|
"grad_norm": 0.9209607839584351, |
|
"learning_rate": 3.3068313076514714e-05, |
|
"loss": 1.4464, |
|
"step": 12989 |
|
}, |
|
{ |
|
"epoch": 0.5492975572712315, |
|
"grad_norm": 0.8351584672927856, |
|
"learning_rate": 3.298877916376047e-05, |
|
"loss": 1.4478, |
|
"step": 13020 |
|
}, |
|
{ |
|
"epoch": 0.5506054085980677, |
|
"grad_norm": 0.8345361351966858, |
|
"learning_rate": 3.290915509871915e-05, |
|
"loss": 1.428, |
|
"step": 13051 |
|
}, |
|
{ |
|
"epoch": 0.551913259924904, |
|
"grad_norm": 1.180969476699829, |
|
"learning_rate": 3.282944177993753e-05, |
|
"loss": 1.442, |
|
"step": 13082 |
|
}, |
|
{ |
|
"epoch": 0.5532211112517403, |
|
"grad_norm": 0.8715757131576538, |
|
"learning_rate": 3.274964010696957e-05, |
|
"loss": 1.4536, |
|
"step": 13113 |
|
}, |
|
{ |
|
"epoch": 0.5545289625785765, |
|
"grad_norm": 0.8906053304672241, |
|
"learning_rate": 3.266975098036629e-05, |
|
"loss": 1.4512, |
|
"step": 13144 |
|
}, |
|
{ |
|
"epoch": 0.5558368139054128, |
|
"grad_norm": 0.8323124647140503, |
|
"learning_rate": 3.258977530166562e-05, |
|
"loss": 1.4515, |
|
"step": 13175 |
|
}, |
|
{ |
|
"epoch": 0.557144665232249, |
|
"grad_norm": 0.835608959197998, |
|
"learning_rate": 3.250971397338227e-05, |
|
"loss": 1.4563, |
|
"step": 13206 |
|
}, |
|
{ |
|
"epoch": 0.5584525165590853, |
|
"grad_norm": 0.8299891948699951, |
|
"learning_rate": 3.2429567898997404e-05, |
|
"loss": 1.4551, |
|
"step": 13237 |
|
}, |
|
{ |
|
"epoch": 0.5597603678859217, |
|
"grad_norm": 0.8563460111618042, |
|
"learning_rate": 3.234933798294859e-05, |
|
"loss": 1.4282, |
|
"step": 13268 |
|
}, |
|
{ |
|
"epoch": 0.5610682192127578, |
|
"grad_norm": 0.8846110701560974, |
|
"learning_rate": 3.2269025130619535e-05, |
|
"loss": 1.441, |
|
"step": 13299 |
|
}, |
|
{ |
|
"epoch": 0.5623760705395942, |
|
"grad_norm": 0.8108500242233276, |
|
"learning_rate": 3.218863024832985e-05, |
|
"loss": 1.4702, |
|
"step": 13330 |
|
}, |
|
{ |
|
"epoch": 0.5636839218664305, |
|
"grad_norm": 0.8752537369728088, |
|
"learning_rate": 3.2108154243324864e-05, |
|
"loss": 1.4361, |
|
"step": 13361 |
|
}, |
|
{ |
|
"epoch": 0.5649917731932667, |
|
"grad_norm": 0.8647633790969849, |
|
"learning_rate": 3.2027598023765345e-05, |
|
"loss": 1.4365, |
|
"step": 13392 |
|
}, |
|
{ |
|
"epoch": 0.566299624520103, |
|
"grad_norm": 0.8434934616088867, |
|
"learning_rate": 3.194696249871729e-05, |
|
"loss": 1.4495, |
|
"step": 13423 |
|
}, |
|
{ |
|
"epoch": 0.5676074758469392, |
|
"grad_norm": 0.8327639102935791, |
|
"learning_rate": 3.186624857814164e-05, |
|
"loss": 1.463, |
|
"step": 13454 |
|
}, |
|
{ |
|
"epoch": 0.5689153271737755, |
|
"grad_norm": 0.8536667823791504, |
|
"learning_rate": 3.178545717288401e-05, |
|
"loss": 1.4334, |
|
"step": 13485 |
|
}, |
|
{ |
|
"epoch": 0.5702231785006118, |
|
"grad_norm": 0.8552531003952026, |
|
"learning_rate": 3.170458919466444e-05, |
|
"loss": 1.453, |
|
"step": 13516 |
|
}, |
|
{ |
|
"epoch": 0.571531029827448, |
|
"grad_norm": 0.8317746520042419, |
|
"learning_rate": 3.1623645556067063e-05, |
|
"loss": 1.4586, |
|
"step": 13547 |
|
}, |
|
{ |
|
"epoch": 0.5728388811542843, |
|
"grad_norm": 0.858818531036377, |
|
"learning_rate": 3.154262717052985e-05, |
|
"loss": 1.4386, |
|
"step": 13578 |
|
}, |
|
{ |
|
"epoch": 0.5741467324811206, |
|
"grad_norm": 0.8759421110153198, |
|
"learning_rate": 3.146153495233426e-05, |
|
"loss": 1.4411, |
|
"step": 13609 |
|
}, |
|
{ |
|
"epoch": 0.5754545838079568, |
|
"grad_norm": 0.9180925488471985, |
|
"learning_rate": 3.1380369816594944e-05, |
|
"loss": 1.4412, |
|
"step": 13640 |
|
}, |
|
{ |
|
"epoch": 0.5767624351347931, |
|
"grad_norm": 0.8536112904548645, |
|
"learning_rate": 3.129913267924946e-05, |
|
"loss": 1.426, |
|
"step": 13671 |
|
}, |
|
{ |
|
"epoch": 0.5780702864616293, |
|
"grad_norm": 0.8422097563743591, |
|
"learning_rate": 3.121782445704782e-05, |
|
"loss": 1.4446, |
|
"step": 13702 |
|
}, |
|
{ |
|
"epoch": 0.5793781377884656, |
|
"grad_norm": 0.8785713315010071, |
|
"learning_rate": 3.11364460675423e-05, |
|
"loss": 1.4583, |
|
"step": 13733 |
|
}, |
|
{ |
|
"epoch": 0.5806859891153019, |
|
"grad_norm": 0.8932250738143921, |
|
"learning_rate": 3.1054998429076934e-05, |
|
"loss": 1.4767, |
|
"step": 13764 |
|
}, |
|
{ |
|
"epoch": 0.5819938404421381, |
|
"grad_norm": 0.8232563138008118, |
|
"learning_rate": 3.097348246077728e-05, |
|
"loss": 1.4384, |
|
"step": 13795 |
|
}, |
|
{ |
|
"epoch": 0.5833016917689744, |
|
"grad_norm": 0.8628634810447693, |
|
"learning_rate": 3.0891899082539924e-05, |
|
"loss": 1.4198, |
|
"step": 13826 |
|
}, |
|
{ |
|
"epoch": 0.5846095430958107, |
|
"grad_norm": 0.8697309494018555, |
|
"learning_rate": 3.0810249215022233e-05, |
|
"loss": 1.4431, |
|
"step": 13857 |
|
}, |
|
{ |
|
"epoch": 0.5859173944226469, |
|
"grad_norm": 0.8559725880622864, |
|
"learning_rate": 3.0728533779631865e-05, |
|
"loss": 1.4556, |
|
"step": 13888 |
|
}, |
|
{ |
|
"epoch": 0.5872252457494832, |
|
"grad_norm": 0.8450871109962463, |
|
"learning_rate": 3.064675369851637e-05, |
|
"loss": 1.4357, |
|
"step": 13919 |
|
}, |
|
{ |
|
"epoch": 0.5885330970763194, |
|
"grad_norm": 0.8583346009254456, |
|
"learning_rate": 3.056490989455289e-05, |
|
"loss": 1.4503, |
|
"step": 13950 |
|
}, |
|
{ |
|
"epoch": 0.5898409484031557, |
|
"grad_norm": 0.8461182117462158, |
|
"learning_rate": 3.0483003291337596e-05, |
|
"loss": 1.4379, |
|
"step": 13981 |
|
}, |
|
{ |
|
"epoch": 0.591148799729992, |
|
"grad_norm": 0.8465268015861511, |
|
"learning_rate": 3.040103481317539e-05, |
|
"loss": 1.4427, |
|
"step": 14012 |
|
}, |
|
{ |
|
"epoch": 0.5924566510568282, |
|
"grad_norm": 0.8772551417350769, |
|
"learning_rate": 3.03190053850694e-05, |
|
"loss": 1.4382, |
|
"step": 14043 |
|
}, |
|
{ |
|
"epoch": 0.5937645023836645, |
|
"grad_norm": 0.8575912714004517, |
|
"learning_rate": 3.0236915932710573e-05, |
|
"loss": 1.4395, |
|
"step": 14074 |
|
}, |
|
{ |
|
"epoch": 0.5950723537105008, |
|
"grad_norm": 0.8548452258110046, |
|
"learning_rate": 3.0154767382467232e-05, |
|
"loss": 1.4569, |
|
"step": 14105 |
|
}, |
|
{ |
|
"epoch": 0.596380205037337, |
|
"grad_norm": 0.8532379865646362, |
|
"learning_rate": 3.0072560661374582e-05, |
|
"loss": 1.455, |
|
"step": 14136 |
|
}, |
|
{ |
|
"epoch": 0.5976880563641733, |
|
"grad_norm": 0.8995953798294067, |
|
"learning_rate": 2.999029669712431e-05, |
|
"loss": 1.4284, |
|
"step": 14167 |
|
}, |
|
{ |
|
"epoch": 0.5989959076910095, |
|
"grad_norm": 0.8514725565910339, |
|
"learning_rate": 2.990797641805408e-05, |
|
"loss": 1.4421, |
|
"step": 14198 |
|
}, |
|
{ |
|
"epoch": 0.6003037590178458, |
|
"grad_norm": 0.8758474588394165, |
|
"learning_rate": 2.982560075313704e-05, |
|
"loss": 1.4499, |
|
"step": 14229 |
|
}, |
|
{ |
|
"epoch": 0.6016116103446821, |
|
"grad_norm": 0.8488143086433411, |
|
"learning_rate": 2.9743170631971368e-05, |
|
"loss": 1.4372, |
|
"step": 14260 |
|
}, |
|
{ |
|
"epoch": 0.6029194616715183, |
|
"grad_norm": 0.8220718502998352, |
|
"learning_rate": 2.9660686984769792e-05, |
|
"loss": 1.4386, |
|
"step": 14291 |
|
}, |
|
{ |
|
"epoch": 0.6042273129983546, |
|
"grad_norm": 0.8164191842079163, |
|
"learning_rate": 2.9578150742349047e-05, |
|
"loss": 1.4483, |
|
"step": 14322 |
|
}, |
|
{ |
|
"epoch": 0.6055351643251909, |
|
"grad_norm": 0.8328901529312134, |
|
"learning_rate": 2.949556283611942e-05, |
|
"loss": 1.415, |
|
"step": 14353 |
|
}, |
|
{ |
|
"epoch": 0.6068430156520271, |
|
"grad_norm": 0.8837373852729797, |
|
"learning_rate": 2.9412924198074206e-05, |
|
"loss": 1.424, |
|
"step": 14384 |
|
}, |
|
{ |
|
"epoch": 0.6081508669788634, |
|
"grad_norm": 0.8237875699996948, |
|
"learning_rate": 2.9330235760779208e-05, |
|
"loss": 1.4431, |
|
"step": 14415 |
|
}, |
|
{ |
|
"epoch": 0.6094587183056998, |
|
"grad_norm": 0.8620266318321228, |
|
"learning_rate": 2.9247498457362188e-05, |
|
"loss": 1.4524, |
|
"step": 14446 |
|
}, |
|
{ |
|
"epoch": 0.610766569632536, |
|
"grad_norm": 0.8520477414131165, |
|
"learning_rate": 2.9164713221502373e-05, |
|
"loss": 1.4223, |
|
"step": 14477 |
|
}, |
|
{ |
|
"epoch": 0.6120744209593723, |
|
"grad_norm": 0.8385702967643738, |
|
"learning_rate": 2.9081880987419912e-05, |
|
"loss": 1.4357, |
|
"step": 14508 |
|
}, |
|
{ |
|
"epoch": 0.6133822722862085, |
|
"grad_norm": 0.8283481597900391, |
|
"learning_rate": 2.8999002689865296e-05, |
|
"loss": 1.4366, |
|
"step": 14539 |
|
}, |
|
{ |
|
"epoch": 0.6146901236130448, |
|
"grad_norm": 0.8642637133598328, |
|
"learning_rate": 2.8916079264108852e-05, |
|
"loss": 1.4541, |
|
"step": 14570 |
|
}, |
|
{ |
|
"epoch": 0.6159979749398811, |
|
"grad_norm": 0.8478630781173706, |
|
"learning_rate": 2.883311164593017e-05, |
|
"loss": 1.4401, |
|
"step": 14601 |
|
}, |
|
{ |
|
"epoch": 0.6173058262667173, |
|
"grad_norm": 0.8753395080566406, |
|
"learning_rate": 2.875010077160754e-05, |
|
"loss": 1.4702, |
|
"step": 14632 |
|
}, |
|
{ |
|
"epoch": 0.6186136775935536, |
|
"grad_norm": 0.827409029006958, |
|
"learning_rate": 2.866704757790741e-05, |
|
"loss": 1.4677, |
|
"step": 14663 |
|
}, |
|
{ |
|
"epoch": 0.6199215289203899, |
|
"grad_norm": 0.8742043375968933, |
|
"learning_rate": 2.858395300207376e-05, |
|
"loss": 1.4291, |
|
"step": 14694 |
|
}, |
|
{ |
|
"epoch": 0.6212293802472261, |
|
"grad_norm": 0.8350484371185303, |
|
"learning_rate": 2.8500817981817607e-05, |
|
"loss": 1.4475, |
|
"step": 14725 |
|
}, |
|
{ |
|
"epoch": 0.6225372315740624, |
|
"grad_norm": 0.8273993134498596, |
|
"learning_rate": 2.8417643455306336e-05, |
|
"loss": 1.4303, |
|
"step": 14756 |
|
}, |
|
{ |
|
"epoch": 0.6238450829008986, |
|
"grad_norm": 0.8380706310272217, |
|
"learning_rate": 2.8334430361153185e-05, |
|
"loss": 1.4479, |
|
"step": 14787 |
|
}, |
|
{ |
|
"epoch": 0.6251529342277349, |
|
"grad_norm": 0.8500368595123291, |
|
"learning_rate": 2.8251179638406612e-05, |
|
"loss": 1.4293, |
|
"step": 14818 |
|
}, |
|
{ |
|
"epoch": 0.6264607855545712, |
|
"grad_norm": 0.837519109249115, |
|
"learning_rate": 2.8167892226539704e-05, |
|
"loss": 1.4433, |
|
"step": 14849 |
|
}, |
|
{ |
|
"epoch": 0.6277686368814074, |
|
"grad_norm": 0.8816763758659363, |
|
"learning_rate": 2.8084569065439588e-05, |
|
"loss": 1.4225, |
|
"step": 14880 |
|
}, |
|
{ |
|
"epoch": 0.6290764882082437, |
|
"grad_norm": 0.8599192500114441, |
|
"learning_rate": 2.8001211095396807e-05, |
|
"loss": 1.4494, |
|
"step": 14911 |
|
}, |
|
{ |
|
"epoch": 0.63038433953508, |
|
"grad_norm": 0.8788051009178162, |
|
"learning_rate": 2.791781925709473e-05, |
|
"loss": 1.424, |
|
"step": 14942 |
|
}, |
|
{ |
|
"epoch": 0.6316921908619162, |
|
"grad_norm": 0.8713638186454773, |
|
"learning_rate": 2.7834394491598908e-05, |
|
"loss": 1.4386, |
|
"step": 14973 |
|
}, |
|
{ |
|
"epoch": 0.6330000421887525, |
|
"grad_norm": 0.883663535118103, |
|
"learning_rate": 2.7750937740346485e-05, |
|
"loss": 1.4509, |
|
"step": 15004 |
|
}, |
|
{ |
|
"epoch": 0.6343078935155887, |
|
"grad_norm": 0.8379449248313904, |
|
"learning_rate": 2.7667449945135564e-05, |
|
"loss": 1.4257, |
|
"step": 15035 |
|
}, |
|
{ |
|
"epoch": 0.635615744842425, |
|
"grad_norm": 0.8503741025924683, |
|
"learning_rate": 2.7583932048114557e-05, |
|
"loss": 1.429, |
|
"step": 15066 |
|
}, |
|
{ |
|
"epoch": 0.6369235961692613, |
|
"grad_norm": 0.8763847351074219, |
|
"learning_rate": 2.7500384991771587e-05, |
|
"loss": 1.4362, |
|
"step": 15097 |
|
}, |
|
{ |
|
"epoch": 0.6382314474960975, |
|
"grad_norm": 0.8516677021980286, |
|
"learning_rate": 2.7416809718923825e-05, |
|
"loss": 1.4248, |
|
"step": 15128 |
|
}, |
|
{ |
|
"epoch": 0.6395392988229338, |
|
"grad_norm": 0.8780989050865173, |
|
"learning_rate": 2.7333207172706864e-05, |
|
"loss": 1.4154, |
|
"step": 15159 |
|
}, |
|
{ |
|
"epoch": 0.6408471501497701, |
|
"grad_norm": 0.8541295528411865, |
|
"learning_rate": 2.7249578296564088e-05, |
|
"loss": 1.4364, |
|
"step": 15190 |
|
}, |
|
{ |
|
"epoch": 0.6421550014766063, |
|
"grad_norm": 0.8412326574325562, |
|
"learning_rate": 2.7165924034235973e-05, |
|
"loss": 1.4317, |
|
"step": 15221 |
|
}, |
|
{ |
|
"epoch": 0.6434628528034426, |
|
"grad_norm": 0.8619037866592407, |
|
"learning_rate": 2.708224532974953e-05, |
|
"loss": 1.4421, |
|
"step": 15252 |
|
} |
|
], |
|
"logging_steps": 31, |
|
"max_steps": 30517, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 3052, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.065842709765292e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|