{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.4610678181612176,
  "eval_steps": 500,
  "global_step": 18312,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0007805320206966878, "grad_norm": 24.262310028076172, "learning_rate": 1.0157273918741808e-06, "loss": 9.1957, "step": 31},
    {"epoch": 0.0015610640413933755, "grad_norm": 29.819591522216797, "learning_rate": 2.0314547837483616e-06, "loss": 7.9757, "step": 62},
    {"epoch": 0.002341596062090063, "grad_norm": 14.961602210998535, "learning_rate": 3.0471821756225426e-06, "loss": 6.5436, "step": 93},
    {"epoch": 0.003122128082786751, "grad_norm": 5.151533126831055, "learning_rate": 4.062909567496723e-06, "loss": 5.4812, "step": 124},
    {"epoch": 0.003902660103483439, "grad_norm": 7.370818138122559, "learning_rate": 5.078636959370905e-06, "loss": 4.8332, "step": 155},
    {"epoch": 0.004683192124180126, "grad_norm": 7.932437896728516, "learning_rate": 6.094364351245085e-06, "loss": 4.4411, "step": 186},
    {"epoch": 0.005463724144876814, "grad_norm": 7.834314823150635, "learning_rate": 7.110091743119267e-06, "loss": 4.1978, "step": 217},
    {"epoch": 0.006244256165573502, "grad_norm": 11.881230354309082, "learning_rate": 8.125819134993446e-06, "loss": 3.9897, "step": 248},
    {"epoch": 0.00702478818627019, "grad_norm": 6.348697185516357, "learning_rate": 9.141546526867629e-06, "loss": 3.8375, "step": 279},
    {"epoch": 0.007805320206966878, "grad_norm": 8.414978981018066, "learning_rate": 1.015727391874181e-05, "loss": 3.7681, "step": 310},
    {"epoch": 0.008585852227663565, "grad_norm": 6.531826972961426, "learning_rate": 1.117300131061599e-05, "loss": 3.6599, "step": 341},
    {"epoch": 0.009366384248360253, "grad_norm": 9.794255256652832, "learning_rate": 1.218872870249017e-05, "loss": 3.5324, "step": 372},
    {"epoch": 0.01014691626905694, "grad_norm": 8.084766387939453, "learning_rate": 1.3204456094364351e-05, "loss": 3.4696, "step": 403},
    {"epoch": 0.010927448289753628, "grad_norm": 7.659717559814453, "learning_rate": 1.4220183486238533e-05, "loss": 3.3832, "step": 434},
    {"epoch": 0.011707980310450316, "grad_norm": 5.131711959838867, "learning_rate": 1.5235910878112714e-05, "loss": 3.3514, "step": 465},
    {"epoch": 0.012488512331147004, "grad_norm": 6.3673601150512695, "learning_rate": 1.6251638269986893e-05, "loss": 3.2988, "step": 496},
    {"epoch": 0.013269044351843692, "grad_norm": 4.5014142990112305, "learning_rate": 1.7267365661861077e-05, "loss": 3.239, "step": 527},
    {"epoch": 0.01404957637254038, "grad_norm": 5.1420769691467285, "learning_rate": 1.8283093053735257e-05, "loss": 3.1892, "step": 558},
    {"epoch": 0.014830108393237068, "grad_norm": 5.227200508117676, "learning_rate": 1.9298820445609438e-05, "loss": 3.1732, "step": 589},
    {"epoch": 0.015610640413933756, "grad_norm": 4.4621734619140625, "learning_rate": 2.031454783748362e-05, "loss": 3.0898, "step": 620},
    {"epoch": 0.016391172434630442, "grad_norm": 5.213263511657715, "learning_rate": 2.13302752293578e-05, "loss": 3.0832, "step": 651},
    {"epoch": 0.01717170445532713, "grad_norm": 4.23059606552124, "learning_rate": 2.234600262123198e-05, "loss": 3.0341, "step": 682},
    {"epoch": 0.017952236476023817, "grad_norm": 3.5276854038238525, "learning_rate": 2.336173001310616e-05, "loss": 3.0241, "step": 713},
    {"epoch": 0.018732768496720505, "grad_norm": 3.199700355529785, "learning_rate": 2.437745740498034e-05, "loss": 2.9659, "step": 744},
    {"epoch": 0.019513300517417193, "grad_norm": 9.667389869689941, "learning_rate": 2.5393184796854525e-05, "loss": 2.9292, "step": 775},
    {"epoch": 0.02029383253811388, "grad_norm": 3.677541732788086, "learning_rate": 2.6408912188728702e-05, "loss": 2.919, "step": 806},
    {"epoch": 0.02107436455881057, "grad_norm": 3.711683750152588, "learning_rate": 2.7424639580602886e-05, "loss": 2.9141, "step": 837},
    {"epoch": 0.021854896579507257, "grad_norm": 3.1250174045562744, "learning_rate": 2.8440366972477066e-05, "loss": 2.8585, "step": 868},
    {"epoch": 0.022635428600203945, "grad_norm": 3.4183220863342285, "learning_rate": 2.9456094364351244e-05, "loss": 2.8206, "step": 899},
    {"epoch": 0.023415960620900633, "grad_norm": 2.8176679611206055, "learning_rate": 3.0471821756225428e-05, "loss": 2.787, "step": 930},
    {"epoch": 0.02419649264159732, "grad_norm": 2.7993154525756836, "learning_rate": 3.148754914809961e-05, "loss": 2.7678, "step": 961},
    {"epoch": 0.02497702466229401, "grad_norm": 2.8017027378082275, "learning_rate": 3.2503276539973785e-05, "loss": 2.7499, "step": 992},
    {"epoch": 0.025757556682990696, "grad_norm": 2.958606004714966, "learning_rate": 3.351900393184797e-05, "loss": 2.7211, "step": 1023},
    {"epoch": 0.026538088703687384, "grad_norm": 2.5942046642303467, "learning_rate": 3.453473132372215e-05, "loss": 2.6823, "step": 1054},
    {"epoch": 0.027318620724384072, "grad_norm": 3.1978206634521484, "learning_rate": 3.555045871559633e-05, "loss": 2.6943, "step": 1085},
    {"epoch": 0.02809915274508076, "grad_norm": 2.6260809898376465, "learning_rate": 3.6566186107470514e-05, "loss": 2.6574, "step": 1116},
    {"epoch": 0.028879684765777448, "grad_norm": 2.6633715629577637, "learning_rate": 3.7581913499344695e-05, "loss": 2.6394, "step": 1147},
    {"epoch": 0.029660216786474135, "grad_norm": 2.5701236724853516, "learning_rate": 3.8597640891218876e-05, "loss": 2.6336, "step": 1178},
    {"epoch": 0.030440748807170823, "grad_norm": 2.4242734909057617, "learning_rate": 3.9613368283093056e-05, "loss": 2.6285, "step": 1209},
    {"epoch": 0.03122128082786751, "grad_norm": 2.6410114765167236, "learning_rate": 4.062909567496724e-05, "loss": 2.5821, "step": 1240},
    {"epoch": 0.032001812848564196, "grad_norm": 2.419649839401245, "learning_rate": 4.164482306684142e-05, "loss": 2.5576, "step": 1271},
    {"epoch": 0.032782344869260883, "grad_norm": 2.314750909805298, "learning_rate": 4.26605504587156e-05, "loss": 2.5438, "step": 1302},
    {"epoch": 0.03356287688995757, "grad_norm": 2.288361072540283, "learning_rate": 4.367627785058978e-05, "loss": 2.5197, "step": 1333},
    {"epoch": 0.03434340891065426, "grad_norm": 2.328840494155884, "learning_rate": 4.469200524246396e-05, "loss": 2.5193, "step": 1364},
    {"epoch": 0.03512394093135095, "grad_norm": 2.2555599212646484, "learning_rate": 4.570773263433814e-05, "loss": 2.4907, "step": 1395},
    {"epoch": 0.035904472952047635, "grad_norm": 2.1358766555786133, "learning_rate": 4.672346002621232e-05, "loss": 2.4737, "step": 1426},
    {"epoch": 0.03668500497274432, "grad_norm": 2.1471140384674072, "learning_rate": 4.77391874180865e-05, "loss": 2.4733, "step": 1457},
    {"epoch": 0.03746553699344101, "grad_norm": 2.1371941566467285, "learning_rate": 4.875491480996068e-05, "loss": 2.432, "step": 1488},
    {"epoch": 0.0382460690141377, "grad_norm": 1.9789690971374512, "learning_rate": 4.977064220183487e-05, "loss": 2.4407, "step": 1519},
    {"epoch": 0.039026601034834386, "grad_norm": 2.0055742263793945, "learning_rate": 4.9999915451558777e-05, "loss": 2.4178, "step": 1550},
    {"epoch": 0.039807133055531074, "grad_norm": 2.095475196838379, "learning_rate": 4.999955597496219e-05, "loss": 2.4198, "step": 1581},
    {"epoch": 0.04058766507622776, "grad_norm": 2.046337842941284, "learning_rate": 4.9998914381774255e-05, "loss": 2.3936, "step": 1612},
    {"epoch": 0.04136819709692445, "grad_norm": 1.8528848886489868, "learning_rate": 4.999799067923527e-05, "loss": 2.3893, "step": 1643},
    {"epoch": 0.04214872911762114, "grad_norm": 1.883175253868103, "learning_rate": 4.999678487776908e-05, "loss": 2.3668, "step": 1674},
    {"epoch": 0.042929261138317826, "grad_norm": 1.8084770441055298, "learning_rate": 4.9995296990983006e-05, "loss": 2.3702, "step": 1705},
    {"epoch": 0.043709793159014514, "grad_norm": 1.8336963653564453, "learning_rate": 4.999352703566763e-05, "loss": 2.357, "step": 1736},
    {"epoch": 0.0444903251797112, "grad_norm": 1.8369375467300415, "learning_rate": 4.999147503179668e-05, "loss": 2.3393, "step": 1767},
    {"epoch": 0.04527085720040789, "grad_norm": 1.8411383628845215, "learning_rate": 4.998914100252672e-05, "loss": 2.319, "step": 1798},
    {"epoch": 0.04605138922110458, "grad_norm": 1.785543441772461, "learning_rate": 4.998652497419696e-05, "loss": 2.3259, "step": 1829},
    {"epoch": 0.046831921241801265, "grad_norm": 1.7194079160690308, "learning_rate": 4.9983626976328927e-05, "loss": 2.3046, "step": 1860},
    {"epoch": 0.04761245326249795, "grad_norm": 1.7909469604492188, "learning_rate": 4.998044704162613e-05, "loss": 2.307, "step": 1891},
    {"epoch": 0.04839298528319464, "grad_norm": 1.725806474685669, "learning_rate": 4.9976985205973705e-05, "loss": 2.2992, "step": 1922},
    {"epoch": 0.04917351730389133, "grad_norm": 1.67300546169281, "learning_rate": 4.997324150843799e-05, "loss": 2.2763, "step": 1953},
    {"epoch": 0.04995404932458802, "grad_norm": 1.7416061162948608, "learning_rate": 4.99692159912661e-05, "loss": 2.2596, "step": 1984},
    {"epoch": 0.050734581345284704, "grad_norm": 1.843024492263794, "learning_rate": 4.996490869988546e-05, "loss": 2.2644, "step": 2015},
    {"epoch": 0.05151511336598139, "grad_norm": 1.6172534227371216, "learning_rate": 4.996031968290326e-05, "loss": 2.271, "step": 2046},
    {"epoch": 0.05229564538667808, "grad_norm": 1.6510816812515259, "learning_rate": 4.995544899210594e-05, "loss": 2.2445, "step": 2077},
    {"epoch": 0.05307617740737477, "grad_norm": 1.625775933265686, "learning_rate": 4.9950296682458583e-05, "loss": 2.2432, "step": 2108},
    {"epoch": 0.053856709428071456, "grad_norm": 1.602299690246582, "learning_rate": 4.994486281210429e-05, "loss": 2.2266, "step": 2139},
    {"epoch": 0.054637241448768144, "grad_norm": 1.7036757469177246, "learning_rate": 4.9939147442363566e-05, "loss": 2.2292, "step": 2170},
    {"epoch": 0.05541777346946483, "grad_norm": 1.5851162672042847, "learning_rate": 4.9933150637733574e-05, "loss": 2.2286, "step": 2201},
    {"epoch": 0.05619830549016152, "grad_norm": 1.6328140497207642, "learning_rate": 4.992687246588743e-05, "loss": 2.2342, "step": 2232},
    {"epoch": 0.05697883751085821, "grad_norm": 1.4901413917541504, "learning_rate": 4.992031299767347e-05, "loss": 2.2094, "step": 2263},
    {"epoch": 0.057759369531554895, "grad_norm": 1.5897445678710938, "learning_rate": 4.9913472307114386e-05, "loss": 2.2126, "step": 2294},
    {"epoch": 0.05853990155225158, "grad_norm": 1.5534899234771729, "learning_rate": 4.9906350471406446e-05, "loss": 2.2125, "step": 2325},
    {"epoch": 0.05932043357294827, "grad_norm": 1.5713675022125244, "learning_rate": 4.989894757091861e-05, "loss": 2.2007, "step": 2356},
    {"epoch": 0.06010096559364496, "grad_norm": 1.473955750465393, "learning_rate": 4.989126368919158e-05, "loss": 2.1819, "step": 2387},
    {"epoch": 0.06088149761434165, "grad_norm": 1.5200265645980835, "learning_rate": 4.988329891293693e-05, "loss": 2.1944, "step": 2418},
    {"epoch": 0.061662029635038335, "grad_norm": 1.5236589908599854, "learning_rate": 4.987505333203608e-05, "loss": 2.1816, "step": 2449},
    {"epoch": 0.06244256165573502, "grad_norm": 1.5306485891342163, "learning_rate": 4.9866527039539276e-05, "loss": 2.187, "step": 2480},
    {"epoch": 0.06322309367643171, "grad_norm": 1.5017462968826294, "learning_rate": 4.9857720131664594e-05, "loss": 2.1706, "step": 2511},
    {"epoch": 0.06400362569712839, "grad_norm": 4.610604763031006, "learning_rate": 4.9848632707796773e-05, "loss": 2.1528, "step": 2542},
    {"epoch": 0.06478415771782509, "grad_norm": 1.4580930471420288, "learning_rate": 4.9839264870486155e-05, "loss": 2.1849, "step": 2573},
    {"epoch": 0.06556468973852177, "grad_norm": 1.5506151914596558, "learning_rate": 4.9829616725447526e-05, "loss": 2.1504, "step": 2604},
    {"epoch": 0.06634522175921846, "grad_norm": 1.4275034666061401, "learning_rate": 4.981968838155888e-05, "loss": 2.1492, "step": 2635},
    {"epoch": 0.06712575377991514, "grad_norm": 1.4749577045440674, "learning_rate": 4.980947995086024e-05, "loss": 2.1579, "step": 2666},
    {"epoch": 0.06790628580061184, "grad_norm": 1.4220107793807983, "learning_rate": 4.979899154855234e-05, "loss": 2.1522, "step": 2697},
    {"epoch": 0.06868681782130852, "grad_norm": 1.4243831634521484, "learning_rate": 4.9788223292995386e-05, "loss": 2.1258, "step": 2728},
    {"epoch": 0.06946734984200521, "grad_norm": 1.4925100803375244, "learning_rate": 4.977717530570768e-05, "loss": 2.1379, "step": 2759},
    {"epoch": 0.0702478818627019, "grad_norm": 1.4839624166488647, "learning_rate": 4.976584771136425e-05, "loss": 2.1588, "step": 2790},
    {"epoch": 0.07102841388339859, "grad_norm": 1.4130090475082397, "learning_rate": 4.975424063779547e-05, "loss": 2.1461, "step": 2821},
    {"epoch": 0.07180894590409527, "grad_norm": 1.3940472602844238, "learning_rate": 4.974235421598557e-05, "loss": 2.1416, "step": 2852},
    {"epoch": 0.07258947792479196, "grad_norm": 1.4502291679382324, "learning_rate": 4.973018858007122e-05, "loss": 2.1315, "step": 2883},
    {"epoch": 0.07337000994548865, "grad_norm": 1.4334006309509277, "learning_rate": 4.9717743867339963e-05, "loss": 2.122, "step": 2914},
    {"epoch": 0.07415054196618534, "grad_norm": 1.4366185665130615, "learning_rate": 4.9705020218228695e-05, "loss": 2.1179, "step": 2945},
    {"epoch": 0.07493107398688202, "grad_norm": 1.437264084815979, "learning_rate": 4.969201777632205e-05, "loss": 2.1171, "step": 2976},
    {"epoch": 0.07571160600757872, "grad_norm": 1.3713815212249756, "learning_rate": 4.9678736688350846e-05, "loss": 2.1215, "step": 3007},
    {"epoch": 0.0764921380282754, "grad_norm": 1.5930180549621582, "learning_rate": 4.966517710419033e-05, "loss": 2.0961, "step": 3038},
    {"epoch": 0.07727267004897209, "grad_norm": 1.4208778142929077, "learning_rate": 4.965133917685858e-05, "loss": 2.1193, "step": 3069},
    {"epoch": 0.07805320206966877, "grad_norm": 1.3563286066055298, "learning_rate": 4.9637223062514714e-05, "loss": 2.0957, "step": 3100},
    {"epoch": 0.07883373409036547, "grad_norm": 1.382861852645874, "learning_rate": 4.962282892045718e-05, "loss": 2.117, "step": 3131},
    {"epoch": 0.07961426611106215, "grad_norm": 1.326231837272644, "learning_rate": 4.9608156913121904e-05, "loss": 2.0906, "step": 3162},
    {"epoch": 0.08039479813175884, "grad_norm": 1.3465954065322876, "learning_rate": 4.959320720608049e-05, "loss": 2.09, "step": 3193},
    {"epoch": 0.08117533015245552, "grad_norm": 1.3832777738571167, "learning_rate": 4.9577979968038354e-05, "loss": 2.089, "step": 3224},
    {"epoch": 0.08195586217315222, "grad_norm": 1.4453243017196655, "learning_rate": 4.956247537083282e-05, "loss": 2.0867, "step": 3255},
    {"epoch": 0.0827363941938489, "grad_norm": 1.310168743133545, "learning_rate": 4.9546693589431145e-05, "loss": 2.0915, "step": 3286},
    {"epoch": 0.0835169262145456, "grad_norm": 1.354078769683838, "learning_rate": 4.9530634801928595e-05, "loss": 2.0888, "step": 3317},
    {"epoch": 0.08429745823524228, "grad_norm": 1.3768595457077026, "learning_rate": 4.9514299189546395e-05, "loss": 2.0821, "step": 3348},
    {"epoch": 0.08507799025593897, "grad_norm": 1.3402544260025024, "learning_rate": 4.949768693662973e-05, "loss": 2.0776, "step": 3379},
    {"epoch": 0.08585852227663565, "grad_norm": 1.3178027868270874, "learning_rate": 4.948079823064559e-05, "loss": 2.0699, "step": 3410},
    {"epoch": 0.08663905429733235, "grad_norm": 1.7057585716247559, "learning_rate": 4.946363326218074e-05, "loss": 2.0615, "step": 3441},
    {"epoch": 0.08741958631802903, "grad_norm": 1.3453648090362549, "learning_rate": 4.9446192224939525e-05, "loss": 2.0788, "step": 3472},
    {"epoch": 0.08820011833872572, "grad_norm": 1.3319638967514038, "learning_rate": 4.942847531574167e-05, "loss": 2.0619, "step": 3503},
    {"epoch": 0.0889806503594224, "grad_norm": 1.2845163345336914, "learning_rate": 4.941048273452008e-05, "loss": 2.0728, "step": 3534},
    {"epoch": 0.0897611823801191, "grad_norm": 1.3615431785583496, "learning_rate": 4.9392214684318605e-05, "loss": 2.0538, "step": 3565},
    {"epoch": 0.09054171440081578, "grad_norm": 1.2946122884750366, "learning_rate": 4.93736713712897e-05, "loss": 2.0689, "step": 3596},
    {"epoch": 0.09132224642151247, "grad_norm": 1.294846534729004, "learning_rate": 4.9354853004692124e-05, "loss": 2.0706, "step": 3627},
    {"epoch": 0.09210277844220915, "grad_norm": 1.298010230064392, "learning_rate": 4.93357597968886e-05, "loss": 2.0406, "step": 3658},
    {"epoch": 0.09288331046290585, "grad_norm": 1.7569706439971924, "learning_rate": 4.931639196334338e-05, "loss": 2.0736, "step": 3689},
    {"epoch": 0.09366384248360253, "grad_norm": 1.2595418691635132, "learning_rate": 4.9296749722619826e-05, "loss": 2.0654, "step": 3720},
    {"epoch": 0.09444437450429922, "grad_norm": 1.3139164447784424, "learning_rate": 4.9276833296377966e-05, "loss": 2.0467, "step": 3751},
    {"epoch": 0.0952249065249959, "grad_norm": 1.290247917175293, "learning_rate": 4.925664290937196e-05, "loss": 2.0418, "step": 3782},
    {"epoch": 0.0960054385456926, "grad_norm": 1.2755826711654663, "learning_rate": 4.9236178789447576e-05, "loss": 2.0244, "step": 3813},
    {"epoch": 0.09678597056638928, "grad_norm": 1.2596077919006348, "learning_rate": 4.921544116753962e-05, "loss": 2.0609, "step": 3844},
    {"epoch": 0.09756650258708598, "grad_norm": 1.249387264251709, "learning_rate": 4.919443027766935e-05, "loss": 2.0455, "step": 3875},
    {"epoch": 0.09834703460778266, "grad_norm": 1.255379319190979, "learning_rate": 4.91731463569418e-05, "loss": 2.024, "step": 3906},
    {"epoch": 0.09912756662847935, "grad_norm": 1.2620748281478882, "learning_rate": 4.915158964554312e-05, "loss": 2.0355, "step": 3937},
    {"epoch": 0.09990809864917603, "grad_norm": 1.2736345529556274, "learning_rate": 4.912976038673786e-05, "loss": 2.0351, "step": 3968},
    {"epoch": 0.10068863066987273, "grad_norm": 1.18524169921875, "learning_rate": 4.9107658826866254e-05, "loss": 2.029, "step": 3999},
    {"epoch": 0.10146916269056941, "grad_norm": 10.429583549499512, "learning_rate": 4.908528521534139e-05, "loss": 2.0226, "step": 4030},
    {"epoch": 0.1022496947112661, "grad_norm": 1.240295648574829, "learning_rate": 4.906263980464644e-05, "loss": 2.0401, "step": 4061},
    {"epoch": 0.10303022673196278, "grad_norm": 1.2191627025604248, "learning_rate": 4.903972285033178e-05, "loss": 2.0079, "step": 4092},
    {"epoch": 0.10381075875265948, "grad_norm": 1.2340337038040161, "learning_rate": 4.901653461101213e-05, "loss": 2.0305, "step": 4123},
    {"epoch": 0.10459129077335616, "grad_norm": 1.284839153289795, "learning_rate": 4.8993075348363626e-05, "loss": 2.0212, "step": 4154},
    {"epoch": 0.10537182279405286, "grad_norm": 1.2955284118652344, "learning_rate": 4.896934532712084e-05, "loss": 2.0328, "step": 4185},
    {"epoch": 0.10615235481474954, "grad_norm": 1.165124535560608, "learning_rate": 4.8945344815073846e-05, "loss": 2.0248, "step": 4216},
    {"epoch": 0.10693288683544623, "grad_norm": 1.1925835609436035, "learning_rate": 4.892107408306516e-05, "loss": 2.0086, "step": 4247},
    {"epoch": 0.10771341885614291, "grad_norm": 1.1841633319854736, "learning_rate": 4.889653340498669e-05, "loss": 2.0166, "step": 4278},
    {"epoch": 0.1084939508768396, "grad_norm": 1.2198415994644165, "learning_rate": 4.8871723057776664e-05, "loss": 2.0164, "step": 4309},
    {"epoch": 0.10927448289753629, "grad_norm": 1.2284913063049316, "learning_rate": 4.8846643321416476e-05, "loss": 2.015, "step": 4340},
    {"epoch": 0.11005501491823298, "grad_norm": 1.2090349197387695, "learning_rate": 4.882129447892753e-05, "loss": 2.0094, "step": 4371},
    {"epoch": 0.11083554693892966, "grad_norm": 1.215232491493225, "learning_rate": 4.8795676816368076e-05, "loss": 2.0045, "step": 4402},
    {"epoch": 0.11161607895962636, "grad_norm": 1.2804045677185059, "learning_rate": 4.876979062282995e-05, "loss": 1.9899, "step": 4433},
    {"epoch": 0.11239661098032304, "grad_norm": 1.2405016422271729, "learning_rate": 4.8743636190435325e-05, "loss": 2.0093, "step": 4464},
    {"epoch": 0.11317714300101973, "grad_norm": 1.1784151792526245, "learning_rate": 4.871721381433344e-05, "loss": 2.0036, "step": 4495},
    {"epoch": 0.11395767502171641, "grad_norm": 1.185738205909729, "learning_rate": 4.869052379269719e-05, "loss": 2.0033, "step": 4526},
    {"epoch": 0.11473820704241311, "grad_norm": 1.1984614133834839, "learning_rate": 4.866356642671985e-05, "loss": 2.025, "step": 4557},
    {"epoch": 0.11551873906310979, "grad_norm": 1.1734029054641724, "learning_rate": 4.8636342020611634e-05, "loss": 2.0023, "step": 4588},
    {"epoch": 0.11629927108380649, "grad_norm": 1.198611855506897, "learning_rate": 4.860885088159626e-05, "loss": 2.0031, "step": 4619},
    {"epoch": 0.11707980310450317, "grad_norm": 1.1793649196624756, "learning_rate": 4.858109331990751e-05, "loss": 1.9806, "step": 4650},
    {"epoch": 0.11786033512519985, "grad_norm": 1.1945818662643433, "learning_rate": 4.855306964878567e-05, "loss": 1.9855, "step": 4681},
    {"epoch": 0.11864086714589654, "grad_norm": 1.1875253915786743, "learning_rate": 4.8524780184474084e-05, "loss": 1.9883, "step": 4712},
    {"epoch": 0.11942139916659322, "grad_norm": 1.1512328386306763, "learning_rate": 4.8496225246215496e-05, "loss": 1.9807, "step": 4743},
    {"epoch": 0.12020193118728992, "grad_norm": 1.2059484720230103, "learning_rate": 4.8467405156248505e-05, "loss": 2.0103, "step": 4774},
    {"epoch": 0.1209824632079866, "grad_norm": 1.1759779453277588, "learning_rate": 4.843832023980392e-05, "loss": 1.9754, "step": 4805},
    {"epoch": 0.1217629952286833, "grad_norm": 1.2004414796829224, "learning_rate": 4.840897082510106e-05, "loss": 1.9893, "step": 4836},
    {"epoch": 0.12254352724937997, "grad_norm": 1.1744251251220703, "learning_rate": 4.8379357243344084e-05, "loss": 1.9848, "step": 4867},
    {"epoch": 0.12332405927007667, "grad_norm": 1.1731071472167969, "learning_rate": 4.8349479828718236e-05, "loss": 1.9945, "step": 4898},
    {"epoch": 0.12410459129077335, "grad_norm": 1.2002074718475342, "learning_rate": 4.8319338918386075e-05, "loss": 1.9869, "step": 4929},
    {"epoch": 0.12488512331147004, "grad_norm": 1.147085189819336, "learning_rate": 4.828893485248369e-05, "loss": 1.9876, "step": 4960},
    {"epoch": 0.12566565533216673, "grad_norm": 1.1605637073516846, "learning_rate": 4.825826797411682e-05, "loss": 1.9858, "step": 4991},
    {"epoch": 0.12644618735286342, "grad_norm": 1.1823869943618774, "learning_rate": 4.822733862935702e-05, "loss": 1.9617, "step": 5022},
    {"epoch": 0.12722671937356012, "grad_norm": 1.1143817901611328, "learning_rate": 4.819614716723775e-05, "loss": 1.9756, "step": 5053},
    {"epoch": 0.12800725139425678, "grad_norm": 1.1646877527236938, "learning_rate": 4.8164693939750425e-05, "loss": 2.0005, "step": 5084},
    {"epoch": 0.12878778341495348, "grad_norm": 1.1390529870986938, "learning_rate": 4.813297930184042e-05, "loss": 1.9943, "step": 5115},
    {"epoch": 0.12956831543565017, "grad_norm": 1.1759583950042725, "learning_rate": 4.810100361140314e-05, "loss": 1.9772, "step": 5146},
    {"epoch": 0.13034884745634687, "grad_norm": 1.1465727090835571, "learning_rate": 4.8068767229279885e-05, "loss": 1.9563, "step": 5177},
    {"epoch": 0.13112937947704353, "grad_norm": 1.2573471069335938, "learning_rate": 4.8036270519253854e-05, "loss": 1.9735, "step": 5208},
    {"epoch": 0.13190991149774023, "grad_norm": 1.1301871538162231, "learning_rate": 4.8003513848046e-05, "loss": 1.987, "step": 5239},
    {"epoch": 0.13269044351843692, "grad_norm": 1.1424096822738647, "learning_rate": 4.79704975853109e-05, "loss": 1.9661, "step": 5270},
    {"epoch": 0.13347097553913362, "grad_norm": 1.131800889968872, "learning_rate": 4.793722210363262e-05, "loss": 1.9828, "step": 5301},
    {"epoch": 0.13425150755983029, "grad_norm": 1.1227242946624756, "learning_rate": 4.7903687778520414e-05, "loss": 1.975, "step": 5332},
    {"epoch": 0.13503203958052698, "grad_norm": 1.143832802772522, "learning_rate": 4.7869894988404593e-05, "loss": 1.9619, "step": 5363},
    {"epoch": 0.13581257160122367, "grad_norm": 1.1281683444976807, "learning_rate": 4.783584411463221e-05, "loss": 1.9603, "step": 5394},
    {"epoch": 0.13659310362192037, "grad_norm": 1.134901762008667, "learning_rate": 4.780153554146274e-05, "loss": 1.9888, "step": 5425},
    {"epoch": 0.13737363564261704, "grad_norm": 1.1317532062530518, "learning_rate": 4.7766969656063766e-05, "loss": 1.9793, "step": 5456},
    {"epoch": 0.13815416766331373, "grad_norm": 1.2516345977783203, "learning_rate": 4.773214684850662e-05, "loss": 1.9839, "step": 5487},
    {"epoch": 0.13893469968401043, "grad_norm": 1.107391119003296, "learning_rate": 4.769706751176193e-05, "loss": 1.9606, "step": 5518},
    {"epoch": 0.13971523170470712, "grad_norm": 1.1041492223739624, "learning_rate": 4.7661732041695264e-05, "loss": 1.9688, "step": 5549},
    {"epoch": 0.1404957637254038, "grad_norm": 1.1099929809570312, "learning_rate": 4.762614083706258e-05, "loss": 1.9821, "step": 5580},
    {"epoch": 0.14127629574610048, "grad_norm": 1.167151927947998, "learning_rate": 4.759029429950581e-05, "loss": 1.9669, "step": 5611},
    {"epoch": 0.14205682776679718, "grad_norm": 1.0942577123641968, "learning_rate": 4.7554192833548235e-05, "loss": 1.9565, "step": 5642},
    {"epoch": 0.14283735978749387, "grad_norm": 1.0859640836715698, "learning_rate": 4.751783684659e-05, "loss": 1.9634, "step": 5673},
    {"epoch": 0.14361789180819054, "grad_norm": 2.420607089996338, "learning_rate": 4.748122674890348e-05, "loss": 1.9365, "step": 5704},
    {"epoch": 0.14439842382888723, "grad_norm": 1.0996880531311035, "learning_rate": 4.7444362953628654e-05, "loss": 1.9718, "step": 5735},
    {"epoch": 0.14517895584958393, "grad_norm": 1.1183338165283203, "learning_rate": 4.7407245876768424e-05, "loss": 1.9564, "step": 5766},
    {"epoch": 0.14595948787028062, "grad_norm": 1.1093947887420654, "learning_rate": 4.736987593718397e-05, "loss": 1.9516, "step": 5797},
    {"epoch": 0.1467400198909773, "grad_norm": 1.141797423362732, "learning_rate": 4.733225355658999e-05, "loss": 1.929, "step": 5828},
    {"epoch": 0.14752055191167399, "grad_norm": 1.1214358806610107, "learning_rate": 4.7294379159549926e-05, "loss": 1.9297, "step": 5859},
    {"epoch": 0.14830108393237068, "grad_norm": 1.1025431156158447, "learning_rate": 4.725625317347119e-05, "loss": 1.9226, "step": 5890},
    {"epoch": 0.14908161595306738, "grad_norm": 1.1094226837158203, "learning_rate": 4.7217876028600374e-05, "loss": 1.946, "step": 5921},
    {"epoch": 0.14986214797376404, "grad_norm": 1.0909287929534912, "learning_rate": 4.717924815801832e-05, "loss": 1.9511, "step": 5952},
    {"epoch": 0.15064267999446074, "grad_norm": 1.0755189657211304, "learning_rate": 4.714036999763532e-05, "loss": 1.939, "step": 5983},
    {"epoch": 0.15142321201515743, "grad_norm": 1.1426901817321777, "learning_rate": 4.7101241986186116e-05, "loss": 1.93, "step": 6014},
    {"epoch": 0.15220374403585413, "grad_norm": 1.1208311319351196, "learning_rate": 4.7061864565225e-05, "loss": 1.9516, "step": 6045},
    {"epoch": 0.1529842760565508, "grad_norm": 1.0931717157363892, "learning_rate": 4.702223817912081e-05, "loss": 1.9314, "step": 6076},
    {"epoch": 0.1537648080772475, "grad_norm": 1.075864553451538, "learning_rate": 4.698236327505195e-05, "loss": 1.935, "step": 6107},
    {"epoch": 0.15454534009794418, "grad_norm": 1.090559720993042, "learning_rate": 4.694224030300127e-05, "loss": 1.9431, "step": 6138},
    {"epoch": 0.15532587211864088, "grad_norm": 1.0956670045852661, "learning_rate": 4.690186971575107e-05, "loss": 1.9678, "step": 6169},
    {"epoch": 0.15610640413933755, "grad_norm": 1.125927448272705, "learning_rate": 4.6861251968877916e-05, "loss": 1.9292, "step": 6200},
    {"epoch": 0.15688693616003424, "grad_norm": 1.0844208002090454, "learning_rate": 4.68203875207476e-05, "loss": 1.9189, "step": 6231},
    {"epoch": 0.15766746818073094, "grad_norm": 1.064327597618103, "learning_rate": 4.677927683250983e-05, "loss": 1.928, "step": 6262},
    {"epoch": 0.15844800020142763, "grad_norm": 1.1281861066818237, "learning_rate": 4.6737920368093156e-05, "loss": 1.9437, "step": 6293},
    {"epoch": 0.1592285322221243, "grad_norm": 1.0674384832382202, "learning_rate": 4.669631859419965e-05, "loss": 1.9347, "step": 6324},
    {"epoch": 0.160009064242821, "grad_norm": 1.105566382408142, "learning_rate": 4.6654471980299676e-05, "loss": 1.9229, "step": 6355},
    {"epoch": 0.1607895962635177, "grad_norm": 1.0582126379013062, "learning_rate": 4.661238099862658e-05, "loss": 1.9321, "step": 6386},
    {"epoch": 0.16157012828421438, "grad_norm": 1.1147974729537964, "learning_rate": 4.657004612417138e-05, "loss": 1.9289, "step": 6417},
    {"epoch": 0.16235066030491105, "grad_norm": 1.0804343223571777, "learning_rate": 4.6527467834677374e-05, "loss": 1.9307, "step": 6448},
    {"epoch": 0.16313119232560774, "grad_norm": 1.0830508470535278, "learning_rate": 4.648464661063478e-05, "loss": 1.9408, "step": 6479},
    {"epoch": 0.16391172434630444, "grad_norm": 1.0533562898635864, "learning_rate": 4.6441582935275264e-05, "loss": 1.9347, "step": 6510},
    {"epoch": 0.16469225636700113, "grad_norm": 1.044925332069397, "learning_rate": 4.6398277294566586e-05, "loss": 1.9112, "step": 6541},
    {"epoch": 0.1654727883876978, "grad_norm": 1.087296962738037, "learning_rate": 4.6354730177207e-05, "loss": 1.9381, "step": 6572},
    {"epoch": 0.1662533204083945, "grad_norm": 1.0853668451309204, "learning_rate": 4.6310942074619787e-05, "loss": 1.9224, "step": 6603},
    {"epoch": 0.1670338524290912, "grad_norm": 1.0661752223968506, "learning_rate": 4.626691348094777e-05, "loss": 1.9386, "step": 6634},
    {"epoch": 0.16781438444978788, "grad_norm": 1.0393208265304565, "learning_rate": 4.622264489304762e-05, "loss": 1.9266, "step": 6665},
    {"epoch": 0.16859491647048455, "grad_norm": 1.0324763059616089, "learning_rate": 4.617813681048434e-05, "loss": 1.919, "step": 6696},
    {"epoch": 0.16937544849118125, "grad_norm": 1.0684878826141357, "learning_rate": 4.61333897355256e-05, "loss": 1.9321, "step": 6727},
    {"epoch": 0.17015598051187794, "grad_norm": 1.0477505922317505, "learning_rate": 4.608840417313604e-05, "loss": 1.9449, "step": 6758},
    {"epoch": 0.17093651253257464, "grad_norm": 1.0684869289398193, "learning_rate": 4.6043180630971646e-05, "loss": 1.9252, "step": 6789},
    {"epoch": 0.1717170445532713, "grad_norm": 1.0869554281234741, "learning_rate": 4.599771961937391e-05, "loss": 1.9183, "step": 6820},
    {"epoch": 0.172497576573968, "grad_norm": 1.0297125577926636, "learning_rate": 4.5952021651364204e-05, "loss": 1.9123, "step": 6851},
    {"epoch": 0.1732781085946647, "grad_norm": 1.0670006275177002, "learning_rate": 4.590608724263786e-05, "loss": 1.9353, "step": 6882},
    {"epoch": 0.1740586406153614, "grad_norm": 1.0744417905807495, "learning_rate": 4.585991691155845e-05, "loss": 1.8979, "step": 6913},
    {"epoch": 0.17483917263605805, "grad_norm": 1.0910695791244507, "learning_rate": 4.581351117915188e-05, "loss": 1.9153, "step": 6944},
    {"epoch": 0.17561970465675475, "grad_norm": 1.083776593208313, "learning_rate": 4.5766870569100534e-05, "loss": 1.9026, "step": 6975},
    {"epoch": 0.17640023667745144, "grad_norm": 1.0857270956039429, "learning_rate": 4.571999560773736e-05, "loss": 1.9279, "step": 7006},
    {"epoch": 0.1771807686981481, "grad_norm": 1.0350106954574585, "learning_rate": 4.5672886824039915e-05, "loss": 1.9236, "step": 7037},
    {"epoch": 0.1779613007188448, "grad_norm": 1.0498634576797485, "learning_rate": 4.5625544749624435e-05, "loss": 1.8934, "step": 7068},
    {"epoch": 0.1787418327395415, "grad_norm": 1.0836471319198608, "learning_rate": 4.5577969918739794e-05, "loss": 1.9131, "step": 7099},
    {"epoch": 0.1795223647602382, "grad_norm": 1.0414760112762451, "learning_rate": 4.5530162868261486e-05, "loss": 1.8792, "step": 7130},
    {"epoch": 0.18030289678093486, "grad_norm": 1.0747581720352173, "learning_rate": 4.548212413768558e-05, "loss": 1.8973, "step": 7161},
    {"epoch": 0.18108342880163156, "grad_norm": 1.0464451313018799, "learning_rate": 4.543385426912261e-05, "loss": 1.907, "step": 7192},
    {"epoch": 0.18186396082232825, "grad_norm": 1.0159403085708618, "learning_rate": 4.53853538072915e-05, "loss": 1.9111, "step": 7223},
    {"epoch": 0.18264449284302495, "grad_norm": 1.093883991241455, "learning_rate": 4.533662329951336e-05, "loss": 1.8927, "step": 7254},
    {"epoch": 0.18342502486372161, "grad_norm": 1.0358048677444458, "learning_rate": 4.528766329570536e-05, "loss": 1.9164, "step": 7285},
    {"epoch": 0.1842055568844183, "grad_norm": 1.051798939704895, "learning_rate": 4.523847434837447e-05, "loss": 1.9084, "step": 7316},
    {"epoch": 0.184986088905115, "grad_norm": 1.0729514360427856, "learning_rate": 4.518905701261128e-05, "loss": 1.9089, "step": 7347},
    {"epoch": 0.1857666209258117, "grad_norm": 1.0399775505065918, "learning_rate": 4.5139411846083715e-05, "loss": 1.922, "step": 7378},
    {"epoch": 0.18654715294650837, "grad_norm": 1.0500715970993042, "learning_rate": 4.508953940903073e-05, "loss": 1.9118, "step": 7409},
    {"epoch": 0.18732768496720506, "grad_norm": 1.0770058631896973, "learning_rate": 4.5039440264255994e-05, "loss": 1.9059, "step": 7440},
    {"epoch": 0.18810821698790176, "grad_norm": 1.0471183061599731, "learning_rate": 4.498911497712155e-05, "loss": 1.9119, "step": 7471},
    {"epoch": 0.18888874900859845, "grad_norm": 1.038761019706726, "learning_rate": 4.493856411554142e-05, "loss": 1.8798, "step": 7502},
    {"epoch": 0.18966928102929512, "grad_norm": 1.0365252494812012, "learning_rate": 4.4887788249975206e-05, "loss": 1.8983, "step": 7533},
    {"epoch": 0.1904498130499918, "grad_norm": 1.1033800840377808, "learning_rate": 4.4836787953421656e-05, "loss": 1.8911, "step": 7564},
    {"epoch": 0.1912303450706885, "grad_norm": 1.064213514328003, "learning_rate": 4.478556380141218e-05, "loss": 1.8995, "step": 7595},
    {"epoch": 0.1920108770913852, "grad_norm": 0.997905969619751, "learning_rate": 4.4734116372004375e-05, "loss": 1.8705, "step": 7626},
    {"epoch": 0.19279140911208187, "grad_norm": 1.0392402410507202, "learning_rate": 4.4682446245775477e-05, "loss": 1.9105, "step": 7657},
    {"epoch": 0.19357194113277856, "grad_norm": 1.0340332984924316, "learning_rate": 4.463055400581586e-05, "loss": 1.9074, "step": 7688},
    {"epoch": 0.19435247315347526, "grad_norm": 1.0196648836135864, "learning_rate": 4.4578440237722374e-05, "loss": 1.9059, "step": 7719},
    {"epoch": 0.19513300517417195, "grad_norm": 0.99578458070755, "learning_rate": 4.452610552959183e-05, "loss": 1.8974, "step": 7750},
    {"epoch": 0.19591353719486862, "grad_norm": 1.026090145111084, "learning_rate": 4.447355047201428e-05, "loss": 1.8886, "step": 7781},
    {"epoch": 0.19669406921556531, "grad_norm": 0.9945096969604492, "learning_rate": 4.4420775658066414e-05, "loss": 1.8879, "step": 7812},
    {"epoch": 0.197474601236262, "grad_norm": 1.0226850509643555, "learning_rate": 4.436778168330484e-05, "loss": 1.8936, "step": 7843},
    {"epoch": 0.1982551332569587, "grad_norm": 1.0433967113494873, "learning_rate": 4.4314569145759353e-05, "loss": 1.882, "step": 7874},
    {"epoch": 0.19903566527765537, "grad_norm": 1.0612221956253052, "learning_rate": 4.42611386459262e-05, "loss": 1.8879, "step": 7905},
    {"epoch": 0.19981619729835207, "grad_norm": 1.0252665281295776, "learning_rate": 4.420749078676133e-05, "loss": 1.9005, "step": 7936},
    {"epoch": 0.20059672931904876, "grad_norm": 1.015100359916687, "learning_rate": 4.4153626173673516e-05, "loss": 1.8944, "step": 7967},
    {"epoch": 0.20137726133974546, "grad_norm": 1.0417771339416504, "learning_rate": 4.409954541451762e-05, "loss": 1.8837, "step": 7998},
    {"epoch": 0.20215779336044212, "grad_norm": 1.026708960533142, "learning_rate": 4.404524911958764e-05, "loss": 1.92, "step": 8029},
    {"epoch": 0.20293832538113882, "grad_norm": 1.0414159297943115, "learning_rate": 4.399073790160989e-05, "loss": 1.8709, "step": 8060},
    {"epoch": 0.2037188574018355, "grad_norm": 1.014039397239685, "learning_rate": 4.393601237573607e-05, "loss": 1.8842, "step": 8091},
    {"epoch": 0.2044993894225322, "grad_norm": 1.0103037357330322, "learning_rate": 4.388107315953628e-05, "loss": 1.8889, "step": 8122},
    {"epoch": 0.20527992144322887, "grad_norm": 1.0166395902633667, "learning_rate": 4.382592087299212e-05, "loss": 1.8808, "step": 8153},
    {"epoch": 0.20606045346392557, "grad_norm": 1.023850679397583, "learning_rate": 4.377055613848964e-05, "loss": 1.8918, "step": 8184},
    {"epoch": 0.20684098548462226, "grad_norm": 1.0251572132110596, "learning_rate": 4.3714979580812355e-05, "loss": 1.882, "step": 8215},
    {"epoch": 0.20762151750531896, "grad_norm": 1.0183281898498535, "learning_rate": 4.365919182713416e-05, "loss": 1.8844, "step": 8246},
    {"epoch": 0.20840204952601563, "grad_norm": 1.0135486125946045, "learning_rate": 4.360319350701226e-05, "loss": 1.8814, "step": 8277},
    {"epoch": 0.20918258154671232, "grad_norm": 1.0247812271118164, "learning_rate": 4.3546985252380115e-05, "loss": 1.8818, "step": 8308},
    {"epoch": 0.20996311356740902, "grad_norm": 1.0123242139816284, "learning_rate": 4.349056769754021e-05, "loss": 1.8723, "step": 8339},
    {"epoch": 0.2107436455881057, "grad_norm": 1.0139250755310059, "learning_rate": 4.3433941479156994e-05, "loss": 1.8858, "step": 8370},
    {"epoch": 0.21152417760880238, "grad_norm": 4.008477687835693, "learning_rate": 4.3377107236249647e-05, "loss": 1.8735, "step": 8401},
    {"epoch": 0.21230470962949907, "grad_norm": 1.0454871654510498, "learning_rate": 4.332006561018488e-05, "loss": 1.8824, "step": 8432},
    {"epoch": 0.21308524165019577, "grad_norm": 1.0586799383163452, "learning_rate": 4.3262817244669683e-05, "loss": 1.8992, "step": 8463},
    {"epoch": 0.21386577367089246, "grad_norm": 1.0073469877243042, "learning_rate": 4.3205362785744083e-05, "loss": 1.8766, "step": 8494},
    {"epoch": 0.21464630569158913, "grad_norm": 0.9976999759674072, "learning_rate": 4.314770288177384e-05, "loss": 1.8864, "step": 8525},
    {"epoch": 0.21542683771228582, "grad_norm": 0.9928112030029297, "learning_rate": 4.308983818344313e-05, "loss": 1.8874, "step": 8556},
    {"epoch": 0.21620736973298252, "grad_norm": 1.038405418395996, "learning_rate": 4.3031769343747206e-05, "loss": 1.8725, "step": 8587},
    {"epoch": 0.2169879017536792, "grad_norm": 1.0328835248947144, "learning_rate": 4.297349701798505e-05, "loss": 1.8884, "step": 8618},
    {"epoch": 0.21776843377437588, "grad_norm": 1.0254933834075928, "learning_rate": 4.2915021863751916e-05, "loss": 1.8611, "step": 8649},
    {"epoch": 0.21854896579507258, "grad_norm": 0.99531090259552, "learning_rate": 4.285634454093198e-05, "loss": 1.8889, "step": 8680},
    {"epoch": 0.21932949781576927, "grad_norm": 1.0536468029022217, "learning_rate": 4.279746571169086e-05, "loss": 1.8875, "step": 8711},
    {"epoch": 0.22011002983646596, "grad_norm": 0.9915075898170471, "learning_rate": 4.2738386040468136e-05, "loss": 1.8901, "step": 8742},
    {"epoch": 0.22089056185716263, "grad_norm": 0.9883093237876892, "learning_rate": 4.2679106193969866e-05, "loss": 1.8704, "step": 8773},
    {"epoch": 0.22167109387785933, "grad_norm": 1.0425434112548828, "learning_rate": 4.261962684116106e-05, "loss": 1.8643, "step": 8804},
    {"epoch": 0.22245162589855602, "grad_norm": 1.0154091119766235, "learning_rate": 4.2559948653258145e-05, "loss": 1.8806, "step": 8835},
    {"epoch": 0.22323215791925272, "grad_norm": 1.0432935953140259, "learning_rate": 4.250007230372134e-05, "loss": 1.8928, "step": 8866},
    {"epoch": 0.22401268993994938, "grad_norm": 0.9709262251853943, "learning_rate": 4.2439998468247126e-05, "loss": 1.8591, "step": 8897},
    {"epoch": 0.22479322196064608, "grad_norm": 0.9708088636398315, "learning_rate": 4.2379727824760566e-05, "loss": 1.8797, "step": 8928},
    {"epoch": 0.22557375398134277, "grad_norm": 0.9911683201789856, "learning_rate": 4.231926105340768e-05, "loss": 1.8456, "step": 8959},
    {"epoch": 0.22635428600203947, "grad_norm": 0.99161297082901, "learning_rate": 4.225859883654776e-05, "loss": 1.8712, "step": 8990},
    {"epoch": 0.22713481802273613, "grad_norm": 1.0025807619094849, "learning_rate": 4.219774185874569e-05, "loss": 1.8814, "step": 9021},
    {"epoch": 0.22791535004343283, "grad_norm": 0.9597026109695435, "learning_rate": 4.213669080676418e-05, "loss": 1.8799, "step": 9052},
    {"epoch": 0.22869588206412952, "grad_norm": 1.0001273155212402, "learning_rate": 4.2075446369556056e-05, "loss": 1.8469, "step": 9083},
    {"epoch": 0.22947641408482622, "grad_norm": 0.9670659303665161, "learning_rate": 4.201400923825648e-05, "loss": 1.8612, "step": 9114},
    {"epoch": 0.23025694610552289, "grad_norm": 1.018050193786621, "learning_rate": 4.195238010617511e-05, "loss": 1.8656, "step": 9145},
    {"epoch": 0.23103747812621958, "grad_norm": 0.9838584661483765, "learning_rate": 4.1890559668788344e-05, "loss": 1.8527, "step": 9176},
    {"epoch": 0.23181801014691628, "grad_norm": 0.9968937635421753, "learning_rate": 4.1828548623731405e-05, "loss": 1.8753, "step": 9207},
    {"epoch": 0.23259854216761297, "grad_norm": 0.9698464870452881, "learning_rate": 4.1766347670790506e-05, "loss": 1.8716, "step": 9238},
    {"epoch": 0.23337907418830964, "grad_norm": 0.9592704772949219, "learning_rate": 4.170395751189495e-05, "loss": 1.8698, "step": 9269},
    {"epoch": 0.23415960620900633, "grad_norm": 0.9904705286026001, "learning_rate": 4.164137885110921e-05, "loss": 1.8661, "step": 9300},
    {"epoch": 0.23494013822970303, "grad_norm": 1.0243868827819824, "learning_rate": 4.157861239462495e-05, "loss": 1.8658, "step": 9331},
    {"epoch": 0.2357206702503997, "grad_norm": 0.9936602711677551, "learning_rate": 4.1515658850753114e-05, "loss": 1.8475, "step": 9362},
    {"epoch": 0.2365012022710964, "grad_norm": 0.9626069068908691, "learning_rate": 4.145251892991588e-05, "loss": 1.8507, "step": 9393},
    {"epoch": 0.23728173429179308, "grad_norm": 0.9688258767127991, "learning_rate": 4.138919334463868e-05, "loss": 1.8628, "step": 9424},
    {"epoch": 0.23806226631248978, "grad_norm": 0.9996054172515869, "learning_rate": 4.1325682809542124e-05, "loss": 1.8734, "step": 9455},
    {"epoch": 0.23884279833318645, "grad_norm": 1.0037505626678467, "learning_rate": 4.126198804133398e-05, "loss": 1.8778, "step": 9486},
    {"epoch": 0.23962333035388314, "grad_norm": 0.9751474857330322, "learning_rate": 4.1198109758801055e-05, "loss": 1.8615, "step": 9517},
    {"epoch": 0.24040386237457984, "grad_norm": 0.9760595560073853, "learning_rate": 4.113404868280107e-05, "loss": 1.8614, "step": 9548},
    {"epoch": 0.24118439439527653, "grad_norm": 1.0009572505950928, "learning_rate": 4.106980553625457e-05, "loss": 1.8762, "step": 9579},
    {"epoch": 0.2419649264159732, "grad_norm": 0.983182966709137, "learning_rate": 4.100538104413674e-05, "loss": 1.857, "step": 9610},
    {"epoch": 0.2427454584366699, "grad_norm": 1.0748534202575684, "learning_rate": 4.09407759334692e-05, "loss": 1.8658, "step": 9641},
    {"epoch": 0.2435259904573666, "grad_norm": 0.9747381806373596, "learning_rate": 4.087599093331186e-05, "loss": 1.8484, "step": 9672},
    {"epoch": 0.24430652247806328, "grad_norm": 0.9784808158874512, "learning_rate": 4.081102677475462e-05, "loss": 1.8492, "step": 9703},
    {"epoch": 0.24508705449875995, "grad_norm": 0.9820215106010437, "learning_rate": 4.0745884190909194e-05, "loss": 1.8516, "step": 9734},
    {"epoch": 0.24586758651945664, "grad_norm": 0.9536774158477783, "learning_rate": 4.0680563916900796e-05, "loss": 1.8444, "step": 9765},
    {"epoch": 0.24664811854015334, "grad_norm": 0.9706370234489441, "learning_rate": 4.0615066689859815e-05, "loss": 1.8477, "step": 9796},
    {"epoch": 0.24742865056085003, "grad_norm": 0.9700275659561157, "learning_rate": 4.0549393248913584e-05, "loss": 1.8515, "step": 9827},
    {"epoch": 0.2482091825815467, "grad_norm": 0.9522431492805481, "learning_rate": 4.048354433517794e-05, "loss": 1.8287, "step": 9858},
    {"epoch": 0.2489897146022434, "grad_norm": 0.9691547155380249, "learning_rate": 4.0417520691748916e-05, "loss": 1.8594, "step": 9889},
    {"epoch": 0.2497702466229401, "grad_norm": 0.9588534235954285, "learning_rate": 4.035132306369438e-05, "loss": 1.8459, "step": 9920},
    {"epoch": 0.2505507786436368, "grad_norm": 0.9900926351547241, "learning_rate": 4.028495219804555e-05, "loss": 1.8608, "step": 9951},
    {"epoch": 0.25133131066433345, "grad_norm": 0.9539284110069275, "learning_rate": 4.021840884378864e-05, "loss": 1.8522, "step": 9982},
    {"epoch": 0.2521118426850302, "grad_norm": 0.9557245969772339, "learning_rate": 4.015169375185633e-05, "loss": 1.8743, "step": 10013},
    {"epoch": 0.25289237470572684, "grad_norm": 0.9317564964294434, "learning_rate": 4.0084807675119396e-05, "loss": 1.8384, "step": 10044},
    {"epoch": 0.2536729067264235, "grad_norm": 0.9944160580635071, "learning_rate": 4.0017751368378106e-05, "loss": 1.857, "step": 10075},
    {"epoch": 0.25445343874712023, "grad_norm": 0.9859464168548584, "learning_rate": 3.995052558835377e-05, "loss": 1.8484, "step": 10106},
    {"epoch": 0.2552339707678169, "grad_norm": 1.0093594789505005, "learning_rate": 3.988313109368017e-05, "loss": 1.8506, "step": 10137},
    {"epoch": 0.25601450278851356, "grad_norm": 0.9451240301132202, "learning_rate": 3.981556864489504e-05, "loss": 1.8732, "step": 10168},
    {"epoch": 0.2567950348092103, "grad_norm": 0.972819447517395, "learning_rate": 3.974783900443142e-05, "loss": 1.856, "step": 10199},
    {"epoch": 0.25757556682990695, "grad_norm": 0.996379554271698, "learning_rate": 3.9679942936609095e-05, "loss": 1.8299, "step": 10230},
    {"epoch": 0.2583560988506037, "grad_norm": 2.503967046737671, "learning_rate": 3.961188120762596e-05, "loss": 1.8484, "step": 10261},
    {"epoch": 0.25913663087130034, "grad_norm": 0.9491912126541138, "learning_rate": 3.954365458554938e-05, "loss": 1.8528, "step": 10292},
    {"epoch": 0.259917162891997, "grad_norm": 0.9642863273620605, "learning_rate": 3.947526384030751e-05, "loss": 1.863, "step": 10323},
    {"epoch": 0.26069769491269373, "grad_norm": 0.9915285706520081, "learning_rate": 3.9406709743680624e-05, "loss": 1.8191, "step": 10354},
    {"epoch": 0.2614782269333904, "grad_norm": 1.0176483392715454, "learning_rate": 3.9337993069292366e-05, "loss": 1.8396, "step": 10385},
    {"epoch": 0.26225875895408707, "grad_norm": 0.9567158222198486, "learning_rate": 3.926911459260109e-05, "loss": 1.8617, "step": 10416},
    {"epoch": 0.2630392909747838, "grad_norm": 0.9236714839935303, "learning_rate": 3.920007509089102e-05, "loss": 1.8362, "step": 10447},
    {"epoch": 0.26381982299548046, "grad_norm": 0.9428004026412964, "learning_rate": 3.913087534326357e-05, "loss": 1.8447, "step": 10478},
    {"epoch": 0.2646003550161772, "grad_norm": 0.941067099571228, "learning_rate": 3.9061516130628475e-05, "loss": 1.8329, "step": 10509},
    {"epoch": 0.26538088703687385, "grad_norm": 0.9685031175613403, "learning_rate": 3.8991998235695025e-05, "loss": 1.8543, "step": 10540},
    {"epoch": 0.2661614190575705, "grad_norm": 0.9629632234573364, "learning_rate": 3.8922322442963224e-05, "loss": 1.8118, "step": 10571},
    {"epoch": 0.26694195107826724, "grad_norm": 0.9536260366439819, "learning_rate": 3.885248953871491e-05, "loss": 1.8379, "step": 10602},
    {"epoch": 0.2677224830989639, "grad_norm": 0.9781749248504639, "learning_rate": 3.8782500311004915e-05, "loss": 1.8419, "step": 10633},
    {"epoch": 0.26850301511966057, "grad_norm": 0.9313937425613403, "learning_rate": 3.871235554965218e-05, "loss": 1.8467, "step": 10664},
    {"epoch": 0.2692835471403573, "grad_norm": 0.9775392413139343, "learning_rate": 3.864205604623078e-05, "loss": 1.8209, "step": 10695},
    {"epoch": 0.27006407916105396, "grad_norm": 0.965977132320404, "learning_rate": 3.857160259406107e-05, "loss": 1.8558, "step": 10726},
    {"epoch": 0.2708446111817507, "grad_norm": 0.9789881706237793, "learning_rate": 3.8500995988200674e-05, "loss": 1.8523, "step": 10757},
    {"epoch": 0.27162514320244735, "grad_norm": 0.9083163738250732, "learning_rate": 3.843023702543556e-05, "loss": 1.8274, "step": 10788},
    {"epoch": 0.272405675223144, "grad_norm": 0.9248176217079163, "learning_rate": 3.8359326504270984e-05, "loss": 1.8316, "step": 10819},
    {"epoch": 0.27318620724384074, "grad_norm": 0.9702487587928772, "learning_rate": 3.828826522492255e-05, "loss": 1.8344, "step": 10850},
    {"epoch": 0.2739667392645374, "grad_norm": 0.9324803948402405, "learning_rate": 3.821705398930713e-05, "loss": 1.8544, "step": 10881},
    {"epoch": 0.2747472712852341, "grad_norm": 0.9495794773101807, "learning_rate": 3.814569360103385e-05, "loss": 1.8446, "step": 10912},
    {"epoch": 0.2755278033059308, "grad_norm": 0.9734468460083008, "learning_rate": 3.807418486539499e-05, "loss": 1.8434, "step": 10943},
    {"epoch": 0.27630833532662746, "grad_norm": 0.9376105666160583, "learning_rate": 3.80025285893569e-05, "loss": 1.8339, "step": 10974},
    {"epoch": 0.2770888673473242, "grad_norm": 0.9637653827667236, "learning_rate": 3.793072558155093e-05, "loss": 1.8445, "step": 11005},
    {"epoch": 0.27786939936802085, "grad_norm": 0.9537612199783325, "learning_rate": 3.785877665226426e-05, "loss": 1.8332, "step": 11036},
    {"epoch": 0.2786499313887175, "grad_norm": 0.9774130582809448, "learning_rate": 3.778668261343079e-05, "loss": 1.8269, "step": 11067},
    {"epoch": 0.27943046340941424, "grad_norm": 0.9648153781890869, "learning_rate": 3.771444427862192e-05, "loss": 1.8301, "step": 11098},
    {"epoch": 0.2802109954301109, "grad_norm": 0.9317411184310913, "learning_rate": 3.7642062463037465e-05, "loss": 1.836, "step": 11129},
    {"epoch": 0.2809915274508076, "grad_norm": 0.9821638464927673, "learning_rate": 3.7569537983496373e-05, "loss": 1.846, "step": 11160},
    {"epoch": 0.2817720594715043, "grad_norm": 0.9444936513900757, "learning_rate": 3.749687165842753e-05, "loss": 1.8248, "step": 11191},
    {"epoch": 0.28255259149220097, "grad_norm": 0.9421222805976868, "learning_rate": 3.7424064307860536e-05, "loss": 1.8521, "step": 11222},
    {"epoch": 0.2833331235128977, "grad_norm": 0.9378920793533325, "learning_rate": 3.735111675341645e-05, "loss": 1.8361, "step": 11253},
    {"epoch": 0.28411365553359436, "grad_norm": 0.9363348484039307, "learning_rate": 3.7278029818298524e-05, "loss": 1.8276, "step": 11284},
    {"epoch": 0.284894187554291, "grad_norm": 0.9247562885284424, "learning_rate": 3.720480432728287e-05, "loss": 1.8354, "step": 11315
|
}, |
|
{ |
|
"epoch": 0.28567471957498775, |
|
"grad_norm": 0.9505523443222046, |
|
"learning_rate": 3.71314411067092e-05, |
|
"loss": 1.807, |
|
"step": 11346 |
|
}, |
|
{ |
|
"epoch": 0.2864552515956844, |
|
"grad_norm": 0.9292653799057007, |
|
"learning_rate": 3.70579409844715e-05, |
|
"loss": 1.8281, |
|
"step": 11377 |
|
}, |
|
{ |
|
"epoch": 0.2872357836163811, |
|
"grad_norm": 0.9625663161277771, |
|
"learning_rate": 3.698430479000865e-05, |
|
"loss": 1.8211, |
|
"step": 11408 |
|
}, |
|
{ |
|
"epoch": 0.2880163156370778, |
|
"grad_norm": 0.9280233979225159, |
|
"learning_rate": 3.691053335429509e-05, |
|
"loss": 1.8177, |
|
"step": 11439 |
|
}, |
|
{ |
|
"epoch": 0.28879684765777447, |
|
"grad_norm": 0.9617031216621399, |
|
"learning_rate": 3.683662750983147e-05, |
|
"loss": 1.831, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 0.2895773796784712, |
|
"grad_norm": 0.9631845951080322, |
|
"learning_rate": 3.676258809063518e-05, |
|
"loss": 1.8154, |
|
"step": 11501 |
|
}, |
|
{ |
|
"epoch": 0.29035791169916786, |
|
"grad_norm": 0.9200331568717957, |
|
"learning_rate": 3.6688415932231004e-05, |
|
"loss": 1.8452, |
|
"step": 11532 |
|
}, |
|
{ |
|
"epoch": 0.2911384437198645, |
|
"grad_norm": 0.9431037902832031, |
|
"learning_rate": 3.661411187164166e-05, |
|
"loss": 1.8221, |
|
"step": 11563 |
|
}, |
|
{ |
|
"epoch": 0.29191897574056125, |
|
"grad_norm": 0.9307507276535034, |
|
"learning_rate": 3.65396767473784e-05, |
|
"loss": 1.8092, |
|
"step": 11594 |
|
}, |
|
{ |
|
"epoch": 0.2926995077612579, |
|
"grad_norm": 0.9896314144134521, |
|
"learning_rate": 3.6465111399431465e-05, |
|
"loss": 1.826, |
|
"step": 11625 |
|
}, |
|
{ |
|
"epoch": 0.2934800397819546, |
|
"grad_norm": 0.9361921548843384, |
|
"learning_rate": 3.6390416669260674e-05, |
|
"loss": 1.8207, |
|
"step": 11656 |
|
}, |
|
{ |
|
"epoch": 0.2942605718026513, |
|
"grad_norm": 0.9492716789245605, |
|
"learning_rate": 3.63155933997859e-05, |
|
"loss": 1.822, |
|
"step": 11687 |
|
}, |
|
{ |
|
"epoch": 0.29504110382334797, |
|
"grad_norm": 0.9642727375030518, |
|
"learning_rate": 3.624064243537758e-05, |
|
"loss": 1.8165, |
|
"step": 11718 |
|
}, |
|
{ |
|
"epoch": 0.29582163584404464, |
|
"grad_norm": 0.9700178503990173, |
|
"learning_rate": 3.616556462184716e-05, |
|
"loss": 1.8093, |
|
"step": 11749 |
|
}, |
|
{ |
|
"epoch": 0.29660216786474136, |
|
"grad_norm": 0.90894615650177, |
|
"learning_rate": 3.609036080643755e-05, |
|
"loss": 1.8312, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 0.29738269988543803, |
|
"grad_norm": 0.9441593885421753, |
|
"learning_rate": 3.60150318378136e-05, |
|
"loss": 1.8324, |
|
"step": 11811 |
|
}, |
|
{ |
|
"epoch": 0.29816323190613475, |
|
"grad_norm": 0.9351882338523865, |
|
"learning_rate": 3.5939578566052465e-05, |
|
"loss": 1.8303, |
|
"step": 11842 |
|
}, |
|
{ |
|
"epoch": 0.2989437639268314, |
|
"grad_norm": 0.9438377618789673, |
|
"learning_rate": 3.586400184263408e-05, |
|
"loss": 1.8164, |
|
"step": 11873 |
|
}, |
|
{ |
|
"epoch": 0.2997242959475281, |
|
"grad_norm": 0.9775059819221497, |
|
"learning_rate": 3.578830252043148e-05, |
|
"loss": 1.8146, |
|
"step": 11904 |
|
}, |
|
{ |
|
"epoch": 0.3005048279682248, |
|
"grad_norm": 0.9342683553695679, |
|
"learning_rate": 3.571248145370125e-05, |
|
"loss": 1.8324, |
|
"step": 11935 |
|
}, |
|
{ |
|
"epoch": 0.3012853599889215, |
|
"grad_norm": 0.9539228081703186, |
|
"learning_rate": 3.5636539498073794e-05, |
|
"loss": 1.8112, |
|
"step": 11966 |
|
}, |
|
{ |
|
"epoch": 0.30206589200961814, |
|
"grad_norm": 1.0048060417175293, |
|
"learning_rate": 3.556047751054378e-05, |
|
"loss": 1.8261, |
|
"step": 11997 |
|
}, |
|
{ |
|
"epoch": 0.30284642403031486, |
|
"grad_norm": 0.9269846081733704, |
|
"learning_rate": 3.548429634946039e-05, |
|
"loss": 1.8256, |
|
"step": 12028 |
|
}, |
|
{ |
|
"epoch": 0.30362695605101153, |
|
"grad_norm": 0.937609076499939, |
|
"learning_rate": 3.540799687451768e-05, |
|
"loss": 1.8222, |
|
"step": 12059 |
|
}, |
|
{ |
|
"epoch": 0.30440748807170825, |
|
"grad_norm": 0.9538866281509399, |
|
"learning_rate": 3.533157994674485e-05, |
|
"loss": 1.8214, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 0.3051880200924049, |
|
"grad_norm": 0.9644604325294495, |
|
"learning_rate": 3.5255046428496546e-05, |
|
"loss": 1.8117, |
|
"step": 12121 |
|
}, |
|
{ |
|
"epoch": 0.3059685521131016, |
|
"grad_norm": 1.0498855113983154, |
|
"learning_rate": 3.517839718344311e-05, |
|
"loss": 1.8249, |
|
"step": 12152 |
|
}, |
|
{ |
|
"epoch": 0.3067490841337983, |
|
"grad_norm": 0.952634334564209, |
|
"learning_rate": 3.510163307656086e-05, |
|
"loss": 1.8263, |
|
"step": 12183 |
|
}, |
|
{ |
|
"epoch": 0.307529616154495, |
|
"grad_norm": 0.9833852052688599, |
|
"learning_rate": 3.5024754974122324e-05, |
|
"loss": 1.8246, |
|
"step": 12214 |
|
}, |
|
{ |
|
"epoch": 0.30831014817519165, |
|
"grad_norm": 0.9421613812446594, |
|
"learning_rate": 3.494776374368643e-05, |
|
"loss": 1.8065, |
|
"step": 12245 |
|
}, |
|
{ |
|
"epoch": 0.30909068019588837, |
|
"grad_norm": 0.9119872450828552, |
|
"learning_rate": 3.4870660254088724e-05, |
|
"loss": 1.7958, |
|
"step": 12276 |
|
}, |
|
{ |
|
"epoch": 0.30987121221658503, |
|
"grad_norm": 0.9541532397270203, |
|
"learning_rate": 3.479344537543164e-05, |
|
"loss": 1.8156, |
|
"step": 12307 |
|
}, |
|
{ |
|
"epoch": 0.31065174423728176, |
|
"grad_norm": 0.9333822131156921, |
|
"learning_rate": 3.4716119979074565e-05, |
|
"loss": 1.8098, |
|
"step": 12338 |
|
}, |
|
{ |
|
"epoch": 0.3114322762579784, |
|
"grad_norm": 0.9501641392707825, |
|
"learning_rate": 3.463868493762412e-05, |
|
"loss": 1.815, |
|
"step": 12369 |
|
}, |
|
{ |
|
"epoch": 0.3122128082786751, |
|
"grad_norm": 0.9692359566688538, |
|
"learning_rate": 3.456114112492418e-05, |
|
"loss": 1.8192, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 0.3129933402993718, |
|
"grad_norm": 0.9368917942047119, |
|
"learning_rate": 3.4483489416046164e-05, |
|
"loss": 1.8044, |
|
"step": 12431 |
|
}, |
|
{ |
|
"epoch": 0.3137738723200685, |
|
"grad_norm": 0.981076180934906, |
|
"learning_rate": 3.440573068727905e-05, |
|
"loss": 1.806, |
|
"step": 12462 |
|
}, |
|
{ |
|
"epoch": 0.31455440434076515, |
|
"grad_norm": 0.9535355567932129, |
|
"learning_rate": 3.4327865816119495e-05, |
|
"loss": 1.805, |
|
"step": 12493 |
|
}, |
|
{ |
|
"epoch": 0.31533493636146187, |
|
"grad_norm": 0.9825057983398438, |
|
"learning_rate": 3.4249895681262025e-05, |
|
"loss": 1.7878, |
|
"step": 12524 |
|
}, |
|
{ |
|
"epoch": 0.31611546838215854, |
|
"grad_norm": 0.9295104146003723, |
|
"learning_rate": 3.417182116258899e-05, |
|
"loss": 1.8091, |
|
"step": 12555 |
|
}, |
|
{ |
|
"epoch": 0.31689600040285526, |
|
"grad_norm": 0.9550284147262573, |
|
"learning_rate": 3.409364314116074e-05, |
|
"loss": 1.8053, |
|
"step": 12586 |
|
}, |
|
{ |
|
"epoch": 0.3176765324235519, |
|
"grad_norm": 0.9383823871612549, |
|
"learning_rate": 3.401536249920559e-05, |
|
"loss": 1.8034, |
|
"step": 12617 |
|
}, |
|
{ |
|
"epoch": 0.3184570644442486, |
|
"grad_norm": 0.9513770341873169, |
|
"learning_rate": 3.393698012010998e-05, |
|
"loss": 1.8097, |
|
"step": 12648 |
|
}, |
|
{ |
|
"epoch": 0.3192375964649453, |
|
"grad_norm": 0.9214389324188232, |
|
"learning_rate": 3.385849688840839e-05, |
|
"loss": 1.8089, |
|
"step": 12679 |
|
}, |
|
{ |
|
"epoch": 0.320018128485642, |
|
"grad_norm": 0.9370726346969604, |
|
"learning_rate": 3.3779913689773414e-05, |
|
"loss": 1.8022, |
|
"step": 12710 |
|
}, |
|
{ |
|
"epoch": 0.32079866050633865, |
|
"grad_norm": 0.9661527872085571, |
|
"learning_rate": 3.370123141100578e-05, |
|
"loss": 1.8151, |
|
"step": 12741 |
|
}, |
|
{ |
|
"epoch": 0.3215791925270354, |
|
"grad_norm": 0.9569831490516663, |
|
"learning_rate": 3.3622450940024305e-05, |
|
"loss": 1.8283, |
|
"step": 12772 |
|
}, |
|
{ |
|
"epoch": 0.32235972454773204, |
|
"grad_norm": 0.9162306785583496, |
|
"learning_rate": 3.35435731658559e-05, |
|
"loss": 1.8005, |
|
"step": 12803 |
|
}, |
|
{ |
|
"epoch": 0.32314025656842876, |
|
"grad_norm": 0.955159068107605, |
|
"learning_rate": 3.346459897862552e-05, |
|
"loss": 1.7929, |
|
"step": 12834 |
|
}, |
|
{ |
|
"epoch": 0.32392078858912543, |
|
"grad_norm": 0.9484529495239258, |
|
"learning_rate": 3.338552926954613e-05, |
|
"loss": 1.8355, |
|
"step": 12865 |
|
}, |
|
{ |
|
"epoch": 0.3247013206098221, |
|
"grad_norm": 0.9199361205101013, |
|
"learning_rate": 3.330636493090868e-05, |
|
"loss": 1.8096, |
|
"step": 12896 |
|
}, |
|
{ |
|
"epoch": 0.3254818526305188, |
|
"grad_norm": 0.928848147392273, |
|
"learning_rate": 3.322710685607193e-05, |
|
"loss": 1.7896, |
|
"step": 12927 |
|
}, |
|
{ |
|
"epoch": 0.3262623846512155, |
|
"grad_norm": 0.9141325354576111, |
|
"learning_rate": 3.314775593945251e-05, |
|
"loss": 1.8113, |
|
"step": 12958 |
|
}, |
|
{ |
|
"epoch": 0.32704291667191215, |
|
"grad_norm": 0.97685307264328, |
|
"learning_rate": 3.3068313076514714e-05, |
|
"loss": 1.8241, |
|
"step": 12989 |
|
}, |
|
{ |
|
"epoch": 0.3278234486926089, |
|
"grad_norm": 0.9171717166900635, |
|
"learning_rate": 3.298877916376047e-05, |
|
"loss": 1.7953, |
|
"step": 13020 |
|
}, |
|
{ |
|
"epoch": 0.32860398071330554, |
|
"grad_norm": 0.9607007503509521, |
|
"learning_rate": 3.290915509871915e-05, |
|
"loss": 1.819, |
|
"step": 13051 |
|
}, |
|
{ |
|
"epoch": 0.32938451273400227, |
|
"grad_norm": 0.9524408578872681, |
|
"learning_rate": 3.282944177993753e-05, |
|
"loss": 1.8133, |
|
"step": 13082 |
|
}, |
|
{ |
|
"epoch": 0.33016504475469893, |
|
"grad_norm": 0.9388900399208069, |
|
"learning_rate": 3.274964010696957e-05, |
|
"loss": 1.8103, |
|
"step": 13113 |
|
}, |
|
{ |
|
"epoch": 0.3309455767753956, |
|
"grad_norm": 0.8972403407096863, |
|
"learning_rate": 3.266975098036629e-05, |
|
"loss": 1.7997, |
|
"step": 13144 |
|
}, |
|
{ |
|
"epoch": 0.3317261087960923, |
|
"grad_norm": 0.9373508095741272, |
|
"learning_rate": 3.258977530166562e-05, |
|
"loss": 1.8038, |
|
"step": 13175 |
|
}, |
|
{ |
|
"epoch": 0.332506640816789, |
|
"grad_norm": 0.9278662204742432, |
|
"learning_rate": 3.250971397338227e-05, |
|
"loss": 1.803, |
|
"step": 13206 |
|
}, |
|
{ |
|
"epoch": 0.33328717283748566, |
|
"grad_norm": 0.9486290812492371, |
|
"learning_rate": 3.2429567898997404e-05, |
|
"loss": 1.8114, |
|
"step": 13237 |
|
}, |
|
{ |
|
"epoch": 0.3340677048581824, |
|
"grad_norm": 0.9786876440048218, |
|
"learning_rate": 3.234933798294859e-05, |
|
"loss": 1.7902, |
|
"step": 13268 |
|
}, |
|
{ |
|
"epoch": 0.33484823687887905, |
|
"grad_norm": 0.9001107215881348, |
|
"learning_rate": 3.2269025130619535e-05, |
|
"loss": 1.8108, |
|
"step": 13299 |
|
}, |
|
{ |
|
"epoch": 0.33562876889957577, |
|
"grad_norm": 0.9080937504768372, |
|
"learning_rate": 3.218863024832985e-05, |
|
"loss": 1.7951, |
|
"step": 13330 |
|
}, |
|
{ |
|
"epoch": 0.33640930092027244, |
|
"grad_norm": 0.93299800157547, |
|
"learning_rate": 3.2108154243324864e-05, |
|
"loss": 1.8129, |
|
"step": 13361 |
|
}, |
|
{ |
|
"epoch": 0.3371898329409691, |
|
"grad_norm": 0.9452754855155945, |
|
"learning_rate": 3.2027598023765345e-05, |
|
"loss": 1.8077, |
|
"step": 13392 |
|
}, |
|
{ |
|
"epoch": 0.3379703649616658, |
|
"grad_norm": 0.9515936970710754, |
|
"learning_rate": 3.194696249871729e-05, |
|
"loss": 1.7961, |
|
"step": 13423 |
|
}, |
|
{ |
|
"epoch": 0.3387508969823625, |
|
"grad_norm": 0.9395990371704102, |
|
"learning_rate": 3.186624857814164e-05, |
|
"loss": 1.7982, |
|
"step": 13454 |
|
}, |
|
{ |
|
"epoch": 0.33953142900305916, |
|
"grad_norm": 0.9647415280342102, |
|
"learning_rate": 3.178545717288401e-05, |
|
"loss": 1.7997, |
|
"step": 13485 |
|
}, |
|
{ |
|
"epoch": 0.3403119610237559, |
|
"grad_norm": 0.9437438249588013, |
|
"learning_rate": 3.170458919466444e-05, |
|
"loss": 1.8069, |
|
"step": 13516 |
|
}, |
|
{ |
|
"epoch": 0.34109249304445255, |
|
"grad_norm": 0.9350516200065613, |
|
"learning_rate": 3.1623645556067063e-05, |
|
"loss": 1.8024, |
|
"step": 13547 |
|
}, |
|
{ |
|
"epoch": 0.34187302506514927, |
|
"grad_norm": 0.9213714599609375, |
|
"learning_rate": 3.154262717052985e-05, |
|
"loss": 1.7963, |
|
"step": 13578 |
|
}, |
|
{ |
|
"epoch": 0.34265355708584594, |
|
"grad_norm": 0.9247968792915344, |
|
"learning_rate": 3.146153495233426e-05, |
|
"loss": 1.8172, |
|
"step": 13609 |
|
}, |
|
{ |
|
"epoch": 0.3434340891065426, |
|
"grad_norm": 0.8953402638435364, |
|
"learning_rate": 3.1380369816594944e-05, |
|
"loss": 1.8004, |
|
"step": 13640 |
|
}, |
|
{ |
|
"epoch": 0.34421462112723933, |
|
"grad_norm": 0.9214624762535095, |
|
"learning_rate": 3.129913267924946e-05, |
|
"loss": 1.7862, |
|
"step": 13671 |
|
}, |
|
{ |
|
"epoch": 0.344995153147936, |
|
"grad_norm": 0.946559727191925, |
|
"learning_rate": 3.121782445704782e-05, |
|
"loss": 1.7973, |
|
"step": 13702 |
|
}, |
|
{ |
|
"epoch": 0.34577568516863266, |
|
"grad_norm": 0.9344651103019714, |
|
"learning_rate": 3.11364460675423e-05, |
|
"loss": 1.7992, |
|
"step": 13733 |
|
}, |
|
{ |
|
"epoch": 0.3465562171893294, |
|
"grad_norm": 0.9140195846557617, |
|
"learning_rate": 3.1054998429076934e-05, |
|
"loss": 1.7846, |
|
"step": 13764 |
|
}, |
|
{ |
|
"epoch": 0.34733674921002605, |
|
"grad_norm": 0.9403822422027588, |
|
"learning_rate": 3.097348246077728e-05, |
|
"loss": 1.7814, |
|
"step": 13795 |
|
}, |
|
{ |
|
"epoch": 0.3481172812307228, |
|
"grad_norm": 0.954587996006012, |
|
"learning_rate": 3.0891899082539924e-05, |
|
"loss": 1.7864, |
|
"step": 13826 |
|
}, |
|
{ |
|
"epoch": 0.34889781325141944, |
|
"grad_norm": 0.9238544702529907, |
|
"learning_rate": 3.0810249215022233e-05, |
|
"loss": 1.7929, |
|
"step": 13857 |
|
}, |
|
{ |
|
"epoch": 0.3496783452721161, |
|
"grad_norm": 0.8994883894920349, |
|
"learning_rate": 3.0728533779631865e-05, |
|
"loss": 1.805, |
|
"step": 13888 |
|
}, |
|
{ |
|
"epoch": 0.35045887729281283, |
|
"grad_norm": 0.934203028678894, |
|
"learning_rate": 3.064675369851637e-05, |
|
"loss": 1.8046, |
|
"step": 13919 |
|
}, |
|
{ |
|
"epoch": 0.3512394093135095, |
|
"grad_norm": 0.9044675230979919, |
|
"learning_rate": 3.056490989455289e-05, |
|
"loss": 1.7866, |
|
"step": 13950 |
|
}, |
|
{ |
|
"epoch": 0.35201994133420617, |
|
"grad_norm": 0.915847897529602, |
|
"learning_rate": 3.0483003291337596e-05, |
|
"loss": 1.7861, |
|
"step": 13981 |
|
}, |
|
{ |
|
"epoch": 0.3528004733549029, |
|
"grad_norm": 0.9346082210540771, |
|
"learning_rate": 3.040103481317539e-05, |
|
"loss": 1.7891, |
|
"step": 14012 |
|
}, |
|
{ |
|
"epoch": 0.35358100537559956, |
|
"grad_norm": 0.9164626598358154, |
|
"learning_rate": 3.03190053850694e-05, |
|
"loss": 1.8021, |
|
"step": 14043 |
|
}, |
|
{ |
|
"epoch": 0.3543615373962962, |
|
"grad_norm": 0.9445552229881287, |
|
"learning_rate": 3.0236915932710573e-05, |
|
"loss": 1.7877, |
|
"step": 14074 |
|
}, |
|
{ |
|
"epoch": 0.35514206941699294, |
|
"grad_norm": 0.9604120254516602, |
|
"learning_rate": 3.0154767382467232e-05, |
|
"loss": 1.8087, |
|
"step": 14105 |
|
}, |
|
{ |
|
"epoch": 0.3559226014376896, |
|
"grad_norm": 0.9186177849769592, |
|
"learning_rate": 3.0072560661374582e-05, |
|
"loss": 1.7802, |
|
"step": 14136 |
|
}, |
|
{ |
|
"epoch": 0.35670313345838633, |
|
"grad_norm": 0.9228742122650146, |
|
"learning_rate": 2.999029669712431e-05, |
|
"loss": 1.8091, |
|
"step": 14167 |
|
}, |
|
{ |
|
"epoch": 0.357483665479083, |
|
"grad_norm": 0.9045746922492981, |
|
"learning_rate": 2.990797641805408e-05, |
|
"loss": 1.8013, |
|
"step": 14198 |
|
}, |
|
{ |
|
"epoch": 0.35826419749977967, |
|
"grad_norm": 0.9483916759490967, |
|
"learning_rate": 2.982560075313704e-05, |
|
"loss": 1.8044, |
|
"step": 14229 |
|
}, |
|
{ |
|
"epoch": 0.3590447295204764, |
|
"grad_norm": 0.9580939412117004, |
|
"learning_rate": 2.9743170631971368e-05, |
|
"loss": 1.8037, |
|
"step": 14260 |
|
}, |
|
{ |
|
"epoch": 0.35982526154117306, |
|
"grad_norm": 0.950960099697113, |
|
"learning_rate": 2.9660686984769792e-05, |
|
"loss": 1.8082, |
|
"step": 14291 |
|
}, |
|
{ |
|
"epoch": 0.3606057935618697, |
|
"grad_norm": 0.9356971383094788, |
|
"learning_rate": 2.9578150742349047e-05, |
|
"loss": 1.8049, |
|
"step": 14322 |
|
}, |
|
{ |
|
"epoch": 0.36138632558256645, |
|
"grad_norm": 0.9099254012107849, |
|
"learning_rate": 2.949556283611942e-05, |
|
"loss": 1.7991, |
|
"step": 14353 |
|
}, |
|
{ |
|
"epoch": 0.3621668576032631, |
|
"grad_norm": 0.9190629720687866, |
|
"learning_rate": 2.9412924198074206e-05, |
|
"loss": 1.8018, |
|
"step": 14384 |
|
}, |
|
{ |
|
"epoch": 0.36294738962395984, |
|
"grad_norm": 0.90566486120224, |
|
"learning_rate": 2.9330235760779208e-05, |
|
"loss": 1.789, |
|
"step": 14415 |
|
}, |
|
{ |
|
"epoch": 0.3637279216446565, |
|
"grad_norm": 0.970696747303009, |
|
"learning_rate": 2.9247498457362188e-05, |
|
"loss": 1.7851, |
|
"step": 14446 |
|
}, |
|
{ |
|
"epoch": 0.36450845366535317, |
|
"grad_norm": 0.9313214421272278, |
|
"learning_rate": 2.9164713221502373e-05, |
|
"loss": 1.7893, |
|
"step": 14477 |
|
}, |
|
{ |
|
"epoch": 0.3652889856860499, |
|
"grad_norm": 1.2887455224990845, |
|
"learning_rate": 2.9081880987419912e-05, |
|
"loss": 1.7836, |
|
"step": 14508 |
|
}, |
|
{ |
|
"epoch": 0.36606951770674656, |
|
"grad_norm": 0.922982394695282, |
|
"learning_rate": 2.8999002689865296e-05, |
|
"loss": 1.7676, |
|
"step": 14539 |
|
}, |
|
{ |
|
"epoch": 0.36685004972744323, |
|
"grad_norm": 0.9466812014579773, |
|
"learning_rate": 2.8916079264108852e-05, |
|
"loss": 1.7831, |
|
"step": 14570 |
|
}, |
|
{ |
|
"epoch": 0.36763058174813995, |
|
"grad_norm": 0.9107967615127563, |
|
"learning_rate": 2.883311164593017e-05, |
|
"loss": 1.7887, |
|
"step": 14601 |
|
}, |
|
{ |
|
"epoch": 0.3684111137688366, |
|
"grad_norm": 0.8859044313430786, |
|
"learning_rate": 2.875010077160754e-05, |
|
"loss": 1.7823, |
|
"step": 14632 |
|
}, |
|
{ |
|
"epoch": 0.36919164578953334, |
|
"grad_norm": 0.9182350039482117, |
|
"learning_rate": 2.866704757790741e-05, |
|
"loss": 1.7816, |
|
"step": 14663 |
|
}, |
|
{ |
|
"epoch": 0.36997217781023, |
|
"grad_norm": 1.1406787633895874, |
|
"learning_rate": 2.858395300207376e-05, |
|
"loss": 1.7962, |
|
"step": 14694 |
|
}, |
|
{ |
|
"epoch": 0.3707527098309267, |
|
"grad_norm": 0.9399011135101318, |
|
"learning_rate": 2.8500817981817607e-05, |
|
"loss": 1.7874, |
|
"step": 14725 |
|
}, |
|
{ |
|
"epoch": 0.3715332418516234, |
|
"grad_norm": 0.9223036766052246, |
|
"learning_rate": 2.8417643455306336e-05, |
|
"loss": 1.7661, |
|
"step": 14756 |
|
}, |
|
{ |
|
"epoch": 0.37231377387232006, |
|
"grad_norm": 0.9224306344985962, |
|
"learning_rate": 2.8334430361153185e-05, |
|
"loss": 1.7799, |
|
"step": 14787 |
|
}, |
|
{ |
|
"epoch": 0.37309430589301673, |
|
"grad_norm": 0.9353799819946289, |
|
"learning_rate": 2.8251179638406612e-05, |
|
"loss": 1.795, |
|
"step": 14818 |
|
}, |
|
{ |
|
"epoch": 0.37387483791371345, |
|
"grad_norm": 0.9208987355232239, |
|
"learning_rate": 2.8167892226539704e-05, |
|
"loss": 1.7967, |
|
"step": 14849 |
|
}, |
|
{ |
|
"epoch": 0.3746553699344101, |
|
"grad_norm": 0.9143754243850708, |
|
"learning_rate": 2.8084569065439588e-05, |
|
"loss": 1.7871, |
|
"step": 14880 |
|
}, |
|
{ |
|
"epoch": 0.37543590195510684, |
|
"grad_norm": 0.9162681698799133, |
|
"learning_rate": 2.8001211095396807e-05, |
|
"loss": 1.7791, |
|
"step": 14911 |
|
}, |
|
{ |
|
"epoch": 0.3762164339758035, |
|
"grad_norm": 0.9125809073448181, |
|
"learning_rate": 2.791781925709473e-05, |
|
"loss": 1.7847, |
|
"step": 14942 |
|
}, |
|
{ |
|
"epoch": 0.3769969659965002, |
|
"grad_norm": 0.9262179732322693, |
|
"learning_rate": 2.7834394491598908e-05, |
|
"loss": 1.789, |
|
"step": 14973 |
|
}, |
|
{ |
|
"epoch": 0.3777774980171969, |
|
"grad_norm": 0.9118053317070007, |
|
"learning_rate": 2.7750937740346485e-05, |
|
"loss": 1.7906, |
|
"step": 15004 |
|
}, |
|
{ |
|
"epoch": 0.37855803003789357, |
|
"grad_norm": 0.9322589635848999, |
|
"learning_rate": 2.7667449945135564e-05, |
|
"loss": 1.7811, |
|
"step": 15035 |
|
}, |
|
{ |
|
"epoch": 0.37933856205859023, |
|
"grad_norm": 0.9277715682983398, |
|
"learning_rate": 2.7583932048114557e-05, |
|
"loss": 1.7887, |
|
"step": 15066 |
|
}, |
|
{ |
|
"epoch": 0.38011909407928696, |
|
"grad_norm": 0.9804796576499939, |
|
"learning_rate": 2.7500384991771587e-05, |
|
"loss": 1.7945, |
|
"step": 15097 |
|
}, |
|
{ |
|
"epoch": 0.3808996260999836, |
|
"grad_norm": 0.9327893853187561, |
|
"learning_rate": 2.7416809718923825e-05, |
|
"loss": 1.785, |
|
"step": 15128 |
|
}, |
|
{ |
|
"epoch": 0.38168015812068035, |
|
"grad_norm": 0.9126656651496887, |
|
"learning_rate": 2.7333207172706864e-05, |
|
"loss": 1.7782, |
|
"step": 15159 |
|
}, |
|
{ |
|
"epoch": 0.382460690141377, |
|
"grad_norm": 0.9476912617683411, |
|
"learning_rate": 2.7249578296564088e-05, |
|
"loss": 1.7906, |
|
"step": 15190 |
|
}, |
|
{ |
|
"epoch": 0.3832412221620737, |
|
"grad_norm": 0.9388076663017273, |
|
"learning_rate": 2.7165924034235973e-05, |
|
"loss": 1.7993, |
|
"step": 15221 |
|
}, |
|
{ |
|
"epoch": 0.3840217541827704, |
|
"grad_norm": 0.9265156984329224, |
|
"learning_rate": 2.708224532974953e-05, |
|
"loss": 1.7624, |
|
"step": 15252 |
|
}, |
|
{ |
|
"epoch": 0.38480228620346707, |
|
"grad_norm": 0.9261599183082581, |
|
"learning_rate": 2.6998543127407538e-05, |
|
"loss": 1.7774, |
|
"step": 15283 |
|
}, |
|
{ |
|
"epoch": 0.38558281822416374, |
|
"grad_norm": 0.9569805860519409, |
|
"learning_rate": 2.6914818371777988e-05, |
|
"loss": 1.7695, |
|
"step": 15314 |
|
}, |
|
{ |
|
"epoch": 0.38636335024486046, |
|
"grad_norm": 0.8934130668640137, |
|
"learning_rate": 2.6831072007683373e-05, |
|
"loss": 1.7746, |
|
"step": 15345 |
|
}, |
|
{ |
|
"epoch": 0.3871438822655571, |
|
"grad_norm": 0.9160299301147461, |
|
"learning_rate": 2.6747304980190018e-05, |
|
"loss": 1.7698, |
|
"step": 15376 |
|
}, |
|
{ |
|
"epoch": 0.38792441428625385, |
|
"grad_norm": 0.9243568181991577, |
|
"learning_rate": 2.6663518234597453e-05, |
|
"loss": 1.7823, |
|
"step": 15407 |
|
}, |
|
{ |
|
"epoch": 0.3887049463069505, |
|
"grad_norm": 0.9243864417076111, |
|
"learning_rate": 2.6579712716427696e-05, |
|
"loss": 1.7819, |
|
"step": 15438 |
|
}, |
|
{ |
|
"epoch": 0.3894854783276472, |
|
"grad_norm": 0.9425201416015625, |
|
"learning_rate": 2.6495889371414652e-05, |
|
"loss": 1.7808, |
|
"step": 15469 |
|
}, |
|
{ |
|
"epoch": 0.3902660103483439, |
|
"grad_norm": 0.944649875164032, |
|
"learning_rate": 2.6412049145493367e-05, |
|
"loss": 1.781, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.3910465423690406, |
|
"grad_norm": 0.9222155809402466, |
|
"learning_rate": 2.632819298478939e-05, |
|
"loss": 1.7818, |
|
"step": 15531 |
|
}, |
|
{ |
|
"epoch": 0.39182707438973724, |
|
"grad_norm": 0.9028238654136658, |
|
"learning_rate": 2.6244321835608105e-05, |
|
"loss": 1.7622, |
|
"step": 15562 |
|
}, |
|
{ |
|
"epoch": 0.39260760641043396, |
|
"grad_norm": 0.9092904329299927, |
|
"learning_rate": 2.6160436644424024e-05, |
|
"loss": 1.7829, |
|
"step": 15593 |
|
}, |
|
{ |
|
"epoch": 0.39338813843113063, |
|
"grad_norm": 0.9831812381744385, |
|
"learning_rate": 2.6076538357870133e-05, |
|
"loss": 1.7927, |
|
"step": 15624 |
|
}, |
|
{ |
|
"epoch": 0.39416867045182735, |
|
"grad_norm": 0.9344479441642761, |
|
"learning_rate": 2.5992627922727196e-05, |
|
"loss": 1.7725, |
|
"step": 15655 |
|
}, |
|
{ |
|
"epoch": 0.394949202472524, |
|
"grad_norm": 0.9424457550048828, |
|
"learning_rate": 2.5908706285913066e-05, |
|
"loss": 1.77, |
|
"step": 15686 |
|
}, |
|
{ |
|
"epoch": 0.3957297344932207, |
|
"grad_norm": 0.9403957724571228, |
|
"learning_rate": 2.5824774394472008e-05, |
|
"loss": 1.7822, |
|
"step": 15717 |
|
}, |
|
{ |
|
"epoch": 0.3965102665139174, |
|
"grad_norm": 0.9497373104095459, |
|
"learning_rate": 2.5740833195563996e-05, |
|
"loss": 1.7874, |
|
"step": 15748 |
|
}, |
|
{ |
|
"epoch": 0.3972907985346141, |
|
"grad_norm": 0.9396623373031616, |
|
"learning_rate": 2.5656883636454067e-05, |
|
"loss": 1.7634, |
|
"step": 15779 |
|
}, |
|
{ |
|
"epoch": 0.39807133055531074, |
|
"grad_norm": 0.9426395297050476, |
|
"learning_rate": 2.557292666450159e-05, |
|
"loss": 1.7774, |
|
"step": 15810 |
|
}, |
|
{ |
|
"epoch": 0.39885186257600747, |
|
"grad_norm": 0.9340534806251526, |
|
"learning_rate": 2.5488963227149566e-05, |
|
"loss": 1.7619, |
|
"step": 15841 |
|
}, |
|
{ |
|
"epoch": 0.39963239459670413, |
|
"grad_norm": 0.9206343293190002, |
|
"learning_rate": 2.5404994271913983e-05, |
|
"loss": 1.7817, |
|
"step": 15872 |
|
}, |
|
{ |
|
"epoch": 0.40041292661740086, |
|
"grad_norm": 0.9418168067932129, |
|
"learning_rate": 2.5321020746373085e-05, |
|
"loss": 1.7788, |
|
"step": 15903 |
|
}, |
|
{ |
|
"epoch": 0.4011934586380975, |
|
"grad_norm": 0.9084994792938232, |
|
"learning_rate": 2.52370435981567e-05, |
|
"loss": 1.7784, |
|
"step": 15934 |
|
}, |
|
{ |
|
"epoch": 0.4019739906587942, |
|
"grad_norm": 0.9424017667770386, |
|
"learning_rate": 2.5153063774935533e-05, |
|
"loss": 1.7744, |
|
"step": 15965 |
|
}, |
|
{ |
|
"epoch": 0.4027545226794909, |
|
"grad_norm": 0.9338597655296326, |
|
"learning_rate": 2.506908222441045e-05, |
|
"loss": 1.7533, |
|
"step": 15996 |
|
}, |
|
{ |
|
"epoch": 0.4035350547001876, |
|
"grad_norm": 0.9237053394317627, |
|
"learning_rate": 2.498509989430187e-05, |
|
"loss": 1.7691, |
|
"step": 16027 |
|
}, |
|
{ |
|
"epoch": 0.40431558672088425, |
|
"grad_norm": 0.9271100759506226, |
|
"learning_rate": 2.4901117732338958e-05, |
|
"loss": 1.79, |
|
"step": 16058 |
|
}, |
|
{ |
|
"epoch": 0.40509611874158097, |
|
"grad_norm": 0.9305247068405151, |
|
"learning_rate": 2.481713668624899e-05, |
|
"loss": 1.7786, |
|
"step": 16089 |
|
}, |
|
{ |
|
"epoch": 0.40587665076227764, |
|
"grad_norm": 0.9237347841262817, |
|
"learning_rate": 2.4733157703746663e-05, |
|
"loss": 1.7951, |
|
"step": 16120 |
|
}, |
|
{ |
|
"epoch": 0.40665718278297436, |
|
"grad_norm": 0.8985100388526917, |
|
"learning_rate": 2.4649181732523392e-05, |
|
"loss": 1.7699, |
|
"step": 16151 |
|
}, |
|
{ |
|
"epoch": 0.407437714803671, |
|
"grad_norm": 0.9226688742637634, |
|
"learning_rate": 2.4565209720236582e-05, |
|
"loss": 1.769, |
|
"step": 16182 |
|
}, |
|
{ |
|
"epoch": 0.4082182468243677, |
|
"grad_norm": 0.9165879487991333, |
|
"learning_rate": 2.4481242614498975e-05, |
|
"loss": 1.7712, |
|
"step": 16213 |
|
}, |
|
{ |
|
"epoch": 0.4089987788450644, |
|
"grad_norm": 0.949297308921814, |
|
"learning_rate": 2.439728136286796e-05, |
|
"loss": 1.7709, |
|
"step": 16244 |
|
}, |
|
{ |
|
"epoch": 0.4097793108657611, |
|
"grad_norm": 0.930533230304718, |
|
"learning_rate": 2.4313326912834852e-05, |
|
"loss": 1.7512, |
|
"step": 16275 |
|
}, |
|
{ |
|
"epoch": 0.41055984288645775, |
|
"grad_norm": 0.926105797290802, |
|
"learning_rate": 2.4229380211814206e-05, |
|
"loss": 1.7769, |
|
"step": 16306 |
|
}, |
|
{ |
|
"epoch": 0.41134037490715447, |
|
"grad_norm": 0.9282335638999939, |
|
"learning_rate": 2.4145442207133124e-05, |
|
"loss": 1.7624, |
|
"step": 16337 |
|
}, |
|
{ |
|
"epoch": 0.41212090692785114, |
|
"grad_norm": 0.9377603530883789, |
|
"learning_rate": 2.406151384602059e-05, |
|
"loss": 1.7756, |
|
"step": 16368 |
|
}, |
|
{ |
|
"epoch": 0.4129014389485478, |
|
"grad_norm": 0.9203463792800903, |
|
"learning_rate": 2.3977596075596747e-05, |
|
"loss": 1.7839, |
|
"step": 16399 |
|
}, |
|
{ |
|
"epoch": 0.41368197096924453, |
|
"grad_norm": 0.9060366153717041, |
|
"learning_rate": 2.3893689842862223e-05, |
|
"loss": 1.7667, |
|
"step": 16430 |
|
}, |
|
{ |
|
"epoch": 0.4144625029899412, |
|
"grad_norm": 0.9065908193588257, |
|
"learning_rate": 2.3809796094687475e-05, |
|
"loss": 1.7691, |
|
"step": 16461 |
|
}, |
|
{ |
|
"epoch": 0.4152430350106379, |
|
"grad_norm": 0.9281785488128662, |
|
"learning_rate": 2.372591577780202e-05, |
|
"loss": 1.7573, |
|
"step": 16492 |
|
}, |
|
{ |
|
"epoch": 0.4160235670313346, |
|
"grad_norm": 0.880733847618103, |
|
"learning_rate": 2.3642049838783838e-05, |
|
"loss": 1.7697, |
|
"step": 16523 |
|
}, |
|
{ |
|
"epoch": 0.41680409905203125, |
|
"grad_norm": 0.9289157390594482, |
|
"learning_rate": 2.3558199224048666e-05, |
|
"loss": 1.7604, |
|
"step": 16554 |
|
}, |
|
{ |
|
"epoch": 0.417584631072728, |
|
"grad_norm": 0.9380747079849243, |
|
"learning_rate": 2.347436487983929e-05, |
|
"loss": 1.7686, |
|
"step": 16585 |
|
}, |
|
{ |
|
"epoch": 0.41836516309342464, |
|
"grad_norm": 0.9439517855644226, |
|
"learning_rate": 2.3390547752214888e-05, |
|
"loss": 1.7786, |
|
"step": 16616 |
|
}, |
|
{ |
|
"epoch": 0.4191456951141213, |
|
"grad_norm": 0.9420905113220215, |
|
"learning_rate": 2.330674878704035e-05, |
|
"loss": 1.762, |
|
"step": 16647 |
|
}, |
|
{ |
|
"epoch": 0.41992622713481803, |
|
"grad_norm": 0.9132199883460999, |
|
"learning_rate": 2.322296892997561e-05, |
|
"loss": 1.776, |
|
"step": 16678 |
|
}, |
|
{ |
|
"epoch": 0.4207067591555147, |
|
"grad_norm": 0.9286783933639526, |
|
"learning_rate": 2.313920912646497e-05, |
|
"loss": 1.7564, |
|
"step": 16709 |
|
}, |
|
{ |
|
"epoch": 0.4214872911762114, |
|
"grad_norm": 0.9031944870948792, |
|
"learning_rate": 2.305547032172643e-05, |
|
"loss": 1.75, |
|
"step": 16740 |
|
}, |
|
{ |
|
"epoch": 0.4222678231969081, |
|
"grad_norm": 0.9604383111000061, |
|
"learning_rate": 2.2971753460741014e-05, |
|
"loss": 1.7821, |
|
"step": 16771 |
|
}, |
|
{ |
|
"epoch": 0.42304835521760475, |
|
"grad_norm": 0.9145975112915039, |
|
"learning_rate": 2.288805948824212e-05, |
|
"loss": 1.7604, |
|
"step": 16802 |
|
}, |
|
{ |
|
"epoch": 0.4238288872383015, |
|
"grad_norm": 0.9214676022529602, |
|
"learning_rate": 2.2804389348704858e-05, |
|
"loss": 1.7553, |
|
"step": 16833 |
|
}, |
|
{ |
|
"epoch": 0.42460941925899814, |
|
"grad_norm": 0.9024298191070557, |
|
"learning_rate": 2.2720743986335374e-05, |
|
"loss": 1.7652, |
|
"step": 16864 |
|
}, |
|
{ |
|
"epoch": 0.4253899512796948, |
|
"grad_norm": 0.8926112055778503, |
|
"learning_rate": 2.2637124345060233e-05, |
|
"loss": 1.7583, |
|
"step": 16895 |
|
}, |
|
{ |
|
"epoch": 0.42617048330039153, |
|
"grad_norm": 0.9225366711616516, |
|
"learning_rate": 2.2553531368515695e-05, |
|
"loss": 1.7607, |
|
"step": 16926 |
|
}, |
|
{ |
|
"epoch": 0.4269510153210882, |
|
"grad_norm": 0.9340829253196716, |
|
"learning_rate": 2.2469966000037144e-05, |
|
"loss": 1.7659, |
|
"step": 16957 |
|
}, |
|
{ |
|
"epoch": 0.4277315473417849, |
|
"grad_norm": 0.9964073896408081, |
|
"learning_rate": 2.2386429182648417e-05, |
|
"loss": 1.7414, |
|
"step": 16988 |
|
}, |
|
{ |
|
"epoch": 0.4285120793624816, |
|
"grad_norm": 0.9147719144821167, |
|
"learning_rate": 2.230292185905114e-05, |
|
"loss": 1.764, |
|
"step": 17019 |
|
}, |
|
{ |
|
"epoch": 0.42929261138317826, |
|
"grad_norm": 0.9175845384597778, |
|
"learning_rate": 2.2219444971614116e-05, |
|
"loss": 1.7623, |
|
"step": 17050 |
|
}, |
|
{ |
|
"epoch": 0.430073143403875, |
|
"grad_norm": 0.9041738510131836, |
|
"learning_rate": 2.2135999462362655e-05, |
|
"loss": 1.7448, |
|
"step": 17081 |
|
}, |
|
{ |
|
"epoch": 0.43085367542457165, |
|
"grad_norm": 0.9334216713905334, |
|
"learning_rate": 2.2052586272968003e-05, |
|
"loss": 1.7548, |
|
"step": 17112 |
|
}, |
|
{ |
|
"epoch": 0.4316342074452683, |
|
"grad_norm": 0.9379672408103943, |
|
"learning_rate": 2.196920634473666e-05, |
|
"loss": 1.7703, |
|
"step": 17143 |
|
}, |
|
{ |
|
"epoch": 0.43241473946596504, |
|
"grad_norm": 0.9310106039047241, |
|
"learning_rate": 2.1885860618599787e-05, |
|
"loss": 1.7567, |
|
"step": 17174 |
|
}, |
|
{ |
|
"epoch": 0.4331952714866617, |
|
"grad_norm": 0.936827540397644, |
|
"learning_rate": 2.1802550035102577e-05, |
|
"loss": 1.7572, |
|
"step": 17205 |
|
}, |
|
{ |
|
"epoch": 0.4339758035073584, |
|
"grad_norm": 0.954757571220398, |
|
"learning_rate": 2.171927553439363e-05, |
|
"loss": 1.7773, |
|
"step": 17236 |
|
}, |
|
{ |
|
"epoch": 0.4347563355280551, |
|
"grad_norm": 0.9472478032112122, |
|
"learning_rate": 2.1636038056214376e-05, |
|
"loss": 1.7614, |
|
"step": 17267 |
|
}, |
|
{ |
|
"epoch": 0.43553686754875176, |
|
"grad_norm": 0.9342318773269653, |
|
"learning_rate": 2.155283853988844e-05, |
|
"loss": 1.7561, |
|
"step": 17298 |
|
}, |
|
{ |
|
"epoch": 0.4363173995694485, |
|
"grad_norm": 0.9363663792610168, |
|
"learning_rate": 2.146967792431106e-05, |
|
"loss": 1.7703, |
|
"step": 17329 |
|
}, |
|
{ |
|
"epoch": 0.43709793159014515, |
|
"grad_norm": 0.9496386647224426, |
|
"learning_rate": 2.138655714793849e-05, |
|
"loss": 1.7565, |
|
"step": 17360 |
|
}, |
|
{ |
|
"epoch": 0.4378784636108418, |
|
"grad_norm": 0.9076322913169861, |
|
"learning_rate": 2.1303477148777367e-05, |
|
"loss": 1.738, |
|
"step": 17391 |
|
}, |
|
{ |
|
"epoch": 0.43865899563153854, |
|
"grad_norm": 0.9229772686958313, |
|
"learning_rate": 2.122043886437421e-05, |
|
"loss": 1.7455, |
|
"step": 17422 |
|
}, |
|
{ |
|
"epoch": 0.4394395276522352, |
|
"grad_norm": 0.9290786385536194, |
|
"learning_rate": 2.1137443231804765e-05, |
|
"loss": 1.7635, |
|
"step": 17453 |
|
}, |
|
{ |
|
"epoch": 0.44022005967293193, |
|
"grad_norm": 0.9575942754745483, |
|
"learning_rate": 2.105449118766347e-05, |
|
"loss": 1.7763, |
|
"step": 17484 |
|
}, |
|
{ |
|
"epoch": 0.4410005916936286, |
|
"grad_norm": 0.9625800848007202, |
|
"learning_rate": 2.097158366805287e-05, |
|
"loss": 1.7514, |
|
"step": 17515 |
|
}, |
|
{ |
|
"epoch": 0.44178112371432526, |
|
"grad_norm": 0.9176467061042786, |
|
"learning_rate": 2.0888721608573047e-05, |
|
"loss": 1.7521, |
|
"step": 17546 |
|
}, |
|
{ |
|
"epoch": 0.442561655735022, |
|
"grad_norm": 0.92497318983078, |
|
"learning_rate": 2.0805905944311087e-05, |
|
"loss": 1.7582, |
|
"step": 17577 |
|
}, |
|
{ |
|
"epoch": 0.44334218775571865, |
|
"grad_norm": 0.9499204158782959, |
|
"learning_rate": 2.0723137609830497e-05, |
|
"loss": 1.762, |
|
"step": 17608 |
|
}, |
|
{ |
|
"epoch": 0.4441227197764153, |
|
"grad_norm": 0.9391850829124451, |
|
"learning_rate": 2.0640417539160686e-05, |
|
"loss": 1.7495, |
|
"step": 17639 |
|
}, |
|
{ |
|
"epoch": 0.44490325179711204, |
|
"grad_norm": 0.9657866358757019, |
|
"learning_rate": 2.0557746665786427e-05, |
|
"loss": 1.7642, |
|
"step": 17670 |
|
}, |
|
{ |
|
"epoch": 0.4456837838178087, |
|
"grad_norm": 0.955342173576355, |
|
"learning_rate": 2.0475125922637256e-05, |
|
"loss": 1.7645, |
|
"step": 17701 |
|
}, |
|
{ |
|
"epoch": 0.44646431583850543, |
|
"grad_norm": 0.9168228507041931, |
|
"learning_rate": 2.0392556242077047e-05, |
|
"loss": 1.7598, |
|
"step": 17732 |
|
}, |
|
{ |
|
"epoch": 0.4472448478592021, |
|
"grad_norm": 0.8854954242706299, |
|
"learning_rate": 2.031003855589343e-05, |
|
"loss": 1.7576, |
|
"step": 17763 |
|
}, |
|
{ |
|
"epoch": 0.44802537987989877, |
|
"grad_norm": 0.9424418210983276, |
|
"learning_rate": 2.022757379528727e-05, |
|
"loss": 1.766, |
|
"step": 17794 |
|
}, |
|
{ |
|
"epoch": 0.4488059119005955, |
|
"grad_norm": 0.9471886157989502, |
|
"learning_rate": 2.0145162890862184e-05, |
|
"loss": 1.7458, |
|
"step": 17825 |
|
}, |
|
{ |
|
"epoch": 0.44958644392129216, |
|
"grad_norm": 0.9373571872711182, |
|
"learning_rate": 2.0062806772614022e-05, |
|
"loss": 1.743, |
|
"step": 17856 |
|
}, |
|
{ |
|
"epoch": 0.4503669759419888, |
|
"grad_norm": 0.8964229226112366, |
|
"learning_rate": 1.9980506369920392e-05, |
|
"loss": 1.756, |
|
"step": 17887 |
|
}, |
|
{ |
|
"epoch": 0.45114750796268555, |
|
"grad_norm": 0.9414677619934082, |
|
"learning_rate": 1.989826261153015e-05, |
|
"loss": 1.7407, |
|
"step": 17918 |
|
}, |
|
{ |
|
"epoch": 0.4519280399833822, |
|
"grad_norm": 0.9373415112495422, |
|
"learning_rate": 1.9816076425552923e-05, |
|
"loss": 1.746, |
|
"step": 17949 |
|
}, |
|
{ |
|
"epoch": 0.45270857200407894, |
|
"grad_norm": 0.8916881084442139, |
|
"learning_rate": 1.9733948739448676e-05, |
|
"loss": 1.7564, |
|
"step": 17980 |
|
}, |
|
{ |
|
"epoch": 0.4534891040247756, |
|
"grad_norm": 0.9054657816886902, |
|
"learning_rate": 1.9651880480017155e-05, |
|
"loss": 1.7586, |
|
"step": 18011 |
|
}, |
|
{ |
|
"epoch": 0.45426963604547227, |
|
"grad_norm": 0.9446020126342773, |
|
"learning_rate": 1.9569872573387516e-05, |
|
"loss": 1.758, |
|
"step": 18042 |
|
}, |
|
{ |
|
"epoch": 0.455050168066169, |
|
"grad_norm": 0.9143036007881165, |
|
"learning_rate": 1.9487925945007854e-05, |
|
"loss": 1.7554, |
|
"step": 18073 |
|
}, |
|
{ |
|
"epoch": 0.45583070008686566, |
|
"grad_norm": 0.9555609822273254, |
|
"learning_rate": 1.9406041519634726e-05, |
|
"loss": 1.7533, |
|
"step": 18104 |
|
}, |
|
{ |
|
"epoch": 0.4566112321075623, |
|
"grad_norm": 0.929480791091919, |
|
"learning_rate": 1.932422022132275e-05, |
|
"loss": 1.7599, |
|
"step": 18135 |
|
}, |
|
{ |
|
"epoch": 0.45739176412825905, |
|
"grad_norm": 0.9011172652244568, |
|
"learning_rate": 1.924246297341414e-05, |
|
"loss": 1.7712, |
|
"step": 18166 |
|
}, |
|
{ |
|
"epoch": 0.4581722961489557, |
|
"grad_norm": 0.9395762085914612, |
|
"learning_rate": 1.9160770698528338e-05, |
|
"loss": 1.7715, |
|
"step": 18197 |
|
}, |
|
{ |
|
"epoch": 0.45895282816965244, |
|
"grad_norm": 0.9550304412841797, |
|
"learning_rate": 1.907914431855156e-05, |
|
"loss": 1.7315, |
|
"step": 18228 |
|
}, |
|
{ |
|
"epoch": 0.4597333601903491, |
|
"grad_norm": 0.9520135521888733, |
|
"learning_rate": 1.8997584754626412e-05, |
|
"loss": 1.7453, |
|
"step": 18259 |
|
}, |
|
{ |
|
"epoch": 0.46051389221104577, |
|
"grad_norm": 0.8951966762542725, |
|
"learning_rate": 1.8916092927141486e-05, |
|
"loss": 1.7496, |
|
"step": 18290 |
|
} |
|
], |
|
"logging_steps": 31, |
|
"max_steps": 30517, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 3052, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.2790112517183504e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|