{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.23680126900927617,
"eval_steps": 500,
"global_step": 27468,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0002672505948480982,
"grad_norm": 3.2076423168182373,
"learning_rate": 1.0157273918741808e-06,
"loss": 1.4511,
"step": 31
},
{
"epoch": 0.0005345011896961964,
"grad_norm": 2.221529006958008,
"learning_rate": 2.0314547837483616e-06,
"loss": 1.3342,
"step": 62
},
{
"epoch": 0.0008017517845442946,
"grad_norm": 2.961315393447876,
"learning_rate": 3.0471821756225426e-06,
"loss": 1.2649,
"step": 93
},
{
"epoch": 0.0010690023793923928,
"grad_norm": 2.311617612838745,
"learning_rate": 4.062909567496723e-06,
"loss": 1.2278,
"step": 124
},
{
"epoch": 0.001336252974240491,
"grad_norm": 3.4343724250793457,
"learning_rate": 5.078636959370905e-06,
"loss": 1.2061,
"step": 155
},
{
"epoch": 0.0016035035690885892,
"grad_norm": 2.421396017074585,
"learning_rate": 6.094364351245085e-06,
"loss": 1.1848,
"step": 186
},
{
"epoch": 0.0018707541639366874,
"grad_norm": 2.8230600357055664,
"learning_rate": 7.110091743119267e-06,
"loss": 1.1586,
"step": 217
},
{
"epoch": 0.0021380047587847856,
"grad_norm": 2.021296501159668,
"learning_rate": 8.125819134993446e-06,
"loss": 1.1552,
"step": 248
},
{
"epoch": 0.002405255353632884,
"grad_norm": 2.2329111099243164,
"learning_rate": 9.141546526867629e-06,
"loss": 1.1342,
"step": 279
},
{
"epoch": 0.002672505948480982,
"grad_norm": 2.8933441638946533,
"learning_rate": 1.015727391874181e-05,
"loss": 1.1092,
"step": 310
},
{
"epoch": 0.0029397565433290805,
"grad_norm": 2.510772705078125,
"learning_rate": 1.117300131061599e-05,
"loss": 1.108,
"step": 341
},
{
"epoch": 0.0032070071381771785,
"grad_norm": 2.170926094055176,
"learning_rate": 1.218872870249017e-05,
"loss": 1.0982,
"step": 372
},
{
"epoch": 0.003474257733025277,
"grad_norm": 2.1116831302642822,
"learning_rate": 1.3204456094364351e-05,
"loss": 1.0977,
"step": 403
},
{
"epoch": 0.003741508327873375,
"grad_norm": 2.5462026596069336,
"learning_rate": 1.4220183486238533e-05,
"loss": 1.0797,
"step": 434
},
{
"epoch": 0.004008758922721473,
"grad_norm": 2.5239830017089844,
"learning_rate": 1.5235910878112714e-05,
"loss": 1.0691,
"step": 465
},
{
"epoch": 0.004276009517569571,
"grad_norm": 2.640591859817505,
"learning_rate": 1.6251638269986893e-05,
"loss": 1.0547,
"step": 496
},
{
"epoch": 0.004543260112417669,
"grad_norm": 1.9566192626953125,
"learning_rate": 1.7267365661861077e-05,
"loss": 1.0526,
"step": 527
},
{
"epoch": 0.004810510707265768,
"grad_norm": 2.515118360519409,
"learning_rate": 1.8283093053735257e-05,
"loss": 1.0614,
"step": 558
},
{
"epoch": 0.005077761302113866,
"grad_norm": 2.1588094234466553,
"learning_rate": 1.9298820445609438e-05,
"loss": 1.0461,
"step": 589
},
{
"epoch": 0.005345011896961964,
"grad_norm": 2.448280096054077,
"learning_rate": 2.031454783748362e-05,
"loss": 1.0333,
"step": 620
},
{
"epoch": 0.005612262491810062,
"grad_norm": 2.9539694786071777,
"learning_rate": 2.13302752293578e-05,
"loss": 1.0224,
"step": 651
},
{
"epoch": 0.005879513086658161,
"grad_norm": 2.6804676055908203,
"learning_rate": 2.234600262123198e-05,
"loss": 1.0145,
"step": 682
},
{
"epoch": 0.006146763681506259,
"grad_norm": 2.176100730895996,
"learning_rate": 2.336173001310616e-05,
"loss": 1.0164,
"step": 713
},
{
"epoch": 0.006414014276354357,
"grad_norm": 1.9107022285461426,
"learning_rate": 2.437745740498034e-05,
"loss": 1.004,
"step": 744
},
{
"epoch": 0.006681264871202455,
"grad_norm": 3.175410270690918,
"learning_rate": 2.5393184796854525e-05,
"loss": 0.9986,
"step": 775
},
{
"epoch": 0.006948515466050554,
"grad_norm": 1.7941040992736816,
"learning_rate": 2.6408912188728702e-05,
"loss": 0.9979,
"step": 806
},
{
"epoch": 0.007215766060898652,
"grad_norm": 2.428041458129883,
"learning_rate": 2.7424639580602886e-05,
"loss": 1.0056,
"step": 837
},
{
"epoch": 0.00748301665574675,
"grad_norm": 2.1440467834472656,
"learning_rate": 2.8440366972477066e-05,
"loss": 0.9908,
"step": 868
},
{
"epoch": 0.007750267250594848,
"grad_norm": 2.138157844543457,
"learning_rate": 2.9456094364351244e-05,
"loss": 0.986,
"step": 899
},
{
"epoch": 0.008017517845442947,
"grad_norm": 2.144076108932495,
"learning_rate": 3.0471821756225428e-05,
"loss": 0.9768,
"step": 930
},
{
"epoch": 0.008284768440291045,
"grad_norm": 2.116927146911621,
"learning_rate": 3.148754914809961e-05,
"loss": 0.975,
"step": 961
},
{
"epoch": 0.008552019035139143,
"grad_norm": 2.0087051391601562,
"learning_rate": 3.2503276539973785e-05,
"loss": 0.9746,
"step": 992
},
{
"epoch": 0.00881926962998724,
"grad_norm": 2.008014440536499,
"learning_rate": 3.351900393184797e-05,
"loss": 0.9612,
"step": 1023
},
{
"epoch": 0.009086520224835339,
"grad_norm": 2.5932323932647705,
"learning_rate": 3.453473132372215e-05,
"loss": 0.9611,
"step": 1054
},
{
"epoch": 0.009353770819683437,
"grad_norm": 1.7766073942184448,
"learning_rate": 3.555045871559633e-05,
"loss": 0.9529,
"step": 1085
},
{
"epoch": 0.009621021414531536,
"grad_norm": 1.699992299079895,
"learning_rate": 3.6566186107470514e-05,
"loss": 0.9638,
"step": 1116
},
{
"epoch": 0.009888272009379634,
"grad_norm": 2.139831304550171,
"learning_rate": 3.7581913499344695e-05,
"loss": 0.9518,
"step": 1147
},
{
"epoch": 0.010155522604227732,
"grad_norm": 2.510404109954834,
"learning_rate": 3.8597640891218876e-05,
"loss": 0.9557,
"step": 1178
},
{
"epoch": 0.01042277319907583,
"grad_norm": 1.7539429664611816,
"learning_rate": 3.9613368283093056e-05,
"loss": 0.949,
"step": 1209
},
{
"epoch": 0.010690023793923928,
"grad_norm": 1.7679015398025513,
"learning_rate": 4.062909567496724e-05,
"loss": 0.9285,
"step": 1240
},
{
"epoch": 0.010957274388772026,
"grad_norm": 1.6701771020889282,
"learning_rate": 4.164482306684142e-05,
"loss": 0.9391,
"step": 1271
},
{
"epoch": 0.011224524983620124,
"grad_norm": 1.7283812761306763,
"learning_rate": 4.26605504587156e-05,
"loss": 0.9328,
"step": 1302
},
{
"epoch": 0.011491775578468222,
"grad_norm": 1.363319754600525,
"learning_rate": 4.367627785058978e-05,
"loss": 0.9353,
"step": 1333
},
{
"epoch": 0.011759026173316322,
"grad_norm": 1.847582459449768,
"learning_rate": 4.469200524246396e-05,
"loss": 0.9272,
"step": 1364
},
{
"epoch": 0.01202627676816442,
"grad_norm": 1.6437714099884033,
"learning_rate": 4.570773263433814e-05,
"loss": 0.9219,
"step": 1395
},
{
"epoch": 0.012293527363012518,
"grad_norm": 1.979443073272705,
"learning_rate": 4.672346002621232e-05,
"loss": 0.91,
"step": 1426
},
{
"epoch": 0.012560777957860616,
"grad_norm": 1.601218342781067,
"learning_rate": 4.77391874180865e-05,
"loss": 0.9229,
"step": 1457
},
{
"epoch": 0.012828028552708714,
"grad_norm": 2.36148738861084,
"learning_rate": 4.875491480996068e-05,
"loss": 0.9199,
"step": 1488
},
{
"epoch": 0.013095279147556812,
"grad_norm": 4.211123466491699,
"learning_rate": 4.977064220183487e-05,
"loss": 1.0419,
"step": 1519
},
{
"epoch": 0.01336252974240491,
"grad_norm": 1.620786428451538,
"learning_rate": 4.9999915451558777e-05,
"loss": 0.954,
"step": 1550
},
{
"epoch": 0.01362978033725301,
"grad_norm": 3.2490246295928955,
"learning_rate": 4.999955597496219e-05,
"loss": 0.9464,
"step": 1581
},
{
"epoch": 0.013897030932101108,
"grad_norm": 1.9302624464035034,
"learning_rate": 4.9998914381774255e-05,
"loss": 0.9294,
"step": 1612
},
{
"epoch": 0.014164281526949206,
"grad_norm": 1.5750601291656494,
"learning_rate": 4.999799067923527e-05,
"loss": 0.922,
"step": 1643
},
{
"epoch": 0.014431532121797304,
"grad_norm": 53.0409049987793,
"learning_rate": 4.999678487776908e-05,
"loss": 1.0611,
"step": 1674
},
{
"epoch": 0.014698782716645402,
"grad_norm": 1.841700553894043,
"learning_rate": 4.9995296990983006e-05,
"loss": 1.0363,
"step": 1705
},
{
"epoch": 0.0149660333114935,
"grad_norm": 1.2549737691879272,
"learning_rate": 4.999352703566763e-05,
"loss": 0.938,
"step": 1736
},
{
"epoch": 0.015233283906341597,
"grad_norm": 1.3289740085601807,
"learning_rate": 4.999147503179668e-05,
"loss": 0.9113,
"step": 1767
},
{
"epoch": 0.015500534501189695,
"grad_norm": 1.5228811502456665,
"learning_rate": 4.998914100252672e-05,
"loss": 0.921,
"step": 1798
},
{
"epoch": 0.015767785096037795,
"grad_norm": 1.5058027505874634,
"learning_rate": 4.998652497419696e-05,
"loss": 0.9214,
"step": 1829
},
{
"epoch": 0.016035035690885893,
"grad_norm": 9.535687446594238,
"learning_rate": 4.9983626976328927e-05,
"loss": 1.3068,
"step": 1860
},
{
"epoch": 0.01630228628573399,
"grad_norm": 2.5820651054382324,
"learning_rate": 4.998044704162613e-05,
"loss": 1.7033,
"step": 1891
},
{
"epoch": 0.01656953688058209,
"grad_norm": 148.47412109375,
"learning_rate": 4.9976985205973705e-05,
"loss": 1.6935,
"step": 1922
},
{
"epoch": 0.016836787475430187,
"grad_norm": 8.416805267333984,
"learning_rate": 4.997324150843799e-05,
"loss": 1.6607,
"step": 1953
},
{
"epoch": 0.017104038070278285,
"grad_norm": 5.2713470458984375,
"learning_rate": 4.99692159912661e-05,
"loss": 1.1301,
"step": 1984
},
{
"epoch": 0.017371288665126383,
"grad_norm": 2.096306800842285,
"learning_rate": 4.996490869988546e-05,
"loss": 0.9931,
"step": 2015
},
{
"epoch": 0.01763853925997448,
"grad_norm": 1.5476640462875366,
"learning_rate": 4.996031968290326e-05,
"loss": 0.9514,
"step": 2046
},
{
"epoch": 0.01790578985482258,
"grad_norm": 1.727232813835144,
"learning_rate": 4.995544899210594e-05,
"loss": 0.9409,
"step": 2077
},
{
"epoch": 0.018173040449670677,
"grad_norm": 26.08816909790039,
"learning_rate": 4.9950296682458583e-05,
"loss": 0.9368,
"step": 2108
},
{
"epoch": 0.018440291044518775,
"grad_norm": 1.7082202434539795,
"learning_rate": 4.994486281210429e-05,
"loss": 0.9155,
"step": 2139
},
{
"epoch": 0.018707541639366873,
"grad_norm": 1.5962992906570435,
"learning_rate": 4.9939147442363566e-05,
"loss": 0.9233,
"step": 2170
},
{
"epoch": 0.018974792234214975,
"grad_norm": 2.024268627166748,
"learning_rate": 4.9933150637733574e-05,
"loss": 0.9128,
"step": 2201
},
{
"epoch": 0.019242042829063073,
"grad_norm": 1.4828873872756958,
"learning_rate": 4.992687246588743e-05,
"loss": 0.8957,
"step": 2232
},
{
"epoch": 0.01950929342391117,
"grad_norm": 1.745612382888794,
"learning_rate": 4.992031299767347e-05,
"loss": 0.9005,
"step": 2263
},
{
"epoch": 0.01977654401875927,
"grad_norm": 1.354095458984375,
"learning_rate": 4.9913472307114386e-05,
"loss": 0.9002,
"step": 2294
},
{
"epoch": 0.020043794613607367,
"grad_norm": 2.6227681636810303,
"learning_rate": 4.9906350471406446e-05,
"loss": 0.9004,
"step": 2325
},
{
"epoch": 0.020311045208455464,
"grad_norm": 1.3870893716812134,
"learning_rate": 4.989894757091861e-05,
"loss": 0.9018,
"step": 2356
},
{
"epoch": 0.020578295803303562,
"grad_norm": 1.2884349822998047,
"learning_rate": 4.989126368919158e-05,
"loss": 0.8993,
"step": 2387
},
{
"epoch": 0.02084554639815166,
"grad_norm": 1.215851068496704,
"learning_rate": 4.988329891293693e-05,
"loss": 0.8925,
"step": 2418
},
{
"epoch": 0.02111279699299976,
"grad_norm": 1.4246628284454346,
"learning_rate": 4.987505333203608e-05,
"loss": 0.8893,
"step": 2449
},
{
"epoch": 0.021380047587847856,
"grad_norm": 1.5495259761810303,
"learning_rate": 4.9866527039539276e-05,
"loss": 0.8749,
"step": 2480
},
{
"epoch": 0.021647298182695954,
"grad_norm": 1.0121793746948242,
"learning_rate": 4.9857720131664594e-05,
"loss": 0.8803,
"step": 2511
},
{
"epoch": 0.021914548777544052,
"grad_norm": 1.1507561206817627,
"learning_rate": 4.9848632707796773e-05,
"loss": 0.8843,
"step": 2542
},
{
"epoch": 0.02218179937239215,
"grad_norm": 1.612584114074707,
"learning_rate": 4.9839264870486155e-05,
"loss": 0.8733,
"step": 2573
},
{
"epoch": 0.02244904996724025,
"grad_norm": 1.4475393295288086,
"learning_rate": 4.9829616725447526e-05,
"loss": 0.8669,
"step": 2604
},
{
"epoch": 0.022716300562088346,
"grad_norm": 1.9612644910812378,
"learning_rate": 4.981968838155888e-05,
"loss": 0.8791,
"step": 2635
},
{
"epoch": 0.022983551156936444,
"grad_norm": 1.6032893657684326,
"learning_rate": 4.980947995086024e-05,
"loss": 0.8646,
"step": 2666
},
{
"epoch": 0.023250801751784546,
"grad_norm": 1.6368708610534668,
"learning_rate": 4.979899154855234e-05,
"loss": 0.871,
"step": 2697
},
{
"epoch": 0.023518052346632644,
"grad_norm": 1.0245656967163086,
"learning_rate": 4.9788223292995386e-05,
"loss": 0.8534,
"step": 2728
},
{
"epoch": 0.023785302941480742,
"grad_norm": 0.9378401041030884,
"learning_rate": 4.977717530570768e-05,
"loss": 0.8478,
"step": 2759
},
{
"epoch": 0.02405255353632884,
"grad_norm": 1.4262951612472534,
"learning_rate": 4.976584771136425e-05,
"loss": 0.8634,
"step": 2790
},
{
"epoch": 0.024319804131176938,
"grad_norm": 1.3320890665054321,
"learning_rate": 4.975424063779547e-05,
"loss": 0.8505,
"step": 2821
},
{
"epoch": 0.024587054726025036,
"grad_norm": 1.043971061706543,
"learning_rate": 4.974235421598557e-05,
"loss": 0.863,
"step": 2852
},
{
"epoch": 0.024854305320873134,
"grad_norm": 1.387088418006897,
"learning_rate": 4.973018858007122e-05,
"loss": 0.8505,
"step": 2883
},
{
"epoch": 0.025121555915721232,
"grad_norm": 1.1267746686935425,
"learning_rate": 4.9717743867339963e-05,
"loss": 0.8528,
"step": 2914
},
{
"epoch": 0.02538880651056933,
"grad_norm": 0.9636305570602417,
"learning_rate": 4.9705020218228695e-05,
"loss": 0.8453,
"step": 2945
},
{
"epoch": 0.025656057105417428,
"grad_norm": 1.1140860319137573,
"learning_rate": 4.969201777632205e-05,
"loss": 0.8664,
"step": 2976
},
{
"epoch": 0.025923307700265526,
"grad_norm": 1.1886147260665894,
"learning_rate": 4.9678736688350846e-05,
"loss": 0.8569,
"step": 3007
},
{
"epoch": 0.026190558295113624,
"grad_norm": 1.0308467149734497,
"learning_rate": 4.966517710419033e-05,
"loss": 0.85,
"step": 3038
},
{
"epoch": 0.02645780888996172,
"grad_norm": 1.6353480815887451,
"learning_rate": 4.965133917685858e-05,
"loss": 0.8414,
"step": 3069
},
{
"epoch": 0.02672505948480982,
"grad_norm": 1.2031575441360474,
"learning_rate": 4.9637223062514714e-05,
"loss": 0.8435,
"step": 3100
},
{
"epoch": 0.026992310079657918,
"grad_norm": 1.2575979232788086,
"learning_rate": 4.962282892045718e-05,
"loss": 0.8415,
"step": 3131
},
{
"epoch": 0.02725956067450602,
"grad_norm": 1.338584303855896,
"learning_rate": 4.9608156913121904e-05,
"loss": 0.8414,
"step": 3162
},
{
"epoch": 0.027526811269354117,
"grad_norm": 1.3699461221694946,
"learning_rate": 4.959320720608049e-05,
"loss": 0.8397,
"step": 3193
},
{
"epoch": 0.027794061864202215,
"grad_norm": 1.1471112966537476,
"learning_rate": 4.9577979968038354e-05,
"loss": 0.8304,
"step": 3224
},
{
"epoch": 0.028061312459050313,
"grad_norm": 1.1752680540084839,
"learning_rate": 4.956247537083282e-05,
"loss": 0.8408,
"step": 3255
},
{
"epoch": 0.02832856305389841,
"grad_norm": 1.089552879333496,
"learning_rate": 4.9546693589431145e-05,
"loss": 0.8286,
"step": 3286
},
{
"epoch": 0.02859581364874651,
"grad_norm": 1.0688341856002808,
"learning_rate": 4.9530634801928595e-05,
"loss": 0.8341,
"step": 3317
},
{
"epoch": 0.028863064243594607,
"grad_norm": 0.9290914535522461,
"learning_rate": 4.9514299189546395e-05,
"loss": 0.8307,
"step": 3348
},
{
"epoch": 0.029130314838442705,
"grad_norm": 1.3211214542388916,
"learning_rate": 4.949768693662973e-05,
"loss": 0.8349,
"step": 3379
},
{
"epoch": 0.029397565433290803,
"grad_norm": 1.8893773555755615,
"learning_rate": 4.948079823064559e-05,
"loss": 0.8289,
"step": 3410
},
{
"epoch": 0.0296648160281389,
"grad_norm": 1.3086936473846436,
"learning_rate": 4.946363326218074e-05,
"loss": 0.8231,
"step": 3441
},
{
"epoch": 0.029932066622987,
"grad_norm": 1.356311559677124,
"learning_rate": 4.9446192224939525e-05,
"loss": 0.8343,
"step": 3472
},
{
"epoch": 0.030199317217835097,
"grad_norm": 1.13872492313385,
"learning_rate": 4.942847531574167e-05,
"loss": 0.828,
"step": 3503
},
{
"epoch": 0.030466567812683195,
"grad_norm": 1.1912773847579956,
"learning_rate": 4.941048273452008e-05,
"loss": 0.8236,
"step": 3534
},
{
"epoch": 0.030733818407531293,
"grad_norm": 0.9231215715408325,
"learning_rate": 4.9392214684318605e-05,
"loss": 0.8226,
"step": 3565
},
{
"epoch": 0.03100106900237939,
"grad_norm": 1.0268129110336304,
"learning_rate": 4.93736713712897e-05,
"loss": 0.8218,
"step": 3596
},
{
"epoch": 0.03126831959722749,
"grad_norm": 0.974876344203949,
"learning_rate": 4.9354853004692124e-05,
"loss": 0.8149,
"step": 3627
},
{
"epoch": 0.03153557019207559,
"grad_norm": 1.2175925970077515,
"learning_rate": 4.93357597968886e-05,
"loss": 0.8152,
"step": 3658
},
{
"epoch": 0.03180282078692369,
"grad_norm": 1.2338181734085083,
"learning_rate": 4.931639196334338e-05,
"loss": 0.8089,
"step": 3689
},
{
"epoch": 0.032070071381771786,
"grad_norm": 1.1462433338165283,
"learning_rate": 4.9296749722619826e-05,
"loss": 0.8216,
"step": 3720
},
{
"epoch": 0.032337321976619884,
"grad_norm": 1.1631278991699219,
"learning_rate": 4.9276833296377966e-05,
"loss": 0.8272,
"step": 3751
},
{
"epoch": 0.03260457257146798,
"grad_norm": 0.903836727142334,
"learning_rate": 4.925664290937196e-05,
"loss": 0.816,
"step": 3782
},
{
"epoch": 0.03287182316631608,
"grad_norm": 0.944284975528717,
"learning_rate": 4.9236178789447576e-05,
"loss": 0.8031,
"step": 3813
},
{
"epoch": 0.03313907376116418,
"grad_norm": 1.1210039854049683,
"learning_rate": 4.921544116753962e-05,
"loss": 0.8096,
"step": 3844
},
{
"epoch": 0.033406324356012276,
"grad_norm": 1.0448529720306396,
"learning_rate": 4.919443027766935e-05,
"loss": 0.8059,
"step": 3875
},
{
"epoch": 0.033673574950860374,
"grad_norm": 1.0655525922775269,
"learning_rate": 4.91731463569418e-05,
"loss": 0.81,
"step": 3906
},
{
"epoch": 0.03394082554570847,
"grad_norm": 1.223928451538086,
"learning_rate": 4.915158964554312e-05,
"loss": 0.8175,
"step": 3937
},
{
"epoch": 0.03420807614055657,
"grad_norm": 1.0895342826843262,
"learning_rate": 4.912976038673786e-05,
"loss": 0.7992,
"step": 3968
},
{
"epoch": 0.03447532673540467,
"grad_norm": 1.1117792129516602,
"learning_rate": 4.9107658826866254e-05,
"loss": 0.8122,
"step": 3999
},
{
"epoch": 0.034742577330252766,
"grad_norm": 1.0806598663330078,
"learning_rate": 4.908528521534139e-05,
"loss": 0.811,
"step": 4030
},
{
"epoch": 0.035009827925100864,
"grad_norm": 0.7528677582740784,
"learning_rate": 4.906263980464644e-05,
"loss": 0.8036,
"step": 4061
},
{
"epoch": 0.03527707851994896,
"grad_norm": 1.1509230136871338,
"learning_rate": 4.903972285033178e-05,
"loss": 0.8047,
"step": 4092
},
{
"epoch": 0.03554432911479706,
"grad_norm": 1.1190931797027588,
"learning_rate": 4.901653461101213e-05,
"loss": 0.8123,
"step": 4123
},
{
"epoch": 0.03581157970964516,
"grad_norm": 0.8739308714866638,
"learning_rate": 4.8993075348363626e-05,
"loss": 0.8015,
"step": 4154
},
{
"epoch": 0.036078830304493256,
"grad_norm": 0.9889201521873474,
"learning_rate": 4.896934532712084e-05,
"loss": 0.8056,
"step": 4185
},
{
"epoch": 0.036346080899341354,
"grad_norm": 1.1964142322540283,
"learning_rate": 4.8945344815073846e-05,
"loss": 0.8083,
"step": 4216
},
{
"epoch": 0.03661333149418945,
"grad_norm": 1.0793588161468506,
"learning_rate": 4.892107408306516e-05,
"loss": 0.7976,
"step": 4247
},
{
"epoch": 0.03688058208903755,
"grad_norm": 1.2313084602355957,
"learning_rate": 4.889653340498669e-05,
"loss": 0.7954,
"step": 4278
},
{
"epoch": 0.03714783268388565,
"grad_norm": 1.1145700216293335,
"learning_rate": 4.8871723057776664e-05,
"loss": 0.7947,
"step": 4309
},
{
"epoch": 0.037415083278733746,
"grad_norm": 0.9714220762252808,
"learning_rate": 4.8846643321416476e-05,
"loss": 0.7973,
"step": 4340
},
{
"epoch": 0.03768233387358185,
"grad_norm": 1.0865004062652588,
"learning_rate": 4.882129447892753e-05,
"loss": 0.7918,
"step": 4371
},
{
"epoch": 0.03794958446842995,
"grad_norm": 0.8420351147651672,
"learning_rate": 4.8795676816368076e-05,
"loss": 0.805,
"step": 4402
},
{
"epoch": 0.03821683506327805,
"grad_norm": 1.0622302293777466,
"learning_rate": 4.876979062282995e-05,
"loss": 0.7977,
"step": 4433
},
{
"epoch": 0.038484085658126145,
"grad_norm": 0.8536683320999146,
"learning_rate": 4.8743636190435325e-05,
"loss": 0.7946,
"step": 4464
},
{
"epoch": 0.03875133625297424,
"grad_norm": 0.9905999302864075,
"learning_rate": 4.871721381433344e-05,
"loss": 0.7932,
"step": 4495
},
{
"epoch": 0.03901858684782234,
"grad_norm": 1.005560278892517,
"learning_rate": 4.869052379269719e-05,
"loss": 0.7956,
"step": 4526
},
{
"epoch": 0.03928583744267044,
"grad_norm": 0.8386696577072144,
"learning_rate": 4.866356642671985e-05,
"loss": 0.7865,
"step": 4557
},
{
"epoch": 0.03955308803751854,
"grad_norm": 0.8390759229660034,
"learning_rate": 4.8636342020611634e-05,
"loss": 0.7925,
"step": 4588
},
{
"epoch": 0.039820338632366635,
"grad_norm": 0.7457160949707031,
"learning_rate": 4.860885088159626e-05,
"loss": 0.7911,
"step": 4619
},
{
"epoch": 0.04008758922721473,
"grad_norm": 0.9717801213264465,
"learning_rate": 4.858109331990751e-05,
"loss": 0.7917,
"step": 4650
},
{
"epoch": 0.04035483982206283,
"grad_norm": 0.8517304062843323,
"learning_rate": 4.855306964878567e-05,
"loss": 0.7966,
"step": 4681
},
{
"epoch": 0.04062209041691093,
"grad_norm": 0.7597792744636536,
"learning_rate": 4.8524780184474084e-05,
"loss": 0.7798,
"step": 4712
},
{
"epoch": 0.04088934101175903,
"grad_norm": 0.9633410573005676,
"learning_rate": 4.8496225246215496e-05,
"loss": 0.784,
"step": 4743
},
{
"epoch": 0.041156591606607125,
"grad_norm": 0.8668884634971619,
"learning_rate": 4.8467405156248505e-05,
"loss": 0.7925,
"step": 4774
},
{
"epoch": 0.04142384220145522,
"grad_norm": 1.348170280456543,
"learning_rate": 4.843832023980392e-05,
"loss": 0.7882,
"step": 4805
},
{
"epoch": 0.04169109279630332,
"grad_norm": 0.9085477590560913,
"learning_rate": 4.840897082510106e-05,
"loss": 0.7834,
"step": 4836
},
{
"epoch": 0.04195834339115142,
"grad_norm": 1.2533594369888306,
"learning_rate": 4.8379357243344084e-05,
"loss": 0.7968,
"step": 4867
},
{
"epoch": 0.04222559398599952,
"grad_norm": 1.0250332355499268,
"learning_rate": 4.8349479828718236e-05,
"loss": 0.7755,
"step": 4898
},
{
"epoch": 0.042492844580847615,
"grad_norm": 0.8610180616378784,
"learning_rate": 4.8319338918386075e-05,
"loss": 0.7819,
"step": 4929
},
{
"epoch": 0.04276009517569571,
"grad_norm": 0.8235286474227905,
"learning_rate": 4.828893485248369e-05,
"loss": 0.7834,
"step": 4960
},
{
"epoch": 0.04302734577054381,
"grad_norm": 0.95796799659729,
"learning_rate": 4.825826797411682e-05,
"loss": 0.7822,
"step": 4991
},
{
"epoch": 0.04329459636539191,
"grad_norm": 1.022430419921875,
"learning_rate": 4.822733862935702e-05,
"loss": 0.781,
"step": 5022
},
{
"epoch": 0.04356184696024001,
"grad_norm": 0.9080584645271301,
"learning_rate": 4.819614716723775e-05,
"loss": 0.783,
"step": 5053
},
{
"epoch": 0.043829097555088105,
"grad_norm": 0.9808719158172607,
"learning_rate": 4.8164693939750425e-05,
"loss": 0.7838,
"step": 5084
},
{
"epoch": 0.0440963481499362,
"grad_norm": 0.8997575640678406,
"learning_rate": 4.813297930184042e-05,
"loss": 0.7816,
"step": 5115
},
{
"epoch": 0.0443635987447843,
"grad_norm": 0.8737884163856506,
"learning_rate": 4.810100361140314e-05,
"loss": 0.7785,
"step": 5146
},
{
"epoch": 0.0446308493396324,
"grad_norm": 0.7924982309341431,
"learning_rate": 4.8068767229279885e-05,
"loss": 0.7752,
"step": 5177
},
{
"epoch": 0.0448980999344805,
"grad_norm": 1.1685878038406372,
"learning_rate": 4.8036270519253854e-05,
"loss": 0.7881,
"step": 5208
},
{
"epoch": 0.045165350529328595,
"grad_norm": 1.0199975967407227,
"learning_rate": 4.8003513848046e-05,
"loss": 0.7663,
"step": 5239
},
{
"epoch": 0.04543260112417669,
"grad_norm": 0.8155254125595093,
"learning_rate": 4.79704975853109e-05,
"loss": 0.7775,
"step": 5270
},
{
"epoch": 0.04569985171902479,
"grad_norm": 0.976006805896759,
"learning_rate": 4.793722210363262e-05,
"loss": 0.7797,
"step": 5301
},
{
"epoch": 0.04596710231387289,
"grad_norm": 1.1252824068069458,
"learning_rate": 4.7903687778520414e-05,
"loss": 0.7824,
"step": 5332
},
{
"epoch": 0.046234352908720994,
"grad_norm": 0.7603053450584412,
"learning_rate": 4.7869894988404593e-05,
"loss": 0.7759,
"step": 5363
},
{
"epoch": 0.04650160350356909,
"grad_norm": 1.007767915725708,
"learning_rate": 4.783584411463221e-05,
"loss": 0.7832,
"step": 5394
},
{
"epoch": 0.04676885409841719,
"grad_norm": 1.1207255125045776,
"learning_rate": 4.780153554146274e-05,
"loss": 0.7849,
"step": 5425
},
{
"epoch": 0.04703610469326529,
"grad_norm": 1.0699808597564697,
"learning_rate": 4.7766969656063766e-05,
"loss": 0.7814,
"step": 5456
},
{
"epoch": 0.047303355288113386,
"grad_norm": 0.8154749274253845,
"learning_rate": 4.773214684850662e-05,
"loss": 0.7806,
"step": 5487
},
{
"epoch": 0.047570605882961484,
"grad_norm": 0.7383331060409546,
"learning_rate": 4.769706751176193e-05,
"loss": 0.7765,
"step": 5518
},
{
"epoch": 0.04783785647780958,
"grad_norm": 0.9729552865028381,
"learning_rate": 4.7661732041695264e-05,
"loss": 0.7754,
"step": 5549
},
{
"epoch": 0.04810510707265768,
"grad_norm": 0.9568310379981995,
"learning_rate": 4.762614083706258e-05,
"loss": 0.7733,
"step": 5580
},
{
"epoch": 0.04837235766750578,
"grad_norm": 1.0747005939483643,
"learning_rate": 4.759029429950581e-05,
"loss": 0.7764,
"step": 5611
},
{
"epoch": 0.048639608262353876,
"grad_norm": 1.2053782939910889,
"learning_rate": 4.7554192833548235e-05,
"loss": 0.7725,
"step": 5642
},
{
"epoch": 0.048906858857201974,
"grad_norm": 0.8168059587478638,
"learning_rate": 4.751783684659e-05,
"loss": 0.7779,
"step": 5673
},
{
"epoch": 0.04917410945205007,
"grad_norm": 0.812278151512146,
"learning_rate": 4.748122674890348e-05,
"loss": 0.7884,
"step": 5704
},
{
"epoch": 0.04944136004689817,
"grad_norm": 0.9640679359436035,
"learning_rate": 4.7444362953628654e-05,
"loss": 0.7794,
"step": 5735
},
{
"epoch": 0.04970861064174627,
"grad_norm": 0.8488368988037109,
"learning_rate": 4.7407245876768424e-05,
"loss": 0.7705,
"step": 5766
},
{
"epoch": 0.049975861236594366,
"grad_norm": 0.7826496362686157,
"learning_rate": 4.736987593718397e-05,
"loss": 0.7665,
"step": 5797
},
{
"epoch": 0.050243111831442464,
"grad_norm": 0.7388876676559448,
"learning_rate": 4.733225355658999e-05,
"loss": 0.7715,
"step": 5828
},
{
"epoch": 0.05051036242629056,
"grad_norm": 0.7776780128479004,
"learning_rate": 4.7294379159549926e-05,
"loss": 0.7777,
"step": 5859
},
{
"epoch": 0.05077761302113866,
"grad_norm": 1.1687864065170288,
"learning_rate": 4.725625317347119e-05,
"loss": 0.7698,
"step": 5890
},
{
"epoch": 0.05104486361598676,
"grad_norm": 1.069354772567749,
"learning_rate": 4.7217876028600374e-05,
"loss": 0.7671,
"step": 5921
},
{
"epoch": 0.051312114210834855,
"grad_norm": 0.955895185470581,
"learning_rate": 4.717924815801832e-05,
"loss": 0.7641,
"step": 5952
},
{
"epoch": 0.05157936480568295,
"grad_norm": 0.8063961267471313,
"learning_rate": 4.714036999763532e-05,
"loss": 0.772,
"step": 5983
},
{
"epoch": 0.05184661540053105,
"grad_norm": 0.9584303498268127,
"learning_rate": 4.7101241986186116e-05,
"loss": 0.762,
"step": 6014
},
{
"epoch": 0.05211386599537915,
"grad_norm": 0.9711636900901794,
"learning_rate": 4.7061864565225e-05,
"loss": 0.7742,
"step": 6045
},
{
"epoch": 0.05238111659022725,
"grad_norm": 0.8177659511566162,
"learning_rate": 4.702223817912081e-05,
"loss": 0.7532,
"step": 6076
},
{
"epoch": 0.052648367185075345,
"grad_norm": 0.7204889059066772,
"learning_rate": 4.698236327505195e-05,
"loss": 0.7674,
"step": 6107
},
{
"epoch": 0.05291561777992344,
"grad_norm": 0.7740707993507385,
"learning_rate": 4.694224030300127e-05,
"loss": 0.759,
"step": 6138
},
{
"epoch": 0.05318286837477154,
"grad_norm": 0.9027112126350403,
"learning_rate": 4.690186971575107e-05,
"loss": 0.7606,
"step": 6169
},
{
"epoch": 0.05345011896961964,
"grad_norm": 0.8888375759124756,
"learning_rate": 4.6861251968877916e-05,
"loss": 0.7668,
"step": 6200
},
{
"epoch": 0.05371736956446774,
"grad_norm": 0.7049738764762878,
"learning_rate": 4.68203875207476e-05,
"loss": 0.7678,
"step": 6231
},
{
"epoch": 0.053984620159315835,
"grad_norm": 0.8254877924919128,
"learning_rate": 4.677927683250983e-05,
"loss": 0.766,
"step": 6262
},
{
"epoch": 0.05425187075416393,
"grad_norm": 0.8231219053268433,
"learning_rate": 4.6737920368093156e-05,
"loss": 0.7577,
"step": 6293
},
{
"epoch": 0.05451912134901204,
"grad_norm": 0.859380841255188,
"learning_rate": 4.669631859419965e-05,
"loss": 0.7613,
"step": 6324
},
{
"epoch": 0.054786371943860136,
"grad_norm": 0.7607110142707825,
"learning_rate": 4.6654471980299676e-05,
"loss": 0.7622,
"step": 6355
},
{
"epoch": 0.055053622538708234,
"grad_norm": 0.8904584646224976,
"learning_rate": 4.661238099862658e-05,
"loss": 0.7622,
"step": 6386
},
{
"epoch": 0.05532087313355633,
"grad_norm": 0.8488460183143616,
"learning_rate": 4.657004612417138e-05,
"loss": 0.7588,
"step": 6417
},
{
"epoch": 0.05558812372840443,
"grad_norm": 0.7917813062667847,
"learning_rate": 4.6527467834677374e-05,
"loss": 0.7712,
"step": 6448
},
{
"epoch": 0.05585537432325253,
"grad_norm": 0.830741822719574,
"learning_rate": 4.648464661063478e-05,
"loss": 0.7615,
"step": 6479
},
{
"epoch": 0.056122624918100626,
"grad_norm": 0.9170836806297302,
"learning_rate": 4.6441582935275264e-05,
"loss": 0.7572,
"step": 6510
},
{
"epoch": 0.056389875512948724,
"grad_norm": 0.8450523614883423,
"learning_rate": 4.6398277294566586e-05,
"loss": 0.7497,
"step": 6541
},
{
"epoch": 0.05665712610779682,
"grad_norm": 0.8187207579612732,
"learning_rate": 4.6354730177207e-05,
"loss": 0.7684,
"step": 6572
},
{
"epoch": 0.05692437670264492,
"grad_norm": 0.8315821886062622,
"learning_rate": 4.6310942074619787e-05,
"loss": 0.7685,
"step": 6603
},
{
"epoch": 0.05719162729749302,
"grad_norm": 1.0876758098602295,
"learning_rate": 4.626691348094777e-05,
"loss": 0.7617,
"step": 6634
},
{
"epoch": 0.057458877892341116,
"grad_norm": 0.9182093143463135,
"learning_rate": 4.622264489304762e-05,
"loss": 0.7621,
"step": 6665
},
{
"epoch": 0.057726128487189214,
"grad_norm": 0.6726477742195129,
"learning_rate": 4.617813681048434e-05,
"loss": 0.7637,
"step": 6696
},
{
"epoch": 0.05799337908203731,
"grad_norm": 0.8932876586914062,
"learning_rate": 4.61333897355256e-05,
"loss": 0.7643,
"step": 6727
},
{
"epoch": 0.05826062967688541,
"grad_norm": 0.7158175706863403,
"learning_rate": 4.608840417313604e-05,
"loss": 0.7585,
"step": 6758
},
{
"epoch": 0.05852788027173351,
"grad_norm": 0.7973946928977966,
"learning_rate": 4.6043180630971646e-05,
"loss": 0.7594,
"step": 6789
},
{
"epoch": 0.058795130866581606,
"grad_norm": 0.8311556577682495,
"learning_rate": 4.599771961937391e-05,
"loss": 0.7646,
"step": 6820
},
{
"epoch": 0.059062381461429704,
"grad_norm": 1.623862385749817,
"learning_rate": 4.5952021651364204e-05,
"loss": 0.7528,
"step": 6851
},
{
"epoch": 0.0593296320562778,
"grad_norm": 0.8134210109710693,
"learning_rate": 4.590608724263786e-05,
"loss": 0.752,
"step": 6882
},
{
"epoch": 0.0595968826511259,
"grad_norm": 0.8656156063079834,
"learning_rate": 4.585991691155845e-05,
"loss": 0.7504,
"step": 6913
},
{
"epoch": 0.059864133245974,
"grad_norm": 0.7220660448074341,
"learning_rate": 4.581351117915188e-05,
"loss": 0.7574,
"step": 6944
},
{
"epoch": 0.060131383840822096,
"grad_norm": 0.8750848174095154,
"learning_rate": 4.5766870569100534e-05,
"loss": 0.7442,
"step": 6975
},
{
"epoch": 0.060398634435670194,
"grad_norm": 0.9824672937393188,
"learning_rate": 4.571999560773736e-05,
"loss": 0.7493,
"step": 7006
},
{
"epoch": 0.06066588503051829,
"grad_norm": 1.1746909618377686,
"learning_rate": 4.5672886824039915e-05,
"loss": 0.7615,
"step": 7037
},
{
"epoch": 0.06093313562536639,
"grad_norm": 0.7957736849784851,
"learning_rate": 4.5625544749624435e-05,
"loss": 0.7567,
"step": 7068
},
{
"epoch": 0.06120038622021449,
"grad_norm": 0.7611691355705261,
"learning_rate": 4.5577969918739794e-05,
"loss": 0.7539,
"step": 7099
},
{
"epoch": 0.061467636815062586,
"grad_norm": 0.7961061596870422,
"learning_rate": 4.5530162868261486e-05,
"loss": 0.7549,
"step": 7130
},
{
"epoch": 0.061734887409910684,
"grad_norm": 0.7554512023925781,
"learning_rate": 4.548212413768558e-05,
"loss": 0.7541,
"step": 7161
},
{
"epoch": 0.06200213800475878,
"grad_norm": 1.0506978034973145,
"learning_rate": 4.543385426912261e-05,
"loss": 0.7477,
"step": 7192
},
{
"epoch": 0.06226938859960688,
"grad_norm": 0.8036066889762878,
"learning_rate": 4.53853538072915e-05,
"loss": 0.7455,
"step": 7223
},
{
"epoch": 0.06253663919445498,
"grad_norm": 0.8787280917167664,
"learning_rate": 4.533662329951336e-05,
"loss": 0.747,
"step": 7254
},
{
"epoch": 0.06280388978930308,
"grad_norm": 0.7274677753448486,
"learning_rate": 4.528766329570536e-05,
"loss": 0.7568,
"step": 7285
},
{
"epoch": 0.06307114038415118,
"grad_norm": 0.7349788546562195,
"learning_rate": 4.523847434837447e-05,
"loss": 0.7472,
"step": 7316
},
{
"epoch": 0.06333839097899928,
"grad_norm": 0.7945473790168762,
"learning_rate": 4.518905701261128e-05,
"loss": 0.7581,
"step": 7347
},
{
"epoch": 0.06360564157384738,
"grad_norm": 0.7640956044197083,
"learning_rate": 4.5139411846083715e-05,
"loss": 0.7451,
"step": 7378
},
{
"epoch": 0.06387289216869547,
"grad_norm": 0.7245631217956543,
"learning_rate": 4.508953940903073e-05,
"loss": 0.7437,
"step": 7409
},
{
"epoch": 0.06414014276354357,
"grad_norm": 0.7420781254768372,
"learning_rate": 4.5039440264255994e-05,
"loss": 0.7626,
"step": 7440
},
{
"epoch": 0.06440739335839167,
"grad_norm": 0.774943470954895,
"learning_rate": 4.498911497712155e-05,
"loss": 0.7608,
"step": 7471
},
{
"epoch": 0.06467464395323977,
"grad_norm": 0.7587719559669495,
"learning_rate": 4.493856411554142e-05,
"loss": 0.7631,
"step": 7502
},
{
"epoch": 0.06494189454808787,
"grad_norm": 0.7052931785583496,
"learning_rate": 4.4887788249975206e-05,
"loss": 0.7437,
"step": 7533
},
{
"epoch": 0.06520914514293596,
"grad_norm": 0.8177131414413452,
"learning_rate": 4.4836787953421656e-05,
"loss": 0.75,
"step": 7564
},
{
"epoch": 0.06547639573778406,
"grad_norm": 0.8247653245925903,
"learning_rate": 4.478556380141218e-05,
"loss": 0.7446,
"step": 7595
},
{
"epoch": 0.06574364633263216,
"grad_norm": 0.6302705407142639,
"learning_rate": 4.4734116372004375e-05,
"loss": 0.7459,
"step": 7626
},
{
"epoch": 0.06601089692748026,
"grad_norm": 0.645069420337677,
"learning_rate": 4.4682446245775477e-05,
"loss": 0.7545,
"step": 7657
},
{
"epoch": 0.06627814752232836,
"grad_norm": 0.6515899300575256,
"learning_rate": 4.463055400581586e-05,
"loss": 0.7439,
"step": 7688
},
{
"epoch": 0.06654539811717645,
"grad_norm": 0.8527146577835083,
"learning_rate": 4.4578440237722374e-05,
"loss": 0.7491,
"step": 7719
},
{
"epoch": 0.06681264871202455,
"grad_norm": 0.6546400785446167,
"learning_rate": 4.452610552959183e-05,
"loss": 0.7459,
"step": 7750
},
{
"epoch": 0.06707989930687265,
"grad_norm": 1.2422056198120117,
"learning_rate": 4.447355047201428e-05,
"loss": 0.747,
"step": 7781
},
{
"epoch": 0.06734714990172075,
"grad_norm": 0.7785826325416565,
"learning_rate": 4.4420775658066414e-05,
"loss": 0.7454,
"step": 7812
},
{
"epoch": 0.06761440049656885,
"grad_norm": 0.7051584720611572,
"learning_rate": 4.436778168330484e-05,
"loss": 0.748,
"step": 7843
},
{
"epoch": 0.06788165109141694,
"grad_norm": 0.6893256306648254,
"learning_rate": 4.4314569145759353e-05,
"loss": 0.7466,
"step": 7874
},
{
"epoch": 0.06814890168626504,
"grad_norm": 0.6570892930030823,
"learning_rate": 4.42611386459262e-05,
"loss": 0.7404,
"step": 7905
},
{
"epoch": 0.06841615228111314,
"grad_norm": 1.0755282640457153,
"learning_rate": 4.420749078676133e-05,
"loss": 0.7462,
"step": 7936
},
{
"epoch": 0.06868340287596124,
"grad_norm": 0.9138249158859253,
"learning_rate": 4.4153626173673516e-05,
"loss": 0.7425,
"step": 7967
},
{
"epoch": 0.06895065347080934,
"grad_norm": 0.8147562146186829,
"learning_rate": 4.409954541451762e-05,
"loss": 0.7473,
"step": 7998
},
{
"epoch": 0.06921790406565743,
"grad_norm": 0.9558604955673218,
"learning_rate": 4.404524911958764e-05,
"loss": 0.7372,
"step": 8029
},
{
"epoch": 0.06948515466050553,
"grad_norm": 0.6429873704910278,
"learning_rate": 4.399073790160989e-05,
"loss": 0.7397,
"step": 8060
},
{
"epoch": 0.06975240525535363,
"grad_norm": 0.840124249458313,
"learning_rate": 4.393601237573607e-05,
"loss": 0.7462,
"step": 8091
},
{
"epoch": 0.07001965585020173,
"grad_norm": 0.8520084023475647,
"learning_rate": 4.388107315953628e-05,
"loss": 0.746,
"step": 8122
},
{
"epoch": 0.07028690644504983,
"grad_norm": 0.9427956938743591,
"learning_rate": 4.382592087299212e-05,
"loss": 0.7628,
"step": 8153
},
{
"epoch": 0.07055415703989792,
"grad_norm": 0.6754634380340576,
"learning_rate": 4.377055613848964e-05,
"loss": 0.7387,
"step": 8184
},
{
"epoch": 0.07082140763474602,
"grad_norm": 0.7089795470237732,
"learning_rate": 4.3714979580812355e-05,
"loss": 0.7462,
"step": 8215
},
{
"epoch": 0.07108865822959412,
"grad_norm": 0.6123281717300415,
"learning_rate": 4.365919182713416e-05,
"loss": 0.7363,
"step": 8246
},
{
"epoch": 0.07135590882444222,
"grad_norm": 0.7722983360290527,
"learning_rate": 4.360319350701226e-05,
"loss": 0.7489,
"step": 8277
},
{
"epoch": 0.07162315941929032,
"grad_norm": 0.7888938784599304,
"learning_rate": 4.3546985252380115e-05,
"loss": 0.7407,
"step": 8308
},
{
"epoch": 0.07189041001413841,
"grad_norm": 0.778743326663971,
"learning_rate": 4.349056769754021e-05,
"loss": 0.7412,
"step": 8339
},
{
"epoch": 0.07215766060898651,
"grad_norm": 0.7543514966964722,
"learning_rate": 4.3433941479156994e-05,
"loss": 0.7408,
"step": 8370
},
{
"epoch": 0.07242491120383461,
"grad_norm": 0.8665831089019775,
"learning_rate": 4.3377107236249647e-05,
"loss": 0.7332,
"step": 8401
},
{
"epoch": 0.07269216179868271,
"grad_norm": 0.8533165454864502,
"learning_rate": 4.332006561018488e-05,
"loss": 0.742,
"step": 8432
},
{
"epoch": 0.0729594123935308,
"grad_norm": 0.7969794869422913,
"learning_rate": 4.3262817244669683e-05,
"loss": 0.7412,
"step": 8463
},
{
"epoch": 0.0732266629883789,
"grad_norm": 0.6950485706329346,
"learning_rate": 4.3205362785744083e-05,
"loss": 0.748,
"step": 8494
},
{
"epoch": 0.073493913583227,
"grad_norm": 0.7154302000999451,
"learning_rate": 4.314770288177384e-05,
"loss": 0.7377,
"step": 8525
},
{
"epoch": 0.0737611641780751,
"grad_norm": 0.8694846630096436,
"learning_rate": 4.308983818344313e-05,
"loss": 0.7391,
"step": 8556
},
{
"epoch": 0.0740284147729232,
"grad_norm": 0.6354366540908813,
"learning_rate": 4.3031769343747206e-05,
"loss": 0.7397,
"step": 8587
},
{
"epoch": 0.0742956653677713,
"grad_norm": 0.8266158699989319,
"learning_rate": 4.297349701798505e-05,
"loss": 0.7294,
"step": 8618
},
{
"epoch": 0.0745629159626194,
"grad_norm": 0.8990344405174255,
"learning_rate": 4.2915021863751916e-05,
"loss": 0.7364,
"step": 8649
},
{
"epoch": 0.07483016655746749,
"grad_norm": 0.6664773225784302,
"learning_rate": 4.285634454093198e-05,
"loss": 0.7453,
"step": 8680
},
{
"epoch": 0.07509741715231559,
"grad_norm": 0.7253682017326355,
"learning_rate": 4.279746571169086e-05,
"loss": 0.7383,
"step": 8711
},
{
"epoch": 0.0753646677471637,
"grad_norm": 0.7291272878646851,
"learning_rate": 4.2738386040468136e-05,
"loss": 0.7469,
"step": 8742
},
{
"epoch": 0.0756319183420118,
"grad_norm": 0.7150599956512451,
"learning_rate": 4.2679106193969866e-05,
"loss": 0.7365,
"step": 8773
},
{
"epoch": 0.0758991689368599,
"grad_norm": 0.6730013489723206,
"learning_rate": 4.261962684116106e-05,
"loss": 0.7244,
"step": 8804
},
{
"epoch": 0.076166419531708,
"grad_norm": 0.8112632632255554,
"learning_rate": 4.2559948653258145e-05,
"loss": 0.733,
"step": 8835
},
{
"epoch": 0.0764336701265561,
"grad_norm": 0.8358544707298279,
"learning_rate": 4.250007230372134e-05,
"loss": 0.7423,
"step": 8866
},
{
"epoch": 0.07670092072140419,
"grad_norm": 1.0025874376296997,
"learning_rate": 4.2439998468247126e-05,
"loss": 0.7393,
"step": 8897
},
{
"epoch": 0.07696817131625229,
"grad_norm": 0.7373897433280945,
"learning_rate": 4.2379727824760566e-05,
"loss": 0.7343,
"step": 8928
},
{
"epoch": 0.07723542191110039,
"grad_norm": 0.7370135188102722,
"learning_rate": 4.231926105340768e-05,
"loss": 0.7394,
"step": 8959
},
{
"epoch": 0.07750267250594849,
"grad_norm": 0.7160916924476624,
"learning_rate": 4.225859883654776e-05,
"loss": 0.7426,
"step": 8990
},
{
"epoch": 0.07776992310079658,
"grad_norm": 0.688937783241272,
"learning_rate": 4.219774185874569e-05,
"loss": 0.7345,
"step": 9021
},
{
"epoch": 0.07803717369564468,
"grad_norm": 0.6157529950141907,
"learning_rate": 4.213669080676418e-05,
"loss": 0.7437,
"step": 9052
},
{
"epoch": 0.07830442429049278,
"grad_norm": 0.7043218612670898,
"learning_rate": 4.2075446369556056e-05,
"loss": 0.7346,
"step": 9083
},
{
"epoch": 0.07857167488534088,
"grad_norm": 0.7695103883743286,
"learning_rate": 4.201400923825648e-05,
"loss": 0.7243,
"step": 9114
},
{
"epoch": 0.07883892548018898,
"grad_norm": 0.7572926878929138,
"learning_rate": 4.195238010617511e-05,
"loss": 0.7307,
"step": 9145
},
{
"epoch": 0.07910617607503707,
"grad_norm": 0.7854665517807007,
"learning_rate": 4.1890559668788344e-05,
"loss": 0.7317,
"step": 9176
},
{
"epoch": 0.07937342666988517,
"grad_norm": 0.8939421772956848,
"learning_rate": 4.1828548623731405e-05,
"loss": 0.731,
"step": 9207
},
{
"epoch": 0.07964067726473327,
"grad_norm": 0.799430251121521,
"learning_rate": 4.1766347670790506e-05,
"loss": 0.7322,
"step": 9238
},
{
"epoch": 0.07990792785958137,
"grad_norm": 0.7589206099510193,
"learning_rate": 4.170395751189495e-05,
"loss": 0.7273,
"step": 9269
},
{
"epoch": 0.08017517845442947,
"grad_norm": 0.5817182064056396,
"learning_rate": 4.164137885110921e-05,
"loss": 0.7357,
"step": 9300
},
{
"epoch": 0.08044242904927756,
"grad_norm": 1.0037076473236084,
"learning_rate": 4.157861239462495e-05,
"loss": 0.7296,
"step": 9331
},
{
"epoch": 0.08070967964412566,
"grad_norm": 0.8309381604194641,
"learning_rate": 4.1515658850753114e-05,
"loss": 0.7324,
"step": 9362
},
{
"epoch": 0.08097693023897376,
"grad_norm": 0.7340037226676941,
"learning_rate": 4.145251892991588e-05,
"loss": 0.7343,
"step": 9393
},
{
"epoch": 0.08124418083382186,
"grad_norm": 0.6870521903038025,
"learning_rate": 4.138919334463868e-05,
"loss": 0.7314,
"step": 9424
},
{
"epoch": 0.08151143142866996,
"grad_norm": 0.6890373826026917,
"learning_rate": 4.1325682809542124e-05,
"loss": 0.7316,
"step": 9455
},
{
"epoch": 0.08177868202351805,
"grad_norm": 0.7693931460380554,
"learning_rate": 4.126198804133398e-05,
"loss": 0.7332,
"step": 9486
},
{
"epoch": 0.08204593261836615,
"grad_norm": 0.8764196634292603,
"learning_rate": 4.1198109758801055e-05,
"loss": 0.7267,
"step": 9517
},
{
"epoch": 0.08231318321321425,
"grad_norm": 0.7419659495353699,
"learning_rate": 4.113404868280107e-05,
"loss": 0.7344,
"step": 9548
},
{
"epoch": 0.08258043380806235,
"grad_norm": 0.754749596118927,
"learning_rate": 4.106980553625457e-05,
"loss": 0.7312,
"step": 9579
},
{
"epoch": 0.08284768440291045,
"grad_norm": 0.9303844571113586,
"learning_rate": 4.100538104413674e-05,
"loss": 0.7359,
"step": 9610
},
{
"epoch": 0.08311493499775854,
"grad_norm": 0.8202078342437744,
"learning_rate": 4.09407759334692e-05,
"loss": 0.7362,
"step": 9641
},
{
"epoch": 0.08338218559260664,
"grad_norm": 0.8347437977790833,
"learning_rate": 4.087599093331186e-05,
"loss": 0.7188,
"step": 9672
},
{
"epoch": 0.08364943618745474,
"grad_norm": 0.8407759070396423,
"learning_rate": 4.081102677475462e-05,
"loss": 0.7326,
"step": 9703
},
{
"epoch": 0.08391668678230284,
"grad_norm": 0.745712399482727,
"learning_rate": 4.0745884190909194e-05,
"loss": 0.7286,
"step": 9734
},
{
"epoch": 0.08418393737715094,
"grad_norm": 0.630031168460846,
"learning_rate": 4.0680563916900796e-05,
"loss": 0.7262,
"step": 9765
},
{
"epoch": 0.08445118797199903,
"grad_norm": 0.8924230933189392,
"learning_rate": 4.0615066689859815e-05,
"loss": 0.7315,
"step": 9796
},
{
"epoch": 0.08471843856684713,
"grad_norm": 0.7842046022415161,
"learning_rate": 4.0549393248913584e-05,
"loss": 0.7309,
"step": 9827
},
{
"epoch": 0.08498568916169523,
"grad_norm": 0.783723771572113,
"learning_rate": 4.048354433517794e-05,
"loss": 0.7307,
"step": 9858
},
{
"epoch": 0.08525293975654333,
"grad_norm": 0.6454151272773743,
"learning_rate": 4.0417520691748916e-05,
"loss": 0.7277,
"step": 9889
},
{
"epoch": 0.08552019035139143,
"grad_norm": 0.781913697719574,
"learning_rate": 4.035132306369438e-05,
"loss": 0.7267,
"step": 9920
},
{
"epoch": 0.08578744094623952,
"grad_norm": 0.7176892161369324,
"learning_rate": 4.028495219804555e-05,
"loss": 0.7309,
"step": 9951
},
{
"epoch": 0.08605469154108762,
"grad_norm": 0.9282059669494629,
"learning_rate": 4.021840884378864e-05,
"loss": 0.7483,
"step": 9982
},
{
"epoch": 0.08632194213593572,
"grad_norm": 1.0737770795822144,
"learning_rate": 4.015169375185633e-05,
"loss": 0.7262,
"step": 10013
},
{
"epoch": 0.08658919273078382,
"grad_norm": 0.7671861052513123,
"learning_rate": 4.0084807675119396e-05,
"loss": 0.7257,
"step": 10044
},
{
"epoch": 0.08685644332563192,
"grad_norm": 0.8341594934463501,
"learning_rate": 4.0017751368378106e-05,
"loss": 0.72,
"step": 10075
},
{
"epoch": 0.08712369392048001,
"grad_norm": 0.9360256195068359,
"learning_rate": 3.995052558835377e-05,
"loss": 0.7277,
"step": 10106
},
{
"epoch": 0.08739094451532811,
"grad_norm": 0.6352283358573914,
"learning_rate": 3.988313109368017e-05,
"loss": 0.7258,
"step": 10137
},
{
"epoch": 0.08765819511017621,
"grad_norm": 0.6871002912521362,
"learning_rate": 3.981556864489504e-05,
"loss": 0.7184,
"step": 10168
},
{
"epoch": 0.08792544570502431,
"grad_norm": 0.6639994382858276,
"learning_rate": 3.974783900443142e-05,
"loss": 0.727,
"step": 10199
},
{
"epoch": 0.0881926962998724,
"grad_norm": 0.7937990427017212,
"learning_rate": 3.9679942936609095e-05,
"loss": 0.7286,
"step": 10230
},
{
"epoch": 0.0884599468947205,
"grad_norm": 0.6229277849197388,
"learning_rate": 3.961188120762596e-05,
"loss": 0.7223,
"step": 10261
},
{
"epoch": 0.0887271974895686,
"grad_norm": 0.6489992737770081,
"learning_rate": 3.954365458554938e-05,
"loss": 0.7247,
"step": 10292
},
{
"epoch": 0.0889944480844167,
"grad_norm": 0.7326925992965698,
"learning_rate": 3.947526384030751e-05,
"loss": 0.7334,
"step": 10323
},
{
"epoch": 0.0892616986792648,
"grad_norm": 0.7646430730819702,
"learning_rate": 3.9406709743680624e-05,
"loss": 0.7278,
"step": 10354
},
{
"epoch": 0.0895289492741129,
"grad_norm": 0.7933580279350281,
"learning_rate": 3.9337993069292366e-05,
"loss": 0.7205,
"step": 10385
},
{
"epoch": 0.089796199868961,
"grad_norm": 1.0031253099441528,
"learning_rate": 3.926911459260109e-05,
"loss": 0.7224,
"step": 10416
},
{
"epoch": 0.09006345046380909,
"grad_norm": 0.9162613153457642,
"learning_rate": 3.920007509089102e-05,
"loss": 0.7382,
"step": 10447
},
{
"epoch": 0.09033070105865719,
"grad_norm": 0.8875689506530762,
"learning_rate": 3.913087534326357e-05,
"loss": 0.7224,
"step": 10478
},
{
"epoch": 0.09059795165350529,
"grad_norm": 0.8086528778076172,
"learning_rate": 3.9061516130628475e-05,
"loss": 0.7278,
"step": 10509
},
{
"epoch": 0.09086520224835339,
"grad_norm": 0.6289557218551636,
"learning_rate": 3.8991998235695025e-05,
"loss": 0.722,
"step": 10540
},
{
"epoch": 0.09113245284320148,
"grad_norm": 0.7295290231704712,
"learning_rate": 3.8922322442963224e-05,
"loss": 0.7188,
"step": 10571
},
{
"epoch": 0.09139970343804958,
"grad_norm": 0.7659655809402466,
"learning_rate": 3.885248953871491e-05,
"loss": 0.7259,
"step": 10602
},
{
"epoch": 0.09166695403289768,
"grad_norm": 0.7740941047668457,
"learning_rate": 3.8782500311004915e-05,
"loss": 0.7271,
"step": 10633
},
{
"epoch": 0.09193420462774578,
"grad_norm": 0.6929652094841003,
"learning_rate": 3.871235554965218e-05,
"loss": 0.7222,
"step": 10664
},
{
"epoch": 0.09220145522259389,
"grad_norm": 0.7807513475418091,
"learning_rate": 3.864205604623078e-05,
"loss": 0.7106,
"step": 10695
},
{
"epoch": 0.09246870581744199,
"grad_norm": 0.6159222722053528,
"learning_rate": 3.857160259406107e-05,
"loss": 0.7198,
"step": 10726
},
{
"epoch": 0.09273595641229009,
"grad_norm": 0.6694709658622742,
"learning_rate": 3.8500995988200674e-05,
"loss": 0.7087,
"step": 10757
},
{
"epoch": 0.09300320700713818,
"grad_norm": 0.7234062552452087,
"learning_rate": 3.843023702543556e-05,
"loss": 0.7267,
"step": 10788
},
{
"epoch": 0.09327045760198628,
"grad_norm": 0.7624971270561218,
"learning_rate": 3.8359326504270984e-05,
"loss": 0.725,
"step": 10819
},
{
"epoch": 0.09353770819683438,
"grad_norm": 0.7923869490623474,
"learning_rate": 3.828826522492255e-05,
"loss": 0.7226,
"step": 10850
},
{
"epoch": 0.09380495879168248,
"grad_norm": 0.7016533613204956,
"learning_rate": 3.821705398930713e-05,
"loss": 0.7295,
"step": 10881
},
{
"epoch": 0.09407220938653058,
"grad_norm": 0.6347571015357971,
"learning_rate": 3.814569360103385e-05,
"loss": 0.7129,
"step": 10912
},
{
"epoch": 0.09433945998137867,
"grad_norm": 0.7254231572151184,
"learning_rate": 3.807418486539499e-05,
"loss": 0.7181,
"step": 10943
},
{
"epoch": 0.09460671057622677,
"grad_norm": 0.7181808948516846,
"learning_rate": 3.80025285893569e-05,
"loss": 0.7179,
"step": 10974
},
{
"epoch": 0.09487396117107487,
"grad_norm": 0.7308523058891296,
"learning_rate": 3.793072558155093e-05,
"loss": 0.7243,
"step": 11005
},
{
"epoch": 0.09514121176592297,
"grad_norm": 0.8678077459335327,
"learning_rate": 3.785877665226426e-05,
"loss": 0.7254,
"step": 11036
},
{
"epoch": 0.09540846236077107,
"grad_norm": 0.6927953958511353,
"learning_rate": 3.778668261343079e-05,
"loss": 0.7242,
"step": 11067
},
{
"epoch": 0.09567571295561916,
"grad_norm": 0.5659973621368408,
"learning_rate": 3.771444427862192e-05,
"loss": 0.7157,
"step": 11098
},
{
"epoch": 0.09594296355046726,
"grad_norm": 0.8592713475227356,
"learning_rate": 3.7642062463037465e-05,
"loss": 0.7117,
"step": 11129
},
{
"epoch": 0.09621021414531536,
"grad_norm": 0.6449655294418335,
"learning_rate": 3.7569537983496373e-05,
"loss": 0.7197,
"step": 11160
},
{
"epoch": 0.09647746474016346,
"grad_norm": 0.7201160192489624,
"learning_rate": 3.749687165842753e-05,
"loss": 0.7189,
"step": 11191
},
{
"epoch": 0.09674471533501156,
"grad_norm": 0.6577209830284119,
"learning_rate": 3.7424064307860536e-05,
"loss": 0.7235,
"step": 11222
},
{
"epoch": 0.09701196592985965,
"grad_norm": 0.7682704329490662,
"learning_rate": 3.735111675341645e-05,
"loss": 0.7183,
"step": 11253
},
{
"epoch": 0.09727921652470775,
"grad_norm": 0.6187945604324341,
"learning_rate": 3.7278029818298524e-05,
"loss": 0.7046,
"step": 11284
},
{
"epoch": 0.09754646711955585,
"grad_norm": 0.6507108807563782,
"learning_rate": 3.720480432728287e-05,
"loss": 0.7253,
"step": 11315
},
{
"epoch": 0.09781371771440395,
"grad_norm": 0.6345822215080261,
"learning_rate": 3.71314411067092e-05,
"loss": 0.719,
"step": 11346
},
{
"epoch": 0.09808096830925205,
"grad_norm": 0.6644492149353027,
"learning_rate": 3.70579409844715e-05,
"loss": 0.7241,
"step": 11377
},
{
"epoch": 0.09834821890410014,
"grad_norm": 0.6215960383415222,
"learning_rate": 3.698430479000865e-05,
"loss": 0.7196,
"step": 11408
},
{
"epoch": 0.09861546949894824,
"grad_norm": 0.8039011359214783,
"learning_rate": 3.691053335429509e-05,
"loss": 0.7212,
"step": 11439
},
{
"epoch": 0.09888272009379634,
"grad_norm": 0.9103333353996277,
"learning_rate": 3.683662750983147e-05,
"loss": 0.7245,
"step": 11470
},
{
"epoch": 0.09914997068864444,
"grad_norm": 0.7850680947303772,
"learning_rate": 3.676258809063518e-05,
"loss": 0.7249,
"step": 11501
},
{
"epoch": 0.09941722128349254,
"grad_norm": 0.6277598738670349,
"learning_rate": 3.6688415932231004e-05,
"loss": 0.7234,
"step": 11532
},
{
"epoch": 0.09968447187834063,
"grad_norm": 0.6920484900474548,
"learning_rate": 3.661411187164166e-05,
"loss": 0.7126,
"step": 11563
},
{
"epoch": 0.09995172247318873,
"grad_norm": 0.6442444324493408,
"learning_rate": 3.65396767473784e-05,
"loss": 0.7148,
"step": 11594
},
{
"epoch": 0.10021897306803683,
"grad_norm": 0.741570770740509,
"learning_rate": 3.6465111399431465e-05,
"loss": 0.7216,
"step": 11625
},
{
"epoch": 0.10048622366288493,
"grad_norm": 0.7592952251434326,
"learning_rate": 3.6390416669260674e-05,
"loss": 0.7143,
"step": 11656
},
{
"epoch": 0.10075347425773303,
"grad_norm": 0.6915357708930969,
"learning_rate": 3.63155933997859e-05,
"loss": 0.7166,
"step": 11687
},
{
"epoch": 0.10102072485258112,
"grad_norm": 0.8318819403648376,
"learning_rate": 3.624064243537758e-05,
"loss": 0.7092,
"step": 11718
},
{
"epoch": 0.10128797544742922,
"grad_norm": 0.8602069616317749,
"learning_rate": 3.616556462184716e-05,
"loss": 0.7196,
"step": 11749
},
{
"epoch": 0.10155522604227732,
"grad_norm": 0.7524039149284363,
"learning_rate": 3.609036080643755e-05,
"loss": 0.7115,
"step": 11780
},
{
"epoch": 0.10182247663712542,
"grad_norm": 0.6467952728271484,
"learning_rate": 3.60150318378136e-05,
"loss": 0.722,
"step": 11811
},
{
"epoch": 0.10208972723197351,
"grad_norm": 0.6989237070083618,
"learning_rate": 3.5939578566052465e-05,
"loss": 0.7221,
"step": 11842
},
{
"epoch": 0.10235697782682161,
"grad_norm": 0.5162470936775208,
"learning_rate": 3.586400184263408e-05,
"loss": 0.7134,
"step": 11873
},
{
"epoch": 0.10262422842166971,
"grad_norm": 0.7181512713432312,
"learning_rate": 3.578830252043148e-05,
"loss": 0.7187,
"step": 11904
},
{
"epoch": 0.10289147901651781,
"grad_norm": 0.6398524641990662,
"learning_rate": 3.571248145370125e-05,
"loss": 0.7089,
"step": 11935
},
{
"epoch": 0.1031587296113659,
"grad_norm": 0.7089547514915466,
"learning_rate": 3.5636539498073794e-05,
"loss": 0.7192,
"step": 11966
},
{
"epoch": 0.103425980206214,
"grad_norm": 0.8613171577453613,
"learning_rate": 3.556047751054378e-05,
"loss": 0.7192,
"step": 11997
},
{
"epoch": 0.1036932308010621,
"grad_norm": 0.6494088768959045,
"learning_rate": 3.548429634946039e-05,
"loss": 0.7162,
"step": 12028
},
{
"epoch": 0.1039604813959102,
"grad_norm": 0.6777063012123108,
"learning_rate": 3.540799687451768e-05,
"loss": 0.7252,
"step": 12059
},
{
"epoch": 0.1042277319907583,
"grad_norm": 0.7733666300773621,
"learning_rate": 3.533157994674485e-05,
"loss": 0.7196,
"step": 12090
},
{
"epoch": 0.1044949825856064,
"grad_norm": 0.6725175976753235,
"learning_rate": 3.5255046428496546e-05,
"loss": 0.7191,
"step": 12121
},
{
"epoch": 0.1047622331804545,
"grad_norm": 0.6679649353027344,
"learning_rate": 3.517839718344311e-05,
"loss": 0.7207,
"step": 12152
},
{
"epoch": 0.10502948377530259,
"grad_norm": 0.7206579446792603,
"learning_rate": 3.510163307656086e-05,
"loss": 0.7166,
"step": 12183
},
{
"epoch": 0.10529673437015069,
"grad_norm": 0.6833537817001343,
"learning_rate": 3.5024754974122324e-05,
"loss": 0.7143,
"step": 12214
},
{
"epoch": 0.10556398496499879,
"grad_norm": 0.7469558119773865,
"learning_rate": 3.494776374368643e-05,
"loss": 0.7131,
"step": 12245
},
{
"epoch": 0.10583123555984689,
"grad_norm": 0.665662944316864,
"learning_rate": 3.4870660254088724e-05,
"loss": 0.721,
"step": 12276
},
{
"epoch": 0.10609848615469498,
"grad_norm": 0.6722923517227173,
"learning_rate": 3.479344537543164e-05,
"loss": 0.717,
"step": 12307
},
{
"epoch": 0.10636573674954308,
"grad_norm": 0.7240269780158997,
"learning_rate": 3.4716119979074565e-05,
"loss": 0.7188,
"step": 12338
},
{
"epoch": 0.10663298734439118,
"grad_norm": 0.6044260859489441,
"learning_rate": 3.463868493762412e-05,
"loss": 0.7114,
"step": 12369
},
{
"epoch": 0.10690023793923928,
"grad_norm": 0.5933346748352051,
"learning_rate": 3.456114112492418e-05,
"loss": 0.7224,
"step": 12400
},
{
"epoch": 0.10716748853408738,
"grad_norm": 0.6202985644340515,
"learning_rate": 3.4483489416046164e-05,
"loss": 0.7114,
"step": 12431
},
{
"epoch": 0.10743473912893547,
"grad_norm": 0.7195279002189636,
"learning_rate": 3.440573068727905e-05,
"loss": 0.7074,
"step": 12462
},
{
"epoch": 0.10770198972378357,
"grad_norm": 0.6577067971229553,
"learning_rate": 3.4327865816119495e-05,
"loss": 0.7103,
"step": 12493
},
{
"epoch": 0.10796924031863167,
"grad_norm": 0.5871012806892395,
"learning_rate": 3.4249895681262025e-05,
"loss": 0.7143,
"step": 12524
},
{
"epoch": 0.10823649091347977,
"grad_norm": 0.7182403206825256,
"learning_rate": 3.417182116258899e-05,
"loss": 0.7131,
"step": 12555
},
{
"epoch": 0.10850374150832787,
"grad_norm": 0.7164127230644226,
"learning_rate": 3.409364314116074e-05,
"loss": 0.7078,
"step": 12586
},
{
"epoch": 0.10877099210317596,
"grad_norm": 0.6984509825706482,
"learning_rate": 3.401536249920559e-05,
"loss": 0.7052,
"step": 12617
},
{
"epoch": 0.10903824269802408,
"grad_norm": 0.7046581506729126,
"learning_rate": 3.393698012010998e-05,
"loss": 0.7087,
"step": 12648
},
{
"epoch": 0.10930549329287217,
"grad_norm": 0.7870312333106995,
"learning_rate": 3.385849688840839e-05,
"loss": 0.7073,
"step": 12679
},
{
"epoch": 0.10957274388772027,
"grad_norm": 0.6933169960975647,
"learning_rate": 3.3779913689773414e-05,
"loss": 0.7123,
"step": 12710
},
{
"epoch": 0.10983999448256837,
"grad_norm": 0.8421098589897156,
"learning_rate": 3.370123141100578e-05,
"loss": 0.7074,
"step": 12741
},
{
"epoch": 0.11010724507741647,
"grad_norm": 0.6738384962081909,
"learning_rate": 3.3622450940024305e-05,
"loss": 0.7102,
"step": 12772
},
{
"epoch": 0.11037449567226457,
"grad_norm": 1.22402024269104,
"learning_rate": 3.35435731658559e-05,
"loss": 0.7096,
"step": 12803
},
{
"epoch": 0.11064174626711266,
"grad_norm": 0.637190580368042,
"learning_rate": 3.346459897862552e-05,
"loss": 0.7094,
"step": 12834
},
{
"epoch": 0.11090899686196076,
"grad_norm": 0.6984738111495972,
"learning_rate": 3.338552926954613e-05,
"loss": 0.7039,
"step": 12865
},
{
"epoch": 0.11117624745680886,
"grad_norm": 0.6750801801681519,
"learning_rate": 3.330636493090868e-05,
"loss": 0.7109,
"step": 12896
},
{
"epoch": 0.11144349805165696,
"grad_norm": 0.605526864528656,
"learning_rate": 3.322710685607193e-05,
"loss": 0.7107,
"step": 12927
},
{
"epoch": 0.11171074864650506,
"grad_norm": 0.6708593964576721,
"learning_rate": 3.314775593945251e-05,
"loss": 0.7115,
"step": 12958
},
{
"epoch": 0.11197799924135315,
"grad_norm": 0.8030126690864563,
"learning_rate": 3.3068313076514714e-05,
"loss": 0.6952,
"step": 12989
},
{
"epoch": 0.11224524983620125,
"grad_norm": 0.7429558038711548,
"learning_rate": 3.298877916376047e-05,
"loss": 0.7193,
"step": 13020
},
{
"epoch": 0.11251250043104935,
"grad_norm": 0.5634856224060059,
"learning_rate": 3.290915509871915e-05,
"loss": 0.7027,
"step": 13051
},
{
"epoch": 0.11277975102589745,
"grad_norm": 0.6698859930038452,
"learning_rate": 3.282944177993753e-05,
"loss": 0.706,
"step": 13082
},
{
"epoch": 0.11304700162074555,
"grad_norm": 0.6879245042800903,
"learning_rate": 3.274964010696957e-05,
"loss": 0.7145,
"step": 13113
},
{
"epoch": 0.11331425221559364,
"grad_norm": 0.6581853628158569,
"learning_rate": 3.266975098036629e-05,
"loss": 0.7052,
"step": 13144
},
{
"epoch": 0.11358150281044174,
"grad_norm": 0.7926408648490906,
"learning_rate": 3.258977530166562e-05,
"loss": 0.7055,
"step": 13175
},
{
"epoch": 0.11384875340528984,
"grad_norm": 0.7677760720252991,
"learning_rate": 3.250971397338227e-05,
"loss": 0.705,
"step": 13206
},
{
"epoch": 0.11411600400013794,
"grad_norm": 0.7506090998649597,
"learning_rate": 3.2429567898997404e-05,
"loss": 0.7099,
"step": 13237
},
{
"epoch": 0.11438325459498604,
"grad_norm": 0.7195343375205994,
"learning_rate": 3.234933798294859e-05,
"loss": 0.7091,
"step": 13268
},
{
"epoch": 0.11465050518983413,
"grad_norm": 0.5797944068908691,
"learning_rate": 3.2269025130619535e-05,
"loss": 0.7134,
"step": 13299
},
{
"epoch": 0.11491775578468223,
"grad_norm": 0.6853557229042053,
"learning_rate": 3.218863024832985e-05,
"loss": 0.7116,
"step": 13330
},
{
"epoch": 0.11518500637953033,
"grad_norm": 0.8225119113922119,
"learning_rate": 3.2108154243324864e-05,
"loss": 0.7161,
"step": 13361
},
{
"epoch": 0.11545225697437843,
"grad_norm": 0.5669307708740234,
"learning_rate": 3.2027598023765345e-05,
"loss": 0.7024,
"step": 13392
},
{
"epoch": 0.11571950756922653,
"grad_norm": 0.7186108827590942,
"learning_rate": 3.194696249871729e-05,
"loss": 0.7081,
"step": 13423
},
{
"epoch": 0.11598675816407462,
"grad_norm": 0.5777581334114075,
"learning_rate": 3.186624857814164e-05,
"loss": 0.7015,
"step": 13454
},
{
"epoch": 0.11625400875892272,
"grad_norm": 0.6641644239425659,
"learning_rate": 3.178545717288401e-05,
"loss": 0.7184,
"step": 13485
},
{
"epoch": 0.11652125935377082,
"grad_norm": 0.7468976974487305,
"learning_rate": 3.170458919466444e-05,
"loss": 0.7132,
"step": 13516
},
{
"epoch": 0.11678850994861892,
"grad_norm": 0.5197424292564392,
"learning_rate": 3.1623645556067063e-05,
"loss": 0.7053,
"step": 13547
},
{
"epoch": 0.11705576054346702,
"grad_norm": 0.8400532603263855,
"learning_rate": 3.154262717052985e-05,
"loss": 0.7099,
"step": 13578
},
{
"epoch": 0.11732301113831511,
"grad_norm": 0.6441920399665833,
"learning_rate": 3.146153495233426e-05,
"loss": 0.7078,
"step": 13609
},
{
"epoch": 0.11759026173316321,
"grad_norm": 0.676284909248352,
"learning_rate": 3.1380369816594944e-05,
"loss": 0.7073,
"step": 13640
},
{
"epoch": 0.11785751232801131,
"grad_norm": 0.7930014729499817,
"learning_rate": 3.129913267924946e-05,
"loss": 0.7057,
"step": 13671
},
{
"epoch": 0.11812476292285941,
"grad_norm": 0.5682835578918457,
"learning_rate": 3.121782445704782e-05,
"loss": 0.7029,
"step": 13702
},
{
"epoch": 0.1183920135177075,
"grad_norm": 0.699668824672699,
"learning_rate": 3.11364460675423e-05,
"loss": 0.7039,
"step": 13733
},
{
"epoch": 0.1186592641125556,
"grad_norm": 0.6139336824417114,
"learning_rate": 3.1054998429076934e-05,
"loss": 0.7049,
"step": 13764
},
{
"epoch": 0.1189265147074037,
"grad_norm": 0.7547024488449097,
"learning_rate": 3.097348246077728e-05,
"loss": 0.7053,
"step": 13795
},
{
"epoch": 0.1191937653022518,
"grad_norm": 0.6330803036689758,
"learning_rate": 3.0891899082539924e-05,
"loss": 0.7006,
"step": 13826
},
{
"epoch": 0.1194610158970999,
"grad_norm": 0.6040504574775696,
"learning_rate": 3.0810249215022233e-05,
"loss": 0.7132,
"step": 13857
},
{
"epoch": 0.119728266491948,
"grad_norm": 0.6030833125114441,
"learning_rate": 3.0728533779631865e-05,
"loss": 0.7164,
"step": 13888
},
{
"epoch": 0.1199955170867961,
"grad_norm": 0.6896437406539917,
"learning_rate": 3.064675369851637e-05,
"loss": 0.713,
"step": 13919
},
{
"epoch": 0.12026276768164419,
"grad_norm": 0.8433464169502258,
"learning_rate": 3.056490989455289e-05,
"loss": 0.6968,
"step": 13950
},
{
"epoch": 0.12053001827649229,
"grad_norm": 0.7637025713920593,
"learning_rate": 3.0483003291337596e-05,
"loss": 0.7014,
"step": 13981
},
{
"epoch": 0.12079726887134039,
"grad_norm": 0.6914523243904114,
"learning_rate": 3.040103481317539e-05,
"loss": 0.6999,
"step": 14012
},
{
"epoch": 0.12106451946618849,
"grad_norm": 0.6796788573265076,
"learning_rate": 3.03190053850694e-05,
"loss": 0.7138,
"step": 14043
},
{
"epoch": 0.12133177006103658,
"grad_norm": 0.6779701113700867,
"learning_rate": 3.0236915932710573e-05,
"loss": 0.6953,
"step": 14074
},
{
"epoch": 0.12159902065588468,
"grad_norm": 0.7082575559616089,
"learning_rate": 3.0154767382467232e-05,
"loss": 0.7097,
"step": 14105
},
{
"epoch": 0.12186627125073278,
"grad_norm": 0.7408347129821777,
"learning_rate": 3.0072560661374582e-05,
"loss": 0.6898,
"step": 14136
},
{
"epoch": 0.12213352184558088,
"grad_norm": 0.6222202181816101,
"learning_rate": 2.999029669712431e-05,
"loss": 0.6995,
"step": 14167
},
{
"epoch": 0.12240077244042898,
"grad_norm": 0.6858973503112793,
"learning_rate": 2.990797641805408e-05,
"loss": 0.7059,
"step": 14198
},
{
"epoch": 0.12266802303527707,
"grad_norm": 0.7068437337875366,
"learning_rate": 2.982560075313704e-05,
"loss": 0.7137,
"step": 14229
},
{
"epoch": 0.12293527363012517,
"grad_norm": 0.6153962016105652,
"learning_rate": 2.9743170631971368e-05,
"loss": 0.6964,
"step": 14260
},
{
"epoch": 0.12320252422497327,
"grad_norm": 0.6866403818130493,
"learning_rate": 2.9660686984769792e-05,
"loss": 0.7051,
"step": 14291
},
{
"epoch": 0.12346977481982137,
"grad_norm": 0.7551136612892151,
"learning_rate": 2.9578150742349047e-05,
"loss": 0.7039,
"step": 14322
},
{
"epoch": 0.12373702541466947,
"grad_norm": 0.5940378904342651,
"learning_rate": 2.949556283611942e-05,
"loss": 0.6997,
"step": 14353
},
{
"epoch": 0.12400427600951756,
"grad_norm": 0.5693697929382324,
"learning_rate": 2.9412924198074206e-05,
"loss": 0.6972,
"step": 14384
},
{
"epoch": 0.12427152660436566,
"grad_norm": 0.5443055033683777,
"learning_rate": 2.9330235760779208e-05,
"loss": 0.6918,
"step": 14415
},
{
"epoch": 0.12453877719921376,
"grad_norm": 0.6901688575744629,
"learning_rate": 2.9247498457362188e-05,
"loss": 0.7026,
"step": 14446
},
{
"epoch": 0.12480602779406186,
"grad_norm": 0.5541255474090576,
"learning_rate": 2.9164713221502373e-05,
"loss": 0.7006,
"step": 14477
},
{
"epoch": 0.12507327838890997,
"grad_norm": 0.7744006514549255,
"learning_rate": 2.9081880987419912e-05,
"loss": 0.7068,
"step": 14508
},
{
"epoch": 0.12534052898375805,
"grad_norm": 0.6032939553260803,
"learning_rate": 2.8999002689865296e-05,
"loss": 0.7063,
"step": 14539
},
{
"epoch": 0.12560777957860617,
"grad_norm": 0.5508326888084412,
"learning_rate": 2.8916079264108852e-05,
"loss": 0.7008,
"step": 14570
},
{
"epoch": 0.12587503017345425,
"grad_norm": 0.6029148697853088,
"learning_rate": 2.883311164593017e-05,
"loss": 0.6972,
"step": 14601
},
{
"epoch": 0.12614228076830236,
"grad_norm": 0.612224817276001,
"learning_rate": 2.875010077160754e-05,
"loss": 0.6998,
"step": 14632
},
{
"epoch": 0.12640953136315045,
"grad_norm": 0.5881705284118652,
"learning_rate": 2.866704757790741e-05,
"loss": 0.7026,
"step": 14663
},
{
"epoch": 0.12667678195799856,
"grad_norm": 0.6885594725608826,
"learning_rate": 2.858395300207376e-05,
"loss": 0.6987,
"step": 14694
},
{
"epoch": 0.12694403255284664,
"grad_norm": 0.5755457878112793,
"learning_rate": 2.8500817981817607e-05,
"loss": 0.693,
"step": 14725
},
{
"epoch": 0.12721128314769475,
"grad_norm": 0.6199221611022949,
"learning_rate": 2.8417643455306336e-05,
"loss": 0.7007,
"step": 14756
},
{
"epoch": 0.12747853374254284,
"grad_norm": 0.601020872592926,
"learning_rate": 2.8334430361153185e-05,
"loss": 0.6949,
"step": 14787
},
{
"epoch": 0.12774578433739095,
"grad_norm": 0.5955238342285156,
"learning_rate": 2.8251179638406612e-05,
"loss": 0.697,
"step": 14818
},
{
"epoch": 0.12801303493223903,
"grad_norm": 0.5562962293624878,
"learning_rate": 2.8167892226539704e-05,
"loss": 0.7021,
"step": 14849
},
{
"epoch": 0.12828028552708715,
"grad_norm": 0.829988956451416,
"learning_rate": 2.8084569065439588e-05,
"loss": 0.7028,
"step": 14880
},
{
"epoch": 0.12854753612193523,
"grad_norm": 0.6745316982269287,
"learning_rate": 2.8001211095396807e-05,
"loss": 0.7081,
"step": 14911
},
{
"epoch": 0.12881478671678334,
"grad_norm": 0.5051907896995544,
"learning_rate": 2.791781925709473e-05,
"loss": 0.6984,
"step": 14942
},
{
"epoch": 0.12908203731163143,
"grad_norm": 0.7191225290298462,
"learning_rate": 2.7834394491598908e-05,
"loss": 0.6976,
"step": 14973
},
{
"epoch": 0.12934928790647954,
"grad_norm": 0.6744056344032288,
"learning_rate": 2.7750937740346485e-05,
"loss": 0.7,
"step": 15004
},
{
"epoch": 0.12961653850132762,
"grad_norm": 0.5787363648414612,
"learning_rate": 2.7667449945135564e-05,
"loss": 0.704,
"step": 15035
},
{
"epoch": 0.12988378909617573,
"grad_norm": 0.7416272759437561,
"learning_rate": 2.7583932048114557e-05,
"loss": 0.6961,
"step": 15066
},
{
"epoch": 0.13015103969102382,
"grad_norm": 0.7691118121147156,
"learning_rate": 2.7500384991771587e-05,
"loss": 0.7045,
"step": 15097
},
{
"epoch": 0.13041829028587193,
"grad_norm": 0.6439851522445679,
"learning_rate": 2.7416809718923825e-05,
"loss": 0.6935,
"step": 15128
},
{
"epoch": 0.13068554088072,
"grad_norm": 0.8656207919120789,
"learning_rate": 2.7333207172706864e-05,
"loss": 0.7037,
"step": 15159
},
{
"epoch": 0.13095279147556813,
"grad_norm": 0.6430538296699524,
"learning_rate": 2.7249578296564088e-05,
"loss": 0.702,
"step": 15190
},
{
"epoch": 0.1312200420704162,
"grad_norm": 0.840202271938324,
"learning_rate": 2.7165924034235973e-05,
"loss": 0.6965,
"step": 15221
},
{
"epoch": 0.13148729266526432,
"grad_norm": 0.6381927728652954,
"learning_rate": 2.708224532974953e-05,
"loss": 0.6948,
"step": 15252
},
{
"epoch": 0.1317545432601124,
"grad_norm": 0.7093908786773682,
"learning_rate": 2.6998543127407538e-05,
"loss": 0.7018,
"step": 15283
},
{
"epoch": 0.13202179385496052,
"grad_norm": 0.8095340132713318,
"learning_rate": 2.6914818371777988e-05,
"loss": 0.6933,
"step": 15314
},
{
"epoch": 0.1322890444498086,
"grad_norm": 0.7339562177658081,
"learning_rate": 2.6831072007683373e-05,
"loss": 0.697,
"step": 15345
},
{
"epoch": 0.1325562950446567,
"grad_norm": 0.6299667954444885,
"learning_rate": 2.6747304980190018e-05,
"loss": 0.6953,
"step": 15376
},
{
"epoch": 0.1328235456395048,
"grad_norm": 0.7498751878738403,
"learning_rate": 2.6663518234597453e-05,
"loss": 0.7122,
"step": 15407
},
{
"epoch": 0.1330907962343529,
"grad_norm": 0.6487712860107422,
"learning_rate": 2.6579712716427696e-05,
"loss": 0.7046,
"step": 15438
},
{
"epoch": 0.133358046829201,
"grad_norm": 0.6411934494972229,
"learning_rate": 2.6495889371414652e-05,
"loss": 0.6949,
"step": 15469
},
{
"epoch": 0.1336252974240491,
"grad_norm": 0.6873781085014343,
"learning_rate": 2.6412049145493367e-05,
"loss": 0.6936,
"step": 15500
},
{
"epoch": 0.1338925480188972,
"grad_norm": 0.7147006988525391,
"learning_rate": 2.632819298478939e-05,
"loss": 0.6897,
"step": 15531
},
{
"epoch": 0.1341597986137453,
"grad_norm": 0.6077598333358765,
"learning_rate": 2.6244321835608105e-05,
"loss": 0.6999,
"step": 15562
},
{
"epoch": 0.1344270492085934,
"grad_norm": 0.6404904723167419,
"learning_rate": 2.6160436644424024e-05,
"loss": 0.703,
"step": 15593
},
{
"epoch": 0.1346942998034415,
"grad_norm": 0.6677570939064026,
"learning_rate": 2.6076538357870133e-05,
"loss": 0.6983,
"step": 15624
},
{
"epoch": 0.1349615503982896,
"grad_norm": 0.6934464573860168,
"learning_rate": 2.5992627922727196e-05,
"loss": 0.6939,
"step": 15655
},
{
"epoch": 0.1352288009931377,
"grad_norm": 0.6399245262145996,
"learning_rate": 2.5908706285913066e-05,
"loss": 0.6971,
"step": 15686
},
{
"epoch": 0.1354960515879858,
"grad_norm": 0.5851583480834961,
"learning_rate": 2.5824774394472008e-05,
"loss": 0.6945,
"step": 15717
},
{
"epoch": 0.1357633021828339,
"grad_norm": 0.6914601922035217,
"learning_rate": 2.5740833195563996e-05,
"loss": 0.6943,
"step": 15748
},
{
"epoch": 0.136030552777682,
"grad_norm": 0.6366608142852783,
"learning_rate": 2.5656883636454067e-05,
"loss": 0.6905,
"step": 15779
},
{
"epoch": 0.13629780337253009,
"grad_norm": 0.64646977186203,
"learning_rate": 2.557292666450159e-05,
"loss": 0.6984,
"step": 15810
},
{
"epoch": 0.1365650539673782,
"grad_norm": 0.6968972086906433,
"learning_rate": 2.5488963227149566e-05,
"loss": 0.7023,
"step": 15841
},
{
"epoch": 0.13683230456222628,
"grad_norm": 0.62071293592453,
"learning_rate": 2.5404994271913983e-05,
"loss": 0.6982,
"step": 15872
},
{
"epoch": 0.1370995551570744,
"grad_norm": 0.6104496717453003,
"learning_rate": 2.5321020746373085e-05,
"loss": 0.6916,
"step": 15903
},
{
"epoch": 0.13736680575192248,
"grad_norm": 0.8115667104721069,
"learning_rate": 2.52370435981567e-05,
"loss": 0.7101,
"step": 15934
},
{
"epoch": 0.1376340563467706,
"grad_norm": 0.6817131638526917,
"learning_rate": 2.5153063774935533e-05,
"loss": 0.697,
"step": 15965
},
{
"epoch": 0.13790130694161867,
"grad_norm": 0.5559698343276978,
"learning_rate": 2.506908222441045e-05,
"loss": 0.7041,
"step": 15996
},
{
"epoch": 0.13816855753646679,
"grad_norm": 0.6042380332946777,
"learning_rate": 2.498509989430187e-05,
"loss": 0.6896,
"step": 16027
},
{
"epoch": 0.13843580813131487,
"grad_norm": 0.6447672843933105,
"learning_rate": 2.4901117732338958e-05,
"loss": 0.692,
"step": 16058
},
{
"epoch": 0.13870305872616298,
"grad_norm": 0.6936026811599731,
"learning_rate": 2.481713668624899e-05,
"loss": 0.6957,
"step": 16089
},
{
"epoch": 0.13897030932101107,
"grad_norm": 0.6021552681922913,
"learning_rate": 2.4733157703746663e-05,
"loss": 0.6877,
"step": 16120
},
{
"epoch": 0.13923755991585918,
"grad_norm": 0.6911255121231079,
"learning_rate": 2.4649181732523392e-05,
"loss": 0.6936,
"step": 16151
},
{
"epoch": 0.13950481051070726,
"grad_norm": 0.6742393374443054,
"learning_rate": 2.4565209720236582e-05,
"loss": 0.7005,
"step": 16182
},
{
"epoch": 0.13977206110555537,
"grad_norm": 0.6693837642669678,
"learning_rate": 2.4481242614498975e-05,
"loss": 0.6961,
"step": 16213
},
{
"epoch": 0.14003931170040346,
"grad_norm": 0.6516282558441162,
"learning_rate": 2.439728136286796e-05,
"loss": 0.6955,
"step": 16244
},
{
"epoch": 0.14030656229525157,
"grad_norm": 0.5943180322647095,
"learning_rate": 2.4313326912834852e-05,
"loss": 0.6864,
"step": 16275
},
{
"epoch": 0.14057381289009965,
"grad_norm": 0.598438024520874,
"learning_rate": 2.4229380211814206e-05,
"loss": 0.6888,
"step": 16306
},
{
"epoch": 0.14084106348494776,
"grad_norm": 0.7447234392166138,
"learning_rate": 2.4145442207133124e-05,
"loss": 0.6927,
"step": 16337
},
{
"epoch": 0.14110831407979585,
"grad_norm": 0.5119730830192566,
"learning_rate": 2.406151384602059e-05,
"loss": 0.6981,
"step": 16368
},
{
"epoch": 0.14137556467464396,
"grad_norm": 0.6349120736122131,
"learning_rate": 2.3977596075596747e-05,
"loss": 0.6923,
"step": 16399
},
{
"epoch": 0.14164281526949205,
"grad_norm": 0.6133832931518555,
"learning_rate": 2.3893689842862223e-05,
"loss": 0.6962,
"step": 16430
},
{
"epoch": 0.14191006586434016,
"grad_norm": 0.5794311165809631,
"learning_rate": 2.3809796094687475e-05,
"loss": 0.6945,
"step": 16461
},
{
"epoch": 0.14217731645918824,
"grad_norm": 0.6241285800933838,
"learning_rate": 2.372591577780202e-05,
"loss": 0.6967,
"step": 16492
},
{
"epoch": 0.14244456705403635,
"grad_norm": 0.5526315569877625,
"learning_rate": 2.3642049838783838e-05,
"loss": 0.69,
"step": 16523
},
{
"epoch": 0.14271181764888444,
"grad_norm": 0.6427567005157471,
"learning_rate": 2.3558199224048666e-05,
"loss": 0.7001,
"step": 16554
},
{
"epoch": 0.14297906824373255,
"grad_norm": 0.6813309788703918,
"learning_rate": 2.347436487983929e-05,
"loss": 0.6834,
"step": 16585
},
{
"epoch": 0.14324631883858063,
"grad_norm": 0.6263120174407959,
"learning_rate": 2.3390547752214888e-05,
"loss": 0.6999,
"step": 16616
},
{
"epoch": 0.14351356943342874,
"grad_norm": 0.7129321098327637,
"learning_rate": 2.330674878704035e-05,
"loss": 0.689,
"step": 16647
},
{
"epoch": 0.14378082002827683,
"grad_norm": 0.6017931699752808,
"learning_rate": 2.322296892997561e-05,
"loss": 0.6974,
"step": 16678
},
{
"epoch": 0.14404807062312494,
"grad_norm": 0.6443967223167419,
"learning_rate": 2.313920912646497e-05,
"loss": 0.6888,
"step": 16709
},
{
"epoch": 0.14431532121797302,
"grad_norm": 0.5837503671646118,
"learning_rate": 2.305547032172643e-05,
"loss": 0.6841,
"step": 16740
},
{
"epoch": 0.14458257181282114,
"grad_norm": 0.5609689950942993,
"learning_rate": 2.2971753460741014e-05,
"loss": 0.6888,
"step": 16771
},
{
"epoch": 0.14484982240766922,
"grad_norm": 0.5821804404258728,
"learning_rate": 2.288805948824212e-05,
"loss": 0.6925,
"step": 16802
},
{
"epoch": 0.14511707300251733,
"grad_norm": 0.6066220998764038,
"learning_rate": 2.2804389348704858e-05,
"loss": 0.6847,
"step": 16833
},
{
"epoch": 0.14538432359736542,
"grad_norm": 0.6416580080986023,
"learning_rate": 2.2720743986335374e-05,
"loss": 0.6892,
"step": 16864
},
{
"epoch": 0.14565157419221353,
"grad_norm": 0.7490798234939575,
"learning_rate": 2.2637124345060233e-05,
"loss": 0.6887,
"step": 16895
},
{
"epoch": 0.1459188247870616,
"grad_norm": 0.58744215965271,
"learning_rate": 2.2553531368515695e-05,
"loss": 0.6938,
"step": 16926
},
{
"epoch": 0.14618607538190972,
"grad_norm": 0.5941025614738464,
"learning_rate": 2.2469966000037144e-05,
"loss": 0.6941,
"step": 16957
},
{
"epoch": 0.1464533259767578,
"grad_norm": 0.5168039798736572,
"learning_rate": 2.2386429182648417e-05,
"loss": 0.69,
"step": 16988
},
{
"epoch": 0.14672057657160592,
"grad_norm": 0.5566929578781128,
"learning_rate": 2.230292185905114e-05,
"loss": 0.6874,
"step": 17019
},
{
"epoch": 0.146987827166454,
"grad_norm": 0.6314446330070496,
"learning_rate": 2.2219444971614116e-05,
"loss": 0.6891,
"step": 17050
},
{
"epoch": 0.14725507776130212,
"grad_norm": 0.6427028179168701,
"learning_rate": 2.2135999462362655e-05,
"loss": 0.6884,
"step": 17081
},
{
"epoch": 0.1475223283561502,
"grad_norm": 0.6639821529388428,
"learning_rate": 2.2052586272968003e-05,
"loss": 0.6887,
"step": 17112
},
{
"epoch": 0.1477895789509983,
"grad_norm": 0.6545510292053223,
"learning_rate": 2.196920634473666e-05,
"loss": 0.6917,
"step": 17143
},
{
"epoch": 0.1480568295458464,
"grad_norm": 0.6460488438606262,
"learning_rate": 2.1885860618599787e-05,
"loss": 0.6987,
"step": 17174
},
{
"epoch": 0.1483240801406945,
"grad_norm": 0.6484776735305786,
"learning_rate": 2.1802550035102577e-05,
"loss": 0.6881,
"step": 17205
},
{
"epoch": 0.1485913307355426,
"grad_norm": 0.6550127267837524,
"learning_rate": 2.171927553439363e-05,
"loss": 0.6943,
"step": 17236
},
{
"epoch": 0.1488585813303907,
"grad_norm": 0.5928293466567993,
"learning_rate": 2.1636038056214376e-05,
"loss": 0.6903,
"step": 17267
},
{
"epoch": 0.1491258319252388,
"grad_norm": 0.6523717045783997,
"learning_rate": 2.155283853988844e-05,
"loss": 0.6818,
"step": 17298
},
{
"epoch": 0.1493930825200869,
"grad_norm": 0.6091533303260803,
"learning_rate": 2.146967792431106e-05,
"loss": 0.6892,
"step": 17329
},
{
"epoch": 0.14966033311493498,
"grad_norm": 0.6898590922355652,
"learning_rate": 2.138655714793849e-05,
"loss": 0.6986,
"step": 17360
},
{
"epoch": 0.1499275837097831,
"grad_norm": 0.6864176392555237,
"learning_rate": 2.1303477148777367e-05,
"loss": 0.6927,
"step": 17391
},
{
"epoch": 0.15019483430463118,
"grad_norm": 0.6650465726852417,
"learning_rate": 2.122043886437421e-05,
"loss": 0.689,
"step": 17422
},
{
"epoch": 0.1504620848994793,
"grad_norm": 0.604285717010498,
"learning_rate": 2.1137443231804765e-05,
"loss": 0.6942,
"step": 17453
},
{
"epoch": 0.1507293354943274,
"grad_norm": 0.643878698348999,
"learning_rate": 2.105449118766347e-05,
"loss": 0.6868,
"step": 17484
},
{
"epoch": 0.1509965860891755,
"grad_norm": 0.6043519377708435,
"learning_rate": 2.097158366805287e-05,
"loss": 0.694,
"step": 17515
},
{
"epoch": 0.1512638366840236,
"grad_norm": 0.7776553630828857,
"learning_rate": 2.0888721608573047e-05,
"loss": 0.6881,
"step": 17546
},
{
"epoch": 0.15153108727887168,
"grad_norm": 0.8376491665840149,
"learning_rate": 2.0805905944311087e-05,
"loss": 0.685,
"step": 17577
},
{
"epoch": 0.1517983378737198,
"grad_norm": 0.5732520222663879,
"learning_rate": 2.0723137609830497e-05,
"loss": 0.6886,
"step": 17608
},
{
"epoch": 0.15206558846856788,
"grad_norm": 0.6805883049964905,
"learning_rate": 2.0640417539160686e-05,
"loss": 0.6818,
"step": 17639
},
{
"epoch": 0.152332839063416,
"grad_norm": 0.6287026405334473,
"learning_rate": 2.0557746665786427e-05,
"loss": 0.6945,
"step": 17670
},
{
"epoch": 0.15260008965826408,
"grad_norm": 0.6397233605384827,
"learning_rate": 2.0475125922637256e-05,
"loss": 0.6884,
"step": 17701
},
{
"epoch": 0.1528673402531122,
"grad_norm": 0.533277153968811,
"learning_rate": 2.0392556242077047e-05,
"loss": 0.679,
"step": 17732
},
{
"epoch": 0.15313459084796027,
"grad_norm": 0.6750075817108154,
"learning_rate": 2.031003855589343e-05,
"loss": 0.6963,
"step": 17763
},
{
"epoch": 0.15340184144280838,
"grad_norm": 0.5579234957695007,
"learning_rate": 2.022757379528727e-05,
"loss": 0.6909,
"step": 17794
},
{
"epoch": 0.15366909203765647,
"grad_norm": 0.6035830974578857,
"learning_rate": 2.0145162890862184e-05,
"loss": 0.6853,
"step": 17825
},
{
"epoch": 0.15393634263250458,
"grad_norm": 0.6703247427940369,
"learning_rate": 2.0062806772614022e-05,
"loss": 0.6827,
"step": 17856
},
{
"epoch": 0.15420359322735266,
"grad_norm": 0.6000218987464905,
"learning_rate": 1.9980506369920392e-05,
"loss": 0.684,
"step": 17887
},
{
"epoch": 0.15447084382220078,
"grad_norm": 0.6591530442237854,
"learning_rate": 1.989826261153015e-05,
"loss": 0.6855,
"step": 17918
},
{
"epoch": 0.15473809441704886,
"grad_norm": 0.7445237040519714,
"learning_rate": 1.9816076425552923e-05,
"loss": 0.6771,
"step": 17949
},
{
"epoch": 0.15500534501189697,
"grad_norm": 0.5756223797798157,
"learning_rate": 1.9733948739448676e-05,
"loss": 0.6923,
"step": 17980
},
{
"epoch": 0.15527259560674506,
"grad_norm": 0.5653395056724548,
"learning_rate": 1.9651880480017155e-05,
"loss": 0.6885,
"step": 18011
},
{
"epoch": 0.15553984620159317,
"grad_norm": 0.551142156124115,
"learning_rate": 1.9569872573387516e-05,
"loss": 0.6945,
"step": 18042
},
{
"epoch": 0.15580709679644125,
"grad_norm": 0.6376651525497437,
"learning_rate": 1.9487925945007854e-05,
"loss": 0.6823,
"step": 18073
},
{
"epoch": 0.15607434739128936,
"grad_norm": 0.7611255645751953,
"learning_rate": 1.9406041519634726e-05,
"loss": 0.6837,
"step": 18104
},
{
"epoch": 0.15634159798613745,
"grad_norm": 0.7171857357025146,
"learning_rate": 1.932422022132275e-05,
"loss": 0.6905,
"step": 18135
},
{
"epoch": 0.15660884858098556,
"grad_norm": 0.6394956707954407,
"learning_rate": 1.924246297341414e-05,
"loss": 0.6781,
"step": 18166
},
{
"epoch": 0.15687609917583364,
"grad_norm": 0.7751463055610657,
"learning_rate": 1.9160770698528338e-05,
"loss": 0.6817,
"step": 18197
},
{
"epoch": 0.15714334977068176,
"grad_norm": 0.5093162655830383,
"learning_rate": 1.907914431855156e-05,
"loss": 0.6857,
"step": 18228
},
{
"epoch": 0.15741060036552984,
"grad_norm": 0.6336578726768494,
"learning_rate": 1.8997584754626412e-05,
"loss": 0.6785,
"step": 18259
},
{
"epoch": 0.15767785096037795,
"grad_norm": 0.6285567879676819,
"learning_rate": 1.8916092927141486e-05,
"loss": 0.6827,
"step": 18290
},
{
"epoch": 0.15794510155522604,
"grad_norm": 0.5558923482894897,
"learning_rate": 1.883466975572098e-05,
"loss": 0.6904,
"step": 18321
},
{
"epoch": 0.15821235215007415,
"grad_norm": 0.6782019734382629,
"learning_rate": 1.8753316159214312e-05,
"loss": 0.6807,
"step": 18352
},
{
"epoch": 0.15847960274492223,
"grad_norm": 0.5593744516372681,
"learning_rate": 1.8672033055685766e-05,
"loss": 0.682,
"step": 18383
},
{
"epoch": 0.15874685333977034,
"grad_norm": 0.7029324173927307,
"learning_rate": 1.8590821362404116e-05,
"loss": 0.6863,
"step": 18414
},
{
"epoch": 0.15901410393461843,
"grad_norm": 0.6170771718025208,
"learning_rate": 1.8509681995832294e-05,
"loss": 0.6847,
"step": 18445
},
{
"epoch": 0.15928135452946654,
"grad_norm": 0.5704348087310791,
"learning_rate": 1.8428615871617004e-05,
"loss": 0.68,
"step": 18476
},
{
"epoch": 0.15954860512431462,
"grad_norm": 0.7009204626083374,
"learning_rate": 1.8347623904578448e-05,
"loss": 0.6869,
"step": 18507
},
{
"epoch": 0.15981585571916274,
"grad_norm": 0.5872434377670288,
"learning_rate": 1.8266707008699975e-05,
"loss": 0.6822,
"step": 18538
},
{
"epoch": 0.16008310631401082,
"grad_norm": 0.5624253153800964,
"learning_rate": 1.818586609711774e-05,
"loss": 0.6861,
"step": 18569
},
{
"epoch": 0.16035035690885893,
"grad_norm": 0.5884125232696533,
"learning_rate": 1.8105102082110462e-05,
"loss": 0.6985,
"step": 18600
},
{
"epoch": 0.16061760750370702,
"grad_norm": 0.6707611083984375,
"learning_rate": 1.8024415875089058e-05,
"loss": 0.6848,
"step": 18631
},
{
"epoch": 0.16088485809855513,
"grad_norm": 0.6062221527099609,
"learning_rate": 1.7943808386586407e-05,
"loss": 0.6881,
"step": 18662
},
{
"epoch": 0.1611521086934032,
"grad_norm": 0.57669597864151,
"learning_rate": 1.7863280526247073e-05,
"loss": 0.6799,
"step": 18693
},
{
"epoch": 0.16141935928825132,
"grad_norm": 0.8299211859703064,
"learning_rate": 1.7782833202817003e-05,
"loss": 0.6783,
"step": 18724
},
{
"epoch": 0.1616866098830994,
"grad_norm": 0.6534104347229004,
"learning_rate": 1.7702467324133327e-05,
"loss": 0.6862,
"step": 18755
},
{
"epoch": 0.16195386047794752,
"grad_norm": 0.7435428500175476,
"learning_rate": 1.7622183797114042e-05,
"loss": 0.6895,
"step": 18786
},
{
"epoch": 0.1622211110727956,
"grad_norm": 0.6502455472946167,
"learning_rate": 1.7541983527747838e-05,
"loss": 0.6798,
"step": 18817
},
{
"epoch": 0.16248836166764372,
"grad_norm": 0.6559191346168518,
"learning_rate": 1.746186742108387e-05,
"loss": 0.6812,
"step": 18848
},
{
"epoch": 0.1627556122624918,
"grad_norm": 0.6120203733444214,
"learning_rate": 1.73818363812215e-05,
"loss": 0.6725,
"step": 18879
},
{
"epoch": 0.1630228628573399,
"grad_norm": 0.6878360509872437,
"learning_rate": 1.7301891311300153e-05,
"loss": 0.6893,
"step": 18910
},
{
"epoch": 0.163290113452188,
"grad_norm": 0.6229916214942932,
"learning_rate": 1.7222033113489055e-05,
"loss": 0.6841,
"step": 18941
},
{
"epoch": 0.1635573640470361,
"grad_norm": 0.5408461093902588,
"learning_rate": 1.7142262688977127e-05,
"loss": 0.6742,
"step": 18972
},
{
"epoch": 0.1638246146418842,
"grad_norm": 0.5998808145523071,
"learning_rate": 1.7062580937962764e-05,
"loss": 0.689,
"step": 19003
},
{
"epoch": 0.1640918652367323,
"grad_norm": 0.6789708733558655,
"learning_rate": 1.698298875964369e-05,
"loss": 0.6825,
"step": 19034
},
{
"epoch": 0.1643591158315804,
"grad_norm": 0.5498121380805969,
"learning_rate": 1.690348705220684e-05,
"loss": 0.6766,
"step": 19065
},
{
"epoch": 0.1646263664264285,
"grad_norm": 0.7266933917999268,
"learning_rate": 1.6824076712818156e-05,
"loss": 0.6775,
"step": 19096
},
{
"epoch": 0.16489361702127658,
"grad_norm": 0.5324463844299316,
"learning_rate": 1.6744758637612533e-05,
"loss": 0.6763,
"step": 19127
},
{
"epoch": 0.1651608676161247,
"grad_norm": 0.5213314890861511,
"learning_rate": 1.6665533721683664e-05,
"loss": 0.6831,
"step": 19158
},
{
"epoch": 0.16542811821097278,
"grad_norm": 0.6004193425178528,
"learning_rate": 1.6586402859073974e-05,
"loss": 0.6865,
"step": 19189
},
{
"epoch": 0.1656953688058209,
"grad_norm": 0.6059597134590149,
"learning_rate": 1.6507366942764463e-05,
"loss": 0.6855,
"step": 19220
},
{
"epoch": 0.16596261940066898,
"grad_norm": 0.8009548187255859,
"learning_rate": 1.6428426864664732e-05,
"loss": 0.6837,
"step": 19251
},
{
"epoch": 0.1662298699955171,
"grad_norm": 0.5806351900100708,
"learning_rate": 1.6349583515602816e-05,
"loss": 0.6783,
"step": 19282
},
{
"epoch": 0.16649712059036517,
"grad_norm": 0.6568647623062134,
"learning_rate": 1.6270837785315208e-05,
"loss": 0.6792,
"step": 19313
},
{
"epoch": 0.16676437118521328,
"grad_norm": 0.5352032780647278,
"learning_rate": 1.619219056243676e-05,
"loss": 0.68,
"step": 19344
},
{
"epoch": 0.16703162178006137,
"grad_norm": 0.6357106566429138,
"learning_rate": 1.6113642734490698e-05,
"loss": 0.681,
"step": 19375
},
{
"epoch": 0.16729887237490948,
"grad_norm": 0.5829596519470215,
"learning_rate": 1.6035195187878577e-05,
"loss": 0.6846,
"step": 19406
},
{
"epoch": 0.1675661229697576,
"grad_norm": 0.6300954222679138,
"learning_rate": 1.5956848807870305e-05,
"loss": 0.6762,
"step": 19437
},
{
"epoch": 0.16783337356460568,
"grad_norm": 0.5970476865768433,
"learning_rate": 1.587860447859413e-05,
"loss": 0.6909,
"step": 19468
},
{
"epoch": 0.1681006241594538,
"grad_norm": 0.6171104311943054,
"learning_rate": 1.5800463083026686e-05,
"loss": 0.6843,
"step": 19499
},
{
"epoch": 0.16836787475430187,
"grad_norm": 0.7339180111885071,
"learning_rate": 1.572242550298298e-05,
"loss": 0.6826,
"step": 19530
},
{
"epoch": 0.16863512534914998,
"grad_norm": 0.6006590723991394,
"learning_rate": 1.56444926191065e-05,
"loss": 0.6798,
"step": 19561
},
{
"epoch": 0.16890237594399807,
"grad_norm": 0.6238895654678345,
"learning_rate": 1.5566665310859257e-05,
"loss": 0.6753,
"step": 19592
},
{
"epoch": 0.16916962653884618,
"grad_norm": 0.6362223625183105,
"learning_rate": 1.5488944456511846e-05,
"loss": 0.685,
"step": 19623
},
{
"epoch": 0.16943687713369426,
"grad_norm": 0.599464476108551,
"learning_rate": 1.5411330933133546e-05,
"loss": 0.6813,
"step": 19654
},
{
"epoch": 0.16970412772854238,
"grad_norm": 0.545221745967865,
"learning_rate": 1.533382561658241e-05,
"loss": 0.675,
"step": 19685
},
{
"epoch": 0.16997137832339046,
"grad_norm": 0.5692989230155945,
"learning_rate": 1.525642938149541e-05,
"loss": 0.6814,
"step": 19716
},
{
"epoch": 0.17023862891823857,
"grad_norm": 0.6204626560211182,
"learning_rate": 1.5179143101278536e-05,
"loss": 0.6837,
"step": 19747
},
{
"epoch": 0.17050587951308666,
"grad_norm": 0.6067166328430176,
"learning_rate": 1.5101967648096955e-05,
"loss": 0.6788,
"step": 19778
},
{
"epoch": 0.17077313010793477,
"grad_norm": 0.7275129556655884,
"learning_rate": 1.5024903892865172e-05,
"loss": 0.6794,
"step": 19809
},
{
"epoch": 0.17104038070278285,
"grad_norm": 0.653567373752594,
"learning_rate": 1.4947952705237184e-05,
"loss": 0.6878,
"step": 19840
},
{
"epoch": 0.17130763129763096,
"grad_norm": 0.5741860270500183,
"learning_rate": 1.4871114953596682e-05,
"loss": 0.68,
"step": 19871
},
{
"epoch": 0.17157488189247905,
"grad_norm": 0.6224374771118164,
"learning_rate": 1.4794391505047256e-05,
"loss": 0.6746,
"step": 19902
},
{
"epoch": 0.17184213248732716,
"grad_norm": 0.6131766438484192,
"learning_rate": 1.4717783225402596e-05,
"loss": 0.6783,
"step": 19933
},
{
"epoch": 0.17210938308217524,
"grad_norm": 0.5544754266738892,
"learning_rate": 1.4641290979176735e-05,
"loss": 0.6836,
"step": 19964
},
{
"epoch": 0.17237663367702336,
"grad_norm": 0.6847221255302429,
"learning_rate": 1.4564915629574246e-05,
"loss": 0.689,
"step": 19995
},
{
"epoch": 0.17264388427187144,
"grad_norm": 0.5787181258201599,
"learning_rate": 1.4488658038480601e-05,
"loss": 0.6822,
"step": 20026
},
{
"epoch": 0.17291113486671955,
"grad_norm": 0.6759909987449646,
"learning_rate": 1.4412519066452323e-05,
"loss": 0.686,
"step": 20057
},
{
"epoch": 0.17317838546156764,
"grad_norm": 0.626928985118866,
"learning_rate": 1.4336499572707373e-05,
"loss": 0.6883,
"step": 20088
},
{
"epoch": 0.17344563605641575,
"grad_norm": 0.6162570714950562,
"learning_rate": 1.4260600415115433e-05,
"loss": 0.6756,
"step": 20119
},
{
"epoch": 0.17371288665126383,
"grad_norm": 0.7289220690727234,
"learning_rate": 1.4184822450188137e-05,
"loss": 0.677,
"step": 20150
},
{
"epoch": 0.17398013724611194,
"grad_norm": 0.6390628218650818,
"learning_rate": 1.410916653306954e-05,
"loss": 0.6794,
"step": 20181
},
{
"epoch": 0.17424738784096003,
"grad_norm": 0.5724453926086426,
"learning_rate": 1.403363351752639e-05,
"loss": 0.6721,
"step": 20212
},
{
"epoch": 0.17451463843580814,
"grad_norm": 0.7784771919250488,
"learning_rate": 1.3958224255938485e-05,
"loss": 0.6764,
"step": 20243
},
{
"epoch": 0.17478188903065622,
"grad_norm": 0.6640349626541138,
"learning_rate": 1.388293959928911e-05,
"loss": 0.6814,
"step": 20274
},
{
"epoch": 0.17504913962550434,
"grad_norm": 0.6183404922485352,
"learning_rate": 1.3807780397155379e-05,
"loss": 0.6765,
"step": 20305
},
{
"epoch": 0.17531639022035242,
"grad_norm": 0.5709137916564941,
"learning_rate": 1.3732747497698655e-05,
"loss": 0.6806,
"step": 20336
},
{
"epoch": 0.17558364081520053,
"grad_norm": 0.5495983958244324,
"learning_rate": 1.3657841747655038e-05,
"loss": 0.6829,
"step": 20367
},
{
"epoch": 0.17585089141004862,
"grad_norm": 0.6204441785812378,
"learning_rate": 1.3583063992325706e-05,
"loss": 0.6839,
"step": 20398
},
{
"epoch": 0.17611814200489673,
"grad_norm": 0.5645965933799744,
"learning_rate": 1.3508415075567496e-05,
"loss": 0.6828,
"step": 20429
},
{
"epoch": 0.1763853925997448,
"grad_norm": 0.6008773446083069,
"learning_rate": 1.343389583978327e-05,
"loss": 0.6734,
"step": 20460
},
{
"epoch": 0.17665264319459292,
"grad_norm": 0.6559963822364807,
"learning_rate": 1.3359507125912468e-05,
"loss": 0.6726,
"step": 20491
},
{
"epoch": 0.176919893789441,
"grad_norm": 0.5835151672363281,
"learning_rate": 1.3285249773421627e-05,
"loss": 0.6704,
"step": 20522
},
{
"epoch": 0.17718714438428912,
"grad_norm": 0.7009577751159668,
"learning_rate": 1.3211124620294884e-05,
"loss": 0.6843,
"step": 20553
},
{
"epoch": 0.1774543949791372,
"grad_norm": 0.5399602651596069,
"learning_rate": 1.313713250302451e-05,
"loss": 0.6894,
"step": 20584
},
{
"epoch": 0.17772164557398532,
"grad_norm": 0.5367136001586914,
"learning_rate": 1.3063274256601479e-05,
"loss": 0.6906,
"step": 20615
},
{
"epoch": 0.1779888961688334,
"grad_norm": 0.5709632635116577,
"learning_rate": 1.2989550714506086e-05,
"loss": 0.6829,
"step": 20646
},
{
"epoch": 0.1782561467636815,
"grad_norm": 0.5786108374595642,
"learning_rate": 1.291596270869846e-05,
"loss": 0.6761,
"step": 20677
},
{
"epoch": 0.1785233973585296,
"grad_norm": 0.5778704285621643,
"learning_rate": 1.284251106960927e-05,
"loss": 0.6759,
"step": 20708
},
{
"epoch": 0.1787906479533777,
"grad_norm": 0.6163730025291443,
"learning_rate": 1.2769196626130263e-05,
"loss": 0.6735,
"step": 20739
},
{
"epoch": 0.1790578985482258,
"grad_norm": 0.5373594760894775,
"learning_rate": 1.2696020205604969e-05,
"loss": 0.6865,
"step": 20770
},
{
"epoch": 0.1793251491430739,
"grad_norm": 0.7162913084030151,
"learning_rate": 1.2622982633819359e-05,
"loss": 0.684,
"step": 20801
},
{
"epoch": 0.179592399737922,
"grad_norm": 0.6749635934829712,
"learning_rate": 1.2550084734992484e-05,
"loss": 0.6799,
"step": 20832
},
{
"epoch": 0.1798596503327701,
"grad_norm": 0.7294095158576965,
"learning_rate": 1.247732733176724e-05,
"loss": 0.6865,
"step": 20863
},
{
"epoch": 0.18012690092761818,
"grad_norm": 0.5720840692520142,
"learning_rate": 1.2404711245201044e-05,
"loss": 0.6829,
"step": 20894
},
{
"epoch": 0.1803941515224663,
"grad_norm": 0.6276606321334839,
"learning_rate": 1.2332237294756535e-05,
"loss": 0.6742,
"step": 20925
},
{
"epoch": 0.18066140211731438,
"grad_norm": 0.6403278112411499,
"learning_rate": 1.225990629829241e-05,
"loss": 0.6706,
"step": 20956
},
{
"epoch": 0.1809286527121625,
"grad_norm": 0.6730019450187683,
"learning_rate": 1.2187719072054136e-05,
"loss": 0.6828,
"step": 20987
},
{
"epoch": 0.18119590330701058,
"grad_norm": 0.5833009481430054,
"learning_rate": 1.2115676430664735e-05,
"loss": 0.6829,
"step": 21018
},
{
"epoch": 0.1814631539018587,
"grad_norm": 0.5789329409599304,
"learning_rate": 1.2043779187115647e-05,
"loss": 0.6831,
"step": 21049
},
{
"epoch": 0.18173040449670677,
"grad_norm": 0.6406223773956299,
"learning_rate": 1.1972028152757476e-05,
"loss": 0.679,
"step": 21080
},
{
"epoch": 0.18199765509155488,
"grad_norm": 0.4948377311229706,
"learning_rate": 1.1900424137290889e-05,
"loss": 0.6733,
"step": 21111
},
{
"epoch": 0.18226490568640297,
"grad_norm": 0.6024377346038818,
"learning_rate": 1.1828967948757482e-05,
"loss": 0.6793,
"step": 21142
},
{
"epoch": 0.18253215628125108,
"grad_norm": 0.5884882211685181,
"learning_rate": 1.175766039353062e-05,
"loss": 0.6756,
"step": 21173
},
{
"epoch": 0.18279940687609916,
"grad_norm": 0.5883007049560547,
"learning_rate": 1.1686502276306382e-05,
"loss": 0.673,
"step": 21204
},
{
"epoch": 0.18306665747094727,
"grad_norm": 0.6002291440963745,
"learning_rate": 1.1615494400094445e-05,
"loss": 0.6715,
"step": 21235
},
{
"epoch": 0.18333390806579536,
"grad_norm": 0.6667938232421875,
"learning_rate": 1.1544637566209029e-05,
"loss": 0.6844,
"step": 21266
},
{
"epoch": 0.18360115866064347,
"grad_norm": 0.5517445206642151,
"learning_rate": 1.1473932574259886e-05,
"loss": 0.6826,
"step": 21297
},
{
"epoch": 0.18386840925549156,
"grad_norm": 0.5575104355812073,
"learning_rate": 1.1403380222143247e-05,
"loss": 0.6704,
"step": 21328
},
{
"epoch": 0.18413565985033967,
"grad_norm": 0.6711089015007019,
"learning_rate": 1.1332981306032808e-05,
"loss": 0.674,
"step": 21359
},
{
"epoch": 0.18440291044518778,
"grad_norm": 0.5593468546867371,
"learning_rate": 1.1262736620370762e-05,
"loss": 0.6682,
"step": 21390
},
{
"epoch": 0.18467016104003586,
"grad_norm": 0.6826859712600708,
"learning_rate": 1.1192646957858854e-05,
"loss": 0.6773,
"step": 21421
},
{
"epoch": 0.18493741163488397,
"grad_norm": 0.6396262645721436,
"learning_rate": 1.1122713109449381e-05,
"loss": 0.683,
"step": 21452
},
{
"epoch": 0.18520466222973206,
"grad_norm": 0.67862468957901,
"learning_rate": 1.105293586433634e-05,
"loss": 0.6712,
"step": 21483
},
{
"epoch": 0.18547191282458017,
"grad_norm": 0.5799532532691956,
"learning_rate": 1.0983316009946446e-05,
"loss": 0.675,
"step": 21514
},
{
"epoch": 0.18573916341942825,
"grad_norm": 0.5911145210266113,
"learning_rate": 1.0913854331930282e-05,
"loss": 0.6724,
"step": 21545
},
{
"epoch": 0.18600641401427637,
"grad_norm": 0.5589364767074585,
"learning_rate": 1.0844551614153456e-05,
"loss": 0.6731,
"step": 21576
},
{
"epoch": 0.18627366460912445,
"grad_norm": 0.6362337470054626,
"learning_rate": 1.0775408638687725e-05,
"loss": 0.6754,
"step": 21607
},
{
"epoch": 0.18654091520397256,
"grad_norm": 0.615933358669281,
"learning_rate": 1.0706426185802165e-05,
"loss": 0.6703,
"step": 21638
},
{
"epoch": 0.18680816579882065,
"grad_norm": 0.6284533739089966,
"learning_rate": 1.0637605033954371e-05,
"loss": 0.6688,
"step": 21669
},
{
"epoch": 0.18707541639366876,
"grad_norm": 0.6516579389572144,
"learning_rate": 1.05689459597817e-05,
"loss": 0.6643,
"step": 21700
},
{
"epoch": 0.18734266698851684,
"grad_norm": 0.583691418170929,
"learning_rate": 1.050044973809246e-05,
"loss": 0.6787,
"step": 21731
},
{
"epoch": 0.18760991758336495,
"grad_norm": 0.5451207756996155,
"learning_rate": 1.043211714185722e-05,
"loss": 0.6728,
"step": 21762
},
{
"epoch": 0.18787716817821304,
"grad_norm": 0.6690101623535156,
"learning_rate": 1.036394894220003e-05,
"loss": 0.6732,
"step": 21793
},
{
"epoch": 0.18814441877306115,
"grad_norm": 0.6195334196090698,
"learning_rate": 1.0295945908389751e-05,
"loss": 0.6723,
"step": 21824
},
{
"epoch": 0.18841166936790923,
"grad_norm": 0.6505009531974792,
"learning_rate": 1.0228108807831393e-05,
"loss": 0.6755,
"step": 21855
},
{
"epoch": 0.18867891996275735,
"grad_norm": 0.5886141061782837,
"learning_rate": 1.01604384060574e-05,
"loss": 0.6762,
"step": 21886
},
{
"epoch": 0.18894617055760543,
"grad_norm": 0.6939468383789062,
"learning_rate": 1.009293546671907e-05,
"loss": 0.6707,
"step": 21917
},
{
"epoch": 0.18921342115245354,
"grad_norm": 0.572865903377533,
"learning_rate": 1.002560075157791e-05,
"loss": 0.6649,
"step": 21948
},
{
"epoch": 0.18948067174730163,
"grad_norm": 0.6154040694236755,
"learning_rate": 9.958435020496995e-06,
"loss": 0.677,
"step": 21979
},
{
"epoch": 0.18974792234214974,
"grad_norm": 0.6302117705345154,
"learning_rate": 9.89143903143249e-06,
"loss": 0.6714,
"step": 22010
},
{
"epoch": 0.19001517293699782,
"grad_norm": 0.5567659139633179,
"learning_rate": 9.824613540425038e-06,
"loss": 0.6842,
"step": 22041
},
{
"epoch": 0.19028242353184593,
"grad_norm": 0.6444590091705322,
"learning_rate": 9.757959301591197e-06,
"loss": 0.6766,
"step": 22072
},
{
"epoch": 0.19054967412669402,
"grad_norm": 0.5904573202133179,
"learning_rate": 9.691477067115017e-06,
"loss": 0.6774,
"step": 22103
},
{
"epoch": 0.19081692472154213,
"grad_norm": 0.6825259923934937,
"learning_rate": 9.625167587239467e-06,
"loss": 0.6669,
"step": 22134
},
{
"epoch": 0.19108417531639021,
"grad_norm": 0.6414413452148438,
"learning_rate": 9.559031610258007e-06,
"loss": 0.6715,
"step": 22165
},
{
"epoch": 0.19135142591123833,
"grad_norm": 0.5381656885147095,
"learning_rate": 9.493069882506164e-06,
"loss": 0.6761,
"step": 22196
},
{
"epoch": 0.1916186765060864,
"grad_norm": 0.6131913065910339,
"learning_rate": 9.427283148353056e-06,
"loss": 0.6789,
"step": 22227
},
{
"epoch": 0.19188592710093452,
"grad_norm": 0.5893442630767822,
"learning_rate": 9.361672150193052e-06,
"loss": 0.6794,
"step": 22258
},
{
"epoch": 0.1921531776957826,
"grad_norm": 0.6715711951255798,
"learning_rate": 9.29623762843734e-06,
"loss": 0.6802,
"step": 22289
},
{
"epoch": 0.19242042829063072,
"grad_norm": 0.5842010974884033,
"learning_rate": 9.230980321505594e-06,
"loss": 0.6787,
"step": 22320
},
{
"epoch": 0.1926876788854788,
"grad_norm": 0.6658715009689331,
"learning_rate": 9.165900965817668e-06,
"loss": 0.6751,
"step": 22351
},
{
"epoch": 0.19295492948032691,
"grad_norm": 0.5553327798843384,
"learning_rate": 9.101000295785245e-06,
"loss": 0.6721,
"step": 22382
},
{
"epoch": 0.193222180075175,
"grad_norm": 0.5402572751045227,
"learning_rate": 9.036279043803565e-06,
"loss": 0.6687,
"step": 22413
},
{
"epoch": 0.1934894306700231,
"grad_norm": 0.634361207485199,
"learning_rate": 8.971737940243147e-06,
"loss": 0.6676,
"step": 22444
},
{
"epoch": 0.1937566812648712,
"grad_norm": 0.6722608208656311,
"learning_rate": 8.907377713441592e-06,
"loss": 0.6785,
"step": 22475
},
{
"epoch": 0.1940239318597193,
"grad_norm": 0.6826739311218262,
"learning_rate": 8.843199089695293e-06,
"loss": 0.6655,
"step": 22506
},
{
"epoch": 0.1942911824545674,
"grad_norm": 0.5914424061775208,
"learning_rate": 8.779202793251311e-06,
"loss": 0.6678,
"step": 22537
},
{
"epoch": 0.1945584330494155,
"grad_norm": 0.6146554946899414,
"learning_rate": 8.715389546299149e-06,
"loss": 0.6795,
"step": 22568
},
{
"epoch": 0.1948256836442636,
"grad_norm": 0.6002945303916931,
"learning_rate": 8.651760068962617e-06,
"loss": 0.6783,
"step": 22599
},
{
"epoch": 0.1950929342391117,
"grad_norm": 0.6891940236091614,
"learning_rate": 8.588315079291733e-06,
"loss": 0.6705,
"step": 22630
},
{
"epoch": 0.19536018483395978,
"grad_norm": 0.6043858528137207,
"learning_rate": 8.52505529325457e-06,
"loss": 0.6712,
"step": 22661
},
{
"epoch": 0.1956274354288079,
"grad_norm": 0.703781008720398,
"learning_rate": 8.461981424729216e-06,
"loss": 0.6782,
"step": 22692
},
{
"epoch": 0.19589468602365598,
"grad_norm": 0.5306392908096313,
"learning_rate": 8.399094185495725e-06,
"loss": 0.6707,
"step": 22723
},
{
"epoch": 0.1961619366185041,
"grad_norm": 0.5337186455726624,
"learning_rate": 8.336394285228017e-06,
"loss": 0.6696,
"step": 22754
},
{
"epoch": 0.19642918721335217,
"grad_norm": 0.496993750333786,
"learning_rate": 8.273882431485952e-06,
"loss": 0.6701,
"step": 22785
},
{
"epoch": 0.1966964378082003,
"grad_norm": 0.6593798398971558,
"learning_rate": 8.211559329707316e-06,
"loss": 0.6748,
"step": 22816
},
{
"epoch": 0.19696368840304837,
"grad_norm": 0.626368522644043,
"learning_rate": 8.149425683199823e-06,
"loss": 0.6731,
"step": 22847
},
{
"epoch": 0.19723093899789648,
"grad_norm": 0.6292176246643066,
"learning_rate": 8.08748219313325e-06,
"loss": 0.6672,
"step": 22878
},
{
"epoch": 0.19749818959274457,
"grad_norm": 0.5610869526863098,
"learning_rate": 8.025729558531453e-06,
"loss": 0.6753,
"step": 22909
},
{
"epoch": 0.19776544018759268,
"grad_norm": 0.5906586050987244,
"learning_rate": 7.964168476264508e-06,
"loss": 0.6726,
"step": 22940
},
{
"epoch": 0.19803269078244076,
"grad_norm": 0.5963625311851501,
"learning_rate": 7.902799641040884e-06,
"loss": 0.6715,
"step": 22971
},
{
"epoch": 0.19829994137728887,
"grad_norm": 0.5861343145370483,
"learning_rate": 7.841623745399523e-06,
"loss": 0.6796,
"step": 23002
},
{
"epoch": 0.19856719197213696,
"grad_norm": 0.6269991993904114,
"learning_rate": 7.780641479702114e-06,
"loss": 0.674,
"step": 23033
},
{
"epoch": 0.19883444256698507,
"grad_norm": 0.5723039507865906,
"learning_rate": 7.719853532125227e-06,
"loss": 0.6709,
"step": 23064
},
{
"epoch": 0.19910169316183315,
"grad_norm": 0.6368955373764038,
"learning_rate": 7.65926058865258e-06,
"loss": 0.6724,
"step": 23095
},
{
"epoch": 0.19936894375668127,
"grad_norm": 0.6846131086349487,
"learning_rate": 7.598863333067313e-06,
"loss": 0.6761,
"step": 23126
},
{
"epoch": 0.19963619435152935,
"grad_norm": 0.5712518095970154,
"learning_rate": 7.538662446944253e-06,
"loss": 0.6833,
"step": 23157
},
{
"epoch": 0.19990344494637746,
"grad_norm": 0.5600943565368652,
"learning_rate": 7.478658609642211e-06,
"loss": 0.6702,
"step": 23188
},
{
"epoch": 0.20017069554122555,
"grad_norm": 0.644990861415863,
"learning_rate": 7.418852498296327e-06,
"loss": 0.6739,
"step": 23219
},
{
"epoch": 0.20043794613607366,
"grad_norm": 0.607283353805542,
"learning_rate": 7.359244787810457e-06,
"loss": 0.6692,
"step": 23250
},
{
"epoch": 0.20070519673092174,
"grad_norm": 0.7326353192329407,
"learning_rate": 7.299836150849493e-06,
"loss": 0.6727,
"step": 23281
},
{
"epoch": 0.20097244732576985,
"grad_norm": 0.5449365377426147,
"learning_rate": 7.240627257831847e-06,
"loss": 0.6647,
"step": 23312
},
{
"epoch": 0.20123969792061797,
"grad_norm": 0.6297513246536255,
"learning_rate": 7.1816187769218195e-06,
"loss": 0.6651,
"step": 23343
},
{
"epoch": 0.20150694851546605,
"grad_norm": 0.6222187876701355,
"learning_rate": 7.1228113740220895e-06,
"loss": 0.6739,
"step": 23374
},
{
"epoch": 0.20177419911031416,
"grad_norm": 0.6404258608818054,
"learning_rate": 7.064205712766226e-06,
"loss": 0.667,
"step": 23405
},
{
"epoch": 0.20204144970516225,
"grad_norm": 0.49462103843688965,
"learning_rate": 7.005802454511129e-06,
"loss": 0.6677,
"step": 23436
},
{
"epoch": 0.20230870030001036,
"grad_norm": 0.6847253441810608,
"learning_rate": 6.947602258329639e-06,
"loss": 0.6723,
"step": 23467
},
{
"epoch": 0.20257595089485844,
"grad_norm": 0.5333061218261719,
"learning_rate": 6.889605781003078e-06,
"loss": 0.6643,
"step": 23498
},
{
"epoch": 0.20284320148970655,
"grad_norm": 0.6069990396499634,
"learning_rate": 6.831813677013776e-06,
"loss": 0.6709,
"step": 23529
},
{
"epoch": 0.20311045208455464,
"grad_norm": 0.5871272087097168,
"learning_rate": 6.774226598537792e-06,
"loss": 0.6776,
"step": 23560
},
{
"epoch": 0.20337770267940275,
"grad_norm": 0.6089819073677063,
"learning_rate": 6.716845195437482e-06,
"loss": 0.6696,
"step": 23591
},
{
"epoch": 0.20364495327425083,
"grad_norm": 0.5961847305297852,
"learning_rate": 6.659670115254168e-06,
"loss": 0.6692,
"step": 23622
},
{
"epoch": 0.20391220386909895,
"grad_norm": 0.6181542277336121,
"learning_rate": 6.602702003200872e-06,
"loss": 0.6689,
"step": 23653
},
{
"epoch": 0.20417945446394703,
"grad_norm": 0.4776616394519806,
"learning_rate": 6.545941502154992e-06,
"loss": 0.669,
"step": 23684
},
{
"epoch": 0.20444670505879514,
"grad_norm": 0.6631652116775513,
"learning_rate": 6.489389252651057e-06,
"loss": 0.6738,
"step": 23715
},
{
"epoch": 0.20471395565364323,
"grad_norm": 0.6361461877822876,
"learning_rate": 6.4330458928735325e-06,
"loss": 0.674,
"step": 23746
},
{
"epoch": 0.20498120624849134,
"grad_norm": 0.6103286743164062,
"learning_rate": 6.376912058649559e-06,
"loss": 0.6752,
"step": 23777
},
{
"epoch": 0.20524845684333942,
"grad_norm": 0.5240300893783569,
"learning_rate": 6.320988383441845e-06,
"loss": 0.6745,
"step": 23808
},
{
"epoch": 0.20551570743818753,
"grad_norm": 0.6142013072967529,
"learning_rate": 6.265275498341452e-06,
"loss": 0.672,
"step": 23839
},
{
"epoch": 0.20578295803303562,
"grad_norm": 0.5362926125526428,
"learning_rate": 6.209774032060714e-06,
"loss": 0.6726,
"step": 23870
},
{
"epoch": 0.20605020862788373,
"grad_norm": 0.6299006938934326,
"learning_rate": 6.1544846109261365e-06,
"loss": 0.6713,
"step": 23901
},
{
"epoch": 0.2063174592227318,
"grad_norm": 0.588973343372345,
"learning_rate": 6.099407858871342e-06,
"loss": 0.6682,
"step": 23932
},
{
"epoch": 0.20658470981757993,
"grad_norm": 0.5536270141601562,
"learning_rate": 6.044544397429958e-06,
"loss": 0.6647,
"step": 23963
},
{
"epoch": 0.206851960412428,
"grad_norm": 0.6450552940368652,
"learning_rate": 5.989894845728708e-06,
"loss": 0.6728,
"step": 23994
},
{
"epoch": 0.20711921100727612,
"grad_norm": 0.6554008722305298,
"learning_rate": 5.9354598204803605e-06,
"loss": 0.6685,
"step": 24025
},
{
"epoch": 0.2073864616021242,
"grad_norm": 0.6218012571334839,
"learning_rate": 5.881239935976762e-06,
"loss": 0.6739,
"step": 24056
},
{
"epoch": 0.20765371219697232,
"grad_norm": 0.5855986475944519,
"learning_rate": 5.827235804081954e-06,
"loss": 0.6633,
"step": 24087
},
{
"epoch": 0.2079209627918204,
"grad_norm": 0.5602990388870239,
"learning_rate": 5.773448034225221e-06,
"loss": 0.6645,
"step": 24118
},
{
"epoch": 0.2081882133866685,
"grad_norm": 0.4789784252643585,
"learning_rate": 5.719877233394228e-06,
"loss": 0.6785,
"step": 24149
},
{
"epoch": 0.2084554639815166,
"grad_norm": 0.5836377143859863,
"learning_rate": 5.666524006128191e-06,
"loss": 0.6699,
"step": 24180
},
{
"epoch": 0.2087227145763647,
"grad_norm": 0.5053188800811768,
"learning_rate": 5.613388954511015e-06,
"loss": 0.6628,
"step": 24211
},
{
"epoch": 0.2089899651712128,
"grad_norm": 0.5825930833816528,
"learning_rate": 5.560472678164552e-06,
"loss": 0.6672,
"step": 24242
},
{
"epoch": 0.2092572157660609,
"grad_norm": 0.5532339215278625,
"learning_rate": 5.507775774241775e-06,
"loss": 0.6689,
"step": 24273
},
{
"epoch": 0.209524466360909,
"grad_norm": 0.5598761439323425,
"learning_rate": 5.4552988374200945e-06,
"loss": 0.6769,
"step": 24304
},
{
"epoch": 0.2097917169557571,
"grad_norm": 0.7001100182533264,
"learning_rate": 5.403042459894597e-06,
"loss": 0.6691,
"step": 24335
},
{
"epoch": 0.21005896755060519,
"grad_norm": 0.6455100774765015,
"learning_rate": 5.3510072313714135e-06,
"loss": 0.6786,
"step": 24366
},
{
"epoch": 0.2103262181454533,
"grad_norm": 0.541474461555481,
"learning_rate": 5.2991937390610205e-06,
"loss": 0.6766,
"step": 24397
},
{
"epoch": 0.21059346874030138,
"grad_norm": 0.48722484707832336,
"learning_rate": 5.247602567671625e-06,
"loss": 0.6715,
"step": 24428
},
{
"epoch": 0.2108607193351495,
"grad_norm": 0.6046543121337891,
"learning_rate": 5.196234299402603e-06,
"loss": 0.666,
"step": 24459
},
{
"epoch": 0.21112796992999758,
"grad_norm": 0.6596629023551941,
"learning_rate": 5.145089513937865e-06,
"loss": 0.664,
"step": 24490
},
{
"epoch": 0.2113952205248457,
"grad_norm": 0.5654379725456238,
"learning_rate": 5.094168788439369e-06,
"loss": 0.6641,
"step": 24521
},
{
"epoch": 0.21166247111969377,
"grad_norm": 0.6351049542427063,
"learning_rate": 5.043472697540594e-06,
"loss": 0.6607,
"step": 24552
},
{
"epoch": 0.21192972171454189,
"grad_norm": 0.6417475938796997,
"learning_rate": 4.993001813340012e-06,
"loss": 0.6795,
"step": 24583
},
{
"epoch": 0.21219697230938997,
"grad_norm": 0.6464203000068665,
"learning_rate": 4.942756705394702e-06,
"loss": 0.6648,
"step": 24614
},
{
"epoch": 0.21246422290423808,
"grad_norm": 0.656051754951477,
"learning_rate": 4.892737940713884e-06,
"loss": 0.6674,
"step": 24645
},
{
"epoch": 0.21273147349908617,
"grad_norm": 0.6434012055397034,
"learning_rate": 4.842946083752511e-06,
"loss": 0.6694,
"step": 24676
},
{
"epoch": 0.21299872409393428,
"grad_norm": 0.6368052363395691,
"learning_rate": 4.79338169640493e-06,
"loss": 0.6688,
"step": 24707
},
{
"epoch": 0.21326597468878236,
"grad_norm": 0.6277346014976501,
"learning_rate": 4.74404533799851e-06,
"loss": 0.6601,
"step": 24738
},
{
"epoch": 0.21353322528363047,
"grad_norm": 0.6229516267776489,
"learning_rate": 4.694937565287344e-06,
"loss": 0.66,
"step": 24769
},
{
"epoch": 0.21380047587847856,
"grad_norm": 0.5491278171539307,
"learning_rate": 4.646058932445985e-06,
"loss": 0.677,
"step": 24800
},
{
"epoch": 0.21406772647332667,
"grad_norm": 0.6241891384124756,
"learning_rate": 4.597409991063148e-06,
"loss": 0.6627,
"step": 24831
},
{
"epoch": 0.21433497706817475,
"grad_norm": 0.5684633255004883,
"learning_rate": 4.5489912901355375e-06,
"loss": 0.6675,
"step": 24862
},
{
"epoch": 0.21460222766302287,
"grad_norm": 0.5918188095092773,
"learning_rate": 4.500803376061608e-06,
"loss": 0.6723,
"step": 24893
},
{
"epoch": 0.21486947825787095,
"grad_norm": 0.5449228286743164,
"learning_rate": 4.45284679263541e-06,
"loss": 0.6788,
"step": 24924
},
{
"epoch": 0.21513672885271906,
"grad_norm": 0.5575039982795715,
"learning_rate": 4.4051220810404775e-06,
"loss": 0.6695,
"step": 24955
},
{
"epoch": 0.21540397944756715,
"grad_norm": 0.6305622458457947,
"learning_rate": 4.3576297798437025e-06,
"loss": 0.6633,
"step": 24986
},
{
"epoch": 0.21567123004241526,
"grad_norm": 0.6464682221412659,
"learning_rate": 4.3103704249892436e-06,
"loss": 0.6777,
"step": 25017
},
{
"epoch": 0.21593848063726334,
"grad_norm": 0.6043287515640259,
"learning_rate": 4.263344549792487e-06,
"loss": 0.663,
"step": 25048
},
{
"epoch": 0.21620573123211145,
"grad_norm": 0.5028355717658997,
"learning_rate": 4.216552684934056e-06,
"loss": 0.6789,
"step": 25079
},
{
"epoch": 0.21647298182695954,
"grad_norm": 0.5976166725158691,
"learning_rate": 4.169995358453777e-06,
"loss": 0.6632,
"step": 25110
},
{
"epoch": 0.21674023242180765,
"grad_norm": 0.5891854763031006,
"learning_rate": 4.123673095744757e-06,
"loss": 0.6714,
"step": 25141
},
{
"epoch": 0.21700748301665573,
"grad_norm": 0.5742916464805603,
"learning_rate": 4.077586419547435e-06,
"loss": 0.6611,
"step": 25172
},
{
"epoch": 0.21727473361150385,
"grad_norm": 0.5454676747322083,
"learning_rate": 4.03173584994368e-06,
"loss": 0.6762,
"step": 25203
},
{
"epoch": 0.21754198420635193,
"grad_norm": 0.5392730832099915,
"learning_rate": 3.986121904350948e-06,
"loss": 0.6747,
"step": 25234
},
{
"epoch": 0.21780923480120004,
"grad_norm": 0.571982741355896,
"learning_rate": 3.940745097516407e-06,
"loss": 0.6714,
"step": 25265
},
{
"epoch": 0.21807648539604815,
"grad_norm": 0.6387728452682495,
"learning_rate": 3.89560594151116e-06,
"loss": 0.6743,
"step": 25296
},
{
"epoch": 0.21834373599089624,
"grad_norm": 0.642375648021698,
"learning_rate": 3.850704945724456e-06,
"loss": 0.6691,
"step": 25327
},
{
"epoch": 0.21861098658574435,
"grad_norm": 0.6087313890457153,
"learning_rate": 3.8060426168579077e-06,
"loss": 0.6689,
"step": 25358
},
{
"epoch": 0.21887823718059243,
"grad_norm": 0.6513610482215881,
"learning_rate": 3.7616194589198407e-06,
"loss": 0.6685,
"step": 25389
},
{
"epoch": 0.21914548777544055,
"grad_norm": 0.5720754861831665,
"learning_rate": 3.7174359732195574e-06,
"loss": 0.6701,
"step": 25420
},
{
"epoch": 0.21941273837028863,
"grad_norm": 0.6255003213882446,
"learning_rate": 3.673492658361677e-06,
"loss": 0.6755,
"step": 25451
},
{
"epoch": 0.21967998896513674,
"grad_norm": 0.6417989134788513,
"learning_rate": 3.6297900102405467e-06,
"loss": 0.6711,
"step": 25482
},
{
"epoch": 0.21994723955998483,
"grad_norm": 0.5277450680732727,
"learning_rate": 3.586328522034607e-06,
"loss": 0.6757,
"step": 25513
},
{
"epoch": 0.22021449015483294,
"grad_norm": 0.5509169101715088,
"learning_rate": 3.543108684200838e-06,
"loss": 0.6695,
"step": 25544
},
{
"epoch": 0.22048174074968102,
"grad_norm": 0.5626096725463867,
"learning_rate": 3.5001309844692464e-06,
"loss": 0.6681,
"step": 25575
},
{
"epoch": 0.22074899134452913,
"grad_norm": 0.6337453722953796,
"learning_rate": 3.4573959078373215e-06,
"loss": 0.6689,
"step": 25606
},
{
"epoch": 0.22101624193937722,
"grad_norm": 0.5907692909240723,
"learning_rate": 3.4149039365646063e-06,
"loss": 0.658,
"step": 25637
},
{
"epoch": 0.22128349253422533,
"grad_norm": 0.5864708423614502,
"learning_rate": 3.3726555501672143e-06,
"loss": 0.6696,
"step": 25668
},
{
"epoch": 0.2215507431290734,
"grad_norm": 0.5856440663337708,
"learning_rate": 3.33065122541244e-06,
"loss": 0.6724,
"step": 25699
},
{
"epoch": 0.22181799372392152,
"grad_norm": 0.5745190978050232,
"learning_rate": 3.288891436313385e-06,
"loss": 0.6625,
"step": 25730
},
{
"epoch": 0.2220852443187696,
"grad_norm": 0.6039624810218811,
"learning_rate": 3.2473766541235963e-06,
"loss": 0.67,
"step": 25761
},
{
"epoch": 0.22235249491361772,
"grad_norm": 0.6757243275642395,
"learning_rate": 3.2061073473317466e-06,
"loss": 0.665,
"step": 25792
},
{
"epoch": 0.2226197455084658,
"grad_norm": 0.6563556790351868,
"learning_rate": 3.1650839816563444e-06,
"loss": 0.6667,
"step": 25823
},
{
"epoch": 0.22288699610331392,
"grad_norm": 0.597527801990509,
"learning_rate": 3.1243070200405093e-06,
"loss": 0.6646,
"step": 25854
},
{
"epoch": 0.223154246698162,
"grad_norm": 0.6143335700035095,
"learning_rate": 3.0837769226467e-06,
"loss": 0.6659,
"step": 25885
},
{
"epoch": 0.2234214972930101,
"grad_norm": 0.5681073069572449,
"learning_rate": 3.0434941468515666e-06,
"loss": 0.6659,
"step": 25916
},
{
"epoch": 0.2236887478878582,
"grad_norm": 0.5153323411941528,
"learning_rate": 3.003459147240753e-06,
"loss": 0.6712,
"step": 25947
},
{
"epoch": 0.2239559984827063,
"grad_norm": 0.5973331928253174,
"learning_rate": 2.9636723756037875e-06,
"loss": 0.6624,
"step": 25978
},
{
"epoch": 0.2242232490775544,
"grad_norm": 0.6074575185775757,
"learning_rate": 2.9241342809289833e-06,
"loss": 0.6635,
"step": 26009
},
{
"epoch": 0.2244904996724025,
"grad_norm": 0.552126944065094,
"learning_rate": 2.8848453093983594e-06,
"loss": 0.665,
"step": 26040
},
{
"epoch": 0.2247577502672506,
"grad_norm": 0.5694225430488586,
"learning_rate": 2.8458059043826257e-06,
"loss": 0.6685,
"step": 26071
},
{
"epoch": 0.2250250008620987,
"grad_norm": 0.5542888045310974,
"learning_rate": 2.807016506436172e-06,
"loss": 0.6546,
"step": 26102
},
{
"epoch": 0.22529225145694678,
"grad_norm": 0.6268821358680725,
"learning_rate": 2.7684775532920566e-06,
"loss": 0.6645,
"step": 26133
},
{
"epoch": 0.2255595020517949,
"grad_norm": 0.5739062428474426,
"learning_rate": 2.7301894798571425e-06,
"loss": 0.667,
"step": 26164
},
{
"epoch": 0.22582675264664298,
"grad_norm": 0.5594868659973145,
"learning_rate": 2.6921527182071386e-06,
"loss": 0.666,
"step": 26195
},
{
"epoch": 0.2260940032414911,
"grad_norm": 0.613577663898468,
"learning_rate": 2.654367697581725e-06,
"loss": 0.6732,
"step": 26226
},
{
"epoch": 0.22636125383633918,
"grad_norm": 0.5728760361671448,
"learning_rate": 2.6168348443797175e-06,
"loss": 0.6746,
"step": 26257
},
{
"epoch": 0.2266285044311873,
"grad_norm": 0.5837547779083252,
"learning_rate": 2.5795545821542757e-06,
"loss": 0.6585,
"step": 26288
},
{
"epoch": 0.22689575502603537,
"grad_norm": 0.5179529786109924,
"learning_rate": 2.54252733160808e-06,
"loss": 0.6633,
"step": 26319
},
{
"epoch": 0.22716300562088348,
"grad_norm": 0.5924873352050781,
"learning_rate": 2.5057535105886294e-06,
"loss": 0.667,
"step": 26350
},
{
"epoch": 0.22743025621573157,
"grad_norm": 0.5567170977592468,
"learning_rate": 2.4692335340834953e-06,
"loss": 0.6665,
"step": 26381
},
{
"epoch": 0.22769750681057968,
"grad_norm": 0.5711391568183899,
"learning_rate": 2.432967814215639e-06,
"loss": 0.6664,
"step": 26412
},
{
"epoch": 0.22796475740542776,
"grad_norm": 0.5977137684822083,
"learning_rate": 2.396956760238794e-06,
"loss": 0.6776,
"step": 26443
},
{
"epoch": 0.22823200800027588,
"grad_norm": 0.5300673246383667,
"learning_rate": 2.361200778532796e-06,
"loss": 0.6598,
"step": 26474
},
{
"epoch": 0.22849925859512396,
"grad_norm": 0.6637442111968994,
"learning_rate": 2.325700272599049e-06,
"loss": 0.6503,
"step": 26505
},
{
"epoch": 0.22876650918997207,
"grad_norm": 0.5773261189460754,
"learning_rate": 2.2904556430559415e-06,
"loss": 0.6681,
"step": 26536
},
{
"epoch": 0.22903375978482016,
"grad_norm": 0.5448989272117615,
"learning_rate": 2.2554672876343106e-06,
"loss": 0.6642,
"step": 26567
},
{
"epoch": 0.22930101037966827,
"grad_norm": 0.528055727481842,
"learning_rate": 2.220735601173002e-06,
"loss": 0.6619,
"step": 26598
},
{
"epoch": 0.22956826097451635,
"grad_norm": 0.6259179711341858,
"learning_rate": 2.186260975614382e-06,
"loss": 0.6685,
"step": 26629
},
{
"epoch": 0.22983551156936446,
"grad_norm": 0.5694562196731567,
"learning_rate": 2.1520437999999034e-06,
"loss": 0.667,
"step": 26660
},
{
"epoch": 0.23010276216421255,
"grad_norm": 0.5807471871376038,
"learning_rate": 2.1180844604657526e-06,
"loss": 0.666,
"step": 26691
},
{
"epoch": 0.23037001275906066,
"grad_norm": 0.5103436708450317,
"learning_rate": 2.084383340238455e-06,
"loss": 0.6673,
"step": 26722
},
{
"epoch": 0.23063726335390874,
"grad_norm": 0.53006911277771,
"learning_rate": 2.0509408196305704e-06,
"loss": 0.6635,
"step": 26753
},
{
"epoch": 0.23090451394875686,
"grad_norm": 0.48842284083366394,
"learning_rate": 2.017757276036403e-06,
"loss": 0.6644,
"step": 26784
},
{
"epoch": 0.23117176454360494,
"grad_norm": 0.5291392803192139,
"learning_rate": 1.984833083927726e-06,
"loss": 0.6667,
"step": 26815
},
{
"epoch": 0.23143901513845305,
"grad_norm": 0.5639858245849609,
"learning_rate": 1.952168614849581e-06,
"loss": 0.6688,
"step": 26846
},
{
"epoch": 0.23170626573330114,
"grad_norm": 0.5842956304550171,
"learning_rate": 1.919764237416058e-06,
"loss": 0.6636,
"step": 26877
},
{
"epoch": 0.23197351632814925,
"grad_norm": 0.5915853977203369,
"learning_rate": 1.8876203173061463e-06,
"loss": 0.6655,
"step": 26908
},
{
"epoch": 0.23224076692299733,
"grad_norm": 0.5913236141204834,
"learning_rate": 1.8557372172596206e-06,
"loss": 0.6657,
"step": 26939
},
{
"epoch": 0.23250801751784544,
"grad_norm": 0.5344509482383728,
"learning_rate": 1.8241152970729341e-06,
"loss": 0.6665,
"step": 26970
},
{
"epoch": 0.23277526811269353,
"grad_norm": 0.5515422821044922,
"learning_rate": 1.7927549135951572e-06,
"loss": 0.6703,
"step": 27001
},
{
"epoch": 0.23304251870754164,
"grad_norm": 0.5578742027282715,
"learning_rate": 1.7616564207239477e-06,
"loss": 0.6611,
"step": 27032
},
{
"epoch": 0.23330976930238972,
"grad_norm": 0.5652346014976501,
"learning_rate": 1.730820169401584e-06,
"loss": 0.6674,
"step": 27063
},
{
"epoch": 0.23357701989723784,
"grad_norm": 0.5775825381278992,
"learning_rate": 1.7002465076109558e-06,
"loss": 0.6667,
"step": 27094
},
{
"epoch": 0.23384427049208592,
"grad_norm": 0.5728250741958618,
"learning_rate": 1.6699357803716898e-06,
"loss": 0.6611,
"step": 27125
},
{
"epoch": 0.23411152108693403,
"grad_norm": 0.6758059859275818,
"learning_rate": 1.6398883297362305e-06,
"loss": 0.6616,
"step": 27156
},
{
"epoch": 0.23437877168178214,
"grad_norm": 0.5942234396934509,
"learning_rate": 1.6101044947859606e-06,
"loss": 0.6667,
"step": 27187
},
{
"epoch": 0.23464602227663023,
"grad_norm": 0.6499549746513367,
"learning_rate": 1.5805846116274114e-06,
"loss": 0.6618,
"step": 27218
},
{
"epoch": 0.23491327287147834,
"grad_norm": 0.5207785964012146,
"learning_rate": 1.5513290133884611e-06,
"loss": 0.673,
"step": 27249
},
{
"epoch": 0.23518052346632642,
"grad_norm": 0.5559687614440918,
"learning_rate": 1.5223380302145512e-06,
"loss": 0.6686,
"step": 27280
},
{
"epoch": 0.23544777406117454,
"grad_norm": 0.5722928643226624,
"learning_rate": 1.4936119892649925e-06,
"loss": 0.6647,
"step": 27311
},
{
"epoch": 0.23571502465602262,
"grad_norm": 0.5691096186637878,
"learning_rate": 1.4651512147092482e-06,
"loss": 0.67,
"step": 27342
},
{
"epoch": 0.23598227525087073,
"grad_norm": 0.6147379279136658,
"learning_rate": 1.4369560277232908e-06,
"loss": 0.6576,
"step": 27373
},
{
"epoch": 0.23624952584571882,
"grad_norm": 0.4987974464893341,
"learning_rate": 1.409026746485978e-06,
"loss": 0.6591,
"step": 27404
},
{
"epoch": 0.23651677644056693,
"grad_norm": 0.6399179697036743,
"learning_rate": 1.3813636861754464e-06,
"loss": 0.6674,
"step": 27435
},
{
"epoch": 0.236784027035415,
"grad_norm": 0.6207811832427979,
"learning_rate": 1.3539671589655773e-06,
"loss": 0.6694,
"step": 27466
}
],
"logging_steps": 31,
"max_steps": 30517,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 3052,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.909218670290351e+19,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}