{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.47778067698219,
"eval_steps": 500,
"global_step": 10000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0036945613747462877,
"grad_norm": 2.2292165756225586,
"learning_rate": 5.911330049261084e-06,
"loss": 0.9395,
"step": 25
},
{
"epoch": 0.007389122749492575,
"grad_norm": 0.8381065130233765,
"learning_rate": 1.206896551724138e-05,
"loss": 0.89,
"step": 50
},
{
"epoch": 0.011083684124238862,
"grad_norm": 0.9238471984863281,
"learning_rate": 1.8226600985221676e-05,
"loss": 0.8384,
"step": 75
},
{
"epoch": 0.01477824549898515,
"grad_norm": 0.6837311387062073,
"learning_rate": 2.438423645320197e-05,
"loss": 0.784,
"step": 100
},
{
"epoch": 0.01847280687373144,
"grad_norm": 0.7093706727027893,
"learning_rate": 3.0541871921182266e-05,
"loss": 0.7675,
"step": 125
},
{
"epoch": 0.022167368248477724,
"grad_norm": 0.6208077073097229,
"learning_rate": 3.669950738916256e-05,
"loss": 0.7466,
"step": 150
},
{
"epoch": 0.025861929623224013,
"grad_norm": 0.8929094076156616,
"learning_rate": 4.2857142857142856e-05,
"loss": 0.7386,
"step": 175
},
{
"epoch": 0.0295564909979703,
"grad_norm": 0.7828120589256287,
"learning_rate": 4.901477832512316e-05,
"loss": 0.7173,
"step": 200
},
{
"epoch": 0.03325105237271659,
"grad_norm": 0.7134449481964111,
"learning_rate": 5.517241379310345e-05,
"loss": 0.7108,
"step": 225
},
{
"epoch": 0.03694561374746288,
"grad_norm": 0.7464851140975952,
"learning_rate": 6.133004926108375e-05,
"loss": 0.7332,
"step": 250
},
{
"epoch": 0.04064017512220916,
"grad_norm": 0.677793025970459,
"learning_rate": 6.748768472906404e-05,
"loss": 0.7194,
"step": 275
},
{
"epoch": 0.04433473649695545,
"grad_norm": 0.7936354875564575,
"learning_rate": 7.364532019704434e-05,
"loss": 0.7253,
"step": 300
},
{
"epoch": 0.04802929787170174,
"grad_norm": 0.6711100935935974,
"learning_rate": 7.980295566502463e-05,
"loss": 0.7018,
"step": 325
},
{
"epoch": 0.051723859246448026,
"grad_norm": 0.5816489458084106,
"learning_rate": 8.596059113300493e-05,
"loss": 0.7298,
"step": 350
},
{
"epoch": 0.055418420621194314,
"grad_norm": 0.6680681705474854,
"learning_rate": 9.211822660098522e-05,
"loss": 0.7149,
"step": 375
},
{
"epoch": 0.0591129819959406,
"grad_norm": 0.5643934607505798,
"learning_rate": 9.827586206896552e-05,
"loss": 0.763,
"step": 400
},
{
"epoch": 0.06280754337068689,
"grad_norm": 0.5739309191703796,
"learning_rate": 9.986286759104069e-05,
"loss": 0.7345,
"step": 425
},
{
"epoch": 0.06650210474543318,
"grad_norm": 0.5929909944534302,
"learning_rate": 9.967240591193052e-05,
"loss": 0.7364,
"step": 450
},
{
"epoch": 0.07019666612017947,
"grad_norm": 0.609235405921936,
"learning_rate": 9.948194423282036e-05,
"loss": 0.7294,
"step": 475
},
{
"epoch": 0.07389122749492576,
"grad_norm": 0.4643324613571167,
"learning_rate": 9.92914825537102e-05,
"loss": 0.7344,
"step": 500
},
{
"epoch": 0.07758578886967203,
"grad_norm": 0.5267598032951355,
"learning_rate": 9.910102087460003e-05,
"loss": 0.7249,
"step": 525
},
{
"epoch": 0.08128035024441832,
"grad_norm": 0.47951069474220276,
"learning_rate": 9.891055919548987e-05,
"loss": 0.7256,
"step": 550
},
{
"epoch": 0.08497491161916461,
"grad_norm": 0.4505012333393097,
"learning_rate": 9.87200975163797e-05,
"loss": 0.7359,
"step": 575
},
{
"epoch": 0.0886694729939109,
"grad_norm": 0.5320091247558594,
"learning_rate": 9.852963583726955e-05,
"loss": 0.6856,
"step": 600
},
{
"epoch": 0.09236403436865719,
"grad_norm": 0.5583036541938782,
"learning_rate": 9.833917415815939e-05,
"loss": 0.7235,
"step": 625
},
{
"epoch": 0.09605859574340347,
"grad_norm": 0.5139252543449402,
"learning_rate": 9.814871247904922e-05,
"loss": 0.7272,
"step": 650
},
{
"epoch": 0.09975315711814976,
"grad_norm": 0.4989326000213623,
"learning_rate": 9.795825079993906e-05,
"loss": 0.6952,
"step": 675
},
{
"epoch": 0.10344771849289605,
"grad_norm": 0.47355732321739197,
"learning_rate": 9.776778912082889e-05,
"loss": 0.7266,
"step": 700
},
{
"epoch": 0.10714227986764234,
"grad_norm": 0.3588508367538452,
"learning_rate": 9.757732744171874e-05,
"loss": 0.7406,
"step": 725
},
{
"epoch": 0.11083684124238863,
"grad_norm": 0.4120556712150574,
"learning_rate": 9.738686576260857e-05,
"loss": 0.7443,
"step": 750
},
{
"epoch": 0.11453140261713492,
"grad_norm": 0.5160555839538574,
"learning_rate": 9.71964040834984e-05,
"loss": 0.7134,
"step": 775
},
{
"epoch": 0.1182259639918812,
"grad_norm": 0.5423145890235901,
"learning_rate": 9.700594240438823e-05,
"loss": 0.7289,
"step": 800
},
{
"epoch": 0.1219205253666275,
"grad_norm": 0.5352346301078796,
"learning_rate": 9.681548072527808e-05,
"loss": 0.7144,
"step": 825
},
{
"epoch": 0.12561508674137378,
"grad_norm": 0.47908860445022583,
"learning_rate": 9.662501904616791e-05,
"loss": 0.7175,
"step": 850
},
{
"epoch": 0.12930964811612006,
"grad_norm": 0.47986069321632385,
"learning_rate": 9.643455736705776e-05,
"loss": 0.6983,
"step": 875
},
{
"epoch": 0.13300420949086636,
"grad_norm": 0.6903620958328247,
"learning_rate": 9.624409568794759e-05,
"loss": 0.7086,
"step": 900
},
{
"epoch": 0.13669877086561263,
"grad_norm": 0.44413208961486816,
"learning_rate": 9.605363400883742e-05,
"loss": 0.7306,
"step": 925
},
{
"epoch": 0.14039333224035894,
"grad_norm": 0.4634678065776825,
"learning_rate": 9.586317232972727e-05,
"loss": 0.7061,
"step": 950
},
{
"epoch": 0.1440878936151052,
"grad_norm": 0.5110129714012146,
"learning_rate": 9.56727106506171e-05,
"loss": 0.7406,
"step": 975
},
{
"epoch": 0.1477824549898515,
"grad_norm": 0.5460866093635559,
"learning_rate": 9.548224897150694e-05,
"loss": 0.721,
"step": 1000
},
{
"epoch": 0.1514770163645978,
"grad_norm": 0.5179885029792786,
"learning_rate": 9.529178729239677e-05,
"loss": 0.7068,
"step": 1025
},
{
"epoch": 0.15517157773934406,
"grad_norm": 0.40280836820602417,
"learning_rate": 9.51013256132866e-05,
"loss": 0.7009,
"step": 1050
},
{
"epoch": 0.15886613911409037,
"grad_norm": 1.2706756591796875,
"learning_rate": 9.491086393417645e-05,
"loss": 0.7125,
"step": 1075
},
{
"epoch": 0.16256070048883664,
"grad_norm": 0.4963163435459137,
"learning_rate": 9.47204022550663e-05,
"loss": 0.7149,
"step": 1100
},
{
"epoch": 0.16625526186358294,
"grad_norm": 0.5147728323936462,
"learning_rate": 9.452994057595613e-05,
"loss": 0.7245,
"step": 1125
},
{
"epoch": 0.16994982323832922,
"grad_norm": 0.5933899879455566,
"learning_rate": 9.433947889684596e-05,
"loss": 0.7261,
"step": 1150
},
{
"epoch": 0.17364438461307552,
"grad_norm": 0.4750466048717499,
"learning_rate": 9.414901721773579e-05,
"loss": 0.7206,
"step": 1175
},
{
"epoch": 0.1773389459878218,
"grad_norm": 0.46546968817710876,
"learning_rate": 9.395855553862564e-05,
"loss": 0.7121,
"step": 1200
},
{
"epoch": 0.1810335073625681,
"grad_norm": 0.6512172818183899,
"learning_rate": 9.376809385951547e-05,
"loss": 0.7212,
"step": 1225
},
{
"epoch": 0.18472806873731437,
"grad_norm": 0.34932607412338257,
"learning_rate": 9.35776321804053e-05,
"loss": 0.704,
"step": 1250
},
{
"epoch": 0.18842263011206067,
"grad_norm": 0.4648846983909607,
"learning_rate": 9.338717050129514e-05,
"loss": 0.7419,
"step": 1275
},
{
"epoch": 0.19211719148680695,
"grad_norm": 0.4566064774990082,
"learning_rate": 9.319670882218498e-05,
"loss": 0.7318,
"step": 1300
},
{
"epoch": 0.19581175286155325,
"grad_norm": 0.5357668399810791,
"learning_rate": 9.300624714307481e-05,
"loss": 0.6973,
"step": 1325
},
{
"epoch": 0.19950631423629953,
"grad_norm": 0.4423241913318634,
"learning_rate": 9.281578546396466e-05,
"loss": 0.7298,
"step": 1350
},
{
"epoch": 0.20320087561104583,
"grad_norm": 0.4530033767223358,
"learning_rate": 9.26253237848545e-05,
"loss": 0.7161,
"step": 1375
},
{
"epoch": 0.2068954369857921,
"grad_norm": 0.4678841233253479,
"learning_rate": 9.243486210574433e-05,
"loss": 0.6972,
"step": 1400
},
{
"epoch": 0.21058999836053838,
"grad_norm": 0.6039907336235046,
"learning_rate": 9.224440042663417e-05,
"loss": 0.7165,
"step": 1425
},
{
"epoch": 0.21428455973528468,
"grad_norm": 0.4463271498680115,
"learning_rate": 9.2053938747524e-05,
"loss": 0.6863,
"step": 1450
},
{
"epoch": 0.21797912111003095,
"grad_norm": 0.5739301443099976,
"learning_rate": 9.186347706841384e-05,
"loss": 0.6907,
"step": 1475
},
{
"epoch": 0.22167368248477726,
"grad_norm": 0.4577805697917938,
"learning_rate": 9.167301538930367e-05,
"loss": 0.7114,
"step": 1500
},
{
"epoch": 0.22536824385952353,
"grad_norm": 0.4522150158882141,
"learning_rate": 9.14825537101935e-05,
"loss": 0.6877,
"step": 1525
},
{
"epoch": 0.22906280523426983,
"grad_norm": 0.49612903594970703,
"learning_rate": 9.129209203108335e-05,
"loss": 0.7112,
"step": 1550
},
{
"epoch": 0.2327573666090161,
"grad_norm": 0.4710284471511841,
"learning_rate": 9.11016303519732e-05,
"loss": 0.7062,
"step": 1575
},
{
"epoch": 0.2364519279837624,
"grad_norm": 0.5009223818778992,
"learning_rate": 9.091116867286303e-05,
"loss": 0.7275,
"step": 1600
},
{
"epoch": 0.24014648935850869,
"grad_norm": 0.5547946691513062,
"learning_rate": 9.072070699375286e-05,
"loss": 0.6993,
"step": 1625
},
{
"epoch": 0.243841050733255,
"grad_norm": 0.4580361843109131,
"learning_rate": 9.05302453146427e-05,
"loss": 0.7106,
"step": 1650
},
{
"epoch": 0.24753561210800126,
"grad_norm": 0.4767173230648041,
"learning_rate": 9.033978363553254e-05,
"loss": 0.7103,
"step": 1675
},
{
"epoch": 0.25123017348274757,
"grad_norm": 0.502202570438385,
"learning_rate": 9.014932195642237e-05,
"loss": 0.6921,
"step": 1700
},
{
"epoch": 0.25492473485749384,
"grad_norm": 0.5283953547477722,
"learning_rate": 8.99588602773122e-05,
"loss": 0.7077,
"step": 1725
},
{
"epoch": 0.2586192962322401,
"grad_norm": 0.4994209408760071,
"learning_rate": 8.976839859820204e-05,
"loss": 0.7,
"step": 1750
},
{
"epoch": 0.2623138576069864,
"grad_norm": 0.48279210925102234,
"learning_rate": 8.957793691909188e-05,
"loss": 0.7114,
"step": 1775
},
{
"epoch": 0.2660084189817327,
"grad_norm": 0.6055914759635925,
"learning_rate": 8.938747523998172e-05,
"loss": 0.7081,
"step": 1800
},
{
"epoch": 0.269702980356479,
"grad_norm": 0.489519327878952,
"learning_rate": 8.919701356087156e-05,
"loss": 0.6946,
"step": 1825
},
{
"epoch": 0.27339754173122527,
"grad_norm": 0.5379961133003235,
"learning_rate": 8.90065518817614e-05,
"loss": 0.6996,
"step": 1850
},
{
"epoch": 0.27709210310597154,
"grad_norm": 0.47824332118034363,
"learning_rate": 8.881609020265123e-05,
"loss": 0.6881,
"step": 1875
},
{
"epoch": 0.2807866644807179,
"grad_norm": 1551.0594482421875,
"learning_rate": 8.862562852354107e-05,
"loss": 0.6998,
"step": 1900
},
{
"epoch": 0.28448122585546415,
"grad_norm": 0.4107681214809418,
"learning_rate": 8.84351668444309e-05,
"loss": 0.7088,
"step": 1925
},
{
"epoch": 0.2881757872302104,
"grad_norm": 0.4558309316635132,
"learning_rate": 8.824470516532074e-05,
"loss": 0.712,
"step": 1950
},
{
"epoch": 0.2918703486049567,
"grad_norm": 0.539107620716095,
"learning_rate": 8.805424348621057e-05,
"loss": 0.7159,
"step": 1975
},
{
"epoch": 0.295564909979703,
"grad_norm": 0.5768142938613892,
"learning_rate": 8.786378180710042e-05,
"loss": 0.7072,
"step": 2000
},
{
"epoch": 0.2992594713544493,
"grad_norm": 0.5575465559959412,
"learning_rate": 8.767332012799025e-05,
"loss": 0.7118,
"step": 2025
},
{
"epoch": 0.3029540327291956,
"grad_norm": 0.5190144181251526,
"learning_rate": 8.748285844888008e-05,
"loss": 0.7099,
"step": 2050
},
{
"epoch": 0.30664859410394185,
"grad_norm": 0.4934520125389099,
"learning_rate": 8.729239676976993e-05,
"loss": 0.692,
"step": 2075
},
{
"epoch": 0.3103431554786881,
"grad_norm": 0.42613571882247925,
"learning_rate": 8.710193509065976e-05,
"loss": 0.7277,
"step": 2100
},
{
"epoch": 0.31403771685343446,
"grad_norm": 0.5124602317810059,
"learning_rate": 8.691147341154961e-05,
"loss": 0.6801,
"step": 2125
},
{
"epoch": 0.31773227822818073,
"grad_norm": 0.5284898281097412,
"learning_rate": 8.672101173243944e-05,
"loss": 0.7103,
"step": 2150
},
{
"epoch": 0.321426839602927,
"grad_norm": 0.43099457025527954,
"learning_rate": 8.653055005332927e-05,
"loss": 0.7023,
"step": 2175
},
{
"epoch": 0.3251214009776733,
"grad_norm": 0.5190865993499756,
"learning_rate": 8.63400883742191e-05,
"loss": 0.7144,
"step": 2200
},
{
"epoch": 0.3288159623524196,
"grad_norm": 0.4722968637943268,
"learning_rate": 8.614962669510895e-05,
"loss": 0.7351,
"step": 2225
},
{
"epoch": 0.3325105237271659,
"grad_norm": 0.6091466546058655,
"learning_rate": 8.595916501599878e-05,
"loss": 0.7062,
"step": 2250
},
{
"epoch": 0.33620508510191216,
"grad_norm": 0.6135897040367126,
"learning_rate": 8.576870333688862e-05,
"loss": 0.7117,
"step": 2275
},
{
"epoch": 0.33989964647665843,
"grad_norm": 0.5224157571792603,
"learning_rate": 8.557824165777846e-05,
"loss": 0.7068,
"step": 2300
},
{
"epoch": 0.34359420785140476,
"grad_norm": 0.4863536059856415,
"learning_rate": 8.53877799786683e-05,
"loss": 0.6952,
"step": 2325
},
{
"epoch": 0.34728876922615104,
"grad_norm": 0.4728885889053345,
"learning_rate": 8.519731829955814e-05,
"loss": 0.7289,
"step": 2350
},
{
"epoch": 0.3509833306008973,
"grad_norm": 0.5152695775032043,
"learning_rate": 8.500685662044798e-05,
"loss": 0.6986,
"step": 2375
},
{
"epoch": 0.3546778919756436,
"grad_norm": 0.4407690465450287,
"learning_rate": 8.481639494133781e-05,
"loss": 0.6983,
"step": 2400
},
{
"epoch": 0.3583724533503899,
"grad_norm": 0.4756406545639038,
"learning_rate": 8.462593326222764e-05,
"loss": 0.6626,
"step": 2425
},
{
"epoch": 0.3620670147251362,
"grad_norm": 0.48595255613327026,
"learning_rate": 8.443547158311747e-05,
"loss": 0.6892,
"step": 2450
},
{
"epoch": 0.36576157609988247,
"grad_norm": 0.47830772399902344,
"learning_rate": 8.424500990400732e-05,
"loss": 0.7162,
"step": 2475
},
{
"epoch": 0.36945613747462874,
"grad_norm": 0.43090149760246277,
"learning_rate": 8.405454822489715e-05,
"loss": 0.6913,
"step": 2500
},
{
"epoch": 0.373150698849375,
"grad_norm": 0.5338718295097351,
"learning_rate": 8.386408654578698e-05,
"loss": 0.7159,
"step": 2525
},
{
"epoch": 0.37684526022412135,
"grad_norm": 0.4907350242137909,
"learning_rate": 8.367362486667683e-05,
"loss": 0.7022,
"step": 2550
},
{
"epoch": 0.3805398215988676,
"grad_norm": 0.44093430042266846,
"learning_rate": 8.348316318756668e-05,
"loss": 0.7124,
"step": 2575
},
{
"epoch": 0.3842343829736139,
"grad_norm": 0.5388796925544739,
"learning_rate": 8.329270150845651e-05,
"loss": 0.7304,
"step": 2600
},
{
"epoch": 0.38792894434836017,
"grad_norm": 0.4456349313259125,
"learning_rate": 8.310223982934634e-05,
"loss": 0.6964,
"step": 2625
},
{
"epoch": 0.3916235057231065,
"grad_norm": 0.4602237343788147,
"learning_rate": 8.291177815023617e-05,
"loss": 0.6707,
"step": 2650
},
{
"epoch": 0.3953180670978528,
"grad_norm": 0.4726378917694092,
"learning_rate": 8.272131647112601e-05,
"loss": 0.694,
"step": 2675
},
{
"epoch": 0.39901262847259905,
"grad_norm": 0.500451922416687,
"learning_rate": 8.253085479201585e-05,
"loss": 0.6812,
"step": 2700
},
{
"epoch": 0.4027071898473453,
"grad_norm": 0.4073813259601593,
"learning_rate": 8.234039311290569e-05,
"loss": 0.7028,
"step": 2725
},
{
"epoch": 0.40640175122209166,
"grad_norm": 0.43644702434539795,
"learning_rate": 8.214993143379552e-05,
"loss": 0.6936,
"step": 2750
},
{
"epoch": 0.41009631259683793,
"grad_norm": 0.5256789922714233,
"learning_rate": 8.195946975468535e-05,
"loss": 0.7035,
"step": 2775
},
{
"epoch": 0.4137908739715842,
"grad_norm": 0.48385316133499146,
"learning_rate": 8.17690080755752e-05,
"loss": 0.6736,
"step": 2800
},
{
"epoch": 0.4174854353463305,
"grad_norm": 0.49825233221054077,
"learning_rate": 8.157854639646504e-05,
"loss": 0.7019,
"step": 2825
},
{
"epoch": 0.42117999672107675,
"grad_norm": 0.5086994171142578,
"learning_rate": 8.138808471735488e-05,
"loss": 0.6918,
"step": 2850
},
{
"epoch": 0.4248745580958231,
"grad_norm": 0.5430259108543396,
"learning_rate": 8.119762303824471e-05,
"loss": 0.7175,
"step": 2875
},
{
"epoch": 0.42856911947056936,
"grad_norm": 0.5889118313789368,
"learning_rate": 8.100716135913454e-05,
"loss": 0.6967,
"step": 2900
},
{
"epoch": 0.43226368084531563,
"grad_norm": 0.5345672369003296,
"learning_rate": 8.081669968002439e-05,
"loss": 0.6926,
"step": 2925
},
{
"epoch": 0.4359582422200619,
"grad_norm": 0.511101245880127,
"learning_rate": 8.062623800091422e-05,
"loss": 0.7248,
"step": 2950
},
{
"epoch": 0.43965280359480824,
"grad_norm": 0.511239767074585,
"learning_rate": 8.043577632180405e-05,
"loss": 0.7319,
"step": 2975
},
{
"epoch": 0.4433473649695545,
"grad_norm": 0.5121573805809021,
"learning_rate": 8.024531464269389e-05,
"loss": 0.7023,
"step": 3000
},
{
"epoch": 0.4470419263443008,
"grad_norm": 0.5658753514289856,
"learning_rate": 8.005485296358373e-05,
"loss": 0.6934,
"step": 3025
},
{
"epoch": 0.45073648771904706,
"grad_norm": 0.5475583672523499,
"learning_rate": 7.986439128447358e-05,
"loss": 0.6701,
"step": 3050
},
{
"epoch": 0.4544310490937934,
"grad_norm": 0.6107661724090576,
"learning_rate": 7.967392960536341e-05,
"loss": 0.7056,
"step": 3075
},
{
"epoch": 0.45812561046853967,
"grad_norm": 0.48424115777015686,
"learning_rate": 7.948346792625324e-05,
"loss": 0.7179,
"step": 3100
},
{
"epoch": 0.46182017184328594,
"grad_norm": 0.6184881329536438,
"learning_rate": 7.929300624714308e-05,
"loss": 0.7141,
"step": 3125
},
{
"epoch": 0.4655147332180322,
"grad_norm": 0.49919527769088745,
"learning_rate": 7.910254456803291e-05,
"loss": 0.7027,
"step": 3150
},
{
"epoch": 0.4692092945927785,
"grad_norm": 0.5012905597686768,
"learning_rate": 7.891208288892276e-05,
"loss": 0.6852,
"step": 3175
},
{
"epoch": 0.4729038559675248,
"grad_norm": 0.5033735036849976,
"learning_rate": 7.872162120981259e-05,
"loss": 0.7199,
"step": 3200
},
{
"epoch": 0.4765984173422711,
"grad_norm": 0.5746079087257385,
"learning_rate": 7.853115953070242e-05,
"loss": 0.715,
"step": 3225
},
{
"epoch": 0.48029297871701737,
"grad_norm": 0.5436145663261414,
"learning_rate": 7.834069785159225e-05,
"loss": 0.697,
"step": 3250
},
{
"epoch": 0.48398754009176365,
"grad_norm": 0.5836604833602905,
"learning_rate": 7.81502361724821e-05,
"loss": 0.7129,
"step": 3275
},
{
"epoch": 0.48768210146651,
"grad_norm": 0.5618935227394104,
"learning_rate": 7.795977449337195e-05,
"loss": 0.6993,
"step": 3300
},
{
"epoch": 0.49137666284125625,
"grad_norm": 0.6251245737075806,
"learning_rate": 7.776931281426178e-05,
"loss": 0.7132,
"step": 3325
},
{
"epoch": 0.4950712242160025,
"grad_norm": 0.5123202800750732,
"learning_rate": 7.757885113515161e-05,
"loss": 0.7218,
"step": 3350
},
{
"epoch": 0.4987657855907488,
"grad_norm": 0.5818086862564087,
"learning_rate": 7.738838945604144e-05,
"loss": 0.7154,
"step": 3375
},
{
"epoch": 0.5024603469654951,
"grad_norm": 0.5861947536468506,
"learning_rate": 7.719792777693129e-05,
"loss": 0.7013,
"step": 3400
},
{
"epoch": 0.5061549083402413,
"grad_norm": 0.4849907457828522,
"learning_rate": 7.700746609782112e-05,
"loss": 0.6902,
"step": 3425
},
{
"epoch": 0.5098494697149877,
"grad_norm": 0.5476916432380676,
"learning_rate": 7.681700441871096e-05,
"loss": 0.7147,
"step": 3450
},
{
"epoch": 0.513544031089734,
"grad_norm": 0.5822548866271973,
"learning_rate": 7.662654273960079e-05,
"loss": 0.7431,
"step": 3475
},
{
"epoch": 0.5172385924644802,
"grad_norm": 0.44818004965782166,
"learning_rate": 7.643608106049063e-05,
"loss": 0.718,
"step": 3500
},
{
"epoch": 0.5209331538392266,
"grad_norm": 0.544068455696106,
"learning_rate": 7.624561938138047e-05,
"loss": 0.7201,
"step": 3525
},
{
"epoch": 0.5246277152139728,
"grad_norm": 0.5535098910331726,
"learning_rate": 7.605515770227031e-05,
"loss": 0.6998,
"step": 3550
},
{
"epoch": 0.5283222765887191,
"grad_norm": 0.5754445195198059,
"learning_rate": 7.586469602316015e-05,
"loss": 0.6918,
"step": 3575
},
{
"epoch": 0.5320168379634654,
"grad_norm": 0.5976133942604065,
"learning_rate": 7.567423434404998e-05,
"loss": 0.693,
"step": 3600
},
{
"epoch": 0.5357113993382117,
"grad_norm": 0.4844263195991516,
"learning_rate": 7.548377266493982e-05,
"loss": 0.68,
"step": 3625
},
{
"epoch": 0.539405960712958,
"grad_norm": 0.5436462163925171,
"learning_rate": 7.529331098582966e-05,
"loss": 0.7075,
"step": 3650
},
{
"epoch": 0.5431005220877043,
"grad_norm": 0.6490929126739502,
"learning_rate": 7.510284930671949e-05,
"loss": 0.6956,
"step": 3675
},
{
"epoch": 0.5467950834624505,
"grad_norm": 0.47479814291000366,
"learning_rate": 7.491238762760932e-05,
"loss": 0.7026,
"step": 3700
},
{
"epoch": 0.5504896448371969,
"grad_norm": 0.590874969959259,
"learning_rate": 7.472192594849915e-05,
"loss": 0.7117,
"step": 3725
},
{
"epoch": 0.5541842062119431,
"grad_norm": 0.46487829089164734,
"learning_rate": 7.4531464269389e-05,
"loss": 0.6919,
"step": 3750
},
{
"epoch": 0.5578787675866894,
"grad_norm": 0.6609780192375183,
"learning_rate": 7.434100259027885e-05,
"loss": 0.7089,
"step": 3775
},
{
"epoch": 0.5615733289614357,
"grad_norm": 0.6165657639503479,
"learning_rate": 7.415054091116868e-05,
"loss": 0.7444,
"step": 3800
},
{
"epoch": 0.565267890336182,
"grad_norm": 0.5194655656814575,
"learning_rate": 7.396007923205851e-05,
"loss": 0.7114,
"step": 3825
},
{
"epoch": 0.5689624517109283,
"grad_norm": 0.4813441336154938,
"learning_rate": 7.376961755294836e-05,
"loss": 0.6953,
"step": 3850
},
{
"epoch": 0.5726570130856745,
"grad_norm": 0.7607313990592957,
"learning_rate": 7.357915587383819e-05,
"loss": 0.7058,
"step": 3875
},
{
"epoch": 0.5763515744604208,
"grad_norm": 0.48498719930648804,
"learning_rate": 7.338869419472802e-05,
"loss": 0.6866,
"step": 3900
},
{
"epoch": 0.5800461358351672,
"grad_norm": 0.5969393253326416,
"learning_rate": 7.319823251561786e-05,
"loss": 0.6951,
"step": 3925
},
{
"epoch": 0.5837406972099134,
"grad_norm": 0.6178887486457825,
"learning_rate": 7.300777083650769e-05,
"loss": 0.7036,
"step": 3950
},
{
"epoch": 0.5874352585846597,
"grad_norm": 0.5318612456321716,
"learning_rate": 7.281730915739754e-05,
"loss": 0.6856,
"step": 3975
},
{
"epoch": 0.591129819959406,
"grad_norm": 0.6101936101913452,
"learning_rate": 7.262684747828737e-05,
"loss": 0.7103,
"step": 4000
},
{
"epoch": 0.5948243813341523,
"grad_norm": 0.5605831742286682,
"learning_rate": 7.243638579917721e-05,
"loss": 0.6684,
"step": 4025
},
{
"epoch": 0.5985189427088986,
"grad_norm": 0.6576380133628845,
"learning_rate": 7.224592412006705e-05,
"loss": 0.6955,
"step": 4050
},
{
"epoch": 0.6022135040836448,
"grad_norm": 0.49083924293518066,
"learning_rate": 7.205546244095688e-05,
"loss": 0.7089,
"step": 4075
},
{
"epoch": 0.6059080654583912,
"grad_norm": 0.4783398509025574,
"learning_rate": 7.186500076184673e-05,
"loss": 0.6766,
"step": 4100
},
{
"epoch": 0.6096026268331375,
"grad_norm": 0.5773366689682007,
"learning_rate": 7.167453908273656e-05,
"loss": 0.666,
"step": 4125
},
{
"epoch": 0.6132971882078837,
"grad_norm": 0.5449897050857544,
"learning_rate": 7.148407740362639e-05,
"loss": 0.6795,
"step": 4150
},
{
"epoch": 0.61699174958263,
"grad_norm": 0.519882082939148,
"learning_rate": 7.129361572451622e-05,
"loss": 0.6892,
"step": 4175
},
{
"epoch": 0.6206863109573763,
"grad_norm": 0.5653222799301147,
"learning_rate": 7.110315404540607e-05,
"loss": 0.7029,
"step": 4200
},
{
"epoch": 0.6243808723321226,
"grad_norm": 0.542448878288269,
"learning_rate": 7.09126923662959e-05,
"loss": 0.6885,
"step": 4225
},
{
"epoch": 0.6280754337068689,
"grad_norm": 0.5602554082870483,
"learning_rate": 7.072223068718575e-05,
"loss": 0.6811,
"step": 4250
},
{
"epoch": 0.6317699950816151,
"grad_norm": 0.5326575636863708,
"learning_rate": 7.053176900807558e-05,
"loss": 0.7032,
"step": 4275
},
{
"epoch": 0.6354645564563615,
"grad_norm": 0.5822186470031738,
"learning_rate": 7.034130732896541e-05,
"loss": 0.6951,
"step": 4300
},
{
"epoch": 0.6391591178311078,
"grad_norm": 0.5309107899665833,
"learning_rate": 7.015084564985526e-05,
"loss": 0.6891,
"step": 4325
},
{
"epoch": 0.642853679205854,
"grad_norm": 0.6958228349685669,
"learning_rate": 6.996038397074509e-05,
"loss": 0.6932,
"step": 4350
},
{
"epoch": 0.6465482405806003,
"grad_norm": 0.4864750802516937,
"learning_rate": 6.976992229163493e-05,
"loss": 0.6897,
"step": 4375
},
{
"epoch": 0.6502428019553466,
"grad_norm": 0.5077944993972778,
"learning_rate": 6.957946061252476e-05,
"loss": 0.7067,
"step": 4400
},
{
"epoch": 0.6539373633300929,
"grad_norm": 0.5589050054550171,
"learning_rate": 6.938899893341459e-05,
"loss": 0.6927,
"step": 4425
},
{
"epoch": 0.6576319247048392,
"grad_norm": 0.6064692735671997,
"learning_rate": 6.919853725430444e-05,
"loss": 0.7186,
"step": 4450
},
{
"epoch": 0.6613264860795854,
"grad_norm": 0.5546572208404541,
"learning_rate": 6.900807557519427e-05,
"loss": 0.6865,
"step": 4475
},
{
"epoch": 0.6650210474543318,
"grad_norm": 0.6032342314720154,
"learning_rate": 6.881761389608412e-05,
"loss": 0.6999,
"step": 4500
},
{
"epoch": 0.668715608829078,
"grad_norm": 0.5067450404167175,
"learning_rate": 6.862715221697395e-05,
"loss": 0.7068,
"step": 4525
},
{
"epoch": 0.6724101702038243,
"grad_norm": 0.6697527170181274,
"learning_rate": 6.84366905378638e-05,
"loss": 0.7075,
"step": 4550
},
{
"epoch": 0.6761047315785707,
"grad_norm": 0.48360082507133484,
"learning_rate": 6.824622885875363e-05,
"loss": 0.7076,
"step": 4575
},
{
"epoch": 0.6797992929533169,
"grad_norm": 0.6387288570404053,
"learning_rate": 6.805576717964346e-05,
"loss": 0.6871,
"step": 4600
},
{
"epoch": 0.6834938543280632,
"grad_norm": 0.6431862115859985,
"learning_rate": 6.786530550053329e-05,
"loss": 0.7138,
"step": 4625
},
{
"epoch": 0.6871884157028095,
"grad_norm": 0.6050564050674438,
"learning_rate": 6.767484382142313e-05,
"loss": 0.6978,
"step": 4650
},
{
"epoch": 0.6908829770775557,
"grad_norm": 0.7160177230834961,
"learning_rate": 6.748438214231297e-05,
"loss": 0.6813,
"step": 4675
},
{
"epoch": 0.6945775384523021,
"grad_norm": 0.6984575986862183,
"learning_rate": 6.72939204632028e-05,
"loss": 0.7155,
"step": 4700
},
{
"epoch": 0.6982720998270483,
"grad_norm": 0.5910038352012634,
"learning_rate": 6.710345878409264e-05,
"loss": 0.6689,
"step": 4725
},
{
"epoch": 0.7019666612017946,
"grad_norm": 0.5897320508956909,
"learning_rate": 6.691299710498248e-05,
"loss": 0.7169,
"step": 4750
},
{
"epoch": 0.705661222576541,
"grad_norm": 0.5735405683517456,
"learning_rate": 6.672253542587232e-05,
"loss": 0.6779,
"step": 4775
},
{
"epoch": 0.7093557839512872,
"grad_norm": 0.6408699750900269,
"learning_rate": 6.653207374676216e-05,
"loss": 0.7069,
"step": 4800
},
{
"epoch": 0.7130503453260335,
"grad_norm": 0.6292117834091187,
"learning_rate": 6.6341612067652e-05,
"loss": 0.698,
"step": 4825
},
{
"epoch": 0.7167449067007798,
"grad_norm": 0.6101416349411011,
"learning_rate": 6.615115038854183e-05,
"loss": 0.6708,
"step": 4850
},
{
"epoch": 0.7204394680755261,
"grad_norm": 0.6805480122566223,
"learning_rate": 6.596068870943166e-05,
"loss": 0.7115,
"step": 4875
},
{
"epoch": 0.7241340294502724,
"grad_norm": 0.6465732455253601,
"learning_rate": 6.57702270303215e-05,
"loss": 0.688,
"step": 4900
},
{
"epoch": 0.7278285908250186,
"grad_norm": 0.5873344540596008,
"learning_rate": 6.557976535121134e-05,
"loss": 0.7032,
"step": 4925
},
{
"epoch": 0.7315231521997649,
"grad_norm": 0.5717042088508606,
"learning_rate": 6.538930367210117e-05,
"loss": 0.6907,
"step": 4950
},
{
"epoch": 0.7352177135745113,
"grad_norm": 0.6366106867790222,
"learning_rate": 6.519884199299102e-05,
"loss": 0.7061,
"step": 4975
},
{
"epoch": 0.7389122749492575,
"grad_norm": 0.7421902418136597,
"learning_rate": 6.500838031388085e-05,
"loss": 0.7267,
"step": 5000
},
{
"epoch": 0.7426068363240038,
"grad_norm": 0.5897513031959534,
"learning_rate": 6.48179186347707e-05,
"loss": 0.7089,
"step": 5025
},
{
"epoch": 0.74630139769875,
"grad_norm": 0.6692824959754944,
"learning_rate": 6.462745695566053e-05,
"loss": 0.7059,
"step": 5050
},
{
"epoch": 0.7499959590734964,
"grad_norm": 0.5818034410476685,
"learning_rate": 6.443699527655036e-05,
"loss": 0.6734,
"step": 5075
},
{
"epoch": 0.7536905204482427,
"grad_norm": 0.5975498557090759,
"learning_rate": 6.42465335974402e-05,
"loss": 0.6857,
"step": 5100
},
{
"epoch": 0.7573850818229889,
"grad_norm": 0.5827130675315857,
"learning_rate": 6.405607191833003e-05,
"loss": 0.7156,
"step": 5125
},
{
"epoch": 0.7610796431977352,
"grad_norm": 0.660932719707489,
"learning_rate": 6.386561023921987e-05,
"loss": 0.688,
"step": 5150
},
{
"epoch": 0.7647742045724816,
"grad_norm": 0.5963577628135681,
"learning_rate": 6.36751485601097e-05,
"loss": 0.709,
"step": 5175
},
{
"epoch": 0.7684687659472278,
"grad_norm": 0.6608302593231201,
"learning_rate": 6.348468688099954e-05,
"loss": 0.6761,
"step": 5200
},
{
"epoch": 0.7721633273219741,
"grad_norm": 0.6137542724609375,
"learning_rate": 6.329422520188938e-05,
"loss": 0.6845,
"step": 5225
},
{
"epoch": 0.7758578886967203,
"grad_norm": 0.6861995458602905,
"learning_rate": 6.310376352277923e-05,
"loss": 0.6746,
"step": 5250
},
{
"epoch": 0.7795524500714667,
"grad_norm": 0.6537772417068481,
"learning_rate": 6.291330184366906e-05,
"loss": 0.6777,
"step": 5275
},
{
"epoch": 0.783247011446213,
"grad_norm": 0.6634919047355652,
"learning_rate": 6.27228401645589e-05,
"loss": 0.6945,
"step": 5300
},
{
"epoch": 0.7869415728209592,
"grad_norm": 0.610098123550415,
"learning_rate": 6.253237848544873e-05,
"loss": 0.6909,
"step": 5325
},
{
"epoch": 0.7906361341957056,
"grad_norm": 0.6167535185813904,
"learning_rate": 6.234191680633856e-05,
"loss": 0.6988,
"step": 5350
},
{
"epoch": 0.7943306955704518,
"grad_norm": 0.6502842903137207,
"learning_rate": 6.215145512722841e-05,
"loss": 0.6937,
"step": 5375
},
{
"epoch": 0.7980252569451981,
"grad_norm": 0.5586534142494202,
"learning_rate": 6.196099344811824e-05,
"loss": 0.6891,
"step": 5400
},
{
"epoch": 0.8017198183199444,
"grad_norm": 0.577847957611084,
"learning_rate": 6.177053176900807e-05,
"loss": 0.7111,
"step": 5425
},
{
"epoch": 0.8054143796946907,
"grad_norm": 0.5086051225662231,
"learning_rate": 6.158007008989792e-05,
"loss": 0.6892,
"step": 5450
},
{
"epoch": 0.809108941069437,
"grad_norm": 0.6650702953338623,
"learning_rate": 6.138960841078775e-05,
"loss": 0.6853,
"step": 5475
},
{
"epoch": 0.8128035024441833,
"grad_norm": 0.730775773525238,
"learning_rate": 6.11991467316776e-05,
"loss": 0.6972,
"step": 5500
},
{
"epoch": 0.8164980638189295,
"grad_norm": 0.6812962293624878,
"learning_rate": 6.100868505256743e-05,
"loss": 0.7179,
"step": 5525
},
{
"epoch": 0.8201926251936759,
"grad_norm": 0.6698195934295654,
"learning_rate": 6.081822337345726e-05,
"loss": 0.6935,
"step": 5550
},
{
"epoch": 0.8238871865684221,
"grad_norm": 0.7661596536636353,
"learning_rate": 6.0627761694347096e-05,
"loss": 0.7081,
"step": 5575
},
{
"epoch": 0.8275817479431684,
"grad_norm": 0.63306725025177,
"learning_rate": 6.043730001523694e-05,
"loss": 0.6814,
"step": 5600
},
{
"epoch": 0.8312763093179147,
"grad_norm": 0.638088047504425,
"learning_rate": 6.0246838336126774e-05,
"loss": 0.7162,
"step": 5625
},
{
"epoch": 0.834970870692661,
"grad_norm": 0.6416764259338379,
"learning_rate": 6.0056376657016614e-05,
"loss": 0.6935,
"step": 5650
},
{
"epoch": 0.8386654320674073,
"grad_norm": 0.6060255169868469,
"learning_rate": 5.9865914977906447e-05,
"loss": 0.6935,
"step": 5675
},
{
"epoch": 0.8423599934421535,
"grad_norm": 0.6919652223587036,
"learning_rate": 5.967545329879628e-05,
"loss": 0.6781,
"step": 5700
},
{
"epoch": 0.8460545548168998,
"grad_norm": 0.5610880851745605,
"learning_rate": 5.9484991619686125e-05,
"loss": 0.6924,
"step": 5725
},
{
"epoch": 0.8497491161916462,
"grad_norm": 0.6481006145477295,
"learning_rate": 5.929452994057596e-05,
"loss": 0.6708,
"step": 5750
},
{
"epoch": 0.8534436775663924,
"grad_norm": 0.618869423866272,
"learning_rate": 5.91040682614658e-05,
"loss": 0.6801,
"step": 5775
},
{
"epoch": 0.8571382389411387,
"grad_norm": 0.5622214674949646,
"learning_rate": 5.891360658235563e-05,
"loss": 0.6932,
"step": 5800
},
{
"epoch": 0.860832800315885,
"grad_norm": 0.6936132311820984,
"learning_rate": 5.8723144903245476e-05,
"loss": 0.6932,
"step": 5825
},
{
"epoch": 0.8645273616906313,
"grad_norm": 0.6182092428207397,
"learning_rate": 5.853268322413531e-05,
"loss": 0.6767,
"step": 5850
},
{
"epoch": 0.8682219230653776,
"grad_norm": 0.6932141184806824,
"learning_rate": 5.834222154502514e-05,
"loss": 0.694,
"step": 5875
},
{
"epoch": 0.8719164844401238,
"grad_norm": 0.4943319261074066,
"learning_rate": 5.815175986591498e-05,
"loss": 0.6884,
"step": 5900
},
{
"epoch": 0.8756110458148701,
"grad_norm": 0.730697512626648,
"learning_rate": 5.7961298186804814e-05,
"loss": 0.6906,
"step": 5925
},
{
"epoch": 0.8793056071896165,
"grad_norm": 0.5535916090011597,
"learning_rate": 5.777083650769466e-05,
"loss": 0.6992,
"step": 5950
},
{
"epoch": 0.8830001685643627,
"grad_norm": 0.6035041809082031,
"learning_rate": 5.758037482858449e-05,
"loss": 0.6637,
"step": 5975
},
{
"epoch": 0.886694729939109,
"grad_norm": 0.6580167412757874,
"learning_rate": 5.7389913149474325e-05,
"loss": 0.7056,
"step": 6000
},
{
"epoch": 0.8903892913138552,
"grad_norm": 0.5391905903816223,
"learning_rate": 5.7199451470364165e-05,
"loss": 0.7169,
"step": 6025
},
{
"epoch": 0.8940838526886016,
"grad_norm": 0.7833768725395203,
"learning_rate": 5.7008989791254e-05,
"loss": 0.682,
"step": 6050
},
{
"epoch": 0.8977784140633479,
"grad_norm": 0.6040502190589905,
"learning_rate": 5.681852811214384e-05,
"loss": 0.6893,
"step": 6075
},
{
"epoch": 0.9014729754380941,
"grad_norm": 0.5723184943199158,
"learning_rate": 5.6628066433033676e-05,
"loss": 0.7041,
"step": 6100
},
{
"epoch": 0.9051675368128405,
"grad_norm": 0.7001731395721436,
"learning_rate": 5.643760475392351e-05,
"loss": 0.69,
"step": 6125
},
{
"epoch": 0.9088620981875868,
"grad_norm": 0.6454519629478455,
"learning_rate": 5.624714307481335e-05,
"loss": 0.6777,
"step": 6150
},
{
"epoch": 0.912556659562333,
"grad_norm": 0.6187843680381775,
"learning_rate": 5.6056681395703194e-05,
"loss": 0.7069,
"step": 6175
},
{
"epoch": 0.9162512209370793,
"grad_norm": 0.6245271563529968,
"learning_rate": 5.586621971659303e-05,
"loss": 0.6735,
"step": 6200
},
{
"epoch": 0.9199457823118256,
"grad_norm": 0.513124406337738,
"learning_rate": 5.567575803748286e-05,
"loss": 0.6859,
"step": 6225
},
{
"epoch": 0.9236403436865719,
"grad_norm": 0.5510721206665039,
"learning_rate": 5.54852963583727e-05,
"loss": 0.7195,
"step": 6250
},
{
"epoch": 0.9273349050613182,
"grad_norm": 0.7087464332580566,
"learning_rate": 5.529483467926253e-05,
"loss": 0.6761,
"step": 6275
},
{
"epoch": 0.9310294664360644,
"grad_norm": 0.6695664525032043,
"learning_rate": 5.510437300015238e-05,
"loss": 0.7013,
"step": 6300
},
{
"epoch": 0.9347240278108108,
"grad_norm": 0.6182588934898376,
"learning_rate": 5.491391132104221e-05,
"loss": 0.6793,
"step": 6325
},
{
"epoch": 0.938418589185557,
"grad_norm": 0.7019252181053162,
"learning_rate": 5.472344964193204e-05,
"loss": 0.7122,
"step": 6350
},
{
"epoch": 0.9421131505603033,
"grad_norm": 0.772847592830658,
"learning_rate": 5.453298796282188e-05,
"loss": 0.6991,
"step": 6375
},
{
"epoch": 0.9458077119350496,
"grad_norm": 0.7126289010047913,
"learning_rate": 5.4342526283711715e-05,
"loss": 0.6992,
"step": 6400
},
{
"epoch": 0.9495022733097959,
"grad_norm": 0.7134938836097717,
"learning_rate": 5.415206460460156e-05,
"loss": 0.7091,
"step": 6425
},
{
"epoch": 0.9531968346845422,
"grad_norm": 0.7651578187942505,
"learning_rate": 5.3961602925491394e-05,
"loss": 0.6829,
"step": 6450
},
{
"epoch": 0.9568913960592885,
"grad_norm": 0.6493939161300659,
"learning_rate": 5.377114124638123e-05,
"loss": 0.6831,
"step": 6475
},
{
"epoch": 0.9605859574340347,
"grad_norm": 0.5992809534072876,
"learning_rate": 5.3580679567271066e-05,
"loss": 0.6984,
"step": 6500
},
{
"epoch": 0.9642805188087811,
"grad_norm": 0.6597899794578552,
"learning_rate": 5.339021788816091e-05,
"loss": 0.6893,
"step": 6525
},
{
"epoch": 0.9679750801835273,
"grad_norm": 0.6459916234016418,
"learning_rate": 5.3199756209050745e-05,
"loss": 0.6935,
"step": 6550
},
{
"epoch": 0.9716696415582736,
"grad_norm": 0.7714385986328125,
"learning_rate": 5.300929452994058e-05,
"loss": 0.6883,
"step": 6575
},
{
"epoch": 0.97536420293302,
"grad_norm": 0.5793107748031616,
"learning_rate": 5.281883285083041e-05,
"loss": 0.6795,
"step": 6600
},
{
"epoch": 0.9790587643077662,
"grad_norm": 0.5452476739883423,
"learning_rate": 5.262837117172025e-05,
"loss": 0.6809,
"step": 6625
},
{
"epoch": 0.9827533256825125,
"grad_norm": 0.6292601823806763,
"learning_rate": 5.2437909492610096e-05,
"loss": 0.7021,
"step": 6650
},
{
"epoch": 0.9864478870572587,
"grad_norm": 0.6509853601455688,
"learning_rate": 5.224744781349993e-05,
"loss": 0.6808,
"step": 6675
},
{
"epoch": 0.990142448432005,
"grad_norm": 0.6169773936271667,
"learning_rate": 5.205698613438976e-05,
"loss": 0.677,
"step": 6700
},
{
"epoch": 0.9938370098067514,
"grad_norm": 0.6769931316375732,
"learning_rate": 5.1866524455279594e-05,
"loss": 0.7188,
"step": 6725
},
{
"epoch": 0.9975315711814976,
"grad_norm": 0.6493127346038818,
"learning_rate": 5.167606277616944e-05,
"loss": 0.68,
"step": 6750
},
{
"epoch": 1.001182259639919,
"grad_norm": 0.6528682708740234,
"learning_rate": 5.148560109705928e-05,
"loss": 0.6982,
"step": 6775
},
{
"epoch": 1.004876821014665,
"grad_norm": 0.6537097692489624,
"learning_rate": 5.129513941794911e-05,
"loss": 0.6513,
"step": 6800
},
{
"epoch": 1.0085713823894114,
"grad_norm": 0.6426008939743042,
"learning_rate": 5.1104677738838945e-05,
"loss": 0.668,
"step": 6825
},
{
"epoch": 1.0122659437641577,
"grad_norm": 0.5742406249046326,
"learning_rate": 5.0914216059728784e-05,
"loss": 0.67,
"step": 6850
},
{
"epoch": 1.015960505138904,
"grad_norm": 0.7166649103164673,
"learning_rate": 5.0723754380618623e-05,
"loss": 0.6602,
"step": 6875
},
{
"epoch": 1.0196550665136503,
"grad_norm": 0.7485601305961609,
"learning_rate": 5.053329270150846e-05,
"loss": 0.6567,
"step": 6900
},
{
"epoch": 1.0233496278883965,
"grad_norm": 0.7126789689064026,
"learning_rate": 5.0342831022398296e-05,
"loss": 0.6624,
"step": 6925
},
{
"epoch": 1.0270441892631428,
"grad_norm": 0.7238374948501587,
"learning_rate": 5.015236934328813e-05,
"loss": 0.6613,
"step": 6950
},
{
"epoch": 1.0307387506378891,
"grad_norm": 0.6505608558654785,
"learning_rate": 4.996190766417797e-05,
"loss": 0.6554,
"step": 6975
},
{
"epoch": 1.0344333120126354,
"grad_norm": 0.6918332576751709,
"learning_rate": 4.977144598506781e-05,
"loss": 0.6645,
"step": 7000
},
{
"epoch": 1.0381278733873818,
"grad_norm": 0.5876255035400391,
"learning_rate": 4.9580984305957646e-05,
"loss": 0.6638,
"step": 7025
},
{
"epoch": 1.0418224347621279,
"grad_norm": 0.7554610967636108,
"learning_rate": 4.939052262684748e-05,
"loss": 0.6742,
"step": 7050
},
{
"epoch": 1.0455169961368742,
"grad_norm": 0.6300481557846069,
"learning_rate": 4.920006094773732e-05,
"loss": 0.6409,
"step": 7075
},
{
"epoch": 1.0492115575116205,
"grad_norm": 0.5924395322799683,
"learning_rate": 4.900959926862715e-05,
"loss": 0.668,
"step": 7100
},
{
"epoch": 1.0529061188863669,
"grad_norm": 0.6832597851753235,
"learning_rate": 4.881913758951699e-05,
"loss": 0.6431,
"step": 7125
},
{
"epoch": 1.0566006802611132,
"grad_norm": 0.702418863773346,
"learning_rate": 4.862867591040683e-05,
"loss": 0.6463,
"step": 7150
},
{
"epoch": 1.0602952416358593,
"grad_norm": 0.6264967918395996,
"learning_rate": 4.843821423129666e-05,
"loss": 0.653,
"step": 7175
},
{
"epoch": 1.0639898030106056,
"grad_norm": 0.6441030502319336,
"learning_rate": 4.82477525521865e-05,
"loss": 0.6415,
"step": 7200
},
{
"epoch": 1.067684364385352,
"grad_norm": 0.5445654392242432,
"learning_rate": 4.805729087307634e-05,
"loss": 0.6516,
"step": 7225
},
{
"epoch": 1.0713789257600983,
"grad_norm": 0.634982168674469,
"learning_rate": 4.786682919396618e-05,
"loss": 0.6617,
"step": 7250
},
{
"epoch": 1.0750734871348446,
"grad_norm": 0.5670004487037659,
"learning_rate": 4.7676367514856013e-05,
"loss": 0.644,
"step": 7275
},
{
"epoch": 1.0787680485095907,
"grad_norm": 0.6136172413825989,
"learning_rate": 4.7485905835745846e-05,
"loss": 0.6404,
"step": 7300
},
{
"epoch": 1.082462609884337,
"grad_norm": 0.6087863445281982,
"learning_rate": 4.7295444156635686e-05,
"loss": 0.666,
"step": 7325
},
{
"epoch": 1.0861571712590834,
"grad_norm": 0.54926997423172,
"learning_rate": 4.7104982477525525e-05,
"loss": 0.6157,
"step": 7350
},
{
"epoch": 1.0898517326338297,
"grad_norm": 0.6426320672035217,
"learning_rate": 4.6914520798415364e-05,
"loss": 0.6349,
"step": 7375
},
{
"epoch": 1.093546294008576,
"grad_norm": 0.5854539275169373,
"learning_rate": 4.67240591193052e-05,
"loss": 0.6583,
"step": 7400
},
{
"epoch": 1.0972408553833224,
"grad_norm": 0.7021641731262207,
"learning_rate": 4.6533597440195036e-05,
"loss": 0.6682,
"step": 7425
},
{
"epoch": 1.1009354167580685,
"grad_norm": 0.7356472611427307,
"learning_rate": 4.634313576108487e-05,
"loss": 0.6563,
"step": 7450
},
{
"epoch": 1.1046299781328148,
"grad_norm": 0.6147669553756714,
"learning_rate": 4.615267408197471e-05,
"loss": 0.667,
"step": 7475
},
{
"epoch": 1.1083245395075612,
"grad_norm": 0.6394315958023071,
"learning_rate": 4.596221240286455e-05,
"loss": 0.6737,
"step": 7500
},
{
"epoch": 1.1120191008823075,
"grad_norm": 0.7234614491462708,
"learning_rate": 4.577175072375438e-05,
"loss": 0.6556,
"step": 7525
},
{
"epoch": 1.1157136622570538,
"grad_norm": 0.6812229156494141,
"learning_rate": 4.558128904464422e-05,
"loss": 0.6544,
"step": 7550
},
{
"epoch": 1.1194082236318,
"grad_norm": 0.6218217611312866,
"learning_rate": 4.539082736553405e-05,
"loss": 0.668,
"step": 7575
},
{
"epoch": 1.1231027850065463,
"grad_norm": 0.8202681541442871,
"learning_rate": 4.52003656864239e-05,
"loss": 0.679,
"step": 7600
},
{
"epoch": 1.1267973463812926,
"grad_norm": 0.5360725522041321,
"learning_rate": 4.500990400731373e-05,
"loss": 0.6463,
"step": 7625
},
{
"epoch": 1.130491907756039,
"grad_norm": 0.6142716407775879,
"learning_rate": 4.481944232820357e-05,
"loss": 0.659,
"step": 7650
},
{
"epoch": 1.1341864691307852,
"grad_norm": 0.619349479675293,
"learning_rate": 4.4628980649093404e-05,
"loss": 0.6724,
"step": 7675
},
{
"epoch": 1.1378810305055314,
"grad_norm": 0.6891987323760986,
"learning_rate": 4.4438518969983236e-05,
"loss": 0.6509,
"step": 7700
},
{
"epoch": 1.1415755918802777,
"grad_norm": 0.7174720168113708,
"learning_rate": 4.424805729087308e-05,
"loss": 0.6715,
"step": 7725
},
{
"epoch": 1.145270153255024,
"grad_norm": 0.7424497008323669,
"learning_rate": 4.4057595611762915e-05,
"loss": 0.6449,
"step": 7750
},
{
"epoch": 1.1489647146297703,
"grad_norm": 0.6533998847007751,
"learning_rate": 4.3867133932652754e-05,
"loss": 0.6528,
"step": 7775
},
{
"epoch": 1.1526592760045167,
"grad_norm": 0.7500383853912354,
"learning_rate": 4.367667225354259e-05,
"loss": 0.6592,
"step": 7800
},
{
"epoch": 1.156353837379263,
"grad_norm": 0.6293950080871582,
"learning_rate": 4.3486210574432427e-05,
"loss": 0.6584,
"step": 7825
},
{
"epoch": 1.1600483987540091,
"grad_norm": 0.8463473320007324,
"learning_rate": 4.3295748895322266e-05,
"loss": 0.6656,
"step": 7850
},
{
"epoch": 1.1637429601287554,
"grad_norm": 0.6918061971664429,
"learning_rate": 4.31052872162121e-05,
"loss": 0.6603,
"step": 7875
},
{
"epoch": 1.1674375215035018,
"grad_norm": 0.5433516502380371,
"learning_rate": 4.291482553710194e-05,
"loss": 0.6422,
"step": 7900
},
{
"epoch": 1.171132082878248,
"grad_norm": 0.6414408087730408,
"learning_rate": 4.272436385799177e-05,
"loss": 0.6279,
"step": 7925
},
{
"epoch": 1.1748266442529944,
"grad_norm": 0.814218282699585,
"learning_rate": 4.253390217888161e-05,
"loss": 0.6673,
"step": 7950
},
{
"epoch": 1.1785212056277405,
"grad_norm": 0.7378386855125427,
"learning_rate": 4.234344049977145e-05,
"loss": 0.6651,
"step": 7975
},
{
"epoch": 1.1822157670024869,
"grad_norm": 0.6620386242866516,
"learning_rate": 4.215297882066129e-05,
"loss": 0.6984,
"step": 8000
},
{
"epoch": 1.1859103283772332,
"grad_norm": 0.6845581531524658,
"learning_rate": 4.196251714155112e-05,
"loss": 0.6524,
"step": 8025
},
{
"epoch": 1.1896048897519795,
"grad_norm": 0.7139785289764404,
"learning_rate": 4.1772055462440954e-05,
"loss": 0.6453,
"step": 8050
},
{
"epoch": 1.1932994511267259,
"grad_norm": 0.6536353230476379,
"learning_rate": 4.1581593783330794e-05,
"loss": 0.6456,
"step": 8075
},
{
"epoch": 1.196994012501472,
"grad_norm": 0.6225493550300598,
"learning_rate": 4.139113210422063e-05,
"loss": 0.6192,
"step": 8100
},
{
"epoch": 1.2006885738762183,
"grad_norm": 0.6810159683227539,
"learning_rate": 4.120067042511047e-05,
"loss": 0.6634,
"step": 8125
},
{
"epoch": 1.2043831352509646,
"grad_norm": 0.5847315788269043,
"learning_rate": 4.1010208746000305e-05,
"loss": 0.6231,
"step": 8150
},
{
"epoch": 1.208077696625711,
"grad_norm": 0.6385469436645508,
"learning_rate": 4.0819747066890144e-05,
"loss": 0.6619,
"step": 8175
},
{
"epoch": 1.2117722580004573,
"grad_norm": 0.7124472260475159,
"learning_rate": 4.0629285387779984e-05,
"loss": 0.6626,
"step": 8200
},
{
"epoch": 1.2154668193752034,
"grad_norm": 0.658824622631073,
"learning_rate": 4.0438823708669817e-05,
"loss": 0.6653,
"step": 8225
},
{
"epoch": 1.2191613807499497,
"grad_norm": 0.6626468300819397,
"learning_rate": 4.0248362029559656e-05,
"loss": 0.6777,
"step": 8250
},
{
"epoch": 1.222855942124696,
"grad_norm": 0.6238393783569336,
"learning_rate": 4.005790035044949e-05,
"loss": 0.6622,
"step": 8275
},
{
"epoch": 1.2265505034994424,
"grad_norm": 0.685213565826416,
"learning_rate": 3.986743867133933e-05,
"loss": 0.6423,
"step": 8300
},
{
"epoch": 1.2302450648741887,
"grad_norm": 0.7486940622329712,
"learning_rate": 3.967697699222917e-05,
"loss": 0.6631,
"step": 8325
},
{
"epoch": 1.2339396262489348,
"grad_norm": 0.7994277477264404,
"learning_rate": 3.948651531311901e-05,
"loss": 0.6725,
"step": 8350
},
{
"epoch": 1.2376341876236812,
"grad_norm": 0.6204445958137512,
"learning_rate": 3.929605363400884e-05,
"loss": 0.6411,
"step": 8375
},
{
"epoch": 1.2413287489984275,
"grad_norm": 0.7625504732131958,
"learning_rate": 3.910559195489867e-05,
"loss": 0.6819,
"step": 8400
},
{
"epoch": 1.2450233103731738,
"grad_norm": 0.6892343163490295,
"learning_rate": 3.891513027578851e-05,
"loss": 0.6614,
"step": 8425
},
{
"epoch": 1.2487178717479201,
"grad_norm": 0.6849514245986938,
"learning_rate": 3.872466859667835e-05,
"loss": 0.6442,
"step": 8450
},
{
"epoch": 1.2524124331226663,
"grad_norm": 0.7257765531539917,
"learning_rate": 3.853420691756819e-05,
"loss": 0.6475,
"step": 8475
},
{
"epoch": 1.2561069944974126,
"grad_norm": 0.7827818393707275,
"learning_rate": 3.834374523845802e-05,
"loss": 0.6317,
"step": 8500
},
{
"epoch": 1.259801555872159,
"grad_norm": 0.7970981597900391,
"learning_rate": 3.815328355934786e-05,
"loss": 0.6585,
"step": 8525
},
{
"epoch": 1.2634961172469052,
"grad_norm": 0.7086262106895447,
"learning_rate": 3.7962821880237695e-05,
"loss": 0.6499,
"step": 8550
},
{
"epoch": 1.2671906786216516,
"grad_norm": 0.654151439666748,
"learning_rate": 3.7772360201127535e-05,
"loss": 0.6587,
"step": 8575
},
{
"epoch": 1.2708852399963977,
"grad_norm": 0.6484542489051819,
"learning_rate": 3.7581898522017374e-05,
"loss": 0.6533,
"step": 8600
},
{
"epoch": 1.274579801371144,
"grad_norm": 0.5822983384132385,
"learning_rate": 3.7391436842907207e-05,
"loss": 0.6535,
"step": 8625
},
{
"epoch": 1.2782743627458903,
"grad_norm": 0.6506041884422302,
"learning_rate": 3.7200975163797046e-05,
"loss": 0.6527,
"step": 8650
},
{
"epoch": 1.2819689241206367,
"grad_norm": 0.6804136037826538,
"learning_rate": 3.701051348468688e-05,
"loss": 0.642,
"step": 8675
},
{
"epoch": 1.285663485495383,
"grad_norm": 0.7997829914093018,
"learning_rate": 3.6820051805576725e-05,
"loss": 0.6489,
"step": 8700
},
{
"epoch": 1.2893580468701291,
"grad_norm": 0.7355867028236389,
"learning_rate": 3.662959012646656e-05,
"loss": 0.6777,
"step": 8725
},
{
"epoch": 1.2930526082448754,
"grad_norm": 9.047796249389648,
"learning_rate": 3.643912844735639e-05,
"loss": 0.6777,
"step": 8750
},
{
"epoch": 1.2967471696196218,
"grad_norm": 0.7236223220825195,
"learning_rate": 3.624866676824623e-05,
"loss": 0.6777,
"step": 8775
},
{
"epoch": 1.300441730994368,
"grad_norm": 0.6845753192901611,
"learning_rate": 3.605820508913607e-05,
"loss": 0.6284,
"step": 8800
},
{
"epoch": 1.3041362923691144,
"grad_norm": 0.7639452815055847,
"learning_rate": 3.586774341002591e-05,
"loss": 0.6822,
"step": 8825
},
{
"epoch": 1.3078308537438605,
"grad_norm": 0.6909865736961365,
"learning_rate": 3.567728173091574e-05,
"loss": 0.6737,
"step": 8850
},
{
"epoch": 1.311525415118607,
"grad_norm": 0.6128563284873962,
"learning_rate": 3.548682005180558e-05,
"loss": 0.666,
"step": 8875
},
{
"epoch": 1.3152199764933532,
"grad_norm": 0.7517656087875366,
"learning_rate": 3.529635837269541e-05,
"loss": 0.6389,
"step": 8900
},
{
"epoch": 1.3189145378680995,
"grad_norm": 0.7127660512924194,
"learning_rate": 3.510589669358525e-05,
"loss": 0.6334,
"step": 8925
},
{
"epoch": 1.3226090992428459,
"grad_norm": 0.7129451632499695,
"learning_rate": 3.491543501447509e-05,
"loss": 0.6545,
"step": 8950
},
{
"epoch": 1.3263036606175922,
"grad_norm": 0.7367307543754578,
"learning_rate": 3.4724973335364925e-05,
"loss": 0.6329,
"step": 8975
},
{
"epoch": 1.3299982219923385,
"grad_norm": 0.7707272171974182,
"learning_rate": 3.4534511656254764e-05,
"loss": 0.6469,
"step": 9000
},
{
"epoch": 1.3336927833670846,
"grad_norm": 0.5796623826026917,
"learning_rate": 3.43440499771446e-05,
"loss": 0.6533,
"step": 9025
},
{
"epoch": 1.337387344741831,
"grad_norm": 210.8633270263672,
"learning_rate": 3.4153588298034436e-05,
"loss": 0.663,
"step": 9050
},
{
"epoch": 1.3410819061165773,
"grad_norm": 0.6077564358711243,
"learning_rate": 3.3963126618924275e-05,
"loss": 0.6529,
"step": 9075
},
{
"epoch": 1.3447764674913236,
"grad_norm": 0.5756903290748596,
"learning_rate": 3.3772664939814115e-05,
"loss": 0.6379,
"step": 9100
},
{
"epoch": 1.34847102886607,
"grad_norm": 0.6951320171356201,
"learning_rate": 3.358220326070395e-05,
"loss": 0.6572,
"step": 9125
},
{
"epoch": 1.352165590240816,
"grad_norm": 0.654563307762146,
"learning_rate": 3.339174158159378e-05,
"loss": 0.6571,
"step": 9150
},
{
"epoch": 1.3558601516155624,
"grad_norm": 0.8268250823020935,
"learning_rate": 3.320127990248362e-05,
"loss": 0.6952,
"step": 9175
},
{
"epoch": 1.3595547129903087,
"grad_norm": 0.7915245890617371,
"learning_rate": 3.301081822337346e-05,
"loss": 0.6442,
"step": 9200
},
{
"epoch": 1.363249274365055,
"grad_norm": 0.7299513816833496,
"learning_rate": 3.28203565442633e-05,
"loss": 0.6347,
"step": 9225
},
{
"epoch": 1.3669438357398014,
"grad_norm": 0.6474806070327759,
"learning_rate": 3.262989486515313e-05,
"loss": 0.6604,
"step": 9250
},
{
"epoch": 1.3706383971145475,
"grad_norm": 0.6813268661499023,
"learning_rate": 3.243943318604297e-05,
"loss": 0.66,
"step": 9275
},
{
"epoch": 1.3743329584892938,
"grad_norm": 0.7166799306869507,
"learning_rate": 3.224897150693281e-05,
"loss": 0.6689,
"step": 9300
},
{
"epoch": 1.3780275198640402,
"grad_norm": 0.6958301663398743,
"learning_rate": 3.205850982782264e-05,
"loss": 0.6512,
"step": 9325
},
{
"epoch": 1.3817220812387865,
"grad_norm": 0.8137691617012024,
"learning_rate": 3.186804814871248e-05,
"loss": 0.6691,
"step": 9350
},
{
"epoch": 1.3854166426135328,
"grad_norm": 0.6115707159042358,
"learning_rate": 3.1677586469602315e-05,
"loss": 0.6733,
"step": 9375
},
{
"epoch": 1.389111203988279,
"grad_norm": 0.7478678822517395,
"learning_rate": 3.1487124790492154e-05,
"loss": 0.6514,
"step": 9400
},
{
"epoch": 1.3928057653630252,
"grad_norm": 0.7280460000038147,
"learning_rate": 3.1296663111381993e-05,
"loss": 0.6375,
"step": 9425
},
{
"epoch": 1.3965003267377716,
"grad_norm": 0.6709932088851929,
"learning_rate": 3.110620143227183e-05,
"loss": 0.6843,
"step": 9450
},
{
"epoch": 1.400194888112518,
"grad_norm": 0.6445898413658142,
"learning_rate": 3.0915739753161666e-05,
"loss": 0.6395,
"step": 9475
},
{
"epoch": 1.4038894494872642,
"grad_norm": 0.6072065830230713,
"learning_rate": 3.07252780740515e-05,
"loss": 0.6615,
"step": 9500
},
{
"epoch": 1.4075840108620103,
"grad_norm": 0.7841944098472595,
"learning_rate": 3.053481639494134e-05,
"loss": 0.6388,
"step": 9525
},
{
"epoch": 1.4112785722367567,
"grad_norm": 0.7215288281440735,
"learning_rate": 3.0344354715831174e-05,
"loss": 0.6694,
"step": 9550
},
{
"epoch": 1.414973133611503,
"grad_norm": 0.5916579961776733,
"learning_rate": 3.0153893036721016e-05,
"loss": 0.7004,
"step": 9575
},
{
"epoch": 1.4186676949862493,
"grad_norm": 0.6357461214065552,
"learning_rate": 2.996343135761085e-05,
"loss": 0.6416,
"step": 9600
},
{
"epoch": 1.4223622563609957,
"grad_norm": 0.735261857509613,
"learning_rate": 2.977296967850069e-05,
"loss": 0.6374,
"step": 9625
},
{
"epoch": 1.4260568177357418,
"grad_norm": 0.7285844087600708,
"learning_rate": 2.9582507999390525e-05,
"loss": 0.6448,
"step": 9650
},
{
"epoch": 1.429751379110488,
"grad_norm": 0.7573617100715637,
"learning_rate": 2.9392046320280357e-05,
"loss": 0.6672,
"step": 9675
},
{
"epoch": 1.4334459404852344,
"grad_norm": 0.5082629919052124,
"learning_rate": 2.92015846411702e-05,
"loss": 0.6514,
"step": 9700
},
{
"epoch": 1.4371405018599808,
"grad_norm": 0.6786466836929321,
"learning_rate": 2.9011122962060033e-05,
"loss": 0.6542,
"step": 9725
},
{
"epoch": 1.440835063234727,
"grad_norm": 0.6085937023162842,
"learning_rate": 2.8820661282949872e-05,
"loss": 0.6581,
"step": 9750
},
{
"epoch": 1.4445296246094732,
"grad_norm": 0.6520203351974487,
"learning_rate": 2.8630199603839708e-05,
"loss": 0.6469,
"step": 9775
},
{
"epoch": 1.4482241859842195,
"grad_norm": 0.5597354769706726,
"learning_rate": 2.8439737924729548e-05,
"loss": 0.6418,
"step": 9800
},
{
"epoch": 1.4519187473589659,
"grad_norm": 0.6233022809028625,
"learning_rate": 2.8249276245619384e-05,
"loss": 0.6371,
"step": 9825
},
{
"epoch": 1.4556133087337122,
"grad_norm": 0.880703866481781,
"learning_rate": 2.8058814566509216e-05,
"loss": 0.6647,
"step": 9850
},
{
"epoch": 1.4593078701084585,
"grad_norm": 0.6821489334106445,
"learning_rate": 2.786835288739906e-05,
"loss": 0.6545,
"step": 9875
},
{
"epoch": 1.4630024314832046,
"grad_norm": 0.6734182834625244,
"learning_rate": 2.767789120828889e-05,
"loss": 0.6385,
"step": 9900
},
{
"epoch": 1.466696992857951,
"grad_norm": 0.8161661028862,
"learning_rate": 2.748742952917873e-05,
"loss": 0.6526,
"step": 9925
},
{
"epoch": 1.4703915542326973,
"grad_norm": 0.6308382153511047,
"learning_rate": 2.7296967850068567e-05,
"loss": 0.6561,
"step": 9950
},
{
"epoch": 1.4740861156074436,
"grad_norm": 0.791493833065033,
"learning_rate": 2.7106506170958406e-05,
"loss": 0.6607,
"step": 9975
},
{
"epoch": 1.47778067698219,
"grad_norm": 0.7888880372047424,
"learning_rate": 2.6916044491848243e-05,
"loss": 0.6511,
"step": 10000
}
],
"logging_steps": 25,
"max_steps": 13532,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 4.001171062528594e+19,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}