{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.9938347718865597,
"eval_steps": 500,
"global_step": 606,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.004932182490752158,
"grad_norm": 1.1257907152175903,
"learning_rate": 5.0000000000000004e-08,
"loss": 1.0986,
"step": 1
},
{
"epoch": 0.009864364981504316,
"grad_norm": 1.5788811445236206,
"learning_rate": 1.0000000000000001e-07,
"loss": 1.1152,
"step": 2
},
{
"epoch": 0.014796547472256474,
"grad_norm": 1.7341364622116089,
"learning_rate": 1.5000000000000002e-07,
"loss": 1.1392,
"step": 3
},
{
"epoch": 0.01972872996300863,
"grad_norm": 1.3568042516708374,
"learning_rate": 2.0000000000000002e-07,
"loss": 1.1046,
"step": 4
},
{
"epoch": 0.02466091245376079,
"grad_norm": 1.1873835325241089,
"learning_rate": 2.5000000000000004e-07,
"loss": 1.1123,
"step": 5
},
{
"epoch": 0.029593094944512947,
"grad_norm": 1.0728861093521118,
"learning_rate": 3.0000000000000004e-07,
"loss": 1.1024,
"step": 6
},
{
"epoch": 0.0345252774352651,
"grad_norm": 1.991363525390625,
"learning_rate": 3.5000000000000004e-07,
"loss": 1.1045,
"step": 7
},
{
"epoch": 0.03945745992601726,
"grad_norm": 1.3626810312271118,
"learning_rate": 4.0000000000000003e-07,
"loss": 1.1414,
"step": 8
},
{
"epoch": 0.04438964241676942,
"grad_norm": 1.2338333129882812,
"learning_rate": 4.5000000000000003e-07,
"loss": 1.1312,
"step": 9
},
{
"epoch": 0.04932182490752158,
"grad_norm": 1.2466716766357422,
"learning_rate": 5.000000000000001e-07,
"loss": 1.0895,
"step": 10
},
{
"epoch": 0.05425400739827373,
"grad_norm": 1.360613465309143,
"learning_rate": 5.5e-07,
"loss": 1.1285,
"step": 11
},
{
"epoch": 0.059186189889025895,
"grad_norm": 2.145904541015625,
"learning_rate": 6.000000000000001e-07,
"loss": 1.079,
"step": 12
},
{
"epoch": 0.06411837237977805,
"grad_norm": 1.5354256629943848,
"learning_rate": 6.5e-07,
"loss": 1.0897,
"step": 13
},
{
"epoch": 0.0690505548705302,
"grad_norm": 1.2002555131912231,
"learning_rate": 7.000000000000001e-07,
"loss": 1.0474,
"step": 14
},
{
"epoch": 0.07398273736128237,
"grad_norm": 1.213218331336975,
"learning_rate": 7.5e-07,
"loss": 1.0336,
"step": 15
},
{
"epoch": 0.07891491985203453,
"grad_norm": 1.0160884857177734,
"learning_rate": 8.000000000000001e-07,
"loss": 1.0396,
"step": 16
},
{
"epoch": 0.08384710234278668,
"grad_norm": 1.3291572332382202,
"learning_rate": 8.500000000000001e-07,
"loss": 1.0111,
"step": 17
},
{
"epoch": 0.08877928483353884,
"grad_norm": 9.560614585876465,
"learning_rate": 9.000000000000001e-07,
"loss": 1.0343,
"step": 18
},
{
"epoch": 0.093711467324291,
"grad_norm": 0.8536161184310913,
"learning_rate": 9.500000000000001e-07,
"loss": 1.0288,
"step": 19
},
{
"epoch": 0.09864364981504316,
"grad_norm": 0.8779953122138977,
"learning_rate": 1.0000000000000002e-06,
"loss": 0.9296,
"step": 20
},
{
"epoch": 0.10357583230579531,
"grad_norm": 0.9102424383163452,
"learning_rate": 1.0500000000000001e-06,
"loss": 0.9348,
"step": 21
},
{
"epoch": 0.10850801479654747,
"grad_norm": 0.5936668515205383,
"learning_rate": 1.1e-06,
"loss": 0.9103,
"step": 22
},
{
"epoch": 0.11344019728729964,
"grad_norm": 0.38109758496284485,
"learning_rate": 1.1500000000000002e-06,
"loss": 0.9412,
"step": 23
},
{
"epoch": 0.11837237977805179,
"grad_norm": 0.6523936986923218,
"learning_rate": 1.2000000000000002e-06,
"loss": 0.8968,
"step": 24
},
{
"epoch": 0.12330456226880394,
"grad_norm": 0.4067033529281616,
"learning_rate": 1.25e-06,
"loss": 0.9031,
"step": 25
},
{
"epoch": 0.1282367447595561,
"grad_norm": 0.4689585566520691,
"learning_rate": 1.3e-06,
"loss": 0.8724,
"step": 26
},
{
"epoch": 0.13316892725030827,
"grad_norm": 0.5714285373687744,
"learning_rate": 1.3500000000000002e-06,
"loss": 0.9195,
"step": 27
},
{
"epoch": 0.1381011097410604,
"grad_norm": 0.43251562118530273,
"learning_rate": 1.4000000000000001e-06,
"loss": 0.8937,
"step": 28
},
{
"epoch": 0.14303329223181258,
"grad_norm": 0.3626736104488373,
"learning_rate": 1.45e-06,
"loss": 0.8685,
"step": 29
},
{
"epoch": 0.14796547472256474,
"grad_norm": 0.3419496715068817,
"learning_rate": 1.5e-06,
"loss": 0.8833,
"step": 30
},
{
"epoch": 0.15289765721331688,
"grad_norm": 0.4578935503959656,
"learning_rate": 1.5500000000000002e-06,
"loss": 0.8287,
"step": 31
},
{
"epoch": 0.15782983970406905,
"grad_norm": 0.4360993206501007,
"learning_rate": 1.6000000000000001e-06,
"loss": 0.8452,
"step": 32
},
{
"epoch": 0.16276202219482122,
"grad_norm": 0.5611799359321594,
"learning_rate": 1.6500000000000003e-06,
"loss": 0.872,
"step": 33
},
{
"epoch": 0.16769420468557336,
"grad_norm": 0.2829279601573944,
"learning_rate": 1.7000000000000002e-06,
"loss": 0.87,
"step": 34
},
{
"epoch": 0.17262638717632553,
"grad_norm": 0.38662174344062805,
"learning_rate": 1.75e-06,
"loss": 0.816,
"step": 35
},
{
"epoch": 0.17755856966707767,
"grad_norm": 0.4239096939563751,
"learning_rate": 1.8000000000000001e-06,
"loss": 0.8599,
"step": 36
},
{
"epoch": 0.18249075215782984,
"grad_norm": 0.653641939163208,
"learning_rate": 1.85e-06,
"loss": 0.8456,
"step": 37
},
{
"epoch": 0.187422934648582,
"grad_norm": 0.21455056965351105,
"learning_rate": 1.9000000000000002e-06,
"loss": 0.795,
"step": 38
},
{
"epoch": 0.19235511713933415,
"grad_norm": 0.26148131489753723,
"learning_rate": 1.9500000000000004e-06,
"loss": 0.8405,
"step": 39
},
{
"epoch": 0.19728729963008632,
"grad_norm": 0.31602978706359863,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.8569,
"step": 40
},
{
"epoch": 0.20221948212083848,
"grad_norm": 0.20896084606647491,
"learning_rate": 2.05e-06,
"loss": 0.801,
"step": 41
},
{
"epoch": 0.20715166461159062,
"grad_norm": 0.21203891932964325,
"learning_rate": 2.1000000000000002e-06,
"loss": 0.8202,
"step": 42
},
{
"epoch": 0.2120838471023428,
"grad_norm": 0.23695039749145508,
"learning_rate": 2.15e-06,
"loss": 0.8088,
"step": 43
},
{
"epoch": 0.21701602959309493,
"grad_norm": 0.1736220270395279,
"learning_rate": 2.2e-06,
"loss": 0.8031,
"step": 44
},
{
"epoch": 0.2219482120838471,
"grad_norm": 0.23390308022499084,
"learning_rate": 2.25e-06,
"loss": 0.7914,
"step": 45
},
{
"epoch": 0.22688039457459927,
"grad_norm": 0.23420561850070953,
"learning_rate": 2.3000000000000004e-06,
"loss": 0.8092,
"step": 46
},
{
"epoch": 0.2318125770653514,
"grad_norm": 0.19289737939834595,
"learning_rate": 2.35e-06,
"loss": 0.7767,
"step": 47
},
{
"epoch": 0.23674475955610358,
"grad_norm": 0.23044224083423615,
"learning_rate": 2.4000000000000003e-06,
"loss": 0.8185,
"step": 48
},
{
"epoch": 0.24167694204685575,
"grad_norm": 0.2052794247865677,
"learning_rate": 2.4500000000000003e-06,
"loss": 0.781,
"step": 49
},
{
"epoch": 0.2466091245376079,
"grad_norm": 0.19429263472557068,
"learning_rate": 2.5e-06,
"loss": 0.8125,
"step": 50
},
{
"epoch": 0.25154130702836003,
"grad_norm": 0.18123206496238708,
"learning_rate": 2.55e-06,
"loss": 0.7775,
"step": 51
},
{
"epoch": 0.2564734895191122,
"grad_norm": 1.8012878894805908,
"learning_rate": 2.6e-06,
"loss": 0.7692,
"step": 52
},
{
"epoch": 0.26140567200986436,
"grad_norm": 0.2761130928993225,
"learning_rate": 2.6500000000000005e-06,
"loss": 0.7996,
"step": 53
},
{
"epoch": 0.26633785450061653,
"grad_norm": 0.1579173356294632,
"learning_rate": 2.7000000000000004e-06,
"loss": 0.7936,
"step": 54
},
{
"epoch": 0.2712700369913687,
"grad_norm": 0.16990424692630768,
"learning_rate": 2.7500000000000004e-06,
"loss": 0.8067,
"step": 55
},
{
"epoch": 0.2762022194821208,
"grad_norm": 1.0547306537628174,
"learning_rate": 2.8000000000000003e-06,
"loss": 0.7894,
"step": 56
},
{
"epoch": 0.281134401972873,
"grad_norm": 0.18402548134326935,
"learning_rate": 2.85e-06,
"loss": 0.7564,
"step": 57
},
{
"epoch": 0.28606658446362515,
"grad_norm": 0.19761355221271515,
"learning_rate": 2.9e-06,
"loss": 0.7754,
"step": 58
},
{
"epoch": 0.2909987669543773,
"grad_norm": 0.26926949620246887,
"learning_rate": 2.95e-06,
"loss": 0.7698,
"step": 59
},
{
"epoch": 0.2959309494451295,
"grad_norm": 0.19200852513313293,
"learning_rate": 3e-06,
"loss": 0.757,
"step": 60
},
{
"epoch": 0.3008631319358816,
"grad_norm": 0.14297647774219513,
"learning_rate": 3.05e-06,
"loss": 0.7372,
"step": 61
},
{
"epoch": 0.30579531442663377,
"grad_norm": 0.1689106523990631,
"learning_rate": 3.1000000000000004e-06,
"loss": 0.7511,
"step": 62
},
{
"epoch": 0.31072749691738594,
"grad_norm": 0.3330126106739044,
"learning_rate": 3.1500000000000003e-06,
"loss": 0.747,
"step": 63
},
{
"epoch": 0.3156596794081381,
"grad_norm": 0.2532431483268738,
"learning_rate": 3.2000000000000003e-06,
"loss": 0.798,
"step": 64
},
{
"epoch": 0.3205918618988903,
"grad_norm": 0.14446307718753815,
"learning_rate": 3.2500000000000002e-06,
"loss": 0.7631,
"step": 65
},
{
"epoch": 0.32552404438964244,
"grad_norm": 0.24439038336277008,
"learning_rate": 3.3000000000000006e-06,
"loss": 0.7405,
"step": 66
},
{
"epoch": 0.33045622688039455,
"grad_norm": 0.15191350877285004,
"learning_rate": 3.3500000000000005e-06,
"loss": 0.7559,
"step": 67
},
{
"epoch": 0.3353884093711467,
"grad_norm": 0.16212493181228638,
"learning_rate": 3.4000000000000005e-06,
"loss": 0.7452,
"step": 68
},
{
"epoch": 0.3403205918618989,
"grad_norm": 0.1210586428642273,
"learning_rate": 3.45e-06,
"loss": 0.7817,
"step": 69
},
{
"epoch": 0.34525277435265106,
"grad_norm": 0.35743728280067444,
"learning_rate": 3.5e-06,
"loss": 0.7811,
"step": 70
},
{
"epoch": 0.35018495684340323,
"grad_norm": 0.12270136177539825,
"learning_rate": 3.5500000000000003e-06,
"loss": 0.7679,
"step": 71
},
{
"epoch": 0.35511713933415534,
"grad_norm": 0.14078450202941895,
"learning_rate": 3.6000000000000003e-06,
"loss": 0.7479,
"step": 72
},
{
"epoch": 0.3600493218249075,
"grad_norm": 0.11881807446479797,
"learning_rate": 3.65e-06,
"loss": 0.7475,
"step": 73
},
{
"epoch": 0.3649815043156597,
"grad_norm": 0.11201618611812592,
"learning_rate": 3.7e-06,
"loss": 0.7396,
"step": 74
},
{
"epoch": 0.36991368680641185,
"grad_norm": 0.14292244613170624,
"learning_rate": 3.7500000000000005e-06,
"loss": 0.7117,
"step": 75
},
{
"epoch": 0.374845869297164,
"grad_norm": 0.18425297737121582,
"learning_rate": 3.8000000000000005e-06,
"loss": 0.718,
"step": 76
},
{
"epoch": 0.3797780517879161,
"grad_norm": 0.16399289667606354,
"learning_rate": 3.85e-06,
"loss": 0.7314,
"step": 77
},
{
"epoch": 0.3847102342786683,
"grad_norm": 0.1419249176979065,
"learning_rate": 3.900000000000001e-06,
"loss": 0.7996,
"step": 78
},
{
"epoch": 0.38964241676942046,
"grad_norm": 0.154827281832695,
"learning_rate": 3.95e-06,
"loss": 0.7308,
"step": 79
},
{
"epoch": 0.39457459926017263,
"grad_norm": 0.17852246761322021,
"learning_rate": 4.000000000000001e-06,
"loss": 0.7161,
"step": 80
},
{
"epoch": 0.3995067817509248,
"grad_norm": 0.12098474055528641,
"learning_rate": 4.05e-06,
"loss": 0.7581,
"step": 81
},
{
"epoch": 0.40443896424167697,
"grad_norm": 0.15087738633155823,
"learning_rate": 4.1e-06,
"loss": 0.728,
"step": 82
},
{
"epoch": 0.4093711467324291,
"grad_norm": 0.23446552455425262,
"learning_rate": 4.15e-06,
"loss": 0.7089,
"step": 83
},
{
"epoch": 0.41430332922318125,
"grad_norm": 0.12758222222328186,
"learning_rate": 4.2000000000000004e-06,
"loss": 0.7513,
"step": 84
},
{
"epoch": 0.4192355117139334,
"grad_norm": 0.18490324914455414,
"learning_rate": 4.25e-06,
"loss": 0.7232,
"step": 85
},
{
"epoch": 0.4241676942046856,
"grad_norm": 3.9814538955688477,
"learning_rate": 4.3e-06,
"loss": 0.7071,
"step": 86
},
{
"epoch": 0.42909987669543775,
"grad_norm": 0.13754811882972717,
"learning_rate": 4.350000000000001e-06,
"loss": 0.7218,
"step": 87
},
{
"epoch": 0.43403205918618987,
"grad_norm": 0.15279600024223328,
"learning_rate": 4.4e-06,
"loss": 0.7531,
"step": 88
},
{
"epoch": 0.43896424167694204,
"grad_norm": 0.13091601431369781,
"learning_rate": 4.450000000000001e-06,
"loss": 0.6954,
"step": 89
},
{
"epoch": 0.4438964241676942,
"grad_norm": 0.14220909774303436,
"learning_rate": 4.5e-06,
"loss": 0.7227,
"step": 90
},
{
"epoch": 0.44882860665844637,
"grad_norm": 0.11386663466691971,
"learning_rate": 4.5500000000000005e-06,
"loss": 0.7196,
"step": 91
},
{
"epoch": 0.45376078914919854,
"grad_norm": 0.16582414507865906,
"learning_rate": 4.600000000000001e-06,
"loss": 0.7155,
"step": 92
},
{
"epoch": 0.45869297163995065,
"grad_norm": 0.2046297937631607,
"learning_rate": 4.65e-06,
"loss": 0.7201,
"step": 93
},
{
"epoch": 0.4636251541307028,
"grad_norm": 0.11329102516174316,
"learning_rate": 4.7e-06,
"loss": 0.7233,
"step": 94
},
{
"epoch": 0.468557336621455,
"grad_norm": 0.1256910264492035,
"learning_rate": 4.75e-06,
"loss": 0.7294,
"step": 95
},
{
"epoch": 0.47348951911220716,
"grad_norm": 0.16878068447113037,
"learning_rate": 4.800000000000001e-06,
"loss": 0.7292,
"step": 96
},
{
"epoch": 0.4784217016029593,
"grad_norm": 0.1140855923295021,
"learning_rate": 4.85e-06,
"loss": 0.7215,
"step": 97
},
{
"epoch": 0.4833538840937115,
"grad_norm": 0.13400927186012268,
"learning_rate": 4.9000000000000005e-06,
"loss": 0.7446,
"step": 98
},
{
"epoch": 0.4882860665844636,
"grad_norm": 0.13537898659706116,
"learning_rate": 4.95e-06,
"loss": 0.7083,
"step": 99
},
{
"epoch": 0.4932182490752158,
"grad_norm": 0.23342657089233398,
"learning_rate": 5e-06,
"loss": 0.7172,
"step": 100
},
{
"epoch": 0.49815043156596794,
"grad_norm": 2.2964117527008057,
"learning_rate": 4.999951815503011e-06,
"loss": 0.7564,
"step": 101
},
{
"epoch": 0.5030826140567201,
"grad_norm": 0.20878185331821442,
"learning_rate": 4.999807263869441e-06,
"loss": 0.7255,
"step": 102
},
{
"epoch": 0.5080147965474723,
"grad_norm": 0.26244527101516724,
"learning_rate": 4.999566350671405e-06,
"loss": 0.707,
"step": 103
},
{
"epoch": 0.5129469790382244,
"grad_norm": 0.12257402390241623,
"learning_rate": 4.999229085195532e-06,
"loss": 0.7147,
"step": 104
},
{
"epoch": 0.5178791615289766,
"grad_norm": 0.18218959867954254,
"learning_rate": 4.998795480442595e-06,
"loss": 0.7113,
"step": 105
},
{
"epoch": 0.5228113440197287,
"grad_norm": 0.1487857848405838,
"learning_rate": 4.998265553127013e-06,
"loss": 0.7532,
"step": 106
},
{
"epoch": 0.5277435265104808,
"grad_norm": 0.18883858621120453,
"learning_rate": 4.997639323676214e-06,
"loss": 0.6822,
"step": 107
},
{
"epoch": 0.5326757090012331,
"grad_norm": 0.13591569662094116,
"learning_rate": 4.996916816229838e-06,
"loss": 0.7569,
"step": 108
},
{
"epoch": 0.5376078914919852,
"grad_norm": 0.9631307721138,
"learning_rate": 4.99609805863881e-06,
"loss": 0.745,
"step": 109
},
{
"epoch": 0.5425400739827374,
"grad_norm": 0.13628794252872467,
"learning_rate": 4.995183082464269e-06,
"loss": 0.732,
"step": 110
},
{
"epoch": 0.5474722564734895,
"grad_norm": 2.4701926708221436,
"learning_rate": 4.994171922976349e-06,
"loss": 0.756,
"step": 111
},
{
"epoch": 0.5524044389642416,
"grad_norm": 1.1472599506378174,
"learning_rate": 4.993064619152818e-06,
"loss": 0.7427,
"step": 112
},
{
"epoch": 0.5573366214549939,
"grad_norm": 0.11852557212114334,
"learning_rate": 4.991861213677578e-06,
"loss": 0.7124,
"step": 113
},
{
"epoch": 0.562268803945746,
"grad_norm": 0.09930302202701569,
"learning_rate": 4.99056175293902e-06,
"loss": 0.7065,
"step": 114
},
{
"epoch": 0.5672009864364982,
"grad_norm": 0.12781856954097748,
"learning_rate": 4.989166287028234e-06,
"loss": 0.7298,
"step": 115
},
{
"epoch": 0.5721331689272503,
"grad_norm": 0.39002031087875366,
"learning_rate": 4.987674869737078e-06,
"loss": 0.6996,
"step": 116
},
{
"epoch": 0.5770653514180024,
"grad_norm": 0.1187821626663208,
"learning_rate": 4.986087558556104e-06,
"loss": 0.7333,
"step": 117
},
{
"epoch": 0.5819975339087546,
"grad_norm": 0.11041084676980972,
"learning_rate": 4.984404414672346e-06,
"loss": 0.7101,
"step": 118
},
{
"epoch": 0.5869297163995068,
"grad_norm": 0.1762731671333313,
"learning_rate": 4.9826255029669575e-06,
"loss": 0.7063,
"step": 119
},
{
"epoch": 0.591861898890259,
"grad_norm": 0.12481699883937836,
"learning_rate": 4.980750892012711e-06,
"loss": 0.725,
"step": 120
},
{
"epoch": 0.5967940813810111,
"grad_norm": 0.7090242505073547,
"learning_rate": 4.978780654071355e-06,
"loss": 0.7105,
"step": 121
},
{
"epoch": 0.6017262638717632,
"grad_norm": 0.11730094999074936,
"learning_rate": 4.976714865090827e-06,
"loss": 0.7091,
"step": 122
},
{
"epoch": 0.6066584463625154,
"grad_norm": 0.12327645719051361,
"learning_rate": 4.974553604702332e-06,
"loss": 0.739,
"step": 123
},
{
"epoch": 0.6115906288532675,
"grad_norm": 0.22779831290245056,
"learning_rate": 4.972296956217265e-06,
"loss": 0.6924,
"step": 124
},
{
"epoch": 0.6165228113440198,
"grad_norm": 0.3117700517177582,
"learning_rate": 4.969945006624003e-06,
"loss": 0.7166,
"step": 125
},
{
"epoch": 0.6214549938347719,
"grad_norm": 0.1463162750005722,
"learning_rate": 4.967497846584552e-06,
"loss": 0.7236,
"step": 126
},
{
"epoch": 0.6263871763255241,
"grad_norm": 0.11583642661571503,
"learning_rate": 4.9649555704310545e-06,
"loss": 0.6957,
"step": 127
},
{
"epoch": 0.6313193588162762,
"grad_norm": 0.1344325989484787,
"learning_rate": 4.962318276162148e-06,
"loss": 0.7031,
"step": 128
},
{
"epoch": 0.6362515413070283,
"grad_norm": 0.13892586529254913,
"learning_rate": 4.959586065439189e-06,
"loss": 0.6925,
"step": 129
},
{
"epoch": 0.6411837237977805,
"grad_norm": 0.19953861832618713,
"learning_rate": 4.956759043582339e-06,
"loss": 0.72,
"step": 130
},
{
"epoch": 0.6461159062885327,
"grad_norm": 0.2914772629737854,
"learning_rate": 4.953837319566498e-06,
"loss": 0.7175,
"step": 131
},
{
"epoch": 0.6510480887792849,
"grad_norm": 0.109347403049469,
"learning_rate": 4.950821006017107e-06,
"loss": 0.6923,
"step": 132
},
{
"epoch": 0.655980271270037,
"grad_norm": 0.2301577925682068,
"learning_rate": 4.947710219205808e-06,
"loss": 0.7093,
"step": 133
},
{
"epoch": 0.6609124537607891,
"grad_norm": 0.1368824988603592,
"learning_rate": 4.9445050790459585e-06,
"loss": 0.6746,
"step": 134
},
{
"epoch": 0.6658446362515413,
"grad_norm": 0.13323882222175598,
"learning_rate": 4.9412057090880115e-06,
"loss": 0.6919,
"step": 135
},
{
"epoch": 0.6707768187422934,
"grad_norm": 0.14521068334579468,
"learning_rate": 4.937812236514754e-06,
"loss": 0.7352,
"step": 136
},
{
"epoch": 0.6757090012330457,
"grad_norm": 0.11869832128286362,
"learning_rate": 4.9343247921364e-06,
"loss": 0.6759,
"step": 137
},
{
"epoch": 0.6806411837237978,
"grad_norm": 0.20703694224357605,
"learning_rate": 4.930743510385551e-06,
"loss": 0.6984,
"step": 138
},
{
"epoch": 0.6855733662145499,
"grad_norm": 0.13110846281051636,
"learning_rate": 4.927068529312017e-06,
"loss": 0.7143,
"step": 139
},
{
"epoch": 0.6905055487053021,
"grad_norm": 0.13985510170459747,
"learning_rate": 4.923299990577488e-06,
"loss": 0.6899,
"step": 140
},
{
"epoch": 0.6954377311960542,
"grad_norm": 0.1306031197309494,
"learning_rate": 4.919438039450079e-06,
"loss": 0.6801,
"step": 141
},
{
"epoch": 0.7003699136868065,
"grad_norm": 0.10599593818187714,
"learning_rate": 4.915482824798728e-06,
"loss": 0.7191,
"step": 142
},
{
"epoch": 0.7053020961775586,
"grad_norm": 0.1460665911436081,
"learning_rate": 4.911434499087457e-06,
"loss": 0.7134,
"step": 143
},
{
"epoch": 0.7102342786683107,
"grad_norm": 0.1587439626455307,
"learning_rate": 4.907293218369499e-06,
"loss": 0.6987,
"step": 144
},
{
"epoch": 0.7151664611590629,
"grad_norm": 0.22658978402614594,
"learning_rate": 4.903059142281273e-06,
"loss": 0.7049,
"step": 145
},
{
"epoch": 0.720098643649815,
"grad_norm": 0.6049544215202332,
"learning_rate": 4.8987324340362445e-06,
"loss": 0.709,
"step": 146
},
{
"epoch": 0.7250308261405672,
"grad_norm": 0.1274397075176239,
"learning_rate": 4.894313260418617e-06,
"loss": 0.6762,
"step": 147
},
{
"epoch": 0.7299630086313194,
"grad_norm": 0.1254238486289978,
"learning_rate": 4.889801791776921e-06,
"loss": 0.6598,
"step": 148
},
{
"epoch": 0.7348951911220715,
"grad_norm": 0.12932169437408447,
"learning_rate": 4.885198202017431e-06,
"loss": 0.6905,
"step": 149
},
{
"epoch": 0.7398273736128237,
"grad_norm": 0.1857711523771286,
"learning_rate": 4.880502668597475e-06,
"loss": 0.7186,
"step": 150
},
{
"epoch": 0.7447595561035758,
"grad_norm": 0.11666952073574066,
"learning_rate": 4.875715372518585e-06,
"loss": 0.6971,
"step": 151
},
{
"epoch": 0.749691738594328,
"grad_norm": 0.11182554066181183,
"learning_rate": 4.870836498319523e-06,
"loss": 0.6876,
"step": 152
},
{
"epoch": 0.7546239210850801,
"grad_norm": 0.11282117664813995,
"learning_rate": 4.865866234069169e-06,
"loss": 0.7039,
"step": 153
},
{
"epoch": 0.7595561035758323,
"grad_norm": 0.11031971871852875,
"learning_rate": 4.86080477135927e-06,
"loss": 0.6703,
"step": 154
},
{
"epoch": 0.7644882860665845,
"grad_norm": 0.1600004881620407,
"learning_rate": 4.855652305297052e-06,
"loss": 0.6776,
"step": 155
},
{
"epoch": 0.7694204685573366,
"grad_norm": 0.13014693558216095,
"learning_rate": 4.8504090344977036e-06,
"loss": 0.6926,
"step": 156
},
{
"epoch": 0.7743526510480888,
"grad_norm": 0.17829261720180511,
"learning_rate": 4.84507516107672e-06,
"loss": 0.7036,
"step": 157
},
{
"epoch": 0.7792848335388409,
"grad_norm": 0.13715577125549316,
"learning_rate": 4.839650890642104e-06,
"loss": 0.6788,
"step": 158
},
{
"epoch": 0.7842170160295932,
"grad_norm": 0.16660194098949432,
"learning_rate": 4.834136432286452e-06,
"loss": 0.7052,
"step": 159
},
{
"epoch": 0.7891491985203453,
"grad_norm": 0.10915841907262802,
"learning_rate": 4.828531998578885e-06,
"loss": 0.6484,
"step": 160
},
{
"epoch": 0.7940813810110974,
"grad_norm": 0.11565055698156357,
"learning_rate": 4.822837805556858e-06,
"loss": 0.6853,
"step": 161
},
{
"epoch": 0.7990135635018496,
"grad_norm": 0.11839542537927628,
"learning_rate": 4.817054072717833e-06,
"loss": 0.6647,
"step": 162
},
{
"epoch": 0.8039457459926017,
"grad_norm": 0.11023162305355072,
"learning_rate": 4.811181023010815e-06,
"loss": 0.7049,
"step": 163
},
{
"epoch": 0.8088779284833539,
"grad_norm": 0.21431177854537964,
"learning_rate": 4.805218882827761e-06,
"loss": 0.6942,
"step": 164
},
{
"epoch": 0.813810110974106,
"grad_norm": 0.11522458493709564,
"learning_rate": 4.799167881994852e-06,
"loss": 0.6929,
"step": 165
},
{
"epoch": 0.8187422934648582,
"grad_norm": 0.12389165163040161,
"learning_rate": 4.793028253763633e-06,
"loss": 0.6994,
"step": 166
},
{
"epoch": 0.8236744759556104,
"grad_norm": 0.11536920070648193,
"learning_rate": 4.786800234802022e-06,
"loss": 0.6959,
"step": 167
},
{
"epoch": 0.8286066584463625,
"grad_norm": 0.1272544115781784,
"learning_rate": 4.780484065185188e-06,
"loss": 0.6626,
"step": 168
},
{
"epoch": 0.8335388409371147,
"grad_norm": 0.16449759900569916,
"learning_rate": 4.7740799883862966e-06,
"loss": 0.6925,
"step": 169
},
{
"epoch": 0.8384710234278668,
"grad_norm": 0.2522273361682892,
"learning_rate": 4.767588251267121e-06,
"loss": 0.7052,
"step": 170
},
{
"epoch": 0.843403205918619,
"grad_norm": 0.09476125985383987,
"learning_rate": 4.761009104068533e-06,
"loss": 0.6722,
"step": 171
},
{
"epoch": 0.8483353884093712,
"grad_norm": 0.23170353472232819,
"learning_rate": 4.754342800400852e-06,
"loss": 0.6794,
"step": 172
},
{
"epoch": 0.8532675709001233,
"grad_norm": 0.14337614178657532,
"learning_rate": 4.747589597234068e-06,
"loss": 0.6564,
"step": 173
},
{
"epoch": 0.8581997533908755,
"grad_norm": 0.17019444704055786,
"learning_rate": 4.740749754887939e-06,
"loss": 0.6688,
"step": 174
},
{
"epoch": 0.8631319358816276,
"grad_norm": 0.1297682523727417,
"learning_rate": 4.7338235370219556e-06,
"loss": 0.7185,
"step": 175
},
{
"epoch": 0.8680641183723797,
"grad_norm": 0.17522914707660675,
"learning_rate": 4.726811210625176e-06,
"loss": 0.7061,
"step": 176
},
{
"epoch": 0.872996300863132,
"grad_norm": 0.3105694651603699,
"learning_rate": 4.7197130460059385e-06,
"loss": 0.688,
"step": 177
},
{
"epoch": 0.8779284833538841,
"grad_norm": 0.10563701391220093,
"learning_rate": 4.712529316781435e-06,
"loss": 0.6695,
"step": 178
},
{
"epoch": 0.8828606658446363,
"grad_norm": 0.16776345670223236,
"learning_rate": 4.705260299867169e-06,
"loss": 0.6855,
"step": 179
},
{
"epoch": 0.8877928483353884,
"grad_norm": 0.10242436826229095,
"learning_rate": 4.697906275466279e-06,
"loss": 0.6768,
"step": 180
},
{
"epoch": 0.8927250308261405,
"grad_norm": 0.1279619336128235,
"learning_rate": 4.69046752705874e-06,
"loss": 0.6854,
"step": 181
},
{
"epoch": 0.8976572133168927,
"grad_norm": 0.190452441573143,
"learning_rate": 4.682944341390431e-06,
"loss": 0.7199,
"step": 182
},
{
"epoch": 0.9025893958076449,
"grad_norm": 0.2059255987405777,
"learning_rate": 4.675337008462085e-06,
"loss": 0.6694,
"step": 183
},
{
"epoch": 0.9075215782983971,
"grad_norm": 0.10989531874656677,
"learning_rate": 4.667645821518111e-06,
"loss": 0.6707,
"step": 184
},
{
"epoch": 0.9124537607891492,
"grad_norm": 0.11381805688142776,
"learning_rate": 4.659871077035289e-06,
"loss": 0.6617,
"step": 185
},
{
"epoch": 0.9173859432799013,
"grad_norm": 0.18315713107585907,
"learning_rate": 4.65201307471134e-06,
"loss": 0.6721,
"step": 186
},
{
"epoch": 0.9223181257706535,
"grad_norm": 0.13762731850147247,
"learning_rate": 4.644072117453377e-06,
"loss": 0.6691,
"step": 187
},
{
"epoch": 0.9272503082614056,
"grad_norm": 0.13186247646808624,
"learning_rate": 4.636048511366222e-06,
"loss": 0.6907,
"step": 188
},
{
"epoch": 0.9321824907521579,
"grad_norm": 0.23819328844547272,
"learning_rate": 4.6279425657406154e-06,
"loss": 0.7147,
"step": 189
},
{
"epoch": 0.93711467324291,
"grad_norm": 0.3640376329421997,
"learning_rate": 4.619754593041287e-06,
"loss": 0.6981,
"step": 190
},
{
"epoch": 0.9420468557336621,
"grad_norm": 0.19634462893009186,
"learning_rate": 4.6114849088949146e-06,
"loss": 0.6704,
"step": 191
},
{
"epoch": 0.9469790382244143,
"grad_norm": 0.11775479465723038,
"learning_rate": 4.603133832077953e-06,
"loss": 0.6909,
"step": 192
},
{
"epoch": 0.9519112207151664,
"grad_norm": 0.10143059492111206,
"learning_rate": 4.594701684504352e-06,
"loss": 0.6625,
"step": 193
},
{
"epoch": 0.9568434032059187,
"grad_norm": 0.11726385354995728,
"learning_rate": 4.586188791213143e-06,
"loss": 0.6692,
"step": 194
},
{
"epoch": 0.9617755856966708,
"grad_norm": 0.11105194687843323,
"learning_rate": 4.577595480355911e-06,
"loss": 0.6963,
"step": 195
},
{
"epoch": 0.966707768187423,
"grad_norm": 0.10140436887741089,
"learning_rate": 4.568922083184144e-06,
"loss": 0.6969,
"step": 196
},
{
"epoch": 0.9716399506781751,
"grad_norm": 0.2534874677658081,
"learning_rate": 4.560168934036467e-06,
"loss": 0.6852,
"step": 197
},
{
"epoch": 0.9765721331689272,
"grad_norm": 0.10877111554145813,
"learning_rate": 4.55133637032575e-06,
"loss": 0.6998,
"step": 198
},
{
"epoch": 0.9815043156596794,
"grad_norm": 0.14067795872688293,
"learning_rate": 4.542424732526105e-06,
"loss": 0.6685,
"step": 199
},
{
"epoch": 0.9864364981504316,
"grad_norm": 0.7147830128669739,
"learning_rate": 4.533434364159761e-06,
"loss": 0.6982,
"step": 200
},
{
"epoch": 0.9913686806411838,
"grad_norm": 0.15026246011257172,
"learning_rate": 4.524365611783818e-06,
"loss": 0.6915,
"step": 201
},
{
"epoch": 0.9963008631319359,
"grad_norm": 0.12991268932819366,
"learning_rate": 4.515218824976895e-06,
"loss": 0.6539,
"step": 202
},
{
"epoch": 1.0036991368680641,
"grad_norm": 0.11360019445419312,
"learning_rate": 4.505994356325648e-06,
"loss": 0.6479,
"step": 203
},
{
"epoch": 1.0086313193588163,
"grad_norm": 0.1208319365978241,
"learning_rate": 4.496692561411182e-06,
"loss": 0.6704,
"step": 204
},
{
"epoch": 1.0135635018495683,
"grad_norm": 0.15796445310115814,
"learning_rate": 4.487313798795347e-06,
"loss": 0.6585,
"step": 205
},
{
"epoch": 1.0184956843403206,
"grad_norm": 0.2212684154510498,
"learning_rate": 4.477858430006906e-06,
"loss": 0.6748,
"step": 206
},
{
"epoch": 1.0234278668310728,
"grad_norm": 0.14605554938316345,
"learning_rate": 4.468326819527613e-06,
"loss": 0.6671,
"step": 207
},
{
"epoch": 1.028360049321825,
"grad_norm": 0.11415625363588333,
"learning_rate": 4.458719334778153e-06,
"loss": 0.681,
"step": 208
},
{
"epoch": 1.033292231812577,
"grad_norm": 0.113845095038414,
"learning_rate": 4.449036346103982e-06,
"loss": 0.6803,
"step": 209
},
{
"epoch": 1.0382244143033292,
"grad_norm": 0.10725957155227661,
"learning_rate": 4.43927822676105e-06,
"loss": 0.648,
"step": 210
},
{
"epoch": 1.0431565967940815,
"grad_norm": 0.12335722148418427,
"learning_rate": 4.429445352901415e-06,
"loss": 0.695,
"step": 211
},
{
"epoch": 1.0480887792848335,
"grad_norm": 0.10324176400899887,
"learning_rate": 4.419538103558742e-06,
"loss": 0.6529,
"step": 212
},
{
"epoch": 1.0530209617755857,
"grad_norm": 0.10406983643770218,
"learning_rate": 4.409556860633692e-06,
"loss": 0.6599,
"step": 213
},
{
"epoch": 1.057953144266338,
"grad_norm": 0.11518678814172745,
"learning_rate": 4.3995020088792e-06,
"loss": 0.6601,
"step": 214
},
{
"epoch": 1.06288532675709,
"grad_norm": 0.14593787491321564,
"learning_rate": 4.3893739358856465e-06,
"loss": 0.6714,
"step": 215
},
{
"epoch": 1.0678175092478421,
"grad_norm": 0.17964190244674683,
"learning_rate": 4.379173032065912e-06,
"loss": 0.6441,
"step": 216
},
{
"epoch": 1.0727496917385944,
"grad_norm": 0.12014143913984299,
"learning_rate": 4.368899690640333e-06,
"loss": 0.6526,
"step": 217
},
{
"epoch": 1.0776818742293466,
"grad_norm": 0.2840045690536499,
"learning_rate": 4.3585543076215405e-06,
"loss": 0.6688,
"step": 218
},
{
"epoch": 1.0826140567200986,
"grad_norm": 0.23248234391212463,
"learning_rate": 4.3481372817991976e-06,
"loss": 0.6798,
"step": 219
},
{
"epoch": 1.0875462392108508,
"grad_norm": 0.11116907000541687,
"learning_rate": 4.3376490147246205e-06,
"loss": 0.6676,
"step": 220
},
{
"epoch": 1.092478421701603,
"grad_norm": 0.10024593770503998,
"learning_rate": 4.32708991069531e-06,
"loss": 0.6718,
"step": 221
},
{
"epoch": 1.097410604192355,
"grad_norm": 0.11960247159004211,
"learning_rate": 4.31646037673936e-06,
"loss": 0.665,
"step": 222
},
{
"epoch": 1.1023427866831073,
"grad_norm": 0.2154683619737625,
"learning_rate": 4.305760822599766e-06,
"loss": 0.6782,
"step": 223
},
{
"epoch": 1.1072749691738595,
"grad_norm": 0.13609148561954498,
"learning_rate": 4.294991660718636e-06,
"loss": 0.6715,
"step": 224
},
{
"epoch": 1.1122071516646117,
"grad_norm": 0.12187358736991882,
"learning_rate": 4.284153306221289e-06,
"loss": 0.6785,
"step": 225
},
{
"epoch": 1.1171393341553637,
"grad_norm": 0.10929637402296066,
"learning_rate": 4.273246176900252e-06,
"loss": 0.6757,
"step": 226
},
{
"epoch": 1.122071516646116,
"grad_norm": 0.12787918746471405,
"learning_rate": 4.262270693199159e-06,
"loss": 0.6895,
"step": 227
},
{
"epoch": 1.1270036991368682,
"grad_norm": 0.11202108860015869,
"learning_rate": 4.2512272781965355e-06,
"loss": 0.6823,
"step": 228
},
{
"epoch": 1.1319358816276202,
"grad_norm": 0.5738335251808167,
"learning_rate": 4.240116357589502e-06,
"loss": 0.6724,
"step": 229
},
{
"epoch": 1.1368680641183724,
"grad_norm": 0.13368858397006989,
"learning_rate": 4.228938359677354e-06,
"loss": 0.6583,
"step": 230
},
{
"epoch": 1.1418002466091246,
"grad_norm": 0.10556632280349731,
"learning_rate": 4.217693715345057e-06,
"loss": 0.6444,
"step": 231
},
{
"epoch": 1.1467324290998766,
"grad_norm": 0.28452828526496887,
"learning_rate": 4.206382858046636e-06,
"loss": 0.6638,
"step": 232
},
{
"epoch": 1.1516646115906288,
"grad_norm": 0.19593405723571777,
"learning_rate": 4.195006223788466e-06,
"loss": 0.6537,
"step": 233
},
{
"epoch": 1.156596794081381,
"grad_norm": 0.11627080291509628,
"learning_rate": 4.183564251112466e-06,
"loss": 0.6699,
"step": 234
},
{
"epoch": 1.161528976572133,
"grad_norm": 0.15152066946029663,
"learning_rate": 4.172057381079196e-06,
"loss": 0.6685,
"step": 235
},
{
"epoch": 1.1664611590628853,
"grad_norm": 0.20805396139621735,
"learning_rate": 4.160486057250849e-06,
"loss": 0.6605,
"step": 236
},
{
"epoch": 1.1713933415536375,
"grad_norm": 0.16027909517288208,
"learning_rate": 4.148850725674162e-06,
"loss": 0.6959,
"step": 237
},
{
"epoch": 1.1763255240443897,
"grad_norm": 0.12061706185340881,
"learning_rate": 4.137151834863213e-06,
"loss": 0.6706,
"step": 238
},
{
"epoch": 1.1812577065351417,
"grad_norm": 0.09794213622808456,
"learning_rate": 4.125389835782138e-06,
"loss": 0.6846,
"step": 239
},
{
"epoch": 1.186189889025894,
"grad_norm": 0.13422715663909912,
"learning_rate": 4.113565181827745e-06,
"loss": 0.6611,
"step": 240
},
{
"epoch": 1.1911220715166462,
"grad_norm": 0.11418966203927994,
"learning_rate": 4.101678328812035e-06,
"loss": 0.6687,
"step": 241
},
{
"epoch": 1.1960542540073984,
"grad_norm": 0.2175075113773346,
"learning_rate": 4.0897297349446345e-06,
"loss": 0.6933,
"step": 242
},
{
"epoch": 1.2009864364981504,
"grad_norm": 0.1245436891913414,
"learning_rate": 4.077719860815132e-06,
"loss": 0.6538,
"step": 243
},
{
"epoch": 1.2059186189889026,
"grad_norm": 0.12700359523296356,
"learning_rate": 4.065649169375324e-06,
"loss": 0.6555,
"step": 244
},
{
"epoch": 1.2108508014796548,
"grad_norm": 0.12477461248636246,
"learning_rate": 4.053518125921365e-06,
"loss": 0.6664,
"step": 245
},
{
"epoch": 1.2157829839704068,
"grad_norm": 0.16408082842826843,
"learning_rate": 4.041327198075838e-06,
"loss": 0.6482,
"step": 246
},
{
"epoch": 1.220715166461159,
"grad_norm": 0.11284561455249786,
"learning_rate": 4.029076855769722e-06,
"loss": 0.6557,
"step": 247
},
{
"epoch": 1.2256473489519113,
"grad_norm": 0.11041481792926788,
"learning_rate": 4.016767571224285e-06,
"loss": 0.6445,
"step": 248
},
{
"epoch": 1.2305795314426633,
"grad_norm": 0.1686171293258667,
"learning_rate": 4.0043998189328705e-06,
"loss": 0.641,
"step": 249
},
{
"epoch": 1.2355117139334155,
"grad_norm": 0.10102049261331558,
"learning_rate": 3.991974075642621e-06,
"loss": 0.6522,
"step": 250
},
{
"epoch": 1.2404438964241677,
"grad_norm": 0.22298377752304077,
"learning_rate": 3.9794908203360865e-06,
"loss": 0.6942,
"step": 251
},
{
"epoch": 1.2453760789149197,
"grad_norm": 0.11277928948402405,
"learning_rate": 3.966950534212769e-06,
"loss": 0.647,
"step": 252
},
{
"epoch": 1.250308261405672,
"grad_norm": 0.12690778076648712,
"learning_rate": 3.954353700670573e-06,
"loss": 0.6682,
"step": 253
},
{
"epoch": 1.2552404438964242,
"grad_norm": 0.1330857127904892,
"learning_rate": 3.941700805287169e-06,
"loss": 0.6902,
"step": 254
},
{
"epoch": 1.2601726263871762,
"grad_norm": 0.10677850991487503,
"learning_rate": 3.9289923358012735e-06,
"loss": 0.6523,
"step": 255
},
{
"epoch": 1.2651048088779284,
"grad_norm": 0.09997207671403885,
"learning_rate": 3.9162287820938575e-06,
"loss": 0.6396,
"step": 256
},
{
"epoch": 1.2700369913686806,
"grad_norm": 0.10573034733533859,
"learning_rate": 3.903410636169252e-06,
"loss": 0.6631,
"step": 257
},
{
"epoch": 1.2749691738594329,
"grad_norm": 0.11500944197177887,
"learning_rate": 3.890538392136188e-06,
"loss": 0.6485,
"step": 258
},
{
"epoch": 1.279901356350185,
"grad_norm": 0.11942487210035324,
"learning_rate": 3.877612546188749e-06,
"loss": 0.6783,
"step": 259
},
{
"epoch": 1.284833538840937,
"grad_norm": 0.22677327692508698,
"learning_rate": 3.864633596587242e-06,
"loss": 0.6761,
"step": 260
},
{
"epoch": 1.2897657213316893,
"grad_norm": 0.10834953933954239,
"learning_rate": 3.8516020436389945e-06,
"loss": 0.6667,
"step": 261
},
{
"epoch": 1.2946979038224415,
"grad_norm": 0.10524627566337585,
"learning_rate": 3.838518389679065e-06,
"loss": 0.6722,
"step": 262
},
{
"epoch": 1.2996300863131935,
"grad_norm": 0.20889033377170563,
"learning_rate": 3.825383139050881e-06,
"loss": 0.6471,
"step": 263
},
{
"epoch": 1.3045622688039458,
"grad_norm": 0.09844981133937836,
"learning_rate": 3.812196798086799e-06,
"loss": 0.6847,
"step": 264
},
{
"epoch": 1.309494451294698,
"grad_norm": 0.10606922209262848,
"learning_rate": 3.798959875088584e-06,
"loss": 0.666,
"step": 265
},
{
"epoch": 1.31442663378545,
"grad_norm": 0.1338571161031723,
"learning_rate": 3.7856728803078168e-06,
"loss": 0.6438,
"step": 266
},
{
"epoch": 1.3193588162762022,
"grad_norm": 0.09691350907087326,
"learning_rate": 3.7723363259262253e-06,
"loss": 0.6567,
"step": 267
},
{
"epoch": 1.3242909987669544,
"grad_norm": 0.12978605926036835,
"learning_rate": 3.75895072603594e-06,
"loss": 0.654,
"step": 268
},
{
"epoch": 1.3292231812577064,
"grad_norm": 0.11295681446790695,
"learning_rate": 3.7455165966196817e-06,
"loss": 0.6981,
"step": 269
},
{
"epoch": 1.3341553637484587,
"grad_norm": 0.11601907014846802,
"learning_rate": 3.732034455530863e-06,
"loss": 0.6891,
"step": 270
},
{
"epoch": 1.339087546239211,
"grad_norm": 0.10687986761331558,
"learning_rate": 3.718504822473634e-06,
"loss": 0.6606,
"step": 271
},
{
"epoch": 1.344019728729963,
"grad_norm": 0.11528074741363525,
"learning_rate": 3.704928218982845e-06,
"loss": 0.6481,
"step": 272
},
{
"epoch": 1.3489519112207151,
"grad_norm": 0.18344081938266754,
"learning_rate": 3.6913051684039435e-06,
"loss": 0.6645,
"step": 273
},
{
"epoch": 1.3538840937114673,
"grad_norm": 0.2689914405345917,
"learning_rate": 3.6776361958728025e-06,
"loss": 0.6648,
"step": 274
},
{
"epoch": 1.3588162762022196,
"grad_norm": 0.3526904582977295,
"learning_rate": 3.663921828295474e-06,
"loss": 0.6679,
"step": 275
},
{
"epoch": 1.3637484586929716,
"grad_norm": 0.12253173440694809,
"learning_rate": 3.650162594327881e-06,
"loss": 0.649,
"step": 276
},
{
"epoch": 1.3686806411837238,
"grad_norm": 0.11910586804151535,
"learning_rate": 3.6363590243554362e-06,
"loss": 0.6372,
"step": 277
},
{
"epoch": 1.373612823674476,
"grad_norm": 0.2642151117324829,
"learning_rate": 3.6225116504726014e-06,
"loss": 0.6465,
"step": 278
},
{
"epoch": 1.3785450061652282,
"grad_norm": 0.12019569426774979,
"learning_rate": 3.6086210064623735e-06,
"loss": 0.6815,
"step": 279
},
{
"epoch": 1.3834771886559802,
"grad_norm": 0.12397979199886322,
"learning_rate": 3.5946876277757066e-06,
"loss": 0.6836,
"step": 280
},
{
"epoch": 1.3884093711467325,
"grad_norm": 0.12133345752954483,
"learning_rate": 3.580712051510876e-06,
"loss": 0.6514,
"step": 281
},
{
"epoch": 1.3933415536374847,
"grad_norm": 0.12849684059619904,
"learning_rate": 3.5666948163927716e-06,
"loss": 0.6651,
"step": 282
},
{
"epoch": 1.3982737361282367,
"grad_norm": 0.10554145276546478,
"learning_rate": 3.5526364627521322e-06,
"loss": 0.6784,
"step": 283
},
{
"epoch": 1.403205918618989,
"grad_norm": 0.12648458778858185,
"learning_rate": 3.5385375325047167e-06,
"loss": 0.6738,
"step": 284
},
{
"epoch": 1.4081381011097411,
"grad_norm": 0.10506008565425873,
"learning_rate": 3.5243985691304146e-06,
"loss": 0.6478,
"step": 285
},
{
"epoch": 1.4130702836004931,
"grad_norm": 0.1196879968047142,
"learning_rate": 3.5102201176522966e-06,
"loss": 0.6516,
"step": 286
},
{
"epoch": 1.4180024660912454,
"grad_norm": 0.10076049715280533,
"learning_rate": 3.4960027246156043e-06,
"loss": 0.6363,
"step": 287
},
{
"epoch": 1.4229346485819976,
"grad_norm": 0.11161559075117111,
"learning_rate": 3.4817469380666834e-06,
"loss": 0.6501,
"step": 288
},
{
"epoch": 1.4278668310727496,
"grad_norm": 0.10083605349063873,
"learning_rate": 3.467453307531858e-06,
"loss": 0.6715,
"step": 289
},
{
"epoch": 1.4327990135635018,
"grad_norm": 0.16506797075271606,
"learning_rate": 3.453122383996245e-06,
"loss": 0.6404,
"step": 290
},
{
"epoch": 1.437731196054254,
"grad_norm": 0.12411224842071533,
"learning_rate": 3.4387547198825187e-06,
"loss": 0.668,
"step": 291
},
{
"epoch": 1.442663378545006,
"grad_norm": 0.1273510903120041,
"learning_rate": 3.4243508690296133e-06,
"loss": 0.6571,
"step": 292
},
{
"epoch": 1.4475955610357583,
"grad_norm": 0.10576938837766647,
"learning_rate": 3.409911386671375e-06,
"loss": 0.656,
"step": 293
},
{
"epoch": 1.4525277435265105,
"grad_norm": 0.10712239891290665,
"learning_rate": 3.3954368294151603e-06,
"loss": 0.6474,
"step": 294
},
{
"epoch": 1.4574599260172627,
"grad_norm": 0.1035386323928833,
"learning_rate": 3.380927755220376e-06,
"loss": 0.6512,
"step": 295
},
{
"epoch": 1.462392108508015,
"grad_norm": 0.1390061378479004,
"learning_rate": 3.366384723376977e-06,
"loss": 0.6538,
"step": 296
},
{
"epoch": 1.467324290998767,
"grad_norm": 0.12152580916881561,
"learning_rate": 3.351808294483902e-06,
"loss": 0.6414,
"step": 297
},
{
"epoch": 1.4722564734895192,
"grad_norm": 0.11257496476173401,
"learning_rate": 3.3371990304274654e-06,
"loss": 0.6793,
"step": 298
},
{
"epoch": 1.4771886559802714,
"grad_norm": 0.10283617675304413,
"learning_rate": 3.3225574943597005e-06,
"loss": 0.6444,
"step": 299
},
{
"epoch": 1.4821208384710234,
"grad_norm": 0.12117356061935425,
"learning_rate": 3.3078842506766484e-06,
"loss": 0.631,
"step": 300
},
{
"epoch": 1.4870530209617756,
"grad_norm": 0.14655297994613647,
"learning_rate": 3.2931798649966e-06,
"loss": 0.6614,
"step": 301
},
{
"epoch": 1.4919852034525278,
"grad_norm": 0.1442922204732895,
"learning_rate": 3.2784449041382973e-06,
"loss": 0.6602,
"step": 302
},
{
"epoch": 1.4969173859432798,
"grad_norm": 0.11084005236625671,
"learning_rate": 3.263679936099083e-06,
"loss": 0.6616,
"step": 303
},
{
"epoch": 1.501849568434032,
"grad_norm": 0.1568291336297989,
"learning_rate": 3.248885530033004e-06,
"loss": 0.6745,
"step": 304
},
{
"epoch": 1.5067817509247843,
"grad_norm": 0.11827261000871658,
"learning_rate": 3.2340622562288717e-06,
"loss": 0.666,
"step": 305
},
{
"epoch": 1.5117139334155363,
"grad_norm": 0.13850663602352142,
"learning_rate": 3.2192106860882782e-06,
"loss": 0.6425,
"step": 306
},
{
"epoch": 1.5166461159062885,
"grad_norm": 0.11242467910051346,
"learning_rate": 3.2043313921035747e-06,
"loss": 0.6366,
"step": 307
},
{
"epoch": 1.5215782983970407,
"grad_norm": 0.11277095973491669,
"learning_rate": 3.189424947835797e-06,
"loss": 0.6423,
"step": 308
},
{
"epoch": 1.5265104808877927,
"grad_norm": 0.1134696900844574,
"learning_rate": 3.174491927892561e-06,
"loss": 0.6637,
"step": 309
},
{
"epoch": 1.531442663378545,
"grad_norm": 0.10201112180948257,
"learning_rate": 3.1595329079059102e-06,
"loss": 0.6458,
"step": 310
},
{
"epoch": 1.5363748458692972,
"grad_norm": 0.22865180671215057,
"learning_rate": 3.144548464510127e-06,
"loss": 0.6693,
"step": 311
},
{
"epoch": 1.5413070283600492,
"grad_norm": 0.12015289813280106,
"learning_rate": 3.129539175319505e-06,
"loss": 0.6463,
"step": 312
},
{
"epoch": 1.5462392108508016,
"grad_norm": 0.14742949604988098,
"learning_rate": 3.114505618906086e-06,
"loss": 0.6398,
"step": 313
},
{
"epoch": 1.5511713933415536,
"grad_norm": 0.12067458778619766,
"learning_rate": 3.0994483747773508e-06,
"loss": 0.6618,
"step": 314
},
{
"epoch": 1.5561035758323056,
"grad_norm": 0.5674145817756653,
"learning_rate": 3.0843680233538885e-06,
"loss": 0.6552,
"step": 315
},
{
"epoch": 1.561035758323058,
"grad_norm": 0.1083153635263443,
"learning_rate": 3.0692651459470164e-06,
"loss": 0.6283,
"step": 316
},
{
"epoch": 1.56596794081381,
"grad_norm": 0.177827849984169,
"learning_rate": 3.0541403247363756e-06,
"loss": 0.6423,
"step": 317
},
{
"epoch": 1.5709001233045623,
"grad_norm": 0.10957730561494827,
"learning_rate": 3.0389941427474873e-06,
"loss": 0.6569,
"step": 318
},
{
"epoch": 1.5758323057953145,
"grad_norm": 0.14152103662490845,
"learning_rate": 3.0238271838292815e-06,
"loss": 0.6607,
"step": 319
},
{
"epoch": 1.5807644882860665,
"grad_norm": 0.2304203361272812,
"learning_rate": 3.0086400326315853e-06,
"loss": 0.6798,
"step": 320
},
{
"epoch": 1.5856966707768188,
"grad_norm": 0.11011958122253418,
"learning_rate": 2.9934332745825924e-06,
"loss": 0.6308,
"step": 321
},
{
"epoch": 1.590628853267571,
"grad_norm": 0.12152457237243652,
"learning_rate": 2.9782074958662923e-06,
"loss": 0.6423,
"step": 322
},
{
"epoch": 1.595561035758323,
"grad_norm": 0.10855505615472794,
"learning_rate": 2.962963283399877e-06,
"loss": 0.6759,
"step": 323
},
{
"epoch": 1.6004932182490752,
"grad_norm": 0.12242516875267029,
"learning_rate": 2.947701224811113e-06,
"loss": 0.6489,
"step": 324
},
{
"epoch": 1.6054254007398274,
"grad_norm": 0.11491915583610535,
"learning_rate": 2.932421908415694e-06,
"loss": 0.6476,
"step": 325
},
{
"epoch": 1.6103575832305794,
"grad_norm": 0.10774843394756317,
"learning_rate": 2.9171259231945598e-06,
"loss": 0.6535,
"step": 326
},
{
"epoch": 1.6152897657213316,
"grad_norm": 0.10615874826908112,
"learning_rate": 2.901813858771193e-06,
"loss": 0.6477,
"step": 327
},
{
"epoch": 1.6202219482120839,
"grad_norm": 0.09981298446655273,
"learning_rate": 2.8864863053888927e-06,
"loss": 0.6525,
"step": 328
},
{
"epoch": 1.6251541307028359,
"grad_norm": 0.1059933453798294,
"learning_rate": 2.871143853888017e-06,
"loss": 0.6609,
"step": 329
},
{
"epoch": 1.6300863131935883,
"grad_norm": 0.28401389718055725,
"learning_rate": 2.8557870956832135e-06,
"loss": 0.6738,
"step": 330
},
{
"epoch": 1.6350184956843403,
"grad_norm": 0.1126745268702507,
"learning_rate": 2.840416622740617e-06,
"loss": 0.6443,
"step": 331
},
{
"epoch": 1.6399506781750923,
"grad_norm": 0.10980167984962463,
"learning_rate": 2.8250330275550337e-06,
"loss": 0.6575,
"step": 332
},
{
"epoch": 1.6448828606658448,
"grad_norm": 0.20389322936534882,
"learning_rate": 2.8096369031271e-06,
"loss": 0.6855,
"step": 333
},
{
"epoch": 1.6498150431565968,
"grad_norm": 0.1709917038679123,
"learning_rate": 2.7942288429404256e-06,
"loss": 0.6752,
"step": 334
},
{
"epoch": 1.654747225647349,
"grad_norm": 0.12198632955551147,
"learning_rate": 2.778809440938714e-06,
"loss": 0.6398,
"step": 335
},
{
"epoch": 1.6596794081381012,
"grad_norm": 0.11568919569253922,
"learning_rate": 2.763379291502868e-06,
"loss": 0.6489,
"step": 336
},
{
"epoch": 1.6646115906288532,
"grad_norm": 0.15780635178089142,
"learning_rate": 2.7479389894280793e-06,
"loss": 0.6489,
"step": 337
},
{
"epoch": 1.6695437731196054,
"grad_norm": 0.1101425290107727,
"learning_rate": 2.7324891299008987e-06,
"loss": 0.6726,
"step": 338
},
{
"epoch": 1.6744759556103577,
"grad_norm": 0.10121767222881317,
"learning_rate": 2.7170303084762958e-06,
"loss": 0.6726,
"step": 339
},
{
"epoch": 1.6794081381011097,
"grad_norm": 0.1087949350476265,
"learning_rate": 2.701563121054695e-06,
"loss": 0.6175,
"step": 340
},
{
"epoch": 1.684340320591862,
"grad_norm": 0.11207877844572067,
"learning_rate": 2.6860881638590146e-06,
"loss": 0.6565,
"step": 341
},
{
"epoch": 1.6892725030826141,
"grad_norm": 0.11743203550577164,
"learning_rate": 2.670606033411678e-06,
"loss": 0.677,
"step": 342
},
{
"epoch": 1.6942046855733661,
"grad_norm": 0.10707394033670425,
"learning_rate": 2.6551173265116182e-06,
"loss": 0.6601,
"step": 343
},
{
"epoch": 1.6991368680641183,
"grad_norm": 0.10522118955850601,
"learning_rate": 2.6396226402112768e-06,
"loss": 0.6494,
"step": 344
},
{
"epoch": 1.7040690505548706,
"grad_norm": 0.19015160202980042,
"learning_rate": 2.624122571793586e-06,
"loss": 0.6494,
"step": 345
},
{
"epoch": 1.7090012330456226,
"grad_norm": 0.11446730047464371,
"learning_rate": 2.6086177187489453e-06,
"loss": 0.6183,
"step": 346
},
{
"epoch": 1.7139334155363748,
"grad_norm": 0.17297939956188202,
"learning_rate": 2.593108678752191e-06,
"loss": 0.6572,
"step": 347
},
{
"epoch": 1.718865598027127,
"grad_norm": 0.12407731264829636,
"learning_rate": 2.5775960496395565e-06,
"loss": 0.6496,
"step": 348
},
{
"epoch": 1.723797780517879,
"grad_norm": 0.36749541759490967,
"learning_rate": 2.562080429385626e-06,
"loss": 0.6611,
"step": 349
},
{
"epoch": 1.7287299630086315,
"grad_norm": 0.10345156490802765,
"learning_rate": 2.5465624160802847e-06,
"loss": 0.6716,
"step": 350
},
{
"epoch": 1.7336621454993835,
"grad_norm": 0.11017563939094543,
"learning_rate": 2.531042607905665e-06,
"loss": 0.6499,
"step": 351
},
{
"epoch": 1.7385943279901355,
"grad_norm": 0.09504040330648422,
"learning_rate": 2.5155216031130882e-06,
"loss": 0.6657,
"step": 352
},
{
"epoch": 1.743526510480888,
"grad_norm": 0.11456336081027985,
"learning_rate": 2.5e-06,
"loss": 0.6433,
"step": 353
},
{
"epoch": 1.74845869297164,
"grad_norm": 0.3017407953739166,
"learning_rate": 2.4844783968869126e-06,
"loss": 0.6678,
"step": 354
},
{
"epoch": 1.7533908754623921,
"grad_norm": 0.17654098570346832,
"learning_rate": 2.4689573920943358e-06,
"loss": 0.6407,
"step": 355
},
{
"epoch": 1.7583230579531444,
"grad_norm": 0.19192291796207428,
"learning_rate": 2.4534375839197166e-06,
"loss": 0.6538,
"step": 356
},
{
"epoch": 1.7632552404438964,
"grad_norm": 0.101350799202919,
"learning_rate": 2.4379195706143755e-06,
"loss": 0.6563,
"step": 357
},
{
"epoch": 1.7681874229346486,
"grad_norm": 0.09221342951059341,
"learning_rate": 2.422403950360444e-06,
"loss": 0.6465,
"step": 358
},
{
"epoch": 1.7731196054254008,
"grad_norm": 0.1029122844338417,
"learning_rate": 2.406891321247809e-06,
"loss": 0.6524,
"step": 359
},
{
"epoch": 1.7780517879161528,
"grad_norm": 0.10780887305736542,
"learning_rate": 2.391382281251055e-06,
"loss": 0.6577,
"step": 360
},
{
"epoch": 1.782983970406905,
"grad_norm": 0.11179229617118835,
"learning_rate": 2.375877428206415e-06,
"loss": 0.6281,
"step": 361
},
{
"epoch": 1.7879161528976573,
"grad_norm": 0.1713687628507614,
"learning_rate": 2.360377359788724e-06,
"loss": 0.6866,
"step": 362
},
{
"epoch": 1.7928483353884093,
"grad_norm": 0.20977120101451874,
"learning_rate": 2.3448826734883826e-06,
"loss": 0.6524,
"step": 363
},
{
"epoch": 1.7977805178791615,
"grad_norm": 0.10442214459180832,
"learning_rate": 2.3293939665883233e-06,
"loss": 0.6356,
"step": 364
},
{
"epoch": 1.8027127003699137,
"grad_norm": 0.11700151115655899,
"learning_rate": 2.313911836140986e-06,
"loss": 0.635,
"step": 365
},
{
"epoch": 1.8076448828606657,
"grad_norm": 0.20740574598312378,
"learning_rate": 2.298436878945306e-06,
"loss": 0.6407,
"step": 366
},
{
"epoch": 1.8125770653514182,
"grad_norm": 0.10487078875303268,
"learning_rate": 2.2829696915237055e-06,
"loss": 0.6606,
"step": 367
},
{
"epoch": 1.8175092478421702,
"grad_norm": 0.19342948496341705,
"learning_rate": 2.267510870099101e-06,
"loss": 0.6652,
"step": 368
},
{
"epoch": 1.8224414303329222,
"grad_norm": 0.12832309305667877,
"learning_rate": 2.252061010571921e-06,
"loss": 0.6699,
"step": 369
},
{
"epoch": 1.8273736128236746,
"grad_norm": 0.24777992069721222,
"learning_rate": 2.2366207084971326e-06,
"loss": 0.652,
"step": 370
},
{
"epoch": 1.8323057953144266,
"grad_norm": 0.2240799218416214,
"learning_rate": 2.2211905590612864e-06,
"loss": 0.6559,
"step": 371
},
{
"epoch": 1.8372379778051788,
"grad_norm": 0.21929948031902313,
"learning_rate": 2.205771157059575e-06,
"loss": 0.6524,
"step": 372
},
{
"epoch": 1.842170160295931,
"grad_norm": 0.09952554106712341,
"learning_rate": 2.1903630968729003e-06,
"loss": 0.663,
"step": 373
},
{
"epoch": 1.847102342786683,
"grad_norm": 0.13348402082920074,
"learning_rate": 2.174966972444967e-06,
"loss": 0.6594,
"step": 374
},
{
"epoch": 1.8520345252774353,
"grad_norm": 0.13318030536174774,
"learning_rate": 2.159583377259384e-06,
"loss": 0.6271,
"step": 375
},
{
"epoch": 1.8569667077681875,
"grad_norm": 0.10292787104845047,
"learning_rate": 2.1442129043167877e-06,
"loss": 0.6537,
"step": 376
},
{
"epoch": 1.8618988902589395,
"grad_norm": 0.1853366643190384,
"learning_rate": 2.1288561461119837e-06,
"loss": 0.6434,
"step": 377
},
{
"epoch": 1.8668310727496917,
"grad_norm": 0.3069738745689392,
"learning_rate": 2.1135136946111077e-06,
"loss": 0.6294,
"step": 378
},
{
"epoch": 1.871763255240444,
"grad_norm": 0.1591643989086151,
"learning_rate": 2.098186141228807e-06,
"loss": 0.6596,
"step": 379
},
{
"epoch": 1.876695437731196,
"grad_norm": 0.1068023145198822,
"learning_rate": 2.0828740768054406e-06,
"loss": 0.6559,
"step": 380
},
{
"epoch": 1.8816276202219482,
"grad_norm": 0.13300898671150208,
"learning_rate": 2.0675780915843068e-06,
"loss": 0.63,
"step": 381
},
{
"epoch": 1.8865598027127004,
"grad_norm": 0.13550280034542084,
"learning_rate": 2.052298775188888e-06,
"loss": 0.646,
"step": 382
},
{
"epoch": 1.8914919852034524,
"grad_norm": 0.19939783215522766,
"learning_rate": 2.0370367166001245e-06,
"loss": 0.6511,
"step": 383
},
{
"epoch": 1.8964241676942046,
"grad_norm": 0.13335835933685303,
"learning_rate": 2.021792504133709e-06,
"loss": 0.6584,
"step": 384
},
{
"epoch": 1.9013563501849569,
"grad_norm": 0.0967630073428154,
"learning_rate": 2.006566725417409e-06,
"loss": 0.656,
"step": 385
},
{
"epoch": 1.9062885326757089,
"grad_norm": 0.10085921734571457,
"learning_rate": 1.991359967368416e-06,
"loss": 0.6515,
"step": 386
},
{
"epoch": 1.9112207151664613,
"grad_norm": 0.1494772732257843,
"learning_rate": 1.97617281617072e-06,
"loss": 0.6625,
"step": 387
},
{
"epoch": 1.9161528976572133,
"grad_norm": 0.14001788198947906,
"learning_rate": 1.9610058572525127e-06,
"loss": 0.6265,
"step": 388
},
{
"epoch": 1.9210850801479655,
"grad_norm": 0.10699108988046646,
"learning_rate": 1.945859675263625e-06,
"loss": 0.6446,
"step": 389
},
{
"epoch": 1.9260172626387178,
"grad_norm": 0.10522957146167755,
"learning_rate": 1.9307348540529845e-06,
"loss": 0.6525,
"step": 390
},
{
"epoch": 1.9309494451294698,
"grad_norm": 0.0986817479133606,
"learning_rate": 1.9156319766461124e-06,
"loss": 0.6325,
"step": 391
},
{
"epoch": 1.935881627620222,
"grad_norm": 0.11651718616485596,
"learning_rate": 1.90055162522265e-06,
"loss": 0.6303,
"step": 392
},
{
"epoch": 1.9408138101109742,
"grad_norm": 0.10605504363775253,
"learning_rate": 1.8854943810939152e-06,
"loss": 0.6559,
"step": 393
},
{
"epoch": 1.9457459926017262,
"grad_norm": 0.09837733954191208,
"learning_rate": 1.8704608246804956e-06,
"loss": 0.6708,
"step": 394
},
{
"epoch": 1.9506781750924784,
"grad_norm": 0.08927737921476364,
"learning_rate": 1.8554515354898744e-06,
"loss": 0.654,
"step": 395
},
{
"epoch": 1.9556103575832307,
"grad_norm": 0.10312498360872269,
"learning_rate": 1.840467092094091e-06,
"loss": 0.6178,
"step": 396
},
{
"epoch": 1.9605425400739827,
"grad_norm": 0.12431120872497559,
"learning_rate": 1.8255080721074391e-06,
"loss": 0.6804,
"step": 397
},
{
"epoch": 1.9654747225647349,
"grad_norm": 0.17175759375095367,
"learning_rate": 1.8105750521642035e-06,
"loss": 0.6509,
"step": 398
},
{
"epoch": 1.970406905055487,
"grad_norm": 0.6188605427742004,
"learning_rate": 1.7956686078964257e-06,
"loss": 0.6311,
"step": 399
},
{
"epoch": 1.975339087546239,
"grad_norm": 0.15260924398899078,
"learning_rate": 1.7807893139117222e-06,
"loss": 0.6562,
"step": 400
},
{
"epoch": 1.9802712700369913,
"grad_norm": 0.10048322379589081,
"learning_rate": 1.7659377437711294e-06,
"loss": 0.6562,
"step": 401
},
{
"epoch": 1.9852034525277436,
"grad_norm": 0.14348655939102173,
"learning_rate": 1.7511144699669967e-06,
"loss": 0.6415,
"step": 402
},
{
"epoch": 1.9901356350184956,
"grad_norm": 0.2633483111858368,
"learning_rate": 1.7363200639009176e-06,
"loss": 0.6289,
"step": 403
},
{
"epoch": 1.995067817509248,
"grad_norm": 0.10529658943414688,
"learning_rate": 1.7215550958617036e-06,
"loss": 0.6494,
"step": 404
},
{
"epoch": 2.0024660912453762,
"grad_norm": 0.12062520533800125,
"learning_rate": 1.7068201350034017e-06,
"loss": 0.6449,
"step": 405
},
{
"epoch": 2.0073982737361282,
"grad_norm": 0.1379857212305069,
"learning_rate": 1.692115749323353e-06,
"loss": 0.6557,
"step": 406
},
{
"epoch": 2.0123304562268802,
"grad_norm": 0.10167136788368225,
"learning_rate": 1.6774425056402993e-06,
"loss": 0.6362,
"step": 407
},
{
"epoch": 2.0172626387176327,
"grad_norm": 0.1222464069724083,
"learning_rate": 1.6628009695725348e-06,
"loss": 0.6479,
"step": 408
},
{
"epoch": 2.0221948212083847,
"grad_norm": 0.19944354891777039,
"learning_rate": 1.648191705516099e-06,
"loss": 0.6606,
"step": 409
},
{
"epoch": 2.0271270036991367,
"grad_norm": 0.09848273545503616,
"learning_rate": 1.6336152766230235e-06,
"loss": 0.6408,
"step": 410
},
{
"epoch": 2.032059186189889,
"grad_norm": 0.09147990494966507,
"learning_rate": 1.6190722447796242e-06,
"loss": 0.6169,
"step": 411
},
{
"epoch": 2.036991368680641,
"grad_norm": 0.09940177202224731,
"learning_rate": 1.6045631705848405e-06,
"loss": 0.6397,
"step": 412
},
{
"epoch": 2.041923551171393,
"grad_norm": 0.10459251701831818,
"learning_rate": 1.5900886133286254e-06,
"loss": 0.6209,
"step": 413
},
{
"epoch": 2.0468557336621456,
"grad_norm": 0.10069490969181061,
"learning_rate": 1.5756491309703875e-06,
"loss": 0.6516,
"step": 414
},
{
"epoch": 2.0517879161528976,
"grad_norm": 0.10702253878116608,
"learning_rate": 1.561245280117482e-06,
"loss": 0.6491,
"step": 415
},
{
"epoch": 2.05672009864365,
"grad_norm": 0.10752403736114502,
"learning_rate": 1.5468776160037558e-06,
"loss": 0.6554,
"step": 416
},
{
"epoch": 2.061652281134402,
"grad_norm": 0.1126432865858078,
"learning_rate": 1.5325466924681425e-06,
"loss": 0.642,
"step": 417
},
{
"epoch": 2.066584463625154,
"grad_norm": 0.13967493176460266,
"learning_rate": 1.5182530619333168e-06,
"loss": 0.6271,
"step": 418
},
{
"epoch": 2.0715166461159065,
"grad_norm": 0.107243612408638,
"learning_rate": 1.5039972753843966e-06,
"loss": 0.6296,
"step": 419
},
{
"epoch": 2.0764488286066585,
"grad_norm": 0.12694260478019714,
"learning_rate": 1.4897798823477045e-06,
"loss": 0.6584,
"step": 420
},
{
"epoch": 2.0813810110974105,
"grad_norm": 0.11211492121219635,
"learning_rate": 1.4756014308695865e-06,
"loss": 0.6394,
"step": 421
},
{
"epoch": 2.086313193588163,
"grad_norm": 0.10561738908290863,
"learning_rate": 1.4614624674952843e-06,
"loss": 0.6333,
"step": 422
},
{
"epoch": 2.091245376078915,
"grad_norm": 0.11454541981220245,
"learning_rate": 1.4473635372478692e-06,
"loss": 0.6185,
"step": 423
},
{
"epoch": 2.096177558569667,
"grad_norm": 0.11180110275745392,
"learning_rate": 1.4333051836072298e-06,
"loss": 0.6266,
"step": 424
},
{
"epoch": 2.1011097410604194,
"grad_norm": 0.10139421373605728,
"learning_rate": 1.4192879484891253e-06,
"loss": 0.6519,
"step": 425
},
{
"epoch": 2.1060419235511714,
"grad_norm": 0.10878422111272812,
"learning_rate": 1.405312372224294e-06,
"loss": 0.6253,
"step": 426
},
{
"epoch": 2.1109741060419234,
"grad_norm": 0.09218919277191162,
"learning_rate": 1.3913789935376271e-06,
"loss": 0.6373,
"step": 427
},
{
"epoch": 2.115906288532676,
"grad_norm": 0.10704641044139862,
"learning_rate": 1.3774883495273986e-06,
"loss": 0.6498,
"step": 428
},
{
"epoch": 2.120838471023428,
"grad_norm": 0.09850191324949265,
"learning_rate": 1.363640975644564e-06,
"loss": 0.6595,
"step": 429
},
{
"epoch": 2.12577065351418,
"grad_norm": 0.13874687254428864,
"learning_rate": 1.3498374056721198e-06,
"loss": 0.6555,
"step": 430
},
{
"epoch": 2.1307028360049323,
"grad_norm": 0.11779513210058212,
"learning_rate": 1.3360781717045266e-06,
"loss": 0.6217,
"step": 431
},
{
"epoch": 2.1356350184956843,
"grad_norm": 0.22133827209472656,
"learning_rate": 1.322363804127198e-06,
"loss": 0.6624,
"step": 432
},
{
"epoch": 2.1405672009864363,
"grad_norm": 0.1059156209230423,
"learning_rate": 1.3086948315960567e-06,
"loss": 0.6412,
"step": 433
},
{
"epoch": 2.1454993834771887,
"grad_norm": 0.1276949644088745,
"learning_rate": 1.295071781017156e-06,
"loss": 0.6325,
"step": 434
},
{
"epoch": 2.1504315659679407,
"grad_norm": 0.09928814321756363,
"learning_rate": 1.2814951775263671e-06,
"loss": 0.6165,
"step": 435
},
{
"epoch": 2.155363748458693,
"grad_norm": 0.1885470151901245,
"learning_rate": 1.267965544469137e-06,
"loss": 0.6294,
"step": 436
},
{
"epoch": 2.160295930949445,
"grad_norm": 0.11126084625720978,
"learning_rate": 1.2544834033803183e-06,
"loss": 0.6392,
"step": 437
},
{
"epoch": 2.165228113440197,
"grad_norm": 0.09588748216629028,
"learning_rate": 1.2410492739640592e-06,
"loss": 0.6242,
"step": 438
},
{
"epoch": 2.1701602959309496,
"grad_norm": 0.3128255009651184,
"learning_rate": 1.227663674073775e-06,
"loss": 0.6191,
"step": 439
},
{
"epoch": 2.1750924784217016,
"grad_norm": 0.11006899923086166,
"learning_rate": 1.2143271196921832e-06,
"loss": 0.6418,
"step": 440
},
{
"epoch": 2.1800246609124536,
"grad_norm": 0.12083282321691513,
"learning_rate": 1.2010401249114166e-06,
"loss": 0.6192,
"step": 441
},
{
"epoch": 2.184956843403206,
"grad_norm": 0.13546785712242126,
"learning_rate": 1.1878032019132016e-06,
"loss": 0.6688,
"step": 442
},
{
"epoch": 2.189889025893958,
"grad_norm": 0.1263391375541687,
"learning_rate": 1.1746168609491198e-06,
"loss": 0.639,
"step": 443
},
{
"epoch": 2.19482120838471,
"grad_norm": 0.357994943857193,
"learning_rate": 1.1614816103209363e-06,
"loss": 0.6582,
"step": 444
},
{
"epoch": 2.1997533908754625,
"grad_norm": 0.09652648866176605,
"learning_rate": 1.148397956361007e-06,
"loss": 0.6323,
"step": 445
},
{
"epoch": 2.2046855733662145,
"grad_norm": 0.12993961572647095,
"learning_rate": 1.1353664034127585e-06,
"loss": 0.6461,
"step": 446
},
{
"epoch": 2.2096177558569665,
"grad_norm": 0.10628235340118408,
"learning_rate": 1.122387453811252e-06,
"loss": 0.6315,
"step": 447
},
{
"epoch": 2.214549938347719,
"grad_norm": 2.75348162651062,
"learning_rate": 1.1094616078638123e-06,
"loss": 0.6616,
"step": 448
},
{
"epoch": 2.219482120838471,
"grad_norm": 0.13206490874290466,
"learning_rate": 1.0965893638307484e-06,
"loss": 0.6227,
"step": 449
},
{
"epoch": 2.2244143033292234,
"grad_norm": 0.10024918615818024,
"learning_rate": 1.083771217906143e-06,
"loss": 0.6171,
"step": 450
},
{
"epoch": 2.2293464858199754,
"grad_norm": 0.1251702457666397,
"learning_rate": 1.071007664198727e-06,
"loss": 0.6592,
"step": 451
},
{
"epoch": 2.2342786683107274,
"grad_norm": 0.12185829877853394,
"learning_rate": 1.0582991947128324e-06,
"loss": 0.6235,
"step": 452
},
{
"epoch": 2.23921085080148,
"grad_norm": 0.09852628409862518,
"learning_rate": 1.0456462993294273e-06,
"loss": 0.6207,
"step": 453
},
{
"epoch": 2.244143033292232,
"grad_norm": 0.11288397014141083,
"learning_rate": 1.0330494657872312e-06,
"loss": 0.6497,
"step": 454
},
{
"epoch": 2.249075215782984,
"grad_norm": 0.1829065978527069,
"learning_rate": 1.0205091796639143e-06,
"loss": 0.6385,
"step": 455
},
{
"epoch": 2.2540073982737363,
"grad_norm": 0.13576146960258484,
"learning_rate": 1.008025924357379e-06,
"loss": 0.6214,
"step": 456
},
{
"epoch": 2.2589395807644883,
"grad_norm": 0.1026289239525795,
"learning_rate": 9.95600181067129e-07,
"loss": 0.6422,
"step": 457
},
{
"epoch": 2.2638717632552403,
"grad_norm": 0.11270825564861298,
"learning_rate": 9.832324287757158e-07,
"loss": 0.6441,
"step": 458
},
{
"epoch": 2.2688039457459928,
"grad_norm": 0.15193237364292145,
"learning_rate": 9.709231442302777e-07,
"loss": 0.6211,
"step": 459
},
{
"epoch": 2.2737361282367448,
"grad_norm": 0.09436095505952835,
"learning_rate": 9.586728019241622e-07,
"loss": 0.6341,
"step": 460
},
{
"epoch": 2.2786683107274968,
"grad_norm": 0.503972053527832,
"learning_rate": 9.464818740786357e-07,
"loss": 0.6092,
"step": 461
},
{
"epoch": 2.283600493218249,
"grad_norm": 0.1313806027173996,
"learning_rate": 9.343508306246771e-07,
"loss": 0.6338,
"step": 462
},
{
"epoch": 2.288532675709001,
"grad_norm": 0.21144555509090424,
"learning_rate": 9.222801391848688e-07,
"loss": 0.6227,
"step": 463
},
{
"epoch": 2.293464858199753,
"grad_norm": 0.09302227199077606,
"learning_rate": 9.102702650553672e-07,
"loss": 0.6538,
"step": 464
},
{
"epoch": 2.2983970406905057,
"grad_norm": 0.15463946759700775,
"learning_rate": 8.983216711879663e-07,
"loss": 0.6429,
"step": 465
},
{
"epoch": 2.3033292231812577,
"grad_norm": 0.09232427924871445,
"learning_rate": 8.86434818172256e-07,
"loss": 0.6414,
"step": 466
},
{
"epoch": 2.3082614056720097,
"grad_norm": 0.4079054296016693,
"learning_rate": 8.746101642178623e-07,
"loss": 0.6505,
"step": 467
},
{
"epoch": 2.313193588162762,
"grad_norm": 0.11529503017663956,
"learning_rate": 8.628481651367876e-07,
"loss": 0.6379,
"step": 468
},
{
"epoch": 2.318125770653514,
"grad_norm": 0.2939020097255707,
"learning_rate": 8.51149274325839e-07,
"loss": 0.6404,
"step": 469
},
{
"epoch": 2.323057953144266,
"grad_norm": 0.09394059330224991,
"learning_rate": 8.395139427491517e-07,
"loss": 0.6456,
"step": 470
},
{
"epoch": 2.3279901356350186,
"grad_norm": 0.12832114100456238,
"learning_rate": 8.279426189208057e-07,
"loss": 0.6372,
"step": 471
},
{
"epoch": 2.3329223181257706,
"grad_norm": 0.1322140395641327,
"learning_rate": 8.164357488875349e-07,
"loss": 0.6199,
"step": 472
},
{
"epoch": 2.337854500616523,
"grad_norm": 0.1532570719718933,
"learning_rate": 8.049937762115354e-07,
"loss": 0.6197,
"step": 473
},
{
"epoch": 2.342786683107275,
"grad_norm": 0.11840520799160004,
"learning_rate": 7.936171419533653e-07,
"loss": 0.6225,
"step": 474
},
{
"epoch": 2.347718865598027,
"grad_norm": 0.11926340311765671,
"learning_rate": 7.823062846549432e-07,
"loss": 0.6369,
"step": 475
},
{
"epoch": 2.3526510480887795,
"grad_norm": 0.10458600521087646,
"learning_rate": 7.71061640322646e-07,
"loss": 0.6192,
"step": 476
},
{
"epoch": 2.3575832305795315,
"grad_norm": 0.09384766221046448,
"learning_rate": 7.59883642410498e-07,
"loss": 0.6319,
"step": 477
},
{
"epoch": 2.3625154130702835,
"grad_norm": 0.13794849812984467,
"learning_rate": 7.487727218034646e-07,
"loss": 0.6627,
"step": 478
},
{
"epoch": 2.367447595561036,
"grad_norm": 0.09522448480129242,
"learning_rate": 7.377293068008421e-07,
"loss": 0.612,
"step": 479
},
{
"epoch": 2.372379778051788,
"grad_norm": 0.34490853548049927,
"learning_rate": 7.267538230997487e-07,
"loss": 0.6447,
"step": 480
},
{
"epoch": 2.37731196054254,
"grad_norm": 0.3206603229045868,
"learning_rate": 7.15846693778712e-07,
"loss": 0.6633,
"step": 481
},
{
"epoch": 2.3822441430332923,
"grad_norm": 0.10969394445419312,
"learning_rate": 7.050083392813651e-07,
"loss": 0.63,
"step": 482
},
{
"epoch": 2.3871763255240444,
"grad_norm": 0.09678292274475098,
"learning_rate": 6.942391774002352e-07,
"loss": 0.6102,
"step": 483
},
{
"epoch": 2.392108508014797,
"grad_norm": 0.12844984233379364,
"learning_rate": 6.835396232606414e-07,
"loss": 0.6065,
"step": 484
},
{
"epoch": 2.397040690505549,
"grad_norm": 0.10880632698535919,
"learning_rate": 6.729100893046897e-07,
"loss": 0.6388,
"step": 485
},
{
"epoch": 2.401972872996301,
"grad_norm": 0.15737678110599518,
"learning_rate": 6.623509852753798e-07,
"loss": 0.6288,
"step": 486
},
{
"epoch": 2.4069050554870532,
"grad_norm": 0.26250696182250977,
"learning_rate": 6.518627182008034e-07,
"loss": 0.6355,
"step": 487
},
{
"epoch": 2.4118372379778052,
"grad_norm": 0.1346411406993866,
"learning_rate": 6.414456923784593e-07,
"loss": 0.6637,
"step": 488
},
{
"epoch": 2.4167694204685573,
"grad_norm": 0.10590661317110062,
"learning_rate": 6.311003093596674e-07,
"loss": 0.6191,
"step": 489
},
{
"epoch": 2.4217016029593097,
"grad_norm": 0.10193908959627151,
"learning_rate": 6.208269679340886e-07,
"loss": 0.6224,
"step": 490
},
{
"epoch": 2.4266337854500617,
"grad_norm": 0.12438759952783585,
"learning_rate": 6.106260641143547e-07,
"loss": 0.6291,
"step": 491
},
{
"epoch": 2.4315659679408137,
"grad_norm": 0.11157315969467163,
"learning_rate": 6.004979911208006e-07,
"loss": 0.6265,
"step": 492
},
{
"epoch": 2.436498150431566,
"grad_norm": 0.10205821692943573,
"learning_rate": 5.904431393663088e-07,
"loss": 0.6219,
"step": 493
},
{
"epoch": 2.441430332922318,
"grad_norm": 0.0957137867808342,
"learning_rate": 5.804618964412587e-07,
"loss": 0.6376,
"step": 494
},
{
"epoch": 2.44636251541307,
"grad_norm": 0.14211417734622955,
"learning_rate": 5.705546470985851e-07,
"loss": 0.6255,
"step": 495
},
{
"epoch": 2.4512946979038226,
"grad_norm": 0.12571153044700623,
"learning_rate": 5.607217732389503e-07,
"loss": 0.6222,
"step": 496
},
{
"epoch": 2.4562268803945746,
"grad_norm": 0.18337282538414001,
"learning_rate": 5.509636538960183e-07,
"loss": 0.661,
"step": 497
},
{
"epoch": 2.4611590628853266,
"grad_norm": 0.10181237757205963,
"learning_rate": 5.412806652218469e-07,
"loss": 0.6097,
"step": 498
},
{
"epoch": 2.466091245376079,
"grad_norm": 0.10895208269357681,
"learning_rate": 5.316731804723877e-07,
"loss": 0.6544,
"step": 499
},
{
"epoch": 2.471023427866831,
"grad_norm": 0.11107916384935379,
"learning_rate": 5.221415699930952e-07,
"loss": 0.6326,
"step": 500
},
{
"epoch": 2.475955610357583,
"grad_norm": 0.09523724764585495,
"learning_rate": 5.126862012046551e-07,
"loss": 0.6295,
"step": 501
},
{
"epoch": 2.4808877928483355,
"grad_norm": 0.17275214195251465,
"learning_rate": 5.03307438588819e-07,
"loss": 0.647,
"step": 502
},
{
"epoch": 2.4858199753390875,
"grad_norm": 0.3728453516960144,
"learning_rate": 4.940056436743534e-07,
"loss": 0.6419,
"step": 503
},
{
"epoch": 2.4907521578298395,
"grad_norm": 0.09702899307012558,
"learning_rate": 4.847811750231057e-07,
"loss": 0.6374,
"step": 504
},
{
"epoch": 2.495684340320592,
"grad_norm": 0.12215188145637512,
"learning_rate": 4.7563438821618236e-07,
"loss": 0.6408,
"step": 505
},
{
"epoch": 2.500616522811344,
"grad_norm": 0.10780132561922073,
"learning_rate": 4.6656563584023955e-07,
"loss": 0.6191,
"step": 506
},
{
"epoch": 2.505548705302096,
"grad_norm": 0.14112994074821472,
"learning_rate": 4.5757526747389506e-07,
"loss": 0.6381,
"step": 507
},
{
"epoch": 2.5104808877928484,
"grad_norm": 0.09904135763645172,
"learning_rate": 4.4866362967425054e-07,
"loss": 0.6312,
"step": 508
},
{
"epoch": 2.5154130702836004,
"grad_norm": 0.24112163484096527,
"learning_rate": 4.398310659635338e-07,
"loss": 0.6271,
"step": 509
},
{
"epoch": 2.5203452527743524,
"grad_norm": 0.14225369691848755,
"learning_rate": 4.310779168158566e-07,
"loss": 0.6423,
"step": 510
},
{
"epoch": 2.525277435265105,
"grad_norm": 0.13167624175548553,
"learning_rate": 4.2240451964408984e-07,
"loss": 0.6338,
"step": 511
},
{
"epoch": 2.530209617755857,
"grad_norm": 0.10630662739276886,
"learning_rate": 4.138112087868576e-07,
"loss": 0.6255,
"step": 512
},
{
"epoch": 2.5351418002466093,
"grad_norm": 0.10379086434841156,
"learning_rate": 4.052983154956483e-07,
"loss": 0.6264,
"step": 513
},
{
"epoch": 2.5400739827373613,
"grad_norm": 0.09079194813966751,
"learning_rate": 3.9686616792204677e-07,
"loss": 0.5999,
"step": 514
},
{
"epoch": 2.5450061652281133,
"grad_norm": 0.09570661187171936,
"learning_rate": 3.885150911050856e-07,
"loss": 0.61,
"step": 515
},
{
"epoch": 2.5499383477188657,
"grad_norm": 0.09997903555631638,
"learning_rate": 3.8024540695871275e-07,
"loss": 0.6391,
"step": 516
},
{
"epoch": 2.5548705302096177,
"grad_norm": 0.10698223114013672,
"learning_rate": 3.720574342593847e-07,
"loss": 0.6197,
"step": 517
},
{
"epoch": 2.55980271270037,
"grad_norm": 0.13335859775543213,
"learning_rate": 3.639514886337786e-07,
"loss": 0.6507,
"step": 518
},
{
"epoch": 2.564734895191122,
"grad_norm": 0.2598697245121002,
"learning_rate": 3.559278825466245e-07,
"loss": 0.6284,
"step": 519
},
{
"epoch": 2.569667077681874,
"grad_norm": 0.19334684312343597,
"learning_rate": 3.4798692528866057e-07,
"loss": 0.631,
"step": 520
},
{
"epoch": 2.5745992601726266,
"grad_norm": 0.1008177101612091,
"learning_rate": 3.4012892296471173e-07,
"loss": 0.6651,
"step": 521
},
{
"epoch": 2.5795314426633786,
"grad_norm": 0.11722905933856964,
"learning_rate": 3.3235417848188985e-07,
"loss": 0.6524,
"step": 522
},
{
"epoch": 2.5844636251541306,
"grad_norm": 0.12759149074554443,
"learning_rate": 3.2466299153791626e-07,
"loss": 0.6508,
"step": 523
},
{
"epoch": 2.589395807644883,
"grad_norm": 0.12765510380268097,
"learning_rate": 3.1705565860956994e-07,
"loss": 0.636,
"step": 524
},
{
"epoch": 2.594327990135635,
"grad_norm": 0.10414116829633713,
"learning_rate": 3.095324729412602e-07,
"loss": 0.6192,
"step": 525
},
{
"epoch": 2.599260172626387,
"grad_norm": 0.28323763608932495,
"learning_rate": 3.020937245337208e-07,
"loss": 0.6614,
"step": 526
},
{
"epoch": 2.6041923551171395,
"grad_norm": 0.19360347092151642,
"learning_rate": 2.947397001328314e-07,
"loss": 0.6444,
"step": 527
},
{
"epoch": 2.6091245376078915,
"grad_norm": 0.10508602857589722,
"learning_rate": 2.874706832185656e-07,
"loss": 0.6113,
"step": 528
},
{
"epoch": 2.6140567200986435,
"grad_norm": 0.10985539853572845,
"learning_rate": 2.80286953994062e-07,
"loss": 0.6372,
"step": 529
},
{
"epoch": 2.618988902589396,
"grad_norm": 0.11580062657594681,
"learning_rate": 2.731887893748242e-07,
"loss": 0.6274,
"step": 530
},
{
"epoch": 2.623921085080148,
"grad_norm": 0.10763997584581375,
"learning_rate": 2.6617646297804554e-07,
"loss": 0.6345,
"step": 531
},
{
"epoch": 2.6288532675709,
"grad_norm": 0.10293387621641159,
"learning_rate": 2.5925024511206207e-07,
"loss": 0.6032,
"step": 532
},
{
"epoch": 2.6337854500616524,
"grad_norm": 0.1057887151837349,
"learning_rate": 2.52410402765933e-07,
"loss": 0.6594,
"step": 533
},
{
"epoch": 2.6387176325524044,
"grad_norm": 0.11511359363794327,
"learning_rate": 2.45657199599148e-07,
"loss": 0.6283,
"step": 534
},
{
"epoch": 2.6436498150431564,
"grad_norm": 0.10298167169094086,
"learning_rate": 2.389908959314663e-07,
"loss": 0.6194,
"step": 535
},
{
"epoch": 2.648581997533909,
"grad_norm": 0.13076511025428772,
"learning_rate": 2.3241174873287892e-07,
"loss": 0.6128,
"step": 536
},
{
"epoch": 2.653514180024661,
"grad_norm": 0.10795601457357407,
"learning_rate": 2.2592001161370392e-07,
"loss": 0.6604,
"step": 537
},
{
"epoch": 2.658446362515413,
"grad_norm": 0.08846652507781982,
"learning_rate": 2.1951593481481236e-07,
"loss": 0.5985,
"step": 538
},
{
"epoch": 2.6633785450061653,
"grad_norm": 0.14082390069961548,
"learning_rate": 2.1319976519797862e-07,
"loss": 0.6444,
"step": 539
},
{
"epoch": 2.6683107274969173,
"grad_norm": 0.1039619892835617,
"learning_rate": 2.0697174623636795e-07,
"loss": 0.6735,
"step": 540
},
{
"epoch": 2.6732429099876693,
"grad_norm": 0.09636316448450089,
"learning_rate": 2.0083211800514868e-07,
"loss": 0.6311,
"step": 541
},
{
"epoch": 2.678175092478422,
"grad_norm": 0.10067697614431381,
"learning_rate": 1.9478111717223968e-07,
"loss": 0.6237,
"step": 542
},
{
"epoch": 2.683107274969174,
"grad_norm": 0.11034560203552246,
"learning_rate": 1.8881897698918544e-07,
"loss": 0.6247,
"step": 543
},
{
"epoch": 2.688039457459926,
"grad_norm": 0.11042781919240952,
"learning_rate": 1.8294592728216764e-07,
"loss": 0.6413,
"step": 544
},
{
"epoch": 2.6929716399506782,
"grad_norm": 0.1032506600022316,
"learning_rate": 1.7716219444314204e-07,
"loss": 0.6604,
"step": 545
},
{
"epoch": 2.6979038224414302,
"grad_norm": 0.09196452796459198,
"learning_rate": 1.7146800142111536e-07,
"loss": 0.6503,
"step": 546
},
{
"epoch": 2.7028360049321822,
"grad_norm": 0.09848834574222565,
"learning_rate": 1.658635677135484e-07,
"loss": 0.6195,
"step": 547
},
{
"epoch": 2.7077681874229347,
"grad_norm": 0.12145579606294632,
"learning_rate": 1.6034910935789628e-07,
"loss": 0.6269,
"step": 548
},
{
"epoch": 2.7127003699136867,
"grad_norm": 0.10812917351722717,
"learning_rate": 1.5492483892328104e-07,
"loss": 0.6422,
"step": 549
},
{
"epoch": 2.717632552404439,
"grad_norm": 0.10012295842170715,
"learning_rate": 1.4959096550229645e-07,
"loss": 0.6266,
"step": 550
},
{
"epoch": 2.722564734895191,
"grad_norm": 0.24410749971866608,
"learning_rate": 1.44347694702949e-07,
"loss": 0.6406,
"step": 551
},
{
"epoch": 2.727496917385943,
"grad_norm": 0.11757177114486694,
"learning_rate": 1.391952286407311e-07,
"loss": 0.6446,
"step": 552
},
{
"epoch": 2.7324290998766956,
"grad_norm": 0.11057958751916885,
"learning_rate": 1.341337659308309e-07,
"loss": 0.6347,
"step": 553
},
{
"epoch": 2.7373612823674476,
"grad_norm": 0.11715658754110336,
"learning_rate": 1.291635016804768e-07,
"loss": 0.6508,
"step": 554
},
{
"epoch": 2.7422934648582,
"grad_norm": 0.10512126982212067,
"learning_rate": 1.2428462748141523e-07,
"loss": 0.6509,
"step": 555
},
{
"epoch": 2.747225647348952,
"grad_norm": 0.20344923436641693,
"learning_rate": 1.1949733140252468e-07,
"loss": 0.619,
"step": 556
},
{
"epoch": 2.752157829839704,
"grad_norm": 0.13411924242973328,
"learning_rate": 1.1480179798256857e-07,
"loss": 0.6254,
"step": 557
},
{
"epoch": 2.7570900123304565,
"grad_norm": 0.09810943156480789,
"learning_rate": 1.1019820822307986e-07,
"loss": 0.6301,
"step": 558
},
{
"epoch": 2.7620221948212085,
"grad_norm": 0.14971470832824707,
"learning_rate": 1.056867395813832e-07,
"loss": 0.6325,
"step": 559
},
{
"epoch": 2.7669543773119605,
"grad_norm": 0.11750344932079315,
"learning_rate": 1.0126756596375687e-07,
"loss": 0.6256,
"step": 560
},
{
"epoch": 2.771886559802713,
"grad_norm": 0.125830739736557,
"learning_rate": 9.694085771872697e-08,
"loss": 0.6014,
"step": 561
},
{
"epoch": 2.776818742293465,
"grad_norm": 0.16469189524650574,
"learning_rate": 9.270678163050218e-08,
"loss": 0.6491,
"step": 562
},
{
"epoch": 2.781750924784217,
"grad_norm": 0.12204600125551224,
"learning_rate": 8.856550091254302e-08,
"loss": 0.6472,
"step": 563
},
{
"epoch": 2.7866831072749694,
"grad_norm": 0.11357175558805466,
"learning_rate": 8.451717520127272e-08,
"loss": 0.6447,
"step": 564
},
{
"epoch": 2.7916152897657214,
"grad_norm": 0.2082013189792633,
"learning_rate": 8.056196054992193e-08,
"loss": 0.6686,
"step": 565
},
{
"epoch": 2.7965474722564734,
"grad_norm": 0.09912417829036713,
"learning_rate": 7.670000942251288e-08,
"loss": 0.6365,
"step": 566
},
{
"epoch": 2.801479654747226,
"grad_norm": 0.3408164381980896,
"learning_rate": 7.293147068798384e-08,
"loss": 0.7199,
"step": 567
},
{
"epoch": 2.806411837237978,
"grad_norm": 0.18314948678016663,
"learning_rate": 6.92564896144493e-08,
"loss": 0.6531,
"step": 568
},
{
"epoch": 2.81134401972873,
"grad_norm": 0.0905625969171524,
"learning_rate": 6.56752078636011e-08,
"loss": 0.6495,
"step": 569
},
{
"epoch": 2.8162762022194823,
"grad_norm": 0.10962024331092834,
"learning_rate": 6.218776348524663e-08,
"loss": 0.629,
"step": 570
},
{
"epoch": 2.8212083847102343,
"grad_norm": 0.1477546989917755,
"learning_rate": 5.879429091198846e-08,
"loss": 0.6415,
"step": 571
},
{
"epoch": 2.8261405672009863,
"grad_norm": 0.2878320515155792,
"learning_rate": 5.549492095404202e-08,
"loss": 0.6354,
"step": 572
},
{
"epoch": 2.8310727496917387,
"grad_norm": 0.09865111857652664,
"learning_rate": 5.2289780794192726e-08,
"loss": 0.6333,
"step": 573
},
{
"epoch": 2.8360049321824907,
"grad_norm": 0.11314819753170013,
"learning_rate": 4.917899398289378e-08,
"loss": 0.6353,
"step": 574
},
{
"epoch": 2.8409371146732427,
"grad_norm": 0.13539192080497742,
"learning_rate": 4.6162680433503024e-08,
"loss": 0.6449,
"step": 575
},
{
"epoch": 2.845869297163995,
"grad_norm": 0.11400008946657181,
"learning_rate": 4.3240956417661685e-08,
"loss": 0.5971,
"step": 576
},
{
"epoch": 2.850801479654747,
"grad_norm": 0.1335747390985489,
"learning_rate": 4.0413934560811216e-08,
"loss": 0.6234,
"step": 577
},
{
"epoch": 2.855733662145499,
"grad_norm": 1.7575569152832031,
"learning_rate": 3.768172383785268e-08,
"loss": 0.6393,
"step": 578
},
{
"epoch": 2.8606658446362516,
"grad_norm": 0.20131815969944,
"learning_rate": 3.504442956894533e-08,
"loss": 0.6389,
"step": 579
},
{
"epoch": 2.8655980271270036,
"grad_norm": 0.10285453498363495,
"learning_rate": 3.250215341544766e-08,
"loss": 0.6129,
"step": 580
},
{
"epoch": 2.8705302096177556,
"grad_norm": 0.11187023669481277,
"learning_rate": 3.005499337599777e-08,
"loss": 0.622,
"step": 581
},
{
"epoch": 2.875462392108508,
"grad_norm": 0.10102323442697525,
"learning_rate": 2.7703043782735527e-08,
"loss": 0.6667,
"step": 582
},
{
"epoch": 2.88039457459926,
"grad_norm": 0.16657452285289764,
"learning_rate": 2.544639529766829e-08,
"loss": 0.6302,
"step": 583
},
{
"epoch": 2.885326757090012,
"grad_norm": 0.09614276140928268,
"learning_rate": 2.3285134909173113e-08,
"loss": 0.614,
"step": 584
},
{
"epoch": 2.8902589395807645,
"grad_norm": 0.17049086093902588,
"learning_rate": 2.1219345928646107e-08,
"loss": 0.6179,
"step": 585
},
{
"epoch": 2.8951911220715165,
"grad_norm": 0.11277743428945541,
"learning_rate": 1.924910798728946e-08,
"loss": 0.6397,
"step": 586
},
{
"epoch": 2.900123304562269,
"grad_norm": 0.1780087947845459,
"learning_rate": 1.7374497033042504e-08,
"loss": 0.6358,
"step": 587
},
{
"epoch": 2.905055487053021,
"grad_norm": 0.33896899223327637,
"learning_rate": 1.559558532765404e-08,
"loss": 0.6364,
"step": 588
},
{
"epoch": 2.909987669543773,
"grad_norm": 0.11533108353614807,
"learning_rate": 1.3912441443896529e-08,
"loss": 0.6219,
"step": 589
},
{
"epoch": 2.9149198520345254,
"grad_norm": 0.2513286769390106,
"learning_rate": 1.2325130262923202e-08,
"loss": 0.6544,
"step": 590
},
{
"epoch": 2.9198520345252774,
"grad_norm": 0.10646089911460876,
"learning_rate": 1.0833712971766442e-08,
"loss": 0.6314,
"step": 591
},
{
"epoch": 2.92478421701603,
"grad_norm": 0.18473948538303375,
"learning_rate": 9.438247060979954e-09,
"loss": 0.6272,
"step": 592
},
{
"epoch": 2.929716399506782,
"grad_norm": 0.17810285091400146,
"learning_rate": 8.13878632242221e-09,
"loss": 0.6551,
"step": 593
},
{
"epoch": 2.934648581997534,
"grad_norm": 0.10676706582307816,
"learning_rate": 6.935380847182815e-09,
"loss": 0.6505,
"step": 594
},
{
"epoch": 2.9395807644882863,
"grad_norm": 0.1609431952238083,
"learning_rate": 5.828077023651846e-09,
"loss": 0.6403,
"step": 595
},
{
"epoch": 2.9445129469790383,
"grad_norm": 0.3682437539100647,
"learning_rate": 4.816917535731547e-09,
"loss": 0.6535,
"step": 596
},
{
"epoch": 2.9494451294697903,
"grad_norm": 0.08937019854784012,
"learning_rate": 3.9019413611907084e-09,
"loss": 0.6422,
"step": 597
},
{
"epoch": 2.9543773119605428,
"grad_norm": 0.09343031048774719,
"learning_rate": 3.083183770162812e-09,
"loss": 0.6454,
"step": 598
},
{
"epoch": 2.9593094944512948,
"grad_norm": 0.11115273088216782,
"learning_rate": 2.360676323786282e-09,
"loss": 0.6167,
"step": 599
},
{
"epoch": 2.9642416769420468,
"grad_norm": 0.09774809330701828,
"learning_rate": 1.7344468729868502e-09,
"loss": 0.6422,
"step": 600
},
{
"epoch": 2.969173859432799,
"grad_norm": 0.10831815004348755,
"learning_rate": 1.2045195574056367e-09,
"loss": 0.6387,
"step": 601
},
{
"epoch": 2.974106041923551,
"grad_norm": 0.22296251356601715,
"learning_rate": 7.709148044679482e-10,
"loss": 0.6415,
"step": 602
},
{
"epoch": 2.979038224414303,
"grad_norm": 0.10190446674823761,
"learning_rate": 4.3364932859474293e-10,
"loss": 0.6286,
"step": 603
},
{
"epoch": 2.9839704069050557,
"grad_norm": 0.15854156017303467,
"learning_rate": 1.9273613056008945e-10,
"loss": 0.6051,
"step": 604
},
{
"epoch": 2.9889025893958077,
"grad_norm": 0.09298679232597351,
"learning_rate": 4.8184496989067684e-11,
"loss": 0.6213,
"step": 605
},
{
"epoch": 2.9938347718865597,
"grad_norm": 0.2940804958343506,
"learning_rate": 0.0,
"loss": 0.6347,
"step": 606
}
],
"logging_steps": 1,
"max_steps": 606,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 101,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.765503957851418e+20,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}