{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 9013,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0022190169754798626,
"grad_norm": 7.625,
"learning_rate": 3.3277870216306157e-07,
"loss": 1.2617,
"step": 20
},
{
"epoch": 0.004438033950959725,
"grad_norm": 5.5625,
"learning_rate": 6.655574043261231e-07,
"loss": 1.2121,
"step": 40
},
{
"epoch": 0.006657050926439588,
"grad_norm": 4.78125,
"learning_rate": 9.983361064891848e-07,
"loss": 1.2737,
"step": 60
},
{
"epoch": 0.00887606790191945,
"grad_norm": 5.46875,
"learning_rate": 1.3311148086522463e-06,
"loss": 1.1634,
"step": 80
},
{
"epoch": 0.011095084877399313,
"grad_norm": 6.46875,
"learning_rate": 1.6638935108153078e-06,
"loss": 1.1582,
"step": 100
},
{
"epoch": 0.013314101852879175,
"grad_norm": 6.09375,
"learning_rate": 1.9966722129783695e-06,
"loss": 1.1071,
"step": 120
},
{
"epoch": 0.015533118828359036,
"grad_norm": 5.5625,
"learning_rate": 2.329450915141431e-06,
"loss": 1.0944,
"step": 140
},
{
"epoch": 0.0177521358038389,
"grad_norm": 3.3125,
"learning_rate": 2.6622296173044925e-06,
"loss": 0.9622,
"step": 160
},
{
"epoch": 0.01997115277931876,
"grad_norm": 5.65625,
"learning_rate": 2.995008319467554e-06,
"loss": 0.7219,
"step": 180
},
{
"epoch": 0.022190169754798626,
"grad_norm": 3.234375,
"learning_rate": 3.3277870216306156e-06,
"loss": 0.6194,
"step": 200
},
{
"epoch": 0.024409186730278486,
"grad_norm": 3.359375,
"learning_rate": 3.6605657237936775e-06,
"loss": 0.5223,
"step": 220
},
{
"epoch": 0.02662820370575835,
"grad_norm": 2.234375,
"learning_rate": 3.993344425956739e-06,
"loss": 0.5673,
"step": 240
},
{
"epoch": 0.02884722068123821,
"grad_norm": 2.109375,
"learning_rate": 4.326123128119801e-06,
"loss": 0.5215,
"step": 260
},
{
"epoch": 0.031066237656718072,
"grad_norm": 2.765625,
"learning_rate": 4.658901830282862e-06,
"loss": 0.5166,
"step": 280
},
{
"epoch": 0.03328525463219793,
"grad_norm": 2.0625,
"learning_rate": 4.991680532445923e-06,
"loss": 0.5023,
"step": 300
},
{
"epoch": 0.0355042716076778,
"grad_norm": 2.0625,
"learning_rate": 5.324459234608985e-06,
"loss": 0.4611,
"step": 320
},
{
"epoch": 0.03772328858315766,
"grad_norm": 2.09375,
"learning_rate": 5.657237936772047e-06,
"loss": 0.5203,
"step": 340
},
{
"epoch": 0.03994230555863752,
"grad_norm": 2.34375,
"learning_rate": 5.990016638935108e-06,
"loss": 0.5498,
"step": 360
},
{
"epoch": 0.04216132253411738,
"grad_norm": 2.03125,
"learning_rate": 6.32279534109817e-06,
"loss": 0.5597,
"step": 380
},
{
"epoch": 0.04438033950959725,
"grad_norm": 2.453125,
"learning_rate": 6.655574043261231e-06,
"loss": 0.5133,
"step": 400
},
{
"epoch": 0.04659935648507711,
"grad_norm": 2.15625,
"learning_rate": 6.988352745424292e-06,
"loss": 0.4843,
"step": 420
},
{
"epoch": 0.04881837346055697,
"grad_norm": 2.265625,
"learning_rate": 7.321131447587355e-06,
"loss": 0.5358,
"step": 440
},
{
"epoch": 0.051037390436036834,
"grad_norm": 2.6875,
"learning_rate": 7.653910149750416e-06,
"loss": 0.3936,
"step": 460
},
{
"epoch": 0.0532564074115167,
"grad_norm": 2.125,
"learning_rate": 7.986688851913478e-06,
"loss": 0.5245,
"step": 480
},
{
"epoch": 0.05547542438699656,
"grad_norm": 2.796875,
"learning_rate": 8.319467554076538e-06,
"loss": 0.5094,
"step": 500
},
{
"epoch": 0.05769444136247642,
"grad_norm": 1.84375,
"learning_rate": 8.652246256239602e-06,
"loss": 0.4476,
"step": 520
},
{
"epoch": 0.059913458337956284,
"grad_norm": 2.09375,
"learning_rate": 8.985024958402662e-06,
"loss": 0.4836,
"step": 540
},
{
"epoch": 0.062132475313436145,
"grad_norm": 1.6328125,
"learning_rate": 9.317803660565724e-06,
"loss": 0.4805,
"step": 560
},
{
"epoch": 0.06435149228891601,
"grad_norm": 1.8828125,
"learning_rate": 9.650582362728786e-06,
"loss": 0.425,
"step": 580
},
{
"epoch": 0.06657050926439587,
"grad_norm": 2.6875,
"learning_rate": 9.983361064891846e-06,
"loss": 0.5223,
"step": 600
},
{
"epoch": 0.06878952623987573,
"grad_norm": 2.140625,
"learning_rate": 1.031613976705491e-05,
"loss": 0.5,
"step": 620
},
{
"epoch": 0.0710085432153556,
"grad_norm": 2.203125,
"learning_rate": 1.064891846921797e-05,
"loss": 0.5271,
"step": 640
},
{
"epoch": 0.07322756019083546,
"grad_norm": 1.9765625,
"learning_rate": 1.0981697171381032e-05,
"loss": 0.4981,
"step": 660
},
{
"epoch": 0.07544657716631532,
"grad_norm": 1.2421875,
"learning_rate": 1.1314475873544094e-05,
"loss": 0.5133,
"step": 680
},
{
"epoch": 0.07766559414179519,
"grad_norm": 2.515625,
"learning_rate": 1.1647254575707154e-05,
"loss": 0.4693,
"step": 700
},
{
"epoch": 0.07988461111727505,
"grad_norm": 2.03125,
"learning_rate": 1.1980033277870216e-05,
"loss": 0.4844,
"step": 720
},
{
"epoch": 0.08210362809275491,
"grad_norm": 1.578125,
"learning_rate": 1.2312811980033278e-05,
"loss": 0.4943,
"step": 740
},
{
"epoch": 0.08432264506823477,
"grad_norm": 2.75,
"learning_rate": 1.264559068219634e-05,
"loss": 0.5,
"step": 760
},
{
"epoch": 0.08654166204371463,
"grad_norm": 1.46875,
"learning_rate": 1.2978369384359402e-05,
"loss": 0.4318,
"step": 780
},
{
"epoch": 0.0887606790191945,
"grad_norm": 2.484375,
"learning_rate": 1.3311148086522462e-05,
"loss": 0.4745,
"step": 800
},
{
"epoch": 0.09097969599467436,
"grad_norm": 2.28125,
"learning_rate": 1.3643926788685524e-05,
"loss": 0.5459,
"step": 820
},
{
"epoch": 0.09319871297015422,
"grad_norm": 2.078125,
"learning_rate": 1.3976705490848584e-05,
"loss": 0.5021,
"step": 840
},
{
"epoch": 0.09541772994563408,
"grad_norm": 0.97265625,
"learning_rate": 1.4309484193011648e-05,
"loss": 0.4271,
"step": 860
},
{
"epoch": 0.09763674692111395,
"grad_norm": 1.828125,
"learning_rate": 1.464226289517471e-05,
"loss": 0.4924,
"step": 880
},
{
"epoch": 0.09985576389659381,
"grad_norm": 2.25,
"learning_rate": 1.497504159733777e-05,
"loss": 0.514,
"step": 900
},
{
"epoch": 0.10207478087207367,
"grad_norm": 1.578125,
"learning_rate": 1.5307820299500832e-05,
"loss": 0.4569,
"step": 920
},
{
"epoch": 0.10429379784755353,
"grad_norm": 1.4140625,
"learning_rate": 1.5640599001663892e-05,
"loss": 0.4751,
"step": 940
},
{
"epoch": 0.1065128148230334,
"grad_norm": 2.1875,
"learning_rate": 1.5973377703826956e-05,
"loss": 0.4719,
"step": 960
},
{
"epoch": 0.10873183179851326,
"grad_norm": 1.921875,
"learning_rate": 1.6306156405990016e-05,
"loss": 0.4455,
"step": 980
},
{
"epoch": 0.11095084877399312,
"grad_norm": 1.7421875,
"learning_rate": 1.6638935108153077e-05,
"loss": 0.5068,
"step": 1000
},
{
"epoch": 0.11316986574947298,
"grad_norm": 1.96875,
"learning_rate": 1.697171381031614e-05,
"loss": 0.5076,
"step": 1020
},
{
"epoch": 0.11538888272495285,
"grad_norm": 2.0625,
"learning_rate": 1.7304492512479204e-05,
"loss": 0.449,
"step": 1040
},
{
"epoch": 0.11760789970043271,
"grad_norm": 2.859375,
"learning_rate": 1.7637271214642264e-05,
"loss": 0.4298,
"step": 1060
},
{
"epoch": 0.11982691667591257,
"grad_norm": 2.0625,
"learning_rate": 1.7970049916805324e-05,
"loss": 0.5298,
"step": 1080
},
{
"epoch": 0.12204593365139244,
"grad_norm": 1.90625,
"learning_rate": 1.8302828618968388e-05,
"loss": 0.5398,
"step": 1100
},
{
"epoch": 0.12426495062687229,
"grad_norm": 1.40625,
"learning_rate": 1.8635607321131448e-05,
"loss": 0.5533,
"step": 1120
},
{
"epoch": 0.12648396760235217,
"grad_norm": 2.21875,
"learning_rate": 1.896838602329451e-05,
"loss": 0.422,
"step": 1140
},
{
"epoch": 0.12870298457783202,
"grad_norm": 1.96875,
"learning_rate": 1.9301164725457572e-05,
"loss": 0.4807,
"step": 1160
},
{
"epoch": 0.13092200155331188,
"grad_norm": 1.484375,
"learning_rate": 1.9633943427620632e-05,
"loss": 0.5209,
"step": 1180
},
{
"epoch": 0.13314101852879173,
"grad_norm": 1.9765625,
"learning_rate": 1.9966722129783693e-05,
"loss": 0.4522,
"step": 1200
},
{
"epoch": 0.13536003550427161,
"grad_norm": 1.3828125,
"learning_rate": 2.0299500831946756e-05,
"loss": 0.4684,
"step": 1220
},
{
"epoch": 0.13757905247975147,
"grad_norm": 1.59375,
"learning_rate": 2.063227953410982e-05,
"loss": 0.3982,
"step": 1240
},
{
"epoch": 0.13979806945523132,
"grad_norm": 1.7734375,
"learning_rate": 2.096505823627288e-05,
"loss": 0.5518,
"step": 1260
},
{
"epoch": 0.1420170864307112,
"grad_norm": 1.6875,
"learning_rate": 2.129783693843594e-05,
"loss": 0.5107,
"step": 1280
},
{
"epoch": 0.14423610340619106,
"grad_norm": 1.453125,
"learning_rate": 2.1630615640599004e-05,
"loss": 0.4422,
"step": 1300
},
{
"epoch": 0.1464551203816709,
"grad_norm": 2.015625,
"learning_rate": 2.1963394342762064e-05,
"loss": 0.5578,
"step": 1320
},
{
"epoch": 0.1486741373571508,
"grad_norm": 1.4921875,
"learning_rate": 2.2296173044925124e-05,
"loss": 0.4516,
"step": 1340
},
{
"epoch": 0.15089315433263065,
"grad_norm": 2.15625,
"learning_rate": 2.2628951747088188e-05,
"loss": 0.3979,
"step": 1360
},
{
"epoch": 0.1531121713081105,
"grad_norm": 1.4140625,
"learning_rate": 2.296173044925125e-05,
"loss": 0.4895,
"step": 1380
},
{
"epoch": 0.15533118828359038,
"grad_norm": 1.453125,
"learning_rate": 2.329450915141431e-05,
"loss": 0.4505,
"step": 1400
},
{
"epoch": 0.15755020525907024,
"grad_norm": 2.265625,
"learning_rate": 2.3627287853577372e-05,
"loss": 0.5008,
"step": 1420
},
{
"epoch": 0.1597692222345501,
"grad_norm": 1.5390625,
"learning_rate": 2.3960066555740432e-05,
"loss": 0.523,
"step": 1440
},
{
"epoch": 0.16198823921002994,
"grad_norm": 1.5546875,
"learning_rate": 2.4292845257903493e-05,
"loss": 0.4813,
"step": 1460
},
{
"epoch": 0.16420725618550983,
"grad_norm": 1.46875,
"learning_rate": 2.4625623960066556e-05,
"loss": 0.4995,
"step": 1480
},
{
"epoch": 0.16642627316098968,
"grad_norm": 1.8359375,
"learning_rate": 2.495840266222962e-05,
"loss": 0.474,
"step": 1500
},
{
"epoch": 0.16864529013646953,
"grad_norm": 1.6328125,
"learning_rate": 2.529118136439268e-05,
"loss": 0.4201,
"step": 1520
},
{
"epoch": 0.17086430711194942,
"grad_norm": 2.15625,
"learning_rate": 2.562396006655574e-05,
"loss": 0.5332,
"step": 1540
},
{
"epoch": 0.17308332408742927,
"grad_norm": 2.53125,
"learning_rate": 2.5956738768718804e-05,
"loss": 0.5105,
"step": 1560
},
{
"epoch": 0.17530234106290912,
"grad_norm": 1.734375,
"learning_rate": 2.6289517470881864e-05,
"loss": 0.4679,
"step": 1580
},
{
"epoch": 0.177521358038389,
"grad_norm": 1.4453125,
"learning_rate": 2.6622296173044925e-05,
"loss": 0.4276,
"step": 1600
},
{
"epoch": 0.17974037501386886,
"grad_norm": 1.9609375,
"learning_rate": 2.6955074875207988e-05,
"loss": 0.4462,
"step": 1620
},
{
"epoch": 0.1819593919893487,
"grad_norm": 1.734375,
"learning_rate": 2.728785357737105e-05,
"loss": 0.4534,
"step": 1640
},
{
"epoch": 0.1841784089648286,
"grad_norm": 1.5703125,
"learning_rate": 2.762063227953411e-05,
"loss": 0.4699,
"step": 1660
},
{
"epoch": 0.18639742594030845,
"grad_norm": 1.828125,
"learning_rate": 2.795341098169717e-05,
"loss": 0.483,
"step": 1680
},
{
"epoch": 0.1886164429157883,
"grad_norm": 2.765625,
"learning_rate": 2.8286189683860236e-05,
"loss": 0.4776,
"step": 1700
},
{
"epoch": 0.19083545989126816,
"grad_norm": 2.0,
"learning_rate": 2.8618968386023296e-05,
"loss": 0.4361,
"step": 1720
},
{
"epoch": 0.19305447686674804,
"grad_norm": 1.640625,
"learning_rate": 2.8951747088186356e-05,
"loss": 0.4566,
"step": 1740
},
{
"epoch": 0.1952734938422279,
"grad_norm": 1.640625,
"learning_rate": 2.928452579034942e-05,
"loss": 0.4609,
"step": 1760
},
{
"epoch": 0.19749251081770774,
"grad_norm": 1.9140625,
"learning_rate": 2.961730449251248e-05,
"loss": 0.5019,
"step": 1780
},
{
"epoch": 0.19971152779318763,
"grad_norm": 2.125,
"learning_rate": 2.995008319467554e-05,
"loss": 0.4844,
"step": 1800
},
{
"epoch": 0.20193054476866748,
"grad_norm": 2.015625,
"learning_rate": 2.999958848436878e-05,
"loss": 0.4974,
"step": 1820
},
{
"epoch": 0.20414956174414733,
"grad_norm": 1.828125,
"learning_rate": 2.9998050673796383e-05,
"loss": 0.4591,
"step": 1840
},
{
"epoch": 0.20636857871962722,
"grad_norm": 1.8671875,
"learning_rate": 2.999537386964595e-05,
"loss": 0.4319,
"step": 1860
},
{
"epoch": 0.20858759569510707,
"grad_norm": 1.953125,
"learning_rate": 2.9991558275201416e-05,
"loss": 0.4425,
"step": 1880
},
{
"epoch": 0.21080661267058692,
"grad_norm": 1.703125,
"learning_rate": 2.998660418022961e-05,
"loss": 0.4729,
"step": 1900
},
{
"epoch": 0.2130256296460668,
"grad_norm": 1.6484375,
"learning_rate": 2.9980511960958247e-05,
"loss": 0.4049,
"step": 1920
},
{
"epoch": 0.21524464662154666,
"grad_norm": 1.671875,
"learning_rate": 2.9973282080047365e-05,
"loss": 0.4646,
"step": 1940
},
{
"epoch": 0.2174636635970265,
"grad_norm": 1.6875,
"learning_rate": 2.996491508655417e-05,
"loss": 0.4725,
"step": 1960
},
{
"epoch": 0.21968268057250637,
"grad_norm": 1.953125,
"learning_rate": 2.995541161589137e-05,
"loss": 0.4448,
"step": 1980
},
{
"epoch": 0.22190169754798625,
"grad_norm": 1.3046875,
"learning_rate": 2.994477238977889e-05,
"loss": 0.4673,
"step": 2000
},
{
"epoch": 0.2241207145234661,
"grad_norm": 1.71875,
"learning_rate": 2.9932998216189077e-05,
"loss": 0.423,
"step": 2020
},
{
"epoch": 0.22633973149894596,
"grad_norm": 1.703125,
"learning_rate": 2.992008998928534e-05,
"loss": 0.4803,
"step": 2040
},
{
"epoch": 0.22855874847442584,
"grad_norm": 1.4296875,
"learning_rate": 2.990604868935424e-05,
"loss": 0.4604,
"step": 2060
},
{
"epoch": 0.2307777654499057,
"grad_norm": 1.890625,
"learning_rate": 2.989087538273105e-05,
"loss": 0.5155,
"step": 2080
},
{
"epoch": 0.23299678242538555,
"grad_norm": 1.7421875,
"learning_rate": 2.9874571221718776e-05,
"loss": 0.5122,
"step": 2100
},
{
"epoch": 0.23521579940086543,
"grad_norm": 1.765625,
"learning_rate": 2.985713744450063e-05,
"loss": 0.4475,
"step": 2120
},
{
"epoch": 0.23743481637634528,
"grad_norm": 2.15625,
"learning_rate": 2.9838575375046034e-05,
"loss": 0.4281,
"step": 2140
},
{
"epoch": 0.23965383335182514,
"grad_norm": 1.9296875,
"learning_rate": 2.9818886423010024e-05,
"loss": 0.4383,
"step": 2160
},
{
"epoch": 0.24187285032730502,
"grad_norm": 1.7421875,
"learning_rate": 2.979807208362625e-05,
"loss": 0.449,
"step": 2180
},
{
"epoch": 0.24409186730278487,
"grad_norm": 1.4453125,
"learning_rate": 2.9776133937593395e-05,
"loss": 0.5002,
"step": 2200
},
{
"epoch": 0.24631088427826472,
"grad_norm": 1.6484375,
"learning_rate": 2.9753073650955128e-05,
"loss": 0.4667,
"step": 2220
},
{
"epoch": 0.24852990125374458,
"grad_norm": 2.203125,
"learning_rate": 2.9728892974973592e-05,
"loss": 0.4431,
"step": 2240
},
{
"epoch": 0.25074891822922446,
"grad_norm": 1.78125,
"learning_rate": 2.970359374599641e-05,
"loss": 0.4684,
"step": 2260
},
{
"epoch": 0.25296793520470434,
"grad_norm": 2.15625,
"learning_rate": 2.967717788531722e-05,
"loss": 0.4599,
"step": 2280
},
{
"epoch": 0.25518695218018417,
"grad_norm": 1.4453125,
"learning_rate": 2.9649647399029764e-05,
"loss": 0.5047,
"step": 2300
},
{
"epoch": 0.25740596915566405,
"grad_norm": 2.265625,
"learning_rate": 2.9621004377875558e-05,
"loss": 0.4824,
"step": 2320
},
{
"epoch": 0.2596249861311439,
"grad_norm": 1.3515625,
"learning_rate": 2.959125099708509e-05,
"loss": 0.4928,
"step": 2340
},
{
"epoch": 0.26184400310662376,
"grad_norm": 1.5703125,
"learning_rate": 2.9560389516212638e-05,
"loss": 0.5386,
"step": 2360
},
{
"epoch": 0.26406302008210364,
"grad_norm": 0.96875,
"learning_rate": 2.9528422278964687e-05,
"loss": 0.4092,
"step": 2380
},
{
"epoch": 0.26628203705758346,
"grad_norm": 1.5703125,
"learning_rate": 2.949535171302192e-05,
"loss": 0.4929,
"step": 2400
},
{
"epoch": 0.26850105403306335,
"grad_norm": 1.5859375,
"learning_rate": 2.9461180329854867e-05,
"loss": 0.4893,
"step": 2420
},
{
"epoch": 0.27072007100854323,
"grad_norm": 1.890625,
"learning_rate": 2.9425910724533165e-05,
"loss": 0.4554,
"step": 2440
},
{
"epoch": 0.27293908798402305,
"grad_norm": 1.65625,
"learning_rate": 2.9389545575528496e-05,
"loss": 0.4702,
"step": 2460
},
{
"epoch": 0.27515810495950294,
"grad_norm": 1.375,
"learning_rate": 2.9352087644511162e-05,
"loss": 0.5056,
"step": 2480
},
{
"epoch": 0.2773771219349828,
"grad_norm": 2.859375,
"learning_rate": 2.9313539776140362e-05,
"loss": 0.428,
"step": 2500
},
{
"epoch": 0.27959613891046264,
"grad_norm": 1.7890625,
"learning_rate": 2.9273904897848174e-05,
"loss": 0.4827,
"step": 2520
},
{
"epoch": 0.2818151558859425,
"grad_norm": 1.359375,
"learning_rate": 2.9233186019617214e-05,
"loss": 0.5082,
"step": 2540
},
{
"epoch": 0.2840341728614224,
"grad_norm": 1.421875,
"learning_rate": 2.9191386233752062e-05,
"loss": 0.4803,
"step": 2560
},
{
"epoch": 0.28625318983690223,
"grad_norm": 1.859375,
"learning_rate": 2.9148508714644424e-05,
"loss": 0.5205,
"step": 2580
},
{
"epoch": 0.2884722068123821,
"grad_norm": 1.546875,
"learning_rate": 2.9104556718532054e-05,
"loss": 0.4965,
"step": 2600
},
{
"epoch": 0.290691223787862,
"grad_norm": 1.5078125,
"learning_rate": 2.9059533583251487e-05,
"loss": 0.4376,
"step": 2620
},
{
"epoch": 0.2929102407633418,
"grad_norm": 1.7578125,
"learning_rate": 2.9013442727984517e-05,
"loss": 0.4618,
"step": 2640
},
{
"epoch": 0.2951292577388217,
"grad_norm": 1.640625,
"learning_rate": 2.8966287652998562e-05,
"loss": 0.4943,
"step": 2660
},
{
"epoch": 0.2973482747143016,
"grad_norm": 1.703125,
"learning_rate": 2.891807193938085e-05,
"loss": 0.4582,
"step": 2680
},
{
"epoch": 0.2995672916897814,
"grad_norm": 1.6875,
"learning_rate": 2.8868799248766436e-05,
"loss": 0.5133,
"step": 2700
},
{
"epoch": 0.3017863086652613,
"grad_norm": 1.71875,
"learning_rate": 2.8818473323060143e-05,
"loss": 0.4628,
"step": 2720
},
{
"epoch": 0.3040053256407412,
"grad_norm": 1.609375,
"learning_rate": 2.87670979841524e-05,
"loss": 0.4408,
"step": 2740
},
{
"epoch": 0.306224342616221,
"grad_norm": 1.734375,
"learning_rate": 2.8714677133628963e-05,
"loss": 0.5004,
"step": 2760
},
{
"epoch": 0.3084433595917009,
"grad_norm": 1.6640625,
"learning_rate": 2.866121475247467e-05,
"loss": 0.4031,
"step": 2780
},
{
"epoch": 0.31066237656718076,
"grad_norm": 1.8046875,
"learning_rate": 2.8606714900771055e-05,
"loss": 0.5192,
"step": 2800
},
{
"epoch": 0.3128813935426606,
"grad_norm": 2.03125,
"learning_rate": 2.8551181717388066e-05,
"loss": 0.5397,
"step": 2820
},
{
"epoch": 0.3151004105181405,
"grad_norm": 1.9609375,
"learning_rate": 2.849461941966972e-05,
"loss": 0.4446,
"step": 2840
},
{
"epoch": 0.3173194274936203,
"grad_norm": 1.9296875,
"learning_rate": 2.8437032303113823e-05,
"loss": 0.4464,
"step": 2860
},
{
"epoch": 0.3195384444691002,
"grad_norm": 1.734375,
"learning_rate": 2.8378424741045773e-05,
"loss": 0.526,
"step": 2880
},
{
"epoch": 0.32175746144458006,
"grad_norm": 1.7265625,
"learning_rate": 2.831880118428644e-05,
"loss": 0.4087,
"step": 2900
},
{
"epoch": 0.3239764784200599,
"grad_norm": 1.125,
"learning_rate": 2.8258166160814135e-05,
"loss": 0.4833,
"step": 2920
},
{
"epoch": 0.32619549539553977,
"grad_norm": 1.34375,
"learning_rate": 2.8196524275420758e-05,
"loss": 0.4403,
"step": 2940
},
{
"epoch": 0.32841451237101965,
"grad_norm": 1.7421875,
"learning_rate": 2.813388020936211e-05,
"loss": 0.4728,
"step": 2960
},
{
"epoch": 0.3306335293464995,
"grad_norm": 1.0390625,
"learning_rate": 2.8070238720002364e-05,
"loss": 0.4389,
"step": 2980
},
{
"epoch": 0.33285254632197936,
"grad_norm": 0.90625,
"learning_rate": 2.800560464045278e-05,
"loss": 0.482,
"step": 3000
},
{
"epoch": 0.33507156329745924,
"grad_norm": 1.6796875,
"learning_rate": 2.7939982879204686e-05,
"loss": 0.4157,
"step": 3020
},
{
"epoch": 0.33729058027293907,
"grad_norm": 1.9140625,
"learning_rate": 2.787337841975668e-05,
"loss": 0.4593,
"step": 3040
},
{
"epoch": 0.33950959724841895,
"grad_norm": 1.2421875,
"learning_rate": 2.78057963202362e-05,
"loss": 0.4071,
"step": 3060
},
{
"epoch": 0.34172861422389883,
"grad_norm": 1.671875,
"learning_rate": 2.773724171301538e-05,
"loss": 0.4307,
"step": 3080
},
{
"epoch": 0.34394763119937866,
"grad_norm": 2.75,
"learning_rate": 2.7667719804321285e-05,
"loss": 0.5196,
"step": 3100
},
{
"epoch": 0.34616664817485854,
"grad_norm": 1.71875,
"learning_rate": 2.7597235873840544e-05,
"loss": 0.4529,
"step": 3120
},
{
"epoch": 0.3483856651503384,
"grad_norm": 1.453125,
"learning_rate": 2.7525795274318386e-05,
"loss": 0.5671,
"step": 3140
},
{
"epoch": 0.35060468212581825,
"grad_norm": 1.6484375,
"learning_rate": 2.745340343115213e-05,
"loss": 0.506,
"step": 3160
},
{
"epoch": 0.3528236991012981,
"grad_norm": 1.828125,
"learning_rate": 2.7380065841979196e-05,
"loss": 0.4834,
"step": 3180
},
{
"epoch": 0.355042716076778,
"grad_norm": 1.9296875,
"learning_rate": 2.7305788076259565e-05,
"loss": 0.4635,
"step": 3200
},
{
"epoch": 0.35726173305225783,
"grad_norm": 1.7109375,
"learning_rate": 2.7230575774852843e-05,
"loss": 0.4873,
"step": 3220
},
{
"epoch": 0.3594807500277377,
"grad_norm": 1.578125,
"learning_rate": 2.715443464958986e-05,
"loss": 0.4133,
"step": 3240
},
{
"epoch": 0.3616997670032176,
"grad_norm": 2.03125,
"learning_rate": 2.707737048283891e-05,
"loss": 0.4954,
"step": 3260
},
{
"epoch": 0.3639187839786974,
"grad_norm": 1.3671875,
"learning_rate": 2.699938912706663e-05,
"loss": 0.4585,
"step": 3280
},
{
"epoch": 0.3661378009541773,
"grad_norm": 1.921875,
"learning_rate": 2.6920496504393507e-05,
"loss": 0.5479,
"step": 3300
},
{
"epoch": 0.3683568179296572,
"grad_norm": 2.515625,
"learning_rate": 2.6840698606144197e-05,
"loss": 0.4712,
"step": 3320
},
{
"epoch": 0.370575834905137,
"grad_norm": 2.28125,
"learning_rate": 2.6760001492392474e-05,
"loss": 0.5046,
"step": 3340
},
{
"epoch": 0.3727948518806169,
"grad_norm": 1.53125,
"learning_rate": 2.6678411291501038e-05,
"loss": 0.522,
"step": 3360
},
{
"epoch": 0.3750138688560967,
"grad_norm": 1.6796875,
"learning_rate": 2.6595934199656108e-05,
"loss": 0.4852,
"step": 3380
},
{
"epoch": 0.3772328858315766,
"grad_norm": 1.9140625,
"learning_rate": 2.6512576480396862e-05,
"loss": 0.4745,
"step": 3400
},
{
"epoch": 0.3794519028070565,
"grad_norm": 1.71875,
"learning_rate": 2.6428344464139756e-05,
"loss": 0.4903,
"step": 3420
},
{
"epoch": 0.3816709197825363,
"grad_norm": 1.625,
"learning_rate": 2.6343244547697798e-05,
"loss": 0.3906,
"step": 3440
},
{
"epoch": 0.3838899367580162,
"grad_norm": 1.859375,
"learning_rate": 2.6257283193794742e-05,
"loss": 0.471,
"step": 3460
},
{
"epoch": 0.3861089537334961,
"grad_norm": 1.90625,
"learning_rate": 2.617046693057429e-05,
"loss": 0.4878,
"step": 3480
},
{
"epoch": 0.3883279707089759,
"grad_norm": 1.765625,
"learning_rate": 2.6082802351104317e-05,
"loss": 0.4718,
"step": 3500
},
{
"epoch": 0.3905469876844558,
"grad_norm": 1.8828125,
"learning_rate": 2.5994296112876222e-05,
"loss": 0.4649,
"step": 3520
},
{
"epoch": 0.39276600465993566,
"grad_norm": 1.2109375,
"learning_rate": 2.5904954937299267e-05,
"loss": 0.3973,
"step": 3540
},
{
"epoch": 0.3949850216354155,
"grad_norm": 1.4609375,
"learning_rate": 2.5814785609190197e-05,
"loss": 0.4785,
"step": 3560
},
{
"epoch": 0.39720403861089537,
"grad_norm": 2.171875,
"learning_rate": 2.5723794976257947e-05,
"loss": 0.4512,
"step": 3580
},
{
"epoch": 0.39942305558637525,
"grad_norm": 1.7265625,
"learning_rate": 2.5631989948583623e-05,
"loss": 0.4931,
"step": 3600
},
{
"epoch": 0.4016420725618551,
"grad_norm": 1.1796875,
"learning_rate": 2.553937749809572e-05,
"loss": 0.4183,
"step": 3620
},
{
"epoch": 0.40386108953733496,
"grad_norm": 1.7890625,
"learning_rate": 2.544596465804068e-05,
"loss": 0.4321,
"step": 3640
},
{
"epoch": 0.40608010651281484,
"grad_norm": 2.109375,
"learning_rate": 2.5351758522448724e-05,
"loss": 0.489,
"step": 3660
},
{
"epoch": 0.40829912348829467,
"grad_norm": 2.03125,
"learning_rate": 2.5256766245595166e-05,
"loss": 0.4398,
"step": 3680
},
{
"epoch": 0.41051814046377455,
"grad_norm": 1.6484375,
"learning_rate": 2.516099504145703e-05,
"loss": 0.4831,
"step": 3700
},
{
"epoch": 0.41273715743925443,
"grad_norm": 1.90625,
"learning_rate": 2.5064452183165283e-05,
"loss": 0.4365,
"step": 3720
},
{
"epoch": 0.41495617441473426,
"grad_norm": 1.359375,
"learning_rate": 2.496714500245241e-05,
"loss": 0.4309,
"step": 3740
},
{
"epoch": 0.41717519139021414,
"grad_norm": 1.765625,
"learning_rate": 2.4869080889095693e-05,
"loss": 0.5378,
"step": 3760
},
{
"epoch": 0.419394208365694,
"grad_norm": 1.75,
"learning_rate": 2.477026729035595e-05,
"loss": 0.4905,
"step": 3780
},
{
"epoch": 0.42161322534117385,
"grad_norm": 1.859375,
"learning_rate": 2.4670711710412026e-05,
"loss": 0.4187,
"step": 3800
},
{
"epoch": 0.42383224231665373,
"grad_norm": 1.9140625,
"learning_rate": 2.457042170979086e-05,
"loss": 0.4817,
"step": 3820
},
{
"epoch": 0.4260512592921336,
"grad_norm": 1.6953125,
"learning_rate": 2.4469404904793338e-05,
"loss": 0.5108,
"step": 3840
},
{
"epoch": 0.42827027626761344,
"grad_norm": 1.46875,
"learning_rate": 2.4367668966915885e-05,
"loss": 0.5112,
"step": 3860
},
{
"epoch": 0.4304892932430933,
"grad_norm": 3.046875,
"learning_rate": 2.4265221622267876e-05,
"loss": 0.5353,
"step": 3880
},
{
"epoch": 0.43270831021857314,
"grad_norm": 1.6640625,
"learning_rate": 2.4162070650984893e-05,
"loss": 0.4684,
"step": 3900
},
{
"epoch": 0.434927327194053,
"grad_norm": 1.5078125,
"learning_rate": 2.4058223886637872e-05,
"loss": 0.4374,
"step": 3920
},
{
"epoch": 0.4371463441695329,
"grad_norm": 1.6953125,
"learning_rate": 2.3953689215638194e-05,
"loss": 0.3753,
"step": 3940
},
{
"epoch": 0.43936536114501273,
"grad_norm": 1.4921875,
"learning_rate": 2.3848474576638807e-05,
"loss": 0.4427,
"step": 3960
},
{
"epoch": 0.4415843781204926,
"grad_norm": 1.25,
"learning_rate": 2.3742587959931285e-05,
"loss": 0.5074,
"step": 3980
},
{
"epoch": 0.4438033950959725,
"grad_norm": 1.7265625,
"learning_rate": 2.3636037406839076e-05,
"loss": 0.4841,
"step": 4000
},
{
"epoch": 0.4460224120714523,
"grad_norm": 1.8203125,
"learning_rate": 2.3528831009106786e-05,
"loss": 0.4643,
"step": 4020
},
{
"epoch": 0.4482414290469322,
"grad_norm": 1.25,
"learning_rate": 2.3420976908285687e-05,
"loss": 0.4976,
"step": 4040
},
{
"epoch": 0.4504604460224121,
"grad_norm": 1.828125,
"learning_rate": 2.3312483295115424e-05,
"loss": 0.5453,
"step": 4060
},
{
"epoch": 0.4526794629978919,
"grad_norm": 2.921875,
"learning_rate": 2.320335840890198e-05,
"loss": 0.46,
"step": 4080
},
{
"epoch": 0.4548984799733718,
"grad_norm": 2.03125,
"learning_rate": 2.3093610536891965e-05,
"loss": 0.5129,
"step": 4100
},
{
"epoch": 0.4571174969488517,
"grad_norm": 1.4375,
"learning_rate": 2.2983248013643253e-05,
"loss": 0.4429,
"step": 4120
},
{
"epoch": 0.4593365139243315,
"grad_norm": 2.03125,
"learning_rate": 2.2872279220392054e-05,
"loss": 0.4817,
"step": 4140
},
{
"epoch": 0.4615555308998114,
"grad_norm": 1.8984375,
"learning_rate": 2.2760712584416386e-05,
"loss": 0.51,
"step": 4160
},
{
"epoch": 0.46377454787529127,
"grad_norm": 1.2734375,
"learning_rate": 2.2648556578396107e-05,
"loss": 0.5001,
"step": 4180
},
{
"epoch": 0.4659935648507711,
"grad_norm": 2.34375,
"learning_rate": 2.2535819719769487e-05,
"loss": 0.4739,
"step": 4200
},
{
"epoch": 0.468212581826251,
"grad_norm": 1.484375,
"learning_rate": 2.242251057008633e-05,
"loss": 0.474,
"step": 4220
},
{
"epoch": 0.47043159880173085,
"grad_norm": 1.7421875,
"learning_rate": 2.2308637734357826e-05,
"loss": 0.4622,
"step": 4240
},
{
"epoch": 0.4726506157772107,
"grad_norm": 1.4296875,
"learning_rate": 2.219420986040305e-05,
"loss": 0.5075,
"step": 4260
},
{
"epoch": 0.47486963275269056,
"grad_norm": 1.7734375,
"learning_rate": 2.2079235638192203e-05,
"loss": 0.4817,
"step": 4280
},
{
"epoch": 0.47708864972817044,
"grad_norm": 1.3671875,
"learning_rate": 2.1963723799186706e-05,
"loss": 0.445,
"step": 4300
},
{
"epoch": 0.47930766670365027,
"grad_norm": 1.6796875,
"learning_rate": 2.184768311567608e-05,
"loss": 0.475,
"step": 4320
},
{
"epoch": 0.48152668367913015,
"grad_norm": 1.640625,
"learning_rate": 2.1731122400111764e-05,
"loss": 0.4418,
"step": 4340
},
{
"epoch": 0.48374570065461003,
"grad_norm": 1.765625,
"learning_rate": 2.161405050443789e-05,
"loss": 0.4722,
"step": 4360
},
{
"epoch": 0.48596471763008986,
"grad_norm": 1.75,
"learning_rate": 2.1496476319419002e-05,
"loss": 0.4814,
"step": 4380
},
{
"epoch": 0.48818373460556974,
"grad_norm": 1.59375,
"learning_rate": 2.137840877396491e-05,
"loss": 0.5435,
"step": 4400
},
{
"epoch": 0.49040275158104957,
"grad_norm": 1.734375,
"learning_rate": 2.125985683445258e-05,
"loss": 0.4521,
"step": 4420
},
{
"epoch": 0.49262176855652945,
"grad_norm": 1.6640625,
"learning_rate": 2.114082950404519e-05,
"loss": 0.452,
"step": 4440
},
{
"epoch": 0.49484078553200933,
"grad_norm": 1.2734375,
"learning_rate": 2.1021335822008447e-05,
"loss": 0.5176,
"step": 4460
},
{
"epoch": 0.49705980250748916,
"grad_norm": 1.6328125,
"learning_rate": 2.0901384863024078e-05,
"loss": 0.4307,
"step": 4480
},
{
"epoch": 0.49927881948296904,
"grad_norm": 1.6640625,
"learning_rate": 2.0780985736500696e-05,
"loss": 0.4856,
"step": 4500
},
{
"epoch": 0.5014978364584489,
"grad_norm": 1.9609375,
"learning_rate": 2.0660147585881994e-05,
"loss": 0.4339,
"step": 4520
},
{
"epoch": 0.5037168534339288,
"grad_norm": 2.046875,
"learning_rate": 2.0538879587952382e-05,
"loss": 0.4902,
"step": 4540
},
{
"epoch": 0.5059358704094087,
"grad_norm": 1.8671875,
"learning_rate": 2.0417190952140064e-05,
"loss": 0.4343,
"step": 4560
},
{
"epoch": 0.5081548873848885,
"grad_norm": 1.7265625,
"learning_rate": 2.029509091981765e-05,
"loss": 0.4416,
"step": 4580
},
{
"epoch": 0.5103739043603683,
"grad_norm": 1.9375,
"learning_rate": 2.0172588763600335e-05,
"loss": 0.4802,
"step": 4600
},
{
"epoch": 0.5125929213358482,
"grad_norm": 1.703125,
"learning_rate": 2.0049693786641734e-05,
"loss": 0.4794,
"step": 4620
},
{
"epoch": 0.5148119383113281,
"grad_norm": 2.8125,
"learning_rate": 1.9926415321927347e-05,
"loss": 0.5092,
"step": 4640
},
{
"epoch": 0.517030955286808,
"grad_norm": 1.3984375,
"learning_rate": 1.980276273156581e-05,
"loss": 0.4595,
"step": 4660
},
{
"epoch": 0.5192499722622878,
"grad_norm": 1.484375,
"learning_rate": 1.9678745406077886e-05,
"loss": 0.452,
"step": 4680
},
{
"epoch": 0.5214689892377676,
"grad_norm": 1.703125,
"learning_rate": 1.9554372763683337e-05,
"loss": 0.4528,
"step": 4700
},
{
"epoch": 0.5236880062132475,
"grad_norm": 1.5,
"learning_rate": 1.9429654249585684e-05,
"loss": 0.4743,
"step": 4720
},
{
"epoch": 0.5259070231887274,
"grad_norm": 1.5703125,
"learning_rate": 1.9304599335254894e-05,
"loss": 0.4476,
"step": 4740
},
{
"epoch": 0.5281260401642073,
"grad_norm": 1.546875,
"learning_rate": 1.9179217517708117e-05,
"loss": 0.447,
"step": 4760
},
{
"epoch": 0.5303450571396872,
"grad_norm": 1.625,
"learning_rate": 1.9053518318788428e-05,
"loss": 0.4606,
"step": 4780
},
{
"epoch": 0.5325640741151669,
"grad_norm": 1.6875,
"learning_rate": 1.8927511284441722e-05,
"loss": 0.4561,
"step": 4800
},
{
"epoch": 0.5347830910906468,
"grad_norm": 1.8359375,
"learning_rate": 1.880120598399178e-05,
"loss": 0.4522,
"step": 4820
},
{
"epoch": 0.5370021080661267,
"grad_norm": 1.9921875,
"learning_rate": 1.8674612009413536e-05,
"loss": 0.4943,
"step": 4840
},
{
"epoch": 0.5392211250416066,
"grad_norm": 1.703125,
"learning_rate": 1.8547738974604623e-05,
"loss": 0.5822,
"step": 4860
},
{
"epoch": 0.5414401420170865,
"grad_norm": 1.7734375,
"learning_rate": 1.842059651465531e-05,
"loss": 0.4273,
"step": 4880
},
{
"epoch": 0.5436591589925663,
"grad_norm": 1.6953125,
"learning_rate": 1.829319428511673e-05,
"loss": 0.4704,
"step": 4900
},
{
"epoch": 0.5458781759680461,
"grad_norm": 1.671875,
"learning_rate": 1.816554196126767e-05,
"loss": 0.5452,
"step": 4920
},
{
"epoch": 0.548097192943526,
"grad_norm": 1.8203125,
"learning_rate": 1.803764923737974e-05,
"loss": 0.436,
"step": 4940
},
{
"epoch": 0.5503162099190059,
"grad_norm": 1.7578125,
"learning_rate": 1.7909525825981214e-05,
"loss": 0.4518,
"step": 4960
},
{
"epoch": 0.5525352268944858,
"grad_norm": 2.03125,
"learning_rate": 1.778118145711942e-05,
"loss": 0.4138,
"step": 4980
},
{
"epoch": 0.5547542438699656,
"grad_norm": 2.46875,
"learning_rate": 1.7652625877621793e-05,
"loss": 0.4332,
"step": 5000
},
{
"epoch": 0.5569732608454455,
"grad_norm": 2.09375,
"learning_rate": 1.7523868850355704e-05,
"loss": 0.4842,
"step": 5020
},
{
"epoch": 0.5591922778209253,
"grad_norm": 1.6328125,
"learning_rate": 1.7394920153487022e-05,
"loss": 0.4935,
"step": 5040
},
{
"epoch": 0.5614112947964052,
"grad_norm": 1.6953125,
"learning_rate": 1.7265789579737528e-05,
"loss": 0.5129,
"step": 5060
},
{
"epoch": 0.563630311771885,
"grad_norm": 1.65625,
"learning_rate": 1.7136486935641256e-05,
"loss": 0.4281,
"step": 5080
},
{
"epoch": 0.5658493287473649,
"grad_norm": 2.015625,
"learning_rate": 1.7007022040799726e-05,
"loss": 0.4634,
"step": 5100
},
{
"epoch": 0.5680683457228448,
"grad_norm": 2.15625,
"learning_rate": 1.687740472713623e-05,
"loss": 0.5225,
"step": 5120
},
{
"epoch": 0.5702873626983247,
"grad_norm": 1.9921875,
"learning_rate": 1.674764483814918e-05,
"loss": 0.4838,
"step": 5140
},
{
"epoch": 0.5725063796738045,
"grad_norm": 1.7421875,
"learning_rate": 1.661775222816453e-05,
"loss": 0.4291,
"step": 5160
},
{
"epoch": 0.5747253966492843,
"grad_norm": 1.671875,
"learning_rate": 1.648773676158747e-05,
"loss": 0.3925,
"step": 5180
},
{
"epoch": 0.5769444136247642,
"grad_norm": 1.734375,
"learning_rate": 1.6357608312153223e-05,
"loss": 0.4385,
"step": 5200
},
{
"epoch": 0.5791634306002441,
"grad_norm": 1.71875,
"learning_rate": 1.6227376762177272e-05,
"loss": 0.4144,
"step": 5220
},
{
"epoch": 0.581382447575724,
"grad_norm": 2.71875,
"learning_rate": 1.6097052001804825e-05,
"loss": 0.5116,
"step": 5240
},
{
"epoch": 0.5836014645512038,
"grad_norm": 1.953125,
"learning_rate": 1.5966643928259753e-05,
"loss": 0.4869,
"step": 5260
},
{
"epoch": 0.5858204815266836,
"grad_norm": 1.9140625,
"learning_rate": 1.5836162445092963e-05,
"loss": 0.4644,
"step": 5280
},
{
"epoch": 0.5880394985021635,
"grad_norm": 1.8203125,
"learning_rate": 1.5705617461430282e-05,
"loss": 0.3844,
"step": 5300
},
{
"epoch": 0.5902585154776434,
"grad_norm": 1.75,
"learning_rate": 1.5575018891219944e-05,
"loss": 0.4233,
"step": 5320
},
{
"epoch": 0.5924775324531233,
"grad_norm": 1.140625,
"learning_rate": 1.5444376652479706e-05,
"loss": 0.5273,
"step": 5340
},
{
"epoch": 0.5946965494286032,
"grad_norm": 1.921875,
"learning_rate": 1.531370066654362e-05,
"loss": 0.4783,
"step": 5360
},
{
"epoch": 0.5969155664040829,
"grad_norm": 1.796875,
"learning_rate": 1.5183000857308604e-05,
"loss": 0.4747,
"step": 5380
},
{
"epoch": 0.5991345833795628,
"grad_norm": 1.6484375,
"learning_rate": 1.5052287150480774e-05,
"loss": 0.4335,
"step": 5400
},
{
"epoch": 0.6013536003550427,
"grad_norm": 1.75,
"learning_rate": 1.4921569472821673e-05,
"loss": 0.4489,
"step": 5420
},
{
"epoch": 0.6035726173305226,
"grad_norm": 2.21875,
"learning_rate": 1.4790857751394398e-05,
"loss": 0.4495,
"step": 5440
},
{
"epoch": 0.6057916343060025,
"grad_norm": 1.5703125,
"learning_rate": 1.4660161912809718e-05,
"loss": 0.4516,
"step": 5460
},
{
"epoch": 0.6080106512814824,
"grad_norm": 1.4765625,
"learning_rate": 1.4529491882472209e-05,
"loss": 0.4418,
"step": 5480
},
{
"epoch": 0.6102296682569621,
"grad_norm": 1.6015625,
"learning_rate": 1.4398857583826501e-05,
"loss": 0.4701,
"step": 5500
},
{
"epoch": 0.612448685232442,
"grad_norm": 2.390625,
"learning_rate": 1.4268268937603659e-05,
"loss": 0.4957,
"step": 5520
},
{
"epoch": 0.6146677022079219,
"grad_norm": 1.7421875,
"learning_rate": 1.413773586106777e-05,
"loss": 0.5176,
"step": 5540
},
{
"epoch": 0.6168867191834018,
"grad_norm": 1.8125,
"learning_rate": 1.400726826726282e-05,
"loss": 0.4252,
"step": 5560
},
{
"epoch": 0.6191057361588816,
"grad_norm": 1.4453125,
"learning_rate": 1.3876876064259836e-05,
"loss": 0.473,
"step": 5580
},
{
"epoch": 0.6213247531343615,
"grad_norm": 1.6796875,
"learning_rate": 1.3746569154404477e-05,
"loss": 0.4589,
"step": 5600
},
{
"epoch": 0.6235437701098413,
"grad_norm": 2.734375,
"learning_rate": 1.3616357433564993e-05,
"loss": 0.4083,
"step": 5620
},
{
"epoch": 0.6257627870853212,
"grad_norm": 1.75,
"learning_rate": 1.348625079038071e-05,
"loss": 0.4638,
"step": 5640
},
{
"epoch": 0.6279818040608011,
"grad_norm": 2.0625,
"learning_rate": 1.335625910551108e-05,
"loss": 0.4703,
"step": 5660
},
{
"epoch": 0.630200821036281,
"grad_norm": 1.7109375,
"learning_rate": 1.3226392250885288e-05,
"loss": 0.4683,
"step": 5680
},
{
"epoch": 0.6324198380117608,
"grad_norm": 1.2265625,
"learning_rate": 1.3096660088952581e-05,
"loss": 0.4641,
"step": 5700
},
{
"epoch": 0.6346388549872406,
"grad_norm": 1.6953125,
"learning_rate": 1.2967072471933255e-05,
"loss": 0.4324,
"step": 5720
},
{
"epoch": 0.6368578719627205,
"grad_norm": 1.6953125,
"learning_rate": 1.283763924107046e-05,
"loss": 0.5183,
"step": 5740
},
{
"epoch": 0.6390768889382004,
"grad_norm": 1.4921875,
"learning_rate": 1.2708370225882848e-05,
"loss": 0.4178,
"step": 5760
},
{
"epoch": 0.6412959059136802,
"grad_norm": 2.046875,
"learning_rate": 1.2579275243418074e-05,
"loss": 0.4503,
"step": 5780
},
{
"epoch": 0.6435149228891601,
"grad_norm": 1.21875,
"learning_rate": 1.245036409750725e-05,
"loss": 0.4796,
"step": 5800
},
{
"epoch": 0.64573393986464,
"grad_norm": 1.6015625,
"learning_rate": 1.2321646578020452e-05,
"loss": 0.4437,
"step": 5820
},
{
"epoch": 0.6479529568401198,
"grad_norm": 2.09375,
"learning_rate": 1.219313246012321e-05,
"loss": 0.4504,
"step": 5840
},
{
"epoch": 0.6501719738155997,
"grad_norm": 1.2265625,
"learning_rate": 1.2064831503534185e-05,
"loss": 0.5122,
"step": 5860
},
{
"epoch": 0.6523909907910795,
"grad_norm": 1.7890625,
"learning_rate": 1.1936753451783973e-05,
"loss": 0.4294,
"step": 5880
},
{
"epoch": 0.6546100077665594,
"grad_norm": 2.296875,
"learning_rate": 1.1808908031475151e-05,
"loss": 0.4895,
"step": 5900
},
{
"epoch": 0.6568290247420393,
"grad_norm": 1.140625,
"learning_rate": 1.1681304951543635e-05,
"loss": 0.4824,
"step": 5920
},
{
"epoch": 0.6590480417175192,
"grad_norm": 1.875,
"learning_rate": 1.1553953902521321e-05,
"loss": 0.525,
"step": 5940
},
{
"epoch": 0.661267058692999,
"grad_norm": 1.8359375,
"learning_rate": 1.1426864555800195e-05,
"loss": 0.4289,
"step": 5960
},
{
"epoch": 0.6634860756684788,
"grad_norm": 1.9375,
"learning_rate": 1.1300046562897837e-05,
"loss": 0.4695,
"step": 5980
},
{
"epoch": 0.6657050926439587,
"grad_norm": 1.7109375,
"learning_rate": 1.1173509554724461e-05,
"loss": 0.465,
"step": 6000
},
{
"epoch": 0.6679241096194386,
"grad_norm": 1.5625,
"learning_rate": 1.104726314085153e-05,
"loss": 0.4641,
"step": 6020
},
{
"epoch": 0.6701431265949185,
"grad_norm": 1.84375,
"learning_rate": 1.0921316908781965e-05,
"loss": 0.4559,
"step": 6040
},
{
"epoch": 0.6723621435703984,
"grad_norm": 1.4375,
"learning_rate": 1.079568042322205e-05,
"loss": 0.4938,
"step": 6060
},
{
"epoch": 0.6745811605458781,
"grad_norm": 1.2734375,
"learning_rate": 1.0670363225355054e-05,
"loss": 0.4247,
"step": 6080
},
{
"epoch": 0.676800177521358,
"grad_norm": 1.5390625,
"learning_rate": 1.0545374832116658e-05,
"loss": 0.4739,
"step": 6100
},
{
"epoch": 0.6790191944968379,
"grad_norm": 1.640625,
"learning_rate": 1.042072473547221e-05,
"loss": 0.4923,
"step": 6120
},
{
"epoch": 0.6812382114723178,
"grad_norm": 1.5,
"learning_rate": 1.0296422401695867e-05,
"loss": 0.5248,
"step": 6140
},
{
"epoch": 0.6834572284477977,
"grad_norm": 1.5390625,
"learning_rate": 1.017247727065172e-05,
"loss": 0.4336,
"step": 6160
},
{
"epoch": 0.6856762454232775,
"grad_norm": 1.7265625,
"learning_rate": 1.0048898755076885e-05,
"loss": 0.4915,
"step": 6180
},
{
"epoch": 0.6878952623987573,
"grad_norm": 1.515625,
"learning_rate": 9.925696239866679e-06,
"loss": 0.4908,
"step": 6200
},
{
"epoch": 0.6901142793742372,
"grad_norm": 1.4375,
"learning_rate": 9.802879081361927e-06,
"loss": 0.4512,
"step": 6220
},
{
"epoch": 0.6923332963497171,
"grad_norm": 2.21875,
"learning_rate": 9.680456606638376e-06,
"loss": 0.4356,
"step": 6240
},
{
"epoch": 0.694552313325197,
"grad_norm": 1.796875,
"learning_rate": 9.558438112798397e-06,
"loss": 0.4321,
"step": 6260
},
{
"epoch": 0.6967713303006768,
"grad_norm": 1.421875,
"learning_rate": 9.436832866264942e-06,
"loss": 0.4288,
"step": 6280
},
{
"epoch": 0.6989903472761566,
"grad_norm": 2.140625,
"learning_rate": 9.3156501020778e-06,
"loss": 0.4119,
"step": 6300
},
{
"epoch": 0.7012093642516365,
"grad_norm": 1.7421875,
"learning_rate": 9.194899023192295e-06,
"loss": 0.4729,
"step": 6320
},
{
"epoch": 0.7034283812271164,
"grad_norm": 2.03125,
"learning_rate": 9.074588799780359e-06,
"loss": 0.4438,
"step": 6340
},
{
"epoch": 0.7056473982025963,
"grad_norm": 1.703125,
"learning_rate": 8.95472856853414e-06,
"loss": 0.4509,
"step": 6360
},
{
"epoch": 0.7078664151780761,
"grad_norm": 1.453125,
"learning_rate": 8.835327431972136e-06,
"loss": 0.4812,
"step": 6380
},
{
"epoch": 0.710085432153556,
"grad_norm": 1.4375,
"learning_rate": 8.716394457747915e-06,
"loss": 0.4796,
"step": 6400
},
{
"epoch": 0.7123044491290358,
"grad_norm": 0.9375,
"learning_rate": 8.597938677961505e-06,
"loss": 0.4138,
"step": 6420
},
{
"epoch": 0.7145234661045157,
"grad_norm": 1.71875,
"learning_rate": 8.479969088473462e-06,
"loss": 0.4161,
"step": 6440
},
{
"epoch": 0.7167424830799956,
"grad_norm": 2.203125,
"learning_rate": 8.362494648221697e-06,
"loss": 0.4685,
"step": 6460
},
{
"epoch": 0.7189615000554754,
"grad_norm": 1.6875,
"learning_rate": 8.245524278541116e-06,
"loss": 0.4476,
"step": 6480
},
{
"epoch": 0.7211805170309553,
"grad_norm": 1.671875,
"learning_rate": 8.129066862486115e-06,
"loss": 0.5104,
"step": 6500
},
{
"epoch": 0.7233995340064352,
"grad_norm": 1.5625,
"learning_rate": 8.013131244155964e-06,
"loss": 0.4467,
"step": 6520
},
{
"epoch": 0.725618550981915,
"grad_norm": 2.234375,
"learning_rate": 7.89772622802316e-06,
"loss": 0.4317,
"step": 6540
},
{
"epoch": 0.7278375679573948,
"grad_norm": 2.109375,
"learning_rate": 7.782860578264806e-06,
"loss": 0.4398,
"step": 6560
},
{
"epoch": 0.7300565849328747,
"grad_norm": 1.6875,
"learning_rate": 7.668543018097014e-06,
"loss": 0.5054,
"step": 6580
},
{
"epoch": 0.7322756019083546,
"grad_norm": 1.4453125,
"learning_rate": 7.5547822291124715e-06,
"loss": 0.4968,
"step": 6600
},
{
"epoch": 0.7344946188838345,
"grad_norm": 1.5,
"learning_rate": 7.441586850621102e-06,
"loss": 0.4202,
"step": 6620
},
{
"epoch": 0.7367136358593144,
"grad_norm": 1.765625,
"learning_rate": 7.328965478993994e-06,
"loss": 0.4447,
"step": 6640
},
{
"epoch": 0.7389326528347941,
"grad_norm": 1.90625,
"learning_rate": 7.2169266670105555e-06,
"loss": 0.4869,
"step": 6660
},
{
"epoch": 0.741151669810274,
"grad_norm": 1.3203125,
"learning_rate": 7.105478923209001e-06,
"loss": 0.4744,
"step": 6680
},
{
"epoch": 0.7433706867857539,
"grad_norm": 0.76171875,
"learning_rate": 6.994630711240201e-06,
"loss": 0.4054,
"step": 6700
},
{
"epoch": 0.7455897037612338,
"grad_norm": 1.9765625,
"learning_rate": 6.884390449224898e-06,
"loss": 0.4307,
"step": 6720
},
{
"epoch": 0.7478087207367137,
"grad_norm": 1.921875,
"learning_rate": 6.774766509114435e-06,
"loss": 0.4728,
"step": 6740
},
{
"epoch": 0.7500277377121934,
"grad_norm": 1.53125,
"learning_rate": 6.66576721605496e-06,
"loss": 0.4254,
"step": 6760
},
{
"epoch": 0.7522467546876733,
"grad_norm": 1.78125,
"learning_rate": 6.557400847755183e-06,
"loss": 0.4508,
"step": 6780
},
{
"epoch": 0.7544657716631532,
"grad_norm": 1.828125,
"learning_rate": 6.449675633857772e-06,
"loss": 0.4814,
"step": 6800
},
{
"epoch": 0.7566847886386331,
"grad_norm": 1.578125,
"learning_rate": 6.3425997553143315e-06,
"loss": 0.459,
"step": 6820
},
{
"epoch": 0.758903805614113,
"grad_norm": 1.7421875,
"learning_rate": 6.236181343764144e-06,
"loss": 0.464,
"step": 6840
},
{
"epoch": 0.7611228225895929,
"grad_norm": 2.21875,
"learning_rate": 6.130428480916626e-06,
"loss": 0.4897,
"step": 6860
},
{
"epoch": 0.7633418395650726,
"grad_norm": 1.5625,
"learning_rate": 6.025349197937577e-06,
"loss": 0.4614,
"step": 6880
},
{
"epoch": 0.7655608565405525,
"grad_norm": 1.71875,
"learning_rate": 5.920951474839266e-06,
"loss": 0.444,
"step": 6900
},
{
"epoch": 0.7677798735160324,
"grad_norm": 1.1640625,
"learning_rate": 5.817243239874434e-06,
"loss": 0.4649,
"step": 6920
},
{
"epoch": 0.7699988904915123,
"grad_norm": 1.8046875,
"learning_rate": 5.714232368934163e-06,
"loss": 0.4758,
"step": 6940
},
{
"epoch": 0.7722179074669921,
"grad_norm": 2.15625,
"learning_rate": 5.611926684949779e-06,
"loss": 0.4519,
"step": 6960
},
{
"epoch": 0.774436924442472,
"grad_norm": 1.5234375,
"learning_rate": 5.510333957298756e-06,
"loss": 0.4729,
"step": 6980
},
{
"epoch": 0.7766559414179518,
"grad_norm": 1.28125,
"learning_rate": 5.409461901214679e-06,
"loss": 0.4557,
"step": 7000
},
{
"epoch": 0.7788749583934317,
"grad_norm": 1.2265625,
"learning_rate": 5.3093181772013545e-06,
"loss": 0.4433,
"step": 7020
},
{
"epoch": 0.7810939753689116,
"grad_norm": 1.4375,
"learning_rate": 5.209910390451007e-06,
"loss": 0.4767,
"step": 7040
},
{
"epoch": 0.7833129923443914,
"grad_norm": 2.140625,
"learning_rate": 5.111246090266763e-06,
"loss": 0.4422,
"step": 7060
},
{
"epoch": 0.7855320093198713,
"grad_norm": 1.7265625,
"learning_rate": 5.0133327694893035e-06,
"loss": 0.4276,
"step": 7080
},
{
"epoch": 0.7877510262953512,
"grad_norm": 1.5859375,
"learning_rate": 4.916177863927856e-06,
"loss": 0.482,
"step": 7100
},
{
"epoch": 0.789970043270831,
"grad_norm": 1.71875,
"learning_rate": 4.819788751795485e-06,
"loss": 0.4817,
"step": 7120
},
{
"epoch": 0.7921890602463109,
"grad_norm": 1.6484375,
"learning_rate": 4.7241727531487925e-06,
"loss": 0.4702,
"step": 7140
},
{
"epoch": 0.7944080772217907,
"grad_norm": 1.9296875,
"learning_rate": 4.629337129331983e-06,
"loss": 0.4494,
"step": 7160
},
{
"epoch": 0.7966270941972706,
"grad_norm": 1.9453125,
"learning_rate": 4.535289082425438e-06,
"loss": 0.4411,
"step": 7180
},
{
"epoch": 0.7988461111727505,
"grad_norm": 2.015625,
"learning_rate": 4.442035754698759e-06,
"loss": 0.4671,
"step": 7200
},
{
"epoch": 0.8010651281482304,
"grad_norm": 2.359375,
"learning_rate": 4.349584228068369e-06,
"loss": 0.4077,
"step": 7220
},
{
"epoch": 0.8032841451237102,
"grad_norm": 1.3984375,
"learning_rate": 4.257941523559703e-06,
"loss": 0.5378,
"step": 7240
},
{
"epoch": 0.80550316209919,
"grad_norm": 1.84375,
"learning_rate": 4.167114600773983e-06,
"loss": 0.4719,
"step": 7260
},
{
"epoch": 0.8077221790746699,
"grad_norm": 1.140625,
"learning_rate": 4.0771103573597125e-06,
"loss": 0.4596,
"step": 7280
},
{
"epoch": 0.8099411960501498,
"grad_norm": 1.5703125,
"learning_rate": 3.987935628488841e-06,
"loss": 0.4812,
"step": 7300
},
{
"epoch": 0.8121602130256297,
"grad_norm": 1.65625,
"learning_rate": 3.899597186337676e-06,
"loss": 0.4636,
"step": 7320
},
{
"epoch": 0.8143792300011095,
"grad_norm": 1.609375,
"learning_rate": 3.812101739572605e-06,
"loss": 0.4605,
"step": 7340
},
{
"epoch": 0.8165982469765893,
"grad_norm": 2.09375,
"learning_rate": 3.725455932840593e-06,
"loss": 0.4368,
"step": 7360
},
{
"epoch": 0.8188172639520692,
"grad_norm": 1.8125,
"learning_rate": 3.6396663462645917e-06,
"loss": 0.4686,
"step": 7380
},
{
"epoch": 0.8210362809275491,
"grad_norm": 1.875,
"learning_rate": 3.554739494943813e-06,
"loss": 0.4701,
"step": 7400
},
{
"epoch": 0.823255297903029,
"grad_norm": 1.8984375,
"learning_rate": 3.470681828458962e-06,
"loss": 0.4595,
"step": 7420
},
{
"epoch": 0.8254743148785089,
"grad_norm": 1.328125,
"learning_rate": 3.3874997303824416e-06,
"loss": 0.4265,
"step": 7440
},
{
"epoch": 0.8276933318539886,
"grad_norm": 1.5,
"learning_rate": 3.305199517793557e-06,
"loss": 0.4927,
"step": 7460
},
{
"epoch": 0.8299123488294685,
"grad_norm": 1.7109375,
"learning_rate": 3.2237874407987776e-06,
"loss": 0.4211,
"step": 7480
},
{
"epoch": 0.8321313658049484,
"grad_norm": 2.28125,
"learning_rate": 3.1432696820570993e-06,
"loss": 0.4771,
"step": 7500
},
{
"epoch": 0.8343503827804283,
"grad_norm": 1.7109375,
"learning_rate": 3.0636523563104985e-06,
"loss": 0.4934,
"step": 7520
},
{
"epoch": 0.8365693997559082,
"grad_norm": 1.6328125,
"learning_rate": 2.9849415099195886e-06,
"loss": 0.5184,
"step": 7540
},
{
"epoch": 0.838788416731388,
"grad_norm": 1.234375,
"learning_rate": 2.9071431204044123e-06,
"loss": 0.4622,
"step": 7560
},
{
"epoch": 0.8410074337068678,
"grad_norm": 1.6171875,
"learning_rate": 2.8302630959905084e-06,
"loss": 0.4491,
"step": 7580
},
{
"epoch": 0.8432264506823477,
"grad_norm": 2.46875,
"learning_rate": 2.7543072751602246e-06,
"loss": 0.4093,
"step": 7600
},
{
"epoch": 0.8454454676578276,
"grad_norm": 1.671875,
"learning_rate": 2.6792814262093214e-06,
"loss": 0.4312,
"step": 7620
},
{
"epoch": 0.8476644846333075,
"grad_norm": 1.71875,
"learning_rate": 2.605191246808912e-06,
"loss": 0.455,
"step": 7640
},
{
"epoch": 0.8498835016087873,
"grad_norm": 2.109375,
"learning_rate": 2.5320423635727824e-06,
"loss": 0.4839,
"step": 7660
},
{
"epoch": 0.8521025185842672,
"grad_norm": 2.15625,
"learning_rate": 2.45984033163006e-06,
"loss": 0.5647,
"step": 7680
},
{
"epoch": 0.854321535559747,
"grad_norm": 1.6484375,
"learning_rate": 2.388590634203366e-06,
"loss": 0.4108,
"step": 7700
},
{
"epoch": 0.8565405525352269,
"grad_norm": 1.90625,
"learning_rate": 2.3182986821923934e-06,
"loss": 0.3933,
"step": 7720
},
{
"epoch": 0.8587595695107068,
"grad_norm": 2.015625,
"learning_rate": 2.2489698137629904e-06,
"loss": 0.413,
"step": 7740
},
{
"epoch": 0.8609785864861866,
"grad_norm": 2.140625,
"learning_rate": 2.1806092939417732e-06,
"loss": 0.516,
"step": 7760
},
{
"epoch": 0.8631976034616665,
"grad_norm": 1.40625,
"learning_rate": 2.1132223142162714e-06,
"loss": 0.4403,
"step": 7780
},
{
"epoch": 0.8654166204371463,
"grad_norm": 1.859375,
"learning_rate": 2.046813992140679e-06,
"loss": 0.4355,
"step": 7800
},
{
"epoch": 0.8676356374126262,
"grad_norm": 1.4375,
"learning_rate": 1.981389370947218e-06,
"loss": 0.4744,
"step": 7820
},
{
"epoch": 0.869854654388106,
"grad_norm": 2.359375,
"learning_rate": 1.9169534191631243e-06,
"loss": 0.4106,
"step": 7840
},
{
"epoch": 0.8720736713635859,
"grad_norm": 2.046875,
"learning_rate": 1.853511030233354e-06,
"loss": 0.4519,
"step": 7860
},
{
"epoch": 0.8742926883390658,
"grad_norm": 1.7421875,
"learning_rate": 1.79106702214893e-06,
"loss": 0.4568,
"step": 7880
},
{
"epoch": 0.8765117053145457,
"grad_norm": 1.5234375,
"learning_rate": 1.7296261370810695e-06,
"loss": 0.4734,
"step": 7900
},
{
"epoch": 0.8787307222900255,
"grad_norm": 1.828125,
"learning_rate": 1.669193041021041e-06,
"loss": 0.547,
"step": 7920
},
{
"epoch": 0.8809497392655053,
"grad_norm": 1.4453125,
"learning_rate": 1.6097723234258188e-06,
"loss": 0.5001,
"step": 7940
},
{
"epoch": 0.8831687562409852,
"grad_norm": 1.6875,
"learning_rate": 1.5513684968695574e-06,
"loss": 0.447,
"step": 7960
},
{
"epoch": 0.8853877732164651,
"grad_norm": 1.578125,
"learning_rate": 1.4939859967008768e-06,
"loss": 0.4161,
"step": 7980
},
{
"epoch": 0.887606790191945,
"grad_norm": 1.78125,
"learning_rate": 1.437629180706037e-06,
"loss": 0.4606,
"step": 8000
},
{
"epoch": 0.8898258071674249,
"grad_norm": 1.6015625,
"learning_rate": 1.382302328778e-06,
"loss": 0.4126,
"step": 8020
},
{
"epoch": 0.8920448241429046,
"grad_norm": 1.6015625,
"learning_rate": 1.328009642591394e-06,
"loss": 0.4313,
"step": 8040
},
{
"epoch": 0.8942638411183845,
"grad_norm": 1.4296875,
"learning_rate": 1.2747552452834388e-06,
"loss": 0.4436,
"step": 8060
},
{
"epoch": 0.8964828580938644,
"grad_norm": 1.6015625,
"learning_rate": 1.2225431811408133e-06,
"loss": 0.4591,
"step": 8080
},
{
"epoch": 0.8987018750693443,
"grad_norm": 1.484375,
"learning_rate": 1.1713774152925195e-06,
"loss": 0.4103,
"step": 8100
},
{
"epoch": 0.9009208920448242,
"grad_norm": 1.890625,
"learning_rate": 1.1212618334087693e-06,
"loss": 0.4196,
"step": 8120
},
{
"epoch": 0.903139909020304,
"grad_norm": 2.125,
"learning_rate": 1.0722002414058868e-06,
"loss": 0.4701,
"step": 8140
},
{
"epoch": 0.9053589259957838,
"grad_norm": 1.8671875,
"learning_rate": 1.0241963651572867e-06,
"loss": 0.4452,
"step": 8160
},
{
"epoch": 0.9075779429712637,
"grad_norm": 2.21875,
"learning_rate": 9.772538502105093e-07,
"loss": 0.4122,
"step": 8180
},
{
"epoch": 0.9097969599467436,
"grad_norm": 1.625,
"learning_rate": 9.313762615103761e-07,
"loss": 0.4265,
"step": 8200
},
{
"epoch": 0.9120159769222235,
"grad_norm": 1.9296875,
"learning_rate": 8.865670831282513e-07,
"loss": 0.4678,
"step": 8220
},
{
"epoch": 0.9142349938977034,
"grad_norm": 1.671875,
"learning_rate": 8.42829717997457e-07,
"loss": 0.4137,
"step": 8240
},
{
"epoch": 0.9164540108731832,
"grad_norm": 1.6484375,
"learning_rate": 8.001674876548471e-07,
"loss": 0.4939,
"step": 8260
},
{
"epoch": 0.918673027848663,
"grad_norm": 1.640625,
"learning_rate": 7.585836319885525e-07,
"loss": 0.4888,
"step": 8280
},
{
"epoch": 0.9208920448241429,
"grad_norm": 1.578125,
"learning_rate": 7.180813089919403e-07,
"loss": 0.4352,
"step": 8300
},
{
"epoch": 0.9231110617996228,
"grad_norm": 1.6640625,
"learning_rate": 6.78663594523788e-07,
"loss": 0.5086,
"step": 8320
},
{
"epoch": 0.9253300787751026,
"grad_norm": 1.96875,
"learning_rate": 6.403334820746876e-07,
"loss": 0.4695,
"step": 8340
},
{
"epoch": 0.9275490957505825,
"grad_norm": 1.5234375,
"learning_rate": 6.030938825397225e-07,
"loss": 0.3946,
"step": 8360
},
{
"epoch": 0.9297681127260623,
"grad_norm": 1.609375,
"learning_rate": 5.669476239973975e-07,
"loss": 0.4797,
"step": 8380
},
{
"epoch": 0.9319871297015422,
"grad_norm": 1.796875,
"learning_rate": 5.318974514948672e-07,
"loss": 0.4274,
"step": 8400
},
{
"epoch": 0.9342061466770221,
"grad_norm": 1.453125,
"learning_rate": 4.979460268394726e-07,
"loss": 0.4369,
"step": 8420
},
{
"epoch": 0.936425163652502,
"grad_norm": 1.6640625,
"learning_rate": 4.6509592839659666e-07,
"loss": 0.4452,
"step": 8440
},
{
"epoch": 0.9386441806279818,
"grad_norm": 1.7734375,
"learning_rate": 4.333496508938506e-07,
"loss": 0.4949,
"step": 8460
},
{
"epoch": 0.9408631976034617,
"grad_norm": 2.078125,
"learning_rate": 4.02709605231627e-07,
"loss": 0.4815,
"step": 8480
},
{
"epoch": 0.9430822145789415,
"grad_norm": 2.109375,
"learning_rate": 3.731781182999983e-07,
"loss": 0.5533,
"step": 8500
},
{
"epoch": 0.9453012315544214,
"grad_norm": 1.5234375,
"learning_rate": 3.447574328020109e-07,
"loss": 0.4741,
"step": 8520
},
{
"epoch": 0.9475202485299012,
"grad_norm": 1.5546875,
"learning_rate": 3.1744970708337205e-07,
"loss": 0.5182,
"step": 8540
},
{
"epoch": 0.9497392655053811,
"grad_norm": 1.4296875,
"learning_rate": 2.912570149685323e-07,
"loss": 0.4612,
"step": 8560
},
{
"epoch": 0.951958282480861,
"grad_norm": 1.3984375,
"learning_rate": 2.661813456032014e-07,
"loss": 0.4253,
"step": 8580
},
{
"epoch": 0.9541772994563409,
"grad_norm": 2.125,
"learning_rate": 2.4222460330327933e-07,
"loss": 0.5029,
"step": 8600
},
{
"epoch": 0.9563963164318207,
"grad_norm": 1.9921875,
"learning_rate": 2.1938860741023858e-07,
"loss": 0.4444,
"step": 8620
},
{
"epoch": 0.9586153334073005,
"grad_norm": 1.390625,
"learning_rate": 1.9767509215296297e-07,
"loss": 0.4607,
"step": 8640
},
{
"epoch": 0.9608343503827804,
"grad_norm": 1.7890625,
"learning_rate": 1.7708570651604306e-07,
"loss": 0.5045,
"step": 8660
},
{
"epoch": 0.9630533673582603,
"grad_norm": 1.484375,
"learning_rate": 1.5762201411454626e-07,
"loss": 0.4525,
"step": 8680
},
{
"epoch": 0.9652723843337402,
"grad_norm": 1.28125,
"learning_rate": 1.3928549307527183e-07,
"loss": 0.4484,
"step": 8700
},
{
"epoch": 0.9674914013092201,
"grad_norm": 1.7265625,
"learning_rate": 1.2207753592450078e-07,
"loss": 0.4194,
"step": 8720
},
{
"epoch": 0.9697104182846998,
"grad_norm": 1.7734375,
"learning_rate": 1.059994494822386e-07,
"loss": 0.4822,
"step": 8740
},
{
"epoch": 0.9719294352601797,
"grad_norm": 1.7421875,
"learning_rate": 9.105245476297653e-08,
"loss": 0.4584,
"step": 8760
},
{
"epoch": 0.9741484522356596,
"grad_norm": 1.5546875,
"learning_rate": 7.723768688296217e-08,
"loss": 0.4508,
"step": 8780
},
{
"epoch": 0.9763674692111395,
"grad_norm": 1.4921875,
"learning_rate": 6.455619497399534e-08,
"loss": 0.4953,
"step": 8800
},
{
"epoch": 0.9785864861866194,
"grad_norm": 1.9375,
"learning_rate": 5.300894210375329e-08,
"loss": 0.4791,
"step": 8820
},
{
"epoch": 0.9808055031620991,
"grad_norm": 1.4765625,
"learning_rate": 4.259680520265596e-08,
"loss": 0.4083,
"step": 8840
},
{
"epoch": 0.983024520137579,
"grad_norm": 1.6328125,
"learning_rate": 3.3320574997267595e-08,
"loss": 0.449,
"step": 8860
},
{
"epoch": 0.9852435371130589,
"grad_norm": 1.84375,
"learning_rate": 2.5180955950243056e-08,
"loss": 0.5022,
"step": 8880
},
{
"epoch": 0.9874625540885388,
"grad_norm": 1.203125,
"learning_rate": 1.8178566206837334e-08,
"loss": 0.54,
"step": 8900
},
{
"epoch": 0.9896815710640187,
"grad_norm": 1.8203125,
"learning_rate": 1.231393754795307e-08,
"loss": 0.4788,
"step": 8920
},
{
"epoch": 0.9919005880394985,
"grad_norm": 1.5546875,
"learning_rate": 7.587515349762874e-09,
"loss": 0.5039,
"step": 8940
},
{
"epoch": 0.9941196050149783,
"grad_norm": 1.8984375,
"learning_rate": 3.9996585498797145e-09,
"loss": 0.5317,
"step": 8960
},
{
"epoch": 0.9963386219904582,
"grad_norm": 2.0,
"learning_rate": 1.550639620103711e-09,
"loss": 0.4578,
"step": 8980
},
{
"epoch": 0.9985576389659381,
"grad_norm": 1.5234375,
"learning_rate": 2.406445457253659e-10,
"loss": 0.474,
"step": 9000
},
{
"epoch": 1.0,
"step": 9013,
"total_flos": 5.485972481640161e+17,
"train_loss": 0.4824531834622566,
"train_runtime": 26326.043,
"train_samples_per_second": 0.685,
"train_steps_per_second": 0.342
}
],
"logging_steps": 20,
"max_steps": 9013,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5.485972481640161e+17,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}