{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 10.0,
"eval_steps": 500,
"global_step": 2880,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.034722222222222224,
"grad_norm": 12.390419960021973,
"learning_rate": 6.944444444444445e-06,
"loss": 1.8579,
"step": 10
},
{
"epoch": 0.06944444444444445,
"grad_norm": 7.292122840881348,
"learning_rate": 1.388888888888889e-05,
"loss": 1.409,
"step": 20
},
{
"epoch": 0.10416666666666667,
"grad_norm": 3.3312809467315674,
"learning_rate": 2.0833333333333336e-05,
"loss": 0.4742,
"step": 30
},
{
"epoch": 0.1388888888888889,
"grad_norm": 1.8886680603027344,
"learning_rate": 2.777777777777778e-05,
"loss": 0.3359,
"step": 40
},
{
"epoch": 0.1736111111111111,
"grad_norm": 0.9730450510978699,
"learning_rate": 3.472222222222222e-05,
"loss": 0.2133,
"step": 50
},
{
"epoch": 0.20833333333333334,
"grad_norm": 1.1679153442382812,
"learning_rate": 4.166666666666667e-05,
"loss": 0.1603,
"step": 60
},
{
"epoch": 0.24305555555555555,
"grad_norm": 4.400736331939697,
"learning_rate": 4.8611111111111115e-05,
"loss": 0.1299,
"step": 70
},
{
"epoch": 0.2777777777777778,
"grad_norm": 1.0059056282043457,
"learning_rate": 5.555555555555556e-05,
"loss": 0.1123,
"step": 80
},
{
"epoch": 0.3125,
"grad_norm": 0.6611884832382202,
"learning_rate": 6.25e-05,
"loss": 0.0934,
"step": 90
},
{
"epoch": 0.3472222222222222,
"grad_norm": 0.879301130771637,
"learning_rate": 6.944444444444444e-05,
"loss": 0.0746,
"step": 100
},
{
"epoch": 0.3819444444444444,
"grad_norm": 0.8346568942070007,
"learning_rate": 7.638888888888889e-05,
"loss": 0.0692,
"step": 110
},
{
"epoch": 0.4166666666666667,
"grad_norm": 0.5676056742668152,
"learning_rate": 8.333333333333334e-05,
"loss": 0.0664,
"step": 120
},
{
"epoch": 0.4513888888888889,
"grad_norm": 0.6186522245407104,
"learning_rate": 9.027777777777779e-05,
"loss": 0.0523,
"step": 130
},
{
"epoch": 0.4861111111111111,
"grad_norm": 0.618405282497406,
"learning_rate": 9.722222222222223e-05,
"loss": 0.0596,
"step": 140
},
{
"epoch": 0.5208333333333334,
"grad_norm": 0.9579340815544128,
"learning_rate": 9.999881338905204e-05,
"loss": 0.0586,
"step": 150
},
{
"epoch": 0.5555555555555556,
"grad_norm": 0.6852683424949646,
"learning_rate": 9.999156208166614e-05,
"loss": 0.054,
"step": 160
},
{
"epoch": 0.5902777777777778,
"grad_norm": 0.6845273375511169,
"learning_rate": 9.997771965008657e-05,
"loss": 0.0568,
"step": 170
},
{
"epoch": 0.625,
"grad_norm": 0.49688324332237244,
"learning_rate": 9.995728791936504e-05,
"loss": 0.0541,
"step": 180
},
{
"epoch": 0.6597222222222222,
"grad_norm": 0.6374590992927551,
"learning_rate": 9.993026958331766e-05,
"loss": 0.047,
"step": 190
},
{
"epoch": 0.6944444444444444,
"grad_norm": 0.746904730796814,
"learning_rate": 9.989666820416974e-05,
"loss": 0.0514,
"step": 200
},
{
"epoch": 0.7291666666666666,
"grad_norm": 0.5761491656303406,
"learning_rate": 9.985648821208616e-05,
"loss": 0.0357,
"step": 210
},
{
"epoch": 0.7638888888888888,
"grad_norm": 0.401060551404953,
"learning_rate": 9.980973490458728e-05,
"loss": 0.0321,
"step": 220
},
{
"epoch": 0.7986111111111112,
"grad_norm": 0.3264583349227905,
"learning_rate": 9.97564144458505e-05,
"loss": 0.0351,
"step": 230
},
{
"epoch": 0.8333333333333334,
"grad_norm": 0.33728697896003723,
"learning_rate": 9.969653386589748e-05,
"loss": 0.0327,
"step": 240
},
{
"epoch": 0.8680555555555556,
"grad_norm": 0.3204336166381836,
"learning_rate": 9.963010105966736e-05,
"loss": 0.0353,
"step": 250
},
{
"epoch": 0.9027777777777778,
"grad_norm": 0.6982614994049072,
"learning_rate": 9.955712478597579e-05,
"loss": 0.0271,
"step": 260
},
{
"epoch": 0.9375,
"grad_norm": 0.5134708285331726,
"learning_rate": 9.947761466636014e-05,
"loss": 0.0319,
"step": 270
},
{
"epoch": 0.9722222222222222,
"grad_norm": 0.41447389125823975,
"learning_rate": 9.939158118381098e-05,
"loss": 0.0275,
"step": 280
},
{
"epoch": 1.0069444444444444,
"grad_norm": 0.36693140864372253,
"learning_rate": 9.929903568138989e-05,
"loss": 0.0275,
"step": 290
},
{
"epoch": 1.0416666666666667,
"grad_norm": 0.3795158863067627,
"learning_rate": 9.9199990360734e-05,
"loss": 0.0344,
"step": 300
},
{
"epoch": 1.0763888888888888,
"grad_norm": 0.2937488853931427,
"learning_rate": 9.909445828044727e-05,
"loss": 0.034,
"step": 310
},
{
"epoch": 1.1111111111111112,
"grad_norm": 0.3288098871707916,
"learning_rate": 9.89824533543787e-05,
"loss": 0.0259,
"step": 320
},
{
"epoch": 1.1458333333333333,
"grad_norm": 0.27956312894821167,
"learning_rate": 9.886399034978797e-05,
"loss": 0.036,
"step": 330
},
{
"epoch": 1.1805555555555556,
"grad_norm": 0.3101339638233185,
"learning_rate": 9.873908488539836e-05,
"loss": 0.03,
"step": 340
},
{
"epoch": 1.2152777777777777,
"grad_norm": 0.3746699094772339,
"learning_rate": 9.86077534293376e-05,
"loss": 0.0292,
"step": 350
},
{
"epoch": 1.25,
"grad_norm": 0.3677271604537964,
"learning_rate": 9.847001329696653e-05,
"loss": 0.0264,
"step": 360
},
{
"epoch": 1.2847222222222223,
"grad_norm": 0.3505748212337494,
"learning_rate": 9.832588264859624e-05,
"loss": 0.0283,
"step": 370
},
{
"epoch": 1.3194444444444444,
"grad_norm": 0.2949109673500061,
"learning_rate": 9.817538048709367e-05,
"loss": 0.0252,
"step": 380
},
{
"epoch": 1.3541666666666667,
"grad_norm": 0.2589399516582489,
"learning_rate": 9.801852665537627e-05,
"loss": 0.0258,
"step": 390
},
{
"epoch": 1.3888888888888888,
"grad_norm": 0.36217331886291504,
"learning_rate": 9.785534183379572e-05,
"loss": 0.0252,
"step": 400
},
{
"epoch": 1.4236111111111112,
"grad_norm": 0.2563369870185852,
"learning_rate": 9.768584753741134e-05,
"loss": 0.0266,
"step": 410
},
{
"epoch": 1.4583333333333333,
"grad_norm": 0.43410396575927734,
"learning_rate": 9.751006611315356e-05,
"loss": 0.0239,
"step": 420
},
{
"epoch": 1.4930555555555556,
"grad_norm": 0.5142470002174377,
"learning_rate": 9.732802073687745e-05,
"loss": 0.025,
"step": 430
},
{
"epoch": 1.5277777777777777,
"grad_norm": 0.31519609689712524,
"learning_rate": 9.713973541030716e-05,
"loss": 0.027,
"step": 440
},
{
"epoch": 1.5625,
"grad_norm": 0.25440528988838196,
"learning_rate": 9.694523495787149e-05,
"loss": 0.0239,
"step": 450
},
{
"epoch": 1.5972222222222223,
"grad_norm": 0.31716373562812805,
"learning_rate": 9.674454502343076e-05,
"loss": 0.0297,
"step": 460
},
{
"epoch": 1.6319444444444444,
"grad_norm": 0.31023451685905457,
"learning_rate": 9.653769206689595e-05,
"loss": 0.024,
"step": 470
},
{
"epoch": 1.6666666666666665,
"grad_norm": 0.16677865386009216,
"learning_rate": 9.632470336074009e-05,
"loss": 0.0252,
"step": 480
},
{
"epoch": 1.7013888888888888,
"grad_norm": 0.309967577457428,
"learning_rate": 9.610560698640241e-05,
"loss": 0.0265,
"step": 490
},
{
"epoch": 1.7361111111111112,
"grad_norm": 0.2867221534252167,
"learning_rate": 9.588043183058606e-05,
"loss": 0.0191,
"step": 500
},
{
"epoch": 1.7708333333333335,
"grad_norm": 0.19987526535987854,
"learning_rate": 9.564920758144951e-05,
"loss": 0.0238,
"step": 510
},
{
"epoch": 1.8055555555555556,
"grad_norm": 0.4381541609764099,
"learning_rate": 9.541196472469233e-05,
"loss": 0.0262,
"step": 520
},
{
"epoch": 1.8402777777777777,
"grad_norm": 0.31020158529281616,
"learning_rate": 9.51687345395358e-05,
"loss": 0.0235,
"step": 530
},
{
"epoch": 1.875,
"grad_norm": 0.33004629611968994,
"learning_rate": 9.491954909459895e-05,
"loss": 0.0234,
"step": 540
},
{
"epoch": 1.9097222222222223,
"grad_norm": 0.39373746514320374,
"learning_rate": 9.466444124367041e-05,
"loss": 0.0237,
"step": 550
},
{
"epoch": 1.9444444444444444,
"grad_norm": 0.43140795826911926,
"learning_rate": 9.440344462137689e-05,
"loss": 0.0284,
"step": 560
},
{
"epoch": 1.9791666666666665,
"grad_norm": 0.37478235363960266,
"learning_rate": 9.413659363874853e-05,
"loss": 0.0225,
"step": 570
},
{
"epoch": 2.013888888888889,
"grad_norm": 0.2924532890319824,
"learning_rate": 9.386392347868215e-05,
"loss": 0.0189,
"step": 580
},
{
"epoch": 2.048611111111111,
"grad_norm": 0.20395496487617493,
"learning_rate": 9.358547009130237e-05,
"loss": 0.0254,
"step": 590
},
{
"epoch": 2.0833333333333335,
"grad_norm": 0.44635412096977234,
"learning_rate": 9.330127018922194e-05,
"loss": 0.0226,
"step": 600
},
{
"epoch": 2.1180555555555554,
"grad_norm": 0.23846730589866638,
"learning_rate": 9.301136124270126e-05,
"loss": 0.0228,
"step": 610
},
{
"epoch": 2.1527777777777777,
"grad_norm": 0.24774909019470215,
"learning_rate": 9.271578147470825e-05,
"loss": 0.0254,
"step": 620
},
{
"epoch": 2.1875,
"grad_norm": 0.3856460154056549,
"learning_rate": 9.241456985587868e-05,
"loss": 0.0197,
"step": 630
},
{
"epoch": 2.2222222222222223,
"grad_norm": 0.30161726474761963,
"learning_rate": 9.210776609937829e-05,
"loss": 0.0203,
"step": 640
},
{
"epoch": 2.2569444444444446,
"grad_norm": 0.38972583413124084,
"learning_rate": 9.179541065566664e-05,
"loss": 0.0252,
"step": 650
},
{
"epoch": 2.2916666666666665,
"grad_norm": 0.1908106803894043,
"learning_rate": 9.147754470716408e-05,
"loss": 0.0257,
"step": 660
},
{
"epoch": 2.326388888888889,
"grad_norm": 0.23305340111255646,
"learning_rate": 9.115421016282193e-05,
"loss": 0.0202,
"step": 670
},
{
"epoch": 2.361111111111111,
"grad_norm": 0.39377719163894653,
"learning_rate": 9.082544965259715e-05,
"loss": 0.028,
"step": 680
},
{
"epoch": 2.3958333333333335,
"grad_norm": 0.29361796379089355,
"learning_rate": 9.049130652183166e-05,
"loss": 0.0166,
"step": 690
},
{
"epoch": 2.4305555555555554,
"grad_norm": 0.3843666613101959,
"learning_rate": 9.01518248255376e-05,
"loss": 0.0252,
"step": 700
},
{
"epoch": 2.4652777777777777,
"grad_norm": 0.31173354387283325,
"learning_rate": 8.980704932258878e-05,
"loss": 0.0217,
"step": 710
},
{
"epoch": 2.5,
"grad_norm": 0.31975364685058594,
"learning_rate": 8.945702546981969e-05,
"loss": 0.0189,
"step": 720
},
{
"epoch": 2.5347222222222223,
"grad_norm": 0.20601293444633484,
"learning_rate": 8.910179941603193e-05,
"loss": 0.0218,
"step": 730
},
{
"epoch": 2.5694444444444446,
"grad_norm": 0.30430278182029724,
"learning_rate": 8.874141799591007e-05,
"loss": 0.0257,
"step": 740
},
{
"epoch": 2.6041666666666665,
"grad_norm": 0.2161753624677658,
"learning_rate": 8.837592872384644e-05,
"loss": 0.024,
"step": 750
},
{
"epoch": 2.638888888888889,
"grad_norm": 0.5043374300003052,
"learning_rate": 8.800537978767682e-05,
"loss": 0.0218,
"step": 760
},
{
"epoch": 2.673611111111111,
"grad_norm": 0.3008856773376465,
"learning_rate": 8.762982004232709e-05,
"loss": 0.0198,
"step": 770
},
{
"epoch": 2.7083333333333335,
"grad_norm": 0.32244426012039185,
"learning_rate": 8.724929900337186e-05,
"loss": 0.0269,
"step": 780
},
{
"epoch": 2.7430555555555554,
"grad_norm": 0.35441824793815613,
"learning_rate": 8.68638668405062e-05,
"loss": 0.0214,
"step": 790
},
{
"epoch": 2.7777777777777777,
"grad_norm": 0.2707558572292328,
"learning_rate": 8.647357437093105e-05,
"loss": 0.0215,
"step": 800
},
{
"epoch": 2.8125,
"grad_norm": 0.29439160227775574,
"learning_rate": 8.60784730526531e-05,
"loss": 0.0175,
"step": 810
},
{
"epoch": 2.8472222222222223,
"grad_norm": 0.2966266870498657,
"learning_rate": 8.567861497770053e-05,
"loss": 0.0204,
"step": 820
},
{
"epoch": 2.8819444444444446,
"grad_norm": 0.27358055114746094,
"learning_rate": 8.527405286525475e-05,
"loss": 0.0216,
"step": 830
},
{
"epoch": 2.9166666666666665,
"grad_norm": 0.25975996255874634,
"learning_rate": 8.486484005469977e-05,
"loss": 0.0196,
"step": 840
},
{
"epoch": 2.951388888888889,
"grad_norm": 0.3131217062473297,
"learning_rate": 8.445103049858966e-05,
"loss": 0.0165,
"step": 850
},
{
"epoch": 2.986111111111111,
"grad_norm": 0.3194817900657654,
"learning_rate": 8.403267875553519e-05,
"loss": 0.0181,
"step": 860
},
{
"epoch": 3.0208333333333335,
"grad_norm": 0.24048690497875214,
"learning_rate": 8.360983998301053e-05,
"loss": 0.019,
"step": 870
},
{
"epoch": 3.0555555555555554,
"grad_norm": 0.2128559798002243,
"learning_rate": 8.318256993008107e-05,
"loss": 0.0181,
"step": 880
},
{
"epoch": 3.0902777777777777,
"grad_norm": 0.22403229773044586,
"learning_rate": 8.275092493005321e-05,
"loss": 0.0185,
"step": 890
},
{
"epoch": 3.125,
"grad_norm": 0.24800759553909302,
"learning_rate": 8.231496189304704e-05,
"loss": 0.0211,
"step": 900
},
{
"epoch": 3.1597222222222223,
"grad_norm": 0.187672957777977,
"learning_rate": 8.187473829849314e-05,
"loss": 0.0173,
"step": 910
},
{
"epoch": 3.1944444444444446,
"grad_norm": 0.2035113275051117,
"learning_rate": 8.14303121875541e-05,
"loss": 0.0187,
"step": 920
},
{
"epoch": 3.2291666666666665,
"grad_norm": 0.32637548446655273,
"learning_rate": 8.098174215547224e-05,
"loss": 0.018,
"step": 930
},
{
"epoch": 3.263888888888889,
"grad_norm": 0.2666160762310028,
"learning_rate": 8.052908734384399e-05,
"loss": 0.0165,
"step": 940
},
{
"epoch": 3.298611111111111,
"grad_norm": 0.29945555329322815,
"learning_rate": 8.007240743282247e-05,
"loss": 0.0177,
"step": 950
},
{
"epoch": 3.3333333333333335,
"grad_norm": 0.4292016923427582,
"learning_rate": 7.961176263324901e-05,
"loss": 0.018,
"step": 960
},
{
"epoch": 3.3680555555555554,
"grad_norm": 0.387088418006897,
"learning_rate": 7.914721367871454e-05,
"loss": 0.0208,
"step": 970
},
{
"epoch": 3.4027777777777777,
"grad_norm": 0.4121212065219879,
"learning_rate": 7.86788218175523e-05,
"loss": 0.0199,
"step": 980
},
{
"epoch": 3.4375,
"grad_norm": 0.3326111137866974,
"learning_rate": 7.820664880476256e-05,
"loss": 0.0176,
"step": 990
},
{
"epoch": 3.4722222222222223,
"grad_norm": 0.3561447262763977,
"learning_rate": 7.773075689387043e-05,
"loss": 0.0164,
"step": 1000
},
{
"epoch": 3.5069444444444446,
"grad_norm": 0.3860738277435303,
"learning_rate": 7.725120882871828e-05,
"loss": 0.021,
"step": 1010
},
{
"epoch": 3.5416666666666665,
"grad_norm": 0.2797831594944,
"learning_rate": 7.676806783519304e-05,
"loss": 0.0185,
"step": 1020
},
{
"epoch": 3.576388888888889,
"grad_norm": 0.31011706590652466,
"learning_rate": 7.628139761289038e-05,
"loss": 0.0168,
"step": 1030
},
{
"epoch": 3.611111111111111,
"grad_norm": 0.2268083244562149,
"learning_rate": 7.579126232671621e-05,
"loss": 0.0163,
"step": 1040
},
{
"epoch": 3.6458333333333335,
"grad_norm": 0.25035360455513,
"learning_rate": 7.529772659842685e-05,
"loss": 0.0181,
"step": 1050
},
{
"epoch": 3.6805555555555554,
"grad_norm": 0.401965856552124,
"learning_rate": 7.480085549810897e-05,
"loss": 0.0164,
"step": 1060
},
{
"epoch": 3.7152777777777777,
"grad_norm": 0.23698702454566956,
"learning_rate": 7.430071453560059e-05,
"loss": 0.0163,
"step": 1070
},
{
"epoch": 3.75,
"grad_norm": 0.22072561085224152,
"learning_rate": 7.379736965185368e-05,
"loss": 0.0193,
"step": 1080
},
{
"epoch": 3.7847222222222223,
"grad_norm": 0.2923282980918884,
"learning_rate": 7.32908872102405e-05,
"loss": 0.0192,
"step": 1090
},
{
"epoch": 3.8194444444444446,
"grad_norm": 0.3529258668422699,
"learning_rate": 7.278133398780366e-05,
"loss": 0.0189,
"step": 1100
},
{
"epoch": 3.8541666666666665,
"grad_norm": 0.2745247483253479,
"learning_rate": 7.226877716645217e-05,
"loss": 0.0198,
"step": 1110
},
{
"epoch": 3.888888888888889,
"grad_norm": 0.3626753091812134,
"learning_rate": 7.175328432410366e-05,
"loss": 0.0157,
"step": 1120
},
{
"epoch": 3.923611111111111,
"grad_norm": 0.24702580273151398,
"learning_rate": 7.123492342577472e-05,
"loss": 0.015,
"step": 1130
},
{
"epoch": 3.9583333333333335,
"grad_norm": 0.2592775523662567,
"learning_rate": 7.071376281461994e-05,
"loss": 0.0224,
"step": 1140
},
{
"epoch": 3.9930555555555554,
"grad_norm": 0.21476583182811737,
"learning_rate": 7.018987120292141e-05,
"loss": 0.0199,
"step": 1150
},
{
"epoch": 4.027777777777778,
"grad_norm": 0.47759580612182617,
"learning_rate": 6.966331766302916e-05,
"loss": 0.02,
"step": 1160
},
{
"epoch": 4.0625,
"grad_norm": 0.5051742196083069,
"learning_rate": 6.91341716182545e-05,
"loss": 0.0205,
"step": 1170
},
{
"epoch": 4.097222222222222,
"grad_norm": 0.2376144826412201,
"learning_rate": 6.860250283371681e-05,
"loss": 0.0159,
"step": 1180
},
{
"epoch": 4.131944444444445,
"grad_norm": 0.2458970993757248,
"learning_rate": 6.806838140714545e-05,
"loss": 0.0161,
"step": 1190
},
{
"epoch": 4.166666666666667,
"grad_norm": 0.1450349986553192,
"learning_rate": 6.753187775963773e-05,
"loss": 0.0138,
"step": 1200
},
{
"epoch": 4.201388888888889,
"grad_norm": 0.16866087913513184,
"learning_rate": 6.69930626263742e-05,
"loss": 0.0171,
"step": 1210
},
{
"epoch": 4.236111111111111,
"grad_norm": 0.24081069231033325,
"learning_rate": 6.645200704729266e-05,
"loss": 0.0181,
"step": 1220
},
{
"epoch": 4.270833333333333,
"grad_norm": 0.2583770155906677,
"learning_rate": 6.590878235772191e-05,
"loss": 0.0145,
"step": 1230
},
{
"epoch": 4.305555555555555,
"grad_norm": 0.3081924319267273,
"learning_rate": 6.536346017897653e-05,
"loss": 0.016,
"step": 1240
},
{
"epoch": 4.340277777777778,
"grad_norm": 0.26682665944099426,
"learning_rate": 6.481611240891396e-05,
"loss": 0.0185,
"step": 1250
},
{
"epoch": 4.375,
"grad_norm": 0.346853107213974,
"learning_rate": 6.426681121245527e-05,
"loss": 0.0161,
"step": 1260
},
{
"epoch": 4.409722222222222,
"grad_norm": 0.20850667357444763,
"learning_rate": 6.371562901207045e-05,
"loss": 0.0126,
"step": 1270
},
{
"epoch": 4.444444444444445,
"grad_norm": 0.19474495947360992,
"learning_rate": 6.316263847822997e-05,
"loss": 0.0158,
"step": 1280
},
{
"epoch": 4.479166666666667,
"grad_norm": 0.32026270031929016,
"learning_rate": 6.260791251982354e-05,
"loss": 0.0174,
"step": 1290
},
{
"epoch": 4.513888888888889,
"grad_norm": 0.2859480082988739,
"learning_rate": 6.205152427454747e-05,
"loss": 0.0188,
"step": 1300
},
{
"epoch": 4.548611111111111,
"grad_norm": 0.19467797875404358,
"learning_rate": 6.149354709926185e-05,
"loss": 0.0141,
"step": 1310
},
{
"epoch": 4.583333333333333,
"grad_norm": 0.2192724645137787,
"learning_rate": 6.09340545603188e-05,
"loss": 0.0149,
"step": 1320
},
{
"epoch": 4.618055555555555,
"grad_norm": 0.2860821783542633,
"learning_rate": 6.037312042386314e-05,
"loss": 0.0222,
"step": 1330
},
{
"epoch": 4.652777777777778,
"grad_norm": 0.16547256708145142,
"learning_rate": 5.98108186461068e-05,
"loss": 0.0156,
"step": 1340
},
{
"epoch": 4.6875,
"grad_norm": 0.21727648377418518,
"learning_rate": 5.924722336357793e-05,
"loss": 0.0146,
"step": 1350
},
{
"epoch": 4.722222222222222,
"grad_norm": 0.2452634871006012,
"learning_rate": 5.868240888334653e-05,
"loss": 0.0164,
"step": 1360
},
{
"epoch": 4.756944444444445,
"grad_norm": 0.22284066677093506,
"learning_rate": 5.811644967322737e-05,
"loss": 0.0156,
"step": 1370
},
{
"epoch": 4.791666666666667,
"grad_norm": 0.2330964207649231,
"learning_rate": 5.7549420351961844e-05,
"loss": 0.0138,
"step": 1380
},
{
"epoch": 4.826388888888889,
"grad_norm": 0.20147477090358734,
"learning_rate": 5.698139567937988e-05,
"loss": 0.0147,
"step": 1390
},
{
"epoch": 4.861111111111111,
"grad_norm": 0.36548295617103577,
"learning_rate": 5.641245054654316e-05,
"loss": 0.0154,
"step": 1400
},
{
"epoch": 4.895833333333333,
"grad_norm": 0.20836779475212097,
"learning_rate": 5.584265996587129e-05,
"loss": 0.012,
"step": 1410
},
{
"epoch": 4.930555555555555,
"grad_norm": 0.21215607225894928,
"learning_rate": 5.527209906125169e-05,
"loss": 0.0119,
"step": 1420
},
{
"epoch": 4.965277777777778,
"grad_norm": 0.2577287256717682,
"learning_rate": 5.470084305813492e-05,
"loss": 0.0171,
"step": 1430
},
{
"epoch": 5.0,
"grad_norm": 0.189128577709198,
"learning_rate": 5.4128967273616625e-05,
"loss": 0.0129,
"step": 1440
},
{
"epoch": 5.034722222222222,
"grad_norm": 0.26594117283821106,
"learning_rate": 5.355654710650737e-05,
"loss": 0.0122,
"step": 1450
},
{
"epoch": 5.069444444444445,
"grad_norm": 0.1738700121641159,
"learning_rate": 5.29836580273917e-05,
"loss": 0.0125,
"step": 1460
},
{
"epoch": 5.104166666666667,
"grad_norm": 0.17082342505455017,
"learning_rate": 5.241037556867775e-05,
"loss": 0.0149,
"step": 1470
},
{
"epoch": 5.138888888888889,
"grad_norm": 0.3401947319507599,
"learning_rate": 5.183677531463863e-05,
"loss": 0.0137,
"step": 1480
},
{
"epoch": 5.173611111111111,
"grad_norm": 0.2809308171272278,
"learning_rate": 5.126293289144715e-05,
"loss": 0.0147,
"step": 1490
},
{
"epoch": 5.208333333333333,
"grad_norm": 0.2079499512910843,
"learning_rate": 5.068892395720483e-05,
"loss": 0.012,
"step": 1500
},
{
"epoch": 5.243055555555555,
"grad_norm": 0.3077143430709839,
"learning_rate": 5.011482419196677e-05,
"loss": 0.0183,
"step": 1510
},
{
"epoch": 5.277777777777778,
"grad_norm": 0.2815386950969696,
"learning_rate": 4.9540709287763685e-05,
"loss": 0.0186,
"step": 1520
},
{
"epoch": 5.3125,
"grad_norm": 0.2164885401725769,
"learning_rate": 4.8966654938622295e-05,
"loss": 0.0182,
"step": 1530
},
{
"epoch": 5.347222222222222,
"grad_norm": 0.22151343524456024,
"learning_rate": 4.839273683058542e-05,
"loss": 0.0166,
"step": 1540
},
{
"epoch": 5.381944444444445,
"grad_norm": 0.23041661083698273,
"learning_rate": 4.781903063173321e-05,
"loss": 0.0145,
"step": 1550
},
{
"epoch": 5.416666666666667,
"grad_norm": 0.2047954499721527,
"learning_rate": 4.7245611982206724e-05,
"loss": 0.0123,
"step": 1560
},
{
"epoch": 5.451388888888889,
"grad_norm": 0.21224728226661682,
"learning_rate": 4.6672556484235174e-05,
"loss": 0.0105,
"step": 1570
},
{
"epoch": 5.486111111111111,
"grad_norm": 0.13932043313980103,
"learning_rate": 4.6099939692168107e-05,
"loss": 0.0134,
"step": 1580
},
{
"epoch": 5.520833333333333,
"grad_norm": 0.2085762619972229,
"learning_rate": 4.5527837102514035e-05,
"loss": 0.0118,
"step": 1590
},
{
"epoch": 5.555555555555555,
"grad_norm": 0.1945631057024002,
"learning_rate": 4.4956324143986596e-05,
"loss": 0.0162,
"step": 1600
},
{
"epoch": 5.590277777777778,
"grad_norm": 0.2890234887599945,
"learning_rate": 4.438547616755962e-05,
"loss": 0.0143,
"step": 1610
},
{
"epoch": 5.625,
"grad_norm": 0.212904155254364,
"learning_rate": 4.381536843653262e-05,
"loss": 0.0142,
"step": 1620
},
{
"epoch": 5.659722222222222,
"grad_norm": 0.18237364292144775,
"learning_rate": 4.3246076116607604e-05,
"loss": 0.0092,
"step": 1630
},
{
"epoch": 5.694444444444445,
"grad_norm": 0.22255735099315643,
"learning_rate": 4.267767426597893e-05,
"loss": 0.0129,
"step": 1640
},
{
"epoch": 5.729166666666667,
"grad_norm": 0.18430383503437042,
"learning_rate": 4.211023782543727e-05,
"loss": 0.0115,
"step": 1650
},
{
"epoch": 5.763888888888889,
"grad_norm": 0.3231765329837799,
"learning_rate": 4.1543841608489075e-05,
"loss": 0.0142,
"step": 1660
},
{
"epoch": 5.798611111111111,
"grad_norm": 0.22352637350559235,
"learning_rate": 4.097856029149276e-05,
"loss": 0.0111,
"step": 1670
},
{
"epoch": 5.833333333333333,
"grad_norm": 0.17798005044460297,
"learning_rate": 4.0414468403813095e-05,
"loss": 0.0144,
"step": 1680
},
{
"epoch": 5.868055555555555,
"grad_norm": 0.26681026816368103,
"learning_rate": 3.985164031799481e-05,
"loss": 0.012,
"step": 1690
},
{
"epoch": 5.902777777777778,
"grad_norm": 0.2966468632221222,
"learning_rate": 3.9290150239957057e-05,
"loss": 0.0102,
"step": 1700
},
{
"epoch": 5.9375,
"grad_norm": 0.2999604344367981,
"learning_rate": 3.87300721992097e-05,
"loss": 0.011,
"step": 1710
},
{
"epoch": 5.972222222222222,
"grad_norm": 0.29702338576316833,
"learning_rate": 3.817148003909288e-05,
"loss": 0.0116,
"step": 1720
},
{
"epoch": 6.006944444444445,
"grad_norm": 0.2850812077522278,
"learning_rate": 3.761444740704129e-05,
"loss": 0.01,
"step": 1730
},
{
"epoch": 6.041666666666667,
"grad_norm": 0.26815593242645264,
"learning_rate": 3.705904774487396e-05,
"loss": 0.0142,
"step": 1740
},
{
"epoch": 6.076388888888889,
"grad_norm": 0.31307709217071533,
"learning_rate": 3.6505354279111524e-05,
"loss": 0.0144,
"step": 1750
},
{
"epoch": 6.111111111111111,
"grad_norm": 0.18757231533527374,
"learning_rate": 3.595344001132154e-05,
"loss": 0.0146,
"step": 1760
},
{
"epoch": 6.145833333333333,
"grad_norm": 0.21246947348117828,
"learning_rate": 3.540337770849371e-05,
"loss": 0.0134,
"step": 1770
},
{
"epoch": 6.180555555555555,
"grad_norm": 0.16654673218727112,
"learning_rate": 3.4855239893445946e-05,
"loss": 0.015,
"step": 1780
},
{
"epoch": 6.215277777777778,
"grad_norm": 0.1691170185804367,
"learning_rate": 3.430909883526251e-05,
"loss": 0.012,
"step": 1790
},
{
"epoch": 6.25,
"grad_norm": 0.14390875399112701,
"learning_rate": 3.3765026539765834e-05,
"loss": 0.0108,
"step": 1800
},
{
"epoch": 6.284722222222222,
"grad_norm": 0.19382424652576447,
"learning_rate": 3.322309474002288e-05,
"loss": 0.0111,
"step": 1810
},
{
"epoch": 6.319444444444445,
"grad_norm": 0.1386929452419281,
"learning_rate": 3.268337488688748e-05,
"loss": 0.0101,
"step": 1820
},
{
"epoch": 6.354166666666667,
"grad_norm": 0.1915092170238495,
"learning_rate": 3.214593813958001e-05,
"loss": 0.0135,
"step": 1830
},
{
"epoch": 6.388888888888889,
"grad_norm": 0.155628964304924,
"learning_rate": 3.1610855356305354e-05,
"loss": 0.011,
"step": 1840
},
{
"epoch": 6.423611111111111,
"grad_norm": 0.194572314620018,
"learning_rate": 3.107819708491059e-05,
"loss": 0.0119,
"step": 1850
},
{
"epoch": 6.458333333333333,
"grad_norm": 0.1013031154870987,
"learning_rate": 3.0548033553583705e-05,
"loss": 0.0122,
"step": 1860
},
{
"epoch": 6.493055555555555,
"grad_norm": 0.2090463936328888,
"learning_rate": 3.0020434661594343e-05,
"loss": 0.0138,
"step": 1870
},
{
"epoch": 6.527777777777778,
"grad_norm": 0.1606292426586151,
"learning_rate": 2.9495469970078e-05,
"loss": 0.0093,
"step": 1880
},
{
"epoch": 6.5625,
"grad_norm": 0.1697450429201126,
"learning_rate": 2.8973208692864624e-05,
"loss": 0.0132,
"step": 1890
},
{
"epoch": 6.597222222222222,
"grad_norm": 0.2177683413028717,
"learning_rate": 2.8453719687353285e-05,
"loss": 0.013,
"step": 1900
},
{
"epoch": 6.631944444444445,
"grad_norm": 0.26842954754829407,
"learning_rate": 2.7937071445433615e-05,
"loss": 0.0101,
"step": 1910
},
{
"epoch": 6.666666666666667,
"grad_norm": 0.34368401765823364,
"learning_rate": 2.7423332084455544e-05,
"loss": 0.0118,
"step": 1920
},
{
"epoch": 6.701388888888889,
"grad_norm": 0.19154952466487885,
"learning_rate": 2.6912569338248315e-05,
"loss": 0.0118,
"step": 1930
},
{
"epoch": 6.736111111111111,
"grad_norm": 0.2398257553577423,
"learning_rate": 2.6404850548190235e-05,
"loss": 0.0101,
"step": 1940
},
{
"epoch": 6.770833333333333,
"grad_norm": 0.2987666428089142,
"learning_rate": 2.590024265433002e-05,
"loss": 0.0105,
"step": 1950
},
{
"epoch": 6.805555555555555,
"grad_norm": 0.284682035446167,
"learning_rate": 2.5398812186561095e-05,
"loss": 0.0117,
"step": 1960
},
{
"epoch": 6.840277777777778,
"grad_norm": 0.24240490794181824,
"learning_rate": 2.4900625255849986e-05,
"loss": 0.0114,
"step": 1970
},
{
"epoch": 6.875,
"grad_norm": 0.2425372302532196,
"learning_rate": 2.4405747545519963e-05,
"loss": 0.0122,
"step": 1980
},
{
"epoch": 6.909722222222222,
"grad_norm": 0.15807554125785828,
"learning_rate": 2.3914244302590966e-05,
"loss": 0.0102,
"step": 1990
},
{
"epoch": 6.944444444444445,
"grad_norm": 0.2001255601644516,
"learning_rate": 2.3426180329177215e-05,
"loss": 0.0115,
"step": 2000
},
{
"epoch": 6.979166666666667,
"grad_norm": 0.3653542995452881,
"learning_rate": 2.294161997394336e-05,
"loss": 0.0115,
"step": 2010
},
{
"epoch": 7.013888888888889,
"grad_norm": 0.2090686410665512,
"learning_rate": 2.246062712362043e-05,
"loss": 0.0111,
"step": 2020
},
{
"epoch": 7.048611111111111,
"grad_norm": 0.263019323348999,
"learning_rate": 2.1983265194582724e-05,
"loss": 0.012,
"step": 2030
},
{
"epoch": 7.083333333333333,
"grad_norm": 0.2306613177061081,
"learning_rate": 2.150959712448669e-05,
"loss": 0.0128,
"step": 2040
},
{
"epoch": 7.118055555555555,
"grad_norm": 0.10623705387115479,
"learning_rate": 2.1039685363972934e-05,
"loss": 0.0122,
"step": 2050
},
{
"epoch": 7.152777777777778,
"grad_norm": 0.22297905385494232,
"learning_rate": 2.0573591868432406e-05,
"loss": 0.0112,
"step": 2060
},
{
"epoch": 7.1875,
"grad_norm": 0.14223700761795044,
"learning_rate": 2.0111378089837956e-05,
"loss": 0.012,
"step": 2070
},
{
"epoch": 7.222222222222222,
"grad_norm": 0.1814931482076645,
"learning_rate": 1.9653104968642173e-05,
"loss": 0.0083,
"step": 2080
},
{
"epoch": 7.256944444444445,
"grad_norm": 0.18442519009113312,
"learning_rate": 1.919883292574269e-05,
"loss": 0.0138,
"step": 2090
},
{
"epoch": 7.291666666666667,
"grad_norm": 0.2410842329263687,
"learning_rate": 1.874862185451608e-05,
"loss": 0.0126,
"step": 2100
},
{
"epoch": 7.326388888888889,
"grad_norm": 0.16032792627811432,
"learning_rate": 1.8302531112921172e-05,
"loss": 0.0097,
"step": 2110
},
{
"epoch": 7.361111111111111,
"grad_norm": 0.18864929676055908,
"learning_rate": 1.7860619515673033e-05,
"loss": 0.0102,
"step": 2120
},
{
"epoch": 7.395833333333333,
"grad_norm": 0.12309829145669937,
"learning_rate": 1.7422945326488553e-05,
"loss": 0.0126,
"step": 2130
},
{
"epoch": 7.430555555555555,
"grad_norm": 0.13348473608493805,
"learning_rate": 1.6989566250404702e-05,
"loss": 0.0085,
"step": 2140
},
{
"epoch": 7.465277777777778,
"grad_norm": 0.08756257593631744,
"learning_rate": 1.656053942617036e-05,
"loss": 0.0104,
"step": 2150
},
{
"epoch": 7.5,
"grad_norm": 0.1287485808134079,
"learning_rate": 1.6135921418712956e-05,
"loss": 0.0092,
"step": 2160
},
{
"epoch": 7.534722222222222,
"grad_norm": 0.22407540678977966,
"learning_rate": 1.5715768211680647e-05,
"loss": 0.0082,
"step": 2170
},
{
"epoch": 7.569444444444445,
"grad_norm": 0.1887049823999405,
"learning_rate": 1.5300135200061168e-05,
"loss": 0.0098,
"step": 2180
},
{
"epoch": 7.604166666666667,
"grad_norm": 0.13443368673324585,
"learning_rate": 1.4889077182878269e-05,
"loss": 0.0101,
"step": 2190
},
{
"epoch": 7.638888888888889,
"grad_norm": 0.2131468653678894,
"learning_rate": 1.44826483559668e-05,
"loss": 0.0116,
"step": 2200
},
{
"epoch": 7.673611111111111,
"grad_norm": 0.16836215555667877,
"learning_rate": 1.4080902304827242e-05,
"loss": 0.0094,
"step": 2210
},
{
"epoch": 7.708333333333333,
"grad_norm": 0.30081215500831604,
"learning_rate": 1.368389199756075e-05,
"loss": 0.0133,
"step": 2220
},
{
"epoch": 7.743055555555555,
"grad_norm": 0.20233741402626038,
"learning_rate": 1.3291669777885596e-05,
"loss": 0.0094,
"step": 2230
},
{
"epoch": 7.777777777777778,
"grad_norm": 0.1448446661233902,
"learning_rate": 1.2904287358235928e-05,
"loss": 0.0105,
"step": 2240
},
{
"epoch": 7.8125,
"grad_norm": 0.16389504075050354,
"learning_rate": 1.2521795812943704e-05,
"loss": 0.0082,
"step": 2250
},
{
"epoch": 7.847222222222222,
"grad_norm": 0.18017397820949554,
"learning_rate": 1.2144245571504898e-05,
"loss": 0.0092,
"step": 2260
},
{
"epoch": 7.881944444444445,
"grad_norm": 0.20985354483127594,
"learning_rate": 1.1771686411930577e-05,
"loss": 0.0093,
"step": 2270
},
{
"epoch": 7.916666666666667,
"grad_norm": 0.2412370890378952,
"learning_rate": 1.1404167454183957e-05,
"loss": 0.0082,
"step": 2280
},
{
"epoch": 7.951388888888889,
"grad_norm": 0.1690802127122879,
"learning_rate": 1.1041737153704185e-05,
"loss": 0.0112,
"step": 2290
},
{
"epoch": 7.986111111111111,
"grad_norm": 0.22050976753234863,
"learning_rate": 1.0684443295017721e-05,
"loss": 0.0078,
"step": 2300
},
{
"epoch": 8.020833333333334,
"grad_norm": 0.134538471698761,
"learning_rate": 1.0332332985438248e-05,
"loss": 0.011,
"step": 2310
},
{
"epoch": 8.055555555555555,
"grad_norm": 0.16212856769561768,
"learning_rate": 9.985452648855803e-06,
"loss": 0.0097,
"step": 2320
},
{
"epoch": 8.090277777777779,
"grad_norm": 0.1637708693742752,
"learning_rate": 9.643848019616003e-06,
"loss": 0.0094,
"step": 2330
},
{
"epoch": 8.125,
"grad_norm": 0.20079755783081055,
"learning_rate": 9.307564136490254e-06,
"loss": 0.0108,
"step": 2340
},
{
"epoch": 8.159722222222221,
"grad_norm": 0.2914990782737732,
"learning_rate": 8.976645336737599e-06,
"loss": 0.0072,
"step": 2350
},
{
"epoch": 8.194444444444445,
"grad_norm": 0.14318501949310303,
"learning_rate": 8.651135250259091e-06,
"loss": 0.0081,
"step": 2360
},
{
"epoch": 8.229166666666666,
"grad_norm": 0.22007636725902557,
"learning_rate": 8.33107679384542e-06,
"loss": 0.0076,
"step": 2370
},
{
"epoch": 8.26388888888889,
"grad_norm": 0.215325728058815,
"learning_rate": 8.016512165518569e-06,
"loss": 0.0074,
"step": 2380
},
{
"epoch": 8.29861111111111,
"grad_norm": 0.2510315775871277,
"learning_rate": 7.707482838968222e-06,
"loss": 0.0109,
"step": 2390
},
{
"epoch": 8.333333333333334,
"grad_norm": 0.37152716517448425,
"learning_rate": 7.404029558083653e-06,
"loss": 0.0105,
"step": 2400
},
{
"epoch": 8.368055555555555,
"grad_norm": 0.2045627236366272,
"learning_rate": 7.106192331581896e-06,
"loss": 0.0084,
"step": 2410
},
{
"epoch": 8.402777777777779,
"grad_norm": 0.26112157106399536,
"learning_rate": 6.814010427732786e-06,
"loss": 0.0083,
"step": 2420
},
{
"epoch": 8.4375,
"grad_norm": 0.18590597808361053,
"learning_rate": 6.527522369181655e-06,
"loss": 0.0119,
"step": 2430
},
{
"epoch": 8.472222222222221,
"grad_norm": 0.19280190765857697,
"learning_rate": 6.246765927870313e-06,
"loss": 0.0088,
"step": 2440
},
{
"epoch": 8.506944444444445,
"grad_norm": 0.11337297409772873,
"learning_rate": 5.971778120057031e-06,
"loss": 0.0102,
"step": 2450
},
{
"epoch": 8.541666666666666,
"grad_norm": 0.20286336541175842,
"learning_rate": 5.702595201436101e-06,
"loss": 0.0116,
"step": 2460
},
{
"epoch": 8.57638888888889,
"grad_norm": 0.18840523064136505,
"learning_rate": 5.439252662357769e-06,
"loss": 0.0102,
"step": 2470
},
{
"epoch": 8.61111111111111,
"grad_norm": 0.14088304340839386,
"learning_rate": 5.181785223148999e-06,
"loss": 0.0097,
"step": 2480
},
{
"epoch": 8.645833333333334,
"grad_norm": 0.14217671751976013,
"learning_rate": 4.930226829535767e-06,
"loss": 0.0075,
"step": 2490
},
{
"epoch": 8.680555555555555,
"grad_norm": 0.21861067414283752,
"learning_rate": 4.684610648167503e-06,
"loss": 0.014,
"step": 2500
},
{
"epoch": 8.715277777777779,
"grad_norm": 0.1945720613002777,
"learning_rate": 4.4449690622442585e-06,
"loss": 0.0099,
"step": 2510
},
{
"epoch": 8.75,
"grad_norm": 0.2723676264286041,
"learning_rate": 4.2113336672471245e-06,
"loss": 0.0129,
"step": 2520
},
{
"epoch": 8.784722222222221,
"grad_norm": 0.2239852398633957,
"learning_rate": 3.983735266772565e-06,
"loss": 0.0094,
"step": 2530
},
{
"epoch": 8.819444444444445,
"grad_norm": 0.25737810134887695,
"learning_rate": 3.762203868471087e-06,
"loss": 0.0102,
"step": 2540
},
{
"epoch": 8.854166666666666,
"grad_norm": 0.18129268288612366,
"learning_rate": 3.546768680090934e-06,
"loss": 0.0119,
"step": 2550
},
{
"epoch": 8.88888888888889,
"grad_norm": 0.3871157169342041,
"learning_rate": 3.3374581056271447e-06,
"loss": 0.0088,
"step": 2560
},
{
"epoch": 8.92361111111111,
"grad_norm": 0.16779103875160217,
"learning_rate": 3.1342997415767015e-06,
"loss": 0.009,
"step": 2570
},
{
"epoch": 8.958333333333334,
"grad_norm": 0.11715767532587051,
"learning_rate": 2.9373203733000232e-06,
"loss": 0.0099,
"step": 2580
},
{
"epoch": 8.993055555555555,
"grad_norm": 0.21253140270709991,
"learning_rate": 2.746545971489478e-06,
"loss": 0.0099,
"step": 2590
},
{
"epoch": 9.027777777777779,
"grad_norm": 0.26496362686157227,
"learning_rate": 2.562001688745291e-06,
"loss": 0.0088,
"step": 2600
},
{
"epoch": 9.0625,
"grad_norm": 0.1795375943183899,
"learning_rate": 2.3837118562592797e-06,
"loss": 0.0095,
"step": 2610
},
{
"epoch": 9.097222222222221,
"grad_norm": 0.20966722071170807,
"learning_rate": 2.211699980606946e-06,
"loss": 0.008,
"step": 2620
},
{
"epoch": 9.131944444444445,
"grad_norm": 0.19171403348445892,
"learning_rate": 2.0459887406482413e-06,
"loss": 0.0103,
"step": 2630
},
{
"epoch": 9.166666666666666,
"grad_norm": 0.22042040526866913,
"learning_rate": 1.8865999845374793e-06,
"loss": 0.0087,
"step": 2640
},
{
"epoch": 9.20138888888889,
"grad_norm": 0.21836155652999878,
"learning_rate": 1.7335547268427843e-06,
"loss": 0.0091,
"step": 2650
},
{
"epoch": 9.23611111111111,
"grad_norm": 0.1593165397644043,
"learning_rate": 1.5868731457754139e-06,
"loss": 0.0108,
"step": 2660
},
{
"epoch": 9.270833333333334,
"grad_norm": 0.08942472189664841,
"learning_rate": 1.4465745805293585e-06,
"loss": 0.0079,
"step": 2670
},
{
"epoch": 9.305555555555555,
"grad_norm": 0.13019748032093048,
"learning_rate": 1.3126775287316151e-06,
"loss": 0.0072,
"step": 2680
},
{
"epoch": 9.340277777777779,
"grad_norm": 0.17031489312648773,
"learning_rate": 1.1851996440033319e-06,
"loss": 0.0076,
"step": 2690
},
{
"epoch": 9.375,
"grad_norm": 0.2234310358762741,
"learning_rate": 1.064157733632276e-06,
"loss": 0.0108,
"step": 2700
},
{
"epoch": 9.409722222222221,
"grad_norm": 0.1366192102432251,
"learning_rate": 9.495677563569005e-07,
"loss": 0.0086,
"step": 2710
},
{
"epoch": 9.444444444444445,
"grad_norm": 0.1559923142194748,
"learning_rate": 8.414448202622494e-07,
"loss": 0.0078,
"step": 2720
},
{
"epoch": 9.479166666666666,
"grad_norm": 0.1459418386220932,
"learning_rate": 7.398031807880457e-07,
"loss": 0.0114,
"step": 2730
},
{
"epoch": 9.51388888888889,
"grad_norm": 0.23313875496387482,
"learning_rate": 6.44656238849195e-07,
"loss": 0.0136,
"step": 2740
},
{
"epoch": 9.54861111111111,
"grad_norm": 0.14127647876739502,
"learning_rate": 5.560165390689154e-07,
"loss": 0.0061,
"step": 2750
},
{
"epoch": 9.583333333333334,
"grad_norm": 0.14344729483127594,
"learning_rate": 4.738957681248379e-07,
"loss": 0.0076,
"step": 2760
},
{
"epoch": 9.618055555555555,
"grad_norm": 0.257382869720459,
"learning_rate": 3.983047532081341e-07,
"loss": 0.0103,
"step": 2770
},
{
"epoch": 9.652777777777779,
"grad_norm": 0.17644330859184265,
"learning_rate": 3.2925346059605176e-07,
"loss": 0.0122,
"step": 2780
},
{
"epoch": 9.6875,
"grad_norm": 0.1124967411160469,
"learning_rate": 2.667509943378721e-07,
"loss": 0.0099,
"step": 2790
},
{
"epoch": 9.722222222222221,
"grad_norm": 0.14655955135822296,
"learning_rate": 2.1080559505462505e-07,
"loss": 0.009,
"step": 2800
},
{
"epoch": 9.756944444444445,
"grad_norm": 0.08127148449420929,
"learning_rate": 1.614246388525864e-07,
"loss": 0.0074,
"step": 2810
},
{
"epoch": 9.791666666666666,
"grad_norm": 0.13320744037628174,
"learning_rate": 1.1861463635077785e-07,
"loss": 0.0076,
"step": 2820
},
{
"epoch": 9.82638888888889,
"grad_norm": 0.14339619874954224,
"learning_rate": 8.238123182257584e-08,
"loss": 0.0053,
"step": 2830
},
{
"epoch": 9.86111111111111,
"grad_norm": 0.14370766282081604,
"learning_rate": 5.272920245156798e-08,
"loss": 0.0081,
"step": 2840
},
{
"epoch": 9.895833333333334,
"grad_norm": 0.19183674454689026,
"learning_rate": 2.9662457701662428e-08,
"loss": 0.0117,
"step": 2850
},
{
"epoch": 9.930555555555555,
"grad_norm": 0.1539844423532486,
"learning_rate": 1.3184038801683508e-08,
"loss": 0.0066,
"step": 2860
},
{
"epoch": 9.965277777777779,
"grad_norm": 0.16146790981292725,
"learning_rate": 3.2961183443813714e-09,
"loss": 0.0105,
"step": 2870
},
{
"epoch": 10.0,
"grad_norm": 0.14206930994987488,
"learning_rate": 0.0,
"loss": 0.0123,
"step": 2880
},
{
"epoch": 10.0,
"step": 2880,
"total_flos": 3.0392298095399424e+17,
"train_loss": 0.03354278181084535,
"train_runtime": 2973.9122,
"train_samples_per_second": 47.439,
"train_steps_per_second": 0.968
}
],
"logging_steps": 10,
"max_steps": 2880,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 10000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3.0392298095399424e+17,
"train_batch_size": 49,
"trial_name": null,
"trial_params": null
}