{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.99835255354201,
"eval_steps": 500,
"global_step": 1365,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.002196595277320154,
"grad_norm": 2047828.375,
"learning_rate": 0.0,
"loss": 1.3567,
"step": 1
},
{
"epoch": 0.004393190554640308,
"grad_norm": 6674292.0,
"learning_rate": 3.6496350364963505e-07,
"loss": 1.5194,
"step": 2
},
{
"epoch": 0.006589785831960461,
"grad_norm": 40031500.0,
"learning_rate": 7.299270072992701e-07,
"loss": 1.2716,
"step": 3
},
{
"epoch": 0.008786381109280615,
"grad_norm": 4004006.25,
"learning_rate": 1.0948905109489052e-06,
"loss": 1.1705,
"step": 4
},
{
"epoch": 0.010982976386600769,
"grad_norm": 11234545.0,
"learning_rate": 1.4598540145985402e-06,
"loss": 1.2851,
"step": 5
},
{
"epoch": 0.013179571663920923,
"grad_norm": 7696731.0,
"learning_rate": 1.824817518248175e-06,
"loss": 1.1159,
"step": 6
},
{
"epoch": 0.015376166941241077,
"grad_norm": 19380058.0,
"learning_rate": 2.1897810218978103e-06,
"loss": 1.0939,
"step": 7
},
{
"epoch": 0.01757276221856123,
"grad_norm": 51641744.0,
"learning_rate": 2.5547445255474454e-06,
"loss": 1.1843,
"step": 8
},
{
"epoch": 0.019769357495881382,
"grad_norm": 51203412.0,
"learning_rate": 2.9197080291970804e-06,
"loss": 1.4502,
"step": 9
},
{
"epoch": 0.021965952773201538,
"grad_norm": 8740138.0,
"learning_rate": 3.2846715328467155e-06,
"loss": 1.1613,
"step": 10
},
{
"epoch": 0.02416254805052169,
"grad_norm": 14772801.0,
"learning_rate": 3.64963503649635e-06,
"loss": 1.3874,
"step": 11
},
{
"epoch": 0.026359143327841845,
"grad_norm": 11424318.0,
"learning_rate": 4.014598540145985e-06,
"loss": 1.2471,
"step": 12
},
{
"epoch": 0.028555738605161998,
"grad_norm": 6222449.0,
"learning_rate": 4.379562043795621e-06,
"loss": 1.219,
"step": 13
},
{
"epoch": 0.030752333882482153,
"grad_norm": 14838477.0,
"learning_rate": 4.744525547445255e-06,
"loss": 1.2428,
"step": 14
},
{
"epoch": 0.032948929159802305,
"grad_norm": 34365772.0,
"learning_rate": 5.109489051094891e-06,
"loss": 1.3154,
"step": 15
},
{
"epoch": 0.03514552443712246,
"grad_norm": 11343648.0,
"learning_rate": 5.474452554744526e-06,
"loss": 1.0682,
"step": 16
},
{
"epoch": 0.037342119714442616,
"grad_norm": 16938996.0,
"learning_rate": 5.839416058394161e-06,
"loss": 1.1922,
"step": 17
},
{
"epoch": 0.039538714991762765,
"grad_norm": 28895242.0,
"learning_rate": 6.204379562043796e-06,
"loss": 1.4344,
"step": 18
},
{
"epoch": 0.04173531026908292,
"grad_norm": 11380300.0,
"learning_rate": 6.569343065693431e-06,
"loss": 1.3168,
"step": 19
},
{
"epoch": 0.043931905546403076,
"grad_norm": 22187968.0,
"learning_rate": 6.9343065693430655e-06,
"loss": 1.2649,
"step": 20
},
{
"epoch": 0.04612850082372323,
"grad_norm": 10284782.0,
"learning_rate": 7.2992700729927e-06,
"loss": 1.2709,
"step": 21
},
{
"epoch": 0.04832509610104338,
"grad_norm": 14559371.0,
"learning_rate": 7.664233576642336e-06,
"loss": 1.2476,
"step": 22
},
{
"epoch": 0.050521691378363535,
"grad_norm": 10865930.0,
"learning_rate": 8.02919708029197e-06,
"loss": 1.5553,
"step": 23
},
{
"epoch": 0.05271828665568369,
"grad_norm": 12806353.0,
"learning_rate": 8.394160583941606e-06,
"loss": 1.2871,
"step": 24
},
{
"epoch": 0.054914881933003847,
"grad_norm": 12299012.0,
"learning_rate": 8.759124087591241e-06,
"loss": 1.1707,
"step": 25
},
{
"epoch": 0.057111477210323995,
"grad_norm": 7523549.0,
"learning_rate": 9.124087591240877e-06,
"loss": 1.2504,
"step": 26
},
{
"epoch": 0.05930807248764415,
"grad_norm": 5730644.0,
"learning_rate": 9.48905109489051e-06,
"loss": 1.1693,
"step": 27
},
{
"epoch": 0.061504667764964306,
"grad_norm": 5095684.5,
"learning_rate": 9.854014598540148e-06,
"loss": 1.0545,
"step": 28
},
{
"epoch": 0.06370126304228446,
"grad_norm": 7149638.0,
"learning_rate": 1.0218978102189781e-05,
"loss": 1.1866,
"step": 29
},
{
"epoch": 0.06589785831960461,
"grad_norm": 25770528.0,
"learning_rate": 1.0583941605839417e-05,
"loss": 1.1411,
"step": 30
},
{
"epoch": 0.06809445359692477,
"grad_norm": 7752126.0,
"learning_rate": 1.0948905109489052e-05,
"loss": 1.209,
"step": 31
},
{
"epoch": 0.07029104887424492,
"grad_norm": 11265648.0,
"learning_rate": 1.1313868613138686e-05,
"loss": 1.3958,
"step": 32
},
{
"epoch": 0.07248764415156507,
"grad_norm": 21874444.0,
"learning_rate": 1.1678832116788322e-05,
"loss": 1.6249,
"step": 33
},
{
"epoch": 0.07468423942888523,
"grad_norm": 11610835.0,
"learning_rate": 1.2043795620437957e-05,
"loss": 1.1728,
"step": 34
},
{
"epoch": 0.07688083470620538,
"grad_norm": 5678601.5,
"learning_rate": 1.2408759124087593e-05,
"loss": 1.2732,
"step": 35
},
{
"epoch": 0.07907742998352553,
"grad_norm": 14198422.0,
"learning_rate": 1.2773722627737228e-05,
"loss": 1.2813,
"step": 36
},
{
"epoch": 0.08127402526084569,
"grad_norm": 8859336.0,
"learning_rate": 1.3138686131386862e-05,
"loss": 1.2603,
"step": 37
},
{
"epoch": 0.08347062053816584,
"grad_norm": 5651872.0,
"learning_rate": 1.3503649635036497e-05,
"loss": 1.2841,
"step": 38
},
{
"epoch": 0.085667215815486,
"grad_norm": 17596590.0,
"learning_rate": 1.3868613138686131e-05,
"loss": 1.4325,
"step": 39
},
{
"epoch": 0.08786381109280615,
"grad_norm": 12388730.0,
"learning_rate": 1.4233576642335767e-05,
"loss": 1.1562,
"step": 40
},
{
"epoch": 0.0900604063701263,
"grad_norm": 12075692.0,
"learning_rate": 1.45985401459854e-05,
"loss": 1.2676,
"step": 41
},
{
"epoch": 0.09225700164744646,
"grad_norm": 27072708.0,
"learning_rate": 1.496350364963504e-05,
"loss": 1.4864,
"step": 42
},
{
"epoch": 0.09445359692476661,
"grad_norm": 37403220.0,
"learning_rate": 1.5328467153284673e-05,
"loss": 1.2104,
"step": 43
},
{
"epoch": 0.09665019220208676,
"grad_norm": 12227715.0,
"learning_rate": 1.569343065693431e-05,
"loss": 1.3041,
"step": 44
},
{
"epoch": 0.09884678747940692,
"grad_norm": 12901322.0,
"learning_rate": 1.605839416058394e-05,
"loss": 1.3288,
"step": 45
},
{
"epoch": 0.10104338275672707,
"grad_norm": 17107380.0,
"learning_rate": 1.6423357664233576e-05,
"loss": 1.2191,
"step": 46
},
{
"epoch": 0.10323997803404723,
"grad_norm": 31481982.0,
"learning_rate": 1.678832116788321e-05,
"loss": 1.2633,
"step": 47
},
{
"epoch": 0.10543657331136738,
"grad_norm": 15303975.0,
"learning_rate": 1.715328467153285e-05,
"loss": 1.2865,
"step": 48
},
{
"epoch": 0.10763316858868753,
"grad_norm": 17270372.0,
"learning_rate": 1.7518248175182482e-05,
"loss": 1.4734,
"step": 49
},
{
"epoch": 0.10982976386600769,
"grad_norm": 34183024.0,
"learning_rate": 1.7883211678832118e-05,
"loss": 1.303,
"step": 50
},
{
"epoch": 0.11202635914332784,
"grad_norm": 14855132.0,
"learning_rate": 1.8248175182481753e-05,
"loss": 1.2756,
"step": 51
},
{
"epoch": 0.11422295442064799,
"grad_norm": 18743198.0,
"learning_rate": 1.861313868613139e-05,
"loss": 1.1061,
"step": 52
},
{
"epoch": 0.11641954969796815,
"grad_norm": 6949814.5,
"learning_rate": 1.897810218978102e-05,
"loss": 1.2949,
"step": 53
},
{
"epoch": 0.1186161449752883,
"grad_norm": 7799824.0,
"learning_rate": 1.934306569343066e-05,
"loss": 1.0481,
"step": 54
},
{
"epoch": 0.12081274025260846,
"grad_norm": 8161740.0,
"learning_rate": 1.9708029197080295e-05,
"loss": 1.1827,
"step": 55
},
{
"epoch": 0.12300933552992861,
"grad_norm": 34570872.0,
"learning_rate": 2.0072992700729927e-05,
"loss": 1.4264,
"step": 56
},
{
"epoch": 0.12520593080724876,
"grad_norm": 7429580.5,
"learning_rate": 2.0437956204379563e-05,
"loss": 1.0902,
"step": 57
},
{
"epoch": 0.12740252608456892,
"grad_norm": 17569806.0,
"learning_rate": 2.08029197080292e-05,
"loss": 1.2842,
"step": 58
},
{
"epoch": 0.12959912136188906,
"grad_norm": 15480506.0,
"learning_rate": 2.1167883211678834e-05,
"loss": 1.0864,
"step": 59
},
{
"epoch": 0.13179571663920922,
"grad_norm": 12282927.0,
"learning_rate": 2.1532846715328466e-05,
"loss": 1.1316,
"step": 60
},
{
"epoch": 0.13399231191652938,
"grad_norm": 4455538.0,
"learning_rate": 2.1897810218978105e-05,
"loss": 1.2465,
"step": 61
},
{
"epoch": 0.13618890719384955,
"grad_norm": 15463966.0,
"learning_rate": 2.226277372262774e-05,
"loss": 1.099,
"step": 62
},
{
"epoch": 0.13838550247116968,
"grad_norm": 22602364.0,
"learning_rate": 2.2627737226277372e-05,
"loss": 1.0459,
"step": 63
},
{
"epoch": 0.14058209774848984,
"grad_norm": 10241717.0,
"learning_rate": 2.2992700729927008e-05,
"loss": 1.2806,
"step": 64
},
{
"epoch": 0.14277869302581,
"grad_norm": 7588423.5,
"learning_rate": 2.3357664233576643e-05,
"loss": 1.114,
"step": 65
},
{
"epoch": 0.14497528830313014,
"grad_norm": 10213540.0,
"learning_rate": 2.372262773722628e-05,
"loss": 0.9609,
"step": 66
},
{
"epoch": 0.1471718835804503,
"grad_norm": 11998898.0,
"learning_rate": 2.4087591240875914e-05,
"loss": 1.1912,
"step": 67
},
{
"epoch": 0.14936847885777046,
"grad_norm": 5615866.0,
"learning_rate": 2.445255474452555e-05,
"loss": 1.049,
"step": 68
},
{
"epoch": 0.1515650741350906,
"grad_norm": 19550316.0,
"learning_rate": 2.4817518248175185e-05,
"loss": 1.2795,
"step": 69
},
{
"epoch": 0.15376166941241076,
"grad_norm": 10943439.0,
"learning_rate": 2.518248175182482e-05,
"loss": 1.1303,
"step": 70
},
{
"epoch": 0.15595826468973092,
"grad_norm": 19708706.0,
"learning_rate": 2.5547445255474456e-05,
"loss": 1.2344,
"step": 71
},
{
"epoch": 0.15815485996705106,
"grad_norm": 17543686.0,
"learning_rate": 2.591240875912409e-05,
"loss": 1.1521,
"step": 72
},
{
"epoch": 0.16035145524437122,
"grad_norm": 43875364.0,
"learning_rate": 2.6277372262773724e-05,
"loss": 1.3358,
"step": 73
},
{
"epoch": 0.16254805052169138,
"grad_norm": 6051551.5,
"learning_rate": 2.664233576642336e-05,
"loss": 1.2323,
"step": 74
},
{
"epoch": 0.16474464579901152,
"grad_norm": 32905656.0,
"learning_rate": 2.7007299270072995e-05,
"loss": 1.0926,
"step": 75
},
{
"epoch": 0.16694124107633168,
"grad_norm": 8517439.0,
"learning_rate": 2.737226277372263e-05,
"loss": 1.1515,
"step": 76
},
{
"epoch": 0.16913783635365184,
"grad_norm": 11192837.0,
"learning_rate": 2.7737226277372262e-05,
"loss": 1.0715,
"step": 77
},
{
"epoch": 0.171334431630972,
"grad_norm": 9605860.0,
"learning_rate": 2.8102189781021898e-05,
"loss": 0.8551,
"step": 78
},
{
"epoch": 0.17353102690829214,
"grad_norm": 8618499.0,
"learning_rate": 2.8467153284671533e-05,
"loss": 1.0788,
"step": 79
},
{
"epoch": 0.1757276221856123,
"grad_norm": 14007595.0,
"learning_rate": 2.883211678832117e-05,
"loss": 1.2054,
"step": 80
},
{
"epoch": 0.17792421746293247,
"grad_norm": 9647478.0,
"learning_rate": 2.91970802919708e-05,
"loss": 1.1001,
"step": 81
},
{
"epoch": 0.1801208127402526,
"grad_norm": 38317572.0,
"learning_rate": 2.9562043795620443e-05,
"loss": 1.0888,
"step": 82
},
{
"epoch": 0.18231740801757276,
"grad_norm": 16823494.0,
"learning_rate": 2.992700729927008e-05,
"loss": 1.0136,
"step": 83
},
{
"epoch": 0.18451400329489293,
"grad_norm": 9234012.0,
"learning_rate": 3.029197080291971e-05,
"loss": 1.1833,
"step": 84
},
{
"epoch": 0.18671059857221306,
"grad_norm": 13358546.0,
"learning_rate": 3.0656934306569346e-05,
"loss": 1.2703,
"step": 85
},
{
"epoch": 0.18890719384953322,
"grad_norm": 22999784.0,
"learning_rate": 3.102189781021898e-05,
"loss": 1.4449,
"step": 86
},
{
"epoch": 0.19110378912685339,
"grad_norm": 16768359.0,
"learning_rate": 3.138686131386862e-05,
"loss": 1.1601,
"step": 87
},
{
"epoch": 0.19330038440417352,
"grad_norm": 31959344.0,
"learning_rate": 3.175182481751825e-05,
"loss": 1.2223,
"step": 88
},
{
"epoch": 0.19549697968149368,
"grad_norm": 19150826.0,
"learning_rate": 3.211678832116788e-05,
"loss": 0.9576,
"step": 89
},
{
"epoch": 0.19769357495881384,
"grad_norm": 13048818.0,
"learning_rate": 3.248175182481752e-05,
"loss": 1.3498,
"step": 90
},
{
"epoch": 0.19989017023613398,
"grad_norm": 12262197.0,
"learning_rate": 3.284671532846715e-05,
"loss": 1.052,
"step": 91
},
{
"epoch": 0.20208676551345414,
"grad_norm": 2876966.25,
"learning_rate": 3.321167883211679e-05,
"loss": 1.0189,
"step": 92
},
{
"epoch": 0.2042833607907743,
"grad_norm": 18541894.0,
"learning_rate": 3.357664233576642e-05,
"loss": 1.0832,
"step": 93
},
{
"epoch": 0.20647995606809447,
"grad_norm": 15970311.0,
"learning_rate": 3.3941605839416055e-05,
"loss": 1.2928,
"step": 94
},
{
"epoch": 0.2086765513454146,
"grad_norm": 25580606.0,
"learning_rate": 3.43065693430657e-05,
"loss": 1.0362,
"step": 95
},
{
"epoch": 0.21087314662273476,
"grad_norm": 14224102.0,
"learning_rate": 3.467153284671533e-05,
"loss": 1.2039,
"step": 96
},
{
"epoch": 0.21306974190005493,
"grad_norm": 6323020.5,
"learning_rate": 3.5036496350364965e-05,
"loss": 1.237,
"step": 97
},
{
"epoch": 0.21526633717737506,
"grad_norm": 4563988.5,
"learning_rate": 3.5401459854014604e-05,
"loss": 1.1155,
"step": 98
},
{
"epoch": 0.21746293245469522,
"grad_norm": 11096799.0,
"learning_rate": 3.5766423357664236e-05,
"loss": 1.1138,
"step": 99
},
{
"epoch": 0.21965952773201539,
"grad_norm": 5840709.5,
"learning_rate": 3.613138686131387e-05,
"loss": 1.1854,
"step": 100
},
{
"epoch": 0.22185612300933552,
"grad_norm": 4187084.5,
"learning_rate": 3.649635036496351e-05,
"loss": 1.3305,
"step": 101
},
{
"epoch": 0.22405271828665568,
"grad_norm": 6342277.5,
"learning_rate": 3.686131386861314e-05,
"loss": 1.1569,
"step": 102
},
{
"epoch": 0.22624931356397585,
"grad_norm": 5076073.5,
"learning_rate": 3.722627737226278e-05,
"loss": 1.1453,
"step": 103
},
{
"epoch": 0.22844590884129598,
"grad_norm": 12348493.0,
"learning_rate": 3.759124087591241e-05,
"loss": 1.0243,
"step": 104
},
{
"epoch": 0.23064250411861614,
"grad_norm": 10997787.0,
"learning_rate": 3.795620437956204e-05,
"loss": 1.1016,
"step": 105
},
{
"epoch": 0.2328390993959363,
"grad_norm": 6869693.0,
"learning_rate": 3.832116788321168e-05,
"loss": 1.1914,
"step": 106
},
{
"epoch": 0.23503569467325644,
"grad_norm": 24705828.0,
"learning_rate": 3.868613138686132e-05,
"loss": 1.2834,
"step": 107
},
{
"epoch": 0.2372322899505766,
"grad_norm": 13220064.0,
"learning_rate": 3.905109489051095e-05,
"loss": 0.9956,
"step": 108
},
{
"epoch": 0.23942888522789676,
"grad_norm": 34857952.0,
"learning_rate": 3.941605839416059e-05,
"loss": 1.0755,
"step": 109
},
{
"epoch": 0.24162548050521693,
"grad_norm": 17391034.0,
"learning_rate": 3.978102189781022e-05,
"loss": 1.0426,
"step": 110
},
{
"epoch": 0.24382207578253706,
"grad_norm": 16179722.0,
"learning_rate": 4.0145985401459855e-05,
"loss": 1.1024,
"step": 111
},
{
"epoch": 0.24601867105985722,
"grad_norm": 19842270.0,
"learning_rate": 4.0510948905109494e-05,
"loss": 1.0659,
"step": 112
},
{
"epoch": 0.2482152663371774,
"grad_norm": 14679981.0,
"learning_rate": 4.0875912408759126e-05,
"loss": 1.0519,
"step": 113
},
{
"epoch": 0.2504118616144975,
"grad_norm": 22071618.0,
"learning_rate": 4.124087591240876e-05,
"loss": 1.0097,
"step": 114
},
{
"epoch": 0.25260845689181766,
"grad_norm": 22394396.0,
"learning_rate": 4.16058394160584e-05,
"loss": 1.1212,
"step": 115
},
{
"epoch": 0.25480505216913785,
"grad_norm": 7273321.5,
"learning_rate": 4.197080291970803e-05,
"loss": 1.1518,
"step": 116
},
{
"epoch": 0.257001647446458,
"grad_norm": 35136992.0,
"learning_rate": 4.233576642335767e-05,
"loss": 1.0966,
"step": 117
},
{
"epoch": 0.2591982427237781,
"grad_norm": 20715392.0,
"learning_rate": 4.27007299270073e-05,
"loss": 1.0206,
"step": 118
},
{
"epoch": 0.2613948380010983,
"grad_norm": 28416488.0,
"learning_rate": 4.306569343065693e-05,
"loss": 0.9672,
"step": 119
},
{
"epoch": 0.26359143327841844,
"grad_norm": 12644584.0,
"learning_rate": 4.343065693430657e-05,
"loss": 0.9307,
"step": 120
},
{
"epoch": 0.26578802855573863,
"grad_norm": 46953568.0,
"learning_rate": 4.379562043795621e-05,
"loss": 1.0041,
"step": 121
},
{
"epoch": 0.26798462383305877,
"grad_norm": 18650110.0,
"learning_rate": 4.416058394160584e-05,
"loss": 0.9494,
"step": 122
},
{
"epoch": 0.2701812191103789,
"grad_norm": 7411148.5,
"learning_rate": 4.452554744525548e-05,
"loss": 1.0256,
"step": 123
},
{
"epoch": 0.2723778143876991,
"grad_norm": 14795885.0,
"learning_rate": 4.489051094890511e-05,
"loss": 1.006,
"step": 124
},
{
"epoch": 0.2745744096650192,
"grad_norm": 7987120.0,
"learning_rate": 4.5255474452554745e-05,
"loss": 1.1112,
"step": 125
},
{
"epoch": 0.27677100494233936,
"grad_norm": 3664041.5,
"learning_rate": 4.5620437956204383e-05,
"loss": 0.827,
"step": 126
},
{
"epoch": 0.27896760021965955,
"grad_norm": 4188943.75,
"learning_rate": 4.5985401459854016e-05,
"loss": 1.0363,
"step": 127
},
{
"epoch": 0.2811641954969797,
"grad_norm": 10032366.0,
"learning_rate": 4.635036496350365e-05,
"loss": 1.3081,
"step": 128
},
{
"epoch": 0.2833607907742998,
"grad_norm": 19652358.0,
"learning_rate": 4.6715328467153287e-05,
"loss": 1.1723,
"step": 129
},
{
"epoch": 0.28555738605162,
"grad_norm": 29400118.0,
"learning_rate": 4.708029197080292e-05,
"loss": 1.0401,
"step": 130
},
{
"epoch": 0.28775398132894014,
"grad_norm": 6440731.0,
"learning_rate": 4.744525547445256e-05,
"loss": 0.9859,
"step": 131
},
{
"epoch": 0.2899505766062603,
"grad_norm": 6155487.0,
"learning_rate": 4.7810218978102196e-05,
"loss": 0.8675,
"step": 132
},
{
"epoch": 0.29214717188358047,
"grad_norm": 11451470.0,
"learning_rate": 4.817518248175183e-05,
"loss": 0.8143,
"step": 133
},
{
"epoch": 0.2943437671609006,
"grad_norm": 8476629.0,
"learning_rate": 4.854014598540147e-05,
"loss": 0.9655,
"step": 134
},
{
"epoch": 0.29654036243822074,
"grad_norm": 20757296.0,
"learning_rate": 4.89051094890511e-05,
"loss": 0.9572,
"step": 135
},
{
"epoch": 0.29873695771554093,
"grad_norm": 17907030.0,
"learning_rate": 4.927007299270073e-05,
"loss": 1.1968,
"step": 136
},
{
"epoch": 0.30093355299286106,
"grad_norm": 16662175.0,
"learning_rate": 4.963503649635037e-05,
"loss": 1.0537,
"step": 137
},
{
"epoch": 0.3031301482701812,
"grad_norm": 10720323.0,
"learning_rate": 5e-05,
"loss": 1.032,
"step": 138
},
{
"epoch": 0.3053267435475014,
"grad_norm": 6210951.5,
"learning_rate": 4.995928338762215e-05,
"loss": 0.9378,
"step": 139
},
{
"epoch": 0.3075233388248215,
"grad_norm": 12067100.0,
"learning_rate": 4.99185667752443e-05,
"loss": 1.0416,
"step": 140
},
{
"epoch": 0.30971993410214166,
"grad_norm": 10375612.0,
"learning_rate": 4.9877850162866454e-05,
"loss": 1.0868,
"step": 141
},
{
"epoch": 0.31191652937946185,
"grad_norm": 19215752.0,
"learning_rate": 4.9837133550488604e-05,
"loss": 1.1573,
"step": 142
},
{
"epoch": 0.314113124656782,
"grad_norm": 18632564.0,
"learning_rate": 4.9796416938110755e-05,
"loss": 1.0283,
"step": 143
},
{
"epoch": 0.3163097199341021,
"grad_norm": 11247008.0,
"learning_rate": 4.97557003257329e-05,
"loss": 1.2683,
"step": 144
},
{
"epoch": 0.3185063152114223,
"grad_norm": 10169539.0,
"learning_rate": 4.971498371335505e-05,
"loss": 1.0616,
"step": 145
},
{
"epoch": 0.32070291048874244,
"grad_norm": 14896106.0,
"learning_rate": 4.96742671009772e-05,
"loss": 1.0025,
"step": 146
},
{
"epoch": 0.3228995057660626,
"grad_norm": 11283311.0,
"learning_rate": 4.963355048859935e-05,
"loss": 0.919,
"step": 147
},
{
"epoch": 0.32509610104338277,
"grad_norm": 5662458.5,
"learning_rate": 4.95928338762215e-05,
"loss": 0.8772,
"step": 148
},
{
"epoch": 0.3272926963207029,
"grad_norm": 9590134.0,
"learning_rate": 4.955211726384365e-05,
"loss": 1.004,
"step": 149
},
{
"epoch": 0.32948929159802304,
"grad_norm": 6902963.5,
"learning_rate": 4.95114006514658e-05,
"loss": 1.2467,
"step": 150
},
{
"epoch": 0.3316858868753432,
"grad_norm": 33446322.0,
"learning_rate": 4.947068403908795e-05,
"loss": 1.0799,
"step": 151
},
{
"epoch": 0.33388248215266336,
"grad_norm": 14731285.0,
"learning_rate": 4.94299674267101e-05,
"loss": 1.2127,
"step": 152
},
{
"epoch": 0.33607907742998355,
"grad_norm": 15324599.0,
"learning_rate": 4.938925081433225e-05,
"loss": 0.8953,
"step": 153
},
{
"epoch": 0.3382756727073037,
"grad_norm": 12761835.0,
"learning_rate": 4.9348534201954396e-05,
"loss": 0.9621,
"step": 154
},
{
"epoch": 0.3404722679846238,
"grad_norm": 4396397.0,
"learning_rate": 4.930781758957655e-05,
"loss": 0.9655,
"step": 155
},
{
"epoch": 0.342668863261944,
"grad_norm": 18684486.0,
"learning_rate": 4.92671009771987e-05,
"loss": 0.961,
"step": 156
},
{
"epoch": 0.34486545853926415,
"grad_norm": 11647168.0,
"learning_rate": 4.922638436482085e-05,
"loss": 1.0236,
"step": 157
},
{
"epoch": 0.3470620538165843,
"grad_norm": 7403086.5,
"learning_rate": 4.9185667752443e-05,
"loss": 0.921,
"step": 158
},
{
"epoch": 0.34925864909390447,
"grad_norm": 9301290.0,
"learning_rate": 4.914495114006515e-05,
"loss": 1.073,
"step": 159
},
{
"epoch": 0.3514552443712246,
"grad_norm": 6785378.5,
"learning_rate": 4.91042345276873e-05,
"loss": 0.9421,
"step": 160
},
{
"epoch": 0.35365183964854474,
"grad_norm": 37305500.0,
"learning_rate": 4.906351791530945e-05,
"loss": 0.9959,
"step": 161
},
{
"epoch": 0.35584843492586493,
"grad_norm": 12157369.0,
"learning_rate": 4.90228013029316e-05,
"loss": 0.8274,
"step": 162
},
{
"epoch": 0.35804503020318507,
"grad_norm": 11638986.0,
"learning_rate": 4.898208469055375e-05,
"loss": 0.679,
"step": 163
},
{
"epoch": 0.3602416254805052,
"grad_norm": 16125764.0,
"learning_rate": 4.89413680781759e-05,
"loss": 0.8744,
"step": 164
},
{
"epoch": 0.3624382207578254,
"grad_norm": 5387654.5,
"learning_rate": 4.8900651465798044e-05,
"loss": 1.0482,
"step": 165
},
{
"epoch": 0.3646348160351455,
"grad_norm": 23652066.0,
"learning_rate": 4.8859934853420195e-05,
"loss": 0.9726,
"step": 166
},
{
"epoch": 0.36683141131246566,
"grad_norm": 9789622.0,
"learning_rate": 4.8819218241042345e-05,
"loss": 0.8733,
"step": 167
},
{
"epoch": 0.36902800658978585,
"grad_norm": 30906436.0,
"learning_rate": 4.8778501628664496e-05,
"loss": 0.9609,
"step": 168
},
{
"epoch": 0.371224601867106,
"grad_norm": 189772432.0,
"learning_rate": 4.8737785016286646e-05,
"loss": 1.0023,
"step": 169
},
{
"epoch": 0.3734211971444261,
"grad_norm": 8754641.0,
"learning_rate": 4.86970684039088e-05,
"loss": 1.0592,
"step": 170
},
{
"epoch": 0.3756177924217463,
"grad_norm": 17426248.0,
"learning_rate": 4.865635179153095e-05,
"loss": 0.8442,
"step": 171
},
{
"epoch": 0.37781438769906645,
"grad_norm": 4542470.0,
"learning_rate": 4.86156351791531e-05,
"loss": 0.7911,
"step": 172
},
{
"epoch": 0.3800109829763866,
"grad_norm": 8447095.0,
"learning_rate": 4.857491856677525e-05,
"loss": 0.9593,
"step": 173
},
{
"epoch": 0.38220757825370677,
"grad_norm": 7352316.5,
"learning_rate": 4.85342019543974e-05,
"loss": 1.1872,
"step": 174
},
{
"epoch": 0.3844041735310269,
"grad_norm": 5602028.5,
"learning_rate": 4.849348534201954e-05,
"loss": 0.7461,
"step": 175
},
{
"epoch": 0.38660076880834704,
"grad_norm": 4677864.0,
"learning_rate": 4.845276872964169e-05,
"loss": 1.0743,
"step": 176
},
{
"epoch": 0.38879736408566723,
"grad_norm": 4680797.5,
"learning_rate": 4.841205211726384e-05,
"loss": 0.6711,
"step": 177
},
{
"epoch": 0.39099395936298736,
"grad_norm": 5534360.0,
"learning_rate": 4.8371335504885994e-05,
"loss": 0.9604,
"step": 178
},
{
"epoch": 0.3931905546403075,
"grad_norm": 13123183.0,
"learning_rate": 4.8330618892508144e-05,
"loss": 1.017,
"step": 179
},
{
"epoch": 0.3953871499176277,
"grad_norm": 6758069.0,
"learning_rate": 4.8289902280130295e-05,
"loss": 0.8691,
"step": 180
},
{
"epoch": 0.3975837451949478,
"grad_norm": 8226452.0,
"learning_rate": 4.8249185667752445e-05,
"loss": 0.8608,
"step": 181
},
{
"epoch": 0.39978034047226796,
"grad_norm": 3854469.5,
"learning_rate": 4.8208469055374595e-05,
"loss": 0.7598,
"step": 182
},
{
"epoch": 0.40197693574958815,
"grad_norm": 18598116.0,
"learning_rate": 4.8167752442996746e-05,
"loss": 0.9759,
"step": 183
},
{
"epoch": 0.4041735310269083,
"grad_norm": 11485942.0,
"learning_rate": 4.8127035830618896e-05,
"loss": 1.2585,
"step": 184
},
{
"epoch": 0.4063701263042284,
"grad_norm": 11972959.0,
"learning_rate": 4.808631921824105e-05,
"loss": 1.0022,
"step": 185
},
{
"epoch": 0.4085667215815486,
"grad_norm": 6080062.0,
"learning_rate": 4.804560260586319e-05,
"loss": 0.9132,
"step": 186
},
{
"epoch": 0.41076331685886874,
"grad_norm": 8417478.0,
"learning_rate": 4.800488599348534e-05,
"loss": 0.7388,
"step": 187
},
{
"epoch": 0.41295991213618893,
"grad_norm": 10634729.0,
"learning_rate": 4.796416938110749e-05,
"loss": 0.9308,
"step": 188
},
{
"epoch": 0.41515650741350907,
"grad_norm": 11250071.0,
"learning_rate": 4.792345276872964e-05,
"loss": 1.033,
"step": 189
},
{
"epoch": 0.4173531026908292,
"grad_norm": 5495279.0,
"learning_rate": 4.788273615635179e-05,
"loss": 0.8692,
"step": 190
},
{
"epoch": 0.4195496979681494,
"grad_norm": 37268708.0,
"learning_rate": 4.784201954397394e-05,
"loss": 0.8936,
"step": 191
},
{
"epoch": 0.42174629324546953,
"grad_norm": 6610986.5,
"learning_rate": 4.780130293159609e-05,
"loss": 0.9738,
"step": 192
},
{
"epoch": 0.42394288852278966,
"grad_norm": 11809499.0,
"learning_rate": 4.7760586319218244e-05,
"loss": 0.9591,
"step": 193
},
{
"epoch": 0.42613948380010985,
"grad_norm": 8254889.0,
"learning_rate": 4.7719869706840394e-05,
"loss": 0.9874,
"step": 194
},
{
"epoch": 0.42833607907743,
"grad_norm": 7814906.0,
"learning_rate": 4.7679153094462545e-05,
"loss": 1.155,
"step": 195
},
{
"epoch": 0.4305326743547501,
"grad_norm": 7317868.5,
"learning_rate": 4.7638436482084695e-05,
"loss": 1.3268,
"step": 196
},
{
"epoch": 0.4327292696320703,
"grad_norm": 5934029.0,
"learning_rate": 4.759771986970684e-05,
"loss": 0.812,
"step": 197
},
{
"epoch": 0.43492586490939045,
"grad_norm": 5832792.5,
"learning_rate": 4.755700325732899e-05,
"loss": 0.7935,
"step": 198
},
{
"epoch": 0.4371224601867106,
"grad_norm": 8039267.0,
"learning_rate": 4.751628664495114e-05,
"loss": 0.8929,
"step": 199
},
{
"epoch": 0.43931905546403077,
"grad_norm": 9420844.0,
"learning_rate": 4.747557003257329e-05,
"loss": 0.9899,
"step": 200
},
{
"epoch": 0.4415156507413509,
"grad_norm": 2905903.25,
"learning_rate": 4.743485342019544e-05,
"loss": 0.7712,
"step": 201
},
{
"epoch": 0.44371224601867104,
"grad_norm": 12287270.0,
"learning_rate": 4.739413680781759e-05,
"loss": 0.814,
"step": 202
},
{
"epoch": 0.44590884129599123,
"grad_norm": 6525739.0,
"learning_rate": 4.735342019543974e-05,
"loss": 0.9559,
"step": 203
},
{
"epoch": 0.44810543657331137,
"grad_norm": 10583591.0,
"learning_rate": 4.731270358306189e-05,
"loss": 1.0787,
"step": 204
},
{
"epoch": 0.4503020318506315,
"grad_norm": 8354005.5,
"learning_rate": 4.727198697068404e-05,
"loss": 1.0828,
"step": 205
},
{
"epoch": 0.4524986271279517,
"grad_norm": 10097118.0,
"learning_rate": 4.723127035830619e-05,
"loss": 1.1523,
"step": 206
},
{
"epoch": 0.4546952224052718,
"grad_norm": 7784980.0,
"learning_rate": 4.719055374592834e-05,
"loss": 0.9164,
"step": 207
},
{
"epoch": 0.45689181768259196,
"grad_norm": 2736766.75,
"learning_rate": 4.714983713355049e-05,
"loss": 0.8624,
"step": 208
},
{
"epoch": 0.45908841295991215,
"grad_norm": 9288337.0,
"learning_rate": 4.710912052117264e-05,
"loss": 0.9828,
"step": 209
},
{
"epoch": 0.4612850082372323,
"grad_norm": 7567845.0,
"learning_rate": 4.706840390879479e-05,
"loss": 0.9977,
"step": 210
},
{
"epoch": 0.4634816035145524,
"grad_norm": 6014590.5,
"learning_rate": 4.702768729641694e-05,
"loss": 1.0515,
"step": 211
},
{
"epoch": 0.4656781987918726,
"grad_norm": 4778235.0,
"learning_rate": 4.698697068403909e-05,
"loss": 0.9762,
"step": 212
},
{
"epoch": 0.46787479406919275,
"grad_norm": 15021668.0,
"learning_rate": 4.694625407166124e-05,
"loss": 0.7264,
"step": 213
},
{
"epoch": 0.4700713893465129,
"grad_norm": 14335472.0,
"learning_rate": 4.690553745928339e-05,
"loss": 0.8519,
"step": 214
},
{
"epoch": 0.47226798462383307,
"grad_norm": 16014673.0,
"learning_rate": 4.686482084690554e-05,
"loss": 1.0334,
"step": 215
},
{
"epoch": 0.4744645799011532,
"grad_norm": 7501092.0,
"learning_rate": 4.682410423452769e-05,
"loss": 0.8861,
"step": 216
},
{
"epoch": 0.47666117517847334,
"grad_norm": 8373311.5,
"learning_rate": 4.678338762214984e-05,
"loss": 0.7788,
"step": 217
},
{
"epoch": 0.47885777045579353,
"grad_norm": 4377398.0,
"learning_rate": 4.6742671009771985e-05,
"loss": 0.9944,
"step": 218
},
{
"epoch": 0.48105436573311366,
"grad_norm": 42197748.0,
"learning_rate": 4.6701954397394135e-05,
"loss": 0.8525,
"step": 219
},
{
"epoch": 0.48325096101043385,
"grad_norm": 5442925.0,
"learning_rate": 4.6661237785016286e-05,
"loss": 0.9012,
"step": 220
},
{
"epoch": 0.485447556287754,
"grad_norm": 29140418.0,
"learning_rate": 4.6620521172638436e-05,
"loss": 0.9017,
"step": 221
},
{
"epoch": 0.4876441515650741,
"grad_norm": 7130746.0,
"learning_rate": 4.657980456026059e-05,
"loss": 0.7604,
"step": 222
},
{
"epoch": 0.4898407468423943,
"grad_norm": 5992578.0,
"learning_rate": 4.653908794788274e-05,
"loss": 0.931,
"step": 223
},
{
"epoch": 0.49203734211971445,
"grad_norm": 17682494.0,
"learning_rate": 4.649837133550489e-05,
"loss": 0.9846,
"step": 224
},
{
"epoch": 0.4942339373970346,
"grad_norm": 5666645.5,
"learning_rate": 4.645765472312704e-05,
"loss": 0.722,
"step": 225
},
{
"epoch": 0.4964305326743548,
"grad_norm": 11430500.0,
"learning_rate": 4.641693811074919e-05,
"loss": 1.1372,
"step": 226
},
{
"epoch": 0.4986271279516749,
"grad_norm": 7021308.5,
"learning_rate": 4.637622149837134e-05,
"loss": 0.7983,
"step": 227
},
{
"epoch": 0.500823723228995,
"grad_norm": 10584033.0,
"learning_rate": 4.633550488599348e-05,
"loss": 0.835,
"step": 228
},
{
"epoch": 0.5030203185063152,
"grad_norm": 16393499.0,
"learning_rate": 4.629478827361563e-05,
"loss": 0.8904,
"step": 229
},
{
"epoch": 0.5052169137836353,
"grad_norm": 2858034.5,
"learning_rate": 4.6254071661237784e-05,
"loss": 1.0295,
"step": 230
},
{
"epoch": 0.5074135090609555,
"grad_norm": 3826194.0,
"learning_rate": 4.6213355048859934e-05,
"loss": 0.9269,
"step": 231
},
{
"epoch": 0.5096101043382757,
"grad_norm": 3889286.75,
"learning_rate": 4.6172638436482085e-05,
"loss": 0.882,
"step": 232
},
{
"epoch": 0.5118066996155958,
"grad_norm": 11277812.0,
"learning_rate": 4.6131921824104235e-05,
"loss": 0.9674,
"step": 233
},
{
"epoch": 0.514003294892916,
"grad_norm": 4758247.0,
"learning_rate": 4.6091205211726385e-05,
"loss": 0.9393,
"step": 234
},
{
"epoch": 0.5161998901702362,
"grad_norm": 6524251.5,
"learning_rate": 4.6050488599348536e-05,
"loss": 0.9669,
"step": 235
},
{
"epoch": 0.5183964854475562,
"grad_norm": 6961989.0,
"learning_rate": 4.6009771986970686e-05,
"loss": 0.8194,
"step": 236
},
{
"epoch": 0.5205930807248764,
"grad_norm": 6262928.5,
"learning_rate": 4.596905537459284e-05,
"loss": 1.0925,
"step": 237
},
{
"epoch": 0.5227896760021966,
"grad_norm": 11089486.0,
"learning_rate": 4.592833876221499e-05,
"loss": 0.9925,
"step": 238
},
{
"epoch": 0.5249862712795168,
"grad_norm": 13835292.0,
"learning_rate": 4.588762214983713e-05,
"loss": 0.8165,
"step": 239
},
{
"epoch": 0.5271828665568369,
"grad_norm": 4045450.0,
"learning_rate": 4.584690553745928e-05,
"loss": 0.9027,
"step": 240
},
{
"epoch": 0.5293794618341571,
"grad_norm": 6143702.0,
"learning_rate": 4.580618892508143e-05,
"loss": 0.9063,
"step": 241
},
{
"epoch": 0.5315760571114773,
"grad_norm": 13785544.0,
"learning_rate": 4.576547231270358e-05,
"loss": 0.8607,
"step": 242
},
{
"epoch": 0.5337726523887973,
"grad_norm": 5596667.5,
"learning_rate": 4.572475570032573e-05,
"loss": 0.8006,
"step": 243
},
{
"epoch": 0.5359692476661175,
"grad_norm": 8459262.0,
"learning_rate": 4.568403908794788e-05,
"loss": 0.9847,
"step": 244
},
{
"epoch": 0.5381658429434377,
"grad_norm": 11312232.0,
"learning_rate": 4.5643322475570034e-05,
"loss": 0.9212,
"step": 245
},
{
"epoch": 0.5403624382207578,
"grad_norm": 7100279.0,
"learning_rate": 4.5602605863192184e-05,
"loss": 0.8963,
"step": 246
},
{
"epoch": 0.542559033498078,
"grad_norm": 9365418.0,
"learning_rate": 4.5561889250814335e-05,
"loss": 0.7869,
"step": 247
},
{
"epoch": 0.5447556287753982,
"grad_norm": 21830076.0,
"learning_rate": 4.5521172638436485e-05,
"loss": 0.9359,
"step": 248
},
{
"epoch": 0.5469522240527183,
"grad_norm": 4518256.5,
"learning_rate": 4.548045602605863e-05,
"loss": 0.8025,
"step": 249
},
{
"epoch": 0.5491488193300385,
"grad_norm": 9094974.0,
"learning_rate": 4.543973941368078e-05,
"loss": 0.8602,
"step": 250
},
{
"epoch": 0.5513454146073586,
"grad_norm": 15917480.0,
"learning_rate": 4.539902280130293e-05,
"loss": 0.9053,
"step": 251
},
{
"epoch": 0.5535420098846787,
"grad_norm": 6764456.0,
"learning_rate": 4.535830618892508e-05,
"loss": 0.8822,
"step": 252
},
{
"epoch": 0.5557386051619989,
"grad_norm": 4736989.5,
"learning_rate": 4.531758957654723e-05,
"loss": 0.9517,
"step": 253
},
{
"epoch": 0.5579352004393191,
"grad_norm": 3093551.25,
"learning_rate": 4.527687296416938e-05,
"loss": 0.8166,
"step": 254
},
{
"epoch": 0.5601317957166392,
"grad_norm": 10738455.0,
"learning_rate": 4.523615635179153e-05,
"loss": 0.9489,
"step": 255
},
{
"epoch": 0.5623283909939594,
"grad_norm": 7849633.0,
"learning_rate": 4.519543973941368e-05,
"loss": 0.9002,
"step": 256
},
{
"epoch": 0.5645249862712796,
"grad_norm": 3027068.75,
"learning_rate": 4.515472312703583e-05,
"loss": 0.7536,
"step": 257
},
{
"epoch": 0.5667215815485996,
"grad_norm": 2546966.0,
"learning_rate": 4.511400651465798e-05,
"loss": 0.9777,
"step": 258
},
{
"epoch": 0.5689181768259198,
"grad_norm": 22920500.0,
"learning_rate": 4.507328990228013e-05,
"loss": 0.8078,
"step": 259
},
{
"epoch": 0.57111477210324,
"grad_norm": 3398881.0,
"learning_rate": 4.503257328990228e-05,
"loss": 0.9109,
"step": 260
},
{
"epoch": 0.5733113673805601,
"grad_norm": 10613815.0,
"learning_rate": 4.499185667752443e-05,
"loss": 0.9227,
"step": 261
},
{
"epoch": 0.5755079626578803,
"grad_norm": 5875875.5,
"learning_rate": 4.495114006514658e-05,
"loss": 0.8826,
"step": 262
},
{
"epoch": 0.5777045579352005,
"grad_norm": 9503300.0,
"learning_rate": 4.491042345276873e-05,
"loss": 0.7595,
"step": 263
},
{
"epoch": 0.5799011532125206,
"grad_norm": 14739306.0,
"learning_rate": 4.486970684039088e-05,
"loss": 0.9293,
"step": 264
},
{
"epoch": 0.5820977484898407,
"grad_norm": 30256120.0,
"learning_rate": 4.482899022801303e-05,
"loss": 1.227,
"step": 265
},
{
"epoch": 0.5842943437671609,
"grad_norm": 4365349.5,
"learning_rate": 4.478827361563518e-05,
"loss": 0.9806,
"step": 266
},
{
"epoch": 0.586490939044481,
"grad_norm": 32792546.0,
"learning_rate": 4.474755700325733e-05,
"loss": 0.9428,
"step": 267
},
{
"epoch": 0.5886875343218012,
"grad_norm": 8507007.0,
"learning_rate": 4.470684039087948e-05,
"loss": 0.9196,
"step": 268
},
{
"epoch": 0.5908841295991214,
"grad_norm": 3569782.25,
"learning_rate": 4.466612377850163e-05,
"loss": 1.0419,
"step": 269
},
{
"epoch": 0.5930807248764415,
"grad_norm": 21276318.0,
"learning_rate": 4.462540716612378e-05,
"loss": 0.9971,
"step": 270
},
{
"epoch": 0.5952773201537617,
"grad_norm": 6198542.5,
"learning_rate": 4.4584690553745925e-05,
"loss": 0.8048,
"step": 271
},
{
"epoch": 0.5974739154310819,
"grad_norm": 14407476.0,
"learning_rate": 4.4543973941368076e-05,
"loss": 0.8347,
"step": 272
},
{
"epoch": 0.5996705107084019,
"grad_norm": 6460475.5,
"learning_rate": 4.4503257328990226e-05,
"loss": 0.8249,
"step": 273
},
{
"epoch": 0.6018671059857221,
"grad_norm": 1941072.0,
"learning_rate": 4.446254071661238e-05,
"loss": 0.9565,
"step": 274
},
{
"epoch": 0.6040637012630423,
"grad_norm": 5871742.5,
"learning_rate": 4.442182410423453e-05,
"loss": 0.9288,
"step": 275
},
{
"epoch": 0.6062602965403624,
"grad_norm": 4966977.0,
"learning_rate": 4.438110749185668e-05,
"loss": 0.6796,
"step": 276
},
{
"epoch": 0.6084568918176826,
"grad_norm": 7954624.5,
"learning_rate": 4.434039087947883e-05,
"loss": 0.9218,
"step": 277
},
{
"epoch": 0.6106534870950028,
"grad_norm": 4816445.0,
"learning_rate": 4.429967426710098e-05,
"loss": 1.042,
"step": 278
},
{
"epoch": 0.6128500823723229,
"grad_norm": 3931317.75,
"learning_rate": 4.425895765472313e-05,
"loss": 0.8656,
"step": 279
},
{
"epoch": 0.615046677649643,
"grad_norm": 8411777.0,
"learning_rate": 4.421824104234528e-05,
"loss": 0.7576,
"step": 280
},
{
"epoch": 0.6172432729269632,
"grad_norm": 2942928.25,
"learning_rate": 4.417752442996742e-05,
"loss": 0.7349,
"step": 281
},
{
"epoch": 0.6194398682042833,
"grad_norm": 9794862.0,
"learning_rate": 4.4136807817589574e-05,
"loss": 0.7857,
"step": 282
},
{
"epoch": 0.6216364634816035,
"grad_norm": 11497258.0,
"learning_rate": 4.4096091205211724e-05,
"loss": 0.9657,
"step": 283
},
{
"epoch": 0.6238330587589237,
"grad_norm": 7352097.0,
"learning_rate": 4.4055374592833875e-05,
"loss": 0.7933,
"step": 284
},
{
"epoch": 0.6260296540362438,
"grad_norm": 4899710.0,
"learning_rate": 4.4014657980456025e-05,
"loss": 0.8233,
"step": 285
},
{
"epoch": 0.628226249313564,
"grad_norm": 4511158.5,
"learning_rate": 4.3973941368078175e-05,
"loss": 0.7914,
"step": 286
},
{
"epoch": 0.6304228445908842,
"grad_norm": 20783182.0,
"learning_rate": 4.3933224755700326e-05,
"loss": 0.9018,
"step": 287
},
{
"epoch": 0.6326194398682042,
"grad_norm": 6158838.0,
"learning_rate": 4.3892508143322476e-05,
"loss": 0.8031,
"step": 288
},
{
"epoch": 0.6348160351455244,
"grad_norm": 23449790.0,
"learning_rate": 4.385179153094463e-05,
"loss": 0.7383,
"step": 289
},
{
"epoch": 0.6370126304228446,
"grad_norm": 6257906.0,
"learning_rate": 4.381107491856678e-05,
"loss": 0.7691,
"step": 290
},
{
"epoch": 0.6392092257001647,
"grad_norm": 4492374.0,
"learning_rate": 4.377035830618893e-05,
"loss": 0.987,
"step": 291
},
{
"epoch": 0.6414058209774849,
"grad_norm": 17023156.0,
"learning_rate": 4.372964169381108e-05,
"loss": 0.9187,
"step": 292
},
{
"epoch": 0.6436024162548051,
"grad_norm": 6583997.5,
"learning_rate": 4.368892508143323e-05,
"loss": 0.8994,
"step": 293
},
{
"epoch": 0.6457990115321252,
"grad_norm": 7650719.5,
"learning_rate": 4.364820846905538e-05,
"loss": 0.9232,
"step": 294
},
{
"epoch": 0.6479956068094453,
"grad_norm": 12442975.0,
"learning_rate": 4.360749185667753e-05,
"loss": 0.8611,
"step": 295
},
{
"epoch": 0.6501922020867655,
"grad_norm": 11185547.0,
"learning_rate": 4.356677524429968e-05,
"loss": 0.8816,
"step": 296
},
{
"epoch": 0.6523887973640856,
"grad_norm": 2894880.5,
"learning_rate": 4.352605863192183e-05,
"loss": 0.7881,
"step": 297
},
{
"epoch": 0.6545853926414058,
"grad_norm": 8969189.0,
"learning_rate": 4.348534201954398e-05,
"loss": 0.8634,
"step": 298
},
{
"epoch": 0.656781987918726,
"grad_norm": 6360104.5,
"learning_rate": 4.344462540716613e-05,
"loss": 0.8561,
"step": 299
},
{
"epoch": 0.6589785831960461,
"grad_norm": 15523615.0,
"learning_rate": 4.3403908794788275e-05,
"loss": 0.8584,
"step": 300
},
{
"epoch": 0.6611751784733663,
"grad_norm": 18714836.0,
"learning_rate": 4.3363192182410426e-05,
"loss": 0.9762,
"step": 301
},
{
"epoch": 0.6633717737506865,
"grad_norm": 11480799.0,
"learning_rate": 4.3322475570032576e-05,
"loss": 0.8664,
"step": 302
},
{
"epoch": 0.6655683690280065,
"grad_norm": 9781725.0,
"learning_rate": 4.3281758957654726e-05,
"loss": 0.7704,
"step": 303
},
{
"epoch": 0.6677649643053267,
"grad_norm": 13431326.0,
"learning_rate": 4.324104234527688e-05,
"loss": 0.9839,
"step": 304
},
{
"epoch": 0.6699615595826469,
"grad_norm": 6600915.5,
"learning_rate": 4.320032573289903e-05,
"loss": 0.9311,
"step": 305
},
{
"epoch": 0.6721581548599671,
"grad_norm": 7580225.0,
"learning_rate": 4.315960912052118e-05,
"loss": 0.8591,
"step": 306
},
{
"epoch": 0.6743547501372872,
"grad_norm": 6451969.0,
"learning_rate": 4.311889250814333e-05,
"loss": 0.9092,
"step": 307
},
{
"epoch": 0.6765513454146074,
"grad_norm": 4211526.0,
"learning_rate": 4.307817589576548e-05,
"loss": 0.9243,
"step": 308
},
{
"epoch": 0.6787479406919276,
"grad_norm": 4953117.5,
"learning_rate": 4.303745928338763e-05,
"loss": 0.9456,
"step": 309
},
{
"epoch": 0.6809445359692476,
"grad_norm": 10224394.0,
"learning_rate": 4.299674267100978e-05,
"loss": 0.9441,
"step": 310
},
{
"epoch": 0.6831411312465678,
"grad_norm": 22817490.0,
"learning_rate": 4.295602605863192e-05,
"loss": 0.8026,
"step": 311
},
{
"epoch": 0.685337726523888,
"grad_norm": 8647461.0,
"learning_rate": 4.2915309446254074e-05,
"loss": 0.9289,
"step": 312
},
{
"epoch": 0.6875343218012081,
"grad_norm": 16536851.0,
"learning_rate": 4.2874592833876224e-05,
"loss": 0.8415,
"step": 313
},
{
"epoch": 0.6897309170785283,
"grad_norm": 11579658.0,
"learning_rate": 4.2833876221498375e-05,
"loss": 0.6157,
"step": 314
},
{
"epoch": 0.6919275123558485,
"grad_norm": 5614166.0,
"learning_rate": 4.2793159609120525e-05,
"loss": 0.6692,
"step": 315
},
{
"epoch": 0.6941241076331686,
"grad_norm": 16915516.0,
"learning_rate": 4.2752442996742676e-05,
"loss": 0.9245,
"step": 316
},
{
"epoch": 0.6963207029104888,
"grad_norm": 5778363.0,
"learning_rate": 4.2711726384364826e-05,
"loss": 1.0392,
"step": 317
},
{
"epoch": 0.6985172981878089,
"grad_norm": 5275346.5,
"learning_rate": 4.2671009771986977e-05,
"loss": 0.9945,
"step": 318
},
{
"epoch": 0.700713893465129,
"grad_norm": 19296752.0,
"learning_rate": 4.263029315960913e-05,
"loss": 0.793,
"step": 319
},
{
"epoch": 0.7029104887424492,
"grad_norm": 4581366.5,
"learning_rate": 4.258957654723128e-05,
"loss": 0.825,
"step": 320
},
{
"epoch": 0.7051070840197694,
"grad_norm": 3308216.25,
"learning_rate": 4.254885993485342e-05,
"loss": 0.8774,
"step": 321
},
{
"epoch": 0.7073036792970895,
"grad_norm": 7703581.5,
"learning_rate": 4.250814332247557e-05,
"loss": 0.8918,
"step": 322
},
{
"epoch": 0.7095002745744097,
"grad_norm": 6396188.0,
"learning_rate": 4.246742671009772e-05,
"loss": 0.7692,
"step": 323
},
{
"epoch": 0.7116968698517299,
"grad_norm": 6489722.5,
"learning_rate": 4.242671009771987e-05,
"loss": 0.9539,
"step": 324
},
{
"epoch": 0.7138934651290499,
"grad_norm": 22732088.0,
"learning_rate": 4.238599348534202e-05,
"loss": 0.8366,
"step": 325
},
{
"epoch": 0.7160900604063701,
"grad_norm": 3959640.0,
"learning_rate": 4.2345276872964173e-05,
"loss": 0.8401,
"step": 326
},
{
"epoch": 0.7182866556836903,
"grad_norm": 4880879.5,
"learning_rate": 4.2304560260586324e-05,
"loss": 0.8851,
"step": 327
},
{
"epoch": 0.7204832509610104,
"grad_norm": 23368146.0,
"learning_rate": 4.2263843648208474e-05,
"loss": 1.0028,
"step": 328
},
{
"epoch": 0.7226798462383306,
"grad_norm": 9643974.0,
"learning_rate": 4.2223127035830625e-05,
"loss": 0.8387,
"step": 329
},
{
"epoch": 0.7248764415156508,
"grad_norm": 3867018.0,
"learning_rate": 4.2182410423452775e-05,
"loss": 0.8287,
"step": 330
},
{
"epoch": 0.7270730367929709,
"grad_norm": 7396840.5,
"learning_rate": 4.2141693811074926e-05,
"loss": 1.0076,
"step": 331
},
{
"epoch": 0.729269632070291,
"grad_norm": 4308874.5,
"learning_rate": 4.210097719869707e-05,
"loss": 0.8102,
"step": 332
},
{
"epoch": 0.7314662273476112,
"grad_norm": 11172095.0,
"learning_rate": 4.206026058631922e-05,
"loss": 0.9213,
"step": 333
},
{
"epoch": 0.7336628226249313,
"grad_norm": 16709707.0,
"learning_rate": 4.201954397394137e-05,
"loss": 0.9974,
"step": 334
},
{
"epoch": 0.7358594179022515,
"grad_norm": 9412243.0,
"learning_rate": 4.197882736156352e-05,
"loss": 0.8619,
"step": 335
},
{
"epoch": 0.7380560131795717,
"grad_norm": 4649884.0,
"learning_rate": 4.193811074918567e-05,
"loss": 0.8352,
"step": 336
},
{
"epoch": 0.7402526084568918,
"grad_norm": 4795967.0,
"learning_rate": 4.189739413680782e-05,
"loss": 1.0452,
"step": 337
},
{
"epoch": 0.742449203734212,
"grad_norm": 21222040.0,
"learning_rate": 4.185667752442997e-05,
"loss": 0.821,
"step": 338
},
{
"epoch": 0.7446457990115322,
"grad_norm": 8605101.0,
"learning_rate": 4.181596091205212e-05,
"loss": 0.89,
"step": 339
},
{
"epoch": 0.7468423942888522,
"grad_norm": 10975735.0,
"learning_rate": 4.177524429967427e-05,
"loss": 0.9122,
"step": 340
},
{
"epoch": 0.7490389895661724,
"grad_norm": 5105589.0,
"learning_rate": 4.1734527687296424e-05,
"loss": 0.81,
"step": 341
},
{
"epoch": 0.7512355848434926,
"grad_norm": 11751471.0,
"learning_rate": 4.1693811074918574e-05,
"loss": 0.7396,
"step": 342
},
{
"epoch": 0.7534321801208127,
"grad_norm": 8476453.0,
"learning_rate": 4.165309446254072e-05,
"loss": 0.8053,
"step": 343
},
{
"epoch": 0.7556287753981329,
"grad_norm": 5329847.0,
"learning_rate": 4.161237785016287e-05,
"loss": 0.8507,
"step": 344
},
{
"epoch": 0.7578253706754531,
"grad_norm": 4554331.0,
"learning_rate": 4.157166123778502e-05,
"loss": 0.7138,
"step": 345
},
{
"epoch": 0.7600219659527732,
"grad_norm": 9091578.0,
"learning_rate": 4.153094462540717e-05,
"loss": 0.8644,
"step": 346
},
{
"epoch": 0.7622185612300933,
"grad_norm": 27390056.0,
"learning_rate": 4.149022801302932e-05,
"loss": 0.8909,
"step": 347
},
{
"epoch": 0.7644151565074135,
"grad_norm": 9285642.0,
"learning_rate": 4.144951140065147e-05,
"loss": 1.078,
"step": 348
},
{
"epoch": 0.7666117517847336,
"grad_norm": 16344011.0,
"learning_rate": 4.140879478827362e-05,
"loss": 0.6494,
"step": 349
},
{
"epoch": 0.7688083470620538,
"grad_norm": 5823217.0,
"learning_rate": 4.136807817589577e-05,
"loss": 1.0222,
"step": 350
},
{
"epoch": 0.771004942339374,
"grad_norm": 24863778.0,
"learning_rate": 4.132736156351792e-05,
"loss": 1.0486,
"step": 351
},
{
"epoch": 0.7732015376166941,
"grad_norm": 8073097.0,
"learning_rate": 4.128664495114007e-05,
"loss": 0.8891,
"step": 352
},
{
"epoch": 0.7753981328940143,
"grad_norm": 2275112.75,
"learning_rate": 4.1245928338762215e-05,
"loss": 0.687,
"step": 353
},
{
"epoch": 0.7775947281713345,
"grad_norm": 29366184.0,
"learning_rate": 4.1205211726384366e-05,
"loss": 0.8847,
"step": 354
},
{
"epoch": 0.7797913234486545,
"grad_norm": 8535258.0,
"learning_rate": 4.1164495114006516e-05,
"loss": 0.6604,
"step": 355
},
{
"epoch": 0.7819879187259747,
"grad_norm": 5347244.5,
"learning_rate": 4.112377850162867e-05,
"loss": 0.7014,
"step": 356
},
{
"epoch": 0.7841845140032949,
"grad_norm": 17013644.0,
"learning_rate": 4.108306188925082e-05,
"loss": 0.8369,
"step": 357
},
{
"epoch": 0.786381109280615,
"grad_norm": 5505895.0,
"learning_rate": 4.104234527687297e-05,
"loss": 0.8252,
"step": 358
},
{
"epoch": 0.7885777045579352,
"grad_norm": 47764372.0,
"learning_rate": 4.100162866449512e-05,
"loss": 0.822,
"step": 359
},
{
"epoch": 0.7907742998352554,
"grad_norm": 4797409.5,
"learning_rate": 4.096091205211727e-05,
"loss": 0.694,
"step": 360
},
{
"epoch": 0.7929708951125755,
"grad_norm": 5890969.5,
"learning_rate": 4.092019543973942e-05,
"loss": 0.9714,
"step": 361
},
{
"epoch": 0.7951674903898956,
"grad_norm": 7213682.0,
"learning_rate": 4.087947882736157e-05,
"loss": 0.884,
"step": 362
},
{
"epoch": 0.7973640856672158,
"grad_norm": 10531884.0,
"learning_rate": 4.083876221498372e-05,
"loss": 0.7182,
"step": 363
},
{
"epoch": 0.7995606809445359,
"grad_norm": 20195876.0,
"learning_rate": 4.0798045602605864e-05,
"loss": 0.7789,
"step": 364
},
{
"epoch": 0.8017572762218561,
"grad_norm": 1930498.0,
"learning_rate": 4.0757328990228014e-05,
"loss": 0.6679,
"step": 365
},
{
"epoch": 0.8039538714991763,
"grad_norm": 7317309.0,
"learning_rate": 4.0716612377850165e-05,
"loss": 0.925,
"step": 366
},
{
"epoch": 0.8061504667764964,
"grad_norm": 5059402.0,
"learning_rate": 4.0675895765472315e-05,
"loss": 0.843,
"step": 367
},
{
"epoch": 0.8083470620538166,
"grad_norm": 4036758.0,
"learning_rate": 4.0635179153094466e-05,
"loss": 0.7063,
"step": 368
},
{
"epoch": 0.8105436573311368,
"grad_norm": 8493004.0,
"learning_rate": 4.0594462540716616e-05,
"loss": 0.7426,
"step": 369
},
{
"epoch": 0.8127402526084568,
"grad_norm": 7284358.5,
"learning_rate": 4.0553745928338767e-05,
"loss": 0.6303,
"step": 370
},
{
"epoch": 0.814936847885777,
"grad_norm": 15478656.0,
"learning_rate": 4.051302931596092e-05,
"loss": 0.9083,
"step": 371
},
{
"epoch": 0.8171334431630972,
"grad_norm": 2402556.75,
"learning_rate": 4.047231270358307e-05,
"loss": 0.8856,
"step": 372
},
{
"epoch": 0.8193300384404174,
"grad_norm": 5181035.5,
"learning_rate": 4.043159609120522e-05,
"loss": 0.8156,
"step": 373
},
{
"epoch": 0.8215266337177375,
"grad_norm": 12148220.0,
"learning_rate": 4.039087947882736e-05,
"loss": 0.8112,
"step": 374
},
{
"epoch": 0.8237232289950577,
"grad_norm": 9295641.0,
"learning_rate": 4.035016286644951e-05,
"loss": 0.9305,
"step": 375
},
{
"epoch": 0.8259198242723779,
"grad_norm": 3331442.0,
"learning_rate": 4.030944625407166e-05,
"loss": 0.8619,
"step": 376
},
{
"epoch": 0.828116419549698,
"grad_norm": 11976113.0,
"learning_rate": 4.026872964169381e-05,
"loss": 0.9033,
"step": 377
},
{
"epoch": 0.8303130148270181,
"grad_norm": 12333790.0,
"learning_rate": 4.0228013029315963e-05,
"loss": 0.8309,
"step": 378
},
{
"epoch": 0.8325096101043383,
"grad_norm": 18306704.0,
"learning_rate": 4.0187296416938114e-05,
"loss": 0.6869,
"step": 379
},
{
"epoch": 0.8347062053816584,
"grad_norm": 12500234.0,
"learning_rate": 4.0146579804560264e-05,
"loss": 0.9265,
"step": 380
},
{
"epoch": 0.8369028006589786,
"grad_norm": 3638457.25,
"learning_rate": 4.0105863192182415e-05,
"loss": 0.8659,
"step": 381
},
{
"epoch": 0.8390993959362988,
"grad_norm": 39964704.0,
"learning_rate": 4.0065146579804565e-05,
"loss": 0.8859,
"step": 382
},
{
"epoch": 0.8412959912136189,
"grad_norm": 1792221.5,
"learning_rate": 4.0024429967426716e-05,
"loss": 0.8887,
"step": 383
},
{
"epoch": 0.8434925864909391,
"grad_norm": 73207232.0,
"learning_rate": 3.9983713355048866e-05,
"loss": 0.903,
"step": 384
},
{
"epoch": 0.8456891817682592,
"grad_norm": 3413364.75,
"learning_rate": 3.994299674267101e-05,
"loss": 0.9844,
"step": 385
},
{
"epoch": 0.8478857770455793,
"grad_norm": 8311068.5,
"learning_rate": 3.990228013029316e-05,
"loss": 0.9515,
"step": 386
},
{
"epoch": 0.8500823723228995,
"grad_norm": 2447756.75,
"learning_rate": 3.986156351791531e-05,
"loss": 0.7988,
"step": 387
},
{
"epoch": 0.8522789676002197,
"grad_norm": 4686553.5,
"learning_rate": 3.982084690553746e-05,
"loss": 0.9065,
"step": 388
},
{
"epoch": 0.8544755628775398,
"grad_norm": 5194229.0,
"learning_rate": 3.978013029315961e-05,
"loss": 0.6724,
"step": 389
},
{
"epoch": 0.85667215815486,
"grad_norm": 3372298.0,
"learning_rate": 3.973941368078176e-05,
"loss": 0.7658,
"step": 390
},
{
"epoch": 0.8588687534321802,
"grad_norm": 5034177.0,
"learning_rate": 3.969869706840391e-05,
"loss": 0.8142,
"step": 391
},
{
"epoch": 0.8610653487095002,
"grad_norm": 5968269.5,
"learning_rate": 3.965798045602606e-05,
"loss": 1.0806,
"step": 392
},
{
"epoch": 0.8632619439868204,
"grad_norm": 8287274.0,
"learning_rate": 3.9617263843648214e-05,
"loss": 1.1554,
"step": 393
},
{
"epoch": 0.8654585392641406,
"grad_norm": 17271268.0,
"learning_rate": 3.9576547231270364e-05,
"loss": 0.8774,
"step": 394
},
{
"epoch": 0.8676551345414607,
"grad_norm": 12984409.0,
"learning_rate": 3.953583061889251e-05,
"loss": 0.7632,
"step": 395
},
{
"epoch": 0.8698517298187809,
"grad_norm": 4521503.5,
"learning_rate": 3.949511400651466e-05,
"loss": 0.8149,
"step": 396
},
{
"epoch": 0.8720483250961011,
"grad_norm": 3361893.25,
"learning_rate": 3.945439739413681e-05,
"loss": 0.8628,
"step": 397
},
{
"epoch": 0.8742449203734212,
"grad_norm": 10988202.0,
"learning_rate": 3.941368078175896e-05,
"loss": 0.7513,
"step": 398
},
{
"epoch": 0.8764415156507414,
"grad_norm": 2900712.25,
"learning_rate": 3.937296416938111e-05,
"loss": 0.8525,
"step": 399
},
{
"epoch": 0.8786381109280615,
"grad_norm": 5232877.5,
"learning_rate": 3.933224755700326e-05,
"loss": 0.928,
"step": 400
},
{
"epoch": 0.8808347062053816,
"grad_norm": 53784552.0,
"learning_rate": 3.929153094462541e-05,
"loss": 0.6787,
"step": 401
},
{
"epoch": 0.8830313014827018,
"grad_norm": 21809352.0,
"learning_rate": 3.925081433224756e-05,
"loss": 0.8371,
"step": 402
},
{
"epoch": 0.885227896760022,
"grad_norm": 4536432.0,
"learning_rate": 3.921009771986971e-05,
"loss": 0.9664,
"step": 403
},
{
"epoch": 0.8874244920373421,
"grad_norm": 6359187.5,
"learning_rate": 3.916938110749186e-05,
"loss": 0.9898,
"step": 404
},
{
"epoch": 0.8896210873146623,
"grad_norm": 7551597.0,
"learning_rate": 3.912866449511401e-05,
"loss": 0.705,
"step": 405
},
{
"epoch": 0.8918176825919825,
"grad_norm": 6143014.0,
"learning_rate": 3.9087947882736156e-05,
"loss": 0.7789,
"step": 406
},
{
"epoch": 0.8940142778693025,
"grad_norm": 5476680.0,
"learning_rate": 3.9047231270358306e-05,
"loss": 0.6997,
"step": 407
},
{
"epoch": 0.8962108731466227,
"grad_norm": 12279581.0,
"learning_rate": 3.900651465798046e-05,
"loss": 0.7033,
"step": 408
},
{
"epoch": 0.8984074684239429,
"grad_norm": 3615422.5,
"learning_rate": 3.896579804560261e-05,
"loss": 0.7568,
"step": 409
},
{
"epoch": 0.900604063701263,
"grad_norm": 7852239.5,
"learning_rate": 3.892508143322476e-05,
"loss": 0.9656,
"step": 410
},
{
"epoch": 0.9028006589785832,
"grad_norm": 6948022.0,
"learning_rate": 3.888436482084691e-05,
"loss": 0.7215,
"step": 411
},
{
"epoch": 0.9049972542559034,
"grad_norm": 2027938.75,
"learning_rate": 3.884364820846906e-05,
"loss": 0.743,
"step": 412
},
{
"epoch": 0.9071938495332235,
"grad_norm": 9140767.0,
"learning_rate": 3.880293159609121e-05,
"loss": 0.8042,
"step": 413
},
{
"epoch": 0.9093904448105437,
"grad_norm": 7185347.0,
"learning_rate": 3.876221498371336e-05,
"loss": 0.9513,
"step": 414
},
{
"epoch": 0.9115870400878638,
"grad_norm": 15829996.0,
"learning_rate": 3.872149837133551e-05,
"loss": 0.7824,
"step": 415
},
{
"epoch": 0.9137836353651839,
"grad_norm": 11386417.0,
"learning_rate": 3.8680781758957654e-05,
"loss": 0.9135,
"step": 416
},
{
"epoch": 0.9159802306425041,
"grad_norm": 10734097.0,
"learning_rate": 3.8640065146579804e-05,
"loss": 0.85,
"step": 417
},
{
"epoch": 0.9181768259198243,
"grad_norm": 3796483.75,
"learning_rate": 3.8599348534201955e-05,
"loss": 0.5997,
"step": 418
},
{
"epoch": 0.9203734211971444,
"grad_norm": 11318456.0,
"learning_rate": 3.8558631921824105e-05,
"loss": 0.7123,
"step": 419
},
{
"epoch": 0.9225700164744646,
"grad_norm": 6741748.5,
"learning_rate": 3.8517915309446256e-05,
"loss": 0.8653,
"step": 420
},
{
"epoch": 0.9247666117517848,
"grad_norm": 18654650.0,
"learning_rate": 3.8477198697068406e-05,
"loss": 0.8113,
"step": 421
},
{
"epoch": 0.9269632070291048,
"grad_norm": 6612527.5,
"learning_rate": 3.8436482084690556e-05,
"loss": 0.8149,
"step": 422
},
{
"epoch": 0.929159802306425,
"grad_norm": 2267397.25,
"learning_rate": 3.839576547231271e-05,
"loss": 0.9744,
"step": 423
},
{
"epoch": 0.9313563975837452,
"grad_norm": 3449570.5,
"learning_rate": 3.835504885993486e-05,
"loss": 0.8645,
"step": 424
},
{
"epoch": 0.9335529928610653,
"grad_norm": 2820739.5,
"learning_rate": 3.831433224755701e-05,
"loss": 0.7483,
"step": 425
},
{
"epoch": 0.9357495881383855,
"grad_norm": 4588689.5,
"learning_rate": 3.827361563517916e-05,
"loss": 0.7371,
"step": 426
},
{
"epoch": 0.9379461834157057,
"grad_norm": 2832083.25,
"learning_rate": 3.82328990228013e-05,
"loss": 0.8102,
"step": 427
},
{
"epoch": 0.9401427786930258,
"grad_norm": 15555140.0,
"learning_rate": 3.819218241042345e-05,
"loss": 0.6949,
"step": 428
},
{
"epoch": 0.942339373970346,
"grad_norm": 7799302.5,
"learning_rate": 3.81514657980456e-05,
"loss": 0.6668,
"step": 429
},
{
"epoch": 0.9445359692476661,
"grad_norm": 2267858.5,
"learning_rate": 3.811074918566775e-05,
"loss": 0.8553,
"step": 430
},
{
"epoch": 0.9467325645249862,
"grad_norm": 5764214.0,
"learning_rate": 3.8070032573289904e-05,
"loss": 0.7074,
"step": 431
},
{
"epoch": 0.9489291598023064,
"grad_norm": 9709723.0,
"learning_rate": 3.8029315960912054e-05,
"loss": 0.9449,
"step": 432
},
{
"epoch": 0.9511257550796266,
"grad_norm": 3719678.75,
"learning_rate": 3.7988599348534205e-05,
"loss": 0.8546,
"step": 433
},
{
"epoch": 0.9533223503569467,
"grad_norm": 10701060.0,
"learning_rate": 3.7947882736156355e-05,
"loss": 0.7293,
"step": 434
},
{
"epoch": 0.9555189456342669,
"grad_norm": 8692140.0,
"learning_rate": 3.7907166123778506e-05,
"loss": 0.7623,
"step": 435
},
{
"epoch": 0.9577155409115871,
"grad_norm": 1107094.75,
"learning_rate": 3.7866449511400656e-05,
"loss": 0.6784,
"step": 436
},
{
"epoch": 0.9599121361889071,
"grad_norm": 3959699.5,
"learning_rate": 3.7825732899022807e-05,
"loss": 0.8284,
"step": 437
},
{
"epoch": 0.9621087314662273,
"grad_norm": 3346395.75,
"learning_rate": 3.778501628664495e-05,
"loss": 0.7126,
"step": 438
},
{
"epoch": 0.9643053267435475,
"grad_norm": 3931378.75,
"learning_rate": 3.77442996742671e-05,
"loss": 1.0596,
"step": 439
},
{
"epoch": 0.9665019220208677,
"grad_norm": 2804710.0,
"learning_rate": 3.770358306188925e-05,
"loss": 0.9038,
"step": 440
},
{
"epoch": 0.9686985172981878,
"grad_norm": 3163888.5,
"learning_rate": 3.76628664495114e-05,
"loss": 0.7675,
"step": 441
},
{
"epoch": 0.970895112575508,
"grad_norm": 8885306.0,
"learning_rate": 3.762214983713355e-05,
"loss": 0.7497,
"step": 442
},
{
"epoch": 0.9730917078528282,
"grad_norm": 2620851.5,
"learning_rate": 3.75814332247557e-05,
"loss": 0.824,
"step": 443
},
{
"epoch": 0.9752883031301482,
"grad_norm": 4316842.0,
"learning_rate": 3.754071661237785e-05,
"loss": 0.9471,
"step": 444
},
{
"epoch": 0.9774848984074684,
"grad_norm": 3267577.0,
"learning_rate": 3.7500000000000003e-05,
"loss": 0.9057,
"step": 445
},
{
"epoch": 0.9796814936847886,
"grad_norm": 3122342.75,
"learning_rate": 3.7459283387622154e-05,
"loss": 0.92,
"step": 446
},
{
"epoch": 0.9818780889621087,
"grad_norm": 19377328.0,
"learning_rate": 3.7418566775244304e-05,
"loss": 0.9873,
"step": 447
},
{
"epoch": 0.9840746842394289,
"grad_norm": 3318042.25,
"learning_rate": 3.737785016286645e-05,
"loss": 0.7811,
"step": 448
},
{
"epoch": 0.9862712795167491,
"grad_norm": 5037451.0,
"learning_rate": 3.73371335504886e-05,
"loss": 0.8696,
"step": 449
},
{
"epoch": 0.9884678747940692,
"grad_norm": 9572285.0,
"learning_rate": 3.729641693811075e-05,
"loss": 0.8111,
"step": 450
},
{
"epoch": 0.9906644700713894,
"grad_norm": 7891369.5,
"learning_rate": 3.72557003257329e-05,
"loss": 0.8349,
"step": 451
},
{
"epoch": 0.9928610653487095,
"grad_norm": 3774431.0,
"learning_rate": 3.721498371335505e-05,
"loss": 0.8568,
"step": 452
},
{
"epoch": 0.9950576606260296,
"grad_norm": 14723319.0,
"learning_rate": 3.71742671009772e-05,
"loss": 0.901,
"step": 453
},
{
"epoch": 0.9972542559033498,
"grad_norm": 9706890.0,
"learning_rate": 3.713355048859935e-05,
"loss": 0.6967,
"step": 454
},
{
"epoch": 0.99945085118067,
"grad_norm": 2838797.5,
"learning_rate": 3.70928338762215e-05,
"loss": 0.777,
"step": 455
},
{
"epoch": 1.00164744645799,
"grad_norm": 8991332.0,
"learning_rate": 3.705211726384365e-05,
"loss": 0.9142,
"step": 456
},
{
"epoch": 1.0038440417353103,
"grad_norm": 3369319.5,
"learning_rate": 3.70114006514658e-05,
"loss": 0.8587,
"step": 457
},
{
"epoch": 1.0060406370126305,
"grad_norm": 3280881.75,
"learning_rate": 3.697068403908795e-05,
"loss": 0.8162,
"step": 458
},
{
"epoch": 1.0082372322899507,
"grad_norm": 5707167.5,
"learning_rate": 3.6929967426710096e-05,
"loss": 0.7555,
"step": 459
},
{
"epoch": 1.0104338275672706,
"grad_norm": 7033941.5,
"learning_rate": 3.688925081433225e-05,
"loss": 0.758,
"step": 460
},
{
"epoch": 1.0126304228445908,
"grad_norm": 1708048.25,
"learning_rate": 3.68485342019544e-05,
"loss": 0.6466,
"step": 461
},
{
"epoch": 1.014827018121911,
"grad_norm": 4216993.5,
"learning_rate": 3.680781758957655e-05,
"loss": 1.1216,
"step": 462
},
{
"epoch": 1.0170236133992312,
"grad_norm": 11881433.0,
"learning_rate": 3.67671009771987e-05,
"loss": 0.9845,
"step": 463
},
{
"epoch": 1.0192202086765514,
"grad_norm": 13909134.0,
"learning_rate": 3.672638436482085e-05,
"loss": 0.775,
"step": 464
},
{
"epoch": 1.0214168039538716,
"grad_norm": 13205813.0,
"learning_rate": 3.6685667752443e-05,
"loss": 1.0079,
"step": 465
},
{
"epoch": 1.0236133992311915,
"grad_norm": 8669044.0,
"learning_rate": 3.664495114006515e-05,
"loss": 1.0056,
"step": 466
},
{
"epoch": 1.0258099945085117,
"grad_norm": 4657157.0,
"learning_rate": 3.66042345276873e-05,
"loss": 0.8558,
"step": 467
},
{
"epoch": 1.028006589785832,
"grad_norm": 9291309.0,
"learning_rate": 3.656351791530945e-05,
"loss": 0.7488,
"step": 468
},
{
"epoch": 1.0302031850631521,
"grad_norm": 5880277.5,
"learning_rate": 3.6522801302931594e-05,
"loss": 0.9239,
"step": 469
},
{
"epoch": 1.0323997803404723,
"grad_norm": 1935226.75,
"learning_rate": 3.6482084690553745e-05,
"loss": 0.9432,
"step": 470
},
{
"epoch": 1.0345963756177925,
"grad_norm": 2122449.75,
"learning_rate": 3.6441368078175895e-05,
"loss": 0.7108,
"step": 471
},
{
"epoch": 1.0367929708951125,
"grad_norm": 10027997.0,
"learning_rate": 3.6400651465798046e-05,
"loss": 0.8021,
"step": 472
},
{
"epoch": 1.0389895661724327,
"grad_norm": 6696744.5,
"learning_rate": 3.6359934853420196e-05,
"loss": 1.0112,
"step": 473
},
{
"epoch": 1.0411861614497528,
"grad_norm": 12777209.0,
"learning_rate": 3.6319218241042346e-05,
"loss": 0.8327,
"step": 474
},
{
"epoch": 1.043382756727073,
"grad_norm": 3747195.75,
"learning_rate": 3.62785016286645e-05,
"loss": 0.7982,
"step": 475
},
{
"epoch": 1.0455793520043932,
"grad_norm": 4901371.5,
"learning_rate": 3.623778501628665e-05,
"loss": 1.0869,
"step": 476
},
{
"epoch": 1.0477759472817134,
"grad_norm": 3937130.0,
"learning_rate": 3.61970684039088e-05,
"loss": 0.7382,
"step": 477
},
{
"epoch": 1.0499725425590336,
"grad_norm": 3580537.0,
"learning_rate": 3.615635179153095e-05,
"loss": 0.8539,
"step": 478
},
{
"epoch": 1.0521691378363536,
"grad_norm": 6540120.5,
"learning_rate": 3.61156351791531e-05,
"loss": 0.9064,
"step": 479
},
{
"epoch": 1.0543657331136738,
"grad_norm": 11335932.0,
"learning_rate": 3.607491856677524e-05,
"loss": 0.6979,
"step": 480
},
{
"epoch": 1.056562328390994,
"grad_norm": 4903040.0,
"learning_rate": 3.603420195439739e-05,
"loss": 0.7633,
"step": 481
},
{
"epoch": 1.0587589236683141,
"grad_norm": 3875244.0,
"learning_rate": 3.599348534201954e-05,
"loss": 0.7527,
"step": 482
},
{
"epoch": 1.0609555189456343,
"grad_norm": 3177955.0,
"learning_rate": 3.5952768729641694e-05,
"loss": 0.7824,
"step": 483
},
{
"epoch": 1.0631521142229543,
"grad_norm": 10429066.0,
"learning_rate": 3.5912052117263844e-05,
"loss": 0.8659,
"step": 484
},
{
"epoch": 1.0653487095002745,
"grad_norm": 4362183.5,
"learning_rate": 3.5871335504885995e-05,
"loss": 0.8113,
"step": 485
},
{
"epoch": 1.0675453047775947,
"grad_norm": 20388008.0,
"learning_rate": 3.5830618892508145e-05,
"loss": 0.7389,
"step": 486
},
{
"epoch": 1.0697419000549149,
"grad_norm": 1476416.125,
"learning_rate": 3.5789902280130296e-05,
"loss": 0.7578,
"step": 487
},
{
"epoch": 1.071938495332235,
"grad_norm": 4040277.0,
"learning_rate": 3.5749185667752446e-05,
"loss": 0.914,
"step": 488
},
{
"epoch": 1.0741350906095553,
"grad_norm": 7079791.5,
"learning_rate": 3.5708469055374597e-05,
"loss": 0.6894,
"step": 489
},
{
"epoch": 1.0763316858868754,
"grad_norm": 3127609.75,
"learning_rate": 3.566775244299674e-05,
"loss": 0.6968,
"step": 490
},
{
"epoch": 1.0785282811641954,
"grad_norm": 3809764.5,
"learning_rate": 3.562703583061889e-05,
"loss": 1.0094,
"step": 491
},
{
"epoch": 1.0807248764415156,
"grad_norm": 3065993.25,
"learning_rate": 3.558631921824104e-05,
"loss": 0.7706,
"step": 492
},
{
"epoch": 1.0829214717188358,
"grad_norm": 2871166.5,
"learning_rate": 3.554560260586319e-05,
"loss": 0.9104,
"step": 493
},
{
"epoch": 1.085118066996156,
"grad_norm": 3959990.25,
"learning_rate": 3.550488599348534e-05,
"loss": 0.6892,
"step": 494
},
{
"epoch": 1.0873146622734762,
"grad_norm": 2142621.25,
"learning_rate": 3.546416938110749e-05,
"loss": 0.8145,
"step": 495
},
{
"epoch": 1.0895112575507964,
"grad_norm": 9961341.0,
"learning_rate": 3.542345276872964e-05,
"loss": 0.711,
"step": 496
},
{
"epoch": 1.0917078528281163,
"grad_norm": 3682777.25,
"learning_rate": 3.5382736156351793e-05,
"loss": 0.7049,
"step": 497
},
{
"epoch": 1.0939044481054365,
"grad_norm": 4144218.75,
"learning_rate": 3.5342019543973944e-05,
"loss": 0.8052,
"step": 498
},
{
"epoch": 1.0961010433827567,
"grad_norm": 1452248.25,
"learning_rate": 3.5301302931596094e-05,
"loss": 0.8244,
"step": 499
},
{
"epoch": 1.098297638660077,
"grad_norm": 4761271.5,
"learning_rate": 3.5260586319218245e-05,
"loss": 0.8815,
"step": 500
},
{
"epoch": 1.100494233937397,
"grad_norm": 9929670.0,
"learning_rate": 3.521986970684039e-05,
"loss": 0.9855,
"step": 501
},
{
"epoch": 1.1026908292147173,
"grad_norm": 5726086.0,
"learning_rate": 3.517915309446254e-05,
"loss": 0.6964,
"step": 502
},
{
"epoch": 1.1048874244920373,
"grad_norm": 8523648.0,
"learning_rate": 3.513843648208469e-05,
"loss": 0.8744,
"step": 503
},
{
"epoch": 1.1070840197693574,
"grad_norm": 2879091.0,
"learning_rate": 3.509771986970684e-05,
"loss": 0.7042,
"step": 504
},
{
"epoch": 1.1092806150466776,
"grad_norm": 8812327.0,
"learning_rate": 3.505700325732899e-05,
"loss": 1.0488,
"step": 505
},
{
"epoch": 1.1114772103239978,
"grad_norm": 6325029.5,
"learning_rate": 3.501628664495114e-05,
"loss": 0.8452,
"step": 506
},
{
"epoch": 1.113673805601318,
"grad_norm": 4794634.5,
"learning_rate": 3.497557003257329e-05,
"loss": 0.929,
"step": 507
},
{
"epoch": 1.1158704008786382,
"grad_norm": 7767060.0,
"learning_rate": 3.493485342019544e-05,
"loss": 0.9219,
"step": 508
},
{
"epoch": 1.1180669961559582,
"grad_norm": 4710876.5,
"learning_rate": 3.489413680781759e-05,
"loss": 0.8301,
"step": 509
},
{
"epoch": 1.1202635914332784,
"grad_norm": 7873708.5,
"learning_rate": 3.485342019543974e-05,
"loss": 0.9386,
"step": 510
},
{
"epoch": 1.1224601867105986,
"grad_norm": 1433709.625,
"learning_rate": 3.481270358306189e-05,
"loss": 0.7807,
"step": 511
},
{
"epoch": 1.1246567819879187,
"grad_norm": 4903263.5,
"learning_rate": 3.477198697068404e-05,
"loss": 0.801,
"step": 512
},
{
"epoch": 1.126853377265239,
"grad_norm": 3248271.75,
"learning_rate": 3.473127035830619e-05,
"loss": 0.9455,
"step": 513
},
{
"epoch": 1.1290499725425591,
"grad_norm": 5260664.5,
"learning_rate": 3.469055374592834e-05,
"loss": 0.7725,
"step": 514
},
{
"epoch": 1.131246567819879,
"grad_norm": 5526199.5,
"learning_rate": 3.464983713355049e-05,
"loss": 0.788,
"step": 515
},
{
"epoch": 1.1334431630971993,
"grad_norm": 6048993.0,
"learning_rate": 3.460912052117264e-05,
"loss": 0.8122,
"step": 516
},
{
"epoch": 1.1356397583745195,
"grad_norm": 10826018.0,
"learning_rate": 3.456840390879479e-05,
"loss": 0.8729,
"step": 517
},
{
"epoch": 1.1378363536518397,
"grad_norm": 6635749.0,
"learning_rate": 3.452768729641694e-05,
"loss": 0.6768,
"step": 518
},
{
"epoch": 1.1400329489291599,
"grad_norm": 2388449.5,
"learning_rate": 3.448697068403909e-05,
"loss": 0.6119,
"step": 519
},
{
"epoch": 1.14222954420648,
"grad_norm": 5985002.0,
"learning_rate": 3.444625407166124e-05,
"loss": 0.8503,
"step": 520
},
{
"epoch": 1.1444261394838002,
"grad_norm": 4519367.5,
"learning_rate": 3.440553745928339e-05,
"loss": 0.6913,
"step": 521
},
{
"epoch": 1.1466227347611202,
"grad_norm": 8761667.0,
"learning_rate": 3.4364820846905535e-05,
"loss": 0.7784,
"step": 522
},
{
"epoch": 1.1488193300384404,
"grad_norm": 3156495.5,
"learning_rate": 3.4324104234527685e-05,
"loss": 0.6168,
"step": 523
},
{
"epoch": 1.1510159253157606,
"grad_norm": 8132012.0,
"learning_rate": 3.4283387622149836e-05,
"loss": 0.9162,
"step": 524
},
{
"epoch": 1.1532125205930808,
"grad_norm": 3212973.0,
"learning_rate": 3.4242671009771986e-05,
"loss": 0.793,
"step": 525
},
{
"epoch": 1.155409115870401,
"grad_norm": 6705481.5,
"learning_rate": 3.4201954397394136e-05,
"loss": 1.0598,
"step": 526
},
{
"epoch": 1.157605711147721,
"grad_norm": 12085641.0,
"learning_rate": 3.416123778501629e-05,
"loss": 0.722,
"step": 527
},
{
"epoch": 1.1598023064250411,
"grad_norm": 4394856.5,
"learning_rate": 3.412052117263844e-05,
"loss": 0.9344,
"step": 528
},
{
"epoch": 1.1619989017023613,
"grad_norm": 4306202.0,
"learning_rate": 3.407980456026059e-05,
"loss": 0.6416,
"step": 529
},
{
"epoch": 1.1641954969796815,
"grad_norm": 12400364.0,
"learning_rate": 3.403908794788274e-05,
"loss": 0.6212,
"step": 530
},
{
"epoch": 1.1663920922570017,
"grad_norm": 2410434.25,
"learning_rate": 3.399837133550489e-05,
"loss": 0.7261,
"step": 531
},
{
"epoch": 1.1685886875343219,
"grad_norm": 15016189.0,
"learning_rate": 3.395765472312704e-05,
"loss": 0.9121,
"step": 532
},
{
"epoch": 1.170785282811642,
"grad_norm": 4933925.0,
"learning_rate": 3.391693811074918e-05,
"loss": 0.9069,
"step": 533
},
{
"epoch": 1.172981878088962,
"grad_norm": 7928337.0,
"learning_rate": 3.387622149837133e-05,
"loss": 0.9353,
"step": 534
},
{
"epoch": 1.1751784733662822,
"grad_norm": 7127355.0,
"learning_rate": 3.3835504885993484e-05,
"loss": 0.8018,
"step": 535
},
{
"epoch": 1.1773750686436024,
"grad_norm": 3091171.25,
"learning_rate": 3.3794788273615634e-05,
"loss": 0.7222,
"step": 536
},
{
"epoch": 1.1795716639209226,
"grad_norm": 2942911.75,
"learning_rate": 3.3754071661237785e-05,
"loss": 0.7417,
"step": 537
},
{
"epoch": 1.1817682591982428,
"grad_norm": 2696065.5,
"learning_rate": 3.3713355048859935e-05,
"loss": 0.723,
"step": 538
},
{
"epoch": 1.1839648544755628,
"grad_norm": 4667742.0,
"learning_rate": 3.3672638436482086e-05,
"loss": 1.0653,
"step": 539
},
{
"epoch": 1.186161449752883,
"grad_norm": 17610622.0,
"learning_rate": 3.3631921824104236e-05,
"loss": 0.7089,
"step": 540
},
{
"epoch": 1.1883580450302031,
"grad_norm": 9259681.0,
"learning_rate": 3.3591205211726387e-05,
"loss": 0.8135,
"step": 541
},
{
"epoch": 1.1905546403075233,
"grad_norm": 28884450.0,
"learning_rate": 3.355048859934854e-05,
"loss": 0.6974,
"step": 542
},
{
"epoch": 1.1927512355848435,
"grad_norm": 7348413.5,
"learning_rate": 3.350977198697068e-05,
"loss": 0.8565,
"step": 543
},
{
"epoch": 1.1949478308621637,
"grad_norm": 5243163.5,
"learning_rate": 3.346905537459283e-05,
"loss": 0.7723,
"step": 544
},
{
"epoch": 1.197144426139484,
"grad_norm": 2949088.5,
"learning_rate": 3.342833876221498e-05,
"loss": 0.9163,
"step": 545
},
{
"epoch": 1.1993410214168039,
"grad_norm": 4970167.5,
"learning_rate": 3.338762214983713e-05,
"loss": 0.8277,
"step": 546
},
{
"epoch": 1.201537616694124,
"grad_norm": 6460347.0,
"learning_rate": 3.334690553745928e-05,
"loss": 1.11,
"step": 547
},
{
"epoch": 1.2037342119714443,
"grad_norm": 3999363.25,
"learning_rate": 3.330618892508143e-05,
"loss": 0.791,
"step": 548
},
{
"epoch": 1.2059308072487644,
"grad_norm": 6791791.0,
"learning_rate": 3.3265472312703583e-05,
"loss": 1.0351,
"step": 549
},
{
"epoch": 1.2081274025260846,
"grad_norm": 8928464.0,
"learning_rate": 3.3224755700325734e-05,
"loss": 0.9303,
"step": 550
},
{
"epoch": 1.2103239978034046,
"grad_norm": 10347070.0,
"learning_rate": 3.3184039087947884e-05,
"loss": 0.7622,
"step": 551
},
{
"epoch": 1.2125205930807248,
"grad_norm": 7738571.0,
"learning_rate": 3.3143322475570035e-05,
"loss": 0.9321,
"step": 552
},
{
"epoch": 1.214717188358045,
"grad_norm": 4177337.25,
"learning_rate": 3.3102605863192185e-05,
"loss": 0.8865,
"step": 553
},
{
"epoch": 1.2169137836353652,
"grad_norm": 6501912.0,
"learning_rate": 3.306188925081433e-05,
"loss": 0.8529,
"step": 554
},
{
"epoch": 1.2191103789126854,
"grad_norm": 6199077.5,
"learning_rate": 3.302117263843648e-05,
"loss": 0.9066,
"step": 555
},
{
"epoch": 1.2213069741900056,
"grad_norm": 10434565.0,
"learning_rate": 3.298045602605863e-05,
"loss": 0.7102,
"step": 556
},
{
"epoch": 1.2235035694673257,
"grad_norm": 8458470.0,
"learning_rate": 3.293973941368078e-05,
"loss": 0.8486,
"step": 557
},
{
"epoch": 1.2257001647446457,
"grad_norm": 9179014.0,
"learning_rate": 3.289902280130293e-05,
"loss": 0.8103,
"step": 558
},
{
"epoch": 1.227896760021966,
"grad_norm": 10963746.0,
"learning_rate": 3.285830618892508e-05,
"loss": 1.0779,
"step": 559
},
{
"epoch": 1.230093355299286,
"grad_norm": 5221127.5,
"learning_rate": 3.281758957654723e-05,
"loss": 0.8836,
"step": 560
},
{
"epoch": 1.2322899505766063,
"grad_norm": 5596379.0,
"learning_rate": 3.277687296416938e-05,
"loss": 0.8029,
"step": 561
},
{
"epoch": 1.2344865458539265,
"grad_norm": 6915608.0,
"learning_rate": 3.273615635179153e-05,
"loss": 0.8501,
"step": 562
},
{
"epoch": 1.2366831411312464,
"grad_norm": 4770158.5,
"learning_rate": 3.269543973941368e-05,
"loss": 1.0326,
"step": 563
},
{
"epoch": 1.2388797364085666,
"grad_norm": 5102892.5,
"learning_rate": 3.265472312703583e-05,
"loss": 0.7409,
"step": 564
},
{
"epoch": 1.2410763316858868,
"grad_norm": 8208687.5,
"learning_rate": 3.261400651465798e-05,
"loss": 0.9087,
"step": 565
},
{
"epoch": 1.243272926963207,
"grad_norm": 8665397.0,
"learning_rate": 3.257328990228013e-05,
"loss": 0.7881,
"step": 566
},
{
"epoch": 1.2454695222405272,
"grad_norm": 7068941.5,
"learning_rate": 3.253257328990228e-05,
"loss": 0.7957,
"step": 567
},
{
"epoch": 1.2476661175178474,
"grad_norm": 6942070.5,
"learning_rate": 3.249185667752443e-05,
"loss": 0.8498,
"step": 568
},
{
"epoch": 1.2498627127951676,
"grad_norm": 22498534.0,
"learning_rate": 3.245114006514658e-05,
"loss": 0.7478,
"step": 569
},
{
"epoch": 1.2520593080724876,
"grad_norm": 3654512.25,
"learning_rate": 3.241042345276873e-05,
"loss": 0.568,
"step": 570
},
{
"epoch": 1.2542559033498077,
"grad_norm": 3267536.25,
"learning_rate": 3.236970684039088e-05,
"loss": 0.9269,
"step": 571
},
{
"epoch": 1.256452498627128,
"grad_norm": 6504901.0,
"learning_rate": 3.232899022801303e-05,
"loss": 0.8469,
"step": 572
},
{
"epoch": 1.2586490939044481,
"grad_norm": 5276869.0,
"learning_rate": 3.228827361563518e-05,
"loss": 0.807,
"step": 573
},
{
"epoch": 1.2608456891817683,
"grad_norm": 3400798.5,
"learning_rate": 3.224755700325733e-05,
"loss": 0.8742,
"step": 574
},
{
"epoch": 1.2630422844590883,
"grad_norm": 3815133.25,
"learning_rate": 3.2206840390879475e-05,
"loss": 0.9133,
"step": 575
},
{
"epoch": 1.2652388797364087,
"grad_norm": 4158373.0,
"learning_rate": 3.2166123778501625e-05,
"loss": 0.7596,
"step": 576
},
{
"epoch": 1.2674354750137287,
"grad_norm": 4513312.5,
"learning_rate": 3.2125407166123776e-05,
"loss": 1.047,
"step": 577
},
{
"epoch": 1.2696320702910489,
"grad_norm": 6753482.0,
"learning_rate": 3.2084690553745926e-05,
"loss": 0.585,
"step": 578
},
{
"epoch": 1.271828665568369,
"grad_norm": 2310095.5,
"learning_rate": 3.204397394136808e-05,
"loss": 0.6412,
"step": 579
},
{
"epoch": 1.2740252608456892,
"grad_norm": 6548274.5,
"learning_rate": 3.200325732899023e-05,
"loss": 0.6608,
"step": 580
},
{
"epoch": 1.2762218561230094,
"grad_norm": 9589902.0,
"learning_rate": 3.196254071661238e-05,
"loss": 0.8523,
"step": 581
},
{
"epoch": 1.2784184514003294,
"grad_norm": 13084248.0,
"learning_rate": 3.192182410423453e-05,
"loss": 0.7467,
"step": 582
},
{
"epoch": 1.2806150466776496,
"grad_norm": 2030765.0,
"learning_rate": 3.188110749185668e-05,
"loss": 0.7413,
"step": 583
},
{
"epoch": 1.2828116419549698,
"grad_norm": 75569048.0,
"learning_rate": 3.184039087947883e-05,
"loss": 0.7575,
"step": 584
},
{
"epoch": 1.28500823723229,
"grad_norm": 4401401.5,
"learning_rate": 3.179967426710097e-05,
"loss": 0.8569,
"step": 585
},
{
"epoch": 1.2872048325096102,
"grad_norm": 8539482.0,
"learning_rate": 3.175895765472312e-05,
"loss": 0.791,
"step": 586
},
{
"epoch": 1.2894014277869301,
"grad_norm": 5570772.5,
"learning_rate": 3.1718241042345274e-05,
"loss": 0.8206,
"step": 587
},
{
"epoch": 1.2915980230642505,
"grad_norm": 6221003.5,
"learning_rate": 3.1677524429967424e-05,
"loss": 0.8668,
"step": 588
},
{
"epoch": 1.2937946183415705,
"grad_norm": 17060828.0,
"learning_rate": 3.1636807817589575e-05,
"loss": 0.7171,
"step": 589
},
{
"epoch": 1.2959912136188907,
"grad_norm": 11605545.0,
"learning_rate": 3.1596091205211725e-05,
"loss": 0.833,
"step": 590
},
{
"epoch": 1.2981878088962109,
"grad_norm": 4201617.5,
"learning_rate": 3.1555374592833876e-05,
"loss": 0.597,
"step": 591
},
{
"epoch": 1.300384404173531,
"grad_norm": 3455147.0,
"learning_rate": 3.1514657980456026e-05,
"loss": 0.8203,
"step": 592
},
{
"epoch": 1.3025809994508513,
"grad_norm": 2380072.75,
"learning_rate": 3.1473941368078177e-05,
"loss": 0.7031,
"step": 593
},
{
"epoch": 1.3047775947281712,
"grad_norm": 4997146.0,
"learning_rate": 3.143322475570033e-05,
"loss": 0.8235,
"step": 594
},
{
"epoch": 1.3069741900054914,
"grad_norm": 19033640.0,
"learning_rate": 3.139250814332248e-05,
"loss": 0.9772,
"step": 595
},
{
"epoch": 1.3091707852828116,
"grad_norm": 9704992.0,
"learning_rate": 3.135179153094462e-05,
"loss": 0.75,
"step": 596
},
{
"epoch": 1.3113673805601318,
"grad_norm": 3873534.5,
"learning_rate": 3.131107491856677e-05,
"loss": 0.7585,
"step": 597
},
{
"epoch": 1.313563975837452,
"grad_norm": 9189857.0,
"learning_rate": 3.127035830618892e-05,
"loss": 0.6987,
"step": 598
},
{
"epoch": 1.3157605711147722,
"grad_norm": 2504172.25,
"learning_rate": 3.122964169381108e-05,
"loss": 0.7699,
"step": 599
},
{
"epoch": 1.3179571663920924,
"grad_norm": 6048699.0,
"learning_rate": 3.118892508143323e-05,
"loss": 0.982,
"step": 600
},
{
"epoch": 1.3201537616694123,
"grad_norm": 5469415.0,
"learning_rate": 3.114820846905538e-05,
"loss": 0.8172,
"step": 601
},
{
"epoch": 1.3223503569467325,
"grad_norm": 2182520.25,
"learning_rate": 3.110749185667753e-05,
"loss": 0.7697,
"step": 602
},
{
"epoch": 1.3245469522240527,
"grad_norm": 2348266.5,
"learning_rate": 3.106677524429968e-05,
"loss": 0.8726,
"step": 603
},
{
"epoch": 1.326743547501373,
"grad_norm": 5198596.0,
"learning_rate": 3.102605863192183e-05,
"loss": 0.8597,
"step": 604
},
{
"epoch": 1.328940142778693,
"grad_norm": 3521929.75,
"learning_rate": 3.0985342019543975e-05,
"loss": 0.9489,
"step": 605
},
{
"epoch": 1.331136738056013,
"grad_norm": 2628773.0,
"learning_rate": 3.0944625407166126e-05,
"loss": 0.6981,
"step": 606
},
{
"epoch": 1.3333333333333333,
"grad_norm": 5645844.5,
"learning_rate": 3.0903908794788276e-05,
"loss": 0.622,
"step": 607
},
{
"epoch": 1.3355299286106534,
"grad_norm": 6870191.5,
"learning_rate": 3.086319218241043e-05,
"loss": 0.8146,
"step": 608
},
{
"epoch": 1.3377265238879736,
"grad_norm": 4888755.5,
"learning_rate": 3.082247557003258e-05,
"loss": 0.8402,
"step": 609
},
{
"epoch": 1.3399231191652938,
"grad_norm": 4698551.0,
"learning_rate": 3.078175895765473e-05,
"loss": 0.862,
"step": 610
},
{
"epoch": 1.342119714442614,
"grad_norm": 2885170.75,
"learning_rate": 3.074104234527688e-05,
"loss": 0.8313,
"step": 611
},
{
"epoch": 1.3443163097199342,
"grad_norm": 7445346.0,
"learning_rate": 3.070032573289903e-05,
"loss": 0.7226,
"step": 612
},
{
"epoch": 1.3465129049972542,
"grad_norm": 6588745.0,
"learning_rate": 3.065960912052118e-05,
"loss": 0.8745,
"step": 613
},
{
"epoch": 1.3487095002745744,
"grad_norm": 9090884.0,
"learning_rate": 3.061889250814333e-05,
"loss": 0.7313,
"step": 614
},
{
"epoch": 1.3509060955518946,
"grad_norm": 6291323.5,
"learning_rate": 3.057817589576547e-05,
"loss": 0.9667,
"step": 615
},
{
"epoch": 1.3531026908292147,
"grad_norm": 1949737.125,
"learning_rate": 3.0537459283387624e-05,
"loss": 0.9007,
"step": 616
},
{
"epoch": 1.355299286106535,
"grad_norm": 3990109.5,
"learning_rate": 3.0496742671009777e-05,
"loss": 0.8265,
"step": 617
},
{
"epoch": 1.357495881383855,
"grad_norm": 9570753.0,
"learning_rate": 3.0456026058631924e-05,
"loss": 1.0475,
"step": 618
},
{
"epoch": 1.359692476661175,
"grad_norm": 4849979.5,
"learning_rate": 3.0415309446254075e-05,
"loss": 0.6503,
"step": 619
},
{
"epoch": 1.3618890719384953,
"grad_norm": 3035817.5,
"learning_rate": 3.0374592833876225e-05,
"loss": 0.7252,
"step": 620
},
{
"epoch": 1.3640856672158155,
"grad_norm": 6198328.5,
"learning_rate": 3.0333876221498376e-05,
"loss": 1.0213,
"step": 621
},
{
"epoch": 1.3662822624931357,
"grad_norm": 2671334.25,
"learning_rate": 3.0293159609120526e-05,
"loss": 0.7657,
"step": 622
},
{
"epoch": 1.3684788577704559,
"grad_norm": 24189594.0,
"learning_rate": 3.0252442996742673e-05,
"loss": 0.6755,
"step": 623
},
{
"epoch": 1.370675453047776,
"grad_norm": 8585594.0,
"learning_rate": 3.0211726384364824e-05,
"loss": 0.8251,
"step": 624
},
{
"epoch": 1.372872048325096,
"grad_norm": 9984543.0,
"learning_rate": 3.0171009771986974e-05,
"loss": 0.9082,
"step": 625
},
{
"epoch": 1.3750686436024162,
"grad_norm": 5325456.5,
"learning_rate": 3.0130293159609125e-05,
"loss": 0.7263,
"step": 626
},
{
"epoch": 1.3772652388797364,
"grad_norm": 11045564.0,
"learning_rate": 3.0089576547231275e-05,
"loss": 0.738,
"step": 627
},
{
"epoch": 1.3794618341570566,
"grad_norm": 12307985.0,
"learning_rate": 3.0048859934853422e-05,
"loss": 0.6282,
"step": 628
},
{
"epoch": 1.3816584294343768,
"grad_norm": 5545169.0,
"learning_rate": 3.0008143322475573e-05,
"loss": 0.796,
"step": 629
},
{
"epoch": 1.3838550247116967,
"grad_norm": 2571234.75,
"learning_rate": 2.9967426710097723e-05,
"loss": 0.7542,
"step": 630
},
{
"epoch": 1.3860516199890172,
"grad_norm": 3870043.25,
"learning_rate": 2.9926710097719874e-05,
"loss": 0.7682,
"step": 631
},
{
"epoch": 1.3882482152663371,
"grad_norm": 5821889.5,
"learning_rate": 2.9885993485342024e-05,
"loss": 0.9469,
"step": 632
},
{
"epoch": 1.3904448105436573,
"grad_norm": 2501907.75,
"learning_rate": 2.9845276872964175e-05,
"loss": 0.804,
"step": 633
},
{
"epoch": 1.3926414058209775,
"grad_norm": 6694513.5,
"learning_rate": 2.980456026058632e-05,
"loss": 0.7361,
"step": 634
},
{
"epoch": 1.3948380010982977,
"grad_norm": 4213918.5,
"learning_rate": 2.9763843648208472e-05,
"loss": 0.7994,
"step": 635
},
{
"epoch": 1.3970345963756179,
"grad_norm": 7716902.0,
"learning_rate": 2.9723127035830623e-05,
"loss": 0.9123,
"step": 636
},
{
"epoch": 1.3992311916529379,
"grad_norm": 8129986.5,
"learning_rate": 2.9682410423452773e-05,
"loss": 0.7939,
"step": 637
},
{
"epoch": 1.401427786930258,
"grad_norm": 5230363.5,
"learning_rate": 2.9641693811074923e-05,
"loss": 0.7561,
"step": 638
},
{
"epoch": 1.4036243822075782,
"grad_norm": 15260243.0,
"learning_rate": 2.960097719869707e-05,
"loss": 0.7561,
"step": 639
},
{
"epoch": 1.4058209774848984,
"grad_norm": 9165478.0,
"learning_rate": 2.956026058631922e-05,
"loss": 1.0408,
"step": 640
},
{
"epoch": 1.4080175727622186,
"grad_norm": 2777815.25,
"learning_rate": 2.951954397394137e-05,
"loss": 0.7271,
"step": 641
},
{
"epoch": 1.4102141680395386,
"grad_norm": 3931786.0,
"learning_rate": 2.9478827361563522e-05,
"loss": 0.9403,
"step": 642
},
{
"epoch": 1.412410763316859,
"grad_norm": 9556869.0,
"learning_rate": 2.9438110749185672e-05,
"loss": 0.8069,
"step": 643
},
{
"epoch": 1.414607358594179,
"grad_norm": 3883110.25,
"learning_rate": 2.939739413680782e-05,
"loss": 0.6838,
"step": 644
},
{
"epoch": 1.4168039538714992,
"grad_norm": 4311441.0,
"learning_rate": 2.935667752442997e-05,
"loss": 0.7977,
"step": 645
},
{
"epoch": 1.4190005491488193,
"grad_norm": 11853439.0,
"learning_rate": 2.931596091205212e-05,
"loss": 0.7791,
"step": 646
},
{
"epoch": 1.4211971444261395,
"grad_norm": 5503657.0,
"learning_rate": 2.927524429967427e-05,
"loss": 0.7692,
"step": 647
},
{
"epoch": 1.4233937397034597,
"grad_norm": 14219026.0,
"learning_rate": 2.923452768729642e-05,
"loss": 0.7668,
"step": 648
},
{
"epoch": 1.4255903349807797,
"grad_norm": 3119570.5,
"learning_rate": 2.9193811074918572e-05,
"loss": 0.9098,
"step": 649
},
{
"epoch": 1.4277869302580999,
"grad_norm": 6621255.5,
"learning_rate": 2.915309446254072e-05,
"loss": 0.6781,
"step": 650
},
{
"epoch": 1.42998352553542,
"grad_norm": 23727058.0,
"learning_rate": 2.911237785016287e-05,
"loss": 0.7309,
"step": 651
},
{
"epoch": 1.4321801208127403,
"grad_norm": 31922152.0,
"learning_rate": 2.907166123778502e-05,
"loss": 0.7827,
"step": 652
},
{
"epoch": 1.4343767160900605,
"grad_norm": 6264115.5,
"learning_rate": 2.903094462540717e-05,
"loss": 0.8391,
"step": 653
},
{
"epoch": 1.4365733113673804,
"grad_norm": 22005284.0,
"learning_rate": 2.899022801302932e-05,
"loss": 0.764,
"step": 654
},
{
"epoch": 1.4387699066447008,
"grad_norm": 5722619.5,
"learning_rate": 2.8949511400651468e-05,
"loss": 0.7783,
"step": 655
},
{
"epoch": 1.4409665019220208,
"grad_norm": 6130999.0,
"learning_rate": 2.8908794788273618e-05,
"loss": 0.8338,
"step": 656
},
{
"epoch": 1.443163097199341,
"grad_norm": 3925606.0,
"learning_rate": 2.886807817589577e-05,
"loss": 0.9843,
"step": 657
},
{
"epoch": 1.4453596924766612,
"grad_norm": 6457721.0,
"learning_rate": 2.882736156351792e-05,
"loss": 0.8933,
"step": 658
},
{
"epoch": 1.4475562877539814,
"grad_norm": 4999463.0,
"learning_rate": 2.878664495114007e-05,
"loss": 0.9811,
"step": 659
},
{
"epoch": 1.4497528830313016,
"grad_norm": 4058591.75,
"learning_rate": 2.8745928338762217e-05,
"loss": 0.7143,
"step": 660
},
{
"epoch": 1.4519494783086215,
"grad_norm": 3496151.5,
"learning_rate": 2.8705211726384367e-05,
"loss": 0.7023,
"step": 661
},
{
"epoch": 1.4541460735859417,
"grad_norm": 4563755.5,
"learning_rate": 2.8664495114006518e-05,
"loss": 0.8082,
"step": 662
},
{
"epoch": 1.456342668863262,
"grad_norm": 2525402.75,
"learning_rate": 2.8623778501628668e-05,
"loss": 0.8271,
"step": 663
},
{
"epoch": 1.458539264140582,
"grad_norm": 5428222.5,
"learning_rate": 2.858306188925082e-05,
"loss": 0.7275,
"step": 664
},
{
"epoch": 1.4607358594179023,
"grad_norm": 7972065.5,
"learning_rate": 2.8542345276872965e-05,
"loss": 0.9807,
"step": 665
},
{
"epoch": 1.4629324546952225,
"grad_norm": 8523531.0,
"learning_rate": 2.8501628664495116e-05,
"loss": 0.9444,
"step": 666
},
{
"epoch": 1.4651290499725427,
"grad_norm": 4583850.0,
"learning_rate": 2.8460912052117266e-05,
"loss": 0.753,
"step": 667
},
{
"epoch": 1.4673256452498626,
"grad_norm": 7885099.5,
"learning_rate": 2.8420195439739417e-05,
"loss": 0.933,
"step": 668
},
{
"epoch": 1.4695222405271828,
"grad_norm": 4484214.5,
"learning_rate": 2.8379478827361567e-05,
"loss": 0.7095,
"step": 669
},
{
"epoch": 1.471718835804503,
"grad_norm": 7593714.5,
"learning_rate": 2.8338762214983718e-05,
"loss": 0.8228,
"step": 670
},
{
"epoch": 1.4739154310818232,
"grad_norm": 4510652.5,
"learning_rate": 2.8298045602605865e-05,
"loss": 0.9625,
"step": 671
},
{
"epoch": 1.4761120263591434,
"grad_norm": 3340290.0,
"learning_rate": 2.8257328990228015e-05,
"loss": 0.818,
"step": 672
},
{
"epoch": 1.4783086216364634,
"grad_norm": 14126548.0,
"learning_rate": 2.8216612377850166e-05,
"loss": 0.7649,
"step": 673
},
{
"epoch": 1.4805052169137836,
"grad_norm": 5096661.5,
"learning_rate": 2.8175895765472316e-05,
"loss": 0.8626,
"step": 674
},
{
"epoch": 1.4827018121911038,
"grad_norm": 18412062.0,
"learning_rate": 2.8135179153094467e-05,
"loss": 0.7369,
"step": 675
},
{
"epoch": 1.484898407468424,
"grad_norm": 13129880.0,
"learning_rate": 2.8094462540716614e-05,
"loss": 0.7982,
"step": 676
},
{
"epoch": 1.4870950027457441,
"grad_norm": 3733168.25,
"learning_rate": 2.8053745928338764e-05,
"loss": 0.7679,
"step": 677
},
{
"epoch": 1.4892915980230643,
"grad_norm": 4655964.0,
"learning_rate": 2.8013029315960915e-05,
"loss": 0.8692,
"step": 678
},
{
"epoch": 1.4914881933003845,
"grad_norm": 2379629.0,
"learning_rate": 2.7972312703583065e-05,
"loss": 0.8423,
"step": 679
},
{
"epoch": 1.4936847885777045,
"grad_norm": 14175747.0,
"learning_rate": 2.7931596091205216e-05,
"loss": 1.0303,
"step": 680
},
{
"epoch": 1.4958813838550247,
"grad_norm": 6592980.0,
"learning_rate": 2.7890879478827363e-05,
"loss": 0.8141,
"step": 681
},
{
"epoch": 1.4980779791323449,
"grad_norm": 2083189.75,
"learning_rate": 2.7850162866449513e-05,
"loss": 0.9554,
"step": 682
},
{
"epoch": 1.500274574409665,
"grad_norm": 3264328.0,
"learning_rate": 2.7809446254071664e-05,
"loss": 0.5439,
"step": 683
},
{
"epoch": 1.5024711696869852,
"grad_norm": 8035727.0,
"learning_rate": 2.7768729641693814e-05,
"loss": 0.6945,
"step": 684
},
{
"epoch": 1.5046677649643052,
"grad_norm": 24236826.0,
"learning_rate": 2.7728013029315965e-05,
"loss": 0.9112,
"step": 685
},
{
"epoch": 1.5068643602416256,
"grad_norm": 16203580.0,
"learning_rate": 2.768729641693811e-05,
"loss": 1.038,
"step": 686
},
{
"epoch": 1.5090609555189456,
"grad_norm": 20527648.0,
"learning_rate": 2.7646579804560262e-05,
"loss": 0.8679,
"step": 687
},
{
"epoch": 1.5112575507962658,
"grad_norm": 1687664.875,
"learning_rate": 2.7605863192182412e-05,
"loss": 0.8992,
"step": 688
},
{
"epoch": 1.513454146073586,
"grad_norm": 6100792.5,
"learning_rate": 2.7565146579804563e-05,
"loss": 0.6928,
"step": 689
},
{
"epoch": 1.515650741350906,
"grad_norm": 1779872.25,
"learning_rate": 2.7524429967426713e-05,
"loss": 0.7264,
"step": 690
},
{
"epoch": 1.5178473366282264,
"grad_norm": 5877993.0,
"learning_rate": 2.7483713355048864e-05,
"loss": 0.9053,
"step": 691
},
{
"epoch": 1.5200439319055463,
"grad_norm": 3860265.25,
"learning_rate": 2.744299674267101e-05,
"loss": 0.8396,
"step": 692
},
{
"epoch": 1.5222405271828665,
"grad_norm": 2222628.5,
"learning_rate": 2.740228013029316e-05,
"loss": 0.8158,
"step": 693
},
{
"epoch": 1.5244371224601867,
"grad_norm": 6485382.0,
"learning_rate": 2.7361563517915312e-05,
"loss": 0.7263,
"step": 694
},
{
"epoch": 1.526633717737507,
"grad_norm": 5865175.0,
"learning_rate": 2.7320846905537462e-05,
"loss": 0.7776,
"step": 695
},
{
"epoch": 1.528830313014827,
"grad_norm": 4594936.5,
"learning_rate": 2.7280130293159613e-05,
"loss": 0.7184,
"step": 696
},
{
"epoch": 1.531026908292147,
"grad_norm": 1345459.5,
"learning_rate": 2.723941368078176e-05,
"loss": 0.8008,
"step": 697
},
{
"epoch": 1.5332235035694675,
"grad_norm": 5575137.5,
"learning_rate": 2.719869706840391e-05,
"loss": 0.7645,
"step": 698
},
{
"epoch": 1.5354200988467874,
"grad_norm": 13658198.0,
"learning_rate": 2.715798045602606e-05,
"loss": 0.8793,
"step": 699
},
{
"epoch": 1.5376166941241076,
"grad_norm": 6223910.5,
"learning_rate": 2.711726384364821e-05,
"loss": 0.932,
"step": 700
},
{
"epoch": 1.5398132894014278,
"grad_norm": 3107052.25,
"learning_rate": 2.707654723127036e-05,
"loss": 0.7443,
"step": 701
},
{
"epoch": 1.5420098846787478,
"grad_norm": 3807637.5,
"learning_rate": 2.703583061889251e-05,
"loss": 0.7836,
"step": 702
},
{
"epoch": 1.5442064799560682,
"grad_norm": 8423850.0,
"learning_rate": 2.699511400651466e-05,
"loss": 0.6999,
"step": 703
},
{
"epoch": 1.5464030752333882,
"grad_norm": 3934505.5,
"learning_rate": 2.695439739413681e-05,
"loss": 0.6424,
"step": 704
},
{
"epoch": 1.5485996705107083,
"grad_norm": 4523469.5,
"learning_rate": 2.691368078175896e-05,
"loss": 0.8295,
"step": 705
},
{
"epoch": 1.5507962657880285,
"grad_norm": 1500514.125,
"learning_rate": 2.687296416938111e-05,
"loss": 0.7464,
"step": 706
},
{
"epoch": 1.5529928610653487,
"grad_norm": 4100195.25,
"learning_rate": 2.683224755700326e-05,
"loss": 0.8612,
"step": 707
},
{
"epoch": 1.555189456342669,
"grad_norm": 6796936.5,
"learning_rate": 2.6791530944625408e-05,
"loss": 0.9161,
"step": 708
},
{
"epoch": 1.5573860516199889,
"grad_norm": 11311848.0,
"learning_rate": 2.675081433224756e-05,
"loss": 0.983,
"step": 709
},
{
"epoch": 1.5595826468973093,
"grad_norm": 5327868.0,
"learning_rate": 2.671009771986971e-05,
"loss": 0.5595,
"step": 710
},
{
"epoch": 1.5617792421746293,
"grad_norm": 6154893.5,
"learning_rate": 2.666938110749186e-05,
"loss": 0.9245,
"step": 711
},
{
"epoch": 1.5639758374519495,
"grad_norm": 19080946.0,
"learning_rate": 2.662866449511401e-05,
"loss": 0.6188,
"step": 712
},
{
"epoch": 1.5661724327292696,
"grad_norm": 6060025.0,
"learning_rate": 2.6587947882736157e-05,
"loss": 0.7311,
"step": 713
},
{
"epoch": 1.5683690280065898,
"grad_norm": 3273898.0,
"learning_rate": 2.6547231270358307e-05,
"loss": 0.6852,
"step": 714
},
{
"epoch": 1.57056562328391,
"grad_norm": 4101042.25,
"learning_rate": 2.6506514657980458e-05,
"loss": 0.8457,
"step": 715
},
{
"epoch": 1.57276221856123,
"grad_norm": 7110334.0,
"learning_rate": 2.646579804560261e-05,
"loss": 0.9775,
"step": 716
},
{
"epoch": 1.5749588138385504,
"grad_norm": 8520013.0,
"learning_rate": 2.642508143322476e-05,
"loss": 0.8237,
"step": 717
},
{
"epoch": 1.5771554091158704,
"grad_norm": 11102958.0,
"learning_rate": 2.6384364820846906e-05,
"loss": 0.7317,
"step": 718
},
{
"epoch": 1.5793520043931906,
"grad_norm": 4241118.5,
"learning_rate": 2.6343648208469056e-05,
"loss": 0.7595,
"step": 719
},
{
"epoch": 1.5815485996705108,
"grad_norm": 2514485.0,
"learning_rate": 2.6302931596091207e-05,
"loss": 0.7004,
"step": 720
},
{
"epoch": 1.5837451949478307,
"grad_norm": 6694659.0,
"learning_rate": 2.6262214983713357e-05,
"loss": 0.9085,
"step": 721
},
{
"epoch": 1.5859417902251511,
"grad_norm": 5642870.0,
"learning_rate": 2.6221498371335508e-05,
"loss": 0.9417,
"step": 722
},
{
"epoch": 1.588138385502471,
"grad_norm": 6868730.5,
"learning_rate": 2.6180781758957655e-05,
"loss": 0.7569,
"step": 723
},
{
"epoch": 1.5903349807797913,
"grad_norm": 28863254.0,
"learning_rate": 2.6140065146579805e-05,
"loss": 0.6815,
"step": 724
},
{
"epoch": 1.5925315760571115,
"grad_norm": 12179046.0,
"learning_rate": 2.6099348534201956e-05,
"loss": 0.8387,
"step": 725
},
{
"epoch": 1.5947281713344317,
"grad_norm": 5193878.0,
"learning_rate": 2.6058631921824106e-05,
"loss": 0.8018,
"step": 726
},
{
"epoch": 1.5969247666117519,
"grad_norm": 5733950.0,
"learning_rate": 2.6017915309446257e-05,
"loss": 0.8555,
"step": 727
},
{
"epoch": 1.5991213618890718,
"grad_norm": 3895866.75,
"learning_rate": 2.5977198697068407e-05,
"loss": 0.8569,
"step": 728
},
{
"epoch": 1.6013179571663922,
"grad_norm": 5921540.0,
"learning_rate": 2.5936482084690554e-05,
"loss": 0.8158,
"step": 729
},
{
"epoch": 1.6035145524437122,
"grad_norm": 2908632.75,
"learning_rate": 2.5895765472312705e-05,
"loss": 0.8134,
"step": 730
},
{
"epoch": 1.6057111477210324,
"grad_norm": 9060295.0,
"learning_rate": 2.5855048859934855e-05,
"loss": 0.5383,
"step": 731
},
{
"epoch": 1.6079077429983526,
"grad_norm": 20096192.0,
"learning_rate": 2.5814332247557006e-05,
"loss": 0.8042,
"step": 732
},
{
"epoch": 1.6101043382756726,
"grad_norm": 7516175.5,
"learning_rate": 2.5773615635179156e-05,
"loss": 0.9844,
"step": 733
},
{
"epoch": 1.612300933552993,
"grad_norm": 7557309.5,
"learning_rate": 2.5732899022801303e-05,
"loss": 0.8605,
"step": 734
},
{
"epoch": 1.614497528830313,
"grad_norm": 7147080.0,
"learning_rate": 2.5692182410423454e-05,
"loss": 0.9023,
"step": 735
},
{
"epoch": 1.6166941241076331,
"grad_norm": 10267037.0,
"learning_rate": 2.5651465798045604e-05,
"loss": 0.8682,
"step": 736
},
{
"epoch": 1.6188907193849533,
"grad_norm": 6557988.0,
"learning_rate": 2.5610749185667754e-05,
"loss": 0.8464,
"step": 737
},
{
"epoch": 1.6210873146622735,
"grad_norm": 7624202.5,
"learning_rate": 2.5570032573289905e-05,
"loss": 0.824,
"step": 738
},
{
"epoch": 1.6232839099395937,
"grad_norm": 4876888.5,
"learning_rate": 2.5529315960912052e-05,
"loss": 0.7947,
"step": 739
},
{
"epoch": 1.6254805052169137,
"grad_norm": 11985913.0,
"learning_rate": 2.5488599348534202e-05,
"loss": 0.8355,
"step": 740
},
{
"epoch": 1.627677100494234,
"grad_norm": 5538659.0,
"learning_rate": 2.5447882736156353e-05,
"loss": 0.866,
"step": 741
},
{
"epoch": 1.629873695771554,
"grad_norm": 3729755.5,
"learning_rate": 2.5407166123778503e-05,
"loss": 0.7835,
"step": 742
},
{
"epoch": 1.6320702910488742,
"grad_norm": 3816142.5,
"learning_rate": 2.5366449511400654e-05,
"loss": 0.7141,
"step": 743
},
{
"epoch": 1.6342668863261944,
"grad_norm": 6218193.0,
"learning_rate": 2.5325732899022804e-05,
"loss": 0.648,
"step": 744
},
{
"epoch": 1.6364634816035144,
"grad_norm": 5415427.0,
"learning_rate": 2.528501628664495e-05,
"loss": 0.689,
"step": 745
},
{
"epoch": 1.6386600768808348,
"grad_norm": 3454429.0,
"learning_rate": 2.5244299674267102e-05,
"loss": 0.5743,
"step": 746
},
{
"epoch": 1.6408566721581548,
"grad_norm": 2751250.75,
"learning_rate": 2.5203583061889252e-05,
"loss": 0.84,
"step": 747
},
{
"epoch": 1.643053267435475,
"grad_norm": 5668017.0,
"learning_rate": 2.5162866449511403e-05,
"loss": 0.6741,
"step": 748
},
{
"epoch": 1.6452498627127952,
"grad_norm": 5624318.0,
"learning_rate": 2.5122149837133553e-05,
"loss": 0.6624,
"step": 749
},
{
"epoch": 1.6474464579901154,
"grad_norm": 8126046.5,
"learning_rate": 2.50814332247557e-05,
"loss": 0.7089,
"step": 750
},
{
"epoch": 1.6496430532674355,
"grad_norm": 21289788.0,
"learning_rate": 2.504071661237785e-05,
"loss": 0.826,
"step": 751
},
{
"epoch": 1.6518396485447555,
"grad_norm": 10167456.0,
"learning_rate": 2.5e-05,
"loss": 0.921,
"step": 752
},
{
"epoch": 1.654036243822076,
"grad_norm": 2350454.25,
"learning_rate": 2.495928338762215e-05,
"loss": 1.0177,
"step": 753
},
{
"epoch": 1.656232839099396,
"grad_norm": 10937058.0,
"learning_rate": 2.4918566775244302e-05,
"loss": 0.6545,
"step": 754
},
{
"epoch": 1.658429434376716,
"grad_norm": 4438251.5,
"learning_rate": 2.487785016286645e-05,
"loss": 0.7499,
"step": 755
},
{
"epoch": 1.6606260296540363,
"grad_norm": 3436862.5,
"learning_rate": 2.48371335504886e-05,
"loss": 0.7578,
"step": 756
},
{
"epoch": 1.6628226249313562,
"grad_norm": 3206228.0,
"learning_rate": 2.479641693811075e-05,
"loss": 0.8695,
"step": 757
},
{
"epoch": 1.6650192202086767,
"grad_norm": 3531367.25,
"learning_rate": 2.47557003257329e-05,
"loss": 0.8113,
"step": 758
},
{
"epoch": 1.6672158154859966,
"grad_norm": 5536763.0,
"learning_rate": 2.471498371335505e-05,
"loss": 0.9103,
"step": 759
},
{
"epoch": 1.6694124107633168,
"grad_norm": 7986967.0,
"learning_rate": 2.4674267100977198e-05,
"loss": 0.9087,
"step": 760
},
{
"epoch": 1.671609006040637,
"grad_norm": 5164309.0,
"learning_rate": 2.463355048859935e-05,
"loss": 0.7603,
"step": 761
},
{
"epoch": 1.6738056013179572,
"grad_norm": 3190495.25,
"learning_rate": 2.45928338762215e-05,
"loss": 0.8386,
"step": 762
},
{
"epoch": 1.6760021965952774,
"grad_norm": 10057574.0,
"learning_rate": 2.455211726384365e-05,
"loss": 0.7874,
"step": 763
},
{
"epoch": 1.6781987918725974,
"grad_norm": 5266538.0,
"learning_rate": 2.45114006514658e-05,
"loss": 0.8817,
"step": 764
},
{
"epoch": 1.6803953871499178,
"grad_norm": 5973479.0,
"learning_rate": 2.447068403908795e-05,
"loss": 0.8821,
"step": 765
},
{
"epoch": 1.6825919824272377,
"grad_norm": 8201318.0,
"learning_rate": 2.4429967426710097e-05,
"loss": 0.6769,
"step": 766
},
{
"epoch": 1.684788577704558,
"grad_norm": 18101764.0,
"learning_rate": 2.4389250814332248e-05,
"loss": 0.8242,
"step": 767
},
{
"epoch": 1.6869851729818781,
"grad_norm": 12085227.0,
"learning_rate": 2.43485342019544e-05,
"loss": 0.7494,
"step": 768
},
{
"epoch": 1.689181768259198,
"grad_norm": 5681829.0,
"learning_rate": 2.430781758957655e-05,
"loss": 0.7705,
"step": 769
},
{
"epoch": 1.6913783635365185,
"grad_norm": 6344797.0,
"learning_rate": 2.42671009771987e-05,
"loss": 0.894,
"step": 770
},
{
"epoch": 1.6935749588138385,
"grad_norm": 2183038.75,
"learning_rate": 2.4226384364820846e-05,
"loss": 0.9672,
"step": 771
},
{
"epoch": 1.6957715540911587,
"grad_norm": 5401895.0,
"learning_rate": 2.4185667752442997e-05,
"loss": 0.8957,
"step": 772
},
{
"epoch": 1.6979681493684788,
"grad_norm": 4046054.0,
"learning_rate": 2.4144951140065147e-05,
"loss": 0.7156,
"step": 773
},
{
"epoch": 1.700164744645799,
"grad_norm": 5847228.0,
"learning_rate": 2.4104234527687298e-05,
"loss": 0.6937,
"step": 774
},
{
"epoch": 1.7023613399231192,
"grad_norm": 2440498.5,
"learning_rate": 2.4063517915309448e-05,
"loss": 0.8356,
"step": 775
},
{
"epoch": 1.7045579352004392,
"grad_norm": 5103590.5,
"learning_rate": 2.4022801302931595e-05,
"loss": 0.6546,
"step": 776
},
{
"epoch": 1.7067545304777596,
"grad_norm": 5184302.0,
"learning_rate": 2.3982084690553746e-05,
"loss": 0.8807,
"step": 777
},
{
"epoch": 1.7089511257550796,
"grad_norm": 3406332.5,
"learning_rate": 2.3941368078175896e-05,
"loss": 0.8972,
"step": 778
},
{
"epoch": 1.7111477210323998,
"grad_norm": 8893194.0,
"learning_rate": 2.3900651465798047e-05,
"loss": 0.791,
"step": 779
},
{
"epoch": 1.71334431630972,
"grad_norm": 28299830.0,
"learning_rate": 2.3859934853420197e-05,
"loss": 0.8584,
"step": 780
},
{
"epoch": 1.7155409115870401,
"grad_norm": 12228590.0,
"learning_rate": 2.3819218241042348e-05,
"loss": 0.8137,
"step": 781
},
{
"epoch": 1.7177375068643603,
"grad_norm": 5580711.5,
"learning_rate": 2.3778501628664495e-05,
"loss": 0.7505,
"step": 782
},
{
"epoch": 1.7199341021416803,
"grad_norm": 5130976.0,
"learning_rate": 2.3737785016286645e-05,
"loss": 0.8373,
"step": 783
},
{
"epoch": 1.7221306974190007,
"grad_norm": 3462598.25,
"learning_rate": 2.3697068403908796e-05,
"loss": 0.7679,
"step": 784
},
{
"epoch": 1.7243272926963207,
"grad_norm": 11268651.0,
"learning_rate": 2.3656351791530946e-05,
"loss": 1.0337,
"step": 785
},
{
"epoch": 1.7265238879736409,
"grad_norm": 7928887.5,
"learning_rate": 2.3615635179153096e-05,
"loss": 0.6929,
"step": 786
},
{
"epoch": 1.728720483250961,
"grad_norm": 6228416.0,
"learning_rate": 2.3574918566775244e-05,
"loss": 0.7268,
"step": 787
},
{
"epoch": 1.730917078528281,
"grad_norm": 3173557.0,
"learning_rate": 2.3534201954397394e-05,
"loss": 0.7475,
"step": 788
},
{
"epoch": 1.7331136738056014,
"grad_norm": 1184311.75,
"learning_rate": 2.3493485342019544e-05,
"loss": 0.8507,
"step": 789
},
{
"epoch": 1.7353102690829214,
"grad_norm": 11702628.0,
"learning_rate": 2.3452768729641695e-05,
"loss": 0.9999,
"step": 790
},
{
"epoch": 1.7375068643602416,
"grad_norm": 7791823.0,
"learning_rate": 2.3412052117263845e-05,
"loss": 0.7915,
"step": 791
},
{
"epoch": 1.7397034596375618,
"grad_norm": 6442666.5,
"learning_rate": 2.3371335504885992e-05,
"loss": 0.7302,
"step": 792
},
{
"epoch": 1.741900054914882,
"grad_norm": 4107820.75,
"learning_rate": 2.3330618892508143e-05,
"loss": 0.7632,
"step": 793
},
{
"epoch": 1.7440966501922022,
"grad_norm": 7459289.0,
"learning_rate": 2.3289902280130293e-05,
"loss": 0.9324,
"step": 794
},
{
"epoch": 1.7462932454695221,
"grad_norm": 3513638.0,
"learning_rate": 2.3249185667752444e-05,
"loss": 0.7417,
"step": 795
},
{
"epoch": 1.7484898407468425,
"grad_norm": 2193017.75,
"learning_rate": 2.3208469055374594e-05,
"loss": 0.7389,
"step": 796
},
{
"epoch": 1.7506864360241625,
"grad_norm": 6493457.5,
"learning_rate": 2.316775244299674e-05,
"loss": 0.9408,
"step": 797
},
{
"epoch": 1.7528830313014827,
"grad_norm": 3541514.5,
"learning_rate": 2.3127035830618892e-05,
"loss": 0.8113,
"step": 798
},
{
"epoch": 1.755079626578803,
"grad_norm": 3392785.75,
"learning_rate": 2.3086319218241042e-05,
"loss": 0.9173,
"step": 799
},
{
"epoch": 1.7572762218561229,
"grad_norm": 3237105.75,
"learning_rate": 2.3045602605863193e-05,
"loss": 0.9845,
"step": 800
},
{
"epoch": 1.7594728171334433,
"grad_norm": 7631320.0,
"learning_rate": 2.3004885993485343e-05,
"loss": 0.8848,
"step": 801
},
{
"epoch": 1.7616694124107632,
"grad_norm": 7963966.0,
"learning_rate": 2.2964169381107494e-05,
"loss": 1.0407,
"step": 802
},
{
"epoch": 1.7638660076880834,
"grad_norm": 3586038.5,
"learning_rate": 2.292345276872964e-05,
"loss": 1.1671,
"step": 803
},
{
"epoch": 1.7660626029654036,
"grad_norm": 9959085.0,
"learning_rate": 2.288273615635179e-05,
"loss": 1.066,
"step": 804
},
{
"epoch": 1.7682591982427238,
"grad_norm": 3935806.0,
"learning_rate": 2.284201954397394e-05,
"loss": 0.8171,
"step": 805
},
{
"epoch": 1.770455793520044,
"grad_norm": 4664130.0,
"learning_rate": 2.2801302931596092e-05,
"loss": 0.7434,
"step": 806
},
{
"epoch": 1.772652388797364,
"grad_norm": 20482446.0,
"learning_rate": 2.2760586319218243e-05,
"loss": 0.8425,
"step": 807
},
{
"epoch": 1.7748489840746844,
"grad_norm": 19744094.0,
"learning_rate": 2.271986970684039e-05,
"loss": 0.9734,
"step": 808
},
{
"epoch": 1.7770455793520044,
"grad_norm": 1815085.375,
"learning_rate": 2.267915309446254e-05,
"loss": 0.9162,
"step": 809
},
{
"epoch": 1.7792421746293245,
"grad_norm": 2197733.5,
"learning_rate": 2.263843648208469e-05,
"loss": 0.6199,
"step": 810
},
{
"epoch": 1.7814387699066447,
"grad_norm": 7639382.0,
"learning_rate": 2.259771986970684e-05,
"loss": 0.709,
"step": 811
},
{
"epoch": 1.7836353651839647,
"grad_norm": 4249810.0,
"learning_rate": 2.255700325732899e-05,
"loss": 0.7909,
"step": 812
},
{
"epoch": 1.7858319604612851,
"grad_norm": 13327131.0,
"learning_rate": 2.251628664495114e-05,
"loss": 0.9038,
"step": 813
},
{
"epoch": 1.788028555738605,
"grad_norm": 10513216.0,
"learning_rate": 2.247557003257329e-05,
"loss": 0.8613,
"step": 814
},
{
"epoch": 1.7902251510159253,
"grad_norm": 3392477.0,
"learning_rate": 2.243485342019544e-05,
"loss": 0.8942,
"step": 815
},
{
"epoch": 1.7924217462932455,
"grad_norm": 3174575.75,
"learning_rate": 2.239413680781759e-05,
"loss": 0.8228,
"step": 816
},
{
"epoch": 1.7946183415705657,
"grad_norm": 5832620.0,
"learning_rate": 2.235342019543974e-05,
"loss": 0.7187,
"step": 817
},
{
"epoch": 1.7968149368478858,
"grad_norm": 6747026.0,
"learning_rate": 2.231270358306189e-05,
"loss": 0.9073,
"step": 818
},
{
"epoch": 1.7990115321252058,
"grad_norm": 16638837.0,
"learning_rate": 2.2271986970684038e-05,
"loss": 0.7966,
"step": 819
},
{
"epoch": 1.8012081274025262,
"grad_norm": 2785806.0,
"learning_rate": 2.223127035830619e-05,
"loss": 0.8535,
"step": 820
},
{
"epoch": 1.8034047226798462,
"grad_norm": 2478058.25,
"learning_rate": 2.219055374592834e-05,
"loss": 0.835,
"step": 821
},
{
"epoch": 1.8056013179571664,
"grad_norm": 2685527.0,
"learning_rate": 2.214983713355049e-05,
"loss": 0.9156,
"step": 822
},
{
"epoch": 1.8077979132344866,
"grad_norm": 9808544.0,
"learning_rate": 2.210912052117264e-05,
"loss": 0.8345,
"step": 823
},
{
"epoch": 1.8099945085118065,
"grad_norm": 2587785.25,
"learning_rate": 2.2068403908794787e-05,
"loss": 0.746,
"step": 824
},
{
"epoch": 1.812191103789127,
"grad_norm": 9017761.0,
"learning_rate": 2.2027687296416937e-05,
"loss": 0.6969,
"step": 825
},
{
"epoch": 1.814387699066447,
"grad_norm": 5046416.0,
"learning_rate": 2.1986970684039088e-05,
"loss": 0.6424,
"step": 826
},
{
"epoch": 1.8165842943437671,
"grad_norm": 8755801.0,
"learning_rate": 2.1946254071661238e-05,
"loss": 0.8777,
"step": 827
},
{
"epoch": 1.8187808896210873,
"grad_norm": 3163870.75,
"learning_rate": 2.190553745928339e-05,
"loss": 0.7417,
"step": 828
},
{
"epoch": 1.8209774848984075,
"grad_norm": 3030208.0,
"learning_rate": 2.186482084690554e-05,
"loss": 0.739,
"step": 829
},
{
"epoch": 1.8231740801757277,
"grad_norm": 8461639.0,
"learning_rate": 2.182410423452769e-05,
"loss": 0.6393,
"step": 830
},
{
"epoch": 1.8253706754530477,
"grad_norm": 4746518.0,
"learning_rate": 2.178338762214984e-05,
"loss": 0.5455,
"step": 831
},
{
"epoch": 1.827567270730368,
"grad_norm": 15795456.0,
"learning_rate": 2.174267100977199e-05,
"loss": 0.89,
"step": 832
},
{
"epoch": 1.829763866007688,
"grad_norm": 3614378.75,
"learning_rate": 2.1701954397394138e-05,
"loss": 0.8752,
"step": 833
},
{
"epoch": 1.8319604612850082,
"grad_norm": 8447181.0,
"learning_rate": 2.1661237785016288e-05,
"loss": 0.9715,
"step": 834
},
{
"epoch": 1.8341570565623284,
"grad_norm": 10205021.0,
"learning_rate": 2.162052117263844e-05,
"loss": 0.8767,
"step": 835
},
{
"epoch": 1.8363536518396484,
"grad_norm": 6217193.0,
"learning_rate": 2.157980456026059e-05,
"loss": 0.6549,
"step": 836
},
{
"epoch": 1.8385502471169688,
"grad_norm": 3376318.75,
"learning_rate": 2.153908794788274e-05,
"loss": 0.7556,
"step": 837
},
{
"epoch": 1.8407468423942888,
"grad_norm": 3759376.5,
"learning_rate": 2.149837133550489e-05,
"loss": 0.8284,
"step": 838
},
{
"epoch": 1.842943437671609,
"grad_norm": 5908501.0,
"learning_rate": 2.1457654723127037e-05,
"loss": 0.7409,
"step": 839
},
{
"epoch": 1.8451400329489291,
"grad_norm": 4526432.5,
"learning_rate": 2.1416938110749187e-05,
"loss": 0.8601,
"step": 840
},
{
"epoch": 1.8473366282262493,
"grad_norm": 2507999.5,
"learning_rate": 2.1376221498371338e-05,
"loss": 0.7499,
"step": 841
},
{
"epoch": 1.8495332235035695,
"grad_norm": 15405622.0,
"learning_rate": 2.1335504885993488e-05,
"loss": 0.8036,
"step": 842
},
{
"epoch": 1.8517298187808895,
"grad_norm": 7135560.5,
"learning_rate": 2.129478827361564e-05,
"loss": 1.0048,
"step": 843
},
{
"epoch": 1.85392641405821,
"grad_norm": 1439257.625,
"learning_rate": 2.1254071661237786e-05,
"loss": 0.8738,
"step": 844
},
{
"epoch": 1.8561230093355299,
"grad_norm": 8523340.0,
"learning_rate": 2.1213355048859936e-05,
"loss": 0.7504,
"step": 845
},
{
"epoch": 1.85831960461285,
"grad_norm": 4241663.0,
"learning_rate": 2.1172638436482087e-05,
"loss": 0.7513,
"step": 846
},
{
"epoch": 1.8605161998901703,
"grad_norm": 3059999.25,
"learning_rate": 2.1131921824104237e-05,
"loss": 0.9571,
"step": 847
},
{
"epoch": 1.8627127951674904,
"grad_norm": 7952339.0,
"learning_rate": 2.1091205211726388e-05,
"loss": 0.7104,
"step": 848
},
{
"epoch": 1.8649093904448106,
"grad_norm": 5356255.0,
"learning_rate": 2.1050488599348535e-05,
"loss": 0.7163,
"step": 849
},
{
"epoch": 1.8671059857221306,
"grad_norm": 4578026.5,
"learning_rate": 2.1009771986970685e-05,
"loss": 0.7132,
"step": 850
},
{
"epoch": 1.869302580999451,
"grad_norm": 5333593.5,
"learning_rate": 2.0969055374592836e-05,
"loss": 0.6446,
"step": 851
},
{
"epoch": 1.871499176276771,
"grad_norm": 15180029.0,
"learning_rate": 2.0928338762214986e-05,
"loss": 0.8549,
"step": 852
},
{
"epoch": 1.8736957715540912,
"grad_norm": 101827592.0,
"learning_rate": 2.0887622149837137e-05,
"loss": 1.0132,
"step": 853
},
{
"epoch": 1.8758923668314114,
"grad_norm": 4923558.5,
"learning_rate": 2.0846905537459287e-05,
"loss": 0.9386,
"step": 854
},
{
"epoch": 1.8780889621087313,
"grad_norm": 1697080.5,
"learning_rate": 2.0806188925081434e-05,
"loss": 0.7045,
"step": 855
},
{
"epoch": 1.8802855573860517,
"grad_norm": 4818134.5,
"learning_rate": 2.0765472312703585e-05,
"loss": 0.9553,
"step": 856
},
{
"epoch": 1.8824821526633717,
"grad_norm": 2097056.625,
"learning_rate": 2.0724755700325735e-05,
"loss": 0.7434,
"step": 857
},
{
"epoch": 1.884678747940692,
"grad_norm": 5853286.0,
"learning_rate": 2.0684039087947885e-05,
"loss": 0.8532,
"step": 858
},
{
"epoch": 1.886875343218012,
"grad_norm": 4985847.0,
"learning_rate": 2.0643322475570036e-05,
"loss": 0.7246,
"step": 859
},
{
"epoch": 1.8890719384953323,
"grad_norm": 16572541.0,
"learning_rate": 2.0602605863192183e-05,
"loss": 0.733,
"step": 860
},
{
"epoch": 1.8912685337726525,
"grad_norm": 15598193.0,
"learning_rate": 2.0561889250814333e-05,
"loss": 0.9523,
"step": 861
},
{
"epoch": 1.8934651290499724,
"grad_norm": 4152683.0,
"learning_rate": 2.0521172638436484e-05,
"loss": 0.8332,
"step": 862
},
{
"epoch": 1.8956617243272929,
"grad_norm": 7006605.0,
"learning_rate": 2.0480456026058634e-05,
"loss": 1.0624,
"step": 863
},
{
"epoch": 1.8978583196046128,
"grad_norm": 9953751.0,
"learning_rate": 2.0439739413680785e-05,
"loss": 0.693,
"step": 864
},
{
"epoch": 1.900054914881933,
"grad_norm": 4379904.5,
"learning_rate": 2.0399022801302932e-05,
"loss": 0.8718,
"step": 865
},
{
"epoch": 1.9022515101592532,
"grad_norm": 5631609.5,
"learning_rate": 2.0358306188925082e-05,
"loss": 0.759,
"step": 866
},
{
"epoch": 1.9044481054365732,
"grad_norm": 9908307.0,
"learning_rate": 2.0317589576547233e-05,
"loss": 0.7545,
"step": 867
},
{
"epoch": 1.9066447007138936,
"grad_norm": 8658726.0,
"learning_rate": 2.0276872964169383e-05,
"loss": 1.0098,
"step": 868
},
{
"epoch": 1.9088412959912135,
"grad_norm": 14695729.0,
"learning_rate": 2.0236156351791534e-05,
"loss": 1.0508,
"step": 869
},
{
"epoch": 1.9110378912685337,
"grad_norm": 1182297.625,
"learning_rate": 2.019543973941368e-05,
"loss": 0.7606,
"step": 870
},
{
"epoch": 1.913234486545854,
"grad_norm": 5388146.5,
"learning_rate": 2.015472312703583e-05,
"loss": 0.7966,
"step": 871
},
{
"epoch": 1.9154310818231741,
"grad_norm": 2985355.75,
"learning_rate": 2.0114006514657982e-05,
"loss": 0.8855,
"step": 872
},
{
"epoch": 1.9176276771004943,
"grad_norm": 9514274.0,
"learning_rate": 2.0073289902280132e-05,
"loss": 0.9375,
"step": 873
},
{
"epoch": 1.9198242723778143,
"grad_norm": 7851527.5,
"learning_rate": 2.0032573289902283e-05,
"loss": 1.0427,
"step": 874
},
{
"epoch": 1.9220208676551347,
"grad_norm": 6111110.5,
"learning_rate": 1.9991856677524433e-05,
"loss": 0.9939,
"step": 875
},
{
"epoch": 1.9242174629324547,
"grad_norm": 8540786.0,
"learning_rate": 1.995114006514658e-05,
"loss": 0.9236,
"step": 876
},
{
"epoch": 1.9264140582097748,
"grad_norm": 13982911.0,
"learning_rate": 1.991042345276873e-05,
"loss": 0.7019,
"step": 877
},
{
"epoch": 1.928610653487095,
"grad_norm": 6914903.5,
"learning_rate": 1.986970684039088e-05,
"loss": 0.8671,
"step": 878
},
{
"epoch": 1.930807248764415,
"grad_norm": 3630576.25,
"learning_rate": 1.982899022801303e-05,
"loss": 0.8166,
"step": 879
},
{
"epoch": 1.9330038440417354,
"grad_norm": 9739586.0,
"learning_rate": 1.9788273615635182e-05,
"loss": 0.7128,
"step": 880
},
{
"epoch": 1.9352004393190554,
"grad_norm": 13686392.0,
"learning_rate": 1.974755700325733e-05,
"loss": 0.8336,
"step": 881
},
{
"epoch": 1.9373970345963756,
"grad_norm": 2490466.5,
"learning_rate": 1.970684039087948e-05,
"loss": 0.6191,
"step": 882
},
{
"epoch": 1.9395936298736958,
"grad_norm": 7867994.5,
"learning_rate": 1.966612377850163e-05,
"loss": 0.9043,
"step": 883
},
{
"epoch": 1.941790225151016,
"grad_norm": 1060045.375,
"learning_rate": 1.962540716612378e-05,
"loss": 0.862,
"step": 884
},
{
"epoch": 1.9439868204283361,
"grad_norm": 4620791.0,
"learning_rate": 1.958469055374593e-05,
"loss": 1.2265,
"step": 885
},
{
"epoch": 1.9461834157056561,
"grad_norm": 30233016.0,
"learning_rate": 1.9543973941368078e-05,
"loss": 0.847,
"step": 886
},
{
"epoch": 1.9483800109829765,
"grad_norm": 14562146.0,
"learning_rate": 1.950325732899023e-05,
"loss": 0.722,
"step": 887
},
{
"epoch": 1.9505766062602965,
"grad_norm": 13033622.0,
"learning_rate": 1.946254071661238e-05,
"loss": 0.7176,
"step": 888
},
{
"epoch": 1.9527732015376167,
"grad_norm": 2603171.75,
"learning_rate": 1.942182410423453e-05,
"loss": 0.8335,
"step": 889
},
{
"epoch": 1.9549697968149369,
"grad_norm": 4586265.0,
"learning_rate": 1.938110749185668e-05,
"loss": 0.8147,
"step": 890
},
{
"epoch": 1.9571663920922568,
"grad_norm": 2921726.0,
"learning_rate": 1.9340390879478827e-05,
"loss": 0.9424,
"step": 891
},
{
"epoch": 1.9593629873695773,
"grad_norm": 4601482.5,
"learning_rate": 1.9299674267100977e-05,
"loss": 0.7312,
"step": 892
},
{
"epoch": 1.9615595826468972,
"grad_norm": 2894955.5,
"learning_rate": 1.9258957654723128e-05,
"loss": 0.7901,
"step": 893
},
{
"epoch": 1.9637561779242174,
"grad_norm": 3576191.75,
"learning_rate": 1.9218241042345278e-05,
"loss": 0.8681,
"step": 894
},
{
"epoch": 1.9659527732015376,
"grad_norm": 6953558.5,
"learning_rate": 1.917752442996743e-05,
"loss": 0.6646,
"step": 895
},
{
"epoch": 1.9681493684788578,
"grad_norm": 6109067.5,
"learning_rate": 1.913680781758958e-05,
"loss": 0.941,
"step": 896
},
{
"epoch": 1.970345963756178,
"grad_norm": 7972811.5,
"learning_rate": 1.9096091205211726e-05,
"loss": 0.6893,
"step": 897
},
{
"epoch": 1.972542559033498,
"grad_norm": 4189912.5,
"learning_rate": 1.9055374592833877e-05,
"loss": 0.9582,
"step": 898
},
{
"epoch": 1.9747391543108184,
"grad_norm": 2655152.25,
"learning_rate": 1.9014657980456027e-05,
"loss": 0.9548,
"step": 899
},
{
"epoch": 1.9769357495881383,
"grad_norm": 2501240.5,
"learning_rate": 1.8973941368078178e-05,
"loss": 0.7463,
"step": 900
},
{
"epoch": 1.9791323448654585,
"grad_norm": 5079035.0,
"learning_rate": 1.8933224755700328e-05,
"loss": 0.9083,
"step": 901
},
{
"epoch": 1.9813289401427787,
"grad_norm": 7339288.0,
"learning_rate": 1.8892508143322475e-05,
"loss": 0.8887,
"step": 902
},
{
"epoch": 1.9835255354200987,
"grad_norm": 4568381.5,
"learning_rate": 1.8851791530944626e-05,
"loss": 0.7833,
"step": 903
},
{
"epoch": 1.985722130697419,
"grad_norm": 2933216.5,
"learning_rate": 1.8811074918566776e-05,
"loss": 0.7092,
"step": 904
},
{
"epoch": 1.987918725974739,
"grad_norm": 3911176.25,
"learning_rate": 1.8770358306188927e-05,
"loss": 0.6684,
"step": 905
},
{
"epoch": 1.9901153212520593,
"grad_norm": 4835380.0,
"learning_rate": 1.8729641693811077e-05,
"loss": 0.9137,
"step": 906
},
{
"epoch": 1.9923119165293794,
"grad_norm": 3693694.25,
"learning_rate": 1.8688925081433224e-05,
"loss": 0.6428,
"step": 907
},
{
"epoch": 1.9945085118066996,
"grad_norm": 5489355.0,
"learning_rate": 1.8648208469055375e-05,
"loss": 0.66,
"step": 908
},
{
"epoch": 1.9967051070840198,
"grad_norm": 3017182.75,
"learning_rate": 1.8607491856677525e-05,
"loss": 0.6366,
"step": 909
},
{
"epoch": 1.9989017023613398,
"grad_norm": 8326379.5,
"learning_rate": 1.8566775244299675e-05,
"loss": 0.8199,
"step": 910
},
{
"epoch": 2.00109829763866,
"grad_norm": 3494578.5,
"learning_rate": 1.8526058631921826e-05,
"loss": 0.9508,
"step": 911
},
{
"epoch": 2.00329489291598,
"grad_norm": 5448883.5,
"learning_rate": 1.8485342019543976e-05,
"loss": 0.7703,
"step": 912
},
{
"epoch": 2.0054914881933006,
"grad_norm": 7030463.5,
"learning_rate": 1.8444625407166123e-05,
"loss": 0.6459,
"step": 913
},
{
"epoch": 2.0076880834706206,
"grad_norm": 2744755.25,
"learning_rate": 1.8403908794788274e-05,
"loss": 0.7983,
"step": 914
},
{
"epoch": 2.0098846787479405,
"grad_norm": 6295109.5,
"learning_rate": 1.8363192182410424e-05,
"loss": 0.7915,
"step": 915
},
{
"epoch": 2.012081274025261,
"grad_norm": 7412736.5,
"learning_rate": 1.8322475570032575e-05,
"loss": 1.0068,
"step": 916
},
{
"epoch": 2.014277869302581,
"grad_norm": 7046878.5,
"learning_rate": 1.8281758957654725e-05,
"loss": 0.9608,
"step": 917
},
{
"epoch": 2.0164744645799013,
"grad_norm": 2421389.25,
"learning_rate": 1.8241042345276872e-05,
"loss": 0.9851,
"step": 918
},
{
"epoch": 2.0186710598572213,
"grad_norm": 3188937.0,
"learning_rate": 1.8200325732899023e-05,
"loss": 0.7339,
"step": 919
},
{
"epoch": 2.0208676551345413,
"grad_norm": 13980595.0,
"learning_rate": 1.8159609120521173e-05,
"loss": 0.7492,
"step": 920
},
{
"epoch": 2.0230642504118617,
"grad_norm": 6551893.0,
"learning_rate": 1.8118892508143324e-05,
"loss": 0.7184,
"step": 921
},
{
"epoch": 2.0252608456891816,
"grad_norm": 9451679.0,
"learning_rate": 1.8078175895765474e-05,
"loss": 0.823,
"step": 922
},
{
"epoch": 2.027457440966502,
"grad_norm": 8318042.5,
"learning_rate": 1.803745928338762e-05,
"loss": 0.8667,
"step": 923
},
{
"epoch": 2.029654036243822,
"grad_norm": 4752105.5,
"learning_rate": 1.799674267100977e-05,
"loss": 0.9472,
"step": 924
},
{
"epoch": 2.0318506315211424,
"grad_norm": 2738853.0,
"learning_rate": 1.7956026058631922e-05,
"loss": 0.7482,
"step": 925
},
{
"epoch": 2.0340472267984624,
"grad_norm": 4173811.75,
"learning_rate": 1.7915309446254073e-05,
"loss": 0.8398,
"step": 926
},
{
"epoch": 2.0362438220757824,
"grad_norm": 5078139.0,
"learning_rate": 1.7874592833876223e-05,
"loss": 0.9674,
"step": 927
},
{
"epoch": 2.0384404173531028,
"grad_norm": 8356471.5,
"learning_rate": 1.783387622149837e-05,
"loss": 0.771,
"step": 928
},
{
"epoch": 2.0406370126304227,
"grad_norm": 4511216.0,
"learning_rate": 1.779315960912052e-05,
"loss": 0.6947,
"step": 929
},
{
"epoch": 2.042833607907743,
"grad_norm": 4675797.0,
"learning_rate": 1.775244299674267e-05,
"loss": 0.708,
"step": 930
},
{
"epoch": 2.045030203185063,
"grad_norm": 6009520.0,
"learning_rate": 1.771172638436482e-05,
"loss": 0.7586,
"step": 931
},
{
"epoch": 2.047226798462383,
"grad_norm": 2811151.25,
"learning_rate": 1.7671009771986972e-05,
"loss": 0.8171,
"step": 932
},
{
"epoch": 2.0494233937397035,
"grad_norm": 3328623.75,
"learning_rate": 1.7630293159609122e-05,
"loss": 0.8007,
"step": 933
},
{
"epoch": 2.0516199890170235,
"grad_norm": 9727441.0,
"learning_rate": 1.758957654723127e-05,
"loss": 0.7732,
"step": 934
},
{
"epoch": 2.053816584294344,
"grad_norm": 1385656.5,
"learning_rate": 1.754885993485342e-05,
"loss": 0.7453,
"step": 935
},
{
"epoch": 2.056013179571664,
"grad_norm": 3496263.0,
"learning_rate": 1.750814332247557e-05,
"loss": 0.939,
"step": 936
},
{
"epoch": 2.0582097748489843,
"grad_norm": 3648535.25,
"learning_rate": 1.746742671009772e-05,
"loss": 0.8441,
"step": 937
},
{
"epoch": 2.0604063701263042,
"grad_norm": 3279550.0,
"learning_rate": 1.742671009771987e-05,
"loss": 0.6513,
"step": 938
},
{
"epoch": 2.062602965403624,
"grad_norm": 6393275.0,
"learning_rate": 1.738599348534202e-05,
"loss": 1.0011,
"step": 939
},
{
"epoch": 2.0647995606809446,
"grad_norm": 7883316.5,
"learning_rate": 1.734527687296417e-05,
"loss": 0.7555,
"step": 940
},
{
"epoch": 2.0669961559582646,
"grad_norm": 7346948.0,
"learning_rate": 1.730456026058632e-05,
"loss": 0.8227,
"step": 941
},
{
"epoch": 2.069192751235585,
"grad_norm": 2171400.25,
"learning_rate": 1.726384364820847e-05,
"loss": 0.6303,
"step": 942
},
{
"epoch": 2.071389346512905,
"grad_norm": 3247883.0,
"learning_rate": 1.722312703583062e-05,
"loss": 0.8052,
"step": 943
},
{
"epoch": 2.073585941790225,
"grad_norm": 2736027.25,
"learning_rate": 1.7182410423452767e-05,
"loss": 0.719,
"step": 944
},
{
"epoch": 2.0757825370675453,
"grad_norm": 4661686.0,
"learning_rate": 1.7141693811074918e-05,
"loss": 0.8049,
"step": 945
},
{
"epoch": 2.0779791323448653,
"grad_norm": 6777251.5,
"learning_rate": 1.7100977198697068e-05,
"loss": 0.802,
"step": 946
},
{
"epoch": 2.0801757276221857,
"grad_norm": 12720982.0,
"learning_rate": 1.706026058631922e-05,
"loss": 0.6167,
"step": 947
},
{
"epoch": 2.0823723228995057,
"grad_norm": 3066535.25,
"learning_rate": 1.701954397394137e-05,
"loss": 0.7019,
"step": 948
},
{
"epoch": 2.084568918176826,
"grad_norm": 8993991.0,
"learning_rate": 1.697882736156352e-05,
"loss": 0.9715,
"step": 949
},
{
"epoch": 2.086765513454146,
"grad_norm": 1970840.25,
"learning_rate": 1.6938110749185667e-05,
"loss": 0.8013,
"step": 950
},
{
"epoch": 2.088962108731466,
"grad_norm": 14743399.0,
"learning_rate": 1.6897394136807817e-05,
"loss": 0.8416,
"step": 951
},
{
"epoch": 2.0911587040087865,
"grad_norm": 7608622.5,
"learning_rate": 1.6856677524429968e-05,
"loss": 0.8608,
"step": 952
},
{
"epoch": 2.0933552992861064,
"grad_norm": 6201172.5,
"learning_rate": 1.6815960912052118e-05,
"loss": 0.9443,
"step": 953
},
{
"epoch": 2.095551894563427,
"grad_norm": 2341765.75,
"learning_rate": 1.677524429967427e-05,
"loss": 0.6956,
"step": 954
},
{
"epoch": 2.097748489840747,
"grad_norm": 3916520.5,
"learning_rate": 1.6734527687296416e-05,
"loss": 0.902,
"step": 955
},
{
"epoch": 2.099945085118067,
"grad_norm": 33477604.0,
"learning_rate": 1.6693811074918566e-05,
"loss": 0.7859,
"step": 956
},
{
"epoch": 2.102141680395387,
"grad_norm": 3646212.75,
"learning_rate": 1.6653094462540716e-05,
"loss": 0.8155,
"step": 957
},
{
"epoch": 2.104338275672707,
"grad_norm": 10441493.0,
"learning_rate": 1.6612377850162867e-05,
"loss": 0.7251,
"step": 958
},
{
"epoch": 2.1065348709500276,
"grad_norm": 11191649.0,
"learning_rate": 1.6571661237785017e-05,
"loss": 0.8062,
"step": 959
},
{
"epoch": 2.1087314662273475,
"grad_norm": 2551275.0,
"learning_rate": 1.6530944625407164e-05,
"loss": 0.7731,
"step": 960
},
{
"epoch": 2.110928061504668,
"grad_norm": 16948220.0,
"learning_rate": 1.6490228013029315e-05,
"loss": 1.0092,
"step": 961
},
{
"epoch": 2.113124656781988,
"grad_norm": 7067645.5,
"learning_rate": 1.6449511400651465e-05,
"loss": 0.7808,
"step": 962
},
{
"epoch": 2.115321252059308,
"grad_norm": 7295557.5,
"learning_rate": 1.6408794788273616e-05,
"loss": 0.6985,
"step": 963
},
{
"epoch": 2.1175178473366283,
"grad_norm": 4207166.5,
"learning_rate": 1.6368078175895766e-05,
"loss": 0.7756,
"step": 964
},
{
"epoch": 2.1197144426139483,
"grad_norm": 3023842.25,
"learning_rate": 1.6327361563517913e-05,
"loss": 0.7299,
"step": 965
},
{
"epoch": 2.1219110378912687,
"grad_norm": 2193285.0,
"learning_rate": 1.6286644951140064e-05,
"loss": 0.7421,
"step": 966
},
{
"epoch": 2.1241076331685886,
"grad_norm": 4508207.5,
"learning_rate": 1.6245928338762214e-05,
"loss": 0.7343,
"step": 967
},
{
"epoch": 2.1263042284459086,
"grad_norm": 3777765.5,
"learning_rate": 1.6205211726384365e-05,
"loss": 0.7332,
"step": 968
},
{
"epoch": 2.128500823723229,
"grad_norm": 6247828.0,
"learning_rate": 1.6164495114006515e-05,
"loss": 0.7085,
"step": 969
},
{
"epoch": 2.130697419000549,
"grad_norm": 2008213.25,
"learning_rate": 1.6123778501628666e-05,
"loss": 0.8822,
"step": 970
},
{
"epoch": 2.1328940142778694,
"grad_norm": 5647030.0,
"learning_rate": 1.6083061889250813e-05,
"loss": 0.7882,
"step": 971
},
{
"epoch": 2.1350906095551894,
"grad_norm": 3650408.5,
"learning_rate": 1.6042345276872963e-05,
"loss": 0.9182,
"step": 972
},
{
"epoch": 2.13728720483251,
"grad_norm": 4611719.5,
"learning_rate": 1.6001628664495114e-05,
"loss": 0.8011,
"step": 973
},
{
"epoch": 2.1394838001098297,
"grad_norm": 2211410.75,
"learning_rate": 1.5960912052117264e-05,
"loss": 0.8519,
"step": 974
},
{
"epoch": 2.1416803953871497,
"grad_norm": 8847583.0,
"learning_rate": 1.5920195439739415e-05,
"loss": 0.8471,
"step": 975
},
{
"epoch": 2.14387699066447,
"grad_norm": 2778647.75,
"learning_rate": 1.587947882736156e-05,
"loss": 0.8439,
"step": 976
},
{
"epoch": 2.14607358594179,
"grad_norm": 2086659.75,
"learning_rate": 1.5838762214983712e-05,
"loss": 0.7953,
"step": 977
},
{
"epoch": 2.1482701812191105,
"grad_norm": 3368784.0,
"learning_rate": 1.5798045602605863e-05,
"loss": 0.7257,
"step": 978
},
{
"epoch": 2.1504667764964305,
"grad_norm": 6053738.5,
"learning_rate": 1.5757328990228013e-05,
"loss": 0.7569,
"step": 979
},
{
"epoch": 2.152663371773751,
"grad_norm": 10296760.0,
"learning_rate": 1.5716612377850163e-05,
"loss": 0.8769,
"step": 980
},
{
"epoch": 2.154859967051071,
"grad_norm": 7475247.0,
"learning_rate": 1.567589576547231e-05,
"loss": 0.6506,
"step": 981
},
{
"epoch": 2.157056562328391,
"grad_norm": 3973627.0,
"learning_rate": 1.563517915309446e-05,
"loss": 0.6861,
"step": 982
},
{
"epoch": 2.1592531576057112,
"grad_norm": 4072698.75,
"learning_rate": 1.5594462540716615e-05,
"loss": 0.9369,
"step": 983
},
{
"epoch": 2.161449752883031,
"grad_norm": 3552805.0,
"learning_rate": 1.5553745928338765e-05,
"loss": 0.7046,
"step": 984
},
{
"epoch": 2.1636463481603516,
"grad_norm": 8278152.0,
"learning_rate": 1.5513029315960916e-05,
"loss": 0.718,
"step": 985
},
{
"epoch": 2.1658429434376716,
"grad_norm": 4112720.25,
"learning_rate": 1.5472312703583063e-05,
"loss": 0.7558,
"step": 986
},
{
"epoch": 2.168039538714992,
"grad_norm": 2435579.0,
"learning_rate": 1.5431596091205213e-05,
"loss": 0.8916,
"step": 987
},
{
"epoch": 2.170236133992312,
"grad_norm": 3046778.5,
"learning_rate": 1.5390879478827364e-05,
"loss": 0.6922,
"step": 988
},
{
"epoch": 2.172432729269632,
"grad_norm": 6671465.0,
"learning_rate": 1.5350162866449514e-05,
"loss": 0.8092,
"step": 989
},
{
"epoch": 2.1746293245469523,
"grad_norm": 5192924.0,
"learning_rate": 1.5309446254071665e-05,
"loss": 0.9859,
"step": 990
},
{
"epoch": 2.1768259198242723,
"grad_norm": 9703150.0,
"learning_rate": 1.5268729641693812e-05,
"loss": 0.7553,
"step": 991
},
{
"epoch": 2.1790225151015927,
"grad_norm": 3975486.0,
"learning_rate": 1.5228013029315962e-05,
"loss": 0.6931,
"step": 992
},
{
"epoch": 2.1812191103789127,
"grad_norm": 9991519.0,
"learning_rate": 1.5187296416938113e-05,
"loss": 0.8509,
"step": 993
},
{
"epoch": 2.1834157056562327,
"grad_norm": 1893980.5,
"learning_rate": 1.5146579804560263e-05,
"loss": 0.7236,
"step": 994
},
{
"epoch": 2.185612300933553,
"grad_norm": 4502999.5,
"learning_rate": 1.5105863192182412e-05,
"loss": 0.728,
"step": 995
},
{
"epoch": 2.187808896210873,
"grad_norm": 4736523.0,
"learning_rate": 1.5065146579804562e-05,
"loss": 0.9339,
"step": 996
},
{
"epoch": 2.1900054914881935,
"grad_norm": 8523063.0,
"learning_rate": 1.5024429967426711e-05,
"loss": 0.85,
"step": 997
},
{
"epoch": 2.1922020867655134,
"grad_norm": 1488145.75,
"learning_rate": 1.4983713355048862e-05,
"loss": 0.8031,
"step": 998
},
{
"epoch": 2.1943986820428334,
"grad_norm": 7985575.5,
"learning_rate": 1.4942996742671012e-05,
"loss": 0.9585,
"step": 999
},
{
"epoch": 2.196595277320154,
"grad_norm": 7183530.0,
"learning_rate": 1.490228013029316e-05,
"loss": 0.7347,
"step": 1000
},
{
"epoch": 2.1987918725974738,
"grad_norm": 3536959.0,
"learning_rate": 1.4861563517915311e-05,
"loss": 0.5781,
"step": 1001
},
{
"epoch": 2.200988467874794,
"grad_norm": 2099161.25,
"learning_rate": 1.4820846905537462e-05,
"loss": 0.7237,
"step": 1002
},
{
"epoch": 2.203185063152114,
"grad_norm": 4888102.5,
"learning_rate": 1.478013029315961e-05,
"loss": 0.8234,
"step": 1003
},
{
"epoch": 2.2053816584294346,
"grad_norm": 4817862.5,
"learning_rate": 1.4739413680781761e-05,
"loss": 0.7021,
"step": 1004
},
{
"epoch": 2.2075782537067545,
"grad_norm": 1827390.375,
"learning_rate": 1.469869706840391e-05,
"loss": 0.7217,
"step": 1005
},
{
"epoch": 2.2097748489840745,
"grad_norm": 3758607.0,
"learning_rate": 1.465798045602606e-05,
"loss": 0.8257,
"step": 1006
},
{
"epoch": 2.211971444261395,
"grad_norm": 12939548.0,
"learning_rate": 1.461726384364821e-05,
"loss": 0.6608,
"step": 1007
},
{
"epoch": 2.214168039538715,
"grad_norm": 18534588.0,
"learning_rate": 1.457654723127036e-05,
"loss": 1.042,
"step": 1008
},
{
"epoch": 2.2163646348160353,
"grad_norm": 1720359.375,
"learning_rate": 1.453583061889251e-05,
"loss": 0.8151,
"step": 1009
},
{
"epoch": 2.2185612300933553,
"grad_norm": 29221916.0,
"learning_rate": 1.449511400651466e-05,
"loss": 0.7917,
"step": 1010
},
{
"epoch": 2.2207578253706757,
"grad_norm": 1805597.625,
"learning_rate": 1.4454397394136809e-05,
"loss": 0.7776,
"step": 1011
},
{
"epoch": 2.2229544206479956,
"grad_norm": 46326092.0,
"learning_rate": 1.441368078175896e-05,
"loss": 1.0515,
"step": 1012
},
{
"epoch": 2.2251510159253156,
"grad_norm": 10872767.0,
"learning_rate": 1.4372964169381108e-05,
"loss": 0.9426,
"step": 1013
},
{
"epoch": 2.227347611202636,
"grad_norm": 7785878.0,
"learning_rate": 1.4332247557003259e-05,
"loss": 0.9596,
"step": 1014
},
{
"epoch": 2.229544206479956,
"grad_norm": 36649992.0,
"learning_rate": 1.429153094462541e-05,
"loss": 0.9342,
"step": 1015
},
{
"epoch": 2.2317408017572764,
"grad_norm": 4091651.5,
"learning_rate": 1.4250814332247558e-05,
"loss": 0.8276,
"step": 1016
},
{
"epoch": 2.2339373970345964,
"grad_norm": 4844306.0,
"learning_rate": 1.4210097719869708e-05,
"loss": 0.9671,
"step": 1017
},
{
"epoch": 2.2361339923119163,
"grad_norm": 7422064.5,
"learning_rate": 1.4169381107491859e-05,
"loss": 1.0135,
"step": 1018
},
{
"epoch": 2.2383305875892368,
"grad_norm": 5165876.0,
"learning_rate": 1.4128664495114008e-05,
"loss": 0.8735,
"step": 1019
},
{
"epoch": 2.2405271828665567,
"grad_norm": 7189243.5,
"learning_rate": 1.4087947882736158e-05,
"loss": 0.8116,
"step": 1020
},
{
"epoch": 2.242723778143877,
"grad_norm": 8663355.0,
"learning_rate": 1.4047231270358307e-05,
"loss": 0.7846,
"step": 1021
},
{
"epoch": 2.244920373421197,
"grad_norm": 4290221.0,
"learning_rate": 1.4006514657980457e-05,
"loss": 0.6436,
"step": 1022
},
{
"epoch": 2.247116968698517,
"grad_norm": 4210670.0,
"learning_rate": 1.3965798045602608e-05,
"loss": 0.8688,
"step": 1023
},
{
"epoch": 2.2493135639758375,
"grad_norm": 16253383.0,
"learning_rate": 1.3925081433224757e-05,
"loss": 0.7745,
"step": 1024
},
{
"epoch": 2.2515101592531575,
"grad_norm": 5735170.5,
"learning_rate": 1.3884364820846907e-05,
"loss": 0.7289,
"step": 1025
},
{
"epoch": 2.253706754530478,
"grad_norm": 3034424.0,
"learning_rate": 1.3843648208469056e-05,
"loss": 0.7259,
"step": 1026
},
{
"epoch": 2.255903349807798,
"grad_norm": 3135050.0,
"learning_rate": 1.3802931596091206e-05,
"loss": 0.8013,
"step": 1027
},
{
"epoch": 2.2580999450851182,
"grad_norm": 7517336.0,
"learning_rate": 1.3762214983713357e-05,
"loss": 0.897,
"step": 1028
},
{
"epoch": 2.260296540362438,
"grad_norm": 5234165.5,
"learning_rate": 1.3721498371335505e-05,
"loss": 0.8335,
"step": 1029
},
{
"epoch": 2.262493135639758,
"grad_norm": 2984769.5,
"learning_rate": 1.3680781758957656e-05,
"loss": 0.6761,
"step": 1030
},
{
"epoch": 2.2646897309170786,
"grad_norm": 2842281.25,
"learning_rate": 1.3640065146579806e-05,
"loss": 0.7023,
"step": 1031
},
{
"epoch": 2.2668863261943986,
"grad_norm": 4719035.0,
"learning_rate": 1.3599348534201955e-05,
"loss": 0.6832,
"step": 1032
},
{
"epoch": 2.269082921471719,
"grad_norm": 2050836.125,
"learning_rate": 1.3558631921824106e-05,
"loss": 0.9524,
"step": 1033
},
{
"epoch": 2.271279516749039,
"grad_norm": 3227051.75,
"learning_rate": 1.3517915309446254e-05,
"loss": 0.7612,
"step": 1034
},
{
"epoch": 2.2734761120263594,
"grad_norm": 5535661.0,
"learning_rate": 1.3477198697068405e-05,
"loss": 0.7114,
"step": 1035
},
{
"epoch": 2.2756727073036793,
"grad_norm": 1950398.625,
"learning_rate": 1.3436482084690555e-05,
"loss": 0.7762,
"step": 1036
},
{
"epoch": 2.2778693025809993,
"grad_norm": 4733415.0,
"learning_rate": 1.3395765472312704e-05,
"loss": 0.684,
"step": 1037
},
{
"epoch": 2.2800658978583197,
"grad_norm": 3833281.0,
"learning_rate": 1.3355048859934855e-05,
"loss": 0.7297,
"step": 1038
},
{
"epoch": 2.2822624931356397,
"grad_norm": 6211348.5,
"learning_rate": 1.3314332247557005e-05,
"loss": 0.7067,
"step": 1039
},
{
"epoch": 2.28445908841296,
"grad_norm": 1275267.75,
"learning_rate": 1.3273615635179154e-05,
"loss": 0.8348,
"step": 1040
},
{
"epoch": 2.28665568369028,
"grad_norm": 3057475.25,
"learning_rate": 1.3232899022801304e-05,
"loss": 0.764,
"step": 1041
},
{
"epoch": 2.2888522789676005,
"grad_norm": 5520224.5,
"learning_rate": 1.3192182410423453e-05,
"loss": 0.867,
"step": 1042
},
{
"epoch": 2.2910488742449204,
"grad_norm": 3466532.25,
"learning_rate": 1.3151465798045603e-05,
"loss": 0.6492,
"step": 1043
},
{
"epoch": 2.2932454695222404,
"grad_norm": 2652557.0,
"learning_rate": 1.3110749185667754e-05,
"loss": 0.7513,
"step": 1044
},
{
"epoch": 2.295442064799561,
"grad_norm": 2464003.75,
"learning_rate": 1.3070032573289903e-05,
"loss": 0.8589,
"step": 1045
},
{
"epoch": 2.297638660076881,
"grad_norm": 3981063.0,
"learning_rate": 1.3029315960912053e-05,
"loss": 0.7246,
"step": 1046
},
{
"epoch": 2.2998352553542007,
"grad_norm": 2509091.0,
"learning_rate": 1.2988599348534204e-05,
"loss": 0.614,
"step": 1047
},
{
"epoch": 2.302031850631521,
"grad_norm": 5999327.0,
"learning_rate": 1.2947882736156352e-05,
"loss": 0.7993,
"step": 1048
},
{
"epoch": 2.304228445908841,
"grad_norm": 8384308.0,
"learning_rate": 1.2907166123778503e-05,
"loss": 0.7839,
"step": 1049
},
{
"epoch": 2.3064250411861615,
"grad_norm": 6297944.5,
"learning_rate": 1.2866449511400652e-05,
"loss": 0.8027,
"step": 1050
},
{
"epoch": 2.3086216364634815,
"grad_norm": 13187944.0,
"learning_rate": 1.2825732899022802e-05,
"loss": 0.6071,
"step": 1051
},
{
"epoch": 2.310818231740802,
"grad_norm": 9754928.0,
"learning_rate": 1.2785016286644952e-05,
"loss": 0.7241,
"step": 1052
},
{
"epoch": 2.313014827018122,
"grad_norm": 1552770.875,
"learning_rate": 1.2744299674267101e-05,
"loss": 0.8248,
"step": 1053
},
{
"epoch": 2.315211422295442,
"grad_norm": 16734540.0,
"learning_rate": 1.2703583061889252e-05,
"loss": 0.7476,
"step": 1054
},
{
"epoch": 2.3174080175727623,
"grad_norm": 4143281.75,
"learning_rate": 1.2662866449511402e-05,
"loss": 1.0132,
"step": 1055
},
{
"epoch": 2.3196046128500822,
"grad_norm": 7862158.0,
"learning_rate": 1.2622149837133551e-05,
"loss": 0.7782,
"step": 1056
},
{
"epoch": 2.3218012081274026,
"grad_norm": 4118543.25,
"learning_rate": 1.2581433224755701e-05,
"loss": 1.0039,
"step": 1057
},
{
"epoch": 2.3239978034047226,
"grad_norm": 4323584.5,
"learning_rate": 1.254071661237785e-05,
"loss": 0.6,
"step": 1058
},
{
"epoch": 2.326194398682043,
"grad_norm": 3603073.5,
"learning_rate": 1.25e-05,
"loss": 0.7142,
"step": 1059
},
{
"epoch": 2.328390993959363,
"grad_norm": 6588454.0,
"learning_rate": 1.2459283387622151e-05,
"loss": 0.7712,
"step": 1060
},
{
"epoch": 2.330587589236683,
"grad_norm": 2593079.5,
"learning_rate": 1.24185667752443e-05,
"loss": 0.7785,
"step": 1061
},
{
"epoch": 2.3327841845140034,
"grad_norm": 1775102.625,
"learning_rate": 1.237785016286645e-05,
"loss": 0.7758,
"step": 1062
},
{
"epoch": 2.3349807797913233,
"grad_norm": 2622416.25,
"learning_rate": 1.2337133550488599e-05,
"loss": 0.8841,
"step": 1063
},
{
"epoch": 2.3371773750686438,
"grad_norm": 3231957.5,
"learning_rate": 1.229641693811075e-05,
"loss": 0.8539,
"step": 1064
},
{
"epoch": 2.3393739703459637,
"grad_norm": 1965101.875,
"learning_rate": 1.22557003257329e-05,
"loss": 0.7902,
"step": 1065
},
{
"epoch": 2.341570565623284,
"grad_norm": 9011023.0,
"learning_rate": 1.2214983713355049e-05,
"loss": 0.9112,
"step": 1066
},
{
"epoch": 2.343767160900604,
"grad_norm": 12300077.0,
"learning_rate": 1.21742671009772e-05,
"loss": 0.6904,
"step": 1067
},
{
"epoch": 2.345963756177924,
"grad_norm": 4213445.0,
"learning_rate": 1.213355048859935e-05,
"loss": 0.8308,
"step": 1068
},
{
"epoch": 2.3481603514552445,
"grad_norm": 3941625.0,
"learning_rate": 1.2092833876221498e-05,
"loss": 0.9742,
"step": 1069
},
{
"epoch": 2.3503569467325645,
"grad_norm": 5948733.0,
"learning_rate": 1.2052117263843649e-05,
"loss": 0.7223,
"step": 1070
},
{
"epoch": 2.352553542009885,
"grad_norm": 5675687.5,
"learning_rate": 1.2011400651465798e-05,
"loss": 0.8359,
"step": 1071
},
{
"epoch": 2.354750137287205,
"grad_norm": 1645119.875,
"learning_rate": 1.1970684039087948e-05,
"loss": 0.6769,
"step": 1072
},
{
"epoch": 2.356946732564525,
"grad_norm": 7933894.5,
"learning_rate": 1.1929967426710099e-05,
"loss": 0.7246,
"step": 1073
},
{
"epoch": 2.359143327841845,
"grad_norm": 13084234.0,
"learning_rate": 1.1889250814332247e-05,
"loss": 0.7177,
"step": 1074
},
{
"epoch": 2.361339923119165,
"grad_norm": 4980314.0,
"learning_rate": 1.1848534201954398e-05,
"loss": 0.874,
"step": 1075
},
{
"epoch": 2.3635365183964856,
"grad_norm": 11697443.0,
"learning_rate": 1.1807817589576548e-05,
"loss": 0.8653,
"step": 1076
},
{
"epoch": 2.3657331136738056,
"grad_norm": 6242828.5,
"learning_rate": 1.1767100977198697e-05,
"loss": 0.771,
"step": 1077
},
{
"epoch": 2.3679297089511255,
"grad_norm": 3590073.5,
"learning_rate": 1.1726384364820847e-05,
"loss": 0.8289,
"step": 1078
},
{
"epoch": 2.370126304228446,
"grad_norm": 7172018.0,
"learning_rate": 1.1685667752442996e-05,
"loss": 0.613,
"step": 1079
},
{
"epoch": 2.372322899505766,
"grad_norm": 3888274.0,
"learning_rate": 1.1644951140065147e-05,
"loss": 0.7947,
"step": 1080
},
{
"epoch": 2.3745194947830863,
"grad_norm": 8613911.0,
"learning_rate": 1.1604234527687297e-05,
"loss": 1.0802,
"step": 1081
},
{
"epoch": 2.3767160900604063,
"grad_norm": 4574738.5,
"learning_rate": 1.1563517915309446e-05,
"loss": 0.7241,
"step": 1082
},
{
"epoch": 2.3789126853377267,
"grad_norm": 9096491.0,
"learning_rate": 1.1522801302931596e-05,
"loss": 0.7928,
"step": 1083
},
{
"epoch": 2.3811092806150467,
"grad_norm": 6841468.5,
"learning_rate": 1.1482084690553747e-05,
"loss": 0.9741,
"step": 1084
},
{
"epoch": 2.3833058758923666,
"grad_norm": 3922741.5,
"learning_rate": 1.1441368078175896e-05,
"loss": 1.0277,
"step": 1085
},
{
"epoch": 2.385502471169687,
"grad_norm": 3562315.5,
"learning_rate": 1.1400651465798046e-05,
"loss": 0.7098,
"step": 1086
},
{
"epoch": 2.387699066447007,
"grad_norm": 14663703.0,
"learning_rate": 1.1359934853420195e-05,
"loss": 1.0815,
"step": 1087
},
{
"epoch": 2.3898956617243274,
"grad_norm": 15018353.0,
"learning_rate": 1.1319218241042345e-05,
"loss": 0.7351,
"step": 1088
},
{
"epoch": 2.3920922570016474,
"grad_norm": 8490890.0,
"learning_rate": 1.1278501628664496e-05,
"loss": 0.9321,
"step": 1089
},
{
"epoch": 2.394288852278968,
"grad_norm": 4437916.5,
"learning_rate": 1.1237785016286644e-05,
"loss": 0.7472,
"step": 1090
},
{
"epoch": 2.396485447556288,
"grad_norm": 9657434.0,
"learning_rate": 1.1197068403908795e-05,
"loss": 0.7775,
"step": 1091
},
{
"epoch": 2.3986820428336078,
"grad_norm": 6190163.5,
"learning_rate": 1.1156351791530945e-05,
"loss": 0.8102,
"step": 1092
},
{
"epoch": 2.400878638110928,
"grad_norm": 8654396.0,
"learning_rate": 1.1115635179153094e-05,
"loss": 0.8412,
"step": 1093
},
{
"epoch": 2.403075233388248,
"grad_norm": 3719385.0,
"learning_rate": 1.1074918566775245e-05,
"loss": 0.8662,
"step": 1094
},
{
"epoch": 2.4052718286655685,
"grad_norm": 7327033.5,
"learning_rate": 1.1034201954397393e-05,
"loss": 0.7252,
"step": 1095
},
{
"epoch": 2.4074684239428885,
"grad_norm": 8585433.0,
"learning_rate": 1.0993485342019544e-05,
"loss": 0.6554,
"step": 1096
},
{
"epoch": 2.409665019220209,
"grad_norm": 3423707.5,
"learning_rate": 1.0952768729641694e-05,
"loss": 0.8809,
"step": 1097
},
{
"epoch": 2.411861614497529,
"grad_norm": 3511080.0,
"learning_rate": 1.0912052117263845e-05,
"loss": 0.8029,
"step": 1098
},
{
"epoch": 2.414058209774849,
"grad_norm": 3968857.5,
"learning_rate": 1.0871335504885995e-05,
"loss": 0.8949,
"step": 1099
},
{
"epoch": 2.4162548050521693,
"grad_norm": 2289465.75,
"learning_rate": 1.0830618892508144e-05,
"loss": 0.8306,
"step": 1100
},
{
"epoch": 2.4184514003294892,
"grad_norm": 20745104.0,
"learning_rate": 1.0789902280130294e-05,
"loss": 0.7254,
"step": 1101
},
{
"epoch": 2.420647995606809,
"grad_norm": 2543496.75,
"learning_rate": 1.0749185667752445e-05,
"loss": 0.8705,
"step": 1102
},
{
"epoch": 2.4228445908841296,
"grad_norm": 5394329.5,
"learning_rate": 1.0708469055374594e-05,
"loss": 0.8089,
"step": 1103
},
{
"epoch": 2.4250411861614496,
"grad_norm": 1576959.875,
"learning_rate": 1.0667752442996744e-05,
"loss": 0.8676,
"step": 1104
},
{
"epoch": 2.42723778143877,
"grad_norm": 3800304.0,
"learning_rate": 1.0627035830618893e-05,
"loss": 0.845,
"step": 1105
},
{
"epoch": 2.42943437671609,
"grad_norm": 15098813.0,
"learning_rate": 1.0586319218241043e-05,
"loss": 0.7557,
"step": 1106
},
{
"epoch": 2.4316309719934104,
"grad_norm": 4500141.5,
"learning_rate": 1.0545602605863194e-05,
"loss": 0.8855,
"step": 1107
},
{
"epoch": 2.4338275672707304,
"grad_norm": 2867308.5,
"learning_rate": 1.0504885993485343e-05,
"loss": 0.9233,
"step": 1108
},
{
"epoch": 2.4360241625480503,
"grad_norm": 4740615.5,
"learning_rate": 1.0464169381107493e-05,
"loss": 0.7984,
"step": 1109
},
{
"epoch": 2.4382207578253707,
"grad_norm": 9119673.0,
"learning_rate": 1.0423452768729644e-05,
"loss": 0.8595,
"step": 1110
},
{
"epoch": 2.4404173531026907,
"grad_norm": 9822895.0,
"learning_rate": 1.0382736156351792e-05,
"loss": 0.7805,
"step": 1111
},
{
"epoch": 2.442613948380011,
"grad_norm": 1951774.125,
"learning_rate": 1.0342019543973943e-05,
"loss": 0.6044,
"step": 1112
},
{
"epoch": 2.444810543657331,
"grad_norm": 7767419.0,
"learning_rate": 1.0301302931596091e-05,
"loss": 1.0171,
"step": 1113
},
{
"epoch": 2.4470071389346515,
"grad_norm": 31730786.0,
"learning_rate": 1.0260586319218242e-05,
"loss": 0.9539,
"step": 1114
},
{
"epoch": 2.4492037342119715,
"grad_norm": 32194368.0,
"learning_rate": 1.0219869706840392e-05,
"loss": 0.7512,
"step": 1115
},
{
"epoch": 2.4514003294892914,
"grad_norm": 58858368.0,
"learning_rate": 1.0179153094462541e-05,
"loss": 0.7022,
"step": 1116
},
{
"epoch": 2.453596924766612,
"grad_norm": 3722460.5,
"learning_rate": 1.0138436482084692e-05,
"loss": 0.7823,
"step": 1117
},
{
"epoch": 2.455793520043932,
"grad_norm": 12120713.0,
"learning_rate": 1.009771986970684e-05,
"loss": 0.8108,
"step": 1118
},
{
"epoch": 2.4579901153212522,
"grad_norm": 7420889.0,
"learning_rate": 1.0057003257328991e-05,
"loss": 0.8501,
"step": 1119
},
{
"epoch": 2.460186710598572,
"grad_norm": 3466311.75,
"learning_rate": 1.0016286644951141e-05,
"loss": 0.7779,
"step": 1120
},
{
"epoch": 2.4623833058758926,
"grad_norm": 2764851.0,
"learning_rate": 9.97557003257329e-06,
"loss": 0.8147,
"step": 1121
},
{
"epoch": 2.4645799011532126,
"grad_norm": 8838615.0,
"learning_rate": 9.93485342019544e-06,
"loss": 1.1987,
"step": 1122
},
{
"epoch": 2.4667764964305325,
"grad_norm": 4142787.75,
"learning_rate": 9.894136807817591e-06,
"loss": 0.8362,
"step": 1123
},
{
"epoch": 2.468973091707853,
"grad_norm": 8983909.0,
"learning_rate": 9.85342019543974e-06,
"loss": 0.8383,
"step": 1124
},
{
"epoch": 2.471169686985173,
"grad_norm": 5277952.5,
"learning_rate": 9.81270358306189e-06,
"loss": 0.6959,
"step": 1125
},
{
"epoch": 2.473366282262493,
"grad_norm": 3108833.25,
"learning_rate": 9.771986970684039e-06,
"loss": 0.9406,
"step": 1126
},
{
"epoch": 2.4755628775398133,
"grad_norm": 3648057.0,
"learning_rate": 9.73127035830619e-06,
"loss": 0.7256,
"step": 1127
},
{
"epoch": 2.4777594728171333,
"grad_norm": 4605368.5,
"learning_rate": 9.69055374592834e-06,
"loss": 0.7745,
"step": 1128
},
{
"epoch": 2.4799560680944537,
"grad_norm": 4494435.5,
"learning_rate": 9.649837133550489e-06,
"loss": 1.0054,
"step": 1129
},
{
"epoch": 2.4821526633717736,
"grad_norm": 8113612.0,
"learning_rate": 9.609120521172639e-06,
"loss": 0.802,
"step": 1130
},
{
"epoch": 2.484349258649094,
"grad_norm": 4283793.0,
"learning_rate": 9.56840390879479e-06,
"loss": 0.7634,
"step": 1131
},
{
"epoch": 2.486545853926414,
"grad_norm": 4527398.0,
"learning_rate": 9.527687296416938e-06,
"loss": 0.7985,
"step": 1132
},
{
"epoch": 2.488742449203734,
"grad_norm": 2424841.5,
"learning_rate": 9.486970684039089e-06,
"loss": 0.6853,
"step": 1133
},
{
"epoch": 2.4909390444810544,
"grad_norm": 4183110.5,
"learning_rate": 9.446254071661238e-06,
"loss": 0.8485,
"step": 1134
},
{
"epoch": 2.4931356397583744,
"grad_norm": 10011695.0,
"learning_rate": 9.405537459283388e-06,
"loss": 0.839,
"step": 1135
},
{
"epoch": 2.495332235035695,
"grad_norm": 4811375.5,
"learning_rate": 9.364820846905538e-06,
"loss": 0.702,
"step": 1136
},
{
"epoch": 2.4975288303130148,
"grad_norm": 3211023.5,
"learning_rate": 9.324104234527687e-06,
"loss": 0.5787,
"step": 1137
},
{
"epoch": 2.499725425590335,
"grad_norm": 10911671.0,
"learning_rate": 9.283387622149838e-06,
"loss": 0.8402,
"step": 1138
},
{
"epoch": 2.501922020867655,
"grad_norm": 6305678.0,
"learning_rate": 9.242671009771988e-06,
"loss": 0.9766,
"step": 1139
},
{
"epoch": 2.504118616144975,
"grad_norm": 7081075.5,
"learning_rate": 9.201954397394137e-06,
"loss": 0.7916,
"step": 1140
},
{
"epoch": 2.5063152114222955,
"grad_norm": 7760064.5,
"learning_rate": 9.161237785016287e-06,
"loss": 0.7072,
"step": 1141
},
{
"epoch": 2.5085118066996155,
"grad_norm": 37980600.0,
"learning_rate": 9.120521172638436e-06,
"loss": 0.8516,
"step": 1142
},
{
"epoch": 2.510708401976936,
"grad_norm": 3480184.5,
"learning_rate": 9.079804560260587e-06,
"loss": 0.9093,
"step": 1143
},
{
"epoch": 2.512904997254256,
"grad_norm": 4379595.0,
"learning_rate": 9.039087947882737e-06,
"loss": 0.7566,
"step": 1144
},
{
"epoch": 2.5151015925315763,
"grad_norm": 7421131.0,
"learning_rate": 8.998371335504886e-06,
"loss": 0.7599,
"step": 1145
},
{
"epoch": 2.5172981878088962,
"grad_norm": 6436979.5,
"learning_rate": 8.957654723127036e-06,
"loss": 0.8429,
"step": 1146
},
{
"epoch": 2.519494783086216,
"grad_norm": 2583326.25,
"learning_rate": 8.916938110749185e-06,
"loss": 0.7495,
"step": 1147
},
{
"epoch": 2.5216913783635366,
"grad_norm": 13952491.0,
"learning_rate": 8.876221498371336e-06,
"loss": 0.7041,
"step": 1148
},
{
"epoch": 2.5238879736408566,
"grad_norm": 5217487.5,
"learning_rate": 8.835504885993486e-06,
"loss": 0.8489,
"step": 1149
},
{
"epoch": 2.5260845689181766,
"grad_norm": 7477479.5,
"learning_rate": 8.794788273615635e-06,
"loss": 1.0378,
"step": 1150
},
{
"epoch": 2.528281164195497,
"grad_norm": 6661338.0,
"learning_rate": 8.754071661237785e-06,
"loss": 0.8179,
"step": 1151
},
{
"epoch": 2.5304777594728174,
"grad_norm": 3381347.0,
"learning_rate": 8.713355048859936e-06,
"loss": 0.7556,
"step": 1152
},
{
"epoch": 2.5326743547501374,
"grad_norm": 6420329.5,
"learning_rate": 8.672638436482084e-06,
"loss": 0.8233,
"step": 1153
},
{
"epoch": 2.5348709500274573,
"grad_norm": 4672946.5,
"learning_rate": 8.631921824104235e-06,
"loss": 0.7382,
"step": 1154
},
{
"epoch": 2.5370675453047777,
"grad_norm": 3747122.25,
"learning_rate": 8.591205211726384e-06,
"loss": 0.7522,
"step": 1155
},
{
"epoch": 2.5392641405820977,
"grad_norm": 8256866.5,
"learning_rate": 8.550488599348534e-06,
"loss": 0.9296,
"step": 1156
},
{
"epoch": 2.5414607358594177,
"grad_norm": 6203118.5,
"learning_rate": 8.509771986970685e-06,
"loss": 0.9205,
"step": 1157
},
{
"epoch": 2.543657331136738,
"grad_norm": 16221756.0,
"learning_rate": 8.469055374592833e-06,
"loss": 0.7076,
"step": 1158
},
{
"epoch": 2.545853926414058,
"grad_norm": 11897712.0,
"learning_rate": 8.428338762214984e-06,
"loss": 0.6916,
"step": 1159
},
{
"epoch": 2.5480505216913785,
"grad_norm": 19553034.0,
"learning_rate": 8.387622149837134e-06,
"loss": 0.772,
"step": 1160
},
{
"epoch": 2.5502471169686984,
"grad_norm": 2140742.25,
"learning_rate": 8.346905537459283e-06,
"loss": 0.6454,
"step": 1161
},
{
"epoch": 2.552443712246019,
"grad_norm": 2892942.5,
"learning_rate": 8.306188925081433e-06,
"loss": 0.828,
"step": 1162
},
{
"epoch": 2.554640307523339,
"grad_norm": 8463796.0,
"learning_rate": 8.265472312703582e-06,
"loss": 0.6992,
"step": 1163
},
{
"epoch": 2.556836902800659,
"grad_norm": 3616309.0,
"learning_rate": 8.224755700325733e-06,
"loss": 0.8697,
"step": 1164
},
{
"epoch": 2.559033498077979,
"grad_norm": 5422616.5,
"learning_rate": 8.184039087947883e-06,
"loss": 0.861,
"step": 1165
},
{
"epoch": 2.561230093355299,
"grad_norm": 10063970.0,
"learning_rate": 8.143322475570032e-06,
"loss": 0.7409,
"step": 1166
},
{
"epoch": 2.5634266886326196,
"grad_norm": 3730941.75,
"learning_rate": 8.102605863192182e-06,
"loss": 0.6844,
"step": 1167
},
{
"epoch": 2.5656232839099395,
"grad_norm": 5220560.0,
"learning_rate": 8.061889250814333e-06,
"loss": 0.8594,
"step": 1168
},
{
"epoch": 2.56781987918726,
"grad_norm": 7238725.5,
"learning_rate": 8.021172638436482e-06,
"loss": 0.7781,
"step": 1169
},
{
"epoch": 2.57001647446458,
"grad_norm": 5563441.5,
"learning_rate": 7.980456026058632e-06,
"loss": 0.7106,
"step": 1170
},
{
"epoch": 2.5722130697419,
"grad_norm": 4423179.0,
"learning_rate": 7.93973941368078e-06,
"loss": 0.6717,
"step": 1171
},
{
"epoch": 2.5744096650192203,
"grad_norm": 2079958.125,
"learning_rate": 7.899022801302931e-06,
"loss": 0.7362,
"step": 1172
},
{
"epoch": 2.5766062602965403,
"grad_norm": 2770067.5,
"learning_rate": 7.858306188925082e-06,
"loss": 0.7288,
"step": 1173
},
{
"epoch": 2.5788028555738602,
"grad_norm": 2280089.25,
"learning_rate": 7.81758957654723e-06,
"loss": 0.8524,
"step": 1174
},
{
"epoch": 2.5809994508511807,
"grad_norm": 5417975.0,
"learning_rate": 7.776872964169383e-06,
"loss": 0.8495,
"step": 1175
},
{
"epoch": 2.583196046128501,
"grad_norm": 5998843.5,
"learning_rate": 7.736156351791531e-06,
"loss": 0.7334,
"step": 1176
},
{
"epoch": 2.585392641405821,
"grad_norm": 9620786.0,
"learning_rate": 7.695439739413682e-06,
"loss": 0.9633,
"step": 1177
},
{
"epoch": 2.587589236683141,
"grad_norm": 27155608.0,
"learning_rate": 7.654723127035832e-06,
"loss": 0.7478,
"step": 1178
},
{
"epoch": 2.5897858319604614,
"grad_norm": 3008433.0,
"learning_rate": 7.614006514657981e-06,
"loss": 0.9599,
"step": 1179
},
{
"epoch": 2.5919824272377814,
"grad_norm": 4526184.0,
"learning_rate": 7.5732899022801316e-06,
"loss": 0.6892,
"step": 1180
},
{
"epoch": 2.5941790225151014,
"grad_norm": 39450368.0,
"learning_rate": 7.532573289902281e-06,
"loss": 0.941,
"step": 1181
},
{
"epoch": 2.5963756177924218,
"grad_norm": 2991268.5,
"learning_rate": 7.491856677524431e-06,
"loss": 0.8136,
"step": 1182
},
{
"epoch": 2.598572213069742,
"grad_norm": 8942134.0,
"learning_rate": 7.45114006514658e-06,
"loss": 0.7942,
"step": 1183
},
{
"epoch": 2.600768808347062,
"grad_norm": 14095416.0,
"learning_rate": 7.410423452768731e-06,
"loss": 0.8661,
"step": 1184
},
{
"epoch": 2.602965403624382,
"grad_norm": 5406536.0,
"learning_rate": 7.3697068403908805e-06,
"loss": 0.8327,
"step": 1185
},
{
"epoch": 2.6051619989017025,
"grad_norm": 3708295.0,
"learning_rate": 7.32899022801303e-06,
"loss": 0.8411,
"step": 1186
},
{
"epoch": 2.6073585941790225,
"grad_norm": 5899132.0,
"learning_rate": 7.28827361563518e-06,
"loss": 0.6926,
"step": 1187
},
{
"epoch": 2.6095551894563425,
"grad_norm": 2464656.5,
"learning_rate": 7.24755700325733e-06,
"loss": 0.7658,
"step": 1188
},
{
"epoch": 2.611751784733663,
"grad_norm": 6742926.5,
"learning_rate": 7.20684039087948e-06,
"loss": 0.7401,
"step": 1189
},
{
"epoch": 2.613948380010983,
"grad_norm": 1958478.375,
"learning_rate": 7.166123778501629e-06,
"loss": 0.7855,
"step": 1190
},
{
"epoch": 2.6161449752883033,
"grad_norm": 2084311.875,
"learning_rate": 7.125407166123779e-06,
"loss": 0.7656,
"step": 1191
},
{
"epoch": 2.618341570565623,
"grad_norm": 6223765.0,
"learning_rate": 7.0846905537459294e-06,
"loss": 0.622,
"step": 1192
},
{
"epoch": 2.6205381658429436,
"grad_norm": 4192605.75,
"learning_rate": 7.043973941368079e-06,
"loss": 1.2417,
"step": 1193
},
{
"epoch": 2.6227347611202636,
"grad_norm": 8324554.0,
"learning_rate": 7.003257328990229e-06,
"loss": 0.9172,
"step": 1194
},
{
"epoch": 2.6249313563975836,
"grad_norm": 4022413.0,
"learning_rate": 6.962540716612378e-06,
"loss": 0.5115,
"step": 1195
},
{
"epoch": 2.627127951674904,
"grad_norm": 1934851.625,
"learning_rate": 6.921824104234528e-06,
"loss": 0.7719,
"step": 1196
},
{
"epoch": 2.629324546952224,
"grad_norm": 2949400.5,
"learning_rate": 6.881107491856678e-06,
"loss": 0.8278,
"step": 1197
},
{
"epoch": 2.6315211422295444,
"grad_norm": 2916673.25,
"learning_rate": 6.840390879478828e-06,
"loss": 0.8172,
"step": 1198
},
{
"epoch": 2.6337177375068643,
"grad_norm": 5639826.5,
"learning_rate": 6.799674267100978e-06,
"loss": 0.6383,
"step": 1199
},
{
"epoch": 2.6359143327841847,
"grad_norm": 2851869.0,
"learning_rate": 6.758957654723127e-06,
"loss": 0.7823,
"step": 1200
},
{
"epoch": 2.6381109280615047,
"grad_norm": 7851859.5,
"learning_rate": 6.718241042345278e-06,
"loss": 0.6393,
"step": 1201
},
{
"epoch": 2.6403075233388247,
"grad_norm": 14723333.0,
"learning_rate": 6.677524429967427e-06,
"loss": 0.6357,
"step": 1202
},
{
"epoch": 2.642504118616145,
"grad_norm": 2337256.25,
"learning_rate": 6.636807817589577e-06,
"loss": 0.9949,
"step": 1203
},
{
"epoch": 2.644700713893465,
"grad_norm": 8048820.5,
"learning_rate": 6.5960912052117265e-06,
"loss": 0.7724,
"step": 1204
},
{
"epoch": 2.646897309170785,
"grad_norm": 6673396.5,
"learning_rate": 6.555374592833877e-06,
"loss": 0.8397,
"step": 1205
},
{
"epoch": 2.6490939044481054,
"grad_norm": 16071678.0,
"learning_rate": 6.5146579804560266e-06,
"loss": 0.9185,
"step": 1206
},
{
"epoch": 2.651290499725426,
"grad_norm": 2104632.75,
"learning_rate": 6.473941368078176e-06,
"loss": 0.8789,
"step": 1207
},
{
"epoch": 2.653487095002746,
"grad_norm": 4341807.0,
"learning_rate": 6.433224755700326e-06,
"loss": 0.8184,
"step": 1208
},
{
"epoch": 2.655683690280066,
"grad_norm": 7949819.0,
"learning_rate": 6.392508143322476e-06,
"loss": 0.6682,
"step": 1209
},
{
"epoch": 2.657880285557386,
"grad_norm": 7754156.0,
"learning_rate": 6.351791530944626e-06,
"loss": 1.0141,
"step": 1210
},
{
"epoch": 2.660076880834706,
"grad_norm": 4420963.0,
"learning_rate": 6.3110749185667755e-06,
"loss": 0.7257,
"step": 1211
},
{
"epoch": 2.662273476112026,
"grad_norm": 4343279.0,
"learning_rate": 6.270358306188925e-06,
"loss": 0.7054,
"step": 1212
},
{
"epoch": 2.6644700713893466,
"grad_norm": 25877124.0,
"learning_rate": 6.2296416938110755e-06,
"loss": 0.8773,
"step": 1213
},
{
"epoch": 2.6666666666666665,
"grad_norm": 3126885.5,
"learning_rate": 6.188925081433225e-06,
"loss": 0.7401,
"step": 1214
},
{
"epoch": 2.668863261943987,
"grad_norm": 15323114.0,
"learning_rate": 6.148208469055375e-06,
"loss": 0.7644,
"step": 1215
},
{
"epoch": 2.671059857221307,
"grad_norm": 1805705.375,
"learning_rate": 6.107491856677524e-06,
"loss": 0.8049,
"step": 1216
},
{
"epoch": 2.6732564524986273,
"grad_norm": 6889947.5,
"learning_rate": 6.066775244299675e-06,
"loss": 0.8436,
"step": 1217
},
{
"epoch": 2.6754530477759473,
"grad_norm": 4288368.5,
"learning_rate": 6.0260586319218244e-06,
"loss": 0.7333,
"step": 1218
},
{
"epoch": 2.6776496430532672,
"grad_norm": 4976939.0,
"learning_rate": 5.985342019543974e-06,
"loss": 0.7262,
"step": 1219
},
{
"epoch": 2.6798462383305877,
"grad_norm": 2611674.5,
"learning_rate": 5.944625407166124e-06,
"loss": 0.7965,
"step": 1220
},
{
"epoch": 2.6820428336079076,
"grad_norm": 3217127.25,
"learning_rate": 5.903908794788274e-06,
"loss": 0.9846,
"step": 1221
},
{
"epoch": 2.684239428885228,
"grad_norm": 8207299.0,
"learning_rate": 5.863192182410424e-06,
"loss": 0.6034,
"step": 1222
},
{
"epoch": 2.686436024162548,
"grad_norm": 14461778.0,
"learning_rate": 5.822475570032573e-06,
"loss": 0.8427,
"step": 1223
},
{
"epoch": 2.6886326194398684,
"grad_norm": 1894958.375,
"learning_rate": 5.781758957654723e-06,
"loss": 0.7883,
"step": 1224
},
{
"epoch": 2.6908292147171884,
"grad_norm": 6389778.0,
"learning_rate": 5.741042345276873e-06,
"loss": 0.9012,
"step": 1225
},
{
"epoch": 2.6930258099945084,
"grad_norm": 18744804.0,
"learning_rate": 5.700325732899023e-06,
"loss": 0.6706,
"step": 1226
},
{
"epoch": 2.6952224052718288,
"grad_norm": 5564625.5,
"learning_rate": 5.659609120521173e-06,
"loss": 0.8883,
"step": 1227
},
{
"epoch": 2.6974190005491487,
"grad_norm": 8987305.0,
"learning_rate": 5.618892508143322e-06,
"loss": 0.7295,
"step": 1228
},
{
"epoch": 2.6996155958264687,
"grad_norm": 12430103.0,
"learning_rate": 5.578175895765473e-06,
"loss": 0.8755,
"step": 1229
},
{
"epoch": 2.701812191103789,
"grad_norm": 23815454.0,
"learning_rate": 5.537459283387622e-06,
"loss": 0.6697,
"step": 1230
},
{
"epoch": 2.7040087863811095,
"grad_norm": 3578766.75,
"learning_rate": 5.496742671009772e-06,
"loss": 0.6827,
"step": 1231
},
{
"epoch": 2.7062053816584295,
"grad_norm": 5508980.5,
"learning_rate": 5.456026058631922e-06,
"loss": 0.859,
"step": 1232
},
{
"epoch": 2.7084019769357495,
"grad_norm": 8206292.0,
"learning_rate": 5.415309446254072e-06,
"loss": 0.7212,
"step": 1233
},
{
"epoch": 2.71059857221307,
"grad_norm": 9189232.0,
"learning_rate": 5.3745928338762225e-06,
"loss": 0.7818,
"step": 1234
},
{
"epoch": 2.71279516749039,
"grad_norm": 2708089.75,
"learning_rate": 5.333876221498372e-06,
"loss": 0.8605,
"step": 1235
},
{
"epoch": 2.71499176276771,
"grad_norm": 7786324.5,
"learning_rate": 5.293159609120522e-06,
"loss": 0.9834,
"step": 1236
},
{
"epoch": 2.7171883580450302,
"grad_norm": 3118259.0,
"learning_rate": 5.252442996742671e-06,
"loss": 0.8038,
"step": 1237
},
{
"epoch": 2.71938495332235,
"grad_norm": 3731574.0,
"learning_rate": 5.211726384364822e-06,
"loss": 0.8072,
"step": 1238
},
{
"epoch": 2.7215815485996706,
"grad_norm": 3523254.0,
"learning_rate": 5.171009771986971e-06,
"loss": 0.7805,
"step": 1239
},
{
"epoch": 2.7237781438769906,
"grad_norm": 1341622.375,
"learning_rate": 5.130293159609121e-06,
"loss": 0.7965,
"step": 1240
},
{
"epoch": 2.725974739154311,
"grad_norm": 3696195.25,
"learning_rate": 5.089576547231271e-06,
"loss": 0.6148,
"step": 1241
},
{
"epoch": 2.728171334431631,
"grad_norm": 5802127.5,
"learning_rate": 5.04885993485342e-06,
"loss": 0.8052,
"step": 1242
},
{
"epoch": 2.730367929708951,
"grad_norm": 3392009.75,
"learning_rate": 5.008143322475571e-06,
"loss": 0.8646,
"step": 1243
},
{
"epoch": 2.7325645249862713,
"grad_norm": 1102332.625,
"learning_rate": 4.96742671009772e-06,
"loss": 0.8439,
"step": 1244
},
{
"epoch": 2.7347611202635913,
"grad_norm": 8662749.0,
"learning_rate": 4.92671009771987e-06,
"loss": 0.8066,
"step": 1245
},
{
"epoch": 2.7369577155409117,
"grad_norm": 15605783.0,
"learning_rate": 4.8859934853420195e-06,
"loss": 0.8639,
"step": 1246
},
{
"epoch": 2.7391543108182317,
"grad_norm": 11085640.0,
"learning_rate": 4.84527687296417e-06,
"loss": 1.3125,
"step": 1247
},
{
"epoch": 2.741350906095552,
"grad_norm": 7253258.0,
"learning_rate": 4.8045602605863196e-06,
"loss": 0.7922,
"step": 1248
},
{
"epoch": 2.743547501372872,
"grad_norm": 2673229.0,
"learning_rate": 4.763843648208469e-06,
"loss": 0.9026,
"step": 1249
},
{
"epoch": 2.745744096650192,
"grad_norm": 1597929.875,
"learning_rate": 4.723127035830619e-06,
"loss": 0.828,
"step": 1250
},
{
"epoch": 2.7479406919275124,
"grad_norm": 3410672.75,
"learning_rate": 4.682410423452769e-06,
"loss": 0.9291,
"step": 1251
},
{
"epoch": 2.7501372872048324,
"grad_norm": 11938232.0,
"learning_rate": 4.641693811074919e-06,
"loss": 0.859,
"step": 1252
},
{
"epoch": 2.752333882482153,
"grad_norm": 4468953.5,
"learning_rate": 4.6009771986970685e-06,
"loss": 0.6758,
"step": 1253
},
{
"epoch": 2.754530477759473,
"grad_norm": 2428296.0,
"learning_rate": 4.560260586319218e-06,
"loss": 0.703,
"step": 1254
},
{
"epoch": 2.756727073036793,
"grad_norm": 15036388.0,
"learning_rate": 4.5195439739413685e-06,
"loss": 0.9319,
"step": 1255
},
{
"epoch": 2.758923668314113,
"grad_norm": 7085497.0,
"learning_rate": 4.478827361563518e-06,
"loss": 0.6379,
"step": 1256
},
{
"epoch": 2.761120263591433,
"grad_norm": 3165533.5,
"learning_rate": 4.438110749185668e-06,
"loss": 0.7783,
"step": 1257
},
{
"epoch": 2.7633168588687536,
"grad_norm": 3842944.5,
"learning_rate": 4.397394136807817e-06,
"loss": 0.6631,
"step": 1258
},
{
"epoch": 2.7655134541460735,
"grad_norm": 26449858.0,
"learning_rate": 4.356677524429968e-06,
"loss": 0.9221,
"step": 1259
},
{
"epoch": 2.7677100494233935,
"grad_norm": 4904693.5,
"learning_rate": 4.3159609120521174e-06,
"loss": 0.8772,
"step": 1260
},
{
"epoch": 2.769906644700714,
"grad_norm": 3348442.5,
"learning_rate": 4.275244299674267e-06,
"loss": 0.7637,
"step": 1261
},
{
"epoch": 2.7721032399780343,
"grad_norm": 8484901.0,
"learning_rate": 4.234527687296417e-06,
"loss": 0.8237,
"step": 1262
},
{
"epoch": 2.7742998352553543,
"grad_norm": 9053304.0,
"learning_rate": 4.193811074918567e-06,
"loss": 0.8507,
"step": 1263
},
{
"epoch": 2.7764964305326743,
"grad_norm": 5076836.5,
"learning_rate": 4.153094462540717e-06,
"loss": 0.9401,
"step": 1264
},
{
"epoch": 2.7786930258099947,
"grad_norm": 4565967.5,
"learning_rate": 4.112377850162866e-06,
"loss": 0.8543,
"step": 1265
},
{
"epoch": 2.7808896210873146,
"grad_norm": 3831978.0,
"learning_rate": 4.071661237785016e-06,
"loss": 0.6769,
"step": 1266
},
{
"epoch": 2.7830862163646346,
"grad_norm": 2044434.375,
"learning_rate": 4.030944625407166e-06,
"loss": 0.6888,
"step": 1267
},
{
"epoch": 2.785282811641955,
"grad_norm": 3601185.0,
"learning_rate": 3.990228013029316e-06,
"loss": 0.7531,
"step": 1268
},
{
"epoch": 2.787479406919275,
"grad_norm": 5314910.0,
"learning_rate": 3.949511400651466e-06,
"loss": 0.6995,
"step": 1269
},
{
"epoch": 2.7896760021965954,
"grad_norm": 5030508.0,
"learning_rate": 3.908794788273615e-06,
"loss": 0.7457,
"step": 1270
},
{
"epoch": 2.7918725974739154,
"grad_norm": 5685473.5,
"learning_rate": 3.868078175895766e-06,
"loss": 0.7194,
"step": 1271
},
{
"epoch": 2.7940691927512358,
"grad_norm": 9892777.0,
"learning_rate": 3.827361563517916e-06,
"loss": 0.8124,
"step": 1272
},
{
"epoch": 2.7962657880285557,
"grad_norm": 4952108.5,
"learning_rate": 3.7866449511400658e-06,
"loss": 0.8066,
"step": 1273
},
{
"epoch": 2.7984623833058757,
"grad_norm": 6134618.0,
"learning_rate": 3.7459283387622154e-06,
"loss": 0.9912,
"step": 1274
},
{
"epoch": 2.800658978583196,
"grad_norm": 2698271.75,
"learning_rate": 3.7052117263843654e-06,
"loss": 0.773,
"step": 1275
},
{
"epoch": 2.802855573860516,
"grad_norm": 3984969.5,
"learning_rate": 3.664495114006515e-06,
"loss": 0.9424,
"step": 1276
},
{
"epoch": 2.8050521691378365,
"grad_norm": 10884478.0,
"learning_rate": 3.623778501628665e-06,
"loss": 1.0783,
"step": 1277
},
{
"epoch": 2.8072487644151565,
"grad_norm": 4614646.5,
"learning_rate": 3.5830618892508147e-06,
"loss": 0.865,
"step": 1278
},
{
"epoch": 2.809445359692477,
"grad_norm": 12210489.0,
"learning_rate": 3.5423452768729647e-06,
"loss": 0.7308,
"step": 1279
},
{
"epoch": 2.811641954969797,
"grad_norm": 9137895.0,
"learning_rate": 3.5016286644951143e-06,
"loss": 0.7726,
"step": 1280
},
{
"epoch": 2.813838550247117,
"grad_norm": 9227584.0,
"learning_rate": 3.460912052117264e-06,
"loss": 0.8759,
"step": 1281
},
{
"epoch": 2.8160351455244372,
"grad_norm": 6036039.5,
"learning_rate": 3.420195439739414e-06,
"loss": 0.8112,
"step": 1282
},
{
"epoch": 2.818231740801757,
"grad_norm": 9014040.0,
"learning_rate": 3.3794788273615636e-06,
"loss": 0.9741,
"step": 1283
},
{
"epoch": 2.820428336079077,
"grad_norm": 2439870.5,
"learning_rate": 3.3387622149837136e-06,
"loss": 0.8584,
"step": 1284
},
{
"epoch": 2.8226249313563976,
"grad_norm": 1889212.875,
"learning_rate": 3.2980456026058632e-06,
"loss": 0.8445,
"step": 1285
},
{
"epoch": 2.824821526633718,
"grad_norm": 6801981.0,
"learning_rate": 3.2573289902280133e-06,
"loss": 1.0537,
"step": 1286
},
{
"epoch": 2.827018121911038,
"grad_norm": 2069030.75,
"learning_rate": 3.216612377850163e-06,
"loss": 0.728,
"step": 1287
},
{
"epoch": 2.829214717188358,
"grad_norm": 10793646.0,
"learning_rate": 3.175895765472313e-06,
"loss": 0.8375,
"step": 1288
},
{
"epoch": 2.8314113124656783,
"grad_norm": 5786920.0,
"learning_rate": 3.1351791530944625e-06,
"loss": 0.7214,
"step": 1289
},
{
"epoch": 2.8336079077429983,
"grad_norm": 9872860.0,
"learning_rate": 3.0944625407166126e-06,
"loss": 0.8624,
"step": 1290
},
{
"epoch": 2.8358045030203183,
"grad_norm": 61847104.0,
"learning_rate": 3.053745928338762e-06,
"loss": 0.6621,
"step": 1291
},
{
"epoch": 2.8380010982976387,
"grad_norm": 2021296.5,
"learning_rate": 3.0130293159609122e-06,
"loss": 0.7126,
"step": 1292
},
{
"epoch": 2.8401976935749587,
"grad_norm": 1506967.125,
"learning_rate": 2.972312703583062e-06,
"loss": 0.5763,
"step": 1293
},
{
"epoch": 2.842394288852279,
"grad_norm": 5157691.5,
"learning_rate": 2.931596091205212e-06,
"loss": 0.8836,
"step": 1294
},
{
"epoch": 2.844590884129599,
"grad_norm": 8408041.0,
"learning_rate": 2.8908794788273615e-06,
"loss": 0.9025,
"step": 1295
},
{
"epoch": 2.8467874794069195,
"grad_norm": 2117217.75,
"learning_rate": 2.8501628664495115e-06,
"loss": 0.9261,
"step": 1296
},
{
"epoch": 2.8489840746842394,
"grad_norm": 4311697.0,
"learning_rate": 2.809446254071661e-06,
"loss": 0.674,
"step": 1297
},
{
"epoch": 2.8511806699615594,
"grad_norm": 1351027.875,
"learning_rate": 2.768729641693811e-06,
"loss": 1.0208,
"step": 1298
},
{
"epoch": 2.85337726523888,
"grad_norm": 19619468.0,
"learning_rate": 2.728013029315961e-06,
"loss": 0.8283,
"step": 1299
},
{
"epoch": 2.8555738605161998,
"grad_norm": 7202300.5,
"learning_rate": 2.6872964169381112e-06,
"loss": 0.9765,
"step": 1300
},
{
"epoch": 2.85777045579352,
"grad_norm": 2850933.25,
"learning_rate": 2.646579804560261e-06,
"loss": 0.8268,
"step": 1301
},
{
"epoch": 2.85996705107084,
"grad_norm": 3646593.25,
"learning_rate": 2.605863192182411e-06,
"loss": 0.6783,
"step": 1302
},
{
"epoch": 2.8621636463481606,
"grad_norm": 3372742.0,
"learning_rate": 2.5651465798045605e-06,
"loss": 1.0352,
"step": 1303
},
{
"epoch": 2.8643602416254805,
"grad_norm": 3541434.0,
"learning_rate": 2.52442996742671e-06,
"loss": 0.7158,
"step": 1304
},
{
"epoch": 2.8665568369028005,
"grad_norm": 2451411.5,
"learning_rate": 2.48371335504886e-06,
"loss": 0.753,
"step": 1305
},
{
"epoch": 2.868753432180121,
"grad_norm": 3310861.0,
"learning_rate": 2.4429967426710097e-06,
"loss": 0.7655,
"step": 1306
},
{
"epoch": 2.870950027457441,
"grad_norm": 3677972.0,
"learning_rate": 2.4022801302931598e-06,
"loss": 0.831,
"step": 1307
},
{
"epoch": 2.873146622734761,
"grad_norm": 2547505.5,
"learning_rate": 2.3615635179153094e-06,
"loss": 0.6619,
"step": 1308
},
{
"epoch": 2.8753432180120813,
"grad_norm": 15637284.0,
"learning_rate": 2.3208469055374594e-06,
"loss": 0.8139,
"step": 1309
},
{
"epoch": 2.8775398132894017,
"grad_norm": 4371933.5,
"learning_rate": 2.280130293159609e-06,
"loss": 0.8925,
"step": 1310
},
{
"epoch": 2.8797364085667216,
"grad_norm": 6664858.0,
"learning_rate": 2.239413680781759e-06,
"loss": 0.8394,
"step": 1311
},
{
"epoch": 2.8819330038440416,
"grad_norm": 6765905.5,
"learning_rate": 2.1986970684039087e-06,
"loss": 0.8293,
"step": 1312
},
{
"epoch": 2.884129599121362,
"grad_norm": 4205558.5,
"learning_rate": 2.1579804560260587e-06,
"loss": 0.8106,
"step": 1313
},
{
"epoch": 2.886326194398682,
"grad_norm": 2466667.75,
"learning_rate": 2.1172638436482083e-06,
"loss": 0.819,
"step": 1314
},
{
"epoch": 2.888522789676002,
"grad_norm": 8145394.0,
"learning_rate": 2.0765472312703584e-06,
"loss": 0.7181,
"step": 1315
},
{
"epoch": 2.8907193849533224,
"grad_norm": 4562730.0,
"learning_rate": 2.035830618892508e-06,
"loss": 0.7766,
"step": 1316
},
{
"epoch": 2.892915980230643,
"grad_norm": 4878736.5,
"learning_rate": 1.995114006514658e-06,
"loss": 0.7264,
"step": 1317
},
{
"epoch": 2.8951125755079627,
"grad_norm": 7035435.5,
"learning_rate": 1.9543973941368076e-06,
"loss": 0.6426,
"step": 1318
},
{
"epoch": 2.8973091707852827,
"grad_norm": 4361831.5,
"learning_rate": 1.913680781758958e-06,
"loss": 0.6632,
"step": 1319
},
{
"epoch": 2.899505766062603,
"grad_norm": 6432325.5,
"learning_rate": 1.8729641693811077e-06,
"loss": 0.8785,
"step": 1320
},
{
"epoch": 2.901702361339923,
"grad_norm": 2889484.5,
"learning_rate": 1.8322475570032575e-06,
"loss": 0.6304,
"step": 1321
},
{
"epoch": 2.903898956617243,
"grad_norm": 12899765.0,
"learning_rate": 1.7915309446254073e-06,
"loss": 0.8323,
"step": 1322
},
{
"epoch": 2.9060955518945635,
"grad_norm": 8848465.0,
"learning_rate": 1.7508143322475572e-06,
"loss": 0.7776,
"step": 1323
},
{
"epoch": 2.9082921471718834,
"grad_norm": 9044314.0,
"learning_rate": 1.710097719869707e-06,
"loss": 0.7768,
"step": 1324
},
{
"epoch": 2.910488742449204,
"grad_norm": 6190195.0,
"learning_rate": 1.6693811074918568e-06,
"loss": 0.7254,
"step": 1325
},
{
"epoch": 2.912685337726524,
"grad_norm": 6261659.0,
"learning_rate": 1.6286644951140066e-06,
"loss": 0.8139,
"step": 1326
},
{
"epoch": 2.9148819330038442,
"grad_norm": 2689539.25,
"learning_rate": 1.5879478827361565e-06,
"loss": 0.8426,
"step": 1327
},
{
"epoch": 2.917078528281164,
"grad_norm": 4591746.5,
"learning_rate": 1.5472312703583063e-06,
"loss": 0.6496,
"step": 1328
},
{
"epoch": 2.919275123558484,
"grad_norm": 5625928.5,
"learning_rate": 1.5065146579804561e-06,
"loss": 0.571,
"step": 1329
},
{
"epoch": 2.9214717188358046,
"grad_norm": 5071654.5,
"learning_rate": 1.465798045602606e-06,
"loss": 0.7503,
"step": 1330
},
{
"epoch": 2.9236683141131246,
"grad_norm": 3913498.75,
"learning_rate": 1.4250814332247558e-06,
"loss": 0.8757,
"step": 1331
},
{
"epoch": 2.925864909390445,
"grad_norm": 1437075.625,
"learning_rate": 1.3843648208469056e-06,
"loss": 0.475,
"step": 1332
},
{
"epoch": 2.928061504667765,
"grad_norm": 4601580.0,
"learning_rate": 1.3436482084690556e-06,
"loss": 0.7994,
"step": 1333
},
{
"epoch": 2.9302580999450853,
"grad_norm": 4288684.5,
"learning_rate": 1.3029315960912054e-06,
"loss": 0.7059,
"step": 1334
},
{
"epoch": 2.9324546952224053,
"grad_norm": 9257111.0,
"learning_rate": 1.262214983713355e-06,
"loss": 0.6693,
"step": 1335
},
{
"epoch": 2.9346512904997253,
"grad_norm": 2904731.5,
"learning_rate": 1.2214983713355049e-06,
"loss": 0.7124,
"step": 1336
},
{
"epoch": 2.9368478857770457,
"grad_norm": 5157225.0,
"learning_rate": 1.1807817589576547e-06,
"loss": 0.6926,
"step": 1337
},
{
"epoch": 2.9390444810543657,
"grad_norm": 8206613.5,
"learning_rate": 1.1400651465798045e-06,
"loss": 0.7442,
"step": 1338
},
{
"epoch": 2.9412410763316856,
"grad_norm": 3170124.5,
"learning_rate": 1.0993485342019543e-06,
"loss": 0.72,
"step": 1339
},
{
"epoch": 2.943437671609006,
"grad_norm": 2436431.5,
"learning_rate": 1.0586319218241042e-06,
"loss": 0.5681,
"step": 1340
},
{
"epoch": 2.9456342668863265,
"grad_norm": 5246524.0,
"learning_rate": 1.017915309446254e-06,
"loss": 0.7163,
"step": 1341
},
{
"epoch": 2.9478308621636464,
"grad_norm": 3337089.5,
"learning_rate": 9.771986970684038e-07,
"loss": 0.7938,
"step": 1342
},
{
"epoch": 2.9500274574409664,
"grad_norm": 6755895.0,
"learning_rate": 9.364820846905538e-07,
"loss": 0.878,
"step": 1343
},
{
"epoch": 2.952224052718287,
"grad_norm": 8914362.0,
"learning_rate": 8.957654723127037e-07,
"loss": 0.9456,
"step": 1344
},
{
"epoch": 2.9544206479956068,
"grad_norm": 4952710.0,
"learning_rate": 8.550488599348535e-07,
"loss": 0.8511,
"step": 1345
},
{
"epoch": 2.9566172432729267,
"grad_norm": 14665521.0,
"learning_rate": 8.143322475570033e-07,
"loss": 0.7337,
"step": 1346
},
{
"epoch": 2.958813838550247,
"grad_norm": 7912421.5,
"learning_rate": 7.736156351791531e-07,
"loss": 0.7101,
"step": 1347
},
{
"epoch": 2.961010433827567,
"grad_norm": 30088358.0,
"learning_rate": 7.32899022801303e-07,
"loss": 0.7023,
"step": 1348
},
{
"epoch": 2.9632070291048875,
"grad_norm": 4725255.5,
"learning_rate": 6.921824104234528e-07,
"loss": 0.9731,
"step": 1349
},
{
"epoch": 2.9654036243822075,
"grad_norm": 5158853.5,
"learning_rate": 6.514657980456027e-07,
"loss": 0.7145,
"step": 1350
},
{
"epoch": 2.967600219659528,
"grad_norm": 5323983.5,
"learning_rate": 6.107491856677524e-07,
"loss": 0.8979,
"step": 1351
},
{
"epoch": 2.969796814936848,
"grad_norm": 5994244.0,
"learning_rate": 5.700325732899023e-07,
"loss": 0.769,
"step": 1352
},
{
"epoch": 2.971993410214168,
"grad_norm": 4088484.5,
"learning_rate": 5.293159609120521e-07,
"loss": 0.8527,
"step": 1353
},
{
"epoch": 2.9741900054914883,
"grad_norm": 2901818.5,
"learning_rate": 4.885993485342019e-07,
"loss": 0.9364,
"step": 1354
},
{
"epoch": 2.9763866007688082,
"grad_norm": 11335834.0,
"learning_rate": 4.4788273615635184e-07,
"loss": 0.6777,
"step": 1355
},
{
"epoch": 2.9785831960461286,
"grad_norm": 2763095.75,
"learning_rate": 4.0716612377850166e-07,
"loss": 0.89,
"step": 1356
},
{
"epoch": 2.9807797913234486,
"grad_norm": 3734213.0,
"learning_rate": 3.664495114006515e-07,
"loss": 0.8536,
"step": 1357
},
{
"epoch": 2.982976386600769,
"grad_norm": 30005090.0,
"learning_rate": 3.2573289902280136e-07,
"loss": 0.8909,
"step": 1358
},
{
"epoch": 2.985172981878089,
"grad_norm": 5646650.0,
"learning_rate": 2.8501628664495113e-07,
"loss": 0.9318,
"step": 1359
},
{
"epoch": 2.987369577155409,
"grad_norm": 10859570.0,
"learning_rate": 2.4429967426710095e-07,
"loss": 0.6719,
"step": 1360
},
{
"epoch": 2.9895661724327294,
"grad_norm": 6024375.0,
"learning_rate": 2.0358306188925083e-07,
"loss": 0.814,
"step": 1361
},
{
"epoch": 2.9917627677100493,
"grad_norm": 3320985.25,
"learning_rate": 1.6286644951140068e-07,
"loss": 1.0307,
"step": 1362
},
{
"epoch": 2.9939593629873693,
"grad_norm": 6593533.0,
"learning_rate": 1.2214983713355048e-07,
"loss": 0.8078,
"step": 1363
},
{
"epoch": 2.9961559582646897,
"grad_norm": 4601075.5,
"learning_rate": 8.143322475570034e-08,
"loss": 0.8431,
"step": 1364
},
{
"epoch": 2.99835255354201,
"grad_norm": 7609306.5,
"learning_rate": 4.071661237785017e-08,
"loss": 0.888,
"step": 1365
},
{
"epoch": 2.99835255354201,
"step": 1365,
"total_flos": 4.598491246242038e+17,
"train_loss": 0.8648729579352634,
"train_runtime": 64264.928,
"train_samples_per_second": 0.34,
"train_steps_per_second": 0.021
}
],
"logging_steps": 1.0,
"max_steps": 1365,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 4.598491246242038e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}