diff --git "a/checkpoint-1148/trainer_state.json" "b/checkpoint-1148/trainer_state.json" new file mode 100644--- /dev/null +++ "b/checkpoint-1148/trainer_state.json" @@ -0,0 +1,8198 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 1.9973890339425586, + "eval_steps": 72, + "global_step": 1148, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0, + "eval_loss": 1.136976718902588, + "eval_runtime": 64.1168, + "eval_samples_per_second": 73.117, + "eval_steps_per_second": 9.14, + "step": 0 + }, + { + "epoch": 0.0017406440382941688, + "grad_norm": 1.859375, + "learning_rate": 0.0, + "loss": 1.1426, + "step": 1 + }, + { + "epoch": 0.0034812880765883376, + "grad_norm": 1.9765625, + "learning_rate": 1.7543859649122808e-07, + "loss": 1.1274, + "step": 2 + }, + { + "epoch": 0.005221932114882507, + "grad_norm": 1.828125, + "learning_rate": 3.5087719298245616e-07, + "loss": 1.1205, + "step": 3 + }, + { + "epoch": 0.006962576153176675, + "grad_norm": 1.8515625, + "learning_rate": 5.263157894736843e-07, + "loss": 1.1383, + "step": 4 + }, + { + "epoch": 0.008703220191470844, + "grad_norm": 1.796875, + "learning_rate": 7.017543859649123e-07, + "loss": 1.14, + "step": 5 + }, + { + "epoch": 0.010443864229765013, + "grad_norm": 1.734375, + "learning_rate": 8.771929824561404e-07, + "loss": 1.1031, + "step": 6 + }, + { + "epoch": 0.012184508268059183, + "grad_norm": 2.015625, + "learning_rate": 1.0526315789473685e-06, + "loss": 1.1794, + "step": 7 + }, + { + "epoch": 0.01392515230635335, + "grad_norm": 1.765625, + "learning_rate": 1.2280701754385965e-06, + "loss": 1.1766, + "step": 8 + }, + { + "epoch": 0.015665796344647518, + "grad_norm": 1.8046875, + "learning_rate": 1.4035087719298246e-06, + "loss": 1.1251, + "step": 9 + }, + { + "epoch": 0.017406440382941687, + "grad_norm": 1.6796875, + "learning_rate": 1.5789473684210526e-06, + "loss": 1.1622, + "step": 10 + }, + { + "epoch": 0.019147084421235857, + "grad_norm": 1.8671875, + "learning_rate": 1.7543859649122807e-06, + "loss": 1.0846, + "step": 11 + }, + { + "epoch": 0.020887728459530026, + "grad_norm": 1.765625, + "learning_rate": 1.929824561403509e-06, + "loss": 1.1644, + "step": 12 + }, + { + "epoch": 0.022628372497824196, + "grad_norm": 1.7265625, + "learning_rate": 2.105263157894737e-06, + "loss": 1.1361, + "step": 13 + }, + { + "epoch": 0.024369016536118365, + "grad_norm": 1.8671875, + "learning_rate": 2.280701754385965e-06, + "loss": 1.1783, + "step": 14 + }, + { + "epoch": 0.02610966057441253, + "grad_norm": 1.828125, + "learning_rate": 2.456140350877193e-06, + "loss": 1.127, + "step": 15 + }, + { + "epoch": 0.0278503046127067, + "grad_norm": 1.640625, + "learning_rate": 2.631578947368421e-06, + "loss": 1.1587, + "step": 16 + }, + { + "epoch": 0.02959094865100087, + "grad_norm": 1.671875, + "learning_rate": 2.8070175438596493e-06, + "loss": 1.1088, + "step": 17 + }, + { + "epoch": 0.031331592689295036, + "grad_norm": 1.59375, + "learning_rate": 2.9824561403508774e-06, + "loss": 1.174, + "step": 18 + }, + { + "epoch": 0.03307223672758921, + "grad_norm": 1.71875, + "learning_rate": 3.157894736842105e-06, + "loss": 1.1453, + "step": 19 + }, + { + "epoch": 0.034812880765883375, + "grad_norm": 1.8515625, + "learning_rate": 3.3333333333333333e-06, + "loss": 1.1922, + "step": 20 + }, + { + "epoch": 0.03655352480417755, + "grad_norm": 1.5390625, + "learning_rate": 3.5087719298245615e-06, + "loss": 1.1541, + "step": 21 + }, + 
{ + "epoch": 0.038294168842471714, + "grad_norm": 1.5390625, + "learning_rate": 3.6842105263157896e-06, + "loss": 1.0909, + "step": 22 + }, + { + "epoch": 0.04003481288076589, + "grad_norm": 1.515625, + "learning_rate": 3.859649122807018e-06, + "loss": 1.1498, + "step": 23 + }, + { + "epoch": 0.04177545691906005, + "grad_norm": 1.5703125, + "learning_rate": 4.035087719298246e-06, + "loss": 1.1096, + "step": 24 + }, + { + "epoch": 0.04351610095735422, + "grad_norm": 1.453125, + "learning_rate": 4.210526315789474e-06, + "loss": 1.1152, + "step": 25 + }, + { + "epoch": 0.04525674499564839, + "grad_norm": 1.390625, + "learning_rate": 4.385964912280702e-06, + "loss": 1.1111, + "step": 26 + }, + { + "epoch": 0.04699738903394256, + "grad_norm": 1.296875, + "learning_rate": 4.56140350877193e-06, + "loss": 1.161, + "step": 27 + }, + { + "epoch": 0.04873803307223673, + "grad_norm": 1.390625, + "learning_rate": 4.736842105263158e-06, + "loss": 1.1557, + "step": 28 + }, + { + "epoch": 0.050478677110530897, + "grad_norm": 1.3125, + "learning_rate": 4.912280701754386e-06, + "loss": 1.1065, + "step": 29 + }, + { + "epoch": 0.05221932114882506, + "grad_norm": 1.28125, + "learning_rate": 5.087719298245615e-06, + "loss": 1.0493, + "step": 30 + }, + { + "epoch": 0.053959965187119235, + "grad_norm": 1.1953125, + "learning_rate": 5.263157894736842e-06, + "loss": 1.0652, + "step": 31 + }, + { + "epoch": 0.0557006092254134, + "grad_norm": 1.1484375, + "learning_rate": 5.438596491228071e-06, + "loss": 1.0389, + "step": 32 + }, + { + "epoch": 0.057441253263707574, + "grad_norm": 1.1015625, + "learning_rate": 5.6140350877192985e-06, + "loss": 1.0349, + "step": 33 + }, + { + "epoch": 0.05918189730200174, + "grad_norm": 1.0390625, + "learning_rate": 5.789473684210527e-06, + "loss": 1.1109, + "step": 34 + }, + { + "epoch": 0.060922541340295906, + "grad_norm": 1.03125, + "learning_rate": 5.964912280701755e-06, + "loss": 1.0541, + "step": 35 + }, + { + "epoch": 0.06266318537859007, + "grad_norm": 0.9453125, + "learning_rate": 6.140350877192983e-06, + "loss": 1.1176, + "step": 36 + }, + { + "epoch": 0.06440382941688425, + "grad_norm": 0.9140625, + "learning_rate": 6.31578947368421e-06, + "loss": 1.0774, + "step": 37 + }, + { + "epoch": 0.06614447345517842, + "grad_norm": 0.9453125, + "learning_rate": 6.491228070175439e-06, + "loss": 1.031, + "step": 38 + }, + { + "epoch": 0.06788511749347259, + "grad_norm": 0.85546875, + "learning_rate": 6.666666666666667e-06, + "loss": 1.0977, + "step": 39 + }, + { + "epoch": 0.06962576153176675, + "grad_norm": 0.8203125, + "learning_rate": 6.842105263157896e-06, + "loss": 1.0871, + "step": 40 + }, + { + "epoch": 0.07136640557006092, + "grad_norm": 0.83984375, + "learning_rate": 7.017543859649123e-06, + "loss": 1.0856, + "step": 41 + }, + { + "epoch": 0.0731070496083551, + "grad_norm": 0.82421875, + "learning_rate": 7.192982456140352e-06, + "loss": 1.0189, + "step": 42 + }, + { + "epoch": 0.07484769364664925, + "grad_norm": 0.76953125, + "learning_rate": 7.368421052631579e-06, + "loss": 1.0633, + "step": 43 + }, + { + "epoch": 0.07658833768494343, + "grad_norm": 0.7578125, + "learning_rate": 7.5438596491228074e-06, + "loss": 1.0582, + "step": 44 + }, + { + "epoch": 0.0783289817232376, + "grad_norm": 0.78125, + "learning_rate": 7.719298245614036e-06, + "loss": 1.0261, + "step": 45 + }, + { + "epoch": 0.08006962576153177, + "grad_norm": 0.66796875, + "learning_rate": 7.894736842105265e-06, + "loss": 0.9962, + "step": 46 + }, + { + "epoch": 0.08181026979982593, + "grad_norm": 0.6953125, + 
"learning_rate": 8.070175438596492e-06, + "loss": 1.0041, + "step": 47 + }, + { + "epoch": 0.0835509138381201, + "grad_norm": 0.6640625, + "learning_rate": 8.24561403508772e-06, + "loss": 1.0953, + "step": 48 + }, + { + "epoch": 0.08529155787641428, + "grad_norm": 0.671875, + "learning_rate": 8.421052631578948e-06, + "loss": 1.0397, + "step": 49 + }, + { + "epoch": 0.08703220191470844, + "grad_norm": 0.66015625, + "learning_rate": 8.596491228070176e-06, + "loss": 1.0366, + "step": 50 + }, + { + "epoch": 0.08877284595300261, + "grad_norm": 0.65625, + "learning_rate": 8.771929824561405e-06, + "loss": 1.005, + "step": 51 + }, + { + "epoch": 0.09051348999129678, + "grad_norm": 0.62109375, + "learning_rate": 8.947368421052632e-06, + "loss": 1.1106, + "step": 52 + }, + { + "epoch": 0.09225413402959094, + "grad_norm": 0.59375, + "learning_rate": 9.12280701754386e-06, + "loss": 1.002, + "step": 53 + }, + { + "epoch": 0.09399477806788512, + "grad_norm": 0.5859375, + "learning_rate": 9.298245614035088e-06, + "loss": 1.0599, + "step": 54 + }, + { + "epoch": 0.09573542210617929, + "grad_norm": 0.59765625, + "learning_rate": 9.473684210526315e-06, + "loss": 1.0736, + "step": 55 + }, + { + "epoch": 0.09747606614447346, + "grad_norm": 0.56640625, + "learning_rate": 9.649122807017545e-06, + "loss": 1.0179, + "step": 56 + }, + { + "epoch": 0.09921671018276762, + "grad_norm": 0.58203125, + "learning_rate": 9.824561403508772e-06, + "loss": 1.0308, + "step": 57 + }, + { + "epoch": 0.10095735422106179, + "grad_norm": 0.5625, + "learning_rate": 1e-05, + "loss": 1.0135, + "step": 58 + }, + { + "epoch": 0.10269799825935597, + "grad_norm": 0.5859375, + "learning_rate": 9.999979270446263e-06, + "loss": 0.9598, + "step": 59 + }, + { + "epoch": 0.10443864229765012, + "grad_norm": 0.5390625, + "learning_rate": 9.999917081956933e-06, + "loss": 1.0041, + "step": 60 + }, + { + "epoch": 0.1061792863359443, + "grad_norm": 0.53125, + "learning_rate": 9.999813435047668e-06, + "loss": 1.0726, + "step": 61 + }, + { + "epoch": 0.10791993037423847, + "grad_norm": 0.55078125, + "learning_rate": 9.99966833057789e-06, + "loss": 0.9915, + "step": 62 + }, + { + "epoch": 0.10966057441253264, + "grad_norm": 0.55078125, + "learning_rate": 9.999481769750779e-06, + "loss": 1.0266, + "step": 63 + }, + { + "epoch": 0.1114012184508268, + "grad_norm": 0.5234375, + "learning_rate": 9.999253754113263e-06, + "loss": 1.0524, + "step": 64 + }, + { + "epoch": 0.11314186248912098, + "grad_norm": 0.53125, + "learning_rate": 9.998984285556008e-06, + "loss": 1.0254, + "step": 65 + }, + { + "epoch": 0.11488250652741515, + "grad_norm": 0.53515625, + "learning_rate": 9.998673366313399e-06, + "loss": 1.0132, + "step": 66 + }, + { + "epoch": 0.11662315056570931, + "grad_norm": 0.5234375, + "learning_rate": 9.998320998963523e-06, + "loss": 0.9604, + "step": 67 + }, + { + "epoch": 0.11836379460400348, + "grad_norm": 0.54296875, + "learning_rate": 9.997927186428145e-06, + "loss": 1.0147, + "step": 68 + }, + { + "epoch": 0.12010443864229765, + "grad_norm": 0.52734375, + "learning_rate": 9.997491931972694e-06, + "loss": 0.998, + "step": 69 + }, + { + "epoch": 0.12184508268059181, + "grad_norm": 0.51953125, + "learning_rate": 9.997015239206216e-06, + "loss": 1.034, + "step": 70 + }, + { + "epoch": 0.12358572671888599, + "grad_norm": 0.53125, + "learning_rate": 9.996497112081365e-06, + "loss": 1.0103, + "step": 71 + }, + { + "epoch": 0.12532637075718014, + "grad_norm": 0.5078125, + "learning_rate": 9.99593755489436e-06, + "loss": 1.0053, + "step": 72 + }, + { + 
"epoch": 0.12532637075718014, + "eval_loss": 0.9893413186073303, + "eval_runtime": 60.3364, + "eval_samples_per_second": 77.698, + "eval_steps_per_second": 9.712, + "step": 72 + }, + { + "epoch": 0.12706701479547433, + "grad_norm": 0.50390625, + "learning_rate": 9.995336572284945e-06, + "loss": 0.9841, + "step": 73 + }, + { + "epoch": 0.1288076588337685, + "grad_norm": 0.53125, + "learning_rate": 9.994694169236366e-06, + "loss": 0.9298, + "step": 74 + }, + { + "epoch": 0.13054830287206268, + "grad_norm": 0.51171875, + "learning_rate": 9.99401035107531e-06, + "loss": 1.0449, + "step": 75 + }, + { + "epoch": 0.13228894691035684, + "grad_norm": 0.515625, + "learning_rate": 9.993285123471878e-06, + "loss": 1.0139, + "step": 76 + }, + { + "epoch": 0.134029590948651, + "grad_norm": 0.5078125, + "learning_rate": 9.992518492439526e-06, + "loss": 1.0129, + "step": 77 + }, + { + "epoch": 0.13577023498694518, + "grad_norm": 0.54296875, + "learning_rate": 9.991710464335022e-06, + "loss": 0.9696, + "step": 78 + }, + { + "epoch": 0.13751087902523934, + "grad_norm": 0.53125, + "learning_rate": 9.990861045858392e-06, + "loss": 1.0114, + "step": 79 + }, + { + "epoch": 0.1392515230635335, + "grad_norm": 0.515625, + "learning_rate": 9.989970244052861e-06, + "loss": 0.957, + "step": 80 + }, + { + "epoch": 0.1409921671018277, + "grad_norm": 0.5078125, + "learning_rate": 9.9890380663048e-06, + "loss": 0.9603, + "step": 81 + }, + { + "epoch": 0.14273281114012185, + "grad_norm": 0.52734375, + "learning_rate": 9.98806452034366e-06, + "loss": 1.0523, + "step": 82 + }, + { + "epoch": 0.144473455178416, + "grad_norm": 0.5078125, + "learning_rate": 9.987049614241907e-06, + "loss": 0.9804, + "step": 83 + }, + { + "epoch": 0.1462140992167102, + "grad_norm": 0.54296875, + "learning_rate": 9.985993356414965e-06, + "loss": 1.0307, + "step": 84 + }, + { + "epoch": 0.14795474325500435, + "grad_norm": 0.515625, + "learning_rate": 9.984895755621136e-06, + "loss": 0.9802, + "step": 85 + }, + { + "epoch": 0.1496953872932985, + "grad_norm": 0.494140625, + "learning_rate": 9.983756820961528e-06, + "loss": 0.9547, + "step": 86 + }, + { + "epoch": 0.1514360313315927, + "grad_norm": 0.5, + "learning_rate": 9.982576561879984e-06, + "loss": 1.014, + "step": 87 + }, + { + "epoch": 0.15317667536988686, + "grad_norm": 0.494140625, + "learning_rate": 9.981354988163002e-06, + "loss": 0.9708, + "step": 88 + }, + { + "epoch": 0.15491731940818101, + "grad_norm": 0.51171875, + "learning_rate": 9.980092109939651e-06, + "loss": 0.9717, + "step": 89 + }, + { + "epoch": 0.1566579634464752, + "grad_norm": 0.484375, + "learning_rate": 9.978787937681496e-06, + "loss": 0.9862, + "step": 90 + }, + { + "epoch": 0.15839860748476936, + "grad_norm": 0.498046875, + "learning_rate": 9.977442482202498e-06, + "loss": 0.9813, + "step": 91 + }, + { + "epoch": 0.16013925152306355, + "grad_norm": 0.494140625, + "learning_rate": 9.976055754658935e-06, + "loss": 0.9551, + "step": 92 + }, + { + "epoch": 0.1618798955613577, + "grad_norm": 0.51171875, + "learning_rate": 9.974627766549301e-06, + "loss": 0.9777, + "step": 93 + }, + { + "epoch": 0.16362053959965187, + "grad_norm": 0.50390625, + "learning_rate": 9.973158529714224e-06, + "loss": 0.9648, + "step": 94 + }, + { + "epoch": 0.16536118363794605, + "grad_norm": 0.48828125, + "learning_rate": 9.971648056336349e-06, + "loss": 0.964, + "step": 95 + }, + { + "epoch": 0.1671018276762402, + "grad_norm": 0.5078125, + "learning_rate": 9.97009635894025e-06, + "loss": 0.9635, + "step": 96 + }, + { + "epoch": 
0.16884247171453437, + "grad_norm": 0.515625, + "learning_rate": 9.968503450392332e-06, + "loss": 1.0115, + "step": 97 + }, + { + "epoch": 0.17058311575282856, + "grad_norm": 0.51171875, + "learning_rate": 9.966869343900702e-06, + "loss": 0.9841, + "step": 98 + }, + { + "epoch": 0.17232375979112272, + "grad_norm": 0.484375, + "learning_rate": 9.965194053015083e-06, + "loss": 1.0241, + "step": 99 + }, + { + "epoch": 0.17406440382941687, + "grad_norm": 0.50390625, + "learning_rate": 9.963477591626687e-06, + "loss": 0.968, + "step": 100 + }, + { + "epoch": 0.17580504786771106, + "grad_norm": 0.50390625, + "learning_rate": 9.961719973968102e-06, + "loss": 0.946, + "step": 101 + }, + { + "epoch": 0.17754569190600522, + "grad_norm": 0.5234375, + "learning_rate": 9.959921214613187e-06, + "loss": 0.9284, + "step": 102 + }, + { + "epoch": 0.17928633594429938, + "grad_norm": 0.494140625, + "learning_rate": 9.958081328476926e-06, + "loss": 0.958, + "step": 103 + }, + { + "epoch": 0.18102697998259357, + "grad_norm": 0.486328125, + "learning_rate": 9.956200330815329e-06, + "loss": 0.9361, + "step": 104 + }, + { + "epoch": 0.18276762402088773, + "grad_norm": 0.5078125, + "learning_rate": 9.954278237225296e-06, + "loss": 1.0116, + "step": 105 + }, + { + "epoch": 0.18450826805918188, + "grad_norm": 0.5078125, + "learning_rate": 9.952315063644479e-06, + "loss": 1.0041, + "step": 106 + }, + { + "epoch": 0.18624891209747607, + "grad_norm": 0.5, + "learning_rate": 9.950310826351168e-06, + "loss": 0.9725, + "step": 107 + }, + { + "epoch": 0.18798955613577023, + "grad_norm": 0.498046875, + "learning_rate": 9.948265541964136e-06, + "loss": 0.9763, + "step": 108 + }, + { + "epoch": 0.18973020017406442, + "grad_norm": 0.5, + "learning_rate": 9.946179227442521e-06, + "loss": 0.9814, + "step": 109 + }, + { + "epoch": 0.19147084421235858, + "grad_norm": 0.50390625, + "learning_rate": 9.944051900085668e-06, + "loss": 1.0092, + "step": 110 + }, + { + "epoch": 0.19321148825065274, + "grad_norm": 0.515625, + "learning_rate": 9.941883577532993e-06, + "loss": 0.9836, + "step": 111 + }, + { + "epoch": 0.19495213228894692, + "grad_norm": 0.49609375, + "learning_rate": 9.939674277763845e-06, + "loss": 0.9598, + "step": 112 + }, + { + "epoch": 0.19669277632724108, + "grad_norm": 0.48828125, + "learning_rate": 9.937424019097337e-06, + "loss": 0.988, + "step": 113 + }, + { + "epoch": 0.19843342036553524, + "grad_norm": 0.498046875, + "learning_rate": 9.935132820192218e-06, + "loss": 0.9512, + "step": 114 + }, + { + "epoch": 0.20017406440382943, + "grad_norm": 0.498046875, + "learning_rate": 9.932800700046697e-06, + "loss": 0.9916, + "step": 115 + }, + { + "epoch": 0.20191470844212359, + "grad_norm": 0.51171875, + "learning_rate": 9.9304276779983e-06, + "loss": 0.9397, + "step": 116 + }, + { + "epoch": 0.20365535248041775, + "grad_norm": 0.48828125, + "learning_rate": 9.9280137737237e-06, + "loss": 0.9791, + "step": 117 + }, + { + "epoch": 0.20539599651871193, + "grad_norm": 0.515625, + "learning_rate": 9.925559007238564e-06, + "loss": 0.9231, + "step": 118 + }, + { + "epoch": 0.2071366405570061, + "grad_norm": 0.5, + "learning_rate": 9.923063398897372e-06, + "loss": 0.9854, + "step": 119 + }, + { + "epoch": 0.20887728459530025, + "grad_norm": 0.50390625, + "learning_rate": 9.920526969393267e-06, + "loss": 1.0411, + "step": 120 + }, + { + "epoch": 0.21061792863359444, + "grad_norm": 0.5078125, + "learning_rate": 9.917949739757869e-06, + "loss": 1.0254, + "step": 121 + }, + { + "epoch": 0.2123585726718886, + "grad_norm": 0.5, + 
"learning_rate": 9.915331731361104e-06, + "loss": 0.9208, + "step": 122 + }, + { + "epoch": 0.21409921671018275, + "grad_norm": 0.5078125, + "learning_rate": 9.912672965911034e-06, + "loss": 0.9195, + "step": 123 + }, + { + "epoch": 0.21583986074847694, + "grad_norm": 0.48046875, + "learning_rate": 9.909973465453666e-06, + "loss": 0.9938, + "step": 124 + }, + { + "epoch": 0.2175805047867711, + "grad_norm": 0.5078125, + "learning_rate": 9.907233252372775e-06, + "loss": 0.904, + "step": 125 + }, + { + "epoch": 0.2193211488250653, + "grad_norm": 0.48828125, + "learning_rate": 9.904452349389717e-06, + "loss": 0.9882, + "step": 126 + }, + { + "epoch": 0.22106179286335945, + "grad_norm": 0.50390625, + "learning_rate": 9.901630779563247e-06, + "loss": 0.9688, + "step": 127 + }, + { + "epoch": 0.2228024369016536, + "grad_norm": 0.51953125, + "learning_rate": 9.898768566289316e-06, + "loss": 1.0522, + "step": 128 + }, + { + "epoch": 0.2245430809399478, + "grad_norm": 0.48828125, + "learning_rate": 9.895865733300887e-06, + "loss": 0.9551, + "step": 129 + }, + { + "epoch": 0.22628372497824195, + "grad_norm": 0.498046875, + "learning_rate": 9.89292230466773e-06, + "loss": 0.9623, + "step": 130 + }, + { + "epoch": 0.2280243690165361, + "grad_norm": 0.515625, + "learning_rate": 9.889938304796236e-06, + "loss": 0.9702, + "step": 131 + }, + { + "epoch": 0.2297650130548303, + "grad_norm": 0.5078125, + "learning_rate": 9.886913758429194e-06, + "loss": 0.9679, + "step": 132 + }, + { + "epoch": 0.23150565709312446, + "grad_norm": 0.466796875, + "learning_rate": 9.883848690645601e-06, + "loss": 0.9719, + "step": 133 + }, + { + "epoch": 0.23324630113141862, + "grad_norm": 0.4921875, + "learning_rate": 9.880743126860458e-06, + "loss": 0.9717, + "step": 134 + }, + { + "epoch": 0.2349869451697128, + "grad_norm": 0.4921875, + "learning_rate": 9.87759709282454e-06, + "loss": 0.9365, + "step": 135 + }, + { + "epoch": 0.23672758920800696, + "grad_norm": 0.478515625, + "learning_rate": 9.874410614624202e-06, + "loss": 0.9254, + "step": 136 + }, + { + "epoch": 0.23846823324630112, + "grad_norm": 0.498046875, + "learning_rate": 9.871183718681153e-06, + "loss": 1.0045, + "step": 137 + }, + { + "epoch": 0.2402088772845953, + "grad_norm": 0.51171875, + "learning_rate": 9.867916431752237e-06, + "loss": 0.9693, + "step": 138 + }, + { + "epoch": 0.24194952132288947, + "grad_norm": 0.474609375, + "learning_rate": 9.864608780929218e-06, + "loss": 0.9981, + "step": 139 + }, + { + "epoch": 0.24369016536118362, + "grad_norm": 0.4921875, + "learning_rate": 9.861260793638539e-06, + "loss": 0.9569, + "step": 140 + }, + { + "epoch": 0.2454308093994778, + "grad_norm": 0.490234375, + "learning_rate": 9.857872497641117e-06, + "loss": 1.0422, + "step": 141 + }, + { + "epoch": 0.24717145343777197, + "grad_norm": 0.49609375, + "learning_rate": 9.854443921032098e-06, + "loss": 1.0408, + "step": 142 + }, + { + "epoch": 0.24891209747606616, + "grad_norm": 0.484375, + "learning_rate": 9.850975092240625e-06, + "loss": 0.8893, + "step": 143 + }, + { + "epoch": 0.2506527415143603, + "grad_norm": 0.5078125, + "learning_rate": 9.84746604002961e-06, + "loss": 0.9679, + "step": 144 + }, + { + "epoch": 0.2506527415143603, + "eval_loss": 0.9576423168182373, + "eval_runtime": 59.1215, + "eval_samples_per_second": 79.294, + "eval_steps_per_second": 9.912, + "step": 144 + }, + { + "epoch": 0.2523933855526545, + "grad_norm": 0.5, + "learning_rate": 9.843916793495487e-06, + "loss": 0.9071, + "step": 145 + }, + { + "epoch": 0.25413402959094866, + "grad_norm": 
0.498046875, + "learning_rate": 9.840327382067972e-06, + "loss": 0.9496, + "step": 146 + }, + { + "epoch": 0.2558746736292428, + "grad_norm": 0.5078125, + "learning_rate": 9.836697835509827e-06, + "loss": 0.9864, + "step": 147 + }, + { + "epoch": 0.257615317667537, + "grad_norm": 0.5, + "learning_rate": 9.833028183916601e-06, + "loss": 1.0082, + "step": 148 + }, + { + "epoch": 0.25935596170583114, + "grad_norm": 0.494140625, + "learning_rate": 9.829318457716395e-06, + "loss": 0.9591, + "step": 149 + }, + { + "epoch": 0.26109660574412535, + "grad_norm": 0.515625, + "learning_rate": 9.82556868766959e-06, + "loss": 1.0204, + "step": 150 + }, + { + "epoch": 0.2628372497824195, + "grad_norm": 0.4921875, + "learning_rate": 9.821778904868616e-06, + "loss": 0.9805, + "step": 151 + }, + { + "epoch": 0.26457789382071367, + "grad_norm": 0.5078125, + "learning_rate": 9.817949140737672e-06, + "loss": 0.9761, + "step": 152 + }, + { + "epoch": 0.26631853785900783, + "grad_norm": 0.50390625, + "learning_rate": 9.81407942703248e-06, + "loss": 0.9789, + "step": 153 + }, + { + "epoch": 0.268059181897302, + "grad_norm": 0.49609375, + "learning_rate": 9.810169795840012e-06, + "loss": 0.952, + "step": 154 + }, + { + "epoch": 0.26979982593559615, + "grad_norm": 0.484375, + "learning_rate": 9.806220279578236e-06, + "loss": 0.9431, + "step": 155 + }, + { + "epoch": 0.27154046997389036, + "grad_norm": 0.5078125, + "learning_rate": 9.802230910995833e-06, + "loss": 1.0015, + "step": 156 + }, + { + "epoch": 0.2732811140121845, + "grad_norm": 0.49609375, + "learning_rate": 9.798201723171938e-06, + "loss": 0.9513, + "step": 157 + }, + { + "epoch": 0.2750217580504787, + "grad_norm": 0.48828125, + "learning_rate": 9.794132749515854e-06, + "loss": 0.9454, + "step": 158 + }, + { + "epoch": 0.27676240208877284, + "grad_norm": 0.494140625, + "learning_rate": 9.790024023766789e-06, + "loss": 0.9581, + "step": 159 + }, + { + "epoch": 0.278503046127067, + "grad_norm": 0.482421875, + "learning_rate": 9.785875579993558e-06, + "loss": 0.9874, + "step": 160 + }, + { + "epoch": 0.28024369016536116, + "grad_norm": 0.49609375, + "learning_rate": 9.781687452594318e-06, + "loss": 0.9417, + "step": 161 + }, + { + "epoch": 0.2819843342036554, + "grad_norm": 0.478515625, + "learning_rate": 9.777459676296276e-06, + "loss": 0.9589, + "step": 162 + }, + { + "epoch": 0.28372497824194953, + "grad_norm": 0.515625, + "learning_rate": 9.773192286155395e-06, + "loss": 0.9851, + "step": 163 + }, + { + "epoch": 0.2854656222802437, + "grad_norm": 0.49609375, + "learning_rate": 9.768885317556116e-06, + "loss": 0.98, + "step": 164 + }, + { + "epoch": 0.28720626631853785, + "grad_norm": 0.5078125, + "learning_rate": 9.764538806211052e-06, + "loss": 0.9651, + "step": 165 + }, + { + "epoch": 0.288946910356832, + "grad_norm": 0.50390625, + "learning_rate": 9.760152788160697e-06, + "loss": 0.9407, + "step": 166 + }, + { + "epoch": 0.2906875543951262, + "grad_norm": 0.46875, + "learning_rate": 9.755727299773135e-06, + "loss": 0.9553, + "step": 167 + }, + { + "epoch": 0.2924281984334204, + "grad_norm": 0.470703125, + "learning_rate": 9.75126237774372e-06, + "loss": 0.9823, + "step": 168 + }, + { + "epoch": 0.29416884247171454, + "grad_norm": 0.5, + "learning_rate": 9.746758059094791e-06, + "loss": 0.9832, + "step": 169 + }, + { + "epoch": 0.2959094865100087, + "grad_norm": 0.50390625, + "learning_rate": 9.742214381175355e-06, + "loss": 0.8976, + "step": 170 + }, + { + "epoch": 0.29765013054830286, + "grad_norm": 0.48828125, + "learning_rate": 
9.737631381660777e-06, + "loss": 0.9331, + "step": 171 + }, + { + "epoch": 0.299390774586597, + "grad_norm": 0.50390625, + "learning_rate": 9.733009098552473e-06, + "loss": 0.9666, + "step": 172 + }, + { + "epoch": 0.30113141862489123, + "grad_norm": 0.490234375, + "learning_rate": 9.728347570177587e-06, + "loss": 0.9781, + "step": 173 + }, + { + "epoch": 0.3028720626631854, + "grad_norm": 0.484375, + "learning_rate": 9.723646835188681e-06, + "loss": 0.969, + "step": 174 + }, + { + "epoch": 0.30461270670147955, + "grad_norm": 0.51171875, + "learning_rate": 9.71890693256341e-06, + "loss": 0.9481, + "step": 175 + }, + { + "epoch": 0.3063533507397737, + "grad_norm": 0.49609375, + "learning_rate": 9.7141279016042e-06, + "loss": 1.0301, + "step": 176 + }, + { + "epoch": 0.30809399477806787, + "grad_norm": 0.48828125, + "learning_rate": 9.709309781937925e-06, + "loss": 1.023, + "step": 177 + }, + { + "epoch": 0.30983463881636203, + "grad_norm": 0.490234375, + "learning_rate": 9.704452613515571e-06, + "loss": 0.9598, + "step": 178 + }, + { + "epoch": 0.31157528285465624, + "grad_norm": 0.482421875, + "learning_rate": 9.699556436611912e-06, + "loss": 0.9699, + "step": 179 + }, + { + "epoch": 0.3133159268929504, + "grad_norm": 0.498046875, + "learning_rate": 9.694621291825174e-06, + "loss": 0.903, + "step": 180 + }, + { + "epoch": 0.31505657093124456, + "grad_norm": 0.515625, + "learning_rate": 9.689647220076696e-06, + "loss": 0.9581, + "step": 181 + }, + { + "epoch": 0.3167972149695387, + "grad_norm": 0.490234375, + "learning_rate": 9.684634262610593e-06, + "loss": 0.939, + "step": 182 + }, + { + "epoch": 0.3185378590078329, + "grad_norm": 0.498046875, + "learning_rate": 9.679582460993413e-06, + "loss": 0.9363, + "step": 183 + }, + { + "epoch": 0.3202785030461271, + "grad_norm": 0.482421875, + "learning_rate": 9.674491857113792e-06, + "loss": 1.0215, + "step": 184 + }, + { + "epoch": 0.32201914708442125, + "grad_norm": 0.48828125, + "learning_rate": 9.669362493182112e-06, + "loss": 0.9464, + "step": 185 + }, + { + "epoch": 0.3237597911227154, + "grad_norm": 0.482421875, + "learning_rate": 9.66419441173014e-06, + "loss": 0.8955, + "step": 186 + }, + { + "epoch": 0.32550043516100957, + "grad_norm": 0.494140625, + "learning_rate": 9.658987655610687e-06, + "loss": 0.9503, + "step": 187 + }, + { + "epoch": 0.32724107919930373, + "grad_norm": 0.474609375, + "learning_rate": 9.653742267997245e-06, + "loss": 0.9808, + "step": 188 + }, + { + "epoch": 0.3289817232375979, + "grad_norm": 0.48046875, + "learning_rate": 9.648458292383631e-06, + "loss": 1.0155, + "step": 189 + }, + { + "epoch": 0.3307223672758921, + "grad_norm": 0.5078125, + "learning_rate": 9.643135772583627e-06, + "loss": 0.929, + "step": 190 + }, + { + "epoch": 0.33246301131418626, + "grad_norm": 0.5, + "learning_rate": 9.63777475273062e-06, + "loss": 0.9214, + "step": 191 + }, + { + "epoch": 0.3342036553524804, + "grad_norm": 0.47265625, + "learning_rate": 9.632375277277226e-06, + "loss": 0.9762, + "step": 192 + }, + { + "epoch": 0.3359442993907746, + "grad_norm": 0.474609375, + "learning_rate": 9.626937390994932e-06, + "loss": 0.9734, + "step": 193 + }, + { + "epoch": 0.33768494342906874, + "grad_norm": 0.498046875, + "learning_rate": 9.621461138973725e-06, + "loss": 0.9599, + "step": 194 + }, + { + "epoch": 0.3394255874673629, + "grad_norm": 0.50390625, + "learning_rate": 9.615946566621704e-06, + "loss": 0.9448, + "step": 195 + }, + { + "epoch": 0.3411662315056571, + "grad_norm": 0.494140625, + "learning_rate": 9.61039371966472e-06, + 
"loss": 0.9721, + "step": 196 + }, + { + "epoch": 0.3429068755439513, + "grad_norm": 0.48828125, + "learning_rate": 9.60480264414599e-06, + "loss": 0.97, + "step": 197 + }, + { + "epoch": 0.34464751958224543, + "grad_norm": 0.49609375, + "learning_rate": 9.599173386425711e-06, + "loss": 0.9964, + "step": 198 + }, + { + "epoch": 0.3463881636205396, + "grad_norm": 0.486328125, + "learning_rate": 9.593505993180687e-06, + "loss": 0.9715, + "step": 199 + }, + { + "epoch": 0.34812880765883375, + "grad_norm": 0.4921875, + "learning_rate": 9.587800511403931e-06, + "loss": 0.9655, + "step": 200 + }, + { + "epoch": 0.34986945169712796, + "grad_norm": 0.5078125, + "learning_rate": 9.582056988404276e-06, + "loss": 0.9606, + "step": 201 + }, + { + "epoch": 0.3516100957354221, + "grad_norm": 0.50390625, + "learning_rate": 9.576275471805993e-06, + "loss": 0.9166, + "step": 202 + }, + { + "epoch": 0.3533507397737163, + "grad_norm": 0.494140625, + "learning_rate": 9.570456009548383e-06, + "loss": 0.9063, + "step": 203 + }, + { + "epoch": 0.35509138381201044, + "grad_norm": 0.484375, + "learning_rate": 9.564598649885391e-06, + "loss": 0.9295, + "step": 204 + }, + { + "epoch": 0.3568320278503046, + "grad_norm": 0.48046875, + "learning_rate": 9.558703441385195e-06, + "loss": 0.9933, + "step": 205 + }, + { + "epoch": 0.35857267188859876, + "grad_norm": 0.48046875, + "learning_rate": 9.552770432929812e-06, + "loss": 0.9572, + "step": 206 + }, + { + "epoch": 0.360313315926893, + "grad_norm": 0.482421875, + "learning_rate": 9.54679967371469e-06, + "loss": 0.9484, + "step": 207 + }, + { + "epoch": 0.36205395996518713, + "grad_norm": 0.47265625, + "learning_rate": 9.540791213248299e-06, + "loss": 0.9266, + "step": 208 + }, + { + "epoch": 0.3637946040034813, + "grad_norm": 0.5, + "learning_rate": 9.534745101351719e-06, + "loss": 0.9526, + "step": 209 + }, + { + "epoch": 0.36553524804177545, + "grad_norm": 0.5, + "learning_rate": 9.528661388158234e-06, + "loss": 1.0046, + "step": 210 + }, + { + "epoch": 0.3672758920800696, + "grad_norm": 0.51171875, + "learning_rate": 9.522540124112902e-06, + "loss": 0.9757, + "step": 211 + }, + { + "epoch": 0.36901653611836377, + "grad_norm": 0.48046875, + "learning_rate": 9.516381359972157e-06, + "loss": 1.0195, + "step": 212 + }, + { + "epoch": 0.370757180156658, + "grad_norm": 0.490234375, + "learning_rate": 9.51018514680337e-06, + "loss": 0.971, + "step": 213 + }, + { + "epoch": 0.37249782419495214, + "grad_norm": 0.5, + "learning_rate": 9.503951535984434e-06, + "loss": 0.9336, + "step": 214 + }, + { + "epoch": 0.3742384682332463, + "grad_norm": 0.494140625, + "learning_rate": 9.49768057920334e-06, + "loss": 0.9887, + "step": 215 + }, + { + "epoch": 0.37597911227154046, + "grad_norm": 0.486328125, + "learning_rate": 9.491372328457737e-06, + "loss": 0.966, + "step": 216 + }, + { + "epoch": 0.37597911227154046, + "eval_loss": 0.9439952373504639, + "eval_runtime": 59.4647, + "eval_samples_per_second": 78.837, + "eval_steps_per_second": 9.855, + "step": 216 + }, + { + "epoch": 0.3777197563098346, + "grad_norm": 0.494140625, + "learning_rate": 9.485026836054519e-06, + "loss": 0.9771, + "step": 217 + }, + { + "epoch": 0.37946040034812883, + "grad_norm": 0.498046875, + "learning_rate": 9.478644154609372e-06, + "loss": 0.9988, + "step": 218 + }, + { + "epoch": 0.381201044386423, + "grad_norm": 0.470703125, + "learning_rate": 9.472224337046357e-06, + "loss": 0.9875, + "step": 219 + }, + { + "epoch": 0.38294168842471715, + "grad_norm": 0.482421875, + "learning_rate": 9.46576743659745e-06, 
+ "loss": 0.9664, + "step": 220 + }, + { + "epoch": 0.3846823324630113, + "grad_norm": 0.478515625, + "learning_rate": 9.45927350680212e-06, + "loss": 0.9693, + "step": 221 + }, + { + "epoch": 0.38642297650130547, + "grad_norm": 0.48828125, + "learning_rate": 9.452742601506873e-06, + "loss": 0.9137, + "step": 222 + }, + { + "epoch": 0.38816362053959963, + "grad_norm": 0.5078125, + "learning_rate": 9.446174774864808e-06, + "loss": 0.9047, + "step": 223 + }, + { + "epoch": 0.38990426457789384, + "grad_norm": 0.486328125, + "learning_rate": 9.439570081335173e-06, + "loss": 0.9926, + "step": 224 + }, + { + "epoch": 0.391644908616188, + "grad_norm": 0.478515625, + "learning_rate": 9.432928575682908e-06, + "loss": 0.9693, + "step": 225 + }, + { + "epoch": 0.39338555265448216, + "grad_norm": 0.50390625, + "learning_rate": 9.426250312978191e-06, + "loss": 0.9106, + "step": 226 + }, + { + "epoch": 0.3951261966927763, + "grad_norm": 0.5, + "learning_rate": 9.419535348595985e-06, + "loss": 0.9639, + "step": 227 + }, + { + "epoch": 0.3968668407310705, + "grad_norm": 0.49609375, + "learning_rate": 9.412783738215576e-06, + "loss": 0.9338, + "step": 228 + }, + { + "epoch": 0.39860748476936464, + "grad_norm": 0.48828125, + "learning_rate": 9.405995537820111e-06, + "loss": 1.0216, + "step": 229 + }, + { + "epoch": 0.40034812880765885, + "grad_norm": 0.498046875, + "learning_rate": 9.399170803696139e-06, + "loss": 0.942, + "step": 230 + }, + { + "epoch": 0.402088772845953, + "grad_norm": 0.486328125, + "learning_rate": 9.392309592433134e-06, + "loss": 0.9184, + "step": 231 + }, + { + "epoch": 0.40382941688424717, + "grad_norm": 0.5234375, + "learning_rate": 9.385411960923036e-06, + "loss": 0.9178, + "step": 232 + }, + { + "epoch": 0.40557006092254133, + "grad_norm": 0.4921875, + "learning_rate": 9.378477966359773e-06, + "loss": 0.9303, + "step": 233 + }, + { + "epoch": 0.4073107049608355, + "grad_norm": 0.5, + "learning_rate": 9.371507666238793e-06, + "loss": 0.9563, + "step": 234 + }, + { + "epoch": 0.4090513489991297, + "grad_norm": 0.47265625, + "learning_rate": 9.364501118356579e-06, + "loss": 0.92, + "step": 235 + }, + { + "epoch": 0.41079199303742386, + "grad_norm": 0.4765625, + "learning_rate": 9.357458380810175e-06, + "loss": 0.9532, + "step": 236 + }, + { + "epoch": 0.412532637075718, + "grad_norm": 0.482421875, + "learning_rate": 9.350379511996706e-06, + "loss": 0.9604, + "step": 237 + }, + { + "epoch": 0.4142732811140122, + "grad_norm": 0.48828125, + "learning_rate": 9.343264570612883e-06, + "loss": 0.9415, + "step": 238 + }, + { + "epoch": 0.41601392515230634, + "grad_norm": 0.48828125, + "learning_rate": 9.336113615654535e-06, + "loss": 0.9752, + "step": 239 + }, + { + "epoch": 0.4177545691906005, + "grad_norm": 0.484375, + "learning_rate": 9.328926706416102e-06, + "loss": 0.9517, + "step": 240 + }, + { + "epoch": 0.4194952132288947, + "grad_norm": 0.484375, + "learning_rate": 9.321703902490152e-06, + "loss": 0.9245, + "step": 241 + }, + { + "epoch": 0.4212358572671889, + "grad_norm": 0.50390625, + "learning_rate": 9.314445263766888e-06, + "loss": 0.9341, + "step": 242 + }, + { + "epoch": 0.42297650130548303, + "grad_norm": 0.47265625, + "learning_rate": 9.307150850433643e-06, + "loss": 0.9399, + "step": 243 + }, + { + "epoch": 0.4247171453437772, + "grad_norm": 0.48828125, + "learning_rate": 9.299820722974396e-06, + "loss": 0.9865, + "step": 244 + }, + { + "epoch": 0.42645778938207135, + "grad_norm": 0.5078125, + "learning_rate": 9.29245494216925e-06, + "loss": 0.9538, + "step": 245 + }, + { + 
"epoch": 0.4281984334203655, + "grad_norm": 0.5, + "learning_rate": 9.285053569093948e-06, + "loss": 1.0095, + "step": 246 + }, + { + "epoch": 0.4299390774586597, + "grad_norm": 0.494140625, + "learning_rate": 9.277616665119352e-06, + "loss": 0.9691, + "step": 247 + }, + { + "epoch": 0.4316797214969539, + "grad_norm": 0.4921875, + "learning_rate": 9.27014429191094e-06, + "loss": 0.9854, + "step": 248 + }, + { + "epoch": 0.43342036553524804, + "grad_norm": 0.498046875, + "learning_rate": 9.262636511428304e-06, + "loss": 0.9179, + "step": 249 + }, + { + "epoch": 0.4351610095735422, + "grad_norm": 0.490234375, + "learning_rate": 9.255093385924616e-06, + "loss": 0.9388, + "step": 250 + }, + { + "epoch": 0.43690165361183636, + "grad_norm": 0.51953125, + "learning_rate": 9.247514977946124e-06, + "loss": 0.9788, + "step": 251 + }, + { + "epoch": 0.4386422976501306, + "grad_norm": 0.515625, + "learning_rate": 9.239901350331635e-06, + "loss": 0.9301, + "step": 252 + }, + { + "epoch": 0.44038294168842473, + "grad_norm": 0.474609375, + "learning_rate": 9.232252566211993e-06, + "loss": 0.9656, + "step": 253 + }, + { + "epoch": 0.4421235857267189, + "grad_norm": 0.4921875, + "learning_rate": 9.224568689009548e-06, + "loss": 1.0119, + "step": 254 + }, + { + "epoch": 0.44386422976501305, + "grad_norm": 0.49609375, + "learning_rate": 9.216849782437637e-06, + "loss": 0.874, + "step": 255 + }, + { + "epoch": 0.4456048738033072, + "grad_norm": 0.5, + "learning_rate": 9.20909591050006e-06, + "loss": 0.9191, + "step": 256 + }, + { + "epoch": 0.44734551784160137, + "grad_norm": 0.50390625, + "learning_rate": 9.201307137490536e-06, + "loss": 0.9017, + "step": 257 + }, + { + "epoch": 0.4490861618798956, + "grad_norm": 0.4921875, + "learning_rate": 9.19348352799218e-06, + "loss": 0.9363, + "step": 258 + }, + { + "epoch": 0.45082680591818974, + "grad_norm": 0.494140625, + "learning_rate": 9.185625146876966e-06, + "loss": 0.9921, + "step": 259 + }, + { + "epoch": 0.4525674499564839, + "grad_norm": 0.482421875, + "learning_rate": 9.177732059305187e-06, + "loss": 0.9358, + "step": 260 + }, + { + "epoch": 0.45430809399477806, + "grad_norm": 0.494140625, + "learning_rate": 9.169804330724916e-06, + "loss": 0.9257, + "step": 261 + }, + { + "epoch": 0.4560487380330722, + "grad_norm": 0.49609375, + "learning_rate": 9.161842026871465e-06, + "loss": 0.9201, + "step": 262 + }, + { + "epoch": 0.4577893820713664, + "grad_norm": 0.498046875, + "learning_rate": 9.153845213766837e-06, + "loss": 0.9212, + "step": 263 + }, + { + "epoch": 0.4595300261096606, + "grad_norm": 0.5, + "learning_rate": 9.145813957719174e-06, + "loss": 0.9735, + "step": 264 + }, + { + "epoch": 0.46127067014795475, + "grad_norm": 0.5078125, + "learning_rate": 9.137748325322223e-06, + "loss": 0.9585, + "step": 265 + }, + { + "epoch": 0.4630113141862489, + "grad_norm": 0.50390625, + "learning_rate": 9.129648383454764e-06, + "loss": 0.9781, + "step": 266 + }, + { + "epoch": 0.46475195822454307, + "grad_norm": 0.486328125, + "learning_rate": 9.121514199280072e-06, + "loss": 0.9759, + "step": 267 + }, + { + "epoch": 0.46649260226283723, + "grad_norm": 0.5078125, + "learning_rate": 9.113345840245348e-06, + "loss": 0.9688, + "step": 268 + }, + { + "epoch": 0.46823324630113144, + "grad_norm": 0.49609375, + "learning_rate": 9.105143374081167e-06, + "loss": 0.9092, + "step": 269 + }, + { + "epoch": 0.4699738903394256, + "grad_norm": 0.48046875, + "learning_rate": 9.096906868800917e-06, + "loss": 0.9357, + "step": 270 + }, + { + "epoch": 0.47171453437771976, + 
"grad_norm": 0.4921875, + "learning_rate": 9.088636392700227e-06, + "loss": 1.0134, + "step": 271 + }, + { + "epoch": 0.4734551784160139, + "grad_norm": 0.49609375, + "learning_rate": 9.08033201435641e-06, + "loss": 0.9494, + "step": 272 + }, + { + "epoch": 0.4751958224543081, + "grad_norm": 0.48046875, + "learning_rate": 9.071993802627887e-06, + "loss": 0.9446, + "step": 273 + }, + { + "epoch": 0.47693646649260224, + "grad_norm": 0.494140625, + "learning_rate": 9.063621826653624e-06, + "loss": 0.8926, + "step": 274 + }, + { + "epoch": 0.47867711053089645, + "grad_norm": 0.484375, + "learning_rate": 9.055216155852548e-06, + "loss": 0.9216, + "step": 275 + }, + { + "epoch": 0.4804177545691906, + "grad_norm": 0.470703125, + "learning_rate": 9.046776859922983e-06, + "loss": 0.9442, + "step": 276 + }, + { + "epoch": 0.4821583986074848, + "grad_norm": 0.484375, + "learning_rate": 9.038304008842064e-06, + "loss": 0.9456, + "step": 277 + }, + { + "epoch": 0.48389904264577893, + "grad_norm": 0.482421875, + "learning_rate": 9.02979767286516e-06, + "loss": 0.909, + "step": 278 + }, + { + "epoch": 0.4856396866840731, + "grad_norm": 0.50390625, + "learning_rate": 9.021257922525289e-06, + "loss": 0.9597, + "step": 279 + }, + { + "epoch": 0.48738033072236725, + "grad_norm": 0.5, + "learning_rate": 9.012684828632538e-06, + "loss": 0.9646, + "step": 280 + }, + { + "epoch": 0.48912097476066146, + "grad_norm": 0.50390625, + "learning_rate": 9.004078462273471e-06, + "loss": 0.9679, + "step": 281 + }, + { + "epoch": 0.4908616187989556, + "grad_norm": 0.5078125, + "learning_rate": 8.995438894810541e-06, + "loss": 0.952, + "step": 282 + }, + { + "epoch": 0.4926022628372498, + "grad_norm": 0.484375, + "learning_rate": 8.9867661978815e-06, + "loss": 0.9792, + "step": 283 + }, + { + "epoch": 0.49434290687554394, + "grad_norm": 0.5, + "learning_rate": 8.978060443398802e-06, + "loss": 0.939, + "step": 284 + }, + { + "epoch": 0.4960835509138381, + "grad_norm": 0.486328125, + "learning_rate": 8.96932170354901e-06, + "loss": 0.9974, + "step": 285 + }, + { + "epoch": 0.4978241949521323, + "grad_norm": 0.48828125, + "learning_rate": 8.960550050792194e-06, + "loss": 0.9947, + "step": 286 + }, + { + "epoch": 0.4995648389904265, + "grad_norm": 0.50390625, + "learning_rate": 8.951745557861333e-06, + "loss": 0.9139, + "step": 287 + }, + { + "epoch": 0.5013054830287206, + "grad_norm": 0.49609375, + "learning_rate": 8.942908297761712e-06, + "loss": 0.9397, + "step": 288 + }, + { + "epoch": 0.5013054830287206, + "eval_loss": 0.9357889294624329, + "eval_runtime": 60.8471, + "eval_samples_per_second": 77.046, + "eval_steps_per_second": 9.631, + "step": 288 + }, + { + "epoch": 0.5030461270670148, + "grad_norm": 0.482421875, + "learning_rate": 8.934038343770312e-06, + "loss": 0.9656, + "step": 289 + }, + { + "epoch": 0.504786771105309, + "grad_norm": 0.5078125, + "learning_rate": 8.925135769435211e-06, + "loss": 0.9896, + "step": 290 + }, + { + "epoch": 0.5065274151436031, + "grad_norm": 0.5, + "learning_rate": 8.916200648574964e-06, + "loss": 0.905, + "step": 291 + }, + { + "epoch": 0.5082680591818973, + "grad_norm": 0.490234375, + "learning_rate": 8.907233055277999e-06, + "loss": 0.9309, + "step": 292 + }, + { + "epoch": 0.5100087032201914, + "grad_norm": 0.48046875, + "learning_rate": 8.898233063902e-06, + "loss": 0.9796, + "step": 293 + }, + { + "epoch": 0.5117493472584856, + "grad_norm": 0.5234375, + "learning_rate": 8.889200749073285e-06, + "loss": 0.9335, + "step": 294 + }, + { + "epoch": 0.5134899912967799, + "grad_norm": 
0.498046875, + "learning_rate": 8.880136185686202e-06, + "loss": 0.9292, + "step": 295 + }, + { + "epoch": 0.515230635335074, + "grad_norm": 0.478515625, + "learning_rate": 8.871039448902488e-06, + "loss": 0.9116, + "step": 296 + }, + { + "epoch": 0.5169712793733682, + "grad_norm": 0.47265625, + "learning_rate": 8.861910614150662e-06, + "loss": 0.9315, + "step": 297 + }, + { + "epoch": 0.5187119234116623, + "grad_norm": 0.478515625, + "learning_rate": 8.852749757125392e-06, + "loss": 0.9283, + "step": 298 + }, + { + "epoch": 0.5204525674499565, + "grad_norm": 0.5078125, + "learning_rate": 8.843556953786872e-06, + "loss": 0.952, + "step": 299 + }, + { + "epoch": 0.5221932114882507, + "grad_norm": 0.48828125, + "learning_rate": 8.834332280360181e-06, + "loss": 0.9999, + "step": 300 + }, + { + "epoch": 0.5239338555265448, + "grad_norm": 0.498046875, + "learning_rate": 8.82507581333467e-06, + "loss": 0.9453, + "step": 301 + }, + { + "epoch": 0.525674499564839, + "grad_norm": 0.482421875, + "learning_rate": 8.815787629463306e-06, + "loss": 0.8678, + "step": 302 + }, + { + "epoch": 0.5274151436031331, + "grad_norm": 0.5, + "learning_rate": 8.806467805762056e-06, + "loss": 0.9878, + "step": 303 + }, + { + "epoch": 0.5291557876414273, + "grad_norm": 0.49609375, + "learning_rate": 8.797116419509232e-06, + "loss": 0.8964, + "step": 304 + }, + { + "epoch": 0.5308964316797214, + "grad_norm": 0.474609375, + "learning_rate": 8.78773354824486e-06, + "loss": 0.9584, + "step": 305 + }, + { + "epoch": 0.5326370757180157, + "grad_norm": 0.482421875, + "learning_rate": 8.778319269770033e-06, + "loss": 0.9715, + "step": 306 + }, + { + "epoch": 0.5343777197563099, + "grad_norm": 0.484375, + "learning_rate": 8.768873662146271e-06, + "loss": 0.9034, + "step": 307 + }, + { + "epoch": 0.536118363794604, + "grad_norm": 0.494140625, + "learning_rate": 8.759396803694863e-06, + "loss": 0.9189, + "step": 308 + }, + { + "epoch": 0.5378590078328982, + "grad_norm": 0.484375, + "learning_rate": 8.749888772996226e-06, + "loss": 1.0066, + "step": 309 + }, + { + "epoch": 0.5395996518711923, + "grad_norm": 0.47265625, + "learning_rate": 8.74034964888926e-06, + "loss": 1.0147, + "step": 310 + }, + { + "epoch": 0.5413402959094865, + "grad_norm": 0.484375, + "learning_rate": 8.730779510470672e-06, + "loss": 0.9504, + "step": 311 + }, + { + "epoch": 0.5430809399477807, + "grad_norm": 0.482421875, + "learning_rate": 8.721178437094346e-06, + "loss": 0.9239, + "step": 312 + }, + { + "epoch": 0.5448215839860748, + "grad_norm": 0.5078125, + "learning_rate": 8.711546508370666e-06, + "loss": 0.9145, + "step": 313 + }, + { + "epoch": 0.546562228024369, + "grad_norm": 0.498046875, + "learning_rate": 8.701883804165867e-06, + "loss": 0.897, + "step": 314 + }, + { + "epoch": 0.5483028720626631, + "grad_norm": 0.490234375, + "learning_rate": 8.692190404601368e-06, + "loss": 0.8796, + "step": 315 + }, + { + "epoch": 0.5500435161009574, + "grad_norm": 0.53515625, + "learning_rate": 8.682466390053106e-06, + "loss": 1.03, + "step": 316 + }, + { + "epoch": 0.5517841601392516, + "grad_norm": 0.482421875, + "learning_rate": 8.672711841150877e-06, + "loss": 0.9676, + "step": 317 + }, + { + "epoch": 0.5535248041775457, + "grad_norm": 0.490234375, + "learning_rate": 8.662926838777657e-06, + "loss": 1.0098, + "step": 318 + }, + { + "epoch": 0.5552654482158399, + "grad_norm": 0.5078125, + "learning_rate": 8.653111464068937e-06, + "loss": 0.9344, + "step": 319 + }, + { + "epoch": 0.557006092254134, + "grad_norm": 0.51171875, + "learning_rate": 
8.643265798412057e-06, + "loss": 0.9555, + "step": 320 + }, + { + "epoch": 0.5587467362924282, + "grad_norm": 0.5, + "learning_rate": 8.633389923445515e-06, + "loss": 0.8845, + "step": 321 + }, + { + "epoch": 0.5604873803307223, + "grad_norm": 0.48046875, + "learning_rate": 8.623483921058304e-06, + "loss": 0.9403, + "step": 322 + }, + { + "epoch": 0.5622280243690165, + "grad_norm": 0.482421875, + "learning_rate": 8.613547873389228e-06, + "loss": 0.9654, + "step": 323 + }, + { + "epoch": 0.5639686684073107, + "grad_norm": 0.490234375, + "learning_rate": 8.603581862826222e-06, + "loss": 1.0108, + "step": 324 + }, + { + "epoch": 0.5657093124456049, + "grad_norm": 0.4921875, + "learning_rate": 8.593585972005665e-06, + "loss": 0.9708, + "step": 325 + }, + { + "epoch": 0.5674499564838991, + "grad_norm": 0.484375, + "learning_rate": 8.5835602838117e-06, + "loss": 0.9333, + "step": 326 + }, + { + "epoch": 0.5691906005221932, + "grad_norm": 0.478515625, + "learning_rate": 8.573504881375543e-06, + "loss": 0.9067, + "step": 327 + }, + { + "epoch": 0.5709312445604874, + "grad_norm": 0.486328125, + "learning_rate": 8.563419848074798e-06, + "loss": 0.9388, + "step": 328 + }, + { + "epoch": 0.5726718885987816, + "grad_norm": 0.4921875, + "learning_rate": 8.55330526753276e-06, + "loss": 0.9246, + "step": 329 + }, + { + "epoch": 0.5744125326370757, + "grad_norm": 0.498046875, + "learning_rate": 8.543161223617724e-06, + "loss": 0.9222, + "step": 330 + }, + { + "epoch": 0.5761531766753699, + "grad_norm": 0.490234375, + "learning_rate": 8.532987800442292e-06, + "loss": 1.0001, + "step": 331 + }, + { + "epoch": 0.577893820713664, + "grad_norm": 0.498046875, + "learning_rate": 8.522785082362675e-06, + "loss": 1.0003, + "step": 332 + }, + { + "epoch": 0.5796344647519582, + "grad_norm": 0.498046875, + "learning_rate": 8.512553153977988e-06, + "loss": 0.9103, + "step": 333 + }, + { + "epoch": 0.5813751087902524, + "grad_norm": 0.498046875, + "learning_rate": 8.502292100129553e-06, + "loss": 0.9895, + "step": 334 + }, + { + "epoch": 0.5831157528285466, + "grad_norm": 0.4921875, + "learning_rate": 8.492002005900201e-06, + "loss": 0.9981, + "step": 335 + }, + { + "epoch": 0.5848563968668408, + "grad_norm": 0.4765625, + "learning_rate": 8.481682956613555e-06, + "loss": 0.8758, + "step": 336 + }, + { + "epoch": 0.5865970409051349, + "grad_norm": 0.49609375, + "learning_rate": 8.471335037833328e-06, + "loss": 0.9845, + "step": 337 + }, + { + "epoch": 0.5883376849434291, + "grad_norm": 0.474609375, + "learning_rate": 8.460958335362617e-06, + "loss": 0.9974, + "step": 338 + }, + { + "epoch": 0.5900783289817232, + "grad_norm": 0.4921875, + "learning_rate": 8.450552935243186e-06, + "loss": 0.9725, + "step": 339 + }, + { + "epoch": 0.5918189730200174, + "grad_norm": 0.48046875, + "learning_rate": 8.440118923754757e-06, + "loss": 0.9362, + "step": 340 + }, + { + "epoch": 0.5935596170583116, + "grad_norm": 0.47265625, + "learning_rate": 8.429656387414289e-06, + "loss": 0.9659, + "step": 341 + }, + { + "epoch": 0.5953002610966057, + "grad_norm": 0.5078125, + "learning_rate": 8.419165412975265e-06, + "loss": 0.9346, + "step": 342 + }, + { + "epoch": 0.5970409051348999, + "grad_norm": 0.48828125, + "learning_rate": 8.408646087426975e-06, + "loss": 0.8759, + "step": 343 + }, + { + "epoch": 0.598781549173194, + "grad_norm": 0.5, + "learning_rate": 8.398098497993785e-06, + "loss": 0.9359, + "step": 344 + }, + { + "epoch": 0.6005221932114883, + "grad_norm": 0.5, + "learning_rate": 8.387522732134428e-06, + "loss": 0.9171, + "step": 
345 + }, + { + "epoch": 0.6022628372497825, + "grad_norm": 0.49609375, + "learning_rate": 8.376918877541263e-06, + "loss": 0.919, + "step": 346 + }, + { + "epoch": 0.6040034812880766, + "grad_norm": 0.5234375, + "learning_rate": 8.36628702213956e-06, + "loss": 0.9211, + "step": 347 + }, + { + "epoch": 0.6057441253263708, + "grad_norm": 0.494140625, + "learning_rate": 8.355627254086771e-06, + "loss": 0.9191, + "step": 348 + }, + { + "epoch": 0.6074847693646649, + "grad_norm": 0.49609375, + "learning_rate": 8.344939661771784e-06, + "loss": 0.926, + "step": 349 + }, + { + "epoch": 0.6092254134029591, + "grad_norm": 0.5, + "learning_rate": 8.334224333814209e-06, + "loss": 0.9088, + "step": 350 + }, + { + "epoch": 0.6109660574412533, + "grad_norm": 0.50390625, + "learning_rate": 8.323481359063631e-06, + "loss": 0.9455, + "step": 351 + }, + { + "epoch": 0.6127067014795474, + "grad_norm": 0.50390625, + "learning_rate": 8.312710826598884e-06, + "loss": 0.8873, + "step": 352 + }, + { + "epoch": 0.6144473455178416, + "grad_norm": 0.49609375, + "learning_rate": 8.301912825727294e-06, + "loss": 0.9562, + "step": 353 + }, + { + "epoch": 0.6161879895561357, + "grad_norm": 0.48828125, + "learning_rate": 8.29108744598396e-06, + "loss": 0.9324, + "step": 354 + }, + { + "epoch": 0.61792863359443, + "grad_norm": 0.51171875, + "learning_rate": 8.280234777131e-06, + "loss": 0.9037, + "step": 355 + }, + { + "epoch": 0.6196692776327241, + "grad_norm": 0.5078125, + "learning_rate": 8.269354909156803e-06, + "loss": 0.9127, + "step": 356 + }, + { + "epoch": 0.6214099216710183, + "grad_norm": 0.494140625, + "learning_rate": 8.258447932275296e-06, + "loss": 0.9003, + "step": 357 + }, + { + "epoch": 0.6231505657093125, + "grad_norm": 0.494140625, + "learning_rate": 8.247513936925182e-06, + "loss": 0.9871, + "step": 358 + }, + { + "epoch": 0.6248912097476066, + "grad_norm": 0.50390625, + "learning_rate": 8.236553013769198e-06, + "loss": 0.8732, + "step": 359 + }, + { + "epoch": 0.6266318537859008, + "grad_norm": 0.486328125, + "learning_rate": 8.225565253693365e-06, + "loss": 0.9563, + "step": 360 + }, + { + "epoch": 0.6266318537859008, + "eval_loss": 0.9300395846366882, + "eval_runtime": 59.3628, + "eval_samples_per_second": 78.972, + "eval_steps_per_second": 9.871, + "step": 360 + }, + { + "epoch": 0.6283724978241949, + "grad_norm": 0.5078125, + "learning_rate": 8.214550747806227e-06, + "loss": 0.9173, + "step": 361 + }, + { + "epoch": 0.6301131418624891, + "grad_norm": 0.494140625, + "learning_rate": 8.2035095874381e-06, + "loss": 0.9297, + "step": 362 + }, + { + "epoch": 0.6318537859007833, + "grad_norm": 0.50390625, + "learning_rate": 8.192441864140314e-06, + "loss": 0.9582, + "step": 363 + }, + { + "epoch": 0.6335944299390774, + "grad_norm": 0.50390625, + "learning_rate": 8.181347669684456e-06, + "loss": 0.9176, + "step": 364 + }, + { + "epoch": 0.6353350739773717, + "grad_norm": 0.494140625, + "learning_rate": 8.170227096061607e-06, + "loss": 0.9181, + "step": 365 + }, + { + "epoch": 0.6370757180156658, + "grad_norm": 0.486328125, + "learning_rate": 8.15908023548158e-06, + "loss": 0.9708, + "step": 366 + }, + { + "epoch": 0.63881636205396, + "grad_norm": 0.48046875, + "learning_rate": 8.147907180372147e-06, + "loss": 0.9683, + "step": 367 + }, + { + "epoch": 0.6405570060922542, + "grad_norm": 0.4765625, + "learning_rate": 8.136708023378292e-06, + "loss": 0.9728, + "step": 368 + }, + { + "epoch": 0.6422976501305483, + "grad_norm": 0.498046875, + "learning_rate": 8.125482857361426e-06, + "loss": 0.9696, + "step": 
369 + }, + { + "epoch": 0.6440382941688425, + "grad_norm": 0.49609375, + "learning_rate": 8.114231775398618e-06, + "loss": 0.9189, + "step": 370 + }, + { + "epoch": 0.6457789382071366, + "grad_norm": 0.486328125, + "learning_rate": 8.102954870781831e-06, + "loss": 0.9361, + "step": 371 + }, + { + "epoch": 0.6475195822454308, + "grad_norm": 0.490234375, + "learning_rate": 8.091652237017152e-06, + "loss": 0.9612, + "step": 372 + }, + { + "epoch": 0.6492602262837249, + "grad_norm": 0.498046875, + "learning_rate": 8.080323967823993e-06, + "loss": 0.9442, + "step": 373 + }, + { + "epoch": 0.6510008703220191, + "grad_norm": 0.48828125, + "learning_rate": 8.068970157134349e-06, + "loss": 0.9346, + "step": 374 + }, + { + "epoch": 0.6527415143603134, + "grad_norm": 0.486328125, + "learning_rate": 8.057590899091985e-06, + "loss": 0.8999, + "step": 375 + }, + { + "epoch": 0.6544821583986075, + "grad_norm": 0.498046875, + "learning_rate": 8.046186288051681e-06, + "loss": 0.9674, + "step": 376 + }, + { + "epoch": 0.6562228024369017, + "grad_norm": 0.48046875, + "learning_rate": 8.034756418578434e-06, + "loss": 0.9364, + "step": 377 + }, + { + "epoch": 0.6579634464751958, + "grad_norm": 0.484375, + "learning_rate": 8.023301385446682e-06, + "loss": 0.9341, + "step": 378 + }, + { + "epoch": 0.65970409051349, + "grad_norm": 0.4921875, + "learning_rate": 8.011821283639515e-06, + "loss": 0.8986, + "step": 379 + }, + { + "epoch": 0.6614447345517842, + "grad_norm": 0.515625, + "learning_rate": 8.000316208347891e-06, + "loss": 0.9266, + "step": 380 + }, + { + "epoch": 0.6631853785900783, + "grad_norm": 0.5078125, + "learning_rate": 7.988786254969837e-06, + "loss": 0.8911, + "step": 381 + }, + { + "epoch": 0.6649260226283725, + "grad_norm": 0.5078125, + "learning_rate": 7.977231519109665e-06, + "loss": 0.9221, + "step": 382 + }, + { + "epoch": 0.6666666666666666, + "grad_norm": 0.466796875, + "learning_rate": 7.965652096577188e-06, + "loss": 0.9635, + "step": 383 + }, + { + "epoch": 0.6684073107049608, + "grad_norm": 0.48828125, + "learning_rate": 7.954048083386909e-06, + "loss": 0.9023, + "step": 384 + }, + { + "epoch": 0.6701479547432551, + "grad_norm": 0.48828125, + "learning_rate": 7.942419575757235e-06, + "loss": 0.9277, + "step": 385 + }, + { + "epoch": 0.6718885987815492, + "grad_norm": 0.486328125, + "learning_rate": 7.930766670109675e-06, + "loss": 0.9826, + "step": 386 + }, + { + "epoch": 0.6736292428198434, + "grad_norm": 0.490234375, + "learning_rate": 7.919089463068038e-06, + "loss": 0.9223, + "step": 387 + }, + { + "epoch": 0.6753698868581375, + "grad_norm": 0.51953125, + "learning_rate": 7.907388051457647e-06, + "loss": 1.0468, + "step": 388 + }, + { + "epoch": 0.6771105308964317, + "grad_norm": 0.4921875, + "learning_rate": 7.895662532304516e-06, + "loss": 0.9233, + "step": 389 + }, + { + "epoch": 0.6788511749347258, + "grad_norm": 0.478515625, + "learning_rate": 7.883913002834555e-06, + "loss": 0.9756, + "step": 390 + }, + { + "epoch": 0.68059181897302, + "grad_norm": 0.494140625, + "learning_rate": 7.872139560472767e-06, + "loss": 0.9, + "step": 391 + }, + { + "epoch": 0.6823324630113142, + "grad_norm": 0.51953125, + "learning_rate": 7.860342302842434e-06, + "loss": 0.894, + "step": 392 + }, + { + "epoch": 0.6840731070496083, + "grad_norm": 0.5, + "learning_rate": 7.848521327764309e-06, + "loss": 0.9369, + "step": 393 + }, + { + "epoch": 0.6858137510879025, + "grad_norm": 0.5, + "learning_rate": 7.836676733255809e-06, + "loss": 0.9416, + "step": 394 + }, + { + "epoch": 0.6875543951261966, + 
"grad_norm": 0.494140625, + "learning_rate": 7.824808617530197e-06, + "loss": 0.8992, + "step": 395 + }, + { + "epoch": 0.6892950391644909, + "grad_norm": 0.486328125, + "learning_rate": 7.812917078995769e-06, + "loss": 0.9457, + "step": 396 + }, + { + "epoch": 0.6910356832027851, + "grad_norm": 0.4765625, + "learning_rate": 7.801002216255042e-06, + "loss": 0.9484, + "step": 397 + }, + { + "epoch": 0.6927763272410792, + "grad_norm": 0.50390625, + "learning_rate": 7.78906412810393e-06, + "loss": 0.9297, + "step": 398 + }, + { + "epoch": 0.6945169712793734, + "grad_norm": 0.498046875, + "learning_rate": 7.777102913530927e-06, + "loss": 0.951, + "step": 399 + }, + { + "epoch": 0.6962576153176675, + "grad_norm": 0.4921875, + "learning_rate": 7.76511867171629e-06, + "loss": 0.9091, + "step": 400 + }, + { + "epoch": 0.6979982593559617, + "grad_norm": 0.49609375, + "learning_rate": 7.753111502031214e-06, + "loss": 0.9041, + "step": 401 + }, + { + "epoch": 0.6997389033942559, + "grad_norm": 0.50390625, + "learning_rate": 7.741081504037009e-06, + "loss": 0.9463, + "step": 402 + }, + { + "epoch": 0.70147954743255, + "grad_norm": 0.486328125, + "learning_rate": 7.729028777484266e-06, + "loss": 0.9494, + "step": 403 + }, + { + "epoch": 0.7032201914708442, + "grad_norm": 0.482421875, + "learning_rate": 7.716953422312044e-06, + "loss": 0.9266, + "step": 404 + }, + { + "epoch": 0.7049608355091384, + "grad_norm": 0.5078125, + "learning_rate": 7.704855538647033e-06, + "loss": 0.9731, + "step": 405 + }, + { + "epoch": 0.7067014795474326, + "grad_norm": 0.490234375, + "learning_rate": 7.692735226802729e-06, + "loss": 0.961, + "step": 406 + }, + { + "epoch": 0.7084421235857267, + "grad_norm": 0.482421875, + "learning_rate": 7.680592587278585e-06, + "loss": 0.9506, + "step": 407 + }, + { + "epoch": 0.7101827676240209, + "grad_norm": 0.5, + "learning_rate": 7.668427720759207e-06, + "loss": 0.9084, + "step": 408 + }, + { + "epoch": 0.7119234116623151, + "grad_norm": 0.5, + "learning_rate": 7.656240728113493e-06, + "loss": 0.9147, + "step": 409 + }, + { + "epoch": 0.7136640557006092, + "grad_norm": 0.4921875, + "learning_rate": 7.644031710393815e-06, + "loss": 0.9498, + "step": 410 + }, + { + "epoch": 0.7154046997389034, + "grad_norm": 0.490234375, + "learning_rate": 7.631800768835167e-06, + "loss": 0.9145, + "step": 411 + }, + { + "epoch": 0.7171453437771975, + "grad_norm": 0.498046875, + "learning_rate": 7.619548004854332e-06, + "loss": 0.8884, + "step": 412 + }, + { + "epoch": 0.7188859878154917, + "grad_norm": 0.48828125, + "learning_rate": 7.607273520049041e-06, + "loss": 0.9777, + "step": 413 + }, + { + "epoch": 0.720626631853786, + "grad_norm": 0.478515625, + "learning_rate": 7.594977416197134e-06, + "loss": 0.9802, + "step": 414 + }, + { + "epoch": 0.72236727589208, + "grad_norm": 0.49609375, + "learning_rate": 7.582659795255707e-06, + "loss": 0.9484, + "step": 415 + }, + { + "epoch": 0.7241079199303743, + "grad_norm": 0.5078125, + "learning_rate": 7.570320759360273e-06, + "loss": 0.9231, + "step": 416 + }, + { + "epoch": 0.7258485639686684, + "grad_norm": 0.498046875, + "learning_rate": 7.557960410823917e-06, + "loss": 0.9416, + "step": 417 + }, + { + "epoch": 0.7275892080069626, + "grad_norm": 0.4921875, + "learning_rate": 7.545578852136443e-06, + "loss": 1.0058, + "step": 418 + }, + { + "epoch": 0.7293298520452568, + "grad_norm": 0.5, + "learning_rate": 7.533176185963523e-06, + "loss": 0.9426, + "step": 419 + }, + { + "epoch": 0.7310704960835509, + "grad_norm": 0.498046875, + "learning_rate": 
7.520752515145855e-06, + "loss": 0.949, + "step": 420 + }, + { + "epoch": 0.7328111401218451, + "grad_norm": 0.484375, + "learning_rate": 7.508307942698296e-06, + "loss": 0.9727, + "step": 421 + }, + { + "epoch": 0.7345517841601392, + "grad_norm": 0.515625, + "learning_rate": 7.495842571809021e-06, + "loss": 0.8763, + "step": 422 + }, + { + "epoch": 0.7362924281984334, + "grad_norm": 0.5, + "learning_rate": 7.4833565058386595e-06, + "loss": 0.8914, + "step": 423 + }, + { + "epoch": 0.7380330722367275, + "grad_norm": 0.49609375, + "learning_rate": 7.470849848319443e-06, + "loss": 0.9157, + "step": 424 + }, + { + "epoch": 0.7397737162750218, + "grad_norm": 0.490234375, + "learning_rate": 7.458322702954342e-06, + "loss": 0.8703, + "step": 425 + }, + { + "epoch": 0.741514360313316, + "grad_norm": 0.48828125, + "learning_rate": 7.44577517361621e-06, + "loss": 0.9106, + "step": 426 + }, + { + "epoch": 0.7432550043516101, + "grad_norm": 0.5078125, + "learning_rate": 7.4332073643469196e-06, + "loss": 0.8928, + "step": 427 + }, + { + "epoch": 0.7449956483899043, + "grad_norm": 0.5234375, + "learning_rate": 7.420619379356504e-06, + "loss": 0.9402, + "step": 428 + }, + { + "epoch": 0.7467362924281984, + "grad_norm": 0.490234375, + "learning_rate": 7.408011323022286e-06, + "loss": 0.8997, + "step": 429 + }, + { + "epoch": 0.7484769364664926, + "grad_norm": 0.48828125, + "learning_rate": 7.395383299888019e-06, + "loss": 0.9831, + "step": 430 + }, + { + "epoch": 0.7502175805047868, + "grad_norm": 0.494140625, + "learning_rate": 7.382735414663017e-06, + "loss": 1.04, + "step": 431 + }, + { + "epoch": 0.7519582245430809, + "grad_norm": 0.49609375, + "learning_rate": 7.370067772221285e-06, + "loss": 0.9034, + "step": 432 + }, + { + "epoch": 0.7519582245430809, + "eval_loss": 0.9259106516838074, + "eval_runtime": 59.5681, + "eval_samples_per_second": 78.7, + "eval_steps_per_second": 9.837, + "step": 432 + }, + { + "epoch": 0.7536988685813751, + "grad_norm": 0.4921875, + "learning_rate": 7.357380477600654e-06, + "loss": 0.9201, + "step": 433 + }, + { + "epoch": 0.7554395126196692, + "grad_norm": 0.5078125, + "learning_rate": 7.3446736360019065e-06, + "loss": 0.9291, + "step": 434 + }, + { + "epoch": 0.7571801566579635, + "grad_norm": 0.47265625, + "learning_rate": 7.331947352787905e-06, + "loss": 0.951, + "step": 435 + }, + { + "epoch": 0.7589208006962577, + "grad_norm": 0.49609375, + "learning_rate": 7.319201733482715e-06, + "loss": 0.9208, + "step": 436 + }, + { + "epoch": 0.7606614447345518, + "grad_norm": 0.490234375, + "learning_rate": 7.3064368837707425e-06, + "loss": 0.9116, + "step": 437 + }, + { + "epoch": 0.762402088772846, + "grad_norm": 0.50390625, + "learning_rate": 7.2936529094958365e-06, + "loss": 0.9165, + "step": 438 + }, + { + "epoch": 0.7641427328111401, + "grad_norm": 0.494140625, + "learning_rate": 7.280849916660434e-06, + "loss": 0.9513, + "step": 439 + }, + { + "epoch": 0.7658833768494343, + "grad_norm": 0.48828125, + "learning_rate": 7.268028011424664e-06, + "loss": 0.9725, + "step": 440 + }, + { + "epoch": 0.7676240208877284, + "grad_norm": 0.494140625, + "learning_rate": 7.255187300105477e-06, + "loss": 0.9328, + "step": 441 + }, + { + "epoch": 0.7693646649260226, + "grad_norm": 0.50390625, + "learning_rate": 7.24232788917576e-06, + "loss": 0.8776, + "step": 442 + }, + { + "epoch": 0.7711053089643168, + "grad_norm": 0.5078125, + "learning_rate": 7.229449885263451e-06, + "loss": 0.9432, + "step": 443 + }, + { + "epoch": 0.7728459530026109, + "grad_norm": 0.478515625, + 
"learning_rate": 7.21655339515066e-06, + "loss": 0.8951, + "step": 444 + }, + { + "epoch": 0.7745865970409052, + "grad_norm": 0.4921875, + "learning_rate": 7.203638525772783e-06, + "loss": 0.961, + "step": 445 + }, + { + "epoch": 0.7763272410791993, + "grad_norm": 0.48828125, + "learning_rate": 7.1907053842176075e-06, + "loss": 0.9498, + "step": 446 + }, + { + "epoch": 0.7780678851174935, + "grad_norm": 0.498046875, + "learning_rate": 7.17775407772444e-06, + "loss": 0.9638, + "step": 447 + }, + { + "epoch": 0.7798085291557877, + "grad_norm": 0.490234375, + "learning_rate": 7.164784713683197e-06, + "loss": 0.9516, + "step": 448 + }, + { + "epoch": 0.7815491731940818, + "grad_norm": 0.466796875, + "learning_rate": 7.1517973996335335e-06, + "loss": 0.9108, + "step": 449 + }, + { + "epoch": 0.783289817232376, + "grad_norm": 0.50390625, + "learning_rate": 7.138792243263936e-06, + "loss": 0.9089, + "step": 450 + }, + { + "epoch": 0.7850304612706701, + "grad_norm": 0.50390625, + "learning_rate": 7.125769352410845e-06, + "loss": 0.9667, + "step": 451 + }, + { + "epoch": 0.7867711053089643, + "grad_norm": 0.484375, + "learning_rate": 7.112728835057742e-06, + "loss": 0.9458, + "step": 452 + }, + { + "epoch": 0.7885117493472585, + "grad_norm": 0.478515625, + "learning_rate": 7.099670799334269e-06, + "loss": 0.899, + "step": 453 + }, + { + "epoch": 0.7902523933855526, + "grad_norm": 0.498046875, + "learning_rate": 7.08659535351533e-06, + "loss": 0.9031, + "step": 454 + }, + { + "epoch": 0.7919930374238469, + "grad_norm": 0.49609375, + "learning_rate": 7.073502606020187e-06, + "loss": 0.9771, + "step": 455 + }, + { + "epoch": 0.793733681462141, + "grad_norm": 0.52734375, + "learning_rate": 7.060392665411564e-06, + "loss": 0.9113, + "step": 456 + }, + { + "epoch": 0.7954743255004352, + "grad_norm": 0.498046875, + "learning_rate": 7.0472656403947505e-06, + "loss": 1.0172, + "step": 457 + }, + { + "epoch": 0.7972149695387293, + "grad_norm": 0.4765625, + "learning_rate": 7.034121639816691e-06, + "loss": 0.9282, + "step": 458 + }, + { + "epoch": 0.7989556135770235, + "grad_norm": 0.50390625, + "learning_rate": 7.020960772665096e-06, + "loss": 0.896, + "step": 459 + }, + { + "epoch": 0.8006962576153177, + "grad_norm": 0.490234375, + "learning_rate": 7.007783148067524e-06, + "loss": 0.881, + "step": 460 + }, + { + "epoch": 0.8024369016536118, + "grad_norm": 0.5078125, + "learning_rate": 6.994588875290488e-06, + "loss": 0.9155, + "step": 461 + }, + { + "epoch": 0.804177545691906, + "grad_norm": 0.5078125, + "learning_rate": 6.9813780637385385e-06, + "loss": 0.9012, + "step": 462 + }, + { + "epoch": 0.8059181897302001, + "grad_norm": 0.50390625, + "learning_rate": 6.968150822953372e-06, + "loss": 0.9085, + "step": 463 + }, + { + "epoch": 0.8076588337684943, + "grad_norm": 0.498046875, + "learning_rate": 6.954907262612906e-06, + "loss": 0.9818, + "step": 464 + }, + { + "epoch": 0.8093994778067886, + "grad_norm": 0.484375, + "learning_rate": 6.941647492530378e-06, + "loss": 0.9717, + "step": 465 + }, + { + "epoch": 0.8111401218450827, + "grad_norm": 0.5, + "learning_rate": 6.928371622653434e-06, + "loss": 0.9369, + "step": 466 + }, + { + "epoch": 0.8128807658833769, + "grad_norm": 0.486328125, + "learning_rate": 6.91507976306322e-06, + "loss": 0.8943, + "step": 467 + }, + { + "epoch": 0.814621409921671, + "grad_norm": 0.498046875, + "learning_rate": 6.901772023973459e-06, + "loss": 0.9113, + "step": 468 + }, + { + "epoch": 0.8163620539599652, + "grad_norm": 0.49609375, + "learning_rate": 6.888448515729552e-06, + 
"loss": 0.8995, + "step": 469 + }, + { + "epoch": 0.8181026979982594, + "grad_norm": 0.486328125, + "learning_rate": 6.8751093488076485e-06, + "loss": 0.9159, + "step": 470 + }, + { + "epoch": 0.8198433420365535, + "grad_norm": 0.498046875, + "learning_rate": 6.86175463381374e-06, + "loss": 0.9388, + "step": 471 + }, + { + "epoch": 0.8215839860748477, + "grad_norm": 0.49609375, + "learning_rate": 6.8483844814827405e-06, + "loss": 0.9524, + "step": 472 + }, + { + "epoch": 0.8233246301131418, + "grad_norm": 0.5078125, + "learning_rate": 6.8349990026775656e-06, + "loss": 0.9458, + "step": 473 + }, + { + "epoch": 0.825065274151436, + "grad_norm": 0.5, + "learning_rate": 6.821598308388217e-06, + "loss": 0.9258, + "step": 474 + }, + { + "epoch": 0.8268059181897301, + "grad_norm": 0.482421875, + "learning_rate": 6.8081825097308584e-06, + "loss": 0.9159, + "step": 475 + }, + { + "epoch": 0.8285465622280244, + "grad_norm": 0.5, + "learning_rate": 6.794751717946897e-06, + "loss": 0.9029, + "step": 476 + }, + { + "epoch": 0.8302872062663186, + "grad_norm": 0.484375, + "learning_rate": 6.781306044402064e-06, + "loss": 0.8852, + "step": 477 + }, + { + "epoch": 0.8320278503046127, + "grad_norm": 0.482421875, + "learning_rate": 6.767845600585479e-06, + "loss": 0.945, + "step": 478 + }, + { + "epoch": 0.8337684943429069, + "grad_norm": 0.474609375, + "learning_rate": 6.754370498108747e-06, + "loss": 0.9776, + "step": 479 + }, + { + "epoch": 0.835509138381201, + "grad_norm": 0.490234375, + "learning_rate": 6.740880848705005e-06, + "loss": 0.9229, + "step": 480 + }, + { + "epoch": 0.8372497824194952, + "grad_norm": 0.50390625, + "learning_rate": 6.72737676422802e-06, + "loss": 0.9686, + "step": 481 + }, + { + "epoch": 0.8389904264577894, + "grad_norm": 0.484375, + "learning_rate": 6.713858356651253e-06, + "loss": 0.9459, + "step": 482 + }, + { + "epoch": 0.8407310704960835, + "grad_norm": 0.486328125, + "learning_rate": 6.700325738066923e-06, + "loss": 0.9472, + "step": 483 + }, + { + "epoch": 0.8424717145343777, + "grad_norm": 0.5, + "learning_rate": 6.686779020685089e-06, + "loss": 0.9417, + "step": 484 + }, + { + "epoch": 0.8442123585726719, + "grad_norm": 0.490234375, + "learning_rate": 6.6732183168327146e-06, + "loss": 0.9666, + "step": 485 + }, + { + "epoch": 0.8459530026109661, + "grad_norm": 0.490234375, + "learning_rate": 6.659643738952732e-06, + "loss": 0.9368, + "step": 486 + }, + { + "epoch": 0.8476936466492603, + "grad_norm": 0.484375, + "learning_rate": 6.646055399603122e-06, + "loss": 0.9466, + "step": 487 + }, + { + "epoch": 0.8494342906875544, + "grad_norm": 0.5, + "learning_rate": 6.6324534114559656e-06, + "loss": 0.9159, + "step": 488 + }, + { + "epoch": 0.8511749347258486, + "grad_norm": 0.5078125, + "learning_rate": 6.618837887296523e-06, + "loss": 0.9067, + "step": 489 + }, + { + "epoch": 0.8529155787641427, + "grad_norm": 0.48046875, + "learning_rate": 6.605208940022289e-06, + "loss": 0.9608, + "step": 490 + }, + { + "epoch": 0.8546562228024369, + "grad_norm": 0.498046875, + "learning_rate": 6.591566682642061e-06, + "loss": 0.9663, + "step": 491 + }, + { + "epoch": 0.856396866840731, + "grad_norm": 0.5234375, + "learning_rate": 6.5779112282750035e-06, + "loss": 0.8931, + "step": 492 + }, + { + "epoch": 0.8581375108790252, + "grad_norm": 0.498046875, + "learning_rate": 6.564242690149705e-06, + "loss": 0.9383, + "step": 493 + }, + { + "epoch": 0.8598781549173194, + "grad_norm": 0.484375, + "learning_rate": 6.550561181603244e-06, + "loss": 0.9343, + "step": 494 + }, + { + "epoch": 
0.8616187989556136, + "grad_norm": 0.486328125, + "learning_rate": 6.536866816080247e-06, + "loss": 0.9999, + "step": 495 + }, + { + "epoch": 0.8633594429939078, + "grad_norm": 0.4765625, + "learning_rate": 6.523159707131951e-06, + "loss": 0.9955, + "step": 496 + }, + { + "epoch": 0.8651000870322019, + "grad_norm": 0.50390625, + "learning_rate": 6.509439968415252e-06, + "loss": 0.8897, + "step": 497 + }, + { + "epoch": 0.8668407310704961, + "grad_norm": 0.498046875, + "learning_rate": 6.4957077136917776e-06, + "loss": 0.9065, + "step": 498 + }, + { + "epoch": 0.8685813751087903, + "grad_norm": 0.4921875, + "learning_rate": 6.481963056826932e-06, + "loss": 0.9224, + "step": 499 + }, + { + "epoch": 0.8703220191470844, + "grad_norm": 0.50390625, + "learning_rate": 6.468206111788957e-06, + "loss": 0.9638, + "step": 500 + }, + { + "epoch": 0.8720626631853786, + "grad_norm": 0.5, + "learning_rate": 6.454436992647984e-06, + "loss": 0.9232, + "step": 501 + }, + { + "epoch": 0.8738033072236727, + "grad_norm": 0.51171875, + "learning_rate": 6.440655813575093e-06, + "loss": 0.9412, + "step": 502 + }, + { + "epoch": 0.8755439512619669, + "grad_norm": 0.5078125, + "learning_rate": 6.426862688841359e-06, + "loss": 0.9051, + "step": 503 + }, + { + "epoch": 0.8772845953002611, + "grad_norm": 0.5078125, + "learning_rate": 6.413057732816911e-06, + "loss": 0.9214, + "step": 504 + }, + { + "epoch": 0.8772845953002611, + "eval_loss": 0.9230473637580872, + "eval_runtime": 59.2532, + "eval_samples_per_second": 79.118, + "eval_steps_per_second": 9.89, + "step": 504 + }, + { + "epoch": 0.8790252393385553, + "grad_norm": 0.49609375, + "learning_rate": 6.3992410599699786e-06, + "loss": 0.908, + "step": 505 + }, + { + "epoch": 0.8807658833768495, + "grad_norm": 0.48828125, + "learning_rate": 6.385412784865948e-06, + "loss": 0.9613, + "step": 506 + }, + { + "epoch": 0.8825065274151436, + "grad_norm": 0.498046875, + "learning_rate": 6.371573022166409e-06, + "loss": 0.9061, + "step": 507 + }, + { + "epoch": 0.8842471714534378, + "grad_norm": 0.5, + "learning_rate": 6.357721886628201e-06, + "loss": 0.9117, + "step": 508 + }, + { + "epoch": 0.8859878154917319, + "grad_norm": 0.48046875, + "learning_rate": 6.34385949310247e-06, + "loss": 0.9219, + "step": 509 + }, + { + "epoch": 0.8877284595300261, + "grad_norm": 0.5078125, + "learning_rate": 6.329985956533708e-06, + "loss": 0.976, + "step": 510 + }, + { + "epoch": 0.8894691035683203, + "grad_norm": 0.515625, + "learning_rate": 6.3161013919588e-06, + "loss": 0.9131, + "step": 511 + }, + { + "epoch": 0.8912097476066144, + "grad_norm": 0.5, + "learning_rate": 6.302205914506083e-06, + "loss": 0.908, + "step": 512 + }, + { + "epoch": 0.8929503916449086, + "grad_norm": 0.4921875, + "learning_rate": 6.2882996393943706e-06, + "loss": 0.9786, + "step": 513 + }, + { + "epoch": 0.8946910356832027, + "grad_norm": 0.48828125, + "learning_rate": 6.274382681932019e-06, + "loss": 0.8728, + "step": 514 + }, + { + "epoch": 0.896431679721497, + "grad_norm": 0.482421875, + "learning_rate": 6.2604551575159476e-06, + "loss": 0.9491, + "step": 515 + }, + { + "epoch": 0.8981723237597912, + "grad_norm": 0.49609375, + "learning_rate": 6.24651718163071e-06, + "loss": 0.995, + "step": 516 + }, + { + "epoch": 0.8999129677980853, + "grad_norm": 0.482421875, + "learning_rate": 6.2325688698475106e-06, + "loss": 0.9574, + "step": 517 + }, + { + "epoch": 0.9016536118363795, + "grad_norm": 0.486328125, + "learning_rate": 6.218610337823262e-06, + "loss": 0.9004, + "step": 518 + }, + { + "epoch": 
0.9033942558746736, + "grad_norm": 0.48046875, + "learning_rate": 6.2046417012996195e-06, + "loss": 0.9165, + "step": 519 + }, + { + "epoch": 0.9051348999129678, + "grad_norm": 0.498046875, + "learning_rate": 6.1906630761020245e-06, + "loss": 0.9534, + "step": 520 + }, + { + "epoch": 0.906875543951262, + "grad_norm": 0.5, + "learning_rate": 6.17667457813874e-06, + "loss": 0.8883, + "step": 521 + }, + { + "epoch": 0.9086161879895561, + "grad_norm": 0.51171875, + "learning_rate": 6.162676323399898e-06, + "loss": 0.919, + "step": 522 + }, + { + "epoch": 0.9103568320278503, + "grad_norm": 0.4921875, + "learning_rate": 6.148668427956523e-06, + "loss": 0.9187, + "step": 523 + }, + { + "epoch": 0.9120974760661444, + "grad_norm": 0.494140625, + "learning_rate": 6.134651007959586e-06, + "loss": 0.8984, + "step": 524 + }, + { + "epoch": 0.9138381201044387, + "grad_norm": 0.4921875, + "learning_rate": 6.120624179639032e-06, + "loss": 0.9585, + "step": 525 + }, + { + "epoch": 0.9155787641427328, + "grad_norm": 0.49609375, + "learning_rate": 6.106588059302818e-06, + "loss": 0.9848, + "step": 526 + }, + { + "epoch": 0.917319408181027, + "grad_norm": 0.4921875, + "learning_rate": 6.092542763335947e-06, + "loss": 0.9542, + "step": 527 + }, + { + "epoch": 0.9190600522193212, + "grad_norm": 0.5, + "learning_rate": 6.0784884081995065e-06, + "loss": 0.9476, + "step": 528 + }, + { + "epoch": 0.9208006962576153, + "grad_norm": 0.498046875, + "learning_rate": 6.0644251104296995e-06, + "loss": 0.9425, + "step": 529 + }, + { + "epoch": 0.9225413402959095, + "grad_norm": 0.5, + "learning_rate": 6.0503529866368824e-06, + "loss": 0.9532, + "step": 530 + }, + { + "epoch": 0.9242819843342036, + "grad_norm": 0.49609375, + "learning_rate": 6.036272153504592e-06, + "loss": 0.9243, + "step": 531 + }, + { + "epoch": 0.9260226283724978, + "grad_norm": 0.490234375, + "learning_rate": 6.022182727788586e-06, + "loss": 0.9452, + "step": 532 + }, + { + "epoch": 0.927763272410792, + "grad_norm": 0.5234375, + "learning_rate": 6.008084826315863e-06, + "loss": 0.9243, + "step": 533 + }, + { + "epoch": 0.9295039164490861, + "grad_norm": 0.498046875, + "learning_rate": 5.993978565983709e-06, + "loss": 0.8917, + "step": 534 + }, + { + "epoch": 0.9312445604873804, + "grad_norm": 0.4921875, + "learning_rate": 5.979864063758717e-06, + "loss": 0.9562, + "step": 535 + }, + { + "epoch": 0.9329852045256745, + "grad_norm": 0.478515625, + "learning_rate": 5.965741436675816e-06, + "loss": 0.9904, + "step": 536 + }, + { + "epoch": 0.9347258485639687, + "grad_norm": 0.5, + "learning_rate": 5.9516108018373145e-06, + "loss": 0.8953, + "step": 537 + }, + { + "epoch": 0.9364664926022629, + "grad_norm": 0.494140625, + "learning_rate": 5.937472276411909e-06, + "loss": 0.8674, + "step": 538 + }, + { + "epoch": 0.938207136640557, + "grad_norm": 0.498046875, + "learning_rate": 5.923325977633732e-06, + "loss": 1.0281, + "step": 539 + }, + { + "epoch": 0.9399477806788512, + "grad_norm": 0.494140625, + "learning_rate": 5.909172022801364e-06, + "loss": 0.9278, + "step": 540 + }, + { + "epoch": 0.9416884247171453, + "grad_norm": 0.486328125, + "learning_rate": 5.8950105292768754e-06, + "loss": 0.8704, + "step": 541 + }, + { + "epoch": 0.9434290687554395, + "grad_norm": 0.515625, + "learning_rate": 5.880841614484841e-06, + "loss": 0.9561, + "step": 542 + }, + { + "epoch": 0.9451697127937336, + "grad_norm": 0.47265625, + "learning_rate": 5.866665395911375e-06, + "loss": 0.9407, + "step": 543 + }, + { + "epoch": 0.9469103568320278, + "grad_norm": 0.484375, + 
"learning_rate": 5.852481991103149e-06, + "loss": 0.8833, + "step": 544 + }, + { + "epoch": 0.9486510008703221, + "grad_norm": 0.5, + "learning_rate": 5.838291517666427e-06, + "loss": 0.993, + "step": 545 + }, + { + "epoch": 0.9503916449086162, + "grad_norm": 0.494140625, + "learning_rate": 5.824094093266077e-06, + "loss": 0.9231, + "step": 546 + }, + { + "epoch": 0.9521322889469104, + "grad_norm": 0.490234375, + "learning_rate": 5.809889835624611e-06, + "loss": 0.9512, + "step": 547 + }, + { + "epoch": 0.9538729329852045, + "grad_norm": 0.494140625, + "learning_rate": 5.795678862521197e-06, + "loss": 0.9175, + "step": 548 + }, + { + "epoch": 0.9556135770234987, + "grad_norm": 0.486328125, + "learning_rate": 5.781461291790687e-06, + "loss": 0.8922, + "step": 549 + }, + { + "epoch": 0.9573542210617929, + "grad_norm": 0.494140625, + "learning_rate": 5.767237241322641e-06, + "loss": 0.9701, + "step": 550 + }, + { + "epoch": 0.959094865100087, + "grad_norm": 0.49609375, + "learning_rate": 5.753006829060343e-06, + "loss": 0.9464, + "step": 551 + }, + { + "epoch": 0.9608355091383812, + "grad_norm": 0.5, + "learning_rate": 5.738770172999835e-06, + "loss": 0.9335, + "step": 552 + }, + { + "epoch": 0.9625761531766753, + "grad_norm": 0.458984375, + "learning_rate": 5.724527391188927e-06, + "loss": 0.9689, + "step": 553 + }, + { + "epoch": 0.9643167972149695, + "grad_norm": 0.50390625, + "learning_rate": 5.710278601726222e-06, + "loss": 0.8756, + "step": 554 + }, + { + "epoch": 0.9660574412532638, + "grad_norm": 0.484375, + "learning_rate": 5.696023922760141e-06, + "loss": 0.9459, + "step": 555 + }, + { + "epoch": 0.9677980852915579, + "grad_norm": 0.50390625, + "learning_rate": 5.681763472487933e-06, + "loss": 0.9305, + "step": 556 + }, + { + "epoch": 0.9695387293298521, + "grad_norm": 0.50390625, + "learning_rate": 5.667497369154712e-06, + "loss": 0.8707, + "step": 557 + }, + { + "epoch": 0.9712793733681462, + "grad_norm": 0.47265625, + "learning_rate": 5.6532257310524565e-06, + "loss": 0.8977, + "step": 558 + }, + { + "epoch": 0.9730200174064404, + "grad_norm": 0.494140625, + "learning_rate": 5.638948676519043e-06, + "loss": 0.8735, + "step": 559 + }, + { + "epoch": 0.9747606614447345, + "grad_norm": 0.50390625, + "learning_rate": 5.624666323937257e-06, + "loss": 0.9788, + "step": 560 + }, + { + "epoch": 0.9765013054830287, + "grad_norm": 0.49609375, + "learning_rate": 5.610378791733821e-06, + "loss": 0.9348, + "step": 561 + }, + { + "epoch": 0.9782419495213229, + "grad_norm": 0.48046875, + "learning_rate": 5.596086198378399e-06, + "loss": 0.9258, + "step": 562 + }, + { + "epoch": 0.979982593559617, + "grad_norm": 0.490234375, + "learning_rate": 5.5817886623826245e-06, + "loss": 0.9184, + "step": 563 + }, + { + "epoch": 0.9817232375979112, + "grad_norm": 0.490234375, + "learning_rate": 5.567486302299112e-06, + "loss": 0.9439, + "step": 564 + }, + { + "epoch": 0.9834638816362054, + "grad_norm": 0.474609375, + "learning_rate": 5.553179236720482e-06, + "loss": 0.9887, + "step": 565 + }, + { + "epoch": 0.9852045256744996, + "grad_norm": 0.48828125, + "learning_rate": 5.5388675842783644e-06, + "loss": 0.9439, + "step": 566 + }, + { + "epoch": 0.9869451697127938, + "grad_norm": 0.490234375, + "learning_rate": 5.524551463642429e-06, + "loss": 0.971, + "step": 567 + }, + { + "epoch": 0.9886858137510879, + "grad_norm": 0.486328125, + "learning_rate": 5.510230993519391e-06, + "loss": 0.9583, + "step": 568 + }, + { + "epoch": 0.9904264577893821, + "grad_norm": 0.50390625, + "learning_rate": 
5.495906292652035e-06, + "loss": 0.9788, + "step": 569 + }, + { + "epoch": 0.9921671018276762, + "grad_norm": 0.4921875, + "learning_rate": 5.48157747981822e-06, + "loss": 0.9634, + "step": 570 + }, + { + "epoch": 0.9939077458659704, + "grad_norm": 0.5078125, + "learning_rate": 5.467244673829908e-06, + "loss": 0.8834, + "step": 571 + }, + { + "epoch": 0.9956483899042646, + "grad_norm": 0.49609375, + "learning_rate": 5.452907993532164e-06, + "loss": 0.9113, + "step": 572 + }, + { + "epoch": 0.9973890339425587, + "grad_norm": 0.50390625, + "learning_rate": 5.438567557802186e-06, + "loss": 0.8828, + "step": 573 + }, + { + "epoch": 0.999129677980853, + "grad_norm": 0.48828125, + "learning_rate": 5.424223485548303e-06, + "loss": 0.9363, + "step": 574 + }, + { + "epoch": 1.0, + "grad_norm": 1.1796875, + "learning_rate": 5.4098758957090055e-06, + "loss": 1.0521, + "step": 575 + }, + { + "epoch": 1.001740644038294, + "grad_norm": 0.48828125, + "learning_rate": 5.395524907251944e-06, + "loss": 0.9155, + "step": 576 + }, + { + "epoch": 1.001740644038294, + "eval_loss": 0.9210891723632812, + "eval_runtime": 60.2228, + "eval_samples_per_second": 77.844, + "eval_steps_per_second": 9.731, + "step": 576 + }, + { + "epoch": 1.0034812880765884, + "grad_norm": 0.490234375, + "learning_rate": 5.381170639172955e-06, + "loss": 0.947, + "step": 577 + }, + { + "epoch": 1.0052219321148825, + "grad_norm": 0.490234375, + "learning_rate": 5.366813210495067e-06, + "loss": 0.8777, + "step": 578 + }, + { + "epoch": 1.0069625761531766, + "grad_norm": 0.52734375, + "learning_rate": 5.352452740267515e-06, + "loss": 1.0166, + "step": 579 + }, + { + "epoch": 1.0087032201914707, + "grad_norm": 0.486328125, + "learning_rate": 5.338089347564757e-06, + "loss": 0.9429, + "step": 580 + }, + { + "epoch": 1.010443864229765, + "grad_norm": 0.498046875, + "learning_rate": 5.323723151485477e-06, + "loss": 0.8975, + "step": 581 + }, + { + "epoch": 1.0121845082680592, + "grad_norm": 0.484375, + "learning_rate": 5.309354271151613e-06, + "loss": 0.8804, + "step": 582 + }, + { + "epoch": 1.0139251523063533, + "grad_norm": 0.478515625, + "learning_rate": 5.294982825707352e-06, + "loss": 0.9551, + "step": 583 + }, + { + "epoch": 1.0156657963446476, + "grad_norm": 0.51171875, + "learning_rate": 5.280608934318157e-06, + "loss": 0.8975, + "step": 584 + }, + { + "epoch": 1.0174064403829417, + "grad_norm": 0.484375, + "learning_rate": 5.266232716169769e-06, + "loss": 0.9317, + "step": 585 + }, + { + "epoch": 1.0191470844212358, + "grad_norm": 0.490234375, + "learning_rate": 5.251854290467221e-06, + "loss": 0.9401, + "step": 586 + }, + { + "epoch": 1.0208877284595301, + "grad_norm": 0.4921875, + "learning_rate": 5.237473776433854e-06, + "loss": 0.8991, + "step": 587 + }, + { + "epoch": 1.0226283724978242, + "grad_norm": 0.51171875, + "learning_rate": 5.223091293310324e-06, + "loss": 0.9414, + "step": 588 + }, + { + "epoch": 1.0243690165361183, + "grad_norm": 0.49609375, + "learning_rate": 5.208706960353611e-06, + "loss": 0.9756, + "step": 589 + }, + { + "epoch": 1.0261096605744124, + "grad_norm": 0.474609375, + "learning_rate": 5.194320896836039e-06, + "loss": 0.9493, + "step": 590 + }, + { + "epoch": 1.0278503046127068, + "grad_norm": 0.45703125, + "learning_rate": 5.1799332220442776e-06, + "loss": 0.8799, + "step": 591 + }, + { + "epoch": 1.0295909486510009, + "grad_norm": 0.515625, + "learning_rate": 5.165544055278359e-06, + "loss": 0.9089, + "step": 592 + }, + { + "epoch": 1.031331592689295, + "grad_norm": 0.490234375, + "learning_rate": 
5.151153515850682e-06, + "loss": 0.9088, + "step": 593 + }, + { + "epoch": 1.0330722367275893, + "grad_norm": 0.49609375, + "learning_rate": 5.136761723085035e-06, + "loss": 0.9372, + "step": 594 + }, + { + "epoch": 1.0348128807658834, + "grad_norm": 0.490234375, + "learning_rate": 5.122368796315591e-06, + "loss": 0.9711, + "step": 595 + }, + { + "epoch": 1.0365535248041775, + "grad_norm": 0.50390625, + "learning_rate": 5.1079748548859335e-06, + "loss": 0.8852, + "step": 596 + }, + { + "epoch": 1.0382941688424716, + "grad_norm": 0.474609375, + "learning_rate": 5.093580018148052e-06, + "loss": 0.9251, + "step": 597 + }, + { + "epoch": 1.040034812880766, + "grad_norm": 0.494140625, + "learning_rate": 5.079184405461365e-06, + "loss": 0.9595, + "step": 598 + }, + { + "epoch": 1.04177545691906, + "grad_norm": 0.48828125, + "learning_rate": 5.064788136191723e-06, + "loss": 0.933, + "step": 599 + }, + { + "epoch": 1.0435161009573541, + "grad_norm": 0.478515625, + "learning_rate": 5.05039132971042e-06, + "loss": 0.8658, + "step": 600 + }, + { + "epoch": 1.0452567449956485, + "grad_norm": 0.486328125, + "learning_rate": 5.035994105393206e-06, + "loss": 0.952, + "step": 601 + }, + { + "epoch": 1.0469973890339426, + "grad_norm": 0.49609375, + "learning_rate": 5.0215965826192954e-06, + "loss": 0.9343, + "step": 602 + }, + { + "epoch": 1.0487380330722367, + "grad_norm": 0.4609375, + "learning_rate": 5.007198880770378e-06, + "loss": 0.9541, + "step": 603 + }, + { + "epoch": 1.050478677110531, + "grad_norm": 0.498046875, + "learning_rate": 4.992801119229624e-06, + "loss": 0.9273, + "step": 604 + }, + { + "epoch": 1.052219321148825, + "grad_norm": 0.5078125, + "learning_rate": 4.978403417380706e-06, + "loss": 0.9089, + "step": 605 + }, + { + "epoch": 1.0539599651871192, + "grad_norm": 0.48828125, + "learning_rate": 4.964005894606796e-06, + "loss": 0.8991, + "step": 606 + }, + { + "epoch": 1.0557006092254133, + "grad_norm": 0.5078125, + "learning_rate": 4.949608670289582e-06, + "loss": 0.9742, + "step": 607 + }, + { + "epoch": 1.0574412532637076, + "grad_norm": 0.486328125, + "learning_rate": 4.935211863808279e-06, + "loss": 0.9682, + "step": 608 + }, + { + "epoch": 1.0591818973020017, + "grad_norm": 0.4921875, + "learning_rate": 4.920815594538635e-06, + "loss": 0.9551, + "step": 609 + }, + { + "epoch": 1.0609225413402958, + "grad_norm": 0.4765625, + "learning_rate": 4.90641998185195e-06, + "loss": 0.9374, + "step": 610 + }, + { + "epoch": 1.0626631853785902, + "grad_norm": 0.4921875, + "learning_rate": 4.892025145114067e-06, + "loss": 0.9252, + "step": 611 + }, + { + "epoch": 1.0644038294168843, + "grad_norm": 0.474609375, + "learning_rate": 4.8776312036844106e-06, + "loss": 0.9088, + "step": 612 + }, + { + "epoch": 1.0661444734551784, + "grad_norm": 0.5078125, + "learning_rate": 4.8632382769149665e-06, + "loss": 0.9063, + "step": 613 + }, + { + "epoch": 1.0678851174934727, + "grad_norm": 0.486328125, + "learning_rate": 4.84884648414932e-06, + "loss": 0.9153, + "step": 614 + }, + { + "epoch": 1.0696257615317668, + "grad_norm": 0.486328125, + "learning_rate": 4.834455944721643e-06, + "loss": 0.8917, + "step": 615 + }, + { + "epoch": 1.071366405570061, + "grad_norm": 0.5078125, + "learning_rate": 4.820066777955725e-06, + "loss": 0.8974, + "step": 616 + }, + { + "epoch": 1.073107049608355, + "grad_norm": 0.494140625, + "learning_rate": 4.8056791031639615e-06, + "loss": 0.8868, + "step": 617 + }, + { + "epoch": 1.0748476936466493, + "grad_norm": 0.51953125, + "learning_rate": 4.7912930396463895e-06, + "loss": 
0.8745, + "step": 618 + }, + { + "epoch": 1.0765883376849434, + "grad_norm": 0.478515625, + "learning_rate": 4.776908706689678e-06, + "loss": 0.9591, + "step": 619 + }, + { + "epoch": 1.0783289817232375, + "grad_norm": 0.490234375, + "learning_rate": 4.762526223566147e-06, + "loss": 0.926, + "step": 620 + }, + { + "epoch": 1.0800696257615319, + "grad_norm": 0.51171875, + "learning_rate": 4.74814570953278e-06, + "loss": 0.9114, + "step": 621 + }, + { + "epoch": 1.081810269799826, + "grad_norm": 0.4765625, + "learning_rate": 4.733767283830233e-06, + "loss": 0.901, + "step": 622 + }, + { + "epoch": 1.08355091383812, + "grad_norm": 0.494140625, + "learning_rate": 4.719391065681845e-06, + "loss": 0.9164, + "step": 623 + }, + { + "epoch": 1.0852915578764142, + "grad_norm": 0.490234375, + "learning_rate": 4.7050171742926496e-06, + "loss": 0.8907, + "step": 624 + }, + { + "epoch": 1.0870322019147085, + "grad_norm": 0.5, + "learning_rate": 4.690645728848389e-06, + "loss": 0.9179, + "step": 625 + }, + { + "epoch": 1.0887728459530026, + "grad_norm": 0.498046875, + "learning_rate": 4.6762768485145245e-06, + "loss": 0.8941, + "step": 626 + }, + { + "epoch": 1.0905134899912967, + "grad_norm": 0.494140625, + "learning_rate": 4.661910652435245e-06, + "loss": 0.907, + "step": 627 + }, + { + "epoch": 1.092254134029591, + "grad_norm": 0.4765625, + "learning_rate": 4.647547259732486e-06, + "loss": 0.9632, + "step": 628 + }, + { + "epoch": 1.0939947780678851, + "grad_norm": 0.49609375, + "learning_rate": 4.6331867895049335e-06, + "loss": 0.9523, + "step": 629 + }, + { + "epoch": 1.0957354221061792, + "grad_norm": 0.49609375, + "learning_rate": 4.618829360827046e-06, + "loss": 0.8906, + "step": 630 + }, + { + "epoch": 1.0974760661444734, + "grad_norm": 0.5078125, + "learning_rate": 4.6044750927480576e-06, + "loss": 0.8999, + "step": 631 + }, + { + "epoch": 1.0992167101827677, + "grad_norm": 0.5, + "learning_rate": 4.590124104290997e-06, + "loss": 0.9345, + "step": 632 + }, + { + "epoch": 1.1009573542210618, + "grad_norm": 0.486328125, + "learning_rate": 4.5757765144516986e-06, + "loss": 0.8744, + "step": 633 + }, + { + "epoch": 1.1026979982593559, + "grad_norm": 0.5, + "learning_rate": 4.561432442197817e-06, + "loss": 0.9158, + "step": 634 + }, + { + "epoch": 1.1044386422976502, + "grad_norm": 0.50390625, + "learning_rate": 4.5470920064678375e-06, + "loss": 0.8859, + "step": 635 + }, + { + "epoch": 1.1061792863359443, + "grad_norm": 0.50390625, + "learning_rate": 4.532755326170093e-06, + "loss": 0.9125, + "step": 636 + }, + { + "epoch": 1.1079199303742384, + "grad_norm": 0.5, + "learning_rate": 4.518422520181782e-06, + "loss": 0.974, + "step": 637 + }, + { + "epoch": 1.1096605744125327, + "grad_norm": 0.4921875, + "learning_rate": 4.504093707347966e-06, + "loss": 0.9758, + "step": 638 + }, + { + "epoch": 1.1114012184508268, + "grad_norm": 0.484375, + "learning_rate": 4.489769006480611e-06, + "loss": 0.9069, + "step": 639 + }, + { + "epoch": 1.113141862489121, + "grad_norm": 0.494140625, + "learning_rate": 4.475448536357573e-06, + "loss": 0.8795, + "step": 640 + }, + { + "epoch": 1.114882506527415, + "grad_norm": 0.4921875, + "learning_rate": 4.461132415721637e-06, + "loss": 0.9399, + "step": 641 + }, + { + "epoch": 1.1166231505657094, + "grad_norm": 0.484375, + "learning_rate": 4.44682076327952e-06, + "loss": 0.8988, + "step": 642 + }, + { + "epoch": 1.1183637946040035, + "grad_norm": 0.5078125, + "learning_rate": 4.43251369770089e-06, + "loss": 0.9101, + "step": 643 + }, + { + "epoch": 1.1201044386422976, + 
"grad_norm": 0.51953125, + "learning_rate": 4.418211337617377e-06, + "loss": 0.9539, + "step": 644 + }, + { + "epoch": 1.121845082680592, + "grad_norm": 0.490234375, + "learning_rate": 4.403913801621602e-06, + "loss": 0.9267, + "step": 645 + }, + { + "epoch": 1.123585726718886, + "grad_norm": 0.5234375, + "learning_rate": 4.38962120826618e-06, + "loss": 0.9168, + "step": 646 + }, + { + "epoch": 1.1253263707571801, + "grad_norm": 0.4765625, + "learning_rate": 4.375333676062743e-06, + "loss": 1.0016, + "step": 647 + }, + { + "epoch": 1.1270670147954744, + "grad_norm": 0.48046875, + "learning_rate": 4.36105132348096e-06, + "loss": 0.9072, + "step": 648 + }, + { + "epoch": 1.1270670147954744, + "eval_loss": 0.9198443293571472, + "eval_runtime": 59.2529, + "eval_samples_per_second": 79.119, + "eval_steps_per_second": 9.89, + "step": 648 + }, + { + "epoch": 1.1288076588337685, + "grad_norm": 0.4921875, + "learning_rate": 4.346774268947546e-06, + "loss": 0.8867, + "step": 649 + }, + { + "epoch": 1.1305483028720626, + "grad_norm": 0.482421875, + "learning_rate": 4.33250263084529e-06, + "loss": 0.9207, + "step": 650 + }, + { + "epoch": 1.1322889469103568, + "grad_norm": 0.46875, + "learning_rate": 4.3182365275120675e-06, + "loss": 0.9792, + "step": 651 + }, + { + "epoch": 1.134029590948651, + "grad_norm": 0.5078125, + "learning_rate": 4.303976077239862e-06, + "loss": 0.8846, + "step": 652 + }, + { + "epoch": 1.1357702349869452, + "grad_norm": 0.482421875, + "learning_rate": 4.289721398273779e-06, + "loss": 0.9415, + "step": 653 + }, + { + "epoch": 1.1375108790252393, + "grad_norm": 0.4921875, + "learning_rate": 4.2754726088110736e-06, + "loss": 0.9083, + "step": 654 + }, + { + "epoch": 1.1392515230635336, + "grad_norm": 0.494140625, + "learning_rate": 4.261229827000166e-06, + "loss": 1.0001, + "step": 655 + }, + { + "epoch": 1.1409921671018277, + "grad_norm": 0.51171875, + "learning_rate": 4.2469931709396574e-06, + "loss": 0.8853, + "step": 656 + }, + { + "epoch": 1.1427328111401218, + "grad_norm": 0.478515625, + "learning_rate": 4.232762758677362e-06, + "loss": 0.9153, + "step": 657 + }, + { + "epoch": 1.144473455178416, + "grad_norm": 0.48046875, + "learning_rate": 4.2185387082093134e-06, + "loss": 0.9752, + "step": 658 + }, + { + "epoch": 1.1462140992167102, + "grad_norm": 0.50390625, + "learning_rate": 4.204321137478806e-06, + "loss": 0.9251, + "step": 659 + }, + { + "epoch": 1.1479547432550044, + "grad_norm": 0.490234375, + "learning_rate": 4.1901101643753905e-06, + "loss": 0.9761, + "step": 660 + }, + { + "epoch": 1.1496953872932985, + "grad_norm": 0.51171875, + "learning_rate": 4.175905906733925e-06, + "loss": 0.8988, + "step": 661 + }, + { + "epoch": 1.1514360313315928, + "grad_norm": 0.5, + "learning_rate": 4.1617084823335755e-06, + "loss": 0.9451, + "step": 662 + }, + { + "epoch": 1.1531766753698869, + "grad_norm": 0.51171875, + "learning_rate": 4.147518008896851e-06, + "loss": 0.9488, + "step": 663 + }, + { + "epoch": 1.154917319408181, + "grad_norm": 0.4921875, + "learning_rate": 4.1333346040886255e-06, + "loss": 0.9257, + "step": 664 + }, + { + "epoch": 1.156657963446475, + "grad_norm": 0.484375, + "learning_rate": 4.119158385515159e-06, + "loss": 0.9016, + "step": 665 + }, + { + "epoch": 1.1583986074847694, + "grad_norm": 0.50390625, + "learning_rate": 4.104989470723126e-06, + "loss": 0.9013, + "step": 666 + }, + { + "epoch": 1.1601392515230635, + "grad_norm": 0.486328125, + "learning_rate": 4.090827977198638e-06, + "loss": 0.8923, + "step": 667 + }, + { + "epoch": 1.1618798955613576, 
+ "grad_norm": 0.494140625, + "learning_rate": 4.076674022366272e-06, + "loss": 0.9282, + "step": 668 + }, + { + "epoch": 1.163620539599652, + "grad_norm": 0.49609375, + "learning_rate": 4.062527723588094e-06, + "loss": 0.9434, + "step": 669 + }, + { + "epoch": 1.165361183637946, + "grad_norm": 0.47265625, + "learning_rate": 4.048389198162689e-06, + "loss": 0.914, + "step": 670 + }, + { + "epoch": 1.1671018276762402, + "grad_norm": 0.486328125, + "learning_rate": 4.034258563324185e-06, + "loss": 0.9239, + "step": 671 + }, + { + "epoch": 1.1688424717145343, + "grad_norm": 0.474609375, + "learning_rate": 4.020135936241285e-06, + "loss": 0.9558, + "step": 672 + }, + { + "epoch": 1.1705831157528286, + "grad_norm": 0.48828125, + "learning_rate": 4.0060214340162926e-06, + "loss": 0.9596, + "step": 673 + }, + { + "epoch": 1.1723237597911227, + "grad_norm": 0.50390625, + "learning_rate": 3.991915173684138e-06, + "loss": 0.9856, + "step": 674 + }, + { + "epoch": 1.1740644038294168, + "grad_norm": 0.515625, + "learning_rate": 3.977817272211417e-06, + "loss": 0.8879, + "step": 675 + }, + { + "epoch": 1.1758050478677111, + "grad_norm": 0.494140625, + "learning_rate": 3.9637278464954095e-06, + "loss": 0.9207, + "step": 676 + }, + { + "epoch": 1.1775456919060052, + "grad_norm": 0.4921875, + "learning_rate": 3.94964701336312e-06, + "loss": 0.9283, + "step": 677 + }, + { + "epoch": 1.1792863359442993, + "grad_norm": 0.51171875, + "learning_rate": 3.935574889570302e-06, + "loss": 0.907, + "step": 678 + }, + { + "epoch": 1.1810269799825936, + "grad_norm": 0.49609375, + "learning_rate": 3.921511591800494e-06, + "loss": 0.9033, + "step": 679 + }, + { + "epoch": 1.1827676240208878, + "grad_norm": 0.5, + "learning_rate": 3.907457236664055e-06, + "loss": 0.9185, + "step": 680 + }, + { + "epoch": 1.1845082680591819, + "grad_norm": 0.498046875, + "learning_rate": 3.893411940697182e-06, + "loss": 0.9276, + "step": 681 + }, + { + "epoch": 1.1862489120974762, + "grad_norm": 0.494140625, + "learning_rate": 3.879375820360969e-06, + "loss": 0.9474, + "step": 682 + }, + { + "epoch": 1.1879895561357703, + "grad_norm": 0.494140625, + "learning_rate": 3.8653489920404136e-06, + "loss": 0.9302, + "step": 683 + }, + { + "epoch": 1.1897302001740644, + "grad_norm": 0.498046875, + "learning_rate": 3.851331572043479e-06, + "loss": 0.9128, + "step": 684 + }, + { + "epoch": 1.1914708442123585, + "grad_norm": 0.494140625, + "learning_rate": 3.837323676600104e-06, + "loss": 0.9019, + "step": 685 + }, + { + "epoch": 1.1932114882506528, + "grad_norm": 0.490234375, + "learning_rate": 3.823325421861261e-06, + "loss": 0.9108, + "step": 686 + }, + { + "epoch": 1.194952132288947, + "grad_norm": 0.4921875, + "learning_rate": 3.809336923897977e-06, + "loss": 0.9521, + "step": 687 + }, + { + "epoch": 1.196692776327241, + "grad_norm": 0.4921875, + "learning_rate": 3.7953582987003805e-06, + "loss": 0.9103, + "step": 688 + }, + { + "epoch": 1.1984334203655354, + "grad_norm": 0.49609375, + "learning_rate": 3.781389662176739e-06, + "loss": 0.9666, + "step": 689 + }, + { + "epoch": 1.2001740644038295, + "grad_norm": 0.474609375, + "learning_rate": 3.7674311301524903e-06, + "loss": 0.9084, + "step": 690 + }, + { + "epoch": 1.2019147084421236, + "grad_norm": 0.48046875, + "learning_rate": 3.753482818369291e-06, + "loss": 0.9699, + "step": 691 + }, + { + "epoch": 1.2036553524804177, + "grad_norm": 0.46484375, + "learning_rate": 3.739544842484053e-06, + "loss": 0.9993, + "step": 692 + }, + { + "epoch": 1.205395996518712, + "grad_norm": 0.474609375, + 
"learning_rate": 3.7256173180679844e-06, + "loss": 0.9637, + "step": 693 + }, + { + "epoch": 1.207136640557006, + "grad_norm": 0.482421875, + "learning_rate": 3.7117003606056303e-06, + "loss": 0.9365, + "step": 694 + }, + { + "epoch": 1.2088772845953002, + "grad_norm": 0.490234375, + "learning_rate": 3.6977940854939193e-06, + "loss": 0.9151, + "step": 695 + }, + { + "epoch": 1.2106179286335945, + "grad_norm": 0.50390625, + "learning_rate": 3.683898608041201e-06, + "loss": 0.9365, + "step": 696 + }, + { + "epoch": 1.2123585726718886, + "grad_norm": 0.515625, + "learning_rate": 3.6700140434662936e-06, + "loss": 0.9364, + "step": 697 + }, + { + "epoch": 1.2140992167101827, + "grad_norm": 0.4765625, + "learning_rate": 3.656140506897532e-06, + "loss": 0.9882, + "step": 698 + }, + { + "epoch": 1.2158398607484768, + "grad_norm": 0.46875, + "learning_rate": 3.6422781133717987e-06, + "loss": 0.9437, + "step": 699 + }, + { + "epoch": 1.2175805047867712, + "grad_norm": 0.48828125, + "learning_rate": 3.6284269778335933e-06, + "loss": 0.9138, + "step": 700 + }, + { + "epoch": 1.2193211488250653, + "grad_norm": 0.486328125, + "learning_rate": 3.6145872151340523e-06, + "loss": 0.9435, + "step": 701 + }, + { + "epoch": 1.2210617928633594, + "grad_norm": 0.46875, + "learning_rate": 3.600758940030024e-06, + "loss": 0.9106, + "step": 702 + }, + { + "epoch": 1.2228024369016537, + "grad_norm": 0.51171875, + "learning_rate": 3.5869422671830913e-06, + "loss": 0.9317, + "step": 703 + }, + { + "epoch": 1.2245430809399478, + "grad_norm": 0.498046875, + "learning_rate": 3.5731373111586443e-06, + "loss": 0.9219, + "step": 704 + }, + { + "epoch": 1.226283724978242, + "grad_norm": 0.51953125, + "learning_rate": 3.5593441864249077e-06, + "loss": 0.9369, + "step": 705 + }, + { + "epoch": 1.228024369016536, + "grad_norm": 0.4765625, + "learning_rate": 3.545563007352016e-06, + "loss": 0.8967, + "step": 706 + }, + { + "epoch": 1.2297650130548303, + "grad_norm": 0.4921875, + "learning_rate": 3.531793888211044e-06, + "loss": 1.0215, + "step": 707 + }, + { + "epoch": 1.2315056570931244, + "grad_norm": 0.486328125, + "learning_rate": 3.5180369431730686e-06, + "loss": 0.911, + "step": 708 + }, + { + "epoch": 1.2332463011314185, + "grad_norm": 0.50390625, + "learning_rate": 3.504292286308224e-06, + "loss": 0.9061, + "step": 709 + }, + { + "epoch": 1.2349869451697129, + "grad_norm": 0.5078125, + "learning_rate": 3.4905600315847492e-06, + "loss": 0.9258, + "step": 710 + }, + { + "epoch": 1.236727589208007, + "grad_norm": 0.5, + "learning_rate": 3.4768402928680524e-06, + "loss": 0.9589, + "step": 711 + }, + { + "epoch": 1.238468233246301, + "grad_norm": 0.515625, + "learning_rate": 3.4631331839197542e-06, + "loss": 0.8786, + "step": 712 + }, + { + "epoch": 1.2402088772845954, + "grad_norm": 0.474609375, + "learning_rate": 3.4494388183967587e-06, + "loss": 0.9163, + "step": 713 + }, + { + "epoch": 1.2419495213228895, + "grad_norm": 0.484375, + "learning_rate": 3.4357573098502972e-06, + "loss": 0.898, + "step": 714 + }, + { + "epoch": 1.2436901653611836, + "grad_norm": 0.53515625, + "learning_rate": 3.4220887717249986e-06, + "loss": 0.8827, + "step": 715 + }, + { + "epoch": 1.245430809399478, + "grad_norm": 0.4921875, + "learning_rate": 3.4084333173579403e-06, + "loss": 0.9424, + "step": 716 + }, + { + "epoch": 1.247171453437772, + "grad_norm": 0.48046875, + "learning_rate": 3.3947910599777123e-06, + "loss": 0.9472, + "step": 717 + }, + { + "epoch": 1.2489120974760661, + "grad_norm": 0.4921875, + "learning_rate": 
3.3811621127034788e-06, + "loss": 0.9097, + "step": 718 + }, + { + "epoch": 1.2506527415143602, + "grad_norm": 0.498046875, + "learning_rate": 3.3675465885440352e-06, + "loss": 0.9554, + "step": 719 + }, + { + "epoch": 1.2523933855526546, + "grad_norm": 0.494140625, + "learning_rate": 3.35394460039688e-06, + "loss": 0.893, + "step": 720 + }, + { + "epoch": 1.2523933855526546, + "eval_loss": 0.9190898537635803, + "eval_runtime": 59.5108, + "eval_samples_per_second": 78.776, + "eval_steps_per_second": 9.847, + "step": 720 + }, + { + "epoch": 1.2541340295909487, + "grad_norm": 0.494140625, + "learning_rate": 3.340356261047269e-06, + "loss": 0.9303, + "step": 721 + }, + { + "epoch": 1.2558746736292428, + "grad_norm": 0.51171875, + "learning_rate": 3.3267816831672884e-06, + "loss": 0.9278, + "step": 722 + }, + { + "epoch": 1.257615317667537, + "grad_norm": 0.5078125, + "learning_rate": 3.3132209793149127e-06, + "loss": 0.938, + "step": 723 + }, + { + "epoch": 1.2593559617058312, + "grad_norm": 0.474609375, + "learning_rate": 3.2996742619330776e-06, + "loss": 0.8702, + "step": 724 + }, + { + "epoch": 1.2610966057441253, + "grad_norm": 0.515625, + "learning_rate": 3.286141643348748e-06, + "loss": 0.8824, + "step": 725 + }, + { + "epoch": 1.2628372497824194, + "grad_norm": 0.50390625, + "learning_rate": 3.2726232357719802e-06, + "loss": 0.9235, + "step": 726 + }, + { + "epoch": 1.2645778938207137, + "grad_norm": 0.5, + "learning_rate": 3.259119151294997e-06, + "loss": 0.8961, + "step": 727 + }, + { + "epoch": 1.2663185378590078, + "grad_norm": 0.494140625, + "learning_rate": 3.2456295018912553e-06, + "loss": 0.9248, + "step": 728 + }, + { + "epoch": 1.268059181897302, + "grad_norm": 0.5, + "learning_rate": 3.232154399414521e-06, + "loss": 0.9537, + "step": 729 + }, + { + "epoch": 1.2697998259355963, + "grad_norm": 0.4921875, + "learning_rate": 3.218693955597938e-06, + "loss": 0.9538, + "step": 730 + }, + { + "epoch": 1.2715404699738904, + "grad_norm": 0.51171875, + "learning_rate": 3.2052482820531037e-06, + "loss": 0.9422, + "step": 731 + }, + { + "epoch": 1.2732811140121845, + "grad_norm": 0.51953125, + "learning_rate": 3.1918174902691432e-06, + "loss": 0.8778, + "step": 732 + }, + { + "epoch": 1.2750217580504786, + "grad_norm": 0.494140625, + "learning_rate": 3.178401691611784e-06, + "loss": 1.0258, + "step": 733 + }, + { + "epoch": 1.276762402088773, + "grad_norm": 0.5078125, + "learning_rate": 3.1650009973224357e-06, + "loss": 0.9427, + "step": 734 + }, + { + "epoch": 1.278503046127067, + "grad_norm": 0.50390625, + "learning_rate": 3.1516155185172604e-06, + "loss": 0.9096, + "step": 735 + }, + { + "epoch": 1.280243690165361, + "grad_norm": 0.494140625, + "learning_rate": 3.138245366186261e-06, + "loss": 0.9617, + "step": 736 + }, + { + "epoch": 1.2819843342036554, + "grad_norm": 0.490234375, + "learning_rate": 3.1248906511923527e-06, + "loss": 0.9608, + "step": 737 + }, + { + "epoch": 1.2837249782419495, + "grad_norm": 0.52734375, + "learning_rate": 3.11155148427045e-06, + "loss": 0.9023, + "step": 738 + }, + { + "epoch": 1.2854656222802436, + "grad_norm": 0.51953125, + "learning_rate": 3.0982279760265423e-06, + "loss": 0.9068, + "step": 739 + }, + { + "epoch": 1.2872062663185377, + "grad_norm": 0.4921875, + "learning_rate": 3.0849202369367833e-06, + "loss": 0.9116, + "step": 740 + }, + { + "epoch": 1.288946910356832, + "grad_norm": 0.48828125, + "learning_rate": 3.071628377346567e-06, + "loss": 0.9162, + "step": 741 + }, + { + "epoch": 1.2906875543951262, + "grad_norm": 0.482421875, + 
"learning_rate": 3.058352507469623e-06, + "loss": 0.9335, + "step": 742 + }, + { + "epoch": 1.2924281984334205, + "grad_norm": 0.5, + "learning_rate": 3.045092737387096e-06, + "loss": 0.9228, + "step": 743 + }, + { + "epoch": 1.2941688424717146, + "grad_norm": 0.490234375, + "learning_rate": 3.0318491770466274e-06, + "loss": 0.9664, + "step": 744 + }, + { + "epoch": 1.2959094865100087, + "grad_norm": 0.5, + "learning_rate": 3.0186219362614623e-06, + "loss": 0.9353, + "step": 745 + }, + { + "epoch": 1.2976501305483028, + "grad_norm": 0.48046875, + "learning_rate": 3.0054111247095135e-06, + "loss": 0.9129, + "step": 746 + }, + { + "epoch": 1.299390774586597, + "grad_norm": 0.474609375, + "learning_rate": 2.992216851932478e-06, + "loss": 0.8836, + "step": 747 + }, + { + "epoch": 1.3011314186248912, + "grad_norm": 0.484375, + "learning_rate": 2.979039227334905e-06, + "loss": 0.9318, + "step": 748 + }, + { + "epoch": 1.3028720626631853, + "grad_norm": 0.474609375, + "learning_rate": 2.9658783601833108e-06, + "loss": 0.9084, + "step": 749 + }, + { + "epoch": 1.3046127067014797, + "grad_norm": 0.49609375, + "learning_rate": 2.9527343596052516e-06, + "loss": 0.9117, + "step": 750 + }, + { + "epoch": 1.3063533507397738, + "grad_norm": 0.5, + "learning_rate": 2.9396073345884362e-06, + "loss": 0.8702, + "step": 751 + }, + { + "epoch": 1.3080939947780679, + "grad_norm": 0.51171875, + "learning_rate": 2.9264973939798143e-06, + "loss": 0.9101, + "step": 752 + }, + { + "epoch": 1.309834638816362, + "grad_norm": 0.5, + "learning_rate": 2.9134046464846706e-06, + "loss": 1.0054, + "step": 753 + }, + { + "epoch": 1.3115752828546563, + "grad_norm": 0.50390625, + "learning_rate": 2.9003292006657336e-06, + "loss": 0.9377, + "step": 754 + }, + { + "epoch": 1.3133159268929504, + "grad_norm": 0.5078125, + "learning_rate": 2.8872711649422592e-06, + "loss": 0.889, + "step": 755 + }, + { + "epoch": 1.3150565709312445, + "grad_norm": 0.4921875, + "learning_rate": 2.8742306475891574e-06, + "loss": 0.9281, + "step": 756 + }, + { + "epoch": 1.3167972149695388, + "grad_norm": 0.494140625, + "learning_rate": 2.861207756736064e-06, + "loss": 0.999, + "step": 757 + }, + { + "epoch": 1.318537859007833, + "grad_norm": 0.482421875, + "learning_rate": 2.84820260036647e-06, + "loss": 0.9738, + "step": 758 + }, + { + "epoch": 1.320278503046127, + "grad_norm": 0.5, + "learning_rate": 2.8352152863168036e-06, + "loss": 0.9049, + "step": 759 + }, + { + "epoch": 1.3220191470844211, + "grad_norm": 0.4921875, + "learning_rate": 2.8222459222755603e-06, + "loss": 0.9404, + "step": 760 + }, + { + "epoch": 1.3237597911227155, + "grad_norm": 0.5, + "learning_rate": 2.809294615782393e-06, + "loss": 0.9116, + "step": 761 + }, + { + "epoch": 1.3255004351610096, + "grad_norm": 0.5078125, + "learning_rate": 2.796361474227219e-06, + "loss": 0.9634, + "step": 762 + }, + { + "epoch": 1.3272410791993037, + "grad_norm": 0.5, + "learning_rate": 2.7834466048493402e-06, + "loss": 0.9164, + "step": 763 + }, + { + "epoch": 1.328981723237598, + "grad_norm": 0.515625, + "learning_rate": 2.77055011473655e-06, + "loss": 0.9315, + "step": 764 + }, + { + "epoch": 1.330722367275892, + "grad_norm": 0.490234375, + "learning_rate": 2.757672110824242e-06, + "loss": 0.8546, + "step": 765 + }, + { + "epoch": 1.3324630113141862, + "grad_norm": 0.51953125, + "learning_rate": 2.744812699894524e-06, + "loss": 0.9433, + "step": 766 + }, + { + "epoch": 1.3342036553524803, + "grad_norm": 0.5078125, + "learning_rate": 2.7319719885753373e-06, + "loss": 0.8915, + "step": 767 + }, 
+ { + "epoch": 1.3359442993907746, + "grad_norm": 0.49609375, + "learning_rate": 2.7191500833395675e-06, + "loss": 0.9298, + "step": 768 + }, + { + "epoch": 1.3376849434290687, + "grad_norm": 0.498046875, + "learning_rate": 2.7063470905041643e-06, + "loss": 0.8648, + "step": 769 + }, + { + "epoch": 1.3394255874673628, + "grad_norm": 0.49609375, + "learning_rate": 2.693563116229261e-06, + "loss": 0.9184, + "step": 770 + }, + { + "epoch": 1.3411662315056572, + "grad_norm": 0.50390625, + "learning_rate": 2.680798266517286e-06, + "loss": 0.8677, + "step": 771 + }, + { + "epoch": 1.3429068755439513, + "grad_norm": 0.478515625, + "learning_rate": 2.6680526472120972e-06, + "loss": 1.0233, + "step": 772 + }, + { + "epoch": 1.3446475195822454, + "grad_norm": 0.51171875, + "learning_rate": 2.6553263639980943e-06, + "loss": 0.9575, + "step": 773 + }, + { + "epoch": 1.3463881636205395, + "grad_norm": 0.494140625, + "learning_rate": 2.642619522399348e-06, + "loss": 0.8853, + "step": 774 + }, + { + "epoch": 1.3481288076588338, + "grad_norm": 0.5, + "learning_rate": 2.6299322277787165e-06, + "loss": 0.9374, + "step": 775 + }, + { + "epoch": 1.349869451697128, + "grad_norm": 0.474609375, + "learning_rate": 2.6172645853369845e-06, + "loss": 0.9011, + "step": 776 + }, + { + "epoch": 1.3516100957354222, + "grad_norm": 0.486328125, + "learning_rate": 2.6046167001119816e-06, + "loss": 0.8959, + "step": 777 + }, + { + "epoch": 1.3533507397737163, + "grad_norm": 0.5078125, + "learning_rate": 2.5919886769777135e-06, + "loss": 0.9115, + "step": 778 + }, + { + "epoch": 1.3550913838120104, + "grad_norm": 0.51953125, + "learning_rate": 2.579380620643498e-06, + "loss": 0.9052, + "step": 779 + }, + { + "epoch": 1.3568320278503045, + "grad_norm": 0.482421875, + "learning_rate": 2.56679263565308e-06, + "loss": 0.9862, + "step": 780 + }, + { + "epoch": 1.3585726718885986, + "grad_norm": 0.51171875, + "learning_rate": 2.554224826383792e-06, + "loss": 0.9081, + "step": 781 + }, + { + "epoch": 1.360313315926893, + "grad_norm": 0.50390625, + "learning_rate": 2.541677297045659e-06, + "loss": 0.9453, + "step": 782 + }, + { + "epoch": 1.362053959965187, + "grad_norm": 0.5078125, + "learning_rate": 2.5291501516805583e-06, + "loss": 0.9185, + "step": 783 + }, + { + "epoch": 1.3637946040034814, + "grad_norm": 0.4765625, + "learning_rate": 2.516643494161341e-06, + "loss": 0.8531, + "step": 784 + }, + { + "epoch": 1.3655352480417755, + "grad_norm": 0.5078125, + "learning_rate": 2.504157428190981e-06, + "loss": 0.9289, + "step": 785 + }, + { + "epoch": 1.3672758920800696, + "grad_norm": 0.490234375, + "learning_rate": 2.491692057301706e-06, + "loss": 0.9517, + "step": 786 + }, + { + "epoch": 1.3690165361183637, + "grad_norm": 0.5, + "learning_rate": 2.479247484854147e-06, + "loss": 0.9819, + "step": 787 + }, + { + "epoch": 1.370757180156658, + "grad_norm": 0.490234375, + "learning_rate": 2.466823814036477e-06, + "loss": 0.9332, + "step": 788 + }, + { + "epoch": 1.3724978241949521, + "grad_norm": 0.51171875, + "learning_rate": 2.454421147863558e-06, + "loss": 0.9445, + "step": 789 + }, + { + "epoch": 1.3742384682332462, + "grad_norm": 0.490234375, + "learning_rate": 2.442039589176085e-06, + "loss": 0.9218, + "step": 790 + }, + { + "epoch": 1.3759791122715406, + "grad_norm": 0.486328125, + "learning_rate": 2.429679240639729e-06, + "loss": 0.8956, + "step": 791 + }, + { + "epoch": 1.3777197563098347, + "grad_norm": 0.49609375, + "learning_rate": 2.417340204744295e-06, + "loss": 0.91, + "step": 792 + }, + { + "epoch": 1.3777197563098347, 
+ "eval_loss": 0.9186442494392395, + "eval_runtime": 60.5857, + "eval_samples_per_second": 77.378, + "eval_steps_per_second": 9.672, + "step": 792 + }, + { + "epoch": 1.3794604003481288, + "grad_norm": 0.494140625, + "learning_rate": 2.4050225838028675e-06, + "loss": 0.9799, + "step": 793 + }, + { + "epoch": 1.3812010443864229, + "grad_norm": 0.50390625, + "learning_rate": 2.392726479950961e-06, + "loss": 0.9271, + "step": 794 + }, + { + "epoch": 1.3829416884247172, + "grad_norm": 0.490234375, + "learning_rate": 2.38045199514567e-06, + "loss": 0.9499, + "step": 795 + }, + { + "epoch": 1.3846823324630113, + "grad_norm": 0.50390625, + "learning_rate": 2.368199231164832e-06, + "loss": 0.9364, + "step": 796 + }, + { + "epoch": 1.3864229765013054, + "grad_norm": 0.484375, + "learning_rate": 2.3559682896061847e-06, + "loss": 0.9068, + "step": 797 + }, + { + "epoch": 1.3881636205395997, + "grad_norm": 0.5, + "learning_rate": 2.3437592718865064e-06, + "loss": 0.9681, + "step": 798 + }, + { + "epoch": 1.3899042645778938, + "grad_norm": 0.498046875, + "learning_rate": 2.3315722792407964e-06, + "loss": 0.943, + "step": 799 + }, + { + "epoch": 1.391644908616188, + "grad_norm": 0.498046875, + "learning_rate": 2.3194074127214162e-06, + "loss": 0.939, + "step": 800 + }, + { + "epoch": 1.393385552654482, + "grad_norm": 0.515625, + "learning_rate": 2.3072647731972747e-06, + "loss": 0.9741, + "step": 801 + }, + { + "epoch": 1.3951261966927764, + "grad_norm": 0.494140625, + "learning_rate": 2.295144461352967e-06, + "loss": 0.9575, + "step": 802 + }, + { + "epoch": 1.3968668407310705, + "grad_norm": 0.5, + "learning_rate": 2.283046577687958e-06, + "loss": 0.921, + "step": 803 + }, + { + "epoch": 1.3986074847693646, + "grad_norm": 0.48046875, + "learning_rate": 2.270971222515735e-06, + "loss": 0.9488, + "step": 804 + }, + { + "epoch": 1.400348128807659, + "grad_norm": 0.494140625, + "learning_rate": 2.2589184959629917e-06, + "loss": 0.9236, + "step": 805 + }, + { + "epoch": 1.402088772845953, + "grad_norm": 0.5, + "learning_rate": 2.2468884979687864e-06, + "loss": 0.8477, + "step": 806 + }, + { + "epoch": 1.4038294168842471, + "grad_norm": 0.5078125, + "learning_rate": 2.234881328283711e-06, + "loss": 0.9209, + "step": 807 + }, + { + "epoch": 1.4055700609225412, + "grad_norm": 0.49609375, + "learning_rate": 2.2228970864690747e-06, + "loss": 0.9231, + "step": 808 + }, + { + "epoch": 1.4073107049608355, + "grad_norm": 0.50390625, + "learning_rate": 2.210935871896072e-06, + "loss": 0.94, + "step": 809 + }, + { + "epoch": 1.4090513489991296, + "grad_norm": 0.490234375, + "learning_rate": 2.1989977837449605e-06, + "loss": 0.9848, + "step": 810 + }, + { + "epoch": 1.410791993037424, + "grad_norm": 0.484375, + "learning_rate": 2.187082921004232e-06, + "loss": 0.947, + "step": 811 + }, + { + "epoch": 1.412532637075718, + "grad_norm": 0.515625, + "learning_rate": 2.1751913824698045e-06, + "loss": 0.9177, + "step": 812 + }, + { + "epoch": 1.4142732811140122, + "grad_norm": 0.4765625, + "learning_rate": 2.163323266744192e-06, + "loss": 0.9394, + "step": 813 + }, + { + "epoch": 1.4160139251523063, + "grad_norm": 0.494140625, + "learning_rate": 2.1514786722356918e-06, + "loss": 0.9399, + "step": 814 + }, + { + "epoch": 1.4177545691906004, + "grad_norm": 0.498046875, + "learning_rate": 2.139657697157569e-06, + "loss": 0.9303, + "step": 815 + }, + { + "epoch": 1.4194952132288947, + "grad_norm": 0.5, + "learning_rate": 2.1278604395272345e-06, + "loss": 0.8518, + "step": 816 + }, + { + "epoch": 1.4212358572671888, + 
"grad_norm": 0.490234375, + "learning_rate": 2.1160869971654464e-06, + "loss": 0.9786, + "step": 817 + }, + { + "epoch": 1.4229765013054831, + "grad_norm": 0.51171875, + "learning_rate": 2.104337467695485e-06, + "loss": 0.8946, + "step": 818 + }, + { + "epoch": 1.4247171453437772, + "grad_norm": 0.494140625, + "learning_rate": 2.0926119485423546e-06, + "loss": 0.9014, + "step": 819 + }, + { + "epoch": 1.4264577893820714, + "grad_norm": 0.490234375, + "learning_rate": 2.0809105369319628e-06, + "loss": 1.0124, + "step": 820 + }, + { + "epoch": 1.4281984334203655, + "grad_norm": 0.51171875, + "learning_rate": 2.069233329890326e-06, + "loss": 0.9119, + "step": 821 + }, + { + "epoch": 1.4299390774586598, + "grad_norm": 0.494140625, + "learning_rate": 2.057580424242766e-06, + "loss": 0.941, + "step": 822 + }, + { + "epoch": 1.4316797214969539, + "grad_norm": 0.5, + "learning_rate": 2.045951916613091e-06, + "loss": 0.9747, + "step": 823 + }, + { + "epoch": 1.433420365535248, + "grad_norm": 0.494140625, + "learning_rate": 2.034347903422812e-06, + "loss": 0.8826, + "step": 824 + }, + { + "epoch": 1.4351610095735423, + "grad_norm": 0.515625, + "learning_rate": 2.0227684808903353e-06, + "loss": 0.9225, + "step": 825 + }, + { + "epoch": 1.4369016536118364, + "grad_norm": 0.48828125, + "learning_rate": 2.011213745030167e-06, + "loss": 0.9086, + "step": 826 + }, + { + "epoch": 1.4386422976501305, + "grad_norm": 0.49609375, + "learning_rate": 1.999683791652111e-06, + "loss": 0.9156, + "step": 827 + }, + { + "epoch": 1.4403829416884246, + "grad_norm": 0.478515625, + "learning_rate": 1.9881787163604843e-06, + "loss": 0.9219, + "step": 828 + }, + { + "epoch": 1.442123585726719, + "grad_norm": 0.478515625, + "learning_rate": 1.976698614553318e-06, + "loss": 0.8968, + "step": 829 + }, + { + "epoch": 1.443864229765013, + "grad_norm": 0.49609375, + "learning_rate": 1.965243581421566e-06, + "loss": 0.9275, + "step": 830 + }, + { + "epoch": 1.4456048738033072, + "grad_norm": 0.498046875, + "learning_rate": 1.953813711948321e-06, + "loss": 0.9196, + "step": 831 + }, + { + "epoch": 1.4473455178416015, + "grad_norm": 0.494140625, + "learning_rate": 1.9424091009080157e-06, + "loss": 0.908, + "step": 832 + }, + { + "epoch": 1.4490861618798956, + "grad_norm": 0.478515625, + "learning_rate": 1.9310298428656523e-06, + "loss": 0.9035, + "step": 833 + }, + { + "epoch": 1.4508268059181897, + "grad_norm": 0.515625, + "learning_rate": 1.919676032176006e-06, + "loss": 0.8846, + "step": 834 + }, + { + "epoch": 1.4525674499564838, + "grad_norm": 0.515625, + "learning_rate": 1.908347762982851e-06, + "loss": 0.93, + "step": 835 + }, + { + "epoch": 1.4543080939947781, + "grad_norm": 0.4765625, + "learning_rate": 1.8970451292181691e-06, + "loss": 0.995, + "step": 836 + }, + { + "epoch": 1.4560487380330722, + "grad_norm": 0.5, + "learning_rate": 1.8857682246013842e-06, + "loss": 0.9633, + "step": 837 + }, + { + "epoch": 1.4577893820713663, + "grad_norm": 0.486328125, + "learning_rate": 1.8745171426385762e-06, + "loss": 0.9203, + "step": 838 + }, + { + "epoch": 1.4595300261096606, + "grad_norm": 0.490234375, + "learning_rate": 1.8632919766217077e-06, + "loss": 0.9304, + "step": 839 + }, + { + "epoch": 1.4612706701479548, + "grad_norm": 0.490234375, + "learning_rate": 1.8520928196278542e-06, + "loss": 0.8694, + "step": 840 + }, + { + "epoch": 1.4630113141862489, + "grad_norm": 0.5078125, + "learning_rate": 1.8409197645184207e-06, + "loss": 0.9058, + "step": 841 + }, + { + "epoch": 1.464751958224543, + "grad_norm": 0.498046875, + 
"learning_rate": 1.8297729039383932e-06, + "loss": 0.8851, + "step": 842 + }, + { + "epoch": 1.4664926022628373, + "grad_norm": 0.50390625, + "learning_rate": 1.818652330315544e-06, + "loss": 0.9536, + "step": 843 + }, + { + "epoch": 1.4682332463011314, + "grad_norm": 0.4921875, + "learning_rate": 1.8075581358596888e-06, + "loss": 0.9151, + "step": 844 + }, + { + "epoch": 1.4699738903394257, + "grad_norm": 0.46484375, + "learning_rate": 1.7964904125619016e-06, + "loss": 1.0108, + "step": 845 + }, + { + "epoch": 1.4717145343777198, + "grad_norm": 0.486328125, + "learning_rate": 1.7854492521937755e-06, + "loss": 0.9179, + "step": 846 + }, + { + "epoch": 1.473455178416014, + "grad_norm": 0.546875, + "learning_rate": 1.7744347463066365e-06, + "loss": 0.8852, + "step": 847 + }, + { + "epoch": 1.475195822454308, + "grad_norm": 0.478515625, + "learning_rate": 1.7634469862308028e-06, + "loss": 0.9305, + "step": 848 + }, + { + "epoch": 1.4769364664926021, + "grad_norm": 0.47265625, + "learning_rate": 1.7524860630748198e-06, + "loss": 0.8963, + "step": 849 + }, + { + "epoch": 1.4786771105308965, + "grad_norm": 0.4921875, + "learning_rate": 1.741552067724705e-06, + "loss": 0.9082, + "step": 850 + }, + { + "epoch": 1.4804177545691906, + "grad_norm": 0.51171875, + "learning_rate": 1.7306450908431987e-06, + "loss": 0.9025, + "step": 851 + }, + { + "epoch": 1.4821583986074849, + "grad_norm": 0.490234375, + "learning_rate": 1.719765222869002e-06, + "loss": 0.9398, + "step": 852 + }, + { + "epoch": 1.483899042645779, + "grad_norm": 0.494140625, + "learning_rate": 1.7089125540160405e-06, + "loss": 0.988, + "step": 853 + }, + { + "epoch": 1.485639686684073, + "grad_norm": 0.490234375, + "learning_rate": 1.6980871742727067e-06, + "loss": 0.9119, + "step": 854 + }, + { + "epoch": 1.4873803307223672, + "grad_norm": 0.4765625, + "learning_rate": 1.6872891734011194e-06, + "loss": 0.9317, + "step": 855 + }, + { + "epoch": 1.4891209747606615, + "grad_norm": 0.50390625, + "learning_rate": 1.6765186409363697e-06, + "loss": 0.937, + "step": 856 + }, + { + "epoch": 1.4908616187989556, + "grad_norm": 0.494140625, + "learning_rate": 1.6657756661857927e-06, + "loss": 0.8569, + "step": 857 + }, + { + "epoch": 1.4926022628372497, + "grad_norm": 0.5078125, + "learning_rate": 1.655060338228217e-06, + "loss": 0.9074, + "step": 858 + }, + { + "epoch": 1.494342906875544, + "grad_norm": 0.5, + "learning_rate": 1.6443727459132296e-06, + "loss": 0.9357, + "step": 859 + }, + { + "epoch": 1.4960835509138382, + "grad_norm": 0.5, + "learning_rate": 1.6337129778604405e-06, + "loss": 0.8929, + "step": 860 + }, + { + "epoch": 1.4978241949521323, + "grad_norm": 0.5234375, + "learning_rate": 1.623081122458739e-06, + "loss": 0.9023, + "step": 861 + }, + { + "epoch": 1.4995648389904264, + "grad_norm": 0.498046875, + "learning_rate": 1.6124772678655743e-06, + "loss": 0.9336, + "step": 862 + }, + { + "epoch": 1.5013054830287205, + "grad_norm": 0.5078125, + "learning_rate": 1.6019015020062162e-06, + "loss": 0.9337, + "step": 863 + }, + { + "epoch": 1.5030461270670148, + "grad_norm": 0.5, + "learning_rate": 1.5913539125730282e-06, + "loss": 0.9649, + "step": 864 + }, + { + "epoch": 1.5030461270670148, + "eval_loss": 0.9184313416481018, + "eval_runtime": 59.3854, + "eval_samples_per_second": 78.942, + "eval_steps_per_second": 9.868, + "step": 864 + }, + { + "epoch": 1.5047867711053091, + "grad_norm": 0.486328125, + "learning_rate": 1.5808345870247371e-06, + "loss": 0.9249, + "step": 865 + }, + { + "epoch": 1.5065274151436032, + "grad_norm": 
0.478515625, + "learning_rate": 1.5703436125857119e-06, + "loss": 0.9123, + "step": 866 + }, + { + "epoch": 1.5082680591818973, + "grad_norm": 0.51171875, + "learning_rate": 1.5598810762452443e-06, + "loss": 0.9466, + "step": 867 + }, + { + "epoch": 1.5100087032201914, + "grad_norm": 0.4921875, + "learning_rate": 1.5494470647568144e-06, + "loss": 0.9372, + "step": 868 + }, + { + "epoch": 1.5117493472584855, + "grad_norm": 0.4921875, + "learning_rate": 1.5390416646373836e-06, + "loss": 0.9126, + "step": 869 + }, + { + "epoch": 1.5134899912967799, + "grad_norm": 0.49609375, + "learning_rate": 1.5286649621666726e-06, + "loss": 0.9317, + "step": 870 + }, + { + "epoch": 1.515230635335074, + "grad_norm": 0.494140625, + "learning_rate": 1.5183170433864474e-06, + "loss": 0.9109, + "step": 871 + }, + { + "epoch": 1.5169712793733683, + "grad_norm": 0.486328125, + "learning_rate": 1.5079979940998e-06, + "loss": 0.9734, + "step": 872 + }, + { + "epoch": 1.5187119234116624, + "grad_norm": 0.494140625, + "learning_rate": 1.4977078998704482e-06, + "loss": 0.9285, + "step": 873 + }, + { + "epoch": 1.5204525674499565, + "grad_norm": 0.53125, + "learning_rate": 1.4874468460220142e-06, + "loss": 0.8997, + "step": 874 + }, + { + "epoch": 1.5221932114882506, + "grad_norm": 0.494140625, + "learning_rate": 1.4772149176373263e-06, + "loss": 0.9392, + "step": 875 + }, + { + "epoch": 1.5239338555265447, + "grad_norm": 0.51953125, + "learning_rate": 1.4670121995577098e-06, + "loss": 0.9526, + "step": 876 + }, + { + "epoch": 1.525674499564839, + "grad_norm": 0.478515625, + "learning_rate": 1.4568387763822778e-06, + "loss": 0.9362, + "step": 877 + }, + { + "epoch": 1.5274151436031331, + "grad_norm": 0.51171875, + "learning_rate": 1.446694732467242e-06, + "loss": 0.935, + "step": 878 + }, + { + "epoch": 1.5291557876414275, + "grad_norm": 0.478515625, + "learning_rate": 1.4365801519252027e-06, + "loss": 0.9765, + "step": 879 + }, + { + "epoch": 1.5308964316797216, + "grad_norm": 0.48828125, + "learning_rate": 1.4264951186244586e-06, + "loss": 0.9126, + "step": 880 + }, + { + "epoch": 1.5326370757180157, + "grad_norm": 0.50390625, + "learning_rate": 1.416439716188302e-06, + "loss": 0.8997, + "step": 881 + }, + { + "epoch": 1.5343777197563098, + "grad_norm": 0.50390625, + "learning_rate": 1.4064140279943366e-06, + "loss": 0.9776, + "step": 882 + }, + { + "epoch": 1.5361183637946039, + "grad_norm": 0.5078125, + "learning_rate": 1.3964181371737795e-06, + "loss": 0.8686, + "step": 883 + }, + { + "epoch": 1.5378590078328982, + "grad_norm": 0.5, + "learning_rate": 1.3864521266107728e-06, + "loss": 0.9042, + "step": 884 + }, + { + "epoch": 1.5395996518711923, + "grad_norm": 0.50390625, + "learning_rate": 1.3765160789416988e-06, + "loss": 0.8743, + "step": 885 + }, + { + "epoch": 1.5413402959094866, + "grad_norm": 0.4765625, + "learning_rate": 1.3666100765544865e-06, + "loss": 0.9002, + "step": 886 + }, + { + "epoch": 1.5430809399477807, + "grad_norm": 0.5, + "learning_rate": 1.3567342015879454e-06, + "loss": 0.9469, + "step": 887 + }, + { + "epoch": 1.5448215839860748, + "grad_norm": 0.4765625, + "learning_rate": 1.3468885359310641e-06, + "loss": 0.9432, + "step": 888 + }, + { + "epoch": 1.546562228024369, + "grad_norm": 0.486328125, + "learning_rate": 1.3370731612223464e-06, + "loss": 0.8693, + "step": 889 + }, + { + "epoch": 1.548302872062663, + "grad_norm": 0.5078125, + "learning_rate": 1.3272881588491239e-06, + "loss": 0.9178, + "step": 890 + }, + { + "epoch": 1.5500435161009574, + "grad_norm": 0.4921875, + "learning_rate": 
1.3175336099468945e-06, + "loss": 0.9519, + "step": 891 + }, + { + "epoch": 1.5517841601392517, + "grad_norm": 0.486328125, + "learning_rate": 1.3078095953986332e-06, + "loss": 0.9241, + "step": 892 + }, + { + "epoch": 1.5535248041775458, + "grad_norm": 0.494140625, + "learning_rate": 1.2981161958341338e-06, + "loss": 0.9798, + "step": 893 + }, + { + "epoch": 1.55526544821584, + "grad_norm": 0.51953125, + "learning_rate": 1.2884534916293345e-06, + "loss": 0.8939, + "step": 894 + }, + { + "epoch": 1.557006092254134, + "grad_norm": 0.49609375, + "learning_rate": 1.278821562905655e-06, + "loss": 0.8929, + "step": 895 + }, + { + "epoch": 1.558746736292428, + "grad_norm": 0.494140625, + "learning_rate": 1.2692204895293297e-06, + "loss": 0.9142, + "step": 896 + }, + { + "epoch": 1.5604873803307222, + "grad_norm": 0.482421875, + "learning_rate": 1.2596503511107428e-06, + "loss": 0.9478, + "step": 897 + }, + { + "epoch": 1.5622280243690165, + "grad_norm": 0.478515625, + "learning_rate": 1.250111227003774e-06, + "loss": 0.961, + "step": 898 + }, + { + "epoch": 1.5639686684073109, + "grad_norm": 0.5, + "learning_rate": 1.240603196305139e-06, + "loss": 0.8979, + "step": 899 + }, + { + "epoch": 1.565709312445605, + "grad_norm": 0.482421875, + "learning_rate": 1.2311263378537314e-06, + "loss": 0.9198, + "step": 900 + }, + { + "epoch": 1.567449956483899, + "grad_norm": 0.5078125, + "learning_rate": 1.2216807302299682e-06, + "loss": 0.9374, + "step": 901 + }, + { + "epoch": 1.5691906005221932, + "grad_norm": 0.4921875, + "learning_rate": 1.212266451755142e-06, + "loss": 0.8978, + "step": 902 + }, + { + "epoch": 1.5709312445604873, + "grad_norm": 0.482421875, + "learning_rate": 1.2028835804907701e-06, + "loss": 0.9428, + "step": 903 + }, + { + "epoch": 1.5726718885987816, + "grad_norm": 0.50390625, + "learning_rate": 1.1935321942379457e-06, + "loss": 0.8847, + "step": 904 + }, + { + "epoch": 1.5744125326370757, + "grad_norm": 0.474609375, + "learning_rate": 1.184212370536696e-06, + "loss": 0.965, + "step": 905 + }, + { + "epoch": 1.57615317667537, + "grad_norm": 0.5078125, + "learning_rate": 1.1749241866653328e-06, + "loss": 0.8731, + "step": 906 + }, + { + "epoch": 1.5778938207136641, + "grad_norm": 0.50390625, + "learning_rate": 1.1656677196398197e-06, + "loss": 0.8818, + "step": 907 + }, + { + "epoch": 1.5796344647519582, + "grad_norm": 0.5078125, + "learning_rate": 1.15644304621313e-06, + "loss": 0.8725, + "step": 908 + }, + { + "epoch": 1.5813751087902523, + "grad_norm": 0.51171875, + "learning_rate": 1.147250242874609e-06, + "loss": 0.9285, + "step": 909 + }, + { + "epoch": 1.5831157528285464, + "grad_norm": 0.490234375, + "learning_rate": 1.138089385849338e-06, + "loss": 0.9031, + "step": 910 + }, + { + "epoch": 1.5848563968668408, + "grad_norm": 0.49609375, + "learning_rate": 1.1289605510975122e-06, + "loss": 0.9971, + "step": 911 + }, + { + "epoch": 1.5865970409051349, + "grad_norm": 0.5, + "learning_rate": 1.1198638143137995e-06, + "loss": 0.8605, + "step": 912 + }, + { + "epoch": 1.5883376849434292, + "grad_norm": 0.494140625, + "learning_rate": 1.110799250926715e-06, + "loss": 0.8972, + "step": 913 + }, + { + "epoch": 1.5900783289817233, + "grad_norm": 0.486328125, + "learning_rate": 1.1017669360980016e-06, + "loss": 0.9216, + "step": 914 + }, + { + "epoch": 1.5918189730200174, + "grad_norm": 0.51953125, + "learning_rate": 1.0927669447220012e-06, + "loss": 0.9231, + "step": 915 + }, + { + "epoch": 1.5935596170583115, + "grad_norm": 0.482421875, + "learning_rate": 1.0837993514250373e-06, + 
"loss": 0.8726, + "step": 916 + }, + { + "epoch": 1.5953002610966056, + "grad_norm": 0.48828125, + "learning_rate": 1.07486423056479e-06, + "loss": 0.9542, + "step": 917 + }, + { + "epoch": 1.5970409051349, + "grad_norm": 0.5, + "learning_rate": 1.065961656229688e-06, + "loss": 0.934, + "step": 918 + }, + { + "epoch": 1.598781549173194, + "grad_norm": 0.498046875, + "learning_rate": 1.0570917022382888e-06, + "loss": 0.952, + "step": 919 + }, + { + "epoch": 1.6005221932114884, + "grad_norm": 0.4921875, + "learning_rate": 1.048254442138667e-06, + "loss": 0.9237, + "step": 920 + }, + { + "epoch": 1.6022628372497825, + "grad_norm": 0.490234375, + "learning_rate": 1.039449949207808e-06, + "loss": 0.8996, + "step": 921 + }, + { + "epoch": 1.6040034812880766, + "grad_norm": 0.50390625, + "learning_rate": 1.030678296450992e-06, + "loss": 0.9186, + "step": 922 + }, + { + "epoch": 1.6057441253263707, + "grad_norm": 0.49609375, + "learning_rate": 1.0219395566011992e-06, + "loss": 0.8819, + "step": 923 + }, + { + "epoch": 1.6074847693646648, + "grad_norm": 0.50390625, + "learning_rate": 1.0132338021185013e-06, + "loss": 0.8493, + "step": 924 + }, + { + "epoch": 1.609225413402959, + "grad_norm": 0.494140625, + "learning_rate": 1.004561105189461e-06, + "loss": 0.9161, + "step": 925 + }, + { + "epoch": 1.6109660574412534, + "grad_norm": 0.5078125, + "learning_rate": 9.959215377265312e-07, + "loss": 0.9435, + "step": 926 + }, + { + "epoch": 1.6127067014795475, + "grad_norm": 0.48828125, + "learning_rate": 9.87315171367464e-07, + "loss": 0.9459, + "step": 927 + }, + { + "epoch": 1.6144473455178416, + "grad_norm": 0.50390625, + "learning_rate": 9.787420774747132e-07, + "loss": 0.9374, + "step": 928 + }, + { + "epoch": 1.6161879895561357, + "grad_norm": 0.50390625, + "learning_rate": 9.70202327134842e-07, + "loss": 0.9112, + "step": 929 + }, + { + "epoch": 1.6179286335944298, + "grad_norm": 0.5078125, + "learning_rate": 9.616959911579388e-07, + "loss": 0.9066, + "step": 930 + }, + { + "epoch": 1.619669277632724, + "grad_norm": 0.51953125, + "learning_rate": 9.532231400770181e-07, + "loss": 0.9103, + "step": 931 + }, + { + "epoch": 1.6214099216710183, + "grad_norm": 0.494140625, + "learning_rate": 9.447838441474538e-07, + "loss": 0.8597, + "step": 932 + }, + { + "epoch": 1.6231505657093126, + "grad_norm": 0.48828125, + "learning_rate": 9.363781733463784e-07, + "loss": 0.9831, + "step": 933 + }, + { + "epoch": 1.6248912097476067, + "grad_norm": 0.51171875, + "learning_rate": 9.28006197372115e-07, + "loss": 0.9023, + "step": 934 + }, + { + "epoch": 1.6266318537859008, + "grad_norm": 0.490234375, + "learning_rate": 9.196679856435909e-07, + "loss": 0.8803, + "step": 935 + }, + { + "epoch": 1.628372497824195, + "grad_norm": 0.5, + "learning_rate": 9.113636072997745e-07, + "loss": 0.8838, + "step": 936 + }, + { + "epoch": 1.628372497824195, + "eval_loss": 0.9183242917060852, + "eval_runtime": 59.607, + "eval_samples_per_second": 78.648, + "eval_steps_per_second": 9.831, + "step": 936 + }, + { + "epoch": 1.630113141862489, + "grad_norm": 0.48828125, + "learning_rate": 9.030931311990842e-07, + "loss": 0.9945, + "step": 937 + }, + { + "epoch": 1.6318537859007833, + "grad_norm": 0.50390625, + "learning_rate": 8.948566259188335e-07, + "loss": 0.9272, + "step": 938 + }, + { + "epoch": 1.6335944299390774, + "grad_norm": 0.51171875, + "learning_rate": 8.86654159754653e-07, + "loss": 0.9066, + "step": 939 + }, + { + "epoch": 1.6353350739773718, + "grad_norm": 0.49609375, + "learning_rate": 8.784858007199293e-07, + "loss": 
0.936, + "step": 940 + }, + { + "epoch": 1.6370757180156659, + "grad_norm": 0.4921875, + "learning_rate": 8.703516165452374e-07, + "loss": 0.8701, + "step": 941 + }, + { + "epoch": 1.63881636205396, + "grad_norm": 0.51171875, + "learning_rate": 8.622516746777787e-07, + "loss": 0.9653, + "step": 942 + }, + { + "epoch": 1.640557006092254, + "grad_norm": 0.5078125, + "learning_rate": 8.541860422808268e-07, + "loss": 0.9306, + "step": 943 + }, + { + "epoch": 1.6422976501305482, + "grad_norm": 0.5078125, + "learning_rate": 8.461547862331648e-07, + "loss": 0.9144, + "step": 944 + }, + { + "epoch": 1.6440382941688425, + "grad_norm": 0.48828125, + "learning_rate": 8.38157973128535e-07, + "loss": 0.9413, + "step": 945 + }, + { + "epoch": 1.6457789382071366, + "grad_norm": 0.478515625, + "learning_rate": 8.301956692750851e-07, + "loss": 0.9101, + "step": 946 + }, + { + "epoch": 1.647519582245431, + "grad_norm": 0.50390625, + "learning_rate": 8.222679406948148e-07, + "loss": 0.8962, + "step": 947 + }, + { + "epoch": 1.649260226283725, + "grad_norm": 0.4921875, + "learning_rate": 8.143748531230361e-07, + "loss": 0.9566, + "step": 948 + }, + { + "epoch": 1.6510008703220191, + "grad_norm": 0.490234375, + "learning_rate": 8.065164720078217e-07, + "loss": 0.964, + "step": 949 + }, + { + "epoch": 1.6527415143603132, + "grad_norm": 0.494140625, + "learning_rate": 7.986928625094664e-07, + "loss": 0.9256, + "step": 950 + }, + { + "epoch": 1.6544821583986073, + "grad_norm": 0.484375, + "learning_rate": 7.909040894999404e-07, + "loss": 0.8909, + "step": 951 + }, + { + "epoch": 1.6562228024369017, + "grad_norm": 0.5078125, + "learning_rate": 7.831502175623629e-07, + "loss": 0.9502, + "step": 952 + }, + { + "epoch": 1.6579634464751958, + "grad_norm": 0.48828125, + "learning_rate": 7.754313109904532e-07, + "loss": 0.8644, + "step": 953 + }, + { + "epoch": 1.65970409051349, + "grad_norm": 0.482421875, + "learning_rate": 7.677474337880081e-07, + "loss": 0.8743, + "step": 954 + }, + { + "epoch": 1.6614447345517842, + "grad_norm": 0.484375, + "learning_rate": 7.600986496683654e-07, + "loss": 0.9146, + "step": 955 + }, + { + "epoch": 1.6631853785900783, + "grad_norm": 0.49609375, + "learning_rate": 7.524850220538771e-07, + "loss": 0.898, + "step": 956 + }, + { + "epoch": 1.6649260226283724, + "grad_norm": 0.494140625, + "learning_rate": 7.449066140753864e-07, + "loss": 0.8945, + "step": 957 + }, + { + "epoch": 1.6666666666666665, + "grad_norm": 0.5, + "learning_rate": 7.373634885716968e-07, + "loss": 0.918, + "step": 958 + }, + { + "epoch": 1.6684073107049608, + "grad_norm": 0.490234375, + "learning_rate": 7.298557080890595e-07, + "loss": 0.9634, + "step": 959 + }, + { + "epoch": 1.6701479547432552, + "grad_norm": 0.5, + "learning_rate": 7.223833348806503e-07, + "loss": 0.8925, + "step": 960 + }, + { + "epoch": 1.6718885987815493, + "grad_norm": 0.50390625, + "learning_rate": 7.149464309060549e-07, + "loss": 0.9052, + "step": 961 + }, + { + "epoch": 1.6736292428198434, + "grad_norm": 0.482421875, + "learning_rate": 7.075450578307514e-07, + "loss": 0.9594, + "step": 962 + }, + { + "epoch": 1.6753698868581375, + "grad_norm": 0.5078125, + "learning_rate": 7.001792770256055e-07, + "loss": 0.8956, + "step": 963 + }, + { + "epoch": 1.6771105308964316, + "grad_norm": 0.484375, + "learning_rate": 6.928491495663565e-07, + "loss": 0.9591, + "step": 964 + }, + { + "epoch": 1.6788511749347257, + "grad_norm": 0.482421875, + "learning_rate": 6.855547362331128e-07, + "loss": 0.8822, + "step": 965 + }, + { + "epoch": 1.68059181897302, 
+ "grad_norm": 0.51171875, + "learning_rate": 6.78296097509849e-07, + "loss": 0.9193, + "step": 966 + }, + { + "epoch": 1.6823324630113143, + "grad_norm": 0.494140625, + "learning_rate": 6.710732935838999e-07, + "loss": 0.9275, + "step": 967 + }, + { + "epoch": 1.6840731070496084, + "grad_norm": 0.494140625, + "learning_rate": 6.638863843454663e-07, + "loss": 0.8994, + "step": 968 + }, + { + "epoch": 1.6858137510879025, + "grad_norm": 0.482421875, + "learning_rate": 6.567354293871181e-07, + "loss": 0.9116, + "step": 969 + }, + { + "epoch": 1.6875543951261966, + "grad_norm": 0.48828125, + "learning_rate": 6.49620488003297e-07, + "loss": 0.9547, + "step": 970 + }, + { + "epoch": 1.6892950391644908, + "grad_norm": 0.51171875, + "learning_rate": 6.425416191898259e-07, + "loss": 0.9733, + "step": 971 + }, + { + "epoch": 1.691035683202785, + "grad_norm": 0.50390625, + "learning_rate": 6.354988816434205e-07, + "loss": 0.9002, + "step": 972 + }, + { + "epoch": 1.6927763272410792, + "grad_norm": 0.470703125, + "learning_rate": 6.28492333761207e-07, + "loss": 0.9075, + "step": 973 + }, + { + "epoch": 1.6945169712793735, + "grad_norm": 0.49609375, + "learning_rate": 6.215220336402272e-07, + "loss": 0.9165, + "step": 974 + }, + { + "epoch": 1.6962576153176676, + "grad_norm": 0.458984375, + "learning_rate": 6.145880390769665e-07, + "loss": 0.9573, + "step": 975 + }, + { + "epoch": 1.6979982593559617, + "grad_norm": 0.5, + "learning_rate": 6.076904075668671e-07, + "loss": 0.9044, + "step": 976 + }, + { + "epoch": 1.6997389033942558, + "grad_norm": 0.494140625, + "learning_rate": 6.008291963038632e-07, + "loss": 0.9077, + "step": 977 + }, + { + "epoch": 1.70147954743255, + "grad_norm": 0.48046875, + "learning_rate": 5.940044621798896e-07, + "loss": 0.9474, + "step": 978 + }, + { + "epoch": 1.7032201914708442, + "grad_norm": 0.486328125, + "learning_rate": 5.872162617844268e-07, + "loss": 0.9597, + "step": 979 + }, + { + "epoch": 1.7049608355091384, + "grad_norm": 0.49609375, + "learning_rate": 5.804646514040163e-07, + "loss": 0.8826, + "step": 980 + }, + { + "epoch": 1.7067014795474327, + "grad_norm": 0.50390625, + "learning_rate": 5.737496870218101e-07, + "loss": 0.9059, + "step": 981 + }, + { + "epoch": 1.7084421235857268, + "grad_norm": 0.490234375, + "learning_rate": 5.670714243170938e-07, + "loss": 0.9254, + "step": 982 + }, + { + "epoch": 1.7101827676240209, + "grad_norm": 0.51171875, + "learning_rate": 5.604299186648282e-07, + "loss": 0.9313, + "step": 983 + }, + { + "epoch": 1.711923411662315, + "grad_norm": 0.4765625, + "learning_rate": 5.538252251351934e-07, + "loss": 0.9003, + "step": 984 + }, + { + "epoch": 1.713664055700609, + "grad_norm": 0.50390625, + "learning_rate": 5.472573984931284e-07, + "loss": 0.9114, + "step": 985 + }, + { + "epoch": 1.7154046997389034, + "grad_norm": 0.486328125, + "learning_rate": 5.407264931978812e-07, + "loss": 0.8797, + "step": 986 + }, + { + "epoch": 1.7171453437771975, + "grad_norm": 0.5, + "learning_rate": 5.342325634025503e-07, + "loss": 0.8949, + "step": 987 + }, + { + "epoch": 1.7188859878154918, + "grad_norm": 0.490234375, + "learning_rate": 5.277756629536434e-07, + "loss": 0.8995, + "step": 988 + }, + { + "epoch": 1.720626631853786, + "grad_norm": 0.5, + "learning_rate": 5.21355845390627e-07, + "loss": 0.9108, + "step": 989 + }, + { + "epoch": 1.72236727589208, + "grad_norm": 0.50390625, + "learning_rate": 5.149731639454819e-07, + "loss": 0.9389, + "step": 990 + }, + { + "epoch": 1.7241079199303742, + "grad_norm": 0.51171875, + "learning_rate": 
5.086276715422644e-07, + "loss": 0.9128, + "step": 991 + }, + { + "epoch": 1.7258485639686683, + "grad_norm": 0.498046875, + "learning_rate": 5.023194207966614e-07, + "loss": 0.951, + "step": 992 + }, + { + "epoch": 1.7275892080069626, + "grad_norm": 0.484375, + "learning_rate": 4.960484640155666e-07, + "loss": 0.8701, + "step": 993 + }, + { + "epoch": 1.729329852045257, + "grad_norm": 0.50390625, + "learning_rate": 4.898148531966307e-07, + "loss": 0.9149, + "step": 994 + }, + { + "epoch": 1.731070496083551, + "grad_norm": 0.494140625, + "learning_rate": 4.836186400278442e-07, + "loss": 0.9811, + "step": 995 + }, + { + "epoch": 1.7328111401218451, + "grad_norm": 0.484375, + "learning_rate": 4.774598758870979e-07, + "loss": 0.8836, + "step": 996 + }, + { + "epoch": 1.7345517841601392, + "grad_norm": 0.4921875, + "learning_rate": 4.7133861184176845e-07, + "loss": 0.8863, + "step": 997 + }, + { + "epoch": 1.7362924281984333, + "grad_norm": 0.48828125, + "learning_rate": 4.652548986482813e-07, + "loss": 0.965, + "step": 998 + }, + { + "epoch": 1.7380330722367274, + "grad_norm": 0.478515625, + "learning_rate": 4.59208786751702e-07, + "loss": 0.9116, + "step": 999 + }, + { + "epoch": 1.7397737162750218, + "grad_norm": 0.490234375, + "learning_rate": 4.5320032628531074e-07, + "loss": 0.945, + "step": 1000 + }, + { + "epoch": 1.741514360313316, + "grad_norm": 0.4921875, + "learning_rate": 4.4722956707018873e-07, + "loss": 0.9238, + "step": 1001 + }, + { + "epoch": 1.7432550043516102, + "grad_norm": 0.5078125, + "learning_rate": 4.4129655861480713e-07, + "loss": 0.878, + "step": 1002 + }, + { + "epoch": 1.7449956483899043, + "grad_norm": 0.494140625, + "learning_rate": 4.3540135011461094e-07, + "loss": 0.9214, + "step": 1003 + }, + { + "epoch": 1.7467362924281984, + "grad_norm": 0.48828125, + "learning_rate": 4.295439904516174e-07, + "loss": 0.8751, + "step": 1004 + }, + { + "epoch": 1.7484769364664925, + "grad_norm": 0.498046875, + "learning_rate": 4.2372452819400776e-07, + "loss": 0.8561, + "step": 1005 + }, + { + "epoch": 1.7502175805047868, + "grad_norm": 0.498046875, + "learning_rate": 4.1794301159572506e-07, + "loss": 0.9321, + "step": 1006 + }, + { + "epoch": 1.751958224543081, + "grad_norm": 0.490234375, + "learning_rate": 4.1219948859607063e-07, + "loss": 0.9764, + "step": 1007 + }, + { + "epoch": 1.7536988685813752, + "grad_norm": 0.4765625, + "learning_rate": 4.0649400681931296e-07, + "loss": 0.8856, + "step": 1008 + }, + { + "epoch": 1.7536988685813752, + "eval_loss": 0.9182912707328796, + "eval_runtime": 60.1278, + "eval_samples_per_second": 77.967, + "eval_steps_per_second": 9.746, + "step": 1008 + }, + { + "epoch": 1.7554395126196694, + "grad_norm": 0.498046875, + "learning_rate": 4.008266135742894e-07, + "loss": 0.9056, + "step": 1009 + }, + { + "epoch": 1.7571801566579635, + "grad_norm": 0.49609375, + "learning_rate": 3.9519735585401174e-07, + "loss": 0.8885, + "step": 1010 + }, + { + "epoch": 1.7589208006962576, + "grad_norm": 0.4921875, + "learning_rate": 3.896062803352818e-07, + "loss": 0.8983, + "step": 1011 + }, + { + "epoch": 1.7606614447345517, + "grad_norm": 0.5078125, + "learning_rate": 3.8405343337829806e-07, + "loss": 0.8796, + "step": 1012 + }, + { + "epoch": 1.762402088772846, + "grad_norm": 0.5078125, + "learning_rate": 3.785388610262769e-07, + "loss": 0.9577, + "step": 1013 + }, + { + "epoch": 1.76414273281114, + "grad_norm": 0.50390625, + "learning_rate": 3.730626090050676e-07, + "loss": 0.902, + "step": 1014 + }, + { + "epoch": 1.7658833768494344, + "grad_norm": 
0.4921875, + "learning_rate": 3.6762472272277584e-07, + "loss": 0.8889, + "step": 1015 + }, + { + "epoch": 1.7676240208877285, + "grad_norm": 0.4765625, + "learning_rate": 3.6222524726938157e-07, + "loss": 0.9114, + "step": 1016 + }, + { + "epoch": 1.7693646649260226, + "grad_norm": 0.494140625, + "learning_rate": 3.568642274163725e-07, + "loss": 0.9452, + "step": 1017 + }, + { + "epoch": 1.7711053089643167, + "grad_norm": 0.490234375, + "learning_rate": 3.5154170761636966e-07, + "loss": 0.9557, + "step": 1018 + }, + { + "epoch": 1.7728459530026108, + "grad_norm": 0.494140625, + "learning_rate": 3.4625773200275546e-07, + "loss": 0.8771, + "step": 1019 + }, + { + "epoch": 1.7745865970409052, + "grad_norm": 0.486328125, + "learning_rate": 3.4101234438931375e-07, + "loss": 0.9142, + "step": 1020 + }, + { + "epoch": 1.7763272410791993, + "grad_norm": 0.474609375, + "learning_rate": 3.3580558826985964e-07, + "loss": 0.9797, + "step": 1021 + }, + { + "epoch": 1.7780678851174936, + "grad_norm": 0.48046875, + "learning_rate": 3.306375068178891e-07, + "loss": 0.8661, + "step": 1022 + }, + { + "epoch": 1.7798085291557877, + "grad_norm": 0.490234375, + "learning_rate": 3.2550814288620837e-07, + "loss": 0.9754, + "step": 1023 + }, + { + "epoch": 1.7815491731940818, + "grad_norm": 0.51171875, + "learning_rate": 3.204175390065889e-07, + "loss": 0.9253, + "step": 1024 + }, + { + "epoch": 1.783289817232376, + "grad_norm": 0.5, + "learning_rate": 3.1536573738940814e-07, + "loss": 0.9537, + "step": 1025 + }, + { + "epoch": 1.78503046127067, + "grad_norm": 0.5, + "learning_rate": 3.103527799233047e-07, + "loss": 0.9244, + "step": 1026 + }, + { + "epoch": 1.7867711053089643, + "grad_norm": 0.498046875, + "learning_rate": 3.053787081748266e-07, + "loss": 0.8811, + "step": 1027 + }, + { + "epoch": 1.7885117493472587, + "grad_norm": 0.51171875, + "learning_rate": 3.004435633880881e-07, + "loss": 0.9149, + "step": 1028 + }, + { + "epoch": 1.7902523933855528, + "grad_norm": 0.49609375, + "learning_rate": 2.955473864844299e-07, + "loss": 0.9247, + "step": 1029 + }, + { + "epoch": 1.7919930374238469, + "grad_norm": 0.49609375, + "learning_rate": 2.906902180620758e-07, + "loss": 0.9432, + "step": 1030 + }, + { + "epoch": 1.793733681462141, + "grad_norm": 0.486328125, + "learning_rate": 2.8587209839580134e-07, + "loss": 0.9187, + "step": 1031 + }, + { + "epoch": 1.795474325500435, + "grad_norm": 0.5, + "learning_rate": 2.810930674365919e-07, + "loss": 0.8787, + "step": 1032 + }, + { + "epoch": 1.7972149695387292, + "grad_norm": 0.498046875, + "learning_rate": 2.7635316481132103e-07, + "loss": 0.9635, + "step": 1033 + }, + { + "epoch": 1.7989556135770235, + "grad_norm": 0.48828125, + "learning_rate": 2.7165242982241436e-07, + "loss": 0.9223, + "step": 1034 + }, + { + "epoch": 1.8006962576153178, + "grad_norm": 0.50390625, + "learning_rate": 2.6699090144752803e-07, + "loss": 0.9436, + "step": 1035 + }, + { + "epoch": 1.802436901653612, + "grad_norm": 0.48828125, + "learning_rate": 2.623686183392238e-07, + "loss": 0.8988, + "step": 1036 + }, + { + "epoch": 1.804177545691906, + "grad_norm": 0.51171875, + "learning_rate": 2.5778561882464525e-07, + "loss": 0.9136, + "step": 1037 + }, + { + "epoch": 1.8059181897302001, + "grad_norm": 0.49609375, + "learning_rate": 2.5324194090521003e-07, + "loss": 0.9017, + "step": 1038 + }, + { + "epoch": 1.8076588337684942, + "grad_norm": 0.494140625, + "learning_rate": 2.487376222562815e-07, + "loss": 0.8713, + "step": 1039 + }, + { + "epoch": 1.8093994778067886, + "grad_norm": 
0.478515625, + "learning_rate": 2.44272700226868e-07, + "loss": 0.9001, + "step": 1040 + }, + { + "epoch": 1.8111401218450827, + "grad_norm": 0.50390625, + "learning_rate": 2.3984721183930305e-07, + "loss": 0.8807, + "step": 1041 + }, + { + "epoch": 1.812880765883377, + "grad_norm": 0.494140625, + "learning_rate": 2.3546119378895005e-07, + "loss": 0.9568, + "step": 1042 + }, + { + "epoch": 1.814621409921671, + "grad_norm": 0.5, + "learning_rate": 2.311146824438848e-07, + "loss": 0.9383, + "step": 1043 + }, + { + "epoch": 1.8163620539599652, + "grad_norm": 0.51953125, + "learning_rate": 2.268077138446051e-07, + "loss": 0.9677, + "step": 1044 + }, + { + "epoch": 1.8181026979982593, + "grad_norm": 0.5234375, + "learning_rate": 2.225403237037249e-07, + "loss": 0.8965, + "step": 1045 + }, + { + "epoch": 1.8198433420365534, + "grad_norm": 0.515625, + "learning_rate": 2.183125474056824e-07, + "loss": 0.9013, + "step": 1046 + }, + { + "epoch": 1.8215839860748477, + "grad_norm": 0.47265625, + "learning_rate": 2.1412442000644352e-07, + "loss": 0.9021, + "step": 1047 + }, + { + "epoch": 1.8233246301131418, + "grad_norm": 0.49609375, + "learning_rate": 2.0997597623321265e-07, + "loss": 0.8785, + "step": 1048 + }, + { + "epoch": 1.8250652741514362, + "grad_norm": 0.5078125, + "learning_rate": 2.058672504841458e-07, + "loss": 0.9131, + "step": 1049 + }, + { + "epoch": 1.8268059181897303, + "grad_norm": 0.486328125, + "learning_rate": 2.0179827682806241e-07, + "loss": 0.8653, + "step": 1050 + }, + { + "epoch": 1.8285465622280244, + "grad_norm": 0.4921875, + "learning_rate": 1.977690890041678e-07, + "loss": 0.9263, + "step": 1051 + }, + { + "epoch": 1.8302872062663185, + "grad_norm": 0.48828125, + "learning_rate": 1.9377972042176563e-07, + "loss": 0.9633, + "step": 1052 + }, + { + "epoch": 1.8320278503046126, + "grad_norm": 0.486328125, + "learning_rate": 1.898302041599892e-07, + "loss": 0.906, + "step": 1053 + }, + { + "epoch": 1.833768494342907, + "grad_norm": 0.490234375, + "learning_rate": 1.859205729675223e-07, + "loss": 0.8946, + "step": 1054 + }, + { + "epoch": 1.835509138381201, + "grad_norm": 0.498046875, + "learning_rate": 1.820508592623288e-07, + "loss": 0.8959, + "step": 1055 + }, + { + "epoch": 1.8372497824194953, + "grad_norm": 0.490234375, + "learning_rate": 1.7822109513138452e-07, + "loss": 0.9191, + "step": 1056 + }, + { + "epoch": 1.8389904264577894, + "grad_norm": 0.5, + "learning_rate": 1.744313123304092e-07, + "loss": 0.9553, + "step": 1057 + }, + { + "epoch": 1.8407310704960835, + "grad_norm": 0.5078125, + "learning_rate": 1.706815422836061e-07, + "loss": 0.9546, + "step": 1058 + }, + { + "epoch": 1.8424717145343776, + "grad_norm": 0.51171875, + "learning_rate": 1.669718160833983e-07, + "loss": 0.9716, + "step": 1059 + }, + { + "epoch": 1.8442123585726717, + "grad_norm": 0.494140625, + "learning_rate": 1.6330216449017445e-07, + "loss": 0.8972, + "step": 1060 + }, + { + "epoch": 1.845953002610966, + "grad_norm": 0.50390625, + "learning_rate": 1.5967261793202904e-07, + "loss": 0.9528, + "step": 1061 + }, + { + "epoch": 1.8476936466492604, + "grad_norm": 0.5, + "learning_rate": 1.5608320650451425e-07, + "loss": 0.8747, + "step": 1062 + }, + { + "epoch": 1.8494342906875545, + "grad_norm": 0.5078125, + "learning_rate": 1.5253395997039066e-07, + "loss": 0.8525, + "step": 1063 + }, + { + "epoch": 1.8511749347258486, + "grad_norm": 0.51171875, + "learning_rate": 1.490249077593753e-07, + "loss": 0.8958, + "step": 1064 + }, + { + "epoch": 1.8529155787641427, + "grad_norm": 0.4921875, + 
"learning_rate": 1.4555607896790447e-07, + "loss": 0.9631, + "step": 1065 + }, + { + "epoch": 1.8546562228024368, + "grad_norm": 0.50390625, + "learning_rate": 1.4212750235888416e-07, + "loss": 0.9459, + "step": 1066 + }, + { + "epoch": 1.856396866840731, + "grad_norm": 0.5, + "learning_rate": 1.3873920636146344e-07, + "loss": 0.935, + "step": 1067 + }, + { + "epoch": 1.8581375108790252, + "grad_norm": 0.49609375, + "learning_rate": 1.353912190707851e-07, + "loss": 0.9517, + "step": 1068 + }, + { + "epoch": 1.8598781549173196, + "grad_norm": 0.474609375, + "learning_rate": 1.320835682477628e-07, + "loss": 0.9141, + "step": 1069 + }, + { + "epoch": 1.8616187989556137, + "grad_norm": 0.486328125, + "learning_rate": 1.288162813188476e-07, + "loss": 0.9573, + "step": 1070 + }, + { + "epoch": 1.8633594429939078, + "grad_norm": 0.5078125, + "learning_rate": 1.2558938537579834e-07, + "loss": 0.8628, + "step": 1071 + }, + { + "epoch": 1.8651000870322019, + "grad_norm": 0.5, + "learning_rate": 1.2240290717546178e-07, + "loss": 0.9729, + "step": 1072 + }, + { + "epoch": 1.866840731070496, + "grad_norm": 0.466796875, + "learning_rate": 1.1925687313954437e-07, + "loss": 0.954, + "step": 1073 + }, + { + "epoch": 1.8685813751087903, + "grad_norm": 0.4921875, + "learning_rate": 1.1615130935439978e-07, + "loss": 0.9583, + "step": 1074 + }, + { + "epoch": 1.8703220191470844, + "grad_norm": 0.51171875, + "learning_rate": 1.1308624157080838e-07, + "loss": 0.969, + "step": 1075 + }, + { + "epoch": 1.8720626631853787, + "grad_norm": 0.48046875, + "learning_rate": 1.1006169520376586e-07, + "loss": 0.9452, + "step": 1076 + }, + { + "epoch": 1.8738033072236728, + "grad_norm": 0.51171875, + "learning_rate": 1.0707769533226942e-07, + "loss": 0.9365, + "step": 1077 + }, + { + "epoch": 1.875543951261967, + "grad_norm": 0.48828125, + "learning_rate": 1.0413426669911408e-07, + "loss": 0.9677, + "step": 1078 + }, + { + "epoch": 1.877284595300261, + "grad_norm": 0.494140625, + "learning_rate": 1.0123143371068456e-07, + "loss": 0.9638, + "step": 1079 + }, + { + "epoch": 1.8790252393385551, + "grad_norm": 0.478515625, + "learning_rate": 9.836922043675368e-08, + "loss": 0.9235, + "step": 1080 + }, + { + "epoch": 1.8790252393385551, + "eval_loss": 0.9182877540588379, + "eval_runtime": 59.5013, + "eval_samples_per_second": 78.788, + "eval_steps_per_second": 9.849, + "step": 1080 + }, + { + "epoch": 1.8807658833768495, + "grad_norm": 0.4921875, + "learning_rate": 9.554765061028371e-08, + "loss": 0.9831, + "step": 1081 + }, + { + "epoch": 1.8825065274151436, + "grad_norm": 0.494140625, + "learning_rate": 9.276674762722703e-08, + "loss": 1.0114, + "step": 1082 + }, + { + "epoch": 1.884247171453438, + "grad_norm": 0.5, + "learning_rate": 9.002653454633581e-08, + "loss": 0.9167, + "step": 1083 + }, + { + "epoch": 1.885987815491732, + "grad_norm": 0.490234375, + "learning_rate": 8.732703408896648e-08, + "loss": 0.9615, + "step": 1084 + }, + { + "epoch": 1.887728459530026, + "grad_norm": 0.484375, + "learning_rate": 8.46682686388961e-08, + "loss": 0.9051, + "step": 1085 + }, + { + "epoch": 1.8894691035683202, + "grad_norm": 0.482421875, + "learning_rate": 8.205026024213191e-08, + "loss": 0.933, + "step": 1086 + }, + { + "epoch": 1.8912097476066143, + "grad_norm": 0.48828125, + "learning_rate": 7.947303060673373e-08, + "loss": 0.8518, + "step": 1087 + }, + { + "epoch": 1.8929503916449086, + "grad_norm": 0.4921875, + "learning_rate": 7.693660110262902e-08, + "loss": 0.9153, + "step": 1088 + }, + { + "epoch": 1.8946910356832027, + 
"grad_norm": 0.484375, + "learning_rate": 7.444099276143812e-08, + "loss": 0.961, + "step": 1089 + }, + { + "epoch": 1.896431679721497, + "grad_norm": 0.5078125, + "learning_rate": 7.198622627630047e-08, + "loss": 0.8997, + "step": 1090 + }, + { + "epoch": 1.8981723237597912, + "grad_norm": 0.494140625, + "learning_rate": 6.957232200170083e-08, + "loss": 0.875, + "step": 1091 + }, + { + "epoch": 1.8999129677980853, + "grad_norm": 0.484375, + "learning_rate": 6.719929995330388e-08, + "loss": 0.8301, + "step": 1092 + }, + { + "epoch": 1.9016536118363794, + "grad_norm": 0.48828125, + "learning_rate": 6.486717980778324e-08, + "loss": 0.9119, + "step": 1093 + }, + { + "epoch": 1.9033942558746735, + "grad_norm": 0.498046875, + "learning_rate": 6.257598090266326e-08, + "loss": 0.8896, + "step": 1094 + }, + { + "epoch": 1.9051348999129678, + "grad_norm": 0.51171875, + "learning_rate": 6.032572223615696e-08, + "loss": 0.9021, + "step": 1095 + }, + { + "epoch": 1.9068755439512621, + "grad_norm": 0.48828125, + "learning_rate": 5.811642246700722e-08, + "loss": 0.8831, + "step": 1096 + }, + { + "epoch": 1.9086161879895562, + "grad_norm": 0.4921875, + "learning_rate": 5.594809991433414e-08, + "loss": 0.9563, + "step": 1097 + }, + { + "epoch": 1.9103568320278503, + "grad_norm": 0.490234375, + "learning_rate": 5.382077255747964e-08, + "loss": 0.9229, + "step": 1098 + }, + { + "epoch": 1.9120974760661444, + "grad_norm": 0.482421875, + "learning_rate": 5.1734458035863076e-08, + "loss": 0.9321, + "step": 1099 + }, + { + "epoch": 1.9138381201044385, + "grad_norm": 0.5078125, + "learning_rate": 4.968917364883197e-08, + "loss": 0.9381, + "step": 1100 + }, + { + "epoch": 1.9155787641427326, + "grad_norm": 0.478515625, + "learning_rate": 4.768493635552041e-08, + "loss": 0.953, + "step": 1101 + }, + { + "epoch": 1.917319408181027, + "grad_norm": 0.51171875, + "learning_rate": 4.5721762774704745e-08, + "loss": 0.8987, + "step": 1102 + }, + { + "epoch": 1.9190600522193213, + "grad_norm": 0.490234375, + "learning_rate": 4.3799669184670936e-08, + "loss": 0.9296, + "step": 1103 + }, + { + "epoch": 1.9208006962576154, + "grad_norm": 0.49609375, + "learning_rate": 4.191867152307572e-08, + "loss": 0.9121, + "step": 1104 + }, + { + "epoch": 1.9225413402959095, + "grad_norm": 0.5234375, + "learning_rate": 4.0078785386815646e-08, + "loss": 0.9209, + "step": 1105 + }, + { + "epoch": 1.9242819843342036, + "grad_norm": 0.50390625, + "learning_rate": 3.8280026031898286e-08, + "loss": 0.9339, + "step": 1106 + }, + { + "epoch": 1.9260226283724977, + "grad_norm": 0.478515625, + "learning_rate": 3.652240837331456e-08, + "loss": 0.9054, + "step": 1107 + }, + { + "epoch": 1.927763272410792, + "grad_norm": 0.51953125, + "learning_rate": 3.4805946984917685e-08, + "loss": 0.9279, + "step": 1108 + }, + { + "epoch": 1.9295039164490861, + "grad_norm": 0.50390625, + "learning_rate": 3.3130656099297775e-08, + "loss": 0.9331, + "step": 1109 + }, + { + "epoch": 1.9312445604873805, + "grad_norm": 0.48828125, + "learning_rate": 3.149654960766857e-08, + "loss": 0.9167, + "step": 1110 + }, + { + "epoch": 1.9329852045256746, + "grad_norm": 0.482421875, + "learning_rate": 2.990364105974919e-08, + "loss": 0.8797, + "step": 1111 + }, + { + "epoch": 1.9347258485639687, + "grad_norm": 0.515625, + "learning_rate": 2.8351943663653126e-08, + "loss": 0.8777, + "step": 1112 + }, + { + "epoch": 1.9364664926022628, + "grad_norm": 0.482421875, + "learning_rate": 2.6841470285777772e-08, + "loss": 0.9268, + "step": 1113 + }, + { + "epoch": 1.9382071366405569, + 
"grad_norm": 0.51171875, + "learning_rate": 2.5372233450698947e-08, + "loss": 0.9177, + "step": 1114 + }, + { + "epoch": 1.9399477806788512, + "grad_norm": 0.484375, + "learning_rate": 2.3944245341065987e-08, + "loss": 0.9239, + "step": 1115 + }, + { + "epoch": 1.9416884247171453, + "grad_norm": 0.498046875, + "learning_rate": 2.2557517797502372e-08, + "loss": 1.0003, + "step": 1116 + }, + { + "epoch": 1.9434290687554396, + "grad_norm": 0.48046875, + "learning_rate": 2.1212062318504702e-08, + "loss": 0.9136, + "step": 1117 + }, + { + "epoch": 1.9451697127937337, + "grad_norm": 0.5, + "learning_rate": 1.9907890060348877e-08, + "loss": 0.9331, + "step": 1118 + }, + { + "epoch": 1.9469103568320278, + "grad_norm": 0.50390625, + "learning_rate": 1.8645011836999626e-08, + "loss": 1.002, + "step": 1119 + }, + { + "epoch": 1.948651000870322, + "grad_norm": 0.484375, + "learning_rate": 1.7423438120017234e-08, + "loss": 0.8867, + "step": 1120 + }, + { + "epoch": 1.950391644908616, + "grad_norm": 0.4921875, + "learning_rate": 1.624317903847372e-08, + "loss": 0.9608, + "step": 1121 + }, + { + "epoch": 1.9521322889469104, + "grad_norm": 0.5, + "learning_rate": 1.510424437886515e-08, + "loss": 0.8998, + "step": 1122 + }, + { + "epoch": 1.9538729329852045, + "grad_norm": 0.484375, + "learning_rate": 1.4006643585035007e-08, + "loss": 0.9214, + "step": 1123 + }, + { + "epoch": 1.9556135770234988, + "grad_norm": 0.490234375, + "learning_rate": 1.2950385758094263e-08, + "loss": 0.9178, + "step": 1124 + }, + { + "epoch": 1.957354221061793, + "grad_norm": 0.5, + "learning_rate": 1.1935479656342563e-08, + "loss": 0.944, + "step": 1125 + }, + { + "epoch": 1.959094865100087, + "grad_norm": 0.5234375, + "learning_rate": 1.09619336952016e-08, + "loss": 0.8819, + "step": 1126 + }, + { + "epoch": 1.9608355091383811, + "grad_norm": 0.4765625, + "learning_rate": 1.002975594713962e-08, + "loss": 0.9145, + "step": 1127 + }, + { + "epoch": 1.9625761531766752, + "grad_norm": 0.494140625, + "learning_rate": 9.138954141608702e-09, + "loss": 0.9318, + "step": 1128 + }, + { + "epoch": 1.9643167972149695, + "grad_norm": 0.4765625, + "learning_rate": 8.2895356649787e-09, + "loss": 0.9394, + "step": 1129 + }, + { + "epoch": 1.9660574412532639, + "grad_norm": 0.46875, + "learning_rate": 7.481507560475054e-09, + "loss": 0.9018, + "step": 1130 + }, + { + "epoch": 1.967798085291558, + "grad_norm": 0.494140625, + "learning_rate": 6.714876528123304e-09, + "loss": 0.8765, + "step": 1131 + }, + { + "epoch": 1.969538729329852, + "grad_norm": 0.494140625, + "learning_rate": 5.989648924690783e-09, + "loss": 0.9233, + "step": 1132 + }, + { + "epoch": 1.9712793733681462, + "grad_norm": 0.51171875, + "learning_rate": 5.305830763635556e-09, + "loss": 0.9219, + "step": 1133 + }, + { + "epoch": 1.9730200174064403, + "grad_norm": 0.5078125, + "learning_rate": 4.663427715055346e-09, + "loss": 0.9476, + "step": 1134 + }, + { + "epoch": 1.9747606614447344, + "grad_norm": 0.5078125, + "learning_rate": 4.062445105642021e-09, + "loss": 0.8924, + "step": 1135 + }, + { + "epoch": 1.9765013054830287, + "grad_norm": 0.4921875, + "learning_rate": 3.5028879186360664e-09, + "loss": 0.9134, + "step": 1136 + }, + { + "epoch": 1.978241949521323, + "grad_norm": 0.494140625, + "learning_rate": 2.9847607937855126e-09, + "loss": 0.8745, + "step": 1137 + }, + { + "epoch": 1.9799825935596171, + "grad_norm": 0.494140625, + "learning_rate": 2.508068027308186e-09, + "loss": 0.9141, + "step": 1138 + }, + { + "epoch": 1.9817232375979112, + "grad_norm": 0.474609375, + 
"learning_rate": 2.07281357185507e-09, + "loss": 0.9276, + "step": 1139 + }, + { + "epoch": 1.9834638816362054, + "grad_norm": 0.49609375, + "learning_rate": 1.6790010364786669e-09, + "loss": 0.9051, + "step": 1140 + }, + { + "epoch": 1.9852045256744995, + "grad_norm": 0.490234375, + "learning_rate": 1.3266336866024633e-09, + "loss": 0.9308, + "step": 1141 + }, + { + "epoch": 1.9869451697127938, + "grad_norm": 0.486328125, + "learning_rate": 1.0157144439937317e-09, + "loss": 0.9339, + "step": 1142 + }, + { + "epoch": 1.9886858137510879, + "grad_norm": 0.48828125, + "learning_rate": 7.462458867385503e-10, + "loss": 0.929, + "step": 1143 + }, + { + "epoch": 1.9904264577893822, + "grad_norm": 0.515625, + "learning_rate": 5.182302492229285e-10, + "loss": 0.9161, + "step": 1144 + }, + { + "epoch": 1.9921671018276763, + "grad_norm": 0.5234375, + "learning_rate": 3.316694221111583e-10, + "loss": 0.8931, + "step": 1145 + }, + { + "epoch": 1.9939077458659704, + "grad_norm": 0.490234375, + "learning_rate": 1.8656495233304594e-10, + "loss": 0.8902, + "step": 1146 + }, + { + "epoch": 1.9956483899042645, + "grad_norm": 0.5, + "learning_rate": 8.291804306781447e-11, + "loss": 0.9633, + "step": 1147 + }, + { + "epoch": 1.9973890339425586, + "grad_norm": 0.478515625, + "learning_rate": 2.0729553738552122e-11, + "loss": 0.8998, + "step": 1148 + } + ], + "logging_steps": 1, + "max_steps": 1148, + "num_input_tokens_seen": 0, + "num_train_epochs": 2, + "save_steps": 574, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 7.226819769217843e+18, + "train_batch_size": 8, + "trial_name": null, + "trial_params": null +}