{ "best_metric": 0.2915531098842621, "best_model_checkpoint": "./w2v-bert-2.0-chichewa_34_68h/checkpoint-15000", "epoch": 22.075096631695196, "eval_steps": 1000, "global_step": 20000, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0011043622308117063, "grad_norm": 21.818126678466797, "learning_rate": 3.0000000000000004e-09, "loss": 8.98, "step": 1 }, { "epoch": 0.0022087244616234127, "grad_norm": 19.142053604125977, "learning_rate": 6.000000000000001e-09, "loss": 7.9266, "step": 2 }, { "epoch": 0.0033130866924351186, "grad_norm": 19.274776458740234, "learning_rate": 9e-09, "loss": 8.0351, "step": 3 }, { "epoch": 0.004417448923246825, "grad_norm": 20.756601333618164, "learning_rate": 1.2000000000000002e-08, "loss": 8.5108, "step": 4 }, { "epoch": 0.005521811154058531, "grad_norm": 19.95114517211914, "learning_rate": 1.5000000000000002e-08, "loss": 8.2009, "step": 5 }, { "epoch": 0.006626173384870237, "grad_norm": 20.010526657104492, "learning_rate": 1.8e-08, "loss": 8.1941, "step": 6 }, { "epoch": 0.007730535615681944, "grad_norm": 21.821311950683594, "learning_rate": 2.1e-08, "loss": 8.8958, "step": 7 }, { "epoch": 0.00883489784649365, "grad_norm": 22.442974090576172, "learning_rate": 2.4000000000000003e-08, "loss": 8.8654, "step": 8 }, { "epoch": 0.009939260077305357, "grad_norm": 23.851503372192383, "learning_rate": 2.7e-08, "loss": 9.463, "step": 9 }, { "epoch": 0.011043622308117063, "grad_norm": 25.68195343017578, "learning_rate": 3.0000000000000004e-08, "loss": 9.9403, "step": 10 }, { "epoch": 0.012147984538928768, "grad_norm": 25.868040084838867, "learning_rate": 3.3000000000000004e-08, "loss": 10.1515, "step": 11 }, { "epoch": 0.013252346769740474, "grad_norm": 25.492202758789062, "learning_rate": 3.6e-08, "loss": 10.0174, "step": 12 }, { "epoch": 0.01435670900055218, "grad_norm": 27.195087432861328, "learning_rate": 3.9e-08, "loss": 10.4684, "step": 13 }, { "epoch": 
0.015461071231363888, "grad_norm": 26.48980140686035, "learning_rate": 4.2e-08, "loss": 10.1971, "step": 14 }, { "epoch": 0.016565433462175594, "grad_norm": 25.938669204711914, "learning_rate": 4.5e-08, "loss": 10.0385, "step": 15 }, { "epoch": 0.0176697956929873, "grad_norm": 25.684959411621094, "learning_rate": 4.8000000000000006e-08, "loss": 9.8838, "step": 16 }, { "epoch": 0.018774157923799006, "grad_norm": 27.656476974487305, "learning_rate": 5.1e-08, "loss": 10.3708, "step": 17 }, { "epoch": 0.019878520154610713, "grad_norm": 26.69969367980957, "learning_rate": 5.4e-08, "loss": 10.1, "step": 18 }, { "epoch": 0.020982882385422418, "grad_norm": 25.731966018676758, "learning_rate": 5.7e-08, "loss": 9.8487, "step": 19 }, { "epoch": 0.022087244616234125, "grad_norm": 26.41034698486328, "learning_rate": 6.000000000000001e-08, "loss": 10.2696, "step": 20 }, { "epoch": 0.02319160684704583, "grad_norm": 27.27632713317871, "learning_rate": 6.3e-08, "loss": 10.4782, "step": 21 }, { "epoch": 0.024295969077857537, "grad_norm": 26.154550552368164, "learning_rate": 6.600000000000001e-08, "loss": 9.953, "step": 22 }, { "epoch": 0.025400331308669245, "grad_norm": 27.348390579223633, "learning_rate": 6.9e-08, "loss": 10.3954, "step": 23 }, { "epoch": 0.02650469353948095, "grad_norm": 26.75918960571289, "learning_rate": 7.2e-08, "loss": 10.2777, "step": 24 }, { "epoch": 0.027609055770292656, "grad_norm": 27.029285430908203, "learning_rate": 7.500000000000001e-08, "loss": 10.1941, "step": 25 }, { "epoch": 0.02871341800110436, "grad_norm": 27.26399040222168, "learning_rate": 7.8e-08, "loss": 10.3111, "step": 26 }, { "epoch": 0.02981778023191607, "grad_norm": 27.168718338012695, "learning_rate": 8.100000000000001e-08, "loss": 10.2609, "step": 27 }, { "epoch": 0.030922142462727776, "grad_norm": 27.14641761779785, "learning_rate": 8.4e-08, "loss": 10.1432, "step": 28 }, { "epoch": 0.03202650469353948, "grad_norm": 27.7408447265625, "learning_rate": 8.7e-08, "loss": 10.4463, "step": 
29 }, { "epoch": 0.03313086692435119, "grad_norm": 26.907421112060547, "learning_rate": 9e-08, "loss": 10.2013, "step": 30 }, { "epoch": 0.034235229155162895, "grad_norm": 27.57917594909668, "learning_rate": 9.3e-08, "loss": 10.1874, "step": 31 }, { "epoch": 0.0353395913859746, "grad_norm": 27.67759132385254, "learning_rate": 9.600000000000001e-08, "loss": 9.9293, "step": 32 }, { "epoch": 0.036443953616786304, "grad_norm": 26.106714248657227, "learning_rate": 9.9e-08, "loss": 9.9227, "step": 33 }, { "epoch": 0.03754831584759801, "grad_norm": 25.276371002197266, "learning_rate": 1.02e-07, "loss": 9.6688, "step": 34 }, { "epoch": 0.03865267807840972, "grad_norm": 26.24152183532715, "learning_rate": 1.05e-07, "loss": 9.7193, "step": 35 }, { "epoch": 0.03975704030922143, "grad_norm": 27.11524200439453, "learning_rate": 1.08e-07, "loss": 9.9981, "step": 36 }, { "epoch": 0.040861402540033134, "grad_norm": 26.551088333129883, "learning_rate": 1.11e-07, "loss": 10.0124, "step": 37 }, { "epoch": 0.041965764770844835, "grad_norm": 24.901384353637695, "learning_rate": 1.14e-07, "loss": 9.5828, "step": 38 }, { "epoch": 0.04307012700165654, "grad_norm": 26.979820251464844, "learning_rate": 1.17e-07, "loss": 9.8318, "step": 39 }, { "epoch": 0.04417448923246825, "grad_norm": 25.554513931274414, "learning_rate": 1.2000000000000002e-07, "loss": 9.4657, "step": 40 }, { "epoch": 0.04527885146327996, "grad_norm": 26.241125106811523, "learning_rate": 1.23e-07, "loss": 9.4981, "step": 41 }, { "epoch": 0.04638321369409166, "grad_norm": 26.30233383178711, "learning_rate": 1.26e-07, "loss": 9.7591, "step": 42 }, { "epoch": 0.047487575924903366, "grad_norm": 26.501834869384766, "learning_rate": 1.29e-07, "loss": 9.4583, "step": 43 }, { "epoch": 0.048591938155715074, "grad_norm": 25.959285736083984, "learning_rate": 1.3200000000000002e-07, "loss": 9.4459, "step": 44 }, { "epoch": 0.04969630038652678, "grad_norm": 24.23187828063965, "learning_rate": 1.35e-07, "loss": 9.0523, "step": 45 }, { 
"epoch": 0.05080066261733849, "grad_norm": 25.78313636779785, "learning_rate": 1.38e-07, "loss": 9.3673, "step": 46 }, { "epoch": 0.05190502484815019, "grad_norm": 25.398441314697266, "learning_rate": 1.41e-07, "loss": 9.2448, "step": 47 }, { "epoch": 0.0530093870789619, "grad_norm": 24.190706253051758, "learning_rate": 1.44e-07, "loss": 9.1034, "step": 48 }, { "epoch": 0.054113749309773605, "grad_norm": 24.611783981323242, "learning_rate": 1.47e-07, "loss": 9.0755, "step": 49 }, { "epoch": 0.05521811154058531, "grad_norm": 25.469419479370117, "learning_rate": 1.5000000000000002e-07, "loss": 9.1675, "step": 50 }, { "epoch": 0.05632247377139702, "grad_norm": 25.725690841674805, "learning_rate": 1.53e-07, "loss": 9.6454, "step": 51 }, { "epoch": 0.05742683600220872, "grad_norm": 21.72728729248047, "learning_rate": 1.56e-07, "loss": 8.3702, "step": 52 }, { "epoch": 0.05853119823302043, "grad_norm": 20.26394271850586, "learning_rate": 1.59e-07, "loss": 7.9726, "step": 53 }, { "epoch": 0.05963556046383214, "grad_norm": 20.16863441467285, "learning_rate": 1.6200000000000002e-07, "loss": 7.8361, "step": 54 }, { "epoch": 0.060739922694643844, "grad_norm": 20.229869842529297, "learning_rate": 1.6499999999999998e-07, "loss": 7.8569, "step": 55 }, { "epoch": 0.06184428492545555, "grad_norm": 20.52715301513672, "learning_rate": 1.68e-07, "loss": 7.7501, "step": 56 }, { "epoch": 0.06294864715626726, "grad_norm": 22.231521606445312, "learning_rate": 1.71e-07, "loss": 8.4593, "step": 57 }, { "epoch": 0.06405300938707896, "grad_norm": 21.881174087524414, "learning_rate": 1.74e-07, "loss": 8.2926, "step": 58 }, { "epoch": 0.06515737161789067, "grad_norm": 23.973791122436523, "learning_rate": 1.77e-07, "loss": 8.848, "step": 59 }, { "epoch": 0.06626173384870238, "grad_norm": 27.301607131958008, "learning_rate": 1.8e-07, "loss": 9.8388, "step": 60 }, { "epoch": 0.06736609607951408, "grad_norm": 26.1263427734375, "learning_rate": 1.83e-07, "loss": 9.2792, "step": 61 }, { "epoch": 
0.06847045831032579, "grad_norm": 28.84987449645996, "learning_rate": 1.86e-07, "loss": 10.0307, "step": 62 }, { "epoch": 0.06957482054113749, "grad_norm": 28.604270935058594, "learning_rate": 1.89e-07, "loss": 9.8056, "step": 63 }, { "epoch": 0.0706791827719492, "grad_norm": 29.506996154785156, "learning_rate": 1.9200000000000003e-07, "loss": 10.0155, "step": 64 }, { "epoch": 0.0717835450027609, "grad_norm": 28.14299201965332, "learning_rate": 1.9499999999999999e-07, "loss": 9.7215, "step": 65 }, { "epoch": 0.07288790723357261, "grad_norm": 29.491870880126953, "learning_rate": 1.98e-07, "loss": 10.0191, "step": 66 }, { "epoch": 0.07399226946438432, "grad_norm": 27.787593841552734, "learning_rate": 2.01e-07, "loss": 9.5992, "step": 67 }, { "epoch": 0.07509663169519602, "grad_norm": 29.19999122619629, "learning_rate": 2.04e-07, "loss": 9.7527, "step": 68 }, { "epoch": 0.07620099392600774, "grad_norm": 29.765960693359375, "learning_rate": 2.0700000000000001e-07, "loss": 9.8436, "step": 69 }, { "epoch": 0.07730535615681944, "grad_norm": 28.39474868774414, "learning_rate": 2.1e-07, "loss": 9.6408, "step": 70 }, { "epoch": 0.07840971838763114, "grad_norm": 29.049678802490234, "learning_rate": 2.1300000000000001e-07, "loss": 9.6703, "step": 71 }, { "epoch": 0.07951408061844285, "grad_norm": 28.410053253173828, "learning_rate": 2.16e-07, "loss": 9.5011, "step": 72 }, { "epoch": 0.08061844284925455, "grad_norm": 30.46071434020996, "learning_rate": 2.1900000000000002e-07, "loss": 9.9185, "step": 73 }, { "epoch": 0.08172280508006627, "grad_norm": 29.117446899414062, "learning_rate": 2.22e-07, "loss": 9.7273, "step": 74 }, { "epoch": 0.08282716731087797, "grad_norm": 28.576501846313477, "learning_rate": 2.25e-07, "loss": 9.4511, "step": 75 }, { "epoch": 0.08393152954168967, "grad_norm": 27.783966064453125, "learning_rate": 2.28e-07, "loss": 9.1928, "step": 76 }, { "epoch": 0.08503589177250138, "grad_norm": 30.636690139770508, "learning_rate": 2.3100000000000002e-07, "loss": 
9.6036, "step": 77 }, { "epoch": 0.08614025400331309, "grad_norm": 30.224050521850586, "learning_rate": 2.34e-07, "loss": 9.503, "step": 78 }, { "epoch": 0.08724461623412479, "grad_norm": 29.562015533447266, "learning_rate": 2.3700000000000002e-07, "loss": 9.4552, "step": 79 }, { "epoch": 0.0883489784649365, "grad_norm": 29.58129119873047, "learning_rate": 2.4000000000000003e-07, "loss": 9.5315, "step": 80 }, { "epoch": 0.0894533406957482, "grad_norm": 30.074750900268555, "learning_rate": 2.43e-07, "loss": 9.5778, "step": 81 }, { "epoch": 0.09055770292655992, "grad_norm": 30.161821365356445, "learning_rate": 2.46e-07, "loss": 9.3814, "step": 82 }, { "epoch": 0.09166206515737162, "grad_norm": 29.428369522094727, "learning_rate": 2.49e-07, "loss": 9.2398, "step": 83 }, { "epoch": 0.09276642738818332, "grad_norm": 30.128459930419922, "learning_rate": 2.52e-07, "loss": 9.2127, "step": 84 }, { "epoch": 0.09387078961899503, "grad_norm": 32.243404388427734, "learning_rate": 2.5500000000000005e-07, "loss": 9.7106, "step": 85 }, { "epoch": 0.09497515184980673, "grad_norm": 31.87127685546875, "learning_rate": 2.58e-07, "loss": 9.6486, "step": 86 }, { "epoch": 0.09607951408061845, "grad_norm": 30.40481185913086, "learning_rate": 2.6099999999999997e-07, "loss": 9.3086, "step": 87 }, { "epoch": 0.09718387631143015, "grad_norm": 30.504940032958984, "learning_rate": 2.6400000000000003e-07, "loss": 9.1461, "step": 88 }, { "epoch": 0.09828823854224185, "grad_norm": 30.42082977294922, "learning_rate": 2.67e-07, "loss": 9.1816, "step": 89 }, { "epoch": 0.09939260077305356, "grad_norm": 31.880264282226562, "learning_rate": 2.7e-07, "loss": 9.3059, "step": 90 }, { "epoch": 0.10049696300386526, "grad_norm": 30.230260848999023, "learning_rate": 2.73e-07, "loss": 8.8018, "step": 91 }, { "epoch": 0.10160132523467698, "grad_norm": 31.263334274291992, "learning_rate": 2.76e-07, "loss": 8.9427, "step": 92 }, { "epoch": 0.10270568746548868, "grad_norm": 29.735490798950195, "learning_rate": 
2.79e-07, "loss": 8.6397, "step": 93 }, { "epoch": 0.10381004969630038, "grad_norm": 30.400630950927734, "learning_rate": 2.82e-07, "loss": 8.8386, "step": 94 }, { "epoch": 0.1049144119271121, "grad_norm": 31.248552322387695, "learning_rate": 2.85e-07, "loss": 8.8567, "step": 95 }, { "epoch": 0.1060187741579238, "grad_norm": 30.01069450378418, "learning_rate": 2.88e-07, "loss": 8.7573, "step": 96 }, { "epoch": 0.10712313638873551, "grad_norm": 30.29937171936035, "learning_rate": 2.91e-07, "loss": 8.5911, "step": 97 }, { "epoch": 0.10822749861954721, "grad_norm": 29.42728042602539, "learning_rate": 2.94e-07, "loss": 8.5128, "step": 98 }, { "epoch": 0.10933186085035891, "grad_norm": 30.172536849975586, "learning_rate": 2.97e-07, "loss": 8.4141, "step": 99 }, { "epoch": 0.11043622308117063, "grad_norm": 29.047748565673828, "learning_rate": 3.0000000000000004e-07, "loss": 8.2754, "step": 100 }, { "epoch": 0.11154058531198233, "grad_norm": 25.497541427612305, "learning_rate": 3.03e-07, "loss": 8.0628, "step": 101 }, { "epoch": 0.11264494754279404, "grad_norm": 22.78545379638672, "learning_rate": 3.06e-07, "loss": 7.3499, "step": 102 }, { "epoch": 0.11374930977360574, "grad_norm": 25.78771209716797, "learning_rate": 3.0900000000000003e-07, "loss": 7.8938, "step": 103 }, { "epoch": 0.11485367200441744, "grad_norm": 25.232698440551758, "learning_rate": 3.12e-07, "loss": 7.7042, "step": 104 }, { "epoch": 0.11595803423522916, "grad_norm": 23.552751541137695, "learning_rate": 3.15e-07, "loss": 7.3459, "step": 105 }, { "epoch": 0.11706239646604086, "grad_norm": 23.889833450317383, "learning_rate": 3.18e-07, "loss": 7.2837, "step": 106 }, { "epoch": 0.11816675869685257, "grad_norm": 23.603317260742188, "learning_rate": 3.21e-07, "loss": 7.1869, "step": 107 }, { "epoch": 0.11927112092766427, "grad_norm": 27.66062355041504, "learning_rate": 3.2400000000000004e-07, "loss": 7.802, "step": 108 }, { "epoch": 0.12037548315847597, "grad_norm": 31.925636291503906, "learning_rate": 
3.27e-07, "loss": 8.4517, "step": 109 }, { "epoch": 0.12147984538928769, "grad_norm": 31.51603889465332, "learning_rate": 3.2999999999999996e-07, "loss": 8.3385, "step": 110 }, { "epoch": 0.12258420762009939, "grad_norm": 34.11604690551758, "learning_rate": 3.3300000000000003e-07, "loss": 8.587, "step": 111 }, { "epoch": 0.1236885698509111, "grad_norm": 36.21760559082031, "learning_rate": 3.36e-07, "loss": 8.6351, "step": 112 }, { "epoch": 0.1247929320817228, "grad_norm": Infinity, "learning_rate": 3.36e-07, "loss": 8.878, "step": 113 }, { "epoch": 0.12589729431253452, "grad_norm": 40.481998443603516, "learning_rate": 3.39e-07, "loss": 9.1791, "step": 114 }, { "epoch": 0.12700165654334622, "grad_norm": 35.59861755371094, "learning_rate": 3.42e-07, "loss": 8.5629, "step": 115 }, { "epoch": 0.12810601877415792, "grad_norm": 38.14615249633789, "learning_rate": 3.45e-07, "loss": 8.9139, "step": 116 }, { "epoch": 0.12921038100496962, "grad_norm": 37.66563034057617, "learning_rate": 3.48e-07, "loss": 8.7302, "step": 117 }, { "epoch": 0.13031474323578135, "grad_norm": 38.08316421508789, "learning_rate": 3.51e-07, "loss": 8.6184, "step": 118 }, { "epoch": 0.13141910546659305, "grad_norm": 38.345947265625, "learning_rate": 3.54e-07, "loss": 8.6473, "step": 119 }, { "epoch": 0.13252346769740475, "grad_norm": 42.60735321044922, "learning_rate": 3.5700000000000003e-07, "loss": 8.9601, "step": 120 }, { "epoch": 0.13362782992821645, "grad_norm": 37.00083541870117, "learning_rate": 3.6e-07, "loss": 8.485, "step": 121 }, { "epoch": 0.13473219215902815, "grad_norm": 40.80447769165039, "learning_rate": 3.63e-07, "loss": 8.787, "step": 122 }, { "epoch": 0.13583655438983988, "grad_norm": 41.64543151855469, "learning_rate": 3.66e-07, "loss": 8.6464, "step": 123 }, { "epoch": 0.13694091662065158, "grad_norm": 41.83917999267578, "learning_rate": 3.6900000000000004e-07, "loss": 8.6071, "step": 124 }, { "epoch": 0.13804527885146328, "grad_norm": 42.53705978393555, "learning_rate": 
3.72e-07, "loss": 8.6135, "step": 125 }, { "epoch": 0.13914964108227498, "grad_norm": 41.94074249267578, "learning_rate": 3.75e-07, "loss": 8.4842, "step": 126 }, { "epoch": 0.14025400331308668, "grad_norm": 39.60122299194336, "learning_rate": 3.78e-07, "loss": 8.086, "step": 127 }, { "epoch": 0.1413583655438984, "grad_norm": 43.697898864746094, "learning_rate": 3.81e-07, "loss": 8.3205, "step": 128 }, { "epoch": 0.1424627277747101, "grad_norm": 45.44581604003906, "learning_rate": 3.8400000000000005e-07, "loss": 8.4744, "step": 129 }, { "epoch": 0.1435670900055218, "grad_norm": 44.455589294433594, "learning_rate": 3.87e-07, "loss": 8.1817, "step": 130 }, { "epoch": 0.14467145223633351, "grad_norm": 43.18782043457031, "learning_rate": 3.8999999999999997e-07, "loss": 8.0742, "step": 131 }, { "epoch": 0.14577581446714521, "grad_norm": 42.90877151489258, "learning_rate": 3.9300000000000004e-07, "loss": 8.0382, "step": 132 }, { "epoch": 0.14688017669795694, "grad_norm": 41.53150939941406, "learning_rate": 3.96e-07, "loss": 7.8361, "step": 133 }, { "epoch": 0.14798453892876864, "grad_norm": 42.588993072509766, "learning_rate": 3.99e-07, "loss": 7.9438, "step": 134 }, { "epoch": 0.14908890115958034, "grad_norm": 44.96218490600586, "learning_rate": 4.02e-07, "loss": 7.8387, "step": 135 }, { "epoch": 0.15019326339039205, "grad_norm": 47.227840423583984, "learning_rate": 4.05e-07, "loss": 7.9435, "step": 136 }, { "epoch": 0.15129762562120375, "grad_norm": 44.9638786315918, "learning_rate": 4.08e-07, "loss": 7.6916, "step": 137 }, { "epoch": 0.15240198785201547, "grad_norm": 43.97755432128906, "learning_rate": 4.11e-07, "loss": 7.539, "step": 138 }, { "epoch": 0.15350635008282718, "grad_norm": 44.20932388305664, "learning_rate": 4.1400000000000003e-07, "loss": 7.518, "step": 139 }, { "epoch": 0.15461071231363888, "grad_norm": 47.036537170410156, "learning_rate": 4.17e-07, "loss": 7.4661, "step": 140 }, { "epoch": 0.15571507454445058, "grad_norm": 46.82403564453125, 
"learning_rate": 4.2e-07, "loss": 7.4649, "step": 141 }, { "epoch": 0.15681943677526228, "grad_norm": 48.51802444458008, "learning_rate": 4.23e-07, "loss": 7.3729, "step": 142 }, { "epoch": 0.157923799006074, "grad_norm": 51.65591812133789, "learning_rate": 4.2600000000000003e-07, "loss": 7.4497, "step": 143 }, { "epoch": 0.1590281612368857, "grad_norm": 50.55500793457031, "learning_rate": 4.2900000000000004e-07, "loss": 7.3504, "step": 144 }, { "epoch": 0.1601325234676974, "grad_norm": 47.51457214355469, "learning_rate": 4.32e-07, "loss": 7.11, "step": 145 }, { "epoch": 0.1612368856985091, "grad_norm": 46.64597702026367, "learning_rate": 4.35e-07, "loss": 7.0194, "step": 146 }, { "epoch": 0.1623412479293208, "grad_norm": 48.28982925415039, "learning_rate": 4.3800000000000003e-07, "loss": 7.0292, "step": 147 }, { "epoch": 0.16344561016013254, "grad_norm": 45.6124382019043, "learning_rate": 4.41e-07, "loss": 6.8906, "step": 148 }, { "epoch": 0.16454997239094424, "grad_norm": 46.083152770996094, "learning_rate": 4.44e-07, "loss": 6.693, "step": 149 }, { "epoch": 0.16565433462175594, "grad_norm": 43.51268768310547, "learning_rate": 4.47e-07, "loss": 6.513, "step": 150 }, { "epoch": 0.16675869685256764, "grad_norm": 48.5594482421875, "learning_rate": 4.5e-07, "loss": 7.1138, "step": 151 }, { "epoch": 0.16786305908337934, "grad_norm": 37.22804641723633, "learning_rate": 4.5300000000000005e-07, "loss": 6.2665, "step": 152 }, { "epoch": 0.16896742131419104, "grad_norm": 39.51731872558594, "learning_rate": 4.56e-07, "loss": 6.2921, "step": 153 }, { "epoch": 0.17007178354500277, "grad_norm": 34.68922424316406, "learning_rate": 4.5899999999999997e-07, "loss": 5.9109, "step": 154 }, { "epoch": 0.17117614577581447, "grad_norm": 36.208370208740234, "learning_rate": 4.6200000000000003e-07, "loss": 5.9831, "step": 155 }, { "epoch": 0.17228050800662617, "grad_norm": 37.270233154296875, "learning_rate": 4.65e-07, "loss": 5.946, "step": 156 }, { "epoch": 0.17338487023743787, 
"grad_norm": 37.796112060546875, "learning_rate": 4.68e-07, "loss": 5.8625, "step": 157 }, { "epoch": 0.17448923246824957, "grad_norm": 40.33853530883789, "learning_rate": 4.7099999999999997e-07, "loss": 5.9645, "step": 158 }, { "epoch": 0.1755935946990613, "grad_norm": 48.202083587646484, "learning_rate": 4.7400000000000004e-07, "loss": 6.1377, "step": 159 }, { "epoch": 0.176697956929873, "grad_norm": 49.47103500366211, "learning_rate": 4.77e-07, "loss": 6.3202, "step": 160 }, { "epoch": 0.1778023191606847, "grad_norm": 51.79259490966797, "learning_rate": 4.800000000000001e-07, "loss": 6.2633, "step": 161 }, { "epoch": 0.1789066813914964, "grad_norm": 52.51485824584961, "learning_rate": 4.83e-07, "loss": 6.3169, "step": 162 }, { "epoch": 0.1800110436223081, "grad_norm": 50.47430419921875, "learning_rate": 4.86e-07, "loss": 6.1072, "step": 163 }, { "epoch": 0.18111540585311983, "grad_norm": 50.96548843383789, "learning_rate": 4.89e-07, "loss": 6.1026, "step": 164 }, { "epoch": 0.18221976808393153, "grad_norm": 50.50025177001953, "learning_rate": 4.92e-07, "loss": 6.071, "step": 165 }, { "epoch": 0.18332413031474323, "grad_norm": 48.737464904785156, "learning_rate": 4.95e-07, "loss": 5.9486, "step": 166 }, { "epoch": 0.18442849254555493, "grad_norm": 49.16767883300781, "learning_rate": 4.98e-07, "loss": 5.81, "step": 167 }, { "epoch": 0.18553285477636663, "grad_norm": 42.46337890625, "learning_rate": 5.01e-07, "loss": 5.6123, "step": 168 }, { "epoch": 0.18663721700717836, "grad_norm": 46.495849609375, "learning_rate": 5.04e-07, "loss": 5.663, "step": 169 }, { "epoch": 0.18774157923799006, "grad_norm": 42.2645263671875, "learning_rate": 5.07e-07, "loss": 5.5137, "step": 170 }, { "epoch": 0.18884594146880176, "grad_norm": 38.8209342956543, "learning_rate": 5.100000000000001e-07, "loss": 5.3967, "step": 171 }, { "epoch": 0.18995030369961347, "grad_norm": 40.22709274291992, "learning_rate": 5.13e-07, "loss": 5.3819, "step": 172 }, { "epoch": 0.19105466593042517, 
"grad_norm": 38.42844009399414, "learning_rate": 5.16e-07, "loss": 5.3146, "step": 173 }, { "epoch": 0.1921590281612369, "grad_norm": 39.1297607421875, "learning_rate": 5.19e-07, "loss": 5.3651, "step": 174 }, { "epoch": 0.1932633903920486, "grad_norm": 37.881282806396484, "learning_rate": 5.219999999999999e-07, "loss": 5.2348, "step": 175 }, { "epoch": 0.1943677526228603, "grad_norm": 35.99671173095703, "learning_rate": 5.250000000000001e-07, "loss": 5.2452, "step": 176 }, { "epoch": 0.195472114853672, "grad_norm": 32.34904861450195, "learning_rate": 5.280000000000001e-07, "loss": 5.0623, "step": 177 }, { "epoch": 0.1965764770844837, "grad_norm": 29.974721908569336, "learning_rate": 5.31e-07, "loss": 4.9942, "step": 178 }, { "epoch": 0.19768083931529543, "grad_norm": 28.241558074951172, "learning_rate": 5.34e-07, "loss": 4.9712, "step": 179 }, { "epoch": 0.19878520154610713, "grad_norm": 27.96314811706543, "learning_rate": 5.37e-07, "loss": 4.9497, "step": 180 }, { "epoch": 0.19988956377691883, "grad_norm": 25.213237762451172, "learning_rate": 5.4e-07, "loss": 4.8363, "step": 181 }, { "epoch": 0.20099392600773053, "grad_norm": 22.632083892822266, "learning_rate": 5.43e-07, "loss": 4.8042, "step": 182 }, { "epoch": 0.20209828823854223, "grad_norm": 21.06781005859375, "learning_rate": 5.46e-07, "loss": 4.7874, "step": 183 }, { "epoch": 0.20320265046935396, "grad_norm": 17.396682739257812, "learning_rate": 5.490000000000001e-07, "loss": 4.7123, "step": 184 }, { "epoch": 0.20430701270016566, "grad_norm": 16.792224884033203, "learning_rate": 5.52e-07, "loss": 4.6982, "step": 185 }, { "epoch": 0.20541137493097736, "grad_norm": 12.618452072143555, "learning_rate": 5.55e-07, "loss": 4.6806, "step": 186 }, { "epoch": 0.20651573716178906, "grad_norm": 10.907914161682129, "learning_rate": 5.58e-07, "loss": 4.6112, "step": 187 }, { "epoch": 0.20762009939260076, "grad_norm": 8.70097827911377, "learning_rate": 5.61e-07, "loss": 4.6158, "step": 188 }, { "epoch": 
0.2087244616234125, "grad_norm": 8.296287536621094, "learning_rate": 5.64e-07, "loss": 4.619, "step": 189 }, { "epoch": 0.2098288238542242, "grad_norm": 7.8918986320495605, "learning_rate": 5.67e-07, "loss": 4.5896, "step": 190 }, { "epoch": 0.2109331860850359, "grad_norm": 8.1670503616333, "learning_rate": 5.7e-07, "loss": 4.5235, "step": 191 }, { "epoch": 0.2120375483158476, "grad_norm": 8.553444862365723, "learning_rate": 5.73e-07, "loss": 4.5784, "step": 192 }, { "epoch": 0.2131419105466593, "grad_norm": 9.1376953125, "learning_rate": 5.76e-07, "loss": 4.5392, "step": 193 }, { "epoch": 0.21424627277747102, "grad_norm": 9.89333724975586, "learning_rate": 5.790000000000001e-07, "loss": 4.5119, "step": 194 }, { "epoch": 0.21535063500828272, "grad_norm": 9.865289688110352, "learning_rate": 5.82e-07, "loss": 4.4886, "step": 195 }, { "epoch": 0.21645499723909442, "grad_norm": 11.241876602172852, "learning_rate": 5.85e-07, "loss": 4.5272, "step": 196 }, { "epoch": 0.21755935946990612, "grad_norm": 11.469217300415039, "learning_rate": 5.88e-07, "loss": 4.5427, "step": 197 }, { "epoch": 0.21866372170071782, "grad_norm": 9.925869941711426, "learning_rate": 5.909999999999999e-07, "loss": 4.478, "step": 198 }, { "epoch": 0.21976808393152955, "grad_norm": 9.766914367675781, "learning_rate": 5.94e-07, "loss": 4.4826, "step": 199 }, { "epoch": 0.22087244616234125, "grad_norm": 10.505325317382812, "learning_rate": 5.970000000000001e-07, "loss": 4.4396, "step": 200 }, { "epoch": 0.22197680839315295, "grad_norm": 6.9929351806640625, "learning_rate": 6.000000000000001e-07, "loss": 4.1979, "step": 201 }, { "epoch": 0.22308117062396465, "grad_norm": 6.913085460662842, "learning_rate": 6.03e-07, "loss": 4.1841, "step": 202 }, { "epoch": 0.22418553285477635, "grad_norm": 6.744357585906982, "learning_rate": 6.06e-07, "loss": 4.1682, "step": 203 }, { "epoch": 0.22528989508558808, "grad_norm": 6.92411470413208, "learning_rate": 6.09e-07, "loss": 4.1363, "step": 204 }, { "epoch": 
0.22639425731639978, "grad_norm": 6.862609386444092, "learning_rate": 6.12e-07, "loss": 4.1486, "step": 205 }, { "epoch": 0.22749861954721148, "grad_norm": 6.800034999847412, "learning_rate": 6.15e-07, "loss": 4.1277, "step": 206 }, { "epoch": 0.22860298177802318, "grad_norm": 6.55843448638916, "learning_rate": 6.180000000000001e-07, "loss": 4.126, "step": 207 }, { "epoch": 0.22970734400883489, "grad_norm": 6.261094093322754, "learning_rate": 6.21e-07, "loss": 4.1864, "step": 208 }, { "epoch": 0.2308117062396466, "grad_norm": 6.316919803619385, "learning_rate": 6.24e-07, "loss": 4.2679, "step": 209 }, { "epoch": 0.23191606847045831, "grad_norm": 6.219297885894775, "learning_rate": 6.27e-07, "loss": 4.219, "step": 210 }, { "epoch": 0.23302043070127001, "grad_norm": 6.05348539352417, "learning_rate": 6.3e-07, "loss": 4.2126, "step": 211 }, { "epoch": 0.23412479293208172, "grad_norm": 6.700880527496338, "learning_rate": 6.33e-07, "loss": 4.2706, "step": 212 }, { "epoch": 0.23522915516289342, "grad_norm": 6.061969757080078, "learning_rate": 6.36e-07, "loss": 4.2506, "step": 213 }, { "epoch": 0.23633351739370514, "grad_norm": 5.960559368133545, "learning_rate": 6.39e-07, "loss": 4.2103, "step": 214 }, { "epoch": 0.23743787962451685, "grad_norm": 5.944448947906494, "learning_rate": 6.42e-07, "loss": 4.2398, "step": 215 }, { "epoch": 0.23854224185532855, "grad_norm": 5.629647731781006, "learning_rate": 6.45e-07, "loss": 4.2473, "step": 216 }, { "epoch": 0.23964660408614025, "grad_norm": 5.69216775894165, "learning_rate": 6.480000000000001e-07, "loss": 4.1859, "step": 217 }, { "epoch": 0.24075096631695195, "grad_norm": 5.297571659088135, "learning_rate": 6.51e-07, "loss": 4.1945, "step": 218 }, { "epoch": 0.24185532854776368, "grad_norm": 5.386274814605713, "learning_rate": 6.54e-07, "loss": 4.1774, "step": 219 }, { "epoch": 0.24295969077857538, "grad_norm": 5.609156131744385, "learning_rate": 6.57e-07, "loss": 4.2241, "step": 220 }, { "epoch": 0.24406405300938708, 
"grad_norm": 5.606249809265137, "learning_rate": 6.599999999999999e-07, "loss": 4.1344, "step": 221 }, { "epoch": 0.24516841524019878, "grad_norm": 5.431570053100586, "learning_rate": 6.63e-07, "loss": 4.0779, "step": 222 }, { "epoch": 0.24627277747101048, "grad_norm": 5.478754997253418, "learning_rate": 6.660000000000001e-07, "loss": 4.1061, "step": 223 }, { "epoch": 0.2473771397018222, "grad_norm": 5.008996486663818, "learning_rate": 6.690000000000001e-07, "loss": 4.0879, "step": 224 }, { "epoch": 0.2484815019326339, "grad_norm": 5.385288238525391, "learning_rate": 6.72e-07, "loss": 4.1165, "step": 225 }, { "epoch": 0.2495858641634456, "grad_norm": 5.00258207321167, "learning_rate": 6.75e-07, "loss": 4.0534, "step": 226 }, { "epoch": 0.2506902263942573, "grad_norm": 5.017960548400879, "learning_rate": 6.78e-07, "loss": 4.07, "step": 227 }, { "epoch": 0.25179458862506904, "grad_norm": 4.991387844085693, "learning_rate": 6.81e-07, "loss": 4.0157, "step": 228 }, { "epoch": 0.2528989508558807, "grad_norm": 5.006114482879639, "learning_rate": 6.84e-07, "loss": 4.0347, "step": 229 }, { "epoch": 0.25400331308669244, "grad_norm": 4.986337184906006, "learning_rate": 6.87e-07, "loss": 4.0467, "step": 230 }, { "epoch": 0.25510767531750417, "grad_norm": 4.716815948486328, "learning_rate": 6.9e-07, "loss": 4.0091, "step": 231 }, { "epoch": 0.25621203754831584, "grad_norm": 5.021855354309082, "learning_rate": 6.93e-07, "loss": 3.9913, "step": 232 }, { "epoch": 0.25731639977912757, "grad_norm": 5.051729202270508, "learning_rate": 6.96e-07, "loss": 3.9222, "step": 233 }, { "epoch": 0.25842076200993924, "grad_norm": 6.159341335296631, "learning_rate": 6.990000000000001e-07, "loss": 3.9791, "step": 234 }, { "epoch": 0.25952512424075097, "grad_norm": 4.455577850341797, "learning_rate": 7.02e-07, "loss": 3.9596, "step": 235 }, { "epoch": 0.2606294864715627, "grad_norm": 4.343255043029785, "learning_rate": 7.05e-07, "loss": 3.9313, "step": 236 }, { "epoch": 0.26173384870237437, 
"grad_norm": 4.75402307510376, "learning_rate": 7.08e-07, "loss": 3.9162, "step": 237 }, { "epoch": 0.2628382109331861, "grad_norm": 4.473922252655029, "learning_rate": 7.11e-07, "loss": 3.9109, "step": 238 }, { "epoch": 0.2639425731639978, "grad_norm": 4.2522454261779785, "learning_rate": 7.140000000000001e-07, "loss": 3.9189, "step": 239 }, { "epoch": 0.2650469353948095, "grad_norm": 4.048357009887695, "learning_rate": 7.170000000000001e-07, "loss": 3.8741, "step": 240 }, { "epoch": 0.26615129762562123, "grad_norm": 4.529073238372803, "learning_rate": 7.2e-07, "loss": 3.8436, "step": 241 }, { "epoch": 0.2672556598564329, "grad_norm": 5.596093654632568, "learning_rate": 7.23e-07, "loss": 3.8973, "step": 242 }, { "epoch": 0.26836002208724463, "grad_norm": 4.078098773956299, "learning_rate": 7.26e-07, "loss": 3.8512, "step": 243 }, { "epoch": 0.2694643843180563, "grad_norm": 12.770224571228027, "learning_rate": 7.29e-07, "loss": 3.8132, "step": 244 }, { "epoch": 0.27056874654886803, "grad_norm": 4.122069358825684, "learning_rate": 7.32e-07, "loss": 3.8135, "step": 245 }, { "epoch": 0.27167310877967976, "grad_norm": 4.089653968811035, "learning_rate": 7.350000000000001e-07, "loss": 3.7911, "step": 246 }, { "epoch": 0.27277747101049143, "grad_norm": 3.8683865070343018, "learning_rate": 7.380000000000001e-07, "loss": 3.8001, "step": 247 }, { "epoch": 0.27388183324130316, "grad_norm": 4.115608215332031, "learning_rate": 7.41e-07, "loss": 3.7901, "step": 248 }, { "epoch": 0.27498619547211484, "grad_norm": 4.473904132843018, "learning_rate": 7.44e-07, "loss": 3.7601, "step": 249 }, { "epoch": 0.27609055770292656, "grad_norm": 3.997875928878784, "learning_rate": 7.47e-07, "loss": 3.7107, "step": 250 }, { "epoch": 0.2771949199337383, "grad_norm": 4.178367614746094, "learning_rate": 7.5e-07, "loss": 3.6358, "step": 251 }, { "epoch": 0.27829928216454997, "grad_norm": 4.597774028778076, "learning_rate": 7.53e-07, "loss": 3.5561, "step": 252 }, { "epoch": 0.2794036443953617, 
"grad_norm": 4.089013576507568, "learning_rate": 7.56e-07, "loss": 3.588, "step": 253 }, { "epoch": 0.28050800662617337, "grad_norm": 4.140195369720459, "learning_rate": 7.59e-07, "loss": 3.5351, "step": 254 }, { "epoch": 0.2816123688569851, "grad_norm": 3.47623872756958, "learning_rate": 7.62e-07, "loss": 3.5402, "step": 255 }, { "epoch": 0.2827167310877968, "grad_norm": 4.016599178314209, "learning_rate": 7.65e-07, "loss": 3.5161, "step": 256 }, { "epoch": 0.2838210933186085, "grad_norm": 3.235722303390503, "learning_rate": 7.680000000000001e-07, "loss": 3.5019, "step": 257 }, { "epoch": 0.2849254555494202, "grad_norm": 6.258062839508057, "learning_rate": 7.71e-07, "loss": 3.5986, "step": 258 }, { "epoch": 0.2860298177802319, "grad_norm": 6.553055286407471, "learning_rate": 7.74e-07, "loss": 3.5675, "step": 259 }, { "epoch": 0.2871341800110436, "grad_norm": 8.58781623840332, "learning_rate": 7.77e-07, "loss": 3.569, "step": 260 }, { "epoch": 0.28823854224185536, "grad_norm": 6.250955581665039, "learning_rate": 7.799999999999999e-07, "loss": 3.6169, "step": 261 }, { "epoch": 0.28934290447266703, "grad_norm": 6.793821811676025, "learning_rate": 7.830000000000001e-07, "loss": 3.5865, "step": 262 }, { "epoch": 0.29044726670347876, "grad_norm": 5.147331714630127, "learning_rate": 7.860000000000001e-07, "loss": 3.5374, "step": 263 }, { "epoch": 0.29155162893429043, "grad_norm": 5.068551540374756, "learning_rate": 7.89e-07, "loss": 3.5776, "step": 264 }, { "epoch": 0.29265599116510216, "grad_norm": 4.763809680938721, "learning_rate": 7.92e-07, "loss": 3.5225, "step": 265 }, { "epoch": 0.2937603533959139, "grad_norm": 5.4151201248168945, "learning_rate": 7.95e-07, "loss": 3.5204, "step": 266 }, { "epoch": 0.29486471562672556, "grad_norm": 5.576271057128906, "learning_rate": 7.98e-07, "loss": 3.5142, "step": 267 }, { "epoch": 0.2959690778575373, "grad_norm": 6.4701080322265625, "learning_rate": 8.01e-07, "loss": 3.4616, "step": 268 }, { "epoch": 0.29707344008834896, 
"grad_norm": 11.605978012084961, "learning_rate": 8.04e-07, "loss": 3.4531, "step": 269 }, { "epoch": 0.2981778023191607, "grad_norm": 8.793405532836914, "learning_rate": 8.070000000000001e-07, "loss": 3.4452, "step": 270 }, { "epoch": 0.2992821645499724, "grad_norm": 9.470057487487793, "learning_rate": 8.1e-07, "loss": 3.4151, "step": 271 }, { "epoch": 0.3003865267807841, "grad_norm": 6.4670090675354, "learning_rate": 8.13e-07, "loss": 3.4126, "step": 272 }, { "epoch": 0.3014908890115958, "grad_norm": 3.634660482406616, "learning_rate": 8.16e-07, "loss": 3.4342, "step": 273 }, { "epoch": 0.3025952512424075, "grad_norm": 4.846097469329834, "learning_rate": 8.19e-07, "loss": 3.3598, "step": 274 }, { "epoch": 0.3036996134732192, "grad_norm": 4.918699741363525, "learning_rate": 8.22e-07, "loss": 3.4092, "step": 275 }, { "epoch": 0.30480397570403095, "grad_norm": 4.220233917236328, "learning_rate": 8.25e-07, "loss": 3.3715, "step": 276 }, { "epoch": 0.3059083379348426, "grad_norm": 5.2152099609375, "learning_rate": 8.280000000000001e-07, "loss": 3.3457, "step": 277 }, { "epoch": 0.30701270016565435, "grad_norm": 9.493802070617676, "learning_rate": 8.31e-07, "loss": 3.2828, "step": 278 }, { "epoch": 0.308117062396466, "grad_norm": 5.671670913696289, "learning_rate": 8.34e-07, "loss": 3.339, "step": 279 }, { "epoch": 0.30922142462727775, "grad_norm": 3.556901693344116, "learning_rate": 8.370000000000001e-07, "loss": 3.2847, "step": 280 }, { "epoch": 0.3103257868580895, "grad_norm": 4.178207874298096, "learning_rate": 8.4e-07, "loss": 3.3215, "step": 281 }, { "epoch": 0.31143014908890115, "grad_norm": 8.053496360778809, "learning_rate": 8.43e-07, "loss": 3.3147, "step": 282 }, { "epoch": 0.3125345113197129, "grad_norm": 4.572976589202881, "learning_rate": 8.46e-07, "loss": 3.3361, "step": 283 }, { "epoch": 0.31363887355052456, "grad_norm": 5.391819953918457, "learning_rate": 8.489999999999999e-07, "loss": 3.3213, "step": 284 }, { "epoch": 0.3147432357813363, "grad_norm": 
11.188932418823242, "learning_rate": 8.520000000000001e-07, "loss": 3.2834, "step": 285 }, { "epoch": 0.315847598012148, "grad_norm": 5.196976661682129, "learning_rate": 8.550000000000001e-07, "loss": 3.2535, "step": 286 }, { "epoch": 0.3169519602429597, "grad_norm": 7.495361804962158, "learning_rate": 8.580000000000001e-07, "loss": 3.2384, "step": 287 }, { "epoch": 0.3180563224737714, "grad_norm": 12.07502269744873, "learning_rate": 8.61e-07, "loss": 3.2503, "step": 288 }, { "epoch": 0.3191606847045831, "grad_norm": null, "learning_rate": 8.61e-07, "loss": 3.2233, "step": 289 }, { "epoch": 0.3202650469353948, "grad_norm": 4.511821746826172, "learning_rate": 8.64e-07, "loss": 3.2194, "step": 290 }, { "epoch": 0.32136940916620654, "grad_norm": 6.754092693328857, "learning_rate": 8.67e-07, "loss": 3.2677, "step": 291 }, { "epoch": 0.3224737713970182, "grad_norm": 5.004817485809326, "learning_rate": 8.7e-07, "loss": 3.2568, "step": 292 }, { "epoch": 0.32357813362782994, "grad_norm": null, "learning_rate": 8.7e-07, "loss": 3.2516, "step": 293 }, { "epoch": 0.3246824958586416, "grad_norm": 6.4454874992370605, "learning_rate": 8.73e-07, "loss": 3.2621, "step": 294 }, { "epoch": 0.32578685808945335, "grad_norm": 15.927083969116211, "learning_rate": 8.760000000000001e-07, "loss": 3.2464, "step": 295 }, { "epoch": 0.3268912203202651, "grad_norm": 9.019712448120117, "learning_rate": 8.79e-07, "loss": 3.2347, "step": 296 }, { "epoch": 0.32799558255107675, "grad_norm": 5.752242088317871, "learning_rate": 8.82e-07, "loss": 3.2341, "step": 297 }, { "epoch": 0.3290999447818885, "grad_norm": 3.180664300918579, "learning_rate": 8.85e-07, "loss": 3.2174, "step": 298 }, { "epoch": 0.33020430701270015, "grad_norm": 12.250070571899414, "learning_rate": 8.88e-07, "loss": 3.236, "step": 299 }, { "epoch": 0.3313086692435119, "grad_norm": 4.5728759765625, "learning_rate": 8.91e-07, "loss": 3.2563, "step": 300 }, { "epoch": 0.3324130314743236, "grad_norm": 11.17817497253418, "learning_rate": 
8.94e-07, "loss": 3.2989, "step": 301 }, { "epoch": 0.3335173937051353, "grad_norm": 8.901724815368652, "learning_rate": 8.97e-07, "loss": 3.2667, "step": 302 }, { "epoch": 0.334621755935947, "grad_norm": 6.391355514526367, "learning_rate": 9e-07, "loss": 3.2212, "step": 303 }, { "epoch": 0.3357261181667587, "grad_norm": 4.293461322784424, "learning_rate": 9.03e-07, "loss": 3.1897, "step": 304 }, { "epoch": 0.3368304803975704, "grad_norm": 3.129148006439209, "learning_rate": 9.060000000000001e-07, "loss": 3.1817, "step": 305 }, { "epoch": 0.3379348426283821, "grad_norm": 5.215662956237793, "learning_rate": 9.09e-07, "loss": 3.139, "step": 306 }, { "epoch": 0.3390392048591938, "grad_norm": 5.019737720489502, "learning_rate": 9.12e-07, "loss": 3.1998, "step": 307 }, { "epoch": 0.34014356709000554, "grad_norm": 5.010782241821289, "learning_rate": 9.15e-07, "loss": 3.2151, "step": 308 }, { "epoch": 0.3412479293208172, "grad_norm": 7.200685501098633, "learning_rate": 9.179999999999999e-07, "loss": 3.2851, "step": 309 }, { "epoch": 0.34235229155162894, "grad_norm": 7.878523826599121, "learning_rate": 9.210000000000001e-07, "loss": 3.2315, "step": 310 }, { "epoch": 0.3434566537824406, "grad_norm": 8.005127906799316, "learning_rate": 9.240000000000001e-07, "loss": 3.2276, "step": 311 }, { "epoch": 0.34456101601325234, "grad_norm": 8.87568187713623, "learning_rate": 9.270000000000001e-07, "loss": 3.2176, "step": 312 }, { "epoch": 0.34566537824406407, "grad_norm": 4.6906819343566895, "learning_rate": 9.3e-07, "loss": 3.1938, "step": 313 }, { "epoch": 0.34676974047487574, "grad_norm": 3.6373465061187744, "learning_rate": 9.33e-07, "loss": 3.1805, "step": 314 }, { "epoch": 0.34787410270568747, "grad_norm": 4.72260046005249, "learning_rate": 9.36e-07, "loss": 3.1884, "step": 315 }, { "epoch": 0.34897846493649914, "grad_norm": 7.14030647277832, "learning_rate": 9.39e-07, "loss": 3.1782, "step": 316 }, { "epoch": 0.3500828271673109, "grad_norm": 8.369483947753906, 
"learning_rate": 9.419999999999999e-07, "loss": 3.2372, "step": 317 }, { "epoch": 0.3511871893981226, "grad_norm": 8.60047721862793, "learning_rate": 9.450000000000001e-07, "loss": 3.1865, "step": 318 }, { "epoch": 0.3522915516289343, "grad_norm": 22.400068283081055, "learning_rate": 9.480000000000001e-07, "loss": 3.1716, "step": 319 }, { "epoch": 0.353395913859746, "grad_norm": 4.270265579223633, "learning_rate": 9.51e-07, "loss": 3.148, "step": 320 }, { "epoch": 0.3545002760905577, "grad_norm": 14.191431045532227, "learning_rate": 9.54e-07, "loss": 3.1408, "step": 321 }, { "epoch": 0.3556046383213694, "grad_norm": 6.626250743865967, "learning_rate": 9.57e-07, "loss": 3.1548, "step": 322 }, { "epoch": 0.35670900055218113, "grad_norm": 3.888542652130127, "learning_rate": 9.600000000000001e-07, "loss": 3.1672, "step": 323 }, { "epoch": 0.3578133627829928, "grad_norm": 3.2057573795318604, "learning_rate": 9.63e-07, "loss": 3.1472, "step": 324 }, { "epoch": 0.35891772501380453, "grad_norm": 7.071220874786377, "learning_rate": 9.66e-07, "loss": 3.1791, "step": 325 }, { "epoch": 0.3600220872446162, "grad_norm": 6.408501148223877, "learning_rate": 9.690000000000002e-07, "loss": 3.1522, "step": 326 }, { "epoch": 0.36112644947542794, "grad_norm": 3.331507444381714, "learning_rate": 9.72e-07, "loss": 3.1733, "step": 327 }, { "epoch": 0.36223081170623966, "grad_norm": 21.051048278808594, "learning_rate": 9.75e-07, "loss": 3.1372, "step": 328 }, { "epoch": 0.36333517393705134, "grad_norm": 3.9032156467437744, "learning_rate": 9.78e-07, "loss": 3.1451, "step": 329 }, { "epoch": 0.36443953616786307, "grad_norm": 7.222768783569336, "learning_rate": 9.81e-07, "loss": 3.1568, "step": 330 }, { "epoch": 0.36554389839867474, "grad_norm": 6.908663272857666, "learning_rate": 9.84e-07, "loss": 3.1827, "step": 331 }, { "epoch": 0.36664826062948647, "grad_norm": 5.342127799987793, "learning_rate": 9.87e-07, "loss": 3.1115, "step": 332 }, { "epoch": 0.3677526228602982, "grad_norm": 
8.65066146850586, "learning_rate": 9.9e-07, "loss": 3.0872, "step": 333 }, { "epoch": 0.36885698509110987, "grad_norm": 49.99424362182617, "learning_rate": 9.929999999999999e-07, "loss": 3.0959, "step": 334 }, { "epoch": 0.3699613473219216, "grad_norm": 4.74981164932251, "learning_rate": 9.96e-07, "loss": 3.0814, "step": 335 }, { "epoch": 0.37106570955273327, "grad_norm": 9.853766441345215, "learning_rate": 9.99e-07, "loss": 3.0843, "step": 336 }, { "epoch": 0.372170071783545, "grad_norm": 13.888333320617676, "learning_rate": 1.002e-06, "loss": 3.1391, "step": 337 }, { "epoch": 0.3732744340143567, "grad_norm": 5.2010979652404785, "learning_rate": 1.0050000000000001e-06, "loss": 3.1172, "step": 338 }, { "epoch": 0.3743787962451684, "grad_norm": 5.349017143249512, "learning_rate": 1.008e-06, "loss": 3.1004, "step": 339 }, { "epoch": 0.3754831584759801, "grad_norm": 4.888770580291748, "learning_rate": 1.0110000000000001e-06, "loss": 3.095, "step": 340 }, { "epoch": 0.3765875207067918, "grad_norm": 6.747123718261719, "learning_rate": 1.014e-06, "loss": 3.1192, "step": 341 }, { "epoch": 0.37769188293760353, "grad_norm": 5.254423141479492, "learning_rate": 1.017e-06, "loss": 3.0912, "step": 342 }, { "epoch": 0.37879624516841526, "grad_norm": 7.014056205749512, "learning_rate": 1.0200000000000002e-06, "loss": 3.096, "step": 343 }, { "epoch": 0.37990060739922693, "grad_norm": 3.3871054649353027, "learning_rate": 1.023e-06, "loss": 3.1009, "step": 344 }, { "epoch": 0.38100496963003866, "grad_norm": 7.067105293273926, "learning_rate": 1.026e-06, "loss": 3.088, "step": 345 }, { "epoch": 0.38210933186085033, "grad_norm": 5.872891902923584, "learning_rate": 1.029e-06, "loss": 3.1035, "step": 346 }, { "epoch": 0.38321369409166206, "grad_norm": 3.5968339443206787, "learning_rate": 1.032e-06, "loss": 3.1237, "step": 347 }, { "epoch": 0.3843180563224738, "grad_norm": 9.406268119812012, "learning_rate": 1.035e-06, "loss": 3.0825, "step": 348 }, { "epoch": 0.38542241855328546, 
"grad_norm": 3.0008561611175537, "learning_rate": 1.038e-06, "loss": 3.0973, "step": 349 }, { "epoch": 0.3865267807840972, "grad_norm": 3.727741003036499, "learning_rate": 1.041e-06, "loss": 3.0616, "step": 350 }, { "epoch": 0.38763114301490886, "grad_norm": 8.484833717346191, "learning_rate": 1.0439999999999999e-06, "loss": 3.2278, "step": 351 }, { "epoch": 0.3887355052457206, "grad_norm": 6.9975433349609375, "learning_rate": 1.047e-06, "loss": 3.1567, "step": 352 }, { "epoch": 0.3898398674765323, "grad_norm": 5.184877872467041, "learning_rate": 1.0500000000000001e-06, "loss": 3.1345, "step": 353 }, { "epoch": 0.390944229707344, "grad_norm": 3.318382740020752, "learning_rate": 1.053e-06, "loss": 3.1041, "step": 354 }, { "epoch": 0.3920485919381557, "grad_norm": 2.6516475677490234, "learning_rate": 1.0560000000000001e-06, "loss": 3.1223, "step": 355 }, { "epoch": 0.3931529541689674, "grad_norm": 3.294773578643799, "learning_rate": 1.059e-06, "loss": 3.0797, "step": 356 }, { "epoch": 0.3942573163997791, "grad_norm": 5.105257987976074, "learning_rate": 1.062e-06, "loss": 3.067, "step": 357 }, { "epoch": 0.39536167863059085, "grad_norm": 5.814640522003174, "learning_rate": 1.065e-06, "loss": 3.1332, "step": 358 }, { "epoch": 0.3964660408614025, "grad_norm": 5.7555437088012695, "learning_rate": 1.068e-06, "loss": 3.102, "step": 359 }, { "epoch": 0.39757040309221425, "grad_norm": 5.539275646209717, "learning_rate": 1.0710000000000002e-06, "loss": 3.0843, "step": 360 }, { "epoch": 0.3986747653230259, "grad_norm": 5.911229133605957, "learning_rate": 1.074e-06, "loss": 3.1144, "step": 361 }, { "epoch": 0.39977912755383765, "grad_norm": 6.333744525909424, "learning_rate": 1.077e-06, "loss": 3.0829, "step": 362 }, { "epoch": 0.4008834897846494, "grad_norm": 4.308165073394775, "learning_rate": 1.08e-06, "loss": 3.0685, "step": 363 }, { "epoch": 0.40198785201546106, "grad_norm": 8.082413673400879, "learning_rate": 1.083e-06, "loss": 3.0688, "step": 364 }, { "epoch": 
0.4030922142462728, "grad_norm": 5.248201370239258, "learning_rate": 1.086e-06, "loss": 3.1138, "step": 365 }, { "epoch": 0.40419657647708446, "grad_norm": 12.829466819763184, "learning_rate": 1.089e-06, "loss": 3.1141, "step": 366 }, { "epoch": 0.4053009387078962, "grad_norm": 5.3262715339660645, "learning_rate": 1.092e-06, "loss": 3.0605, "step": 367 }, { "epoch": 0.4064053009387079, "grad_norm": 4.108585357666016, "learning_rate": 1.0949999999999999e-06, "loss": 3.0835, "step": 368 }, { "epoch": 0.4075096631695196, "grad_norm": 8.326204299926758, "learning_rate": 1.0980000000000001e-06, "loss": 3.039, "step": 369 }, { "epoch": 0.4086140254003313, "grad_norm": 6.219574451446533, "learning_rate": 1.1010000000000001e-06, "loss": 3.1211, "step": 370 }, { "epoch": 0.409718387631143, "grad_norm": 4.437131404876709, "learning_rate": 1.104e-06, "loss": 3.0602, "step": 371 }, { "epoch": 0.4108227498619547, "grad_norm": 5.051458358764648, "learning_rate": 1.1070000000000002e-06, "loss": 3.0571, "step": 372 }, { "epoch": 0.41192711209276645, "grad_norm": 3.421335458755493, "learning_rate": 1.11e-06, "loss": 3.0605, "step": 373 }, { "epoch": 0.4130314743235781, "grad_norm": 4.738569736480713, "learning_rate": 1.113e-06, "loss": 3.0833, "step": 374 }, { "epoch": 0.41413583655438985, "grad_norm": 8.112787246704102, "learning_rate": 1.116e-06, "loss": 3.0713, "step": 375 }, { "epoch": 0.4152401987852015, "grad_norm": 3.220590591430664, "learning_rate": 1.119e-06, "loss": 3.0441, "step": 376 }, { "epoch": 0.41634456101601325, "grad_norm": 4.56587553024292, "learning_rate": 1.122e-06, "loss": 3.0349, "step": 377 }, { "epoch": 0.417448923246825, "grad_norm": 33.35784912109375, "learning_rate": 1.125e-06, "loss": 3.031, "step": 378 }, { "epoch": 0.41855328547763665, "grad_norm": 4.990856647491455, "learning_rate": 1.128e-06, "loss": 3.0302, "step": 379 }, { "epoch": 0.4196576477084484, "grad_norm": 4.899589538574219, "learning_rate": 1.131e-06, "loss": 3.0576, "step": 380 }, { 
"epoch": 0.42076200993926005, "grad_norm": 12.965826988220215, "learning_rate": 1.134e-06, "loss": 3.0319, "step": 381 }, { "epoch": 0.4218663721700718, "grad_norm": 5.360799312591553, "learning_rate": 1.137e-06, "loss": 3.0372, "step": 382 }, { "epoch": 0.4229707344008835, "grad_norm": 5.363495826721191, "learning_rate": 1.14e-06, "loss": 2.9918, "step": 383 }, { "epoch": 0.4240750966316952, "grad_norm": 16.227008819580078, "learning_rate": 1.1430000000000001e-06, "loss": 3.0213, "step": 384 }, { "epoch": 0.4251794588625069, "grad_norm": 9.142959594726562, "learning_rate": 1.146e-06, "loss": 3.0376, "step": 385 }, { "epoch": 0.4262838210933186, "grad_norm": 4.7515153884887695, "learning_rate": 1.1490000000000001e-06, "loss": 3.0468, "step": 386 }, { "epoch": 0.4273881833241303, "grad_norm": 12.403339385986328, "learning_rate": 1.152e-06, "loss": 2.988, "step": 387 }, { "epoch": 0.42849254555494204, "grad_norm": 4.787986755371094, "learning_rate": 1.155e-06, "loss": 3.0402, "step": 388 }, { "epoch": 0.4295969077857537, "grad_norm": 4.006364345550537, "learning_rate": 1.1580000000000002e-06, "loss": 3.0683, "step": 389 }, { "epoch": 0.43070127001656544, "grad_norm": 4.657031536102295, "learning_rate": 1.161e-06, "loss": 3.0796, "step": 390 }, { "epoch": 0.4318056322473771, "grad_norm": 3.155332565307617, "learning_rate": 1.164e-06, "loss": 2.9903, "step": 391 }, { "epoch": 0.43290999447818884, "grad_norm": 12.73788070678711, "learning_rate": 1.167e-06, "loss": 3.0503, "step": 392 }, { "epoch": 0.43401435670900057, "grad_norm": 9.056694030761719, "learning_rate": 1.17e-06, "loss": 3.0451, "step": 393 }, { "epoch": 0.43511871893981224, "grad_norm": 5.237503528594971, "learning_rate": 1.173e-06, "loss": 3.0181, "step": 394 }, { "epoch": 0.43622308117062397, "grad_norm": 3.0697784423828125, "learning_rate": 1.176e-06, "loss": 3.0465, "step": 395 }, { "epoch": 0.43732744340143564, "grad_norm": 6.488866329193115, "learning_rate": 1.179e-06, "loss": 3.0526, "step": 396 }, 
{ "epoch": 0.4384318056322474, "grad_norm": 4.682281494140625, "learning_rate": 1.1819999999999999e-06, "loss": 3.0446, "step": 397 }, { "epoch": 0.4395361678630591, "grad_norm": 5.321879863739014, "learning_rate": 1.185e-06, "loss": 3.0334, "step": 398 }, { "epoch": 0.4406405300938708, "grad_norm": 40.47715759277344, "learning_rate": 1.188e-06, "loss": 3.0277, "step": 399 }, { "epoch": 0.4417448923246825, "grad_norm": 6.3862409591674805, "learning_rate": 1.191e-06, "loss": 3.0416, "step": 400 }, { "epoch": 0.4428492545554942, "grad_norm": 3.1205942630767822, "learning_rate": 1.1940000000000001e-06, "loss": 3.1586, "step": 401 }, { "epoch": 0.4439536167863059, "grad_norm": 7.875143051147461, "learning_rate": 1.197e-06, "loss": 3.079, "step": 402 }, { "epoch": 0.44505797901711763, "grad_norm": 5.8892927169799805, "learning_rate": 1.2000000000000002e-06, "loss": 3.0678, "step": 403 }, { "epoch": 0.4461623412479293, "grad_norm": 4.228477954864502, "learning_rate": 1.203e-06, "loss": 3.0448, "step": 404 }, { "epoch": 0.44726670347874103, "grad_norm": 2.8704068660736084, "learning_rate": 1.206e-06, "loss": 3.0407, "step": 405 }, { "epoch": 0.4483710657095527, "grad_norm": 6.064779758453369, "learning_rate": 1.2090000000000002e-06, "loss": 2.9931, "step": 406 }, { "epoch": 0.44947542794036444, "grad_norm": 2.603018045425415, "learning_rate": 1.212e-06, "loss": 3.0075, "step": 407 }, { "epoch": 0.45057979017117616, "grad_norm": 3.3944735527038574, "learning_rate": 1.215e-06, "loss": 3.0041, "step": 408 }, { "epoch": 0.45168415240198784, "grad_norm": 3.202716827392578, "learning_rate": 1.218e-06, "loss": 3.0331, "step": 409 }, { "epoch": 0.45278851463279957, "grad_norm": 3.1281187534332275, "learning_rate": 1.221e-06, "loss": 3.0042, "step": 410 }, { "epoch": 0.45389287686361124, "grad_norm": 8.910686492919922, "learning_rate": 1.224e-06, "loss": 3.037, "step": 411 }, { "epoch": 0.45499723909442297, "grad_norm": 5.455728530883789, "learning_rate": 1.227e-06, "loss": 
3.0555, "step": 412 }, { "epoch": 0.4561016013252347, "grad_norm": 5.560511589050293, "learning_rate": 1.23e-06, "loss": 3.0427, "step": 413 }, { "epoch": 0.45720596355604637, "grad_norm": 4.8851165771484375, "learning_rate": 1.2329999999999999e-06, "loss": 3.0665, "step": 414 }, { "epoch": 0.4583103257868581, "grad_norm": 6.3516035079956055, "learning_rate": 1.2360000000000001e-06, "loss": 3.0077, "step": 415 }, { "epoch": 0.45941468801766977, "grad_norm": 3.7946269512176514, "learning_rate": 1.2390000000000001e-06, "loss": 2.9617, "step": 416 }, { "epoch": 0.4605190502484815, "grad_norm": 4.071764945983887, "learning_rate": 1.242e-06, "loss": 3.0307, "step": 417 }, { "epoch": 0.4616234124792932, "grad_norm": 4.7388200759887695, "learning_rate": 1.2450000000000002e-06, "loss": 3.0047, "step": 418 }, { "epoch": 0.4627277747101049, "grad_norm": 8.387419700622559, "learning_rate": 1.248e-06, "loss": 3.0294, "step": 419 }, { "epoch": 0.46383213694091663, "grad_norm": 7.1610236167907715, "learning_rate": 1.251e-06, "loss": 2.9881, "step": 420 }, { "epoch": 0.4649364991717283, "grad_norm": 5.964282035827637, "learning_rate": 1.254e-06, "loss": 2.9888, "step": 421 }, { "epoch": 0.46604086140254003, "grad_norm": 6.002147197723389, "learning_rate": 1.257e-06, "loss": 2.9981, "step": 422 }, { "epoch": 0.46714522363335176, "grad_norm": 4.8611674308776855, "learning_rate": 1.26e-06, "loss": 3.0528, "step": 423 }, { "epoch": 0.46824958586416343, "grad_norm": 5.159078121185303, "learning_rate": 1.263e-06, "loss": 2.9968, "step": 424 }, { "epoch": 0.46935394809497516, "grad_norm": 29.4544677734375, "learning_rate": 1.266e-06, "loss": 2.9973, "step": 425 }, { "epoch": 0.47045831032578683, "grad_norm": 9.652680397033691, "learning_rate": 1.269e-06, "loss": 3.0021, "step": 426 }, { "epoch": 0.47156267255659856, "grad_norm": 5.002384185791016, "learning_rate": 1.272e-06, "loss": 2.9667, "step": 427 }, { "epoch": 0.4726670347874103, "grad_norm": 6.109363555908203, "learning_rate": 
1.275e-06, "loss": 2.9876, "step": 428 }, { "epoch": 0.47377139701822196, "grad_norm": 11.024892807006836, "learning_rate": 1.278e-06, "loss": 2.9628, "step": 429 }, { "epoch": 0.4748757592490337, "grad_norm": 4.169198036193848, "learning_rate": 1.281e-06, "loss": 2.9523, "step": 430 }, { "epoch": 0.47598012147984536, "grad_norm": 5.473679065704346, "learning_rate": 1.284e-06, "loss": 2.9812, "step": 431 }, { "epoch": 0.4770844837106571, "grad_norm": 5.06229829788208, "learning_rate": 1.2870000000000001e-06, "loss": 2.9705, "step": 432 }, { "epoch": 0.4781888459414688, "grad_norm": 6.783336162567139, "learning_rate": 1.29e-06, "loss": 2.9802, "step": 433 }, { "epoch": 0.4792932081722805, "grad_norm": 11.940949440002441, "learning_rate": 1.293e-06, "loss": 2.957, "step": 434 }, { "epoch": 0.4803975704030922, "grad_norm": 7.954470157623291, "learning_rate": 1.2960000000000002e-06, "loss": 2.9795, "step": 435 }, { "epoch": 0.4815019326339039, "grad_norm": 4.150512218475342, "learning_rate": 1.299e-06, "loss": 3.0171, "step": 436 }, { "epoch": 0.4826062948647156, "grad_norm": 21.0142822265625, "learning_rate": 1.302e-06, "loss": 3.0062, "step": 437 }, { "epoch": 0.48371065709552735, "grad_norm": 6.426945686340332, "learning_rate": 1.305e-06, "loss": 2.9623, "step": 438 }, { "epoch": 0.484815019326339, "grad_norm": 12.963583946228027, "learning_rate": 1.308e-06, "loss": 2.9695, "step": 439 }, { "epoch": 0.48591938155715075, "grad_norm": 5.783449172973633, "learning_rate": 1.311e-06, "loss": 2.9591, "step": 440 }, { "epoch": 0.4870237437879624, "grad_norm": 15.725288391113281, "learning_rate": 1.314e-06, "loss": 2.983, "step": 441 }, { "epoch": 0.48812810601877415, "grad_norm": 6.031706809997559, "learning_rate": 1.317e-06, "loss": 3.0083, "step": 442 }, { "epoch": 0.4892324682495859, "grad_norm": 6.023431777954102, "learning_rate": 1.3199999999999999e-06, "loss": 2.9261, "step": 443 }, { "epoch": 0.49033683048039756, "grad_norm": 8.605467796325684, "learning_rate": 
1.323e-06, "loss": 3.0301, "step": 444 }, { "epoch": 0.4914411927112093, "grad_norm": 16.01865005493164, "learning_rate": 1.326e-06, "loss": 2.9535, "step": 445 }, { "epoch": 0.49254555494202096, "grad_norm": 30.567079544067383, "learning_rate": 1.3290000000000001e-06, "loss": 2.9736, "step": 446 }, { "epoch": 0.4936499171728327, "grad_norm": 18.307546615600586, "learning_rate": 1.3320000000000001e-06, "loss": 2.9974, "step": 447 }, { "epoch": 0.4947542794036444, "grad_norm": 10.251293182373047, "learning_rate": 1.335e-06, "loss": 2.9673, "step": 448 }, { "epoch": 0.4958586416344561, "grad_norm": 14.701570510864258, "learning_rate": 1.3380000000000001e-06, "loss": 3.0052, "step": 449 }, { "epoch": 0.4969630038652678, "grad_norm": 9.674931526184082, "learning_rate": 1.341e-06, "loss": 3.0153, "step": 450 }, { "epoch": 0.4980673660960795, "grad_norm": 4.844958782196045, "learning_rate": 1.344e-06, "loss": 3.0602, "step": 451 }, { "epoch": 0.4991717283268912, "grad_norm": 3.7462568283081055, "learning_rate": 1.3470000000000002e-06, "loss": 3.0345, "step": 452 }, { "epoch": 0.500276090557703, "grad_norm": 3.647336721420288, "learning_rate": 1.35e-06, "loss": 2.997, "step": 453 }, { "epoch": 0.5013804527885146, "grad_norm": 5.938355922698975, "learning_rate": 1.353e-06, "loss": 3.0215, "step": 454 }, { "epoch": 0.5024848150193263, "grad_norm": 4.501109600067139, "learning_rate": 1.356e-06, "loss": 2.9904, "step": 455 }, { "epoch": 0.5035891772501381, "grad_norm": 6.1155548095703125, "learning_rate": 1.359e-06, "loss": 2.9913, "step": 456 }, { "epoch": 0.5046935394809497, "grad_norm": 4.899641513824463, "learning_rate": 1.362e-06, "loss": 2.9888, "step": 457 }, { "epoch": 0.5057979017117614, "grad_norm": 5.971794605255127, "learning_rate": 1.365e-06, "loss": 2.9995, "step": 458 }, { "epoch": 0.5069022639425732, "grad_norm": 6.784829616546631, "learning_rate": 1.368e-06, "loss": 2.9444, "step": 459 }, { "epoch": 0.5080066261733849, "grad_norm": 12.27127742767334, 
"learning_rate": 1.3709999999999999e-06, "loss": 2.9546, "step": 460 }, { "epoch": 0.5091109884041966, "grad_norm": 7.472290992736816, "learning_rate": 1.374e-06, "loss": 2.99, "step": 461 }, { "epoch": 0.5102153506350083, "grad_norm": 19.937681198120117, "learning_rate": 1.3770000000000001e-06, "loss": 2.9922, "step": 462 }, { "epoch": 0.51131971286582, "grad_norm": 9.929736137390137, "learning_rate": 1.38e-06, "loss": 2.9809, "step": 463 }, { "epoch": 0.5124240750966317, "grad_norm": 6.99354887008667, "learning_rate": 1.3830000000000001e-06, "loss": 2.9888, "step": 464 }, { "epoch": 0.5135284373274434, "grad_norm": 3.8260364532470703, "learning_rate": 1.386e-06, "loss": 2.9692, "step": 465 }, { "epoch": 0.5146327995582551, "grad_norm": 5.246779918670654, "learning_rate": 1.389e-06, "loss": 2.9738, "step": 466 }, { "epoch": 0.5157371617890668, "grad_norm": 6.425617218017578, "learning_rate": 1.392e-06, "loss": 2.9478, "step": 467 }, { "epoch": 0.5168415240198785, "grad_norm": 6.432727813720703, "learning_rate": 1.395e-06, "loss": 3.0129, "step": 468 }, { "epoch": 0.5179458862506903, "grad_norm": 17.08741569519043, "learning_rate": 1.3980000000000002e-06, "loss": 2.9973, "step": 469 }, { "epoch": 0.5190502484815019, "grad_norm": 6.806764125823975, "learning_rate": 1.401e-06, "loss": 2.9488, "step": 470 }, { "epoch": 0.5201546107123136, "grad_norm": 12.809340476989746, "learning_rate": 1.404e-06, "loss": 2.9578, "step": 471 }, { "epoch": 0.5212589729431254, "grad_norm": 16.714427947998047, "learning_rate": 1.407e-06, "loss": 2.9545, "step": 472 }, { "epoch": 0.5223633351739371, "grad_norm": 5.959131717681885, "learning_rate": 1.41e-06, "loss": 2.9622, "step": 473 }, { "epoch": 0.5234676974047487, "grad_norm": 4.063722610473633, "learning_rate": 1.413e-06, "loss": 2.9273, "step": 474 }, { "epoch": 0.5245720596355604, "grad_norm": 4.0488386154174805, "learning_rate": 1.416e-06, "loss": 2.9589, "step": 475 }, { "epoch": 0.5256764218663722, "grad_norm": 
4.879284858703613, "learning_rate": 1.419e-06, "loss": 2.9871, "step": 476 }, { "epoch": 0.5267807840971839, "grad_norm": 5.274569988250732, "learning_rate": 1.422e-06, "loss": 2.9772, "step": 477 }, { "epoch": 0.5278851463279955, "grad_norm": 5.958399772644043, "learning_rate": 1.4250000000000001e-06, "loss": 2.9315, "step": 478 }, { "epoch": 0.5289895085588073, "grad_norm": 5.562157154083252, "learning_rate": 1.4280000000000001e-06, "loss": 2.9396, "step": 479 }, { "epoch": 0.530093870789619, "grad_norm": 6.931772708892822, "learning_rate": 1.431e-06, "loss": 2.9735, "step": 480 }, { "epoch": 0.5311982330204307, "grad_norm": 10.79392147064209, "learning_rate": 1.4340000000000002e-06, "loss": 2.9677, "step": 481 }, { "epoch": 0.5323025952512425, "grad_norm": 4.75665283203125, "learning_rate": 1.437e-06, "loss": 2.9678, "step": 482 }, { "epoch": 0.5334069574820541, "grad_norm": 11.458367347717285, "learning_rate": 1.44e-06, "loss": 2.9958, "step": 483 }, { "epoch": 0.5345113197128658, "grad_norm": 5.120297431945801, "learning_rate": 1.443e-06, "loss": 2.9724, "step": 484 }, { "epoch": 0.5356156819436775, "grad_norm": 6.7952094078063965, "learning_rate": 1.446e-06, "loss": 2.9319, "step": 485 }, { "epoch": 0.5367200441744893, "grad_norm": 4.849118232727051, "learning_rate": 1.449e-06, "loss": 2.9559, "step": 486 }, { "epoch": 0.5378244064053009, "grad_norm": 5.595165729522705, "learning_rate": 1.452e-06, "loss": 2.9479, "step": 487 }, { "epoch": 0.5389287686361126, "grad_norm": 9.224928855895996, "learning_rate": 1.455e-06, "loss": 2.9483, "step": 488 }, { "epoch": 0.5400331308669244, "grad_norm": 5.105157852172852, "learning_rate": 1.458e-06, "loss": 2.9397, "step": 489 }, { "epoch": 0.5411374930977361, "grad_norm": 6.726254940032959, "learning_rate": 1.461e-06, "loss": 2.9535, "step": 490 }, { "epoch": 0.5422418553285477, "grad_norm": 5.6334147453308105, "learning_rate": 1.464e-06, "loss": 2.9238, "step": 491 }, { "epoch": 0.5433462175593595, "grad_norm": 
7.908188343048096, "learning_rate": 1.467e-06, "loss": 2.937, "step": 492 }, { "epoch": 0.5444505797901712, "grad_norm": 7.092876434326172, "learning_rate": 1.4700000000000001e-06, "loss": 2.9551, "step": 493 }, { "epoch": 0.5455549420209829, "grad_norm": 10.266342163085938, "learning_rate": 1.473e-06, "loss": 2.9844, "step": 494 }, { "epoch": 0.5466593042517945, "grad_norm": 6.062618732452393, "learning_rate": 1.4760000000000001e-06, "loss": 2.948, "step": 495 }, { "epoch": 0.5477636664826063, "grad_norm": 9.737025260925293, "learning_rate": 1.479e-06, "loss": 2.9337, "step": 496 }, { "epoch": 0.548868028713418, "grad_norm": 7.183052062988281, "learning_rate": 1.482e-06, "loss": 2.9733, "step": 497 }, { "epoch": 0.5499723909442297, "grad_norm": 7.516322135925293, "learning_rate": 1.4850000000000002e-06, "loss": 2.9786, "step": 498 }, { "epoch": 0.5510767531750415, "grad_norm": 6.5018391609191895, "learning_rate": 1.488e-06, "loss": 2.9404, "step": 499 }, { "epoch": 0.5521811154058531, "grad_norm": 4.887401580810547, "learning_rate": 1.491e-06, "loss": 2.9118, "step": 500 }, { "epoch": 0.5532854776366648, "grad_norm": 6.947997093200684, "learning_rate": 1.494e-06, "loss": 3.1153, "step": 501 }, { "epoch": 0.5543898398674766, "grad_norm": 10.234834671020508, "learning_rate": 1.497e-06, "loss": 3.0228, "step": 502 }, { "epoch": 0.5554942020982883, "grad_norm": 7.418269157409668, "learning_rate": 1.5e-06, "loss": 3.0487, "step": 503 }, { "epoch": 0.5565985643290999, "grad_norm": 2.7339091300964355, "learning_rate": 1.503e-06, "loss": 2.9682, "step": 504 }, { "epoch": 0.5577029265599116, "grad_norm": 5.3754377365112305, "learning_rate": 1.506e-06, "loss": 2.9764, "step": 505 }, { "epoch": 0.5588072887907234, "grad_norm": 4.68873405456543, "learning_rate": 1.5089999999999999e-06, "loss": 2.9877, "step": 506 }, { "epoch": 0.5599116510215351, "grad_norm": 15.33177375793457, "learning_rate": 1.512e-06, "loss": 2.9844, "step": 507 }, { "epoch": 0.5610160132523467, 
"grad_norm": 4.318116188049316, "learning_rate": 1.5150000000000001e-06, "loss": 2.9642, "step": 508 }, { "epoch": 0.5621203754831585, "grad_norm": 4.385987758636475, "learning_rate": 1.518e-06, "loss": 2.9471, "step": 509 }, { "epoch": 0.5632247377139702, "grad_norm": 4.370758056640625, "learning_rate": 1.5210000000000001e-06, "loss": 2.9556, "step": 510 }, { "epoch": 0.5643290999447819, "grad_norm": 11.400328636169434, "learning_rate": 1.524e-06, "loss": 2.9405, "step": 511 }, { "epoch": 0.5654334621755936, "grad_norm": 4.25964879989624, "learning_rate": 1.5270000000000002e-06, "loss": 2.9007, "step": 512 }, { "epoch": 0.5665378244064053, "grad_norm": 8.925121307373047, "learning_rate": 1.53e-06, "loss": 2.9583, "step": 513 }, { "epoch": 0.567642186637217, "grad_norm": 8.571832656860352, "learning_rate": 1.533e-06, "loss": 2.9583, "step": 514 }, { "epoch": 0.5687465488680287, "grad_norm": 9.978013038635254, "learning_rate": 1.5360000000000002e-06, "loss": 2.9459, "step": 515 }, { "epoch": 0.5698509110988405, "grad_norm": 8.828751564025879, "learning_rate": 1.539e-06, "loss": 2.927, "step": 516 }, { "epoch": 0.5709552733296521, "grad_norm": 12.563618659973145, "learning_rate": 1.542e-06, "loss": 2.9498, "step": 517 }, { "epoch": 0.5720596355604638, "grad_norm": 8.544693946838379, "learning_rate": 1.545e-06, "loss": 2.9415, "step": 518 }, { "epoch": 0.5731639977912756, "grad_norm": 12.600970268249512, "learning_rate": 1.548e-06, "loss": 2.9436, "step": 519 }, { "epoch": 0.5742683600220873, "grad_norm": 4.481036186218262, "learning_rate": 1.551e-06, "loss": 2.9396, "step": 520 }, { "epoch": 0.5753727222528989, "grad_norm": 24.386613845825195, "learning_rate": 1.554e-06, "loss": 2.9547, "step": 521 }, { "epoch": 0.5764770844837107, "grad_norm": 8.624656677246094, "learning_rate": 1.557e-06, "loss": 2.9767, "step": 522 }, { "epoch": 0.5775814467145224, "grad_norm": 7.541572570800781, "learning_rate": 1.5599999999999999e-06, "loss": 2.9887, "step": 523 }, { "epoch": 
0.5786858089453341, "grad_norm": 4.060111999511719, "learning_rate": 1.5630000000000001e-06, "loss": 2.903, "step": 524 }, { "epoch": 0.5797901711761457, "grad_norm": 8.023724555969238, "learning_rate": 1.5660000000000001e-06, "loss": 2.9055, "step": 525 }, { "epoch": 0.5808945334069575, "grad_norm": 8.753127098083496, "learning_rate": 1.569e-06, "loss": 2.9023, "step": 526 }, { "epoch": 0.5819988956377692, "grad_norm": 4.2007622718811035, "learning_rate": 1.5720000000000002e-06, "loss": 2.9133, "step": 527 }, { "epoch": 0.5831032578685809, "grad_norm": 15.80457878112793, "learning_rate": 1.575e-06, "loss": 2.9165, "step": 528 }, { "epoch": 0.5842076200993926, "grad_norm": 5.224271297454834, "learning_rate": 1.578e-06, "loss": 2.9169, "step": 529 }, { "epoch": 0.5853119823302043, "grad_norm": 25.35341453552246, "learning_rate": 1.581e-06, "loss": 2.8874, "step": 530 }, { "epoch": 0.586416344561016, "grad_norm": 5.48516845703125, "learning_rate": 1.584e-06, "loss": 2.8926, "step": 531 }, { "epoch": 0.5875207067918278, "grad_norm": 11.679147720336914, "learning_rate": 1.5870000000000002e-06, "loss": 2.9217, "step": 532 }, { "epoch": 0.5886250690226394, "grad_norm": 10.377972602844238, "learning_rate": 1.59e-06, "loss": 2.9174, "step": 533 }, { "epoch": 0.5897294312534511, "grad_norm": 5.428811550140381, "learning_rate": 1.593e-06, "loss": 2.9245, "step": 534 }, { "epoch": 0.5908337934842628, "grad_norm": 7.518815040588379, "learning_rate": 1.596e-06, "loss": 2.924, "step": 535 }, { "epoch": 0.5919381557150746, "grad_norm": 4.949530124664307, "learning_rate": 1.599e-06, "loss": 2.943, "step": 536 }, { "epoch": 0.5930425179458862, "grad_norm": 8.420149803161621, "learning_rate": 1.602e-06, "loss": 2.9773, "step": 537 }, { "epoch": 0.5941468801766979, "grad_norm": 5.1310648918151855, "learning_rate": 1.605e-06, "loss": 2.9147, "step": 538 }, { "epoch": 0.5952512424075097, "grad_norm": 6.2036638259887695, "learning_rate": 1.608e-06, "loss": 2.9355, "step": 539 }, { 
"epoch": 0.5963556046383214, "grad_norm": 3.4415652751922607, "learning_rate": 1.611e-06, "loss": 2.8979, "step": 540 }, { "epoch": 0.597459966869133, "grad_norm": 7.35853910446167, "learning_rate": 1.6140000000000001e-06, "loss": 2.9523, "step": 541 }, { "epoch": 0.5985643290999448, "grad_norm": 5.638726711273193, "learning_rate": 1.6170000000000001e-06, "loss": 2.9335, "step": 542 }, { "epoch": 0.5996686913307565, "grad_norm": 6.859483242034912, "learning_rate": 1.62e-06, "loss": 2.9137, "step": 543 }, { "epoch": 0.6007730535615682, "grad_norm": 6.5918288230896, "learning_rate": 1.6230000000000002e-06, "loss": 2.919, "step": 544 }, { "epoch": 0.6018774157923799, "grad_norm": 7.555466651916504, "learning_rate": 1.626e-06, "loss": 2.9422, "step": 545 }, { "epoch": 0.6029817780231916, "grad_norm": 4.114239692687988, "learning_rate": 1.629e-06, "loss": 2.8932, "step": 546 }, { "epoch": 0.6040861402540033, "grad_norm": 7.722898960113525, "learning_rate": 1.632e-06, "loss": 2.9375, "step": 547 }, { "epoch": 0.605190502484815, "grad_norm": 4.108684539794922, "learning_rate": 1.635e-06, "loss": 2.8907, "step": 548 }, { "epoch": 0.6062948647156268, "grad_norm": 8.399603843688965, "learning_rate": 1.638e-06, "loss": 2.8787, "step": 549 }, { "epoch": 0.6073992269464384, "grad_norm": 6.894826889038086, "learning_rate": 1.641e-06, "loss": 2.9133, "step": 550 }, { "epoch": 0.6085035891772501, "grad_norm": 7.127720355987549, "learning_rate": 1.644e-06, "loss": 3.0227, "step": 551 }, { "epoch": 0.6096079514080619, "grad_norm": 7.376366138458252, "learning_rate": 1.6469999999999999e-06, "loss": 3.0011, "step": 552 }, { "epoch": 0.6107123136388736, "grad_norm": 5.748219966888428, "learning_rate": 1.65e-06, "loss": 2.9436, "step": 553 }, { "epoch": 0.6118166758696852, "grad_norm": 5.01236629486084, "learning_rate": 1.653e-06, "loss": 2.9826, "step": 554 }, { "epoch": 0.6129210381004969, "grad_norm": 4.656754016876221, "learning_rate": 1.6560000000000001e-06, "loss": 2.9439, "step": 
555 }, { "epoch": 0.6140254003313087, "grad_norm": 2.8887128829956055, "learning_rate": 1.6590000000000001e-06, "loss": 2.9284, "step": 556 }, { "epoch": 0.6151297625621204, "grad_norm": 7.919761657714844, "learning_rate": 1.662e-06, "loss": 2.9572, "step": 557 }, { "epoch": 0.616234124792932, "grad_norm": 4.834834575653076, "learning_rate": 1.6650000000000002e-06, "loss": 2.9657, "step": 558 }, { "epoch": 0.6173384870237438, "grad_norm": 6.500499248504639, "learning_rate": 1.668e-06, "loss": 2.8935, "step": 559 }, { "epoch": 0.6184428492545555, "grad_norm": 5.292578220367432, "learning_rate": 1.671e-06, "loss": 2.8957, "step": 560 }, { "epoch": 0.6195472114853672, "grad_norm": 6.79939079284668, "learning_rate": 1.6740000000000002e-06, "loss": 2.9525, "step": 561 }, { "epoch": 0.620651573716179, "grad_norm": 6.526973724365234, "learning_rate": 1.677e-06, "loss": 2.9318, "step": 562 }, { "epoch": 0.6217559359469906, "grad_norm": 5.3813605308532715, "learning_rate": 1.68e-06, "loss": 2.9026, "step": 563 }, { "epoch": 0.6228602981778023, "grad_norm": 7.228806018829346, "learning_rate": 1.683e-06, "loss": 2.8922, "step": 564 }, { "epoch": 0.623964660408614, "grad_norm": 13.585256576538086, "learning_rate": 1.686e-06, "loss": 2.9086, "step": 565 }, { "epoch": 0.6250690226394258, "grad_norm": 5.991419792175293, "learning_rate": 1.689e-06, "loss": 2.9271, "step": 566 }, { "epoch": 0.6261733848702374, "grad_norm": 9.469718933105469, "learning_rate": 1.692e-06, "loss": 2.9378, "step": 567 }, { "epoch": 0.6272777471010491, "grad_norm": 10.544753074645996, "learning_rate": 1.695e-06, "loss": 2.9192, "step": 568 }, { "epoch": 0.6283821093318609, "grad_norm": 5.601041316986084, "learning_rate": 1.6979999999999999e-06, "loss": 2.856, "step": 569 }, { "epoch": 0.6294864715626726, "grad_norm": 5.0394158363342285, "learning_rate": 1.701e-06, "loss": 2.8999, "step": 570 }, { "epoch": 0.6305908337934842, "grad_norm": 14.053910255432129, "learning_rate": 1.7040000000000001e-06, 
"loss": 2.8911, "step": 571 }, { "epoch": 0.631695196024296, "grad_norm": 5.135149002075195, "learning_rate": 1.707e-06, "loss": 2.8753, "step": 572 }, { "epoch": 0.6327995582551077, "grad_norm": 6.869830131530762, "learning_rate": 1.7100000000000001e-06, "loss": 2.9621, "step": 573 }, { "epoch": 0.6339039204859194, "grad_norm": 26.027353286743164, "learning_rate": 1.713e-06, "loss": 2.9031, "step": 574 }, { "epoch": 0.635008282716731, "grad_norm": 3.539942979812622, "learning_rate": 1.7160000000000002e-06, "loss": 2.9186, "step": 575 }, { "epoch": 0.6361126449475428, "grad_norm": 4.482118129730225, "learning_rate": 1.719e-06, "loss": 2.9001, "step": 576 }, { "epoch": 0.6372170071783545, "grad_norm": 8.940092086791992, "learning_rate": 1.722e-06, "loss": 2.9226, "step": 577 }, { "epoch": 0.6383213694091662, "grad_norm": 5.776466369628906, "learning_rate": 1.7250000000000002e-06, "loss": 2.9095, "step": 578 }, { "epoch": 0.639425731639978, "grad_norm": 9.003252983093262, "learning_rate": 1.728e-06, "loss": 2.8983, "step": 579 }, { "epoch": 0.6405300938707896, "grad_norm": 10.742339134216309, "learning_rate": 1.731e-06, "loss": 2.9242, "step": 580 }, { "epoch": 0.6416344561016013, "grad_norm": 21.382654190063477, "learning_rate": 1.734e-06, "loss": 2.8635, "step": 581 }, { "epoch": 0.6427388183324131, "grad_norm": 9.863301277160645, "learning_rate": 1.737e-06, "loss": 2.9047, "step": 582 }, { "epoch": 0.6438431805632248, "grad_norm": 8.117234230041504, "learning_rate": 1.74e-06, "loss": 2.9012, "step": 583 }, { "epoch": 0.6449475427940364, "grad_norm": 7.308660507202148, "learning_rate": 1.743e-06, "loss": 2.8789, "step": 584 }, { "epoch": 0.6460519050248481, "grad_norm": 6.3989996910095215, "learning_rate": 1.746e-06, "loss": 2.8781, "step": 585 }, { "epoch": 0.6471562672556599, "grad_norm": 6.255735874176025, "learning_rate": 1.749e-06, "loss": 2.8951, "step": 586 }, { "epoch": 0.6482606294864716, "grad_norm": 8.60550594329834, "learning_rate": 
1.7520000000000001e-06, "loss": 2.8701, "step": 587 }, { "epoch": 0.6493649917172832, "grad_norm": 11.702309608459473, "learning_rate": 1.7550000000000001e-06, "loss": 2.8857, "step": 588 }, { "epoch": 0.650469353948095, "grad_norm": 7.709960460662842, "learning_rate": 1.758e-06, "loss": 2.855, "step": 589 }, { "epoch": 0.6515737161789067, "grad_norm": 8.97531795501709, "learning_rate": 1.7610000000000002e-06, "loss": 2.9245, "step": 590 }, { "epoch": 0.6526780784097184, "grad_norm": 3.543203353881836, "learning_rate": 1.764e-06, "loss": 2.8588, "step": 591 }, { "epoch": 0.6537824406405301, "grad_norm": 8.829641342163086, "learning_rate": 1.767e-06, "loss": 2.9141, "step": 592 }, { "epoch": 0.6548868028713418, "grad_norm": 9.190896987915039, "learning_rate": 1.77e-06, "loss": 2.9093, "step": 593 }, { "epoch": 0.6559911651021535, "grad_norm": 8.038817405700684, "learning_rate": 1.773e-06, "loss": 2.8798, "step": 594 }, { "epoch": 0.6570955273329652, "grad_norm": 17.40242576599121, "learning_rate": 1.776e-06, "loss": 2.9151, "step": 595 }, { "epoch": 0.658199889563777, "grad_norm": 11.267077445983887, "learning_rate": 1.779e-06, "loss": 2.9477, "step": 596 }, { "epoch": 0.6593042517945886, "grad_norm": 20.675945281982422, "learning_rate": 1.782e-06, "loss": 2.9065, "step": 597 }, { "epoch": 0.6604086140254003, "grad_norm": 6.053557872772217, "learning_rate": 1.785e-06, "loss": 2.8983, "step": 598 }, { "epoch": 0.6615129762562121, "grad_norm": 6.969814300537109, "learning_rate": 1.788e-06, "loss": 2.9398, "step": 599 }, { "epoch": 0.6626173384870238, "grad_norm": 8.200342178344727, "learning_rate": 1.791e-06, "loss": 2.9016, "step": 600 }, { "epoch": 0.6637217007178354, "grad_norm": 7.972425937652588, "learning_rate": 1.794e-06, "loss": 3.0162, "step": 601 }, { "epoch": 0.6648260629486472, "grad_norm": 3.5462729930877686, "learning_rate": 1.7970000000000001e-06, "loss": 2.9671, "step": 602 }, { "epoch": 0.6659304251794589, "grad_norm": 3.8466086387634277, 
"learning_rate": 1.8e-06, "loss": 2.9923, "step": 603 }, { "epoch": 0.6670347874102706, "grad_norm": 4.883789539337158, "learning_rate": 1.8030000000000001e-06, "loss": 2.9584, "step": 604 }, { "epoch": 0.6681391496410822, "grad_norm": 4.646853923797607, "learning_rate": 1.806e-06, "loss": 2.9327, "step": 605 }, { "epoch": 0.669243511871894, "grad_norm": 4.045053958892822, "learning_rate": 1.809e-06, "loss": 2.9213, "step": 606 }, { "epoch": 0.6703478741027057, "grad_norm": 6.8176960945129395, "learning_rate": 1.8120000000000002e-06, "loss": 2.9371, "step": 607 }, { "epoch": 0.6714522363335174, "grad_norm": 13.057923316955566, "learning_rate": 1.815e-06, "loss": 2.9363, "step": 608 }, { "epoch": 0.6725565985643291, "grad_norm": 4.190262794494629, "learning_rate": 1.818e-06, "loss": 2.916, "step": 609 }, { "epoch": 0.6736609607951408, "grad_norm": 17.182729721069336, "learning_rate": 1.821e-06, "loss": 2.9016, "step": 610 }, { "epoch": 0.6747653230259525, "grad_norm": 5.8900532722473145, "learning_rate": 1.824e-06, "loss": 2.875, "step": 611 }, { "epoch": 0.6758696852567642, "grad_norm": 5.204771995544434, "learning_rate": 1.827e-06, "loss": 2.9223, "step": 612 }, { "epoch": 0.676974047487576, "grad_norm": 7.125707626342773, "learning_rate": 1.83e-06, "loss": 2.9035, "step": 613 }, { "epoch": 0.6780784097183876, "grad_norm": 4.869841575622559, "learning_rate": 1.833e-06, "loss": 2.894, "step": 614 }, { "epoch": 0.6791827719491993, "grad_norm": 7.957828521728516, "learning_rate": 1.8359999999999999e-06, "loss": 2.9069, "step": 615 }, { "epoch": 0.6802871341800111, "grad_norm": 4.163580417633057, "learning_rate": 1.839e-06, "loss": 2.8707, "step": 616 }, { "epoch": 0.6813914964108228, "grad_norm": 4.732425689697266, "learning_rate": 1.8420000000000001e-06, "loss": 2.8703, "step": 617 }, { "epoch": 0.6824958586416344, "grad_norm": 5.522703170776367, "learning_rate": 1.8450000000000001e-06, "loss": 2.8834, "step": 618 }, { "epoch": 0.6836002208724462, "grad_norm": 
5.658855438232422, "learning_rate": 1.8480000000000001e-06, "loss": 2.8826, "step": 619 }, { "epoch": 0.6847045831032579, "grad_norm": 5.629660129547119, "learning_rate": 1.851e-06, "loss": 2.9211, "step": 620 }, { "epoch": 0.6858089453340696, "grad_norm": 11.534173011779785, "learning_rate": 1.8540000000000002e-06, "loss": 2.9202, "step": 621 }, { "epoch": 0.6869133075648812, "grad_norm": 12.805228233337402, "learning_rate": 1.857e-06, "loss": 2.909, "step": 622 }, { "epoch": 0.688017669795693, "grad_norm": 5.185667514801025, "learning_rate": 1.86e-06, "loss": 2.8612, "step": 623 }, { "epoch": 0.6891220320265047, "grad_norm": 6.3621416091918945, "learning_rate": 1.8630000000000002e-06, "loss": 2.8447, "step": 624 }, { "epoch": 0.6902263942573164, "grad_norm": 13.404684066772461, "learning_rate": 1.866e-06, "loss": 2.8853, "step": 625 }, { "epoch": 0.6913307564881281, "grad_norm": 14.161800384521484, "learning_rate": 1.869e-06, "loss": 2.9279, "step": 626 }, { "epoch": 0.6924351187189398, "grad_norm": 7.067657947540283, "learning_rate": 1.872e-06, "loss": 2.8509, "step": 627 }, { "epoch": 0.6935394809497515, "grad_norm": 11.697568893432617, "learning_rate": 1.875e-06, "loss": 2.8257, "step": 628 }, { "epoch": 0.6946438431805633, "grad_norm": 6.504116535186768, "learning_rate": 1.878e-06, "loss": 2.8888, "step": 629 }, { "epoch": 0.6957482054113749, "grad_norm": 11.849544525146484, "learning_rate": 1.8810000000000003e-06, "loss": 2.8399, "step": 630 }, { "epoch": 0.6968525676421866, "grad_norm": 24.17937660217285, "learning_rate": 1.8839999999999999e-06, "loss": 2.878, "step": 631 }, { "epoch": 0.6979569298729983, "grad_norm": 21.5490779876709, "learning_rate": 1.8869999999999999e-06, "loss": 2.8845, "step": 632 }, { "epoch": 0.6990612921038101, "grad_norm": 9.160719871520996, "learning_rate": 1.8900000000000001e-06, "loss": 2.8929, "step": 633 }, { "epoch": 0.7001656543346217, "grad_norm": 7.42363166809082, "learning_rate": 1.8930000000000001e-06, "loss": 2.8649, 
"step": 634 }, { "epoch": 0.7012700165654334, "grad_norm": 12.279134750366211, "learning_rate": 1.8960000000000001e-06, "loss": 2.9176, "step": 635 }, { "epoch": 0.7023743787962452, "grad_norm": 6.067274570465088, "learning_rate": 1.899e-06, "loss": 2.891, "step": 636 }, { "epoch": 0.7034787410270569, "grad_norm": 11.222203254699707, "learning_rate": 1.902e-06, "loss": 2.8714, "step": 637 }, { "epoch": 0.7045831032578685, "grad_norm": 9.214910507202148, "learning_rate": 1.905e-06, "loss": 2.8965, "step": 638 }, { "epoch": 0.7056874654886803, "grad_norm": 9.025790214538574, "learning_rate": 1.908e-06, "loss": 2.9123, "step": 639 }, { "epoch": 0.706791827719492, "grad_norm": 4.020140647888184, "learning_rate": 1.9110000000000004e-06, "loss": 2.8564, "step": 640 }, { "epoch": 0.7078961899503037, "grad_norm": 8.254931449890137, "learning_rate": 1.914e-06, "loss": 2.8853, "step": 641 }, { "epoch": 0.7090005521811154, "grad_norm": 8.283156394958496, "learning_rate": 1.917e-06, "loss": 2.8676, "step": 642 }, { "epoch": 0.7101049144119271, "grad_norm": 12.030715942382812, "learning_rate": 1.9200000000000003e-06, "loss": 2.9002, "step": 643 }, { "epoch": 0.7112092766427388, "grad_norm": 11.270806312561035, "learning_rate": 1.923e-06, "loss": 2.9455, "step": 644 }, { "epoch": 0.7123136388735505, "grad_norm": 5.478127479553223, "learning_rate": 1.926e-06, "loss": 2.9201, "step": 645 }, { "epoch": 0.7134180011043623, "grad_norm": 7.408079147338867, "learning_rate": 1.929e-06, "loss": 2.8722, "step": 646 }, { "epoch": 0.7145223633351739, "grad_norm": 5.4901838302612305, "learning_rate": 1.932e-06, "loss": 2.9253, "step": 647 }, { "epoch": 0.7156267255659856, "grad_norm": 10.147208213806152, "learning_rate": 1.935e-06, "loss": 2.8812, "step": 648 }, { "epoch": 0.7167310877967974, "grad_norm": 9.770915985107422, "learning_rate": 1.9380000000000003e-06, "loss": 2.9204, "step": 649 }, { "epoch": 0.7178354500276091, "grad_norm": 8.861368179321289, "learning_rate": 
1.9409999999999997e-06, "loss": 2.8407, "step": 650 }, { "epoch": 0.7189398122584207, "grad_norm": 4.10565710067749, "learning_rate": 1.944e-06, "loss": 3.0052, "step": 651 }, { "epoch": 0.7200441744892324, "grad_norm": 10.22706413269043, "learning_rate": 1.947e-06, "loss": 2.983, "step": 652 }, { "epoch": 0.7211485367200442, "grad_norm": 6.04769229888916, "learning_rate": 1.95e-06, "loss": 2.9712, "step": 653 }, { "epoch": 0.7222528989508559, "grad_norm": 6.179854393005371, "learning_rate": 1.953e-06, "loss": 2.9155, "step": 654 }, { "epoch": 0.7233572611816675, "grad_norm": 3.890756845474243, "learning_rate": 1.956e-06, "loss": 2.9107, "step": 655 }, { "epoch": 0.7244616234124793, "grad_norm": 6.725774765014648, "learning_rate": 1.959e-06, "loss": 2.9041, "step": 656 }, { "epoch": 0.725565985643291, "grad_norm": 5.937934398651123, "learning_rate": 1.962e-06, "loss": 2.898, "step": 657 }, { "epoch": 0.7266703478741027, "grad_norm": 5.618699073791504, "learning_rate": 1.9650000000000002e-06, "loss": 2.8781, "step": 658 }, { "epoch": 0.7277747101049145, "grad_norm": 7.648959159851074, "learning_rate": 1.968e-06, "loss": 2.9009, "step": 659 }, { "epoch": 0.7288790723357261, "grad_norm": 4.427227973937988, "learning_rate": 1.971e-06, "loss": 2.9096, "step": 660 }, { "epoch": 0.7299834345665378, "grad_norm": 9.180110931396484, "learning_rate": 1.974e-06, "loss": 2.8634, "step": 661 }, { "epoch": 0.7310877967973495, "grad_norm": 4.953343868255615, "learning_rate": 1.977e-06, "loss": 2.8894, "step": 662 }, { "epoch": 0.7321921590281613, "grad_norm": 8.617053031921387, "learning_rate": 1.98e-06, "loss": 2.8615, "step": 663 }, { "epoch": 0.7332965212589729, "grad_norm": 5.456623077392578, "learning_rate": 1.9830000000000003e-06, "loss": 2.8487, "step": 664 }, { "epoch": 0.7344008834897846, "grad_norm": 8.799317359924316, "learning_rate": 1.9859999999999997e-06, "loss": 2.8905, "step": 665 }, { "epoch": 0.7355052457205964, "grad_norm": 10.384432792663574, "learning_rate": 
1.989e-06, "loss": 2.82, "step": 666 }, { "epoch": 0.7366096079514081, "grad_norm": 5.496358871459961, "learning_rate": 1.992e-06, "loss": 2.8218, "step": 667 }, { "epoch": 0.7377139701822197, "grad_norm": 5.732351779937744, "learning_rate": 1.995e-06, "loss": 2.8788, "step": 668 }, { "epoch": 0.7388183324130315, "grad_norm": 10.178050994873047, "learning_rate": 1.998e-06, "loss": 2.8854, "step": 669 }, { "epoch": 0.7399226946438432, "grad_norm": 6.747450828552246, "learning_rate": 2.001e-06, "loss": 2.8277, "step": 670 }, { "epoch": 0.7410270568746549, "grad_norm": 6.763575553894043, "learning_rate": 2.004e-06, "loss": 2.932, "step": 671 }, { "epoch": 0.7421314191054665, "grad_norm": 8.28627872467041, "learning_rate": 2.007e-06, "loss": 2.9034, "step": 672 }, { "epoch": 0.7432357813362783, "grad_norm": 5.005800724029541, "learning_rate": 2.0100000000000002e-06, "loss": 2.886, "step": 673 }, { "epoch": 0.74434014356709, "grad_norm": 8.997291564941406, "learning_rate": 2.0130000000000005e-06, "loss": 2.8887, "step": 674 }, { "epoch": 0.7454445057979017, "grad_norm": 6.407253265380859, "learning_rate": 2.016e-06, "loss": 2.8766, "step": 675 }, { "epoch": 0.7465488680287135, "grad_norm": 10.471388816833496, "learning_rate": 2.019e-06, "loss": 2.8666, "step": 676 }, { "epoch": 0.7476532302595251, "grad_norm": 10.04470443725586, "learning_rate": 2.0220000000000003e-06, "loss": 2.9235, "step": 677 }, { "epoch": 0.7487575924903368, "grad_norm": 6.309896945953369, "learning_rate": 2.025e-06, "loss": 2.9046, "step": 678 }, { "epoch": 0.7498619547211486, "grad_norm": 6.57606840133667, "learning_rate": 2.028e-06, "loss": 2.8937, "step": 679 }, { "epoch": 0.7509663169519603, "grad_norm": 5.874166011810303, "learning_rate": 2.031e-06, "loss": 2.8626, "step": 680 }, { "epoch": 0.7520706791827719, "grad_norm": 7.255016326904297, "learning_rate": 2.034e-06, "loss": 2.8382, "step": 681 }, { "epoch": 0.7531750414135836, "grad_norm": 10.966043472290039, "learning_rate": 2.037e-06, 
"loss": 2.8624, "step": 682 }, { "epoch": 0.7542794036443954, "grad_norm": 8.28087043762207, "learning_rate": 2.0400000000000004e-06, "loss": 2.89, "step": 683 }, { "epoch": 0.7553837658752071, "grad_norm": 6.897957801818848, "learning_rate": 2.0429999999999998e-06, "loss": 2.9001, "step": 684 }, { "epoch": 0.7564881281060187, "grad_norm": 8.412449836730957, "learning_rate": 2.046e-06, "loss": 2.8275, "step": 685 }, { "epoch": 0.7575924903368305, "grad_norm": 5.476268291473389, "learning_rate": 2.049e-06, "loss": 2.8416, "step": 686 }, { "epoch": 0.7586968525676422, "grad_norm": 7.517270088195801, "learning_rate": 2.052e-06, "loss": 2.9162, "step": 687 }, { "epoch": 0.7598012147984539, "grad_norm": 8.240571975708008, "learning_rate": 2.0550000000000002e-06, "loss": 2.9154, "step": 688 }, { "epoch": 0.7609055770292656, "grad_norm": 13.37619686126709, "learning_rate": 2.058e-06, "loss": 2.8081, "step": 689 }, { "epoch": 0.7620099392600773, "grad_norm": 8.944254875183105, "learning_rate": 2.061e-06, "loss": 2.8213, "step": 690 }, { "epoch": 0.763114301490889, "grad_norm": 6.7290143966674805, "learning_rate": 2.064e-06, "loss": 2.8356, "step": 691 }, { "epoch": 0.7642186637217007, "grad_norm": 11.100077629089355, "learning_rate": 2.0670000000000003e-06, "loss": 2.8242, "step": 692 }, { "epoch": 0.7653230259525124, "grad_norm": 15.135931015014648, "learning_rate": 2.07e-06, "loss": 2.897, "step": 693 }, { "epoch": 0.7664273881833241, "grad_norm": 5.122869491577148, "learning_rate": 2.073e-06, "loss": 2.8581, "step": 694 }, { "epoch": 0.7675317504141358, "grad_norm": 16.444494247436523, "learning_rate": 2.076e-06, "loss": 2.8708, "step": 695 }, { "epoch": 0.7686361126449476, "grad_norm": 5.81422233581543, "learning_rate": 2.079e-06, "loss": 2.8662, "step": 696 }, { "epoch": 0.7697404748757593, "grad_norm": 4.8777899742126465, "learning_rate": 2.082e-06, "loss": 2.8574, "step": 697 }, { "epoch": 0.7708448371065709, "grad_norm": 9.85283374786377, "learning_rate": 
2.0850000000000004e-06, "loss": 2.8837, "step": 698 }, { "epoch": 0.7719491993373827, "grad_norm": 31.78055763244629, "learning_rate": 2.0879999999999997e-06, "loss": 2.8656, "step": 699 }, { "epoch": 0.7730535615681944, "grad_norm": 18.80649757385254, "learning_rate": 2.091e-06, "loss": 2.9039, "step": 700 }, { "epoch": 0.774157923799006, "grad_norm": 8.838604927062988, "learning_rate": 2.094e-06, "loss": 3.0165, "step": 701 }, { "epoch": 0.7752622860298177, "grad_norm": 7.575699329376221, "learning_rate": 2.097e-06, "loss": 2.9624, "step": 702 }, { "epoch": 0.7763666482606295, "grad_norm": 4.912428855895996, "learning_rate": 2.1000000000000002e-06, "loss": 2.8986, "step": 703 }, { "epoch": 0.7774710104914412, "grad_norm": 6.301784515380859, "learning_rate": 2.103e-06, "loss": 2.9219, "step": 704 }, { "epoch": 0.7785753727222529, "grad_norm": 6.381612777709961, "learning_rate": 2.106e-06, "loss": 2.926, "step": 705 }, { "epoch": 0.7796797349530646, "grad_norm": 7.377673625946045, "learning_rate": 2.109e-06, "loss": 2.8919, "step": 706 }, { "epoch": 0.7807840971838763, "grad_norm": 5.588716506958008, "learning_rate": 2.1120000000000003e-06, "loss": 2.9122, "step": 707 }, { "epoch": 0.781888459414688, "grad_norm": 5.039252281188965, "learning_rate": 2.1149999999999997e-06, "loss": 2.8598, "step": 708 }, { "epoch": 0.7829928216454998, "grad_norm": 6.044271945953369, "learning_rate": 2.118e-06, "loss": 2.8558, "step": 709 }, { "epoch": 0.7840971838763114, "grad_norm": 4.835532188415527, "learning_rate": 2.121e-06, "loss": 2.8244, "step": 710 }, { "epoch": 0.7852015461071231, "grad_norm": 8.727535247802734, "learning_rate": 2.124e-06, "loss": 2.8354, "step": 711 }, { "epoch": 0.7863059083379348, "grad_norm": 8.23464584350586, "learning_rate": 2.127e-06, "loss": 2.8346, "step": 712 }, { "epoch": 0.7874102705687466, "grad_norm": 6.478096008300781, "learning_rate": 2.13e-06, "loss": 2.8458, "step": 713 }, { "epoch": 0.7885146327995582, "grad_norm": 5.618811130523682, 
"learning_rate": 2.133e-06, "loss": 2.8609, "step": 714 }, { "epoch": 0.7896189950303699, "grad_norm": 4.505098342895508, "learning_rate": 2.136e-06, "loss": 2.8183, "step": 715 }, { "epoch": 0.7907233572611817, "grad_norm": 7.638603687286377, "learning_rate": 2.139e-06, "loss": 2.8744, "step": 716 }, { "epoch": 0.7918277194919934, "grad_norm": 7.787363529205322, "learning_rate": 2.1420000000000004e-06, "loss": 2.8617, "step": 717 }, { "epoch": 0.792932081722805, "grad_norm": 5.816930770874023, "learning_rate": 2.145e-06, "loss": 2.8405, "step": 718 }, { "epoch": 0.7940364439536168, "grad_norm": 8.481728553771973, "learning_rate": 2.148e-06, "loss": 2.8464, "step": 719 }, { "epoch": 0.7951408061844285, "grad_norm": 7.4492292404174805, "learning_rate": 2.1510000000000002e-06, "loss": 2.8837, "step": 720 }, { "epoch": 0.7962451684152402, "grad_norm": 6.7462968826293945, "learning_rate": 2.154e-06, "loss": 2.7991, "step": 721 }, { "epoch": 0.7973495306460519, "grad_norm": 6.899726867675781, "learning_rate": 2.1570000000000003e-06, "loss": 2.8724, "step": 722 }, { "epoch": 0.7984538928768636, "grad_norm": 6.951638698577881, "learning_rate": 2.16e-06, "loss": 2.8733, "step": 723 }, { "epoch": 0.7995582551076753, "grad_norm": 14.196961402893066, "learning_rate": 2.163e-06, "loss": 2.8437, "step": 724 }, { "epoch": 0.800662617338487, "grad_norm": 8.304007530212402, "learning_rate": 2.166e-06, "loss": 2.7881, "step": 725 }, { "epoch": 0.8017669795692988, "grad_norm": 4.76495361328125, "learning_rate": 2.1690000000000003e-06, "loss": 2.8046, "step": 726 }, { "epoch": 0.8028713418001104, "grad_norm": 13.777726173400879, "learning_rate": 2.172e-06, "loss": 2.8367, "step": 727 }, { "epoch": 0.8039757040309221, "grad_norm": 7.03594446182251, "learning_rate": 2.175e-06, "loss": 2.8114, "step": 728 }, { "epoch": 0.8050800662617339, "grad_norm": 46.67255401611328, "learning_rate": 2.178e-06, "loss": 2.8297, "step": 729 }, { "epoch": 0.8061844284925456, "grad_norm": 
4.9897236824035645, "learning_rate": 2.181e-06, "loss": 2.8464, "step": 730 }, { "epoch": 0.8072887907233572, "grad_norm": 46.8662109375, "learning_rate": 2.184e-06, "loss": 2.8801, "step": 731 }, { "epoch": 0.8083931529541689, "grad_norm": 6.6478166580200195, "learning_rate": 2.1870000000000004e-06, "loss": 2.8097, "step": 732 }, { "epoch": 0.8094975151849807, "grad_norm": 12.649857521057129, "learning_rate": 2.1899999999999998e-06, "loss": 2.8797, "step": 733 }, { "epoch": 0.8106018774157924, "grad_norm": 8.517200469970703, "learning_rate": 2.193e-06, "loss": 2.8437, "step": 734 }, { "epoch": 0.811706239646604, "grad_norm": 8.531791687011719, "learning_rate": 2.1960000000000002e-06, "loss": 2.8238, "step": 735 }, { "epoch": 0.8128106018774158, "grad_norm": 7.776071548461914, "learning_rate": 2.199e-06, "loss": 2.8129, "step": 736 }, { "epoch": 0.8139149641082275, "grad_norm": 4.694310188293457, "learning_rate": 2.2020000000000003e-06, "loss": 2.817, "step": 737 }, { "epoch": 0.8150193263390392, "grad_norm": 14.992208480834961, "learning_rate": 2.205e-06, "loss": 2.8554, "step": 738 }, { "epoch": 0.816123688569851, "grad_norm": 8.188716888427734, "learning_rate": 2.208e-06, "loss": 2.7932, "step": 739 }, { "epoch": 0.8172280508006626, "grad_norm": 9.709641456604004, "learning_rate": 2.211e-06, "loss": 2.8168, "step": 740 }, { "epoch": 0.8183324130314743, "grad_norm": 9.272575378417969, "learning_rate": 2.2140000000000003e-06, "loss": 2.7904, "step": 741 }, { "epoch": 0.819436775262286, "grad_norm": 8.172675132751465, "learning_rate": 2.2169999999999997e-06, "loss": 2.8389, "step": 742 }, { "epoch": 0.8205411374930978, "grad_norm": 4.885384559631348, "learning_rate": 2.22e-06, "loss": 2.7836, "step": 743 }, { "epoch": 0.8216454997239094, "grad_norm": 5.35915470123291, "learning_rate": 2.223e-06, "loss": 2.8363, "step": 744 }, { "epoch": 0.8227498619547211, "grad_norm": 5.360764503479004, "learning_rate": 2.226e-06, "loss": 2.8248, "step": 745 }, { "epoch": 
0.8238542241855329, "grad_norm": 4.48638916015625, "learning_rate": 2.229e-06, "loss": 2.8012, "step": 746 }, { "epoch": 0.8249585864163446, "grad_norm": 7.45283842086792, "learning_rate": 2.232e-06, "loss": 2.8046, "step": 747 }, { "epoch": 0.8260629486471562, "grad_norm": 8.374062538146973, "learning_rate": 2.2349999999999998e-06, "loss": 2.7972, "step": 748 }, { "epoch": 0.827167310877968, "grad_norm": 27.807239532470703, "learning_rate": 2.238e-06, "loss": 2.824, "step": 749 }, { "epoch": 0.8282716731087797, "grad_norm": 6.585760116577148, "learning_rate": 2.2410000000000002e-06, "loss": 2.81, "step": 750 }, { "epoch": 0.8293760353395914, "grad_norm": 7.468047142028809, "learning_rate": 2.244e-06, "loss": 2.9824, "step": 751 }, { "epoch": 0.830480397570403, "grad_norm": 4.699227333068848, "learning_rate": 2.247e-06, "loss": 2.9107, "step": 752 }, { "epoch": 0.8315847598012148, "grad_norm": 8.216017723083496, "learning_rate": 2.25e-06, "loss": 2.8978, "step": 753 }, { "epoch": 0.8326891220320265, "grad_norm": 4.082802772521973, "learning_rate": 2.253e-06, "loss": 2.871, "step": 754 }, { "epoch": 0.8337934842628382, "grad_norm": 4.479636192321777, "learning_rate": 2.256e-06, "loss": 2.8774, "step": 755 }, { "epoch": 0.83489784649365, "grad_norm": 3.8712258338928223, "learning_rate": 2.2590000000000003e-06, "loss": 2.8307, "step": 756 }, { "epoch": 0.8360022087244616, "grad_norm": 3.636882781982422, "learning_rate": 2.262e-06, "loss": 2.8182, "step": 757 }, { "epoch": 0.8371065709552733, "grad_norm": 4.246091842651367, "learning_rate": 2.265e-06, "loss": 2.805, "step": 758 }, { "epoch": 0.8382109331860851, "grad_norm": 4.074064254760742, "learning_rate": 2.268e-06, "loss": 2.7868, "step": 759 }, { "epoch": 0.8393152954168968, "grad_norm": 5.526124000549316, "learning_rate": 2.2710000000000004e-06, "loss": 2.7841, "step": 760 }, { "epoch": 0.8404196576477084, "grad_norm": 4.347794532775879, "learning_rate": 2.274e-06, "loss": 2.8344, "step": 761 }, { "epoch": 
0.8415240198785201, "grad_norm": 4.434337139129639, "learning_rate": 2.277e-06, "loss": 2.7765, "step": 762 }, { "epoch": 0.8426283821093319, "grad_norm": 5.308748722076416, "learning_rate": 2.28e-06, "loss": 2.7577, "step": 763 }, { "epoch": 0.8437327443401436, "grad_norm": 11.629925727844238, "learning_rate": 2.283e-06, "loss": 2.7534, "step": 764 }, { "epoch": 0.8448371065709552, "grad_norm": 6.296328067779541, "learning_rate": 2.2860000000000002e-06, "loss": 2.7551, "step": 765 }, { "epoch": 0.845941468801767, "grad_norm": 5.09592866897583, "learning_rate": 2.2890000000000004e-06, "loss": 2.7767, "step": 766 }, { "epoch": 0.8470458310325787, "grad_norm": 21.503662109375, "learning_rate": 2.292e-06, "loss": 2.7573, "step": 767 }, { "epoch": 0.8481501932633904, "grad_norm": 32.04489517211914, "learning_rate": 2.295e-06, "loss": 2.752, "step": 768 }, { "epoch": 0.8492545554942021, "grad_norm": 9.0078125, "learning_rate": 2.2980000000000003e-06, "loss": 2.8115, "step": 769 }, { "epoch": 0.8503589177250138, "grad_norm": 3.8854620456695557, "learning_rate": 2.301e-06, "loss": 2.7976, "step": 770 }, { "epoch": 0.8514632799558255, "grad_norm": 4.6231231689453125, "learning_rate": 2.304e-06, "loss": 2.7779, "step": 771 }, { "epoch": 0.8525676421866372, "grad_norm": 4.482365131378174, "learning_rate": 2.307e-06, "loss": 2.7646, "step": 772 }, { "epoch": 0.853672004417449, "grad_norm": 9.685529708862305, "learning_rate": 2.31e-06, "loss": 2.7763, "step": 773 }, { "epoch": 0.8547763666482606, "grad_norm": 4.868612766265869, "learning_rate": 2.313e-06, "loss": 2.7292, "step": 774 }, { "epoch": 0.8558807288790723, "grad_norm": 4.6026740074157715, "learning_rate": 2.3160000000000004e-06, "loss": 2.688, "step": 775 }, { "epoch": 0.8569850911098841, "grad_norm": 5.870129108428955, "learning_rate": 2.3189999999999997e-06, "loss": 2.7343, "step": 776 }, { "epoch": 0.8580894533406958, "grad_norm": 4.750532150268555, "learning_rate": 2.322e-06, "loss": 2.7469, "step": 777 }, { 
"epoch": 0.8591938155715074, "grad_norm": 3.443963050842285, "learning_rate": 2.325e-06, "loss": 2.7874, "step": 778 }, { "epoch": 0.8602981778023192, "grad_norm": 8.74045467376709, "learning_rate": 2.328e-06, "loss": 2.7375, "step": 779 }, { "epoch": 0.8614025400331309, "grad_norm": 4.99867057800293, "learning_rate": 2.3310000000000002e-06, "loss": 2.7921, "step": 780 }, { "epoch": 0.8625069022639426, "grad_norm": 4.467084884643555, "learning_rate": 2.334e-06, "loss": 2.7701, "step": 781 }, { "epoch": 0.8636112644947542, "grad_norm": 15.084308624267578, "learning_rate": 2.337e-06, "loss": 2.7321, "step": 782 }, { "epoch": 0.864715626725566, "grad_norm": 4.91359281539917, "learning_rate": 2.34e-06, "loss": 2.7755, "step": 783 }, { "epoch": 0.8658199889563777, "grad_norm": 3.637756824493408, "learning_rate": 2.3430000000000003e-06, "loss": 2.7444, "step": 784 }, { "epoch": 0.8669243511871894, "grad_norm": 3.1390814781188965, "learning_rate": 2.346e-06, "loss": 2.7523, "step": 785 }, { "epoch": 0.8680287134180011, "grad_norm": 26.70213508605957, "learning_rate": 2.349e-06, "loss": 2.7537, "step": 786 }, { "epoch": 0.8691330756488128, "grad_norm": 4.613979816436768, "learning_rate": 2.352e-06, "loss": 2.798, "step": 787 }, { "epoch": 0.8702374378796245, "grad_norm": 3.520793914794922, "learning_rate": 2.355e-06, "loss": 2.7395, "step": 788 }, { "epoch": 0.8713418001104363, "grad_norm": 4.511481285095215, "learning_rate": 2.358e-06, "loss": 2.7641, "step": 789 }, { "epoch": 0.8724461623412479, "grad_norm": 4.823662281036377, "learning_rate": 2.3610000000000003e-06, "loss": 2.7302, "step": 790 }, { "epoch": 0.8735505245720596, "grad_norm": 4.074328422546387, "learning_rate": 2.3639999999999997e-06, "loss": 2.7216, "step": 791 }, { "epoch": 0.8746548868028713, "grad_norm": 19.40118980407715, "learning_rate": 2.367e-06, "loss": 2.7168, "step": 792 }, { "epoch": 0.8757592490336831, "grad_norm": 13.606962203979492, "learning_rate": 2.37e-06, "loss": 2.7596, "step": 793 }, { 
"epoch": 0.8768636112644947, "grad_norm": 6.967868804931641, "learning_rate": 2.373e-06, "loss": 2.7878, "step": 794 }, { "epoch": 0.8779679734953064, "grad_norm": 10.875740051269531, "learning_rate": 2.376e-06, "loss": 2.7951, "step": 795 }, { "epoch": 0.8790723357261182, "grad_norm": 7.9806437492370605, "learning_rate": 2.379e-06, "loss": 2.7513, "step": 796 }, { "epoch": 0.8801766979569299, "grad_norm": 6.14340877532959, "learning_rate": 2.382e-06, "loss": 2.8068, "step": 797 }, { "epoch": 0.8812810601877415, "grad_norm": 6.231996059417725, "learning_rate": 2.385e-06, "loss": 2.7306, "step": 798 }, { "epoch": 0.8823854224185533, "grad_norm": 8.104541778564453, "learning_rate": 2.3880000000000003e-06, "loss": 2.7888, "step": 799 }, { "epoch": 0.883489784649365, "grad_norm": 8.865474700927734, "learning_rate": 2.391e-06, "loss": 2.8189, "step": 800 }, { "epoch": 0.8845941468801767, "grad_norm": 7.841440677642822, "learning_rate": 2.394e-06, "loss": 2.8718, "step": 801 }, { "epoch": 0.8856985091109884, "grad_norm": 3.9866762161254883, "learning_rate": 2.397e-06, "loss": 2.8596, "step": 802 }, { "epoch": 0.8868028713418001, "grad_norm": 4.578371524810791, "learning_rate": 2.4000000000000003e-06, "loss": 2.8361, "step": 803 }, { "epoch": 0.8879072335726118, "grad_norm": 4.267418384552002, "learning_rate": 2.403e-06, "loss": 2.8345, "step": 804 }, { "epoch": 0.8890115958034235, "grad_norm": 5.098931312561035, "learning_rate": 2.406e-06, "loss": 2.8255, "step": 805 }, { "epoch": 0.8901159580342353, "grad_norm": 4.34904146194458, "learning_rate": 2.409e-06, "loss": 2.8006, "step": 806 }, { "epoch": 0.8912203202650469, "grad_norm": 5.9788923263549805, "learning_rate": 2.412e-06, "loss": 2.7874, "step": 807 }, { "epoch": 0.8923246824958586, "grad_norm": 3.173029661178589, "learning_rate": 2.415e-06, "loss": 2.7887, "step": 808 }, { "epoch": 0.8934290447266704, "grad_norm": 4.28541374206543, "learning_rate": 2.4180000000000004e-06, "loss": 2.7804, "step": 809 }, { "epoch": 
0.8945334069574821, "grad_norm": 5.41977596282959, "learning_rate": 2.4209999999999998e-06, "loss": 2.7591, "step": 810 }, { "epoch": 0.8956377691882937, "grad_norm": 4.136407852172852, "learning_rate": 2.424e-06, "loss": 2.7018, "step": 811 }, { "epoch": 0.8967421314191054, "grad_norm": 5.8345537185668945, "learning_rate": 2.4270000000000002e-06, "loss": 2.8033, "step": 812 }, { "epoch": 0.8978464936499172, "grad_norm": 5.525015830993652, "learning_rate": 2.43e-06, "loss": 2.7383, "step": 813 }, { "epoch": 0.8989508558807289, "grad_norm": 4.575953006744385, "learning_rate": 2.4330000000000003e-06, "loss": 2.7434, "step": 814 }, { "epoch": 0.9000552181115405, "grad_norm": 5.2104620933532715, "learning_rate": 2.436e-06, "loss": 2.7249, "step": 815 }, { "epoch": 0.9011595803423523, "grad_norm": 6.20648193359375, "learning_rate": 2.439e-06, "loss": 2.7557, "step": 816 }, { "epoch": 0.902263942573164, "grad_norm": 6.0468621253967285, "learning_rate": 2.442e-06, "loss": 2.7026, "step": 817 }, { "epoch": 0.9033683048039757, "grad_norm": 4.672908306121826, "learning_rate": 2.4450000000000003e-06, "loss": 2.7433, "step": 818 }, { "epoch": 0.9044726670347875, "grad_norm": 2.8970389366149902, "learning_rate": 2.448e-06, "loss": 2.7019, "step": 819 }, { "epoch": 0.9055770292655991, "grad_norm": 15.866710662841797, "learning_rate": 2.451e-06, "loss": 2.7453, "step": 820 }, { "epoch": 0.9066813914964108, "grad_norm": 3.9242308139801025, "learning_rate": 2.454e-06, "loss": 2.7272, "step": 821 }, { "epoch": 0.9077857537272225, "grad_norm": 5.132657527923584, "learning_rate": 2.457e-06, "loss": 2.7368, "step": 822 }, { "epoch": 0.9088901159580343, "grad_norm": 5.273726940155029, "learning_rate": 2.46e-06, "loss": 2.7632, "step": 823 }, { "epoch": 0.9099944781888459, "grad_norm": 4.313276290893555, "learning_rate": 2.4630000000000004e-06, "loss": 2.686, "step": 824 }, { "epoch": 0.9110988404196576, "grad_norm": 8.7987642288208, "learning_rate": 2.4659999999999998e-06, "loss": 
2.7653, "step": 825 }, { "epoch": 0.9122032026504694, "grad_norm": 6.246443271636963, "learning_rate": 2.469e-06, "loss": 2.7076, "step": 826 }, { "epoch": 0.9133075648812811, "grad_norm": 4.382547378540039, "learning_rate": 2.4720000000000002e-06, "loss": 2.7477, "step": 827 }, { "epoch": 0.9144119271120927, "grad_norm": 18.213865280151367, "learning_rate": 2.475e-06, "loss": 2.7288, "step": 828 }, { "epoch": 0.9155162893429045, "grad_norm": 4.6512370109558105, "learning_rate": 2.4780000000000002e-06, "loss": 2.7587, "step": 829 }, { "epoch": 0.9166206515737162, "grad_norm": 8.168339729309082, "learning_rate": 2.481e-06, "loss": 2.7382, "step": 830 }, { "epoch": 0.9177250138045279, "grad_norm": 3.2970499992370605, "learning_rate": 2.484e-06, "loss": 2.7453, "step": 831 }, { "epoch": 0.9188293760353395, "grad_norm": 7.966495037078857, "learning_rate": 2.487e-06, "loss": 2.7139, "step": 832 }, { "epoch": 0.9199337382661513, "grad_norm": 5.637610912322998, "learning_rate": 2.4900000000000003e-06, "loss": 2.744, "step": 833 }, { "epoch": 0.921038100496963, "grad_norm": 25.517730712890625, "learning_rate": 2.4929999999999997e-06, "loss": 2.7216, "step": 834 }, { "epoch": 0.9221424627277747, "grad_norm": 5.415754795074463, "learning_rate": 2.496e-06, "loss": 2.6974, "step": 835 }, { "epoch": 0.9232468249585865, "grad_norm": 13.361658096313477, "learning_rate": 2.499e-06, "loss": 2.7605, "step": 836 }, { "epoch": 0.9243511871893981, "grad_norm": 14.42186450958252, "learning_rate": 2.502e-06, "loss": 2.7482, "step": 837 }, { "epoch": 0.9254555494202098, "grad_norm": 6.0025177001953125, "learning_rate": 2.505e-06, "loss": 2.7794, "step": 838 }, { "epoch": 0.9265599116510216, "grad_norm": 2.487039566040039, "learning_rate": 2.508e-06, "loss": 2.6807, "step": 839 }, { "epoch": 0.9276642738818333, "grad_norm": 3.805358648300171, "learning_rate": 2.5109999999999998e-06, "loss": 2.7459, "step": 840 }, { "epoch": 0.9287686361126449, "grad_norm": 5.416221618652344, 
"learning_rate": 2.514e-06, "loss": 2.7437, "step": 841 }, { "epoch": 0.9298729983434566, "grad_norm": 3.931941509246826, "learning_rate": 2.517e-06, "loss": 2.7242, "step": 842 }, { "epoch": 0.9309773605742684, "grad_norm": 4.7013092041015625, "learning_rate": 2.52e-06, "loss": 2.7268, "step": 843 }, { "epoch": 0.9320817228050801, "grad_norm": 3.069960594177246, "learning_rate": 2.523e-06, "loss": 2.7539, "step": 844 }, { "epoch": 0.9331860850358917, "grad_norm": 13.182802200317383, "learning_rate": 2.526e-06, "loss": 2.7416, "step": 845 }, { "epoch": 0.9342904472667035, "grad_norm": 5.875760078430176, "learning_rate": 2.5290000000000003e-06, "loss": 2.7159, "step": 846 }, { "epoch": 0.9353948094975152, "grad_norm": 5.399531841278076, "learning_rate": 2.532e-06, "loss": 2.6874, "step": 847 }, { "epoch": 0.9364991717283269, "grad_norm": 3.2290799617767334, "learning_rate": 2.5350000000000003e-06, "loss": 2.7175, "step": 848 }, { "epoch": 0.9376035339591386, "grad_norm": 4.282095909118652, "learning_rate": 2.538e-06, "loss": 2.7217, "step": 849 }, { "epoch": 0.9387078961899503, "grad_norm": 5.906040191650391, "learning_rate": 2.541e-06, "loss": 2.7666, "step": 850 }, { "epoch": 0.939812258420762, "grad_norm": 4.4127326011657715, "learning_rate": 2.544e-06, "loss": 2.8716, "step": 851 }, { "epoch": 0.9409166206515737, "grad_norm": 2.606186866760254, "learning_rate": 2.5470000000000003e-06, "loss": 2.8369, "step": 852 }, { "epoch": 0.9420209828823854, "grad_norm": 3.2216591835021973, "learning_rate": 2.55e-06, "loss": 2.8488, "step": 853 }, { "epoch": 0.9431253451131971, "grad_norm": 5.20005989074707, "learning_rate": 2.553e-06, "loss": 2.8211, "step": 854 }, { "epoch": 0.9442297073440088, "grad_norm": 2.7807693481445312, "learning_rate": 2.556e-06, "loss": 2.8187, "step": 855 }, { "epoch": 0.9453340695748206, "grad_norm": 3.2286016941070557, "learning_rate": 2.559e-06, "loss": 2.7839, "step": 856 }, { "epoch": 0.9464384318056323, "grad_norm": 4.286034107208252, 
"learning_rate": 2.562e-06, "loss": 2.7633, "step": 857 }, { "epoch": 0.9475427940364439, "grad_norm": 3.4438748359680176, "learning_rate": 2.5650000000000004e-06, "loss": 2.7568, "step": 858 }, { "epoch": 0.9486471562672557, "grad_norm": 7.18031120300293, "learning_rate": 2.568e-06, "loss": 2.7055, "step": 859 }, { "epoch": 0.9497515184980674, "grad_norm": 8.257355690002441, "learning_rate": 2.571e-06, "loss": 2.7276, "step": 860 }, { "epoch": 0.950855880728879, "grad_norm": 6.619614601135254, "learning_rate": 2.5740000000000003e-06, "loss": 2.7194, "step": 861 }, { "epoch": 0.9519602429596907, "grad_norm": 7.799700736999512, "learning_rate": 2.577e-06, "loss": 2.7019, "step": 862 }, { "epoch": 0.9530646051905025, "grad_norm": 6.137044906616211, "learning_rate": 2.58e-06, "loss": 2.7271, "step": 863 }, { "epoch": 0.9541689674213142, "grad_norm": 6.049748420715332, "learning_rate": 2.583e-06, "loss": 2.7349, "step": 864 }, { "epoch": 0.9552733296521259, "grad_norm": 3.7724852561950684, "learning_rate": 2.586e-06, "loss": 2.7196, "step": 865 }, { "epoch": 0.9563776918829376, "grad_norm": 5.546345233917236, "learning_rate": 2.589e-06, "loss": 2.6993, "step": 866 }, { "epoch": 0.9574820541137493, "grad_norm": 7.750048637390137, "learning_rate": 2.5920000000000003e-06, "loss": 2.6604, "step": 867 }, { "epoch": 0.958586416344561, "grad_norm": 5.52736234664917, "learning_rate": 2.5949999999999997e-06, "loss": 2.6625, "step": 868 }, { "epoch": 0.9596907785753728, "grad_norm": 2.4821910858154297, "learning_rate": 2.598e-06, "loss": 2.6743, "step": 869 }, { "epoch": 0.9607951408061844, "grad_norm": 9.26640510559082, "learning_rate": 2.601e-06, "loss": 2.7174, "step": 870 }, { "epoch": 0.9618995030369961, "grad_norm": 6.346329689025879, "learning_rate": 2.604e-06, "loss": 2.6522, "step": 871 }, { "epoch": 0.9630038652678078, "grad_norm": 3.7115609645843506, "learning_rate": 2.607e-06, "loss": 2.7383, "step": 872 }, { "epoch": 0.9641082274986196, "grad_norm": 
7.449687957763672, "learning_rate": 2.61e-06, "loss": 2.6936, "step": 873 }, { "epoch": 0.9652125897294312, "grad_norm": 7.011687278747559, "learning_rate": 2.613e-06, "loss": 2.7483, "step": 874 }, { "epoch": 0.9663169519602429, "grad_norm": 9.063581466674805, "learning_rate": 2.616e-06, "loss": 2.7265, "step": 875 }, { "epoch": 0.9674213141910547, "grad_norm": 6.407412052154541, "learning_rate": 2.6190000000000003e-06, "loss": 2.6899, "step": 876 }, { "epoch": 0.9685256764218664, "grad_norm": 4.0232415199279785, "learning_rate": 2.622e-06, "loss": 2.722, "step": 877 }, { "epoch": 0.969630038652678, "grad_norm": 5.1762824058532715, "learning_rate": 2.625e-06, "loss": 2.6948, "step": 878 }, { "epoch": 0.9707344008834898, "grad_norm": 7.155012607574463, "learning_rate": 2.628e-06, "loss": 2.6802, "step": 879 }, { "epoch": 0.9718387631143015, "grad_norm": 5.864226341247559, "learning_rate": 2.631e-06, "loss": 2.7037, "step": 880 }, { "epoch": 0.9729431253451132, "grad_norm": 6.079779624938965, "learning_rate": 2.634e-06, "loss": 2.6991, "step": 881 }, { "epoch": 0.9740474875759249, "grad_norm": 7.494949817657471, "learning_rate": 2.6370000000000003e-06, "loss": 2.7172, "step": 882 }, { "epoch": 0.9751518498067366, "grad_norm": 4.90504264831543, "learning_rate": 2.6399999999999997e-06, "loss": 2.6657, "step": 883 }, { "epoch": 0.9762562120375483, "grad_norm": 2.6013810634613037, "learning_rate": 2.643e-06, "loss": 2.7443, "step": 884 }, { "epoch": 0.97736057426836, "grad_norm": 4.03771448135376, "learning_rate": 2.646e-06, "loss": 2.671, "step": 885 }, { "epoch": 0.9784649364991718, "grad_norm": 50.57145309448242, "learning_rate": 2.649e-06, "loss": 2.6806, "step": 886 }, { "epoch": 0.9795692987299834, "grad_norm": 5.441807746887207, "learning_rate": 2.652e-06, "loss": 2.6953, "step": 887 }, { "epoch": 0.9806736609607951, "grad_norm": 7.758111953735352, "learning_rate": 2.655e-06, "loss": 2.7103, "step": 888 }, { "epoch": 0.9817780231916069, "grad_norm": 
3.9920079708099365, "learning_rate": 2.6580000000000002e-06, "loss": 2.6993, "step": 889 }, { "epoch": 0.9828823854224186, "grad_norm": 14.066719055175781, "learning_rate": 2.661e-06, "loss": 2.6433, "step": 890 }, { "epoch": 0.9839867476532302, "grad_norm": 14.268651008605957, "learning_rate": 2.6640000000000002e-06, "loss": 2.689, "step": 891 }, { "epoch": 0.9850911098840419, "grad_norm": 10.148332595825195, "learning_rate": 2.6670000000000005e-06, "loss": 2.7133, "step": 892 }, { "epoch": 0.9861954721148537, "grad_norm": 2.6451961994171143, "learning_rate": 2.67e-06, "loss": 2.6982, "step": 893 }, { "epoch": 0.9872998343456654, "grad_norm": 9.931758880615234, "learning_rate": 2.673e-06, "loss": 2.7013, "step": 894 }, { "epoch": 0.988404196576477, "grad_norm": 6.0109124183654785, "learning_rate": 2.6760000000000003e-06, "loss": 2.6766, "step": 895 }, { "epoch": 0.9895085588072888, "grad_norm": 3.769601345062256, "learning_rate": 2.679e-06, "loss": 2.685, "step": 896 }, { "epoch": 0.9906129210381005, "grad_norm": 5.001194000244141, "learning_rate": 2.682e-06, "loss": 2.6916, "step": 897 }, { "epoch": 0.9917172832689122, "grad_norm": 9.031503677368164, "learning_rate": 2.685e-06, "loss": 2.7097, "step": 898 }, { "epoch": 0.992821645499724, "grad_norm": 8.446287155151367, "learning_rate": 2.688e-06, "loss": 2.704, "step": 899 }, { "epoch": 0.9939260077305356, "grad_norm": 3.633786201477051, "learning_rate": 2.691e-06, "loss": 2.6789, "step": 900 }, { "epoch": 0.9950303699613473, "grad_norm": 5.042684555053711, "learning_rate": 2.6940000000000004e-06, "loss": 2.7752, "step": 901 }, { "epoch": 0.996134732192159, "grad_norm": 3.1309995651245117, "learning_rate": 2.6969999999999998e-06, "loss": 2.6617, "step": 902 }, { "epoch": 0.9972390944229708, "grad_norm": 4.575377464294434, "learning_rate": 2.7e-06, "loss": 2.6576, "step": 903 }, { "epoch": 0.9983434566537824, "grad_norm": 6.190492153167725, "learning_rate": 2.703e-06, "loss": 2.6431, "step": 904 }, { "epoch": 
0.9994478188845941, "grad_norm": 3.8785059452056885, "learning_rate": 2.706e-06, "loss": 2.6414, "step": 905 }, { "epoch": 1.0, "grad_norm": 2.1259078979492188, "learning_rate": 2.7090000000000002e-06, "loss": 1.3317, "step": 906 }, { "epoch": 1.0011043622308118, "grad_norm": 4.567052841186523, "learning_rate": 2.712e-06, "loss": 2.8414, "step": 907 }, { "epoch": 1.0022087244616233, "grad_norm": 4.167389392852783, "learning_rate": 2.715e-06, "loss": 2.8341, "step": 908 }, { "epoch": 1.0033130866924351, "grad_norm": 5.1181206703186035, "learning_rate": 2.718e-06, "loss": 2.7869, "step": 909 }, { "epoch": 1.004417448923247, "grad_norm": 5.196169853210449, "learning_rate": 2.7210000000000003e-06, "loss": 2.7933, "step": 910 }, { "epoch": 1.0055218111540585, "grad_norm": 2.1508052349090576, "learning_rate": 2.724e-06, "loss": 2.7587, "step": 911 }, { "epoch": 1.0066261733848703, "grad_norm": 2.5957415103912354, "learning_rate": 2.727e-06, "loss": 2.7473, "step": 912 }, { "epoch": 1.007730535615682, "grad_norm": 3.5958218574523926, "learning_rate": 2.73e-06, "loss": 2.7413, "step": 913 }, { "epoch": 1.0088348978464936, "grad_norm": 4.188127040863037, "learning_rate": 2.733e-06, "loss": 2.6835, "step": 914 }, { "epoch": 1.0099392600773054, "grad_norm": 3.927041530609131, "learning_rate": 2.736e-06, "loss": 2.6835, "step": 915 }, { "epoch": 1.011043622308117, "grad_norm": 3.884631395339966, "learning_rate": 2.7390000000000004e-06, "loss": 2.6786, "step": 916 }, { "epoch": 1.0121479845389287, "grad_norm": 4.2509331703186035, "learning_rate": 2.7419999999999998e-06, "loss": 2.6645, "step": 917 }, { "epoch": 1.0132523467697405, "grad_norm": 4.386011123657227, "learning_rate": 2.745e-06, "loss": 2.6568, "step": 918 }, { "epoch": 1.014356709000552, "grad_norm": 8.464162826538086, "learning_rate": 2.748e-06, "loss": 2.6916, "step": 919 }, { "epoch": 1.0154610712313639, "grad_norm": 3.616569995880127, "learning_rate": 2.751e-06, "loss": 2.6546, "step": 920 }, { "epoch": 
1.0165654334621756, "grad_norm": 8.478858947753906, "learning_rate": 2.7540000000000002e-06, "loss": 2.6389, "step": 921 }, { "epoch": 1.0176697956929872, "grad_norm": 5.246654510498047, "learning_rate": 2.757e-06, "loss": 2.6189, "step": 922 }, { "epoch": 1.018774157923799, "grad_norm": 3.1030054092407227, "learning_rate": 2.76e-06, "loss": 2.6552, "step": 923 }, { "epoch": 1.0198785201546108, "grad_norm": 9.03740406036377, "learning_rate": 2.763e-06, "loss": 2.6398, "step": 924 }, { "epoch": 1.0209828823854223, "grad_norm": 4.094885349273682, "learning_rate": 2.7660000000000003e-06, "loss": 2.5787, "step": 925 }, { "epoch": 1.0220872446162341, "grad_norm": 6.668762683868408, "learning_rate": 2.7689999999999997e-06, "loss": 2.6308, "step": 926 }, { "epoch": 1.023191606847046, "grad_norm": 19.210979461669922, "learning_rate": 2.772e-06, "loss": 2.6654, "step": 927 }, { "epoch": 1.0242959690778575, "grad_norm": 8.282061576843262, "learning_rate": 2.775e-06, "loss": 2.6748, "step": 928 }, { "epoch": 1.0254003313086693, "grad_norm": 16.874279022216797, "learning_rate": 2.778e-06, "loss": 2.6326, "step": 929 }, { "epoch": 1.026504693539481, "grad_norm": 5.025364875793457, "learning_rate": 2.781e-06, "loss": 2.6667, "step": 930 }, { "epoch": 1.0276090557702926, "grad_norm": 5.626621246337891, "learning_rate": 2.784e-06, "loss": 2.6669, "step": 931 }, { "epoch": 1.0287134180011044, "grad_norm": 3.7376327514648438, "learning_rate": 2.787e-06, "loss": 2.6804, "step": 932 }, { "epoch": 1.0298177802319162, "grad_norm": 17.038105010986328, "learning_rate": 2.79e-06, "loss": 2.6951, "step": 933 }, { "epoch": 1.0309221424627277, "grad_norm": 5.2837371826171875, "learning_rate": 2.793e-06, "loss": 2.6915, "step": 934 }, { "epoch": 1.0320265046935395, "grad_norm": 3.584681272506714, "learning_rate": 2.7960000000000004e-06, "loss": 2.6632, "step": 935 }, { "epoch": 1.0331308669243513, "grad_norm": 5.167210578918457, "learning_rate": 2.799e-06, "loss": 2.7109, "step": 936 }, { 
"epoch": 1.0342352291551629, "grad_norm": 30.344533920288086, "learning_rate": 2.802e-06, "loss": 2.6538, "step": 937 }, { "epoch": 1.0353395913859746, "grad_norm": 8.738783836364746, "learning_rate": 2.8050000000000002e-06, "loss": 2.6915, "step": 938 }, { "epoch": 1.0364439536167862, "grad_norm": 5.577009201049805, "learning_rate": 2.808e-06, "loss": 2.6806, "step": 939 }, { "epoch": 1.037548315847598, "grad_norm": 6.853292465209961, "learning_rate": 2.8110000000000003e-06, "loss": 2.6571, "step": 940 }, { "epoch": 1.0386526780784098, "grad_norm": 5.332571029663086, "learning_rate": 2.814e-06, "loss": 2.645, "step": 941 }, { "epoch": 1.0397570403092213, "grad_norm": 6.2560200691223145, "learning_rate": 2.817e-06, "loss": 2.6982, "step": 942 }, { "epoch": 1.0408614025400331, "grad_norm": 4.082701683044434, "learning_rate": 2.82e-06, "loss": 2.6811, "step": 943 }, { "epoch": 1.041965764770845, "grad_norm": 38.55173110961914, "learning_rate": 2.8230000000000003e-06, "loss": 2.6284, "step": 944 }, { "epoch": 1.0430701270016565, "grad_norm": 8.60342025756836, "learning_rate": 2.826e-06, "loss": 2.6774, "step": 945 }, { "epoch": 1.0441744892324683, "grad_norm": 4.723059177398682, "learning_rate": 2.829e-06, "loss": 2.6302, "step": 946 }, { "epoch": 1.04527885146328, "grad_norm": 3.7251522541046143, "learning_rate": 2.832e-06, "loss": 2.662, "step": 947 }, { "epoch": 1.0463832136940916, "grad_norm": 4.8158650398254395, "learning_rate": 2.835e-06, "loss": 2.6771, "step": 948 }, { "epoch": 1.0474875759249034, "grad_norm": 5.326158046722412, "learning_rate": 2.838e-06, "loss": 2.6851, "step": 949 }, { "epoch": 1.0485919381557152, "grad_norm": 5.196305751800537, "learning_rate": 2.8410000000000004e-06, "loss": 2.6709, "step": 950 }, { "epoch": 1.0496963003865267, "grad_norm": 8.817981719970703, "learning_rate": 2.844e-06, "loss": 2.6782, "step": 951 }, { "epoch": 1.0508006626173385, "grad_norm": 14.541473388671875, "learning_rate": 2.847e-06, "loss": 2.7221, "step": 952 }, 
{ "epoch": 1.0519050248481503, "grad_norm": 8.281317710876465, "learning_rate": 2.8500000000000002e-06, "loss": 2.6888, "step": 953 }, { "epoch": 1.0530093870789619, "grad_norm": 14.410619735717773, "learning_rate": 2.853e-06, "loss": 2.6665, "step": 954 }, { "epoch": 1.0541137493097736, "grad_norm": 6.748003005981445, "learning_rate": 2.8560000000000003e-06, "loss": 2.6664, "step": 955 }, { "epoch": 1.0552181115405852, "grad_norm": 9.147809982299805, "learning_rate": 2.859e-06, "loss": 2.6927, "step": 956 }, { "epoch": 1.056322473771397, "grad_norm": 3.8853585720062256, "learning_rate": 2.862e-06, "loss": 2.8514, "step": 957 }, { "epoch": 1.0574268360022088, "grad_norm": 4.756077766418457, "learning_rate": 2.865e-06, "loss": 2.773, "step": 958 }, { "epoch": 1.0585311982330203, "grad_norm": 3.9456472396850586, "learning_rate": 2.8680000000000003e-06, "loss": 2.7694, "step": 959 }, { "epoch": 1.0596355604638321, "grad_norm": 3.4570322036743164, "learning_rate": 2.8709999999999997e-06, "loss": 2.7571, "step": 960 }, { "epoch": 1.060739922694644, "grad_norm": 3.5734505653381348, "learning_rate": 2.874e-06, "loss": 2.7364, "step": 961 }, { "epoch": 1.0618442849254555, "grad_norm": 3.768519878387451, "learning_rate": 2.877e-06, "loss": 2.7221, "step": 962 }, { "epoch": 1.0629486471562672, "grad_norm": 6.190154075622559, "learning_rate": 2.88e-06, "loss": 2.7126, "step": 963 }, { "epoch": 1.064053009387079, "grad_norm": 3.831538200378418, "learning_rate": 2.883e-06, "loss": 2.6988, "step": 964 }, { "epoch": 1.0651573716178906, "grad_norm": 3.524327516555786, "learning_rate": 2.886e-06, "loss": 2.6781, "step": 965 }, { "epoch": 1.0662617338487024, "grad_norm": 8.282210350036621, "learning_rate": 2.8889999999999998e-06, "loss": 2.6453, "step": 966 }, { "epoch": 1.0673660960795142, "grad_norm": 3.744211435317993, "learning_rate": 2.892e-06, "loss": 2.6056, "step": 967 }, { "epoch": 1.0684704583103257, "grad_norm": 6.43691349029541, "learning_rate": 2.8950000000000002e-06, 
"loss": 2.6308, "step": 968 }, { "epoch": 1.0695748205411375, "grad_norm": 7.362234592437744, "learning_rate": 2.898e-06, "loss": 2.6514, "step": 969 }, { "epoch": 1.0706791827719493, "grad_norm": 4.486380100250244, "learning_rate": 2.901e-06, "loss": 2.6724, "step": 970 }, { "epoch": 1.0717835450027609, "grad_norm": 4.717705726623535, "learning_rate": 2.904e-06, "loss": 2.5964, "step": 971 }, { "epoch": 1.0728879072335726, "grad_norm": 3.6690616607666016, "learning_rate": 2.907e-06, "loss": 2.6245, "step": 972 }, { "epoch": 1.0739922694643844, "grad_norm": 2.777660608291626, "learning_rate": 2.91e-06, "loss": 2.6563, "step": 973 }, { "epoch": 1.075096631695196, "grad_norm": 8.54758358001709, "learning_rate": 2.9130000000000003e-06, "loss": 2.6719, "step": 974 }, { "epoch": 1.0762009939260078, "grad_norm": 5.535069942474365, "learning_rate": 2.916e-06, "loss": 2.6614, "step": 975 }, { "epoch": 1.0773053561568195, "grad_norm": 4.803378105163574, "learning_rate": 2.919e-06, "loss": 2.5615, "step": 976 }, { "epoch": 1.078409718387631, "grad_norm": 10.156984329223633, "learning_rate": 2.922e-06, "loss": 2.6066, "step": 977 }, { "epoch": 1.079514080618443, "grad_norm": 3.9221391677856445, "learning_rate": 2.9250000000000004e-06, "loss": 2.5897, "step": 978 }, { "epoch": 1.0806184428492545, "grad_norm": 2.6225380897521973, "learning_rate": 2.928e-06, "loss": 2.5669, "step": 979 }, { "epoch": 1.0817228050800662, "grad_norm": 2.7243916988372803, "learning_rate": 2.931e-06, "loss": 2.6009, "step": 980 }, { "epoch": 1.082827167310878, "grad_norm": 4.194425582885742, "learning_rate": 2.934e-06, "loss": 2.5419, "step": 981 }, { "epoch": 1.0839315295416896, "grad_norm": 3.7887516021728516, "learning_rate": 2.937e-06, "loss": 2.6052, "step": 982 }, { "epoch": 1.0850358917725014, "grad_norm": 14.159911155700684, "learning_rate": 2.9400000000000002e-06, "loss": 2.5704, "step": 983 }, { "epoch": 1.0861402540033132, "grad_norm": 5.599745750427246, "learning_rate": 
2.9430000000000005e-06, "loss": 2.6172, "step": 984 }, { "epoch": 1.0872446162341247, "grad_norm": 3.416196346282959, "learning_rate": 2.946e-06, "loss": 2.6066, "step": 985 }, { "epoch": 1.0883489784649365, "grad_norm": 3.188967227935791, "learning_rate": 2.949e-06, "loss": 2.5614, "step": 986 }, { "epoch": 1.0894533406957483, "grad_norm": 6.747070789337158, "learning_rate": 2.9520000000000003e-06, "loss": 2.5729, "step": 987 }, { "epoch": 1.0905577029265598, "grad_norm": 10.927618980407715, "learning_rate": 2.955e-06, "loss": 2.5842, "step": 988 }, { "epoch": 1.0916620651573716, "grad_norm": 14.300015449523926, "learning_rate": 2.958e-06, "loss": 2.6173, "step": 989 }, { "epoch": 1.0927664273881834, "grad_norm": 4.057072639465332, "learning_rate": 2.961e-06, "loss": 2.5623, "step": 990 }, { "epoch": 1.093870789618995, "grad_norm": 5.795204162597656, "learning_rate": 2.964e-06, "loss": 2.5413, "step": 991 }, { "epoch": 1.0949751518498068, "grad_norm": 5.759687423706055, "learning_rate": 2.967e-06, "loss": 2.5641, "step": 992 }, { "epoch": 1.0960795140806185, "grad_norm": 5.1638054847717285, "learning_rate": 2.9700000000000004e-06, "loss": 2.5682, "step": 993 }, { "epoch": 1.09718387631143, "grad_norm": 4.547430038452148, "learning_rate": 2.9729999999999997e-06, "loss": 2.533, "step": 994 }, { "epoch": 1.098288238542242, "grad_norm": 7.957910537719727, "learning_rate": 2.976e-06, "loss": 2.6417, "step": 995 }, { "epoch": 1.0993926007730535, "grad_norm": 9.610820770263672, "learning_rate": 2.979e-06, "loss": 2.5529, "step": 996 }, { "epoch": 1.1004969630038652, "grad_norm": 9.117647171020508, "learning_rate": 2.982e-06, "loss": 2.5483, "step": 997 }, { "epoch": 1.101601325234677, "grad_norm": 14.441547393798828, "learning_rate": 2.9850000000000002e-06, "loss": 2.5964, "step": 998 }, { "epoch": 1.1027056874654886, "grad_norm": 3.9426941871643066, "learning_rate": 2.988e-06, "loss": 2.5335, "step": 999 }, { "epoch": 1.1038100496963004, "grad_norm": 3.5406010150909424, 
"learning_rate": 2.991e-06, "loss": 2.5711, "step": 1000 }, { "epoch": 1.1038100496963004, "eval_cer": 0.858620421494673, "eval_loss": 2.8158023357391357, "eval_runtime": 15.8493, "eval_samples_per_second": 19.181, "eval_steps_per_second": 0.631, "eval_wer": 0.990598618572525, "step": 1000 }, { "epoch": 1.1049144119271122, "grad_norm": 4.26876974105835, "learning_rate": 2.994e-06, "loss": 2.5821, "step": 1001 }, { "epoch": 1.1060187741579237, "grad_norm": 5.859242916107178, "learning_rate": 2.9970000000000003e-06, "loss": 2.5494, "step": 1002 }, { "epoch": 1.1071231363887355, "grad_norm": 3.950961112976074, "learning_rate": 3e-06, "loss": 2.5742, "step": 1003 }, { "epoch": 1.1082274986195473, "grad_norm": 5.5172119140625, "learning_rate": 3.003e-06, "loss": 2.5125, "step": 1004 }, { "epoch": 1.1093318608503588, "grad_norm": 4.897273540496826, "learning_rate": 3.006e-06, "loss": 2.5558, "step": 1005 }, { "epoch": 1.1104362230811706, "grad_norm": 4.6135125160217285, "learning_rate": 3.009e-06, "loss": 2.5886, "step": 1006 }, { "epoch": 1.1115405853119824, "grad_norm": 11.387911796569824, "learning_rate": 3.012e-06, "loss": 2.7971, "step": 1007 }, { "epoch": 1.112644947542794, "grad_norm": 5.129026889801025, "learning_rate": 3.0150000000000004e-06, "loss": 2.6807, "step": 1008 }, { "epoch": 1.1137493097736058, "grad_norm": 7.48226261138916, "learning_rate": 3.0179999999999997e-06, "loss": 2.6889, "step": 1009 }, { "epoch": 1.1148536720044175, "grad_norm": 7.68060302734375, "learning_rate": 3.021e-06, "loss": 2.6727, "step": 1010 }, { "epoch": 1.115958034235229, "grad_norm": 6.274251461029053, "learning_rate": 3.024e-06, "loss": 2.6348, "step": 1011 }, { "epoch": 1.1170623964660409, "grad_norm": 3.4180116653442383, "learning_rate": 3.027e-06, "loss": 2.5967, "step": 1012 }, { "epoch": 1.1181667586968527, "grad_norm": 3.8238024711608887, "learning_rate": 3.0300000000000002e-06, "loss": 2.5553, "step": 1013 }, { "epoch": 1.1192711209276642, "grad_norm": 
3.4177913665771484, "learning_rate": 3.033e-06, "loss": 2.5084, "step": 1014 }, { "epoch": 1.120375483158476, "grad_norm": 5.061764240264893, "learning_rate": 3.036e-06, "loss": 2.4925, "step": 1015 }, { "epoch": 1.1214798453892878, "grad_norm": 3.417938709259033, "learning_rate": 3.039e-06, "loss": 2.4991, "step": 1016 }, { "epoch": 1.1225842076200994, "grad_norm": 2.748145341873169, "learning_rate": 3.0420000000000003e-06, "loss": 2.4192, "step": 1017 }, { "epoch": 1.1236885698509111, "grad_norm": 4.923625946044922, "learning_rate": 3.0450000000000005e-06, "loss": 2.4561, "step": 1018 }, { "epoch": 1.1247929320817227, "grad_norm": 3.8951737880706787, "learning_rate": 3.048e-06, "loss": 2.4235, "step": 1019 }, { "epoch": 1.1258972943125345, "grad_norm": 3.3230767250061035, "learning_rate": 3.051e-06, "loss": 2.43, "step": 1020 }, { "epoch": 1.1270016565433463, "grad_norm": 4.772607326507568, "learning_rate": 3.0540000000000003e-06, "loss": 2.3815, "step": 1021 }, { "epoch": 1.1281060187741578, "grad_norm": 5.342062473297119, "learning_rate": 3.057e-06, "loss": 2.3658, "step": 1022 }, { "epoch": 1.1292103810049696, "grad_norm": 3.27280855178833, "learning_rate": 3.06e-06, "loss": 2.3431, "step": 1023 }, { "epoch": 1.1303147432357814, "grad_norm": 2.4579105377197266, "learning_rate": 3.063e-06, "loss": 2.3604, "step": 1024 }, { "epoch": 1.131419105466593, "grad_norm": 3.9909160137176514, "learning_rate": 3.066e-06, "loss": 2.3176, "step": 1025 }, { "epoch": 1.1325234676974048, "grad_norm": 3.5463430881500244, "learning_rate": 3.069e-06, "loss": 2.3241, "step": 1026 }, { "epoch": 1.1336278299282165, "grad_norm": 3.6147425174713135, "learning_rate": 3.0720000000000004e-06, "loss": 2.2974, "step": 1027 }, { "epoch": 1.134732192159028, "grad_norm": 5.1993865966796875, "learning_rate": 3.0749999999999998e-06, "loss": 2.2636, "step": 1028 }, { "epoch": 1.1358365543898399, "grad_norm": 4.798185348510742, "learning_rate": 3.078e-06, "loss": 2.2184, "step": 1029 }, { 
"epoch": 1.1369409166206517, "grad_norm": 3.4188528060913086, "learning_rate": 3.0810000000000002e-06, "loss": 2.2011, "step": 1030 }, { "epoch": 1.1380452788514632, "grad_norm": 6.232433795928955, "learning_rate": 3.084e-06, "loss": 2.1957, "step": 1031 }, { "epoch": 1.139149641082275, "grad_norm": 4.887523174285889, "learning_rate": 3.0870000000000003e-06, "loss": 2.1968, "step": 1032 }, { "epoch": 1.1402540033130868, "grad_norm": 3.3574609756469727, "learning_rate": 3.09e-06, "loss": 2.1492, "step": 1033 }, { "epoch": 1.1413583655438984, "grad_norm": 4.110033988952637, "learning_rate": 3.093e-06, "loss": 2.0875, "step": 1034 }, { "epoch": 1.1424627277747101, "grad_norm": 5.003952503204346, "learning_rate": 3.096e-06, "loss": 2.1198, "step": 1035 }, { "epoch": 1.1435670900055217, "grad_norm": 3.1240105628967285, "learning_rate": 3.0990000000000003e-06, "loss": 2.0711, "step": 1036 }, { "epoch": 1.1446714522363335, "grad_norm": 5.281810760498047, "learning_rate": 3.102e-06, "loss": 2.0381, "step": 1037 }, { "epoch": 1.1457758144671453, "grad_norm": 6.896369457244873, "learning_rate": 3.105e-06, "loss": 2.0348, "step": 1038 }, { "epoch": 1.146880176697957, "grad_norm": 5.4648308753967285, "learning_rate": 3.108e-06, "loss": 2.0388, "step": 1039 }, { "epoch": 1.1479845389287686, "grad_norm": 3.66410756111145, "learning_rate": 3.111e-06, "loss": 1.9541, "step": 1040 }, { "epoch": 1.1490889011595804, "grad_norm": 3.152104139328003, "learning_rate": 3.114e-06, "loss": 1.9187, "step": 1041 }, { "epoch": 1.150193263390392, "grad_norm": 3.9369587898254395, "learning_rate": 3.1170000000000004e-06, "loss": 1.9311, "step": 1042 }, { "epoch": 1.1512976256212037, "grad_norm": 3.755561113357544, "learning_rate": 3.1199999999999998e-06, "loss": 1.9023, "step": 1043 }, { "epoch": 1.1524019878520155, "grad_norm": 2.7792022228240967, "learning_rate": 3.123e-06, "loss": 1.8937, "step": 1044 }, { "epoch": 1.153506350082827, "grad_norm": 2.7494313716888428, "learning_rate": 
3.1260000000000002e-06, "loss": 1.7942, "step": 1045 }, { "epoch": 1.1546107123136389, "grad_norm": 4.778997898101807, "learning_rate": 3.129e-06, "loss": 1.7907, "step": 1046 }, { "epoch": 1.1557150745444507, "grad_norm": 3.5304737091064453, "learning_rate": 3.1320000000000003e-06, "loss": 1.7808, "step": 1047 }, { "epoch": 1.1568194367752622, "grad_norm": 5.4986419677734375, "learning_rate": 3.135e-06, "loss": 1.7707, "step": 1048 }, { "epoch": 1.157923799006074, "grad_norm": 16.08661460876465, "learning_rate": 3.138e-06, "loss": 1.7378, "step": 1049 }, { "epoch": 1.1590281612368858, "grad_norm": 6.715534687042236, "learning_rate": 3.141e-06, "loss": 1.7767, "step": 1050 }, { "epoch": 1.1601325234676974, "grad_norm": 3.582834005355835, "learning_rate": 3.1440000000000003e-06, "loss": 1.7001, "step": 1051 }, { "epoch": 1.1612368856985091, "grad_norm": 8.326470375061035, "learning_rate": 3.1469999999999997e-06, "loss": 1.6708, "step": 1052 }, { "epoch": 1.1623412479293207, "grad_norm": 8.559704780578613, "learning_rate": 3.15e-06, "loss": 1.6759, "step": 1053 }, { "epoch": 1.1634456101601325, "grad_norm": 3.1500816345214844, "learning_rate": 3.153e-06, "loss": 1.6446, "step": 1054 }, { "epoch": 1.1645499723909443, "grad_norm": 5.6800079345703125, "learning_rate": 3.156e-06, "loss": 1.6367, "step": 1055 }, { "epoch": 1.165654334621756, "grad_norm": 4.377682685852051, "learning_rate": 3.159e-06, "loss": 1.6722, "step": 1056 }, { "epoch": 1.1667586968525676, "grad_norm": 8.845281600952148, "learning_rate": 3.162e-06, "loss": 2.1782, "step": 1057 }, { "epoch": 1.1678630590833794, "grad_norm": 5.345585346221924, "learning_rate": 3.1649999999999998e-06, "loss": 2.0132, "step": 1058 }, { "epoch": 1.168967421314191, "grad_norm": 4.213466644287109, "learning_rate": 3.168e-06, "loss": 1.9797, "step": 1059 }, { "epoch": 1.1700717835450027, "grad_norm": 3.5570671558380127, "learning_rate": 3.1710000000000002e-06, "loss": 1.8162, "step": 1060 }, { "epoch": 1.1711761457758145, 
"grad_norm": 4.77776575088501, "learning_rate": 3.1740000000000004e-06, "loss": 1.7686, "step": 1061 }, { "epoch": 1.172280508006626, "grad_norm": 6.935830116271973, "learning_rate": 3.177e-06, "loss": 1.7014, "step": 1062 }, { "epoch": 1.1733848702374379, "grad_norm": 6.369053363800049, "learning_rate": 3.18e-06, "loss": 1.6773, "step": 1063 }, { "epoch": 1.1744892324682497, "grad_norm": 22.753957748413086, "learning_rate": 3.1830000000000003e-06, "loss": 1.5124, "step": 1064 }, { "epoch": 1.1755935946990612, "grad_norm": 7.2118940353393555, "learning_rate": 3.186e-06, "loss": 1.4018, "step": 1065 }, { "epoch": 1.176697956929873, "grad_norm": 4.20494270324707, "learning_rate": 3.1890000000000003e-06, "loss": 1.3407, "step": 1066 }, { "epoch": 1.1778023191606848, "grad_norm": 4.24237585067749, "learning_rate": 3.192e-06, "loss": 1.3901, "step": 1067 }, { "epoch": 1.1789066813914963, "grad_norm": 3.4571008682250977, "learning_rate": 3.195e-06, "loss": 1.3225, "step": 1068 }, { "epoch": 1.1800110436223081, "grad_norm": 4.405813217163086, "learning_rate": 3.198e-06, "loss": 1.2897, "step": 1069 }, { "epoch": 1.18111540585312, "grad_norm": 3.0185413360595703, "learning_rate": 3.2010000000000004e-06, "loss": 1.2568, "step": 1070 }, { "epoch": 1.1822197680839315, "grad_norm": 3.7551889419555664, "learning_rate": 3.204e-06, "loss": 1.2365, "step": 1071 }, { "epoch": 1.1833241303147433, "grad_norm": 3.0082106590270996, "learning_rate": 3.207e-06, "loss": 1.1711, "step": 1072 }, { "epoch": 1.184428492545555, "grad_norm": 3.7669005393981934, "learning_rate": 3.21e-06, "loss": 1.2001, "step": 1073 }, { "epoch": 1.1855328547763666, "grad_norm": 4.610114574432373, "learning_rate": 3.213e-06, "loss": 1.1131, "step": 1074 }, { "epoch": 1.1866372170071784, "grad_norm": 4.192666530609131, "learning_rate": 3.216e-06, "loss": 1.1949, "step": 1075 }, { "epoch": 1.18774157923799, "grad_norm": 7.6816277503967285, "learning_rate": 3.2190000000000004e-06, "loss": 1.1208, "step": 1076 }, { 
"epoch": 1.1888459414688017, "grad_norm": 2.87235426902771, "learning_rate": 3.222e-06, "loss": 1.0982, "step": 1077 }, { "epoch": 1.1899503036996135, "grad_norm": 6.7498650550842285, "learning_rate": 3.225e-06, "loss": 1.0976, "step": 1078 }, { "epoch": 1.191054665930425, "grad_norm": 6.588761329650879, "learning_rate": 3.2280000000000003e-06, "loss": 1.0392, "step": 1079 }, { "epoch": 1.1921590281612369, "grad_norm": 4.3241777420043945, "learning_rate": 3.231e-06, "loss": 0.9984, "step": 1080 }, { "epoch": 1.1932633903920487, "grad_norm": 3.976806640625, "learning_rate": 3.2340000000000003e-06, "loss": 1.0348, "step": 1081 }, { "epoch": 1.1943677526228602, "grad_norm": 2.9710943698883057, "learning_rate": 3.237e-06, "loss": 0.9705, "step": 1082 }, { "epoch": 1.195472114853672, "grad_norm": 4.544233798980713, "learning_rate": 3.24e-06, "loss": 1.0478, "step": 1083 }, { "epoch": 1.1965764770844838, "grad_norm": 16.50016212463379, "learning_rate": 3.243e-06, "loss": 0.9462, "step": 1084 }, { "epoch": 1.1976808393152953, "grad_norm": 2.83762788772583, "learning_rate": 3.2460000000000003e-06, "loss": 0.9336, "step": 1085 }, { "epoch": 1.1987852015461071, "grad_norm": 2.9177377223968506, "learning_rate": 3.2489999999999997e-06, "loss": 0.9281, "step": 1086 }, { "epoch": 1.199889563776919, "grad_norm": 2.765058994293213, "learning_rate": 3.252e-06, "loss": 0.8996, "step": 1087 }, { "epoch": 1.2009939260077305, "grad_norm": 3.2723116874694824, "learning_rate": 3.255e-06, "loss": 0.9282, "step": 1088 }, { "epoch": 1.2020982882385423, "grad_norm": 3.215686798095703, "learning_rate": 3.258e-06, "loss": 0.8585, "step": 1089 }, { "epoch": 1.203202650469354, "grad_norm": 10.206930160522461, "learning_rate": 3.261e-06, "loss": 0.8927, "step": 1090 }, { "epoch": 1.2043070127001656, "grad_norm": 3.076479911804199, "learning_rate": 3.264e-06, "loss": 0.8923, "step": 1091 }, { "epoch": 1.2054113749309774, "grad_norm": 3.4889473915100098, "learning_rate": 3.267e-06, "loss": 0.9304, 
"step": 1092 }, { "epoch": 1.206515737161789, "grad_norm": 2.5749671459198, "learning_rate": 3.27e-06, "loss": 0.811, "step": 1093 }, { "epoch": 1.2076200993926007, "grad_norm": 3.453936815261841, "learning_rate": 3.2730000000000003e-06, "loss": 0.8311, "step": 1094 }, { "epoch": 1.2087244616234125, "grad_norm": 4.118605613708496, "learning_rate": 3.276e-06, "loss": 0.793, "step": 1095 }, { "epoch": 1.2098288238542243, "grad_norm": 2.4944841861724854, "learning_rate": 3.279e-06, "loss": 0.7618, "step": 1096 }, { "epoch": 1.2109331860850359, "grad_norm": 3.1550581455230713, "learning_rate": 3.282e-06, "loss": 0.7435, "step": 1097 }, { "epoch": 1.2120375483158476, "grad_norm": 5.135873794555664, "learning_rate": 3.285e-06, "loss": 0.7761, "step": 1098 }, { "epoch": 1.2131419105466592, "grad_norm": 3.605649471282959, "learning_rate": 3.288e-06, "loss": 0.7475, "step": 1099 }, { "epoch": 1.214246272777471, "grad_norm": 4.164962291717529, "learning_rate": 3.2910000000000003e-06, "loss": 0.7989, "step": 1100 }, { "epoch": 1.2153506350082828, "grad_norm": 4.345034122467041, "learning_rate": 3.2939999999999997e-06, "loss": 0.8168, "step": 1101 }, { "epoch": 1.2164549972390943, "grad_norm": 3.0624096393585205, "learning_rate": 3.297e-06, "loss": 0.7283, "step": 1102 }, { "epoch": 1.2175593594699061, "grad_norm": 9.258050918579102, "learning_rate": 3.3e-06, "loss": 0.7896, "step": 1103 }, { "epoch": 1.218663721700718, "grad_norm": 3.793116331100464, "learning_rate": 3.3030000000000004e-06, "loss": 0.7047, "step": 1104 }, { "epoch": 1.2197680839315295, "grad_norm": 3.7246358394622803, "learning_rate": 3.306e-06, "loss": 0.7322, "step": 1105 }, { "epoch": 1.2208724461623413, "grad_norm": 3.8187255859375, "learning_rate": 3.309e-06, "loss": 0.8308, "step": 1106 }, { "epoch": 1.221976808393153, "grad_norm": 18.355344772338867, "learning_rate": 3.3120000000000002e-06, "loss": 1.4923, "step": 1107 }, { "epoch": 1.2230811706239646, "grad_norm": 12.134858131408691, "learning_rate": 
3.315e-06, "loss": 1.2666, "step": 1108 }, { "epoch": 1.2241855328547764, "grad_norm": 6.289427280426025, "learning_rate": 3.3180000000000003e-06, "loss": 1.4056, "step": 1109 }, { "epoch": 1.2252898950855882, "grad_norm": 4.124602794647217, "learning_rate": 3.3210000000000005e-06, "loss": 1.1238, "step": 1110 }, { "epoch": 1.2263942573163997, "grad_norm": 5.117330074310303, "learning_rate": 3.324e-06, "loss": 1.2186, "step": 1111 }, { "epoch": 1.2274986195472115, "grad_norm": 4.520598411560059, "learning_rate": 3.327e-06, "loss": 0.9372, "step": 1112 }, { "epoch": 1.2286029817780233, "grad_norm": 5.3503594398498535, "learning_rate": 3.3300000000000003e-06, "loss": 0.8961, "step": 1113 }, { "epoch": 1.2297073440088349, "grad_norm": 6.5734429359436035, "learning_rate": 3.333e-06, "loss": 0.8969, "step": 1114 }, { "epoch": 1.2308117062396466, "grad_norm": 3.903862953186035, "learning_rate": 3.336e-06, "loss": 0.7059, "step": 1115 }, { "epoch": 1.2319160684704582, "grad_norm": 2.8506686687469482, "learning_rate": 3.339e-06, "loss": 0.6811, "step": 1116 }, { "epoch": 1.23302043070127, "grad_norm": 3.2092907428741455, "learning_rate": 3.342e-06, "loss": 0.6536, "step": 1117 }, { "epoch": 1.2341247929320818, "grad_norm": 4.4556050300598145, "learning_rate": 3.345e-06, "loss": 0.6185, "step": 1118 }, { "epoch": 1.2352291551628933, "grad_norm": 7.079403877258301, "learning_rate": 3.3480000000000004e-06, "loss": 0.5989, "step": 1119 }, { "epoch": 1.2363335173937051, "grad_norm": 3.4804952144622803, "learning_rate": 3.3509999999999998e-06, "loss": 0.6173, "step": 1120 }, { "epoch": 1.237437879624517, "grad_norm": 3.253957748413086, "learning_rate": 3.354e-06, "loss": 0.5922, "step": 1121 }, { "epoch": 1.2385422418553285, "grad_norm": 2.9072883129119873, "learning_rate": 3.3570000000000002e-06, "loss": 0.5401, "step": 1122 }, { "epoch": 1.2396466040861402, "grad_norm": 3.4194083213806152, "learning_rate": 3.36e-06, "loss": 0.5343, "step": 1123 }, { "epoch": 1.240750966316952, 
"grad_norm": 2.750122308731079, "learning_rate": 3.3630000000000002e-06, "loss": 0.5844, "step": 1124 }, { "epoch": 1.2418553285477636, "grad_norm": 2.9750494956970215, "learning_rate": 3.366e-06, "loss": 0.5157, "step": 1125 }, { "epoch": 1.2429596907785754, "grad_norm": 2.6977813243865967, "learning_rate": 3.369e-06, "loss": 0.5428, "step": 1126 }, { "epoch": 1.2440640530093872, "grad_norm": 3.5567519664764404, "learning_rate": 3.372e-06, "loss": 0.531, "step": 1127 }, { "epoch": 1.2451684152401987, "grad_norm": 4.693999767303467, "learning_rate": 3.3750000000000003e-06, "loss": 0.4836, "step": 1128 }, { "epoch": 1.2462727774710105, "grad_norm": 2.7791662216186523, "learning_rate": 3.378e-06, "loss": 0.5376, "step": 1129 }, { "epoch": 1.2473771397018223, "grad_norm": 3.0599935054779053, "learning_rate": 3.381e-06, "loss": 0.5576, "step": 1130 }, { "epoch": 1.2484815019326339, "grad_norm": 4.124332427978516, "learning_rate": 3.384e-06, "loss": 0.5391, "step": 1131 }, { "epoch": 1.2495858641634456, "grad_norm": 2.336113452911377, "learning_rate": 3.387e-06, "loss": 0.5393, "step": 1132 }, { "epoch": 1.2506902263942572, "grad_norm": 3.1316275596618652, "learning_rate": 3.39e-06, "loss": 0.5343, "step": 1133 }, { "epoch": 1.251794588625069, "grad_norm": 2.98878812789917, "learning_rate": 3.3930000000000004e-06, "loss": 0.5497, "step": 1134 }, { "epoch": 1.2528989508558808, "grad_norm": 3.745880365371704, "learning_rate": 3.3959999999999998e-06, "loss": 0.5317, "step": 1135 }, { "epoch": 1.2540033130866926, "grad_norm": 6.2919158935546875, "learning_rate": 3.399e-06, "loss": 0.4902, "step": 1136 }, { "epoch": 1.2551076753175041, "grad_norm": 3.091437816619873, "learning_rate": 3.402e-06, "loss": 0.4764, "step": 1137 }, { "epoch": 1.256212037548316, "grad_norm": 4.52437686920166, "learning_rate": 3.405e-06, "loss": 0.4706, "step": 1138 }, { "epoch": 1.2573163997791275, "grad_norm": 5.2413458824157715, "learning_rate": 3.4080000000000002e-06, "loss": 0.5127, "step": 
1139 }, { "epoch": 1.2584207620099392, "grad_norm": 2.8617734909057617, "learning_rate": 3.411e-06, "loss": 0.4855, "step": 1140 }, { "epoch": 1.259525124240751, "grad_norm": 2.9289729595184326, "learning_rate": 3.414e-06, "loss": 0.5064, "step": 1141 }, { "epoch": 1.2606294864715628, "grad_norm": 5.025636196136475, "learning_rate": 3.417e-06, "loss": 0.5004, "step": 1142 }, { "epoch": 1.2617338487023744, "grad_norm": 2.931385040283203, "learning_rate": 3.4200000000000003e-06, "loss": 0.4954, "step": 1143 }, { "epoch": 1.2628382109331862, "grad_norm": 3.978644847869873, "learning_rate": 3.4229999999999997e-06, "loss": 0.4761, "step": 1144 }, { "epoch": 1.2639425731639977, "grad_norm": 6.273467063903809, "learning_rate": 3.426e-06, "loss": 0.5158, "step": 1145 }, { "epoch": 1.2650469353948095, "grad_norm": 4.604176044464111, "learning_rate": 3.429e-06, "loss": 0.467, "step": 1146 }, { "epoch": 1.2661512976256213, "grad_norm": 5.801717758178711, "learning_rate": 3.4320000000000003e-06, "loss": 0.4755, "step": 1147 }, { "epoch": 1.2672556598564328, "grad_norm": 8.16181468963623, "learning_rate": 3.435e-06, "loss": 0.4867, "step": 1148 }, { "epoch": 1.2683600220872446, "grad_norm": 4.537282466888428, "learning_rate": 3.438e-06, "loss": 0.5176, "step": 1149 }, { "epoch": 1.2694643843180562, "grad_norm": 4.971229076385498, "learning_rate": 3.441e-06, "loss": 0.5525, "step": 1150 }, { "epoch": 1.270568746548868, "grad_norm": 6.2297163009643555, "learning_rate": 3.444e-06, "loss": 0.5224, "step": 1151 }, { "epoch": 1.2716731087796798, "grad_norm": 3.1814112663269043, "learning_rate": 3.447e-06, "loss": 0.5074, "step": 1152 }, { "epoch": 1.2727774710104915, "grad_norm": 3.35672926902771, "learning_rate": 3.4500000000000004e-06, "loss": 0.5927, "step": 1153 }, { "epoch": 1.273881833241303, "grad_norm": 11.69269847869873, "learning_rate": 3.453e-06, "loss": 0.4779, "step": 1154 }, { "epoch": 1.274986195472115, "grad_norm": 4.323886394500732, "learning_rate": 3.456e-06, 
"loss": 0.4912, "step": 1155 }, { "epoch": 1.2760905577029265, "grad_norm": 6.759964466094971, "learning_rate": 3.4590000000000003e-06, "loss": 0.4946, "step": 1156 }, { "epoch": 1.2771949199337382, "grad_norm": 6.993564605712891, "learning_rate": 3.462e-06, "loss": 1.2562, "step": 1157 }, { "epoch": 1.27829928216455, "grad_norm": 15.02093505859375, "learning_rate": 3.4650000000000003e-06, "loss": 1.0564, "step": 1158 }, { "epoch": 1.2794036443953618, "grad_norm": 9.359211921691895, "learning_rate": 3.468e-06, "loss": 0.8731, "step": 1159 }, { "epoch": 1.2805080066261734, "grad_norm": 4.063681125640869, "learning_rate": 3.471e-06, "loss": 0.9888, "step": 1160 }, { "epoch": 1.2816123688569852, "grad_norm": 3.4234468936920166, "learning_rate": 3.474e-06, "loss": 0.7932, "step": 1161 }, { "epoch": 1.2827167310877967, "grad_norm": 6.772645473480225, "learning_rate": 3.4770000000000003e-06, "loss": 0.7179, "step": 1162 }, { "epoch": 1.2838210933186085, "grad_norm": 4.391380310058594, "learning_rate": 3.48e-06, "loss": 0.6885, "step": 1163 }, { "epoch": 1.2849254555494203, "grad_norm": 4.853434085845947, "learning_rate": 3.483e-06, "loss": 0.5121, "step": 1164 }, { "epoch": 1.2860298177802318, "grad_norm": 3.057910203933716, "learning_rate": 3.486e-06, "loss": 0.5056, "step": 1165 }, { "epoch": 1.2871341800110436, "grad_norm": 3.052281141281128, "learning_rate": 3.489e-06, "loss": 0.4715, "step": 1166 }, { "epoch": 1.2882385422418554, "grad_norm": 2.3882346153259277, "learning_rate": 3.492e-06, "loss": 0.4234, "step": 1167 }, { "epoch": 1.289342904472667, "grad_norm": 2.598443031311035, "learning_rate": 3.4950000000000004e-06, "loss": 0.4458, "step": 1168 }, { "epoch": 1.2904472667034788, "grad_norm": 3.910985231399536, "learning_rate": 3.498e-06, "loss": 0.4615, "step": 1169 }, { "epoch": 1.2915516289342905, "grad_norm": 3.079627513885498, "learning_rate": 3.501e-06, "loss": 0.3587, "step": 1170 }, { "epoch": 1.292655991165102, "grad_norm": 2.4121625423431396, 
"learning_rate": 3.5040000000000002e-06, "loss": 0.4448, "step": 1171 }, { "epoch": 1.2937603533959139, "grad_norm": 2.531006336212158, "learning_rate": 3.507e-06, "loss": 0.4292, "step": 1172 }, { "epoch": 1.2948647156267254, "grad_norm": 6.531184673309326, "learning_rate": 3.5100000000000003e-06, "loss": 0.4114, "step": 1173 }, { "epoch": 1.2959690778575372, "grad_norm": 2.330799102783203, "learning_rate": 3.513e-06, "loss": 0.417, "step": 1174 }, { "epoch": 1.297073440088349, "grad_norm": 2.809675455093384, "learning_rate": 3.516e-06, "loss": 0.406, "step": 1175 }, { "epoch": 1.2981778023191608, "grad_norm": 2.917482852935791, "learning_rate": 3.519e-06, "loss": 0.4076, "step": 1176 }, { "epoch": 1.2992821645499724, "grad_norm": 3.530336380004883, "learning_rate": 3.5220000000000003e-06, "loss": 0.3795, "step": 1177 }, { "epoch": 1.3003865267807841, "grad_norm": 2.5175445079803467, "learning_rate": 3.5249999999999997e-06, "loss": 0.4279, "step": 1178 }, { "epoch": 1.3014908890115957, "grad_norm": 3.3120503425598145, "learning_rate": 3.528e-06, "loss": 0.4541, "step": 1179 }, { "epoch": 1.3025952512424075, "grad_norm": 3.2616336345672607, "learning_rate": 3.531e-06, "loss": 0.4487, "step": 1180 }, { "epoch": 1.3036996134732193, "grad_norm": 3.3668148517608643, "learning_rate": 3.534e-06, "loss": 0.3875, "step": 1181 }, { "epoch": 1.304803975704031, "grad_norm": 2.960481882095337, "learning_rate": 3.537e-06, "loss": 0.3751, "step": 1182 }, { "epoch": 1.3059083379348426, "grad_norm": 5.344047546386719, "learning_rate": 3.54e-06, "loss": 0.4309, "step": 1183 }, { "epoch": 1.3070127001656544, "grad_norm": 17.866933822631836, "learning_rate": 3.543e-06, "loss": 0.4113, "step": 1184 }, { "epoch": 1.308117062396466, "grad_norm": 2.502743721008301, "learning_rate": 3.546e-06, "loss": 0.3928, "step": 1185 }, { "epoch": 1.3092214246272778, "grad_norm": 3.537463426589966, "learning_rate": 3.5490000000000002e-06, "loss": 0.4042, "step": 1186 }, { "epoch": 1.3103257868580895, 
"grad_norm": 10.720636367797852, "learning_rate": 3.552e-06, "loss": 0.4106, "step": 1187 }, { "epoch": 1.311430149088901, "grad_norm": 3.866028308868408, "learning_rate": 3.555e-06, "loss": 0.4104, "step": 1188 }, { "epoch": 1.3125345113197129, "grad_norm": 2.499065637588501, "learning_rate": 3.558e-06, "loss": 0.4364, "step": 1189 }, { "epoch": 1.3136388735505244, "grad_norm": 3.0779809951782227, "learning_rate": 3.5610000000000003e-06, "loss": 0.3468, "step": 1190 }, { "epoch": 1.3147432357813362, "grad_norm": 3.6592652797698975, "learning_rate": 3.564e-06, "loss": 0.3936, "step": 1191 }, { "epoch": 1.315847598012148, "grad_norm": 5.002760887145996, "learning_rate": 3.5670000000000003e-06, "loss": 0.4027, "step": 1192 }, { "epoch": 1.3169519602429598, "grad_norm": 3.492158889770508, "learning_rate": 3.57e-06, "loss": 0.3909, "step": 1193 }, { "epoch": 1.3180563224737714, "grad_norm": 3.6423401832580566, "learning_rate": 3.573e-06, "loss": 0.3877, "step": 1194 }, { "epoch": 1.3191606847045831, "grad_norm": 2.7983438968658447, "learning_rate": 3.576e-06, "loss": 0.3626, "step": 1195 }, { "epoch": 1.3202650469353947, "grad_norm": 3.635935068130493, "learning_rate": 3.5790000000000004e-06, "loss": 0.4283, "step": 1196 }, { "epoch": 1.3213694091662065, "grad_norm": 4.954638957977295, "learning_rate": 3.582e-06, "loss": 0.3617, "step": 1197 }, { "epoch": 1.3224737713970183, "grad_norm": 2.7044241428375244, "learning_rate": 3.585e-06, "loss": 0.3537, "step": 1198 }, { "epoch": 1.32357813362783, "grad_norm": 2.8764536380767822, "learning_rate": 3.588e-06, "loss": 0.3725, "step": 1199 }, { "epoch": 1.3246824958586416, "grad_norm": 4.6984333992004395, "learning_rate": 3.591e-06, "loss": 0.4193, "step": 1200 }, { "epoch": 1.3257868580894534, "grad_norm": 2.30200457572937, "learning_rate": 3.5940000000000002e-06, "loss": 0.4024, "step": 1201 }, { "epoch": 1.326891220320265, "grad_norm": 6.574634552001953, "learning_rate": 3.5970000000000005e-06, "loss": 0.4218, "step": 1202 
}, { "epoch": 1.3279955825510767, "grad_norm": 3.044316291809082, "learning_rate": 3.6e-06, "loss": 0.3909, "step": 1203 }, { "epoch": 1.3290999447818885, "grad_norm": 3.5505740642547607, "learning_rate": 3.603e-06, "loss": 0.4029, "step": 1204 }, { "epoch": 1.3302043070127, "grad_norm": 4.400083065032959, "learning_rate": 3.6060000000000003e-06, "loss": 0.4004, "step": 1205 }, { "epoch": 1.3313086692435119, "grad_norm": 6.802012920379639, "learning_rate": 3.609e-06, "loss": 0.4522, "step": 1206 }, { "epoch": 1.3324130314743237, "grad_norm": 8.411022186279297, "learning_rate": 3.612e-06, "loss": 1.3603, "step": 1207 }, { "epoch": 1.3335173937051352, "grad_norm": 3.4899990558624268, "learning_rate": 3.615e-06, "loss": 0.8429, "step": 1208 }, { "epoch": 1.334621755935947, "grad_norm": 3.6308581829071045, "learning_rate": 3.618e-06, "loss": 0.9764, "step": 1209 }, { "epoch": 1.3357261181667588, "grad_norm": 3.2628347873687744, "learning_rate": 3.621e-06, "loss": 0.7482, "step": 1210 }, { "epoch": 1.3368304803975704, "grad_norm": 2.8133227825164795, "learning_rate": 3.6240000000000004e-06, "loss": 0.8343, "step": 1211 }, { "epoch": 1.3379348426283821, "grad_norm": 3.8071091175079346, "learning_rate": 3.6269999999999997e-06, "loss": 0.7669, "step": 1212 }, { "epoch": 1.3390392048591937, "grad_norm": 3.3907506465911865, "learning_rate": 3.63e-06, "loss": 0.5666, "step": 1213 }, { "epoch": 1.3401435670900055, "grad_norm": 3.143338918685913, "learning_rate": 3.633e-06, "loss": 0.4751, "step": 1214 }, { "epoch": 1.3412479293208173, "grad_norm": 8.601523399353027, "learning_rate": 3.636e-06, "loss": 0.4836, "step": 1215 }, { "epoch": 1.342352291551629, "grad_norm": 4.343495845794678, "learning_rate": 3.6390000000000002e-06, "loss": 0.4692, "step": 1216 }, { "epoch": 1.3434566537824406, "grad_norm": 2.8793106079101562, "learning_rate": 3.642e-06, "loss": 0.3597, "step": 1217 }, { "epoch": 1.3445610160132524, "grad_norm": 3.418764591217041, "learning_rate": 3.645e-06, "loss": 
0.4088, "step": 1218 }, { "epoch": 1.345665378244064, "grad_norm": 2.7326455116271973, "learning_rate": 3.648e-06, "loss": 0.4105, "step": 1219 }, { "epoch": 1.3467697404748757, "grad_norm": 2.381010055541992, "learning_rate": 3.6510000000000003e-06, "loss": 0.3651, "step": 1220 }, { "epoch": 1.3478741027056875, "grad_norm": 2.5365073680877686, "learning_rate": 3.654e-06, "loss": 0.3719, "step": 1221 }, { "epoch": 1.348978464936499, "grad_norm": 2.193091869354248, "learning_rate": 3.657e-06, "loss": 0.3317, "step": 1222 }, { "epoch": 1.3500828271673109, "grad_norm": 1.9560716152191162, "learning_rate": 3.66e-06, "loss": 0.3278, "step": 1223 }, { "epoch": 1.3511871893981227, "grad_norm": 3.188328981399536, "learning_rate": 3.663e-06, "loss": 0.3096, "step": 1224 }, { "epoch": 1.3522915516289342, "grad_norm": 2.2924633026123047, "learning_rate": 3.666e-06, "loss": 0.3672, "step": 1225 }, { "epoch": 1.353395913859746, "grad_norm": 5.3329010009765625, "learning_rate": 3.6690000000000004e-06, "loss": 0.35, "step": 1226 }, { "epoch": 1.3545002760905578, "grad_norm": 6.545080661773682, "learning_rate": 3.6719999999999997e-06, "loss": 0.3665, "step": 1227 }, { "epoch": 1.3556046383213693, "grad_norm": 10.421566009521484, "learning_rate": 3.675e-06, "loss": 0.3293, "step": 1228 }, { "epoch": 1.3567090005521811, "grad_norm": 3.2647459506988525, "learning_rate": 3.678e-06, "loss": 0.3343, "step": 1229 }, { "epoch": 1.3578133627829927, "grad_norm": 2.998887300491333, "learning_rate": 3.681e-06, "loss": 0.3904, "step": 1230 }, { "epoch": 1.3589177250138045, "grad_norm": 4.18053674697876, "learning_rate": 3.6840000000000002e-06, "loss": 0.3116, "step": 1231 }, { "epoch": 1.3600220872446163, "grad_norm": 3.243842124938965, "learning_rate": 3.687e-06, "loss": 0.3185, "step": 1232 }, { "epoch": 1.361126449475428, "grad_norm": 6.012299537658691, "learning_rate": 3.6900000000000002e-06, "loss": 0.4866, "step": 1233 }, { "epoch": 1.3622308117062396, "grad_norm": 5.92681884765625, 
"learning_rate": 3.693e-06, "loss": 0.2934, "step": 1234 }, { "epoch": 1.3633351739370514, "grad_norm": 3.3634045124053955, "learning_rate": 3.6960000000000003e-06, "loss": 0.3469, "step": 1235 }, { "epoch": 1.364439536167863, "grad_norm": 2.4441449642181396, "learning_rate": 3.6990000000000005e-06, "loss": 0.3156, "step": 1236 }, { "epoch": 1.3655438983986747, "grad_norm": 2.3131299018859863, "learning_rate": 3.702e-06, "loss": 0.358, "step": 1237 }, { "epoch": 1.3666482606294865, "grad_norm": 3.6888365745544434, "learning_rate": 3.705e-06, "loss": 0.279, "step": 1238 }, { "epoch": 1.3677526228602983, "grad_norm": 2.841843605041504, "learning_rate": 3.7080000000000003e-06, "loss": 0.2855, "step": 1239 }, { "epoch": 1.3688569850911099, "grad_norm": 2.412414312362671, "learning_rate": 3.711e-06, "loss": 0.3502, "step": 1240 }, { "epoch": 1.3699613473219217, "grad_norm": 4.406742095947266, "learning_rate": 3.714e-06, "loss": 0.3562, "step": 1241 }, { "epoch": 1.3710657095527332, "grad_norm": 2.305288076400757, "learning_rate": 3.717e-06, "loss": 0.2906, "step": 1242 }, { "epoch": 1.372170071783545, "grad_norm": 2.5488319396972656, "learning_rate": 3.72e-06, "loss": 0.3155, "step": 1243 }, { "epoch": 1.3732744340143568, "grad_norm": 2.416745662689209, "learning_rate": 3.723e-06, "loss": 0.3521, "step": 1244 }, { "epoch": 1.3743787962451683, "grad_norm": 4.0731201171875, "learning_rate": 3.7260000000000004e-06, "loss": 0.3313, "step": 1245 }, { "epoch": 1.3754831584759801, "grad_norm": 4.760964393615723, "learning_rate": 3.7289999999999998e-06, "loss": 0.3043, "step": 1246 }, { "epoch": 1.3765875207067917, "grad_norm": 4.122292995452881, "learning_rate": 3.732e-06, "loss": 0.3338, "step": 1247 }, { "epoch": 1.3776918829376035, "grad_norm": 2.772700071334839, "learning_rate": 3.7350000000000002e-06, "loss": 0.3169, "step": 1248 }, { "epoch": 1.3787962451684153, "grad_norm": 4.9197587966918945, "learning_rate": 3.738e-06, "loss": 0.3489, "step": 1249 }, { "epoch": 
1.379900607399227, "grad_norm": 2.721877336502075, "learning_rate": 3.7410000000000003e-06, "loss": 0.3464, "step": 1250 }, { "epoch": 1.3810049696300386, "grad_norm": 3.413140296936035, "learning_rate": 3.744e-06, "loss": 0.3005, "step": 1251 }, { "epoch": 1.3821093318608504, "grad_norm": 3.581235408782959, "learning_rate": 3.747e-06, "loss": 0.338, "step": 1252 }, { "epoch": 1.383213694091662, "grad_norm": 2.6100003719329834, "learning_rate": 3.75e-06, "loss": 0.3176, "step": 1253 }, { "epoch": 1.3843180563224737, "grad_norm": 3.149233818054199, "learning_rate": 3.753e-06, "loss": 0.3447, "step": 1254 }, { "epoch": 1.3854224185532855, "grad_norm": 5.125613689422607, "learning_rate": 3.756e-06, "loss": 0.391, "step": 1255 }, { "epoch": 1.3865267807840973, "grad_norm": 2.8830931186676025, "learning_rate": 3.759e-06, "loss": 0.3658, "step": 1256 }, { "epoch": 1.3876311430149089, "grad_norm": 6.6424479484558105, "learning_rate": 3.7620000000000006e-06, "loss": 1.1784, "step": 1257 }, { "epoch": 1.3887355052457206, "grad_norm": 3.801513671875, "learning_rate": 3.765e-06, "loss": 0.8834, "step": 1258 }, { "epoch": 1.3898398674765322, "grad_norm": 3.5475354194641113, "learning_rate": 3.7679999999999998e-06, "loss": 0.8233, "step": 1259 }, { "epoch": 1.390944229707344, "grad_norm": 2.5214684009552, "learning_rate": 3.7710000000000004e-06, "loss": 0.7768, "step": 1260 }, { "epoch": 1.3920485919381558, "grad_norm": 4.117931365966797, "learning_rate": 3.7739999999999998e-06, "loss": 0.6498, "step": 1261 }, { "epoch": 1.3931529541689673, "grad_norm": 4.291290760040283, "learning_rate": 3.7770000000000004e-06, "loss": 0.6162, "step": 1262 }, { "epoch": 1.3942573163997791, "grad_norm": 5.454833984375, "learning_rate": 3.7800000000000002e-06, "loss": 0.4784, "step": 1263 }, { "epoch": 1.395361678630591, "grad_norm": 11.475358009338379, "learning_rate": 3.7829999999999996e-06, "loss": 0.4428, "step": 1264 }, { "epoch": 1.3964660408614025, "grad_norm": 9.990917205810547, 
"learning_rate": 3.7860000000000003e-06, "loss": 0.3512, "step": 1265 }, { "epoch": 1.3975704030922143, "grad_norm": 6.0944952964782715, "learning_rate": 3.789e-06, "loss": 0.4741, "step": 1266 }, { "epoch": 1.398674765323026, "grad_norm": 3.8548552989959717, "learning_rate": 3.7920000000000003e-06, "loss": 0.3105, "step": 1267 }, { "epoch": 1.3997791275538376, "grad_norm": 4.550102710723877, "learning_rate": 3.795e-06, "loss": 0.34, "step": 1268 }, { "epoch": 1.4008834897846494, "grad_norm": 2.1902999877929688, "learning_rate": 3.798e-06, "loss": 0.2771, "step": 1269 }, { "epoch": 1.401987852015461, "grad_norm": 13.562867164611816, "learning_rate": 3.801e-06, "loss": 0.34, "step": 1270 }, { "epoch": 1.4030922142462727, "grad_norm": 4.243081092834473, "learning_rate": 3.804e-06, "loss": 0.3576, "step": 1271 }, { "epoch": 1.4041965764770845, "grad_norm": 2.3520150184631348, "learning_rate": 3.8070000000000006e-06, "loss": 0.2984, "step": 1272 }, { "epoch": 1.4053009387078963, "grad_norm": 3.4047317504882812, "learning_rate": 3.81e-06, "loss": 0.3216, "step": 1273 }, { "epoch": 1.4064053009387079, "grad_norm": 2.5619640350341797, "learning_rate": 3.8129999999999997e-06, "loss": 0.2893, "step": 1274 }, { "epoch": 1.4075096631695196, "grad_norm": 2.4497146606445312, "learning_rate": 3.816e-06, "loss": 0.2947, "step": 1275 }, { "epoch": 1.4086140254003312, "grad_norm": 2.674879789352417, "learning_rate": 3.819e-06, "loss": 0.3673, "step": 1276 }, { "epoch": 1.409718387631143, "grad_norm": 2.645700454711914, "learning_rate": 3.822000000000001e-06, "loss": 0.2596, "step": 1277 }, { "epoch": 1.4108227498619548, "grad_norm": 6.103841781616211, "learning_rate": 3.825e-06, "loss": 0.3277, "step": 1278 }, { "epoch": 1.4119271120927666, "grad_norm": 3.0983505249023438, "learning_rate": 3.828e-06, "loss": 0.2836, "step": 1279 }, { "epoch": 1.4130314743235781, "grad_norm": 4.214552879333496, "learning_rate": 3.831e-06, "loss": 0.2624, "step": 1280 }, { "epoch": 1.41413583655439, 
"grad_norm": 2.9408161640167236, "learning_rate": 3.834e-06, "loss": 0.2658, "step": 1281 }, { "epoch": 1.4152401987852015, "grad_norm": 2.9184281826019287, "learning_rate": 3.837000000000001e-06, "loss": 0.2655, "step": 1282 }, { "epoch": 1.4163445610160132, "grad_norm": 15.179665565490723, "learning_rate": 3.8400000000000005e-06, "loss": 0.2792, "step": 1283 }, { "epoch": 1.417448923246825, "grad_norm": 3.456676959991455, "learning_rate": 3.8429999999999995e-06, "loss": 0.33, "step": 1284 }, { "epoch": 1.4185532854776366, "grad_norm": 2.2767751216888428, "learning_rate": 3.846e-06, "loss": 0.3097, "step": 1285 }, { "epoch": 1.4196576477084484, "grad_norm": 2.644407272338867, "learning_rate": 3.849e-06, "loss": 0.2909, "step": 1286 }, { "epoch": 1.42076200993926, "grad_norm": 3.761895179748535, "learning_rate": 3.852e-06, "loss": 0.2689, "step": 1287 }, { "epoch": 1.4218663721700717, "grad_norm": 6.186633110046387, "learning_rate": 3.855e-06, "loss": 0.2829, "step": 1288 }, { "epoch": 1.4229707344008835, "grad_norm": 8.511848449707031, "learning_rate": 3.858e-06, "loss": 0.2561, "step": 1289 }, { "epoch": 1.4240750966316953, "grad_norm": 4.0278849601745605, "learning_rate": 3.861e-06, "loss": 0.291, "step": 1290 }, { "epoch": 1.4251794588625069, "grad_norm": 3.5680606365203857, "learning_rate": 3.864e-06, "loss": 0.2619, "step": 1291 }, { "epoch": 1.4262838210933186, "grad_norm": 3.172346830368042, "learning_rate": 3.8669999999999996e-06, "loss": 0.2526, "step": 1292 }, { "epoch": 1.4273881833241302, "grad_norm": 2.0812366008758545, "learning_rate": 3.87e-06, "loss": 0.291, "step": 1293 }, { "epoch": 1.428492545554942, "grad_norm": 2.581552743911743, "learning_rate": 3.873e-06, "loss": 0.2775, "step": 1294 }, { "epoch": 1.4295969077857538, "grad_norm": 2.3443825244903564, "learning_rate": 3.876000000000001e-06, "loss": 0.2797, "step": 1295 }, { "epoch": 1.4307012700165656, "grad_norm": 2.222982406616211, "learning_rate": 3.8790000000000005e-06, "loss": 0.2689, 
"step": 1296 }, { "epoch": 1.4318056322473771, "grad_norm": 1.699418306350708, "learning_rate": 3.8819999999999994e-06, "loss": 0.274, "step": 1297 }, { "epoch": 1.432909994478189, "grad_norm": 2.4721717834472656, "learning_rate": 3.885e-06, "loss": 0.2539, "step": 1298 }, { "epoch": 1.4340143567090005, "grad_norm": 2.4298486709594727, "learning_rate": 3.888e-06, "loss": 0.2506, "step": 1299 }, { "epoch": 1.4351187189398122, "grad_norm": 2.7861523628234863, "learning_rate": 3.8910000000000005e-06, "loss": 0.3088, "step": 1300 }, { "epoch": 1.436223081170624, "grad_norm": 2.643608570098877, "learning_rate": 3.894e-06, "loss": 0.3061, "step": 1301 }, { "epoch": 1.4373274434014356, "grad_norm": 2.8514902591705322, "learning_rate": 3.897e-06, "loss": 0.324, "step": 1302 }, { "epoch": 1.4384318056322474, "grad_norm": 8.393292427062988, "learning_rate": 3.9e-06, "loss": 0.3756, "step": 1303 }, { "epoch": 1.4395361678630592, "grad_norm": 2.8413171768188477, "learning_rate": 3.903e-06, "loss": 0.2898, "step": 1304 }, { "epoch": 1.4406405300938707, "grad_norm": 3.3697707653045654, "learning_rate": 3.906e-06, "loss": 0.2981, "step": 1305 }, { "epoch": 1.4417448923246825, "grad_norm": 3.720684766769409, "learning_rate": 3.909e-06, "loss": 0.4088, "step": 1306 }, { "epoch": 1.4428492545554943, "grad_norm": 7.9144463539123535, "learning_rate": 3.912e-06, "loss": 1.1143, "step": 1307 }, { "epoch": 1.4439536167863058, "grad_norm": 4.789734840393066, "learning_rate": 3.915000000000001e-06, "loss": 0.9201, "step": 1308 }, { "epoch": 1.4450579790171176, "grad_norm": 2.572622060775757, "learning_rate": 3.918e-06, "loss": 0.6793, "step": 1309 }, { "epoch": 1.4461623412479292, "grad_norm": 3.7035562992095947, "learning_rate": 3.921e-06, "loss": 0.5817, "step": 1310 }, { "epoch": 1.447266703478741, "grad_norm": 3.511791229248047, "learning_rate": 3.924e-06, "loss": 0.6333, "step": 1311 }, { "epoch": 1.4483710657095528, "grad_norm": 4.20801305770874, "learning_rate": 3.927e-06, "loss": 
0.5275, "step": 1312 }, { "epoch": 1.4494754279403645, "grad_norm": 14.167572021484375, "learning_rate": 3.9300000000000005e-06, "loss": 0.4871, "step": 1313 }, { "epoch": 1.450579790171176, "grad_norm": 3.5162999629974365, "learning_rate": 3.933e-06, "loss": 0.4686, "step": 1314 }, { "epoch": 1.451684152401988, "grad_norm": 2.7700886726379395, "learning_rate": 3.936e-06, "loss": 0.381, "step": 1315 }, { "epoch": 1.4527885146327995, "grad_norm": 2.883152961730957, "learning_rate": 3.939e-06, "loss": 0.2684, "step": 1316 }, { "epoch": 1.4538928768636112, "grad_norm": 3.410928249359131, "learning_rate": 3.942e-06, "loss": 0.3348, "step": 1317 }, { "epoch": 1.454997239094423, "grad_norm": 9.289589881896973, "learning_rate": 3.945e-06, "loss": 0.2871, "step": 1318 }, { "epoch": 1.4561016013252348, "grad_norm": 2.6423778533935547, "learning_rate": 3.948e-06, "loss": 0.3387, "step": 1319 }, { "epoch": 1.4572059635560464, "grad_norm": 2.2700035572052, "learning_rate": 3.951000000000001e-06, "loss": 0.3132, "step": 1320 }, { "epoch": 1.4583103257868582, "grad_norm": 2.575011730194092, "learning_rate": 3.954e-06, "loss": 0.2707, "step": 1321 }, { "epoch": 1.4594146880176697, "grad_norm": 2.169977903366089, "learning_rate": 3.9569999999999996e-06, "loss": 0.2709, "step": 1322 }, { "epoch": 1.4605190502484815, "grad_norm": 4.680166244506836, "learning_rate": 3.96e-06, "loss": 0.2361, "step": 1323 }, { "epoch": 1.4616234124792933, "grad_norm": 2.5697340965270996, "learning_rate": 3.963e-06, "loss": 0.2363, "step": 1324 }, { "epoch": 1.4627277747101048, "grad_norm": 2.2234532833099365, "learning_rate": 3.966000000000001e-06, "loss": 0.2575, "step": 1325 }, { "epoch": 1.4638321369409166, "grad_norm": 2.032365560531616, "learning_rate": 3.9690000000000005e-06, "loss": 0.2286, "step": 1326 }, { "epoch": 1.4649364991717282, "grad_norm": 2.3016698360443115, "learning_rate": 3.971999999999999e-06, "loss": 0.2637, "step": 1327 }, { "epoch": 1.46604086140254, "grad_norm": 
2.674126148223877, "learning_rate": 3.975e-06, "loss": 0.3313, "step": 1328 }, { "epoch": 1.4671452236333518, "grad_norm": 1.8194009065628052, "learning_rate": 3.978e-06, "loss": 0.2636, "step": 1329 }, { "epoch": 1.4682495858641635, "grad_norm": 2.530024528503418, "learning_rate": 3.9810000000000005e-06, "loss": 0.2356, "step": 1330 }, { "epoch": 1.469353948094975, "grad_norm": 1.6556520462036133, "learning_rate": 3.984e-06, "loss": 0.2251, "step": 1331 }, { "epoch": 1.4704583103257869, "grad_norm": 2.694030523300171, "learning_rate": 3.987e-06, "loss": 0.2744, "step": 1332 }, { "epoch": 1.4715626725565985, "grad_norm": 3.5090534687042236, "learning_rate": 3.99e-06, "loss": 0.2571, "step": 1333 }, { "epoch": 1.4726670347874102, "grad_norm": 3.5828306674957275, "learning_rate": 3.993e-06, "loss": 0.2425, "step": 1334 }, { "epoch": 1.473771397018222, "grad_norm": 2.4859864711761475, "learning_rate": 3.996e-06, "loss": 0.2568, "step": 1335 }, { "epoch": 1.4748757592490338, "grad_norm": 2.349597692489624, "learning_rate": 3.999e-06, "loss": 0.2763, "step": 1336 }, { "epoch": 1.4759801214798454, "grad_norm": 2.0143511295318604, "learning_rate": 4.002e-06, "loss": 0.2738, "step": 1337 }, { "epoch": 1.4770844837106571, "grad_norm": 3.932546854019165, "learning_rate": 4.005000000000001e-06, "loss": 0.2682, "step": 1338 }, { "epoch": 1.4781888459414687, "grad_norm": 2.251002311706543, "learning_rate": 4.008e-06, "loss": 0.2553, "step": 1339 }, { "epoch": 1.4792932081722805, "grad_norm": 7.4417805671691895, "learning_rate": 4.011e-06, "loss": 0.293, "step": 1340 }, { "epoch": 1.4803975704030923, "grad_norm": 4.045816421508789, "learning_rate": 4.014e-06, "loss": 0.2835, "step": 1341 }, { "epoch": 1.4815019326339038, "grad_norm": 4.518927097320557, "learning_rate": 4.017e-06, "loss": 0.2516, "step": 1342 }, { "epoch": 1.4826062948647156, "grad_norm": 4.513194561004639, "learning_rate": 4.0200000000000005e-06, "loss": 0.2548, "step": 1343 }, { "epoch": 1.4837106570955274, 
"grad_norm": 2.59806489944458, "learning_rate": 4.023e-06, "loss": 0.273, "step": 1344 }, { "epoch": 1.484815019326339, "grad_norm": 12.752030372619629, "learning_rate": 4.026000000000001e-06, "loss": 0.2773, "step": 1345 }, { "epoch": 1.4859193815571508, "grad_norm": 27.07494354248047, "learning_rate": 4.029e-06, "loss": 0.2596, "step": 1346 }, { "epoch": 1.4870237437879625, "grad_norm": 2.2837960720062256, "learning_rate": 4.032e-06, "loss": 0.2524, "step": 1347 }, { "epoch": 1.488128106018774, "grad_norm": 4.551118850708008, "learning_rate": 4.035e-06, "loss": 0.2639, "step": 1348 }, { "epoch": 1.4892324682495859, "grad_norm": 3.0730433464050293, "learning_rate": 4.038e-06, "loss": 0.2192, "step": 1349 }, { "epoch": 1.4903368304803974, "grad_norm": 2.989443063735962, "learning_rate": 4.041e-06, "loss": 0.2571, "step": 1350 }, { "epoch": 1.4914411927112092, "grad_norm": 3.669097900390625, "learning_rate": 4.044000000000001e-06, "loss": 0.3563, "step": 1351 }, { "epoch": 1.492545554942021, "grad_norm": 2.269777297973633, "learning_rate": 4.0469999999999995e-06, "loss": 0.2793, "step": 1352 }, { "epoch": 1.4936499171728328, "grad_norm": 4.759781360626221, "learning_rate": 4.05e-06, "loss": 0.2944, "step": 1353 }, { "epoch": 1.4947542794036444, "grad_norm": 2.5148444175720215, "learning_rate": 4.053e-06, "loss": 0.2684, "step": 1354 }, { "epoch": 1.4958586416344561, "grad_norm": 5.173280239105225, "learning_rate": 4.056e-06, "loss": 0.2922, "step": 1355 }, { "epoch": 1.4969630038652677, "grad_norm": 2.827641010284424, "learning_rate": 4.0590000000000004e-06, "loss": 0.3587, "step": 1356 }, { "epoch": 1.4980673660960795, "grad_norm": 5.465133190155029, "learning_rate": 4.062e-06, "loss": 0.9322, "step": 1357 }, { "epoch": 1.4991717283268913, "grad_norm": 6.668369293212891, "learning_rate": 4.065e-06, "loss": 0.8422, "step": 1358 }, { "epoch": 1.500276090557703, "grad_norm": 3.033170223236084, "learning_rate": 4.068e-06, "loss": 0.7192, "step": 1359 }, { "epoch": 
1.5013804527885146, "grad_norm": 3.0971484184265137, "learning_rate": 4.071e-06, "loss": 0.6519, "step": 1360 }, { "epoch": 1.5024848150193262, "grad_norm": 2.7103915214538574, "learning_rate": 4.074e-06, "loss": 0.5652, "step": 1361 }, { "epoch": 1.503589177250138, "grad_norm": 7.163049697875977, "learning_rate": 4.077e-06, "loss": 0.594, "step": 1362 }, { "epoch": 1.5046935394809497, "grad_norm": 5.222049713134766, "learning_rate": 4.080000000000001e-06, "loss": 0.5046, "step": 1363 }, { "epoch": 1.5057979017117615, "grad_norm": 2.8578712940216064, "learning_rate": 4.083e-06, "loss": 0.3919, "step": 1364 }, { "epoch": 1.5069022639425733, "grad_norm": 3.517366886138916, "learning_rate": 4.0859999999999995e-06, "loss": 0.4122, "step": 1365 }, { "epoch": 1.5080066261733849, "grad_norm": 3.4449098110198975, "learning_rate": 4.089e-06, "loss": 0.3064, "step": 1366 }, { "epoch": 1.5091109884041964, "grad_norm": 2.8075127601623535, "learning_rate": 4.092e-06, "loss": 0.3527, "step": 1367 }, { "epoch": 1.5102153506350082, "grad_norm": 2.230861186981201, "learning_rate": 4.095000000000001e-06, "loss": 0.2708, "step": 1368 }, { "epoch": 1.51131971286582, "grad_norm": 2.0034008026123047, "learning_rate": 4.098e-06, "loss": 0.2542, "step": 1369 }, { "epoch": 1.5124240750966318, "grad_norm": 2.1801655292510986, "learning_rate": 4.100999999999999e-06, "loss": 0.3034, "step": 1370 }, { "epoch": 1.5135284373274434, "grad_norm": 1.8534375429153442, "learning_rate": 4.104e-06, "loss": 0.2275, "step": 1371 }, { "epoch": 1.5146327995582551, "grad_norm": 1.8946672677993774, "learning_rate": 4.107e-06, "loss": 0.2213, "step": 1372 }, { "epoch": 1.5157371617890667, "grad_norm": 2.5488293170928955, "learning_rate": 4.1100000000000005e-06, "loss": 0.2709, "step": 1373 }, { "epoch": 1.5168415240198785, "grad_norm": 2.4326934814453125, "learning_rate": 4.113e-06, "loss": 0.2456, "step": 1374 }, { "epoch": 1.5179458862506903, "grad_norm": 2.45483136177063, "learning_rate": 4.116e-06, 
"loss": 0.2021, "step": 1375 }, { "epoch": 1.519050248481502, "grad_norm": 2.8296616077423096, "learning_rate": 4.119e-06, "loss": 0.233, "step": 1376 }, { "epoch": 1.5201546107123136, "grad_norm": 1.7203458547592163, "learning_rate": 4.122e-06, "loss": 0.2423, "step": 1377 }, { "epoch": 1.5212589729431254, "grad_norm": 5.425093650817871, "learning_rate": 4.125e-06, "loss": 0.2264, "step": 1378 }, { "epoch": 1.522363335173937, "grad_norm": 55.791160583496094, "learning_rate": 4.128e-06, "loss": 0.2858, "step": 1379 }, { "epoch": 1.5234676974047487, "grad_norm": 3.7090954780578613, "learning_rate": 4.131e-06, "loss": 0.22, "step": 1380 }, { "epoch": 1.5245720596355605, "grad_norm": 1.9922511577606201, "learning_rate": 4.1340000000000006e-06, "loss": 0.2644, "step": 1381 }, { "epoch": 1.5256764218663723, "grad_norm": 2.1849663257598877, "learning_rate": 4.137e-06, "loss": 0.2195, "step": 1382 }, { "epoch": 1.5267807840971839, "grad_norm": 1.7404240369796753, "learning_rate": 4.14e-06, "loss": 0.2142, "step": 1383 }, { "epoch": 1.5278851463279954, "grad_norm": 2.901822566986084, "learning_rate": 4.143e-06, "loss": 0.2476, "step": 1384 }, { "epoch": 1.5289895085588072, "grad_norm": 6.458728313446045, "learning_rate": 4.146e-06, "loss": 0.2803, "step": 1385 }, { "epoch": 1.530093870789619, "grad_norm": 2.0992431640625, "learning_rate": 4.1490000000000004e-06, "loss": 0.1954, "step": 1386 }, { "epoch": 1.5311982330204308, "grad_norm": 3.642296075820923, "learning_rate": 4.152e-06, "loss": 0.2098, "step": 1387 }, { "epoch": 1.5323025952512426, "grad_norm": 1.9773063659667969, "learning_rate": 4.155000000000001e-06, "loss": 0.2233, "step": 1388 }, { "epoch": 1.5334069574820541, "grad_norm": 3.7213571071624756, "learning_rate": 4.158e-06, "loss": 0.2548, "step": 1389 }, { "epoch": 1.5345113197128657, "grad_norm": 3.334959030151367, "learning_rate": 4.161e-06, "loss": 0.2147, "step": 1390 }, { "epoch": 1.5356156819436775, "grad_norm": 2.6523842811584473, "learning_rate": 
4.164e-06, "loss": 0.2759, "step": 1391 }, { "epoch": 1.5367200441744893, "grad_norm": 2.6846656799316406, "learning_rate": 4.167e-06, "loss": 0.229, "step": 1392 }, { "epoch": 1.537824406405301, "grad_norm": 2.316077709197998, "learning_rate": 4.170000000000001e-06, "loss": 0.2264, "step": 1393 }, { "epoch": 1.5389287686361126, "grad_norm": 2.2132716178894043, "learning_rate": 4.1730000000000005e-06, "loss": 0.2773, "step": 1394 }, { "epoch": 1.5400331308669244, "grad_norm": 2.3899142742156982, "learning_rate": 4.1759999999999995e-06, "loss": 0.2458, "step": 1395 }, { "epoch": 1.541137493097736, "grad_norm": 2.053701400756836, "learning_rate": 4.179e-06, "loss": 0.2444, "step": 1396 }, { "epoch": 1.5422418553285477, "grad_norm": 2.0243301391601562, "learning_rate": 4.182e-06, "loss": 0.2162, "step": 1397 }, { "epoch": 1.5433462175593595, "grad_norm": 1.7245256900787354, "learning_rate": 4.185000000000001e-06, "loss": 0.2108, "step": 1398 }, { "epoch": 1.5444505797901713, "grad_norm": 2.2758026123046875, "learning_rate": 4.188e-06, "loss": 0.2068, "step": 1399 }, { "epoch": 1.5455549420209829, "grad_norm": 2.3843328952789307, "learning_rate": 4.191e-06, "loss": 0.198, "step": 1400 }, { "epoch": 1.5466593042517944, "grad_norm": 2.322357416152954, "learning_rate": 4.194e-06, "loss": 0.2551, "step": 1401 }, { "epoch": 1.5477636664826062, "grad_norm": 3.9382474422454834, "learning_rate": 4.197e-06, "loss": 0.2326, "step": 1402 }, { "epoch": 1.548868028713418, "grad_norm": 4.981159210205078, "learning_rate": 4.2000000000000004e-06, "loss": 0.2211, "step": 1403 }, { "epoch": 1.5499723909442298, "grad_norm": 4.626099109649658, "learning_rate": 4.203e-06, "loss": 0.2984, "step": 1404 }, { "epoch": 1.5510767531750416, "grad_norm": 3.2953450679779053, "learning_rate": 4.206e-06, "loss": 0.3152, "step": 1405 }, { "epoch": 1.5521811154058531, "grad_norm": 2.7025039196014404, "learning_rate": 4.209000000000001e-06, "loss": 0.3002, "step": 1406 }, { "epoch": 1.5532854776366647, 
"grad_norm": 5.727576732635498, "learning_rate": 4.212e-06, "loss": 0.9711, "step": 1407 }, { "epoch": 1.5543898398674765, "grad_norm": 6.3200154304504395, "learning_rate": 4.215e-06, "loss": 0.7424, "step": 1408 }, { "epoch": 1.5554942020982883, "grad_norm": 2.242210865020752, "learning_rate": 4.218e-06, "loss": 0.672, "step": 1409 }, { "epoch": 1.5565985643291, "grad_norm": 2.8398489952087402, "learning_rate": 4.221e-06, "loss": 0.5796, "step": 1410 }, { "epoch": 1.5577029265599116, "grad_norm": 2.344322681427002, "learning_rate": 4.2240000000000006e-06, "loss": 0.5515, "step": 1411 }, { "epoch": 1.5588072887907234, "grad_norm": 3.360574960708618, "learning_rate": 4.227e-06, "loss": 0.6896, "step": 1412 }, { "epoch": 1.559911651021535, "grad_norm": 2.218393325805664, "learning_rate": 4.229999999999999e-06, "loss": 0.4789, "step": 1413 }, { "epoch": 1.5610160132523467, "grad_norm": 2.661341667175293, "learning_rate": 4.233e-06, "loss": 0.3825, "step": 1414 }, { "epoch": 1.5621203754831585, "grad_norm": 2.458850383758545, "learning_rate": 4.236e-06, "loss": 0.3943, "step": 1415 }, { "epoch": 1.5632247377139703, "grad_norm": 2.4148828983306885, "learning_rate": 4.239e-06, "loss": 0.2801, "step": 1416 }, { "epoch": 1.5643290999447819, "grad_norm": 2.397721290588379, "learning_rate": 4.242e-06, "loss": 0.3416, "step": 1417 }, { "epoch": 1.5654334621755936, "grad_norm": 5.369162082672119, "learning_rate": 4.245e-06, "loss": 0.2464, "step": 1418 }, { "epoch": 1.5665378244064052, "grad_norm": 1.8798178434371948, "learning_rate": 4.248e-06, "loss": 0.2373, "step": 1419 }, { "epoch": 1.567642186637217, "grad_norm": 2.245194911956787, "learning_rate": 4.251e-06, "loss": 0.2298, "step": 1420 }, { "epoch": 1.5687465488680288, "grad_norm": 2.1059415340423584, "learning_rate": 4.254e-06, "loss": 0.2243, "step": 1421 }, { "epoch": 1.5698509110988406, "grad_norm": 1.5888265371322632, "learning_rate": 4.257e-06, "loss": 0.1954, "step": 1422 }, { "epoch": 1.5709552733296521, 
"grad_norm": 1.845062255859375, "learning_rate": 4.26e-06, "loss": 0.1985, "step": 1423 }, { "epoch": 1.5720596355604637, "grad_norm": 2.7678463459014893, "learning_rate": 4.2630000000000005e-06, "loss": 0.2858, "step": 1424 }, { "epoch": 1.5731639977912755, "grad_norm": 2.089360237121582, "learning_rate": 4.266e-06, "loss": 0.1925, "step": 1425 }, { "epoch": 1.5742683600220873, "grad_norm": 1.614412546157837, "learning_rate": 4.269e-06, "loss": 0.2002, "step": 1426 }, { "epoch": 1.575372722252899, "grad_norm": 6.219518184661865, "learning_rate": 4.272e-06, "loss": 0.1776, "step": 1427 }, { "epoch": 1.5764770844837108, "grad_norm": 2.1126294136047363, "learning_rate": 4.275e-06, "loss": 0.2377, "step": 1428 }, { "epoch": 1.5775814467145224, "grad_norm": 2.4134438037872314, "learning_rate": 4.278e-06, "loss": 0.2196, "step": 1429 }, { "epoch": 1.578685808945334, "grad_norm": 2.4937968254089355, "learning_rate": 4.281e-06, "loss": 0.3018, "step": 1430 }, { "epoch": 1.5797901711761457, "grad_norm": 2.0790679454803467, "learning_rate": 4.284000000000001e-06, "loss": 0.1867, "step": 1431 }, { "epoch": 1.5808945334069575, "grad_norm": 1.64890456199646, "learning_rate": 4.287e-06, "loss": 0.223, "step": 1432 }, { "epoch": 1.5819988956377693, "grad_norm": 1.8963347673416138, "learning_rate": 4.29e-06, "loss": 0.2252, "step": 1433 }, { "epoch": 1.5831032578685809, "grad_norm": 1.9251641035079956, "learning_rate": 4.293e-06, "loss": 0.1819, "step": 1434 }, { "epoch": 1.5842076200993926, "grad_norm": 1.4676270484924316, "learning_rate": 4.296e-06, "loss": 0.2015, "step": 1435 }, { "epoch": 1.5853119823302042, "grad_norm": 9.824906349182129, "learning_rate": 4.299000000000001e-06, "loss": 0.2031, "step": 1436 }, { "epoch": 1.586416344561016, "grad_norm": 1.9645531177520752, "learning_rate": 4.3020000000000005e-06, "loss": 0.203, "step": 1437 }, { "epoch": 1.5875207067918278, "grad_norm": 4.5478196144104, "learning_rate": 4.3049999999999994e-06, "loss": 0.2331, "step": 1438 }, 
{ "epoch": 1.5886250690226396, "grad_norm": 2.8663692474365234, "learning_rate": 4.308e-06, "loss": 0.2787, "step": 1439 }, { "epoch": 1.5897294312534511, "grad_norm": 1.8180763721466064, "learning_rate": 4.311e-06, "loss": 0.1489, "step": 1440 }, { "epoch": 1.5908337934842627, "grad_norm": 2.04052996635437, "learning_rate": 4.3140000000000005e-06, "loss": 0.2128, "step": 1441 }, { "epoch": 1.5919381557150745, "grad_norm": 2.2661972045898438, "learning_rate": 4.317e-06, "loss": 0.2169, "step": 1442 }, { "epoch": 1.5930425179458862, "grad_norm": 2.701204776763916, "learning_rate": 4.32e-06, "loss": 0.2002, "step": 1443 }, { "epoch": 1.594146880176698, "grad_norm": 2.6935412883758545, "learning_rate": 4.323e-06, "loss": 0.2346, "step": 1444 }, { "epoch": 1.5952512424075098, "grad_norm": 2.543327808380127, "learning_rate": 4.326e-06, "loss": 0.2324, "step": 1445 }, { "epoch": 1.5963556046383214, "grad_norm": 8.962162971496582, "learning_rate": 4.329e-06, "loss": 0.2344, "step": 1446 }, { "epoch": 1.597459966869133, "grad_norm": 2.267014980316162, "learning_rate": 4.332e-06, "loss": 0.1973, "step": 1447 }, { "epoch": 1.5985643290999447, "grad_norm": 2.214792490005493, "learning_rate": 4.335e-06, "loss": 0.2489, "step": 1448 }, { "epoch": 1.5996686913307565, "grad_norm": 2.1870739459991455, "learning_rate": 4.338000000000001e-06, "loss": 0.2192, "step": 1449 }, { "epoch": 1.6007730535615683, "grad_norm": 2.3545329570770264, "learning_rate": 4.341e-06, "loss": 0.1994, "step": 1450 }, { "epoch": 1.6018774157923799, "grad_norm": 2.6360673904418945, "learning_rate": 4.344e-06, "loss": 0.2059, "step": 1451 }, { "epoch": 1.6029817780231916, "grad_norm": 4.3400187492370605, "learning_rate": 4.347e-06, "loss": 0.2408, "step": 1452 }, { "epoch": 1.6040861402540032, "grad_norm": 2.4548890590667725, "learning_rate": 4.35e-06, "loss": 0.2312, "step": 1453 }, { "epoch": 1.605190502484815, "grad_norm": 3.937988758087158, "learning_rate": 4.3530000000000005e-06, "loss": 0.2552, 
"step": 1454 }, { "epoch": 1.6062948647156268, "grad_norm": 3.4154725074768066, "learning_rate": 4.356e-06, "loss": 0.2265, "step": 1455 }, { "epoch": 1.6073992269464386, "grad_norm": 2.306382656097412, "learning_rate": 4.359e-06, "loss": 0.2235, "step": 1456 }, { "epoch": 1.6085035891772501, "grad_norm": 4.3061981201171875, "learning_rate": 4.362e-06, "loss": 0.7113, "step": 1457 }, { "epoch": 1.609607951408062, "grad_norm": 3.088841438293457, "learning_rate": 4.365e-06, "loss": 0.6334, "step": 1458 }, { "epoch": 1.6107123136388735, "grad_norm": 2.8514111042022705, "learning_rate": 4.368e-06, "loss": 0.5962, "step": 1459 }, { "epoch": 1.6118166758696852, "grad_norm": 2.3789544105529785, "learning_rate": 4.371e-06, "loss": 0.5643, "step": 1460 }, { "epoch": 1.612921038100497, "grad_norm": 4.258628845214844, "learning_rate": 4.374000000000001e-06, "loss": 0.5516, "step": 1461 }, { "epoch": 1.6140254003313088, "grad_norm": 3.2271511554718018, "learning_rate": 4.377e-06, "loss": 0.4135, "step": 1462 }, { "epoch": 1.6151297625621204, "grad_norm": 2.861872434616089, "learning_rate": 4.3799999999999996e-06, "loss": 0.3795, "step": 1463 }, { "epoch": 1.616234124792932, "grad_norm": 14.011645317077637, "learning_rate": 4.383e-06, "loss": 0.4882, "step": 1464 }, { "epoch": 1.6173384870237437, "grad_norm": 1.9932457208633423, "learning_rate": 4.386e-06, "loss": 0.3176, "step": 1465 }, { "epoch": 1.6184428492545555, "grad_norm": 2.050855875015259, "learning_rate": 4.389000000000001e-06, "loss": 0.2915, "step": 1466 }, { "epoch": 1.6195472114853673, "grad_norm": 1.7149161100387573, "learning_rate": 4.3920000000000005e-06, "loss": 0.2102, "step": 1467 }, { "epoch": 1.620651573716179, "grad_norm": 1.6267096996307373, "learning_rate": 4.395e-06, "loss": 0.1578, "step": 1468 }, { "epoch": 1.6217559359469906, "grad_norm": 1.877923607826233, "learning_rate": 4.398e-06, "loss": 0.1881, "step": 1469 }, { "epoch": 1.6228602981778022, "grad_norm": 1.9372315406799316, "learning_rate": 
4.401e-06, "loss": 0.1902, "step": 1470 }, { "epoch": 1.623964660408614, "grad_norm": 2.139620065689087, "learning_rate": 4.4040000000000005e-06, "loss": 0.1895, "step": 1471 }, { "epoch": 1.6250690226394258, "grad_norm": 1.5416247844696045, "learning_rate": 4.407e-06, "loss": 0.1629, "step": 1472 }, { "epoch": 1.6261733848702375, "grad_norm": 2.5091540813446045, "learning_rate": 4.41e-06, "loss": 0.1953, "step": 1473 }, { "epoch": 1.627277747101049, "grad_norm": 2.586718797683716, "learning_rate": 4.413000000000001e-06, "loss": 0.2561, "step": 1474 }, { "epoch": 1.628382109331861, "grad_norm": 1.9124733209609985, "learning_rate": 4.416e-06, "loss": 0.1748, "step": 1475 }, { "epoch": 1.6294864715626725, "grad_norm": 3.064894437789917, "learning_rate": 4.4189999999999995e-06, "loss": 0.2009, "step": 1476 }, { "epoch": 1.6305908337934842, "grad_norm": 2.3641772270202637, "learning_rate": 4.422e-06, "loss": 0.1635, "step": 1477 }, { "epoch": 1.631695196024296, "grad_norm": 2.3008430004119873, "learning_rate": 4.425e-06, "loss": 0.2222, "step": 1478 }, { "epoch": 1.6327995582551078, "grad_norm": 2.593804359436035, "learning_rate": 4.428000000000001e-06, "loss": 0.2327, "step": 1479 }, { "epoch": 1.6339039204859194, "grad_norm": 1.890676498413086, "learning_rate": 4.4310000000000004e-06, "loss": 0.2157, "step": 1480 }, { "epoch": 1.635008282716731, "grad_norm": 1.8632936477661133, "learning_rate": 4.433999999999999e-06, "loss": 0.1992, "step": 1481 }, { "epoch": 1.6361126449475427, "grad_norm": 1.590497374534607, "learning_rate": 4.437e-06, "loss": 0.1577, "step": 1482 }, { "epoch": 1.6372170071783545, "grad_norm": 4.646111488342285, "learning_rate": 4.44e-06, "loss": 0.1837, "step": 1483 }, { "epoch": 1.6383213694091663, "grad_norm": 8.14866828918457, "learning_rate": 4.4430000000000005e-06, "loss": 0.178, "step": 1484 }, { "epoch": 1.639425731639978, "grad_norm": 2.102694272994995, "learning_rate": 4.446e-06, "loss": 0.1749, "step": 1485 }, { "epoch": 
1.6405300938707896, "grad_norm": 3.0138373374938965, "learning_rate": 4.449e-06, "loss": 0.2263, "step": 1486 }, { "epoch": 1.6416344561016012, "grad_norm": 3.4107115268707275, "learning_rate": 4.452e-06, "loss": 0.1812, "step": 1487 }, { "epoch": 1.642738818332413, "grad_norm": 1.7795912027359009, "learning_rate": 4.455e-06, "loss": 0.2096, "step": 1488 }, { "epoch": 1.6438431805632248, "grad_norm": 2.0559775829315186, "learning_rate": 4.458e-06, "loss": 0.1758, "step": 1489 }, { "epoch": 1.6449475427940365, "grad_norm": 2.248054265975952, "learning_rate": 4.461e-06, "loss": 0.2202, "step": 1490 }, { "epoch": 1.646051905024848, "grad_norm": 3.0751636028289795, "learning_rate": 4.464e-06, "loss": 0.2179, "step": 1491 }, { "epoch": 1.64715626725566, "grad_norm": 2.32919979095459, "learning_rate": 4.467000000000001e-06, "loss": 0.1788, "step": 1492 }, { "epoch": 1.6482606294864715, "grad_norm": 2.6923909187316895, "learning_rate": 4.4699999999999996e-06, "loss": 0.2169, "step": 1493 }, { "epoch": 1.6493649917172832, "grad_norm": 2.522768497467041, "learning_rate": 4.473e-06, "loss": 0.2428, "step": 1494 }, { "epoch": 1.650469353948095, "grad_norm": 6.290024757385254, "learning_rate": 4.476e-06, "loss": 0.1961, "step": 1495 }, { "epoch": 1.6515737161789068, "grad_norm": 1.9300096035003662, "learning_rate": 4.479e-06, "loss": 0.167, "step": 1496 }, { "epoch": 1.6526780784097184, "grad_norm": 2.3714189529418945, "learning_rate": 4.4820000000000005e-06, "loss": 0.1864, "step": 1497 }, { "epoch": 1.6537824406405301, "grad_norm": 1.9983469247817993, "learning_rate": 4.485e-06, "loss": 0.2189, "step": 1498 }, { "epoch": 1.6548868028713417, "grad_norm": 1.7659796476364136, "learning_rate": 4.488e-06, "loss": 0.1672, "step": 1499 }, { "epoch": 1.6559911651021535, "grad_norm": 2.047520637512207, "learning_rate": 4.491e-06, "loss": 0.2388, "step": 1500 }, { "epoch": 1.6570955273329653, "grad_norm": 2.49212908744812, "learning_rate": 4.494e-06, "loss": 0.1497, "step": 1501 }, { 
"epoch": 1.658199889563777, "grad_norm": 2.1135659217834473, "learning_rate": 4.497e-06, "loss": 0.1784, "step": 1502 }, { "epoch": 1.6593042517945886, "grad_norm": 2.598388910293579, "learning_rate": 4.5e-06, "loss": 0.2575, "step": 1503 }, { "epoch": 1.6604086140254002, "grad_norm": 7.456160068511963, "learning_rate": 4.503000000000001e-06, "loss": 0.2477, "step": 1504 }, { "epoch": 1.661512976256212, "grad_norm": 2.4400110244750977, "learning_rate": 4.506e-06, "loss": 0.2081, "step": 1505 }, { "epoch": 1.6626173384870238, "grad_norm": 2.408940553665161, "learning_rate": 4.5089999999999995e-06, "loss": 0.2675, "step": 1506 }, { "epoch": 1.6637217007178355, "grad_norm": 5.603972434997559, "learning_rate": 4.512e-06, "loss": 0.9203, "step": 1507 }, { "epoch": 1.6648260629486473, "grad_norm": 3.214468479156494, "learning_rate": 4.515e-06, "loss": 0.6628, "step": 1508 }, { "epoch": 1.6659304251794589, "grad_norm": 3.936572313308716, "learning_rate": 4.518000000000001e-06, "loss": 0.6148, "step": 1509 }, { "epoch": 1.6670347874102704, "grad_norm": 2.3718435764312744, "learning_rate": 4.521e-06, "loss": 0.5198, "step": 1510 }, { "epoch": 1.6681391496410822, "grad_norm": 2.370652198791504, "learning_rate": 4.524e-06, "loss": 0.4587, "step": 1511 }, { "epoch": 1.669243511871894, "grad_norm": 2.661370277404785, "learning_rate": 4.527e-06, "loss": 0.4432, "step": 1512 }, { "epoch": 1.6703478741027058, "grad_norm": 2.309558391571045, "learning_rate": 4.53e-06, "loss": 0.3755, "step": 1513 }, { "epoch": 1.6714522363335174, "grad_norm": 2.4313747882843018, "learning_rate": 4.5330000000000005e-06, "loss": 0.3773, "step": 1514 }, { "epoch": 1.6725565985643291, "grad_norm": 5.918329238891602, "learning_rate": 4.536e-06, "loss": 0.2884, "step": 1515 }, { "epoch": 1.6736609607951407, "grad_norm": 1.7323861122131348, "learning_rate": 4.539e-06, "loss": 0.2035, "step": 1516 }, { "epoch": 1.6747653230259525, "grad_norm": 2.137678623199463, "learning_rate": 4.542000000000001e-06, 
"loss": 0.2355, "step": 1517 }, { "epoch": 1.6758696852567643, "grad_norm": 1.8599412441253662, "learning_rate": 4.545e-06, "loss": 0.2069, "step": 1518 }, { "epoch": 1.676974047487576, "grad_norm": 2.202833652496338, "learning_rate": 4.548e-06, "loss": 0.2035, "step": 1519 }, { "epoch": 1.6780784097183876, "grad_norm": 2.150496482849121, "learning_rate": 4.551e-06, "loss": 0.2233, "step": 1520 }, { "epoch": 1.6791827719491992, "grad_norm": 2.832571506500244, "learning_rate": 4.554e-06, "loss": 0.1666, "step": 1521 }, { "epoch": 1.680287134180011, "grad_norm": 1.6706387996673584, "learning_rate": 4.557000000000001e-06, "loss": 0.1666, "step": 1522 }, { "epoch": 1.6813914964108228, "grad_norm": 1.4365876913070679, "learning_rate": 4.56e-06, "loss": 0.1881, "step": 1523 }, { "epoch": 1.6824958586416345, "grad_norm": 1.7372915744781494, "learning_rate": 4.563e-06, "loss": 0.1712, "step": 1524 }, { "epoch": 1.6836002208724463, "grad_norm": 1.5484102964401245, "learning_rate": 4.566e-06, "loss": 0.1913, "step": 1525 }, { "epoch": 1.6847045831032579, "grad_norm": 1.6182485818862915, "learning_rate": 4.569e-06, "loss": 0.1838, "step": 1526 }, { "epoch": 1.6858089453340694, "grad_norm": 2.1161069869995117, "learning_rate": 4.5720000000000004e-06, "loss": 0.1607, "step": 1527 }, { "epoch": 1.6869133075648812, "grad_norm": 2.528053045272827, "learning_rate": 4.575e-06, "loss": 0.1998, "step": 1528 }, { "epoch": 1.688017669795693, "grad_norm": 2.0826210975646973, "learning_rate": 4.578000000000001e-06, "loss": 0.1805, "step": 1529 }, { "epoch": 1.6891220320265048, "grad_norm": 1.6010230779647827, "learning_rate": 4.581e-06, "loss": 0.1764, "step": 1530 }, { "epoch": 1.6902263942573164, "grad_norm": 1.7355114221572876, "learning_rate": 4.584e-06, "loss": 0.1779, "step": 1531 }, { "epoch": 1.6913307564881281, "grad_norm": 1.7507034540176392, "learning_rate": 4.587e-06, "loss": 0.1544, "step": 1532 }, { "epoch": 1.6924351187189397, "grad_norm": 2.4913065433502197, 
"learning_rate": 4.59e-06, "loss": 0.1545, "step": 1533 }, { "epoch": 1.6935394809497515, "grad_norm": 2.3857293128967285, "learning_rate": 4.593000000000001e-06, "loss": 0.1869, "step": 1534 }, { "epoch": 1.6946438431805633, "grad_norm": 1.5683714151382446, "learning_rate": 4.5960000000000006e-06, "loss": 0.1609, "step": 1535 }, { "epoch": 1.695748205411375, "grad_norm": 2.34368896484375, "learning_rate": 4.5989999999999995e-06, "loss": 0.1928, "step": 1536 }, { "epoch": 1.6968525676421866, "grad_norm": 2.5453708171844482, "learning_rate": 4.602e-06, "loss": 0.1822, "step": 1537 }, { "epoch": 1.6979569298729982, "grad_norm": 3.0296549797058105, "learning_rate": 4.605e-06, "loss": 0.2127, "step": 1538 }, { "epoch": 1.69906129210381, "grad_norm": 2.209139823913574, "learning_rate": 4.608e-06, "loss": 0.1631, "step": 1539 }, { "epoch": 1.7001656543346217, "grad_norm": 2.9429619312286377, "learning_rate": 4.611e-06, "loss": 0.1818, "step": 1540 }, { "epoch": 1.7012700165654335, "grad_norm": 2.3529930114746094, "learning_rate": 4.614e-06, "loss": 0.1744, "step": 1541 }, { "epoch": 1.7023743787962453, "grad_norm": 1.6770211458206177, "learning_rate": 4.617e-06, "loss": 0.1279, "step": 1542 }, { "epoch": 1.7034787410270569, "grad_norm": 2.7872977256774902, "learning_rate": 4.62e-06, "loss": 0.1925, "step": 1543 }, { "epoch": 1.7045831032578684, "grad_norm": 2.08050274848938, "learning_rate": 4.623e-06, "loss": 0.236, "step": 1544 }, { "epoch": 1.7056874654886802, "grad_norm": 1.8824586868286133, "learning_rate": 4.626e-06, "loss": 0.2018, "step": 1545 }, { "epoch": 1.706791827719492, "grad_norm": 1.3932756185531616, "learning_rate": 4.629e-06, "loss": 0.1404, "step": 1546 }, { "epoch": 1.7078961899503038, "grad_norm": 2.234299421310425, "learning_rate": 4.632000000000001e-06, "loss": 0.1496, "step": 1547 }, { "epoch": 1.7090005521811154, "grad_norm": 1.856527328491211, "learning_rate": 4.635e-06, "loss": 0.1806, "step": 1548 }, { "epoch": 1.7101049144119271, "grad_norm": 
2.4831957817077637, "learning_rate": 4.6379999999999995e-06, "loss": 0.1882, "step": 1549 }, { "epoch": 1.7112092766427387, "grad_norm": 3.0323731899261475, "learning_rate": 4.641e-06, "loss": 0.2226, "step": 1550 }, { "epoch": 1.7123136388735505, "grad_norm": 2.0404844284057617, "learning_rate": 4.644e-06, "loss": 0.2047, "step": 1551 }, { "epoch": 1.7134180011043623, "grad_norm": 2.247649669647217, "learning_rate": 4.6470000000000006e-06, "loss": 0.1687, "step": 1552 }, { "epoch": 1.714522363335174, "grad_norm": 2.138322353363037, "learning_rate": 4.65e-06, "loss": 0.1873, "step": 1553 }, { "epoch": 1.7156267255659856, "grad_norm": 2.278672456741333, "learning_rate": 4.653e-06, "loss": 0.2291, "step": 1554 }, { "epoch": 1.7167310877967974, "grad_norm": 2.3810198307037354, "learning_rate": 4.656e-06, "loss": 0.1877, "step": 1555 }, { "epoch": 1.717835450027609, "grad_norm": 2.699206829071045, "learning_rate": 4.659e-06, "loss": 0.2808, "step": 1556 }, { "epoch": 1.7189398122584207, "grad_norm": 4.946401119232178, "learning_rate": 4.6620000000000004e-06, "loss": 0.8941, "step": 1557 }, { "epoch": 1.7200441744892325, "grad_norm": 3.479182243347168, "learning_rate": 4.665e-06, "loss": 0.7689, "step": 1558 }, { "epoch": 1.7211485367200443, "grad_norm": 2.5684304237365723, "learning_rate": 4.668e-06, "loss": 0.6045, "step": 1559 }, { "epoch": 1.7222528989508559, "grad_norm": 2.9401755332946777, "learning_rate": 4.671000000000001e-06, "loss": 0.5785, "step": 1560 }, { "epoch": 1.7233572611816674, "grad_norm": 11.043280601501465, "learning_rate": 4.674e-06, "loss": 0.6007, "step": 1561 }, { "epoch": 1.7244616234124792, "grad_norm": 2.6660714149475098, "learning_rate": 4.677e-06, "loss": 0.445, "step": 1562 }, { "epoch": 1.725565985643291, "grad_norm": 5.381633758544922, "learning_rate": 4.68e-06, "loss": 0.3144, "step": 1563 }, { "epoch": 1.7266703478741028, "grad_norm": 2.2749619483947754, "learning_rate": 4.683e-06, "loss": 0.295, "step": 1564 }, { "epoch": 
1.7277747101049146, "grad_norm": 1.5849007368087769, "learning_rate": 4.6860000000000005e-06, "loss": 0.209, "step": 1565 }, { "epoch": 1.7288790723357261, "grad_norm": 1.3929362297058105, "learning_rate": 4.689e-06, "loss": 0.1877, "step": 1566 }, { "epoch": 1.7299834345665377, "grad_norm": 1.7225453853607178, "learning_rate": 4.692e-06, "loss": 0.192, "step": 1567 }, { "epoch": 1.7310877967973495, "grad_norm": 1.6836518049240112, "learning_rate": 4.695e-06, "loss": 0.1966, "step": 1568 }, { "epoch": 1.7321921590281613, "grad_norm": 1.8950902223587036, "learning_rate": 4.698e-06, "loss": 0.2083, "step": 1569 }, { "epoch": 1.733296521258973, "grad_norm": 1.8489198684692383, "learning_rate": 4.701e-06, "loss": 0.1938, "step": 1570 }, { "epoch": 1.7344008834897846, "grad_norm": 2.320350408554077, "learning_rate": 4.704e-06, "loss": 0.1383, "step": 1571 }, { "epoch": 1.7355052457205964, "grad_norm": 2.0248870849609375, "learning_rate": 4.707000000000001e-06, "loss": 0.1602, "step": 1572 }, { "epoch": 1.736609607951408, "grad_norm": 2.166673421859741, "learning_rate": 4.71e-06, "loss": 0.1658, "step": 1573 }, { "epoch": 1.7377139701822197, "grad_norm": 1.326450228691101, "learning_rate": 4.713e-06, "loss": 0.1333, "step": 1574 }, { "epoch": 1.7388183324130315, "grad_norm": 2.338785171508789, "learning_rate": 4.716e-06, "loss": 0.1278, "step": 1575 }, { "epoch": 1.7399226946438433, "grad_norm": 1.8027926683425903, "learning_rate": 4.719e-06, "loss": 0.1397, "step": 1576 }, { "epoch": 1.7410270568746549, "grad_norm": 2.489922523498535, "learning_rate": 4.722000000000001e-06, "loss": 0.199, "step": 1577 }, { "epoch": 1.7421314191054664, "grad_norm": 1.724420428276062, "learning_rate": 4.7250000000000005e-06, "loss": 0.1338, "step": 1578 }, { "epoch": 1.7432357813362782, "grad_norm": 2.3618552684783936, "learning_rate": 4.7279999999999995e-06, "loss": 0.1602, "step": 1579 }, { "epoch": 1.74434014356709, "grad_norm": 2.095590114593506, "learning_rate": 4.731e-06, "loss": 
0.1786, "step": 1580 }, { "epoch": 1.7454445057979018, "grad_norm": 2.244307279586792, "learning_rate": 4.734e-06, "loss": 0.1526, "step": 1581 }, { "epoch": 1.7465488680287136, "grad_norm": 1.8941210508346558, "learning_rate": 4.7370000000000006e-06, "loss": 0.1764, "step": 1582 }, { "epoch": 1.7476532302595251, "grad_norm": 1.9160478115081787, "learning_rate": 4.74e-06, "loss": 0.191, "step": 1583 }, { "epoch": 1.7487575924903367, "grad_norm": 2.0410282611846924, "learning_rate": 4.743e-06, "loss": 0.1426, "step": 1584 }, { "epoch": 1.7498619547211485, "grad_norm": 1.5547336339950562, "learning_rate": 4.746e-06, "loss": 0.1756, "step": 1585 }, { "epoch": 1.7509663169519603, "grad_norm": 2.561053991317749, "learning_rate": 4.749e-06, "loss": 0.169, "step": 1586 }, { "epoch": 1.752070679182772, "grad_norm": 1.8578076362609863, "learning_rate": 4.752e-06, "loss": 0.1379, "step": 1587 }, { "epoch": 1.7531750414135836, "grad_norm": 1.545737624168396, "learning_rate": 4.755e-06, "loss": 0.1623, "step": 1588 }, { "epoch": 1.7542794036443954, "grad_norm": 2.3791189193725586, "learning_rate": 4.758e-06, "loss": 0.1636, "step": 1589 }, { "epoch": 1.755383765875207, "grad_norm": 1.9635244607925415, "learning_rate": 4.761000000000001e-06, "loss": 0.1817, "step": 1590 }, { "epoch": 1.7564881281060187, "grad_norm": 2.0049545764923096, "learning_rate": 4.764e-06, "loss": 0.1296, "step": 1591 }, { "epoch": 1.7575924903368305, "grad_norm": 1.8657246828079224, "learning_rate": 4.767e-06, "loss": 0.1416, "step": 1592 }, { "epoch": 1.7586968525676423, "grad_norm": 1.7694047689437866, "learning_rate": 4.77e-06, "loss": 0.1916, "step": 1593 }, { "epoch": 1.7598012147984539, "grad_norm": 1.5756224393844604, "learning_rate": 4.773e-06, "loss": 0.1571, "step": 1594 }, { "epoch": 1.7609055770292656, "grad_norm": 1.5462496280670166, "learning_rate": 4.7760000000000005e-06, "loss": 0.1415, "step": 1595 }, { "epoch": 1.7620099392600772, "grad_norm": 2.5095205307006836, "learning_rate": 
4.779e-06, "loss": 0.1689, "step": 1596 }, { "epoch": 1.763114301490889, "grad_norm": 2.9641404151916504, "learning_rate": 4.782e-06, "loss": 0.1521, "step": 1597 }, { "epoch": 1.7642186637217008, "grad_norm": 1.727625846862793, "learning_rate": 4.785e-06, "loss": 0.1632, "step": 1598 }, { "epoch": 1.7653230259525126, "grad_norm": 1.6551084518432617, "learning_rate": 4.788e-06, "loss": 0.1675, "step": 1599 }, { "epoch": 1.7664273881833241, "grad_norm": 1.984458327293396, "learning_rate": 4.791e-06, "loss": 0.1539, "step": 1600 }, { "epoch": 1.7675317504141357, "grad_norm": 6.62855339050293, "learning_rate": 4.794e-06, "loss": 0.2206, "step": 1601 }, { "epoch": 1.7686361126449475, "grad_norm": 2.0755534172058105, "learning_rate": 4.797e-06, "loss": 0.2112, "step": 1602 }, { "epoch": 1.7697404748757593, "grad_norm": 1.8999557495117188, "learning_rate": 4.800000000000001e-06, "loss": 0.1986, "step": 1603 }, { "epoch": 1.770844837106571, "grad_norm": 2.0732476711273193, "learning_rate": 4.803e-06, "loss": 0.166, "step": 1604 }, { "epoch": 1.7719491993373828, "grad_norm": 1.9250768423080444, "learning_rate": 4.806e-06, "loss": 0.2225, "step": 1605 }, { "epoch": 1.7730535615681944, "grad_norm": 2.274790048599243, "learning_rate": 4.809e-06, "loss": 0.2036, "step": 1606 }, { "epoch": 1.774157923799006, "grad_norm": 4.261341571807861, "learning_rate": 4.812e-06, "loss": 0.8205, "step": 1607 }, { "epoch": 1.7752622860298177, "grad_norm": 3.771475315093994, "learning_rate": 4.8150000000000005e-06, "loss": 0.6674, "step": 1608 }, { "epoch": 1.7763666482606295, "grad_norm": 2.2879798412323, "learning_rate": 4.818e-06, "loss": 0.5492, "step": 1609 }, { "epoch": 1.7774710104914413, "grad_norm": 1.8729336261749268, "learning_rate": 4.821e-06, "loss": 0.4387, "step": 1610 }, { "epoch": 1.7785753727222529, "grad_norm": 2.5017950534820557, "learning_rate": 4.824e-06, "loss": 0.4621, "step": 1611 }, { "epoch": 1.7796797349530646, "grad_norm": 1.9008718729019165, "learning_rate": 
4.827e-06, "loss": 0.3917, "step": 1612 }, { "epoch": 1.7807840971838762, "grad_norm": 1.5845110416412354, "learning_rate": 4.83e-06, "loss": 0.4528, "step": 1613 }, { "epoch": 1.781888459414688, "grad_norm": 2.69905948638916, "learning_rate": 4.833e-06, "loss": 0.2822, "step": 1614 }, { "epoch": 1.7829928216454998, "grad_norm": 2.173790216445923, "learning_rate": 4.836000000000001e-06, "loss": 0.2412, "step": 1615 }, { "epoch": 1.7840971838763116, "grad_norm": 1.8500988483428955, "learning_rate": 4.839e-06, "loss": 0.2602, "step": 1616 }, { "epoch": 1.7852015461071231, "grad_norm": 2.571061611175537, "learning_rate": 4.8419999999999996e-06, "loss": 0.1787, "step": 1617 }, { "epoch": 1.7863059083379347, "grad_norm": 1.9837923049926758, "learning_rate": 4.845e-06, "loss": 0.135, "step": 1618 }, { "epoch": 1.7874102705687465, "grad_norm": 1.9139059782028198, "learning_rate": 4.848e-06, "loss": 0.1635, "step": 1619 }, { "epoch": 1.7885146327995582, "grad_norm": 3.1160953044891357, "learning_rate": 4.851000000000001e-06, "loss": 0.1597, "step": 1620 }, { "epoch": 1.78961899503037, "grad_norm": 1.2097563743591309, "learning_rate": 4.8540000000000005e-06, "loss": 0.1968, "step": 1621 }, { "epoch": 1.7907233572611818, "grad_norm": 1.4502737522125244, "learning_rate": 4.856999999999999e-06, "loss": 0.1507, "step": 1622 }, { "epoch": 1.7918277194919934, "grad_norm": 2.257697343826294, "learning_rate": 4.86e-06, "loss": 0.1451, "step": 1623 }, { "epoch": 1.792932081722805, "grad_norm": 1.5759865045547485, "learning_rate": 4.863e-06, "loss": 0.1772, "step": 1624 }, { "epoch": 1.7940364439536167, "grad_norm": 1.3521077632904053, "learning_rate": 4.8660000000000005e-06, "loss": 0.1069, "step": 1625 }, { "epoch": 1.7951408061844285, "grad_norm": 1.839968204498291, "learning_rate": 4.869e-06, "loss": 0.1425, "step": 1626 }, { "epoch": 1.7962451684152403, "grad_norm": 1.7515575885772705, "learning_rate": 4.872e-06, "loss": 0.1213, "step": 1627 }, { "epoch": 1.7973495306460519, 
"grad_norm": 3.5710928440093994, "learning_rate": 4.875e-06, "loss": 0.1505, "step": 1628 }, { "epoch": 1.7984538928768636, "grad_norm": 1.6749248504638672, "learning_rate": 4.878e-06, "loss": 0.1821, "step": 1629 }, { "epoch": 1.7995582551076752, "grad_norm": 1.5092672109603882, "learning_rate": 4.881e-06, "loss": 0.1587, "step": 1630 }, { "epoch": 1.800662617338487, "grad_norm": 1.6165363788604736, "learning_rate": 4.884e-06, "loss": 0.153, "step": 1631 }, { "epoch": 1.8017669795692988, "grad_norm": 1.6825085878372192, "learning_rate": 4.887e-06, "loss": 0.1591, "step": 1632 }, { "epoch": 1.8028713418001105, "grad_norm": 1.5940436124801636, "learning_rate": 4.890000000000001e-06, "loss": 0.161, "step": 1633 }, { "epoch": 1.8039757040309221, "grad_norm": 1.15639066696167, "learning_rate": 4.8929999999999996e-06, "loss": 0.1285, "step": 1634 }, { "epoch": 1.805080066261734, "grad_norm": 1.914484977722168, "learning_rate": 4.896e-06, "loss": 0.148, "step": 1635 }, { "epoch": 1.8061844284925455, "grad_norm": 2.315354108810425, "learning_rate": 4.899e-06, "loss": 0.1473, "step": 1636 }, { "epoch": 1.8072887907233572, "grad_norm": 1.6811004877090454, "learning_rate": 4.902e-06, "loss": 0.1321, "step": 1637 }, { "epoch": 1.808393152954169, "grad_norm": 4.775970935821533, "learning_rate": 4.9050000000000005e-06, "loss": 0.1423, "step": 1638 }, { "epoch": 1.8094975151849808, "grad_norm": 1.6251327991485596, "learning_rate": 4.908e-06, "loss": 0.1513, "step": 1639 }, { "epoch": 1.8106018774157924, "grad_norm": 1.9468004703521729, "learning_rate": 4.911e-06, "loss": 0.1167, "step": 1640 }, { "epoch": 1.811706239646604, "grad_norm": 2.009047746658325, "learning_rate": 4.914e-06, "loss": 0.1529, "step": 1641 }, { "epoch": 1.8128106018774157, "grad_norm": 1.6770139932632446, "learning_rate": 4.917e-06, "loss": 0.1814, "step": 1642 }, { "epoch": 1.8139149641082275, "grad_norm": 1.793971061706543, "learning_rate": 4.92e-06, "loss": 0.1297, "step": 1643 }, { "epoch": 
1.8150193263390393, "grad_norm": 2.8263916969299316, "learning_rate": 4.923e-06, "loss": 0.1413, "step": 1644 }, { "epoch": 1.816123688569851, "grad_norm": 2.293865203857422, "learning_rate": 4.926000000000001e-06, "loss": 0.1295, "step": 1645 }, { "epoch": 1.8172280508006626, "grad_norm": 1.506406545639038, "learning_rate": 4.929000000000001e-06, "loss": 0.1347, "step": 1646 }, { "epoch": 1.8183324130314742, "grad_norm": 2.432396411895752, "learning_rate": 4.9319999999999995e-06, "loss": 0.1492, "step": 1647 }, { "epoch": 1.819436775262286, "grad_norm": 2.120115280151367, "learning_rate": 4.935e-06, "loss": 0.1747, "step": 1648 }, { "epoch": 1.8205411374930978, "grad_norm": 1.8402961492538452, "learning_rate": 4.938e-06, "loss": 0.1538, "step": 1649 }, { "epoch": 1.8216454997239095, "grad_norm": 2.0889391899108887, "learning_rate": 4.941000000000001e-06, "loss": 0.1608, "step": 1650 }, { "epoch": 1.822749861954721, "grad_norm": 2.0419962406158447, "learning_rate": 4.9440000000000004e-06, "loss": 0.184, "step": 1651 }, { "epoch": 1.823854224185533, "grad_norm": 2.64088773727417, "learning_rate": 4.947e-06, "loss": 0.2388, "step": 1652 }, { "epoch": 1.8249585864163445, "grad_norm": 2.14335298538208, "learning_rate": 4.95e-06, "loss": 0.1524, "step": 1653 }, { "epoch": 1.8260629486471562, "grad_norm": 2.237051248550415, "learning_rate": 4.953e-06, "loss": 0.1467, "step": 1654 }, { "epoch": 1.827167310877968, "grad_norm": 2.6056904792785645, "learning_rate": 4.9560000000000005e-06, "loss": 0.2079, "step": 1655 }, { "epoch": 1.8282716731087798, "grad_norm": 2.7136290073394775, "learning_rate": 4.959e-06, "loss": 0.1962, "step": 1656 }, { "epoch": 1.8293760353395914, "grad_norm": 3.459641218185425, "learning_rate": 4.962e-06, "loss": 0.7971, "step": 1657 }, { "epoch": 1.830480397570403, "grad_norm": 3.0874428749084473, "learning_rate": 4.965000000000001e-06, "loss": 0.6124, "step": 1658 }, { "epoch": 1.8315847598012147, "grad_norm": 1.7247132062911987, "learning_rate": 
4.968e-06, "loss": 0.4825, "step": 1659 }, { "epoch": 1.8326891220320265, "grad_norm": 2.735990285873413, "learning_rate": 4.9709999999999995e-06, "loss": 0.5334, "step": 1660 }, { "epoch": 1.8337934842628383, "grad_norm": 1.6758942604064941, "learning_rate": 4.974e-06, "loss": 0.415, "step": 1661 }, { "epoch": 1.83489784649365, "grad_norm": 3.493680000305176, "learning_rate": 4.977e-06, "loss": 0.5554, "step": 1662 }, { "epoch": 1.8360022087244616, "grad_norm": 2.421314001083374, "learning_rate": 4.980000000000001e-06, "loss": 0.3769, "step": 1663 }, { "epoch": 1.8371065709552732, "grad_norm": 1.777651071548462, "learning_rate": 4.983e-06, "loss": 0.2577, "step": 1664 }, { "epoch": 1.838210933186085, "grad_norm": 1.895829677581787, "learning_rate": 4.985999999999999e-06, "loss": 0.2827, "step": 1665 }, { "epoch": 1.8393152954168968, "grad_norm": 1.9323182106018066, "learning_rate": 4.989e-06, "loss": 0.2544, "step": 1666 }, { "epoch": 1.8404196576477085, "grad_norm": 1.4660927057266235, "learning_rate": 4.992e-06, "loss": 0.1831, "step": 1667 }, { "epoch": 1.84152401987852, "grad_norm": 1.4305180311203003, "learning_rate": 4.9950000000000005e-06, "loss": 0.1628, "step": 1668 }, { "epoch": 1.8426283821093319, "grad_norm": 2.5220584869384766, "learning_rate": 4.998e-06, "loss": 0.1486, "step": 1669 }, { "epoch": 1.8437327443401434, "grad_norm": 1.462658166885376, "learning_rate": 5.001e-06, "loss": 0.1608, "step": 1670 }, { "epoch": 1.8448371065709552, "grad_norm": 1.3967100381851196, "learning_rate": 5.004e-06, "loss": 0.1369, "step": 1671 }, { "epoch": 1.845941468801767, "grad_norm": 1.9626858234405518, "learning_rate": 5.007e-06, "loss": 0.1807, "step": 1672 }, { "epoch": 1.8470458310325788, "grad_norm": 1.4936636686325073, "learning_rate": 5.01e-06, "loss": 0.1402, "step": 1673 }, { "epoch": 1.8481501932633904, "grad_norm": 1.5359681844711304, "learning_rate": 5.013e-06, "loss": 0.1543, "step": 1674 }, { "epoch": 1.8492545554942021, "grad_norm": 
1.4684914350509644, "learning_rate": 5.016e-06, "loss": 0.1304, "step": 1675 }, { "epoch": 1.8503589177250137, "grad_norm": 2.0942320823669434, "learning_rate": 5.0190000000000006e-06, "loss": 0.1437, "step": 1676 }, { "epoch": 1.8514632799558255, "grad_norm": 1.5958210229873657, "learning_rate": 5.0219999999999995e-06, "loss": 0.1354, "step": 1677 }, { "epoch": 1.8525676421866373, "grad_norm": 2.1251139640808105, "learning_rate": 5.025e-06, "loss": 0.1323, "step": 1678 }, { "epoch": 1.853672004417449, "grad_norm": 1.9082331657409668, "learning_rate": 5.028e-06, "loss": 0.1358, "step": 1679 }, { "epoch": 1.8547763666482606, "grad_norm": 1.4869931936264038, "learning_rate": 5.031e-06, "loss": 0.1299, "step": 1680 }, { "epoch": 1.8558807288790722, "grad_norm": 1.4952373504638672, "learning_rate": 5.034e-06, "loss": 0.1479, "step": 1681 }, { "epoch": 1.856985091109884, "grad_norm": 1.714263916015625, "learning_rate": 5.037e-06, "loss": 0.1324, "step": 1682 }, { "epoch": 1.8580894533406958, "grad_norm": 1.7396937608718872, "learning_rate": 5.04e-06, "loss": 0.1651, "step": 1683 }, { "epoch": 1.8591938155715075, "grad_norm": 2.2063746452331543, "learning_rate": 5.043e-06, "loss": 0.1357, "step": 1684 }, { "epoch": 1.8602981778023193, "grad_norm": 1.703795313835144, "learning_rate": 5.046e-06, "loss": 0.1007, "step": 1685 }, { "epoch": 1.8614025400331309, "grad_norm": 2.1686551570892334, "learning_rate": 5.049e-06, "loss": 0.1808, "step": 1686 }, { "epoch": 1.8625069022639424, "grad_norm": 1.6433000564575195, "learning_rate": 5.052e-06, "loss": 0.1434, "step": 1687 }, { "epoch": 1.8636112644947542, "grad_norm": 1.8880689144134521, "learning_rate": 5.055000000000001e-06, "loss": 0.1121, "step": 1688 }, { "epoch": 1.864715626725566, "grad_norm": 1.3253337144851685, "learning_rate": 5.0580000000000005e-06, "loss": 0.131, "step": 1689 }, { "epoch": 1.8658199889563778, "grad_norm": 5.182057857513428, "learning_rate": 5.0609999999999995e-06, "loss": 0.1601, "step": 1690 }, { 
"epoch": 1.8669243511871894, "grad_norm": 8.826643943786621, "learning_rate": 5.064e-06, "loss": 0.222, "step": 1691 }, { "epoch": 1.8680287134180011, "grad_norm": 1.8977512121200562, "learning_rate": 5.067e-06, "loss": 0.1795, "step": 1692 }, { "epoch": 1.8691330756488127, "grad_norm": 1.8737126588821411, "learning_rate": 5.070000000000001e-06, "loss": 0.1519, "step": 1693 }, { "epoch": 1.8702374378796245, "grad_norm": 2.8090834617614746, "learning_rate": 5.073e-06, "loss": 0.1928, "step": 1694 }, { "epoch": 1.8713418001104363, "grad_norm": 1.7241157293319702, "learning_rate": 5.076e-06, "loss": 0.158, "step": 1695 }, { "epoch": 1.872446162341248, "grad_norm": 1.799472689628601, "learning_rate": 5.079e-06, "loss": 0.1418, "step": 1696 }, { "epoch": 1.8735505245720596, "grad_norm": 2.5385279655456543, "learning_rate": 5.082e-06, "loss": 0.1453, "step": 1697 }, { "epoch": 1.8746548868028712, "grad_norm": 2.055436849594116, "learning_rate": 5.0850000000000004e-06, "loss": 0.1922, "step": 1698 }, { "epoch": 1.875759249033683, "grad_norm": 1.8865216970443726, "learning_rate": 5.088e-06, "loss": 0.1433, "step": 1699 }, { "epoch": 1.8768636112644947, "grad_norm": 1.715623140335083, "learning_rate": 5.091e-06, "loss": 0.1724, "step": 1700 }, { "epoch": 1.8779679734953065, "grad_norm": 2.0848889350891113, "learning_rate": 5.094000000000001e-06, "loss": 0.1337, "step": 1701 }, { "epoch": 1.8790723357261183, "grad_norm": 2.4698922634124756, "learning_rate": 5.097e-06, "loss": 0.1867, "step": 1702 }, { "epoch": 1.8801766979569299, "grad_norm": 1.6712956428527832, "learning_rate": 5.1e-06, "loss": 0.1516, "step": 1703 }, { "epoch": 1.8812810601877414, "grad_norm": 2.694268226623535, "learning_rate": 5.103e-06, "loss": 0.1462, "step": 1704 }, { "epoch": 1.8823854224185532, "grad_norm": 1.9275668859481812, "learning_rate": 5.106e-06, "loss": 0.1262, "step": 1705 }, { "epoch": 1.883489784649365, "grad_norm": 2.220471143722534, "learning_rate": 5.1090000000000006e-06, "loss": 
0.169, "step": 1706 }, { "epoch": 1.8845941468801768, "grad_norm": 4.05960750579834, "learning_rate": 5.112e-06, "loss": 0.6649, "step": 1707 }, { "epoch": 1.8856985091109884, "grad_norm": 2.7388930320739746, "learning_rate": 5.115e-06, "loss": 0.7162, "step": 1708 }, { "epoch": 1.8868028713418001, "grad_norm": 2.2081968784332275, "learning_rate": 5.118e-06, "loss": 0.464, "step": 1709 }, { "epoch": 1.8879072335726117, "grad_norm": 1.8223410844802856, "learning_rate": 5.121e-06, "loss": 0.4506, "step": 1710 }, { "epoch": 1.8890115958034235, "grad_norm": 1.5883103609085083, "learning_rate": 5.124e-06, "loss": 0.4111, "step": 1711 }, { "epoch": 1.8901159580342353, "grad_norm": 1.8263508081436157, "learning_rate": 5.127e-06, "loss": 0.3602, "step": 1712 }, { "epoch": 1.891220320265047, "grad_norm": 1.4405359029769897, "learning_rate": 5.130000000000001e-06, "loss": 0.245, "step": 1713 }, { "epoch": 1.8923246824958586, "grad_norm": 2.0654995441436768, "learning_rate": 5.133e-06, "loss": 0.3432, "step": 1714 }, { "epoch": 1.8934290447266704, "grad_norm": 1.9493391513824463, "learning_rate": 5.136e-06, "loss": 0.208, "step": 1715 }, { "epoch": 1.894533406957482, "grad_norm": 1.3238449096679688, "learning_rate": 5.139e-06, "loss": 0.2118, "step": 1716 }, { "epoch": 1.8956377691882937, "grad_norm": 1.4535939693450928, "learning_rate": 5.142e-06, "loss": 0.2297, "step": 1717 }, { "epoch": 1.8967421314191055, "grad_norm": 1.7273449897766113, "learning_rate": 5.145000000000001e-06, "loss": 0.1493, "step": 1718 }, { "epoch": 1.8978464936499173, "grad_norm": 2.063040256500244, "learning_rate": 5.1480000000000005e-06, "loss": 0.1978, "step": 1719 }, { "epoch": 1.8989508558807289, "grad_norm": 1.7359946966171265, "learning_rate": 5.1509999999999995e-06, "loss": 0.1683, "step": 1720 }, { "epoch": 1.9000552181115404, "grad_norm": 1.2640416622161865, "learning_rate": 5.154e-06, "loss": 0.1341, "step": 1721 }, { "epoch": 1.9011595803423522, "grad_norm": 1.6784379482269287, 
"learning_rate": 5.157e-06, "loss": 0.1458, "step": 1722 }, { "epoch": 1.902263942573164, "grad_norm": 5.025092124938965, "learning_rate": 5.16e-06, "loss": 0.1343, "step": 1723 }, { "epoch": 1.9033683048039758, "grad_norm": 1.2770572900772095, "learning_rate": 5.163e-06, "loss": 0.1606, "step": 1724 }, { "epoch": 1.9044726670347876, "grad_norm": 1.8835076093673706, "learning_rate": 5.166e-06, "loss": 0.1361, "step": 1725 }, { "epoch": 1.9055770292655991, "grad_norm": 1.5439860820770264, "learning_rate": 5.169e-06, "loss": 0.1218, "step": 1726 }, { "epoch": 1.9066813914964107, "grad_norm": 1.4995362758636475, "learning_rate": 5.172e-06, "loss": 0.1386, "step": 1727 }, { "epoch": 1.9077857537272225, "grad_norm": 1.3283578157424927, "learning_rate": 5.175e-06, "loss": 0.1143, "step": 1728 }, { "epoch": 1.9088901159580343, "grad_norm": 1.4153269529342651, "learning_rate": 5.178e-06, "loss": 0.1594, "step": 1729 }, { "epoch": 1.909994478188846, "grad_norm": 2.3205766677856445, "learning_rate": 5.181e-06, "loss": 0.1207, "step": 1730 }, { "epoch": 1.9110988404196576, "grad_norm": 1.397584319114685, "learning_rate": 5.184000000000001e-06, "loss": 0.1353, "step": 1731 }, { "epoch": 1.9122032026504694, "grad_norm": 2.0549192428588867, "learning_rate": 5.1870000000000005e-06, "loss": 0.1709, "step": 1732 }, { "epoch": 1.913307564881281, "grad_norm": 1.2992265224456787, "learning_rate": 5.1899999999999994e-06, "loss": 0.1056, "step": 1733 }, { "epoch": 1.9144119271120927, "grad_norm": 17.359800338745117, "learning_rate": 5.193e-06, "loss": 0.1846, "step": 1734 }, { "epoch": 1.9155162893429045, "grad_norm": 1.5731918811798096, "learning_rate": 5.196e-06, "loss": 0.1495, "step": 1735 }, { "epoch": 1.9166206515737163, "grad_norm": 1.8664085865020752, "learning_rate": 5.1990000000000005e-06, "loss": 0.1784, "step": 1736 }, { "epoch": 1.9177250138045279, "grad_norm": 2.4549291133880615, "learning_rate": 5.202e-06, "loss": 0.1459, "step": 1737 }, { "epoch": 1.9188293760353394, 
"grad_norm": 1.9317691326141357, "learning_rate": 5.205e-06, "loss": 0.1417, "step": 1738 }, { "epoch": 1.9199337382661512, "grad_norm": 1.6288191080093384, "learning_rate": 5.208e-06, "loss": 0.1082, "step": 1739 }, { "epoch": 1.921038100496963, "grad_norm": 1.641944169998169, "learning_rate": 5.211e-06, "loss": 0.1444, "step": 1740 }, { "epoch": 1.9221424627277748, "grad_norm": 1.693672776222229, "learning_rate": 5.214e-06, "loss": 0.1531, "step": 1741 }, { "epoch": 1.9232468249585866, "grad_norm": 2.062808036804199, "learning_rate": 5.217e-06, "loss": 0.1591, "step": 1742 }, { "epoch": 1.9243511871893981, "grad_norm": 2.4307727813720703, "learning_rate": 5.22e-06, "loss": 0.1492, "step": 1743 }, { "epoch": 1.9254555494202097, "grad_norm": 1.6560192108154297, "learning_rate": 5.223000000000001e-06, "loss": 0.1374, "step": 1744 }, { "epoch": 1.9265599116510215, "grad_norm": 1.8122470378875732, "learning_rate": 5.226e-06, "loss": 0.119, "step": 1745 }, { "epoch": 1.9276642738818333, "grad_norm": 1.815094232559204, "learning_rate": 5.229e-06, "loss": 0.1441, "step": 1746 }, { "epoch": 1.928768636112645, "grad_norm": 3.4484450817108154, "learning_rate": 5.232e-06, "loss": 0.1415, "step": 1747 }, { "epoch": 1.9298729983434566, "grad_norm": 1.9064793586730957, "learning_rate": 5.235e-06, "loss": 0.1505, "step": 1748 }, { "epoch": 1.9309773605742684, "grad_norm": 2.3697128295898438, "learning_rate": 5.2380000000000005e-06, "loss": 0.2371, "step": 1749 }, { "epoch": 1.93208172280508, "grad_norm": 1.365509033203125, "learning_rate": 5.241e-06, "loss": 0.1416, "step": 1750 }, { "epoch": 1.9331860850358917, "grad_norm": 2.955034017562866, "learning_rate": 5.244e-06, "loss": 0.1471, "step": 1751 }, { "epoch": 1.9342904472667035, "grad_norm": 1.6081465482711792, "learning_rate": 5.247e-06, "loss": 0.0995, "step": 1752 }, { "epoch": 1.9353948094975153, "grad_norm": 1.7475676536560059, "learning_rate": 5.25e-06, "loss": 0.1179, "step": 1753 }, { "epoch": 1.9364991717283269, 
"grad_norm": 1.6616253852844238, "learning_rate": 5.253e-06, "loss": 0.1373, "step": 1754 }, { "epoch": 1.9376035339591386, "grad_norm": 2.2294890880584717, "learning_rate": 5.256e-06, "loss": 0.1313, "step": 1755 }, { "epoch": 1.9387078961899502, "grad_norm": 3.0614864826202393, "learning_rate": 5.259000000000001e-06, "loss": 0.2262, "step": 1756 }, { "epoch": 1.939812258420762, "grad_norm": 7.798871994018555, "learning_rate": 5.262e-06, "loss": 0.755, "step": 1757 }, { "epoch": 1.9409166206515738, "grad_norm": 2.7950141429901123, "learning_rate": 5.2649999999999996e-06, "loss": 0.6781, "step": 1758 }, { "epoch": 1.9420209828823856, "grad_norm": 2.138120412826538, "learning_rate": 5.268e-06, "loss": 0.5782, "step": 1759 }, { "epoch": 1.9431253451131971, "grad_norm": 1.6137439012527466, "learning_rate": 5.271e-06, "loss": 0.4084, "step": 1760 }, { "epoch": 1.9442297073440087, "grad_norm": 1.6179276704788208, "learning_rate": 5.274000000000001e-06, "loss": 0.3677, "step": 1761 }, { "epoch": 1.9453340695748205, "grad_norm": 3.874553918838501, "learning_rate": 5.2770000000000005e-06, "loss": 0.3153, "step": 1762 }, { "epoch": 1.9464384318056323, "grad_norm": 2.0381224155426025, "learning_rate": 5.279999999999999e-06, "loss": 0.2482, "step": 1763 }, { "epoch": 1.947542794036444, "grad_norm": 1.9060245752334595, "learning_rate": 5.283e-06, "loss": 0.2432, "step": 1764 }, { "epoch": 1.9486471562672558, "grad_norm": 1.6612011194229126, "learning_rate": 5.286e-06, "loss": 0.2211, "step": 1765 }, { "epoch": 1.9497515184980674, "grad_norm": 1.1295340061187744, "learning_rate": 5.2890000000000005e-06, "loss": 0.1736, "step": 1766 }, { "epoch": 1.950855880728879, "grad_norm": 1.5017000436782837, "learning_rate": 5.292e-06, "loss": 0.1644, "step": 1767 }, { "epoch": 1.9519602429596907, "grad_norm": 2.9781410694122314, "learning_rate": 5.295e-06, "loss": 0.156, "step": 1768 }, { "epoch": 1.9530646051905025, "grad_norm": 1.1295374631881714, "learning_rate": 5.298e-06, "loss": 
0.13, "step": 1769 }, { "epoch": 1.9541689674213143, "grad_norm": 1.2504935264587402, "learning_rate": 5.301e-06, "loss": 0.1253, "step": 1770 }, { "epoch": 1.9552733296521259, "grad_norm": 1.429353952407837, "learning_rate": 5.304e-06, "loss": 0.1144, "step": 1771 }, { "epoch": 1.9563776918829376, "grad_norm": 1.5419481992721558, "learning_rate": 5.307e-06, "loss": 0.201, "step": 1772 }, { "epoch": 1.9574820541137492, "grad_norm": 1.3240151405334473, "learning_rate": 5.31e-06, "loss": 0.1054, "step": 1773 }, { "epoch": 1.958586416344561, "grad_norm": 1.5957924127578735, "learning_rate": 5.313000000000001e-06, "loss": 0.1499, "step": 1774 }, { "epoch": 1.9596907785753728, "grad_norm": 1.2826032638549805, "learning_rate": 5.3160000000000004e-06, "loss": 0.1214, "step": 1775 }, { "epoch": 1.9607951408061846, "grad_norm": 1.793731451034546, "learning_rate": 5.319e-06, "loss": 0.1361, "step": 1776 }, { "epoch": 1.9618995030369961, "grad_norm": 1.0482031106948853, "learning_rate": 5.322e-06, "loss": 0.084, "step": 1777 }, { "epoch": 1.9630038652678077, "grad_norm": 1.8873929977416992, "learning_rate": 5.325e-06, "loss": 0.1379, "step": 1778 }, { "epoch": 1.9641082274986195, "grad_norm": 1.3771593570709229, "learning_rate": 5.3280000000000005e-06, "loss": 0.1221, "step": 1779 }, { "epoch": 1.9652125897294312, "grad_norm": 1.1401993036270142, "learning_rate": 5.331e-06, "loss": 0.105, "step": 1780 }, { "epoch": 1.966316951960243, "grad_norm": 1.2289235591888428, "learning_rate": 5.334000000000001e-06, "loss": 0.0909, "step": 1781 }, { "epoch": 1.9674213141910548, "grad_norm": 2.05977725982666, "learning_rate": 5.337e-06, "loss": 0.1376, "step": 1782 }, { "epoch": 1.9685256764218664, "grad_norm": 1.4200043678283691, "learning_rate": 5.34e-06, "loss": 0.1294, "step": 1783 }, { "epoch": 1.969630038652678, "grad_norm": 1.4120618104934692, "learning_rate": 5.343e-06, "loss": 0.1853, "step": 1784 }, { "epoch": 1.9707344008834897, "grad_norm": 1.8101516962051392, 
"learning_rate": 5.346e-06, "loss": 0.1479, "step": 1785 }, { "epoch": 1.9718387631143015, "grad_norm": 1.22269606590271, "learning_rate": 5.349e-06, "loss": 0.1058, "step": 1786 }, { "epoch": 1.9729431253451133, "grad_norm": 1.854337215423584, "learning_rate": 5.352000000000001e-06, "loss": 0.1291, "step": 1787 }, { "epoch": 1.9740474875759249, "grad_norm": 1.6314358711242676, "learning_rate": 5.3549999999999996e-06, "loss": 0.1173, "step": 1788 }, { "epoch": 1.9751518498067366, "grad_norm": 1.5882675647735596, "learning_rate": 5.358e-06, "loss": 0.1092, "step": 1789 }, { "epoch": 1.9762562120375482, "grad_norm": 1.4603815078735352, "learning_rate": 5.361e-06, "loss": 0.1352, "step": 1790 }, { "epoch": 1.97736057426836, "grad_norm": 1.9107224941253662, "learning_rate": 5.364e-06, "loss": 0.1446, "step": 1791 }, { "epoch": 1.9784649364991718, "grad_norm": 1.977655291557312, "learning_rate": 5.3670000000000005e-06, "loss": 0.1573, "step": 1792 }, { "epoch": 1.9795692987299836, "grad_norm": 1.4327391386032104, "learning_rate": 5.37e-06, "loss": 0.1158, "step": 1793 }, { "epoch": 1.9806736609607951, "grad_norm": 1.7189252376556396, "learning_rate": 5.373e-06, "loss": 0.1207, "step": 1794 }, { "epoch": 1.981778023191607, "grad_norm": 1.7040061950683594, "learning_rate": 5.376e-06, "loss": 0.1511, "step": 1795 }, { "epoch": 1.9828823854224185, "grad_norm": 1.6864296197891235, "learning_rate": 5.379e-06, "loss": 0.1323, "step": 1796 }, { "epoch": 1.9839867476532302, "grad_norm": 1.493523120880127, "learning_rate": 5.382e-06, "loss": 0.107, "step": 1797 }, { "epoch": 1.985091109884042, "grad_norm": 1.545425295829773, "learning_rate": 5.385e-06, "loss": 0.1289, "step": 1798 }, { "epoch": 1.9861954721148538, "grad_norm": 1.5316189527511597, "learning_rate": 5.388000000000001e-06, "loss": 0.1312, "step": 1799 }, { "epoch": 1.9872998343456654, "grad_norm": 1.6622873544692993, "learning_rate": 5.391e-06, "loss": 0.1662, "step": 1800 }, { "epoch": 1.988404196576477, 
"grad_norm": 1.9122724533081055, "learning_rate": 5.3939999999999995e-06, "loss": 0.1451, "step": 1801 }, { "epoch": 1.9895085588072887, "grad_norm": 1.755497694015503, "learning_rate": 5.397e-06, "loss": 0.2217, "step": 1802 }, { "epoch": 1.9906129210381005, "grad_norm": 1.9301269054412842, "learning_rate": 5.4e-06, "loss": 0.1642, "step": 1803 }, { "epoch": 1.9917172832689123, "grad_norm": 3.117488145828247, "learning_rate": 5.403000000000001e-06, "loss": 0.157, "step": 1804 }, { "epoch": 1.992821645499724, "grad_norm": 1.6602163314819336, "learning_rate": 5.406e-06, "loss": 0.1379, "step": 1805 }, { "epoch": 1.9939260077305356, "grad_norm": 1.8383561372756958, "learning_rate": 5.408999999999999e-06, "loss": 0.1803, "step": 1806 }, { "epoch": 1.9950303699613472, "grad_norm": 5.2795562744140625, "learning_rate": 5.412e-06, "loss": 0.4062, "step": 1807 }, { "epoch": 1.996134732192159, "grad_norm": 1.5687057971954346, "learning_rate": 5.415e-06, "loss": 0.1499, "step": 1808 }, { "epoch": 1.9972390944229708, "grad_norm": 2.5165576934814453, "learning_rate": 5.4180000000000005e-06, "loss": 0.1533, "step": 1809 }, { "epoch": 1.9983434566537825, "grad_norm": 1.544351577758789, "learning_rate": 5.421e-06, "loss": 0.1081, "step": 1810 }, { "epoch": 1.999447818884594, "grad_norm": 1.3492364883422852, "learning_rate": 5.424e-06, "loss": 0.1311, "step": 1811 }, { "epoch": 2.0, "grad_norm": 1.7950867414474487, "learning_rate": 5.427e-06, "loss": 0.1114, "step": 1812 }, { "epoch": 2.001104362230812, "grad_norm": 2.031832218170166, "learning_rate": 5.43e-06, "loss": 0.5706, "step": 1813 }, { "epoch": 2.0022087244616236, "grad_norm": 2.63486385345459, "learning_rate": 5.433e-06, "loss": 0.4633, "step": 1814 }, { "epoch": 2.003313086692435, "grad_norm": 1.5990172624588013, "learning_rate": 5.436e-06, "loss": 0.4068, "step": 1815 }, { "epoch": 2.0044174489232467, "grad_norm": 1.5660516023635864, "learning_rate": 5.439e-06, "loss": 0.4215, "step": 1816 }, { "epoch": 
2.0055218111540585, "grad_norm": 1.9419604539871216, "learning_rate": 5.442000000000001e-06, "loss": 0.3712, "step": 1817 }, { "epoch": 2.0066261733848703, "grad_norm": 1.7075302600860596, "learning_rate": 5.445e-06, "loss": 0.3642, "step": 1818 }, { "epoch": 2.007730535615682, "grad_norm": 1.6007345914840698, "learning_rate": 5.448e-06, "loss": 0.3196, "step": 1819 }, { "epoch": 2.008834897846494, "grad_norm": 1.3456369638442993, "learning_rate": 5.451e-06, "loss": 0.1774, "step": 1820 }, { "epoch": 2.009939260077305, "grad_norm": 1.130615472793579, "learning_rate": 5.454e-06, "loss": 0.1929, "step": 1821 }, { "epoch": 2.011043622308117, "grad_norm": 1.1239149570465088, "learning_rate": 5.4570000000000004e-06, "loss": 0.1399, "step": 1822 }, { "epoch": 2.0121479845389287, "grad_norm": 2.9333155155181885, "learning_rate": 5.46e-06, "loss": 0.1212, "step": 1823 }, { "epoch": 2.0132523467697405, "grad_norm": 1.4050160646438599, "learning_rate": 5.463000000000001e-06, "loss": 0.1396, "step": 1824 }, { "epoch": 2.0143567090005523, "grad_norm": 1.80942964553833, "learning_rate": 5.466e-06, "loss": 0.1831, "step": 1825 }, { "epoch": 2.015461071231364, "grad_norm": 1.0788378715515137, "learning_rate": 5.469e-06, "loss": 0.1089, "step": 1826 }, { "epoch": 2.0165654334621754, "grad_norm": 1.2534021139144897, "learning_rate": 5.472e-06, "loss": 0.0854, "step": 1827 }, { "epoch": 2.017669795692987, "grad_norm": 1.1114357709884644, "learning_rate": 5.475e-06, "loss": 0.1508, "step": 1828 }, { "epoch": 2.018774157923799, "grad_norm": 1.298318862915039, "learning_rate": 5.478000000000001e-06, "loss": 0.1157, "step": 1829 }, { "epoch": 2.019878520154611, "grad_norm": 1.2924572229385376, "learning_rate": 5.4810000000000005e-06, "loss": 0.0845, "step": 1830 }, { "epoch": 2.0209828823854226, "grad_norm": 1.2770264148712158, "learning_rate": 5.4839999999999995e-06, "loss": 0.1073, "step": 1831 }, { "epoch": 2.022087244616234, "grad_norm": 1.6476809978485107, "learning_rate": 
5.487e-06, "loss": 0.1179, "step": 1832 }, { "epoch": 2.0231916068470457, "grad_norm": 1.5032565593719482, "learning_rate": 5.49e-06, "loss": 0.1367, "step": 1833 }, { "epoch": 2.0242959690778575, "grad_norm": 1.608903169631958, "learning_rate": 5.493000000000001e-06, "loss": 0.1336, "step": 1834 }, { "epoch": 2.0254003313086693, "grad_norm": 1.2984520196914673, "learning_rate": 5.496e-06, "loss": 0.1117, "step": 1835 }, { "epoch": 2.026504693539481, "grad_norm": 1.5289463996887207, "learning_rate": 5.499e-06, "loss": 0.1035, "step": 1836 }, { "epoch": 2.027609055770293, "grad_norm": 1.4246517419815063, "learning_rate": 5.502e-06, "loss": 0.1313, "step": 1837 }, { "epoch": 2.028713418001104, "grad_norm": 1.2653136253356934, "learning_rate": 5.505e-06, "loss": 0.1063, "step": 1838 }, { "epoch": 2.029817780231916, "grad_norm": 2.3028016090393066, "learning_rate": 5.5080000000000005e-06, "loss": 0.0906, "step": 1839 }, { "epoch": 2.0309221424627277, "grad_norm": 1.5417536497116089, "learning_rate": 5.511e-06, "loss": 0.1123, "step": 1840 }, { "epoch": 2.0320265046935395, "grad_norm": 1.3055840730667114, "learning_rate": 5.514e-06, "loss": 0.1097, "step": 1841 }, { "epoch": 2.0331308669243513, "grad_norm": 1.4127659797668457, "learning_rate": 5.517000000000001e-06, "loss": 0.1084, "step": 1842 }, { "epoch": 2.034235229155163, "grad_norm": 3.610492706298828, "learning_rate": 5.52e-06, "loss": 0.1257, "step": 1843 }, { "epoch": 2.0353395913859744, "grad_norm": 2.011259078979492, "learning_rate": 5.523e-06, "loss": 0.1117, "step": 1844 }, { "epoch": 2.036443953616786, "grad_norm": 1.5747829675674438, "learning_rate": 5.526e-06, "loss": 0.1461, "step": 1845 }, { "epoch": 2.037548315847598, "grad_norm": 1.7674592733383179, "learning_rate": 5.529e-06, "loss": 0.1588, "step": 1846 }, { "epoch": 2.0386526780784098, "grad_norm": 1.3843071460723877, "learning_rate": 5.5320000000000006e-06, "loss": 0.104, "step": 1847 }, { "epoch": 2.0397570403092216, "grad_norm": 
1.9697208404541016, "learning_rate": 5.535e-06, "loss": 0.1109, "step": 1848 }, { "epoch": 2.0408614025400333, "grad_norm": 2.1310923099517822, "learning_rate": 5.537999999999999e-06, "loss": 0.1159, "step": 1849 }, { "epoch": 2.0419657647708447, "grad_norm": 1.5722094774246216, "learning_rate": 5.541e-06, "loss": 0.1232, "step": 1850 }, { "epoch": 2.0430701270016565, "grad_norm": 1.6523206233978271, "learning_rate": 5.544e-06, "loss": 0.1003, "step": 1851 }, { "epoch": 2.0441744892324683, "grad_norm": 1.2824095487594604, "learning_rate": 5.547e-06, "loss": 0.1234, "step": 1852 }, { "epoch": 2.04527885146328, "grad_norm": 1.4319510459899902, "learning_rate": 5.55e-06, "loss": 0.1179, "step": 1853 }, { "epoch": 2.046383213694092, "grad_norm": 1.3972458839416504, "learning_rate": 5.553e-06, "loss": 0.11, "step": 1854 }, { "epoch": 2.047487575924903, "grad_norm": 1.7233613729476929, "learning_rate": 5.556e-06, "loss": 0.1175, "step": 1855 }, { "epoch": 2.048591938155715, "grad_norm": 1.649674654006958, "learning_rate": 5.559e-06, "loss": 0.1345, "step": 1856 }, { "epoch": 2.0496963003865267, "grad_norm": 2.197955846786499, "learning_rate": 5.562e-06, "loss": 0.1619, "step": 1857 }, { "epoch": 2.0508006626173385, "grad_norm": 1.4225223064422607, "learning_rate": 5.565e-06, "loss": 0.149, "step": 1858 }, { "epoch": 2.0519050248481503, "grad_norm": 1.413541316986084, "learning_rate": 5.568e-06, "loss": 0.1557, "step": 1859 }, { "epoch": 2.053009387078962, "grad_norm": 2.6951520442962646, "learning_rate": 5.5710000000000005e-06, "loss": 0.0971, "step": 1860 }, { "epoch": 2.0541137493097734, "grad_norm": 2.4714226722717285, "learning_rate": 5.574e-06, "loss": 0.1604, "step": 1861 }, { "epoch": 2.055218111540585, "grad_norm": 1.8114675283432007, "learning_rate": 5.577e-06, "loss": 0.1767, "step": 1862 }, { "epoch": 2.056322473771397, "grad_norm": 2.5512075424194336, "learning_rate": 5.58e-06, "loss": 0.661, "step": 1863 }, { "epoch": 2.0574268360022088, "grad_norm": 
2.266594886779785, "learning_rate": 5.583e-06, "loss": 0.5719, "step": 1864 }, { "epoch": 2.0585311982330206, "grad_norm": 1.9097262620925903, "learning_rate": 5.586e-06, "loss": 0.3712, "step": 1865 }, { "epoch": 2.0596355604638323, "grad_norm": 1.7194751501083374, "learning_rate": 5.589e-06, "loss": 0.4619, "step": 1866 }, { "epoch": 2.0607399226946437, "grad_norm": 1.5071940422058105, "learning_rate": 5.592000000000001e-06, "loss": 0.4025, "step": 1867 }, { "epoch": 2.0618442849254555, "grad_norm": 1.6810886859893799, "learning_rate": 5.595e-06, "loss": 0.3018, "step": 1868 }, { "epoch": 2.0629486471562672, "grad_norm": 1.9265727996826172, "learning_rate": 5.598e-06, "loss": 0.2486, "step": 1869 }, { "epoch": 2.064053009387079, "grad_norm": 1.5796146392822266, "learning_rate": 5.601e-06, "loss": 0.2389, "step": 1870 }, { "epoch": 2.065157371617891, "grad_norm": 1.2779170274734497, "learning_rate": 5.604e-06, "loss": 0.1426, "step": 1871 }, { "epoch": 2.0662617338487026, "grad_norm": 1.6317633390426636, "learning_rate": 5.607000000000001e-06, "loss": 0.1915, "step": 1872 }, { "epoch": 2.067366096079514, "grad_norm": 1.5240205526351929, "learning_rate": 5.6100000000000005e-06, "loss": 0.1786, "step": 1873 }, { "epoch": 2.0684704583103257, "grad_norm": 1.0693144798278809, "learning_rate": 5.6129999999999995e-06, "loss": 0.1789, "step": 1874 }, { "epoch": 2.0695748205411375, "grad_norm": 1.5029780864715576, "learning_rate": 5.616e-06, "loss": 0.1165, "step": 1875 }, { "epoch": 2.0706791827719493, "grad_norm": 1.2730019092559814, "learning_rate": 5.619e-06, "loss": 0.1232, "step": 1876 }, { "epoch": 2.071783545002761, "grad_norm": 1.4445147514343262, "learning_rate": 5.6220000000000006e-06, "loss": 0.134, "step": 1877 }, { "epoch": 2.0728879072335724, "grad_norm": 1.3863033056259155, "learning_rate": 5.625e-06, "loss": 0.1505, "step": 1878 }, { "epoch": 2.073992269464384, "grad_norm": 1.262351632118225, "learning_rate": 5.628e-06, "loss": 0.1092, "step": 1879 }, { 
"epoch": 2.075096631695196, "grad_norm": 1.4682823419570923, "learning_rate": 5.631e-06, "loss": 0.1201, "step": 1880 }, { "epoch": 2.0762009939260078, "grad_norm": 0.9939002990722656, "learning_rate": 5.634e-06, "loss": 0.0852, "step": 1881 }, { "epoch": 2.0773053561568195, "grad_norm": 1.5804942846298218, "learning_rate": 5.637e-06, "loss": 0.1069, "step": 1882 }, { "epoch": 2.0784097183876313, "grad_norm": 1.4594720602035522, "learning_rate": 5.64e-06, "loss": 0.1499, "step": 1883 }, { "epoch": 2.0795140806184427, "grad_norm": 1.0660531520843506, "learning_rate": 5.643e-06, "loss": 0.1182, "step": 1884 }, { "epoch": 2.0806184428492545, "grad_norm": 2.0912883281707764, "learning_rate": 5.646000000000001e-06, "loss": 0.159, "step": 1885 }, { "epoch": 2.0817228050800662, "grad_norm": 1.336705207824707, "learning_rate": 5.649e-06, "loss": 0.0971, "step": 1886 }, { "epoch": 2.082827167310878, "grad_norm": 3.0708346366882324, "learning_rate": 5.652e-06, "loss": 0.117, "step": 1887 }, { "epoch": 2.08393152954169, "grad_norm": 1.7440097332000732, "learning_rate": 5.655e-06, "loss": 0.1065, "step": 1888 }, { "epoch": 2.0850358917725016, "grad_norm": 1.6067919731140137, "learning_rate": 5.658e-06, "loss": 0.1384, "step": 1889 }, { "epoch": 2.086140254003313, "grad_norm": 1.5951980352401733, "learning_rate": 5.6610000000000005e-06, "loss": 0.1337, "step": 1890 }, { "epoch": 2.0872446162341247, "grad_norm": 1.051344871520996, "learning_rate": 5.664e-06, "loss": 0.1067, "step": 1891 }, { "epoch": 2.0883489784649365, "grad_norm": 1.2085750102996826, "learning_rate": 5.667e-06, "loss": 0.1296, "step": 1892 }, { "epoch": 2.0894533406957483, "grad_norm": 1.3564348220825195, "learning_rate": 5.67e-06, "loss": 0.0836, "step": 1893 }, { "epoch": 2.09055770292656, "grad_norm": 4.474702835083008, "learning_rate": 5.673e-06, "loss": 0.129, "step": 1894 }, { "epoch": 2.0916620651573714, "grad_norm": 2.2171380519866943, "learning_rate": 5.676e-06, "loss": 0.1443, "step": 1895 }, { 
"epoch": 2.092766427388183, "grad_norm": 1.312310814857483, "learning_rate": 5.679e-06, "loss": 0.0918, "step": 1896 }, { "epoch": 2.093870789618995, "grad_norm": 3.5585274696350098, "learning_rate": 5.682000000000001e-06, "loss": 0.1028, "step": 1897 }, { "epoch": 2.0949751518498068, "grad_norm": 2.005549430847168, "learning_rate": 5.685e-06, "loss": 0.1232, "step": 1898 }, { "epoch": 2.0960795140806185, "grad_norm": 1.168122410774231, "learning_rate": 5.688e-06, "loss": 0.1006, "step": 1899 }, { "epoch": 2.0971838763114303, "grad_norm": 1.4478447437286377, "learning_rate": 5.691e-06, "loss": 0.1835, "step": 1900 }, { "epoch": 2.0982882385422417, "grad_norm": 1.1645276546478271, "learning_rate": 5.694e-06, "loss": 0.0912, "step": 1901 }, { "epoch": 2.0993926007730535, "grad_norm": 1.501979112625122, "learning_rate": 5.697000000000001e-06, "loss": 0.1293, "step": 1902 }, { "epoch": 2.1004969630038652, "grad_norm": 1.7311400175094604, "learning_rate": 5.7000000000000005e-06, "loss": 0.1008, "step": 1903 }, { "epoch": 2.101601325234677, "grad_norm": 1.4565987586975098, "learning_rate": 5.703e-06, "loss": 0.1332, "step": 1904 }, { "epoch": 2.102705687465489, "grad_norm": 1.6974250078201294, "learning_rate": 5.706e-06, "loss": 0.1393, "step": 1905 }, { "epoch": 2.1038100496963006, "grad_norm": 1.4128152132034302, "learning_rate": 5.709e-06, "loss": 0.1289, "step": 1906 }, { "epoch": 2.104914411927112, "grad_norm": 2.0189478397369385, "learning_rate": 5.7120000000000005e-06, "loss": 0.0997, "step": 1907 }, { "epoch": 2.1060187741579237, "grad_norm": 1.5890041589736938, "learning_rate": 5.715e-06, "loss": 0.1356, "step": 1908 }, { "epoch": 2.1071231363887355, "grad_norm": 2.544367790222168, "learning_rate": 5.718e-06, "loss": 0.169, "step": 1909 }, { "epoch": 2.1082274986195473, "grad_norm": 1.984700322151184, "learning_rate": 5.721000000000001e-06, "loss": 0.1398, "step": 1910 }, { "epoch": 2.109331860850359, "grad_norm": 2.0308027267456055, "learning_rate": 5.724e-06, 
"loss": 0.1674, "step": 1911 }, { "epoch": 2.1104362230811704, "grad_norm": 1.4060554504394531, "learning_rate": 5.7269999999999995e-06, "loss": 0.1078, "step": 1912 }, { "epoch": 2.111540585311982, "grad_norm": 4.849432945251465, "learning_rate": 5.73e-06, "loss": 0.7524, "step": 1913 }, { "epoch": 2.112644947542794, "grad_norm": 1.68555748462677, "learning_rate": 5.733e-06, "loss": 0.4872, "step": 1914 }, { "epoch": 2.1137493097736058, "grad_norm": 1.6106401681900024, "learning_rate": 5.736000000000001e-06, "loss": 0.3605, "step": 1915 }, { "epoch": 2.1148536720044175, "grad_norm": 1.661150574684143, "learning_rate": 5.7390000000000004e-06, "loss": 0.3582, "step": 1916 }, { "epoch": 2.1159580342352293, "grad_norm": 2.6580541133880615, "learning_rate": 5.741999999999999e-06, "loss": 0.3601, "step": 1917 }, { "epoch": 2.1170623964660407, "grad_norm": 1.8517944812774658, "learning_rate": 5.745e-06, "loss": 0.4341, "step": 1918 }, { "epoch": 2.1181667586968524, "grad_norm": 1.3757073879241943, "learning_rate": 5.748e-06, "loss": 0.2306, "step": 1919 }, { "epoch": 2.1192711209276642, "grad_norm": 1.4300633668899536, "learning_rate": 5.7510000000000005e-06, "loss": 0.1781, "step": 1920 }, { "epoch": 2.120375483158476, "grad_norm": 0.8978107571601868, "learning_rate": 5.754e-06, "loss": 0.1051, "step": 1921 }, { "epoch": 2.121479845389288, "grad_norm": 2.1883437633514404, "learning_rate": 5.757e-06, "loss": 0.1348, "step": 1922 }, { "epoch": 2.1225842076200996, "grad_norm": 1.5348212718963623, "learning_rate": 5.76e-06, "loss": 0.0908, "step": 1923 }, { "epoch": 2.123688569850911, "grad_norm": 0.9828693270683289, "learning_rate": 5.763e-06, "loss": 0.0976, "step": 1924 }, { "epoch": 2.1247929320817227, "grad_norm": 1.1724907159805298, "learning_rate": 5.766e-06, "loss": 0.0989, "step": 1925 }, { "epoch": 2.1258972943125345, "grad_norm": 2.602374792098999, "learning_rate": 5.769e-06, "loss": 0.0918, "step": 1926 }, { "epoch": 2.1270016565433463, "grad_norm": 
1.3653419017791748, "learning_rate": 5.772e-06, "loss": 0.0925, "step": 1927 }, { "epoch": 2.128106018774158, "grad_norm": 1.5782978534698486, "learning_rate": 5.775000000000001e-06, "loss": 0.0932, "step": 1928 }, { "epoch": 2.1292103810049694, "grad_norm": 0.9850481152534485, "learning_rate": 5.7779999999999996e-06, "loss": 0.0955, "step": 1929 }, { "epoch": 2.130314743235781, "grad_norm": 1.2789350748062134, "learning_rate": 5.781e-06, "loss": 0.1099, "step": 1930 }, { "epoch": 2.131419105466593, "grad_norm": 1.5726723670959473, "learning_rate": 5.784e-06, "loss": 0.098, "step": 1931 }, { "epoch": 2.1325234676974048, "grad_norm": 1.567274808883667, "learning_rate": 5.787e-06, "loss": 0.1044, "step": 1932 }, { "epoch": 2.1336278299282165, "grad_norm": 1.0952085256576538, "learning_rate": 5.7900000000000005e-06, "loss": 0.1007, "step": 1933 }, { "epoch": 2.1347321921590283, "grad_norm": 1.2528736591339111, "learning_rate": 5.793e-06, "loss": 0.0954, "step": 1934 }, { "epoch": 2.13583655438984, "grad_norm": 1.2732571363449097, "learning_rate": 5.796e-06, "loss": 0.0995, "step": 1935 }, { "epoch": 2.1369409166206514, "grad_norm": 1.8470247983932495, "learning_rate": 5.799e-06, "loss": 0.1096, "step": 1936 }, { "epoch": 2.1380452788514632, "grad_norm": 1.7828142642974854, "learning_rate": 5.802e-06, "loss": 0.0949, "step": 1937 }, { "epoch": 2.139149641082275, "grad_norm": 1.162078619003296, "learning_rate": 5.805e-06, "loss": 0.1172, "step": 1938 }, { "epoch": 2.140254003313087, "grad_norm": 1.7344058752059937, "learning_rate": 5.808e-06, "loss": 0.1009, "step": 1939 }, { "epoch": 2.1413583655438986, "grad_norm": 1.368398904800415, "learning_rate": 5.811000000000001e-06, "loss": 0.1392, "step": 1940 }, { "epoch": 2.14246272777471, "grad_norm": 1.087922215461731, "learning_rate": 5.814e-06, "loss": 0.0913, "step": 1941 }, { "epoch": 2.1435670900055217, "grad_norm": 1.7130022048950195, "learning_rate": 5.8169999999999995e-06, "loss": 0.141, "step": 1942 }, { "epoch": 
2.1446714522363335, "grad_norm": 1.6609009504318237, "learning_rate": 5.82e-06, "loss": 0.1013, "step": 1943 }, { "epoch": 2.1457758144671453, "grad_norm": 1.2775241136550903, "learning_rate": 5.823e-06, "loss": 0.0915, "step": 1944 }, { "epoch": 2.146880176697957, "grad_norm": 1.927646279335022, "learning_rate": 5.826000000000001e-06, "loss": 0.1311, "step": 1945 }, { "epoch": 2.147984538928769, "grad_norm": 1.8636362552642822, "learning_rate": 5.8290000000000004e-06, "loss": 0.0998, "step": 1946 }, { "epoch": 2.14908890115958, "grad_norm": 1.6897103786468506, "learning_rate": 5.832e-06, "loss": 0.1113, "step": 1947 }, { "epoch": 2.150193263390392, "grad_norm": 1.3783762454986572, "learning_rate": 5.835e-06, "loss": 0.133, "step": 1948 }, { "epoch": 2.1512976256212037, "grad_norm": 1.3562713861465454, "learning_rate": 5.838e-06, "loss": 0.0917, "step": 1949 }, { "epoch": 2.1524019878520155, "grad_norm": 1.4079562425613403, "learning_rate": 5.8410000000000005e-06, "loss": 0.1039, "step": 1950 }, { "epoch": 2.1535063500828273, "grad_norm": 1.2915571928024292, "learning_rate": 5.844e-06, "loss": 0.1277, "step": 1951 }, { "epoch": 2.154610712313639, "grad_norm": 1.9186965227127075, "learning_rate": 5.847e-06, "loss": 0.1026, "step": 1952 }, { "epoch": 2.1557150745444504, "grad_norm": 1.6479198932647705, "learning_rate": 5.850000000000001e-06, "loss": 0.1085, "step": 1953 }, { "epoch": 2.156819436775262, "grad_norm": 2.8564040660858154, "learning_rate": 5.853e-06, "loss": 0.1059, "step": 1954 }, { "epoch": 2.157923799006074, "grad_norm": 1.503143072128296, "learning_rate": 5.856e-06, "loss": 0.1092, "step": 1955 }, { "epoch": 2.159028161236886, "grad_norm": 1.559287667274475, "learning_rate": 5.859e-06, "loss": 0.1159, "step": 1956 }, { "epoch": 2.1601325234676976, "grad_norm": 1.8438427448272705, "learning_rate": 5.862e-06, "loss": 0.1438, "step": 1957 }, { "epoch": 2.161236885698509, "grad_norm": 1.4729281663894653, "learning_rate": 5.865000000000001e-06, "loss": 
0.1083, "step": 1958 }, { "epoch": 2.1623412479293207, "grad_norm": 1.1457557678222656, "learning_rate": 5.868e-06, "loss": 0.0799, "step": 1959 }, { "epoch": 2.1634456101601325, "grad_norm": 1.6735942363739014, "learning_rate": 5.871e-06, "loss": 0.1169, "step": 1960 }, { "epoch": 2.1645499723909443, "grad_norm": 2.1786630153656006, "learning_rate": 5.874e-06, "loss": 0.1344, "step": 1961 }, { "epoch": 2.165654334621756, "grad_norm": 1.5648478269577026, "learning_rate": 5.877e-06, "loss": 0.1052, "step": 1962 }, { "epoch": 2.166758696852568, "grad_norm": 3.118316411972046, "learning_rate": 5.8800000000000005e-06, "loss": 0.7479, "step": 1963 }, { "epoch": 2.167863059083379, "grad_norm": 2.1675450801849365, "learning_rate": 5.883e-06, "loss": 0.5127, "step": 1964 }, { "epoch": 2.168967421314191, "grad_norm": 2.0989787578582764, "learning_rate": 5.886000000000001e-06, "loss": 0.5182, "step": 1965 }, { "epoch": 2.1700717835450027, "grad_norm": 2.734524726867676, "learning_rate": 5.889e-06, "loss": 0.4185, "step": 1966 }, { "epoch": 2.1711761457758145, "grad_norm": 2.0414299964904785, "learning_rate": 5.892e-06, "loss": 0.3691, "step": 1967 }, { "epoch": 2.1722805080066263, "grad_norm": 2.1120386123657227, "learning_rate": 5.895e-06, "loss": 0.3385, "step": 1968 }, { "epoch": 2.173384870237438, "grad_norm": 1.132738471031189, "learning_rate": 5.898e-06, "loss": 0.2175, "step": 1969 }, { "epoch": 2.1744892324682494, "grad_norm": 1.9402074813842773, "learning_rate": 5.901000000000001e-06, "loss": 0.2285, "step": 1970 }, { "epoch": 2.175593594699061, "grad_norm": 1.643932819366455, "learning_rate": 5.9040000000000006e-06, "loss": 0.1673, "step": 1971 }, { "epoch": 2.176697956929873, "grad_norm": 1.7229174375534058, "learning_rate": 5.9069999999999995e-06, "loss": 0.1686, "step": 1972 }, { "epoch": 2.177802319160685, "grad_norm": 1.0916827917099, "learning_rate": 5.91e-06, "loss": 0.1723, "step": 1973 }, { "epoch": 2.1789066813914966, "grad_norm": 0.9582756161689758, 
"learning_rate": 5.913e-06, "loss": 0.1523, "step": 1974 }, { "epoch": 2.180011043622308, "grad_norm": 1.0639866590499878, "learning_rate": 5.916e-06, "loss": 0.1087, "step": 1975 }, { "epoch": 2.1811154058531197, "grad_norm": 1.6977102756500244, "learning_rate": 5.919e-06, "loss": 0.106, "step": 1976 }, { "epoch": 2.1822197680839315, "grad_norm": 1.233879804611206, "learning_rate": 5.922e-06, "loss": 0.0941, "step": 1977 }, { "epoch": 2.1833241303147433, "grad_norm": 1.3257954120635986, "learning_rate": 5.925e-06, "loss": 0.1252, "step": 1978 }, { "epoch": 2.184428492545555, "grad_norm": 1.2981300354003906, "learning_rate": 5.928e-06, "loss": 0.0858, "step": 1979 }, { "epoch": 2.185532854776367, "grad_norm": 1.1149438619613647, "learning_rate": 5.931e-06, "loss": 0.0788, "step": 1980 }, { "epoch": 2.186637217007178, "grad_norm": 1.1690253019332886, "learning_rate": 5.934e-06, "loss": 0.1109, "step": 1981 }, { "epoch": 2.18774157923799, "grad_norm": 1.313481092453003, "learning_rate": 5.937e-06, "loss": 0.0869, "step": 1982 }, { "epoch": 2.1888459414688017, "grad_norm": 1.1626766920089722, "learning_rate": 5.940000000000001e-06, "loss": 0.0871, "step": 1983 }, { "epoch": 2.1899503036996135, "grad_norm": 1.3910436630249023, "learning_rate": 5.943e-06, "loss": 0.0906, "step": 1984 }, { "epoch": 2.1910546659304253, "grad_norm": 2.2945849895477295, "learning_rate": 5.9459999999999995e-06, "loss": 0.1003, "step": 1985 }, { "epoch": 2.192159028161237, "grad_norm": 0.9458307027816772, "learning_rate": 5.949e-06, "loss": 0.0934, "step": 1986 }, { "epoch": 2.1932633903920484, "grad_norm": 1.8096791505813599, "learning_rate": 5.952e-06, "loss": 0.104, "step": 1987 }, { "epoch": 2.19436775262286, "grad_norm": 4.444669723510742, "learning_rate": 5.955000000000001e-06, "loss": 0.1863, "step": 1988 }, { "epoch": 2.195472114853672, "grad_norm": 7.783917427062988, "learning_rate": 5.958e-06, "loss": 0.1108, "step": 1989 }, { "epoch": 2.196576477084484, "grad_norm": 
1.1930190324783325, "learning_rate": 5.961e-06, "loss": 0.1159, "step": 1990 }, { "epoch": 2.1976808393152956, "grad_norm": 2.317873239517212, "learning_rate": 5.964e-06, "loss": 0.0665, "step": 1991 }, { "epoch": 2.198785201546107, "grad_norm": 1.4461369514465332, "learning_rate": 5.967e-06, "loss": 0.1249, "step": 1992 }, { "epoch": 2.1998895637769187, "grad_norm": 1.6569947004318237, "learning_rate": 5.9700000000000004e-06, "loss": 0.1068, "step": 1993 }, { "epoch": 2.2009939260077305, "grad_norm": 2.2530813217163086, "learning_rate": 5.973e-06, "loss": 0.1243, "step": 1994 }, { "epoch": 2.2020982882385423, "grad_norm": 1.753528356552124, "learning_rate": 5.976e-06, "loss": 0.1048, "step": 1995 }, { "epoch": 2.203202650469354, "grad_norm": 2.6632165908813477, "learning_rate": 5.979000000000001e-06, "loss": 0.0858, "step": 1996 }, { "epoch": 2.204307012700166, "grad_norm": 1.6375726461410522, "learning_rate": 5.982e-06, "loss": 0.1043, "step": 1997 }, { "epoch": 2.205411374930977, "grad_norm": 1.2712324857711792, "learning_rate": 5.985e-06, "loss": 0.1103, "step": 1998 }, { "epoch": 2.206515737161789, "grad_norm": 1.1989914178848267, "learning_rate": 5.988e-06, "loss": 0.0858, "step": 1999 }, { "epoch": 2.2076200993926007, "grad_norm": 2.0873000621795654, "learning_rate": 5.991e-06, "loss": 0.1101, "step": 2000 }, { "epoch": 2.2076200993926007, "eval_cer": 0.16909039064727688, "eval_loss": 0.6336506009101868, "eval_runtime": 15.9177, "eval_samples_per_second": 19.098, "eval_steps_per_second": 0.628, "eval_wer": 0.5731005372217959, "step": 2000 }, { "epoch": 2.2087244616234125, "grad_norm": 2.169027328491211, "learning_rate": 5.9940000000000005e-06, "loss": 0.112, "step": 2001 }, { "epoch": 2.2098288238542243, "grad_norm": 1.292604923248291, "learning_rate": 5.997e-06, "loss": 0.0926, "step": 2002 }, { "epoch": 2.210933186085036, "grad_norm": 1.9323512315750122, "learning_rate": 6e-06, "loss": 0.0989, "step": 2003 }, { "epoch": 2.2120375483158474, "grad_norm": 
2.086697816848755, "learning_rate": 6.003e-06, "loss": 0.1298, "step": 2004 }, { "epoch": 2.213141910546659, "grad_norm": 1.6602387428283691, "learning_rate": 6.006e-06, "loss": 0.1139, "step": 2005 }, { "epoch": 2.214246272777471, "grad_norm": 1.7613871097564697, "learning_rate": 6.009e-06, "loss": 0.1251, "step": 2006 }, { "epoch": 2.2153506350082828, "grad_norm": 1.2673425674438477, "learning_rate": 6.012e-06, "loss": 0.1049, "step": 2007 }, { "epoch": 2.2164549972390946, "grad_norm": 1.6484739780426025, "learning_rate": 6.015000000000001e-06, "loss": 0.1334, "step": 2008 }, { "epoch": 2.217559359469906, "grad_norm": 1.8669925928115845, "learning_rate": 6.018e-06, "loss": 0.131, "step": 2009 }, { "epoch": 2.2186637217007177, "grad_norm": 2.7714273929595947, "learning_rate": 6.021e-06, "loss": 0.1029, "step": 2010 }, { "epoch": 2.2197680839315295, "grad_norm": 2.1715238094329834, "learning_rate": 6.024e-06, "loss": 0.1394, "step": 2011 }, { "epoch": 2.2208724461623413, "grad_norm": 2.54402232170105, "learning_rate": 6.027e-06, "loss": 0.1603, "step": 2012 }, { "epoch": 2.221976808393153, "grad_norm": 2.3578574657440186, "learning_rate": 6.030000000000001e-06, "loss": 0.5811, "step": 2013 }, { "epoch": 2.223081170623965, "grad_norm": 1.9042143821716309, "learning_rate": 6.0330000000000005e-06, "loss": 0.5195, "step": 2014 }, { "epoch": 2.224185532854776, "grad_norm": 1.7755593061447144, "learning_rate": 6.0359999999999995e-06, "loss": 0.5653, "step": 2015 }, { "epoch": 2.225289895085588, "grad_norm": 1.6889865398406982, "learning_rate": 6.039e-06, "loss": 0.4163, "step": 2016 }, { "epoch": 2.2263942573163997, "grad_norm": 2.0907206535339355, "learning_rate": 6.042e-06, "loss": 0.4365, "step": 2017 }, { "epoch": 2.2274986195472115, "grad_norm": 1.5428690910339355, "learning_rate": 6.0450000000000006e-06, "loss": 0.3971, "step": 2018 }, { "epoch": 2.2286029817780233, "grad_norm": 1.860230803489685, "learning_rate": 6.048e-06, "loss": 0.2358, "step": 2019 }, { 
"epoch": 2.229707344008835, "grad_norm": 1.288303256034851, "learning_rate": 6.051e-06, "loss": 0.3203, "step": 2020 }, { "epoch": 2.2308117062396464, "grad_norm": 1.1729081869125366, "learning_rate": 6.054e-06, "loss": 0.2197, "step": 2021 }, { "epoch": 2.231916068470458, "grad_norm": 1.172668218612671, "learning_rate": 6.057e-06, "loss": 0.1586, "step": 2022 }, { "epoch": 2.23302043070127, "grad_norm": 1.0010986328125, "learning_rate": 6.0600000000000004e-06, "loss": 0.1288, "step": 2023 }, { "epoch": 2.2341247929320818, "grad_norm": 1.3563802242279053, "learning_rate": 6.063e-06, "loss": 0.1596, "step": 2024 }, { "epoch": 2.2352291551628936, "grad_norm": 1.732088565826416, "learning_rate": 6.066e-06, "loss": 0.0787, "step": 2025 }, { "epoch": 2.2363335173937053, "grad_norm": 8.487462997436523, "learning_rate": 6.069000000000001e-06, "loss": 0.0986, "step": 2026 }, { "epoch": 2.2374378796245167, "grad_norm": 1.743210792541504, "learning_rate": 6.072e-06, "loss": 0.1, "step": 2027 }, { "epoch": 2.2385422418553285, "grad_norm": 1.1743488311767578, "learning_rate": 6.075e-06, "loss": 0.0905, "step": 2028 }, { "epoch": 2.2396466040861402, "grad_norm": 1.8581501245498657, "learning_rate": 6.078e-06, "loss": 0.0691, "step": 2029 }, { "epoch": 2.240750966316952, "grad_norm": 1.4294092655181885, "learning_rate": 6.081e-06, "loss": 0.1326, "step": 2030 }, { "epoch": 2.241855328547764, "grad_norm": 1.1195588111877441, "learning_rate": 6.0840000000000005e-06, "loss": 0.0917, "step": 2031 }, { "epoch": 2.2429596907785756, "grad_norm": 2.0700840950012207, "learning_rate": 6.087e-06, "loss": 0.1048, "step": 2032 }, { "epoch": 2.244064053009387, "grad_norm": 1.0314353704452515, "learning_rate": 6.090000000000001e-06, "loss": 0.1128, "step": 2033 }, { "epoch": 2.2451684152401987, "grad_norm": 5.620008945465088, "learning_rate": 6.093e-06, "loss": 0.1042, "step": 2034 }, { "epoch": 2.2462727774710105, "grad_norm": 2.553497076034546, "learning_rate": 6.096e-06, "loss": 0.0975, 
"step": 2035 }, { "epoch": 2.2473771397018223, "grad_norm": 1.1940914392471313, "learning_rate": 6.099e-06, "loss": 0.0705, "step": 2036 }, { "epoch": 2.248481501932634, "grad_norm": 1.4470645189285278, "learning_rate": 6.102e-06, "loss": 0.1078, "step": 2037 }, { "epoch": 2.2495858641634454, "grad_norm": 1.5787018537521362, "learning_rate": 6.105e-06, "loss": 0.0996, "step": 2038 }, { "epoch": 2.250690226394257, "grad_norm": 1.5895438194274902, "learning_rate": 6.108000000000001e-06, "loss": 0.1099, "step": 2039 }, { "epoch": 2.251794588625069, "grad_norm": 1.339138150215149, "learning_rate": 6.111e-06, "loss": 0.075, "step": 2040 }, { "epoch": 2.2528989508558808, "grad_norm": 1.1132795810699463, "learning_rate": 6.114e-06, "loss": 0.0691, "step": 2041 }, { "epoch": 2.2540033130866926, "grad_norm": 2.214632749557495, "learning_rate": 6.117e-06, "loss": 0.0916, "step": 2042 }, { "epoch": 2.2551076753175043, "grad_norm": 2.0496959686279297, "learning_rate": 6.12e-06, "loss": 0.1011, "step": 2043 }, { "epoch": 2.2562120375483157, "grad_norm": 1.621727466583252, "learning_rate": 6.1230000000000005e-06, "loss": 0.1261, "step": 2044 }, { "epoch": 2.2573163997791275, "grad_norm": 1.924195647239685, "learning_rate": 6.126e-06, "loss": 0.1089, "step": 2045 }, { "epoch": 2.2584207620099392, "grad_norm": 1.7390772104263306, "learning_rate": 6.129e-06, "loss": 0.1008, "step": 2046 }, { "epoch": 2.259525124240751, "grad_norm": 1.729971170425415, "learning_rate": 6.132e-06, "loss": 0.1404, "step": 2047 }, { "epoch": 2.260629486471563, "grad_norm": 1.6432926654815674, "learning_rate": 6.135e-06, "loss": 0.0935, "step": 2048 }, { "epoch": 2.2617338487023746, "grad_norm": 1.250515103340149, "learning_rate": 6.138e-06, "loss": 0.0989, "step": 2049 }, { "epoch": 2.262838210933186, "grad_norm": 1.2902065515518188, "learning_rate": 6.141e-06, "loss": 0.0931, "step": 2050 }, { "epoch": 2.2639425731639977, "grad_norm": 1.617891788482666, "learning_rate": 6.144000000000001e-06, "loss": 
0.1045, "step": 2051 }, { "epoch": 2.2650469353948095, "grad_norm": 1.4732519388198853, "learning_rate": 6.147e-06, "loss": 0.1034, "step": 2052 }, { "epoch": 2.2661512976256213, "grad_norm": 1.30794095993042, "learning_rate": 6.1499999999999996e-06, "loss": 0.1195, "step": 2053 }, { "epoch": 2.267255659856433, "grad_norm": 1.3346967697143555, "learning_rate": 6.153e-06, "loss": 0.0835, "step": 2054 }, { "epoch": 2.2683600220872444, "grad_norm": 1.7126686573028564, "learning_rate": 6.156e-06, "loss": 0.1117, "step": 2055 }, { "epoch": 2.269464384318056, "grad_norm": 1.1676411628723145, "learning_rate": 6.159000000000001e-06, "loss": 0.1145, "step": 2056 }, { "epoch": 2.270568746548868, "grad_norm": 1.3976161479949951, "learning_rate": 6.1620000000000005e-06, "loss": 0.0912, "step": 2057 }, { "epoch": 2.2716731087796798, "grad_norm": 2.0645718574523926, "learning_rate": 6.164999999999999e-06, "loss": 0.1054, "step": 2058 }, { "epoch": 2.2727774710104915, "grad_norm": 2.9307467937469482, "learning_rate": 6.168e-06, "loss": 0.1089, "step": 2059 }, { "epoch": 2.2738818332413033, "grad_norm": 1.5215603113174438, "learning_rate": 6.171e-06, "loss": 0.0974, "step": 2060 }, { "epoch": 2.2749861954721147, "grad_norm": 1.5894166231155396, "learning_rate": 6.1740000000000005e-06, "loss": 0.1211, "step": 2061 }, { "epoch": 2.2760905577029265, "grad_norm": 1.5443400144577026, "learning_rate": 6.177e-06, "loss": 0.1252, "step": 2062 }, { "epoch": 2.2771949199337382, "grad_norm": 5.114170074462891, "learning_rate": 6.18e-06, "loss": 0.5874, "step": 2063 }, { "epoch": 2.27829928216455, "grad_norm": 1.6119414567947388, "learning_rate": 6.183e-06, "loss": 0.4685, "step": 2064 }, { "epoch": 2.279403644395362, "grad_norm": 1.689845323562622, "learning_rate": 6.186e-06, "loss": 0.4888, "step": 2065 }, { "epoch": 2.2805080066261736, "grad_norm": 1.5852611064910889, "learning_rate": 6.189e-06, "loss": 0.3672, "step": 2066 }, { "epoch": 2.281612368856985, "grad_norm": 2.0155255794525146, 
"learning_rate": 6.192e-06, "loss": 0.3238, "step": 2067 }, { "epoch": 2.2827167310877967, "grad_norm": 2.061055898666382, "learning_rate": 6.195e-06, "loss": 0.3895, "step": 2068 }, { "epoch": 2.2838210933186085, "grad_norm": 1.360662579536438, "learning_rate": 6.198000000000001e-06, "loss": 0.2567, "step": 2069 }, { "epoch": 2.2849254555494203, "grad_norm": 1.5616410970687866, "learning_rate": 6.201e-06, "loss": 0.1858, "step": 2070 }, { "epoch": 2.286029817780232, "grad_norm": 1.0167369842529297, "learning_rate": 6.204e-06, "loss": 0.1281, "step": 2071 }, { "epoch": 2.2871341800110434, "grad_norm": 0.9095029234886169, "learning_rate": 6.207e-06, "loss": 0.124, "step": 2072 }, { "epoch": 2.288238542241855, "grad_norm": 1.0701298713684082, "learning_rate": 6.21e-06, "loss": 0.1109, "step": 2073 }, { "epoch": 2.289342904472667, "grad_norm": 1.2261731624603271, "learning_rate": 6.2130000000000005e-06, "loss": 0.0879, "step": 2074 }, { "epoch": 2.2904472667034788, "grad_norm": 1.8547030687332153, "learning_rate": 6.216e-06, "loss": 0.0686, "step": 2075 }, { "epoch": 2.2915516289342905, "grad_norm": 1.2666780948638916, "learning_rate": 6.219000000000001e-06, "loss": 0.1226, "step": 2076 }, { "epoch": 2.2926559911651023, "grad_norm": 0.8570523858070374, "learning_rate": 6.222e-06, "loss": 0.082, "step": 2077 }, { "epoch": 2.293760353395914, "grad_norm": 1.1916953325271606, "learning_rate": 6.225e-06, "loss": 0.0899, "step": 2078 }, { "epoch": 2.2948647156267254, "grad_norm": 0.9664155840873718, "learning_rate": 6.228e-06, "loss": 0.098, "step": 2079 }, { "epoch": 2.2959690778575372, "grad_norm": 0.9551731944084167, "learning_rate": 6.231e-06, "loss": 0.0766, "step": 2080 }, { "epoch": 2.297073440088349, "grad_norm": 1.0346934795379639, "learning_rate": 6.234000000000001e-06, "loss": 0.0942, "step": 2081 }, { "epoch": 2.298177802319161, "grad_norm": 1.0599218606948853, "learning_rate": 6.237000000000001e-06, "loss": 0.1291, "step": 2082 }, { "epoch": 2.2992821645499726, 
"grad_norm": 1.6932791471481323, "learning_rate": 6.2399999999999995e-06, "loss": 0.101, "step": 2083 }, { "epoch": 2.300386526780784, "grad_norm": 1.03407883644104, "learning_rate": 6.243e-06, "loss": 0.0774, "step": 2084 }, { "epoch": 2.3014908890115957, "grad_norm": 1.0215504169464111, "learning_rate": 6.246e-06, "loss": 0.0806, "step": 2085 }, { "epoch": 2.3025952512424075, "grad_norm": 1.2270565032958984, "learning_rate": 6.249000000000001e-06, "loss": 0.0878, "step": 2086 }, { "epoch": 2.3036996134732193, "grad_norm": 1.1400710344314575, "learning_rate": 6.2520000000000004e-06, "loss": 0.0901, "step": 2087 }, { "epoch": 2.304803975704031, "grad_norm": 1.7576019763946533, "learning_rate": 6.255e-06, "loss": 0.0925, "step": 2088 }, { "epoch": 2.3059083379348424, "grad_norm": 1.0008636713027954, "learning_rate": 6.258e-06, "loss": 0.0782, "step": 2089 }, { "epoch": 2.307012700165654, "grad_norm": 1.6498401165008545, "learning_rate": 6.261e-06, "loss": 0.1087, "step": 2090 }, { "epoch": 2.308117062396466, "grad_norm": 3.936826467514038, "learning_rate": 6.2640000000000005e-06, "loss": 0.108, "step": 2091 }, { "epoch": 2.3092214246272778, "grad_norm": 1.6269603967666626, "learning_rate": 6.267e-06, "loss": 0.0896, "step": 2092 }, { "epoch": 2.3103257868580895, "grad_norm": 1.597161054611206, "learning_rate": 6.27e-06, "loss": 0.1042, "step": 2093 }, { "epoch": 2.3114301490889013, "grad_norm": 1.56869637966156, "learning_rate": 6.273000000000001e-06, "loss": 0.1221, "step": 2094 }, { "epoch": 2.312534511319713, "grad_norm": 1.33024263381958, "learning_rate": 6.276e-06, "loss": 0.1175, "step": 2095 }, { "epoch": 2.3136388735505244, "grad_norm": 1.3907361030578613, "learning_rate": 6.279e-06, "loss": 0.1002, "step": 2096 }, { "epoch": 2.3147432357813362, "grad_norm": 1.3666874170303345, "learning_rate": 6.282e-06, "loss": 0.1181, "step": 2097 }, { "epoch": 2.315847598012148, "grad_norm": 1.2282770872116089, "learning_rate": 6.285e-06, "loss": 0.0929, "step": 2098 }, 
{ "epoch": 2.31695196024296, "grad_norm": 2.1182308197021484, "learning_rate": 6.288000000000001e-06, "loss": 0.094, "step": 2099 }, { "epoch": 2.3180563224737716, "grad_norm": 1.4744027853012085, "learning_rate": 6.291e-06, "loss": 0.0884, "step": 2100 }, { "epoch": 2.319160684704583, "grad_norm": 1.244102954864502, "learning_rate": 6.293999999999999e-06, "loss": 0.1282, "step": 2101 }, { "epoch": 2.3202650469353947, "grad_norm": 1.1254452466964722, "learning_rate": 6.297e-06, "loss": 0.0968, "step": 2102 }, { "epoch": 2.3213694091662065, "grad_norm": 1.842201828956604, "learning_rate": 6.3e-06, "loss": 0.1385, "step": 2103 }, { "epoch": 2.3224737713970183, "grad_norm": 1.542518138885498, "learning_rate": 6.3030000000000005e-06, "loss": 0.1269, "step": 2104 }, { "epoch": 2.32357813362783, "grad_norm": 1.5702463388442993, "learning_rate": 6.306e-06, "loss": 0.1215, "step": 2105 }, { "epoch": 2.3246824958586414, "grad_norm": 1.6937766075134277, "learning_rate": 6.309e-06, "loss": 0.1277, "step": 2106 }, { "epoch": 2.325786858089453, "grad_norm": 1.1394554376602173, "learning_rate": 6.312e-06, "loss": 0.1137, "step": 2107 }, { "epoch": 2.326891220320265, "grad_norm": 1.548801302909851, "learning_rate": 6.315e-06, "loss": 0.0973, "step": 2108 }, { "epoch": 2.3279955825510767, "grad_norm": 1.7216906547546387, "learning_rate": 6.318e-06, "loss": 0.0973, "step": 2109 }, { "epoch": 2.3290999447818885, "grad_norm": 1.8623944520950317, "learning_rate": 6.321e-06, "loss": 0.1137, "step": 2110 }, { "epoch": 2.3302043070127003, "grad_norm": 1.2913450002670288, "learning_rate": 6.324e-06, "loss": 0.0955, "step": 2111 }, { "epoch": 2.331308669243512, "grad_norm": 1.6695036888122559, "learning_rate": 6.327000000000001e-06, "loss": 0.138, "step": 2112 }, { "epoch": 2.3324130314743234, "grad_norm": 1.4465196132659912, "learning_rate": 6.3299999999999995e-06, "loss": 0.4489, "step": 2113 }, { "epoch": 2.3335173937051352, "grad_norm": 2.163841485977173, "learning_rate": 6.333e-06, 
"loss": 0.6059, "step": 2114 }, { "epoch": 2.334621755935947, "grad_norm": 1.8777689933776855, "learning_rate": 6.336e-06, "loss": 0.4488, "step": 2115 }, { "epoch": 2.335726118166759, "grad_norm": 2.366431713104248, "learning_rate": 6.339e-06, "loss": 0.3966, "step": 2116 }, { "epoch": 2.3368304803975706, "grad_norm": 2.188671827316284, "learning_rate": 6.3420000000000004e-06, "loss": 0.3714, "step": 2117 }, { "epoch": 2.337934842628382, "grad_norm": 1.8069045543670654, "learning_rate": 6.345e-06, "loss": 0.2817, "step": 2118 }, { "epoch": 2.3390392048591937, "grad_norm": 1.2829368114471436, "learning_rate": 6.348000000000001e-06, "loss": 0.2494, "step": 2119 }, { "epoch": 2.3401435670900055, "grad_norm": 1.2872169017791748, "learning_rate": 6.351e-06, "loss": 0.2988, "step": 2120 }, { "epoch": 2.3412479293208173, "grad_norm": 1.0056990385055542, "learning_rate": 6.354e-06, "loss": 0.1394, "step": 2121 }, { "epoch": 2.342352291551629, "grad_norm": 1.27435302734375, "learning_rate": 6.357e-06, "loss": 0.1283, "step": 2122 }, { "epoch": 2.3434566537824404, "grad_norm": 1.4741981029510498, "learning_rate": 6.36e-06, "loss": 0.0908, "step": 2123 }, { "epoch": 2.344561016013252, "grad_norm": 1.2090224027633667, "learning_rate": 6.363000000000001e-06, "loss": 0.1766, "step": 2124 }, { "epoch": 2.345665378244064, "grad_norm": 1.051200032234192, "learning_rate": 6.3660000000000005e-06, "loss": 0.1084, "step": 2125 }, { "epoch": 2.3467697404748757, "grad_norm": 0.9730824828147888, "learning_rate": 6.3689999999999995e-06, "loss": 0.0918, "step": 2126 }, { "epoch": 2.3478741027056875, "grad_norm": 1.8600863218307495, "learning_rate": 6.372e-06, "loss": 0.1124, "step": 2127 }, { "epoch": 2.3489784649364993, "grad_norm": 0.9681894183158875, "learning_rate": 6.375e-06, "loss": 0.0663, "step": 2128 }, { "epoch": 2.350082827167311, "grad_norm": 1.4079035520553589, "learning_rate": 6.378000000000001e-06, "loss": 0.102, "step": 2129 }, { "epoch": 2.3511871893981224, "grad_norm": 
1.434700846672058, "learning_rate": 6.381e-06, "loss": 0.0986, "step": 2130 }, { "epoch": 2.352291551628934, "grad_norm": 0.8464733362197876, "learning_rate": 6.384e-06, "loss": 0.0817, "step": 2131 }, { "epoch": 2.353395913859746, "grad_norm": 1.26555335521698, "learning_rate": 6.387e-06, "loss": 0.077, "step": 2132 }, { "epoch": 2.354500276090558, "grad_norm": 0.955219030380249, "learning_rate": 6.39e-06, "loss": 0.0882, "step": 2133 }, { "epoch": 2.3556046383213696, "grad_norm": 0.992814302444458, "learning_rate": 6.3930000000000005e-06, "loss": 0.0722, "step": 2134 }, { "epoch": 2.356709000552181, "grad_norm": 1.3321657180786133, "learning_rate": 6.396e-06, "loss": 0.0926, "step": 2135 }, { "epoch": 2.3578133627829927, "grad_norm": 1.6558467149734497, "learning_rate": 6.399e-06, "loss": 0.1327, "step": 2136 }, { "epoch": 2.3589177250138045, "grad_norm": 5.66308069229126, "learning_rate": 6.402000000000001e-06, "loss": 0.0665, "step": 2137 }, { "epoch": 2.3600220872446163, "grad_norm": 1.4888890981674194, "learning_rate": 6.405e-06, "loss": 0.1132, "step": 2138 }, { "epoch": 2.361126449475428, "grad_norm": 1.2869949340820312, "learning_rate": 6.408e-06, "loss": 0.0642, "step": 2139 }, { "epoch": 2.36223081170624, "grad_norm": 0.8259361386299133, "learning_rate": 6.411e-06, "loss": 0.0683, "step": 2140 }, { "epoch": 2.363335173937051, "grad_norm": 2.0741126537323, "learning_rate": 6.414e-06, "loss": 0.1244, "step": 2141 }, { "epoch": 2.364439536167863, "grad_norm": 3.4192543029785156, "learning_rate": 6.4170000000000006e-06, "loss": 0.1111, "step": 2142 }, { "epoch": 2.3655438983986747, "grad_norm": 1.240370750427246, "learning_rate": 6.42e-06, "loss": 0.0791, "step": 2143 }, { "epoch": 2.3666482606294865, "grad_norm": 1.2696969509124756, "learning_rate": 6.423e-06, "loss": 0.0924, "step": 2144 }, { "epoch": 2.3677526228602983, "grad_norm": 1.0890191793441772, "learning_rate": 6.426e-06, "loss": 0.1089, "step": 2145 }, { "epoch": 2.36885698509111, "grad_norm": 
1.8912814855575562, "learning_rate": 6.429e-06, "loss": 0.1016, "step": 2146 }, { "epoch": 2.3699613473219214, "grad_norm": 1.238162875175476, "learning_rate": 6.432e-06, "loss": 0.1037, "step": 2147 }, { "epoch": 2.371065709552733, "grad_norm": 1.3848886489868164, "learning_rate": 6.435e-06, "loss": 0.0981, "step": 2148 }, { "epoch": 2.372170071783545, "grad_norm": 1.2216435670852661, "learning_rate": 6.438000000000001e-06, "loss": 0.0944, "step": 2149 }, { "epoch": 2.373274434014357, "grad_norm": 1.3153756856918335, "learning_rate": 6.441e-06, "loss": 0.1228, "step": 2150 }, { "epoch": 2.3743787962451686, "grad_norm": 1.1535102128982544, "learning_rate": 6.444e-06, "loss": 0.1154, "step": 2151 }, { "epoch": 2.37548315847598, "grad_norm": 1.4607313871383667, "learning_rate": 6.447e-06, "loss": 0.1295, "step": 2152 }, { "epoch": 2.3765875207067917, "grad_norm": 1.3334938287734985, "learning_rate": 6.45e-06, "loss": 0.0813, "step": 2153 }, { "epoch": 2.3776918829376035, "grad_norm": 1.3244373798370361, "learning_rate": 6.453000000000001e-06, "loss": 0.1227, "step": 2154 }, { "epoch": 2.3787962451684153, "grad_norm": 1.2365449666976929, "learning_rate": 6.4560000000000005e-06, "loss": 0.1026, "step": 2155 }, { "epoch": 2.379900607399227, "grad_norm": 1.180152177810669, "learning_rate": 6.4589999999999995e-06, "loss": 0.0747, "step": 2156 }, { "epoch": 2.381004969630039, "grad_norm": 1.2454949617385864, "learning_rate": 6.462e-06, "loss": 0.111, "step": 2157 }, { "epoch": 2.38210933186085, "grad_norm": 1.0534576177597046, "learning_rate": 6.465e-06, "loss": 0.0792, "step": 2158 }, { "epoch": 2.383213694091662, "grad_norm": 1.623797059059143, "learning_rate": 6.468000000000001e-06, "loss": 0.1301, "step": 2159 }, { "epoch": 2.3843180563224737, "grad_norm": 2.2950546741485596, "learning_rate": 6.471e-06, "loss": 0.1914, "step": 2160 }, { "epoch": 2.3854224185532855, "grad_norm": 1.9414726495742798, "learning_rate": 6.474e-06, "loss": 0.0865, "step": 2161 }, { "epoch": 
2.3865267807840973, "grad_norm": 1.8705731630325317, "learning_rate": 6.477000000000001e-06, "loss": 0.1098, "step": 2162 }, { "epoch": 2.387631143014909, "grad_norm": 3.1607539653778076, "learning_rate": 6.48e-06, "loss": 0.5589, "step": 2163 }, { "epoch": 2.3887355052457204, "grad_norm": 1.893239140510559, "learning_rate": 6.483e-06, "loss": 0.4383, "step": 2164 }, { "epoch": 2.389839867476532, "grad_norm": 1.4198240041732788, "learning_rate": 6.486e-06, "loss": 0.4738, "step": 2165 }, { "epoch": 2.390944229707344, "grad_norm": 1.6131467819213867, "learning_rate": 6.489e-06, "loss": 0.4264, "step": 2166 }, { "epoch": 2.3920485919381558, "grad_norm": 1.4322198629379272, "learning_rate": 6.492000000000001e-06, "loss": 0.3723, "step": 2167 }, { "epoch": 2.3931529541689676, "grad_norm": 1.3950891494750977, "learning_rate": 6.4950000000000005e-06, "loss": 0.2855, "step": 2168 }, { "epoch": 2.394257316399779, "grad_norm": 1.3408490419387817, "learning_rate": 6.4979999999999994e-06, "loss": 0.2516, "step": 2169 }, { "epoch": 2.3953616786305907, "grad_norm": 1.217167615890503, "learning_rate": 6.501e-06, "loss": 0.2217, "step": 2170 }, { "epoch": 2.3964660408614025, "grad_norm": 1.0175466537475586, "learning_rate": 6.504e-06, "loss": 0.1448, "step": 2171 }, { "epoch": 2.3975704030922143, "grad_norm": 1.1380314826965332, "learning_rate": 6.5070000000000005e-06, "loss": 0.1423, "step": 2172 }, { "epoch": 2.398674765323026, "grad_norm": 1.1448051929473877, "learning_rate": 6.51e-06, "loss": 0.1352, "step": 2173 }, { "epoch": 2.399779127553838, "grad_norm": 0.9913449287414551, "learning_rate": 6.513e-06, "loss": 0.141, "step": 2174 }, { "epoch": 2.4008834897846496, "grad_norm": 0.8907628059387207, "learning_rate": 6.516e-06, "loss": 0.1011, "step": 2175 }, { "epoch": 2.401987852015461, "grad_norm": 1.0947275161743164, "learning_rate": 6.519e-06, "loss": 0.0967, "step": 2176 }, { "epoch": 2.4030922142462727, "grad_norm": 1.08083176612854, "learning_rate": 6.522e-06, "loss": 
0.0816, "step": 2177 }, { "epoch": 2.4041965764770845, "grad_norm": 0.9785980582237244, "learning_rate": 6.525e-06, "loss": 0.0773, "step": 2178 }, { "epoch": 2.4053009387078963, "grad_norm": 0.8954561352729797, "learning_rate": 6.528e-06, "loss": 0.0815, "step": 2179 }, { "epoch": 2.406405300938708, "grad_norm": 1.155731439590454, "learning_rate": 6.531000000000001e-06, "loss": 0.1002, "step": 2180 }, { "epoch": 2.4075096631695194, "grad_norm": 1.2605392932891846, "learning_rate": 6.534e-06, "loss": 0.1029, "step": 2181 }, { "epoch": 2.408614025400331, "grad_norm": 1.4277347326278687, "learning_rate": 6.537e-06, "loss": 0.0808, "step": 2182 }, { "epoch": 2.409718387631143, "grad_norm": 1.3711135387420654, "learning_rate": 6.54e-06, "loss": 0.101, "step": 2183 }, { "epoch": 2.4108227498619548, "grad_norm": 0.9895614981651306, "learning_rate": 6.543e-06, "loss": 0.0938, "step": 2184 }, { "epoch": 2.4119271120927666, "grad_norm": 0.9940232038497925, "learning_rate": 6.5460000000000005e-06, "loss": 0.0716, "step": 2185 }, { "epoch": 2.413031474323578, "grad_norm": 0.9864546060562134, "learning_rate": 6.549e-06, "loss": 0.0703, "step": 2186 }, { "epoch": 2.4141358365543897, "grad_norm": 1.3203524351119995, "learning_rate": 6.552e-06, "loss": 0.0802, "step": 2187 }, { "epoch": 2.4152401987852015, "grad_norm": 0.9605754613876343, "learning_rate": 6.555e-06, "loss": 0.0693, "step": 2188 }, { "epoch": 2.4163445610160132, "grad_norm": 1.7647264003753662, "learning_rate": 6.558e-06, "loss": 0.1066, "step": 2189 }, { "epoch": 2.417448923246825, "grad_norm": 0.8740423321723938, "learning_rate": 6.561e-06, "loss": 0.076, "step": 2190 }, { "epoch": 2.418553285477637, "grad_norm": 1.4619381427764893, "learning_rate": 6.564e-06, "loss": 0.0872, "step": 2191 }, { "epoch": 2.4196576477084486, "grad_norm": 1.0946768522262573, "learning_rate": 6.567000000000001e-06, "loss": 0.0899, "step": 2192 }, { "epoch": 2.42076200993926, "grad_norm": 1.1297106742858887, "learning_rate": 6.57e-06, 
"loss": 0.0953, "step": 2193 }, { "epoch": 2.4218663721700717, "grad_norm": 1.559015154838562, "learning_rate": 6.573e-06, "loss": 0.0994, "step": 2194 }, { "epoch": 2.4229707344008835, "grad_norm": 1.1647502183914185, "learning_rate": 6.576e-06, "loss": 0.0859, "step": 2195 }, { "epoch": 2.4240750966316953, "grad_norm": 1.392546534538269, "learning_rate": 6.579e-06, "loss": 0.0861, "step": 2196 }, { "epoch": 2.425179458862507, "grad_norm": 1.3276978731155396, "learning_rate": 6.582000000000001e-06, "loss": 0.1035, "step": 2197 }, { "epoch": 2.4262838210933184, "grad_norm": 1.59230375289917, "learning_rate": 6.5850000000000005e-06, "loss": 0.1095, "step": 2198 }, { "epoch": 2.42738818332413, "grad_norm": 1.9296813011169434, "learning_rate": 6.5879999999999994e-06, "loss": 0.1, "step": 2199 }, { "epoch": 2.428492545554942, "grad_norm": 1.3933701515197754, "learning_rate": 6.591e-06, "loss": 0.0884, "step": 2200 }, { "epoch": 2.4295969077857538, "grad_norm": 1.4069260358810425, "learning_rate": 6.594e-06, "loss": 0.1147, "step": 2201 }, { "epoch": 2.4307012700165656, "grad_norm": 2.3456413745880127, "learning_rate": 6.5970000000000005e-06, "loss": 0.0813, "step": 2202 }, { "epoch": 2.431805632247377, "grad_norm": 1.554632544517517, "learning_rate": 6.6e-06, "loss": 0.073, "step": 2203 }, { "epoch": 2.4329099944781887, "grad_norm": 1.1774367094039917, "learning_rate": 6.603e-06, "loss": 0.0759, "step": 2204 }, { "epoch": 2.4340143567090005, "grad_norm": 1.053580641746521, "learning_rate": 6.606000000000001e-06, "loss": 0.0995, "step": 2205 }, { "epoch": 2.4351187189398122, "grad_norm": 1.237271785736084, "learning_rate": 6.609e-06, "loss": 0.1082, "step": 2206 }, { "epoch": 2.436223081170624, "grad_norm": 1.8458632230758667, "learning_rate": 6.612e-06, "loss": 0.1085, "step": 2207 }, { "epoch": 2.437327443401436, "grad_norm": 1.2958251237869263, "learning_rate": 6.615e-06, "loss": 0.1266, "step": 2208 }, { "epoch": 2.4384318056322476, "grad_norm": 1.3312565088272095, 
"learning_rate": 6.618e-06, "loss": 0.0964, "step": 2209 }, { "epoch": 2.439536167863059, "grad_norm": 1.2605688571929932, "learning_rate": 6.621000000000001e-06, "loss": 0.1214, "step": 2210 }, { "epoch": 2.4406405300938707, "grad_norm": 1.386703610420227, "learning_rate": 6.6240000000000004e-06, "loss": 0.1145, "step": 2211 }, { "epoch": 2.4417448923246825, "grad_norm": 1.5619633197784424, "learning_rate": 6.627e-06, "loss": 0.1597, "step": 2212 }, { "epoch": 2.4428492545554943, "grad_norm": 2.294569253921509, "learning_rate": 6.63e-06, "loss": 0.5389, "step": 2213 }, { "epoch": 2.443953616786306, "grad_norm": 1.5804555416107178, "learning_rate": 6.633e-06, "loss": 0.4068, "step": 2214 }, { "epoch": 2.4450579790171174, "grad_norm": 1.8180639743804932, "learning_rate": 6.6360000000000005e-06, "loss": 0.386, "step": 2215 }, { "epoch": 2.446162341247929, "grad_norm": 2.0482475757598877, "learning_rate": 6.639e-06, "loss": 0.3541, "step": 2216 }, { "epoch": 2.447266703478741, "grad_norm": 1.2608139514923096, "learning_rate": 6.642000000000001e-06, "loss": 0.2674, "step": 2217 }, { "epoch": 2.4483710657095528, "grad_norm": 1.7622202634811401, "learning_rate": 6.645e-06, "loss": 0.2706, "step": 2218 }, { "epoch": 2.4494754279403645, "grad_norm": 1.4064732789993286, "learning_rate": 6.648e-06, "loss": 0.2942, "step": 2219 }, { "epoch": 2.4505797901711763, "grad_norm": 1.098116397857666, "learning_rate": 6.651e-06, "loss": 0.2181, "step": 2220 }, { "epoch": 2.4516841524019877, "grad_norm": 1.348957896232605, "learning_rate": 6.654e-06, "loss": 0.2129, "step": 2221 }, { "epoch": 2.4527885146327995, "grad_norm": 1.0760711431503296, "learning_rate": 6.657e-06, "loss": 0.1437, "step": 2222 }, { "epoch": 2.4538928768636112, "grad_norm": 1.4978017807006836, "learning_rate": 6.660000000000001e-06, "loss": 0.2151, "step": 2223 }, { "epoch": 2.454997239094423, "grad_norm": 6.795744895935059, "learning_rate": 6.6629999999999996e-06, "loss": 0.1177, "step": 2224 }, { "epoch": 
2.456101601325235, "grad_norm": 0.8640043139457703, "learning_rate": 6.666e-06, "loss": 0.1115, "step": 2225 }, { "epoch": 2.4572059635560466, "grad_norm": 1.2609373331069946, "learning_rate": 6.669e-06, "loss": 0.1183, "step": 2226 }, { "epoch": 2.458310325786858, "grad_norm": 1.4271125793457031, "learning_rate": 6.672e-06, "loss": 0.1599, "step": 2227 }, { "epoch": 2.4594146880176697, "grad_norm": 1.092406153678894, "learning_rate": 6.6750000000000005e-06, "loss": 0.0946, "step": 2228 }, { "epoch": 2.4605190502484815, "grad_norm": 1.4039180278778076, "learning_rate": 6.678e-06, "loss": 0.0816, "step": 2229 }, { "epoch": 2.4616234124792933, "grad_norm": 2.20707106590271, "learning_rate": 6.681e-06, "loss": 0.1534, "step": 2230 }, { "epoch": 2.462727774710105, "grad_norm": 1.2628105878829956, "learning_rate": 6.684e-06, "loss": 0.0661, "step": 2231 }, { "epoch": 2.4638321369409164, "grad_norm": 1.6961255073547363, "learning_rate": 6.687e-06, "loss": 0.0856, "step": 2232 }, { "epoch": 2.464936499171728, "grad_norm": 1.2539139986038208, "learning_rate": 6.69e-06, "loss": 0.0862, "step": 2233 }, { "epoch": 2.46604086140254, "grad_norm": 0.8507335186004639, "learning_rate": 6.693e-06, "loss": 0.0775, "step": 2234 }, { "epoch": 2.4671452236333518, "grad_norm": 1.9849992990493774, "learning_rate": 6.696000000000001e-06, "loss": 0.1261, "step": 2235 }, { "epoch": 2.4682495858641635, "grad_norm": 3.7527265548706055, "learning_rate": 6.699e-06, "loss": 0.1126, "step": 2236 }, { "epoch": 2.4693539480949753, "grad_norm": 1.5608822107315063, "learning_rate": 6.7019999999999995e-06, "loss": 0.0693, "step": 2237 }, { "epoch": 2.4704583103257867, "grad_norm": 2.150649309158325, "learning_rate": 6.705e-06, "loss": 0.0878, "step": 2238 }, { "epoch": 2.4715626725565985, "grad_norm": 1.6401735544204712, "learning_rate": 6.708e-06, "loss": 0.0764, "step": 2239 }, { "epoch": 2.4726670347874102, "grad_norm": 1.1718811988830566, "learning_rate": 6.711000000000001e-06, "loss": 0.101, 
"step": 2240 }, { "epoch": 2.473771397018222, "grad_norm": 0.9083468914031982, "learning_rate": 6.7140000000000004e-06, "loss": 0.0936, "step": 2241 }, { "epoch": 2.474875759249034, "grad_norm": 1.6190487146377563, "learning_rate": 6.716999999999999e-06, "loss": 0.0979, "step": 2242 }, { "epoch": 2.4759801214798456, "grad_norm": 1.3453961610794067, "learning_rate": 6.72e-06, "loss": 0.0665, "step": 2243 }, { "epoch": 2.477084483710657, "grad_norm": 1.0933324098587036, "learning_rate": 6.723e-06, "loss": 0.0695, "step": 2244 }, { "epoch": 2.4781888459414687, "grad_norm": 1.0358812808990479, "learning_rate": 6.7260000000000005e-06, "loss": 0.0851, "step": 2245 }, { "epoch": 2.4792932081722805, "grad_norm": 1.0589860677719116, "learning_rate": 6.729e-06, "loss": 0.0943, "step": 2246 }, { "epoch": 2.4803975704030923, "grad_norm": 1.022042155265808, "learning_rate": 6.732e-06, "loss": 0.0887, "step": 2247 }, { "epoch": 2.481501932633904, "grad_norm": 1.9610100984573364, "learning_rate": 6.735000000000001e-06, "loss": 0.1032, "step": 2248 }, { "epoch": 2.4826062948647154, "grad_norm": 1.0912940502166748, "learning_rate": 6.738e-06, "loss": 0.0894, "step": 2249 }, { "epoch": 2.483710657095527, "grad_norm": 2.21958065032959, "learning_rate": 6.741e-06, "loss": 0.076, "step": 2250 }, { "epoch": 2.484815019326339, "grad_norm": 1.7725297212600708, "learning_rate": 6.744e-06, "loss": 0.1164, "step": 2251 }, { "epoch": 2.4859193815571508, "grad_norm": 1.4874725341796875, "learning_rate": 6.747e-06, "loss": 0.1131, "step": 2252 }, { "epoch": 2.4870237437879625, "grad_norm": 1.8050434589385986, "learning_rate": 6.750000000000001e-06, "loss": 0.0851, "step": 2253 }, { "epoch": 2.4881281060187743, "grad_norm": 1.4957787990570068, "learning_rate": 6.753e-06, "loss": 0.1316, "step": 2254 }, { "epoch": 2.489232468249586, "grad_norm": 1.1072896718978882, "learning_rate": 6.756e-06, "loss": 0.0629, "step": 2255 }, { "epoch": 2.4903368304803974, "grad_norm": 1.4112818241119385, 
"learning_rate": 6.759e-06, "loss": 0.1199, "step": 2256 }, { "epoch": 2.4914411927112092, "grad_norm": 2.3877673149108887, "learning_rate": 6.762e-06, "loss": 0.1015, "step": 2257 }, { "epoch": 2.492545554942021, "grad_norm": 2.049729347229004, "learning_rate": 6.7650000000000005e-06, "loss": 0.1242, "step": 2258 }, { "epoch": 2.493649917172833, "grad_norm": 1.7305076122283936, "learning_rate": 6.768e-06, "loss": 0.1351, "step": 2259 }, { "epoch": 2.4947542794036446, "grad_norm": 1.2428312301635742, "learning_rate": 6.771000000000001e-06, "loss": 0.1018, "step": 2260 }, { "epoch": 2.495858641634456, "grad_norm": 1.7350273132324219, "learning_rate": 6.774e-06, "loss": 0.1296, "step": 2261 }, { "epoch": 2.4969630038652677, "grad_norm": 1.4722952842712402, "learning_rate": 6.777e-06, "loss": 0.1449, "step": 2262 }, { "epoch": 2.4980673660960795, "grad_norm": 2.6331889629364014, "learning_rate": 6.78e-06, "loss": 0.4989, "step": 2263 }, { "epoch": 2.4991717283268913, "grad_norm": 3.4904191493988037, "learning_rate": 6.783e-06, "loss": 0.5041, "step": 2264 }, { "epoch": 2.500276090557703, "grad_norm": 1.5093600749969482, "learning_rate": 6.786000000000001e-06, "loss": 0.3099, "step": 2265 }, { "epoch": 2.5013804527885144, "grad_norm": 1.5624420642852783, "learning_rate": 6.7890000000000006e-06, "loss": 0.351, "step": 2266 }, { "epoch": 2.502484815019326, "grad_norm": 1.8032007217407227, "learning_rate": 6.7919999999999995e-06, "loss": 0.3282, "step": 2267 }, { "epoch": 2.503589177250138, "grad_norm": 1.9029291868209839, "learning_rate": 6.795e-06, "loss": 0.2682, "step": 2268 }, { "epoch": 2.5046935394809497, "grad_norm": 1.6419997215270996, "learning_rate": 6.798e-06, "loss": 0.2461, "step": 2269 }, { "epoch": 2.5057979017117615, "grad_norm": 2.3120663166046143, "learning_rate": 6.801000000000001e-06, "loss": 0.16, "step": 2270 }, { "epoch": 2.5069022639425733, "grad_norm": 1.907149314880371, "learning_rate": 6.804e-06, "loss": 0.1557, "step": 2271 }, { "epoch": 
2.508006626173385, "grad_norm": 1.022371768951416, "learning_rate": 6.807e-06, "loss": 0.0999, "step": 2272 }, { "epoch": 2.5091109884041964, "grad_norm": 1.1557761430740356, "learning_rate": 6.81e-06, "loss": 0.0644, "step": 2273 }, { "epoch": 2.5102153506350082, "grad_norm": 2.0597550868988037, "learning_rate": 6.813e-06, "loss": 0.0965, "step": 2274 }, { "epoch": 2.51131971286582, "grad_norm": 1.216890811920166, "learning_rate": 6.8160000000000005e-06, "loss": 0.0812, "step": 2275 }, { "epoch": 2.512424075096632, "grad_norm": 1.5153528451919556, "learning_rate": 6.819e-06, "loss": 0.0914, "step": 2276 }, { "epoch": 2.5135284373274436, "grad_norm": 0.9541594386100769, "learning_rate": 6.822e-06, "loss": 0.0688, "step": 2277 }, { "epoch": 2.514632799558255, "grad_norm": 0.8149120807647705, "learning_rate": 6.825000000000001e-06, "loss": 0.0577, "step": 2278 }, { "epoch": 2.5157371617890667, "grad_norm": 0.8630746006965637, "learning_rate": 6.828e-06, "loss": 0.0805, "step": 2279 }, { "epoch": 2.5168415240198785, "grad_norm": 0.9885759949684143, "learning_rate": 6.831e-06, "loss": 0.0706, "step": 2280 }, { "epoch": 2.5179458862506903, "grad_norm": 1.0217463970184326, "learning_rate": 6.834e-06, "loss": 0.0716, "step": 2281 }, { "epoch": 2.519050248481502, "grad_norm": 1.1016426086425781, "learning_rate": 6.837e-06, "loss": 0.0748, "step": 2282 }, { "epoch": 2.5201546107123134, "grad_norm": 1.7535667419433594, "learning_rate": 6.840000000000001e-06, "loss": 0.0947, "step": 2283 }, { "epoch": 2.5212589729431256, "grad_norm": 1.1695653200149536, "learning_rate": 6.843e-06, "loss": 0.0846, "step": 2284 }, { "epoch": 2.522363335173937, "grad_norm": 1.187543511390686, "learning_rate": 6.845999999999999e-06, "loss": 0.0698, "step": 2285 }, { "epoch": 2.5234676974047487, "grad_norm": 1.2993968725204468, "learning_rate": 6.849e-06, "loss": 0.0498, "step": 2286 }, { "epoch": 2.5245720596355605, "grad_norm": 1.3425061702728271, "learning_rate": 6.852e-06, "loss": 0.1393, 
"step": 2287 }, { "epoch": 2.5256764218663723, "grad_norm": 0.9271669387817383, "learning_rate": 6.8550000000000004e-06, "loss": 0.0836, "step": 2288 }, { "epoch": 2.526780784097184, "grad_norm": 2.1713080406188965, "learning_rate": 6.858e-06, "loss": 0.1452, "step": 2289 }, { "epoch": 2.5278851463279954, "grad_norm": 1.7725516557693481, "learning_rate": 6.861e-06, "loss": 0.1015, "step": 2290 }, { "epoch": 2.528989508558807, "grad_norm": 1.3127212524414062, "learning_rate": 6.864000000000001e-06, "loss": 0.0748, "step": 2291 }, { "epoch": 2.530093870789619, "grad_norm": 0.9600074291229248, "learning_rate": 6.867e-06, "loss": 0.0783, "step": 2292 }, { "epoch": 2.531198233020431, "grad_norm": 1.4318859577178955, "learning_rate": 6.87e-06, "loss": 0.0785, "step": 2293 }, { "epoch": 2.5323025952512426, "grad_norm": 1.102116584777832, "learning_rate": 6.873e-06, "loss": 0.0962, "step": 2294 }, { "epoch": 2.533406957482054, "grad_norm": 1.1613582372665405, "learning_rate": 6.876e-06, "loss": 0.0775, "step": 2295 }, { "epoch": 2.5345113197128657, "grad_norm": 0.9897307753562927, "learning_rate": 6.8790000000000005e-06, "loss": 0.0841, "step": 2296 }, { "epoch": 2.5356156819436775, "grad_norm": 1.1686660051345825, "learning_rate": 6.882e-06, "loss": 0.0788, "step": 2297 }, { "epoch": 2.5367200441744893, "grad_norm": 1.1920101642608643, "learning_rate": 6.885e-06, "loss": 0.0678, "step": 2298 }, { "epoch": 2.537824406405301, "grad_norm": 1.3703233003616333, "learning_rate": 6.888e-06, "loss": 0.0873, "step": 2299 }, { "epoch": 2.5389287686361124, "grad_norm": 1.057150959968567, "learning_rate": 6.891e-06, "loss": 0.0902, "step": 2300 }, { "epoch": 2.5400331308669246, "grad_norm": 2.106688976287842, "learning_rate": 6.894e-06, "loss": 0.1234, "step": 2301 }, { "epoch": 2.541137493097736, "grad_norm": 1.5191733837127686, "learning_rate": 6.897e-06, "loss": 0.0892, "step": 2302 }, { "epoch": 2.5422418553285477, "grad_norm": 1.540001630783081, "learning_rate": 
6.900000000000001e-06, "loss": 0.1039, "step": 2303 }, { "epoch": 2.5433462175593595, "grad_norm": 1.037645936012268, "learning_rate": 6.903e-06, "loss": 0.0649, "step": 2304 }, { "epoch": 2.5444505797901713, "grad_norm": 1.3244024515151978, "learning_rate": 6.906e-06, "loss": 0.0901, "step": 2305 }, { "epoch": 2.545554942020983, "grad_norm": 1.3561434745788574, "learning_rate": 6.909e-06, "loss": 0.1162, "step": 2306 }, { "epoch": 2.5466593042517944, "grad_norm": 1.5465925931930542, "learning_rate": 6.912e-06, "loss": 0.0776, "step": 2307 }, { "epoch": 2.547763666482606, "grad_norm": 1.2090500593185425, "learning_rate": 6.915000000000001e-06, "loss": 0.0979, "step": 2308 }, { "epoch": 2.548868028713418, "grad_norm": 1.7622814178466797, "learning_rate": 6.9180000000000005e-06, "loss": 0.1323, "step": 2309 }, { "epoch": 2.54997239094423, "grad_norm": 1.3112194538116455, "learning_rate": 6.9209999999999995e-06, "loss": 0.0882, "step": 2310 }, { "epoch": 2.5510767531750416, "grad_norm": 1.6696691513061523, "learning_rate": 6.924e-06, "loss": 0.0838, "step": 2311 }, { "epoch": 2.552181115405853, "grad_norm": 2.3282718658447266, "learning_rate": 6.927e-06, "loss": 0.1655, "step": 2312 }, { "epoch": 2.5532854776366647, "grad_norm": 2.383082866668701, "learning_rate": 6.9300000000000006e-06, "loss": 0.5733, "step": 2313 }, { "epoch": 2.5543898398674765, "grad_norm": 1.6356867551803589, "learning_rate": 6.933e-06, "loss": 0.5055, "step": 2314 }, { "epoch": 2.5554942020982883, "grad_norm": 1.7946698665618896, "learning_rate": 6.936e-06, "loss": 0.4792, "step": 2315 }, { "epoch": 2.5565985643291, "grad_norm": 2.4608426094055176, "learning_rate": 6.939e-06, "loss": 0.4715, "step": 2316 }, { "epoch": 2.5577029265599114, "grad_norm": 1.5373443365097046, "learning_rate": 6.942e-06, "loss": 0.3234, "step": 2317 }, { "epoch": 2.5588072887907236, "grad_norm": 1.4804043769836426, "learning_rate": 6.945e-06, "loss": 0.1919, "step": 2318 }, { "epoch": 2.559911651021535, "grad_norm": 
1.541253924369812, "learning_rate": 6.948e-06, "loss": 0.2284, "step": 2319 }, { "epoch": 2.5610160132523467, "grad_norm": 1.2964783906936646, "learning_rate": 6.951e-06, "loss": 0.2409, "step": 2320 }, { "epoch": 2.5621203754831585, "grad_norm": 0.8081741333007812, "learning_rate": 6.954000000000001e-06, "loss": 0.0907, "step": 2321 }, { "epoch": 2.5632247377139703, "grad_norm": 1.1412551403045654, "learning_rate": 6.957e-06, "loss": 0.0931, "step": 2322 }, { "epoch": 2.564329099944782, "grad_norm": 1.241645336151123, "learning_rate": 6.96e-06, "loss": 0.0919, "step": 2323 }, { "epoch": 2.5654334621755934, "grad_norm": 1.6478105783462524, "learning_rate": 6.963e-06, "loss": 0.0971, "step": 2324 }, { "epoch": 2.566537824406405, "grad_norm": 1.3867655992507935, "learning_rate": 6.966e-06, "loss": 0.1076, "step": 2325 }, { "epoch": 2.567642186637217, "grad_norm": 1.2772380113601685, "learning_rate": 6.9690000000000005e-06, "loss": 0.097, "step": 2326 }, { "epoch": 2.568746548868029, "grad_norm": 0.934259295463562, "learning_rate": 6.972e-06, "loss": 0.0584, "step": 2327 }, { "epoch": 2.5698509110988406, "grad_norm": 1.1570159196853638, "learning_rate": 6.975e-06, "loss": 0.0959, "step": 2328 }, { "epoch": 2.570955273329652, "grad_norm": 0.7311425805091858, "learning_rate": 6.978e-06, "loss": 0.0666, "step": 2329 }, { "epoch": 2.5720596355604637, "grad_norm": 0.626177191734314, "learning_rate": 6.981e-06, "loss": 0.0815, "step": 2330 }, { "epoch": 2.5731639977912755, "grad_norm": 1.3028552532196045, "learning_rate": 6.984e-06, "loss": 0.0838, "step": 2331 }, { "epoch": 2.5742683600220873, "grad_norm": 1.1920366287231445, "learning_rate": 6.987e-06, "loss": 0.0682, "step": 2332 }, { "epoch": 2.575372722252899, "grad_norm": 1.0960959196090698, "learning_rate": 6.990000000000001e-06, "loss": 0.0693, "step": 2333 }, { "epoch": 2.576477084483711, "grad_norm": 1.3839054107666016, "learning_rate": 6.993000000000001e-06, "loss": 0.1329, "step": 2334 }, { "epoch": 
2.5775814467145226, "grad_norm": 1.8124258518218994, "learning_rate": 6.996e-06, "loss": 0.087, "step": 2335 }, { "epoch": 2.578685808945334, "grad_norm": 0.8952421545982361, "learning_rate": 6.999e-06, "loss": 0.0751, "step": 2336 }, { "epoch": 2.5797901711761457, "grad_norm": 1.3952844142913818, "learning_rate": 7.002e-06, "loss": 0.0786, "step": 2337 }, { "epoch": 2.5808945334069575, "grad_norm": 1.490206003189087, "learning_rate": 7.005000000000001e-06, "loss": 0.068, "step": 2338 }, { "epoch": 2.5819988956377693, "grad_norm": 1.1822766065597534, "learning_rate": 7.0080000000000005e-06, "loss": 0.0733, "step": 2339 }, { "epoch": 2.583103257868581, "grad_norm": 1.0681848526000977, "learning_rate": 7.011e-06, "loss": 0.0871, "step": 2340 }, { "epoch": 2.5842076200993924, "grad_norm": 6.340001106262207, "learning_rate": 7.014e-06, "loss": 0.0877, "step": 2341 }, { "epoch": 2.585311982330204, "grad_norm": 1.2369953393936157, "learning_rate": 7.017e-06, "loss": 0.096, "step": 2342 }, { "epoch": 2.586416344561016, "grad_norm": 1.1851528882980347, "learning_rate": 7.0200000000000006e-06, "loss": 0.0701, "step": 2343 }, { "epoch": 2.5875207067918278, "grad_norm": 1.478840947151184, "learning_rate": 7.023e-06, "loss": 0.0819, "step": 2344 }, { "epoch": 2.5886250690226396, "grad_norm": 1.162901759147644, "learning_rate": 7.026e-06, "loss": 0.0558, "step": 2345 }, { "epoch": 2.589729431253451, "grad_norm": 1.7435706853866577, "learning_rate": 7.029000000000001e-06, "loss": 0.101, "step": 2346 }, { "epoch": 2.5908337934842627, "grad_norm": 1.0156558752059937, "learning_rate": 7.032e-06, "loss": 0.0827, "step": 2347 }, { "epoch": 2.5919381557150745, "grad_norm": 1.4755027294158936, "learning_rate": 7.0349999999999996e-06, "loss": 0.0612, "step": 2348 }, { "epoch": 2.5930425179458862, "grad_norm": 2.093916654586792, "learning_rate": 7.038e-06, "loss": 0.1201, "step": 2349 }, { "epoch": 2.594146880176698, "grad_norm": 1.6266918182373047, "learning_rate": 7.041e-06, "loss": 
0.0998, "step": 2350 }, { "epoch": 2.59525124240751, "grad_norm": 0.9094324707984924, "learning_rate": 7.044000000000001e-06, "loss": 0.0802, "step": 2351 }, { "epoch": 2.5963556046383216, "grad_norm": 1.3046889305114746, "learning_rate": 7.0470000000000005e-06, "loss": 0.0732, "step": 2352 }, { "epoch": 2.597459966869133, "grad_norm": 2.5237972736358643, "learning_rate": 7.049999999999999e-06, "loss": 0.0764, "step": 2353 }, { "epoch": 2.5985643290999447, "grad_norm": 1.597588300704956, "learning_rate": 7.053e-06, "loss": 0.1077, "step": 2354 }, { "epoch": 2.5996686913307565, "grad_norm": 1.1911654472351074, "learning_rate": 7.056e-06, "loss": 0.0829, "step": 2355 }, { "epoch": 2.6007730535615683, "grad_norm": 1.3173813819885254, "learning_rate": 7.0590000000000005e-06, "loss": 0.108, "step": 2356 }, { "epoch": 2.60187741579238, "grad_norm": 1.6583168506622314, "learning_rate": 7.062e-06, "loss": 0.1165, "step": 2357 }, { "epoch": 2.6029817780231914, "grad_norm": 1.5384455919265747, "learning_rate": 7.065e-06, "loss": 0.1059, "step": 2358 }, { "epoch": 2.604086140254003, "grad_norm": 1.4039828777313232, "learning_rate": 7.068e-06, "loss": 0.1075, "step": 2359 }, { "epoch": 2.605190502484815, "grad_norm": 1.2437083721160889, "learning_rate": 7.071e-06, "loss": 0.074, "step": 2360 }, { "epoch": 2.6062948647156268, "grad_norm": 2.1008241176605225, "learning_rate": 7.074e-06, "loss": 0.1201, "step": 2361 }, { "epoch": 2.6073992269464386, "grad_norm": 2.351454734802246, "learning_rate": 7.077e-06, "loss": 0.1054, "step": 2362 }, { "epoch": 2.60850358917725, "grad_norm": 3.179415702819824, "learning_rate": 7.08e-06, "loss": 0.5232, "step": 2363 }, { "epoch": 2.609607951408062, "grad_norm": 1.9500411748886108, "learning_rate": 7.083000000000001e-06, "loss": 0.4549, "step": 2364 }, { "epoch": 2.6107123136388735, "grad_norm": 1.3728128671646118, "learning_rate": 7.086e-06, "loss": 0.4039, "step": 2365 }, { "epoch": 2.6118166758696852, "grad_norm": 3.5595545768737793, 
"learning_rate": 7.089e-06, "loss": 0.4175, "step": 2366 }, { "epoch": 2.612921038100497, "grad_norm": 1.6150081157684326, "learning_rate": 7.092e-06, "loss": 0.3136, "step": 2367 }, { "epoch": 2.614025400331309, "grad_norm": 1.3670717477798462, "learning_rate": 7.095e-06, "loss": 0.2271, "step": 2368 }, { "epoch": 2.6151297625621206, "grad_norm": 6.909471035003662, "learning_rate": 7.0980000000000005e-06, "loss": 0.2579, "step": 2369 }, { "epoch": 2.616234124792932, "grad_norm": 3.293846368789673, "learning_rate": 7.101e-06, "loss": 0.1408, "step": 2370 }, { "epoch": 2.6173384870237437, "grad_norm": 1.7985769510269165, "learning_rate": 7.104e-06, "loss": 0.1143, "step": 2371 }, { "epoch": 2.6184428492545555, "grad_norm": 1.071373462677002, "learning_rate": 7.107e-06, "loss": 0.137, "step": 2372 }, { "epoch": 2.6195472114853673, "grad_norm": 0.9042783975601196, "learning_rate": 7.11e-06, "loss": 0.0971, "step": 2373 }, { "epoch": 2.620651573716179, "grad_norm": 0.9594714045524597, "learning_rate": 7.113e-06, "loss": 0.0898, "step": 2374 }, { "epoch": 2.6217559359469904, "grad_norm": 1.2961889505386353, "learning_rate": 7.116e-06, "loss": 0.1148, "step": 2375 }, { "epoch": 2.622860298177802, "grad_norm": 0.855324923992157, "learning_rate": 7.119000000000001e-06, "loss": 0.0956, "step": 2376 }, { "epoch": 2.623964660408614, "grad_norm": 0.7493290901184082, "learning_rate": 7.122000000000001e-06, "loss": 0.0916, "step": 2377 }, { "epoch": 2.6250690226394258, "grad_norm": 2.676032781600952, "learning_rate": 7.1249999999999995e-06, "loss": 0.0911, "step": 2378 }, { "epoch": 2.6261733848702375, "grad_norm": 0.8546279072761536, "learning_rate": 7.128e-06, "loss": 0.0643, "step": 2379 }, { "epoch": 2.627277747101049, "grad_norm": 1.192406177520752, "learning_rate": 7.131e-06, "loss": 0.1023, "step": 2380 }, { "epoch": 2.628382109331861, "grad_norm": 1.6772805452346802, "learning_rate": 7.134000000000001e-06, "loss": 0.0683, "step": 2381 }, { "epoch": 2.6294864715626725, 
"grad_norm": 1.3189154863357544, "learning_rate": 7.1370000000000004e-06, "loss": 0.064, "step": 2382 }, { "epoch": 2.6305908337934842, "grad_norm": 0.9972355961799622, "learning_rate": 7.14e-06, "loss": 0.0954, "step": 2383 }, { "epoch": 2.631695196024296, "grad_norm": 1.0777405500411987, "learning_rate": 7.143e-06, "loss": 0.0907, "step": 2384 }, { "epoch": 2.632799558255108, "grad_norm": 1.0881640911102295, "learning_rate": 7.146e-06, "loss": 0.0748, "step": 2385 }, { "epoch": 2.6339039204859196, "grad_norm": 1.2399653196334839, "learning_rate": 7.1490000000000005e-06, "loss": 0.0842, "step": 2386 }, { "epoch": 2.635008282716731, "grad_norm": 1.5923975706100464, "learning_rate": 7.152e-06, "loss": 0.1115, "step": 2387 }, { "epoch": 2.6361126449475427, "grad_norm": 0.8558706045150757, "learning_rate": 7.155e-06, "loss": 0.0636, "step": 2388 }, { "epoch": 2.6372170071783545, "grad_norm": 1.2521271705627441, "learning_rate": 7.158000000000001e-06, "loss": 0.1065, "step": 2389 }, { "epoch": 2.6383213694091663, "grad_norm": 1.0548993349075317, "learning_rate": 7.161e-06, "loss": 0.0727, "step": 2390 }, { "epoch": 2.639425731639978, "grad_norm": 1.3244216442108154, "learning_rate": 7.164e-06, "loss": 0.1057, "step": 2391 }, { "epoch": 2.6405300938707894, "grad_norm": 0.9903775453567505, "learning_rate": 7.167e-06, "loss": 0.0598, "step": 2392 }, { "epoch": 2.641634456101601, "grad_norm": 1.0903264284133911, "learning_rate": 7.17e-06, "loss": 0.0901, "step": 2393 }, { "epoch": 2.642738818332413, "grad_norm": 1.7710258960723877, "learning_rate": 7.173000000000001e-06, "loss": 0.0873, "step": 2394 }, { "epoch": 2.6438431805632248, "grad_norm": 0.8960608839988708, "learning_rate": 7.176e-06, "loss": 0.0667, "step": 2395 }, { "epoch": 2.6449475427940365, "grad_norm": 1.1628141403198242, "learning_rate": 7.179e-06, "loss": 0.0901, "step": 2396 }, { "epoch": 2.646051905024848, "grad_norm": 1.077284812927246, "learning_rate": 7.182e-06, "loss": 0.0917, "step": 2397 }, { 
"epoch": 2.64715626725566, "grad_norm": 2.156771183013916, "learning_rate": 7.185e-06, "loss": 0.0661, "step": 2398 }, { "epoch": 2.6482606294864715, "grad_norm": 1.650815725326538, "learning_rate": 7.1880000000000005e-06, "loss": 0.0814, "step": 2399 }, { "epoch": 2.6493649917172832, "grad_norm": 1.9217727184295654, "learning_rate": 7.191e-06, "loss": 0.1186, "step": 2400 }, { "epoch": 2.650469353948095, "grad_norm": 1.1417351961135864, "learning_rate": 7.194000000000001e-06, "loss": 0.0804, "step": 2401 }, { "epoch": 2.651573716178907, "grad_norm": 1.2623316049575806, "learning_rate": 7.197e-06, "loss": 0.0799, "step": 2402 }, { "epoch": 2.6526780784097186, "grad_norm": 1.6560264825820923, "learning_rate": 7.2e-06, "loss": 0.0811, "step": 2403 }, { "epoch": 2.65378244064053, "grad_norm": 0.8516725301742554, "learning_rate": 7.203e-06, "loss": 0.065, "step": 2404 }, { "epoch": 2.6548868028713417, "grad_norm": 1.427168369293213, "learning_rate": 7.206e-06, "loss": 0.0909, "step": 2405 }, { "epoch": 2.6559911651021535, "grad_norm": 1.3728996515274048, "learning_rate": 7.209000000000001e-06, "loss": 0.0893, "step": 2406 }, { "epoch": 2.6570955273329653, "grad_norm": 0.981032133102417, "learning_rate": 7.2120000000000006e-06, "loss": 0.0813, "step": 2407 }, { "epoch": 2.658199889563777, "grad_norm": 1.130138635635376, "learning_rate": 7.2149999999999995e-06, "loss": 0.0822, "step": 2408 }, { "epoch": 2.6593042517945884, "grad_norm": 2.4163615703582764, "learning_rate": 7.218e-06, "loss": 0.1387, "step": 2409 }, { "epoch": 2.6604086140254, "grad_norm": 2.335660219192505, "learning_rate": 7.221e-06, "loss": 0.0967, "step": 2410 }, { "epoch": 2.661512976256212, "grad_norm": 1.4815864562988281, "learning_rate": 7.224e-06, "loss": 0.1109, "step": 2411 }, { "epoch": 2.6626173384870238, "grad_norm": 2.017947196960449, "learning_rate": 7.2270000000000004e-06, "loss": 0.1008, "step": 2412 }, { "epoch": 2.6637217007178355, "grad_norm": 1.5696978569030762, "learning_rate": 
7.23e-06, "loss": 0.5201, "step": 2413 }, { "epoch": 2.6648260629486473, "grad_norm": 2.1766200065612793, "learning_rate": 7.233e-06, "loss": 0.4299, "step": 2414 }, { "epoch": 2.665930425179459, "grad_norm": 1.4657434225082397, "learning_rate": 7.236e-06, "loss": 0.3627, "step": 2415 }, { "epoch": 2.6670347874102704, "grad_norm": 2.322420835494995, "learning_rate": 7.239e-06, "loss": 0.3531, "step": 2416 }, { "epoch": 2.6681391496410822, "grad_norm": 1.4736905097961426, "learning_rate": 7.242e-06, "loss": 0.2883, "step": 2417 }, { "epoch": 2.669243511871894, "grad_norm": 1.4683783054351807, "learning_rate": 7.245e-06, "loss": 0.2305, "step": 2418 }, { "epoch": 2.670347874102706, "grad_norm": 2.8769001960754395, "learning_rate": 7.248000000000001e-06, "loss": 0.2489, "step": 2419 }, { "epoch": 2.6714522363335176, "grad_norm": 0.9642735123634338, "learning_rate": 7.2510000000000005e-06, "loss": 0.1274, "step": 2420 }, { "epoch": 2.672556598564329, "grad_norm": 1.9694322347640991, "learning_rate": 7.2539999999999995e-06, "loss": 0.1249, "step": 2421 }, { "epoch": 2.6736609607951407, "grad_norm": 1.0253933668136597, "learning_rate": 7.257e-06, "loss": 0.1855, "step": 2422 }, { "epoch": 2.6747653230259525, "grad_norm": 1.050761103630066, "learning_rate": 7.26e-06, "loss": 0.0893, "step": 2423 }, { "epoch": 2.6758696852567643, "grad_norm": 1.087794542312622, "learning_rate": 7.263000000000001e-06, "loss": 0.0815, "step": 2424 }, { "epoch": 2.676974047487576, "grad_norm": 1.2812758684158325, "learning_rate": 7.266e-06, "loss": 0.1037, "step": 2425 }, { "epoch": 2.6780784097183874, "grad_norm": 3.810305595397949, "learning_rate": 7.269e-06, "loss": 0.0803, "step": 2426 }, { "epoch": 2.679182771949199, "grad_norm": 6.599170684814453, "learning_rate": 7.272e-06, "loss": 0.0608, "step": 2427 }, { "epoch": 2.680287134180011, "grad_norm": 1.2408872842788696, "learning_rate": 7.275e-06, "loss": 0.0996, "step": 2428 }, { "epoch": 2.6813914964108228, "grad_norm": 
0.8873851299285889, "learning_rate": 7.2780000000000005e-06, "loss": 0.0629, "step": 2429 }, { "epoch": 2.6824958586416345, "grad_norm": 1.1267238855361938, "learning_rate": 7.281e-06, "loss": 0.0602, "step": 2430 }, { "epoch": 2.6836002208724463, "grad_norm": 1.17570161819458, "learning_rate": 7.284e-06, "loss": 0.0608, "step": 2431 }, { "epoch": 2.684704583103258, "grad_norm": 0.8922005891799927, "learning_rate": 7.287000000000001e-06, "loss": 0.0569, "step": 2432 }, { "epoch": 2.6858089453340694, "grad_norm": 0.9701915383338928, "learning_rate": 7.29e-06, "loss": 0.0759, "step": 2433 }, { "epoch": 2.6869133075648812, "grad_norm": 0.9781056046485901, "learning_rate": 7.293e-06, "loss": 0.0472, "step": 2434 }, { "epoch": 2.688017669795693, "grad_norm": 1.1904265880584717, "learning_rate": 7.296e-06, "loss": 0.0893, "step": 2435 }, { "epoch": 2.689122032026505, "grad_norm": 1.295560598373413, "learning_rate": 7.299e-06, "loss": 0.1672, "step": 2436 }, { "epoch": 2.6902263942573166, "grad_norm": 0.8920925259590149, "learning_rate": 7.3020000000000006e-06, "loss": 0.0524, "step": 2437 }, { "epoch": 2.691330756488128, "grad_norm": 1.6921473741531372, "learning_rate": 7.305e-06, "loss": 0.0702, "step": 2438 }, { "epoch": 2.6924351187189397, "grad_norm": 1.3493857383728027, "learning_rate": 7.308e-06, "loss": 0.0887, "step": 2439 }, { "epoch": 2.6935394809497515, "grad_norm": 1.2767040729522705, "learning_rate": 7.311e-06, "loss": 0.0575, "step": 2440 }, { "epoch": 2.6946438431805633, "grad_norm": 1.5336015224456787, "learning_rate": 7.314e-06, "loss": 0.1033, "step": 2441 }, { "epoch": 2.695748205411375, "grad_norm": 1.3656622171401978, "learning_rate": 7.317e-06, "loss": 0.0777, "step": 2442 }, { "epoch": 2.6968525676421864, "grad_norm": 1.9160743951797485, "learning_rate": 7.32e-06, "loss": 0.0712, "step": 2443 }, { "epoch": 2.697956929872998, "grad_norm": 0.8440284729003906, "learning_rate": 7.323000000000001e-06, "loss": 0.0688, "step": 2444 }, { "epoch": 
2.69906129210381, "grad_norm": 1.1155742406845093, "learning_rate": 7.326e-06, "loss": 0.0874, "step": 2445 }, { "epoch": 2.7001656543346217, "grad_norm": 1.2818217277526855, "learning_rate": 7.329e-06, "loss": 0.0914, "step": 2446 }, { "epoch": 2.7012700165654335, "grad_norm": 1.0528172254562378, "learning_rate": 7.332e-06, "loss": 0.0593, "step": 2447 }, { "epoch": 2.7023743787962453, "grad_norm": 1.4643986225128174, "learning_rate": 7.335e-06, "loss": 0.0727, "step": 2448 }, { "epoch": 2.703478741027057, "grad_norm": 1.617531418800354, "learning_rate": 7.338000000000001e-06, "loss": 0.0948, "step": 2449 }, { "epoch": 2.7045831032578684, "grad_norm": 1.740343689918518, "learning_rate": 7.3410000000000005e-06, "loss": 0.0679, "step": 2450 }, { "epoch": 2.70568746548868, "grad_norm": 1.2167325019836426, "learning_rate": 7.3439999999999995e-06, "loss": 0.0837, "step": 2451 }, { "epoch": 2.706791827719492, "grad_norm": 1.133772850036621, "learning_rate": 7.347e-06, "loss": 0.0739, "step": 2452 }, { "epoch": 2.707896189950304, "grad_norm": 2.161285877227783, "learning_rate": 7.35e-06, "loss": 0.0877, "step": 2453 }, { "epoch": 2.7090005521811156, "grad_norm": 2.2355220317840576, "learning_rate": 7.353000000000001e-06, "loss": 0.0843, "step": 2454 }, { "epoch": 2.710104914411927, "grad_norm": 1.0731533765792847, "learning_rate": 7.356e-06, "loss": 0.0786, "step": 2455 }, { "epoch": 2.7112092766427387, "grad_norm": 3.2082109451293945, "learning_rate": 7.359e-06, "loss": 0.0784, "step": 2456 }, { "epoch": 2.7123136388735505, "grad_norm": 1.7330260276794434, "learning_rate": 7.362e-06, "loss": 0.0837, "step": 2457 }, { "epoch": 2.7134180011043623, "grad_norm": 2.7572896480560303, "learning_rate": 7.365e-06, "loss": 0.0873, "step": 2458 }, { "epoch": 2.714522363335174, "grad_norm": 1.7266662120819092, "learning_rate": 7.3680000000000004e-06, "loss": 0.1272, "step": 2459 }, { "epoch": 2.7156267255659854, "grad_norm": 1.4719027280807495, "learning_rate": 7.371e-06, "loss": 
0.1111, "step": 2460 }, { "epoch": 2.7167310877967976, "grad_norm": 1.64420747756958, "learning_rate": 7.374e-06, "loss": 0.0829, "step": 2461 }, { "epoch": 2.717835450027609, "grad_norm": 2.336599111557007, "learning_rate": 7.377000000000001e-06, "loss": 0.1054, "step": 2462 }, { "epoch": 2.7189398122584207, "grad_norm": 1.821684718132019, "learning_rate": 7.3800000000000005e-06, "loss": 0.5313, "step": 2463 }, { "epoch": 2.7200441744892325, "grad_norm": 1.1428232192993164, "learning_rate": 7.383e-06, "loss": 0.3652, "step": 2464 }, { "epoch": 2.7211485367200443, "grad_norm": 2.4193854331970215, "learning_rate": 7.386e-06, "loss": 0.3848, "step": 2465 }, { "epoch": 2.722252898950856, "grad_norm": 2.7608046531677246, "learning_rate": 7.389e-06, "loss": 0.3228, "step": 2466 }, { "epoch": 2.7233572611816674, "grad_norm": 1.2086844444274902, "learning_rate": 7.3920000000000005e-06, "loss": 0.2582, "step": 2467 }, { "epoch": 2.724461623412479, "grad_norm": 1.445200800895691, "learning_rate": 7.395e-06, "loss": 0.3008, "step": 2468 }, { "epoch": 2.725565985643291, "grad_norm": 1.0237345695495605, "learning_rate": 7.398000000000001e-06, "loss": 0.2473, "step": 2469 }, { "epoch": 2.726670347874103, "grad_norm": 1.0695196390151978, "learning_rate": 7.401e-06, "loss": 0.1809, "step": 2470 }, { "epoch": 2.7277747101049146, "grad_norm": 0.8771357536315918, "learning_rate": 7.404e-06, "loss": 0.1181, "step": 2471 }, { "epoch": 2.728879072335726, "grad_norm": 1.2001820802688599, "learning_rate": 7.407e-06, "loss": 0.1458, "step": 2472 }, { "epoch": 2.7299834345665377, "grad_norm": 1.0876903533935547, "learning_rate": 7.41e-06, "loss": 0.1017, "step": 2473 }, { "epoch": 2.7310877967973495, "grad_norm": 0.864986777305603, "learning_rate": 7.413e-06, "loss": 0.0864, "step": 2474 }, { "epoch": 2.7321921590281613, "grad_norm": 1.213435411453247, "learning_rate": 7.416000000000001e-06, "loss": 0.1044, "step": 2475 }, { "epoch": 2.733296521258973, "grad_norm": 1.0479062795639038, 
"learning_rate": 7.419e-06, "loss": 0.1417, "step": 2476 }, { "epoch": 2.7344008834897844, "grad_norm": 1.5343916416168213, "learning_rate": 7.422e-06, "loss": 0.0914, "step": 2477 }, { "epoch": 2.7355052457205966, "grad_norm": 1.0877169370651245, "learning_rate": 7.425e-06, "loss": 0.1771, "step": 2478 }, { "epoch": 2.736609607951408, "grad_norm": 1.0996490716934204, "learning_rate": 7.428e-06, "loss": 0.0688, "step": 2479 }, { "epoch": 2.7377139701822197, "grad_norm": 0.9105908870697021, "learning_rate": 7.4310000000000005e-06, "loss": 0.0595, "step": 2480 }, { "epoch": 2.7388183324130315, "grad_norm": 2.0938608646392822, "learning_rate": 7.434e-06, "loss": 0.1053, "step": 2481 }, { "epoch": 2.7399226946438433, "grad_norm": 1.7213263511657715, "learning_rate": 7.437e-06, "loss": 0.0568, "step": 2482 }, { "epoch": 2.741027056874655, "grad_norm": 1.2819478511810303, "learning_rate": 7.44e-06, "loss": 0.0518, "step": 2483 }, { "epoch": 2.7421314191054664, "grad_norm": 0.9711375832557678, "learning_rate": 7.443e-06, "loss": 0.0628, "step": 2484 }, { "epoch": 2.743235781336278, "grad_norm": 1.2386369705200195, "learning_rate": 7.446e-06, "loss": 0.0727, "step": 2485 }, { "epoch": 2.74434014356709, "grad_norm": 0.8596223592758179, "learning_rate": 7.449e-06, "loss": 0.079, "step": 2486 }, { "epoch": 2.745444505797902, "grad_norm": 1.4888252019882202, "learning_rate": 7.452000000000001e-06, "loss": 0.0708, "step": 2487 }, { "epoch": 2.7465488680287136, "grad_norm": 1.035736322402954, "learning_rate": 7.455e-06, "loss": 0.0732, "step": 2488 }, { "epoch": 2.747653230259525, "grad_norm": 1.040420413017273, "learning_rate": 7.4579999999999996e-06, "loss": 0.0688, "step": 2489 }, { "epoch": 2.7487575924903367, "grad_norm": 0.9930934906005859, "learning_rate": 7.461e-06, "loss": 0.068, "step": 2490 }, { "epoch": 2.7498619547211485, "grad_norm": 1.2027716636657715, "learning_rate": 7.464e-06, "loss": 0.0821, "step": 2491 }, { "epoch": 2.7509663169519603, "grad_norm": 
1.265304446220398, "learning_rate": 7.467000000000001e-06, "loss": 0.0704, "step": 2492 }, { "epoch": 2.752070679182772, "grad_norm": 3.160939931869507, "learning_rate": 7.4700000000000005e-06, "loss": 0.0941, "step": 2493 }, { "epoch": 2.7531750414135834, "grad_norm": 1.5419135093688965, "learning_rate": 7.4729999999999994e-06, "loss": 0.0823, "step": 2494 }, { "epoch": 2.7542794036443956, "grad_norm": 1.133671760559082, "learning_rate": 7.476e-06, "loss": 0.0829, "step": 2495 }, { "epoch": 2.755383765875207, "grad_norm": 1.7049754858016968, "learning_rate": 7.479e-06, "loss": 0.0767, "step": 2496 }, { "epoch": 2.7564881281060187, "grad_norm": 2.0041697025299072, "learning_rate": 7.4820000000000005e-06, "loss": 0.0898, "step": 2497 }, { "epoch": 2.7575924903368305, "grad_norm": 1.210215449333191, "learning_rate": 7.485e-06, "loss": 0.0721, "step": 2498 }, { "epoch": 2.7586968525676423, "grad_norm": 1.7182682752609253, "learning_rate": 7.488e-06, "loss": 0.1031, "step": 2499 }, { "epoch": 2.759801214798454, "grad_norm": 1.0050885677337646, "learning_rate": 7.491e-06, "loss": 0.06, "step": 2500 }, { "epoch": 2.7609055770292654, "grad_norm": 0.9032816886901855, "learning_rate": 7.494e-06, "loss": 0.0566, "step": 2501 }, { "epoch": 2.762009939260077, "grad_norm": 1.180126428604126, "learning_rate": 7.497e-06, "loss": 0.0813, "step": 2502 }, { "epoch": 2.763114301490889, "grad_norm": 1.3854917287826538, "learning_rate": 7.5e-06, "loss": 0.0742, "step": 2503 }, { "epoch": 2.7642186637217008, "grad_norm": 1.3430838584899902, "learning_rate": 7.503e-06, "loss": 0.0911, "step": 2504 }, { "epoch": 2.7653230259525126, "grad_norm": 1.2347517013549805, "learning_rate": 7.506e-06, "loss": 0.1038, "step": 2505 }, { "epoch": 2.766427388183324, "grad_norm": 0.9751605987548828, "learning_rate": 7.5090000000000004e-06, "loss": 0.0899, "step": 2506 }, { "epoch": 2.7675317504141357, "grad_norm": 1.5740947723388672, "learning_rate": 7.512e-06, "loss": 0.072, "step": 2507 }, { "epoch": 
2.7686361126449475, "grad_norm": 1.107120394706726, "learning_rate": 7.515e-06, "loss": 0.0665, "step": 2508 }, { "epoch": 2.7697404748757593, "grad_norm": 1.5790448188781738, "learning_rate": 7.518e-06, "loss": 0.0694, "step": 2509 }, { "epoch": 2.770844837106571, "grad_norm": 1.4877108335494995, "learning_rate": 7.521e-06, "loss": 0.0543, "step": 2510 }, { "epoch": 2.771949199337383, "grad_norm": 2.045738458633423, "learning_rate": 7.524000000000001e-06, "loss": 0.1854, "step": 2511 }, { "epoch": 2.7730535615681946, "grad_norm": 1.334574818611145, "learning_rate": 7.527000000000001e-06, "loss": 0.0755, "step": 2512 }, { "epoch": 2.774157923799006, "grad_norm": 3.0806331634521484, "learning_rate": 7.53e-06, "loss": 0.6859, "step": 2513 }, { "epoch": 2.7752622860298177, "grad_norm": 1.7112239599227905, "learning_rate": 7.533e-06, "loss": 0.4173, "step": 2514 }, { "epoch": 2.7763666482606295, "grad_norm": 1.391003966331482, "learning_rate": 7.5359999999999995e-06, "loss": 0.359, "step": 2515 }, { "epoch": 2.7774710104914413, "grad_norm": 1.3301501274108887, "learning_rate": 7.539000000000001e-06, "loss": 0.394, "step": 2516 }, { "epoch": 2.778575372722253, "grad_norm": 1.455418348312378, "learning_rate": 7.542000000000001e-06, "loss": 0.2786, "step": 2517 }, { "epoch": 2.7796797349530644, "grad_norm": 1.3962129354476929, "learning_rate": 7.545000000000001e-06, "loss": 0.2993, "step": 2518 }, { "epoch": 2.780784097183876, "grad_norm": 1.186911940574646, "learning_rate": 7.5479999999999996e-06, "loss": 0.2027, "step": 2519 }, { "epoch": 2.781888459414688, "grad_norm": 1.1706942319869995, "learning_rate": 7.550999999999999e-06, "loss": 0.2214, "step": 2520 }, { "epoch": 2.7829928216454998, "grad_norm": 1.3682162761688232, "learning_rate": 7.554000000000001e-06, "loss": 0.1536, "step": 2521 }, { "epoch": 2.7840971838763116, "grad_norm": 1.0194228887557983, "learning_rate": 7.557000000000001e-06, "loss": 0.1107, "step": 2522 }, { "epoch": 2.785201546107123, "grad_norm": 
1.0090359449386597, "learning_rate": 7.5600000000000005e-06, "loss": 0.0993, "step": 2523 }, { "epoch": 2.7863059083379347, "grad_norm": 0.6501477956771851, "learning_rate": 7.563e-06, "loss": 0.0507, "step": 2524 }, { "epoch": 2.7874102705687465, "grad_norm": 1.547908067703247, "learning_rate": 7.565999999999999e-06, "loss": 0.0965, "step": 2525 }, { "epoch": 2.7885146327995582, "grad_norm": 1.1753640174865723, "learning_rate": 7.569000000000001e-06, "loss": 0.107, "step": 2526 }, { "epoch": 2.78961899503037, "grad_norm": 1.1998223066329956, "learning_rate": 7.5720000000000005e-06, "loss": 0.1184, "step": 2527 }, { "epoch": 2.790723357261182, "grad_norm": 0.8907150030136108, "learning_rate": 7.575e-06, "loss": 0.0738, "step": 2528 }, { "epoch": 2.7918277194919936, "grad_norm": 0.7995448708534241, "learning_rate": 7.578e-06, "loss": 0.056, "step": 2529 }, { "epoch": 2.792932081722805, "grad_norm": 0.7098666429519653, "learning_rate": 7.581e-06, "loss": 0.0524, "step": 2530 }, { "epoch": 2.7940364439536167, "grad_norm": 1.17220938205719, "learning_rate": 7.5840000000000006e-06, "loss": 0.0565, "step": 2531 }, { "epoch": 2.7951408061844285, "grad_norm": 1.9477427005767822, "learning_rate": 7.587e-06, "loss": 0.0556, "step": 2532 }, { "epoch": 2.7962451684152403, "grad_norm": 0.8447205424308777, "learning_rate": 7.59e-06, "loss": 0.0739, "step": 2533 }, { "epoch": 2.797349530646052, "grad_norm": 0.9617547988891602, "learning_rate": 7.593e-06, "loss": 0.0763, "step": 2534 }, { "epoch": 2.7984538928768634, "grad_norm": 1.061442494392395, "learning_rate": 7.596e-06, "loss": 0.0754, "step": 2535 }, { "epoch": 2.799558255107675, "grad_norm": 1.0360311269760132, "learning_rate": 7.599000000000001e-06, "loss": 0.0629, "step": 2536 }, { "epoch": 2.800662617338487, "grad_norm": 2.1451897621154785, "learning_rate": 7.602e-06, "loss": 0.0697, "step": 2537 }, { "epoch": 2.8017669795692988, "grad_norm": 1.1930640935897827, "learning_rate": 7.605e-06, "loss": 0.0804, "step": 2538 
}, { "epoch": 2.8028713418001105, "grad_norm": 1.0239790678024292, "learning_rate": 7.608e-06, "loss": 0.0581, "step": 2539 }, { "epoch": 2.803975704030922, "grad_norm": 1.0174241065979004, "learning_rate": 7.611e-06, "loss": 0.0593, "step": 2540 }, { "epoch": 2.805080066261734, "grad_norm": 6.055801868438721, "learning_rate": 7.614000000000001e-06, "loss": 0.1574, "step": 2541 }, { "epoch": 2.8061844284925455, "grad_norm": 0.9455788731575012, "learning_rate": 7.617000000000001e-06, "loss": 0.0588, "step": 2542 }, { "epoch": 2.8072887907233572, "grad_norm": 0.9007384181022644, "learning_rate": 7.62e-06, "loss": 0.0671, "step": 2543 }, { "epoch": 2.808393152954169, "grad_norm": 1.2027311325073242, "learning_rate": 7.623e-06, "loss": 0.0492, "step": 2544 }, { "epoch": 2.809497515184981, "grad_norm": 1.2507292032241821, "learning_rate": 7.6259999999999995e-06, "loss": 0.0907, "step": 2545 }, { "epoch": 2.8106018774157926, "grad_norm": 1.2507210969924927, "learning_rate": 7.629000000000001e-06, "loss": 0.0962, "step": 2546 }, { "epoch": 2.811706239646604, "grad_norm": 1.6523631811141968, "learning_rate": 7.632e-06, "loss": 0.124, "step": 2547 }, { "epoch": 2.8128106018774157, "grad_norm": 1.3170528411865234, "learning_rate": 7.635e-06, "loss": 0.0874, "step": 2548 }, { "epoch": 2.8139149641082275, "grad_norm": 0.9526091814041138, "learning_rate": 7.638e-06, "loss": 0.1076, "step": 2549 }, { "epoch": 2.8150193263390393, "grad_norm": 1.8649225234985352, "learning_rate": 7.641e-06, "loss": 0.0831, "step": 2550 }, { "epoch": 2.816123688569851, "grad_norm": 1.0859932899475098, "learning_rate": 7.644000000000002e-06, "loss": 0.0585, "step": 2551 }, { "epoch": 2.8172280508006624, "grad_norm": 1.2090907096862793, "learning_rate": 7.647000000000001e-06, "loss": 0.0768, "step": 2552 }, { "epoch": 2.818332413031474, "grad_norm": 1.2657445669174194, "learning_rate": 7.65e-06, "loss": 0.0833, "step": 2553 }, { "epoch": 2.819436775262286, "grad_norm": 1.3946410417556763, 
"learning_rate": 7.653e-06, "loss": 0.075, "step": 2554 }, { "epoch": 2.8205411374930978, "grad_norm": 1.0721924304962158, "learning_rate": 7.656e-06, "loss": 0.087, "step": 2555 }, { "epoch": 2.8216454997239095, "grad_norm": 2.746964454650879, "learning_rate": 7.659e-06, "loss": 0.0839, "step": 2556 }, { "epoch": 2.822749861954721, "grad_norm": 1.2369366884231567, "learning_rate": 7.662e-06, "loss": 0.1277, "step": 2557 }, { "epoch": 2.823854224185533, "grad_norm": 1.2733689546585083, "learning_rate": 7.665e-06, "loss": 0.1161, "step": 2558 }, { "epoch": 2.8249585864163445, "grad_norm": 1.2123035192489624, "learning_rate": 7.668e-06, "loss": 0.0873, "step": 2559 }, { "epoch": 2.8260629486471562, "grad_norm": 1.0221339464187622, "learning_rate": 7.671e-06, "loss": 0.094, "step": 2560 }, { "epoch": 2.827167310877968, "grad_norm": 1.4133131504058838, "learning_rate": 7.674000000000001e-06, "loss": 0.0587, "step": 2561 }, { "epoch": 2.82827167310878, "grad_norm": 1.593459963798523, "learning_rate": 7.677000000000001e-06, "loss": 0.092, "step": 2562 }, { "epoch": 2.8293760353395916, "grad_norm": 1.4197213649749756, "learning_rate": 7.680000000000001e-06, "loss": 0.5393, "step": 2563 }, { "epoch": 2.830480397570403, "grad_norm": 1.2752515077590942, "learning_rate": 7.683e-06, "loss": 0.3687, "step": 2564 }, { "epoch": 2.8315847598012147, "grad_norm": 1.351129174232483, "learning_rate": 7.685999999999999e-06, "loss": 0.3393, "step": 2565 }, { "epoch": 2.8326891220320265, "grad_norm": 1.2672885656356812, "learning_rate": 7.688999999999999e-06, "loss": 0.309, "step": 2566 }, { "epoch": 2.8337934842628383, "grad_norm": 1.5463016033172607, "learning_rate": 7.692e-06, "loss": 0.3038, "step": 2567 }, { "epoch": 2.83489784649365, "grad_norm": 2.156524181365967, "learning_rate": 7.695e-06, "loss": 0.3637, "step": 2568 }, { "epoch": 2.8360022087244614, "grad_norm": 0.9684768319129944, "learning_rate": 7.698e-06, "loss": 0.1814, "step": 2569 }, { "epoch": 2.837106570955273, 
"grad_norm": 2.1924619674682617, "learning_rate": 7.701e-06, "loss": 0.2266, "step": 2570 }, { "epoch": 2.838210933186085, "grad_norm": 1.105035424232483, "learning_rate": 7.704e-06, "loss": 0.1262, "step": 2571 }, { "epoch": 2.8393152954168968, "grad_norm": 1.106803297996521, "learning_rate": 7.707000000000001e-06, "loss": 0.1198, "step": 2572 }, { "epoch": 2.8404196576477085, "grad_norm": 0.8679218888282776, "learning_rate": 7.71e-06, "loss": 0.0705, "step": 2573 }, { "epoch": 2.84152401987852, "grad_norm": 0.9221953749656677, "learning_rate": 7.713e-06, "loss": 0.083, "step": 2574 }, { "epoch": 2.842628382109332, "grad_norm": 2.96393084526062, "learning_rate": 7.716e-06, "loss": 0.0611, "step": 2575 }, { "epoch": 2.8437327443401434, "grad_norm": 1.7312467098236084, "learning_rate": 7.719e-06, "loss": 0.0796, "step": 2576 }, { "epoch": 2.8448371065709552, "grad_norm": 1.3390634059906006, "learning_rate": 7.722e-06, "loss": 0.0696, "step": 2577 }, { "epoch": 2.845941468801767, "grad_norm": 1.297659993171692, "learning_rate": 7.725e-06, "loss": 0.0744, "step": 2578 }, { "epoch": 2.847045831032579, "grad_norm": 1.8528318405151367, "learning_rate": 7.728e-06, "loss": 0.1161, "step": 2579 }, { "epoch": 2.8481501932633906, "grad_norm": 0.9049222469329834, "learning_rate": 7.731e-06, "loss": 0.0814, "step": 2580 }, { "epoch": 2.849254555494202, "grad_norm": 2.097262144088745, "learning_rate": 7.733999999999999e-06, "loss": 0.0815, "step": 2581 }, { "epoch": 2.8503589177250137, "grad_norm": 2.364151954650879, "learning_rate": 7.737e-06, "loss": 0.114, "step": 2582 }, { "epoch": 2.8514632799558255, "grad_norm": 1.0760868787765503, "learning_rate": 7.74e-06, "loss": 0.0557, "step": 2583 }, { "epoch": 2.8525676421866373, "grad_norm": 1.068156123161316, "learning_rate": 7.743e-06, "loss": 0.0659, "step": 2584 }, { "epoch": 2.853672004417449, "grad_norm": 1.4115129709243774, "learning_rate": 7.746e-06, "loss": 0.0887, "step": 2585 }, { "epoch": 2.8547763666482604, 
"grad_norm": 1.0034053325653076, "learning_rate": 7.749e-06, "loss": 0.0773, "step": 2586 }, { "epoch": 2.855880728879072, "grad_norm": 0.9743639230728149, "learning_rate": 7.752000000000001e-06, "loss": 0.0776, "step": 2587 }, { "epoch": 2.856985091109884, "grad_norm": 1.7806628942489624, "learning_rate": 7.755000000000001e-06, "loss": 0.1086, "step": 2588 }, { "epoch": 2.8580894533406958, "grad_norm": 1.7747935056686401, "learning_rate": 7.758000000000001e-06, "loss": 0.0554, "step": 2589 }, { "epoch": 2.8591938155715075, "grad_norm": 1.4947857856750488, "learning_rate": 7.760999999999999e-06, "loss": 0.1166, "step": 2590 }, { "epoch": 2.8602981778023193, "grad_norm": 0.9902533292770386, "learning_rate": 7.763999999999999e-06, "loss": 0.0713, "step": 2591 }, { "epoch": 2.861402540033131, "grad_norm": 1.4606404304504395, "learning_rate": 7.767e-06, "loss": 0.0598, "step": 2592 }, { "epoch": 2.8625069022639424, "grad_norm": 0.8974879384040833, "learning_rate": 7.77e-06, "loss": 0.0675, "step": 2593 }, { "epoch": 2.8636112644947542, "grad_norm": 0.921290934085846, "learning_rate": 7.773e-06, "loss": 0.0695, "step": 2594 }, { "epoch": 2.864715626725566, "grad_norm": 1.1049728393554688, "learning_rate": 7.776e-06, "loss": 0.0711, "step": 2595 }, { "epoch": 2.865819988956378, "grad_norm": 1.6609610319137573, "learning_rate": 7.779e-06, "loss": 0.0618, "step": 2596 }, { "epoch": 2.8669243511871896, "grad_norm": 1.2364017963409424, "learning_rate": 7.782000000000001e-06, "loss": 0.0975, "step": 2597 }, { "epoch": 2.868028713418001, "grad_norm": 1.0662833452224731, "learning_rate": 7.785000000000001e-06, "loss": 0.0706, "step": 2598 }, { "epoch": 2.8691330756488127, "grad_norm": 1.501939296722412, "learning_rate": 7.788e-06, "loss": 0.0929, "step": 2599 }, { "epoch": 2.8702374378796245, "grad_norm": 0.8619601130485535, "learning_rate": 7.791e-06, "loss": 0.0666, "step": 2600 }, { "epoch": 2.8713418001104363, "grad_norm": 1.3998209238052368, "learning_rate": 7.794e-06, 
"loss": 0.076, "step": 2601 }, { "epoch": 2.872446162341248, "grad_norm": 1.1516207456588745, "learning_rate": 7.797e-06, "loss": 0.0637, "step": 2602 }, { "epoch": 2.8735505245720594, "grad_norm": 1.1210463047027588, "learning_rate": 7.8e-06, "loss": 0.0896, "step": 2603 }, { "epoch": 2.874654886802871, "grad_norm": 1.042142629623413, "learning_rate": 7.803e-06, "loss": 0.0777, "step": 2604 }, { "epoch": 2.875759249033683, "grad_norm": 1.0922828912734985, "learning_rate": 7.806e-06, "loss": 0.0594, "step": 2605 }, { "epoch": 2.8768636112644947, "grad_norm": 0.8793681263923645, "learning_rate": 7.809e-06, "loss": 0.0784, "step": 2606 }, { "epoch": 2.8779679734953065, "grad_norm": 1.385758399963379, "learning_rate": 7.812e-06, "loss": 0.0629, "step": 2607 }, { "epoch": 2.8790723357261183, "grad_norm": 2.2274019718170166, "learning_rate": 7.815e-06, "loss": 0.102, "step": 2608 }, { "epoch": 2.88017669795693, "grad_norm": 0.7669299244880676, "learning_rate": 7.818e-06, "loss": 0.0578, "step": 2609 }, { "epoch": 2.8812810601877414, "grad_norm": 2.4637885093688965, "learning_rate": 7.821e-06, "loss": 0.1275, "step": 2610 }, { "epoch": 2.8823854224185532, "grad_norm": 1.8848775625228882, "learning_rate": 7.824e-06, "loss": 0.1049, "step": 2611 }, { "epoch": 2.883489784649365, "grad_norm": 3.3265488147735596, "learning_rate": 7.827000000000001e-06, "loss": 0.1198, "step": 2612 }, { "epoch": 2.884594146880177, "grad_norm": 1.569579005241394, "learning_rate": 7.830000000000001e-06, "loss": 0.4767, "step": 2613 }, { "epoch": 2.8856985091109886, "grad_norm": 1.6813092231750488, "learning_rate": 7.833e-06, "loss": 0.3247, "step": 2614 }, { "epoch": 2.8868028713418, "grad_norm": 2.1126744747161865, "learning_rate": 7.836e-06, "loss": 0.379, "step": 2615 }, { "epoch": 2.8879072335726117, "grad_norm": 1.7838501930236816, "learning_rate": 7.838999999999999e-06, "loss": 0.3827, "step": 2616 }, { "epoch": 2.8890115958034235, "grad_norm": 1.2981007099151611, "learning_rate": 
7.842e-06, "loss": 0.2252, "step": 2617 }, { "epoch": 2.8901159580342353, "grad_norm": 1.5977956056594849, "learning_rate": 7.845e-06, "loss": 0.23, "step": 2618 }, { "epoch": 2.891220320265047, "grad_norm": 1.5392519235610962, "learning_rate": 7.848e-06, "loss": 0.2184, "step": 2619 }, { "epoch": 2.8923246824958584, "grad_norm": 1.4241431951522827, "learning_rate": 7.851e-06, "loss": 0.2782, "step": 2620 }, { "epoch": 2.8934290447266706, "grad_norm": 0.8899269700050354, "learning_rate": 7.854e-06, "loss": 0.1462, "step": 2621 }, { "epoch": 2.894533406957482, "grad_norm": 1.0126371383666992, "learning_rate": 7.857000000000001e-06, "loss": 0.1124, "step": 2622 }, { "epoch": 2.8956377691882937, "grad_norm": 0.7754946351051331, "learning_rate": 7.860000000000001e-06, "loss": 0.085, "step": 2623 }, { "epoch": 2.8967421314191055, "grad_norm": 0.9814462661743164, "learning_rate": 7.863e-06, "loss": 0.0749, "step": 2624 }, { "epoch": 2.8978464936499173, "grad_norm": 0.8614133596420288, "learning_rate": 7.866e-06, "loss": 0.1173, "step": 2625 }, { "epoch": 2.898950855880729, "grad_norm": 0.8107333183288574, "learning_rate": 7.868999999999999e-06, "loss": 0.0633, "step": 2626 }, { "epoch": 2.9000552181115404, "grad_norm": 1.0246301889419556, "learning_rate": 7.872e-06, "loss": 0.1184, "step": 2627 }, { "epoch": 2.901159580342352, "grad_norm": 1.1349176168441772, "learning_rate": 7.875e-06, "loss": 0.0657, "step": 2628 }, { "epoch": 2.902263942573164, "grad_norm": 0.6216087937355042, "learning_rate": 7.878e-06, "loss": 0.0513, "step": 2629 }, { "epoch": 2.903368304803976, "grad_norm": 1.449971318244934, "learning_rate": 7.881e-06, "loss": 0.1092, "step": 2630 }, { "epoch": 2.9044726670347876, "grad_norm": 0.9359760880470276, "learning_rate": 7.884e-06, "loss": 0.0672, "step": 2631 }, { "epoch": 2.905577029265599, "grad_norm": 0.7296327352523804, "learning_rate": 7.887000000000001e-06, "loss": 0.0649, "step": 2632 }, { "epoch": 2.9066813914964107, "grad_norm": 
0.9642044901847839, "learning_rate": 7.89e-06, "loss": 0.0599, "step": 2633 }, { "epoch": 2.9077857537272225, "grad_norm": 0.7320525050163269, "learning_rate": 7.893e-06, "loss": 0.0547, "step": 2634 }, { "epoch": 2.9088901159580343, "grad_norm": 1.8813438415527344, "learning_rate": 7.896e-06, "loss": 0.0727, "step": 2635 }, { "epoch": 2.909994478188846, "grad_norm": 0.9470276236534119, "learning_rate": 7.899e-06, "loss": 0.0799, "step": 2636 }, { "epoch": 2.9110988404196574, "grad_norm": 1.1047546863555908, "learning_rate": 7.902000000000002e-06, "loss": 0.0686, "step": 2637 }, { "epoch": 2.9122032026504696, "grad_norm": 1.4874777793884277, "learning_rate": 7.905000000000001e-06, "loss": 0.063, "step": 2638 }, { "epoch": 2.913307564881281, "grad_norm": 1.038008689880371, "learning_rate": 7.908e-06, "loss": 0.0658, "step": 2639 }, { "epoch": 2.9144119271120927, "grad_norm": 1.2131003141403198, "learning_rate": 7.911e-06, "loss": 0.0583, "step": 2640 }, { "epoch": 2.9155162893429045, "grad_norm": 0.7700634002685547, "learning_rate": 7.913999999999999e-06, "loss": 0.0587, "step": 2641 }, { "epoch": 2.9166206515737163, "grad_norm": 1.1658564805984497, "learning_rate": 7.917e-06, "loss": 0.0764, "step": 2642 }, { "epoch": 2.917725013804528, "grad_norm": 1.198525071144104, "learning_rate": 7.92e-06, "loss": 0.0843, "step": 2643 }, { "epoch": 2.9188293760353394, "grad_norm": 1.507452130317688, "learning_rate": 7.923e-06, "loss": 0.0886, "step": 2644 }, { "epoch": 2.919933738266151, "grad_norm": 0.8054594397544861, "learning_rate": 7.926e-06, "loss": 0.0514, "step": 2645 }, { "epoch": 2.921038100496963, "grad_norm": 1.1121033430099487, "learning_rate": 7.929e-06, "loss": 0.0627, "step": 2646 }, { "epoch": 2.922142462727775, "grad_norm": 1.2701473236083984, "learning_rate": 7.932000000000001e-06, "loss": 0.1508, "step": 2647 }, { "epoch": 2.9232468249585866, "grad_norm": 1.06825590133667, "learning_rate": 7.935000000000001e-06, "loss": 0.0642, "step": 2648 }, { "epoch": 
2.924351187189398, "grad_norm": 0.963325560092926, "learning_rate": 7.938000000000001e-06, "loss": 0.0939, "step": 2649 }, { "epoch": 2.9254555494202097, "grad_norm": 0.9950021505355835, "learning_rate": 7.941e-06, "loss": 0.0712, "step": 2650 }, { "epoch": 2.9265599116510215, "grad_norm": 0.9780741333961487, "learning_rate": 7.943999999999999e-06, "loss": 0.0551, "step": 2651 }, { "epoch": 2.9276642738818333, "grad_norm": 0.9173033237457275, "learning_rate": 7.947e-06, "loss": 0.0751, "step": 2652 }, { "epoch": 2.928768636112645, "grad_norm": 1.4638434648513794, "learning_rate": 7.95e-06, "loss": 0.101, "step": 2653 }, { "epoch": 2.9298729983434564, "grad_norm": 0.9101991057395935, "learning_rate": 7.953e-06, "loss": 0.0786, "step": 2654 }, { "epoch": 2.9309773605742686, "grad_norm": 1.2556639909744263, "learning_rate": 7.956e-06, "loss": 0.0788, "step": 2655 }, { "epoch": 2.93208172280508, "grad_norm": 1.0603340864181519, "learning_rate": 7.959e-06, "loss": 0.0952, "step": 2656 }, { "epoch": 2.9331860850358917, "grad_norm": 1.0343492031097412, "learning_rate": 7.962000000000001e-06, "loss": 0.0476, "step": 2657 }, { "epoch": 2.9342904472667035, "grad_norm": 1.3394705057144165, "learning_rate": 7.965e-06, "loss": 0.0997, "step": 2658 }, { "epoch": 2.9353948094975153, "grad_norm": 1.2809568643569946, "learning_rate": 7.968e-06, "loss": 0.0914, "step": 2659 }, { "epoch": 2.936499171728327, "grad_norm": 1.394665241241455, "learning_rate": 7.971e-06, "loss": 0.0985, "step": 2660 }, { "epoch": 2.9376035339591384, "grad_norm": 0.9395080804824829, "learning_rate": 7.974e-06, "loss": 0.0797, "step": 2661 }, { "epoch": 2.93870789618995, "grad_norm": 1.2041558027267456, "learning_rate": 7.977000000000002e-06, "loss": 0.0854, "step": 2662 }, { "epoch": 2.939812258420762, "grad_norm": 7.712404727935791, "learning_rate": 7.98e-06, "loss": 0.5639, "step": 2663 }, { "epoch": 2.9409166206515738, "grad_norm": 2.1502432823181152, "learning_rate": 7.983e-06, "loss": 0.4757, "step": 
2664 }, { "epoch": 2.9420209828823856, "grad_norm": 1.4308788776397705, "learning_rate": 7.986e-06, "loss": 0.382, "step": 2665 }, { "epoch": 2.943125345113197, "grad_norm": 1.6594635248184204, "learning_rate": 7.989e-06, "loss": 0.2555, "step": 2666 }, { "epoch": 2.9442297073440087, "grad_norm": 4.602372169494629, "learning_rate": 7.992e-06, "loss": 0.3178, "step": 2667 }, { "epoch": 2.9453340695748205, "grad_norm": 1.366207242012024, "learning_rate": 7.995e-06, "loss": 0.3158, "step": 2668 }, { "epoch": 2.9464384318056323, "grad_norm": 1.28496515750885, "learning_rate": 7.998e-06, "loss": 0.1806, "step": 2669 }, { "epoch": 2.947542794036444, "grad_norm": 1.0980256795883179, "learning_rate": 8.001e-06, "loss": 0.159, "step": 2670 }, { "epoch": 2.948647156267256, "grad_norm": 0.9766718745231628, "learning_rate": 8.004e-06, "loss": 0.1276, "step": 2671 }, { "epoch": 2.9497515184980676, "grad_norm": 0.9944455027580261, "learning_rate": 8.007000000000001e-06, "loss": 0.1545, "step": 2672 }, { "epoch": 2.950855880728879, "grad_norm": 0.9059673547744751, "learning_rate": 8.010000000000001e-06, "loss": 0.0957, "step": 2673 }, { "epoch": 2.9519602429596907, "grad_norm": 1.226256012916565, "learning_rate": 8.013000000000001e-06, "loss": 0.0857, "step": 2674 }, { "epoch": 2.9530646051905025, "grad_norm": 0.8184188008308411, "learning_rate": 8.016e-06, "loss": 0.0767, "step": 2675 }, { "epoch": 2.9541689674213143, "grad_norm": 0.9786036610603333, "learning_rate": 8.018999999999999e-06, "loss": 0.0758, "step": 2676 }, { "epoch": 2.955273329652126, "grad_norm": 0.9878107309341431, "learning_rate": 8.022e-06, "loss": 0.0793, "step": 2677 }, { "epoch": 2.9563776918829374, "grad_norm": 0.8087683320045471, "learning_rate": 8.025e-06, "loss": 0.064, "step": 2678 }, { "epoch": 2.957482054113749, "grad_norm": 0.7070274949073792, "learning_rate": 8.028e-06, "loss": 0.0636, "step": 2679 }, { "epoch": 2.958586416344561, "grad_norm": 1.3051292896270752, "learning_rate": 8.031e-06, 
"loss": 0.0936, "step": 2680 }, { "epoch": 2.9596907785753728, "grad_norm": 1.2195472717285156, "learning_rate": 8.034e-06, "loss": 0.0927, "step": 2681 }, { "epoch": 2.9607951408061846, "grad_norm": 1.3215289115905762, "learning_rate": 8.037000000000001e-06, "loss": 0.0601, "step": 2682 }, { "epoch": 2.961899503036996, "grad_norm": 0.9113346338272095, "learning_rate": 8.040000000000001e-06, "loss": 0.0639, "step": 2683 }, { "epoch": 2.9630038652678077, "grad_norm": 1.0636694431304932, "learning_rate": 8.043e-06, "loss": 0.0725, "step": 2684 }, { "epoch": 2.9641082274986195, "grad_norm": 1.0889062881469727, "learning_rate": 8.046e-06, "loss": 0.0699, "step": 2685 }, { "epoch": 2.9652125897294312, "grad_norm": 1.265659213066101, "learning_rate": 8.049e-06, "loss": 0.0668, "step": 2686 }, { "epoch": 2.966316951960243, "grad_norm": 0.9548044204711914, "learning_rate": 8.052000000000002e-06, "loss": 0.0684, "step": 2687 }, { "epoch": 2.967421314191055, "grad_norm": 0.9435430765151978, "learning_rate": 8.055e-06, "loss": 0.0602, "step": 2688 }, { "epoch": 2.9685256764218666, "grad_norm": 3.2114100456237793, "learning_rate": 8.058e-06, "loss": 0.0738, "step": 2689 }, { "epoch": 2.969630038652678, "grad_norm": 0.9333289265632629, "learning_rate": 8.061e-06, "loss": 0.0676, "step": 2690 }, { "epoch": 2.9707344008834897, "grad_norm": 1.5601953268051147, "learning_rate": 8.064e-06, "loss": 0.0743, "step": 2691 }, { "epoch": 2.9718387631143015, "grad_norm": 0.9141520261764526, "learning_rate": 8.067e-06, "loss": 0.0636, "step": 2692 }, { "epoch": 2.9729431253451133, "grad_norm": 1.5116991996765137, "learning_rate": 8.07e-06, "loss": 0.0783, "step": 2693 }, { "epoch": 2.974047487575925, "grad_norm": 0.9418911337852478, "learning_rate": 8.073e-06, "loss": 0.0466, "step": 2694 }, { "epoch": 2.9751518498067364, "grad_norm": 1.1600829362869263, "learning_rate": 8.076e-06, "loss": 0.0726, "step": 2695 }, { "epoch": 2.976256212037548, "grad_norm": 1.0832732915878296, 
"learning_rate": 8.079e-06, "loss": 0.0607, "step": 2696 }, { "epoch": 2.97736057426836, "grad_norm": 1.67473566532135, "learning_rate": 8.082e-06, "loss": 0.0743, "step": 2697 }, { "epoch": 2.9784649364991718, "grad_norm": 1.4080766439437866, "learning_rate": 8.085000000000001e-06, "loss": 0.0704, "step": 2698 }, { "epoch": 2.9795692987299836, "grad_norm": 0.8095139265060425, "learning_rate": 8.088000000000001e-06, "loss": 0.0501, "step": 2699 }, { "epoch": 2.980673660960795, "grad_norm": 1.6301507949829102, "learning_rate": 8.091e-06, "loss": 0.0959, "step": 2700 }, { "epoch": 2.981778023191607, "grad_norm": 1.0550761222839355, "learning_rate": 8.093999999999999e-06, "loss": 0.0742, "step": 2701 }, { "epoch": 2.9828823854224185, "grad_norm": 0.8731775283813477, "learning_rate": 8.096999999999999e-06, "loss": 0.0543, "step": 2702 }, { "epoch": 2.9839867476532302, "grad_norm": 1.0798619985580444, "learning_rate": 8.1e-06, "loss": 0.0521, "step": 2703 }, { "epoch": 2.985091109884042, "grad_norm": 1.2782052755355835, "learning_rate": 8.103e-06, "loss": 0.0766, "step": 2704 }, { "epoch": 2.986195472114854, "grad_norm": 1.150305986404419, "learning_rate": 8.106e-06, "loss": 0.1084, "step": 2705 }, { "epoch": 2.9872998343456656, "grad_norm": 3.032325267791748, "learning_rate": 8.109e-06, "loss": 0.0695, "step": 2706 }, { "epoch": 2.988404196576477, "grad_norm": 1.2765469551086426, "learning_rate": 8.112e-06, "loss": 0.0811, "step": 2707 }, { "epoch": 2.9895085588072887, "grad_norm": 1.210927963256836, "learning_rate": 8.115000000000001e-06, "loss": 0.0786, "step": 2708 }, { "epoch": 2.9906129210381005, "grad_norm": 2.02043080329895, "learning_rate": 8.118000000000001e-06, "loss": 0.1125, "step": 2709 }, { "epoch": 2.9917172832689123, "grad_norm": 1.471185564994812, "learning_rate": 8.121e-06, "loss": 0.0793, "step": 2710 }, { "epoch": 2.992821645499724, "grad_norm": 1.1196860074996948, "learning_rate": 8.124e-06, "loss": 0.0934, "step": 2711 }, { "epoch": 
2.9939260077305354, "grad_norm": 1.5638766288757324, "learning_rate": 8.126999999999999e-06, "loss": 0.0739, "step": 2712 }, { "epoch": 2.995030369961347, "grad_norm": 1.8156622648239136, "learning_rate": 8.13e-06, "loss": 0.2631, "step": 2713 }, { "epoch": 2.996134732192159, "grad_norm": 0.851960301399231, "learning_rate": 8.133e-06, "loss": 0.0677, "step": 2714 }, { "epoch": 2.9972390944229708, "grad_norm": 0.7567382454872131, "learning_rate": 8.136e-06, "loss": 0.0504, "step": 2715 }, { "epoch": 2.9983434566537825, "grad_norm": 1.1006011962890625, "learning_rate": 8.139e-06, "loss": 0.0909, "step": 2716 }, { "epoch": 2.999447818884594, "grad_norm": 1.4241687059402466, "learning_rate": 8.142e-06, "loss": 0.0804, "step": 2717 }, { "epoch": 3.0, "grad_norm": 1.5661710500717163, "learning_rate": 8.145e-06, "loss": 0.0377, "step": 2718 }, { "epoch": 3.001104362230812, "grad_norm": 1.6648496389389038, "learning_rate": 8.148e-06, "loss": 0.498, "step": 2719 }, { "epoch": 3.0022087244616236, "grad_norm": 1.2777899503707886, "learning_rate": 8.151e-06, "loss": 0.3313, "step": 2720 }, { "epoch": 3.003313086692435, "grad_norm": 1.7911511659622192, "learning_rate": 8.154e-06, "loss": 0.3032, "step": 2721 }, { "epoch": 3.0044174489232467, "grad_norm": 1.2150834798812866, "learning_rate": 8.157e-06, "loss": 0.2414, "step": 2722 }, { "epoch": 3.0055218111540585, "grad_norm": 1.208428978919983, "learning_rate": 8.160000000000001e-06, "loss": 0.2595, "step": 2723 }, { "epoch": 3.0066261733848703, "grad_norm": 1.2200771570205688, "learning_rate": 8.163000000000001e-06, "loss": 0.1904, "step": 2724 }, { "epoch": 3.007730535615682, "grad_norm": 1.3716362714767456, "learning_rate": 8.166e-06, "loss": 0.1984, "step": 2725 }, { "epoch": 3.008834897846494, "grad_norm": 1.4231876134872437, "learning_rate": 8.169e-06, "loss": 0.2289, "step": 2726 }, { "epoch": 3.009939260077305, "grad_norm": 12.700443267822266, "learning_rate": 8.171999999999999e-06, "loss": 0.1172, "step": 2727 }, { 
"epoch": 3.011043622308117, "grad_norm": 1.1469944715499878, "learning_rate": 8.175e-06, "loss": 0.1167, "step": 2728 }, { "epoch": 3.0121479845389287, "grad_norm": 2.3010544776916504, "learning_rate": 8.178e-06, "loss": 0.0635, "step": 2729 }, { "epoch": 3.0132523467697405, "grad_norm": 0.7627771496772766, "learning_rate": 8.181e-06, "loss": 0.0643, "step": 2730 }, { "epoch": 3.0143567090005523, "grad_norm": 0.7325945496559143, "learning_rate": 8.184e-06, "loss": 0.0555, "step": 2731 }, { "epoch": 3.015461071231364, "grad_norm": 0.7140897512435913, "learning_rate": 8.187e-06, "loss": 0.0527, "step": 2732 }, { "epoch": 3.0165654334621754, "grad_norm": 0.7269498109817505, "learning_rate": 8.190000000000001e-06, "loss": 0.0544, "step": 2733 }, { "epoch": 3.017669795692987, "grad_norm": 0.7689187526702881, "learning_rate": 8.193000000000001e-06, "loss": 0.0579, "step": 2734 }, { "epoch": 3.018774157923799, "grad_norm": 0.9821591377258301, "learning_rate": 8.196e-06, "loss": 0.0634, "step": 2735 }, { "epoch": 3.019878520154611, "grad_norm": 0.7853779792785645, "learning_rate": 8.199e-06, "loss": 0.0534, "step": 2736 }, { "epoch": 3.0209828823854226, "grad_norm": 1.0808976888656616, "learning_rate": 8.201999999999999e-06, "loss": 0.0575, "step": 2737 }, { "epoch": 3.022087244616234, "grad_norm": 1.2281590700149536, "learning_rate": 8.205e-06, "loss": 0.0504, "step": 2738 }, { "epoch": 3.0231916068470457, "grad_norm": 0.6680381298065186, "learning_rate": 8.208e-06, "loss": 0.0584, "step": 2739 }, { "epoch": 3.0242959690778575, "grad_norm": 0.9311277270317078, "learning_rate": 8.211e-06, "loss": 0.0654, "step": 2740 }, { "epoch": 3.0254003313086693, "grad_norm": 1.0273125171661377, "learning_rate": 8.214e-06, "loss": 0.0691, "step": 2741 }, { "epoch": 3.026504693539481, "grad_norm": 0.7859429121017456, "learning_rate": 8.217e-06, "loss": 0.0654, "step": 2742 }, { "epoch": 3.027609055770293, "grad_norm": 0.917801558971405, "learning_rate": 8.220000000000001e-06, "loss": 
0.0662, "step": 2743 }, { "epoch": 3.028713418001104, "grad_norm": 0.8219058513641357, "learning_rate": 8.223e-06, "loss": 0.0467, "step": 2744 }, { "epoch": 3.029817780231916, "grad_norm": 2.4817254543304443, "learning_rate": 8.226e-06, "loss": 0.0814, "step": 2745 }, { "epoch": 3.0309221424627277, "grad_norm": 0.8581465482711792, "learning_rate": 8.229e-06, "loss": 0.0632, "step": 2746 }, { "epoch": 3.0320265046935395, "grad_norm": 1.404650330543518, "learning_rate": 8.232e-06, "loss": 0.069, "step": 2747 }, { "epoch": 3.0331308669243513, "grad_norm": 1.3947467803955078, "learning_rate": 8.235000000000002e-06, "loss": 0.0795, "step": 2748 }, { "epoch": 3.034235229155163, "grad_norm": 0.7720677852630615, "learning_rate": 8.238e-06, "loss": 0.0611, "step": 2749 }, { "epoch": 3.0353395913859744, "grad_norm": 1.0838021039962769, "learning_rate": 8.241e-06, "loss": 0.0591, "step": 2750 }, { "epoch": 3.036443953616786, "grad_norm": 1.4698351621627808, "learning_rate": 8.244e-06, "loss": 0.0472, "step": 2751 }, { "epoch": 3.037548315847598, "grad_norm": 1.3720825910568237, "learning_rate": 8.246999999999999e-06, "loss": 0.0674, "step": 2752 }, { "epoch": 3.0386526780784098, "grad_norm": 0.9728357791900635, "learning_rate": 8.25e-06, "loss": 0.0687, "step": 2753 }, { "epoch": 3.0397570403092216, "grad_norm": 0.9323943257331848, "learning_rate": 8.253e-06, "loss": 0.0554, "step": 2754 }, { "epoch": 3.0408614025400333, "grad_norm": 1.156424880027771, "learning_rate": 8.256e-06, "loss": 0.0576, "step": 2755 }, { "epoch": 3.0419657647708447, "grad_norm": 1.0561206340789795, "learning_rate": 8.259e-06, "loss": 0.07, "step": 2756 }, { "epoch": 3.0430701270016565, "grad_norm": 0.828457236289978, "learning_rate": 8.262e-06, "loss": 0.0799, "step": 2757 }, { "epoch": 3.0441744892324683, "grad_norm": 0.7013291716575623, "learning_rate": 8.265000000000001e-06, "loss": 0.0467, "step": 2758 }, { "epoch": 3.04527885146328, "grad_norm": 0.9205382466316223, "learning_rate": 
8.268000000000001e-06, "loss": 0.0724, "step": 2759 }, { "epoch": 3.046383213694092, "grad_norm": 1.1082675457000732, "learning_rate": 8.271000000000001e-06, "loss": 0.0895, "step": 2760 }, { "epoch": 3.047487575924903, "grad_norm": 1.7504079341888428, "learning_rate": 8.274e-06, "loss": 0.0833, "step": 2761 }, { "epoch": 3.048591938155715, "grad_norm": 0.9424097537994385, "learning_rate": 8.276999999999999e-06, "loss": 0.0542, "step": 2762 }, { "epoch": 3.0496963003865267, "grad_norm": 1.6353352069854736, "learning_rate": 8.28e-06, "loss": 0.124, "step": 2763 }, { "epoch": 3.0508006626173385, "grad_norm": 1.8600677251815796, "learning_rate": 8.283e-06, "loss": 0.1009, "step": 2764 }, { "epoch": 3.0519050248481503, "grad_norm": 0.9176443815231323, "learning_rate": 8.286e-06, "loss": 0.0564, "step": 2765 }, { "epoch": 3.053009387078962, "grad_norm": 1.207424521446228, "learning_rate": 8.289e-06, "loss": 0.0735, "step": 2766 }, { "epoch": 3.0541137493097734, "grad_norm": 1.0894333124160767, "learning_rate": 8.292e-06, "loss": 0.0512, "step": 2767 }, { "epoch": 3.055218111540585, "grad_norm": 1.242913007736206, "learning_rate": 8.295000000000001e-06, "loss": 0.1317, "step": 2768 }, { "epoch": 3.056322473771397, "grad_norm": 2.5578198432922363, "learning_rate": 8.298000000000001e-06, "loss": 0.3898, "step": 2769 }, { "epoch": 3.0574268360022088, "grad_norm": 1.2042913436889648, "learning_rate": 8.301e-06, "loss": 0.4086, "step": 2770 }, { "epoch": 3.0585311982330206, "grad_norm": 1.859093189239502, "learning_rate": 8.304e-06, "loss": 0.3707, "step": 2771 }, { "epoch": 3.0596355604638323, "grad_norm": 1.79457688331604, "learning_rate": 8.307e-06, "loss": 0.2845, "step": 2772 }, { "epoch": 3.0607399226946437, "grad_norm": 1.6472852230072021, "learning_rate": 8.310000000000002e-06, "loss": 0.3349, "step": 2773 }, { "epoch": 3.0618442849254555, "grad_norm": 1.6244773864746094, "learning_rate": 8.313e-06, "loss": 0.2133, "step": 2774 }, { "epoch": 3.0629486471562672, 
"grad_norm": 1.527840256690979, "learning_rate": 8.316e-06, "loss": 0.1586, "step": 2775 }, { "epoch": 3.064053009387079, "grad_norm": 1.1184673309326172, "learning_rate": 8.319e-06, "loss": 0.1282, "step": 2776 }, { "epoch": 3.065157371617891, "grad_norm": 1.3219903707504272, "learning_rate": 8.322e-06, "loss": 0.1519, "step": 2777 }, { "epoch": 3.0662617338487026, "grad_norm": 0.8160322904586792, "learning_rate": 8.325e-06, "loss": 0.0876, "step": 2778 }, { "epoch": 3.067366096079514, "grad_norm": 0.6622399091720581, "learning_rate": 8.328e-06, "loss": 0.0661, "step": 2779 }, { "epoch": 3.0684704583103257, "grad_norm": 0.7524028420448303, "learning_rate": 8.331e-06, "loss": 0.0638, "step": 2780 }, { "epoch": 3.0695748205411375, "grad_norm": 0.8186447620391846, "learning_rate": 8.334e-06, "loss": 0.0618, "step": 2781 }, { "epoch": 3.0706791827719493, "grad_norm": 0.7394313216209412, "learning_rate": 8.337e-06, "loss": 0.0622, "step": 2782 }, { "epoch": 3.071783545002761, "grad_norm": 0.772883951663971, "learning_rate": 8.340000000000001e-06, "loss": 0.0438, "step": 2783 }, { "epoch": 3.0728879072335724, "grad_norm": 1.4348456859588623, "learning_rate": 8.343000000000001e-06, "loss": 0.0836, "step": 2784 }, { "epoch": 3.073992269464384, "grad_norm": 1.317693829536438, "learning_rate": 8.346000000000001e-06, "loss": 0.0533, "step": 2785 }, { "epoch": 3.075096631695196, "grad_norm": 0.7317615151405334, "learning_rate": 8.349e-06, "loss": 0.0411, "step": 2786 }, { "epoch": 3.0762009939260078, "grad_norm": 0.6898232102394104, "learning_rate": 8.351999999999999e-06, "loss": 0.0341, "step": 2787 }, { "epoch": 3.0773053561568195, "grad_norm": 0.8297454714775085, "learning_rate": 8.355e-06, "loss": 0.0602, "step": 2788 }, { "epoch": 3.0784097183876313, "grad_norm": 1.456163763999939, "learning_rate": 8.358e-06, "loss": 0.0388, "step": 2789 }, { "epoch": 3.0795140806184427, "grad_norm": 0.630677342414856, "learning_rate": 8.361e-06, "loss": 0.0439, "step": 2790 }, { 
"epoch": 3.0806184428492545, "grad_norm": 0.9718624353408813, "learning_rate": 8.364e-06, "loss": 0.0535, "step": 2791 }, { "epoch": 3.0817228050800662, "grad_norm": 1.5598485469818115, "learning_rate": 8.367e-06, "loss": 0.0532, "step": 2792 }, { "epoch": 3.082827167310878, "grad_norm": 0.7865296006202698, "learning_rate": 8.370000000000001e-06, "loss": 0.0544, "step": 2793 }, { "epoch": 3.08393152954169, "grad_norm": 0.7648913264274597, "learning_rate": 8.373000000000001e-06, "loss": 0.0469, "step": 2794 }, { "epoch": 3.0850358917725016, "grad_norm": 0.754201352596283, "learning_rate": 8.376e-06, "loss": 0.0397, "step": 2795 }, { "epoch": 3.086140254003313, "grad_norm": 1.4531394243240356, "learning_rate": 8.379e-06, "loss": 0.0865, "step": 2796 }, { "epoch": 3.0872446162341247, "grad_norm": 0.6968564391136169, "learning_rate": 8.382e-06, "loss": 0.0471, "step": 2797 }, { "epoch": 3.0883489784649365, "grad_norm": 0.5073904991149902, "learning_rate": 8.385e-06, "loss": 0.0274, "step": 2798 }, { "epoch": 3.0894533406957483, "grad_norm": 0.9785099625587463, "learning_rate": 8.388e-06, "loss": 0.0622, "step": 2799 }, { "epoch": 3.09055770292656, "grad_norm": 0.8447487354278564, "learning_rate": 8.391e-06, "loss": 0.075, "step": 2800 }, { "epoch": 3.0916620651573714, "grad_norm": 1.0366053581237793, "learning_rate": 8.394e-06, "loss": 0.0446, "step": 2801 }, { "epoch": 3.092766427388183, "grad_norm": 1.6081608533859253, "learning_rate": 8.397e-06, "loss": 0.0577, "step": 2802 }, { "epoch": 3.093870789618995, "grad_norm": 0.9654091596603394, "learning_rate": 8.400000000000001e-06, "loss": 0.0564, "step": 2803 }, { "epoch": 3.0949751518498068, "grad_norm": 0.9857527613639832, "learning_rate": 8.403e-06, "loss": 0.0748, "step": 2804 }, { "epoch": 3.0960795140806185, "grad_norm": 1.098172903060913, "learning_rate": 8.406e-06, "loss": 0.0574, "step": 2805 }, { "epoch": 3.0971838763114303, "grad_norm": 1.3757219314575195, "learning_rate": 8.409e-06, "loss": 0.0856, "step": 
2806 }, { "epoch": 3.0982882385422417, "grad_norm": 0.8367680311203003, "learning_rate": 8.412e-06, "loss": 0.0713, "step": 2807 }, { "epoch": 3.0993926007730535, "grad_norm": 1.1761384010314941, "learning_rate": 8.415000000000002e-06, "loss": 0.074, "step": 2808 }, { "epoch": 3.1004969630038652, "grad_norm": 1.001431941986084, "learning_rate": 8.418000000000001e-06, "loss": 0.083, "step": 2809 }, { "epoch": 3.101601325234677, "grad_norm": 0.9538286328315735, "learning_rate": 8.421000000000001e-06, "loss": 0.0767, "step": 2810 }, { "epoch": 3.102705687465489, "grad_norm": 1.013659954071045, "learning_rate": 8.424e-06, "loss": 0.0789, "step": 2811 }, { "epoch": 3.1038100496963006, "grad_norm": 0.8951537609100342, "learning_rate": 8.426999999999999e-06, "loss": 0.0679, "step": 2812 }, { "epoch": 3.104914411927112, "grad_norm": 1.3546841144561768, "learning_rate": 8.43e-06, "loss": 0.0823, "step": 2813 }, { "epoch": 3.1060187741579237, "grad_norm": 1.0184736251831055, "learning_rate": 8.433e-06, "loss": 0.0982, "step": 2814 }, { "epoch": 3.1071231363887355, "grad_norm": 1.0134742259979248, "learning_rate": 8.436e-06, "loss": 0.0769, "step": 2815 }, { "epoch": 3.1082274986195473, "grad_norm": 1.1096864938735962, "learning_rate": 8.439e-06, "loss": 0.073, "step": 2816 }, { "epoch": 3.109331860850359, "grad_norm": 1.388634204864502, "learning_rate": 8.442e-06, "loss": 0.0858, "step": 2817 }, { "epoch": 3.1104362230811704, "grad_norm": 1.2564799785614014, "learning_rate": 8.445e-06, "loss": 0.0866, "step": 2818 }, { "epoch": 3.111540585311982, "grad_norm": 1.5027010440826416, "learning_rate": 8.448000000000001e-06, "loss": 0.3389, "step": 2819 }, { "epoch": 3.112644947542794, "grad_norm": 1.6153757572174072, "learning_rate": 8.451000000000001e-06, "loss": 0.3797, "step": 2820 }, { "epoch": 3.1137493097736058, "grad_norm": 1.199059247970581, "learning_rate": 8.454e-06, "loss": 0.2856, "step": 2821 }, { "epoch": 3.1148536720044175, "grad_norm": 1.5470348596572876, 
"learning_rate": 8.457e-06, "loss": 0.2925, "step": 2822 }, { "epoch": 3.1159580342352293, "grad_norm": 1.3084803819656372, "learning_rate": 8.459999999999999e-06, "loss": 0.2843, "step": 2823 }, { "epoch": 3.1170623964660407, "grad_norm": 2.2908968925476074, "learning_rate": 8.463e-06, "loss": 0.3007, "step": 2824 }, { "epoch": 3.1181667586968524, "grad_norm": 1.2864155769348145, "learning_rate": 8.466e-06, "loss": 0.1824, "step": 2825 }, { "epoch": 3.1192711209276642, "grad_norm": 0.8480859398841858, "learning_rate": 8.469e-06, "loss": 0.1169, "step": 2826 }, { "epoch": 3.120375483158476, "grad_norm": 1.1431649923324585, "learning_rate": 8.472e-06, "loss": 0.1599, "step": 2827 }, { "epoch": 3.121479845389288, "grad_norm": 0.8385676741600037, "learning_rate": 8.475e-06, "loss": 0.0836, "step": 2828 }, { "epoch": 3.1225842076200996, "grad_norm": 0.6849327087402344, "learning_rate": 8.478e-06, "loss": 0.065, "step": 2829 }, { "epoch": 3.123688569850911, "grad_norm": 1.104119896888733, "learning_rate": 8.481e-06, "loss": 0.0772, "step": 2830 }, { "epoch": 3.1247929320817227, "grad_norm": 0.9004542827606201, "learning_rate": 8.484e-06, "loss": 0.0624, "step": 2831 }, { "epoch": 3.1258972943125345, "grad_norm": 0.9853359460830688, "learning_rate": 8.487e-06, "loss": 0.0522, "step": 2832 }, { "epoch": 3.1270016565433463, "grad_norm": 0.8543922305107117, "learning_rate": 8.49e-06, "loss": 0.0708, "step": 2833 }, { "epoch": 3.128106018774158, "grad_norm": 0.689545214176178, "learning_rate": 8.493000000000002e-06, "loss": 0.0464, "step": 2834 }, { "epoch": 3.1292103810049694, "grad_norm": 0.8129315376281738, "learning_rate": 8.496e-06, "loss": 0.0542, "step": 2835 }, { "epoch": 3.130314743235781, "grad_norm": 0.8329964280128479, "learning_rate": 8.499e-06, "loss": 0.0495, "step": 2836 }, { "epoch": 3.131419105466593, "grad_norm": 1.102799654006958, "learning_rate": 8.502e-06, "loss": 0.0577, "step": 2837 }, { "epoch": 3.1325234676974048, "grad_norm": 1.397733211517334, 
"learning_rate": 8.504999999999999e-06, "loss": 0.0379, "step": 2838 }, { "epoch": 3.1336278299282165, "grad_norm": 3.290316343307495, "learning_rate": 8.508e-06, "loss": 0.0638, "step": 2839 }, { "epoch": 3.1347321921590283, "grad_norm": 0.693838357925415, "learning_rate": 8.511e-06, "loss": 0.0481, "step": 2840 }, { "epoch": 3.13583655438984, "grad_norm": 0.9385219216346741, "learning_rate": 8.514e-06, "loss": 0.0804, "step": 2841 }, { "epoch": 3.1369409166206514, "grad_norm": 3.717437505722046, "learning_rate": 8.517e-06, "loss": 0.0459, "step": 2842 }, { "epoch": 3.1380452788514632, "grad_norm": 1.0307321548461914, "learning_rate": 8.52e-06, "loss": 0.0673, "step": 2843 }, { "epoch": 3.139149641082275, "grad_norm": 2.0136396884918213, "learning_rate": 8.523000000000001e-06, "loss": 0.0636, "step": 2844 }, { "epoch": 3.140254003313087, "grad_norm": 1.0365290641784668, "learning_rate": 8.526000000000001e-06, "loss": 0.0634, "step": 2845 }, { "epoch": 3.1413583655438986, "grad_norm": 1.09528386592865, "learning_rate": 8.529e-06, "loss": 0.0565, "step": 2846 }, { "epoch": 3.14246272777471, "grad_norm": 0.8931270837783813, "learning_rate": 8.532e-06, "loss": 0.0655, "step": 2847 }, { "epoch": 3.1435670900055217, "grad_norm": 1.009911060333252, "learning_rate": 8.534999999999999e-06, "loss": 0.0592, "step": 2848 }, { "epoch": 3.1446714522363335, "grad_norm": 0.7146262526512146, "learning_rate": 8.538e-06, "loss": 0.056, "step": 2849 }, { "epoch": 3.1457758144671453, "grad_norm": 0.7285926938056946, "learning_rate": 8.541e-06, "loss": 0.0443, "step": 2850 }, { "epoch": 3.146880176697957, "grad_norm": 0.9417311549186707, "learning_rate": 8.544e-06, "loss": 0.0642, "step": 2851 }, { "epoch": 3.147984538928769, "grad_norm": 0.9864749312400818, "learning_rate": 8.547e-06, "loss": 0.0566, "step": 2852 }, { "epoch": 3.14908890115958, "grad_norm": 1.3849215507507324, "learning_rate": 8.55e-06, "loss": 0.0582, "step": 2853 }, { "epoch": 3.150193263390392, "grad_norm": 
0.9983588457107544, "learning_rate": 8.553000000000001e-06, "loss": 0.067, "step": 2854 }, { "epoch": 3.1512976256212037, "grad_norm": 1.2451807260513306, "learning_rate": 8.556e-06, "loss": 0.0858, "step": 2855 }, { "epoch": 3.1524019878520155, "grad_norm": 1.0606284141540527, "learning_rate": 8.559e-06, "loss": 0.0479, "step": 2856 }, { "epoch": 3.1535063500828273, "grad_norm": 0.8174136281013489, "learning_rate": 8.562e-06, "loss": 0.0503, "step": 2857 }, { "epoch": 3.154610712313639, "grad_norm": 0.7756106853485107, "learning_rate": 8.565e-06, "loss": 0.0434, "step": 2858 }, { "epoch": 3.1557150745444504, "grad_norm": 1.2700576782226562, "learning_rate": 8.568000000000002e-06, "loss": 0.0671, "step": 2859 }, { "epoch": 3.156819436775262, "grad_norm": 0.8134320378303528, "learning_rate": 8.571e-06, "loss": 0.0575, "step": 2860 }, { "epoch": 3.157923799006074, "grad_norm": 0.9586177468299866, "learning_rate": 8.574e-06, "loss": 0.0604, "step": 2861 }, { "epoch": 3.159028161236886, "grad_norm": 1.0638457536697388, "learning_rate": 8.577e-06, "loss": 0.086, "step": 2862 }, { "epoch": 3.1601325234676976, "grad_norm": 1.1265347003936768, "learning_rate": 8.58e-06, "loss": 0.0688, "step": 2863 }, { "epoch": 3.161236885698509, "grad_norm": 1.3474714756011963, "learning_rate": 8.583e-06, "loss": 0.0726, "step": 2864 }, { "epoch": 3.1623412479293207, "grad_norm": 1.9754043817520142, "learning_rate": 8.586e-06, "loss": 0.0974, "step": 2865 }, { "epoch": 3.1634456101601325, "grad_norm": 1.2494988441467285, "learning_rate": 8.589e-06, "loss": 0.0751, "step": 2866 }, { "epoch": 3.1645499723909443, "grad_norm": 1.8952888250350952, "learning_rate": 8.592e-06, "loss": 0.0881, "step": 2867 }, { "epoch": 3.165654334621756, "grad_norm": 1.360041618347168, "learning_rate": 8.595e-06, "loss": 0.0816, "step": 2868 }, { "epoch": 3.166758696852568, "grad_norm": 1.64255952835083, "learning_rate": 8.598000000000001e-06, "loss": 0.414, "step": 2869 }, { "epoch": 3.167863059083379, 
"grad_norm": 2.017660617828369, "learning_rate": 8.601000000000001e-06, "loss": 0.3557, "step": 2870 }, { "epoch": 3.168967421314191, "grad_norm": 1.351014256477356, "learning_rate": 8.604000000000001e-06, "loss": 0.294, "step": 2871 }, { "epoch": 3.1700717835450027, "grad_norm": 1.4317615032196045, "learning_rate": 8.606999999999999e-06, "loss": 0.3107, "step": 2872 }, { "epoch": 3.1711761457758145, "grad_norm": 2.113192319869995, "learning_rate": 8.609999999999999e-06, "loss": 0.3207, "step": 2873 }, { "epoch": 3.1722805080066263, "grad_norm": 1.829118251800537, "learning_rate": 8.613e-06, "loss": 0.2481, "step": 2874 }, { "epoch": 3.173384870237438, "grad_norm": 1.1735990047454834, "learning_rate": 8.616e-06, "loss": 0.1743, "step": 2875 }, { "epoch": 3.1744892324682494, "grad_norm": 0.7903040647506714, "learning_rate": 8.619e-06, "loss": 0.1156, "step": 2876 }, { "epoch": 3.175593594699061, "grad_norm": 0.9150795340538025, "learning_rate": 8.622e-06, "loss": 0.1478, "step": 2877 }, { "epoch": 3.176697956929873, "grad_norm": 0.7924903035163879, "learning_rate": 8.625e-06, "loss": 0.0741, "step": 2878 }, { "epoch": 3.177802319160685, "grad_norm": 0.9540520310401917, "learning_rate": 8.628000000000001e-06, "loss": 0.0852, "step": 2879 }, { "epoch": 3.1789066813914966, "grad_norm": 1.3410838842391968, "learning_rate": 8.631000000000001e-06, "loss": 0.0669, "step": 2880 }, { "epoch": 3.180011043622308, "grad_norm": 0.8235264420509338, "learning_rate": 8.634e-06, "loss": 0.0715, "step": 2881 }, { "epoch": 3.1811154058531197, "grad_norm": 2.2077760696411133, "learning_rate": 8.637e-06, "loss": 0.142, "step": 2882 }, { "epoch": 3.1822197680839315, "grad_norm": 1.4905998706817627, "learning_rate": 8.64e-06, "loss": 0.0591, "step": 2883 }, { "epoch": 3.1833241303147433, "grad_norm": 0.7610519528388977, "learning_rate": 8.643e-06, "loss": 0.059, "step": 2884 }, { "epoch": 3.184428492545555, "grad_norm": 0.7538778781890869, "learning_rate": 8.646e-06, "loss": 0.066, 
"step": 2885 }, { "epoch": 3.185532854776367, "grad_norm": 0.6616063714027405, "learning_rate": 8.649e-06, "loss": 0.0626, "step": 2886 }, { "epoch": 3.186637217007178, "grad_norm": 0.8344321846961975, "learning_rate": 8.652e-06, "loss": 0.0304, "step": 2887 }, { "epoch": 3.18774157923799, "grad_norm": 0.8004347681999207, "learning_rate": 8.655e-06, "loss": 0.0735, "step": 2888 }, { "epoch": 3.1888459414688017, "grad_norm": 0.8024809956550598, "learning_rate": 8.658e-06, "loss": 0.053, "step": 2889 }, { "epoch": 3.1899503036996135, "grad_norm": 0.9147144556045532, "learning_rate": 8.661e-06, "loss": 0.0625, "step": 2890 }, { "epoch": 3.1910546659304253, "grad_norm": 1.3168758153915405, "learning_rate": 8.664e-06, "loss": 0.0826, "step": 2891 }, { "epoch": 3.192159028161237, "grad_norm": 1.0698065757751465, "learning_rate": 8.667e-06, "loss": 0.057, "step": 2892 }, { "epoch": 3.1932633903920484, "grad_norm": 0.8771417737007141, "learning_rate": 8.67e-06, "loss": 0.0488, "step": 2893 }, { "epoch": 3.19436775262286, "grad_norm": 0.7571267485618591, "learning_rate": 8.673000000000001e-06, "loss": 0.0567, "step": 2894 }, { "epoch": 3.195472114853672, "grad_norm": 0.8449209928512573, "learning_rate": 8.676000000000001e-06, "loss": 0.0574, "step": 2895 }, { "epoch": 3.196576477084484, "grad_norm": 1.0619304180145264, "learning_rate": 8.679000000000001e-06, "loss": 0.0579, "step": 2896 }, { "epoch": 3.1976808393152956, "grad_norm": 1.0223023891448975, "learning_rate": 8.682e-06, "loss": 0.0648, "step": 2897 }, { "epoch": 3.198785201546107, "grad_norm": 0.8706281781196594, "learning_rate": 8.684999999999999e-06, "loss": 0.0615, "step": 2898 }, { "epoch": 3.1998895637769187, "grad_norm": 0.7816802263259888, "learning_rate": 8.688e-06, "loss": 0.0349, "step": 2899 }, { "epoch": 3.2009939260077305, "grad_norm": 0.9973553419113159, "learning_rate": 8.691e-06, "loss": 0.075, "step": 2900 }, { "epoch": 3.2020982882385423, "grad_norm": 0.9333471059799194, "learning_rate": 
8.694e-06, "loss": 0.0541, "step": 2901 }, { "epoch": 3.203202650469354, "grad_norm": 0.968670666217804, "learning_rate": 8.697e-06, "loss": 0.0524, "step": 2902 }, { "epoch": 3.204307012700166, "grad_norm": 0.9447479248046875, "learning_rate": 8.7e-06, "loss": 0.0744, "step": 2903 }, { "epoch": 3.205411374930977, "grad_norm": 0.9098367691040039, "learning_rate": 8.703000000000001e-06, "loss": 0.0508, "step": 2904 }, { "epoch": 3.206515737161789, "grad_norm": 1.749259352684021, "learning_rate": 8.706000000000001e-06, "loss": 0.0923, "step": 2905 }, { "epoch": 3.2076200993926007, "grad_norm": 0.985917329788208, "learning_rate": 8.709e-06, "loss": 0.0535, "step": 2906 }, { "epoch": 3.2087244616234125, "grad_norm": 1.0701780319213867, "learning_rate": 8.712e-06, "loss": 0.0817, "step": 2907 }, { "epoch": 3.2098288238542243, "grad_norm": 1.001468539237976, "learning_rate": 8.715e-06, "loss": 0.0588, "step": 2908 }, { "epoch": 3.210933186085036, "grad_norm": 1.0142706632614136, "learning_rate": 8.718e-06, "loss": 0.0707, "step": 2909 }, { "epoch": 3.2120375483158474, "grad_norm": 0.8208429217338562, "learning_rate": 8.721e-06, "loss": 0.0409, "step": 2910 }, { "epoch": 3.213141910546659, "grad_norm": 1.2041479349136353, "learning_rate": 8.724e-06, "loss": 0.08, "step": 2911 }, { "epoch": 3.214246272777471, "grad_norm": 1.1390689611434937, "learning_rate": 8.727e-06, "loss": 0.0945, "step": 2912 }, { "epoch": 3.2153506350082828, "grad_norm": 0.972468912601471, "learning_rate": 8.73e-06, "loss": 0.0527, "step": 2913 }, { "epoch": 3.2164549972390946, "grad_norm": 1.0173451900482178, "learning_rate": 8.733000000000001e-06, "loss": 0.0652, "step": 2914 }, { "epoch": 3.217559359469906, "grad_norm": 1.5423654317855835, "learning_rate": 8.736e-06, "loss": 0.0974, "step": 2915 }, { "epoch": 3.2186637217007177, "grad_norm": 1.3898694515228271, "learning_rate": 8.739e-06, "loss": 0.0941, "step": 2916 }, { "epoch": 3.2197680839315295, "grad_norm": 1.1412876844406128, 
"learning_rate": 8.742e-06, "loss": 0.0773, "step": 2917 }, { "epoch": 3.2208724461623413, "grad_norm": 1.4603344202041626, "learning_rate": 8.745e-06, "loss": 0.0745, "step": 2918 }, { "epoch": 3.221976808393153, "grad_norm": 2.9827375411987305, "learning_rate": 8.748000000000002e-06, "loss": 0.4122, "step": 2919 }, { "epoch": 3.223081170623965, "grad_norm": 2.6646242141723633, "learning_rate": 8.751000000000001e-06, "loss": 0.565, "step": 2920 }, { "epoch": 3.224185532854776, "grad_norm": 2.122164249420166, "learning_rate": 8.754e-06, "loss": 0.3405, "step": 2921 }, { "epoch": 3.225289895085588, "grad_norm": 1.416542410850525, "learning_rate": 8.757e-06, "loss": 0.2986, "step": 2922 }, { "epoch": 3.2263942573163997, "grad_norm": 1.7858089208602905, "learning_rate": 8.759999999999999e-06, "loss": 0.2267, "step": 2923 }, { "epoch": 3.2274986195472115, "grad_norm": 1.6600533723831177, "learning_rate": 8.763e-06, "loss": 0.319, "step": 2924 }, { "epoch": 3.2286029817780233, "grad_norm": 1.035719633102417, "learning_rate": 8.766e-06, "loss": 0.1863, "step": 2925 }, { "epoch": 3.229707344008835, "grad_norm": 1.4359068870544434, "learning_rate": 8.769e-06, "loss": 0.1654, "step": 2926 }, { "epoch": 3.2308117062396464, "grad_norm": 1.054736614227295, "learning_rate": 8.772e-06, "loss": 0.1188, "step": 2927 }, { "epoch": 3.231916068470458, "grad_norm": 0.9263907670974731, "learning_rate": 8.775e-06, "loss": 0.1933, "step": 2928 }, { "epoch": 3.23302043070127, "grad_norm": 0.7173705697059631, "learning_rate": 8.778000000000001e-06, "loss": 0.0736, "step": 2929 }, { "epoch": 3.2341247929320818, "grad_norm": 0.6649649143218994, "learning_rate": 8.781000000000001e-06, "loss": 0.0659, "step": 2930 }, { "epoch": 3.2352291551628936, "grad_norm": 0.8995980620384216, "learning_rate": 8.784000000000001e-06, "loss": 0.085, "step": 2931 }, { "epoch": 3.2363335173937053, "grad_norm": 1.4260903596878052, "learning_rate": 8.787e-06, "loss": 0.0781, "step": 2932 }, { "epoch": 
3.2374378796245167, "grad_norm": 0.8496424555778503, "learning_rate": 8.79e-06, "loss": 0.0489, "step": 2933 }, { "epoch": 3.2385422418553285, "grad_norm": 0.6672207117080688, "learning_rate": 8.793e-06, "loss": 0.0737, "step": 2934 }, { "epoch": 3.2396466040861402, "grad_norm": 0.5493250489234924, "learning_rate": 8.796e-06, "loss": 0.0492, "step": 2935 }, { "epoch": 3.240750966316952, "grad_norm": 0.7507105469703674, "learning_rate": 8.799e-06, "loss": 0.06, "step": 2936 }, { "epoch": 3.241855328547764, "grad_norm": 0.6224769949913025, "learning_rate": 8.802e-06, "loss": 0.0587, "step": 2937 }, { "epoch": 3.2429596907785756, "grad_norm": 0.913545548915863, "learning_rate": 8.805e-06, "loss": 0.0645, "step": 2938 }, { "epoch": 3.244064053009387, "grad_norm": 1.0093964338302612, "learning_rate": 8.808000000000001e-06, "loss": 0.0589, "step": 2939 }, { "epoch": 3.2451684152401987, "grad_norm": 1.0669918060302734, "learning_rate": 8.811000000000001e-06, "loss": 0.0515, "step": 2940 }, { "epoch": 3.2462727774710105, "grad_norm": 0.8791043162345886, "learning_rate": 8.814e-06, "loss": 0.0522, "step": 2941 }, { "epoch": 3.2473771397018223, "grad_norm": 0.855740487575531, "learning_rate": 8.817e-06, "loss": 0.0537, "step": 2942 }, { "epoch": 3.248481501932634, "grad_norm": 1.0599949359893799, "learning_rate": 8.82e-06, "loss": 0.0665, "step": 2943 }, { "epoch": 3.2495858641634454, "grad_norm": 0.7401173114776611, "learning_rate": 8.823e-06, "loss": 0.0542, "step": 2944 }, { "epoch": 3.250690226394257, "grad_norm": 0.8621899485588074, "learning_rate": 8.826000000000002e-06, "loss": 0.0781, "step": 2945 }, { "epoch": 3.251794588625069, "grad_norm": 1.9783439636230469, "learning_rate": 8.829e-06, "loss": 0.0987, "step": 2946 }, { "epoch": 3.2528989508558808, "grad_norm": 0.8573099374771118, "learning_rate": 8.832e-06, "loss": 0.0594, "step": 2947 }, { "epoch": 3.2540033130866926, "grad_norm": 0.6602203845977783, "learning_rate": 8.835e-06, "loss": 0.0574, "step": 2948 }, { 
"epoch": 3.2551076753175043, "grad_norm": 0.865564227104187, "learning_rate": 8.837999999999999e-06, "loss": 0.0653, "step": 2949 }, { "epoch": 3.2562120375483157, "grad_norm": 0.7460607290267944, "learning_rate": 8.841e-06, "loss": 0.0452, "step": 2950 }, { "epoch": 3.2573163997791275, "grad_norm": 1.0330427885055542, "learning_rate": 8.844e-06, "loss": 0.0727, "step": 2951 }, { "epoch": 3.2584207620099392, "grad_norm": 0.6941193342208862, "learning_rate": 8.847e-06, "loss": 0.0442, "step": 2952 }, { "epoch": 3.259525124240751, "grad_norm": 1.6780833005905151, "learning_rate": 8.85e-06, "loss": 0.0743, "step": 2953 }, { "epoch": 3.260629486471563, "grad_norm": 0.7790685892105103, "learning_rate": 8.853e-06, "loss": 0.049, "step": 2954 }, { "epoch": 3.2617338487023746, "grad_norm": 0.9303802251815796, "learning_rate": 8.856000000000001e-06, "loss": 0.0649, "step": 2955 }, { "epoch": 3.262838210933186, "grad_norm": 0.8890965580940247, "learning_rate": 8.859000000000001e-06, "loss": 0.0587, "step": 2956 }, { "epoch": 3.2639425731639977, "grad_norm": 0.7032738327980042, "learning_rate": 8.862000000000001e-06, "loss": 0.0645, "step": 2957 }, { "epoch": 3.2650469353948095, "grad_norm": 1.2608510255813599, "learning_rate": 8.864999999999999e-06, "loss": 0.07, "step": 2958 }, { "epoch": 3.2661512976256213, "grad_norm": 1.0836387872695923, "learning_rate": 8.867999999999999e-06, "loss": 0.0638, "step": 2959 }, { "epoch": 3.267255659856433, "grad_norm": 1.0116827487945557, "learning_rate": 8.871e-06, "loss": 0.0437, "step": 2960 }, { "epoch": 3.2683600220872444, "grad_norm": 1.3076534271240234, "learning_rate": 8.874e-06, "loss": 0.0758, "step": 2961 }, { "epoch": 3.269464384318056, "grad_norm": 1.0346969366073608, "learning_rate": 8.877e-06, "loss": 0.0574, "step": 2962 }, { "epoch": 3.270568746548868, "grad_norm": 1.4642503261566162, "learning_rate": 8.88e-06, "loss": 0.0782, "step": 2963 }, { "epoch": 3.2716731087796798, "grad_norm": 0.8623605370521545, "learning_rate": 
8.883e-06, "loss": 0.0686, "step": 2964 }, { "epoch": 3.2727774710104915, "grad_norm": 1.460631012916565, "learning_rate": 8.886000000000001e-06, "loss": 0.0753, "step": 2965 }, { "epoch": 3.2738818332413033, "grad_norm": 1.7564209699630737, "learning_rate": 8.889e-06, "loss": 0.1444, "step": 2966 }, { "epoch": 3.2749861954721147, "grad_norm": 1.0370173454284668, "learning_rate": 8.892e-06, "loss": 0.0671, "step": 2967 }, { "epoch": 3.2760905577029265, "grad_norm": 1.4123494625091553, "learning_rate": 8.895e-06, "loss": 0.0786, "step": 2968 }, { "epoch": 3.2771949199337382, "grad_norm": 2.2442338466644287, "learning_rate": 8.898e-06, "loss": 0.4319, "step": 2969 }, { "epoch": 3.27829928216455, "grad_norm": 1.0234134197235107, "learning_rate": 8.901e-06, "loss": 0.3324, "step": 2970 }, { "epoch": 3.279403644395362, "grad_norm": 1.5928384065628052, "learning_rate": 8.904e-06, "loss": 0.3235, "step": 2971 }, { "epoch": 3.2805080066261736, "grad_norm": 1.498742699623108, "learning_rate": 8.907e-06, "loss": 0.2856, "step": 2972 }, { "epoch": 3.281612368856985, "grad_norm": 1.9113165140151978, "learning_rate": 8.91e-06, "loss": 0.1896, "step": 2973 }, { "epoch": 3.2827167310877967, "grad_norm": 1.1206436157226562, "learning_rate": 8.913e-06, "loss": 0.2523, "step": 2974 }, { "epoch": 3.2838210933186085, "grad_norm": 1.2392746210098267, "learning_rate": 8.916e-06, "loss": 0.2289, "step": 2975 }, { "epoch": 3.2849254555494203, "grad_norm": 1.360916256904602, "learning_rate": 8.919e-06, "loss": 0.2599, "step": 2976 }, { "epoch": 3.286029817780232, "grad_norm": 0.9051672220230103, "learning_rate": 8.922e-06, "loss": 0.0875, "step": 2977 }, { "epoch": 3.2871341800110434, "grad_norm": 0.8981687426567078, "learning_rate": 8.925e-06, "loss": 0.103, "step": 2978 }, { "epoch": 3.288238542241855, "grad_norm": 0.792966365814209, "learning_rate": 8.928e-06, "loss": 0.0748, "step": 2979 }, { "epoch": 3.289342904472667, "grad_norm": 0.7722647786140442, "learning_rate": 
8.931000000000001e-06, "loss": 0.0784, "step": 2980 }, { "epoch": 3.2904472667034788, "grad_norm": 0.698887825012207, "learning_rate": 8.934000000000001e-06, "loss": 0.0714, "step": 2981 }, { "epoch": 3.2915516289342905, "grad_norm": 0.8152661323547363, "learning_rate": 8.937000000000001e-06, "loss": 0.0599, "step": 2982 }, { "epoch": 3.2926559911651023, "grad_norm": 0.6947748064994812, "learning_rate": 8.939999999999999e-06, "loss": 0.0467, "step": 2983 }, { "epoch": 3.293760353395914, "grad_norm": 0.8362021446228027, "learning_rate": 8.942999999999999e-06, "loss": 0.0518, "step": 2984 }, { "epoch": 3.2948647156267254, "grad_norm": 1.490798830986023, "learning_rate": 8.946e-06, "loss": 0.0439, "step": 2985 }, { "epoch": 3.2959690778575372, "grad_norm": 0.639380693435669, "learning_rate": 8.949e-06, "loss": 0.0637, "step": 2986 }, { "epoch": 3.297073440088349, "grad_norm": 0.7084651589393616, "learning_rate": 8.952e-06, "loss": 0.06, "step": 2987 }, { "epoch": 3.298177802319161, "grad_norm": 0.6101498603820801, "learning_rate": 8.955e-06, "loss": 0.0473, "step": 2988 }, { "epoch": 3.2992821645499726, "grad_norm": 0.7470443248748779, "learning_rate": 8.958e-06, "loss": 0.0493, "step": 2989 }, { "epoch": 3.300386526780784, "grad_norm": 0.5818164348602295, "learning_rate": 8.961000000000001e-06, "loss": 0.0304, "step": 2990 }, { "epoch": 3.3014908890115957, "grad_norm": 0.8803582191467285, "learning_rate": 8.964000000000001e-06, "loss": 0.0548, "step": 2991 }, { "epoch": 3.3025952512424075, "grad_norm": 1.0694069862365723, "learning_rate": 8.967e-06, "loss": 0.0504, "step": 2992 }, { "epoch": 3.3036996134732193, "grad_norm": 1.0205577611923218, "learning_rate": 8.97e-06, "loss": 0.0619, "step": 2993 }, { "epoch": 3.304803975704031, "grad_norm": 0.675214409828186, "learning_rate": 8.973e-06, "loss": 0.0498, "step": 2994 }, { "epoch": 3.3059083379348424, "grad_norm": 0.7797443270683289, "learning_rate": 8.976e-06, "loss": 0.0559, "step": 2995 }, { "epoch": 
3.307012700165654, "grad_norm": 1.2267284393310547, "learning_rate": 8.979e-06, "loss": 0.0527, "step": 2996 }, { "epoch": 3.308117062396466, "grad_norm": 0.8060405850410461, "learning_rate": 8.982e-06, "loss": 0.0588, "step": 2997 }, { "epoch": 3.3092214246272778, "grad_norm": 1.9774972200393677, "learning_rate": 8.985e-06, "loss": 0.0681, "step": 2998 }, { "epoch": 3.3103257868580895, "grad_norm": 1.0282522439956665, "learning_rate": 8.988e-06, "loss": 0.0415, "step": 2999 }, { "epoch": 3.3114301490889013, "grad_norm": 1.0633172988891602, "learning_rate": 8.991e-06, "loss": 0.0696, "step": 3000 }, { "epoch": 3.3114301490889013, "eval_cer": 0.1419239443191539, "eval_loss": 0.4766269624233246, "eval_runtime": 15.3568, "eval_samples_per_second": 19.796, "eval_steps_per_second": 0.651, "eval_wer": 0.5118956254796623, "step": 3000 }, { "epoch": 3.312534511319713, "grad_norm": 0.9052742123603821, "learning_rate": 8.994e-06, "loss": 0.0597, "step": 3001 }, { "epoch": 3.3136388735505244, "grad_norm": 1.315232515335083, "learning_rate": 8.997e-06, "loss": 0.0425, "step": 3002 }, { "epoch": 3.3147432357813362, "grad_norm": 1.059670329093933, "learning_rate": 9e-06, "loss": 0.0523, "step": 3003 }, { "epoch": 3.315847598012148, "grad_norm": 0.7529840469360352, "learning_rate": 9.003e-06, "loss": 0.061, "step": 3004 }, { "epoch": 3.31695196024296, "grad_norm": 1.0270869731903076, "learning_rate": 9.006000000000002e-06, "loss": 0.0746, "step": 3005 }, { "epoch": 3.3180563224737716, "grad_norm": 1.1721068620681763, "learning_rate": 9.009000000000001e-06, "loss": 0.0951, "step": 3006 }, { "epoch": 3.319160684704583, "grad_norm": 0.9134573936462402, "learning_rate": 9.012e-06, "loss": 0.0573, "step": 3007 }, { "epoch": 3.3202650469353947, "grad_norm": 1.4173476696014404, "learning_rate": 9.015e-06, "loss": 0.0587, "step": 3008 }, { "epoch": 3.3213694091662065, "grad_norm": 1.1215471029281616, "learning_rate": 9.017999999999999e-06, "loss": 0.0633, "step": 3009 }, { "epoch": 
3.3224737713970183, "grad_norm": 0.8092219233512878, "learning_rate": 9.021e-06, "loss": 0.0522, "step": 3010 }, { "epoch": 3.32357813362783, "grad_norm": 1.9656392335891724, "learning_rate": 9.024e-06, "loss": 0.0521, "step": 3011 }, { "epoch": 3.3246824958586414, "grad_norm": 2.2305760383605957, "learning_rate": 9.027e-06, "loss": 0.0823, "step": 3012 }, { "epoch": 3.325786858089453, "grad_norm": 0.9308191537857056, "learning_rate": 9.03e-06, "loss": 0.0484, "step": 3013 }, { "epoch": 3.326891220320265, "grad_norm": 1.0976966619491577, "learning_rate": 9.033e-06, "loss": 0.0689, "step": 3014 }, { "epoch": 3.3279955825510767, "grad_norm": 0.6764135956764221, "learning_rate": 9.036000000000001e-06, "loss": 0.0572, "step": 3015 }, { "epoch": 3.3290999447818885, "grad_norm": 1.0509167909622192, "learning_rate": 9.039000000000001e-06, "loss": 0.0751, "step": 3016 }, { "epoch": 3.3302043070127003, "grad_norm": 1.1215051412582397, "learning_rate": 9.042e-06, "loss": 0.0807, "step": 3017 }, { "epoch": 3.331308669243512, "grad_norm": 1.1452810764312744, "learning_rate": 9.045e-06, "loss": 0.0806, "step": 3018 }, { "epoch": 3.3324130314743234, "grad_norm": 3.4547648429870605, "learning_rate": 9.048e-06, "loss": 0.5387, "step": 3019 }, { "epoch": 3.3335173937051352, "grad_norm": 1.6765457391738892, "learning_rate": 9.051e-06, "loss": 0.3982, "step": 3020 }, { "epoch": 3.334621755935947, "grad_norm": 1.3450496196746826, "learning_rate": 9.054e-06, "loss": 0.3595, "step": 3021 }, { "epoch": 3.335726118166759, "grad_norm": 1.3723983764648438, "learning_rate": 9.057e-06, "loss": 0.3153, "step": 3022 }, { "epoch": 3.3368304803975706, "grad_norm": 1.073921799659729, "learning_rate": 9.06e-06, "loss": 0.1684, "step": 3023 }, { "epoch": 3.337934842628382, "grad_norm": 1.5649453401565552, "learning_rate": 9.063e-06, "loss": 0.2482, "step": 3024 }, { "epoch": 3.3390392048591937, "grad_norm": 1.3484970331192017, "learning_rate": 9.066000000000001e-06, "loss": 0.257, "step": 3025 }, { 
"epoch": 3.3401435670900055, "grad_norm": 1.2352566719055176, "learning_rate": 9.069e-06, "loss": 0.284, "step": 3026 }, { "epoch": 3.3412479293208173, "grad_norm": 0.7842769622802734, "learning_rate": 9.072e-06, "loss": 0.0842, "step": 3027 }, { "epoch": 3.342352291551629, "grad_norm": 0.729658305644989, "learning_rate": 9.075e-06, "loss": 0.0474, "step": 3028 }, { "epoch": 3.3434566537824404, "grad_norm": 1.8621469736099243, "learning_rate": 9.078e-06, "loss": 0.1093, "step": 3029 }, { "epoch": 3.344561016013252, "grad_norm": 1.3771710395812988, "learning_rate": 9.081000000000002e-06, "loss": 0.0708, "step": 3030 }, { "epoch": 3.345665378244064, "grad_norm": 0.7869976162910461, "learning_rate": 9.084000000000001e-06, "loss": 0.1068, "step": 3031 }, { "epoch": 3.3467697404748757, "grad_norm": 0.8301262259483337, "learning_rate": 9.087e-06, "loss": 0.0546, "step": 3032 }, { "epoch": 3.3478741027056875, "grad_norm": 0.7545909881591797, "learning_rate": 9.09e-06, "loss": 0.0613, "step": 3033 }, { "epoch": 3.3489784649364993, "grad_norm": 0.8478608131408691, "learning_rate": 9.093e-06, "loss": 0.0513, "step": 3034 }, { "epoch": 3.350082827167311, "grad_norm": 0.7480030655860901, "learning_rate": 9.096e-06, "loss": 0.0461, "step": 3035 }, { "epoch": 3.3511871893981224, "grad_norm": 1.0831815004348755, "learning_rate": 9.099e-06, "loss": 0.0749, "step": 3036 }, { "epoch": 3.352291551628934, "grad_norm": 0.615908145904541, "learning_rate": 9.102e-06, "loss": 0.0335, "step": 3037 }, { "epoch": 3.353395913859746, "grad_norm": 1.0561937093734741, "learning_rate": 9.105e-06, "loss": 0.0435, "step": 3038 }, { "epoch": 3.354500276090558, "grad_norm": 0.8236063122749329, "learning_rate": 9.108e-06, "loss": 0.0536, "step": 3039 }, { "epoch": 3.3556046383213696, "grad_norm": 0.6298307180404663, "learning_rate": 9.111000000000001e-06, "loss": 0.0422, "step": 3040 }, { "epoch": 3.356709000552181, "grad_norm": 0.7638277411460876, "learning_rate": 9.114000000000001e-06, "loss": 
0.056, "step": 3041 }, { "epoch": 3.3578133627829927, "grad_norm": 0.9441462159156799, "learning_rate": 9.117000000000001e-06, "loss": 0.053, "step": 3042 }, { "epoch": 3.3589177250138045, "grad_norm": 0.6598138213157654, "learning_rate": 9.12e-06, "loss": 0.0676, "step": 3043 }, { "epoch": 3.3600220872446163, "grad_norm": 0.5522363781929016, "learning_rate": 9.122999999999999e-06, "loss": 0.0494, "step": 3044 }, { "epoch": 3.361126449475428, "grad_norm": 1.0996299982070923, "learning_rate": 9.126e-06, "loss": 0.0691, "step": 3045 }, { "epoch": 3.36223081170624, "grad_norm": 0.7490952014923096, "learning_rate": 9.129e-06, "loss": 0.0625, "step": 3046 }, { "epoch": 3.363335173937051, "grad_norm": 0.4876120984554291, "learning_rate": 9.132e-06, "loss": 0.0249, "step": 3047 }, { "epoch": 3.364439536167863, "grad_norm": 0.8979233503341675, "learning_rate": 9.135e-06, "loss": 0.0555, "step": 3048 }, { "epoch": 3.3655438983986747, "grad_norm": 1.1942487955093384, "learning_rate": 9.138e-06, "loss": 0.0757, "step": 3049 }, { "epoch": 3.3666482606294865, "grad_norm": 1.29060697555542, "learning_rate": 9.141000000000001e-06, "loss": 0.0609, "step": 3050 }, { "epoch": 3.3677526228602983, "grad_norm": 0.9831134080886841, "learning_rate": 9.144000000000001e-06, "loss": 0.0479, "step": 3051 }, { "epoch": 3.36885698509111, "grad_norm": 0.8220939040184021, "learning_rate": 9.147e-06, "loss": 0.0384, "step": 3052 }, { "epoch": 3.3699613473219214, "grad_norm": 0.6525825262069702, "learning_rate": 9.15e-06, "loss": 0.0475, "step": 3053 }, { "epoch": 3.371065709552733, "grad_norm": 1.2385402917861938, "learning_rate": 9.153e-06, "loss": 0.0674, "step": 3054 }, { "epoch": 3.372170071783545, "grad_norm": 0.676205039024353, "learning_rate": 9.156000000000002e-06, "loss": 0.046, "step": 3055 }, { "epoch": 3.373274434014357, "grad_norm": 1.2974239587783813, "learning_rate": 9.159e-06, "loss": 0.0461, "step": 3056 }, { "epoch": 3.3743787962451686, "grad_norm": 0.7343958020210266, 
"learning_rate": 9.162e-06, "loss": 0.0587, "step": 3057 }, { "epoch": 3.37548315847598, "grad_norm": 0.8137938380241394, "learning_rate": 9.165e-06, "loss": 0.0601, "step": 3058 }, { "epoch": 3.3765875207067917, "grad_norm": 0.8749126195907593, "learning_rate": 9.168e-06, "loss": 0.07, "step": 3059 }, { "epoch": 3.3776918829376035, "grad_norm": 3.0019874572753906, "learning_rate": 9.171e-06, "loss": 0.0689, "step": 3060 }, { "epoch": 3.3787962451684153, "grad_norm": 1.1760849952697754, "learning_rate": 9.174e-06, "loss": 0.069, "step": 3061 }, { "epoch": 3.379900607399227, "grad_norm": 1.2937090396881104, "learning_rate": 9.177e-06, "loss": 0.0834, "step": 3062 }, { "epoch": 3.381004969630039, "grad_norm": 1.1934292316436768, "learning_rate": 9.18e-06, "loss": 0.0593, "step": 3063 }, { "epoch": 3.38210933186085, "grad_norm": 0.8994913101196289, "learning_rate": 9.183e-06, "loss": 0.0466, "step": 3064 }, { "epoch": 3.383213694091662, "grad_norm": 0.7747780084609985, "learning_rate": 9.186000000000001e-06, "loss": 0.0445, "step": 3065 }, { "epoch": 3.3843180563224737, "grad_norm": 1.1143522262573242, "learning_rate": 9.189000000000001e-06, "loss": 0.0805, "step": 3066 }, { "epoch": 3.3854224185532855, "grad_norm": 1.3259940147399902, "learning_rate": 9.192000000000001e-06, "loss": 0.0982, "step": 3067 }, { "epoch": 3.3865267807840973, "grad_norm": 1.2909026145935059, "learning_rate": 9.195000000000001e-06, "loss": 0.1041, "step": 3068 }, { "epoch": 3.387631143014909, "grad_norm": 1.7850068807601929, "learning_rate": 9.197999999999999e-06, "loss": 0.4681, "step": 3069 }, { "epoch": 3.3887355052457204, "grad_norm": 1.3028467893600464, "learning_rate": 9.200999999999999e-06, "loss": 0.3613, "step": 3070 }, { "epoch": 3.389839867476532, "grad_norm": 1.7856788635253906, "learning_rate": 9.204e-06, "loss": 0.4534, "step": 3071 }, { "epoch": 3.390944229707344, "grad_norm": 1.6077178716659546, "learning_rate": 9.207e-06, "loss": 0.4492, "step": 3072 }, { "epoch": 
3.3920485919381558, "grad_norm": 1.286677360534668, "learning_rate": 9.21e-06, "loss": 0.248, "step": 3073 }, { "epoch": 3.3931529541689676, "grad_norm": 1.165377140045166, "learning_rate": 9.213e-06, "loss": 0.2257, "step": 3074 }, { "epoch": 3.394257316399779, "grad_norm": 0.9446426630020142, "learning_rate": 9.216e-06, "loss": 0.1869, "step": 3075 }, { "epoch": 3.3953616786305907, "grad_norm": 1.1789500713348389, "learning_rate": 9.219000000000001e-06, "loss": 0.1074, "step": 3076 }, { "epoch": 3.3964660408614025, "grad_norm": 0.8255411982536316, "learning_rate": 9.222e-06, "loss": 0.0914, "step": 3077 }, { "epoch": 3.3975704030922143, "grad_norm": 0.6192933320999146, "learning_rate": 9.225e-06, "loss": 0.0807, "step": 3078 }, { "epoch": 3.398674765323026, "grad_norm": 1.4220037460327148, "learning_rate": 9.228e-06, "loss": 0.1546, "step": 3079 }, { "epoch": 3.399779127553838, "grad_norm": 0.856469452381134, "learning_rate": 9.231e-06, "loss": 0.0642, "step": 3080 }, { "epoch": 3.4008834897846496, "grad_norm": 0.7157558798789978, "learning_rate": 9.234e-06, "loss": 0.0643, "step": 3081 }, { "epoch": 3.401987852015461, "grad_norm": 0.6159145832061768, "learning_rate": 9.237e-06, "loss": 0.0437, "step": 3082 }, { "epoch": 3.4030922142462727, "grad_norm": 1.0224609375, "learning_rate": 9.24e-06, "loss": 0.0663, "step": 3083 }, { "epoch": 3.4041965764770845, "grad_norm": 1.095940113067627, "learning_rate": 9.243e-06, "loss": 0.1453, "step": 3084 }, { "epoch": 3.4053009387078963, "grad_norm": 0.9143558144569397, "learning_rate": 9.246e-06, "loss": 0.0882, "step": 3085 }, { "epoch": 3.406405300938708, "grad_norm": 0.5536240339279175, "learning_rate": 9.249e-06, "loss": 0.0469, "step": 3086 }, { "epoch": 3.4075096631695194, "grad_norm": 0.7313112616539001, "learning_rate": 9.252e-06, "loss": 0.0576, "step": 3087 }, { "epoch": 3.408614025400331, "grad_norm": 0.910140872001648, "learning_rate": 9.255e-06, "loss": 0.0466, "step": 3088 }, { "epoch": 3.409718387631143, 
"grad_norm": 0.8320920467376709, "learning_rate": 9.258e-06, "loss": 0.0448, "step": 3089 }, { "epoch": 3.4108227498619548, "grad_norm": 0.791415810585022, "learning_rate": 9.261e-06, "loss": 0.0638, "step": 3090 }, { "epoch": 3.4119271120927666, "grad_norm": 0.9628296494483948, "learning_rate": 9.264000000000001e-06, "loss": 0.0748, "step": 3091 }, { "epoch": 3.413031474323578, "grad_norm": 0.8972890377044678, "learning_rate": 9.267000000000001e-06, "loss": 0.071, "step": 3092 }, { "epoch": 3.4141358365543897, "grad_norm": 0.7698621153831482, "learning_rate": 9.27e-06, "loss": 0.0414, "step": 3093 }, { "epoch": 3.4152401987852015, "grad_norm": 0.7880486845970154, "learning_rate": 9.272999999999999e-06, "loss": 0.0358, "step": 3094 }, { "epoch": 3.4163445610160132, "grad_norm": 0.8751145601272583, "learning_rate": 9.275999999999999e-06, "loss": 0.0466, "step": 3095 }, { "epoch": 3.417448923246825, "grad_norm": 0.8354352116584778, "learning_rate": 9.279e-06, "loss": 0.0438, "step": 3096 }, { "epoch": 3.418553285477637, "grad_norm": 0.7961638569831848, "learning_rate": 9.282e-06, "loss": 0.0404, "step": 3097 }, { "epoch": 3.4196576477084486, "grad_norm": 0.5979811549186707, "learning_rate": 9.285e-06, "loss": 0.0324, "step": 3098 }, { "epoch": 3.42076200993926, "grad_norm": 1.560943365097046, "learning_rate": 9.288e-06, "loss": 0.0707, "step": 3099 }, { "epoch": 3.4218663721700717, "grad_norm": 0.8890183568000793, "learning_rate": 9.291e-06, "loss": 0.0605, "step": 3100 }, { "epoch": 3.4229707344008835, "grad_norm": 0.5213092565536499, "learning_rate": 9.294000000000001e-06, "loss": 0.0324, "step": 3101 }, { "epoch": 3.4240750966316953, "grad_norm": 0.8872831463813782, "learning_rate": 9.297000000000001e-06, "loss": 0.072, "step": 3102 }, { "epoch": 3.425179458862507, "grad_norm": 1.2792738676071167, "learning_rate": 9.3e-06, "loss": 0.0712, "step": 3103 }, { "epoch": 3.4262838210933184, "grad_norm": 1.4008536338806152, "learning_rate": 9.303e-06, "loss": 0.0485, 
"step": 3104 }, { "epoch": 3.42738818332413, "grad_norm": 1.1146570444107056, "learning_rate": 9.306e-06, "loss": 0.0598, "step": 3105 }, { "epoch": 3.428492545554942, "grad_norm": 0.8967472314834595, "learning_rate": 9.309e-06, "loss": 0.0527, "step": 3106 }, { "epoch": 3.4295969077857538, "grad_norm": 0.6538492441177368, "learning_rate": 9.312e-06, "loss": 0.0455, "step": 3107 }, { "epoch": 3.4307012700165656, "grad_norm": 0.7675917744636536, "learning_rate": 9.315e-06, "loss": 0.0628, "step": 3108 }, { "epoch": 3.431805632247377, "grad_norm": 0.7285746335983276, "learning_rate": 9.318e-06, "loss": 0.0463, "step": 3109 }, { "epoch": 3.4329099944781887, "grad_norm": 0.7871891260147095, "learning_rate": 9.321e-06, "loss": 0.0467, "step": 3110 }, { "epoch": 3.4340143567090005, "grad_norm": 1.0822924375534058, "learning_rate": 9.324000000000001e-06, "loss": 0.0578, "step": 3111 }, { "epoch": 3.4351187189398122, "grad_norm": 1.1611603498458862, "learning_rate": 9.327e-06, "loss": 0.086, "step": 3112 }, { "epoch": 3.436223081170624, "grad_norm": 0.9279497861862183, "learning_rate": 9.33e-06, "loss": 0.0769, "step": 3113 }, { "epoch": 3.437327443401436, "grad_norm": 0.7880980968475342, "learning_rate": 9.333e-06, "loss": 0.0636, "step": 3114 }, { "epoch": 3.4384318056322476, "grad_norm": 1.1352728605270386, "learning_rate": 9.336e-06, "loss": 0.0797, "step": 3115 }, { "epoch": 3.439536167863059, "grad_norm": 1.0674951076507568, "learning_rate": 9.339000000000002e-06, "loss": 0.081, "step": 3116 }, { "epoch": 3.4406405300938707, "grad_norm": 2.5031981468200684, "learning_rate": 9.342000000000001e-06, "loss": 0.0562, "step": 3117 }, { "epoch": 3.4417448923246825, "grad_norm": 1.2184197902679443, "learning_rate": 9.345e-06, "loss": 0.0776, "step": 3118 }, { "epoch": 3.4428492545554943, "grad_norm": 4.089263916015625, "learning_rate": 9.348e-06, "loss": 0.4935, "step": 3119 }, { "epoch": 3.443953616786306, "grad_norm": 1.632197618484497, "learning_rate": 
9.350999999999999e-06, "loss": 0.3299, "step": 3120 }, { "epoch": 3.4450579790171174, "grad_norm": 1.0715546607971191, "learning_rate": 9.354e-06, "loss": 0.3011, "step": 3121 }, { "epoch": 3.446162341247929, "grad_norm": 1.1930797100067139, "learning_rate": 9.357e-06, "loss": 0.2757, "step": 3122 }, { "epoch": 3.447266703478741, "grad_norm": 1.1914026737213135, "learning_rate": 9.36e-06, "loss": 0.2835, "step": 3123 }, { "epoch": 3.4483710657095528, "grad_norm": 1.0810906887054443, "learning_rate": 9.363e-06, "loss": 0.196, "step": 3124 }, { "epoch": 3.4494754279403645, "grad_norm": 1.1460179090499878, "learning_rate": 9.366e-06, "loss": 0.1822, "step": 3125 }, { "epoch": 3.4505797901711763, "grad_norm": 0.9324002265930176, "learning_rate": 9.369000000000001e-06, "loss": 0.1419, "step": 3126 }, { "epoch": 3.4516841524019877, "grad_norm": 0.8198550939559937, "learning_rate": 9.372000000000001e-06, "loss": 0.1294, "step": 3127 }, { "epoch": 3.4527885146327995, "grad_norm": 0.729794442653656, "learning_rate": 9.375000000000001e-06, "loss": 0.0707, "step": 3128 }, { "epoch": 3.4538928768636112, "grad_norm": 3.0194902420043945, "learning_rate": 9.378e-06, "loss": 0.1059, "step": 3129 }, { "epoch": 3.454997239094423, "grad_norm": 0.8914955854415894, "learning_rate": 9.380999999999999e-06, "loss": 0.0716, "step": 3130 }, { "epoch": 3.456101601325235, "grad_norm": 0.9883749485015869, "learning_rate": 9.384e-06, "loss": 0.0613, "step": 3131 }, { "epoch": 3.4572059635560466, "grad_norm": 1.1771352291107178, "learning_rate": 9.387e-06, "loss": 0.0971, "step": 3132 }, { "epoch": 3.458310325786858, "grad_norm": 0.8276172876358032, "learning_rate": 9.39e-06, "loss": 0.0645, "step": 3133 }, { "epoch": 3.4594146880176697, "grad_norm": 0.6859294176101685, "learning_rate": 9.393e-06, "loss": 0.0367, "step": 3134 }, { "epoch": 3.4605190502484815, "grad_norm": 0.7547460794448853, "learning_rate": 9.396e-06, "loss": 0.0545, "step": 3135 }, { "epoch": 3.4616234124792933, "grad_norm": 
0.7486056089401245, "learning_rate": 9.399000000000001e-06, "loss": 0.0599, "step": 3136 }, { "epoch": 3.462727774710105, "grad_norm": 0.7345145344734192, "learning_rate": 9.402e-06, "loss": 0.056, "step": 3137 }, { "epoch": 3.4638321369409164, "grad_norm": 0.7614725828170776, "learning_rate": 9.405e-06, "loss": 0.0747, "step": 3138 }, { "epoch": 3.464936499171728, "grad_norm": 1.0400978326797485, "learning_rate": 9.408e-06, "loss": 0.0903, "step": 3139 }, { "epoch": 3.46604086140254, "grad_norm": 0.7263002395629883, "learning_rate": 9.411e-06, "loss": 0.0281, "step": 3140 }, { "epoch": 3.4671452236333518, "grad_norm": 0.7206075191497803, "learning_rate": 9.414000000000002e-06, "loss": 0.059, "step": 3141 }, { "epoch": 3.4682495858641635, "grad_norm": 0.8422106504440308, "learning_rate": 9.417e-06, "loss": 0.0509, "step": 3142 }, { "epoch": 3.4693539480949753, "grad_norm": 1.4644192457199097, "learning_rate": 9.42e-06, "loss": 0.0709, "step": 3143 }, { "epoch": 3.4704583103257867, "grad_norm": 0.5988531112670898, "learning_rate": 9.423e-06, "loss": 0.0387, "step": 3144 }, { "epoch": 3.4715626725565985, "grad_norm": 0.8086658716201782, "learning_rate": 9.426e-06, "loss": 0.0534, "step": 3145 }, { "epoch": 3.4726670347874102, "grad_norm": 1.1800013780593872, "learning_rate": 9.429e-06, "loss": 0.0554, "step": 3146 }, { "epoch": 3.473771397018222, "grad_norm": 1.0529850721359253, "learning_rate": 9.432e-06, "loss": 0.0519, "step": 3147 }, { "epoch": 3.474875759249034, "grad_norm": 0.8388659954071045, "learning_rate": 9.435e-06, "loss": 0.0468, "step": 3148 }, { "epoch": 3.4759801214798456, "grad_norm": 0.8282060623168945, "learning_rate": 9.438e-06, "loss": 0.0381, "step": 3149 }, { "epoch": 3.477084483710657, "grad_norm": 0.9171413779258728, "learning_rate": 9.441e-06, "loss": 0.0512, "step": 3150 }, { "epoch": 3.4781888459414687, "grad_norm": 1.4102764129638672, "learning_rate": 9.444000000000001e-06, "loss": 0.0541, "step": 3151 }, { "epoch": 3.4792932081722805, 
"grad_norm": 0.8744451403617859, "learning_rate": 9.447000000000001e-06, "loss": 0.0497, "step": 3152 }, { "epoch": 3.4803975704030923, "grad_norm": 0.7095316052436829, "learning_rate": 9.450000000000001e-06, "loss": 0.0606, "step": 3153 }, { "epoch": 3.481501932633904, "grad_norm": 0.748517632484436, "learning_rate": 9.453e-06, "loss": 0.0514, "step": 3154 }, { "epoch": 3.4826062948647154, "grad_norm": 0.8844909071922302, "learning_rate": 9.455999999999999e-06, "loss": 0.0428, "step": 3155 }, { "epoch": 3.483710657095527, "grad_norm": 1.7574952840805054, "learning_rate": 9.459e-06, "loss": 0.038, "step": 3156 }, { "epoch": 3.484815019326339, "grad_norm": 0.8585934638977051, "learning_rate": 9.462e-06, "loss": 0.0414, "step": 3157 }, { "epoch": 3.4859193815571508, "grad_norm": 0.8070463538169861, "learning_rate": 9.465e-06, "loss": 0.0592, "step": 3158 }, { "epoch": 3.4870237437879625, "grad_norm": 0.9863465428352356, "learning_rate": 9.468e-06, "loss": 0.059, "step": 3159 }, { "epoch": 3.4881281060187743, "grad_norm": 0.8386515974998474, "learning_rate": 9.471e-06, "loss": 0.0598, "step": 3160 }, { "epoch": 3.489232468249586, "grad_norm": 1.0558019876480103, "learning_rate": 9.474000000000001e-06, "loss": 0.0461, "step": 3161 }, { "epoch": 3.4903368304803974, "grad_norm": 0.6103773713111877, "learning_rate": 9.477000000000001e-06, "loss": 0.0436, "step": 3162 }, { "epoch": 3.4914411927112092, "grad_norm": 3.0902342796325684, "learning_rate": 9.48e-06, "loss": 0.0732, "step": 3163 }, { "epoch": 3.492545554942021, "grad_norm": 1.5146549940109253, "learning_rate": 9.483e-06, "loss": 0.0997, "step": 3164 }, { "epoch": 3.493649917172833, "grad_norm": 1.2132148742675781, "learning_rate": 9.486e-06, "loss": 0.0682, "step": 3165 }, { "epoch": 3.4947542794036446, "grad_norm": 1.2140942811965942, "learning_rate": 9.489000000000002e-06, "loss": 0.0865, "step": 3166 }, { "epoch": 3.495858641634456, "grad_norm": 1.0283281803131104, "learning_rate": 9.492e-06, "loss": 0.0719, 
"step": 3167 }, { "epoch": 3.4969630038652677, "grad_norm": 1.3192397356033325, "learning_rate": 9.495e-06, "loss": 0.0818, "step": 3168 }, { "epoch": 3.4980673660960795, "grad_norm": 4.623909950256348, "learning_rate": 9.498e-06, "loss": 0.4951, "step": 3169 }, { "epoch": 3.4991717283268913, "grad_norm": 1.4160740375518799, "learning_rate": 9.501e-06, "loss": 0.4186, "step": 3170 }, { "epoch": 3.500276090557703, "grad_norm": 2.270400285720825, "learning_rate": 9.504e-06, "loss": 0.3144, "step": 3171 }, { "epoch": 3.5013804527885144, "grad_norm": 1.4769254922866821, "learning_rate": 9.507e-06, "loss": 0.2486, "step": 3172 }, { "epoch": 3.502484815019326, "grad_norm": 1.36968195438385, "learning_rate": 9.51e-06, "loss": 0.3075, "step": 3173 }, { "epoch": 3.503589177250138, "grad_norm": 1.1897450685501099, "learning_rate": 9.513e-06, "loss": 0.243, "step": 3174 }, { "epoch": 3.5046935394809497, "grad_norm": 1.0917222499847412, "learning_rate": 9.516e-06, "loss": 0.237, "step": 3175 }, { "epoch": 3.5057979017117615, "grad_norm": 1.4055004119873047, "learning_rate": 9.519000000000002e-06, "loss": 0.2026, "step": 3176 }, { "epoch": 3.5069022639425733, "grad_norm": 0.8912985324859619, "learning_rate": 9.522000000000001e-06, "loss": 0.1451, "step": 3177 }, { "epoch": 3.508006626173385, "grad_norm": 0.8181736469268799, "learning_rate": 9.525000000000001e-06, "loss": 0.1152, "step": 3178 }, { "epoch": 3.5091109884041964, "grad_norm": 0.7499043345451355, "learning_rate": 9.528e-06, "loss": 0.0721, "step": 3179 }, { "epoch": 3.5102153506350082, "grad_norm": 0.7511653304100037, "learning_rate": 9.530999999999999e-06, "loss": 0.094, "step": 3180 }, { "epoch": 3.51131971286582, "grad_norm": 1.2134352922439575, "learning_rate": 9.534e-06, "loss": 0.0395, "step": 3181 }, { "epoch": 3.512424075096632, "grad_norm": 0.6195405721664429, "learning_rate": 9.537e-06, "loss": 0.061, "step": 3182 }, { "epoch": 3.5135284373274436, "grad_norm": 0.721058189868927, "learning_rate": 9.54e-06, 
"loss": 0.1207, "step": 3183 }, { "epoch": 3.514632799558255, "grad_norm": 0.6467572450637817, "learning_rate": 9.543e-06, "loss": 0.0547, "step": 3184 }, { "epoch": 3.5157371617890667, "grad_norm": 1.0183706283569336, "learning_rate": 9.546e-06, "loss": 0.0913, "step": 3185 }, { "epoch": 3.5168415240198785, "grad_norm": 0.630897045135498, "learning_rate": 9.549000000000001e-06, "loss": 0.0422, "step": 3186 }, { "epoch": 3.5179458862506903, "grad_norm": 0.5848580002784729, "learning_rate": 9.552000000000001e-06, "loss": 0.0322, "step": 3187 }, { "epoch": 3.519050248481502, "grad_norm": 0.9891391396522522, "learning_rate": 9.555e-06, "loss": 0.046, "step": 3188 }, { "epoch": 3.5201546107123134, "grad_norm": 1.080337643623352, "learning_rate": 9.558e-06, "loss": 0.0471, "step": 3189 }, { "epoch": 3.5212589729431256, "grad_norm": 0.6661126017570496, "learning_rate": 9.561e-06, "loss": 0.0445, "step": 3190 }, { "epoch": 3.522363335173937, "grad_norm": 0.9279839992523193, "learning_rate": 9.564e-06, "loss": 0.0711, "step": 3191 }, { "epoch": 3.5234676974047487, "grad_norm": 0.8603034019470215, "learning_rate": 9.567e-06, "loss": 0.0516, "step": 3192 }, { "epoch": 3.5245720596355605, "grad_norm": 0.7979432940483093, "learning_rate": 9.57e-06, "loss": 0.0548, "step": 3193 }, { "epoch": 3.5256764218663723, "grad_norm": 1.3947393894195557, "learning_rate": 9.573e-06, "loss": 0.0951, "step": 3194 }, { "epoch": 3.526780784097184, "grad_norm": 0.7153193950653076, "learning_rate": 9.576e-06, "loss": 0.0422, "step": 3195 }, { "epoch": 3.5278851463279954, "grad_norm": 0.9547489285469055, "learning_rate": 9.579e-06, "loss": 0.0547, "step": 3196 }, { "epoch": 3.528989508558807, "grad_norm": 0.9114145040512085, "learning_rate": 9.582e-06, "loss": 0.0563, "step": 3197 }, { "epoch": 3.530093870789619, "grad_norm": 0.707037091255188, "learning_rate": 9.585e-06, "loss": 0.0534, "step": 3198 }, { "epoch": 3.531198233020431, "grad_norm": 0.9315674901008606, "learning_rate": 9.588e-06, 
"loss": 0.0542, "step": 3199 }, { "epoch": 3.5323025952512426, "grad_norm": 0.8844774961471558, "learning_rate": 9.591e-06, "loss": 0.0662, "step": 3200 }, { "epoch": 3.533406957482054, "grad_norm": 1.6938647031784058, "learning_rate": 9.594e-06, "loss": 0.0559, "step": 3201 }, { "epoch": 3.5345113197128657, "grad_norm": 0.8017938733100891, "learning_rate": 9.597000000000001e-06, "loss": 0.0777, "step": 3202 }, { "epoch": 3.5356156819436775, "grad_norm": 0.8437641859054565, "learning_rate": 9.600000000000001e-06, "loss": 0.079, "step": 3203 }, { "epoch": 3.5367200441744893, "grad_norm": 1.0117559432983398, "learning_rate": 9.603e-06, "loss": 0.0488, "step": 3204 }, { "epoch": 3.537824406405301, "grad_norm": 1.1931477785110474, "learning_rate": 9.606e-06, "loss": 0.0603, "step": 3205 }, { "epoch": 3.5389287686361124, "grad_norm": 1.279559850692749, "learning_rate": 9.608999999999999e-06, "loss": 0.08, "step": 3206 }, { "epoch": 3.5400331308669246, "grad_norm": 0.9706202745437622, "learning_rate": 9.612e-06, "loss": 0.0662, "step": 3207 }, { "epoch": 3.541137493097736, "grad_norm": 1.2209829092025757, "learning_rate": 9.615e-06, "loss": 0.0596, "step": 3208 }, { "epoch": 3.5422418553285477, "grad_norm": 1.116081953048706, "learning_rate": 9.618e-06, "loss": 0.0597, "step": 3209 }, { "epoch": 3.5433462175593595, "grad_norm": 0.9296171069145203, "learning_rate": 9.621e-06, "loss": 0.056, "step": 3210 }, { "epoch": 3.5444505797901713, "grad_norm": 1.4807229042053223, "learning_rate": 9.624e-06, "loss": 0.0586, "step": 3211 }, { "epoch": 3.545554942020983, "grad_norm": 0.9858142733573914, "learning_rate": 9.627000000000001e-06, "loss": 0.0511, "step": 3212 }, { "epoch": 3.5466593042517944, "grad_norm": 0.9262193441390991, "learning_rate": 9.630000000000001e-06, "loss": 0.0688, "step": 3213 }, { "epoch": 3.547763666482606, "grad_norm": 1.6050978899002075, "learning_rate": 9.633e-06, "loss": 0.0742, "step": 3214 }, { "epoch": 3.548868028713418, "grad_norm": 
0.8634001612663269, "learning_rate": 9.636e-06, "loss": 0.0681, "step": 3215 }, { "epoch": 3.54997239094423, "grad_norm": 0.9214967489242554, "learning_rate": 9.638999999999999e-06, "loss": 0.0593, "step": 3216 }, { "epoch": 3.5510767531750416, "grad_norm": 1.264862060546875, "learning_rate": 9.642e-06, "loss": 0.0614, "step": 3217 }, { "epoch": 3.552181115405853, "grad_norm": 1.6199802160263062, "learning_rate": 9.645e-06, "loss": 0.0674, "step": 3218 }, { "epoch": 3.5532854776366647, "grad_norm": 1.1113864183425903, "learning_rate": 9.648e-06, "loss": 0.4025, "step": 3219 }, { "epoch": 3.5543898398674765, "grad_norm": 1.137951374053955, "learning_rate": 9.651e-06, "loss": 0.3724, "step": 3220 }, { "epoch": 3.5554942020982883, "grad_norm": 1.1539701223373413, "learning_rate": 9.654e-06, "loss": 0.308, "step": 3221 }, { "epoch": 3.5565985643291, "grad_norm": 0.8899977803230286, "learning_rate": 9.657000000000001e-06, "loss": 0.2104, "step": 3222 }, { "epoch": 3.5577029265599114, "grad_norm": 1.2596254348754883, "learning_rate": 9.66e-06, "loss": 0.2345, "step": 3223 }, { "epoch": 3.5588072887907236, "grad_norm": 1.2442370653152466, "learning_rate": 9.663e-06, "loss": 0.3677, "step": 3224 }, { "epoch": 3.559911651021535, "grad_norm": 1.0474447011947632, "learning_rate": 9.666e-06, "loss": 0.12, "step": 3225 }, { "epoch": 3.5610160132523467, "grad_norm": 0.9912058115005493, "learning_rate": 9.669e-06, "loss": 0.1591, "step": 3226 }, { "epoch": 3.5621203754831585, "grad_norm": 0.8558464646339417, "learning_rate": 9.672000000000002e-06, "loss": 0.1815, "step": 3227 }, { "epoch": 3.5632247377139703, "grad_norm": 0.5410809516906738, "learning_rate": 9.675e-06, "loss": 0.0558, "step": 3228 }, { "epoch": 3.564329099944782, "grad_norm": 0.6548945903778076, "learning_rate": 9.678e-06, "loss": 0.0638, "step": 3229 }, { "epoch": 3.5654334621755934, "grad_norm": 0.6927734017372131, "learning_rate": 9.681e-06, "loss": 0.0666, "step": 3230 }, { "epoch": 3.566537824406405, 
"grad_norm": 0.7165170311927795, "learning_rate": 9.683999999999999e-06, "loss": 0.0675, "step": 3231 }, { "epoch": 3.567642186637217, "grad_norm": 0.7588794827461243, "learning_rate": 9.687e-06, "loss": 0.0672, "step": 3232 }, { "epoch": 3.568746548868029, "grad_norm": 0.708487868309021, "learning_rate": 9.69e-06, "loss": 0.0411, "step": 3233 }, { "epoch": 3.5698509110988406, "grad_norm": 0.5697234869003296, "learning_rate": 9.693e-06, "loss": 0.0547, "step": 3234 }, { "epoch": 3.570955273329652, "grad_norm": 0.5612917542457581, "learning_rate": 9.696e-06, "loss": 0.0361, "step": 3235 }, { "epoch": 3.5720596355604637, "grad_norm": 0.7900424003601074, "learning_rate": 9.699e-06, "loss": 0.0464, "step": 3236 }, { "epoch": 3.5731639977912755, "grad_norm": 0.729651689529419, "learning_rate": 9.702000000000001e-06, "loss": 0.0715, "step": 3237 }, { "epoch": 3.5742683600220873, "grad_norm": 0.5889029502868652, "learning_rate": 9.705000000000001e-06, "loss": 0.041, "step": 3238 }, { "epoch": 3.575372722252899, "grad_norm": 0.735913872718811, "learning_rate": 9.708000000000001e-06, "loss": 0.0535, "step": 3239 }, { "epoch": 3.576477084483711, "grad_norm": 0.6384629607200623, "learning_rate": 9.711e-06, "loss": 0.0413, "step": 3240 }, { "epoch": 3.5775814467145226, "grad_norm": 2.053691864013672, "learning_rate": 9.713999999999999e-06, "loss": 0.0473, "step": 3241 }, { "epoch": 3.578685808945334, "grad_norm": 1.1476343870162964, "learning_rate": 9.717e-06, "loss": 0.0546, "step": 3242 }, { "epoch": 3.5797901711761457, "grad_norm": 0.9103414416313171, "learning_rate": 9.72e-06, "loss": 0.1361, "step": 3243 }, { "epoch": 3.5808945334069575, "grad_norm": 0.6988769173622131, "learning_rate": 9.723e-06, "loss": 0.0543, "step": 3244 }, { "epoch": 3.5819988956377693, "grad_norm": 1.032153606414795, "learning_rate": 9.726e-06, "loss": 0.0554, "step": 3245 }, { "epoch": 3.583103257868581, "grad_norm": 1.2484323978424072, "learning_rate": 9.729e-06, "loss": 0.0602, "step": 3246 }, { 
"epoch": 3.5842076200993924, "grad_norm": 0.5592564344406128, "learning_rate": 9.732000000000001e-06, "loss": 0.0382, "step": 3247 }, { "epoch": 3.585311982330204, "grad_norm": 0.5041754245758057, "learning_rate": 9.735e-06, "loss": 0.0482, "step": 3248 }, { "epoch": 3.586416344561016, "grad_norm": 1.0655735731124878, "learning_rate": 9.738e-06, "loss": 0.0491, "step": 3249 }, { "epoch": 3.5875207067918278, "grad_norm": 0.558904230594635, "learning_rate": 9.741e-06, "loss": 0.0538, "step": 3250 }, { "epoch": 3.5886250690226396, "grad_norm": 0.8591431975364685, "learning_rate": 9.744e-06, "loss": 0.0669, "step": 3251 }, { "epoch": 3.589729431253451, "grad_norm": 0.6414563655853271, "learning_rate": 9.747000000000002e-06, "loss": 0.0491, "step": 3252 }, { "epoch": 3.5908337934842627, "grad_norm": 1.1943485736846924, "learning_rate": 9.75e-06, "loss": 0.0782, "step": 3253 }, { "epoch": 3.5919381557150745, "grad_norm": 1.2096184492111206, "learning_rate": 9.753e-06, "loss": 0.1014, "step": 3254 }, { "epoch": 3.5930425179458862, "grad_norm": 0.801874041557312, "learning_rate": 9.756e-06, "loss": 0.0549, "step": 3255 }, { "epoch": 3.594146880176698, "grad_norm": 0.6276560425758362, "learning_rate": 9.759e-06, "loss": 0.0403, "step": 3256 }, { "epoch": 3.59525124240751, "grad_norm": 0.7968191504478455, "learning_rate": 9.762e-06, "loss": 0.055, "step": 3257 }, { "epoch": 3.5963556046383216, "grad_norm": 0.5261571407318115, "learning_rate": 9.765e-06, "loss": 0.0334, "step": 3258 }, { "epoch": 3.597459966869133, "grad_norm": 0.753136932849884, "learning_rate": 9.768e-06, "loss": 0.0452, "step": 3259 }, { "epoch": 3.5985643290999447, "grad_norm": 0.6628236770629883, "learning_rate": 9.771e-06, "loss": 0.0451, "step": 3260 }, { "epoch": 3.5996686913307565, "grad_norm": 1.0396018028259277, "learning_rate": 9.774e-06, "loss": 0.0745, "step": 3261 }, { "epoch": 3.6007730535615683, "grad_norm": 1.1699796915054321, "learning_rate": 9.777000000000001e-06, "loss": 0.0633, "step": 
3262 }, { "epoch": 3.60187741579238, "grad_norm": 0.8425217866897583, "learning_rate": 9.780000000000001e-06, "loss": 0.0628, "step": 3263 }, { "epoch": 3.6029817780231914, "grad_norm": 0.9479457139968872, "learning_rate": 9.783000000000001e-06, "loss": 0.0685, "step": 3264 }, { "epoch": 3.604086140254003, "grad_norm": 0.9370042681694031, "learning_rate": 9.785999999999999e-06, "loss": 0.0607, "step": 3265 }, { "epoch": 3.605190502484815, "grad_norm": 1.182309865951538, "learning_rate": 9.788999999999999e-06, "loss": 0.0664, "step": 3266 }, { "epoch": 3.6062948647156268, "grad_norm": 0.8188482522964478, "learning_rate": 9.792e-06, "loss": 0.0528, "step": 3267 }, { "epoch": 3.6073992269464386, "grad_norm": 1.0274938344955444, "learning_rate": 9.795e-06, "loss": 0.0674, "step": 3268 }, { "epoch": 3.60850358917725, "grad_norm": 4.074865818023682, "learning_rate": 9.798e-06, "loss": 0.4439, "step": 3269 }, { "epoch": 3.609607951408062, "grad_norm": 1.5632541179656982, "learning_rate": 9.801e-06, "loss": 0.3546, "step": 3270 }, { "epoch": 3.6107123136388735, "grad_norm": 0.9858515858650208, "learning_rate": 9.804e-06, "loss": 0.2902, "step": 3271 }, { "epoch": 3.6118166758696852, "grad_norm": 1.2278530597686768, "learning_rate": 9.807000000000001e-06, "loss": 0.2748, "step": 3272 }, { "epoch": 3.612921038100497, "grad_norm": 1.1589940786361694, "learning_rate": 9.810000000000001e-06, "loss": 0.244, "step": 3273 }, { "epoch": 3.614025400331309, "grad_norm": 0.9468016624450684, "learning_rate": 9.813e-06, "loss": 0.1795, "step": 3274 }, { "epoch": 3.6151297625621206, "grad_norm": 1.1840105056762695, "learning_rate": 9.816e-06, "loss": 0.2267, "step": 3275 }, { "epoch": 3.616234124792932, "grad_norm": 1.207323431968689, "learning_rate": 9.819e-06, "loss": 0.1984, "step": 3276 }, { "epoch": 3.6173384870237437, "grad_norm": 0.8063493371009827, "learning_rate": 9.822e-06, "loss": 0.1137, "step": 3277 }, { "epoch": 3.6184428492545555, "grad_norm": 0.9259895086288452, 
"learning_rate": 9.825e-06, "loss": 0.1308, "step": 3278 }, { "epoch": 3.6195472114853673, "grad_norm": 0.7296105623245239, "learning_rate": 9.828e-06, "loss": 0.1447, "step": 3279 }, { "epoch": 3.620651573716179, "grad_norm": 0.880242645740509, "learning_rate": 9.831e-06, "loss": 0.0841, "step": 3280 }, { "epoch": 3.6217559359469904, "grad_norm": 0.6975619792938232, "learning_rate": 9.834e-06, "loss": 0.0686, "step": 3281 }, { "epoch": 3.622860298177802, "grad_norm": 1.0870200395584106, "learning_rate": 9.837000000000001e-06, "loss": 0.0425, "step": 3282 }, { "epoch": 3.623964660408614, "grad_norm": 0.8327417373657227, "learning_rate": 9.84e-06, "loss": 0.0614, "step": 3283 }, { "epoch": 3.6250690226394258, "grad_norm": 0.6591500639915466, "learning_rate": 9.843e-06, "loss": 0.0529, "step": 3284 }, { "epoch": 3.6261733848702375, "grad_norm": 1.3355896472930908, "learning_rate": 9.846e-06, "loss": 0.07, "step": 3285 }, { "epoch": 3.627277747101049, "grad_norm": 0.8255430459976196, "learning_rate": 9.849e-06, "loss": 0.0414, "step": 3286 }, { "epoch": 3.628382109331861, "grad_norm": 1.097957968711853, "learning_rate": 9.852000000000002e-06, "loss": 0.0471, "step": 3287 }, { "epoch": 3.6294864715626725, "grad_norm": 0.8370970487594604, "learning_rate": 9.855000000000001e-06, "loss": 0.0687, "step": 3288 }, { "epoch": 3.6305908337934842, "grad_norm": 0.8514276742935181, "learning_rate": 9.858000000000001e-06, "loss": 0.0439, "step": 3289 }, { "epoch": 3.631695196024296, "grad_norm": 0.9209104180335999, "learning_rate": 9.861e-06, "loss": 0.0451, "step": 3290 }, { "epoch": 3.632799558255108, "grad_norm": 0.7241644859313965, "learning_rate": 9.863999999999999e-06, "loss": 0.0456, "step": 3291 }, { "epoch": 3.6339039204859196, "grad_norm": 1.8582279682159424, "learning_rate": 9.867e-06, "loss": 0.0574, "step": 3292 }, { "epoch": 3.635008282716731, "grad_norm": 0.9207750558853149, "learning_rate": 9.87e-06, "loss": 0.0647, "step": 3293 }, { "epoch": 3.6361126449475427, 
"grad_norm": 0.8308240175247192, "learning_rate": 9.873e-06, "loss": 0.0494, "step": 3294 }, { "epoch": 3.6372170071783545, "grad_norm": 0.9288588762283325, "learning_rate": 9.876e-06, "loss": 0.0617, "step": 3295 }, { "epoch": 3.6383213694091663, "grad_norm": 0.7164512276649475, "learning_rate": 9.879e-06, "loss": 0.044, "step": 3296 }, { "epoch": 3.639425731639978, "grad_norm": 0.8239517211914062, "learning_rate": 9.882000000000001e-06, "loss": 0.0454, "step": 3297 }, { "epoch": 3.6405300938707894, "grad_norm": 0.5389373898506165, "learning_rate": 9.885000000000001e-06, "loss": 0.0235, "step": 3298 }, { "epoch": 3.641634456101601, "grad_norm": 2.3235738277435303, "learning_rate": 9.888000000000001e-06, "loss": 0.058, "step": 3299 }, { "epoch": 3.642738818332413, "grad_norm": 0.83476322889328, "learning_rate": 9.891e-06, "loss": 0.0519, "step": 3300 }, { "epoch": 3.6438431805632248, "grad_norm": 1.1380752325057983, "learning_rate": 9.894e-06, "loss": 0.0761, "step": 3301 }, { "epoch": 3.6449475427940365, "grad_norm": 0.7328553199768066, "learning_rate": 9.897e-06, "loss": 0.0696, "step": 3302 }, { "epoch": 3.646051905024848, "grad_norm": 1.7668801546096802, "learning_rate": 9.9e-06, "loss": 0.0573, "step": 3303 }, { "epoch": 3.64715626725566, "grad_norm": 0.8433746099472046, "learning_rate": 9.903e-06, "loss": 0.0545, "step": 3304 }, { "epoch": 3.6482606294864715, "grad_norm": 0.8492766618728638, "learning_rate": 9.906e-06, "loss": 0.0552, "step": 3305 }, { "epoch": 3.6493649917172832, "grad_norm": 0.8253594636917114, "learning_rate": 9.909e-06, "loss": 0.0655, "step": 3306 }, { "epoch": 3.650469353948095, "grad_norm": 1.097392201423645, "learning_rate": 9.912000000000001e-06, "loss": 0.0548, "step": 3307 }, { "epoch": 3.651573716178907, "grad_norm": 0.9250900745391846, "learning_rate": 9.915e-06, "loss": 0.0518, "step": 3308 }, { "epoch": 3.6526780784097186, "grad_norm": 0.8813474774360657, "learning_rate": 9.918e-06, "loss": 0.0665, "step": 3309 }, { "epoch": 
3.65378244064053, "grad_norm": 0.6975535750389099, "learning_rate": 9.921e-06, "loss": 0.0454, "step": 3310 }, { "epoch": 3.6548868028713417, "grad_norm": 1.0156440734863281, "learning_rate": 9.924e-06, "loss": 0.0889, "step": 3311 }, { "epoch": 3.6559911651021535, "grad_norm": 0.9708203077316284, "learning_rate": 9.927000000000002e-06, "loss": 0.0786, "step": 3312 }, { "epoch": 3.6570955273329653, "grad_norm": 0.886040985584259, "learning_rate": 9.930000000000001e-06, "loss": 0.0671, "step": 3313 }, { "epoch": 3.658199889563777, "grad_norm": 2.1953206062316895, "learning_rate": 9.933e-06, "loss": 0.0629, "step": 3314 }, { "epoch": 3.6593042517945884, "grad_norm": 1.0486915111541748, "learning_rate": 9.936e-06, "loss": 0.0769, "step": 3315 }, { "epoch": 3.6604086140254, "grad_norm": 0.8417356014251709, "learning_rate": 9.939e-06, "loss": 0.0561, "step": 3316 }, { "epoch": 3.661512976256212, "grad_norm": 0.8110786080360413, "learning_rate": 9.941999999999999e-06, "loss": 0.0712, "step": 3317 }, { "epoch": 3.6626173384870238, "grad_norm": 1.0830810070037842, "learning_rate": 9.945e-06, "loss": 0.0897, "step": 3318 }, { "epoch": 3.6637217007178355, "grad_norm": 1.6906644105911255, "learning_rate": 9.948e-06, "loss": 0.4317, "step": 3319 }, { "epoch": 3.6648260629486473, "grad_norm": 1.2448240518569946, "learning_rate": 9.951e-06, "loss": 0.3862, "step": 3320 }, { "epoch": 3.665930425179459, "grad_norm": 1.362113356590271, "learning_rate": 9.954e-06, "loss": 0.3873, "step": 3321 }, { "epoch": 3.6670347874102704, "grad_norm": 0.9006576538085938, "learning_rate": 9.957e-06, "loss": 0.2081, "step": 3322 }, { "epoch": 3.6681391496410822, "grad_norm": 1.0571417808532715, "learning_rate": 9.960000000000001e-06, "loss": 0.299, "step": 3323 }, { "epoch": 3.669243511871894, "grad_norm": 1.0079309940338135, "learning_rate": 9.963000000000001e-06, "loss": 0.1978, "step": 3324 }, { "epoch": 3.670347874102706, "grad_norm": 1.0370723009109497, "learning_rate": 9.966e-06, "loss": 
0.1588, "step": 3325 }, { "epoch": 3.6714522363335176, "grad_norm": 0.994957685470581, "learning_rate": 9.969e-06, "loss": 0.1552, "step": 3326 }, { "epoch": 3.672556598564329, "grad_norm": 0.8289691209793091, "learning_rate": 9.971999999999999e-06, "loss": 0.0863, "step": 3327 }, { "epoch": 3.6736609607951407, "grad_norm": 0.8599858283996582, "learning_rate": 9.975e-06, "loss": 0.1221, "step": 3328 }, { "epoch": 3.6747653230259525, "grad_norm": 0.9548110961914062, "learning_rate": 9.978e-06, "loss": 0.0733, "step": 3329 }, { "epoch": 3.6758696852567643, "grad_norm": 0.7702039480209351, "learning_rate": 9.981e-06, "loss": 0.0596, "step": 3330 }, { "epoch": 3.676974047487576, "grad_norm": 0.9505282044410706, "learning_rate": 9.984e-06, "loss": 0.0762, "step": 3331 }, { "epoch": 3.6780784097183874, "grad_norm": 0.7713779807090759, "learning_rate": 9.987e-06, "loss": 0.0634, "step": 3332 }, { "epoch": 3.679182771949199, "grad_norm": 0.6132813692092896, "learning_rate": 9.990000000000001e-06, "loss": 0.0545, "step": 3333 }, { "epoch": 3.680287134180011, "grad_norm": 1.0368596315383911, "learning_rate": 9.993e-06, "loss": 0.061, "step": 3334 }, { "epoch": 3.6813914964108228, "grad_norm": 0.6441455483436584, "learning_rate": 9.996e-06, "loss": 0.0497, "step": 3335 }, { "epoch": 3.6824958586416345, "grad_norm": 0.7695950269699097, "learning_rate": 9.999e-06, "loss": 0.062, "step": 3336 }, { "epoch": 3.6836002208724463, "grad_norm": 0.747619092464447, "learning_rate": 1.0002e-05, "loss": 0.0514, "step": 3337 }, { "epoch": 3.684704583103258, "grad_norm": 0.9473074078559875, "learning_rate": 1.0005000000000002e-05, "loss": 0.0295, "step": 3338 }, { "epoch": 3.6858089453340694, "grad_norm": 0.7107280492782593, "learning_rate": 1.0008e-05, "loss": 0.0432, "step": 3339 }, { "epoch": 3.6869133075648812, "grad_norm": 0.6723143458366394, "learning_rate": 1.0011e-05, "loss": 0.0511, "step": 3340 }, { "epoch": 3.688017669795693, "grad_norm": 0.9808182120323181, "learning_rate": 
1.0014e-05, "loss": 0.0439, "step": 3341 }, { "epoch": 3.689122032026505, "grad_norm": 0.6063432097434998, "learning_rate": 1.0016999999999999e-05, "loss": 0.05, "step": 3342 }, { "epoch": 3.6902263942573166, "grad_norm": 0.7620457410812378, "learning_rate": 1.002e-05, "loss": 0.0548, "step": 3343 }, { "epoch": 3.691330756488128, "grad_norm": 0.9941666722297668, "learning_rate": 1.0023e-05, "loss": 0.0397, "step": 3344 }, { "epoch": 3.6924351187189397, "grad_norm": 0.8314593434333801, "learning_rate": 1.0026e-05, "loss": 0.065, "step": 3345 }, { "epoch": 3.6935394809497515, "grad_norm": 1.1126964092254639, "learning_rate": 1.0029e-05, "loss": 0.0428, "step": 3346 }, { "epoch": 3.6946438431805633, "grad_norm": 0.7091181874275208, "learning_rate": 1.0032e-05, "loss": 0.0575, "step": 3347 }, { "epoch": 3.695748205411375, "grad_norm": 1.4399752616882324, "learning_rate": 1.0035000000000001e-05, "loss": 0.0786, "step": 3348 }, { "epoch": 3.6968525676421864, "grad_norm": 0.8926579356193542, "learning_rate": 1.0038000000000001e-05, "loss": 0.0579, "step": 3349 }, { "epoch": 3.697956929872998, "grad_norm": 0.7800250053405762, "learning_rate": 1.0041000000000001e-05, "loss": 0.0322, "step": 3350 }, { "epoch": 3.69906129210381, "grad_norm": 0.6081497073173523, "learning_rate": 1.0043999999999999e-05, "loss": 0.0442, "step": 3351 }, { "epoch": 3.7001656543346217, "grad_norm": 0.8407126069068909, "learning_rate": 1.0046999999999999e-05, "loss": 0.0422, "step": 3352 }, { "epoch": 3.7012700165654335, "grad_norm": 0.8902133703231812, "learning_rate": 1.005e-05, "loss": 0.0554, "step": 3353 }, { "epoch": 3.7023743787962453, "grad_norm": 0.6947841048240662, "learning_rate": 1.0053e-05, "loss": 0.046, "step": 3354 }, { "epoch": 3.703478741027057, "grad_norm": 0.7818667888641357, "learning_rate": 1.0056e-05, "loss": 0.0581, "step": 3355 }, { "epoch": 3.7045831032578684, "grad_norm": 0.9425652027130127, "learning_rate": 1.0059e-05, "loss": 0.066, "step": 3356 }, { "epoch": 
3.70568746548868, "grad_norm": 0.9868588447570801, "learning_rate": 1.0062e-05, "loss": 0.0596, "step": 3357 }, { "epoch": 3.706791827719492, "grad_norm": 0.8763952851295471, "learning_rate": 1.0065000000000001e-05, "loss": 0.0723, "step": 3358 }, { "epoch": 3.707896189950304, "grad_norm": 1.2038495540618896, "learning_rate": 1.0068e-05, "loss": 0.0589, "step": 3359 }, { "epoch": 3.7090005521811156, "grad_norm": 0.7328248620033264, "learning_rate": 1.0071e-05, "loss": 0.0735, "step": 3360 }, { "epoch": 3.710104914411927, "grad_norm": 1.100351095199585, "learning_rate": 1.0074e-05, "loss": 0.0527, "step": 3361 }, { "epoch": 3.7112092766427387, "grad_norm": 1.5352694988250732, "learning_rate": 1.0077e-05, "loss": 0.0752, "step": 3362 }, { "epoch": 3.7123136388735505, "grad_norm": 0.7025403380393982, "learning_rate": 1.008e-05, "loss": 0.0424, "step": 3363 }, { "epoch": 3.7134180011043623, "grad_norm": 1.1808301210403442, "learning_rate": 1.0083e-05, "loss": 0.0715, "step": 3364 }, { "epoch": 3.714522363335174, "grad_norm": 0.7483868598937988, "learning_rate": 1.0086e-05, "loss": 0.0615, "step": 3365 }, { "epoch": 3.7156267255659854, "grad_norm": 0.7464598417282104, "learning_rate": 1.0089e-05, "loss": 0.055, "step": 3366 }, { "epoch": 3.7167310877967976, "grad_norm": 0.8753601908683777, "learning_rate": 1.0092e-05, "loss": 0.0634, "step": 3367 }, { "epoch": 3.717835450027609, "grad_norm": 1.3259329795837402, "learning_rate": 1.0095e-05, "loss": 0.068, "step": 3368 }, { "epoch": 3.7189398122584207, "grad_norm": 2.6221137046813965, "learning_rate": 1.0098e-05, "loss": 0.5257, "step": 3369 }, { "epoch": 3.7200441744892325, "grad_norm": 0.9248769879341125, "learning_rate": 1.0101e-05, "loss": 0.3525, "step": 3370 }, { "epoch": 3.7211485367200443, "grad_norm": 2.018771171569824, "learning_rate": 1.0104e-05, "loss": 0.4137, "step": 3371 }, { "epoch": 3.722252898950856, "grad_norm": 1.110823154449463, "learning_rate": 1.0107e-05, "loss": 0.2368, "step": 3372 }, { "epoch": 
3.7233572611816674, "grad_norm": 1.277266502380371, "learning_rate": 1.0110000000000001e-05, "loss": 0.2565, "step": 3373 }, { "epoch": 3.724461623412479, "grad_norm": 1.7126816511154175, "learning_rate": 1.0113000000000001e-05, "loss": 0.2171, "step": 3374 }, { "epoch": 3.725565985643291, "grad_norm": 0.6585073471069336, "learning_rate": 1.0116000000000001e-05, "loss": 0.1401, "step": 3375 }, { "epoch": 3.726670347874103, "grad_norm": 1.1394708156585693, "learning_rate": 1.0119e-05, "loss": 0.0923, "step": 3376 }, { "epoch": 3.7277747101049146, "grad_norm": 0.5668245553970337, "learning_rate": 1.0121999999999999e-05, "loss": 0.074, "step": 3377 }, { "epoch": 3.728879072335726, "grad_norm": 0.682296633720398, "learning_rate": 1.0125e-05, "loss": 0.0706, "step": 3378 }, { "epoch": 3.7299834345665377, "grad_norm": 0.7204146981239319, "learning_rate": 1.0128e-05, "loss": 0.0629, "step": 3379 }, { "epoch": 3.7310877967973495, "grad_norm": 0.8610345125198364, "learning_rate": 1.0131e-05, "loss": 0.0906, "step": 3380 }, { "epoch": 3.7321921590281613, "grad_norm": 1.0649988651275635, "learning_rate": 1.0134e-05, "loss": 0.1357, "step": 3381 }, { "epoch": 3.733296521258973, "grad_norm": 0.6194301247596741, "learning_rate": 1.0137e-05, "loss": 0.0627, "step": 3382 }, { "epoch": 3.7344008834897844, "grad_norm": 1.2234469652175903, "learning_rate": 1.0140000000000001e-05, "loss": 0.0879, "step": 3383 }, { "epoch": 3.7355052457205966, "grad_norm": 0.9624341130256653, "learning_rate": 1.0143000000000001e-05, "loss": 0.0646, "step": 3384 }, { "epoch": 3.736609607951408, "grad_norm": 0.5728865265846252, "learning_rate": 1.0146e-05, "loss": 0.0552, "step": 3385 }, { "epoch": 3.7377139701822197, "grad_norm": 0.6579563617706299, "learning_rate": 1.0149e-05, "loss": 0.064, "step": 3386 }, { "epoch": 3.7388183324130315, "grad_norm": 0.8859608173370361, "learning_rate": 1.0152e-05, "loss": 0.0596, "step": 3387 }, { "epoch": 3.7399226946438433, "grad_norm": 0.9655731916427612, 
"learning_rate": 1.0155e-05, "loss": 0.0687, "step": 3388 }, { "epoch": 3.741027056874655, "grad_norm": 0.6835197806358337, "learning_rate": 1.0158e-05, "loss": 0.0475, "step": 3389 }, { "epoch": 3.7421314191054664, "grad_norm": 0.505918025970459, "learning_rate": 1.0161e-05, "loss": 0.043, "step": 3390 }, { "epoch": 3.743235781336278, "grad_norm": 0.8313736915588379, "learning_rate": 1.0164e-05, "loss": 0.0519, "step": 3391 }, { "epoch": 3.74434014356709, "grad_norm": 0.6687424182891846, "learning_rate": 1.0167e-05, "loss": 0.0403, "step": 3392 }, { "epoch": 3.745444505797902, "grad_norm": 0.9124475717544556, "learning_rate": 1.0170000000000001e-05, "loss": 0.0525, "step": 3393 }, { "epoch": 3.7465488680287136, "grad_norm": 0.9338250756263733, "learning_rate": 1.0173e-05, "loss": 0.0624, "step": 3394 }, { "epoch": 3.747653230259525, "grad_norm": 0.8565163612365723, "learning_rate": 1.0176e-05, "loss": 0.0499, "step": 3395 }, { "epoch": 3.7487575924903367, "grad_norm": 0.9137418866157532, "learning_rate": 1.0179e-05, "loss": 0.062, "step": 3396 }, { "epoch": 3.7498619547211485, "grad_norm": 0.9461405277252197, "learning_rate": 1.0182e-05, "loss": 0.0623, "step": 3397 }, { "epoch": 3.7509663169519603, "grad_norm": 0.7512488961219788, "learning_rate": 1.0185000000000002e-05, "loss": 0.0536, "step": 3398 }, { "epoch": 3.752070679182772, "grad_norm": 0.8786422610282898, "learning_rate": 1.0188000000000001e-05, "loss": 0.0464, "step": 3399 }, { "epoch": 3.7531750414135834, "grad_norm": 0.6586198210716248, "learning_rate": 1.0191e-05, "loss": 0.0671, "step": 3400 }, { "epoch": 3.7542794036443956, "grad_norm": 0.5876282453536987, "learning_rate": 1.0194e-05, "loss": 0.0506, "step": 3401 }, { "epoch": 3.755383765875207, "grad_norm": 0.7051442265510559, "learning_rate": 1.0196999999999999e-05, "loss": 0.0602, "step": 3402 }, { "epoch": 3.7564881281060187, "grad_norm": 0.6365216374397278, "learning_rate": 1.02e-05, "loss": 0.0327, "step": 3403 }, { "epoch": 
3.7575924903368305, "grad_norm": 1.626230239868164, "learning_rate": 1.0203e-05, "loss": 0.0796, "step": 3404 }, { "epoch": 3.7586968525676423, "grad_norm": 0.6372084021568298, "learning_rate": 1.0206e-05, "loss": 0.0455, "step": 3405 }, { "epoch": 3.759801214798454, "grad_norm": 0.8069322109222412, "learning_rate": 1.0209e-05, "loss": 0.0608, "step": 3406 }, { "epoch": 3.7609055770292654, "grad_norm": 0.7485314607620239, "learning_rate": 1.0212e-05, "loss": 0.0535, "step": 3407 }, { "epoch": 3.762009939260077, "grad_norm": 0.9747768640518188, "learning_rate": 1.0215000000000001e-05, "loss": 0.0794, "step": 3408 }, { "epoch": 3.763114301490889, "grad_norm": 0.9319172501564026, "learning_rate": 1.0218000000000001e-05, "loss": 0.0516, "step": 3409 }, { "epoch": 3.7642186637217008, "grad_norm": 0.8049240708351135, "learning_rate": 1.0221000000000001e-05, "loss": 0.0583, "step": 3410 }, { "epoch": 3.7653230259525126, "grad_norm": 0.9438633322715759, "learning_rate": 1.0224e-05, "loss": 0.0605, "step": 3411 }, { "epoch": 3.766427388183324, "grad_norm": 0.7049112915992737, "learning_rate": 1.0227e-05, "loss": 0.0489, "step": 3412 }, { "epoch": 3.7675317504141357, "grad_norm": 0.771908164024353, "learning_rate": 1.023e-05, "loss": 0.0622, "step": 3413 }, { "epoch": 3.7686361126449475, "grad_norm": 0.8943352699279785, "learning_rate": 1.0233e-05, "loss": 0.0671, "step": 3414 }, { "epoch": 3.7697404748757593, "grad_norm": 1.3378491401672363, "learning_rate": 1.0236e-05, "loss": 0.0811, "step": 3415 }, { "epoch": 3.770844837106571, "grad_norm": 0.7316207885742188, "learning_rate": 1.0239e-05, "loss": 0.0546, "step": 3416 }, { "epoch": 3.771949199337383, "grad_norm": 1.0573515892028809, "learning_rate": 1.0242e-05, "loss": 0.0788, "step": 3417 }, { "epoch": 3.7730535615681946, "grad_norm": 1.879165768623352, "learning_rate": 1.0245000000000001e-05, "loss": 0.1289, "step": 3418 }, { "epoch": 3.774157923799006, "grad_norm": 3.1395180225372314, "learning_rate": 1.0248e-05, 
"loss": 0.4808, "step": 3419 }, { "epoch": 3.7752622860298177, "grad_norm": 1.5162992477416992, "learning_rate": 1.0251e-05, "loss": 0.4293, "step": 3420 }, { "epoch": 3.7763666482606295, "grad_norm": 1.2620923519134521, "learning_rate": 1.0254e-05, "loss": 0.3856, "step": 3421 }, { "epoch": 3.7774710104914413, "grad_norm": 1.291381597518921, "learning_rate": 1.0257e-05, "loss": 0.3656, "step": 3422 }, { "epoch": 3.778575372722253, "grad_norm": 1.407582402229309, "learning_rate": 1.0260000000000002e-05, "loss": 0.2498, "step": 3423 }, { "epoch": 3.7796797349530644, "grad_norm": 1.08617103099823, "learning_rate": 1.0263000000000002e-05, "loss": 0.2284, "step": 3424 }, { "epoch": 3.780784097183876, "grad_norm": 0.9893784523010254, "learning_rate": 1.0266e-05, "loss": 0.1852, "step": 3425 }, { "epoch": 3.781888459414688, "grad_norm": 1.1679736375808716, "learning_rate": 1.0269e-05, "loss": 0.1519, "step": 3426 }, { "epoch": 3.7829928216454998, "grad_norm": 0.7814311385154724, "learning_rate": 1.0272e-05, "loss": 0.0799, "step": 3427 }, { "epoch": 3.7840971838763116, "grad_norm": 0.7139643430709839, "learning_rate": 1.0275e-05, "loss": 0.0743, "step": 3428 }, { "epoch": 3.785201546107123, "grad_norm": 0.6357262134552002, "learning_rate": 1.0278e-05, "loss": 0.0532, "step": 3429 }, { "epoch": 3.7863059083379347, "grad_norm": 0.5170646905899048, "learning_rate": 1.0281e-05, "loss": 0.0642, "step": 3430 }, { "epoch": 3.7874102705687465, "grad_norm": 0.6393130421638489, "learning_rate": 1.0284e-05, "loss": 0.0399, "step": 3431 }, { "epoch": 3.7885146327995582, "grad_norm": 0.7361850142478943, "learning_rate": 1.0287e-05, "loss": 0.067, "step": 3432 }, { "epoch": 3.78961899503037, "grad_norm": 0.7532323002815247, "learning_rate": 1.0290000000000001e-05, "loss": 0.0556, "step": 3433 }, { "epoch": 3.790723357261182, "grad_norm": 0.7142893075942993, "learning_rate": 1.0293000000000001e-05, "loss": 0.0662, "step": 3434 }, { "epoch": 3.7918277194919936, "grad_norm": 
0.681383490562439, "learning_rate": 1.0296000000000001e-05, "loss": 0.0314, "step": 3435 }, { "epoch": 3.792932081722805, "grad_norm": 0.8470467329025269, "learning_rate": 1.0299e-05, "loss": 0.0757, "step": 3436 }, { "epoch": 3.7940364439536167, "grad_norm": 0.6769718527793884, "learning_rate": 1.0301999999999999e-05, "loss": 0.0517, "step": 3437 }, { "epoch": 3.7951408061844285, "grad_norm": 0.47067320346832275, "learning_rate": 1.0305e-05, "loss": 0.044, "step": 3438 }, { "epoch": 3.7962451684152403, "grad_norm": 0.7508710026741028, "learning_rate": 1.0308e-05, "loss": 0.0397, "step": 3439 }, { "epoch": 3.797349530646052, "grad_norm": 0.7338812947273254, "learning_rate": 1.0311e-05, "loss": 0.0617, "step": 3440 }, { "epoch": 3.7984538928768634, "grad_norm": 0.9468108415603638, "learning_rate": 1.0314e-05, "loss": 0.0863, "step": 3441 }, { "epoch": 3.799558255107675, "grad_norm": 0.7343699932098389, "learning_rate": 1.0317e-05, "loss": 0.0519, "step": 3442 }, { "epoch": 3.800662617338487, "grad_norm": 0.6239741444587708, "learning_rate": 1.032e-05, "loss": 0.0389, "step": 3443 }, { "epoch": 3.8017669795692988, "grad_norm": 1.10675048828125, "learning_rate": 1.0323000000000001e-05, "loss": 0.0795, "step": 3444 }, { "epoch": 3.8028713418001105, "grad_norm": 0.8797118663787842, "learning_rate": 1.0326e-05, "loss": 0.1333, "step": 3445 }, { "epoch": 3.803975704030922, "grad_norm": 1.0164768695831299, "learning_rate": 1.0329e-05, "loss": 0.1323, "step": 3446 }, { "epoch": 3.805080066261734, "grad_norm": 1.0680415630340576, "learning_rate": 1.0332e-05, "loss": 0.047, "step": 3447 }, { "epoch": 3.8061844284925455, "grad_norm": 1.010044813156128, "learning_rate": 1.0335e-05, "loss": 0.0525, "step": 3448 }, { "epoch": 3.8072887907233572, "grad_norm": 0.726505696773529, "learning_rate": 1.0338e-05, "loss": 0.0569, "step": 3449 }, { "epoch": 3.808393152954169, "grad_norm": 0.6231745481491089, "learning_rate": 1.0341e-05, "loss": 0.0602, "step": 3450 }, { "epoch": 
3.809497515184981, "grad_norm": 0.9712716341018677, "learning_rate": 1.0344e-05, "loss": 0.0598, "step": 3451 }, { "epoch": 3.8106018774157926, "grad_norm": 0.9386095404624939, "learning_rate": 1.0347e-05, "loss": 0.0602, "step": 3452 }, { "epoch": 3.811706239646604, "grad_norm": 0.673345148563385, "learning_rate": 1.035e-05, "loss": 0.0441, "step": 3453 }, { "epoch": 3.8128106018774157, "grad_norm": 1.0619385242462158, "learning_rate": 1.0353e-05, "loss": 0.0822, "step": 3454 }, { "epoch": 3.8139149641082275, "grad_norm": 0.9364734292030334, "learning_rate": 1.0356e-05, "loss": 0.0768, "step": 3455 }, { "epoch": 3.8150193263390393, "grad_norm": 0.6849554777145386, "learning_rate": 1.0359e-05, "loss": 0.0436, "step": 3456 }, { "epoch": 3.816123688569851, "grad_norm": 0.5929413437843323, "learning_rate": 1.0362e-05, "loss": 0.0422, "step": 3457 }, { "epoch": 3.8172280508006624, "grad_norm": 0.7758374810218811, "learning_rate": 1.0365e-05, "loss": 0.0609, "step": 3458 }, { "epoch": 3.818332413031474, "grad_norm": 1.2083625793457031, "learning_rate": 1.0368000000000001e-05, "loss": 0.062, "step": 3459 }, { "epoch": 3.819436775262286, "grad_norm": 0.8227546215057373, "learning_rate": 1.0371000000000001e-05, "loss": 0.0639, "step": 3460 }, { "epoch": 3.8205411374930978, "grad_norm": 1.7350033521652222, "learning_rate": 1.0374000000000001e-05, "loss": 0.0513, "step": 3461 }, { "epoch": 3.8216454997239095, "grad_norm": 0.7495331168174744, "learning_rate": 1.0376999999999999e-05, "loss": 0.0512, "step": 3462 }, { "epoch": 3.822749861954721, "grad_norm": 1.110453724861145, "learning_rate": 1.0379999999999999e-05, "loss": 0.0725, "step": 3463 }, { "epoch": 3.823854224185533, "grad_norm": 1.1616803407669067, "learning_rate": 1.0383e-05, "loss": 0.085, "step": 3464 }, { "epoch": 3.8249585864163445, "grad_norm": 1.6861674785614014, "learning_rate": 1.0386e-05, "loss": 0.077, "step": 3465 }, { "epoch": 3.8260629486471562, "grad_norm": 0.9208093285560608, "learning_rate": 
1.0389e-05, "loss": 0.0719, "step": 3466 }, { "epoch": 3.827167310877968, "grad_norm": 0.9094186425209045, "learning_rate": 1.0392e-05, "loss": 0.0534, "step": 3467 }, { "epoch": 3.82827167310878, "grad_norm": 1.1737756729125977, "learning_rate": 1.0395e-05, "loss": 0.0787, "step": 3468 }, { "epoch": 3.8293760353395916, "grad_norm": 1.5081596374511719, "learning_rate": 1.0398000000000001e-05, "loss": 0.3762, "step": 3469 }, { "epoch": 3.830480397570403, "grad_norm": 1.3510630130767822, "learning_rate": 1.0401000000000001e-05, "loss": 0.3315, "step": 3470 }, { "epoch": 3.8315847598012147, "grad_norm": 0.939845085144043, "learning_rate": 1.0404e-05, "loss": 0.2725, "step": 3471 }, { "epoch": 3.8326891220320265, "grad_norm": 1.179208755493164, "learning_rate": 1.0407e-05, "loss": 0.232, "step": 3472 }, { "epoch": 3.8337934842628383, "grad_norm": 1.092445731163025, "learning_rate": 1.041e-05, "loss": 0.2472, "step": 3473 }, { "epoch": 3.83489784649365, "grad_norm": 0.8630483746528625, "learning_rate": 1.0413e-05, "loss": 0.1738, "step": 3474 }, { "epoch": 3.8360022087244614, "grad_norm": 1.112265706062317, "learning_rate": 1.0416e-05, "loss": 0.1338, "step": 3475 }, { "epoch": 3.837106570955273, "grad_norm": 0.9862213134765625, "learning_rate": 1.0419e-05, "loss": 0.1532, "step": 3476 }, { "epoch": 3.838210933186085, "grad_norm": 0.582807719707489, "learning_rate": 1.0422e-05, "loss": 0.0775, "step": 3477 }, { "epoch": 3.8393152954168968, "grad_norm": 0.7541035413742065, "learning_rate": 1.0425e-05, "loss": 0.0667, "step": 3478 }, { "epoch": 3.8404196576477085, "grad_norm": 0.6278290152549744, "learning_rate": 1.0428e-05, "loss": 0.06, "step": 3479 }, { "epoch": 3.84152401987852, "grad_norm": 0.8411014080047607, "learning_rate": 1.0431e-05, "loss": 0.0592, "step": 3480 }, { "epoch": 3.842628382109332, "grad_norm": 0.7688366174697876, "learning_rate": 1.0434e-05, "loss": 0.0493, "step": 3481 }, { "epoch": 3.8437327443401434, "grad_norm": 0.9064078330993652, 
"learning_rate": 1.0437e-05, "loss": 0.0674, "step": 3482 }, { "epoch": 3.8448371065709552, "grad_norm": 0.551420271396637, "learning_rate": 1.044e-05, "loss": 0.0353, "step": 3483 }, { "epoch": 3.845941468801767, "grad_norm": 0.6927017569541931, "learning_rate": 1.0443000000000001e-05, "loss": 0.0513, "step": 3484 }, { "epoch": 3.847045831032579, "grad_norm": 0.6446082592010498, "learning_rate": 1.0446000000000001e-05, "loss": 0.037, "step": 3485 }, { "epoch": 3.8481501932633906, "grad_norm": 0.6317927241325378, "learning_rate": 1.0449e-05, "loss": 0.0515, "step": 3486 }, { "epoch": 3.849254555494202, "grad_norm": 0.8634940981864929, "learning_rate": 1.0452e-05, "loss": 0.0807, "step": 3487 }, { "epoch": 3.8503589177250137, "grad_norm": 0.7938126921653748, "learning_rate": 1.0454999999999999e-05, "loss": 0.0496, "step": 3488 }, { "epoch": 3.8514632799558255, "grad_norm": 1.2841670513153076, "learning_rate": 1.0458e-05, "loss": 0.0837, "step": 3489 }, { "epoch": 3.8525676421866373, "grad_norm": 0.8519377112388611, "learning_rate": 1.0461e-05, "loss": 0.0465, "step": 3490 }, { "epoch": 3.853672004417449, "grad_norm": 0.8034703135490417, "learning_rate": 1.0464e-05, "loss": 0.056, "step": 3491 }, { "epoch": 3.8547763666482604, "grad_norm": 0.6162466406822205, "learning_rate": 1.0467e-05, "loss": 0.0385, "step": 3492 }, { "epoch": 3.855880728879072, "grad_norm": 0.6135486364364624, "learning_rate": 1.047e-05, "loss": 0.046, "step": 3493 }, { "epoch": 3.856985091109884, "grad_norm": 0.7567261457443237, "learning_rate": 1.0473000000000001e-05, "loss": 0.0379, "step": 3494 }, { "epoch": 3.8580894533406958, "grad_norm": 1.9199620485305786, "learning_rate": 1.0476000000000001e-05, "loss": 0.033, "step": 3495 }, { "epoch": 3.8591938155715075, "grad_norm": 0.819098949432373, "learning_rate": 1.0479e-05, "loss": 0.0527, "step": 3496 }, { "epoch": 3.8602981778023193, "grad_norm": 0.7188252806663513, "learning_rate": 1.0482e-05, "loss": 0.0639, "step": 3497 }, { "epoch": 
3.861402540033131, "grad_norm": 0.9656946659088135, "learning_rate": 1.0485e-05, "loss": 0.0628, "step": 3498 }, { "epoch": 3.8625069022639424, "grad_norm": 0.6637988686561584, "learning_rate": 1.0488e-05, "loss": 0.0413, "step": 3499 }, { "epoch": 3.8636112644947542, "grad_norm": 0.8515700697898865, "learning_rate": 1.0491e-05, "loss": 0.071, "step": 3500 }, { "epoch": 3.864715626725566, "grad_norm": 6.24423360824585, "learning_rate": 1.0494e-05, "loss": 0.0633, "step": 3501 }, { "epoch": 3.865819988956378, "grad_norm": 0.7631388902664185, "learning_rate": 1.0497e-05, "loss": 0.0612, "step": 3502 }, { "epoch": 3.8669243511871896, "grad_norm": 0.5906660556793213, "learning_rate": 1.05e-05, "loss": 0.0365, "step": 3503 }, { "epoch": 3.868028713418001, "grad_norm": 0.5020113587379456, "learning_rate": 1.0503000000000001e-05, "loss": 0.0401, "step": 3504 }, { "epoch": 3.8691330756488127, "grad_norm": 1.9314974546432495, "learning_rate": 1.0506e-05, "loss": 0.0486, "step": 3505 }, { "epoch": 3.8702374378796245, "grad_norm": 0.7896802425384521, "learning_rate": 1.0509e-05, "loss": 0.0533, "step": 3506 }, { "epoch": 3.8713418001104363, "grad_norm": 1.2088345289230347, "learning_rate": 1.0512e-05, "loss": 0.0824, "step": 3507 }, { "epoch": 3.872446162341248, "grad_norm": 1.0337249040603638, "learning_rate": 1.0515e-05, "loss": 0.0536, "step": 3508 }, { "epoch": 3.8735505245720594, "grad_norm": 0.8708574175834656, "learning_rate": 1.0518000000000002e-05, "loss": 0.054, "step": 3509 }, { "epoch": 3.874654886802871, "grad_norm": 0.9076089859008789, "learning_rate": 1.0521000000000001e-05, "loss": 0.0582, "step": 3510 }, { "epoch": 3.875759249033683, "grad_norm": 0.7668710350990295, "learning_rate": 1.0524e-05, "loss": 0.0361, "step": 3511 }, { "epoch": 3.8768636112644947, "grad_norm": 0.9589788317680359, "learning_rate": 1.0527e-05, "loss": 0.0563, "step": 3512 }, { "epoch": 3.8779679734953065, "grad_norm": 1.1232486963272095, "learning_rate": 1.0529999999999999e-05, "loss": 
0.0637, "step": 3513 }, { "epoch": 3.8790723357261183, "grad_norm": 0.559635579586029, "learning_rate": 1.0533e-05, "loss": 0.042, "step": 3514 }, { "epoch": 3.88017669795693, "grad_norm": 1.158599853515625, "learning_rate": 1.0536e-05, "loss": 0.0514, "step": 3515 }, { "epoch": 3.8812810601877414, "grad_norm": 0.978778064250946, "learning_rate": 1.0539e-05, "loss": 0.0593, "step": 3516 }, { "epoch": 3.8823854224185532, "grad_norm": 0.9849871397018433, "learning_rate": 1.0542e-05, "loss": 0.0456, "step": 3517 }, { "epoch": 3.883489784649365, "grad_norm": 1.0035741329193115, "learning_rate": 1.0545e-05, "loss": 0.0991, "step": 3518 }, { "epoch": 3.884594146880177, "grad_norm": 1.429272174835205, "learning_rate": 1.0548000000000001e-05, "loss": 0.464, "step": 3519 }, { "epoch": 3.8856985091109886, "grad_norm": 1.206222414970398, "learning_rate": 1.0551000000000001e-05, "loss": 0.368, "step": 3520 }, { "epoch": 3.8868028713418, "grad_norm": 1.3445017337799072, "learning_rate": 1.0554000000000001e-05, "loss": 0.2659, "step": 3521 }, { "epoch": 3.8879072335726117, "grad_norm": 1.3945153951644897, "learning_rate": 1.0557e-05, "loss": 0.2789, "step": 3522 }, { "epoch": 3.8890115958034235, "grad_norm": 0.75993812084198, "learning_rate": 1.0559999999999999e-05, "loss": 0.2199, "step": 3523 }, { "epoch": 3.8901159580342353, "grad_norm": 1.3416266441345215, "learning_rate": 1.0563e-05, "loss": 0.1849, "step": 3524 }, { "epoch": 3.891220320265047, "grad_norm": 1.0858550071716309, "learning_rate": 1.0566e-05, "loss": 0.1743, "step": 3525 }, { "epoch": 3.8923246824958584, "grad_norm": 0.6407228112220764, "learning_rate": 1.0569e-05, "loss": 0.0814, "step": 3526 }, { "epoch": 3.8934290447266706, "grad_norm": 0.7285434007644653, "learning_rate": 1.0572e-05, "loss": 0.0901, "step": 3527 }, { "epoch": 3.894533406957482, "grad_norm": 0.863735556602478, "learning_rate": 1.0575e-05, "loss": 0.1157, "step": 3528 }, { "epoch": 3.8956377691882937, "grad_norm": 0.44392696022987366, 
"learning_rate": 1.0578000000000001e-05, "loss": 0.0417, "step": 3529 }, { "epoch": 3.8967421314191055, "grad_norm": 0.5804027318954468, "learning_rate": 1.0581e-05, "loss": 0.0815, "step": 3530 }, { "epoch": 3.8978464936499173, "grad_norm": 0.5813788175582886, "learning_rate": 1.0584e-05, "loss": 0.0471, "step": 3531 }, { "epoch": 3.898950855880729, "grad_norm": 0.8002217411994934, "learning_rate": 1.0587e-05, "loss": 0.0637, "step": 3532 }, { "epoch": 3.9000552181115404, "grad_norm": 0.7384927272796631, "learning_rate": 1.059e-05, "loss": 0.0604, "step": 3533 }, { "epoch": 3.901159580342352, "grad_norm": 0.6017723083496094, "learning_rate": 1.0593000000000002e-05, "loss": 0.0383, "step": 3534 }, { "epoch": 3.902263942573164, "grad_norm": 0.6489041447639465, "learning_rate": 1.0596e-05, "loss": 0.0387, "step": 3535 }, { "epoch": 3.903368304803976, "grad_norm": 1.1263447999954224, "learning_rate": 1.0599e-05, "loss": 0.0628, "step": 3536 }, { "epoch": 3.9044726670347876, "grad_norm": 0.8626489043235779, "learning_rate": 1.0602e-05, "loss": 0.0542, "step": 3537 }, { "epoch": 3.905577029265599, "grad_norm": 0.5089144706726074, "learning_rate": 1.0605e-05, "loss": 0.0495, "step": 3538 }, { "epoch": 3.9066813914964107, "grad_norm": 0.5383116006851196, "learning_rate": 1.0608e-05, "loss": 0.0208, "step": 3539 }, { "epoch": 3.9077857537272225, "grad_norm": 0.7753933668136597, "learning_rate": 1.0611e-05, "loss": 0.0729, "step": 3540 }, { "epoch": 3.9088901159580343, "grad_norm": 0.8614526987075806, "learning_rate": 1.0614e-05, "loss": 0.0546, "step": 3541 }, { "epoch": 3.909994478188846, "grad_norm": 0.7070299983024597, "learning_rate": 1.0617e-05, "loss": 0.0407, "step": 3542 }, { "epoch": 3.9110988404196574, "grad_norm": 1.1709160804748535, "learning_rate": 1.062e-05, "loss": 0.0604, "step": 3543 }, { "epoch": 3.9122032026504696, "grad_norm": 1.0177483558654785, "learning_rate": 1.0623000000000001e-05, "loss": 0.0525, "step": 3544 }, { "epoch": 3.913307564881281, 
"grad_norm": 0.5999156832695007, "learning_rate": 1.0626000000000001e-05, "loss": 0.0306, "step": 3545 }, { "epoch": 3.9144119271120927, "grad_norm": 0.5539897680282593, "learning_rate": 1.0629000000000001e-05, "loss": 0.0343, "step": 3546 }, { "epoch": 3.9155162893429045, "grad_norm": 0.6926740407943726, "learning_rate": 1.0632000000000001e-05, "loss": 0.0363, "step": 3547 }, { "epoch": 3.9166206515737163, "grad_norm": 1.3145349025726318, "learning_rate": 1.0634999999999999e-05, "loss": 0.0837, "step": 3548 }, { "epoch": 3.917725013804528, "grad_norm": 1.3105040788650513, "learning_rate": 1.0638e-05, "loss": 0.0699, "step": 3549 }, { "epoch": 3.9188293760353394, "grad_norm": 0.5795076489448547, "learning_rate": 1.0641e-05, "loss": 0.0396, "step": 3550 }, { "epoch": 3.919933738266151, "grad_norm": 0.7091808915138245, "learning_rate": 1.0644e-05, "loss": 0.0585, "step": 3551 }, { "epoch": 3.921038100496963, "grad_norm": 0.6882075071334839, "learning_rate": 1.0647e-05, "loss": 0.0589, "step": 3552 }, { "epoch": 3.922142462727775, "grad_norm": 0.6711314916610718, "learning_rate": 1.065e-05, "loss": 0.0338, "step": 3553 }, { "epoch": 3.9232468249585866, "grad_norm": 0.8536028861999512, "learning_rate": 1.0653000000000001e-05, "loss": 0.0612, "step": 3554 }, { "epoch": 3.924351187189398, "grad_norm": 1.337795376777649, "learning_rate": 1.0656000000000001e-05, "loss": 0.0836, "step": 3555 }, { "epoch": 3.9254555494202097, "grad_norm": 0.6561750769615173, "learning_rate": 1.0659e-05, "loss": 0.0449, "step": 3556 }, { "epoch": 3.9265599116510215, "grad_norm": 0.7361724376678467, "learning_rate": 1.0662e-05, "loss": 0.0457, "step": 3557 }, { "epoch": 3.9276642738818333, "grad_norm": 0.7676254510879517, "learning_rate": 1.0665e-05, "loss": 0.0454, "step": 3558 }, { "epoch": 3.928768636112645, "grad_norm": 0.7472928166389465, "learning_rate": 1.0668000000000002e-05, "loss": 0.0458, "step": 3559 }, { "epoch": 3.9298729983434564, "grad_norm": 0.5718654990196228, 
"learning_rate": 1.0671e-05, "loss": 0.0559, "step": 3560 }, { "epoch": 3.9309773605742686, "grad_norm": 0.8353580832481384, "learning_rate": 1.0674e-05, "loss": 0.0422, "step": 3561 }, { "epoch": 3.93208172280508, "grad_norm": 0.6616442203521729, "learning_rate": 1.0677e-05, "loss": 0.0466, "step": 3562 }, { "epoch": 3.9331860850358917, "grad_norm": 0.9259065985679626, "learning_rate": 1.068e-05, "loss": 0.0569, "step": 3563 }, { "epoch": 3.9342904472667035, "grad_norm": 1.519403338432312, "learning_rate": 1.0683000000000001e-05, "loss": 0.0651, "step": 3564 }, { "epoch": 3.9353948094975153, "grad_norm": 0.9878258109092712, "learning_rate": 1.0686e-05, "loss": 0.0608, "step": 3565 }, { "epoch": 3.936499171728327, "grad_norm": 1.1501232385635376, "learning_rate": 1.0689e-05, "loss": 0.0525, "step": 3566 }, { "epoch": 3.9376035339591384, "grad_norm": 1.052339792251587, "learning_rate": 1.0692e-05, "loss": 0.0611, "step": 3567 }, { "epoch": 3.93870789618995, "grad_norm": 1.5002665519714355, "learning_rate": 1.0695e-05, "loss": 0.0852, "step": 3568 }, { "epoch": 3.939812258420762, "grad_norm": 1.2316603660583496, "learning_rate": 1.0698e-05, "loss": 0.4637, "step": 3569 }, { "epoch": 3.9409166206515738, "grad_norm": 0.9806750416755676, "learning_rate": 1.0701000000000001e-05, "loss": 0.3079, "step": 3570 }, { "epoch": 3.9420209828823856, "grad_norm": 1.4528969526290894, "learning_rate": 1.0704000000000001e-05, "loss": 0.332, "step": 3571 }, { "epoch": 3.943125345113197, "grad_norm": 0.877815306186676, "learning_rate": 1.0707e-05, "loss": 0.2102, "step": 3572 }, { "epoch": 3.9442297073440087, "grad_norm": 1.108527660369873, "learning_rate": 1.0709999999999999e-05, "loss": 0.2915, "step": 3573 }, { "epoch": 3.9453340695748205, "grad_norm": 0.8168070912361145, "learning_rate": 1.0712999999999999e-05, "loss": 0.1787, "step": 3574 }, { "epoch": 3.9464384318056323, "grad_norm": 0.811738908290863, "learning_rate": 1.0716e-05, "loss": 0.1402, "step": 3575 }, { "epoch": 
3.947542794036444, "grad_norm": 0.7084110975265503, "learning_rate": 1.0719e-05, "loss": 0.0942, "step": 3576 }, { "epoch": 3.948647156267256, "grad_norm": 0.7691982388496399, "learning_rate": 1.0722e-05, "loss": 0.0629, "step": 3577 }, { "epoch": 3.9497515184980676, "grad_norm": 0.7151123285293579, "learning_rate": 1.0725e-05, "loss": 0.0898, "step": 3578 }, { "epoch": 3.950855880728879, "grad_norm": 0.8642895817756653, "learning_rate": 1.0728e-05, "loss": 0.0625, "step": 3579 }, { "epoch": 3.9519602429596907, "grad_norm": 0.6865328550338745, "learning_rate": 1.0731000000000001e-05, "loss": 0.0481, "step": 3580 }, { "epoch": 3.9530646051905025, "grad_norm": 0.4655342400074005, "learning_rate": 1.0734000000000001e-05, "loss": 0.0419, "step": 3581 }, { "epoch": 3.9541689674213143, "grad_norm": 0.6052537560462952, "learning_rate": 1.0737e-05, "loss": 0.0786, "step": 3582 }, { "epoch": 3.955273329652126, "grad_norm": 0.6466752886772156, "learning_rate": 1.074e-05, "loss": 0.0396, "step": 3583 }, { "epoch": 3.9563776918829374, "grad_norm": 2.72434139251709, "learning_rate": 1.0743e-05, "loss": 0.0619, "step": 3584 }, { "epoch": 3.957482054113749, "grad_norm": 0.6441872715950012, "learning_rate": 1.0746e-05, "loss": 0.0353, "step": 3585 }, { "epoch": 3.958586416344561, "grad_norm": 0.7143186926841736, "learning_rate": 1.0749e-05, "loss": 0.0446, "step": 3586 }, { "epoch": 3.9596907785753728, "grad_norm": 0.5271172523498535, "learning_rate": 1.0752e-05, "loss": 0.0373, "step": 3587 }, { "epoch": 3.9607951408061846, "grad_norm": 0.6953455209732056, "learning_rate": 1.0755e-05, "loss": 0.0448, "step": 3588 }, { "epoch": 3.961899503036996, "grad_norm": 0.7325077652931213, "learning_rate": 1.0758e-05, "loss": 0.0573, "step": 3589 }, { "epoch": 3.9630038652678077, "grad_norm": 0.8341305255889893, "learning_rate": 1.0761e-05, "loss": 0.0582, "step": 3590 }, { "epoch": 3.9641082274986195, "grad_norm": 0.7496870756149292, "learning_rate": 1.0764e-05, "loss": 0.0744, "step": 3591 
}, { "epoch": 3.9652125897294312, "grad_norm": 0.7452301979064941, "learning_rate": 1.0767e-05, "loss": 0.0455, "step": 3592 }, { "epoch": 3.966316951960243, "grad_norm": 1.0524014234542847, "learning_rate": 1.077e-05, "loss": 0.0852, "step": 3593 }, { "epoch": 3.967421314191055, "grad_norm": 0.5823958516120911, "learning_rate": 1.0773e-05, "loss": 0.0352, "step": 3594 }, { "epoch": 3.9685256764218666, "grad_norm": 0.9534660577774048, "learning_rate": 1.0776000000000002e-05, "loss": 0.0522, "step": 3595 }, { "epoch": 3.969630038652678, "grad_norm": 0.6771032810211182, "learning_rate": 1.0779000000000001e-05, "loss": 0.054, "step": 3596 }, { "epoch": 3.9707344008834897, "grad_norm": 0.6918739080429077, "learning_rate": 1.0782e-05, "loss": 0.0652, "step": 3597 }, { "epoch": 3.9718387631143015, "grad_norm": 1.0363012552261353, "learning_rate": 1.0785e-05, "loss": 0.0568, "step": 3598 }, { "epoch": 3.9729431253451133, "grad_norm": 0.46808210015296936, "learning_rate": 1.0787999999999999e-05, "loss": 0.0416, "step": 3599 }, { "epoch": 3.974047487575925, "grad_norm": 0.7406917810440063, "learning_rate": 1.0791e-05, "loss": 0.0676, "step": 3600 }, { "epoch": 3.9751518498067364, "grad_norm": 0.6930081844329834, "learning_rate": 1.0794e-05, "loss": 0.0538, "step": 3601 }, { "epoch": 3.976256212037548, "grad_norm": 0.8654350638389587, "learning_rate": 1.0797e-05, "loss": 0.0554, "step": 3602 }, { "epoch": 3.97736057426836, "grad_norm": 1.4578840732574463, "learning_rate": 1.08e-05, "loss": 0.063, "step": 3603 }, { "epoch": 3.9784649364991718, "grad_norm": 0.6941872835159302, "learning_rate": 1.0803e-05, "loss": 0.0516, "step": 3604 }, { "epoch": 3.9795692987299836, "grad_norm": 0.5672191381454468, "learning_rate": 1.0806000000000001e-05, "loss": 0.0425, "step": 3605 }, { "epoch": 3.980673660960795, "grad_norm": 0.964184582233429, "learning_rate": 1.0809000000000001e-05, "loss": 0.0409, "step": 3606 }, { "epoch": 3.981778023191607, "grad_norm": 0.6206899881362915, 
"learning_rate": 1.0812e-05, "loss": 0.0388, "step": 3607 }, { "epoch": 3.9828823854224185, "grad_norm": 1.0429004430770874, "learning_rate": 1.0815e-05, "loss": 0.0706, "step": 3608 }, { "epoch": 3.9839867476532302, "grad_norm": 0.7072444558143616, "learning_rate": 1.0817999999999999e-05, "loss": 0.0483, "step": 3609 }, { "epoch": 3.985091109884042, "grad_norm": 1.0014970302581787, "learning_rate": 1.0821e-05, "loss": 0.062, "step": 3610 }, { "epoch": 3.986195472114854, "grad_norm": 0.5557447075843811, "learning_rate": 1.0824e-05, "loss": 0.0329, "step": 3611 }, { "epoch": 3.9872998343456656, "grad_norm": 0.9825363755226135, "learning_rate": 1.0827e-05, "loss": 0.0367, "step": 3612 }, { "epoch": 3.988404196576477, "grad_norm": 0.8148062229156494, "learning_rate": 1.083e-05, "loss": 0.0481, "step": 3613 }, { "epoch": 3.9895085588072887, "grad_norm": 0.8487544655799866, "learning_rate": 1.0833e-05, "loss": 0.0347, "step": 3614 }, { "epoch": 3.9906129210381005, "grad_norm": 1.050754189491272, "learning_rate": 1.0836000000000001e-05, "loss": 0.0666, "step": 3615 }, { "epoch": 3.9917172832689123, "grad_norm": 0.9849124550819397, "learning_rate": 1.0839e-05, "loss": 0.0545, "step": 3616 }, { "epoch": 3.992821645499724, "grad_norm": 1.4218584299087524, "learning_rate": 1.0842e-05, "loss": 0.0555, "step": 3617 }, { "epoch": 3.9939260077305354, "grad_norm": 1.480578064918518, "learning_rate": 1.0845e-05, "loss": 0.0658, "step": 3618 }, { "epoch": 3.995030369961347, "grad_norm": 0.9995057582855225, "learning_rate": 1.0848e-05, "loss": 0.2098, "step": 3619 }, { "epoch": 3.996134732192159, "grad_norm": 0.7607977390289307, "learning_rate": 1.0851000000000002e-05, "loss": 0.0426, "step": 3620 }, { "epoch": 3.9972390944229708, "grad_norm": 1.277876853942871, "learning_rate": 1.0854e-05, "loss": 0.0513, "step": 3621 }, { "epoch": 3.9983434566537825, "grad_norm": 1.0500752925872803, "learning_rate": 1.0857e-05, "loss": 0.0566, "step": 3622 }, { "epoch": 3.999447818884594, 
"grad_norm": 0.8171917200088501, "learning_rate": 1.086e-05, "loss": 0.0387, "step": 3623 }, { "epoch": 4.0, "grad_norm": 0.6187085509300232, "learning_rate": 1.0863e-05, "loss": 0.0209, "step": 3624 }, { "epoch": 4.001104362230811, "grad_norm": 1.586827039718628, "learning_rate": 1.0866e-05, "loss": 0.3822, "step": 3625 }, { "epoch": 4.002208724461624, "grad_norm": 1.4262884855270386, "learning_rate": 1.0869e-05, "loss": 0.261, "step": 3626 }, { "epoch": 4.003313086692435, "grad_norm": 1.6069140434265137, "learning_rate": 1.0872e-05, "loss": 0.2931, "step": 3627 }, { "epoch": 4.004417448923247, "grad_norm": 1.0401769876480103, "learning_rate": 1.0875e-05, "loss": 0.2592, "step": 3628 }, { "epoch": 4.0055218111540585, "grad_norm": 1.1278778314590454, "learning_rate": 1.0878e-05, "loss": 0.237, "step": 3629 }, { "epoch": 4.00662617338487, "grad_norm": 1.1807787418365479, "learning_rate": 1.0881000000000001e-05, "loss": 0.2421, "step": 3630 }, { "epoch": 4.007730535615682, "grad_norm": 0.8140753507614136, "learning_rate": 1.0884000000000001e-05, "loss": 0.1268, "step": 3631 }, { "epoch": 4.008834897846493, "grad_norm": 0.5394905805587769, "learning_rate": 1.0887000000000001e-05, "loss": 0.0666, "step": 3632 }, { "epoch": 4.009939260077306, "grad_norm": 0.6953742504119873, "learning_rate": 1.089e-05, "loss": 0.0843, "step": 3633 }, { "epoch": 4.011043622308117, "grad_norm": 0.6037065982818604, "learning_rate": 1.0892999999999999e-05, "loss": 0.0605, "step": 3634 }, { "epoch": 4.012147984538929, "grad_norm": 0.9576987028121948, "learning_rate": 1.0896e-05, "loss": 0.0415, "step": 3635 }, { "epoch": 4.0132523467697405, "grad_norm": 0.5431962013244629, "learning_rate": 1.0899e-05, "loss": 0.0516, "step": 3636 }, { "epoch": 4.014356709000552, "grad_norm": 0.6863569617271423, "learning_rate": 1.0902e-05, "loss": 0.0773, "step": 3637 }, { "epoch": 4.015461071231364, "grad_norm": 0.5311571359634399, "learning_rate": 1.0905e-05, "loss": 0.0599, "step": 3638 }, { "epoch": 
4.016565433462175, "grad_norm": 0.647037923336029, "learning_rate": 1.0908e-05, "loss": 0.0665, "step": 3639 }, { "epoch": 4.017669795692988, "grad_norm": 0.7492836713790894, "learning_rate": 1.0911000000000001e-05, "loss": 0.0439, "step": 3640 }, { "epoch": 4.018774157923799, "grad_norm": 0.44125261902809143, "learning_rate": 1.0914000000000001e-05, "loss": 0.0308, "step": 3641 }, { "epoch": 4.01987852015461, "grad_norm": 0.4750896394252777, "learning_rate": 1.0917e-05, "loss": 0.0316, "step": 3642 }, { "epoch": 4.020982882385423, "grad_norm": 0.502220094203949, "learning_rate": 1.092e-05, "loss": 0.0392, "step": 3643 }, { "epoch": 4.022087244616234, "grad_norm": 0.6809626221656799, "learning_rate": 1.0923e-05, "loss": 0.0376, "step": 3644 }, { "epoch": 4.023191606847046, "grad_norm": 0.6274847984313965, "learning_rate": 1.0926000000000002e-05, "loss": 0.0407, "step": 3645 }, { "epoch": 4.0242959690778575, "grad_norm": 0.5970674157142639, "learning_rate": 1.0929e-05, "loss": 0.0341, "step": 3646 }, { "epoch": 4.025400331308669, "grad_norm": 0.8388712406158447, "learning_rate": 1.0932e-05, "loss": 0.0559, "step": 3647 }, { "epoch": 4.026504693539481, "grad_norm": 0.7572497129440308, "learning_rate": 1.0935e-05, "loss": 0.0562, "step": 3648 }, { "epoch": 4.027609055770292, "grad_norm": 0.8005492091178894, "learning_rate": 1.0938e-05, "loss": 0.0702, "step": 3649 }, { "epoch": 4.028713418001105, "grad_norm": 0.8345343470573425, "learning_rate": 1.0941e-05, "loss": 0.0439, "step": 3650 }, { "epoch": 4.029817780231916, "grad_norm": 0.9470311999320984, "learning_rate": 1.0944e-05, "loss": 0.05, "step": 3651 }, { "epoch": 4.030922142462728, "grad_norm": 0.573987603187561, "learning_rate": 1.0947e-05, "loss": 0.0354, "step": 3652 }, { "epoch": 4.0320265046935395, "grad_norm": 1.1087346076965332, "learning_rate": 1.095e-05, "loss": 0.1068, "step": 3653 }, { "epoch": 4.033130866924351, "grad_norm": 0.5285120606422424, "learning_rate": 1.0953e-05, "loss": 0.0353, "step": 
3654 }, { "epoch": 4.034235229155163, "grad_norm": 0.6590080857276917, "learning_rate": 1.0956000000000001e-05, "loss": 0.0446, "step": 3655 }, { "epoch": 4.035339591385974, "grad_norm": 0.7394444942474365, "learning_rate": 1.0959000000000001e-05, "loss": 0.0381, "step": 3656 }, { "epoch": 4.036443953616787, "grad_norm": 0.5155125260353088, "learning_rate": 1.0962000000000001e-05, "loss": 0.0306, "step": 3657 }, { "epoch": 4.037548315847598, "grad_norm": 0.8678199648857117, "learning_rate": 1.0965e-05, "loss": 0.0385, "step": 3658 }, { "epoch": 4.038652678078409, "grad_norm": 0.645257294178009, "learning_rate": 1.0967999999999999e-05, "loss": 0.0424, "step": 3659 }, { "epoch": 4.039757040309222, "grad_norm": 0.5994009375572205, "learning_rate": 1.0971e-05, "loss": 0.0352, "step": 3660 }, { "epoch": 4.040861402540033, "grad_norm": 0.6966992020606995, "learning_rate": 1.0974e-05, "loss": 0.0454, "step": 3661 }, { "epoch": 4.041965764770845, "grad_norm": 1.0153878927230835, "learning_rate": 1.0977e-05, "loss": 0.0587, "step": 3662 }, { "epoch": 4.0430701270016565, "grad_norm": 1.0816395282745361, "learning_rate": 1.098e-05, "loss": 0.0511, "step": 3663 }, { "epoch": 4.044174489232468, "grad_norm": 1.385914921760559, "learning_rate": 1.0983e-05, "loss": 0.0326, "step": 3664 }, { "epoch": 4.04527885146328, "grad_norm": 0.7962536811828613, "learning_rate": 1.0986000000000001e-05, "loss": 0.0405, "step": 3665 }, { "epoch": 4.046383213694091, "grad_norm": 1.355802059173584, "learning_rate": 1.0989000000000001e-05, "loss": 0.0528, "step": 3666 }, { "epoch": 4.047487575924904, "grad_norm": 0.6496784687042236, "learning_rate": 1.0992e-05, "loss": 0.0227, "step": 3667 }, { "epoch": 4.048591938155715, "grad_norm": 0.9036570191383362, "learning_rate": 1.0995e-05, "loss": 0.0624, "step": 3668 }, { "epoch": 4.049696300386527, "grad_norm": 0.8061063289642334, "learning_rate": 1.0998e-05, "loss": 0.0487, "step": 3669 }, { "epoch": 4.0508006626173385, "grad_norm": 1.393489956855774, 
"learning_rate": 1.1001e-05, "loss": 0.0507, "step": 3670 }, { "epoch": 4.05190502484815, "grad_norm": 0.8356509804725647, "learning_rate": 1.1004e-05, "loss": 0.0531, "step": 3671 }, { "epoch": 4.053009387078962, "grad_norm": 0.9230273962020874, "learning_rate": 1.1007e-05, "loss": 0.0594, "step": 3672 }, { "epoch": 4.054113749309773, "grad_norm": 0.9045629501342773, "learning_rate": 1.101e-05, "loss": 0.0444, "step": 3673 }, { "epoch": 4.055218111540586, "grad_norm": 1.0304160118103027, "learning_rate": 1.1013e-05, "loss": 0.0701, "step": 3674 }, { "epoch": 4.056322473771397, "grad_norm": 4.552463531494141, "learning_rate": 1.1016000000000001e-05, "loss": 0.3913, "step": 3675 }, { "epoch": 4.057426836002208, "grad_norm": 1.097322940826416, "learning_rate": 1.1019e-05, "loss": 0.3268, "step": 3676 }, { "epoch": 4.0585311982330206, "grad_norm": 1.2571156024932861, "learning_rate": 1.1022e-05, "loss": 0.2848, "step": 3677 }, { "epoch": 4.059635560463832, "grad_norm": 2.248267889022827, "learning_rate": 1.1025e-05, "loss": 0.2996, "step": 3678 }, { "epoch": 4.060739922694644, "grad_norm": 0.988088071346283, "learning_rate": 1.1028e-05, "loss": 0.1773, "step": 3679 }, { "epoch": 4.0618442849254555, "grad_norm": 2.0295493602752686, "learning_rate": 1.1031000000000002e-05, "loss": 0.2518, "step": 3680 }, { "epoch": 4.062948647156268, "grad_norm": 1.8036710023880005, "learning_rate": 1.1034000000000001e-05, "loss": 0.2016, "step": 3681 }, { "epoch": 4.064053009387079, "grad_norm": 0.9018251299858093, "learning_rate": 1.1037000000000001e-05, "loss": 0.0794, "step": 3682 }, { "epoch": 4.06515737161789, "grad_norm": 0.522739827632904, "learning_rate": 1.104e-05, "loss": 0.0844, "step": 3683 }, { "epoch": 4.066261733848703, "grad_norm": 0.9470599889755249, "learning_rate": 1.1042999999999999e-05, "loss": 0.0679, "step": 3684 }, { "epoch": 4.067366096079514, "grad_norm": 0.8177869319915771, "learning_rate": 1.1046e-05, "loss": 0.083, "step": 3685 }, { "epoch": 
4.068470458310326, "grad_norm": 0.633410632610321, "learning_rate": 1.1049e-05, "loss": 0.0356, "step": 3686 }, { "epoch": 4.0695748205411375, "grad_norm": 0.694625198841095, "learning_rate": 1.1052e-05, "loss": 0.0578, "step": 3687 }, { "epoch": 4.070679182771949, "grad_norm": 0.8860959410667419, "learning_rate": 1.1055e-05, "loss": 0.0619, "step": 3688 }, { "epoch": 4.071783545002761, "grad_norm": 0.5869988203048706, "learning_rate": 1.1058e-05, "loss": 0.0527, "step": 3689 }, { "epoch": 4.072887907233572, "grad_norm": 0.7036455273628235, "learning_rate": 1.1061000000000001e-05, "loss": 0.0431, "step": 3690 }, { "epoch": 4.073992269464385, "grad_norm": 0.4946216642856598, "learning_rate": 1.1064000000000001e-05, "loss": 0.0366, "step": 3691 }, { "epoch": 4.075096631695196, "grad_norm": 0.667076826095581, "learning_rate": 1.1067000000000001e-05, "loss": 0.0551, "step": 3692 }, { "epoch": 4.076200993926007, "grad_norm": 0.9372680187225342, "learning_rate": 1.107e-05, "loss": 0.0394, "step": 3693 }, { "epoch": 4.0773053561568195, "grad_norm": 0.3904375433921814, "learning_rate": 1.1073e-05, "loss": 0.0217, "step": 3694 }, { "epoch": 4.078409718387631, "grad_norm": 0.580248236656189, "learning_rate": 1.1075999999999999e-05, "loss": 0.0613, "step": 3695 }, { "epoch": 4.079514080618443, "grad_norm": 0.6726567149162292, "learning_rate": 1.1079e-05, "loss": 0.0573, "step": 3696 }, { "epoch": 4.0806184428492545, "grad_norm": 0.4780850410461426, "learning_rate": 1.1082e-05, "loss": 0.0411, "step": 3697 }, { "epoch": 4.081722805080067, "grad_norm": 0.492460697889328, "learning_rate": 1.1085e-05, "loss": 0.0273, "step": 3698 }, { "epoch": 4.082827167310878, "grad_norm": 0.8472488522529602, "learning_rate": 1.1088e-05, "loss": 0.0433, "step": 3699 }, { "epoch": 4.083931529541689, "grad_norm": 0.8232851028442383, "learning_rate": 1.1091e-05, "loss": 0.065, "step": 3700 }, { "epoch": 4.085035891772502, "grad_norm": 0.9865292906761169, "learning_rate": 1.1094e-05, "loss": 
0.0615, "step": 3701 }, { "epoch": 4.086140254003313, "grad_norm": 0.5905766487121582, "learning_rate": 1.1097e-05, "loss": 0.0457, "step": 3702 }, { "epoch": 4.087244616234125, "grad_norm": 0.5848508477210999, "learning_rate": 1.11e-05, "loss": 0.0323, "step": 3703 }, { "epoch": 4.0883489784649365, "grad_norm": 0.8464433550834656, "learning_rate": 1.1103e-05, "loss": 0.0449, "step": 3704 }, { "epoch": 4.089453340695748, "grad_norm": 0.6083171367645264, "learning_rate": 1.1106e-05, "loss": 0.0386, "step": 3705 }, { "epoch": 4.09055770292656, "grad_norm": 0.7356440424919128, "learning_rate": 1.1109000000000002e-05, "loss": 0.038, "step": 3706 }, { "epoch": 4.091662065157371, "grad_norm": 0.634057343006134, "learning_rate": 1.1112e-05, "loss": 0.0474, "step": 3707 }, { "epoch": 4.092766427388184, "grad_norm": 0.6191884279251099, "learning_rate": 1.1115e-05, "loss": 0.0387, "step": 3708 }, { "epoch": 4.093870789618995, "grad_norm": 0.5264149308204651, "learning_rate": 1.1118e-05, "loss": 0.0307, "step": 3709 }, { "epoch": 4.094975151849806, "grad_norm": 0.8873062133789062, "learning_rate": 1.1120999999999999e-05, "loss": 0.0547, "step": 3710 }, { "epoch": 4.0960795140806185, "grad_norm": 0.6170629858970642, "learning_rate": 1.1124e-05, "loss": 0.0302, "step": 3711 }, { "epoch": 4.09718387631143, "grad_norm": 0.6498018503189087, "learning_rate": 1.1127e-05, "loss": 0.0519, "step": 3712 }, { "epoch": 4.098288238542242, "grad_norm": 1.0463507175445557, "learning_rate": 1.113e-05, "loss": 0.0615, "step": 3713 }, { "epoch": 4.0993926007730535, "grad_norm": 0.728865385055542, "learning_rate": 1.1133e-05, "loss": 0.0511, "step": 3714 }, { "epoch": 4.100496963003866, "grad_norm": 1.061154842376709, "learning_rate": 1.1136e-05, "loss": 0.0449, "step": 3715 }, { "epoch": 4.101601325234677, "grad_norm": 0.6636130809783936, "learning_rate": 1.1139000000000001e-05, "loss": 0.0587, "step": 3716 }, { "epoch": 4.102705687465488, "grad_norm": 0.894433319568634, "learning_rate": 
1.1142000000000001e-05, "loss": 0.041, "step": 3717 }, { "epoch": 4.103810049696301, "grad_norm": 0.6738901138305664, "learning_rate": 1.1145000000000001e-05, "loss": 0.05, "step": 3718 }, { "epoch": 4.104914411927112, "grad_norm": 0.7308971285820007, "learning_rate": 1.1148e-05, "loss": 0.0573, "step": 3719 }, { "epoch": 4.106018774157924, "grad_norm": 1.0599501132965088, "learning_rate": 1.1150999999999999e-05, "loss": 0.0565, "step": 3720 }, { "epoch": 4.1071231363887355, "grad_norm": 1.1487146615982056, "learning_rate": 1.1154e-05, "loss": 0.0597, "step": 3721 }, { "epoch": 4.108227498619547, "grad_norm": 0.9426957368850708, "learning_rate": 1.1157e-05, "loss": 0.0524, "step": 3722 }, { "epoch": 4.109331860850359, "grad_norm": 1.2688963413238525, "learning_rate": 1.116e-05, "loss": 0.0767, "step": 3723 }, { "epoch": 4.11043622308117, "grad_norm": 1.124574899673462, "learning_rate": 1.1163e-05, "loss": 0.0613, "step": 3724 }, { "epoch": 4.111540585311983, "grad_norm": 2.1600186824798584, "learning_rate": 1.1166e-05, "loss": 0.3681, "step": 3725 }, { "epoch": 4.112644947542794, "grad_norm": 1.1148110628128052, "learning_rate": 1.1169000000000001e-05, "loss": 0.3296, "step": 3726 }, { "epoch": 4.113749309773605, "grad_norm": 1.1751412153244019, "learning_rate": 1.1172e-05, "loss": 0.34, "step": 3727 }, { "epoch": 4.1148536720044175, "grad_norm": 1.0692379474639893, "learning_rate": 1.1175e-05, "loss": 0.2374, "step": 3728 }, { "epoch": 4.115958034235229, "grad_norm": 1.0093492269515991, "learning_rate": 1.1178e-05, "loss": 0.2638, "step": 3729 }, { "epoch": 4.117062396466041, "grad_norm": 1.0309090614318848, "learning_rate": 1.1181e-05, "loss": 0.1714, "step": 3730 }, { "epoch": 4.1181667586968524, "grad_norm": 1.4947139024734497, "learning_rate": 1.1184000000000002e-05, "loss": 0.1326, "step": 3731 }, { "epoch": 4.119271120927665, "grad_norm": 0.8690940737724304, "learning_rate": 1.1187e-05, "loss": 0.1524, "step": 3732 }, { "epoch": 4.120375483158476, 
"grad_norm": 0.6504218578338623, "learning_rate": 1.119e-05, "loss": 0.091, "step": 3733 }, { "epoch": 4.121479845389287, "grad_norm": 0.7671085000038147, "learning_rate": 1.1193e-05, "loss": 0.0406, "step": 3734 }, { "epoch": 4.1225842076201, "grad_norm": 0.6253466606140137, "learning_rate": 1.1196e-05, "loss": 0.0583, "step": 3735 }, { "epoch": 4.123688569850911, "grad_norm": 0.5957848429679871, "learning_rate": 1.1199e-05, "loss": 0.0529, "step": 3736 }, { "epoch": 4.124792932081723, "grad_norm": 0.530552089214325, "learning_rate": 1.1202e-05, "loss": 0.032, "step": 3737 }, { "epoch": 4.1258972943125345, "grad_norm": 0.6965034008026123, "learning_rate": 1.1205e-05, "loss": 0.0874, "step": 3738 }, { "epoch": 4.127001656543346, "grad_norm": 0.47859737277030945, "learning_rate": 1.1208e-05, "loss": 0.0334, "step": 3739 }, { "epoch": 4.128106018774158, "grad_norm": 0.379219114780426, "learning_rate": 1.1211e-05, "loss": 0.0301, "step": 3740 }, { "epoch": 4.129210381004969, "grad_norm": 0.7661411166191101, "learning_rate": 1.1214000000000001e-05, "loss": 0.0457, "step": 3741 }, { "epoch": 4.130314743235782, "grad_norm": 0.7328221201896667, "learning_rate": 1.1217000000000001e-05, "loss": 0.0602, "step": 3742 }, { "epoch": 4.131419105466593, "grad_norm": 0.45273059606552124, "learning_rate": 1.1220000000000001e-05, "loss": 0.0445, "step": 3743 }, { "epoch": 4.132523467697405, "grad_norm": 1.3169153928756714, "learning_rate": 1.1222999999999999e-05, "loss": 0.0298, "step": 3744 }, { "epoch": 4.1336278299282165, "grad_norm": 0.7455900311470032, "learning_rate": 1.1225999999999999e-05, "loss": 0.0439, "step": 3745 }, { "epoch": 4.134732192159028, "grad_norm": 0.6893858909606934, "learning_rate": 1.1229e-05, "loss": 0.0462, "step": 3746 }, { "epoch": 4.13583655438984, "grad_norm": 0.5003895163536072, "learning_rate": 1.1232e-05, "loss": 0.0433, "step": 3747 }, { "epoch": 4.136940916620651, "grad_norm": 0.6537059545516968, "learning_rate": 1.1235e-05, "loss": 0.0425, 
"step": 3748 }, { "epoch": 4.138045278851464, "grad_norm": 0.8195690512657166, "learning_rate": 1.1238e-05, "loss": 0.0471, "step": 3749 }, { "epoch": 4.139149641082275, "grad_norm": 0.8053320646286011, "learning_rate": 1.1241e-05, "loss": 0.0592, "step": 3750 }, { "epoch": 4.140254003313086, "grad_norm": 0.583106517791748, "learning_rate": 1.1244000000000001e-05, "loss": 0.0907, "step": 3751 }, { "epoch": 4.141358365543899, "grad_norm": 0.4293788969516754, "learning_rate": 1.1247000000000001e-05, "loss": 0.0256, "step": 3752 }, { "epoch": 4.14246272777471, "grad_norm": 0.5418418645858765, "learning_rate": 1.125e-05, "loss": 0.0578, "step": 3753 }, { "epoch": 4.143567090005522, "grad_norm": 0.4189221262931824, "learning_rate": 1.1253e-05, "loss": 0.0234, "step": 3754 }, { "epoch": 4.1446714522363335, "grad_norm": 1.208659052848816, "learning_rate": 1.1256e-05, "loss": 0.0248, "step": 3755 }, { "epoch": 4.145775814467145, "grad_norm": 0.9351107478141785, "learning_rate": 1.1259e-05, "loss": 0.0728, "step": 3756 }, { "epoch": 4.146880176697957, "grad_norm": 0.6476815938949585, "learning_rate": 1.1262e-05, "loss": 0.0361, "step": 3757 }, { "epoch": 4.147984538928768, "grad_norm": 0.6550929546356201, "learning_rate": 1.1265e-05, "loss": 0.0565, "step": 3758 }, { "epoch": 4.149088901159581, "grad_norm": 0.7949283719062805, "learning_rate": 1.1268e-05, "loss": 0.0404, "step": 3759 }, { "epoch": 4.150193263390392, "grad_norm": 0.554231584072113, "learning_rate": 1.1271e-05, "loss": 0.0343, "step": 3760 }, { "epoch": 4.151297625621204, "grad_norm": 0.6300788521766663, "learning_rate": 1.1274e-05, "loss": 0.04, "step": 3761 }, { "epoch": 4.1524019878520155, "grad_norm": 0.7393540143966675, "learning_rate": 1.1277e-05, "loss": 0.0492, "step": 3762 }, { "epoch": 4.153506350082827, "grad_norm": 0.7009820938110352, "learning_rate": 1.128e-05, "loss": 0.0468, "step": 3763 }, { "epoch": 4.154610712313639, "grad_norm": 0.7193275690078735, "learning_rate": 1.1283e-05, "loss": 
0.0358, "step": 3764 }, { "epoch": 4.15571507454445, "grad_norm": 0.764694333076477, "learning_rate": 1.1286e-05, "loss": 0.0332, "step": 3765 }, { "epoch": 4.156819436775263, "grad_norm": 0.7345694899559021, "learning_rate": 1.1289000000000002e-05, "loss": 0.0419, "step": 3766 }, { "epoch": 4.157923799006074, "grad_norm": 1.0793625116348267, "learning_rate": 1.1292000000000001e-05, "loss": 0.0578, "step": 3767 }, { "epoch": 4.159028161236885, "grad_norm": 0.7431222796440125, "learning_rate": 1.1295000000000001e-05, "loss": 0.0627, "step": 3768 }, { "epoch": 4.160132523467698, "grad_norm": 0.8245964646339417, "learning_rate": 1.1298e-05, "loss": 0.0505, "step": 3769 }, { "epoch": 4.161236885698509, "grad_norm": 1.0740216970443726, "learning_rate": 1.1300999999999999e-05, "loss": 0.0741, "step": 3770 }, { "epoch": 4.162341247929321, "grad_norm": 0.9474102854728699, "learning_rate": 1.1304e-05, "loss": 0.061, "step": 3771 }, { "epoch": 4.1634456101601325, "grad_norm": 0.9210101962089539, "learning_rate": 1.1307e-05, "loss": 0.0326, "step": 3772 }, { "epoch": 4.164549972390944, "grad_norm": 0.77455735206604, "learning_rate": 1.131e-05, "loss": 0.0487, "step": 3773 }, { "epoch": 4.165654334621756, "grad_norm": 0.8152170777320862, "learning_rate": 1.1313e-05, "loss": 0.0621, "step": 3774 }, { "epoch": 4.166758696852567, "grad_norm": 1.4682719707489014, "learning_rate": 1.1316e-05, "loss": 0.3449, "step": 3775 }, { "epoch": 4.16786305908338, "grad_norm": 0.9888486862182617, "learning_rate": 1.1319000000000001e-05, "loss": 0.3362, "step": 3776 }, { "epoch": 4.168967421314191, "grad_norm": 0.8325213193893433, "learning_rate": 1.1322000000000001e-05, "loss": 0.2204, "step": 3777 }, { "epoch": 4.170071783545003, "grad_norm": 1.760923981666565, "learning_rate": 1.1325e-05, "loss": 0.2336, "step": 3778 }, { "epoch": 4.1711761457758145, "grad_norm": 1.0005686283111572, "learning_rate": 1.1328e-05, "loss": 0.2162, "step": 3779 }, { "epoch": 4.172280508006626, "grad_norm": 
1.4004185199737549, "learning_rate": 1.1331e-05, "loss": 0.2292, "step": 3780 }, { "epoch": 4.173384870237438, "grad_norm": 0.9130290150642395, "learning_rate": 1.1334e-05, "loss": 0.1447, "step": 3781 }, { "epoch": 4.174489232468249, "grad_norm": 0.940673291683197, "learning_rate": 1.1337e-05, "loss": 0.1415, "step": 3782 }, { "epoch": 4.175593594699062, "grad_norm": 1.8516004085540771, "learning_rate": 1.134e-05, "loss": 0.0503, "step": 3783 }, { "epoch": 4.176697956929873, "grad_norm": 0.8812551498413086, "learning_rate": 1.1343e-05, "loss": 0.0617, "step": 3784 }, { "epoch": 4.177802319160684, "grad_norm": 0.7207247018814087, "learning_rate": 1.1346e-05, "loss": 0.0671, "step": 3785 }, { "epoch": 4.178906681391497, "grad_norm": 6.467493057250977, "learning_rate": 1.1349000000000001e-05, "loss": 0.0404, "step": 3786 }, { "epoch": 4.180011043622308, "grad_norm": 0.6640885472297668, "learning_rate": 1.1352e-05, "loss": 0.0658, "step": 3787 }, { "epoch": 4.18111540585312, "grad_norm": 0.6740532517433167, "learning_rate": 1.1355e-05, "loss": 0.0399, "step": 3788 }, { "epoch": 4.1822197680839315, "grad_norm": 0.612688422203064, "learning_rate": 1.1358e-05, "loss": 0.0388, "step": 3789 }, { "epoch": 4.183324130314743, "grad_norm": 0.49500572681427, "learning_rate": 1.1361e-05, "loss": 0.0395, "step": 3790 }, { "epoch": 4.184428492545555, "grad_norm": 0.8778026103973389, "learning_rate": 1.1364000000000002e-05, "loss": 0.0587, "step": 3791 }, { "epoch": 4.185532854776366, "grad_norm": 0.6208480000495911, "learning_rate": 1.1367000000000001e-05, "loss": 0.046, "step": 3792 }, { "epoch": 4.186637217007179, "grad_norm": 0.3885766267776489, "learning_rate": 1.137e-05, "loss": 0.0256, "step": 3793 }, { "epoch": 4.18774157923799, "grad_norm": 0.5895208120346069, "learning_rate": 1.1373e-05, "loss": 0.0466, "step": 3794 }, { "epoch": 4.188845941468802, "grad_norm": 0.8383753895759583, "learning_rate": 1.1376e-05, "loss": 0.0487, "step": 3795 }, { "epoch": 4.1899503036996135, 
"grad_norm": 0.6420158743858337, "learning_rate": 1.1379e-05, "loss": 0.0454, "step": 3796 }, { "epoch": 4.191054665930425, "grad_norm": 1.3170382976531982, "learning_rate": 1.1382e-05, "loss": 0.047, "step": 3797 }, { "epoch": 4.192159028161237, "grad_norm": 0.7023798227310181, "learning_rate": 1.1385e-05, "loss": 0.0391, "step": 3798 }, { "epoch": 4.193263390392048, "grad_norm": 1.0379071235656738, "learning_rate": 1.1388e-05, "loss": 0.0565, "step": 3799 }, { "epoch": 4.194367752622861, "grad_norm": 0.5488101840019226, "learning_rate": 1.1391e-05, "loss": 0.0363, "step": 3800 }, { "epoch": 4.195472114853672, "grad_norm": 0.6727253794670105, "learning_rate": 1.1394000000000001e-05, "loss": 0.0321, "step": 3801 }, { "epoch": 4.196576477084483, "grad_norm": 0.6467809081077576, "learning_rate": 1.1397000000000001e-05, "loss": 0.054, "step": 3802 }, { "epoch": 4.197680839315296, "grad_norm": 0.5225046873092651, "learning_rate": 1.1400000000000001e-05, "loss": 0.0294, "step": 3803 }, { "epoch": 4.198785201546107, "grad_norm": 0.6932446360588074, "learning_rate": 1.1403e-05, "loss": 0.0311, "step": 3804 }, { "epoch": 4.199889563776919, "grad_norm": 0.40634894371032715, "learning_rate": 1.1406e-05, "loss": 0.0201, "step": 3805 }, { "epoch": 4.2009939260077305, "grad_norm": 0.5893606543540955, "learning_rate": 1.1409e-05, "loss": 0.0399, "step": 3806 }, { "epoch": 4.202098288238542, "grad_norm": 1.2426401376724243, "learning_rate": 1.1412e-05, "loss": 0.051, "step": 3807 }, { "epoch": 4.203202650469354, "grad_norm": 0.8175995945930481, "learning_rate": 1.1415e-05, "loss": 0.0535, "step": 3808 }, { "epoch": 4.204307012700165, "grad_norm": 0.6607005596160889, "learning_rate": 1.1418e-05, "loss": 0.0318, "step": 3809 }, { "epoch": 4.205411374930978, "grad_norm": 0.6407869458198547, "learning_rate": 1.1421e-05, "loss": 0.0645, "step": 3810 }, { "epoch": 4.206515737161789, "grad_norm": 1.069870948791504, "learning_rate": 1.1424000000000001e-05, "loss": 0.0466, "step": 3811 }, 
{ "epoch": 4.207620099392601, "grad_norm": 0.5725802183151245, "learning_rate": 1.1427000000000001e-05, "loss": 0.0301, "step": 3812 }, { "epoch": 4.2087244616234125, "grad_norm": 0.5874717831611633, "learning_rate": 1.143e-05, "loss": 0.0356, "step": 3813 }, { "epoch": 4.209828823854224, "grad_norm": 0.8113133311271667, "learning_rate": 1.1433e-05, "loss": 0.0482, "step": 3814 }, { "epoch": 4.210933186085036, "grad_norm": 0.6608842611312866, "learning_rate": 1.1436e-05, "loss": 0.0368, "step": 3815 }, { "epoch": 4.212037548315847, "grad_norm": 0.8549175262451172, "learning_rate": 1.1439e-05, "loss": 0.0434, "step": 3816 }, { "epoch": 4.21314191054666, "grad_norm": 0.5967397689819336, "learning_rate": 1.1442000000000002e-05, "loss": 0.0272, "step": 3817 }, { "epoch": 4.214246272777471, "grad_norm": 0.9072781801223755, "learning_rate": 1.1445e-05, "loss": 0.0562, "step": 3818 }, { "epoch": 4.215350635008282, "grad_norm": 0.732192873954773, "learning_rate": 1.1448e-05, "loss": 0.0388, "step": 3819 }, { "epoch": 4.216454997239095, "grad_norm": 0.8789403438568115, "learning_rate": 1.1451e-05, "loss": 0.0421, "step": 3820 }, { "epoch": 4.217559359469906, "grad_norm": 0.9371208548545837, "learning_rate": 1.1453999999999999e-05, "loss": 0.0473, "step": 3821 }, { "epoch": 4.218663721700718, "grad_norm": 0.8926464319229126, "learning_rate": 1.1457e-05, "loss": 0.0565, "step": 3822 }, { "epoch": 4.2197680839315295, "grad_norm": 1.0132077932357788, "learning_rate": 1.146e-05, "loss": 0.04, "step": 3823 }, { "epoch": 4.220872446162341, "grad_norm": 1.0477184057235718, "learning_rate": 1.1463e-05, "loss": 0.0576, "step": 3824 }, { "epoch": 4.221976808393153, "grad_norm": 1.5423519611358643, "learning_rate": 1.1466e-05, "loss": 0.4299, "step": 3825 }, { "epoch": 4.223081170623964, "grad_norm": 1.0163551568984985, "learning_rate": 1.1469e-05, "loss": 0.2829, "step": 3826 }, { "epoch": 4.224185532854777, "grad_norm": 1.20978581905365, "learning_rate": 1.1472000000000001e-05, 
"loss": 0.2741, "step": 3827 }, { "epoch": 4.225289895085588, "grad_norm": 0.8264893293380737, "learning_rate": 1.1475000000000001e-05, "loss": 0.1836, "step": 3828 }, { "epoch": 4.2263942573164, "grad_norm": 0.932593584060669, "learning_rate": 1.1478000000000001e-05, "loss": 0.1701, "step": 3829 }, { "epoch": 4.2274986195472115, "grad_norm": 4.1682209968566895, "learning_rate": 1.1480999999999999e-05, "loss": 0.2061, "step": 3830 }, { "epoch": 4.228602981778023, "grad_norm": 0.8267365097999573, "learning_rate": 1.1483999999999999e-05, "loss": 0.1531, "step": 3831 }, { "epoch": 4.229707344008835, "grad_norm": 0.6487506628036499, "learning_rate": 1.1487e-05, "loss": 0.1157, "step": 3832 }, { "epoch": 4.230811706239646, "grad_norm": 0.9440504312515259, "learning_rate": 1.149e-05, "loss": 0.0829, "step": 3833 }, { "epoch": 4.231916068470459, "grad_norm": 0.40635478496551514, "learning_rate": 1.1493e-05, "loss": 0.0396, "step": 3834 }, { "epoch": 4.23302043070127, "grad_norm": 0.55528324842453, "learning_rate": 1.1496e-05, "loss": 0.0505, "step": 3835 }, { "epoch": 4.234124792932081, "grad_norm": 0.7117777466773987, "learning_rate": 1.1499e-05, "loss": 0.0722, "step": 3836 }, { "epoch": 4.2352291551628936, "grad_norm": 0.4262905716896057, "learning_rate": 1.1502000000000001e-05, "loss": 0.0405, "step": 3837 }, { "epoch": 4.236333517393705, "grad_norm": 0.5631389021873474, "learning_rate": 1.1505e-05, "loss": 0.0846, "step": 3838 }, { "epoch": 4.237437879624517, "grad_norm": 0.6347842216491699, "learning_rate": 1.1508e-05, "loss": 0.0612, "step": 3839 }, { "epoch": 4.2385422418553285, "grad_norm": 0.5790302753448486, "learning_rate": 1.1511e-05, "loss": 0.0364, "step": 3840 }, { "epoch": 4.23964660408614, "grad_norm": 0.8873240351676941, "learning_rate": 1.1514e-05, "loss": 0.0443, "step": 3841 }, { "epoch": 4.240750966316952, "grad_norm": 0.656541109085083, "learning_rate": 1.1517e-05, "loss": 0.0439, "step": 3842 }, { "epoch": 4.241855328547763, "grad_norm": 
0.5492502450942993, "learning_rate": 1.152e-05, "loss": 0.0316, "step": 3843 }, { "epoch": 4.242959690778576, "grad_norm": 0.6124358773231506, "learning_rate": 1.1523e-05, "loss": 0.0502, "step": 3844 }, { "epoch": 4.244064053009387, "grad_norm": 0.7842099666595459, "learning_rate": 1.1526e-05, "loss": 0.0303, "step": 3845 }, { "epoch": 4.245168415240199, "grad_norm": 0.5689008831977844, "learning_rate": 1.1529e-05, "loss": 0.0445, "step": 3846 }, { "epoch": 4.2462727774710105, "grad_norm": 0.41796940565109253, "learning_rate": 1.1532e-05, "loss": 0.0215, "step": 3847 }, { "epoch": 4.247377139701822, "grad_norm": 0.5814855694770813, "learning_rate": 1.1535e-05, "loss": 0.0309, "step": 3848 }, { "epoch": 4.248481501932634, "grad_norm": 0.7574933171272278, "learning_rate": 1.1538e-05, "loss": 0.0429, "step": 3849 }, { "epoch": 4.249585864163445, "grad_norm": 0.6340706944465637, "learning_rate": 1.1541e-05, "loss": 0.0472, "step": 3850 }, { "epoch": 4.250690226394258, "grad_norm": 0.5595782399177551, "learning_rate": 1.1544e-05, "loss": 0.0363, "step": 3851 }, { "epoch": 4.251794588625069, "grad_norm": 0.6907392740249634, "learning_rate": 1.1547000000000001e-05, "loss": 0.0357, "step": 3852 }, { "epoch": 4.25289895085588, "grad_norm": 0.8499805331230164, "learning_rate": 1.1550000000000001e-05, "loss": 0.044, "step": 3853 }, { "epoch": 4.2540033130866926, "grad_norm": 0.6090306043624878, "learning_rate": 1.1553000000000001e-05, "loss": 0.0303, "step": 3854 }, { "epoch": 4.255107675317504, "grad_norm": 0.6643399000167847, "learning_rate": 1.1555999999999999e-05, "loss": 0.0441, "step": 3855 }, { "epoch": 4.256212037548316, "grad_norm": 0.6424278020858765, "learning_rate": 1.1558999999999999e-05, "loss": 0.0431, "step": 3856 }, { "epoch": 4.2573163997791275, "grad_norm": 0.6668751239776611, "learning_rate": 1.1562e-05, "loss": 0.0474, "step": 3857 }, { "epoch": 4.258420762009939, "grad_norm": 0.7112601399421692, "learning_rate": 1.1565e-05, "loss": 0.0333, "step": 3858 
}, { "epoch": 4.259525124240751, "grad_norm": 0.425018310546875, "learning_rate": 1.1568e-05, "loss": 0.0256, "step": 3859 }, { "epoch": 4.260629486471562, "grad_norm": 0.8124565482139587, "learning_rate": 1.1571e-05, "loss": 0.0429, "step": 3860 }, { "epoch": 4.261733848702375, "grad_norm": 0.6777994632720947, "learning_rate": 1.1574e-05, "loss": 0.0444, "step": 3861 }, { "epoch": 4.262838210933186, "grad_norm": 0.697206974029541, "learning_rate": 1.1577000000000001e-05, "loss": 0.0413, "step": 3862 }, { "epoch": 4.263942573163998, "grad_norm": 0.7813876271247864, "learning_rate": 1.1580000000000001e-05, "loss": 0.0414, "step": 3863 }, { "epoch": 4.2650469353948095, "grad_norm": 1.5337560176849365, "learning_rate": 1.1583e-05, "loss": 0.0494, "step": 3864 }, { "epoch": 4.266151297625621, "grad_norm": 0.7599643468856812, "learning_rate": 1.1586e-05, "loss": 0.047, "step": 3865 }, { "epoch": 4.267255659856433, "grad_norm": 1.3514245748519897, "learning_rate": 1.1589e-05, "loss": 0.073, "step": 3866 }, { "epoch": 4.268360022087244, "grad_norm": 0.5308287143707275, "learning_rate": 1.1592e-05, "loss": 0.0396, "step": 3867 }, { "epoch": 4.269464384318057, "grad_norm": 0.7427220344543457, "learning_rate": 1.1595e-05, "loss": 0.0341, "step": 3868 }, { "epoch": 4.270568746548868, "grad_norm": 0.9350671768188477, "learning_rate": 1.1598e-05, "loss": 0.0667, "step": 3869 }, { "epoch": 4.27167310877968, "grad_norm": 0.9017960429191589, "learning_rate": 1.1601e-05, "loss": 0.0503, "step": 3870 }, { "epoch": 4.2727774710104915, "grad_norm": 0.931876540184021, "learning_rate": 1.1604e-05, "loss": 0.0663, "step": 3871 }, { "epoch": 4.273881833241303, "grad_norm": 0.7648642659187317, "learning_rate": 1.1607000000000001e-05, "loss": 0.025, "step": 3872 }, { "epoch": 4.274986195472115, "grad_norm": 1.4549705982208252, "learning_rate": 1.161e-05, "loss": 0.0581, "step": 3873 }, { "epoch": 4.2760905577029265, "grad_norm": 1.3920042514801025, "learning_rate": 1.1613e-05, "loss": 
0.0509, "step": 3874 }, { "epoch": 4.277194919933739, "grad_norm": 2.593515396118164, "learning_rate": 1.1616e-05, "loss": 0.3787, "step": 3875 }, { "epoch": 4.27829928216455, "grad_norm": 1.3451370000839233, "learning_rate": 1.1619e-05, "loss": 0.3805, "step": 3876 }, { "epoch": 4.279403644395361, "grad_norm": 1.7459903955459595, "learning_rate": 1.1622000000000002e-05, "loss": 0.2679, "step": 3877 }, { "epoch": 4.280508006626174, "grad_norm": 1.2698218822479248, "learning_rate": 1.1625000000000001e-05, "loss": 0.2003, "step": 3878 }, { "epoch": 4.281612368856985, "grad_norm": 1.0269473791122437, "learning_rate": 1.1628e-05, "loss": 0.2011, "step": 3879 }, { "epoch": 4.282716731087797, "grad_norm": 1.2326232194900513, "learning_rate": 1.1631e-05, "loss": 0.1695, "step": 3880 }, { "epoch": 4.2838210933186085, "grad_norm": 0.7676670551300049, "learning_rate": 1.1633999999999999e-05, "loss": 0.1515, "step": 3881 }, { "epoch": 4.28492545554942, "grad_norm": 1.015423059463501, "learning_rate": 1.1637e-05, "loss": 0.1851, "step": 3882 }, { "epoch": 4.286029817780232, "grad_norm": 0.7887755036354065, "learning_rate": 1.164e-05, "loss": 0.1571, "step": 3883 }, { "epoch": 4.287134180011043, "grad_norm": 0.9299201965332031, "learning_rate": 1.1643e-05, "loss": 0.0861, "step": 3884 }, { "epoch": 4.288238542241856, "grad_norm": 0.5447169542312622, "learning_rate": 1.1646e-05, "loss": 0.0403, "step": 3885 }, { "epoch": 4.289342904472667, "grad_norm": 1.9090044498443604, "learning_rate": 1.1649e-05, "loss": 0.0588, "step": 3886 }, { "epoch": 4.290447266703479, "grad_norm": 0.7154409885406494, "learning_rate": 1.1652000000000001e-05, "loss": 0.0634, "step": 3887 }, { "epoch": 4.2915516289342905, "grad_norm": 0.9261832237243652, "learning_rate": 1.1655000000000001e-05, "loss": 0.0615, "step": 3888 }, { "epoch": 4.292655991165102, "grad_norm": 0.6143724322319031, "learning_rate": 1.1658000000000001e-05, "loss": 0.0582, "step": 3889 }, { "epoch": 4.293760353395914, "grad_norm": 
0.6705088019371033, "learning_rate": 1.1661e-05, "loss": 0.0305, "step": 3890 }, { "epoch": 4.2948647156267254, "grad_norm": 0.8163818717002869, "learning_rate": 1.1664e-05, "loss": 0.0529, "step": 3891 }, { "epoch": 4.295969077857538, "grad_norm": 0.6624191999435425, "learning_rate": 1.1667e-05, "loss": 0.0236, "step": 3892 }, { "epoch": 4.297073440088349, "grad_norm": 0.6230679154396057, "learning_rate": 1.167e-05, "loss": 0.033, "step": 3893 }, { "epoch": 4.29817780231916, "grad_norm": 0.6441898941993713, "learning_rate": 1.1673e-05, "loss": 0.0312, "step": 3894 }, { "epoch": 4.299282164549973, "grad_norm": 1.4674949645996094, "learning_rate": 1.1676e-05, "loss": 0.0356, "step": 3895 }, { "epoch": 4.300386526780784, "grad_norm": 0.49408772587776184, "learning_rate": 1.1679e-05, "loss": 0.0367, "step": 3896 }, { "epoch": 4.301490889011596, "grad_norm": 1.255706548690796, "learning_rate": 1.1682000000000001e-05, "loss": 0.0504, "step": 3897 }, { "epoch": 4.3025952512424075, "grad_norm": 1.263674020767212, "learning_rate": 1.1685e-05, "loss": 0.0851, "step": 3898 }, { "epoch": 4.303699613473219, "grad_norm": 0.769600510597229, "learning_rate": 1.1688e-05, "loss": 0.0313, "step": 3899 }, { "epoch": 4.304803975704031, "grad_norm": 0.9271591901779175, "learning_rate": 1.1691e-05, "loss": 0.036, "step": 3900 }, { "epoch": 4.305908337934842, "grad_norm": 0.4840162694454193, "learning_rate": 1.1694e-05, "loss": 0.0321, "step": 3901 }, { "epoch": 4.307012700165655, "grad_norm": 0.5743745565414429, "learning_rate": 1.1697000000000002e-05, "loss": 0.0303, "step": 3902 }, { "epoch": 4.308117062396466, "grad_norm": 0.6658034920692444, "learning_rate": 1.1700000000000001e-05, "loss": 0.0405, "step": 3903 }, { "epoch": 4.309221424627278, "grad_norm": 0.8902512788772583, "learning_rate": 1.1703e-05, "loss": 0.0327, "step": 3904 }, { "epoch": 4.3103257868580895, "grad_norm": 1.1512025594711304, "learning_rate": 1.1706e-05, "loss": 0.0447, "step": 3905 }, { "epoch": 
4.311430149088901, "grad_norm": 0.9698033332824707, "learning_rate": 1.1709e-05, "loss": 0.0615, "step": 3906 }, { "epoch": 4.312534511319713, "grad_norm": 1.0525873899459839, "learning_rate": 1.1712e-05, "loss": 0.0458, "step": 3907 }, { "epoch": 4.313638873550524, "grad_norm": 0.7784199118614197, "learning_rate": 1.1715e-05, "loss": 0.0427, "step": 3908 }, { "epoch": 4.314743235781337, "grad_norm": 0.8044857978820801, "learning_rate": 1.1718e-05, "loss": 0.0569, "step": 3909 }, { "epoch": 4.315847598012148, "grad_norm": 1.6548629999160767, "learning_rate": 1.1721e-05, "loss": 0.0655, "step": 3910 }, { "epoch": 4.316951960242959, "grad_norm": 0.723942220211029, "learning_rate": 1.1724e-05, "loss": 0.0458, "step": 3911 }, { "epoch": 4.318056322473772, "grad_norm": 0.8631066679954529, "learning_rate": 1.1727000000000001e-05, "loss": 0.0383, "step": 3912 }, { "epoch": 4.319160684704583, "grad_norm": 0.7785547375679016, "learning_rate": 1.1730000000000001e-05, "loss": 0.0482, "step": 3913 }, { "epoch": 4.320265046935395, "grad_norm": 0.6416071057319641, "learning_rate": 1.1733000000000001e-05, "loss": 0.0299, "step": 3914 }, { "epoch": 4.3213694091662065, "grad_norm": 0.7803390026092529, "learning_rate": 1.1736e-05, "loss": 0.0362, "step": 3915 }, { "epoch": 4.322473771397018, "grad_norm": 1.0650914907455444, "learning_rate": 1.1738999999999999e-05, "loss": 0.059, "step": 3916 }, { "epoch": 4.32357813362783, "grad_norm": 2.2271268367767334, "learning_rate": 1.1742e-05, "loss": 0.0559, "step": 3917 }, { "epoch": 4.324682495858641, "grad_norm": 1.0176374912261963, "learning_rate": 1.1745e-05, "loss": 0.0489, "step": 3918 }, { "epoch": 4.325786858089454, "grad_norm": 0.6075700521469116, "learning_rate": 1.1748e-05, "loss": 0.0287, "step": 3919 }, { "epoch": 4.326891220320265, "grad_norm": 0.5988365411758423, "learning_rate": 1.1751e-05, "loss": 0.0386, "step": 3920 }, { "epoch": 4.327995582551077, "grad_norm": 1.030076026916504, "learning_rate": 1.1754e-05, "loss": 
0.1113, "step": 3921 }, { "epoch": 4.3290999447818885, "grad_norm": 1.6150319576263428, "learning_rate": 1.1757000000000001e-05, "loss": 0.0335, "step": 3922 }, { "epoch": 4.3302043070127, "grad_norm": 0.5759239196777344, "learning_rate": 1.1760000000000001e-05, "loss": 0.0444, "step": 3923 }, { "epoch": 4.331308669243512, "grad_norm": 0.7923122048377991, "learning_rate": 1.1763e-05, "loss": 0.0713, "step": 3924 }, { "epoch": 4.332413031474323, "grad_norm": 2.1637635231018066, "learning_rate": 1.1766e-05, "loss": 0.4152, "step": 3925 }, { "epoch": 4.333517393705136, "grad_norm": 1.3510921001434326, "learning_rate": 1.1769e-05, "loss": 0.3351, "step": 3926 }, { "epoch": 4.334621755935947, "grad_norm": 1.0642225742340088, "learning_rate": 1.1772000000000002e-05, "loss": 0.2799, "step": 3927 }, { "epoch": 4.335726118166758, "grad_norm": 1.0991679430007935, "learning_rate": 1.1775000000000002e-05, "loss": 0.191, "step": 3928 }, { "epoch": 4.336830480397571, "grad_norm": 1.071022868156433, "learning_rate": 1.1778e-05, "loss": 0.2719, "step": 3929 }, { "epoch": 4.337934842628382, "grad_norm": 1.1457585096359253, "learning_rate": 1.1781e-05, "loss": 0.201, "step": 3930 }, { "epoch": 4.339039204859194, "grad_norm": 0.7665845155715942, "learning_rate": 1.1784e-05, "loss": 0.172, "step": 3931 }, { "epoch": 4.3401435670900055, "grad_norm": 0.8598249554634094, "learning_rate": 1.1787e-05, "loss": 0.1011, "step": 3932 }, { "epoch": 4.341247929320817, "grad_norm": 0.7682515978813171, "learning_rate": 1.179e-05, "loss": 0.0808, "step": 3933 }, { "epoch": 4.342352291551629, "grad_norm": 0.6998701095581055, "learning_rate": 1.1793e-05, "loss": 0.0743, "step": 3934 }, { "epoch": 4.34345665378244, "grad_norm": 0.8551161885261536, "learning_rate": 1.1796e-05, "loss": 0.1165, "step": 3935 }, { "epoch": 4.344561016013253, "grad_norm": 0.7538609504699707, "learning_rate": 1.1799e-05, "loss": 0.0522, "step": 3936 }, { "epoch": 4.345665378244064, "grad_norm": 1.1060855388641357, 
"learning_rate": 1.1802000000000002e-05, "loss": 0.0785, "step": 3937 }, { "epoch": 4.346769740474876, "grad_norm": 0.6200692653656006, "learning_rate": 1.1805000000000001e-05, "loss": 0.0344, "step": 3938 }, { "epoch": 4.3478741027056875, "grad_norm": 0.45538854598999023, "learning_rate": 1.1808000000000001e-05, "loss": 0.0251, "step": 3939 }, { "epoch": 4.348978464936499, "grad_norm": 1.142547845840454, "learning_rate": 1.1811000000000001e-05, "loss": 0.0361, "step": 3940 }, { "epoch": 4.350082827167311, "grad_norm": 0.8553919792175293, "learning_rate": 1.1813999999999999e-05, "loss": 0.0521, "step": 3941 }, { "epoch": 4.351187189398122, "grad_norm": 0.6900449991226196, "learning_rate": 1.1816999999999999e-05, "loss": 0.0459, "step": 3942 }, { "epoch": 4.352291551628935, "grad_norm": 1.123405933380127, "learning_rate": 1.182e-05, "loss": 0.0417, "step": 3943 }, { "epoch": 4.353395913859746, "grad_norm": 0.649476945400238, "learning_rate": 1.1823e-05, "loss": 0.0522, "step": 3944 }, { "epoch": 4.354500276090557, "grad_norm": 0.8929991126060486, "learning_rate": 1.1826e-05, "loss": 0.047, "step": 3945 }, { "epoch": 4.35560463832137, "grad_norm": 0.6684836149215698, "learning_rate": 1.1829e-05, "loss": 0.0403, "step": 3946 }, { "epoch": 4.356709000552181, "grad_norm": 0.8605267405509949, "learning_rate": 1.1832e-05, "loss": 0.0633, "step": 3947 }, { "epoch": 4.357813362782993, "grad_norm": 0.8303072452545166, "learning_rate": 1.1835000000000001e-05, "loss": 0.0397, "step": 3948 }, { "epoch": 4.3589177250138045, "grad_norm": 0.46814557909965515, "learning_rate": 1.1838e-05, "loss": 0.0336, "step": 3949 }, { "epoch": 4.360022087244616, "grad_norm": 0.5977863073348999, "learning_rate": 1.1841e-05, "loss": 0.0452, "step": 3950 }, { "epoch": 4.361126449475428, "grad_norm": 1.1909736394882202, "learning_rate": 1.1844e-05, "loss": 0.0592, "step": 3951 }, { "epoch": 4.362230811706239, "grad_norm": 0.5703268051147461, "learning_rate": 1.1847e-05, "loss": 0.0321, "step": 3952 
}, { "epoch": 4.363335173937052, "grad_norm": 0.7997004389762878, "learning_rate": 1.185e-05, "loss": 0.0583, "step": 3953 }, { "epoch": 4.364439536167863, "grad_norm": 1.1277406215667725, "learning_rate": 1.1853e-05, "loss": 0.0425, "step": 3954 }, { "epoch": 4.365543898398675, "grad_norm": 0.722107470035553, "learning_rate": 1.1856e-05, "loss": 0.0407, "step": 3955 }, { "epoch": 4.3666482606294865, "grad_norm": 0.5902332663536072, "learning_rate": 1.1859e-05, "loss": 0.0432, "step": 3956 }, { "epoch": 4.367752622860298, "grad_norm": 0.7804322838783264, "learning_rate": 1.1862e-05, "loss": 0.0404, "step": 3957 }, { "epoch": 4.36885698509111, "grad_norm": 0.8809769153594971, "learning_rate": 1.1865e-05, "loss": 0.0608, "step": 3958 }, { "epoch": 4.369961347321921, "grad_norm": 0.5867670774459839, "learning_rate": 1.1868e-05, "loss": 0.045, "step": 3959 }, { "epoch": 4.371065709552734, "grad_norm": 0.891512930393219, "learning_rate": 1.1871e-05, "loss": 0.0419, "step": 3960 }, { "epoch": 4.372170071783545, "grad_norm": 0.47924259305000305, "learning_rate": 1.1874e-05, "loss": 0.0363, "step": 3961 }, { "epoch": 4.373274434014356, "grad_norm": 0.9132100939750671, "learning_rate": 1.1877e-05, "loss": 0.0565, "step": 3962 }, { "epoch": 4.374378796245169, "grad_norm": 0.7044519782066345, "learning_rate": 1.1880000000000001e-05, "loss": 0.061, "step": 3963 }, { "epoch": 4.37548315847598, "grad_norm": 0.5189676284790039, "learning_rate": 1.1883000000000001e-05, "loss": 0.0341, "step": 3964 }, { "epoch": 4.376587520706792, "grad_norm": 0.8001095652580261, "learning_rate": 1.1886e-05, "loss": 0.0406, "step": 3965 }, { "epoch": 4.3776918829376035, "grad_norm": 0.7536934018135071, "learning_rate": 1.1889e-05, "loss": 0.038, "step": 3966 }, { "epoch": 4.378796245168415, "grad_norm": 0.5801182389259338, "learning_rate": 1.1891999999999999e-05, "loss": 0.0346, "step": 3967 }, { "epoch": 4.379900607399227, "grad_norm": 0.6948041319847107, "learning_rate": 1.1895e-05, "loss": 
0.0607, "step": 3968 }, { "epoch": 4.381004969630038, "grad_norm": 0.9062990546226501, "learning_rate": 1.1898e-05, "loss": 0.0538, "step": 3969 }, { "epoch": 4.382109331860851, "grad_norm": 1.3828426599502563, "learning_rate": 1.1901e-05, "loss": 0.0503, "step": 3970 }, { "epoch": 4.383213694091662, "grad_norm": 1.1828322410583496, "learning_rate": 1.1904e-05, "loss": 0.0562, "step": 3971 }, { "epoch": 4.384318056322474, "grad_norm": 0.8128935694694519, "learning_rate": 1.1907e-05, "loss": 0.0595, "step": 3972 }, { "epoch": 4.3854224185532855, "grad_norm": 1.0781278610229492, "learning_rate": 1.1910000000000001e-05, "loss": 0.0639, "step": 3973 }, { "epoch": 4.386526780784097, "grad_norm": 0.6156337857246399, "learning_rate": 1.1913000000000001e-05, "loss": 0.0479, "step": 3974 }, { "epoch": 4.387631143014909, "grad_norm": 1.377052903175354, "learning_rate": 1.1916e-05, "loss": 0.3495, "step": 3975 }, { "epoch": 4.38873550524572, "grad_norm": 1.372626781463623, "learning_rate": 1.1919e-05, "loss": 0.3559, "step": 3976 }, { "epoch": 4.389839867476533, "grad_norm": 0.8812611699104309, "learning_rate": 1.1922e-05, "loss": 0.2628, "step": 3977 }, { "epoch": 4.390944229707344, "grad_norm": 1.2143386602401733, "learning_rate": 1.1925e-05, "loss": 0.2655, "step": 3978 }, { "epoch": 4.392048591938155, "grad_norm": 1.0538448095321655, "learning_rate": 1.1928e-05, "loss": 0.2429, "step": 3979 }, { "epoch": 4.393152954168968, "grad_norm": 1.1641353368759155, "learning_rate": 1.1931e-05, "loss": 0.1783, "step": 3980 }, { "epoch": 4.394257316399779, "grad_norm": 0.8963563442230225, "learning_rate": 1.1934e-05, "loss": 0.1369, "step": 3981 }, { "epoch": 4.395361678630591, "grad_norm": 1.3914319276809692, "learning_rate": 1.1937e-05, "loss": 0.1269, "step": 3982 }, { "epoch": 4.3964660408614025, "grad_norm": 0.9001570343971252, "learning_rate": 1.1940000000000001e-05, "loss": 0.1125, "step": 3983 }, { "epoch": 4.397570403092214, "grad_norm": 0.7491979002952576, "learning_rate": 
1.1943e-05, "loss": 0.0686, "step": 3984 }, { "epoch": 4.398674765323026, "grad_norm": 1.32432222366333, "learning_rate": 1.1946e-05, "loss": 0.0647, "step": 3985 }, { "epoch": 4.399779127553837, "grad_norm": 1.1675719022750854, "learning_rate": 1.1949e-05, "loss": 0.0826, "step": 3986 }, { "epoch": 4.40088348978465, "grad_norm": 0.8315248489379883, "learning_rate": 1.1952e-05, "loss": 0.077, "step": 3987 }, { "epoch": 4.401987852015461, "grad_norm": 0.7389332056045532, "learning_rate": 1.1955000000000002e-05, "loss": 0.0661, "step": 3988 }, { "epoch": 4.403092214246273, "grad_norm": 0.6403196454048157, "learning_rate": 1.1958000000000001e-05, "loss": 0.0376, "step": 3989 }, { "epoch": 4.4041965764770845, "grad_norm": 0.6473315954208374, "learning_rate": 1.1961e-05, "loss": 0.0531, "step": 3990 }, { "epoch": 4.405300938707896, "grad_norm": 0.5754704475402832, "learning_rate": 1.1964e-05, "loss": 0.0498, "step": 3991 }, { "epoch": 4.406405300938708, "grad_norm": 0.4562685787677765, "learning_rate": 1.1966999999999999e-05, "loss": 0.0292, "step": 3992 }, { "epoch": 4.407509663169519, "grad_norm": 1.0153447389602661, "learning_rate": 1.197e-05, "loss": 0.0462, "step": 3993 }, { "epoch": 4.408614025400332, "grad_norm": 0.6595712304115295, "learning_rate": 1.1973e-05, "loss": 0.0393, "step": 3994 }, { "epoch": 4.409718387631143, "grad_norm": 0.556158721446991, "learning_rate": 1.1976e-05, "loss": 0.0307, "step": 3995 }, { "epoch": 4.410822749861954, "grad_norm": 0.5892423987388611, "learning_rate": 1.1979e-05, "loss": 0.056, "step": 3996 }, { "epoch": 4.411927112092767, "grad_norm": 0.8968649506568909, "learning_rate": 1.1982e-05, "loss": 0.0495, "step": 3997 }, { "epoch": 4.413031474323578, "grad_norm": 0.6988294720649719, "learning_rate": 1.1985000000000001e-05, "loss": 0.0399, "step": 3998 }, { "epoch": 4.41413583655439, "grad_norm": 0.5612804293632507, "learning_rate": 1.1988000000000001e-05, "loss": 0.0344, "step": 3999 }, { "epoch": 4.4152401987852015, 
"grad_norm": 0.6041646003723145, "learning_rate": 1.1991000000000001e-05, "loss": 0.0329, "step": 4000 }, { "epoch": 4.4152401987852015, "eval_cer": 0.13557300982450682, "eval_loss": 0.43428143858909607, "eval_runtime": 15.4324, "eval_samples_per_second": 19.699, "eval_steps_per_second": 0.648, "eval_wer": 0.47966231772831924, "step": 4000 }, { "epoch": 4.416344561016013, "grad_norm": 1.7297028303146362, "learning_rate": 1.1994e-05, "loss": 0.0623, "step": 4001 }, { "epoch": 4.417448923246825, "grad_norm": 0.6491885781288147, "learning_rate": 1.1996999999999999e-05, "loss": 0.0433, "step": 4002 }, { "epoch": 4.418553285477636, "grad_norm": 0.7004141211509705, "learning_rate": 1.2e-05, "loss": 0.0385, "step": 4003 }, { "epoch": 4.419657647708449, "grad_norm": 0.803501546382904, "learning_rate": 1.2003e-05, "loss": 0.0469, "step": 4004 }, { "epoch": 4.42076200993926, "grad_norm": 0.6751691102981567, "learning_rate": 1.2006e-05, "loss": 0.0315, "step": 4005 }, { "epoch": 4.421866372170072, "grad_norm": 0.6176151037216187, "learning_rate": 1.2009e-05, "loss": 0.0466, "step": 4006 }, { "epoch": 4.4229707344008835, "grad_norm": 1.2599507570266724, "learning_rate": 1.2012e-05, "loss": 0.0472, "step": 4007 }, { "epoch": 4.424075096631695, "grad_norm": 0.7577369213104248, "learning_rate": 1.2015000000000001e-05, "loss": 0.059, "step": 4008 }, { "epoch": 4.425179458862507, "grad_norm": 0.852900505065918, "learning_rate": 1.2018e-05, "loss": 0.0358, "step": 4009 }, { "epoch": 4.426283821093318, "grad_norm": 0.7818019390106201, "learning_rate": 1.2021e-05, "loss": 0.0219, "step": 4010 }, { "epoch": 4.427388183324131, "grad_norm": 0.9667893648147583, "learning_rate": 1.2024e-05, "loss": 0.0469, "step": 4011 }, { "epoch": 4.428492545554942, "grad_norm": 0.7965008020401001, "learning_rate": 1.2027e-05, "loss": 0.0558, "step": 4012 }, { "epoch": 4.429596907785753, "grad_norm": 0.6675998568534851, "learning_rate": 1.2030000000000002e-05, "loss": 0.0422, "step": 4013 }, { "epoch": 
4.4307012700165656, "grad_norm": 0.867127001285553, "learning_rate": 1.2033000000000002e-05, "loss": 0.0383, "step": 4014 }, { "epoch": 4.431805632247377, "grad_norm": 0.8276312947273254, "learning_rate": 1.2036e-05, "loss": 0.0503, "step": 4015 }, { "epoch": 4.432909994478189, "grad_norm": 0.6273490786552429, "learning_rate": 1.2039e-05, "loss": 0.0528, "step": 4016 }, { "epoch": 4.4340143567090005, "grad_norm": 0.6248247623443604, "learning_rate": 1.2042e-05, "loss": 0.0279, "step": 4017 }, { "epoch": 4.435118718939812, "grad_norm": 0.8322957158088684, "learning_rate": 1.2045e-05, "loss": 0.047, "step": 4018 }, { "epoch": 4.436223081170624, "grad_norm": 1.0001994371414185, "learning_rate": 1.2048e-05, "loss": 0.0433, "step": 4019 }, { "epoch": 4.437327443401435, "grad_norm": 0.6255577802658081, "learning_rate": 1.2051e-05, "loss": 0.0389, "step": 4020 }, { "epoch": 4.438431805632248, "grad_norm": 0.8850947618484497, "learning_rate": 1.2054e-05, "loss": 0.0559, "step": 4021 }, { "epoch": 4.439536167863059, "grad_norm": 0.8121762871742249, "learning_rate": 1.2057e-05, "loss": 0.0765, "step": 4022 }, { "epoch": 4.440640530093871, "grad_norm": 0.4955102503299713, "learning_rate": 1.2060000000000001e-05, "loss": 0.0242, "step": 4023 }, { "epoch": 4.4417448923246825, "grad_norm": 1.10910165309906, "learning_rate": 1.2063000000000001e-05, "loss": 0.0752, "step": 4024 }, { "epoch": 4.442849254555494, "grad_norm": 1.1736811399459839, "learning_rate": 1.2066000000000001e-05, "loss": 0.3509, "step": 4025 }, { "epoch": 4.443953616786306, "grad_norm": 1.3166742324829102, "learning_rate": 1.2069e-05, "loss": 0.3773, "step": 4026 }, { "epoch": 4.445057979017117, "grad_norm": 5.3897247314453125, "learning_rate": 1.2071999999999999e-05, "loss": 0.3874, "step": 4027 }, { "epoch": 4.44616234124793, "grad_norm": 1.1569894552230835, "learning_rate": 1.2075e-05, "loss": 0.2957, "step": 4028 }, { "epoch": 4.447266703478741, "grad_norm": 0.9683704972267151, "learning_rate": 1.2078e-05, 
"loss": 0.1994, "step": 4029 }, { "epoch": 4.448371065709552, "grad_norm": 1.1362603902816772, "learning_rate": 1.2081e-05, "loss": 0.1917, "step": 4030 }, { "epoch": 4.4494754279403645, "grad_norm": 0.787715494632721, "learning_rate": 1.2084e-05, "loss": 0.1465, "step": 4031 }, { "epoch": 4.450579790171176, "grad_norm": 0.9408807754516602, "learning_rate": 1.2087e-05, "loss": 0.1039, "step": 4032 }, { "epoch": 4.451684152401988, "grad_norm": 0.835708737373352, "learning_rate": 1.2090000000000001e-05, "loss": 0.0968, "step": 4033 }, { "epoch": 4.4527885146327995, "grad_norm": 1.7294220924377441, "learning_rate": 1.2093000000000001e-05, "loss": 0.142, "step": 4034 }, { "epoch": 4.453892876863611, "grad_norm": 0.7503665685653687, "learning_rate": 1.2096e-05, "loss": 0.0468, "step": 4035 }, { "epoch": 4.454997239094423, "grad_norm": 1.4130651950836182, "learning_rate": 1.2099e-05, "loss": 0.0547, "step": 4036 }, { "epoch": 4.456101601325234, "grad_norm": 0.46084946393966675, "learning_rate": 1.2102e-05, "loss": 0.0453, "step": 4037 }, { "epoch": 4.457205963556047, "grad_norm": 0.7824862003326416, "learning_rate": 1.2105000000000002e-05, "loss": 0.0417, "step": 4038 }, { "epoch": 4.458310325786858, "grad_norm": 0.4355628490447998, "learning_rate": 1.2108e-05, "loss": 0.0267, "step": 4039 }, { "epoch": 4.45941468801767, "grad_norm": 0.565192699432373, "learning_rate": 1.2111e-05, "loss": 0.0312, "step": 4040 }, { "epoch": 4.4605190502484815, "grad_norm": 0.9717664122581482, "learning_rate": 1.2114e-05, "loss": 0.1198, "step": 4041 }, { "epoch": 4.461623412479293, "grad_norm": 0.5678005218505859, "learning_rate": 1.2117e-05, "loss": 0.0456, "step": 4042 }, { "epoch": 4.462727774710105, "grad_norm": 0.823849618434906, "learning_rate": 1.2120000000000001e-05, "loss": 0.0393, "step": 4043 }, { "epoch": 4.463832136940916, "grad_norm": 0.6384583711624146, "learning_rate": 1.2123e-05, "loss": 0.0445, "step": 4044 }, { "epoch": 4.464936499171729, "grad_norm": 
0.7678049802780151, "learning_rate": 1.2126e-05, "loss": 0.0526, "step": 4045 }, { "epoch": 4.46604086140254, "grad_norm": 0.6709633469581604, "learning_rate": 1.2129e-05, "loss": 0.0416, "step": 4046 }, { "epoch": 4.467145223633352, "grad_norm": 0.6176455616950989, "learning_rate": 1.2132e-05, "loss": 0.0398, "step": 4047 }, { "epoch": 4.4682495858641635, "grad_norm": 0.7106865048408508, "learning_rate": 1.2135000000000002e-05, "loss": 0.0273, "step": 4048 }, { "epoch": 4.469353948094975, "grad_norm": 0.6219116449356079, "learning_rate": 1.2138000000000001e-05, "loss": 0.0517, "step": 4049 }, { "epoch": 4.470458310325787, "grad_norm": 0.6864408850669861, "learning_rate": 1.2141000000000001e-05, "loss": 0.0385, "step": 4050 }, { "epoch": 4.4715626725565985, "grad_norm": 0.545001745223999, "learning_rate": 1.2144e-05, "loss": 0.0303, "step": 4051 }, { "epoch": 4.472667034787411, "grad_norm": 0.5717228651046753, "learning_rate": 1.2146999999999999e-05, "loss": 0.0374, "step": 4052 }, { "epoch": 4.473771397018222, "grad_norm": 0.773167073726654, "learning_rate": 1.215e-05, "loss": 0.0249, "step": 4053 }, { "epoch": 4.474875759249033, "grad_norm": 0.5873830914497375, "learning_rate": 1.2153e-05, "loss": 0.0412, "step": 4054 }, { "epoch": 4.475980121479846, "grad_norm": 0.8390398621559143, "learning_rate": 1.2156e-05, "loss": 0.0529, "step": 4055 }, { "epoch": 4.477084483710657, "grad_norm": 0.8200417160987854, "learning_rate": 1.2159e-05, "loss": 0.0351, "step": 4056 }, { "epoch": 4.478188845941469, "grad_norm": 0.7120969891548157, "learning_rate": 1.2162e-05, "loss": 0.0417, "step": 4057 }, { "epoch": 4.4792932081722805, "grad_norm": 0.5721114277839661, "learning_rate": 1.2165000000000001e-05, "loss": 0.0245, "step": 4058 }, { "epoch": 4.480397570403092, "grad_norm": 0.8332997560501099, "learning_rate": 1.2168000000000001e-05, "loss": 0.0315, "step": 4059 }, { "epoch": 4.481501932633904, "grad_norm": 0.7850903272628784, "learning_rate": 1.2171000000000001e-05, "loss": 
0.0438, "step": 4060 }, { "epoch": 4.482606294864715, "grad_norm": 1.4276888370513916, "learning_rate": 1.2174e-05, "loss": 0.0307, "step": 4061 }, { "epoch": 4.483710657095528, "grad_norm": 0.6482385993003845, "learning_rate": 1.2177e-05, "loss": 0.0486, "step": 4062 }, { "epoch": 4.484815019326339, "grad_norm": 0.7305192351341248, "learning_rate": 1.2180000000000002e-05, "loss": 0.0309, "step": 4063 }, { "epoch": 4.485919381557151, "grad_norm": 0.9643081426620483, "learning_rate": 1.2183e-05, "loss": 0.0406, "step": 4064 }, { "epoch": 4.4870237437879625, "grad_norm": 0.766066312789917, "learning_rate": 1.2186e-05, "loss": 0.049, "step": 4065 }, { "epoch": 4.488128106018774, "grad_norm": 0.7483507394790649, "learning_rate": 1.2189e-05, "loss": 0.0424, "step": 4066 }, { "epoch": 4.489232468249586, "grad_norm": 0.6133303642272949, "learning_rate": 1.2192e-05, "loss": 0.043, "step": 4067 }, { "epoch": 4.4903368304803974, "grad_norm": 0.8802093863487244, "learning_rate": 1.2195e-05, "loss": 0.0547, "step": 4068 }, { "epoch": 4.49144119271121, "grad_norm": 1.038792371749878, "learning_rate": 1.2198e-05, "loss": 0.0746, "step": 4069 }, { "epoch": 4.492545554942021, "grad_norm": 0.8997170925140381, "learning_rate": 1.2201e-05, "loss": 0.0567, "step": 4070 }, { "epoch": 4.493649917172832, "grad_norm": 1.084055781364441, "learning_rate": 1.2204e-05, "loss": 0.055, "step": 4071 }, { "epoch": 4.494754279403645, "grad_norm": 0.7066803574562073, "learning_rate": 1.2207e-05, "loss": 0.0485, "step": 4072 }, { "epoch": 4.495858641634456, "grad_norm": 0.7247127890586853, "learning_rate": 1.221e-05, "loss": 0.0446, "step": 4073 }, { "epoch": 4.496963003865268, "grad_norm": 1.1615214347839355, "learning_rate": 1.2213000000000001e-05, "loss": 0.0655, "step": 4074 }, { "epoch": 4.4980673660960795, "grad_norm": 2.4966790676116943, "learning_rate": 1.2216000000000001e-05, "loss": 0.4989, "step": 4075 }, { "epoch": 4.499171728326891, "grad_norm": 2.2691452503204346, "learning_rate": 
1.2219e-05, "loss": 0.3855, "step": 4076 }, { "epoch": 4.500276090557703, "grad_norm": 1.1463924646377563, "learning_rate": 1.2222e-05, "loss": 0.2878, "step": 4077 }, { "epoch": 4.501380452788514, "grad_norm": 0.8766668438911438, "learning_rate": 1.2224999999999999e-05, "loss": 0.2074, "step": 4078 }, { "epoch": 4.502484815019327, "grad_norm": 0.8909496665000916, "learning_rate": 1.2228e-05, "loss": 0.1691, "step": 4079 }, { "epoch": 4.503589177250138, "grad_norm": 0.8742090463638306, "learning_rate": 1.2231e-05, "loss": 0.1406, "step": 4080 }, { "epoch": 4.50469353948095, "grad_norm": 1.1116193532943726, "learning_rate": 1.2234e-05, "loss": 0.1691, "step": 4081 }, { "epoch": 4.5057979017117615, "grad_norm": 0.6793201565742493, "learning_rate": 1.2237e-05, "loss": 0.0786, "step": 4082 }, { "epoch": 4.506902263942573, "grad_norm": 0.5043265223503113, "learning_rate": 1.224e-05, "loss": 0.0726, "step": 4083 }, { "epoch": 4.508006626173385, "grad_norm": 0.6066683530807495, "learning_rate": 1.2243000000000001e-05, "loss": 0.0651, "step": 4084 }, { "epoch": 4.509110988404196, "grad_norm": 0.6104297041893005, "learning_rate": 1.2246000000000001e-05, "loss": 0.0803, "step": 4085 }, { "epoch": 4.510215350635009, "grad_norm": 0.42068785429000854, "learning_rate": 1.2249e-05, "loss": 0.0466, "step": 4086 }, { "epoch": 4.51131971286582, "grad_norm": 0.7845321893692017, "learning_rate": 1.2252e-05, "loss": 0.0493, "step": 4087 }, { "epoch": 4.512424075096631, "grad_norm": 0.6567210555076599, "learning_rate": 1.2254999999999999e-05, "loss": 0.0433, "step": 4088 }, { "epoch": 4.513528437327444, "grad_norm": 0.915753185749054, "learning_rate": 1.2258e-05, "loss": 0.0495, "step": 4089 }, { "epoch": 4.514632799558255, "grad_norm": 0.6448092460632324, "learning_rate": 1.2261e-05, "loss": 0.0475, "step": 4090 }, { "epoch": 4.515737161789067, "grad_norm": 0.6014096736907959, "learning_rate": 1.2264e-05, "loss": 0.0531, "step": 4091 }, { "epoch": 4.5168415240198785, "grad_norm": 
0.32626089453697205, "learning_rate": 1.2267e-05, "loss": 0.0244, "step": 4092 }, { "epoch": 4.51794588625069, "grad_norm": 1.0573359727859497, "learning_rate": 1.227e-05, "loss": 0.0279, "step": 4093 }, { "epoch": 4.519050248481502, "grad_norm": 0.5744552612304688, "learning_rate": 1.2273000000000001e-05, "loss": 0.0328, "step": 4094 }, { "epoch": 4.520154610712313, "grad_norm": 0.7148681282997131, "learning_rate": 1.2276e-05, "loss": 0.0416, "step": 4095 }, { "epoch": 4.521258972943126, "grad_norm": 0.5355203747749329, "learning_rate": 1.2279e-05, "loss": 0.041, "step": 4096 }, { "epoch": 4.522363335173937, "grad_norm": 0.5080739855766296, "learning_rate": 1.2282e-05, "loss": 0.0277, "step": 4097 }, { "epoch": 4.523467697404749, "grad_norm": 0.9536199569702148, "learning_rate": 1.2285e-05, "loss": 0.0301, "step": 4098 }, { "epoch": 4.5245720596355605, "grad_norm": 0.591834545135498, "learning_rate": 1.2288000000000002e-05, "loss": 0.031, "step": 4099 }, { "epoch": 4.525676421866372, "grad_norm": 0.5061255097389221, "learning_rate": 1.2291000000000001e-05, "loss": 0.0302, "step": 4100 }, { "epoch": 4.526780784097184, "grad_norm": 0.6068756580352783, "learning_rate": 1.2294e-05, "loss": 0.0412, "step": 4101 }, { "epoch": 4.527885146327995, "grad_norm": 0.6953774094581604, "learning_rate": 1.2297e-05, "loss": 0.0452, "step": 4102 }, { "epoch": 4.528989508558808, "grad_norm": 1.037645697593689, "learning_rate": 1.2299999999999999e-05, "loss": 0.0598, "step": 4103 }, { "epoch": 4.530093870789619, "grad_norm": 0.6781960725784302, "learning_rate": 1.2303e-05, "loss": 0.0263, "step": 4104 }, { "epoch": 4.53119823302043, "grad_norm": 0.7467535138130188, "learning_rate": 1.2306e-05, "loss": 0.0427, "step": 4105 }, { "epoch": 4.532302595251243, "grad_norm": 0.6723060607910156, "learning_rate": 1.2309e-05, "loss": 0.0326, "step": 4106 }, { "epoch": 4.533406957482054, "grad_norm": 0.6654223203659058, "learning_rate": 1.2312e-05, "loss": 0.0558, "step": 4107 }, { "epoch": 
4.534511319712866, "grad_norm": 0.7049950361251831, "learning_rate": 1.2315e-05, "loss": 0.0414, "step": 4108 }, { "epoch": 4.5356156819436775, "grad_norm": 1.1744581460952759, "learning_rate": 1.2318000000000001e-05, "loss": 0.0606, "step": 4109 }, { "epoch": 4.536720044174489, "grad_norm": 0.919856071472168, "learning_rate": 1.2321000000000001e-05, "loss": 0.1166, "step": 4110 }, { "epoch": 4.537824406405301, "grad_norm": 0.6574409008026123, "learning_rate": 1.2324000000000001e-05, "loss": 0.0468, "step": 4111 }, { "epoch": 4.538928768636112, "grad_norm": 1.1204607486724854, "learning_rate": 1.2327e-05, "loss": 0.0572, "step": 4112 }, { "epoch": 4.540033130866925, "grad_norm": 0.5078701376914978, "learning_rate": 1.2329999999999999e-05, "loss": 0.0339, "step": 4113 }, { "epoch": 4.541137493097736, "grad_norm": 0.37308841943740845, "learning_rate": 1.2333e-05, "loss": 0.0288, "step": 4114 }, { "epoch": 4.542241855328548, "grad_norm": 0.8824888467788696, "learning_rate": 1.2336e-05, "loss": 0.0453, "step": 4115 }, { "epoch": 4.5433462175593595, "grad_norm": 0.5770021080970764, "learning_rate": 1.2339e-05, "loss": 0.0514, "step": 4116 }, { "epoch": 4.544450579790171, "grad_norm": 0.9796915650367737, "learning_rate": 1.2342e-05, "loss": 0.0534, "step": 4117 }, { "epoch": 4.545554942020983, "grad_norm": 0.674712061882019, "learning_rate": 1.2345e-05, "loss": 0.0372, "step": 4118 }, { "epoch": 4.546659304251794, "grad_norm": 0.8056592345237732, "learning_rate": 1.2348000000000001e-05, "loss": 0.0422, "step": 4119 }, { "epoch": 4.547763666482607, "grad_norm": 1.7988568544387817, "learning_rate": 1.2351e-05, "loss": 0.0448, "step": 4120 }, { "epoch": 4.548868028713418, "grad_norm": 0.556435227394104, "learning_rate": 1.2354e-05, "loss": 0.0415, "step": 4121 }, { "epoch": 4.549972390944229, "grad_norm": 10.366333961486816, "learning_rate": 1.2357e-05, "loss": 0.0498, "step": 4122 }, { "epoch": 4.551076753175042, "grad_norm": 0.6250652074813843, "learning_rate": 1.236e-05, 
"loss": 0.0355, "step": 4123 }, { "epoch": 4.552181115405853, "grad_norm": 1.3260631561279297, "learning_rate": 1.2363000000000002e-05, "loss": 0.0885, "step": 4124 }, { "epoch": 4.553285477636665, "grad_norm": 1.2412415742874146, "learning_rate": 1.2366e-05, "loss": 0.3439, "step": 4125 }, { "epoch": 4.5543898398674765, "grad_norm": 0.9320282936096191, "learning_rate": 1.2369e-05, "loss": 0.2895, "step": 4126 }, { "epoch": 4.555494202098288, "grad_norm": 2.762378692626953, "learning_rate": 1.2372e-05, "loss": 0.2793, "step": 4127 }, { "epoch": 4.5565985643291, "grad_norm": 1.3841798305511475, "learning_rate": 1.2375e-05, "loss": 0.2051, "step": 4128 }, { "epoch": 4.557702926559911, "grad_norm": 1.5040156841278076, "learning_rate": 1.2378e-05, "loss": 0.2493, "step": 4129 }, { "epoch": 4.558807288790724, "grad_norm": 1.092877984046936, "learning_rate": 1.2381e-05, "loss": 0.1899, "step": 4130 }, { "epoch": 4.559911651021535, "grad_norm": 1.0057448148727417, "learning_rate": 1.2384e-05, "loss": 0.1118, "step": 4131 }, { "epoch": 4.561016013252347, "grad_norm": 0.9174748659133911, "learning_rate": 1.2387e-05, "loss": 0.1303, "step": 4132 }, { "epoch": 4.5621203754831585, "grad_norm": 0.748266339302063, "learning_rate": 1.239e-05, "loss": 0.073, "step": 4133 }, { "epoch": 4.56322473771397, "grad_norm": 0.6222684979438782, "learning_rate": 1.2393000000000001e-05, "loss": 0.0659, "step": 4134 }, { "epoch": 4.564329099944782, "grad_norm": 0.5708920955657959, "learning_rate": 1.2396000000000001e-05, "loss": 0.0408, "step": 4135 }, { "epoch": 4.565433462175593, "grad_norm": 0.8044570088386536, "learning_rate": 1.2399000000000001e-05, "loss": 0.0603, "step": 4136 }, { "epoch": 4.566537824406406, "grad_norm": 0.5302620530128479, "learning_rate": 1.2402e-05, "loss": 0.037, "step": 4137 }, { "epoch": 4.567642186637217, "grad_norm": 0.8813542127609253, "learning_rate": 1.2404999999999999e-05, "loss": 0.0503, "step": 4138 }, { "epoch": 4.568746548868028, "grad_norm": 
0.5880178809165955, "learning_rate": 1.2408e-05, "loss": 0.0394, "step": 4139 }, { "epoch": 4.569850911098841, "grad_norm": 0.7640584111213684, "learning_rate": 1.2411e-05, "loss": 0.0438, "step": 4140 }, { "epoch": 4.570955273329652, "grad_norm": 0.49444738030433655, "learning_rate": 1.2414e-05, "loss": 0.0336, "step": 4141 }, { "epoch": 4.572059635560464, "grad_norm": 0.5029177069664001, "learning_rate": 1.2417e-05, "loss": 0.0382, "step": 4142 }, { "epoch": 4.5731639977912755, "grad_norm": 0.5695894956588745, "learning_rate": 1.242e-05, "loss": 0.0375, "step": 4143 }, { "epoch": 4.574268360022087, "grad_norm": 0.49669474363327026, "learning_rate": 1.2423000000000001e-05, "loss": 0.0415, "step": 4144 }, { "epoch": 4.575372722252899, "grad_norm": 0.7565129995346069, "learning_rate": 1.2426000000000001e-05, "loss": 0.0607, "step": 4145 }, { "epoch": 4.57647708448371, "grad_norm": 0.5270214080810547, "learning_rate": 1.2429e-05, "loss": 0.0434, "step": 4146 }, { "epoch": 4.577581446714523, "grad_norm": 0.5704599022865295, "learning_rate": 1.2432e-05, "loss": 0.0539, "step": 4147 }, { "epoch": 4.578685808945334, "grad_norm": 0.6572616696357727, "learning_rate": 1.2435e-05, "loss": 0.0447, "step": 4148 }, { "epoch": 4.579790171176146, "grad_norm": 0.5278264880180359, "learning_rate": 1.2438000000000002e-05, "loss": 0.0275, "step": 4149 }, { "epoch": 4.5808945334069575, "grad_norm": 0.7232227325439453, "learning_rate": 1.2441e-05, "loss": 0.0536, "step": 4150 }, { "epoch": 4.581998895637769, "grad_norm": 1.243444561958313, "learning_rate": 1.2444e-05, "loss": 0.0507, "step": 4151 }, { "epoch": 4.583103257868581, "grad_norm": 0.5077757239341736, "learning_rate": 1.2447e-05, "loss": 0.0292, "step": 4152 }, { "epoch": 4.584207620099392, "grad_norm": 0.666634738445282, "learning_rate": 1.245e-05, "loss": 0.0404, "step": 4153 }, { "epoch": 4.585311982330205, "grad_norm": 0.48565080761909485, "learning_rate": 1.2453000000000001e-05, "loss": 0.0377, "step": 4154 }, { "epoch": 
4.586416344561016, "grad_norm": 0.5705221891403198, "learning_rate": 1.2456e-05, "loss": 0.0395, "step": 4155 }, { "epoch": 4.587520706791828, "grad_norm": 0.5707361698150635, "learning_rate": 1.2459e-05, "loss": 0.0358, "step": 4156 }, { "epoch": 4.58862506902264, "grad_norm": 0.45676130056381226, "learning_rate": 1.2462e-05, "loss": 0.0261, "step": 4157 }, { "epoch": 4.589729431253451, "grad_norm": 0.5166676640510559, "learning_rate": 1.2465e-05, "loss": 0.0288, "step": 4158 }, { "epoch": 4.590833793484263, "grad_norm": 0.6672812700271606, "learning_rate": 1.2468000000000002e-05, "loss": 0.04, "step": 4159 }, { "epoch": 4.5919381557150745, "grad_norm": 0.6338879466056824, "learning_rate": 1.2471000000000001e-05, "loss": 0.0429, "step": 4160 }, { "epoch": 4.593042517945886, "grad_norm": 0.7353308796882629, "learning_rate": 1.2474000000000001e-05, "loss": 0.0507, "step": 4161 }, { "epoch": 4.594146880176698, "grad_norm": 0.5099920630455017, "learning_rate": 1.2477e-05, "loss": 0.0183, "step": 4162 }, { "epoch": 4.595251242407509, "grad_norm": 0.694939136505127, "learning_rate": 1.2479999999999999e-05, "loss": 0.0314, "step": 4163 }, { "epoch": 4.596355604638322, "grad_norm": 0.716848611831665, "learning_rate": 1.2483e-05, "loss": 0.0304, "step": 4164 }, { "epoch": 4.597459966869133, "grad_norm": 0.8147700428962708, "learning_rate": 1.2486e-05, "loss": 0.0426, "step": 4165 }, { "epoch": 4.598564329099945, "grad_norm": 0.7712846994400024, "learning_rate": 1.2489e-05, "loss": 0.034, "step": 4166 }, { "epoch": 4.5996686913307565, "grad_norm": 0.6027389764785767, "learning_rate": 1.2492e-05, "loss": 0.0416, "step": 4167 }, { "epoch": 4.600773053561568, "grad_norm": 1.1561881303787231, "learning_rate": 1.2495e-05, "loss": 0.053, "step": 4168 }, { "epoch": 4.60187741579238, "grad_norm": 0.9150592684745789, "learning_rate": 1.2498000000000001e-05, "loss": 0.0278, "step": 4169 }, { "epoch": 4.602981778023191, "grad_norm": 0.776819109916687, "learning_rate": 
1.2501000000000001e-05, "loss": 0.0541, "step": 4170 }, { "epoch": 4.604086140254004, "grad_norm": 1.068248987197876, "learning_rate": 1.2504000000000001e-05, "loss": 0.0345, "step": 4171 }, { "epoch": 4.605190502484815, "grad_norm": 0.9502889513969421, "learning_rate": 1.2507e-05, "loss": 0.0367, "step": 4172 }, { "epoch": 4.606294864715627, "grad_norm": 0.6917609572410583, "learning_rate": 1.251e-05, "loss": 0.0464, "step": 4173 }, { "epoch": 4.6073992269464386, "grad_norm": 0.822118878364563, "learning_rate": 1.2513e-05, "loss": 0.0469, "step": 4174 }, { "epoch": 4.60850358917725, "grad_norm": 1.2089364528656006, "learning_rate": 1.2516e-05, "loss": 0.3721, "step": 4175 }, { "epoch": 4.609607951408062, "grad_norm": 0.985589325428009, "learning_rate": 1.2519e-05, "loss": 0.3501, "step": 4176 }, { "epoch": 4.6107123136388735, "grad_norm": 1.1549655199050903, "learning_rate": 1.2522e-05, "loss": 0.2497, "step": 4177 }, { "epoch": 4.611816675869685, "grad_norm": 0.6976132988929749, "learning_rate": 1.2525e-05, "loss": 0.2074, "step": 4178 }, { "epoch": 4.612921038100497, "grad_norm": 1.0413329601287842, "learning_rate": 1.2528000000000001e-05, "loss": 0.222, "step": 4179 }, { "epoch": 4.614025400331308, "grad_norm": 0.8705588579177856, "learning_rate": 1.2531e-05, "loss": 0.1673, "step": 4180 }, { "epoch": 4.615129762562121, "grad_norm": 0.6935985684394836, "learning_rate": 1.2534e-05, "loss": 0.1399, "step": 4181 }, { "epoch": 4.616234124792932, "grad_norm": 1.3439462184906006, "learning_rate": 1.2537e-05, "loss": 0.1667, "step": 4182 }, { "epoch": 4.617338487023744, "grad_norm": 0.771436870098114, "learning_rate": 1.254e-05, "loss": 0.0791, "step": 4183 }, { "epoch": 4.6184428492545555, "grad_norm": 0.8807432651519775, "learning_rate": 1.2543000000000002e-05, "loss": 0.0828, "step": 4184 }, { "epoch": 4.619547211485367, "grad_norm": 0.4463104009628296, "learning_rate": 1.2546000000000002e-05, "loss": 0.0394, "step": 4185 }, { "epoch": 4.620651573716179, 
"grad_norm": 0.5678229928016663, "learning_rate": 1.2549000000000001e-05, "loss": 0.0695, "step": 4186 }, { "epoch": 4.62175593594699, "grad_norm": 0.5706868767738342, "learning_rate": 1.2552e-05, "loss": 0.0334, "step": 4187 }, { "epoch": 4.622860298177803, "grad_norm": 0.4699414074420929, "learning_rate": 1.2555e-05, "loss": 0.0461, "step": 4188 }, { "epoch": 4.623964660408614, "grad_norm": 0.7284677624702454, "learning_rate": 1.2558e-05, "loss": 0.0403, "step": 4189 }, { "epoch": 4.625069022639426, "grad_norm": 0.5710897445678711, "learning_rate": 1.2561e-05, "loss": 0.0362, "step": 4190 }, { "epoch": 4.6261733848702375, "grad_norm": 0.679502546787262, "learning_rate": 1.2564e-05, "loss": 0.03, "step": 4191 }, { "epoch": 4.627277747101049, "grad_norm": 0.3686022460460663, "learning_rate": 1.2567e-05, "loss": 0.0291, "step": 4192 }, { "epoch": 4.628382109331861, "grad_norm": 0.6922078132629395, "learning_rate": 1.257e-05, "loss": 0.045, "step": 4193 }, { "epoch": 4.6294864715626725, "grad_norm": 0.469092458486557, "learning_rate": 1.2573e-05, "loss": 0.0317, "step": 4194 }, { "epoch": 4.630590833793484, "grad_norm": 0.6699690222740173, "learning_rate": 1.2576000000000001e-05, "loss": 0.0471, "step": 4195 }, { "epoch": 4.631695196024296, "grad_norm": 1.0550503730773926, "learning_rate": 1.2579000000000001e-05, "loss": 0.0364, "step": 4196 }, { "epoch": 4.632799558255107, "grad_norm": 0.5810962319374084, "learning_rate": 1.2582e-05, "loss": 0.0363, "step": 4197 }, { "epoch": 4.63390392048592, "grad_norm": 0.5061582326889038, "learning_rate": 1.2585e-05, "loss": 0.0317, "step": 4198 }, { "epoch": 4.635008282716731, "grad_norm": 0.34337499737739563, "learning_rate": 1.2587999999999999e-05, "loss": 0.019, "step": 4199 }, { "epoch": 4.636112644947543, "grad_norm": 0.6694722771644592, "learning_rate": 1.2591e-05, "loss": 0.0396, "step": 4200 }, { "epoch": 4.6372170071783545, "grad_norm": 1.3780525922775269, "learning_rate": 1.2594e-05, "loss": 0.0316, "step": 4201 }, { 
"epoch": 4.638321369409166, "grad_norm": 0.6411008238792419, "learning_rate": 1.2597e-05, "loss": 0.0355, "step": 4202 }, { "epoch": 4.639425731639978, "grad_norm": 0.6143317222595215, "learning_rate": 1.26e-05, "loss": 0.0414, "step": 4203 }, { "epoch": 4.640530093870789, "grad_norm": 0.7013039588928223, "learning_rate": 1.2603e-05, "loss": 0.0483, "step": 4204 }, { "epoch": 4.641634456101602, "grad_norm": 0.9577318429946899, "learning_rate": 1.2606000000000001e-05, "loss": 0.0471, "step": 4205 }, { "epoch": 4.642738818332413, "grad_norm": 1.0873335599899292, "learning_rate": 1.2609e-05, "loss": 0.0494, "step": 4206 }, { "epoch": 4.643843180563225, "grad_norm": 0.828682541847229, "learning_rate": 1.2612e-05, "loss": 0.0417, "step": 4207 }, { "epoch": 4.6449475427940365, "grad_norm": 0.6366488337516785, "learning_rate": 1.2615e-05, "loss": 0.0338, "step": 4208 }, { "epoch": 4.646051905024848, "grad_norm": 0.6037416458129883, "learning_rate": 1.2618e-05, "loss": 0.0418, "step": 4209 }, { "epoch": 4.64715626725566, "grad_norm": 0.6232718825340271, "learning_rate": 1.2621000000000002e-05, "loss": 0.0344, "step": 4210 }, { "epoch": 4.6482606294864715, "grad_norm": 0.844114363193512, "learning_rate": 1.2624e-05, "loss": 0.0434, "step": 4211 }, { "epoch": 4.649364991717283, "grad_norm": 0.8463589549064636, "learning_rate": 1.2627e-05, "loss": 0.0465, "step": 4212 }, { "epoch": 4.650469353948095, "grad_norm": 0.7041437029838562, "learning_rate": 1.263e-05, "loss": 0.0462, "step": 4213 }, { "epoch": 4.651573716178906, "grad_norm": 0.6481946706771851, "learning_rate": 1.2633e-05, "loss": 0.0434, "step": 4214 }, { "epoch": 4.652678078409719, "grad_norm": 0.7251396179199219, "learning_rate": 1.2636e-05, "loss": 0.0275, "step": 4215 }, { "epoch": 4.65378244064053, "grad_norm": 0.5544635653495789, "learning_rate": 1.2639e-05, "loss": 0.0418, "step": 4216 }, { "epoch": 4.654886802871342, "grad_norm": 0.581046998500824, "learning_rate": 1.2642e-05, "loss": 0.0378, "step": 4217 }, 
{ "epoch": 4.6559911651021535, "grad_norm": 0.740582287311554, "learning_rate": 1.2645e-05, "loss": 0.0497, "step": 4218 }, { "epoch": 4.657095527332965, "grad_norm": 1.5901551246643066, "learning_rate": 1.2648e-05, "loss": 0.0835, "step": 4219 }, { "epoch": 4.658199889563777, "grad_norm": 0.6001721620559692, "learning_rate": 1.2651000000000001e-05, "loss": 0.0387, "step": 4220 }, { "epoch": 4.659304251794588, "grad_norm": 1.4718475341796875, "learning_rate": 1.2654000000000001e-05, "loss": 0.0566, "step": 4221 }, { "epoch": 4.660408614025401, "grad_norm": 0.7076300382614136, "learning_rate": 1.2657000000000001e-05, "loss": 0.0409, "step": 4222 }, { "epoch": 4.661512976256212, "grad_norm": 0.8673247694969177, "learning_rate": 1.2659999999999999e-05, "loss": 0.0602, "step": 4223 }, { "epoch": 4.662617338487024, "grad_norm": 0.6398583054542542, "learning_rate": 1.2662999999999999e-05, "loss": 0.04, "step": 4224 }, { "epoch": 4.6637217007178355, "grad_norm": 2.0800678730010986, "learning_rate": 1.2666e-05, "loss": 0.3137, "step": 4225 }, { "epoch": 4.664826062948647, "grad_norm": 1.0283368825912476, "learning_rate": 1.2669e-05, "loss": 0.2649, "step": 4226 }, { "epoch": 4.665930425179459, "grad_norm": 1.0056884288787842, "learning_rate": 1.2672e-05, "loss": 0.2295, "step": 4227 }, { "epoch": 4.6670347874102704, "grad_norm": 0.9890457987785339, "learning_rate": 1.2675e-05, "loss": 0.2447, "step": 4228 }, { "epoch": 4.668139149641082, "grad_norm": 1.6235157251358032, "learning_rate": 1.2678e-05, "loss": 0.2751, "step": 4229 }, { "epoch": 4.669243511871894, "grad_norm": 0.9959637522697449, "learning_rate": 1.2681000000000001e-05, "loss": 0.1998, "step": 4230 }, { "epoch": 4.670347874102705, "grad_norm": 1.1689358949661255, "learning_rate": 1.2684000000000001e-05, "loss": 0.2219, "step": 4231 }, { "epoch": 4.671452236333518, "grad_norm": 0.9175697565078735, "learning_rate": 1.2687e-05, "loss": 0.1014, "step": 4232 }, { "epoch": 4.672556598564329, "grad_norm": 
0.6011679768562317, "learning_rate": 1.269e-05, "loss": 0.09, "step": 4233 }, { "epoch": 4.673660960795141, "grad_norm": 0.5342956781387329, "learning_rate": 1.2693e-05, "loss": 0.0732, "step": 4234 }, { "epoch": 4.6747653230259525, "grad_norm": 0.5552782416343689, "learning_rate": 1.2696000000000002e-05, "loss": 0.0518, "step": 4235 }, { "epoch": 4.675869685256764, "grad_norm": 0.6582059264183044, "learning_rate": 1.2699e-05, "loss": 0.0828, "step": 4236 }, { "epoch": 4.676974047487576, "grad_norm": 0.4544388949871063, "learning_rate": 1.2702e-05, "loss": 0.0443, "step": 4237 }, { "epoch": 4.678078409718387, "grad_norm": 0.3713201880455017, "learning_rate": 1.2705e-05, "loss": 0.0261, "step": 4238 }, { "epoch": 4.6791827719492, "grad_norm": 1.6593660116195679, "learning_rate": 1.2708e-05, "loss": 0.1726, "step": 4239 }, { "epoch": 4.680287134180011, "grad_norm": 0.4431353807449341, "learning_rate": 1.2711e-05, "loss": 0.0341, "step": 4240 }, { "epoch": 4.681391496410823, "grad_norm": 0.4498712420463562, "learning_rate": 1.2714e-05, "loss": 0.0417, "step": 4241 }, { "epoch": 4.6824958586416345, "grad_norm": 0.7186427116394043, "learning_rate": 1.2717e-05, "loss": 0.0281, "step": 4242 }, { "epoch": 4.683600220872446, "grad_norm": 0.4403086006641388, "learning_rate": 1.272e-05, "loss": 0.0343, "step": 4243 }, { "epoch": 4.684704583103258, "grad_norm": 0.4558698534965515, "learning_rate": 1.2723e-05, "loss": 0.0287, "step": 4244 }, { "epoch": 4.685808945334069, "grad_norm": 0.7966709136962891, "learning_rate": 1.2726000000000001e-05, "loss": 0.0418, "step": 4245 }, { "epoch": 4.686913307564881, "grad_norm": 0.6055822968482971, "learning_rate": 1.2729000000000001e-05, "loss": 0.0378, "step": 4246 }, { "epoch": 4.688017669795693, "grad_norm": 1.3988173007965088, "learning_rate": 1.2732000000000001e-05, "loss": 0.0697, "step": 4247 }, { "epoch": 4.689122032026504, "grad_norm": 0.6705009937286377, "learning_rate": 1.2735e-05, "loss": 0.0388, "step": 4248 }, { "epoch": 
4.690226394257317, "grad_norm": 0.7996907830238342, "learning_rate": 1.2737999999999999e-05, "loss": 0.0574, "step": 4249 }, { "epoch": 4.691330756488128, "grad_norm": 0.7531254887580872, "learning_rate": 1.2741e-05, "loss": 0.0325, "step": 4250 }, { "epoch": 4.69243511871894, "grad_norm": 0.40629372000694275, "learning_rate": 1.2744e-05, "loss": 0.0264, "step": 4251 }, { "epoch": 4.6935394809497515, "grad_norm": 0.6581127047538757, "learning_rate": 1.2747e-05, "loss": 0.0335, "step": 4252 }, { "epoch": 4.694643843180563, "grad_norm": 0.5982281565666199, "learning_rate": 1.275e-05, "loss": 0.0295, "step": 4253 }, { "epoch": 4.695748205411375, "grad_norm": 0.6517385244369507, "learning_rate": 1.2753e-05, "loss": 0.0452, "step": 4254 }, { "epoch": 4.696852567642186, "grad_norm": 0.7266591191291809, "learning_rate": 1.2756000000000001e-05, "loss": 0.0586, "step": 4255 }, { "epoch": 4.697956929872999, "grad_norm": 1.5014984607696533, "learning_rate": 1.2759000000000001e-05, "loss": 0.0446, "step": 4256 }, { "epoch": 4.69906129210381, "grad_norm": 0.5113356113433838, "learning_rate": 1.2762e-05, "loss": 0.0191, "step": 4257 }, { "epoch": 4.700165654334622, "grad_norm": 0.5006110668182373, "learning_rate": 1.2765e-05, "loss": 0.0393, "step": 4258 }, { "epoch": 4.7012700165654335, "grad_norm": 0.7907095551490784, "learning_rate": 1.2768e-05, "loss": 0.0377, "step": 4259 }, { "epoch": 4.702374378796245, "grad_norm": 0.4523256719112396, "learning_rate": 1.2771e-05, "loss": 0.0383, "step": 4260 }, { "epoch": 4.703478741027057, "grad_norm": 0.7377075552940369, "learning_rate": 1.2774e-05, "loss": 0.0376, "step": 4261 }, { "epoch": 4.704583103257868, "grad_norm": 0.5110024809837341, "learning_rate": 1.2777e-05, "loss": 0.0407, "step": 4262 }, { "epoch": 4.705687465488681, "grad_norm": 0.6254483461380005, "learning_rate": 1.278e-05, "loss": 0.0336, "step": 4263 }, { "epoch": 4.706791827719492, "grad_norm": 0.9453412890434265, "learning_rate": 1.2783e-05, "loss": 0.0343, "step": 
4264 }, { "epoch": 4.707896189950303, "grad_norm": 0.669739842414856, "learning_rate": 1.2786000000000001e-05, "loss": 0.0365, "step": 4265 }, { "epoch": 4.709000552181116, "grad_norm": 0.694541335105896, "learning_rate": 1.2789e-05, "loss": 0.0505, "step": 4266 }, { "epoch": 4.710104914411927, "grad_norm": 0.8324567675590515, "learning_rate": 1.2792e-05, "loss": 0.0473, "step": 4267 }, { "epoch": 4.711209276642739, "grad_norm": 0.8051469922065735, "learning_rate": 1.2795e-05, "loss": 0.0406, "step": 4268 }, { "epoch": 4.7123136388735505, "grad_norm": 0.7249347567558289, "learning_rate": 1.2798e-05, "loss": 0.048, "step": 4269 }, { "epoch": 4.713418001104362, "grad_norm": 0.8731049299240112, "learning_rate": 1.2801000000000002e-05, "loss": 0.0637, "step": 4270 }, { "epoch": 4.714522363335174, "grad_norm": 1.4447880983352661, "learning_rate": 1.2804000000000001e-05, "loss": 0.0417, "step": 4271 }, { "epoch": 4.715626725565985, "grad_norm": 0.635759711265564, "learning_rate": 1.2807000000000001e-05, "loss": 0.0383, "step": 4272 }, { "epoch": 4.716731087796798, "grad_norm": 0.4599716067314148, "learning_rate": 1.281e-05, "loss": 0.044, "step": 4273 }, { "epoch": 4.717835450027609, "grad_norm": 1.0478595495224, "learning_rate": 1.2812999999999999e-05, "loss": 0.0611, "step": 4274 }, { "epoch": 4.718939812258421, "grad_norm": 1.213186264038086, "learning_rate": 1.2816e-05, "loss": 0.4229, "step": 4275 }, { "epoch": 4.7200441744892325, "grad_norm": 1.18071448802948, "learning_rate": 1.2819e-05, "loss": 0.32, "step": 4276 }, { "epoch": 4.721148536720044, "grad_norm": 0.8695083260536194, "learning_rate": 1.2822e-05, "loss": 0.2748, "step": 4277 }, { "epoch": 4.722252898950856, "grad_norm": 1.0074717998504639, "learning_rate": 1.2825e-05, "loss": 0.2451, "step": 4278 }, { "epoch": 4.723357261181667, "grad_norm": 0.901001513004303, "learning_rate": 1.2828e-05, "loss": 0.1943, "step": 4279 }, { "epoch": 4.72446162341248, "grad_norm": 0.8213049173355103, "learning_rate": 
1.2831000000000001e-05, "loss": 0.1586, "step": 4280 }, { "epoch": 4.725565985643291, "grad_norm": 1.9090369939804077, "learning_rate": 1.2834000000000001e-05, "loss": 0.1763, "step": 4281 }, { "epoch": 4.726670347874102, "grad_norm": 0.9329023957252502, "learning_rate": 1.2837000000000001e-05, "loss": 0.13, "step": 4282 }, { "epoch": 4.727774710104915, "grad_norm": 0.681583046913147, "learning_rate": 1.284e-05, "loss": 0.0598, "step": 4283 }, { "epoch": 4.728879072335726, "grad_norm": 0.7804955244064331, "learning_rate": 1.2843e-05, "loss": 0.1059, "step": 4284 }, { "epoch": 4.729983434566538, "grad_norm": 0.4578865170478821, "learning_rate": 1.2846e-05, "loss": 0.0508, "step": 4285 }, { "epoch": 4.7310877967973495, "grad_norm": 0.6385452151298523, "learning_rate": 1.2849e-05, "loss": 0.0507, "step": 4286 }, { "epoch": 4.732192159028161, "grad_norm": 0.7201411724090576, "learning_rate": 1.2852e-05, "loss": 0.0482, "step": 4287 }, { "epoch": 4.733296521258973, "grad_norm": 0.6535779237747192, "learning_rate": 1.2855e-05, "loss": 0.039, "step": 4288 }, { "epoch": 4.734400883489784, "grad_norm": 0.37023600935935974, "learning_rate": 1.2858e-05, "loss": 0.0299, "step": 4289 }, { "epoch": 4.735505245720597, "grad_norm": 0.4909016191959381, "learning_rate": 1.2861000000000001e-05, "loss": 0.0312, "step": 4290 }, { "epoch": 4.736609607951408, "grad_norm": 0.5866609215736389, "learning_rate": 1.2864e-05, "loss": 0.0335, "step": 4291 }, { "epoch": 4.73771397018222, "grad_norm": 0.5611176490783691, "learning_rate": 1.2867e-05, "loss": 0.0433, "step": 4292 }, { "epoch": 4.7388183324130315, "grad_norm": 0.8566970825195312, "learning_rate": 1.287e-05, "loss": 0.0489, "step": 4293 }, { "epoch": 4.739922694643843, "grad_norm": 0.5992603302001953, "learning_rate": 1.2873e-05, "loss": 0.0444, "step": 4294 }, { "epoch": 4.741027056874655, "grad_norm": 1.0641086101531982, "learning_rate": 1.2876000000000002e-05, "loss": 0.0374, "step": 4295 }, { "epoch": 4.742131419105466, 
"grad_norm": 0.5279338955879211, "learning_rate": 1.2879000000000002e-05, "loss": 0.0355, "step": 4296 }, { "epoch": 4.743235781336279, "grad_norm": 0.6156669855117798, "learning_rate": 1.2882e-05, "loss": 0.0394, "step": 4297 }, { "epoch": 4.74434014356709, "grad_norm": 0.6078835129737854, "learning_rate": 1.2885e-05, "loss": 0.0417, "step": 4298 }, { "epoch": 4.745444505797901, "grad_norm": 0.5039106011390686, "learning_rate": 1.2888e-05, "loss": 0.0322, "step": 4299 }, { "epoch": 4.746548868028714, "grad_norm": 0.5356905460357666, "learning_rate": 1.2891e-05, "loss": 0.0303, "step": 4300 }, { "epoch": 4.747653230259525, "grad_norm": 0.6196489930152893, "learning_rate": 1.2894e-05, "loss": 0.0533, "step": 4301 }, { "epoch": 4.748757592490337, "grad_norm": 0.837660014629364, "learning_rate": 1.2897e-05, "loss": 0.0537, "step": 4302 }, { "epoch": 4.7498619547211485, "grad_norm": 0.6738686561584473, "learning_rate": 1.29e-05, "loss": 0.036, "step": 4303 }, { "epoch": 4.75096631695196, "grad_norm": 0.5247911214828491, "learning_rate": 1.2903e-05, "loss": 0.0446, "step": 4304 }, { "epoch": 4.752070679182772, "grad_norm": 0.6516720652580261, "learning_rate": 1.2906000000000001e-05, "loss": 0.0439, "step": 4305 }, { "epoch": 4.753175041413583, "grad_norm": 0.9899722933769226, "learning_rate": 1.2909000000000001e-05, "loss": 0.0496, "step": 4306 }, { "epoch": 4.754279403644396, "grad_norm": 0.6981588006019592, "learning_rate": 1.2912000000000001e-05, "loss": 0.0583, "step": 4307 }, { "epoch": 4.755383765875207, "grad_norm": 0.5301433801651001, "learning_rate": 1.2915000000000001e-05, "loss": 0.041, "step": 4308 }, { "epoch": 4.756488128106019, "grad_norm": 0.5600716471672058, "learning_rate": 1.2917999999999999e-05, "loss": 0.0339, "step": 4309 }, { "epoch": 4.7575924903368305, "grad_norm": 1.1598750352859497, "learning_rate": 1.2921e-05, "loss": 0.0522, "step": 4310 }, { "epoch": 4.758696852567642, "grad_norm": 0.6332345008850098, "learning_rate": 1.2924e-05, "loss": 
0.0327, "step": 4311 }, { "epoch": 4.759801214798454, "grad_norm": 0.9845707416534424, "learning_rate": 1.2927e-05, "loss": 0.0395, "step": 4312 }, { "epoch": 4.760905577029265, "grad_norm": 0.6494612097740173, "learning_rate": 1.293e-05, "loss": 0.0417, "step": 4313 }, { "epoch": 4.762009939260078, "grad_norm": 0.627305269241333, "learning_rate": 1.2933e-05, "loss": 0.0387, "step": 4314 }, { "epoch": 4.763114301490889, "grad_norm": 1.741435170173645, "learning_rate": 1.2936000000000001e-05, "loss": 0.06, "step": 4315 }, { "epoch": 4.7642186637217, "grad_norm": 0.6070664525032043, "learning_rate": 1.2939000000000001e-05, "loss": 0.0499, "step": 4316 }, { "epoch": 4.765323025952513, "grad_norm": 0.6851527690887451, "learning_rate": 1.2942e-05, "loss": 0.0333, "step": 4317 }, { "epoch": 4.766427388183324, "grad_norm": 0.9101213216781616, "learning_rate": 1.2945e-05, "loss": 0.0554, "step": 4318 }, { "epoch": 4.767531750414136, "grad_norm": 0.8540078401565552, "learning_rate": 1.2948e-05, "loss": 0.0463, "step": 4319 }, { "epoch": 4.7686361126449475, "grad_norm": 0.6184878349304199, "learning_rate": 1.2951e-05, "loss": 0.0314, "step": 4320 }, { "epoch": 4.769740474875759, "grad_norm": 0.8011671900749207, "learning_rate": 1.2954000000000002e-05, "loss": 0.072, "step": 4321 }, { "epoch": 4.770844837106571, "grad_norm": 0.9296676516532898, "learning_rate": 1.2957e-05, "loss": 0.0776, "step": 4322 }, { "epoch": 4.771949199337382, "grad_norm": 0.8817923665046692, "learning_rate": 1.296e-05, "loss": 0.0374, "step": 4323 }, { "epoch": 4.773053561568195, "grad_norm": 0.6443279981613159, "learning_rate": 1.2963e-05, "loss": 0.0448, "step": 4324 }, { "epoch": 4.774157923799006, "grad_norm": 1.4098148345947266, "learning_rate": 1.2966e-05, "loss": 0.366, "step": 4325 }, { "epoch": 4.775262286029818, "grad_norm": 1.0759055614471436, "learning_rate": 1.2969e-05, "loss": 0.2837, "step": 4326 }, { "epoch": 4.7763666482606295, "grad_norm": 1.3856664896011353, "learning_rate": 
1.2972e-05, "loss": 0.3439, "step": 4327 }, { "epoch": 4.777471010491441, "grad_norm": 1.1768488883972168, "learning_rate": 1.2975e-05, "loss": 0.2609, "step": 4328 }, { "epoch": 4.778575372722253, "grad_norm": 0.8879263401031494, "learning_rate": 1.2978e-05, "loss": 0.2401, "step": 4329 }, { "epoch": 4.779679734953064, "grad_norm": 0.8193822503089905, "learning_rate": 1.2981e-05, "loss": 0.1231, "step": 4330 }, { "epoch": 4.780784097183877, "grad_norm": 0.877549946308136, "learning_rate": 1.2984000000000001e-05, "loss": 0.2055, "step": 4331 }, { "epoch": 4.781888459414688, "grad_norm": 1.0172522068023682, "learning_rate": 1.2987000000000001e-05, "loss": 0.1422, "step": 4332 }, { "epoch": 4.7829928216455, "grad_norm": 0.6420350074768066, "learning_rate": 1.2990000000000001e-05, "loss": 0.0836, "step": 4333 }, { "epoch": 4.7840971838763116, "grad_norm": 0.497551828622818, "learning_rate": 1.2992999999999999e-05, "loss": 0.042, "step": 4334 }, { "epoch": 4.785201546107123, "grad_norm": 0.4264639914035797, "learning_rate": 1.2995999999999999e-05, "loss": 0.0387, "step": 4335 }, { "epoch": 4.786305908337935, "grad_norm": 0.3588840067386627, "learning_rate": 1.2999e-05, "loss": 0.0268, "step": 4336 }, { "epoch": 4.7874102705687465, "grad_norm": 0.6043224930763245, "learning_rate": 1.3002e-05, "loss": 0.0369, "step": 4337 }, { "epoch": 4.788514632799558, "grad_norm": 0.5272365212440491, "learning_rate": 1.3005e-05, "loss": 0.0312, "step": 4338 }, { "epoch": 4.78961899503037, "grad_norm": 0.560787558555603, "learning_rate": 1.3008e-05, "loss": 0.0525, "step": 4339 }, { "epoch": 4.790723357261181, "grad_norm": 0.6935253143310547, "learning_rate": 1.3011e-05, "loss": 0.0701, "step": 4340 }, { "epoch": 4.791827719491994, "grad_norm": 0.7133506536483765, "learning_rate": 1.3014000000000001e-05, "loss": 0.0439, "step": 4341 }, { "epoch": 4.792932081722805, "grad_norm": 0.5640441179275513, "learning_rate": 1.3017000000000001e-05, "loss": 0.0331, "step": 4342 }, { "epoch": 
4.794036443953617, "grad_norm": 0.6858233213424683, "learning_rate": 1.302e-05, "loss": 0.0371, "step": 4343 }, { "epoch": 4.7951408061844285, "grad_norm": 0.4627273380756378, "learning_rate": 1.3023e-05, "loss": 0.0267, "step": 4344 }, { "epoch": 4.79624516841524, "grad_norm": 0.6081977486610413, "learning_rate": 1.3026e-05, "loss": 0.0298, "step": 4345 }, { "epoch": 4.797349530646052, "grad_norm": 0.5301994681358337, "learning_rate": 1.3029e-05, "loss": 0.0224, "step": 4346 }, { "epoch": 4.798453892876863, "grad_norm": 0.4401805102825165, "learning_rate": 1.3032e-05, "loss": 0.0237, "step": 4347 }, { "epoch": 4.799558255107676, "grad_norm": 0.7353500127792358, "learning_rate": 1.3035e-05, "loss": 0.0525, "step": 4348 }, { "epoch": 4.800662617338487, "grad_norm": 0.6700345873832703, "learning_rate": 1.3038e-05, "loss": 0.0371, "step": 4349 }, { "epoch": 4.801766979569299, "grad_norm": 0.710516095161438, "learning_rate": 1.3041e-05, "loss": 0.0281, "step": 4350 }, { "epoch": 4.8028713418001105, "grad_norm": 0.5797708034515381, "learning_rate": 1.3044e-05, "loss": 0.0339, "step": 4351 }, { "epoch": 4.803975704030922, "grad_norm": 0.6438461542129517, "learning_rate": 1.3047e-05, "loss": 0.032, "step": 4352 }, { "epoch": 4.805080066261734, "grad_norm": 1.0015125274658203, "learning_rate": 1.305e-05, "loss": 0.0352, "step": 4353 }, { "epoch": 4.8061844284925455, "grad_norm": 0.9032297730445862, "learning_rate": 1.3053e-05, "loss": 0.0498, "step": 4354 }, { "epoch": 4.807288790723357, "grad_norm": 0.6785793900489807, "learning_rate": 1.3056e-05, "loss": 0.0482, "step": 4355 }, { "epoch": 4.808393152954169, "grad_norm": 0.6000563502311707, "learning_rate": 1.3059000000000002e-05, "loss": 0.0274, "step": 4356 }, { "epoch": 4.80949751518498, "grad_norm": 0.4540937542915344, "learning_rate": 1.3062000000000001e-05, "loss": 0.0335, "step": 4357 }, { "epoch": 4.810601877415793, "grad_norm": 0.48386308550834656, "learning_rate": 1.3065000000000001e-05, "loss": 0.0332, "step": 
4358 }, { "epoch": 4.811706239646604, "grad_norm": 0.5947467684745789, "learning_rate": 1.3068e-05, "loss": 0.0389, "step": 4359 }, { "epoch": 4.812810601877416, "grad_norm": 0.5843924880027771, "learning_rate": 1.3070999999999999e-05, "loss": 0.0402, "step": 4360 }, { "epoch": 4.8139149641082275, "grad_norm": 0.7906097173690796, "learning_rate": 1.3074e-05, "loss": 0.0683, "step": 4361 }, { "epoch": 4.815019326339039, "grad_norm": 0.593004047870636, "learning_rate": 1.3077e-05, "loss": 0.0377, "step": 4362 }, { "epoch": 4.816123688569851, "grad_norm": 0.6340298056602478, "learning_rate": 1.308e-05, "loss": 0.0366, "step": 4363 }, { "epoch": 4.817228050800662, "grad_norm": 0.6971035003662109, "learning_rate": 1.3083e-05, "loss": 0.0431, "step": 4364 }, { "epoch": 4.818332413031475, "grad_norm": 0.5880928039550781, "learning_rate": 1.3086e-05, "loss": 0.0486, "step": 4365 }, { "epoch": 4.819436775262286, "grad_norm": 0.5743786096572876, "learning_rate": 1.3089000000000001e-05, "loss": 0.0494, "step": 4366 }, { "epoch": 4.820541137493098, "grad_norm": 0.6675478219985962, "learning_rate": 1.3092000000000001e-05, "loss": 0.0389, "step": 4367 }, { "epoch": 4.8216454997239095, "grad_norm": 0.7665414810180664, "learning_rate": 1.3095e-05, "loss": 0.0578, "step": 4368 }, { "epoch": 4.822749861954721, "grad_norm": 0.6427032351493835, "learning_rate": 1.3098e-05, "loss": 0.0353, "step": 4369 }, { "epoch": 4.823854224185533, "grad_norm": 0.9161781072616577, "learning_rate": 1.3101e-05, "loss": 0.0635, "step": 4370 }, { "epoch": 4.8249585864163445, "grad_norm": 1.0144315958023071, "learning_rate": 1.3104e-05, "loss": 0.0416, "step": 4371 }, { "epoch": 4.826062948647156, "grad_norm": 0.878693699836731, "learning_rate": 1.3107e-05, "loss": 0.0315, "step": 4372 }, { "epoch": 4.827167310877968, "grad_norm": 0.8026139140129089, "learning_rate": 1.311e-05, "loss": 0.0663, "step": 4373 }, { "epoch": 4.828271673108779, "grad_norm": 0.7550862431526184, "learning_rate": 1.3113e-05, 
"loss": 0.0753, "step": 4374 }, { "epoch": 4.829376035339592, "grad_norm": 1.2975486516952515, "learning_rate": 1.3116e-05, "loss": 0.5341, "step": 4375 }, { "epoch": 4.830480397570403, "grad_norm": 1.0412862300872803, "learning_rate": 1.3119000000000001e-05, "loss": 0.3633, "step": 4376 }, { "epoch": 4.831584759801215, "grad_norm": 1.3659968376159668, "learning_rate": 1.3122e-05, "loss": 0.2464, "step": 4377 }, { "epoch": 4.8326891220320265, "grad_norm": 0.8074595928192139, "learning_rate": 1.3125e-05, "loss": 0.2548, "step": 4378 }, { "epoch": 4.833793484262838, "grad_norm": 1.0323847532272339, "learning_rate": 1.3128e-05, "loss": 0.211, "step": 4379 }, { "epoch": 4.83489784649365, "grad_norm": 1.3047171831130981, "learning_rate": 1.3131e-05, "loss": 0.2861, "step": 4380 }, { "epoch": 4.836002208724461, "grad_norm": 0.7998205423355103, "learning_rate": 1.3134000000000002e-05, "loss": 0.1377, "step": 4381 }, { "epoch": 4.837106570955274, "grad_norm": 0.7755545973777771, "learning_rate": 1.3137000000000001e-05, "loss": 0.0826, "step": 4382 }, { "epoch": 4.838210933186085, "grad_norm": 0.7085301876068115, "learning_rate": 1.314e-05, "loss": 0.067, "step": 4383 }, { "epoch": 4.839315295416897, "grad_norm": 0.5957307815551758, "learning_rate": 1.3143e-05, "loss": 0.0706, "step": 4384 }, { "epoch": 4.8404196576477085, "grad_norm": 0.6857950091362, "learning_rate": 1.3146e-05, "loss": 0.1014, "step": 4385 }, { "epoch": 4.84152401987852, "grad_norm": 0.5873559713363647, "learning_rate": 1.3149e-05, "loss": 0.0618, "step": 4386 }, { "epoch": 4.842628382109332, "grad_norm": 0.5430670976638794, "learning_rate": 1.3152e-05, "loss": 0.0468, "step": 4387 }, { "epoch": 4.8437327443401434, "grad_norm": 0.5176882147789001, "learning_rate": 1.3155e-05, "loss": 0.0421, "step": 4388 }, { "epoch": 4.844837106570955, "grad_norm": 0.414989709854126, "learning_rate": 1.3158e-05, "loss": 0.0393, "step": 4389 }, { "epoch": 4.845941468801767, "grad_norm": 0.5742974281311035, 
"learning_rate": 1.3161e-05, "loss": 0.0401, "step": 4390 }, { "epoch": 4.847045831032578, "grad_norm": 0.5658702254295349, "learning_rate": 1.3164000000000001e-05, "loss": 0.0369, "step": 4391 }, { "epoch": 4.848150193263391, "grad_norm": 0.41624611616134644, "learning_rate": 1.3167000000000001e-05, "loss": 0.0275, "step": 4392 }, { "epoch": 4.849254555494202, "grad_norm": 0.5271943807601929, "learning_rate": 1.3170000000000001e-05, "loss": 0.0295, "step": 4393 }, { "epoch": 4.850358917725014, "grad_norm": 0.5146329402923584, "learning_rate": 1.3173e-05, "loss": 0.037, "step": 4394 }, { "epoch": 4.8514632799558255, "grad_norm": 0.860402524471283, "learning_rate": 1.3175999999999999e-05, "loss": 0.0626, "step": 4395 }, { "epoch": 4.852567642186637, "grad_norm": 0.4473738968372345, "learning_rate": 1.3179e-05, "loss": 0.0207, "step": 4396 }, { "epoch": 4.853672004417449, "grad_norm": 0.6407274007797241, "learning_rate": 1.3182e-05, "loss": 0.0581, "step": 4397 }, { "epoch": 4.85477636664826, "grad_norm": 0.5375542044639587, "learning_rate": 1.3185e-05, "loss": 0.033, "step": 4398 }, { "epoch": 4.855880728879073, "grad_norm": 0.5061052441596985, "learning_rate": 1.3188e-05, "loss": 0.0357, "step": 4399 }, { "epoch": 4.856985091109884, "grad_norm": 0.6074802875518799, "learning_rate": 1.3191e-05, "loss": 0.0425, "step": 4400 }, { "epoch": 4.858089453340696, "grad_norm": 0.5799782872200012, "learning_rate": 1.3194000000000001e-05, "loss": 0.0756, "step": 4401 }, { "epoch": 4.8591938155715075, "grad_norm": 0.8749635219573975, "learning_rate": 1.3197000000000001e-05, "loss": 0.0383, "step": 4402 }, { "epoch": 4.860298177802319, "grad_norm": 0.5060496926307678, "learning_rate": 1.32e-05, "loss": 0.0375, "step": 4403 }, { "epoch": 4.861402540033131, "grad_norm": 0.38940420746803284, "learning_rate": 1.3203e-05, "loss": 0.0287, "step": 4404 }, { "epoch": 4.862506902263942, "grad_norm": 0.5514257550239563, "learning_rate": 1.3206e-05, "loss": 0.035, "step": 4405 }, { 
"epoch": 4.863611264494754, "grad_norm": 0.893438994884491, "learning_rate": 1.3209000000000002e-05, "loss": 0.0531, "step": 4406 }, { "epoch": 4.864715626725566, "grad_norm": 0.8046233057975769, "learning_rate": 1.3212000000000002e-05, "loss": 0.0524, "step": 4407 }, { "epoch": 4.865819988956377, "grad_norm": 0.7840649485588074, "learning_rate": 1.3215e-05, "loss": 0.0366, "step": 4408 }, { "epoch": 4.86692435118719, "grad_norm": 0.7758929133415222, "learning_rate": 1.3218e-05, "loss": 0.0417, "step": 4409 }, { "epoch": 4.868028713418001, "grad_norm": 0.8044251203536987, "learning_rate": 1.3221e-05, "loss": 0.0309, "step": 4410 }, { "epoch": 4.869133075648813, "grad_norm": 1.683663010597229, "learning_rate": 1.3224e-05, "loss": 0.0651, "step": 4411 }, { "epoch": 4.8702374378796245, "grad_norm": 0.6009178757667542, "learning_rate": 1.3227e-05, "loss": 0.0279, "step": 4412 }, { "epoch": 4.871341800110436, "grad_norm": 0.7004084587097168, "learning_rate": 1.323e-05, "loss": 0.0371, "step": 4413 }, { "epoch": 4.872446162341248, "grad_norm": 0.9604376554489136, "learning_rate": 1.3233e-05, "loss": 0.0656, "step": 4414 }, { "epoch": 4.873550524572059, "grad_norm": 0.5757260322570801, "learning_rate": 1.3236e-05, "loss": 0.0384, "step": 4415 }, { "epoch": 4.874654886802872, "grad_norm": 0.5436621308326721, "learning_rate": 1.3239000000000001e-05, "loss": 0.0331, "step": 4416 }, { "epoch": 4.875759249033683, "grad_norm": 0.5458097457885742, "learning_rate": 1.3242000000000001e-05, "loss": 0.0382, "step": 4417 }, { "epoch": 4.876863611264495, "grad_norm": 1.2308903932571411, "learning_rate": 1.3245000000000001e-05, "loss": 0.0658, "step": 4418 }, { "epoch": 4.8779679734953065, "grad_norm": 0.8925753831863403, "learning_rate": 1.3248000000000001e-05, "loss": 0.0654, "step": 4419 }, { "epoch": 4.879072335726118, "grad_norm": 0.6302828192710876, "learning_rate": 1.3250999999999999e-05, "loss": 0.0468, "step": 4420 }, { "epoch": 4.88017669795693, "grad_norm": 
0.7270291447639465, "learning_rate": 1.3254e-05, "loss": 0.0585, "step": 4421 }, { "epoch": 4.881281060187741, "grad_norm": 0.837143063545227, "learning_rate": 1.3257e-05, "loss": 0.0486, "step": 4422 }, { "epoch": 4.882385422418554, "grad_norm": 0.974802553653717, "learning_rate": 1.326e-05, "loss": 0.0679, "step": 4423 }, { "epoch": 4.883489784649365, "grad_norm": 0.775786280632019, "learning_rate": 1.3263e-05, "loss": 0.0542, "step": 4424 }, { "epoch": 4.884594146880176, "grad_norm": 1.0595550537109375, "learning_rate": 1.3266e-05, "loss": 0.3299, "step": 4425 }, { "epoch": 4.885698509110989, "grad_norm": 0.9318224191665649, "learning_rate": 1.3269000000000001e-05, "loss": 0.277, "step": 4426 }, { "epoch": 4.8868028713418, "grad_norm": 0.8348898887634277, "learning_rate": 1.3272000000000001e-05, "loss": 0.2206, "step": 4427 }, { "epoch": 4.887907233572612, "grad_norm": 0.8486049175262451, "learning_rate": 1.3275e-05, "loss": 0.1918, "step": 4428 }, { "epoch": 4.8890115958034235, "grad_norm": 1.0105884075164795, "learning_rate": 1.3278e-05, "loss": 0.2097, "step": 4429 }, { "epoch": 4.890115958034235, "grad_norm": 0.5728562474250793, "learning_rate": 1.3281e-05, "loss": 0.1056, "step": 4430 }, { "epoch": 4.891220320265047, "grad_norm": 0.7045697569847107, "learning_rate": 1.3284000000000002e-05, "loss": 0.1468, "step": 4431 }, { "epoch": 4.892324682495858, "grad_norm": 0.8880488872528076, "learning_rate": 1.3287e-05, "loss": 0.1966, "step": 4432 }, { "epoch": 4.893429044726671, "grad_norm": 0.7126341462135315, "learning_rate": 1.329e-05, "loss": 0.1052, "step": 4433 }, { "epoch": 4.894533406957482, "grad_norm": 0.4579137861728668, "learning_rate": 1.3293e-05, "loss": 0.0665, "step": 4434 }, { "epoch": 4.895637769188294, "grad_norm": 0.5450255870819092, "learning_rate": 1.3296e-05, "loss": 0.0529, "step": 4435 }, { "epoch": 4.8967421314191055, "grad_norm": 0.5038415193557739, "learning_rate": 1.3299000000000001e-05, "loss": 0.0613, "step": 4436 }, { "epoch": 
4.897846493649917, "grad_norm": 0.4623507857322693, "learning_rate": 1.3302e-05, "loss": 0.0528, "step": 4437 }, { "epoch": 4.898950855880729, "grad_norm": 0.47298920154571533, "learning_rate": 1.3305e-05, "loss": 0.0511, "step": 4438 }, { "epoch": 4.90005521811154, "grad_norm": 0.4194391071796417, "learning_rate": 1.3308e-05, "loss": 0.0302, "step": 4439 }, { "epoch": 4.901159580342353, "grad_norm": 0.5841384530067444, "learning_rate": 1.3311e-05, "loss": 0.0451, "step": 4440 }, { "epoch": 4.902263942573164, "grad_norm": 0.5827933549880981, "learning_rate": 1.3314e-05, "loss": 0.0484, "step": 4441 }, { "epoch": 4.903368304803975, "grad_norm": 0.5169711709022522, "learning_rate": 1.3317000000000001e-05, "loss": 0.0388, "step": 4442 }, { "epoch": 4.904472667034788, "grad_norm": 0.49782615900039673, "learning_rate": 1.3320000000000001e-05, "loss": 0.0384, "step": 4443 }, { "epoch": 4.905577029265599, "grad_norm": 0.598872184753418, "learning_rate": 1.3323000000000001e-05, "loss": 0.0339, "step": 4444 }, { "epoch": 4.906681391496411, "grad_norm": 0.652538001537323, "learning_rate": 1.3325999999999999e-05, "loss": 0.0266, "step": 4445 }, { "epoch": 4.9077857537272225, "grad_norm": 0.8040586709976196, "learning_rate": 1.3328999999999999e-05, "loss": 0.0799, "step": 4446 }, { "epoch": 4.908890115958034, "grad_norm": 0.5072380304336548, "learning_rate": 1.3332e-05, "loss": 0.0514, "step": 4447 }, { "epoch": 4.909994478188846, "grad_norm": 1.7777432203292847, "learning_rate": 1.3335e-05, "loss": 0.1076, "step": 4448 }, { "epoch": 4.911098840419657, "grad_norm": 0.7477000951766968, "learning_rate": 1.3338e-05, "loss": 0.0293, "step": 4449 }, { "epoch": 4.91220320265047, "grad_norm": 0.46810469031333923, "learning_rate": 1.3341e-05, "loss": 0.0353, "step": 4450 }, { "epoch": 4.913307564881281, "grad_norm": 0.5793780088424683, "learning_rate": 1.3344e-05, "loss": 0.0335, "step": 4451 }, { "epoch": 4.914411927112093, "grad_norm": 0.5649362802505493, "learning_rate": 
1.3347000000000001e-05, "loss": 0.0299, "step": 4452 }, { "epoch": 4.9155162893429045, "grad_norm": 0.537900984287262, "learning_rate": 1.3350000000000001e-05, "loss": 0.0443, "step": 4453 }, { "epoch": 4.916620651573716, "grad_norm": 0.4001084268093109, "learning_rate": 1.3353e-05, "loss": 0.027, "step": 4454 }, { "epoch": 4.917725013804528, "grad_norm": 0.5472209453582764, "learning_rate": 1.3356e-05, "loss": 0.0539, "step": 4455 }, { "epoch": 4.918829376035339, "grad_norm": 0.3341677784919739, "learning_rate": 1.3359e-05, "loss": 0.0201, "step": 4456 }, { "epoch": 4.919933738266152, "grad_norm": 0.34724166989326477, "learning_rate": 1.3362e-05, "loss": 0.0265, "step": 4457 }, { "epoch": 4.921038100496963, "grad_norm": 0.8604018092155457, "learning_rate": 1.3365e-05, "loss": 0.0401, "step": 4458 }, { "epoch": 4.922142462727774, "grad_norm": 0.7807360887527466, "learning_rate": 1.3368e-05, "loss": 0.0453, "step": 4459 }, { "epoch": 4.923246824958587, "grad_norm": 0.7913199663162231, "learning_rate": 1.3371e-05, "loss": 0.0443, "step": 4460 }, { "epoch": 4.924351187189398, "grad_norm": 0.9827150702476501, "learning_rate": 1.3374e-05, "loss": 0.0377, "step": 4461 }, { "epoch": 4.92545554942021, "grad_norm": 0.702349066734314, "learning_rate": 1.3377e-05, "loss": 0.0379, "step": 4462 }, { "epoch": 4.9265599116510215, "grad_norm": 0.7651888132095337, "learning_rate": 1.338e-05, "loss": 0.0576, "step": 4463 }, { "epoch": 4.927664273881833, "grad_norm": 0.5789722204208374, "learning_rate": 1.3383e-05, "loss": 0.0349, "step": 4464 }, { "epoch": 4.928768636112645, "grad_norm": 0.8999813199043274, "learning_rate": 1.3386e-05, "loss": 0.0458, "step": 4465 }, { "epoch": 4.929872998343456, "grad_norm": 0.5942460894584656, "learning_rate": 1.3389e-05, "loss": 0.0366, "step": 4466 }, { "epoch": 4.930977360574269, "grad_norm": 0.5195339322090149, "learning_rate": 1.3392000000000002e-05, "loss": 0.0324, "step": 4467 }, { "epoch": 4.93208172280508, "grad_norm": 0.9087166786193848, 
"learning_rate": 1.3395000000000001e-05, "loss": 0.0497, "step": 4468 }, { "epoch": 4.933186085035892, "grad_norm": 0.7818845510482788, "learning_rate": 1.3398e-05, "loss": 0.0482, "step": 4469 }, { "epoch": 4.9342904472667035, "grad_norm": 0.7557216286659241, "learning_rate": 1.3401e-05, "loss": 0.0538, "step": 4470 }, { "epoch": 4.935394809497515, "grad_norm": 0.7528947591781616, "learning_rate": 1.3403999999999999e-05, "loss": 0.0383, "step": 4471 }, { "epoch": 4.936499171728327, "grad_norm": 0.6829245686531067, "learning_rate": 1.3407e-05, "loss": 0.0373, "step": 4472 }, { "epoch": 4.937603533959138, "grad_norm": 1.089432716369629, "learning_rate": 1.341e-05, "loss": 0.0751, "step": 4473 }, { "epoch": 4.938707896189951, "grad_norm": 0.9838153719902039, "learning_rate": 1.3413e-05, "loss": 0.0529, "step": 4474 }, { "epoch": 4.939812258420762, "grad_norm": 2.849639892578125, "learning_rate": 1.3416e-05, "loss": 0.4431, "step": 4475 }, { "epoch": 4.940916620651573, "grad_norm": 1.3079659938812256, "learning_rate": 1.3419e-05, "loss": 0.2701, "step": 4476 }, { "epoch": 4.942020982882386, "grad_norm": 1.38360595703125, "learning_rate": 1.3422000000000001e-05, "loss": 0.2666, "step": 4477 }, { "epoch": 4.943125345113197, "grad_norm": 1.005631446838379, "learning_rate": 1.3425000000000001e-05, "loss": 0.2222, "step": 4478 }, { "epoch": 4.944229707344009, "grad_norm": 1.3104108572006226, "learning_rate": 1.3428000000000001e-05, "loss": 0.2557, "step": 4479 }, { "epoch": 4.9453340695748205, "grad_norm": 1.3360174894332886, "learning_rate": 1.3431e-05, "loss": 0.279, "step": 4480 }, { "epoch": 4.946438431805632, "grad_norm": 0.9608262181282043, "learning_rate": 1.3433999999999999e-05, "loss": 0.1618, "step": 4481 }, { "epoch": 4.947542794036444, "grad_norm": 1.0650084018707275, "learning_rate": 1.3437e-05, "loss": 0.1167, "step": 4482 }, { "epoch": 4.948647156267255, "grad_norm": 0.6270355582237244, "learning_rate": 1.344e-05, "loss": 0.0698, "step": 4483 }, { "epoch": 
4.949751518498068, "grad_norm": 0.8827064633369446, "learning_rate": 1.3443e-05, "loss": 0.0841, "step": 4484 }, { "epoch": 4.950855880728879, "grad_norm": 0.645374596118927, "learning_rate": 1.3446e-05, "loss": 0.0861, "step": 4485 }, { "epoch": 4.951960242959691, "grad_norm": 0.5767124891281128, "learning_rate": 1.3449e-05, "loss": 0.0515, "step": 4486 }, { "epoch": 4.9530646051905025, "grad_norm": 0.4547816216945648, "learning_rate": 1.3452000000000001e-05, "loss": 0.0424, "step": 4487 }, { "epoch": 4.954168967421314, "grad_norm": 0.5953447818756104, "learning_rate": 1.3455e-05, "loss": 0.0414, "step": 4488 }, { "epoch": 4.955273329652126, "grad_norm": 1.612650990486145, "learning_rate": 1.3458e-05, "loss": 0.0542, "step": 4489 }, { "epoch": 4.956377691882937, "grad_norm": 0.5804612636566162, "learning_rate": 1.3461e-05, "loss": 0.0476, "step": 4490 }, { "epoch": 4.95748205411375, "grad_norm": 0.49903878569602966, "learning_rate": 1.3464e-05, "loss": 0.0306, "step": 4491 }, { "epoch": 4.958586416344561, "grad_norm": 0.6492904424667358, "learning_rate": 1.3467000000000002e-05, "loss": 0.0522, "step": 4492 }, { "epoch": 4.959690778575373, "grad_norm": 1.218979001045227, "learning_rate": 1.3470000000000001e-05, "loss": 0.0432, "step": 4493 }, { "epoch": 4.9607951408061846, "grad_norm": 0.5304592251777649, "learning_rate": 1.3473e-05, "loss": 0.025, "step": 4494 }, { "epoch": 4.961899503036996, "grad_norm": 0.452303022146225, "learning_rate": 1.3476e-05, "loss": 0.03, "step": 4495 }, { "epoch": 4.963003865267808, "grad_norm": 0.6696497797966003, "learning_rate": 1.3479e-05, "loss": 0.0471, "step": 4496 }, { "epoch": 4.9641082274986195, "grad_norm": 0.4660654664039612, "learning_rate": 1.3482e-05, "loss": 0.0445, "step": 4497 }, { "epoch": 4.965212589729431, "grad_norm": 0.758362889289856, "learning_rate": 1.3485e-05, "loss": 0.025, "step": 4498 }, { "epoch": 4.966316951960243, "grad_norm": 0.5671303272247314, "learning_rate": 1.3488e-05, "loss": 0.0464, "step": 4499 
}, { "epoch": 4.967421314191054, "grad_norm": 0.5105318427085876, "learning_rate": 1.3491e-05, "loss": 0.0273, "step": 4500 }, { "epoch": 4.968525676421867, "grad_norm": 0.7844016551971436, "learning_rate": 1.3494e-05, "loss": 0.0407, "step": 4501 }, { "epoch": 4.969630038652678, "grad_norm": 0.6506255865097046, "learning_rate": 1.3497000000000001e-05, "loss": 0.0384, "step": 4502 }, { "epoch": 4.97073440088349, "grad_norm": 0.5604398250579834, "learning_rate": 1.3500000000000001e-05, "loss": 0.0384, "step": 4503 }, { "epoch": 4.9718387631143015, "grad_norm": 0.6370332837104797, "learning_rate": 1.3503000000000001e-05, "loss": 0.0513, "step": 4504 }, { "epoch": 4.972943125345113, "grad_norm": 0.4377123713493347, "learning_rate": 1.3506e-05, "loss": 0.0395, "step": 4505 }, { "epoch": 4.974047487575925, "grad_norm": 0.36411142349243164, "learning_rate": 1.3508999999999999e-05, "loss": 0.0239, "step": 4506 }, { "epoch": 4.975151849806736, "grad_norm": 0.5854771137237549, "learning_rate": 1.3512e-05, "loss": 0.0399, "step": 4507 }, { "epoch": 4.976256212037549, "grad_norm": 0.48184898495674133, "learning_rate": 1.3515e-05, "loss": 0.0303, "step": 4508 }, { "epoch": 4.97736057426836, "grad_norm": 0.6388391256332397, "learning_rate": 1.3518e-05, "loss": 0.0569, "step": 4509 }, { "epoch": 4.978464936499172, "grad_norm": 0.8490722179412842, "learning_rate": 1.3521e-05, "loss": 0.0506, "step": 4510 }, { "epoch": 4.9795692987299836, "grad_norm": 0.5555480122566223, "learning_rate": 1.3524e-05, "loss": 0.0333, "step": 4511 }, { "epoch": 4.980673660960795, "grad_norm": 0.5900985598564148, "learning_rate": 1.3527000000000001e-05, "loss": 0.0469, "step": 4512 }, { "epoch": 4.981778023191607, "grad_norm": 0.7631883025169373, "learning_rate": 1.3530000000000001e-05, "loss": 0.0496, "step": 4513 }, { "epoch": 4.9828823854224185, "grad_norm": 0.8042914271354675, "learning_rate": 1.3533e-05, "loss": 0.0232, "step": 4514 }, { "epoch": 4.98398674765323, "grad_norm": 0.3894449472427368, 
"learning_rate": 1.3536e-05, "loss": 0.0253, "step": 4515 }, { "epoch": 4.985091109884042, "grad_norm": 0.5825198888778687, "learning_rate": 1.3539e-05, "loss": 0.037, "step": 4516 }, { "epoch": 4.986195472114853, "grad_norm": 0.784947395324707, "learning_rate": 1.3542000000000002e-05, "loss": 0.0621, "step": 4517 }, { "epoch": 4.987299834345666, "grad_norm": 0.8889123201370239, "learning_rate": 1.3545e-05, "loss": 0.0495, "step": 4518 }, { "epoch": 4.988404196576477, "grad_norm": 0.5751716494560242, "learning_rate": 1.3548e-05, "loss": 0.0358, "step": 4519 }, { "epoch": 4.989508558807289, "grad_norm": 0.6602947115898132, "learning_rate": 1.3551e-05, "loss": 0.037, "step": 4520 }, { "epoch": 4.9906129210381005, "grad_norm": 0.5417563915252686, "learning_rate": 1.3554e-05, "loss": 0.0366, "step": 4521 }, { "epoch": 4.991717283268912, "grad_norm": 0.7954930663108826, "learning_rate": 1.3557e-05, "loss": 0.0581, "step": 4522 }, { "epoch": 4.992821645499724, "grad_norm": 1.3677654266357422, "learning_rate": 1.356e-05, "loss": 0.0398, "step": 4523 }, { "epoch": 4.993926007730535, "grad_norm": 0.9332214593887329, "learning_rate": 1.3563e-05, "loss": 0.0595, "step": 4524 }, { "epoch": 4.995030369961348, "grad_norm": 0.6290117502212524, "learning_rate": 1.3566e-05, "loss": 0.1123, "step": 4525 }, { "epoch": 4.996134732192159, "grad_norm": 0.8748478293418884, "learning_rate": 1.3569e-05, "loss": 0.0312, "step": 4526 }, { "epoch": 4.997239094422971, "grad_norm": 0.44953978061676025, "learning_rate": 1.3572000000000002e-05, "loss": 0.0304, "step": 4527 }, { "epoch": 4.9983434566537825, "grad_norm": 0.4584283232688904, "learning_rate": 1.3575000000000001e-05, "loss": 0.0288, "step": 4528 }, { "epoch": 4.999447818884594, "grad_norm": 0.5965391993522644, "learning_rate": 1.3578000000000001e-05, "loss": 0.0357, "step": 4529 }, { "epoch": 5.0, "grad_norm": 0.8818778395652771, "learning_rate": 1.3581000000000001e-05, "loss": 0.0234, "step": 4530 }, { "epoch": 5.001104362230811, 
"grad_norm": 1.4399293661117554, "learning_rate": 1.3583999999999999e-05, "loss": 0.321, "step": 4531 }, { "epoch": 5.002208724461624, "grad_norm": 1.0131076574325562, "learning_rate": 1.3587e-05, "loss": 0.3349, "step": 4532 }, { "epoch": 5.003313086692435, "grad_norm": 1.4127328395843506, "learning_rate": 1.359e-05, "loss": 0.3462, "step": 4533 }, { "epoch": 5.004417448923247, "grad_norm": 0.9935476183891296, "learning_rate": 1.3593e-05, "loss": 0.1925, "step": 4534 }, { "epoch": 5.0055218111540585, "grad_norm": 0.7645038366317749, "learning_rate": 1.3596e-05, "loss": 0.1302, "step": 4535 }, { "epoch": 5.00662617338487, "grad_norm": 1.5229988098144531, "learning_rate": 1.3599e-05, "loss": 0.1566, "step": 4536 }, { "epoch": 5.007730535615682, "grad_norm": 1.641744613647461, "learning_rate": 1.3602000000000001e-05, "loss": 0.1316, "step": 4537 }, { "epoch": 5.008834897846493, "grad_norm": 0.9323728680610657, "learning_rate": 1.3605000000000001e-05, "loss": 0.0981, "step": 4538 }, { "epoch": 5.009939260077306, "grad_norm": 0.5979220867156982, "learning_rate": 1.3608e-05, "loss": 0.0502, "step": 4539 }, { "epoch": 5.011043622308117, "grad_norm": 0.6029958128929138, "learning_rate": 1.3611e-05, "loss": 0.057, "step": 4540 }, { "epoch": 5.012147984538929, "grad_norm": 0.7058076858520508, "learning_rate": 1.3614e-05, "loss": 0.0529, "step": 4541 }, { "epoch": 5.0132523467697405, "grad_norm": 0.5155399441719055, "learning_rate": 1.3617000000000002e-05, "loss": 0.0365, "step": 4542 }, { "epoch": 5.014356709000552, "grad_norm": 0.5953194499015808, "learning_rate": 1.362e-05, "loss": 0.0335, "step": 4543 }, { "epoch": 5.015461071231364, "grad_norm": 0.5129544138908386, "learning_rate": 1.3623e-05, "loss": 0.0438, "step": 4544 }, { "epoch": 5.016565433462175, "grad_norm": 0.47210484743118286, "learning_rate": 1.3626e-05, "loss": 0.0383, "step": 4545 }, { "epoch": 5.017669795692988, "grad_norm": 0.5349909067153931, "learning_rate": 1.3629e-05, "loss": 0.0218, "step": 4546 }, 
{ "epoch": 5.018774157923799, "grad_norm": 0.3990004360675812, "learning_rate": 1.3632000000000001e-05, "loss": 0.0302, "step": 4547 }, { "epoch": 5.01987852015461, "grad_norm": 0.49174436926841736, "learning_rate": 1.3635e-05, "loss": 0.0349, "step": 4548 }, { "epoch": 5.020982882385423, "grad_norm": 0.5766555070877075, "learning_rate": 1.3638e-05, "loss": 0.0398, "step": 4549 }, { "epoch": 5.022087244616234, "grad_norm": 0.4504316449165344, "learning_rate": 1.3641e-05, "loss": 0.0279, "step": 4550 }, { "epoch": 5.023191606847046, "grad_norm": 0.5351954698562622, "learning_rate": 1.3644e-05, "loss": 0.0258, "step": 4551 }, { "epoch": 5.0242959690778575, "grad_norm": 0.5392816066741943, "learning_rate": 1.3647000000000002e-05, "loss": 0.0243, "step": 4552 }, { "epoch": 5.025400331308669, "grad_norm": 0.6269030570983887, "learning_rate": 1.3650000000000001e-05, "loss": 0.03, "step": 4553 }, { "epoch": 5.026504693539481, "grad_norm": 0.4884231686592102, "learning_rate": 1.3653000000000001e-05, "loss": 0.0424, "step": 4554 }, { "epoch": 5.027609055770292, "grad_norm": 0.6359586119651794, "learning_rate": 1.3656e-05, "loss": 0.0353, "step": 4555 }, { "epoch": 5.028713418001105, "grad_norm": 0.7432135343551636, "learning_rate": 1.3659e-05, "loss": 0.0383, "step": 4556 }, { "epoch": 5.029817780231916, "grad_norm": 0.812377393245697, "learning_rate": 1.3662e-05, "loss": 0.0288, "step": 4557 }, { "epoch": 5.030922142462728, "grad_norm": 0.668122410774231, "learning_rate": 1.3665e-05, "loss": 0.0357, "step": 4558 }, { "epoch": 5.0320265046935395, "grad_norm": 0.4699881076812744, "learning_rate": 1.3668e-05, "loss": 0.0255, "step": 4559 }, { "epoch": 5.033130866924351, "grad_norm": 0.33725202083587646, "learning_rate": 1.3671e-05, "loss": 0.0169, "step": 4560 }, { "epoch": 5.034235229155163, "grad_norm": 0.40139931440353394, "learning_rate": 1.3674e-05, "loss": 0.0234, "step": 4561 }, { "epoch": 5.035339591385974, "grad_norm": 0.8204588294029236, "learning_rate": 
1.3677000000000001e-05, "loss": 0.0371, "step": 4562 }, { "epoch": 5.036443953616787, "grad_norm": 0.5235228538513184, "learning_rate": 1.3680000000000001e-05, "loss": 0.0189, "step": 4563 }, { "epoch": 5.037548315847598, "grad_norm": 0.5879145264625549, "learning_rate": 1.3683000000000001e-05, "loss": 0.0323, "step": 4564 }, { "epoch": 5.038652678078409, "grad_norm": 0.5658747553825378, "learning_rate": 1.3686e-05, "loss": 0.0265, "step": 4565 }, { "epoch": 5.039757040309222, "grad_norm": 0.47372931241989136, "learning_rate": 1.3689e-05, "loss": 0.034, "step": 4566 }, { "epoch": 5.040861402540033, "grad_norm": 0.783696711063385, "learning_rate": 1.3691999999999999e-05, "loss": 0.039, "step": 4567 }, { "epoch": 5.041965764770845, "grad_norm": 0.4622023105621338, "learning_rate": 1.3695e-05, "loss": 0.0288, "step": 4568 }, { "epoch": 5.0430701270016565, "grad_norm": 0.42887410521507263, "learning_rate": 1.3698e-05, "loss": 0.0294, "step": 4569 }, { "epoch": 5.044174489232468, "grad_norm": 0.5971174240112305, "learning_rate": 1.3701e-05, "loss": 0.0279, "step": 4570 }, { "epoch": 5.04527885146328, "grad_norm": 0.7020743489265442, "learning_rate": 1.3704e-05, "loss": 0.0401, "step": 4571 }, { "epoch": 5.046383213694091, "grad_norm": 0.4918578863143921, "learning_rate": 1.3707e-05, "loss": 0.0236, "step": 4572 }, { "epoch": 5.047487575924904, "grad_norm": 0.5859169363975525, "learning_rate": 1.3710000000000001e-05, "loss": 0.0421, "step": 4573 }, { "epoch": 5.048591938155715, "grad_norm": 0.5782279372215271, "learning_rate": 1.3713e-05, "loss": 0.0353, "step": 4574 }, { "epoch": 5.049696300386527, "grad_norm": 0.6183058619499207, "learning_rate": 1.3716e-05, "loss": 0.0272, "step": 4575 }, { "epoch": 5.0508006626173385, "grad_norm": 0.5792909264564514, "learning_rate": 1.3719e-05, "loss": 0.0329, "step": 4576 }, { "epoch": 5.05190502484815, "grad_norm": 0.5096104741096497, "learning_rate": 1.3722e-05, "loss": 0.0337, "step": 4577 }, { "epoch": 5.053009387078962, 
"grad_norm": 0.5349365472793579, "learning_rate": 1.3725000000000002e-05, "loss": 0.0244, "step": 4578 }, { "epoch": 5.054113749309773, "grad_norm": 0.5413799285888672, "learning_rate": 1.3728000000000001e-05, "loss": 0.0225, "step": 4579 }, { "epoch": 5.055218111540586, "grad_norm": 1.1500910520553589, "learning_rate": 1.3731e-05, "loss": 0.0731, "step": 4580 }, { "epoch": 5.056322473771397, "grad_norm": 1.6169637441635132, "learning_rate": 1.3734e-05, "loss": 0.3991, "step": 4581 }, { "epoch": 5.057426836002208, "grad_norm": 0.9681587219238281, "learning_rate": 1.3736999999999999e-05, "loss": 0.25, "step": 4582 }, { "epoch": 5.0585311982330206, "grad_norm": 1.2659486532211304, "learning_rate": 1.374e-05, "loss": 0.2532, "step": 4583 }, { "epoch": 5.059635560463832, "grad_norm": 0.8132521510124207, "learning_rate": 1.3743e-05, "loss": 0.2011, "step": 4584 }, { "epoch": 5.060739922694644, "grad_norm": 0.8838621377944946, "learning_rate": 1.3746e-05, "loss": 0.2117, "step": 4585 }, { "epoch": 5.0618442849254555, "grad_norm": 0.8421056866645813, "learning_rate": 1.3749e-05, "loss": 0.1637, "step": 4586 }, { "epoch": 5.062948647156268, "grad_norm": 0.7856550812721252, "learning_rate": 1.3752e-05, "loss": 0.1332, "step": 4587 }, { "epoch": 5.064053009387079, "grad_norm": 0.8266968727111816, "learning_rate": 1.3755000000000001e-05, "loss": 0.0858, "step": 4588 }, { "epoch": 5.06515737161789, "grad_norm": 0.822523295879364, "learning_rate": 1.3758000000000001e-05, "loss": 0.0476, "step": 4589 }, { "epoch": 5.066261733848703, "grad_norm": 1.4085620641708374, "learning_rate": 1.3761000000000001e-05, "loss": 0.0904, "step": 4590 }, { "epoch": 5.067366096079514, "grad_norm": 0.5370631217956543, "learning_rate": 1.3764e-05, "loss": 0.0674, "step": 4591 }, { "epoch": 5.068470458310326, "grad_norm": 0.6657488942146301, "learning_rate": 1.3766999999999999e-05, "loss": 0.0536, "step": 4592 }, { "epoch": 5.0695748205411375, "grad_norm": 0.37407490611076355, "learning_rate": 
1.377e-05, "loss": 0.0504, "step": 4593 }, { "epoch": 5.070679182771949, "grad_norm": 0.4180389642715454, "learning_rate": 1.3773e-05, "loss": 0.0242, "step": 4594 }, { "epoch": 5.071783545002761, "grad_norm": 0.6695135235786438, "learning_rate": 1.3776e-05, "loss": 0.0399, "step": 4595 }, { "epoch": 5.072887907233572, "grad_norm": 0.49924904108047485, "learning_rate": 1.3779e-05, "loss": 0.032, "step": 4596 }, { "epoch": 5.073992269464385, "grad_norm": 0.500236988067627, "learning_rate": 1.3782e-05, "loss": 0.0225, "step": 4597 }, { "epoch": 5.075096631695196, "grad_norm": 0.46062010526657104, "learning_rate": 1.3785000000000001e-05, "loss": 0.0212, "step": 4598 }, { "epoch": 5.076200993926007, "grad_norm": 0.4062208831310272, "learning_rate": 1.3788e-05, "loss": 0.0203, "step": 4599 }, { "epoch": 5.0773053561568195, "grad_norm": 0.3337559700012207, "learning_rate": 1.3791e-05, "loss": 0.0222, "step": 4600 }, { "epoch": 5.078409718387631, "grad_norm": 0.592441201210022, "learning_rate": 1.3794e-05, "loss": 0.0323, "step": 4601 }, { "epoch": 5.079514080618443, "grad_norm": 1.5592936277389526, "learning_rate": 1.3797e-05, "loss": 0.0327, "step": 4602 }, { "epoch": 5.0806184428492545, "grad_norm": 0.7550548315048218, "learning_rate": 1.3800000000000002e-05, "loss": 0.0223, "step": 4603 }, { "epoch": 5.081722805080067, "grad_norm": 0.643032968044281, "learning_rate": 1.3803e-05, "loss": 0.0215, "step": 4604 }, { "epoch": 5.082827167310878, "grad_norm": 0.39368897676467896, "learning_rate": 1.3806e-05, "loss": 0.0257, "step": 4605 }, { "epoch": 5.083931529541689, "grad_norm": 0.5415320992469788, "learning_rate": 1.3809e-05, "loss": 0.0171, "step": 4606 }, { "epoch": 5.085035891772502, "grad_norm": 0.5570206046104431, "learning_rate": 1.3812e-05, "loss": 0.0226, "step": 4607 }, { "epoch": 5.086140254003313, "grad_norm": 0.24740959703922272, "learning_rate": 1.3815e-05, "loss": 0.0205, "step": 4608 }, { "epoch": 5.087244616234125, "grad_norm": 0.9807864427566528, 
"learning_rate": 1.3818e-05, "loss": 0.0344, "step": 4609 }, { "epoch": 5.0883489784649365, "grad_norm": 0.6165496110916138, "learning_rate": 1.3821e-05, "loss": 0.0298, "step": 4610 }, { "epoch": 5.089453340695748, "grad_norm": 0.42018190026283264, "learning_rate": 1.3824e-05, "loss": 0.0145, "step": 4611 }, { "epoch": 5.09055770292656, "grad_norm": 0.7400372624397278, "learning_rate": 1.3827e-05, "loss": 0.0317, "step": 4612 }, { "epoch": 5.091662065157371, "grad_norm": 0.5287002325057983, "learning_rate": 1.3830000000000001e-05, "loss": 0.0346, "step": 4613 }, { "epoch": 5.092766427388184, "grad_norm": 1.1216516494750977, "learning_rate": 1.3833000000000001e-05, "loss": 0.0417, "step": 4614 }, { "epoch": 5.093870789618995, "grad_norm": 1.3637299537658691, "learning_rate": 1.3836000000000001e-05, "loss": 0.109, "step": 4615 }, { "epoch": 5.094975151849806, "grad_norm": 0.9130722880363464, "learning_rate": 1.3839e-05, "loss": 0.0275, "step": 4616 }, { "epoch": 5.0960795140806185, "grad_norm": 1.3386332988739014, "learning_rate": 1.3841999999999999e-05, "loss": 0.0301, "step": 4617 }, { "epoch": 5.09718387631143, "grad_norm": 0.5717974305152893, "learning_rate": 1.3845e-05, "loss": 0.0388, "step": 4618 }, { "epoch": 5.098288238542242, "grad_norm": 0.2991066575050354, "learning_rate": 1.3848e-05, "loss": 0.0123, "step": 4619 }, { "epoch": 5.0993926007730535, "grad_norm": 0.7295675873756409, "learning_rate": 1.3851e-05, "loss": 0.036, "step": 4620 }, { "epoch": 5.100496963003866, "grad_norm": 0.8172610402107239, "learning_rate": 1.3854e-05, "loss": 0.03, "step": 4621 }, { "epoch": 5.101601325234677, "grad_norm": 0.6703318953514099, "learning_rate": 1.3857e-05, "loss": 0.0324, "step": 4622 }, { "epoch": 5.102705687465488, "grad_norm": 0.9278773069381714, "learning_rate": 1.3860000000000001e-05, "loss": 0.0421, "step": 4623 }, { "epoch": 5.103810049696301, "grad_norm": 0.7508123517036438, "learning_rate": 1.3863000000000001e-05, "loss": 0.0478, "step": 4624 }, { 
"epoch": 5.104914411927112, "grad_norm": 0.6833733916282654, "learning_rate": 1.3866e-05, "loss": 0.0522, "step": 4625 }, { "epoch": 5.106018774157924, "grad_norm": 0.5788169503211975, "learning_rate": 1.3869e-05, "loss": 0.0413, "step": 4626 }, { "epoch": 5.1071231363887355, "grad_norm": 0.9073108434677124, "learning_rate": 1.3872e-05, "loss": 0.0424, "step": 4627 }, { "epoch": 5.108227498619547, "grad_norm": 0.691870927810669, "learning_rate": 1.3875000000000002e-05, "loss": 0.032, "step": 4628 }, { "epoch": 5.109331860850359, "grad_norm": 1.224108099937439, "learning_rate": 1.3878e-05, "loss": 0.0291, "step": 4629 }, { "epoch": 5.11043622308117, "grad_norm": 0.7778895497322083, "learning_rate": 1.3881e-05, "loss": 0.0393, "step": 4630 }, { "epoch": 5.111540585311983, "grad_norm": 1.2222844362258911, "learning_rate": 1.3884e-05, "loss": 0.3059, "step": 4631 }, { "epoch": 5.112644947542794, "grad_norm": 0.8739486336708069, "learning_rate": 1.3887e-05, "loss": 0.2193, "step": 4632 }, { "epoch": 5.113749309773605, "grad_norm": 1.5363889932632446, "learning_rate": 1.389e-05, "loss": 0.2212, "step": 4633 }, { "epoch": 5.1148536720044175, "grad_norm": 0.8941835761070251, "learning_rate": 1.3893e-05, "loss": 0.1854, "step": 4634 }, { "epoch": 5.115958034235229, "grad_norm": 1.0228005647659302, "learning_rate": 1.3896e-05, "loss": 0.1794, "step": 4635 }, { "epoch": 5.117062396466041, "grad_norm": 0.9598357677459717, "learning_rate": 1.3899e-05, "loss": 0.1585, "step": 4636 }, { "epoch": 5.1181667586968524, "grad_norm": 0.9842882752418518, "learning_rate": 1.3902e-05, "loss": 0.1224, "step": 4637 }, { "epoch": 5.119271120927665, "grad_norm": 0.5737741589546204, "learning_rate": 1.3905000000000002e-05, "loss": 0.0725, "step": 4638 }, { "epoch": 5.120375483158476, "grad_norm": 0.6843544244766235, "learning_rate": 1.3908000000000001e-05, "loss": 0.0768, "step": 4639 }, { "epoch": 5.121479845389287, "grad_norm": 0.44883882999420166, "learning_rate": 1.3911000000000001e-05, 
"loss": 0.0379, "step": 4640 }, { "epoch": 5.1225842076201, "grad_norm": 0.598380446434021, "learning_rate": 1.3914e-05, "loss": 0.0467, "step": 4641 }, { "epoch": 5.123688569850911, "grad_norm": 0.4065484404563904, "learning_rate": 1.3916999999999999e-05, "loss": 0.0308, "step": 4642 }, { "epoch": 5.124792932081723, "grad_norm": 0.5429601073265076, "learning_rate": 1.392e-05, "loss": 0.031, "step": 4643 }, { "epoch": 5.1258972943125345, "grad_norm": 0.4433300793170929, "learning_rate": 1.3923e-05, "loss": 0.0392, "step": 4644 }, { "epoch": 5.127001656543346, "grad_norm": 0.8039875030517578, "learning_rate": 1.3926e-05, "loss": 0.032, "step": 4645 }, { "epoch": 5.128106018774158, "grad_norm": 1.4098762273788452, "learning_rate": 1.3929e-05, "loss": 0.0303, "step": 4646 }, { "epoch": 5.129210381004969, "grad_norm": 0.5035215616226196, "learning_rate": 1.3932e-05, "loss": 0.0262, "step": 4647 }, { "epoch": 5.130314743235782, "grad_norm": 0.7155830264091492, "learning_rate": 1.3935000000000001e-05, "loss": 0.0525, "step": 4648 }, { "epoch": 5.131419105466593, "grad_norm": 0.5249045491218567, "learning_rate": 1.3938000000000001e-05, "loss": 0.0222, "step": 4649 }, { "epoch": 5.132523467697405, "grad_norm": 0.7820571064949036, "learning_rate": 1.3941000000000001e-05, "loss": 0.0435, "step": 4650 }, { "epoch": 5.1336278299282165, "grad_norm": 0.32826095819473267, "learning_rate": 1.3944e-05, "loss": 0.0233, "step": 4651 }, { "epoch": 5.134732192159028, "grad_norm": 0.5896167159080505, "learning_rate": 1.3947e-05, "loss": 0.0374, "step": 4652 }, { "epoch": 5.13583655438984, "grad_norm": 0.7816912531852722, "learning_rate": 1.395e-05, "loss": 0.0334, "step": 4653 }, { "epoch": 5.136940916620651, "grad_norm": 0.45459792017936707, "learning_rate": 1.3953e-05, "loss": 0.0202, "step": 4654 }, { "epoch": 5.138045278851464, "grad_norm": 0.5658319592475891, "learning_rate": 1.3956e-05, "loss": 0.0433, "step": 4655 }, { "epoch": 5.139149641082275, "grad_norm": 0.6736620664596558, 
"learning_rate": 1.3959e-05, "loss": 0.0463, "step": 4656 }, { "epoch": 5.140254003313086, "grad_norm": 0.4303630292415619, "learning_rate": 1.3962e-05, "loss": 0.027, "step": 4657 }, { "epoch": 5.141358365543899, "grad_norm": 0.44571608304977417, "learning_rate": 1.3965000000000001e-05, "loss": 0.0206, "step": 4658 }, { "epoch": 5.14246272777471, "grad_norm": 0.35874849557876587, "learning_rate": 1.3968e-05, "loss": 0.0214, "step": 4659 }, { "epoch": 5.143567090005522, "grad_norm": 0.786314070224762, "learning_rate": 1.3971e-05, "loss": 0.0405, "step": 4660 }, { "epoch": 5.1446714522363335, "grad_norm": 0.37641674280166626, "learning_rate": 1.3974e-05, "loss": 0.019, "step": 4661 }, { "epoch": 5.145775814467145, "grad_norm": 0.8253212571144104, "learning_rate": 1.3977e-05, "loss": 0.0408, "step": 4662 }, { "epoch": 5.146880176697957, "grad_norm": 0.5611538290977478, "learning_rate": 1.3980000000000002e-05, "loss": 0.0345, "step": 4663 }, { "epoch": 5.147984538928768, "grad_norm": 0.679786741733551, "learning_rate": 1.3983000000000001e-05, "loss": 0.0357, "step": 4664 }, { "epoch": 5.149088901159581, "grad_norm": 0.7106631994247437, "learning_rate": 1.3986000000000001e-05, "loss": 0.0394, "step": 4665 }, { "epoch": 5.150193263390392, "grad_norm": 0.3754577934741974, "learning_rate": 1.3989e-05, "loss": 0.0232, "step": 4666 }, { "epoch": 5.151297625621204, "grad_norm": 0.7954962253570557, "learning_rate": 1.3992e-05, "loss": 0.0335, "step": 4667 }, { "epoch": 5.1524019878520155, "grad_norm": 0.44651710987091064, "learning_rate": 1.3995e-05, "loss": 0.0257, "step": 4668 }, { "epoch": 5.153506350082827, "grad_norm": 0.6668897867202759, "learning_rate": 1.3998e-05, "loss": 0.0288, "step": 4669 }, { "epoch": 5.154610712313639, "grad_norm": 0.6051453351974487, "learning_rate": 1.4001e-05, "loss": 0.0274, "step": 4670 }, { "epoch": 5.15571507454445, "grad_norm": 0.8471406102180481, "learning_rate": 1.4004e-05, "loss": 0.0515, "step": 4671 }, { "epoch": 5.156819436775263, 
"grad_norm": 0.5718347430229187, "learning_rate": 1.4007e-05, "loss": 0.0258, "step": 4672 }, { "epoch": 5.157923799006074, "grad_norm": 0.8802564740180969, "learning_rate": 1.4010000000000001e-05, "loss": 0.0642, "step": 4673 }, { "epoch": 5.159028161236885, "grad_norm": 0.5928635001182556, "learning_rate": 1.4013000000000001e-05, "loss": 0.0451, "step": 4674 }, { "epoch": 5.160132523467698, "grad_norm": 0.4200727045536041, "learning_rate": 1.4016000000000001e-05, "loss": 0.0198, "step": 4675 }, { "epoch": 5.161236885698509, "grad_norm": 0.41067424416542053, "learning_rate": 1.4019e-05, "loss": 0.0277, "step": 4676 }, { "epoch": 5.162341247929321, "grad_norm": 0.4064567983150482, "learning_rate": 1.4022e-05, "loss": 0.0255, "step": 4677 }, { "epoch": 5.1634456101601325, "grad_norm": 1.17593514919281, "learning_rate": 1.4025e-05, "loss": 0.0403, "step": 4678 }, { "epoch": 5.164549972390944, "grad_norm": 0.7465616464614868, "learning_rate": 1.4028e-05, "loss": 0.0377, "step": 4679 }, { "epoch": 5.165654334621756, "grad_norm": 0.8093053102493286, "learning_rate": 1.4031e-05, "loss": 0.0407, "step": 4680 }, { "epoch": 5.166758696852567, "grad_norm": 2.2272746562957764, "learning_rate": 1.4034e-05, "loss": 0.3866, "step": 4681 }, { "epoch": 5.16786305908338, "grad_norm": 1.0536481142044067, "learning_rate": 1.4037e-05, "loss": 0.2521, "step": 4682 }, { "epoch": 5.168967421314191, "grad_norm": 0.8770247101783752, "learning_rate": 1.4040000000000001e-05, "loss": 0.2275, "step": 4683 }, { "epoch": 5.170071783545003, "grad_norm": 1.7877613306045532, "learning_rate": 1.4043000000000001e-05, "loss": 0.2859, "step": 4684 }, { "epoch": 5.1711761457758145, "grad_norm": 1.1892664432525635, "learning_rate": 1.4046e-05, "loss": 0.2015, "step": 4685 }, { "epoch": 5.172280508006626, "grad_norm": 0.9248642325401306, "learning_rate": 1.4049e-05, "loss": 0.1643, "step": 4686 }, { "epoch": 5.173384870237438, "grad_norm": 0.9650898575782776, "learning_rate": 1.4052e-05, "loss": 0.1577, 
"step": 4687 }, { "epoch": 5.174489232468249, "grad_norm": 0.7430605888366699, "learning_rate": 1.4055000000000002e-05, "loss": 0.0814, "step": 4688 }, { "epoch": 5.175593594699062, "grad_norm": 0.3615111708641052, "learning_rate": 1.4058000000000002e-05, "loss": 0.0518, "step": 4689 }, { "epoch": 5.176697956929873, "grad_norm": 0.7660077214241028, "learning_rate": 1.4061e-05, "loss": 0.047, "step": 4690 }, { "epoch": 5.177802319160684, "grad_norm": 0.4491119384765625, "learning_rate": 1.4064e-05, "loss": 0.0279, "step": 4691 }, { "epoch": 5.178906681391497, "grad_norm": 0.4525921642780304, "learning_rate": 1.4067e-05, "loss": 0.0393, "step": 4692 }, { "epoch": 5.180011043622308, "grad_norm": 0.464013934135437, "learning_rate": 1.4069999999999999e-05, "loss": 0.0374, "step": 4693 }, { "epoch": 5.18111540585312, "grad_norm": 0.5750874876976013, "learning_rate": 1.4073e-05, "loss": 0.0572, "step": 4694 }, { "epoch": 5.1822197680839315, "grad_norm": 0.6399899125099182, "learning_rate": 1.4076e-05, "loss": 0.0759, "step": 4695 }, { "epoch": 5.183324130314743, "grad_norm": 0.5776243209838867, "learning_rate": 1.4079e-05, "loss": 0.0303, "step": 4696 }, { "epoch": 5.184428492545555, "grad_norm": 0.37093013525009155, "learning_rate": 1.4082e-05, "loss": 0.0298, "step": 4697 }, { "epoch": 5.185532854776366, "grad_norm": 0.4776296019554138, "learning_rate": 1.4085e-05, "loss": 0.0446, "step": 4698 }, { "epoch": 5.186637217007179, "grad_norm": 0.5145987868309021, "learning_rate": 1.4088000000000001e-05, "loss": 0.033, "step": 4699 }, { "epoch": 5.18774157923799, "grad_norm": 0.439800888299942, "learning_rate": 1.4091000000000001e-05, "loss": 0.025, "step": 4700 }, { "epoch": 5.188845941468802, "grad_norm": 0.41642695665359497, "learning_rate": 1.4094000000000001e-05, "loss": 0.0266, "step": 4701 }, { "epoch": 5.1899503036996135, "grad_norm": 0.5440358519554138, "learning_rate": 1.4097e-05, "loss": 0.0348, "step": 4702 }, { "epoch": 5.191054665930425, "grad_norm": 
0.5091832876205444, "learning_rate": 1.4099999999999999e-05, "loss": 0.0254, "step": 4703 }, { "epoch": 5.192159028161237, "grad_norm": 0.43399959802627563, "learning_rate": 1.4103e-05, "loss": 0.0272, "step": 4704 }, { "epoch": 5.193263390392048, "grad_norm": 0.44368210434913635, "learning_rate": 1.4106e-05, "loss": 0.0222, "step": 4705 }, { "epoch": 5.194367752622861, "grad_norm": 0.6155173182487488, "learning_rate": 1.4109e-05, "loss": 0.0326, "step": 4706 }, { "epoch": 5.195472114853672, "grad_norm": 0.4015309810638428, "learning_rate": 1.4112e-05, "loss": 0.0202, "step": 4707 }, { "epoch": 5.196576477084483, "grad_norm": 0.43213599920272827, "learning_rate": 1.4115e-05, "loss": 0.0228, "step": 4708 }, { "epoch": 5.197680839315296, "grad_norm": 0.442026823759079, "learning_rate": 1.4118000000000001e-05, "loss": 0.0271, "step": 4709 }, { "epoch": 5.198785201546107, "grad_norm": 0.4883839786052704, "learning_rate": 1.4121e-05, "loss": 0.0322, "step": 4710 }, { "epoch": 5.199889563776919, "grad_norm": 0.40550467371940613, "learning_rate": 1.4124e-05, "loss": 0.0243, "step": 4711 }, { "epoch": 5.2009939260077305, "grad_norm": 0.6129952073097229, "learning_rate": 1.4127e-05, "loss": 0.0385, "step": 4712 }, { "epoch": 5.202098288238542, "grad_norm": 0.39730921387672424, "learning_rate": 1.413e-05, "loss": 0.0307, "step": 4713 }, { "epoch": 5.203202650469354, "grad_norm": 0.6390796303749084, "learning_rate": 1.4133000000000002e-05, "loss": 0.04, "step": 4714 }, { "epoch": 5.204307012700165, "grad_norm": 0.4652286767959595, "learning_rate": 1.4136e-05, "loss": 0.0403, "step": 4715 }, { "epoch": 5.205411374930978, "grad_norm": 0.6751409769058228, "learning_rate": 1.4139e-05, "loss": 0.0404, "step": 4716 }, { "epoch": 5.206515737161789, "grad_norm": 0.6411330103874207, "learning_rate": 1.4142e-05, "loss": 0.0319, "step": 4717 }, { "epoch": 5.207620099392601, "grad_norm": 0.5573740005493164, "learning_rate": 1.4145e-05, "loss": 0.0313, "step": 4718 }, { "epoch": 
5.2087244616234125, "grad_norm": 0.5569042563438416, "learning_rate": 1.4148e-05, "loss": 0.0246, "step": 4719 }, { "epoch": 5.209828823854224, "grad_norm": 1.5422559976577759, "learning_rate": 1.4151e-05, "loss": 0.0318, "step": 4720 }, { "epoch": 5.210933186085036, "grad_norm": 0.5017350316047668, "learning_rate": 1.4154e-05, "loss": 0.028, "step": 4721 }, { "epoch": 5.212037548315847, "grad_norm": 0.7388946413993835, "learning_rate": 1.4157e-05, "loss": 0.0369, "step": 4722 }, { "epoch": 5.21314191054666, "grad_norm": 0.422857403755188, "learning_rate": 1.416e-05, "loss": 0.022, "step": 4723 }, { "epoch": 5.214246272777471, "grad_norm": 0.5234198570251465, "learning_rate": 1.4163000000000001e-05, "loss": 0.0313, "step": 4724 }, { "epoch": 5.215350635008282, "grad_norm": 0.6284419894218445, "learning_rate": 1.4166000000000001e-05, "loss": 0.0298, "step": 4725 }, { "epoch": 5.216454997239095, "grad_norm": 0.7939203381538391, "learning_rate": 1.4169000000000001e-05, "loss": 0.0557, "step": 4726 }, { "epoch": 5.217559359469906, "grad_norm": 1.2959344387054443, "learning_rate": 1.4172e-05, "loss": 0.0325, "step": 4727 }, { "epoch": 5.218663721700718, "grad_norm": 0.8261449933052063, "learning_rate": 1.4174999999999999e-05, "loss": 0.0313, "step": 4728 }, { "epoch": 5.2197680839315295, "grad_norm": 1.0655524730682373, "learning_rate": 1.4178e-05, "loss": 0.0424, "step": 4729 }, { "epoch": 5.220872446162341, "grad_norm": 0.828749418258667, "learning_rate": 1.4181e-05, "loss": 0.0348, "step": 4730 }, { "epoch": 5.221976808393153, "grad_norm": 1.4167331457138062, "learning_rate": 1.4184e-05, "loss": 0.3438, "step": 4731 }, { "epoch": 5.223081170623964, "grad_norm": 1.5297006368637085, "learning_rate": 1.4187e-05, "loss": 0.3368, "step": 4732 }, { "epoch": 5.224185532854777, "grad_norm": 0.8927820324897766, "learning_rate": 1.419e-05, "loss": 0.2498, "step": 4733 }, { "epoch": 5.225289895085588, "grad_norm": 0.7537247538566589, "learning_rate": 1.4193000000000001e-05, 
"loss": 0.1994, "step": 4734 }, { "epoch": 5.2263942573164, "grad_norm": 0.6577887535095215, "learning_rate": 1.4196000000000001e-05, "loss": 0.138, "step": 4735 }, { "epoch": 5.2274986195472115, "grad_norm": 0.8686401844024658, "learning_rate": 1.4199e-05, "loss": 0.1309, "step": 4736 }, { "epoch": 5.228602981778023, "grad_norm": 2.0961415767669678, "learning_rate": 1.4202e-05, "loss": 0.1579, "step": 4737 }, { "epoch": 5.229707344008835, "grad_norm": 0.6371603012084961, "learning_rate": 1.4205e-05, "loss": 0.0952, "step": 4738 }, { "epoch": 5.230811706239646, "grad_norm": 0.718500554561615, "learning_rate": 1.4208e-05, "loss": 0.0965, "step": 4739 }, { "epoch": 5.231916068470459, "grad_norm": 0.5007385611534119, "learning_rate": 1.4211e-05, "loss": 0.0489, "step": 4740 }, { "epoch": 5.23302043070127, "grad_norm": 0.5857154130935669, "learning_rate": 1.4214e-05, "loss": 0.0368, "step": 4741 }, { "epoch": 5.234124792932081, "grad_norm": 0.5253879427909851, "learning_rate": 1.4217e-05, "loss": 0.0318, "step": 4742 }, { "epoch": 5.2352291551628936, "grad_norm": 2.853876829147339, "learning_rate": 1.422e-05, "loss": 0.0404, "step": 4743 }, { "epoch": 5.236333517393705, "grad_norm": 1.1118800640106201, "learning_rate": 1.4223000000000001e-05, "loss": 0.0752, "step": 4744 }, { "epoch": 5.237437879624517, "grad_norm": 0.7702341675758362, "learning_rate": 1.4226e-05, "loss": 0.0413, "step": 4745 }, { "epoch": 5.2385422418553285, "grad_norm": 0.4787181615829468, "learning_rate": 1.4229e-05, "loss": 0.0445, "step": 4746 }, { "epoch": 5.23964660408614, "grad_norm": 0.4279666543006897, "learning_rate": 1.4232e-05, "loss": 0.0335, "step": 4747 }, { "epoch": 5.240750966316952, "grad_norm": 0.852993369102478, "learning_rate": 1.4235e-05, "loss": 0.0282, "step": 4748 }, { "epoch": 5.241855328547763, "grad_norm": 0.5255760550498962, "learning_rate": 1.4238000000000002e-05, "loss": 0.024, "step": 4749 }, { "epoch": 5.242959690778576, "grad_norm": 0.9095035791397095, 
"learning_rate": 1.4241000000000001e-05, "loss": 0.0321, "step": 4750 }, { "epoch": 5.244064053009387, "grad_norm": 0.8933199048042297, "learning_rate": 1.4244000000000001e-05, "loss": 0.0347, "step": 4751 }, { "epoch": 5.245168415240199, "grad_norm": 0.8970818519592285, "learning_rate": 1.4247e-05, "loss": 0.024, "step": 4752 }, { "epoch": 5.2462727774710105, "grad_norm": 0.34686657786369324, "learning_rate": 1.4249999999999999e-05, "loss": 0.0231, "step": 4753 }, { "epoch": 5.247377139701822, "grad_norm": 0.5216172933578491, "learning_rate": 1.4253e-05, "loss": 0.0479, "step": 4754 }, { "epoch": 5.248481501932634, "grad_norm": 0.5204651951789856, "learning_rate": 1.4256e-05, "loss": 0.0325, "step": 4755 }, { "epoch": 5.249585864163445, "grad_norm": 0.4649903476238251, "learning_rate": 1.4259e-05, "loss": 0.0648, "step": 4756 }, { "epoch": 5.250690226394258, "grad_norm": 0.42948174476623535, "learning_rate": 1.4262e-05, "loss": 0.0326, "step": 4757 }, { "epoch": 5.251794588625069, "grad_norm": 0.5635321140289307, "learning_rate": 1.4265e-05, "loss": 0.0253, "step": 4758 }, { "epoch": 5.25289895085588, "grad_norm": 0.5098764300346375, "learning_rate": 1.4268000000000001e-05, "loss": 0.025, "step": 4759 }, { "epoch": 5.2540033130866926, "grad_norm": 0.5445019602775574, "learning_rate": 1.4271000000000001e-05, "loss": 0.0288, "step": 4760 }, { "epoch": 5.255107675317504, "grad_norm": 0.5168808102607727, "learning_rate": 1.4274000000000001e-05, "loss": 0.0318, "step": 4761 }, { "epoch": 5.256212037548316, "grad_norm": 0.5060727000236511, "learning_rate": 1.4277e-05, "loss": 0.0237, "step": 4762 }, { "epoch": 5.2573163997791275, "grad_norm": 0.5882315635681152, "learning_rate": 1.428e-05, "loss": 0.0244, "step": 4763 }, { "epoch": 5.258420762009939, "grad_norm": 0.5449221134185791, "learning_rate": 1.4283e-05, "loss": 0.0325, "step": 4764 }, { "epoch": 5.259525124240751, "grad_norm": 0.6640456914901733, "learning_rate": 1.4286e-05, "loss": 0.0352, "step": 4765 }, { 
"epoch": 5.260629486471562, "grad_norm": 0.3812299966812134, "learning_rate": 1.4289e-05, "loss": 0.0143, "step": 4766 }, { "epoch": 5.261733848702375, "grad_norm": 0.5136684775352478, "learning_rate": 1.4292e-05, "loss": 0.0213, "step": 4767 }, { "epoch": 5.262838210933186, "grad_norm": 0.5683413147926331, "learning_rate": 1.4295e-05, "loss": 0.0252, "step": 4768 }, { "epoch": 5.263942573163998, "grad_norm": 0.6973309516906738, "learning_rate": 1.4298000000000001e-05, "loss": 0.049, "step": 4769 }, { "epoch": 5.2650469353948095, "grad_norm": 0.7875422835350037, "learning_rate": 1.4301e-05, "loss": 0.0344, "step": 4770 }, { "epoch": 5.266151297625621, "grad_norm": 0.6567531824111938, "learning_rate": 1.4304e-05, "loss": 0.0327, "step": 4771 }, { "epoch": 5.267255659856433, "grad_norm": 0.7730956673622131, "learning_rate": 1.4307e-05, "loss": 0.0374, "step": 4772 }, { "epoch": 5.268360022087244, "grad_norm": 0.46696141362190247, "learning_rate": 1.431e-05, "loss": 0.0152, "step": 4773 }, { "epoch": 5.269464384318057, "grad_norm": 0.6857742667198181, "learning_rate": 1.4313000000000002e-05, "loss": 0.0298, "step": 4774 }, { "epoch": 5.270568746548868, "grad_norm": 0.7606381773948669, "learning_rate": 1.4316000000000002e-05, "loss": 0.0341, "step": 4775 }, { "epoch": 5.27167310877968, "grad_norm": 0.8910508155822754, "learning_rate": 1.4319e-05, "loss": 0.028, "step": 4776 }, { "epoch": 5.2727774710104915, "grad_norm": 0.9081307053565979, "learning_rate": 1.4322e-05, "loss": 0.0817, "step": 4777 }, { "epoch": 5.273881833241303, "grad_norm": 0.6732075214385986, "learning_rate": 1.4325e-05, "loss": 0.0398, "step": 4778 }, { "epoch": 5.274986195472115, "grad_norm": 1.4457498788833618, "learning_rate": 1.4328e-05, "loss": 0.0691, "step": 4779 }, { "epoch": 5.2760905577029265, "grad_norm": 0.7878539562225342, "learning_rate": 1.4331e-05, "loss": 0.0342, "step": 4780 }, { "epoch": 5.277194919933739, "grad_norm": 2.1473000049591064, "learning_rate": 1.4334e-05, "loss": 
0.3704, "step": 4781 }, { "epoch": 5.27829928216455, "grad_norm": 1.1372413635253906, "learning_rate": 1.4337e-05, "loss": 0.3029, "step": 4782 }, { "epoch": 5.279403644395361, "grad_norm": 0.8905017971992493, "learning_rate": 1.434e-05, "loss": 0.2337, "step": 4783 }, { "epoch": 5.280508006626174, "grad_norm": 0.8629001379013062, "learning_rate": 1.4343000000000001e-05, "loss": 0.2143, "step": 4784 }, { "epoch": 5.281612368856985, "grad_norm": 0.8889244794845581, "learning_rate": 1.4346000000000001e-05, "loss": 0.2082, "step": 4785 }, { "epoch": 5.282716731087797, "grad_norm": 1.1602228879928589, "learning_rate": 1.4349000000000001e-05, "loss": 0.1828, "step": 4786 }, { "epoch": 5.2838210933186085, "grad_norm": 0.9245853424072266, "learning_rate": 1.4352e-05, "loss": 0.1277, "step": 4787 }, { "epoch": 5.28492545554942, "grad_norm": 0.9204116463661194, "learning_rate": 1.4355e-05, "loss": 0.1277, "step": 4788 }, { "epoch": 5.286029817780232, "grad_norm": 0.4785742461681366, "learning_rate": 1.4358e-05, "loss": 0.0558, "step": 4789 }, { "epoch": 5.287134180011043, "grad_norm": 0.6478977799415588, "learning_rate": 1.4361e-05, "loss": 0.0734, "step": 4790 }, { "epoch": 5.288238542241856, "grad_norm": 0.43941646814346313, "learning_rate": 1.4364e-05, "loss": 0.0479, "step": 4791 }, { "epoch": 5.289342904472667, "grad_norm": 0.7473098635673523, "learning_rate": 1.4367e-05, "loss": 0.0724, "step": 4792 }, { "epoch": 5.290447266703479, "grad_norm": 0.6243553757667542, "learning_rate": 1.437e-05, "loss": 0.0607, "step": 4793 }, { "epoch": 5.2915516289342905, "grad_norm": 0.363036185503006, "learning_rate": 1.4373000000000001e-05, "loss": 0.0365, "step": 4794 }, { "epoch": 5.292655991165102, "grad_norm": 1.3659024238586426, "learning_rate": 1.4376000000000001e-05, "loss": 0.0274, "step": 4795 }, { "epoch": 5.293760353395914, "grad_norm": 0.41740551590919495, "learning_rate": 1.4379e-05, "loss": 0.0284, "step": 4796 }, { "epoch": 5.2948647156267254, "grad_norm": 
0.42382046580314636, "learning_rate": 1.4382e-05, "loss": 0.0225, "step": 4797 }, { "epoch": 5.295969077857538, "grad_norm": 1.0024229288101196, "learning_rate": 1.4385e-05, "loss": 0.0412, "step": 4798 }, { "epoch": 5.297073440088349, "grad_norm": 0.6348753571510315, "learning_rate": 1.4388000000000002e-05, "loss": 0.0434, "step": 4799 }, { "epoch": 5.29817780231916, "grad_norm": 0.5393027663230896, "learning_rate": 1.4391000000000002e-05, "loss": 0.0352, "step": 4800 }, { "epoch": 5.299282164549973, "grad_norm": 0.5228685736656189, "learning_rate": 1.4394e-05, "loss": 0.0415, "step": 4801 }, { "epoch": 5.300386526780784, "grad_norm": 0.42772865295410156, "learning_rate": 1.4397e-05, "loss": 0.0428, "step": 4802 }, { "epoch": 5.301490889011596, "grad_norm": 0.34574079513549805, "learning_rate": 1.44e-05, "loss": 0.0201, "step": 4803 }, { "epoch": 5.3025952512424075, "grad_norm": 0.49519404768943787, "learning_rate": 1.4403e-05, "loss": 0.0409, "step": 4804 }, { "epoch": 5.303699613473219, "grad_norm": 0.3423428535461426, "learning_rate": 1.4406e-05, "loss": 0.0206, "step": 4805 }, { "epoch": 5.304803975704031, "grad_norm": 0.5892621874809265, "learning_rate": 1.4409e-05, "loss": 0.0301, "step": 4806 }, { "epoch": 5.305908337934842, "grad_norm": 0.8076890110969543, "learning_rate": 1.4412e-05, "loss": 0.0418, "step": 4807 }, { "epoch": 5.307012700165655, "grad_norm": 0.7229380011558533, "learning_rate": 1.4415e-05, "loss": 0.0295, "step": 4808 }, { "epoch": 5.308117062396466, "grad_norm": 0.54018235206604, "learning_rate": 1.4418000000000002e-05, "loss": 0.0343, "step": 4809 }, { "epoch": 5.309221424627278, "grad_norm": 0.5344401597976685, "learning_rate": 1.4421000000000001e-05, "loss": 0.0166, "step": 4810 }, { "epoch": 5.3103257868580895, "grad_norm": 0.6630844473838806, "learning_rate": 1.4424000000000001e-05, "loss": 0.0431, "step": 4811 }, { "epoch": 5.311430149088901, "grad_norm": 0.44569581747055054, "learning_rate": 1.4427000000000001e-05, "loss": 0.0272, 
"step": 4812 }, { "epoch": 5.312534511319713, "grad_norm": 0.48243746161460876, "learning_rate": 1.4429999999999999e-05, "loss": 0.0404, "step": 4813 }, { "epoch": 5.313638873550524, "grad_norm": 0.33782538771629333, "learning_rate": 1.4433e-05, "loss": 0.0214, "step": 4814 }, { "epoch": 5.314743235781337, "grad_norm": 0.44274041056632996, "learning_rate": 1.4436e-05, "loss": 0.0207, "step": 4815 }, { "epoch": 5.315847598012148, "grad_norm": 0.44375962018966675, "learning_rate": 1.4439e-05, "loss": 0.0441, "step": 4816 }, { "epoch": 5.316951960242959, "grad_norm": 0.5906809568405151, "learning_rate": 1.4442e-05, "loss": 0.0376, "step": 4817 }, { "epoch": 5.318056322473772, "grad_norm": 0.5444981455802917, "learning_rate": 1.4445e-05, "loss": 0.0378, "step": 4818 }, { "epoch": 5.319160684704583, "grad_norm": 0.5858832001686096, "learning_rate": 1.4448e-05, "loss": 0.0426, "step": 4819 }, { "epoch": 5.320265046935395, "grad_norm": 0.6375450491905212, "learning_rate": 1.4451000000000001e-05, "loss": 0.0259, "step": 4820 }, { "epoch": 5.3213694091662065, "grad_norm": 0.6064670085906982, "learning_rate": 1.4454000000000001e-05, "loss": 0.0188, "step": 4821 }, { "epoch": 5.322473771397018, "grad_norm": 1.009832739830017, "learning_rate": 1.4457e-05, "loss": 0.0369, "step": 4822 }, { "epoch": 5.32357813362783, "grad_norm": 0.5238858461380005, "learning_rate": 1.446e-05, "loss": 0.0404, "step": 4823 }, { "epoch": 5.324682495858641, "grad_norm": 0.6684252619743347, "learning_rate": 1.4463e-05, "loss": 0.0424, "step": 4824 }, { "epoch": 5.325786858089454, "grad_norm": 0.9393660426139832, "learning_rate": 1.4466e-05, "loss": 0.0455, "step": 4825 }, { "epoch": 5.326891220320265, "grad_norm": 0.9162062406539917, "learning_rate": 1.4469e-05, "loss": 0.041, "step": 4826 }, { "epoch": 5.327995582551077, "grad_norm": 1.0872753858566284, "learning_rate": 1.4472e-05, "loss": 0.0352, "step": 4827 }, { "epoch": 5.3290999447818885, "grad_norm": 0.5190155506134033, "learning_rate": 
1.4475e-05, "loss": 0.024, "step": 4828 }, { "epoch": 5.3302043070127, "grad_norm": 0.7186395525932312, "learning_rate": 1.4478e-05, "loss": 0.0447, "step": 4829 }, { "epoch": 5.331308669243512, "grad_norm": 0.8161322474479675, "learning_rate": 1.4481e-05, "loss": 0.0535, "step": 4830 }, { "epoch": 5.332413031474323, "grad_norm": 1.5482491254806519, "learning_rate": 1.4484e-05, "loss": 0.3365, "step": 4831 }, { "epoch": 5.333517393705136, "grad_norm": 0.9577637910842896, "learning_rate": 1.4487e-05, "loss": 0.2839, "step": 4832 }, { "epoch": 5.334621755935947, "grad_norm": 1.160318374633789, "learning_rate": 1.449e-05, "loss": 0.3288, "step": 4833 }, { "epoch": 5.335726118166758, "grad_norm": 0.968704104423523, "learning_rate": 1.4493e-05, "loss": 0.2331, "step": 4834 }, { "epoch": 5.336830480397571, "grad_norm": 1.0487462282180786, "learning_rate": 1.4496000000000001e-05, "loss": 0.1711, "step": 4835 }, { "epoch": 5.337934842628382, "grad_norm": 0.8473931550979614, "learning_rate": 1.4499000000000001e-05, "loss": 0.1802, "step": 4836 }, { "epoch": 5.339039204859194, "grad_norm": 0.7379992008209229, "learning_rate": 1.4502000000000001e-05, "loss": 0.0955, "step": 4837 }, { "epoch": 5.3401435670900055, "grad_norm": 0.5456183552742004, "learning_rate": 1.4505e-05, "loss": 0.0912, "step": 4838 }, { "epoch": 5.341247929320817, "grad_norm": 0.5843274593353271, "learning_rate": 1.4507999999999999e-05, "loss": 0.0801, "step": 4839 }, { "epoch": 5.342352291551629, "grad_norm": 0.5115396976470947, "learning_rate": 1.4511e-05, "loss": 0.0495, "step": 4840 }, { "epoch": 5.34345665378244, "grad_norm": 0.8831512331962585, "learning_rate": 1.4514e-05, "loss": 0.0882, "step": 4841 }, { "epoch": 5.344561016013253, "grad_norm": 0.7413141131401062, "learning_rate": 1.4517e-05, "loss": 0.038, "step": 4842 }, { "epoch": 5.345665378244064, "grad_norm": 0.6677969694137573, "learning_rate": 1.452e-05, "loss": 0.035, "step": 4843 }, { "epoch": 5.346769740474876, "grad_norm": 
0.4588460922241211, "learning_rate": 1.4523e-05, "loss": 0.0428, "step": 4844 }, { "epoch": 5.3478741027056875, "grad_norm": 2.701115369796753, "learning_rate": 1.4526000000000001e-05, "loss": 0.075, "step": 4845 }, { "epoch": 5.348978464936499, "grad_norm": 0.6286947131156921, "learning_rate": 1.4529000000000001e-05, "loss": 0.0411, "step": 4846 }, { "epoch": 5.350082827167311, "grad_norm": 0.45560118556022644, "learning_rate": 1.4532e-05, "loss": 0.023, "step": 4847 }, { "epoch": 5.351187189398122, "grad_norm": 0.8444555401802063, "learning_rate": 1.4535e-05, "loss": 0.0213, "step": 4848 }, { "epoch": 5.352291551628935, "grad_norm": 0.8642932772636414, "learning_rate": 1.4538e-05, "loss": 0.0456, "step": 4849 }, { "epoch": 5.353395913859746, "grad_norm": 0.5157015323638916, "learning_rate": 1.4541e-05, "loss": 0.0392, "step": 4850 }, { "epoch": 5.354500276090557, "grad_norm": 0.6695736050605774, "learning_rate": 1.4544e-05, "loss": 0.0359, "step": 4851 }, { "epoch": 5.35560463832137, "grad_norm": 0.43089380860328674, "learning_rate": 1.4547e-05, "loss": 0.0405, "step": 4852 }, { "epoch": 5.356709000552181, "grad_norm": 0.45489242672920227, "learning_rate": 1.455e-05, "loss": 0.0206, "step": 4853 }, { "epoch": 5.357813362782993, "grad_norm": 0.6047874689102173, "learning_rate": 1.4553e-05, "loss": 0.0287, "step": 4854 }, { "epoch": 5.3589177250138045, "grad_norm": 0.420675128698349, "learning_rate": 1.4556000000000001e-05, "loss": 0.0255, "step": 4855 }, { "epoch": 5.360022087244616, "grad_norm": 0.4939134120941162, "learning_rate": 1.4559e-05, "loss": 0.0358, "step": 4856 }, { "epoch": 5.361126449475428, "grad_norm": 0.6131874322891235, "learning_rate": 1.4562e-05, "loss": 0.035, "step": 4857 }, { "epoch": 5.362230811706239, "grad_norm": 0.5505065321922302, "learning_rate": 1.4565e-05, "loss": 0.0224, "step": 4858 }, { "epoch": 5.363335173937052, "grad_norm": 0.8145177364349365, "learning_rate": 1.4568e-05, "loss": 0.0406, "step": 4859 }, { "epoch": 
5.364439536167863, "grad_norm": 0.51108318567276, "learning_rate": 1.4571000000000002e-05, "loss": 0.0216, "step": 4860 }, { "epoch": 5.365543898398675, "grad_norm": 1.0812019109725952, "learning_rate": 1.4574000000000001e-05, "loss": 0.0507, "step": 4861 }, { "epoch": 5.3666482606294865, "grad_norm": 0.4286056458950043, "learning_rate": 1.4577e-05, "loss": 0.0304, "step": 4862 }, { "epoch": 5.367752622860298, "grad_norm": 0.528074324131012, "learning_rate": 1.458e-05, "loss": 0.0233, "step": 4863 }, { "epoch": 5.36885698509111, "grad_norm": 0.5876320004463196, "learning_rate": 1.4582999999999999e-05, "loss": 0.0234, "step": 4864 }, { "epoch": 5.369961347321921, "grad_norm": 1.1767098903656006, "learning_rate": 1.4586e-05, "loss": 0.0266, "step": 4865 }, { "epoch": 5.371065709552734, "grad_norm": 0.8607004284858704, "learning_rate": 1.4589e-05, "loss": 0.0341, "step": 4866 }, { "epoch": 5.372170071783545, "grad_norm": 1.3807393312454224, "learning_rate": 1.4592e-05, "loss": 0.0302, "step": 4867 }, { "epoch": 5.373274434014356, "grad_norm": 0.6269333362579346, "learning_rate": 1.4595e-05, "loss": 0.0227, "step": 4868 }, { "epoch": 5.374378796245169, "grad_norm": 1.2241202592849731, "learning_rate": 1.4598e-05, "loss": 0.0588, "step": 4869 }, { "epoch": 5.37548315847598, "grad_norm": 0.5869101285934448, "learning_rate": 1.4601000000000001e-05, "loss": 0.0247, "step": 4870 }, { "epoch": 5.376587520706792, "grad_norm": 0.757669985294342, "learning_rate": 1.4604000000000001e-05, "loss": 0.0482, "step": 4871 }, { "epoch": 5.3776918829376035, "grad_norm": 0.5819917917251587, "learning_rate": 1.4607000000000001e-05, "loss": 0.0291, "step": 4872 }, { "epoch": 5.378796245168415, "grad_norm": 1.376861333847046, "learning_rate": 1.461e-05, "loss": 0.0453, "step": 4873 }, { "epoch": 5.379900607399227, "grad_norm": 0.8742395639419556, "learning_rate": 1.4613e-05, "loss": 0.0611, "step": 4874 }, { "epoch": 5.381004969630038, "grad_norm": 1.1216647624969482, "learning_rate": 
1.4616e-05, "loss": 0.0571, "step": 4875 }, { "epoch": 5.382109331860851, "grad_norm": 0.43918904662132263, "learning_rate": 1.4619e-05, "loss": 0.0249, "step": 4876 }, { "epoch": 5.383213694091662, "grad_norm": 0.6599015593528748, "learning_rate": 1.4622e-05, "loss": 0.0288, "step": 4877 }, { "epoch": 5.384318056322474, "grad_norm": 1.1373820304870605, "learning_rate": 1.4625e-05, "loss": 0.055, "step": 4878 }, { "epoch": 5.3854224185532855, "grad_norm": 0.8982183933258057, "learning_rate": 1.4628e-05, "loss": 0.0398, "step": 4879 }, { "epoch": 5.386526780784097, "grad_norm": 0.986747145652771, "learning_rate": 1.4631000000000001e-05, "loss": 0.0691, "step": 4880 }, { "epoch": 5.387631143014909, "grad_norm": 1.5688832998275757, "learning_rate": 1.4634e-05, "loss": 0.3876, "step": 4881 }, { "epoch": 5.38873550524572, "grad_norm": 0.873629629611969, "learning_rate": 1.4637e-05, "loss": 0.3269, "step": 4882 }, { "epoch": 5.389839867476533, "grad_norm": 1.4895402193069458, "learning_rate": 1.464e-05, "loss": 0.2197, "step": 4883 }, { "epoch": 5.390944229707344, "grad_norm": 1.6143028736114502, "learning_rate": 1.4643e-05, "loss": 0.2421, "step": 4884 }, { "epoch": 5.392048591938155, "grad_norm": 1.0192776918411255, "learning_rate": 1.4646000000000002e-05, "loss": 0.1752, "step": 4885 }, { "epoch": 5.393152954168968, "grad_norm": 0.8684067726135254, "learning_rate": 1.4649000000000002e-05, "loss": 0.1496, "step": 4886 }, { "epoch": 5.394257316399779, "grad_norm": 0.8029965758323669, "learning_rate": 1.4652e-05, "loss": 0.1237, "step": 4887 }, { "epoch": 5.395361678630591, "grad_norm": 0.6912276744842529, "learning_rate": 1.4655e-05, "loss": 0.0624, "step": 4888 }, { "epoch": 5.3964660408614025, "grad_norm": 0.597069263458252, "learning_rate": 1.4658e-05, "loss": 0.0613, "step": 4889 }, { "epoch": 5.397570403092214, "grad_norm": 0.47712987661361694, "learning_rate": 1.4661e-05, "loss": 0.0689, "step": 4890 }, { "epoch": 5.398674765323026, "grad_norm": 
0.6841446161270142, "learning_rate": 1.4664e-05, "loss": 0.0361, "step": 4891 }, { "epoch": 5.399779127553837, "grad_norm": 0.5535289645195007, "learning_rate": 1.4667e-05, "loss": 0.0384, "step": 4892 }, { "epoch": 5.40088348978465, "grad_norm": 0.3900723457336426, "learning_rate": 1.467e-05, "loss": 0.0244, "step": 4893 }, { "epoch": 5.401987852015461, "grad_norm": 1.5757006406784058, "learning_rate": 1.4673e-05, "loss": 0.0452, "step": 4894 }, { "epoch": 5.403092214246273, "grad_norm": 0.7393357753753662, "learning_rate": 1.4676000000000001e-05, "loss": 0.0291, "step": 4895 }, { "epoch": 5.4041965764770845, "grad_norm": 0.7772746086120605, "learning_rate": 1.4679000000000001e-05, "loss": 0.0527, "step": 4896 }, { "epoch": 5.405300938707896, "grad_norm": 0.6436273455619812, "learning_rate": 1.4682000000000001e-05, "loss": 0.0349, "step": 4897 }, { "epoch": 5.406405300938708, "grad_norm": 0.4782566428184509, "learning_rate": 1.4685000000000001e-05, "loss": 0.0351, "step": 4898 }, { "epoch": 5.407509663169519, "grad_norm": 0.48957183957099915, "learning_rate": 1.4687999999999999e-05, "loss": 0.0478, "step": 4899 }, { "epoch": 5.408614025400332, "grad_norm": 0.5767601728439331, "learning_rate": 1.4691e-05, "loss": 0.029, "step": 4900 }, { "epoch": 5.409718387631143, "grad_norm": 0.4695214331150055, "learning_rate": 1.4694e-05, "loss": 0.0293, "step": 4901 }, { "epoch": 5.410822749861954, "grad_norm": 0.9276853799819946, "learning_rate": 1.4697e-05, "loss": 0.0408, "step": 4902 }, { "epoch": 5.411927112092767, "grad_norm": 0.4849580228328705, "learning_rate": 1.47e-05, "loss": 0.0375, "step": 4903 }, { "epoch": 5.413031474323578, "grad_norm": 0.4092622995376587, "learning_rate": 1.4703e-05, "loss": 0.0397, "step": 4904 }, { "epoch": 5.41413583655439, "grad_norm": 0.5014208555221558, "learning_rate": 1.4706000000000001e-05, "loss": 0.0273, "step": 4905 }, { "epoch": 5.4152401987852015, "grad_norm": 0.4266551434993744, "learning_rate": 1.4709000000000001e-05, "loss": 
0.0286, "step": 4906 }, { "epoch": 5.416344561016013, "grad_norm": 0.5136438608169556, "learning_rate": 1.4712e-05, "loss": 0.0363, "step": 4907 }, { "epoch": 5.417448923246825, "grad_norm": 0.6827118992805481, "learning_rate": 1.4715e-05, "loss": 0.0347, "step": 4908 }, { "epoch": 5.418553285477636, "grad_norm": 0.6898578405380249, "learning_rate": 1.4718e-05, "loss": 0.0406, "step": 4909 }, { "epoch": 5.419657647708449, "grad_norm": 0.6658198237419128, "learning_rate": 1.4721000000000002e-05, "loss": 0.0488, "step": 4910 }, { "epoch": 5.42076200993926, "grad_norm": 0.4947413206100464, "learning_rate": 1.4724e-05, "loss": 0.0207, "step": 4911 }, { "epoch": 5.421866372170072, "grad_norm": 0.6118384003639221, "learning_rate": 1.4727e-05, "loss": 0.0401, "step": 4912 }, { "epoch": 5.4229707344008835, "grad_norm": 0.5540432929992676, "learning_rate": 1.473e-05, "loss": 0.0424, "step": 4913 }, { "epoch": 5.424075096631695, "grad_norm": 0.35042351484298706, "learning_rate": 1.4733e-05, "loss": 0.024, "step": 4914 }, { "epoch": 5.425179458862507, "grad_norm": 0.6301450133323669, "learning_rate": 1.4736000000000001e-05, "loss": 0.0371, "step": 4915 }, { "epoch": 5.426283821093318, "grad_norm": 0.6177799701690674, "learning_rate": 1.4739e-05, "loss": 0.0274, "step": 4916 }, { "epoch": 5.427388183324131, "grad_norm": 0.4316175878047943, "learning_rate": 1.4742e-05, "loss": 0.026, "step": 4917 }, { "epoch": 5.428492545554942, "grad_norm": 0.6066516041755676, "learning_rate": 1.4745e-05, "loss": 0.0427, "step": 4918 }, { "epoch": 5.429596907785753, "grad_norm": 0.6622862815856934, "learning_rate": 1.4748e-05, "loss": 0.0442, "step": 4919 }, { "epoch": 5.4307012700165656, "grad_norm": 0.612697958946228, "learning_rate": 1.4751000000000002e-05, "loss": 0.0355, "step": 4920 }, { "epoch": 5.431805632247377, "grad_norm": 0.7198914885520935, "learning_rate": 1.4754000000000001e-05, "loss": 0.0312, "step": 4921 }, { "epoch": 5.432909994478189, "grad_norm": 0.8062668442726135, 
"learning_rate": 1.4757000000000001e-05, "loss": 0.0419, "step": 4922 }, { "epoch": 5.4340143567090005, "grad_norm": 1.0826743841171265, "learning_rate": 1.4760000000000001e-05, "loss": 0.0411, "step": 4923 }, { "epoch": 5.435118718939812, "grad_norm": 1.9944573640823364, "learning_rate": 1.4762999999999999e-05, "loss": 0.0378, "step": 4924 }, { "epoch": 5.436223081170624, "grad_norm": 0.4967125654220581, "learning_rate": 1.4766e-05, "loss": 0.0295, "step": 4925 }, { "epoch": 5.437327443401435, "grad_norm": 1.1991344690322876, "learning_rate": 1.4769e-05, "loss": 0.07, "step": 4926 }, { "epoch": 5.438431805632248, "grad_norm": 0.5584532022476196, "learning_rate": 1.4772e-05, "loss": 0.0225, "step": 4927 }, { "epoch": 5.439536167863059, "grad_norm": 0.5295829772949219, "learning_rate": 1.4775e-05, "loss": 0.0314, "step": 4928 }, { "epoch": 5.440640530093871, "grad_norm": 0.5505478382110596, "learning_rate": 1.4778e-05, "loss": 0.0398, "step": 4929 }, { "epoch": 5.4417448923246825, "grad_norm": 0.42136088013648987, "learning_rate": 1.4781000000000001e-05, "loss": 0.0257, "step": 4930 }, { "epoch": 5.442849254555494, "grad_norm": 1.2521569728851318, "learning_rate": 1.4784000000000001e-05, "loss": 0.4157, "step": 4931 }, { "epoch": 5.443953616786306, "grad_norm": 1.25179123878479, "learning_rate": 1.4787000000000001e-05, "loss": 0.2928, "step": 4932 }, { "epoch": 5.445057979017117, "grad_norm": 1.0527944564819336, "learning_rate": 1.479e-05, "loss": 0.2704, "step": 4933 }, { "epoch": 5.44616234124793, "grad_norm": 0.7092692852020264, "learning_rate": 1.4793e-05, "loss": 0.1912, "step": 4934 }, { "epoch": 5.447266703478741, "grad_norm": 0.9515571594238281, "learning_rate": 1.4796000000000002e-05, "loss": 0.1851, "step": 4935 }, { "epoch": 5.448371065709552, "grad_norm": 1.0277414321899414, "learning_rate": 1.4799e-05, "loss": 0.187, "step": 4936 }, { "epoch": 5.4494754279403645, "grad_norm": 1.080491542816162, "learning_rate": 1.4802e-05, "loss": 0.162, "step": 4937 }, 
{ "epoch": 5.450579790171176, "grad_norm": 0.8190197348594666, "learning_rate": 1.4805e-05, "loss": 0.1594, "step": 4938 }, { "epoch": 5.451684152401988, "grad_norm": 0.8304286003112793, "learning_rate": 1.4808e-05, "loss": 0.0856, "step": 4939 }, { "epoch": 5.4527885146327995, "grad_norm": 0.6055247187614441, "learning_rate": 1.4811000000000001e-05, "loss": 0.0586, "step": 4940 }, { "epoch": 5.453892876863611, "grad_norm": 0.534356415271759, "learning_rate": 1.4814e-05, "loss": 0.0259, "step": 4941 }, { "epoch": 5.454997239094423, "grad_norm": 0.725064218044281, "learning_rate": 1.4817e-05, "loss": 0.0443, "step": 4942 }, { "epoch": 5.456101601325234, "grad_norm": 0.40065521001815796, "learning_rate": 1.482e-05, "loss": 0.0326, "step": 4943 }, { "epoch": 5.457205963556047, "grad_norm": 0.7980473041534424, "learning_rate": 1.4823e-05, "loss": 0.0424, "step": 4944 }, { "epoch": 5.458310325786858, "grad_norm": 0.4579915702342987, "learning_rate": 1.4826e-05, "loss": 0.0359, "step": 4945 }, { "epoch": 5.45941468801767, "grad_norm": 0.6368777751922607, "learning_rate": 1.4829000000000002e-05, "loss": 0.0413, "step": 4946 }, { "epoch": 5.4605190502484815, "grad_norm": 0.6500827670097351, "learning_rate": 1.4832000000000001e-05, "loss": 0.0531, "step": 4947 }, { "epoch": 5.461623412479293, "grad_norm": 0.5154445767402649, "learning_rate": 1.4835e-05, "loss": 0.0284, "step": 4948 }, { "epoch": 5.462727774710105, "grad_norm": 0.3964086174964905, "learning_rate": 1.4838e-05, "loss": 0.0234, "step": 4949 }, { "epoch": 5.463832136940916, "grad_norm": 0.8135250806808472, "learning_rate": 1.4840999999999999e-05, "loss": 0.0349, "step": 4950 }, { "epoch": 5.464936499171729, "grad_norm": 0.5809573531150818, "learning_rate": 1.4844e-05, "loss": 0.0335, "step": 4951 }, { "epoch": 5.46604086140254, "grad_norm": 0.6470138430595398, "learning_rate": 1.4847e-05, "loss": 0.0398, "step": 4952 }, { "epoch": 5.467145223633352, "grad_norm": 0.6762707829475403, "learning_rate": 1.485e-05, 
"loss": 0.0408, "step": 4953 }, { "epoch": 5.4682495858641635, "grad_norm": 0.529655933380127, "learning_rate": 1.4853e-05, "loss": 0.0307, "step": 4954 }, { "epoch": 5.469353948094975, "grad_norm": 0.4650346040725708, "learning_rate": 1.4856e-05, "loss": 0.0218, "step": 4955 }, { "epoch": 5.470458310325787, "grad_norm": 1.2180086374282837, "learning_rate": 1.4859000000000001e-05, "loss": 0.0635, "step": 4956 }, { "epoch": 5.4715626725565985, "grad_norm": 0.5318376421928406, "learning_rate": 1.4862000000000001e-05, "loss": 0.0272, "step": 4957 }, { "epoch": 5.472667034787411, "grad_norm": 3.9895148277282715, "learning_rate": 1.4865e-05, "loss": 0.0962, "step": 4958 }, { "epoch": 5.473771397018222, "grad_norm": 1.0454875230789185, "learning_rate": 1.4868e-05, "loss": 0.0307, "step": 4959 }, { "epoch": 5.474875759249033, "grad_norm": 0.5328517556190491, "learning_rate": 1.4871e-05, "loss": 0.0387, "step": 4960 }, { "epoch": 5.475980121479846, "grad_norm": 0.5317944288253784, "learning_rate": 1.4874e-05, "loss": 0.0457, "step": 4961 }, { "epoch": 5.477084483710657, "grad_norm": 0.5622000694274902, "learning_rate": 1.4877e-05, "loss": 0.0407, "step": 4962 }, { "epoch": 5.478188845941469, "grad_norm": 0.75391685962677, "learning_rate": 1.488e-05, "loss": 0.0415, "step": 4963 }, { "epoch": 5.4792932081722805, "grad_norm": 0.708770215511322, "learning_rate": 1.4883e-05, "loss": 0.0298, "step": 4964 }, { "epoch": 5.480397570403092, "grad_norm": 0.45225682854652405, "learning_rate": 1.4886e-05, "loss": 0.0286, "step": 4965 }, { "epoch": 5.481501932633904, "grad_norm": 0.9382547736167908, "learning_rate": 1.4889000000000001e-05, "loss": 0.044, "step": 4966 }, { "epoch": 5.482606294864715, "grad_norm": 0.6285936236381531, "learning_rate": 1.4892e-05, "loss": 0.0447, "step": 4967 }, { "epoch": 5.483710657095528, "grad_norm": 0.8394253253936768, "learning_rate": 1.4895e-05, "loss": 0.0535, "step": 4968 }, { "epoch": 5.484815019326339, "grad_norm": 0.5386883020401001, 
"learning_rate": 1.4898e-05, "loss": 0.0344, "step": 4969 }, { "epoch": 5.485919381557151, "grad_norm": 0.4804796278476715, "learning_rate": 1.4901e-05, "loss": 0.0215, "step": 4970 }, { "epoch": 5.4870237437879625, "grad_norm": 0.6360336542129517, "learning_rate": 1.4904000000000002e-05, "loss": 0.0273, "step": 4971 }, { "epoch": 5.488128106018774, "grad_norm": 0.4380389451980591, "learning_rate": 1.4907000000000001e-05, "loss": 0.0307, "step": 4972 }, { "epoch": 5.489232468249586, "grad_norm": 0.505203127861023, "learning_rate": 1.491e-05, "loss": 0.0282, "step": 4973 }, { "epoch": 5.4903368304803974, "grad_norm": 0.556068480014801, "learning_rate": 1.4913e-05, "loss": 0.031, "step": 4974 }, { "epoch": 5.49144119271121, "grad_norm": 0.5929484963417053, "learning_rate": 1.4915999999999999e-05, "loss": 0.032, "step": 4975 }, { "epoch": 5.492545554942021, "grad_norm": 0.5985431671142578, "learning_rate": 1.4919e-05, "loss": 0.0304, "step": 4976 }, { "epoch": 5.493649917172832, "grad_norm": 0.4506138563156128, "learning_rate": 1.4922e-05, "loss": 0.0335, "step": 4977 }, { "epoch": 5.494754279403645, "grad_norm": 0.7458927035331726, "learning_rate": 1.4925e-05, "loss": 0.0238, "step": 4978 }, { "epoch": 5.495858641634456, "grad_norm": 0.7614363431930542, "learning_rate": 1.4928e-05, "loss": 0.0333, "step": 4979 }, { "epoch": 5.496963003865268, "grad_norm": 1.215362787246704, "learning_rate": 1.4931e-05, "loss": 0.0435, "step": 4980 }, { "epoch": 5.4980673660960795, "grad_norm": 1.143339991569519, "learning_rate": 1.4934000000000001e-05, "loss": 0.3416, "step": 4981 }, { "epoch": 5.499171728326891, "grad_norm": 0.6801577210426331, "learning_rate": 1.4937000000000001e-05, "loss": 0.2054, "step": 4982 }, { "epoch": 5.500276090557703, "grad_norm": 0.9549333453178406, "learning_rate": 1.4940000000000001e-05, "loss": 0.2172, "step": 4983 }, { "epoch": 5.501380452788514, "grad_norm": 0.7477644681930542, "learning_rate": 1.4943e-05, "loss": 0.1867, "step": 4984 }, { "epoch": 
5.502484815019327, "grad_norm": 0.9543768167495728, "learning_rate": 1.4945999999999999e-05, "loss": 0.1974, "step": 4985 }, { "epoch": 5.503589177250138, "grad_norm": 1.0233354568481445, "learning_rate": 1.4949e-05, "loss": 0.1721, "step": 4986 }, { "epoch": 5.50469353948095, "grad_norm": 0.5923447608947754, "learning_rate": 1.4952e-05, "loss": 0.0868, "step": 4987 }, { "epoch": 5.5057979017117615, "grad_norm": 0.5590853691101074, "learning_rate": 1.4955e-05, "loss": 0.1284, "step": 4988 }, { "epoch": 5.506902263942573, "grad_norm": 0.9632459282875061, "learning_rate": 1.4958e-05, "loss": 0.1702, "step": 4989 }, { "epoch": 5.508006626173385, "grad_norm": 0.5498195886611938, "learning_rate": 1.4961e-05, "loss": 0.0626, "step": 4990 }, { "epoch": 5.509110988404196, "grad_norm": 0.5838099122047424, "learning_rate": 1.4964000000000001e-05, "loss": 0.0704, "step": 4991 }, { "epoch": 5.510215350635009, "grad_norm": 0.8909317851066589, "learning_rate": 1.4967000000000001e-05, "loss": 0.0911, "step": 4992 }, { "epoch": 5.51131971286582, "grad_norm": 0.371926486492157, "learning_rate": 1.497e-05, "loss": 0.0362, "step": 4993 }, { "epoch": 5.512424075096631, "grad_norm": 0.509019672870636, "learning_rate": 1.4973e-05, "loss": 0.0573, "step": 4994 }, { "epoch": 5.513528437327444, "grad_norm": 0.3999793827533722, "learning_rate": 1.4976e-05, "loss": 0.0385, "step": 4995 }, { "epoch": 5.514632799558255, "grad_norm": 0.3295983076095581, "learning_rate": 1.4979000000000002e-05, "loss": 0.0292, "step": 4996 }, { "epoch": 5.515737161789067, "grad_norm": 0.4101661443710327, "learning_rate": 1.4982e-05, "loss": 0.0379, "step": 4997 }, { "epoch": 5.5168415240198785, "grad_norm": 0.5446997284889221, "learning_rate": 1.4985e-05, "loss": 0.0483, "step": 4998 }, { "epoch": 5.51794588625069, "grad_norm": 0.36597850918769836, "learning_rate": 1.4988e-05, "loss": 0.0217, "step": 4999 }, { "epoch": 5.519050248481502, "grad_norm": 0.4160904884338379, "learning_rate": 1.4991e-05, "loss": 
0.0304, "step": 5000 }, { "epoch": 5.519050248481502, "eval_cer": 0.12932576405630297, "eval_loss": 0.3730880320072174, "eval_runtime": 16.42, "eval_samples_per_second": 18.514, "eval_steps_per_second": 0.609, "eval_wer": 0.44762087490406754, "step": 5000 }, { "epoch": 5.520154610712313, "grad_norm": 0.5485743284225464, "learning_rate": 1.4994e-05, "loss": 0.0319, "step": 5001 }, { "epoch": 5.521258972943126, "grad_norm": 0.6624093651771545, "learning_rate": 1.4997e-05, "loss": 0.0495, "step": 5002 }, { "epoch": 5.522363335173937, "grad_norm": 1.104049563407898, "learning_rate": 1.5e-05, "loss": 0.0331, "step": 5003 }, { "epoch": 5.523467697404749, "grad_norm": 0.46410760283470154, "learning_rate": 1.5003e-05, "loss": 0.0286, "step": 5004 }, { "epoch": 5.5245720596355605, "grad_norm": 0.5366384387016296, "learning_rate": 1.5006e-05, "loss": 0.0236, "step": 5005 }, { "epoch": 5.525676421866372, "grad_norm": 0.4133049547672272, "learning_rate": 1.5009e-05, "loss": 0.0212, "step": 5006 }, { "epoch": 5.526780784097184, "grad_norm": 0.7903376221656799, "learning_rate": 1.5012e-05, "loss": 0.0339, "step": 5007 }, { "epoch": 5.527885146327995, "grad_norm": 0.3075636923313141, "learning_rate": 1.5015e-05, "loss": 0.0157, "step": 5008 }, { "epoch": 5.528989508558808, "grad_norm": 0.43828096985816956, "learning_rate": 1.5018000000000001e-05, "loss": 0.0238, "step": 5009 }, { "epoch": 5.530093870789619, "grad_norm": 0.44705721735954285, "learning_rate": 1.5021e-05, "loss": 0.0249, "step": 5010 }, { "epoch": 5.53119823302043, "grad_norm": 0.5229362845420837, "learning_rate": 1.5024e-05, "loss": 0.0351, "step": 5011 }, { "epoch": 5.532302595251243, "grad_norm": 0.6815115213394165, "learning_rate": 1.5027e-05, "loss": 0.0382, "step": 5012 }, { "epoch": 5.533406957482054, "grad_norm": 0.6281918883323669, "learning_rate": 1.503e-05, "loss": 0.0277, "step": 5013 }, { "epoch": 5.534511319712866, "grad_norm": 0.4962705373764038, "learning_rate": 1.5033e-05, "loss": 0.0291, "step": 
5014 }, { "epoch": 5.5356156819436775, "grad_norm": 0.8369146585464478, "learning_rate": 1.5036e-05, "loss": 0.04, "step": 5015 }, { "epoch": 5.536720044174489, "grad_norm": 0.5699604153633118, "learning_rate": 1.5039e-05, "loss": 0.0347, "step": 5016 }, { "epoch": 5.537824406405301, "grad_norm": 0.6354824304580688, "learning_rate": 1.5042e-05, "loss": 0.0286, "step": 5017 }, { "epoch": 5.538928768636112, "grad_norm": 0.7972630858421326, "learning_rate": 1.5044999999999999e-05, "loss": 0.0543, "step": 5018 }, { "epoch": 5.540033130866925, "grad_norm": 1.5970206260681152, "learning_rate": 1.5048000000000002e-05, "loss": 0.0333, "step": 5019 }, { "epoch": 5.541137493097736, "grad_norm": 0.5037512183189392, "learning_rate": 1.5051000000000002e-05, "loss": 0.0254, "step": 5020 }, { "epoch": 5.542241855328548, "grad_norm": 0.5691657066345215, "learning_rate": 1.5054000000000002e-05, "loss": 0.0361, "step": 5021 }, { "epoch": 5.5433462175593595, "grad_norm": 0.46031588315963745, "learning_rate": 1.5057e-05, "loss": 0.0228, "step": 5022 }, { "epoch": 5.544450579790171, "grad_norm": 0.5393099784851074, "learning_rate": 1.506e-05, "loss": 0.0371, "step": 5023 }, { "epoch": 5.545554942020983, "grad_norm": 0.670224130153656, "learning_rate": 1.5063e-05, "loss": 0.0424, "step": 5024 }, { "epoch": 5.546659304251794, "grad_norm": 0.8956446051597595, "learning_rate": 1.5066e-05, "loss": 0.0528, "step": 5025 }, { "epoch": 5.547763666482607, "grad_norm": 0.38950324058532715, "learning_rate": 1.5069e-05, "loss": 0.0259, "step": 5026 }, { "epoch": 5.548868028713418, "grad_norm": 0.6453602910041809, "learning_rate": 1.5071999999999999e-05, "loss": 0.0307, "step": 5027 }, { "epoch": 5.549972390944229, "grad_norm": 0.9806012511253357, "learning_rate": 1.5074999999999999e-05, "loss": 0.0337, "step": 5028 }, { "epoch": 5.551076753175042, "grad_norm": 0.6143108010292053, "learning_rate": 1.5078000000000002e-05, "loss": 0.0309, "step": 5029 }, { "epoch": 5.552181115405853, "grad_norm": 
0.9684534668922424, "learning_rate": 1.5081000000000002e-05, "loss": 0.0683, "step": 5030 }, { "epoch": 5.553285477636665, "grad_norm": 1.133742094039917, "learning_rate": 1.5084000000000002e-05, "loss": 0.3215, "step": 5031 }, { "epoch": 5.5543898398674765, "grad_norm": 1.0412606000900269, "learning_rate": 1.5087000000000001e-05, "loss": 0.2868, "step": 5032 }, { "epoch": 5.555494202098288, "grad_norm": 0.8214174509048462, "learning_rate": 1.5090000000000001e-05, "loss": 0.2289, "step": 5033 }, { "epoch": 5.5565985643291, "grad_norm": 0.6997395157814026, "learning_rate": 1.5093e-05, "loss": 0.1559, "step": 5034 }, { "epoch": 5.557702926559911, "grad_norm": 0.8654664158821106, "learning_rate": 1.5095999999999999e-05, "loss": 0.1603, "step": 5035 }, { "epoch": 5.558807288790724, "grad_norm": 0.7350221872329712, "learning_rate": 1.5098999999999999e-05, "loss": 0.1406, "step": 5036 }, { "epoch": 5.559911651021535, "grad_norm": 0.6975065469741821, "learning_rate": 1.5101999999999999e-05, "loss": 0.1388, "step": 5037 }, { "epoch": 5.561016013252347, "grad_norm": 1.061813473701477, "learning_rate": 1.5104999999999999e-05, "loss": 0.2109, "step": 5038 }, { "epoch": 5.5621203754831585, "grad_norm": 0.5174030065536499, "learning_rate": 1.5108000000000002e-05, "loss": 0.0836, "step": 5039 }, { "epoch": 5.56322473771397, "grad_norm": 3.1309151649475098, "learning_rate": 1.5111000000000002e-05, "loss": 0.069, "step": 5040 }, { "epoch": 5.564329099944782, "grad_norm": 0.6557546257972717, "learning_rate": 1.5114000000000001e-05, "loss": 0.0367, "step": 5041 }, { "epoch": 5.565433462175593, "grad_norm": 0.6632775664329529, "learning_rate": 1.5117000000000001e-05, "loss": 0.043, "step": 5042 }, { "epoch": 5.566537824406406, "grad_norm": 0.463255375623703, "learning_rate": 1.5120000000000001e-05, "loss": 0.046, "step": 5043 }, { "epoch": 5.567642186637217, "grad_norm": 0.746256411075592, "learning_rate": 1.5123e-05, "loss": 0.0427, "step": 5044 }, { "epoch": 5.568746548868028, 
"grad_norm": 0.4324902594089508, "learning_rate": 1.5126e-05, "loss": 0.0394, "step": 5045 }, { "epoch": 5.569850911098841, "grad_norm": 0.3146231174468994, "learning_rate": 1.5129e-05, "loss": 0.0204, "step": 5046 }, { "epoch": 5.570955273329652, "grad_norm": 0.5838679075241089, "learning_rate": 1.5131999999999998e-05, "loss": 0.0376, "step": 5047 }, { "epoch": 5.572059635560464, "grad_norm": 0.364554762840271, "learning_rate": 1.5134999999999998e-05, "loss": 0.0238, "step": 5048 }, { "epoch": 5.5731639977912755, "grad_norm": 0.6369569301605225, "learning_rate": 1.5138000000000001e-05, "loss": 0.0363, "step": 5049 }, { "epoch": 5.574268360022087, "grad_norm": 0.7962713241577148, "learning_rate": 1.5141000000000001e-05, "loss": 0.0462, "step": 5050 }, { "epoch": 5.575372722252899, "grad_norm": 0.3653586804866791, "learning_rate": 1.5144000000000001e-05, "loss": 0.0322, "step": 5051 }, { "epoch": 5.57647708448371, "grad_norm": 0.36180487275123596, "learning_rate": 1.5147e-05, "loss": 0.0246, "step": 5052 }, { "epoch": 5.577581446714523, "grad_norm": 3.2723679542541504, "learning_rate": 1.515e-05, "loss": 0.0309, "step": 5053 }, { "epoch": 5.578685808945334, "grad_norm": 0.3757149279117584, "learning_rate": 1.5153e-05, "loss": 0.0206, "step": 5054 }, { "epoch": 5.579790171176146, "grad_norm": 1.214384913444519, "learning_rate": 1.5156e-05, "loss": 0.0547, "step": 5055 }, { "epoch": 5.5808945334069575, "grad_norm": 0.6883654594421387, "learning_rate": 1.5159e-05, "loss": 0.0459, "step": 5056 }, { "epoch": 5.581998895637769, "grad_norm": 1.184962272644043, "learning_rate": 1.5162e-05, "loss": 0.0416, "step": 5057 }, { "epoch": 5.583103257868581, "grad_norm": 0.6945188045501709, "learning_rate": 1.5165e-05, "loss": 0.0309, "step": 5058 }, { "epoch": 5.584207620099392, "grad_norm": 0.9244078397750854, "learning_rate": 1.5168000000000001e-05, "loss": 0.0455, "step": 5059 }, { "epoch": 5.585311982330205, "grad_norm": 0.37980470061302185, "learning_rate": 
1.5171000000000001e-05, "loss": 0.0215, "step": 5060 }, { "epoch": 5.586416344561016, "grad_norm": 0.6490752696990967, "learning_rate": 1.5174e-05, "loss": 0.0296, "step": 5061 }, { "epoch": 5.587520706791828, "grad_norm": 0.5538461804389954, "learning_rate": 1.5177e-05, "loss": 0.0288, "step": 5062 }, { "epoch": 5.58862506902264, "grad_norm": 0.3955281674861908, "learning_rate": 1.518e-05, "loss": 0.0289, "step": 5063 }, { "epoch": 5.589729431253451, "grad_norm": 0.5643457770347595, "learning_rate": 1.5183e-05, "loss": 0.0279, "step": 5064 }, { "epoch": 5.590833793484263, "grad_norm": 0.3889906406402588, "learning_rate": 1.5186e-05, "loss": 0.0205, "step": 5065 }, { "epoch": 5.5919381557150745, "grad_norm": 0.5633745789527893, "learning_rate": 1.5189e-05, "loss": 0.0258, "step": 5066 }, { "epoch": 5.593042517945886, "grad_norm": 0.3362528085708618, "learning_rate": 1.5192e-05, "loss": 0.0247, "step": 5067 }, { "epoch": 5.594146880176698, "grad_norm": 0.30605506896972656, "learning_rate": 1.5195e-05, "loss": 0.0173, "step": 5068 }, { "epoch": 5.595251242407509, "grad_norm": 0.5720203518867493, "learning_rate": 1.5198000000000003e-05, "loss": 0.036, "step": 5069 }, { "epoch": 5.596355604638322, "grad_norm": 0.5107905864715576, "learning_rate": 1.5201000000000002e-05, "loss": 0.0438, "step": 5070 }, { "epoch": 5.597459966869133, "grad_norm": 0.4274516701698303, "learning_rate": 1.5204e-05, "loss": 0.0261, "step": 5071 }, { "epoch": 5.598564329099945, "grad_norm": 0.6656330823898315, "learning_rate": 1.5207e-05, "loss": 0.0307, "step": 5072 }, { "epoch": 5.5996686913307565, "grad_norm": 1.0119376182556152, "learning_rate": 1.521e-05, "loss": 0.036, "step": 5073 }, { "epoch": 5.600773053561568, "grad_norm": 0.5492851138114929, "learning_rate": 1.5213e-05, "loss": 0.0287, "step": 5074 }, { "epoch": 5.60187741579238, "grad_norm": 1.7699081897735596, "learning_rate": 1.5216e-05, "loss": 0.0549, "step": 5075 }, { "epoch": 5.602981778023191, "grad_norm": 1.154650330543518, 
"learning_rate": 1.5219e-05, "loss": 0.0412, "step": 5076 }, { "epoch": 5.604086140254004, "grad_norm": 0.5953810811042786, "learning_rate": 1.5222e-05, "loss": 0.0293, "step": 5077 }, { "epoch": 5.605190502484815, "grad_norm": 0.6176736354827881, "learning_rate": 1.5224999999999999e-05, "loss": 0.0402, "step": 5078 }, { "epoch": 5.606294864715627, "grad_norm": 1.1453912258148193, "learning_rate": 1.5228000000000002e-05, "loss": 0.0594, "step": 5079 }, { "epoch": 5.6073992269464386, "grad_norm": 0.7845963835716248, "learning_rate": 1.5231000000000002e-05, "loss": 0.0435, "step": 5080 }, { "epoch": 5.60850358917725, "grad_norm": 1.4899189472198486, "learning_rate": 1.5234000000000002e-05, "loss": 0.3336, "step": 5081 }, { "epoch": 5.609607951408062, "grad_norm": 1.0306134223937988, "learning_rate": 1.5237000000000002e-05, "loss": 0.2765, "step": 5082 }, { "epoch": 5.6107123136388735, "grad_norm": 0.8558189272880554, "learning_rate": 1.524e-05, "loss": 0.2482, "step": 5083 }, { "epoch": 5.611816675869685, "grad_norm": 0.6379735469818115, "learning_rate": 1.5243e-05, "loss": 0.1959, "step": 5084 }, { "epoch": 5.612921038100497, "grad_norm": 0.7670885324478149, "learning_rate": 1.5246e-05, "loss": 0.179, "step": 5085 }, { "epoch": 5.614025400331308, "grad_norm": 0.9331213235855103, "learning_rate": 1.5249e-05, "loss": 0.1307, "step": 5086 }, { "epoch": 5.615129762562121, "grad_norm": 0.6391100883483887, "learning_rate": 1.5251999999999999e-05, "loss": 0.0923, "step": 5087 }, { "epoch": 5.616234124792932, "grad_norm": 0.5737833976745605, "learning_rate": 1.5254999999999999e-05, "loss": 0.0969, "step": 5088 }, { "epoch": 5.617338487023744, "grad_norm": 0.42823565006256104, "learning_rate": 1.5258000000000002e-05, "loss": 0.0421, "step": 5089 }, { "epoch": 5.6184428492545555, "grad_norm": 0.46927329897880554, "learning_rate": 1.5261000000000002e-05, "loss": 0.0459, "step": 5090 }, { "epoch": 5.619547211485367, "grad_norm": 0.4882948398590088, "learning_rate": 1.5264e-05, 
"loss": 0.0409, "step": 5091 }, { "epoch": 5.620651573716179, "grad_norm": 0.5282712578773499, "learning_rate": 1.5267e-05, "loss": 0.0423, "step": 5092 }, { "epoch": 5.62175593594699, "grad_norm": 0.7151260375976562, "learning_rate": 1.527e-05, "loss": 0.045, "step": 5093 }, { "epoch": 5.622860298177803, "grad_norm": 0.584540605545044, "learning_rate": 1.5273e-05, "loss": 0.0313, "step": 5094 }, { "epoch": 5.623964660408614, "grad_norm": 0.4192463159561157, "learning_rate": 1.5276e-05, "loss": 0.0417, "step": 5095 }, { "epoch": 5.625069022639426, "grad_norm": 1.0866589546203613, "learning_rate": 1.5279e-05, "loss": 0.0273, "step": 5096 }, { "epoch": 5.6261733848702375, "grad_norm": 0.6645435094833374, "learning_rate": 1.5282e-05, "loss": 0.0529, "step": 5097 }, { "epoch": 5.627277747101049, "grad_norm": 2.3707547187805176, "learning_rate": 1.5285e-05, "loss": 0.0212, "step": 5098 }, { "epoch": 5.628382109331861, "grad_norm": 7.176962375640869, "learning_rate": 1.5288000000000003e-05, "loss": 0.0366, "step": 5099 }, { "epoch": 5.6294864715626725, "grad_norm": 0.40768933296203613, "learning_rate": 1.5291000000000003e-05, "loss": 0.0354, "step": 5100 }, { "epoch": 5.630590833793484, "grad_norm": 0.6756273508071899, "learning_rate": 1.5294000000000003e-05, "loss": 0.0281, "step": 5101 }, { "epoch": 5.631695196024296, "grad_norm": 0.5946435332298279, "learning_rate": 1.5297e-05, "loss": 0.0616, "step": 5102 }, { "epoch": 5.632799558255107, "grad_norm": 0.6459159851074219, "learning_rate": 1.53e-05, "loss": 0.0363, "step": 5103 }, { "epoch": 5.63390392048592, "grad_norm": 2.933478593826294, "learning_rate": 1.5303e-05, "loss": 0.0579, "step": 5104 }, { "epoch": 5.635008282716731, "grad_norm": 1.6075847148895264, "learning_rate": 1.5306e-05, "loss": 0.0425, "step": 5105 }, { "epoch": 5.636112644947543, "grad_norm": 0.44883596897125244, "learning_rate": 1.5309e-05, "loss": 0.0228, "step": 5106 }, { "epoch": 5.6372170071783545, "grad_norm": 12.561365127563477, 
"learning_rate": 1.5312e-05, "loss": 0.106, "step": 5107 }, { "epoch": 5.638321369409166, "grad_norm": 0.5480007529258728, "learning_rate": 1.5314999999999998e-05, "loss": 0.0311, "step": 5108 }, { "epoch": 5.639425731639978, "grad_norm": 0.3452880382537842, "learning_rate": 1.5318e-05, "loss": 0.0225, "step": 5109 }, { "epoch": 5.640530093870789, "grad_norm": 0.3444657623767853, "learning_rate": 1.5321e-05, "loss": 0.0193, "step": 5110 }, { "epoch": 5.641634456101602, "grad_norm": 0.6470435857772827, "learning_rate": 1.5324e-05, "loss": 0.0336, "step": 5111 }, { "epoch": 5.642738818332413, "grad_norm": 0.7813927531242371, "learning_rate": 1.5327e-05, "loss": 0.0553, "step": 5112 }, { "epoch": 5.643843180563225, "grad_norm": 0.4509355127811432, "learning_rate": 1.533e-05, "loss": 0.0301, "step": 5113 }, { "epoch": 5.6449475427940365, "grad_norm": 0.5771006941795349, "learning_rate": 1.5333e-05, "loss": 0.0372, "step": 5114 }, { "epoch": 5.646051905024848, "grad_norm": 0.5891707539558411, "learning_rate": 1.5336e-05, "loss": 0.0235, "step": 5115 }, { "epoch": 5.64715626725566, "grad_norm": 0.585747241973877, "learning_rate": 1.5339e-05, "loss": 0.0416, "step": 5116 }, { "epoch": 5.6482606294864715, "grad_norm": 0.7235275506973267, "learning_rate": 1.5342e-05, "loss": 0.0291, "step": 5117 }, { "epoch": 5.649364991717283, "grad_norm": 0.48028209805488586, "learning_rate": 1.5345e-05, "loss": 0.0306, "step": 5118 }, { "epoch": 5.650469353948095, "grad_norm": 0.6621924638748169, "learning_rate": 1.5348000000000003e-05, "loss": 0.0321, "step": 5119 }, { "epoch": 5.651573716178906, "grad_norm": 0.6012015342712402, "learning_rate": 1.5351000000000003e-05, "loss": 0.0444, "step": 5120 }, { "epoch": 5.652678078409719, "grad_norm": 1.0940450429916382, "learning_rate": 1.5354000000000002e-05, "loss": 0.0496, "step": 5121 }, { "epoch": 5.65378244064053, "grad_norm": 0.6103255152702332, "learning_rate": 1.5357000000000002e-05, "loss": 0.0232, "step": 5122 }, { "epoch": 
5.654886802871342, "grad_norm": 0.5116482377052307, "learning_rate": 1.5360000000000002e-05, "loss": 0.0212, "step": 5123 }, { "epoch": 5.6559911651021535, "grad_norm": 0.9797738790512085, "learning_rate": 1.5363000000000002e-05, "loss": 0.0454, "step": 5124 }, { "epoch": 5.657095527332965, "grad_norm": 0.8044373393058777, "learning_rate": 1.5366e-05, "loss": 0.0566, "step": 5125 }, { "epoch": 5.658199889563777, "grad_norm": 0.763077437877655, "learning_rate": 1.5368999999999998e-05, "loss": 0.0587, "step": 5126 }, { "epoch": 5.659304251794588, "grad_norm": 0.7200475931167603, "learning_rate": 1.5371999999999998e-05, "loss": 0.0444, "step": 5127 }, { "epoch": 5.660408614025401, "grad_norm": 0.5769504308700562, "learning_rate": 1.5374999999999998e-05, "loss": 0.0331, "step": 5128 }, { "epoch": 5.661512976256212, "grad_norm": 0.9569725394248962, "learning_rate": 1.5377999999999997e-05, "loss": 0.0296, "step": 5129 }, { "epoch": 5.662617338487024, "grad_norm": 1.0673818588256836, "learning_rate": 1.5381e-05, "loss": 0.0792, "step": 5130 }, { "epoch": 5.6637217007178355, "grad_norm": 1.4124268293380737, "learning_rate": 1.5384e-05, "loss": 0.3097, "step": 5131 }, { "epoch": 5.664826062948647, "grad_norm": 1.1347039937973022, "learning_rate": 1.5387e-05, "loss": 0.2426, "step": 5132 }, { "epoch": 5.665930425179459, "grad_norm": 0.9034584164619446, "learning_rate": 1.539e-05, "loss": 0.1949, "step": 5133 }, { "epoch": 5.6670347874102704, "grad_norm": 1.2757508754730225, "learning_rate": 1.5393e-05, "loss": 0.2098, "step": 5134 }, { "epoch": 5.668139149641082, "grad_norm": 2.462360143661499, "learning_rate": 1.5396e-05, "loss": 0.1608, "step": 5135 }, { "epoch": 5.669243511871894, "grad_norm": 0.9870026707649231, "learning_rate": 1.5399e-05, "loss": 0.1345, "step": 5136 }, { "epoch": 5.670347874102705, "grad_norm": 0.9018725752830505, "learning_rate": 1.5402e-05, "loss": 0.1117, "step": 5137 }, { "epoch": 5.671452236333518, "grad_norm": 1.0059219598770142, 
"learning_rate": 1.5405e-05, "loss": 0.1518, "step": 5138 }, { "epoch": 5.672556598564329, "grad_norm": 0.4426823556423187, "learning_rate": 1.5408e-05, "loss": 0.0433, "step": 5139 }, { "epoch": 5.673660960795141, "grad_norm": 0.5197932124137878, "learning_rate": 1.5411000000000002e-05, "loss": 0.0727, "step": 5140 }, { "epoch": 5.6747653230259525, "grad_norm": 0.7459703683853149, "learning_rate": 1.5414000000000002e-05, "loss": 0.0422, "step": 5141 }, { "epoch": 5.675869685256764, "grad_norm": 0.5296281576156616, "learning_rate": 1.5417e-05, "loss": 0.042, "step": 5142 }, { "epoch": 5.676974047487576, "grad_norm": 0.4196280837059021, "learning_rate": 1.542e-05, "loss": 0.0272, "step": 5143 }, { "epoch": 5.678078409718387, "grad_norm": 0.8876551389694214, "learning_rate": 1.5423e-05, "loss": 0.0396, "step": 5144 }, { "epoch": 5.6791827719492, "grad_norm": 0.5365055799484253, "learning_rate": 1.5426e-05, "loss": 0.0344, "step": 5145 }, { "epoch": 5.680287134180011, "grad_norm": 0.5032123327255249, "learning_rate": 1.5429e-05, "loss": 0.0311, "step": 5146 }, { "epoch": 5.681391496410823, "grad_norm": 0.5716186165809631, "learning_rate": 1.5432e-05, "loss": 0.0391, "step": 5147 }, { "epoch": 5.6824958586416345, "grad_norm": 0.44260185956954956, "learning_rate": 1.5435e-05, "loss": 0.0264, "step": 5148 }, { "epoch": 5.683600220872446, "grad_norm": 0.5030239224433899, "learning_rate": 1.5438e-05, "loss": 0.0217, "step": 5149 }, { "epoch": 5.684704583103258, "grad_norm": 0.5805873274803162, "learning_rate": 1.5441000000000003e-05, "loss": 0.0289, "step": 5150 }, { "epoch": 5.685808945334069, "grad_norm": 0.4746769964694977, "learning_rate": 1.5444e-05, "loss": 0.0216, "step": 5151 }, { "epoch": 5.686913307564881, "grad_norm": 0.4653078019618988, "learning_rate": 1.5447e-05, "loss": 0.0326, "step": 5152 }, { "epoch": 5.688017669795693, "grad_norm": 0.3625011742115021, "learning_rate": 1.545e-05, "loss": 0.0206, "step": 5153 }, { "epoch": 5.689122032026504, "grad_norm": 
0.7567986845970154, "learning_rate": 1.5453e-05, "loss": 0.0553, "step": 5154 }, { "epoch": 5.690226394257317, "grad_norm": 0.4502231478691101, "learning_rate": 1.5456e-05, "loss": 0.0393, "step": 5155 }, { "epoch": 5.691330756488128, "grad_norm": 0.5044530630111694, "learning_rate": 1.5459e-05, "loss": 0.0311, "step": 5156 }, { "epoch": 5.69243511871894, "grad_norm": 0.6354838013648987, "learning_rate": 1.5462e-05, "loss": 0.0468, "step": 5157 }, { "epoch": 5.6935394809497515, "grad_norm": 0.43927353620529175, "learning_rate": 1.5465e-05, "loss": 0.026, "step": 5158 }, { "epoch": 5.694643843180563, "grad_norm": 0.5004436373710632, "learning_rate": 1.5467999999999998e-05, "loss": 0.0259, "step": 5159 }, { "epoch": 5.695748205411375, "grad_norm": 0.3243292272090912, "learning_rate": 1.5471e-05, "loss": 0.0199, "step": 5160 }, { "epoch": 5.696852567642186, "grad_norm": 0.4224162697792053, "learning_rate": 1.5474e-05, "loss": 0.0256, "step": 5161 }, { "epoch": 5.697956929872999, "grad_norm": 0.5498998165130615, "learning_rate": 1.5477e-05, "loss": 0.0259, "step": 5162 }, { "epoch": 5.69906129210381, "grad_norm": 0.41305872797966003, "learning_rate": 1.548e-05, "loss": 0.0179, "step": 5163 }, { "epoch": 5.700165654334622, "grad_norm": 0.7352316975593567, "learning_rate": 1.5483e-05, "loss": 0.0274, "step": 5164 }, { "epoch": 5.7012700165654335, "grad_norm": 1.2140165567398071, "learning_rate": 1.5486e-05, "loss": 0.0325, "step": 5165 }, { "epoch": 5.702374378796245, "grad_norm": 0.6804016828536987, "learning_rate": 1.5489e-05, "loss": 0.0321, "step": 5166 }, { "epoch": 5.703478741027057, "grad_norm": 1.349866509437561, "learning_rate": 1.5492e-05, "loss": 0.0443, "step": 5167 }, { "epoch": 5.704583103257868, "grad_norm": 0.47500163316726685, "learning_rate": 1.5495e-05, "loss": 0.032, "step": 5168 }, { "epoch": 5.705687465488681, "grad_norm": 0.5636224746704102, "learning_rate": 1.5498e-05, "loss": 0.0233, "step": 5169 }, { "epoch": 5.706791827719492, "grad_norm": 
0.7500084042549133, "learning_rate": 1.5501000000000003e-05, "loss": 0.0386, "step": 5170 }, { "epoch": 5.707896189950303, "grad_norm": 0.391875296831131, "learning_rate": 1.5504000000000003e-05, "loss": 0.0218, "step": 5171 }, { "epoch": 5.709000552181116, "grad_norm": 0.4462122917175293, "learning_rate": 1.5507000000000002e-05, "loss": 0.0323, "step": 5172 }, { "epoch": 5.710104914411927, "grad_norm": 0.5740343332290649, "learning_rate": 1.5510000000000002e-05, "loss": 0.0422, "step": 5173 }, { "epoch": 5.711209276642739, "grad_norm": 0.6477903127670288, "learning_rate": 1.5513000000000002e-05, "loss": 0.0342, "step": 5174 }, { "epoch": 5.7123136388735505, "grad_norm": 0.6573674082756042, "learning_rate": 1.5516000000000002e-05, "loss": 0.0402, "step": 5175 }, { "epoch": 5.713418001104362, "grad_norm": 0.5508013367652893, "learning_rate": 1.5518999999999998e-05, "loss": 0.0311, "step": 5176 }, { "epoch": 5.714522363335174, "grad_norm": 1.1944445371627808, "learning_rate": 1.5521999999999998e-05, "loss": 0.0434, "step": 5177 }, { "epoch": 5.715626725565985, "grad_norm": 1.2880324125289917, "learning_rate": 1.5524999999999998e-05, "loss": 0.0564, "step": 5178 }, { "epoch": 5.716731087796798, "grad_norm": 0.7718639969825745, "learning_rate": 1.5527999999999998e-05, "loss": 0.0278, "step": 5179 }, { "epoch": 5.717835450027609, "grad_norm": 1.23234224319458, "learning_rate": 1.5531e-05, "loss": 0.0495, "step": 5180 }, { "epoch": 5.718939812258421, "grad_norm": 1.3895751237869263, "learning_rate": 1.5534e-05, "loss": 0.39, "step": 5181 }, { "epoch": 5.7200441744892325, "grad_norm": 1.603262186050415, "learning_rate": 1.5537e-05, "loss": 0.305, "step": 5182 }, { "epoch": 5.721148536720044, "grad_norm": 1.1293165683746338, "learning_rate": 1.554e-05, "loss": 0.2618, "step": 5183 }, { "epoch": 5.722252898950856, "grad_norm": 0.900428831577301, "learning_rate": 1.5543e-05, "loss": 0.2193, "step": 5184 }, { "epoch": 5.723357261181667, "grad_norm": 0.7092989087104797, 
"learning_rate": 1.5546e-05, "loss": 0.162, "step": 5185 }, { "epoch": 5.72446162341248, "grad_norm": 1.2702412605285645, "learning_rate": 1.5549e-05, "loss": 0.1716, "step": 5186 }, { "epoch": 5.725565985643291, "grad_norm": 0.8600629568099976, "learning_rate": 1.5552e-05, "loss": 0.1057, "step": 5187 }, { "epoch": 5.726670347874102, "grad_norm": 1.1261273622512817, "learning_rate": 1.5555e-05, "loss": 0.1479, "step": 5188 }, { "epoch": 5.727774710104915, "grad_norm": 0.5486692786216736, "learning_rate": 1.5558e-05, "loss": 0.0681, "step": 5189 }, { "epoch": 5.728879072335726, "grad_norm": 0.6241763830184937, "learning_rate": 1.5561000000000002e-05, "loss": 0.0698, "step": 5190 }, { "epoch": 5.729983434566538, "grad_norm": 0.4540858566761017, "learning_rate": 1.5564000000000002e-05, "loss": 0.0399, "step": 5191 }, { "epoch": 5.7310877967973495, "grad_norm": 0.6342376470565796, "learning_rate": 1.5567000000000002e-05, "loss": 0.0352, "step": 5192 }, { "epoch": 5.732192159028161, "grad_norm": 0.665040910243988, "learning_rate": 1.5570000000000002e-05, "loss": 0.0373, "step": 5193 }, { "epoch": 5.733296521258973, "grad_norm": 0.5114085674285889, "learning_rate": 1.5573e-05, "loss": 0.0442, "step": 5194 }, { "epoch": 5.734400883489784, "grad_norm": 0.489018976688385, "learning_rate": 1.5576e-05, "loss": 0.0258, "step": 5195 }, { "epoch": 5.735505245720597, "grad_norm": 0.3636722266674042, "learning_rate": 1.5579e-05, "loss": 0.035, "step": 5196 }, { "epoch": 5.736609607951408, "grad_norm": 0.3471483588218689, "learning_rate": 1.5582e-05, "loss": 0.0331, "step": 5197 }, { "epoch": 5.73771397018222, "grad_norm": 1.1589665412902832, "learning_rate": 1.5585e-05, "loss": 0.0657, "step": 5198 }, { "epoch": 5.7388183324130315, "grad_norm": 0.4972028434276581, "learning_rate": 1.5588e-05, "loss": 0.0339, "step": 5199 }, { "epoch": 5.739922694643843, "grad_norm": 0.45974045991897583, "learning_rate": 1.5591e-05, "loss": 0.0194, "step": 5200 }, { "epoch": 5.741027056874655, 
"grad_norm": 0.9932156205177307, "learning_rate": 1.5594e-05, "loss": 0.0242, "step": 5201 }, { "epoch": 5.742131419105466, "grad_norm": 0.733561098575592, "learning_rate": 1.5597e-05, "loss": 0.0345, "step": 5202 }, { "epoch": 5.743235781336279, "grad_norm": 0.6537988781929016, "learning_rate": 1.56e-05, "loss": 0.0306, "step": 5203 }, { "epoch": 5.74434014356709, "grad_norm": 0.4957137107849121, "learning_rate": 1.5603e-05, "loss": 0.037, "step": 5204 }, { "epoch": 5.745444505797901, "grad_norm": 0.5452253818511963, "learning_rate": 1.5606e-05, "loss": 0.0353, "step": 5205 }, { "epoch": 5.746548868028714, "grad_norm": 0.8762975335121155, "learning_rate": 1.5609e-05, "loss": 0.0358, "step": 5206 }, { "epoch": 5.747653230259525, "grad_norm": 0.6286513209342957, "learning_rate": 1.5612e-05, "loss": 0.0376, "step": 5207 }, { "epoch": 5.748757592490337, "grad_norm": 0.41948065161705017, "learning_rate": 1.5615e-05, "loss": 0.0213, "step": 5208 }, { "epoch": 5.7498619547211485, "grad_norm": 0.45817825198173523, "learning_rate": 1.5618e-05, "loss": 0.0291, "step": 5209 }, { "epoch": 5.75096631695196, "grad_norm": 0.3705160915851593, "learning_rate": 1.5621000000000002e-05, "loss": 0.0186, "step": 5210 }, { "epoch": 5.752070679182772, "grad_norm": 0.6022815108299255, "learning_rate": 1.5624e-05, "loss": 0.0385, "step": 5211 }, { "epoch": 5.753175041413583, "grad_norm": 0.4859312176704407, "learning_rate": 1.5627e-05, "loss": 0.0221, "step": 5212 }, { "epoch": 5.754279403644396, "grad_norm": 0.5388861894607544, "learning_rate": 1.563e-05, "loss": 0.0299, "step": 5213 }, { "epoch": 5.755383765875207, "grad_norm": 0.6980404853820801, "learning_rate": 1.5633e-05, "loss": 0.0366, "step": 5214 }, { "epoch": 5.756488128106019, "grad_norm": 0.48355117440223694, "learning_rate": 1.5636e-05, "loss": 0.0358, "step": 5215 }, { "epoch": 5.7575924903368305, "grad_norm": 0.5240550637245178, "learning_rate": 1.5639e-05, "loss": 0.0267, "step": 5216 }, { "epoch": 5.758696852567642, 
"grad_norm": 0.4749653935432434, "learning_rate": 1.5642e-05, "loss": 0.026, "step": 5217 }, { "epoch": 5.759801214798454, "grad_norm": 0.9259072542190552, "learning_rate": 1.5645e-05, "loss": 0.0299, "step": 5218 }, { "epoch": 5.760905577029265, "grad_norm": 0.5657874941825867, "learning_rate": 1.5648e-05, "loss": 0.0255, "step": 5219 }, { "epoch": 5.762009939260078, "grad_norm": 0.3549799919128418, "learning_rate": 1.5651000000000003e-05, "loss": 0.0172, "step": 5220 }, { "epoch": 5.763114301490889, "grad_norm": 0.44876572489738464, "learning_rate": 1.5654000000000003e-05, "loss": 0.0358, "step": 5221 }, { "epoch": 5.7642186637217, "grad_norm": 1.5954864025115967, "learning_rate": 1.5657000000000003e-05, "loss": 0.0549, "step": 5222 }, { "epoch": 5.765323025952513, "grad_norm": 0.6380386352539062, "learning_rate": 1.5660000000000003e-05, "loss": 0.0474, "step": 5223 }, { "epoch": 5.766427388183324, "grad_norm": 0.8602557182312012, "learning_rate": 1.5663000000000002e-05, "loss": 0.0281, "step": 5224 }, { "epoch": 5.767531750414136, "grad_norm": 1.2971113920211792, "learning_rate": 1.5666e-05, "loss": 0.0499, "step": 5225 }, { "epoch": 5.7686361126449475, "grad_norm": 0.49953562021255493, "learning_rate": 1.5669e-05, "loss": 0.0394, "step": 5226 }, { "epoch": 5.769740474875759, "grad_norm": 0.7887263894081116, "learning_rate": 1.5672e-05, "loss": 0.0275, "step": 5227 }, { "epoch": 5.770844837106571, "grad_norm": 0.6744306087493896, "learning_rate": 1.5674999999999998e-05, "loss": 0.0661, "step": 5228 }, { "epoch": 5.771949199337382, "grad_norm": 0.8028941750526428, "learning_rate": 1.5677999999999998e-05, "loss": 0.052, "step": 5229 }, { "epoch": 5.773053561568195, "grad_norm": 0.48238053917884827, "learning_rate": 1.5681e-05, "loss": 0.0294, "step": 5230 }, { "epoch": 5.774157923799006, "grad_norm": 1.8235200643539429, "learning_rate": 1.5684e-05, "loss": 0.3486, "step": 5231 }, { "epoch": 5.775262286029818, "grad_norm": 1.4305881261825562, "learning_rate": 
1.5687e-05, "loss": 0.2587, "step": 5232 }, { "epoch": 5.7763666482606295, "grad_norm": 1.0788804292678833, "learning_rate": 1.569e-05, "loss": 0.2161, "step": 5233 }, { "epoch": 5.777471010491441, "grad_norm": 1.011764645576477, "learning_rate": 1.5693e-05, "loss": 0.1799, "step": 5234 }, { "epoch": 5.778575372722253, "grad_norm": 0.9243083000183105, "learning_rate": 1.5696e-05, "loss": 0.1574, "step": 5235 }, { "epoch": 5.779679734953064, "grad_norm": 1.5372326374053955, "learning_rate": 1.5699e-05, "loss": 0.1854, "step": 5236 }, { "epoch": 5.780784097183877, "grad_norm": 0.6955728530883789, "learning_rate": 1.5702e-05, "loss": 0.0925, "step": 5237 }, { "epoch": 5.781888459414688, "grad_norm": 0.537190318107605, "learning_rate": 1.5705e-05, "loss": 0.0835, "step": 5238 }, { "epoch": 5.7829928216455, "grad_norm": 0.5489407181739807, "learning_rate": 1.5708e-05, "loss": 0.0836, "step": 5239 }, { "epoch": 5.7840971838763116, "grad_norm": 2.8743765354156494, "learning_rate": 1.5711000000000003e-05, "loss": 0.1074, "step": 5240 }, { "epoch": 5.785201546107123, "grad_norm": 0.8696379065513611, "learning_rate": 1.5714000000000002e-05, "loss": 0.0384, "step": 5241 }, { "epoch": 5.786305908337935, "grad_norm": 0.4804045557975769, "learning_rate": 1.5717000000000002e-05, "loss": 0.0459, "step": 5242 }, { "epoch": 5.7874102705687465, "grad_norm": 0.8092570304870605, "learning_rate": 1.5720000000000002e-05, "loss": 0.0443, "step": 5243 }, { "epoch": 5.788514632799558, "grad_norm": 0.44190534949302673, "learning_rate": 1.5723000000000002e-05, "loss": 0.0341, "step": 5244 }, { "epoch": 5.78961899503037, "grad_norm": 0.6998413801193237, "learning_rate": 1.5726e-05, "loss": 0.0971, "step": 5245 }, { "epoch": 5.790723357261181, "grad_norm": 0.5818309187889099, "learning_rate": 1.5729e-05, "loss": 0.0437, "step": 5246 }, { "epoch": 5.791827719491994, "grad_norm": 0.4279657006263733, "learning_rate": 1.5732e-05, "loss": 0.0283, "step": 5247 }, { "epoch": 5.792932081722805, 
"grad_norm": 0.3901316821575165, "learning_rate": 1.5735e-05, "loss": 0.0159, "step": 5248 }, { "epoch": 5.794036443953617, "grad_norm": 0.5239595174789429, "learning_rate": 1.5737999999999997e-05, "loss": 0.04, "step": 5249 }, { "epoch": 5.7951408061844285, "grad_norm": 0.8636828064918518, "learning_rate": 1.5741e-05, "loss": 0.0284, "step": 5250 }, { "epoch": 5.79624516841524, "grad_norm": 0.3113066554069519, "learning_rate": 1.5744e-05, "loss": 0.0246, "step": 5251 }, { "epoch": 5.797349530646052, "grad_norm": 0.49011537432670593, "learning_rate": 1.5747e-05, "loss": 0.0286, "step": 5252 }, { "epoch": 5.798453892876863, "grad_norm": 0.6026688814163208, "learning_rate": 1.575e-05, "loss": 0.0453, "step": 5253 }, { "epoch": 5.799558255107676, "grad_norm": 0.47657084465026855, "learning_rate": 1.5753e-05, "loss": 0.0308, "step": 5254 }, { "epoch": 5.800662617338487, "grad_norm": 0.5421423316001892, "learning_rate": 1.5756e-05, "loss": 0.031, "step": 5255 }, { "epoch": 5.801766979569299, "grad_norm": 0.6837341785430908, "learning_rate": 1.5759e-05, "loss": 0.04, "step": 5256 }, { "epoch": 5.8028713418001105, "grad_norm": 0.8211198449134827, "learning_rate": 1.5762e-05, "loss": 0.0711, "step": 5257 }, { "epoch": 5.803975704030922, "grad_norm": 0.8275536894798279, "learning_rate": 1.5765e-05, "loss": 0.0277, "step": 5258 }, { "epoch": 5.805080066261734, "grad_norm": 0.7565256953239441, "learning_rate": 1.5768e-05, "loss": 0.0529, "step": 5259 }, { "epoch": 5.8061844284925455, "grad_norm": 0.964436948299408, "learning_rate": 1.5771e-05, "loss": 0.0373, "step": 5260 }, { "epoch": 5.807288790723357, "grad_norm": 1.0582491159439087, "learning_rate": 1.5774000000000002e-05, "loss": 0.0521, "step": 5261 }, { "epoch": 5.808393152954169, "grad_norm": 0.481321781873703, "learning_rate": 1.5777e-05, "loss": 0.0321, "step": 5262 }, { "epoch": 5.80949751518498, "grad_norm": 0.7000911235809326, "learning_rate": 1.578e-05, "loss": 0.0435, "step": 5263 }, { "epoch": 
5.810601877415793, "grad_norm": 0.7045184373855591, "learning_rate": 1.5783e-05, "loss": 0.0304, "step": 5264 }, { "epoch": 5.811706239646604, "grad_norm": 0.4055021107196808, "learning_rate": 1.5786e-05, "loss": 0.029, "step": 5265 }, { "epoch": 5.812810601877416, "grad_norm": 0.44551607966423035, "learning_rate": 1.5789e-05, "loss": 0.0334, "step": 5266 }, { "epoch": 5.8139149641082275, "grad_norm": 0.6657538414001465, "learning_rate": 1.5792e-05, "loss": 0.0418, "step": 5267 }, { "epoch": 5.815019326339039, "grad_norm": 0.7724745273590088, "learning_rate": 1.5795e-05, "loss": 0.0413, "step": 5268 }, { "epoch": 5.816123688569851, "grad_norm": 0.5107478499412537, "learning_rate": 1.5798e-05, "loss": 0.0272, "step": 5269 }, { "epoch": 5.817228050800662, "grad_norm": 0.6242534518241882, "learning_rate": 1.5801e-05, "loss": 0.0371, "step": 5270 }, { "epoch": 5.818332413031475, "grad_norm": 1.7220441102981567, "learning_rate": 1.5804000000000003e-05, "loss": 0.039, "step": 5271 }, { "epoch": 5.819436775262286, "grad_norm": 0.6157863140106201, "learning_rate": 1.5807000000000003e-05, "loss": 0.0299, "step": 5272 }, { "epoch": 5.820541137493098, "grad_norm": 0.8953573703765869, "learning_rate": 1.5810000000000003e-05, "loss": 0.0395, "step": 5273 }, { "epoch": 5.8216454997239095, "grad_norm": 1.0644323825836182, "learning_rate": 1.5813e-05, "loss": 0.0693, "step": 5274 }, { "epoch": 5.822749861954721, "grad_norm": 0.46419018507003784, "learning_rate": 1.5816e-05, "loss": 0.0266, "step": 5275 }, { "epoch": 5.823854224185533, "grad_norm": 0.4745626449584961, "learning_rate": 1.5819e-05, "loss": 0.0195, "step": 5276 }, { "epoch": 5.8249585864163445, "grad_norm": 0.9164865016937256, "learning_rate": 1.5822e-05, "loss": 0.0648, "step": 5277 }, { "epoch": 5.826062948647156, "grad_norm": 1.3559211492538452, "learning_rate": 1.5825e-05, "loss": 0.0334, "step": 5278 }, { "epoch": 5.827167310877968, "grad_norm": 0.46566033363342285, "learning_rate": 1.5827999999999998e-05, 
"loss": 0.0276, "step": 5279 }, { "epoch": 5.828271673108779, "grad_norm": 0.686750590801239, "learning_rate": 1.5830999999999998e-05, "loss": 0.0424, "step": 5280 }, { "epoch": 5.829376035339592, "grad_norm": 1.2030807733535767, "learning_rate": 1.5834e-05, "loss": 0.3391, "step": 5281 }, { "epoch": 5.830480397570403, "grad_norm": 1.0089064836502075, "learning_rate": 1.5837e-05, "loss": 0.3028, "step": 5282 }, { "epoch": 5.831584759801215, "grad_norm": 1.0316194295883179, "learning_rate": 1.584e-05, "loss": 0.2957, "step": 5283 }, { "epoch": 5.8326891220320265, "grad_norm": 0.7897334694862366, "learning_rate": 1.5843e-05, "loss": 0.1981, "step": 5284 }, { "epoch": 5.833793484262838, "grad_norm": 1.00235116481781, "learning_rate": 1.5846e-05, "loss": 0.2021, "step": 5285 }, { "epoch": 5.83489784649365, "grad_norm": 1.0677906274795532, "learning_rate": 1.5849e-05, "loss": 0.2057, "step": 5286 }, { "epoch": 5.836002208724461, "grad_norm": 0.688156247138977, "learning_rate": 1.5852e-05, "loss": 0.1305, "step": 5287 }, { "epoch": 5.837106570955274, "grad_norm": 0.6246659159660339, "learning_rate": 1.5855e-05, "loss": 0.0774, "step": 5288 }, { "epoch": 5.838210933186085, "grad_norm": 0.43832382559776306, "learning_rate": 1.5858e-05, "loss": 0.0507, "step": 5289 }, { "epoch": 5.839315295416897, "grad_norm": 0.49168720841407776, "learning_rate": 1.5861e-05, "loss": 0.0433, "step": 5290 }, { "epoch": 5.8404196576477085, "grad_norm": 0.38901495933532715, "learning_rate": 1.5864000000000003e-05, "loss": 0.0369, "step": 5291 }, { "epoch": 5.84152401987852, "grad_norm": 0.5616623759269714, "learning_rate": 1.5867000000000002e-05, "loss": 0.0362, "step": 5292 }, { "epoch": 5.842628382109332, "grad_norm": 0.3417746126651764, "learning_rate": 1.5870000000000002e-05, "loss": 0.0284, "step": 5293 }, { "epoch": 5.8437327443401434, "grad_norm": 0.42363280057907104, "learning_rate": 1.5873000000000002e-05, "loss": 0.0424, "step": 5294 }, { "epoch": 5.844837106570955, "grad_norm": 
0.4434826970100403, "learning_rate": 1.5876000000000002e-05, "loss": 0.0197, "step": 5295 }, { "epoch": 5.845941468801767, "grad_norm": 0.5141258835792542, "learning_rate": 1.5879e-05, "loss": 0.0273, "step": 5296 }, { "epoch": 5.847045831032578, "grad_norm": 0.2944174110889435, "learning_rate": 1.5882e-05, "loss": 0.0196, "step": 5297 }, { "epoch": 5.848150193263391, "grad_norm": 0.5258718132972717, "learning_rate": 1.5884999999999998e-05, "loss": 0.026, "step": 5298 }, { "epoch": 5.849254555494202, "grad_norm": 0.3165179193019867, "learning_rate": 1.5887999999999998e-05, "loss": 0.0234, "step": 5299 }, { "epoch": 5.850358917725014, "grad_norm": 0.5210088491439819, "learning_rate": 1.5890999999999997e-05, "loss": 0.0295, "step": 5300 }, { "epoch": 5.8514632799558255, "grad_norm": 0.4082726538181305, "learning_rate": 1.5894e-05, "loss": 0.0252, "step": 5301 }, { "epoch": 5.852567642186637, "grad_norm": 0.6857134699821472, "learning_rate": 1.5897e-05, "loss": 0.0358, "step": 5302 }, { "epoch": 5.853672004417449, "grad_norm": 0.7589766383171082, "learning_rate": 1.59e-05, "loss": 0.037, "step": 5303 }, { "epoch": 5.85477636664826, "grad_norm": 0.5006126761436462, "learning_rate": 1.5903e-05, "loss": 0.0357, "step": 5304 }, { "epoch": 5.855880728879073, "grad_norm": 2.4391865730285645, "learning_rate": 1.5906e-05, "loss": 0.0968, "step": 5305 }, { "epoch": 5.856985091109884, "grad_norm": 0.7554435133934021, "learning_rate": 1.5909e-05, "loss": 0.0466, "step": 5306 }, { "epoch": 5.858089453340696, "grad_norm": 0.4603745639324188, "learning_rate": 1.5912e-05, "loss": 0.0282, "step": 5307 }, { "epoch": 5.8591938155715075, "grad_norm": 0.4990273714065552, "learning_rate": 1.5915e-05, "loss": 0.0272, "step": 5308 }, { "epoch": 5.860298177802319, "grad_norm": 0.6323208808898926, "learning_rate": 1.5918e-05, "loss": 0.0269, "step": 5309 }, { "epoch": 5.861402540033131, "grad_norm": 1.9685364961624146, "learning_rate": 1.5921e-05, "loss": 0.0359, "step": 5310 }, { "epoch": 
5.862506902263942, "grad_norm": 0.6676451563835144, "learning_rate": 1.5924000000000002e-05, "loss": 0.0412, "step": 5311 }, { "epoch": 5.863611264494754, "grad_norm": 0.5075117945671082, "learning_rate": 1.5927000000000002e-05, "loss": 0.0236, "step": 5312 }, { "epoch": 5.864715626725566, "grad_norm": 0.6042349338531494, "learning_rate": 1.593e-05, "loss": 0.0313, "step": 5313 }, { "epoch": 5.865819988956377, "grad_norm": 0.8891940116882324, "learning_rate": 1.5933e-05, "loss": 0.0345, "step": 5314 }, { "epoch": 5.86692435118719, "grad_norm": 0.720758855342865, "learning_rate": 1.5936e-05, "loss": 0.0177, "step": 5315 }, { "epoch": 5.868028713418001, "grad_norm": 0.49828481674194336, "learning_rate": 1.5939e-05, "loss": 0.0355, "step": 5316 }, { "epoch": 5.869133075648813, "grad_norm": 0.3622315526008606, "learning_rate": 1.5942e-05, "loss": 0.0261, "step": 5317 }, { "epoch": 5.8702374378796245, "grad_norm": 0.8243712186813354, "learning_rate": 1.5945e-05, "loss": 0.0292, "step": 5318 }, { "epoch": 5.871341800110436, "grad_norm": 0.31330230832099915, "learning_rate": 1.5948e-05, "loss": 0.0227, "step": 5319 }, { "epoch": 5.872446162341248, "grad_norm": 0.9014034271240234, "learning_rate": 1.5951e-05, "loss": 0.0433, "step": 5320 }, { "epoch": 5.873550524572059, "grad_norm": 0.6825956702232361, "learning_rate": 1.5954000000000003e-05, "loss": 0.0261, "step": 5321 }, { "epoch": 5.874654886802872, "grad_norm": 0.9678831100463867, "learning_rate": 1.5957000000000003e-05, "loss": 0.0373, "step": 5322 }, { "epoch": 5.875759249033683, "grad_norm": 0.8825179934501648, "learning_rate": 1.596e-05, "loss": 0.0493, "step": 5323 }, { "epoch": 5.876863611264495, "grad_norm": 0.5832094550132751, "learning_rate": 1.5963e-05, "loss": 0.0446, "step": 5324 }, { "epoch": 5.8779679734953065, "grad_norm": 0.8089072108268738, "learning_rate": 1.5966e-05, "loss": 0.0395, "step": 5325 }, { "epoch": 5.879072335726118, "grad_norm": 0.5876917243003845, "learning_rate": 1.5969e-05, "loss": 
0.0379, "step": 5326 }, { "epoch": 5.88017669795693, "grad_norm": 0.8455074429512024, "learning_rate": 1.5972e-05, "loss": 0.0331, "step": 5327 }, { "epoch": 5.881281060187741, "grad_norm": 0.8342544436454773, "learning_rate": 1.5975e-05, "loss": 0.0712, "step": 5328 }, { "epoch": 5.882385422418554, "grad_norm": 0.7361119389533997, "learning_rate": 1.5978e-05, "loss": 0.0394, "step": 5329 }, { "epoch": 5.883489784649365, "grad_norm": 1.3661768436431885, "learning_rate": 1.5980999999999998e-05, "loss": 0.073, "step": 5330 }, { "epoch": 5.884594146880176, "grad_norm": 1.4826232194900513, "learning_rate": 1.5984e-05, "loss": 0.3714, "step": 5331 }, { "epoch": 5.885698509110989, "grad_norm": 1.006463885307312, "learning_rate": 1.5987e-05, "loss": 0.2812, "step": 5332 }, { "epoch": 5.8868028713418, "grad_norm": 0.7566355466842651, "learning_rate": 1.599e-05, "loss": 0.2288, "step": 5333 }, { "epoch": 5.887907233572612, "grad_norm": 0.9780274033546448, "learning_rate": 1.5993e-05, "loss": 0.2344, "step": 5334 }, { "epoch": 5.8890115958034235, "grad_norm": 1.1627092361450195, "learning_rate": 1.5996e-05, "loss": 0.2076, "step": 5335 }, { "epoch": 5.890115958034235, "grad_norm": 1.1336625814437866, "learning_rate": 1.5999e-05, "loss": 0.1977, "step": 5336 }, { "epoch": 5.891220320265047, "grad_norm": 0.7550633549690247, "learning_rate": 1.6002e-05, "loss": 0.1465, "step": 5337 }, { "epoch": 5.892324682495858, "grad_norm": 0.6563544869422913, "learning_rate": 1.6005e-05, "loss": 0.1043, "step": 5338 }, { "epoch": 5.893429044726671, "grad_norm": 0.516499400138855, "learning_rate": 1.6008e-05, "loss": 0.0935, "step": 5339 }, { "epoch": 5.894533406957482, "grad_norm": 0.42865267395973206, "learning_rate": 1.6011e-05, "loss": 0.0718, "step": 5340 }, { "epoch": 5.895637769188294, "grad_norm": 0.6709861159324646, "learning_rate": 1.6014000000000003e-05, "loss": 0.0726, "step": 5341 }, { "epoch": 5.8967421314191055, "grad_norm": 0.42828577756881714, "learning_rate": 
1.6017000000000003e-05, "loss": 0.0423, "step": 5342 }, { "epoch": 5.897846493649917, "grad_norm": 1.1527549028396606, "learning_rate": 1.6020000000000002e-05, "loss": 0.092, "step": 5343 }, { "epoch": 5.898950855880729, "grad_norm": 0.6070690751075745, "learning_rate": 1.6023000000000002e-05, "loss": 0.0426, "step": 5344 }, { "epoch": 5.90005521811154, "grad_norm": 0.3192751407623291, "learning_rate": 1.6026000000000002e-05, "loss": 0.0272, "step": 5345 }, { "epoch": 5.901159580342353, "grad_norm": 1.1972309350967407, "learning_rate": 1.6029000000000002e-05, "loss": 0.068, "step": 5346 }, { "epoch": 5.902263942573164, "grad_norm": 0.5084846019744873, "learning_rate": 1.6032e-05, "loss": 0.0398, "step": 5347 }, { "epoch": 5.903368304803975, "grad_norm": 0.9808180928230286, "learning_rate": 1.6034999999999998e-05, "loss": 0.0348, "step": 5348 }, { "epoch": 5.904472667034788, "grad_norm": 0.8308469653129578, "learning_rate": 1.6037999999999998e-05, "loss": 0.0441, "step": 5349 }, { "epoch": 5.905577029265599, "grad_norm": 0.41265878081321716, "learning_rate": 1.6040999999999998e-05, "loss": 0.0296, "step": 5350 }, { "epoch": 5.906681391496411, "grad_norm": 0.7814322710037231, "learning_rate": 1.6044e-05, "loss": 0.038, "step": 5351 }, { "epoch": 5.9077857537272225, "grad_norm": 0.4308355152606964, "learning_rate": 1.6047e-05, "loss": 0.0269, "step": 5352 }, { "epoch": 5.908890115958034, "grad_norm": 0.6570669412612915, "learning_rate": 1.605e-05, "loss": 0.0389, "step": 5353 }, { "epoch": 5.909994478188846, "grad_norm": 0.6427193880081177, "learning_rate": 1.6053e-05, "loss": 0.0289, "step": 5354 }, { "epoch": 5.911098840419657, "grad_norm": 0.4734151065349579, "learning_rate": 1.6056e-05, "loss": 0.0254, "step": 5355 }, { "epoch": 5.91220320265047, "grad_norm": 0.4866812229156494, "learning_rate": 1.6059e-05, "loss": 0.0216, "step": 5356 }, { "epoch": 5.913307564881281, "grad_norm": 0.4134118854999542, "learning_rate": 1.6062e-05, "loss": 0.0203, "step": 5357 }, { 
"epoch": 5.914411927112093, "grad_norm": 0.5266181826591492, "learning_rate": 1.6065e-05, "loss": 0.0264, "step": 5358 }, { "epoch": 5.9155162893429045, "grad_norm": 0.7242282629013062, "learning_rate": 1.6068e-05, "loss": 0.046, "step": 5359 }, { "epoch": 5.916620651573716, "grad_norm": 0.7623076438903809, "learning_rate": 1.6071e-05, "loss": 0.0302, "step": 5360 }, { "epoch": 5.917725013804528, "grad_norm": 1.0981773138046265, "learning_rate": 1.6074000000000002e-05, "loss": 0.0382, "step": 5361 }, { "epoch": 5.918829376035339, "grad_norm": 0.5971862077713013, "learning_rate": 1.6077000000000002e-05, "loss": 0.0374, "step": 5362 }, { "epoch": 5.919933738266152, "grad_norm": 0.5835162401199341, "learning_rate": 1.6080000000000002e-05, "loss": 0.0326, "step": 5363 }, { "epoch": 5.921038100496963, "grad_norm": 0.5973204970359802, "learning_rate": 1.6083000000000002e-05, "loss": 0.0315, "step": 5364 }, { "epoch": 5.922142462727774, "grad_norm": 0.6040316820144653, "learning_rate": 1.6086e-05, "loss": 0.0484, "step": 5365 }, { "epoch": 5.923246824958587, "grad_norm": 0.5431980490684509, "learning_rate": 1.6089e-05, "loss": 0.0302, "step": 5366 }, { "epoch": 5.924351187189398, "grad_norm": 0.6382043361663818, "learning_rate": 1.6092e-05, "loss": 0.0437, "step": 5367 }, { "epoch": 5.92545554942021, "grad_norm": 0.623655378818512, "learning_rate": 1.6095e-05, "loss": 0.0247, "step": 5368 }, { "epoch": 5.9265599116510215, "grad_norm": 0.6705486178398132, "learning_rate": 1.6098e-05, "loss": 0.0504, "step": 5369 }, { "epoch": 5.927664273881833, "grad_norm": 0.2926488518714905, "learning_rate": 1.6101e-05, "loss": 0.0149, "step": 5370 }, { "epoch": 5.928768636112645, "grad_norm": 0.47734713554382324, "learning_rate": 1.6104000000000004e-05, "loss": 0.0368, "step": 5371 }, { "epoch": 5.929872998343456, "grad_norm": 0.5293459892272949, "learning_rate": 1.6107e-05, "loss": 0.0296, "step": 5372 }, { "epoch": 5.930977360574269, "grad_norm": 0.819528341293335, "learning_rate": 
1.611e-05, "loss": 0.0444, "step": 5373 }, { "epoch": 5.93208172280508, "grad_norm": 0.763493001461029, "learning_rate": 1.6113e-05, "loss": 0.0463, "step": 5374 }, { "epoch": 5.933186085035892, "grad_norm": 0.7948997616767883, "learning_rate": 1.6116e-05, "loss": 0.0558, "step": 5375 }, { "epoch": 5.9342904472667035, "grad_norm": 0.5809778571128845, "learning_rate": 1.6119e-05, "loss": 0.0383, "step": 5376 }, { "epoch": 5.935394809497515, "grad_norm": 0.670364260673523, "learning_rate": 1.6122e-05, "loss": 0.0394, "step": 5377 }, { "epoch": 5.936499171728327, "grad_norm": 1.1213356256484985, "learning_rate": 1.6125e-05, "loss": 0.0255, "step": 5378 }, { "epoch": 5.937603533959138, "grad_norm": 0.9232367277145386, "learning_rate": 1.6128e-05, "loss": 0.0407, "step": 5379 }, { "epoch": 5.938707896189951, "grad_norm": 0.6048018932342529, "learning_rate": 1.6131e-05, "loss": 0.0513, "step": 5380 }, { "epoch": 5.939812258420762, "grad_norm": 3.0738587379455566, "learning_rate": 1.6134e-05, "loss": 0.3648, "step": 5381 }, { "epoch": 5.940916620651573, "grad_norm": 0.9528884291648865, "learning_rate": 1.6137e-05, "loss": 0.2386, "step": 5382 }, { "epoch": 5.942020982882386, "grad_norm": 1.0630054473876953, "learning_rate": 1.614e-05, "loss": 0.2656, "step": 5383 }, { "epoch": 5.943125345113197, "grad_norm": 0.8142547011375427, "learning_rate": 1.6143e-05, "loss": 0.1886, "step": 5384 }, { "epoch": 5.944229707344009, "grad_norm": 0.7688286900520325, "learning_rate": 1.6146e-05, "loss": 0.1892, "step": 5385 }, { "epoch": 5.9453340695748205, "grad_norm": 0.94621342420578, "learning_rate": 1.6149e-05, "loss": 0.1867, "step": 5386 }, { "epoch": 5.946438431805632, "grad_norm": 0.7900606393814087, "learning_rate": 1.6152e-05, "loss": 0.1322, "step": 5387 }, { "epoch": 5.947542794036444, "grad_norm": 0.8649300932884216, "learning_rate": 1.6155e-05, "loss": 0.0972, "step": 5388 }, { "epoch": 5.948647156267255, "grad_norm": 0.4374910593032837, "learning_rate": 1.6158e-05, "loss": 
0.0532, "step": 5389 }, { "epoch": 5.949751518498068, "grad_norm": 0.8190727829933167, "learning_rate": 1.6161e-05, "loss": 0.0486, "step": 5390 }, { "epoch": 5.950855880728879, "grad_norm": 0.4594596326351166, "learning_rate": 1.6164e-05, "loss": 0.0405, "step": 5391 }, { "epoch": 5.951960242959691, "grad_norm": 0.5979443192481995, "learning_rate": 1.6167000000000003e-05, "loss": 0.0342, "step": 5392 }, { "epoch": 5.9530646051905025, "grad_norm": 0.39959773421287537, "learning_rate": 1.6170000000000003e-05, "loss": 0.0271, "step": 5393 }, { "epoch": 5.954168967421314, "grad_norm": 0.5065043568611145, "learning_rate": 1.6173000000000003e-05, "loss": 0.045, "step": 5394 }, { "epoch": 5.955273329652126, "grad_norm": 0.50050288438797, "learning_rate": 1.6176000000000002e-05, "loss": 0.0394, "step": 5395 }, { "epoch": 5.956377691882937, "grad_norm": 1.1338233947753906, "learning_rate": 1.6179000000000002e-05, "loss": 0.0997, "step": 5396 }, { "epoch": 5.95748205411375, "grad_norm": 0.5257536172866821, "learning_rate": 1.6182e-05, "loss": 0.0327, "step": 5397 }, { "epoch": 5.958586416344561, "grad_norm": 0.44797593355178833, "learning_rate": 1.6185e-05, "loss": 0.0475, "step": 5398 }, { "epoch": 5.959690778575373, "grad_norm": 0.571107804775238, "learning_rate": 1.6187999999999998e-05, "loss": 0.0179, "step": 5399 }, { "epoch": 5.9607951408061846, "grad_norm": 0.5034916400909424, "learning_rate": 1.6190999999999998e-05, "loss": 0.0192, "step": 5400 }, { "epoch": 5.961899503036996, "grad_norm": 0.5281736254692078, "learning_rate": 1.6193999999999998e-05, "loss": 0.0455, "step": 5401 }, { "epoch": 5.963003865267808, "grad_norm": 0.6263573169708252, "learning_rate": 1.6197e-05, "loss": 0.0321, "step": 5402 }, { "epoch": 5.9641082274986195, "grad_norm": 0.5488722920417786, "learning_rate": 1.62e-05, "loss": 0.0306, "step": 5403 }, { "epoch": 5.965212589729431, "grad_norm": 1.0661922693252563, "learning_rate": 1.6203e-05, "loss": 0.0282, "step": 5404 }, { "epoch": 
5.966316951960243, "grad_norm": 0.42315077781677246, "learning_rate": 1.6206e-05, "loss": 0.0218, "step": 5405 }, { "epoch": 5.967421314191054, "grad_norm": 0.7638886570930481, "learning_rate": 1.6209e-05, "loss": 0.0431, "step": 5406 }, { "epoch": 5.968525676421867, "grad_norm": 0.5250498652458191, "learning_rate": 1.6212e-05, "loss": 0.019, "step": 5407 }, { "epoch": 5.969630038652678, "grad_norm": 0.5078921318054199, "learning_rate": 1.6215e-05, "loss": 0.0406, "step": 5408 }, { "epoch": 5.97073440088349, "grad_norm": 0.2903442084789276, "learning_rate": 1.6218e-05, "loss": 0.0206, "step": 5409 }, { "epoch": 5.9718387631143015, "grad_norm": 0.5087637901306152, "learning_rate": 1.6221e-05, "loss": 0.0221, "step": 5410 }, { "epoch": 5.972943125345113, "grad_norm": 0.6095962524414062, "learning_rate": 1.6224e-05, "loss": 0.0411, "step": 5411 }, { "epoch": 5.974047487575925, "grad_norm": 0.7871698141098022, "learning_rate": 1.6227000000000002e-05, "loss": 0.0329, "step": 5412 }, { "epoch": 5.975151849806736, "grad_norm": 0.6655678749084473, "learning_rate": 1.6230000000000002e-05, "loss": 0.0322, "step": 5413 }, { "epoch": 5.976256212037549, "grad_norm": 0.5781036019325256, "learning_rate": 1.6233000000000002e-05, "loss": 0.0264, "step": 5414 }, { "epoch": 5.97736057426836, "grad_norm": 0.5123391151428223, "learning_rate": 1.6236000000000002e-05, "loss": 0.0367, "step": 5415 }, { "epoch": 5.978464936499172, "grad_norm": 0.9570339322090149, "learning_rate": 1.6239e-05, "loss": 0.0475, "step": 5416 }, { "epoch": 5.9795692987299836, "grad_norm": 0.6755600571632385, "learning_rate": 1.6242e-05, "loss": 0.031, "step": 5417 }, { "epoch": 5.980673660960795, "grad_norm": 0.6752155423164368, "learning_rate": 1.6245e-05, "loss": 0.0327, "step": 5418 }, { "epoch": 5.981778023191607, "grad_norm": 0.914593517780304, "learning_rate": 1.6248e-05, "loss": 0.035, "step": 5419 }, { "epoch": 5.9828823854224185, "grad_norm": 0.8738322854042053, "learning_rate": 1.6251e-05, "loss": 
0.0275, "step": 5420 }, { "epoch": 5.98398674765323, "grad_norm": 0.7182899713516235, "learning_rate": 1.6253999999999997e-05, "loss": 0.0494, "step": 5421 }, { "epoch": 5.985091109884042, "grad_norm": 0.7870276570320129, "learning_rate": 1.6257e-05, "loss": 0.0409, "step": 5422 }, { "epoch": 5.986195472114853, "grad_norm": 0.7645009160041809, "learning_rate": 1.626e-05, "loss": 0.0396, "step": 5423 }, { "epoch": 5.987299834345666, "grad_norm": 0.5547922849655151, "learning_rate": 1.6263e-05, "loss": 0.0315, "step": 5424 }, { "epoch": 5.988404196576477, "grad_norm": 0.43350040912628174, "learning_rate": 1.6266e-05, "loss": 0.0165, "step": 5425 }, { "epoch": 5.989508558807289, "grad_norm": 1.060911774635315, "learning_rate": 1.6269e-05, "loss": 0.059, "step": 5426 }, { "epoch": 5.9906129210381005, "grad_norm": 0.6977058053016663, "learning_rate": 1.6272e-05, "loss": 0.0427, "step": 5427 }, { "epoch": 5.991717283268912, "grad_norm": 0.6805495619773865, "learning_rate": 1.6275e-05, "loss": 0.0363, "step": 5428 }, { "epoch": 5.992821645499724, "grad_norm": 1.1065154075622559, "learning_rate": 1.6278e-05, "loss": 0.0566, "step": 5429 }, { "epoch": 5.993926007730535, "grad_norm": 0.9833541512489319, "learning_rate": 1.6281e-05, "loss": 0.0538, "step": 5430 }, { "epoch": 5.995030369961348, "grad_norm": 0.9699321985244751, "learning_rate": 1.6284e-05, "loss": 0.1752, "step": 5431 }, { "epoch": 5.996134732192159, "grad_norm": 0.5203284025192261, "learning_rate": 1.6287000000000002e-05, "loss": 0.072, "step": 5432 }, { "epoch": 5.997239094422971, "grad_norm": 0.7480787634849548, "learning_rate": 1.629e-05, "loss": 0.0418, "step": 5433 }, { "epoch": 5.9983434566537825, "grad_norm": 0.5542523860931396, "learning_rate": 1.6293e-05, "loss": 0.0254, "step": 5434 }, { "epoch": 5.999447818884594, "grad_norm": 0.7560098767280579, "learning_rate": 1.6296e-05, "loss": 0.0383, "step": 5435 }, { "epoch": 6.0, "grad_norm": 0.4080607295036316, "learning_rate": 1.6299e-05, "loss": 0.0076, 
"step": 5436 }, { "epoch": 6.001104362230811, "grad_norm": 1.4660717248916626, "learning_rate": 1.6302e-05, "loss": 0.3819, "step": 5437 }, { "epoch": 6.002208724461624, "grad_norm": 2.089526414871216, "learning_rate": 1.6305e-05, "loss": 0.2274, "step": 5438 }, { "epoch": 6.003313086692435, "grad_norm": 0.9257135391235352, "learning_rate": 1.6308e-05, "loss": 0.2277, "step": 5439 }, { "epoch": 6.004417448923247, "grad_norm": 1.846831202507019, "learning_rate": 1.6311e-05, "loss": 0.2503, "step": 5440 }, { "epoch": 6.0055218111540585, "grad_norm": 0.780146062374115, "learning_rate": 1.6314e-05, "loss": 0.1351, "step": 5441 }, { "epoch": 6.00662617338487, "grad_norm": 0.8197493553161621, "learning_rate": 1.6317000000000003e-05, "loss": 0.1545, "step": 5442 }, { "epoch": 6.007730535615682, "grad_norm": 0.6163292527198792, "learning_rate": 1.6320000000000003e-05, "loss": 0.0807, "step": 5443 }, { "epoch": 6.008834897846493, "grad_norm": 2.208487033843994, "learning_rate": 1.6323000000000003e-05, "loss": 0.0869, "step": 5444 }, { "epoch": 6.009939260077306, "grad_norm": 0.8659749627113342, "learning_rate": 1.6326000000000003e-05, "loss": 0.1452, "step": 5445 }, { "epoch": 6.011043622308117, "grad_norm": 0.9161170125007629, "learning_rate": 1.6329e-05, "loss": 0.0734, "step": 5446 }, { "epoch": 6.012147984538929, "grad_norm": 0.3602812886238098, "learning_rate": 1.6332e-05, "loss": 0.0245, "step": 5447 }, { "epoch": 6.0132523467697405, "grad_norm": 0.6055265069007874, "learning_rate": 1.6335e-05, "loss": 0.0387, "step": 5448 }, { "epoch": 6.014356709000552, "grad_norm": 0.712013840675354, "learning_rate": 1.6338e-05, "loss": 0.0342, "step": 5449 }, { "epoch": 6.015461071231364, "grad_norm": 0.44564682245254517, "learning_rate": 1.6340999999999998e-05, "loss": 0.0376, "step": 5450 }, { "epoch": 6.016565433462175, "grad_norm": 0.451156884431839, "learning_rate": 1.6343999999999998e-05, "loss": 0.0261, "step": 5451 }, { "epoch": 6.017669795692988, "grad_norm": 
0.5087893009185791, "learning_rate": 1.6347e-05, "loss": 0.0255, "step": 5452 }, { "epoch": 6.018774157923799, "grad_norm": 0.49384722113609314, "learning_rate": 1.635e-05, "loss": 0.0278, "step": 5453 }, { "epoch": 6.01987852015461, "grad_norm": 0.5189042687416077, "learning_rate": 1.6353e-05, "loss": 0.05, "step": 5454 }, { "epoch": 6.020982882385423, "grad_norm": 0.34148430824279785, "learning_rate": 1.6356e-05, "loss": 0.0154, "step": 5455 }, { "epoch": 6.022087244616234, "grad_norm": 0.5310083627700806, "learning_rate": 1.6359e-05, "loss": 0.0273, "step": 5456 }, { "epoch": 6.023191606847046, "grad_norm": 0.42128071188926697, "learning_rate": 1.6362e-05, "loss": 0.0242, "step": 5457 }, { "epoch": 6.0242959690778575, "grad_norm": 0.3726050555706024, "learning_rate": 1.6365e-05, "loss": 0.0234, "step": 5458 }, { "epoch": 6.025400331308669, "grad_norm": 0.34810200333595276, "learning_rate": 1.6368e-05, "loss": 0.0263, "step": 5459 }, { "epoch": 6.026504693539481, "grad_norm": 0.559108555316925, "learning_rate": 1.6371e-05, "loss": 0.0335, "step": 5460 }, { "epoch": 6.027609055770292, "grad_norm": 0.5535933971405029, "learning_rate": 1.6374e-05, "loss": 0.0369, "step": 5461 }, { "epoch": 6.028713418001105, "grad_norm": 0.4518758952617645, "learning_rate": 1.6377000000000003e-05, "loss": 0.0255, "step": 5462 }, { "epoch": 6.029817780231916, "grad_norm": 0.609062135219574, "learning_rate": 1.6380000000000002e-05, "loss": 0.0205, "step": 5463 }, { "epoch": 6.030922142462728, "grad_norm": 0.758206844329834, "learning_rate": 1.6383000000000002e-05, "loss": 0.0306, "step": 5464 }, { "epoch": 6.0320265046935395, "grad_norm": 0.5386209487915039, "learning_rate": 1.6386000000000002e-05, "loss": 0.0265, "step": 5465 }, { "epoch": 6.033130866924351, "grad_norm": 0.6506061553955078, "learning_rate": 1.6389000000000002e-05, "loss": 0.0277, "step": 5466 }, { "epoch": 6.034235229155163, "grad_norm": 0.3078385293483734, "learning_rate": 1.6392e-05, "loss": 0.0118, "step": 5467 }, 
{ "epoch": 6.035339591385974, "grad_norm": 0.4038366675376892, "learning_rate": 1.6395e-05, "loss": 0.0232, "step": 5468 }, { "epoch": 6.036443953616787, "grad_norm": 0.4814819395542145, "learning_rate": 1.6398e-05, "loss": 0.0278, "step": 5469 }, { "epoch": 6.037548315847598, "grad_norm": 1.1953401565551758, "learning_rate": 1.6400999999999998e-05, "loss": 0.0278, "step": 5470 }, { "epoch": 6.038652678078409, "grad_norm": 0.39756280183792114, "learning_rate": 1.6403999999999997e-05, "loss": 0.0155, "step": 5471 }, { "epoch": 6.039757040309222, "grad_norm": 0.353110134601593, "learning_rate": 1.6407e-05, "loss": 0.016, "step": 5472 }, { "epoch": 6.040861402540033, "grad_norm": 1.1715078353881836, "learning_rate": 1.641e-05, "loss": 0.0399, "step": 5473 }, { "epoch": 6.041965764770845, "grad_norm": 0.40339285135269165, "learning_rate": 1.6413e-05, "loss": 0.0155, "step": 5474 }, { "epoch": 6.0430701270016565, "grad_norm": 0.26195961236953735, "learning_rate": 1.6416e-05, "loss": 0.016, "step": 5475 }, { "epoch": 6.044174489232468, "grad_norm": 0.5157092809677124, "learning_rate": 1.6419e-05, "loss": 0.0206, "step": 5476 }, { "epoch": 6.04527885146328, "grad_norm": 0.3832063376903534, "learning_rate": 1.6422e-05, "loss": 0.0261, "step": 5477 }, { "epoch": 6.046383213694091, "grad_norm": 0.4629087746143341, "learning_rate": 1.6425e-05, "loss": 0.0204, "step": 5478 }, { "epoch": 6.047487575924904, "grad_norm": 0.4542970359325409, "learning_rate": 1.6428e-05, "loss": 0.0261, "step": 5479 }, { "epoch": 6.048591938155715, "grad_norm": 0.542277991771698, "learning_rate": 1.6431e-05, "loss": 0.0188, "step": 5480 }, { "epoch": 6.049696300386527, "grad_norm": 0.7747577428817749, "learning_rate": 1.6434e-05, "loss": 0.0209, "step": 5481 }, { "epoch": 6.0508006626173385, "grad_norm": 0.5300416946411133, "learning_rate": 1.6437000000000002e-05, "loss": 0.0252, "step": 5482 }, { "epoch": 6.05190502484815, "grad_norm": 1.429945468902588, "learning_rate": 1.6440000000000002e-05, 
"loss": 0.0559, "step": 5483 }, { "epoch": 6.053009387078962, "grad_norm": 0.8611577749252319, "learning_rate": 1.6443e-05, "loss": 0.0374, "step": 5484 }, { "epoch": 6.054113749309773, "grad_norm": 0.46312978863716125, "learning_rate": 1.6446e-05, "loss": 0.0231, "step": 5485 }, { "epoch": 6.055218111540586, "grad_norm": 0.5624423027038574, "learning_rate": 1.6449e-05, "loss": 0.0268, "step": 5486 }, { "epoch": 6.056322473771397, "grad_norm": 1.2782071828842163, "learning_rate": 1.6452e-05, "loss": 0.3265, "step": 5487 }, { "epoch": 6.057426836002208, "grad_norm": 0.8645961880683899, "learning_rate": 1.6455e-05, "loss": 0.2397, "step": 5488 }, { "epoch": 6.0585311982330206, "grad_norm": 1.2062997817993164, "learning_rate": 1.6458e-05, "loss": 0.2351, "step": 5489 }, { "epoch": 6.059635560463832, "grad_norm": 0.7344040274620056, "learning_rate": 1.6461e-05, "loss": 0.1816, "step": 5490 }, { "epoch": 6.060739922694644, "grad_norm": 0.8145939707756042, "learning_rate": 1.6464e-05, "loss": 0.1666, "step": 5491 }, { "epoch": 6.0618442849254555, "grad_norm": 0.6431756019592285, "learning_rate": 1.6467000000000003e-05, "loss": 0.1329, "step": 5492 }, { "epoch": 6.062948647156268, "grad_norm": 0.5806809663772583, "learning_rate": 1.6470000000000003e-05, "loss": 0.1165, "step": 5493 }, { "epoch": 6.064053009387079, "grad_norm": 0.5271737575531006, "learning_rate": 1.6473000000000003e-05, "loss": 0.0673, "step": 5494 }, { "epoch": 6.06515737161789, "grad_norm": 0.3754681348800659, "learning_rate": 1.6476e-05, "loss": 0.0462, "step": 5495 }, { "epoch": 6.066261733848703, "grad_norm": 0.5458480715751648, "learning_rate": 1.6479e-05, "loss": 0.0746, "step": 5496 }, { "epoch": 6.067366096079514, "grad_norm": 0.42537036538124084, "learning_rate": 1.6482e-05, "loss": 0.0333, "step": 5497 }, { "epoch": 6.068470458310326, "grad_norm": 0.39359143376350403, "learning_rate": 1.6485e-05, "loss": 0.0264, "step": 5498 }, { "epoch": 6.0695748205411375, "grad_norm": 0.5296432971954346, 
"learning_rate": 1.6488e-05, "loss": 0.0305, "step": 5499 }, { "epoch": 6.070679182771949, "grad_norm": 0.3890821039676666, "learning_rate": 1.6491e-05, "loss": 0.0203, "step": 5500 }, { "epoch": 6.071783545002761, "grad_norm": 0.35945001244544983, "learning_rate": 1.6493999999999998e-05, "loss": 0.0271, "step": 5501 }, { "epoch": 6.072887907233572, "grad_norm": 0.4203242361545563, "learning_rate": 1.6497e-05, "loss": 0.0268, "step": 5502 }, { "epoch": 6.073992269464385, "grad_norm": 0.25394392013549805, "learning_rate": 1.65e-05, "loss": 0.0134, "step": 5503 }, { "epoch": 6.075096631695196, "grad_norm": 0.5678820013999939, "learning_rate": 1.6503e-05, "loss": 0.0307, "step": 5504 }, { "epoch": 6.076200993926007, "grad_norm": 0.31144022941589355, "learning_rate": 1.6506e-05, "loss": 0.0195, "step": 5505 }, { "epoch": 6.0773053561568195, "grad_norm": 0.23851284384727478, "learning_rate": 1.6509e-05, "loss": 0.0131, "step": 5506 }, { "epoch": 6.078409718387631, "grad_norm": 1.1331841945648193, "learning_rate": 1.6512e-05, "loss": 0.0311, "step": 5507 }, { "epoch": 6.079514080618443, "grad_norm": 0.3125782310962677, "learning_rate": 1.6515e-05, "loss": 0.0175, "step": 5508 }, { "epoch": 6.0806184428492545, "grad_norm": 0.31006520986557007, "learning_rate": 1.6518e-05, "loss": 0.0142, "step": 5509 }, { "epoch": 6.081722805080067, "grad_norm": 0.4015345573425293, "learning_rate": 1.6521e-05, "loss": 0.0242, "step": 5510 }, { "epoch": 6.082827167310878, "grad_norm": 0.6361057162284851, "learning_rate": 1.6524e-05, "loss": 0.0741, "step": 5511 }, { "epoch": 6.083931529541689, "grad_norm": 0.5417544841766357, "learning_rate": 1.6527e-05, "loss": 0.0212, "step": 5512 }, { "epoch": 6.085035891772502, "grad_norm": 0.4558606743812561, "learning_rate": 1.6530000000000003e-05, "loss": 0.0257, "step": 5513 }, { "epoch": 6.086140254003313, "grad_norm": 0.35699382424354553, "learning_rate": 1.6533000000000002e-05, "loss": 0.017, "step": 5514 }, { "epoch": 6.087244616234125, 
"grad_norm": 0.7601785063743591, "learning_rate": 1.6536000000000002e-05, "loss": 0.0386, "step": 5515 }, { "epoch": 6.0883489784649365, "grad_norm": 0.43760430812835693, "learning_rate": 1.6539000000000002e-05, "loss": 0.02, "step": 5516 }, { "epoch": 6.089453340695748, "grad_norm": 0.42752334475517273, "learning_rate": 1.6542000000000002e-05, "loss": 0.0297, "step": 5517 }, { "epoch": 6.09055770292656, "grad_norm": 0.7549715042114258, "learning_rate": 1.6545e-05, "loss": 0.0314, "step": 5518 }, { "epoch": 6.091662065157371, "grad_norm": 0.5141752362251282, "learning_rate": 1.6548e-05, "loss": 0.0443, "step": 5519 }, { "epoch": 6.092766427388184, "grad_norm": 3.3140316009521484, "learning_rate": 1.6550999999999998e-05, "loss": 0.041, "step": 5520 }, { "epoch": 6.093870789618995, "grad_norm": 1.0211992263793945, "learning_rate": 1.6553999999999998e-05, "loss": 0.033, "step": 5521 }, { "epoch": 6.094975151849806, "grad_norm": 0.7216109037399292, "learning_rate": 1.6556999999999998e-05, "loss": 0.0359, "step": 5522 }, { "epoch": 6.0960795140806185, "grad_norm": 0.43151575326919556, "learning_rate": 1.656e-05, "loss": 0.0335, "step": 5523 }, { "epoch": 6.09718387631143, "grad_norm": 2.0087478160858154, "learning_rate": 1.6563e-05, "loss": 0.0242, "step": 5524 }, { "epoch": 6.098288238542242, "grad_norm": 0.4052932560443878, "learning_rate": 1.6566e-05, "loss": 0.02, "step": 5525 }, { "epoch": 6.0993926007730535, "grad_norm": 0.335831880569458, "learning_rate": 1.6569e-05, "loss": 0.0196, "step": 5526 }, { "epoch": 6.100496963003866, "grad_norm": 0.27501380443573, "learning_rate": 1.6572e-05, "loss": 0.025, "step": 5527 }, { "epoch": 6.101601325234677, "grad_norm": 0.6110715866088867, "learning_rate": 1.6575e-05, "loss": 0.0335, "step": 5528 }, { "epoch": 6.102705687465488, "grad_norm": 1.0623350143432617, "learning_rate": 1.6578e-05, "loss": 0.0263, "step": 5529 }, { "epoch": 6.103810049696301, "grad_norm": 0.9065253138542175, "learning_rate": 1.6581e-05, "loss": 
0.036, "step": 5530 }, { "epoch": 6.104914411927112, "grad_norm": 1.0184986591339111, "learning_rate": 1.6584e-05, "loss": 0.025, "step": 5531 }, { "epoch": 6.106018774157924, "grad_norm": 0.6512657403945923, "learning_rate": 1.6587e-05, "loss": 0.0467, "step": 5532 }, { "epoch": 6.1071231363887355, "grad_norm": 0.9064626693725586, "learning_rate": 1.6590000000000002e-05, "loss": 0.0307, "step": 5533 }, { "epoch": 6.108227498619547, "grad_norm": 0.7084583640098572, "learning_rate": 1.6593000000000002e-05, "loss": 0.0342, "step": 5534 }, { "epoch": 6.109331860850359, "grad_norm": 0.6230108141899109, "learning_rate": 1.6596000000000002e-05, "loss": 0.0397, "step": 5535 }, { "epoch": 6.11043622308117, "grad_norm": 0.7756036520004272, "learning_rate": 1.6599e-05, "loss": 0.04, "step": 5536 }, { "epoch": 6.111540585311983, "grad_norm": 1.0259983539581299, "learning_rate": 1.6602e-05, "loss": 0.359, "step": 5537 }, { "epoch": 6.112644947542794, "grad_norm": 0.8836740851402283, "learning_rate": 1.6605e-05, "loss": 0.2744, "step": 5538 }, { "epoch": 6.113749309773605, "grad_norm": 0.866710364818573, "learning_rate": 1.6608e-05, "loss": 0.1739, "step": 5539 }, { "epoch": 6.1148536720044175, "grad_norm": 0.6829307079315186, "learning_rate": 1.6611e-05, "loss": 0.1371, "step": 5540 }, { "epoch": 6.115958034235229, "grad_norm": 1.5919817686080933, "learning_rate": 1.6614e-05, "loss": 0.2035, "step": 5541 }, { "epoch": 6.117062396466041, "grad_norm": 0.9012275338172913, "learning_rate": 1.6617e-05, "loss": 0.1753, "step": 5542 }, { "epoch": 6.1181667586968524, "grad_norm": 0.8413013219833374, "learning_rate": 1.6620000000000004e-05, "loss": 0.1045, "step": 5543 }, { "epoch": 6.119271120927665, "grad_norm": 0.6890164613723755, "learning_rate": 1.6623e-05, "loss": 0.1046, "step": 5544 }, { "epoch": 6.120375483158476, "grad_norm": 0.47256141901016235, "learning_rate": 1.6626e-05, "loss": 0.0334, "step": 5545 }, { "epoch": 6.121479845389287, "grad_norm": 0.4029175341129303, 
"learning_rate": 1.6629e-05, "loss": 0.0324, "step": 5546 }, { "epoch": 6.1225842076201, "grad_norm": 1.4663848876953125, "learning_rate": 1.6632e-05, "loss": 0.0593, "step": 5547 }, { "epoch": 6.123688569850911, "grad_norm": 0.6581262946128845, "learning_rate": 1.6635e-05, "loss": 0.0666, "step": 5548 }, { "epoch": 6.124792932081723, "grad_norm": 0.4798172414302826, "learning_rate": 1.6638e-05, "loss": 0.0481, "step": 5549 }, { "epoch": 6.1258972943125345, "grad_norm": 0.3945938050746918, "learning_rate": 1.6641e-05, "loss": 0.0254, "step": 5550 }, { "epoch": 6.127001656543346, "grad_norm": 0.3804144561290741, "learning_rate": 1.6644e-05, "loss": 0.0338, "step": 5551 }, { "epoch": 6.128106018774158, "grad_norm": 1.1972026824951172, "learning_rate": 1.6647e-05, "loss": 0.041, "step": 5552 }, { "epoch": 6.129210381004969, "grad_norm": 0.6513330340385437, "learning_rate": 1.665e-05, "loss": 0.027, "step": 5553 }, { "epoch": 6.130314743235782, "grad_norm": 0.4792206883430481, "learning_rate": 1.6653e-05, "loss": 0.0294, "step": 5554 }, { "epoch": 6.131419105466593, "grad_norm": 0.46518877148628235, "learning_rate": 1.6656e-05, "loss": 0.0293, "step": 5555 }, { "epoch": 6.132523467697405, "grad_norm": 0.4443720579147339, "learning_rate": 1.6659e-05, "loss": 0.0194, "step": 5556 }, { "epoch": 6.1336278299282165, "grad_norm": 0.8325865268707275, "learning_rate": 1.6662e-05, "loss": 0.0357, "step": 5557 }, { "epoch": 6.134732192159028, "grad_norm": 0.6633812785148621, "learning_rate": 1.6665e-05, "loss": 0.0209, "step": 5558 }, { "epoch": 6.13583655438984, "grad_norm": 0.6128411889076233, "learning_rate": 1.6668e-05, "loss": 0.0343, "step": 5559 }, { "epoch": 6.136940916620651, "grad_norm": 0.9744877815246582, "learning_rate": 1.6671e-05, "loss": 0.0314, "step": 5560 }, { "epoch": 6.138045278851464, "grad_norm": 0.8573518991470337, "learning_rate": 1.6674e-05, "loss": 0.0268, "step": 5561 }, { "epoch": 6.139149641082275, "grad_norm": 0.3569982945919037, "learning_rate": 
1.6677e-05, "loss": 0.0162, "step": 5562 }, { "epoch": 6.140254003313086, "grad_norm": 0.6543005704879761, "learning_rate": 1.6680000000000003e-05, "loss": 0.0286, "step": 5563 }, { "epoch": 6.141358365543899, "grad_norm": 0.7200274467468262, "learning_rate": 1.6683000000000003e-05, "loss": 0.028, "step": 5564 }, { "epoch": 6.14246272777471, "grad_norm": 0.5889564752578735, "learning_rate": 1.6686000000000003e-05, "loss": 0.0247, "step": 5565 }, { "epoch": 6.143567090005522, "grad_norm": 0.28337815403938293, "learning_rate": 1.6689000000000002e-05, "loss": 0.0162, "step": 5566 }, { "epoch": 6.1446714522363335, "grad_norm": 0.525460958480835, "learning_rate": 1.6692000000000002e-05, "loss": 0.0243, "step": 5567 }, { "epoch": 6.145775814467145, "grad_norm": 0.7456172704696655, "learning_rate": 1.6695000000000002e-05, "loss": 0.018, "step": 5568 }, { "epoch": 6.146880176697957, "grad_norm": 0.5344775915145874, "learning_rate": 1.6698e-05, "loss": 0.0324, "step": 5569 }, { "epoch": 6.147984538928768, "grad_norm": 0.7026100754737854, "learning_rate": 1.6700999999999998e-05, "loss": 0.0395, "step": 5570 }, { "epoch": 6.149088901159581, "grad_norm": 0.5668320655822754, "learning_rate": 1.6703999999999998e-05, "loss": 0.0285, "step": 5571 }, { "epoch": 6.150193263390392, "grad_norm": 0.3639571964740753, "learning_rate": 1.6706999999999998e-05, "loss": 0.0213, "step": 5572 }, { "epoch": 6.151297625621204, "grad_norm": 0.5766244530677795, "learning_rate": 1.671e-05, "loss": 0.0248, "step": 5573 }, { "epoch": 6.1524019878520155, "grad_norm": 0.5558867454528809, "learning_rate": 1.6713e-05, "loss": 0.0268, "step": 5574 }, { "epoch": 6.153506350082827, "grad_norm": 0.6459047198295593, "learning_rate": 1.6716e-05, "loss": 0.0318, "step": 5575 }, { "epoch": 6.154610712313639, "grad_norm": 0.5572948455810547, "learning_rate": 1.6719e-05, "loss": 0.029, "step": 5576 }, { "epoch": 6.15571507454445, "grad_norm": 0.6651903390884399, "learning_rate": 1.6722e-05, "loss": 0.0274, "step": 
5577 }, { "epoch": 6.156819436775263, "grad_norm": 1.1052792072296143, "learning_rate": 1.6725e-05, "loss": 0.0513, "step": 5578 }, { "epoch": 6.157923799006074, "grad_norm": 0.5029468536376953, "learning_rate": 1.6728e-05, "loss": 0.0215, "step": 5579 }, { "epoch": 6.159028161236885, "grad_norm": 0.3002314567565918, "learning_rate": 1.6731e-05, "loss": 0.0188, "step": 5580 }, { "epoch": 6.160132523467698, "grad_norm": 0.4358421564102173, "learning_rate": 1.6734e-05, "loss": 0.0331, "step": 5581 }, { "epoch": 6.161236885698509, "grad_norm": 0.41437819600105286, "learning_rate": 1.6737e-05, "loss": 0.0188, "step": 5582 }, { "epoch": 6.162341247929321, "grad_norm": 0.6760925650596619, "learning_rate": 1.6740000000000002e-05, "loss": 0.0295, "step": 5583 }, { "epoch": 6.1634456101601325, "grad_norm": 0.7037971019744873, "learning_rate": 1.6743000000000002e-05, "loss": 0.0358, "step": 5584 }, { "epoch": 6.164549972390944, "grad_norm": 0.785056471824646, "learning_rate": 1.6746000000000002e-05, "loss": 0.0527, "step": 5585 }, { "epoch": 6.165654334621756, "grad_norm": 0.5556942224502563, "learning_rate": 1.6749000000000002e-05, "loss": 0.0335, "step": 5586 }, { "epoch": 6.166758696852567, "grad_norm": 1.911811351776123, "learning_rate": 1.6752e-05, "loss": 0.2641, "step": 5587 }, { "epoch": 6.16786305908338, "grad_norm": 0.9923376441001892, "learning_rate": 1.6755e-05, "loss": 0.262, "step": 5588 }, { "epoch": 6.168967421314191, "grad_norm": 0.5353972315788269, "learning_rate": 1.6758e-05, "loss": 0.1651, "step": 5589 }, { "epoch": 6.170071783545003, "grad_norm": 0.7140636444091797, "learning_rate": 1.6761e-05, "loss": 0.1573, "step": 5590 }, { "epoch": 6.1711761457758145, "grad_norm": 0.7563750147819519, "learning_rate": 1.6764e-05, "loss": 0.1452, "step": 5591 }, { "epoch": 6.172280508006626, "grad_norm": 0.8765189051628113, "learning_rate": 1.6767e-05, "loss": 0.1444, "step": 5592 }, { "epoch": 6.173384870237438, "grad_norm": 0.532326877117157, "learning_rate": 
1.677e-05, "loss": 0.0742, "step": 5593 }, { "epoch": 6.174489232468249, "grad_norm": 0.9910098314285278, "learning_rate": 1.6773e-05, "loss": 0.1754, "step": 5594 }, { "epoch": 6.175593594699062, "grad_norm": 5.30670166015625, "learning_rate": 1.6776e-05, "loss": 0.0579, "step": 5595 }, { "epoch": 6.176697956929873, "grad_norm": 0.3768136203289032, "learning_rate": 1.6779e-05, "loss": 0.0384, "step": 5596 }, { "epoch": 6.177802319160684, "grad_norm": 0.47533315420150757, "learning_rate": 1.6782e-05, "loss": 0.0438, "step": 5597 }, { "epoch": 6.178906681391497, "grad_norm": 0.6360215544700623, "learning_rate": 1.6785e-05, "loss": 0.036, "step": 5598 }, { "epoch": 6.180011043622308, "grad_norm": 0.36801448464393616, "learning_rate": 1.6788e-05, "loss": 0.0286, "step": 5599 }, { "epoch": 6.18111540585312, "grad_norm": 0.4583686590194702, "learning_rate": 1.6791e-05, "loss": 0.0316, "step": 5600 }, { "epoch": 6.1822197680839315, "grad_norm": 0.5331354141235352, "learning_rate": 1.6794e-05, "loss": 0.0171, "step": 5601 }, { "epoch": 6.183324130314743, "grad_norm": 0.48735877871513367, "learning_rate": 1.6797e-05, "loss": 0.0304, "step": 5602 }, { "epoch": 6.184428492545555, "grad_norm": 0.41179823875427246, "learning_rate": 1.6800000000000002e-05, "loss": 0.0248, "step": 5603 }, { "epoch": 6.185532854776366, "grad_norm": 0.5224510431289673, "learning_rate": 1.6803e-05, "loss": 0.038, "step": 5604 }, { "epoch": 6.186637217007179, "grad_norm": 0.2903482913970947, "learning_rate": 1.6806e-05, "loss": 0.018, "step": 5605 }, { "epoch": 6.18774157923799, "grad_norm": 0.45437633991241455, "learning_rate": 1.6809e-05, "loss": 0.0266, "step": 5606 }, { "epoch": 6.188845941468802, "grad_norm": 0.29434484243392944, "learning_rate": 1.6812e-05, "loss": 0.0142, "step": 5607 }, { "epoch": 6.1899503036996135, "grad_norm": 0.550579309463501, "learning_rate": 1.6815e-05, "loss": 0.026, "step": 5608 }, { "epoch": 6.191054665930425, "grad_norm": 0.6671292185783386, "learning_rate": 
1.6818e-05, "loss": 0.0379, "step": 5609 }, { "epoch": 6.192159028161237, "grad_norm": 1.3069523572921753, "learning_rate": 1.6821e-05, "loss": 0.0107, "step": 5610 }, { "epoch": 6.193263390392048, "grad_norm": 0.8729934692382812, "learning_rate": 1.6824e-05, "loss": 0.0469, "step": 5611 }, { "epoch": 6.194367752622861, "grad_norm": 0.5897033214569092, "learning_rate": 1.6827e-05, "loss": 0.0339, "step": 5612 }, { "epoch": 6.195472114853672, "grad_norm": 0.5132719278335571, "learning_rate": 1.6830000000000003e-05, "loss": 0.0203, "step": 5613 }, { "epoch": 6.196576477084483, "grad_norm": 0.4778342843055725, "learning_rate": 1.6833000000000003e-05, "loss": 0.036, "step": 5614 }, { "epoch": 6.197680839315296, "grad_norm": 0.5256407260894775, "learning_rate": 1.6836000000000003e-05, "loss": 0.0264, "step": 5615 }, { "epoch": 6.198785201546107, "grad_norm": 0.45139816403388977, "learning_rate": 1.6839000000000003e-05, "loss": 0.0213, "step": 5616 }, { "epoch": 6.199889563776919, "grad_norm": 0.5371431112289429, "learning_rate": 1.6842000000000002e-05, "loss": 0.0295, "step": 5617 }, { "epoch": 6.2009939260077305, "grad_norm": 0.636882483959198, "learning_rate": 1.6845e-05, "loss": 0.0412, "step": 5618 }, { "epoch": 6.202098288238542, "grad_norm": 0.47844770550727844, "learning_rate": 1.6848e-05, "loss": 0.0297, "step": 5619 }, { "epoch": 6.203202650469354, "grad_norm": 0.821898877620697, "learning_rate": 1.6851e-05, "loss": 0.0332, "step": 5620 }, { "epoch": 6.204307012700165, "grad_norm": 0.47601351141929626, "learning_rate": 1.6853999999999998e-05, "loss": 0.0306, "step": 5621 }, { "epoch": 6.205411374930978, "grad_norm": 0.6311721205711365, "learning_rate": 1.6856999999999998e-05, "loss": 0.0336, "step": 5622 }, { "epoch": 6.206515737161789, "grad_norm": 0.5157014727592468, "learning_rate": 1.686e-05, "loss": 0.029, "step": 5623 }, { "epoch": 6.207620099392601, "grad_norm": 0.5229120850563049, "learning_rate": 1.6863e-05, "loss": 0.0334, "step": 5624 }, { "epoch": 
6.2087244616234125, "grad_norm": 0.4237936735153198, "learning_rate": 1.6866e-05, "loss": 0.0251, "step": 5625 }, { "epoch": 6.209828823854224, "grad_norm": 0.7281621098518372, "learning_rate": 1.6869e-05, "loss": 0.0224, "step": 5626 }, { "epoch": 6.210933186085036, "grad_norm": 0.45271459221839905, "learning_rate": 1.6872e-05, "loss": 0.0194, "step": 5627 }, { "epoch": 6.212037548315847, "grad_norm": 0.3152642250061035, "learning_rate": 1.6875e-05, "loss": 0.0212, "step": 5628 }, { "epoch": 6.21314191054666, "grad_norm": 0.3953224718570709, "learning_rate": 1.6878e-05, "loss": 0.0286, "step": 5629 }, { "epoch": 6.214246272777471, "grad_norm": 0.569150984287262, "learning_rate": 1.6881e-05, "loss": 0.0336, "step": 5630 }, { "epoch": 6.215350635008282, "grad_norm": 0.41098353266716003, "learning_rate": 1.6884e-05, "loss": 0.0267, "step": 5631 }, { "epoch": 6.216454997239095, "grad_norm": 0.4786875545978546, "learning_rate": 1.6887e-05, "loss": 0.022, "step": 5632 }, { "epoch": 6.217559359469906, "grad_norm": 0.6468968987464905, "learning_rate": 1.689e-05, "loss": 0.0674, "step": 5633 }, { "epoch": 6.218663721700718, "grad_norm": 0.7532563209533691, "learning_rate": 1.6893000000000002e-05, "loss": 0.0233, "step": 5634 }, { "epoch": 6.2197680839315295, "grad_norm": 0.5211818814277649, "learning_rate": 1.6896000000000002e-05, "loss": 0.0208, "step": 5635 }, { "epoch": 6.220872446162341, "grad_norm": 0.7378595471382141, "learning_rate": 1.6899000000000002e-05, "loss": 0.0458, "step": 5636 }, { "epoch": 6.221976808393153, "grad_norm": 1.1622098684310913, "learning_rate": 1.6902000000000002e-05, "loss": 0.3449, "step": 5637 }, { "epoch": 6.223081170623964, "grad_norm": 0.8255400061607361, "learning_rate": 1.6905e-05, "loss": 0.2139, "step": 5638 }, { "epoch": 6.224185532854777, "grad_norm": 0.8529388904571533, "learning_rate": 1.6908e-05, "loss": 0.2689, "step": 5639 }, { "epoch": 6.225289895085588, "grad_norm": 0.8236623406410217, "learning_rate": 1.6911e-05, "loss": 
0.1493, "step": 5640 }, { "epoch": 6.2263942573164, "grad_norm": 1.2510924339294434, "learning_rate": 1.6914e-05, "loss": 0.1631, "step": 5641 }, { "epoch": 6.2274986195472115, "grad_norm": 0.7413782477378845, "learning_rate": 1.6916999999999997e-05, "loss": 0.1167, "step": 5642 }, { "epoch": 6.228602981778023, "grad_norm": 0.8775331974029541, "learning_rate": 1.6919999999999997e-05, "loss": 0.1321, "step": 5643 }, { "epoch": 6.229707344008835, "grad_norm": 0.5014748573303223, "learning_rate": 1.6923e-05, "loss": 0.0517, "step": 5644 }, { "epoch": 6.230811706239646, "grad_norm": 0.551421046257019, "learning_rate": 1.6926e-05, "loss": 0.0688, "step": 5645 }, { "epoch": 6.231916068470459, "grad_norm": 0.5823937058448792, "learning_rate": 1.6929e-05, "loss": 0.0516, "step": 5646 }, { "epoch": 6.23302043070127, "grad_norm": 0.7331896424293518, "learning_rate": 1.6932e-05, "loss": 0.0763, "step": 5647 }, { "epoch": 6.234124792932081, "grad_norm": 0.25227928161621094, "learning_rate": 1.6935e-05, "loss": 0.0199, "step": 5648 }, { "epoch": 6.2352291551628936, "grad_norm": 0.5264337658882141, "learning_rate": 1.6938e-05, "loss": 0.0213, "step": 5649 }, { "epoch": 6.236333517393705, "grad_norm": 0.30274298787117004, "learning_rate": 1.6941e-05, "loss": 0.029, "step": 5650 }, { "epoch": 6.237437879624517, "grad_norm": 0.41588497161865234, "learning_rate": 1.6944e-05, "loss": 0.0211, "step": 5651 }, { "epoch": 6.2385422418553285, "grad_norm": 0.3986227810382843, "learning_rate": 1.6947e-05, "loss": 0.0477, "step": 5652 }, { "epoch": 6.23964660408614, "grad_norm": 0.7269653677940369, "learning_rate": 1.695e-05, "loss": 0.0665, "step": 5653 }, { "epoch": 6.240750966316952, "grad_norm": 0.31615105271339417, "learning_rate": 1.6953000000000002e-05, "loss": 0.019, "step": 5654 }, { "epoch": 6.241855328547763, "grad_norm": 0.6812534332275391, "learning_rate": 1.6956e-05, "loss": 0.0283, "step": 5655 }, { "epoch": 6.242959690778576, "grad_norm": 0.36007189750671387, "learning_rate": 
1.6959e-05, "loss": 0.015, "step": 5656 }, { "epoch": 6.244064053009387, "grad_norm": 0.504239559173584, "learning_rate": 1.6962e-05, "loss": 0.027, "step": 5657 }, { "epoch": 6.245168415240199, "grad_norm": 0.48574697971343994, "learning_rate": 1.6965e-05, "loss": 0.0314, "step": 5658 }, { "epoch": 6.2462727774710105, "grad_norm": 0.40864700078964233, "learning_rate": 1.6968e-05, "loss": 0.0177, "step": 5659 }, { "epoch": 6.247377139701822, "grad_norm": 0.44208431243896484, "learning_rate": 1.6971e-05, "loss": 0.0258, "step": 5660 }, { "epoch": 6.248481501932634, "grad_norm": 0.48302000761032104, "learning_rate": 1.6974e-05, "loss": 0.0248, "step": 5661 }, { "epoch": 6.249585864163445, "grad_norm": 0.3655542731285095, "learning_rate": 1.6977e-05, "loss": 0.0136, "step": 5662 }, { "epoch": 6.250690226394258, "grad_norm": 0.6188179850578308, "learning_rate": 1.698e-05, "loss": 0.0193, "step": 5663 }, { "epoch": 6.251794588625069, "grad_norm": 0.338145911693573, "learning_rate": 1.6983000000000003e-05, "loss": 0.0174, "step": 5664 }, { "epoch": 6.25289895085588, "grad_norm": 0.5162180662155151, "learning_rate": 1.6986000000000003e-05, "loss": 0.0209, "step": 5665 }, { "epoch": 6.2540033130866926, "grad_norm": 0.41367456316947937, "learning_rate": 1.6989000000000003e-05, "loss": 0.0208, "step": 5666 }, { "epoch": 6.255107675317504, "grad_norm": 0.4177365005016327, "learning_rate": 1.6992e-05, "loss": 0.032, "step": 5667 }, { "epoch": 6.256212037548316, "grad_norm": 0.5173865556716919, "learning_rate": 1.6995e-05, "loss": 0.0196, "step": 5668 }, { "epoch": 6.2573163997791275, "grad_norm": 0.6312974095344543, "learning_rate": 1.6998e-05, "loss": 0.0406, "step": 5669 }, { "epoch": 6.258420762009939, "grad_norm": 0.5245426297187805, "learning_rate": 1.7001e-05, "loss": 0.0237, "step": 5670 }, { "epoch": 6.259525124240751, "grad_norm": 0.43215110898017883, "learning_rate": 1.7004e-05, "loss": 0.0192, "step": 5671 }, { "epoch": 6.260629486471562, "grad_norm": 
0.541658878326416, "learning_rate": 1.7006999999999998e-05, "loss": 0.0164, "step": 5672 }, { "epoch": 6.261733848702375, "grad_norm": 0.4860139787197113, "learning_rate": 1.7009999999999998e-05, "loss": 0.0193, "step": 5673 }, { "epoch": 6.262838210933186, "grad_norm": 0.6904074549674988, "learning_rate": 1.7013e-05, "loss": 0.0266, "step": 5674 }, { "epoch": 6.263942573163998, "grad_norm": 1.4339264631271362, "learning_rate": 1.7016e-05, "loss": 0.0365, "step": 5675 }, { "epoch": 6.2650469353948095, "grad_norm": 0.6550142765045166, "learning_rate": 1.7019e-05, "loss": 0.0153, "step": 5676 }, { "epoch": 6.266151297625621, "grad_norm": 0.7866590023040771, "learning_rate": 1.7022e-05, "loss": 0.0319, "step": 5677 }, { "epoch": 6.267255659856433, "grad_norm": 0.7789379358291626, "learning_rate": 1.7025e-05, "loss": 0.0316, "step": 5678 }, { "epoch": 6.268360022087244, "grad_norm": 0.9456736445426941, "learning_rate": 1.7028e-05, "loss": 0.0322, "step": 5679 }, { "epoch": 6.269464384318057, "grad_norm": 3.896202802658081, "learning_rate": 1.7031e-05, "loss": 0.0231, "step": 5680 }, { "epoch": 6.270568746548868, "grad_norm": 0.7091078162193298, "learning_rate": 1.7034e-05, "loss": 0.0269, "step": 5681 }, { "epoch": 6.27167310877968, "grad_norm": 0.4806394577026367, "learning_rate": 1.7037e-05, "loss": 0.034, "step": 5682 }, { "epoch": 6.2727774710104915, "grad_norm": 0.5028858184814453, "learning_rate": 1.704e-05, "loss": 0.0303, "step": 5683 }, { "epoch": 6.273881833241303, "grad_norm": 0.4841162860393524, "learning_rate": 1.7043000000000003e-05, "loss": 0.0204, "step": 5684 }, { "epoch": 6.274986195472115, "grad_norm": 0.5273348093032837, "learning_rate": 1.7046000000000002e-05, "loss": 0.0237, "step": 5685 }, { "epoch": 6.2760905577029265, "grad_norm": 0.830416202545166, "learning_rate": 1.7049000000000002e-05, "loss": 0.0299, "step": 5686 }, { "epoch": 6.277194919933739, "grad_norm": 1.0750013589859009, "learning_rate": 1.7052000000000002e-05, "loss": 0.2767, 
"step": 5687 }, { "epoch": 6.27829928216455, "grad_norm": 1.1052647829055786, "learning_rate": 1.7055000000000002e-05, "loss": 0.2647, "step": 5688 }, { "epoch": 6.279403644395361, "grad_norm": 0.7698046565055847, "learning_rate": 1.7058e-05, "loss": 0.2002, "step": 5689 }, { "epoch": 6.280508006626174, "grad_norm": 0.7955979704856873, "learning_rate": 1.7061e-05, "loss": 0.1072, "step": 5690 }, { "epoch": 6.281612368856985, "grad_norm": 1.0255756378173828, "learning_rate": 1.7064e-05, "loss": 0.1509, "step": 5691 }, { "epoch": 6.282716731087797, "grad_norm": 0.6821253895759583, "learning_rate": 1.7066999999999998e-05, "loss": 0.1247, "step": 5692 }, { "epoch": 6.2838210933186085, "grad_norm": 0.6246775984764099, "learning_rate": 1.7069999999999998e-05, "loss": 0.1393, "step": 5693 }, { "epoch": 6.28492545554942, "grad_norm": 0.5443447232246399, "learning_rate": 1.7073e-05, "loss": 0.0744, "step": 5694 }, { "epoch": 6.286029817780232, "grad_norm": 0.6108287572860718, "learning_rate": 1.7076e-05, "loss": 0.0483, "step": 5695 }, { "epoch": 6.287134180011043, "grad_norm": 0.4427650272846222, "learning_rate": 1.7079e-05, "loss": 0.046, "step": 5696 }, { "epoch": 6.288238542241856, "grad_norm": 0.3644169569015503, "learning_rate": 1.7082e-05, "loss": 0.0315, "step": 5697 }, { "epoch": 6.289342904472667, "grad_norm": 0.474344938993454, "learning_rate": 1.7085e-05, "loss": 0.0278, "step": 5698 }, { "epoch": 6.290447266703479, "grad_norm": 0.49665606021881104, "learning_rate": 1.7088e-05, "loss": 0.027, "step": 5699 }, { "epoch": 6.2915516289342905, "grad_norm": 0.6406963467597961, "learning_rate": 1.7091e-05, "loss": 0.0334, "step": 5700 }, { "epoch": 6.292655991165102, "grad_norm": 0.4172600209712982, "learning_rate": 1.7094e-05, "loss": 0.0275, "step": 5701 }, { "epoch": 6.293760353395914, "grad_norm": 0.7881233096122742, "learning_rate": 1.7097e-05, "loss": 0.0432, "step": 5702 }, { "epoch": 6.2948647156267254, "grad_norm": 0.38957417011260986, "learning_rate": 
1.71e-05, "loss": 0.0276, "step": 5703 }, { "epoch": 6.295969077857538, "grad_norm": 0.5721333026885986, "learning_rate": 1.7103000000000002e-05, "loss": 0.0364, "step": 5704 }, { "epoch": 6.297073440088349, "grad_norm": 0.5555797219276428, "learning_rate": 1.7106000000000002e-05, "loss": 0.0369, "step": 5705 }, { "epoch": 6.29817780231916, "grad_norm": 0.6014741063117981, "learning_rate": 1.7109000000000002e-05, "loss": 0.0323, "step": 5706 }, { "epoch": 6.299282164549973, "grad_norm": 0.6515069007873535, "learning_rate": 1.7112e-05, "loss": 0.0381, "step": 5707 }, { "epoch": 6.300386526780784, "grad_norm": 0.3898453414440155, "learning_rate": 1.7115e-05, "loss": 0.0207, "step": 5708 }, { "epoch": 6.301490889011596, "grad_norm": 0.4579791724681854, "learning_rate": 1.7118e-05, "loss": 0.0508, "step": 5709 }, { "epoch": 6.3025952512424075, "grad_norm": 0.4343205988407135, "learning_rate": 1.7121e-05, "loss": 0.0309, "step": 5710 }, { "epoch": 6.303699613473219, "grad_norm": 0.6548508405685425, "learning_rate": 1.7124e-05, "loss": 0.0303, "step": 5711 }, { "epoch": 6.304803975704031, "grad_norm": 0.45943373441696167, "learning_rate": 1.7127e-05, "loss": 0.0188, "step": 5712 }, { "epoch": 6.305908337934842, "grad_norm": 0.574590802192688, "learning_rate": 1.713e-05, "loss": 0.0278, "step": 5713 }, { "epoch": 6.307012700165655, "grad_norm": 0.48101499676704407, "learning_rate": 1.7133000000000004e-05, "loss": 0.0184, "step": 5714 }, { "epoch": 6.308117062396466, "grad_norm": 0.4265982210636139, "learning_rate": 1.7136000000000003e-05, "loss": 0.0208, "step": 5715 }, { "epoch": 6.309221424627278, "grad_norm": 0.35731878876686096, "learning_rate": 1.7139e-05, "loss": 0.0143, "step": 5716 }, { "epoch": 6.3103257868580895, "grad_norm": 0.30400344729423523, "learning_rate": 1.7142e-05, "loss": 0.0172, "step": 5717 }, { "epoch": 6.311430149088901, "grad_norm": 0.45293331146240234, "learning_rate": 1.7145e-05, "loss": 0.0225, "step": 5718 }, { "epoch": 6.312534511319713, 
"grad_norm": 0.45599767565727234, "learning_rate": 1.7148e-05, "loss": 0.0236, "step": 5719 }, { "epoch": 6.313638873550524, "grad_norm": 0.4033202528953552, "learning_rate": 1.7151e-05, "loss": 0.0245, "step": 5720 }, { "epoch": 6.314743235781337, "grad_norm": 0.40382617712020874, "learning_rate": 1.7154e-05, "loss": 0.0613, "step": 5721 }, { "epoch": 6.315847598012148, "grad_norm": 0.48176512122154236, "learning_rate": 1.7157e-05, "loss": 0.0297, "step": 5722 }, { "epoch": 6.316951960242959, "grad_norm": 0.6076028943061829, "learning_rate": 1.716e-05, "loss": 0.0267, "step": 5723 }, { "epoch": 6.318056322473772, "grad_norm": 0.7479445338249207, "learning_rate": 1.7163e-05, "loss": 0.0367, "step": 5724 }, { "epoch": 6.319160684704583, "grad_norm": 0.48676180839538574, "learning_rate": 1.7166e-05, "loss": 0.0223, "step": 5725 }, { "epoch": 6.320265046935395, "grad_norm": 0.4183008074760437, "learning_rate": 1.7169e-05, "loss": 0.0272, "step": 5726 }, { "epoch": 6.3213694091662065, "grad_norm": 0.5664828419685364, "learning_rate": 1.7172e-05, "loss": 0.031, "step": 5727 }, { "epoch": 6.322473771397018, "grad_norm": 0.6223069429397583, "learning_rate": 1.7175e-05, "loss": 0.0185, "step": 5728 }, { "epoch": 6.32357813362783, "grad_norm": 0.5554924607276917, "learning_rate": 1.7178e-05, "loss": 0.0325, "step": 5729 }, { "epoch": 6.324682495858641, "grad_norm": 0.5347707867622375, "learning_rate": 1.7181e-05, "loss": 0.0308, "step": 5730 }, { "epoch": 6.325786858089454, "grad_norm": 0.7835640907287598, "learning_rate": 1.7184e-05, "loss": 0.0313, "step": 5731 }, { "epoch": 6.326891220320265, "grad_norm": 1.5365841388702393, "learning_rate": 1.7187e-05, "loss": 0.0271, "step": 5732 }, { "epoch": 6.327995582551077, "grad_norm": 0.510602593421936, "learning_rate": 1.719e-05, "loss": 0.0203, "step": 5733 }, { "epoch": 6.3290999447818885, "grad_norm": 0.572843074798584, "learning_rate": 1.7193000000000003e-05, "loss": 0.0302, "step": 5734 }, { "epoch": 6.3302043070127, 
"grad_norm": 0.6072409749031067, "learning_rate": 1.7196000000000003e-05, "loss": 0.0324, "step": 5735 }, { "epoch": 6.331308669243512, "grad_norm": 0.898391842842102, "learning_rate": 1.7199000000000003e-05, "loss": 0.0529, "step": 5736 }, { "epoch": 6.332413031474323, "grad_norm": 1.3094353675842285, "learning_rate": 1.7202000000000002e-05, "loss": 0.3262, "step": 5737 }, { "epoch": 6.333517393705136, "grad_norm": 1.1567331552505493, "learning_rate": 1.7205000000000002e-05, "loss": 0.2159, "step": 5738 }, { "epoch": 6.334621755935947, "grad_norm": 0.994841456413269, "learning_rate": 1.7208000000000002e-05, "loss": 0.1705, "step": 5739 }, { "epoch": 6.335726118166758, "grad_norm": 1.5118247270584106, "learning_rate": 1.7211000000000002e-05, "loss": 0.2316, "step": 5740 }, { "epoch": 6.336830480397571, "grad_norm": 0.7657289505004883, "learning_rate": 1.7213999999999998e-05, "loss": 0.1278, "step": 5741 }, { "epoch": 6.337934842628382, "grad_norm": 0.7901182174682617, "learning_rate": 1.7216999999999998e-05, "loss": 0.1438, "step": 5742 }, { "epoch": 6.339039204859194, "grad_norm": 0.7851188778877258, "learning_rate": 1.7219999999999998e-05, "loss": 0.1338, "step": 5743 }, { "epoch": 6.3401435670900055, "grad_norm": 1.4528659582138062, "learning_rate": 1.7223e-05, "loss": 0.1196, "step": 5744 }, { "epoch": 6.341247929320817, "grad_norm": 0.5626863241195679, "learning_rate": 1.7226e-05, "loss": 0.0622, "step": 5745 }, { "epoch": 6.342352291551629, "grad_norm": 0.33728712797164917, "learning_rate": 1.7229e-05, "loss": 0.0353, "step": 5746 }, { "epoch": 6.34345665378244, "grad_norm": 0.3890565037727356, "learning_rate": 1.7232e-05, "loss": 0.0183, "step": 5747 }, { "epoch": 6.344561016013253, "grad_norm": 0.3765653669834137, "learning_rate": 1.7235e-05, "loss": 0.0334, "step": 5748 }, { "epoch": 6.345665378244064, "grad_norm": 0.3825664520263672, "learning_rate": 1.7238e-05, "loss": 0.0335, "step": 5749 }, { "epoch": 6.346769740474876, "grad_norm": 0.351868599653244, 
"learning_rate": 1.7241e-05, "loss": 0.0233, "step": 5750 }, { "epoch": 6.3478741027056875, "grad_norm": 0.38232600688934326, "learning_rate": 1.7244e-05, "loss": 0.0275, "step": 5751 }, { "epoch": 6.348978464936499, "grad_norm": 0.4393835663795471, "learning_rate": 1.7247e-05, "loss": 0.0255, "step": 5752 }, { "epoch": 6.350082827167311, "grad_norm": 1.3080946207046509, "learning_rate": 1.725e-05, "loss": 0.0285, "step": 5753 }, { "epoch": 6.351187189398122, "grad_norm": 3.369187831878662, "learning_rate": 1.7253e-05, "loss": 0.0258, "step": 5754 }, { "epoch": 6.352291551628935, "grad_norm": 0.25583508610725403, "learning_rate": 1.7256000000000002e-05, "loss": 0.0308, "step": 5755 }, { "epoch": 6.353395913859746, "grad_norm": 0.37770453095436096, "learning_rate": 1.7259000000000002e-05, "loss": 0.0255, "step": 5756 }, { "epoch": 6.354500276090557, "grad_norm": 0.47018182277679443, "learning_rate": 1.7262000000000002e-05, "loss": 0.0186, "step": 5757 }, { "epoch": 6.35560463832137, "grad_norm": 0.6856340765953064, "learning_rate": 1.7265e-05, "loss": 0.0292, "step": 5758 }, { "epoch": 6.356709000552181, "grad_norm": 0.47113490104675293, "learning_rate": 1.7268e-05, "loss": 0.0291, "step": 5759 }, { "epoch": 6.357813362782993, "grad_norm": 0.30973199009895325, "learning_rate": 1.7271e-05, "loss": 0.0192, "step": 5760 }, { "epoch": 6.3589177250138045, "grad_norm": 0.7912088632583618, "learning_rate": 1.7274e-05, "loss": 0.0229, "step": 5761 }, { "epoch": 6.360022087244616, "grad_norm": 0.2600209712982178, "learning_rate": 1.7277e-05, "loss": 0.013, "step": 5762 }, { "epoch": 6.361126449475428, "grad_norm": 0.40363791584968567, "learning_rate": 1.728e-05, "loss": 0.0196, "step": 5763 }, { "epoch": 6.362230811706239, "grad_norm": 0.44582852721214294, "learning_rate": 1.7283e-05, "loss": 0.0235, "step": 5764 }, { "epoch": 6.363335173937052, "grad_norm": 0.7265487909317017, "learning_rate": 1.7286e-05, "loss": 0.0308, "step": 5765 }, { "epoch": 6.364439536167863, 
"grad_norm": 0.4590035378932953, "learning_rate": 1.7289e-05, "loss": 0.0288, "step": 5766 }, { "epoch": 6.365543898398675, "grad_norm": 0.5906921029090881, "learning_rate": 1.7292e-05, "loss": 0.028, "step": 5767 }, { "epoch": 6.3666482606294865, "grad_norm": 0.6477107405662537, "learning_rate": 1.7295e-05, "loss": 0.0256, "step": 5768 }, { "epoch": 6.367752622860298, "grad_norm": 0.5416406989097595, "learning_rate": 1.7298e-05, "loss": 0.0368, "step": 5769 }, { "epoch": 6.36885698509111, "grad_norm": 0.4920187294483185, "learning_rate": 1.7301e-05, "loss": 0.0269, "step": 5770 }, { "epoch": 6.369961347321921, "grad_norm": 0.6555492877960205, "learning_rate": 1.7304e-05, "loss": 0.021, "step": 5771 }, { "epoch": 6.371065709552734, "grad_norm": 1.3847053050994873, "learning_rate": 1.7307e-05, "loss": 0.0562, "step": 5772 }, { "epoch": 6.372170071783545, "grad_norm": 0.7739636898040771, "learning_rate": 1.731e-05, "loss": 0.0379, "step": 5773 }, { "epoch": 6.373274434014356, "grad_norm": 0.287933349609375, "learning_rate": 1.7313e-05, "loss": 0.0182, "step": 5774 }, { "epoch": 6.374378796245169, "grad_norm": 0.9673922657966614, "learning_rate": 1.7316e-05, "loss": 0.0352, "step": 5775 }, { "epoch": 6.37548315847598, "grad_norm": 0.6066411733627319, "learning_rate": 1.7319e-05, "loss": 0.0218, "step": 5776 }, { "epoch": 6.376587520706792, "grad_norm": 0.44856026768684387, "learning_rate": 1.7322e-05, "loss": 0.0209, "step": 5777 }, { "epoch": 6.3776918829376035, "grad_norm": 0.4244533181190491, "learning_rate": 1.7325e-05, "loss": 0.0295, "step": 5778 }, { "epoch": 6.378796245168415, "grad_norm": 1.208748459815979, "learning_rate": 1.7328e-05, "loss": 0.0374, "step": 5779 }, { "epoch": 6.379900607399227, "grad_norm": 0.7040916085243225, "learning_rate": 1.7331e-05, "loss": 0.0374, "step": 5780 }, { "epoch": 6.381004969630038, "grad_norm": 0.43645355105400085, "learning_rate": 1.7334e-05, "loss": 0.0225, "step": 5781 }, { "epoch": 6.382109331860851, "grad_norm": 
0.6726194024085999, "learning_rate": 1.7337e-05, "loss": 0.0378, "step": 5782 }, { "epoch": 6.383213694091662, "grad_norm": 1.1059428453445435, "learning_rate": 1.734e-05, "loss": 0.0272, "step": 5783 }, { "epoch": 6.384318056322474, "grad_norm": 0.6746006011962891, "learning_rate": 1.7343e-05, "loss": 0.0286, "step": 5784 }, { "epoch": 6.3854224185532855, "grad_norm": 0.6430127620697021, "learning_rate": 1.7346000000000003e-05, "loss": 0.0356, "step": 5785 }, { "epoch": 6.386526780784097, "grad_norm": 0.7532399296760559, "learning_rate": 1.7349000000000003e-05, "loss": 0.0259, "step": 5786 }, { "epoch": 6.387631143014909, "grad_norm": 0.9207808375358582, "learning_rate": 1.7352000000000003e-05, "loss": 0.2613, "step": 5787 }, { "epoch": 6.38873550524572, "grad_norm": 0.8987547159194946, "learning_rate": 1.7355000000000002e-05, "loss": 0.2466, "step": 5788 }, { "epoch": 6.389839867476533, "grad_norm": 0.7583501935005188, "learning_rate": 1.7358000000000002e-05, "loss": 0.178, "step": 5789 }, { "epoch": 6.390944229707344, "grad_norm": 1.0414239168167114, "learning_rate": 1.7361e-05, "loss": 0.2606, "step": 5790 }, { "epoch": 6.392048591938155, "grad_norm": 0.6582611203193665, "learning_rate": 1.7364e-05, "loss": 0.1548, "step": 5791 }, { "epoch": 6.393152954168968, "grad_norm": 0.6664703488349915, "learning_rate": 1.7366999999999998e-05, "loss": 0.1104, "step": 5792 }, { "epoch": 6.394257316399779, "grad_norm": 0.7975437045097351, "learning_rate": 1.7369999999999998e-05, "loss": 0.154, "step": 5793 }, { "epoch": 6.395361678630591, "grad_norm": 0.6761810183525085, "learning_rate": 1.7372999999999998e-05, "loss": 0.1277, "step": 5794 }, { "epoch": 6.3964660408614025, "grad_norm": 0.6614953875541687, "learning_rate": 1.7376e-05, "loss": 0.0808, "step": 5795 }, { "epoch": 6.397570403092214, "grad_norm": 0.35006535053253174, "learning_rate": 1.7379e-05, "loss": 0.0406, "step": 5796 }, { "epoch": 6.398674765323026, "grad_norm": 0.33559301495552063, "learning_rate": 
1.7382e-05, "loss": 0.0259, "step": 5797 }, { "epoch": 6.399779127553837, "grad_norm": 0.5327648520469666, "learning_rate": 1.7385e-05, "loss": 0.0427, "step": 5798 }, { "epoch": 6.40088348978465, "grad_norm": 0.29719555377960205, "learning_rate": 1.7388e-05, "loss": 0.0208, "step": 5799 }, { "epoch": 6.401987852015461, "grad_norm": 1.5414049625396729, "learning_rate": 1.7391e-05, "loss": 0.0674, "step": 5800 }, { "epoch": 6.403092214246273, "grad_norm": 0.6514975428581238, "learning_rate": 1.7394e-05, "loss": 0.0589, "step": 5801 }, { "epoch": 6.4041965764770845, "grad_norm": 0.28942739963531494, "learning_rate": 1.7397e-05, "loss": 0.0249, "step": 5802 }, { "epoch": 6.405300938707896, "grad_norm": 0.3409484624862671, "learning_rate": 1.74e-05, "loss": 0.022, "step": 5803 }, { "epoch": 6.406405300938708, "grad_norm": 0.3994418978691101, "learning_rate": 1.7403e-05, "loss": 0.021, "step": 5804 }, { "epoch": 6.407509663169519, "grad_norm": 0.3266616761684418, "learning_rate": 1.7406000000000002e-05, "loss": 0.0235, "step": 5805 }, { "epoch": 6.408614025400332, "grad_norm": 0.4856777489185333, "learning_rate": 1.7409000000000002e-05, "loss": 0.032, "step": 5806 }, { "epoch": 6.409718387631143, "grad_norm": 0.24135655164718628, "learning_rate": 1.7412000000000002e-05, "loss": 0.013, "step": 5807 }, { "epoch": 6.410822749861954, "grad_norm": 0.47977694869041443, "learning_rate": 1.7415000000000002e-05, "loss": 0.0351, "step": 5808 }, { "epoch": 6.411927112092767, "grad_norm": 0.359412282705307, "learning_rate": 1.7418e-05, "loss": 0.0239, "step": 5809 }, { "epoch": 6.413031474323578, "grad_norm": 0.5028817653656006, "learning_rate": 1.7421e-05, "loss": 0.0264, "step": 5810 }, { "epoch": 6.41413583655439, "grad_norm": 0.532035768032074, "learning_rate": 1.7424e-05, "loss": 0.0274, "step": 5811 }, { "epoch": 6.4152401987852015, "grad_norm": 0.5250234007835388, "learning_rate": 1.7427e-05, "loss": 0.0802, "step": 5812 }, { "epoch": 6.416344561016013, "grad_norm": 
0.37893471121788025, "learning_rate": 1.743e-05, "loss": 0.0197, "step": 5813 }, { "epoch": 6.417448923246825, "grad_norm": 0.9300486445426941, "learning_rate": 1.7432999999999997e-05, "loss": 0.0852, "step": 5814 }, { "epoch": 6.418553285477636, "grad_norm": 0.9568973183631897, "learning_rate": 1.7436e-05, "loss": 0.0555, "step": 5815 }, { "epoch": 6.419657647708449, "grad_norm": 0.46669331192970276, "learning_rate": 1.7439e-05, "loss": 0.0342, "step": 5816 }, { "epoch": 6.42076200993926, "grad_norm": 1.0504595041275024, "learning_rate": 1.7442e-05, "loss": 0.0397, "step": 5817 }, { "epoch": 6.421866372170072, "grad_norm": 0.47689542174339294, "learning_rate": 1.7445e-05, "loss": 0.0284, "step": 5818 }, { "epoch": 6.4229707344008835, "grad_norm": 0.49429038166999817, "learning_rate": 1.7448e-05, "loss": 0.024, "step": 5819 }, { "epoch": 6.424075096631695, "grad_norm": 0.3648976981639862, "learning_rate": 1.7451e-05, "loss": 0.0273, "step": 5820 }, { "epoch": 6.425179458862507, "grad_norm": 0.2573281526565552, "learning_rate": 1.7454e-05, "loss": 0.0118, "step": 5821 }, { "epoch": 6.426283821093318, "grad_norm": 0.5887860655784607, "learning_rate": 1.7457e-05, "loss": 0.0341, "step": 5822 }, { "epoch": 6.427388183324131, "grad_norm": 0.5951401591300964, "learning_rate": 1.746e-05, "loss": 0.0269, "step": 5823 }, { "epoch": 6.428492545554942, "grad_norm": 0.5661342144012451, "learning_rate": 1.7463e-05, "loss": 0.0263, "step": 5824 }, { "epoch": 6.429596907785753, "grad_norm": 0.5505880117416382, "learning_rate": 1.7466000000000002e-05, "loss": 0.0306, "step": 5825 }, { "epoch": 6.4307012700165656, "grad_norm": 0.49700111150741577, "learning_rate": 1.7469e-05, "loss": 0.0242, "step": 5826 }, { "epoch": 6.431805632247377, "grad_norm": 0.6102114319801331, "learning_rate": 1.7472e-05, "loss": 0.0362, "step": 5827 }, { "epoch": 6.432909994478189, "grad_norm": 0.4794095754623413, "learning_rate": 1.7475e-05, "loss": 0.0223, "step": 5828 }, { "epoch": 6.4340143567090005, 
"grad_norm": 0.8910030722618103, "learning_rate": 1.7478e-05, "loss": 0.0296, "step": 5829 }, { "epoch": 6.435118718939812, "grad_norm": 0.7054679989814758, "learning_rate": 1.7481e-05, "loss": 0.0504, "step": 5830 }, { "epoch": 6.436223081170624, "grad_norm": 0.5495806932449341, "learning_rate": 1.7484e-05, "loss": 0.0362, "step": 5831 }, { "epoch": 6.437327443401435, "grad_norm": 0.6272101402282715, "learning_rate": 1.7487e-05, "loss": 0.053, "step": 5832 }, { "epoch": 6.438431805632248, "grad_norm": 0.4372742474079132, "learning_rate": 1.749e-05, "loss": 0.031, "step": 5833 }, { "epoch": 6.439536167863059, "grad_norm": 0.7880741953849792, "learning_rate": 1.7493e-05, "loss": 0.0338, "step": 5834 }, { "epoch": 6.440640530093871, "grad_norm": 0.4233153164386749, "learning_rate": 1.7496000000000003e-05, "loss": 0.0243, "step": 5835 }, { "epoch": 6.4417448923246825, "grad_norm": 0.6517809629440308, "learning_rate": 1.7499000000000003e-05, "loss": 0.0236, "step": 5836 }, { "epoch": 6.442849254555494, "grad_norm": 1.2869364023208618, "learning_rate": 1.7502000000000003e-05, "loss": 0.2979, "step": 5837 }, { "epoch": 6.443953616786306, "grad_norm": 1.0630635023117065, "learning_rate": 1.7505000000000003e-05, "loss": 0.2551, "step": 5838 }, { "epoch": 6.445057979017117, "grad_norm": 0.6031951904296875, "learning_rate": 1.7508e-05, "loss": 0.1244, "step": 5839 }, { "epoch": 6.44616234124793, "grad_norm": 1.3466179370880127, "learning_rate": 1.7511e-05, "loss": 0.1651, "step": 5840 }, { "epoch": 6.447266703478741, "grad_norm": 0.9259688854217529, "learning_rate": 1.7514e-05, "loss": 0.1875, "step": 5841 }, { "epoch": 6.448371065709552, "grad_norm": 0.7819691300392151, "learning_rate": 1.7517e-05, "loss": 0.1385, "step": 5842 }, { "epoch": 6.4494754279403645, "grad_norm": 0.8652586936950684, "learning_rate": 1.7519999999999998e-05, "loss": 0.1265, "step": 5843 }, { "epoch": 6.450579790171176, "grad_norm": 0.929806649684906, "learning_rate": 1.7522999999999998e-05, "loss": 
0.1066, "step": 5844 }, { "epoch": 6.451684152401988, "grad_norm": 0.5010517835617065, "learning_rate": 1.7526e-05, "loss": 0.074, "step": 5845 }, { "epoch": 6.4527885146327995, "grad_norm": 0.6913653612136841, "learning_rate": 1.7529e-05, "loss": 0.0516, "step": 5846 }, { "epoch": 6.453892876863611, "grad_norm": 0.4729956388473511, "learning_rate": 1.7532e-05, "loss": 0.0672, "step": 5847 }, { "epoch": 6.454997239094423, "grad_norm": 0.8725947141647339, "learning_rate": 1.7535e-05, "loss": 0.0418, "step": 5848 }, { "epoch": 6.456101601325234, "grad_norm": 0.5819078683853149, "learning_rate": 1.7538e-05, "loss": 0.0566, "step": 5849 }, { "epoch": 6.457205963556047, "grad_norm": 0.4010501205921173, "learning_rate": 1.7541e-05, "loss": 0.0265, "step": 5850 }, { "epoch": 6.458310325786858, "grad_norm": 0.5067055225372314, "learning_rate": 1.7544e-05, "loss": 0.0451, "step": 5851 }, { "epoch": 6.45941468801767, "grad_norm": 0.5414619445800781, "learning_rate": 1.7547e-05, "loss": 0.0348, "step": 5852 }, { "epoch": 6.4605190502484815, "grad_norm": 0.8190457820892334, "learning_rate": 1.755e-05, "loss": 0.028, "step": 5853 }, { "epoch": 6.461623412479293, "grad_norm": 0.4091871976852417, "learning_rate": 1.7553e-05, "loss": 0.0285, "step": 5854 }, { "epoch": 6.462727774710105, "grad_norm": 0.260868638753891, "learning_rate": 1.7556000000000003e-05, "loss": 0.0144, "step": 5855 }, { "epoch": 6.463832136940916, "grad_norm": 0.3992276191711426, "learning_rate": 1.7559000000000002e-05, "loss": 0.0326, "step": 5856 }, { "epoch": 6.464936499171729, "grad_norm": 0.6424763202667236, "learning_rate": 1.7562000000000002e-05, "loss": 0.0273, "step": 5857 }, { "epoch": 6.46604086140254, "grad_norm": 0.5645071864128113, "learning_rate": 1.7565000000000002e-05, "loss": 0.0277, "step": 5858 }, { "epoch": 6.467145223633352, "grad_norm": 0.4150311052799225, "learning_rate": 1.7568000000000002e-05, "loss": 0.0137, "step": 5859 }, { "epoch": 6.4682495858641635, "grad_norm": 
0.32225826382637024, "learning_rate": 1.7571e-05, "loss": 0.0188, "step": 5860 }, { "epoch": 6.469353948094975, "grad_norm": 0.48491784930229187, "learning_rate": 1.7574e-05, "loss": 0.0241, "step": 5861 }, { "epoch": 6.470458310325787, "grad_norm": 0.5237103700637817, "learning_rate": 1.7577e-05, "loss": 0.0343, "step": 5862 }, { "epoch": 6.4715626725565985, "grad_norm": 1.0870531797409058, "learning_rate": 1.758e-05, "loss": 0.0246, "step": 5863 }, { "epoch": 6.472667034787411, "grad_norm": 0.5145395398139954, "learning_rate": 1.7582999999999998e-05, "loss": 0.0144, "step": 5864 }, { "epoch": 6.473771397018222, "grad_norm": 0.49168211221694946, "learning_rate": 1.7586e-05, "loss": 0.0177, "step": 5865 }, { "epoch": 6.474875759249033, "grad_norm": 0.8088584542274475, "learning_rate": 1.7589e-05, "loss": 0.0248, "step": 5866 }, { "epoch": 6.475980121479846, "grad_norm": 1.2928508520126343, "learning_rate": 1.7592e-05, "loss": 0.0513, "step": 5867 }, { "epoch": 6.477084483710657, "grad_norm": 0.6593127250671387, "learning_rate": 1.7595e-05, "loss": 0.0361, "step": 5868 }, { "epoch": 6.478188845941469, "grad_norm": 0.41107243299484253, "learning_rate": 1.7598e-05, "loss": 0.0132, "step": 5869 }, { "epoch": 6.4792932081722805, "grad_norm": 0.4404507577419281, "learning_rate": 1.7601e-05, "loss": 0.0184, "step": 5870 }, { "epoch": 6.480397570403092, "grad_norm": 0.5222009420394897, "learning_rate": 1.7604e-05, "loss": 0.0284, "step": 5871 }, { "epoch": 6.481501932633904, "grad_norm": 0.6502174735069275, "learning_rate": 1.7607e-05, "loss": 0.0401, "step": 5872 }, { "epoch": 6.482606294864715, "grad_norm": 0.5533536076545715, "learning_rate": 1.761e-05, "loss": 0.0275, "step": 5873 }, { "epoch": 6.483710657095528, "grad_norm": 0.3937251567840576, "learning_rate": 1.7613e-05, "loss": 0.0214, "step": 5874 }, { "epoch": 6.484815019326339, "grad_norm": 0.3423137068748474, "learning_rate": 1.7616000000000002e-05, "loss": 0.0144, "step": 5875 }, { "epoch": 6.485919381557151, 
"grad_norm": 0.7443482875823975, "learning_rate": 1.7619000000000002e-05, "loss": 0.0375, "step": 5876 }, { "epoch": 6.4870237437879625, "grad_norm": 0.7711908221244812, "learning_rate": 1.7622000000000002e-05, "loss": 0.0279, "step": 5877 }, { "epoch": 6.488128106018774, "grad_norm": 0.6003729104995728, "learning_rate": 1.7625e-05, "loss": 0.0226, "step": 5878 }, { "epoch": 6.489232468249586, "grad_norm": 0.3707025349140167, "learning_rate": 1.7628e-05, "loss": 0.0202, "step": 5879 }, { "epoch": 6.4903368304803974, "grad_norm": 0.8906052112579346, "learning_rate": 1.7631e-05, "loss": 0.0466, "step": 5880 }, { "epoch": 6.49144119271121, "grad_norm": 0.5723229050636292, "learning_rate": 1.7634e-05, "loss": 0.0322, "step": 5881 }, { "epoch": 6.492545554942021, "grad_norm": 0.5633620023727417, "learning_rate": 1.7637e-05, "loss": 0.0516, "step": 5882 }, { "epoch": 6.493649917172832, "grad_norm": 0.9843148589134216, "learning_rate": 1.764e-05, "loss": 0.0437, "step": 5883 }, { "epoch": 6.494754279403645, "grad_norm": 0.9567678570747375, "learning_rate": 1.7643e-05, "loss": 0.0265, "step": 5884 }, { "epoch": 6.495858641634456, "grad_norm": 0.5846644639968872, "learning_rate": 1.7646e-05, "loss": 0.0276, "step": 5885 }, { "epoch": 6.496963003865268, "grad_norm": 0.5068992376327515, "learning_rate": 1.7649000000000003e-05, "loss": 0.023, "step": 5886 }, { "epoch": 6.4980673660960795, "grad_norm": 0.9840837121009827, "learning_rate": 1.7652000000000003e-05, "loss": 0.2773, "step": 5887 }, { "epoch": 6.499171728326891, "grad_norm": 1.5124431848526, "learning_rate": 1.7655e-05, "loss": 0.304, "step": 5888 }, { "epoch": 6.500276090557703, "grad_norm": 0.6952665448188782, "learning_rate": 1.7658e-05, "loss": 0.1774, "step": 5889 }, { "epoch": 6.501380452788514, "grad_norm": 0.6917198896408081, "learning_rate": 1.7661e-05, "loss": 0.1703, "step": 5890 }, { "epoch": 6.502484815019327, "grad_norm": 0.9457603693008423, "learning_rate": 1.7664e-05, "loss": 0.1939, "step": 5891 }, { 
"epoch": 6.503589177250138, "grad_norm": 1.6080057621002197, "learning_rate": 1.7667e-05, "loss": 0.1449, "step": 5892 }, { "epoch": 6.50469353948095, "grad_norm": 0.6928563714027405, "learning_rate": 1.767e-05, "loss": 0.1426, "step": 5893 }, { "epoch": 6.5057979017117615, "grad_norm": 0.5743803381919861, "learning_rate": 1.7673e-05, "loss": 0.0784, "step": 5894 }, { "epoch": 6.506902263942573, "grad_norm": 0.5262934565544128, "learning_rate": 1.7675999999999998e-05, "loss": 0.0558, "step": 5895 }, { "epoch": 6.508006626173385, "grad_norm": 0.5801037549972534, "learning_rate": 1.7679e-05, "loss": 0.0558, "step": 5896 }, { "epoch": 6.509110988404196, "grad_norm": 0.46419572830200195, "learning_rate": 1.7682e-05, "loss": 0.0379, "step": 5897 }, { "epoch": 6.510215350635009, "grad_norm": 0.6572535037994385, "learning_rate": 1.7685e-05, "loss": 0.0299, "step": 5898 }, { "epoch": 6.51131971286582, "grad_norm": 0.41442397236824036, "learning_rate": 1.7688e-05, "loss": 0.03, "step": 5899 }, { "epoch": 6.512424075096631, "grad_norm": 0.5895781517028809, "learning_rate": 1.7691e-05, "loss": 0.0794, "step": 5900 }, { "epoch": 6.513528437327444, "grad_norm": 1.1017568111419678, "learning_rate": 1.7694e-05, "loss": 0.0383, "step": 5901 }, { "epoch": 6.514632799558255, "grad_norm": 0.3240203261375427, "learning_rate": 1.7697e-05, "loss": 0.0305, "step": 5902 }, { "epoch": 6.515737161789067, "grad_norm": 0.4689534306526184, "learning_rate": 1.77e-05, "loss": 0.0245, "step": 5903 }, { "epoch": 6.5168415240198785, "grad_norm": 0.4137667417526245, "learning_rate": 1.7703e-05, "loss": 0.0346, "step": 5904 }, { "epoch": 6.51794588625069, "grad_norm": 0.28676414489746094, "learning_rate": 1.7706e-05, "loss": 0.0206, "step": 5905 }, { "epoch": 6.519050248481502, "grad_norm": 0.3535102605819702, "learning_rate": 1.7709000000000003e-05, "loss": 0.0204, "step": 5906 }, { "epoch": 6.520154610712313, "grad_norm": 0.4147856831550598, "learning_rate": 1.7712000000000003e-05, "loss": 0.0271, 
"step": 5907 }, { "epoch": 6.521258972943126, "grad_norm": 0.4493207037448883, "learning_rate": 1.7715000000000002e-05, "loss": 0.0359, "step": 5908 }, { "epoch": 6.522363335173937, "grad_norm": 0.49812081456184387, "learning_rate": 1.7718000000000002e-05, "loss": 0.0221, "step": 5909 }, { "epoch": 6.523467697404749, "grad_norm": 0.5452020168304443, "learning_rate": 1.7721000000000002e-05, "loss": 0.0299, "step": 5910 }, { "epoch": 6.5245720596355605, "grad_norm": 0.24769756197929382, "learning_rate": 1.7724000000000002e-05, "loss": 0.0123, "step": 5911 }, { "epoch": 6.525676421866372, "grad_norm": 0.4536379873752594, "learning_rate": 1.7727e-05, "loss": 0.0253, "step": 5912 }, { "epoch": 6.526780784097184, "grad_norm": 0.6888823509216309, "learning_rate": 1.7729999999999998e-05, "loss": 0.0394, "step": 5913 }, { "epoch": 6.527885146327995, "grad_norm": 0.7192149758338928, "learning_rate": 1.7732999999999998e-05, "loss": 0.0273, "step": 5914 }, { "epoch": 6.528989508558808, "grad_norm": 0.7381044030189514, "learning_rate": 1.7735999999999998e-05, "loss": 0.0439, "step": 5915 }, { "epoch": 6.530093870789619, "grad_norm": 0.6397042274475098, "learning_rate": 1.7739e-05, "loss": 0.0297, "step": 5916 }, { "epoch": 6.53119823302043, "grad_norm": 0.3083268702030182, "learning_rate": 1.7742e-05, "loss": 0.0173, "step": 5917 }, { "epoch": 6.532302595251243, "grad_norm": 0.5870762467384338, "learning_rate": 1.7745e-05, "loss": 0.0266, "step": 5918 }, { "epoch": 6.533406957482054, "grad_norm": 0.5496242046356201, "learning_rate": 1.7748e-05, "loss": 0.0279, "step": 5919 }, { "epoch": 6.534511319712866, "grad_norm": 0.5645901560783386, "learning_rate": 1.7751e-05, "loss": 0.0265, "step": 5920 }, { "epoch": 6.5356156819436775, "grad_norm": 0.5207200050354004, "learning_rate": 1.7754e-05, "loss": 0.0285, "step": 5921 }, { "epoch": 6.536720044174489, "grad_norm": 0.4445388615131378, "learning_rate": 1.7757e-05, "loss": 0.0277, "step": 5922 }, { "epoch": 6.537824406405301, 
"grad_norm": 0.5366127490997314, "learning_rate": 1.776e-05, "loss": 0.0327, "step": 5923 }, { "epoch": 6.538928768636112, "grad_norm": 0.7293980717658997, "learning_rate": 1.7763e-05, "loss": 0.0327, "step": 5924 }, { "epoch": 6.540033130866925, "grad_norm": 0.9860379695892334, "learning_rate": 1.7766e-05, "loss": 0.0224, "step": 5925 }, { "epoch": 6.541137493097736, "grad_norm": 0.5576764941215515, "learning_rate": 1.7769000000000002e-05, "loss": 0.0178, "step": 5926 }, { "epoch": 6.542241855328548, "grad_norm": 0.6805833578109741, "learning_rate": 1.7772000000000002e-05, "loss": 0.0212, "step": 5927 }, { "epoch": 6.5433462175593595, "grad_norm": 0.5308120250701904, "learning_rate": 1.7775000000000002e-05, "loss": 0.0455, "step": 5928 }, { "epoch": 6.544450579790171, "grad_norm": 1.131613850593567, "learning_rate": 1.7778e-05, "loss": 0.0316, "step": 5929 }, { "epoch": 6.545554942020983, "grad_norm": 0.6017208099365234, "learning_rate": 1.7781e-05, "loss": 0.0268, "step": 5930 }, { "epoch": 6.546659304251794, "grad_norm": 0.6641790866851807, "learning_rate": 1.7784e-05, "loss": 0.0422, "step": 5931 }, { "epoch": 6.547763666482607, "grad_norm": 0.7072007060050964, "learning_rate": 1.7787e-05, "loss": 0.0301, "step": 5932 }, { "epoch": 6.548868028713418, "grad_norm": 0.6254093050956726, "learning_rate": 1.779e-05, "loss": 0.0321, "step": 5933 }, { "epoch": 6.549972390944229, "grad_norm": 0.628917932510376, "learning_rate": 1.7793e-05, "loss": 0.0289, "step": 5934 }, { "epoch": 6.551076753175042, "grad_norm": 0.6354422569274902, "learning_rate": 1.7796e-05, "loss": 0.0266, "step": 5935 }, { "epoch": 6.552181115405853, "grad_norm": 0.7170236706733704, "learning_rate": 1.7799000000000004e-05, "loss": 0.0201, "step": 5936 }, { "epoch": 6.553285477636665, "grad_norm": 1.0661280155181885, "learning_rate": 1.7802e-05, "loss": 0.3276, "step": 5937 }, { "epoch": 6.5543898398674765, "grad_norm": 0.8811688423156738, "learning_rate": 1.7805e-05, "loss": 0.2491, "step": 5938 }, 
{ "epoch": 6.555494202098288, "grad_norm": 0.928280234336853, "learning_rate": 1.7808e-05, "loss": 0.2685, "step": 5939 }, { "epoch": 6.5565985643291, "grad_norm": 1.0321481227874756, "learning_rate": 1.7811e-05, "loss": 0.2097, "step": 5940 }, { "epoch": 6.557702926559911, "grad_norm": 1.2822874784469604, "learning_rate": 1.7814e-05, "loss": 0.2276, "step": 5941 }, { "epoch": 6.558807288790724, "grad_norm": 0.9113977551460266, "learning_rate": 1.7817e-05, "loss": 0.2214, "step": 5942 }, { "epoch": 6.559911651021535, "grad_norm": 0.6136966943740845, "learning_rate": 1.782e-05, "loss": 0.0883, "step": 5943 }, { "epoch": 6.561016013252347, "grad_norm": 0.4098784029483795, "learning_rate": 1.7823e-05, "loss": 0.0551, "step": 5944 }, { "epoch": 6.5621203754831585, "grad_norm": 0.4774814546108246, "learning_rate": 1.7826e-05, "loss": 0.0527, "step": 5945 }, { "epoch": 6.56322473771397, "grad_norm": 0.6544873118400574, "learning_rate": 1.7829e-05, "loss": 0.0488, "step": 5946 }, { "epoch": 6.564329099944782, "grad_norm": 0.48310089111328125, "learning_rate": 1.7832e-05, "loss": 0.0473, "step": 5947 }, { "epoch": 6.565433462175593, "grad_norm": 0.4440838098526001, "learning_rate": 1.7835e-05, "loss": 0.0363, "step": 5948 }, { "epoch": 6.566537824406406, "grad_norm": 0.5889337658882141, "learning_rate": 1.7838e-05, "loss": 0.0186, "step": 5949 }, { "epoch": 6.567642186637217, "grad_norm": 0.9116459488868713, "learning_rate": 1.7841e-05, "loss": 0.0896, "step": 5950 }, { "epoch": 6.568746548868028, "grad_norm": 0.17190603911876678, "learning_rate": 1.7844e-05, "loss": 0.01, "step": 5951 }, { "epoch": 6.569850911098841, "grad_norm": 0.1925865113735199, "learning_rate": 1.7847e-05, "loss": 0.014, "step": 5952 }, { "epoch": 6.570955273329652, "grad_norm": 0.43541139364242554, "learning_rate": 1.785e-05, "loss": 0.0216, "step": 5953 }, { "epoch": 6.572059635560464, "grad_norm": 0.4172973930835724, "learning_rate": 1.7853e-05, "loss": 0.0259, "step": 5954 }, { "epoch": 
6.5731639977912755, "grad_norm": 0.9400425553321838, "learning_rate": 1.7856e-05, "loss": 0.0424, "step": 5955 }, { "epoch": 6.574268360022087, "grad_norm": 0.8660666346549988, "learning_rate": 1.7859000000000003e-05, "loss": 0.0406, "step": 5956 }, { "epoch": 6.575372722252899, "grad_norm": 0.4133029580116272, "learning_rate": 1.7862000000000003e-05, "loss": 0.0238, "step": 5957 }, { "epoch": 6.57647708448371, "grad_norm": 0.5508719086647034, "learning_rate": 1.7865000000000003e-05, "loss": 0.034, "step": 5958 }, { "epoch": 6.577581446714523, "grad_norm": 0.43803679943084717, "learning_rate": 1.7868000000000002e-05, "loss": 0.0149, "step": 5959 }, { "epoch": 6.578685808945334, "grad_norm": 0.6033844351768494, "learning_rate": 1.7871000000000002e-05, "loss": 0.0268, "step": 5960 }, { "epoch": 6.579790171176146, "grad_norm": 0.38297590613365173, "learning_rate": 1.7874000000000002e-05, "loss": 0.025, "step": 5961 }, { "epoch": 6.5808945334069575, "grad_norm": 0.4777425527572632, "learning_rate": 1.7877e-05, "loss": 0.0392, "step": 5962 }, { "epoch": 6.581998895637769, "grad_norm": 0.395020067691803, "learning_rate": 1.7879999999999998e-05, "loss": 0.0298, "step": 5963 }, { "epoch": 6.583103257868581, "grad_norm": 0.5393251180648804, "learning_rate": 1.7882999999999998e-05, "loss": 0.0273, "step": 5964 }, { "epoch": 6.584207620099392, "grad_norm": 0.38755086064338684, "learning_rate": 1.7885999999999998e-05, "loss": 0.0211, "step": 5965 }, { "epoch": 6.585311982330205, "grad_norm": 0.4171062707901001, "learning_rate": 1.7889e-05, "loss": 0.0245, "step": 5966 }, { "epoch": 6.586416344561016, "grad_norm": 0.4546162188053131, "learning_rate": 1.7892e-05, "loss": 0.0256, "step": 5967 }, { "epoch": 6.587520706791828, "grad_norm": 0.9808604717254639, "learning_rate": 1.7895e-05, "loss": 0.0759, "step": 5968 }, { "epoch": 6.58862506902264, "grad_norm": 0.4266658425331116, "learning_rate": 1.7898e-05, "loss": 0.0217, "step": 5969 }, { "epoch": 6.589729431253451, "grad_norm": 
0.5595084428787231, "learning_rate": 1.7901e-05, "loss": 0.0177, "step": 5970 }, { "epoch": 6.590833793484263, "grad_norm": 1.0313984155654907, "learning_rate": 1.7904e-05, "loss": 0.0454, "step": 5971 }, { "epoch": 6.5919381557150745, "grad_norm": 0.7298792004585266, "learning_rate": 1.7907e-05, "loss": 0.0213, "step": 5972 }, { "epoch": 6.593042517945886, "grad_norm": 0.5062596797943115, "learning_rate": 1.791e-05, "loss": 0.0239, "step": 5973 }, { "epoch": 6.594146880176698, "grad_norm": 0.5492674112319946, "learning_rate": 1.7913e-05, "loss": 0.0327, "step": 5974 }, { "epoch": 6.595251242407509, "grad_norm": 1.7280991077423096, "learning_rate": 1.7916e-05, "loss": 0.0226, "step": 5975 }, { "epoch": 6.596355604638322, "grad_norm": 0.7064198851585388, "learning_rate": 1.7919000000000002e-05, "loss": 0.034, "step": 5976 }, { "epoch": 6.597459966869133, "grad_norm": 0.4340056777000427, "learning_rate": 1.7922000000000002e-05, "loss": 0.0235, "step": 5977 }, { "epoch": 6.598564329099945, "grad_norm": 0.5070959329605103, "learning_rate": 1.7925000000000002e-05, "loss": 0.0234, "step": 5978 }, { "epoch": 6.5996686913307565, "grad_norm": 0.9153419733047485, "learning_rate": 1.7928000000000002e-05, "loss": 0.0512, "step": 5979 }, { "epoch": 6.600773053561568, "grad_norm": 0.6563403606414795, "learning_rate": 1.7931e-05, "loss": 0.0413, "step": 5980 }, { "epoch": 6.60187741579238, "grad_norm": 0.6111387014389038, "learning_rate": 1.7934e-05, "loss": 0.0353, "step": 5981 }, { "epoch": 6.602981778023191, "grad_norm": 0.3667881488800049, "learning_rate": 1.7937e-05, "loss": 0.0125, "step": 5982 }, { "epoch": 6.604086140254004, "grad_norm": 0.6091765761375427, "learning_rate": 1.794e-05, "loss": 0.0379, "step": 5983 }, { "epoch": 6.605190502484815, "grad_norm": 1.0038418769836426, "learning_rate": 1.7943e-05, "loss": 0.0407, "step": 5984 }, { "epoch": 6.606294864715627, "grad_norm": 0.5847413539886475, "learning_rate": 1.7946e-05, "loss": 0.0231, "step": 5985 }, { "epoch": 
6.6073992269464386, "grad_norm": 0.847882091999054, "learning_rate": 1.7949e-05, "loss": 0.0805, "step": 5986 }, { "epoch": 6.60850358917725, "grad_norm": 1.6529825925827026, "learning_rate": 1.7952e-05, "loss": 0.3317, "step": 5987 }, { "epoch": 6.609607951408062, "grad_norm": 1.3478641510009766, "learning_rate": 1.7955e-05, "loss": 0.2767, "step": 5988 }, { "epoch": 6.6107123136388735, "grad_norm": 0.9665031433105469, "learning_rate": 1.7958e-05, "loss": 0.1958, "step": 5989 }, { "epoch": 6.611816675869685, "grad_norm": 0.5928921103477478, "learning_rate": 1.7961e-05, "loss": 0.1791, "step": 5990 }, { "epoch": 6.612921038100497, "grad_norm": 0.8334739804267883, "learning_rate": 1.7964e-05, "loss": 0.1437, "step": 5991 }, { "epoch": 6.614025400331308, "grad_norm": 0.6691427230834961, "learning_rate": 1.7967e-05, "loss": 0.1556, "step": 5992 }, { "epoch": 6.615129762562121, "grad_norm": 0.8596627116203308, "learning_rate": 1.797e-05, "loss": 0.0844, "step": 5993 }, { "epoch": 6.616234124792932, "grad_norm": 0.49007678031921387, "learning_rate": 1.7973e-05, "loss": 0.062, "step": 5994 }, { "epoch": 6.617338487023744, "grad_norm": 0.4366150200366974, "learning_rate": 1.7976e-05, "loss": 0.0511, "step": 5995 }, { "epoch": 6.6184428492545555, "grad_norm": 0.7383002042770386, "learning_rate": 1.7979000000000002e-05, "loss": 0.0542, "step": 5996 }, { "epoch": 6.619547211485367, "grad_norm": 0.990368127822876, "learning_rate": 1.7982e-05, "loss": 0.0825, "step": 5997 }, { "epoch": 6.620651573716179, "grad_norm": 0.39217233657836914, "learning_rate": 1.7985e-05, "loss": 0.0242, "step": 5998 }, { "epoch": 6.62175593594699, "grad_norm": 0.39273205399513245, "learning_rate": 1.7988e-05, "loss": 0.0268, "step": 5999 }, { "epoch": 6.622860298177803, "grad_norm": 0.4091517925262451, "learning_rate": 1.7991e-05, "loss": 0.0292, "step": 6000 }, { "epoch": 6.622860298177803, "eval_cer": 0.12204163102366695, "eval_loss": 0.39775553345680237, "eval_runtime": 16.3258, 
"eval_samples_per_second": 18.621, "eval_steps_per_second": 0.613, "eval_wer": 0.4217191097467383, "step": 6000 }, { "epoch": 6.623964660408614, "grad_norm": 0.700230598449707, "learning_rate": 1.7994e-05, "loss": 0.0719, "step": 6001 }, { "epoch": 6.625069022639426, "grad_norm": 0.3406820595264435, "learning_rate": 1.7997e-05, "loss": 0.0603, "step": 6002 }, { "epoch": 6.6261733848702375, "grad_norm": 0.4282561242580414, "learning_rate": 1.8e-05, "loss": 0.0223, "step": 6003 }, { "epoch": 6.627277747101049, "grad_norm": 0.35954752564430237, "learning_rate": 1.8003e-05, "loss": 0.0298, "step": 6004 }, { "epoch": 6.628382109331861, "grad_norm": 0.3050801455974579, "learning_rate": 1.8006e-05, "loss": 0.0137, "step": 6005 }, { "epoch": 6.6294864715626725, "grad_norm": 0.5040187835693359, "learning_rate": 1.8009e-05, "loss": 0.0418, "step": 6006 }, { "epoch": 6.630590833793484, "grad_norm": 0.5774208903312683, "learning_rate": 1.8012000000000003e-05, "loss": 0.0293, "step": 6007 }, { "epoch": 6.631695196024296, "grad_norm": 0.6755000352859497, "learning_rate": 1.8015000000000003e-05, "loss": 0.0375, "step": 6008 }, { "epoch": 6.632799558255107, "grad_norm": 0.7096226811408997, "learning_rate": 1.8018000000000003e-05, "loss": 0.046, "step": 6009 }, { "epoch": 6.63390392048592, "grad_norm": 0.418525755405426, "learning_rate": 1.8021000000000002e-05, "loss": 0.0168, "step": 6010 }, { "epoch": 6.635008282716731, "grad_norm": 0.32975828647613525, "learning_rate": 1.8024e-05, "loss": 0.016, "step": 6011 }, { "epoch": 6.636112644947543, "grad_norm": 0.2595294117927551, "learning_rate": 1.8027e-05, "loss": 0.0161, "step": 6012 }, { "epoch": 6.6372170071783545, "grad_norm": 0.4817773997783661, "learning_rate": 1.803e-05, "loss": 0.0331, "step": 6013 }, { "epoch": 6.638321369409166, "grad_norm": 0.5190382599830627, "learning_rate": 1.8032999999999998e-05, "loss": 0.0372, "step": 6014 }, { "epoch": 6.639425731639978, "grad_norm": 0.48946434259414673, "learning_rate": 
1.8035999999999998e-05, "loss": 0.0316, "step": 6015 }, { "epoch": 6.640530093870789, "grad_norm": 0.7962324023246765, "learning_rate": 1.8038999999999998e-05, "loss": 0.0263, "step": 6016 }, { "epoch": 6.641634456101602, "grad_norm": 0.6464152932167053, "learning_rate": 1.8042e-05, "loss": 0.0348, "step": 6017 }, { "epoch": 6.642738818332413, "grad_norm": 0.5147970914840698, "learning_rate": 1.8045e-05, "loss": 0.0209, "step": 6018 }, { "epoch": 6.643843180563225, "grad_norm": 0.6151257157325745, "learning_rate": 1.8048e-05, "loss": 0.0207, "step": 6019 }, { "epoch": 6.6449475427940365, "grad_norm": 0.3855687975883484, "learning_rate": 1.8051e-05, "loss": 0.0151, "step": 6020 }, { "epoch": 6.646051905024848, "grad_norm": 0.4715401232242584, "learning_rate": 1.8054e-05, "loss": 0.0341, "step": 6021 }, { "epoch": 6.64715626725566, "grad_norm": 0.4135405421257019, "learning_rate": 1.8057e-05, "loss": 0.018, "step": 6022 }, { "epoch": 6.6482606294864715, "grad_norm": 0.4161234200000763, "learning_rate": 1.806e-05, "loss": 0.024, "step": 6023 }, { "epoch": 6.649364991717283, "grad_norm": 0.5753915905952454, "learning_rate": 1.8063e-05, "loss": 0.0356, "step": 6024 }, { "epoch": 6.650469353948095, "grad_norm": 0.6007962822914124, "learning_rate": 1.8066e-05, "loss": 0.0345, "step": 6025 }, { "epoch": 6.651573716178906, "grad_norm": 0.614213228225708, "learning_rate": 1.8069e-05, "loss": 0.0258, "step": 6026 }, { "epoch": 6.652678078409719, "grad_norm": 0.5193926692008972, "learning_rate": 1.8072000000000002e-05, "loss": 0.026, "step": 6027 }, { "epoch": 6.65378244064053, "grad_norm": 0.58078932762146, "learning_rate": 1.8075000000000002e-05, "loss": 0.0267, "step": 6028 }, { "epoch": 6.654886802871342, "grad_norm": 0.5745037794113159, "learning_rate": 1.8078000000000002e-05, "loss": 0.0375, "step": 6029 }, { "epoch": 6.6559911651021535, "grad_norm": 0.6999252438545227, "learning_rate": 1.8081000000000002e-05, "loss": 0.0357, "step": 6030 }, { "epoch": 6.657095527332965, 
"grad_norm": 0.7945511937141418, "learning_rate": 1.8084e-05, "loss": 0.0525, "step": 6031 }, { "epoch": 6.658199889563777, "grad_norm": 0.49946698546409607, "learning_rate": 1.8087e-05, "loss": 0.0348, "step": 6032 }, { "epoch": 6.659304251794588, "grad_norm": 0.41630813479423523, "learning_rate": 1.809e-05, "loss": 0.0312, "step": 6033 }, { "epoch": 6.660408614025401, "grad_norm": 0.7783458828926086, "learning_rate": 1.8093e-05, "loss": 0.0329, "step": 6034 }, { "epoch": 6.661512976256212, "grad_norm": 0.9782130122184753, "learning_rate": 1.8096e-05, "loss": 0.0599, "step": 6035 }, { "epoch": 6.662617338487024, "grad_norm": 1.086663007736206, "learning_rate": 1.8098999999999997e-05, "loss": 0.0378, "step": 6036 }, { "epoch": 6.6637217007178355, "grad_norm": 1.584932565689087, "learning_rate": 1.8102e-05, "loss": 0.3492, "step": 6037 }, { "epoch": 6.664826062948647, "grad_norm": 0.9818201661109924, "learning_rate": 1.8105e-05, "loss": 0.277, "step": 6038 }, { "epoch": 6.665930425179459, "grad_norm": 1.024071455001831, "learning_rate": 1.8108e-05, "loss": 0.2283, "step": 6039 }, { "epoch": 6.6670347874102704, "grad_norm": 0.767372190952301, "learning_rate": 1.8111e-05, "loss": 0.1529, "step": 6040 }, { "epoch": 6.668139149641082, "grad_norm": 0.9772828221321106, "learning_rate": 1.8114e-05, "loss": 0.2112, "step": 6041 }, { "epoch": 6.669243511871894, "grad_norm": 0.6313601732254028, "learning_rate": 1.8117e-05, "loss": 0.1061, "step": 6042 }, { "epoch": 6.670347874102705, "grad_norm": 0.630885124206543, "learning_rate": 1.812e-05, "loss": 0.1365, "step": 6043 }, { "epoch": 6.671452236333518, "grad_norm": 0.4911230504512787, "learning_rate": 1.8123e-05, "loss": 0.0588, "step": 6044 }, { "epoch": 6.672556598564329, "grad_norm": 0.6889855265617371, "learning_rate": 1.8126e-05, "loss": 0.0895, "step": 6045 }, { "epoch": 6.673660960795141, "grad_norm": 0.3055698573589325, "learning_rate": 1.8129e-05, "loss": 0.0723, "step": 6046 }, { "epoch": 6.6747653230259525, 
"grad_norm": 0.2880754768848419, "learning_rate": 1.8132000000000002e-05, "loss": 0.0301, "step": 6047 }, { "epoch": 6.675869685256764, "grad_norm": 0.5275571942329407, "learning_rate": 1.8135000000000002e-05, "loss": 0.0457, "step": 6048 }, { "epoch": 6.676974047487576, "grad_norm": 0.330114483833313, "learning_rate": 1.8138e-05, "loss": 0.0388, "step": 6049 }, { "epoch": 6.678078409718387, "grad_norm": 0.6183009147644043, "learning_rate": 1.8141e-05, "loss": 0.0531, "step": 6050 }, { "epoch": 6.6791827719492, "grad_norm": 0.3987119495868683, "learning_rate": 1.8144e-05, "loss": 0.0305, "step": 6051 }, { "epoch": 6.680287134180011, "grad_norm": 0.6232004165649414, "learning_rate": 1.8147e-05, "loss": 0.028, "step": 6052 }, { "epoch": 6.681391496410823, "grad_norm": 0.39820823073387146, "learning_rate": 1.815e-05, "loss": 0.026, "step": 6053 }, { "epoch": 6.6824958586416345, "grad_norm": 0.38190045952796936, "learning_rate": 1.8153e-05, "loss": 0.0327, "step": 6054 }, { "epoch": 6.683600220872446, "grad_norm": 0.301582008600235, "learning_rate": 1.8156e-05, "loss": 0.0239, "step": 6055 }, { "epoch": 6.684704583103258, "grad_norm": 0.5396243929862976, "learning_rate": 1.8159e-05, "loss": 0.0401, "step": 6056 }, { "epoch": 6.685808945334069, "grad_norm": 0.6830040216445923, "learning_rate": 1.8162000000000003e-05, "loss": 0.0376, "step": 6057 }, { "epoch": 6.686913307564881, "grad_norm": 0.7570891976356506, "learning_rate": 1.8165000000000003e-05, "loss": 0.049, "step": 6058 }, { "epoch": 6.688017669795693, "grad_norm": 0.7893233895301819, "learning_rate": 1.8168000000000003e-05, "loss": 0.0351, "step": 6059 }, { "epoch": 6.689122032026504, "grad_norm": 0.7296843528747559, "learning_rate": 1.8171e-05, "loss": 0.0404, "step": 6060 }, { "epoch": 6.690226394257317, "grad_norm": 0.4165400266647339, "learning_rate": 1.8174e-05, "loss": 0.0229, "step": 6061 }, { "epoch": 6.691330756488128, "grad_norm": 0.5471863746643066, "learning_rate": 1.8177e-05, "loss": 0.03, "step": 
6062 }, { "epoch": 6.69243511871894, "grad_norm": 0.38781389594078064, "learning_rate": 1.818e-05, "loss": 0.0195, "step": 6063 }, { "epoch": 6.6935394809497515, "grad_norm": 0.3928893208503723, "learning_rate": 1.8183e-05, "loss": 0.0189, "step": 6064 }, { "epoch": 6.694643843180563, "grad_norm": 0.462735116481781, "learning_rate": 1.8186e-05, "loss": 0.0209, "step": 6065 }, { "epoch": 6.695748205411375, "grad_norm": 0.6046942472457886, "learning_rate": 1.8188999999999998e-05, "loss": 0.0277, "step": 6066 }, { "epoch": 6.696852567642186, "grad_norm": 0.31396758556365967, "learning_rate": 1.8192e-05, "loss": 0.0119, "step": 6067 }, { "epoch": 6.697956929872999, "grad_norm": 0.44893527030944824, "learning_rate": 1.8195e-05, "loss": 0.0262, "step": 6068 }, { "epoch": 6.69906129210381, "grad_norm": 0.538561224937439, "learning_rate": 1.8198e-05, "loss": 0.0216, "step": 6069 }, { "epoch": 6.700165654334622, "grad_norm": 0.5266879200935364, "learning_rate": 1.8201e-05, "loss": 0.0208, "step": 6070 }, { "epoch": 6.7012700165654335, "grad_norm": 0.3823188543319702, "learning_rate": 1.8204e-05, "loss": 0.0343, "step": 6071 }, { "epoch": 6.702374378796245, "grad_norm": 0.443411260843277, "learning_rate": 1.8207e-05, "loss": 0.029, "step": 6072 }, { "epoch": 6.703478741027057, "grad_norm": 0.7538196444511414, "learning_rate": 1.821e-05, "loss": 0.0323, "step": 6073 }, { "epoch": 6.704583103257868, "grad_norm": 0.40460628271102905, "learning_rate": 1.8213e-05, "loss": 0.0252, "step": 6074 }, { "epoch": 6.705687465488681, "grad_norm": 0.5964566469192505, "learning_rate": 1.8216e-05, "loss": 0.0423, "step": 6075 }, { "epoch": 6.706791827719492, "grad_norm": 0.552986741065979, "learning_rate": 1.8219e-05, "loss": 0.032, "step": 6076 }, { "epoch": 6.707896189950303, "grad_norm": 0.8866598606109619, "learning_rate": 1.8222000000000003e-05, "loss": 0.0269, "step": 6077 }, { "epoch": 6.709000552181116, "grad_norm": 0.777339518070221, "learning_rate": 1.8225000000000003e-05, "loss": 
0.0455, "step": 6078 }, { "epoch": 6.710104914411927, "grad_norm": 0.5727009773254395, "learning_rate": 1.8228000000000002e-05, "loss": 0.0304, "step": 6079 }, { "epoch": 6.711209276642739, "grad_norm": 1.0555564165115356, "learning_rate": 1.8231000000000002e-05, "loss": 0.0328, "step": 6080 }, { "epoch": 6.7123136388735505, "grad_norm": 0.4529401361942291, "learning_rate": 1.8234000000000002e-05, "loss": 0.0236, "step": 6081 }, { "epoch": 6.713418001104362, "grad_norm": 0.4570425748825073, "learning_rate": 1.8237000000000002e-05, "loss": 0.0251, "step": 6082 }, { "epoch": 6.714522363335174, "grad_norm": 0.5978027582168579, "learning_rate": 1.824e-05, "loss": 0.0305, "step": 6083 }, { "epoch": 6.715626725565985, "grad_norm": 0.4758816361427307, "learning_rate": 1.8243e-05, "loss": 0.0215, "step": 6084 }, { "epoch": 6.716731087796798, "grad_norm": 0.5072824358940125, "learning_rate": 1.8245999999999998e-05, "loss": 0.0293, "step": 6085 }, { "epoch": 6.717835450027609, "grad_norm": 0.44837135076522827, "learning_rate": 1.8248999999999998e-05, "loss": 0.0165, "step": 6086 }, { "epoch": 6.718939812258421, "grad_norm": 0.7581111788749695, "learning_rate": 1.8252e-05, "loss": 0.2759, "step": 6087 }, { "epoch": 6.7200441744892325, "grad_norm": 0.8819749355316162, "learning_rate": 1.8255e-05, "loss": 0.2505, "step": 6088 }, { "epoch": 6.721148536720044, "grad_norm": 1.013227939605713, "learning_rate": 1.8258e-05, "loss": 0.2707, "step": 6089 }, { "epoch": 6.722252898950856, "grad_norm": 1.0007261037826538, "learning_rate": 1.8261e-05, "loss": 0.1969, "step": 6090 }, { "epoch": 6.723357261181667, "grad_norm": 0.8670690059661865, "learning_rate": 1.8264e-05, "loss": 0.1611, "step": 6091 }, { "epoch": 6.72446162341248, "grad_norm": 1.0920701026916504, "learning_rate": 1.8267e-05, "loss": 0.1771, "step": 6092 }, { "epoch": 6.725565985643291, "grad_norm": 0.8207418918609619, "learning_rate": 1.827e-05, "loss": 0.0932, "step": 6093 }, { "epoch": 6.726670347874102, "grad_norm": 
1.0706144571304321, "learning_rate": 1.8273e-05, "loss": 0.1179, "step": 6094 }, { "epoch": 6.727774710104915, "grad_norm": 0.7366626262664795, "learning_rate": 1.8276e-05, "loss": 0.0741, "step": 6095 }, { "epoch": 6.728879072335726, "grad_norm": 0.7971512675285339, "learning_rate": 1.8279e-05, "loss": 0.0456, "step": 6096 }, { "epoch": 6.729983434566538, "grad_norm": 0.38707929849624634, "learning_rate": 1.8282000000000002e-05, "loss": 0.0484, "step": 6097 }, { "epoch": 6.7310877967973495, "grad_norm": 0.3128044903278351, "learning_rate": 1.8285000000000002e-05, "loss": 0.0194, "step": 6098 }, { "epoch": 6.732192159028161, "grad_norm": 0.4848228394985199, "learning_rate": 1.8288000000000002e-05, "loss": 0.034, "step": 6099 }, { "epoch": 6.733296521258973, "grad_norm": 0.29056546092033386, "learning_rate": 1.8291e-05, "loss": 0.0221, "step": 6100 }, { "epoch": 6.734400883489784, "grad_norm": 0.48036226630210876, "learning_rate": 1.8294e-05, "loss": 0.032, "step": 6101 }, { "epoch": 6.735505245720597, "grad_norm": 0.2939308285713196, "learning_rate": 1.8297e-05, "loss": 0.0184, "step": 6102 }, { "epoch": 6.736609607951408, "grad_norm": 0.5430750846862793, "learning_rate": 1.83e-05, "loss": 0.0241, "step": 6103 }, { "epoch": 6.73771397018222, "grad_norm": 0.318703830242157, "learning_rate": 1.8303e-05, "loss": 0.0217, "step": 6104 }, { "epoch": 6.7388183324130315, "grad_norm": 0.7178899049758911, "learning_rate": 1.8306e-05, "loss": 0.0451, "step": 6105 }, { "epoch": 6.739922694643843, "grad_norm": 0.3039206564426422, "learning_rate": 1.8309e-05, "loss": 0.016, "step": 6106 }, { "epoch": 6.741027056874655, "grad_norm": 1.3387092351913452, "learning_rate": 1.8312000000000004e-05, "loss": 0.0313, "step": 6107 }, { "epoch": 6.742131419105466, "grad_norm": 0.47757604718208313, "learning_rate": 1.8315000000000003e-05, "loss": 0.018, "step": 6108 }, { "epoch": 6.743235781336279, "grad_norm": 0.5553205013275146, "learning_rate": 1.8318e-05, "loss": 0.0377, "step": 6109 }, 
{ "epoch": 6.74434014356709, "grad_norm": 0.5995219945907593, "learning_rate": 1.8321e-05, "loss": 0.0311, "step": 6110 }, { "epoch": 6.745444505797901, "grad_norm": 0.8935156464576721, "learning_rate": 1.8324e-05, "loss": 0.0247, "step": 6111 }, { "epoch": 6.746548868028714, "grad_norm": 0.8019065856933594, "learning_rate": 1.8327e-05, "loss": 0.0257, "step": 6112 }, { "epoch": 6.747653230259525, "grad_norm": 0.2909733057022095, "learning_rate": 1.833e-05, "loss": 0.0169, "step": 6113 }, { "epoch": 6.748757592490337, "grad_norm": 0.29247862100601196, "learning_rate": 1.8333e-05, "loss": 0.0172, "step": 6114 }, { "epoch": 6.7498619547211485, "grad_norm": 0.38604113459587097, "learning_rate": 1.8336e-05, "loss": 0.0263, "step": 6115 }, { "epoch": 6.75096631695196, "grad_norm": 0.6744775176048279, "learning_rate": 1.8339e-05, "loss": 0.0233, "step": 6116 }, { "epoch": 6.752070679182772, "grad_norm": 0.5418105125427246, "learning_rate": 1.8342e-05, "loss": 0.0351, "step": 6117 }, { "epoch": 6.753175041413583, "grad_norm": 0.7272911071777344, "learning_rate": 1.8345e-05, "loss": 0.038, "step": 6118 }, { "epoch": 6.754279403644396, "grad_norm": 0.9763161540031433, "learning_rate": 1.8348e-05, "loss": 0.037, "step": 6119 }, { "epoch": 6.755383765875207, "grad_norm": 0.38332048058509827, "learning_rate": 1.8351e-05, "loss": 0.0183, "step": 6120 }, { "epoch": 6.756488128106019, "grad_norm": 0.5180141925811768, "learning_rate": 1.8354e-05, "loss": 0.0376, "step": 6121 }, { "epoch": 6.7575924903368305, "grad_norm": 0.3747299015522003, "learning_rate": 1.8357e-05, "loss": 0.0194, "step": 6122 }, { "epoch": 6.758696852567642, "grad_norm": 0.3925855755805969, "learning_rate": 1.836e-05, "loss": 0.0201, "step": 6123 }, { "epoch": 6.759801214798454, "grad_norm": 0.41719067096710205, "learning_rate": 1.8363e-05, "loss": 0.0307, "step": 6124 }, { "epoch": 6.760905577029265, "grad_norm": 0.47686925530433655, "learning_rate": 1.8366e-05, "loss": 0.0396, "step": 6125 }, { "epoch": 
6.762009939260078, "grad_norm": 0.5525075197219849, "learning_rate": 1.8369e-05, "loss": 0.0298, "step": 6126 }, { "epoch": 6.763114301490889, "grad_norm": 0.22781023383140564, "learning_rate": 1.8372000000000003e-05, "loss": 0.0121, "step": 6127 }, { "epoch": 6.7642186637217, "grad_norm": 0.5475013256072998, "learning_rate": 1.8375000000000003e-05, "loss": 0.0422, "step": 6128 }, { "epoch": 6.765323025952513, "grad_norm": 0.5355935096740723, "learning_rate": 1.8378000000000003e-05, "loss": 0.0302, "step": 6129 }, { "epoch": 6.766427388183324, "grad_norm": 0.6423351168632507, "learning_rate": 1.8381000000000002e-05, "loss": 0.0413, "step": 6130 }, { "epoch": 6.767531750414136, "grad_norm": 0.7449302673339844, "learning_rate": 1.8384000000000002e-05, "loss": 0.0339, "step": 6131 }, { "epoch": 6.7686361126449475, "grad_norm": 0.5687386393547058, "learning_rate": 1.8387000000000002e-05, "loss": 0.0259, "step": 6132 }, { "epoch": 6.769740474875759, "grad_norm": 0.540291965007782, "learning_rate": 1.8390000000000002e-05, "loss": 0.0204, "step": 6133 }, { "epoch": 6.770844837106571, "grad_norm": 0.7937856316566467, "learning_rate": 1.8392999999999998e-05, "loss": 0.0341, "step": 6134 }, { "epoch": 6.771949199337382, "grad_norm": 1.2349282503128052, "learning_rate": 1.8395999999999998e-05, "loss": 0.054, "step": 6135 }, { "epoch": 6.773053561568195, "grad_norm": 0.7137627005577087, "learning_rate": 1.8398999999999998e-05, "loss": 0.0404, "step": 6136 }, { "epoch": 6.774157923799006, "grad_norm": 0.9165695309638977, "learning_rate": 1.8401999999999998e-05, "loss": 0.2844, "step": 6137 }, { "epoch": 6.775262286029818, "grad_norm": 1.069711685180664, "learning_rate": 1.8405e-05, "loss": 0.2591, "step": 6138 }, { "epoch": 6.7763666482606295, "grad_norm": 1.0909423828125, "learning_rate": 1.8408e-05, "loss": 0.1945, "step": 6139 }, { "epoch": 6.777471010491441, "grad_norm": 0.5628758072853088, "learning_rate": 1.8411e-05, "loss": 0.1461, "step": 6140 }, { "epoch": 
6.778575372722253, "grad_norm": 0.6637890338897705, "learning_rate": 1.8414e-05, "loss": 0.1644, "step": 6141 }, { "epoch": 6.779679734953064, "grad_norm": 0.8698848485946655, "learning_rate": 1.8417e-05, "loss": 0.0965, "step": 6142 }, { "epoch": 6.780784097183877, "grad_norm": 0.5967145562171936, "learning_rate": 1.842e-05, "loss": 0.093, "step": 6143 }, { "epoch": 6.781888459414688, "grad_norm": 0.3210154175758362, "learning_rate": 1.8423e-05, "loss": 0.0342, "step": 6144 }, { "epoch": 6.7829928216455, "grad_norm": 0.3783549666404724, "learning_rate": 1.8426e-05, "loss": 0.0254, "step": 6145 }, { "epoch": 6.7840971838763116, "grad_norm": 0.6982157826423645, "learning_rate": 1.8429e-05, "loss": 0.0626, "step": 6146 }, { "epoch": 6.785201546107123, "grad_norm": 0.5499213933944702, "learning_rate": 1.8432e-05, "loss": 0.0511, "step": 6147 }, { "epoch": 6.786305908337935, "grad_norm": 0.4150497317314148, "learning_rate": 1.8435000000000002e-05, "loss": 0.0398, "step": 6148 }, { "epoch": 6.7874102705687465, "grad_norm": 0.6879738569259644, "learning_rate": 1.8438000000000002e-05, "loss": 0.0484, "step": 6149 }, { "epoch": 6.788514632799558, "grad_norm": 0.3668714463710785, "learning_rate": 1.8441000000000002e-05, "loss": 0.0425, "step": 6150 }, { "epoch": 6.78961899503037, "grad_norm": 0.30864471197128296, "learning_rate": 1.8444e-05, "loss": 0.0161, "step": 6151 }, { "epoch": 6.790723357261181, "grad_norm": 0.4017232656478882, "learning_rate": 1.8447e-05, "loss": 0.0338, "step": 6152 }, { "epoch": 6.791827719491994, "grad_norm": 0.5809424519538879, "learning_rate": 1.845e-05, "loss": 0.0213, "step": 6153 }, { "epoch": 6.792932081722805, "grad_norm": 0.4874705672264099, "learning_rate": 1.8453e-05, "loss": 0.0312, "step": 6154 }, { "epoch": 6.794036443953617, "grad_norm": 0.38915854692459106, "learning_rate": 1.8456e-05, "loss": 0.0274, "step": 6155 }, { "epoch": 6.7951408061844285, "grad_norm": 0.3797423839569092, "learning_rate": 1.8459e-05, "loss": 0.0215, "step": 
6156 }, { "epoch": 6.79624516841524, "grad_norm": 0.5824673175811768, "learning_rate": 1.8462e-05, "loss": 0.0327, "step": 6157 }, { "epoch": 6.797349530646052, "grad_norm": 0.468342125415802, "learning_rate": 1.8465e-05, "loss": 0.0406, "step": 6158 }, { "epoch": 6.798453892876863, "grad_norm": 0.31428292393684387, "learning_rate": 1.8468e-05, "loss": 0.0232, "step": 6159 }, { "epoch": 6.799558255107676, "grad_norm": 0.8632917404174805, "learning_rate": 1.8471e-05, "loss": 0.0877, "step": 6160 }, { "epoch": 6.800662617338487, "grad_norm": 0.6474770307540894, "learning_rate": 1.8474e-05, "loss": 0.036, "step": 6161 }, { "epoch": 6.801766979569299, "grad_norm": 0.47935959696769714, "learning_rate": 1.8477e-05, "loss": 0.0332, "step": 6162 }, { "epoch": 6.8028713418001105, "grad_norm": 0.3289296329021454, "learning_rate": 1.848e-05, "loss": 0.0233, "step": 6163 }, { "epoch": 6.803975704030922, "grad_norm": 0.5767672061920166, "learning_rate": 1.8483e-05, "loss": 0.0345, "step": 6164 }, { "epoch": 6.805080066261734, "grad_norm": 0.302485853433609, "learning_rate": 1.8486e-05, "loss": 0.0163, "step": 6165 }, { "epoch": 6.8061844284925455, "grad_norm": 0.4339601397514343, "learning_rate": 1.8489e-05, "loss": 0.0218, "step": 6166 }, { "epoch": 6.807288790723357, "grad_norm": 0.4891679286956787, "learning_rate": 1.8492e-05, "loss": 0.0326, "step": 6167 }, { "epoch": 6.808393152954169, "grad_norm": 0.6433894038200378, "learning_rate": 1.8495e-05, "loss": 0.0454, "step": 6168 }, { "epoch": 6.80949751518498, "grad_norm": 0.9040594100952148, "learning_rate": 1.8498e-05, "loss": 0.0237, "step": 6169 }, { "epoch": 6.810601877415793, "grad_norm": 0.8676738739013672, "learning_rate": 1.8501e-05, "loss": 0.0228, "step": 6170 }, { "epoch": 6.811706239646604, "grad_norm": 0.6273314952850342, "learning_rate": 1.8504e-05, "loss": 0.0253, "step": 6171 }, { "epoch": 6.812810601877416, "grad_norm": 0.5535468459129333, "learning_rate": 1.8507e-05, "loss": 0.0172, "step": 6172 }, { 
"epoch": 6.8139149641082275, "grad_norm": 0.6574344635009766, "learning_rate": 1.851e-05, "loss": 0.0295, "step": 6173 }, { "epoch": 6.815019326339039, "grad_norm": 1.1350456476211548, "learning_rate": 1.8513e-05, "loss": 0.0267, "step": 6174 }, { "epoch": 6.816123688569851, "grad_norm": 0.4380963444709778, "learning_rate": 1.8516e-05, "loss": 0.0277, "step": 6175 }, { "epoch": 6.817228050800662, "grad_norm": 0.6343154311180115, "learning_rate": 1.8519e-05, "loss": 0.0256, "step": 6176 }, { "epoch": 6.818332413031475, "grad_norm": 0.2868574261665344, "learning_rate": 1.8522e-05, "loss": 0.015, "step": 6177 }, { "epoch": 6.819436775262286, "grad_norm": 0.5427692532539368, "learning_rate": 1.8525000000000003e-05, "loss": 0.029, "step": 6178 }, { "epoch": 6.820541137493098, "grad_norm": 0.7414634227752686, "learning_rate": 1.8528000000000003e-05, "loss": 0.028, "step": 6179 }, { "epoch": 6.8216454997239095, "grad_norm": 0.8069746494293213, "learning_rate": 1.8531000000000003e-05, "loss": 0.0416, "step": 6180 }, { "epoch": 6.822749861954721, "grad_norm": 0.6444681286811829, "learning_rate": 1.8534000000000002e-05, "loss": 0.0385, "step": 6181 }, { "epoch": 6.823854224185533, "grad_norm": 0.7770747542381287, "learning_rate": 1.8537000000000002e-05, "loss": 0.0413, "step": 6182 }, { "epoch": 6.8249585864163445, "grad_norm": 0.4611702561378479, "learning_rate": 1.854e-05, "loss": 0.0211, "step": 6183 }, { "epoch": 6.826062948647156, "grad_norm": 0.8735053539276123, "learning_rate": 1.8543e-05, "loss": 0.0428, "step": 6184 }, { "epoch": 6.827167310877968, "grad_norm": 0.4150274395942688, "learning_rate": 1.8545999999999998e-05, "loss": 0.017, "step": 6185 }, { "epoch": 6.828271673108779, "grad_norm": 0.8746829032897949, "learning_rate": 1.8548999999999998e-05, "loss": 0.0327, "step": 6186 }, { "epoch": 6.829376035339592, "grad_norm": 1.2876653671264648, "learning_rate": 1.8551999999999998e-05, "loss": 0.344, "step": 6187 }, { "epoch": 6.830480397570403, "grad_norm": 
1.2630949020385742, "learning_rate": 1.8555e-05, "loss": 0.1986, "step": 6188 }, { "epoch": 6.831584759801215, "grad_norm": 0.8520643711090088, "learning_rate": 1.8558e-05, "loss": 0.2566, "step": 6189 }, { "epoch": 6.8326891220320265, "grad_norm": 1.0721492767333984, "learning_rate": 1.8561e-05, "loss": 0.2431, "step": 6190 }, { "epoch": 6.833793484262838, "grad_norm": 0.6422768831253052, "learning_rate": 1.8564e-05, "loss": 0.1691, "step": 6191 }, { "epoch": 6.83489784649365, "grad_norm": 0.7583667039871216, "learning_rate": 1.8567e-05, "loss": 0.1016, "step": 6192 }, { "epoch": 6.836002208724461, "grad_norm": 0.9485027194023132, "learning_rate": 1.857e-05, "loss": 0.1112, "step": 6193 }, { "epoch": 6.837106570955274, "grad_norm": 0.4779466986656189, "learning_rate": 1.8573e-05, "loss": 0.0408, "step": 6194 }, { "epoch": 6.838210933186085, "grad_norm": 0.5466563701629639, "learning_rate": 1.8576e-05, "loss": 0.0994, "step": 6195 }, { "epoch": 6.839315295416897, "grad_norm": 0.6853750944137573, "learning_rate": 1.8579e-05, "loss": 0.0724, "step": 6196 }, { "epoch": 6.8404196576477085, "grad_norm": 0.4760863482952118, "learning_rate": 1.8582e-05, "loss": 0.0362, "step": 6197 }, { "epoch": 6.84152401987852, "grad_norm": 0.550514280796051, "learning_rate": 1.8585000000000002e-05, "loss": 0.0483, "step": 6198 }, { "epoch": 6.842628382109332, "grad_norm": 0.8511683344841003, "learning_rate": 1.8588000000000002e-05, "loss": 0.0425, "step": 6199 }, { "epoch": 6.8437327443401434, "grad_norm": 0.3580152988433838, "learning_rate": 1.8591000000000002e-05, "loss": 0.0314, "step": 6200 }, { "epoch": 6.844837106570955, "grad_norm": 0.6695486307144165, "learning_rate": 1.8594000000000002e-05, "loss": 0.017, "step": 6201 }, { "epoch": 6.845941468801767, "grad_norm": 0.34591245651245117, "learning_rate": 1.8597e-05, "loss": 0.0213, "step": 6202 }, { "epoch": 6.847045831032578, "grad_norm": 0.4149893522262573, "learning_rate": 1.86e-05, "loss": 0.0272, "step": 6203 }, { "epoch": 
6.848150193263391, "grad_norm": 0.6579132676124573, "learning_rate": 1.8603e-05, "loss": 0.0271, "step": 6204 }, { "epoch": 6.849254555494202, "grad_norm": 0.6396152377128601, "learning_rate": 1.8606e-05, "loss": 0.0325, "step": 6205 }, { "epoch": 6.850358917725014, "grad_norm": 0.41037341952323914, "learning_rate": 1.8609e-05, "loss": 0.0228, "step": 6206 }, { "epoch": 6.8514632799558255, "grad_norm": 0.5101866126060486, "learning_rate": 1.8612e-05, "loss": 0.0268, "step": 6207 }, { "epoch": 6.852567642186637, "grad_norm": 0.9895380139350891, "learning_rate": 1.8615e-05, "loss": 0.0231, "step": 6208 }, { "epoch": 6.853672004417449, "grad_norm": 0.36830002069473267, "learning_rate": 1.8618e-05, "loss": 0.0149, "step": 6209 }, { "epoch": 6.85477636664826, "grad_norm": 0.44730344414711, "learning_rate": 1.8621e-05, "loss": 0.0188, "step": 6210 }, { "epoch": 6.855880728879073, "grad_norm": 1.623414397239685, "learning_rate": 1.8624e-05, "loss": 0.0222, "step": 6211 }, { "epoch": 6.856985091109884, "grad_norm": 0.5499439835548401, "learning_rate": 1.8627e-05, "loss": 0.0343, "step": 6212 }, { "epoch": 6.858089453340696, "grad_norm": 1.32041335105896, "learning_rate": 1.863e-05, "loss": 0.031, "step": 6213 }, { "epoch": 6.8591938155715075, "grad_norm": 0.5656471252441406, "learning_rate": 1.8633e-05, "loss": 0.0274, "step": 6214 }, { "epoch": 6.860298177802319, "grad_norm": 0.9413256049156189, "learning_rate": 1.8636e-05, "loss": 0.0253, "step": 6215 }, { "epoch": 6.861402540033131, "grad_norm": 0.7133969664573669, "learning_rate": 1.8639e-05, "loss": 0.0308, "step": 6216 }, { "epoch": 6.862506902263942, "grad_norm": 1.1099807024002075, "learning_rate": 1.8642e-05, "loss": 0.04, "step": 6217 }, { "epoch": 6.863611264494754, "grad_norm": 0.368562251329422, "learning_rate": 1.8645000000000002e-05, "loss": 0.0273, "step": 6218 }, { "epoch": 6.864715626725566, "grad_norm": 0.3793617784976959, "learning_rate": 1.8648000000000002e-05, "loss": 0.0219, "step": 6219 }, { 
"epoch": 6.865819988956377, "grad_norm": 1.216607689857483, "learning_rate": 1.8651e-05, "loss": 0.0458, "step": 6220 }, { "epoch": 6.86692435118719, "grad_norm": 0.4696968197822571, "learning_rate": 1.8654e-05, "loss": 0.0149, "step": 6221 }, { "epoch": 6.868028713418001, "grad_norm": 0.42305681109428406, "learning_rate": 1.8657e-05, "loss": 0.0274, "step": 6222 }, { "epoch": 6.869133075648813, "grad_norm": 0.4053809344768524, "learning_rate": 1.866e-05, "loss": 0.0134, "step": 6223 }, { "epoch": 6.8702374378796245, "grad_norm": 0.6196668744087219, "learning_rate": 1.8663e-05, "loss": 0.0375, "step": 6224 }, { "epoch": 6.871341800110436, "grad_norm": 1.1721738576889038, "learning_rate": 1.8666e-05, "loss": 0.0253, "step": 6225 }, { "epoch": 6.872446162341248, "grad_norm": 0.36359113454818726, "learning_rate": 1.8669e-05, "loss": 0.0244, "step": 6226 }, { "epoch": 6.873550524572059, "grad_norm": 0.5981326103210449, "learning_rate": 1.8672e-05, "loss": 0.0373, "step": 6227 }, { "epoch": 6.874654886802872, "grad_norm": 0.5180049538612366, "learning_rate": 1.8675000000000003e-05, "loss": 0.0244, "step": 6228 }, { "epoch": 6.875759249033683, "grad_norm": 0.43798449635505676, "learning_rate": 1.8678000000000003e-05, "loss": 0.0225, "step": 6229 }, { "epoch": 6.876863611264495, "grad_norm": 0.8435652852058411, "learning_rate": 1.8681000000000003e-05, "loss": 0.0374, "step": 6230 }, { "epoch": 6.8779679734953065, "grad_norm": 0.7023833990097046, "learning_rate": 1.8684000000000003e-05, "loss": 0.0289, "step": 6231 }, { "epoch": 6.879072335726118, "grad_norm": 0.5266730785369873, "learning_rate": 1.8687e-05, "loss": 0.0436, "step": 6232 }, { "epoch": 6.88017669795693, "grad_norm": 0.6929053664207458, "learning_rate": 1.869e-05, "loss": 0.0302, "step": 6233 }, { "epoch": 6.881281060187741, "grad_norm": 0.7248827219009399, "learning_rate": 1.8693e-05, "loss": 0.035, "step": 6234 }, { "epoch": 6.882385422418554, "grad_norm": 0.4494691789150238, "learning_rate": 1.8696e-05, 
"loss": 0.0226, "step": 6235 }, { "epoch": 6.883489784649365, "grad_norm": 1.065569281578064, "learning_rate": 1.8699e-05, "loss": 0.045, "step": 6236 }, { "epoch": 6.884594146880176, "grad_norm": 1.5633208751678467, "learning_rate": 1.8701999999999998e-05, "loss": 0.3733, "step": 6237 }, { "epoch": 6.885698509110989, "grad_norm": 0.9489918351173401, "learning_rate": 1.8705e-05, "loss": 0.2696, "step": 6238 }, { "epoch": 6.8868028713418, "grad_norm": 1.260854959487915, "learning_rate": 1.8708e-05, "loss": 0.2531, "step": 6239 }, { "epoch": 6.887907233572612, "grad_norm": 0.8906524777412415, "learning_rate": 1.8711e-05, "loss": 0.2537, "step": 6240 }, { "epoch": 6.8890115958034235, "grad_norm": 0.8687762022018433, "learning_rate": 1.8714e-05, "loss": 0.1598, "step": 6241 }, { "epoch": 6.890115958034235, "grad_norm": 0.6287236213684082, "learning_rate": 1.8717e-05, "loss": 0.1125, "step": 6242 }, { "epoch": 6.891220320265047, "grad_norm": 0.7093411087989807, "learning_rate": 1.872e-05, "loss": 0.1283, "step": 6243 }, { "epoch": 6.892324682495858, "grad_norm": 0.6289084553718567, "learning_rate": 1.8723e-05, "loss": 0.0929, "step": 6244 }, { "epoch": 6.893429044726671, "grad_norm": 0.6695185303688049, "learning_rate": 1.8726e-05, "loss": 0.0784, "step": 6245 }, { "epoch": 6.894533406957482, "grad_norm": 0.6029766201972961, "learning_rate": 1.8729e-05, "loss": 0.0619, "step": 6246 }, { "epoch": 6.895637769188294, "grad_norm": 0.40732207894325256, "learning_rate": 1.8732e-05, "loss": 0.037, "step": 6247 }, { "epoch": 6.8967421314191055, "grad_norm": 0.554591715335846, "learning_rate": 1.8735000000000003e-05, "loss": 0.0428, "step": 6248 }, { "epoch": 6.897846493649917, "grad_norm": 1.1497060060501099, "learning_rate": 1.8738000000000003e-05, "loss": 0.0692, "step": 6249 }, { "epoch": 6.898950855880729, "grad_norm": 0.5127660036087036, "learning_rate": 1.8741000000000002e-05, "loss": 0.0367, "step": 6250 }, { "epoch": 6.90005521811154, "grad_norm": 0.3043944537639618, 
"learning_rate": 1.8744000000000002e-05, "loss": 0.0249, "step": 6251 }, { "epoch": 6.901159580342353, "grad_norm": 0.5218027830123901, "learning_rate": 1.8747000000000002e-05, "loss": 0.02, "step": 6252 }, { "epoch": 6.902263942573164, "grad_norm": 0.33385613560676575, "learning_rate": 1.8750000000000002e-05, "loss": 0.023, "step": 6253 }, { "epoch": 6.903368304803975, "grad_norm": 0.6564791202545166, "learning_rate": 1.8753e-05, "loss": 0.0406, "step": 6254 }, { "epoch": 6.904472667034788, "grad_norm": 0.6386928558349609, "learning_rate": 1.8756e-05, "loss": 0.0224, "step": 6255 }, { "epoch": 6.905577029265599, "grad_norm": 0.6429592370986938, "learning_rate": 1.8759e-05, "loss": 0.0278, "step": 6256 }, { "epoch": 6.906681391496411, "grad_norm": 0.4349997341632843, "learning_rate": 1.8761999999999998e-05, "loss": 0.0236, "step": 6257 }, { "epoch": 6.9077857537272225, "grad_norm": 0.3915892541408539, "learning_rate": 1.8764999999999997e-05, "loss": 0.0394, "step": 6258 }, { "epoch": 6.908890115958034, "grad_norm": 1.6805291175842285, "learning_rate": 1.8768e-05, "loss": 0.0401, "step": 6259 }, { "epoch": 6.909994478188846, "grad_norm": 0.9144672155380249, "learning_rate": 1.8771e-05, "loss": 0.0312, "step": 6260 }, { "epoch": 6.911098840419657, "grad_norm": 0.6264879703521729, "learning_rate": 1.8774e-05, "loss": 0.0205, "step": 6261 }, { "epoch": 6.91220320265047, "grad_norm": 0.8787661194801331, "learning_rate": 1.8777e-05, "loss": 0.0488, "step": 6262 }, { "epoch": 6.913307564881281, "grad_norm": 0.4033423960208893, "learning_rate": 1.878e-05, "loss": 0.0235, "step": 6263 }, { "epoch": 6.914411927112093, "grad_norm": 0.42883676290512085, "learning_rate": 1.8783e-05, "loss": 0.0257, "step": 6264 }, { "epoch": 6.9155162893429045, "grad_norm": 0.9051626324653625, "learning_rate": 1.8786e-05, "loss": 0.0398, "step": 6265 }, { "epoch": 6.916620651573716, "grad_norm": 0.564452588558197, "learning_rate": 1.8789e-05, "loss": 0.0226, "step": 6266 }, { "epoch": 
6.917725013804528, "grad_norm": 0.39750936627388, "learning_rate": 1.8792e-05, "loss": 0.0198, "step": 6267 }, { "epoch": 6.918829376035339, "grad_norm": 0.29252105951309204, "learning_rate": 1.8795e-05, "loss": 0.0204, "step": 6268 }, { "epoch": 6.919933738266152, "grad_norm": 0.5009545683860779, "learning_rate": 1.8798000000000002e-05, "loss": 0.0315, "step": 6269 }, { "epoch": 6.921038100496963, "grad_norm": 0.4586654007434845, "learning_rate": 1.8801000000000002e-05, "loss": 0.0134, "step": 6270 }, { "epoch": 6.922142462727774, "grad_norm": 0.4243815243244171, "learning_rate": 1.8804e-05, "loss": 0.0212, "step": 6271 }, { "epoch": 6.923246824958587, "grad_norm": 1.20852530002594, "learning_rate": 1.8807e-05, "loss": 0.0429, "step": 6272 }, { "epoch": 6.924351187189398, "grad_norm": 0.6710500121116638, "learning_rate": 1.881e-05, "loss": 0.0414, "step": 6273 }, { "epoch": 6.92545554942021, "grad_norm": 0.4816328287124634, "learning_rate": 1.8813e-05, "loss": 0.0254, "step": 6274 }, { "epoch": 6.9265599116510215, "grad_norm": 0.5534745454788208, "learning_rate": 1.8816e-05, "loss": 0.038, "step": 6275 }, { "epoch": 6.927664273881833, "grad_norm": 0.6049199104309082, "learning_rate": 1.8819e-05, "loss": 0.0386, "step": 6276 }, { "epoch": 6.928768636112645, "grad_norm": 0.6773431301116943, "learning_rate": 1.8822e-05, "loss": 0.0294, "step": 6277 }, { "epoch": 6.929872998343456, "grad_norm": 0.811198353767395, "learning_rate": 1.8825e-05, "loss": 0.05, "step": 6278 }, { "epoch": 6.930977360574269, "grad_norm": 0.6675153374671936, "learning_rate": 1.8828000000000003e-05, "loss": 0.0369, "step": 6279 }, { "epoch": 6.93208172280508, "grad_norm": 0.6199171543121338, "learning_rate": 1.8831000000000003e-05, "loss": 0.0294, "step": 6280 }, { "epoch": 6.933186085035892, "grad_norm": 1.142770767211914, "learning_rate": 1.8834e-05, "loss": 0.0365, "step": 6281 }, { "epoch": 6.9342904472667035, "grad_norm": 0.7083486318588257, "learning_rate": 1.8837e-05, "loss": 0.0351, 
"step": 6282 }, { "epoch": 6.935394809497515, "grad_norm": 0.4890183210372925, "learning_rate": 1.884e-05, "loss": 0.0291, "step": 6283 }, { "epoch": 6.936499171728327, "grad_norm": 0.8233686089515686, "learning_rate": 1.8843e-05, "loss": 0.0371, "step": 6284 }, { "epoch": 6.937603533959138, "grad_norm": 1.2291741371154785, "learning_rate": 1.8846e-05, "loss": 0.0323, "step": 6285 }, { "epoch": 6.938707896189951, "grad_norm": 1.6740996837615967, "learning_rate": 1.8849e-05, "loss": 0.0375, "step": 6286 }, { "epoch": 6.939812258420762, "grad_norm": 0.8365776538848877, "learning_rate": 1.8852e-05, "loss": 0.2856, "step": 6287 }, { "epoch": 6.940916620651573, "grad_norm": 1.015832781791687, "learning_rate": 1.8854999999999998e-05, "loss": 0.2278, "step": 6288 }, { "epoch": 6.942020982882386, "grad_norm": 1.4790656566619873, "learning_rate": 1.8858e-05, "loss": 0.2525, "step": 6289 }, { "epoch": 6.943125345113197, "grad_norm": 0.6297977566719055, "learning_rate": 1.8861e-05, "loss": 0.1622, "step": 6290 }, { "epoch": 6.944229707344009, "grad_norm": 0.7499319911003113, "learning_rate": 1.8864e-05, "loss": 0.1518, "step": 6291 }, { "epoch": 6.9453340695748205, "grad_norm": 0.48090603947639465, "learning_rate": 1.8867e-05, "loss": 0.1049, "step": 6292 }, { "epoch": 6.946438431805632, "grad_norm": 0.6766952872276306, "learning_rate": 1.887e-05, "loss": 0.0902, "step": 6293 }, { "epoch": 6.947542794036444, "grad_norm": 0.738494336605072, "learning_rate": 1.8873e-05, "loss": 0.0859, "step": 6294 }, { "epoch": 6.948647156267255, "grad_norm": 0.6524827480316162, "learning_rate": 1.8876e-05, "loss": 0.0877, "step": 6295 }, { "epoch": 6.949751518498068, "grad_norm": 0.34925058484077454, "learning_rate": 1.8879e-05, "loss": 0.0598, "step": 6296 }, { "epoch": 6.950855880728879, "grad_norm": 0.5536137819290161, "learning_rate": 1.8882e-05, "loss": 0.0369, "step": 6297 }, { "epoch": 6.951960242959691, "grad_norm": 0.4607568383216858, "learning_rate": 1.8885e-05, "loss": 0.0383, 
"step": 6298 }, { "epoch": 6.9530646051905025, "grad_norm": 0.42392241954803467, "learning_rate": 1.8888000000000003e-05, "loss": 0.0229, "step": 6299 }, { "epoch": 6.954168967421314, "grad_norm": 0.6182414293289185, "learning_rate": 1.8891000000000003e-05, "loss": 0.0394, "step": 6300 }, { "epoch": 6.955273329652126, "grad_norm": 0.3946205973625183, "learning_rate": 1.8894000000000002e-05, "loss": 0.0325, "step": 6301 }, { "epoch": 6.956377691882937, "grad_norm": 0.9266901016235352, "learning_rate": 1.8897000000000002e-05, "loss": 0.0378, "step": 6302 }, { "epoch": 6.95748205411375, "grad_norm": 0.4811552166938782, "learning_rate": 1.8900000000000002e-05, "loss": 0.0291, "step": 6303 }, { "epoch": 6.958586416344561, "grad_norm": 0.688617467880249, "learning_rate": 1.8903000000000002e-05, "loss": 0.0343, "step": 6304 }, { "epoch": 6.959690778575373, "grad_norm": 1.554370641708374, "learning_rate": 1.8906e-05, "loss": 0.031, "step": 6305 }, { "epoch": 6.9607951408061846, "grad_norm": 0.6363669633865356, "learning_rate": 1.8908999999999998e-05, "loss": 0.0207, "step": 6306 }, { "epoch": 6.961899503036996, "grad_norm": 0.4267365634441376, "learning_rate": 1.8911999999999998e-05, "loss": 0.0308, "step": 6307 }, { "epoch": 6.963003865267808, "grad_norm": 0.4104136526584625, "learning_rate": 1.8914999999999998e-05, "loss": 0.0368, "step": 6308 }, { "epoch": 6.9641082274986195, "grad_norm": 0.49628183245658875, "learning_rate": 1.8918e-05, "loss": 0.027, "step": 6309 }, { "epoch": 6.965212589729431, "grad_norm": 0.3938804268836975, "learning_rate": 1.8921e-05, "loss": 0.0307, "step": 6310 }, { "epoch": 6.966316951960243, "grad_norm": 0.4231780171394348, "learning_rate": 1.8924e-05, "loss": 0.0298, "step": 6311 }, { "epoch": 6.967421314191054, "grad_norm": 0.5907614827156067, "learning_rate": 1.8927e-05, "loss": 0.0256, "step": 6312 }, { "epoch": 6.968525676421867, "grad_norm": 0.6317795515060425, "learning_rate": 1.893e-05, "loss": 0.0673, "step": 6313 }, { "epoch": 
6.969630038652678, "grad_norm": 0.26576781272888184, "learning_rate": 1.8933e-05, "loss": 0.019, "step": 6314 }, { "epoch": 6.97073440088349, "grad_norm": 0.3359147906303406, "learning_rate": 1.8936e-05, "loss": 0.0164, "step": 6315 }, { "epoch": 6.9718387631143015, "grad_norm": 0.6993585228919983, "learning_rate": 1.8939e-05, "loss": 0.0445, "step": 6316 }, { "epoch": 6.972943125345113, "grad_norm": 0.41046106815338135, "learning_rate": 1.8942e-05, "loss": 0.0245, "step": 6317 }, { "epoch": 6.974047487575925, "grad_norm": 0.4831092655658722, "learning_rate": 1.8945e-05, "loss": 0.0275, "step": 6318 }, { "epoch": 6.975151849806736, "grad_norm": 0.5199658274650574, "learning_rate": 1.8948000000000002e-05, "loss": 0.0275, "step": 6319 }, { "epoch": 6.976256212037549, "grad_norm": 0.6890923380851746, "learning_rate": 1.8951000000000002e-05, "loss": 0.0307, "step": 6320 }, { "epoch": 6.97736057426836, "grad_norm": 0.47377821803092957, "learning_rate": 1.8954000000000002e-05, "loss": 0.0195, "step": 6321 }, { "epoch": 6.978464936499172, "grad_norm": 0.5830671787261963, "learning_rate": 1.8957e-05, "loss": 0.0248, "step": 6322 }, { "epoch": 6.9795692987299836, "grad_norm": 0.6248395442962646, "learning_rate": 1.896e-05, "loss": 0.0314, "step": 6323 }, { "epoch": 6.980673660960795, "grad_norm": 0.4087722599506378, "learning_rate": 1.8963e-05, "loss": 0.0154, "step": 6324 }, { "epoch": 6.981778023191607, "grad_norm": 0.8075656294822693, "learning_rate": 1.8966e-05, "loss": 0.0337, "step": 6325 }, { "epoch": 6.9828823854224185, "grad_norm": 0.8589515089988708, "learning_rate": 1.8969e-05, "loss": 0.0446, "step": 6326 }, { "epoch": 6.98398674765323, "grad_norm": 0.5848573446273804, "learning_rate": 1.8972e-05, "loss": 0.0221, "step": 6327 }, { "epoch": 6.985091109884042, "grad_norm": 0.7331930994987488, "learning_rate": 1.8975e-05, "loss": 0.0371, "step": 6328 }, { "epoch": 6.986195472114853, "grad_norm": 0.29534396529197693, "learning_rate": 1.8978000000000004e-05, "loss": 
0.0198, "step": 6329 }, { "epoch": 6.987299834345666, "grad_norm": 0.6268013715744019, "learning_rate": 1.8981e-05, "loss": 0.0311, "step": 6330 }, { "epoch": 6.988404196576477, "grad_norm": 0.7122977375984192, "learning_rate": 1.8984e-05, "loss": 0.0307, "step": 6331 }, { "epoch": 6.989508558807289, "grad_norm": 0.5300339460372925, "learning_rate": 1.8987e-05, "loss": 0.0236, "step": 6332 }, { "epoch": 6.9906129210381005, "grad_norm": 0.7403839826583862, "learning_rate": 1.899e-05, "loss": 0.0451, "step": 6333 }, { "epoch": 6.991717283268912, "grad_norm": 2.7435214519500732, "learning_rate": 1.8993e-05, "loss": 0.0282, "step": 6334 }, { "epoch": 6.992821645499724, "grad_norm": 0.5730907917022705, "learning_rate": 1.8996e-05, "loss": 0.0191, "step": 6335 }, { "epoch": 6.993926007730535, "grad_norm": 0.6775552034378052, "learning_rate": 1.8999e-05, "loss": 0.0498, "step": 6336 }, { "epoch": 6.995030369961348, "grad_norm": 0.7457230687141418, "learning_rate": 1.9002e-05, "loss": 0.181, "step": 6337 }, { "epoch": 6.996134732192159, "grad_norm": 0.37103912234306335, "learning_rate": 1.9005e-05, "loss": 0.0269, "step": 6338 }, { "epoch": 6.997239094422971, "grad_norm": 0.5191999673843384, "learning_rate": 1.9008e-05, "loss": 0.0371, "step": 6339 }, { "epoch": 6.9983434566537825, "grad_norm": 0.5324029326438904, "learning_rate": 1.9011e-05, "loss": 0.0333, "step": 6340 }, { "epoch": 6.999447818884594, "grad_norm": 0.990329384803772, "learning_rate": 1.9014e-05, "loss": 0.0487, "step": 6341 }, { "epoch": 7.0, "grad_norm": 0.4718097746372223, "learning_rate": 1.9017e-05, "loss": 0.0269, "step": 6342 }, { "epoch": 7.001104362230811, "grad_norm": 0.8875805139541626, "learning_rate": 1.902e-05, "loss": 0.2575, "step": 6343 }, { "epoch": 7.002208724461624, "grad_norm": 0.7615249752998352, "learning_rate": 1.9023e-05, "loss": 0.2256, "step": 6344 }, { "epoch": 7.003313086692435, "grad_norm": 0.6497626304626465, "learning_rate": 1.9026e-05, "loss": 0.1626, "step": 6345 }, { 
"epoch": 7.004417448923247, "grad_norm": 0.6587445735931396, "learning_rate": 1.9029e-05, "loss": 0.1969, "step": 6346 }, { "epoch": 7.0055218111540585, "grad_norm": 0.6937226057052612, "learning_rate": 1.9032e-05, "loss": 0.1402, "step": 6347 }, { "epoch": 7.00662617338487, "grad_norm": 0.5055064558982849, "learning_rate": 1.9035e-05, "loss": 0.1204, "step": 6348 }, { "epoch": 7.007730535615682, "grad_norm": 0.5071353316307068, "learning_rate": 1.9038000000000003e-05, "loss": 0.0768, "step": 6349 }, { "epoch": 7.008834897846493, "grad_norm": 0.5919198989868164, "learning_rate": 1.9041000000000003e-05, "loss": 0.0688, "step": 6350 }, { "epoch": 7.009939260077306, "grad_norm": 0.38709357380867004, "learning_rate": 1.9044000000000003e-05, "loss": 0.0488, "step": 6351 }, { "epoch": 7.011043622308117, "grad_norm": 0.3459746837615967, "learning_rate": 1.9047000000000002e-05, "loss": 0.0256, "step": 6352 }, { "epoch": 7.012147984538929, "grad_norm": 0.4748719036579132, "learning_rate": 1.9050000000000002e-05, "loss": 0.0409, "step": 6353 }, { "epoch": 7.0132523467697405, "grad_norm": 0.41421493887901306, "learning_rate": 1.9053000000000002e-05, "loss": 0.0261, "step": 6354 }, { "epoch": 7.014356709000552, "grad_norm": 0.5657185912132263, "learning_rate": 1.9056e-05, "loss": 0.0312, "step": 6355 }, { "epoch": 7.015461071231364, "grad_norm": 0.3201466500759125, "learning_rate": 1.9058999999999998e-05, "loss": 0.0257, "step": 6356 }, { "epoch": 7.016565433462175, "grad_norm": 0.3210131525993347, "learning_rate": 1.9061999999999998e-05, "loss": 0.0242, "step": 6357 }, { "epoch": 7.017669795692988, "grad_norm": 0.240849107503891, "learning_rate": 1.9064999999999998e-05, "loss": 0.0107, "step": 6358 }, { "epoch": 7.018774157923799, "grad_norm": 0.4773343801498413, "learning_rate": 1.9068e-05, "loss": 0.0186, "step": 6359 }, { "epoch": 7.01987852015461, "grad_norm": 0.5834535360336304, "learning_rate": 1.9071e-05, "loss": 0.025, "step": 6360 }, { "epoch": 7.020982882385423, 
"grad_norm": 0.5301131010055542, "learning_rate": 1.9074e-05, "loss": 0.0216, "step": 6361 }, { "epoch": 7.022087244616234, "grad_norm": 0.5456410646438599, "learning_rate": 1.9077e-05, "loss": 0.0292, "step": 6362 }, { "epoch": 7.023191606847046, "grad_norm": 1.1700286865234375, "learning_rate": 1.908e-05, "loss": 0.0264, "step": 6363 }, { "epoch": 7.0242959690778575, "grad_norm": 0.2956523895263672, "learning_rate": 1.9083e-05, "loss": 0.0177, "step": 6364 }, { "epoch": 7.025400331308669, "grad_norm": 1.9022467136383057, "learning_rate": 1.9086e-05, "loss": 0.0352, "step": 6365 }, { "epoch": 7.026504693539481, "grad_norm": 0.3135715126991272, "learning_rate": 1.9089e-05, "loss": 0.0213, "step": 6366 }, { "epoch": 7.027609055770292, "grad_norm": 0.6738377213478088, "learning_rate": 1.9092e-05, "loss": 0.0381, "step": 6367 }, { "epoch": 7.028713418001105, "grad_norm": 0.28727179765701294, "learning_rate": 1.9095e-05, "loss": 0.0151, "step": 6368 }, { "epoch": 7.029817780231916, "grad_norm": 0.48578307032585144, "learning_rate": 1.9098000000000002e-05, "loss": 0.0186, "step": 6369 }, { "epoch": 7.030922142462728, "grad_norm": 0.6350258588790894, "learning_rate": 1.9101000000000002e-05, "loss": 0.0108, "step": 6370 }, { "epoch": 7.0320265046935395, "grad_norm": 0.6934351325035095, "learning_rate": 1.9104000000000002e-05, "loss": 0.0316, "step": 6371 }, { "epoch": 7.033130866924351, "grad_norm": 0.3387226462364197, "learning_rate": 1.9107000000000002e-05, "loss": 0.0262, "step": 6372 }, { "epoch": 7.034235229155163, "grad_norm": 0.43547168374061584, "learning_rate": 1.911e-05, "loss": 0.0147, "step": 6373 }, { "epoch": 7.035339591385974, "grad_norm": 0.31210315227508545, "learning_rate": 1.9113e-05, "loss": 0.0184, "step": 6374 }, { "epoch": 7.036443953616787, "grad_norm": 0.8437998294830322, "learning_rate": 1.9116e-05, "loss": 0.0341, "step": 6375 }, { "epoch": 7.037548315847598, "grad_norm": 0.5227860808372498, "learning_rate": 1.9119e-05, "loss": 0.0238, "step": 
6376 }, { "epoch": 7.038652678078409, "grad_norm": 0.5712561011314392, "learning_rate": 1.9122e-05, "loss": 0.0404, "step": 6377 }, { "epoch": 7.039757040309222, "grad_norm": 0.557783305644989, "learning_rate": 1.9125e-05, "loss": 0.0283, "step": 6378 }, { "epoch": 7.040861402540033, "grad_norm": 0.41985905170440674, "learning_rate": 1.9128e-05, "loss": 0.0162, "step": 6379 }, { "epoch": 7.041965764770845, "grad_norm": 0.35981833934783936, "learning_rate": 1.9131e-05, "loss": 0.0229, "step": 6380 }, { "epoch": 7.0430701270016565, "grad_norm": 0.8345394134521484, "learning_rate": 1.9134e-05, "loss": 0.0368, "step": 6381 }, { "epoch": 7.044174489232468, "grad_norm": 0.361105740070343, "learning_rate": 1.9137e-05, "loss": 0.013, "step": 6382 }, { "epoch": 7.04527885146328, "grad_norm": 0.40143507719039917, "learning_rate": 1.914e-05, "loss": 0.0229, "step": 6383 }, { "epoch": 7.046383213694091, "grad_norm": 0.38996607065200806, "learning_rate": 1.9143e-05, "loss": 0.0234, "step": 6384 }, { "epoch": 7.047487575924904, "grad_norm": 0.7034538984298706, "learning_rate": 1.9146e-05, "loss": 0.0241, "step": 6385 }, { "epoch": 7.048591938155715, "grad_norm": 0.2951688766479492, "learning_rate": 1.9149e-05, "loss": 0.0164, "step": 6386 }, { "epoch": 7.049696300386527, "grad_norm": 0.528276801109314, "learning_rate": 1.9152e-05, "loss": 0.0218, "step": 6387 }, { "epoch": 7.0508006626173385, "grad_norm": 0.7335876822471619, "learning_rate": 1.9155e-05, "loss": 0.0498, "step": 6388 }, { "epoch": 7.05190502484815, "grad_norm": 0.4526336193084717, "learning_rate": 1.9158e-05, "loss": 0.0182, "step": 6389 }, { "epoch": 7.053009387078962, "grad_norm": 0.6281260848045349, "learning_rate": 1.9161000000000002e-05, "loss": 0.0208, "step": 6390 }, { "epoch": 7.054113749309773, "grad_norm": 0.33886951208114624, "learning_rate": 1.9164e-05, "loss": 0.0174, "step": 6391 }, { "epoch": 7.055218111540586, "grad_norm": 0.6378398537635803, "learning_rate": 1.9167e-05, "loss": 0.0169, "step": 
6392 }, { "epoch": 7.056322473771397, "grad_norm": 0.8643684983253479, "learning_rate": 1.917e-05, "loss": 0.2367, "step": 6393 }, { "epoch": 7.057426836002208, "grad_norm": 0.9052020907402039, "learning_rate": 1.9173e-05, "loss": 0.2374, "step": 6394 }, { "epoch": 7.0585311982330206, "grad_norm": 0.776799738407135, "learning_rate": 1.9176e-05, "loss": 0.2168, "step": 6395 }, { "epoch": 7.059635560463832, "grad_norm": 0.8016808032989502, "learning_rate": 1.9179e-05, "loss": 0.1592, "step": 6396 }, { "epoch": 7.060739922694644, "grad_norm": 0.7518221139907837, "learning_rate": 1.9182e-05, "loss": 0.1751, "step": 6397 }, { "epoch": 7.0618442849254555, "grad_norm": 0.931946337223053, "learning_rate": 1.9185e-05, "loss": 0.1375, "step": 6398 }, { "epoch": 7.062948647156268, "grad_norm": 0.7502181529998779, "learning_rate": 1.9188e-05, "loss": 0.0884, "step": 6399 }, { "epoch": 7.064053009387079, "grad_norm": 0.762988805770874, "learning_rate": 1.9191000000000003e-05, "loss": 0.0931, "step": 6400 }, { "epoch": 7.06515737161789, "grad_norm": 0.4292415976524353, "learning_rate": 1.9194000000000003e-05, "loss": 0.0521, "step": 6401 }, { "epoch": 7.066261733848703, "grad_norm": 0.3472151458263397, "learning_rate": 1.9197000000000003e-05, "loss": 0.0375, "step": 6402 }, { "epoch": 7.067366096079514, "grad_norm": 0.5786716938018799, "learning_rate": 1.9200000000000003e-05, "loss": 0.0285, "step": 6403 }, { "epoch": 7.068470458310326, "grad_norm": 0.4496563971042633, "learning_rate": 1.9203e-05, "loss": 0.0299, "step": 6404 }, { "epoch": 7.0695748205411375, "grad_norm": 0.29107895493507385, "learning_rate": 1.9206e-05, "loss": 0.0249, "step": 6405 }, { "epoch": 7.070679182771949, "grad_norm": 0.8558709621429443, "learning_rate": 1.9209e-05, "loss": 0.0437, "step": 6406 }, { "epoch": 7.071783545002761, "grad_norm": 0.38322174549102783, "learning_rate": 1.9212e-05, "loss": 0.0194, "step": 6407 }, { "epoch": 7.072887907233572, "grad_norm": 0.2752015292644501, "learning_rate": 
1.9214999999999998e-05, "loss": 0.0154, "step": 6408 }, { "epoch": 7.073992269464385, "grad_norm": 0.393681138753891, "learning_rate": 1.9217999999999998e-05, "loss": 0.0261, "step": 6409 }, { "epoch": 7.075096631695196, "grad_norm": 0.540688693523407, "learning_rate": 1.9221e-05, "loss": 0.0273, "step": 6410 }, { "epoch": 7.076200993926007, "grad_norm": 0.3515778183937073, "learning_rate": 1.9224e-05, "loss": 0.0222, "step": 6411 }, { "epoch": 7.0773053561568195, "grad_norm": 0.376487135887146, "learning_rate": 1.9227e-05, "loss": 0.0238, "step": 6412 }, { "epoch": 7.078409718387631, "grad_norm": 0.3877439796924591, "learning_rate": 1.923e-05, "loss": 0.0206, "step": 6413 }, { "epoch": 7.079514080618443, "grad_norm": 0.2993476688861847, "learning_rate": 1.9233e-05, "loss": 0.0234, "step": 6414 }, { "epoch": 7.0806184428492545, "grad_norm": 0.35245513916015625, "learning_rate": 1.9236e-05, "loss": 0.0237, "step": 6415 }, { "epoch": 7.081722805080067, "grad_norm": 0.4017721712589264, "learning_rate": 1.9239e-05, "loss": 0.0249, "step": 6416 }, { "epoch": 7.082827167310878, "grad_norm": 0.32920750975608826, "learning_rate": 1.9242e-05, "loss": 0.0188, "step": 6417 }, { "epoch": 7.083931529541689, "grad_norm": 0.5263930559158325, "learning_rate": 1.9245e-05, "loss": 0.0242, "step": 6418 }, { "epoch": 7.085035891772502, "grad_norm": 0.4478440284729004, "learning_rate": 1.9248e-05, "loss": 0.0213, "step": 6419 }, { "epoch": 7.086140254003313, "grad_norm": 0.550735354423523, "learning_rate": 1.9251000000000003e-05, "loss": 0.0134, "step": 6420 }, { "epoch": 7.087244616234125, "grad_norm": 0.8917314410209656, "learning_rate": 1.9254000000000002e-05, "loss": 0.0382, "step": 6421 }, { "epoch": 7.0883489784649365, "grad_norm": 0.44309908151626587, "learning_rate": 1.9257000000000002e-05, "loss": 0.0255, "step": 6422 }, { "epoch": 7.089453340695748, "grad_norm": 0.4512721300125122, "learning_rate": 1.9260000000000002e-05, "loss": 0.0239, "step": 6423 }, { "epoch": 
7.09055770292656, "grad_norm": 1.0315021276474, "learning_rate": 1.9263000000000002e-05, "loss": 0.0299, "step": 6424 }, { "epoch": 7.091662065157371, "grad_norm": 0.4732714891433716, "learning_rate": 1.9266e-05, "loss": 0.0282, "step": 6425 }, { "epoch": 7.092766427388184, "grad_norm": 0.2932622730731964, "learning_rate": 1.9269e-05, "loss": 0.0164, "step": 6426 }, { "epoch": 7.093870789618995, "grad_norm": 0.4264037013053894, "learning_rate": 1.9272e-05, "loss": 0.0141, "step": 6427 }, { "epoch": 7.094975151849806, "grad_norm": 0.8058129549026489, "learning_rate": 1.9275e-05, "loss": 0.0306, "step": 6428 }, { "epoch": 7.0960795140806185, "grad_norm": 0.4214133322238922, "learning_rate": 1.9277999999999997e-05, "loss": 0.0251, "step": 6429 }, { "epoch": 7.09718387631143, "grad_norm": 0.46292844414711, "learning_rate": 1.9281e-05, "loss": 0.0207, "step": 6430 }, { "epoch": 7.098288238542242, "grad_norm": 0.7565010190010071, "learning_rate": 1.9284e-05, "loss": 0.042, "step": 6431 }, { "epoch": 7.0993926007730535, "grad_norm": 0.8287942409515381, "learning_rate": 1.9287e-05, "loss": 0.0355, "step": 6432 }, { "epoch": 7.100496963003866, "grad_norm": 0.6097776889801025, "learning_rate": 1.929e-05, "loss": 0.0229, "step": 6433 }, { "epoch": 7.101601325234677, "grad_norm": 0.40300655364990234, "learning_rate": 1.9293e-05, "loss": 0.0183, "step": 6434 }, { "epoch": 7.102705687465488, "grad_norm": 0.71343594789505, "learning_rate": 1.9296e-05, "loss": 0.0259, "step": 6435 }, { "epoch": 7.103810049696301, "grad_norm": 0.43532299995422363, "learning_rate": 1.9299e-05, "loss": 0.0224, "step": 6436 }, { "epoch": 7.104914411927112, "grad_norm": 0.6632285714149475, "learning_rate": 1.9302e-05, "loss": 0.028, "step": 6437 }, { "epoch": 7.106018774157924, "grad_norm": 0.548641562461853, "learning_rate": 1.9305e-05, "loss": 0.0246, "step": 6438 }, { "epoch": 7.1071231363887355, "grad_norm": 0.623065173625946, "learning_rate": 1.9308e-05, "loss": 0.0308, "step": 6439 }, { "epoch": 
7.108227498619547, "grad_norm": 0.4804857671260834, "learning_rate": 1.9311000000000002e-05, "loss": 0.0249, "step": 6440 }, { "epoch": 7.109331860850359, "grad_norm": 0.758013129234314, "learning_rate": 1.9314000000000002e-05, "loss": 0.0333, "step": 6441 }, { "epoch": 7.11043622308117, "grad_norm": 0.6559332609176636, "learning_rate": 1.9317e-05, "loss": 0.0422, "step": 6442 }, { "epoch": 7.111540585311983, "grad_norm": 1.3389734029769897, "learning_rate": 1.932e-05, "loss": 0.3299, "step": 6443 }, { "epoch": 7.112644947542794, "grad_norm": 1.1790355443954468, "learning_rate": 1.9323e-05, "loss": 0.3044, "step": 6444 }, { "epoch": 7.113749309773605, "grad_norm": 0.8539310097694397, "learning_rate": 1.9326e-05, "loss": 0.2341, "step": 6445 }, { "epoch": 7.1148536720044175, "grad_norm": 0.819188117980957, "learning_rate": 1.9329e-05, "loss": 0.1583, "step": 6446 }, { "epoch": 7.115958034235229, "grad_norm": 0.695145308971405, "learning_rate": 1.9332e-05, "loss": 0.1535, "step": 6447 }, { "epoch": 7.117062396466041, "grad_norm": 0.7483168244361877, "learning_rate": 1.9335e-05, "loss": 0.1394, "step": 6448 }, { "epoch": 7.1181667586968524, "grad_norm": 0.5892038941383362, "learning_rate": 1.9338e-05, "loss": 0.0829, "step": 6449 }, { "epoch": 7.119271120927665, "grad_norm": 0.9124040603637695, "learning_rate": 1.9341000000000003e-05, "loss": 0.1356, "step": 6450 }, { "epoch": 7.120375483158476, "grad_norm": 0.6632722616195679, "learning_rate": 1.9344000000000003e-05, "loss": 0.0795, "step": 6451 }, { "epoch": 7.121479845389287, "grad_norm": 0.6731335520744324, "learning_rate": 1.9347000000000003e-05, "loss": 0.0582, "step": 6452 }, { "epoch": 7.1225842076201, "grad_norm": 0.7227423787117004, "learning_rate": 1.935e-05, "loss": 0.0296, "step": 6453 }, { "epoch": 7.123688569850911, "grad_norm": 0.5093545317649841, "learning_rate": 1.9353e-05, "loss": 0.03, "step": 6454 }, { "epoch": 7.124792932081723, "grad_norm": 1.7370823621749878, "learning_rate": 1.9356e-05, 
"loss": 0.0808, "step": 6455 }, { "epoch": 7.1258972943125345, "grad_norm": 0.4034733772277832, "learning_rate": 1.9359e-05, "loss": 0.024, "step": 6456 }, { "epoch": 7.127001656543346, "grad_norm": 0.40844273567199707, "learning_rate": 1.9362e-05, "loss": 0.0537, "step": 6457 }, { "epoch": 7.128106018774158, "grad_norm": 0.6194123029708862, "learning_rate": 1.9365e-05, "loss": 0.0632, "step": 6458 }, { "epoch": 7.129210381004969, "grad_norm": 1.6468214988708496, "learning_rate": 1.9367999999999998e-05, "loss": 0.0254, "step": 6459 }, { "epoch": 7.130314743235782, "grad_norm": 0.29662615060806274, "learning_rate": 1.9371e-05, "loss": 0.0159, "step": 6460 }, { "epoch": 7.131419105466593, "grad_norm": 1.200758934020996, "learning_rate": 1.9374e-05, "loss": 0.0348, "step": 6461 }, { "epoch": 7.132523467697405, "grad_norm": 0.2317049354314804, "learning_rate": 1.9377e-05, "loss": 0.0158, "step": 6462 }, { "epoch": 7.1336278299282165, "grad_norm": 0.5094081163406372, "learning_rate": 1.938e-05, "loss": 0.0366, "step": 6463 }, { "epoch": 7.134732192159028, "grad_norm": 0.33156105875968933, "learning_rate": 1.9383e-05, "loss": 0.0159, "step": 6464 }, { "epoch": 7.13583655438984, "grad_norm": 0.3823058307170868, "learning_rate": 1.9386e-05, "loss": 0.0229, "step": 6465 }, { "epoch": 7.136940916620651, "grad_norm": 0.386167973279953, "learning_rate": 1.9389e-05, "loss": 0.029, "step": 6466 }, { "epoch": 7.138045278851464, "grad_norm": 0.3392610251903534, "learning_rate": 1.9392e-05, "loss": 0.0184, "step": 6467 }, { "epoch": 7.139149641082275, "grad_norm": 0.6638017296791077, "learning_rate": 1.9395e-05, "loss": 0.0293, "step": 6468 }, { "epoch": 7.140254003313086, "grad_norm": 0.4650706648826599, "learning_rate": 1.9398e-05, "loss": 0.0268, "step": 6469 }, { "epoch": 7.141358365543899, "grad_norm": 0.41176947951316833, "learning_rate": 1.9401000000000003e-05, "loss": 0.0581, "step": 6470 }, { "epoch": 7.14246272777471, "grad_norm": 0.4114823043346405, "learning_rate": 
1.9404000000000003e-05, "loss": 0.0273, "step": 6471 }, { "epoch": 7.143567090005522, "grad_norm": 1.0648549795150757, "learning_rate": 1.9407000000000002e-05, "loss": 0.0257, "step": 6472 }, { "epoch": 7.1446714522363335, "grad_norm": 0.5913954377174377, "learning_rate": 1.9410000000000002e-05, "loss": 0.0301, "step": 6473 }, { "epoch": 7.145775814467145, "grad_norm": 0.6530779004096985, "learning_rate": 1.9413000000000002e-05, "loss": 0.0374, "step": 6474 }, { "epoch": 7.146880176697957, "grad_norm": 0.5643786191940308, "learning_rate": 1.9416000000000002e-05, "loss": 0.0224, "step": 6475 }, { "epoch": 7.147984538928768, "grad_norm": 0.5408956408500671, "learning_rate": 1.9419e-05, "loss": 0.0222, "step": 6476 }, { "epoch": 7.149088901159581, "grad_norm": 0.6046329736709595, "learning_rate": 1.9422e-05, "loss": 0.0274, "step": 6477 }, { "epoch": 7.150193263390392, "grad_norm": 0.3620535433292389, "learning_rate": 1.9424999999999998e-05, "loss": 0.0182, "step": 6478 }, { "epoch": 7.151297625621204, "grad_norm": 0.42987093329429626, "learning_rate": 1.9427999999999998e-05, "loss": 0.0231, "step": 6479 }, { "epoch": 7.1524019878520155, "grad_norm": 0.26858994364738464, "learning_rate": 1.9431e-05, "loss": 0.0159, "step": 6480 }, { "epoch": 7.153506350082827, "grad_norm": 0.6626471281051636, "learning_rate": 1.9434e-05, "loss": 0.0238, "step": 6481 }, { "epoch": 7.154610712313639, "grad_norm": 0.3693067133426666, "learning_rate": 1.9437e-05, "loss": 0.0178, "step": 6482 }, { "epoch": 7.15571507454445, "grad_norm": 0.3127802610397339, "learning_rate": 1.944e-05, "loss": 0.0212, "step": 6483 }, { "epoch": 7.156819436775263, "grad_norm": 0.5284605026245117, "learning_rate": 1.9443e-05, "loss": 0.0265, "step": 6484 }, { "epoch": 7.157923799006074, "grad_norm": 0.5769702792167664, "learning_rate": 1.9446e-05, "loss": 0.0224, "step": 6485 }, { "epoch": 7.159028161236885, "grad_norm": 0.6239283680915833, "learning_rate": 1.9449e-05, "loss": 0.0291, "step": 6486 }, { 
"epoch": 7.160132523467698, "grad_norm": 0.4422421157360077, "learning_rate": 1.9452e-05, "loss": 0.0185, "step": 6487 }, { "epoch": 7.161236885698509, "grad_norm": 0.822944164276123, "learning_rate": 1.9455e-05, "loss": 0.0324, "step": 6488 }, { "epoch": 7.162341247929321, "grad_norm": 0.3895721435546875, "learning_rate": 1.9458e-05, "loss": 0.022, "step": 6489 }, { "epoch": 7.1634456101601325, "grad_norm": 0.6633242964744568, "learning_rate": 1.9461000000000002e-05, "loss": 0.0316, "step": 6490 }, { "epoch": 7.164549972390944, "grad_norm": 0.537041187286377, "learning_rate": 1.9464000000000002e-05, "loss": 0.0262, "step": 6491 }, { "epoch": 7.165654334621756, "grad_norm": 0.45309191942214966, "learning_rate": 1.9467000000000002e-05, "loss": 0.0152, "step": 6492 }, { "epoch": 7.166758696852567, "grad_norm": 1.3322314023971558, "learning_rate": 1.947e-05, "loss": 0.2582, "step": 6493 }, { "epoch": 7.16786305908338, "grad_norm": 0.8766611814498901, "learning_rate": 1.9473e-05, "loss": 0.2354, "step": 6494 }, { "epoch": 7.168967421314191, "grad_norm": 1.0703057050704956, "learning_rate": 1.9476e-05, "loss": 0.2293, "step": 6495 }, { "epoch": 7.170071783545003, "grad_norm": 0.8066378235816956, "learning_rate": 1.9479e-05, "loss": 0.1612, "step": 6496 }, { "epoch": 7.1711761457758145, "grad_norm": 0.5741432905197144, "learning_rate": 1.9482e-05, "loss": 0.1042, "step": 6497 }, { "epoch": 7.172280508006626, "grad_norm": 0.6689640283584595, "learning_rate": 1.9485e-05, "loss": 0.1352, "step": 6498 }, { "epoch": 7.173384870237438, "grad_norm": 1.0479592084884644, "learning_rate": 1.9488e-05, "loss": 0.1175, "step": 6499 }, { "epoch": 7.174489232468249, "grad_norm": 0.5598539710044861, "learning_rate": 1.9491000000000004e-05, "loss": 0.0686, "step": 6500 }, { "epoch": 7.175593594699062, "grad_norm": 0.4728289842605591, "learning_rate": 1.9494000000000003e-05, "loss": 0.0364, "step": 6501 }, { "epoch": 7.176697956929873, "grad_norm": 0.43466800451278687, "learning_rate": 
1.9497e-05, "loss": 0.0531, "step": 6502 }, { "epoch": 7.177802319160684, "grad_norm": 0.5790858864784241, "learning_rate": 1.95e-05, "loss": 0.0882, "step": 6503 }, { "epoch": 7.178906681391497, "grad_norm": 0.3099614977836609, "learning_rate": 1.9503e-05, "loss": 0.0191, "step": 6504 }, { "epoch": 7.180011043622308, "grad_norm": 0.23411504924297333, "learning_rate": 1.9506e-05, "loss": 0.0255, "step": 6505 }, { "epoch": 7.18111540585312, "grad_norm": 0.5505949854850769, "learning_rate": 1.9509e-05, "loss": 0.0515, "step": 6506 }, { "epoch": 7.1822197680839315, "grad_norm": 0.28779417276382446, "learning_rate": 1.9512e-05, "loss": 0.028, "step": 6507 }, { "epoch": 7.183324130314743, "grad_norm": 0.23341964185237885, "learning_rate": 1.9515e-05, "loss": 0.0241, "step": 6508 }, { "epoch": 7.184428492545555, "grad_norm": 0.22370930016040802, "learning_rate": 1.9518e-05, "loss": 0.0142, "step": 6509 }, { "epoch": 7.185532854776366, "grad_norm": 0.3309183716773987, "learning_rate": 1.9520999999999998e-05, "loss": 0.023, "step": 6510 }, { "epoch": 7.186637217007179, "grad_norm": 0.4244438409805298, "learning_rate": 1.9524e-05, "loss": 0.0283, "step": 6511 }, { "epoch": 7.18774157923799, "grad_norm": 0.46477359533309937, "learning_rate": 1.9527e-05, "loss": 0.0236, "step": 6512 }, { "epoch": 7.188845941468802, "grad_norm": 0.32505717873573303, "learning_rate": 1.953e-05, "loss": 0.0235, "step": 6513 }, { "epoch": 7.1899503036996135, "grad_norm": 0.6956391930580139, "learning_rate": 1.9533e-05, "loss": 0.0225, "step": 6514 }, { "epoch": 7.191054665930425, "grad_norm": 0.4587528109550476, "learning_rate": 1.9536e-05, "loss": 0.0233, "step": 6515 }, { "epoch": 7.192159028161237, "grad_norm": 0.5354644656181335, "learning_rate": 1.9539e-05, "loss": 0.0167, "step": 6516 }, { "epoch": 7.193263390392048, "grad_norm": 0.6025183796882629, "learning_rate": 1.9542e-05, "loss": 0.0368, "step": 6517 }, { "epoch": 7.194367752622861, "grad_norm": 0.22226782143115997, "learning_rate": 
1.9545e-05, "loss": 0.0137, "step": 6518 }, { "epoch": 7.195472114853672, "grad_norm": 0.2909735441207886, "learning_rate": 1.9548e-05, "loss": 0.0216, "step": 6519 }, { "epoch": 7.196576477084483, "grad_norm": 0.5349558591842651, "learning_rate": 1.9551e-05, "loss": 0.02, "step": 6520 }, { "epoch": 7.197680839315296, "grad_norm": 0.7287629842758179, "learning_rate": 1.9554000000000003e-05, "loss": 0.0467, "step": 6521 }, { "epoch": 7.198785201546107, "grad_norm": 0.7660523056983948, "learning_rate": 1.9557000000000003e-05, "loss": 0.0194, "step": 6522 }, { "epoch": 7.199889563776919, "grad_norm": 0.23688550293445587, "learning_rate": 1.9560000000000002e-05, "loss": 0.0179, "step": 6523 }, { "epoch": 7.2009939260077305, "grad_norm": 0.192738875746727, "learning_rate": 1.9563000000000002e-05, "loss": 0.0084, "step": 6524 }, { "epoch": 7.202098288238542, "grad_norm": 0.45001450181007385, "learning_rate": 1.9566000000000002e-05, "loss": 0.0253, "step": 6525 }, { "epoch": 7.203202650469354, "grad_norm": 0.4060792922973633, "learning_rate": 1.9569000000000002e-05, "loss": 0.0276, "step": 6526 }, { "epoch": 7.204307012700165, "grad_norm": 0.5870605111122131, "learning_rate": 1.9571999999999998e-05, "loss": 0.026, "step": 6527 }, { "epoch": 7.205411374930978, "grad_norm": 0.4022027552127838, "learning_rate": 1.9574999999999998e-05, "loss": 0.0327, "step": 6528 }, { "epoch": 7.206515737161789, "grad_norm": 0.7258520722389221, "learning_rate": 1.9577999999999998e-05, "loss": 0.0224, "step": 6529 }, { "epoch": 7.207620099392601, "grad_norm": 0.344629168510437, "learning_rate": 1.9580999999999998e-05, "loss": 0.0224, "step": 6530 }, { "epoch": 7.2087244616234125, "grad_norm": 0.3638935387134552, "learning_rate": 1.9584e-05, "loss": 0.0269, "step": 6531 }, { "epoch": 7.209828823854224, "grad_norm": 0.5168378949165344, "learning_rate": 1.9587e-05, "loss": 0.0251, "step": 6532 }, { "epoch": 7.210933186085036, "grad_norm": 0.41566941142082214, "learning_rate": 1.959e-05, "loss": 
0.0316, "step": 6533 }, { "epoch": 7.212037548315847, "grad_norm": 0.5760777592658997, "learning_rate": 1.9593e-05, "loss": 0.0292, "step": 6534 }, { "epoch": 7.21314191054666, "grad_norm": 0.49521952867507935, "learning_rate": 1.9596e-05, "loss": 0.0277, "step": 6535 }, { "epoch": 7.214246272777471, "grad_norm": 0.43507957458496094, "learning_rate": 1.9599e-05, "loss": 0.019, "step": 6536 }, { "epoch": 7.215350635008282, "grad_norm": 0.6513184905052185, "learning_rate": 1.9602e-05, "loss": 0.0273, "step": 6537 }, { "epoch": 7.216454997239095, "grad_norm": 0.3951204717159271, "learning_rate": 1.9605e-05, "loss": 0.0267, "step": 6538 }, { "epoch": 7.217559359469906, "grad_norm": 0.8452064990997314, "learning_rate": 1.9608e-05, "loss": 0.035, "step": 6539 }, { "epoch": 7.218663721700718, "grad_norm": 0.48625001311302185, "learning_rate": 1.9611e-05, "loss": 0.0175, "step": 6540 }, { "epoch": 7.2197680839315295, "grad_norm": 0.9598210453987122, "learning_rate": 1.9614000000000002e-05, "loss": 0.0502, "step": 6541 }, { "epoch": 7.220872446162341, "grad_norm": 0.4743281304836273, "learning_rate": 1.9617000000000002e-05, "loss": 0.0294, "step": 6542 }, { "epoch": 7.221976808393153, "grad_norm": 2.257549285888672, "learning_rate": 1.9620000000000002e-05, "loss": 0.3773, "step": 6543 }, { "epoch": 7.223081170623964, "grad_norm": 0.830775797367096, "learning_rate": 1.9623e-05, "loss": 0.1888, "step": 6544 }, { "epoch": 7.224185532854777, "grad_norm": 0.8262681365013123, "learning_rate": 1.9626e-05, "loss": 0.1823, "step": 6545 }, { "epoch": 7.225289895085588, "grad_norm": 0.8263283967971802, "learning_rate": 1.9629e-05, "loss": 0.1804, "step": 6546 }, { "epoch": 7.2263942573164, "grad_norm": 0.7550115585327148, "learning_rate": 1.9632e-05, "loss": 0.1115, "step": 6547 }, { "epoch": 7.2274986195472115, "grad_norm": 0.7597213983535767, "learning_rate": 1.9635e-05, "loss": 0.1322, "step": 6548 }, { "epoch": 7.228602981778023, "grad_norm": 0.5307948589324951, "learning_rate": 
1.9638e-05, "loss": 0.0784, "step": 6549 }, { "epoch": 7.229707344008835, "grad_norm": 0.5373470187187195, "learning_rate": 1.9641e-05, "loss": 0.0506, "step": 6550 }, { "epoch": 7.230811706239646, "grad_norm": 0.4320833086967468, "learning_rate": 1.9644e-05, "loss": 0.0467, "step": 6551 }, { "epoch": 7.231916068470459, "grad_norm": 0.38510456681251526, "learning_rate": 1.9647e-05, "loss": 0.0416, "step": 6552 }, { "epoch": 7.23302043070127, "grad_norm": 0.42246213555336, "learning_rate": 1.965e-05, "loss": 0.0326, "step": 6553 }, { "epoch": 7.234124792932081, "grad_norm": 0.4135715663433075, "learning_rate": 1.9653e-05, "loss": 0.0528, "step": 6554 }, { "epoch": 7.2352291551628936, "grad_norm": 0.3416261076927185, "learning_rate": 1.9656e-05, "loss": 0.0291, "step": 6555 }, { "epoch": 7.236333517393705, "grad_norm": 0.36888936161994934, "learning_rate": 1.9659e-05, "loss": 0.0228, "step": 6556 }, { "epoch": 7.237437879624517, "grad_norm": 0.4832962453365326, "learning_rate": 1.9662e-05, "loss": 0.0241, "step": 6557 }, { "epoch": 7.2385422418553285, "grad_norm": 0.3891083896160126, "learning_rate": 1.9665e-05, "loss": 0.0203, "step": 6558 }, { "epoch": 7.23964660408614, "grad_norm": 0.3717041313648224, "learning_rate": 1.9668e-05, "loss": 0.0302, "step": 6559 }, { "epoch": 7.240750966316952, "grad_norm": 0.34703588485717773, "learning_rate": 1.9671e-05, "loss": 0.034, "step": 6560 }, { "epoch": 7.241855328547763, "grad_norm": 0.4604475796222687, "learning_rate": 1.9674000000000002e-05, "loss": 0.0218, "step": 6561 }, { "epoch": 7.242959690778576, "grad_norm": 0.38770681619644165, "learning_rate": 1.9677e-05, "loss": 0.0226, "step": 6562 }, { "epoch": 7.244064053009387, "grad_norm": 0.5186502933502197, "learning_rate": 1.968e-05, "loss": 0.0286, "step": 6563 }, { "epoch": 7.245168415240199, "grad_norm": 0.39147335290908813, "learning_rate": 1.9683e-05, "loss": 0.0206, "step": 6564 }, { "epoch": 7.2462727774710105, "grad_norm": 0.3505295515060425, "learning_rate": 
1.9686e-05, "loss": 0.0209, "step": 6565 }, { "epoch": 7.247377139701822, "grad_norm": 0.30976158380508423, "learning_rate": 1.9689e-05, "loss": 0.0225, "step": 6566 }, { "epoch": 7.248481501932634, "grad_norm": 0.5718255043029785, "learning_rate": 1.9692e-05, "loss": 0.0409, "step": 6567 }, { "epoch": 7.249585864163445, "grad_norm": 0.32821425795555115, "learning_rate": 1.9695e-05, "loss": 0.0211, "step": 6568 }, { "epoch": 7.250690226394258, "grad_norm": 0.21580536663532257, "learning_rate": 1.9698e-05, "loss": 0.0094, "step": 6569 }, { "epoch": 7.251794588625069, "grad_norm": 0.3973643481731415, "learning_rate": 1.9701e-05, "loss": 0.0205, "step": 6570 }, { "epoch": 7.25289895085588, "grad_norm": 0.3358650207519531, "learning_rate": 1.9704000000000003e-05, "loss": 0.0203, "step": 6571 }, { "epoch": 7.2540033130866926, "grad_norm": 0.5687642693519592, "learning_rate": 1.9707000000000003e-05, "loss": 0.0126, "step": 6572 }, { "epoch": 7.255107675317504, "grad_norm": 0.2584598660469055, "learning_rate": 1.9710000000000003e-05, "loss": 0.0119, "step": 6573 }, { "epoch": 7.256212037548316, "grad_norm": 1.7979884147644043, "learning_rate": 1.9713000000000003e-05, "loss": 0.02, "step": 6574 }, { "epoch": 7.2573163997791275, "grad_norm": 0.3407597839832306, "learning_rate": 1.9716000000000002e-05, "loss": 0.0205, "step": 6575 }, { "epoch": 7.258420762009939, "grad_norm": 0.42847105860710144, "learning_rate": 1.9719e-05, "loss": 0.0221, "step": 6576 }, { "epoch": 7.259525124240751, "grad_norm": 0.38546037673950195, "learning_rate": 1.9722e-05, "loss": 0.0131, "step": 6577 }, { "epoch": 7.260629486471562, "grad_norm": 0.6539170145988464, "learning_rate": 1.9725e-05, "loss": 0.0152, "step": 6578 }, { "epoch": 7.261733848702375, "grad_norm": 0.6498764157295227, "learning_rate": 1.9727999999999998e-05, "loss": 0.0218, "step": 6579 }, { "epoch": 7.262838210933186, "grad_norm": 0.3286907970905304, "learning_rate": 1.9730999999999998e-05, "loss": 0.0236, "step": 6580 }, { 
"epoch": 7.263942573163998, "grad_norm": 0.40709376335144043, "learning_rate": 1.9734e-05, "loss": 0.0255, "step": 6581 }, { "epoch": 7.2650469353948095, "grad_norm": 0.4761652648448944, "learning_rate": 1.9737e-05, "loss": 0.0327, "step": 6582 }, { "epoch": 7.266151297625621, "grad_norm": 0.25840115547180176, "learning_rate": 1.974e-05, "loss": 0.0124, "step": 6583 }, { "epoch": 7.267255659856433, "grad_norm": 0.5073603987693787, "learning_rate": 1.9743e-05, "loss": 0.0316, "step": 6584 }, { "epoch": 7.268360022087244, "grad_norm": 0.39080870151519775, "learning_rate": 1.9746e-05, "loss": 0.0158, "step": 6585 }, { "epoch": 7.269464384318057, "grad_norm": 0.33010318875312805, "learning_rate": 1.9749e-05, "loss": 0.0133, "step": 6586 }, { "epoch": 7.270568746548868, "grad_norm": 0.4611932635307312, "learning_rate": 1.9752e-05, "loss": 0.0289, "step": 6587 }, { "epoch": 7.27167310877968, "grad_norm": 0.5964023470878601, "learning_rate": 1.9755e-05, "loss": 0.0222, "step": 6588 }, { "epoch": 7.2727774710104915, "grad_norm": 0.8528143167495728, "learning_rate": 1.9758e-05, "loss": 0.016, "step": 6589 }, { "epoch": 7.273881833241303, "grad_norm": 0.38376525044441223, "learning_rate": 1.9761e-05, "loss": 0.0189, "step": 6590 }, { "epoch": 7.274986195472115, "grad_norm": 0.6963515281677246, "learning_rate": 1.9764000000000003e-05, "loss": 0.0252, "step": 6591 }, { "epoch": 7.2760905577029265, "grad_norm": 0.6070207953453064, "learning_rate": 1.9767000000000002e-05, "loss": 0.022, "step": 6592 }, { "epoch": 7.277194919933739, "grad_norm": 0.9158272743225098, "learning_rate": 1.9770000000000002e-05, "loss": 0.2727, "step": 6593 }, { "epoch": 7.27829928216455, "grad_norm": 1.1020011901855469, "learning_rate": 1.9773000000000002e-05, "loss": 0.3094, "step": 6594 }, { "epoch": 7.279403644395361, "grad_norm": 0.9233022332191467, "learning_rate": 1.9776000000000002e-05, "loss": 0.2919, "step": 6595 }, { "epoch": 7.280508006626174, "grad_norm": 0.6696792840957642, 
"learning_rate": 1.9779e-05, "loss": 0.1573, "step": 6596 }, { "epoch": 7.281612368856985, "grad_norm": 0.7220635414123535, "learning_rate": 1.9782e-05, "loss": 0.1292, "step": 6597 }, { "epoch": 7.282716731087797, "grad_norm": 0.8828179836273193, "learning_rate": 1.9785e-05, "loss": 0.1379, "step": 6598 }, { "epoch": 7.2838210933186085, "grad_norm": 0.7162560224533081, "learning_rate": 1.9788e-05, "loss": 0.0571, "step": 6599 }, { "epoch": 7.28492545554942, "grad_norm": 0.7123187780380249, "learning_rate": 1.9791e-05, "loss": 0.0798, "step": 6600 }, { "epoch": 7.286029817780232, "grad_norm": 0.4334113895893097, "learning_rate": 1.9794e-05, "loss": 0.0348, "step": 6601 }, { "epoch": 7.287134180011043, "grad_norm": 0.8147215843200684, "learning_rate": 1.9797e-05, "loss": 0.0266, "step": 6602 }, { "epoch": 7.288238542241856, "grad_norm": 0.6257861256599426, "learning_rate": 1.98e-05, "loss": 0.0845, "step": 6603 }, { "epoch": 7.289342904472667, "grad_norm": 0.6829645037651062, "learning_rate": 1.9803e-05, "loss": 0.0444, "step": 6604 }, { "epoch": 7.290447266703479, "grad_norm": 0.7141764163970947, "learning_rate": 1.9806e-05, "loss": 0.0172, "step": 6605 }, { "epoch": 7.2915516289342905, "grad_norm": 0.3305490016937256, "learning_rate": 1.9809e-05, "loss": 0.0295, "step": 6606 }, { "epoch": 7.292655991165102, "grad_norm": 0.49126681685447693, "learning_rate": 1.9812e-05, "loss": 0.0198, "step": 6607 }, { "epoch": 7.293760353395914, "grad_norm": 0.271136999130249, "learning_rate": 1.9815e-05, "loss": 0.0249, "step": 6608 }, { "epoch": 7.2948647156267254, "grad_norm": 0.6307091116905212, "learning_rate": 1.9818e-05, "loss": 0.0229, "step": 6609 }, { "epoch": 7.295969077857538, "grad_norm": 0.5343537330627441, "learning_rate": 1.9821e-05, "loss": 0.0229, "step": 6610 }, { "epoch": 7.297073440088349, "grad_norm": 0.3225714862346649, "learning_rate": 1.9824000000000002e-05, "loss": 0.0116, "step": 6611 }, { "epoch": 7.29817780231916, "grad_norm": 0.7827106714248657, 
"learning_rate": 1.9827000000000002e-05, "loss": 0.0347, "step": 6612 }, { "epoch": 7.299282164549973, "grad_norm": 0.47400131821632385, "learning_rate": 1.983e-05, "loss": 0.0279, "step": 6613 }, { "epoch": 7.300386526780784, "grad_norm": 0.5816287994384766, "learning_rate": 1.9833e-05, "loss": 0.0254, "step": 6614 }, { "epoch": 7.301490889011596, "grad_norm": 0.2660137414932251, "learning_rate": 1.9836e-05, "loss": 0.0189, "step": 6615 }, { "epoch": 7.3025952512424075, "grad_norm": 0.4855640232563019, "learning_rate": 1.9839e-05, "loss": 0.0235, "step": 6616 }, { "epoch": 7.303699613473219, "grad_norm": 0.45815524458885193, "learning_rate": 1.9842e-05, "loss": 0.021, "step": 6617 }, { "epoch": 7.304803975704031, "grad_norm": 0.5720378160476685, "learning_rate": 1.9845e-05, "loss": 0.022, "step": 6618 }, { "epoch": 7.305908337934842, "grad_norm": 0.4141123294830322, "learning_rate": 1.9848e-05, "loss": 0.0684, "step": 6619 }, { "epoch": 7.307012700165655, "grad_norm": 0.30038419365882874, "learning_rate": 1.9851e-05, "loss": 0.0113, "step": 6620 }, { "epoch": 7.308117062396466, "grad_norm": 0.3214566111564636, "learning_rate": 1.9854000000000003e-05, "loss": 0.0193, "step": 6621 }, { "epoch": 7.309221424627278, "grad_norm": 0.3626258373260498, "learning_rate": 1.9857000000000003e-05, "loss": 0.0142, "step": 6622 }, { "epoch": 7.3103257868580895, "grad_norm": 0.5382555723190308, "learning_rate": 1.9860000000000003e-05, "loss": 0.0264, "step": 6623 }, { "epoch": 7.311430149088901, "grad_norm": 0.46018165349960327, "learning_rate": 1.9863000000000003e-05, "loss": 0.0216, "step": 6624 }, { "epoch": 7.312534511319713, "grad_norm": 0.36533504724502563, "learning_rate": 1.9866e-05, "loss": 0.0149, "step": 6625 }, { "epoch": 7.313638873550524, "grad_norm": 0.45335936546325684, "learning_rate": 1.9869e-05, "loss": 0.0197, "step": 6626 }, { "epoch": 7.314743235781337, "grad_norm": 0.5602146983146667, "learning_rate": 1.9872e-05, "loss": 0.0169, "step": 6627 }, { "epoch": 
7.315847598012148, "grad_norm": 0.6883335709571838, "learning_rate": 1.9875e-05, "loss": 0.021, "step": 6628 }, { "epoch": 7.316951960242959, "grad_norm": 0.3457242250442505, "learning_rate": 1.9878e-05, "loss": 0.0246, "step": 6629 }, { "epoch": 7.318056322473772, "grad_norm": 0.6124860644340515, "learning_rate": 1.9880999999999998e-05, "loss": 0.0341, "step": 6630 }, { "epoch": 7.319160684704583, "grad_norm": 0.47358790040016174, "learning_rate": 1.9883999999999998e-05, "loss": 0.0261, "step": 6631 }, { "epoch": 7.320265046935395, "grad_norm": 0.3342713415622711, "learning_rate": 1.9887e-05, "loss": 0.0177, "step": 6632 }, { "epoch": 7.3213694091662065, "grad_norm": 0.30173057317733765, "learning_rate": 1.989e-05, "loss": 0.0126, "step": 6633 }, { "epoch": 7.322473771397018, "grad_norm": 0.406599760055542, "learning_rate": 1.9893e-05, "loss": 0.0161, "step": 6634 }, { "epoch": 7.32357813362783, "grad_norm": 0.5268474221229553, "learning_rate": 1.9896e-05, "loss": 0.024, "step": 6635 }, { "epoch": 7.324682495858641, "grad_norm": 0.8036971688270569, "learning_rate": 1.9899e-05, "loss": 0.0359, "step": 6636 }, { "epoch": 7.325786858089454, "grad_norm": 0.5505076050758362, "learning_rate": 1.9902e-05, "loss": 0.0173, "step": 6637 }, { "epoch": 7.326891220320265, "grad_norm": 0.45116764307022095, "learning_rate": 1.9905e-05, "loss": 0.0181, "step": 6638 }, { "epoch": 7.327995582551077, "grad_norm": 0.4742923080921173, "learning_rate": 1.9908e-05, "loss": 0.0167, "step": 6639 }, { "epoch": 7.3290999447818885, "grad_norm": 0.9233490228652954, "learning_rate": 1.9911e-05, "loss": 0.0384, "step": 6640 }, { "epoch": 7.3302043070127, "grad_norm": 0.5754578709602356, "learning_rate": 1.9914e-05, "loss": 0.0226, "step": 6641 }, { "epoch": 7.331308669243512, "grad_norm": 0.5529664754867554, "learning_rate": 1.9917000000000003e-05, "loss": 0.0211, "step": 6642 }, { "epoch": 7.332413031474323, "grad_norm": 0.9367413520812988, "learning_rate": 1.9920000000000002e-05, "loss": 
0.2644, "step": 6643 }, { "epoch": 7.333517393705136, "grad_norm": 1.0107735395431519, "learning_rate": 1.9923000000000002e-05, "loss": 0.3169, "step": 6644 }, { "epoch": 7.334621755935947, "grad_norm": 0.8243107795715332, "learning_rate": 1.9926000000000002e-05, "loss": 0.1888, "step": 6645 }, { "epoch": 7.335726118166758, "grad_norm": 0.7211164236068726, "learning_rate": 1.9929000000000002e-05, "loss": 0.1699, "step": 6646 }, { "epoch": 7.336830480397571, "grad_norm": 0.5306048393249512, "learning_rate": 1.9932e-05, "loss": 0.0963, "step": 6647 }, { "epoch": 7.337934842628382, "grad_norm": 1.1491329669952393, "learning_rate": 1.9935e-05, "loss": 0.1045, "step": 6648 }, { "epoch": 7.339039204859194, "grad_norm": 0.6126936078071594, "learning_rate": 1.9938e-05, "loss": 0.1082, "step": 6649 }, { "epoch": 7.3401435670900055, "grad_norm": 1.8012405633926392, "learning_rate": 1.9940999999999998e-05, "loss": 0.0708, "step": 6650 }, { "epoch": 7.341247929320817, "grad_norm": 0.9272776246070862, "learning_rate": 1.9943999999999997e-05, "loss": 0.1915, "step": 6651 }, { "epoch": 7.342352291551629, "grad_norm": 0.47091469168663025, "learning_rate": 1.9947e-05, "loss": 0.0607, "step": 6652 }, { "epoch": 7.34345665378244, "grad_norm": 0.6675097346305847, "learning_rate": 1.995e-05, "loss": 0.0547, "step": 6653 }, { "epoch": 7.344561016013253, "grad_norm": 0.3129049837589264, "learning_rate": 1.9953e-05, "loss": 0.0223, "step": 6654 }, { "epoch": 7.345665378244064, "grad_norm": 0.37882766127586365, "learning_rate": 1.9956e-05, "loss": 0.0249, "step": 6655 }, { "epoch": 7.346769740474876, "grad_norm": 0.49452081322669983, "learning_rate": 1.9959e-05, "loss": 0.0269, "step": 6656 }, { "epoch": 7.3478741027056875, "grad_norm": 0.3003068268299103, "learning_rate": 1.9962e-05, "loss": 0.0219, "step": 6657 }, { "epoch": 7.348978464936499, "grad_norm": 0.5581066608428955, "learning_rate": 1.9965e-05, "loss": 0.0227, "step": 6658 }, { "epoch": 7.350082827167311, "grad_norm": 
0.5283769369125366, "learning_rate": 1.9968e-05, "loss": 0.0189, "step": 6659 }, { "epoch": 7.351187189398122, "grad_norm": 0.3100410997867584, "learning_rate": 1.9971e-05, "loss": 0.0201, "step": 6660 }, { "epoch": 7.352291551628935, "grad_norm": 0.4918830990791321, "learning_rate": 1.9974e-05, "loss": 0.0396, "step": 6661 }, { "epoch": 7.353395913859746, "grad_norm": 0.42110133171081543, "learning_rate": 1.9977000000000002e-05, "loss": 0.0306, "step": 6662 }, { "epoch": 7.354500276090557, "grad_norm": 2.6039741039276123, "learning_rate": 1.9980000000000002e-05, "loss": 0.0287, "step": 6663 }, { "epoch": 7.35560463832137, "grad_norm": 0.3898586630821228, "learning_rate": 1.9983e-05, "loss": 0.0225, "step": 6664 }, { "epoch": 7.356709000552181, "grad_norm": 0.34737083315849304, "learning_rate": 1.9986e-05, "loss": 0.0143, "step": 6665 }, { "epoch": 7.357813362782993, "grad_norm": 0.6875115633010864, "learning_rate": 1.9989e-05, "loss": 0.0414, "step": 6666 }, { "epoch": 7.3589177250138045, "grad_norm": 0.2722066044807434, "learning_rate": 1.9992e-05, "loss": 0.0163, "step": 6667 }, { "epoch": 7.360022087244616, "grad_norm": 0.5300334095954895, "learning_rate": 1.9995e-05, "loss": 0.0369, "step": 6668 }, { "epoch": 7.361126449475428, "grad_norm": 0.7534385323524475, "learning_rate": 1.9998e-05, "loss": 0.0254, "step": 6669 }, { "epoch": 7.362230811706239, "grad_norm": 0.3041381537914276, "learning_rate": 2.0001e-05, "loss": 0.022, "step": 6670 }, { "epoch": 7.363335173937052, "grad_norm": 0.3511119484901428, "learning_rate": 2.0004e-05, "loss": 0.0273, "step": 6671 }, { "epoch": 7.364439536167863, "grad_norm": 0.2606247365474701, "learning_rate": 2.0007000000000003e-05, "loss": 0.0181, "step": 6672 }, { "epoch": 7.365543898398675, "grad_norm": 0.516930103302002, "learning_rate": 2.0010000000000003e-05, "loss": 0.0316, "step": 6673 }, { "epoch": 7.3666482606294865, "grad_norm": 0.4083203077316284, "learning_rate": 2.0013e-05, "loss": 0.0156, "step": 6674 }, { 
"epoch": 7.367752622860298, "grad_norm": 0.2923940122127533, "learning_rate": 2.0016e-05, "loss": 0.0129, "step": 6675 }, { "epoch": 7.36885698509111, "grad_norm": 0.5099921822547913, "learning_rate": 2.0019e-05, "loss": 0.0306, "step": 6676 }, { "epoch": 7.369961347321921, "grad_norm": 0.503378689289093, "learning_rate": 2.0022e-05, "loss": 0.0248, "step": 6677 }, { "epoch": 7.371065709552734, "grad_norm": 0.3437710702419281, "learning_rate": 2.0025e-05, "loss": 0.0181, "step": 6678 }, { "epoch": 7.372170071783545, "grad_norm": 0.8701093792915344, "learning_rate": 2.0028e-05, "loss": 0.032, "step": 6679 }, { "epoch": 7.373274434014356, "grad_norm": 1.093351125717163, "learning_rate": 2.0031e-05, "loss": 0.0259, "step": 6680 }, { "epoch": 7.374378796245169, "grad_norm": 0.4800034761428833, "learning_rate": 2.0033999999999998e-05, "loss": 0.0231, "step": 6681 }, { "epoch": 7.37548315847598, "grad_norm": 0.8781384229660034, "learning_rate": 2.0037e-05, "loss": 0.0352, "step": 6682 }, { "epoch": 7.376587520706792, "grad_norm": 0.39447587728500366, "learning_rate": 2.004e-05, "loss": 0.0222, "step": 6683 }, { "epoch": 7.3776918829376035, "grad_norm": 0.5151221752166748, "learning_rate": 2.0043e-05, "loss": 0.0289, "step": 6684 }, { "epoch": 7.378796245168415, "grad_norm": 0.220931276679039, "learning_rate": 2.0046e-05, "loss": 0.0123, "step": 6685 }, { "epoch": 7.379900607399227, "grad_norm": 0.6065500974655151, "learning_rate": 2.0049e-05, "loss": 0.0208, "step": 6686 }, { "epoch": 7.381004969630038, "grad_norm": 1.6684719324111938, "learning_rate": 2.0052e-05, "loss": 0.0202, "step": 6687 }, { "epoch": 7.382109331860851, "grad_norm": 0.6381514072418213, "learning_rate": 2.0055e-05, "loss": 0.0398, "step": 6688 }, { "epoch": 7.383213694091662, "grad_norm": 0.5593182444572449, "learning_rate": 2.0058e-05, "loss": 0.0354, "step": 6689 }, { "epoch": 7.384318056322474, "grad_norm": 0.7981013059616089, "learning_rate": 2.0061e-05, "loss": 0.0405, "step": 6690 }, { "epoch": 
7.3854224185532855, "grad_norm": 0.9807796478271484, "learning_rate": 2.0064e-05, "loss": 0.041, "step": 6691 }, { "epoch": 7.386526780784097, "grad_norm": 0.6648764610290527, "learning_rate": 2.0067000000000003e-05, "loss": 0.0293, "step": 6692 }, { "epoch": 7.387631143014909, "grad_norm": 0.7396884560585022, "learning_rate": 2.0070000000000003e-05, "loss": 0.258, "step": 6693 }, { "epoch": 7.38873550524572, "grad_norm": 0.7944959998130798, "learning_rate": 2.0073000000000002e-05, "loss": 0.1824, "step": 6694 }, { "epoch": 7.389839867476533, "grad_norm": 0.7560776472091675, "learning_rate": 2.0076000000000002e-05, "loss": 0.1741, "step": 6695 }, { "epoch": 7.390944229707344, "grad_norm": 0.544981837272644, "learning_rate": 2.0079000000000002e-05, "loss": 0.1193, "step": 6696 }, { "epoch": 7.392048591938155, "grad_norm": 0.9494307637214661, "learning_rate": 2.0082000000000002e-05, "loss": 0.1843, "step": 6697 }, { "epoch": 7.393152954168968, "grad_norm": 0.49663788080215454, "learning_rate": 2.0085e-05, "loss": 0.0879, "step": 6698 }, { "epoch": 7.394257316399779, "grad_norm": 0.6476004123687744, "learning_rate": 2.0087999999999998e-05, "loss": 0.0813, "step": 6699 }, { "epoch": 7.395361678630591, "grad_norm": 0.6137819886207581, "learning_rate": 2.0090999999999998e-05, "loss": 0.0977, "step": 6700 }, { "epoch": 7.3964660408614025, "grad_norm": 0.5034668445587158, "learning_rate": 2.0093999999999998e-05, "loss": 0.0466, "step": 6701 }, { "epoch": 7.397570403092214, "grad_norm": 0.36829718947410583, "learning_rate": 2.0097e-05, "loss": 0.0276, "step": 6702 }, { "epoch": 7.398674765323026, "grad_norm": 0.4585888683795929, "learning_rate": 2.01e-05, "loss": 0.0311, "step": 6703 }, { "epoch": 7.399779127553837, "grad_norm": 0.23269666731357574, "learning_rate": 2.0103e-05, "loss": 0.0209, "step": 6704 }, { "epoch": 7.40088348978465, "grad_norm": 0.3631020486354828, "learning_rate": 2.0106e-05, "loss": 0.0236, "step": 6705 }, { "epoch": 7.401987852015461, "grad_norm": 
1.0719712972640991, "learning_rate": 2.0109e-05, "loss": 0.0276, "step": 6706 }, { "epoch": 7.403092214246273, "grad_norm": 0.5637182593345642, "learning_rate": 2.0112e-05, "loss": 0.0921, "step": 6707 }, { "epoch": 7.4041965764770845, "grad_norm": 0.48380371928215027, "learning_rate": 2.0115e-05, "loss": 0.0217, "step": 6708 }, { "epoch": 7.405300938707896, "grad_norm": 0.5565721988677979, "learning_rate": 2.0118e-05, "loss": 0.025, "step": 6709 }, { "epoch": 7.406405300938708, "grad_norm": 0.32725363969802856, "learning_rate": 2.0121e-05, "loss": 0.0217, "step": 6710 }, { "epoch": 7.407509663169519, "grad_norm": 0.6467260718345642, "learning_rate": 2.0124e-05, "loss": 0.0147, "step": 6711 }, { "epoch": 7.408614025400332, "grad_norm": 0.2961876690387726, "learning_rate": 2.0127000000000002e-05, "loss": 0.0186, "step": 6712 }, { "epoch": 7.409718387631143, "grad_norm": 0.5818292498588562, "learning_rate": 2.0130000000000002e-05, "loss": 0.0326, "step": 6713 }, { "epoch": 7.410822749861954, "grad_norm": 0.3921523690223694, "learning_rate": 2.0133000000000002e-05, "loss": 0.0183, "step": 6714 }, { "epoch": 7.411927112092767, "grad_norm": 0.5113795399665833, "learning_rate": 2.0136e-05, "loss": 0.0293, "step": 6715 }, { "epoch": 7.413031474323578, "grad_norm": 0.3858933448791504, "learning_rate": 2.0139e-05, "loss": 0.0149, "step": 6716 }, { "epoch": 7.41413583655439, "grad_norm": 0.41339120268821716, "learning_rate": 2.0142e-05, "loss": 0.016, "step": 6717 }, { "epoch": 7.4152401987852015, "grad_norm": 0.34145888686180115, "learning_rate": 2.0145e-05, "loss": 0.0263, "step": 6718 }, { "epoch": 7.416344561016013, "grad_norm": 1.3201892375946045, "learning_rate": 2.0148e-05, "loss": 0.0176, "step": 6719 }, { "epoch": 7.417448923246825, "grad_norm": 0.770507276058197, "learning_rate": 2.0151e-05, "loss": 0.0136, "step": 6720 }, { "epoch": 7.418553285477636, "grad_norm": 0.22504878044128418, "learning_rate": 2.0154e-05, "loss": 0.0077, "step": 6721 }, { "epoch": 
7.419657647708449, "grad_norm": 0.5954993367195129, "learning_rate": 2.0157000000000004e-05, "loss": 0.0351, "step": 6722 }, { "epoch": 7.42076200993926, "grad_norm": 0.8146042227745056, "learning_rate": 2.016e-05, "loss": 0.0201, "step": 6723 }, { "epoch": 7.421866372170072, "grad_norm": 1.0673786401748657, "learning_rate": 2.0163e-05, "loss": 0.0294, "step": 6724 }, { "epoch": 7.4229707344008835, "grad_norm": 0.4914948046207428, "learning_rate": 2.0166e-05, "loss": 0.0281, "step": 6725 }, { "epoch": 7.424075096631695, "grad_norm": 0.4140951335430145, "learning_rate": 2.0169e-05, "loss": 0.0222, "step": 6726 }, { "epoch": 7.425179458862507, "grad_norm": 0.43617475032806396, "learning_rate": 2.0172e-05, "loss": 0.0247, "step": 6727 }, { "epoch": 7.426283821093318, "grad_norm": 0.9051471948623657, "learning_rate": 2.0175e-05, "loss": 0.0226, "step": 6728 }, { "epoch": 7.427388183324131, "grad_norm": 0.6547560095787048, "learning_rate": 2.0178e-05, "loss": 0.0272, "step": 6729 }, { "epoch": 7.428492545554942, "grad_norm": 0.5013329386711121, "learning_rate": 2.0181e-05, "loss": 0.0255, "step": 6730 }, { "epoch": 7.429596907785753, "grad_norm": 1.3411612510681152, "learning_rate": 2.0184e-05, "loss": 0.016, "step": 6731 }, { "epoch": 7.4307012700165656, "grad_norm": 0.3741026818752289, "learning_rate": 2.0187000000000002e-05, "loss": 0.0212, "step": 6732 }, { "epoch": 7.431805632247377, "grad_norm": 0.5788151025772095, "learning_rate": 2.019e-05, "loss": 0.0268, "step": 6733 }, { "epoch": 7.432909994478189, "grad_norm": 0.7156489491462708, "learning_rate": 2.0193e-05, "loss": 0.0336, "step": 6734 }, { "epoch": 7.4340143567090005, "grad_norm": 0.4259074032306671, "learning_rate": 2.0196e-05, "loss": 0.0347, "step": 6735 }, { "epoch": 7.435118718939812, "grad_norm": 0.46345415711402893, "learning_rate": 2.0199e-05, "loss": 0.028, "step": 6736 }, { "epoch": 7.436223081170624, "grad_norm": 0.37343916296958923, "learning_rate": 2.0202e-05, "loss": 0.0209, "step": 6737 }, { 
"epoch": 7.437327443401435, "grad_norm": 0.44397860765457153, "learning_rate": 2.0205e-05, "loss": 0.0243, "step": 6738 }, { "epoch": 7.438431805632248, "grad_norm": 0.7367932796478271, "learning_rate": 2.0208e-05, "loss": 0.0343, "step": 6739 }, { "epoch": 7.439536167863059, "grad_norm": 0.40884628891944885, "learning_rate": 2.0211e-05, "loss": 0.0176, "step": 6740 }, { "epoch": 7.440640530093871, "grad_norm": 0.5805923342704773, "learning_rate": 2.0214e-05, "loss": 0.0314, "step": 6741 }, { "epoch": 7.4417448923246825, "grad_norm": 0.4515135884284973, "learning_rate": 2.0217000000000003e-05, "loss": 0.0329, "step": 6742 }, { "epoch": 7.442849254555494, "grad_norm": 0.959775984287262, "learning_rate": 2.0220000000000003e-05, "loss": 0.2922, "step": 6743 }, { "epoch": 7.443953616786306, "grad_norm": 0.9930148124694824, "learning_rate": 2.0223000000000003e-05, "loss": 0.2301, "step": 6744 }, { "epoch": 7.445057979017117, "grad_norm": 0.9028601050376892, "learning_rate": 2.0226000000000003e-05, "loss": 0.1984, "step": 6745 }, { "epoch": 7.44616234124793, "grad_norm": 1.2204591035842896, "learning_rate": 2.0229000000000002e-05, "loss": 0.2184, "step": 6746 }, { "epoch": 7.447266703478741, "grad_norm": 1.1587103605270386, "learning_rate": 2.0232000000000002e-05, "loss": 0.1712, "step": 6747 }, { "epoch": 7.448371065709552, "grad_norm": 0.5986649990081787, "learning_rate": 2.0235e-05, "loss": 0.1018, "step": 6748 }, { "epoch": 7.4494754279403645, "grad_norm": 0.6927028894424438, "learning_rate": 2.0238e-05, "loss": 0.0971, "step": 6749 }, { "epoch": 7.450579790171176, "grad_norm": 0.9923707246780396, "learning_rate": 2.0240999999999998e-05, "loss": 0.0785, "step": 6750 }, { "epoch": 7.451684152401988, "grad_norm": 0.5818222165107727, "learning_rate": 2.0243999999999998e-05, "loss": 0.0653, "step": 6751 }, { "epoch": 7.4527885146327995, "grad_norm": 0.8911911249160767, "learning_rate": 2.0247e-05, "loss": 0.0912, "step": 6752 }, { "epoch": 7.453892876863611, "grad_norm": 
0.1974426954984665, "learning_rate": 2.025e-05, "loss": 0.0279, "step": 6753 }, { "epoch": 7.454997239094423, "grad_norm": 0.3610737919807434, "learning_rate": 2.0253e-05, "loss": 0.0347, "step": 6754 }, { "epoch": 7.456101601325234, "grad_norm": 0.5006141662597656, "learning_rate": 2.0256e-05, "loss": 0.0348, "step": 6755 }, { "epoch": 7.457205963556047, "grad_norm": 0.7195013761520386, "learning_rate": 2.0259e-05, "loss": 0.0507, "step": 6756 }, { "epoch": 7.458310325786858, "grad_norm": 0.7785109877586365, "learning_rate": 2.0262e-05, "loss": 0.0312, "step": 6757 }, { "epoch": 7.45941468801767, "grad_norm": 0.4857526421546936, "learning_rate": 2.0265e-05, "loss": 0.0284, "step": 6758 }, { "epoch": 7.4605190502484815, "grad_norm": 0.7267093062400818, "learning_rate": 2.0268e-05, "loss": 0.0294, "step": 6759 }, { "epoch": 7.461623412479293, "grad_norm": 0.3169584274291992, "learning_rate": 2.0271e-05, "loss": 0.0172, "step": 6760 }, { "epoch": 7.462727774710105, "grad_norm": 0.6060346364974976, "learning_rate": 2.0274e-05, "loss": 0.0275, "step": 6761 }, { "epoch": 7.463832136940916, "grad_norm": 0.43866604566574097, "learning_rate": 2.0277e-05, "loss": 0.0316, "step": 6762 }, { "epoch": 7.464936499171729, "grad_norm": 0.44148847460746765, "learning_rate": 2.0280000000000002e-05, "loss": 0.0256, "step": 6763 }, { "epoch": 7.46604086140254, "grad_norm": 0.38722100853919983, "learning_rate": 2.0283000000000002e-05, "loss": 0.0177, "step": 6764 }, { "epoch": 7.467145223633352, "grad_norm": 0.2448951154947281, "learning_rate": 2.0286000000000002e-05, "loss": 0.0158, "step": 6765 }, { "epoch": 7.4682495858641635, "grad_norm": 0.9073241353034973, "learning_rate": 2.0289000000000002e-05, "loss": 0.0499, "step": 6766 }, { "epoch": 7.469353948094975, "grad_norm": 0.3261277973651886, "learning_rate": 2.0292e-05, "loss": 0.0164, "step": 6767 }, { "epoch": 7.470458310325787, "grad_norm": 0.3448526859283447, "learning_rate": 2.0295e-05, "loss": 0.0248, "step": 6768 }, { 
"epoch": 7.4715626725565985, "grad_norm": 0.42962732911109924, "learning_rate": 2.0298e-05, "loss": 0.026, "step": 6769 }, { "epoch": 7.472667034787411, "grad_norm": 0.38762935996055603, "learning_rate": 2.0301e-05, "loss": 0.0182, "step": 6770 }, { "epoch": 7.473771397018222, "grad_norm": 0.3272431790828705, "learning_rate": 2.0304e-05, "loss": 0.0169, "step": 6771 }, { "epoch": 7.474875759249033, "grad_norm": 0.2740468978881836, "learning_rate": 2.0307e-05, "loss": 0.0154, "step": 6772 }, { "epoch": 7.475980121479846, "grad_norm": 0.39386802911758423, "learning_rate": 2.031e-05, "loss": 0.0259, "step": 6773 }, { "epoch": 7.477084483710657, "grad_norm": 0.6088123917579651, "learning_rate": 2.0313e-05, "loss": 0.028, "step": 6774 }, { "epoch": 7.478188845941469, "grad_norm": 0.40442511439323425, "learning_rate": 2.0316e-05, "loss": 0.0234, "step": 6775 }, { "epoch": 7.4792932081722805, "grad_norm": 0.19260206818580627, "learning_rate": 2.0319e-05, "loss": 0.011, "step": 6776 }, { "epoch": 7.480397570403092, "grad_norm": 0.7163945436477661, "learning_rate": 2.0322e-05, "loss": 0.0281, "step": 6777 }, { "epoch": 7.481501932633904, "grad_norm": 0.47538670897483826, "learning_rate": 2.0325e-05, "loss": 0.0254, "step": 6778 }, { "epoch": 7.482606294864715, "grad_norm": 0.5759218335151672, "learning_rate": 2.0328e-05, "loss": 0.0327, "step": 6779 }, { "epoch": 7.483710657095528, "grad_norm": 0.5057665109634399, "learning_rate": 2.0331e-05, "loss": 0.0193, "step": 6780 }, { "epoch": 7.484815019326339, "grad_norm": 0.30063530802726746, "learning_rate": 2.0334e-05, "loss": 0.0134, "step": 6781 }, { "epoch": 7.485919381557151, "grad_norm": 0.4727936089038849, "learning_rate": 2.0337e-05, "loss": 0.0232, "step": 6782 }, { "epoch": 7.4870237437879625, "grad_norm": 0.7969774007797241, "learning_rate": 2.0340000000000002e-05, "loss": 0.0394, "step": 6783 }, { "epoch": 7.488128106018774, "grad_norm": 0.43921029567718506, "learning_rate": 2.0343e-05, "loss": 0.0292, "step": 6784 
}, { "epoch": 7.489232468249586, "grad_norm": 0.32827210426330566, "learning_rate": 2.0346e-05, "loss": 0.0159, "step": 6785 }, { "epoch": 7.4903368304803974, "grad_norm": 1.0675073862075806, "learning_rate": 2.0349e-05, "loss": 0.0306, "step": 6786 }, { "epoch": 7.49144119271121, "grad_norm": 0.682866632938385, "learning_rate": 2.0352e-05, "loss": 0.0304, "step": 6787 }, { "epoch": 7.492545554942021, "grad_norm": 0.34755387902259827, "learning_rate": 2.0355e-05, "loss": 0.0252, "step": 6788 }, { "epoch": 7.493649917172832, "grad_norm": 0.39794567227363586, "learning_rate": 2.0358e-05, "loss": 0.0221, "step": 6789 }, { "epoch": 7.494754279403645, "grad_norm": 0.46896201372146606, "learning_rate": 2.0361e-05, "loss": 0.0312, "step": 6790 }, { "epoch": 7.495858641634456, "grad_norm": 0.33736249804496765, "learning_rate": 2.0364e-05, "loss": 0.0215, "step": 6791 }, { "epoch": 7.496963003865268, "grad_norm": 0.6926752924919128, "learning_rate": 2.0367e-05, "loss": 0.028, "step": 6792 }, { "epoch": 7.4980673660960795, "grad_norm": 1.1705087423324585, "learning_rate": 2.0370000000000003e-05, "loss": 0.2906, "step": 6793 }, { "epoch": 7.499171728326891, "grad_norm": 0.8591741919517517, "learning_rate": 2.0373000000000003e-05, "loss": 0.1923, "step": 6794 }, { "epoch": 7.500276090557703, "grad_norm": 0.9565987586975098, "learning_rate": 2.0376000000000003e-05, "loss": 0.2309, "step": 6795 }, { "epoch": 7.501380452788514, "grad_norm": 0.8893325328826904, "learning_rate": 2.0379000000000003e-05, "loss": 0.159, "step": 6796 }, { "epoch": 7.502484815019327, "grad_norm": 0.5652045607566833, "learning_rate": 2.0382e-05, "loss": 0.1158, "step": 6797 }, { "epoch": 7.503589177250138, "grad_norm": 1.1026772260665894, "learning_rate": 2.0385e-05, "loss": 0.2085, "step": 6798 }, { "epoch": 7.50469353948095, "grad_norm": 0.9624462723731995, "learning_rate": 2.0388e-05, "loss": 0.0966, "step": 6799 }, { "epoch": 7.5057979017117615, "grad_norm": 0.37715548276901245, "learning_rate": 
2.0391e-05, "loss": 0.0382, "step": 6800 }, { "epoch": 7.506902263942573, "grad_norm": 0.5702402591705322, "learning_rate": 2.0393999999999998e-05, "loss": 0.0502, "step": 6801 }, { "epoch": 7.508006626173385, "grad_norm": 0.6705979108810425, "learning_rate": 2.0396999999999998e-05, "loss": 0.0336, "step": 6802 }, { "epoch": 7.509110988404196, "grad_norm": 0.5977898240089417, "learning_rate": 2.04e-05, "loss": 0.0381, "step": 6803 }, { "epoch": 7.510215350635009, "grad_norm": 0.43755459785461426, "learning_rate": 2.0403e-05, "loss": 0.019, "step": 6804 }, { "epoch": 7.51131971286582, "grad_norm": 0.5571437478065491, "learning_rate": 2.0406e-05, "loss": 0.0298, "step": 6805 }, { "epoch": 7.512424075096631, "grad_norm": 0.5925357341766357, "learning_rate": 2.0409e-05, "loss": 0.0336, "step": 6806 }, { "epoch": 7.513528437327444, "grad_norm": 0.6498827338218689, "learning_rate": 2.0412e-05, "loss": 0.0417, "step": 6807 }, { "epoch": 7.514632799558255, "grad_norm": 0.429399311542511, "learning_rate": 2.0415e-05, "loss": 0.0309, "step": 6808 }, { "epoch": 7.515737161789067, "grad_norm": 0.4842258393764496, "learning_rate": 2.0418e-05, "loss": 0.0338, "step": 6809 }, { "epoch": 7.5168415240198785, "grad_norm": 0.26415833830833435, "learning_rate": 2.0421e-05, "loss": 0.0201, "step": 6810 }, { "epoch": 7.51794588625069, "grad_norm": 0.31213921308517456, "learning_rate": 2.0424e-05, "loss": 0.0192, "step": 6811 }, { "epoch": 7.519050248481502, "grad_norm": 0.4636227488517761, "learning_rate": 2.0427e-05, "loss": 0.0191, "step": 6812 }, { "epoch": 7.520154610712313, "grad_norm": 0.5666443705558777, "learning_rate": 2.0430000000000003e-05, "loss": 0.025, "step": 6813 }, { "epoch": 7.521258972943126, "grad_norm": 0.6103516817092896, "learning_rate": 2.0433000000000002e-05, "loss": 0.0482, "step": 6814 }, { "epoch": 7.522363335173937, "grad_norm": 0.5183997750282288, "learning_rate": 2.0436000000000002e-05, "loss": 0.0192, "step": 6815 }, { "epoch": 7.523467697404749, 
"grad_norm": 0.6238217949867249, "learning_rate": 2.0439000000000002e-05, "loss": 0.0185, "step": 6816 }, { "epoch": 7.5245720596355605, "grad_norm": 0.3548363447189331, "learning_rate": 2.0442000000000002e-05, "loss": 0.0197, "step": 6817 }, { "epoch": 7.525676421866372, "grad_norm": 0.5115543007850647, "learning_rate": 2.0445e-05, "loss": 0.0299, "step": 6818 }, { "epoch": 7.526780784097184, "grad_norm": 0.9778419137001038, "learning_rate": 2.0448e-05, "loss": 0.0337, "step": 6819 }, { "epoch": 7.527885146327995, "grad_norm": 0.41257229447364807, "learning_rate": 2.0451e-05, "loss": 0.03, "step": 6820 }, { "epoch": 7.528989508558808, "grad_norm": 0.3405947685241699, "learning_rate": 2.0454e-05, "loss": 0.0146, "step": 6821 }, { "epoch": 7.530093870789619, "grad_norm": 0.34786257147789, "learning_rate": 2.0456999999999997e-05, "loss": 0.0267, "step": 6822 }, { "epoch": 7.53119823302043, "grad_norm": 0.937124490737915, "learning_rate": 2.046e-05, "loss": 0.0408, "step": 6823 }, { "epoch": 7.532302595251243, "grad_norm": 0.4107080101966858, "learning_rate": 2.0463e-05, "loss": 0.0275, "step": 6824 }, { "epoch": 7.533406957482054, "grad_norm": 0.3245342969894409, "learning_rate": 2.0466e-05, "loss": 0.0162, "step": 6825 }, { "epoch": 7.534511319712866, "grad_norm": 0.4483734369277954, "learning_rate": 2.0469e-05, "loss": 0.0247, "step": 6826 }, { "epoch": 7.5356156819436775, "grad_norm": 0.6262621283531189, "learning_rate": 2.0472e-05, "loss": 0.0239, "step": 6827 }, { "epoch": 7.536720044174489, "grad_norm": 0.3948151767253876, "learning_rate": 2.0475e-05, "loss": 0.0178, "step": 6828 }, { "epoch": 7.537824406405301, "grad_norm": 0.38876619935035706, "learning_rate": 2.0478e-05, "loss": 0.0139, "step": 6829 }, { "epoch": 7.538928768636112, "grad_norm": 0.5018080472946167, "learning_rate": 2.0481e-05, "loss": 0.0284, "step": 6830 }, { "epoch": 7.540033130866925, "grad_norm": 0.4181593060493469, "learning_rate": 2.0484e-05, "loss": 0.0321, "step": 6831 }, { "epoch": 
7.541137493097736, "grad_norm": 0.6993118524551392, "learning_rate": 2.0487e-05, "loss": 0.029, "step": 6832 }, { "epoch": 7.542241855328548, "grad_norm": 0.3961670994758606, "learning_rate": 2.0490000000000002e-05, "loss": 0.0256, "step": 6833 }, { "epoch": 7.5433462175593595, "grad_norm": 0.8721837997436523, "learning_rate": 2.0493000000000002e-05, "loss": 0.0304, "step": 6834 }, { "epoch": 7.544450579790171, "grad_norm": 0.4393433630466461, "learning_rate": 2.0496e-05, "loss": 0.024, "step": 6835 }, { "epoch": 7.545554942020983, "grad_norm": 0.2750988304615021, "learning_rate": 2.0499e-05, "loss": 0.0124, "step": 6836 }, { "epoch": 7.546659304251794, "grad_norm": 0.46246573328971863, "learning_rate": 2.0502e-05, "loss": 0.0263, "step": 6837 }, { "epoch": 7.547763666482607, "grad_norm": 0.7102046608924866, "learning_rate": 2.0505e-05, "loss": 0.049, "step": 6838 }, { "epoch": 7.548868028713418, "grad_norm": 0.5460326075553894, "learning_rate": 2.0508e-05, "loss": 0.0619, "step": 6839 }, { "epoch": 7.549972390944229, "grad_norm": 0.629112958908081, "learning_rate": 2.0511e-05, "loss": 0.0226, "step": 6840 }, { "epoch": 7.551076753175042, "grad_norm": 0.5389326214790344, "learning_rate": 2.0514e-05, "loss": 0.0183, "step": 6841 }, { "epoch": 7.552181115405853, "grad_norm": 0.5298333764076233, "learning_rate": 2.0517e-05, "loss": 0.0344, "step": 6842 }, { "epoch": 7.553285477636665, "grad_norm": 1.3223350048065186, "learning_rate": 2.0520000000000003e-05, "loss": 0.289, "step": 6843 }, { "epoch": 7.5543898398674765, "grad_norm": 1.005728840827942, "learning_rate": 2.0523000000000003e-05, "loss": 0.253, "step": 6844 }, { "epoch": 7.555494202098288, "grad_norm": 0.9575182199478149, "learning_rate": 2.0526000000000003e-05, "loss": 0.1609, "step": 6845 }, { "epoch": 7.5565985643291, "grad_norm": 0.662824273109436, "learning_rate": 2.0529e-05, "loss": 0.1562, "step": 6846 }, { "epoch": 7.557702926559911, "grad_norm": 0.8114109635353088, "learning_rate": 2.0532e-05, 
"loss": 0.1251, "step": 6847 }, { "epoch": 7.558807288790724, "grad_norm": 0.8220017552375793, "learning_rate": 2.0535e-05, "loss": 0.1376, "step": 6848 }, { "epoch": 7.559911651021535, "grad_norm": 0.40860337018966675, "learning_rate": 2.0538e-05, "loss": 0.0787, "step": 6849 }, { "epoch": 7.561016013252347, "grad_norm": 0.5124431252479553, "learning_rate": 2.0541e-05, "loss": 0.0781, "step": 6850 }, { "epoch": 7.5621203754831585, "grad_norm": 0.4708825647830963, "learning_rate": 2.0544e-05, "loss": 0.052, "step": 6851 }, { "epoch": 7.56322473771397, "grad_norm": 0.4094526469707489, "learning_rate": 2.0546999999999998e-05, "loss": 0.0439, "step": 6852 }, { "epoch": 7.564329099944782, "grad_norm": 1.1735163927078247, "learning_rate": 2.055e-05, "loss": 0.0813, "step": 6853 }, { "epoch": 7.565433462175593, "grad_norm": 0.26898443698883057, "learning_rate": 2.0553e-05, "loss": 0.0169, "step": 6854 }, { "epoch": 7.566537824406406, "grad_norm": 0.35155725479125977, "learning_rate": 2.0556e-05, "loss": 0.0264, "step": 6855 }, { "epoch": 7.567642186637217, "grad_norm": 0.3960714638233185, "learning_rate": 2.0559e-05, "loss": 0.0387, "step": 6856 }, { "epoch": 7.568746548868028, "grad_norm": 0.35883161425590515, "learning_rate": 2.0562e-05, "loss": 0.0298, "step": 6857 }, { "epoch": 7.569850911098841, "grad_norm": 0.4772499203681946, "learning_rate": 2.0565e-05, "loss": 0.0284, "step": 6858 }, { "epoch": 7.570955273329652, "grad_norm": 0.5257848501205444, "learning_rate": 2.0568e-05, "loss": 0.0159, "step": 6859 }, { "epoch": 7.572059635560464, "grad_norm": 0.47156068682670593, "learning_rate": 2.0571e-05, "loss": 0.0339, "step": 6860 }, { "epoch": 7.5731639977912755, "grad_norm": 0.5004174709320068, "learning_rate": 2.0574e-05, "loss": 0.0284, "step": 6861 }, { "epoch": 7.574268360022087, "grad_norm": 0.41448816657066345, "learning_rate": 2.0577e-05, "loss": 0.0239, "step": 6862 }, { "epoch": 7.575372722252899, "grad_norm": 0.2694776952266693, "learning_rate": 
2.0580000000000003e-05, "loss": 0.014, "step": 6863 }, { "epoch": 7.57647708448371, "grad_norm": 0.35985538363456726, "learning_rate": 2.0583000000000003e-05, "loss": 0.0345, "step": 6864 }, { "epoch": 7.577581446714523, "grad_norm": 0.3249599039554596, "learning_rate": 2.0586000000000002e-05, "loss": 0.0213, "step": 6865 }, { "epoch": 7.578685808945334, "grad_norm": 0.5593196153640747, "learning_rate": 2.0589000000000002e-05, "loss": 0.0266, "step": 6866 }, { "epoch": 7.579790171176146, "grad_norm": 0.2990180253982544, "learning_rate": 2.0592000000000002e-05, "loss": 0.0227, "step": 6867 }, { "epoch": 7.5808945334069575, "grad_norm": 0.5661168098449707, "learning_rate": 2.0595000000000002e-05, "loss": 0.0216, "step": 6868 }, { "epoch": 7.581998895637769, "grad_norm": 0.4402409493923187, "learning_rate": 2.0598e-05, "loss": 0.0229, "step": 6869 }, { "epoch": 7.583103257868581, "grad_norm": 0.5749621987342834, "learning_rate": 2.0601e-05, "loss": 0.0168, "step": 6870 }, { "epoch": 7.584207620099392, "grad_norm": 0.7367793321609497, "learning_rate": 2.0603999999999998e-05, "loss": 0.0329, "step": 6871 }, { "epoch": 7.585311982330205, "grad_norm": 0.4123164713382721, "learning_rate": 2.0606999999999998e-05, "loss": 0.0191, "step": 6872 }, { "epoch": 7.586416344561016, "grad_norm": 0.5591285824775696, "learning_rate": 2.061e-05, "loss": 0.0144, "step": 6873 }, { "epoch": 7.587520706791828, "grad_norm": 0.45962414145469666, "learning_rate": 2.0613e-05, "loss": 0.0201, "step": 6874 }, { "epoch": 7.58862506902264, "grad_norm": 0.37966060638427734, "learning_rate": 2.0616e-05, "loss": 0.0155, "step": 6875 }, { "epoch": 7.589729431253451, "grad_norm": 0.5612559914588928, "learning_rate": 2.0619e-05, "loss": 0.022, "step": 6876 }, { "epoch": 7.590833793484263, "grad_norm": 0.34141167998313904, "learning_rate": 2.0622e-05, "loss": 0.0213, "step": 6877 }, { "epoch": 7.5919381557150745, "grad_norm": 0.3559190034866333, "learning_rate": 2.0625e-05, "loss": 0.0263, "step": 6878 
}, { "epoch": 7.593042517945886, "grad_norm": 0.593887448310852, "learning_rate": 2.0628e-05, "loss": 0.0322, "step": 6879 }, { "epoch": 7.594146880176698, "grad_norm": 0.41768568754196167, "learning_rate": 2.0631e-05, "loss": 0.0269, "step": 6880 }, { "epoch": 7.595251242407509, "grad_norm": 0.5057148933410645, "learning_rate": 2.0634e-05, "loss": 0.0286, "step": 6881 }, { "epoch": 7.596355604638322, "grad_norm": 0.4230174124240875, "learning_rate": 2.0637e-05, "loss": 0.0226, "step": 6882 }, { "epoch": 7.597459966869133, "grad_norm": 0.5834264755249023, "learning_rate": 2.064e-05, "loss": 0.0225, "step": 6883 }, { "epoch": 7.598564329099945, "grad_norm": 0.6782994866371155, "learning_rate": 2.0643000000000002e-05, "loss": 0.0262, "step": 6884 }, { "epoch": 7.5996686913307565, "grad_norm": 0.6070343255996704, "learning_rate": 2.0646000000000002e-05, "loss": 0.0338, "step": 6885 }, { "epoch": 7.600773053561568, "grad_norm": 0.817694902420044, "learning_rate": 2.0649e-05, "loss": 0.0364, "step": 6886 }, { "epoch": 7.60187741579238, "grad_norm": 0.32549768686294556, "learning_rate": 2.0652e-05, "loss": 0.0218, "step": 6887 }, { "epoch": 7.602981778023191, "grad_norm": 0.3521968126296997, "learning_rate": 2.0655e-05, "loss": 0.0179, "step": 6888 }, { "epoch": 7.604086140254004, "grad_norm": 0.609268844127655, "learning_rate": 2.0658e-05, "loss": 0.0256, "step": 6889 }, { "epoch": 7.605190502484815, "grad_norm": 0.6589600443840027, "learning_rate": 2.0661e-05, "loss": 0.0191, "step": 6890 }, { "epoch": 7.606294864715627, "grad_norm": 1.3641434907913208, "learning_rate": 2.0664e-05, "loss": 0.064, "step": 6891 }, { "epoch": 7.6073992269464386, "grad_norm": 0.7334600687026978, "learning_rate": 2.0667e-05, "loss": 0.0205, "step": 6892 }, { "epoch": 7.60850358917725, "grad_norm": 1.243394374847412, "learning_rate": 2.067e-05, "loss": 0.3979, "step": 6893 }, { "epoch": 7.609607951408062, "grad_norm": 0.8191183805465698, "learning_rate": 2.0673000000000003e-05, "loss": 
0.1959, "step": 6894 }, { "epoch": 7.6107123136388735, "grad_norm": 0.9191598892211914, "learning_rate": 2.0676e-05, "loss": 0.1821, "step": 6895 }, { "epoch": 7.611816675869685, "grad_norm": 0.7864307761192322, "learning_rate": 2.0679e-05, "loss": 0.155, "step": 6896 }, { "epoch": 7.612921038100497, "grad_norm": 0.9620609879493713, "learning_rate": 2.0682e-05, "loss": 0.1559, "step": 6897 }, { "epoch": 7.614025400331308, "grad_norm": 1.065793514251709, "learning_rate": 2.0685e-05, "loss": 0.1603, "step": 6898 }, { "epoch": 7.615129762562121, "grad_norm": 0.38251155614852905, "learning_rate": 2.0688e-05, "loss": 0.0522, "step": 6899 }, { "epoch": 7.616234124792932, "grad_norm": 1.6397813558578491, "learning_rate": 2.0691e-05, "loss": 0.071, "step": 6900 }, { "epoch": 7.617338487023744, "grad_norm": 0.4262763559818268, "learning_rate": 2.0694e-05, "loss": 0.0525, "step": 6901 }, { "epoch": 7.6184428492545555, "grad_norm": 0.6408179402351379, "learning_rate": 2.0697e-05, "loss": 0.0682, "step": 6902 }, { "epoch": 7.619547211485367, "grad_norm": 0.24151355028152466, "learning_rate": 2.07e-05, "loss": 0.0188, "step": 6903 }, { "epoch": 7.620651573716179, "grad_norm": 0.3730382025241852, "learning_rate": 2.0703e-05, "loss": 0.0236, "step": 6904 }, { "epoch": 7.62175593594699, "grad_norm": 0.4932500422000885, "learning_rate": 2.0706e-05, "loss": 0.0333, "step": 6905 }, { "epoch": 7.622860298177803, "grad_norm": 0.4155561625957489, "learning_rate": 2.0709e-05, "loss": 0.0197, "step": 6906 }, { "epoch": 7.623964660408614, "grad_norm": 0.5520433783531189, "learning_rate": 2.0712e-05, "loss": 0.0209, "step": 6907 }, { "epoch": 7.625069022639426, "grad_norm": 0.3815710246562958, "learning_rate": 2.0715e-05, "loss": 0.0312, "step": 6908 }, { "epoch": 7.6261733848702375, "grad_norm": 0.4496435523033142, "learning_rate": 2.0718e-05, "loss": 0.0224, "step": 6909 }, { "epoch": 7.627277747101049, "grad_norm": 0.4225694537162781, "learning_rate": 2.0721e-05, "loss": 0.0216, "step": 
6910 }, { "epoch": 7.628382109331861, "grad_norm": 0.48639118671417236, "learning_rate": 2.0724e-05, "loss": 0.0288, "step": 6911 }, { "epoch": 7.6294864715626725, "grad_norm": 0.43995338678359985, "learning_rate": 2.0727e-05, "loss": 0.0247, "step": 6912 }, { "epoch": 7.630590833793484, "grad_norm": 0.42106524109840393, "learning_rate": 2.073e-05, "loss": 0.0262, "step": 6913 }, { "epoch": 7.631695196024296, "grad_norm": 0.33693552017211914, "learning_rate": 2.0733000000000003e-05, "loss": 0.025, "step": 6914 }, { "epoch": 7.632799558255107, "grad_norm": 0.4313447177410126, "learning_rate": 2.0736000000000003e-05, "loss": 0.0159, "step": 6915 }, { "epoch": 7.63390392048592, "grad_norm": 0.4135974943637848, "learning_rate": 2.0739000000000003e-05, "loss": 0.0108, "step": 6916 }, { "epoch": 7.635008282716731, "grad_norm": 0.4517279267311096, "learning_rate": 2.0742000000000002e-05, "loss": 0.0176, "step": 6917 }, { "epoch": 7.636112644947543, "grad_norm": 0.35124853253364563, "learning_rate": 2.0745000000000002e-05, "loss": 0.023, "step": 6918 }, { "epoch": 7.6372170071783545, "grad_norm": 0.6411839127540588, "learning_rate": 2.0748000000000002e-05, "loss": 0.03, "step": 6919 }, { "epoch": 7.638321369409166, "grad_norm": 0.2999049723148346, "learning_rate": 2.0751e-05, "loss": 0.0285, "step": 6920 }, { "epoch": 7.639425731639978, "grad_norm": 0.25808972120285034, "learning_rate": 2.0753999999999998e-05, "loss": 0.0209, "step": 6921 }, { "epoch": 7.640530093870789, "grad_norm": 0.787501335144043, "learning_rate": 2.0756999999999998e-05, "loss": 0.0221, "step": 6922 }, { "epoch": 7.641634456101602, "grad_norm": 0.4872530698776245, "learning_rate": 2.0759999999999998e-05, "loss": 0.0208, "step": 6923 }, { "epoch": 7.642738818332413, "grad_norm": 0.5438408851623535, "learning_rate": 2.0763e-05, "loss": 0.0459, "step": 6924 }, { "epoch": 7.643843180563225, "grad_norm": 1.3942182064056396, "learning_rate": 2.0766e-05, "loss": 0.0373, "step": 6925 }, { "epoch": 
7.6449475427940365, "grad_norm": 0.6991454362869263, "learning_rate": 2.0769e-05, "loss": 0.0295, "step": 6926 }, { "epoch": 7.646051905024848, "grad_norm": 0.5344070792198181, "learning_rate": 2.0772e-05, "loss": 0.0289, "step": 6927 }, { "epoch": 7.64715626725566, "grad_norm": 0.350202351808548, "learning_rate": 2.0775e-05, "loss": 0.0265, "step": 6928 }, { "epoch": 7.6482606294864715, "grad_norm": 0.49033257365226746, "learning_rate": 2.0778e-05, "loss": 0.023, "step": 6929 }, { "epoch": 7.649364991717283, "grad_norm": 0.4686620831489563, "learning_rate": 2.0781e-05, "loss": 0.0125, "step": 6930 }, { "epoch": 7.650469353948095, "grad_norm": 0.35244351625442505, "learning_rate": 2.0784e-05, "loss": 0.0128, "step": 6931 }, { "epoch": 7.651573716178906, "grad_norm": 0.29572710394859314, "learning_rate": 2.0787e-05, "loss": 0.0156, "step": 6932 }, { "epoch": 7.652678078409719, "grad_norm": 0.824554443359375, "learning_rate": 2.079e-05, "loss": 0.034, "step": 6933 }, { "epoch": 7.65378244064053, "grad_norm": 0.591525673866272, "learning_rate": 2.0793000000000002e-05, "loss": 0.0353, "step": 6934 }, { "epoch": 7.654886802871342, "grad_norm": 0.6514571309089661, "learning_rate": 2.0796000000000002e-05, "loss": 0.0209, "step": 6935 }, { "epoch": 7.6559911651021535, "grad_norm": 0.5105667114257812, "learning_rate": 2.0799000000000002e-05, "loss": 0.024, "step": 6936 }, { "epoch": 7.657095527332965, "grad_norm": 0.4531562328338623, "learning_rate": 2.0802000000000002e-05, "loss": 0.0216, "step": 6937 }, { "epoch": 7.658199889563777, "grad_norm": 1.0390657186508179, "learning_rate": 2.0805e-05, "loss": 0.0258, "step": 6938 }, { "epoch": 7.659304251794588, "grad_norm": 0.7527960538864136, "learning_rate": 2.0808e-05, "loss": 0.0257, "step": 6939 }, { "epoch": 7.660408614025401, "grad_norm": 0.5247649550437927, "learning_rate": 2.0811e-05, "loss": 0.0318, "step": 6940 }, { "epoch": 7.661512976256212, "grad_norm": 0.5680697560310364, "learning_rate": 2.0814e-05, "loss": 
0.0139, "step": 6941 }, { "epoch": 7.662617338487024, "grad_norm": 0.7761052846908569, "learning_rate": 2.0817e-05, "loss": 0.0215, "step": 6942 }, { "epoch": 7.6637217007178355, "grad_norm": 1.1671987771987915, "learning_rate": 2.082e-05, "loss": 0.3074, "step": 6943 }, { "epoch": 7.664826062948647, "grad_norm": 0.9240758419036865, "learning_rate": 2.0823e-05, "loss": 0.2156, "step": 6944 }, { "epoch": 7.665930425179459, "grad_norm": 0.7952302098274231, "learning_rate": 2.0826e-05, "loss": 0.1845, "step": 6945 }, { "epoch": 7.6670347874102704, "grad_norm": 0.6862605810165405, "learning_rate": 2.0829e-05, "loss": 0.153, "step": 6946 }, { "epoch": 7.668139149641082, "grad_norm": 0.6456018686294556, "learning_rate": 2.0832e-05, "loss": 0.1733, "step": 6947 }, { "epoch": 7.669243511871894, "grad_norm": 0.6711046099662781, "learning_rate": 2.0835e-05, "loss": 0.1253, "step": 6948 }, { "epoch": 7.670347874102705, "grad_norm": 0.6642774939537048, "learning_rate": 2.0838e-05, "loss": 0.1044, "step": 6949 }, { "epoch": 7.671452236333518, "grad_norm": 1.1939197778701782, "learning_rate": 2.0841e-05, "loss": 0.1318, "step": 6950 }, { "epoch": 7.672556598564329, "grad_norm": 0.8096734285354614, "learning_rate": 2.0844e-05, "loss": 0.0621, "step": 6951 }, { "epoch": 7.673660960795141, "grad_norm": 0.3223158121109009, "learning_rate": 2.0847e-05, "loss": 0.0335, "step": 6952 }, { "epoch": 7.6747653230259525, "grad_norm": 0.5224311947822571, "learning_rate": 2.085e-05, "loss": 0.0368, "step": 6953 }, { "epoch": 7.675869685256764, "grad_norm": 0.4355992078781128, "learning_rate": 2.0853000000000002e-05, "loss": 0.0415, "step": 6954 }, { "epoch": 7.676974047487576, "grad_norm": 0.6273760795593262, "learning_rate": 2.0856e-05, "loss": 0.0484, "step": 6955 }, { "epoch": 7.678078409718387, "grad_norm": 0.3655872642993927, "learning_rate": 2.0859e-05, "loss": 0.0184, "step": 6956 }, { "epoch": 7.6791827719492, "grad_norm": 0.29881829023361206, "learning_rate": 2.0862e-05, "loss": 
0.0199, "step": 6957 }, { "epoch": 7.680287134180011, "grad_norm": 0.2958287298679352, "learning_rate": 2.0865e-05, "loss": 0.0223, "step": 6958 }, { "epoch": 7.681391496410823, "grad_norm": 0.2836463749408722, "learning_rate": 2.0868e-05, "loss": 0.0199, "step": 6959 }, { "epoch": 7.6824958586416345, "grad_norm": 0.3060131371021271, "learning_rate": 2.0871e-05, "loss": 0.0166, "step": 6960 }, { "epoch": 7.683600220872446, "grad_norm": 0.26742416620254517, "learning_rate": 2.0874e-05, "loss": 0.0184, "step": 6961 }, { "epoch": 7.684704583103258, "grad_norm": 0.44067662954330444, "learning_rate": 2.0877e-05, "loss": 0.021, "step": 6962 }, { "epoch": 7.685808945334069, "grad_norm": 0.2691965699195862, "learning_rate": 2.088e-05, "loss": 0.0153, "step": 6963 }, { "epoch": 7.686913307564881, "grad_norm": 0.4168700575828552, "learning_rate": 2.0883000000000003e-05, "loss": 0.023, "step": 6964 }, { "epoch": 7.688017669795693, "grad_norm": 0.5158352851867676, "learning_rate": 2.0886000000000003e-05, "loss": 0.0288, "step": 6965 }, { "epoch": 7.689122032026504, "grad_norm": 0.446834921836853, "learning_rate": 2.0889000000000003e-05, "loss": 0.0583, "step": 6966 }, { "epoch": 7.690226394257317, "grad_norm": 0.3435652256011963, "learning_rate": 2.0892000000000003e-05, "loss": 0.0186, "step": 6967 }, { "epoch": 7.691330756488128, "grad_norm": 0.3728223741054535, "learning_rate": 2.0895000000000002e-05, "loss": 0.0184, "step": 6968 }, { "epoch": 7.69243511871894, "grad_norm": 0.2589348256587982, "learning_rate": 2.0898e-05, "loss": 0.0142, "step": 6969 }, { "epoch": 7.6935394809497515, "grad_norm": 0.3538074493408203, "learning_rate": 2.0901e-05, "loss": 0.0255, "step": 6970 }, { "epoch": 7.694643843180563, "grad_norm": 0.39360010623931885, "learning_rate": 2.0904e-05, "loss": 0.0128, "step": 6971 }, { "epoch": 7.695748205411375, "grad_norm": 0.5957412719726562, "learning_rate": 2.0906999999999998e-05, "loss": 0.0201, "step": 6972 }, { "epoch": 7.696852567642186, "grad_norm": 
0.4464495778083801, "learning_rate": 2.0909999999999998e-05, "loss": 0.0228, "step": 6973 }, { "epoch": 7.697956929872999, "grad_norm": 0.6410707831382751, "learning_rate": 2.0913e-05, "loss": 0.012, "step": 6974 }, { "epoch": 7.69906129210381, "grad_norm": 0.5174223184585571, "learning_rate": 2.0916e-05, "loss": 0.0256, "step": 6975 }, { "epoch": 7.700165654334622, "grad_norm": 0.6255757808685303, "learning_rate": 2.0919e-05, "loss": 0.0226, "step": 6976 }, { "epoch": 7.7012700165654335, "grad_norm": 0.7186802625656128, "learning_rate": 2.0922e-05, "loss": 0.0321, "step": 6977 }, { "epoch": 7.702374378796245, "grad_norm": 0.5703628063201904, "learning_rate": 2.0925e-05, "loss": 0.0301, "step": 6978 }, { "epoch": 7.703478741027057, "grad_norm": 0.33555856347084045, "learning_rate": 2.0928e-05, "loss": 0.0232, "step": 6979 }, { "epoch": 7.704583103257868, "grad_norm": 0.4638725221157074, "learning_rate": 2.0931e-05, "loss": 0.0305, "step": 6980 }, { "epoch": 7.705687465488681, "grad_norm": 0.6292564868927002, "learning_rate": 2.0934e-05, "loss": 0.0346, "step": 6981 }, { "epoch": 7.706791827719492, "grad_norm": 0.4324045181274414, "learning_rate": 2.0937e-05, "loss": 0.0251, "step": 6982 }, { "epoch": 7.707896189950303, "grad_norm": 0.6007684469223022, "learning_rate": 2.094e-05, "loss": 0.0259, "step": 6983 }, { "epoch": 7.709000552181116, "grad_norm": 0.4950233995914459, "learning_rate": 2.0943000000000003e-05, "loss": 0.026, "step": 6984 }, { "epoch": 7.710104914411927, "grad_norm": 0.3430192470550537, "learning_rate": 2.0946000000000002e-05, "loss": 0.0278, "step": 6985 }, { "epoch": 7.711209276642739, "grad_norm": 0.3497082591056824, "learning_rate": 2.0949000000000002e-05, "loss": 0.0215, "step": 6986 }, { "epoch": 7.7123136388735505, "grad_norm": 0.3834754526615143, "learning_rate": 2.0952000000000002e-05, "loss": 0.0303, "step": 6987 }, { "epoch": 7.713418001104362, "grad_norm": 0.5508893132209778, "learning_rate": 2.0955000000000002e-05, "loss": 0.0176, 
"step": 6988 }, { "epoch": 7.714522363335174, "grad_norm": 2.24371075630188, "learning_rate": 2.0958e-05, "loss": 0.0395, "step": 6989 }, { "epoch": 7.715626725565985, "grad_norm": 1.009516716003418, "learning_rate": 2.0961e-05, "loss": 0.0363, "step": 6990 }, { "epoch": 7.716731087796798, "grad_norm": 0.8181620240211487, "learning_rate": 2.0964e-05, "loss": 0.0352, "step": 6991 }, { "epoch": 7.717835450027609, "grad_norm": 0.6441917419433594, "learning_rate": 2.0967e-05, "loss": 0.0435, "step": 6992 }, { "epoch": 7.718939812258421, "grad_norm": 1.3022559881210327, "learning_rate": 2.097e-05, "loss": 0.3337, "step": 6993 }, { "epoch": 7.7200441744892325, "grad_norm": 0.7854050993919373, "learning_rate": 2.0973e-05, "loss": 0.2644, "step": 6994 }, { "epoch": 7.721148536720044, "grad_norm": 1.0695608854293823, "learning_rate": 2.0976e-05, "loss": 0.2138, "step": 6995 }, { "epoch": 7.722252898950856, "grad_norm": 1.452752709388733, "learning_rate": 2.0979e-05, "loss": 0.2211, "step": 6996 }, { "epoch": 7.723357261181667, "grad_norm": 1.3213509321212769, "learning_rate": 2.0982e-05, "loss": 0.1786, "step": 6997 }, { "epoch": 7.72446162341248, "grad_norm": 0.9624256491661072, "learning_rate": 2.0985e-05, "loss": 0.1302, "step": 6998 }, { "epoch": 7.725565985643291, "grad_norm": 0.6789586544036865, "learning_rate": 2.0988e-05, "loss": 0.1185, "step": 6999 }, { "epoch": 7.726670347874102, "grad_norm": 0.5043309926986694, "learning_rate": 2.0991e-05, "loss": 0.0644, "step": 7000 }, { "epoch": 7.726670347874102, "eval_cer": 0.1205640666718511, "eval_loss": 0.3462408483028412, "eval_runtime": 15.9317, "eval_samples_per_second": 19.081, "eval_steps_per_second": 0.628, "eval_wer": 0.41596316193399846, "step": 7000 }, { "epoch": 7.727774710104915, "grad_norm": 0.6057025194168091, "learning_rate": 2.0994e-05, "loss": 0.0672, "step": 7001 }, { "epoch": 7.728879072335726, "grad_norm": 0.5670091509819031, "learning_rate": 2.0997e-05, "loss": 0.0388, "step": 7002 }, { "epoch": 
7.729983434566538, "grad_norm": 0.5187705159187317, "learning_rate": 2.1e-05, "loss": 0.047, "step": 7003 }, { "epoch": 7.7310877967973495, "grad_norm": 0.48687800765037537, "learning_rate": 2.1003e-05, "loss": 0.0354, "step": 7004 }, { "epoch": 7.732192159028161, "grad_norm": 0.34026870131492615, "learning_rate": 2.1006000000000002e-05, "loss": 0.0187, "step": 7005 }, { "epoch": 7.733296521258973, "grad_norm": 0.5488684773445129, "learning_rate": 2.1009e-05, "loss": 0.0711, "step": 7006 }, { "epoch": 7.734400883489784, "grad_norm": 0.3946409821510315, "learning_rate": 2.1012e-05, "loss": 0.0285, "step": 7007 }, { "epoch": 7.735505245720597, "grad_norm": 0.360228568315506, "learning_rate": 2.1015e-05, "loss": 0.0192, "step": 7008 }, { "epoch": 7.736609607951408, "grad_norm": 0.41074419021606445, "learning_rate": 2.1018e-05, "loss": 0.0169, "step": 7009 }, { "epoch": 7.73771397018222, "grad_norm": 0.38817861676216125, "learning_rate": 2.1021e-05, "loss": 0.0357, "step": 7010 }, { "epoch": 7.7388183324130315, "grad_norm": 0.34264087677001953, "learning_rate": 2.1024e-05, "loss": 0.027, "step": 7011 }, { "epoch": 7.739922694643843, "grad_norm": 0.34491726756095886, "learning_rate": 2.1027e-05, "loss": 0.0156, "step": 7012 }, { "epoch": 7.741027056874655, "grad_norm": 0.5927276611328125, "learning_rate": 2.103e-05, "loss": 0.0345, "step": 7013 }, { "epoch": 7.742131419105466, "grad_norm": 0.6777938008308411, "learning_rate": 2.1033e-05, "loss": 0.0394, "step": 7014 }, { "epoch": 7.743235781336279, "grad_norm": 0.8086104989051819, "learning_rate": 2.1036000000000003e-05, "loss": 0.0274, "step": 7015 }, { "epoch": 7.74434014356709, "grad_norm": 0.5806254744529724, "learning_rate": 2.1039000000000003e-05, "loss": 0.0247, "step": 7016 }, { "epoch": 7.745444505797901, "grad_norm": 0.23953160643577576, "learning_rate": 2.1042000000000003e-05, "loss": 0.0177, "step": 7017 }, { "epoch": 7.746548868028714, "grad_norm": 0.4371981918811798, "learning_rate": 2.1045e-05, "loss": 
0.0289, "step": 7018 }, { "epoch": 7.747653230259525, "grad_norm": 0.4038844704627991, "learning_rate": 2.1048e-05, "loss": 0.0246, "step": 7019 }, { "epoch": 7.748757592490337, "grad_norm": 1.1464108228683472, "learning_rate": 2.1051e-05, "loss": 0.0349, "step": 7020 }, { "epoch": 7.7498619547211485, "grad_norm": 0.4411889910697937, "learning_rate": 2.1054e-05, "loss": 0.0266, "step": 7021 }, { "epoch": 7.75096631695196, "grad_norm": 0.4152282774448395, "learning_rate": 2.1057e-05, "loss": 0.0225, "step": 7022 }, { "epoch": 7.752070679182772, "grad_norm": 0.554194986820221, "learning_rate": 2.1059999999999998e-05, "loss": 0.0373, "step": 7023 }, { "epoch": 7.753175041413583, "grad_norm": 0.7445836663246155, "learning_rate": 2.1062999999999998e-05, "loss": 0.0261, "step": 7024 }, { "epoch": 7.754279403644396, "grad_norm": 0.6393954157829285, "learning_rate": 2.1066e-05, "loss": 0.0316, "step": 7025 }, { "epoch": 7.755383765875207, "grad_norm": 0.3165627717971802, "learning_rate": 2.1069e-05, "loss": 0.0204, "step": 7026 }, { "epoch": 7.756488128106019, "grad_norm": 0.6307549476623535, "learning_rate": 2.1072e-05, "loss": 0.0209, "step": 7027 }, { "epoch": 7.7575924903368305, "grad_norm": 0.5268778204917908, "learning_rate": 2.1075e-05, "loss": 0.0342, "step": 7028 }, { "epoch": 7.758696852567642, "grad_norm": 0.2651706337928772, "learning_rate": 2.1078e-05, "loss": 0.0157, "step": 7029 }, { "epoch": 7.759801214798454, "grad_norm": 0.4520755708217621, "learning_rate": 2.1081e-05, "loss": 0.0222, "step": 7030 }, { "epoch": 7.760905577029265, "grad_norm": 0.4653852880001068, "learning_rate": 2.1084e-05, "loss": 0.0234, "step": 7031 }, { "epoch": 7.762009939260078, "grad_norm": 0.6314061284065247, "learning_rate": 2.1087e-05, "loss": 0.0314, "step": 7032 }, { "epoch": 7.763114301490889, "grad_norm": 0.3797120153903961, "learning_rate": 2.109e-05, "loss": 0.0256, "step": 7033 }, { "epoch": 7.7642186637217, "grad_norm": 0.3470399081707001, "learning_rate": 2.1093e-05, 
"loss": 0.0213, "step": 7034 }, { "epoch": 7.765323025952513, "grad_norm": 0.38656023144721985, "learning_rate": 2.1096000000000003e-05, "loss": 0.0243, "step": 7035 }, { "epoch": 7.766427388183324, "grad_norm": 0.43927136063575745, "learning_rate": 2.1099000000000002e-05, "loss": 0.0254, "step": 7036 }, { "epoch": 7.767531750414136, "grad_norm": 0.800735592842102, "learning_rate": 2.1102000000000002e-05, "loss": 0.0255, "step": 7037 }, { "epoch": 7.7686361126449475, "grad_norm": 0.4838900864124298, "learning_rate": 2.1105000000000002e-05, "loss": 0.0229, "step": 7038 }, { "epoch": 7.769740474875759, "grad_norm": 0.3150424361228943, "learning_rate": 2.1108000000000002e-05, "loss": 0.0168, "step": 7039 }, { "epoch": 7.770844837106571, "grad_norm": 0.4561908543109894, "learning_rate": 2.1111e-05, "loss": 0.0288, "step": 7040 }, { "epoch": 7.771949199337382, "grad_norm": 1.1956506967544556, "learning_rate": 2.1114e-05, "loss": 0.0345, "step": 7041 }, { "epoch": 7.773053561568195, "grad_norm": 0.6841132044792175, "learning_rate": 2.1117e-05, "loss": 0.0357, "step": 7042 }, { "epoch": 7.774157923799006, "grad_norm": 1.3680870532989502, "learning_rate": 2.1119999999999998e-05, "loss": 0.2636, "step": 7043 }, { "epoch": 7.775262286029818, "grad_norm": 1.2463397979736328, "learning_rate": 2.1122999999999997e-05, "loss": 0.2209, "step": 7044 }, { "epoch": 7.7763666482606295, "grad_norm": 0.8572904467582703, "learning_rate": 2.1126e-05, "loss": 0.1761, "step": 7045 }, { "epoch": 7.777471010491441, "grad_norm": 1.0414077043533325, "learning_rate": 2.1129e-05, "loss": 0.2558, "step": 7046 }, { "epoch": 7.778575372722253, "grad_norm": 0.9266997575759888, "learning_rate": 2.1132e-05, "loss": 0.1659, "step": 7047 }, { "epoch": 7.779679734953064, "grad_norm": 0.7302135229110718, "learning_rate": 2.1135e-05, "loss": 0.1226, "step": 7048 }, { "epoch": 7.780784097183877, "grad_norm": 0.8058311939239502, "learning_rate": 2.1138e-05, "loss": 0.0781, "step": 7049 }, { "epoch": 
7.781888459414688, "grad_norm": 0.5281840562820435, "learning_rate": 2.1141e-05, "loss": 0.0833, "step": 7050 }, { "epoch": 7.7829928216455, "grad_norm": 0.48339468240737915, "learning_rate": 2.1144e-05, "loss": 0.0618, "step": 7051 }, { "epoch": 7.7840971838763116, "grad_norm": 0.3862380385398865, "learning_rate": 2.1147e-05, "loss": 0.0434, "step": 7052 }, { "epoch": 7.785201546107123, "grad_norm": 0.3971157670021057, "learning_rate": 2.115e-05, "loss": 0.029, "step": 7053 }, { "epoch": 7.786305908337935, "grad_norm": 0.3395896255970001, "learning_rate": 2.1153e-05, "loss": 0.0353, "step": 7054 }, { "epoch": 7.7874102705687465, "grad_norm": 0.3030087947845459, "learning_rate": 2.1156000000000002e-05, "loss": 0.0171, "step": 7055 }, { "epoch": 7.788514632799558, "grad_norm": 0.9380459189414978, "learning_rate": 2.1159000000000002e-05, "loss": 0.027, "step": 7056 }, { "epoch": 7.78961899503037, "grad_norm": 0.40040454268455505, "learning_rate": 2.1162e-05, "loss": 0.0179, "step": 7057 }, { "epoch": 7.790723357261181, "grad_norm": 0.4194203019142151, "learning_rate": 2.1165e-05, "loss": 0.0265, "step": 7058 }, { "epoch": 7.791827719491994, "grad_norm": 0.5406920909881592, "learning_rate": 2.1168e-05, "loss": 0.0457, "step": 7059 }, { "epoch": 7.792932081722805, "grad_norm": 0.37522441148757935, "learning_rate": 2.1171e-05, "loss": 0.0228, "step": 7060 }, { "epoch": 7.794036443953617, "grad_norm": 0.3315814435482025, "learning_rate": 2.1174e-05, "loss": 0.0149, "step": 7061 }, { "epoch": 7.7951408061844285, "grad_norm": 0.5049982070922852, "learning_rate": 2.1177e-05, "loss": 0.0368, "step": 7062 }, { "epoch": 7.79624516841524, "grad_norm": 0.3059001863002777, "learning_rate": 2.118e-05, "loss": 0.0284, "step": 7063 }, { "epoch": 7.797349530646052, "grad_norm": 0.3872222602367401, "learning_rate": 2.1183e-05, "loss": 0.0271, "step": 7064 }, { "epoch": 7.798453892876863, "grad_norm": 0.3603285551071167, "learning_rate": 2.1186000000000003e-05, "loss": 0.0195, "step": 
7065 }, { "epoch": 7.799558255107676, "grad_norm": 0.3934186100959778, "learning_rate": 2.1189000000000003e-05, "loss": 0.0254, "step": 7066 }, { "epoch": 7.800662617338487, "grad_norm": 0.44680288434028625, "learning_rate": 2.1192e-05, "loss": 0.0439, "step": 7067 }, { "epoch": 7.801766979569299, "grad_norm": 0.31166738271713257, "learning_rate": 2.1195e-05, "loss": 0.0158, "step": 7068 }, { "epoch": 7.8028713418001105, "grad_norm": 0.520345151424408, "learning_rate": 2.1198e-05, "loss": 0.0306, "step": 7069 }, { "epoch": 7.803975704030922, "grad_norm": 0.2999816834926605, "learning_rate": 2.1201e-05, "loss": 0.0162, "step": 7070 }, { "epoch": 7.805080066261734, "grad_norm": 0.458686888217926, "learning_rate": 2.1204e-05, "loss": 0.0272, "step": 7071 }, { "epoch": 7.8061844284925455, "grad_norm": 0.5156791806221008, "learning_rate": 2.1207e-05, "loss": 0.0172, "step": 7072 }, { "epoch": 7.807288790723357, "grad_norm": 0.4607134759426117, "learning_rate": 2.121e-05, "loss": 0.032, "step": 7073 }, { "epoch": 7.808393152954169, "grad_norm": 0.484308123588562, "learning_rate": 2.1213e-05, "loss": 0.0247, "step": 7074 }, { "epoch": 7.80949751518498, "grad_norm": 0.34674328565597534, "learning_rate": 2.1216e-05, "loss": 0.0178, "step": 7075 }, { "epoch": 7.810601877415793, "grad_norm": 0.5546953082084656, "learning_rate": 2.1219e-05, "loss": 0.0207, "step": 7076 }, { "epoch": 7.811706239646604, "grad_norm": 0.4579960107803345, "learning_rate": 2.1222e-05, "loss": 0.028, "step": 7077 }, { "epoch": 7.812810601877416, "grad_norm": 0.34197455644607544, "learning_rate": 2.1225e-05, "loss": 0.0202, "step": 7078 }, { "epoch": 7.8139149641082275, "grad_norm": 0.35588309168815613, "learning_rate": 2.1228e-05, "loss": 0.0167, "step": 7079 }, { "epoch": 7.815019326339039, "grad_norm": 0.3409660756587982, "learning_rate": 2.1231e-05, "loss": 0.0243, "step": 7080 }, { "epoch": 7.816123688569851, "grad_norm": 2.0802717208862305, "learning_rate": 2.1234e-05, "loss": 0.0328, "step": 
7081 }, { "epoch": 7.817228050800662, "grad_norm": 0.9632081985473633, "learning_rate": 2.1237e-05, "loss": 0.0385, "step": 7082 }, { "epoch": 7.818332413031475, "grad_norm": 0.7221441268920898, "learning_rate": 2.124e-05, "loss": 0.0424, "step": 7083 }, { "epoch": 7.819436775262286, "grad_norm": 0.7445850372314453, "learning_rate": 2.1243e-05, "loss": 0.019, "step": 7084 }, { "epoch": 7.820541137493098, "grad_norm": 0.46733981370925903, "learning_rate": 2.1246000000000003e-05, "loss": 0.0327, "step": 7085 }, { "epoch": 7.8216454997239095, "grad_norm": 0.5349019169807434, "learning_rate": 2.1249000000000003e-05, "loss": 0.0359, "step": 7086 }, { "epoch": 7.822749861954721, "grad_norm": 0.3182936906814575, "learning_rate": 2.1252000000000003e-05, "loss": 0.0226, "step": 7087 }, { "epoch": 7.823854224185533, "grad_norm": 0.7290952205657959, "learning_rate": 2.1255000000000002e-05, "loss": 0.0289, "step": 7088 }, { "epoch": 7.8249585864163445, "grad_norm": 0.4161006510257721, "learning_rate": 2.1258000000000002e-05, "loss": 0.0275, "step": 7089 }, { "epoch": 7.826062948647156, "grad_norm": 0.5420160293579102, "learning_rate": 2.1261000000000002e-05, "loss": 0.0344, "step": 7090 }, { "epoch": 7.827167310877968, "grad_norm": 0.44901925325393677, "learning_rate": 2.1264000000000002e-05, "loss": 0.026, "step": 7091 }, { "epoch": 7.828271673108779, "grad_norm": 1.1506245136260986, "learning_rate": 2.1266999999999998e-05, "loss": 0.027, "step": 7092 }, { "epoch": 7.829376035339592, "grad_norm": 1.8274400234222412, "learning_rate": 2.1269999999999998e-05, "loss": 0.3415, "step": 7093 }, { "epoch": 7.830480397570403, "grad_norm": 1.02273428440094, "learning_rate": 2.1272999999999998e-05, "loss": 0.3505, "step": 7094 }, { "epoch": 7.831584759801215, "grad_norm": 0.6815208792686462, "learning_rate": 2.1276e-05, "loss": 0.2022, "step": 7095 }, { "epoch": 7.8326891220320265, "grad_norm": 0.7603879570960999, "learning_rate": 2.1279e-05, "loss": 0.1803, "step": 7096 }, { "epoch": 
7.833793484262838, "grad_norm": 0.8710023760795593, "learning_rate": 2.1282e-05, "loss": 0.1434, "step": 7097 }, { "epoch": 7.83489784649365, "grad_norm": 0.6623790264129639, "learning_rate": 2.1285e-05, "loss": 0.1001, "step": 7098 }, { "epoch": 7.836002208724461, "grad_norm": 1.0875263214111328, "learning_rate": 2.1288e-05, "loss": 0.146, "step": 7099 }, { "epoch": 7.837106570955274, "grad_norm": 0.7257238626480103, "learning_rate": 2.1291e-05, "loss": 0.0792, "step": 7100 }, { "epoch": 7.838210933186085, "grad_norm": 0.34212422370910645, "learning_rate": 2.1294e-05, "loss": 0.0345, "step": 7101 }, { "epoch": 7.839315295416897, "grad_norm": 0.5363221168518066, "learning_rate": 2.1297e-05, "loss": 0.0354, "step": 7102 }, { "epoch": 7.8404196576477085, "grad_norm": 0.4712171256542206, "learning_rate": 2.13e-05, "loss": 0.0337, "step": 7103 }, { "epoch": 7.84152401987852, "grad_norm": 0.4462595283985138, "learning_rate": 2.1303e-05, "loss": 0.0259, "step": 7104 }, { "epoch": 7.842628382109332, "grad_norm": 0.532339334487915, "learning_rate": 2.1306000000000002e-05, "loss": 0.0361, "step": 7105 }, { "epoch": 7.8437327443401434, "grad_norm": 0.6815587878227234, "learning_rate": 2.1309000000000002e-05, "loss": 0.0396, "step": 7106 }, { "epoch": 7.844837106570955, "grad_norm": 0.4078325033187866, "learning_rate": 2.1312000000000002e-05, "loss": 0.0241, "step": 7107 }, { "epoch": 7.845941468801767, "grad_norm": 0.5710703730583191, "learning_rate": 2.1315000000000002e-05, "loss": 0.0358, "step": 7108 }, { "epoch": 7.847045831032578, "grad_norm": 0.34674662351608276, "learning_rate": 2.1318e-05, "loss": 0.0262, "step": 7109 }, { "epoch": 7.848150193263391, "grad_norm": 0.5204010009765625, "learning_rate": 2.1321e-05, "loss": 0.0232, "step": 7110 }, { "epoch": 7.849254555494202, "grad_norm": 0.367915540933609, "learning_rate": 2.1324e-05, "loss": 0.0177, "step": 7111 }, { "epoch": 7.850358917725014, "grad_norm": 0.7823232412338257, "learning_rate": 2.1327e-05, "loss": 
0.0288, "step": 7112 }, { "epoch": 7.8514632799558255, "grad_norm": 0.401296466588974, "learning_rate": 2.133e-05, "loss": 0.0235, "step": 7113 }, { "epoch": 7.852567642186637, "grad_norm": 0.5684705376625061, "learning_rate": 2.1333e-05, "loss": 0.0229, "step": 7114 }, { "epoch": 7.853672004417449, "grad_norm": 0.5366519093513489, "learning_rate": 2.1336000000000004e-05, "loss": 0.0332, "step": 7115 }, { "epoch": 7.85477636664826, "grad_norm": 0.5246813893318176, "learning_rate": 2.1339e-05, "loss": 0.0209, "step": 7116 }, { "epoch": 7.855880728879073, "grad_norm": 0.5146178007125854, "learning_rate": 2.1342e-05, "loss": 0.0208, "step": 7117 }, { "epoch": 7.856985091109884, "grad_norm": 0.33649489283561707, "learning_rate": 2.1345e-05, "loss": 0.0149, "step": 7118 }, { "epoch": 7.858089453340696, "grad_norm": 0.5714642405509949, "learning_rate": 2.1348e-05, "loss": 0.0253, "step": 7119 }, { "epoch": 7.8591938155715075, "grad_norm": 0.5432299971580505, "learning_rate": 2.1351e-05, "loss": 0.0297, "step": 7120 }, { "epoch": 7.860298177802319, "grad_norm": 0.5726794004440308, "learning_rate": 2.1354e-05, "loss": 0.0142, "step": 7121 }, { "epoch": 7.861402540033131, "grad_norm": 0.32525715231895447, "learning_rate": 2.1357e-05, "loss": 0.0136, "step": 7122 }, { "epoch": 7.862506902263942, "grad_norm": 0.4718935191631317, "learning_rate": 2.136e-05, "loss": 0.0244, "step": 7123 }, { "epoch": 7.863611264494754, "grad_norm": 0.4091114103794098, "learning_rate": 2.1363e-05, "loss": 0.0347, "step": 7124 }, { "epoch": 7.864715626725566, "grad_norm": 0.5997706651687622, "learning_rate": 2.1366000000000002e-05, "loss": 0.0372, "step": 7125 }, { "epoch": 7.865819988956377, "grad_norm": 0.3104569911956787, "learning_rate": 2.1369e-05, "loss": 0.0192, "step": 7126 }, { "epoch": 7.86692435118719, "grad_norm": 0.6779515147209167, "learning_rate": 2.1372e-05, "loss": 0.0211, "step": 7127 }, { "epoch": 7.868028713418001, "grad_norm": 0.49278831481933594, "learning_rate": 2.1375e-05, 
"loss": 0.0194, "step": 7128 }, { "epoch": 7.869133075648813, "grad_norm": 0.5795642137527466, "learning_rate": 2.1378e-05, "loss": 0.0271, "step": 7129 }, { "epoch": 7.8702374378796245, "grad_norm": 0.5172497630119324, "learning_rate": 2.1381e-05, "loss": 0.0238, "step": 7130 }, { "epoch": 7.871341800110436, "grad_norm": 0.7012078166007996, "learning_rate": 2.1384e-05, "loss": 0.0247, "step": 7131 }, { "epoch": 7.872446162341248, "grad_norm": 0.33114567399024963, "learning_rate": 2.1387e-05, "loss": 0.0138, "step": 7132 }, { "epoch": 7.873550524572059, "grad_norm": 0.6898186802864075, "learning_rate": 2.139e-05, "loss": 0.0296, "step": 7133 }, { "epoch": 7.874654886802872, "grad_norm": 0.47242504358291626, "learning_rate": 2.1393e-05, "loss": 0.0167, "step": 7134 }, { "epoch": 7.875759249033683, "grad_norm": 0.375442236661911, "learning_rate": 2.1396e-05, "loss": 0.0159, "step": 7135 }, { "epoch": 7.876863611264495, "grad_norm": 0.631646454334259, "learning_rate": 2.1399000000000003e-05, "loss": 0.0325, "step": 7136 }, { "epoch": 7.8779679734953065, "grad_norm": 0.5846215486526489, "learning_rate": 2.1402000000000003e-05, "loss": 0.0277, "step": 7137 }, { "epoch": 7.879072335726118, "grad_norm": 0.6130900382995605, "learning_rate": 2.1405000000000003e-05, "loss": 0.0209, "step": 7138 }, { "epoch": 7.88017669795693, "grad_norm": 0.6741394400596619, "learning_rate": 2.1408000000000002e-05, "loss": 0.0277, "step": 7139 }, { "epoch": 7.881281060187741, "grad_norm": 0.5101789832115173, "learning_rate": 2.1411000000000002e-05, "loss": 0.0313, "step": 7140 }, { "epoch": 7.882385422418554, "grad_norm": 0.504234254360199, "learning_rate": 2.1414e-05, "loss": 0.0248, "step": 7141 }, { "epoch": 7.883489784649365, "grad_norm": 0.3612458109855652, "learning_rate": 2.1417e-05, "loss": 0.0169, "step": 7142 }, { "epoch": 7.884594146880176, "grad_norm": 1.4529391527175903, "learning_rate": 2.1419999999999998e-05, "loss": 0.3988, "step": 7143 }, { "epoch": 7.885698509110989, 
"grad_norm": 0.8600543141365051, "learning_rate": 2.1422999999999998e-05, "loss": 0.1984, "step": 7144 }, { "epoch": 7.8868028713418, "grad_norm": 0.8680622577667236, "learning_rate": 2.1425999999999998e-05, "loss": 0.1557, "step": 7145 }, { "epoch": 7.887907233572612, "grad_norm": 1.1026426553726196, "learning_rate": 2.1429e-05, "loss": 0.1708, "step": 7146 }, { "epoch": 7.8890115958034235, "grad_norm": 0.7454699277877808, "learning_rate": 2.1432e-05, "loss": 0.132, "step": 7147 }, { "epoch": 7.890115958034235, "grad_norm": 0.49136167764663696, "learning_rate": 2.1435e-05, "loss": 0.108, "step": 7148 }, { "epoch": 7.891220320265047, "grad_norm": 0.8237332105636597, "learning_rate": 2.1438e-05, "loss": 0.1476, "step": 7149 }, { "epoch": 7.892324682495858, "grad_norm": 0.4337781071662903, "learning_rate": 2.1441e-05, "loss": 0.0589, "step": 7150 }, { "epoch": 7.893429044726671, "grad_norm": 0.4587319493293762, "learning_rate": 2.1444e-05, "loss": 0.0526, "step": 7151 }, { "epoch": 7.894533406957482, "grad_norm": 0.379008412361145, "learning_rate": 2.1447e-05, "loss": 0.0314, "step": 7152 }, { "epoch": 7.895637769188294, "grad_norm": 0.4778851568698883, "learning_rate": 2.145e-05, "loss": 0.0668, "step": 7153 }, { "epoch": 7.8967421314191055, "grad_norm": 0.2571912407875061, "learning_rate": 2.1453e-05, "loss": 0.0212, "step": 7154 }, { "epoch": 7.897846493649917, "grad_norm": 0.35291576385498047, "learning_rate": 2.1456e-05, "loss": 0.0257, "step": 7155 }, { "epoch": 7.898950855880729, "grad_norm": 0.29404354095458984, "learning_rate": 2.1459000000000002e-05, "loss": 0.0264, "step": 7156 }, { "epoch": 7.90005521811154, "grad_norm": 0.44221317768096924, "learning_rate": 2.1462000000000002e-05, "loss": 0.0305, "step": 7157 }, { "epoch": 7.901159580342353, "grad_norm": 0.3724765181541443, "learning_rate": 2.1465000000000002e-05, "loss": 0.0285, "step": 7158 }, { "epoch": 7.902263942573164, "grad_norm": 0.32631534337997437, "learning_rate": 2.1468000000000002e-05, 
"loss": 0.0409, "step": 7159 }, { "epoch": 7.903368304803975, "grad_norm": 0.48300015926361084, "learning_rate": 2.1471e-05, "loss": 0.0374, "step": 7160 }, { "epoch": 7.904472667034788, "grad_norm": 0.7384272217750549, "learning_rate": 2.1474e-05, "loss": 0.0543, "step": 7161 }, { "epoch": 7.905577029265599, "grad_norm": 0.34935975074768066, "learning_rate": 2.1477e-05, "loss": 0.0243, "step": 7162 }, { "epoch": 7.906681391496411, "grad_norm": 0.3757302463054657, "learning_rate": 2.148e-05, "loss": 0.0245, "step": 7163 }, { "epoch": 7.9077857537272225, "grad_norm": 0.5659650564193726, "learning_rate": 2.1483e-05, "loss": 0.0243, "step": 7164 }, { "epoch": 7.908890115958034, "grad_norm": 0.5307693481445312, "learning_rate": 2.1486e-05, "loss": 0.049, "step": 7165 }, { "epoch": 7.909994478188846, "grad_norm": 0.4874962866306305, "learning_rate": 2.1489e-05, "loss": 0.0202, "step": 7166 }, { "epoch": 7.911098840419657, "grad_norm": 0.42072904109954834, "learning_rate": 2.1492e-05, "loss": 0.013, "step": 7167 }, { "epoch": 7.91220320265047, "grad_norm": 0.4167478680610657, "learning_rate": 2.1495e-05, "loss": 0.015, "step": 7168 }, { "epoch": 7.913307564881281, "grad_norm": 0.35759684443473816, "learning_rate": 2.1498e-05, "loss": 0.0194, "step": 7169 }, { "epoch": 7.914411927112093, "grad_norm": 0.27226805686950684, "learning_rate": 2.1501e-05, "loss": 0.0139, "step": 7170 }, { "epoch": 7.9155162893429045, "grad_norm": 0.5413070321083069, "learning_rate": 2.1504e-05, "loss": 0.0285, "step": 7171 }, { "epoch": 7.916620651573716, "grad_norm": 0.32576483488082886, "learning_rate": 2.1507e-05, "loss": 0.0143, "step": 7172 }, { "epoch": 7.917725013804528, "grad_norm": 0.4086461067199707, "learning_rate": 2.151e-05, "loss": 0.0301, "step": 7173 }, { "epoch": 7.918829376035339, "grad_norm": 0.387216717004776, "learning_rate": 2.1513e-05, "loss": 0.0269, "step": 7174 }, { "epoch": 7.919933738266152, "grad_norm": 0.5295884013175964, "learning_rate": 2.1516e-05, "loss": 
0.0238, "step": 7175 }, { "epoch": 7.921038100496963, "grad_norm": 0.2674579620361328, "learning_rate": 2.1519000000000002e-05, "loss": 0.0125, "step": 7176 }, { "epoch": 7.922142462727774, "grad_norm": 0.4959191083908081, "learning_rate": 2.1522e-05, "loss": 0.0608, "step": 7177 }, { "epoch": 7.923246824958587, "grad_norm": 0.4136601686477661, "learning_rate": 2.1525e-05, "loss": 0.0225, "step": 7178 }, { "epoch": 7.924351187189398, "grad_norm": 0.4808410406112671, "learning_rate": 2.1528e-05, "loss": 0.0188, "step": 7179 }, { "epoch": 7.92545554942021, "grad_norm": 0.3476134240627289, "learning_rate": 2.1531e-05, "loss": 0.0225, "step": 7180 }, { "epoch": 7.9265599116510215, "grad_norm": 1.3164316415786743, "learning_rate": 2.1534e-05, "loss": 0.0172, "step": 7181 }, { "epoch": 7.927664273881833, "grad_norm": 0.8555864095687866, "learning_rate": 2.1537e-05, "loss": 0.034, "step": 7182 }, { "epoch": 7.928768636112645, "grad_norm": 0.3425846993923187, "learning_rate": 2.154e-05, "loss": 0.0127, "step": 7183 }, { "epoch": 7.929872998343456, "grad_norm": 0.6644253730773926, "learning_rate": 2.1543e-05, "loss": 0.0325, "step": 7184 }, { "epoch": 7.930977360574269, "grad_norm": 1.1075403690338135, "learning_rate": 2.1546e-05, "loss": 0.0411, "step": 7185 }, { "epoch": 7.93208172280508, "grad_norm": 0.4626133441925049, "learning_rate": 2.1549000000000003e-05, "loss": 0.0298, "step": 7186 }, { "epoch": 7.933186085035892, "grad_norm": 0.3785266876220703, "learning_rate": 2.1552000000000003e-05, "loss": 0.0219, "step": 7187 }, { "epoch": 7.9342904472667035, "grad_norm": 0.5429353713989258, "learning_rate": 2.1555000000000003e-05, "loss": 0.0379, "step": 7188 }, { "epoch": 7.935394809497515, "grad_norm": 0.6695501804351807, "learning_rate": 2.1558000000000003e-05, "loss": 0.0219, "step": 7189 }, { "epoch": 7.936499171728327, "grad_norm": 0.41278576850891113, "learning_rate": 2.1561e-05, "loss": 0.0268, "step": 7190 }, { "epoch": 7.937603533959138, "grad_norm": 
0.7384737730026245, "learning_rate": 2.1564e-05, "loss": 0.0222, "step": 7191 }, { "epoch": 7.938707896189951, "grad_norm": 1.0441148281097412, "learning_rate": 2.1567e-05, "loss": 0.0479, "step": 7192 }, { "epoch": 7.939812258420762, "grad_norm": 1.3600834608078003, "learning_rate": 2.157e-05, "loss": 0.2546, "step": 7193 }, { "epoch": 7.940916620651573, "grad_norm": 1.0087745189666748, "learning_rate": 2.1572999999999998e-05, "loss": 0.3165, "step": 7194 }, { "epoch": 7.942020982882386, "grad_norm": 0.6539810299873352, "learning_rate": 2.1575999999999998e-05, "loss": 0.1769, "step": 7195 }, { "epoch": 7.943125345113197, "grad_norm": 0.6126609444618225, "learning_rate": 2.1579e-05, "loss": 0.1248, "step": 7196 }, { "epoch": 7.944229707344009, "grad_norm": 0.8542538285255432, "learning_rate": 2.1582e-05, "loss": 0.1609, "step": 7197 }, { "epoch": 7.9453340695748205, "grad_norm": 0.710395872592926, "learning_rate": 2.1585e-05, "loss": 0.1087, "step": 7198 }, { "epoch": 7.946438431805632, "grad_norm": 0.9761446714401245, "learning_rate": 2.1588e-05, "loss": 0.1343, "step": 7199 }, { "epoch": 7.947542794036444, "grad_norm": 0.7982999086380005, "learning_rate": 2.1591e-05, "loss": 0.13, "step": 7200 }, { "epoch": 7.948647156267255, "grad_norm": 0.45234808325767517, "learning_rate": 2.1594e-05, "loss": 0.0735, "step": 7201 }, { "epoch": 7.949751518498068, "grad_norm": 0.4932169020175934, "learning_rate": 2.1597e-05, "loss": 0.0713, "step": 7202 }, { "epoch": 7.950855880728879, "grad_norm": 0.5884379148483276, "learning_rate": 2.16e-05, "loss": 0.0422, "step": 7203 }, { "epoch": 7.951960242959691, "grad_norm": 0.45991331338882446, "learning_rate": 2.1603e-05, "loss": 0.0548, "step": 7204 }, { "epoch": 7.9530646051905025, "grad_norm": 0.47394096851348877, "learning_rate": 2.1606e-05, "loss": 0.0315, "step": 7205 }, { "epoch": 7.954168967421314, "grad_norm": 0.46157318353652954, "learning_rate": 2.1609000000000003e-05, "loss": 0.0308, "step": 7206 }, { "epoch": 
7.955273329652126, "grad_norm": 0.26133787631988525, "learning_rate": 2.1612000000000002e-05, "loss": 0.022, "step": 7207 }, { "epoch": 7.956377691882937, "grad_norm": 0.5369722843170166, "learning_rate": 2.1615000000000002e-05, "loss": 0.0437, "step": 7208 }, { "epoch": 7.95748205411375, "grad_norm": 0.5830492973327637, "learning_rate": 2.1618000000000002e-05, "loss": 0.0339, "step": 7209 }, { "epoch": 7.958586416344561, "grad_norm": 0.43303182721138, "learning_rate": 2.1621000000000002e-05, "loss": 0.0194, "step": 7210 }, { "epoch": 7.959690778575373, "grad_norm": 0.5060000419616699, "learning_rate": 2.1624e-05, "loss": 0.0266, "step": 7211 }, { "epoch": 7.9607951408061846, "grad_norm": 0.4074822664260864, "learning_rate": 2.1627e-05, "loss": 0.0247, "step": 7212 }, { "epoch": 7.961899503036996, "grad_norm": 0.3050706088542938, "learning_rate": 2.163e-05, "loss": 0.0191, "step": 7213 }, { "epoch": 7.963003865267808, "grad_norm": 0.36592724919319153, "learning_rate": 2.1633e-05, "loss": 0.023, "step": 7214 }, { "epoch": 7.9641082274986195, "grad_norm": 0.6664083003997803, "learning_rate": 2.1635999999999997e-05, "loss": 0.0207, "step": 7215 }, { "epoch": 7.965212589729431, "grad_norm": 1.1762268543243408, "learning_rate": 2.1639e-05, "loss": 0.0392, "step": 7216 }, { "epoch": 7.966316951960243, "grad_norm": 0.21200726926326752, "learning_rate": 2.1642e-05, "loss": 0.0141, "step": 7217 }, { "epoch": 7.967421314191054, "grad_norm": 0.6861028075218201, "learning_rate": 2.1645e-05, "loss": 0.0325, "step": 7218 }, { "epoch": 7.968525676421867, "grad_norm": 0.450862318277359, "learning_rate": 2.1648e-05, "loss": 0.0203, "step": 7219 }, { "epoch": 7.969630038652678, "grad_norm": 0.5629989504814148, "learning_rate": 2.1651e-05, "loss": 0.0532, "step": 7220 }, { "epoch": 7.97073440088349, "grad_norm": 0.36381712555885315, "learning_rate": 2.1654e-05, "loss": 0.0136, "step": 7221 }, { "epoch": 7.9718387631143015, "grad_norm": 0.6119326949119568, "learning_rate": 2.1657e-05, 
"loss": 0.0196, "step": 7222 }, { "epoch": 7.972943125345113, "grad_norm": 0.4491708278656006, "learning_rate": 2.166e-05, "loss": 0.0269, "step": 7223 }, { "epoch": 7.974047487575925, "grad_norm": 0.31373506784439087, "learning_rate": 2.1663e-05, "loss": 0.0159, "step": 7224 }, { "epoch": 7.975151849806736, "grad_norm": 0.6856380701065063, "learning_rate": 2.1666e-05, "loss": 0.0322, "step": 7225 }, { "epoch": 7.976256212037549, "grad_norm": 0.5835053324699402, "learning_rate": 2.1669000000000002e-05, "loss": 0.0272, "step": 7226 }, { "epoch": 7.97736057426836, "grad_norm": 0.8314666152000427, "learning_rate": 2.1672000000000002e-05, "loss": 0.0317, "step": 7227 }, { "epoch": 7.978464936499172, "grad_norm": 0.7140384316444397, "learning_rate": 2.1675e-05, "loss": 0.0149, "step": 7228 }, { "epoch": 7.9795692987299836, "grad_norm": 0.38183459639549255, "learning_rate": 2.1678e-05, "loss": 0.02, "step": 7229 }, { "epoch": 7.980673660960795, "grad_norm": 0.6138250827789307, "learning_rate": 2.1681e-05, "loss": 0.0276, "step": 7230 }, { "epoch": 7.981778023191607, "grad_norm": 0.3990288972854614, "learning_rate": 2.1684e-05, "loss": 0.0265, "step": 7231 }, { "epoch": 7.9828823854224185, "grad_norm": 0.379313588142395, "learning_rate": 2.1687e-05, "loss": 0.0162, "step": 7232 }, { "epoch": 7.98398674765323, "grad_norm": 0.4954693913459778, "learning_rate": 2.169e-05, "loss": 0.0193, "step": 7233 }, { "epoch": 7.985091109884042, "grad_norm": 0.7190736532211304, "learning_rate": 2.1693e-05, "loss": 0.0359, "step": 7234 }, { "epoch": 7.986195472114853, "grad_norm": 0.5433146953582764, "learning_rate": 2.1696e-05, "loss": 0.0326, "step": 7235 }, { "epoch": 7.987299834345666, "grad_norm": 0.47957685589790344, "learning_rate": 2.1699000000000003e-05, "loss": 0.0346, "step": 7236 }, { "epoch": 7.988404196576477, "grad_norm": 0.533842921257019, "learning_rate": 2.1702000000000003e-05, "loss": 0.0277, "step": 7237 }, { "epoch": 7.989508558807289, "grad_norm": 0.6715341806411743, 
"learning_rate": 2.1705000000000003e-05, "loss": 0.0253, "step": 7238 }, { "epoch": 7.9906129210381005, "grad_norm": 0.43277376890182495, "learning_rate": 2.1708e-05, "loss": 0.0229, "step": 7239 }, { "epoch": 7.991717283268912, "grad_norm": 0.6449937224388123, "learning_rate": 2.1711e-05, "loss": 0.0206, "step": 7240 }, { "epoch": 7.992821645499724, "grad_norm": 0.46793895959854126, "learning_rate": 2.1714e-05, "loss": 0.021, "step": 7241 }, { "epoch": 7.993926007730535, "grad_norm": 0.6902749538421631, "learning_rate": 2.1717e-05, "loss": 0.0447, "step": 7242 }, { "epoch": 7.995030369961348, "grad_norm": 1.0525788068771362, "learning_rate": 2.172e-05, "loss": 0.184, "step": 7243 }, { "epoch": 7.996134732192159, "grad_norm": 0.35570889711380005, "learning_rate": 2.1723e-05, "loss": 0.0314, "step": 7244 }, { "epoch": 7.997239094422971, "grad_norm": 0.1883281171321869, "learning_rate": 2.1726e-05, "loss": 0.0113, "step": 7245 }, { "epoch": 7.9983434566537825, "grad_norm": 0.30803677439689636, "learning_rate": 2.1729e-05, "loss": 0.0109, "step": 7246 }, { "epoch": 7.999447818884594, "grad_norm": 0.764787495136261, "learning_rate": 2.1732e-05, "loss": 0.0306, "step": 7247 }, { "epoch": 8.0, "grad_norm": 2.1893696784973145, "learning_rate": 2.1735e-05, "loss": 0.0226, "step": 7248 }, { "epoch": 8.001104362230812, "grad_norm": 0.7761144638061523, "learning_rate": 2.1738e-05, "loss": 0.232, "step": 7249 }, { "epoch": 8.002208724461623, "grad_norm": 0.60636967420578, "learning_rate": 2.1741e-05, "loss": 0.1539, "step": 7250 }, { "epoch": 8.003313086692435, "grad_norm": 0.7183804512023926, "learning_rate": 2.1744e-05, "loss": 0.2196, "step": 7251 }, { "epoch": 8.004417448923247, "grad_norm": 0.6803920865058899, "learning_rate": 2.1747e-05, "loss": 0.1202, "step": 7252 }, { "epoch": 8.00552181115406, "grad_norm": 0.7071170806884766, "learning_rate": 2.175e-05, "loss": 0.0954, "step": 7253 }, { "epoch": 8.00662617338487, "grad_norm": 0.7232089042663574, "learning_rate": 
2.1753e-05, "loss": 0.0961, "step": 7254 }, { "epoch": 8.007730535615682, "grad_norm": 0.4328238070011139, "learning_rate": 2.1756e-05, "loss": 0.0935, "step": 7255 }, { "epoch": 8.008834897846494, "grad_norm": 0.43150171637535095, "learning_rate": 2.1759e-05, "loss": 0.0449, "step": 7256 }, { "epoch": 8.009939260077305, "grad_norm": 0.5645415782928467, "learning_rate": 2.1762000000000003e-05, "loss": 0.0534, "step": 7257 }, { "epoch": 8.011043622308117, "grad_norm": 0.3027472496032715, "learning_rate": 2.1765000000000003e-05, "loss": 0.0396, "step": 7258 }, { "epoch": 8.01214798453893, "grad_norm": 0.3347572386264801, "learning_rate": 2.1768000000000002e-05, "loss": 0.0145, "step": 7259 }, { "epoch": 8.01325234676974, "grad_norm": 0.23410291969776154, "learning_rate": 2.1771000000000002e-05, "loss": 0.016, "step": 7260 }, { "epoch": 8.014356709000552, "grad_norm": 0.3429393172264099, "learning_rate": 2.1774000000000002e-05, "loss": 0.0226, "step": 7261 }, { "epoch": 8.015461071231364, "grad_norm": 0.3343806564807892, "learning_rate": 2.1777000000000002e-05, "loss": 0.0205, "step": 7262 }, { "epoch": 8.016565433462176, "grad_norm": 0.5422937870025635, "learning_rate": 2.178e-05, "loss": 0.0404, "step": 7263 }, { "epoch": 8.017669795692987, "grad_norm": 0.421509325504303, "learning_rate": 2.1782999999999998e-05, "loss": 0.0134, "step": 7264 }, { "epoch": 8.018774157923799, "grad_norm": 0.3827027678489685, "learning_rate": 2.1785999999999998e-05, "loss": 0.0213, "step": 7265 }, { "epoch": 8.019878520154611, "grad_norm": 0.27460893988609314, "learning_rate": 2.1788999999999998e-05, "loss": 0.0192, "step": 7266 }, { "epoch": 8.020982882385422, "grad_norm": 0.39808788895606995, "learning_rate": 2.1792e-05, "loss": 0.0162, "step": 7267 }, { "epoch": 8.022087244616234, "grad_norm": 0.17466092109680176, "learning_rate": 2.1795e-05, "loss": 0.0152, "step": 7268 }, { "epoch": 8.023191606847046, "grad_norm": 0.9840047359466553, "learning_rate": 2.1798e-05, "loss": 0.0183, 
"step": 7269 }, { "epoch": 8.024295969077858, "grad_norm": 0.25703513622283936, "learning_rate": 2.1801e-05, "loss": 0.0107, "step": 7270 }, { "epoch": 8.025400331308669, "grad_norm": 0.32547199726104736, "learning_rate": 2.1804e-05, "loss": 0.0156, "step": 7271 }, { "epoch": 8.026504693539481, "grad_norm": 0.4270044267177582, "learning_rate": 2.1807e-05, "loss": 0.0184, "step": 7272 }, { "epoch": 8.027609055770293, "grad_norm": 0.6302943825721741, "learning_rate": 2.181e-05, "loss": 0.0252, "step": 7273 }, { "epoch": 8.028713418001104, "grad_norm": 0.2790431082248688, "learning_rate": 2.1813e-05, "loss": 0.0181, "step": 7274 }, { "epoch": 8.029817780231916, "grad_norm": 0.34774452447891235, "learning_rate": 2.1816e-05, "loss": 0.0149, "step": 7275 }, { "epoch": 8.030922142462728, "grad_norm": 0.5545931458473206, "learning_rate": 2.1819e-05, "loss": 0.0244, "step": 7276 }, { "epoch": 8.032026504693539, "grad_norm": 0.33782821893692017, "learning_rate": 2.1822000000000002e-05, "loss": 0.0131, "step": 7277 }, { "epoch": 8.03313086692435, "grad_norm": 0.46480655670166016, "learning_rate": 2.1825000000000002e-05, "loss": 0.0173, "step": 7278 }, { "epoch": 8.034235229155163, "grad_norm": 0.3023747205734253, "learning_rate": 2.1828000000000002e-05, "loss": 0.0142, "step": 7279 }, { "epoch": 8.035339591385975, "grad_norm": 0.285377562046051, "learning_rate": 2.1831e-05, "loss": 0.0124, "step": 7280 }, { "epoch": 8.036443953616786, "grad_norm": 0.21587109565734863, "learning_rate": 2.1834e-05, "loss": 0.0132, "step": 7281 }, { "epoch": 8.037548315847598, "grad_norm": 0.5006358027458191, "learning_rate": 2.1837e-05, "loss": 0.0245, "step": 7282 }, { "epoch": 8.03865267807841, "grad_norm": 0.5121243000030518, "learning_rate": 2.184e-05, "loss": 0.0206, "step": 7283 }, { "epoch": 8.03975704030922, "grad_norm": 0.4899631440639496, "learning_rate": 2.1843e-05, "loss": 0.0189, "step": 7284 }, { "epoch": 8.040861402540033, "grad_norm": 0.5248818397521973, "learning_rate": 
2.1846e-05, "loss": 0.0293, "step": 7285 }, { "epoch": 8.041965764770845, "grad_norm": 0.2263786941766739, "learning_rate": 2.1849e-05, "loss": 0.0086, "step": 7286 }, { "epoch": 8.043070127001657, "grad_norm": 0.5460213422775269, "learning_rate": 2.1852000000000004e-05, "loss": 0.0229, "step": 7287 }, { "epoch": 8.044174489232468, "grad_norm": 0.31343263387680054, "learning_rate": 2.1855e-05, "loss": 0.0102, "step": 7288 }, { "epoch": 8.04527885146328, "grad_norm": 0.7656375169754028, "learning_rate": 2.1858e-05, "loss": 0.021, "step": 7289 }, { "epoch": 8.046383213694092, "grad_norm": 0.5952855944633484, "learning_rate": 2.1861e-05, "loss": 0.0212, "step": 7290 }, { "epoch": 8.047487575924903, "grad_norm": 0.26824361085891724, "learning_rate": 2.1864e-05, "loss": 0.0153, "step": 7291 }, { "epoch": 8.048591938155715, "grad_norm": 0.4770207703113556, "learning_rate": 2.1867e-05, "loss": 0.0196, "step": 7292 }, { "epoch": 8.049696300386527, "grad_norm": 0.9847237467765808, "learning_rate": 2.187e-05, "loss": 0.0279, "step": 7293 }, { "epoch": 8.050800662617338, "grad_norm": 0.25179532170295715, "learning_rate": 2.1873e-05, "loss": 0.0136, "step": 7294 }, { "epoch": 8.05190502484815, "grad_norm": 0.5051051378250122, "learning_rate": 2.1876e-05, "loss": 0.0207, "step": 7295 }, { "epoch": 8.053009387078962, "grad_norm": 0.47688814997673035, "learning_rate": 2.1879e-05, "loss": 0.0202, "step": 7296 }, { "epoch": 8.054113749309774, "grad_norm": 0.41256698966026306, "learning_rate": 2.1882e-05, "loss": 0.021, "step": 7297 }, { "epoch": 8.055218111540585, "grad_norm": 0.5543317198753357, "learning_rate": 2.1885e-05, "loss": 0.0273, "step": 7298 }, { "epoch": 8.056322473771397, "grad_norm": 1.0500520467758179, "learning_rate": 2.1888e-05, "loss": 0.3022, "step": 7299 }, { "epoch": 8.05742683600221, "grad_norm": 0.7421385049819946, "learning_rate": 2.1891e-05, "loss": 0.2117, "step": 7300 }, { "epoch": 8.05853119823302, "grad_norm": 0.8247469067573547, "learning_rate": 
2.1894e-05, "loss": 0.164, "step": 7301 }, { "epoch": 8.059635560463832, "grad_norm": 0.5651078224182129, "learning_rate": 2.1897e-05, "loss": 0.1272, "step": 7302 }, { "epoch": 8.060739922694644, "grad_norm": 0.6796460151672363, "learning_rate": 2.19e-05, "loss": 0.115, "step": 7303 }, { "epoch": 8.061844284925456, "grad_norm": 0.6407984495162964, "learning_rate": 2.1903e-05, "loss": 0.094, "step": 7304 }, { "epoch": 8.062948647156267, "grad_norm": 1.0238831043243408, "learning_rate": 2.1906e-05, "loss": 0.1112, "step": 7305 }, { "epoch": 8.064053009387079, "grad_norm": 0.48533710837364197, "learning_rate": 2.1909e-05, "loss": 0.0457, "step": 7306 }, { "epoch": 8.065157371617891, "grad_norm": 0.8675582408905029, "learning_rate": 2.1912000000000003e-05, "loss": 0.1348, "step": 7307 }, { "epoch": 8.066261733848702, "grad_norm": 0.6875382661819458, "learning_rate": 2.1915000000000003e-05, "loss": 0.1103, "step": 7308 }, { "epoch": 8.067366096079514, "grad_norm": 0.3077320158481598, "learning_rate": 2.1918000000000003e-05, "loss": 0.0468, "step": 7309 }, { "epoch": 8.068470458310326, "grad_norm": 0.28235507011413574, "learning_rate": 2.1921000000000002e-05, "loss": 0.0299, "step": 7310 }, { "epoch": 8.069574820541137, "grad_norm": 0.3022988438606262, "learning_rate": 2.1924000000000002e-05, "loss": 0.0165, "step": 7311 }, { "epoch": 8.070679182771949, "grad_norm": 0.19707591831684113, "learning_rate": 2.1927000000000002e-05, "loss": 0.0159, "step": 7312 }, { "epoch": 8.071783545002761, "grad_norm": 0.49069470167160034, "learning_rate": 2.193e-05, "loss": 0.0343, "step": 7313 }, { "epoch": 8.072887907233573, "grad_norm": 0.42569905519485474, "learning_rate": 2.1932999999999998e-05, "loss": 0.0166, "step": 7314 }, { "epoch": 8.073992269464384, "grad_norm": 0.2728276252746582, "learning_rate": 2.1935999999999998e-05, "loss": 0.0213, "step": 7315 }, { "epoch": 8.075096631695196, "grad_norm": 0.4051644206047058, "learning_rate": 2.1938999999999998e-05, "loss": 0.0169, 
"step": 7316 }, { "epoch": 8.076200993926008, "grad_norm": 0.33495330810546875, "learning_rate": 2.1942e-05, "loss": 0.0171, "step": 7317 }, { "epoch": 8.077305356156819, "grad_norm": 0.31232455372810364, "learning_rate": 2.1945e-05, "loss": 0.0147, "step": 7318 }, { "epoch": 8.078409718387631, "grad_norm": 0.2510296404361725, "learning_rate": 2.1948e-05, "loss": 0.0199, "step": 7319 }, { "epoch": 8.079514080618443, "grad_norm": 0.38335323333740234, "learning_rate": 2.1951e-05, "loss": 0.0178, "step": 7320 }, { "epoch": 8.080618442849255, "grad_norm": 0.3048839271068573, "learning_rate": 2.1954e-05, "loss": 0.0162, "step": 7321 }, { "epoch": 8.081722805080066, "grad_norm": 0.4992590546607971, "learning_rate": 2.1957e-05, "loss": 0.0247, "step": 7322 }, { "epoch": 8.082827167310878, "grad_norm": 0.38794592022895813, "learning_rate": 2.196e-05, "loss": 0.0215, "step": 7323 }, { "epoch": 8.08393152954169, "grad_norm": 0.2640796899795532, "learning_rate": 2.1963e-05, "loss": 0.0112, "step": 7324 }, { "epoch": 8.0850358917725, "grad_norm": 0.2696094810962677, "learning_rate": 2.1966e-05, "loss": 0.0125, "step": 7325 }, { "epoch": 8.086140254003313, "grad_norm": 0.42839813232421875, "learning_rate": 2.1969e-05, "loss": 0.015, "step": 7326 }, { "epoch": 8.087244616234125, "grad_norm": 0.6806778311729431, "learning_rate": 2.1972000000000002e-05, "loss": 0.0391, "step": 7327 }, { "epoch": 8.088348978464936, "grad_norm": 0.34444189071655273, "learning_rate": 2.1975000000000002e-05, "loss": 0.0236, "step": 7328 }, { "epoch": 8.089453340695748, "grad_norm": 0.5122315883636475, "learning_rate": 2.1978000000000002e-05, "loss": 0.0162, "step": 7329 }, { "epoch": 8.09055770292656, "grad_norm": 0.2243531495332718, "learning_rate": 2.1981000000000002e-05, "loss": 0.0137, "step": 7330 }, { "epoch": 8.091662065157372, "grad_norm": 0.31009137630462646, "learning_rate": 2.1984e-05, "loss": 0.0194, "step": 7331 }, { "epoch": 8.092766427388183, "grad_norm": 0.4595095217227936, 
"learning_rate": 2.1987e-05, "loss": 0.0187, "step": 7332 }, { "epoch": 8.093870789618995, "grad_norm": 0.2719804346561432, "learning_rate": 2.199e-05, "loss": 0.0126, "step": 7333 }, { "epoch": 8.094975151849807, "grad_norm": 0.35470670461654663, "learning_rate": 2.1993e-05, "loss": 0.0475, "step": 7334 }, { "epoch": 8.096079514080618, "grad_norm": 0.267831027507782, "learning_rate": 2.1996e-05, "loss": 0.013, "step": 7335 }, { "epoch": 8.09718387631143, "grad_norm": 0.3712809383869171, "learning_rate": 2.1999e-05, "loss": 0.0244, "step": 7336 }, { "epoch": 8.098288238542242, "grad_norm": 0.5807567834854126, "learning_rate": 2.2002e-05, "loss": 0.0291, "step": 7337 }, { "epoch": 8.099392600773054, "grad_norm": 0.6116604208946228, "learning_rate": 2.2005e-05, "loss": 0.0246, "step": 7338 }, { "epoch": 8.100496963003865, "grad_norm": 0.3577304184436798, "learning_rate": 2.2008e-05, "loss": 0.0137, "step": 7339 }, { "epoch": 8.101601325234677, "grad_norm": 0.3220914304256439, "learning_rate": 2.2011e-05, "loss": 0.0175, "step": 7340 }, { "epoch": 8.10270568746549, "grad_norm": 0.3069126605987549, "learning_rate": 2.2014e-05, "loss": 0.0161, "step": 7341 }, { "epoch": 8.1038100496963, "grad_norm": 0.5557069182395935, "learning_rate": 2.2017e-05, "loss": 0.0269, "step": 7342 }, { "epoch": 8.104914411927112, "grad_norm": 0.3328283727169037, "learning_rate": 2.202e-05, "loss": 0.0261, "step": 7343 }, { "epoch": 8.106018774157924, "grad_norm": 0.3693297803401947, "learning_rate": 2.2023e-05, "loss": 0.0114, "step": 7344 }, { "epoch": 8.107123136388736, "grad_norm": 0.24111346900463104, "learning_rate": 2.2026e-05, "loss": 0.0083, "step": 7345 }, { "epoch": 8.108227498619547, "grad_norm": 0.4919811487197876, "learning_rate": 2.2029e-05, "loss": 0.0238, "step": 7346 }, { "epoch": 8.109331860850359, "grad_norm": 0.41240108013153076, "learning_rate": 2.2032000000000002e-05, "loss": 0.0142, "step": 7347 }, { "epoch": 8.110436223081171, "grad_norm": 1.6000745296478271, 
"learning_rate": 2.2035e-05, "loss": 0.0693, "step": 7348 }, { "epoch": 8.111540585311982, "grad_norm": 0.8508907556533813, "learning_rate": 2.2038e-05, "loss": 0.2206, "step": 7349 }, { "epoch": 8.112644947542794, "grad_norm": 0.6118038892745972, "learning_rate": 2.2041e-05, "loss": 0.1641, "step": 7350 }, { "epoch": 8.113749309773606, "grad_norm": 0.8633766174316406, "learning_rate": 2.2044e-05, "loss": 0.2128, "step": 7351 }, { "epoch": 8.114853672004417, "grad_norm": 0.6302627325057983, "learning_rate": 2.2047e-05, "loss": 0.1605, "step": 7352 }, { "epoch": 8.115958034235229, "grad_norm": 0.5207327008247375, "learning_rate": 2.205e-05, "loss": 0.1013, "step": 7353 }, { "epoch": 8.117062396466041, "grad_norm": 0.4917049705982208, "learning_rate": 2.2053e-05, "loss": 0.0999, "step": 7354 }, { "epoch": 8.118166758696853, "grad_norm": 0.8603551387786865, "learning_rate": 2.2056e-05, "loss": 0.1057, "step": 7355 }, { "epoch": 8.119271120927664, "grad_norm": 0.5375520586967468, "learning_rate": 2.2059e-05, "loss": 0.0561, "step": 7356 }, { "epoch": 8.120375483158476, "grad_norm": 0.4821532666683197, "learning_rate": 2.2062000000000003e-05, "loss": 0.064, "step": 7357 }, { "epoch": 8.121479845389288, "grad_norm": 0.33839720487594604, "learning_rate": 2.2065000000000003e-05, "loss": 0.05, "step": 7358 }, { "epoch": 8.122584207620099, "grad_norm": 0.49855372309684753, "learning_rate": 2.2068000000000003e-05, "loss": 0.0499, "step": 7359 }, { "epoch": 8.123688569850911, "grad_norm": 0.30898281931877136, "learning_rate": 2.2071000000000003e-05, "loss": 0.0293, "step": 7360 }, { "epoch": 8.124792932081723, "grad_norm": 0.40948015451431274, "learning_rate": 2.2074000000000002e-05, "loss": 0.0285, "step": 7361 }, { "epoch": 8.125897294312535, "grad_norm": 0.38567134737968445, "learning_rate": 2.2077e-05, "loss": 0.0194, "step": 7362 }, { "epoch": 8.127001656543346, "grad_norm": 0.43844565749168396, "learning_rate": 2.208e-05, "loss": 0.027, "step": 7363 }, { "epoch": 
8.128106018774158, "grad_norm": 0.2836553752422333, "learning_rate": 2.2083e-05, "loss": 0.0152, "step": 7364 }, { "epoch": 8.12921038100497, "grad_norm": 0.41674530506134033, "learning_rate": 2.2085999999999998e-05, "loss": 0.0293, "step": 7365 }, { "epoch": 8.13031474323578, "grad_norm": 0.29461267590522766, "learning_rate": 2.2088999999999998e-05, "loss": 0.0223, "step": 7366 }, { "epoch": 8.131419105466593, "grad_norm": 0.3150902986526489, "learning_rate": 2.2092e-05, "loss": 0.0167, "step": 7367 }, { "epoch": 8.132523467697405, "grad_norm": 0.33551347255706787, "learning_rate": 2.2095e-05, "loss": 0.0201, "step": 7368 }, { "epoch": 8.133627829928216, "grad_norm": 0.2850201427936554, "learning_rate": 2.2098e-05, "loss": 0.0216, "step": 7369 }, { "epoch": 8.134732192159028, "grad_norm": 0.33882731199264526, "learning_rate": 2.2101e-05, "loss": 0.0161, "step": 7370 }, { "epoch": 8.13583655438984, "grad_norm": 0.25873130559921265, "learning_rate": 2.2104e-05, "loss": 0.0123, "step": 7371 }, { "epoch": 8.136940916620652, "grad_norm": 0.4437432587146759, "learning_rate": 2.2107e-05, "loss": 0.0164, "step": 7372 }, { "epoch": 8.138045278851463, "grad_norm": 0.5196056962013245, "learning_rate": 2.211e-05, "loss": 0.0423, "step": 7373 }, { "epoch": 8.139149641082275, "grad_norm": 0.44661182165145874, "learning_rate": 2.2113e-05, "loss": 0.0309, "step": 7374 }, { "epoch": 8.140254003313087, "grad_norm": 0.2095436453819275, "learning_rate": 2.2116e-05, "loss": 0.0128, "step": 7375 }, { "epoch": 8.141358365543898, "grad_norm": 0.377308189868927, "learning_rate": 2.2119e-05, "loss": 0.0198, "step": 7376 }, { "epoch": 8.14246272777471, "grad_norm": 0.5449833869934082, "learning_rate": 2.2122000000000003e-05, "loss": 0.024, "step": 7377 }, { "epoch": 8.143567090005522, "grad_norm": 0.3190297484397888, "learning_rate": 2.2125000000000002e-05, "loss": 0.0137, "step": 7378 }, { "epoch": 8.144671452236334, "grad_norm": 0.4575926959514618, "learning_rate": 2.2128000000000002e-05, 
"loss": 0.0216, "step": 7379 }, { "epoch": 8.145775814467145, "grad_norm": 0.29194700717926025, "learning_rate": 2.2131000000000002e-05, "loss": 0.0157, "step": 7380 }, { "epoch": 8.146880176697957, "grad_norm": 0.4716545343399048, "learning_rate": 2.2134000000000002e-05, "loss": 0.0179, "step": 7381 }, { "epoch": 8.14798453892877, "grad_norm": 0.7198280096054077, "learning_rate": 2.2137e-05, "loss": 0.0149, "step": 7382 }, { "epoch": 8.14908890115958, "grad_norm": 0.5368106365203857, "learning_rate": 2.214e-05, "loss": 0.0289, "step": 7383 }, { "epoch": 8.150193263390392, "grad_norm": 0.5197526216506958, "learning_rate": 2.2143e-05, "loss": 0.0178, "step": 7384 }, { "epoch": 8.151297625621204, "grad_norm": 0.7047646641731262, "learning_rate": 2.2146e-05, "loss": 0.0242, "step": 7385 }, { "epoch": 8.152401987852015, "grad_norm": 0.3623329997062683, "learning_rate": 2.2149e-05, "loss": 0.0165, "step": 7386 }, { "epoch": 8.153506350082827, "grad_norm": 0.9000977873802185, "learning_rate": 2.2151999999999997e-05, "loss": 0.02, "step": 7387 }, { "epoch": 8.154610712313639, "grad_norm": 0.25072580575942993, "learning_rate": 2.2155e-05, "loss": 0.0182, "step": 7388 }, { "epoch": 8.155715074544451, "grad_norm": 0.46494001150131226, "learning_rate": 2.2158e-05, "loss": 0.0319, "step": 7389 }, { "epoch": 8.156819436775262, "grad_norm": 0.6922370791435242, "learning_rate": 2.2161e-05, "loss": 0.0169, "step": 7390 }, { "epoch": 8.157923799006074, "grad_norm": 0.975089430809021, "learning_rate": 2.2164e-05, "loss": 0.0201, "step": 7391 }, { "epoch": 8.159028161236886, "grad_norm": 0.37985220551490784, "learning_rate": 2.2167e-05, "loss": 0.0237, "step": 7392 }, { "epoch": 8.160132523467697, "grad_norm": 0.7252085208892822, "learning_rate": 2.217e-05, "loss": 0.0254, "step": 7393 }, { "epoch": 8.161236885698509, "grad_norm": 0.4501357078552246, "learning_rate": 2.2173e-05, "loss": 0.0215, "step": 7394 }, { "epoch": 8.162341247929321, "grad_norm": 0.8685494661331177, 
"learning_rate": 2.2176e-05, "loss": 0.0221, "step": 7395 }, { "epoch": 8.163445610160133, "grad_norm": 0.6466916799545288, "learning_rate": 2.2179e-05, "loss": 0.0195, "step": 7396 }, { "epoch": 8.164549972390944, "grad_norm": 0.45323681831359863, "learning_rate": 2.2182e-05, "loss": 0.0196, "step": 7397 }, { "epoch": 8.165654334621756, "grad_norm": 0.7153019309043884, "learning_rate": 2.2185000000000002e-05, "loss": 0.0284, "step": 7398 }, { "epoch": 8.166758696852568, "grad_norm": 0.816210925579071, "learning_rate": 2.2188e-05, "loss": 0.3479, "step": 7399 }, { "epoch": 8.167863059083379, "grad_norm": 0.7083925008773804, "learning_rate": 2.2191e-05, "loss": 0.1977, "step": 7400 }, { "epoch": 8.168967421314191, "grad_norm": 0.7437331080436707, "learning_rate": 2.2194e-05, "loss": 0.1788, "step": 7401 }, { "epoch": 8.170071783545003, "grad_norm": 0.6825221180915833, "learning_rate": 2.2197e-05, "loss": 0.1425, "step": 7402 }, { "epoch": 8.171176145775814, "grad_norm": 0.6488742232322693, "learning_rate": 2.22e-05, "loss": 0.1283, "step": 7403 }, { "epoch": 8.172280508006626, "grad_norm": 0.6816068291664124, "learning_rate": 2.2203e-05, "loss": 0.0988, "step": 7404 }, { "epoch": 8.173384870237438, "grad_norm": 0.3539947271347046, "learning_rate": 2.2206e-05, "loss": 0.0492, "step": 7405 }, { "epoch": 8.17448923246825, "grad_norm": 0.395949125289917, "learning_rate": 2.2209e-05, "loss": 0.0477, "step": 7406 }, { "epoch": 8.17559359469906, "grad_norm": 0.3725070655345917, "learning_rate": 2.2212e-05, "loss": 0.0389, "step": 7407 }, { "epoch": 8.176697956929873, "grad_norm": 0.3356624245643616, "learning_rate": 2.2215000000000003e-05, "loss": 0.024, "step": 7408 }, { "epoch": 8.177802319160685, "grad_norm": 0.3185681104660034, "learning_rate": 2.2218000000000003e-05, "loss": 0.0285, "step": 7409 }, { "epoch": 8.178906681391496, "grad_norm": 0.27425867319107056, "learning_rate": 2.2221000000000003e-05, "loss": 0.0197, "step": 7410 }, { "epoch": 8.180011043622308, 
"grad_norm": 0.3147253096103668, "learning_rate": 2.2224e-05, "loss": 0.019, "step": 7411 }, { "epoch": 8.18111540585312, "grad_norm": 0.37725841999053955, "learning_rate": 2.2227e-05, "loss": 0.0209, "step": 7412 }, { "epoch": 8.182219768083932, "grad_norm": 0.2846880257129669, "learning_rate": 2.223e-05, "loss": 0.0117, "step": 7413 }, { "epoch": 8.183324130314743, "grad_norm": 0.5108004212379456, "learning_rate": 2.2233e-05, "loss": 0.0241, "step": 7414 }, { "epoch": 8.184428492545555, "grad_norm": 0.30899578332901, "learning_rate": 2.2236e-05, "loss": 0.015, "step": 7415 }, { "epoch": 8.185532854776367, "grad_norm": 0.517044186592102, "learning_rate": 2.2239e-05, "loss": 0.0221, "step": 7416 }, { "epoch": 8.186637217007178, "grad_norm": 0.3581991493701935, "learning_rate": 2.2241999999999998e-05, "loss": 0.013, "step": 7417 }, { "epoch": 8.18774157923799, "grad_norm": 0.5524913668632507, "learning_rate": 2.2245e-05, "loss": 0.0187, "step": 7418 }, { "epoch": 8.188845941468802, "grad_norm": 0.6551677584648132, "learning_rate": 2.2248e-05, "loss": 0.0331, "step": 7419 }, { "epoch": 8.189950303699613, "grad_norm": 0.5227195620536804, "learning_rate": 2.2251e-05, "loss": 0.0314, "step": 7420 }, { "epoch": 8.191054665930425, "grad_norm": 0.34555646777153015, "learning_rate": 2.2254e-05, "loss": 0.0228, "step": 7421 }, { "epoch": 8.192159028161237, "grad_norm": 0.4634905755519867, "learning_rate": 2.2257e-05, "loss": 0.0169, "step": 7422 }, { "epoch": 8.19326339039205, "grad_norm": 0.3438761830329895, "learning_rate": 2.226e-05, "loss": 0.0203, "step": 7423 }, { "epoch": 8.19436775262286, "grad_norm": 0.6045361757278442, "learning_rate": 2.2263e-05, "loss": 0.0204, "step": 7424 }, { "epoch": 8.195472114853672, "grad_norm": 0.4219439625740051, "learning_rate": 2.2266e-05, "loss": 0.0213, "step": 7425 }, { "epoch": 8.196576477084484, "grad_norm": 4.065164566040039, "learning_rate": 2.2269e-05, "loss": 0.0578, "step": 7426 }, { "epoch": 8.197680839315295, "grad_norm": 
0.40768107771873474, "learning_rate": 2.2272e-05, "loss": 0.0174, "step": 7427 }, { "epoch": 8.198785201546107, "grad_norm": 0.3301135301589966, "learning_rate": 2.2275000000000003e-05, "loss": 0.0175, "step": 7428 }, { "epoch": 8.19988956377692, "grad_norm": 0.7967609763145447, "learning_rate": 2.2278000000000003e-05, "loss": 0.0311, "step": 7429 }, { "epoch": 8.200993926007731, "grad_norm": 0.33022379875183105, "learning_rate": 2.2281000000000002e-05, "loss": 0.0167, "step": 7430 }, { "epoch": 8.202098288238542, "grad_norm": 0.4184964597225189, "learning_rate": 2.2284000000000002e-05, "loss": 0.0182, "step": 7431 }, { "epoch": 8.203202650469354, "grad_norm": 0.37042534351348877, "learning_rate": 2.2287000000000002e-05, "loss": 0.0232, "step": 7432 }, { "epoch": 8.204307012700166, "grad_norm": 0.5200182795524597, "learning_rate": 2.2290000000000002e-05, "loss": 0.0257, "step": 7433 }, { "epoch": 8.205411374930977, "grad_norm": 0.5108190774917603, "learning_rate": 2.2293e-05, "loss": 0.0291, "step": 7434 }, { "epoch": 8.206515737161789, "grad_norm": 0.46241405606269836, "learning_rate": 2.2296e-05, "loss": 0.0197, "step": 7435 }, { "epoch": 8.207620099392601, "grad_norm": 0.2717452645301819, "learning_rate": 2.2298999999999998e-05, "loss": 0.0131, "step": 7436 }, { "epoch": 8.208724461623412, "grad_norm": 0.3814608156681061, "learning_rate": 2.2301999999999998e-05, "loss": 0.0181, "step": 7437 }, { "epoch": 8.209828823854224, "grad_norm": 0.8944241404533386, "learning_rate": 2.2305e-05, "loss": 0.0408, "step": 7438 }, { "epoch": 8.210933186085036, "grad_norm": 0.2112695723772049, "learning_rate": 2.2308e-05, "loss": 0.0088, "step": 7439 }, { "epoch": 8.212037548315848, "grad_norm": 0.3301456868648529, "learning_rate": 2.2311e-05, "loss": 0.0197, "step": 7440 }, { "epoch": 8.213141910546659, "grad_norm": 0.3859938681125641, "learning_rate": 2.2314e-05, "loss": 0.0287, "step": 7441 }, { "epoch": 8.214246272777471, "grad_norm": 0.609773576259613, "learning_rate": 
2.2317e-05, "loss": 0.0352, "step": 7442 }, { "epoch": 8.215350635008283, "grad_norm": 0.6658978462219238, "learning_rate": 2.232e-05, "loss": 0.0241, "step": 7443 }, { "epoch": 8.216454997239094, "grad_norm": 0.2876785099506378, "learning_rate": 2.2323e-05, "loss": 0.0196, "step": 7444 }, { "epoch": 8.217559359469906, "grad_norm": 0.4639776647090912, "learning_rate": 2.2326e-05, "loss": 0.0246, "step": 7445 }, { "epoch": 8.218663721700718, "grad_norm": 0.5430411100387573, "learning_rate": 2.2329e-05, "loss": 0.0202, "step": 7446 }, { "epoch": 8.21976808393153, "grad_norm": 0.25014734268188477, "learning_rate": 2.2332e-05, "loss": 0.0123, "step": 7447 }, { "epoch": 8.22087244616234, "grad_norm": 0.6237921118736267, "learning_rate": 2.2335000000000002e-05, "loss": 0.0341, "step": 7448 }, { "epoch": 8.221976808393153, "grad_norm": 1.1577730178833008, "learning_rate": 2.2338000000000002e-05, "loss": 0.2988, "step": 7449 }, { "epoch": 8.223081170623965, "grad_norm": 0.9150327444076538, "learning_rate": 2.2341000000000002e-05, "loss": 0.2192, "step": 7450 }, { "epoch": 8.224185532854776, "grad_norm": 0.9558724761009216, "learning_rate": 2.2344e-05, "loss": 0.1945, "step": 7451 }, { "epoch": 8.225289895085588, "grad_norm": 0.8145606517791748, "learning_rate": 2.2347e-05, "loss": 0.1519, "step": 7452 }, { "epoch": 8.2263942573164, "grad_norm": 0.653912365436554, "learning_rate": 2.235e-05, "loss": 0.1085, "step": 7453 }, { "epoch": 8.22749861954721, "grad_norm": 0.5959416031837463, "learning_rate": 2.2353e-05, "loss": 0.1114, "step": 7454 }, { "epoch": 8.228602981778023, "grad_norm": 0.7322404384613037, "learning_rate": 2.2356e-05, "loss": 0.0692, "step": 7455 }, { "epoch": 8.229707344008835, "grad_norm": 0.5995151996612549, "learning_rate": 2.2359e-05, "loss": 0.0983, "step": 7456 }, { "epoch": 8.230811706239647, "grad_norm": 0.4646073579788208, "learning_rate": 2.2362e-05, "loss": 0.0275, "step": 7457 }, { "epoch": 8.231916068470458, "grad_norm": 0.5000513195991516, 
"learning_rate": 2.2365000000000004e-05, "loss": 0.053, "step": 7458 }, { "epoch": 8.23302043070127, "grad_norm": 0.36170217394828796, "learning_rate": 2.2368000000000003e-05, "loss": 0.027, "step": 7459 }, { "epoch": 8.234124792932082, "grad_norm": 0.2694171071052551, "learning_rate": 2.2371e-05, "loss": 0.0159, "step": 7460 }, { "epoch": 8.235229155162893, "grad_norm": 0.5740802884101868, "learning_rate": 2.2374e-05, "loss": 0.0407, "step": 7461 }, { "epoch": 8.236333517393705, "grad_norm": 0.3125160038471222, "learning_rate": 2.2377e-05, "loss": 0.0159, "step": 7462 }, { "epoch": 8.237437879624517, "grad_norm": 0.31732919812202454, "learning_rate": 2.238e-05, "loss": 0.0152, "step": 7463 }, { "epoch": 8.23854224185533, "grad_norm": 0.287996768951416, "learning_rate": 2.2383e-05, "loss": 0.0178, "step": 7464 }, { "epoch": 8.23964660408614, "grad_norm": 0.34220007061958313, "learning_rate": 2.2386e-05, "loss": 0.0182, "step": 7465 }, { "epoch": 8.240750966316952, "grad_norm": 0.3454974889755249, "learning_rate": 2.2389e-05, "loss": 0.0129, "step": 7466 }, { "epoch": 8.241855328547764, "grad_norm": 0.49126186966896057, "learning_rate": 2.2392e-05, "loss": 0.0287, "step": 7467 }, { "epoch": 8.242959690778575, "grad_norm": 0.35976508259773254, "learning_rate": 2.2395e-05, "loss": 0.0195, "step": 7468 }, { "epoch": 8.244064053009387, "grad_norm": 0.2614459991455078, "learning_rate": 2.2398e-05, "loss": 0.015, "step": 7469 }, { "epoch": 8.2451684152402, "grad_norm": 0.4439387917518616, "learning_rate": 2.2401e-05, "loss": 0.0196, "step": 7470 }, { "epoch": 8.24627277747101, "grad_norm": 0.5251781344413757, "learning_rate": 2.2404e-05, "loss": 0.0265, "step": 7471 }, { "epoch": 8.247377139701822, "grad_norm": 0.3114047944545746, "learning_rate": 2.2407e-05, "loss": 0.0091, "step": 7472 }, { "epoch": 8.248481501932634, "grad_norm": 0.47736656665802, "learning_rate": 2.241e-05, "loss": 0.0317, "step": 7473 }, { "epoch": 8.249585864163446, "grad_norm": 0.3312668800354004, 
"learning_rate": 2.2413e-05, "loss": 0.0177, "step": 7474 }, { "epoch": 8.250690226394257, "grad_norm": 0.5312799215316772, "learning_rate": 2.2416e-05, "loss": 0.0142, "step": 7475 }, { "epoch": 8.251794588625069, "grad_norm": 0.4329008162021637, "learning_rate": 2.2419e-05, "loss": 0.0206, "step": 7476 }, { "epoch": 8.252898950855881, "grad_norm": 0.33313843607902527, "learning_rate": 2.2422e-05, "loss": 0.0168, "step": 7477 }, { "epoch": 8.254003313086692, "grad_norm": 0.424174427986145, "learning_rate": 2.2425000000000003e-05, "loss": 0.0168, "step": 7478 }, { "epoch": 8.255107675317504, "grad_norm": 0.27764999866485596, "learning_rate": 2.2428000000000003e-05, "loss": 0.0118, "step": 7479 }, { "epoch": 8.256212037548316, "grad_norm": 0.3898620009422302, "learning_rate": 2.2431000000000003e-05, "loss": 0.0156, "step": 7480 }, { "epoch": 8.257316399779128, "grad_norm": 0.6144782900810242, "learning_rate": 2.2434000000000002e-05, "loss": 0.01, "step": 7481 }, { "epoch": 8.258420762009939, "grad_norm": 1.929519772529602, "learning_rate": 2.2437000000000002e-05, "loss": 0.021, "step": 7482 }, { "epoch": 8.259525124240751, "grad_norm": 0.6862428188323975, "learning_rate": 2.2440000000000002e-05, "loss": 0.0233, "step": 7483 }, { "epoch": 8.260629486471563, "grad_norm": 0.49128955602645874, "learning_rate": 2.2443000000000002e-05, "loss": 0.0257, "step": 7484 }, { "epoch": 8.261733848702374, "grad_norm": 0.2930208146572113, "learning_rate": 2.2445999999999998e-05, "loss": 0.015, "step": 7485 }, { "epoch": 8.262838210933186, "grad_norm": 0.331321656703949, "learning_rate": 2.2448999999999998e-05, "loss": 0.0143, "step": 7486 }, { "epoch": 8.263942573163998, "grad_norm": 0.6352660655975342, "learning_rate": 2.2451999999999998e-05, "loss": 0.0221, "step": 7487 }, { "epoch": 8.26504693539481, "grad_norm": 0.5688889026641846, "learning_rate": 2.2455e-05, "loss": 0.0179, "step": 7488 }, { "epoch": 8.26615129762562, "grad_norm": 0.2914127707481384, "learning_rate": 
2.2458e-05, "loss": 0.0156, "step": 7489 }, { "epoch": 8.267255659856433, "grad_norm": 0.4650498628616333, "learning_rate": 2.2461e-05, "loss": 0.0113, "step": 7490 }, { "epoch": 8.268360022087245, "grad_norm": 0.6904448866844177, "learning_rate": 2.2464e-05, "loss": 0.0202, "step": 7491 }, { "epoch": 8.269464384318056, "grad_norm": 0.3155810832977295, "learning_rate": 2.2467e-05, "loss": 0.015, "step": 7492 }, { "epoch": 8.270568746548868, "grad_norm": 0.5247137546539307, "learning_rate": 2.247e-05, "loss": 0.0212, "step": 7493 }, { "epoch": 8.27167310877968, "grad_norm": 0.41754385828971863, "learning_rate": 2.2473e-05, "loss": 0.0251, "step": 7494 }, { "epoch": 8.27277747101049, "grad_norm": 0.33301711082458496, "learning_rate": 2.2476e-05, "loss": 0.013, "step": 7495 }, { "epoch": 8.273881833241303, "grad_norm": 0.34361642599105835, "learning_rate": 2.2479e-05, "loss": 0.0196, "step": 7496 }, { "epoch": 8.274986195472115, "grad_norm": 1.0297757387161255, "learning_rate": 2.2482e-05, "loss": 0.0296, "step": 7497 }, { "epoch": 8.276090557702927, "grad_norm": 0.9963616132736206, "learning_rate": 2.2485000000000002e-05, "loss": 0.0268, "step": 7498 }, { "epoch": 8.277194919933738, "grad_norm": 0.9012911915779114, "learning_rate": 2.2488000000000002e-05, "loss": 0.2556, "step": 7499 }, { "epoch": 8.27829928216455, "grad_norm": 0.7999826669692993, "learning_rate": 2.2491000000000002e-05, "loss": 0.2818, "step": 7500 }, { "epoch": 8.279403644395362, "grad_norm": 0.5439144968986511, "learning_rate": 2.2494000000000002e-05, "loss": 0.1377, "step": 7501 }, { "epoch": 8.280508006626173, "grad_norm": 0.5374395251274109, "learning_rate": 2.2497e-05, "loss": 0.1285, "step": 7502 }, { "epoch": 8.281612368856985, "grad_norm": 0.5655427575111389, "learning_rate": 2.25e-05, "loss": 0.1261, "step": 7503 }, { "epoch": 8.282716731087797, "grad_norm": 0.6138741374015808, "learning_rate": 2.2503e-05, "loss": 0.1026, "step": 7504 }, { "epoch": 8.283821093318608, "grad_norm": 
0.4186648428440094, "learning_rate": 2.2506e-05, "loss": 0.091, "step": 7505 }, { "epoch": 8.28492545554942, "grad_norm": 0.5635694265365601, "learning_rate": 2.2509e-05, "loss": 0.0564, "step": 7506 }, { "epoch": 8.286029817780232, "grad_norm": 0.7013324499130249, "learning_rate": 2.2512e-05, "loss": 0.0463, "step": 7507 }, { "epoch": 8.287134180011044, "grad_norm": 0.5680178999900818, "learning_rate": 2.2515e-05, "loss": 0.0394, "step": 7508 }, { "epoch": 8.288238542241855, "grad_norm": 0.3148302435874939, "learning_rate": 2.2518e-05, "loss": 0.0259, "step": 7509 }, { "epoch": 8.289342904472667, "grad_norm": 0.508184552192688, "learning_rate": 2.2521e-05, "loss": 0.0229, "step": 7510 }, { "epoch": 8.29044726670348, "grad_norm": 0.4804965853691101, "learning_rate": 2.2524e-05, "loss": 0.0243, "step": 7511 }, { "epoch": 8.29155162893429, "grad_norm": 0.43626102805137634, "learning_rate": 2.2527e-05, "loss": 0.0298, "step": 7512 }, { "epoch": 8.292655991165102, "grad_norm": 1.8920344114303589, "learning_rate": 2.253e-05, "loss": 0.0829, "step": 7513 }, { "epoch": 8.293760353395914, "grad_norm": 0.2986980676651001, "learning_rate": 2.2533e-05, "loss": 0.0236, "step": 7514 }, { "epoch": 8.294864715626726, "grad_norm": 0.19590900838375092, "learning_rate": 2.2536e-05, "loss": 0.0134, "step": 7515 }, { "epoch": 8.295969077857537, "grad_norm": 0.36871272325515747, "learning_rate": 2.2539e-05, "loss": 0.0184, "step": 7516 }, { "epoch": 8.297073440088349, "grad_norm": 0.3610565662384033, "learning_rate": 2.2542e-05, "loss": 0.0221, "step": 7517 }, { "epoch": 8.298177802319161, "grad_norm": 0.2787022292613983, "learning_rate": 2.2545e-05, "loss": 0.0159, "step": 7518 }, { "epoch": 8.299282164549972, "grad_norm": 0.3536522388458252, "learning_rate": 2.2548e-05, "loss": 0.0217, "step": 7519 }, { "epoch": 8.300386526780784, "grad_norm": 0.2952551245689392, "learning_rate": 2.2551e-05, "loss": 0.0149, "step": 7520 }, { "epoch": 8.301490889011596, "grad_norm": 
0.6325940489768982, "learning_rate": 2.2554e-05, "loss": 0.0284, "step": 7521 }, { "epoch": 8.302595251242408, "grad_norm": 0.42121484875679016, "learning_rate": 2.2557e-05, "loss": 0.0174, "step": 7522 }, { "epoch": 8.303699613473219, "grad_norm": 0.30246657133102417, "learning_rate": 2.256e-05, "loss": 0.0098, "step": 7523 }, { "epoch": 8.304803975704031, "grad_norm": 0.9013733863830566, "learning_rate": 2.2563e-05, "loss": 0.0278, "step": 7524 }, { "epoch": 8.305908337934843, "grad_norm": 0.4651840329170227, "learning_rate": 2.2566e-05, "loss": 0.0479, "step": 7525 }, { "epoch": 8.307012700165654, "grad_norm": 0.2943674921989441, "learning_rate": 2.2569e-05, "loss": 0.0124, "step": 7526 }, { "epoch": 8.308117062396466, "grad_norm": 0.5048410296440125, "learning_rate": 2.2572e-05, "loss": 0.0197, "step": 7527 }, { "epoch": 8.309221424627278, "grad_norm": 0.6835417747497559, "learning_rate": 2.2575e-05, "loss": 0.0187, "step": 7528 }, { "epoch": 8.310325786858089, "grad_norm": 0.532868504524231, "learning_rate": 2.2578000000000003e-05, "loss": 0.0125, "step": 7529 }, { "epoch": 8.3114301490889, "grad_norm": 0.41252031922340393, "learning_rate": 2.2581000000000003e-05, "loss": 0.0304, "step": 7530 }, { "epoch": 8.312534511319713, "grad_norm": 0.7144143581390381, "learning_rate": 2.2584000000000003e-05, "loss": 0.0377, "step": 7531 }, { "epoch": 8.313638873550525, "grad_norm": 0.3426561951637268, "learning_rate": 2.2587000000000002e-05, "loss": 0.0157, "step": 7532 }, { "epoch": 8.314743235781336, "grad_norm": 0.40669986605644226, "learning_rate": 2.2590000000000002e-05, "loss": 0.0195, "step": 7533 }, { "epoch": 8.315847598012148, "grad_norm": 0.8627748489379883, "learning_rate": 2.2593e-05, "loss": 0.0198, "step": 7534 }, { "epoch": 8.31695196024296, "grad_norm": 0.8287287354469299, "learning_rate": 2.2596e-05, "loss": 0.0286, "step": 7535 }, { "epoch": 8.31805632247377, "grad_norm": 0.45382827520370483, "learning_rate": 2.2598999999999998e-05, "loss": 0.0248, 
"step": 7536 }, { "epoch": 8.319160684704583, "grad_norm": 0.5706880688667297, "learning_rate": 2.2601999999999998e-05, "loss": 0.0312, "step": 7537 }, { "epoch": 8.320265046935395, "grad_norm": 0.3232603073120117, "learning_rate": 2.2604999999999998e-05, "loss": 0.021, "step": 7538 }, { "epoch": 8.321369409166207, "grad_norm": 0.43282851576805115, "learning_rate": 2.2608e-05, "loss": 0.0153, "step": 7539 }, { "epoch": 8.322473771397018, "grad_norm": 0.23669801652431488, "learning_rate": 2.2611e-05, "loss": 0.0119, "step": 7540 }, { "epoch": 8.32357813362783, "grad_norm": 0.370267778635025, "learning_rate": 2.2614e-05, "loss": 0.0307, "step": 7541 }, { "epoch": 8.324682495858642, "grad_norm": 0.30954742431640625, "learning_rate": 2.2617e-05, "loss": 0.0186, "step": 7542 }, { "epoch": 8.325786858089453, "grad_norm": 0.47211769223213196, "learning_rate": 2.262e-05, "loss": 0.025, "step": 7543 }, { "epoch": 8.326891220320265, "grad_norm": 0.8932056427001953, "learning_rate": 2.2623e-05, "loss": 0.0408, "step": 7544 }, { "epoch": 8.327995582551077, "grad_norm": 0.3637886047363281, "learning_rate": 2.2626e-05, "loss": 0.016, "step": 7545 }, { "epoch": 8.329099944781888, "grad_norm": 0.5638242959976196, "learning_rate": 2.2629e-05, "loss": 0.049, "step": 7546 }, { "epoch": 8.3302043070127, "grad_norm": 0.4429882764816284, "learning_rate": 2.2632e-05, "loss": 0.0221, "step": 7547 }, { "epoch": 8.331308669243512, "grad_norm": 0.5503640174865723, "learning_rate": 2.2635e-05, "loss": 0.0207, "step": 7548 }, { "epoch": 8.332413031474324, "grad_norm": 0.9924345016479492, "learning_rate": 2.2638000000000002e-05, "loss": 0.2645, "step": 7549 }, { "epoch": 8.333517393705135, "grad_norm": 0.8549292087554932, "learning_rate": 2.2641000000000002e-05, "loss": 0.1844, "step": 7550 }, { "epoch": 8.334621755935947, "grad_norm": 0.8831964731216431, "learning_rate": 2.2644000000000002e-05, "loss": 0.2039, "step": 7551 }, { "epoch": 8.33572611816676, "grad_norm": 0.7688378691673279, 
"learning_rate": 2.2647000000000002e-05, "loss": 0.1289, "step": 7552 }, { "epoch": 8.33683048039757, "grad_norm": 0.5941742062568665, "learning_rate": 2.265e-05, "loss": 0.1075, "step": 7553 }, { "epoch": 8.337934842628382, "grad_norm": 0.722068727016449, "learning_rate": 2.2653e-05, "loss": 0.1104, "step": 7554 }, { "epoch": 8.339039204859194, "grad_norm": 0.8537099361419678, "learning_rate": 2.2656e-05, "loss": 0.1167, "step": 7555 }, { "epoch": 8.340143567090006, "grad_norm": 0.43150219321250916, "learning_rate": 2.2659e-05, "loss": 0.0587, "step": 7556 }, { "epoch": 8.341247929320817, "grad_norm": 0.6405130624771118, "learning_rate": 2.2662e-05, "loss": 0.1038, "step": 7557 }, { "epoch": 8.342352291551629, "grad_norm": 0.407845139503479, "learning_rate": 2.2665e-05, "loss": 0.0317, "step": 7558 }, { "epoch": 8.343456653782441, "grad_norm": 0.9913428425788879, "learning_rate": 2.2668e-05, "loss": 0.0658, "step": 7559 }, { "epoch": 8.344561016013252, "grad_norm": 0.3748294413089752, "learning_rate": 2.2671e-05, "loss": 0.0301, "step": 7560 }, { "epoch": 8.345665378244064, "grad_norm": 0.3260865807533264, "learning_rate": 2.2674e-05, "loss": 0.035, "step": 7561 }, { "epoch": 8.346769740474876, "grad_norm": 0.2543301284313202, "learning_rate": 2.2677e-05, "loss": 0.0293, "step": 7562 }, { "epoch": 8.347874102705687, "grad_norm": 0.26594090461730957, "learning_rate": 2.268e-05, "loss": 0.0193, "step": 7563 }, { "epoch": 8.348978464936499, "grad_norm": 0.4625028967857361, "learning_rate": 2.2683e-05, "loss": 0.0426, "step": 7564 }, { "epoch": 8.350082827167311, "grad_norm": 0.4509432911872864, "learning_rate": 2.2686e-05, "loss": 0.0326, "step": 7565 }, { "epoch": 8.351187189398123, "grad_norm": 0.3333624601364136, "learning_rate": 2.2689e-05, "loss": 0.0226, "step": 7566 }, { "epoch": 8.352291551628934, "grad_norm": 0.21701033413410187, "learning_rate": 2.2692e-05, "loss": 0.0142, "step": 7567 }, { "epoch": 8.353395913859746, "grad_norm": 0.3157070279121399, 
"learning_rate": 2.2695e-05, "loss": 0.0217, "step": 7568 }, { "epoch": 8.354500276090558, "grad_norm": 0.23800472915172577, "learning_rate": 2.2698000000000002e-05, "loss": 0.013, "step": 7569 }, { "epoch": 8.355604638321369, "grad_norm": 0.5983233451843262, "learning_rate": 2.2701000000000002e-05, "loss": 0.0278, "step": 7570 }, { "epoch": 8.356709000552181, "grad_norm": 0.42788538336753845, "learning_rate": 2.2704e-05, "loss": 0.0311, "step": 7571 }, { "epoch": 8.357813362782993, "grad_norm": 0.32120653986930847, "learning_rate": 2.2707e-05, "loss": 0.0169, "step": 7572 }, { "epoch": 8.358917725013805, "grad_norm": 0.4578939974308014, "learning_rate": 2.271e-05, "loss": 0.0202, "step": 7573 }, { "epoch": 8.360022087244616, "grad_norm": 0.5109807848930359, "learning_rate": 2.2713e-05, "loss": 0.0152, "step": 7574 }, { "epoch": 8.361126449475428, "grad_norm": 0.4269026517868042, "learning_rate": 2.2716e-05, "loss": 0.0218, "step": 7575 }, { "epoch": 8.36223081170624, "grad_norm": 0.6413455605506897, "learning_rate": 2.2719e-05, "loss": 0.0176, "step": 7576 }, { "epoch": 8.36333517393705, "grad_norm": 0.4270622730255127, "learning_rate": 2.2722e-05, "loss": 0.0202, "step": 7577 }, { "epoch": 8.364439536167863, "grad_norm": 0.20938490331172943, "learning_rate": 2.2725e-05, "loss": 0.0083, "step": 7578 }, { "epoch": 8.365543898398675, "grad_norm": 0.32864803075790405, "learning_rate": 2.2728000000000003e-05, "loss": 0.0145, "step": 7579 }, { "epoch": 8.366648260629486, "grad_norm": 0.35874882340431213, "learning_rate": 2.2731000000000003e-05, "loss": 0.0233, "step": 7580 }, { "epoch": 8.367752622860298, "grad_norm": 0.32845816016197205, "learning_rate": 2.2734000000000003e-05, "loss": 0.0132, "step": 7581 }, { "epoch": 8.36885698509111, "grad_norm": 0.4937278926372528, "learning_rate": 2.2737000000000003e-05, "loss": 0.0306, "step": 7582 }, { "epoch": 8.369961347321922, "grad_norm": 0.6039687395095825, "learning_rate": 2.274e-05, "loss": 0.0222, "step": 7583 }, { 
"epoch": 8.371065709552733, "grad_norm": 0.16809538006782532, "learning_rate": 2.2743e-05, "loss": 0.0097, "step": 7584 }, { "epoch": 8.372170071783545, "grad_norm": 0.6969953775405884, "learning_rate": 2.2746e-05, "loss": 0.0358, "step": 7585 }, { "epoch": 8.373274434014357, "grad_norm": 0.6619057059288025, "learning_rate": 2.2749e-05, "loss": 0.0315, "step": 7586 }, { "epoch": 8.374378796245168, "grad_norm": 0.4660901129245758, "learning_rate": 2.2752e-05, "loss": 0.0269, "step": 7587 }, { "epoch": 8.37548315847598, "grad_norm": 0.5274878144264221, "learning_rate": 2.2754999999999998e-05, "loss": 0.0164, "step": 7588 }, { "epoch": 8.376587520706792, "grad_norm": 0.788445234298706, "learning_rate": 2.2758e-05, "loss": 0.0374, "step": 7589 }, { "epoch": 8.377691882937604, "grad_norm": 0.4383906126022339, "learning_rate": 2.2761e-05, "loss": 0.0177, "step": 7590 }, { "epoch": 8.378796245168415, "grad_norm": 0.3410751521587372, "learning_rate": 2.2764e-05, "loss": 0.0201, "step": 7591 }, { "epoch": 8.379900607399227, "grad_norm": 0.3766111135482788, "learning_rate": 2.2767e-05, "loss": 0.0254, "step": 7592 }, { "epoch": 8.38100496963004, "grad_norm": 0.4676887094974518, "learning_rate": 2.277e-05, "loss": 0.0302, "step": 7593 }, { "epoch": 8.38210933186085, "grad_norm": 0.29878777265548706, "learning_rate": 2.2773e-05, "loss": 0.0178, "step": 7594 }, { "epoch": 8.383213694091662, "grad_norm": 0.46991032361984253, "learning_rate": 2.2776e-05, "loss": 0.0238, "step": 7595 }, { "epoch": 8.384318056322474, "grad_norm": 0.28669628500938416, "learning_rate": 2.2779e-05, "loss": 0.0126, "step": 7596 }, { "epoch": 8.385422418553285, "grad_norm": 0.6986401677131653, "learning_rate": 2.2782e-05, "loss": 0.0194, "step": 7597 }, { "epoch": 8.386526780784097, "grad_norm": 0.46788355708122253, "learning_rate": 2.2785e-05, "loss": 0.0287, "step": 7598 }, { "epoch": 8.387631143014909, "grad_norm": 1.2529093027114868, "learning_rate": 2.2788000000000003e-05, "loss": 0.2906, "step": 
7599 }, { "epoch": 8.388735505245721, "grad_norm": 0.9729427099227905, "learning_rate": 2.2791000000000003e-05, "loss": 0.2413, "step": 7600 }, { "epoch": 8.389839867476532, "grad_norm": 0.7565017938613892, "learning_rate": 2.2794000000000002e-05, "loss": 0.2081, "step": 7601 }, { "epoch": 8.390944229707344, "grad_norm": 0.9959696531295776, "learning_rate": 2.2797000000000002e-05, "loss": 0.2207, "step": 7602 }, { "epoch": 8.392048591938156, "grad_norm": 0.8780102133750916, "learning_rate": 2.2800000000000002e-05, "loss": 0.1464, "step": 7603 }, { "epoch": 8.393152954168967, "grad_norm": 0.5737534761428833, "learning_rate": 2.2803000000000002e-05, "loss": 0.0889, "step": 7604 }, { "epoch": 8.394257316399779, "grad_norm": 0.5901390314102173, "learning_rate": 2.2806e-05, "loss": 0.0616, "step": 7605 }, { "epoch": 8.395361678630591, "grad_norm": 0.4038442373275757, "learning_rate": 2.2809e-05, "loss": 0.0498, "step": 7606 }, { "epoch": 8.396466040861403, "grad_norm": 0.42803338170051575, "learning_rate": 2.2812e-05, "loss": 0.0498, "step": 7607 }, { "epoch": 8.397570403092214, "grad_norm": 0.3078327178955078, "learning_rate": 2.2814999999999998e-05, "loss": 0.0286, "step": 7608 }, { "epoch": 8.398674765323026, "grad_norm": 0.7775486707687378, "learning_rate": 2.2818e-05, "loss": 0.0253, "step": 7609 }, { "epoch": 8.399779127553838, "grad_norm": 0.3904918134212494, "learning_rate": 2.2821e-05, "loss": 0.0365, "step": 7610 }, { "epoch": 8.400883489784649, "grad_norm": 0.7937735915184021, "learning_rate": 2.2824e-05, "loss": 0.0287, "step": 7611 }, { "epoch": 8.401987852015461, "grad_norm": 0.21268552541732788, "learning_rate": 2.2827e-05, "loss": 0.0106, "step": 7612 }, { "epoch": 8.403092214246273, "grad_norm": 0.41224661469459534, "learning_rate": 2.283e-05, "loss": 0.0257, "step": 7613 }, { "epoch": 8.404196576477084, "grad_norm": 0.2661443054676056, "learning_rate": 2.2833e-05, "loss": 0.0165, "step": 7614 }, { "epoch": 8.405300938707896, "grad_norm": 
0.368207186460495, "learning_rate": 2.2836e-05, "loss": 0.0272, "step": 7615 }, { "epoch": 8.406405300938708, "grad_norm": 0.6268436312675476, "learning_rate": 2.2839e-05, "loss": 0.0159, "step": 7616 }, { "epoch": 8.40750966316952, "grad_norm": 0.6856321096420288, "learning_rate": 2.2842e-05, "loss": 0.0193, "step": 7617 }, { "epoch": 8.40861402540033, "grad_norm": 0.3093279004096985, "learning_rate": 2.2845e-05, "loss": 0.0144, "step": 7618 }, { "epoch": 8.409718387631143, "grad_norm": 0.49836060404777527, "learning_rate": 2.2848000000000002e-05, "loss": 0.0162, "step": 7619 }, { "epoch": 8.410822749861955, "grad_norm": 0.4406260550022125, "learning_rate": 2.2851000000000002e-05, "loss": 0.0293, "step": 7620 }, { "epoch": 8.411927112092766, "grad_norm": 0.33174335956573486, "learning_rate": 2.2854000000000002e-05, "loss": 0.0235, "step": 7621 }, { "epoch": 8.413031474323578, "grad_norm": 0.5208345651626587, "learning_rate": 2.2857e-05, "loss": 0.0293, "step": 7622 }, { "epoch": 8.41413583655439, "grad_norm": 0.23266710340976715, "learning_rate": 2.286e-05, "loss": 0.0129, "step": 7623 }, { "epoch": 8.415240198785202, "grad_norm": 0.2858826518058777, "learning_rate": 2.2863e-05, "loss": 0.012, "step": 7624 }, { "epoch": 8.416344561016013, "grad_norm": 0.37338191270828247, "learning_rate": 2.2866e-05, "loss": 0.0147, "step": 7625 }, { "epoch": 8.417448923246825, "grad_norm": 0.35868698358535767, "learning_rate": 2.2869e-05, "loss": 0.0213, "step": 7626 }, { "epoch": 8.418553285477637, "grad_norm": 0.23434099555015564, "learning_rate": 2.2872e-05, "loss": 0.0133, "step": 7627 }, { "epoch": 8.419657647708448, "grad_norm": 0.23292693495750427, "learning_rate": 2.2875e-05, "loss": 0.0127, "step": 7628 }, { "epoch": 8.42076200993926, "grad_norm": 0.335406094789505, "learning_rate": 2.2878e-05, "loss": 0.0163, "step": 7629 }, { "epoch": 8.421866372170072, "grad_norm": 0.6774119138717651, "learning_rate": 2.2881000000000003e-05, "loss": 0.0219, "step": 7630 }, { "epoch": 
8.422970734400884, "grad_norm": 0.37701931595802307, "learning_rate": 2.2884000000000003e-05, "loss": 0.0141, "step": 7631 }, { "epoch": 8.424075096631695, "grad_norm": 0.4054180681705475, "learning_rate": 2.2887e-05, "loss": 0.0208, "step": 7632 }, { "epoch": 8.425179458862507, "grad_norm": 0.42729830741882324, "learning_rate": 2.289e-05, "loss": 0.0127, "step": 7633 }, { "epoch": 8.42628382109332, "grad_norm": 1.7015058994293213, "learning_rate": 2.2893e-05, "loss": 0.0327, "step": 7634 }, { "epoch": 8.42738818332413, "grad_norm": 0.46091610193252563, "learning_rate": 2.2896e-05, "loss": 0.0257, "step": 7635 }, { "epoch": 8.428492545554942, "grad_norm": 0.47555676102638245, "learning_rate": 2.2899e-05, "loss": 0.027, "step": 7636 }, { "epoch": 8.429596907785754, "grad_norm": 0.3842087984085083, "learning_rate": 2.2902e-05, "loss": 0.0132, "step": 7637 }, { "epoch": 8.430701270016565, "grad_norm": 0.6369902491569519, "learning_rate": 2.2905e-05, "loss": 0.0331, "step": 7638 }, { "epoch": 8.431805632247377, "grad_norm": 0.341418594121933, "learning_rate": 2.2907999999999998e-05, "loss": 0.0185, "step": 7639 }, { "epoch": 8.43290999447819, "grad_norm": 0.35303252935409546, "learning_rate": 2.2911e-05, "loss": 0.018, "step": 7640 }, { "epoch": 8.434014356709001, "grad_norm": 0.3843693137168884, "learning_rate": 2.2914e-05, "loss": 0.0312, "step": 7641 }, { "epoch": 8.435118718939812, "grad_norm": 0.7578052282333374, "learning_rate": 2.2917e-05, "loss": 0.0137, "step": 7642 }, { "epoch": 8.436223081170624, "grad_norm": 0.8867722749710083, "learning_rate": 2.292e-05, "loss": 0.0357, "step": 7643 }, { "epoch": 8.437327443401436, "grad_norm": 0.3642740547657013, "learning_rate": 2.2923e-05, "loss": 0.0148, "step": 7644 }, { "epoch": 8.438431805632247, "grad_norm": 0.3614395260810852, "learning_rate": 2.2926e-05, "loss": 0.0269, "step": 7645 }, { "epoch": 8.439536167863059, "grad_norm": 0.6200311183929443, "learning_rate": 2.2929e-05, "loss": 0.0251, "step": 7646 }, { 
"epoch": 8.440640530093871, "grad_norm": 0.382734477519989, "learning_rate": 2.2932e-05, "loss": 0.017, "step": 7647 }, { "epoch": 8.441744892324682, "grad_norm": 0.43734562397003174, "learning_rate": 2.2935e-05, "loss": 0.023, "step": 7648 }, { "epoch": 8.442849254555494, "grad_norm": 0.7838276028633118, "learning_rate": 2.2938e-05, "loss": 0.2776, "step": 7649 }, { "epoch": 8.443953616786306, "grad_norm": 0.9595345258712769, "learning_rate": 2.2941000000000003e-05, "loss": 0.2589, "step": 7650 }, { "epoch": 8.445057979017118, "grad_norm": 0.825056791305542, "learning_rate": 2.2944000000000003e-05, "loss": 0.178, "step": 7651 }, { "epoch": 8.446162341247929, "grad_norm": 0.5390656590461731, "learning_rate": 2.2947000000000002e-05, "loss": 0.1373, "step": 7652 }, { "epoch": 8.447266703478741, "grad_norm": 1.6294043064117432, "learning_rate": 2.2950000000000002e-05, "loss": 0.1135, "step": 7653 }, { "epoch": 8.448371065709553, "grad_norm": 0.5596446990966797, "learning_rate": 2.2953000000000002e-05, "loss": 0.1369, "step": 7654 }, { "epoch": 8.449475427940364, "grad_norm": 0.3697613477706909, "learning_rate": 2.2956000000000002e-05, "loss": 0.0562, "step": 7655 }, { "epoch": 8.450579790171176, "grad_norm": 0.555580198764801, "learning_rate": 2.2959e-05, "loss": 0.0497, "step": 7656 }, { "epoch": 8.451684152401988, "grad_norm": 0.2992810904979706, "learning_rate": 2.2961999999999998e-05, "loss": 0.0357, "step": 7657 }, { "epoch": 8.4527885146328, "grad_norm": 0.7322873473167419, "learning_rate": 2.2964999999999998e-05, "loss": 0.0692, "step": 7658 }, { "epoch": 8.45389287686361, "grad_norm": 0.3902950882911682, "learning_rate": 2.2967999999999998e-05, "loss": 0.0332, "step": 7659 }, { "epoch": 8.454997239094423, "grad_norm": 0.38339605927467346, "learning_rate": 2.2971e-05, "loss": 0.0289, "step": 7660 }, { "epoch": 8.456101601325235, "grad_norm": 0.4734453856945038, "learning_rate": 2.2974e-05, "loss": 0.0249, "step": 7661 }, { "epoch": 8.457205963556046, 
"grad_norm": 1.5537675619125366, "learning_rate": 2.2977e-05, "loss": 0.0326, "step": 7662 }, { "epoch": 8.458310325786858, "grad_norm": 0.43187418580055237, "learning_rate": 2.298e-05, "loss": 0.0299, "step": 7663 }, { "epoch": 8.45941468801767, "grad_norm": 0.5013328194618225, "learning_rate": 2.2983e-05, "loss": 0.017, "step": 7664 }, { "epoch": 8.460519050248482, "grad_norm": 0.48350846767425537, "learning_rate": 2.2986e-05, "loss": 0.0216, "step": 7665 }, { "epoch": 8.461623412479293, "grad_norm": 0.5070491433143616, "learning_rate": 2.2989e-05, "loss": 0.0227, "step": 7666 }, { "epoch": 8.462727774710105, "grad_norm": 0.2839772403240204, "learning_rate": 2.2992e-05, "loss": 0.0135, "step": 7667 }, { "epoch": 8.463832136940917, "grad_norm": 0.39069679379463196, "learning_rate": 2.2995e-05, "loss": 0.0209, "step": 7668 }, { "epoch": 8.464936499171728, "grad_norm": 0.4587925970554352, "learning_rate": 2.2998e-05, "loss": 0.0218, "step": 7669 }, { "epoch": 8.46604086140254, "grad_norm": 0.8798388242721558, "learning_rate": 2.3001000000000002e-05, "loss": 0.0424, "step": 7670 }, { "epoch": 8.467145223633352, "grad_norm": 0.3341400921344757, "learning_rate": 2.3004000000000002e-05, "loss": 0.0149, "step": 7671 }, { "epoch": 8.468249585864163, "grad_norm": 0.36301684379577637, "learning_rate": 2.3007000000000002e-05, "loss": 0.0169, "step": 7672 }, { "epoch": 8.469353948094975, "grad_norm": 0.47218120098114014, "learning_rate": 2.301e-05, "loss": 0.0179, "step": 7673 }, { "epoch": 8.470458310325787, "grad_norm": 0.3212907910346985, "learning_rate": 2.3013e-05, "loss": 0.0137, "step": 7674 }, { "epoch": 8.4715626725566, "grad_norm": 0.3551988899707794, "learning_rate": 2.3016e-05, "loss": 0.0173, "step": 7675 }, { "epoch": 8.47266703478741, "grad_norm": 0.40598419308662415, "learning_rate": 2.3019e-05, "loss": 0.0182, "step": 7676 }, { "epoch": 8.473771397018222, "grad_norm": 0.47040998935699463, "learning_rate": 2.3022e-05, "loss": 0.0258, "step": 7677 }, { "epoch": 
8.474875759249034, "grad_norm": 0.3395856022834778, "learning_rate": 2.3025e-05, "loss": 0.0172, "step": 7678 }, { "epoch": 8.475980121479845, "grad_norm": 0.4715798497200012, "learning_rate": 2.3028e-05, "loss": 0.0204, "step": 7679 }, { "epoch": 8.477084483710657, "grad_norm": 0.9925972819328308, "learning_rate": 2.3031000000000004e-05, "loss": 0.0244, "step": 7680 }, { "epoch": 8.47818884594147, "grad_norm": 0.5753077864646912, "learning_rate": 2.3034e-05, "loss": 0.0324, "step": 7681 }, { "epoch": 8.47929320817228, "grad_norm": 0.6305984854698181, "learning_rate": 2.3037e-05, "loss": 0.0277, "step": 7682 }, { "epoch": 8.480397570403092, "grad_norm": 0.3069935739040375, "learning_rate": 2.304e-05, "loss": 0.0132, "step": 7683 }, { "epoch": 8.481501932633904, "grad_norm": 0.6973605155944824, "learning_rate": 2.3043e-05, "loss": 0.0206, "step": 7684 }, { "epoch": 8.482606294864716, "grad_norm": 0.4212479293346405, "learning_rate": 2.3046e-05, "loss": 0.0243, "step": 7685 }, { "epoch": 8.483710657095527, "grad_norm": 0.5496962666511536, "learning_rate": 2.3049e-05, "loss": 0.0256, "step": 7686 }, { "epoch": 8.484815019326339, "grad_norm": 0.4705318510532379, "learning_rate": 2.3052e-05, "loss": 0.0223, "step": 7687 }, { "epoch": 8.485919381557151, "grad_norm": 0.5050894618034363, "learning_rate": 2.3055e-05, "loss": 0.024, "step": 7688 }, { "epoch": 8.487023743787962, "grad_norm": 0.49951696395874023, "learning_rate": 2.3058e-05, "loss": 0.0177, "step": 7689 }, { "epoch": 8.488128106018774, "grad_norm": 0.3139188587665558, "learning_rate": 2.3061e-05, "loss": 0.0126, "step": 7690 }, { "epoch": 8.489232468249586, "grad_norm": 0.46966642141342163, "learning_rate": 2.3064e-05, "loss": 0.0361, "step": 7691 }, { "epoch": 8.490336830480398, "grad_norm": 0.8217701315879822, "learning_rate": 2.3067e-05, "loss": 0.029, "step": 7692 }, { "epoch": 8.491441192711209, "grad_norm": 0.6992577314376831, "learning_rate": 2.307e-05, "loss": 0.0167, "step": 7693 }, { "epoch": 
8.492545554942021, "grad_norm": 0.7257513403892517, "learning_rate": 2.3073e-05, "loss": 0.0288, "step": 7694 }, { "epoch": 8.493649917172833, "grad_norm": 0.6136714816093445, "learning_rate": 2.3076e-05, "loss": 0.0354, "step": 7695 }, { "epoch": 8.494754279403644, "grad_norm": 0.5426920056343079, "learning_rate": 2.3079e-05, "loss": 0.0179, "step": 7696 }, { "epoch": 8.495858641634456, "grad_norm": 0.7254497408866882, "learning_rate": 2.3082e-05, "loss": 0.0187, "step": 7697 }, { "epoch": 8.496963003865268, "grad_norm": 0.5146791934967041, "learning_rate": 2.3085e-05, "loss": 0.0181, "step": 7698 }, { "epoch": 8.49806736609608, "grad_norm": 2.39084792137146, "learning_rate": 2.3088e-05, "loss": 0.3088, "step": 7699 }, { "epoch": 8.49917172832689, "grad_norm": 0.7576479911804199, "learning_rate": 2.3091000000000003e-05, "loss": 0.2396, "step": 7700 }, { "epoch": 8.500276090557703, "grad_norm": 1.2222055196762085, "learning_rate": 2.3094000000000003e-05, "loss": 0.2155, "step": 7701 }, { "epoch": 8.501380452788515, "grad_norm": 0.6096263527870178, "learning_rate": 2.3097000000000003e-05, "loss": 0.1378, "step": 7702 }, { "epoch": 8.502484815019326, "grad_norm": 0.8104092478752136, "learning_rate": 2.3100000000000002e-05, "loss": 0.1717, "step": 7703 }, { "epoch": 8.503589177250138, "grad_norm": 0.5022974610328674, "learning_rate": 2.3103000000000002e-05, "loss": 0.0842, "step": 7704 }, { "epoch": 8.50469353948095, "grad_norm": 0.927161455154419, "learning_rate": 2.3106000000000002e-05, "loss": 0.1155, "step": 7705 }, { "epoch": 8.50579790171176, "grad_norm": 0.8394664525985718, "learning_rate": 2.3109e-05, "loss": 0.1089, "step": 7706 }, { "epoch": 8.506902263942573, "grad_norm": 0.31455713510513306, "learning_rate": 2.3111999999999998e-05, "loss": 0.0434, "step": 7707 }, { "epoch": 8.508006626173385, "grad_norm": 0.3981918692588806, "learning_rate": 2.3114999999999998e-05, "loss": 0.0317, "step": 7708 }, { "epoch": 8.509110988404197, "grad_norm": 
0.30447885394096375, "learning_rate": 2.3117999999999998e-05, "loss": 0.0239, "step": 7709 }, { "epoch": 8.510215350635008, "grad_norm": 0.4363061487674713, "learning_rate": 2.3121e-05, "loss": 0.0272, "step": 7710 }, { "epoch": 8.51131971286582, "grad_norm": 0.39200377464294434, "learning_rate": 2.3124e-05, "loss": 0.0257, "step": 7711 }, { "epoch": 8.512424075096632, "grad_norm": 0.33719149231910706, "learning_rate": 2.3127e-05, "loss": 0.0334, "step": 7712 }, { "epoch": 8.513528437327443, "grad_norm": 0.4857787489891052, "learning_rate": 2.313e-05, "loss": 0.0238, "step": 7713 }, { "epoch": 8.514632799558255, "grad_norm": 0.3306129276752472, "learning_rate": 2.3133e-05, "loss": 0.0193, "step": 7714 }, { "epoch": 8.515737161789067, "grad_norm": 0.34220317006111145, "learning_rate": 2.3136e-05, "loss": 0.0252, "step": 7715 }, { "epoch": 8.516841524019878, "grad_norm": 0.4620317816734314, "learning_rate": 2.3139e-05, "loss": 0.0209, "step": 7716 }, { "epoch": 8.51794588625069, "grad_norm": 0.2696228325366974, "learning_rate": 2.3142e-05, "loss": 0.0167, "step": 7717 }, { "epoch": 8.519050248481502, "grad_norm": 0.2649686634540558, "learning_rate": 2.3145e-05, "loss": 0.0202, "step": 7718 }, { "epoch": 8.520154610712314, "grad_norm": 0.5977685451507568, "learning_rate": 2.3148e-05, "loss": 0.0375, "step": 7719 }, { "epoch": 8.521258972943125, "grad_norm": 0.3456847369670868, "learning_rate": 2.3151000000000002e-05, "loss": 0.0161, "step": 7720 }, { "epoch": 8.522363335173937, "grad_norm": 0.32626813650131226, "learning_rate": 2.3154000000000002e-05, "loss": 0.0183, "step": 7721 }, { "epoch": 8.52346769740475, "grad_norm": 0.38525763154029846, "learning_rate": 2.3157000000000002e-05, "loss": 0.0193, "step": 7722 }, { "epoch": 8.52457205963556, "grad_norm": 0.3399628698825836, "learning_rate": 2.3160000000000002e-05, "loss": 0.0329, "step": 7723 }, { "epoch": 8.525676421866372, "grad_norm": 0.2581356465816498, "learning_rate": 2.3163e-05, "loss": 0.0197, "step": 7724 
}, { "epoch": 8.526780784097184, "grad_norm": 0.3404172658920288, "learning_rate": 2.3166e-05, "loss": 0.0204, "step": 7725 }, { "epoch": 8.527885146327996, "grad_norm": 0.3668336570262909, "learning_rate": 2.3169e-05, "loss": 0.0162, "step": 7726 }, { "epoch": 8.528989508558807, "grad_norm": 0.41773760318756104, "learning_rate": 2.3172e-05, "loss": 0.019, "step": 7727 }, { "epoch": 8.530093870789619, "grad_norm": 0.58225417137146, "learning_rate": 2.3175e-05, "loss": 0.0139, "step": 7728 }, { "epoch": 8.531198233020431, "grad_norm": 0.5192969441413879, "learning_rate": 2.3178e-05, "loss": 0.0228, "step": 7729 }, { "epoch": 8.532302595251242, "grad_norm": 0.3222973644733429, "learning_rate": 2.3181000000000004e-05, "loss": 0.0138, "step": 7730 }, { "epoch": 8.533406957482054, "grad_norm": 0.4344801604747772, "learning_rate": 2.3184e-05, "loss": 0.0278, "step": 7731 }, { "epoch": 8.534511319712866, "grad_norm": 0.5660250186920166, "learning_rate": 2.3187e-05, "loss": 0.0284, "step": 7732 }, { "epoch": 8.535615681943678, "grad_norm": 0.29707255959510803, "learning_rate": 2.319e-05, "loss": 0.0178, "step": 7733 }, { "epoch": 8.536720044174489, "grad_norm": 1.0053291320800781, "learning_rate": 2.3193e-05, "loss": 0.0448, "step": 7734 }, { "epoch": 8.537824406405301, "grad_norm": 0.5416971445083618, "learning_rate": 2.3196e-05, "loss": 0.0221, "step": 7735 }, { "epoch": 8.538928768636113, "grad_norm": 0.23336303234100342, "learning_rate": 2.3199e-05, "loss": 0.014, "step": 7736 }, { "epoch": 8.540033130866924, "grad_norm": 0.38908880949020386, "learning_rate": 2.3202e-05, "loss": 0.0157, "step": 7737 }, { "epoch": 8.541137493097736, "grad_norm": 0.3958585858345032, "learning_rate": 2.3205e-05, "loss": 0.0331, "step": 7738 }, { "epoch": 8.542241855328548, "grad_norm": 0.5335636138916016, "learning_rate": 2.3208e-05, "loss": 0.0227, "step": 7739 }, { "epoch": 8.54334621755936, "grad_norm": 0.558034360408783, "learning_rate": 2.3211000000000002e-05, "loss": 0.03, "step": 
7740 }, { "epoch": 8.54445057979017, "grad_norm": 0.4285505414009094, "learning_rate": 2.3214000000000002e-05, "loss": 0.021, "step": 7741 }, { "epoch": 8.545554942020983, "grad_norm": 0.4687410891056061, "learning_rate": 2.3217e-05, "loss": 0.0185, "step": 7742 }, { "epoch": 8.546659304251795, "grad_norm": 0.4697797894477844, "learning_rate": 2.322e-05, "loss": 0.0253, "step": 7743 }, { "epoch": 8.547763666482606, "grad_norm": 1.7691785097122192, "learning_rate": 2.3223e-05, "loss": 0.0278, "step": 7744 }, { "epoch": 8.548868028713418, "grad_norm": 0.32424038648605347, "learning_rate": 2.3226e-05, "loss": 0.0149, "step": 7745 }, { "epoch": 8.54997239094423, "grad_norm": 0.39469170570373535, "learning_rate": 2.3229e-05, "loss": 0.0174, "step": 7746 }, { "epoch": 8.55107675317504, "grad_norm": 3.5096986293792725, "learning_rate": 2.3232e-05, "loss": 0.0273, "step": 7747 }, { "epoch": 8.552181115405853, "grad_norm": 0.49645504355430603, "learning_rate": 2.3235e-05, "loss": 0.0196, "step": 7748 }, { "epoch": 8.553285477636665, "grad_norm": 0.9719564318656921, "learning_rate": 2.3238e-05, "loss": 0.3338, "step": 7749 }, { "epoch": 8.554389839867477, "grad_norm": 0.7565978765487671, "learning_rate": 2.3241000000000003e-05, "loss": 0.2492, "step": 7750 }, { "epoch": 8.555494202098288, "grad_norm": 0.6596867442131042, "learning_rate": 2.3244000000000003e-05, "loss": 0.1706, "step": 7751 }, { "epoch": 8.5565985643291, "grad_norm": 0.5239487290382385, "learning_rate": 2.3247000000000003e-05, "loss": 0.1124, "step": 7752 }, { "epoch": 8.557702926559912, "grad_norm": 0.9131392240524292, "learning_rate": 2.3250000000000003e-05, "loss": 0.1315, "step": 7753 }, { "epoch": 8.558807288790723, "grad_norm": 0.7453876733779907, "learning_rate": 2.3253000000000003e-05, "loss": 0.0884, "step": 7754 }, { "epoch": 8.559911651021535, "grad_norm": 0.6819980144500732, "learning_rate": 2.3256e-05, "loss": 0.0769, "step": 7755 }, { "epoch": 8.561016013252347, "grad_norm": 0.7697108387947083, 
"learning_rate": 2.3259e-05, "loss": 0.1318, "step": 7756 }, { "epoch": 8.562120375483158, "grad_norm": 0.3349169194698334, "learning_rate": 2.3262e-05, "loss": 0.0727, "step": 7757 }, { "epoch": 8.56322473771397, "grad_norm": 0.1938551515340805, "learning_rate": 2.3265e-05, "loss": 0.0216, "step": 7758 }, { "epoch": 8.564329099944782, "grad_norm": 0.417432576417923, "learning_rate": 2.3267999999999998e-05, "loss": 0.0325, "step": 7759 }, { "epoch": 8.565433462175594, "grad_norm": 0.6458976864814758, "learning_rate": 2.3270999999999998e-05, "loss": 0.0289, "step": 7760 }, { "epoch": 8.566537824406405, "grad_norm": 0.7799056172370911, "learning_rate": 2.3274e-05, "loss": 0.0701, "step": 7761 }, { "epoch": 8.567642186637217, "grad_norm": 0.26026633381843567, "learning_rate": 2.3277e-05, "loss": 0.0234, "step": 7762 }, { "epoch": 8.56874654886803, "grad_norm": 0.26976048946380615, "learning_rate": 2.328e-05, "loss": 0.0204, "step": 7763 }, { "epoch": 8.56985091109884, "grad_norm": 0.40394127368927, "learning_rate": 2.3283e-05, "loss": 0.0223, "step": 7764 }, { "epoch": 8.570955273329652, "grad_norm": 0.2022237479686737, "learning_rate": 2.3286e-05, "loss": 0.0135, "step": 7765 }, { "epoch": 8.572059635560464, "grad_norm": 0.3665851354598999, "learning_rate": 2.3289e-05, "loss": 0.0227, "step": 7766 }, { "epoch": 8.573163997791276, "grad_norm": 0.21280647814273834, "learning_rate": 2.3292e-05, "loss": 0.0172, "step": 7767 }, { "epoch": 8.574268360022087, "grad_norm": 0.3062848448753357, "learning_rate": 2.3295e-05, "loss": 0.0117, "step": 7768 }, { "epoch": 8.575372722252899, "grad_norm": 0.41805458068847656, "learning_rate": 2.3298e-05, "loss": 0.0411, "step": 7769 }, { "epoch": 8.576477084483711, "grad_norm": 0.2047952264547348, "learning_rate": 2.3301e-05, "loss": 0.0148, "step": 7770 }, { "epoch": 8.577581446714522, "grad_norm": 0.6426937580108643, "learning_rate": 2.3304000000000003e-05, "loss": 0.0179, "step": 7771 }, { "epoch": 8.578685808945334, "grad_norm": 
0.35721150040626526, "learning_rate": 2.3307000000000002e-05, "loss": 0.021, "step": 7772 }, { "epoch": 8.579790171176146, "grad_norm": 0.5488400459289551, "learning_rate": 2.3310000000000002e-05, "loss": 0.0264, "step": 7773 }, { "epoch": 8.580894533406958, "grad_norm": 0.433723509311676, "learning_rate": 2.3313000000000002e-05, "loss": 0.014, "step": 7774 }, { "epoch": 8.581998895637769, "grad_norm": 0.3929094970226288, "learning_rate": 2.3316000000000002e-05, "loss": 0.0163, "step": 7775 }, { "epoch": 8.583103257868581, "grad_norm": 0.5848788022994995, "learning_rate": 2.3319e-05, "loss": 0.0209, "step": 7776 }, { "epoch": 8.584207620099393, "grad_norm": 0.3048003911972046, "learning_rate": 2.3322e-05, "loss": 0.0116, "step": 7777 }, { "epoch": 8.585311982330204, "grad_norm": 0.43730077147483826, "learning_rate": 2.3325e-05, "loss": 0.029, "step": 7778 }, { "epoch": 8.586416344561016, "grad_norm": 0.343408465385437, "learning_rate": 2.3328e-05, "loss": 0.0203, "step": 7779 }, { "epoch": 8.587520706791828, "grad_norm": 0.29657217860221863, "learning_rate": 2.3330999999999997e-05, "loss": 0.0119, "step": 7780 }, { "epoch": 8.588625069022639, "grad_norm": 0.4490777254104614, "learning_rate": 2.3334e-05, "loss": 0.0218, "step": 7781 }, { "epoch": 8.589729431253451, "grad_norm": 0.38897478580474854, "learning_rate": 2.3337e-05, "loss": 0.0212, "step": 7782 }, { "epoch": 8.590833793484263, "grad_norm": 0.47432830929756165, "learning_rate": 2.334e-05, "loss": 0.0181, "step": 7783 }, { "epoch": 8.591938155715075, "grad_norm": 0.37143072485923767, "learning_rate": 2.3343e-05, "loss": 0.016, "step": 7784 }, { "epoch": 8.593042517945886, "grad_norm": 0.5504614114761353, "learning_rate": 2.3346e-05, "loss": 0.0242, "step": 7785 }, { "epoch": 8.594146880176698, "grad_norm": 0.40346062183380127, "learning_rate": 2.3349e-05, "loss": 0.02, "step": 7786 }, { "epoch": 8.59525124240751, "grad_norm": 0.44655972719192505, "learning_rate": 2.3352e-05, "loss": 0.028, "step": 7787 }, { 
"epoch": 8.59635560463832, "grad_norm": 0.37520110607147217, "learning_rate": 2.3355e-05, "loss": 0.0101, "step": 7788 }, { "epoch": 8.597459966869133, "grad_norm": 0.3350585401058197, "learning_rate": 2.3358e-05, "loss": 0.0168, "step": 7789 }, { "epoch": 8.598564329099945, "grad_norm": 0.7615169882774353, "learning_rate": 2.3361e-05, "loss": 0.0277, "step": 7790 }, { "epoch": 8.599668691330756, "grad_norm": 0.4030831456184387, "learning_rate": 2.3364000000000002e-05, "loss": 0.022, "step": 7791 }, { "epoch": 8.600773053561568, "grad_norm": 0.468850702047348, "learning_rate": 2.3367000000000002e-05, "loss": 0.0238, "step": 7792 }, { "epoch": 8.60187741579238, "grad_norm": 1.1903655529022217, "learning_rate": 2.337e-05, "loss": 0.0579, "step": 7793 }, { "epoch": 8.602981778023192, "grad_norm": 0.4299735724925995, "learning_rate": 2.3373e-05, "loss": 0.0232, "step": 7794 }, { "epoch": 8.604086140254003, "grad_norm": 0.32511675357818604, "learning_rate": 2.3376e-05, "loss": 0.0253, "step": 7795 }, { "epoch": 8.605190502484815, "grad_norm": 0.6608052253723145, "learning_rate": 2.3379e-05, "loss": 0.0414, "step": 7796 }, { "epoch": 8.606294864715627, "grad_norm": 0.6103172302246094, "learning_rate": 2.3382e-05, "loss": 0.031, "step": 7797 }, { "epoch": 8.607399226946438, "grad_norm": 0.28647103905677795, "learning_rate": 2.3385e-05, "loss": 0.0121, "step": 7798 }, { "epoch": 8.60850358917725, "grad_norm": 1.8493295907974243, "learning_rate": 2.3388e-05, "loss": 0.4025, "step": 7799 }, { "epoch": 8.609607951408062, "grad_norm": 0.7094747424125671, "learning_rate": 2.3391e-05, "loss": 0.1624, "step": 7800 }, { "epoch": 8.610712313638874, "grad_norm": 0.8469622135162354, "learning_rate": 2.3394000000000003e-05, "loss": 0.1583, "step": 7801 }, { "epoch": 8.611816675869685, "grad_norm": 0.7027196884155273, "learning_rate": 2.3397000000000003e-05, "loss": 0.1645, "step": 7802 }, { "epoch": 8.612921038100497, "grad_norm": 0.5577940940856934, "learning_rate": 
2.3400000000000003e-05, "loss": 0.1179, "step": 7803 }, { "epoch": 8.61402540033131, "grad_norm": 0.6250942349433899, "learning_rate": 2.3403e-05, "loss": 0.1171, "step": 7804 }, { "epoch": 8.61512976256212, "grad_norm": 0.46703723073005676, "learning_rate": 2.3406e-05, "loss": 0.0831, "step": 7805 }, { "epoch": 8.616234124792932, "grad_norm": 0.7893486022949219, "learning_rate": 2.3409e-05, "loss": 0.0716, "step": 7806 }, { "epoch": 8.617338487023744, "grad_norm": 0.3900213837623596, "learning_rate": 2.3412e-05, "loss": 0.039, "step": 7807 }, { "epoch": 8.618442849254556, "grad_norm": 0.4503231644630432, "learning_rate": 2.3415e-05, "loss": 0.0437, "step": 7808 }, { "epoch": 8.619547211485367, "grad_norm": 0.38797852396965027, "learning_rate": 2.3418e-05, "loss": 0.0616, "step": 7809 }, { "epoch": 8.620651573716179, "grad_norm": 0.39609280228614807, "learning_rate": 2.3420999999999998e-05, "loss": 0.0259, "step": 7810 }, { "epoch": 8.621755935946991, "grad_norm": 0.35832124948501587, "learning_rate": 2.3424e-05, "loss": 0.0327, "step": 7811 }, { "epoch": 8.622860298177802, "grad_norm": 0.6328892111778259, "learning_rate": 2.3427e-05, "loss": 0.0284, "step": 7812 }, { "epoch": 8.623964660408614, "grad_norm": 0.39222338795661926, "learning_rate": 2.343e-05, "loss": 0.0248, "step": 7813 }, { "epoch": 8.625069022639426, "grad_norm": 0.3980836868286133, "learning_rate": 2.3433e-05, "loss": 0.022, "step": 7814 }, { "epoch": 8.626173384870237, "grad_norm": 0.3385387659072876, "learning_rate": 2.3436e-05, "loss": 0.0197, "step": 7815 }, { "epoch": 8.627277747101049, "grad_norm": 0.27200400829315186, "learning_rate": 2.3439e-05, "loss": 0.0175, "step": 7816 }, { "epoch": 8.628382109331861, "grad_norm": 0.2595116198062897, "learning_rate": 2.3442e-05, "loss": 0.0115, "step": 7817 }, { "epoch": 8.629486471562673, "grad_norm": 0.42316070199012756, "learning_rate": 2.3445e-05, "loss": 0.0252, "step": 7818 }, { "epoch": 8.630590833793484, "grad_norm": 0.4485619068145752, 
"learning_rate": 2.3448e-05, "loss": 0.0231, "step": 7819 }, { "epoch": 8.631695196024296, "grad_norm": 0.6686201095581055, "learning_rate": 2.3451e-05, "loss": 0.0404, "step": 7820 }, { "epoch": 8.632799558255108, "grad_norm": 0.6796131730079651, "learning_rate": 2.3454000000000003e-05, "loss": 0.0284, "step": 7821 }, { "epoch": 8.633903920485919, "grad_norm": 0.36189568042755127, "learning_rate": 2.3457000000000003e-05, "loss": 0.0183, "step": 7822 }, { "epoch": 8.635008282716731, "grad_norm": 0.43523097038269043, "learning_rate": 2.3460000000000002e-05, "loss": 0.0135, "step": 7823 }, { "epoch": 8.636112644947543, "grad_norm": 0.2697887718677521, "learning_rate": 2.3463000000000002e-05, "loss": 0.016, "step": 7824 }, { "epoch": 8.637217007178354, "grad_norm": 0.3866470754146576, "learning_rate": 2.3466000000000002e-05, "loss": 0.0149, "step": 7825 }, { "epoch": 8.638321369409166, "grad_norm": 0.3937479555606842, "learning_rate": 2.3469000000000002e-05, "loss": 0.0284, "step": 7826 }, { "epoch": 8.639425731639978, "grad_norm": 0.26106876134872437, "learning_rate": 2.3472e-05, "loss": 0.0205, "step": 7827 }, { "epoch": 8.64053009387079, "grad_norm": 0.7399625778198242, "learning_rate": 2.3475e-05, "loss": 0.021, "step": 7828 }, { "epoch": 8.6416344561016, "grad_norm": 0.5954234004020691, "learning_rate": 2.3477999999999998e-05, "loss": 0.0248, "step": 7829 }, { "epoch": 8.642738818332413, "grad_norm": 0.30646637082099915, "learning_rate": 2.3480999999999998e-05, "loss": 0.0206, "step": 7830 }, { "epoch": 8.643843180563225, "grad_norm": 0.3211851716041565, "learning_rate": 2.3484e-05, "loss": 0.0183, "step": 7831 }, { "epoch": 8.644947542794036, "grad_norm": 0.4215145409107208, "learning_rate": 2.3487e-05, "loss": 0.0243, "step": 7832 }, { "epoch": 8.646051905024848, "grad_norm": 0.4866343140602112, "learning_rate": 2.349e-05, "loss": 0.0188, "step": 7833 }, { "epoch": 8.64715626725566, "grad_norm": 0.48188069462776184, "learning_rate": 2.3493e-05, "loss": 0.0186, 
"step": 7834 }, { "epoch": 8.648260629486472, "grad_norm": 0.32614701986312866, "learning_rate": 2.3496e-05, "loss": 0.0236, "step": 7835 }, { "epoch": 8.649364991717283, "grad_norm": 0.6355505585670471, "learning_rate": 2.3499e-05, "loss": 0.0387, "step": 7836 }, { "epoch": 8.650469353948095, "grad_norm": 0.39296954870224, "learning_rate": 2.3502e-05, "loss": 0.0198, "step": 7837 }, { "epoch": 8.651573716178907, "grad_norm": 0.40224993228912354, "learning_rate": 2.3505e-05, "loss": 0.0225, "step": 7838 }, { "epoch": 8.652678078409718, "grad_norm": 0.5230923295021057, "learning_rate": 2.3508e-05, "loss": 0.0354, "step": 7839 }, { "epoch": 8.65378244064053, "grad_norm": 0.4059845209121704, "learning_rate": 2.3511e-05, "loss": 0.0217, "step": 7840 }, { "epoch": 8.654886802871342, "grad_norm": 0.42104312777519226, "learning_rate": 2.3514000000000002e-05, "loss": 0.0196, "step": 7841 }, { "epoch": 8.655991165102154, "grad_norm": 0.44213324785232544, "learning_rate": 2.3517000000000002e-05, "loss": 0.0204, "step": 7842 }, { "epoch": 8.657095527332965, "grad_norm": 0.3216167092323303, "learning_rate": 2.3520000000000002e-05, "loss": 0.0293, "step": 7843 }, { "epoch": 8.658199889563777, "grad_norm": 0.4868146777153015, "learning_rate": 2.3523e-05, "loss": 0.0439, "step": 7844 }, { "epoch": 8.65930425179459, "grad_norm": 0.2296445518732071, "learning_rate": 2.3526e-05, "loss": 0.0124, "step": 7845 }, { "epoch": 8.6604086140254, "grad_norm": 0.33331382274627686, "learning_rate": 2.3529e-05, "loss": 0.0177, "step": 7846 }, { "epoch": 8.661512976256212, "grad_norm": 0.42335253953933716, "learning_rate": 2.3532e-05, "loss": 0.0165, "step": 7847 }, { "epoch": 8.662617338487024, "grad_norm": 0.8531894087791443, "learning_rate": 2.3535e-05, "loss": 0.0341, "step": 7848 }, { "epoch": 8.663721700717835, "grad_norm": 0.8897702097892761, "learning_rate": 2.3538e-05, "loss": 0.2282, "step": 7849 }, { "epoch": 8.664826062948647, "grad_norm": 0.6529412865638733, "learning_rate": 
2.3541e-05, "loss": 0.2071, "step": 7850 }, { "epoch": 8.665930425179459, "grad_norm": 0.7046173810958862, "learning_rate": 2.3544000000000004e-05, "loss": 0.1812, "step": 7851 }, { "epoch": 8.667034787410271, "grad_norm": 0.7841024398803711, "learning_rate": 2.3547000000000003e-05, "loss": 0.1678, "step": 7852 }, { "epoch": 8.668139149641082, "grad_norm": 0.8805472254753113, "learning_rate": 2.3550000000000003e-05, "loss": 0.1725, "step": 7853 }, { "epoch": 8.669243511871894, "grad_norm": 0.5976255536079407, "learning_rate": 2.3553e-05, "loss": 0.1294, "step": 7854 }, { "epoch": 8.670347874102706, "grad_norm": 0.4722289443016052, "learning_rate": 2.3556e-05, "loss": 0.0711, "step": 7855 }, { "epoch": 8.671452236333517, "grad_norm": 0.37405285239219666, "learning_rate": 2.3559e-05, "loss": 0.0466, "step": 7856 }, { "epoch": 8.672556598564329, "grad_norm": 0.20011715590953827, "learning_rate": 2.3562e-05, "loss": 0.0275, "step": 7857 }, { "epoch": 8.673660960795141, "grad_norm": 0.6890009641647339, "learning_rate": 2.3565e-05, "loss": 0.0469, "step": 7858 }, { "epoch": 8.674765323025952, "grad_norm": 0.9451822638511658, "learning_rate": 2.3568e-05, "loss": 0.0587, "step": 7859 }, { "epoch": 8.675869685256764, "grad_norm": 0.27040913701057434, "learning_rate": 2.3571e-05, "loss": 0.0238, "step": 7860 }, { "epoch": 8.676974047487576, "grad_norm": 0.344146192073822, "learning_rate": 2.3574e-05, "loss": 0.0285, "step": 7861 }, { "epoch": 8.678078409718388, "grad_norm": 0.4798480272293091, "learning_rate": 2.3577e-05, "loss": 0.0586, "step": 7862 }, { "epoch": 8.679182771949199, "grad_norm": 0.2923796772956848, "learning_rate": 2.358e-05, "loss": 0.0411, "step": 7863 }, { "epoch": 8.680287134180011, "grad_norm": 0.341289222240448, "learning_rate": 2.3583e-05, "loss": 0.0194, "step": 7864 }, { "epoch": 8.681391496410823, "grad_norm": 0.2846512794494629, "learning_rate": 2.3586e-05, "loss": 0.0286, "step": 7865 }, { "epoch": 8.682495858641634, "grad_norm": 
0.3894226849079132, "learning_rate": 2.3589e-05, "loss": 0.0355, "step": 7866 }, { "epoch": 8.683600220872446, "grad_norm": 0.28606918454170227, "learning_rate": 2.3592e-05, "loss": 0.0123, "step": 7867 }, { "epoch": 8.684704583103258, "grad_norm": 0.9935289621353149, "learning_rate": 2.3595e-05, "loss": 0.015, "step": 7868 }, { "epoch": 8.68580894533407, "grad_norm": 0.4696602523326874, "learning_rate": 2.3598e-05, "loss": 0.0239, "step": 7869 }, { "epoch": 8.68691330756488, "grad_norm": 0.4218493402004242, "learning_rate": 2.3601e-05, "loss": 0.0202, "step": 7870 }, { "epoch": 8.688017669795693, "grad_norm": 0.2842554748058319, "learning_rate": 2.3604000000000003e-05, "loss": 0.0165, "step": 7871 }, { "epoch": 8.689122032026505, "grad_norm": 0.3091796636581421, "learning_rate": 2.3607000000000003e-05, "loss": 0.0197, "step": 7872 }, { "epoch": 8.690226394257316, "grad_norm": 0.38699325919151306, "learning_rate": 2.3610000000000003e-05, "loss": 0.0299, "step": 7873 }, { "epoch": 8.691330756488128, "grad_norm": 0.25698330998420715, "learning_rate": 2.3613000000000002e-05, "loss": 0.019, "step": 7874 }, { "epoch": 8.69243511871894, "grad_norm": 0.37902501225471497, "learning_rate": 2.3616000000000002e-05, "loss": 0.0173, "step": 7875 }, { "epoch": 8.693539480949752, "grad_norm": 1.0035525560379028, "learning_rate": 2.3619000000000002e-05, "loss": 0.031, "step": 7876 }, { "epoch": 8.694643843180563, "grad_norm": 0.29208439588546753, "learning_rate": 2.3622000000000002e-05, "loss": 0.012, "step": 7877 }, { "epoch": 8.695748205411375, "grad_norm": 0.4893134832382202, "learning_rate": 2.3624999999999998e-05, "loss": 0.02, "step": 7878 }, { "epoch": 8.696852567642187, "grad_norm": 0.45620235800743103, "learning_rate": 2.3627999999999998e-05, "loss": 0.0201, "step": 7879 }, { "epoch": 8.697956929872998, "grad_norm": 0.3663676977157593, "learning_rate": 2.3630999999999998e-05, "loss": 0.0216, "step": 7880 }, { "epoch": 8.69906129210381, "grad_norm": 0.4150615632534027, 
"learning_rate": 2.3633999999999998e-05, "loss": 0.0175, "step": 7881 }, { "epoch": 8.700165654334622, "grad_norm": 0.42951154708862305, "learning_rate": 2.3637e-05, "loss": 0.0261, "step": 7882 }, { "epoch": 8.701270016565433, "grad_norm": 0.7052747011184692, "learning_rate": 2.364e-05, "loss": 0.0183, "step": 7883 }, { "epoch": 8.702374378796245, "grad_norm": 0.3343791365623474, "learning_rate": 2.3643e-05, "loss": 0.0203, "step": 7884 }, { "epoch": 8.703478741027057, "grad_norm": 0.3285515010356903, "learning_rate": 2.3646e-05, "loss": 0.0237, "step": 7885 }, { "epoch": 8.70458310325787, "grad_norm": 0.6674007773399353, "learning_rate": 2.3649e-05, "loss": 0.0199, "step": 7886 }, { "epoch": 8.70568746548868, "grad_norm": 0.385847806930542, "learning_rate": 2.3652e-05, "loss": 0.018, "step": 7887 }, { "epoch": 8.706791827719492, "grad_norm": 0.3115922808647156, "learning_rate": 2.3655e-05, "loss": 0.0166, "step": 7888 }, { "epoch": 8.707896189950304, "grad_norm": 0.6960015296936035, "learning_rate": 2.3658e-05, "loss": 0.0238, "step": 7889 }, { "epoch": 8.709000552181115, "grad_norm": 1.351116418838501, "learning_rate": 2.3661e-05, "loss": 0.0204, "step": 7890 }, { "epoch": 8.710104914411927, "grad_norm": 0.6874334812164307, "learning_rate": 2.3664e-05, "loss": 0.0227, "step": 7891 }, { "epoch": 8.71120927664274, "grad_norm": 0.4821278750896454, "learning_rate": 2.3667000000000002e-05, "loss": 0.0282, "step": 7892 }, { "epoch": 8.71231363887355, "grad_norm": 0.7920992970466614, "learning_rate": 2.3670000000000002e-05, "loss": 0.0291, "step": 7893 }, { "epoch": 8.713418001104362, "grad_norm": 0.5526517629623413, "learning_rate": 2.3673000000000002e-05, "loss": 0.0355, "step": 7894 }, { "epoch": 8.714522363335174, "grad_norm": 1.07856023311615, "learning_rate": 2.3676e-05, "loss": 0.0224, "step": 7895 }, { "epoch": 8.715626725565986, "grad_norm": 0.5246884226799011, "learning_rate": 2.3679e-05, "loss": 0.0245, "step": 7896 }, { "epoch": 8.716731087796797, 
"grad_norm": 0.4530845284461975, "learning_rate": 2.3682e-05, "loss": 0.0319, "step": 7897 }, { "epoch": 8.717835450027609, "grad_norm": 1.7186778783798218, "learning_rate": 2.3685e-05, "loss": 0.0385, "step": 7898 }, { "epoch": 8.718939812258421, "grad_norm": 0.6551305055618286, "learning_rate": 2.3688e-05, "loss": 0.2417, "step": 7899 }, { "epoch": 8.720044174489232, "grad_norm": 0.7114464044570923, "learning_rate": 2.3691e-05, "loss": 0.2119, "step": 7900 }, { "epoch": 8.721148536720044, "grad_norm": 0.9966531991958618, "learning_rate": 2.3694e-05, "loss": 0.1583, "step": 7901 }, { "epoch": 8.722252898950856, "grad_norm": 0.7749089002609253, "learning_rate": 2.3697000000000004e-05, "loss": 0.1714, "step": 7902 }, { "epoch": 8.723357261181668, "grad_norm": 0.5612877011299133, "learning_rate": 2.37e-05, "loss": 0.1274, "step": 7903 }, { "epoch": 8.724461623412479, "grad_norm": 0.526383638381958, "learning_rate": 2.3703e-05, "loss": 0.1211, "step": 7904 }, { "epoch": 8.725565985643291, "grad_norm": 0.598971962928772, "learning_rate": 2.3706e-05, "loss": 0.0883, "step": 7905 }, { "epoch": 8.726670347874103, "grad_norm": 0.46779248118400574, "learning_rate": 2.3709e-05, "loss": 0.0527, "step": 7906 }, { "epoch": 8.727774710104914, "grad_norm": 0.2856675386428833, "learning_rate": 2.3712e-05, "loss": 0.0285, "step": 7907 }, { "epoch": 8.728879072335726, "grad_norm": 0.4213119149208069, "learning_rate": 2.3715e-05, "loss": 0.0402, "step": 7908 }, { "epoch": 8.729983434566538, "grad_norm": 0.2889913320541382, "learning_rate": 2.3718e-05, "loss": 0.0283, "step": 7909 }, { "epoch": 8.73108779679735, "grad_norm": 0.3878883123397827, "learning_rate": 2.3721e-05, "loss": 0.0221, "step": 7910 }, { "epoch": 8.73219215902816, "grad_norm": 0.49197113513946533, "learning_rate": 2.3724e-05, "loss": 0.0348, "step": 7911 }, { "epoch": 8.733296521258973, "grad_norm": 0.48387274146080017, "learning_rate": 2.3727000000000002e-05, "loss": 0.0311, "step": 7912 }, { "epoch": 
8.734400883489785, "grad_norm": 0.5891771912574768, "learning_rate": 2.373e-05, "loss": 0.0412, "step": 7913 }, { "epoch": 8.735505245720596, "grad_norm": 0.3072001338005066, "learning_rate": 2.3733e-05, "loss": 0.0196, "step": 7914 }, { "epoch": 8.736609607951408, "grad_norm": 0.2932295799255371, "learning_rate": 2.3736e-05, "loss": 0.0179, "step": 7915 }, { "epoch": 8.73771397018222, "grad_norm": 0.27717867493629456, "learning_rate": 2.3739e-05, "loss": 0.0183, "step": 7916 }, { "epoch": 8.738818332413032, "grad_norm": 0.3253791928291321, "learning_rate": 2.3742e-05, "loss": 0.0195, "step": 7917 }, { "epoch": 8.739922694643843, "grad_norm": 0.3793063759803772, "learning_rate": 2.3745e-05, "loss": 0.0191, "step": 7918 }, { "epoch": 8.741027056874655, "grad_norm": 0.3976621925830841, "learning_rate": 2.3748e-05, "loss": 0.0274, "step": 7919 }, { "epoch": 8.742131419105467, "grad_norm": 0.2900301516056061, "learning_rate": 2.3751e-05, "loss": 0.0141, "step": 7920 }, { "epoch": 8.743235781336278, "grad_norm": 0.2643390893936157, "learning_rate": 2.3754e-05, "loss": 0.0208, "step": 7921 }, { "epoch": 8.74434014356709, "grad_norm": 0.21679243445396423, "learning_rate": 2.3757000000000003e-05, "loss": 0.0154, "step": 7922 }, { "epoch": 8.745444505797902, "grad_norm": 0.4244755804538727, "learning_rate": 2.3760000000000003e-05, "loss": 0.0206, "step": 7923 }, { "epoch": 8.746548868028713, "grad_norm": 0.1930762678384781, "learning_rate": 2.3763000000000003e-05, "loss": 0.0082, "step": 7924 }, { "epoch": 8.747653230259525, "grad_norm": 0.3042944669723511, "learning_rate": 2.3766000000000003e-05, "loss": 0.0171, "step": 7925 }, { "epoch": 8.748757592490337, "grad_norm": 0.36601120233535767, "learning_rate": 2.3769000000000002e-05, "loss": 0.0228, "step": 7926 }, { "epoch": 8.74986195472115, "grad_norm": 0.47851571440696716, "learning_rate": 2.3772e-05, "loss": 0.0258, "step": 7927 }, { "epoch": 8.75096631695196, "grad_norm": 0.29941314458847046, "learning_rate": 
2.3775e-05, "loss": 0.0155, "step": 7928 }, { "epoch": 8.752070679182772, "grad_norm": 0.3821336328983307, "learning_rate": 2.3778e-05, "loss": 0.0232, "step": 7929 }, { "epoch": 8.753175041413584, "grad_norm": 0.4004960060119629, "learning_rate": 2.3780999999999998e-05, "loss": 0.0249, "step": 7930 }, { "epoch": 8.754279403644395, "grad_norm": 0.5637465715408325, "learning_rate": 2.3783999999999998e-05, "loss": 0.0228, "step": 7931 }, { "epoch": 8.755383765875207, "grad_norm": 0.40771082043647766, "learning_rate": 2.3787e-05, "loss": 0.0183, "step": 7932 }, { "epoch": 8.75648812810602, "grad_norm": 0.2947099506855011, "learning_rate": 2.379e-05, "loss": 0.0101, "step": 7933 }, { "epoch": 8.75759249033683, "grad_norm": 0.554766058921814, "learning_rate": 2.3793e-05, "loss": 0.0199, "step": 7934 }, { "epoch": 8.758696852567642, "grad_norm": 0.3454049229621887, "learning_rate": 2.3796e-05, "loss": 0.0182, "step": 7935 }, { "epoch": 8.759801214798454, "grad_norm": 0.5713751912117004, "learning_rate": 2.3799e-05, "loss": 0.0319, "step": 7936 }, { "epoch": 8.760905577029266, "grad_norm": 0.281287282705307, "learning_rate": 2.3802e-05, "loss": 0.0161, "step": 7937 }, { "epoch": 8.762009939260077, "grad_norm": 0.21056202054023743, "learning_rate": 2.3805e-05, "loss": 0.0108, "step": 7938 }, { "epoch": 8.763114301490889, "grad_norm": 0.20345176756381989, "learning_rate": 2.3808e-05, "loss": 0.0096, "step": 7939 }, { "epoch": 8.764218663721701, "grad_norm": 0.5506344437599182, "learning_rate": 2.3811e-05, "loss": 0.0175, "step": 7940 }, { "epoch": 8.765323025952512, "grad_norm": 0.47052502632141113, "learning_rate": 2.3814e-05, "loss": 0.0225, "step": 7941 }, { "epoch": 8.766427388183324, "grad_norm": 0.44245588779449463, "learning_rate": 2.3817000000000003e-05, "loss": 0.0149, "step": 7942 }, { "epoch": 8.767531750414136, "grad_norm": 0.9147912859916687, "learning_rate": 2.3820000000000002e-05, "loss": 0.0254, "step": 7943 }, { "epoch": 8.768636112644948, "grad_norm": 
0.42445462942123413, "learning_rate": 2.3823000000000002e-05, "loss": 0.0173, "step": 7944 }, { "epoch": 8.769740474875759, "grad_norm": 0.44239360094070435, "learning_rate": 2.3826000000000002e-05, "loss": 0.0254, "step": 7945 }, { "epoch": 8.770844837106571, "grad_norm": 0.350267618894577, "learning_rate": 2.3829000000000002e-05, "loss": 0.0228, "step": 7946 }, { "epoch": 8.771949199337383, "grad_norm": 0.24156662821769714, "learning_rate": 2.3832e-05, "loss": 0.0138, "step": 7947 }, { "epoch": 8.773053561568194, "grad_norm": 0.5060911178588867, "learning_rate": 2.3835e-05, "loss": 0.0124, "step": 7948 }, { "epoch": 8.774157923799006, "grad_norm": 0.9303985834121704, "learning_rate": 2.3838e-05, "loss": 0.276, "step": 7949 }, { "epoch": 8.775262286029818, "grad_norm": 0.883654773235321, "learning_rate": 2.3841e-05, "loss": 0.2471, "step": 7950 }, { "epoch": 8.77636664826063, "grad_norm": 0.6075789928436279, "learning_rate": 2.3844e-05, "loss": 0.1622, "step": 7951 }, { "epoch": 8.77747101049144, "grad_norm": 1.2702564001083374, "learning_rate": 2.3847e-05, "loss": 0.1467, "step": 7952 }, { "epoch": 8.778575372722253, "grad_norm": 0.9011539220809937, "learning_rate": 2.385e-05, "loss": 0.1467, "step": 7953 }, { "epoch": 8.779679734953065, "grad_norm": 0.8218197226524353, "learning_rate": 2.3853e-05, "loss": 0.1427, "step": 7954 }, { "epoch": 8.780784097183876, "grad_norm": 0.8097694516181946, "learning_rate": 2.3856e-05, "loss": 0.0788, "step": 7955 }, { "epoch": 8.781888459414688, "grad_norm": 0.37033629417419434, "learning_rate": 2.3859e-05, "loss": 0.0496, "step": 7956 }, { "epoch": 8.7829928216455, "grad_norm": 0.5022647380828857, "learning_rate": 2.3862e-05, "loss": 0.0861, "step": 7957 }, { "epoch": 8.78409718387631, "grad_norm": 0.3919115364551544, "learning_rate": 2.3865e-05, "loss": 0.0541, "step": 7958 }, { "epoch": 8.785201546107123, "grad_norm": 0.4381024241447449, "learning_rate": 2.3868e-05, "loss": 0.0402, "step": 7959 }, { "epoch": 
8.786305908337935, "grad_norm": 0.4202248752117157, "learning_rate": 2.3871e-05, "loss": 0.0207, "step": 7960 }, { "epoch": 8.787410270568747, "grad_norm": 0.4930441975593567, "learning_rate": 2.3874e-05, "loss": 0.0354, "step": 7961 }, { "epoch": 8.788514632799558, "grad_norm": 0.282260924577713, "learning_rate": 2.3877000000000002e-05, "loss": 0.0203, "step": 7962 }, { "epoch": 8.78961899503037, "grad_norm": 0.5193387866020203, "learning_rate": 2.3880000000000002e-05, "loss": 0.0196, "step": 7963 }, { "epoch": 8.790723357261182, "grad_norm": 0.5927752256393433, "learning_rate": 2.3883e-05, "loss": 0.0205, "step": 7964 }, { "epoch": 8.791827719491993, "grad_norm": 0.3437518775463104, "learning_rate": 2.3886e-05, "loss": 0.0243, "step": 7965 }, { "epoch": 8.792932081722805, "grad_norm": 0.32967695593833923, "learning_rate": 2.3889e-05, "loss": 0.0209, "step": 7966 }, { "epoch": 8.794036443953617, "grad_norm": 0.736041784286499, "learning_rate": 2.3892e-05, "loss": 0.0253, "step": 7967 }, { "epoch": 8.795140806184428, "grad_norm": 0.4030004143714905, "learning_rate": 2.3895e-05, "loss": 0.0187, "step": 7968 }, { "epoch": 8.79624516841524, "grad_norm": 0.2513522505760193, "learning_rate": 2.3898e-05, "loss": 0.0162, "step": 7969 }, { "epoch": 8.797349530646052, "grad_norm": 0.4036809206008911, "learning_rate": 2.3901e-05, "loss": 0.0108, "step": 7970 }, { "epoch": 8.798453892876864, "grad_norm": 0.37107914686203003, "learning_rate": 2.3904e-05, "loss": 0.0308, "step": 7971 }, { "epoch": 8.799558255107675, "grad_norm": 0.28962504863739014, "learning_rate": 2.3907000000000003e-05, "loss": 0.0161, "step": 7972 }, { "epoch": 8.800662617338487, "grad_norm": 0.7426956295967102, "learning_rate": 2.3910000000000003e-05, "loss": 0.0554, "step": 7973 }, { "epoch": 8.8017669795693, "grad_norm": 0.26691046357154846, "learning_rate": 2.3913000000000003e-05, "loss": 0.0132, "step": 7974 }, { "epoch": 8.80287134180011, "grad_norm": 1.7917665243148804, "learning_rate": 
2.3916000000000003e-05, "loss": 0.0245, "step": 7975 }, { "epoch": 8.803975704030922, "grad_norm": 0.5422857403755188, "learning_rate": 2.3919e-05, "loss": 0.0173, "step": 7976 }, { "epoch": 8.805080066261734, "grad_norm": 0.8869930505752563, "learning_rate": 2.3922e-05, "loss": 0.0298, "step": 7977 }, { "epoch": 8.806184428492546, "grad_norm": 0.25528484582901, "learning_rate": 2.3925e-05, "loss": 0.0114, "step": 7978 }, { "epoch": 8.807288790723357, "grad_norm": 0.2881971001625061, "learning_rate": 2.3928e-05, "loss": 0.0129, "step": 7979 }, { "epoch": 8.808393152954169, "grad_norm": 0.3857607841491699, "learning_rate": 2.3931e-05, "loss": 0.0189, "step": 7980 }, { "epoch": 8.809497515184981, "grad_norm": 0.39504164457321167, "learning_rate": 2.3933999999999998e-05, "loss": 0.0162, "step": 7981 }, { "epoch": 8.810601877415792, "grad_norm": 0.3469029366970062, "learning_rate": 2.3937e-05, "loss": 0.024, "step": 7982 }, { "epoch": 8.811706239646604, "grad_norm": 0.5849036574363708, "learning_rate": 2.394e-05, "loss": 0.0279, "step": 7983 }, { "epoch": 8.812810601877416, "grad_norm": 0.4165209233760834, "learning_rate": 2.3943e-05, "loss": 0.0167, "step": 7984 }, { "epoch": 8.813914964108228, "grad_norm": 0.32725319266319275, "learning_rate": 2.3946e-05, "loss": 0.0251, "step": 7985 }, { "epoch": 8.815019326339039, "grad_norm": 0.2243916094303131, "learning_rate": 2.3949e-05, "loss": 0.0143, "step": 7986 }, { "epoch": 8.816123688569851, "grad_norm": 0.44232508540153503, "learning_rate": 2.3952e-05, "loss": 0.0202, "step": 7987 }, { "epoch": 8.817228050800663, "grad_norm": 0.46011731028556824, "learning_rate": 2.3955e-05, "loss": 0.0157, "step": 7988 }, { "epoch": 8.818332413031474, "grad_norm": 0.3871176242828369, "learning_rate": 2.3958e-05, "loss": 0.0215, "step": 7989 }, { "epoch": 8.819436775262286, "grad_norm": 0.4474349021911621, "learning_rate": 2.3961e-05, "loss": 0.0202, "step": 7990 }, { "epoch": 8.820541137493098, "grad_norm": 0.7721408009529114, 
"learning_rate": 2.3964e-05, "loss": 0.0227, "step": 7991 }, { "epoch": 8.821645499723909, "grad_norm": 0.3829381763935089, "learning_rate": 2.3967000000000003e-05, "loss": 0.016, "step": 7992 }, { "epoch": 8.82274986195472, "grad_norm": 0.6232835650444031, "learning_rate": 2.3970000000000003e-05, "loss": 0.0244, "step": 7993 }, { "epoch": 8.823854224185533, "grad_norm": 0.4036944806575775, "learning_rate": 2.3973000000000002e-05, "loss": 0.0239, "step": 7994 }, { "epoch": 8.824958586416345, "grad_norm": 0.5505984425544739, "learning_rate": 2.3976000000000002e-05, "loss": 0.016, "step": 7995 }, { "epoch": 8.826062948647156, "grad_norm": 0.3975558280944824, "learning_rate": 2.3979000000000002e-05, "loss": 0.0259, "step": 7996 }, { "epoch": 8.827167310877968, "grad_norm": 0.5515516400337219, "learning_rate": 2.3982000000000002e-05, "loss": 0.0369, "step": 7997 }, { "epoch": 8.82827167310878, "grad_norm": 0.5794392824172974, "learning_rate": 2.3985e-05, "loss": 0.0264, "step": 7998 }, { "epoch": 8.82937603533959, "grad_norm": 2.552696943283081, "learning_rate": 2.3988e-05, "loss": 0.2833, "step": 7999 }, { "epoch": 8.830480397570403, "grad_norm": 1.003483772277832, "learning_rate": 2.3991e-05, "loss": 0.2232, "step": 8000 }, { "epoch": 8.830480397570403, "eval_cer": 0.11667573943023045, "eval_loss": 0.3414665162563324, "eval_runtime": 15.7585, "eval_samples_per_second": 19.291, "eval_steps_per_second": 0.635, "eval_wer": 0.40138142747505756, "step": 8000 }, { "epoch": 8.831584759801215, "grad_norm": 0.5644607543945312, "learning_rate": 2.3993999999999998e-05, "loss": 0.1259, "step": 8001 }, { "epoch": 8.832689122032026, "grad_norm": 1.0008114576339722, "learning_rate": 2.3997e-05, "loss": 0.2285, "step": 8002 }, { "epoch": 8.833793484262838, "grad_norm": 0.5509925484657288, "learning_rate": 2.4e-05, "loss": 0.0964, "step": 8003 }, { "epoch": 8.83489784649365, "grad_norm": 0.5951465368270874, "learning_rate": 2.4003e-05, "loss": 0.1482, "step": 8004 }, { "epoch": 
8.836002208724462, "grad_norm": 0.5180028080940247, "learning_rate": 2.4006e-05, "loss": 0.1022, "step": 8005 }, { "epoch": 8.837106570955273, "grad_norm": 1.0057094097137451, "learning_rate": 2.4009e-05, "loss": 0.1071, "step": 8006 }, { "epoch": 8.838210933186085, "grad_norm": 0.3535504937171936, "learning_rate": 2.4012e-05, "loss": 0.0395, "step": 8007 }, { "epoch": 8.839315295416897, "grad_norm": 0.3247973322868347, "learning_rate": 2.4015e-05, "loss": 0.0351, "step": 8008 }, { "epoch": 8.840419657647708, "grad_norm": 0.35682809352874756, "learning_rate": 2.4018e-05, "loss": 0.0434, "step": 8009 }, { "epoch": 8.84152401987852, "grad_norm": 0.3800238370895386, "learning_rate": 2.4021e-05, "loss": 0.0222, "step": 8010 }, { "epoch": 8.842628382109332, "grad_norm": 0.5829501748085022, "learning_rate": 2.4024e-05, "loss": 0.0466, "step": 8011 }, { "epoch": 8.843732744340144, "grad_norm": 0.4766485393047333, "learning_rate": 2.4027e-05, "loss": 0.017, "step": 8012 }, { "epoch": 8.844837106570955, "grad_norm": 0.5202329754829407, "learning_rate": 2.4030000000000002e-05, "loss": 0.0842, "step": 8013 }, { "epoch": 8.845941468801767, "grad_norm": 0.24793097376823425, "learning_rate": 2.4033000000000002e-05, "loss": 0.0219, "step": 8014 }, { "epoch": 8.84704583103258, "grad_norm": 0.13450689613819122, "learning_rate": 2.4036e-05, "loss": 0.0062, "step": 8015 }, { "epoch": 8.84815019326339, "grad_norm": 0.34034663438796997, "learning_rate": 2.4039e-05, "loss": 0.0184, "step": 8016 }, { "epoch": 8.849254555494202, "grad_norm": 0.27607035636901855, "learning_rate": 2.4042e-05, "loss": 0.0288, "step": 8017 }, { "epoch": 8.850358917725014, "grad_norm": 0.33240175247192383, "learning_rate": 2.4045e-05, "loss": 0.0151, "step": 8018 }, { "epoch": 8.851463279955826, "grad_norm": 0.3626667261123657, "learning_rate": 2.4048e-05, "loss": 0.0139, "step": 8019 }, { "epoch": 8.852567642186637, "grad_norm": 0.3238058090209961, "learning_rate": 2.4051e-05, "loss": 0.017, "step": 8020 }, { 
"epoch": 8.853672004417449, "grad_norm": 0.4928475022315979, "learning_rate": 2.4054e-05, "loss": 0.021, "step": 8021 }, { "epoch": 8.854776366648261, "grad_norm": 0.34626147150993347, "learning_rate": 2.4057e-05, "loss": 0.0201, "step": 8022 }, { "epoch": 8.855880728879072, "grad_norm": 0.49001872539520264, "learning_rate": 2.4060000000000003e-05, "loss": 0.0227, "step": 8023 }, { "epoch": 8.856985091109884, "grad_norm": 0.3713516294956207, "learning_rate": 2.4063000000000003e-05, "loss": 0.0139, "step": 8024 }, { "epoch": 8.858089453340696, "grad_norm": 1.0737411975860596, "learning_rate": 2.4066000000000003e-05, "loss": 0.0298, "step": 8025 }, { "epoch": 8.859193815571507, "grad_norm": 0.3435867130756378, "learning_rate": 2.4069e-05, "loss": 0.0195, "step": 8026 }, { "epoch": 8.860298177802319, "grad_norm": 0.7258402109146118, "learning_rate": 2.4072e-05, "loss": 0.0302, "step": 8027 }, { "epoch": 8.861402540033131, "grad_norm": 0.38094741106033325, "learning_rate": 2.4075e-05, "loss": 0.0179, "step": 8028 }, { "epoch": 8.862506902263943, "grad_norm": 0.3504863381385803, "learning_rate": 2.4078e-05, "loss": 0.011, "step": 8029 }, { "epoch": 8.863611264494754, "grad_norm": 1.0729825496673584, "learning_rate": 2.4081e-05, "loss": 0.0236, "step": 8030 }, { "epoch": 8.864715626725566, "grad_norm": 0.3945605158805847, "learning_rate": 2.4084e-05, "loss": 0.0235, "step": 8031 }, { "epoch": 8.865819988956378, "grad_norm": 0.35456159710884094, "learning_rate": 2.4086999999999998e-05, "loss": 0.017, "step": 8032 }, { "epoch": 8.866924351187189, "grad_norm": 0.6110331416130066, "learning_rate": 2.409e-05, "loss": 0.0223, "step": 8033 }, { "epoch": 8.868028713418001, "grad_norm": 0.7516072988510132, "learning_rate": 2.4093e-05, "loss": 0.0252, "step": 8034 }, { "epoch": 8.869133075648813, "grad_norm": 0.4718743562698364, "learning_rate": 2.4096e-05, "loss": 0.0179, "step": 8035 }, { "epoch": 8.870237437879624, "grad_norm": 0.3605293929576874, "learning_rate": 2.4099e-05, 
"loss": 0.0199, "step": 8036 }, { "epoch": 8.871341800110436, "grad_norm": 0.4863714575767517, "learning_rate": 2.4102e-05, "loss": 0.0266, "step": 8037 }, { "epoch": 8.872446162341248, "grad_norm": 0.6829241514205933, "learning_rate": 2.4105e-05, "loss": 0.0298, "step": 8038 }, { "epoch": 8.87355052457206, "grad_norm": 0.9003987312316895, "learning_rate": 2.4108e-05, "loss": 0.0216, "step": 8039 }, { "epoch": 8.87465488680287, "grad_norm": 0.32943782210350037, "learning_rate": 2.4111e-05, "loss": 0.0221, "step": 8040 }, { "epoch": 8.875759249033683, "grad_norm": 0.3269975483417511, "learning_rate": 2.4114e-05, "loss": 0.0143, "step": 8041 }, { "epoch": 8.876863611264495, "grad_norm": 0.6813962459564209, "learning_rate": 2.4117e-05, "loss": 0.0391, "step": 8042 }, { "epoch": 8.877967973495306, "grad_norm": 0.4880363345146179, "learning_rate": 2.4120000000000003e-05, "loss": 0.0165, "step": 8043 }, { "epoch": 8.879072335726118, "grad_norm": 0.528015673160553, "learning_rate": 2.4123000000000003e-05, "loss": 0.0236, "step": 8044 }, { "epoch": 8.88017669795693, "grad_norm": 0.6333324909210205, "learning_rate": 2.4126000000000002e-05, "loss": 0.0307, "step": 8045 }, { "epoch": 8.881281060187742, "grad_norm": 0.9812113642692566, "learning_rate": 2.4129000000000002e-05, "loss": 0.0458, "step": 8046 }, { "epoch": 8.882385422418553, "grad_norm": 1.1082282066345215, "learning_rate": 2.4132000000000002e-05, "loss": 0.0561, "step": 8047 }, { "epoch": 8.883489784649365, "grad_norm": 0.5464839339256287, "learning_rate": 2.4135000000000002e-05, "loss": 0.0215, "step": 8048 }, { "epoch": 8.884594146880177, "grad_norm": 1.7661547660827637, "learning_rate": 2.4138e-05, "loss": 0.2527, "step": 8049 }, { "epoch": 8.885698509110988, "grad_norm": 0.7577903866767883, "learning_rate": 2.4140999999999998e-05, "loss": 0.2286, "step": 8050 }, { "epoch": 8.8868028713418, "grad_norm": 0.7328394651412964, "learning_rate": 2.4143999999999998e-05, "loss": 0.1984, "step": 8051 }, { "epoch": 
8.887907233572612, "grad_norm": 1.4087066650390625, "learning_rate": 2.4146999999999998e-05, "loss": 0.1777, "step": 8052 }, { "epoch": 8.889011595803424, "grad_norm": 1.0271730422973633, "learning_rate": 2.415e-05, "loss": 0.1851, "step": 8053 }, { "epoch": 8.890115958034235, "grad_norm": 0.6381086111068726, "learning_rate": 2.4153e-05, "loss": 0.1322, "step": 8054 }, { "epoch": 8.891220320265047, "grad_norm": 0.6748161911964417, "learning_rate": 2.4156e-05, "loss": 0.0994, "step": 8055 }, { "epoch": 8.89232468249586, "grad_norm": 1.0503920316696167, "learning_rate": 2.4159e-05, "loss": 0.1545, "step": 8056 }, { "epoch": 8.89342904472667, "grad_norm": 2.031144380569458, "learning_rate": 2.4162e-05, "loss": 0.0383, "step": 8057 }, { "epoch": 8.894533406957482, "grad_norm": 0.6393496990203857, "learning_rate": 2.4165e-05, "loss": 0.1006, "step": 8058 }, { "epoch": 8.895637769188294, "grad_norm": 0.48227259516716003, "learning_rate": 2.4168e-05, "loss": 0.0304, "step": 8059 }, { "epoch": 8.896742131419105, "grad_norm": 0.3388071358203888, "learning_rate": 2.4171e-05, "loss": 0.0409, "step": 8060 }, { "epoch": 8.897846493649917, "grad_norm": 0.5261691212654114, "learning_rate": 2.4174e-05, "loss": 0.0397, "step": 8061 }, { "epoch": 8.898950855880729, "grad_norm": 0.47070494294166565, "learning_rate": 2.4177e-05, "loss": 0.0243, "step": 8062 }, { "epoch": 8.900055218111541, "grad_norm": 0.3108046352863312, "learning_rate": 2.4180000000000002e-05, "loss": 0.0227, "step": 8063 }, { "epoch": 8.901159580342352, "grad_norm": 0.2743915915489197, "learning_rate": 2.4183000000000002e-05, "loss": 0.0299, "step": 8064 }, { "epoch": 8.902263942573164, "grad_norm": 0.3534017503261566, "learning_rate": 2.4186000000000002e-05, "loss": 0.034, "step": 8065 }, { "epoch": 8.903368304803976, "grad_norm": 0.339860737323761, "learning_rate": 2.4189e-05, "loss": 0.0265, "step": 8066 }, { "epoch": 8.904472667034787, "grad_norm": 0.30370092391967773, "learning_rate": 2.4192e-05, "loss": 
0.0214, "step": 8067 }, { "epoch": 8.905577029265599, "grad_norm": 0.7328924536705017, "learning_rate": 2.4195e-05, "loss": 0.0336, "step": 8068 }, { "epoch": 8.906681391496411, "grad_norm": 0.3617480993270874, "learning_rate": 2.4198e-05, "loss": 0.023, "step": 8069 }, { "epoch": 8.907785753727222, "grad_norm": 0.3371679186820984, "learning_rate": 2.4201e-05, "loss": 0.0177, "step": 8070 }, { "epoch": 8.908890115958034, "grad_norm": 0.2709488570690155, "learning_rate": 2.4204e-05, "loss": 0.0204, "step": 8071 }, { "epoch": 8.909994478188846, "grad_norm": 0.5008820295333862, "learning_rate": 2.4207e-05, "loss": 0.0223, "step": 8072 }, { "epoch": 8.911098840419658, "grad_norm": 0.3703078627586365, "learning_rate": 2.4210000000000004e-05, "loss": 0.0147, "step": 8073 }, { "epoch": 8.912203202650469, "grad_norm": 0.29265686869621277, "learning_rate": 2.4213000000000003e-05, "loss": 0.0288, "step": 8074 }, { "epoch": 8.913307564881281, "grad_norm": 0.41692546010017395, "learning_rate": 2.4216e-05, "loss": 0.0149, "step": 8075 }, { "epoch": 8.914411927112093, "grad_norm": 0.5176910161972046, "learning_rate": 2.4219e-05, "loss": 0.0162, "step": 8076 }, { "epoch": 8.915516289342904, "grad_norm": 0.28604862093925476, "learning_rate": 2.4222e-05, "loss": 0.0133, "step": 8077 }, { "epoch": 8.916620651573716, "grad_norm": 0.35766956210136414, "learning_rate": 2.4225e-05, "loss": 0.0185, "step": 8078 }, { "epoch": 8.917725013804528, "grad_norm": 0.3252013921737671, "learning_rate": 2.4228e-05, "loss": 0.0137, "step": 8079 }, { "epoch": 8.91882937603534, "grad_norm": 0.367929607629776, "learning_rate": 2.4231e-05, "loss": 0.0253, "step": 8080 }, { "epoch": 8.91993373826615, "grad_norm": 0.3721431493759155, "learning_rate": 2.4234e-05, "loss": 0.0172, "step": 8081 }, { "epoch": 8.921038100496963, "grad_norm": 0.34388241171836853, "learning_rate": 2.4237e-05, "loss": 0.0163, "step": 8082 }, { "epoch": 8.922142462727775, "grad_norm": 0.3756961524486542, "learning_rate": 
2.4240000000000002e-05, "loss": 0.0258, "step": 8083 }, { "epoch": 8.923246824958586, "grad_norm": 0.2182079553604126, "learning_rate": 2.4243e-05, "loss": 0.0103, "step": 8084 }, { "epoch": 8.924351187189398, "grad_norm": 0.35424157977104187, "learning_rate": 2.4246e-05, "loss": 0.0169, "step": 8085 }, { "epoch": 8.92545554942021, "grad_norm": 1.1047104597091675, "learning_rate": 2.4249e-05, "loss": 0.0265, "step": 8086 }, { "epoch": 8.926559911651022, "grad_norm": 0.47547638416290283, "learning_rate": 2.4252e-05, "loss": 0.0182, "step": 8087 }, { "epoch": 8.927664273881833, "grad_norm": 0.4709599018096924, "learning_rate": 2.4255e-05, "loss": 0.0106, "step": 8088 }, { "epoch": 8.928768636112645, "grad_norm": 0.6589826941490173, "learning_rate": 2.4258e-05, "loss": 0.0209, "step": 8089 }, { "epoch": 8.929872998343457, "grad_norm": 0.5914947986602783, "learning_rate": 2.4261e-05, "loss": 0.0169, "step": 8090 }, { "epoch": 8.930977360574268, "grad_norm": 0.6203992366790771, "learning_rate": 2.4264e-05, "loss": 0.0215, "step": 8091 }, { "epoch": 8.93208172280508, "grad_norm": 0.5859860777854919, "learning_rate": 2.4267e-05, "loss": 0.0213, "step": 8092 }, { "epoch": 8.933186085035892, "grad_norm": 0.5907264947891235, "learning_rate": 2.4270000000000003e-05, "loss": 0.0166, "step": 8093 }, { "epoch": 8.934290447266704, "grad_norm": 0.3702773153781891, "learning_rate": 2.4273000000000003e-05, "loss": 0.0219, "step": 8094 }, { "epoch": 8.935394809497515, "grad_norm": 1.4443999528884888, "learning_rate": 2.4276000000000003e-05, "loss": 0.0358, "step": 8095 }, { "epoch": 8.936499171728327, "grad_norm": 0.43950021266937256, "learning_rate": 2.4279000000000003e-05, "loss": 0.0204, "step": 8096 }, { "epoch": 8.93760353395914, "grad_norm": 0.6852698922157288, "learning_rate": 2.4282000000000002e-05, "loss": 0.0341, "step": 8097 }, { "epoch": 8.93870789618995, "grad_norm": 0.7247726321220398, "learning_rate": 2.4285000000000002e-05, "loss": 0.0235, "step": 8098 }, { "epoch": 
8.939812258420762, "grad_norm": 1.535403847694397, "learning_rate": 2.4288e-05, "loss": 0.2668, "step": 8099 }, { "epoch": 8.940916620651574, "grad_norm": 1.190456748008728, "learning_rate": 2.4291e-05, "loss": 0.2818, "step": 8100 }, { "epoch": 8.942020982882385, "grad_norm": 0.677956223487854, "learning_rate": 2.4293999999999998e-05, "loss": 0.177, "step": 8101 }, { "epoch": 8.943125345113197, "grad_norm": 0.6690753102302551, "learning_rate": 2.4296999999999998e-05, "loss": 0.1832, "step": 8102 }, { "epoch": 8.94422970734401, "grad_norm": 0.7249096632003784, "learning_rate": 2.43e-05, "loss": 0.1908, "step": 8103 }, { "epoch": 8.945334069574821, "grad_norm": 0.4777228534221649, "learning_rate": 2.4303e-05, "loss": 0.0695, "step": 8104 }, { "epoch": 8.946438431805632, "grad_norm": 0.8532505631446838, "learning_rate": 2.4306e-05, "loss": 0.1925, "step": 8105 }, { "epoch": 8.947542794036444, "grad_norm": 1.02102792263031, "learning_rate": 2.4309e-05, "loss": 0.0663, "step": 8106 }, { "epoch": 8.948647156267256, "grad_norm": 0.3775440454483032, "learning_rate": 2.4312e-05, "loss": 0.0348, "step": 8107 }, { "epoch": 8.949751518498067, "grad_norm": 0.3679262399673462, "learning_rate": 2.4315e-05, "loss": 0.0356, "step": 8108 }, { "epoch": 8.950855880728879, "grad_norm": 0.3117993474006653, "learning_rate": 2.4318e-05, "loss": 0.0263, "step": 8109 }, { "epoch": 8.951960242959691, "grad_norm": 0.36686432361602783, "learning_rate": 2.4321e-05, "loss": 0.0221, "step": 8110 }, { "epoch": 8.953064605190502, "grad_norm": 0.9540247321128845, "learning_rate": 2.4324e-05, "loss": 0.0565, "step": 8111 }, { "epoch": 8.954168967421314, "grad_norm": 1.1454650163650513, "learning_rate": 2.4327e-05, "loss": 0.0237, "step": 8112 }, { "epoch": 8.955273329652126, "grad_norm": 0.5369065999984741, "learning_rate": 2.4330000000000003e-05, "loss": 0.0238, "step": 8113 }, { "epoch": 8.956377691882938, "grad_norm": 0.4577413499355316, "learning_rate": 2.4333000000000002e-05, "loss": 0.0556, 
"step": 8114 }, { "epoch": 8.957482054113749, "grad_norm": 0.5594468712806702, "learning_rate": 2.4336000000000002e-05, "loss": 0.0408, "step": 8115 }, { "epoch": 8.958586416344561, "grad_norm": 0.5533521771430969, "learning_rate": 2.4339000000000002e-05, "loss": 0.0225, "step": 8116 }, { "epoch": 8.959690778575373, "grad_norm": 0.40063777565956116, "learning_rate": 2.4342000000000002e-05, "loss": 0.0341, "step": 8117 }, { "epoch": 8.960795140806184, "grad_norm": 0.4311443865299225, "learning_rate": 2.4345e-05, "loss": 0.0208, "step": 8118 }, { "epoch": 8.961899503036996, "grad_norm": 0.26996341347694397, "learning_rate": 2.4348e-05, "loss": 0.0119, "step": 8119 }, { "epoch": 8.963003865267808, "grad_norm": 0.6471078395843506, "learning_rate": 2.4351e-05, "loss": 0.0669, "step": 8120 }, { "epoch": 8.96410822749862, "grad_norm": 0.32709842920303345, "learning_rate": 2.4354e-05, "loss": 0.0152, "step": 8121 }, { "epoch": 8.96521258972943, "grad_norm": 0.3879993259906769, "learning_rate": 2.4357e-05, "loss": 0.0201, "step": 8122 }, { "epoch": 8.966316951960243, "grad_norm": 0.5539172291755676, "learning_rate": 2.4360000000000004e-05, "loss": 0.0256, "step": 8123 }, { "epoch": 8.967421314191055, "grad_norm": 0.5609552264213562, "learning_rate": 2.4363e-05, "loss": 0.0327, "step": 8124 }, { "epoch": 8.968525676421866, "grad_norm": 0.5159605741500854, "learning_rate": 2.4366e-05, "loss": 0.0704, "step": 8125 }, { "epoch": 8.969630038652678, "grad_norm": 0.30355215072631836, "learning_rate": 2.4369e-05, "loss": 0.0174, "step": 8126 }, { "epoch": 8.97073440088349, "grad_norm": 0.26779651641845703, "learning_rate": 2.4372e-05, "loss": 0.0185, "step": 8127 }, { "epoch": 8.971838763114302, "grad_norm": 0.32912203669548035, "learning_rate": 2.4375e-05, "loss": 0.0144, "step": 8128 }, { "epoch": 8.972943125345113, "grad_norm": 0.3037954866886139, "learning_rate": 2.4378e-05, "loss": 0.0178, "step": 8129 }, { "epoch": 8.974047487575925, "grad_norm": 0.2644025981426239, 
"learning_rate": 2.4381e-05, "loss": 0.0171, "step": 8130 }, { "epoch": 8.975151849806737, "grad_norm": 0.6835903525352478, "learning_rate": 2.4384e-05, "loss": 0.0336, "step": 8131 }, { "epoch": 8.976256212037548, "grad_norm": 0.2217441201210022, "learning_rate": 2.4387e-05, "loss": 0.014, "step": 8132 }, { "epoch": 8.97736057426836, "grad_norm": 0.4275642931461334, "learning_rate": 2.439e-05, "loss": 0.022, "step": 8133 }, { "epoch": 8.978464936499172, "grad_norm": 0.25935861468315125, "learning_rate": 2.4393000000000002e-05, "loss": 0.0128, "step": 8134 }, { "epoch": 8.979569298729983, "grad_norm": 0.43012097477912903, "learning_rate": 2.4396e-05, "loss": 0.0245, "step": 8135 }, { "epoch": 8.980673660960795, "grad_norm": 0.6689443588256836, "learning_rate": 2.4399e-05, "loss": 0.0352, "step": 8136 }, { "epoch": 8.981778023191607, "grad_norm": 0.32278868556022644, "learning_rate": 2.4402e-05, "loss": 0.0117, "step": 8137 }, { "epoch": 8.98288238542242, "grad_norm": 0.2683963477611542, "learning_rate": 2.4405e-05, "loss": 0.0173, "step": 8138 }, { "epoch": 8.98398674765323, "grad_norm": 0.41256052255630493, "learning_rate": 2.4408e-05, "loss": 0.0177, "step": 8139 }, { "epoch": 8.985091109884042, "grad_norm": 0.9416419267654419, "learning_rate": 2.4411e-05, "loss": 0.02, "step": 8140 }, { "epoch": 8.986195472114854, "grad_norm": 0.4072607755661011, "learning_rate": 2.4414e-05, "loss": 0.0137, "step": 8141 }, { "epoch": 8.987299834345665, "grad_norm": 1.0656874179840088, "learning_rate": 2.4417e-05, "loss": 0.0225, "step": 8142 }, { "epoch": 8.988404196576477, "grad_norm": 0.4162426292896271, "learning_rate": 2.442e-05, "loss": 0.0203, "step": 8143 }, { "epoch": 8.98950855880729, "grad_norm": 0.4117095470428467, "learning_rate": 2.4423000000000003e-05, "loss": 0.0181, "step": 8144 }, { "epoch": 8.9906129210381, "grad_norm": 0.23479756712913513, "learning_rate": 2.4426000000000003e-05, "loss": 0.0142, "step": 8145 }, { "epoch": 8.991717283268912, "grad_norm": 
0.5056171417236328, "learning_rate": 2.4429000000000003e-05, "loss": 0.0269, "step": 8146 }, { "epoch": 8.992821645499724, "grad_norm": 0.5608214139938354, "learning_rate": 2.4432000000000003e-05, "loss": 0.0272, "step": 8147 }, { "epoch": 8.993926007730536, "grad_norm": 0.5871961712837219, "learning_rate": 2.4435e-05, "loss": 0.0568, "step": 8148 }, { "epoch": 8.995030369961347, "grad_norm": 0.9966949820518494, "learning_rate": 2.4438e-05, "loss": 0.139, "step": 8149 }, { "epoch": 8.996134732192159, "grad_norm": 0.269295871257782, "learning_rate": 2.4441e-05, "loss": 0.0195, "step": 8150 }, { "epoch": 8.997239094422971, "grad_norm": 0.286332905292511, "learning_rate": 2.4444e-05, "loss": 0.0101, "step": 8151 }, { "epoch": 8.998343456653782, "grad_norm": 0.3860313892364502, "learning_rate": 2.4446999999999998e-05, "loss": 0.0173, "step": 8152 }, { "epoch": 8.999447818884594, "grad_norm": 0.9873502254486084, "learning_rate": 2.4449999999999998e-05, "loss": 0.023, "step": 8153 }, { "epoch": 9.0, "grad_norm": 0.47417768836021423, "learning_rate": 2.4453e-05, "loss": 0.0085, "step": 8154 }, { "epoch": 9.001104362230812, "grad_norm": 2.1471662521362305, "learning_rate": 2.4456e-05, "loss": 0.256, "step": 8155 }, { "epoch": 9.002208724461623, "grad_norm": 0.8110017776489258, "learning_rate": 2.4459e-05, "loss": 0.227, "step": 8156 }, { "epoch": 9.003313086692435, "grad_norm": 0.9108894467353821, "learning_rate": 2.4462e-05, "loss": 0.1769, "step": 8157 }, { "epoch": 9.004417448923247, "grad_norm": 0.9889177083969116, "learning_rate": 2.4465e-05, "loss": 0.1531, "step": 8158 }, { "epoch": 9.00552181115406, "grad_norm": 0.7184625267982483, "learning_rate": 2.4468e-05, "loss": 0.0896, "step": 8159 }, { "epoch": 9.00662617338487, "grad_norm": 0.7543290853500366, "learning_rate": 2.4471e-05, "loss": 0.1022, "step": 8160 }, { "epoch": 9.007730535615682, "grad_norm": 0.533500611782074, "learning_rate": 2.4474e-05, "loss": 0.0787, "step": 8161 }, { "epoch": 9.008834897846494, 
"grad_norm": 0.4271180033683777, "learning_rate": 2.4477e-05, "loss": 0.0284, "step": 8162 }, { "epoch": 9.009939260077305, "grad_norm": 0.5764749646186829, "learning_rate": 2.448e-05, "loss": 0.057, "step": 8163 }, { "epoch": 9.011043622308117, "grad_norm": 0.5914919972419739, "learning_rate": 2.4483000000000003e-05, "loss": 0.0319, "step": 8164 }, { "epoch": 9.01214798453893, "grad_norm": 0.3283289670944214, "learning_rate": 2.4486000000000002e-05, "loss": 0.0167, "step": 8165 }, { "epoch": 9.01325234676974, "grad_norm": 0.3727489709854126, "learning_rate": 2.4489000000000002e-05, "loss": 0.0286, "step": 8166 }, { "epoch": 9.014356709000552, "grad_norm": 0.4969678521156311, "learning_rate": 2.4492000000000002e-05, "loss": 0.0551, "step": 8167 }, { "epoch": 9.015461071231364, "grad_norm": 0.2200242280960083, "learning_rate": 2.4495000000000002e-05, "loss": 0.0226, "step": 8168 }, { "epoch": 9.016565433462176, "grad_norm": 0.2753489315509796, "learning_rate": 2.4498e-05, "loss": 0.013, "step": 8169 }, { "epoch": 9.017669795692987, "grad_norm": 0.20947706699371338, "learning_rate": 2.4501e-05, "loss": 0.0169, "step": 8170 }, { "epoch": 9.018774157923799, "grad_norm": 0.47638264298439026, "learning_rate": 2.4504e-05, "loss": 0.016, "step": 8171 }, { "epoch": 9.019878520154611, "grad_norm": 0.3054129183292389, "learning_rate": 2.4507e-05, "loss": 0.0139, "step": 8172 }, { "epoch": 9.020982882385422, "grad_norm": 0.3819953501224518, "learning_rate": 2.4509999999999997e-05, "loss": 0.0166, "step": 8173 }, { "epoch": 9.022087244616234, "grad_norm": 0.2436257153749466, "learning_rate": 2.4513e-05, "loss": 0.0151, "step": 8174 }, { "epoch": 9.023191606847046, "grad_norm": 0.38438552618026733, "learning_rate": 2.4516e-05, "loss": 0.0242, "step": 8175 }, { "epoch": 9.024295969077858, "grad_norm": 0.3894604444503784, "learning_rate": 2.4519e-05, "loss": 0.0171, "step": 8176 }, { "epoch": 9.025400331308669, "grad_norm": 0.4399080276489258, "learning_rate": 2.4522e-05, "loss": 
0.027, "step": 8177 }, { "epoch": 9.026504693539481, "grad_norm": 0.24682244658470154, "learning_rate": 2.4525e-05, "loss": 0.0123, "step": 8178 }, { "epoch": 9.027609055770293, "grad_norm": 0.32875144481658936, "learning_rate": 2.4528e-05, "loss": 0.0185, "step": 8179 }, { "epoch": 9.028713418001104, "grad_norm": 0.3141329884529114, "learning_rate": 2.4531e-05, "loss": 0.0268, "step": 8180 }, { "epoch": 9.029817780231916, "grad_norm": 0.24951398372650146, "learning_rate": 2.4534e-05, "loss": 0.0164, "step": 8181 }, { "epoch": 9.030922142462728, "grad_norm": 0.5847839713096619, "learning_rate": 2.4537e-05, "loss": 0.0144, "step": 8182 }, { "epoch": 9.032026504693539, "grad_norm": 0.556756317615509, "learning_rate": 2.454e-05, "loss": 0.025, "step": 8183 }, { "epoch": 9.03313086692435, "grad_norm": 0.3021996319293976, "learning_rate": 2.4543000000000002e-05, "loss": 0.0168, "step": 8184 }, { "epoch": 9.034235229155163, "grad_norm": 0.18999652564525604, "learning_rate": 2.4546000000000002e-05, "loss": 0.0108, "step": 8185 }, { "epoch": 9.035339591385975, "grad_norm": 0.2643440067768097, "learning_rate": 2.4549e-05, "loss": 0.018, "step": 8186 }, { "epoch": 9.036443953616786, "grad_norm": 0.37404465675354004, "learning_rate": 2.4552e-05, "loss": 0.0158, "step": 8187 }, { "epoch": 9.037548315847598, "grad_norm": 0.6778534054756165, "learning_rate": 2.4555e-05, "loss": 0.0219, "step": 8188 }, { "epoch": 9.03865267807841, "grad_norm": 0.25535833835601807, "learning_rate": 2.4558e-05, "loss": 0.0152, "step": 8189 }, { "epoch": 9.03975704030922, "grad_norm": 1.2880990505218506, "learning_rate": 2.4561e-05, "loss": 0.0203, "step": 8190 }, { "epoch": 9.040861402540033, "grad_norm": 0.30861249566078186, "learning_rate": 2.4564e-05, "loss": 0.0158, "step": 8191 }, { "epoch": 9.041965764770845, "grad_norm": 0.3211665153503418, "learning_rate": 2.4567e-05, "loss": 0.0269, "step": 8192 }, { "epoch": 9.043070127001657, "grad_norm": 0.43047067523002625, "learning_rate": 2.457e-05, 
"loss": 0.0238, "step": 8193 }, { "epoch": 9.044174489232468, "grad_norm": 0.7162350416183472, "learning_rate": 2.4573000000000003e-05, "loss": 0.0368, "step": 8194 }, { "epoch": 9.04527885146328, "grad_norm": 0.4388999938964844, "learning_rate": 2.4576000000000003e-05, "loss": 0.0105, "step": 8195 }, { "epoch": 9.046383213694092, "grad_norm": 0.5207008719444275, "learning_rate": 2.4579000000000003e-05, "loss": 0.0141, "step": 8196 }, { "epoch": 9.047487575924903, "grad_norm": 0.3867237865924835, "learning_rate": 2.4582000000000003e-05, "loss": 0.0157, "step": 8197 }, { "epoch": 9.048591938155715, "grad_norm": 0.2685398459434509, "learning_rate": 2.4585e-05, "loss": 0.0168, "step": 8198 }, { "epoch": 9.049696300386527, "grad_norm": 0.30926990509033203, "learning_rate": 2.4588e-05, "loss": 0.0152, "step": 8199 }, { "epoch": 9.050800662617338, "grad_norm": 0.5720512866973877, "learning_rate": 2.4591e-05, "loss": 0.0141, "step": 8200 }, { "epoch": 9.05190502484815, "grad_norm": 0.4952075779438019, "learning_rate": 2.4594e-05, "loss": 0.0171, "step": 8201 }, { "epoch": 9.053009387078962, "grad_norm": 0.5194934010505676, "learning_rate": 2.4597e-05, "loss": 0.0206, "step": 8202 }, { "epoch": 9.054113749309774, "grad_norm": 0.46247929334640503, "learning_rate": 2.4599999999999998e-05, "loss": 0.0241, "step": 8203 }, { "epoch": 9.055218111540585, "grad_norm": 0.5585333108901978, "learning_rate": 2.4603e-05, "loss": 0.0211, "step": 8204 }, { "epoch": 9.056322473771397, "grad_norm": 0.6824093461036682, "learning_rate": 2.4606e-05, "loss": 0.2431, "step": 8205 }, { "epoch": 9.05742683600221, "grad_norm": 0.7333905100822449, "learning_rate": 2.4609e-05, "loss": 0.2164, "step": 8206 }, { "epoch": 9.05853119823302, "grad_norm": 0.6015040874481201, "learning_rate": 2.4612e-05, "loss": 0.1431, "step": 8207 }, { "epoch": 9.059635560463832, "grad_norm": 0.47014832496643066, "learning_rate": 2.4615e-05, "loss": 0.0983, "step": 8208 }, { "epoch": 9.060739922694644, "grad_norm": 
0.5365118384361267, "learning_rate": 2.4618e-05, "loss": 0.1239, "step": 8209 }, { "epoch": 9.061844284925456, "grad_norm": 0.5663251280784607, "learning_rate": 2.4621e-05, "loss": 0.1136, "step": 8210 }, { "epoch": 9.062948647156267, "grad_norm": 0.5718377232551575, "learning_rate": 2.4624e-05, "loss": 0.0693, "step": 8211 }, { "epoch": 9.064053009387079, "grad_norm": 0.2931109666824341, "learning_rate": 2.4627e-05, "loss": 0.0316, "step": 8212 }, { "epoch": 9.065157371617891, "grad_norm": 0.48188668489456177, "learning_rate": 2.463e-05, "loss": 0.0629, "step": 8213 }, { "epoch": 9.066261733848702, "grad_norm": 0.32692813873291016, "learning_rate": 2.4633000000000003e-05, "loss": 0.0229, "step": 8214 }, { "epoch": 9.067366096079514, "grad_norm": 0.28425201773643494, "learning_rate": 2.4636000000000003e-05, "loss": 0.0175, "step": 8215 }, { "epoch": 9.068470458310326, "grad_norm": 0.2539454698562622, "learning_rate": 2.4639000000000002e-05, "loss": 0.0236, "step": 8216 }, { "epoch": 9.069574820541137, "grad_norm": 0.22489210963249207, "learning_rate": 2.4642000000000002e-05, "loss": 0.015, "step": 8217 }, { "epoch": 9.070679182771949, "grad_norm": 0.33915501832962036, "learning_rate": 2.4645000000000002e-05, "loss": 0.0289, "step": 8218 }, { "epoch": 9.071783545002761, "grad_norm": 0.38731303811073303, "learning_rate": 2.4648000000000002e-05, "loss": 0.0175, "step": 8219 }, { "epoch": 9.072887907233573, "grad_norm": 0.4313422739505768, "learning_rate": 2.4651e-05, "loss": 0.0151, "step": 8220 }, { "epoch": 9.073992269464384, "grad_norm": 0.6242637038230896, "learning_rate": 2.4654e-05, "loss": 0.0279, "step": 8221 }, { "epoch": 9.075096631695196, "grad_norm": 0.25264260172843933, "learning_rate": 2.4656999999999998e-05, "loss": 0.0137, "step": 8222 }, { "epoch": 9.076200993926008, "grad_norm": 0.24765275418758392, "learning_rate": 2.4659999999999998e-05, "loss": 0.0122, "step": 8223 }, { "epoch": 9.077305356156819, "grad_norm": 0.49894145131111145, "learning_rate": 
2.4663e-05, "loss": 0.0113, "step": 8224 }, { "epoch": 9.078409718387631, "grad_norm": 0.29687896370887756, "learning_rate": 2.4666e-05, "loss": 0.0139, "step": 8225 }, { "epoch": 9.079514080618443, "grad_norm": 0.2632060945034027, "learning_rate": 2.4669e-05, "loss": 0.0157, "step": 8226 }, { "epoch": 9.080618442849255, "grad_norm": 0.7272021174430847, "learning_rate": 2.4672e-05, "loss": 0.0245, "step": 8227 }, { "epoch": 9.081722805080066, "grad_norm": 0.7696133255958557, "learning_rate": 2.4675e-05, "loss": 0.0193, "step": 8228 }, { "epoch": 9.082827167310878, "grad_norm": 0.7776130437850952, "learning_rate": 2.4678e-05, "loss": 0.0174, "step": 8229 }, { "epoch": 9.08393152954169, "grad_norm": 0.27495285868644714, "learning_rate": 2.4681e-05, "loss": 0.0092, "step": 8230 }, { "epoch": 9.0850358917725, "grad_norm": 0.6638588905334473, "learning_rate": 2.4684e-05, "loss": 0.0406, "step": 8231 }, { "epoch": 9.086140254003313, "grad_norm": 0.2466324418783188, "learning_rate": 2.4687e-05, "loss": 0.016, "step": 8232 }, { "epoch": 9.087244616234125, "grad_norm": 0.4100338816642761, "learning_rate": 2.469e-05, "loss": 0.0158, "step": 8233 }, { "epoch": 9.088348978464936, "grad_norm": 0.1826726198196411, "learning_rate": 2.4693000000000002e-05, "loss": 0.011, "step": 8234 }, { "epoch": 9.089453340695748, "grad_norm": 0.4481251537799835, "learning_rate": 2.4696000000000002e-05, "loss": 0.0238, "step": 8235 }, { "epoch": 9.09055770292656, "grad_norm": 0.717094361782074, "learning_rate": 2.4699000000000002e-05, "loss": 0.0167, "step": 8236 }, { "epoch": 9.091662065157372, "grad_norm": 0.33321839570999146, "learning_rate": 2.4702e-05, "loss": 0.0252, "step": 8237 }, { "epoch": 9.092766427388183, "grad_norm": 0.6285895705223083, "learning_rate": 2.4705e-05, "loss": 0.0244, "step": 8238 }, { "epoch": 9.093870789618995, "grad_norm": 0.28446850180625916, "learning_rate": 2.4708e-05, "loss": 0.012, "step": 8239 }, { "epoch": 9.094975151849807, "grad_norm": 0.4082213342189789, 
"learning_rate": 2.4711e-05, "loss": 0.0113, "step": 8240 }, { "epoch": 9.096079514080618, "grad_norm": 0.40910616517066956, "learning_rate": 2.4714e-05, "loss": 0.0263, "step": 8241 }, { "epoch": 9.09718387631143, "grad_norm": 0.4234684109687805, "learning_rate": 2.4717e-05, "loss": 0.0136, "step": 8242 }, { "epoch": 9.098288238542242, "grad_norm": 0.3873792886734009, "learning_rate": 2.472e-05, "loss": 0.0144, "step": 8243 }, { "epoch": 9.099392600773054, "grad_norm": 0.2267679125070572, "learning_rate": 2.4723000000000004e-05, "loss": 0.0105, "step": 8244 }, { "epoch": 9.100496963003865, "grad_norm": 0.3901955783367157, "learning_rate": 2.4726000000000003e-05, "loss": 0.0138, "step": 8245 }, { "epoch": 9.101601325234677, "grad_norm": 0.39371293783187866, "learning_rate": 2.4729000000000003e-05, "loss": 0.0184, "step": 8246 }, { "epoch": 9.10270568746549, "grad_norm": 0.5816183686256409, "learning_rate": 2.4732e-05, "loss": 0.0189, "step": 8247 }, { "epoch": 9.1038100496963, "grad_norm": 0.35482674837112427, "learning_rate": 2.4735e-05, "loss": 0.018, "step": 8248 }, { "epoch": 9.104914411927112, "grad_norm": 0.22332775592803955, "learning_rate": 2.4738e-05, "loss": 0.0167, "step": 8249 }, { "epoch": 9.106018774157924, "grad_norm": 0.21601353585720062, "learning_rate": 2.4741e-05, "loss": 0.0106, "step": 8250 }, { "epoch": 9.107123136388736, "grad_norm": 0.5366539359092712, "learning_rate": 2.4744e-05, "loss": 0.0302, "step": 8251 }, { "epoch": 9.108227498619547, "grad_norm": 1.4325355291366577, "learning_rate": 2.4747e-05, "loss": 0.034, "step": 8252 }, { "epoch": 9.109331860850359, "grad_norm": 0.29105615615844727, "learning_rate": 2.475e-05, "loss": 0.0121, "step": 8253 }, { "epoch": 9.110436223081171, "grad_norm": 0.27267521619796753, "learning_rate": 2.4753e-05, "loss": 0.033, "step": 8254 }, { "epoch": 9.111540585311982, "grad_norm": 0.7092289328575134, "learning_rate": 2.4756e-05, "loss": 0.2526, "step": 8255 }, { "epoch": 9.112644947542794, "grad_norm": 
0.629785418510437, "learning_rate": 2.4759e-05, "loss": 0.1631, "step": 8256 }, { "epoch": 9.113749309773606, "grad_norm": 0.588306725025177, "learning_rate": 2.4762e-05, "loss": 0.1401, "step": 8257 }, { "epoch": 9.114853672004417, "grad_norm": 0.56193608045578, "learning_rate": 2.4765e-05, "loss": 0.1644, "step": 8258 }, { "epoch": 9.115958034235229, "grad_norm": 0.6319046020507812, "learning_rate": 2.4768e-05, "loss": 0.1235, "step": 8259 }, { "epoch": 9.117062396466041, "grad_norm": 0.4425680637359619, "learning_rate": 2.4771e-05, "loss": 0.0892, "step": 8260 }, { "epoch": 9.118166758696853, "grad_norm": 0.5302813053131104, "learning_rate": 2.4774e-05, "loss": 0.0844, "step": 8261 }, { "epoch": 9.119271120927664, "grad_norm": 0.34468570351600647, "learning_rate": 2.4777e-05, "loss": 0.0353, "step": 8262 }, { "epoch": 9.120375483158476, "grad_norm": 0.375200480222702, "learning_rate": 2.478e-05, "loss": 0.037, "step": 8263 }, { "epoch": 9.121479845389288, "grad_norm": 0.29167062044143677, "learning_rate": 2.4783e-05, "loss": 0.0213, "step": 8264 }, { "epoch": 9.122584207620099, "grad_norm": 0.351012259721756, "learning_rate": 2.4786000000000003e-05, "loss": 0.0303, "step": 8265 }, { "epoch": 9.123688569850911, "grad_norm": 0.3092280924320221, "learning_rate": 2.4789000000000003e-05, "loss": 0.0245, "step": 8266 }, { "epoch": 9.124792932081723, "grad_norm": 0.45339834690093994, "learning_rate": 2.4792000000000003e-05, "loss": 0.0208, "step": 8267 }, { "epoch": 9.125897294312535, "grad_norm": 0.33387088775634766, "learning_rate": 2.4795000000000002e-05, "loss": 0.0173, "step": 8268 }, { "epoch": 9.127001656543346, "grad_norm": 0.24808084964752197, "learning_rate": 2.4798000000000002e-05, "loss": 0.0128, "step": 8269 }, { "epoch": 9.128106018774158, "grad_norm": 0.3913894295692444, "learning_rate": 2.4801000000000002e-05, "loss": 0.0195, "step": 8270 }, { "epoch": 9.12921038100497, "grad_norm": 0.25117093324661255, "learning_rate": 2.4804e-05, "loss": 0.0154, 
"step": 8271 }, { "epoch": 9.13031474323578, "grad_norm": 0.5817648768424988, "learning_rate": 2.4806999999999998e-05, "loss": 0.027, "step": 8272 }, { "epoch": 9.131419105466593, "grad_norm": 0.9803751111030579, "learning_rate": 2.4809999999999998e-05, "loss": 0.037, "step": 8273 }, { "epoch": 9.132523467697405, "grad_norm": 0.3270411193370819, "learning_rate": 2.4812999999999998e-05, "loss": 0.0198, "step": 8274 }, { "epoch": 9.133627829928216, "grad_norm": 0.453492671251297, "learning_rate": 2.4816e-05, "loss": 0.0233, "step": 8275 }, { "epoch": 9.134732192159028, "grad_norm": 0.5127384066581726, "learning_rate": 2.4819e-05, "loss": 0.0133, "step": 8276 }, { "epoch": 9.13583655438984, "grad_norm": 0.4087878465652466, "learning_rate": 2.4822e-05, "loss": 0.0113, "step": 8277 }, { "epoch": 9.136940916620652, "grad_norm": 0.14269959926605225, "learning_rate": 2.4825e-05, "loss": 0.0061, "step": 8278 }, { "epoch": 9.138045278851463, "grad_norm": 0.23894889652729034, "learning_rate": 2.4828e-05, "loss": 0.008, "step": 8279 }, { "epoch": 9.139149641082275, "grad_norm": 0.3767453730106354, "learning_rate": 2.4831e-05, "loss": 0.0129, "step": 8280 }, { "epoch": 9.140254003313087, "grad_norm": 0.35343948006629944, "learning_rate": 2.4834e-05, "loss": 0.0183, "step": 8281 }, { "epoch": 9.141358365543898, "grad_norm": 0.2735784947872162, "learning_rate": 2.4837e-05, "loss": 0.0136, "step": 8282 }, { "epoch": 9.14246272777471, "grad_norm": 0.3439311683177948, "learning_rate": 2.484e-05, "loss": 0.0084, "step": 8283 }, { "epoch": 9.143567090005522, "grad_norm": 0.4457731246948242, "learning_rate": 2.4843e-05, "loss": 0.0175, "step": 8284 }, { "epoch": 9.144671452236334, "grad_norm": 0.3061599135398865, "learning_rate": 2.4846000000000002e-05, "loss": 0.0167, "step": 8285 }, { "epoch": 9.145775814467145, "grad_norm": 0.34399452805519104, "learning_rate": 2.4849000000000002e-05, "loss": 0.0256, "step": 8286 }, { "epoch": 9.146880176697957, "grad_norm": 0.5895203351974487, 
"learning_rate": 2.4852000000000002e-05, "loss": 0.0306, "step": 8287 }, { "epoch": 9.14798453892877, "grad_norm": 0.45312267541885376, "learning_rate": 2.4855000000000002e-05, "loss": 0.0157, "step": 8288 }, { "epoch": 9.14908890115958, "grad_norm": 0.8118504881858826, "learning_rate": 2.4858e-05, "loss": 0.027, "step": 8289 }, { "epoch": 9.150193263390392, "grad_norm": 0.3798193037509918, "learning_rate": 2.4861e-05, "loss": 0.0121, "step": 8290 }, { "epoch": 9.151297625621204, "grad_norm": 0.5977201461791992, "learning_rate": 2.4864e-05, "loss": 0.0343, "step": 8291 }, { "epoch": 9.152401987852015, "grad_norm": 0.20284834504127502, "learning_rate": 2.4867e-05, "loss": 0.012, "step": 8292 }, { "epoch": 9.153506350082827, "grad_norm": 0.3177661895751953, "learning_rate": 2.487e-05, "loss": 0.0215, "step": 8293 }, { "epoch": 9.154610712313639, "grad_norm": 0.6530712842941284, "learning_rate": 2.4873e-05, "loss": 0.0122, "step": 8294 }, { "epoch": 9.155715074544451, "grad_norm": 0.48209384083747864, "learning_rate": 2.4876000000000004e-05, "loss": 0.0236, "step": 8295 }, { "epoch": 9.156819436775262, "grad_norm": 0.41290533542633057, "learning_rate": 2.4879e-05, "loss": 0.029, "step": 8296 }, { "epoch": 9.157923799006074, "grad_norm": 1.3566453456878662, "learning_rate": 2.4882e-05, "loss": 0.0172, "step": 8297 }, { "epoch": 9.159028161236886, "grad_norm": 0.37387919425964355, "learning_rate": 2.4885e-05, "loss": 0.0148, "step": 8298 }, { "epoch": 9.160132523467697, "grad_norm": 0.30452027916908264, "learning_rate": 2.4888e-05, "loss": 0.0114, "step": 8299 }, { "epoch": 9.161236885698509, "grad_norm": 0.5418270826339722, "learning_rate": 2.4891e-05, "loss": 0.026, "step": 8300 }, { "epoch": 9.162341247929321, "grad_norm": 0.31403738260269165, "learning_rate": 2.4894e-05, "loss": 0.0164, "step": 8301 }, { "epoch": 9.163445610160133, "grad_norm": 0.3490481376647949, "learning_rate": 2.4897e-05, "loss": 0.0152, "step": 8302 }, { "epoch": 9.164549972390944, "grad_norm": 
0.3937329947948456, "learning_rate": 2.49e-05, "loss": 0.0214, "step": 8303 }, { "epoch": 9.165654334621756, "grad_norm": 0.5191195011138916, "learning_rate": 2.4903e-05, "loss": 0.0302, "step": 8304 }, { "epoch": 9.166758696852568, "grad_norm": 0.7363407015800476, "learning_rate": 2.4906000000000002e-05, "loss": 0.3004, "step": 8305 }, { "epoch": 9.167863059083379, "grad_norm": 0.8070223927497864, "learning_rate": 2.4909e-05, "loss": 0.186, "step": 8306 }, { "epoch": 9.168967421314191, "grad_norm": 1.077477216720581, "learning_rate": 2.4912e-05, "loss": 0.1819, "step": 8307 }, { "epoch": 9.170071783545003, "grad_norm": 0.5465591549873352, "learning_rate": 2.4915e-05, "loss": 0.1344, "step": 8308 }, { "epoch": 9.171176145775814, "grad_norm": 0.4138261675834656, "learning_rate": 2.4918e-05, "loss": 0.0727, "step": 8309 }, { "epoch": 9.172280508006626, "grad_norm": 0.38447338342666626, "learning_rate": 2.4921e-05, "loss": 0.0746, "step": 8310 }, { "epoch": 9.173384870237438, "grad_norm": 1.073140025138855, "learning_rate": 2.4924e-05, "loss": 0.111, "step": 8311 }, { "epoch": 9.17448923246825, "grad_norm": 0.3853676915168762, "learning_rate": 2.4927e-05, "loss": 0.0438, "step": 8312 }, { "epoch": 9.17559359469906, "grad_norm": 0.5302873253822327, "learning_rate": 2.493e-05, "loss": 0.0632, "step": 8313 }, { "epoch": 9.176697956929873, "grad_norm": 0.5956035852432251, "learning_rate": 2.4933e-05, "loss": 0.0489, "step": 8314 }, { "epoch": 9.177802319160685, "grad_norm": 0.2588292956352234, "learning_rate": 2.4936000000000003e-05, "loss": 0.0166, "step": 8315 }, { "epoch": 9.178906681391496, "grad_norm": 0.42060357332229614, "learning_rate": 2.4939000000000003e-05, "loss": 0.0272, "step": 8316 }, { "epoch": 9.180011043622308, "grad_norm": 0.43260473012924194, "learning_rate": 2.4942000000000003e-05, "loss": 0.0353, "step": 8317 }, { "epoch": 9.18111540585312, "grad_norm": 0.35534903407096863, "learning_rate": 2.4945000000000003e-05, "loss": 0.0202, "step": 8318 }, { 
"epoch": 9.182219768083932, "grad_norm": 0.7209563851356506, "learning_rate": 2.4948000000000002e-05, "loss": 0.115, "step": 8319 }, { "epoch": 9.183324130314743, "grad_norm": 0.2578587830066681, "learning_rate": 2.4951e-05, "loss": 0.0137, "step": 8320 }, { "epoch": 9.184428492545555, "grad_norm": 0.2669740319252014, "learning_rate": 2.4954e-05, "loss": 0.0094, "step": 8321 }, { "epoch": 9.185532854776367, "grad_norm": 0.33260560035705566, "learning_rate": 2.4957e-05, "loss": 0.0137, "step": 8322 }, { "epoch": 9.186637217007178, "grad_norm": 0.374698281288147, "learning_rate": 2.4959999999999998e-05, "loss": 0.0178, "step": 8323 }, { "epoch": 9.18774157923799, "grad_norm": 0.2953284978866577, "learning_rate": 2.4962999999999998e-05, "loss": 0.0137, "step": 8324 }, { "epoch": 9.188845941468802, "grad_norm": 0.3711552321910858, "learning_rate": 2.4966e-05, "loss": 0.0128, "step": 8325 }, { "epoch": 9.189950303699613, "grad_norm": 0.3168681859970093, "learning_rate": 2.4969e-05, "loss": 0.0108, "step": 8326 }, { "epoch": 9.191054665930425, "grad_norm": 0.43645554780960083, "learning_rate": 2.4972e-05, "loss": 0.021, "step": 8327 }, { "epoch": 9.192159028161237, "grad_norm": 0.5123345255851746, "learning_rate": 2.4975e-05, "loss": 0.0282, "step": 8328 }, { "epoch": 9.19326339039205, "grad_norm": 0.9880872368812561, "learning_rate": 2.4978e-05, "loss": 0.0212, "step": 8329 }, { "epoch": 9.19436775262286, "grad_norm": 0.30223360657691956, "learning_rate": 2.4981e-05, "loss": 0.0148, "step": 8330 }, { "epoch": 9.195472114853672, "grad_norm": 0.42188867926597595, "learning_rate": 2.4984e-05, "loss": 0.0182, "step": 8331 }, { "epoch": 9.196576477084484, "grad_norm": 0.28868821263313293, "learning_rate": 2.4987e-05, "loss": 0.0114, "step": 8332 }, { "epoch": 9.197680839315295, "grad_norm": 0.4535937011241913, "learning_rate": 2.499e-05, "loss": 0.04, "step": 8333 }, { "epoch": 9.198785201546107, "grad_norm": 5.876553058624268, "learning_rate": 2.4993e-05, "loss": 0.013, 
"step": 8334 }, { "epoch": 9.19988956377692, "grad_norm": 0.21158157289028168, "learning_rate": 2.4996000000000003e-05, "loss": 0.0124, "step": 8335 }, { "epoch": 9.200993926007731, "grad_norm": 0.3156277537345886, "learning_rate": 2.4999000000000002e-05, "loss": 0.0146, "step": 8336 }, { "epoch": 9.202098288238542, "grad_norm": 0.3834953308105469, "learning_rate": 2.5002000000000002e-05, "loss": 0.0134, "step": 8337 }, { "epoch": 9.203202650469354, "grad_norm": 0.5261604189872742, "learning_rate": 2.5005000000000002e-05, "loss": 0.0133, "step": 8338 }, { "epoch": 9.204307012700166, "grad_norm": 0.42184340953826904, "learning_rate": 2.5008000000000002e-05, "loss": 0.0194, "step": 8339 }, { "epoch": 9.205411374930977, "grad_norm": 0.2586362957954407, "learning_rate": 2.5011e-05, "loss": 0.0115, "step": 8340 }, { "epoch": 9.206515737161789, "grad_norm": 0.8498377203941345, "learning_rate": 2.5014e-05, "loss": 0.0236, "step": 8341 }, { "epoch": 9.207620099392601, "grad_norm": 1.1712076663970947, "learning_rate": 2.5017e-05, "loss": 0.0159, "step": 8342 }, { "epoch": 9.208724461623412, "grad_norm": 0.6822201609611511, "learning_rate": 2.502e-05, "loss": 0.0202, "step": 8343 }, { "epoch": 9.209828823854224, "grad_norm": 0.46307915449142456, "learning_rate": 2.5023e-05, "loss": 0.022, "step": 8344 }, { "epoch": 9.210933186085036, "grad_norm": 0.3014407157897949, "learning_rate": 2.5026e-05, "loss": 0.0105, "step": 8345 }, { "epoch": 9.212037548315848, "grad_norm": 0.40646687150001526, "learning_rate": 2.5029e-05, "loss": 0.013, "step": 8346 }, { "epoch": 9.213141910546659, "grad_norm": 0.6374040842056274, "learning_rate": 2.5032e-05, "loss": 0.0283, "step": 8347 }, { "epoch": 9.214246272777471, "grad_norm": 0.6446869969367981, "learning_rate": 2.5035e-05, "loss": 0.0235, "step": 8348 }, { "epoch": 9.215350635008283, "grad_norm": 0.3468242287635803, "learning_rate": 2.5038e-05, "loss": 0.016, "step": 8349 }, { "epoch": 9.216454997239094, "grad_norm": 0.25505122542381287, 
"learning_rate": 2.5041e-05, "loss": 0.0101, "step": 8350 }, { "epoch": 9.217559359469906, "grad_norm": 0.30621472001075745, "learning_rate": 2.5044e-05, "loss": 0.0083, "step": 8351 }, { "epoch": 9.218663721700718, "grad_norm": 0.42838987708091736, "learning_rate": 2.5047e-05, "loss": 0.0152, "step": 8352 }, { "epoch": 9.21976808393153, "grad_norm": 0.5963014960289001, "learning_rate": 2.505e-05, "loss": 0.0154, "step": 8353 }, { "epoch": 9.22087244616234, "grad_norm": 0.31679052114486694, "learning_rate": 2.5053e-05, "loss": 0.0097, "step": 8354 }, { "epoch": 9.221976808393153, "grad_norm": 0.9612240195274353, "learning_rate": 2.5056000000000002e-05, "loss": 0.2667, "step": 8355 }, { "epoch": 9.223081170623965, "grad_norm": 0.5890576243400574, "learning_rate": 2.5059000000000002e-05, "loss": 0.1471, "step": 8356 }, { "epoch": 9.224185532854776, "grad_norm": 0.6631546020507812, "learning_rate": 2.5062e-05, "loss": 0.1811, "step": 8357 }, { "epoch": 9.225289895085588, "grad_norm": 0.46920886635780334, "learning_rate": 2.5065e-05, "loss": 0.099, "step": 8358 }, { "epoch": 9.2263942573164, "grad_norm": 0.5793288946151733, "learning_rate": 2.5068e-05, "loss": 0.1355, "step": 8359 }, { "epoch": 9.22749861954721, "grad_norm": 0.6577750444412231, "learning_rate": 2.5071e-05, "loss": 0.138, "step": 8360 }, { "epoch": 9.228602981778023, "grad_norm": 0.44883203506469727, "learning_rate": 2.5074e-05, "loss": 0.0687, "step": 8361 }, { "epoch": 9.229707344008835, "grad_norm": 0.3066423535346985, "learning_rate": 2.5077e-05, "loss": 0.029, "step": 8362 }, { "epoch": 9.230811706239647, "grad_norm": 0.49320822954177856, "learning_rate": 2.508e-05, "loss": 0.0571, "step": 8363 }, { "epoch": 9.231916068470458, "grad_norm": 0.29693603515625, "learning_rate": 2.5083e-05, "loss": 0.0235, "step": 8364 }, { "epoch": 9.23302043070127, "grad_norm": 0.39909645915031433, "learning_rate": 2.5086000000000003e-05, "loss": 0.0223, "step": 8365 }, { "epoch": 9.234124792932082, "grad_norm": 
0.27406758069992065, "learning_rate": 2.5089000000000003e-05, "loss": 0.0169, "step": 8366 }, { "epoch": 9.235229155162893, "grad_norm": 0.3922785222530365, "learning_rate": 2.5092000000000003e-05, "loss": 0.0162, "step": 8367 }, { "epoch": 9.236333517393705, "grad_norm": 0.3441765010356903, "learning_rate": 2.5095000000000003e-05, "loss": 0.0202, "step": 8368 }, { "epoch": 9.237437879624517, "grad_norm": 0.4429173469543457, "learning_rate": 2.5098000000000003e-05, "loss": 0.0224, "step": 8369 }, { "epoch": 9.23854224185533, "grad_norm": 0.42539897561073303, "learning_rate": 2.5101e-05, "loss": 0.0291, "step": 8370 }, { "epoch": 9.23964660408614, "grad_norm": 0.22268347442150116, "learning_rate": 2.5104e-05, "loss": 0.0201, "step": 8371 }, { "epoch": 9.240750966316952, "grad_norm": 0.30844706296920776, "learning_rate": 2.5107e-05, "loss": 0.0158, "step": 8372 }, { "epoch": 9.241855328547764, "grad_norm": 0.2767300605773926, "learning_rate": 2.511e-05, "loss": 0.018, "step": 8373 }, { "epoch": 9.242959690778575, "grad_norm": 0.28729426860809326, "learning_rate": 2.5112999999999998e-05, "loss": 0.0218, "step": 8374 }, { "epoch": 9.244064053009387, "grad_norm": 0.1683744192123413, "learning_rate": 2.5116e-05, "loss": 0.0153, "step": 8375 }, { "epoch": 9.2451684152402, "grad_norm": 0.2834773361682892, "learning_rate": 2.5119e-05, "loss": 0.0173, "step": 8376 }, { "epoch": 9.24627277747101, "grad_norm": 0.32890957593917847, "learning_rate": 2.5122e-05, "loss": 0.0137, "step": 8377 }, { "epoch": 9.247377139701822, "grad_norm": 0.20960761606693268, "learning_rate": 2.5125e-05, "loss": 0.0114, "step": 8378 }, { "epoch": 9.248481501932634, "grad_norm": 0.5059776306152344, "learning_rate": 2.5128e-05, "loss": 0.0197, "step": 8379 }, { "epoch": 9.249585864163446, "grad_norm": 0.24045968055725098, "learning_rate": 2.5131e-05, "loss": 0.0202, "step": 8380 }, { "epoch": 9.250690226394257, "grad_norm": 0.5347617268562317, "learning_rate": 2.5134e-05, "loss": 0.0208, "step": 8381 
}, { "epoch": 9.251794588625069, "grad_norm": 0.33985596895217896, "learning_rate": 2.5137e-05, "loss": 0.0179, "step": 8382 }, { "epoch": 9.252898950855881, "grad_norm": 0.5365782380104065, "learning_rate": 2.514e-05, "loss": 0.0129, "step": 8383 }, { "epoch": 9.254003313086692, "grad_norm": 0.36405009031295776, "learning_rate": 2.5143e-05, "loss": 0.0173, "step": 8384 }, { "epoch": 9.255107675317504, "grad_norm": 0.2639673948287964, "learning_rate": 2.5146e-05, "loss": 0.013, "step": 8385 }, { "epoch": 9.256212037548316, "grad_norm": 0.753516435623169, "learning_rate": 2.5149000000000003e-05, "loss": 0.0153, "step": 8386 }, { "epoch": 9.257316399779128, "grad_norm": 0.41226333379745483, "learning_rate": 2.5152000000000002e-05, "loss": 0.0229, "step": 8387 }, { "epoch": 9.258420762009939, "grad_norm": 0.3624736964702606, "learning_rate": 2.5155000000000002e-05, "loss": 0.0225, "step": 8388 }, { "epoch": 9.259525124240751, "grad_norm": 0.3074045777320862, "learning_rate": 2.5158000000000002e-05, "loss": 0.0193, "step": 8389 }, { "epoch": 9.260629486471563, "grad_norm": 0.8655998706817627, "learning_rate": 2.5161000000000002e-05, "loss": 0.024, "step": 8390 }, { "epoch": 9.261733848702374, "grad_norm": 0.7025693655014038, "learning_rate": 2.5164e-05, "loss": 0.0297, "step": 8391 }, { "epoch": 9.262838210933186, "grad_norm": 0.3578696846961975, "learning_rate": 2.5167e-05, "loss": 0.0152, "step": 8392 }, { "epoch": 9.263942573163998, "grad_norm": 0.3005028963088989, "learning_rate": 2.517e-05, "loss": 0.0119, "step": 8393 }, { "epoch": 9.26504693539481, "grad_norm": 0.3724956214427948, "learning_rate": 2.5172999999999998e-05, "loss": 0.0148, "step": 8394 }, { "epoch": 9.26615129762562, "grad_norm": 0.34685683250427246, "learning_rate": 2.5175999999999997e-05, "loss": 0.0178, "step": 8395 }, { "epoch": 9.267255659856433, "grad_norm": 0.5864834189414978, "learning_rate": 2.5179e-05, "loss": 0.0247, "step": 8396 }, { "epoch": 9.268360022087245, "grad_norm": 
0.6230900287628174, "learning_rate": 2.5182e-05, "loss": 0.0235, "step": 8397 }, { "epoch": 9.269464384318056, "grad_norm": 0.52983158826828, "learning_rate": 2.5185e-05, "loss": 0.0273, "step": 8398 }, { "epoch": 9.270568746548868, "grad_norm": 0.7306146025657654, "learning_rate": 2.5188e-05, "loss": 0.0237, "step": 8399 }, { "epoch": 9.27167310877968, "grad_norm": 0.3201918303966522, "learning_rate": 2.5191e-05, "loss": 0.0252, "step": 8400 }, { "epoch": 9.27277747101049, "grad_norm": 0.2870548367500305, "learning_rate": 2.5194e-05, "loss": 0.0168, "step": 8401 }, { "epoch": 9.273881833241303, "grad_norm": 0.605966329574585, "learning_rate": 2.5197e-05, "loss": 0.0231, "step": 8402 }, { "epoch": 9.274986195472115, "grad_norm": 0.37598007917404175, "learning_rate": 2.52e-05, "loss": 0.0171, "step": 8403 }, { "epoch": 9.276090557702927, "grad_norm": 0.27986258268356323, "learning_rate": 2.5203e-05, "loss": 0.0108, "step": 8404 }, { "epoch": 9.277194919933738, "grad_norm": 0.8542166948318481, "learning_rate": 2.5206e-05, "loss": 0.2232, "step": 8405 }, { "epoch": 9.27829928216455, "grad_norm": 0.6060273051261902, "learning_rate": 2.5209000000000002e-05, "loss": 0.1502, "step": 8406 }, { "epoch": 9.279403644395362, "grad_norm": 0.7138558626174927, "learning_rate": 2.5212000000000002e-05, "loss": 0.1486, "step": 8407 }, { "epoch": 9.280508006626173, "grad_norm": 0.6383795142173767, "learning_rate": 2.5215e-05, "loss": 0.163, "step": 8408 }, { "epoch": 9.281612368856985, "grad_norm": 0.8017145991325378, "learning_rate": 2.5218e-05, "loss": 0.1312, "step": 8409 }, { "epoch": 9.282716731087797, "grad_norm": 0.5040157437324524, "learning_rate": 2.5221e-05, "loss": 0.0902, "step": 8410 }, { "epoch": 9.283821093318608, "grad_norm": 0.37459781765937805, "learning_rate": 2.5224e-05, "loss": 0.0457, "step": 8411 }, { "epoch": 9.28492545554942, "grad_norm": 0.31863734126091003, "learning_rate": 2.5227e-05, "loss": 0.0318, "step": 8412 }, { "epoch": 9.286029817780232, 
"grad_norm": 0.4974931478500366, "learning_rate": 2.523e-05, "loss": 0.0399, "step": 8413 }, { "epoch": 9.287134180011044, "grad_norm": 0.41774827241897583, "learning_rate": 2.5233e-05, "loss": 0.0328, "step": 8414 }, { "epoch": 9.288238542241855, "grad_norm": 0.18880625069141388, "learning_rate": 2.5236e-05, "loss": 0.0157, "step": 8415 }, { "epoch": 9.289342904472667, "grad_norm": 0.3899955153465271, "learning_rate": 2.5239000000000003e-05, "loss": 0.0157, "step": 8416 }, { "epoch": 9.29044726670348, "grad_norm": 0.21987685561180115, "learning_rate": 2.5242000000000003e-05, "loss": 0.0226, "step": 8417 }, { "epoch": 9.29155162893429, "grad_norm": 0.43922722339630127, "learning_rate": 2.5245000000000003e-05, "loss": 0.0184, "step": 8418 }, { "epoch": 9.292655991165102, "grad_norm": 0.4394577145576477, "learning_rate": 2.5248e-05, "loss": 0.0266, "step": 8419 }, { "epoch": 9.293760353395914, "grad_norm": 0.3660811483860016, "learning_rate": 2.5251e-05, "loss": 0.021, "step": 8420 }, { "epoch": 9.294864715626726, "grad_norm": 0.2228272557258606, "learning_rate": 2.5254e-05, "loss": 0.0084, "step": 8421 }, { "epoch": 9.295969077857537, "grad_norm": 0.31836432218551636, "learning_rate": 2.5257e-05, "loss": 0.017, "step": 8422 }, { "epoch": 9.297073440088349, "grad_norm": 0.27825242280960083, "learning_rate": 2.526e-05, "loss": 0.0205, "step": 8423 }, { "epoch": 9.298177802319161, "grad_norm": 0.386639803647995, "learning_rate": 2.5263e-05, "loss": 0.0227, "step": 8424 }, { "epoch": 9.299282164549972, "grad_norm": 0.6690225005149841, "learning_rate": 2.5266e-05, "loss": 0.038, "step": 8425 }, { "epoch": 9.300386526780784, "grad_norm": 0.42540544271469116, "learning_rate": 2.5269e-05, "loss": 0.023, "step": 8426 }, { "epoch": 9.301490889011596, "grad_norm": 0.21877484023571014, "learning_rate": 2.5272e-05, "loss": 0.0084, "step": 8427 }, { "epoch": 9.302595251242408, "grad_norm": 0.7651259899139404, "learning_rate": 2.5275e-05, "loss": 0.0326, "step": 8428 }, { "epoch": 
9.303699613473219, "grad_norm": 0.35237666964530945, "learning_rate": 2.5278e-05, "loss": 0.0136, "step": 8429 }, { "epoch": 9.304803975704031, "grad_norm": 0.30937331914901733, "learning_rate": 2.5281e-05, "loss": 0.0086, "step": 8430 }, { "epoch": 9.305908337934843, "grad_norm": 0.3334232568740845, "learning_rate": 2.5284e-05, "loss": 0.0142, "step": 8431 }, { "epoch": 9.307012700165654, "grad_norm": 0.4408300817012787, "learning_rate": 2.5287e-05, "loss": 0.0176, "step": 8432 }, { "epoch": 9.308117062396466, "grad_norm": 0.24860432744026184, "learning_rate": 2.529e-05, "loss": 0.015, "step": 8433 }, { "epoch": 9.309221424627278, "grad_norm": 0.32548895478248596, "learning_rate": 2.5293e-05, "loss": 0.023, "step": 8434 }, { "epoch": 9.310325786858089, "grad_norm": 0.41955476999282837, "learning_rate": 2.5296e-05, "loss": 0.0145, "step": 8435 }, { "epoch": 9.3114301490889, "grad_norm": 0.4523351490497589, "learning_rate": 2.5299000000000003e-05, "loss": 0.0153, "step": 8436 }, { "epoch": 9.312534511319713, "grad_norm": 0.35468974709510803, "learning_rate": 2.5302000000000003e-05, "loss": 0.0168, "step": 8437 }, { "epoch": 9.313638873550525, "grad_norm": 0.29652899503707886, "learning_rate": 2.5305000000000003e-05, "loss": 0.0157, "step": 8438 }, { "epoch": 9.314743235781336, "grad_norm": 0.5438205599784851, "learning_rate": 2.5308000000000002e-05, "loss": 0.0222, "step": 8439 }, { "epoch": 9.315847598012148, "grad_norm": 0.27218982577323914, "learning_rate": 2.5311000000000002e-05, "loss": 0.0141, "step": 8440 }, { "epoch": 9.31695196024296, "grad_norm": 0.2750907838344574, "learning_rate": 2.5314000000000002e-05, "loss": 0.0155, "step": 8441 }, { "epoch": 9.31805632247377, "grad_norm": 0.6111288070678711, "learning_rate": 2.5317000000000002e-05, "loss": 0.0228, "step": 8442 }, { "epoch": 9.319160684704583, "grad_norm": 0.3016197085380554, "learning_rate": 2.5319999999999998e-05, "loss": 0.0105, "step": 8443 }, { "epoch": 9.320265046935395, "grad_norm": 
0.3768750727176666, "learning_rate": 2.5322999999999998e-05, "loss": 0.0245, "step": 8444 }, { "epoch": 9.321369409166207, "grad_norm": 0.4869030714035034, "learning_rate": 2.5325999999999998e-05, "loss": 0.0206, "step": 8445 }, { "epoch": 9.322473771397018, "grad_norm": 0.399172306060791, "learning_rate": 2.5329e-05, "loss": 0.0192, "step": 8446 }, { "epoch": 9.32357813362783, "grad_norm": 0.9021074175834656, "learning_rate": 2.5332e-05, "loss": 0.0225, "step": 8447 }, { "epoch": 9.324682495858642, "grad_norm": 0.3634742200374603, "learning_rate": 2.5335e-05, "loss": 0.023, "step": 8448 }, { "epoch": 9.325786858089453, "grad_norm": 0.29595446586608887, "learning_rate": 2.5338e-05, "loss": 0.0128, "step": 8449 }, { "epoch": 9.326891220320265, "grad_norm": 1.3597830533981323, "learning_rate": 2.5341e-05, "loss": 0.0198, "step": 8450 }, { "epoch": 9.327995582551077, "grad_norm": 0.3837946951389313, "learning_rate": 2.5344e-05, "loss": 0.0224, "step": 8451 }, { "epoch": 9.329099944781888, "grad_norm": 0.3837173879146576, "learning_rate": 2.5347e-05, "loss": 0.0216, "step": 8452 }, { "epoch": 9.3302043070127, "grad_norm": 0.36478474736213684, "learning_rate": 2.535e-05, "loss": 0.0134, "step": 8453 }, { "epoch": 9.331308669243512, "grad_norm": 0.4807637929916382, "learning_rate": 2.5353e-05, "loss": 0.0216, "step": 8454 }, { "epoch": 9.332413031474324, "grad_norm": 0.7253870368003845, "learning_rate": 2.5356e-05, "loss": 0.2401, "step": 8455 }, { "epoch": 9.333517393705135, "grad_norm": 0.9472302198410034, "learning_rate": 2.5359000000000002e-05, "loss": 0.22, "step": 8456 }, { "epoch": 9.334621755935947, "grad_norm": 0.7124325633049011, "learning_rate": 2.5362000000000002e-05, "loss": 0.2252, "step": 8457 }, { "epoch": 9.33572611816676, "grad_norm": 0.5966888070106506, "learning_rate": 2.5365000000000002e-05, "loss": 0.1498, "step": 8458 }, { "epoch": 9.33683048039757, "grad_norm": 0.7034581303596497, "learning_rate": 2.5368000000000002e-05, "loss": 0.0975, "step": 
8459 }, { "epoch": 9.337934842628382, "grad_norm": 0.606854259967804, "learning_rate": 2.5371e-05, "loss": 0.0884, "step": 8460 }, { "epoch": 9.339039204859194, "grad_norm": 0.3947161138057709, "learning_rate": 2.5374e-05, "loss": 0.0653, "step": 8461 }, { "epoch": 9.340143567090006, "grad_norm": 0.39924323558807373, "learning_rate": 2.5377e-05, "loss": 0.0601, "step": 8462 }, { "epoch": 9.341247929320817, "grad_norm": 0.5445983409881592, "learning_rate": 2.538e-05, "loss": 0.0396, "step": 8463 }, { "epoch": 9.342352291551629, "grad_norm": 0.25043144822120667, "learning_rate": 2.5383e-05, "loss": 0.0272, "step": 8464 }, { "epoch": 9.343456653782441, "grad_norm": 0.24204514920711517, "learning_rate": 2.5386e-05, "loss": 0.0251, "step": 8465 }, { "epoch": 9.344561016013252, "grad_norm": 0.3255254328250885, "learning_rate": 2.5389000000000004e-05, "loss": 0.0282, "step": 8466 }, { "epoch": 9.345665378244064, "grad_norm": 0.35611942410469055, "learning_rate": 2.5392000000000004e-05, "loss": 0.018, "step": 8467 }, { "epoch": 9.346769740474876, "grad_norm": 0.36271777749061584, "learning_rate": 2.5395e-05, "loss": 0.0179, "step": 8468 }, { "epoch": 9.347874102705687, "grad_norm": 0.5547131299972534, "learning_rate": 2.5398e-05, "loss": 0.0374, "step": 8469 }, { "epoch": 9.348978464936499, "grad_norm": 0.24131056666374207, "learning_rate": 2.5401e-05, "loss": 0.015, "step": 8470 }, { "epoch": 9.350082827167311, "grad_norm": 0.37579673528671265, "learning_rate": 2.5404e-05, "loss": 0.0141, "step": 8471 }, { "epoch": 9.351187189398123, "grad_norm": 0.18926408886909485, "learning_rate": 2.5407e-05, "loss": 0.0101, "step": 8472 }, { "epoch": 9.352291551628934, "grad_norm": 0.3919876515865326, "learning_rate": 2.541e-05, "loss": 0.0137, "step": 8473 }, { "epoch": 9.353395913859746, "grad_norm": 0.27393046021461487, "learning_rate": 2.5413e-05, "loss": 0.022, "step": 8474 }, { "epoch": 9.354500276090558, "grad_norm": 0.3810427784919739, "learning_rate": 2.5416e-05, "loss": 
0.0203, "step": 8475 }, { "epoch": 9.355604638321369, "grad_norm": 0.728223443031311, "learning_rate": 2.5419000000000002e-05, "loss": 0.0235, "step": 8476 }, { "epoch": 9.356709000552181, "grad_norm": 0.37097886204719543, "learning_rate": 2.5422e-05, "loss": 0.0174, "step": 8477 }, { "epoch": 9.357813362782993, "grad_norm": 0.938106894493103, "learning_rate": 2.5425e-05, "loss": 0.0679, "step": 8478 }, { "epoch": 9.358917725013805, "grad_norm": 0.5910711884498596, "learning_rate": 2.5428e-05, "loss": 0.0177, "step": 8479 }, { "epoch": 9.360022087244616, "grad_norm": 0.25052934885025024, "learning_rate": 2.5431e-05, "loss": 0.0102, "step": 8480 }, { "epoch": 9.361126449475428, "grad_norm": 0.5215208530426025, "learning_rate": 2.5434e-05, "loss": 0.0188, "step": 8481 }, { "epoch": 9.36223081170624, "grad_norm": 0.4341906011104584, "learning_rate": 2.5437e-05, "loss": 0.0499, "step": 8482 }, { "epoch": 9.36333517393705, "grad_norm": 0.3048522472381592, "learning_rate": 2.544e-05, "loss": 0.0124, "step": 8483 }, { "epoch": 9.364439536167863, "grad_norm": 0.48180684447288513, "learning_rate": 2.5443e-05, "loss": 0.016, "step": 8484 }, { "epoch": 9.365543898398675, "grad_norm": 0.7989155054092407, "learning_rate": 2.5446e-05, "loss": 0.0129, "step": 8485 }, { "epoch": 9.366648260629486, "grad_norm": 0.37601715326309204, "learning_rate": 2.5449000000000003e-05, "loss": 0.0136, "step": 8486 }, { "epoch": 9.367752622860298, "grad_norm": 0.5697135925292969, "learning_rate": 2.5452000000000003e-05, "loss": 0.0205, "step": 8487 }, { "epoch": 9.36885698509111, "grad_norm": 0.35184845328330994, "learning_rate": 2.5455000000000003e-05, "loss": 0.0174, "step": 8488 }, { "epoch": 9.369961347321922, "grad_norm": 0.5946150422096252, "learning_rate": 2.5458000000000003e-05, "loss": 0.0182, "step": 8489 }, { "epoch": 9.371065709552733, "grad_norm": 0.3991926312446594, "learning_rate": 2.5461000000000002e-05, "loss": 0.0198, "step": 8490 }, { "epoch": 9.372170071783545, "grad_norm": 
0.9133858680725098, "learning_rate": 2.5464000000000002e-05, "loss": 0.0299, "step": 8491 }, { "epoch": 9.373274434014357, "grad_norm": 0.24665741622447968, "learning_rate": 2.5467e-05, "loss": 0.0107, "step": 8492 }, { "epoch": 9.374378796245168, "grad_norm": 0.5753602385520935, "learning_rate": 2.547e-05, "loss": 0.0205, "step": 8493 }, { "epoch": 9.37548315847598, "grad_norm": 0.5291146039962769, "learning_rate": 2.5472999999999998e-05, "loss": 0.022, "step": 8494 }, { "epoch": 9.376587520706792, "grad_norm": 0.6535728573799133, "learning_rate": 2.5475999999999998e-05, "loss": 0.0243, "step": 8495 }, { "epoch": 9.377691882937604, "grad_norm": 0.3648850619792938, "learning_rate": 2.5479e-05, "loss": 0.0169, "step": 8496 }, { "epoch": 9.378796245168415, "grad_norm": 0.44959497451782227, "learning_rate": 2.5482e-05, "loss": 0.0191, "step": 8497 }, { "epoch": 9.379900607399227, "grad_norm": 0.42860373854637146, "learning_rate": 2.5485e-05, "loss": 0.0248, "step": 8498 }, { "epoch": 9.38100496963004, "grad_norm": 0.4450722634792328, "learning_rate": 2.5488e-05, "loss": 0.0328, "step": 8499 }, { "epoch": 9.38210933186085, "grad_norm": 0.5040324330329895, "learning_rate": 2.5491e-05, "loss": 0.0307, "step": 8500 }, { "epoch": 9.383213694091662, "grad_norm": 0.39662808179855347, "learning_rate": 2.5494e-05, "loss": 0.0249, "step": 8501 }, { "epoch": 9.384318056322474, "grad_norm": 0.5547886490821838, "learning_rate": 2.5497e-05, "loss": 0.0222, "step": 8502 }, { "epoch": 9.385422418553285, "grad_norm": 0.607270359992981, "learning_rate": 2.55e-05, "loss": 0.0287, "step": 8503 }, { "epoch": 9.386526780784097, "grad_norm": 0.356477826833725, "learning_rate": 2.5503e-05, "loss": 0.0175, "step": 8504 }, { "epoch": 9.387631143014909, "grad_norm": 1.539347529411316, "learning_rate": 2.5506e-05, "loss": 0.2453, "step": 8505 }, { "epoch": 9.388735505245721, "grad_norm": 0.8574787378311157, "learning_rate": 2.5509e-05, "loss": 0.1573, "step": 8506 }, { "epoch": 
9.389839867476532, "grad_norm": 0.9248663187026978, "learning_rate": 2.5512000000000002e-05, "loss": 0.1655, "step": 8507 }, { "epoch": 9.390944229707344, "grad_norm": 0.6936554908752441, "learning_rate": 2.5515000000000002e-05, "loss": 0.1162, "step": 8508 }, { "epoch": 9.392048591938156, "grad_norm": 0.5955015420913696, "learning_rate": 2.5518000000000002e-05, "loss": 0.1347, "step": 8509 }, { "epoch": 9.393152954168967, "grad_norm": 1.7229257822036743, "learning_rate": 2.5521000000000002e-05, "loss": 0.1541, "step": 8510 }, { "epoch": 9.394257316399779, "grad_norm": 0.711928129196167, "learning_rate": 2.5524e-05, "loss": 0.0909, "step": 8511 }, { "epoch": 9.395361678630591, "grad_norm": 0.5019592642784119, "learning_rate": 2.5527e-05, "loss": 0.052, "step": 8512 }, { "epoch": 9.396466040861403, "grad_norm": 0.3777667284011841, "learning_rate": 2.553e-05, "loss": 0.0525, "step": 8513 }, { "epoch": 9.397570403092214, "grad_norm": 0.31473416090011597, "learning_rate": 2.5533e-05, "loss": 0.0242, "step": 8514 }, { "epoch": 9.398674765323026, "grad_norm": 0.37350720167160034, "learning_rate": 2.5536e-05, "loss": 0.028, "step": 8515 }, { "epoch": 9.399779127553838, "grad_norm": 0.3184381425380707, "learning_rate": 2.5539e-05, "loss": 0.0198, "step": 8516 }, { "epoch": 9.400883489784649, "grad_norm": 0.3247572183609009, "learning_rate": 2.5542e-05, "loss": 0.0171, "step": 8517 }, { "epoch": 9.401987852015461, "grad_norm": 0.42575955390930176, "learning_rate": 2.5545e-05, "loss": 0.0225, "step": 8518 }, { "epoch": 9.403092214246273, "grad_norm": 0.19921521842479706, "learning_rate": 2.5548e-05, "loss": 0.0077, "step": 8519 }, { "epoch": 9.404196576477084, "grad_norm": 0.7595935463905334, "learning_rate": 2.5551e-05, "loss": 0.0243, "step": 8520 }, { "epoch": 9.405300938707896, "grad_norm": 0.3855801820755005, "learning_rate": 2.5554e-05, "loss": 0.0135, "step": 8521 }, { "epoch": 9.406405300938708, "grad_norm": 0.3116399049758911, "learning_rate": 2.5557e-05, "loss": 
0.02, "step": 8522 }, { "epoch": 9.40750966316952, "grad_norm": 0.17474530637264252, "learning_rate": 2.556e-05, "loss": 0.0084, "step": 8523 }, { "epoch": 9.40861402540033, "grad_norm": 0.21215161681175232, "learning_rate": 2.5563e-05, "loss": 0.0188, "step": 8524 }, { "epoch": 9.409718387631143, "grad_norm": 0.5843149423599243, "learning_rate": 2.5566e-05, "loss": 0.0203, "step": 8525 }, { "epoch": 9.410822749861955, "grad_norm": 0.5866024494171143, "learning_rate": 2.5569e-05, "loss": 0.0231, "step": 8526 }, { "epoch": 9.411927112092766, "grad_norm": 0.35684189200401306, "learning_rate": 2.5572000000000002e-05, "loss": 0.0264, "step": 8527 }, { "epoch": 9.413031474323578, "grad_norm": 0.5042146444320679, "learning_rate": 2.5575e-05, "loss": 0.0226, "step": 8528 }, { "epoch": 9.41413583655439, "grad_norm": 0.38212352991104126, "learning_rate": 2.5578e-05, "loss": 0.0182, "step": 8529 }, { "epoch": 9.415240198785202, "grad_norm": 0.3196638226509094, "learning_rate": 2.5581e-05, "loss": 0.0243, "step": 8530 }, { "epoch": 9.416344561016013, "grad_norm": 0.5843973159790039, "learning_rate": 2.5584e-05, "loss": 0.0855, "step": 8531 }, { "epoch": 9.417448923246825, "grad_norm": 2.7605175971984863, "learning_rate": 2.5587e-05, "loss": 0.0373, "step": 8532 }, { "epoch": 9.418553285477637, "grad_norm": 0.29100045561790466, "learning_rate": 2.559e-05, "loss": 0.0135, "step": 8533 }, { "epoch": 9.419657647708448, "grad_norm": 0.5405256152153015, "learning_rate": 2.5593e-05, "loss": 0.0267, "step": 8534 }, { "epoch": 9.42076200993926, "grad_norm": 0.5196962356567383, "learning_rate": 2.5596e-05, "loss": 0.0319, "step": 8535 }, { "epoch": 9.421866372170072, "grad_norm": 0.23745717108249664, "learning_rate": 2.5599e-05, "loss": 0.013, "step": 8536 }, { "epoch": 9.422970734400884, "grad_norm": 0.2634347081184387, "learning_rate": 2.5602000000000003e-05, "loss": 0.0173, "step": 8537 }, { "epoch": 9.424075096631695, "grad_norm": 0.30621886253356934, "learning_rate": 
2.5605000000000003e-05, "loss": 0.0144, "step": 8538 }, { "epoch": 9.425179458862507, "grad_norm": 0.43831145763397217, "learning_rate": 2.5608000000000003e-05, "loss": 0.0257, "step": 8539 }, { "epoch": 9.42628382109332, "grad_norm": 0.4978099763393402, "learning_rate": 2.5611000000000003e-05, "loss": 0.018, "step": 8540 }, { "epoch": 9.42738818332413, "grad_norm": 0.336507111787796, "learning_rate": 2.5614000000000002e-05, "loss": 0.0156, "step": 8541 }, { "epoch": 9.428492545554942, "grad_norm": 0.309599369764328, "learning_rate": 2.5617e-05, "loss": 0.0174, "step": 8542 }, { "epoch": 9.429596907785754, "grad_norm": 0.2593812346458435, "learning_rate": 2.562e-05, "loss": 0.0145, "step": 8543 }, { "epoch": 9.430701270016565, "grad_norm": 0.3911820948123932, "learning_rate": 2.5623e-05, "loss": 0.0189, "step": 8544 }, { "epoch": 9.431805632247377, "grad_norm": 0.356988787651062, "learning_rate": 2.5625999999999998e-05, "loss": 0.0153, "step": 8545 }, { "epoch": 9.43290999447819, "grad_norm": 0.4189974069595337, "learning_rate": 2.5628999999999998e-05, "loss": 0.0209, "step": 8546 }, { "epoch": 9.434014356709001, "grad_norm": 0.36229538917541504, "learning_rate": 2.5632e-05, "loss": 0.0205, "step": 8547 }, { "epoch": 9.435118718939812, "grad_norm": 0.3766450881958008, "learning_rate": 2.5635e-05, "loss": 0.0137, "step": 8548 }, { "epoch": 9.436223081170624, "grad_norm": 0.8187637329101562, "learning_rate": 2.5638e-05, "loss": 0.0402, "step": 8549 }, { "epoch": 9.437327443401436, "grad_norm": 1.0226268768310547, "learning_rate": 2.5641e-05, "loss": 0.0491, "step": 8550 }, { "epoch": 9.438431805632247, "grad_norm": 0.5734760165214539, "learning_rate": 2.5644e-05, "loss": 0.0235, "step": 8551 }, { "epoch": 9.439536167863059, "grad_norm": 0.34614297747612, "learning_rate": 2.5647e-05, "loss": 0.0108, "step": 8552 }, { "epoch": 9.440640530093871, "grad_norm": 0.8600751757621765, "learning_rate": 2.565e-05, "loss": 0.0273, "step": 8553 }, { "epoch": 9.441744892324682, 
"grad_norm": 0.5917934775352478, "learning_rate": 2.5653e-05, "loss": 0.0242, "step": 8554 }, { "epoch": 9.442849254555494, "grad_norm": 1.2788912057876587, "learning_rate": 2.5656e-05, "loss": 0.3227, "step": 8555 }, { "epoch": 9.443953616786306, "grad_norm": 1.1054984331130981, "learning_rate": 2.5659e-05, "loss": 0.2397, "step": 8556 }, { "epoch": 9.445057979017118, "grad_norm": 0.7479071617126465, "learning_rate": 2.5662000000000003e-05, "loss": 0.187, "step": 8557 }, { "epoch": 9.446162341247929, "grad_norm": 0.8295860886573792, "learning_rate": 2.5665000000000002e-05, "loss": 0.2134, "step": 8558 }, { "epoch": 9.447266703478741, "grad_norm": 0.6499350666999817, "learning_rate": 2.5668000000000002e-05, "loss": 0.1356, "step": 8559 }, { "epoch": 9.448371065709553, "grad_norm": 0.6479580998420715, "learning_rate": 2.5671000000000002e-05, "loss": 0.1362, "step": 8560 }, { "epoch": 9.449475427940364, "grad_norm": 0.5576170086860657, "learning_rate": 2.5674000000000002e-05, "loss": 0.1084, "step": 8561 }, { "epoch": 9.450579790171176, "grad_norm": 0.46227577328681946, "learning_rate": 2.5677e-05, "loss": 0.0548, "step": 8562 }, { "epoch": 9.451684152401988, "grad_norm": 0.7421991229057312, "learning_rate": 2.568e-05, "loss": 0.0943, "step": 8563 }, { "epoch": 9.4527885146328, "grad_norm": 0.41888096928596497, "learning_rate": 2.5683e-05, "loss": 0.0525, "step": 8564 }, { "epoch": 9.45389287686361, "grad_norm": 0.2492133378982544, "learning_rate": 2.5686e-05, "loss": 0.0238, "step": 8565 }, { "epoch": 9.454997239094423, "grad_norm": 0.3662011921405792, "learning_rate": 2.5688999999999997e-05, "loss": 0.0281, "step": 8566 }, { "epoch": 9.456101601325235, "grad_norm": 0.3406142592430115, "learning_rate": 2.5692e-05, "loss": 0.0331, "step": 8567 }, { "epoch": 9.457205963556046, "grad_norm": 0.5586264729499817, "learning_rate": 2.5695e-05, "loss": 0.0321, "step": 8568 }, { "epoch": 9.458310325786858, "grad_norm": 0.4823017716407776, "learning_rate": 2.5698e-05, "loss": 
0.0231, "step": 8569 }, { "epoch": 9.45941468801767, "grad_norm": 0.25366929173469543, "learning_rate": 2.5701e-05, "loss": 0.0098, "step": 8570 }, { "epoch": 9.460519050248482, "grad_norm": 0.30290794372558594, "learning_rate": 2.5704e-05, "loss": 0.018, "step": 8571 }, { "epoch": 9.461623412479293, "grad_norm": 0.5391343235969543, "learning_rate": 2.5707e-05, "loss": 0.0174, "step": 8572 }, { "epoch": 9.462727774710105, "grad_norm": 0.2810716927051544, "learning_rate": 2.571e-05, "loss": 0.0138, "step": 8573 }, { "epoch": 9.463832136940917, "grad_norm": 0.3193711042404175, "learning_rate": 2.5713e-05, "loss": 0.0102, "step": 8574 }, { "epoch": 9.464936499171728, "grad_norm": 0.447774201631546, "learning_rate": 2.5716e-05, "loss": 0.0172, "step": 8575 }, { "epoch": 9.46604086140254, "grad_norm": 1.2326313257217407, "learning_rate": 2.5719e-05, "loss": 0.0172, "step": 8576 }, { "epoch": 9.467145223633352, "grad_norm": 0.620585560798645, "learning_rate": 2.5722000000000002e-05, "loss": 0.0208, "step": 8577 }, { "epoch": 9.468249585864163, "grad_norm": 1.5827748775482178, "learning_rate": 2.5725000000000002e-05, "loss": 0.0232, "step": 8578 }, { "epoch": 9.469353948094975, "grad_norm": 0.3678668439388275, "learning_rate": 2.5728e-05, "loss": 0.0126, "step": 8579 }, { "epoch": 9.470458310325787, "grad_norm": 0.9337801933288574, "learning_rate": 2.5731e-05, "loss": 0.0237, "step": 8580 }, { "epoch": 9.4715626725566, "grad_norm": 0.4761820137500763, "learning_rate": 2.5734e-05, "loss": 0.0265, "step": 8581 }, { "epoch": 9.47266703478741, "grad_norm": 0.3261776864528656, "learning_rate": 2.5737e-05, "loss": 0.0148, "step": 8582 }, { "epoch": 9.473771397018222, "grad_norm": 0.3973373770713806, "learning_rate": 2.574e-05, "loss": 0.0205, "step": 8583 }, { "epoch": 9.474875759249034, "grad_norm": 0.4914402365684509, "learning_rate": 2.5743e-05, "loss": 0.0267, "step": 8584 }, { "epoch": 9.475980121479845, "grad_norm": 0.9921497702598572, "learning_rate": 2.5746e-05, "loss": 
0.0306, "step": 8585 }, { "epoch": 9.477084483710657, "grad_norm": 0.4644831717014313, "learning_rate": 2.5749e-05, "loss": 0.0232, "step": 8586 }, { "epoch": 9.47818884594147, "grad_norm": 0.7737648487091064, "learning_rate": 2.5752000000000003e-05, "loss": 0.034, "step": 8587 }, { "epoch": 9.47929320817228, "grad_norm": 0.3069632649421692, "learning_rate": 2.5755000000000003e-05, "loss": 0.0154, "step": 8588 }, { "epoch": 9.480397570403092, "grad_norm": 0.5186834931373596, "learning_rate": 2.5758000000000003e-05, "loss": 0.0224, "step": 8589 }, { "epoch": 9.481501932633904, "grad_norm": 3.815892219543457, "learning_rate": 2.5761000000000003e-05, "loss": 0.0238, "step": 8590 }, { "epoch": 9.482606294864716, "grad_norm": 3.2320497035980225, "learning_rate": 2.5764e-05, "loss": 0.0299, "step": 8591 }, { "epoch": 9.483710657095527, "grad_norm": 0.3865804076194763, "learning_rate": 2.5767e-05, "loss": 0.0189, "step": 8592 }, { "epoch": 9.484815019326339, "grad_norm": 0.5459467768669128, "learning_rate": 2.577e-05, "loss": 0.0159, "step": 8593 }, { "epoch": 9.485919381557151, "grad_norm": 1.0291677713394165, "learning_rate": 2.5773e-05, "loss": 0.0422, "step": 8594 }, { "epoch": 9.487023743787962, "grad_norm": 0.37881043553352356, "learning_rate": 2.5776e-05, "loss": 0.0158, "step": 8595 }, { "epoch": 9.488128106018774, "grad_norm": 0.700595498085022, "learning_rate": 2.5779e-05, "loss": 0.0275, "step": 8596 }, { "epoch": 9.489232468249586, "grad_norm": 0.37945443391799927, "learning_rate": 2.5782e-05, "loss": 0.0201, "step": 8597 }, { "epoch": 9.490336830480398, "grad_norm": 0.8374513387680054, "learning_rate": 2.5785e-05, "loss": 0.0425, "step": 8598 }, { "epoch": 9.491441192711209, "grad_norm": 0.6502439975738525, "learning_rate": 2.5788e-05, "loss": 0.022, "step": 8599 }, { "epoch": 9.492545554942021, "grad_norm": 0.4115917682647705, "learning_rate": 2.5791e-05, "loss": 0.0147, "step": 8600 }, { "epoch": 9.493649917172833, "grad_norm": 0.7825407981872559, 
"learning_rate": 2.5794e-05, "loss": 0.0397, "step": 8601 }, { "epoch": 9.494754279403644, "grad_norm": 0.45004820823669434, "learning_rate": 2.5797e-05, "loss": 0.0175, "step": 8602 }, { "epoch": 9.495858641634456, "grad_norm": 1.4140437841415405, "learning_rate": 2.58e-05, "loss": 0.0423, "step": 8603 }, { "epoch": 9.496963003865268, "grad_norm": 0.3990062177181244, "learning_rate": 2.5803e-05, "loss": 0.0214, "step": 8604 }, { "epoch": 9.49806736609608, "grad_norm": 0.941776692867279, "learning_rate": 2.5806e-05, "loss": 0.3082, "step": 8605 }, { "epoch": 9.49917172832689, "grad_norm": 0.6596989035606384, "learning_rate": 2.5809e-05, "loss": 0.2205, "step": 8606 }, { "epoch": 9.500276090557703, "grad_norm": 0.850493311882019, "learning_rate": 2.5812000000000003e-05, "loss": 0.2005, "step": 8607 }, { "epoch": 9.501380452788515, "grad_norm": 0.6521908044815063, "learning_rate": 2.5815000000000003e-05, "loss": 0.1521, "step": 8608 }, { "epoch": 9.502484815019326, "grad_norm": 0.7631153464317322, "learning_rate": 2.5818000000000003e-05, "loss": 0.1653, "step": 8609 }, { "epoch": 9.503589177250138, "grad_norm": 0.6114258170127869, "learning_rate": 2.5821000000000002e-05, "loss": 0.1385, "step": 8610 }, { "epoch": 9.50469353948095, "grad_norm": 1.2368651628494263, "learning_rate": 2.5824000000000002e-05, "loss": 0.129, "step": 8611 }, { "epoch": 9.50579790171176, "grad_norm": 0.39142417907714844, "learning_rate": 2.5827000000000002e-05, "loss": 0.0459, "step": 8612 }, { "epoch": 9.506902263942573, "grad_norm": 0.40665602684020996, "learning_rate": 2.5830000000000002e-05, "loss": 0.0279, "step": 8613 }, { "epoch": 9.508006626173385, "grad_norm": 0.6957946419715881, "learning_rate": 2.5833e-05, "loss": 0.0441, "step": 8614 }, { "epoch": 9.509110988404197, "grad_norm": 0.6600527763366699, "learning_rate": 2.5835999999999998e-05, "loss": 0.0624, "step": 8615 }, { "epoch": 9.510215350635008, "grad_norm": 0.36139366030693054, "learning_rate": 2.5838999999999998e-05, "loss": 
0.0227, "step": 8616 }, { "epoch": 9.51131971286582, "grad_norm": 0.2645103931427002, "learning_rate": 2.5842e-05, "loss": 0.0093, "step": 8617 }, { "epoch": 9.512424075096632, "grad_norm": 0.5739006996154785, "learning_rate": 2.5845e-05, "loss": 0.0268, "step": 8618 }, { "epoch": 9.513528437327443, "grad_norm": 0.41745567321777344, "learning_rate": 2.5848e-05, "loss": 0.0246, "step": 8619 }, { "epoch": 9.514632799558255, "grad_norm": 0.3496246039867401, "learning_rate": 2.5851e-05, "loss": 0.0216, "step": 8620 }, { "epoch": 9.515737161789067, "grad_norm": 0.322577565908432, "learning_rate": 2.5854e-05, "loss": 0.0077, "step": 8621 }, { "epoch": 9.516841524019878, "grad_norm": 4.558658599853516, "learning_rate": 2.5857e-05, "loss": 0.0172, "step": 8622 }, { "epoch": 9.51794588625069, "grad_norm": 0.23761658370494843, "learning_rate": 2.586e-05, "loss": 0.0181, "step": 8623 }, { "epoch": 9.519050248481502, "grad_norm": 0.26001161336898804, "learning_rate": 2.5863e-05, "loss": 0.0176, "step": 8624 }, { "epoch": 9.520154610712314, "grad_norm": 0.5392215251922607, "learning_rate": 2.5866e-05, "loss": 0.008, "step": 8625 }, { "epoch": 9.521258972943125, "grad_norm": 0.4497869908809662, "learning_rate": 2.5869e-05, "loss": 0.0177, "step": 8626 }, { "epoch": 9.522363335173937, "grad_norm": 0.6835384964942932, "learning_rate": 2.5872000000000002e-05, "loss": 0.0118, "step": 8627 }, { "epoch": 9.52346769740475, "grad_norm": 0.28376954793930054, "learning_rate": 2.5875000000000002e-05, "loss": 0.0385, "step": 8628 }, { "epoch": 9.52457205963556, "grad_norm": 0.35740289092063904, "learning_rate": 2.5878000000000002e-05, "loss": 0.0179, "step": 8629 }, { "epoch": 9.525676421866372, "grad_norm": 0.7905446887016296, "learning_rate": 2.5881000000000002e-05, "loss": 0.0373, "step": 8630 }, { "epoch": 9.526780784097184, "grad_norm": 0.5026965737342834, "learning_rate": 2.5884e-05, "loss": 0.0169, "step": 8631 }, { "epoch": 9.527885146327996, "grad_norm": 0.5554665327072144, 
"learning_rate": 2.5887e-05, "loss": 0.0239, "step": 8632 }, { "epoch": 9.528989508558807, "grad_norm": 0.4675701856613159, "learning_rate": 2.589e-05, "loss": 0.0203, "step": 8633 }, { "epoch": 9.530093870789619, "grad_norm": 0.4128885269165039, "learning_rate": 2.5893e-05, "loss": 0.0179, "step": 8634 }, { "epoch": 9.531198233020431, "grad_norm": 0.5732763409614563, "learning_rate": 2.5896e-05, "loss": 0.0212, "step": 8635 }, { "epoch": 9.532302595251242, "grad_norm": 0.2840200960636139, "learning_rate": 2.5899e-05, "loss": 0.011, "step": 8636 }, { "epoch": 9.533406957482054, "grad_norm": 1.4983956813812256, "learning_rate": 2.5902e-05, "loss": 0.0157, "step": 8637 }, { "epoch": 9.534511319712866, "grad_norm": 0.9973030090332031, "learning_rate": 2.5905000000000004e-05, "loss": 0.0372, "step": 8638 }, { "epoch": 9.535615681943678, "grad_norm": 0.6288752555847168, "learning_rate": 2.5908000000000003e-05, "loss": 0.0159, "step": 8639 }, { "epoch": 9.536720044174489, "grad_norm": 0.5395722389221191, "learning_rate": 2.5911e-05, "loss": 0.0227, "step": 8640 }, { "epoch": 9.537824406405301, "grad_norm": 0.4163331687450409, "learning_rate": 2.5914e-05, "loss": 0.0233, "step": 8641 }, { "epoch": 9.538928768636113, "grad_norm": 0.5318607091903687, "learning_rate": 2.5917e-05, "loss": 0.0186, "step": 8642 }, { "epoch": 9.540033130866924, "grad_norm": 0.4616074860095978, "learning_rate": 2.592e-05, "loss": 0.0115, "step": 8643 }, { "epoch": 9.541137493097736, "grad_norm": 0.3040436804294586, "learning_rate": 2.5923e-05, "loss": 0.0153, "step": 8644 }, { "epoch": 9.542241855328548, "grad_norm": 0.7124415636062622, "learning_rate": 2.5926e-05, "loss": 0.0201, "step": 8645 }, { "epoch": 9.54334621755936, "grad_norm": 0.22815415263175964, "learning_rate": 2.5929e-05, "loss": 0.0139, "step": 8646 }, { "epoch": 9.54445057979017, "grad_norm": 0.6570734977722168, "learning_rate": 2.5932e-05, "loss": 0.034, "step": 8647 }, { "epoch": 9.545554942020983, "grad_norm": 
0.2805143892765045, "learning_rate": 2.5935e-05, "loss": 0.0133, "step": 8648 }, { "epoch": 9.546659304251795, "grad_norm": 0.18595291674137115, "learning_rate": 2.5938e-05, "loss": 0.0077, "step": 8649 }, { "epoch": 9.547763666482606, "grad_norm": 0.39541515707969666, "learning_rate": 2.5941e-05, "loss": 0.0218, "step": 8650 }, { "epoch": 9.548868028713418, "grad_norm": 0.5013564229011536, "learning_rate": 2.5944e-05, "loss": 0.0311, "step": 8651 }, { "epoch": 9.54997239094423, "grad_norm": 0.38022172451019287, "learning_rate": 2.5947e-05, "loss": 0.0193, "step": 8652 }, { "epoch": 9.55107675317504, "grad_norm": 1.5196325778961182, "learning_rate": 2.595e-05, "loss": 0.0351, "step": 8653 }, { "epoch": 9.552181115405853, "grad_norm": 0.4891842305660248, "learning_rate": 2.5953e-05, "loss": 0.0188, "step": 8654 }, { "epoch": 9.553285477636665, "grad_norm": 0.7146255373954773, "learning_rate": 2.5956e-05, "loss": 0.2078, "step": 8655 }, { "epoch": 9.554389839867477, "grad_norm": 0.8027724027633667, "learning_rate": 2.5959e-05, "loss": 0.1845, "step": 8656 }, { "epoch": 9.555494202098288, "grad_norm": 0.5355453491210938, "learning_rate": 2.5962e-05, "loss": 0.1429, "step": 8657 }, { "epoch": 9.5565985643291, "grad_norm": 0.5599775314331055, "learning_rate": 2.5965000000000003e-05, "loss": 0.1561, "step": 8658 }, { "epoch": 9.557702926559912, "grad_norm": 0.5924291014671326, "learning_rate": 2.5968000000000003e-05, "loss": 0.0964, "step": 8659 }, { "epoch": 9.558807288790723, "grad_norm": 0.797440230846405, "learning_rate": 2.5971000000000003e-05, "loss": 0.1177, "step": 8660 }, { "epoch": 9.559911651021535, "grad_norm": 0.4094248414039612, "learning_rate": 2.5974000000000002e-05, "loss": 0.0916, "step": 8661 }, { "epoch": 9.561016013252347, "grad_norm": 0.6344519257545471, "learning_rate": 2.5977000000000002e-05, "loss": 0.1235, "step": 8662 }, { "epoch": 9.562120375483158, "grad_norm": 0.405908465385437, "learning_rate": 2.5980000000000002e-05, "loss": 0.0603, 
"step": 8663 }, { "epoch": 9.56322473771397, "grad_norm": 0.3323571979999542, "learning_rate": 2.5983000000000002e-05, "loss": 0.0741, "step": 8664 }, { "epoch": 9.564329099944782, "grad_norm": 0.353965163230896, "learning_rate": 2.5985999999999998e-05, "loss": 0.0214, "step": 8665 }, { "epoch": 9.565433462175594, "grad_norm": 0.22477781772613525, "learning_rate": 2.5988999999999998e-05, "loss": 0.0214, "step": 8666 }, { "epoch": 9.566537824406405, "grad_norm": 0.38036006689071655, "learning_rate": 2.5991999999999998e-05, "loss": 0.0239, "step": 8667 }, { "epoch": 9.567642186637217, "grad_norm": 0.1982722282409668, "learning_rate": 2.5995e-05, "loss": 0.014, "step": 8668 }, { "epoch": 9.56874654886803, "grad_norm": 0.2917332649230957, "learning_rate": 2.5998e-05, "loss": 0.028, "step": 8669 }, { "epoch": 9.56985091109884, "grad_norm": 0.4344874620437622, "learning_rate": 2.6001e-05, "loss": 0.0356, "step": 8670 }, { "epoch": 9.570955273329652, "grad_norm": 0.2413608282804489, "learning_rate": 2.6004e-05, "loss": 0.0119, "step": 8671 }, { "epoch": 9.572059635560464, "grad_norm": 0.385536789894104, "learning_rate": 2.6007e-05, "loss": 0.0271, "step": 8672 }, { "epoch": 9.573163997791276, "grad_norm": 0.7262977361679077, "learning_rate": 2.601e-05, "loss": 0.0134, "step": 8673 }, { "epoch": 9.574268360022087, "grad_norm": 0.3451066017150879, "learning_rate": 2.6013e-05, "loss": 0.0123, "step": 8674 }, { "epoch": 9.575372722252899, "grad_norm": 0.5628566145896912, "learning_rate": 2.6016e-05, "loss": 0.028, "step": 8675 }, { "epoch": 9.576477084483711, "grad_norm": 0.25130754709243774, "learning_rate": 2.6019e-05, "loss": 0.0138, "step": 8676 }, { "epoch": 9.577581446714522, "grad_norm": 0.39067375659942627, "learning_rate": 2.6022e-05, "loss": 0.0241, "step": 8677 }, { "epoch": 9.578685808945334, "grad_norm": 0.30723509192466736, "learning_rate": 2.6025000000000002e-05, "loss": 0.0171, "step": 8678 }, { "epoch": 9.579790171176146, "grad_norm": 0.5567193031311035, 
"learning_rate": 2.6028000000000002e-05, "loss": 0.0183, "step": 8679 }, { "epoch": 9.580894533406958, "grad_norm": 0.31431907415390015, "learning_rate": 2.6031000000000002e-05, "loss": 0.0201, "step": 8680 }, { "epoch": 9.581998895637769, "grad_norm": 0.37966036796569824, "learning_rate": 2.6034000000000002e-05, "loss": 0.0208, "step": 8681 }, { "epoch": 9.583103257868581, "grad_norm": 0.2827543616294861, "learning_rate": 2.6037e-05, "loss": 0.0111, "step": 8682 }, { "epoch": 9.584207620099393, "grad_norm": 0.3237892687320709, "learning_rate": 2.604e-05, "loss": 0.0173, "step": 8683 }, { "epoch": 9.585311982330204, "grad_norm": 0.425281286239624, "learning_rate": 2.6043e-05, "loss": 0.0142, "step": 8684 }, { "epoch": 9.586416344561016, "grad_norm": 0.42861685156822205, "learning_rate": 2.6046e-05, "loss": 0.0216, "step": 8685 }, { "epoch": 9.587520706791828, "grad_norm": 0.2375953048467636, "learning_rate": 2.6049e-05, "loss": 0.0105, "step": 8686 }, { "epoch": 9.588625069022639, "grad_norm": 0.5294870138168335, "learning_rate": 2.6052e-05, "loss": 0.0252, "step": 8687 }, { "epoch": 9.589729431253451, "grad_norm": 0.22809964418411255, "learning_rate": 2.6055000000000004e-05, "loss": 0.0134, "step": 8688 }, { "epoch": 9.590833793484263, "grad_norm": 0.6234241127967834, "learning_rate": 2.6058e-05, "loss": 0.0265, "step": 8689 }, { "epoch": 9.591938155715075, "grad_norm": 0.8764466643333435, "learning_rate": 2.6061e-05, "loss": 0.0234, "step": 8690 }, { "epoch": 9.593042517945886, "grad_norm": 0.39929714798927307, "learning_rate": 2.6064e-05, "loss": 0.0137, "step": 8691 }, { "epoch": 9.594146880176698, "grad_norm": 0.47283807396888733, "learning_rate": 2.6067e-05, "loss": 0.0121, "step": 8692 }, { "epoch": 9.59525124240751, "grad_norm": 0.5289340615272522, "learning_rate": 2.607e-05, "loss": 0.0232, "step": 8693 }, { "epoch": 9.59635560463832, "grad_norm": 0.3864181339740753, "learning_rate": 2.6073e-05, "loss": 0.0239, "step": 8694 }, { "epoch": 9.597459966869133, 
"grad_norm": 0.49642881751060486, "learning_rate": 2.6076e-05, "loss": 0.022, "step": 8695 }, { "epoch": 9.598564329099945, "grad_norm": 0.5219742655754089, "learning_rate": 2.6079e-05, "loss": 0.0203, "step": 8696 }, { "epoch": 9.599668691330756, "grad_norm": 0.4705218970775604, "learning_rate": 2.6082e-05, "loss": 0.0208, "step": 8697 }, { "epoch": 9.600773053561568, "grad_norm": 0.2361287921667099, "learning_rate": 2.6085000000000002e-05, "loss": 0.0136, "step": 8698 }, { "epoch": 9.60187741579238, "grad_norm": 0.5441699624061584, "learning_rate": 2.6088e-05, "loss": 0.021, "step": 8699 }, { "epoch": 9.602981778023192, "grad_norm": 0.3280920088291168, "learning_rate": 2.6091e-05, "loss": 0.014, "step": 8700 }, { "epoch": 9.604086140254003, "grad_norm": 0.5933502912521362, "learning_rate": 2.6094e-05, "loss": 0.025, "step": 8701 }, { "epoch": 9.605190502484815, "grad_norm": 0.44610270857810974, "learning_rate": 2.6097e-05, "loss": 0.0132, "step": 8702 }, { "epoch": 9.606294864715627, "grad_norm": 0.5431371331214905, "learning_rate": 2.61e-05, "loss": 0.021, "step": 8703 }, { "epoch": 9.607399226946438, "grad_norm": 0.29331839084625244, "learning_rate": 2.6103e-05, "loss": 0.013, "step": 8704 }, { "epoch": 9.60850358917725, "grad_norm": 0.7282566428184509, "learning_rate": 2.6106e-05, "loss": 0.2441, "step": 8705 }, { "epoch": 9.609607951408062, "grad_norm": 0.8774992823600769, "learning_rate": 2.6109e-05, "loss": 0.2318, "step": 8706 }, { "epoch": 9.610712313638874, "grad_norm": 0.528606116771698, "learning_rate": 2.6112e-05, "loss": 0.1604, "step": 8707 }, { "epoch": 9.611816675869685, "grad_norm": 0.8184887766838074, "learning_rate": 2.6115000000000003e-05, "loss": 0.1175, "step": 8708 }, { "epoch": 9.612921038100497, "grad_norm": 0.806551992893219, "learning_rate": 2.6118000000000003e-05, "loss": 0.1154, "step": 8709 }, { "epoch": 9.61402540033131, "grad_norm": 0.45215222239494324, "learning_rate": 2.6121000000000003e-05, "loss": 0.0594, "step": 8710 }, { 
"epoch": 9.61512976256212, "grad_norm": 0.4577607214450836, "learning_rate": 2.6124000000000003e-05, "loss": 0.0628, "step": 8711 }, { "epoch": 9.616234124792932, "grad_norm": 0.5013653635978699, "learning_rate": 2.6127000000000002e-05, "loss": 0.057, "step": 8712 }, { "epoch": 9.617338487023744, "grad_norm": 0.30402660369873047, "learning_rate": 2.6130000000000002e-05, "loss": 0.0412, "step": 8713 }, { "epoch": 9.618442849254556, "grad_norm": 0.5381269454956055, "learning_rate": 2.6133e-05, "loss": 0.0304, "step": 8714 }, { "epoch": 9.619547211485367, "grad_norm": 0.26068881154060364, "learning_rate": 2.6136e-05, "loss": 0.0292, "step": 8715 }, { "epoch": 9.620651573716179, "grad_norm": 0.2276190221309662, "learning_rate": 2.6138999999999998e-05, "loss": 0.0205, "step": 8716 }, { "epoch": 9.621755935946991, "grad_norm": 0.20821191370487213, "learning_rate": 2.6141999999999998e-05, "loss": 0.0233, "step": 8717 }, { "epoch": 9.622860298177802, "grad_norm": 0.39472246170043945, "learning_rate": 2.6145e-05, "loss": 0.0409, "step": 8718 }, { "epoch": 9.623964660408614, "grad_norm": 0.2675420343875885, "learning_rate": 2.6148e-05, "loss": 0.0147, "step": 8719 }, { "epoch": 9.625069022639426, "grad_norm": 0.3559044897556305, "learning_rate": 2.6151e-05, "loss": 0.0178, "step": 8720 }, { "epoch": 9.626173384870237, "grad_norm": 0.32325613498687744, "learning_rate": 2.6154e-05, "loss": 0.0258, "step": 8721 }, { "epoch": 9.627277747101049, "grad_norm": 0.3180265426635742, "learning_rate": 2.6157e-05, "loss": 0.026, "step": 8722 }, { "epoch": 9.628382109331861, "grad_norm": 0.2536696791648865, "learning_rate": 2.616e-05, "loss": 0.0164, "step": 8723 }, { "epoch": 9.629486471562673, "grad_norm": 0.5793942213058472, "learning_rate": 2.6163e-05, "loss": 0.0201, "step": 8724 }, { "epoch": 9.630590833793484, "grad_norm": 0.3531731963157654, "learning_rate": 2.6166e-05, "loss": 0.0223, "step": 8725 }, { "epoch": 9.631695196024296, "grad_norm": 0.512261152267456, "learning_rate": 
2.6169e-05, "loss": 0.0265, "step": 8726 }, { "epoch": 9.632799558255108, "grad_norm": 0.4923448860645294, "learning_rate": 2.6172e-05, "loss": 0.0269, "step": 8727 }, { "epoch": 9.633903920485919, "grad_norm": 0.2751871645450592, "learning_rate": 2.6175000000000003e-05, "loss": 0.0099, "step": 8728 }, { "epoch": 9.635008282716731, "grad_norm": 0.5800724625587463, "learning_rate": 2.6178000000000002e-05, "loss": 0.026, "step": 8729 }, { "epoch": 9.636112644947543, "grad_norm": 0.20918668806552887, "learning_rate": 2.6181000000000002e-05, "loss": 0.0105, "step": 8730 }, { "epoch": 9.637217007178354, "grad_norm": 0.28083115816116333, "learning_rate": 2.6184000000000002e-05, "loss": 0.0086, "step": 8731 }, { "epoch": 9.638321369409166, "grad_norm": 0.897068440914154, "learning_rate": 2.6187000000000002e-05, "loss": 0.032, "step": 8732 }, { "epoch": 9.639425731639978, "grad_norm": 0.7625981569290161, "learning_rate": 2.619e-05, "loss": 0.0293, "step": 8733 }, { "epoch": 9.64053009387079, "grad_norm": 0.2812451720237732, "learning_rate": 2.6193e-05, "loss": 0.0182, "step": 8734 }, { "epoch": 9.6416344561016, "grad_norm": 0.3497077524662018, "learning_rate": 2.6196e-05, "loss": 0.0169, "step": 8735 }, { "epoch": 9.642738818332413, "grad_norm": 0.4860394299030304, "learning_rate": 2.6199e-05, "loss": 0.0207, "step": 8736 }, { "epoch": 9.643843180563225, "grad_norm": 0.5951259136199951, "learning_rate": 2.6202e-05, "loss": 0.0248, "step": 8737 }, { "epoch": 9.644947542794036, "grad_norm": 0.14764082431793213, "learning_rate": 2.6205e-05, "loss": 0.0066, "step": 8738 }, { "epoch": 9.646051905024848, "grad_norm": 0.48895323276519775, "learning_rate": 2.6208e-05, "loss": 0.0161, "step": 8739 }, { "epoch": 9.64715626725566, "grad_norm": 0.2548582851886749, "learning_rate": 2.6211e-05, "loss": 0.0133, "step": 8740 }, { "epoch": 9.648260629486472, "grad_norm": 0.2986546456813812, "learning_rate": 2.6214e-05, "loss": 0.0128, "step": 8741 }, { "epoch": 9.649364991717283, 
"grad_norm": 0.6174180507659912, "learning_rate": 2.6217e-05, "loss": 0.0178, "step": 8742 }, { "epoch": 9.650469353948095, "grad_norm": 0.2808133661746979, "learning_rate": 2.622e-05, "loss": 0.0193, "step": 8743 }, { "epoch": 9.651573716178907, "grad_norm": 0.3418254256248474, "learning_rate": 2.6223e-05, "loss": 0.0146, "step": 8744 }, { "epoch": 9.652678078409718, "grad_norm": 0.7615751624107361, "learning_rate": 2.6226e-05, "loss": 0.0178, "step": 8745 }, { "epoch": 9.65378244064053, "grad_norm": 0.6082271933555603, "learning_rate": 2.6229e-05, "loss": 0.0243, "step": 8746 }, { "epoch": 9.654886802871342, "grad_norm": 1.195202350616455, "learning_rate": 2.6232e-05, "loss": 0.0279, "step": 8747 }, { "epoch": 9.655991165102154, "grad_norm": 0.5552700161933899, "learning_rate": 2.6235000000000002e-05, "loss": 0.029, "step": 8748 }, { "epoch": 9.657095527332965, "grad_norm": 0.8615849018096924, "learning_rate": 2.6238000000000002e-05, "loss": 0.0216, "step": 8749 }, { "epoch": 9.658199889563777, "grad_norm": 0.3342786729335785, "learning_rate": 2.6241e-05, "loss": 0.0117, "step": 8750 }, { "epoch": 9.65930425179459, "grad_norm": 0.8149930238723755, "learning_rate": 2.6244e-05, "loss": 0.0375, "step": 8751 }, { "epoch": 9.6604086140254, "grad_norm": 0.3975354731082916, "learning_rate": 2.6247e-05, "loss": 0.0164, "step": 8752 }, { "epoch": 9.661512976256212, "grad_norm": 0.32742539048194885, "learning_rate": 2.625e-05, "loss": 0.0216, "step": 8753 }, { "epoch": 9.662617338487024, "grad_norm": 0.9197916388511658, "learning_rate": 2.6253e-05, "loss": 0.0376, "step": 8754 }, { "epoch": 9.663721700717835, "grad_norm": 1.1811814308166504, "learning_rate": 2.6256e-05, "loss": 0.2434, "step": 8755 }, { "epoch": 9.664826062948647, "grad_norm": 0.83965665102005, "learning_rate": 2.6259e-05, "loss": 0.2459, "step": 8756 }, { "epoch": 9.665930425179459, "grad_norm": 0.7708525657653809, "learning_rate": 2.6262e-05, "loss": 0.1981, "step": 8757 }, { "epoch": 9.667034787410271, 
"grad_norm": 0.8442225456237793, "learning_rate": 2.6265e-05, "loss": 0.119, "step": 8758 }, { "epoch": 9.668139149641082, "grad_norm": 0.5699493885040283, "learning_rate": 2.6268000000000003e-05, "loss": 0.1052, "step": 8759 }, { "epoch": 9.669243511871894, "grad_norm": 0.558240532875061, "learning_rate": 2.6271000000000003e-05, "loss": 0.0807, "step": 8760 }, { "epoch": 9.670347874102706, "grad_norm": 0.4716870188713074, "learning_rate": 2.6274000000000003e-05, "loss": 0.0839, "step": 8761 }, { "epoch": 9.671452236333517, "grad_norm": 0.4465672969818115, "learning_rate": 2.6277000000000003e-05, "loss": 0.0685, "step": 8762 }, { "epoch": 9.672556598564329, "grad_norm": 0.48227375745773315, "learning_rate": 2.628e-05, "loss": 0.0607, "step": 8763 }, { "epoch": 9.673660960795141, "grad_norm": 0.6943885684013367, "learning_rate": 2.6283e-05, "loss": 0.0538, "step": 8764 }, { "epoch": 9.674765323025952, "grad_norm": 0.4140637516975403, "learning_rate": 2.6286e-05, "loss": 0.0457, "step": 8765 }, { "epoch": 9.675869685256764, "grad_norm": 0.28674307465553284, "learning_rate": 2.6289e-05, "loss": 0.0114, "step": 8766 }, { "epoch": 9.676974047487576, "grad_norm": 0.22975799441337585, "learning_rate": 2.6292e-05, "loss": 0.0134, "step": 8767 }, { "epoch": 9.678078409718388, "grad_norm": 0.5109076499938965, "learning_rate": 2.6294999999999998e-05, "loss": 0.0384, "step": 8768 }, { "epoch": 9.679182771949199, "grad_norm": 0.29804494976997375, "learning_rate": 2.6298e-05, "loss": 0.0489, "step": 8769 }, { "epoch": 9.680287134180011, "grad_norm": 0.5433131456375122, "learning_rate": 2.6301e-05, "loss": 0.0587, "step": 8770 }, { "epoch": 9.681391496410823, "grad_norm": 0.5725875496864319, "learning_rate": 2.6304e-05, "loss": 0.0393, "step": 8771 }, { "epoch": 9.682495858641634, "grad_norm": 0.2664881944656372, "learning_rate": 2.6307e-05, "loss": 0.0191, "step": 8772 }, { "epoch": 9.683600220872446, "grad_norm": 0.2879613935947418, "learning_rate": 2.631e-05, "loss": 0.0149, 
"step": 8773 }, { "epoch": 9.684704583103258, "grad_norm": 0.7653622627258301, "learning_rate": 2.6313e-05, "loss": 0.035, "step": 8774 }, { "epoch": 9.68580894533407, "grad_norm": 0.3577098846435547, "learning_rate": 2.6316e-05, "loss": 0.0183, "step": 8775 }, { "epoch": 9.68691330756488, "grad_norm": 0.30450451374053955, "learning_rate": 2.6319e-05, "loss": 0.0195, "step": 8776 }, { "epoch": 9.688017669795693, "grad_norm": 0.3662222921848297, "learning_rate": 2.6322e-05, "loss": 0.0127, "step": 8777 }, { "epoch": 9.689122032026505, "grad_norm": 0.313294917345047, "learning_rate": 2.6325e-05, "loss": 0.0228, "step": 8778 }, { "epoch": 9.690226394257316, "grad_norm": 0.45011281967163086, "learning_rate": 2.6328000000000003e-05, "loss": 0.0202, "step": 8779 }, { "epoch": 9.691330756488128, "grad_norm": 0.3664844036102295, "learning_rate": 2.6331000000000003e-05, "loss": 0.0199, "step": 8780 }, { "epoch": 9.69243511871894, "grad_norm": 0.41424816846847534, "learning_rate": 2.6334000000000002e-05, "loss": 0.0293, "step": 8781 }, { "epoch": 9.693539480949752, "grad_norm": 0.3585834503173828, "learning_rate": 2.6337000000000002e-05, "loss": 0.0199, "step": 8782 }, { "epoch": 9.694643843180563, "grad_norm": 0.3050343692302704, "learning_rate": 2.6340000000000002e-05, "loss": 0.0144, "step": 8783 }, { "epoch": 9.695748205411375, "grad_norm": 0.3633005619049072, "learning_rate": 2.6343000000000002e-05, "loss": 0.0234, "step": 8784 }, { "epoch": 9.696852567642187, "grad_norm": 0.3332843780517578, "learning_rate": 2.6346e-05, "loss": 0.0184, "step": 8785 }, { "epoch": 9.697956929872998, "grad_norm": 0.5296278595924377, "learning_rate": 2.6349e-05, "loss": 0.0282, "step": 8786 }, { "epoch": 9.69906129210381, "grad_norm": 0.2850680649280548, "learning_rate": 2.6351999999999998e-05, "loss": 0.0107, "step": 8787 }, { "epoch": 9.700165654334622, "grad_norm": 0.3857482969760895, "learning_rate": 2.6354999999999998e-05, "loss": 0.0254, "step": 8788 }, { "epoch": 9.701270016565433, 
"grad_norm": 0.16459499299526215, "learning_rate": 2.6358e-05, "loss": 0.0088, "step": 8789 }, { "epoch": 9.702374378796245, "grad_norm": 0.2813546061515808, "learning_rate": 2.6361e-05, "loss": 0.0147, "step": 8790 }, { "epoch": 9.703478741027057, "grad_norm": 0.2459787130355835, "learning_rate": 2.6364e-05, "loss": 0.0122, "step": 8791 }, { "epoch": 9.70458310325787, "grad_norm": 0.573216438293457, "learning_rate": 2.6367e-05, "loss": 0.0179, "step": 8792 }, { "epoch": 9.70568746548868, "grad_norm": 0.5693495869636536, "learning_rate": 2.637e-05, "loss": 0.0266, "step": 8793 }, { "epoch": 9.706791827719492, "grad_norm": 0.36053141951560974, "learning_rate": 2.6373e-05, "loss": 0.0193, "step": 8794 }, { "epoch": 9.707896189950304, "grad_norm": 0.20249249041080475, "learning_rate": 2.6376e-05, "loss": 0.0134, "step": 8795 }, { "epoch": 9.709000552181115, "grad_norm": 0.4743991792201996, "learning_rate": 2.6379e-05, "loss": 0.0142, "step": 8796 }, { "epoch": 9.710104914411927, "grad_norm": 0.3601248562335968, "learning_rate": 2.6382e-05, "loss": 0.0176, "step": 8797 }, { "epoch": 9.71120927664274, "grad_norm": 1.1734545230865479, "learning_rate": 2.6385e-05, "loss": 0.0289, "step": 8798 }, { "epoch": 9.71231363887355, "grad_norm": 0.31955528259277344, "learning_rate": 2.6388000000000002e-05, "loss": 0.0233, "step": 8799 }, { "epoch": 9.713418001104362, "grad_norm": 0.47305795550346375, "learning_rate": 2.6391000000000002e-05, "loss": 0.0473, "step": 8800 }, { "epoch": 9.714522363335174, "grad_norm": 0.6695038676261902, "learning_rate": 2.6394000000000002e-05, "loss": 0.0206, "step": 8801 }, { "epoch": 9.715626725565986, "grad_norm": 0.3004641532897949, "learning_rate": 2.6397e-05, "loss": 0.0424, "step": 8802 }, { "epoch": 9.716731087796797, "grad_norm": 0.9827761650085449, "learning_rate": 2.64e-05, "loss": 0.0266, "step": 8803 }, { "epoch": 9.717835450027609, "grad_norm": 0.45357558131217957, "learning_rate": 2.6403e-05, "loss": 0.0168, "step": 8804 }, { "epoch": 
9.718939812258421, "grad_norm": 1.140260100364685, "learning_rate": 2.6406e-05, "loss": 0.2634, "step": 8805 }, { "epoch": 9.720044174489232, "grad_norm": 0.5315662622451782, "learning_rate": 2.6409e-05, "loss": 0.2075, "step": 8806 }, { "epoch": 9.721148536720044, "grad_norm": 2.4804153442382812, "learning_rate": 2.6412e-05, "loss": 0.1695, "step": 8807 }, { "epoch": 9.722252898950856, "grad_norm": 0.5147624015808105, "learning_rate": 2.6415e-05, "loss": 0.1043, "step": 8808 }, { "epoch": 9.723357261181668, "grad_norm": 0.6724797487258911, "learning_rate": 2.6418000000000004e-05, "loss": 0.0965, "step": 8809 }, { "epoch": 9.724461623412479, "grad_norm": 0.9426968693733215, "learning_rate": 2.6421000000000003e-05, "loss": 0.12, "step": 8810 }, { "epoch": 9.725565985643291, "grad_norm": 0.3189395070075989, "learning_rate": 2.6424000000000003e-05, "loss": 0.0525, "step": 8811 }, { "epoch": 9.726670347874103, "grad_norm": 0.7305189371109009, "learning_rate": 2.6427e-05, "loss": 0.0907, "step": 8812 }, { "epoch": 9.727774710104914, "grad_norm": 0.6821882128715515, "learning_rate": 2.643e-05, "loss": 0.0434, "step": 8813 }, { "epoch": 9.728879072335726, "grad_norm": 0.3549629747867584, "learning_rate": 2.6433e-05, "loss": 0.0355, "step": 8814 }, { "epoch": 9.729983434566538, "grad_norm": 0.2650471031665802, "learning_rate": 2.6436e-05, "loss": 0.0203, "step": 8815 }, { "epoch": 9.73108779679735, "grad_norm": 0.6466639041900635, "learning_rate": 2.6439e-05, "loss": 0.0462, "step": 8816 }, { "epoch": 9.73219215902816, "grad_norm": 0.3701111972332001, "learning_rate": 2.6442e-05, "loss": 0.0219, "step": 8817 }, { "epoch": 9.733296521258973, "grad_norm": 0.33885496854782104, "learning_rate": 2.6445e-05, "loss": 0.018, "step": 8818 }, { "epoch": 9.734400883489785, "grad_norm": 0.4192425608634949, "learning_rate": 2.6448e-05, "loss": 0.0195, "step": 8819 }, { "epoch": 9.735505245720596, "grad_norm": 0.31617292761802673, "learning_rate": 2.6451e-05, "loss": 0.0209, "step": 
8820 }, { "epoch": 9.736609607951408, "grad_norm": 0.3886798024177551, "learning_rate": 2.6454e-05, "loss": 0.0205, "step": 8821 }, { "epoch": 9.73771397018222, "grad_norm": 0.31875544786453247, "learning_rate": 2.6457e-05, "loss": 0.0215, "step": 8822 }, { "epoch": 9.738818332413032, "grad_norm": 0.47264212369918823, "learning_rate": 2.646e-05, "loss": 0.0197, "step": 8823 }, { "epoch": 9.739922694643843, "grad_norm": 0.23626337945461273, "learning_rate": 2.6463e-05, "loss": 0.0143, "step": 8824 }, { "epoch": 9.741027056874655, "grad_norm": 0.4987275004386902, "learning_rate": 2.6466e-05, "loss": 0.0247, "step": 8825 }, { "epoch": 9.742131419105467, "grad_norm": 0.4319305717945099, "learning_rate": 2.6469e-05, "loss": 0.0147, "step": 8826 }, { "epoch": 9.743235781336278, "grad_norm": 0.6882176399230957, "learning_rate": 2.6472e-05, "loss": 0.0592, "step": 8827 }, { "epoch": 9.74434014356709, "grad_norm": 0.2899248003959656, "learning_rate": 2.6475e-05, "loss": 0.0183, "step": 8828 }, { "epoch": 9.745444505797902, "grad_norm": 0.3321526348590851, "learning_rate": 2.6478000000000003e-05, "loss": 0.0198, "step": 8829 }, { "epoch": 9.746548868028713, "grad_norm": 0.43866777420043945, "learning_rate": 2.6481000000000003e-05, "loss": 0.0191, "step": 8830 }, { "epoch": 9.747653230259525, "grad_norm": 0.250010222196579, "learning_rate": 2.6484000000000003e-05, "loss": 0.0162, "step": 8831 }, { "epoch": 9.748757592490337, "grad_norm": 0.3104546070098877, "learning_rate": 2.6487000000000002e-05, "loss": 0.008, "step": 8832 }, { "epoch": 9.74986195472115, "grad_norm": 0.27650731801986694, "learning_rate": 2.6490000000000002e-05, "loss": 0.0184, "step": 8833 }, { "epoch": 9.75096631695196, "grad_norm": 0.5591205954551697, "learning_rate": 2.6493000000000002e-05, "loss": 0.0124, "step": 8834 }, { "epoch": 9.752070679182772, "grad_norm": 0.2228991985321045, "learning_rate": 2.6496000000000002e-05, "loss": 0.0121, "step": 8835 }, { "epoch": 9.753175041413584, "grad_norm": 
0.22044441103935242, "learning_rate": 2.6499e-05, "loss": 0.0141, "step": 8836 }, { "epoch": 9.754279403644395, "grad_norm": 0.17676229774951935, "learning_rate": 2.6501999999999998e-05, "loss": 0.0139, "step": 8837 }, { "epoch": 9.755383765875207, "grad_norm": 0.21777689456939697, "learning_rate": 2.6504999999999998e-05, "loss": 0.0104, "step": 8838 }, { "epoch": 9.75648812810602, "grad_norm": 0.31903567910194397, "learning_rate": 2.6508e-05, "loss": 0.0195, "step": 8839 }, { "epoch": 9.75759249033683, "grad_norm": 0.21296538412570953, "learning_rate": 2.6511e-05, "loss": 0.0152, "step": 8840 }, { "epoch": 9.758696852567642, "grad_norm": 0.23852410912513733, "learning_rate": 2.6514e-05, "loss": 0.0157, "step": 8841 }, { "epoch": 9.759801214798454, "grad_norm": 0.5744771957397461, "learning_rate": 2.6517e-05, "loss": 0.0413, "step": 8842 }, { "epoch": 9.760905577029266, "grad_norm": 0.36524102091789246, "learning_rate": 2.652e-05, "loss": 0.021, "step": 8843 }, { "epoch": 9.762009939260077, "grad_norm": 0.531319797039032, "learning_rate": 2.6523e-05, "loss": 0.0247, "step": 8844 }, { "epoch": 9.763114301490889, "grad_norm": 0.3754769563674927, "learning_rate": 2.6526e-05, "loss": 0.0124, "step": 8845 }, { "epoch": 9.764218663721701, "grad_norm": 0.2690368592739105, "learning_rate": 2.6529e-05, "loss": 0.0167, "step": 8846 }, { "epoch": 9.765323025952512, "grad_norm": 0.4582330584526062, "learning_rate": 2.6532e-05, "loss": 0.0219, "step": 8847 }, { "epoch": 9.766427388183324, "grad_norm": 0.7445901036262512, "learning_rate": 2.6535e-05, "loss": 0.0241, "step": 8848 }, { "epoch": 9.767531750414136, "grad_norm": 0.2922150194644928, "learning_rate": 2.6538000000000002e-05, "loss": 0.0227, "step": 8849 }, { "epoch": 9.768636112644948, "grad_norm": 0.8738771677017212, "learning_rate": 2.6541000000000002e-05, "loss": 0.0284, "step": 8850 }, { "epoch": 9.769740474875759, "grad_norm": 0.5883990526199341, "learning_rate": 2.6544000000000002e-05, "loss": 0.0199, "step": 8851 
}, { "epoch": 9.770844837106571, "grad_norm": 0.7511867880821228, "learning_rate": 2.6547000000000002e-05, "loss": 0.0291, "step": 8852 }, { "epoch": 9.771949199337383, "grad_norm": 0.9515043497085571, "learning_rate": 2.655e-05, "loss": 0.0258, "step": 8853 }, { "epoch": 9.773053561568194, "grad_norm": 0.5632907748222351, "learning_rate": 2.6553e-05, "loss": 0.0269, "step": 8854 }, { "epoch": 9.774157923799006, "grad_norm": 1.4907419681549072, "learning_rate": 2.6556e-05, "loss": 0.4097, "step": 8855 }, { "epoch": 9.775262286029818, "grad_norm": 1.0022320747375488, "learning_rate": 2.6559e-05, "loss": 0.2677, "step": 8856 }, { "epoch": 9.77636664826063, "grad_norm": 0.6236240267753601, "learning_rate": 2.6562e-05, "loss": 0.1262, "step": 8857 }, { "epoch": 9.77747101049144, "grad_norm": 0.7075533270835876, "learning_rate": 2.6565e-05, "loss": 0.1373, "step": 8858 }, { "epoch": 9.778575372722253, "grad_norm": 0.7677469849586487, "learning_rate": 2.6568000000000004e-05, "loss": 0.1294, "step": 8859 }, { "epoch": 9.779679734953065, "grad_norm": 0.5419256091117859, "learning_rate": 2.6571000000000004e-05, "loss": 0.0792, "step": 8860 }, { "epoch": 9.780784097183876, "grad_norm": 0.39156609773635864, "learning_rate": 2.6574e-05, "loss": 0.0759, "step": 8861 }, { "epoch": 9.781888459414688, "grad_norm": 0.41148242354393005, "learning_rate": 2.6577e-05, "loss": 0.0546, "step": 8862 }, { "epoch": 9.7829928216455, "grad_norm": 0.4952600300312042, "learning_rate": 2.658e-05, "loss": 0.0329, "step": 8863 }, { "epoch": 9.78409718387631, "grad_norm": 0.6830414533615112, "learning_rate": 2.6583e-05, "loss": 0.0257, "step": 8864 }, { "epoch": 9.785201546107123, "grad_norm": 0.3982184827327728, "learning_rate": 2.6586e-05, "loss": 0.0393, "step": 8865 }, { "epoch": 9.786305908337935, "grad_norm": 0.7541590332984924, "learning_rate": 2.6589e-05, "loss": 0.0268, "step": 8866 }, { "epoch": 9.787410270568747, "grad_norm": 0.2885846495628357, "learning_rate": 2.6592e-05, "loss": 
0.0136, "step": 8867 }, { "epoch": 9.788514632799558, "grad_norm": 0.3564968705177307, "learning_rate": 2.6595e-05, "loss": 0.0191, "step": 8868 }, { "epoch": 9.78961899503037, "grad_norm": 0.30753713846206665, "learning_rate": 2.6598000000000002e-05, "loss": 0.0207, "step": 8869 }, { "epoch": 9.790723357261182, "grad_norm": 0.26320531964302063, "learning_rate": 2.6601e-05, "loss": 0.0179, "step": 8870 }, { "epoch": 9.791827719491993, "grad_norm": 0.2836143672466278, "learning_rate": 2.6604e-05, "loss": 0.023, "step": 8871 }, { "epoch": 9.792932081722805, "grad_norm": 0.5472736358642578, "learning_rate": 2.6607e-05, "loss": 0.0362, "step": 8872 }, { "epoch": 9.794036443953617, "grad_norm": 0.3424125909805298, "learning_rate": 2.661e-05, "loss": 0.0211, "step": 8873 }, { "epoch": 9.795140806184428, "grad_norm": 0.28211891651153564, "learning_rate": 2.6613e-05, "loss": 0.0156, "step": 8874 }, { "epoch": 9.79624516841524, "grad_norm": 0.2645380198955536, "learning_rate": 2.6616e-05, "loss": 0.0129, "step": 8875 }, { "epoch": 9.797349530646052, "grad_norm": 0.36417272686958313, "learning_rate": 2.6619e-05, "loss": 0.0202, "step": 8876 }, { "epoch": 9.798453892876864, "grad_norm": 0.602942705154419, "learning_rate": 2.6622e-05, "loss": 0.0288, "step": 8877 }, { "epoch": 9.799558255107675, "grad_norm": 0.3634059727191925, "learning_rate": 2.6625e-05, "loss": 0.0233, "step": 8878 }, { "epoch": 9.800662617338487, "grad_norm": 0.4325764775276184, "learning_rate": 2.6628e-05, "loss": 0.0136, "step": 8879 }, { "epoch": 9.8017669795693, "grad_norm": 0.4216238260269165, "learning_rate": 2.6631000000000003e-05, "loss": 0.0172, "step": 8880 }, { "epoch": 9.80287134180011, "grad_norm": 0.420742005109787, "learning_rate": 2.6634000000000003e-05, "loss": 0.0165, "step": 8881 }, { "epoch": 9.803975704030922, "grad_norm": 0.4651321768760681, "learning_rate": 2.6637000000000003e-05, "loss": 0.0182, "step": 8882 }, { "epoch": 9.805080066261734, "grad_norm": 0.46537744998931885, 
"learning_rate": 2.6640000000000002e-05, "loss": 0.0247, "step": 8883 }, { "epoch": 9.806184428492546, "grad_norm": 0.4070507287979126, "learning_rate": 2.6643000000000002e-05, "loss": 0.0239, "step": 8884 }, { "epoch": 9.807288790723357, "grad_norm": 0.3163467049598694, "learning_rate": 2.6646000000000002e-05, "loss": 0.0189, "step": 8885 }, { "epoch": 9.808393152954169, "grad_norm": 0.3387095332145691, "learning_rate": 2.6649e-05, "loss": 0.0234, "step": 8886 }, { "epoch": 9.809497515184981, "grad_norm": 0.2328922301530838, "learning_rate": 2.6651999999999998e-05, "loss": 0.012, "step": 8887 }, { "epoch": 9.810601877415792, "grad_norm": 0.5852007865905762, "learning_rate": 2.6654999999999998e-05, "loss": 0.0373, "step": 8888 }, { "epoch": 9.811706239646604, "grad_norm": 0.5833531618118286, "learning_rate": 2.6657999999999998e-05, "loss": 0.0168, "step": 8889 }, { "epoch": 9.812810601877416, "grad_norm": 0.4008578062057495, "learning_rate": 2.6661e-05, "loss": 0.016, "step": 8890 }, { "epoch": 9.813914964108228, "grad_norm": 0.30437538027763367, "learning_rate": 2.6664e-05, "loss": 0.0169, "step": 8891 }, { "epoch": 9.815019326339039, "grad_norm": 0.34860745072364807, "learning_rate": 2.6667e-05, "loss": 0.0183, "step": 8892 }, { "epoch": 9.816123688569851, "grad_norm": 0.3290729522705078, "learning_rate": 2.667e-05, "loss": 0.0141, "step": 8893 }, { "epoch": 9.817228050800663, "grad_norm": 0.21347516775131226, "learning_rate": 2.6673e-05, "loss": 0.01, "step": 8894 }, { "epoch": 9.818332413031474, "grad_norm": 0.5556511282920837, "learning_rate": 2.6676e-05, "loss": 0.036, "step": 8895 }, { "epoch": 9.819436775262286, "grad_norm": 0.6670355200767517, "learning_rate": 2.6679e-05, "loss": 0.0217, "step": 8896 }, { "epoch": 9.820541137493098, "grad_norm": 0.40461617708206177, "learning_rate": 2.6682e-05, "loss": 0.0228, "step": 8897 }, { "epoch": 9.821645499723909, "grad_norm": 0.6769081354141235, "learning_rate": 2.6685e-05, "loss": 0.0154, "step": 8898 }, { 
"epoch": 9.82274986195472, "grad_norm": 0.9909865260124207, "learning_rate": 2.6688e-05, "loss": 0.026, "step": 8899 }, { "epoch": 9.823854224185533, "grad_norm": 0.27416354417800903, "learning_rate": 2.6691000000000002e-05, "loss": 0.0173, "step": 8900 }, { "epoch": 9.824958586416345, "grad_norm": 0.26532500982284546, "learning_rate": 2.6694000000000002e-05, "loss": 0.0129, "step": 8901 }, { "epoch": 9.826062948647156, "grad_norm": 0.3733590841293335, "learning_rate": 2.6697000000000002e-05, "loss": 0.0236, "step": 8902 }, { "epoch": 9.827167310877968, "grad_norm": 0.49322280287742615, "learning_rate": 2.6700000000000002e-05, "loss": 0.0235, "step": 8903 }, { "epoch": 9.82827167310878, "grad_norm": 0.43517571687698364, "learning_rate": 2.6703e-05, "loss": 0.0238, "step": 8904 }, { "epoch": 9.82937603533959, "grad_norm": 0.7777904868125916, "learning_rate": 2.6706e-05, "loss": 0.2329, "step": 8905 }, { "epoch": 9.830480397570403, "grad_norm": 1.0430338382720947, "learning_rate": 2.6709e-05, "loss": 0.1886, "step": 8906 }, { "epoch": 9.831584759801215, "grad_norm": 0.8095927834510803, "learning_rate": 2.6712e-05, "loss": 0.1599, "step": 8907 }, { "epoch": 9.832689122032026, "grad_norm": 0.6426218748092651, "learning_rate": 2.6715e-05, "loss": 0.1507, "step": 8908 }, { "epoch": 9.833793484262838, "grad_norm": 0.3880494236946106, "learning_rate": 2.6718e-05, "loss": 0.0937, "step": 8909 }, { "epoch": 9.83489784649365, "grad_norm": 0.49958160519599915, "learning_rate": 2.6721e-05, "loss": 0.1399, "step": 8910 }, { "epoch": 9.836002208724462, "grad_norm": 0.4774201214313507, "learning_rate": 2.6724e-05, "loss": 0.1039, "step": 8911 }, { "epoch": 9.837106570955273, "grad_norm": 0.591862678527832, "learning_rate": 2.6727e-05, "loss": 0.0721, "step": 8912 }, { "epoch": 9.838210933186085, "grad_norm": 0.724899172782898, "learning_rate": 2.673e-05, "loss": 0.0814, "step": 8913 }, { "epoch": 9.839315295416897, "grad_norm": 0.5262103080749512, "learning_rate": 2.6733e-05, 
"loss": 0.0687, "step": 8914 }, { "epoch": 9.840419657647708, "grad_norm": 0.5338785648345947, "learning_rate": 2.6736e-05, "loss": 0.0443, "step": 8915 }, { "epoch": 9.84152401987852, "grad_norm": 0.24517542123794556, "learning_rate": 2.6739e-05, "loss": 0.0315, "step": 8916 }, { "epoch": 9.842628382109332, "grad_norm": 0.46169471740722656, "learning_rate": 2.6742e-05, "loss": 0.0318, "step": 8917 }, { "epoch": 9.843732744340144, "grad_norm": 0.1806519627571106, "learning_rate": 2.6745e-05, "loss": 0.0123, "step": 8918 }, { "epoch": 9.844837106570955, "grad_norm": 0.4993237555027008, "learning_rate": 2.6748e-05, "loss": 0.0297, "step": 8919 }, { "epoch": 9.845941468801767, "grad_norm": 0.21523456275463104, "learning_rate": 2.6751000000000002e-05, "loss": 0.0158, "step": 8920 }, { "epoch": 9.84704583103258, "grad_norm": 0.4922754466533661, "learning_rate": 2.6754e-05, "loss": 0.058, "step": 8921 }, { "epoch": 9.84815019326339, "grad_norm": 0.3312496542930603, "learning_rate": 2.6757e-05, "loss": 0.0251, "step": 8922 }, { "epoch": 9.849254555494202, "grad_norm": 0.34119942784309387, "learning_rate": 2.676e-05, "loss": 0.0268, "step": 8923 }, { "epoch": 9.850358917725014, "grad_norm": 0.8850733041763306, "learning_rate": 2.6763e-05, "loss": 0.0237, "step": 8924 }, { "epoch": 9.851463279955826, "grad_norm": 0.2366371750831604, "learning_rate": 2.6766e-05, "loss": 0.0117, "step": 8925 }, { "epoch": 9.852567642186637, "grad_norm": 0.40188220143318176, "learning_rate": 2.6769e-05, "loss": 0.0241, "step": 8926 }, { "epoch": 9.853672004417449, "grad_norm": 0.17977416515350342, "learning_rate": 2.6772e-05, "loss": 0.0126, "step": 8927 }, { "epoch": 9.854776366648261, "grad_norm": 0.35604655742645264, "learning_rate": 2.6775e-05, "loss": 0.0181, "step": 8928 }, { "epoch": 9.855880728879072, "grad_norm": 0.15626536309719086, "learning_rate": 2.6778e-05, "loss": 0.0111, "step": 8929 }, { "epoch": 9.856985091109884, "grad_norm": 0.20734956860542297, "learning_rate": 
2.6781000000000003e-05, "loss": 0.0092, "step": 8930 }, { "epoch": 9.858089453340696, "grad_norm": 0.18354976177215576, "learning_rate": 2.6784000000000003e-05, "loss": 0.0137, "step": 8931 }, { "epoch": 9.859193815571507, "grad_norm": 0.29074451327323914, "learning_rate": 2.6787000000000003e-05, "loss": 0.0181, "step": 8932 }, { "epoch": 9.860298177802319, "grad_norm": 0.34371548891067505, "learning_rate": 2.6790000000000003e-05, "loss": 0.0219, "step": 8933 }, { "epoch": 9.861402540033131, "grad_norm": 0.5522485971450806, "learning_rate": 2.6793000000000002e-05, "loss": 0.0162, "step": 8934 }, { "epoch": 9.862506902263943, "grad_norm": 0.5940958857536316, "learning_rate": 2.6796e-05, "loss": 0.0275, "step": 8935 }, { "epoch": 9.863611264494754, "grad_norm": 0.3002626895904541, "learning_rate": 2.6799e-05, "loss": 0.0195, "step": 8936 }, { "epoch": 9.864715626725566, "grad_norm": 0.27224960923194885, "learning_rate": 2.6802e-05, "loss": 0.0113, "step": 8937 }, { "epoch": 9.865819988956378, "grad_norm": 0.4762168824672699, "learning_rate": 2.6805e-05, "loss": 0.0157, "step": 8938 }, { "epoch": 9.866924351187189, "grad_norm": 0.5317811965942383, "learning_rate": 2.6807999999999998e-05, "loss": 0.0518, "step": 8939 }, { "epoch": 9.868028713418001, "grad_norm": 0.42606309056282043, "learning_rate": 2.6811e-05, "loss": 0.0157, "step": 8940 }, { "epoch": 9.869133075648813, "grad_norm": 0.37788423895835876, "learning_rate": 2.6814e-05, "loss": 0.0164, "step": 8941 }, { "epoch": 9.870237437879624, "grad_norm": 0.4390276074409485, "learning_rate": 2.6817e-05, "loss": 0.0154, "step": 8942 }, { "epoch": 9.871341800110436, "grad_norm": 0.8926059603691101, "learning_rate": 2.682e-05, "loss": 0.0246, "step": 8943 }, { "epoch": 9.872446162341248, "grad_norm": 0.4071098268032074, "learning_rate": 2.6823e-05, "loss": 0.0152, "step": 8944 }, { "epoch": 9.87355052457206, "grad_norm": 0.7625567317008972, "learning_rate": 2.6826e-05, "loss": 0.0198, "step": 8945 }, { "epoch": 
9.87465488680287, "grad_norm": 0.6219233870506287, "learning_rate": 2.6829e-05, "loss": 0.0182, "step": 8946 }, { "epoch": 9.875759249033683, "grad_norm": 0.5710915923118591, "learning_rate": 2.6832e-05, "loss": 0.0264, "step": 8947 }, { "epoch": 9.876863611264495, "grad_norm": 0.6202757954597473, "learning_rate": 2.6835e-05, "loss": 0.0105, "step": 8948 }, { "epoch": 9.877967973495306, "grad_norm": 0.5334099531173706, "learning_rate": 2.6838e-05, "loss": 0.0235, "step": 8949 }, { "epoch": 9.879072335726118, "grad_norm": 0.504680335521698, "learning_rate": 2.6841000000000003e-05, "loss": 0.0263, "step": 8950 }, { "epoch": 9.88017669795693, "grad_norm": 1.080145001411438, "learning_rate": 2.6844000000000003e-05, "loss": 0.0259, "step": 8951 }, { "epoch": 9.881281060187742, "grad_norm": 0.5735577940940857, "learning_rate": 2.6847000000000002e-05, "loss": 0.0172, "step": 8952 }, { "epoch": 9.882385422418553, "grad_norm": 0.5516744256019592, "learning_rate": 2.6850000000000002e-05, "loss": 0.033, "step": 8953 }, { "epoch": 9.883489784649365, "grad_norm": 0.5744791626930237, "learning_rate": 2.6853000000000002e-05, "loss": 0.0438, "step": 8954 }, { "epoch": 9.884594146880177, "grad_norm": 0.9907703399658203, "learning_rate": 2.6856000000000002e-05, "loss": 0.2494, "step": 8955 }, { "epoch": 9.885698509110988, "grad_norm": 0.6325370669364929, "learning_rate": 2.6859e-05, "loss": 0.1797, "step": 8956 }, { "epoch": 9.8868028713418, "grad_norm": 0.6775254011154175, "learning_rate": 2.6862e-05, "loss": 0.2193, "step": 8957 }, { "epoch": 9.887907233572612, "grad_norm": 0.7153055667877197, "learning_rate": 2.6865e-05, "loss": 0.1565, "step": 8958 }, { "epoch": 9.889011595803424, "grad_norm": 0.6830533742904663, "learning_rate": 2.6867999999999998e-05, "loss": 0.1448, "step": 8959 }, { "epoch": 9.890115958034235, "grad_norm": 0.4686148166656494, "learning_rate": 2.6871e-05, "loss": 0.0999, "step": 8960 }, { "epoch": 9.891220320265047, "grad_norm": 0.5116949677467346, 
"learning_rate": 2.6874e-05, "loss": 0.11, "step": 8961 }, { "epoch": 9.89232468249586, "grad_norm": 0.5168678760528564, "learning_rate": 2.6877e-05, "loss": 0.0549, "step": 8962 }, { "epoch": 9.89342904472667, "grad_norm": 0.31451234221458435, "learning_rate": 2.688e-05, "loss": 0.0674, "step": 8963 }, { "epoch": 9.894533406957482, "grad_norm": 0.2205224335193634, "learning_rate": 2.6883e-05, "loss": 0.0317, "step": 8964 }, { "epoch": 9.895637769188294, "grad_norm": 0.47901836037635803, "learning_rate": 2.6886e-05, "loss": 0.0741, "step": 8965 }, { "epoch": 9.896742131419105, "grad_norm": 0.6032803058624268, "learning_rate": 2.6889e-05, "loss": 0.0283, "step": 8966 }, { "epoch": 9.897846493649917, "grad_norm": 0.4652714133262634, "learning_rate": 2.6892e-05, "loss": 0.032, "step": 8967 }, { "epoch": 9.898950855880729, "grad_norm": 0.49999305605888367, "learning_rate": 2.6895e-05, "loss": 0.0448, "step": 8968 }, { "epoch": 9.900055218111541, "grad_norm": 0.2363167554140091, "learning_rate": 2.6898e-05, "loss": 0.0193, "step": 8969 }, { "epoch": 9.901159580342352, "grad_norm": 0.4064125418663025, "learning_rate": 2.6901000000000002e-05, "loss": 0.0274, "step": 8970 }, { "epoch": 9.902263942573164, "grad_norm": 0.21918636560440063, "learning_rate": 2.6904000000000002e-05, "loss": 0.0193, "step": 8971 }, { "epoch": 9.903368304803976, "grad_norm": 0.5802554488182068, "learning_rate": 2.6907000000000002e-05, "loss": 0.04, "step": 8972 }, { "epoch": 9.904472667034787, "grad_norm": 0.3158996105194092, "learning_rate": 2.691e-05, "loss": 0.0224, "step": 8973 }, { "epoch": 9.905577029265599, "grad_norm": 0.45206189155578613, "learning_rate": 2.6913e-05, "loss": 0.0183, "step": 8974 }, { "epoch": 9.906681391496411, "grad_norm": 0.3255937397480011, "learning_rate": 2.6916e-05, "loss": 0.0182, "step": 8975 }, { "epoch": 9.907785753727222, "grad_norm": 0.2293708324432373, "learning_rate": 2.6919e-05, "loss": 0.014, "step": 8976 }, { "epoch": 9.908890115958034, "grad_norm": 
0.5146649479866028, "learning_rate": 2.6922e-05, "loss": 0.0238, "step": 8977 }, { "epoch": 9.909994478188846, "grad_norm": 0.32585930824279785, "learning_rate": 2.6925e-05, "loss": 0.0128, "step": 8978 }, { "epoch": 9.911098840419658, "grad_norm": 0.82969731092453, "learning_rate": 2.6928e-05, "loss": 0.0542, "step": 8979 }, { "epoch": 9.912203202650469, "grad_norm": 0.3726753294467926, "learning_rate": 2.6931000000000004e-05, "loss": 0.0199, "step": 8980 }, { "epoch": 9.913307564881281, "grad_norm": 0.3945523500442505, "learning_rate": 2.6934000000000003e-05, "loss": 0.0176, "step": 8981 }, { "epoch": 9.914411927112093, "grad_norm": 0.28772515058517456, "learning_rate": 2.6937000000000003e-05, "loss": 0.0182, "step": 8982 }, { "epoch": 9.915516289342904, "grad_norm": 0.2889237701892853, "learning_rate": 2.6940000000000003e-05, "loss": 0.0155, "step": 8983 }, { "epoch": 9.916620651573716, "grad_norm": 0.3987014591693878, "learning_rate": 2.6943e-05, "loss": 0.013, "step": 8984 }, { "epoch": 9.917725013804528, "grad_norm": 0.862190842628479, "learning_rate": 2.6946e-05, "loss": 0.0269, "step": 8985 }, { "epoch": 9.91882937603534, "grad_norm": 0.3296775817871094, "learning_rate": 2.6949e-05, "loss": 0.0158, "step": 8986 }, { "epoch": 9.91993373826615, "grad_norm": 0.4655817449092865, "learning_rate": 2.6952e-05, "loss": 0.0231, "step": 8987 }, { "epoch": 9.921038100496963, "grad_norm": 0.2189818173646927, "learning_rate": 2.6955e-05, "loss": 0.0106, "step": 8988 }, { "epoch": 9.922142462727775, "grad_norm": 0.4102749824523926, "learning_rate": 2.6958e-05, "loss": 0.0177, "step": 8989 }, { "epoch": 9.923246824958586, "grad_norm": 0.571000337600708, "learning_rate": 2.6961e-05, "loss": 0.0286, "step": 8990 }, { "epoch": 9.924351187189398, "grad_norm": 0.6617463231086731, "learning_rate": 2.6964e-05, "loss": 0.0207, "step": 8991 }, { "epoch": 9.92545554942021, "grad_norm": 0.8748771548271179, "learning_rate": 2.6967e-05, "loss": 0.0306, "step": 8992 }, { "epoch": 
9.926559911651022, "grad_norm": 0.5932709574699402, "learning_rate": 2.697e-05, "loss": 0.0254, "step": 8993 }, { "epoch": 9.927664273881833, "grad_norm": 0.45833414793014526, "learning_rate": 2.6973e-05, "loss": 0.0135, "step": 8994 }, { "epoch": 9.928768636112645, "grad_norm": 0.7758292555809021, "learning_rate": 2.6976e-05, "loss": 0.0262, "step": 8995 }, { "epoch": 9.929872998343457, "grad_norm": 0.5783655047416687, "learning_rate": 2.6979e-05, "loss": 0.0344, "step": 8996 }, { "epoch": 9.930977360574268, "grad_norm": 0.26617804169654846, "learning_rate": 2.6982e-05, "loss": 0.0079, "step": 8997 }, { "epoch": 9.93208172280508, "grad_norm": 0.9909905195236206, "learning_rate": 2.6985e-05, "loss": 0.0195, "step": 8998 }, { "epoch": 9.933186085035892, "grad_norm": 0.7481018304824829, "learning_rate": 2.6988e-05, "loss": 0.0231, "step": 8999 }, { "epoch": 9.934290447266704, "grad_norm": 0.45158651471138, "learning_rate": 2.6991000000000003e-05, "loss": 0.0166, "step": 9000 }, { "epoch": 9.934290447266704, "eval_cer": 0.12224900847655339, "eval_loss": 0.32677167654037476, "eval_runtime": 16.1015, "eval_samples_per_second": 18.88, "eval_steps_per_second": 0.621, "eval_wer": 0.4063699155794321, "step": 9000 }, { "epoch": 9.935394809497515, "grad_norm": 0.8512423634529114, "learning_rate": 2.6994000000000003e-05, "loss": 0.0368, "step": 9001 }, { "epoch": 9.936499171728327, "grad_norm": 0.6817329525947571, "learning_rate": 2.6997000000000003e-05, "loss": 0.0237, "step": 9002 }, { "epoch": 9.93760353395914, "grad_norm": 0.6945475935935974, "learning_rate": 2.7000000000000002e-05, "loss": 0.0205, "step": 9003 }, { "epoch": 9.93870789618995, "grad_norm": 0.6857059597969055, "learning_rate": 2.7003000000000002e-05, "loss": 0.019, "step": 9004 }, { "epoch": 9.939812258420762, "grad_norm": 0.8833844661712646, "learning_rate": 2.7006000000000002e-05, "loss": 0.2599, "step": 9005 }, { "epoch": 9.940916620651574, "grad_norm": 0.7167413830757141, "learning_rate": 
2.7009000000000002e-05, "loss": 0.1719, "step": 9006 }, { "epoch": 9.942020982882385, "grad_norm": 0.5920681953430176, "learning_rate": 2.7012e-05, "loss": 0.1041, "step": 9007 }, { "epoch": 9.943125345113197, "grad_norm": 1.081552505493164, "learning_rate": 2.7015e-05, "loss": 0.128, "step": 9008 }, { "epoch": 9.94422970734401, "grad_norm": 0.6021474003791809, "learning_rate": 2.7017999999999998e-05, "loss": 0.1182, "step": 9009 }, { "epoch": 9.945334069574821, "grad_norm": 0.7905261516571045, "learning_rate": 2.7020999999999998e-05, "loss": 0.1579, "step": 9010 }, { "epoch": 9.946438431805632, "grad_norm": 0.6843597292900085, "learning_rate": 2.7024e-05, "loss": 0.0669, "step": 9011 }, { "epoch": 9.947542794036444, "grad_norm": 0.715198814868927, "learning_rate": 2.7027e-05, "loss": 0.1652, "step": 9012 }, { "epoch": 9.948647156267256, "grad_norm": 0.33188971877098083, "learning_rate": 2.703e-05, "loss": 0.0558, "step": 9013 }, { "epoch": 9.949751518498067, "grad_norm": 0.3621635138988495, "learning_rate": 2.7033e-05, "loss": 0.0425, "step": 9014 }, { "epoch": 9.950855880728879, "grad_norm": 0.599626898765564, "learning_rate": 2.7036e-05, "loss": 0.0785, "step": 9015 }, { "epoch": 9.951960242959691, "grad_norm": 0.27703455090522766, "learning_rate": 2.7039e-05, "loss": 0.0258, "step": 9016 }, { "epoch": 9.953064605190502, "grad_norm": 0.48743128776550293, "learning_rate": 2.7042e-05, "loss": 0.0448, "step": 9017 }, { "epoch": 9.954168967421314, "grad_norm": 0.2849569618701935, "learning_rate": 2.7045e-05, "loss": 0.019, "step": 9018 }, { "epoch": 9.955273329652126, "grad_norm": 0.24010996520519257, "learning_rate": 2.7048e-05, "loss": 0.0188, "step": 9019 }, { "epoch": 9.956377691882938, "grad_norm": 0.3477511703968048, "learning_rate": 2.7051e-05, "loss": 0.0224, "step": 9020 }, { "epoch": 9.957482054113749, "grad_norm": 0.41006940603256226, "learning_rate": 2.7054000000000002e-05, "loss": 0.0205, "step": 9021 }, { "epoch": 9.958586416344561, "grad_norm": 
0.6658679246902466, "learning_rate": 2.7057000000000002e-05, "loss": 0.0367, "step": 9022 }, { "epoch": 9.959690778575373, "grad_norm": 0.31996068358421326, "learning_rate": 2.7060000000000002e-05, "loss": 0.0217, "step": 9023 }, { "epoch": 9.960795140806184, "grad_norm": 0.3182416260242462, "learning_rate": 2.7063e-05, "loss": 0.0151, "step": 9024 }, { "epoch": 9.961899503036996, "grad_norm": 0.47599443793296814, "learning_rate": 2.7066e-05, "loss": 0.0198, "step": 9025 }, { "epoch": 9.963003865267808, "grad_norm": 0.5550960302352905, "learning_rate": 2.7069e-05, "loss": 0.0189, "step": 9026 }, { "epoch": 9.96410822749862, "grad_norm": 0.3569648563861847, "learning_rate": 2.7072e-05, "loss": 0.0238, "step": 9027 }, { "epoch": 9.96521258972943, "grad_norm": 0.27278998494148254, "learning_rate": 2.7075e-05, "loss": 0.0098, "step": 9028 }, { "epoch": 9.966316951960243, "grad_norm": 0.22294212877750397, "learning_rate": 2.7078e-05, "loss": 0.0152, "step": 9029 }, { "epoch": 9.967421314191055, "grad_norm": 0.5199846625328064, "learning_rate": 2.7081e-05, "loss": 0.0202, "step": 9030 }, { "epoch": 9.968525676421866, "grad_norm": 0.6163621544837952, "learning_rate": 2.7084000000000004e-05, "loss": 0.0219, "step": 9031 }, { "epoch": 9.969630038652678, "grad_norm": 0.26619112491607666, "learning_rate": 2.7087000000000003e-05, "loss": 0.0096, "step": 9032 }, { "epoch": 9.97073440088349, "grad_norm": 0.29587388038635254, "learning_rate": 2.709e-05, "loss": 0.0214, "step": 9033 }, { "epoch": 9.971838763114302, "grad_norm": 0.3985982835292816, "learning_rate": 2.7093e-05, "loss": 0.0182, "step": 9034 }, { "epoch": 9.972943125345113, "grad_norm": 0.468380331993103, "learning_rate": 2.7096e-05, "loss": 0.0238, "step": 9035 }, { "epoch": 9.974047487575925, "grad_norm": 0.2722693085670471, "learning_rate": 2.7099e-05, "loss": 0.0166, "step": 9036 }, { "epoch": 9.975151849806737, "grad_norm": 0.3120676875114441, "learning_rate": 2.7102e-05, "loss": 0.0142, "step": 9037 }, { 
"epoch": 9.976256212037548, "grad_norm": 0.3902539908885956, "learning_rate": 2.7105e-05, "loss": 0.018, "step": 9038 }, { "epoch": 9.97736057426836, "grad_norm": 0.4936384856700897, "learning_rate": 2.7108e-05, "loss": 0.0194, "step": 9039 }, { "epoch": 9.978464936499172, "grad_norm": 0.5186707973480225, "learning_rate": 2.7111e-05, "loss": 0.0218, "step": 9040 }, { "epoch": 9.979569298729983, "grad_norm": 0.5985786318778992, "learning_rate": 2.7114e-05, "loss": 0.0195, "step": 9041 }, { "epoch": 9.980673660960795, "grad_norm": 0.9610677361488342, "learning_rate": 2.7117e-05, "loss": 0.0257, "step": 9042 }, { "epoch": 9.981778023191607, "grad_norm": 0.34327584505081177, "learning_rate": 2.712e-05, "loss": 0.0213, "step": 9043 }, { "epoch": 9.98288238542242, "grad_norm": 0.4990732669830322, "learning_rate": 2.7123e-05, "loss": 0.0202, "step": 9044 }, { "epoch": 9.98398674765323, "grad_norm": 0.37637102603912354, "learning_rate": 2.7126e-05, "loss": 0.019, "step": 9045 }, { "epoch": 9.985091109884042, "grad_norm": 0.4672223925590515, "learning_rate": 2.7129e-05, "loss": 0.0222, "step": 9046 }, { "epoch": 9.986195472114854, "grad_norm": 0.3143559396266937, "learning_rate": 2.7132e-05, "loss": 0.0143, "step": 9047 }, { "epoch": 9.987299834345665, "grad_norm": 0.34323057532310486, "learning_rate": 2.7135e-05, "loss": 0.0227, "step": 9048 }, { "epoch": 9.988404196576477, "grad_norm": 0.5578515529632568, "learning_rate": 2.7138e-05, "loss": 0.0304, "step": 9049 }, { "epoch": 9.98950855880729, "grad_norm": 0.7517427206039429, "learning_rate": 2.7141e-05, "loss": 0.029, "step": 9050 }, { "epoch": 9.9906129210381, "grad_norm": 0.42354464530944824, "learning_rate": 2.7144000000000003e-05, "loss": 0.031, "step": 9051 }, { "epoch": 9.991717283268912, "grad_norm": 1.1597774028778076, "learning_rate": 2.7147000000000003e-05, "loss": 0.0341, "step": 9052 }, { "epoch": 9.992821645499724, "grad_norm": 0.5233387351036072, "learning_rate": 2.7150000000000003e-05, "loss": 0.0242, 
"step": 9053 }, { "epoch": 9.993926007730536, "grad_norm": 0.45626017451286316, "learning_rate": 2.7153000000000002e-05, "loss": 0.0192, "step": 9054 }, { "epoch": 9.995030369961347, "grad_norm": 0.6530646681785583, "learning_rate": 2.7156000000000002e-05, "loss": 0.1208, "step": 9055 }, { "epoch": 9.996134732192159, "grad_norm": 0.519381046295166, "learning_rate": 2.7159000000000002e-05, "loss": 0.0235, "step": 9056 }, { "epoch": 9.997239094422971, "grad_norm": 0.20847168564796448, "learning_rate": 2.7162000000000002e-05, "loss": 0.0137, "step": 9057 }, { "epoch": 9.998343456653782, "grad_norm": 0.30219441652297974, "learning_rate": 2.7164999999999998e-05, "loss": 0.0152, "step": 9058 }, { "epoch": 9.999447818884594, "grad_norm": 0.25107452273368835, "learning_rate": 2.7167999999999998e-05, "loss": 0.0151, "step": 9059 }, { "epoch": 10.0, "grad_norm": 1.005473017692566, "learning_rate": 2.7170999999999998e-05, "loss": 0.0221, "step": 9060 }, { "epoch": 10.001104362230812, "grad_norm": 1.6040751934051514, "learning_rate": 2.7174e-05, "loss": 0.2649, "step": 9061 }, { "epoch": 10.002208724461623, "grad_norm": 0.631400465965271, "learning_rate": 2.7177e-05, "loss": 0.2023, "step": 9062 }, { "epoch": 10.003313086692435, "grad_norm": 0.6895785927772522, "learning_rate": 2.718e-05, "loss": 0.2034, "step": 9063 }, { "epoch": 10.004417448923247, "grad_norm": 1.442748785018921, "learning_rate": 2.7183e-05, "loss": 0.1705, "step": 9064 }, { "epoch": 10.00552181115406, "grad_norm": 0.6890271306037903, "learning_rate": 2.7186e-05, "loss": 0.1291, "step": 9065 }, { "epoch": 10.00662617338487, "grad_norm": 0.5459824204444885, "learning_rate": 2.7189e-05, "loss": 0.0704, "step": 9066 }, { "epoch": 10.007730535615682, "grad_norm": 0.3516850471496582, "learning_rate": 2.7192e-05, "loss": 0.0502, "step": 9067 }, { "epoch": 10.008834897846494, "grad_norm": 0.4462882876396179, "learning_rate": 2.7195e-05, "loss": 0.0646, "step": 9068 }, { "epoch": 10.009939260077305, "grad_norm": 
0.5789511203765869, "learning_rate": 2.7198e-05, "loss": 0.0676, "step": 9069 }, { "epoch": 10.011043622308117, "grad_norm": 0.4183391332626343, "learning_rate": 2.7201e-05, "loss": 0.0879, "step": 9070 }, { "epoch": 10.01214798453893, "grad_norm": 0.33958202600479126, "learning_rate": 2.7204000000000002e-05, "loss": 0.0253, "step": 9071 }, { "epoch": 10.01325234676974, "grad_norm": 0.6670125722885132, "learning_rate": 2.7207000000000002e-05, "loss": 0.0281, "step": 9072 }, { "epoch": 10.014356709000552, "grad_norm": 0.36669057607650757, "learning_rate": 2.7210000000000002e-05, "loss": 0.0212, "step": 9073 }, { "epoch": 10.015461071231364, "grad_norm": 0.4316140115261078, "learning_rate": 2.7213000000000002e-05, "loss": 0.027, "step": 9074 }, { "epoch": 10.016565433462176, "grad_norm": 0.3586699962615967, "learning_rate": 2.7216e-05, "loss": 0.0213, "step": 9075 }, { "epoch": 10.017669795692987, "grad_norm": 0.17780138552188873, "learning_rate": 2.7219e-05, "loss": 0.0197, "step": 9076 }, { "epoch": 10.018774157923799, "grad_norm": 0.33037129044532776, "learning_rate": 2.7222e-05, "loss": 0.0164, "step": 9077 }, { "epoch": 10.019878520154611, "grad_norm": 0.4135245084762573, "learning_rate": 2.7225e-05, "loss": 0.0326, "step": 9078 }, { "epoch": 10.020982882385422, "grad_norm": 0.28823646903038025, "learning_rate": 2.7228e-05, "loss": 0.0201, "step": 9079 }, { "epoch": 10.022087244616234, "grad_norm": 0.40497782826423645, "learning_rate": 2.7231e-05, "loss": 0.0265, "step": 9080 }, { "epoch": 10.023191606847046, "grad_norm": 0.29364293813705444, "learning_rate": 2.7234000000000004e-05, "loss": 0.0187, "step": 9081 }, { "epoch": 10.024295969077858, "grad_norm": 0.14831171929836273, "learning_rate": 2.7237e-05, "loss": 0.0099, "step": 9082 }, { "epoch": 10.025400331308669, "grad_norm": 0.261421799659729, "learning_rate": 2.724e-05, "loss": 0.0095, "step": 9083 }, { "epoch": 10.026504693539481, "grad_norm": 0.22800135612487793, "learning_rate": 2.7243e-05, "loss": 
0.0145, "step": 9084 }, { "epoch": 10.027609055770293, "grad_norm": 0.6927878260612488, "learning_rate": 2.7246e-05, "loss": 0.0177, "step": 9085 }, { "epoch": 10.028713418001104, "grad_norm": 0.20955562591552734, "learning_rate": 2.7249e-05, "loss": 0.0091, "step": 9086 }, { "epoch": 10.029817780231916, "grad_norm": 0.47698214650154114, "learning_rate": 2.7252e-05, "loss": 0.0194, "step": 9087 }, { "epoch": 10.030922142462728, "grad_norm": 0.40928131341934204, "learning_rate": 2.7255e-05, "loss": 0.0206, "step": 9088 }, { "epoch": 10.032026504693539, "grad_norm": 0.5868310332298279, "learning_rate": 2.7258e-05, "loss": 0.024, "step": 9089 }, { "epoch": 10.03313086692435, "grad_norm": 0.19401387870311737, "learning_rate": 2.7261e-05, "loss": 0.0082, "step": 9090 }, { "epoch": 10.034235229155163, "grad_norm": 0.4594128131866455, "learning_rate": 2.7264000000000002e-05, "loss": 0.0114, "step": 9091 }, { "epoch": 10.035339591385975, "grad_norm": 0.40612971782684326, "learning_rate": 2.7267e-05, "loss": 0.0228, "step": 9092 }, { "epoch": 10.036443953616786, "grad_norm": 0.3255557715892792, "learning_rate": 2.727e-05, "loss": 0.0157, "step": 9093 }, { "epoch": 10.037548315847598, "grad_norm": 0.5184491276741028, "learning_rate": 2.7273e-05, "loss": 0.0175, "step": 9094 }, { "epoch": 10.03865267807841, "grad_norm": 0.3856593668460846, "learning_rate": 2.7276e-05, "loss": 0.0191, "step": 9095 }, { "epoch": 10.03975704030922, "grad_norm": 0.2878784239292145, "learning_rate": 2.7279e-05, "loss": 0.0179, "step": 9096 }, { "epoch": 10.040861402540033, "grad_norm": 0.9184284210205078, "learning_rate": 2.7282e-05, "loss": 0.0302, "step": 9097 }, { "epoch": 10.041965764770845, "grad_norm": 0.30993303656578064, "learning_rate": 2.7285e-05, "loss": 0.0179, "step": 9098 }, { "epoch": 10.043070127001657, "grad_norm": 0.6896310448646545, "learning_rate": 2.7288e-05, "loss": 0.0203, "step": 9099 }, { "epoch": 10.044174489232468, "grad_norm": 0.8971958756446838, "learning_rate": 
2.7291e-05, "loss": 0.0196, "step": 9100 }, { "epoch": 10.04527885146328, "grad_norm": 1.0430337190628052, "learning_rate": 2.7294000000000003e-05, "loss": 0.0186, "step": 9101 }, { "epoch": 10.046383213694092, "grad_norm": 0.5389330387115479, "learning_rate": 2.7297000000000003e-05, "loss": 0.0219, "step": 9102 }, { "epoch": 10.047487575924903, "grad_norm": 0.6305811405181885, "learning_rate": 2.7300000000000003e-05, "loss": 0.0136, "step": 9103 }, { "epoch": 10.048591938155715, "grad_norm": 0.2882043421268463, "learning_rate": 2.7303000000000003e-05, "loss": 0.0121, "step": 9104 }, { "epoch": 10.049696300386527, "grad_norm": 0.5309380292892456, "learning_rate": 2.7306000000000002e-05, "loss": 0.012, "step": 9105 }, { "epoch": 10.050800662617338, "grad_norm": 0.44951504468917847, "learning_rate": 2.7309000000000002e-05, "loss": 0.013, "step": 9106 }, { "epoch": 10.05190502484815, "grad_norm": 0.33199435472488403, "learning_rate": 2.7312e-05, "loss": 0.0126, "step": 9107 }, { "epoch": 10.053009387078962, "grad_norm": 0.43881064653396606, "learning_rate": 2.7315e-05, "loss": 0.0217, "step": 9108 }, { "epoch": 10.054113749309774, "grad_norm": 0.5533071160316467, "learning_rate": 2.7318e-05, "loss": 0.0238, "step": 9109 }, { "epoch": 10.055218111540585, "grad_norm": 0.6228059530258179, "learning_rate": 2.7320999999999998e-05, "loss": 0.0225, "step": 9110 }, { "epoch": 10.056322473771397, "grad_norm": 0.959031343460083, "learning_rate": 2.7324e-05, "loss": 0.184, "step": 9111 }, { "epoch": 10.05742683600221, "grad_norm": 0.8119384050369263, "learning_rate": 2.7327e-05, "loss": 0.2438, "step": 9112 }, { "epoch": 10.05853119823302, "grad_norm": 0.735933780670166, "learning_rate": 2.733e-05, "loss": 0.1385, "step": 9113 }, { "epoch": 10.059635560463832, "grad_norm": 0.5237299799919128, "learning_rate": 2.7333e-05, "loss": 0.0996, "step": 9114 }, { "epoch": 10.060739922694644, "grad_norm": 0.6981085538864136, "learning_rate": 2.7336e-05, "loss": 0.1524, "step": 9115 }, { 
"epoch": 10.061844284925456, "grad_norm": 0.5842934846878052, "learning_rate": 2.7339e-05, "loss": 0.1446, "step": 9116 }, { "epoch": 10.062948647156267, "grad_norm": 0.38897979259490967, "learning_rate": 2.7342e-05, "loss": 0.0541, "step": 9117 }, { "epoch": 10.064053009387079, "grad_norm": 0.31626275181770325, "learning_rate": 2.7345e-05, "loss": 0.0344, "step": 9118 }, { "epoch": 10.065157371617891, "grad_norm": 0.277190625667572, "learning_rate": 2.7348e-05, "loss": 0.0329, "step": 9119 }, { "epoch": 10.066261733848702, "grad_norm": 0.293965607881546, "learning_rate": 2.7351e-05, "loss": 0.0274, "step": 9120 }, { "epoch": 10.067366096079514, "grad_norm": 0.25508207082748413, "learning_rate": 2.7354000000000003e-05, "loss": 0.0286, "step": 9121 }, { "epoch": 10.068470458310326, "grad_norm": 0.22341834008693695, "learning_rate": 2.7357000000000003e-05, "loss": 0.0199, "step": 9122 }, { "epoch": 10.069574820541137, "grad_norm": 0.36349108815193176, "learning_rate": 2.7360000000000002e-05, "loss": 0.0208, "step": 9123 }, { "epoch": 10.070679182771949, "grad_norm": 0.2793061137199402, "learning_rate": 2.7363000000000002e-05, "loss": 0.0233, "step": 9124 }, { "epoch": 10.071783545002761, "grad_norm": 0.17392675578594208, "learning_rate": 2.7366000000000002e-05, "loss": 0.0079, "step": 9125 }, { "epoch": 10.072887907233573, "grad_norm": 0.3374043107032776, "learning_rate": 2.7369000000000002e-05, "loss": 0.016, "step": 9126 }, { "epoch": 10.073992269464384, "grad_norm": 0.3531125783920288, "learning_rate": 2.7372e-05, "loss": 0.017, "step": 9127 }, { "epoch": 10.075096631695196, "grad_norm": 0.22194220125675201, "learning_rate": 2.7375e-05, "loss": 0.0119, "step": 9128 }, { "epoch": 10.076200993926008, "grad_norm": 0.32834017276763916, "learning_rate": 2.7378e-05, "loss": 0.0142, "step": 9129 }, { "epoch": 10.077305356156819, "grad_norm": 0.18756042420864105, "learning_rate": 2.7381e-05, "loss": 0.0155, "step": 9130 }, { "epoch": 10.078409718387631, "grad_norm": 
0.17874614894390106, "learning_rate": 2.7383999999999997e-05, "loss": 0.0136, "step": 9131 }, { "epoch": 10.079514080618443, "grad_norm": 0.20155583322048187, "learning_rate": 2.7387e-05, "loss": 0.0135, "step": 9132 }, { "epoch": 10.080618442849255, "grad_norm": 0.4737233519554138, "learning_rate": 2.739e-05, "loss": 0.0157, "step": 9133 }, { "epoch": 10.081722805080066, "grad_norm": 0.2502332329750061, "learning_rate": 2.7393e-05, "loss": 0.0099, "step": 9134 }, { "epoch": 10.082827167310878, "grad_norm": 0.37183865904808044, "learning_rate": 2.7396e-05, "loss": 0.0105, "step": 9135 }, { "epoch": 10.08393152954169, "grad_norm": 0.4252408444881439, "learning_rate": 2.7399e-05, "loss": 0.0251, "step": 9136 }, { "epoch": 10.0850358917725, "grad_norm": 0.3870113492012024, "learning_rate": 2.7402e-05, "loss": 0.0182, "step": 9137 }, { "epoch": 10.086140254003313, "grad_norm": 0.3194650709629059, "learning_rate": 2.7405e-05, "loss": 0.0162, "step": 9138 }, { "epoch": 10.087244616234125, "grad_norm": 0.43811553716659546, "learning_rate": 2.7408e-05, "loss": 0.0138, "step": 9139 }, { "epoch": 10.088348978464936, "grad_norm": 0.23818598687648773, "learning_rate": 2.7411e-05, "loss": 0.0147, "step": 9140 }, { "epoch": 10.089453340695748, "grad_norm": 0.4856295883655548, "learning_rate": 2.7414e-05, "loss": 0.0174, "step": 9141 }, { "epoch": 10.09055770292656, "grad_norm": 0.2905747890472412, "learning_rate": 2.7417000000000002e-05, "loss": 0.014, "step": 9142 }, { "epoch": 10.091662065157372, "grad_norm": 0.6681841015815735, "learning_rate": 2.7420000000000002e-05, "loss": 0.0203, "step": 9143 }, { "epoch": 10.092766427388183, "grad_norm": 0.33478519320487976, "learning_rate": 2.7423e-05, "loss": 0.0221, "step": 9144 }, { "epoch": 10.093870789618995, "grad_norm": 0.7455723285675049, "learning_rate": 2.7426e-05, "loss": 0.0102, "step": 9145 }, { "epoch": 10.094975151849807, "grad_norm": 0.6036891937255859, "learning_rate": 2.7429e-05, "loss": 0.017, "step": 9146 }, { 
"epoch": 10.096079514080618, "grad_norm": 0.43003106117248535, "learning_rate": 2.7432e-05, "loss": 0.0159, "step": 9147 }, { "epoch": 10.09718387631143, "grad_norm": 0.12612278759479523, "learning_rate": 2.7435e-05, "loss": 0.007, "step": 9148 }, { "epoch": 10.098288238542242, "grad_norm": 0.4047454297542572, "learning_rate": 2.7438e-05, "loss": 0.0196, "step": 9149 }, { "epoch": 10.099392600773054, "grad_norm": 0.2734350860118866, "learning_rate": 2.7441e-05, "loss": 0.0112, "step": 9150 }, { "epoch": 10.100496963003865, "grad_norm": 0.21635892987251282, "learning_rate": 2.7444e-05, "loss": 0.0175, "step": 9151 }, { "epoch": 10.101601325234677, "grad_norm": 0.451587438583374, "learning_rate": 2.7447000000000003e-05, "loss": 0.0172, "step": 9152 }, { "epoch": 10.10270568746549, "grad_norm": 0.3151892423629761, "learning_rate": 2.7450000000000003e-05, "loss": 0.0089, "step": 9153 }, { "epoch": 10.1038100496963, "grad_norm": 0.4764936566352844, "learning_rate": 2.7453000000000003e-05, "loss": 0.0207, "step": 9154 }, { "epoch": 10.104914411927112, "grad_norm": 0.2571401596069336, "learning_rate": 2.7456000000000003e-05, "loss": 0.0113, "step": 9155 }, { "epoch": 10.106018774157924, "grad_norm": 0.5317484736442566, "learning_rate": 2.7459e-05, "loss": 0.0234, "step": 9156 }, { "epoch": 10.107123136388736, "grad_norm": 0.44271165132522583, "learning_rate": 2.7462e-05, "loss": 0.0314, "step": 9157 }, { "epoch": 10.108227498619547, "grad_norm": 0.6178978681564331, "learning_rate": 2.7465e-05, "loss": 0.0209, "step": 9158 }, { "epoch": 10.109331860850359, "grad_norm": 0.967779815196991, "learning_rate": 2.7468e-05, "loss": 0.0196, "step": 9159 }, { "epoch": 10.110436223081171, "grad_norm": 0.4485761225223541, "learning_rate": 2.7471e-05, "loss": 0.0197, "step": 9160 }, { "epoch": 10.111540585311982, "grad_norm": 0.5976493954658508, "learning_rate": 2.7473999999999998e-05, "loss": 0.2351, "step": 9161 }, { "epoch": 10.112644947542794, "grad_norm": 0.902554988861084, 
"learning_rate": 2.7477e-05, "loss": 0.1998, "step": 9162 }, { "epoch": 10.113749309773606, "grad_norm": 0.7357576489448547, "learning_rate": 2.748e-05, "loss": 0.2081, "step": 9163 }, { "epoch": 10.114853672004417, "grad_norm": 0.3648272156715393, "learning_rate": 2.7483e-05, "loss": 0.0891, "step": 9164 }, { "epoch": 10.115958034235229, "grad_norm": 0.5378370881080627, "learning_rate": 2.7486e-05, "loss": 0.086, "step": 9165 }, { "epoch": 10.117062396466041, "grad_norm": 0.4645026624202728, "learning_rate": 2.7489e-05, "loss": 0.0843, "step": 9166 }, { "epoch": 10.118166758696853, "grad_norm": 0.5440499782562256, "learning_rate": 2.7492e-05, "loss": 0.0576, "step": 9167 }, { "epoch": 10.119271120927664, "grad_norm": 0.31389182806015015, "learning_rate": 2.7495e-05, "loss": 0.032, "step": 9168 }, { "epoch": 10.120375483158476, "grad_norm": 0.4451742470264435, "learning_rate": 2.7498e-05, "loss": 0.0398, "step": 9169 }, { "epoch": 10.121479845389288, "grad_norm": 0.4527333378791809, "learning_rate": 2.7501e-05, "loss": 0.0259, "step": 9170 }, { "epoch": 10.122584207620099, "grad_norm": 0.28398397564888, "learning_rate": 2.7504e-05, "loss": 0.0197, "step": 9171 }, { "epoch": 10.123688569850911, "grad_norm": 0.5194274187088013, "learning_rate": 2.7507000000000003e-05, "loss": 0.0227, "step": 9172 }, { "epoch": 10.124792932081723, "grad_norm": 0.339765727519989, "learning_rate": 2.7510000000000003e-05, "loss": 0.0126, "step": 9173 }, { "epoch": 10.125897294312535, "grad_norm": 0.30255377292633057, "learning_rate": 2.7513000000000002e-05, "loss": 0.0151, "step": 9174 }, { "epoch": 10.127001656543346, "grad_norm": 0.2624610960483551, "learning_rate": 2.7516000000000002e-05, "loss": 0.0178, "step": 9175 }, { "epoch": 10.128106018774158, "grad_norm": 0.2697533965110779, "learning_rate": 2.7519000000000002e-05, "loss": 0.0128, "step": 9176 }, { "epoch": 10.12921038100497, "grad_norm": 0.2804308235645294, "learning_rate": 2.7522000000000002e-05, "loss": 0.0134, "step": 9177 
}, { "epoch": 10.13031474323578, "grad_norm": 0.3411410450935364, "learning_rate": 2.7525e-05, "loss": 0.0261, "step": 9178 }, { "epoch": 10.131419105466593, "grad_norm": 1.0971827507019043, "learning_rate": 2.7528e-05, "loss": 0.0103, "step": 9179 }, { "epoch": 10.132523467697405, "grad_norm": 0.31436780095100403, "learning_rate": 2.7531e-05, "loss": 0.0192, "step": 9180 }, { "epoch": 10.133627829928216, "grad_norm": 0.4589761197566986, "learning_rate": 2.7533999999999998e-05, "loss": 0.0211, "step": 9181 }, { "epoch": 10.134732192159028, "grad_norm": 0.24589508771896362, "learning_rate": 2.7537e-05, "loss": 0.0058, "step": 9182 }, { "epoch": 10.13583655438984, "grad_norm": 0.45146483182907104, "learning_rate": 2.754e-05, "loss": 0.019, "step": 9183 }, { "epoch": 10.136940916620652, "grad_norm": 0.40068790316581726, "learning_rate": 2.7543e-05, "loss": 0.0195, "step": 9184 }, { "epoch": 10.138045278851463, "grad_norm": 0.26044726371765137, "learning_rate": 2.7546e-05, "loss": 0.0124, "step": 9185 }, { "epoch": 10.139149641082275, "grad_norm": 0.222887322306633, "learning_rate": 2.7549e-05, "loss": 0.0127, "step": 9186 }, { "epoch": 10.140254003313087, "grad_norm": 0.2475356161594391, "learning_rate": 2.7552e-05, "loss": 0.0218, "step": 9187 }, { "epoch": 10.141358365543898, "grad_norm": 0.8974306583404541, "learning_rate": 2.7555e-05, "loss": 0.02, "step": 9188 }, { "epoch": 10.14246272777471, "grad_norm": 0.7674508690834045, "learning_rate": 2.7558e-05, "loss": 0.0255, "step": 9189 }, { "epoch": 10.143567090005522, "grad_norm": 0.2641085684299469, "learning_rate": 2.7561e-05, "loss": 0.012, "step": 9190 }, { "epoch": 10.144671452236334, "grad_norm": 0.41716283559799194, "learning_rate": 2.7564e-05, "loss": 0.0135, "step": 9191 }, { "epoch": 10.145775814467145, "grad_norm": 0.42773932218551636, "learning_rate": 2.7567000000000002e-05, "loss": 0.014, "step": 9192 }, { "epoch": 10.146880176697957, "grad_norm": 0.48946109414100647, "learning_rate": 
2.7570000000000002e-05, "loss": 0.0174, "step": 9193 }, { "epoch": 10.14798453892877, "grad_norm": 0.37118232250213623, "learning_rate": 2.7573000000000002e-05, "loss": 0.0143, "step": 9194 }, { "epoch": 10.14908890115958, "grad_norm": 0.2650814354419708, "learning_rate": 2.7576e-05, "loss": 0.0109, "step": 9195 }, { "epoch": 10.150193263390392, "grad_norm": 0.37112173438072205, "learning_rate": 2.7579e-05, "loss": 0.0121, "step": 9196 }, { "epoch": 10.151297625621204, "grad_norm": 0.31421297788619995, "learning_rate": 2.7582e-05, "loss": 0.0143, "step": 9197 }, { "epoch": 10.152401987852015, "grad_norm": 0.38840726017951965, "learning_rate": 2.7585e-05, "loss": 0.0211, "step": 9198 }, { "epoch": 10.153506350082827, "grad_norm": 0.35570278763771057, "learning_rate": 2.7588e-05, "loss": 0.0146, "step": 9199 }, { "epoch": 10.154610712313639, "grad_norm": 0.30813735723495483, "learning_rate": 2.7591e-05, "loss": 0.0103, "step": 9200 }, { "epoch": 10.155715074544451, "grad_norm": 0.31733793020248413, "learning_rate": 2.7594e-05, "loss": 0.0098, "step": 9201 }, { "epoch": 10.156819436775262, "grad_norm": 0.26034262776374817, "learning_rate": 2.7597000000000004e-05, "loss": 0.0148, "step": 9202 }, { "epoch": 10.157923799006074, "grad_norm": 0.6970019936561584, "learning_rate": 2.7600000000000003e-05, "loss": 0.0224, "step": 9203 }, { "epoch": 10.159028161236886, "grad_norm": 0.2995930314064026, "learning_rate": 2.7603000000000003e-05, "loss": 0.0043, "step": 9204 }, { "epoch": 10.160132523467697, "grad_norm": 0.48655229806900024, "learning_rate": 2.7606e-05, "loss": 0.0151, "step": 9205 }, { "epoch": 10.161236885698509, "grad_norm": 0.6011332273483276, "learning_rate": 2.7609e-05, "loss": 0.0307, "step": 9206 }, { "epoch": 10.162341247929321, "grad_norm": 0.44398167729377747, "learning_rate": 2.7612e-05, "loss": 0.0154, "step": 9207 }, { "epoch": 10.163445610160133, "grad_norm": 0.19732117652893066, "learning_rate": 2.7615e-05, "loss": 0.0086, "step": 9208 }, { "epoch": 
10.164549972390944, "grad_norm": 0.6359963417053223, "learning_rate": 2.7618e-05, "loss": 0.0177, "step": 9209 }, { "epoch": 10.165654334621756, "grad_norm": 0.9912877082824707, "learning_rate": 2.7621e-05, "loss": 0.029, "step": 9210 }, { "epoch": 10.166758696852568, "grad_norm": 1.006744384765625, "learning_rate": 2.7624e-05, "loss": 0.2532, "step": 9211 }, { "epoch": 10.167863059083379, "grad_norm": 0.6367826461791992, "learning_rate": 2.7627e-05, "loss": 0.151, "step": 9212 }, { "epoch": 10.168967421314191, "grad_norm": 0.508236289024353, "learning_rate": 2.763e-05, "loss": 0.1776, "step": 9213 }, { "epoch": 10.170071783545003, "grad_norm": 0.605917751789093, "learning_rate": 2.7633e-05, "loss": 0.1, "step": 9214 }, { "epoch": 10.171176145775814, "grad_norm": 0.666464626789093, "learning_rate": 2.7636e-05, "loss": 0.1033, "step": 9215 }, { "epoch": 10.172280508006626, "grad_norm": 0.549519956111908, "learning_rate": 2.7639e-05, "loss": 0.0919, "step": 9216 }, { "epoch": 10.173384870237438, "grad_norm": 0.4296516478061676, "learning_rate": 2.7642e-05, "loss": 0.0565, "step": 9217 }, { "epoch": 10.17448923246825, "grad_norm": 0.49957072734832764, "learning_rate": 2.7645e-05, "loss": 0.0248, "step": 9218 }, { "epoch": 10.17559359469906, "grad_norm": 1.026414394378662, "learning_rate": 2.7648e-05, "loss": 0.1421, "step": 9219 }, { "epoch": 10.176697956929873, "grad_norm": 0.3863348662853241, "learning_rate": 2.7651e-05, "loss": 0.0437, "step": 9220 }, { "epoch": 10.177802319160685, "grad_norm": 0.343522846698761, "learning_rate": 2.7654e-05, "loss": 0.0229, "step": 9221 }, { "epoch": 10.178906681391496, "grad_norm": 0.2525605261325836, "learning_rate": 2.7657000000000003e-05, "loss": 0.0175, "step": 9222 }, { "epoch": 10.180011043622308, "grad_norm": 0.4917667508125305, "learning_rate": 2.7660000000000003e-05, "loss": 0.0271, "step": 9223 }, { "epoch": 10.18111540585312, "grad_norm": 0.421774685382843, "learning_rate": 2.7663000000000003e-05, "loss": 0.0307, 
"step": 9224 }, { "epoch": 10.182219768083932, "grad_norm": 0.28090861439704895, "learning_rate": 2.7666000000000002e-05, "loss": 0.0128, "step": 9225 }, { "epoch": 10.183324130314743, "grad_norm": 0.20426197350025177, "learning_rate": 2.7669000000000002e-05, "loss": 0.0137, "step": 9226 }, { "epoch": 10.184428492545555, "grad_norm": 0.314499169588089, "learning_rate": 2.7672000000000002e-05, "loss": 0.0117, "step": 9227 }, { "epoch": 10.185532854776367, "grad_norm": 0.23066838085651398, "learning_rate": 2.7675000000000002e-05, "loss": 0.0147, "step": 9228 }, { "epoch": 10.186637217007178, "grad_norm": 0.3068399429321289, "learning_rate": 2.7678e-05, "loss": 0.0177, "step": 9229 }, { "epoch": 10.18774157923799, "grad_norm": 0.20764772593975067, "learning_rate": 2.7680999999999998e-05, "loss": 0.0116, "step": 9230 }, { "epoch": 10.188845941468802, "grad_norm": 0.30710476636886597, "learning_rate": 2.7683999999999998e-05, "loss": 0.0186, "step": 9231 }, { "epoch": 10.189950303699613, "grad_norm": 0.4654804468154907, "learning_rate": 2.7687e-05, "loss": 0.0237, "step": 9232 }, { "epoch": 10.191054665930425, "grad_norm": 0.2454308271408081, "learning_rate": 2.769e-05, "loss": 0.0165, "step": 9233 }, { "epoch": 10.192159028161237, "grad_norm": 0.3800622224807739, "learning_rate": 2.7693e-05, "loss": 0.0194, "step": 9234 }, { "epoch": 10.19326339039205, "grad_norm": 0.3043679893016815, "learning_rate": 2.7696e-05, "loss": 0.0207, "step": 9235 }, { "epoch": 10.19436775262286, "grad_norm": 0.6052889227867126, "learning_rate": 2.7699e-05, "loss": 0.0187, "step": 9236 }, { "epoch": 10.195472114853672, "grad_norm": 0.3326396644115448, "learning_rate": 2.7702e-05, "loss": 0.0609, "step": 9237 }, { "epoch": 10.196576477084484, "grad_norm": 0.24279870092868805, "learning_rate": 2.7705e-05, "loss": 0.0136, "step": 9238 }, { "epoch": 10.197680839315295, "grad_norm": 0.2364385724067688, "learning_rate": 2.7708e-05, "loss": 0.0152, "step": 9239 }, { "epoch": 10.198785201546107, 
"grad_norm": 0.6735495924949646, "learning_rate": 2.7711e-05, "loss": 0.0258, "step": 9240 }, { "epoch": 10.19988956377692, "grad_norm": 0.26350268721580505, "learning_rate": 2.7714e-05, "loss": 0.0129, "step": 9241 }, { "epoch": 10.200993926007731, "grad_norm": 0.3470384478569031, "learning_rate": 2.7717000000000002e-05, "loss": 0.0136, "step": 9242 }, { "epoch": 10.202098288238542, "grad_norm": 0.19915692508220673, "learning_rate": 2.7720000000000002e-05, "loss": 0.013, "step": 9243 }, { "epoch": 10.203202650469354, "grad_norm": 0.22421152889728546, "learning_rate": 2.7723000000000002e-05, "loss": 0.0102, "step": 9244 }, { "epoch": 10.204307012700166, "grad_norm": 0.4451693296432495, "learning_rate": 2.7726000000000002e-05, "loss": 0.0173, "step": 9245 }, { "epoch": 10.205411374930977, "grad_norm": 0.39269885420799255, "learning_rate": 2.7729e-05, "loss": 0.0197, "step": 9246 }, { "epoch": 10.206515737161789, "grad_norm": 0.5205914378166199, "learning_rate": 2.7732e-05, "loss": 0.0209, "step": 9247 }, { "epoch": 10.207620099392601, "grad_norm": 0.5697563290596008, "learning_rate": 2.7735e-05, "loss": 0.0166, "step": 9248 }, { "epoch": 10.208724461623412, "grad_norm": 0.5210625529289246, "learning_rate": 2.7738e-05, "loss": 0.0151, "step": 9249 }, { "epoch": 10.209828823854224, "grad_norm": 0.29468563199043274, "learning_rate": 2.7741e-05, "loss": 0.0152, "step": 9250 }, { "epoch": 10.210933186085036, "grad_norm": 0.3576151132583618, "learning_rate": 2.7744e-05, "loss": 0.0172, "step": 9251 }, { "epoch": 10.212037548315848, "grad_norm": 0.3254373371601105, "learning_rate": 2.7747000000000004e-05, "loss": 0.0148, "step": 9252 }, { "epoch": 10.213141910546659, "grad_norm": 0.4211021065711975, "learning_rate": 2.7750000000000004e-05, "loss": 0.0185, "step": 9253 }, { "epoch": 10.214246272777471, "grad_norm": 0.31830617785453796, "learning_rate": 2.7753e-05, "loss": 0.011, "step": 9254 }, { "epoch": 10.215350635008283, "grad_norm": 0.5972391366958618, "learning_rate": 
2.7756e-05, "loss": 0.025, "step": 9255 }, { "epoch": 10.216454997239094, "grad_norm": 0.2666863799095154, "learning_rate": 2.7759e-05, "loss": 0.0122, "step": 9256 }, { "epoch": 10.217559359469906, "grad_norm": 0.6365170478820801, "learning_rate": 2.7762e-05, "loss": 0.0258, "step": 9257 }, { "epoch": 10.218663721700718, "grad_norm": 0.3095386028289795, "learning_rate": 2.7765e-05, "loss": 0.0124, "step": 9258 }, { "epoch": 10.21976808393153, "grad_norm": 0.24030785262584686, "learning_rate": 2.7768e-05, "loss": 0.011, "step": 9259 }, { "epoch": 10.22087244616234, "grad_norm": 0.3416246771812439, "learning_rate": 2.7771e-05, "loss": 0.0134, "step": 9260 }, { "epoch": 10.221976808393153, "grad_norm": 0.6821689605712891, "learning_rate": 2.7774e-05, "loss": 0.2009, "step": 9261 }, { "epoch": 10.223081170623965, "grad_norm": 0.6705067753791809, "learning_rate": 2.7777e-05, "loss": 0.181, "step": 9262 }, { "epoch": 10.224185532854776, "grad_norm": 0.4500044286251068, "learning_rate": 2.778e-05, "loss": 0.1435, "step": 9263 }, { "epoch": 10.225289895085588, "grad_norm": 0.8736531138420105, "learning_rate": 2.7783e-05, "loss": 0.1736, "step": 9264 }, { "epoch": 10.2263942573164, "grad_norm": 0.6360740065574646, "learning_rate": 2.7786e-05, "loss": 0.1141, "step": 9265 }, { "epoch": 10.22749861954721, "grad_norm": 0.3400779664516449, "learning_rate": 2.7789e-05, "loss": 0.0697, "step": 9266 }, { "epoch": 10.228602981778023, "grad_norm": 0.7278170585632324, "learning_rate": 2.7792e-05, "loss": 0.115, "step": 9267 }, { "epoch": 10.229707344008835, "grad_norm": 0.3827362060546875, "learning_rate": 2.7795e-05, "loss": 0.0691, "step": 9268 }, { "epoch": 10.230811706239647, "grad_norm": 0.2430347055196762, "learning_rate": 2.7798e-05, "loss": 0.0251, "step": 9269 }, { "epoch": 10.231916068470458, "grad_norm": 0.36059048771858215, "learning_rate": 2.7801e-05, "loss": 0.0271, "step": 9270 }, { "epoch": 10.23302043070127, "grad_norm": 0.5467144846916199, "learning_rate": 
2.7804e-05, "loss": 0.0258, "step": 9271 }, { "epoch": 10.234124792932082, "grad_norm": 0.39196518063545227, "learning_rate": 2.7807e-05, "loss": 0.0251, "step": 9272 }, { "epoch": 10.235229155162893, "grad_norm": 0.3451957404613495, "learning_rate": 2.7810000000000003e-05, "loss": 0.0214, "step": 9273 }, { "epoch": 10.236333517393705, "grad_norm": 0.32917875051498413, "learning_rate": 2.7813000000000003e-05, "loss": 0.0206, "step": 9274 }, { "epoch": 10.237437879624517, "grad_norm": 0.3062411844730377, "learning_rate": 2.7816000000000003e-05, "loss": 0.0208, "step": 9275 }, { "epoch": 10.23854224185533, "grad_norm": 0.35752570629119873, "learning_rate": 2.7819000000000002e-05, "loss": 0.0244, "step": 9276 }, { "epoch": 10.23964660408614, "grad_norm": 0.37399226427078247, "learning_rate": 2.7822000000000002e-05, "loss": 0.0201, "step": 9277 }, { "epoch": 10.240750966316952, "grad_norm": 0.4376109838485718, "learning_rate": 2.7825000000000002e-05, "loss": 0.0299, "step": 9278 }, { "epoch": 10.241855328547764, "grad_norm": 0.18937042355537415, "learning_rate": 2.7828e-05, "loss": 0.0109, "step": 9279 }, { "epoch": 10.242959690778575, "grad_norm": 0.37792161107063293, "learning_rate": 2.7831e-05, "loss": 0.0123, "step": 9280 }, { "epoch": 10.244064053009387, "grad_norm": 0.20499807596206665, "learning_rate": 2.7833999999999998e-05, "loss": 0.0114, "step": 9281 }, { "epoch": 10.2451684152402, "grad_norm": 0.2802335321903229, "learning_rate": 2.7836999999999998e-05, "loss": 0.0151, "step": 9282 }, { "epoch": 10.24627277747101, "grad_norm": 0.28282928466796875, "learning_rate": 2.784e-05, "loss": 0.0188, "step": 9283 }, { "epoch": 10.247377139701822, "grad_norm": 0.28952133655548096, "learning_rate": 2.7843e-05, "loss": 0.0117, "step": 9284 }, { "epoch": 10.248481501932634, "grad_norm": 0.42637622356414795, "learning_rate": 2.7846e-05, "loss": 0.0198, "step": 9285 }, { "epoch": 10.249585864163446, "grad_norm": 0.41833749413490295, "learning_rate": 2.7849e-05, "loss": 
0.0189, "step": 9286 }, { "epoch": 10.250690226394257, "grad_norm": 0.5555050373077393, "learning_rate": 2.7852e-05, "loss": 0.024, "step": 9287 }, { "epoch": 10.251794588625069, "grad_norm": 0.2196861207485199, "learning_rate": 2.7855e-05, "loss": 0.0099, "step": 9288 }, { "epoch": 10.252898950855881, "grad_norm": 0.26330819725990295, "learning_rate": 2.7858e-05, "loss": 0.018, "step": 9289 }, { "epoch": 10.254003313086692, "grad_norm": 0.23038874566555023, "learning_rate": 2.7861e-05, "loss": 0.01, "step": 9290 }, { "epoch": 10.255107675317504, "grad_norm": 0.3277091383934021, "learning_rate": 2.7864e-05, "loss": 0.0167, "step": 9291 }, { "epoch": 10.256212037548316, "grad_norm": 0.25341105461120605, "learning_rate": 2.7867e-05, "loss": 0.0134, "step": 9292 }, { "epoch": 10.257316399779128, "grad_norm": 0.28897589445114136, "learning_rate": 2.7870000000000003e-05, "loss": 0.0112, "step": 9293 }, { "epoch": 10.258420762009939, "grad_norm": 0.5016345977783203, "learning_rate": 2.7873000000000002e-05, "loss": 0.0222, "step": 9294 }, { "epoch": 10.259525124240751, "grad_norm": 0.42701467871665955, "learning_rate": 2.7876000000000002e-05, "loss": 0.0178, "step": 9295 }, { "epoch": 10.260629486471563, "grad_norm": 0.2949969172477722, "learning_rate": 2.7879000000000002e-05, "loss": 0.0112, "step": 9296 }, { "epoch": 10.261733848702374, "grad_norm": 0.5936309695243835, "learning_rate": 2.7882000000000002e-05, "loss": 0.0166, "step": 9297 }, { "epoch": 10.262838210933186, "grad_norm": 0.24880412220954895, "learning_rate": 2.7885e-05, "loss": 0.0123, "step": 9298 }, { "epoch": 10.263942573163998, "grad_norm": 0.23878033459186554, "learning_rate": 2.7888e-05, "loss": 0.0084, "step": 9299 }, { "epoch": 10.26504693539481, "grad_norm": 0.24409496784210205, "learning_rate": 2.7891e-05, "loss": 0.0128, "step": 9300 }, { "epoch": 10.26615129762562, "grad_norm": 0.3844740390777588, "learning_rate": 2.7894e-05, "loss": 0.0112, "step": 9301 }, { "epoch": 10.267255659856433, 
"grad_norm": 0.3125913441181183, "learning_rate": 2.7897e-05, "loss": 0.0138, "step": 9302 }, { "epoch": 10.268360022087245, "grad_norm": 0.592292070388794, "learning_rate": 2.79e-05, "loss": 0.0291, "step": 9303 }, { "epoch": 10.269464384318056, "grad_norm": 0.4322625398635864, "learning_rate": 2.7903e-05, "loss": 0.0216, "step": 9304 }, { "epoch": 10.270568746548868, "grad_norm": 0.8288875222206116, "learning_rate": 2.7906e-05, "loss": 0.0186, "step": 9305 }, { "epoch": 10.27167310877968, "grad_norm": 0.45446956157684326, "learning_rate": 2.7909e-05, "loss": 0.0139, "step": 9306 }, { "epoch": 10.27277747101049, "grad_norm": 0.8907098770141602, "learning_rate": 2.7912e-05, "loss": 0.0262, "step": 9307 }, { "epoch": 10.273881833241303, "grad_norm": 0.34206610918045044, "learning_rate": 2.7915e-05, "loss": 0.0113, "step": 9308 }, { "epoch": 10.274986195472115, "grad_norm": 0.5907605886459351, "learning_rate": 2.7918e-05, "loss": 0.012, "step": 9309 }, { "epoch": 10.276090557702927, "grad_norm": 0.4470304548740387, "learning_rate": 2.7921e-05, "loss": 0.017, "step": 9310 }, { "epoch": 10.277194919933738, "grad_norm": 0.9075879454612732, "learning_rate": 2.7924e-05, "loss": 0.2643, "step": 9311 }, { "epoch": 10.27829928216455, "grad_norm": 0.6545663475990295, "learning_rate": 2.7927e-05, "loss": 0.2516, "step": 9312 }, { "epoch": 10.279403644395362, "grad_norm": 0.7839493751525879, "learning_rate": 2.7930000000000002e-05, "loss": 0.1829, "step": 9313 }, { "epoch": 10.280508006626173, "grad_norm": 0.6819607019424438, "learning_rate": 2.7933000000000002e-05, "loss": 0.111, "step": 9314 }, { "epoch": 10.281612368856985, "grad_norm": 1.0225155353546143, "learning_rate": 2.7936e-05, "loss": 0.0761, "step": 9315 }, { "epoch": 10.282716731087797, "grad_norm": 0.5123111009597778, "learning_rate": 2.7939e-05, "loss": 0.0939, "step": 9316 }, { "epoch": 10.283821093318608, "grad_norm": 0.7423773407936096, "learning_rate": 2.7942e-05, "loss": 0.0954, "step": 9317 }, { "epoch": 
10.28492545554942, "grad_norm": 0.4291919767856598, "learning_rate": 2.7945e-05, "loss": 0.0417, "step": 9318 }, { "epoch": 10.286029817780232, "grad_norm": 0.4632335603237152, "learning_rate": 2.7948e-05, "loss": 0.0857, "step": 9319 }, { "epoch": 10.287134180011044, "grad_norm": 0.4752001166343689, "learning_rate": 2.7951e-05, "loss": 0.0303, "step": 9320 }, { "epoch": 10.288238542241855, "grad_norm": 0.3088882863521576, "learning_rate": 2.7954e-05, "loss": 0.0521, "step": 9321 }, { "epoch": 10.289342904472667, "grad_norm": 0.5924866795539856, "learning_rate": 2.7957e-05, "loss": 0.0214, "step": 9322 }, { "epoch": 10.29044726670348, "grad_norm": 0.3679393529891968, "learning_rate": 2.7960000000000003e-05, "loss": 0.0167, "step": 9323 }, { "epoch": 10.29155162893429, "grad_norm": 0.30381837487220764, "learning_rate": 2.7963000000000003e-05, "loss": 0.0171, "step": 9324 }, { "epoch": 10.292655991165102, "grad_norm": 0.2941063642501831, "learning_rate": 2.7966000000000003e-05, "loss": 0.0155, "step": 9325 }, { "epoch": 10.293760353395914, "grad_norm": 1.4485162496566772, "learning_rate": 2.7969000000000003e-05, "loss": 0.0149, "step": 9326 }, { "epoch": 10.294864715626726, "grad_norm": 0.22296841442584991, "learning_rate": 2.7972000000000003e-05, "loss": 0.02, "step": 9327 }, { "epoch": 10.295969077857537, "grad_norm": 0.32247307896614075, "learning_rate": 2.7975e-05, "loss": 0.017, "step": 9328 }, { "epoch": 10.297073440088349, "grad_norm": 0.31364905834198, "learning_rate": 2.7978e-05, "loss": 0.0141, "step": 9329 }, { "epoch": 10.298177802319161, "grad_norm": 0.3679731488227844, "learning_rate": 2.7981e-05, "loss": 0.0205, "step": 9330 }, { "epoch": 10.299282164549972, "grad_norm": 0.4549623131752014, "learning_rate": 2.7984e-05, "loss": 0.0233, "step": 9331 }, { "epoch": 10.300386526780784, "grad_norm": 0.3286687135696411, "learning_rate": 2.7986999999999998e-05, "loss": 0.0206, "step": 9332 }, { "epoch": 10.301490889011596, "grad_norm": 0.2805538773536682, 
"learning_rate": 2.799e-05, "loss": 0.0212, "step": 9333 }, { "epoch": 10.302595251242408, "grad_norm": 0.2814815938472748, "learning_rate": 2.7993e-05, "loss": 0.0173, "step": 9334 }, { "epoch": 10.303699613473219, "grad_norm": 0.4702124297618866, "learning_rate": 2.7996e-05, "loss": 0.0243, "step": 9335 }, { "epoch": 10.304803975704031, "grad_norm": 0.680526614189148, "learning_rate": 2.7999e-05, "loss": 0.0184, "step": 9336 }, { "epoch": 10.305908337934843, "grad_norm": 0.24440152943134308, "learning_rate": 2.8002e-05, "loss": 0.0115, "step": 9337 }, { "epoch": 10.307012700165654, "grad_norm": 0.2913415729999542, "learning_rate": 2.8005e-05, "loss": 0.014, "step": 9338 }, { "epoch": 10.308117062396466, "grad_norm": 0.21672821044921875, "learning_rate": 2.8008e-05, "loss": 0.0107, "step": 9339 }, { "epoch": 10.309221424627278, "grad_norm": 0.3496401309967041, "learning_rate": 2.8011e-05, "loss": 0.0186, "step": 9340 }, { "epoch": 10.310325786858089, "grad_norm": 0.39000698924064636, "learning_rate": 2.8014e-05, "loss": 0.0245, "step": 9341 }, { "epoch": 10.3114301490889, "grad_norm": 0.5008376836776733, "learning_rate": 2.8017e-05, "loss": 0.0203, "step": 9342 }, { "epoch": 10.312534511319713, "grad_norm": 0.5598403215408325, "learning_rate": 2.8020000000000003e-05, "loss": 0.0259, "step": 9343 }, { "epoch": 10.313638873550525, "grad_norm": 0.38022518157958984, "learning_rate": 2.8023000000000003e-05, "loss": 0.0198, "step": 9344 }, { "epoch": 10.314743235781336, "grad_norm": 0.3849703073501587, "learning_rate": 2.8026000000000002e-05, "loss": 0.0128, "step": 9345 }, { "epoch": 10.315847598012148, "grad_norm": 0.6173414587974548, "learning_rate": 2.8029000000000002e-05, "loss": 0.0152, "step": 9346 }, { "epoch": 10.31695196024296, "grad_norm": 0.4476248323917389, "learning_rate": 2.8032000000000002e-05, "loss": 0.0257, "step": 9347 }, { "epoch": 10.31805632247377, "grad_norm": 0.3020012080669403, "learning_rate": 2.8035000000000002e-05, "loss": 0.0185, "step": 
9348 }, { "epoch": 10.319160684704583, "grad_norm": 0.35480785369873047, "learning_rate": 2.8038e-05, "loss": 0.0204, "step": 9349 }, { "epoch": 10.320265046935395, "grad_norm": 0.45240259170532227, "learning_rate": 2.8041e-05, "loss": 0.009, "step": 9350 }, { "epoch": 10.321369409166207, "grad_norm": 0.5971986055374146, "learning_rate": 2.8044e-05, "loss": 0.0235, "step": 9351 }, { "epoch": 10.322473771397018, "grad_norm": 0.6625749468803406, "learning_rate": 2.8047e-05, "loss": 0.0265, "step": 9352 }, { "epoch": 10.32357813362783, "grad_norm": 0.30988845229148865, "learning_rate": 2.805e-05, "loss": 0.0095, "step": 9353 }, { "epoch": 10.324682495858642, "grad_norm": 0.418559730052948, "learning_rate": 2.8053e-05, "loss": 0.0128, "step": 9354 }, { "epoch": 10.325786858089453, "grad_norm": 0.3941440284252167, "learning_rate": 2.8056e-05, "loss": 0.0202, "step": 9355 }, { "epoch": 10.326891220320265, "grad_norm": 0.5009801983833313, "learning_rate": 2.8059e-05, "loss": 0.0224, "step": 9356 }, { "epoch": 10.327995582551077, "grad_norm": 0.511481761932373, "learning_rate": 2.8062e-05, "loss": 0.0225, "step": 9357 }, { "epoch": 10.329099944781888, "grad_norm": 0.5013193488121033, "learning_rate": 2.8065e-05, "loss": 0.0211, "step": 9358 }, { "epoch": 10.3302043070127, "grad_norm": 0.4198288321495056, "learning_rate": 2.8068e-05, "loss": 0.0264, "step": 9359 }, { "epoch": 10.331308669243512, "grad_norm": 0.541334867477417, "learning_rate": 2.8071e-05, "loss": 0.0191, "step": 9360 }, { "epoch": 10.332413031474324, "grad_norm": 0.8542470335960388, "learning_rate": 2.8074e-05, "loss": 0.2807, "step": 9361 }, { "epoch": 10.333517393705135, "grad_norm": 0.7279584407806396, "learning_rate": 2.8077e-05, "loss": 0.1509, "step": 9362 }, { "epoch": 10.334621755935947, "grad_norm": 0.5412939786911011, "learning_rate": 2.8080000000000002e-05, "loss": 0.1308, "step": 9363 }, { "epoch": 10.33572611816676, "grad_norm": 0.6891998648643494, "learning_rate": 2.8083000000000002e-05, 
"loss": 0.1258, "step": 9364 }, { "epoch": 10.33683048039757, "grad_norm": 1.0928854942321777, "learning_rate": 2.8086000000000002e-05, "loss": 0.1622, "step": 9365 }, { "epoch": 10.337934842628382, "grad_norm": 0.44769492745399475, "learning_rate": 2.8089e-05, "loss": 0.0943, "step": 9366 }, { "epoch": 10.339039204859194, "grad_norm": 0.686352550983429, "learning_rate": 2.8092e-05, "loss": 0.1153, "step": 9367 }, { "epoch": 10.340143567090006, "grad_norm": 0.5316141247749329, "learning_rate": 2.8095e-05, "loss": 0.1059, "step": 9368 }, { "epoch": 10.341247929320817, "grad_norm": 0.37814053893089294, "learning_rate": 2.8098e-05, "loss": 0.0411, "step": 9369 }, { "epoch": 10.342352291551629, "grad_norm": 0.29911404848098755, "learning_rate": 2.8101e-05, "loss": 0.035, "step": 9370 }, { "epoch": 10.343456653782441, "grad_norm": 0.27984002232551575, "learning_rate": 2.8104e-05, "loss": 0.035, "step": 9371 }, { "epoch": 10.344561016013252, "grad_norm": 0.3607112169265747, "learning_rate": 2.8107e-05, "loss": 0.0311, "step": 9372 }, { "epoch": 10.345665378244064, "grad_norm": 0.5910866856575012, "learning_rate": 2.8110000000000004e-05, "loss": 0.0309, "step": 9373 }, { "epoch": 10.346769740474876, "grad_norm": 0.48142823576927185, "learning_rate": 2.8113000000000003e-05, "loss": 0.021, "step": 9374 }, { "epoch": 10.347874102705687, "grad_norm": 0.2842363715171814, "learning_rate": 2.8116000000000003e-05, "loss": 0.0193, "step": 9375 }, { "epoch": 10.348978464936499, "grad_norm": 0.5827527046203613, "learning_rate": 2.8119000000000003e-05, "loss": 0.0623, "step": 9376 }, { "epoch": 10.350082827167311, "grad_norm": 0.6239514946937561, "learning_rate": 2.8122e-05, "loss": 0.0383, "step": 9377 }, { "epoch": 10.351187189398123, "grad_norm": 0.4911936819553375, "learning_rate": 2.8125e-05, "loss": 0.0285, "step": 9378 }, { "epoch": 10.352291551628934, "grad_norm": 0.6032228469848633, "learning_rate": 2.8128e-05, "loss": 0.0227, "step": 9379 }, { "epoch": 10.353395913859746, 
"grad_norm": 0.3536894917488098, "learning_rate": 2.8131e-05, "loss": 0.018, "step": 9380 }, { "epoch": 10.354500276090558, "grad_norm": 0.3003961741924286, "learning_rate": 2.8134e-05, "loss": 0.0244, "step": 9381 }, { "epoch": 10.355604638321369, "grad_norm": 0.2742805480957031, "learning_rate": 2.8137e-05, "loss": 0.0142, "step": 9382 }, { "epoch": 10.356709000552181, "grad_norm": 0.38881388306617737, "learning_rate": 2.8139999999999998e-05, "loss": 0.0269, "step": 9383 }, { "epoch": 10.357813362782993, "grad_norm": 0.49518394470214844, "learning_rate": 2.8143e-05, "loss": 0.0201, "step": 9384 }, { "epoch": 10.358917725013805, "grad_norm": 0.22176197171211243, "learning_rate": 2.8146e-05, "loss": 0.0132, "step": 9385 }, { "epoch": 10.360022087244616, "grad_norm": 1.415652871131897, "learning_rate": 2.8149e-05, "loss": 0.0236, "step": 9386 }, { "epoch": 10.361126449475428, "grad_norm": 0.30334731936454773, "learning_rate": 2.8152e-05, "loss": 0.0127, "step": 9387 }, { "epoch": 10.36223081170624, "grad_norm": 0.2376714050769806, "learning_rate": 2.8155e-05, "loss": 0.0121, "step": 9388 }, { "epoch": 10.36333517393705, "grad_norm": 0.49075236916542053, "learning_rate": 2.8158e-05, "loss": 0.0151, "step": 9389 }, { "epoch": 10.364439536167863, "grad_norm": 0.4302528500556946, "learning_rate": 2.8161e-05, "loss": 0.0224, "step": 9390 }, { "epoch": 10.365543898398675, "grad_norm": 0.6454533338546753, "learning_rate": 2.8164e-05, "loss": 0.0158, "step": 9391 }, { "epoch": 10.366648260629486, "grad_norm": 0.5108789801597595, "learning_rate": 2.8167e-05, "loss": 0.024, "step": 9392 }, { "epoch": 10.367752622860298, "grad_norm": 0.45548367500305176, "learning_rate": 2.817e-05, "loss": 0.0199, "step": 9393 }, { "epoch": 10.36885698509111, "grad_norm": 0.27332785725593567, "learning_rate": 2.8173000000000003e-05, "loss": 0.0162, "step": 9394 }, { "epoch": 10.369961347321922, "grad_norm": 0.2795320451259613, "learning_rate": 2.8176000000000003e-05, "loss": 0.0127, "step": 
9395 }, { "epoch": 10.371065709552733, "grad_norm": 0.12508371472358704, "learning_rate": 2.8179000000000002e-05, "loss": 0.0079, "step": 9396 }, { "epoch": 10.372170071783545, "grad_norm": 0.4328216016292572, "learning_rate": 2.8182000000000002e-05, "loss": 0.0169, "step": 9397 }, { "epoch": 10.373274434014357, "grad_norm": 0.2488744556903839, "learning_rate": 2.8185000000000002e-05, "loss": 0.0139, "step": 9398 }, { "epoch": 10.374378796245168, "grad_norm": 0.2918089032173157, "learning_rate": 2.8188000000000002e-05, "loss": 0.0158, "step": 9399 }, { "epoch": 10.37548315847598, "grad_norm": 0.29044562578201294, "learning_rate": 2.8191e-05, "loss": 0.0131, "step": 9400 }, { "epoch": 10.376587520706792, "grad_norm": 0.3677036166191101, "learning_rate": 2.8194e-05, "loss": 0.0195, "step": 9401 }, { "epoch": 10.377691882937604, "grad_norm": 0.3043649196624756, "learning_rate": 2.8196999999999998e-05, "loss": 0.0129, "step": 9402 }, { "epoch": 10.378796245168415, "grad_norm": 0.3414192497730255, "learning_rate": 2.8199999999999998e-05, "loss": 0.0173, "step": 9403 }, { "epoch": 10.379900607399227, "grad_norm": 0.3034190237522125, "learning_rate": 2.8203e-05, "loss": 0.0216, "step": 9404 }, { "epoch": 10.38100496963004, "grad_norm": 0.2073393017053604, "learning_rate": 2.8206e-05, "loss": 0.013, "step": 9405 }, { "epoch": 10.38210933186085, "grad_norm": 0.3265567719936371, "learning_rate": 2.8209e-05, "loss": 0.0269, "step": 9406 }, { "epoch": 10.383213694091662, "grad_norm": 0.3536030650138855, "learning_rate": 2.8212e-05, "loss": 0.0116, "step": 9407 }, { "epoch": 10.384318056322474, "grad_norm": 0.6108576655387878, "learning_rate": 2.8215e-05, "loss": 0.0201, "step": 9408 }, { "epoch": 10.385422418553285, "grad_norm": 0.7258756160736084, "learning_rate": 2.8218e-05, "loss": 0.0206, "step": 9409 }, { "epoch": 10.386526780784097, "grad_norm": 1.1017320156097412, "learning_rate": 2.8221e-05, "loss": 0.0353, "step": 9410 }, { "epoch": 10.387631143014909, "grad_norm": 
1.1218433380126953, "learning_rate": 2.8224e-05, "loss": 0.2622, "step": 9411 }, { "epoch": 10.388735505245721, "grad_norm": 0.7661769390106201, "learning_rate": 2.8227e-05, "loss": 0.2047, "step": 9412 }, { "epoch": 10.389839867476532, "grad_norm": 0.6828936338424683, "learning_rate": 2.823e-05, "loss": 0.1558, "step": 9413 }, { "epoch": 10.390944229707344, "grad_norm": 0.6814522743225098, "learning_rate": 2.8233000000000002e-05, "loss": 0.136, "step": 9414 }, { "epoch": 10.392048591938156, "grad_norm": 0.6662505865097046, "learning_rate": 2.8236000000000002e-05, "loss": 0.0973, "step": 9415 }, { "epoch": 10.393152954168967, "grad_norm": 0.45238998532295227, "learning_rate": 2.8239000000000002e-05, "loss": 0.0803, "step": 9416 }, { "epoch": 10.394257316399779, "grad_norm": 0.6880195140838623, "learning_rate": 2.8242e-05, "loss": 0.1064, "step": 9417 }, { "epoch": 10.395361678630591, "grad_norm": 0.5701906681060791, "learning_rate": 2.8245e-05, "loss": 0.0992, "step": 9418 }, { "epoch": 10.396466040861403, "grad_norm": 0.36153554916381836, "learning_rate": 2.8248e-05, "loss": 0.032, "step": 9419 }, { "epoch": 10.397570403092214, "grad_norm": 0.39160895347595215, "learning_rate": 2.8251e-05, "loss": 0.0418, "step": 9420 }, { "epoch": 10.398674765323026, "grad_norm": 0.6131752729415894, "learning_rate": 2.8254e-05, "loss": 0.0545, "step": 9421 }, { "epoch": 10.399779127553838, "grad_norm": 0.35952138900756836, "learning_rate": 2.8257e-05, "loss": 0.0131, "step": 9422 }, { "epoch": 10.400883489784649, "grad_norm": 0.3261381685733795, "learning_rate": 2.826e-05, "loss": 0.0293, "step": 9423 }, { "epoch": 10.401987852015461, "grad_norm": 0.29108554124832153, "learning_rate": 2.8263000000000004e-05, "loss": 0.0162, "step": 9424 }, { "epoch": 10.403092214246273, "grad_norm": 0.5852933526039124, "learning_rate": 2.8266000000000003e-05, "loss": 0.0132, "step": 9425 }, { "epoch": 10.404196576477084, "grad_norm": 0.27040940523147583, "learning_rate": 2.8269e-05, "loss": 
0.0142, "step": 9426 }, { "epoch": 10.405300938707896, "grad_norm": 0.3476613759994507, "learning_rate": 2.8272e-05, "loss": 0.0249, "step": 9427 }, { "epoch": 10.406405300938708, "grad_norm": 0.5472352504730225, "learning_rate": 2.8275e-05, "loss": 0.0167, "step": 9428 }, { "epoch": 10.40750966316952, "grad_norm": 0.10075502097606659, "learning_rate": 2.8278e-05, "loss": 0.0057, "step": 9429 }, { "epoch": 10.40861402540033, "grad_norm": 0.3168012797832489, "learning_rate": 2.8281e-05, "loss": 0.0184, "step": 9430 }, { "epoch": 10.409718387631143, "grad_norm": 0.36170855164527893, "learning_rate": 2.8284e-05, "loss": 0.0293, "step": 9431 }, { "epoch": 10.410822749861955, "grad_norm": 0.29806652665138245, "learning_rate": 2.8287e-05, "loss": 0.0131, "step": 9432 }, { "epoch": 10.411927112092766, "grad_norm": 0.42649000883102417, "learning_rate": 2.829e-05, "loss": 0.029, "step": 9433 }, { "epoch": 10.413031474323578, "grad_norm": 0.19325748085975647, "learning_rate": 2.8293e-05, "loss": 0.0103, "step": 9434 }, { "epoch": 10.41413583655439, "grad_norm": 0.1801258772611618, "learning_rate": 2.8296e-05, "loss": 0.0087, "step": 9435 }, { "epoch": 10.415240198785202, "grad_norm": 0.2209869921207428, "learning_rate": 2.8299e-05, "loss": 0.0066, "step": 9436 }, { "epoch": 10.416344561016013, "grad_norm": 0.33257433772087097, "learning_rate": 2.8302e-05, "loss": 0.0164, "step": 9437 }, { "epoch": 10.417448923246825, "grad_norm": 0.5150646567344666, "learning_rate": 2.8305e-05, "loss": 0.0153, "step": 9438 }, { "epoch": 10.418553285477637, "grad_norm": 0.29769420623779297, "learning_rate": 2.8308e-05, "loss": 0.0157, "step": 9439 }, { "epoch": 10.419657647708448, "grad_norm": 0.4324004650115967, "learning_rate": 2.8311e-05, "loss": 0.0187, "step": 9440 }, { "epoch": 10.42076200993926, "grad_norm": 0.58994060754776, "learning_rate": 2.8314e-05, "loss": 0.027, "step": 9441 }, { "epoch": 10.421866372170072, "grad_norm": 0.9720933437347412, "learning_rate": 2.8317e-05, "loss": 
0.029, "step": 9442 }, { "epoch": 10.422970734400884, "grad_norm": 0.33564504981040955, "learning_rate": 2.832e-05, "loss": 0.013, "step": 9443 }, { "epoch": 10.424075096631695, "grad_norm": 0.5640291571617126, "learning_rate": 2.8323000000000003e-05, "loss": 0.0165, "step": 9444 }, { "epoch": 10.425179458862507, "grad_norm": 0.2464011013507843, "learning_rate": 2.8326000000000003e-05, "loss": 0.016, "step": 9445 }, { "epoch": 10.42628382109332, "grad_norm": 0.4930850863456726, "learning_rate": 2.8329000000000003e-05, "loss": 0.0198, "step": 9446 }, { "epoch": 10.42738818332413, "grad_norm": 0.35810399055480957, "learning_rate": 2.8332000000000002e-05, "loss": 0.0194, "step": 9447 }, { "epoch": 10.428492545554942, "grad_norm": 0.2994762361049652, "learning_rate": 2.8335000000000002e-05, "loss": 0.0132, "step": 9448 }, { "epoch": 10.429596907785754, "grad_norm": 0.23747912049293518, "learning_rate": 2.8338000000000002e-05, "loss": 0.0119, "step": 9449 }, { "epoch": 10.430701270016565, "grad_norm": 0.37033945322036743, "learning_rate": 2.8341000000000002e-05, "loss": 0.0291, "step": 9450 }, { "epoch": 10.431805632247377, "grad_norm": 0.3687937259674072, "learning_rate": 2.8344e-05, "loss": 0.0173, "step": 9451 }, { "epoch": 10.43290999447819, "grad_norm": 0.363099068403244, "learning_rate": 2.8346999999999998e-05, "loss": 0.0127, "step": 9452 }, { "epoch": 10.434014356709001, "grad_norm": 0.514155924320221, "learning_rate": 2.8349999999999998e-05, "loss": 0.013, "step": 9453 }, { "epoch": 10.435118718939812, "grad_norm": 0.41323602199554443, "learning_rate": 2.8353e-05, "loss": 0.0145, "step": 9454 }, { "epoch": 10.436223081170624, "grad_norm": 0.931809663772583, "learning_rate": 2.8356e-05, "loss": 0.0217, "step": 9455 }, { "epoch": 10.437327443401436, "grad_norm": 1.5592025518417358, "learning_rate": 2.8359e-05, "loss": 0.0386, "step": 9456 }, { "epoch": 10.438431805632247, "grad_norm": 0.43543925881385803, "learning_rate": 2.8362e-05, "loss": 0.0164, "step": 9457 
}, { "epoch": 10.439536167863059, "grad_norm": 0.9001100063323975, "learning_rate": 2.8365e-05, "loss": 0.0256, "step": 9458 }, { "epoch": 10.440640530093871, "grad_norm": 0.3840153217315674, "learning_rate": 2.8368e-05, "loss": 0.018, "step": 9459 }, { "epoch": 10.441744892324682, "grad_norm": 0.8913416862487793, "learning_rate": 2.8371e-05, "loss": 0.0304, "step": 9460 }, { "epoch": 10.442849254555494, "grad_norm": 2.152564764022827, "learning_rate": 2.8374e-05, "loss": 0.2764, "step": 9461 }, { "epoch": 10.443953616786306, "grad_norm": 0.8517357707023621, "learning_rate": 2.8377e-05, "loss": 0.1853, "step": 9462 }, { "epoch": 10.445057979017118, "grad_norm": 0.5233083367347717, "learning_rate": 2.838e-05, "loss": 0.1442, "step": 9463 }, { "epoch": 10.446162341247929, "grad_norm": 0.787100076675415, "learning_rate": 2.8383000000000003e-05, "loss": 0.157, "step": 9464 }, { "epoch": 10.447266703478741, "grad_norm": 0.7955343723297119, "learning_rate": 2.8386000000000002e-05, "loss": 0.1483, "step": 9465 }, { "epoch": 10.448371065709553, "grad_norm": 0.4578603506088257, "learning_rate": 2.8389000000000002e-05, "loss": 0.0926, "step": 9466 }, { "epoch": 10.449475427940364, "grad_norm": 0.8682933449745178, "learning_rate": 2.8392000000000002e-05, "loss": 0.0838, "step": 9467 }, { "epoch": 10.450579790171176, "grad_norm": 0.38345956802368164, "learning_rate": 2.8395000000000002e-05, "loss": 0.041, "step": 9468 }, { "epoch": 10.451684152401988, "grad_norm": 0.44131091237068176, "learning_rate": 2.8398e-05, "loss": 0.0468, "step": 9469 }, { "epoch": 10.4527885146328, "grad_norm": 0.3093574345111847, "learning_rate": 2.8401e-05, "loss": 0.0374, "step": 9470 }, { "epoch": 10.45389287686361, "grad_norm": 0.3007580041885376, "learning_rate": 2.8404e-05, "loss": 0.0232, "step": 9471 }, { "epoch": 10.454997239094423, "grad_norm": 0.383699893951416, "learning_rate": 2.8407e-05, "loss": 0.0241, "step": 9472 }, { "epoch": 10.456101601325235, "grad_norm": 0.21234282851219177, 
"learning_rate": 2.841e-05, "loss": 0.0115, "step": 9473 }, { "epoch": 10.457205963556046, "grad_norm": 2.3112409114837646, "learning_rate": 2.8413000000000004e-05, "loss": 0.066, "step": 9474 }, { "epoch": 10.458310325786858, "grad_norm": 0.3884628117084503, "learning_rate": 2.8416e-05, "loss": 0.0243, "step": 9475 }, { "epoch": 10.45941468801767, "grad_norm": 0.32862016558647156, "learning_rate": 2.8419e-05, "loss": 0.0146, "step": 9476 }, { "epoch": 10.460519050248482, "grad_norm": 0.21418072283267975, "learning_rate": 2.8422e-05, "loss": 0.0121, "step": 9477 }, { "epoch": 10.461623412479293, "grad_norm": 0.35614001750946045, "learning_rate": 2.8425e-05, "loss": 0.0181, "step": 9478 }, { "epoch": 10.462727774710105, "grad_norm": 0.3327975869178772, "learning_rate": 2.8428e-05, "loss": 0.0202, "step": 9479 }, { "epoch": 10.463832136940917, "grad_norm": 0.652470588684082, "learning_rate": 2.8431e-05, "loss": 0.0258, "step": 9480 }, { "epoch": 10.464936499171728, "grad_norm": 0.4012923538684845, "learning_rate": 2.8434e-05, "loss": 0.0166, "step": 9481 }, { "epoch": 10.46604086140254, "grad_norm": 0.8116177320480347, "learning_rate": 2.8437e-05, "loss": 0.0334, "step": 9482 }, { "epoch": 10.467145223633352, "grad_norm": 0.2926778793334961, "learning_rate": 2.844e-05, "loss": 0.0177, "step": 9483 }, { "epoch": 10.468249585864163, "grad_norm": 0.4540201723575592, "learning_rate": 2.8443000000000002e-05, "loss": 0.0226, "step": 9484 }, { "epoch": 10.469353948094975, "grad_norm": 0.31294721364974976, "learning_rate": 2.8446000000000002e-05, "loss": 0.017, "step": 9485 }, { "epoch": 10.470458310325787, "grad_norm": 0.3233446776866913, "learning_rate": 2.8449e-05, "loss": 0.0109, "step": 9486 }, { "epoch": 10.4715626725566, "grad_norm": 0.43055325746536255, "learning_rate": 2.8452e-05, "loss": 0.0205, "step": 9487 }, { "epoch": 10.47266703478741, "grad_norm": 0.3150178790092468, "learning_rate": 2.8455e-05, "loss": 0.0152, "step": 9488 }, { "epoch": 10.473771397018222, 
"grad_norm": 0.38362547755241394, "learning_rate": 2.8458e-05, "loss": 0.0078, "step": 9489 }, { "epoch": 10.474875759249034, "grad_norm": 2.180208921432495, "learning_rate": 2.8461e-05, "loss": 0.0286, "step": 9490 }, { "epoch": 10.475980121479845, "grad_norm": 0.41656914353370667, "learning_rate": 2.8464e-05, "loss": 0.0199, "step": 9491 }, { "epoch": 10.477084483710657, "grad_norm": 0.5779104828834534, "learning_rate": 2.8467e-05, "loss": 0.0198, "step": 9492 }, { "epoch": 10.47818884594147, "grad_norm": 0.4658714234828949, "learning_rate": 2.847e-05, "loss": 0.0194, "step": 9493 }, { "epoch": 10.47929320817228, "grad_norm": 0.9476788640022278, "learning_rate": 2.8473000000000003e-05, "loss": 0.0233, "step": 9494 }, { "epoch": 10.480397570403092, "grad_norm": 0.43018171191215515, "learning_rate": 2.8476000000000003e-05, "loss": 0.0199, "step": 9495 }, { "epoch": 10.481501932633904, "grad_norm": 0.2638661861419678, "learning_rate": 2.8479000000000003e-05, "loss": 0.0146, "step": 9496 }, { "epoch": 10.482606294864716, "grad_norm": 0.3293394446372986, "learning_rate": 2.8482000000000003e-05, "loss": 0.0201, "step": 9497 }, { "epoch": 10.483710657095527, "grad_norm": 0.8855270147323608, "learning_rate": 2.8485000000000003e-05, "loss": 0.0204, "step": 9498 }, { "epoch": 10.484815019326339, "grad_norm": 0.26457273960113525, "learning_rate": 2.8488000000000002e-05, "loss": 0.0097, "step": 9499 }, { "epoch": 10.485919381557151, "grad_norm": 0.8401551246643066, "learning_rate": 2.8491e-05, "loss": 0.0255, "step": 9500 }, { "epoch": 10.487023743787962, "grad_norm": 0.35127347707748413, "learning_rate": 2.8494e-05, "loss": 0.0145, "step": 9501 }, { "epoch": 10.488128106018774, "grad_norm": 0.7224666476249695, "learning_rate": 2.8497e-05, "loss": 0.0151, "step": 9502 }, { "epoch": 10.489232468249586, "grad_norm": 0.5249468684196472, "learning_rate": 2.8499999999999998e-05, "loss": 0.0198, "step": 9503 }, { "epoch": 10.490336830480398, "grad_norm": 0.4314230680465698, 
"learning_rate": 2.8502999999999998e-05, "loss": 0.0214, "step": 9504 }, { "epoch": 10.491441192711209, "grad_norm": 0.6431276798248291, "learning_rate": 2.8506e-05, "loss": 0.0174, "step": 9505 }, { "epoch": 10.492545554942021, "grad_norm": 0.4578405022621155, "learning_rate": 2.8509e-05, "loss": 0.0097, "step": 9506 }, { "epoch": 10.493649917172833, "grad_norm": 0.7321548461914062, "learning_rate": 2.8512e-05, "loss": 0.0539, "step": 9507 }, { "epoch": 10.494754279403644, "grad_norm": 0.3932504951953888, "learning_rate": 2.8515e-05, "loss": 0.0153, "step": 9508 }, { "epoch": 10.495858641634456, "grad_norm": 0.6535060405731201, "learning_rate": 2.8518e-05, "loss": 0.0342, "step": 9509 }, { "epoch": 10.496963003865268, "grad_norm": 0.8459463715553284, "learning_rate": 2.8521e-05, "loss": 0.028, "step": 9510 }, { "epoch": 10.49806736609608, "grad_norm": 1.1496957540512085, "learning_rate": 2.8524e-05, "loss": 0.2383, "step": 9511 }, { "epoch": 10.49917172832689, "grad_norm": 0.825526237487793, "learning_rate": 2.8527e-05, "loss": 0.225, "step": 9512 }, { "epoch": 10.500276090557703, "grad_norm": 0.5559062361717224, "learning_rate": 2.853e-05, "loss": 0.1601, "step": 9513 }, { "epoch": 10.501380452788515, "grad_norm": 0.8342611789703369, "learning_rate": 2.8533e-05, "loss": 0.1751, "step": 9514 }, { "epoch": 10.502484815019326, "grad_norm": 0.7155459523200989, "learning_rate": 2.8536000000000003e-05, "loss": 0.1126, "step": 9515 }, { "epoch": 10.503589177250138, "grad_norm": 0.5032936930656433, "learning_rate": 2.8539000000000002e-05, "loss": 0.0834, "step": 9516 }, { "epoch": 10.50469353948095, "grad_norm": 0.7466610074043274, "learning_rate": 2.8542000000000002e-05, "loss": 0.1195, "step": 9517 }, { "epoch": 10.50579790171176, "grad_norm": 1.0018219947814941, "learning_rate": 2.8545000000000002e-05, "loss": 0.075, "step": 9518 }, { "epoch": 10.506902263942573, "grad_norm": 0.5323208570480347, "learning_rate": 2.8548000000000002e-05, "loss": 0.0577, "step": 9519 }, 
{ "epoch": 10.508006626173385, "grad_norm": 0.3809696137905121, "learning_rate": 2.8551e-05, "loss": 0.0387, "step": 9520 }, { "epoch": 10.509110988404197, "grad_norm": 0.3420169949531555, "learning_rate": 2.8554e-05, "loss": 0.0347, "step": 9521 }, { "epoch": 10.510215350635008, "grad_norm": 0.26732736825942993, "learning_rate": 2.8557e-05, "loss": 0.0286, "step": 9522 }, { "epoch": 10.51131971286582, "grad_norm": 0.297126442193985, "learning_rate": 2.856e-05, "loss": 0.0182, "step": 9523 }, { "epoch": 10.512424075096632, "grad_norm": 0.30807599425315857, "learning_rate": 2.8563e-05, "loss": 0.0267, "step": 9524 }, { "epoch": 10.513528437327443, "grad_norm": 0.8175563812255859, "learning_rate": 2.8566e-05, "loss": 0.0179, "step": 9525 }, { "epoch": 10.514632799558255, "grad_norm": 0.31765663623809814, "learning_rate": 2.8569e-05, "loss": 0.0218, "step": 9526 }, { "epoch": 10.515737161789067, "grad_norm": 0.5637545585632324, "learning_rate": 2.8572e-05, "loss": 0.0274, "step": 9527 }, { "epoch": 10.516841524019878, "grad_norm": 0.2352224737405777, "learning_rate": 2.8575e-05, "loss": 0.0293, "step": 9528 }, { "epoch": 10.51794588625069, "grad_norm": 0.32747146487236023, "learning_rate": 2.8578e-05, "loss": 0.0229, "step": 9529 }, { "epoch": 10.519050248481502, "grad_norm": 0.32937145233154297, "learning_rate": 2.8581e-05, "loss": 0.014, "step": 9530 }, { "epoch": 10.520154610712314, "grad_norm": 0.23074470460414886, "learning_rate": 2.8584e-05, "loss": 0.0094, "step": 9531 }, { "epoch": 10.521258972943125, "grad_norm": 0.37496110796928406, "learning_rate": 2.8587e-05, "loss": 0.0288, "step": 9532 }, { "epoch": 10.522363335173937, "grad_norm": 0.5823801755905151, "learning_rate": 2.859e-05, "loss": 0.0388, "step": 9533 }, { "epoch": 10.52346769740475, "grad_norm": 0.44983619451522827, "learning_rate": 2.8593e-05, "loss": 0.0156, "step": 9534 }, { "epoch": 10.52457205963556, "grad_norm": 0.43163466453552246, "learning_rate": 2.8596000000000002e-05, "loss": 0.0136, 
"step": 9535 }, { "epoch": 10.525676421866372, "grad_norm": 0.34347060322761536, "learning_rate": 2.8599000000000002e-05, "loss": 0.0129, "step": 9536 }, { "epoch": 10.526780784097184, "grad_norm": 0.2722856104373932, "learning_rate": 2.8602e-05, "loss": 0.0105, "step": 9537 }, { "epoch": 10.527885146327996, "grad_norm": 0.4665413498878479, "learning_rate": 2.8605e-05, "loss": 0.0127, "step": 9538 }, { "epoch": 10.528989508558807, "grad_norm": 0.3900540769100189, "learning_rate": 2.8608e-05, "loss": 0.0234, "step": 9539 }, { "epoch": 10.530093870789619, "grad_norm": 0.810339093208313, "learning_rate": 2.8611e-05, "loss": 0.0303, "step": 9540 }, { "epoch": 10.531198233020431, "grad_norm": 0.44279539585113525, "learning_rate": 2.8614e-05, "loss": 0.0112, "step": 9541 }, { "epoch": 10.532302595251242, "grad_norm": 0.3115170896053314, "learning_rate": 2.8617e-05, "loss": 0.0211, "step": 9542 }, { "epoch": 10.533406957482054, "grad_norm": 0.719497561454773, "learning_rate": 2.862e-05, "loss": 0.0126, "step": 9543 }, { "epoch": 10.534511319712866, "grad_norm": 0.28821054100990295, "learning_rate": 2.8623e-05, "loss": 0.0122, "step": 9544 }, { "epoch": 10.535615681943678, "grad_norm": 1.3665777444839478, "learning_rate": 2.8626000000000003e-05, "loss": 0.0469, "step": 9545 }, { "epoch": 10.536720044174489, "grad_norm": 0.46164339780807495, "learning_rate": 2.8629000000000003e-05, "loss": 0.0128, "step": 9546 }, { "epoch": 10.537824406405301, "grad_norm": 0.4820692241191864, "learning_rate": 2.8632000000000003e-05, "loss": 0.0179, "step": 9547 }, { "epoch": 10.538928768636113, "grad_norm": 0.6465241312980652, "learning_rate": 2.8635000000000003e-05, "loss": 0.0235, "step": 9548 }, { "epoch": 10.540033130866924, "grad_norm": 0.3251113295555115, "learning_rate": 2.8638e-05, "loss": 0.0182, "step": 9549 }, { "epoch": 10.541137493097736, "grad_norm": 0.8732737302780151, "learning_rate": 2.8641e-05, "loss": 0.0188, "step": 9550 }, { "epoch": 10.542241855328548, "grad_norm": 
0.4236850440502167, "learning_rate": 2.8644e-05, "loss": 0.0153, "step": 9551 }, { "epoch": 10.54334621755936, "grad_norm": 0.6793382167816162, "learning_rate": 2.8647e-05, "loss": 0.0249, "step": 9552 }, { "epoch": 10.54445057979017, "grad_norm": 0.36616402864456177, "learning_rate": 2.865e-05, "loss": 0.0131, "step": 9553 }, { "epoch": 10.545554942020983, "grad_norm": 0.7012689709663391, "learning_rate": 2.8652999999999998e-05, "loss": 0.0153, "step": 9554 }, { "epoch": 10.546659304251795, "grad_norm": 0.3778352737426758, "learning_rate": 2.8656e-05, "loss": 0.0169, "step": 9555 }, { "epoch": 10.547763666482606, "grad_norm": 0.663241446018219, "learning_rate": 2.8659e-05, "loss": 0.022, "step": 9556 }, { "epoch": 10.548868028713418, "grad_norm": 0.7325457334518433, "learning_rate": 2.8662e-05, "loss": 0.0211, "step": 9557 }, { "epoch": 10.54997239094423, "grad_norm": 0.37259235978126526, "learning_rate": 2.8665e-05, "loss": 0.0147, "step": 9558 }, { "epoch": 10.55107675317504, "grad_norm": 0.46314579248428345, "learning_rate": 2.8668e-05, "loss": 0.0352, "step": 9559 }, { "epoch": 10.552181115405853, "grad_norm": 0.3699708580970764, "learning_rate": 2.8671e-05, "loss": 0.0269, "step": 9560 }, { "epoch": 10.553285477636665, "grad_norm": 0.7928429841995239, "learning_rate": 2.8674e-05, "loss": 0.2477, "step": 9561 }, { "epoch": 10.554389839867477, "grad_norm": 0.8239920139312744, "learning_rate": 2.8677e-05, "loss": 0.1775, "step": 9562 }, { "epoch": 10.555494202098288, "grad_norm": 0.6211837530136108, "learning_rate": 2.868e-05, "loss": 0.1434, "step": 9563 }, { "epoch": 10.5565985643291, "grad_norm": 0.5873419642448425, "learning_rate": 2.8683e-05, "loss": 0.1472, "step": 9564 }, { "epoch": 10.557702926559912, "grad_norm": 0.7774002552032471, "learning_rate": 2.8686000000000003e-05, "loss": 0.1447, "step": 9565 }, { "epoch": 10.558807288790723, "grad_norm": 0.5378544926643372, "learning_rate": 2.8689000000000003e-05, "loss": 0.0831, "step": 9566 }, { "epoch": 
10.559911651021535, "grad_norm": 0.4109898805618286, "learning_rate": 2.8692000000000002e-05, "loss": 0.0613, "step": 9567 }, { "epoch": 10.561016013252347, "grad_norm": 0.6382841467857361, "learning_rate": 2.8695000000000002e-05, "loss": 0.0573, "step": 9568 }, { "epoch": 10.562120375483158, "grad_norm": 0.33483052253723145, "learning_rate": 2.8698000000000002e-05, "loss": 0.0361, "step": 9569 }, { "epoch": 10.56322473771397, "grad_norm": 0.36657872796058655, "learning_rate": 2.8701000000000002e-05, "loss": 0.0835, "step": 9570 }, { "epoch": 10.564329099944782, "grad_norm": 0.3884289562702179, "learning_rate": 2.8704e-05, "loss": 0.0296, "step": 9571 }, { "epoch": 10.565433462175594, "grad_norm": 0.5550441145896912, "learning_rate": 2.8707e-05, "loss": 0.0315, "step": 9572 }, { "epoch": 10.566537824406405, "grad_norm": 0.45132502913475037, "learning_rate": 2.871e-05, "loss": 0.0251, "step": 9573 }, { "epoch": 10.567642186637217, "grad_norm": 0.305312842130661, "learning_rate": 2.8712999999999998e-05, "loss": 0.0208, "step": 9574 }, { "epoch": 10.56874654886803, "grad_norm": 0.35161158442497253, "learning_rate": 2.8716e-05, "loss": 0.0126, "step": 9575 }, { "epoch": 10.56985091109884, "grad_norm": 0.36292147636413574, "learning_rate": 2.8719e-05, "loss": 0.016, "step": 9576 }, { "epoch": 10.570955273329652, "grad_norm": 0.3773977756500244, "learning_rate": 2.8722e-05, "loss": 0.0203, "step": 9577 }, { "epoch": 10.572059635560464, "grad_norm": 0.2772001624107361, "learning_rate": 2.8725e-05, "loss": 0.0184, "step": 9578 }, { "epoch": 10.573163997791276, "grad_norm": 0.34393948316574097, "learning_rate": 2.8728e-05, "loss": 0.023, "step": 9579 }, { "epoch": 10.574268360022087, "grad_norm": 0.28027820587158203, "learning_rate": 2.8731e-05, "loss": 0.019, "step": 9580 }, { "epoch": 10.575372722252899, "grad_norm": 0.3133978247642517, "learning_rate": 2.8734e-05, "loss": 0.0154, "step": 9581 }, { "epoch": 10.576477084483711, "grad_norm": 0.32711708545684814, 
"learning_rate": 2.8737e-05, "loss": 0.0169, "step": 9582 }, { "epoch": 10.577581446714522, "grad_norm": 0.3197682499885559, "learning_rate": 2.874e-05, "loss": 0.0169, "step": 9583 }, { "epoch": 10.578685808945334, "grad_norm": 0.6082087159156799, "learning_rate": 2.8743e-05, "loss": 0.0243, "step": 9584 }, { "epoch": 10.579790171176146, "grad_norm": 0.3499990999698639, "learning_rate": 2.8746000000000002e-05, "loss": 0.0154, "step": 9585 }, { "epoch": 10.580894533406958, "grad_norm": 0.21552984416484833, "learning_rate": 2.8749000000000002e-05, "loss": 0.0073, "step": 9586 }, { "epoch": 10.581998895637769, "grad_norm": 0.28635042905807495, "learning_rate": 2.8752000000000002e-05, "loss": 0.0101, "step": 9587 }, { "epoch": 10.583103257868581, "grad_norm": 0.28337961435317993, "learning_rate": 2.8755e-05, "loss": 0.0411, "step": 9588 }, { "epoch": 10.584207620099393, "grad_norm": 0.531349241733551, "learning_rate": 2.8758e-05, "loss": 0.0227, "step": 9589 }, { "epoch": 10.585311982330204, "grad_norm": 0.17089149355888367, "learning_rate": 2.8761e-05, "loss": 0.0071, "step": 9590 }, { "epoch": 10.586416344561016, "grad_norm": 0.4133165180683136, "learning_rate": 2.8764e-05, "loss": 0.0197, "step": 9591 }, { "epoch": 10.587520706791828, "grad_norm": 0.2720910906791687, "learning_rate": 2.8767e-05, "loss": 0.0139, "step": 9592 }, { "epoch": 10.588625069022639, "grad_norm": 0.3761809468269348, "learning_rate": 2.877e-05, "loss": 0.0131, "step": 9593 }, { "epoch": 10.589729431253451, "grad_norm": 0.24854998290538788, "learning_rate": 2.8773e-05, "loss": 0.0131, "step": 9594 }, { "epoch": 10.590833793484263, "grad_norm": 0.21206283569335938, "learning_rate": 2.8776000000000004e-05, "loss": 0.013, "step": 9595 }, { "epoch": 10.591938155715075, "grad_norm": 0.2512601912021637, "learning_rate": 2.8779000000000003e-05, "loss": 0.0127, "step": 9596 }, { "epoch": 10.593042517945886, "grad_norm": 0.5102669596672058, "learning_rate": 2.8782000000000003e-05, "loss": 0.0202, 
"step": 9597 }, { "epoch": 10.594146880176698, "grad_norm": 0.2840011417865753, "learning_rate": 2.8785e-05, "loss": 0.0188, "step": 9598 }, { "epoch": 10.59525124240751, "grad_norm": 0.7040848731994629, "learning_rate": 2.8788e-05, "loss": 0.0227, "step": 9599 }, { "epoch": 10.59635560463832, "grad_norm": 0.412144273519516, "learning_rate": 2.8791e-05, "loss": 0.0145, "step": 9600 }, { "epoch": 10.597459966869133, "grad_norm": 0.2536388635635376, "learning_rate": 2.8794e-05, "loss": 0.0071, "step": 9601 }, { "epoch": 10.598564329099945, "grad_norm": 0.2839430868625641, "learning_rate": 2.8797e-05, "loss": 0.0122, "step": 9602 }, { "epoch": 10.599668691330756, "grad_norm": 0.2910665273666382, "learning_rate": 2.88e-05, "loss": 0.021, "step": 9603 }, { "epoch": 10.600773053561568, "grad_norm": 0.6114291548728943, "learning_rate": 2.8803e-05, "loss": 0.0223, "step": 9604 }, { "epoch": 10.60187741579238, "grad_norm": 0.706535816192627, "learning_rate": 2.8806e-05, "loss": 0.0174, "step": 9605 }, { "epoch": 10.602981778023192, "grad_norm": 0.8088027238845825, "learning_rate": 2.8809e-05, "loss": 0.0163, "step": 9606 }, { "epoch": 10.604086140254003, "grad_norm": 0.21282176673412323, "learning_rate": 2.8812e-05, "loss": 0.0139, "step": 9607 }, { "epoch": 10.605190502484815, "grad_norm": 0.6847018599510193, "learning_rate": 2.8815e-05, "loss": 0.0218, "step": 9608 }, { "epoch": 10.606294864715627, "grad_norm": 0.24698032438755035, "learning_rate": 2.8818e-05, "loss": 0.0119, "step": 9609 }, { "epoch": 10.607399226946438, "grad_norm": 0.6787197589874268, "learning_rate": 2.8821e-05, "loss": 0.0303, "step": 9610 }, { "epoch": 10.60850358917725, "grad_norm": 0.9498405456542969, "learning_rate": 2.8824e-05, "loss": 0.3053, "step": 9611 }, { "epoch": 10.609607951408062, "grad_norm": 0.6021514534950256, "learning_rate": 2.8827e-05, "loss": 0.1711, "step": 9612 }, { "epoch": 10.610712313638874, "grad_norm": 0.9422906041145325, "learning_rate": 2.883e-05, "loss": 0.1585, "step": 
9613 }, { "epoch": 10.611816675869685, "grad_norm": 0.42760440707206726, "learning_rate": 2.8833e-05, "loss": 0.1039, "step": 9614 }, { "epoch": 10.612921038100497, "grad_norm": 0.4961826503276825, "learning_rate": 2.8836000000000003e-05, "loss": 0.1151, "step": 9615 }, { "epoch": 10.61402540033131, "grad_norm": 0.6129183769226074, "learning_rate": 2.8839000000000003e-05, "loss": 0.0994, "step": 9616 }, { "epoch": 10.61512976256212, "grad_norm": 0.6007256507873535, "learning_rate": 2.8842000000000003e-05, "loss": 0.0889, "step": 9617 }, { "epoch": 10.616234124792932, "grad_norm": 0.4783918559551239, "learning_rate": 2.8845000000000003e-05, "loss": 0.0872, "step": 9618 }, { "epoch": 10.617338487023744, "grad_norm": 2.5546417236328125, "learning_rate": 2.8848000000000002e-05, "loss": 0.0264, "step": 9619 }, { "epoch": 10.618442849254556, "grad_norm": 0.4249526858329773, "learning_rate": 2.8851000000000002e-05, "loss": 0.0382, "step": 9620 }, { "epoch": 10.619547211485367, "grad_norm": 0.6314547061920166, "learning_rate": 2.8854000000000002e-05, "loss": 0.052, "step": 9621 }, { "epoch": 10.620651573716179, "grad_norm": 0.4084519147872925, "learning_rate": 2.8857000000000002e-05, "loss": 0.0198, "step": 9622 }, { "epoch": 10.621755935946991, "grad_norm": 0.26929131150245667, "learning_rate": 2.8859999999999998e-05, "loss": 0.0168, "step": 9623 }, { "epoch": 10.622860298177802, "grad_norm": 0.24971039593219757, "learning_rate": 2.8862999999999998e-05, "loss": 0.0209, "step": 9624 }, { "epoch": 10.623964660408614, "grad_norm": 0.6727105379104614, "learning_rate": 2.8866e-05, "loss": 0.0264, "step": 9625 }, { "epoch": 10.625069022639426, "grad_norm": 0.26221737265586853, "learning_rate": 2.8869e-05, "loss": 0.018, "step": 9626 }, { "epoch": 10.626173384870237, "grad_norm": 1.4694205522537231, "learning_rate": 2.8872e-05, "loss": 0.0128, "step": 9627 }, { "epoch": 10.627277747101049, "grad_norm": 0.6785150766372681, "learning_rate": 2.8875e-05, "loss": 0.023, "step": 9628 
}, { "epoch": 10.628382109331861, "grad_norm": 0.6156274080276489, "learning_rate": 2.8878e-05, "loss": 0.0208, "step": 9629 }, { "epoch": 10.629486471562673, "grad_norm": 0.46910807490348816, "learning_rate": 2.8881e-05, "loss": 0.0218, "step": 9630 }, { "epoch": 10.630590833793484, "grad_norm": 0.3553260564804077, "learning_rate": 2.8884e-05, "loss": 0.0319, "step": 9631 }, { "epoch": 10.631695196024296, "grad_norm": 0.32401517033576965, "learning_rate": 2.8887e-05, "loss": 0.0118, "step": 9632 }, { "epoch": 10.632799558255108, "grad_norm": 0.21612323820590973, "learning_rate": 2.889e-05, "loss": 0.0133, "step": 9633 }, { "epoch": 10.633903920485919, "grad_norm": 0.3359270989894867, "learning_rate": 2.8893e-05, "loss": 0.0167, "step": 9634 }, { "epoch": 10.635008282716731, "grad_norm": 0.32499802112579346, "learning_rate": 2.8896e-05, "loss": 0.0156, "step": 9635 }, { "epoch": 10.636112644947543, "grad_norm": 0.25222939252853394, "learning_rate": 2.8899000000000002e-05, "loss": 0.0129, "step": 9636 }, { "epoch": 10.637217007178354, "grad_norm": 0.3103511333465576, "learning_rate": 2.8902000000000002e-05, "loss": 0.0138, "step": 9637 }, { "epoch": 10.638321369409166, "grad_norm": 0.5803126692771912, "learning_rate": 2.8905000000000002e-05, "loss": 0.0431, "step": 9638 }, { "epoch": 10.639425731639978, "grad_norm": 0.5210294127464294, "learning_rate": 2.8908000000000002e-05, "loss": 0.0172, "step": 9639 }, { "epoch": 10.64053009387079, "grad_norm": 0.34275078773498535, "learning_rate": 2.8911e-05, "loss": 0.0182, "step": 9640 }, { "epoch": 10.6416344561016, "grad_norm": 0.2076704502105713, "learning_rate": 2.8914e-05, "loss": 0.0074, "step": 9641 }, { "epoch": 10.642738818332413, "grad_norm": 0.706996500492096, "learning_rate": 2.8917e-05, "loss": 0.0235, "step": 9642 }, { "epoch": 10.643843180563225, "grad_norm": 0.7905288934707642, "learning_rate": 2.892e-05, "loss": 0.0288, "step": 9643 }, { "epoch": 10.644947542794036, "grad_norm": 0.25902530550956726, 
"learning_rate": 2.8923e-05, "loss": 0.0176, "step": 9644 }, { "epoch": 10.646051905024848, "grad_norm": 0.39701151847839355, "learning_rate": 2.8926e-05, "loss": 0.0171, "step": 9645 }, { "epoch": 10.64715626725566, "grad_norm": 0.6045822501182556, "learning_rate": 2.8929000000000004e-05, "loss": 0.0246, "step": 9646 }, { "epoch": 10.648260629486472, "grad_norm": 0.28478509187698364, "learning_rate": 2.8932e-05, "loss": 0.0179, "step": 9647 }, { "epoch": 10.649364991717283, "grad_norm": 0.556319534778595, "learning_rate": 2.8935e-05, "loss": 0.018, "step": 9648 }, { "epoch": 10.650469353948095, "grad_norm": 0.22971560060977936, "learning_rate": 2.8938e-05, "loss": 0.0115, "step": 9649 }, { "epoch": 10.651573716178907, "grad_norm": 0.3014107048511505, "learning_rate": 2.8941e-05, "loss": 0.018, "step": 9650 }, { "epoch": 10.652678078409718, "grad_norm": 0.5447822213172913, "learning_rate": 2.8944e-05, "loss": 0.0323, "step": 9651 }, { "epoch": 10.65378244064053, "grad_norm": 0.12580962479114532, "learning_rate": 2.8947e-05, "loss": 0.0069, "step": 9652 }, { "epoch": 10.654886802871342, "grad_norm": 0.461971253156662, "learning_rate": 2.895e-05, "loss": 0.0187, "step": 9653 }, { "epoch": 10.655991165102154, "grad_norm": 0.6264379024505615, "learning_rate": 2.8953e-05, "loss": 0.0167, "step": 9654 }, { "epoch": 10.657095527332965, "grad_norm": 0.8104820251464844, "learning_rate": 2.8956e-05, "loss": 0.0221, "step": 9655 }, { "epoch": 10.658199889563777, "grad_norm": 0.27451905608177185, "learning_rate": 2.8959000000000002e-05, "loss": 0.0124, "step": 9656 }, { "epoch": 10.65930425179459, "grad_norm": 0.3719567060470581, "learning_rate": 2.8962e-05, "loss": 0.0447, "step": 9657 }, { "epoch": 10.6604086140254, "grad_norm": 0.47258442640304565, "learning_rate": 2.8965e-05, "loss": 0.0128, "step": 9658 }, { "epoch": 10.661512976256212, "grad_norm": 0.5697277784347534, "learning_rate": 2.8968e-05, "loss": 0.0248, "step": 9659 }, { "epoch": 10.662617338487024, "grad_norm": 
0.4828152358531952, "learning_rate": 2.8971e-05, "loss": 0.0168, "step": 9660 }, { "epoch": 10.663721700717835, "grad_norm": 0.8802636861801147, "learning_rate": 2.8974e-05, "loss": 0.2429, "step": 9661 }, { "epoch": 10.664826062948647, "grad_norm": 0.7291384935379028, "learning_rate": 2.8977e-05, "loss": 0.1352, "step": 9662 }, { "epoch": 10.665930425179459, "grad_norm": 0.6279188394546509, "learning_rate": 2.898e-05, "loss": 0.1286, "step": 9663 }, { "epoch": 10.667034787410271, "grad_norm": 0.48941662907600403, "learning_rate": 2.8983e-05, "loss": 0.1229, "step": 9664 }, { "epoch": 10.668139149641082, "grad_norm": 0.8870094418525696, "learning_rate": 2.8986e-05, "loss": 0.1027, "step": 9665 }, { "epoch": 10.669243511871894, "grad_norm": 0.5506704449653625, "learning_rate": 2.8989000000000003e-05, "loss": 0.145, "step": 9666 }, { "epoch": 10.670347874102706, "grad_norm": 0.435879647731781, "learning_rate": 2.8992000000000003e-05, "loss": 0.0761, "step": 9667 }, { "epoch": 10.671452236333517, "grad_norm": 0.6558008193969727, "learning_rate": 2.8995000000000003e-05, "loss": 0.0665, "step": 9668 }, { "epoch": 10.672556598564329, "grad_norm": 0.33531707525253296, "learning_rate": 2.8998000000000003e-05, "loss": 0.0306, "step": 9669 }, { "epoch": 10.673660960795141, "grad_norm": 0.26112329959869385, "learning_rate": 2.9001000000000002e-05, "loss": 0.0203, "step": 9670 }, { "epoch": 10.674765323025952, "grad_norm": 0.8877696990966797, "learning_rate": 2.9004000000000002e-05, "loss": 0.027, "step": 9671 }, { "epoch": 10.675869685256764, "grad_norm": 0.2531541883945465, "learning_rate": 2.9007e-05, "loss": 0.0213, "step": 9672 }, { "epoch": 10.676974047487576, "grad_norm": 0.41501522064208984, "learning_rate": 2.901e-05, "loss": 0.0163, "step": 9673 }, { "epoch": 10.678078409718388, "grad_norm": 0.2831360995769501, "learning_rate": 2.9012999999999998e-05, "loss": 0.0192, "step": 9674 }, { "epoch": 10.679182771949199, "grad_norm": 0.24723145365715027, "learning_rate": 
2.9015999999999998e-05, "loss": 0.0158, "step": 9675 }, { "epoch": 10.680287134180011, "grad_norm": 0.4750984311103821, "learning_rate": 2.9019e-05, "loss": 0.0456, "step": 9676 }, { "epoch": 10.681391496410823, "grad_norm": 0.3392501771450043, "learning_rate": 2.9022e-05, "loss": 0.0123, "step": 9677 }, { "epoch": 10.682495858641634, "grad_norm": 0.3467416763305664, "learning_rate": 2.9025e-05, "loss": 0.016, "step": 9678 }, { "epoch": 10.683600220872446, "grad_norm": 0.3411010503768921, "learning_rate": 2.9028e-05, "loss": 0.03, "step": 9679 }, { "epoch": 10.684704583103258, "grad_norm": 0.20092962682247162, "learning_rate": 2.9031e-05, "loss": 0.0103, "step": 9680 }, { "epoch": 10.68580894533407, "grad_norm": 0.32917463779449463, "learning_rate": 2.9034e-05, "loss": 0.0141, "step": 9681 }, { "epoch": 10.68691330756488, "grad_norm": 0.33340582251548767, "learning_rate": 2.9037e-05, "loss": 0.0186, "step": 9682 }, { "epoch": 10.688017669795693, "grad_norm": 0.37389305233955383, "learning_rate": 2.904e-05, "loss": 0.0188, "step": 9683 }, { "epoch": 10.689122032026505, "grad_norm": 0.2983386218547821, "learning_rate": 2.9043e-05, "loss": 0.0179, "step": 9684 }, { "epoch": 10.690226394257316, "grad_norm": 0.22322264313697815, "learning_rate": 2.9046e-05, "loss": 0.0153, "step": 9685 }, { "epoch": 10.691330756488128, "grad_norm": 0.41270899772644043, "learning_rate": 2.9049000000000003e-05, "loss": 0.0153, "step": 9686 }, { "epoch": 10.69243511871894, "grad_norm": 0.3712161183357239, "learning_rate": 2.9052000000000002e-05, "loss": 0.0237, "step": 9687 }, { "epoch": 10.693539480949752, "grad_norm": 0.3382883369922638, "learning_rate": 2.9055000000000002e-05, "loss": 0.0085, "step": 9688 }, { "epoch": 10.694643843180563, "grad_norm": 0.6132292151451111, "learning_rate": 2.9058000000000002e-05, "loss": 0.0202, "step": 9689 }, { "epoch": 10.695748205411375, "grad_norm": 0.233272984623909, "learning_rate": 2.9061000000000002e-05, "loss": 0.0094, "step": 9690 }, { "epoch": 
10.696852567642187, "grad_norm": 0.3403233289718628, "learning_rate": 2.9064e-05, "loss": 0.0167, "step": 9691 }, { "epoch": 10.697956929872998, "grad_norm": 0.23894260823726654, "learning_rate": 2.9067e-05, "loss": 0.012, "step": 9692 }, { "epoch": 10.69906129210381, "grad_norm": 0.29235079884529114, "learning_rate": 2.907e-05, "loss": 0.0202, "step": 9693 }, { "epoch": 10.700165654334622, "grad_norm": 0.23847858607769012, "learning_rate": 2.9073e-05, "loss": 0.0112, "step": 9694 }, { "epoch": 10.701270016565433, "grad_norm": 0.4034966230392456, "learning_rate": 2.9076e-05, "loss": 0.0228, "step": 9695 }, { "epoch": 10.702374378796245, "grad_norm": 0.4143964648246765, "learning_rate": 2.9079e-05, "loss": 0.0199, "step": 9696 }, { "epoch": 10.703478741027057, "grad_norm": 0.2015404999256134, "learning_rate": 2.9082e-05, "loss": 0.0136, "step": 9697 }, { "epoch": 10.70458310325787, "grad_norm": 0.37123268842697144, "learning_rate": 2.9085e-05, "loss": 0.0235, "step": 9698 }, { "epoch": 10.70568746548868, "grad_norm": 0.12472082674503326, "learning_rate": 2.9088e-05, "loss": 0.008, "step": 9699 }, { "epoch": 10.706791827719492, "grad_norm": 0.3444516956806183, "learning_rate": 2.9091e-05, "loss": 0.0189, "step": 9700 }, { "epoch": 10.707896189950304, "grad_norm": 0.19443820416927338, "learning_rate": 2.9094e-05, "loss": 0.0074, "step": 9701 }, { "epoch": 10.709000552181115, "grad_norm": 0.33892619609832764, "learning_rate": 2.9097e-05, "loss": 0.0142, "step": 9702 }, { "epoch": 10.710104914411927, "grad_norm": 0.8712142109870911, "learning_rate": 2.91e-05, "loss": 0.0193, "step": 9703 }, { "epoch": 10.71120927664274, "grad_norm": 0.4839879870414734, "learning_rate": 2.9103e-05, "loss": 0.0194, "step": 9704 }, { "epoch": 10.71231363887355, "grad_norm": 0.34016597270965576, "learning_rate": 2.9106e-05, "loss": 0.0179, "step": 9705 }, { "epoch": 10.713418001104362, "grad_norm": 0.4703163802623749, "learning_rate": 2.9109000000000002e-05, "loss": 0.0236, "step": 9706 }, 
{ "epoch": 10.714522363335174, "grad_norm": 0.4044443368911743, "learning_rate": 2.9112000000000002e-05, "loss": 0.0165, "step": 9707 }, { "epoch": 10.715626725565986, "grad_norm": 0.337978720664978, "learning_rate": 2.9115e-05, "loss": 0.0156, "step": 9708 }, { "epoch": 10.716731087796797, "grad_norm": 0.4756127893924713, "learning_rate": 2.9118e-05, "loss": 0.0173, "step": 9709 }, { "epoch": 10.717835450027609, "grad_norm": 0.3668855130672455, "learning_rate": 2.9121e-05, "loss": 0.0176, "step": 9710 }, { "epoch": 10.718939812258421, "grad_norm": 1.1982543468475342, "learning_rate": 2.9124e-05, "loss": 0.3376, "step": 9711 }, { "epoch": 10.720044174489232, "grad_norm": 0.641417384147644, "learning_rate": 2.9127e-05, "loss": 0.1712, "step": 9712 }, { "epoch": 10.721148536720044, "grad_norm": 0.5187115669250488, "learning_rate": 2.913e-05, "loss": 0.1495, "step": 9713 }, { "epoch": 10.722252898950856, "grad_norm": 1.0037556886672974, "learning_rate": 2.9133e-05, "loss": 0.1077, "step": 9714 }, { "epoch": 10.723357261181668, "grad_norm": 0.9546434879302979, "learning_rate": 2.9136e-05, "loss": 0.149, "step": 9715 }, { "epoch": 10.724461623412479, "grad_norm": 0.4234963655471802, "learning_rate": 2.9139000000000003e-05, "loss": 0.0688, "step": 9716 }, { "epoch": 10.725565985643291, "grad_norm": 0.4141346216201782, "learning_rate": 2.9142000000000003e-05, "loss": 0.0643, "step": 9717 }, { "epoch": 10.726670347874103, "grad_norm": 0.5318880081176758, "learning_rate": 2.9145000000000003e-05, "loss": 0.0491, "step": 9718 }, { "epoch": 10.727774710104914, "grad_norm": 0.2249072641134262, "learning_rate": 2.9148000000000003e-05, "loss": 0.0231, "step": 9719 }, { "epoch": 10.728879072335726, "grad_norm": 0.5595663785934448, "learning_rate": 2.9151000000000003e-05, "loss": 0.0583, "step": 9720 }, { "epoch": 10.729983434566538, "grad_norm": 0.35351940989494324, "learning_rate": 2.9154e-05, "loss": 0.0229, "step": 9721 }, { "epoch": 10.73108779679735, "grad_norm": 
0.4453246593475342, "learning_rate": 2.9157e-05, "loss": 0.03, "step": 9722 }, { "epoch": 10.73219215902816, "grad_norm": 0.5043512582778931, "learning_rate": 2.916e-05, "loss": 0.0208, "step": 9723 }, { "epoch": 10.733296521258973, "grad_norm": 0.5504825115203857, "learning_rate": 2.9163e-05, "loss": 0.1003, "step": 9724 }, { "epoch": 10.734400883489785, "grad_norm": 0.38568803668022156, "learning_rate": 2.9165999999999998e-05, "loss": 0.0207, "step": 9725 }, { "epoch": 10.735505245720596, "grad_norm": 0.23275013267993927, "learning_rate": 2.9169e-05, "loss": 0.0134, "step": 9726 }, { "epoch": 10.736609607951408, "grad_norm": 0.6696230173110962, "learning_rate": 2.9172e-05, "loss": 0.0333, "step": 9727 }, { "epoch": 10.73771397018222, "grad_norm": 0.2377292960882187, "learning_rate": 2.9175e-05, "loss": 0.0154, "step": 9728 }, { "epoch": 10.738818332413032, "grad_norm": 0.19941958785057068, "learning_rate": 2.9178e-05, "loss": 0.0131, "step": 9729 }, { "epoch": 10.739922694643843, "grad_norm": 0.2015286237001419, "learning_rate": 2.9181e-05, "loss": 0.0108, "step": 9730 }, { "epoch": 10.741027056874655, "grad_norm": 0.28418853878974915, "learning_rate": 2.9184e-05, "loss": 0.0317, "step": 9731 }, { "epoch": 10.742131419105467, "grad_norm": 0.3337855041027069, "learning_rate": 2.9187e-05, "loss": 0.0152, "step": 9732 }, { "epoch": 10.743235781336278, "grad_norm": 0.2782873809337616, "learning_rate": 2.919e-05, "loss": 0.0137, "step": 9733 }, { "epoch": 10.74434014356709, "grad_norm": 0.2747857868671417, "learning_rate": 2.9193e-05, "loss": 0.0125, "step": 9734 }, { "epoch": 10.745444505797902, "grad_norm": 0.24404489994049072, "learning_rate": 2.9196e-05, "loss": 0.0099, "step": 9735 }, { "epoch": 10.746548868028713, "grad_norm": 0.2591730058193207, "learning_rate": 2.9199000000000003e-05, "loss": 0.0125, "step": 9736 }, { "epoch": 10.747653230259525, "grad_norm": 0.274366557598114, "learning_rate": 2.9202000000000003e-05, "loss": 0.0142, "step": 9737 }, { "epoch": 
10.748757592490337, "grad_norm": 0.4354463815689087, "learning_rate": 2.9205000000000002e-05, "loss": 0.0226, "step": 9738 }, { "epoch": 10.74986195472115, "grad_norm": 0.2390083223581314, "learning_rate": 2.9208000000000002e-05, "loss": 0.0133, "step": 9739 }, { "epoch": 10.75096631695196, "grad_norm": 0.31989800930023193, "learning_rate": 2.9211000000000002e-05, "loss": 0.023, "step": 9740 }, { "epoch": 10.752070679182772, "grad_norm": 0.5180533528327942, "learning_rate": 2.9214000000000002e-05, "loss": 0.025, "step": 9741 }, { "epoch": 10.753175041413584, "grad_norm": 0.501224935054779, "learning_rate": 2.9217e-05, "loss": 0.0261, "step": 9742 }, { "epoch": 10.754279403644395, "grad_norm": 0.13507169485092163, "learning_rate": 2.922e-05, "loss": 0.0054, "step": 9743 }, { "epoch": 10.755383765875207, "grad_norm": 0.6076388955116272, "learning_rate": 2.9223e-05, "loss": 0.0485, "step": 9744 }, { "epoch": 10.75648812810602, "grad_norm": 0.7138223648071289, "learning_rate": 2.9226e-05, "loss": 0.0131, "step": 9745 }, { "epoch": 10.75759249033683, "grad_norm": 0.7122505307197571, "learning_rate": 2.9229e-05, "loss": 0.0179, "step": 9746 }, { "epoch": 10.758696852567642, "grad_norm": 0.1848374903202057, "learning_rate": 2.9232e-05, "loss": 0.0061, "step": 9747 }, { "epoch": 10.759801214798454, "grad_norm": 0.639607310295105, "learning_rate": 2.9235e-05, "loss": 0.028, "step": 9748 }, { "epoch": 10.760905577029266, "grad_norm": 0.2447141706943512, "learning_rate": 2.9238e-05, "loss": 0.0126, "step": 9749 }, { "epoch": 10.762009939260077, "grad_norm": 0.3260752856731415, "learning_rate": 2.9241e-05, "loss": 0.0208, "step": 9750 }, { "epoch": 10.763114301490889, "grad_norm": 0.2084171324968338, "learning_rate": 2.9244e-05, "loss": 0.008, "step": 9751 }, { "epoch": 10.764218663721701, "grad_norm": 0.46611395478248596, "learning_rate": 2.9247e-05, "loss": 0.0198, "step": 9752 }, { "epoch": 10.765323025952512, "grad_norm": 0.6141471266746521, "learning_rate": 2.925e-05, 
"loss": 0.0285, "step": 9753 }, { "epoch": 10.766427388183324, "grad_norm": 0.5765607953071594, "learning_rate": 2.9253e-05, "loss": 0.0229, "step": 9754 }, { "epoch": 10.767531750414136, "grad_norm": 0.3084258437156677, "learning_rate": 2.9256e-05, "loss": 0.0196, "step": 9755 }, { "epoch": 10.768636112644948, "grad_norm": 0.18940231204032898, "learning_rate": 2.9259e-05, "loss": 0.0057, "step": 9756 }, { "epoch": 10.769740474875759, "grad_norm": 0.9464513659477234, "learning_rate": 2.9262000000000002e-05, "loss": 0.0326, "step": 9757 }, { "epoch": 10.770844837106571, "grad_norm": 0.4855100214481354, "learning_rate": 2.9265000000000002e-05, "loss": 0.035, "step": 9758 }, { "epoch": 10.771949199337383, "grad_norm": 0.9242199063301086, "learning_rate": 2.9268e-05, "loss": 0.03, "step": 9759 }, { "epoch": 10.773053561568194, "grad_norm": 0.3592759370803833, "learning_rate": 2.9271e-05, "loss": 0.0186, "step": 9760 }, { "epoch": 10.774157923799006, "grad_norm": 0.7070873975753784, "learning_rate": 2.9274e-05, "loss": 0.2493, "step": 9761 }, { "epoch": 10.775262286029818, "grad_norm": 0.6605046987533569, "learning_rate": 2.9277e-05, "loss": 0.1803, "step": 9762 }, { "epoch": 10.77636664826063, "grad_norm": 0.8806664347648621, "learning_rate": 2.928e-05, "loss": 0.2026, "step": 9763 }, { "epoch": 10.77747101049144, "grad_norm": 0.5222176313400269, "learning_rate": 2.9283e-05, "loss": 0.1248, "step": 9764 }, { "epoch": 10.778575372722253, "grad_norm": 2.374037265777588, "learning_rate": 2.9286e-05, "loss": 0.136, "step": 9765 }, { "epoch": 10.779679734953065, "grad_norm": 0.4695151746273041, "learning_rate": 2.9289e-05, "loss": 0.0897, "step": 9766 }, { "epoch": 10.780784097183876, "grad_norm": 0.5607321262359619, "learning_rate": 2.9292000000000003e-05, "loss": 0.0491, "step": 9767 }, { "epoch": 10.781888459414688, "grad_norm": 0.40927472710609436, "learning_rate": 2.9295000000000003e-05, "loss": 0.0335, "step": 9768 }, { "epoch": 10.7829928216455, "grad_norm": 
0.3465026915073395, "learning_rate": 2.9298000000000003e-05, "loss": 0.0295, "step": 9769 }, { "epoch": 10.78409718387631, "grad_norm": 0.27857962250709534, "learning_rate": 2.9301e-05, "loss": 0.0249, "step": 9770 }, { "epoch": 10.785201546107123, "grad_norm": 0.303223192691803, "learning_rate": 2.9304e-05, "loss": 0.0287, "step": 9771 }, { "epoch": 10.786305908337935, "grad_norm": 0.27466848492622375, "learning_rate": 2.9307e-05, "loss": 0.0174, "step": 9772 }, { "epoch": 10.787410270568747, "grad_norm": 0.39800533652305603, "learning_rate": 2.931e-05, "loss": 0.0239, "step": 9773 }, { "epoch": 10.788514632799558, "grad_norm": 0.45012056827545166, "learning_rate": 2.9313e-05, "loss": 0.0262, "step": 9774 }, { "epoch": 10.78961899503037, "grad_norm": 0.39716312289237976, "learning_rate": 2.9316e-05, "loss": 0.0166, "step": 9775 }, { "epoch": 10.790723357261182, "grad_norm": 0.3475897014141083, "learning_rate": 2.9318999999999998e-05, "loss": 0.014, "step": 9776 }, { "epoch": 10.791827719491993, "grad_norm": 0.3166714608669281, "learning_rate": 2.9322e-05, "loss": 0.0214, "step": 9777 }, { "epoch": 10.792932081722805, "grad_norm": 0.31005123257637024, "learning_rate": 2.9325e-05, "loss": 0.0086, "step": 9778 }, { "epoch": 10.794036443953617, "grad_norm": 0.3220333158969879, "learning_rate": 2.9328e-05, "loss": 0.008, "step": 9779 }, { "epoch": 10.795140806184428, "grad_norm": 0.35849636793136597, "learning_rate": 2.9331e-05, "loss": 0.0179, "step": 9780 }, { "epoch": 10.79624516841524, "grad_norm": 0.2172544002532959, "learning_rate": 2.9334e-05, "loss": 0.0109, "step": 9781 }, { "epoch": 10.797349530646052, "grad_norm": 0.6003341674804688, "learning_rate": 2.9337e-05, "loss": 0.0209, "step": 9782 }, { "epoch": 10.798453892876864, "grad_norm": 0.2803301215171814, "learning_rate": 2.934e-05, "loss": 0.0179, "step": 9783 }, { "epoch": 10.799558255107675, "grad_norm": 0.4504733979701996, "learning_rate": 2.9343e-05, "loss": 0.0245, "step": 9784 }, { "epoch": 
10.800662617338487, "grad_norm": 0.5110149383544922, "learning_rate": 2.9346e-05, "loss": 0.0453, "step": 9785 }, { "epoch": 10.8017669795693, "grad_norm": 0.39193299412727356, "learning_rate": 2.9349e-05, "loss": 0.0281, "step": 9786 }, { "epoch": 10.80287134180011, "grad_norm": 0.21543538570404053, "learning_rate": 2.9352000000000003e-05, "loss": 0.0094, "step": 9787 }, { "epoch": 10.803975704030922, "grad_norm": 0.28634509444236755, "learning_rate": 2.9355000000000003e-05, "loss": 0.0184, "step": 9788 }, { "epoch": 10.805080066261734, "grad_norm": 0.21141405403614044, "learning_rate": 2.9358000000000003e-05, "loss": 0.0124, "step": 9789 }, { "epoch": 10.806184428492546, "grad_norm": 0.33797717094421387, "learning_rate": 2.9361000000000002e-05, "loss": 0.015, "step": 9790 }, { "epoch": 10.807288790723357, "grad_norm": 0.500571072101593, "learning_rate": 2.9364000000000002e-05, "loss": 0.0178, "step": 9791 }, { "epoch": 10.808393152954169, "grad_norm": 0.3716474175453186, "learning_rate": 2.9367000000000002e-05, "loss": 0.0179, "step": 9792 }, { "epoch": 10.809497515184981, "grad_norm": 0.3504523038864136, "learning_rate": 2.9370000000000002e-05, "loss": 0.0088, "step": 9793 }, { "epoch": 10.810601877415792, "grad_norm": 0.6788516044616699, "learning_rate": 2.9373e-05, "loss": 0.0197, "step": 9794 }, { "epoch": 10.811706239646604, "grad_norm": 3.6679441928863525, "learning_rate": 2.9375999999999998e-05, "loss": 0.0931, "step": 9795 }, { "epoch": 10.812810601877416, "grad_norm": 0.31492340564727783, "learning_rate": 2.9378999999999998e-05, "loss": 0.0184, "step": 9796 }, { "epoch": 10.813914964108228, "grad_norm": 0.3627106249332428, "learning_rate": 2.9382e-05, "loss": 0.0214, "step": 9797 }, { "epoch": 10.815019326339039, "grad_norm": 0.35800644755363464, "learning_rate": 2.9385e-05, "loss": 0.0239, "step": 9798 }, { "epoch": 10.816123688569851, "grad_norm": 0.5268235206604004, "learning_rate": 2.9388e-05, "loss": 0.0263, "step": 9799 }, { "epoch": 
10.817228050800663, "grad_norm": 0.3783254325389862, "learning_rate": 2.9391e-05, "loss": 0.0214, "step": 9800 }, { "epoch": 10.818332413031474, "grad_norm": 0.26807814836502075, "learning_rate": 2.9394e-05, "loss": 0.0162, "step": 9801 }, { "epoch": 10.819436775262286, "grad_norm": 0.5139928460121155, "learning_rate": 2.9397e-05, "loss": 0.031, "step": 9802 }, { "epoch": 10.820541137493098, "grad_norm": 0.6428493857383728, "learning_rate": 2.94e-05, "loss": 0.0258, "step": 9803 }, { "epoch": 10.821645499723909, "grad_norm": 0.27471452951431274, "learning_rate": 2.9403e-05, "loss": 0.0194, "step": 9804 }, { "epoch": 10.82274986195472, "grad_norm": 0.15932326018810272, "learning_rate": 2.9406e-05, "loss": 0.0117, "step": 9805 }, { "epoch": 10.823854224185533, "grad_norm": 0.6424082517623901, "learning_rate": 2.9409e-05, "loss": 0.0217, "step": 9806 }, { "epoch": 10.824958586416345, "grad_norm": 0.38546594977378845, "learning_rate": 2.9412000000000002e-05, "loss": 0.0192, "step": 9807 }, { "epoch": 10.826062948647156, "grad_norm": 0.132679745554924, "learning_rate": 2.9415000000000002e-05, "loss": 0.0093, "step": 9808 }, { "epoch": 10.827167310877968, "grad_norm": 0.3476833403110504, "learning_rate": 2.9418000000000002e-05, "loss": 0.0146, "step": 9809 }, { "epoch": 10.82827167310878, "grad_norm": 0.3209647834300995, "learning_rate": 2.9421000000000002e-05, "loss": 0.0148, "step": 9810 }, { "epoch": 10.82937603533959, "grad_norm": 0.9033068418502808, "learning_rate": 2.9424e-05, "loss": 0.2125, "step": 9811 }, { "epoch": 10.830480397570403, "grad_norm": 0.8260529041290283, "learning_rate": 2.9427e-05, "loss": 0.1951, "step": 9812 }, { "epoch": 10.831584759801215, "grad_norm": 0.4636786878108978, "learning_rate": 2.943e-05, "loss": 0.12, "step": 9813 }, { "epoch": 10.832689122032026, "grad_norm": 0.786279559135437, "learning_rate": 2.9433e-05, "loss": 0.1446, "step": 9814 }, { "epoch": 10.833793484262838, "grad_norm": 0.6333957314491272, "learning_rate": 2.9436e-05, 
"loss": 0.091, "step": 9815 }, { "epoch": 10.83489784649365, "grad_norm": 0.6201778054237366, "learning_rate": 2.9439e-05, "loss": 0.1319, "step": 9816 }, { "epoch": 10.836002208724462, "grad_norm": 0.6348274350166321, "learning_rate": 2.9442000000000004e-05, "loss": 0.0882, "step": 9817 }, { "epoch": 10.837106570955273, "grad_norm": 0.5415244698524475, "learning_rate": 2.9445000000000004e-05, "loss": 0.0359, "step": 9818 }, { "epoch": 10.838210933186085, "grad_norm": 0.4966219961643219, "learning_rate": 2.9448e-05, "loss": 0.0475, "step": 9819 }, { "epoch": 10.839315295416897, "grad_norm": 0.4738425016403198, "learning_rate": 2.9451e-05, "loss": 0.0241, "step": 9820 }, { "epoch": 10.840419657647708, "grad_norm": 0.2638334035873413, "learning_rate": 2.9454e-05, "loss": 0.0159, "step": 9821 }, { "epoch": 10.84152401987852, "grad_norm": 0.23348256945610046, "learning_rate": 2.9457e-05, "loss": 0.0195, "step": 9822 }, { "epoch": 10.842628382109332, "grad_norm": 0.3871990740299225, "learning_rate": 2.946e-05, "loss": 0.0192, "step": 9823 }, { "epoch": 10.843732744340144, "grad_norm": 0.2564982771873474, "learning_rate": 2.9463e-05, "loss": 0.0406, "step": 9824 }, { "epoch": 10.844837106570955, "grad_norm": 0.3073350191116333, "learning_rate": 2.9466e-05, "loss": 0.0239, "step": 9825 }, { "epoch": 10.845941468801767, "grad_norm": 0.28344759345054626, "learning_rate": 2.9469e-05, "loss": 0.012, "step": 9826 }, { "epoch": 10.84704583103258, "grad_norm": 0.42884624004364014, "learning_rate": 2.9472000000000002e-05, "loss": 0.011, "step": 9827 }, { "epoch": 10.84815019326339, "grad_norm": 0.2424464374780655, "learning_rate": 2.9475e-05, "loss": 0.012, "step": 9828 }, { "epoch": 10.849254555494202, "grad_norm": 0.3752128481864929, "learning_rate": 2.9478e-05, "loss": 0.0143, "step": 9829 }, { "epoch": 10.850358917725014, "grad_norm": 0.26854178309440613, "learning_rate": 2.9481e-05, "loss": 0.0179, "step": 9830 }, { "epoch": 10.851463279955826, "grad_norm": 
0.4412381947040558, "learning_rate": 2.9484e-05, "loss": 0.0177, "step": 9831 }, { "epoch": 10.852567642186637, "grad_norm": 0.6210622191429138, "learning_rate": 2.9487e-05, "loss": 0.0257, "step": 9832 }, { "epoch": 10.853672004417449, "grad_norm": 0.41875913739204407, "learning_rate": 2.949e-05, "loss": 0.0266, "step": 9833 }, { "epoch": 10.854776366648261, "grad_norm": 0.4977867901325226, "learning_rate": 2.9493e-05, "loss": 0.0122, "step": 9834 }, { "epoch": 10.855880728879072, "grad_norm": 0.18148711323738098, "learning_rate": 2.9496e-05, "loss": 0.0132, "step": 9835 }, { "epoch": 10.856985091109884, "grad_norm": 0.2689392566680908, "learning_rate": 2.9499e-05, "loss": 0.0155, "step": 9836 }, { "epoch": 10.858089453340696, "grad_norm": 0.6138120889663696, "learning_rate": 2.9502000000000003e-05, "loss": 0.0175, "step": 9837 }, { "epoch": 10.859193815571507, "grad_norm": 0.3645847737789154, "learning_rate": 2.9505000000000003e-05, "loss": 0.0232, "step": 9838 }, { "epoch": 10.860298177802319, "grad_norm": 0.272859126329422, "learning_rate": 2.9508000000000003e-05, "loss": 0.0162, "step": 9839 }, { "epoch": 10.861402540033131, "grad_norm": 0.3804044723510742, "learning_rate": 2.9511000000000003e-05, "loss": 0.0217, "step": 9840 }, { "epoch": 10.862506902263943, "grad_norm": 0.2995024025440216, "learning_rate": 2.9514000000000002e-05, "loss": 0.0132, "step": 9841 }, { "epoch": 10.863611264494754, "grad_norm": 0.34337398409843445, "learning_rate": 2.9517000000000002e-05, "loss": 0.0155, "step": 9842 }, { "epoch": 10.864715626725566, "grad_norm": 0.413246214389801, "learning_rate": 2.9520000000000002e-05, "loss": 0.016, "step": 9843 }, { "epoch": 10.865819988956378, "grad_norm": 0.3701498508453369, "learning_rate": 2.9523e-05, "loss": 0.0155, "step": 9844 }, { "epoch": 10.866924351187189, "grad_norm": 0.7143242359161377, "learning_rate": 2.9525999999999998e-05, "loss": 0.0197, "step": 9845 }, { "epoch": 10.868028713418001, "grad_norm": 0.2774052619934082, 
"learning_rate": 2.9528999999999998e-05, "loss": 0.0085, "step": 9846 }, { "epoch": 10.869133075648813, "grad_norm": 0.3125241994857788, "learning_rate": 2.9532e-05, "loss": 0.0153, "step": 9847 }, { "epoch": 10.870237437879624, "grad_norm": 0.2984626889228821, "learning_rate": 2.9535e-05, "loss": 0.0215, "step": 9848 }, { "epoch": 10.871341800110436, "grad_norm": 0.21442267298698425, "learning_rate": 2.9538e-05, "loss": 0.0107, "step": 9849 }, { "epoch": 10.872446162341248, "grad_norm": 0.36018890142440796, "learning_rate": 2.9541e-05, "loss": 0.011, "step": 9850 }, { "epoch": 10.87355052457206, "grad_norm": 0.7351223230361938, "learning_rate": 2.9544e-05, "loss": 0.0184, "step": 9851 }, { "epoch": 10.87465488680287, "grad_norm": 0.3424459993839264, "learning_rate": 2.9547e-05, "loss": 0.0176, "step": 9852 }, { "epoch": 10.875759249033683, "grad_norm": 0.5611189603805542, "learning_rate": 2.955e-05, "loss": 0.0151, "step": 9853 }, { "epoch": 10.876863611264495, "grad_norm": 0.7802550792694092, "learning_rate": 2.9553e-05, "loss": 0.0189, "step": 9854 }, { "epoch": 10.877967973495306, "grad_norm": 0.2603278160095215, "learning_rate": 2.9556e-05, "loss": 0.0113, "step": 9855 }, { "epoch": 10.879072335726118, "grad_norm": 0.24445924162864685, "learning_rate": 2.9559e-05, "loss": 0.0108, "step": 9856 }, { "epoch": 10.88017669795693, "grad_norm": 0.3786469101905823, "learning_rate": 2.9562000000000003e-05, "loss": 0.0246, "step": 9857 }, { "epoch": 10.881281060187742, "grad_norm": 0.6446999311447144, "learning_rate": 2.9565000000000002e-05, "loss": 0.03, "step": 9858 }, { "epoch": 10.882385422418553, "grad_norm": 0.7272006273269653, "learning_rate": 2.9568000000000002e-05, "loss": 0.0289, "step": 9859 }, { "epoch": 10.883489784649365, "grad_norm": 0.6948516964912415, "learning_rate": 2.9571000000000002e-05, "loss": 0.0241, "step": 9860 }, { "epoch": 10.884594146880177, "grad_norm": 0.9502177834510803, "learning_rate": 2.9574000000000002e-05, "loss": 0.2658, "step": 
9861 }, { "epoch": 10.885698509110988, "grad_norm": 0.4830290973186493, "learning_rate": 2.9577e-05, "loss": 0.1822, "step": 9862 }, { "epoch": 10.8868028713418, "grad_norm": 0.9558109641075134, "learning_rate": 2.958e-05, "loss": 0.1796, "step": 9863 }, { "epoch": 10.887907233572612, "grad_norm": 0.5970392823219299, "learning_rate": 2.9583e-05, "loss": 0.1703, "step": 9864 }, { "epoch": 10.889011595803424, "grad_norm": 0.5917437076568604, "learning_rate": 2.9586e-05, "loss": 0.1116, "step": 9865 }, { "epoch": 10.890115958034235, "grad_norm": 0.5365331172943115, "learning_rate": 2.9589e-05, "loss": 0.0885, "step": 9866 }, { "epoch": 10.891220320265047, "grad_norm": 0.4946366548538208, "learning_rate": 2.9592000000000004e-05, "loss": 0.0601, "step": 9867 }, { "epoch": 10.89232468249586, "grad_norm": 0.6115498542785645, "learning_rate": 2.9595e-05, "loss": 0.0562, "step": 9868 }, { "epoch": 10.89342904472667, "grad_norm": 0.3165494501590729, "learning_rate": 2.9598e-05, "loss": 0.0252, "step": 9869 }, { "epoch": 10.894533406957482, "grad_norm": 0.30607670545578003, "learning_rate": 2.9601e-05, "loss": 0.0392, "step": 9870 }, { "epoch": 10.895637769188294, "grad_norm": 0.4071171283721924, "learning_rate": 2.9604e-05, "loss": 0.0176, "step": 9871 }, { "epoch": 10.896742131419105, "grad_norm": 0.44056543707847595, "learning_rate": 2.9607e-05, "loss": 0.0222, "step": 9872 }, { "epoch": 10.897846493649917, "grad_norm": 0.40947091579437256, "learning_rate": 2.961e-05, "loss": 0.0162, "step": 9873 }, { "epoch": 10.898950855880729, "grad_norm": 0.24769741296768188, "learning_rate": 2.9613e-05, "loss": 0.0398, "step": 9874 }, { "epoch": 10.900055218111541, "grad_norm": 0.3427309989929199, "learning_rate": 2.9616e-05, "loss": 0.0199, "step": 9875 }, { "epoch": 10.901159580342352, "grad_norm": 0.272121399641037, "learning_rate": 2.9619e-05, "loss": 0.0155, "step": 9876 }, { "epoch": 10.902263942573164, "grad_norm": 0.21025016903877258, "learning_rate": 2.9622000000000002e-05, 
"loss": 0.0143, "step": 9877 }, { "epoch": 10.903368304803976, "grad_norm": 0.27126893401145935, "learning_rate": 2.9625000000000002e-05, "loss": 0.0146, "step": 9878 }, { "epoch": 10.904472667034787, "grad_norm": 0.26111650466918945, "learning_rate": 2.9628e-05, "loss": 0.0145, "step": 9879 }, { "epoch": 10.905577029265599, "grad_norm": 0.26832476258277893, "learning_rate": 2.9631e-05, "loss": 0.0174, "step": 9880 }, { "epoch": 10.906681391496411, "grad_norm": 0.24775756895542145, "learning_rate": 2.9634e-05, "loss": 0.0144, "step": 9881 }, { "epoch": 10.907785753727222, "grad_norm": 0.31424999237060547, "learning_rate": 2.9637e-05, "loss": 0.013, "step": 9882 }, { "epoch": 10.908890115958034, "grad_norm": 0.28007444739341736, "learning_rate": 2.964e-05, "loss": 0.0155, "step": 9883 }, { "epoch": 10.909994478188846, "grad_norm": 0.47148725390434265, "learning_rate": 2.9643e-05, "loss": 0.0183, "step": 9884 }, { "epoch": 10.911098840419658, "grad_norm": 0.3622795641422272, "learning_rate": 2.9646e-05, "loss": 0.0549, "step": 9885 }, { "epoch": 10.912203202650469, "grad_norm": 0.2474776655435562, "learning_rate": 2.9649e-05, "loss": 0.0087, "step": 9886 }, { "epoch": 10.913307564881281, "grad_norm": 0.7485185265541077, "learning_rate": 2.9652e-05, "loss": 0.0512, "step": 9887 }, { "epoch": 10.914411927112093, "grad_norm": 0.4070633351802826, "learning_rate": 2.9655000000000003e-05, "loss": 0.0144, "step": 9888 }, { "epoch": 10.915516289342904, "grad_norm": 0.3019484877586365, "learning_rate": 2.9658000000000003e-05, "loss": 0.0157, "step": 9889 }, { "epoch": 10.916620651573716, "grad_norm": 0.3593926429748535, "learning_rate": 2.9661000000000003e-05, "loss": 0.0141, "step": 9890 }, { "epoch": 10.917725013804528, "grad_norm": 0.33803504705429077, "learning_rate": 2.9664000000000003e-05, "loss": 0.0168, "step": 9891 }, { "epoch": 10.91882937603534, "grad_norm": 0.4631496071815491, "learning_rate": 2.9667000000000002e-05, "loss": 0.0174, "step": 9892 }, { "epoch": 
10.91993373826615, "grad_norm": 0.36693382263183594, "learning_rate": 2.967e-05, "loss": 0.0172, "step": 9893 }, { "epoch": 10.921038100496963, "grad_norm": 0.5375955104827881, "learning_rate": 2.9673e-05, "loss": 0.0211, "step": 9894 }, { "epoch": 10.922142462727775, "grad_norm": 0.3688669204711914, "learning_rate": 2.9676e-05, "loss": 0.0146, "step": 9895 }, { "epoch": 10.923246824958586, "grad_norm": 0.42692384123802185, "learning_rate": 2.9678999999999998e-05, "loss": 0.0161, "step": 9896 }, { "epoch": 10.924351187189398, "grad_norm": 0.33557435870170593, "learning_rate": 2.9681999999999998e-05, "loss": 0.0158, "step": 9897 }, { "epoch": 10.92545554942021, "grad_norm": 0.5136989951133728, "learning_rate": 2.9685e-05, "loss": 0.014, "step": 9898 }, { "epoch": 10.926559911651022, "grad_norm": 0.8768578171730042, "learning_rate": 2.9688e-05, "loss": 0.0347, "step": 9899 }, { "epoch": 10.927664273881833, "grad_norm": 0.337049275636673, "learning_rate": 2.9691e-05, "loss": 0.0102, "step": 9900 }, { "epoch": 10.928768636112645, "grad_norm": 0.37737882137298584, "learning_rate": 2.9694e-05, "loss": 0.0193, "step": 9901 }, { "epoch": 10.929872998343457, "grad_norm": 0.3172023892402649, "learning_rate": 2.9697e-05, "loss": 0.0097, "step": 9902 }, { "epoch": 10.930977360574268, "grad_norm": 0.4793936610221863, "learning_rate": 2.97e-05, "loss": 0.0245, "step": 9903 }, { "epoch": 10.93208172280508, "grad_norm": 0.39066338539123535, "learning_rate": 2.9703e-05, "loss": 0.0233, "step": 9904 }, { "epoch": 10.933186085035892, "grad_norm": 0.32870322465896606, "learning_rate": 2.9706e-05, "loss": 0.0156, "step": 9905 }, { "epoch": 10.934290447266704, "grad_norm": 0.480912446975708, "learning_rate": 2.9709e-05, "loss": 0.0209, "step": 9906 }, { "epoch": 10.935394809497515, "grad_norm": 0.40588054060935974, "learning_rate": 2.9712e-05, "loss": 0.0125, "step": 9907 }, { "epoch": 10.936499171728327, "grad_norm": 0.3789694309234619, "learning_rate": 2.9715000000000003e-05, "loss": 
0.0224, "step": 9908 }, { "epoch": 10.93760353395914, "grad_norm": 0.3747263252735138, "learning_rate": 2.9718000000000002e-05, "loss": 0.0153, "step": 9909 }, { "epoch": 10.93870789618995, "grad_norm": 0.3664429485797882, "learning_rate": 2.9721000000000002e-05, "loss": 0.0224, "step": 9910 }, { "epoch": 10.939812258420762, "grad_norm": 0.8875914812088013, "learning_rate": 2.9724000000000002e-05, "loss": 0.2195, "step": 9911 }, { "epoch": 10.940916620651574, "grad_norm": 0.5866302847862244, "learning_rate": 2.9727000000000002e-05, "loss": 0.157, "step": 9912 }, { "epoch": 10.942020982882385, "grad_norm": 0.7133582830429077, "learning_rate": 2.973e-05, "loss": 0.16, "step": 9913 }, { "epoch": 10.943125345113197, "grad_norm": 0.9067575335502625, "learning_rate": 2.9733e-05, "loss": 0.105, "step": 9914 }, { "epoch": 10.94422970734401, "grad_norm": 0.7583879232406616, "learning_rate": 2.9736e-05, "loss": 0.1104, "step": 9915 }, { "epoch": 10.945334069574821, "grad_norm": 0.51625657081604, "learning_rate": 2.9739e-05, "loss": 0.0735, "step": 9916 }, { "epoch": 10.946438431805632, "grad_norm": 0.49565815925598145, "learning_rate": 2.9742e-05, "loss": 0.0737, "step": 9917 }, { "epoch": 10.947542794036444, "grad_norm": 0.7357165813446045, "learning_rate": 2.9745e-05, "loss": 0.1467, "step": 9918 }, { "epoch": 10.948647156267256, "grad_norm": 1.3064937591552734, "learning_rate": 2.9748e-05, "loss": 0.0555, "step": 9919 }, { "epoch": 10.949751518498067, "grad_norm": 0.5688902735710144, "learning_rate": 2.9751e-05, "loss": 0.0417, "step": 9920 }, { "epoch": 10.950855880728879, "grad_norm": 0.4647134244441986, "learning_rate": 2.9754e-05, "loss": 0.0482, "step": 9921 }, { "epoch": 10.951960242959691, "grad_norm": 0.4388723075389862, "learning_rate": 2.9757e-05, "loss": 0.0134, "step": 9922 }, { "epoch": 10.953064605190502, "grad_norm": 0.46475914120674133, "learning_rate": 2.976e-05, "loss": 0.0322, "step": 9923 }, { "epoch": 10.954168967421314, "grad_norm": 
0.3983992338180542, "learning_rate": 2.9763e-05, "loss": 0.0337, "step": 9924 }, { "epoch": 10.955273329652126, "grad_norm": 0.6252115368843079, "learning_rate": 2.9766e-05, "loss": 0.0234, "step": 9925 }, { "epoch": 10.956377691882938, "grad_norm": 0.22923479974269867, "learning_rate": 2.9769e-05, "loss": 0.0153, "step": 9926 }, { "epoch": 10.957482054113749, "grad_norm": 0.21374277770519257, "learning_rate": 2.9772e-05, "loss": 0.0168, "step": 9927 }, { "epoch": 10.958586416344561, "grad_norm": 0.14502495527267456, "learning_rate": 2.9775000000000002e-05, "loss": 0.0102, "step": 9928 }, { "epoch": 10.959690778575373, "grad_norm": 0.18813854455947876, "learning_rate": 2.9778000000000002e-05, "loss": 0.0095, "step": 9929 }, { "epoch": 10.960795140806184, "grad_norm": 0.34298667311668396, "learning_rate": 2.9781e-05, "loss": 0.0176, "step": 9930 }, { "epoch": 10.961899503036996, "grad_norm": 0.6882712244987488, "learning_rate": 2.9784e-05, "loss": 0.0165, "step": 9931 }, { "epoch": 10.963003865267808, "grad_norm": 0.4559646248817444, "learning_rate": 2.9787e-05, "loss": 0.0224, "step": 9932 }, { "epoch": 10.96410822749862, "grad_norm": 0.33721157908439636, "learning_rate": 2.979e-05, "loss": 0.0214, "step": 9933 }, { "epoch": 10.96521258972943, "grad_norm": 0.3795466423034668, "learning_rate": 2.9793e-05, "loss": 0.0232, "step": 9934 }, { "epoch": 10.966316951960243, "grad_norm": 0.6356733441352844, "learning_rate": 2.9796e-05, "loss": 0.0257, "step": 9935 }, { "epoch": 10.967421314191055, "grad_norm": 0.364357054233551, "learning_rate": 2.9799e-05, "loss": 0.0189, "step": 9936 }, { "epoch": 10.968525676421866, "grad_norm": 0.29308000206947327, "learning_rate": 2.9802e-05, "loss": 0.0146, "step": 9937 }, { "epoch": 10.969630038652678, "grad_norm": 0.3597131371498108, "learning_rate": 2.9805000000000003e-05, "loss": 0.0212, "step": 9938 }, { "epoch": 10.97073440088349, "grad_norm": 0.251097172498703, "learning_rate": 2.9808000000000003e-05, "loss": 0.011, "step": 
9939 }, { "epoch": 10.971838763114302, "grad_norm": 0.5886235237121582, "learning_rate": 2.9811000000000003e-05, "loss": 0.0196, "step": 9940 }, { "epoch": 10.972943125345113, "grad_norm": 0.2954232096672058, "learning_rate": 2.9814000000000003e-05, "loss": 0.0119, "step": 9941 }, { "epoch": 10.974047487575925, "grad_norm": 0.7275393605232239, "learning_rate": 2.9817e-05, "loss": 0.0225, "step": 9942 }, { "epoch": 10.975151849806737, "grad_norm": 0.8097251057624817, "learning_rate": 2.982e-05, "loss": 0.0211, "step": 9943 }, { "epoch": 10.976256212037548, "grad_norm": 0.40904751420021057, "learning_rate": 2.9823e-05, "loss": 0.0125, "step": 9944 }, { "epoch": 10.97736057426836, "grad_norm": 0.4248002767562866, "learning_rate": 2.9826e-05, "loss": 0.0229, "step": 9945 }, { "epoch": 10.978464936499172, "grad_norm": 0.365768700838089, "learning_rate": 2.9829e-05, "loss": 0.015, "step": 9946 }, { "epoch": 10.979569298729983, "grad_norm": 1.4787030220031738, "learning_rate": 2.9831999999999998e-05, "loss": 0.017, "step": 9947 }, { "epoch": 10.980673660960795, "grad_norm": 0.41839781403541565, "learning_rate": 2.9835e-05, "loss": 0.0107, "step": 9948 }, { "epoch": 10.981778023191607, "grad_norm": 0.40017712116241455, "learning_rate": 2.9838e-05, "loss": 0.0136, "step": 9949 }, { "epoch": 10.98288238542242, "grad_norm": 0.5630145072937012, "learning_rate": 2.9841e-05, "loss": 0.0265, "step": 9950 }, { "epoch": 10.98398674765323, "grad_norm": 0.583626389503479, "learning_rate": 2.9844e-05, "loss": 0.0189, "step": 9951 }, { "epoch": 10.985091109884042, "grad_norm": 0.4032357931137085, "learning_rate": 2.9847e-05, "loss": 0.019, "step": 9952 }, { "epoch": 10.986195472114854, "grad_norm": 0.4035950303077698, "learning_rate": 2.985e-05, "loss": 0.0157, "step": 9953 }, { "epoch": 10.987299834345665, "grad_norm": 0.5922316908836365, "learning_rate": 2.9853e-05, "loss": 0.021, "step": 9954 }, { "epoch": 10.988404196576477, "grad_norm": 0.3786817491054535, "learning_rate": 
2.9856e-05, "loss": 0.0218, "step": 9955 }, { "epoch": 10.98950855880729, "grad_norm": 0.59747314453125, "learning_rate": 2.9859e-05, "loss": 0.0281, "step": 9956 }, { "epoch": 10.9906129210381, "grad_norm": 0.387344628572464, "learning_rate": 2.9862e-05, "loss": 0.0181, "step": 9957 }, { "epoch": 10.991717283268912, "grad_norm": 0.41282686591148376, "learning_rate": 2.9865000000000003e-05, "loss": 0.0287, "step": 9958 }, { "epoch": 10.992821645499724, "grad_norm": 0.33917921781539917, "learning_rate": 2.9868000000000003e-05, "loss": 0.014, "step": 9959 }, { "epoch": 10.993926007730536, "grad_norm": 0.7398399710655212, "learning_rate": 2.9871000000000003e-05, "loss": 0.0326, "step": 9960 }, { "epoch": 10.995030369961347, "grad_norm": 0.652824342250824, "learning_rate": 2.9874000000000002e-05, "loss": 0.1173, "step": 9961 }, { "epoch": 10.996134732192159, "grad_norm": 0.24862349033355713, "learning_rate": 2.9877000000000002e-05, "loss": 0.0194, "step": 9962 }, { "epoch": 10.997239094422971, "grad_norm": 0.35574015974998474, "learning_rate": 2.9880000000000002e-05, "loss": 0.0166, "step": 9963 }, { "epoch": 10.998343456653782, "grad_norm": 0.37385886907577515, "learning_rate": 2.9883000000000002e-05, "loss": 0.0179, "step": 9964 }, { "epoch": 10.999447818884594, "grad_norm": 0.3345628082752228, "learning_rate": 2.9886e-05, "loss": 0.0246, "step": 9965 }, { "epoch": 11.0, "grad_norm": 0.5817747712135315, "learning_rate": 2.9889e-05, "loss": 0.011, "step": 9966 }, { "epoch": 11.001104362230812, "grad_norm": 0.73814457654953, "learning_rate": 2.9891999999999998e-05, "loss": 0.1926, "step": 9967 }, { "epoch": 11.002208724461623, "grad_norm": 0.5850111246109009, "learning_rate": 2.9895e-05, "loss": 0.1561, "step": 9968 }, { "epoch": 11.003313086692435, "grad_norm": 1.029280424118042, "learning_rate": 2.9898e-05, "loss": 0.1891, "step": 9969 }, { "epoch": 11.004417448923247, "grad_norm": 0.439318984746933, "learning_rate": 2.9901e-05, "loss": 0.0967, "step": 9970 }, { 
"epoch": 11.00552181115406, "grad_norm": 0.6723684072494507, "learning_rate": 2.9904e-05, "loss": 0.128, "step": 9971 }, { "epoch": 11.00662617338487, "grad_norm": 0.4867003262042999, "learning_rate": 2.9907e-05, "loss": 0.0955, "step": 9972 }, { "epoch": 11.007730535615682, "grad_norm": 0.3927893042564392, "learning_rate": 2.991e-05, "loss": 0.067, "step": 9973 }, { "epoch": 11.008834897846494, "grad_norm": 0.33486148715019226, "learning_rate": 2.9913e-05, "loss": 0.0348, "step": 9974 }, { "epoch": 11.009939260077305, "grad_norm": 0.594277024269104, "learning_rate": 2.9916e-05, "loss": 0.067, "step": 9975 }, { "epoch": 11.011043622308117, "grad_norm": 0.18921399116516113, "learning_rate": 2.9919e-05, "loss": 0.017, "step": 9976 }, { "epoch": 11.01214798453893, "grad_norm": 0.2635926902294159, "learning_rate": 2.9922e-05, "loss": 0.0208, "step": 9977 }, { "epoch": 11.01325234676974, "grad_norm": 0.22340033948421478, "learning_rate": 2.9925000000000002e-05, "loss": 0.0177, "step": 9978 }, { "epoch": 11.014356709000552, "grad_norm": 0.2229943424463272, "learning_rate": 2.9928000000000002e-05, "loss": 0.0154, "step": 9979 }, { "epoch": 11.015461071231364, "grad_norm": 0.20994162559509277, "learning_rate": 2.9931000000000002e-05, "loss": 0.0151, "step": 9980 }, { "epoch": 11.016565433462176, "grad_norm": 0.1649070680141449, "learning_rate": 2.9934000000000002e-05, "loss": 0.0119, "step": 9981 }, { "epoch": 11.017669795692987, "grad_norm": 0.7428545355796814, "learning_rate": 2.9937e-05, "loss": 0.0234, "step": 9982 }, { "epoch": 11.018774157923799, "grad_norm": 0.3794715702533722, "learning_rate": 2.994e-05, "loss": 0.0263, "step": 9983 }, { "epoch": 11.019878520154611, "grad_norm": 0.3397858142852783, "learning_rate": 2.9943e-05, "loss": 0.0177, "step": 9984 }, { "epoch": 11.020982882385422, "grad_norm": 0.3211345970630646, "learning_rate": 2.9946e-05, "loss": 0.013, "step": 9985 }, { "epoch": 11.022087244616234, "grad_norm": 1.3759241104125977, "learning_rate": 
2.9949e-05, "loss": 0.0356, "step": 9986 }, { "epoch": 11.023191606847046, "grad_norm": 0.2719171643257141, "learning_rate": 2.9952e-05, "loss": 0.0144, "step": 9987 }, { "epoch": 11.024295969077858, "grad_norm": 0.27154406905174255, "learning_rate": 2.9955000000000004e-05, "loss": 0.017, "step": 9988 }, { "epoch": 11.025400331308669, "grad_norm": 0.4588819742202759, "learning_rate": 2.9958000000000004e-05, "loss": 0.0187, "step": 9989 }, { "epoch": 11.026504693539481, "grad_norm": 0.24512076377868652, "learning_rate": 2.9961000000000003e-05, "loss": 0.0127, "step": 9990 }, { "epoch": 11.027609055770293, "grad_norm": 0.15573880076408386, "learning_rate": 2.9964e-05, "loss": 0.0091, "step": 9991 }, { "epoch": 11.028713418001104, "grad_norm": 0.15379509329795837, "learning_rate": 2.9967e-05, "loss": 0.0069, "step": 9992 }, { "epoch": 11.029817780231916, "grad_norm": 0.5157398581504822, "learning_rate": 2.997e-05, "loss": 0.0164, "step": 9993 }, { "epoch": 11.030922142462728, "grad_norm": 0.3887726962566376, "learning_rate": 2.9973e-05, "loss": 0.0201, "step": 9994 }, { "epoch": 11.032026504693539, "grad_norm": 0.5436230897903442, "learning_rate": 2.9976e-05, "loss": 0.0141, "step": 9995 }, { "epoch": 11.03313086692435, "grad_norm": 0.2348293662071228, "learning_rate": 2.9979e-05, "loss": 0.0065, "step": 9996 }, { "epoch": 11.034235229155163, "grad_norm": 0.39155763387680054, "learning_rate": 2.9982e-05, "loss": 0.0132, "step": 9997 }, { "epoch": 11.035339591385975, "grad_norm": 0.2196398228406906, "learning_rate": 2.9985000000000002e-05, "loss": 0.0126, "step": 9998 }, { "epoch": 11.036443953616786, "grad_norm": 0.30943191051483154, "learning_rate": 2.9988e-05, "loss": 0.0134, "step": 9999 }, { "epoch": 11.037548315847598, "grad_norm": 0.34953969717025757, "learning_rate": 2.9991e-05, "loss": 0.0216, "step": 10000 }, { "epoch": 11.037548315847598, "eval_cer": 0.1223526972029966, "eval_loss": 0.35794496536254883, "eval_runtime": 15.6603, "eval_samples_per_second": 
19.412, "eval_steps_per_second": 0.639, "eval_wer": 0.4224865694551036, "step": 10000 }, { "epoch": 11.03865267807841, "grad_norm": 0.30372533202171326, "learning_rate": 2.9994e-05, "loss": 0.0137, "step": 10001 }, { "epoch": 11.03975704030922, "grad_norm": 0.3520931899547577, "learning_rate": 2.9997e-05, "loss": 0.0175, "step": 10002 }, { "epoch": 11.040861402540033, "grad_norm": 0.3181432783603668, "learning_rate": 3e-05, "loss": 0.0138, "step": 10003 }, { "epoch": 11.041965764770845, "grad_norm": 0.4473554790019989, "learning_rate": 2.999966666666667e-05, "loss": 0.0131, "step": 10004 }, { "epoch": 11.043070127001657, "grad_norm": 0.2978632152080536, "learning_rate": 2.9999333333333333e-05, "loss": 0.0136, "step": 10005 }, { "epoch": 11.044174489232468, "grad_norm": 0.38981541991233826, "learning_rate": 2.9999000000000002e-05, "loss": 0.0114, "step": 10006 }, { "epoch": 11.04527885146328, "grad_norm": 0.7827956080436707, "learning_rate": 2.9998666666666668e-05, "loss": 0.0412, "step": 10007 }, { "epoch": 11.046383213694092, "grad_norm": 0.38421937823295593, "learning_rate": 2.9998333333333334e-05, "loss": 0.0201, "step": 10008 }, { "epoch": 11.047487575924903, "grad_norm": 0.3885634243488312, "learning_rate": 2.9998e-05, "loss": 0.0158, "step": 10009 }, { "epoch": 11.048591938155715, "grad_norm": 0.3233562707901001, "learning_rate": 2.999766666666667e-05, "loss": 0.0188, "step": 10010 }, { "epoch": 11.049696300386527, "grad_norm": 0.5210999250411987, "learning_rate": 2.999733333333333e-05, "loss": 0.0231, "step": 10011 }, { "epoch": 11.050800662617338, "grad_norm": 0.24405449628829956, "learning_rate": 2.9997e-05, "loss": 0.0108, "step": 10012 }, { "epoch": 11.05190502484815, "grad_norm": 0.7587125301361084, "learning_rate": 2.999666666666667e-05, "loss": 0.0318, "step": 10013 }, { "epoch": 11.053009387078962, "grad_norm": 0.3940199017524719, "learning_rate": 2.9996333333333333e-05, "loss": 0.0196, "step": 10014 }, { "epoch": 11.054113749309774, "grad_norm": 
0.4665983021259308, "learning_rate": 2.9996000000000002e-05, "loss": 0.0145, "step": 10015 }, { "epoch": 11.055218111540585, "grad_norm": 0.7240949869155884, "learning_rate": 2.9995666666666668e-05, "loss": 0.0149, "step": 10016 }, { "epoch": 11.056322473771397, "grad_norm": 0.9514729976654053, "learning_rate": 2.9995333333333334e-05, "loss": 0.2467, "step": 10017 }, { "epoch": 11.05742683600221, "grad_norm": 0.6382899880409241, "learning_rate": 2.9995e-05, "loss": 0.167, "step": 10018 }, { "epoch": 11.05853119823302, "grad_norm": 0.6041194796562195, "learning_rate": 2.9994666666666666e-05, "loss": 0.1453, "step": 10019 }, { "epoch": 11.059635560463832, "grad_norm": 0.5485204458236694, "learning_rate": 2.9994333333333335e-05, "loss": 0.1376, "step": 10020 }, { "epoch": 11.060739922694644, "grad_norm": 0.7601494193077087, "learning_rate": 2.9994e-05, "loss": 0.1165, "step": 10021 }, { "epoch": 11.061844284925456, "grad_norm": 0.3492348790168762, "learning_rate": 2.9993666666666667e-05, "loss": 0.0735, "step": 10022 }, { "epoch": 11.062948647156267, "grad_norm": 0.49401408433914185, "learning_rate": 2.9993333333333333e-05, "loss": 0.0824, "step": 10023 }, { "epoch": 11.064053009387079, "grad_norm": 0.275503009557724, "learning_rate": 2.9993000000000002e-05, "loss": 0.0366, "step": 10024 }, { "epoch": 11.065157371617891, "grad_norm": 0.23670180141925812, "learning_rate": 2.9992666666666665e-05, "loss": 0.0376, "step": 10025 }, { "epoch": 11.066261733848702, "grad_norm": 0.4811299443244934, "learning_rate": 2.9992333333333334e-05, "loss": 0.0747, "step": 10026 }, { "epoch": 11.067366096079514, "grad_norm": 0.20700380206108093, "learning_rate": 2.9992e-05, "loss": 0.0221, "step": 10027 }, { "epoch": 11.068470458310326, "grad_norm": 0.3319869935512543, "learning_rate": 2.9991666666666666e-05, "loss": 0.0205, "step": 10028 }, { "epoch": 11.069574820541137, "grad_norm": 0.619640588760376, "learning_rate": 2.9991333333333335e-05, "loss": 0.0192, "step": 10029 }, { "epoch": 
11.070679182771949, "grad_norm": 0.26366057991981506, "learning_rate": 2.9991e-05, "loss": 0.0215, "step": 10030 }, { "epoch": 11.071783545002761, "grad_norm": 0.294611394405365, "learning_rate": 2.9990666666666667e-05, "loss": 0.0213, "step": 10031 }, { "epoch": 11.072887907233573, "grad_norm": 0.18360282480716705, "learning_rate": 2.9990333333333333e-05, "loss": 0.0291, "step": 10032 }, { "epoch": 11.073992269464384, "grad_norm": 0.1946028172969818, "learning_rate": 2.9990000000000003e-05, "loss": 0.0108, "step": 10033 }, { "epoch": 11.075096631695196, "grad_norm": 0.24945197999477386, "learning_rate": 2.9989666666666665e-05, "loss": 0.0106, "step": 10034 }, { "epoch": 11.076200993926008, "grad_norm": 0.2585911154747009, "learning_rate": 2.9989333333333334e-05, "loss": 0.0145, "step": 10035 }, { "epoch": 11.077305356156819, "grad_norm": 0.4424404799938202, "learning_rate": 2.9989e-05, "loss": 0.0172, "step": 10036 }, { "epoch": 11.078409718387631, "grad_norm": 0.2811141610145569, "learning_rate": 2.9988666666666666e-05, "loss": 0.018, "step": 10037 }, { "epoch": 11.079514080618443, "grad_norm": 0.3757708966732025, "learning_rate": 2.9988333333333336e-05, "loss": 0.0247, "step": 10038 }, { "epoch": 11.080618442849255, "grad_norm": 0.2975042462348938, "learning_rate": 2.9988e-05, "loss": 0.0149, "step": 10039 }, { "epoch": 11.081722805080066, "grad_norm": 0.21143700182437897, "learning_rate": 2.9987666666666667e-05, "loss": 0.0092, "step": 10040 }, { "epoch": 11.082827167310878, "grad_norm": 0.794707179069519, "learning_rate": 2.9987333333333333e-05, "loss": 0.0243, "step": 10041 }, { "epoch": 11.08393152954169, "grad_norm": 0.3015452027320862, "learning_rate": 2.9987000000000003e-05, "loss": 0.0136, "step": 10042 }, { "epoch": 11.0850358917725, "grad_norm": 0.18836085498332977, "learning_rate": 2.9986666666666665e-05, "loss": 0.0123, "step": 10043 }, { "epoch": 11.086140254003313, "grad_norm": 0.21823374927043915, "learning_rate": 2.9986333333333335e-05, "loss": 
0.0082, "step": 10044 }, { "epoch": 11.087244616234125, "grad_norm": 0.33185461163520813, "learning_rate": 2.9986000000000004e-05, "loss": 0.0164, "step": 10045 }, { "epoch": 11.088348978464936, "grad_norm": 0.2314673364162445, "learning_rate": 2.9985666666666666e-05, "loss": 0.0159, "step": 10046 }, { "epoch": 11.089453340695748, "grad_norm": 0.8554831147193909, "learning_rate": 2.9985333333333336e-05, "loss": 0.0102, "step": 10047 }, { "epoch": 11.09055770292656, "grad_norm": 0.3739294707775116, "learning_rate": 2.9985000000000002e-05, "loss": 0.0252, "step": 10048 }, { "epoch": 11.091662065157372, "grad_norm": 0.4217591881752014, "learning_rate": 2.9984666666666668e-05, "loss": 0.03, "step": 10049 }, { "epoch": 11.092766427388183, "grad_norm": 0.42866605520248413, "learning_rate": 2.9984333333333334e-05, "loss": 0.0161, "step": 10050 }, { "epoch": 11.093870789618995, "grad_norm": 0.6577995419502258, "learning_rate": 2.9984e-05, "loss": 0.0209, "step": 10051 }, { "epoch": 11.094975151849807, "grad_norm": 0.7057844996452332, "learning_rate": 2.9983666666666665e-05, "loss": 0.0222, "step": 10052 }, { "epoch": 11.096079514080618, "grad_norm": 0.3712918162345886, "learning_rate": 2.9983333333333335e-05, "loss": 0.0225, "step": 10053 }, { "epoch": 11.09718387631143, "grad_norm": 0.23675139248371124, "learning_rate": 2.9983e-05, "loss": 0.0124, "step": 10054 }, { "epoch": 11.098288238542242, "grad_norm": 0.43601271510124207, "learning_rate": 2.9982666666666667e-05, "loss": 0.0148, "step": 10055 }, { "epoch": 11.099392600773054, "grad_norm": 0.41318753361701965, "learning_rate": 2.9982333333333336e-05, "loss": 0.0141, "step": 10056 }, { "epoch": 11.100496963003865, "grad_norm": 0.28707772493362427, "learning_rate": 2.9982e-05, "loss": 0.0177, "step": 10057 }, { "epoch": 11.101601325234677, "grad_norm": 0.4384709298610687, "learning_rate": 2.9981666666666668e-05, "loss": 0.0231, "step": 10058 }, { "epoch": 11.10270568746549, "grad_norm": 0.508992075920105, 
"learning_rate": 2.9981333333333334e-05, "loss": 0.021, "step": 10059 }, { "epoch": 11.1038100496963, "grad_norm": 0.5176553130149841, "learning_rate": 2.9981e-05, "loss": 0.0165, "step": 10060 }, { "epoch": 11.104914411927112, "grad_norm": 0.23432567715644836, "learning_rate": 2.9980666666666666e-05, "loss": 0.0092, "step": 10061 }, { "epoch": 11.106018774157924, "grad_norm": 0.2584088444709778, "learning_rate": 2.9980333333333335e-05, "loss": 0.0074, "step": 10062 }, { "epoch": 11.107123136388736, "grad_norm": 0.40257033705711365, "learning_rate": 2.998e-05, "loss": 0.0142, "step": 10063 }, { "epoch": 11.108227498619547, "grad_norm": 0.5552131533622742, "learning_rate": 2.9979666666666667e-05, "loss": 0.0138, "step": 10064 }, { "epoch": 11.109331860850359, "grad_norm": 0.39159080386161804, "learning_rate": 2.9979333333333336e-05, "loss": 0.0137, "step": 10065 }, { "epoch": 11.110436223081171, "grad_norm": 0.8295376896858215, "learning_rate": 2.9979e-05, "loss": 0.0263, "step": 10066 }, { "epoch": 11.111540585311982, "grad_norm": 0.6644278764724731, "learning_rate": 2.9978666666666668e-05, "loss": 0.1978, "step": 10067 }, { "epoch": 11.112644947542794, "grad_norm": 0.5441735982894897, "learning_rate": 2.9978333333333334e-05, "loss": 0.1031, "step": 10068 }, { "epoch": 11.113749309773606, "grad_norm": 0.6837537288665771, "learning_rate": 2.9978e-05, "loss": 0.167, "step": 10069 }, { "epoch": 11.114853672004417, "grad_norm": 0.7117825150489807, "learning_rate": 2.997766666666667e-05, "loss": 0.1456, "step": 10070 }, { "epoch": 11.115958034235229, "grad_norm": 0.6024593710899353, "learning_rate": 2.9977333333333335e-05, "loss": 0.0788, "step": 10071 }, { "epoch": 11.117062396466041, "grad_norm": 0.5210024118423462, "learning_rate": 2.9977e-05, "loss": 0.1013, "step": 10072 }, { "epoch": 11.118166758696853, "grad_norm": 0.34563469886779785, "learning_rate": 2.9976666666666667e-05, "loss": 0.0452, "step": 10073 }, { "epoch": 11.119271120927664, "grad_norm": 
0.5147132873535156, "learning_rate": 2.9976333333333336e-05, "loss": 0.0349, "step": 10074 }, { "epoch": 11.120375483158476, "grad_norm": 0.5052406787872314, "learning_rate": 2.9976e-05, "loss": 0.03, "step": 10075 }, { "epoch": 11.121479845389288, "grad_norm": 0.4778067171573639, "learning_rate": 2.9975666666666668e-05, "loss": 0.0362, "step": 10076 }, { "epoch": 11.122584207620099, "grad_norm": 0.3264540433883667, "learning_rate": 2.9975333333333334e-05, "loss": 0.0344, "step": 10077 }, { "epoch": 11.123688569850911, "grad_norm": 0.41637122631073, "learning_rate": 2.9975e-05, "loss": 0.0144, "step": 10078 }, { "epoch": 11.124792932081723, "grad_norm": 0.3758701682090759, "learning_rate": 2.997466666666667e-05, "loss": 0.0192, "step": 10079 }, { "epoch": 11.125897294312535, "grad_norm": 0.2683785855770111, "learning_rate": 2.9974333333333332e-05, "loss": 0.0181, "step": 10080 }, { "epoch": 11.127001656543346, "grad_norm": 0.2249894142150879, "learning_rate": 2.9974e-05, "loss": 0.0154, "step": 10081 }, { "epoch": 11.128106018774158, "grad_norm": 0.9018808007240295, "learning_rate": 2.9973666666666667e-05, "loss": 0.0568, "step": 10082 }, { "epoch": 11.12921038100497, "grad_norm": 0.2060561180114746, "learning_rate": 2.9973333333333333e-05, "loss": 0.0215, "step": 10083 }, { "epoch": 11.13031474323578, "grad_norm": 0.3978613615036011, "learning_rate": 2.9973e-05, "loss": 0.0339, "step": 10084 }, { "epoch": 11.131419105466593, "grad_norm": 0.34791457653045654, "learning_rate": 2.997266666666667e-05, "loss": 0.0186, "step": 10085 }, { "epoch": 11.132523467697405, "grad_norm": 0.265284925699234, "learning_rate": 2.997233333333333e-05, "loss": 0.019, "step": 10086 }, { "epoch": 11.133627829928216, "grad_norm": 0.2883354127407074, "learning_rate": 2.9972e-05, "loss": 0.0137, "step": 10087 }, { "epoch": 11.134732192159028, "grad_norm": 0.25158166885375977, "learning_rate": 2.997166666666667e-05, "loss": 0.0172, "step": 10088 }, { "epoch": 11.13583655438984, "grad_norm": 
0.17850637435913086, "learning_rate": 2.9971333333333332e-05, "loss": 0.0141, "step": 10089 }, { "epoch": 11.136940916620652, "grad_norm": 0.7880481481552124, "learning_rate": 2.9971e-05, "loss": 0.0234, "step": 10090 }, { "epoch": 11.138045278851463, "grad_norm": 0.23935164511203766, "learning_rate": 2.9970666666666667e-05, "loss": 0.0119, "step": 10091 }, { "epoch": 11.139149641082275, "grad_norm": 0.1800754815340042, "learning_rate": 2.9970333333333333e-05, "loss": 0.0103, "step": 10092 }, { "epoch": 11.140254003313087, "grad_norm": 0.2910502254962921, "learning_rate": 2.997e-05, "loss": 0.021, "step": 10093 }, { "epoch": 11.141358365543898, "grad_norm": 0.7035834193229675, "learning_rate": 2.996966666666667e-05, "loss": 0.0204, "step": 10094 }, { "epoch": 11.14246272777471, "grad_norm": 0.5563132762908936, "learning_rate": 2.9969333333333335e-05, "loss": 0.019, "step": 10095 }, { "epoch": 11.143567090005522, "grad_norm": 0.4014645516872406, "learning_rate": 2.9969e-05, "loss": 0.0197, "step": 10096 }, { "epoch": 11.144671452236334, "grad_norm": 0.47075217962265015, "learning_rate": 2.996866666666667e-05, "loss": 0.0137, "step": 10097 }, { "epoch": 11.145775814467145, "grad_norm": 0.1851145625114441, "learning_rate": 2.9968333333333332e-05, "loss": 0.0079, "step": 10098 }, { "epoch": 11.146880176697957, "grad_norm": 0.6119043231010437, "learning_rate": 2.9968000000000002e-05, "loss": 0.0253, "step": 10099 }, { "epoch": 11.14798453892877, "grad_norm": 0.26135769486427307, "learning_rate": 2.9967666666666668e-05, "loss": 0.0154, "step": 10100 }, { "epoch": 11.14908890115958, "grad_norm": 0.6846477389335632, "learning_rate": 2.9967333333333334e-05, "loss": 0.0469, "step": 10101 }, { "epoch": 11.150193263390392, "grad_norm": 0.21221128106117249, "learning_rate": 2.9967e-05, "loss": 0.0129, "step": 10102 }, { "epoch": 11.151297625621204, "grad_norm": 0.2809419631958008, "learning_rate": 2.996666666666667e-05, "loss": 0.0165, "step": 10103 }, { "epoch": 
11.152401987852015, "grad_norm": 0.351583331823349, "learning_rate": 2.9966333333333335e-05, "loss": 0.0162, "step": 10104 }, { "epoch": 11.153506350082827, "grad_norm": 0.6467921733856201, "learning_rate": 2.9966e-05, "loss": 0.0231, "step": 10105 }, { "epoch": 11.154610712313639, "grad_norm": 0.1600044220685959, "learning_rate": 2.996566666666667e-05, "loss": 0.01, "step": 10106 }, { "epoch": 11.155715074544451, "grad_norm": 0.38931477069854736, "learning_rate": 2.9965333333333333e-05, "loss": 0.0115, "step": 10107 }, { "epoch": 11.156819436775262, "grad_norm": 0.5922982096672058, "learning_rate": 2.9965000000000002e-05, "loss": 0.0177, "step": 10108 }, { "epoch": 11.157923799006074, "grad_norm": 0.9956671595573425, "learning_rate": 2.9964666666666664e-05, "loss": 0.0235, "step": 10109 }, { "epoch": 11.159028161236886, "grad_norm": 0.364974707365036, "learning_rate": 2.9964333333333334e-05, "loss": 0.0205, "step": 10110 }, { "epoch": 11.160132523467697, "grad_norm": 0.4183393716812134, "learning_rate": 2.9964e-05, "loss": 0.0203, "step": 10111 }, { "epoch": 11.161236885698509, "grad_norm": 0.6413516402244568, "learning_rate": 2.9963666666666666e-05, "loss": 0.0396, "step": 10112 }, { "epoch": 11.162341247929321, "grad_norm": 0.4140004515647888, "learning_rate": 2.9963333333333335e-05, "loss": 0.0158, "step": 10113 }, { "epoch": 11.163445610160133, "grad_norm": 0.4148670732975006, "learning_rate": 2.9963e-05, "loss": 0.0243, "step": 10114 }, { "epoch": 11.164549972390944, "grad_norm": 0.39020904898643494, "learning_rate": 2.9962666666666667e-05, "loss": 0.0119, "step": 10115 }, { "epoch": 11.165654334621756, "grad_norm": 0.4490702152252197, "learning_rate": 2.9962333333333333e-05, "loss": 0.0216, "step": 10116 }, { "epoch": 11.166758696852568, "grad_norm": 0.8901506066322327, "learning_rate": 2.9962000000000002e-05, "loss": 0.2187, "step": 10117 }, { "epoch": 11.167863059083379, "grad_norm": 0.6122726202011108, "learning_rate": 2.9961666666666665e-05, "loss": 
0.1453, "step": 10118 }, { "epoch": 11.168967421314191, "grad_norm": 0.5200590491294861, "learning_rate": 2.9961333333333334e-05, "loss": 0.1457, "step": 10119 }, { "epoch": 11.170071783545003, "grad_norm": 0.40144646167755127, "learning_rate": 2.9961000000000003e-05, "loss": 0.0653, "step": 10120 }, { "epoch": 11.171176145775814, "grad_norm": 0.5002021193504333, "learning_rate": 2.9960666666666666e-05, "loss": 0.094, "step": 10121 }, { "epoch": 11.172280508006626, "grad_norm": 0.5171406865119934, "learning_rate": 2.9960333333333335e-05, "loss": 0.0802, "step": 10122 }, { "epoch": 11.173384870237438, "grad_norm": 0.3407455384731293, "learning_rate": 2.996e-05, "loss": 0.0555, "step": 10123 }, { "epoch": 11.17448923246825, "grad_norm": 0.5389600992202759, "learning_rate": 2.9959666666666667e-05, "loss": 0.0554, "step": 10124 }, { "epoch": 11.17559359469906, "grad_norm": 0.3180895447731018, "learning_rate": 2.9959333333333333e-05, "loss": 0.0214, "step": 10125 }, { "epoch": 11.176697956929873, "grad_norm": 0.3128426969051361, "learning_rate": 2.9959000000000002e-05, "loss": 0.0339, "step": 10126 }, { "epoch": 11.177802319160685, "grad_norm": 0.4407701790332794, "learning_rate": 2.9958666666666665e-05, "loss": 0.028, "step": 10127 }, { "epoch": 11.178906681391496, "grad_norm": 0.30958718061447144, "learning_rate": 2.9958333333333334e-05, "loss": 0.0239, "step": 10128 }, { "epoch": 11.180011043622308, "grad_norm": 0.3524669110774994, "learning_rate": 2.9958000000000004e-05, "loss": 0.0148, "step": 10129 }, { "epoch": 11.18111540585312, "grad_norm": 0.4558661878108978, "learning_rate": 2.9957666666666666e-05, "loss": 0.0418, "step": 10130 }, { "epoch": 11.182219768083932, "grad_norm": 0.3131084144115448, "learning_rate": 2.9957333333333335e-05, "loss": 0.0142, "step": 10131 }, { "epoch": 11.183324130314743, "grad_norm": 0.2128073126077652, "learning_rate": 2.9957e-05, "loss": 0.0109, "step": 10132 }, { "epoch": 11.184428492545555, "grad_norm": 0.1875579059123993, 
"learning_rate": 2.9956666666666667e-05, "loss": 0.0122, "step": 10133 }, { "epoch": 11.185532854776367, "grad_norm": 0.2042914628982544, "learning_rate": 2.9956333333333333e-05, "loss": 0.0133, "step": 10134 }, { "epoch": 11.186637217007178, "grad_norm": 0.2626661956310272, "learning_rate": 2.9956000000000003e-05, "loss": 0.0151, "step": 10135 }, { "epoch": 11.18774157923799, "grad_norm": 0.2581774592399597, "learning_rate": 2.9955666666666665e-05, "loss": 0.0132, "step": 10136 }, { "epoch": 11.188845941468802, "grad_norm": 0.1374887228012085, "learning_rate": 2.9955333333333334e-05, "loss": 0.0126, "step": 10137 }, { "epoch": 11.189950303699613, "grad_norm": 0.27702170610427856, "learning_rate": 2.9955000000000004e-05, "loss": 0.0145, "step": 10138 }, { "epoch": 11.191054665930425, "grad_norm": 0.2762189507484436, "learning_rate": 2.9954666666666666e-05, "loss": 0.0139, "step": 10139 }, { "epoch": 11.192159028161237, "grad_norm": 0.40341776609420776, "learning_rate": 2.9954333333333336e-05, "loss": 0.017, "step": 10140 }, { "epoch": 11.19326339039205, "grad_norm": 0.4566856026649475, "learning_rate": 2.9953999999999998e-05, "loss": 0.0277, "step": 10141 }, { "epoch": 11.19436775262286, "grad_norm": 0.20736275613307953, "learning_rate": 2.9953666666666667e-05, "loss": 0.0111, "step": 10142 }, { "epoch": 11.195472114853672, "grad_norm": 0.22369548678398132, "learning_rate": 2.9953333333333333e-05, "loss": 0.0394, "step": 10143 }, { "epoch": 11.196576477084484, "grad_norm": 0.5665414333343506, "learning_rate": 2.9953e-05, "loss": 0.02, "step": 10144 }, { "epoch": 11.197680839315295, "grad_norm": 0.3387484848499298, "learning_rate": 2.995266666666667e-05, "loss": 0.0187, "step": 10145 }, { "epoch": 11.198785201546107, "grad_norm": 0.697171151638031, "learning_rate": 2.9952333333333335e-05, "loss": 0.0226, "step": 10146 }, { "epoch": 11.19988956377692, "grad_norm": 0.3076809048652649, "learning_rate": 2.9952e-05, "loss": 0.0122, "step": 10147 }, { "epoch": 
11.200993926007731, "grad_norm": 0.485030859708786, "learning_rate": 2.9951666666666666e-05, "loss": 0.0208, "step": 10148 }, { "epoch": 11.202098288238542, "grad_norm": 0.8381736278533936, "learning_rate": 2.9951333333333336e-05, "loss": 0.0178, "step": 10149 }, { "epoch": 11.203202650469354, "grad_norm": 0.25156185030937195, "learning_rate": 2.9951e-05, "loss": 0.0115, "step": 10150 }, { "epoch": 11.204307012700166, "grad_norm": 0.24417811632156372, "learning_rate": 2.9950666666666668e-05, "loss": 0.0096, "step": 10151 }, { "epoch": 11.205411374930977, "grad_norm": 0.3790668249130249, "learning_rate": 2.9950333333333334e-05, "loss": 0.0193, "step": 10152 }, { "epoch": 11.206515737161789, "grad_norm": 0.2872833013534546, "learning_rate": 2.995e-05, "loss": 0.0165, "step": 10153 }, { "epoch": 11.207620099392601, "grad_norm": 0.41781872510910034, "learning_rate": 2.994966666666667e-05, "loss": 0.0141, "step": 10154 }, { "epoch": 11.208724461623412, "grad_norm": 0.29499849677085876, "learning_rate": 2.9949333333333335e-05, "loss": 0.01, "step": 10155 }, { "epoch": 11.209828823854224, "grad_norm": 0.2676733732223511, "learning_rate": 2.9949e-05, "loss": 0.0117, "step": 10156 }, { "epoch": 11.210933186085036, "grad_norm": 0.3902641832828522, "learning_rate": 2.9948666666666667e-05, "loss": 0.0204, "step": 10157 }, { "epoch": 11.212037548315848, "grad_norm": 0.2091355323791504, "learning_rate": 2.9948333333333336e-05, "loss": 0.0078, "step": 10158 }, { "epoch": 11.213141910546659, "grad_norm": 0.5893541574478149, "learning_rate": 2.9948e-05, "loss": 0.0145, "step": 10159 }, { "epoch": 11.214246272777471, "grad_norm": 0.18009205162525177, "learning_rate": 2.9947666666666668e-05, "loss": 0.0145, "step": 10160 }, { "epoch": 11.215350635008283, "grad_norm": 0.4300857484340668, "learning_rate": 2.9947333333333334e-05, "loss": 0.0115, "step": 10161 }, { "epoch": 11.216454997239094, "grad_norm": 0.30915653705596924, "learning_rate": 2.9947e-05, "loss": 0.0161, "step": 10162 }, 
{ "epoch": 11.217559359469906, "grad_norm": 0.3057457208633423, "learning_rate": 2.994666666666667e-05, "loss": 0.0078, "step": 10163 }, { "epoch": 11.218663721700718, "grad_norm": 0.41398847103118896, "learning_rate": 2.9946333333333335e-05, "loss": 0.0165, "step": 10164 }, { "epoch": 11.21976808393153, "grad_norm": 0.6877546906471252, "learning_rate": 2.9946e-05, "loss": 0.0207, "step": 10165 }, { "epoch": 11.22087244616234, "grad_norm": 0.6072761416435242, "learning_rate": 2.9945666666666667e-05, "loss": 0.0233, "step": 10166 }, { "epoch": 11.221976808393153, "grad_norm": 0.8002055883407593, "learning_rate": 2.9945333333333336e-05, "loss": 0.3187, "step": 10167 }, { "epoch": 11.223081170623965, "grad_norm": 0.4937950074672699, "learning_rate": 2.9945e-05, "loss": 0.1125, "step": 10168 }, { "epoch": 11.224185532854776, "grad_norm": 0.4423198401927948, "learning_rate": 2.9944666666666668e-05, "loss": 0.0959, "step": 10169 }, { "epoch": 11.225289895085588, "grad_norm": 0.633002519607544, "learning_rate": 2.9944333333333334e-05, "loss": 0.1697, "step": 10170 }, { "epoch": 11.2263942573164, "grad_norm": 0.3835125267505646, "learning_rate": 2.9944e-05, "loss": 0.0624, "step": 10171 }, { "epoch": 11.22749861954721, "grad_norm": 0.5987542867660522, "learning_rate": 2.994366666666667e-05, "loss": 0.1252, "step": 10172 }, { "epoch": 11.228602981778023, "grad_norm": 0.3674895167350769, "learning_rate": 2.9943333333333332e-05, "loss": 0.0715, "step": 10173 }, { "epoch": 11.229707344008835, "grad_norm": 0.48892077803611755, "learning_rate": 2.9943e-05, "loss": 0.0533, "step": 10174 }, { "epoch": 11.230811706239647, "grad_norm": 0.27776822447776794, "learning_rate": 2.9942666666666667e-05, "loss": 0.0257, "step": 10175 }, { "epoch": 11.231916068470458, "grad_norm": 0.25272417068481445, "learning_rate": 2.9942333333333333e-05, "loss": 0.0211, "step": 10176 }, { "epoch": 11.23302043070127, "grad_norm": 0.24777239561080933, "learning_rate": 2.9942e-05, "loss": 0.0145, "step": 
10177 }, { "epoch": 11.234124792932082, "grad_norm": 0.363272488117218, "learning_rate": 2.9941666666666668e-05, "loss": 0.0259, "step": 10178 }, { "epoch": 11.235229155162893, "grad_norm": 0.26118651032447815, "learning_rate": 2.9941333333333334e-05, "loss": 0.0134, "step": 10179 }, { "epoch": 11.236333517393705, "grad_norm": 0.28845152258872986, "learning_rate": 2.9941e-05, "loss": 0.01, "step": 10180 }, { "epoch": 11.237437879624517, "grad_norm": 0.22796092927455902, "learning_rate": 2.994066666666667e-05, "loss": 0.0136, "step": 10181 }, { "epoch": 11.23854224185533, "grad_norm": 0.25251305103302, "learning_rate": 2.9940333333333332e-05, "loss": 0.0199, "step": 10182 }, { "epoch": 11.23964660408614, "grad_norm": 0.1988344043493271, "learning_rate": 2.994e-05, "loss": 0.0101, "step": 10183 }, { "epoch": 11.240750966316952, "grad_norm": 1.0552281141281128, "learning_rate": 2.9939666666666667e-05, "loss": 0.0242, "step": 10184 }, { "epoch": 11.241855328547764, "grad_norm": 0.33940383791923523, "learning_rate": 2.9939333333333333e-05, "loss": 0.0145, "step": 10185 }, { "epoch": 11.242959690778575, "grad_norm": 0.2650139331817627, "learning_rate": 2.9939e-05, "loss": 0.0091, "step": 10186 }, { "epoch": 11.244064053009387, "grad_norm": 0.18744662404060364, "learning_rate": 2.993866666666667e-05, "loss": 0.009, "step": 10187 }, { "epoch": 11.2451684152402, "grad_norm": 0.2154531031847, "learning_rate": 2.9938333333333334e-05, "loss": 0.0087, "step": 10188 }, { "epoch": 11.24627277747101, "grad_norm": 0.3869817554950714, "learning_rate": 2.9938e-05, "loss": 0.019, "step": 10189 }, { "epoch": 11.247377139701822, "grad_norm": 0.328603059053421, "learning_rate": 2.993766666666667e-05, "loss": 0.0149, "step": 10190 }, { "epoch": 11.248481501932634, "grad_norm": 0.3054795265197754, "learning_rate": 2.9937333333333332e-05, "loss": 0.0171, "step": 10191 }, { "epoch": 11.249585864163446, "grad_norm": 0.40714794397354126, "learning_rate": 2.9937e-05, "loss": 0.015, "step": 
10192 }, { "epoch": 11.250690226394257, "grad_norm": 0.5531030893325806, "learning_rate": 2.9936666666666667e-05, "loss": 0.0101, "step": 10193 }, { "epoch": 11.251794588625069, "grad_norm": 0.29206326603889465, "learning_rate": 2.9936333333333333e-05, "loss": 0.0146, "step": 10194 }, { "epoch": 11.252898950855881, "grad_norm": 0.25870510935783386, "learning_rate": 2.9936000000000003e-05, "loss": 0.0207, "step": 10195 }, { "epoch": 11.254003313086692, "grad_norm": 0.3810741901397705, "learning_rate": 2.993566666666667e-05, "loss": 0.0123, "step": 10196 }, { "epoch": 11.255107675317504, "grad_norm": 0.7966590523719788, "learning_rate": 2.9935333333333335e-05, "loss": 0.018, "step": 10197 }, { "epoch": 11.256212037548316, "grad_norm": 0.1135512962937355, "learning_rate": 2.9935e-05, "loss": 0.0049, "step": 10198 }, { "epoch": 11.257316399779128, "grad_norm": 0.1976097971200943, "learning_rate": 2.993466666666667e-05, "loss": 0.0067, "step": 10199 }, { "epoch": 11.258420762009939, "grad_norm": 0.2721356451511383, "learning_rate": 2.9934333333333332e-05, "loss": 0.0144, "step": 10200 }, { "epoch": 11.259525124240751, "grad_norm": 0.3821059465408325, "learning_rate": 2.9934000000000002e-05, "loss": 0.0139, "step": 10201 }, { "epoch": 11.260629486471563, "grad_norm": 0.3798726797103882, "learning_rate": 2.9933666666666664e-05, "loss": 0.0178, "step": 10202 }, { "epoch": 11.261733848702374, "grad_norm": 0.220615416765213, "learning_rate": 2.9933333333333334e-05, "loss": 0.0104, "step": 10203 }, { "epoch": 11.262838210933186, "grad_norm": 0.9462708830833435, "learning_rate": 2.9933000000000003e-05, "loss": 0.012, "step": 10204 }, { "epoch": 11.263942573163998, "grad_norm": 0.42389750480651855, "learning_rate": 2.9932666666666665e-05, "loss": 0.0185, "step": 10205 }, { "epoch": 11.26504693539481, "grad_norm": 1.3389087915420532, "learning_rate": 2.9932333333333335e-05, "loss": 0.0295, "step": 10206 }, { "epoch": 11.26615129762562, "grad_norm": 0.38337641954421997, 
"learning_rate": 2.9932e-05, "loss": 0.0127, "step": 10207 }, { "epoch": 11.267255659856433, "grad_norm": 0.23206891119480133, "learning_rate": 2.9931666666666667e-05, "loss": 0.0131, "step": 10208 }, { "epoch": 11.268360022087245, "grad_norm": 0.32852914929389954, "learning_rate": 2.9931333333333333e-05, "loss": 0.01, "step": 10209 }, { "epoch": 11.269464384318056, "grad_norm": 0.43999817967414856, "learning_rate": 2.9931000000000002e-05, "loss": 0.0156, "step": 10210 }, { "epoch": 11.270568746548868, "grad_norm": 0.6712571382522583, "learning_rate": 2.9930666666666668e-05, "loss": 0.0153, "step": 10211 }, { "epoch": 11.27167310877968, "grad_norm": 0.40381744503974915, "learning_rate": 2.9930333333333334e-05, "loss": 0.0186, "step": 10212 }, { "epoch": 11.27277747101049, "grad_norm": 0.6899452209472656, "learning_rate": 2.9930000000000003e-05, "loss": 0.0188, "step": 10213 }, { "epoch": 11.273881833241303, "grad_norm": 0.19690679013729095, "learning_rate": 2.9929666666666666e-05, "loss": 0.0087, "step": 10214 }, { "epoch": 11.274986195472115, "grad_norm": 0.1856355518102646, "learning_rate": 2.9929333333333335e-05, "loss": 0.0063, "step": 10215 }, { "epoch": 11.276090557702927, "grad_norm": 0.3432029187679291, "learning_rate": 2.9929e-05, "loss": 0.0167, "step": 10216 }, { "epoch": 11.277194919933738, "grad_norm": 0.7349125742912292, "learning_rate": 2.9928666666666667e-05, "loss": 0.2107, "step": 10217 }, { "epoch": 11.27829928216455, "grad_norm": 0.5369883179664612, "learning_rate": 2.9928333333333333e-05, "loss": 0.1861, "step": 10218 }, { "epoch": 11.279403644395362, "grad_norm": 0.5704773664474487, "learning_rate": 2.9928000000000002e-05, "loss": 0.1301, "step": 10219 }, { "epoch": 11.280508006626173, "grad_norm": 0.8177952170372009, "learning_rate": 2.9927666666666668e-05, "loss": 0.1122, "step": 10220 }, { "epoch": 11.281612368856985, "grad_norm": 0.6915560960769653, "learning_rate": 2.9927333333333334e-05, "loss": 0.1088, "step": 10221 }, { "epoch": 
11.282716731087797, "grad_norm": 0.5227735042572021, "learning_rate": 2.9927000000000003e-05, "loss": 0.0605, "step": 10222 }, { "epoch": 11.283821093318608, "grad_norm": 0.5576677322387695, "learning_rate": 2.9926666666666666e-05, "loss": 0.0587, "step": 10223 }, { "epoch": 11.28492545554942, "grad_norm": 0.46761515736579895, "learning_rate": 2.9926333333333335e-05, "loss": 0.0359, "step": 10224 }, { "epoch": 11.286029817780232, "grad_norm": 0.2478722780942917, "learning_rate": 2.9926e-05, "loss": 0.0252, "step": 10225 }, { "epoch": 11.287134180011044, "grad_norm": 0.7321341633796692, "learning_rate": 2.9925666666666667e-05, "loss": 0.0482, "step": 10226 }, { "epoch": 11.288238542241855, "grad_norm": 0.41536766290664673, "learning_rate": 2.9925333333333333e-05, "loss": 0.0221, "step": 10227 }, { "epoch": 11.289342904472667, "grad_norm": 0.43956461548805237, "learning_rate": 2.9925000000000002e-05, "loss": 0.0266, "step": 10228 }, { "epoch": 11.29044726670348, "grad_norm": 0.39524176716804504, "learning_rate": 2.9924666666666668e-05, "loss": 0.0217, "step": 10229 }, { "epoch": 11.29155162893429, "grad_norm": 0.21289978921413422, "learning_rate": 2.9924333333333334e-05, "loss": 0.0199, "step": 10230 }, { "epoch": 11.292655991165102, "grad_norm": 0.46455705165863037, "learning_rate": 2.9924e-05, "loss": 0.0246, "step": 10231 }, { "epoch": 11.293760353395914, "grad_norm": 0.36638814210891724, "learning_rate": 2.9923666666666666e-05, "loss": 0.0088, "step": 10232 }, { "epoch": 11.294864715626726, "grad_norm": 0.21192365884780884, "learning_rate": 2.9923333333333335e-05, "loss": 0.0148, "step": 10233 }, { "epoch": 11.295969077857537, "grad_norm": 0.22125545144081116, "learning_rate": 2.9922999999999998e-05, "loss": 0.0156, "step": 10234 }, { "epoch": 11.297073440088349, "grad_norm": 0.23231066763401031, "learning_rate": 2.9922666666666667e-05, "loss": 0.0114, "step": 10235 }, { "epoch": 11.298177802319161, "grad_norm": 0.2448897659778595, "learning_rate": 
2.9922333333333333e-05, "loss": 0.015, "step": 10236 }, { "epoch": 11.299282164549972, "grad_norm": 0.15583746135234833, "learning_rate": 2.9922e-05, "loss": 0.012, "step": 10237 }, { "epoch": 11.300386526780784, "grad_norm": 0.39418473839759827, "learning_rate": 2.992166666666667e-05, "loss": 0.0124, "step": 10238 }, { "epoch": 11.301490889011596, "grad_norm": 0.20723937451839447, "learning_rate": 2.9921333333333334e-05, "loss": 0.0124, "step": 10239 }, { "epoch": 11.302595251242408, "grad_norm": 0.40373754501342773, "learning_rate": 2.9921e-05, "loss": 0.0137, "step": 10240 }, { "epoch": 11.303699613473219, "grad_norm": 0.3571532964706421, "learning_rate": 2.9920666666666666e-05, "loss": 0.0187, "step": 10241 }, { "epoch": 11.304803975704031, "grad_norm": 0.1949436217546463, "learning_rate": 2.9920333333333336e-05, "loss": 0.0114, "step": 10242 }, { "epoch": 11.305908337934843, "grad_norm": 0.4228643774986267, "learning_rate": 2.9919999999999998e-05, "loss": 0.0132, "step": 10243 }, { "epoch": 11.307012700165654, "grad_norm": 0.43507230281829834, "learning_rate": 2.9919666666666667e-05, "loss": 0.047, "step": 10244 }, { "epoch": 11.308117062396466, "grad_norm": 0.36736586689949036, "learning_rate": 2.9919333333333337e-05, "loss": 0.0203, "step": 10245 }, { "epoch": 11.309221424627278, "grad_norm": 0.5283064246177673, "learning_rate": 2.9919e-05, "loss": 0.0207, "step": 10246 }, { "epoch": 11.310325786858089, "grad_norm": 0.5081459283828735, "learning_rate": 2.991866666666667e-05, "loss": 0.0178, "step": 10247 }, { "epoch": 11.3114301490889, "grad_norm": 0.24841931462287903, "learning_rate": 2.9918333333333335e-05, "loss": 0.0109, "step": 10248 }, { "epoch": 11.312534511319713, "grad_norm": 0.22651903331279755, "learning_rate": 2.9918e-05, "loss": 0.0091, "step": 10249 }, { "epoch": 11.313638873550525, "grad_norm": 0.2863112986087799, "learning_rate": 2.9917666666666666e-05, "loss": 0.0149, "step": 10250 }, { "epoch": 11.314743235781336, "grad_norm": 
0.1839294135570526, "learning_rate": 2.9917333333333336e-05, "loss": 0.009, "step": 10251 }, { "epoch": 11.315847598012148, "grad_norm": 0.39125731587409973, "learning_rate": 2.9917e-05, "loss": 0.0155, "step": 10252 }, { "epoch": 11.31695196024296, "grad_norm": 0.3790256679058075, "learning_rate": 2.9916666666666668e-05, "loss": 0.0078, "step": 10253 }, { "epoch": 11.31805632247377, "grad_norm": 0.39373111724853516, "learning_rate": 2.9916333333333337e-05, "loss": 0.0197, "step": 10254 }, { "epoch": 11.319160684704583, "grad_norm": 0.4696429669857025, "learning_rate": 2.9916e-05, "loss": 0.0167, "step": 10255 }, { "epoch": 11.320265046935395, "grad_norm": 0.5726073384284973, "learning_rate": 2.991566666666667e-05, "loss": 0.0286, "step": 10256 }, { "epoch": 11.321369409166207, "grad_norm": 0.3411710262298584, "learning_rate": 2.9915333333333335e-05, "loss": 0.0125, "step": 10257 }, { "epoch": 11.322473771397018, "grad_norm": 0.33163243532180786, "learning_rate": 2.9915e-05, "loss": 0.0137, "step": 10258 }, { "epoch": 11.32357813362783, "grad_norm": 0.3490345776081085, "learning_rate": 2.9914666666666667e-05, "loss": 0.0118, "step": 10259 }, { "epoch": 11.324682495858642, "grad_norm": 0.744145929813385, "learning_rate": 2.9914333333333336e-05, "loss": 0.0223, "step": 10260 }, { "epoch": 11.325786858089453, "grad_norm": 0.606468677520752, "learning_rate": 2.9914000000000002e-05, "loss": 0.0194, "step": 10261 }, { "epoch": 11.326891220320265, "grad_norm": 0.7351585626602173, "learning_rate": 2.9913666666666668e-05, "loss": 0.0236, "step": 10262 }, { "epoch": 11.327995582551077, "grad_norm": 0.3713708817958832, "learning_rate": 2.9913333333333334e-05, "loss": 0.0177, "step": 10263 }, { "epoch": 11.329099944781888, "grad_norm": 0.4629136323928833, "learning_rate": 2.9913e-05, "loss": 0.0164, "step": 10264 }, { "epoch": 11.3302043070127, "grad_norm": 0.7022992372512817, "learning_rate": 2.991266666666667e-05, "loss": 0.0237, "step": 10265 }, { "epoch": 
11.331308669243512, "grad_norm": 0.6375815868377686, "learning_rate": 2.991233333333333e-05, "loss": 0.0155, "step": 10266 }, { "epoch": 11.332413031474324, "grad_norm": 1.0023210048675537, "learning_rate": 2.9912e-05, "loss": 0.2229, "step": 10267 }, { "epoch": 11.333517393705135, "grad_norm": 0.6411682963371277, "learning_rate": 2.9911666666666667e-05, "loss": 0.1958, "step": 10268 }, { "epoch": 11.334621755935947, "grad_norm": 0.5967984795570374, "learning_rate": 2.9911333333333333e-05, "loss": 0.1632, "step": 10269 }, { "epoch": 11.33572611816676, "grad_norm": 0.5609418749809265, "learning_rate": 2.9911000000000002e-05, "loss": 0.1216, "step": 10270 }, { "epoch": 11.33683048039757, "grad_norm": 0.7432178854942322, "learning_rate": 2.9910666666666668e-05, "loss": 0.1309, "step": 10271 }, { "epoch": 11.337934842628382, "grad_norm": 0.46674585342407227, "learning_rate": 2.9910333333333334e-05, "loss": 0.0967, "step": 10272 }, { "epoch": 11.339039204859194, "grad_norm": 0.4285697042942047, "learning_rate": 2.991e-05, "loss": 0.0549, "step": 10273 }, { "epoch": 11.340143567090006, "grad_norm": 0.2833128869533539, "learning_rate": 2.990966666666667e-05, "loss": 0.0563, "step": 10274 }, { "epoch": 11.341247929320817, "grad_norm": 0.28725212812423706, "learning_rate": 2.9909333333333332e-05, "loss": 0.0342, "step": 10275 }, { "epoch": 11.342352291551629, "grad_norm": 0.34125837683677673, "learning_rate": 2.9909e-05, "loss": 0.0432, "step": 10276 }, { "epoch": 11.343456653782441, "grad_norm": 0.4502914845943451, "learning_rate": 2.9908666666666667e-05, "loss": 0.0142, "step": 10277 }, { "epoch": 11.344561016013252, "grad_norm": 0.33297401666641235, "learning_rate": 2.9908333333333333e-05, "loss": 0.0176, "step": 10278 }, { "epoch": 11.345665378244064, "grad_norm": 0.4351345896720886, "learning_rate": 2.9908000000000002e-05, "loss": 0.02, "step": 10279 }, { "epoch": 11.346769740474876, "grad_norm": 0.3868269622325897, "learning_rate": 2.9907666666666668e-05, "loss": 
0.0216, "step": 10280 }, { "epoch": 11.347874102705687, "grad_norm": 0.4154298007488251, "learning_rate": 2.9907333333333334e-05, "loss": 0.025, "step": 10281 }, { "epoch": 11.348978464936499, "grad_norm": 0.3439108431339264, "learning_rate": 2.9907e-05, "loss": 0.0231, "step": 10282 }, { "epoch": 11.350082827167311, "grad_norm": 0.6335203647613525, "learning_rate": 2.990666666666667e-05, "loss": 0.0171, "step": 10283 }, { "epoch": 11.351187189398123, "grad_norm": 1.1757937669754028, "learning_rate": 2.9906333333333332e-05, "loss": 0.0539, "step": 10284 }, { "epoch": 11.352291551628934, "grad_norm": 0.3964332640171051, "learning_rate": 2.9906e-05, "loss": 0.01, "step": 10285 }, { "epoch": 11.353395913859746, "grad_norm": 0.3988467752933502, "learning_rate": 2.9905666666666667e-05, "loss": 0.0155, "step": 10286 }, { "epoch": 11.354500276090558, "grad_norm": 0.15138423442840576, "learning_rate": 2.9905333333333333e-05, "loss": 0.0122, "step": 10287 }, { "epoch": 11.355604638321369, "grad_norm": 0.1923830211162567, "learning_rate": 2.9905000000000003e-05, "loss": 0.017, "step": 10288 }, { "epoch": 11.356709000552181, "grad_norm": 0.38281285762786865, "learning_rate": 2.990466666666667e-05, "loss": 0.0178, "step": 10289 }, { "epoch": 11.357813362782993, "grad_norm": 0.14444704353809357, "learning_rate": 2.9904333333333334e-05, "loss": 0.0078, "step": 10290 }, { "epoch": 11.358917725013805, "grad_norm": 0.3168007433414459, "learning_rate": 2.9904e-05, "loss": 0.0167, "step": 10291 }, { "epoch": 11.360022087244616, "grad_norm": 0.23766475915908813, "learning_rate": 2.9903666666666666e-05, "loss": 0.0119, "step": 10292 }, { "epoch": 11.361126449475428, "grad_norm": 0.2785351872444153, "learning_rate": 2.9903333333333332e-05, "loss": 0.009, "step": 10293 }, { "epoch": 11.36223081170624, "grad_norm": 0.24934400618076324, "learning_rate": 2.9903e-05, "loss": 0.0084, "step": 10294 }, { "epoch": 11.36333517393705, "grad_norm": 0.32987484335899353, "learning_rate": 
2.9902666666666667e-05, "loss": 0.0133, "step": 10295 }, { "epoch": 11.364439536167863, "grad_norm": 0.35398226976394653, "learning_rate": 2.9902333333333333e-05, "loss": 0.0126, "step": 10296 }, { "epoch": 11.365543898398675, "grad_norm": 0.3021194636821747, "learning_rate": 2.9902000000000003e-05, "loss": 0.0206, "step": 10297 }, { "epoch": 11.366648260629486, "grad_norm": 0.29852020740509033, "learning_rate": 2.9901666666666665e-05, "loss": 0.0165, "step": 10298 }, { "epoch": 11.367752622860298, "grad_norm": 3.240226984024048, "learning_rate": 2.9901333333333335e-05, "loss": 0.0152, "step": 10299 }, { "epoch": 11.36885698509111, "grad_norm": 0.283890962600708, "learning_rate": 2.9901e-05, "loss": 0.0142, "step": 10300 }, { "epoch": 11.369961347321922, "grad_norm": 0.16242901980876923, "learning_rate": 2.9900666666666666e-05, "loss": 0.008, "step": 10301 }, { "epoch": 11.371065709552733, "grad_norm": 0.4365086555480957, "learning_rate": 2.9900333333333332e-05, "loss": 0.0216, "step": 10302 }, { "epoch": 11.372170071783545, "grad_norm": 0.3466210961341858, "learning_rate": 2.9900000000000002e-05, "loss": 0.0103, "step": 10303 }, { "epoch": 11.373274434014357, "grad_norm": 0.5145957469940186, "learning_rate": 2.9899666666666668e-05, "loss": 0.0148, "step": 10304 }, { "epoch": 11.374378796245168, "grad_norm": 0.5608130097389221, "learning_rate": 2.9899333333333334e-05, "loss": 0.0159, "step": 10305 }, { "epoch": 11.37548315847598, "grad_norm": 0.42544320225715637, "learning_rate": 2.9899000000000003e-05, "loss": 0.0148, "step": 10306 }, { "epoch": 11.376587520706792, "grad_norm": 0.5410467982292175, "learning_rate": 2.9898666666666665e-05, "loss": 0.0166, "step": 10307 }, { "epoch": 11.377691882937604, "grad_norm": 0.36872968077659607, "learning_rate": 2.9898333333333335e-05, "loss": 0.0172, "step": 10308 }, { "epoch": 11.378796245168415, "grad_norm": 0.36773985624313354, "learning_rate": 2.9898e-05, "loss": 0.0098, "step": 10309 }, { "epoch": 11.379900607399227, 
"grad_norm": 0.6407467126846313, "learning_rate": 2.9897666666666667e-05, "loss": 0.0218, "step": 10310 }, { "epoch": 11.38100496963004, "grad_norm": 0.6913261413574219, "learning_rate": 2.9897333333333336e-05, "loss": 0.0255, "step": 10311 }, { "epoch": 11.38210933186085, "grad_norm": 0.361168771982193, "learning_rate": 2.9897000000000002e-05, "loss": 0.0192, "step": 10312 }, { "epoch": 11.383213694091662, "grad_norm": 0.5722167491912842, "learning_rate": 2.9896666666666668e-05, "loss": 0.0156, "step": 10313 }, { "epoch": 11.384318056322474, "grad_norm": 0.3566892743110657, "learning_rate": 2.9896333333333334e-05, "loss": 0.016, "step": 10314 }, { "epoch": 11.385422418553285, "grad_norm": 0.2719949781894684, "learning_rate": 2.9896000000000003e-05, "loss": 0.0083, "step": 10315 }, { "epoch": 11.386526780784097, "grad_norm": 0.4643316864967346, "learning_rate": 2.9895666666666666e-05, "loss": 0.0242, "step": 10316 }, { "epoch": 11.387631143014909, "grad_norm": 0.656139612197876, "learning_rate": 2.9895333333333335e-05, "loss": 0.1902, "step": 10317 }, { "epoch": 11.388735505245721, "grad_norm": 0.725371778011322, "learning_rate": 2.9895e-05, "loss": 0.1597, "step": 10318 }, { "epoch": 11.389839867476532, "grad_norm": 0.5199651122093201, "learning_rate": 2.9894666666666667e-05, "loss": 0.0948, "step": 10319 }, { "epoch": 11.390944229707344, "grad_norm": 0.5863124132156372, "learning_rate": 2.9894333333333336e-05, "loss": 0.1078, "step": 10320 }, { "epoch": 11.392048591938156, "grad_norm": 0.6356900930404663, "learning_rate": 2.9894e-05, "loss": 0.0854, "step": 10321 }, { "epoch": 11.393152954168967, "grad_norm": 0.5656743049621582, "learning_rate": 2.9893666666666668e-05, "loss": 0.0791, "step": 10322 }, { "epoch": 11.394257316399779, "grad_norm": 0.38094887137413025, "learning_rate": 2.9893333333333334e-05, "loss": 0.0557, "step": 10323 }, { "epoch": 11.395361678630591, "grad_norm": 0.4383048415184021, "learning_rate": 2.9893e-05, "loss": 0.0453, "step": 10324 }, { 
"epoch": 11.396466040861403, "grad_norm": 0.2925150990486145, "learning_rate": 2.9892666666666666e-05, "loss": 0.0405, "step": 10325 }, { "epoch": 11.397570403092214, "grad_norm": 0.2503814995288849, "learning_rate": 2.9892333333333335e-05, "loss": 0.0602, "step": 10326 }, { "epoch": 11.398674765323026, "grad_norm": 0.6164690852165222, "learning_rate": 2.9891999999999998e-05, "loss": 0.0178, "step": 10327 }, { "epoch": 11.399779127553838, "grad_norm": 0.22234301269054413, "learning_rate": 2.9891666666666667e-05, "loss": 0.0183, "step": 10328 }, { "epoch": 11.400883489784649, "grad_norm": 0.23447269201278687, "learning_rate": 2.9891333333333336e-05, "loss": 0.0147, "step": 10329 }, { "epoch": 11.401987852015461, "grad_norm": 0.29721683263778687, "learning_rate": 2.9891e-05, "loss": 0.0468, "step": 10330 }, { "epoch": 11.403092214246273, "grad_norm": 0.1937883347272873, "learning_rate": 2.9890666666666668e-05, "loss": 0.0126, "step": 10331 }, { "epoch": 11.404196576477084, "grad_norm": 0.3515342175960541, "learning_rate": 2.9890333333333334e-05, "loss": 0.0182, "step": 10332 }, { "epoch": 11.405300938707896, "grad_norm": 0.3923793137073517, "learning_rate": 2.989e-05, "loss": 0.0139, "step": 10333 }, { "epoch": 11.406405300938708, "grad_norm": 0.26573818922042847, "learning_rate": 2.9889666666666666e-05, "loss": 0.011, "step": 10334 }, { "epoch": 11.40750966316952, "grad_norm": 0.17435283958911896, "learning_rate": 2.9889333333333335e-05, "loss": 0.0079, "step": 10335 }, { "epoch": 11.40861402540033, "grad_norm": 0.17589297890663147, "learning_rate": 2.9889e-05, "loss": 0.0112, "step": 10336 }, { "epoch": 11.409718387631143, "grad_norm": 0.21157391369342804, "learning_rate": 2.9888666666666667e-05, "loss": 0.0324, "step": 10337 }, { "epoch": 11.410822749861955, "grad_norm": 0.28745678067207336, "learning_rate": 2.9888333333333337e-05, "loss": 0.0162, "step": 10338 }, { "epoch": 11.411927112092766, "grad_norm": 0.33319002389907837, "learning_rate": 2.9888e-05, "loss": 
0.0094, "step": 10339 }, { "epoch": 11.413031474323578, "grad_norm": 0.2650826871395111, "learning_rate": 2.988766666666667e-05, "loss": 0.017, "step": 10340 }, { "epoch": 11.41413583655439, "grad_norm": 0.17828857898712158, "learning_rate": 2.9887333333333334e-05, "loss": 0.0098, "step": 10341 }, { "epoch": 11.415240198785202, "grad_norm": 0.41784965991973877, "learning_rate": 2.9887e-05, "loss": 0.0145, "step": 10342 }, { "epoch": 11.416344561016013, "grad_norm": 0.3925590217113495, "learning_rate": 2.9886666666666666e-05, "loss": 0.0171, "step": 10343 }, { "epoch": 11.417448923246825, "grad_norm": 0.4629620909690857, "learning_rate": 2.9886333333333336e-05, "loss": 0.0214, "step": 10344 }, { "epoch": 11.418553285477637, "grad_norm": 0.631501317024231, "learning_rate": 2.9886e-05, "loss": 0.0102, "step": 10345 }, { "epoch": 11.419657647708448, "grad_norm": 0.33362236618995667, "learning_rate": 2.9885666666666667e-05, "loss": 0.0143, "step": 10346 }, { "epoch": 11.42076200993926, "grad_norm": 0.3197154104709625, "learning_rate": 2.9885333333333337e-05, "loss": 0.0105, "step": 10347 }, { "epoch": 11.421866372170072, "grad_norm": 0.31549984216690063, "learning_rate": 2.9885e-05, "loss": 0.0137, "step": 10348 }, { "epoch": 11.422970734400884, "grad_norm": 1.1046624183654785, "learning_rate": 2.988466666666667e-05, "loss": 0.0369, "step": 10349 }, { "epoch": 11.424075096631695, "grad_norm": 0.247717022895813, "learning_rate": 2.9884333333333335e-05, "loss": 0.0104, "step": 10350 }, { "epoch": 11.425179458862507, "grad_norm": 0.2532314658164978, "learning_rate": 2.9884e-05, "loss": 0.0119, "step": 10351 }, { "epoch": 11.42628382109332, "grad_norm": 0.21797774732112885, "learning_rate": 2.9883666666666666e-05, "loss": 0.0107, "step": 10352 }, { "epoch": 11.42738818332413, "grad_norm": 0.4075472056865692, "learning_rate": 2.9883333333333332e-05, "loss": 0.0247, "step": 10353 }, { "epoch": 11.428492545554942, "grad_norm": 0.5511091947555542, "learning_rate": 
2.9883000000000002e-05, "loss": 0.0157, "step": 10354 }, { "epoch": 11.429596907785754, "grad_norm": 0.3493212163448334, "learning_rate": 2.9882666666666668e-05, "loss": 0.0186, "step": 10355 }, { "epoch": 11.430701270016565, "grad_norm": 0.631395161151886, "learning_rate": 2.9882333333333334e-05, "loss": 0.0162, "step": 10356 }, { "epoch": 11.431805632247377, "grad_norm": 0.281932532787323, "learning_rate": 2.9882e-05, "loss": 0.0141, "step": 10357 }, { "epoch": 11.43290999447819, "grad_norm": 0.3497253656387329, "learning_rate": 2.988166666666667e-05, "loss": 0.0156, "step": 10358 }, { "epoch": 11.434014356709001, "grad_norm": 0.5068508386611938, "learning_rate": 2.988133333333333e-05, "loss": 0.0154, "step": 10359 }, { "epoch": 11.435118718939812, "grad_norm": 0.2222924381494522, "learning_rate": 2.9881e-05, "loss": 0.0136, "step": 10360 }, { "epoch": 11.436223081170624, "grad_norm": 0.45690783858299255, "learning_rate": 2.988066666666667e-05, "loss": 0.0269, "step": 10361 }, { "epoch": 11.437327443401436, "grad_norm": 0.38361483812332153, "learning_rate": 2.9880333333333333e-05, "loss": 0.0175, "step": 10362 }, { "epoch": 11.438431805632247, "grad_norm": 0.38806888461112976, "learning_rate": 2.9880000000000002e-05, "loss": 0.013, "step": 10363 }, { "epoch": 11.439536167863059, "grad_norm": 0.319352388381958, "learning_rate": 2.9879666666666668e-05, "loss": 0.015, "step": 10364 }, { "epoch": 11.440640530093871, "grad_norm": 0.5792565941810608, "learning_rate": 2.9879333333333334e-05, "loss": 0.0173, "step": 10365 }, { "epoch": 11.441744892324682, "grad_norm": 0.6598842144012451, "learning_rate": 2.9879e-05, "loss": 0.0269, "step": 10366 }, { "epoch": 11.442849254555494, "grad_norm": 0.655444324016571, "learning_rate": 2.987866666666667e-05, "loss": 0.2019, "step": 10367 }, { "epoch": 11.443953616786306, "grad_norm": 1.0635422468185425, "learning_rate": 2.987833333333333e-05, "loss": 0.2475, "step": 10368 }, { "epoch": 11.445057979017118, "grad_norm": 
0.7106844186782837, "learning_rate": 2.9878e-05, "loss": 0.133, "step": 10369 }, { "epoch": 11.446162341247929, "grad_norm": 0.49899113178253174, "learning_rate": 2.987766666666667e-05, "loss": 0.1068, "step": 10370 }, { "epoch": 11.447266703478741, "grad_norm": 0.5502910017967224, "learning_rate": 2.9877333333333333e-05, "loss": 0.1327, "step": 10371 }, { "epoch": 11.448371065709553, "grad_norm": 0.50718754529953, "learning_rate": 2.9877000000000002e-05, "loss": 0.1094, "step": 10372 }, { "epoch": 11.449475427940364, "grad_norm": 0.4739335775375366, "learning_rate": 2.9876666666666668e-05, "loss": 0.0876, "step": 10373 }, { "epoch": 11.450579790171176, "grad_norm": 0.5796104669570923, "learning_rate": 2.9876333333333334e-05, "loss": 0.1002, "step": 10374 }, { "epoch": 11.451684152401988, "grad_norm": 0.32537466287612915, "learning_rate": 2.9876e-05, "loss": 0.0275, "step": 10375 }, { "epoch": 11.4527885146328, "grad_norm": 0.404248982667923, "learning_rate": 2.987566666666667e-05, "loss": 0.0624, "step": 10376 }, { "epoch": 11.45389287686361, "grad_norm": 0.7164422273635864, "learning_rate": 2.9875333333333332e-05, "loss": 0.0285, "step": 10377 }, { "epoch": 11.454997239094423, "grad_norm": 0.40410736203193665, "learning_rate": 2.9875e-05, "loss": 0.0329, "step": 10378 }, { "epoch": 11.456101601325235, "grad_norm": 0.42211008071899414, "learning_rate": 2.987466666666667e-05, "loss": 0.0162, "step": 10379 }, { "epoch": 11.457205963556046, "grad_norm": 0.44827979803085327, "learning_rate": 2.9874333333333333e-05, "loss": 0.0304, "step": 10380 }, { "epoch": 11.458310325786858, "grad_norm": 0.27546918392181396, "learning_rate": 2.9874000000000002e-05, "loss": 0.0297, "step": 10381 }, { "epoch": 11.45941468801767, "grad_norm": 0.27539360523223877, "learning_rate": 2.9873666666666665e-05, "loss": 0.0151, "step": 10382 }, { "epoch": 11.460519050248482, "grad_norm": 0.470350444316864, "learning_rate": 2.9873333333333334e-05, "loss": 0.0323, "step": 10383 }, { "epoch": 
11.461623412479293, "grad_norm": 0.16519047319889069, "learning_rate": 2.9873e-05, "loss": 0.0114, "step": 10384 }, { "epoch": 11.462727774710105, "grad_norm": 0.20655736327171326, "learning_rate": 2.9872666666666666e-05, "loss": 0.0191, "step": 10385 }, { "epoch": 11.463832136940917, "grad_norm": 0.24770185351371765, "learning_rate": 2.9872333333333335e-05, "loss": 0.0151, "step": 10386 }, { "epoch": 11.464936499171728, "grad_norm": 0.3507630527019501, "learning_rate": 2.9872e-05, "loss": 0.0197, "step": 10387 }, { "epoch": 11.46604086140254, "grad_norm": 0.3116603493690491, "learning_rate": 2.9871666666666667e-05, "loss": 0.014, "step": 10388 }, { "epoch": 11.467145223633352, "grad_norm": 0.29298973083496094, "learning_rate": 2.9871333333333333e-05, "loss": 0.0141, "step": 10389 }, { "epoch": 11.468249585864163, "grad_norm": 0.6180578470230103, "learning_rate": 2.9871000000000003e-05, "loss": 0.0261, "step": 10390 }, { "epoch": 11.469353948094975, "grad_norm": 0.3527357876300812, "learning_rate": 2.9870666666666665e-05, "loss": 0.0103, "step": 10391 }, { "epoch": 11.470458310325787, "grad_norm": 0.4031645655632019, "learning_rate": 2.9870333333333334e-05, "loss": 0.0186, "step": 10392 }, { "epoch": 11.4715626725566, "grad_norm": 0.5293145775794983, "learning_rate": 2.987e-05, "loss": 0.0205, "step": 10393 }, { "epoch": 11.47266703478741, "grad_norm": 0.7810728549957275, "learning_rate": 2.9869666666666666e-05, "loss": 0.0265, "step": 10394 }, { "epoch": 11.473771397018222, "grad_norm": 0.26922711730003357, "learning_rate": 2.9869333333333336e-05, "loss": 0.0144, "step": 10395 }, { "epoch": 11.474875759249034, "grad_norm": 1.9153132438659668, "learning_rate": 2.9869e-05, "loss": 0.0117, "step": 10396 }, { "epoch": 11.475980121479845, "grad_norm": 0.16936466097831726, "learning_rate": 2.9868666666666667e-05, "loss": 0.008, "step": 10397 }, { "epoch": 11.477084483710657, "grad_norm": 0.2126225233078003, "learning_rate": 2.9868333333333333e-05, "loss": 0.0113, 
"step": 10398 }, { "epoch": 11.47818884594147, "grad_norm": 0.3007235527038574, "learning_rate": 2.9868000000000003e-05, "loss": 0.0141, "step": 10399 }, { "epoch": 11.47929320817228, "grad_norm": 0.3674451410770416, "learning_rate": 2.9867666666666665e-05, "loss": 0.0158, "step": 10400 }, { "epoch": 11.480397570403092, "grad_norm": 0.6642948985099792, "learning_rate": 2.9867333333333335e-05, "loss": 0.0163, "step": 10401 }, { "epoch": 11.481501932633904, "grad_norm": 0.24604736268520355, "learning_rate": 2.9867e-05, "loss": 0.0096, "step": 10402 }, { "epoch": 11.482606294864716, "grad_norm": 0.3065817356109619, "learning_rate": 2.9866666666666666e-05, "loss": 0.0201, "step": 10403 }, { "epoch": 11.483710657095527, "grad_norm": 0.3364432752132416, "learning_rate": 2.9866333333333336e-05, "loss": 0.0076, "step": 10404 }, { "epoch": 11.484815019326339, "grad_norm": 0.3120327591896057, "learning_rate": 2.9866000000000002e-05, "loss": 0.0147, "step": 10405 }, { "epoch": 11.485919381557151, "grad_norm": 0.2910771667957306, "learning_rate": 2.9865666666666668e-05, "loss": 0.013, "step": 10406 }, { "epoch": 11.487023743787962, "grad_norm": 0.5951279997825623, "learning_rate": 2.9865333333333334e-05, "loss": 0.0223, "step": 10407 }, { "epoch": 11.488128106018774, "grad_norm": 0.4389500319957733, "learning_rate": 2.9865000000000003e-05, "loss": 0.0113, "step": 10408 }, { "epoch": 11.489232468249586, "grad_norm": 0.7211446166038513, "learning_rate": 2.9864666666666665e-05, "loss": 0.0155, "step": 10409 }, { "epoch": 11.490336830480398, "grad_norm": 0.8195072412490845, "learning_rate": 2.9864333333333335e-05, "loss": 0.0111, "step": 10410 }, { "epoch": 11.491441192711209, "grad_norm": 0.5136827230453491, "learning_rate": 2.9864000000000004e-05, "loss": 0.0132, "step": 10411 }, { "epoch": 11.492545554942021, "grad_norm": 0.2865407466888428, "learning_rate": 2.9863666666666667e-05, "loss": 0.0111, "step": 10412 }, { "epoch": 11.493649917172833, "grad_norm": 1.040496587753296, 
"learning_rate": 2.9863333333333336e-05, "loss": 0.0279, "step": 10413 }, { "epoch": 11.494754279403644, "grad_norm": 0.9132295250892639, "learning_rate": 2.9863e-05, "loss": 0.0187, "step": 10414 }, { "epoch": 11.495858641634456, "grad_norm": 0.7694064378738403, "learning_rate": 2.9862666666666668e-05, "loss": 0.0149, "step": 10415 }, { "epoch": 11.496963003865268, "grad_norm": 0.2853563725948334, "learning_rate": 2.9862333333333334e-05, "loss": 0.01, "step": 10416 }, { "epoch": 11.49806736609608, "grad_norm": 0.7207319736480713, "learning_rate": 2.9862e-05, "loss": 0.1758, "step": 10417 }, { "epoch": 11.49917172832689, "grad_norm": 0.533711314201355, "learning_rate": 2.9861666666666666e-05, "loss": 0.1616, "step": 10418 }, { "epoch": 11.500276090557703, "grad_norm": 0.6688236594200134, "learning_rate": 2.9861333333333335e-05, "loss": 0.1084, "step": 10419 }, { "epoch": 11.501380452788515, "grad_norm": 0.6677356958389282, "learning_rate": 2.9861e-05, "loss": 0.1106, "step": 10420 }, { "epoch": 11.502484815019326, "grad_norm": 0.7869928479194641, "learning_rate": 2.9860666666666667e-05, "loss": 0.0976, "step": 10421 }, { "epoch": 11.503589177250138, "grad_norm": 0.5596247911453247, "learning_rate": 2.9860333333333336e-05, "loss": 0.097, "step": 10422 }, { "epoch": 11.50469353948095, "grad_norm": 0.4362373650074005, "learning_rate": 2.986e-05, "loss": 0.0787, "step": 10423 }, { "epoch": 11.50579790171176, "grad_norm": 0.5398104190826416, "learning_rate": 2.9859666666666668e-05, "loss": 0.0903, "step": 10424 }, { "epoch": 11.506902263942573, "grad_norm": 0.5009570121765137, "learning_rate": 2.9859333333333334e-05, "loss": 0.0265, "step": 10425 }, { "epoch": 11.508006626173385, "grad_norm": 0.48031577467918396, "learning_rate": 2.9859e-05, "loss": 0.038, "step": 10426 }, { "epoch": 11.509110988404197, "grad_norm": 0.44150736927986145, "learning_rate": 2.9858666666666666e-05, "loss": 0.0205, "step": 10427 }, { "epoch": 11.510215350635008, "grad_norm": 
0.23489046096801758, "learning_rate": 2.9858333333333335e-05, "loss": 0.0214, "step": 10428 }, { "epoch": 11.51131971286582, "grad_norm": 0.391510546207428, "learning_rate": 2.9858e-05, "loss": 0.02, "step": 10429 }, { "epoch": 11.512424075096632, "grad_norm": 0.3467015326023102, "learning_rate": 2.9857666666666667e-05, "loss": 0.0191, "step": 10430 }, { "epoch": 11.513528437327443, "grad_norm": 0.1796746551990509, "learning_rate": 2.9857333333333336e-05, "loss": 0.0126, "step": 10431 }, { "epoch": 11.514632799558255, "grad_norm": 0.382286936044693, "learning_rate": 2.9857e-05, "loss": 0.0132, "step": 10432 }, { "epoch": 11.515737161789067, "grad_norm": 0.2087363451719284, "learning_rate": 2.9856666666666668e-05, "loss": 0.0112, "step": 10433 }, { "epoch": 11.516841524019878, "grad_norm": 0.7669963836669922, "learning_rate": 2.9856333333333334e-05, "loss": 0.0206, "step": 10434 }, { "epoch": 11.51794588625069, "grad_norm": 0.2539154887199402, "learning_rate": 2.9856e-05, "loss": 0.0153, "step": 10435 }, { "epoch": 11.519050248481502, "grad_norm": 0.15546628832817078, "learning_rate": 2.985566666666667e-05, "loss": 0.009, "step": 10436 }, { "epoch": 11.520154610712314, "grad_norm": 0.32157865166664124, "learning_rate": 2.9855333333333335e-05, "loss": 0.0119, "step": 10437 }, { "epoch": 11.521258972943125, "grad_norm": 0.28129079937934875, "learning_rate": 2.9855e-05, "loss": 0.018, "step": 10438 }, { "epoch": 11.522363335173937, "grad_norm": 0.4865304231643677, "learning_rate": 2.9854666666666667e-05, "loss": 0.0186, "step": 10439 }, { "epoch": 11.52346769740475, "grad_norm": 0.4811893403530121, "learning_rate": 2.9854333333333337e-05, "loss": 0.019, "step": 10440 }, { "epoch": 11.52457205963556, "grad_norm": 0.2991550862789154, "learning_rate": 2.9854e-05, "loss": 0.0166, "step": 10441 }, { "epoch": 11.525676421866372, "grad_norm": 0.26780012249946594, "learning_rate": 2.985366666666667e-05, "loss": 0.02, "step": 10442 }, { "epoch": 11.526780784097184, "grad_norm": 
0.34488624334335327, "learning_rate": 2.985333333333333e-05, "loss": 0.0193, "step": 10443 }, { "epoch": 11.527885146327996, "grad_norm": 0.29415494203567505, "learning_rate": 2.9853e-05, "loss": 0.0127, "step": 10444 }, { "epoch": 11.528989508558807, "grad_norm": 0.35947152972221375, "learning_rate": 2.985266666666667e-05, "loss": 0.012, "step": 10445 }, { "epoch": 11.530093870789619, "grad_norm": 0.31770190596580505, "learning_rate": 2.9852333333333332e-05, "loss": 0.0148, "step": 10446 }, { "epoch": 11.531198233020431, "grad_norm": 0.4393382668495178, "learning_rate": 2.9852e-05, "loss": 0.0176, "step": 10447 }, { "epoch": 11.532302595251242, "grad_norm": 0.4719862937927246, "learning_rate": 2.9851666666666667e-05, "loss": 0.0199, "step": 10448 }, { "epoch": 11.533406957482054, "grad_norm": 0.3711574375629425, "learning_rate": 2.9851333333333333e-05, "loss": 0.0201, "step": 10449 }, { "epoch": 11.534511319712866, "grad_norm": 0.3686298727989197, "learning_rate": 2.9851e-05, "loss": 0.0277, "step": 10450 }, { "epoch": 11.535615681943678, "grad_norm": 0.7175032496452332, "learning_rate": 2.985066666666667e-05, "loss": 0.0139, "step": 10451 }, { "epoch": 11.536720044174489, "grad_norm": 0.7171622514724731, "learning_rate": 2.985033333333333e-05, "loss": 0.0175, "step": 10452 }, { "epoch": 11.537824406405301, "grad_norm": 1.1974244117736816, "learning_rate": 2.985e-05, "loss": 0.0147, "step": 10453 }, { "epoch": 11.538928768636113, "grad_norm": 0.30503201484680176, "learning_rate": 2.984966666666667e-05, "loss": 0.0234, "step": 10454 }, { "epoch": 11.540033130866924, "grad_norm": 0.49947667121887207, "learning_rate": 2.9849333333333332e-05, "loss": 0.0132, "step": 10455 }, { "epoch": 11.541137493097736, "grad_norm": 0.8764538764953613, "learning_rate": 2.9849000000000002e-05, "loss": 0.0117, "step": 10456 }, { "epoch": 11.542241855328548, "grad_norm": 0.3615801930427551, "learning_rate": 2.9848666666666668e-05, "loss": 0.0219, "step": 10457 }, { "epoch": 
11.54334621755936, "grad_norm": 0.308584064245224, "learning_rate": 2.9848333333333334e-05, "loss": 0.0148, "step": 10458 }, { "epoch": 11.54445057979017, "grad_norm": 0.7010989785194397, "learning_rate": 2.9848e-05, "loss": 0.0214, "step": 10459 }, { "epoch": 11.545554942020983, "grad_norm": 0.5136050581932068, "learning_rate": 2.984766666666667e-05, "loss": 0.012, "step": 10460 }, { "epoch": 11.546659304251795, "grad_norm": 0.3784327208995819, "learning_rate": 2.9847333333333335e-05, "loss": 0.0176, "step": 10461 }, { "epoch": 11.547763666482606, "grad_norm": 0.3893918991088867, "learning_rate": 2.9847e-05, "loss": 0.0143, "step": 10462 }, { "epoch": 11.548868028713418, "grad_norm": 0.40204375982284546, "learning_rate": 2.984666666666667e-05, "loss": 0.0216, "step": 10463 }, { "epoch": 11.54997239094423, "grad_norm": 0.7419370412826538, "learning_rate": 2.9846333333333333e-05, "loss": 0.0167, "step": 10464 }, { "epoch": 11.55107675317504, "grad_norm": 0.6129883527755737, "learning_rate": 2.9846000000000002e-05, "loss": 0.0264, "step": 10465 }, { "epoch": 11.552181115405853, "grad_norm": 0.47113925218582153, "learning_rate": 2.9845666666666668e-05, "loss": 0.0084, "step": 10466 }, { "epoch": 11.553285477636665, "grad_norm": 0.6272479891777039, "learning_rate": 2.9845333333333334e-05, "loss": 0.2586, "step": 10467 }, { "epoch": 11.554389839867477, "grad_norm": 0.6957153677940369, "learning_rate": 2.9845e-05, "loss": 0.1581, "step": 10468 }, { "epoch": 11.555494202098288, "grad_norm": 0.620185136795044, "learning_rate": 2.984466666666667e-05, "loss": 0.2018, "step": 10469 }, { "epoch": 11.5565985643291, "grad_norm": 0.44852516055107117, "learning_rate": 2.9844333333333335e-05, "loss": 0.0745, "step": 10470 }, { "epoch": 11.557702926559912, "grad_norm": 0.5746833086013794, "learning_rate": 2.9844e-05, "loss": 0.1047, "step": 10471 }, { "epoch": 11.558807288790723, "grad_norm": 0.7043988108634949, "learning_rate": 2.9843666666666667e-05, "loss": 0.1012, "step": 10472 
}, { "epoch": 11.559911651021535, "grad_norm": 0.5538885593414307, "learning_rate": 2.9843333333333333e-05, "loss": 0.0666, "step": 10473 }, { "epoch": 11.561016013252347, "grad_norm": 0.5288719534873962, "learning_rate": 2.9843000000000002e-05, "loss": 0.1, "step": 10474 }, { "epoch": 11.562120375483158, "grad_norm": 0.37061387300491333, "learning_rate": 2.9842666666666665e-05, "loss": 0.0359, "step": 10475 }, { "epoch": 11.56322473771397, "grad_norm": 0.4396373927593231, "learning_rate": 2.9842333333333334e-05, "loss": 0.0607, "step": 10476 }, { "epoch": 11.564329099944782, "grad_norm": 0.5006029009819031, "learning_rate": 2.9842e-05, "loss": 0.0164, "step": 10477 }, { "epoch": 11.565433462175594, "grad_norm": 0.4644975960254669, "learning_rate": 2.9841666666666666e-05, "loss": 0.0218, "step": 10478 }, { "epoch": 11.566537824406405, "grad_norm": 0.2760528028011322, "learning_rate": 2.9841333333333335e-05, "loss": 0.0221, "step": 10479 }, { "epoch": 11.567642186637217, "grad_norm": 0.28444400429725647, "learning_rate": 2.9841e-05, "loss": 0.0131, "step": 10480 }, { "epoch": 11.56874654886803, "grad_norm": 0.43680086731910706, "learning_rate": 2.9840666666666667e-05, "loss": 0.0291, "step": 10481 }, { "epoch": 11.56985091109884, "grad_norm": 0.38144800066947937, "learning_rate": 2.9840333333333333e-05, "loss": 0.0137, "step": 10482 }, { "epoch": 11.570955273329652, "grad_norm": 0.4640140235424042, "learning_rate": 2.9840000000000002e-05, "loss": 0.0293, "step": 10483 }, { "epoch": 11.572059635560464, "grad_norm": 0.9206424951553345, "learning_rate": 2.9839666666666665e-05, "loss": 0.0213, "step": 10484 }, { "epoch": 11.573163997791276, "grad_norm": 0.2170065939426422, "learning_rate": 2.9839333333333334e-05, "loss": 0.0117, "step": 10485 }, { "epoch": 11.574268360022087, "grad_norm": 0.3679555654525757, "learning_rate": 2.9839000000000003e-05, "loss": 0.0167, "step": 10486 }, { "epoch": 11.575372722252899, "grad_norm": 0.2126512974500656, "learning_rate": 
2.9838666666666666e-05, "loss": 0.0161, "step": 10487 }, { "epoch": 11.576477084483711, "grad_norm": 0.3255724608898163, "learning_rate": 2.9838333333333335e-05, "loss": 0.0245, "step": 10488 }, { "epoch": 11.577581446714522, "grad_norm": 0.5120030045509338, "learning_rate": 2.9838e-05, "loss": 0.0198, "step": 10489 }, { "epoch": 11.578685808945334, "grad_norm": 0.2769697606563568, "learning_rate": 2.9837666666666667e-05, "loss": 0.0177, "step": 10490 }, { "epoch": 11.579790171176146, "grad_norm": 0.4069982171058655, "learning_rate": 2.9837333333333333e-05, "loss": 0.0149, "step": 10491 }, { "epoch": 11.580894533406958, "grad_norm": 0.6076458692550659, "learning_rate": 2.9837000000000002e-05, "loss": 0.0178, "step": 10492 }, { "epoch": 11.581998895637769, "grad_norm": 0.2964053452014923, "learning_rate": 2.9836666666666665e-05, "loss": 0.0124, "step": 10493 }, { "epoch": 11.583103257868581, "grad_norm": 0.3848743438720703, "learning_rate": 2.9836333333333334e-05, "loss": 0.0138, "step": 10494 }, { "epoch": 11.584207620099393, "grad_norm": 0.28752216696739197, "learning_rate": 2.9836000000000004e-05, "loss": 0.0153, "step": 10495 }, { "epoch": 11.585311982330204, "grad_norm": 0.5648534893989563, "learning_rate": 2.9835666666666666e-05, "loss": 0.0222, "step": 10496 }, { "epoch": 11.586416344561016, "grad_norm": 0.19245545566082, "learning_rate": 2.9835333333333336e-05, "loss": 0.0105, "step": 10497 }, { "epoch": 11.587520706791828, "grad_norm": 0.3180898129940033, "learning_rate": 2.9835e-05, "loss": 0.0152, "step": 10498 }, { "epoch": 11.588625069022639, "grad_norm": 0.47865554690361023, "learning_rate": 2.9834666666666667e-05, "loss": 0.0191, "step": 10499 }, { "epoch": 11.589729431253451, "grad_norm": 1.0591579675674438, "learning_rate": 2.9834333333333333e-05, "loss": 0.0261, "step": 10500 }, { "epoch": 11.590833793484263, "grad_norm": 1.0054060220718384, "learning_rate": 2.9834000000000003e-05, "loss": 0.009, "step": 10501 }, { "epoch": 11.591938155715075, 
"grad_norm": 0.2901996970176697, "learning_rate": 2.9833666666666665e-05, "loss": 0.0201, "step": 10502 }, { "epoch": 11.593042517945886, "grad_norm": 0.4718690514564514, "learning_rate": 2.9833333333333335e-05, "loss": 0.0316, "step": 10503 }, { "epoch": 11.594146880176698, "grad_norm": 0.3601992428302765, "learning_rate": 2.9833e-05, "loss": 0.0146, "step": 10504 }, { "epoch": 11.59525124240751, "grad_norm": 0.691421389579773, "learning_rate": 2.9832666666666666e-05, "loss": 0.0275, "step": 10505 }, { "epoch": 11.59635560463832, "grad_norm": 0.2677839398384094, "learning_rate": 2.9832333333333336e-05, "loss": 0.0158, "step": 10506 }, { "epoch": 11.597459966869133, "grad_norm": 0.63046795129776, "learning_rate": 2.9831999999999998e-05, "loss": 0.022, "step": 10507 }, { "epoch": 11.598564329099945, "grad_norm": 0.5237387418746948, "learning_rate": 2.9831666666666668e-05, "loss": 0.0286, "step": 10508 }, { "epoch": 11.599668691330756, "grad_norm": 0.6796875596046448, "learning_rate": 2.9831333333333334e-05, "loss": 0.0259, "step": 10509 }, { "epoch": 11.600773053561568, "grad_norm": 0.44288691878318787, "learning_rate": 2.9831e-05, "loss": 0.0236, "step": 10510 }, { "epoch": 11.60187741579238, "grad_norm": 0.4118497371673584, "learning_rate": 2.983066666666667e-05, "loss": 0.0215, "step": 10511 }, { "epoch": 11.602981778023192, "grad_norm": 0.3369785249233246, "learning_rate": 2.9830333333333335e-05, "loss": 0.0191, "step": 10512 }, { "epoch": 11.604086140254003, "grad_norm": 0.5543297529220581, "learning_rate": 2.983e-05, "loss": 0.0293, "step": 10513 }, { "epoch": 11.605190502484815, "grad_norm": 0.39592868089675903, "learning_rate": 2.9829666666666667e-05, "loss": 0.0096, "step": 10514 }, { "epoch": 11.606294864715627, "grad_norm": 0.3234170377254486, "learning_rate": 2.9829333333333336e-05, "loss": 0.0188, "step": 10515 }, { "epoch": 11.607399226946438, "grad_norm": 1.5895137786865234, "learning_rate": 2.9829e-05, "loss": 0.0422, "step": 10516 }, { "epoch": 
11.60850358917725, "grad_norm": 0.9384580254554749, "learning_rate": 2.9828666666666668e-05, "loss": 0.2701, "step": 10517 }, { "epoch": 11.609607951408062, "grad_norm": 0.5725158452987671, "learning_rate": 2.9828333333333334e-05, "loss": 0.2068, "step": 10518 }, { "epoch": 11.610712313638874, "grad_norm": 0.7265406847000122, "learning_rate": 2.9828e-05, "loss": 0.1408, "step": 10519 }, { "epoch": 11.611816675869685, "grad_norm": 0.7699860334396362, "learning_rate": 2.982766666666667e-05, "loss": 0.1549, "step": 10520 }, { "epoch": 11.612921038100497, "grad_norm": 0.4788753390312195, "learning_rate": 2.9827333333333335e-05, "loss": 0.1102, "step": 10521 }, { "epoch": 11.61402540033131, "grad_norm": 0.3769218325614929, "learning_rate": 2.9827e-05, "loss": 0.0633, "step": 10522 }, { "epoch": 11.61512976256212, "grad_norm": 0.400971919298172, "learning_rate": 2.9826666666666667e-05, "loss": 0.0454, "step": 10523 }, { "epoch": 11.616234124792932, "grad_norm": 0.31341028213500977, "learning_rate": 2.9826333333333336e-05, "loss": 0.0382, "step": 10524 }, { "epoch": 11.617338487023744, "grad_norm": 0.5248139500617981, "learning_rate": 2.9826e-05, "loss": 0.0434, "step": 10525 }, { "epoch": 11.618442849254556, "grad_norm": 0.4173392653465271, "learning_rate": 2.9825666666666668e-05, "loss": 0.0353, "step": 10526 }, { "epoch": 11.619547211485367, "grad_norm": 1.9955108165740967, "learning_rate": 2.9825333333333334e-05, "loss": 0.0332, "step": 10527 }, { "epoch": 11.620651573716179, "grad_norm": 0.22563451528549194, "learning_rate": 2.9825e-05, "loss": 0.015, "step": 10528 }, { "epoch": 11.621755935946991, "grad_norm": 0.2529601752758026, "learning_rate": 2.982466666666667e-05, "loss": 0.0188, "step": 10529 }, { "epoch": 11.622860298177802, "grad_norm": 0.25690168142318726, "learning_rate": 2.9824333333333335e-05, "loss": 0.0244, "step": 10530 }, { "epoch": 11.623964660408614, "grad_norm": 0.1575312614440918, "learning_rate": 2.9824e-05, "loss": 0.0091, "step": 10531 }, { 
"epoch": 11.625069022639426, "grad_norm": 0.32066109776496887, "learning_rate": 2.9823666666666667e-05, "loss": 0.0133, "step": 10532 }, { "epoch": 11.626173384870237, "grad_norm": 0.34046831727027893, "learning_rate": 2.9823333333333333e-05, "loss": 0.0123, "step": 10533 }, { "epoch": 11.627277747101049, "grad_norm": 0.4169020354747772, "learning_rate": 2.9823e-05, "loss": 0.0136, "step": 10534 }, { "epoch": 11.628382109331861, "grad_norm": 0.3123646080493927, "learning_rate": 2.9822666666666668e-05, "loss": 0.0123, "step": 10535 }, { "epoch": 11.629486471562673, "grad_norm": 0.24820274114608765, "learning_rate": 2.9822333333333334e-05, "loss": 0.0131, "step": 10536 }, { "epoch": 11.630590833793484, "grad_norm": 0.270741730928421, "learning_rate": 2.9822e-05, "loss": 0.016, "step": 10537 }, { "epoch": 11.631695196024296, "grad_norm": 0.46949851512908936, "learning_rate": 2.982166666666667e-05, "loss": 0.0388, "step": 10538 }, { "epoch": 11.632799558255108, "grad_norm": 0.2664564251899719, "learning_rate": 2.9821333333333332e-05, "loss": 0.0134, "step": 10539 }, { "epoch": 11.633903920485919, "grad_norm": 0.4069778621196747, "learning_rate": 2.9821e-05, "loss": 0.0609, "step": 10540 }, { "epoch": 11.635008282716731, "grad_norm": 0.32598596811294556, "learning_rate": 2.9820666666666667e-05, "loss": 0.011, "step": 10541 }, { "epoch": 11.636112644947543, "grad_norm": 0.18472127616405487, "learning_rate": 2.9820333333333333e-05, "loss": 0.0103, "step": 10542 }, { "epoch": 11.637217007178354, "grad_norm": 0.35195353627204895, "learning_rate": 2.982e-05, "loss": 0.0164, "step": 10543 }, { "epoch": 11.638321369409166, "grad_norm": 0.3730606436729431, "learning_rate": 2.981966666666667e-05, "loss": 0.0199, "step": 10544 }, { "epoch": 11.639425731639978, "grad_norm": 0.859268069267273, "learning_rate": 2.9819333333333334e-05, "loss": 0.0179, "step": 10545 }, { "epoch": 11.64053009387079, "grad_norm": 0.4030526876449585, "learning_rate": 2.9819e-05, "loss": 0.0181, "step": 
10546 }, { "epoch": 11.6416344561016, "grad_norm": 0.34850913286209106, "learning_rate": 2.981866666666667e-05, "loss": 0.0124, "step": 10547 }, { "epoch": 11.642738818332413, "grad_norm": 0.4656817317008972, "learning_rate": 2.9818333333333332e-05, "loss": 0.016, "step": 10548 }, { "epoch": 11.643843180563225, "grad_norm": 0.3896152675151825, "learning_rate": 2.9818e-05, "loss": 0.0149, "step": 10549 }, { "epoch": 11.644947542794036, "grad_norm": 0.24102799594402313, "learning_rate": 2.9817666666666667e-05, "loss": 0.0143, "step": 10550 }, { "epoch": 11.646051905024848, "grad_norm": 0.1614728420972824, "learning_rate": 2.9817333333333333e-05, "loss": 0.0068, "step": 10551 }, { "epoch": 11.64715626725566, "grad_norm": 0.4308353662490845, "learning_rate": 2.9817e-05, "loss": 0.0173, "step": 10552 }, { "epoch": 11.648260629486472, "grad_norm": 0.2712404131889343, "learning_rate": 2.981666666666667e-05, "loss": 0.0117, "step": 10553 }, { "epoch": 11.649364991717283, "grad_norm": 0.2789071798324585, "learning_rate": 2.9816333333333335e-05, "loss": 0.013, "step": 10554 }, { "epoch": 11.650469353948095, "grad_norm": 0.558464527130127, "learning_rate": 2.9816e-05, "loss": 0.0181, "step": 10555 }, { "epoch": 11.651573716178907, "grad_norm": 0.26636824011802673, "learning_rate": 2.981566666666667e-05, "loss": 0.0109, "step": 10556 }, { "epoch": 11.652678078409718, "grad_norm": 0.33214715123176575, "learning_rate": 2.9815333333333332e-05, "loss": 0.015, "step": 10557 }, { "epoch": 11.65378244064053, "grad_norm": 0.16724960505962372, "learning_rate": 2.9815e-05, "loss": 0.0067, "step": 10558 }, { "epoch": 11.654886802871342, "grad_norm": 0.7199345827102661, "learning_rate": 2.9814666666666668e-05, "loss": 0.0309, "step": 10559 }, { "epoch": 11.655991165102154, "grad_norm": 0.5281226634979248, "learning_rate": 2.9814333333333334e-05, "loss": 0.0314, "step": 10560 }, { "epoch": 11.657095527332965, "grad_norm": 0.3714923858642578, "learning_rate": 2.9814000000000003e-05, "loss": 
0.024, "step": 10561 }, { "epoch": 11.658199889563777, "grad_norm": 0.316028892993927, "learning_rate": 2.981366666666667e-05, "loss": 0.0091, "step": 10562 }, { "epoch": 11.65930425179459, "grad_norm": 1.050034999847412, "learning_rate": 2.9813333333333335e-05, "loss": 0.0167, "step": 10563 }, { "epoch": 11.6604086140254, "grad_norm": 0.409180223941803, "learning_rate": 2.9813e-05, "loss": 0.0246, "step": 10564 }, { "epoch": 11.661512976256212, "grad_norm": 0.6729073524475098, "learning_rate": 2.9812666666666667e-05, "loss": 0.0227, "step": 10565 }, { "epoch": 11.662617338487024, "grad_norm": 0.9965610504150391, "learning_rate": 2.9812333333333333e-05, "loss": 0.0126, "step": 10566 }, { "epoch": 11.663721700717835, "grad_norm": 0.8909411430358887, "learning_rate": 2.9812000000000002e-05, "loss": 0.2531, "step": 10567 }, { "epoch": 11.664826062948647, "grad_norm": 0.9591250419616699, "learning_rate": 2.9811666666666664e-05, "loss": 0.284, "step": 10568 }, { "epoch": 11.665930425179459, "grad_norm": 0.5304281711578369, "learning_rate": 2.9811333333333334e-05, "loss": 0.1719, "step": 10569 }, { "epoch": 11.667034787410271, "grad_norm": 0.858803927898407, "learning_rate": 2.9811000000000003e-05, "loss": 0.1782, "step": 10570 }, { "epoch": 11.668139149641082, "grad_norm": 0.8165602684020996, "learning_rate": 2.9810666666666666e-05, "loss": 0.1087, "step": 10571 }, { "epoch": 11.669243511871894, "grad_norm": 0.4958570897579193, "learning_rate": 2.9810333333333335e-05, "loss": 0.0991, "step": 10572 }, { "epoch": 11.670347874102706, "grad_norm": 0.5393484234809875, "learning_rate": 2.981e-05, "loss": 0.1223, "step": 10573 }, { "epoch": 11.671452236333517, "grad_norm": 0.580464780330658, "learning_rate": 2.9809666666666667e-05, "loss": 0.0724, "step": 10574 }, { "epoch": 11.672556598564329, "grad_norm": 0.4867665767669678, "learning_rate": 2.9809333333333333e-05, "loss": 0.0485, "step": 10575 }, { "epoch": 11.673660960795141, "grad_norm": 0.5421802401542664, 
"learning_rate": 2.9809000000000002e-05, "loss": 0.0686, "step": 10576 }, { "epoch": 11.674765323025952, "grad_norm": 0.23751971125602722, "learning_rate": 2.9808666666666665e-05, "loss": 0.0233, "step": 10577 }, { "epoch": 11.675869685256764, "grad_norm": 0.5216959118843079, "learning_rate": 2.9808333333333334e-05, "loss": 0.025, "step": 10578 }, { "epoch": 11.676974047487576, "grad_norm": 0.352262020111084, "learning_rate": 2.9808000000000003e-05, "loss": 0.0255, "step": 10579 }, { "epoch": 11.678078409718388, "grad_norm": 0.4373011589050293, "learning_rate": 2.9807666666666666e-05, "loss": 0.0254, "step": 10580 }, { "epoch": 11.679182771949199, "grad_norm": 0.42909201979637146, "learning_rate": 2.9807333333333335e-05, "loss": 0.017, "step": 10581 }, { "epoch": 11.680287134180011, "grad_norm": 0.32509317994117737, "learning_rate": 2.9807e-05, "loss": 0.0327, "step": 10582 }, { "epoch": 11.681391496410823, "grad_norm": 0.25327757000923157, "learning_rate": 2.9806666666666667e-05, "loss": 0.0146, "step": 10583 }, { "epoch": 11.682495858641634, "grad_norm": 0.4471797049045563, "learning_rate": 2.9806333333333333e-05, "loss": 0.0105, "step": 10584 }, { "epoch": 11.683600220872446, "grad_norm": 0.686513364315033, "learning_rate": 2.9806000000000002e-05, "loss": 0.0141, "step": 10585 }, { "epoch": 11.684704583103258, "grad_norm": 1.6084333658218384, "learning_rate": 2.9805666666666668e-05, "loss": 0.0155, "step": 10586 }, { "epoch": 11.68580894533407, "grad_norm": 0.2875365614891052, "learning_rate": 2.9805333333333334e-05, "loss": 0.0147, "step": 10587 }, { "epoch": 11.68691330756488, "grad_norm": 0.6086956262588501, "learning_rate": 2.9805000000000003e-05, "loss": 0.0147, "step": 10588 }, { "epoch": 11.688017669795693, "grad_norm": 0.25251826643943787, "learning_rate": 2.9804666666666666e-05, "loss": 0.0429, "step": 10589 }, { "epoch": 11.689122032026505, "grad_norm": 0.43569624423980713, "learning_rate": 2.9804333333333335e-05, "loss": 0.0139, "step": 10590 }, { 
"epoch": 11.690226394257316, "grad_norm": 0.4064677953720093, "learning_rate": 2.9804e-05, "loss": 0.0315, "step": 10591 }, { "epoch": 11.691330756488128, "grad_norm": 0.24581865966320038, "learning_rate": 2.9803666666666667e-05, "loss": 0.0099, "step": 10592 }, { "epoch": 11.69243511871894, "grad_norm": 0.33803871273994446, "learning_rate": 2.9803333333333333e-05, "loss": 0.0146, "step": 10593 }, { "epoch": 11.693539480949752, "grad_norm": 0.21650457382202148, "learning_rate": 2.9803e-05, "loss": 0.0072, "step": 10594 }, { "epoch": 11.694643843180563, "grad_norm": 0.4241422712802887, "learning_rate": 2.980266666666667e-05, "loss": 0.0281, "step": 10595 }, { "epoch": 11.695748205411375, "grad_norm": 0.36368927359580994, "learning_rate": 2.9802333333333334e-05, "loss": 0.0149, "step": 10596 }, { "epoch": 11.696852567642187, "grad_norm": 0.623196005821228, "learning_rate": 2.9802e-05, "loss": 0.015, "step": 10597 }, { "epoch": 11.697956929872998, "grad_norm": 0.2840486168861389, "learning_rate": 2.9801666666666666e-05, "loss": 0.0149, "step": 10598 }, { "epoch": 11.69906129210381, "grad_norm": 0.3031231760978699, "learning_rate": 2.9801333333333336e-05, "loss": 0.0151, "step": 10599 }, { "epoch": 11.700165654334622, "grad_norm": 0.2115917056798935, "learning_rate": 2.9800999999999998e-05, "loss": 0.0167, "step": 10600 }, { "epoch": 11.701270016565433, "grad_norm": 0.5137585401535034, "learning_rate": 2.9800666666666667e-05, "loss": 0.0175, "step": 10601 }, { "epoch": 11.702374378796245, "grad_norm": 0.41196709871292114, "learning_rate": 2.9800333333333333e-05, "loss": 0.0131, "step": 10602 }, { "epoch": 11.703478741027057, "grad_norm": 0.4287760257720947, "learning_rate": 2.98e-05, "loss": 0.0172, "step": 10603 }, { "epoch": 11.70458310325787, "grad_norm": 0.5233548283576965, "learning_rate": 2.979966666666667e-05, "loss": 0.0162, "step": 10604 }, { "epoch": 11.70568746548868, "grad_norm": 0.35322386026382446, "learning_rate": 2.9799333333333335e-05, "loss": 0.0176, 
"step": 10605 }, { "epoch": 11.706791827719492, "grad_norm": 0.3892058730125427, "learning_rate": 2.9799e-05, "loss": 0.0141, "step": 10606 }, { "epoch": 11.707896189950304, "grad_norm": 0.6192851066589355, "learning_rate": 2.9798666666666666e-05, "loss": 0.0248, "step": 10607 }, { "epoch": 11.709000552181115, "grad_norm": 0.42831099033355713, "learning_rate": 2.9798333333333336e-05, "loss": 0.0116, "step": 10608 }, { "epoch": 11.710104914411927, "grad_norm": 0.21050956845283508, "learning_rate": 2.9797999999999998e-05, "loss": 0.012, "step": 10609 }, { "epoch": 11.71120927664274, "grad_norm": 0.3066547214984894, "learning_rate": 2.9797666666666668e-05, "loss": 0.0102, "step": 10610 }, { "epoch": 11.71231363887355, "grad_norm": 0.29205721616744995, "learning_rate": 2.9797333333333337e-05, "loss": 0.0093, "step": 10611 }, { "epoch": 11.713418001104362, "grad_norm": 0.22565250098705292, "learning_rate": 2.9797e-05, "loss": 0.0082, "step": 10612 }, { "epoch": 11.714522363335174, "grad_norm": 0.6268487572669983, "learning_rate": 2.979666666666667e-05, "loss": 0.0212, "step": 10613 }, { "epoch": 11.715626725565986, "grad_norm": 0.4117225706577301, "learning_rate": 2.9796333333333335e-05, "loss": 0.0129, "step": 10614 }, { "epoch": 11.716731087796797, "grad_norm": 0.20658467710018158, "learning_rate": 2.9796e-05, "loss": 0.0068, "step": 10615 }, { "epoch": 11.717835450027609, "grad_norm": 0.6413034200668335, "learning_rate": 2.9795666666666667e-05, "loss": 0.0389, "step": 10616 }, { "epoch": 11.718939812258421, "grad_norm": 0.9634369611740112, "learning_rate": 2.9795333333333336e-05, "loss": 0.2879, "step": 10617 }, { "epoch": 11.720044174489232, "grad_norm": 0.7606679797172546, "learning_rate": 2.9795e-05, "loss": 0.1403, "step": 10618 }, { "epoch": 11.721148536720044, "grad_norm": 0.7268904447555542, "learning_rate": 2.9794666666666668e-05, "loss": 0.1276, "step": 10619 }, { "epoch": 11.722252898950856, "grad_norm": 0.41172152757644653, "learning_rate": 
2.9794333333333337e-05, "loss": 0.0785, "step": 10620 }, { "epoch": 11.723357261181668, "grad_norm": 0.7982465028762817, "learning_rate": 2.9794e-05, "loss": 0.1413, "step": 10621 }, { "epoch": 11.724461623412479, "grad_norm": 0.5681676268577576, "learning_rate": 2.979366666666667e-05, "loss": 0.0793, "step": 10622 }, { "epoch": 11.725565985643291, "grad_norm": 0.33985838294029236, "learning_rate": 2.979333333333333e-05, "loss": 0.0605, "step": 10623 }, { "epoch": 11.726670347874103, "grad_norm": 0.3026956021785736, "learning_rate": 2.9793e-05, "loss": 0.0404, "step": 10624 }, { "epoch": 11.727774710104914, "grad_norm": 0.3221200406551361, "learning_rate": 2.9792666666666667e-05, "loss": 0.0378, "step": 10625 }, { "epoch": 11.728879072335726, "grad_norm": 0.3417893052101135, "learning_rate": 2.9792333333333333e-05, "loss": 0.0281, "step": 10626 }, { "epoch": 11.729983434566538, "grad_norm": 0.2549474835395813, "learning_rate": 2.9792e-05, "loss": 0.0209, "step": 10627 }, { "epoch": 11.73108779679735, "grad_norm": 0.24440254271030426, "learning_rate": 2.9791666666666668e-05, "loss": 0.0244, "step": 10628 }, { "epoch": 11.73219215902816, "grad_norm": 0.6509816646575928, "learning_rate": 2.9791333333333334e-05, "loss": 0.0454, "step": 10629 }, { "epoch": 11.733296521258973, "grad_norm": 0.5200350284576416, "learning_rate": 2.9791e-05, "loss": 0.0564, "step": 10630 }, { "epoch": 11.734400883489785, "grad_norm": 0.44748440384864807, "learning_rate": 2.979066666666667e-05, "loss": 0.0226, "step": 10631 }, { "epoch": 11.735505245720596, "grad_norm": 0.3630926012992859, "learning_rate": 2.9790333333333332e-05, "loss": 0.0235, "step": 10632 }, { "epoch": 11.736609607951408, "grad_norm": 0.41253530979156494, "learning_rate": 2.979e-05, "loss": 0.0164, "step": 10633 }, { "epoch": 11.73771397018222, "grad_norm": 0.3746229410171509, "learning_rate": 2.9789666666666667e-05, "loss": 0.0219, "step": 10634 }, { "epoch": 11.738818332413032, "grad_norm": 0.23562656342983246, 
"learning_rate": 2.9789333333333333e-05, "loss": 0.0165, "step": 10635 }, { "epoch": 11.739922694643843, "grad_norm": 0.3581397831439972, "learning_rate": 2.9789000000000002e-05, "loss": 0.016, "step": 10636 }, { "epoch": 11.741027056874655, "grad_norm": 0.3316144347190857, "learning_rate": 2.9788666666666668e-05, "loss": 0.0138, "step": 10637 }, { "epoch": 11.742131419105467, "grad_norm": 0.3373295068740845, "learning_rate": 2.9788333333333334e-05, "loss": 0.0177, "step": 10638 }, { "epoch": 11.743235781336278, "grad_norm": 0.3654860854148865, "learning_rate": 2.9788e-05, "loss": 0.0192, "step": 10639 }, { "epoch": 11.74434014356709, "grad_norm": 0.299645334482193, "learning_rate": 2.978766666666667e-05, "loss": 0.0131, "step": 10640 }, { "epoch": 11.745444505797902, "grad_norm": 0.4031473398208618, "learning_rate": 2.9787333333333332e-05, "loss": 0.017, "step": 10641 }, { "epoch": 11.746548868028713, "grad_norm": 0.30295252799987793, "learning_rate": 2.9787e-05, "loss": 0.0126, "step": 10642 }, { "epoch": 11.747653230259525, "grad_norm": 0.21534429490566254, "learning_rate": 2.9786666666666667e-05, "loss": 0.0093, "step": 10643 }, { "epoch": 11.748757592490337, "grad_norm": 0.18071413040161133, "learning_rate": 2.9786333333333333e-05, "loss": 0.0094, "step": 10644 }, { "epoch": 11.74986195472115, "grad_norm": 1.3061254024505615, "learning_rate": 2.9786000000000002e-05, "loss": 0.0147, "step": 10645 }, { "epoch": 11.75096631695196, "grad_norm": 0.24254658818244934, "learning_rate": 2.978566666666667e-05, "loss": 0.0122, "step": 10646 }, { "epoch": 11.752070679182772, "grad_norm": 0.4192154109477997, "learning_rate": 2.9785333333333334e-05, "loss": 0.0109, "step": 10647 }, { "epoch": 11.753175041413584, "grad_norm": 0.320049911737442, "learning_rate": 2.9785e-05, "loss": 0.0214, "step": 10648 }, { "epoch": 11.754279403644395, "grad_norm": 0.4484023153781891, "learning_rate": 2.978466666666667e-05, "loss": 0.0187, "step": 10649 }, { "epoch": 11.755383765875207, 
"grad_norm": 0.3886108994483948, "learning_rate": 2.9784333333333332e-05, "loss": 0.0147, "step": 10650 }, { "epoch": 11.75648812810602, "grad_norm": 0.5382497906684875, "learning_rate": 2.9784e-05, "loss": 0.0198, "step": 10651 }, { "epoch": 11.75759249033683, "grad_norm": 0.726876974105835, "learning_rate": 2.9783666666666667e-05, "loss": 0.0196, "step": 10652 }, { "epoch": 11.758696852567642, "grad_norm": 0.30381691455841064, "learning_rate": 2.9783333333333333e-05, "loss": 0.014, "step": 10653 }, { "epoch": 11.759801214798454, "grad_norm": 0.3062939941883087, "learning_rate": 2.9783000000000003e-05, "loss": 0.0103, "step": 10654 }, { "epoch": 11.760905577029266, "grad_norm": 0.34590306878089905, "learning_rate": 2.9782666666666665e-05, "loss": 0.0219, "step": 10655 }, { "epoch": 11.762009939260077, "grad_norm": 0.30890393257141113, "learning_rate": 2.9782333333333335e-05, "loss": 0.0145, "step": 10656 }, { "epoch": 11.763114301490889, "grad_norm": 0.42709094285964966, "learning_rate": 2.9782e-05, "loss": 0.0162, "step": 10657 }, { "epoch": 11.764218663721701, "grad_norm": 0.2537758946418762, "learning_rate": 2.9781666666666666e-05, "loss": 0.0112, "step": 10658 }, { "epoch": 11.765323025952512, "grad_norm": 0.5451471209526062, "learning_rate": 2.9781333333333332e-05, "loss": 0.0173, "step": 10659 }, { "epoch": 11.766427388183324, "grad_norm": 0.6259496212005615, "learning_rate": 2.9781e-05, "loss": 0.0214, "step": 10660 }, { "epoch": 11.767531750414136, "grad_norm": 0.38949376344680786, "learning_rate": 2.9780666666666668e-05, "loss": 0.0171, "step": 10661 }, { "epoch": 11.768636112644948, "grad_norm": 0.9376842379570007, "learning_rate": 2.9780333333333334e-05, "loss": 0.0196, "step": 10662 }, { "epoch": 11.769740474875759, "grad_norm": 0.546678900718689, "learning_rate": 2.9780000000000003e-05, "loss": 0.0409, "step": 10663 }, { "epoch": 11.770844837106571, "grad_norm": 0.3363053798675537, "learning_rate": 2.9779666666666665e-05, "loss": 0.0184, "step": 10664 
}, { "epoch": 11.771949199337383, "grad_norm": 0.4696354866027832, "learning_rate": 2.9779333333333335e-05, "loss": 0.0207, "step": 10665 }, { "epoch": 11.773053561568194, "grad_norm": 0.2828487157821655, "learning_rate": 2.9779e-05, "loss": 0.0117, "step": 10666 }, { "epoch": 11.774157923799006, "grad_norm": 0.7946391701698303, "learning_rate": 2.9778666666666667e-05, "loss": 0.2616, "step": 10667 }, { "epoch": 11.775262286029818, "grad_norm": 0.6181994080543518, "learning_rate": 2.9778333333333333e-05, "loss": 0.1518, "step": 10668 }, { "epoch": 11.77636664826063, "grad_norm": 0.6638982892036438, "learning_rate": 2.9778000000000002e-05, "loss": 0.1208, "step": 10669 }, { "epoch": 11.77747101049144, "grad_norm": 0.6651649475097656, "learning_rate": 2.9777666666666668e-05, "loss": 0.141, "step": 10670 }, { "epoch": 11.778575372722253, "grad_norm": 0.5384956002235413, "learning_rate": 2.9777333333333334e-05, "loss": 0.0658, "step": 10671 }, { "epoch": 11.779679734953065, "grad_norm": 0.46583104133605957, "learning_rate": 2.9777000000000003e-05, "loss": 0.0674, "step": 10672 }, { "epoch": 11.780784097183876, "grad_norm": 0.6728284955024719, "learning_rate": 2.9776666666666666e-05, "loss": 0.0623, "step": 10673 }, { "epoch": 11.781888459414688, "grad_norm": 0.3671974241733551, "learning_rate": 2.9776333333333335e-05, "loss": 0.0549, "step": 10674 }, { "epoch": 11.7829928216455, "grad_norm": 0.7063782811164856, "learning_rate": 2.9776e-05, "loss": 0.131, "step": 10675 }, { "epoch": 11.78409718387631, "grad_norm": 0.29803118109703064, "learning_rate": 2.9775666666666667e-05, "loss": 0.0279, "step": 10676 }, { "epoch": 11.785201546107123, "grad_norm": 0.366272509098053, "learning_rate": 2.9775333333333333e-05, "loss": 0.0295, "step": 10677 }, { "epoch": 11.786305908337935, "grad_norm": 0.3544297218322754, "learning_rate": 2.9775000000000002e-05, "loss": 0.0582, "step": 10678 }, { "epoch": 11.787410270568747, "grad_norm": 0.2532097101211548, "learning_rate": 
2.9774666666666668e-05, "loss": 0.0235, "step": 10679 }, { "epoch": 11.788514632799558, "grad_norm": 0.228495791554451, "learning_rate": 2.9774333333333334e-05, "loss": 0.0166, "step": 10680 }, { "epoch": 11.78961899503037, "grad_norm": 0.2690788209438324, "learning_rate": 2.9774000000000003e-05, "loss": 0.0202, "step": 10681 }, { "epoch": 11.790723357261182, "grad_norm": 0.3244471549987793, "learning_rate": 2.9773666666666666e-05, "loss": 0.019, "step": 10682 }, { "epoch": 11.791827719491993, "grad_norm": 0.36039260029792786, "learning_rate": 2.9773333333333335e-05, "loss": 0.0148, "step": 10683 }, { "epoch": 11.792932081722805, "grad_norm": 0.23287296295166016, "learning_rate": 2.9772999999999998e-05, "loss": 0.0137, "step": 10684 }, { "epoch": 11.794036443953617, "grad_norm": 0.1915566623210907, "learning_rate": 2.9772666666666667e-05, "loss": 0.0161, "step": 10685 }, { "epoch": 11.795140806184428, "grad_norm": 0.19199346005916595, "learning_rate": 2.9772333333333336e-05, "loss": 0.0111, "step": 10686 }, { "epoch": 11.79624516841524, "grad_norm": 0.7875878810882568, "learning_rate": 2.9772e-05, "loss": 0.0156, "step": 10687 }, { "epoch": 11.797349530646052, "grad_norm": 0.4129694402217865, "learning_rate": 2.9771666666666668e-05, "loss": 0.0232, "step": 10688 }, { "epoch": 11.798453892876864, "grad_norm": 0.546676516532898, "learning_rate": 2.9771333333333334e-05, "loss": 0.0164, "step": 10689 }, { "epoch": 11.799558255107675, "grad_norm": 0.16855396330356598, "learning_rate": 2.9771e-05, "loss": 0.0091, "step": 10690 }, { "epoch": 11.800662617338487, "grad_norm": 0.2374776303768158, "learning_rate": 2.9770666666666666e-05, "loss": 0.0191, "step": 10691 }, { "epoch": 11.8017669795693, "grad_norm": 0.33741500973701477, "learning_rate": 2.9770333333333335e-05, "loss": 0.0129, "step": 10692 }, { "epoch": 11.80287134180011, "grad_norm": 0.2742571532726288, "learning_rate": 2.9769999999999998e-05, "loss": 0.0102, "step": 10693 }, { "epoch": 11.803975704030922, 
"grad_norm": 0.19194810092449188, "learning_rate": 2.9769666666666667e-05, "loss": 0.0062, "step": 10694 }, { "epoch": 11.805080066261734, "grad_norm": 0.3019082546234131, "learning_rate": 2.9769333333333337e-05, "loss": 0.0201, "step": 10695 }, { "epoch": 11.806184428492546, "grad_norm": 0.21784482896327972, "learning_rate": 2.9769e-05, "loss": 0.0106, "step": 10696 }, { "epoch": 11.807288790723357, "grad_norm": 0.33252426981925964, "learning_rate": 2.976866666666667e-05, "loss": 0.0174, "step": 10697 }, { "epoch": 11.808393152954169, "grad_norm": 0.39105936884880066, "learning_rate": 2.9768333333333334e-05, "loss": 0.0131, "step": 10698 }, { "epoch": 11.809497515184981, "grad_norm": 0.36326125264167786, "learning_rate": 2.9768e-05, "loss": 0.0186, "step": 10699 }, { "epoch": 11.810601877415792, "grad_norm": 0.26232069730758667, "learning_rate": 2.9767666666666666e-05, "loss": 0.006, "step": 10700 }, { "epoch": 11.811706239646604, "grad_norm": 0.29227593541145325, "learning_rate": 2.9767333333333336e-05, "loss": 0.0092, "step": 10701 }, { "epoch": 11.812810601877416, "grad_norm": 0.25613558292388916, "learning_rate": 2.9766999999999998e-05, "loss": 0.0115, "step": 10702 }, { "epoch": 11.813914964108228, "grad_norm": 0.4810691177845001, "learning_rate": 2.9766666666666667e-05, "loss": 0.0251, "step": 10703 }, { "epoch": 11.815019326339039, "grad_norm": 0.9852733016014099, "learning_rate": 2.9766333333333337e-05, "loss": 0.0161, "step": 10704 }, { "epoch": 11.816123688569851, "grad_norm": 0.2568093538284302, "learning_rate": 2.9766e-05, "loss": 0.0124, "step": 10705 }, { "epoch": 11.817228050800663, "grad_norm": 0.7217084169387817, "learning_rate": 2.976566666666667e-05, "loss": 0.0199, "step": 10706 }, { "epoch": 11.818332413031474, "grad_norm": 0.28780117630958557, "learning_rate": 2.9765333333333335e-05, "loss": 0.0104, "step": 10707 }, { "epoch": 11.819436775262286, "grad_norm": 0.9623191356658936, "learning_rate": 2.9765e-05, "loss": 0.0204, "step": 10708 }, { 
"epoch": 11.820541137493098, "grad_norm": 0.20642797648906708, "learning_rate": 2.9764666666666666e-05, "loss": 0.013, "step": 10709 }, { "epoch": 11.821645499723909, "grad_norm": 0.1847565472126007, "learning_rate": 2.9764333333333336e-05, "loss": 0.0123, "step": 10710 }, { "epoch": 11.82274986195472, "grad_norm": 0.3615695536136627, "learning_rate": 2.9764e-05, "loss": 0.015, "step": 10711 }, { "epoch": 11.823854224185533, "grad_norm": 0.41047540307044983, "learning_rate": 2.9763666666666668e-05, "loss": 0.0205, "step": 10712 }, { "epoch": 11.824958586416345, "grad_norm": 0.5034154653549194, "learning_rate": 2.9763333333333337e-05, "loss": 0.0181, "step": 10713 }, { "epoch": 11.826062948647156, "grad_norm": 0.3696519732475281, "learning_rate": 2.9763e-05, "loss": 0.0127, "step": 10714 }, { "epoch": 11.827167310877968, "grad_norm": 0.212185800075531, "learning_rate": 2.976266666666667e-05, "loss": 0.0138, "step": 10715 }, { "epoch": 11.82827167310878, "grad_norm": 0.3036418855190277, "learning_rate": 2.976233333333333e-05, "loss": 0.0135, "step": 10716 }, { "epoch": 11.82937603533959, "grad_norm": 0.8271763920783997, "learning_rate": 2.9762e-05, "loss": 0.2625, "step": 10717 }, { "epoch": 11.830480397570403, "grad_norm": 0.533197283744812, "learning_rate": 2.9761666666666667e-05, "loss": 0.1442, "step": 10718 }, { "epoch": 11.831584759801215, "grad_norm": 0.5198708176612854, "learning_rate": 2.9761333333333333e-05, "loss": 0.1633, "step": 10719 }, { "epoch": 11.832689122032026, "grad_norm": 0.4230818748474121, "learning_rate": 2.9761000000000002e-05, "loss": 0.0949, "step": 10720 }, { "epoch": 11.833793484262838, "grad_norm": 0.4315846562385559, "learning_rate": 2.9760666666666668e-05, "loss": 0.064, "step": 10721 }, { "epoch": 11.83489784649365, "grad_norm": 0.3685598075389862, "learning_rate": 2.9760333333333334e-05, "loss": 0.075, "step": 10722 }, { "epoch": 11.836002208724462, "grad_norm": 0.4033384621143341, "learning_rate": 2.976e-05, "loss": 0.0869, "step": 
10723 }, { "epoch": 11.837106570955273, "grad_norm": 0.3884800970554352, "learning_rate": 2.975966666666667e-05, "loss": 0.0586, "step": 10724 }, { "epoch": 11.838210933186085, "grad_norm": 0.5537794828414917, "learning_rate": 2.975933333333333e-05, "loss": 0.1153, "step": 10725 }, { "epoch": 11.839315295416897, "grad_norm": 0.33509308099746704, "learning_rate": 2.9759e-05, "loss": 0.0391, "step": 10726 }, { "epoch": 11.840419657647708, "grad_norm": 0.2807404398918152, "learning_rate": 2.9758666666666667e-05, "loss": 0.0166, "step": 10727 }, { "epoch": 11.84152401987852, "grad_norm": 0.3624802529811859, "learning_rate": 2.9758333333333333e-05, "loss": 0.0168, "step": 10728 }, { "epoch": 11.842628382109332, "grad_norm": 0.13331346213817596, "learning_rate": 2.9758000000000002e-05, "loss": 0.0161, "step": 10729 }, { "epoch": 11.843732744340144, "grad_norm": 0.3117980360984802, "learning_rate": 2.9757666666666668e-05, "loss": 0.0432, "step": 10730 }, { "epoch": 11.844837106570955, "grad_norm": 0.2446688860654831, "learning_rate": 2.9757333333333334e-05, "loss": 0.0166, "step": 10731 }, { "epoch": 11.845941468801767, "grad_norm": 0.205571249127388, "learning_rate": 2.9757e-05, "loss": 0.0167, "step": 10732 }, { "epoch": 11.84704583103258, "grad_norm": 0.21498271822929382, "learning_rate": 2.975666666666667e-05, "loss": 0.0155, "step": 10733 }, { "epoch": 11.84815019326339, "grad_norm": 0.3548816740512848, "learning_rate": 2.9756333333333332e-05, "loss": 0.0228, "step": 10734 }, { "epoch": 11.849254555494202, "grad_norm": 0.13909241557121277, "learning_rate": 2.9756e-05, "loss": 0.0089, "step": 10735 }, { "epoch": 11.850358917725014, "grad_norm": 0.3214361071586609, "learning_rate": 2.975566666666667e-05, "loss": 0.0196, "step": 10736 }, { "epoch": 11.851463279955826, "grad_norm": 0.19594867527484894, "learning_rate": 2.9755333333333333e-05, "loss": 0.0066, "step": 10737 }, { "epoch": 11.852567642186637, "grad_norm": 0.2941429018974304, "learning_rate": 
2.9755000000000002e-05, "loss": 0.0115, "step": 10738 }, { "epoch": 11.853672004417449, "grad_norm": 0.5316500067710876, "learning_rate": 2.9754666666666668e-05, "loss": 0.0178, "step": 10739 }, { "epoch": 11.854776366648261, "grad_norm": 0.32729464769363403, "learning_rate": 2.9754333333333334e-05, "loss": 0.0138, "step": 10740 }, { "epoch": 11.855880728879072, "grad_norm": 0.21531282365322113, "learning_rate": 2.9754e-05, "loss": 0.0093, "step": 10741 }, { "epoch": 11.856985091109884, "grad_norm": 0.18055710196495056, "learning_rate": 2.975366666666667e-05, "loss": 0.0077, "step": 10742 }, { "epoch": 11.858089453340696, "grad_norm": 0.5552669763565063, "learning_rate": 2.9753333333333332e-05, "loss": 0.0105, "step": 10743 }, { "epoch": 11.859193815571507, "grad_norm": 0.27101948857307434, "learning_rate": 2.9753e-05, "loss": 0.0186, "step": 10744 }, { "epoch": 11.860298177802319, "grad_norm": 0.28004470467567444, "learning_rate": 2.9752666666666667e-05, "loss": 0.0221, "step": 10745 }, { "epoch": 11.861402540033131, "grad_norm": 0.24404433369636536, "learning_rate": 2.9752333333333333e-05, "loss": 0.0132, "step": 10746 }, { "epoch": 11.862506902263943, "grad_norm": 0.3742729723453522, "learning_rate": 2.9752000000000002e-05, "loss": 0.0164, "step": 10747 }, { "epoch": 11.863611264494754, "grad_norm": 0.4564281105995178, "learning_rate": 2.9751666666666665e-05, "loss": 0.0151, "step": 10748 }, { "epoch": 11.864715626725566, "grad_norm": 0.3204229474067688, "learning_rate": 2.9751333333333334e-05, "loss": 0.0128, "step": 10749 }, { "epoch": 11.865819988956378, "grad_norm": 0.11091328412294388, "learning_rate": 2.9751e-05, "loss": 0.0076, "step": 10750 }, { "epoch": 11.866924351187189, "grad_norm": 0.2446392923593521, "learning_rate": 2.9750666666666666e-05, "loss": 0.0103, "step": 10751 }, { "epoch": 11.868028713418001, "grad_norm": 0.5788320899009705, "learning_rate": 2.9750333333333332e-05, "loss": 0.0173, "step": 10752 }, { "epoch": 11.869133075648813, 
"grad_norm": 0.4122682511806488, "learning_rate": 2.975e-05, "loss": 0.0234, "step": 10753 }, { "epoch": 11.870237437879624, "grad_norm": 0.3570205569267273, "learning_rate": 2.9749666666666667e-05, "loss": 0.0122, "step": 10754 }, { "epoch": 11.871341800110436, "grad_norm": 0.32447755336761475, "learning_rate": 2.9749333333333333e-05, "loss": 0.0151, "step": 10755 }, { "epoch": 11.872446162341248, "grad_norm": 0.2829029858112335, "learning_rate": 2.9749000000000003e-05, "loss": 0.0131, "step": 10756 }, { "epoch": 11.87355052457206, "grad_norm": 0.2793533205986023, "learning_rate": 2.9748666666666665e-05, "loss": 0.0146, "step": 10757 }, { "epoch": 11.87465488680287, "grad_norm": 0.42196157574653625, "learning_rate": 2.9748333333333335e-05, "loss": 0.0181, "step": 10758 }, { "epoch": 11.875759249033683, "grad_norm": 0.264934778213501, "learning_rate": 2.9748e-05, "loss": 0.0098, "step": 10759 }, { "epoch": 11.876863611264495, "grad_norm": 0.24015910923480988, "learning_rate": 2.9747666666666666e-05, "loss": 0.0103, "step": 10760 }, { "epoch": 11.877967973495306, "grad_norm": 0.14733590185642242, "learning_rate": 2.9747333333333336e-05, "loss": 0.0069, "step": 10761 }, { "epoch": 11.879072335726118, "grad_norm": 0.33733028173446655, "learning_rate": 2.9747e-05, "loss": 0.0171, "step": 10762 }, { "epoch": 11.88017669795693, "grad_norm": 0.4557478725910187, "learning_rate": 2.9746666666666668e-05, "loss": 0.0193, "step": 10763 }, { "epoch": 11.881281060187742, "grad_norm": 0.6456701159477234, "learning_rate": 2.9746333333333334e-05, "loss": 0.0244, "step": 10764 }, { "epoch": 11.882385422418553, "grad_norm": 0.4947403073310852, "learning_rate": 2.9746000000000003e-05, "loss": 0.0152, "step": 10765 }, { "epoch": 11.883489784649365, "grad_norm": 1.036176323890686, "learning_rate": 2.9745666666666665e-05, "loss": 0.0256, "step": 10766 }, { "epoch": 11.884594146880177, "grad_norm": 2.28226900100708, "learning_rate": 2.9745333333333335e-05, "loss": 0.2409, "step": 10767 }, 
{ "epoch": 11.885698509110988, "grad_norm": 0.537449061870575, "learning_rate": 2.9745e-05, "loss": 0.1681, "step": 10768 }, { "epoch": 11.8868028713418, "grad_norm": 0.509572446346283, "learning_rate": 2.9744666666666667e-05, "loss": 0.1732, "step": 10769 }, { "epoch": 11.887907233572612, "grad_norm": 0.5080261826515198, "learning_rate": 2.9744333333333336e-05, "loss": 0.094, "step": 10770 }, { "epoch": 11.889011595803424, "grad_norm": 0.5095340013504028, "learning_rate": 2.9744000000000002e-05, "loss": 0.1007, "step": 10771 }, { "epoch": 11.890115958034235, "grad_norm": 0.7017920613288879, "learning_rate": 2.9743666666666668e-05, "loss": 0.1154, "step": 10772 }, { "epoch": 11.891220320265047, "grad_norm": 0.6438367366790771, "learning_rate": 2.9743333333333334e-05, "loss": 0.1047, "step": 10773 }, { "epoch": 11.89232468249586, "grad_norm": 1.5704890489578247, "learning_rate": 2.9743000000000003e-05, "loss": 0.0497, "step": 10774 }, { "epoch": 11.89342904472667, "grad_norm": 0.3654078543186188, "learning_rate": 2.9742666666666666e-05, "loss": 0.0301, "step": 10775 }, { "epoch": 11.894533406957482, "grad_norm": 0.21841110289096832, "learning_rate": 2.9742333333333335e-05, "loss": 0.0186, "step": 10776 }, { "epoch": 11.895637769188294, "grad_norm": 0.532241940498352, "learning_rate": 2.9742e-05, "loss": 0.0142, "step": 10777 }, { "epoch": 11.896742131419105, "grad_norm": 0.4532981514930725, "learning_rate": 2.9741666666666667e-05, "loss": 0.0182, "step": 10778 }, { "epoch": 11.897846493649917, "grad_norm": 0.15847256779670715, "learning_rate": 2.9741333333333336e-05, "loss": 0.016, "step": 10779 }, { "epoch": 11.898950855880729, "grad_norm": 0.24344564974308014, "learning_rate": 2.9741e-05, "loss": 0.02, "step": 10780 }, { "epoch": 11.900055218111541, "grad_norm": 0.3328177034854889, "learning_rate": 2.9740666666666668e-05, "loss": 0.0172, "step": 10781 }, { "epoch": 11.901159580342352, "grad_norm": 0.612175703048706, "learning_rate": 2.9740333333333334e-05, "loss": 
0.0156, "step": 10782 }, { "epoch": 11.902263942573164, "grad_norm": 0.3256179094314575, "learning_rate": 2.974e-05, "loss": 0.0151, "step": 10783 }, { "epoch": 11.903368304803976, "grad_norm": 0.20825442671775818, "learning_rate": 2.9739666666666666e-05, "loss": 0.0147, "step": 10784 }, { "epoch": 11.904472667034787, "grad_norm": 0.21798206865787506, "learning_rate": 2.9739333333333335e-05, "loss": 0.0163, "step": 10785 }, { "epoch": 11.905577029265599, "grad_norm": 0.21946537494659424, "learning_rate": 2.9739e-05, "loss": 0.0146, "step": 10786 }, { "epoch": 11.906681391496411, "grad_norm": 0.32906073331832886, "learning_rate": 2.9738666666666667e-05, "loss": 0.0153, "step": 10787 }, { "epoch": 11.907785753727222, "grad_norm": 0.3047913610935211, "learning_rate": 2.9738333333333336e-05, "loss": 0.0131, "step": 10788 }, { "epoch": 11.908890115958034, "grad_norm": 0.3166041374206543, "learning_rate": 2.9738e-05, "loss": 0.0152, "step": 10789 }, { "epoch": 11.909994478188846, "grad_norm": 0.20567995309829712, "learning_rate": 2.9737666666666668e-05, "loss": 0.0136, "step": 10790 }, { "epoch": 11.911098840419658, "grad_norm": 0.27832087874412537, "learning_rate": 2.9737333333333334e-05, "loss": 0.0158, "step": 10791 }, { "epoch": 11.912203202650469, "grad_norm": 0.3276981711387634, "learning_rate": 2.9737e-05, "loss": 0.0246, "step": 10792 }, { "epoch": 11.913307564881281, "grad_norm": 0.42688795924186707, "learning_rate": 2.9736666666666666e-05, "loss": 0.0185, "step": 10793 }, { "epoch": 11.914411927112093, "grad_norm": 0.8351304531097412, "learning_rate": 2.9736333333333335e-05, "loss": 0.0135, "step": 10794 }, { "epoch": 11.915516289342904, "grad_norm": 0.2284965068101883, "learning_rate": 2.9736e-05, "loss": 0.0095, "step": 10795 }, { "epoch": 11.916620651573716, "grad_norm": 0.2501789927482605, "learning_rate": 2.9735666666666667e-05, "loss": 0.0112, "step": 10796 }, { "epoch": 11.917725013804528, "grad_norm": 0.2891710102558136, "learning_rate": 
2.9735333333333337e-05, "loss": 0.0136, "step": 10797 }, { "epoch": 11.91882937603534, "grad_norm": 0.24197310209274292, "learning_rate": 2.9735e-05, "loss": 0.0116, "step": 10798 }, { "epoch": 11.91993373826615, "grad_norm": 0.13885238766670227, "learning_rate": 2.973466666666667e-05, "loss": 0.006, "step": 10799 }, { "epoch": 11.921038100496963, "grad_norm": 0.3414803743362427, "learning_rate": 2.9734333333333334e-05, "loss": 0.0103, "step": 10800 }, { "epoch": 11.922142462727775, "grad_norm": 0.9046513438224792, "learning_rate": 2.9734e-05, "loss": 0.0164, "step": 10801 }, { "epoch": 11.923246824958586, "grad_norm": 0.2018837332725525, "learning_rate": 2.9733666666666666e-05, "loss": 0.0096, "step": 10802 }, { "epoch": 11.924351187189398, "grad_norm": 0.34094905853271484, "learning_rate": 2.9733333333333336e-05, "loss": 0.0113, "step": 10803 }, { "epoch": 11.92545554942021, "grad_norm": 0.4339217245578766, "learning_rate": 2.9733e-05, "loss": 0.02, "step": 10804 }, { "epoch": 11.926559911651022, "grad_norm": 0.6453844904899597, "learning_rate": 2.9732666666666667e-05, "loss": 0.025, "step": 10805 }, { "epoch": 11.927664273881833, "grad_norm": 0.40684038400650024, "learning_rate": 2.9732333333333333e-05, "loss": 0.0078, "step": 10806 }, { "epoch": 11.928768636112645, "grad_norm": 0.2605142891407013, "learning_rate": 2.9732e-05, "loss": 0.0163, "step": 10807 }, { "epoch": 11.929872998343457, "grad_norm": 0.7246981263160706, "learning_rate": 2.973166666666667e-05, "loss": 0.0256, "step": 10808 }, { "epoch": 11.930977360574268, "grad_norm": 0.24389208853244781, "learning_rate": 2.973133333333333e-05, "loss": 0.0101, "step": 10809 }, { "epoch": 11.93208172280508, "grad_norm": 0.6815745830535889, "learning_rate": 2.9731e-05, "loss": 0.0172, "step": 10810 }, { "epoch": 11.933186085035892, "grad_norm": 0.2780747711658478, "learning_rate": 2.973066666666667e-05, "loss": 0.0172, "step": 10811 }, { "epoch": 11.934290447266704, "grad_norm": 0.24011778831481934, 
"learning_rate": 2.9730333333333332e-05, "loss": 0.0275, "step": 10812 }, { "epoch": 11.935394809497515, "grad_norm": 0.4006267786026001, "learning_rate": 2.973e-05, "loss": 0.0165, "step": 10813 }, { "epoch": 11.936499171728327, "grad_norm": 0.4532983899116516, "learning_rate": 2.9729666666666668e-05, "loss": 0.0182, "step": 10814 }, { "epoch": 11.93760353395914, "grad_norm": 0.5258052349090576, "learning_rate": 2.9729333333333334e-05, "loss": 0.0328, "step": 10815 }, { "epoch": 11.93870789618995, "grad_norm": 0.6139957904815674, "learning_rate": 2.9729e-05, "loss": 0.0115, "step": 10816 }, { "epoch": 11.939812258420762, "grad_norm": 1.1003949642181396, "learning_rate": 2.972866666666667e-05, "loss": 0.1721, "step": 10817 }, { "epoch": 11.940916620651574, "grad_norm": 0.589408278465271, "learning_rate": 2.972833333333333e-05, "loss": 0.1819, "step": 10818 }, { "epoch": 11.942020982882385, "grad_norm": 0.7728513479232788, "learning_rate": 2.9728e-05, "loss": 0.1294, "step": 10819 }, { "epoch": 11.943125345113197, "grad_norm": 0.4694587290287018, "learning_rate": 2.972766666666667e-05, "loss": 0.0993, "step": 10820 }, { "epoch": 11.94422970734401, "grad_norm": 0.6037648320198059, "learning_rate": 2.9727333333333333e-05, "loss": 0.1069, "step": 10821 }, { "epoch": 11.945334069574821, "grad_norm": 1.0407114028930664, "learning_rate": 2.9727000000000002e-05, "loss": 0.0793, "step": 10822 }, { "epoch": 11.946438431805632, "grad_norm": 0.5790600180625916, "learning_rate": 2.9726666666666668e-05, "loss": 0.0585, "step": 10823 }, { "epoch": 11.947542794036444, "grad_norm": 0.5545027256011963, "learning_rate": 2.9726333333333334e-05, "loss": 0.0492, "step": 10824 }, { "epoch": 11.948647156267256, "grad_norm": 0.34659460186958313, "learning_rate": 2.9726e-05, "loss": 0.0318, "step": 10825 }, { "epoch": 11.949751518498067, "grad_norm": 0.30373600125312805, "learning_rate": 2.972566666666667e-05, "loss": 0.0256, "step": 10826 }, { "epoch": 11.950855880728879, "grad_norm": 
0.21468815207481384, "learning_rate": 2.9725333333333335e-05, "loss": 0.0195, "step": 10827 }, { "epoch": 11.951960242959691, "grad_norm": 0.4382961690425873, "learning_rate": 2.9725e-05, "loss": 0.0464, "step": 10828 }, { "epoch": 11.953064605190502, "grad_norm": 0.2183656543493271, "learning_rate": 2.972466666666667e-05, "loss": 0.014, "step": 10829 }, { "epoch": 11.954168967421314, "grad_norm": 0.3136356472969055, "learning_rate": 2.9724333333333333e-05, "loss": 0.0225, "step": 10830 }, { "epoch": 11.955273329652126, "grad_norm": 0.30321770906448364, "learning_rate": 2.9724000000000002e-05, "loss": 0.02, "step": 10831 }, { "epoch": 11.956377691882938, "grad_norm": 0.22539184987545013, "learning_rate": 2.9723666666666668e-05, "loss": 0.0131, "step": 10832 }, { "epoch": 11.957482054113749, "grad_norm": 0.205777108669281, "learning_rate": 2.9723333333333334e-05, "loss": 0.0168, "step": 10833 }, { "epoch": 11.958586416344561, "grad_norm": 0.2559376358985901, "learning_rate": 2.9723e-05, "loss": 0.0159, "step": 10834 }, { "epoch": 11.959690778575373, "grad_norm": 0.20028598606586456, "learning_rate": 2.9722666666666666e-05, "loss": 0.0165, "step": 10835 }, { "epoch": 11.960795140806184, "grad_norm": 0.3379865884780884, "learning_rate": 2.9722333333333335e-05, "loss": 0.0246, "step": 10836 }, { "epoch": 11.961899503036996, "grad_norm": 0.2360825538635254, "learning_rate": 2.9722e-05, "loss": 0.011, "step": 10837 }, { "epoch": 11.963003865267808, "grad_norm": 0.1348637491464615, "learning_rate": 2.9721666666666667e-05, "loss": 0.0088, "step": 10838 }, { "epoch": 11.96410822749862, "grad_norm": 0.30055567622184753, "learning_rate": 2.9721333333333333e-05, "loss": 0.0186, "step": 10839 }, { "epoch": 11.96521258972943, "grad_norm": 0.31340473890304565, "learning_rate": 2.9721000000000002e-05, "loss": 0.0096, "step": 10840 }, { "epoch": 11.966316951960243, "grad_norm": 0.41943541169166565, "learning_rate": 2.9720666666666665e-05, "loss": 0.0235, "step": 10841 }, { "epoch": 
11.967421314191055, "grad_norm": 0.5556170344352722, "learning_rate": 2.9720333333333334e-05, "loss": 0.0178, "step": 10842 }, { "epoch": 11.968525676421866, "grad_norm": 0.5986836552619934, "learning_rate": 2.972e-05, "loss": 0.0644, "step": 10843 }, { "epoch": 11.969630038652678, "grad_norm": 0.45647069811820984, "learning_rate": 2.9719666666666666e-05, "loss": 0.0124, "step": 10844 }, { "epoch": 11.97073440088349, "grad_norm": 0.27948155999183655, "learning_rate": 2.9719333333333335e-05, "loss": 0.0143, "step": 10845 }, { "epoch": 11.971838763114302, "grad_norm": 0.3026582896709442, "learning_rate": 2.9719e-05, "loss": 0.0174, "step": 10846 }, { "epoch": 11.972943125345113, "grad_norm": 0.21447892487049103, "learning_rate": 2.9718666666666667e-05, "loss": 0.0124, "step": 10847 }, { "epoch": 11.974047487575925, "grad_norm": 0.28397178649902344, "learning_rate": 2.9718333333333333e-05, "loss": 0.0142, "step": 10848 }, { "epoch": 11.975151849806737, "grad_norm": 0.2006404548883438, "learning_rate": 2.9718000000000002e-05, "loss": 0.0092, "step": 10849 }, { "epoch": 11.976256212037548, "grad_norm": 0.4151577651500702, "learning_rate": 2.9717666666666665e-05, "loss": 0.0191, "step": 10850 }, { "epoch": 11.97736057426836, "grad_norm": 0.3769434690475464, "learning_rate": 2.9717333333333334e-05, "loss": 0.0166, "step": 10851 }, { "epoch": 11.978464936499172, "grad_norm": 0.2807818055152893, "learning_rate": 2.9717e-05, "loss": 0.0201, "step": 10852 }, { "epoch": 11.979569298729983, "grad_norm": 0.23732027411460876, "learning_rate": 2.9716666666666666e-05, "loss": 0.0091, "step": 10853 }, { "epoch": 11.980673660960795, "grad_norm": 0.3097614347934723, "learning_rate": 2.9716333333333336e-05, "loss": 0.0132, "step": 10854 }, { "epoch": 11.981778023191607, "grad_norm": 0.29525673389434814, "learning_rate": 2.9716e-05, "loss": 0.0091, "step": 10855 }, { "epoch": 11.98288238542242, "grad_norm": 0.26320770382881165, "learning_rate": 2.9715666666666667e-05, "loss": 0.0097, 
"step": 10856 }, { "epoch": 11.98398674765323, "grad_norm": 0.15495306253433228, "learning_rate": 2.9715333333333333e-05, "loss": 0.0078, "step": 10857 }, { "epoch": 11.985091109884042, "grad_norm": 0.7283427119255066, "learning_rate": 2.9715000000000003e-05, "loss": 0.0189, "step": 10858 }, { "epoch": 11.986195472114854, "grad_norm": 0.25272881984710693, "learning_rate": 2.9714666666666665e-05, "loss": 0.0102, "step": 10859 }, { "epoch": 11.987299834345665, "grad_norm": 0.29509034752845764, "learning_rate": 2.9714333333333335e-05, "loss": 0.0136, "step": 10860 }, { "epoch": 11.988404196576477, "grad_norm": 0.4341820180416107, "learning_rate": 2.9714000000000004e-05, "loss": 0.0161, "step": 10861 }, { "epoch": 11.98950855880729, "grad_norm": 0.45843207836151123, "learning_rate": 2.9713666666666666e-05, "loss": 0.0321, "step": 10862 }, { "epoch": 11.9906129210381, "grad_norm": 0.3536883294582367, "learning_rate": 2.9713333333333336e-05, "loss": 0.0122, "step": 10863 }, { "epoch": 11.991717283268912, "grad_norm": 0.5730183124542236, "learning_rate": 2.9713e-05, "loss": 0.0145, "step": 10864 }, { "epoch": 11.992821645499724, "grad_norm": 0.590969443321228, "learning_rate": 2.9712666666666668e-05, "loss": 0.0173, "step": 10865 }, { "epoch": 11.993926007730536, "grad_norm": 0.35088688135147095, "learning_rate": 2.9712333333333334e-05, "loss": 0.0118, "step": 10866 }, { "epoch": 11.995030369961347, "grad_norm": 0.8844590187072754, "learning_rate": 2.9712e-05, "loss": 0.1356, "step": 10867 }, { "epoch": 11.996134732192159, "grad_norm": 0.3856687843799591, "learning_rate": 2.9711666666666665e-05, "loss": 0.0329, "step": 10868 }, { "epoch": 11.997239094422971, "grad_norm": 0.3369850516319275, "learning_rate": 2.9711333333333335e-05, "loss": 0.0208, "step": 10869 }, { "epoch": 11.998343456653782, "grad_norm": 0.2584397792816162, "learning_rate": 2.9711e-05, "loss": 0.0109, "step": 10870 }, { "epoch": 11.999447818884594, "grad_norm": 0.7630301713943481, "learning_rate": 
2.9710666666666667e-05, "loss": 0.0317, "step": 10871 }, { "epoch": 12.0, "grad_norm": 0.08194054663181305, "learning_rate": 2.9710333333333336e-05, "loss": 0.0012, "step": 10872 }, { "epoch": 12.001104362230812, "grad_norm": 0.6194990277290344, "learning_rate": 2.971e-05, "loss": 0.2023, "step": 10873 }, { "epoch": 12.002208724461623, "grad_norm": 0.7209044694900513, "learning_rate": 2.9709666666666668e-05, "loss": 0.1776, "step": 10874 }, { "epoch": 12.003313086692435, "grad_norm": 0.47158560156822205, "learning_rate": 2.9709333333333334e-05, "loss": 0.1035, "step": 10875 }, { "epoch": 12.004417448923247, "grad_norm": 0.6640446186065674, "learning_rate": 2.9709e-05, "loss": 0.0981, "step": 10876 }, { "epoch": 12.00552181115406, "grad_norm": 1.5411409139633179, "learning_rate": 2.970866666666667e-05, "loss": 0.0807, "step": 10877 }, { "epoch": 12.00662617338487, "grad_norm": 0.4114743173122406, "learning_rate": 2.9708333333333335e-05, "loss": 0.0788, "step": 10878 }, { "epoch": 12.007730535615682, "grad_norm": 0.35795003175735474, "learning_rate": 2.9708e-05, "loss": 0.0354, "step": 10879 }, { "epoch": 12.008834897846494, "grad_norm": 0.22784359753131866, "learning_rate": 2.9707666666666667e-05, "loss": 0.0316, "step": 10880 }, { "epoch": 12.009939260077305, "grad_norm": 0.3569014370441437, "learning_rate": 2.9707333333333336e-05, "loss": 0.0254, "step": 10881 }, { "epoch": 12.011043622308117, "grad_norm": 0.27047690749168396, "learning_rate": 2.9707e-05, "loss": 0.0279, "step": 10882 }, { "epoch": 12.01214798453893, "grad_norm": 0.6984362006187439, "learning_rate": 2.9706666666666668e-05, "loss": 0.0473, "step": 10883 }, { "epoch": 12.01325234676974, "grad_norm": 0.5089951753616333, "learning_rate": 2.9706333333333334e-05, "loss": 0.0184, "step": 10884 }, { "epoch": 12.014356709000552, "grad_norm": 0.1670721471309662, "learning_rate": 2.9706e-05, "loss": 0.0057, "step": 10885 }, { "epoch": 12.015461071231364, "grad_norm": 0.11167269945144653, "learning_rate": 
2.970566666666667e-05, "loss": 0.004, "step": 10886 }, { "epoch": 12.016565433462176, "grad_norm": 0.1807474046945572, "learning_rate": 2.9705333333333335e-05, "loss": 0.0071, "step": 10887 }, { "epoch": 12.017669795692987, "grad_norm": 0.13102495670318604, "learning_rate": 2.9705e-05, "loss": 0.0062, "step": 10888 }, { "epoch": 12.018774157923799, "grad_norm": 0.2558037340641022, "learning_rate": 2.9704666666666667e-05, "loss": 0.0078, "step": 10889 }, { "epoch": 12.019878520154611, "grad_norm": 0.3118028938770294, "learning_rate": 2.9704333333333336e-05, "loss": 0.0078, "step": 10890 }, { "epoch": 12.020982882385422, "grad_norm": 0.25059419870376587, "learning_rate": 2.9704e-05, "loss": 0.0099, "step": 10891 }, { "epoch": 12.022087244616234, "grad_norm": 0.7136905789375305, "learning_rate": 2.9703666666666668e-05, "loss": 0.0114, "step": 10892 }, { "epoch": 12.023191606847046, "grad_norm": 0.2799716889858246, "learning_rate": 2.9703333333333334e-05, "loss": 0.0118, "step": 10893 }, { "epoch": 12.024295969077858, "grad_norm": 0.3979182243347168, "learning_rate": 2.9703e-05, "loss": 0.0271, "step": 10894 }, { "epoch": 12.025400331308669, "grad_norm": 0.29718905687332153, "learning_rate": 2.970266666666667e-05, "loss": 0.0198, "step": 10895 }, { "epoch": 12.026504693539481, "grad_norm": 0.16417662799358368, "learning_rate": 2.9702333333333332e-05, "loss": 0.0093, "step": 10896 }, { "epoch": 12.027609055770293, "grad_norm": 0.17631705105304718, "learning_rate": 2.9702e-05, "loss": 0.0107, "step": 10897 }, { "epoch": 12.028713418001104, "grad_norm": 0.424761027097702, "learning_rate": 2.9701666666666667e-05, "loss": 0.0131, "step": 10898 }, { "epoch": 12.029817780231916, "grad_norm": 0.1939740628004074, "learning_rate": 2.9701333333333333e-05, "loss": 0.0097, "step": 10899 }, { "epoch": 12.030922142462728, "grad_norm": 0.20125806331634521, "learning_rate": 2.9701e-05, "loss": 0.0131, "step": 10900 }, { "epoch": 12.032026504693539, "grad_norm": 0.28991949558258057, 
"learning_rate": 2.970066666666667e-05, "loss": 0.0081, "step": 10901 }, { "epoch": 12.03313086692435, "grad_norm": 0.17159180343151093, "learning_rate": 2.9700333333333334e-05, "loss": 0.0075, "step": 10902 }, { "epoch": 12.034235229155163, "grad_norm": 0.2913089692592621, "learning_rate": 2.97e-05, "loss": 0.007, "step": 10903 }, { "epoch": 12.035339591385975, "grad_norm": 0.8100084662437439, "learning_rate": 2.969966666666667e-05, "loss": 0.0138, "step": 10904 }, { "epoch": 12.036443953616786, "grad_norm": 0.21453957259655, "learning_rate": 2.9699333333333332e-05, "loss": 0.0075, "step": 10905 }, { "epoch": 12.037548315847598, "grad_norm": 0.4510153830051422, "learning_rate": 2.9699e-05, "loss": 0.0098, "step": 10906 }, { "epoch": 12.03865267807841, "grad_norm": 0.1567414253950119, "learning_rate": 2.9698666666666667e-05, "loss": 0.0208, "step": 10907 }, { "epoch": 12.03975704030922, "grad_norm": 0.19521908462047577, "learning_rate": 2.9698333333333333e-05, "loss": 0.0081, "step": 10908 }, { "epoch": 12.040861402540033, "grad_norm": 0.31906649470329285, "learning_rate": 2.9698e-05, "loss": 0.0108, "step": 10909 }, { "epoch": 12.041965764770845, "grad_norm": 0.17062297463417053, "learning_rate": 2.969766666666667e-05, "loss": 0.0098, "step": 10910 }, { "epoch": 12.043070127001657, "grad_norm": 0.3536786735057831, "learning_rate": 2.9697333333333335e-05, "loss": 0.0166, "step": 10911 }, { "epoch": 12.044174489232468, "grad_norm": 0.7092222571372986, "learning_rate": 2.9697e-05, "loss": 0.0109, "step": 10912 }, { "epoch": 12.04527885146328, "grad_norm": 0.7910100221633911, "learning_rate": 2.969666666666667e-05, "loss": 0.0261, "step": 10913 }, { "epoch": 12.046383213694092, "grad_norm": 0.404075562953949, "learning_rate": 2.9696333333333332e-05, "loss": 0.0149, "step": 10914 }, { "epoch": 12.047487575924903, "grad_norm": 0.1749355047941208, "learning_rate": 2.9696e-05, "loss": 0.0068, "step": 10915 }, { "epoch": 12.048591938155715, "grad_norm": 
0.09763285517692566, "learning_rate": 2.9695666666666668e-05, "loss": 0.0051, "step": 10916 }, { "epoch": 12.049696300386527, "grad_norm": 0.3365199565887451, "learning_rate": 2.9695333333333334e-05, "loss": 0.0127, "step": 10917 }, { "epoch": 12.050800662617338, "grad_norm": 0.26662465929985046, "learning_rate": 2.9695e-05, "loss": 0.0116, "step": 10918 }, { "epoch": 12.05190502484815, "grad_norm": 0.17352576553821564, "learning_rate": 2.969466666666667e-05, "loss": 0.008, "step": 10919 }, { "epoch": 12.053009387078962, "grad_norm": 0.4172461926937103, "learning_rate": 2.9694333333333335e-05, "loss": 0.0117, "step": 10920 }, { "epoch": 12.054113749309774, "grad_norm": 0.4088843762874603, "learning_rate": 2.9694e-05, "loss": 0.021, "step": 10921 }, { "epoch": 12.055218111540585, "grad_norm": 0.5962039828300476, "learning_rate": 2.969366666666667e-05, "loss": 0.013, "step": 10922 }, { "epoch": 12.056322473771397, "grad_norm": 0.6858088970184326, "learning_rate": 2.9693333333333333e-05, "loss": 0.2054, "step": 10923 }, { "epoch": 12.05742683600221, "grad_norm": 0.5748186111450195, "learning_rate": 2.9693000000000002e-05, "loss": 0.1457, "step": 10924 }, { "epoch": 12.05853119823302, "grad_norm": 0.8382834792137146, "learning_rate": 2.9692666666666668e-05, "loss": 0.1166, "step": 10925 }, { "epoch": 12.059635560463832, "grad_norm": 0.6083920001983643, "learning_rate": 2.9692333333333334e-05, "loss": 0.1776, "step": 10926 }, { "epoch": 12.060739922694644, "grad_norm": 0.5026863813400269, "learning_rate": 2.9692000000000003e-05, "loss": 0.0872, "step": 10927 }, { "epoch": 12.061844284925456, "grad_norm": 0.36928269267082214, "learning_rate": 2.9691666666666666e-05, "loss": 0.0934, "step": 10928 }, { "epoch": 12.062948647156267, "grad_norm": 0.45591971278190613, "learning_rate": 2.9691333333333335e-05, "loss": 0.0697, "step": 10929 }, { "epoch": 12.064053009387079, "grad_norm": 0.5170159339904785, "learning_rate": 2.9691e-05, "loss": 0.0545, "step": 10930 }, { "epoch": 
12.065157371617891, "grad_norm": 0.20767904818058014, "learning_rate": 2.9690666666666667e-05, "loss": 0.0234, "step": 10931 }, { "epoch": 12.066261733848702, "grad_norm": 0.27284008264541626, "learning_rate": 2.9690333333333333e-05, "loss": 0.0229, "step": 10932 }, { "epoch": 12.067366096079514, "grad_norm": 0.5258060097694397, "learning_rate": 2.9690000000000002e-05, "loss": 0.0226, "step": 10933 }, { "epoch": 12.068470458310326, "grad_norm": 0.19601799547672272, "learning_rate": 2.9689666666666665e-05, "loss": 0.0127, "step": 10934 }, { "epoch": 12.069574820541137, "grad_norm": 0.22021961212158203, "learning_rate": 2.9689333333333334e-05, "loss": 0.0147, "step": 10935 }, { "epoch": 12.070679182771949, "grad_norm": 0.22018834948539734, "learning_rate": 2.9689000000000003e-05, "loss": 0.0098, "step": 10936 }, { "epoch": 12.071783545002761, "grad_norm": 0.4570785462856293, "learning_rate": 2.9688666666666666e-05, "loss": 0.0133, "step": 10937 }, { "epoch": 12.072887907233573, "grad_norm": 1.0620489120483398, "learning_rate": 2.9688333333333335e-05, "loss": 0.0189, "step": 10938 }, { "epoch": 12.073992269464384, "grad_norm": 0.09247316420078278, "learning_rate": 2.9688e-05, "loss": 0.007, "step": 10939 }, { "epoch": 12.075096631695196, "grad_norm": 0.6926032900810242, "learning_rate": 2.9687666666666667e-05, "loss": 0.0158, "step": 10940 }, { "epoch": 12.076200993926008, "grad_norm": 0.27776968479156494, "learning_rate": 2.9687333333333333e-05, "loss": 0.0101, "step": 10941 }, { "epoch": 12.077305356156819, "grad_norm": 0.28084659576416016, "learning_rate": 2.9687000000000002e-05, "loss": 0.0178, "step": 10942 }, { "epoch": 12.078409718387631, "grad_norm": 0.40137457847595215, "learning_rate": 2.9686666666666665e-05, "loss": 0.0203, "step": 10943 }, { "epoch": 12.079514080618443, "grad_norm": 0.29795965552330017, "learning_rate": 2.9686333333333334e-05, "loss": 0.0111, "step": 10944 }, { "epoch": 12.080618442849255, "grad_norm": 1.4099891185760498, "learning_rate": 
2.9686000000000003e-05, "loss": 0.0195, "step": 10945 }, { "epoch": 12.081722805080066, "grad_norm": 0.2107800543308258, "learning_rate": 2.9685666666666666e-05, "loss": 0.0123, "step": 10946 }, { "epoch": 12.082827167310878, "grad_norm": 0.2976265251636505, "learning_rate": 2.9685333333333335e-05, "loss": 0.0112, "step": 10947 }, { "epoch": 12.08393152954169, "grad_norm": 0.20363271236419678, "learning_rate": 2.9685e-05, "loss": 0.0064, "step": 10948 }, { "epoch": 12.0850358917725, "grad_norm": 0.46390479803085327, "learning_rate": 2.9684666666666667e-05, "loss": 0.0098, "step": 10949 }, { "epoch": 12.086140254003313, "grad_norm": 0.2706482708454132, "learning_rate": 2.9684333333333333e-05, "loss": 0.0172, "step": 10950 }, { "epoch": 12.087244616234125, "grad_norm": 0.24556396901607513, "learning_rate": 2.9684000000000002e-05, "loss": 0.0091, "step": 10951 }, { "epoch": 12.088348978464936, "grad_norm": 1.0384100675582886, "learning_rate": 2.968366666666667e-05, "loss": 0.0174, "step": 10952 }, { "epoch": 12.089453340695748, "grad_norm": 0.31033244729042053, "learning_rate": 2.9683333333333334e-05, "loss": 0.014, "step": 10953 }, { "epoch": 12.09055770292656, "grad_norm": 0.1818474680185318, "learning_rate": 2.9683000000000004e-05, "loss": 0.0089, "step": 10954 }, { "epoch": 12.091662065157372, "grad_norm": 0.24995911121368408, "learning_rate": 2.9682666666666666e-05, "loss": 0.0088, "step": 10955 }, { "epoch": 12.092766427388183, "grad_norm": 0.41128048300743103, "learning_rate": 2.9682333333333335e-05, "loss": 0.0206, "step": 10956 }, { "epoch": 12.093870789618995, "grad_norm": 0.46169543266296387, "learning_rate": 2.9681999999999998e-05, "loss": 0.0144, "step": 10957 }, { "epoch": 12.094975151849807, "grad_norm": 0.32775378227233887, "learning_rate": 2.9681666666666667e-05, "loss": 0.0104, "step": 10958 }, { "epoch": 12.096079514080618, "grad_norm": 0.6115478277206421, "learning_rate": 2.9681333333333333e-05, "loss": 0.0272, "step": 10959 }, { "epoch": 
12.09718387631143, "grad_norm": 0.5645713806152344, "learning_rate": 2.9681e-05, "loss": 0.025, "step": 10960 }, { "epoch": 12.098288238542242, "grad_norm": 0.288129061460495, "learning_rate": 2.968066666666667e-05, "loss": 0.0109, "step": 10961 }, { "epoch": 12.099392600773054, "grad_norm": 0.3552462160587311, "learning_rate": 2.9680333333333334e-05, "loss": 0.0146, "step": 10962 }, { "epoch": 12.100496963003865, "grad_norm": 0.4784567952156067, "learning_rate": 2.968e-05, "loss": 0.0199, "step": 10963 }, { "epoch": 12.101601325234677, "grad_norm": 0.3970263600349426, "learning_rate": 2.9679666666666666e-05, "loss": 0.0188, "step": 10964 }, { "epoch": 12.10270568746549, "grad_norm": 0.24442067742347717, "learning_rate": 2.9679333333333336e-05, "loss": 0.0101, "step": 10965 }, { "epoch": 12.1038100496963, "grad_norm": 0.3762568533420563, "learning_rate": 2.9678999999999998e-05, "loss": 0.0124, "step": 10966 }, { "epoch": 12.104914411927112, "grad_norm": 0.38567548990249634, "learning_rate": 2.9678666666666668e-05, "loss": 0.0162, "step": 10967 }, { "epoch": 12.106018774157924, "grad_norm": 0.36077842116355896, "learning_rate": 2.9678333333333334e-05, "loss": 0.0216, "step": 10968 }, { "epoch": 12.107123136388736, "grad_norm": 0.7807308435440063, "learning_rate": 2.9678e-05, "loss": 0.041, "step": 10969 }, { "epoch": 12.108227498619547, "grad_norm": 0.29878583550453186, "learning_rate": 2.967766666666667e-05, "loss": 0.0106, "step": 10970 }, { "epoch": 12.109331860850359, "grad_norm": 1.0129671096801758, "learning_rate": 2.9677333333333335e-05, "loss": 0.0226, "step": 10971 }, { "epoch": 12.110436223081171, "grad_norm": 0.31328240036964417, "learning_rate": 2.9677e-05, "loss": 0.015, "step": 10972 }, { "epoch": 12.111540585311982, "grad_norm": 0.5572831630706787, "learning_rate": 2.9676666666666667e-05, "loss": 0.1698, "step": 10973 }, { "epoch": 12.112644947542794, "grad_norm": 0.44675499200820923, "learning_rate": 2.9676333333333336e-05, "loss": 0.1088, "step": 
10974 }, { "epoch": 12.113749309773606, "grad_norm": 0.6340394020080566, "learning_rate": 2.9676e-05, "loss": 0.0934, "step": 10975 }, { "epoch": 12.114853672004417, "grad_norm": 0.4776047170162201, "learning_rate": 2.9675666666666668e-05, "loss": 0.1089, "step": 10976 }, { "epoch": 12.115958034235229, "grad_norm": 0.48145362734794617, "learning_rate": 2.9675333333333337e-05, "loss": 0.0785, "step": 10977 }, { "epoch": 12.117062396466041, "grad_norm": 0.6336449384689331, "learning_rate": 2.9675e-05, "loss": 0.078, "step": 10978 }, { "epoch": 12.118166758696853, "grad_norm": 0.4261382222175598, "learning_rate": 2.967466666666667e-05, "loss": 0.0437, "step": 10979 }, { "epoch": 12.119271120927664, "grad_norm": 0.28364109992980957, "learning_rate": 2.9674333333333335e-05, "loss": 0.042, "step": 10980 }, { "epoch": 12.120375483158476, "grad_norm": 0.17565874755382538, "learning_rate": 2.9674e-05, "loss": 0.0271, "step": 10981 }, { "epoch": 12.121479845389288, "grad_norm": 0.35379132628440857, "learning_rate": 2.9673666666666667e-05, "loss": 0.0366, "step": 10982 }, { "epoch": 12.122584207620099, "grad_norm": 0.2760956883430481, "learning_rate": 2.9673333333333336e-05, "loss": 0.0357, "step": 10983 }, { "epoch": 12.123688569850911, "grad_norm": 0.3005356192588806, "learning_rate": 2.9673e-05, "loss": 0.0208, "step": 10984 }, { "epoch": 12.124792932081723, "grad_norm": 0.2714526951313019, "learning_rate": 2.9672666666666668e-05, "loss": 0.019, "step": 10985 }, { "epoch": 12.125897294312535, "grad_norm": 0.41818997263908386, "learning_rate": 2.9672333333333334e-05, "loss": 0.0167, "step": 10986 }, { "epoch": 12.127001656543346, "grad_norm": 0.27781468629837036, "learning_rate": 2.9672e-05, "loss": 0.0146, "step": 10987 }, { "epoch": 12.128106018774158, "grad_norm": 0.4219697117805481, "learning_rate": 2.967166666666667e-05, "loss": 0.0139, "step": 10988 }, { "epoch": 12.12921038100497, "grad_norm": 0.30199185013771057, "learning_rate": 2.9671333333333332e-05, "loss": 
0.0205, "step": 10989 }, { "epoch": 12.13031474323578, "grad_norm": 0.14737540483474731, "learning_rate": 2.9671e-05, "loss": 0.0078, "step": 10990 }, { "epoch": 12.131419105466593, "grad_norm": 0.5459687113761902, "learning_rate": 2.9670666666666667e-05, "loss": 0.0202, "step": 10991 }, { "epoch": 12.132523467697405, "grad_norm": 0.14455217123031616, "learning_rate": 2.9670333333333333e-05, "loss": 0.0075, "step": 10992 }, { "epoch": 12.133627829928216, "grad_norm": 0.6280132532119751, "learning_rate": 2.967e-05, "loss": 0.0255, "step": 10993 }, { "epoch": 12.134732192159028, "grad_norm": 0.23707053065299988, "learning_rate": 2.9669666666666668e-05, "loss": 0.0117, "step": 10994 }, { "epoch": 12.13583655438984, "grad_norm": 0.4668143689632416, "learning_rate": 2.9669333333333334e-05, "loss": 0.0088, "step": 10995 }, { "epoch": 12.136940916620652, "grad_norm": 0.31314584612846375, "learning_rate": 2.9669e-05, "loss": 0.0136, "step": 10996 }, { "epoch": 12.138045278851463, "grad_norm": 0.38605618476867676, "learning_rate": 2.966866666666667e-05, "loss": 0.0136, "step": 10997 }, { "epoch": 12.139149641082275, "grad_norm": 0.3529980480670929, "learning_rate": 2.9668333333333332e-05, "loss": 0.0106, "step": 10998 }, { "epoch": 12.140254003313087, "grad_norm": 0.2595624625682831, "learning_rate": 2.9668e-05, "loss": 0.01, "step": 10999 }, { "epoch": 12.141358365543898, "grad_norm": 0.353312611579895, "learning_rate": 2.9667666666666667e-05, "loss": 0.0104, "step": 11000 }, { "epoch": 12.141358365543898, "eval_cer": 0.12079736630634834, "eval_loss": 0.3526332974433899, "eval_runtime": 16.1542, "eval_samples_per_second": 18.819, "eval_steps_per_second": 0.619, "eval_wer": 0.41442824251726784, "step": 11000 }, { "epoch": 12.14246272777471, "grad_norm": 0.16806574165821075, "learning_rate": 2.9667333333333333e-05, "loss": 0.0095, "step": 11001 }, { "epoch": 12.143567090005522, "grad_norm": 0.9759131669998169, "learning_rate": 2.9667000000000002e-05, "loss": 0.013, "step": 
11002 }, { "epoch": 12.144671452236334, "grad_norm": 0.26319292187690735, "learning_rate": 2.966666666666667e-05, "loss": 0.0137, "step": 11003 }, { "epoch": 12.145775814467145, "grad_norm": 1.0715112686157227, "learning_rate": 2.9666333333333334e-05, "loss": 0.0201, "step": 11004 }, { "epoch": 12.146880176697957, "grad_norm": 0.17097418010234833, "learning_rate": 2.9666e-05, "loss": 0.0065, "step": 11005 }, { "epoch": 12.14798453892877, "grad_norm": 1.5375944375991821, "learning_rate": 2.966566666666667e-05, "loss": 0.0168, "step": 11006 }, { "epoch": 12.14908890115958, "grad_norm": 0.2160770446062088, "learning_rate": 2.9665333333333332e-05, "loss": 0.0092, "step": 11007 }, { "epoch": 12.150193263390392, "grad_norm": 0.16566208004951477, "learning_rate": 2.9665e-05, "loss": 0.0077, "step": 11008 }, { "epoch": 12.151297625621204, "grad_norm": 0.24199435114860535, "learning_rate": 2.9664666666666667e-05, "loss": 0.0097, "step": 11009 }, { "epoch": 12.152401987852015, "grad_norm": 0.3846701383590698, "learning_rate": 2.9664333333333333e-05, "loss": 0.0162, "step": 11010 }, { "epoch": 12.153506350082827, "grad_norm": 0.6688212156295776, "learning_rate": 2.9664000000000003e-05, "loss": 0.0222, "step": 11011 }, { "epoch": 12.154610712313639, "grad_norm": 0.3034333288669586, "learning_rate": 2.966366666666667e-05, "loss": 0.0133, "step": 11012 }, { "epoch": 12.155715074544451, "grad_norm": 0.4546305537223816, "learning_rate": 2.9663333333333334e-05, "loss": 0.0098, "step": 11013 }, { "epoch": 12.156819436775262, "grad_norm": 0.4359303414821625, "learning_rate": 2.9663e-05, "loss": 0.0197, "step": 11014 }, { "epoch": 12.157923799006074, "grad_norm": 0.8160849213600159, "learning_rate": 2.966266666666667e-05, "loss": 0.0211, "step": 11015 }, { "epoch": 12.159028161236886, "grad_norm": 0.6498938798904419, "learning_rate": 2.9662333333333332e-05, "loss": 0.0109, "step": 11016 }, { "epoch": 12.160132523467697, "grad_norm": 0.6507357358932495, "learning_rate": 2.9662e-05, 
"loss": 0.0296, "step": 11017 }, { "epoch": 12.161236885698509, "grad_norm": 0.3729175925254822, "learning_rate": 2.9661666666666664e-05, "loss": 0.0136, "step": 11018 }, { "epoch": 12.162341247929321, "grad_norm": 0.7031843662261963, "learning_rate": 2.9661333333333333e-05, "loss": 0.024, "step": 11019 }, { "epoch": 12.163445610160133, "grad_norm": 0.48797401785850525, "learning_rate": 2.9661000000000003e-05, "loss": 0.0137, "step": 11020 }, { "epoch": 12.164549972390944, "grad_norm": 0.22045345604419708, "learning_rate": 2.9660666666666665e-05, "loss": 0.011, "step": 11021 }, { "epoch": 12.165654334621756, "grad_norm": 0.6587786078453064, "learning_rate": 2.9660333333333335e-05, "loss": 0.027, "step": 11022 }, { "epoch": 12.166758696852568, "grad_norm": 1.3971461057662964, "learning_rate": 2.966e-05, "loss": 0.282, "step": 11023 }, { "epoch": 12.167863059083379, "grad_norm": 0.5294533371925354, "learning_rate": 2.9659666666666667e-05, "loss": 0.1603, "step": 11024 }, { "epoch": 12.168967421314191, "grad_norm": 0.5513789653778076, "learning_rate": 2.9659333333333332e-05, "loss": 0.1414, "step": 11025 }, { "epoch": 12.170071783545003, "grad_norm": 0.9076592922210693, "learning_rate": 2.9659000000000002e-05, "loss": 0.1072, "step": 11026 }, { "epoch": 12.171176145775814, "grad_norm": 0.6284457445144653, "learning_rate": 2.9658666666666668e-05, "loss": 0.1163, "step": 11027 }, { "epoch": 12.172280508006626, "grad_norm": 0.6122445464134216, "learning_rate": 2.9658333333333334e-05, "loss": 0.1029, "step": 11028 }, { "epoch": 12.173384870237438, "grad_norm": 0.37956446409225464, "learning_rate": 2.9658000000000003e-05, "loss": 0.0603, "step": 11029 }, { "epoch": 12.17448923246825, "grad_norm": 0.3353649973869324, "learning_rate": 2.9657666666666666e-05, "loss": 0.0435, "step": 11030 }, { "epoch": 12.17559359469906, "grad_norm": 0.45006752014160156, "learning_rate": 2.9657333333333335e-05, "loss": 0.0355, "step": 11031 }, { "epoch": 12.176697956929873, "grad_norm": 
0.4972158968448639, "learning_rate": 2.9657e-05, "loss": 0.0371, "step": 11032 }, { "epoch": 12.177802319160685, "grad_norm": 0.23841990530490875, "learning_rate": 2.9656666666666667e-05, "loss": 0.0141, "step": 11033 }, { "epoch": 12.178906681391496, "grad_norm": 0.18865665793418884, "learning_rate": 2.9656333333333333e-05, "loss": 0.0117, "step": 11034 }, { "epoch": 12.180011043622308, "grad_norm": 0.2154671847820282, "learning_rate": 2.9656000000000002e-05, "loss": 0.0168, "step": 11035 }, { "epoch": 12.18111540585312, "grad_norm": 0.6051074862480164, "learning_rate": 2.9655666666666668e-05, "loss": 0.031, "step": 11036 }, { "epoch": 12.182219768083932, "grad_norm": 0.2657654583454132, "learning_rate": 2.9655333333333334e-05, "loss": 0.0448, "step": 11037 }, { "epoch": 12.183324130314743, "grad_norm": 0.4781770706176758, "learning_rate": 2.9655000000000003e-05, "loss": 0.0164, "step": 11038 }, { "epoch": 12.184428492545555, "grad_norm": 0.30918869376182556, "learning_rate": 2.9654666666666666e-05, "loss": 0.0186, "step": 11039 }, { "epoch": 12.185532854776367, "grad_norm": 0.17527642846107483, "learning_rate": 2.9654333333333335e-05, "loss": 0.0165, "step": 11040 }, { "epoch": 12.186637217007178, "grad_norm": 0.2747272253036499, "learning_rate": 2.9654e-05, "loss": 0.0126, "step": 11041 }, { "epoch": 12.18774157923799, "grad_norm": 0.3616536855697632, "learning_rate": 2.9653666666666667e-05, "loss": 0.0168, "step": 11042 }, { "epoch": 12.188845941468802, "grad_norm": 0.45297491550445557, "learning_rate": 2.9653333333333333e-05, "loss": 0.0222, "step": 11043 }, { "epoch": 12.189950303699613, "grad_norm": 0.17229174077510834, "learning_rate": 2.9653000000000002e-05, "loss": 0.0091, "step": 11044 }, { "epoch": 12.191054665930425, "grad_norm": 0.44671645760536194, "learning_rate": 2.9652666666666668e-05, "loss": 0.0236, "step": 11045 }, { "epoch": 12.192159028161237, "grad_norm": 0.18385343253612518, "learning_rate": 2.9652333333333334e-05, "loss": 0.0123, "step": 
11046 }, { "epoch": 12.19326339039205, "grad_norm": 0.6559111475944519, "learning_rate": 2.9652e-05, "loss": 0.0217, "step": 11047 }, { "epoch": 12.19436775262286, "grad_norm": 1.2051666975021362, "learning_rate": 2.9651666666666666e-05, "loss": 0.0116, "step": 11048 }, { "epoch": 12.195472114853672, "grad_norm": 0.19393232464790344, "learning_rate": 2.9651333333333335e-05, "loss": 0.0109, "step": 11049 }, { "epoch": 12.196576477084484, "grad_norm": 0.20110642910003662, "learning_rate": 2.9650999999999998e-05, "loss": 0.0106, "step": 11050 }, { "epoch": 12.197680839315295, "grad_norm": 0.6010412573814392, "learning_rate": 2.9650666666666667e-05, "loss": 0.017, "step": 11051 }, { "epoch": 12.198785201546107, "grad_norm": 0.43858641386032104, "learning_rate": 2.9650333333333336e-05, "loss": 0.0133, "step": 11052 }, { "epoch": 12.19988956377692, "grad_norm": 0.5458716154098511, "learning_rate": 2.965e-05, "loss": 0.0131, "step": 11053 }, { "epoch": 12.200993926007731, "grad_norm": 0.3873250186443329, "learning_rate": 2.964966666666667e-05, "loss": 0.0085, "step": 11054 }, { "epoch": 12.202098288238542, "grad_norm": 0.2531450092792511, "learning_rate": 2.9649333333333334e-05, "loss": 0.0106, "step": 11055 }, { "epoch": 12.203202650469354, "grad_norm": 0.35932543873786926, "learning_rate": 2.9649e-05, "loss": 0.0105, "step": 11056 }, { "epoch": 12.204307012700166, "grad_norm": 0.13428741693496704, "learning_rate": 2.9648666666666666e-05, "loss": 0.0046, "step": 11057 }, { "epoch": 12.205411374930977, "grad_norm": 0.17634031176567078, "learning_rate": 2.9648333333333335e-05, "loss": 0.0084, "step": 11058 }, { "epoch": 12.206515737161789, "grad_norm": 0.29572802782058716, "learning_rate": 2.9647999999999998e-05, "loss": 0.0126, "step": 11059 }, { "epoch": 12.207620099392601, "grad_norm": 0.5061589479446411, "learning_rate": 2.9647666666666667e-05, "loss": 0.0162, "step": 11060 }, { "epoch": 12.208724461623412, "grad_norm": 0.3740333616733551, "learning_rate": 
2.9647333333333337e-05, "loss": 0.0161, "step": 11061 }, { "epoch": 12.209828823854224, "grad_norm": 0.3064236044883728, "learning_rate": 2.9647e-05, "loss": 0.0171, "step": 11062 }, { "epoch": 12.210933186085036, "grad_norm": 0.2982182800769806, "learning_rate": 2.964666666666667e-05, "loss": 0.0119, "step": 11063 }, { "epoch": 12.212037548315848, "grad_norm": 0.2910762131214142, "learning_rate": 2.9646333333333334e-05, "loss": 0.0178, "step": 11064 }, { "epoch": 12.213141910546659, "grad_norm": 0.5504857301712036, "learning_rate": 2.9646e-05, "loss": 0.0182, "step": 11065 }, { "epoch": 12.214246272777471, "grad_norm": 0.46120956540107727, "learning_rate": 2.9645666666666666e-05, "loss": 0.0135, "step": 11066 }, { "epoch": 12.215350635008283, "grad_norm": 0.5615058541297913, "learning_rate": 2.9645333333333336e-05, "loss": 0.0291, "step": 11067 }, { "epoch": 12.216454997239094, "grad_norm": 0.40071454644203186, "learning_rate": 2.9644999999999998e-05, "loss": 0.0148, "step": 11068 }, { "epoch": 12.217559359469906, "grad_norm": 0.22272662818431854, "learning_rate": 2.9644666666666668e-05, "loss": 0.0074, "step": 11069 }, { "epoch": 12.218663721700718, "grad_norm": 2.4316565990448, "learning_rate": 2.9644333333333337e-05, "loss": 0.0199, "step": 11070 }, { "epoch": 12.21976808393153, "grad_norm": 0.22445127367973328, "learning_rate": 2.9644e-05, "loss": 0.0117, "step": 11071 }, { "epoch": 12.22087244616234, "grad_norm": 0.3627847135066986, "learning_rate": 2.964366666666667e-05, "loss": 0.01, "step": 11072 }, { "epoch": 12.221976808393153, "grad_norm": 1.2475850582122803, "learning_rate": 2.9643333333333335e-05, "loss": 0.2992, "step": 11073 }, { "epoch": 12.223081170623965, "grad_norm": 0.6789341568946838, "learning_rate": 2.9643e-05, "loss": 0.151, "step": 11074 }, { "epoch": 12.224185532854776, "grad_norm": 0.5342986583709717, "learning_rate": 2.9642666666666667e-05, "loss": 0.1308, "step": 11075 }, { "epoch": 12.225289895085588, "grad_norm": 0.9166098237037659, 
"learning_rate": 2.9642333333333336e-05, "loss": 0.1388, "step": 11076 }, { "epoch": 12.2263942573164, "grad_norm": 0.640685498714447, "learning_rate": 2.9642000000000002e-05, "loss": 0.1276, "step": 11077 }, { "epoch": 12.22749861954721, "grad_norm": 0.42067986726760864, "learning_rate": 2.9641666666666668e-05, "loss": 0.0759, "step": 11078 }, { "epoch": 12.228602981778023, "grad_norm": 0.46955034136772156, "learning_rate": 2.9641333333333334e-05, "loss": 0.042, "step": 11079 }, { "epoch": 12.229707344008835, "grad_norm": 0.5063725113868713, "learning_rate": 2.9641e-05, "loss": 0.056, "step": 11080 }, { "epoch": 12.230811706239647, "grad_norm": 0.36088162660598755, "learning_rate": 2.964066666666667e-05, "loss": 0.0261, "step": 11081 }, { "epoch": 12.231916068470458, "grad_norm": 0.6599053740501404, "learning_rate": 2.964033333333333e-05, "loss": 0.0298, "step": 11082 }, { "epoch": 12.23302043070127, "grad_norm": 0.35309845209121704, "learning_rate": 2.964e-05, "loss": 0.034, "step": 11083 }, { "epoch": 12.234124792932082, "grad_norm": 0.36968913674354553, "learning_rate": 2.9639666666666667e-05, "loss": 0.0171, "step": 11084 }, { "epoch": 12.235229155162893, "grad_norm": 0.6058906316757202, "learning_rate": 2.9639333333333333e-05, "loss": 0.0278, "step": 11085 }, { "epoch": 12.236333517393705, "grad_norm": 0.34543919563293457, "learning_rate": 2.9639000000000002e-05, "loss": 0.0134, "step": 11086 }, { "epoch": 12.237437879624517, "grad_norm": 0.40064120292663574, "learning_rate": 2.9638666666666668e-05, "loss": 0.014, "step": 11087 }, { "epoch": 12.23854224185533, "grad_norm": 0.2825149595737457, "learning_rate": 2.9638333333333334e-05, "loss": 0.0433, "step": 11088 }, { "epoch": 12.23964660408614, "grad_norm": 0.2031339406967163, "learning_rate": 2.9638e-05, "loss": 0.0134, "step": 11089 }, { "epoch": 12.240750966316952, "grad_norm": 0.1541246920824051, "learning_rate": 2.963766666666667e-05, "loss": 0.0071, "step": 11090 }, { "epoch": 12.241855328547764, 
"grad_norm": 0.18646027147769928, "learning_rate": 2.963733333333333e-05, "loss": 0.012, "step": 11091 }, { "epoch": 12.242959690778575, "grad_norm": 0.1373167335987091, "learning_rate": 2.9637e-05, "loss": 0.0099, "step": 11092 }, { "epoch": 12.244064053009387, "grad_norm": 0.3415043354034424, "learning_rate": 2.9636666666666667e-05, "loss": 0.022, "step": 11093 }, { "epoch": 12.2451684152402, "grad_norm": 0.5077050924301147, "learning_rate": 2.9636333333333333e-05, "loss": 0.0159, "step": 11094 }, { "epoch": 12.24627277747101, "grad_norm": 0.5255492925643921, "learning_rate": 2.9636000000000002e-05, "loss": 0.012, "step": 11095 }, { "epoch": 12.247377139701822, "grad_norm": 0.3091178834438324, "learning_rate": 2.9635666666666668e-05, "loss": 0.0081, "step": 11096 }, { "epoch": 12.248481501932634, "grad_norm": 0.5655236840248108, "learning_rate": 2.9635333333333334e-05, "loss": 0.0164, "step": 11097 }, { "epoch": 12.249585864163446, "grad_norm": 0.2949424982070923, "learning_rate": 2.9635e-05, "loss": 0.0165, "step": 11098 }, { "epoch": 12.250690226394257, "grad_norm": 0.17221473157405853, "learning_rate": 2.963466666666667e-05, "loss": 0.0073, "step": 11099 }, { "epoch": 12.251794588625069, "grad_norm": 0.28557339310646057, "learning_rate": 2.9634333333333332e-05, "loss": 0.0121, "step": 11100 }, { "epoch": 12.252898950855881, "grad_norm": 0.23219788074493408, "learning_rate": 2.9634e-05, "loss": 0.0114, "step": 11101 }, { "epoch": 12.254003313086692, "grad_norm": 1.1570429801940918, "learning_rate": 2.963366666666667e-05, "loss": 0.0149, "step": 11102 }, { "epoch": 12.255107675317504, "grad_norm": 0.38700222969055176, "learning_rate": 2.9633333333333333e-05, "loss": 0.0124, "step": 11103 }, { "epoch": 12.256212037548316, "grad_norm": 0.3096141219139099, "learning_rate": 2.9633000000000002e-05, "loss": 0.0119, "step": 11104 }, { "epoch": 12.257316399779128, "grad_norm": 0.563029408454895, "learning_rate": 2.963266666666667e-05, "loss": 0.0246, "step": 11105 }, { 
"epoch": 12.258420762009939, "grad_norm": 0.5219769477844238, "learning_rate": 2.9632333333333334e-05, "loss": 0.0301, "step": 11106 }, { "epoch": 12.259525124240751, "grad_norm": 0.3909292221069336, "learning_rate": 2.9632e-05, "loss": 0.0104, "step": 11107 }, { "epoch": 12.260629486471563, "grad_norm": 0.29345348477363586, "learning_rate": 2.9631666666666666e-05, "loss": 0.0118, "step": 11108 }, { "epoch": 12.261733848702374, "grad_norm": 0.37342768907546997, "learning_rate": 2.9631333333333332e-05, "loss": 0.0231, "step": 11109 }, { "epoch": 12.262838210933186, "grad_norm": 0.3942885398864746, "learning_rate": 2.9631e-05, "loss": 0.0117, "step": 11110 }, { "epoch": 12.263942573163998, "grad_norm": 0.2920040488243103, "learning_rate": 2.9630666666666667e-05, "loss": 0.0095, "step": 11111 }, { "epoch": 12.26504693539481, "grad_norm": 0.3103142976760864, "learning_rate": 2.9630333333333333e-05, "loss": 0.0157, "step": 11112 }, { "epoch": 12.26615129762562, "grad_norm": 0.7354790568351746, "learning_rate": 2.9630000000000003e-05, "loss": 0.0265, "step": 11113 }, { "epoch": 12.267255659856433, "grad_norm": 0.37231552600860596, "learning_rate": 2.9629666666666665e-05, "loss": 0.0107, "step": 11114 }, { "epoch": 12.268360022087245, "grad_norm": 0.26842403411865234, "learning_rate": 2.9629333333333334e-05, "loss": 0.0124, "step": 11115 }, { "epoch": 12.269464384318056, "grad_norm": 0.45467883348464966, "learning_rate": 2.9629e-05, "loss": 0.024, "step": 11116 }, { "epoch": 12.270568746548868, "grad_norm": 0.4229796528816223, "learning_rate": 2.9628666666666666e-05, "loss": 0.0184, "step": 11117 }, { "epoch": 12.27167310877968, "grad_norm": 0.8721631169319153, "learning_rate": 2.9628333333333332e-05, "loss": 0.0125, "step": 11118 }, { "epoch": 12.27277747101049, "grad_norm": 0.39919012784957886, "learning_rate": 2.9628e-05, "loss": 0.0356, "step": 11119 }, { "epoch": 12.273881833241303, "grad_norm": 0.18527603149414062, "learning_rate": 2.9627666666666668e-05, "loss": 
0.0072, "step": 11120 }, { "epoch": 12.274986195472115, "grad_norm": 0.5295423865318298, "learning_rate": 2.9627333333333333e-05, "loss": 0.0221, "step": 11121 }, { "epoch": 12.276090557702927, "grad_norm": 0.7209796905517578, "learning_rate": 2.9627000000000003e-05, "loss": 0.0145, "step": 11122 }, { "epoch": 12.277194919933738, "grad_norm": 1.8431222438812256, "learning_rate": 2.9626666666666665e-05, "loss": 0.2233, "step": 11123 }, { "epoch": 12.27829928216455, "grad_norm": 1.081202507019043, "learning_rate": 2.9626333333333335e-05, "loss": 0.206, "step": 11124 }, { "epoch": 12.279403644395362, "grad_norm": 0.6102065443992615, "learning_rate": 2.9626e-05, "loss": 0.1562, "step": 11125 }, { "epoch": 12.280508006626173, "grad_norm": 0.52718186378479, "learning_rate": 2.9625666666666667e-05, "loss": 0.0819, "step": 11126 }, { "epoch": 12.281612368856985, "grad_norm": 0.8734982013702393, "learning_rate": 2.9625333333333336e-05, "loss": 0.1364, "step": 11127 }, { "epoch": 12.282716731087797, "grad_norm": 0.4288519620895386, "learning_rate": 2.9625000000000002e-05, "loss": 0.0805, "step": 11128 }, { "epoch": 12.283821093318608, "grad_norm": 0.4514230191707611, "learning_rate": 2.9624666666666668e-05, "loss": 0.0532, "step": 11129 }, { "epoch": 12.28492545554942, "grad_norm": 0.3952484726905823, "learning_rate": 2.9624333333333334e-05, "loss": 0.0735, "step": 11130 }, { "epoch": 12.286029817780232, "grad_norm": 0.39403074979782104, "learning_rate": 2.9624000000000003e-05, "loss": 0.0549, "step": 11131 }, { "epoch": 12.287134180011044, "grad_norm": 0.2176227867603302, "learning_rate": 2.9623666666666666e-05, "loss": 0.0212, "step": 11132 }, { "epoch": 12.288238542241855, "grad_norm": 0.22380799055099487, "learning_rate": 2.9623333333333335e-05, "loss": 0.0178, "step": 11133 }, { "epoch": 12.289342904472667, "grad_norm": 0.23501572012901306, "learning_rate": 2.9623e-05, "loss": 0.0147, "step": 11134 }, { "epoch": 12.29044726670348, "grad_norm": 0.265705943107605, 
"learning_rate": 2.9622666666666667e-05, "loss": 0.0221, "step": 11135 }, { "epoch": 12.29155162893429, "grad_norm": 0.3877258002758026, "learning_rate": 2.9622333333333336e-05, "loss": 0.0249, "step": 11136 }, { "epoch": 12.292655991165102, "grad_norm": 0.4386245608329773, "learning_rate": 2.9622000000000002e-05, "loss": 0.0158, "step": 11137 }, { "epoch": 12.293760353395914, "grad_norm": 0.2675149440765381, "learning_rate": 2.9621666666666668e-05, "loss": 0.0153, "step": 11138 }, { "epoch": 12.294864715626726, "grad_norm": 0.3365258574485779, "learning_rate": 2.9621333333333334e-05, "loss": 0.0174, "step": 11139 }, { "epoch": 12.295969077857537, "grad_norm": 0.25608453154563904, "learning_rate": 2.9621e-05, "loss": 0.0207, "step": 11140 }, { "epoch": 12.297073440088349, "grad_norm": 0.5013037323951721, "learning_rate": 2.9620666666666666e-05, "loss": 0.0234, "step": 11141 }, { "epoch": 12.298177802319161, "grad_norm": 0.3323211073875427, "learning_rate": 2.9620333333333335e-05, "loss": 0.0158, "step": 11142 }, { "epoch": 12.299282164549972, "grad_norm": 0.38498446345329285, "learning_rate": 2.9619999999999998e-05, "loss": 0.0136, "step": 11143 }, { "epoch": 12.300386526780784, "grad_norm": 0.6952985525131226, "learning_rate": 2.9619666666666667e-05, "loss": 0.0274, "step": 11144 }, { "epoch": 12.301490889011596, "grad_norm": 0.1546172797679901, "learning_rate": 2.9619333333333336e-05, "loss": 0.0102, "step": 11145 }, { "epoch": 12.302595251242408, "grad_norm": 0.327355295419693, "learning_rate": 2.9619e-05, "loss": 0.0129, "step": 11146 }, { "epoch": 12.303699613473219, "grad_norm": 0.2715193033218384, "learning_rate": 2.9618666666666668e-05, "loss": 0.0257, "step": 11147 }, { "epoch": 12.304803975704031, "grad_norm": 0.2362881302833557, "learning_rate": 2.9618333333333334e-05, "loss": 0.016, "step": 11148 }, { "epoch": 12.305908337934843, "grad_norm": 0.21108250319957733, "learning_rate": 2.9618e-05, "loss": 0.0092, "step": 11149 }, { "epoch": 
12.307012700165654, "grad_norm": 0.4851799011230469, "learning_rate": 2.9617666666666666e-05, "loss": 0.044, "step": 11150 }, { "epoch": 12.308117062396466, "grad_norm": 0.2321798950433731, "learning_rate": 2.9617333333333335e-05, "loss": 0.0093, "step": 11151 }, { "epoch": 12.309221424627278, "grad_norm": 0.1861819326877594, "learning_rate": 2.9617e-05, "loss": 0.0115, "step": 11152 }, { "epoch": 12.310325786858089, "grad_norm": 0.2604711055755615, "learning_rate": 2.9616666666666667e-05, "loss": 0.0145, "step": 11153 }, { "epoch": 12.3114301490889, "grad_norm": 0.18258489668369293, "learning_rate": 2.9616333333333336e-05, "loss": 0.0123, "step": 11154 }, { "epoch": 12.312534511319713, "grad_norm": 0.44035205245018005, "learning_rate": 2.9616e-05, "loss": 0.0186, "step": 11155 }, { "epoch": 12.313638873550525, "grad_norm": 0.6066728234291077, "learning_rate": 2.961566666666667e-05, "loss": 0.0151, "step": 11156 }, { "epoch": 12.314743235781336, "grad_norm": 0.268622487783432, "learning_rate": 2.9615333333333334e-05, "loss": 0.0135, "step": 11157 }, { "epoch": 12.315847598012148, "grad_norm": 0.6722234487533569, "learning_rate": 2.9615e-05, "loss": 0.0185, "step": 11158 }, { "epoch": 12.31695196024296, "grad_norm": 0.23456524312496185, "learning_rate": 2.9614666666666666e-05, "loss": 0.0132, "step": 11159 }, { "epoch": 12.31805632247377, "grad_norm": 0.24338386952877045, "learning_rate": 2.9614333333333335e-05, "loss": 0.0077, "step": 11160 }, { "epoch": 12.319160684704583, "grad_norm": 0.22064049541950226, "learning_rate": 2.9614e-05, "loss": 0.0109, "step": 11161 }, { "epoch": 12.320265046935395, "grad_norm": 0.35028275847435, "learning_rate": 2.9613666666666667e-05, "loss": 0.0142, "step": 11162 }, { "epoch": 12.321369409166207, "grad_norm": 0.17312917113304138, "learning_rate": 2.9613333333333337e-05, "loss": 0.0074, "step": 11163 }, { "epoch": 12.322473771397018, "grad_norm": 0.5446048378944397, "learning_rate": 2.9613e-05, "loss": 0.0197, "step": 11164 }, { 
"epoch": 12.32357813362783, "grad_norm": 0.23978716135025024, "learning_rate": 2.961266666666667e-05, "loss": 0.0155, "step": 11165 }, { "epoch": 12.324682495858642, "grad_norm": 0.3511435091495514, "learning_rate": 2.9612333333333334e-05, "loss": 0.0099, "step": 11166 }, { "epoch": 12.325786858089453, "grad_norm": 0.2985742688179016, "learning_rate": 2.9612e-05, "loss": 0.0117, "step": 11167 }, { "epoch": 12.326891220320265, "grad_norm": 0.33503457903862, "learning_rate": 2.9611666666666666e-05, "loss": 0.0127, "step": 11168 }, { "epoch": 12.327995582551077, "grad_norm": 0.49584272503852844, "learning_rate": 2.9611333333333332e-05, "loss": 0.0214, "step": 11169 }, { "epoch": 12.329099944781888, "grad_norm": 0.22437749803066254, "learning_rate": 2.9611e-05, "loss": 0.011, "step": 11170 }, { "epoch": 12.3302043070127, "grad_norm": 0.3435603678226471, "learning_rate": 2.9610666666666668e-05, "loss": 0.0133, "step": 11171 }, { "epoch": 12.331308669243512, "grad_norm": 0.3544321358203888, "learning_rate": 2.9610333333333333e-05, "loss": 0.0142, "step": 11172 }, { "epoch": 12.332413031474324, "grad_norm": 0.8223045468330383, "learning_rate": 2.961e-05, "loss": 0.2306, "step": 11173 }, { "epoch": 12.333517393705135, "grad_norm": 0.8976455926895142, "learning_rate": 2.960966666666667e-05, "loss": 0.1784, "step": 11174 }, { "epoch": 12.334621755935947, "grad_norm": 0.7273120284080505, "learning_rate": 2.960933333333333e-05, "loss": 0.2093, "step": 11175 }, { "epoch": 12.33572611816676, "grad_norm": 0.5162257552146912, "learning_rate": 2.9609e-05, "loss": 0.0925, "step": 11176 }, { "epoch": 12.33683048039757, "grad_norm": 0.4948268234729767, "learning_rate": 2.960866666666667e-05, "loss": 0.0754, "step": 11177 }, { "epoch": 12.337934842628382, "grad_norm": 0.30535250902175903, "learning_rate": 2.9608333333333332e-05, "loss": 0.0677, "step": 11178 }, { "epoch": 12.339039204859194, "grad_norm": 0.829904317855835, "learning_rate": 2.9608000000000002e-05, "loss": 0.082, "step": 
11179 }, { "epoch": 12.340143567090006, "grad_norm": 0.8179749846458435, "learning_rate": 2.9607666666666668e-05, "loss": 0.0706, "step": 11180 }, { "epoch": 12.341247929320817, "grad_norm": 0.37581300735473633, "learning_rate": 2.9607333333333334e-05, "loss": 0.0526, "step": 11181 }, { "epoch": 12.342352291551629, "grad_norm": 0.31581342220306396, "learning_rate": 2.9607e-05, "loss": 0.0464, "step": 11182 }, { "epoch": 12.343456653782441, "grad_norm": 0.46003401279449463, "learning_rate": 2.960666666666667e-05, "loss": 0.0198, "step": 11183 }, { "epoch": 12.344561016013252, "grad_norm": 0.4387262463569641, "learning_rate": 2.960633333333333e-05, "loss": 0.045, "step": 11184 }, { "epoch": 12.345665378244064, "grad_norm": 0.20381498336791992, "learning_rate": 2.9606e-05, "loss": 0.0137, "step": 11185 }, { "epoch": 12.346769740474876, "grad_norm": 0.3015545904636383, "learning_rate": 2.960566666666667e-05, "loss": 0.0099, "step": 11186 }, { "epoch": 12.347874102705687, "grad_norm": 0.51915442943573, "learning_rate": 2.9605333333333333e-05, "loss": 0.0827, "step": 11187 }, { "epoch": 12.348978464936499, "grad_norm": 0.3505218029022217, "learning_rate": 2.9605000000000002e-05, "loss": 0.0141, "step": 11188 }, { "epoch": 12.350082827167311, "grad_norm": 0.12132108211517334, "learning_rate": 2.9604666666666668e-05, "loss": 0.0056, "step": 11189 }, { "epoch": 12.351187189398123, "grad_norm": 0.20367972552776337, "learning_rate": 2.9604333333333334e-05, "loss": 0.0156, "step": 11190 }, { "epoch": 12.352291551628934, "grad_norm": 0.27390584349632263, "learning_rate": 2.9604e-05, "loss": 0.0143, "step": 11191 }, { "epoch": 12.353395913859746, "grad_norm": 0.3100208640098572, "learning_rate": 2.960366666666667e-05, "loss": 0.0131, "step": 11192 }, { "epoch": 12.354500276090558, "grad_norm": 0.32146507501602173, "learning_rate": 2.960333333333333e-05, "loss": 0.0156, "step": 11193 }, { "epoch": 12.355604638321369, "grad_norm": 0.7651773691177368, "learning_rate": 2.9603e-05, 
"loss": 0.0258, "step": 11194 }, { "epoch": 12.356709000552181, "grad_norm": 0.2769199013710022, "learning_rate": 2.960266666666667e-05, "loss": 0.0175, "step": 11195 }, { "epoch": 12.357813362782993, "grad_norm": 0.24973346292972565, "learning_rate": 2.9602333333333333e-05, "loss": 0.0156, "step": 11196 }, { "epoch": 12.358917725013805, "grad_norm": 0.19498157501220703, "learning_rate": 2.9602000000000002e-05, "loss": 0.0091, "step": 11197 }, { "epoch": 12.360022087244616, "grad_norm": 0.3069148063659668, "learning_rate": 2.9601666666666665e-05, "loss": 0.0127, "step": 11198 }, { "epoch": 12.361126449475428, "grad_norm": 0.4981791377067566, "learning_rate": 2.9601333333333334e-05, "loss": 0.0157, "step": 11199 }, { "epoch": 12.36223081170624, "grad_norm": 0.4563565254211426, "learning_rate": 2.9601e-05, "loss": 0.0112, "step": 11200 }, { "epoch": 12.36333517393705, "grad_norm": 0.16542594134807587, "learning_rate": 2.9600666666666666e-05, "loss": 0.0096, "step": 11201 }, { "epoch": 12.364439536167863, "grad_norm": 0.29501259326934814, "learning_rate": 2.9600333333333335e-05, "loss": 0.0174, "step": 11202 }, { "epoch": 12.365543898398675, "grad_norm": 0.2914336621761322, "learning_rate": 2.96e-05, "loss": 0.0133, "step": 11203 }, { "epoch": 12.366648260629486, "grad_norm": 0.2593364715576172, "learning_rate": 2.9599666666666667e-05, "loss": 0.0106, "step": 11204 }, { "epoch": 12.367752622860298, "grad_norm": 0.38407543301582336, "learning_rate": 2.9599333333333333e-05, "loss": 0.014, "step": 11205 }, { "epoch": 12.36885698509111, "grad_norm": 0.29740628600120544, "learning_rate": 2.9599000000000002e-05, "loss": 0.0095, "step": 11206 }, { "epoch": 12.369961347321922, "grad_norm": 0.2428959459066391, "learning_rate": 2.9598666666666665e-05, "loss": 0.01, "step": 11207 }, { "epoch": 12.371065709552733, "grad_norm": 0.47452643513679504, "learning_rate": 2.9598333333333334e-05, "loss": 0.0132, "step": 11208 }, { "epoch": 12.372170071783545, "grad_norm": 
0.40483617782592773, "learning_rate": 2.9598e-05, "loss": 0.0174, "step": 11209 }, { "epoch": 12.373274434014357, "grad_norm": 0.15418276190757751, "learning_rate": 2.9597666666666666e-05, "loss": 0.011, "step": 11210 }, { "epoch": 12.374378796245168, "grad_norm": 0.3481297194957733, "learning_rate": 2.9597333333333335e-05, "loss": 0.0158, "step": 11211 }, { "epoch": 12.37548315847598, "grad_norm": 0.2944134473800659, "learning_rate": 2.9597e-05, "loss": 0.0138, "step": 11212 }, { "epoch": 12.376587520706792, "grad_norm": 0.32795339822769165, "learning_rate": 2.9596666666666667e-05, "loss": 0.0131, "step": 11213 }, { "epoch": 12.377691882937604, "grad_norm": 0.20206886529922485, "learning_rate": 2.9596333333333333e-05, "loss": 0.0073, "step": 11214 }, { "epoch": 12.378796245168415, "grad_norm": 0.1654130220413208, "learning_rate": 2.9596000000000003e-05, "loss": 0.01, "step": 11215 }, { "epoch": 12.379900607399227, "grad_norm": 0.32183733582496643, "learning_rate": 2.9595666666666665e-05, "loss": 0.0127, "step": 11216 }, { "epoch": 12.38100496963004, "grad_norm": 0.20526309311389923, "learning_rate": 2.9595333333333334e-05, "loss": 0.01, "step": 11217 }, { "epoch": 12.38210933186085, "grad_norm": 1.417040228843689, "learning_rate": 2.9595e-05, "loss": 0.0144, "step": 11218 }, { "epoch": 12.383213694091662, "grad_norm": 0.36857354640960693, "learning_rate": 2.9594666666666666e-05, "loss": 0.0075, "step": 11219 }, { "epoch": 12.384318056322474, "grad_norm": 0.9207585453987122, "learning_rate": 2.9594333333333336e-05, "loss": 0.0099, "step": 11220 }, { "epoch": 12.385422418553285, "grad_norm": 0.28722894191741943, "learning_rate": 2.9594e-05, "loss": 0.014, "step": 11221 }, { "epoch": 12.386526780784097, "grad_norm": 0.44119730591773987, "learning_rate": 2.9593666666666668e-05, "loss": 0.0194, "step": 11222 }, { "epoch": 12.387631143014909, "grad_norm": 1.1876745223999023, "learning_rate": 2.9593333333333333e-05, "loss": 0.1897, "step": 11223 }, { "epoch": 
12.388735505245721, "grad_norm": 0.6032551527023315, "learning_rate": 2.9593000000000003e-05, "loss": 0.1805, "step": 11224 }, { "epoch": 12.389839867476532, "grad_norm": 0.48956984281539917, "learning_rate": 2.9592666666666665e-05, "loss": 0.1425, "step": 11225 }, { "epoch": 12.390944229707344, "grad_norm": 0.6692984104156494, "learning_rate": 2.9592333333333335e-05, "loss": 0.0965, "step": 11226 }, { "epoch": 12.392048591938156, "grad_norm": 0.4024710953235626, "learning_rate": 2.9592000000000004e-05, "loss": 0.0514, "step": 11227 }, { "epoch": 12.393152954168967, "grad_norm": 0.4164634048938751, "learning_rate": 2.9591666666666667e-05, "loss": 0.0643, "step": 11228 }, { "epoch": 12.394257316399779, "grad_norm": 0.5054653882980347, "learning_rate": 2.9591333333333336e-05, "loss": 0.065, "step": 11229 }, { "epoch": 12.395361678630591, "grad_norm": 0.7684727907180786, "learning_rate": 2.9591e-05, "loss": 0.0658, "step": 11230 }, { "epoch": 12.396466040861403, "grad_norm": 0.2832321524620056, "learning_rate": 2.9590666666666668e-05, "loss": 0.0272, "step": 11231 }, { "epoch": 12.397570403092214, "grad_norm": 0.2912259101867676, "learning_rate": 2.9590333333333334e-05, "loss": 0.0317, "step": 11232 }, { "epoch": 12.398674765323026, "grad_norm": 0.20675061643123627, "learning_rate": 2.959e-05, "loss": 0.0417, "step": 11233 }, { "epoch": 12.399779127553838, "grad_norm": 0.6537545323371887, "learning_rate": 2.9589666666666666e-05, "loss": 0.0194, "step": 11234 }, { "epoch": 12.400883489784649, "grad_norm": 0.23603013157844543, "learning_rate": 2.9589333333333335e-05, "loss": 0.0157, "step": 11235 }, { "epoch": 12.401987852015461, "grad_norm": 0.2731022834777832, "learning_rate": 2.9589e-05, "loss": 0.043, "step": 11236 }, { "epoch": 12.403092214246273, "grad_norm": 0.16640673577785492, "learning_rate": 2.9588666666666667e-05, "loss": 0.0136, "step": 11237 }, { "epoch": 12.404196576477084, "grad_norm": 0.301523357629776, "learning_rate": 2.9588333333333336e-05, "loss": 
0.012, "step": 11238 }, { "epoch": 12.405300938707896, "grad_norm": 0.36083248257637024, "learning_rate": 2.9588e-05, "loss": 0.0265, "step": 11239 }, { "epoch": 12.406405300938708, "grad_norm": 0.24411118030548096, "learning_rate": 2.9587666666666668e-05, "loss": 0.0219, "step": 11240 }, { "epoch": 12.40750966316952, "grad_norm": 0.3842402994632721, "learning_rate": 2.9587333333333334e-05, "loss": 0.0185, "step": 11241 }, { "epoch": 12.40861402540033, "grad_norm": 0.23389385640621185, "learning_rate": 2.9587e-05, "loss": 0.0165, "step": 11242 }, { "epoch": 12.409718387631143, "grad_norm": 0.1491173505783081, "learning_rate": 2.9586666666666666e-05, "loss": 0.0101, "step": 11243 }, { "epoch": 12.410822749861955, "grad_norm": 0.15182572603225708, "learning_rate": 2.9586333333333335e-05, "loss": 0.0087, "step": 11244 }, { "epoch": 12.411927112092766, "grad_norm": 0.3675696849822998, "learning_rate": 2.9586e-05, "loss": 0.015, "step": 11245 }, { "epoch": 12.413031474323578, "grad_norm": 0.0983416885137558, "learning_rate": 2.9585666666666667e-05, "loss": 0.0051, "step": 11246 }, { "epoch": 12.41413583655439, "grad_norm": 0.29774701595306396, "learning_rate": 2.9585333333333336e-05, "loss": 0.0118, "step": 11247 }, { "epoch": 12.415240198785202, "grad_norm": 0.11370784789323807, "learning_rate": 2.9585e-05, "loss": 0.0072, "step": 11248 }, { "epoch": 12.416344561016013, "grad_norm": 0.18369443714618683, "learning_rate": 2.9584666666666668e-05, "loss": 0.0107, "step": 11249 }, { "epoch": 12.417448923246825, "grad_norm": 0.33297187089920044, "learning_rate": 2.9584333333333334e-05, "loss": 0.0045, "step": 11250 }, { "epoch": 12.418553285477637, "grad_norm": 0.34703758358955383, "learning_rate": 2.9584e-05, "loss": 0.0131, "step": 11251 }, { "epoch": 12.419657647708448, "grad_norm": 0.17025533318519592, "learning_rate": 2.958366666666667e-05, "loss": 0.0124, "step": 11252 }, { "epoch": 12.42076200993926, "grad_norm": 0.14576172828674316, "learning_rate": 
2.9583333333333335e-05, "loss": 0.0068, "step": 11253 }, { "epoch": 12.421866372170072, "grad_norm": 0.165750652551651, "learning_rate": 2.9583e-05, "loss": 0.0077, "step": 11254 }, { "epoch": 12.422970734400884, "grad_norm": 0.48914405703544617, "learning_rate": 2.9582666666666667e-05, "loss": 0.0191, "step": 11255 }, { "epoch": 12.424075096631695, "grad_norm": 0.3549889326095581, "learning_rate": 2.9582333333333336e-05, "loss": 0.0106, "step": 11256 }, { "epoch": 12.425179458862507, "grad_norm": 0.28816282749176025, "learning_rate": 2.9582e-05, "loss": 0.0371, "step": 11257 }, { "epoch": 12.42628382109332, "grad_norm": 0.6921641826629639, "learning_rate": 2.958166666666667e-05, "loss": 0.0171, "step": 11258 }, { "epoch": 12.42738818332413, "grad_norm": 0.17780034244060516, "learning_rate": 2.958133333333333e-05, "loss": 0.0113, "step": 11259 }, { "epoch": 12.428492545554942, "grad_norm": 0.16667349636554718, "learning_rate": 2.9581e-05, "loss": 0.0077, "step": 11260 }, { "epoch": 12.429596907785754, "grad_norm": 0.21494974195957184, "learning_rate": 2.958066666666667e-05, "loss": 0.0082, "step": 11261 }, { "epoch": 12.430701270016565, "grad_norm": 0.3746435046195984, "learning_rate": 2.9580333333333332e-05, "loss": 0.0128, "step": 11262 }, { "epoch": 12.431805632247377, "grad_norm": 0.59864741563797, "learning_rate": 2.958e-05, "loss": 0.0214, "step": 11263 }, { "epoch": 12.43290999447819, "grad_norm": 0.5485081672668457, "learning_rate": 2.9579666666666667e-05, "loss": 0.0113, "step": 11264 }, { "epoch": 12.434014356709001, "grad_norm": 0.40740767121315, "learning_rate": 2.9579333333333333e-05, "loss": 0.015, "step": 11265 }, { "epoch": 12.435118718939812, "grad_norm": 0.4494050443172455, "learning_rate": 2.9579e-05, "loss": 0.013, "step": 11266 }, { "epoch": 12.436223081170624, "grad_norm": 0.7119808793067932, "learning_rate": 2.957866666666667e-05, "loss": 0.0216, "step": 11267 }, { "epoch": 12.437327443401436, "grad_norm": 0.530486524105072, "learning_rate": 
2.9578333333333334e-05, "loss": 0.0184, "step": 11268 }, { "epoch": 12.438431805632247, "grad_norm": 0.39756834506988525, "learning_rate": 2.9578e-05, "loss": 0.0141, "step": 11269 }, { "epoch": 12.439536167863059, "grad_norm": 0.4187345504760742, "learning_rate": 2.957766666666667e-05, "loss": 0.0241, "step": 11270 }, { "epoch": 12.440640530093871, "grad_norm": 0.9003933668136597, "learning_rate": 2.9577333333333332e-05, "loss": 0.0188, "step": 11271 }, { "epoch": 12.441744892324682, "grad_norm": 0.34320056438446045, "learning_rate": 2.9577e-05, "loss": 0.0099, "step": 11272 }, { "epoch": 12.442849254555494, "grad_norm": 0.6700934171676636, "learning_rate": 2.9576666666666668e-05, "loss": 0.2253, "step": 11273 }, { "epoch": 12.443953616786306, "grad_norm": 0.6765843629837036, "learning_rate": 2.9576333333333333e-05, "loss": 0.1877, "step": 11274 }, { "epoch": 12.445057979017118, "grad_norm": 0.5423116683959961, "learning_rate": 2.9576e-05, "loss": 0.1579, "step": 11275 }, { "epoch": 12.446162341247929, "grad_norm": 0.41137102246284485, "learning_rate": 2.957566666666667e-05, "loss": 0.0981, "step": 11276 }, { "epoch": 12.447266703478741, "grad_norm": 0.44257020950317383, "learning_rate": 2.9575333333333335e-05, "loss": 0.1012, "step": 11277 }, { "epoch": 12.448371065709553, "grad_norm": 0.592222273349762, "learning_rate": 2.9575e-05, "loss": 0.0719, "step": 11278 }, { "epoch": 12.449475427940364, "grad_norm": 0.9195201396942139, "learning_rate": 2.957466666666667e-05, "loss": 0.0942, "step": 11279 }, { "epoch": 12.450579790171176, "grad_norm": 0.5290955305099487, "learning_rate": 2.9574333333333332e-05, "loss": 0.1333, "step": 11280 }, { "epoch": 12.451684152401988, "grad_norm": 0.3596489727497101, "learning_rate": 2.9574000000000002e-05, "loss": 0.0239, "step": 11281 }, { "epoch": 12.4527885146328, "grad_norm": 0.38516145944595337, "learning_rate": 2.9573666666666668e-05, "loss": 0.0192, "step": 11282 }, { "epoch": 12.45389287686361, "grad_norm": 
0.38623979687690735, "learning_rate": 2.9573333333333334e-05, "loss": 0.0322, "step": 11283 }, { "epoch": 12.454997239094423, "grad_norm": 0.41731417179107666, "learning_rate": 2.9573e-05, "loss": 0.0249, "step": 11284 }, { "epoch": 12.456101601325235, "grad_norm": 0.14077481627464294, "learning_rate": 2.957266666666667e-05, "loss": 0.0139, "step": 11285 }, { "epoch": 12.457205963556046, "grad_norm": 0.2504943013191223, "learning_rate": 2.9572333333333335e-05, "loss": 0.0193, "step": 11286 }, { "epoch": 12.458310325786858, "grad_norm": 0.39780569076538086, "learning_rate": 2.9572e-05, "loss": 0.0397, "step": 11287 }, { "epoch": 12.45941468801767, "grad_norm": 0.24179716408252716, "learning_rate": 2.957166666666667e-05, "loss": 0.0168, "step": 11288 }, { "epoch": 12.460519050248482, "grad_norm": 0.3534749746322632, "learning_rate": 2.9571333333333333e-05, "loss": 0.0209, "step": 11289 }, { "epoch": 12.461623412479293, "grad_norm": 0.39135822653770447, "learning_rate": 2.9571000000000002e-05, "loss": 0.016, "step": 11290 }, { "epoch": 12.462727774710105, "grad_norm": 0.24781832098960876, "learning_rate": 2.9570666666666665e-05, "loss": 0.0143, "step": 11291 }, { "epoch": 12.463832136940917, "grad_norm": 0.22635668516159058, "learning_rate": 2.9570333333333334e-05, "loss": 0.0092, "step": 11292 }, { "epoch": 12.464936499171728, "grad_norm": 0.1900983601808548, "learning_rate": 2.957e-05, "loss": 0.0082, "step": 11293 }, { "epoch": 12.46604086140254, "grad_norm": 0.2344493716955185, "learning_rate": 2.9569666666666666e-05, "loss": 0.017, "step": 11294 }, { "epoch": 12.467145223633352, "grad_norm": 0.2674720883369446, "learning_rate": 2.9569333333333335e-05, "loss": 0.0143, "step": 11295 }, { "epoch": 12.468249585864163, "grad_norm": 0.19340521097183228, "learning_rate": 2.9569e-05, "loss": 0.0108, "step": 11296 }, { "epoch": 12.469353948094975, "grad_norm": 0.24781829118728638, "learning_rate": 2.9568666666666667e-05, "loss": 0.0076, "step": 11297 }, { "epoch": 
12.470458310325787, "grad_norm": 0.34450802206993103, "learning_rate": 2.9568333333333333e-05, "loss": 0.0112, "step": 11298 }, { "epoch": 12.4715626725566, "grad_norm": 0.4114616811275482, "learning_rate": 2.9568000000000002e-05, "loss": 0.015, "step": 11299 }, { "epoch": 12.47266703478741, "grad_norm": 0.24800735712051392, "learning_rate": 2.9567666666666665e-05, "loss": 0.0081, "step": 11300 }, { "epoch": 12.473771397018222, "grad_norm": 0.1209389939904213, "learning_rate": 2.9567333333333334e-05, "loss": 0.0047, "step": 11301 }, { "epoch": 12.474875759249034, "grad_norm": 0.2785961627960205, "learning_rate": 2.9567000000000003e-05, "loss": 0.0069, "step": 11302 }, { "epoch": 12.475980121479845, "grad_norm": 0.8018946051597595, "learning_rate": 2.9566666666666666e-05, "loss": 0.0194, "step": 11303 }, { "epoch": 12.477084483710657, "grad_norm": 0.24746674299240112, "learning_rate": 2.9566333333333335e-05, "loss": 0.0158, "step": 11304 }, { "epoch": 12.47818884594147, "grad_norm": 0.4554377794265747, "learning_rate": 2.9566e-05, "loss": 0.0212, "step": 11305 }, { "epoch": 12.47929320817228, "grad_norm": 0.2586056888103485, "learning_rate": 2.9565666666666667e-05, "loss": 0.0152, "step": 11306 }, { "epoch": 12.480397570403092, "grad_norm": 0.6353178024291992, "learning_rate": 2.9565333333333333e-05, "loss": 0.015, "step": 11307 }, { "epoch": 12.481501932633904, "grad_norm": 0.31767576932907104, "learning_rate": 2.9565000000000002e-05, "loss": 0.0167, "step": 11308 }, { "epoch": 12.482606294864716, "grad_norm": 0.308946818113327, "learning_rate": 2.9564666666666665e-05, "loss": 0.012, "step": 11309 }, { "epoch": 12.483710657095527, "grad_norm": 0.3166830837726593, "learning_rate": 2.9564333333333334e-05, "loss": 0.0117, "step": 11310 }, { "epoch": 12.484815019326339, "grad_norm": 0.6643165946006775, "learning_rate": 2.9564000000000004e-05, "loss": 0.0179, "step": 11311 }, { "epoch": 12.485919381557151, "grad_norm": 0.18871819972991943, "learning_rate": 
2.9563666666666666e-05, "loss": 0.0054, "step": 11312 }, { "epoch": 12.487023743787962, "grad_norm": 0.4575573801994324, "learning_rate": 2.9563333333333335e-05, "loss": 0.0147, "step": 11313 }, { "epoch": 12.488128106018774, "grad_norm": 0.9213082194328308, "learning_rate": 2.9563e-05, "loss": 0.0207, "step": 11314 }, { "epoch": 12.489232468249586, "grad_norm": 0.5296552181243896, "learning_rate": 2.9562666666666667e-05, "loss": 0.0253, "step": 11315 }, { "epoch": 12.490336830480398, "grad_norm": 0.5330410003662109, "learning_rate": 2.9562333333333333e-05, "loss": 0.026, "step": 11316 }, { "epoch": 12.491441192711209, "grad_norm": 0.3471197187900543, "learning_rate": 2.9562000000000003e-05, "loss": 0.0136, "step": 11317 }, { "epoch": 12.492545554942021, "grad_norm": 0.352071076631546, "learning_rate": 2.956166666666667e-05, "loss": 0.0124, "step": 11318 }, { "epoch": 12.493649917172833, "grad_norm": 0.3870251774787903, "learning_rate": 2.9561333333333334e-05, "loss": 0.022, "step": 11319 }, { "epoch": 12.494754279403644, "grad_norm": 0.7457314729690552, "learning_rate": 2.9561e-05, "loss": 0.0286, "step": 11320 }, { "epoch": 12.495858641634456, "grad_norm": 0.47119036316871643, "learning_rate": 2.9560666666666666e-05, "loss": 0.0188, "step": 11321 }, { "epoch": 12.496963003865268, "grad_norm": 0.25582820177078247, "learning_rate": 2.9560333333333336e-05, "loss": 0.0166, "step": 11322 }, { "epoch": 12.49806736609608, "grad_norm": 0.6593071222305298, "learning_rate": 2.9559999999999998e-05, "loss": 0.2169, "step": 11323 }, { "epoch": 12.49917172832689, "grad_norm": 0.4939763844013214, "learning_rate": 2.9559666666666668e-05, "loss": 0.1718, "step": 11324 }, { "epoch": 12.500276090557703, "grad_norm": 0.5312337875366211, "learning_rate": 2.9559333333333333e-05, "loss": 0.1137, "step": 11325 }, { "epoch": 12.501380452788515, "grad_norm": 0.5531430840492249, "learning_rate": 2.9559e-05, "loss": 0.0853, "step": 11326 }, { "epoch": 12.502484815019326, "grad_norm": 
0.37902194261550903, "learning_rate": 2.955866666666667e-05, "loss": 0.0953, "step": 11327 }, { "epoch": 12.503589177250138, "grad_norm": 0.6759823560714722, "learning_rate": 2.9558333333333335e-05, "loss": 0.0993, "step": 11328 }, { "epoch": 12.50469353948095, "grad_norm": 0.5920822620391846, "learning_rate": 2.9558e-05, "loss": 0.0433, "step": 11329 }, { "epoch": 12.50579790171176, "grad_norm": 0.29709431529045105, "learning_rate": 2.9557666666666667e-05, "loss": 0.0308, "step": 11330 }, { "epoch": 12.506902263942573, "grad_norm": 0.3445807695388794, "learning_rate": 2.9557333333333336e-05, "loss": 0.0374, "step": 11331 }, { "epoch": 12.508006626173385, "grad_norm": 0.2624148726463318, "learning_rate": 2.9557e-05, "loss": 0.0196, "step": 11332 }, { "epoch": 12.509110988404197, "grad_norm": 0.2440156787633896, "learning_rate": 2.9556666666666668e-05, "loss": 0.0147, "step": 11333 }, { "epoch": 12.510215350635008, "grad_norm": 0.14198020100593567, "learning_rate": 2.9556333333333334e-05, "loss": 0.0084, "step": 11334 }, { "epoch": 12.51131971286582, "grad_norm": 0.22511903941631317, "learning_rate": 2.9556e-05, "loss": 0.0193, "step": 11335 }, { "epoch": 12.512424075096632, "grad_norm": 0.1267324537038803, "learning_rate": 2.955566666666667e-05, "loss": 0.0082, "step": 11336 }, { "epoch": 12.513528437327443, "grad_norm": 0.1920701563358307, "learning_rate": 2.9555333333333335e-05, "loss": 0.0091, "step": 11337 }, { "epoch": 12.514632799558255, "grad_norm": 0.3624698519706726, "learning_rate": 2.9555e-05, "loss": 0.0175, "step": 11338 }, { "epoch": 12.515737161789067, "grad_norm": 0.3388652503490448, "learning_rate": 2.9554666666666667e-05, "loss": 0.0218, "step": 11339 }, { "epoch": 12.516841524019878, "grad_norm": 0.3468169867992401, "learning_rate": 2.9554333333333336e-05, "loss": 0.019, "step": 11340 }, { "epoch": 12.51794588625069, "grad_norm": 0.250740647315979, "learning_rate": 2.9554e-05, "loss": 0.0087, "step": 11341 }, { "epoch": 12.519050248481502, 
"grad_norm": 0.19764244556427002, "learning_rate": 2.9553666666666668e-05, "loss": 0.0118, "step": 11342 }, { "epoch": 12.520154610712314, "grad_norm": 0.45708540081977844, "learning_rate": 2.9553333333333334e-05, "loss": 0.0202, "step": 11343 }, { "epoch": 12.521258972943125, "grad_norm": 0.23885607719421387, "learning_rate": 2.9553e-05, "loss": 0.0151, "step": 11344 }, { "epoch": 12.522363335173937, "grad_norm": 0.5031306743621826, "learning_rate": 2.955266666666667e-05, "loss": 0.0157, "step": 11345 }, { "epoch": 12.52346769740475, "grad_norm": null, "learning_rate": 2.955266666666667e-05, "loss": 0.0672, "step": 11346 }, { "epoch": 12.52457205963556, "grad_norm": 0.25345030426979065, "learning_rate": 2.9552333333333335e-05, "loss": 0.0126, "step": 11347 }, { "epoch": 12.525676421866372, "grad_norm": 0.37010878324508667, "learning_rate": 2.9552e-05, "loss": 0.0101, "step": 11348 }, { "epoch": 12.526780784097184, "grad_norm": 1.2524471282958984, "learning_rate": 2.9551666666666667e-05, "loss": 0.0635, "step": 11349 }, { "epoch": 12.527885146327996, "grad_norm": 0.23606736958026886, "learning_rate": 2.9551333333333333e-05, "loss": 0.0121, "step": 11350 }, { "epoch": 12.528989508558807, "grad_norm": 0.4938491880893707, "learning_rate": 2.9551e-05, "loss": 0.0159, "step": 11351 }, { "epoch": 12.530093870789619, "grad_norm": 0.2736108899116516, "learning_rate": 2.9550666666666668e-05, "loss": 0.0093, "step": 11352 }, { "epoch": 12.531198233020431, "grad_norm": 0.3393142521381378, "learning_rate": 2.9550333333333334e-05, "loss": 0.0163, "step": 11353 }, { "epoch": 12.532302595251242, "grad_norm": 0.24076367914676666, "learning_rate": 2.955e-05, "loss": 0.0108, "step": 11354 }, { "epoch": 12.533406957482054, "grad_norm": 1.1700918674468994, "learning_rate": 2.954966666666667e-05, "loss": 0.0142, "step": 11355 }, { "epoch": 12.534511319712866, "grad_norm": 0.24107076227664948, "learning_rate": 2.9549333333333332e-05, "loss": 0.0112, "step": 11356 }, { "epoch": 
12.535615681943678, "grad_norm": 0.32905814051628113, "learning_rate": 2.9549e-05, "loss": 0.0113, "step": 11357 }, { "epoch": 12.536720044174489, "grad_norm": 0.3356166481971741, "learning_rate": 2.9548666666666667e-05, "loss": 0.0119, "step": 11358 }, { "epoch": 12.537824406405301, "grad_norm": 0.38944903016090393, "learning_rate": 2.9548333333333333e-05, "loss": 0.0184, "step": 11359 }, { "epoch": 12.538928768636113, "grad_norm": 0.5036919116973877, "learning_rate": 2.9548e-05, "loss": 0.014, "step": 11360 }, { "epoch": 12.540033130866924, "grad_norm": 0.6078060865402222, "learning_rate": 2.954766666666667e-05, "loss": 0.0251, "step": 11361 }, { "epoch": 12.541137493097736, "grad_norm": 0.28750357031822205, "learning_rate": 2.9547333333333334e-05, "loss": 0.0139, "step": 11362 }, { "epoch": 12.542241855328548, "grad_norm": 0.3293514847755432, "learning_rate": 2.9547e-05, "loss": 0.0123, "step": 11363 }, { "epoch": 12.54334621755936, "grad_norm": 0.44013816118240356, "learning_rate": 2.954666666666667e-05, "loss": 0.0181, "step": 11364 }, { "epoch": 12.54445057979017, "grad_norm": 0.218484565615654, "learning_rate": 2.9546333333333332e-05, "loss": 0.0084, "step": 11365 }, { "epoch": 12.545554942020983, "grad_norm": 0.3510917127132416, "learning_rate": 2.9546e-05, "loss": 0.0132, "step": 11366 }, { "epoch": 12.546659304251795, "grad_norm": 0.30245721340179443, "learning_rate": 2.9545666666666667e-05, "loss": 0.0111, "step": 11367 }, { "epoch": 12.547763666482606, "grad_norm": 0.14773677289485931, "learning_rate": 2.9545333333333333e-05, "loss": 0.0075, "step": 11368 }, { "epoch": 12.548868028713418, "grad_norm": 0.4313863515853882, "learning_rate": 2.9545000000000003e-05, "loss": 0.0175, "step": 11369 }, { "epoch": 12.54997239094423, "grad_norm": 0.23971416056156158, "learning_rate": 2.954466666666667e-05, "loss": 0.0122, "step": 11370 }, { "epoch": 12.55107675317504, "grad_norm": 0.5416075587272644, "learning_rate": 2.9544333333333334e-05, "loss": 0.0147, "step": 
11371 }, { "epoch": 12.552181115405853, "grad_norm": 0.282695472240448, "learning_rate": 2.9544e-05, "loss": 0.0065, "step": 11372 }, { "epoch": 12.553285477636665, "grad_norm": 0.8619658946990967, "learning_rate": 2.954366666666667e-05, "loss": 0.2591, "step": 11373 }, { "epoch": 12.554389839867477, "grad_norm": 0.5564818382263184, "learning_rate": 2.9543333333333332e-05, "loss": 0.1181, "step": 11374 }, { "epoch": 12.555494202098288, "grad_norm": 0.7138904929161072, "learning_rate": 2.9543e-05, "loss": 0.1257, "step": 11375 }, { "epoch": 12.5565985643291, "grad_norm": 0.4833969473838806, "learning_rate": 2.9542666666666668e-05, "loss": 0.1048, "step": 11376 }, { "epoch": 12.557702926559912, "grad_norm": 0.42136111855506897, "learning_rate": 2.9542333333333333e-05, "loss": 0.0699, "step": 11377 }, { "epoch": 12.558807288790723, "grad_norm": 0.6734410524368286, "learning_rate": 2.9542000000000003e-05, "loss": 0.0602, "step": 11378 }, { "epoch": 12.559911651021535, "grad_norm": 0.3939650058746338, "learning_rate": 2.954166666666667e-05, "loss": 0.0306, "step": 11379 }, { "epoch": 12.561016013252347, "grad_norm": 0.27834805846214294, "learning_rate": 2.9541333333333335e-05, "loss": 0.017, "step": 11380 }, { "epoch": 12.562120375483158, "grad_norm": 0.6246044039726257, "learning_rate": 2.9541e-05, "loss": 0.0445, "step": 11381 }, { "epoch": 12.56322473771397, "grad_norm": 0.520288348197937, "learning_rate": 2.9540666666666667e-05, "loss": 0.0303, "step": 11382 }, { "epoch": 12.564329099944782, "grad_norm": 0.35430246591567993, "learning_rate": 2.9540333333333332e-05, "loss": 0.0201, "step": 11383 }, { "epoch": 12.565433462175594, "grad_norm": 0.5992002487182617, "learning_rate": 2.9540000000000002e-05, "loss": 0.0115, "step": 11384 }, { "epoch": 12.566537824406405, "grad_norm": 0.6143417954444885, "learning_rate": 2.9539666666666664e-05, "loss": 0.0376, "step": 11385 }, { "epoch": 12.567642186637217, "grad_norm": 0.26327282190322876, "learning_rate": 
2.9539333333333334e-05, "loss": 0.0231, "step": 11386 }, { "epoch": 12.56874654886803, "grad_norm": 0.4279881417751312, "learning_rate": 2.9539000000000003e-05, "loss": 0.0143, "step": 11387 }, { "epoch": 12.56985091109884, "grad_norm": 0.27609702944755554, "learning_rate": 2.9538666666666666e-05, "loss": 0.0241, "step": 11388 }, { "epoch": 12.570955273329652, "grad_norm": 0.23970025777816772, "learning_rate": 2.9538333333333335e-05, "loss": 0.0072, "step": 11389 }, { "epoch": 12.572059635560464, "grad_norm": 0.46706119179725647, "learning_rate": 2.9538e-05, "loss": 0.0233, "step": 11390 }, { "epoch": 12.573163997791276, "grad_norm": 0.26014065742492676, "learning_rate": 2.9537666666666667e-05, "loss": 0.0127, "step": 11391 }, { "epoch": 12.574268360022087, "grad_norm": 0.26989901065826416, "learning_rate": 2.9537333333333333e-05, "loss": 0.0095, "step": 11392 }, { "epoch": 12.575372722252899, "grad_norm": 0.22967737913131714, "learning_rate": 2.9537000000000002e-05, "loss": 0.0158, "step": 11393 }, { "epoch": 12.576477084483711, "grad_norm": 0.1281682252883911, "learning_rate": 2.9536666666666668e-05, "loss": 0.0088, "step": 11394 }, { "epoch": 12.577581446714522, "grad_norm": 0.3531077206134796, "learning_rate": 2.9536333333333334e-05, "loss": 0.0149, "step": 11395 }, { "epoch": 12.578685808945334, "grad_norm": 0.2181721031665802, "learning_rate": 2.9536000000000003e-05, "loss": 0.0095, "step": 11396 }, { "epoch": 12.579790171176146, "grad_norm": 0.26482945680618286, "learning_rate": 2.9535666666666666e-05, "loss": 0.0092, "step": 11397 }, { "epoch": 12.580894533406958, "grad_norm": 0.22856369614601135, "learning_rate": 2.9535333333333335e-05, "loss": 0.0141, "step": 11398 }, { "epoch": 12.581998895637769, "grad_norm": 0.20487543940544128, "learning_rate": 2.9535e-05, "loss": 0.0123, "step": 11399 }, { "epoch": 12.583103257868581, "grad_norm": 0.4517592489719391, "learning_rate": 2.9534666666666667e-05, "loss": 0.0175, "step": 11400 }, { "epoch": 
12.584207620099393, "grad_norm": 0.31377896666526794, "learning_rate": 2.9534333333333333e-05, "loss": 0.0167, "step": 11401 }, { "epoch": 12.585311982330204, "grad_norm": 0.16393589973449707, "learning_rate": 2.9534000000000002e-05, "loss": 0.0107, "step": 11402 }, { "epoch": 12.586416344561016, "grad_norm": 0.4075744152069092, "learning_rate": 2.9533666666666668e-05, "loss": 0.0174, "step": 11403 }, { "epoch": 12.587520706791828, "grad_norm": 0.1671440601348877, "learning_rate": 2.9533333333333334e-05, "loss": 0.0104, "step": 11404 }, { "epoch": 12.588625069022639, "grad_norm": 0.3187660574913025, "learning_rate": 2.9533000000000003e-05, "loss": 0.012, "step": 11405 }, { "epoch": 12.589729431253451, "grad_norm": 0.18832427263259888, "learning_rate": 2.9532666666666666e-05, "loss": 0.0113, "step": 11406 }, { "epoch": 12.590833793484263, "grad_norm": 0.3018519878387451, "learning_rate": 2.9532333333333335e-05, "loss": 0.0133, "step": 11407 }, { "epoch": 12.591938155715075, "grad_norm": 0.472208708524704, "learning_rate": 2.9532e-05, "loss": 0.0185, "step": 11408 }, { "epoch": 12.593042517945886, "grad_norm": 0.1506710797548294, "learning_rate": 2.9531666666666667e-05, "loss": 0.007, "step": 11409 }, { "epoch": 12.594146880176698, "grad_norm": 0.3398653566837311, "learning_rate": 2.9531333333333333e-05, "loss": 0.0205, "step": 11410 }, { "epoch": 12.59525124240751, "grad_norm": 0.5057369470596313, "learning_rate": 2.9531e-05, "loss": 0.0195, "step": 11411 }, { "epoch": 12.59635560463832, "grad_norm": 0.2383701354265213, "learning_rate": 2.953066666666667e-05, "loss": 0.0152, "step": 11412 }, { "epoch": 12.597459966869133, "grad_norm": 0.21149379014968872, "learning_rate": 2.9530333333333334e-05, "loss": 0.0138, "step": 11413 }, { "epoch": 12.598564329099945, "grad_norm": 0.46074458956718445, "learning_rate": 2.953e-05, "loss": 0.024, "step": 11414 }, { "epoch": 12.599668691330756, "grad_norm": 0.2213509976863861, "learning_rate": 2.9529666666666666e-05, "loss": 
0.0115, "step": 11415 }, { "epoch": 12.600773053561568, "grad_norm": 0.23774345219135284, "learning_rate": 2.9529333333333335e-05, "loss": 0.0084, "step": 11416 }, { "epoch": 12.60187741579238, "grad_norm": 0.45703792572021484, "learning_rate": 2.9528999999999998e-05, "loss": 0.0081, "step": 11417 }, { "epoch": 12.602981778023192, "grad_norm": 0.7047241926193237, "learning_rate": 2.9528666666666667e-05, "loss": 0.0197, "step": 11418 }, { "epoch": 12.604086140254003, "grad_norm": 0.19024299085140228, "learning_rate": 2.9528333333333337e-05, "loss": 0.0088, "step": 11419 }, { "epoch": 12.605190502484815, "grad_norm": 0.37080755829811096, "learning_rate": 2.9528e-05, "loss": 0.013, "step": 11420 }, { "epoch": 12.606294864715627, "grad_norm": 0.3700105547904968, "learning_rate": 2.952766666666667e-05, "loss": 0.0159, "step": 11421 }, { "epoch": 12.607399226946438, "grad_norm": 0.21034738421440125, "learning_rate": 2.9527333333333334e-05, "loss": 0.0072, "step": 11422 }, { "epoch": 12.60850358917725, "grad_norm": 0.5606139898300171, "learning_rate": 2.9527e-05, "loss": 0.23, "step": 11423 }, { "epoch": 12.609607951408062, "grad_norm": 0.5610578656196594, "learning_rate": 2.9526666666666666e-05, "loss": 0.1329, "step": 11424 }, { "epoch": 12.610712313638874, "grad_norm": 0.462961882352829, "learning_rate": 2.9526333333333336e-05, "loss": 0.1177, "step": 11425 }, { "epoch": 12.611816675869685, "grad_norm": 0.6891769170761108, "learning_rate": 2.9525999999999998e-05, "loss": 0.0964, "step": 11426 }, { "epoch": 12.612921038100497, "grad_norm": 0.4464605748653412, "learning_rate": 2.9525666666666668e-05, "loss": 0.0729, "step": 11427 }, { "epoch": 12.61402540033131, "grad_norm": 0.5691347122192383, "learning_rate": 2.9525333333333337e-05, "loss": 0.1476, "step": 11428 }, { "epoch": 12.61512976256212, "grad_norm": 0.44914156198501587, "learning_rate": 2.9525e-05, "loss": 0.064, "step": 11429 }, { "epoch": 12.616234124792932, "grad_norm": 0.40088871121406555, "learning_rate": 
2.952466666666667e-05, "loss": 0.0555, "step": 11430 }, { "epoch": 12.617338487023744, "grad_norm": 0.3933244049549103, "learning_rate": 2.9524333333333335e-05, "loss": 0.0334, "step": 11431 }, { "epoch": 12.618442849254556, "grad_norm": 0.31683817505836487, "learning_rate": 2.9524e-05, "loss": 0.0368, "step": 11432 }, { "epoch": 12.619547211485367, "grad_norm": 0.2792118489742279, "learning_rate": 2.9523666666666667e-05, "loss": 0.0405, "step": 11433 }, { "epoch": 12.620651573716179, "grad_norm": 0.19491100311279297, "learning_rate": 2.9523333333333336e-05, "loss": 0.0093, "step": 11434 }, { "epoch": 12.621755935946991, "grad_norm": 0.128497913479805, "learning_rate": 2.9523e-05, "loss": 0.0085, "step": 11435 }, { "epoch": 12.622860298177802, "grad_norm": 0.5212563872337341, "learning_rate": 2.9522666666666668e-05, "loss": 0.0164, "step": 11436 }, { "epoch": 12.623964660408614, "grad_norm": 0.26457110047340393, "learning_rate": 2.9522333333333337e-05, "loss": 0.0207, "step": 11437 }, { "epoch": 12.625069022639426, "grad_norm": 0.2062905877828598, "learning_rate": 2.9522e-05, "loss": 0.0145, "step": 11438 }, { "epoch": 12.626173384870237, "grad_norm": 0.26429396867752075, "learning_rate": 2.952166666666667e-05, "loss": 0.0095, "step": 11439 }, { "epoch": 12.627277747101049, "grad_norm": 0.3417024314403534, "learning_rate": 2.9521333333333335e-05, "loss": 0.023, "step": 11440 }, { "epoch": 12.628382109331861, "grad_norm": 0.3477291464805603, "learning_rate": 2.9521e-05, "loss": 0.0178, "step": 11441 }, { "epoch": 12.629486471562673, "grad_norm": 0.30134066939353943, "learning_rate": 2.9520666666666667e-05, "loss": 0.0133, "step": 11442 }, { "epoch": 12.630590833793484, "grad_norm": 0.17224833369255066, "learning_rate": 2.9520333333333333e-05, "loss": 0.0078, "step": 11443 }, { "epoch": 12.631695196024296, "grad_norm": 0.2709513008594513, "learning_rate": 2.9520000000000002e-05, "loss": 0.0153, "step": 11444 }, { "epoch": 12.632799558255108, "grad_norm": 
0.26405391097068787, "learning_rate": 2.9519666666666668e-05, "loss": 0.0138, "step": 11445 }, { "epoch": 12.633903920485919, "grad_norm": 0.2936757504940033, "learning_rate": 2.9519333333333334e-05, "loss": 0.0289, "step": 11446 }, { "epoch": 12.635008282716731, "grad_norm": 0.3824059069156647, "learning_rate": 2.9519e-05, "loss": 0.0072, "step": 11447 }, { "epoch": 12.636112644947543, "grad_norm": 0.1629679799079895, "learning_rate": 2.951866666666667e-05, "loss": 0.0129, "step": 11448 }, { "epoch": 12.637217007178354, "grad_norm": 0.243944451212883, "learning_rate": 2.951833333333333e-05, "loss": 0.0099, "step": 11449 }, { "epoch": 12.638321369409166, "grad_norm": 0.4803660213947296, "learning_rate": 2.9518e-05, "loss": 0.0197, "step": 11450 }, { "epoch": 12.639425731639978, "grad_norm": 0.22517931461334229, "learning_rate": 2.9517666666666667e-05, "loss": 0.0173, "step": 11451 }, { "epoch": 12.64053009387079, "grad_norm": 0.29178938269615173, "learning_rate": 2.9517333333333333e-05, "loss": 0.0078, "step": 11452 }, { "epoch": 12.6416344561016, "grad_norm": 0.14325833320617676, "learning_rate": 2.9517000000000002e-05, "loss": 0.0063, "step": 11453 }, { "epoch": 12.642738818332413, "grad_norm": 0.20947624742984772, "learning_rate": 2.9516666666666668e-05, "loss": 0.0079, "step": 11454 }, { "epoch": 12.643843180563225, "grad_norm": 0.20538027584552765, "learning_rate": 2.9516333333333334e-05, "loss": 0.0197, "step": 11455 }, { "epoch": 12.644947542794036, "grad_norm": 0.16624784469604492, "learning_rate": 2.9516e-05, "loss": 0.0101, "step": 11456 }, { "epoch": 12.646051905024848, "grad_norm": 0.2921646535396576, "learning_rate": 2.951566666666667e-05, "loss": 0.0097, "step": 11457 }, { "epoch": 12.64715626725566, "grad_norm": 0.29998475313186646, "learning_rate": 2.9515333333333332e-05, "loss": 0.0104, "step": 11458 }, { "epoch": 12.648260629486472, "grad_norm": 0.23110289871692657, "learning_rate": 2.9515e-05, "loss": 0.0076, "step": 11459 }, { "epoch": 
12.649364991717283, "grad_norm": 0.5077589750289917, "learning_rate": 2.9514666666666667e-05, "loss": 0.0114, "step": 11460 }, { "epoch": 12.650469353948095, "grad_norm": 0.12669657170772552, "learning_rate": 2.9514333333333333e-05, "loss": 0.0061, "step": 11461 }, { "epoch": 12.651573716178907, "grad_norm": 0.47823506593704224, "learning_rate": 2.9514000000000002e-05, "loss": 0.0074, "step": 11462 }, { "epoch": 12.652678078409718, "grad_norm": 1.304175853729248, "learning_rate": 2.9513666666666668e-05, "loss": 0.0367, "step": 11463 }, { "epoch": 12.65378244064053, "grad_norm": 0.49766749143600464, "learning_rate": 2.9513333333333334e-05, "loss": 0.0139, "step": 11464 }, { "epoch": 12.654886802871342, "grad_norm": 0.23643378913402557, "learning_rate": 2.9513e-05, "loss": 0.0116, "step": 11465 }, { "epoch": 12.655991165102154, "grad_norm": 0.28976911306381226, "learning_rate": 2.951266666666667e-05, "loss": 0.0086, "step": 11466 }, { "epoch": 12.657095527332965, "grad_norm": 0.8331310749053955, "learning_rate": 2.9512333333333332e-05, "loss": 0.0207, "step": 11467 }, { "epoch": 12.658199889563777, "grad_norm": 0.3101195991039276, "learning_rate": 2.9512e-05, "loss": 0.0126, "step": 11468 }, { "epoch": 12.65930425179459, "grad_norm": 0.4792432188987732, "learning_rate": 2.951166666666667e-05, "loss": 0.0135, "step": 11469 }, { "epoch": 12.6604086140254, "grad_norm": 0.32658326625823975, "learning_rate": 2.9511333333333333e-05, "loss": 0.0163, "step": 11470 }, { "epoch": 12.661512976256212, "grad_norm": 0.37526971101760864, "learning_rate": 2.9511000000000003e-05, "loss": 0.0181, "step": 11471 }, { "epoch": 12.662617338487024, "grad_norm": 0.5242461562156677, "learning_rate": 2.9510666666666665e-05, "loss": 0.0166, "step": 11472 }, { "epoch": 12.663721700717835, "grad_norm": 0.46244946122169495, "learning_rate": 2.9510333333333334e-05, "loss": 0.1449, "step": 11473 }, { "epoch": 12.664826062948647, "grad_norm": 0.7531967163085938, "learning_rate": 2.951e-05, "loss": 
0.1523, "step": 11474 }, { "epoch": 12.665930425179459, "grad_norm": 0.6243905425071716, "learning_rate": 2.9509666666666666e-05, "loss": 0.1639, "step": 11475 }, { "epoch": 12.667034787410271, "grad_norm": 0.5007765293121338, "learning_rate": 2.9509333333333332e-05, "loss": 0.1026, "step": 11476 }, { "epoch": 12.668139149641082, "grad_norm": 0.41829779744148254, "learning_rate": 2.9509e-05, "loss": 0.0722, "step": 11477 }, { "epoch": 12.669243511871894, "grad_norm": 0.659700334072113, "learning_rate": 2.9508666666666668e-05, "loss": 0.1063, "step": 11478 }, { "epoch": 12.670347874102706, "grad_norm": 0.4310690462589264, "learning_rate": 2.9508333333333333e-05, "loss": 0.0483, "step": 11479 }, { "epoch": 12.671452236333517, "grad_norm": 0.3526271879673004, "learning_rate": 2.9508000000000003e-05, "loss": 0.0281, "step": 11480 }, { "epoch": 12.672556598564329, "grad_norm": 0.2115413397550583, "learning_rate": 2.9507666666666665e-05, "loss": 0.01, "step": 11481 }, { "epoch": 12.673660960795141, "grad_norm": 0.37831827998161316, "learning_rate": 2.9507333333333335e-05, "loss": 0.0302, "step": 11482 }, { "epoch": 12.674765323025952, "grad_norm": 0.21130532026290894, "learning_rate": 2.9507e-05, "loss": 0.0183, "step": 11483 }, { "epoch": 12.675869685256764, "grad_norm": 0.21384204924106598, "learning_rate": 2.9506666666666667e-05, "loss": 0.0195, "step": 11484 }, { "epoch": 12.676974047487576, "grad_norm": 0.30272606015205383, "learning_rate": 2.9506333333333332e-05, "loss": 0.019, "step": 11485 }, { "epoch": 12.678078409718388, "grad_norm": 0.245125412940979, "learning_rate": 2.9506000000000002e-05, "loss": 0.019, "step": 11486 }, { "epoch": 12.679182771949199, "grad_norm": 0.272583931684494, "learning_rate": 2.9505666666666668e-05, "loss": 0.0141, "step": 11487 }, { "epoch": 12.680287134180011, "grad_norm": 0.24155548214912415, "learning_rate": 2.9505333333333334e-05, "loss": 0.0123, "step": 11488 }, { "epoch": 12.681391496410823, "grad_norm": 0.4206956624984741, 
"learning_rate": 2.9505000000000003e-05, "loss": 0.0208, "step": 11489 }, { "epoch": 12.682495858641634, "grad_norm": 0.14110037684440613, "learning_rate": 2.9504666666666666e-05, "loss": 0.0063, "step": 11490 }, { "epoch": 12.683600220872446, "grad_norm": 0.22245216369628906, "learning_rate": 2.9504333333333335e-05, "loss": 0.0086, "step": 11491 }, { "epoch": 12.684704583103258, "grad_norm": 0.25797560811042786, "learning_rate": 2.9504e-05, "loss": 0.0168, "step": 11492 }, { "epoch": 12.68580894533407, "grad_norm": 0.17561683058738708, "learning_rate": 2.9503666666666667e-05, "loss": 0.0124, "step": 11493 }, { "epoch": 12.68691330756488, "grad_norm": 0.23331274092197418, "learning_rate": 2.9503333333333336e-05, "loss": 0.0174, "step": 11494 }, { "epoch": 12.688017669795693, "grad_norm": 0.6757140159606934, "learning_rate": 2.9503000000000002e-05, "loss": 0.0141, "step": 11495 }, { "epoch": 12.689122032026505, "grad_norm": 0.26494744420051575, "learning_rate": 2.9502666666666668e-05, "loss": 0.0167, "step": 11496 }, { "epoch": 12.690226394257316, "grad_norm": 0.7618947625160217, "learning_rate": 2.9502333333333334e-05, "loss": 0.014, "step": 11497 }, { "epoch": 12.691330756488128, "grad_norm": 0.3352535665035248, "learning_rate": 2.9502000000000003e-05, "loss": 0.0133, "step": 11498 }, { "epoch": 12.69243511871894, "grad_norm": 0.2706852853298187, "learning_rate": 2.9501666666666666e-05, "loss": 0.0101, "step": 11499 }, { "epoch": 12.693539480949752, "grad_norm": 0.21174030005931854, "learning_rate": 2.9501333333333335e-05, "loss": 0.0091, "step": 11500 }, { "epoch": 12.694643843180563, "grad_norm": 0.25396090745925903, "learning_rate": 2.9500999999999998e-05, "loss": 0.0138, "step": 11501 }, { "epoch": 12.695748205411375, "grad_norm": 0.2056809365749359, "learning_rate": 2.9500666666666667e-05, "loss": 0.0082, "step": 11502 }, { "epoch": 12.696852567642187, "grad_norm": 0.4324999749660492, "learning_rate": 2.9500333333333336e-05, "loss": 0.0128, "step": 11503 }, { 
"epoch": 12.697956929872998, "grad_norm": 0.5908531546592712, "learning_rate": 2.95e-05, "loss": 0.029, "step": 11504 }, { "epoch": 12.69906129210381, "grad_norm": 0.22892963886260986, "learning_rate": 2.9499666666666668e-05, "loss": 0.0113, "step": 11505 }, { "epoch": 12.700165654334622, "grad_norm": 0.20006398856639862, "learning_rate": 2.9499333333333334e-05, "loss": 0.015, "step": 11506 }, { "epoch": 12.701270016565433, "grad_norm": 0.3988220691680908, "learning_rate": 2.9499e-05, "loss": 0.0186, "step": 11507 }, { "epoch": 12.702374378796245, "grad_norm": 0.35929396748542786, "learning_rate": 2.9498666666666666e-05, "loss": 0.0195, "step": 11508 }, { "epoch": 12.703478741027057, "grad_norm": 0.3819391429424286, "learning_rate": 2.9498333333333335e-05, "loss": 0.0116, "step": 11509 }, { "epoch": 12.70458310325787, "grad_norm": 0.393616259098053, "learning_rate": 2.9497999999999998e-05, "loss": 0.0082, "step": 11510 }, { "epoch": 12.70568746548868, "grad_norm": 0.2969827353954315, "learning_rate": 2.9497666666666667e-05, "loss": 0.0148, "step": 11511 }, { "epoch": 12.706791827719492, "grad_norm": 0.35637134313583374, "learning_rate": 2.9497333333333336e-05, "loss": 0.0159, "step": 11512 }, { "epoch": 12.707896189950304, "grad_norm": 0.4720700979232788, "learning_rate": 2.9497e-05, "loss": 0.0071, "step": 11513 }, { "epoch": 12.709000552181115, "grad_norm": 0.3540847897529602, "learning_rate": 2.9496666666666668e-05, "loss": 0.0125, "step": 11514 }, { "epoch": 12.710104914411927, "grad_norm": 0.3582540452480316, "learning_rate": 2.9496333333333334e-05, "loss": 0.0185, "step": 11515 }, { "epoch": 12.71120927664274, "grad_norm": 0.3936487138271332, "learning_rate": 2.9496e-05, "loss": 0.0163, "step": 11516 }, { "epoch": 12.71231363887355, "grad_norm": 0.5017183423042297, "learning_rate": 2.9495666666666666e-05, "loss": 0.0221, "step": 11517 }, { "epoch": 12.713418001104362, "grad_norm": 0.4083687961101532, "learning_rate": 2.9495333333333335e-05, "loss": 0.0209, 
"step": 11518 }, { "epoch": 12.714522363335174, "grad_norm": 0.45942890644073486, "learning_rate": 2.9495e-05, "loss": 0.0109, "step": 11519 }, { "epoch": 12.715626725565986, "grad_norm": 0.26630622148513794, "learning_rate": 2.9494666666666667e-05, "loss": 0.0098, "step": 11520 }, { "epoch": 12.716731087796797, "grad_norm": 0.36406761407852173, "learning_rate": 2.9494333333333337e-05, "loss": 0.0136, "step": 11521 }, { "epoch": 12.717835450027609, "grad_norm": 0.35489746928215027, "learning_rate": 2.9494e-05, "loss": 0.0152, "step": 11522 }, { "epoch": 12.718939812258421, "grad_norm": 0.7876566648483276, "learning_rate": 2.949366666666667e-05, "loss": 0.2151, "step": 11523 }, { "epoch": 12.720044174489232, "grad_norm": 1.6158807277679443, "learning_rate": 2.9493333333333334e-05, "loss": 0.178, "step": 11524 }, { "epoch": 12.721148536720044, "grad_norm": 0.7437929511070251, "learning_rate": 2.9493e-05, "loss": 0.111, "step": 11525 }, { "epoch": 12.722252898950856, "grad_norm": 0.6666786670684814, "learning_rate": 2.9492666666666666e-05, "loss": 0.1844, "step": 11526 }, { "epoch": 12.723357261181668, "grad_norm": 0.41953691840171814, "learning_rate": 2.9492333333333336e-05, "loss": 0.0864, "step": 11527 }, { "epoch": 12.724461623412479, "grad_norm": 0.41655176877975464, "learning_rate": 2.9492e-05, "loss": 0.0599, "step": 11528 }, { "epoch": 12.725565985643291, "grad_norm": 0.5961641073226929, "learning_rate": 2.9491666666666667e-05, "loss": 0.0714, "step": 11529 }, { "epoch": 12.726670347874103, "grad_norm": 0.3774968981742859, "learning_rate": 2.9491333333333337e-05, "loss": 0.0305, "step": 11530 }, { "epoch": 12.727774710104914, "grad_norm": 0.19948896765708923, "learning_rate": 2.9491e-05, "loss": 0.0137, "step": 11531 }, { "epoch": 12.728879072335726, "grad_norm": 0.4628531336784363, "learning_rate": 2.949066666666667e-05, "loss": 0.0357, "step": 11532 }, { "epoch": 12.729983434566538, "grad_norm": 0.18276755511760712, "learning_rate": 2.949033333333333e-05, 
"loss": 0.0126, "step": 11533 }, { "epoch": 12.73108779679735, "grad_norm": 0.2683688700199127, "learning_rate": 2.949e-05, "loss": 0.0123, "step": 11534 }, { "epoch": 12.73219215902816, "grad_norm": 0.22102570533752441, "learning_rate": 2.9489666666666666e-05, "loss": 0.0124, "step": 11535 }, { "epoch": 12.733296521258973, "grad_norm": 0.3530611991882324, "learning_rate": 2.9489333333333332e-05, "loss": 0.0112, "step": 11536 }, { "epoch": 12.734400883489785, "grad_norm": 0.6421804428100586, "learning_rate": 2.9489000000000002e-05, "loss": 0.0142, "step": 11537 }, { "epoch": 12.735505245720596, "grad_norm": 0.18110451102256775, "learning_rate": 2.9488666666666668e-05, "loss": 0.0148, "step": 11538 }, { "epoch": 12.736609607951408, "grad_norm": 0.19477403163909912, "learning_rate": 2.9488333333333334e-05, "loss": 0.0118, "step": 11539 }, { "epoch": 12.73771397018222, "grad_norm": 0.4779641926288605, "learning_rate": 2.9488e-05, "loss": 0.0148, "step": 11540 }, { "epoch": 12.738818332413032, "grad_norm": 0.24405774474143982, "learning_rate": 2.948766666666667e-05, "loss": 0.0143, "step": 11541 }, { "epoch": 12.739922694643843, "grad_norm": 0.2930353581905365, "learning_rate": 2.948733333333333e-05, "loss": 0.0113, "step": 11542 }, { "epoch": 12.741027056874655, "grad_norm": 0.19912511110305786, "learning_rate": 2.9487e-05, "loss": 0.0076, "step": 11543 }, { "epoch": 12.742131419105467, "grad_norm": 0.2966066896915436, "learning_rate": 2.948666666666667e-05, "loss": 0.013, "step": 11544 }, { "epoch": 12.743235781336278, "grad_norm": 0.28933048248291016, "learning_rate": 2.9486333333333333e-05, "loss": 0.0141, "step": 11545 }, { "epoch": 12.74434014356709, "grad_norm": 0.8630377054214478, "learning_rate": 2.9486000000000002e-05, "loss": 0.0098, "step": 11546 }, { "epoch": 12.745444505797902, "grad_norm": 0.21284925937652588, "learning_rate": 2.9485666666666668e-05, "loss": 0.0116, "step": 11547 }, { "epoch": 12.746548868028713, "grad_norm": 0.8743779063224792, 
"learning_rate": 2.9485333333333334e-05, "loss": 0.1135, "step": 11548 }, { "epoch": 12.747653230259525, "grad_norm": 0.2849842607975006, "learning_rate": 2.9485e-05, "loss": 0.0108, "step": 11549 }, { "epoch": 12.748757592490337, "grad_norm": 0.45977112650871277, "learning_rate": 2.948466666666667e-05, "loss": 0.0218, "step": 11550 }, { "epoch": 12.74986195472115, "grad_norm": 0.36729004979133606, "learning_rate": 2.948433333333333e-05, "loss": 0.0061, "step": 11551 }, { "epoch": 12.75096631695196, "grad_norm": 0.32369372248649597, "learning_rate": 2.9484e-05, "loss": 0.0083, "step": 11552 }, { "epoch": 12.752070679182772, "grad_norm": 0.31966057419776917, "learning_rate": 2.948366666666667e-05, "loss": 0.016, "step": 11553 }, { "epoch": 12.753175041413584, "grad_norm": 0.22457833588123322, "learning_rate": 2.9483333333333333e-05, "loss": 0.0084, "step": 11554 }, { "epoch": 12.754279403644395, "grad_norm": 0.2971160113811493, "learning_rate": 2.9483000000000002e-05, "loss": 0.0088, "step": 11555 }, { "epoch": 12.755383765875207, "grad_norm": 0.14923076331615448, "learning_rate": 2.9482666666666668e-05, "loss": 0.0099, "step": 11556 }, { "epoch": 12.75648812810602, "grad_norm": 0.30782660841941833, "learning_rate": 2.9482333333333334e-05, "loss": 0.0106, "step": 11557 }, { "epoch": 12.75759249033683, "grad_norm": 1.7476223707199097, "learning_rate": 2.9482e-05, "loss": 0.0127, "step": 11558 }, { "epoch": 12.758696852567642, "grad_norm": 0.3372189998626709, "learning_rate": 2.948166666666667e-05, "loss": 0.0103, "step": 11559 }, { "epoch": 12.759801214798454, "grad_norm": 0.4122660458087921, "learning_rate": 2.9481333333333332e-05, "loss": 0.012, "step": 11560 }, { "epoch": 12.760905577029266, "grad_norm": 0.5317835807800293, "learning_rate": 2.9481e-05, "loss": 0.0229, "step": 11561 }, { "epoch": 12.762009939260077, "grad_norm": 0.30198362469673157, "learning_rate": 2.9480666666666667e-05, "loss": 0.0145, "step": 11562 }, { "epoch": 12.763114301490889, "grad_norm": 
0.3221431076526642, "learning_rate": 2.9480333333333333e-05, "loss": 0.0107, "step": 11563 }, { "epoch": 12.764218663721701, "grad_norm": 0.30582156777381897, "learning_rate": 2.9480000000000002e-05, "loss": 0.0117, "step": 11564 }, { "epoch": 12.765323025952512, "grad_norm": 0.3090404272079468, "learning_rate": 2.9479666666666665e-05, "loss": 0.0154, "step": 11565 }, { "epoch": 12.766427388183324, "grad_norm": 0.272859126329422, "learning_rate": 2.9479333333333334e-05, "loss": 0.0142, "step": 11566 }, { "epoch": 12.767531750414136, "grad_norm": 0.24397951364517212, "learning_rate": 2.9479e-05, "loss": 0.0106, "step": 11567 }, { "epoch": 12.768636112644948, "grad_norm": 0.3584085702896118, "learning_rate": 2.9478666666666666e-05, "loss": 0.0224, "step": 11568 }, { "epoch": 12.769740474875759, "grad_norm": 0.432679682970047, "learning_rate": 2.9478333333333335e-05, "loss": 0.0092, "step": 11569 }, { "epoch": 12.770844837106571, "grad_norm": 0.32224100828170776, "learning_rate": 2.9478e-05, "loss": 0.0122, "step": 11570 }, { "epoch": 12.771949199337383, "grad_norm": 0.9181424379348755, "learning_rate": 2.9477666666666667e-05, "loss": 0.0201, "step": 11571 }, { "epoch": 12.773053561568194, "grad_norm": 0.7531841397285461, "learning_rate": 2.9477333333333333e-05, "loss": 0.0385, "step": 11572 }, { "epoch": 12.774157923799006, "grad_norm": 0.8841968178749084, "learning_rate": 2.9477000000000003e-05, "loss": 0.2242, "step": 11573 }, { "epoch": 12.775262286029818, "grad_norm": 0.5546919703483582, "learning_rate": 2.9476666666666665e-05, "loss": 0.1565, "step": 11574 }, { "epoch": 12.77636664826063, "grad_norm": 0.49287357926368713, "learning_rate": 2.9476333333333334e-05, "loss": 0.112, "step": 11575 }, { "epoch": 12.77747101049144, "grad_norm": 0.6807396411895752, "learning_rate": 2.9476e-05, "loss": 0.1634, "step": 11576 }, { "epoch": 12.778575372722253, "grad_norm": 0.5478333830833435, "learning_rate": 2.9475666666666666e-05, "loss": 0.129, "step": 11577 }, { "epoch": 
12.779679734953065, "grad_norm": 0.4038785696029663, "learning_rate": 2.9475333333333336e-05, "loss": 0.0626, "step": 11578 }, { "epoch": 12.780784097183876, "grad_norm": 0.38105252385139465, "learning_rate": 2.9475e-05, "loss": 0.0531, "step": 11579 }, { "epoch": 12.781888459414688, "grad_norm": 0.590339183807373, "learning_rate": 2.9474666666666667e-05, "loss": 0.0925, "step": 11580 }, { "epoch": 12.7829928216455, "grad_norm": 0.30771389603614807, "learning_rate": 2.9474333333333333e-05, "loss": 0.0344, "step": 11581 }, { "epoch": 12.78409718387631, "grad_norm": 0.7245281934738159, "learning_rate": 2.9474000000000003e-05, "loss": 0.047, "step": 11582 }, { "epoch": 12.785201546107123, "grad_norm": 0.28991249203681946, "learning_rate": 2.9473666666666665e-05, "loss": 0.0141, "step": 11583 }, { "epoch": 12.786305908337935, "grad_norm": 0.45505568385124207, "learning_rate": 2.9473333333333335e-05, "loss": 0.0106, "step": 11584 }, { "epoch": 12.787410270568747, "grad_norm": 0.42092064023017883, "learning_rate": 2.9473e-05, "loss": 0.046, "step": 11585 }, { "epoch": 12.788514632799558, "grad_norm": 0.6416894793510437, "learning_rate": 2.9472666666666666e-05, "loss": 0.0158, "step": 11586 }, { "epoch": 12.78961899503037, "grad_norm": 0.45055118203163147, "learning_rate": 2.9472333333333336e-05, "loss": 0.0107, "step": 11587 }, { "epoch": 12.790723357261182, "grad_norm": 0.17367452383041382, "learning_rate": 2.9472000000000002e-05, "loss": 0.0094, "step": 11588 }, { "epoch": 12.791827719491993, "grad_norm": 0.340742290019989, "learning_rate": 2.9471666666666668e-05, "loss": 0.0192, "step": 11589 }, { "epoch": 12.792932081722805, "grad_norm": 0.4992195665836334, "learning_rate": 2.9471333333333334e-05, "loss": 0.0242, "step": 11590 }, { "epoch": 12.794036443953617, "grad_norm": 0.10468921810388565, "learning_rate": 2.9471000000000003e-05, "loss": 0.005, "step": 11591 }, { "epoch": 12.795140806184428, "grad_norm": 0.34126046299934387, "learning_rate": 
2.9470666666666665e-05, "loss": 0.0109, "step": 11592 }, { "epoch": 12.79624516841524, "grad_norm": 0.22668835520744324, "learning_rate": 2.9470333333333335e-05, "loss": 0.0139, "step": 11593 }, { "epoch": 12.797349530646052, "grad_norm": 0.4932170510292053, "learning_rate": 2.947e-05, "loss": 0.0151, "step": 11594 }, { "epoch": 12.798453892876864, "grad_norm": 0.3452346920967102, "learning_rate": 2.9469666666666667e-05, "loss": 0.0141, "step": 11595 }, { "epoch": 12.799558255107675, "grad_norm": 0.2266470044851303, "learning_rate": 2.9469333333333336e-05, "loss": 0.0126, "step": 11596 }, { "epoch": 12.800662617338487, "grad_norm": 0.32906919717788696, "learning_rate": 2.9469e-05, "loss": 0.0219, "step": 11597 }, { "epoch": 12.8017669795693, "grad_norm": 0.16966164112091064, "learning_rate": 2.9468666666666668e-05, "loss": 0.0138, "step": 11598 }, { "epoch": 12.80287134180011, "grad_norm": 0.3854256868362427, "learning_rate": 2.9468333333333334e-05, "loss": 0.007, "step": 11599 }, { "epoch": 12.803975704030922, "grad_norm": 0.5941329002380371, "learning_rate": 2.9468e-05, "loss": 0.0137, "step": 11600 }, { "epoch": 12.805080066261734, "grad_norm": 0.21589650213718414, "learning_rate": 2.9467666666666666e-05, "loss": 0.0139, "step": 11601 }, { "epoch": 12.806184428492546, "grad_norm": 0.29293859004974365, "learning_rate": 2.9467333333333335e-05, "loss": 0.0175, "step": 11602 }, { "epoch": 12.807288790723357, "grad_norm": 0.21565186977386475, "learning_rate": 2.9467e-05, "loss": 0.0149, "step": 11603 }, { "epoch": 12.808393152954169, "grad_norm": 0.19936209917068481, "learning_rate": 2.9466666666666667e-05, "loss": 0.0111, "step": 11604 }, { "epoch": 12.809497515184981, "grad_norm": 0.18601767718791962, "learning_rate": 2.9466333333333336e-05, "loss": 0.0124, "step": 11605 }, { "epoch": 12.810601877415792, "grad_norm": 0.3523183763027191, "learning_rate": 2.9466e-05, "loss": 0.0159, "step": 11606 }, { "epoch": 12.811706239646604, "grad_norm": 0.2044489085674286, 
"learning_rate": 2.9465666666666668e-05, "loss": 0.0049, "step": 11607 }, { "epoch": 12.812810601877416, "grad_norm": 0.2937985360622406, "learning_rate": 2.9465333333333334e-05, "loss": 0.0088, "step": 11608 }, { "epoch": 12.813914964108228, "grad_norm": 0.33220618963241577, "learning_rate": 2.9465e-05, "loss": 0.0117, "step": 11609 }, { "epoch": 12.815019326339039, "grad_norm": 0.2628420293331146, "learning_rate": 2.9464666666666666e-05, "loss": 0.0117, "step": 11610 }, { "epoch": 12.816123688569851, "grad_norm": 0.45819002389907837, "learning_rate": 2.9464333333333335e-05, "loss": 0.0148, "step": 11611 }, { "epoch": 12.817228050800663, "grad_norm": 0.19544963538646698, "learning_rate": 2.9464e-05, "loss": 0.0058, "step": 11612 }, { "epoch": 12.818332413031474, "grad_norm": 0.5127703547477722, "learning_rate": 2.9463666666666667e-05, "loss": 0.015, "step": 11613 }, { "epoch": 12.819436775262286, "grad_norm": 0.1642703413963318, "learning_rate": 2.9463333333333336e-05, "loss": 0.0057, "step": 11614 }, { "epoch": 12.820541137493098, "grad_norm": 0.5039729475975037, "learning_rate": 2.9463e-05, "loss": 0.015, "step": 11615 }, { "epoch": 12.821645499723909, "grad_norm": 0.38431715965270996, "learning_rate": 2.9462666666666668e-05, "loss": 0.0128, "step": 11616 }, { "epoch": 12.82274986195472, "grad_norm": 0.3949478268623352, "learning_rate": 2.9462333333333334e-05, "loss": 0.014, "step": 11617 }, { "epoch": 12.823854224185533, "grad_norm": 0.49908602237701416, "learning_rate": 2.9462e-05, "loss": 0.0103, "step": 11618 }, { "epoch": 12.824958586416345, "grad_norm": 0.663379430770874, "learning_rate": 2.946166666666667e-05, "loss": 0.0175, "step": 11619 }, { "epoch": 12.826062948647156, "grad_norm": 0.9488901495933533, "learning_rate": 2.9461333333333335e-05, "loss": 0.0111, "step": 11620 }, { "epoch": 12.827167310877968, "grad_norm": 0.672150194644928, "learning_rate": 2.9461e-05, "loss": 0.0215, "step": 11621 }, { "epoch": 12.82827167310878, "grad_norm": 
0.36159229278564453, "learning_rate": 2.9460666666666667e-05, "loss": 0.0422, "step": 11622 }, { "epoch": 12.82937603533959, "grad_norm": 0.6589663624763489, "learning_rate": 2.9460333333333333e-05, "loss": 0.1952, "step": 11623 }, { "epoch": 12.830480397570403, "grad_norm": 0.5628888607025146, "learning_rate": 2.946e-05, "loss": 0.1602, "step": 11624 }, { "epoch": 12.831584759801215, "grad_norm": 0.48333749175071716, "learning_rate": 2.945966666666667e-05, "loss": 0.1272, "step": 11625 }, { "epoch": 12.832689122032026, "grad_norm": 0.5994378924369812, "learning_rate": 2.945933333333333e-05, "loss": 0.1592, "step": 11626 }, { "epoch": 12.833793484262838, "grad_norm": 0.4532501995563507, "learning_rate": 2.9459e-05, "loss": 0.0898, "step": 11627 }, { "epoch": 12.83489784649365, "grad_norm": 0.8203868865966797, "learning_rate": 2.945866666666667e-05, "loss": 0.067, "step": 11628 }, { "epoch": 12.836002208724462, "grad_norm": 0.4410819709300995, "learning_rate": 2.9458333333333332e-05, "loss": 0.0809, "step": 11629 }, { "epoch": 12.837106570955273, "grad_norm": 1.4689223766326904, "learning_rate": 2.9458e-05, "loss": 0.037, "step": 11630 }, { "epoch": 12.838210933186085, "grad_norm": 0.3555079698562622, "learning_rate": 2.9457666666666667e-05, "loss": 0.0475, "step": 11631 }, { "epoch": 12.839315295416897, "grad_norm": 0.45123592019081116, "learning_rate": 2.9457333333333333e-05, "loss": 0.0454, "step": 11632 }, { "epoch": 12.840419657647708, "grad_norm": 0.41274359822273254, "learning_rate": 2.9457e-05, "loss": 0.0264, "step": 11633 }, { "epoch": 12.84152401987852, "grad_norm": 0.2131304144859314, "learning_rate": 2.945666666666667e-05, "loss": 0.017, "step": 11634 }, { "epoch": 12.842628382109332, "grad_norm": 0.5517654418945312, "learning_rate": 2.945633333333333e-05, "loss": 0.0404, "step": 11635 }, { "epoch": 12.843732744340144, "grad_norm": 0.25906842947006226, "learning_rate": 2.9456e-05, "loss": 0.0153, "step": 11636 }, { "epoch": 12.844837106570955, 
"grad_norm": 0.8390664458274841, "learning_rate": 2.945566666666667e-05, "loss": 0.0169, "step": 11637 }, { "epoch": 12.845941468801767, "grad_norm": 0.2744029462337494, "learning_rate": 2.9455333333333332e-05, "loss": 0.0214, "step": 11638 }, { "epoch": 12.84704583103258, "grad_norm": 0.11579331010580063, "learning_rate": 2.9455000000000002e-05, "loss": 0.0087, "step": 11639 }, { "epoch": 12.84815019326339, "grad_norm": 0.17543485760688782, "learning_rate": 2.9454666666666668e-05, "loss": 0.0111, "step": 11640 }, { "epoch": 12.849254555494202, "grad_norm": 0.16392262279987335, "learning_rate": 2.9454333333333334e-05, "loss": 0.0102, "step": 11641 }, { "epoch": 12.850358917725014, "grad_norm": 0.22970108687877655, "learning_rate": 2.9454e-05, "loss": 0.0165, "step": 11642 }, { "epoch": 12.851463279955826, "grad_norm": 0.2746169865131378, "learning_rate": 2.945366666666667e-05, "loss": 0.0165, "step": 11643 }, { "epoch": 12.852567642186637, "grad_norm": 0.3193839490413666, "learning_rate": 2.9453333333333335e-05, "loss": 0.0117, "step": 11644 }, { "epoch": 12.853672004417449, "grad_norm": 0.29441171884536743, "learning_rate": 2.9453e-05, "loss": 0.0134, "step": 11645 }, { "epoch": 12.854776366648261, "grad_norm": 0.14404301345348358, "learning_rate": 2.945266666666667e-05, "loss": 0.0065, "step": 11646 }, { "epoch": 12.855880728879072, "grad_norm": 0.16646422445774078, "learning_rate": 2.9452333333333333e-05, "loss": 0.0098, "step": 11647 }, { "epoch": 12.856985091109884, "grad_norm": 0.3492033779621124, "learning_rate": 2.9452000000000002e-05, "loss": 0.0145, "step": 11648 }, { "epoch": 12.858089453340696, "grad_norm": 0.5009938478469849, "learning_rate": 2.9451666666666668e-05, "loss": 0.0196, "step": 11649 }, { "epoch": 12.859193815571507, "grad_norm": 0.3157818019390106, "learning_rate": 2.9451333333333334e-05, "loss": 0.014, "step": 11650 }, { "epoch": 12.860298177802319, "grad_norm": 0.18734386563301086, "learning_rate": 2.9451e-05, "loss": 0.0118, "step": 
11651 }, { "epoch": 12.861402540033131, "grad_norm": 0.24642904102802277, "learning_rate": 2.945066666666667e-05, "loss": 0.0086, "step": 11652 }, { "epoch": 12.862506902263943, "grad_norm": 1.3014711141586304, "learning_rate": 2.9450333333333335e-05, "loss": 0.0139, "step": 11653 }, { "epoch": 12.863611264494754, "grad_norm": 0.33598193526268005, "learning_rate": 2.945e-05, "loss": 0.0067, "step": 11654 }, { "epoch": 12.864715626725566, "grad_norm": 0.20273421704769135, "learning_rate": 2.9449666666666667e-05, "loss": 0.0063, "step": 11655 }, { "epoch": 12.865819988956378, "grad_norm": 0.4096050262451172, "learning_rate": 2.9449333333333333e-05, "loss": 0.0156, "step": 11656 }, { "epoch": 12.866924351187189, "grad_norm": 0.3111710548400879, "learning_rate": 2.9449000000000002e-05, "loss": 0.0117, "step": 11657 }, { "epoch": 12.868028713418001, "grad_norm": 0.24254141747951508, "learning_rate": 2.9448666666666665e-05, "loss": 0.0165, "step": 11658 }, { "epoch": 12.869133075648813, "grad_norm": 0.968893826007843, "learning_rate": 2.9448333333333334e-05, "loss": 0.0165, "step": 11659 }, { "epoch": 12.870237437879624, "grad_norm": 0.2812097668647766, "learning_rate": 2.9448e-05, "loss": 0.0158, "step": 11660 }, { "epoch": 12.871341800110436, "grad_norm": 0.29632440209388733, "learning_rate": 2.9447666666666666e-05, "loss": 0.0151, "step": 11661 }, { "epoch": 12.872446162341248, "grad_norm": 0.4242340922355652, "learning_rate": 2.9447333333333335e-05, "loss": 0.0128, "step": 11662 }, { "epoch": 12.87355052457206, "grad_norm": 1.4454443454742432, "learning_rate": 2.9447e-05, "loss": 0.0199, "step": 11663 }, { "epoch": 12.87465488680287, "grad_norm": 0.43991103768348694, "learning_rate": 2.9446666666666667e-05, "loss": 0.0159, "step": 11664 }, { "epoch": 12.875759249033683, "grad_norm": 0.2234213501214981, "learning_rate": 2.9446333333333333e-05, "loss": 0.0097, "step": 11665 }, { "epoch": 12.876863611264495, "grad_norm": 1.3140071630477905, "learning_rate": 
2.9446000000000002e-05, "loss": 0.0221, "step": 11666 }, { "epoch": 12.877967973495306, "grad_norm": 0.20170286297798157, "learning_rate": 2.9445666666666665e-05, "loss": 0.0078, "step": 11667 }, { "epoch": 12.879072335726118, "grad_norm": 0.31034424901008606, "learning_rate": 2.9445333333333334e-05, "loss": 0.0122, "step": 11668 }, { "epoch": 12.88017669795693, "grad_norm": 0.33786827325820923, "learning_rate": 2.9445000000000004e-05, "loss": 0.0135, "step": 11669 }, { "epoch": 12.881281060187742, "grad_norm": 0.2982017695903778, "learning_rate": 2.9444666666666666e-05, "loss": 0.0101, "step": 11670 }, { "epoch": 12.882385422418553, "grad_norm": 0.6112930178642273, "learning_rate": 2.9444333333333335e-05, "loss": 0.0231, "step": 11671 }, { "epoch": 12.883489784649365, "grad_norm": 0.6459569334983826, "learning_rate": 2.9444e-05, "loss": 0.0282, "step": 11672 }, { "epoch": 12.884594146880177, "grad_norm": 0.8671150803565979, "learning_rate": 2.9443666666666667e-05, "loss": 0.1899, "step": 11673 }, { "epoch": 12.885698509110988, "grad_norm": 0.7218703627586365, "learning_rate": 2.9443333333333333e-05, "loss": 0.1901, "step": 11674 }, { "epoch": 12.8868028713418, "grad_norm": 0.6699933409690857, "learning_rate": 2.9443000000000003e-05, "loss": 0.1402, "step": 11675 }, { "epoch": 12.887907233572612, "grad_norm": 0.6169884204864502, "learning_rate": 2.9442666666666665e-05, "loss": 0.0789, "step": 11676 }, { "epoch": 12.889011595803424, "grad_norm": 0.6194108724594116, "learning_rate": 2.9442333333333334e-05, "loss": 0.1148, "step": 11677 }, { "epoch": 12.890115958034235, "grad_norm": 0.6041805744171143, "learning_rate": 2.9442000000000004e-05, "loss": 0.0686, "step": 11678 }, { "epoch": 12.891220320265047, "grad_norm": 0.5785470604896545, "learning_rate": 2.9441666666666666e-05, "loss": 0.076, "step": 11679 }, { "epoch": 12.89232468249586, "grad_norm": 0.46304771304130554, "learning_rate": 2.9441333333333336e-05, "loss": 0.0528, "step": 11680 }, { "epoch": 
12.89342904472667, "grad_norm": 0.5798790454864502, "learning_rate": 2.9441e-05, "loss": 0.0659, "step": 11681 }, { "epoch": 12.894533406957482, "grad_norm": 0.2580527067184448, "learning_rate": 2.9440666666666667e-05, "loss": 0.0374, "step": 11682 }, { "epoch": 12.895637769188294, "grad_norm": 0.3394111096858978, "learning_rate": 2.9440333333333333e-05, "loss": 0.0169, "step": 11683 }, { "epoch": 12.896742131419105, "grad_norm": 0.22645525634288788, "learning_rate": 2.944e-05, "loss": 0.015, "step": 11684 }, { "epoch": 12.897846493649917, "grad_norm": 0.2885766923427582, "learning_rate": 2.9439666666666665e-05, "loss": 0.0171, "step": 11685 }, { "epoch": 12.898950855880729, "grad_norm": 0.3861544132232666, "learning_rate": 2.9439333333333335e-05, "loss": 0.0236, "step": 11686 }, { "epoch": 12.900055218111541, "grad_norm": 0.1756666600704193, "learning_rate": 2.9439e-05, "loss": 0.0161, "step": 11687 }, { "epoch": 12.901159580342352, "grad_norm": 0.2447338104248047, "learning_rate": 2.9438666666666666e-05, "loss": 0.0153, "step": 11688 }, { "epoch": 12.902263942573164, "grad_norm": 0.3474295735359192, "learning_rate": 2.9438333333333336e-05, "loss": 0.0185, "step": 11689 }, { "epoch": 12.903368304803976, "grad_norm": 0.25865665078163147, "learning_rate": 2.9438e-05, "loss": 0.0094, "step": 11690 }, { "epoch": 12.904472667034787, "grad_norm": 0.284016489982605, "learning_rate": 2.9437666666666668e-05, "loss": 0.0145, "step": 11691 }, { "epoch": 12.905577029265599, "grad_norm": 0.14092783629894257, "learning_rate": 2.9437333333333334e-05, "loss": 0.0065, "step": 11692 }, { "epoch": 12.906681391496411, "grad_norm": 0.3108130395412445, "learning_rate": 2.9437e-05, "loss": 0.0134, "step": 11693 }, { "epoch": 12.907785753727222, "grad_norm": 0.24463322758674622, "learning_rate": 2.943666666666667e-05, "loss": 0.009, "step": 11694 }, { "epoch": 12.908890115958034, "grad_norm": 0.3984210193157196, "learning_rate": 2.9436333333333335e-05, "loss": 0.0276, "step": 11695 }, { 
"epoch": 12.909994478188846, "grad_norm": 0.35887807607650757, "learning_rate": 2.9436e-05, "loss": 0.0118, "step": 11696 }, { "epoch": 12.911098840419658, "grad_norm": 0.2891682982444763, "learning_rate": 2.9435666666666667e-05, "loss": 0.019, "step": 11697 }, { "epoch": 12.912203202650469, "grad_norm": 0.304029643535614, "learning_rate": 2.9435333333333336e-05, "loss": 0.0103, "step": 11698 }, { "epoch": 12.913307564881281, "grad_norm": 0.1916303038597107, "learning_rate": 2.9435e-05, "loss": 0.0121, "step": 11699 }, { "epoch": 12.914411927112093, "grad_norm": 0.2858465611934662, "learning_rate": 2.9434666666666668e-05, "loss": 0.0099, "step": 11700 }, { "epoch": 12.915516289342904, "grad_norm": 0.38410234451293945, "learning_rate": 2.9434333333333334e-05, "loss": 0.0096, "step": 11701 }, { "epoch": 12.916620651573716, "grad_norm": 0.15711888670921326, "learning_rate": 2.9434e-05, "loss": 0.0177, "step": 11702 }, { "epoch": 12.917725013804528, "grad_norm": 0.22400519251823425, "learning_rate": 2.943366666666667e-05, "loss": 0.0122, "step": 11703 }, { "epoch": 12.91882937603534, "grad_norm": 0.2830585837364197, "learning_rate": 2.9433333333333335e-05, "loss": 0.0162, "step": 11704 }, { "epoch": 12.91993373826615, "grad_norm": 0.31727084517478943, "learning_rate": 2.9433e-05, "loss": 0.0115, "step": 11705 }, { "epoch": 12.921038100496963, "grad_norm": 0.48692813515663147, "learning_rate": 2.9432666666666667e-05, "loss": 0.013, "step": 11706 }, { "epoch": 12.922142462727775, "grad_norm": 0.2876559793949127, "learning_rate": 2.9432333333333336e-05, "loss": 0.0078, "step": 11707 }, { "epoch": 12.923246824958586, "grad_norm": 0.52283775806427, "learning_rate": 2.9432e-05, "loss": 0.0248, "step": 11708 }, { "epoch": 12.924351187189398, "grad_norm": 0.3614109754562378, "learning_rate": 2.9431666666666668e-05, "loss": 0.0257, "step": 11709 }, { "epoch": 12.92545554942021, "grad_norm": 1.053399920463562, "learning_rate": 2.9431333333333334e-05, "loss": 0.0221, "step": 
11710 }, { "epoch": 12.926559911651022, "grad_norm": 0.2510138750076294, "learning_rate": 2.9431e-05, "loss": 0.0109, "step": 11711 }, { "epoch": 12.927664273881833, "grad_norm": 3.5894298553466797, "learning_rate": 2.943066666666667e-05, "loss": 0.031, "step": 11712 }, { "epoch": 12.928768636112645, "grad_norm": 0.8872613906860352, "learning_rate": 2.9430333333333332e-05, "loss": 0.0291, "step": 11713 }, { "epoch": 12.929872998343457, "grad_norm": 0.4055126905441284, "learning_rate": 2.943e-05, "loss": 0.0125, "step": 11714 }, { "epoch": 12.930977360574268, "grad_norm": 0.5340017676353455, "learning_rate": 2.9429666666666667e-05, "loss": 0.0184, "step": 11715 }, { "epoch": 12.93208172280508, "grad_norm": 0.4032803773880005, "learning_rate": 2.9429333333333333e-05, "loss": 0.013, "step": 11716 }, { "epoch": 12.933186085035892, "grad_norm": 0.35170871019363403, "learning_rate": 2.9429e-05, "loss": 0.0197, "step": 11717 }, { "epoch": 12.934290447266704, "grad_norm": 0.47958099842071533, "learning_rate": 2.9428666666666668e-05, "loss": 0.0183, "step": 11718 }, { "epoch": 12.935394809497515, "grad_norm": 0.9257031679153442, "learning_rate": 2.9428333333333334e-05, "loss": 0.0423, "step": 11719 }, { "epoch": 12.936499171728327, "grad_norm": 0.28455913066864014, "learning_rate": 2.9428e-05, "loss": 0.0116, "step": 11720 }, { "epoch": 12.93760353395914, "grad_norm": 0.6126305460929871, "learning_rate": 2.942766666666667e-05, "loss": 0.0194, "step": 11721 }, { "epoch": 12.93870789618995, "grad_norm": 0.26738470792770386, "learning_rate": 2.9427333333333332e-05, "loss": 0.0137, "step": 11722 }, { "epoch": 12.939812258420762, "grad_norm": 0.8312028646469116, "learning_rate": 2.9427e-05, "loss": 0.1886, "step": 11723 }, { "epoch": 12.940916620651574, "grad_norm": 0.6804441809654236, "learning_rate": 2.9426666666666667e-05, "loss": 0.1638, "step": 11724 }, { "epoch": 12.942020982882385, "grad_norm": 0.571524977684021, "learning_rate": 2.9426333333333333e-05, "loss": 0.1424, 
"step": 11725 }, { "epoch": 12.943125345113197, "grad_norm": 0.504696249961853, "learning_rate": 2.9426e-05, "loss": 0.107, "step": 11726 }, { "epoch": 12.94422970734401, "grad_norm": 0.8682608604431152, "learning_rate": 2.942566666666667e-05, "loss": 0.0879, "step": 11727 }, { "epoch": 12.945334069574821, "grad_norm": 0.6558408737182617, "learning_rate": 2.9425333333333334e-05, "loss": 0.0966, "step": 11728 }, { "epoch": 12.946438431805632, "grad_norm": 0.5838583111763, "learning_rate": 2.9425e-05, "loss": 0.0572, "step": 11729 }, { "epoch": 12.947542794036444, "grad_norm": 0.4447230100631714, "learning_rate": 2.942466666666667e-05, "loss": 0.0402, "step": 11730 }, { "epoch": 12.948647156267256, "grad_norm": 0.3666107952594757, "learning_rate": 2.9424333333333332e-05, "loss": 0.0727, "step": 11731 }, { "epoch": 12.949751518498067, "grad_norm": 0.45070841908454895, "learning_rate": 2.9424e-05, "loss": 0.0488, "step": 11732 }, { "epoch": 12.950855880728879, "grad_norm": 0.3838376998901367, "learning_rate": 2.9423666666666667e-05, "loss": 0.0323, "step": 11733 }, { "epoch": 12.951960242959691, "grad_norm": 0.360442578792572, "learning_rate": 2.9423333333333333e-05, "loss": 0.026, "step": 11734 }, { "epoch": 12.953064605190502, "grad_norm": 0.19904440641403198, "learning_rate": 2.9423e-05, "loss": 0.0115, "step": 11735 }, { "epoch": 12.954168967421314, "grad_norm": 0.3321535289287567, "learning_rate": 2.942266666666667e-05, "loss": 0.0136, "step": 11736 }, { "epoch": 12.955273329652126, "grad_norm": 0.2097688615322113, "learning_rate": 2.9422333333333335e-05, "loss": 0.0086, "step": 11737 }, { "epoch": 12.956377691882938, "grad_norm": 0.5712008476257324, "learning_rate": 2.9422e-05, "loss": 0.0281, "step": 11738 }, { "epoch": 12.957482054113749, "grad_norm": 0.18678367137908936, "learning_rate": 2.942166666666667e-05, "loss": 0.0092, "step": 11739 }, { "epoch": 12.958586416344561, "grad_norm": 0.23518472909927368, "learning_rate": 2.9421333333333332e-05, "loss": 
0.0262, "step": 11740 }, { "epoch": 12.959690778575373, "grad_norm": 0.3372807502746582, "learning_rate": 2.9421000000000002e-05, "loss": 0.0159, "step": 11741 }, { "epoch": 12.960795140806184, "grad_norm": 0.40242359042167664, "learning_rate": 2.9420666666666668e-05, "loss": 0.0163, "step": 11742 }, { "epoch": 12.961899503036996, "grad_norm": 0.4791308641433716, "learning_rate": 2.9420333333333334e-05, "loss": 0.0088, "step": 11743 }, { "epoch": 12.963003865267808, "grad_norm": 0.18213967978954315, "learning_rate": 2.9420000000000003e-05, "loss": 0.006, "step": 11744 }, { "epoch": 12.96410822749862, "grad_norm": 0.38517701625823975, "learning_rate": 2.9419666666666665e-05, "loss": 0.0133, "step": 11745 }, { "epoch": 12.96521258972943, "grad_norm": 0.21493880450725555, "learning_rate": 2.9419333333333335e-05, "loss": 0.0118, "step": 11746 }, { "epoch": 12.966316951960243, "grad_norm": 0.13059444725513458, "learning_rate": 2.9419e-05, "loss": 0.0078, "step": 11747 }, { "epoch": 12.967421314191055, "grad_norm": 0.14158956706523895, "learning_rate": 2.9418666666666667e-05, "loss": 0.0138, "step": 11748 }, { "epoch": 12.968525676421866, "grad_norm": 0.40650326013565063, "learning_rate": 2.9418333333333333e-05, "loss": 0.0132, "step": 11749 }, { "epoch": 12.969630038652678, "grad_norm": 0.4208533465862274, "learning_rate": 2.9418000000000002e-05, "loss": 0.0124, "step": 11750 }, { "epoch": 12.97073440088349, "grad_norm": 0.23605625331401825, "learning_rate": 2.9417666666666664e-05, "loss": 0.0091, "step": 11751 }, { "epoch": 12.971838763114302, "grad_norm": 0.2857353091239929, "learning_rate": 2.9417333333333334e-05, "loss": 0.015, "step": 11752 }, { "epoch": 12.972943125345113, "grad_norm": 0.45648980140686035, "learning_rate": 2.9417000000000003e-05, "loss": 0.0276, "step": 11753 }, { "epoch": 12.974047487575925, "grad_norm": 0.5046243667602539, "learning_rate": 2.9416666666666666e-05, "loss": 0.0124, "step": 11754 }, { "epoch": 12.975151849806737, "grad_norm": 
0.2650606036186218, "learning_rate": 2.9416333333333335e-05, "loss": 0.0152, "step": 11755 }, { "epoch": 12.976256212037548, "grad_norm": 0.20227596163749695, "learning_rate": 2.9416e-05, "loss": 0.0092, "step": 11756 }, { "epoch": 12.97736057426836, "grad_norm": 0.4309345781803131, "learning_rate": 2.9415666666666667e-05, "loss": 0.0187, "step": 11757 }, { "epoch": 12.978464936499172, "grad_norm": 0.6242203116416931, "learning_rate": 2.9415333333333333e-05, "loss": 0.0255, "step": 11758 }, { "epoch": 12.979569298729983, "grad_norm": 0.2604581415653229, "learning_rate": 2.9415000000000002e-05, "loss": 0.0128, "step": 11759 }, { "epoch": 12.980673660960795, "grad_norm": 0.22402667999267578, "learning_rate": 2.9414666666666668e-05, "loss": 0.0144, "step": 11760 }, { "epoch": 12.981778023191607, "grad_norm": 0.755124568939209, "learning_rate": 2.9414333333333334e-05, "loss": 0.0134, "step": 11761 }, { "epoch": 12.98288238542242, "grad_norm": 0.30632951855659485, "learning_rate": 2.9414000000000003e-05, "loss": 0.0104, "step": 11762 }, { "epoch": 12.98398674765323, "grad_norm": 0.24571259319782257, "learning_rate": 2.9413666666666666e-05, "loss": 0.0105, "step": 11763 }, { "epoch": 12.985091109884042, "grad_norm": 0.56494140625, "learning_rate": 2.9413333333333335e-05, "loss": 0.0136, "step": 11764 }, { "epoch": 12.986195472114854, "grad_norm": 0.23541216552257538, "learning_rate": 2.9413e-05, "loss": 0.0127, "step": 11765 }, { "epoch": 12.987299834345665, "grad_norm": 0.21077771484851837, "learning_rate": 2.9412666666666667e-05, "loss": 0.0131, "step": 11766 }, { "epoch": 12.988404196576477, "grad_norm": 0.6266595721244812, "learning_rate": 2.9412333333333333e-05, "loss": 0.0085, "step": 11767 }, { "epoch": 12.98950855880729, "grad_norm": 0.3334352970123291, "learning_rate": 2.9412000000000002e-05, "loss": 0.0151, "step": 11768 }, { "epoch": 12.9906129210381, "grad_norm": 0.4731382727622986, "learning_rate": 2.9411666666666668e-05, "loss": 0.0287, "step": 11769 }, { 
"epoch": 12.991717283268912, "grad_norm": 0.6597811579704285, "learning_rate": 2.9411333333333334e-05, "loss": 0.0227, "step": 11770 }, { "epoch": 12.992821645499724, "grad_norm": 0.4775550067424774, "learning_rate": 2.9411000000000004e-05, "loss": 0.0199, "step": 11771 }, { "epoch": 12.993926007730536, "grad_norm": 0.29879605770111084, "learning_rate": 2.9410666666666666e-05, "loss": 0.0215, "step": 11772 }, { "epoch": 12.995030369961347, "grad_norm": 0.7901854515075684, "learning_rate": 2.9410333333333335e-05, "loss": 0.1261, "step": 11773 }, { "epoch": 12.996134732192159, "grad_norm": 0.2220812439918518, "learning_rate": 2.9409999999999998e-05, "loss": 0.0154, "step": 11774 }, { "epoch": 12.997239094422971, "grad_norm": 0.3921228349208832, "learning_rate": 2.9409666666666667e-05, "loss": 0.0144, "step": 11775 }, { "epoch": 12.998343456653782, "grad_norm": 0.4031088054180145, "learning_rate": 2.9409333333333333e-05, "loss": 0.0197, "step": 11776 }, { "epoch": 12.999447818884594, "grad_norm": 0.36753740906715393, "learning_rate": 2.9409e-05, "loss": 0.0108, "step": 11777 }, { "epoch": 13.0, "grad_norm": 0.1584739089012146, "learning_rate": 2.940866666666667e-05, "loss": 0.0033, "step": 11778 }, { "epoch": 13.001104362230812, "grad_norm": 0.48918044567108154, "learning_rate": 2.9408333333333334e-05, "loss": 0.1824, "step": 11779 }, { "epoch": 13.002208724461623, "grad_norm": 0.48255929350852966, "learning_rate": 2.9408e-05, "loss": 0.1198, "step": 11780 }, { "epoch": 13.003313086692435, "grad_norm": 0.5054574012756348, "learning_rate": 2.9407666666666666e-05, "loss": 0.1494, "step": 11781 }, { "epoch": 13.004417448923247, "grad_norm": 0.4342459440231323, "learning_rate": 2.9407333333333336e-05, "loss": 0.0809, "step": 11782 }, { "epoch": 13.00552181115406, "grad_norm": 0.6419514417648315, "learning_rate": 2.9406999999999998e-05, "loss": 0.0767, "step": 11783 }, { "epoch": 13.00662617338487, "grad_norm": 0.31575247645378113, "learning_rate": 2.9406666666666667e-05, 
"loss": 0.0734, "step": 11784 }, { "epoch": 13.007730535615682, "grad_norm": 0.721613347530365, "learning_rate": 2.9406333333333333e-05, "loss": 0.0442, "step": 11785 }, { "epoch": 13.008834897846494, "grad_norm": 0.3126468062400818, "learning_rate": 2.9406e-05, "loss": 0.027, "step": 11786 }, { "epoch": 13.009939260077305, "grad_norm": 0.32479575276374817, "learning_rate": 2.940566666666667e-05, "loss": 0.0292, "step": 11787 }, { "epoch": 13.011043622308117, "grad_norm": 0.2579663097858429, "learning_rate": 2.9405333333333335e-05, "loss": 0.0154, "step": 11788 }, { "epoch": 13.01214798453893, "grad_norm": 0.25473588705062866, "learning_rate": 2.9405e-05, "loss": 0.0178, "step": 11789 }, { "epoch": 13.01325234676974, "grad_norm": 0.23785680532455444, "learning_rate": 2.9404666666666666e-05, "loss": 0.0067, "step": 11790 }, { "epoch": 13.014356709000552, "grad_norm": 0.47170352935791016, "learning_rate": 2.9404333333333336e-05, "loss": 0.0152, "step": 11791 }, { "epoch": 13.015461071231364, "grad_norm": 0.3682464361190796, "learning_rate": 2.9404e-05, "loss": 0.0191, "step": 11792 }, { "epoch": 13.016565433462176, "grad_norm": 0.2952916920185089, "learning_rate": 2.9403666666666668e-05, "loss": 0.0139, "step": 11793 }, { "epoch": 13.017669795692987, "grad_norm": 0.18604515492916107, "learning_rate": 2.9403333333333337e-05, "loss": 0.0099, "step": 11794 }, { "epoch": 13.018774157923799, "grad_norm": 0.369346022605896, "learning_rate": 2.9403e-05, "loss": 0.0095, "step": 11795 }, { "epoch": 13.019878520154611, "grad_norm": 0.24184466898441315, "learning_rate": 2.940266666666667e-05, "loss": 0.0098, "step": 11796 }, { "epoch": 13.020982882385422, "grad_norm": 0.28741946816444397, "learning_rate": 2.9402333333333335e-05, "loss": 0.0147, "step": 11797 }, { "epoch": 13.022087244616234, "grad_norm": 0.3482395112514496, "learning_rate": 2.9402e-05, "loss": 0.0145, "step": 11798 }, { "epoch": 13.023191606847046, "grad_norm": 0.3234262466430664, "learning_rate": 
2.9401666666666667e-05, "loss": 0.0167, "step": 11799 }, { "epoch": 13.024295969077858, "grad_norm": 0.3759153187274933, "learning_rate": 2.9401333333333336e-05, "loss": 0.0142, "step": 11800 }, { "epoch": 13.025400331308669, "grad_norm": 0.20321029424667358, "learning_rate": 2.9401e-05, "loss": 0.0069, "step": 11801 }, { "epoch": 13.026504693539481, "grad_norm": 0.7473775744438171, "learning_rate": 2.9400666666666668e-05, "loss": 0.0176, "step": 11802 }, { "epoch": 13.027609055770293, "grad_norm": 1.1635583639144897, "learning_rate": 2.9400333333333337e-05, "loss": 0.0124, "step": 11803 }, { "epoch": 13.028713418001104, "grad_norm": 0.21093112230300903, "learning_rate": 2.94e-05, "loss": 0.0081, "step": 11804 }, { "epoch": 13.029817780231916, "grad_norm": 0.16047461330890656, "learning_rate": 2.939966666666667e-05, "loss": 0.0089, "step": 11805 }, { "epoch": 13.030922142462728, "grad_norm": 0.3034724295139313, "learning_rate": 2.939933333333333e-05, "loss": 0.0166, "step": 11806 }, { "epoch": 13.032026504693539, "grad_norm": 0.17403477430343628, "learning_rate": 2.9399e-05, "loss": 0.0058, "step": 11807 }, { "epoch": 13.03313086692435, "grad_norm": 0.32973113656044006, "learning_rate": 2.9398666666666667e-05, "loss": 0.0111, "step": 11808 }, { "epoch": 13.034235229155163, "grad_norm": 0.1451781690120697, "learning_rate": 2.9398333333333333e-05, "loss": 0.0064, "step": 11809 }, { "epoch": 13.035339591385975, "grad_norm": 0.2549229562282562, "learning_rate": 2.9398000000000002e-05, "loss": 0.0148, "step": 11810 }, { "epoch": 13.036443953616786, "grad_norm": 0.2975848913192749, "learning_rate": 2.9397666666666668e-05, "loss": 0.0131, "step": 11811 }, { "epoch": 13.037548315847598, "grad_norm": 0.20660652220249176, "learning_rate": 2.9397333333333334e-05, "loss": 0.0111, "step": 11812 }, { "epoch": 13.03865267807841, "grad_norm": 0.36336854100227356, "learning_rate": 2.9397e-05, "loss": 0.0311, "step": 11813 }, { "epoch": 13.03975704030922, "grad_norm": 
0.8389037847518921, "learning_rate": 2.939666666666667e-05, "loss": 0.0135, "step": 11814 }, { "epoch": 13.040861402540033, "grad_norm": 0.3387545645236969, "learning_rate": 2.9396333333333332e-05, "loss": 0.0106, "step": 11815 }, { "epoch": 13.041965764770845, "grad_norm": 0.44748610258102417, "learning_rate": 2.9396e-05, "loss": 0.0085, "step": 11816 }, { "epoch": 13.043070127001657, "grad_norm": 0.16202475130558014, "learning_rate": 2.9395666666666667e-05, "loss": 0.0064, "step": 11817 }, { "epoch": 13.044174489232468, "grad_norm": 0.20120051503181458, "learning_rate": 2.9395333333333333e-05, "loss": 0.0102, "step": 11818 }, { "epoch": 13.04527885146328, "grad_norm": 0.37108656764030457, "learning_rate": 2.9395000000000002e-05, "loss": 0.0121, "step": 11819 }, { "epoch": 13.046383213694092, "grad_norm": 0.23467424511909485, "learning_rate": 2.9394666666666668e-05, "loss": 0.0084, "step": 11820 }, { "epoch": 13.047487575924903, "grad_norm": 0.2511523365974426, "learning_rate": 2.9394333333333334e-05, "loss": 0.0095, "step": 11821 }, { "epoch": 13.048591938155715, "grad_norm": 0.36538922786712646, "learning_rate": 2.9394e-05, "loss": 0.0225, "step": 11822 }, { "epoch": 13.049696300386527, "grad_norm": 0.15928088128566742, "learning_rate": 2.939366666666667e-05, "loss": 0.0049, "step": 11823 }, { "epoch": 13.050800662617338, "grad_norm": 0.5200538039207458, "learning_rate": 2.9393333333333332e-05, "loss": 0.0219, "step": 11824 }, { "epoch": 13.05190502484815, "grad_norm": 0.2867635488510132, "learning_rate": 2.9393e-05, "loss": 0.0095, "step": 11825 }, { "epoch": 13.053009387078962, "grad_norm": 0.19093963503837585, "learning_rate": 2.9392666666666667e-05, "loss": 0.0052, "step": 11826 }, { "epoch": 13.054113749309774, "grad_norm": 0.33758044242858887, "learning_rate": 2.9392333333333333e-05, "loss": 0.0138, "step": 11827 }, { "epoch": 13.055218111540585, "grad_norm": 0.2932763397693634, "learning_rate": 2.9392000000000003e-05, "loss": 0.0097, "step": 11828 }, { 
"epoch": 13.056322473771397, "grad_norm": 0.7247364521026611, "learning_rate": 2.939166666666667e-05, "loss": 0.1653, "step": 11829 }, { "epoch": 13.05742683600221, "grad_norm": 0.5911158919334412, "learning_rate": 2.9391333333333334e-05, "loss": 0.1416, "step": 11830 }, { "epoch": 13.05853119823302, "grad_norm": 0.47453686594963074, "learning_rate": 2.9391e-05, "loss": 0.0985, "step": 11831 }, { "epoch": 13.059635560463832, "grad_norm": 0.4873233735561371, "learning_rate": 2.939066666666667e-05, "loss": 0.1283, "step": 11832 }, { "epoch": 13.060739922694644, "grad_norm": 0.8096057176589966, "learning_rate": 2.9390333333333332e-05, "loss": 0.0787, "step": 11833 }, { "epoch": 13.061844284925456, "grad_norm": 0.47632721066474915, "learning_rate": 2.939e-05, "loss": 0.0488, "step": 11834 }, { "epoch": 13.062948647156267, "grad_norm": 0.3259577453136444, "learning_rate": 2.9389666666666667e-05, "loss": 0.0508, "step": 11835 }, { "epoch": 13.064053009387079, "grad_norm": 0.5260867476463318, "learning_rate": 2.9389333333333333e-05, "loss": 0.0601, "step": 11836 }, { "epoch": 13.065157371617891, "grad_norm": 0.2384239137172699, "learning_rate": 2.9389000000000003e-05, "loss": 0.0221, "step": 11837 }, { "epoch": 13.066261733848702, "grad_norm": 0.36841800808906555, "learning_rate": 2.9388666666666665e-05, "loss": 0.0256, "step": 11838 }, { "epoch": 13.067366096079514, "grad_norm": 0.3403058350086212, "learning_rate": 2.9388333333333335e-05, "loss": 0.0159, "step": 11839 }, { "epoch": 13.068470458310326, "grad_norm": 0.22117775678634644, "learning_rate": 2.9388e-05, "loss": 0.0147, "step": 11840 }, { "epoch": 13.069574820541137, "grad_norm": 0.7055602073669434, "learning_rate": 2.9387666666666666e-05, "loss": 0.0428, "step": 11841 }, { "epoch": 13.070679182771949, "grad_norm": 0.20036424696445465, "learning_rate": 2.9387333333333332e-05, "loss": 0.0122, "step": 11842 }, { "epoch": 13.071783545002761, "grad_norm": 0.20083265006542206, "learning_rate": 2.9387000000000002e-05, 
"loss": 0.012, "step": 11843 }, { "epoch": 13.072887907233573, "grad_norm": 0.16945961117744446, "learning_rate": 2.9386666666666668e-05, "loss": 0.0084, "step": 11844 }, { "epoch": 13.073992269464384, "grad_norm": 0.30774253606796265, "learning_rate": 2.9386333333333334e-05, "loss": 0.015, "step": 11845 }, { "epoch": 13.075096631695196, "grad_norm": 0.19703225791454315, "learning_rate": 2.9386000000000003e-05, "loss": 0.0071, "step": 11846 }, { "epoch": 13.076200993926008, "grad_norm": 0.22534115612506866, "learning_rate": 2.9385666666666665e-05, "loss": 0.0097, "step": 11847 }, { "epoch": 13.077305356156819, "grad_norm": 0.23147928714752197, "learning_rate": 2.9385333333333335e-05, "loss": 0.0065, "step": 11848 }, { "epoch": 13.078409718387631, "grad_norm": 0.577605128288269, "learning_rate": 2.9385e-05, "loss": 0.0227, "step": 11849 }, { "epoch": 13.079514080618443, "grad_norm": 0.19377142190933228, "learning_rate": 2.9384666666666667e-05, "loss": 0.0055, "step": 11850 }, { "epoch": 13.080618442849255, "grad_norm": 0.4053080677986145, "learning_rate": 2.9384333333333333e-05, "loss": 0.019, "step": 11851 }, { "epoch": 13.081722805080066, "grad_norm": 0.27986687421798706, "learning_rate": 2.9384000000000002e-05, "loss": 0.0099, "step": 11852 }, { "epoch": 13.082827167310878, "grad_norm": 0.30368566513061523, "learning_rate": 2.9383666666666668e-05, "loss": 0.0114, "step": 11853 }, { "epoch": 13.08393152954169, "grad_norm": 0.34680742025375366, "learning_rate": 2.9383333333333334e-05, "loss": 0.011, "step": 11854 }, { "epoch": 13.0850358917725, "grad_norm": 0.219823956489563, "learning_rate": 2.9383000000000003e-05, "loss": 0.0106, "step": 11855 }, { "epoch": 13.086140254003313, "grad_norm": 0.3721720278263092, "learning_rate": 2.9382666666666666e-05, "loss": 0.0147, "step": 11856 }, { "epoch": 13.087244616234125, "grad_norm": 0.27590709924697876, "learning_rate": 2.9382333333333335e-05, "loss": 0.0105, "step": 11857 }, { "epoch": 13.088348978464936, "grad_norm": 
0.13789013028144836, "learning_rate": 2.9382e-05, "loss": 0.0035, "step": 11858 }, { "epoch": 13.089453340695748, "grad_norm": 0.30877283215522766, "learning_rate": 2.9381666666666667e-05, "loss": 0.0111, "step": 11859 }, { "epoch": 13.09055770292656, "grad_norm": 0.21653661131858826, "learning_rate": 2.9381333333333336e-05, "loss": 0.013, "step": 11860 }, { "epoch": 13.091662065157372, "grad_norm": 0.39068493247032166, "learning_rate": 2.9381000000000002e-05, "loss": 0.0093, "step": 11861 }, { "epoch": 13.092766427388183, "grad_norm": 0.2946622669696808, "learning_rate": 2.9380666666666668e-05, "loss": 0.012, "step": 11862 }, { "epoch": 13.093870789618995, "grad_norm": 0.36950016021728516, "learning_rate": 2.9380333333333334e-05, "loss": 0.0377, "step": 11863 }, { "epoch": 13.094975151849807, "grad_norm": 0.7007607221603394, "learning_rate": 2.938e-05, "loss": 0.0173, "step": 11864 }, { "epoch": 13.096079514080618, "grad_norm": 0.15541397035121918, "learning_rate": 2.9379666666666666e-05, "loss": 0.0078, "step": 11865 }, { "epoch": 13.09718387631143, "grad_norm": 0.23148323595523834, "learning_rate": 2.9379333333333335e-05, "loss": 0.0122, "step": 11866 }, { "epoch": 13.098288238542242, "grad_norm": 0.225725457072258, "learning_rate": 2.9378999999999998e-05, "loss": 0.007, "step": 11867 }, { "epoch": 13.099392600773054, "grad_norm": 0.15119777619838715, "learning_rate": 2.9378666666666667e-05, "loss": 0.006, "step": 11868 }, { "epoch": 13.100496963003865, "grad_norm": 0.3527454435825348, "learning_rate": 2.9378333333333336e-05, "loss": 0.0115, "step": 11869 }, { "epoch": 13.101601325234677, "grad_norm": 0.545637309551239, "learning_rate": 2.9378e-05, "loss": 0.0137, "step": 11870 }, { "epoch": 13.10270568746549, "grad_norm": 0.19910354912281036, "learning_rate": 2.9377666666666668e-05, "loss": 0.0139, "step": 11871 }, { "epoch": 13.1038100496963, "grad_norm": 0.3625088334083557, "learning_rate": 2.9377333333333334e-05, "loss": 0.0168, "step": 11872 }, { "epoch": 
13.104914411927112, "grad_norm": 0.3573252558708191, "learning_rate": 2.9377e-05, "loss": 0.0134, "step": 11873 }, { "epoch": 13.106018774157924, "grad_norm": 0.4734732508659363, "learning_rate": 2.9376666666666666e-05, "loss": 0.0082, "step": 11874 }, { "epoch": 13.107123136388736, "grad_norm": 0.2845223844051361, "learning_rate": 2.9376333333333335e-05, "loss": 0.0076, "step": 11875 }, { "epoch": 13.108227498619547, "grad_norm": 0.48237738013267517, "learning_rate": 2.9375999999999998e-05, "loss": 0.0163, "step": 11876 }, { "epoch": 13.109331860850359, "grad_norm": 0.7141345739364624, "learning_rate": 2.9375666666666667e-05, "loss": 0.0071, "step": 11877 }, { "epoch": 13.110436223081171, "grad_norm": 0.3828650712966919, "learning_rate": 2.9375333333333337e-05, "loss": 0.0109, "step": 11878 }, { "epoch": 13.111540585311982, "grad_norm": 0.5368611216545105, "learning_rate": 2.9375e-05, "loss": 0.1337, "step": 11879 }, { "epoch": 13.112644947542794, "grad_norm": 0.5311067700386047, "learning_rate": 2.937466666666667e-05, "loss": 0.1378, "step": 11880 }, { "epoch": 13.113749309773606, "grad_norm": 0.8885895013809204, "learning_rate": 2.9374333333333334e-05, "loss": 0.1499, "step": 11881 }, { "epoch": 13.114853672004417, "grad_norm": 0.5335113406181335, "learning_rate": 2.9374e-05, "loss": 0.0756, "step": 11882 }, { "epoch": 13.115958034235229, "grad_norm": 0.45267346501350403, "learning_rate": 2.9373666666666666e-05, "loss": 0.0752, "step": 11883 }, { "epoch": 13.117062396466041, "grad_norm": 0.4049373269081116, "learning_rate": 2.9373333333333336e-05, "loss": 0.0567, "step": 11884 }, { "epoch": 13.118166758696853, "grad_norm": 0.4563067853450775, "learning_rate": 2.9373e-05, "loss": 0.0353, "step": 11885 }, { "epoch": 13.119271120927664, "grad_norm": 0.39604511857032776, "learning_rate": 2.9372666666666667e-05, "loss": 0.0767, "step": 11886 }, { "epoch": 13.120375483158476, "grad_norm": 0.252268522977829, "learning_rate": 2.9372333333333337e-05, "loss": 0.0444, 
"step": 11887 }, { "epoch": 13.121479845389288, "grad_norm": 0.37416526675224304, "learning_rate": 2.9372e-05, "loss": 0.0337, "step": 11888 }, { "epoch": 13.122584207620099, "grad_norm": 0.1336897611618042, "learning_rate": 2.937166666666667e-05, "loss": 0.0068, "step": 11889 }, { "epoch": 13.123688569850911, "grad_norm": 0.3603442907333374, "learning_rate": 2.9371333333333335e-05, "loss": 0.013, "step": 11890 }, { "epoch": 13.124792932081723, "grad_norm": 0.2249559760093689, "learning_rate": 2.9371e-05, "loss": 0.0243, "step": 11891 }, { "epoch": 13.125897294312535, "grad_norm": 0.12851542234420776, "learning_rate": 2.9370666666666666e-05, "loss": 0.036, "step": 11892 }, { "epoch": 13.127001656543346, "grad_norm": 0.23135864734649658, "learning_rate": 2.9370333333333336e-05, "loss": 0.0188, "step": 11893 }, { "epoch": 13.128106018774158, "grad_norm": 0.3339436650276184, "learning_rate": 2.9370000000000002e-05, "loss": 0.0165, "step": 11894 }, { "epoch": 13.12921038100497, "grad_norm": 0.33962157368659973, "learning_rate": 2.9369666666666668e-05, "loss": 0.0161, "step": 11895 }, { "epoch": 13.13031474323578, "grad_norm": 0.3859323263168335, "learning_rate": 2.9369333333333334e-05, "loss": 0.0235, "step": 11896 }, { "epoch": 13.131419105466593, "grad_norm": 0.1847515106201172, "learning_rate": 2.9369e-05, "loss": 0.011, "step": 11897 }, { "epoch": 13.132523467697405, "grad_norm": 1.0227222442626953, "learning_rate": 2.936866666666667e-05, "loss": 0.0149, "step": 11898 }, { "epoch": 13.133627829928216, "grad_norm": 0.3394255042076111, "learning_rate": 2.936833333333333e-05, "loss": 0.0182, "step": 11899 }, { "epoch": 13.134732192159028, "grad_norm": 0.33931130170822144, "learning_rate": 2.9368e-05, "loss": 0.0126, "step": 11900 }, { "epoch": 13.13583655438984, "grad_norm": 0.150729238986969, "learning_rate": 2.9367666666666667e-05, "loss": 0.0064, "step": 11901 }, { "epoch": 13.136940916620652, "grad_norm": 0.3555281460285187, "learning_rate": 
2.9367333333333333e-05, "loss": 0.016, "step": 11902 }, { "epoch": 13.138045278851463, "grad_norm": 0.2349233478307724, "learning_rate": 2.9367000000000002e-05, "loss": 0.016, "step": 11903 }, { "epoch": 13.139149641082275, "grad_norm": 0.20556329190731049, "learning_rate": 2.9366666666666668e-05, "loss": 0.0092, "step": 11904 }, { "epoch": 13.140254003313087, "grad_norm": 0.228439599275589, "learning_rate": 2.9366333333333334e-05, "loss": 0.0142, "step": 11905 }, { "epoch": 13.141358365543898, "grad_norm": 0.12650954723358154, "learning_rate": 2.9366e-05, "loss": 0.0063, "step": 11906 }, { "epoch": 13.14246272777471, "grad_norm": 0.31762874126434326, "learning_rate": 2.936566666666667e-05, "loss": 0.014, "step": 11907 }, { "epoch": 13.143567090005522, "grad_norm": 1.8882588148117065, "learning_rate": 2.936533333333333e-05, "loss": 0.0221, "step": 11908 }, { "epoch": 13.144671452236334, "grad_norm": 0.3196313977241516, "learning_rate": 2.9365e-05, "loss": 0.0169, "step": 11909 }, { "epoch": 13.145775814467145, "grad_norm": 0.5511516332626343, "learning_rate": 2.936466666666667e-05, "loss": 0.0242, "step": 11910 }, { "epoch": 13.146880176697957, "grad_norm": 0.43005695939064026, "learning_rate": 2.9364333333333333e-05, "loss": 0.0133, "step": 11911 }, { "epoch": 13.14798453892877, "grad_norm": 0.39122071862220764, "learning_rate": 2.9364000000000002e-05, "loss": 0.0097, "step": 11912 }, { "epoch": 13.14908890115958, "grad_norm": 0.22870393097400665, "learning_rate": 2.9363666666666668e-05, "loss": 0.0111, "step": 11913 }, { "epoch": 13.150193263390392, "grad_norm": 0.1507079303264618, "learning_rate": 2.9363333333333334e-05, "loss": 0.0078, "step": 11914 }, { "epoch": 13.151297625621204, "grad_norm": 0.30491966009140015, "learning_rate": 2.9363e-05, "loss": 0.0099, "step": 11915 }, { "epoch": 13.152401987852015, "grad_norm": 0.2643210291862488, "learning_rate": 2.936266666666667e-05, "loss": 0.0117, "step": 11916 }, { "epoch": 13.153506350082827, "grad_norm": 
0.5400535464286804, "learning_rate": 2.9362333333333332e-05, "loss": 0.0213, "step": 11917 }, { "epoch": 13.154610712313639, "grad_norm": 0.19730240106582642, "learning_rate": 2.9362e-05, "loss": 0.0074, "step": 11918 }, { "epoch": 13.155715074544451, "grad_norm": 0.3847975730895996, "learning_rate": 2.936166666666667e-05, "loss": 0.009, "step": 11919 }, { "epoch": 13.156819436775262, "grad_norm": 0.49634379148483276, "learning_rate": 2.9361333333333333e-05, "loss": 0.0061, "step": 11920 }, { "epoch": 13.157923799006074, "grad_norm": 0.2107115536928177, "learning_rate": 2.9361000000000002e-05, "loss": 0.0107, "step": 11921 }, { "epoch": 13.159028161236886, "grad_norm": 0.32978707551956177, "learning_rate": 2.9360666666666668e-05, "loss": 0.009, "step": 11922 }, { "epoch": 13.160132523467697, "grad_norm": 0.43804705142974854, "learning_rate": 2.9360333333333334e-05, "loss": 0.0118, "step": 11923 }, { "epoch": 13.161236885698509, "grad_norm": 0.24109084904193878, "learning_rate": 2.936e-05, "loss": 0.014, "step": 11924 }, { "epoch": 13.162341247929321, "grad_norm": 0.2887744903564453, "learning_rate": 2.9359666666666666e-05, "loss": 0.0132, "step": 11925 }, { "epoch": 13.163445610160133, "grad_norm": 0.24148370325565338, "learning_rate": 2.9359333333333332e-05, "loss": 0.0166, "step": 11926 }, { "epoch": 13.164549972390944, "grad_norm": 0.32461902499198914, "learning_rate": 2.9359e-05, "loss": 0.0114, "step": 11927 }, { "epoch": 13.165654334621756, "grad_norm": 0.307122141122818, "learning_rate": 2.9358666666666667e-05, "loss": 0.0344, "step": 11928 }, { "epoch": 13.166758696852568, "grad_norm": 0.7479631304740906, "learning_rate": 2.9358333333333333e-05, "loss": 0.2399, "step": 11929 }, { "epoch": 13.167863059083379, "grad_norm": 0.6887868046760559, "learning_rate": 2.9358000000000003e-05, "loss": 0.1468, "step": 11930 }, { "epoch": 13.168967421314191, "grad_norm": 0.5782636404037476, "learning_rate": 2.9357666666666665e-05, "loss": 0.198, "step": 11931 }, { 
"epoch": 13.170071783545003, "grad_norm": 0.7218849658966064, "learning_rate": 2.9357333333333334e-05, "loss": 0.1541, "step": 11932 }, { "epoch": 13.171176145775814, "grad_norm": 0.5103525519371033, "learning_rate": 2.9357e-05, "loss": 0.1026, "step": 11933 }, { "epoch": 13.172280508006626, "grad_norm": 1.263195276260376, "learning_rate": 2.9356666666666666e-05, "loss": 0.1482, "step": 11934 }, { "epoch": 13.173384870237438, "grad_norm": 0.4087557792663574, "learning_rate": 2.9356333333333336e-05, "loss": 0.0741, "step": 11935 }, { "epoch": 13.17448923246825, "grad_norm": 0.8138163089752197, "learning_rate": 2.9356e-05, "loss": 0.0482, "step": 11936 }, { "epoch": 13.17559359469906, "grad_norm": 0.24690768122673035, "learning_rate": 2.9355666666666667e-05, "loss": 0.0476, "step": 11937 }, { "epoch": 13.176697956929873, "grad_norm": 0.2450084239244461, "learning_rate": 2.9355333333333333e-05, "loss": 0.0357, "step": 11938 }, { "epoch": 13.177802319160685, "grad_norm": 0.35276126861572266, "learning_rate": 2.9355000000000003e-05, "loss": 0.0703, "step": 11939 }, { "epoch": 13.178906681391496, "grad_norm": 0.2850050628185272, "learning_rate": 2.9354666666666665e-05, "loss": 0.0178, "step": 11940 }, { "epoch": 13.180011043622308, "grad_norm": 0.18023651838302612, "learning_rate": 2.9354333333333335e-05, "loss": 0.0135, "step": 11941 }, { "epoch": 13.18111540585312, "grad_norm": 0.39649975299835205, "learning_rate": 2.9354e-05, "loss": 0.0149, "step": 11942 }, { "epoch": 13.182219768083932, "grad_norm": 0.3126329481601715, "learning_rate": 2.9353666666666666e-05, "loss": 0.0141, "step": 11943 }, { "epoch": 13.183324130314743, "grad_norm": 0.10706619173288345, "learning_rate": 2.9353333333333336e-05, "loss": 0.0059, "step": 11944 }, { "epoch": 13.184428492545555, "grad_norm": 0.17928382754325867, "learning_rate": 2.9353000000000002e-05, "loss": 0.0108, "step": 11945 }, { "epoch": 13.185532854776367, "grad_norm": 0.13665863871574402, "learning_rate": 
2.9352666666666668e-05, "loss": 0.0078, "step": 11946 }, { "epoch": 13.186637217007178, "grad_norm": 0.25351807475090027, "learning_rate": 2.9352333333333334e-05, "loss": 0.0096, "step": 11947 }, { "epoch": 13.18774157923799, "grad_norm": 0.1658429056406021, "learning_rate": 2.9352000000000003e-05, "loss": 0.0084, "step": 11948 }, { "epoch": 13.188845941468802, "grad_norm": 0.431714802980423, "learning_rate": 2.9351666666666665e-05, "loss": 0.0114, "step": 11949 }, { "epoch": 13.189950303699613, "grad_norm": 0.21712517738342285, "learning_rate": 2.9351333333333335e-05, "loss": 0.0118, "step": 11950 }, { "epoch": 13.191054665930425, "grad_norm": 0.39412325620651245, "learning_rate": 2.9351e-05, "loss": 0.0153, "step": 11951 }, { "epoch": 13.192159028161237, "grad_norm": 0.44370055198669434, "learning_rate": 2.9350666666666667e-05, "loss": 0.0097, "step": 11952 }, { "epoch": 13.19326339039205, "grad_norm": 0.19171380996704102, "learning_rate": 2.9350333333333336e-05, "loss": 0.0118, "step": 11953 }, { "epoch": 13.19436775262286, "grad_norm": 0.21446067094802856, "learning_rate": 2.9350000000000002e-05, "loss": 0.0074, "step": 11954 }, { "epoch": 13.195472114853672, "grad_norm": 0.14152486622333527, "learning_rate": 2.9349666666666668e-05, "loss": 0.0072, "step": 11955 }, { "epoch": 13.196576477084484, "grad_norm": 0.4370584487915039, "learning_rate": 2.9349333333333334e-05, "loss": 0.0074, "step": 11956 }, { "epoch": 13.197680839315295, "grad_norm": 0.2664446234703064, "learning_rate": 2.9349e-05, "loss": 0.0152, "step": 11957 }, { "epoch": 13.198785201546107, "grad_norm": 0.23753920197486877, "learning_rate": 2.9348666666666666e-05, "loss": 0.0134, "step": 11958 }, { "epoch": 13.19988956377692, "grad_norm": 0.41989654302597046, "learning_rate": 2.9348333333333335e-05, "loss": 0.0163, "step": 11959 }, { "epoch": 13.200993926007731, "grad_norm": 0.2834402322769165, "learning_rate": 2.9348e-05, "loss": 0.0103, "step": 11960 }, { "epoch": 13.202098288238542, 
"grad_norm": 0.27848002314567566, "learning_rate": 2.9347666666666667e-05, "loss": 0.0079, "step": 11961 }, { "epoch": 13.203202650469354, "grad_norm": 0.7752796411514282, "learning_rate": 2.9347333333333336e-05, "loss": 0.0118, "step": 11962 }, { "epoch": 13.204307012700166, "grad_norm": 0.6658783555030823, "learning_rate": 2.9347e-05, "loss": 0.0245, "step": 11963 }, { "epoch": 13.205411374930977, "grad_norm": 0.913040280342102, "learning_rate": 2.9346666666666668e-05, "loss": 0.0072, "step": 11964 }, { "epoch": 13.206515737161789, "grad_norm": 0.28375232219696045, "learning_rate": 2.9346333333333334e-05, "loss": 0.0083, "step": 11965 }, { "epoch": 13.207620099392601, "grad_norm": 0.5362512469291687, "learning_rate": 2.9346e-05, "loss": 0.0093, "step": 11966 }, { "epoch": 13.208724461623412, "grad_norm": 0.302946537733078, "learning_rate": 2.9345666666666666e-05, "loss": 0.0078, "step": 11967 }, { "epoch": 13.209828823854224, "grad_norm": 0.18862013518810272, "learning_rate": 2.9345333333333335e-05, "loss": 0.0053, "step": 11968 }, { "epoch": 13.210933186085036, "grad_norm": 0.4127486050128937, "learning_rate": 2.9345e-05, "loss": 0.0087, "step": 11969 }, { "epoch": 13.212037548315848, "grad_norm": 0.46708881855010986, "learning_rate": 2.9344666666666667e-05, "loss": 0.0194, "step": 11970 }, { "epoch": 13.213141910546659, "grad_norm": 0.29133090376853943, "learning_rate": 2.9344333333333336e-05, "loss": 0.0123, "step": 11971 }, { "epoch": 13.214246272777471, "grad_norm": 0.25518104434013367, "learning_rate": 2.9344e-05, "loss": 0.0097, "step": 11972 }, { "epoch": 13.215350635008283, "grad_norm": 0.4998079538345337, "learning_rate": 2.9343666666666668e-05, "loss": 0.0197, "step": 11973 }, { "epoch": 13.216454997239094, "grad_norm": 1.0583826303482056, "learning_rate": 2.9343333333333334e-05, "loss": 0.0166, "step": 11974 }, { "epoch": 13.217559359469906, "grad_norm": 0.35513466596603394, "learning_rate": 2.9343e-05, "loss": 0.0113, "step": 11975 }, { "epoch": 
13.218663721700718, "grad_norm": 0.4948465824127197, "learning_rate": 2.9342666666666666e-05, "loss": 0.017, "step": 11976 }, { "epoch": 13.21976808393153, "grad_norm": 0.21576131880283356, "learning_rate": 2.9342333333333335e-05, "loss": 0.0077, "step": 11977 }, { "epoch": 13.22087244616234, "grad_norm": 0.3755848705768585, "learning_rate": 2.9342e-05, "loss": 0.0124, "step": 11978 }, { "epoch": 13.221976808393153, "grad_norm": 0.5228234529495239, "learning_rate": 2.9341666666666667e-05, "loss": 0.1481, "step": 11979 }, { "epoch": 13.223081170623965, "grad_norm": 0.5545579195022583, "learning_rate": 2.9341333333333337e-05, "loss": 0.1373, "step": 11980 }, { "epoch": 13.224185532854776, "grad_norm": 0.5734761953353882, "learning_rate": 2.9341e-05, "loss": 0.0996, "step": 11981 }, { "epoch": 13.225289895085588, "grad_norm": 0.6114553213119507, "learning_rate": 2.934066666666667e-05, "loss": 0.0737, "step": 11982 }, { "epoch": 13.2263942573164, "grad_norm": 0.5133971571922302, "learning_rate": 2.9340333333333334e-05, "loss": 0.0966, "step": 11983 }, { "epoch": 13.22749861954721, "grad_norm": 0.5167726874351501, "learning_rate": 2.934e-05, "loss": 0.084, "step": 11984 }, { "epoch": 13.228602981778023, "grad_norm": 0.42785024642944336, "learning_rate": 2.933966666666667e-05, "loss": 0.043, "step": 11985 }, { "epoch": 13.229707344008835, "grad_norm": 1.2888695001602173, "learning_rate": 2.9339333333333332e-05, "loss": 0.119, "step": 11986 }, { "epoch": 13.230811706239647, "grad_norm": 0.23297446966171265, "learning_rate": 2.9339e-05, "loss": 0.0563, "step": 11987 }, { "epoch": 13.231916068470458, "grad_norm": 0.33913716673851013, "learning_rate": 2.9338666666666667e-05, "loss": 0.0261, "step": 11988 }, { "epoch": 13.23302043070127, "grad_norm": 0.19959738850593567, "learning_rate": 2.9338333333333333e-05, "loss": 0.0098, "step": 11989 }, { "epoch": 13.234124792932082, "grad_norm": 0.2542852759361267, "learning_rate": 2.9338e-05, "loss": 0.0194, "step": 11990 }, { 
"epoch": 13.235229155162893, "grad_norm": 0.3195132613182068, "learning_rate": 2.933766666666667e-05, "loss": 0.0189, "step": 11991 }, { "epoch": 13.236333517393705, "grad_norm": 0.24259808659553528, "learning_rate": 2.933733333333333e-05, "loss": 0.0133, "step": 11992 }, { "epoch": 13.237437879624517, "grad_norm": 0.30418750643730164, "learning_rate": 2.9337e-05, "loss": 0.0181, "step": 11993 }, { "epoch": 13.23854224185533, "grad_norm": 0.21102215349674225, "learning_rate": 2.933666666666667e-05, "loss": 0.0119, "step": 11994 }, { "epoch": 13.23964660408614, "grad_norm": 0.21064694225788116, "learning_rate": 2.9336333333333332e-05, "loss": 0.0118, "step": 11995 }, { "epoch": 13.240750966316952, "grad_norm": 0.20883353054523468, "learning_rate": 2.9336000000000002e-05, "loss": 0.0098, "step": 11996 }, { "epoch": 13.241855328547764, "grad_norm": 0.25205740332603455, "learning_rate": 2.9335666666666668e-05, "loss": 0.0123, "step": 11997 }, { "epoch": 13.242959690778575, "grad_norm": 0.18164297938346863, "learning_rate": 2.9335333333333334e-05, "loss": 0.009, "step": 11998 }, { "epoch": 13.244064053009387, "grad_norm": 0.13301655650138855, "learning_rate": 2.9335e-05, "loss": 0.0119, "step": 11999 }, { "epoch": 13.2451684152402, "grad_norm": 0.13481669127941132, "learning_rate": 2.933466666666667e-05, "loss": 0.0079, "step": 12000 }, { "epoch": 13.2451684152402, "eval_cer": 0.10695492132617881, "eval_loss": 0.29680335521698, "eval_runtime": 15.504, "eval_samples_per_second": 19.608, "eval_steps_per_second": 0.645, "eval_wer": 0.36128165771297005, "step": 12000 }, { "epoch": 13.24627277747101, "grad_norm": 0.10941074788570404, "learning_rate": 2.933433333333333e-05, "loss": 0.0089, "step": 12001 }, { "epoch": 13.247377139701822, "grad_norm": 0.2814425826072693, "learning_rate": 2.9334e-05, "loss": 0.0084, "step": 12002 }, { "epoch": 13.248481501932634, "grad_norm": 0.4662765860557556, "learning_rate": 2.933366666666667e-05, "loss": 0.0233, "step": 12003 }, { "epoch": 
13.249585864163446, "grad_norm": 0.21891269087791443, "learning_rate": 2.9333333333333333e-05, "loss": 0.0103, "step": 12004 }, { "epoch": 13.250690226394257, "grad_norm": 0.14144285023212433, "learning_rate": 2.9333000000000002e-05, "loss": 0.0044, "step": 12005 }, { "epoch": 13.251794588625069, "grad_norm": 0.28073081374168396, "learning_rate": 2.9332666666666668e-05, "loss": 0.0063, "step": 12006 }, { "epoch": 13.252898950855881, "grad_norm": 0.22190693020820618, "learning_rate": 2.9332333333333334e-05, "loss": 0.0087, "step": 12007 }, { "epoch": 13.254003313086692, "grad_norm": 0.23191554844379425, "learning_rate": 2.9332e-05, "loss": 0.0116, "step": 12008 }, { "epoch": 13.255107675317504, "grad_norm": 0.2071465253829956, "learning_rate": 2.933166666666667e-05, "loss": 0.0104, "step": 12009 }, { "epoch": 13.256212037548316, "grad_norm": 0.3809616267681122, "learning_rate": 2.9331333333333335e-05, "loss": 0.01, "step": 12010 }, { "epoch": 13.257316399779128, "grad_norm": 0.16747719049453735, "learning_rate": 2.9331e-05, "loss": 0.0064, "step": 12011 }, { "epoch": 13.258420762009939, "grad_norm": 0.13767127692699432, "learning_rate": 2.933066666666667e-05, "loss": 0.0051, "step": 12012 }, { "epoch": 13.259525124240751, "grad_norm": 0.16539394855499268, "learning_rate": 2.9330333333333333e-05, "loss": 0.0065, "step": 12013 }, { "epoch": 13.260629486471563, "grad_norm": 0.4695453643798828, "learning_rate": 2.9330000000000002e-05, "loss": 0.0251, "step": 12014 }, { "epoch": 13.261733848702374, "grad_norm": 0.10893740504980087, "learning_rate": 2.9329666666666668e-05, "loss": 0.0046, "step": 12015 }, { "epoch": 13.262838210933186, "grad_norm": 0.2918541133403778, "learning_rate": 2.9329333333333334e-05, "loss": 0.011, "step": 12016 }, { "epoch": 13.263942573163998, "grad_norm": 0.25091829895973206, "learning_rate": 2.9329e-05, "loss": 0.0092, "step": 12017 }, { "epoch": 13.26504693539481, "grad_norm": 0.27782896161079407, "learning_rate": 2.9328666666666666e-05, 
"loss": 0.0087, "step": 12018 }, { "epoch": 13.26615129762562, "grad_norm": 0.17479567229747772, "learning_rate": 2.9328333333333335e-05, "loss": 0.0091, "step": 12019 }, { "epoch": 13.267255659856433, "grad_norm": 0.4443185329437256, "learning_rate": 2.9328e-05, "loss": 0.018, "step": 12020 }, { "epoch": 13.268360022087245, "grad_norm": 0.2590861916542053, "learning_rate": 2.9327666666666667e-05, "loss": 0.0115, "step": 12021 }, { "epoch": 13.269464384318056, "grad_norm": 0.21722589433193207, "learning_rate": 2.9327333333333333e-05, "loss": 0.0093, "step": 12022 }, { "epoch": 13.270568746548868, "grad_norm": 0.2647075653076172, "learning_rate": 2.9327000000000002e-05, "loss": 0.0094, "step": 12023 }, { "epoch": 13.27167310877968, "grad_norm": 0.2083617001771927, "learning_rate": 2.9326666666666665e-05, "loss": 0.0054, "step": 12024 }, { "epoch": 13.27277747101049, "grad_norm": 0.42020222544670105, "learning_rate": 2.9326333333333334e-05, "loss": 0.0103, "step": 12025 }, { "epoch": 13.273881833241303, "grad_norm": 0.29975852370262146, "learning_rate": 2.9326e-05, "loss": 0.0257, "step": 12026 }, { "epoch": 13.274986195472115, "grad_norm": 0.39968129992485046, "learning_rate": 2.9325666666666666e-05, "loss": 0.0168, "step": 12027 }, { "epoch": 13.276090557702927, "grad_norm": 0.23036262392997742, "learning_rate": 2.9325333333333335e-05, "loss": 0.0091, "step": 12028 }, { "epoch": 13.277194919933738, "grad_norm": 0.6499323844909668, "learning_rate": 2.9325e-05, "loss": 0.2224, "step": 12029 }, { "epoch": 13.27829928216455, "grad_norm": 0.7767085433006287, "learning_rate": 2.9324666666666667e-05, "loss": 0.1321, "step": 12030 }, { "epoch": 13.279403644395362, "grad_norm": 0.6983376741409302, "learning_rate": 2.9324333333333333e-05, "loss": 0.11, "step": 12031 }, { "epoch": 13.280508006626173, "grad_norm": 0.4313601553440094, "learning_rate": 2.9324000000000002e-05, "loss": 0.0826, "step": 12032 }, { "epoch": 13.281612368856985, "grad_norm": 0.32792672514915466, 
"learning_rate": 2.9323666666666665e-05, "loss": 0.0877, "step": 12033 }, { "epoch": 13.282716731087797, "grad_norm": 0.40869665145874023, "learning_rate": 2.9323333333333334e-05, "loss": 0.0634, "step": 12034 }, { "epoch": 13.283821093318608, "grad_norm": 0.5305112600326538, "learning_rate": 2.9323000000000004e-05, "loss": 0.0449, "step": 12035 }, { "epoch": 13.28492545554942, "grad_norm": 0.23156023025512695, "learning_rate": 2.9322666666666666e-05, "loss": 0.0211, "step": 12036 }, { "epoch": 13.286029817780232, "grad_norm": 0.34206897020339966, "learning_rate": 2.9322333333333336e-05, "loss": 0.027, "step": 12037 }, { "epoch": 13.287134180011044, "grad_norm": 0.29613766074180603, "learning_rate": 2.9322e-05, "loss": 0.0274, "step": 12038 }, { "epoch": 13.288238542241855, "grad_norm": 0.33629387617111206, "learning_rate": 2.9321666666666667e-05, "loss": 0.0426, "step": 12039 }, { "epoch": 13.289342904472667, "grad_norm": 0.46879664063453674, "learning_rate": 2.9321333333333333e-05, "loss": 0.0203, "step": 12040 }, { "epoch": 13.29044726670348, "grad_norm": 0.08616999536752701, "learning_rate": 2.9321000000000003e-05, "loss": 0.0034, "step": 12041 }, { "epoch": 13.29155162893429, "grad_norm": 0.4740622639656067, "learning_rate": 2.9320666666666665e-05, "loss": 0.0135, "step": 12042 }, { "epoch": 13.292655991165102, "grad_norm": 0.19181600213050842, "learning_rate": 2.9320333333333335e-05, "loss": 0.0103, "step": 12043 }, { "epoch": 13.293760353395914, "grad_norm": 0.1982770562171936, "learning_rate": 2.9320000000000004e-05, "loss": 0.0042, "step": 12044 }, { "epoch": 13.294864715626726, "grad_norm": 0.24588868021965027, "learning_rate": 2.9319666666666666e-05, "loss": 0.0147, "step": 12045 }, { "epoch": 13.295969077857537, "grad_norm": 0.09656242281198502, "learning_rate": 2.9319333333333336e-05, "loss": 0.0056, "step": 12046 }, { "epoch": 13.297073440088349, "grad_norm": 0.15443827211856842, "learning_rate": 2.9318999999999998e-05, "loss": 0.0101, "step": 12047 
}, { "epoch": 13.298177802319161, "grad_norm": 0.15562891960144043, "learning_rate": 2.9318666666666668e-05, "loss": 0.0091, "step": 12048 }, { "epoch": 13.299282164549972, "grad_norm": 0.12282057851552963, "learning_rate": 2.9318333333333334e-05, "loss": 0.0067, "step": 12049 }, { "epoch": 13.300386526780784, "grad_norm": 0.2177095264196396, "learning_rate": 2.9318e-05, "loss": 0.0168, "step": 12050 }, { "epoch": 13.301490889011596, "grad_norm": 0.15767434239387512, "learning_rate": 2.9317666666666665e-05, "loss": 0.0053, "step": 12051 }, { "epoch": 13.302595251242408, "grad_norm": 0.32676267623901367, "learning_rate": 2.9317333333333335e-05, "loss": 0.0079, "step": 12052 }, { "epoch": 13.303699613473219, "grad_norm": 0.2198636829853058, "learning_rate": 2.9317e-05, "loss": 0.0054, "step": 12053 }, { "epoch": 13.304803975704031, "grad_norm": 0.211678609251976, "learning_rate": 2.9316666666666667e-05, "loss": 0.0557, "step": 12054 }, { "epoch": 13.305908337934843, "grad_norm": 0.24001798033714294, "learning_rate": 2.9316333333333336e-05, "loss": 0.0108, "step": 12055 }, { "epoch": 13.307012700165654, "grad_norm": 0.4625389873981476, "learning_rate": 2.9316e-05, "loss": 0.0347, "step": 12056 }, { "epoch": 13.308117062396466, "grad_norm": 0.27743664383888245, "learning_rate": 2.9315666666666668e-05, "loss": 0.0145, "step": 12057 }, { "epoch": 13.309221424627278, "grad_norm": 0.13258418440818787, "learning_rate": 2.9315333333333334e-05, "loss": 0.0065, "step": 12058 }, { "epoch": 13.310325786858089, "grad_norm": 0.28615802526474, "learning_rate": 2.9315e-05, "loss": 0.0086, "step": 12059 }, { "epoch": 13.3114301490889, "grad_norm": 0.15645846724510193, "learning_rate": 2.931466666666667e-05, "loss": 0.0066, "step": 12060 }, { "epoch": 13.312534511319713, "grad_norm": 0.6223406195640564, "learning_rate": 2.9314333333333335e-05, "loss": 0.0117, "step": 12061 }, { "epoch": 13.313638873550525, "grad_norm": 0.3088025450706482, "learning_rate": 2.9314e-05, "loss": 0.0185, 
"step": 12062 }, { "epoch": 13.314743235781336, "grad_norm": 0.13485543429851532, "learning_rate": 2.9313666666666667e-05, "loss": 0.0046, "step": 12063 }, { "epoch": 13.315847598012148, "grad_norm": 0.4363133907318115, "learning_rate": 2.9313333333333336e-05, "loss": 0.0141, "step": 12064 }, { "epoch": 13.31695196024296, "grad_norm": 0.22117425501346588, "learning_rate": 2.9313e-05, "loss": 0.0145, "step": 12065 }, { "epoch": 13.31805632247377, "grad_norm": 0.13708865642547607, "learning_rate": 2.9312666666666668e-05, "loss": 0.0074, "step": 12066 }, { "epoch": 13.319160684704583, "grad_norm": 0.3896609842777252, "learning_rate": 2.9312333333333334e-05, "loss": 0.0082, "step": 12067 }, { "epoch": 13.320265046935395, "grad_norm": 0.2849915325641632, "learning_rate": 2.9312e-05, "loss": 0.0152, "step": 12068 }, { "epoch": 13.321369409166207, "grad_norm": 0.198404923081398, "learning_rate": 2.931166666666667e-05, "loss": 0.0077, "step": 12069 }, { "epoch": 13.322473771397018, "grad_norm": 0.25904229283332825, "learning_rate": 2.9311333333333335e-05, "loss": 0.0064, "step": 12070 }, { "epoch": 13.32357813362783, "grad_norm": 0.21438686549663544, "learning_rate": 2.9311e-05, "loss": 0.0095, "step": 12071 }, { "epoch": 13.324682495858642, "grad_norm": 0.24302855134010315, "learning_rate": 2.9310666666666667e-05, "loss": 0.0115, "step": 12072 }, { "epoch": 13.325786858089453, "grad_norm": 0.322026789188385, "learning_rate": 2.9310333333333336e-05, "loss": 0.0117, "step": 12073 }, { "epoch": 13.326891220320265, "grad_norm": 0.17932899296283722, "learning_rate": 2.931e-05, "loss": 0.0116, "step": 12074 }, { "epoch": 13.327995582551077, "grad_norm": 0.38828742504119873, "learning_rate": 2.9309666666666668e-05, "loss": 0.0135, "step": 12075 }, { "epoch": 13.329099944781888, "grad_norm": 0.4795510768890381, "learning_rate": 2.930933333333333e-05, "loss": 0.015, "step": 12076 }, { "epoch": 13.3302043070127, "grad_norm": 0.16490936279296875, "learning_rate": 2.9309e-05, "loss": 
0.0056, "step": 12077 }, { "epoch": 13.331308669243512, "grad_norm": 0.5438594222068787, "learning_rate": 2.930866666666667e-05, "loss": 0.0058, "step": 12078 }, { "epoch": 13.332413031474324, "grad_norm": 0.5792035460472107, "learning_rate": 2.9308333333333332e-05, "loss": 0.1658, "step": 12079 }, { "epoch": 13.333517393705135, "grad_norm": 0.5259382724761963, "learning_rate": 2.9308e-05, "loss": 0.1559, "step": 12080 }, { "epoch": 13.334621755935947, "grad_norm": 0.5927056074142456, "learning_rate": 2.9307666666666667e-05, "loss": 0.0937, "step": 12081 }, { "epoch": 13.33572611816676, "grad_norm": 0.5141152739524841, "learning_rate": 2.9307333333333333e-05, "loss": 0.1052, "step": 12082 }, { "epoch": 13.33683048039757, "grad_norm": 0.8469222187995911, "learning_rate": 2.9307e-05, "loss": 0.055, "step": 12083 }, { "epoch": 13.337934842628382, "grad_norm": 0.6869850158691406, "learning_rate": 2.930666666666667e-05, "loss": 0.093, "step": 12084 }, { "epoch": 13.339039204859194, "grad_norm": 0.3406233787536621, "learning_rate": 2.9306333333333334e-05, "loss": 0.0557, "step": 12085 }, { "epoch": 13.340143567090006, "grad_norm": 0.25103336572647095, "learning_rate": 2.9306e-05, "loss": 0.0348, "step": 12086 }, { "epoch": 13.341247929320817, "grad_norm": 0.49465295672416687, "learning_rate": 2.930566666666667e-05, "loss": 0.041, "step": 12087 }, { "epoch": 13.342352291551629, "grad_norm": 0.33158865571022034, "learning_rate": 2.9305333333333332e-05, "loss": 0.0305, "step": 12088 }, { "epoch": 13.343456653782441, "grad_norm": 0.29337361454963684, "learning_rate": 2.9305e-05, "loss": 0.0102, "step": 12089 }, { "epoch": 13.344561016013252, "grad_norm": 0.3980463445186615, "learning_rate": 2.9304666666666667e-05, "loss": 0.0267, "step": 12090 }, { "epoch": 13.345665378244064, "grad_norm": 0.6772866249084473, "learning_rate": 2.9304333333333333e-05, "loss": 0.022, "step": 12091 }, { "epoch": 13.346769740474876, "grad_norm": 0.31475165486335754, "learning_rate": 2.9304e-05, 
"loss": 0.0091, "step": 12092 }, { "epoch": 13.347874102705687, "grad_norm": 0.18861627578735352, "learning_rate": 2.930366666666667e-05, "loss": 0.0117, "step": 12093 }, { "epoch": 13.348978464936499, "grad_norm": 0.1686353087425232, "learning_rate": 2.9303333333333335e-05, "loss": 0.0127, "step": 12094 }, { "epoch": 13.350082827167311, "grad_norm": 0.19871962070465088, "learning_rate": 2.9303e-05, "loss": 0.0277, "step": 12095 }, { "epoch": 13.351187189398123, "grad_norm": 0.20095892250537872, "learning_rate": 2.930266666666667e-05, "loss": 0.0125, "step": 12096 }, { "epoch": 13.352291551628934, "grad_norm": 0.38707518577575684, "learning_rate": 2.9302333333333332e-05, "loss": 0.0168, "step": 12097 }, { "epoch": 13.353395913859746, "grad_norm": 0.20973537862300873, "learning_rate": 2.9302e-05, "loss": 0.0099, "step": 12098 }, { "epoch": 13.354500276090558, "grad_norm": 0.18104873597621918, "learning_rate": 2.9301666666666668e-05, "loss": 0.0122, "step": 12099 }, { "epoch": 13.355604638321369, "grad_norm": 0.279725044965744, "learning_rate": 2.9301333333333334e-05, "loss": 0.0108, "step": 12100 }, { "epoch": 13.356709000552181, "grad_norm": 0.19139069318771362, "learning_rate": 2.9301e-05, "loss": 0.0064, "step": 12101 }, { "epoch": 13.357813362782993, "grad_norm": 0.18824085593223572, "learning_rate": 2.930066666666667e-05, "loss": 0.0092, "step": 12102 }, { "epoch": 13.358917725013805, "grad_norm": 0.5123372673988342, "learning_rate": 2.9300333333333335e-05, "loss": 0.0077, "step": 12103 }, { "epoch": 13.360022087244616, "grad_norm": 0.18034258484840393, "learning_rate": 2.93e-05, "loss": 0.0074, "step": 12104 }, { "epoch": 13.361126449475428, "grad_norm": 0.17252685129642487, "learning_rate": 2.929966666666667e-05, "loss": 0.0085, "step": 12105 }, { "epoch": 13.36223081170624, "grad_norm": 0.17929063737392426, "learning_rate": 2.9299333333333333e-05, "loss": 0.0061, "step": 12106 }, { "epoch": 13.36333517393705, "grad_norm": 0.15490292012691498, 
"learning_rate": 2.9299000000000002e-05, "loss": 0.0071, "step": 12107 }, { "epoch": 13.364439536167863, "grad_norm": 0.5829626321792603, "learning_rate": 2.9298666666666664e-05, "loss": 0.0119, "step": 12108 }, { "epoch": 13.365543898398675, "grad_norm": 0.32531267404556274, "learning_rate": 2.9298333333333334e-05, "loss": 0.0081, "step": 12109 }, { "epoch": 13.366648260629486, "grad_norm": 0.20281881093978882, "learning_rate": 2.9298000000000003e-05, "loss": 0.008, "step": 12110 }, { "epoch": 13.367752622860298, "grad_norm": 0.24128440022468567, "learning_rate": 2.9297666666666666e-05, "loss": 0.0077, "step": 12111 }, { "epoch": 13.36885698509111, "grad_norm": 0.2542168200016022, "learning_rate": 2.9297333333333335e-05, "loss": 0.0121, "step": 12112 }, { "epoch": 13.369961347321922, "grad_norm": 0.30981501936912537, "learning_rate": 2.9297e-05, "loss": 0.0127, "step": 12113 }, { "epoch": 13.371065709552733, "grad_norm": 0.393588125705719, "learning_rate": 2.9296666666666667e-05, "loss": 0.0138, "step": 12114 }, { "epoch": 13.372170071783545, "grad_norm": 0.5324885845184326, "learning_rate": 2.9296333333333333e-05, "loss": 0.0106, "step": 12115 }, { "epoch": 13.373274434014357, "grad_norm": 0.17178510129451752, "learning_rate": 2.9296000000000002e-05, "loss": 0.0067, "step": 12116 }, { "epoch": 13.374378796245168, "grad_norm": 0.08944718539714813, "learning_rate": 2.9295666666666665e-05, "loss": 0.0038, "step": 12117 }, { "epoch": 13.37548315847598, "grad_norm": 0.25395962595939636, "learning_rate": 2.9295333333333334e-05, "loss": 0.0128, "step": 12118 }, { "epoch": 13.376587520706792, "grad_norm": 0.5073028206825256, "learning_rate": 2.9295000000000003e-05, "loss": 0.0139, "step": 12119 }, { "epoch": 13.377691882937604, "grad_norm": 0.372905433177948, "learning_rate": 2.9294666666666666e-05, "loss": 0.0084, "step": 12120 }, { "epoch": 13.378796245168415, "grad_norm": 0.3130914568901062, "learning_rate": 2.9294333333333335e-05, "loss": 0.0094, "step": 12121 }, { 
"epoch": 13.379900607399227, "grad_norm": 0.6320772767066956, "learning_rate": 2.9294e-05, "loss": 0.0131, "step": 12122 }, { "epoch": 13.38100496963004, "grad_norm": 0.21205025911331177, "learning_rate": 2.9293666666666667e-05, "loss": 0.0109, "step": 12123 }, { "epoch": 13.38210933186085, "grad_norm": 0.2746640741825104, "learning_rate": 2.9293333333333333e-05, "loss": 0.0054, "step": 12124 }, { "epoch": 13.383213694091662, "grad_norm": 0.2520391047000885, "learning_rate": 2.9293000000000002e-05, "loss": 0.0066, "step": 12125 }, { "epoch": 13.384318056322474, "grad_norm": 0.7671626210212708, "learning_rate": 2.9292666666666665e-05, "loss": 0.0185, "step": 12126 }, { "epoch": 13.385422418553285, "grad_norm": 0.3547215759754181, "learning_rate": 2.9292333333333334e-05, "loss": 0.0088, "step": 12127 }, { "epoch": 13.386526780784097, "grad_norm": 0.37357354164123535, "learning_rate": 2.9292000000000003e-05, "loss": 0.0086, "step": 12128 }, { "epoch": 13.387631143014909, "grad_norm": 0.7304930090904236, "learning_rate": 2.9291666666666666e-05, "loss": 0.1851, "step": 12129 }, { "epoch": 13.388735505245721, "grad_norm": 1.048322319984436, "learning_rate": 2.9291333333333335e-05, "loss": 0.1212, "step": 12130 }, { "epoch": 13.389839867476532, "grad_norm": 0.6782891750335693, "learning_rate": 2.9291e-05, "loss": 0.1, "step": 12131 }, { "epoch": 13.390944229707344, "grad_norm": 0.7473092079162598, "learning_rate": 2.9290666666666667e-05, "loss": 0.1167, "step": 12132 }, { "epoch": 13.392048591938156, "grad_norm": 0.34647414088249207, "learning_rate": 2.9290333333333333e-05, "loss": 0.0638, "step": 12133 }, { "epoch": 13.393152954168967, "grad_norm": 0.40591639280319214, "learning_rate": 2.9290000000000002e-05, "loss": 0.0605, "step": 12134 }, { "epoch": 13.394257316399779, "grad_norm": 0.4436351954936981, "learning_rate": 2.928966666666667e-05, "loss": 0.0439, "step": 12135 }, { "epoch": 13.395361678630591, "grad_norm": 0.39136624336242676, "learning_rate": 
2.9289333333333334e-05, "loss": 0.0398, "step": 12136 }, { "epoch": 13.396466040861403, "grad_norm": 0.2861413359642029, "learning_rate": 2.9289e-05, "loss": 0.0241, "step": 12137 }, { "epoch": 13.397570403092214, "grad_norm": 0.33921948075294495, "learning_rate": 2.9288666666666666e-05, "loss": 0.0305, "step": 12138 }, { "epoch": 13.398674765323026, "grad_norm": 0.18632662296295166, "learning_rate": 2.9288333333333336e-05, "loss": 0.0068, "step": 12139 }, { "epoch": 13.399779127553838, "grad_norm": 0.25053226947784424, "learning_rate": 2.9287999999999998e-05, "loss": 0.0251, "step": 12140 }, { "epoch": 13.400883489784649, "grad_norm": 0.3924042284488678, "learning_rate": 2.9287666666666667e-05, "loss": 0.0381, "step": 12141 }, { "epoch": 13.401987852015461, "grad_norm": 0.5302098393440247, "learning_rate": 2.9287333333333333e-05, "loss": 0.0083, "step": 12142 }, { "epoch": 13.403092214246273, "grad_norm": 0.38216131925582886, "learning_rate": 2.9287e-05, "loss": 0.0149, "step": 12143 }, { "epoch": 13.404196576477084, "grad_norm": 0.31998318433761597, "learning_rate": 2.928666666666667e-05, "loss": 0.0126, "step": 12144 }, { "epoch": 13.405300938707896, "grad_norm": 0.24851149320602417, "learning_rate": 2.9286333333333335e-05, "loss": 0.0084, "step": 12145 }, { "epoch": 13.406405300938708, "grad_norm": 0.29935142397880554, "learning_rate": 2.9286e-05, "loss": 0.0115, "step": 12146 }, { "epoch": 13.40750966316952, "grad_norm": 0.19683107733726501, "learning_rate": 2.9285666666666666e-05, "loss": 0.0116, "step": 12147 }, { "epoch": 13.40861402540033, "grad_norm": 0.3729385435581207, "learning_rate": 2.9285333333333336e-05, "loss": 0.0131, "step": 12148 }, { "epoch": 13.409718387631143, "grad_norm": 0.38853004574775696, "learning_rate": 2.9284999999999998e-05, "loss": 0.0102, "step": 12149 }, { "epoch": 13.410822749861955, "grad_norm": 0.15159766376018524, "learning_rate": 2.9284666666666668e-05, "loss": 0.0074, "step": 12150 }, { "epoch": 13.411927112092766, 
"grad_norm": 0.17261698842048645, "learning_rate": 2.9284333333333334e-05, "loss": 0.0213, "step": 12151 }, { "epoch": 13.413031474323578, "grad_norm": 0.23276382684707642, "learning_rate": 2.9284e-05, "loss": 0.0093, "step": 12152 }, { "epoch": 13.41413583655439, "grad_norm": 0.39335396885871887, "learning_rate": 2.928366666666667e-05, "loss": 0.0135, "step": 12153 }, { "epoch": 13.415240198785202, "grad_norm": 0.30781325697898865, "learning_rate": 2.9283333333333335e-05, "loss": 0.0214, "step": 12154 }, { "epoch": 13.416344561016013, "grad_norm": 0.37736397981643677, "learning_rate": 2.9283e-05, "loss": 0.0136, "step": 12155 }, { "epoch": 13.417448923246825, "grad_norm": 0.21585248410701752, "learning_rate": 2.9282666666666667e-05, "loss": 0.0342, "step": 12156 }, { "epoch": 13.418553285477637, "grad_norm": 0.30214932560920715, "learning_rate": 2.9282333333333336e-05, "loss": 0.0204, "step": 12157 }, { "epoch": 13.419657647708448, "grad_norm": 0.3319716155529022, "learning_rate": 2.9282e-05, "loss": 0.0124, "step": 12158 }, { "epoch": 13.42076200993926, "grad_norm": 0.321510374546051, "learning_rate": 2.9281666666666668e-05, "loss": 0.0136, "step": 12159 }, { "epoch": 13.421866372170072, "grad_norm": 0.2815929651260376, "learning_rate": 2.9281333333333337e-05, "loss": 0.0099, "step": 12160 }, { "epoch": 13.422970734400884, "grad_norm": 0.2577342391014099, "learning_rate": 2.9281e-05, "loss": 0.021, "step": 12161 }, { "epoch": 13.424075096631695, "grad_norm": 0.2033453732728958, "learning_rate": 2.928066666666667e-05, "loss": 0.0078, "step": 12162 }, { "epoch": 13.425179458862507, "grad_norm": 0.2412445843219757, "learning_rate": 2.9280333333333335e-05, "loss": 0.0129, "step": 12163 }, { "epoch": 13.42628382109332, "grad_norm": 0.47003185749053955, "learning_rate": 2.928e-05, "loss": 0.0154, "step": 12164 }, { "epoch": 13.42738818332413, "grad_norm": 0.4138592779636383, "learning_rate": 2.9279666666666667e-05, "loss": 0.0169, "step": 12165 }, { "epoch": 
13.428492545554942, "grad_norm": 0.14466990530490875, "learning_rate": 2.9279333333333336e-05, "loss": 0.0059, "step": 12166 }, { "epoch": 13.429596907785754, "grad_norm": 0.23033469915390015, "learning_rate": 2.9279e-05, "loss": 0.0092, "step": 12167 }, { "epoch": 13.430701270016565, "grad_norm": 0.3154689073562622, "learning_rate": 2.9278666666666668e-05, "loss": 0.0151, "step": 12168 }, { "epoch": 13.431805632247377, "grad_norm": 0.16657496988773346, "learning_rate": 2.9278333333333334e-05, "loss": 0.0113, "step": 12169 }, { "epoch": 13.43290999447819, "grad_norm": 0.2871674597263336, "learning_rate": 2.9278e-05, "loss": 0.0089, "step": 12170 }, { "epoch": 13.434014356709001, "grad_norm": 0.17733794450759888, "learning_rate": 2.927766666666667e-05, "loss": 0.0066, "step": 12171 }, { "epoch": 13.435118718939812, "grad_norm": 0.31414252519607544, "learning_rate": 2.9277333333333332e-05, "loss": 0.01, "step": 12172 }, { "epoch": 13.436223081170624, "grad_norm": 0.10697675496339798, "learning_rate": 2.9277e-05, "loss": 0.0035, "step": 12173 }, { "epoch": 13.437327443401436, "grad_norm": 1.1344850063323975, "learning_rate": 2.9276666666666667e-05, "loss": 0.0176, "step": 12174 }, { "epoch": 13.438431805632247, "grad_norm": 0.6654877662658691, "learning_rate": 2.9276333333333333e-05, "loss": 0.0255, "step": 12175 }, { "epoch": 13.439536167863059, "grad_norm": 0.2198731154203415, "learning_rate": 2.9276e-05, "loss": 0.0141, "step": 12176 }, { "epoch": 13.440640530093871, "grad_norm": 0.29509884119033813, "learning_rate": 2.9275666666666668e-05, "loss": 0.0084, "step": 12177 }, { "epoch": 13.441744892324682, "grad_norm": 0.2442728877067566, "learning_rate": 2.9275333333333334e-05, "loss": 0.0188, "step": 12178 }, { "epoch": 13.442849254555494, "grad_norm": 0.6938086152076721, "learning_rate": 2.9275e-05, "loss": 0.1724, "step": 12179 }, { "epoch": 13.443953616786306, "grad_norm": 0.590947151184082, "learning_rate": 2.927466666666667e-05, "loss": 0.1306, "step": 12180 }, 
{ "epoch": 13.445057979017118, "grad_norm": 0.47405320405960083, "learning_rate": 2.9274333333333332e-05, "loss": 0.1182, "step": 12181 }, { "epoch": 13.446162341247929, "grad_norm": 0.7024762630462646, "learning_rate": 2.9274e-05, "loss": 0.1072, "step": 12182 }, { "epoch": 13.447266703478741, "grad_norm": 0.4832969307899475, "learning_rate": 2.9273666666666667e-05, "loss": 0.076, "step": 12183 }, { "epoch": 13.448371065709553, "grad_norm": 0.4889402687549591, "learning_rate": 2.9273333333333333e-05, "loss": 0.0558, "step": 12184 }, { "epoch": 13.449475427940364, "grad_norm": 0.3521714210510254, "learning_rate": 2.9273000000000002e-05, "loss": 0.0604, "step": 12185 }, { "epoch": 13.450579790171176, "grad_norm": 0.24579373002052307, "learning_rate": 2.927266666666667e-05, "loss": 0.0333, "step": 12186 }, { "epoch": 13.451684152401988, "grad_norm": 0.15447987616062164, "learning_rate": 2.9272333333333334e-05, "loss": 0.0114, "step": 12187 }, { "epoch": 13.4527885146328, "grad_norm": 0.49363937973976135, "learning_rate": 2.9272e-05, "loss": 0.0238, "step": 12188 }, { "epoch": 13.45389287686361, "grad_norm": 0.2220400869846344, "learning_rate": 2.927166666666667e-05, "loss": 0.0122, "step": 12189 }, { "epoch": 13.454997239094423, "grad_norm": 0.5228273868560791, "learning_rate": 2.9271333333333332e-05, "loss": 0.0192, "step": 12190 }, { "epoch": 13.456101601325235, "grad_norm": 0.21013523638248444, "learning_rate": 2.9271e-05, "loss": 0.012, "step": 12191 }, { "epoch": 13.457205963556046, "grad_norm": 0.4390753507614136, "learning_rate": 2.9270666666666667e-05, "loss": 0.0138, "step": 12192 }, { "epoch": 13.458310325786858, "grad_norm": 0.5659512281417847, "learning_rate": 2.9270333333333333e-05, "loss": 0.0155, "step": 12193 }, { "epoch": 13.45941468801767, "grad_norm": 0.20237933099269867, "learning_rate": 2.9270000000000003e-05, "loss": 0.0105, "step": 12194 }, { "epoch": 13.460519050248482, "grad_norm": 0.25423911213874817, "learning_rate": 2.926966666666667e-05, 
"loss": 0.0122, "step": 12195 }, { "epoch": 13.461623412479293, "grad_norm": 0.2508511543273926, "learning_rate": 2.9269333333333335e-05, "loss": 0.0129, "step": 12196 }, { "epoch": 13.462727774710105, "grad_norm": 0.6618533730506897, "learning_rate": 2.9269e-05, "loss": 0.0158, "step": 12197 }, { "epoch": 13.463832136940917, "grad_norm": 0.13651449978351593, "learning_rate": 2.9268666666666666e-05, "loss": 0.0089, "step": 12198 }, { "epoch": 13.464936499171728, "grad_norm": 0.2483569085597992, "learning_rate": 2.9268333333333332e-05, "loss": 0.0115, "step": 12199 }, { "epoch": 13.46604086140254, "grad_norm": 0.21093596518039703, "learning_rate": 2.9268e-05, "loss": 0.0136, "step": 12200 }, { "epoch": 13.467145223633352, "grad_norm": 0.2925276458263397, "learning_rate": 2.9267666666666664e-05, "loss": 0.0085, "step": 12201 }, { "epoch": 13.468249585864163, "grad_norm": 0.21796293556690216, "learning_rate": 2.9267333333333334e-05, "loss": 0.0235, "step": 12202 }, { "epoch": 13.469353948094975, "grad_norm": 0.13916298747062683, "learning_rate": 2.9267000000000003e-05, "loss": 0.0059, "step": 12203 }, { "epoch": 13.470458310325787, "grad_norm": 0.20151183009147644, "learning_rate": 2.9266666666666665e-05, "loss": 0.0258, "step": 12204 }, { "epoch": 13.4715626725566, "grad_norm": 0.24110548198223114, "learning_rate": 2.9266333333333335e-05, "loss": 0.011, "step": 12205 }, { "epoch": 13.47266703478741, "grad_norm": 0.27819377183914185, "learning_rate": 2.9266e-05, "loss": 0.015, "step": 12206 }, { "epoch": 13.473771397018222, "grad_norm": 0.16856572031974792, "learning_rate": 2.9265666666666667e-05, "loss": 0.0091, "step": 12207 }, { "epoch": 13.474875759249034, "grad_norm": 0.3618761897087097, "learning_rate": 2.9265333333333333e-05, "loss": 0.0113, "step": 12208 }, { "epoch": 13.475980121479845, "grad_norm": 0.21770262718200684, "learning_rate": 2.9265000000000002e-05, "loss": 0.0094, "step": 12209 }, { "epoch": 13.477084483710657, "grad_norm": 0.26193925738334656, 
"learning_rate": 2.9264666666666668e-05, "loss": 0.0121, "step": 12210 }, { "epoch": 13.47818884594147, "grad_norm": 0.23657429218292236, "learning_rate": 2.9264333333333334e-05, "loss": 0.0125, "step": 12211 }, { "epoch": 13.47929320817228, "grad_norm": 0.2324017882347107, "learning_rate": 2.9264000000000003e-05, "loss": 0.0077, "step": 12212 }, { "epoch": 13.480397570403092, "grad_norm": 0.3550870418548584, "learning_rate": 2.9263666666666666e-05, "loss": 0.0157, "step": 12213 }, { "epoch": 13.481501932633904, "grad_norm": 0.4922765791416168, "learning_rate": 2.9263333333333335e-05, "loss": 0.0231, "step": 12214 }, { "epoch": 13.482606294864716, "grad_norm": 0.25951439142227173, "learning_rate": 2.9263e-05, "loss": 0.0117, "step": 12215 }, { "epoch": 13.483710657095527, "grad_norm": 0.38570070266723633, "learning_rate": 2.9262666666666667e-05, "loss": 0.0154, "step": 12216 }, { "epoch": 13.484815019326339, "grad_norm": 0.30815884470939636, "learning_rate": 2.9262333333333333e-05, "loss": 0.0151, "step": 12217 }, { "epoch": 13.485919381557151, "grad_norm": 0.1584949642419815, "learning_rate": 2.9262000000000002e-05, "loss": 0.0053, "step": 12218 }, { "epoch": 13.487023743787962, "grad_norm": 0.25792714953422546, "learning_rate": 2.9261666666666668e-05, "loss": 0.0188, "step": 12219 }, { "epoch": 13.488128106018774, "grad_norm": 0.22965839505195618, "learning_rate": 2.9261333333333334e-05, "loss": 0.0096, "step": 12220 }, { "epoch": 13.489232468249586, "grad_norm": 0.20005978643894196, "learning_rate": 2.9261000000000003e-05, "loss": 0.0117, "step": 12221 }, { "epoch": 13.490336830480398, "grad_norm": 0.4149944484233856, "learning_rate": 2.9260666666666666e-05, "loss": 0.0107, "step": 12222 }, { "epoch": 13.491441192711209, "grad_norm": 0.27789491415023804, "learning_rate": 2.9260333333333335e-05, "loss": 0.009, "step": 12223 }, { "epoch": 13.492545554942021, "grad_norm": 0.5239988565444946, "learning_rate": 2.926e-05, "loss": 0.0182, "step": 12224 }, { "epoch": 
13.493649917172833, "grad_norm": 0.17326028645038605, "learning_rate": 2.9259666666666667e-05, "loss": 0.0089, "step": 12225 }, { "epoch": 13.494754279403644, "grad_norm": 0.406032532453537, "learning_rate": 2.9259333333333333e-05, "loss": 0.0338, "step": 12226 }, { "epoch": 13.495858641634456, "grad_norm": 0.17356999218463898, "learning_rate": 2.9259e-05, "loss": 0.008, "step": 12227 }, { "epoch": 13.496963003865268, "grad_norm": 0.4378325641155243, "learning_rate": 2.9258666666666668e-05, "loss": 0.013, "step": 12228 }, { "epoch": 13.49806736609608, "grad_norm": 0.7344303131103516, "learning_rate": 2.9258333333333334e-05, "loss": 0.1978, "step": 12229 }, { "epoch": 13.49917172832689, "grad_norm": 0.8292389512062073, "learning_rate": 2.9258e-05, "loss": 0.2199, "step": 12230 }, { "epoch": 13.500276090557703, "grad_norm": 0.5520907640457153, "learning_rate": 2.9257666666666666e-05, "loss": 0.1519, "step": 12231 }, { "epoch": 13.501380452788515, "grad_norm": 0.5422305464744568, "learning_rate": 2.9257333333333335e-05, "loss": 0.0701, "step": 12232 }, { "epoch": 13.502484815019326, "grad_norm": 0.5734238624572754, "learning_rate": 2.9256999999999998e-05, "loss": 0.0828, "step": 12233 }, { "epoch": 13.503589177250138, "grad_norm": 0.5776795744895935, "learning_rate": 2.9256666666666667e-05, "loss": 0.0926, "step": 12234 }, { "epoch": 13.50469353948095, "grad_norm": 0.3300119936466217, "learning_rate": 2.9256333333333337e-05, "loss": 0.0377, "step": 12235 }, { "epoch": 13.50579790171176, "grad_norm": 0.31248438358306885, "learning_rate": 2.9256e-05, "loss": 0.0612, "step": 12236 }, { "epoch": 13.506902263942573, "grad_norm": 0.33071625232696533, "learning_rate": 2.925566666666667e-05, "loss": 0.0341, "step": 12237 }, { "epoch": 13.508006626173385, "grad_norm": 0.2650607228279114, "learning_rate": 2.9255333333333334e-05, "loss": 0.0299, "step": 12238 }, { "epoch": 13.509110988404197, "grad_norm": 0.516593873500824, "learning_rate": 2.9255e-05, "loss": 0.0155, "step": 
12239 }, { "epoch": 13.510215350635008, "grad_norm": 0.22445376217365265, "learning_rate": 2.9254666666666666e-05, "loss": 0.0103, "step": 12240 }, { "epoch": 13.51131971286582, "grad_norm": 0.15832772850990295, "learning_rate": 2.9254333333333336e-05, "loss": 0.0157, "step": 12241 }, { "epoch": 13.512424075096632, "grad_norm": 0.2791604995727539, "learning_rate": 2.9253999999999998e-05, "loss": 0.019, "step": 12242 }, { "epoch": 13.513528437327443, "grad_norm": 0.11863846331834793, "learning_rate": 2.9253666666666667e-05, "loss": 0.0061, "step": 12243 }, { "epoch": 13.514632799558255, "grad_norm": 0.1294175684452057, "learning_rate": 2.9253333333333337e-05, "loss": 0.0079, "step": 12244 }, { "epoch": 13.515737161789067, "grad_norm": 0.1412268579006195, "learning_rate": 2.9253e-05, "loss": 0.0059, "step": 12245 }, { "epoch": 13.516841524019878, "grad_norm": 0.2073678821325302, "learning_rate": 2.925266666666667e-05, "loss": 0.015, "step": 12246 }, { "epoch": 13.51794588625069, "grad_norm": 0.1848420649766922, "learning_rate": 2.9252333333333335e-05, "loss": 0.0152, "step": 12247 }, { "epoch": 13.519050248481502, "grad_norm": 0.20021474361419678, "learning_rate": 2.9252e-05, "loss": 0.0091, "step": 12248 }, { "epoch": 13.520154610712314, "grad_norm": 0.18541108071804047, "learning_rate": 2.9251666666666666e-05, "loss": 0.0082, "step": 12249 }, { "epoch": 13.521258972943125, "grad_norm": 0.2027769386768341, "learning_rate": 2.9251333333333336e-05, "loss": 0.0243, "step": 12250 }, { "epoch": 13.522363335173937, "grad_norm": 0.583050549030304, "learning_rate": 2.9250999999999998e-05, "loss": 0.0145, "step": 12251 }, { "epoch": 13.52346769740475, "grad_norm": 0.1910085678100586, "learning_rate": 2.9250666666666668e-05, "loss": 0.0073, "step": 12252 }, { "epoch": 13.52457205963556, "grad_norm": 0.1435173898935318, "learning_rate": 2.9250333333333337e-05, "loss": 0.0082, "step": 12253 }, { "epoch": 13.525676421866372, "grad_norm": 0.13998937606811523, "learning_rate": 
2.925e-05, "loss": 0.0065, "step": 12254 }, { "epoch": 13.526780784097184, "grad_norm": 0.2855049669742584, "learning_rate": 2.924966666666667e-05, "loss": 0.0059, "step": 12255 }, { "epoch": 13.527885146327996, "grad_norm": 0.27275922894477844, "learning_rate": 2.9249333333333335e-05, "loss": 0.0065, "step": 12256 }, { "epoch": 13.528989508558807, "grad_norm": 0.1524810492992401, "learning_rate": 2.9249e-05, "loss": 0.0102, "step": 12257 }, { "epoch": 13.530093870789619, "grad_norm": 0.28441473841667175, "learning_rate": 2.9248666666666667e-05, "loss": 0.0205, "step": 12258 }, { "epoch": 13.531198233020431, "grad_norm": 0.3237314224243164, "learning_rate": 2.9248333333333333e-05, "loss": 0.0121, "step": 12259 }, { "epoch": 13.532302595251242, "grad_norm": 0.23203326761722565, "learning_rate": 2.9248000000000002e-05, "loss": 0.0083, "step": 12260 }, { "epoch": 13.533406957482054, "grad_norm": 0.20200657844543457, "learning_rate": 2.9247666666666668e-05, "loss": 0.0106, "step": 12261 }, { "epoch": 13.534511319712866, "grad_norm": 0.4533126652240753, "learning_rate": 2.9247333333333334e-05, "loss": 0.0096, "step": 12262 }, { "epoch": 13.535615681943678, "grad_norm": 0.46594464778900146, "learning_rate": 2.9247e-05, "loss": 0.0106, "step": 12263 }, { "epoch": 13.536720044174489, "grad_norm": 0.45991817116737366, "learning_rate": 2.924666666666667e-05, "loss": 0.0092, "step": 12264 }, { "epoch": 13.537824406405301, "grad_norm": 0.30821335315704346, "learning_rate": 2.924633333333333e-05, "loss": 0.0129, "step": 12265 }, { "epoch": 13.538928768636113, "grad_norm": 0.3257552683353424, "learning_rate": 2.9246e-05, "loss": 0.013, "step": 12266 }, { "epoch": 13.540033130866924, "grad_norm": 0.2511192560195923, "learning_rate": 2.9245666666666667e-05, "loss": 0.0086, "step": 12267 }, { "epoch": 13.541137493097736, "grad_norm": 0.2673018276691437, "learning_rate": 2.9245333333333333e-05, "loss": 0.0089, "step": 12268 }, { "epoch": 13.542241855328548, "grad_norm": 
0.12850260734558105, "learning_rate": 2.9245000000000002e-05, "loss": 0.0064, "step": 12269 }, { "epoch": 13.54334621755936, "grad_norm": 0.1358858346939087, "learning_rate": 2.9244666666666668e-05, "loss": 0.0063, "step": 12270 }, { "epoch": 13.54445057979017, "grad_norm": 0.426060289144516, "learning_rate": 2.9244333333333334e-05, "loss": 0.0131, "step": 12271 }, { "epoch": 13.545554942020983, "grad_norm": 0.3825324773788452, "learning_rate": 2.9244e-05, "loss": 0.0146, "step": 12272 }, { "epoch": 13.546659304251795, "grad_norm": 0.990183413028717, "learning_rate": 2.924366666666667e-05, "loss": 0.016, "step": 12273 }, { "epoch": 13.547763666482606, "grad_norm": 0.42482200264930725, "learning_rate": 2.9243333333333332e-05, "loss": 0.0162, "step": 12274 }, { "epoch": 13.548868028713418, "grad_norm": 0.38853156566619873, "learning_rate": 2.9243e-05, "loss": 0.0106, "step": 12275 }, { "epoch": 13.54997239094423, "grad_norm": 0.4161333739757538, "learning_rate": 2.9242666666666667e-05, "loss": 0.0095, "step": 12276 }, { "epoch": 13.55107675317504, "grad_norm": 0.48515474796295166, "learning_rate": 2.9242333333333333e-05, "loss": 0.0087, "step": 12277 }, { "epoch": 13.552181115405853, "grad_norm": 0.5094115138053894, "learning_rate": 2.9242000000000002e-05, "loss": 0.0152, "step": 12278 }, { "epoch": 13.553285477636665, "grad_norm": 0.5970699787139893, "learning_rate": 2.9241666666666668e-05, "loss": 0.1553, "step": 12279 }, { "epoch": 13.554389839867477, "grad_norm": 0.5159298181533813, "learning_rate": 2.9241333333333334e-05, "loss": 0.2074, "step": 12280 }, { "epoch": 13.555494202098288, "grad_norm": 0.5874344110488892, "learning_rate": 2.9241e-05, "loss": 0.1024, "step": 12281 }, { "epoch": 13.5565985643291, "grad_norm": 0.47476062178611755, "learning_rate": 2.924066666666667e-05, "loss": 0.1136, "step": 12282 }, { "epoch": 13.557702926559912, "grad_norm": 0.3554562032222748, "learning_rate": 2.9240333333333332e-05, "loss": 0.0826, "step": 12283 }, { "epoch": 
13.558807288790723, "grad_norm": 0.4351070821285248, "learning_rate": 2.924e-05, "loss": 0.0852, "step": 12284 }, { "epoch": 13.559911651021535, "grad_norm": 0.35820668935775757, "learning_rate": 2.923966666666667e-05, "loss": 0.0457, "step": 12285 }, { "epoch": 13.561016013252347, "grad_norm": 0.3620182275772095, "learning_rate": 2.9239333333333333e-05, "loss": 0.0467, "step": 12286 }, { "epoch": 13.562120375483158, "grad_norm": 0.4678715765476227, "learning_rate": 2.9239000000000002e-05, "loss": 0.0279, "step": 12287 }, { "epoch": 13.56322473771397, "grad_norm": 0.5156887173652649, "learning_rate": 2.9238666666666665e-05, "loss": 0.018, "step": 12288 }, { "epoch": 13.564329099944782, "grad_norm": 0.7024457454681396, "learning_rate": 2.9238333333333334e-05, "loss": 0.0177, "step": 12289 }, { "epoch": 13.565433462175594, "grad_norm": 0.40830549597740173, "learning_rate": 2.9238e-05, "loss": 0.0206, "step": 12290 }, { "epoch": 13.566537824406405, "grad_norm": 0.18099011480808258, "learning_rate": 2.9237666666666666e-05, "loss": 0.0183, "step": 12291 }, { "epoch": 13.567642186637217, "grad_norm": 0.23238299787044525, "learning_rate": 2.9237333333333332e-05, "loss": 0.0349, "step": 12292 }, { "epoch": 13.56874654886803, "grad_norm": 0.16254131495952606, "learning_rate": 2.9237e-05, "loss": 0.0086, "step": 12293 }, { "epoch": 13.56985091109884, "grad_norm": 0.2623891830444336, "learning_rate": 2.9236666666666667e-05, "loss": 0.0407, "step": 12294 }, { "epoch": 13.570955273329652, "grad_norm": 0.19342774152755737, "learning_rate": 2.9236333333333333e-05, "loss": 0.0081, "step": 12295 }, { "epoch": 13.572059635560464, "grad_norm": 0.20737503468990326, "learning_rate": 2.9236000000000003e-05, "loss": 0.0058, "step": 12296 }, { "epoch": 13.573163997791276, "grad_norm": 0.4354366958141327, "learning_rate": 2.9235666666666665e-05, "loss": 0.0109, "step": 12297 }, { "epoch": 13.574268360022087, "grad_norm": 0.20758703351020813, "learning_rate": 2.9235333333333335e-05, "loss": 
0.0092, "step": 12298 }, { "epoch": 13.575372722252899, "grad_norm": 0.16689527034759521, "learning_rate": 2.9235e-05, "loss": 0.009, "step": 12299 }, { "epoch": 13.576477084483711, "grad_norm": 0.4608910083770752, "learning_rate": 2.9234666666666666e-05, "loss": 0.0108, "step": 12300 }, { "epoch": 13.577581446714522, "grad_norm": 0.27001774311065674, "learning_rate": 2.9234333333333332e-05, "loss": 0.0102, "step": 12301 }, { "epoch": 13.578685808945334, "grad_norm": 0.15307727456092834, "learning_rate": 2.9234e-05, "loss": 0.0059, "step": 12302 }, { "epoch": 13.579790171176146, "grad_norm": 0.2804968059062958, "learning_rate": 2.9233666666666668e-05, "loss": 0.0067, "step": 12303 }, { "epoch": 13.580894533406958, "grad_norm": 0.2056579440832138, "learning_rate": 2.9233333333333334e-05, "loss": 0.0085, "step": 12304 }, { "epoch": 13.581998895637769, "grad_norm": 0.24101462960243225, "learning_rate": 2.9233000000000003e-05, "loss": 0.0131, "step": 12305 }, { "epoch": 13.583103257868581, "grad_norm": 0.5438271164894104, "learning_rate": 2.9232666666666665e-05, "loss": 0.0149, "step": 12306 }, { "epoch": 13.584207620099393, "grad_norm": 0.4592699110507965, "learning_rate": 2.9232333333333335e-05, "loss": 0.0115, "step": 12307 }, { "epoch": 13.585311982330204, "grad_norm": 0.31735655665397644, "learning_rate": 2.9232e-05, "loss": 0.0151, "step": 12308 }, { "epoch": 13.586416344561016, "grad_norm": 0.3796796500682831, "learning_rate": 2.9231666666666667e-05, "loss": 0.0167, "step": 12309 }, { "epoch": 13.587520706791828, "grad_norm": 0.3806895613670349, "learning_rate": 2.9231333333333336e-05, "loss": 0.0124, "step": 12310 }, { "epoch": 13.588625069022639, "grad_norm": 0.34960800409317017, "learning_rate": 2.9231000000000002e-05, "loss": 0.0172, "step": 12311 }, { "epoch": 13.589729431253451, "grad_norm": 0.4056326746940613, "learning_rate": 2.9230666666666668e-05, "loss": 0.0102, "step": 12312 }, { "epoch": 13.590833793484263, "grad_norm": 0.34990790486335754, 
"learning_rate": 2.9230333333333334e-05, "loss": 0.0176, "step": 12313 }, { "epoch": 13.591938155715075, "grad_norm": 0.19437192380428314, "learning_rate": 2.9230000000000003e-05, "loss": 0.0042, "step": 12314 }, { "epoch": 13.593042517945886, "grad_norm": 0.32591497898101807, "learning_rate": 2.9229666666666666e-05, "loss": 0.0103, "step": 12315 }, { "epoch": 13.594146880176698, "grad_norm": 0.35546112060546875, "learning_rate": 2.9229333333333335e-05, "loss": 0.0092, "step": 12316 }, { "epoch": 13.59525124240751, "grad_norm": 0.2128494679927826, "learning_rate": 2.9229e-05, "loss": 0.0084, "step": 12317 }, { "epoch": 13.59635560463832, "grad_norm": 0.24944940209388733, "learning_rate": 2.9228666666666667e-05, "loss": 0.0112, "step": 12318 }, { "epoch": 13.597459966869133, "grad_norm": 0.42537829279899597, "learning_rate": 2.9228333333333336e-05, "loss": 0.0138, "step": 12319 }, { "epoch": 13.598564329099945, "grad_norm": 0.15401919186115265, "learning_rate": 2.9228e-05, "loss": 0.0079, "step": 12320 }, { "epoch": 13.599668691330756, "grad_norm": 0.8071160316467285, "learning_rate": 2.9227666666666668e-05, "loss": 0.0241, "step": 12321 }, { "epoch": 13.600773053561568, "grad_norm": 0.29665103554725647, "learning_rate": 2.9227333333333334e-05, "loss": 0.0068, "step": 12322 }, { "epoch": 13.60187741579238, "grad_norm": 0.3679485023021698, "learning_rate": 2.9227e-05, "loss": 0.0138, "step": 12323 }, { "epoch": 13.602981778023192, "grad_norm": 0.3257368206977844, "learning_rate": 2.9226666666666666e-05, "loss": 0.0151, "step": 12324 }, { "epoch": 13.604086140254003, "grad_norm": 0.3894168436527252, "learning_rate": 2.9226333333333335e-05, "loss": 0.0109, "step": 12325 }, { "epoch": 13.605190502484815, "grad_norm": 1.0340591669082642, "learning_rate": 2.9226e-05, "loss": 0.0225, "step": 12326 }, { "epoch": 13.606294864715627, "grad_norm": 0.44799289107322693, "learning_rate": 2.9225666666666667e-05, "loss": 0.0088, "step": 12327 }, { "epoch": 13.607399226946438, 
"grad_norm": 0.3248445689678192, "learning_rate": 2.9225333333333336e-05, "loss": 0.0068, "step": 12328 }, { "epoch": 13.60850358917725, "grad_norm": 0.748397171497345, "learning_rate": 2.9225e-05, "loss": 0.2181, "step": 12329 }, { "epoch": 13.609607951408062, "grad_norm": 0.5534512400627136, "learning_rate": 2.9224666666666668e-05, "loss": 0.1299, "step": 12330 }, { "epoch": 13.610712313638874, "grad_norm": 0.42073479294776917, "learning_rate": 2.9224333333333334e-05, "loss": 0.091, "step": 12331 }, { "epoch": 13.611816675869685, "grad_norm": 0.5733541250228882, "learning_rate": 2.9224e-05, "loss": 0.0837, "step": 12332 }, { "epoch": 13.612921038100497, "grad_norm": 1.3430715799331665, "learning_rate": 2.9223666666666666e-05, "loss": 0.1651, "step": 12333 }, { "epoch": 13.61402540033131, "grad_norm": 0.37235260009765625, "learning_rate": 2.9223333333333335e-05, "loss": 0.0957, "step": 12334 }, { "epoch": 13.61512976256212, "grad_norm": 0.38537880778312683, "learning_rate": 2.9223e-05, "loss": 0.0473, "step": 12335 }, { "epoch": 13.616234124792932, "grad_norm": 1.0610074996948242, "learning_rate": 2.9222666666666667e-05, "loss": 0.0498, "step": 12336 }, { "epoch": 13.617338487023744, "grad_norm": 0.2235056757926941, "learning_rate": 2.9222333333333337e-05, "loss": 0.0226, "step": 12337 }, { "epoch": 13.618442849254556, "grad_norm": 0.26441067457199097, "learning_rate": 2.9222e-05, "loss": 0.0249, "step": 12338 }, { "epoch": 13.619547211485367, "grad_norm": 0.3612912893295288, "learning_rate": 2.922166666666667e-05, "loss": 0.0172, "step": 12339 }, { "epoch": 13.620651573716179, "grad_norm": 0.3564775586128235, "learning_rate": 2.9221333333333334e-05, "loss": 0.0213, "step": 12340 }, { "epoch": 13.621755935946991, "grad_norm": 0.20723660290241241, "learning_rate": 2.9221e-05, "loss": 0.0117, "step": 12341 }, { "epoch": 13.622860298177802, "grad_norm": 0.33018776774406433, "learning_rate": 2.9220666666666666e-05, "loss": 0.0193, "step": 12342 }, { "epoch": 
13.623964660408614, "grad_norm": 0.14276158809661865, "learning_rate": 2.9220333333333336e-05, "loss": 0.0093, "step": 12343 }, { "epoch": 13.625069022639426, "grad_norm": 0.3246363401412964, "learning_rate": 2.922e-05, "loss": 0.0151, "step": 12344 }, { "epoch": 13.626173384870237, "grad_norm": 0.21015290915966034, "learning_rate": 2.9219666666666667e-05, "loss": 0.0203, "step": 12345 }, { "epoch": 13.627277747101049, "grad_norm": 0.6125131249427795, "learning_rate": 2.9219333333333337e-05, "loss": 0.0243, "step": 12346 }, { "epoch": 13.628382109331861, "grad_norm": 0.10246928781270981, "learning_rate": 2.9219e-05, "loss": 0.0057, "step": 12347 }, { "epoch": 13.629486471562673, "grad_norm": 0.643257200717926, "learning_rate": 2.921866666666667e-05, "loss": 0.0154, "step": 12348 }, { "epoch": 13.630590833793484, "grad_norm": 0.30915242433547974, "learning_rate": 2.921833333333333e-05, "loss": 0.0168, "step": 12349 }, { "epoch": 13.631695196024296, "grad_norm": 0.1790778785943985, "learning_rate": 2.9218e-05, "loss": 0.0089, "step": 12350 }, { "epoch": 13.632799558255108, "grad_norm": 0.4400085508823395, "learning_rate": 2.9217666666666666e-05, "loss": 0.0228, "step": 12351 }, { "epoch": 13.633903920485919, "grad_norm": 0.11367548257112503, "learning_rate": 2.9217333333333332e-05, "loss": 0.0063, "step": 12352 }, { "epoch": 13.635008282716731, "grad_norm": 0.41130152344703674, "learning_rate": 2.9217e-05, "loss": 0.0071, "step": 12353 }, { "epoch": 13.636112644947543, "grad_norm": 0.27122777700424194, "learning_rate": 2.9216666666666668e-05, "loss": 0.01, "step": 12354 }, { "epoch": 13.637217007178354, "grad_norm": 0.12444300949573517, "learning_rate": 2.9216333333333334e-05, "loss": 0.0071, "step": 12355 }, { "epoch": 13.638321369409166, "grad_norm": 0.24681048095226288, "learning_rate": 2.9216e-05, "loss": 0.013, "step": 12356 }, { "epoch": 13.639425731639978, "grad_norm": 0.16621066629886627, "learning_rate": 2.921566666666667e-05, "loss": 0.0077, "step": 12357 
}, { "epoch": 13.64053009387079, "grad_norm": 0.20442433655261993, "learning_rate": 2.921533333333333e-05, "loss": 0.013, "step": 12358 }, { "epoch": 13.6416344561016, "grad_norm": 0.16498443484306335, "learning_rate": 2.9215e-05, "loss": 0.0107, "step": 12359 }, { "epoch": 13.642738818332413, "grad_norm": 0.1877904236316681, "learning_rate": 2.921466666666667e-05, "loss": 0.0096, "step": 12360 }, { "epoch": 13.643843180563225, "grad_norm": 0.20962540805339813, "learning_rate": 2.9214333333333333e-05, "loss": 0.0091, "step": 12361 }, { "epoch": 13.644947542794036, "grad_norm": 0.6734211444854736, "learning_rate": 2.9214000000000002e-05, "loss": 0.0187, "step": 12362 }, { "epoch": 13.646051905024848, "grad_norm": 0.4114606976509094, "learning_rate": 2.9213666666666668e-05, "loss": 0.0121, "step": 12363 }, { "epoch": 13.64715626725566, "grad_norm": 0.09688492119312286, "learning_rate": 2.9213333333333334e-05, "loss": 0.004, "step": 12364 }, { "epoch": 13.648260629486472, "grad_norm": 0.30000439286231995, "learning_rate": 2.9213e-05, "loss": 0.0097, "step": 12365 }, { "epoch": 13.649364991717283, "grad_norm": 0.22480247914791107, "learning_rate": 2.921266666666667e-05, "loss": 0.0138, "step": 12366 }, { "epoch": 13.650469353948095, "grad_norm": 0.25361499190330505, "learning_rate": 2.921233333333333e-05, "loss": 0.0093, "step": 12367 }, { "epoch": 13.651573716178907, "grad_norm": 0.29550284147262573, "learning_rate": 2.9212e-05, "loss": 0.0118, "step": 12368 }, { "epoch": 13.652678078409718, "grad_norm": 0.24995499849319458, "learning_rate": 2.921166666666667e-05, "loss": 0.015, "step": 12369 }, { "epoch": 13.65378244064053, "grad_norm": 0.28250160813331604, "learning_rate": 2.9211333333333333e-05, "loss": 0.0127, "step": 12370 }, { "epoch": 13.654886802871342, "grad_norm": 0.2133120447397232, "learning_rate": 2.9211000000000002e-05, "loss": 0.0089, "step": 12371 }, { "epoch": 13.655991165102154, "grad_norm": 0.6312498450279236, "learning_rate": 
2.9210666666666668e-05, "loss": 0.0214, "step": 12372 }, { "epoch": 13.657095527332965, "grad_norm": 0.383210688829422, "learning_rate": 2.9210333333333334e-05, "loss": 0.0129, "step": 12373 }, { "epoch": 13.658199889563777, "grad_norm": 0.1594087928533554, "learning_rate": 2.921e-05, "loss": 0.0083, "step": 12374 }, { "epoch": 13.65930425179459, "grad_norm": 0.3139518201351166, "learning_rate": 2.920966666666667e-05, "loss": 0.0102, "step": 12375 }, { "epoch": 13.6604086140254, "grad_norm": 0.5287925601005554, "learning_rate": 2.9209333333333335e-05, "loss": 0.0164, "step": 12376 }, { "epoch": 13.661512976256212, "grad_norm": 0.3814643621444702, "learning_rate": 2.9209e-05, "loss": 0.0113, "step": 12377 }, { "epoch": 13.662617338487024, "grad_norm": 0.2711291015148163, "learning_rate": 2.9208666666666667e-05, "loss": 0.0056, "step": 12378 }, { "epoch": 13.663721700717835, "grad_norm": 0.8377645611763, "learning_rate": 2.9208333333333333e-05, "loss": 0.2604, "step": 12379 }, { "epoch": 13.664826062948647, "grad_norm": 0.5059998035430908, "learning_rate": 2.9208000000000002e-05, "loss": 0.2043, "step": 12380 }, { "epoch": 13.665930425179459, "grad_norm": 0.567876935005188, "learning_rate": 2.9207666666666665e-05, "loss": 0.1529, "step": 12381 }, { "epoch": 13.667034787410271, "grad_norm": 0.43705663084983826, "learning_rate": 2.9207333333333334e-05, "loss": 0.0755, "step": 12382 }, { "epoch": 13.668139149641082, "grad_norm": 0.48854315280914307, "learning_rate": 2.9207e-05, "loss": 0.1014, "step": 12383 }, { "epoch": 13.669243511871894, "grad_norm": 0.4506513774394989, "learning_rate": 2.9206666666666666e-05, "loss": 0.0499, "step": 12384 }, { "epoch": 13.670347874102706, "grad_norm": 0.4028024673461914, "learning_rate": 2.9206333333333335e-05, "loss": 0.0485, "step": 12385 }, { "epoch": 13.671452236333517, "grad_norm": 0.31963589787483215, "learning_rate": 2.9206e-05, "loss": 0.0654, "step": 12386 }, { "epoch": 13.672556598564329, "grad_norm": 0.3915565311908722, 
"learning_rate": 2.9205666666666667e-05, "loss": 0.0287, "step": 12387 }, { "epoch": 13.673660960795141, "grad_norm": 0.21691611409187317, "learning_rate": 2.9205333333333333e-05, "loss": 0.0227, "step": 12388 }, { "epoch": 13.674765323025952, "grad_norm": 0.39395976066589355, "learning_rate": 2.9205000000000002e-05, "loss": 0.0128, "step": 12389 }, { "epoch": 13.675869685256764, "grad_norm": 0.275710791349411, "learning_rate": 2.9204666666666665e-05, "loss": 0.0364, "step": 12390 }, { "epoch": 13.676974047487576, "grad_norm": 0.18129359185695648, "learning_rate": 2.9204333333333334e-05, "loss": 0.0117, "step": 12391 }, { "epoch": 13.678078409718388, "grad_norm": 0.298034131526947, "learning_rate": 2.9204e-05, "loss": 0.0094, "step": 12392 }, { "epoch": 13.679182771949199, "grad_norm": 0.16865618526935577, "learning_rate": 2.9203666666666666e-05, "loss": 0.0064, "step": 12393 }, { "epoch": 13.680287134180011, "grad_norm": 0.621229887008667, "learning_rate": 2.9203333333333336e-05, "loss": 0.0365, "step": 12394 }, { "epoch": 13.681391496410823, "grad_norm": 0.33173874020576477, "learning_rate": 2.9203e-05, "loss": 0.0164, "step": 12395 }, { "epoch": 13.682495858641634, "grad_norm": 0.21009178459644318, "learning_rate": 2.9202666666666667e-05, "loss": 0.0086, "step": 12396 }, { "epoch": 13.683600220872446, "grad_norm": 0.7535565495491028, "learning_rate": 2.9202333333333333e-05, "loss": 0.0184, "step": 12397 }, { "epoch": 13.684704583103258, "grad_norm": 0.1865282952785492, "learning_rate": 2.9202000000000003e-05, "loss": 0.0073, "step": 12398 }, { "epoch": 13.68580894533407, "grad_norm": 0.21419793367385864, "learning_rate": 2.9201666666666665e-05, "loss": 0.0096, "step": 12399 }, { "epoch": 13.68691330756488, "grad_norm": 0.17116715013980865, "learning_rate": 2.9201333333333335e-05, "loss": 0.0096, "step": 12400 }, { "epoch": 13.688017669795693, "grad_norm": 0.17516325414180756, "learning_rate": 2.9201e-05, "loss": 0.0073, "step": 12401 }, { "epoch": 
13.689122032026505, "grad_norm": 0.21547210216522217, "learning_rate": 2.9200666666666666e-05, "loss": 0.011, "step": 12402 }, { "epoch": 13.690226394257316, "grad_norm": 0.44497811794281006, "learning_rate": 2.9200333333333336e-05, "loss": 0.0186, "step": 12403 }, { "epoch": 13.691330756488128, "grad_norm": 0.37906646728515625, "learning_rate": 2.92e-05, "loss": 0.0117, "step": 12404 }, { "epoch": 13.69243511871894, "grad_norm": 0.1512080281972885, "learning_rate": 2.9199666666666668e-05, "loss": 0.0078, "step": 12405 }, { "epoch": 13.693539480949752, "grad_norm": 0.25043654441833496, "learning_rate": 2.9199333333333334e-05, "loss": 0.0107, "step": 12406 }, { "epoch": 13.694643843180563, "grad_norm": 0.36518996953964233, "learning_rate": 2.9199000000000003e-05, "loss": 0.0127, "step": 12407 }, { "epoch": 13.695748205411375, "grad_norm": 0.21268650889396667, "learning_rate": 2.9198666666666665e-05, "loss": 0.0126, "step": 12408 }, { "epoch": 13.696852567642187, "grad_norm": 0.1014595553278923, "learning_rate": 2.9198333333333335e-05, "loss": 0.0049, "step": 12409 }, { "epoch": 13.697956929872998, "grad_norm": 0.14407244324684143, "learning_rate": 2.9198e-05, "loss": 0.0079, "step": 12410 }, { "epoch": 13.69906129210381, "grad_norm": 0.3434520959854126, "learning_rate": 2.9197666666666667e-05, "loss": 0.013, "step": 12411 }, { "epoch": 13.700165654334622, "grad_norm": 0.20921163260936737, "learning_rate": 2.9197333333333336e-05, "loss": 0.0118, "step": 12412 }, { "epoch": 13.701270016565433, "grad_norm": 0.15637272596359253, "learning_rate": 2.9197e-05, "loss": 0.0039, "step": 12413 }, { "epoch": 13.702374378796245, "grad_norm": 0.4661780595779419, "learning_rate": 2.9196666666666668e-05, "loss": 0.0105, "step": 12414 }, { "epoch": 13.703478741027057, "grad_norm": 0.19134590029716492, "learning_rate": 2.9196333333333334e-05, "loss": 0.0172, "step": 12415 }, { "epoch": 13.70458310325787, "grad_norm": 0.5000718235969543, "learning_rate": 2.9196e-05, "loss": 0.0169, 
"step": 12416 }, { "epoch": 13.70568746548868, "grad_norm": 0.2177535593509674, "learning_rate": 2.9195666666666666e-05, "loss": 0.0081, "step": 12417 }, { "epoch": 13.706791827719492, "grad_norm": 0.23680859804153442, "learning_rate": 2.9195333333333335e-05, "loss": 0.0084, "step": 12418 }, { "epoch": 13.707896189950304, "grad_norm": 0.301148921251297, "learning_rate": 2.9195e-05, "loss": 0.0131, "step": 12419 }, { "epoch": 13.709000552181115, "grad_norm": 0.47184252738952637, "learning_rate": 2.9194666666666667e-05, "loss": 0.0312, "step": 12420 }, { "epoch": 13.710104914411927, "grad_norm": 0.08383885025978088, "learning_rate": 2.9194333333333336e-05, "loss": 0.0032, "step": 12421 }, { "epoch": 13.71120927664274, "grad_norm": 0.4577709138393402, "learning_rate": 2.9194e-05, "loss": 0.011, "step": 12422 }, { "epoch": 13.71231363887355, "grad_norm": 0.33356973528862, "learning_rate": 2.9193666666666668e-05, "loss": 0.0099, "step": 12423 }, { "epoch": 13.713418001104362, "grad_norm": 0.509777843952179, "learning_rate": 2.9193333333333334e-05, "loss": 0.0159, "step": 12424 }, { "epoch": 13.714522363335174, "grad_norm": 0.18450798094272614, "learning_rate": 2.9193e-05, "loss": 0.0077, "step": 12425 }, { "epoch": 13.715626725565986, "grad_norm": 0.14302699267864227, "learning_rate": 2.919266666666667e-05, "loss": 0.0044, "step": 12426 }, { "epoch": 13.716731087796797, "grad_norm": 0.3203897774219513, "learning_rate": 2.9192333333333335e-05, "loss": 0.0126, "step": 12427 }, { "epoch": 13.717835450027609, "grad_norm": 0.483228862285614, "learning_rate": 2.9192e-05, "loss": 0.0202, "step": 12428 }, { "epoch": 13.718939812258421, "grad_norm": 0.6575841903686523, "learning_rate": 2.9191666666666667e-05, "loss": 0.1508, "step": 12429 }, { "epoch": 13.720044174489232, "grad_norm": 0.6298096179962158, "learning_rate": 2.9191333333333336e-05, "loss": 0.1041, "step": 12430 }, { "epoch": 13.721148536720044, "grad_norm": 0.5194991827011108, "learning_rate": 2.9191e-05, "loss": 
0.1063, "step": 12431 }, { "epoch": 13.722252898950856, "grad_norm": 0.47742125391960144, "learning_rate": 2.9190666666666668e-05, "loss": 0.101, "step": 12432 }, { "epoch": 13.723357261181668, "grad_norm": 0.7415580153465271, "learning_rate": 2.9190333333333334e-05, "loss": 0.0921, "step": 12433 }, { "epoch": 13.724461623412479, "grad_norm": 0.2919398546218872, "learning_rate": 2.919e-05, "loss": 0.0533, "step": 12434 }, { "epoch": 13.725565985643291, "grad_norm": 0.36764392256736755, "learning_rate": 2.918966666666667e-05, "loss": 0.0626, "step": 12435 }, { "epoch": 13.726670347874103, "grad_norm": 0.4363009035587311, "learning_rate": 2.9189333333333335e-05, "loss": 0.1037, "step": 12436 }, { "epoch": 13.727774710104914, "grad_norm": 0.723171055316925, "learning_rate": 2.9189e-05, "loss": 0.0374, "step": 12437 }, { "epoch": 13.728879072335726, "grad_norm": 0.20564769208431244, "learning_rate": 2.9188666666666667e-05, "loss": 0.0202, "step": 12438 }, { "epoch": 13.729983434566538, "grad_norm": 0.22439484298229218, "learning_rate": 2.9188333333333333e-05, "loss": 0.0356, "step": 12439 }, { "epoch": 13.73108779679735, "grad_norm": 0.2706325352191925, "learning_rate": 2.9188e-05, "loss": 0.0146, "step": 12440 }, { "epoch": 13.73219215902816, "grad_norm": 0.2691904604434967, "learning_rate": 2.918766666666667e-05, "loss": 0.0117, "step": 12441 }, { "epoch": 13.733296521258973, "grad_norm": 0.23714672029018402, "learning_rate": 2.918733333333333e-05, "loss": 0.0142, "step": 12442 }, { "epoch": 13.734400883489785, "grad_norm": 0.16434745490550995, "learning_rate": 2.9187e-05, "loss": 0.0088, "step": 12443 }, { "epoch": 13.735505245720596, "grad_norm": 0.62846440076828, "learning_rate": 2.918666666666667e-05, "loss": 0.009, "step": 12444 }, { "epoch": 13.736609607951408, "grad_norm": 0.1660972237586975, "learning_rate": 2.9186333333333332e-05, "loss": 0.0188, "step": 12445 }, { "epoch": 13.73771397018222, "grad_norm": 0.3980516791343689, "learning_rate": 2.9186e-05, 
"loss": 0.0084, "step": 12446 }, { "epoch": 13.738818332413032, "grad_norm": 0.18820424377918243, "learning_rate": 2.9185666666666667e-05, "loss": 0.0203, "step": 12447 }, { "epoch": 13.739922694643843, "grad_norm": 0.36663180589675903, "learning_rate": 2.9185333333333333e-05, "loss": 0.0129, "step": 12448 }, { "epoch": 13.741027056874655, "grad_norm": 0.4245288670063019, "learning_rate": 2.9185e-05, "loss": 0.0199, "step": 12449 }, { "epoch": 13.742131419105467, "grad_norm": 0.2697848081588745, "learning_rate": 2.918466666666667e-05, "loss": 0.0191, "step": 12450 }, { "epoch": 13.743235781336278, "grad_norm": 0.24887652695178986, "learning_rate": 2.9184333333333335e-05, "loss": 0.0104, "step": 12451 }, { "epoch": 13.74434014356709, "grad_norm": 0.5825212001800537, "learning_rate": 2.9184e-05, "loss": 0.0143, "step": 12452 }, { "epoch": 13.745444505797902, "grad_norm": 0.2069297879934311, "learning_rate": 2.918366666666667e-05, "loss": 0.0082, "step": 12453 }, { "epoch": 13.746548868028713, "grad_norm": 0.21588453650474548, "learning_rate": 2.9183333333333332e-05, "loss": 0.0114, "step": 12454 }, { "epoch": 13.747653230259525, "grad_norm": 0.1519830971956253, "learning_rate": 2.9183e-05, "loss": 0.006, "step": 12455 }, { "epoch": 13.748757592490337, "grad_norm": 0.18608258664608002, "learning_rate": 2.9182666666666668e-05, "loss": 0.0059, "step": 12456 }, { "epoch": 13.74986195472115, "grad_norm": 0.1710113286972046, "learning_rate": 2.9182333333333334e-05, "loss": 0.0095, "step": 12457 }, { "epoch": 13.75096631695196, "grad_norm": 0.16542206704616547, "learning_rate": 2.9182e-05, "loss": 0.007, "step": 12458 }, { "epoch": 13.752070679182772, "grad_norm": 0.20616979897022247, "learning_rate": 2.918166666666667e-05, "loss": 0.0089, "step": 12459 }, { "epoch": 13.753175041413584, "grad_norm": 0.2470545470714569, "learning_rate": 2.9181333333333335e-05, "loss": 0.0111, "step": 12460 }, { "epoch": 13.754279403644395, "grad_norm": 0.45303866267204285, "learning_rate": 
2.9181e-05, "loss": 0.0178, "step": 12461 }, { "epoch": 13.755383765875207, "grad_norm": 0.09467842429876328, "learning_rate": 2.918066666666667e-05, "loss": 0.0043, "step": 12462 }, { "epoch": 13.75648812810602, "grad_norm": 0.20329569280147552, "learning_rate": 2.9180333333333333e-05, "loss": 0.0083, "step": 12463 }, { "epoch": 13.75759249033683, "grad_norm": 0.2977662682533264, "learning_rate": 2.9180000000000002e-05, "loss": 0.0105, "step": 12464 }, { "epoch": 13.758696852567642, "grad_norm": 0.27296826243400574, "learning_rate": 2.9179666666666668e-05, "loss": 0.0113, "step": 12465 }, { "epoch": 13.759801214798454, "grad_norm": 0.3139137923717499, "learning_rate": 2.9179333333333334e-05, "loss": 0.0123, "step": 12466 }, { "epoch": 13.760905577029266, "grad_norm": 0.1578301191329956, "learning_rate": 2.9179e-05, "loss": 0.0062, "step": 12467 }, { "epoch": 13.762009939260077, "grad_norm": 0.5720379948616028, "learning_rate": 2.917866666666667e-05, "loss": 0.0171, "step": 12468 }, { "epoch": 13.763114301490889, "grad_norm": 0.20640817284584045, "learning_rate": 2.9178333333333335e-05, "loss": 0.01, "step": 12469 }, { "epoch": 13.764218663721701, "grad_norm": 0.32865437865257263, "learning_rate": 2.9178e-05, "loss": 0.0069, "step": 12470 }, { "epoch": 13.765323025952512, "grad_norm": 0.4912708103656769, "learning_rate": 2.9177666666666667e-05, "loss": 0.0164, "step": 12471 }, { "epoch": 13.766427388183324, "grad_norm": 0.2729620635509491, "learning_rate": 2.9177333333333333e-05, "loss": 0.01, "step": 12472 }, { "epoch": 13.767531750414136, "grad_norm": 0.19572706520557404, "learning_rate": 2.9177000000000002e-05, "loss": 0.0129, "step": 12473 }, { "epoch": 13.768636112644948, "grad_norm": 0.39442068338394165, "learning_rate": 2.9176666666666665e-05, "loss": 0.0163, "step": 12474 }, { "epoch": 13.769740474875759, "grad_norm": 0.15501224994659424, "learning_rate": 2.9176333333333334e-05, "loss": 0.005, "step": 12475 }, { "epoch": 13.770844837106571, "grad_norm": 
0.6778349280357361, "learning_rate": 2.9176000000000003e-05, "loss": 0.0119, "step": 12476 }, { "epoch": 13.771949199337383, "grad_norm": 0.5476167798042297, "learning_rate": 2.9175666666666666e-05, "loss": 0.0169, "step": 12477 }, { "epoch": 13.773053561568194, "grad_norm": 0.17749933898448944, "learning_rate": 2.9175333333333335e-05, "loss": 0.0079, "step": 12478 }, { "epoch": 13.774157923799006, "grad_norm": 0.5960573554039001, "learning_rate": 2.9175e-05, "loss": 0.1912, "step": 12479 }, { "epoch": 13.775262286029818, "grad_norm": 0.7374034523963928, "learning_rate": 2.9174666666666667e-05, "loss": 0.1324, "step": 12480 }, { "epoch": 13.77636664826063, "grad_norm": 1.4240126609802246, "learning_rate": 2.9174333333333333e-05, "loss": 0.1225, "step": 12481 }, { "epoch": 13.77747101049144, "grad_norm": 0.3479647934436798, "learning_rate": 2.9174000000000002e-05, "loss": 0.0676, "step": 12482 }, { "epoch": 13.778575372722253, "grad_norm": 0.6761305332183838, "learning_rate": 2.9173666666666665e-05, "loss": 0.0667, "step": 12483 }, { "epoch": 13.779679734953065, "grad_norm": 0.763563334941864, "learning_rate": 2.9173333333333334e-05, "loss": 0.0844, "step": 12484 }, { "epoch": 13.780784097183876, "grad_norm": 0.3984890580177307, "learning_rate": 2.9173000000000003e-05, "loss": 0.0594, "step": 12485 }, { "epoch": 13.781888459414688, "grad_norm": 0.4030799865722656, "learning_rate": 2.9172666666666666e-05, "loss": 0.0621, "step": 12486 }, { "epoch": 13.7829928216455, "grad_norm": 0.34759122133255005, "learning_rate": 2.9172333333333335e-05, "loss": 0.0294, "step": 12487 }, { "epoch": 13.78409718387631, "grad_norm": 0.402668833732605, "learning_rate": 2.9172e-05, "loss": 0.0335, "step": 12488 }, { "epoch": 13.785201546107123, "grad_norm": 0.2475447952747345, "learning_rate": 2.9171666666666667e-05, "loss": 0.0441, "step": 12489 }, { "epoch": 13.786305908337935, "grad_norm": 0.30133938789367676, "learning_rate": 2.9171333333333333e-05, "loss": 0.021, "step": 12490 }, { 
"epoch": 13.787410270568747, "grad_norm": 0.1968218833208084, "learning_rate": 2.9171000000000002e-05, "loss": 0.0159, "step": 12491 }, { "epoch": 13.788514632799558, "grad_norm": 0.21274630725383759, "learning_rate": 2.9170666666666665e-05, "loss": 0.0138, "step": 12492 }, { "epoch": 13.78961899503037, "grad_norm": 0.23208343982696533, "learning_rate": 2.9170333333333334e-05, "loss": 0.0067, "step": 12493 }, { "epoch": 13.790723357261182, "grad_norm": 0.1813231110572815, "learning_rate": 2.9170000000000004e-05, "loss": 0.0119, "step": 12494 }, { "epoch": 13.791827719491993, "grad_norm": 0.26267024874687195, "learning_rate": 2.9169666666666666e-05, "loss": 0.0071, "step": 12495 }, { "epoch": 13.792932081722805, "grad_norm": 0.30242523550987244, "learning_rate": 2.9169333333333335e-05, "loss": 0.0146, "step": 12496 }, { "epoch": 13.794036443953617, "grad_norm": 0.2517889440059662, "learning_rate": 2.9169e-05, "loss": 0.0121, "step": 12497 }, { "epoch": 13.795140806184428, "grad_norm": 0.2226438820362091, "learning_rate": 2.9168666666666667e-05, "loss": 0.0121, "step": 12498 }, { "epoch": 13.79624516841524, "grad_norm": 0.23507924377918243, "learning_rate": 2.9168333333333333e-05, "loss": 0.011, "step": 12499 }, { "epoch": 13.797349530646052, "grad_norm": 0.3037761449813843, "learning_rate": 2.9168e-05, "loss": 0.0155, "step": 12500 }, { "epoch": 13.798453892876864, "grad_norm": 0.2032584398984909, "learning_rate": 2.916766666666667e-05, "loss": 0.0111, "step": 12501 }, { "epoch": 13.799558255107675, "grad_norm": 0.23765037953853607, "learning_rate": 2.9167333333333334e-05, "loss": 0.0135, "step": 12502 }, { "epoch": 13.800662617338487, "grad_norm": 0.14174626767635345, "learning_rate": 2.9167e-05, "loss": 0.0064, "step": 12503 }, { "epoch": 13.8017669795693, "grad_norm": 0.3844253718852997, "learning_rate": 2.9166666666666666e-05, "loss": 0.0142, "step": 12504 }, { "epoch": 13.80287134180011, "grad_norm": 0.36034470796585083, "learning_rate": 2.9166333333333336e-05, 
"loss": 0.0072, "step": 12505 }, { "epoch": 13.803975704030922, "grad_norm": 0.32452988624572754, "learning_rate": 2.9165999999999998e-05, "loss": 0.0123, "step": 12506 }, { "epoch": 13.805080066261734, "grad_norm": 0.1313282996416092, "learning_rate": 2.9165666666666668e-05, "loss": 0.0069, "step": 12507 }, { "epoch": 13.806184428492546, "grad_norm": 0.45018547773361206, "learning_rate": 2.9165333333333334e-05, "loss": 0.0183, "step": 12508 }, { "epoch": 13.807288790723357, "grad_norm": 0.21900293231010437, "learning_rate": 2.9165e-05, "loss": 0.014, "step": 12509 }, { "epoch": 13.808393152954169, "grad_norm": 0.26639223098754883, "learning_rate": 2.916466666666667e-05, "loss": 0.0127, "step": 12510 }, { "epoch": 13.809497515184981, "grad_norm": 0.15760794281959534, "learning_rate": 2.9164333333333335e-05, "loss": 0.0058, "step": 12511 }, { "epoch": 13.810601877415792, "grad_norm": 0.2109617441892624, "learning_rate": 2.9164e-05, "loss": 0.0085, "step": 12512 }, { "epoch": 13.811706239646604, "grad_norm": 0.31115275621414185, "learning_rate": 2.9163666666666667e-05, "loss": 0.0189, "step": 12513 }, { "epoch": 13.812810601877416, "grad_norm": 0.16589057445526123, "learning_rate": 2.9163333333333336e-05, "loss": 0.0111, "step": 12514 }, { "epoch": 13.813914964108228, "grad_norm": 0.3635397255420685, "learning_rate": 2.9163e-05, "loss": 0.012, "step": 12515 }, { "epoch": 13.815019326339039, "grad_norm": 0.16663171350955963, "learning_rate": 2.9162666666666668e-05, "loss": 0.0054, "step": 12516 }, { "epoch": 13.816123688569851, "grad_norm": 0.6678673624992371, "learning_rate": 2.9162333333333334e-05, "loss": 0.0224, "step": 12517 }, { "epoch": 13.817228050800663, "grad_norm": 0.3242202699184418, "learning_rate": 2.9162e-05, "loss": 0.0159, "step": 12518 }, { "epoch": 13.818332413031474, "grad_norm": 0.15556499361991882, "learning_rate": 2.916166666666667e-05, "loss": 0.0065, "step": 12519 }, { "epoch": 13.819436775262286, "grad_norm": 0.2067536562681198, 
"learning_rate": 2.9161333333333335e-05, "loss": 0.0088, "step": 12520 }, { "epoch": 13.820541137493098, "grad_norm": 0.443462610244751, "learning_rate": 2.9161e-05, "loss": 0.0161, "step": 12521 }, { "epoch": 13.821645499723909, "grad_norm": 0.19796304404735565, "learning_rate": 2.9160666666666667e-05, "loss": 0.0088, "step": 12522 }, { "epoch": 13.82274986195472, "grad_norm": 0.22884789109230042, "learning_rate": 2.9160333333333336e-05, "loss": 0.0092, "step": 12523 }, { "epoch": 13.823854224185533, "grad_norm": 0.9528101086616516, "learning_rate": 2.916e-05, "loss": 0.0245, "step": 12524 }, { "epoch": 13.824958586416345, "grad_norm": 0.4058557152748108, "learning_rate": 2.9159666666666668e-05, "loss": 0.0143, "step": 12525 }, { "epoch": 13.826062948647156, "grad_norm": 0.4143247604370117, "learning_rate": 2.9159333333333337e-05, "loss": 0.007, "step": 12526 }, { "epoch": 13.827167310877968, "grad_norm": 0.43072596192359924, "learning_rate": 2.9159e-05, "loss": 0.0168, "step": 12527 }, { "epoch": 13.82827167310878, "grad_norm": 0.9315626621246338, "learning_rate": 2.915866666666667e-05, "loss": 0.0156, "step": 12528 }, { "epoch": 13.82937603533959, "grad_norm": 0.6242070198059082, "learning_rate": 2.9158333333333335e-05, "loss": 0.2115, "step": 12529 }, { "epoch": 13.830480397570403, "grad_norm": 0.4927160143852234, "learning_rate": 2.9158e-05, "loss": 0.1454, "step": 12530 }, { "epoch": 13.831584759801215, "grad_norm": 0.705327570438385, "learning_rate": 2.9157666666666667e-05, "loss": 0.1459, "step": 12531 }, { "epoch": 13.832689122032026, "grad_norm": 0.40125349164009094, "learning_rate": 2.9157333333333333e-05, "loss": 0.1209, "step": 12532 }, { "epoch": 13.833793484262838, "grad_norm": 0.5840868353843689, "learning_rate": 2.9157e-05, "loss": 0.0929, "step": 12533 }, { "epoch": 13.83489784649365, "grad_norm": 0.2746516764163971, "learning_rate": 2.9156666666666668e-05, "loss": 0.0594, "step": 12534 }, { "epoch": 13.836002208724462, "grad_norm": 
0.45595723390579224, "learning_rate": 2.9156333333333334e-05, "loss": 0.0546, "step": 12535 }, { "epoch": 13.837106570955273, "grad_norm": 0.5799002051353455, "learning_rate": 2.9156e-05, "loss": 0.0458, "step": 12536 }, { "epoch": 13.838210933186085, "grad_norm": 0.2818070948123932, "learning_rate": 2.915566666666667e-05, "loss": 0.0225, "step": 12537 }, { "epoch": 13.839315295416897, "grad_norm": 0.26671677827835083, "learning_rate": 2.9155333333333332e-05, "loss": 0.0201, "step": 12538 }, { "epoch": 13.840419657647708, "grad_norm": 0.2583540081977844, "learning_rate": 2.9155e-05, "loss": 0.0158, "step": 12539 }, { "epoch": 13.84152401987852, "grad_norm": 0.3455654978752136, "learning_rate": 2.9154666666666667e-05, "loss": 0.0182, "step": 12540 }, { "epoch": 13.842628382109332, "grad_norm": 0.22070902585983276, "learning_rate": 2.9154333333333333e-05, "loss": 0.0178, "step": 12541 }, { "epoch": 13.843732744340144, "grad_norm": 0.25508251786231995, "learning_rate": 2.9154e-05, "loss": 0.0325, "step": 12542 }, { "epoch": 13.844837106570955, "grad_norm": 0.23762068152427673, "learning_rate": 2.915366666666667e-05, "loss": 0.0113, "step": 12543 }, { "epoch": 13.845941468801767, "grad_norm": 0.15550503134727478, "learning_rate": 2.9153333333333334e-05, "loss": 0.0114, "step": 12544 }, { "epoch": 13.84704583103258, "grad_norm": 0.3456970751285553, "learning_rate": 2.9153e-05, "loss": 0.0068, "step": 12545 }, { "epoch": 13.84815019326339, "grad_norm": 0.20660319924354553, "learning_rate": 2.915266666666667e-05, "loss": 0.0086, "step": 12546 }, { "epoch": 13.849254555494202, "grad_norm": 0.2550719082355499, "learning_rate": 2.9152333333333332e-05, "loss": 0.0094, "step": 12547 }, { "epoch": 13.850358917725014, "grad_norm": 0.17051398754119873, "learning_rate": 2.9152e-05, "loss": 0.0068, "step": 12548 }, { "epoch": 13.851463279955826, "grad_norm": 0.30845850706100464, "learning_rate": 2.9151666666666667e-05, "loss": 0.0186, "step": 12549 }, { "epoch": 13.852567642186637, 
"grad_norm": 0.2371497005224228, "learning_rate": 2.9151333333333333e-05, "loss": 0.0046, "step": 12550 }, { "epoch": 13.853672004417449, "grad_norm": 0.40340888500213623, "learning_rate": 2.9151000000000003e-05, "loss": 0.0139, "step": 12551 }, { "epoch": 13.854776366648261, "grad_norm": 0.4777187407016754, "learning_rate": 2.915066666666667e-05, "loss": 0.0531, "step": 12552 }, { "epoch": 13.855880728879072, "grad_norm": 0.36643943190574646, "learning_rate": 2.9150333333333334e-05, "loss": 0.0131, "step": 12553 }, { "epoch": 13.856985091109884, "grad_norm": 0.24006393551826477, "learning_rate": 2.915e-05, "loss": 0.0181, "step": 12554 }, { "epoch": 13.858089453340696, "grad_norm": 0.5231205821037292, "learning_rate": 2.914966666666667e-05, "loss": 0.0135, "step": 12555 }, { "epoch": 13.859193815571507, "grad_norm": 0.20273436605930328, "learning_rate": 2.9149333333333332e-05, "loss": 0.008, "step": 12556 }, { "epoch": 13.860298177802319, "grad_norm": 0.13067109882831573, "learning_rate": 2.9149e-05, "loss": 0.0068, "step": 12557 }, { "epoch": 13.861402540033131, "grad_norm": 0.6036778688430786, "learning_rate": 2.9148666666666668e-05, "loss": 0.0103, "step": 12558 }, { "epoch": 13.862506902263943, "grad_norm": 0.21493975818157196, "learning_rate": 2.9148333333333333e-05, "loss": 0.0102, "step": 12559 }, { "epoch": 13.863611264494754, "grad_norm": 0.26010632514953613, "learning_rate": 2.9148000000000003e-05, "loss": 0.0098, "step": 12560 }, { "epoch": 13.864715626725566, "grad_norm": 0.25800952315330505, "learning_rate": 2.9147666666666665e-05, "loss": 0.0057, "step": 12561 }, { "epoch": 13.865819988956378, "grad_norm": 0.3099746108055115, "learning_rate": 2.9147333333333335e-05, "loss": 0.0169, "step": 12562 }, { "epoch": 13.866924351187189, "grad_norm": 0.7210347652435303, "learning_rate": 2.9147e-05, "loss": 0.0175, "step": 12563 }, { "epoch": 13.868028713418001, "grad_norm": 0.2433340698480606, "learning_rate": 2.9146666666666667e-05, "loss": 0.0116, "step": 
12564 }, { "epoch": 13.869133075648813, "grad_norm": 0.5551407933235168, "learning_rate": 2.9146333333333332e-05, "loss": 0.0105, "step": 12565 }, { "epoch": 13.870237437879624, "grad_norm": 0.7121154069900513, "learning_rate": 2.9146000000000002e-05, "loss": 0.0213, "step": 12566 }, { "epoch": 13.871341800110436, "grad_norm": 0.2954210937023163, "learning_rate": 2.9145666666666664e-05, "loss": 0.0081, "step": 12567 }, { "epoch": 13.872446162341248, "grad_norm": 0.24594347178936005, "learning_rate": 2.9145333333333334e-05, "loss": 0.0076, "step": 12568 }, { "epoch": 13.87355052457206, "grad_norm": 0.7654222249984741, "learning_rate": 2.9145000000000003e-05, "loss": 0.0214, "step": 12569 }, { "epoch": 13.87465488680287, "grad_norm": 0.35705864429473877, "learning_rate": 2.9144666666666666e-05, "loss": 0.0156, "step": 12570 }, { "epoch": 13.875759249033683, "grad_norm": 0.25636178255081177, "learning_rate": 2.9144333333333335e-05, "loss": 0.0106, "step": 12571 }, { "epoch": 13.876863611264495, "grad_norm": 0.18329448997974396, "learning_rate": 2.9144e-05, "loss": 0.0086, "step": 12572 }, { "epoch": 13.877967973495306, "grad_norm": 0.41358253359794617, "learning_rate": 2.9143666666666667e-05, "loss": 0.0175, "step": 12573 }, { "epoch": 13.879072335726118, "grad_norm": 0.25474369525909424, "learning_rate": 2.9143333333333333e-05, "loss": 0.0128, "step": 12574 }, { "epoch": 13.88017669795693, "grad_norm": 0.6391776204109192, "learning_rate": 2.9143000000000002e-05, "loss": 0.022, "step": 12575 }, { "epoch": 13.881281060187742, "grad_norm": 0.23996245861053467, "learning_rate": 2.9142666666666668e-05, "loss": 0.0151, "step": 12576 }, { "epoch": 13.882385422418553, "grad_norm": 0.367852121591568, "learning_rate": 2.9142333333333334e-05, "loss": 0.0119, "step": 12577 }, { "epoch": 13.883489784649365, "grad_norm": 0.25991347432136536, "learning_rate": 2.9142000000000003e-05, "loss": 0.0107, "step": 12578 }, { "epoch": 13.884594146880177, "grad_norm": 0.5855111479759216, 
"learning_rate": 2.9141666666666666e-05, "loss": 0.2332, "step": 12579 }, { "epoch": 13.885698509110988, "grad_norm": 0.49270591139793396, "learning_rate": 2.9141333333333335e-05, "loss": 0.102, "step": 12580 }, { "epoch": 13.8868028713418, "grad_norm": 0.4012341797351837, "learning_rate": 2.9141e-05, "loss": 0.0725, "step": 12581 }, { "epoch": 13.887907233572612, "grad_norm": 0.5433380603790283, "learning_rate": 2.9140666666666667e-05, "loss": 0.1206, "step": 12582 }, { "epoch": 13.889011595803424, "grad_norm": 0.6061842441558838, "learning_rate": 2.9140333333333333e-05, "loss": 0.1103, "step": 12583 }, { "epoch": 13.890115958034235, "grad_norm": 0.3194636404514313, "learning_rate": 2.9140000000000002e-05, "loss": 0.0558, "step": 12584 }, { "epoch": 13.891220320265047, "grad_norm": 0.7104060053825378, "learning_rate": 2.9139666666666668e-05, "loss": 0.1339, "step": 12585 }, { "epoch": 13.89232468249586, "grad_norm": 0.29822206497192383, "learning_rate": 2.9139333333333334e-05, "loss": 0.0321, "step": 12586 }, { "epoch": 13.89342904472667, "grad_norm": 0.2741205394268036, "learning_rate": 2.9139000000000003e-05, "loss": 0.0529, "step": 12587 }, { "epoch": 13.894533406957482, "grad_norm": 0.41086864471435547, "learning_rate": 2.9138666666666666e-05, "loss": 0.0138, "step": 12588 }, { "epoch": 13.895637769188294, "grad_norm": 0.4646109342575073, "learning_rate": 2.9138333333333335e-05, "loss": 0.0206, "step": 12589 }, { "epoch": 13.896742131419105, "grad_norm": 0.18824657797813416, "learning_rate": 2.9137999999999998e-05, "loss": 0.0209, "step": 12590 }, { "epoch": 13.897846493649917, "grad_norm": 0.13011640310287476, "learning_rate": 2.9137666666666667e-05, "loss": 0.0096, "step": 12591 }, { "epoch": 13.898950855880729, "grad_norm": 0.18021699786186218, "learning_rate": 2.9137333333333333e-05, "loss": 0.014, "step": 12592 }, { "epoch": 13.900055218111541, "grad_norm": 0.27971407771110535, "learning_rate": 2.9137e-05, "loss": 0.052, "step": 12593 }, { "epoch": 
13.901159580342352, "grad_norm": 0.4124378263950348, "learning_rate": 2.913666666666667e-05, "loss": 0.0094, "step": 12594 }, { "epoch": 13.902263942573164, "grad_norm": 0.1737397164106369, "learning_rate": 2.9136333333333334e-05, "loss": 0.0103, "step": 12595 }, { "epoch": 13.903368304803976, "grad_norm": 0.4562618136405945, "learning_rate": 2.9136e-05, "loss": 0.0142, "step": 12596 }, { "epoch": 13.904472667034787, "grad_norm": 0.24298802018165588, "learning_rate": 2.9135666666666666e-05, "loss": 0.0084, "step": 12597 }, { "epoch": 13.905577029265599, "grad_norm": 0.19079206883907318, "learning_rate": 2.9135333333333335e-05, "loss": 0.0076, "step": 12598 }, { "epoch": 13.906681391496411, "grad_norm": 0.3099830746650696, "learning_rate": 2.9134999999999998e-05, "loss": 0.0207, "step": 12599 }, { "epoch": 13.907785753727222, "grad_norm": 0.24139270186424255, "learning_rate": 2.9134666666666667e-05, "loss": 0.0188, "step": 12600 }, { "epoch": 13.908890115958034, "grad_norm": 0.18343770503997803, "learning_rate": 2.9134333333333337e-05, "loss": 0.0099, "step": 12601 }, { "epoch": 13.909994478188846, "grad_norm": 0.3905634582042694, "learning_rate": 2.9134e-05, "loss": 0.0085, "step": 12602 }, { "epoch": 13.911098840419658, "grad_norm": 0.453343003988266, "learning_rate": 2.913366666666667e-05, "loss": 0.0095, "step": 12603 }, { "epoch": 13.912203202650469, "grad_norm": 0.18181997537612915, "learning_rate": 2.9133333333333334e-05, "loss": 0.0143, "step": 12604 }, { "epoch": 13.913307564881281, "grad_norm": 0.25784438848495483, "learning_rate": 2.9133e-05, "loss": 0.0079, "step": 12605 }, { "epoch": 13.914411927112093, "grad_norm": 0.41642695665359497, "learning_rate": 2.9132666666666666e-05, "loss": 0.0191, "step": 12606 }, { "epoch": 13.915516289342904, "grad_norm": 0.4797915518283844, "learning_rate": 2.9132333333333336e-05, "loss": 0.0099, "step": 12607 }, { "epoch": 13.916620651573716, "grad_norm": 0.2048659324645996, "learning_rate": 2.9131999999999998e-05, 
"loss": 0.0064, "step": 12608 }, { "epoch": 13.917725013804528, "grad_norm": 0.23091121017932892, "learning_rate": 2.9131666666666668e-05, "loss": 0.0102, "step": 12609 }, { "epoch": 13.91882937603534, "grad_norm": 0.5029493570327759, "learning_rate": 2.9131333333333337e-05, "loss": 0.0133, "step": 12610 }, { "epoch": 13.91993373826615, "grad_norm": 0.16744168102741241, "learning_rate": 2.9131e-05, "loss": 0.0062, "step": 12611 }, { "epoch": 13.921038100496963, "grad_norm": 0.20973104238510132, "learning_rate": 2.913066666666667e-05, "loss": 0.0087, "step": 12612 }, { "epoch": 13.922142462727775, "grad_norm": 0.34101852774620056, "learning_rate": 2.9130333333333335e-05, "loss": 0.0142, "step": 12613 }, { "epoch": 13.923246824958586, "grad_norm": 0.2819006145000458, "learning_rate": 2.913e-05, "loss": 0.0122, "step": 12614 }, { "epoch": 13.924351187189398, "grad_norm": 0.24734337627887726, "learning_rate": 2.9129666666666667e-05, "loss": 0.014, "step": 12615 }, { "epoch": 13.92545554942021, "grad_norm": 0.24368543922901154, "learning_rate": 2.9129333333333336e-05, "loss": 0.0155, "step": 12616 }, { "epoch": 13.926559911651022, "grad_norm": 0.47551700472831726, "learning_rate": 2.9129e-05, "loss": 0.0166, "step": 12617 }, { "epoch": 13.927664273881833, "grad_norm": 0.49984148144721985, "learning_rate": 2.9128666666666668e-05, "loss": 0.0169, "step": 12618 }, { "epoch": 13.928768636112645, "grad_norm": 0.6766678094863892, "learning_rate": 2.9128333333333337e-05, "loss": 0.0151, "step": 12619 }, { "epoch": 13.929872998343457, "grad_norm": 0.5477297306060791, "learning_rate": 2.9128e-05, "loss": 0.0162, "step": 12620 }, { "epoch": 13.930977360574268, "grad_norm": 0.28531381487846375, "learning_rate": 2.912766666666667e-05, "loss": 0.0105, "step": 12621 }, { "epoch": 13.93208172280508, "grad_norm": 0.2794486880302429, "learning_rate": 2.912733333333333e-05, "loss": 0.0148, "step": 12622 }, { "epoch": 13.933186085035892, "grad_norm": 0.09319709986448288, "learning_rate": 
2.9127e-05, "loss": 0.0028, "step": 12623 }, { "epoch": 13.934290447266704, "grad_norm": 0.3260006606578827, "learning_rate": 2.9126666666666667e-05, "loss": 0.0125, "step": 12624 }, { "epoch": 13.935394809497515, "grad_norm": 0.6454297304153442, "learning_rate": 2.9126333333333333e-05, "loss": 0.0156, "step": 12625 }, { "epoch": 13.936499171728327, "grad_norm": 0.18006542325019836, "learning_rate": 2.9126000000000002e-05, "loss": 0.0091, "step": 12626 }, { "epoch": 13.93760353395914, "grad_norm": 0.1790774166584015, "learning_rate": 2.9125666666666668e-05, "loss": 0.0085, "step": 12627 }, { "epoch": 13.93870789618995, "grad_norm": 0.9855011105537415, "learning_rate": 2.9125333333333334e-05, "loss": 0.0292, "step": 12628 }, { "epoch": 13.939812258420762, "grad_norm": 0.5852325558662415, "learning_rate": 2.9125e-05, "loss": 0.2061, "step": 12629 }, { "epoch": 13.940916620651574, "grad_norm": 0.6600956916809082, "learning_rate": 2.912466666666667e-05, "loss": 0.1548, "step": 12630 }, { "epoch": 13.942020982882385, "grad_norm": 0.598606526851654, "learning_rate": 2.912433333333333e-05, "loss": 0.1311, "step": 12631 }, { "epoch": 13.943125345113197, "grad_norm": 0.5448299050331116, "learning_rate": 2.9124e-05, "loss": 0.0941, "step": 12632 }, { "epoch": 13.94422970734401, "grad_norm": 0.5567513704299927, "learning_rate": 2.9123666666666667e-05, "loss": 0.0751, "step": 12633 }, { "epoch": 13.945334069574821, "grad_norm": 0.3669028878211975, "learning_rate": 2.9123333333333333e-05, "loss": 0.1021, "step": 12634 }, { "epoch": 13.946438431805632, "grad_norm": 0.5123915672302246, "learning_rate": 2.9123000000000002e-05, "loss": 0.0478, "step": 12635 }, { "epoch": 13.947542794036444, "grad_norm": 0.3520107567310333, "learning_rate": 2.9122666666666668e-05, "loss": 0.0396, "step": 12636 }, { "epoch": 13.948647156267256, "grad_norm": 0.31908202171325684, "learning_rate": 2.9122333333333334e-05, "loss": 0.0167, "step": 12637 }, { "epoch": 13.949751518498067, "grad_norm": 
0.19327878952026367, "learning_rate": 2.9122e-05, "loss": 0.0191, "step": 12638 }, { "epoch": 13.950855880728879, "grad_norm": 0.15274345874786377, "learning_rate": 2.912166666666667e-05, "loss": 0.0168, "step": 12639 }, { "epoch": 13.951960242959691, "grad_norm": 0.1327105313539505, "learning_rate": 2.9121333333333332e-05, "loss": 0.0082, "step": 12640 }, { "epoch": 13.953064605190502, "grad_norm": 0.18434734642505646, "learning_rate": 2.9121e-05, "loss": 0.0132, "step": 12641 }, { "epoch": 13.954168967421314, "grad_norm": 0.2586889863014221, "learning_rate": 2.9120666666666667e-05, "loss": 0.012, "step": 12642 }, { "epoch": 13.955273329652126, "grad_norm": 0.15784059464931488, "learning_rate": 2.9120333333333333e-05, "loss": 0.0115, "step": 12643 }, { "epoch": 13.956377691882938, "grad_norm": 0.15276294946670532, "learning_rate": 2.9120000000000002e-05, "loss": 0.0087, "step": 12644 }, { "epoch": 13.957482054113749, "grad_norm": 0.39801159501075745, "learning_rate": 2.911966666666667e-05, "loss": 0.0136, "step": 12645 }, { "epoch": 13.958586416344561, "grad_norm": 0.4393322467803955, "learning_rate": 2.9119333333333334e-05, "loss": 0.0253, "step": 12646 }, { "epoch": 13.959690778575373, "grad_norm": 0.18374674022197723, "learning_rate": 2.9119e-05, "loss": 0.0091, "step": 12647 }, { "epoch": 13.960795140806184, "grad_norm": 0.2462826669216156, "learning_rate": 2.911866666666667e-05, "loss": 0.0048, "step": 12648 }, { "epoch": 13.961899503036996, "grad_norm": 0.18734575808048248, "learning_rate": 2.9118333333333332e-05, "loss": 0.0096, "step": 12649 }, { "epoch": 13.963003865267808, "grad_norm": 0.328786700963974, "learning_rate": 2.9118e-05, "loss": 0.0219, "step": 12650 }, { "epoch": 13.96410822749862, "grad_norm": 0.23488286137580872, "learning_rate": 2.9117666666666667e-05, "loss": 0.0216, "step": 12651 }, { "epoch": 13.96521258972943, "grad_norm": 0.18724016845226288, "learning_rate": 2.9117333333333333e-05, "loss": 0.0097, "step": 12652 }, { "epoch": 
13.966316951960243, "grad_norm": 0.13273552060127258, "learning_rate": 2.9117000000000003e-05, "loss": 0.0069, "step": 12653 }, { "epoch": 13.967421314191055, "grad_norm": 0.4225194454193115, "learning_rate": 2.9116666666666665e-05, "loss": 0.0083, "step": 12654 }, { "epoch": 13.968525676421866, "grad_norm": 0.6723861694335938, "learning_rate": 2.9116333333333334e-05, "loss": 0.0123, "step": 12655 }, { "epoch": 13.969630038652678, "grad_norm": 0.12849147617816925, "learning_rate": 2.9116e-05, "loss": 0.006, "step": 12656 }, { "epoch": 13.97073440088349, "grad_norm": 0.3767375349998474, "learning_rate": 2.9115666666666666e-05, "loss": 0.0139, "step": 12657 }, { "epoch": 13.971838763114302, "grad_norm": 0.270949125289917, "learning_rate": 2.9115333333333332e-05, "loss": 0.0086, "step": 12658 }, { "epoch": 13.972943125345113, "grad_norm": 0.24693690240383148, "learning_rate": 2.9115e-05, "loss": 0.0121, "step": 12659 }, { "epoch": 13.974047487575925, "grad_norm": 0.2716935873031616, "learning_rate": 2.9114666666666668e-05, "loss": 0.0126, "step": 12660 }, { "epoch": 13.975151849806737, "grad_norm": 0.17398877441883087, "learning_rate": 2.9114333333333333e-05, "loss": 0.0078, "step": 12661 }, { "epoch": 13.976256212037548, "grad_norm": 0.7137631773948669, "learning_rate": 2.9114000000000003e-05, "loss": 0.0118, "step": 12662 }, { "epoch": 13.97736057426836, "grad_norm": 0.399373322725296, "learning_rate": 2.9113666666666665e-05, "loss": 0.0106, "step": 12663 }, { "epoch": 13.978464936499172, "grad_norm": 0.8411997556686401, "learning_rate": 2.9113333333333335e-05, "loss": 0.0188, "step": 12664 }, { "epoch": 13.979569298729983, "grad_norm": 0.2816202640533447, "learning_rate": 2.9113e-05, "loss": 0.0099, "step": 12665 }, { "epoch": 13.980673660960795, "grad_norm": 0.30474188923835754, "learning_rate": 2.9112666666666667e-05, "loss": 0.0099, "step": 12666 }, { "epoch": 13.981778023191607, "grad_norm": 1.3561053276062012, "learning_rate": 2.9112333333333332e-05, "loss": 
0.0132, "step": 12667 }, { "epoch": 13.98288238542242, "grad_norm": 0.239903524518013, "learning_rate": 2.9112000000000002e-05, "loss": 0.0069, "step": 12668 }, { "epoch": 13.98398674765323, "grad_norm": 0.26987987756729126, "learning_rate": 2.9111666666666668e-05, "loss": 0.0095, "step": 12669 }, { "epoch": 13.985091109884042, "grad_norm": 0.4957968294620514, "learning_rate": 2.9111333333333334e-05, "loss": 0.0306, "step": 12670 }, { "epoch": 13.986195472114854, "grad_norm": 0.7237074971199036, "learning_rate": 2.9111000000000003e-05, "loss": 0.019, "step": 12671 }, { "epoch": 13.987299834345665, "grad_norm": 0.38733455538749695, "learning_rate": 2.9110666666666666e-05, "loss": 0.0141, "step": 12672 }, { "epoch": 13.988404196576477, "grad_norm": 0.919398844242096, "learning_rate": 2.9110333333333335e-05, "loss": 0.0249, "step": 12673 }, { "epoch": 13.98950855880729, "grad_norm": 0.32853686809539795, "learning_rate": 2.911e-05, "loss": 0.0147, "step": 12674 }, { "epoch": 13.9906129210381, "grad_norm": 0.5112884640693665, "learning_rate": 2.9109666666666667e-05, "loss": 0.0252, "step": 12675 }, { "epoch": 13.991717283268912, "grad_norm": 0.4351343512535095, "learning_rate": 2.9109333333333336e-05, "loss": 0.0166, "step": 12676 }, { "epoch": 13.992821645499724, "grad_norm": 0.3928810656070709, "learning_rate": 2.9109000000000002e-05, "loss": 0.0144, "step": 12677 }, { "epoch": 13.993926007730536, "grad_norm": 0.2998191714286804, "learning_rate": 2.9108666666666668e-05, "loss": 0.0146, "step": 12678 }, { "epoch": 13.995030369961347, "grad_norm": 0.603543758392334, "learning_rate": 2.9108333333333334e-05, "loss": 0.1398, "step": 12679 }, { "epoch": 13.996134732192159, "grad_norm": 0.3473682105541229, "learning_rate": 2.9108000000000003e-05, "loss": 0.011, "step": 12680 }, { "epoch": 13.997239094422971, "grad_norm": 0.3348838984966278, "learning_rate": 2.9107666666666666e-05, "loss": 0.0116, "step": 12681 }, { "epoch": 13.998343456653782, "grad_norm": 
0.23613353073596954, "learning_rate": 2.9107333333333335e-05, "loss": 0.0158, "step": 12682 }, { "epoch": 13.999447818884594, "grad_norm": 0.3159645199775696, "learning_rate": 2.9106999999999998e-05, "loss": 0.0107, "step": 12683 }, { "epoch": 14.0, "grad_norm": 0.35420316457748413, "learning_rate": 2.9106666666666667e-05, "loss": 0.005, "step": 12684 }, { "epoch": 14.001104362230812, "grad_norm": 0.4284118711948395, "learning_rate": 2.9106333333333336e-05, "loss": 0.1848, "step": 12685 }, { "epoch": 14.002208724461623, "grad_norm": 0.45523494482040405, "learning_rate": 2.9106e-05, "loss": 0.121, "step": 12686 }, { "epoch": 14.003313086692435, "grad_norm": 0.5103397369384766, "learning_rate": 2.9105666666666668e-05, "loss": 0.1146, "step": 12687 }, { "epoch": 14.004417448923247, "grad_norm": 0.43947896361351013, "learning_rate": 2.9105333333333334e-05, "loss": 0.1032, "step": 12688 }, { "epoch": 14.00552181115406, "grad_norm": 0.4129101037979126, "learning_rate": 2.9105e-05, "loss": 0.0676, "step": 12689 }, { "epoch": 14.00662617338487, "grad_norm": 0.31381309032440186, "learning_rate": 2.9104666666666666e-05, "loss": 0.0429, "step": 12690 }, { "epoch": 14.007730535615682, "grad_norm": 0.3507222533226013, "learning_rate": 2.9104333333333335e-05, "loss": 0.0449, "step": 12691 }, { "epoch": 14.008834897846494, "grad_norm": 0.4640296697616577, "learning_rate": 2.9103999999999998e-05, "loss": 0.0432, "step": 12692 }, { "epoch": 14.009939260077305, "grad_norm": 0.30033260583877563, "learning_rate": 2.9103666666666667e-05, "loss": 0.0412, "step": 12693 }, { "epoch": 14.011043622308117, "grad_norm": 0.24677900969982147, "learning_rate": 2.9103333333333336e-05, "loss": 0.0302, "step": 12694 }, { "epoch": 14.01214798453893, "grad_norm": 0.6408105492591858, "learning_rate": 2.9103e-05, "loss": 0.0117, "step": 12695 }, { "epoch": 14.01325234676974, "grad_norm": 0.3237108290195465, "learning_rate": 2.910266666666667e-05, "loss": 0.0165, "step": 12696 }, { "epoch": 
14.014356709000552, "grad_norm": 0.1971372365951538, "learning_rate": 2.9102333333333334e-05, "loss": 0.0071, "step": 12697 }, { "epoch": 14.015461071231364, "grad_norm": 0.48857587575912476, "learning_rate": 2.9102e-05, "loss": 0.0155, "step": 12698 }, { "epoch": 14.016565433462176, "grad_norm": 0.353075236082077, "learning_rate": 2.9101666666666666e-05, "loss": 0.0121, "step": 12699 }, { "epoch": 14.017669795692987, "grad_norm": 0.24399515986442566, "learning_rate": 2.9101333333333335e-05, "loss": 0.01, "step": 12700 }, { "epoch": 14.018774157923799, "grad_norm": 0.284049391746521, "learning_rate": 2.9101e-05, "loss": 0.0088, "step": 12701 }, { "epoch": 14.019878520154611, "grad_norm": 0.3804316818714142, "learning_rate": 2.9100666666666667e-05, "loss": 0.0095, "step": 12702 }, { "epoch": 14.020982882385422, "grad_norm": 0.1503216177225113, "learning_rate": 2.9100333333333337e-05, "loss": 0.0031, "step": 12703 }, { "epoch": 14.022087244616234, "grad_norm": 0.19786924123764038, "learning_rate": 2.91e-05, "loss": 0.0081, "step": 12704 }, { "epoch": 14.023191606847046, "grad_norm": 0.4024743437767029, "learning_rate": 2.909966666666667e-05, "loss": 0.0118, "step": 12705 }, { "epoch": 14.024295969077858, "grad_norm": 0.3295382857322693, "learning_rate": 2.9099333333333334e-05, "loss": 0.012, "step": 12706 }, { "epoch": 14.025400331308669, "grad_norm": 0.2311139851808548, "learning_rate": 2.9099e-05, "loss": 0.0165, "step": 12707 }, { "epoch": 14.026504693539481, "grad_norm": 0.4714888334274292, "learning_rate": 2.9098666666666666e-05, "loss": 0.0069, "step": 12708 }, { "epoch": 14.027609055770293, "grad_norm": 0.12703059613704681, "learning_rate": 2.9098333333333336e-05, "loss": 0.0054, "step": 12709 }, { "epoch": 14.028713418001104, "grad_norm": 0.25545400381088257, "learning_rate": 2.9098e-05, "loss": 0.008, "step": 12710 }, { "epoch": 14.029817780231916, "grad_norm": 0.21422983705997467, "learning_rate": 2.9097666666666668e-05, "loss": 0.0057, "step": 12711 }, { 
"epoch": 14.030922142462728, "grad_norm": 0.17711715400218964, "learning_rate": 2.9097333333333333e-05, "loss": 0.0072, "step": 12712 }, { "epoch": 14.032026504693539, "grad_norm": 0.29772520065307617, "learning_rate": 2.9097e-05, "loss": 0.0071, "step": 12713 }, { "epoch": 14.03313086692435, "grad_norm": 0.17590230703353882, "learning_rate": 2.909666666666667e-05, "loss": 0.0071, "step": 12714 }, { "epoch": 14.034235229155163, "grad_norm": 0.7922818660736084, "learning_rate": 2.909633333333333e-05, "loss": 0.0169, "step": 12715 }, { "epoch": 14.035339591385975, "grad_norm": 0.20231081545352936, "learning_rate": 2.9096e-05, "loss": 0.0068, "step": 12716 }, { "epoch": 14.036443953616786, "grad_norm": 0.4247932434082031, "learning_rate": 2.9095666666666667e-05, "loss": 0.0221, "step": 12717 }, { "epoch": 14.037548315847598, "grad_norm": 0.21976378560066223, "learning_rate": 2.9095333333333332e-05, "loss": 0.0077, "step": 12718 }, { "epoch": 14.03865267807841, "grad_norm": 0.11516009271144867, "learning_rate": 2.9095000000000002e-05, "loss": 0.006, "step": 12719 }, { "epoch": 14.03975704030922, "grad_norm": 0.3498842716217041, "learning_rate": 2.9094666666666668e-05, "loss": 0.0111, "step": 12720 }, { "epoch": 14.040861402540033, "grad_norm": 0.37900716066360474, "learning_rate": 2.9094333333333334e-05, "loss": 0.0111, "step": 12721 }, { "epoch": 14.041965764770845, "grad_norm": 0.3693603575229645, "learning_rate": 2.9094e-05, "loss": 0.0088, "step": 12722 }, { "epoch": 14.043070127001657, "grad_norm": 0.6309882998466492, "learning_rate": 2.909366666666667e-05, "loss": 0.0124, "step": 12723 }, { "epoch": 14.044174489232468, "grad_norm": 0.5122582316398621, "learning_rate": 2.909333333333333e-05, "loss": 0.0208, "step": 12724 }, { "epoch": 14.04527885146328, "grad_norm": 0.1934134066104889, "learning_rate": 2.9093e-05, "loss": 0.0062, "step": 12725 }, { "epoch": 14.046383213694092, "grad_norm": 0.32187506556510925, "learning_rate": 2.909266666666667e-05, "loss": 0.011, 
"step": 12726 }, { "epoch": 14.047487575924903, "grad_norm": 0.12487044930458069, "learning_rate": 2.9092333333333333e-05, "loss": 0.0061, "step": 12727 }, { "epoch": 14.048591938155715, "grad_norm": 0.5218733549118042, "learning_rate": 2.9092000000000002e-05, "loss": 0.0054, "step": 12728 }, { "epoch": 14.049696300386527, "grad_norm": 0.35568520426750183, "learning_rate": 2.9091666666666668e-05, "loss": 0.01, "step": 12729 }, { "epoch": 14.050800662617338, "grad_norm": 0.4912185072898865, "learning_rate": 2.9091333333333334e-05, "loss": 0.0166, "step": 12730 }, { "epoch": 14.05190502484815, "grad_norm": 0.2391768842935562, "learning_rate": 2.9091e-05, "loss": 0.0112, "step": 12731 }, { "epoch": 14.053009387078962, "grad_norm": 0.29882094264030457, "learning_rate": 2.909066666666667e-05, "loss": 0.0096, "step": 12732 }, { "epoch": 14.054113749309774, "grad_norm": 0.8360607624053955, "learning_rate": 2.909033333333333e-05, "loss": 0.023, "step": 12733 }, { "epoch": 14.055218111540585, "grad_norm": 0.5286732912063599, "learning_rate": 2.909e-05, "loss": 0.0128, "step": 12734 }, { "epoch": 14.056322473771397, "grad_norm": 0.7745164036750793, "learning_rate": 2.908966666666667e-05, "loss": 0.1719, "step": 12735 }, { "epoch": 14.05742683600221, "grad_norm": 0.7046250700950623, "learning_rate": 2.9089333333333333e-05, "loss": 0.1437, "step": 12736 }, { "epoch": 14.05853119823302, "grad_norm": 0.48808783292770386, "learning_rate": 2.9089000000000002e-05, "loss": 0.1092, "step": 12737 }, { "epoch": 14.059635560463832, "grad_norm": 0.5849718451499939, "learning_rate": 2.9088666666666668e-05, "loss": 0.1061, "step": 12738 }, { "epoch": 14.060739922694644, "grad_norm": 0.5035759806632996, "learning_rate": 2.9088333333333334e-05, "loss": 0.067, "step": 12739 }, { "epoch": 14.061844284925456, "grad_norm": 0.6985329389572144, "learning_rate": 2.9088e-05, "loss": 0.0959, "step": 12740 }, { "epoch": 14.062948647156267, "grad_norm": 0.44610631465911865, "learning_rate": 
2.9087666666666666e-05, "loss": 0.0667, "step": 12741 }, { "epoch": 14.064053009387079, "grad_norm": 0.4325231909751892, "learning_rate": 2.9087333333333332e-05, "loss": 0.0559, "step": 12742 }, { "epoch": 14.065157371617891, "grad_norm": 0.23199427127838135, "learning_rate": 2.9087e-05, "loss": 0.0244, "step": 12743 }, { "epoch": 14.066261733848702, "grad_norm": 0.1915331482887268, "learning_rate": 2.9086666666666667e-05, "loss": 0.0162, "step": 12744 }, { "epoch": 14.067366096079514, "grad_norm": 0.1952289193868637, "learning_rate": 2.9086333333333333e-05, "loss": 0.0151, "step": 12745 }, { "epoch": 14.068470458310326, "grad_norm": 0.3896499574184418, "learning_rate": 2.9086000000000002e-05, "loss": 0.018, "step": 12746 }, { "epoch": 14.069574820541137, "grad_norm": 0.16263867914676666, "learning_rate": 2.9085666666666665e-05, "loss": 0.013, "step": 12747 }, { "epoch": 14.070679182771949, "grad_norm": 0.2433389127254486, "learning_rate": 2.9085333333333334e-05, "loss": 0.0341, "step": 12748 }, { "epoch": 14.071783545002761, "grad_norm": 0.19803836941719055, "learning_rate": 2.9085e-05, "loss": 0.0082, "step": 12749 }, { "epoch": 14.072887907233573, "grad_norm": 0.3511781394481659, "learning_rate": 2.9084666666666666e-05, "loss": 0.0117, "step": 12750 }, { "epoch": 14.073992269464384, "grad_norm": 0.17184554040431976, "learning_rate": 2.9084333333333335e-05, "loss": 0.0096, "step": 12751 }, { "epoch": 14.075096631695196, "grad_norm": 0.19695648550987244, "learning_rate": 2.9084e-05, "loss": 0.0175, "step": 12752 }, { "epoch": 14.076200993926008, "grad_norm": 0.4025076627731323, "learning_rate": 2.9083666666666667e-05, "loss": 0.0153, "step": 12753 }, { "epoch": 14.077305356156819, "grad_norm": 0.3542037010192871, "learning_rate": 2.9083333333333333e-05, "loss": 0.0096, "step": 12754 }, { "epoch": 14.078409718387631, "grad_norm": 0.31425508856773376, "learning_rate": 2.9083000000000003e-05, "loss": 0.0128, "step": 12755 }, { "epoch": 14.079514080618443, 
"grad_norm": 0.2871611416339874, "learning_rate": 2.9082666666666665e-05, "loss": 0.0084, "step": 12756 }, { "epoch": 14.080618442849255, "grad_norm": 0.26630568504333496, "learning_rate": 2.9082333333333334e-05, "loss": 0.0086, "step": 12757 }, { "epoch": 14.081722805080066, "grad_norm": 0.26937761902809143, "learning_rate": 2.9082e-05, "loss": 0.0121, "step": 12758 }, { "epoch": 14.082827167310878, "grad_norm": 0.37246301770210266, "learning_rate": 2.9081666666666666e-05, "loss": 0.0185, "step": 12759 }, { "epoch": 14.08393152954169, "grad_norm": 0.3995164930820465, "learning_rate": 2.9081333333333336e-05, "loss": 0.0066, "step": 12760 }, { "epoch": 14.0850358917725, "grad_norm": 0.11923690140247345, "learning_rate": 2.9081e-05, "loss": 0.0062, "step": 12761 }, { "epoch": 14.086140254003313, "grad_norm": 0.19307735562324524, "learning_rate": 2.9080666666666668e-05, "loss": 0.0083, "step": 12762 }, { "epoch": 14.087244616234125, "grad_norm": 0.13443470001220703, "learning_rate": 2.9080333333333333e-05, "loss": 0.0095, "step": 12763 }, { "epoch": 14.088348978464936, "grad_norm": 0.6870046854019165, "learning_rate": 2.9080000000000003e-05, "loss": 0.0225, "step": 12764 }, { "epoch": 14.089453340695748, "grad_norm": 0.22190497815608978, "learning_rate": 2.9079666666666665e-05, "loss": 0.008, "step": 12765 }, { "epoch": 14.09055770292656, "grad_norm": 0.22386434674263, "learning_rate": 2.9079333333333335e-05, "loss": 0.0071, "step": 12766 }, { "epoch": 14.091662065157372, "grad_norm": 0.16848124563694, "learning_rate": 2.9079e-05, "loss": 0.0097, "step": 12767 }, { "epoch": 14.092766427388183, "grad_norm": 0.17410670220851898, "learning_rate": 2.9078666666666667e-05, "loss": 0.008, "step": 12768 }, { "epoch": 14.093870789618995, "grad_norm": 0.2800268828868866, "learning_rate": 2.9078333333333336e-05, "loss": 0.01, "step": 12769 }, { "epoch": 14.094975151849807, "grad_norm": 0.2711072564125061, "learning_rate": 2.9078000000000002e-05, "loss": 0.0101, "step": 12770 }, 
{ "epoch": 14.096079514080618, "grad_norm": 0.3875312805175781, "learning_rate": 2.9077666666666668e-05, "loss": 0.0125, "step": 12771 }, { "epoch": 14.09718387631143, "grad_norm": 0.7897698879241943, "learning_rate": 2.9077333333333334e-05, "loss": 0.0126, "step": 12772 }, { "epoch": 14.098288238542242, "grad_norm": 0.17692844569683075, "learning_rate": 2.9077e-05, "loss": 0.0054, "step": 12773 }, { "epoch": 14.099392600773054, "grad_norm": 0.3139415681362152, "learning_rate": 2.9076666666666666e-05, "loss": 0.0073, "step": 12774 }, { "epoch": 14.100496963003865, "grad_norm": 0.5299177765846252, "learning_rate": 2.9076333333333335e-05, "loss": 0.0105, "step": 12775 }, { "epoch": 14.101601325234677, "grad_norm": 0.5101584196090698, "learning_rate": 2.9076e-05, "loss": 0.0093, "step": 12776 }, { "epoch": 14.10270568746549, "grad_norm": 0.2573026418685913, "learning_rate": 2.9075666666666667e-05, "loss": 0.0113, "step": 12777 }, { "epoch": 14.1038100496963, "grad_norm": 0.39815855026245117, "learning_rate": 2.9075333333333336e-05, "loss": 0.0105, "step": 12778 }, { "epoch": 14.104914411927112, "grad_norm": 0.34791329503059387, "learning_rate": 2.9075e-05, "loss": 0.0114, "step": 12779 }, { "epoch": 14.106018774157924, "grad_norm": 0.73140549659729, "learning_rate": 2.9074666666666668e-05, "loss": 0.0157, "step": 12780 }, { "epoch": 14.107123136388736, "grad_norm": 0.27580559253692627, "learning_rate": 2.9074333333333334e-05, "loss": 0.0064, "step": 12781 }, { "epoch": 14.108227498619547, "grad_norm": 0.16311530768871307, "learning_rate": 2.9074e-05, "loss": 0.0068, "step": 12782 }, { "epoch": 14.109331860850359, "grad_norm": 0.28191959857940674, "learning_rate": 2.9073666666666666e-05, "loss": 0.0098, "step": 12783 }, { "epoch": 14.110436223081171, "grad_norm": 0.18413153290748596, "learning_rate": 2.9073333333333335e-05, "loss": 0.0069, "step": 12784 }, { "epoch": 14.111540585311982, "grad_norm": 0.5368654727935791, "learning_rate": 2.9073e-05, "loss": 0.1911, 
"step": 12785 }, { "epoch": 14.112644947542794, "grad_norm": 0.5704762935638428, "learning_rate": 2.9072666666666667e-05, "loss": 0.1468, "step": 12786 }, { "epoch": 14.113749309773606, "grad_norm": 0.4590566158294678, "learning_rate": 2.9072333333333336e-05, "loss": 0.1155, "step": 12787 }, { "epoch": 14.114853672004417, "grad_norm": 0.4966399073600769, "learning_rate": 2.9072e-05, "loss": 0.0792, "step": 12788 }, { "epoch": 14.115958034235229, "grad_norm": 0.4192921817302704, "learning_rate": 2.9071666666666668e-05, "loss": 0.0983, "step": 12789 }, { "epoch": 14.117062396466041, "grad_norm": 0.49526941776275635, "learning_rate": 2.9071333333333334e-05, "loss": 0.0442, "step": 12790 }, { "epoch": 14.118166758696853, "grad_norm": 0.3496496379375458, "learning_rate": 2.9071e-05, "loss": 0.0393, "step": 12791 }, { "epoch": 14.119271120927664, "grad_norm": 0.26658421754837036, "learning_rate": 2.9070666666666666e-05, "loss": 0.0305, "step": 12792 }, { "epoch": 14.120375483158476, "grad_norm": 0.4153522849082947, "learning_rate": 2.9070333333333335e-05, "loss": 0.043, "step": 12793 }, { "epoch": 14.121479845389288, "grad_norm": 0.24407106637954712, "learning_rate": 2.907e-05, "loss": 0.0232, "step": 12794 }, { "epoch": 14.122584207620099, "grad_norm": 0.45098352432250977, "learning_rate": 2.9069666666666667e-05, "loss": 0.0359, "step": 12795 }, { "epoch": 14.123688569850911, "grad_norm": 0.32748714089393616, "learning_rate": 2.9069333333333336e-05, "loss": 0.0161, "step": 12796 }, { "epoch": 14.124792932081723, "grad_norm": 0.15243269503116608, "learning_rate": 2.9069e-05, "loss": 0.0091, "step": 12797 }, { "epoch": 14.125897294312535, "grad_norm": 0.22010278701782227, "learning_rate": 2.906866666666667e-05, "loss": 0.0066, "step": 12798 }, { "epoch": 14.127001656543346, "grad_norm": 0.30269789695739746, "learning_rate": 2.9068333333333334e-05, "loss": 0.0419, "step": 12799 }, { "epoch": 14.128106018774158, "grad_norm": 0.1881006956100464, "learning_rate": 2.9068e-05, 
"loss": 0.0122, "step": 12800 }, { "epoch": 14.12921038100497, "grad_norm": 0.17674750089645386, "learning_rate": 2.906766666666667e-05, "loss": 0.0093, "step": 12801 }, { "epoch": 14.13031474323578, "grad_norm": 0.5840697288513184, "learning_rate": 2.9067333333333332e-05, "loss": 0.0118, "step": 12802 }, { "epoch": 14.131419105466593, "grad_norm": 1.38908052444458, "learning_rate": 2.9067e-05, "loss": 0.012, "step": 12803 }, { "epoch": 14.132523467697405, "grad_norm": 0.1541135460138321, "learning_rate": 2.9066666666666667e-05, "loss": 0.0113, "step": 12804 }, { "epoch": 14.133627829928216, "grad_norm": 0.26409628987312317, "learning_rate": 2.9066333333333333e-05, "loss": 0.0119, "step": 12805 }, { "epoch": 14.134732192159028, "grad_norm": 0.2715623676776886, "learning_rate": 2.9066e-05, "loss": 0.0136, "step": 12806 }, { "epoch": 14.13583655438984, "grad_norm": 0.18779213726520538, "learning_rate": 2.906566666666667e-05, "loss": 0.0083, "step": 12807 }, { "epoch": 14.136940916620652, "grad_norm": 0.29650962352752686, "learning_rate": 2.906533333333333e-05, "loss": 0.0083, "step": 12808 }, { "epoch": 14.138045278851463, "grad_norm": 0.1935194730758667, "learning_rate": 2.9065e-05, "loss": 0.0063, "step": 12809 }, { "epoch": 14.139149641082275, "grad_norm": 0.6587238311767578, "learning_rate": 2.906466666666667e-05, "loss": 0.0121, "step": 12810 }, { "epoch": 14.140254003313087, "grad_norm": 0.1932135969400406, "learning_rate": 2.9064333333333332e-05, "loss": 0.0123, "step": 12811 }, { "epoch": 14.141358365543898, "grad_norm": 0.28587377071380615, "learning_rate": 2.9064e-05, "loss": 0.013, "step": 12812 }, { "epoch": 14.14246272777471, "grad_norm": 0.15203700959682465, "learning_rate": 2.9063666666666668e-05, "loss": 0.0082, "step": 12813 }, { "epoch": 14.143567090005522, "grad_norm": 0.15429699420928955, "learning_rate": 2.9063333333333333e-05, "loss": 0.0055, "step": 12814 }, { "epoch": 14.144671452236334, "grad_norm": 0.2707558274269104, "learning_rate": 
2.9063e-05, "loss": 0.008, "step": 12815 }, { "epoch": 14.145775814467145, "grad_norm": 0.2837885916233063, "learning_rate": 2.906266666666667e-05, "loss": 0.0101, "step": 12816 }, { "epoch": 14.146880176697957, "grad_norm": 0.18473456799983978, "learning_rate": 2.9062333333333335e-05, "loss": 0.0062, "step": 12817 }, { "epoch": 14.14798453892877, "grad_norm": 0.2945851981639862, "learning_rate": 2.9062e-05, "loss": 0.0098, "step": 12818 }, { "epoch": 14.14908890115958, "grad_norm": 0.1680598109960556, "learning_rate": 2.906166666666667e-05, "loss": 0.007, "step": 12819 }, { "epoch": 14.150193263390392, "grad_norm": 0.17765627801418304, "learning_rate": 2.9061333333333332e-05, "loss": 0.011, "step": 12820 }, { "epoch": 14.151297625621204, "grad_norm": 0.6008478403091431, "learning_rate": 2.9061000000000002e-05, "loss": 0.0155, "step": 12821 }, { "epoch": 14.152401987852015, "grad_norm": 0.4805828928947449, "learning_rate": 2.9060666666666668e-05, "loss": 0.0108, "step": 12822 }, { "epoch": 14.153506350082827, "grad_norm": 0.10229194909334183, "learning_rate": 2.9060333333333334e-05, "loss": 0.0041, "step": 12823 }, { "epoch": 14.154610712313639, "grad_norm": 0.4653175175189972, "learning_rate": 2.906e-05, "loss": 0.0112, "step": 12824 }, { "epoch": 14.155715074544451, "grad_norm": 0.5246871709823608, "learning_rate": 2.905966666666667e-05, "loss": 0.0116, "step": 12825 }, { "epoch": 14.156819436775262, "grad_norm": 0.29598966240882874, "learning_rate": 2.9059333333333335e-05, "loss": 0.0097, "step": 12826 }, { "epoch": 14.157923799006074, "grad_norm": 0.27451542019844055, "learning_rate": 2.9059e-05, "loss": 0.0119, "step": 12827 }, { "epoch": 14.159028161236886, "grad_norm": 0.15618924796581268, "learning_rate": 2.905866666666667e-05, "loss": 0.0056, "step": 12828 }, { "epoch": 14.160132523467697, "grad_norm": 0.3510308563709259, "learning_rate": 2.9058333333333333e-05, "loss": 0.0114, "step": 12829 }, { "epoch": 14.161236885698509, "grad_norm": 
0.4922899305820465, "learning_rate": 2.9058000000000002e-05, "loss": 0.0087, "step": 12830 }, { "epoch": 14.162341247929321, "grad_norm": 0.426270067691803, "learning_rate": 2.9057666666666668e-05, "loss": 0.0275, "step": 12831 }, { "epoch": 14.163445610160133, "grad_norm": 0.3177942633628845, "learning_rate": 2.9057333333333334e-05, "loss": 0.015, "step": 12832 }, { "epoch": 14.164549972390944, "grad_norm": 0.46751031279563904, "learning_rate": 2.9057e-05, "loss": 0.0108, "step": 12833 }, { "epoch": 14.165654334621756, "grad_norm": 0.5442547798156738, "learning_rate": 2.9056666666666666e-05, "loss": 0.0124, "step": 12834 }, { "epoch": 14.166758696852568, "grad_norm": 0.7491967082023621, "learning_rate": 2.9056333333333335e-05, "loss": 0.1922, "step": 12835 }, { "epoch": 14.167863059083379, "grad_norm": 0.6882505416870117, "learning_rate": 2.9056e-05, "loss": 0.1513, "step": 12836 }, { "epoch": 14.168967421314191, "grad_norm": 0.40586057305336, "learning_rate": 2.9055666666666667e-05, "loss": 0.0909, "step": 12837 }, { "epoch": 14.170071783545003, "grad_norm": 0.3187527656555176, "learning_rate": 2.9055333333333333e-05, "loss": 0.1093, "step": 12838 }, { "epoch": 14.171176145775814, "grad_norm": 0.4635256230831146, "learning_rate": 2.9055000000000002e-05, "loss": 0.0813, "step": 12839 }, { "epoch": 14.172280508006626, "grad_norm": 0.9805706143379211, "learning_rate": 2.9054666666666665e-05, "loss": 0.0757, "step": 12840 }, { "epoch": 14.173384870237438, "grad_norm": 0.23908165097236633, "learning_rate": 2.9054333333333334e-05, "loss": 0.0337, "step": 12841 }, { "epoch": 14.17448923246825, "grad_norm": 0.26820263266563416, "learning_rate": 2.9054e-05, "loss": 0.0583, "step": 12842 }, { "epoch": 14.17559359469906, "grad_norm": 0.18388617038726807, "learning_rate": 2.9053666666666666e-05, "loss": 0.0179, "step": 12843 }, { "epoch": 14.176697956929873, "grad_norm": 0.501579225063324, "learning_rate": 2.9053333333333335e-05, "loss": 0.0254, "step": 12844 }, { "epoch": 
14.177802319160685, "grad_norm": 0.19928516447544098, "learning_rate": 2.9053e-05, "loss": 0.009, "step": 12845 }, { "epoch": 14.178906681391496, "grad_norm": 0.17936891317367554, "learning_rate": 2.9052666666666667e-05, "loss": 0.0059, "step": 12846 }, { "epoch": 14.180011043622308, "grad_norm": 0.26998838782310486, "learning_rate": 2.9052333333333333e-05, "loss": 0.0119, "step": 12847 }, { "epoch": 14.18111540585312, "grad_norm": 0.3962465226650238, "learning_rate": 2.9052000000000002e-05, "loss": 0.0204, "step": 12848 }, { "epoch": 14.182219768083932, "grad_norm": 0.34767740964889526, "learning_rate": 2.9051666666666665e-05, "loss": 0.0089, "step": 12849 }, { "epoch": 14.183324130314743, "grad_norm": 0.275967001914978, "learning_rate": 2.9051333333333334e-05, "loss": 0.0094, "step": 12850 }, { "epoch": 14.184428492545555, "grad_norm": 0.15722179412841797, "learning_rate": 2.9051000000000004e-05, "loss": 0.0068, "step": 12851 }, { "epoch": 14.185532854776367, "grad_norm": 0.12266373634338379, "learning_rate": 2.9050666666666666e-05, "loss": 0.0051, "step": 12852 }, { "epoch": 14.186637217007178, "grad_norm": 0.2592793405056, "learning_rate": 2.9050333333333335e-05, "loss": 0.0106, "step": 12853 }, { "epoch": 14.18774157923799, "grad_norm": 0.6992338299751282, "learning_rate": 2.905e-05, "loss": 0.015, "step": 12854 }, { "epoch": 14.188845941468802, "grad_norm": 0.191041499376297, "learning_rate": 2.9049666666666667e-05, "loss": 0.0058, "step": 12855 }, { "epoch": 14.189950303699613, "grad_norm": 0.20445305109024048, "learning_rate": 2.9049333333333333e-05, "loss": 0.0082, "step": 12856 }, { "epoch": 14.191054665930425, "grad_norm": 0.2978125214576721, "learning_rate": 2.9049000000000003e-05, "loss": 0.0121, "step": 12857 }, { "epoch": 14.192159028161237, "grad_norm": 0.10955819487571716, "learning_rate": 2.9048666666666665e-05, "loss": 0.0032, "step": 12858 }, { "epoch": 14.19326339039205, "grad_norm": 0.19166560471057892, "learning_rate": 2.9048333333333334e-05, 
"loss": 0.0067, "step": 12859 }, { "epoch": 14.19436775262286, "grad_norm": 0.24147067964076996, "learning_rate": 2.9048000000000004e-05, "loss": 0.0125, "step": 12860 }, { "epoch": 14.195472114853672, "grad_norm": 0.2272801697254181, "learning_rate": 2.9047666666666666e-05, "loss": 0.0061, "step": 12861 }, { "epoch": 14.196576477084484, "grad_norm": 0.2901265323162079, "learning_rate": 2.9047333333333336e-05, "loss": 0.0115, "step": 12862 }, { "epoch": 14.197680839315295, "grad_norm": 0.16799336671829224, "learning_rate": 2.9046999999999998e-05, "loss": 0.0067, "step": 12863 }, { "epoch": 14.198785201546107, "grad_norm": 0.26511093974113464, "learning_rate": 2.9046666666666668e-05, "loss": 0.0073, "step": 12864 }, { "epoch": 14.19988956377692, "grad_norm": 0.2114565670490265, "learning_rate": 2.9046333333333333e-05, "loss": 0.0121, "step": 12865 }, { "epoch": 14.200993926007731, "grad_norm": 1.2864874601364136, "learning_rate": 2.9046e-05, "loss": 0.0192, "step": 12866 }, { "epoch": 14.202098288238542, "grad_norm": 0.4192396104335785, "learning_rate": 2.904566666666667e-05, "loss": 0.0062, "step": 12867 }, { "epoch": 14.203202650469354, "grad_norm": 0.31653231382369995, "learning_rate": 2.9045333333333335e-05, "loss": 0.0128, "step": 12868 }, { "epoch": 14.204307012700166, "grad_norm": 0.31370067596435547, "learning_rate": 2.9045e-05, "loss": 0.0244, "step": 12869 }, { "epoch": 14.205411374930977, "grad_norm": 0.15998893976211548, "learning_rate": 2.9044666666666667e-05, "loss": 0.0067, "step": 12870 }, { "epoch": 14.206515737161789, "grad_norm": 0.19393257796764374, "learning_rate": 2.9044333333333336e-05, "loss": 0.0085, "step": 12871 }, { "epoch": 14.207620099392601, "grad_norm": 0.24760539829730988, "learning_rate": 2.9044e-05, "loss": 0.0064, "step": 12872 }, { "epoch": 14.208724461623412, "grad_norm": 0.5662544369697571, "learning_rate": 2.9043666666666668e-05, "loss": 0.0187, "step": 12873 }, { "epoch": 14.209828823854224, "grad_norm": 0.7184682488441467, 
"learning_rate": 2.9043333333333334e-05, "loss": 0.0138, "step": 12874 }, { "epoch": 14.210933186085036, "grad_norm": 0.09280569851398468, "learning_rate": 2.9043e-05, "loss": 0.0043, "step": 12875 }, { "epoch": 14.212037548315848, "grad_norm": 0.4783534109592438, "learning_rate": 2.904266666666667e-05, "loss": 0.0085, "step": 12876 }, { "epoch": 14.213141910546659, "grad_norm": 0.7354757189750671, "learning_rate": 2.9042333333333335e-05, "loss": 0.0138, "step": 12877 }, { "epoch": 14.214246272777471, "grad_norm": 0.45352381467819214, "learning_rate": 2.9042e-05, "loss": 0.0169, "step": 12878 }, { "epoch": 14.215350635008283, "grad_norm": 0.4694053828716278, "learning_rate": 2.9041666666666667e-05, "loss": 0.0103, "step": 12879 }, { "epoch": 14.216454997239094, "grad_norm": 0.4213933050632477, "learning_rate": 2.9041333333333336e-05, "loss": 0.0097, "step": 12880 }, { "epoch": 14.217559359469906, "grad_norm": 0.7419649958610535, "learning_rate": 2.9041e-05, "loss": 0.0214, "step": 12881 }, { "epoch": 14.218663721700718, "grad_norm": 0.552142858505249, "learning_rate": 2.9040666666666668e-05, "loss": 0.0179, "step": 12882 }, { "epoch": 14.21976808393153, "grad_norm": 0.5177510380744934, "learning_rate": 2.9040333333333334e-05, "loss": 0.0092, "step": 12883 }, { "epoch": 14.22087244616234, "grad_norm": 0.6324915885925293, "learning_rate": 2.904e-05, "loss": 0.0111, "step": 12884 }, { "epoch": 14.221976808393153, "grad_norm": 0.705786406993866, "learning_rate": 2.903966666666667e-05, "loss": 0.2645, "step": 12885 }, { "epoch": 14.223081170623965, "grad_norm": 0.5889633893966675, "learning_rate": 2.9039333333333335e-05, "loss": 0.1158, "step": 12886 }, { "epoch": 14.224185532854776, "grad_norm": 0.8166938424110413, "learning_rate": 2.9039e-05, "loss": 0.1215, "step": 12887 }, { "epoch": 14.225289895085588, "grad_norm": 0.42439958453178406, "learning_rate": 2.9038666666666667e-05, "loss": 0.0831, "step": 12888 }, { "epoch": 14.2263942573164, "grad_norm": 
0.4952404201030731, "learning_rate": 2.9038333333333336e-05, "loss": 0.0917, "step": 12889 }, { "epoch": 14.22749861954721, "grad_norm": 0.4177365005016327, "learning_rate": 2.9038e-05, "loss": 0.0462, "step": 12890 }, { "epoch": 14.228602981778023, "grad_norm": 0.4053819179534912, "learning_rate": 2.9037666666666668e-05, "loss": 0.0384, "step": 12891 }, { "epoch": 14.229707344008835, "grad_norm": 0.21375016868114471, "learning_rate": 2.9037333333333334e-05, "loss": 0.0145, "step": 12892 }, { "epoch": 14.230811706239647, "grad_norm": 0.16927582025527954, "learning_rate": 2.9037e-05, "loss": 0.0241, "step": 12893 }, { "epoch": 14.231916068470458, "grad_norm": 0.37042826414108276, "learning_rate": 2.903666666666667e-05, "loss": 0.0445, "step": 12894 }, { "epoch": 14.23302043070127, "grad_norm": 0.3304500877857208, "learning_rate": 2.9036333333333332e-05, "loss": 0.0081, "step": 12895 }, { "epoch": 14.234124792932082, "grad_norm": 0.41631072759628296, "learning_rate": 2.9036e-05, "loss": 0.0163, "step": 12896 }, { "epoch": 14.235229155162893, "grad_norm": 0.2093351036310196, "learning_rate": 2.9035666666666667e-05, "loss": 0.0132, "step": 12897 }, { "epoch": 14.236333517393705, "grad_norm": 0.659674882888794, "learning_rate": 2.9035333333333333e-05, "loss": 0.018, "step": 12898 }, { "epoch": 14.237437879624517, "grad_norm": 0.20585249364376068, "learning_rate": 2.9035e-05, "loss": 0.0108, "step": 12899 }, { "epoch": 14.23854224185533, "grad_norm": 1.0350900888442993, "learning_rate": 2.903466666666667e-05, "loss": 0.0263, "step": 12900 }, { "epoch": 14.23964660408614, "grad_norm": 0.5996382236480713, "learning_rate": 2.9034333333333334e-05, "loss": 0.0196, "step": 12901 }, { "epoch": 14.240750966316952, "grad_norm": 0.340002179145813, "learning_rate": 2.9034e-05, "loss": 0.01, "step": 12902 }, { "epoch": 14.241855328547764, "grad_norm": 0.29207319021224976, "learning_rate": 2.903366666666667e-05, "loss": 0.0091, "step": 12903 }, { "epoch": 14.242959690778575, 
"grad_norm": 0.25558382272720337, "learning_rate": 2.9033333333333332e-05, "loss": 0.0117, "step": 12904 }, { "epoch": 14.244064053009387, "grad_norm": 0.4481947124004364, "learning_rate": 2.9033e-05, "loss": 0.0124, "step": 12905 }, { "epoch": 14.2451684152402, "grad_norm": 0.1579609513282776, "learning_rate": 2.9032666666666667e-05, "loss": 0.0092, "step": 12906 }, { "epoch": 14.24627277747101, "grad_norm": 0.5042552947998047, "learning_rate": 2.9032333333333333e-05, "loss": 0.0124, "step": 12907 }, { "epoch": 14.247377139701822, "grad_norm": 0.22657835483551025, "learning_rate": 2.9032e-05, "loss": 0.0083, "step": 12908 }, { "epoch": 14.248481501932634, "grad_norm": 0.37926897406578064, "learning_rate": 2.903166666666667e-05, "loss": 0.0104, "step": 12909 }, { "epoch": 14.249585864163446, "grad_norm": 0.49966961145401, "learning_rate": 2.9031333333333334e-05, "loss": 0.0892, "step": 12910 }, { "epoch": 14.250690226394257, "grad_norm": 0.2508901357650757, "learning_rate": 2.9031e-05, "loss": 0.0118, "step": 12911 }, { "epoch": 14.251794588625069, "grad_norm": 0.5237796306610107, "learning_rate": 2.903066666666667e-05, "loss": 0.0116, "step": 12912 }, { "epoch": 14.252898950855881, "grad_norm": 0.23338694870471954, "learning_rate": 2.9030333333333332e-05, "loss": 0.01, "step": 12913 }, { "epoch": 14.254003313086692, "grad_norm": 0.32452312111854553, "learning_rate": 2.903e-05, "loss": 0.0088, "step": 12914 }, { "epoch": 14.255107675317504, "grad_norm": 0.14450986683368683, "learning_rate": 2.9029666666666668e-05, "loss": 0.0086, "step": 12915 }, { "epoch": 14.256212037548316, "grad_norm": 0.4784190356731415, "learning_rate": 2.9029333333333333e-05, "loss": 0.0181, "step": 12916 }, { "epoch": 14.257316399779128, "grad_norm": 0.6421703696250916, "learning_rate": 2.9029000000000003e-05, "loss": 0.0147, "step": 12917 }, { "epoch": 14.258420762009939, "grad_norm": 0.3004436194896698, "learning_rate": 2.902866666666667e-05, "loss": 0.0115, "step": 12918 }, { "epoch": 
14.259525124240751, "grad_norm": 0.17745976150035858, "learning_rate": 2.9028333333333335e-05, "loss": 0.042, "step": 12919 }, { "epoch": 14.260629486471563, "grad_norm": 0.20263783633708954, "learning_rate": 2.9028e-05, "loss": 0.0085, "step": 12920 }, { "epoch": 14.261733848702374, "grad_norm": 0.20974938571453094, "learning_rate": 2.902766666666667e-05, "loss": 0.0118, "step": 12921 }, { "epoch": 14.262838210933186, "grad_norm": 0.49632692337036133, "learning_rate": 2.9027333333333332e-05, "loss": 0.0142, "step": 12922 }, { "epoch": 14.263942573163998, "grad_norm": 0.4467476010322571, "learning_rate": 2.9027000000000002e-05, "loss": 0.0062, "step": 12923 }, { "epoch": 14.26504693539481, "grad_norm": 0.4230256676673889, "learning_rate": 2.9026666666666664e-05, "loss": 0.0133, "step": 12924 }, { "epoch": 14.26615129762562, "grad_norm": 0.7990211844444275, "learning_rate": 2.9026333333333334e-05, "loss": 0.0101, "step": 12925 }, { "epoch": 14.267255659856433, "grad_norm": 0.2666972577571869, "learning_rate": 2.9026000000000003e-05, "loss": 0.0085, "step": 12926 }, { "epoch": 14.268360022087245, "grad_norm": 0.2590925693511963, "learning_rate": 2.9025666666666666e-05, "loss": 0.0127, "step": 12927 }, { "epoch": 14.269464384318056, "grad_norm": 0.23344048857688904, "learning_rate": 2.9025333333333335e-05, "loss": 0.0106, "step": 12928 }, { "epoch": 14.270568746548868, "grad_norm": 0.24144664406776428, "learning_rate": 2.9025e-05, "loss": 0.0148, "step": 12929 }, { "epoch": 14.27167310877968, "grad_norm": 0.23822705447673798, "learning_rate": 2.9024666666666667e-05, "loss": 0.0107, "step": 12930 }, { "epoch": 14.27277747101049, "grad_norm": 0.3801937401294708, "learning_rate": 2.9024333333333333e-05, "loss": 0.0142, "step": 12931 }, { "epoch": 14.273881833241303, "grad_norm": 0.7328447103500366, "learning_rate": 2.9024000000000002e-05, "loss": 0.0169, "step": 12932 }, { "epoch": 14.274986195472115, "grad_norm": 0.34932613372802734, "learning_rate": 
2.9023666666666665e-05, "loss": 0.0095, "step": 12933 }, { "epoch": 14.276090557702927, "grad_norm": 0.7613198161125183, "learning_rate": 2.9023333333333334e-05, "loss": 0.0173, "step": 12934 }, { "epoch": 14.277194919933738, "grad_norm": 0.6063741445541382, "learning_rate": 2.9023000000000003e-05, "loss": 0.1679, "step": 12935 }, { "epoch": 14.27829928216455, "grad_norm": 0.4732435643672943, "learning_rate": 2.9022666666666666e-05, "loss": 0.0998, "step": 12936 }, { "epoch": 14.279403644395362, "grad_norm": 0.584374725818634, "learning_rate": 2.9022333333333335e-05, "loss": 0.09, "step": 12937 }, { "epoch": 14.280508006626173, "grad_norm": 0.4379066824913025, "learning_rate": 2.9022e-05, "loss": 0.0857, "step": 12938 }, { "epoch": 14.281612368856985, "grad_norm": 0.5425577163696289, "learning_rate": 2.9021666666666667e-05, "loss": 0.0861, "step": 12939 }, { "epoch": 14.282716731087797, "grad_norm": 0.5405992865562439, "learning_rate": 2.9021333333333333e-05, "loss": 0.1169, "step": 12940 }, { "epoch": 14.283821093318608, "grad_norm": 0.3527369797229767, "learning_rate": 2.9021000000000002e-05, "loss": 0.0602, "step": 12941 }, { "epoch": 14.28492545554942, "grad_norm": 0.3340982496738434, "learning_rate": 2.9020666666666668e-05, "loss": 0.0438, "step": 12942 }, { "epoch": 14.286029817780232, "grad_norm": 0.24582593142986298, "learning_rate": 2.9020333333333334e-05, "loss": 0.0186, "step": 12943 }, { "epoch": 14.287134180011044, "grad_norm": 0.2644514739513397, "learning_rate": 2.9020000000000003e-05, "loss": 0.0197, "step": 12944 }, { "epoch": 14.288238542241855, "grad_norm": 0.35950058698654175, "learning_rate": 2.9019666666666666e-05, "loss": 0.0198, "step": 12945 }, { "epoch": 14.289342904472667, "grad_norm": 0.1845829337835312, "learning_rate": 2.9019333333333335e-05, "loss": 0.0102, "step": 12946 }, { "epoch": 14.29044726670348, "grad_norm": 0.8536201119422913, "learning_rate": 2.9019e-05, "loss": 0.0195, "step": 12947 }, { "epoch": 14.29155162893429, 
"grad_norm": 0.24458540976047516, "learning_rate": 2.9018666666666667e-05, "loss": 0.0113, "step": 12948 }, { "epoch": 14.292655991165102, "grad_norm": 0.22009897232055664, "learning_rate": 2.9018333333333333e-05, "loss": 0.0123, "step": 12949 }, { "epoch": 14.293760353395914, "grad_norm": 0.1938943862915039, "learning_rate": 2.9018000000000002e-05, "loss": 0.0119, "step": 12950 }, { "epoch": 14.294864715626726, "grad_norm": 0.26017701625823975, "learning_rate": 2.9017666666666668e-05, "loss": 0.0078, "step": 12951 }, { "epoch": 14.295969077857537, "grad_norm": 0.351795494556427, "learning_rate": 2.9017333333333334e-05, "loss": 0.0201, "step": 12952 }, { "epoch": 14.297073440088349, "grad_norm": 0.2447156459093094, "learning_rate": 2.9017e-05, "loss": 0.0137, "step": 12953 }, { "epoch": 14.298177802319161, "grad_norm": 1.3548147678375244, "learning_rate": 2.9016666666666666e-05, "loss": 0.0084, "step": 12954 }, { "epoch": 14.299282164549972, "grad_norm": 0.3548238277435303, "learning_rate": 2.9016333333333335e-05, "loss": 0.0119, "step": 12955 }, { "epoch": 14.300386526780784, "grad_norm": 0.6530974507331848, "learning_rate": 2.9015999999999998e-05, "loss": 0.009, "step": 12956 }, { "epoch": 14.301490889011596, "grad_norm": 0.6859525442123413, "learning_rate": 2.9015666666666667e-05, "loss": 0.0129, "step": 12957 }, { "epoch": 14.302595251242408, "grad_norm": 0.8394780158996582, "learning_rate": 2.9015333333333333e-05, "loss": 0.0121, "step": 12958 }, { "epoch": 14.303699613473219, "grad_norm": 0.3727531433105469, "learning_rate": 2.9015e-05, "loss": 0.0103, "step": 12959 }, { "epoch": 14.304803975704031, "grad_norm": 0.3382096290588379, "learning_rate": 2.901466666666667e-05, "loss": 0.0135, "step": 12960 }, { "epoch": 14.305908337934843, "grad_norm": 0.43392235040664673, "learning_rate": 2.9014333333333334e-05, "loss": 0.0105, "step": 12961 }, { "epoch": 14.307012700165654, "grad_norm": 0.7593534588813782, "learning_rate": 2.9014e-05, "loss": 0.0218, "step": 
12962 }, { "epoch": 14.308117062396466, "grad_norm": 0.5150559544563293, "learning_rate": 2.9013666666666666e-05, "loss": 0.0164, "step": 12963 }, { "epoch": 14.309221424627278, "grad_norm": 0.12011437863111496, "learning_rate": 2.9013333333333336e-05, "loss": 0.0069, "step": 12964 }, { "epoch": 14.310325786858089, "grad_norm": 0.35951411724090576, "learning_rate": 2.9012999999999998e-05, "loss": 0.0249, "step": 12965 }, { "epoch": 14.3114301490889, "grad_norm": 0.44967710971832275, "learning_rate": 2.9012666666666668e-05, "loss": 0.0151, "step": 12966 }, { "epoch": 14.312534511319713, "grad_norm": 0.3993227183818817, "learning_rate": 2.9012333333333337e-05, "loss": 0.0241, "step": 12967 }, { "epoch": 14.313638873550525, "grad_norm": 0.396727979183197, "learning_rate": 2.9012e-05, "loss": 0.0163, "step": 12968 }, { "epoch": 14.314743235781336, "grad_norm": 0.31790754199028015, "learning_rate": 2.901166666666667e-05, "loss": 0.0155, "step": 12969 }, { "epoch": 14.315847598012148, "grad_norm": 0.1863597184419632, "learning_rate": 2.9011333333333335e-05, "loss": 0.0092, "step": 12970 }, { "epoch": 14.31695196024296, "grad_norm": 0.588062047958374, "learning_rate": 2.9011e-05, "loss": 0.0145, "step": 12971 }, { "epoch": 14.31805632247377, "grad_norm": 0.20931826531887054, "learning_rate": 2.9010666666666667e-05, "loss": 0.009, "step": 12972 }, { "epoch": 14.319160684704583, "grad_norm": 0.2556565999984741, "learning_rate": 2.9010333333333336e-05, "loss": 0.0088, "step": 12973 }, { "epoch": 14.320265046935395, "grad_norm": 0.22816665470600128, "learning_rate": 2.901e-05, "loss": 0.0096, "step": 12974 }, { "epoch": 14.321369409166207, "grad_norm": 0.40123608708381653, "learning_rate": 2.9009666666666668e-05, "loss": 0.0141, "step": 12975 }, { "epoch": 14.322473771397018, "grad_norm": 0.2688629627227783, "learning_rate": 2.9009333333333337e-05, "loss": 0.0157, "step": 12976 }, { "epoch": 14.32357813362783, "grad_norm": 0.3912251889705658, "learning_rate": 2.9009e-05, 
"loss": 0.0181, "step": 12977 }, { "epoch": 14.324682495858642, "grad_norm": 0.41387608647346497, "learning_rate": 2.900866666666667e-05, "loss": 0.0144, "step": 12978 }, { "epoch": 14.325786858089453, "grad_norm": 0.25049006938934326, "learning_rate": 2.9008333333333335e-05, "loss": 0.0133, "step": 12979 }, { "epoch": 14.326891220320265, "grad_norm": 0.2781620919704437, "learning_rate": 2.9008e-05, "loss": 0.0136, "step": 12980 }, { "epoch": 14.327995582551077, "grad_norm": 0.3131557106971741, "learning_rate": 2.9007666666666667e-05, "loss": 0.0099, "step": 12981 }, { "epoch": 14.329099944781888, "grad_norm": 0.6341931819915771, "learning_rate": 2.9007333333333336e-05, "loss": 0.0251, "step": 12982 }, { "epoch": 14.3302043070127, "grad_norm": 1.419873833656311, "learning_rate": 2.9007e-05, "loss": 0.0054, "step": 12983 }, { "epoch": 14.331308669243512, "grad_norm": 0.25850480794906616, "learning_rate": 2.9006666666666668e-05, "loss": 0.0127, "step": 12984 }, { "epoch": 14.332413031474324, "grad_norm": 0.5603000521659851, "learning_rate": 2.9006333333333334e-05, "loss": 0.1612, "step": 12985 }, { "epoch": 14.333517393705135, "grad_norm": 1.0464287996292114, "learning_rate": 2.9006e-05, "loss": 0.1267, "step": 12986 }, { "epoch": 14.334621755935947, "grad_norm": 0.7856035828590393, "learning_rate": 2.900566666666667e-05, "loss": 0.1094, "step": 12987 }, { "epoch": 14.33572611816676, "grad_norm": 0.5157106518745422, "learning_rate": 2.900533333333333e-05, "loss": 0.0801, "step": 12988 }, { "epoch": 14.33683048039757, "grad_norm": 0.4588248133659363, "learning_rate": 2.9005e-05, "loss": 0.0597, "step": 12989 }, { "epoch": 14.337934842628382, "grad_norm": 0.5407727360725403, "learning_rate": 2.9004666666666667e-05, "loss": 0.0523, "step": 12990 }, { "epoch": 14.339039204859194, "grad_norm": 0.4275543689727783, "learning_rate": 2.9004333333333333e-05, "loss": 0.0553, "step": 12991 }, { "epoch": 14.340143567090006, "grad_norm": 0.18741007149219513, "learning_rate": 
2.9004000000000002e-05, "loss": 0.0293, "step": 12992 }, { "epoch": 14.341247929320817, "grad_norm": 0.40561529994010925, "learning_rate": 2.9003666666666668e-05, "loss": 0.0264, "step": 12993 }, { "epoch": 14.342352291551629, "grad_norm": 0.546001672744751, "learning_rate": 2.9003333333333334e-05, "loss": 0.0299, "step": 12994 }, { "epoch": 14.343456653782441, "grad_norm": 0.21438421308994293, "learning_rate": 2.9003e-05, "loss": 0.0138, "step": 12995 }, { "epoch": 14.344561016013252, "grad_norm": 0.20357121527194977, "learning_rate": 2.900266666666667e-05, "loss": 0.0132, "step": 12996 }, { "epoch": 14.345665378244064, "grad_norm": 0.3845353424549103, "learning_rate": 2.9002333333333332e-05, "loss": 0.0173, "step": 12997 }, { "epoch": 14.346769740474876, "grad_norm": 0.256473571062088, "learning_rate": 2.9002e-05, "loss": 0.03, "step": 12998 }, { "epoch": 14.347874102705687, "grad_norm": 0.285268098115921, "learning_rate": 2.9001666666666667e-05, "loss": 0.0119, "step": 12999 }, { "epoch": 14.348978464936499, "grad_norm": 0.25918570160865784, "learning_rate": 2.9001333333333333e-05, "loss": 0.0116, "step": 13000 }, { "epoch": 14.348978464936499, "eval_cer": 0.11265780128055577, "eval_loss": 0.30532321333885193, "eval_runtime": 15.5049, "eval_samples_per_second": 19.607, "eval_steps_per_second": 0.645, "eval_wer": 0.3904451266308519, "step": 13000 }, { "epoch": 14.350082827167311, "grad_norm": 0.49976930022239685, "learning_rate": 2.9001000000000002e-05, "loss": 0.0072, "step": 13001 }, { "epoch": 14.351187189398123, "grad_norm": 0.19067281484603882, "learning_rate": 2.9000666666666668e-05, "loss": 0.01, "step": 13002 }, { "epoch": 14.352291551628934, "grad_norm": 0.24120093882083893, "learning_rate": 2.9000333333333334e-05, "loss": 0.0095, "step": 13003 }, { "epoch": 14.353395913859746, "grad_norm": 0.36333179473876953, "learning_rate": 2.9e-05, "loss": 0.0107, "step": 13004 }, { "epoch": 14.354500276090558, "grad_norm": 0.6227831840515137, "learning_rate": 
2.899966666666667e-05, "loss": 0.0198, "step": 13005 }, { "epoch": 14.355604638321369, "grad_norm": 0.5846378207206726, "learning_rate": 2.8999333333333332e-05, "loss": 0.0369, "step": 13006 }, { "epoch": 14.356709000552181, "grad_norm": 0.24866339564323425, "learning_rate": 2.8999e-05, "loss": 0.0108, "step": 13007 }, { "epoch": 14.357813362782993, "grad_norm": 0.3378641605377197, "learning_rate": 2.8998666666666667e-05, "loss": 0.0105, "step": 13008 }, { "epoch": 14.358917725013805, "grad_norm": 0.9401803612709045, "learning_rate": 2.8998333333333333e-05, "loss": 0.0295, "step": 13009 }, { "epoch": 14.360022087244616, "grad_norm": 0.4006376564502716, "learning_rate": 2.8998000000000003e-05, "loss": 0.0091, "step": 13010 }, { "epoch": 14.361126449475428, "grad_norm": 0.11877135187387466, "learning_rate": 2.899766666666667e-05, "loss": 0.0072, "step": 13011 }, { "epoch": 14.36223081170624, "grad_norm": 0.2567169666290283, "learning_rate": 2.8997333333333334e-05, "loss": 0.0147, "step": 13012 }, { "epoch": 14.36333517393705, "grad_norm": 0.5181413888931274, "learning_rate": 2.8997e-05, "loss": 0.0245, "step": 13013 }, { "epoch": 14.364439536167863, "grad_norm": 0.2423935979604721, "learning_rate": 2.8996666666666666e-05, "loss": 0.0104, "step": 13014 }, { "epoch": 14.365543898398675, "grad_norm": 0.17347879707813263, "learning_rate": 2.8996333333333332e-05, "loss": 0.0086, "step": 13015 }, { "epoch": 14.366648260629486, "grad_norm": 0.4272896945476532, "learning_rate": 2.8996e-05, "loss": 0.0135, "step": 13016 }, { "epoch": 14.367752622860298, "grad_norm": 0.6404370069503784, "learning_rate": 2.8995666666666667e-05, "loss": 0.0125, "step": 13017 }, { "epoch": 14.36885698509111, "grad_norm": 0.17962516844272614, "learning_rate": 2.8995333333333333e-05, "loss": 0.0093, "step": 13018 }, { "epoch": 14.369961347321922, "grad_norm": 0.6819047927856445, "learning_rate": 2.8995000000000003e-05, "loss": 0.011, "step": 13019 }, { "epoch": 14.371065709552733, "grad_norm": 
0.28581804037094116, "learning_rate": 2.8994666666666665e-05, "loss": 0.0113, "step": 13020 }, { "epoch": 14.372170071783545, "grad_norm": 0.356425404548645, "learning_rate": 2.8994333333333335e-05, "loss": 0.0159, "step": 13021 }, { "epoch": 14.373274434014357, "grad_norm": 0.21084420382976532, "learning_rate": 2.8994e-05, "loss": 0.0145, "step": 13022 }, { "epoch": 14.374378796245168, "grad_norm": 0.17331388592720032, "learning_rate": 2.8993666666666667e-05, "loss": 0.0113, "step": 13023 }, { "epoch": 14.37548315847598, "grad_norm": 0.2786846458911896, "learning_rate": 2.8993333333333332e-05, "loss": 0.0084, "step": 13024 }, { "epoch": 14.376587520706792, "grad_norm": 0.2686145603656769, "learning_rate": 2.8993000000000002e-05, "loss": 0.0084, "step": 13025 }, { "epoch": 14.377691882937604, "grad_norm": 0.08212971687316895, "learning_rate": 2.8992666666666668e-05, "loss": 0.0038, "step": 13026 }, { "epoch": 14.378796245168415, "grad_norm": 0.639206051826477, "learning_rate": 2.8992333333333334e-05, "loss": 0.0123, "step": 13027 }, { "epoch": 14.379900607399227, "grad_norm": 0.3328288197517395, "learning_rate": 2.8992000000000003e-05, "loss": 0.0111, "step": 13028 }, { "epoch": 14.38100496963004, "grad_norm": 0.3886515498161316, "learning_rate": 2.8991666666666666e-05, "loss": 0.0111, "step": 13029 }, { "epoch": 14.38210933186085, "grad_norm": 0.1962880790233612, "learning_rate": 2.8991333333333335e-05, "loss": 0.0087, "step": 13030 }, { "epoch": 14.383213694091662, "grad_norm": 0.27536362409591675, "learning_rate": 2.8991e-05, "loss": 0.0099, "step": 13031 }, { "epoch": 14.384318056322474, "grad_norm": 0.4016614556312561, "learning_rate": 2.8990666666666667e-05, "loss": 0.0118, "step": 13032 }, { "epoch": 14.385422418553285, "grad_norm": 0.6287330985069275, "learning_rate": 2.8990333333333333e-05, "loss": 0.0182, "step": 13033 }, { "epoch": 14.386526780784097, "grad_norm": 0.28980717062950134, "learning_rate": 2.8990000000000002e-05, "loss": 0.0115, "step": 13034 
}, { "epoch": 14.387631143014909, "grad_norm": 0.7942038178443909, "learning_rate": 2.8989666666666668e-05, "loss": 0.1787, "step": 13035 }, { "epoch": 14.388735505245721, "grad_norm": 0.57025545835495, "learning_rate": 2.8989333333333334e-05, "loss": 0.1419, "step": 13036 }, { "epoch": 14.389839867476532, "grad_norm": 0.39373779296875, "learning_rate": 2.8989000000000003e-05, "loss": 0.0828, "step": 13037 }, { "epoch": 14.390944229707344, "grad_norm": 0.4619613587856293, "learning_rate": 2.8988666666666666e-05, "loss": 0.0725, "step": 13038 }, { "epoch": 14.392048591938156, "grad_norm": 0.8308469653129578, "learning_rate": 2.8988333333333335e-05, "loss": 0.1084, "step": 13039 }, { "epoch": 14.393152954168967, "grad_norm": 0.6661549806594849, "learning_rate": 2.8988e-05, "loss": 0.074, "step": 13040 }, { "epoch": 14.394257316399779, "grad_norm": 0.5909545421600342, "learning_rate": 2.8987666666666667e-05, "loss": 0.0496, "step": 13041 }, { "epoch": 14.395361678630591, "grad_norm": 0.5564236640930176, "learning_rate": 2.8987333333333336e-05, "loss": 0.0403, "step": 13042 }, { "epoch": 14.396466040861403, "grad_norm": 0.42686930298805237, "learning_rate": 2.8987000000000002e-05, "loss": 0.039, "step": 13043 }, { "epoch": 14.397570403092214, "grad_norm": 0.3160612881183624, "learning_rate": 2.8986666666666668e-05, "loss": 0.0195, "step": 13044 }, { "epoch": 14.398674765323026, "grad_norm": 0.31465205550193787, "learning_rate": 2.8986333333333334e-05, "loss": 0.0193, "step": 13045 }, { "epoch": 14.399779127553838, "grad_norm": 0.1893758624792099, "learning_rate": 2.8986e-05, "loss": 0.0117, "step": 13046 }, { "epoch": 14.400883489784649, "grad_norm": 0.3408186733722687, "learning_rate": 2.8985666666666666e-05, "loss": 0.0144, "step": 13047 }, { "epoch": 14.401987852015461, "grad_norm": 0.3185649812221527, "learning_rate": 2.8985333333333335e-05, "loss": 0.0078, "step": 13048 }, { "epoch": 14.403092214246273, "grad_norm": 0.17435187101364136, "learning_rate": 
2.8984999999999998e-05, "loss": 0.0103, "step": 13049 }, { "epoch": 14.404196576477084, "grad_norm": 0.2520384192466736, "learning_rate": 2.8984666666666667e-05, "loss": 0.0179, "step": 13050 }, { "epoch": 14.405300938707896, "grad_norm": 0.202900692820549, "learning_rate": 2.8984333333333336e-05, "loss": 0.0106, "step": 13051 }, { "epoch": 14.406405300938708, "grad_norm": 0.4968835413455963, "learning_rate": 2.8984e-05, "loss": 0.0111, "step": 13052 }, { "epoch": 14.40750966316952, "grad_norm": 0.12912216782569885, "learning_rate": 2.8983666666666668e-05, "loss": 0.0097, "step": 13053 }, { "epoch": 14.40861402540033, "grad_norm": 0.28273773193359375, "learning_rate": 2.8983333333333334e-05, "loss": 0.0086, "step": 13054 }, { "epoch": 14.409718387631143, "grad_norm": 0.31164321303367615, "learning_rate": 2.8983e-05, "loss": 0.0187, "step": 13055 }, { "epoch": 14.410822749861955, "grad_norm": 0.23347991704940796, "learning_rate": 2.8982666666666666e-05, "loss": 0.0077, "step": 13056 }, { "epoch": 14.411927112092766, "grad_norm": 0.7824956178665161, "learning_rate": 2.8982333333333335e-05, "loss": 0.0135, "step": 13057 }, { "epoch": 14.413031474323578, "grad_norm": 0.11725952476263046, "learning_rate": 2.8981999999999998e-05, "loss": 0.0039, "step": 13058 }, { "epoch": 14.41413583655439, "grad_norm": 0.41367053985595703, "learning_rate": 2.8981666666666667e-05, "loss": 0.0127, "step": 13059 }, { "epoch": 14.415240198785202, "grad_norm": 0.20921550691127777, "learning_rate": 2.8981333333333337e-05, "loss": 0.014, "step": 13060 }, { "epoch": 14.416344561016013, "grad_norm": 0.4700057804584503, "learning_rate": 2.8981e-05, "loss": 0.0084, "step": 13061 }, { "epoch": 14.417448923246825, "grad_norm": 0.19253380596637726, "learning_rate": 2.898066666666667e-05, "loss": 0.0081, "step": 13062 }, { "epoch": 14.418553285477637, "grad_norm": 0.24611756205558777, "learning_rate": 2.8980333333333334e-05, "loss": 0.006, "step": 13063 }, { "epoch": 14.419657647708448, "grad_norm": 
0.16540290415287018, "learning_rate": 2.898e-05, "loss": 0.0053, "step": 13064 }, { "epoch": 14.42076200993926, "grad_norm": 0.24571792781352997, "learning_rate": 2.8979666666666666e-05, "loss": 0.0108, "step": 13065 }, { "epoch": 14.421866372170072, "grad_norm": 0.2563283145427704, "learning_rate": 2.8979333333333336e-05, "loss": 0.0075, "step": 13066 }, { "epoch": 14.422970734400884, "grad_norm": 0.14396144449710846, "learning_rate": 2.8979e-05, "loss": 0.0129, "step": 13067 }, { "epoch": 14.424075096631695, "grad_norm": 0.23760156333446503, "learning_rate": 2.8978666666666667e-05, "loss": 0.0196, "step": 13068 }, { "epoch": 14.425179458862507, "grad_norm": 0.2962162494659424, "learning_rate": 2.8978333333333337e-05, "loss": 0.0201, "step": 13069 }, { "epoch": 14.42628382109332, "grad_norm": 0.20757976174354553, "learning_rate": 2.8978e-05, "loss": 0.0072, "step": 13070 }, { "epoch": 14.42738818332413, "grad_norm": 0.3912217617034912, "learning_rate": 2.897766666666667e-05, "loss": 0.0157, "step": 13071 }, { "epoch": 14.428492545554942, "grad_norm": 0.1665491759777069, "learning_rate": 2.8977333333333335e-05, "loss": 0.0074, "step": 13072 }, { "epoch": 14.429596907785754, "grad_norm": 0.17301268875598907, "learning_rate": 2.8977e-05, "loss": 0.0102, "step": 13073 }, { "epoch": 14.430701270016565, "grad_norm": 0.2321164757013321, "learning_rate": 2.8976666666666666e-05, "loss": 0.0115, "step": 13074 }, { "epoch": 14.431805632247377, "grad_norm": 0.24095311760902405, "learning_rate": 2.8976333333333332e-05, "loss": 0.0077, "step": 13075 }, { "epoch": 14.43290999447819, "grad_norm": 0.3808751404285431, "learning_rate": 2.8976000000000002e-05, "loss": 0.014, "step": 13076 }, { "epoch": 14.434014356709001, "grad_norm": 0.16840329766273499, "learning_rate": 2.8975666666666668e-05, "loss": 0.0082, "step": 13077 }, { "epoch": 14.435118718939812, "grad_norm": 0.2977524697780609, "learning_rate": 2.8975333333333334e-05, "loss": 0.0175, "step": 13078 }, { "epoch": 
14.436223081170624, "grad_norm": 0.3275238871574402, "learning_rate": 2.8975e-05, "loss": 0.0089, "step": 13079 }, { "epoch": 14.437327443401436, "grad_norm": 0.2523486316204071, "learning_rate": 2.897466666666667e-05, "loss": 0.0107, "step": 13080 }, { "epoch": 14.438431805632247, "grad_norm": 0.5288494825363159, "learning_rate": 2.897433333333333e-05, "loss": 0.009, "step": 13081 }, { "epoch": 14.439536167863059, "grad_norm": 0.2527138292789459, "learning_rate": 2.8974e-05, "loss": 0.0126, "step": 13082 }, { "epoch": 14.440640530093871, "grad_norm": 1.037704586982727, "learning_rate": 2.8973666666666667e-05, "loss": 0.0205, "step": 13083 }, { "epoch": 14.441744892324682, "grad_norm": 0.31112027168273926, "learning_rate": 2.8973333333333333e-05, "loss": 0.0096, "step": 13084 }, { "epoch": 14.442849254555494, "grad_norm": 0.7125031352043152, "learning_rate": 2.8973000000000002e-05, "loss": 0.1469, "step": 13085 }, { "epoch": 14.443953616786306, "grad_norm": 0.5648980140686035, "learning_rate": 2.8972666666666668e-05, "loss": 0.1241, "step": 13086 }, { "epoch": 14.445057979017118, "grad_norm": 0.5145577788352966, "learning_rate": 2.8972333333333334e-05, "loss": 0.1329, "step": 13087 }, { "epoch": 14.446162341247929, "grad_norm": 0.5268210172653198, "learning_rate": 2.8972e-05, "loss": 0.0651, "step": 13088 }, { "epoch": 14.447266703478741, "grad_norm": 0.6387264728546143, "learning_rate": 2.897166666666667e-05, "loss": 0.0949, "step": 13089 }, { "epoch": 14.448371065709553, "grad_norm": 0.4422840476036072, "learning_rate": 2.897133333333333e-05, "loss": 0.0739, "step": 13090 }, { "epoch": 14.449475427940364, "grad_norm": 0.6109357476234436, "learning_rate": 2.8971e-05, "loss": 0.0684, "step": 13091 }, { "epoch": 14.450579790171176, "grad_norm": 0.5020570158958435, "learning_rate": 2.897066666666667e-05, "loss": 0.0231, "step": 13092 }, { "epoch": 14.451684152401988, "grad_norm": 0.3772293031215668, "learning_rate": 2.8970333333333333e-05, "loss": 0.0374, "step": 
13093 }, { "epoch": 14.4527885146328, "grad_norm": 0.511902391910553, "learning_rate": 2.8970000000000002e-05, "loss": 0.0255, "step": 13094 }, { "epoch": 14.45389287686361, "grad_norm": 0.5602097511291504, "learning_rate": 2.8969666666666668e-05, "loss": 0.0189, "step": 13095 }, { "epoch": 14.454997239094423, "grad_norm": 0.3410123586654663, "learning_rate": 2.8969333333333334e-05, "loss": 0.014, "step": 13096 }, { "epoch": 14.456101601325235, "grad_norm": 0.22889424860477448, "learning_rate": 2.8969e-05, "loss": 0.0109, "step": 13097 }, { "epoch": 14.457205963556046, "grad_norm": 0.27820849418640137, "learning_rate": 2.896866666666667e-05, "loss": 0.0108, "step": 13098 }, { "epoch": 14.458310325786858, "grad_norm": 0.26694971323013306, "learning_rate": 2.8968333333333332e-05, "loss": 0.0115, "step": 13099 }, { "epoch": 14.45941468801767, "grad_norm": 0.40631070733070374, "learning_rate": 2.8968e-05, "loss": 0.019, "step": 13100 }, { "epoch": 14.460519050248482, "grad_norm": 0.2690814733505249, "learning_rate": 2.896766666666667e-05, "loss": 0.0359, "step": 13101 }, { "epoch": 14.461623412479293, "grad_norm": 0.5712239742279053, "learning_rate": 2.8967333333333333e-05, "loss": 0.0201, "step": 13102 }, { "epoch": 14.462727774710105, "grad_norm": 0.2754284143447876, "learning_rate": 2.8967000000000002e-05, "loss": 0.0123, "step": 13103 }, { "epoch": 14.463832136940917, "grad_norm": 0.3283574879169464, "learning_rate": 2.8966666666666665e-05, "loss": 0.0141, "step": 13104 }, { "epoch": 14.464936499171728, "grad_norm": 0.20880961418151855, "learning_rate": 2.8966333333333334e-05, "loss": 0.0142, "step": 13105 }, { "epoch": 14.46604086140254, "grad_norm": 0.17352522909641266, "learning_rate": 2.8966e-05, "loss": 0.0128, "step": 13106 }, { "epoch": 14.467145223633352, "grad_norm": 0.24163006246089935, "learning_rate": 2.8965666666666666e-05, "loss": 0.0114, "step": 13107 }, { "epoch": 14.468249585864163, "grad_norm": 0.14652597904205322, "learning_rate": 
2.8965333333333332e-05, "loss": 0.0066, "step": 13108 }, { "epoch": 14.469353948094975, "grad_norm": 0.21162015199661255, "learning_rate": 2.8965e-05, "loss": 0.0077, "step": 13109 }, { "epoch": 14.470458310325787, "grad_norm": 0.26392415165901184, "learning_rate": 2.8964666666666667e-05, "loss": 0.0119, "step": 13110 }, { "epoch": 14.4715626725566, "grad_norm": 0.3827753961086273, "learning_rate": 2.8964333333333333e-05, "loss": 0.0091, "step": 13111 }, { "epoch": 14.47266703478741, "grad_norm": 0.7529509663581848, "learning_rate": 2.8964000000000003e-05, "loss": 0.0066, "step": 13112 }, { "epoch": 14.473771397018222, "grad_norm": 0.3642416000366211, "learning_rate": 2.8963666666666665e-05, "loss": 0.0074, "step": 13113 }, { "epoch": 14.474875759249034, "grad_norm": 0.7040218114852905, "learning_rate": 2.8963333333333334e-05, "loss": 0.0235, "step": 13114 }, { "epoch": 14.475980121479845, "grad_norm": 0.18832865357398987, "learning_rate": 2.8963e-05, "loss": 0.0094, "step": 13115 }, { "epoch": 14.477084483710657, "grad_norm": 0.2356864959001541, "learning_rate": 2.8962666666666666e-05, "loss": 0.0109, "step": 13116 }, { "epoch": 14.47818884594147, "grad_norm": 0.27343541383743286, "learning_rate": 2.8962333333333336e-05, "loss": 0.0123, "step": 13117 }, { "epoch": 14.47929320817228, "grad_norm": 0.31549766659736633, "learning_rate": 2.8962e-05, "loss": 0.0142, "step": 13118 }, { "epoch": 14.480397570403092, "grad_norm": 0.30685949325561523, "learning_rate": 2.8961666666666667e-05, "loss": 0.0118, "step": 13119 }, { "epoch": 14.481501932633904, "grad_norm": 0.28175631165504456, "learning_rate": 2.8961333333333333e-05, "loss": 0.0113, "step": 13120 }, { "epoch": 14.482606294864716, "grad_norm": 0.19774746894836426, "learning_rate": 2.8961000000000003e-05, "loss": 0.0096, "step": 13121 }, { "epoch": 14.483710657095527, "grad_norm": 0.15497881174087524, "learning_rate": 2.8960666666666665e-05, "loss": 0.0059, "step": 13122 }, { "epoch": 14.484815019326339, 
"grad_norm": 1.0443261861801147, "learning_rate": 2.8960333333333335e-05, "loss": 0.0101, "step": 13123 }, { "epoch": 14.485919381557151, "grad_norm": 0.25779587030410767, "learning_rate": 2.896e-05, "loss": 0.0142, "step": 13124 }, { "epoch": 14.487023743787962, "grad_norm": 0.2909967005252838, "learning_rate": 2.8959666666666666e-05, "loss": 0.0095, "step": 13125 }, { "epoch": 14.488128106018774, "grad_norm": 0.18439286947250366, "learning_rate": 2.8959333333333336e-05, "loss": 0.0046, "step": 13126 }, { "epoch": 14.489232468249586, "grad_norm": 0.3475409746170044, "learning_rate": 2.8959000000000002e-05, "loss": 0.0149, "step": 13127 }, { "epoch": 14.490336830480398, "grad_norm": 0.5299624800682068, "learning_rate": 2.8958666666666668e-05, "loss": 0.0108, "step": 13128 }, { "epoch": 14.491441192711209, "grad_norm": 0.6863001585006714, "learning_rate": 2.8958333333333334e-05, "loss": 0.0136, "step": 13129 }, { "epoch": 14.492545554942021, "grad_norm": 0.20188525319099426, "learning_rate": 2.8958000000000003e-05, "loss": 0.0076, "step": 13130 }, { "epoch": 14.493649917172833, "grad_norm": 0.3417169451713562, "learning_rate": 2.8957666666666665e-05, "loss": 0.0065, "step": 13131 }, { "epoch": 14.494754279403644, "grad_norm": 0.39567312598228455, "learning_rate": 2.8957333333333335e-05, "loss": 0.0086, "step": 13132 }, { "epoch": 14.495858641634456, "grad_norm": 1.7542277574539185, "learning_rate": 2.8957e-05, "loss": 0.028, "step": 13133 }, { "epoch": 14.496963003865268, "grad_norm": 1.9271228313446045, "learning_rate": 2.8956666666666667e-05, "loss": 0.0436, "step": 13134 }, { "epoch": 14.49806736609608, "grad_norm": 0.5925370454788208, "learning_rate": 2.8956333333333336e-05, "loss": 0.1841, "step": 13135 }, { "epoch": 14.49917172832689, "grad_norm": 0.6407110095024109, "learning_rate": 2.8956e-05, "loss": 0.1984, "step": 13136 }, { "epoch": 14.500276090557703, "grad_norm": 0.41454005241394043, "learning_rate": 2.8955666666666668e-05, "loss": 0.1039, "step": 
13137 }, { "epoch": 14.501380452788515, "grad_norm": 0.3634851276874542, "learning_rate": 2.8955333333333334e-05, "loss": 0.0597, "step": 13138 }, { "epoch": 14.502484815019326, "grad_norm": 0.5033509731292725, "learning_rate": 2.8955e-05, "loss": 0.0511, "step": 13139 }, { "epoch": 14.503589177250138, "grad_norm": 0.4505651593208313, "learning_rate": 2.8954666666666666e-05, "loss": 0.0689, "step": 13140 }, { "epoch": 14.50469353948095, "grad_norm": 0.5546239018440247, "learning_rate": 2.8954333333333335e-05, "loss": 0.0629, "step": 13141 }, { "epoch": 14.50579790171176, "grad_norm": 0.30245116353034973, "learning_rate": 2.8954e-05, "loss": 0.0307, "step": 13142 }, { "epoch": 14.506902263942573, "grad_norm": 0.9912903308868408, "learning_rate": 2.8953666666666667e-05, "loss": 0.0388, "step": 13143 }, { "epoch": 14.508006626173385, "grad_norm": 0.41928890347480774, "learning_rate": 2.8953333333333336e-05, "loss": 0.0377, "step": 13144 }, { "epoch": 14.509110988404197, "grad_norm": 0.31675800681114197, "learning_rate": 2.8953e-05, "loss": 0.0342, "step": 13145 }, { "epoch": 14.510215350635008, "grad_norm": 0.3257542848587036, "learning_rate": 2.8952666666666668e-05, "loss": 0.0154, "step": 13146 }, { "epoch": 14.51131971286582, "grad_norm": 0.14993098378181458, "learning_rate": 2.8952333333333334e-05, "loss": 0.0111, "step": 13147 }, { "epoch": 14.512424075096632, "grad_norm": 0.19453439116477966, "learning_rate": 2.8952e-05, "loss": 0.0154, "step": 13148 }, { "epoch": 14.513528437327443, "grad_norm": 0.3602626621723175, "learning_rate": 2.8951666666666666e-05, "loss": 0.0081, "step": 13149 }, { "epoch": 14.514632799558255, "grad_norm": 0.3690304160118103, "learning_rate": 2.8951333333333335e-05, "loss": 0.0141, "step": 13150 }, { "epoch": 14.515737161789067, "grad_norm": 0.2146850824356079, "learning_rate": 2.8951e-05, "loss": 0.0295, "step": 13151 }, { "epoch": 14.516841524019878, "grad_norm": 0.1730778068304062, "learning_rate": 2.8950666666666667e-05, "loss": 
0.0095, "step": 13152 }, { "epoch": 14.51794588625069, "grad_norm": 0.275735080242157, "learning_rate": 2.8950333333333336e-05, "loss": 0.0135, "step": 13153 }, { "epoch": 14.519050248481502, "grad_norm": 0.18955230712890625, "learning_rate": 2.895e-05, "loss": 0.0119, "step": 13154 }, { "epoch": 14.520154610712314, "grad_norm": 0.22636447846889496, "learning_rate": 2.8949666666666668e-05, "loss": 0.0113, "step": 13155 }, { "epoch": 14.521258972943125, "grad_norm": 0.7025851607322693, "learning_rate": 2.8949333333333334e-05, "loss": 0.0231, "step": 13156 }, { "epoch": 14.522363335173937, "grad_norm": 0.08754776418209076, "learning_rate": 2.8949e-05, "loss": 0.0065, "step": 13157 }, { "epoch": 14.52346769740475, "grad_norm": 0.1400148570537567, "learning_rate": 2.8948666666666666e-05, "loss": 0.0093, "step": 13158 }, { "epoch": 14.52457205963556, "grad_norm": 0.2821565270423889, "learning_rate": 2.8948333333333335e-05, "loss": 0.0064, "step": 13159 }, { "epoch": 14.525676421866372, "grad_norm": 0.2698551118373871, "learning_rate": 2.8948e-05, "loss": 0.0159, "step": 13160 }, { "epoch": 14.526780784097184, "grad_norm": 0.2767028212547302, "learning_rate": 2.8947666666666667e-05, "loss": 0.0109, "step": 13161 }, { "epoch": 14.527885146327996, "grad_norm": 0.35223740339279175, "learning_rate": 2.8947333333333337e-05, "loss": 0.0104, "step": 13162 }, { "epoch": 14.528989508558807, "grad_norm": 0.37696003913879395, "learning_rate": 2.8947e-05, "loss": 0.0109, "step": 13163 }, { "epoch": 14.530093870789619, "grad_norm": 0.15210700035095215, "learning_rate": 2.894666666666667e-05, "loss": 0.0064, "step": 13164 }, { "epoch": 14.531198233020431, "grad_norm": 0.19572603702545166, "learning_rate": 2.894633333333333e-05, "loss": 0.0071, "step": 13165 }, { "epoch": 14.532302595251242, "grad_norm": 0.37616851925849915, "learning_rate": 2.8946e-05, "loss": 0.0123, "step": 13166 }, { "epoch": 14.533406957482054, "grad_norm": 1.157793402671814, "learning_rate": 
2.894566666666667e-05, "loss": 0.0159, "step": 13167 }, { "epoch": 14.534511319712866, "grad_norm": 0.2471371442079544, "learning_rate": 2.8945333333333332e-05, "loss": 0.0141, "step": 13168 }, { "epoch": 14.535615681943678, "grad_norm": 0.569315493106842, "learning_rate": 2.8945e-05, "loss": 0.0196, "step": 13169 }, { "epoch": 14.536720044174489, "grad_norm": 0.9471495747566223, "learning_rate": 2.8944666666666667e-05, "loss": 0.028, "step": 13170 }, { "epoch": 14.537824406405301, "grad_norm": 0.21499404311180115, "learning_rate": 2.8944333333333333e-05, "loss": 0.0081, "step": 13171 }, { "epoch": 14.538928768636113, "grad_norm": 0.34466439485549927, "learning_rate": 2.8944e-05, "loss": 0.0144, "step": 13172 }, { "epoch": 14.540033130866924, "grad_norm": 0.13390545547008514, "learning_rate": 2.894366666666667e-05, "loss": 0.0086, "step": 13173 }, { "epoch": 14.541137493097736, "grad_norm": 0.3076663315296173, "learning_rate": 2.894333333333333e-05, "loss": 0.0086, "step": 13174 }, { "epoch": 14.542241855328548, "grad_norm": 0.41207534074783325, "learning_rate": 2.8943e-05, "loss": 0.0067, "step": 13175 }, { "epoch": 14.54334621755936, "grad_norm": 0.18292024731636047, "learning_rate": 2.894266666666667e-05, "loss": 0.0107, "step": 13176 }, { "epoch": 14.54445057979017, "grad_norm": 0.3079465329647064, "learning_rate": 2.8942333333333332e-05, "loss": 0.0119, "step": 13177 }, { "epoch": 14.545554942020983, "grad_norm": 0.32016676664352417, "learning_rate": 2.8942000000000002e-05, "loss": 0.0148, "step": 13178 }, { "epoch": 14.546659304251795, "grad_norm": 0.18546046316623688, "learning_rate": 2.8941666666666668e-05, "loss": 0.0077, "step": 13179 }, { "epoch": 14.547763666482606, "grad_norm": 0.2677684724330902, "learning_rate": 2.8941333333333334e-05, "loss": 0.0108, "step": 13180 }, { "epoch": 14.548868028713418, "grad_norm": 0.34345543384552, "learning_rate": 2.8941e-05, "loss": 0.0177, "step": 13181 }, { "epoch": 14.54997239094423, "grad_norm": 0.41533362865448, 
"learning_rate": 2.894066666666667e-05, "loss": 0.0128, "step": 13182 }, { "epoch": 14.55107675317504, "grad_norm": 0.31157317757606506, "learning_rate": 2.894033333333333e-05, "loss": 0.0111, "step": 13183 }, { "epoch": 14.552181115405853, "grad_norm": 0.14596614241600037, "learning_rate": 2.894e-05, "loss": 0.0041, "step": 13184 }, { "epoch": 14.553285477636665, "grad_norm": 0.7311427593231201, "learning_rate": 2.893966666666667e-05, "loss": 0.2123, "step": 13185 }, { "epoch": 14.554389839867477, "grad_norm": 0.7436456680297852, "learning_rate": 2.8939333333333333e-05, "loss": 0.1496, "step": 13186 }, { "epoch": 14.555494202098288, "grad_norm": 0.48899585008621216, "learning_rate": 2.8939000000000002e-05, "loss": 0.1494, "step": 13187 }, { "epoch": 14.5565985643291, "grad_norm": 0.33896785974502563, "learning_rate": 2.8938666666666668e-05, "loss": 0.1211, "step": 13188 }, { "epoch": 14.557702926559912, "grad_norm": 0.48270177841186523, "learning_rate": 2.8938333333333334e-05, "loss": 0.0971, "step": 13189 }, { "epoch": 14.558807288790723, "grad_norm": 0.4271349310874939, "learning_rate": 2.8938e-05, "loss": 0.0597, "step": 13190 }, { "epoch": 14.559911651021535, "grad_norm": 0.36706310510635376, "learning_rate": 2.893766666666667e-05, "loss": 0.079, "step": 13191 }, { "epoch": 14.561016013252347, "grad_norm": 0.4554840624332428, "learning_rate": 2.8937333333333335e-05, "loss": 0.0863, "step": 13192 }, { "epoch": 14.562120375483158, "grad_norm": 0.49919068813323975, "learning_rate": 2.8937e-05, "loss": 0.0659, "step": 13193 }, { "epoch": 14.56322473771397, "grad_norm": 0.22602106630802155, "learning_rate": 2.893666666666667e-05, "loss": 0.0133, "step": 13194 }, { "epoch": 14.564329099944782, "grad_norm": 0.1868174970149994, "learning_rate": 2.8936333333333333e-05, "loss": 0.0125, "step": 13195 }, { "epoch": 14.565433462175594, "grad_norm": 0.2799728512763977, "learning_rate": 2.8936000000000002e-05, "loss": 0.0139, "step": 13196 }, { "epoch": 14.566537824406405, 
"grad_norm": 0.24219028651714325, "learning_rate": 2.8935666666666665e-05, "loss": 0.014, "step": 13197 }, { "epoch": 14.567642186637217, "grad_norm": 0.19893623888492584, "learning_rate": 2.8935333333333334e-05, "loss": 0.0103, "step": 13198 }, { "epoch": 14.56874654886803, "grad_norm": 0.29447630047798157, "learning_rate": 2.8935e-05, "loss": 0.0189, "step": 13199 }, { "epoch": 14.56985091109884, "grad_norm": 0.13583189249038696, "learning_rate": 2.8934666666666666e-05, "loss": 0.0046, "step": 13200 }, { "epoch": 14.570955273329652, "grad_norm": 0.23805266618728638, "learning_rate": 2.8934333333333335e-05, "loss": 0.0089, "step": 13201 }, { "epoch": 14.572059635560464, "grad_norm": 0.5524685382843018, "learning_rate": 2.8934e-05, "loss": 0.0182, "step": 13202 }, { "epoch": 14.573163997791276, "grad_norm": 0.11954344809055328, "learning_rate": 2.8933666666666667e-05, "loss": 0.0055, "step": 13203 }, { "epoch": 14.574268360022087, "grad_norm": 0.17701970040798187, "learning_rate": 2.8933333333333333e-05, "loss": 0.0094, "step": 13204 }, { "epoch": 14.575372722252899, "grad_norm": 0.3918859362602234, "learning_rate": 2.8933000000000002e-05, "loss": 0.0149, "step": 13205 }, { "epoch": 14.576477084483711, "grad_norm": 0.27287375926971436, "learning_rate": 2.8932666666666665e-05, "loss": 0.0125, "step": 13206 }, { "epoch": 14.577581446714522, "grad_norm": 0.40011337399482727, "learning_rate": 2.8932333333333334e-05, "loss": 0.0107, "step": 13207 }, { "epoch": 14.578685808945334, "grad_norm": 0.16123196482658386, "learning_rate": 2.8932e-05, "loss": 0.0089, "step": 13208 }, { "epoch": 14.579790171176146, "grad_norm": 0.22884494066238403, "learning_rate": 2.8931666666666666e-05, "loss": 0.0057, "step": 13209 }, { "epoch": 14.580894533406958, "grad_norm": 0.4060662090778351, "learning_rate": 2.8931333333333335e-05, "loss": 0.0122, "step": 13210 }, { "epoch": 14.581998895637769, "grad_norm": 0.3158213496208191, "learning_rate": 2.8931e-05, "loss": 0.0074, "step": 13211 }, 
{ "epoch": 14.583103257868581, "grad_norm": 0.2163102626800537, "learning_rate": 2.8930666666666667e-05, "loss": 0.0128, "step": 13212 }, { "epoch": 14.584207620099393, "grad_norm": 0.3169748783111572, "learning_rate": 2.8930333333333333e-05, "loss": 0.0149, "step": 13213 }, { "epoch": 14.585311982330204, "grad_norm": 0.4617411494255066, "learning_rate": 2.8930000000000003e-05, "loss": 0.0209, "step": 13214 }, { "epoch": 14.586416344561016, "grad_norm": 0.4768090844154358, "learning_rate": 2.8929666666666665e-05, "loss": 0.019, "step": 13215 }, { "epoch": 14.587520706791828, "grad_norm": 0.5329919457435608, "learning_rate": 2.8929333333333334e-05, "loss": 0.0136, "step": 13216 }, { "epoch": 14.588625069022639, "grad_norm": 0.33219677209854126, "learning_rate": 2.8929000000000004e-05, "loss": 0.0137, "step": 13217 }, { "epoch": 14.589729431253451, "grad_norm": 0.23932580649852753, "learning_rate": 2.8928666666666666e-05, "loss": 0.0093, "step": 13218 }, { "epoch": 14.590833793484263, "grad_norm": 0.2698904573917389, "learning_rate": 2.8928333333333336e-05, "loss": 0.0078, "step": 13219 }, { "epoch": 14.591938155715075, "grad_norm": 0.19794157147407532, "learning_rate": 2.8928e-05, "loss": 0.008, "step": 13220 }, { "epoch": 14.593042517945886, "grad_norm": 0.565005898475647, "learning_rate": 2.8927666666666667e-05, "loss": 0.017, "step": 13221 }, { "epoch": 14.594146880176698, "grad_norm": 0.26179271936416626, "learning_rate": 2.8927333333333333e-05, "loss": 0.0161, "step": 13222 }, { "epoch": 14.59525124240751, "grad_norm": 0.34484046697616577, "learning_rate": 2.8927000000000003e-05, "loss": 0.0077, "step": 13223 }, { "epoch": 14.59635560463832, "grad_norm": 0.539111852645874, "learning_rate": 2.8926666666666665e-05, "loss": 0.0258, "step": 13224 }, { "epoch": 14.597459966869133, "grad_norm": 0.32804226875305176, "learning_rate": 2.8926333333333335e-05, "loss": 0.0142, "step": 13225 }, { "epoch": 14.598564329099945, "grad_norm": 0.2879837155342102, "learning_rate": 
2.8926e-05, "loss": 0.0154, "step": 13226 }, { "epoch": 14.599668691330756, "grad_norm": 0.22333647310733795, "learning_rate": 2.8925666666666666e-05, "loss": 0.0087, "step": 13227 }, { "epoch": 14.600773053561568, "grad_norm": 0.2641274034976959, "learning_rate": 2.8925333333333336e-05, "loss": 0.0118, "step": 13228 }, { "epoch": 14.60187741579238, "grad_norm": 0.25506454706192017, "learning_rate": 2.8925e-05, "loss": 0.0137, "step": 13229 }, { "epoch": 14.602981778023192, "grad_norm": 0.16126956045627594, "learning_rate": 2.8924666666666668e-05, "loss": 0.0066, "step": 13230 }, { "epoch": 14.604086140254003, "grad_norm": 0.3102402091026306, "learning_rate": 2.8924333333333334e-05, "loss": 0.0179, "step": 13231 }, { "epoch": 14.605190502484815, "grad_norm": 0.3717604875564575, "learning_rate": 2.8924e-05, "loss": 0.0112, "step": 13232 }, { "epoch": 14.606294864715627, "grad_norm": 0.3369746506214142, "learning_rate": 2.8923666666666665e-05, "loss": 0.0138, "step": 13233 }, { "epoch": 14.607399226946438, "grad_norm": 0.965445876121521, "learning_rate": 2.8923333333333335e-05, "loss": 0.0168, "step": 13234 }, { "epoch": 14.60850358917725, "grad_norm": 0.5928501486778259, "learning_rate": 2.8923e-05, "loss": 0.1911, "step": 13235 }, { "epoch": 14.609607951408062, "grad_norm": 0.5183103680610657, "learning_rate": 2.8922666666666667e-05, "loss": 0.1396, "step": 13236 }, { "epoch": 14.610712313638874, "grad_norm": 0.5087526440620422, "learning_rate": 2.8922333333333336e-05, "loss": 0.0934, "step": 13237 }, { "epoch": 14.611816675869685, "grad_norm": 0.5707670450210571, "learning_rate": 2.8922e-05, "loss": 0.1033, "step": 13238 }, { "epoch": 14.612921038100497, "grad_norm": 0.3773752450942993, "learning_rate": 2.8921666666666668e-05, "loss": 0.0729, "step": 13239 }, { "epoch": 14.61402540033131, "grad_norm": 0.3292923867702484, "learning_rate": 2.8921333333333334e-05, "loss": 0.0448, "step": 13240 }, { "epoch": 14.61512976256212, "grad_norm": 0.37082552909851074, 
"learning_rate": 2.8921e-05, "loss": 0.0435, "step": 13241 }, { "epoch": 14.616234124792932, "grad_norm": 0.5777339935302734, "learning_rate": 2.892066666666667e-05, "loss": 0.0518, "step": 13242 }, { "epoch": 14.617338487023744, "grad_norm": 0.2123337984085083, "learning_rate": 2.8920333333333335e-05, "loss": 0.0196, "step": 13243 }, { "epoch": 14.618442849254556, "grad_norm": 0.20899394154548645, "learning_rate": 2.892e-05, "loss": 0.0195, "step": 13244 }, { "epoch": 14.619547211485367, "grad_norm": 0.8593840003013611, "learning_rate": 2.8919666666666667e-05, "loss": 0.0489, "step": 13245 }, { "epoch": 14.620651573716179, "grad_norm": 0.1766500025987625, "learning_rate": 2.8919333333333336e-05, "loss": 0.01, "step": 13246 }, { "epoch": 14.621755935946991, "grad_norm": 0.15462283790111542, "learning_rate": 2.8919e-05, "loss": 0.0047, "step": 13247 }, { "epoch": 14.622860298177802, "grad_norm": 0.24521121382713318, "learning_rate": 2.8918666666666668e-05, "loss": 0.0194, "step": 13248 }, { "epoch": 14.623964660408614, "grad_norm": 0.24075937271118164, "learning_rate": 2.8918333333333334e-05, "loss": 0.0135, "step": 13249 }, { "epoch": 14.625069022639426, "grad_norm": 0.20628799498081207, "learning_rate": 2.8918e-05, "loss": 0.0103, "step": 13250 }, { "epoch": 14.626173384870237, "grad_norm": 0.17246226966381073, "learning_rate": 2.891766666666667e-05, "loss": 0.0112, "step": 13251 }, { "epoch": 14.627277747101049, "grad_norm": 0.3068826496601105, "learning_rate": 2.8917333333333335e-05, "loss": 0.0215, "step": 13252 }, { "epoch": 14.628382109331861, "grad_norm": 0.17723652720451355, "learning_rate": 2.8917e-05, "loss": 0.0077, "step": 13253 }, { "epoch": 14.629486471562673, "grad_norm": 0.17742674052715302, "learning_rate": 2.8916666666666667e-05, "loss": 0.005, "step": 13254 }, { "epoch": 14.630590833793484, "grad_norm": 0.20109105110168457, "learning_rate": 2.8916333333333336e-05, "loss": 0.0095, "step": 13255 }, { "epoch": 14.631695196024296, "grad_norm": 
0.3926621675491333, "learning_rate": 2.8916e-05, "loss": 0.0171, "step": 13256 }, { "epoch": 14.632799558255108, "grad_norm": 0.1477659046649933, "learning_rate": 2.8915666666666668e-05, "loss": 0.0055, "step": 13257 }, { "epoch": 14.633903920485919, "grad_norm": 0.4897425174713135, "learning_rate": 2.8915333333333334e-05, "loss": 0.0147, "step": 13258 }, { "epoch": 14.635008282716731, "grad_norm": 0.3653556704521179, "learning_rate": 2.8915e-05, "loss": 0.0117, "step": 13259 }, { "epoch": 14.636112644947543, "grad_norm": 0.16025792062282562, "learning_rate": 2.891466666666667e-05, "loss": 0.0058, "step": 13260 }, { "epoch": 14.637217007178354, "grad_norm": 0.1987123191356659, "learning_rate": 2.8914333333333332e-05, "loss": 0.0072, "step": 13261 }, { "epoch": 14.638321369409166, "grad_norm": 0.2037166953086853, "learning_rate": 2.8914e-05, "loss": 0.0095, "step": 13262 }, { "epoch": 14.639425731639978, "grad_norm": 0.1898944228887558, "learning_rate": 2.8913666666666667e-05, "loss": 0.0074, "step": 13263 }, { "epoch": 14.64053009387079, "grad_norm": 0.26581043004989624, "learning_rate": 2.8913333333333333e-05, "loss": 0.0116, "step": 13264 }, { "epoch": 14.6416344561016, "grad_norm": 0.33845049142837524, "learning_rate": 2.8913e-05, "loss": 0.0069, "step": 13265 }, { "epoch": 14.642738818332413, "grad_norm": 0.152500182390213, "learning_rate": 2.891266666666667e-05, "loss": 0.0091, "step": 13266 }, { "epoch": 14.643843180563225, "grad_norm": 0.3405706584453583, "learning_rate": 2.8912333333333334e-05, "loss": 0.0165, "step": 13267 }, { "epoch": 14.644947542794036, "grad_norm": 0.2536074221134186, "learning_rate": 2.8912e-05, "loss": 0.0118, "step": 13268 }, { "epoch": 14.646051905024848, "grad_norm": 1.8004306554794312, "learning_rate": 2.891166666666667e-05, "loss": 0.0156, "step": 13269 }, { "epoch": 14.64715626725566, "grad_norm": 0.644797682762146, "learning_rate": 2.8911333333333332e-05, "loss": 0.0231, "step": 13270 }, { "epoch": 14.648260629486472, 
"grad_norm": 0.2380872368812561, "learning_rate": 2.8911e-05, "loss": 0.0079, "step": 13271 }, { "epoch": 14.649364991717283, "grad_norm": 0.41955628991127014, "learning_rate": 2.8910666666666667e-05, "loss": 0.0076, "step": 13272 }, { "epoch": 14.650469353948095, "grad_norm": 0.21210946142673492, "learning_rate": 2.8910333333333333e-05, "loss": 0.0081, "step": 13273 }, { "epoch": 14.651573716178907, "grad_norm": 0.25195932388305664, "learning_rate": 2.891e-05, "loss": 0.0181, "step": 13274 }, { "epoch": 14.652678078409718, "grad_norm": 0.20708858966827393, "learning_rate": 2.890966666666667e-05, "loss": 0.0141, "step": 13275 }, { "epoch": 14.65378244064053, "grad_norm": 0.43611204624176025, "learning_rate": 2.8909333333333335e-05, "loss": 0.0132, "step": 13276 }, { "epoch": 14.654886802871342, "grad_norm": 0.3515923321247101, "learning_rate": 2.8909e-05, "loss": 0.0122, "step": 13277 }, { "epoch": 14.655991165102154, "grad_norm": 0.34872347116470337, "learning_rate": 2.890866666666667e-05, "loss": 0.0166, "step": 13278 }, { "epoch": 14.657095527332965, "grad_norm": 0.3906550705432892, "learning_rate": 2.8908333333333332e-05, "loss": 0.0143, "step": 13279 }, { "epoch": 14.658199889563777, "grad_norm": 0.3658529222011566, "learning_rate": 2.8908000000000002e-05, "loss": 0.008, "step": 13280 }, { "epoch": 14.65930425179459, "grad_norm": 0.2319512665271759, "learning_rate": 2.8907666666666668e-05, "loss": 0.008, "step": 13281 }, { "epoch": 14.6604086140254, "grad_norm": 0.7113214731216431, "learning_rate": 2.8907333333333334e-05, "loss": 0.0189, "step": 13282 }, { "epoch": 14.661512976256212, "grad_norm": 0.48184025287628174, "learning_rate": 2.8907e-05, "loss": 0.0126, "step": 13283 }, { "epoch": 14.662617338487024, "grad_norm": 0.31225576996803284, "learning_rate": 2.890666666666667e-05, "loss": 0.0107, "step": 13284 }, { "epoch": 14.663721700717835, "grad_norm": 0.905963122844696, "learning_rate": 2.8906333333333335e-05, "loss": 0.1891, "step": 13285 }, { "epoch": 
14.664826062948647, "grad_norm": 0.8365752100944519, "learning_rate": 2.8906e-05, "loss": 0.1768, "step": 13286 }, { "epoch": 14.665930425179459, "grad_norm": 0.4145509898662567, "learning_rate": 2.8905666666666667e-05, "loss": 0.1104, "step": 13287 }, { "epoch": 14.667034787410271, "grad_norm": 0.5479421019554138, "learning_rate": 2.8905333333333333e-05, "loss": 0.1031, "step": 13288 }, { "epoch": 14.668139149641082, "grad_norm": 0.6380314230918884, "learning_rate": 2.8905000000000002e-05, "loss": 0.0741, "step": 13289 }, { "epoch": 14.669243511871894, "grad_norm": 0.5334500074386597, "learning_rate": 2.8904666666666664e-05, "loss": 0.0853, "step": 13290 }, { "epoch": 14.670347874102706, "grad_norm": 0.3726813793182373, "learning_rate": 2.8904333333333334e-05, "loss": 0.0375, "step": 13291 }, { "epoch": 14.671452236333517, "grad_norm": 0.234657421708107, "learning_rate": 2.8904000000000003e-05, "loss": 0.0401, "step": 13292 }, { "epoch": 14.672556598564329, "grad_norm": 0.2965414226055145, "learning_rate": 2.8903666666666666e-05, "loss": 0.031, "step": 13293 }, { "epoch": 14.673660960795141, "grad_norm": 0.2336396425962448, "learning_rate": 2.8903333333333335e-05, "loss": 0.0252, "step": 13294 }, { "epoch": 14.674765323025952, "grad_norm": 0.2754454016685486, "learning_rate": 2.8903e-05, "loss": 0.016, "step": 13295 }, { "epoch": 14.675869685256764, "grad_norm": 0.4249284267425537, "learning_rate": 2.8902666666666667e-05, "loss": 0.0244, "step": 13296 }, { "epoch": 14.676974047487576, "grad_norm": 0.4671592116355896, "learning_rate": 2.8902333333333333e-05, "loss": 0.0184, "step": 13297 }, { "epoch": 14.678078409718388, "grad_norm": 0.18275320529937744, "learning_rate": 2.8902000000000002e-05, "loss": 0.011, "step": 13298 }, { "epoch": 14.679182771949199, "grad_norm": 0.30923259258270264, "learning_rate": 2.8901666666666665e-05, "loss": 0.0222, "step": 13299 }, { "epoch": 14.680287134180011, "grad_norm": 0.43795648217201233, "learning_rate": 
2.8901333333333334e-05, "loss": 0.014, "step": 13300 }, { "epoch": 14.681391496410823, "grad_norm": 0.14879627525806427, "learning_rate": 2.8901000000000003e-05, "loss": 0.006, "step": 13301 }, { "epoch": 14.682495858641634, "grad_norm": 0.2542973458766937, "learning_rate": 2.8900666666666666e-05, "loss": 0.0125, "step": 13302 }, { "epoch": 14.683600220872446, "grad_norm": 0.27783796191215515, "learning_rate": 2.8900333333333335e-05, "loss": 0.0093, "step": 13303 }, { "epoch": 14.684704583103258, "grad_norm": 0.2964329719543457, "learning_rate": 2.89e-05, "loss": 0.0132, "step": 13304 }, { "epoch": 14.68580894533407, "grad_norm": 0.2640724778175354, "learning_rate": 2.8899666666666667e-05, "loss": 0.0128, "step": 13305 }, { "epoch": 14.68691330756488, "grad_norm": 0.43348562717437744, "learning_rate": 2.8899333333333333e-05, "loss": 0.0167, "step": 13306 }, { "epoch": 14.688017669795693, "grad_norm": 0.28631508350372314, "learning_rate": 2.8899000000000002e-05, "loss": 0.012, "step": 13307 }, { "epoch": 14.689122032026505, "grad_norm": 0.3955630362033844, "learning_rate": 2.8898666666666668e-05, "loss": 0.0118, "step": 13308 }, { "epoch": 14.690226394257316, "grad_norm": 0.21464122831821442, "learning_rate": 2.8898333333333334e-05, "loss": 0.0048, "step": 13309 }, { "epoch": 14.691330756488128, "grad_norm": 0.3670448958873749, "learning_rate": 2.8898000000000004e-05, "loss": 0.013, "step": 13310 }, { "epoch": 14.69243511871894, "grad_norm": 0.19094696640968323, "learning_rate": 2.8897666666666666e-05, "loss": 0.0151, "step": 13311 }, { "epoch": 14.693539480949752, "grad_norm": 0.2595958411693573, "learning_rate": 2.8897333333333335e-05, "loss": 0.0076, "step": 13312 }, { "epoch": 14.694643843180563, "grad_norm": 0.3260619044303894, "learning_rate": 2.8897e-05, "loss": 0.0157, "step": 13313 }, { "epoch": 14.695748205411375, "grad_norm": 0.31186023354530334, "learning_rate": 2.8896666666666667e-05, "loss": 0.0111, "step": 13314 }, { "epoch": 14.696852567642187, 
"grad_norm": 0.20723751187324524, "learning_rate": 2.8896333333333333e-05, "loss": 0.0108, "step": 13315 }, { "epoch": 14.697956929872998, "grad_norm": 0.1368015557527542, "learning_rate": 2.8896e-05, "loss": 0.0047, "step": 13316 }, { "epoch": 14.69906129210381, "grad_norm": 0.507989764213562, "learning_rate": 2.889566666666667e-05, "loss": 0.0142, "step": 13317 }, { "epoch": 14.700165654334622, "grad_norm": 0.2550865411758423, "learning_rate": 2.8895333333333334e-05, "loss": 0.0081, "step": 13318 }, { "epoch": 14.701270016565433, "grad_norm": 0.3024655282497406, "learning_rate": 2.8895e-05, "loss": 0.0124, "step": 13319 }, { "epoch": 14.702374378796245, "grad_norm": 0.10939756035804749, "learning_rate": 2.8894666666666666e-05, "loss": 0.0077, "step": 13320 }, { "epoch": 14.703478741027057, "grad_norm": 0.25700119137763977, "learning_rate": 2.8894333333333336e-05, "loss": 0.0077, "step": 13321 }, { "epoch": 14.70458310325787, "grad_norm": 0.2019176334142685, "learning_rate": 2.8893999999999998e-05, "loss": 0.0115, "step": 13322 }, { "epoch": 14.70568746548868, "grad_norm": 0.18574731051921844, "learning_rate": 2.8893666666666667e-05, "loss": 0.0079, "step": 13323 }, { "epoch": 14.706791827719492, "grad_norm": 0.22610124945640564, "learning_rate": 2.8893333333333333e-05, "loss": 0.0078, "step": 13324 }, { "epoch": 14.707896189950304, "grad_norm": 0.1357153058052063, "learning_rate": 2.8893e-05, "loss": 0.0055, "step": 13325 }, { "epoch": 14.709000552181115, "grad_norm": 0.46436548233032227, "learning_rate": 2.889266666666667e-05, "loss": 0.0129, "step": 13326 }, { "epoch": 14.710104914411927, "grad_norm": 0.4161253869533539, "learning_rate": 2.8892333333333335e-05, "loss": 0.0147, "step": 13327 }, { "epoch": 14.71120927664274, "grad_norm": 0.5116335153579712, "learning_rate": 2.8892e-05, "loss": 0.0162, "step": 13328 }, { "epoch": 14.71231363887355, "grad_norm": 0.3263518512248993, "learning_rate": 2.8891666666666666e-05, "loss": 0.0152, "step": 13329 }, { "epoch": 
14.713418001104362, "grad_norm": 0.17942319810390472, "learning_rate": 2.8891333333333336e-05, "loss": 0.0051, "step": 13330 }, { "epoch": 14.714522363335174, "grad_norm": 0.544376790523529, "learning_rate": 2.8891e-05, "loss": 0.0126, "step": 13331 }, { "epoch": 14.715626725565986, "grad_norm": 0.2751960754394531, "learning_rate": 2.8890666666666668e-05, "loss": 0.0082, "step": 13332 }, { "epoch": 14.716731087796797, "grad_norm": 0.29959234595298767, "learning_rate": 2.8890333333333334e-05, "loss": 0.0098, "step": 13333 }, { "epoch": 14.717835450027609, "grad_norm": 0.4988238215446472, "learning_rate": 2.889e-05, "loss": 0.0208, "step": 13334 }, { "epoch": 14.718939812258421, "grad_norm": 1.6427476406097412, "learning_rate": 2.888966666666667e-05, "loss": 0.2329, "step": 13335 }, { "epoch": 14.720044174489232, "grad_norm": 0.5919768214225769, "learning_rate": 2.8889333333333335e-05, "loss": 0.1216, "step": 13336 }, { "epoch": 14.721148536720044, "grad_norm": 0.5386435389518738, "learning_rate": 2.8889e-05, "loss": 0.1023, "step": 13337 }, { "epoch": 14.722252898950856, "grad_norm": 0.5683133602142334, "learning_rate": 2.8888666666666667e-05, "loss": 0.1041, "step": 13338 }, { "epoch": 14.723357261181668, "grad_norm": 0.9000874757766724, "learning_rate": 2.8888333333333336e-05, "loss": 0.0574, "step": 13339 }, { "epoch": 14.724461623412479, "grad_norm": 0.44192638993263245, "learning_rate": 2.8888e-05, "loss": 0.054, "step": 13340 }, { "epoch": 14.725565985643291, "grad_norm": 0.3017316460609436, "learning_rate": 2.8887666666666668e-05, "loss": 0.0683, "step": 13341 }, { "epoch": 14.726670347874103, "grad_norm": 0.3069196343421936, "learning_rate": 2.8887333333333337e-05, "loss": 0.0208, "step": 13342 }, { "epoch": 14.727774710104914, "grad_norm": 0.19376933574676514, "learning_rate": 2.8887e-05, "loss": 0.0124, "step": 13343 }, { "epoch": 14.728879072335726, "grad_norm": 0.19401560723781586, "learning_rate": 2.888666666666667e-05, "loss": 0.0333, "step": 13344 }, 
{ "epoch": 14.729983434566538, "grad_norm": 0.2374073565006256, "learning_rate": 2.8886333333333335e-05, "loss": 0.0164, "step": 13345 }, { "epoch": 14.73108779679735, "grad_norm": 0.1897248774766922, "learning_rate": 2.8886e-05, "loss": 0.0161, "step": 13346 }, { "epoch": 14.73219215902816, "grad_norm": 0.2962588667869568, "learning_rate": 2.8885666666666667e-05, "loss": 0.0177, "step": 13347 }, { "epoch": 14.733296521258973, "grad_norm": 0.16733160614967346, "learning_rate": 2.8885333333333333e-05, "loss": 0.0118, "step": 13348 }, { "epoch": 14.734400883489785, "grad_norm": 0.17633846402168274, "learning_rate": 2.8885e-05, "loss": 0.0095, "step": 13349 }, { "epoch": 14.735505245720596, "grad_norm": 0.19487524032592773, "learning_rate": 2.8884666666666668e-05, "loss": 0.011, "step": 13350 }, { "epoch": 14.736609607951408, "grad_norm": 0.19203589856624603, "learning_rate": 2.8884333333333334e-05, "loss": 0.0118, "step": 13351 }, { "epoch": 14.73771397018222, "grad_norm": 0.24957455694675446, "learning_rate": 2.8884e-05, "loss": 0.015, "step": 13352 }, { "epoch": 14.738818332413032, "grad_norm": 0.2934603989124298, "learning_rate": 2.888366666666667e-05, "loss": 0.0207, "step": 13353 }, { "epoch": 14.739922694643843, "grad_norm": 0.9429675936698914, "learning_rate": 2.8883333333333332e-05, "loss": 0.0152, "step": 13354 }, { "epoch": 14.741027056874655, "grad_norm": 0.30376705527305603, "learning_rate": 2.8883e-05, "loss": 0.0123, "step": 13355 }, { "epoch": 14.742131419105467, "grad_norm": 0.18639007210731506, "learning_rate": 2.8882666666666667e-05, "loss": 0.01, "step": 13356 }, { "epoch": 14.743235781336278, "grad_norm": 0.32506969571113586, "learning_rate": 2.8882333333333333e-05, "loss": 0.01, "step": 13357 }, { "epoch": 14.74434014356709, "grad_norm": 0.08476822823286057, "learning_rate": 2.8882000000000002e-05, "loss": 0.0034, "step": 13358 }, { "epoch": 14.745444505797902, "grad_norm": 0.2598351836204529, "learning_rate": 2.8881666666666668e-05, "loss": 
0.0131, "step": 13359 }, { "epoch": 14.746548868028713, "grad_norm": 0.19703444838523865, "learning_rate": 2.8881333333333334e-05, "loss": 0.0102, "step": 13360 }, { "epoch": 14.747653230259525, "grad_norm": 0.20557408034801483, "learning_rate": 2.8881e-05, "loss": 0.0062, "step": 13361 }, { "epoch": 14.748757592490337, "grad_norm": 0.22726203501224518, "learning_rate": 2.888066666666667e-05, "loss": 0.0102, "step": 13362 }, { "epoch": 14.74986195472115, "grad_norm": 0.24071352183818817, "learning_rate": 2.8880333333333332e-05, "loss": 0.0109, "step": 13363 }, { "epoch": 14.75096631695196, "grad_norm": 0.3294733166694641, "learning_rate": 2.888e-05, "loss": 0.014, "step": 13364 }, { "epoch": 14.752070679182772, "grad_norm": 0.10103029012680054, "learning_rate": 2.8879666666666667e-05, "loss": 0.005, "step": 13365 }, { "epoch": 14.753175041413584, "grad_norm": 0.4046560823917389, "learning_rate": 2.8879333333333333e-05, "loss": 0.0089, "step": 13366 }, { "epoch": 14.754279403644395, "grad_norm": 0.17657481133937836, "learning_rate": 2.8879000000000003e-05, "loss": 0.0039, "step": 13367 }, { "epoch": 14.755383765875207, "grad_norm": 0.19787736237049103, "learning_rate": 2.887866666666667e-05, "loss": 0.0044, "step": 13368 }, { "epoch": 14.75648812810602, "grad_norm": 0.1687619686126709, "learning_rate": 2.8878333333333334e-05, "loss": 0.0032, "step": 13369 }, { "epoch": 14.75759249033683, "grad_norm": 0.2947693467140198, "learning_rate": 2.8878e-05, "loss": 0.0146, "step": 13370 }, { "epoch": 14.758696852567642, "grad_norm": 0.26822635531425476, "learning_rate": 2.887766666666667e-05, "loss": 0.0204, "step": 13371 }, { "epoch": 14.759801214798454, "grad_norm": 0.4124528467655182, "learning_rate": 2.8877333333333332e-05, "loss": 0.0073, "step": 13372 }, { "epoch": 14.760905577029266, "grad_norm": 0.5678868293762207, "learning_rate": 2.8877e-05, "loss": 0.0087, "step": 13373 }, { "epoch": 14.762009939260077, "grad_norm": 0.18242798745632172, "learning_rate": 
2.8876666666666667e-05, "loss": 0.0062, "step": 13374 }, { "epoch": 14.763114301490889, "grad_norm": 0.15194450318813324, "learning_rate": 2.8876333333333333e-05, "loss": 0.0069, "step": 13375 }, { "epoch": 14.764218663721701, "grad_norm": 0.28801336884498596, "learning_rate": 2.8876000000000003e-05, "loss": 0.0068, "step": 13376 }, { "epoch": 14.765323025952512, "grad_norm": 0.2986138164997101, "learning_rate": 2.8875666666666665e-05, "loss": 0.0131, "step": 13377 }, { "epoch": 14.766427388183324, "grad_norm": 0.24467630684375763, "learning_rate": 2.8875333333333335e-05, "loss": 0.0055, "step": 13378 }, { "epoch": 14.767531750414136, "grad_norm": 0.3428797125816345, "learning_rate": 2.8875e-05, "loss": 0.0077, "step": 13379 }, { "epoch": 14.768636112644948, "grad_norm": 0.1230950802564621, "learning_rate": 2.8874666666666666e-05, "loss": 0.0042, "step": 13380 }, { "epoch": 14.769740474875759, "grad_norm": 0.5436632633209229, "learning_rate": 2.8874333333333332e-05, "loss": 0.0069, "step": 13381 }, { "epoch": 14.770844837106571, "grad_norm": 0.25739556550979614, "learning_rate": 2.8874000000000002e-05, "loss": 0.0066, "step": 13382 }, { "epoch": 14.771949199337383, "grad_norm": 0.3021993935108185, "learning_rate": 2.8873666666666668e-05, "loss": 0.0141, "step": 13383 }, { "epoch": 14.773053561568194, "grad_norm": 0.40574127435684204, "learning_rate": 2.8873333333333334e-05, "loss": 0.0104, "step": 13384 }, { "epoch": 14.774157923799006, "grad_norm": 0.8139467835426331, "learning_rate": 2.8873000000000003e-05, "loss": 0.2099, "step": 13385 }, { "epoch": 14.775262286029818, "grad_norm": 0.48578906059265137, "learning_rate": 2.8872666666666665e-05, "loss": 0.1329, "step": 13386 }, { "epoch": 14.77636664826063, "grad_norm": 0.3949979543685913, "learning_rate": 2.8872333333333335e-05, "loss": 0.0985, "step": 13387 }, { "epoch": 14.77747101049144, "grad_norm": 0.47969022393226624, "learning_rate": 2.8872e-05, "loss": 0.0944, "step": 13388 }, { "epoch": 
14.778575372722253, "grad_norm": 0.3670113980770111, "learning_rate": 2.8871666666666667e-05, "loss": 0.088, "step": 13389 }, { "epoch": 14.779679734953065, "grad_norm": 0.6135943531990051, "learning_rate": 2.8871333333333333e-05, "loss": 0.074, "step": 13390 }, { "epoch": 14.780784097183876, "grad_norm": 0.3580009341239929, "learning_rate": 2.8871000000000002e-05, "loss": 0.0407, "step": 13391 }, { "epoch": 14.781888459414688, "grad_norm": 0.7511084675788879, "learning_rate": 2.8870666666666668e-05, "loss": 0.0344, "step": 13392 }, { "epoch": 14.7829928216455, "grad_norm": 0.2589661180973053, "learning_rate": 2.8870333333333334e-05, "loss": 0.0198, "step": 13393 }, { "epoch": 14.78409718387631, "grad_norm": 0.42239341139793396, "learning_rate": 2.8870000000000003e-05, "loss": 0.0653, "step": 13394 }, { "epoch": 14.785201546107123, "grad_norm": 0.32650044560432434, "learning_rate": 2.8869666666666666e-05, "loss": 0.0267, "step": 13395 }, { "epoch": 14.786305908337935, "grad_norm": 0.3194599449634552, "learning_rate": 2.8869333333333335e-05, "loss": 0.0115, "step": 13396 }, { "epoch": 14.787410270568747, "grad_norm": 0.19395530223846436, "learning_rate": 2.8869e-05, "loss": 0.0127, "step": 13397 }, { "epoch": 14.788514632799558, "grad_norm": 0.10933460295200348, "learning_rate": 2.8868666666666667e-05, "loss": 0.0066, "step": 13398 }, { "epoch": 14.78961899503037, "grad_norm": 0.3057004511356354, "learning_rate": 2.8868333333333333e-05, "loss": 0.0134, "step": 13399 }, { "epoch": 14.790723357261182, "grad_norm": 0.8162906765937805, "learning_rate": 2.8868000000000002e-05, "loss": 0.04, "step": 13400 }, { "epoch": 14.791827719491993, "grad_norm": 0.2812858819961548, "learning_rate": 2.8867666666666668e-05, "loss": 0.0117, "step": 13401 }, { "epoch": 14.792932081722805, "grad_norm": 0.21661941707134247, "learning_rate": 2.8867333333333334e-05, "loss": 0.0101, "step": 13402 }, { "epoch": 14.794036443953617, "grad_norm": 0.9157020449638367, "learning_rate": 
2.8867000000000003e-05, "loss": 0.0058, "step": 13403 }, { "epoch": 14.795140806184428, "grad_norm": 0.2480136603116989, "learning_rate": 2.8866666666666666e-05, "loss": 0.0142, "step": 13404 }, { "epoch": 14.79624516841524, "grad_norm": 0.2643585503101349, "learning_rate": 2.8866333333333335e-05, "loss": 0.014, "step": 13405 }, { "epoch": 14.797349530646052, "grad_norm": 0.44133487343788147, "learning_rate": 2.8866e-05, "loss": 0.0146, "step": 13406 }, { "epoch": 14.798453892876864, "grad_norm": 0.4734797477722168, "learning_rate": 2.8865666666666667e-05, "loss": 0.0176, "step": 13407 }, { "epoch": 14.799558255107675, "grad_norm": 0.24147158861160278, "learning_rate": 2.8865333333333336e-05, "loss": 0.0057, "step": 13408 }, { "epoch": 14.800662617338487, "grad_norm": 0.3067595362663269, "learning_rate": 2.8865e-05, "loss": 0.0141, "step": 13409 }, { "epoch": 14.8017669795693, "grad_norm": 0.40300413966178894, "learning_rate": 2.8864666666666668e-05, "loss": 0.0182, "step": 13410 }, { "epoch": 14.80287134180011, "grad_norm": 0.6371213793754578, "learning_rate": 2.8864333333333334e-05, "loss": 0.0183, "step": 13411 }, { "epoch": 14.803975704030922, "grad_norm": 0.16159996390342712, "learning_rate": 2.8864e-05, "loss": 0.006, "step": 13412 }, { "epoch": 14.805080066261734, "grad_norm": 0.5018903017044067, "learning_rate": 2.8863666666666666e-05, "loss": 0.0117, "step": 13413 }, { "epoch": 14.806184428492546, "grad_norm": 0.2665404975414276, "learning_rate": 2.8863333333333335e-05, "loss": 0.009, "step": 13414 }, { "epoch": 14.807288790723357, "grad_norm": 0.29104024171829224, "learning_rate": 2.8862999999999998e-05, "loss": 0.0142, "step": 13415 }, { "epoch": 14.808393152954169, "grad_norm": 0.29468774795532227, "learning_rate": 2.8862666666666667e-05, "loss": 0.0121, "step": 13416 }, { "epoch": 14.809497515184981, "grad_norm": 0.34810560941696167, "learning_rate": 2.8862333333333337e-05, "loss": 0.0106, "step": 13417 }, { "epoch": 14.810601877415792, "grad_norm": 
0.5976868271827698, "learning_rate": 2.8862e-05, "loss": 0.0111, "step": 13418 }, { "epoch": 14.811706239646604, "grad_norm": 0.4348102807998657, "learning_rate": 2.886166666666667e-05, "loss": 0.0054, "step": 13419 }, { "epoch": 14.812810601877416, "grad_norm": 0.14399850368499756, "learning_rate": 2.8861333333333334e-05, "loss": 0.0095, "step": 13420 }, { "epoch": 14.813914964108228, "grad_norm": 0.38503631949424744, "learning_rate": 2.8861e-05, "loss": 0.0142, "step": 13421 }, { "epoch": 14.815019326339039, "grad_norm": 0.2245675027370453, "learning_rate": 2.8860666666666666e-05, "loss": 0.0103, "step": 13422 }, { "epoch": 14.816123688569851, "grad_norm": 0.2112375944852829, "learning_rate": 2.8860333333333336e-05, "loss": 0.013, "step": 13423 }, { "epoch": 14.817228050800663, "grad_norm": 0.849337100982666, "learning_rate": 2.8859999999999998e-05, "loss": 0.0208, "step": 13424 }, { "epoch": 14.818332413031474, "grad_norm": 0.271966814994812, "learning_rate": 2.8859666666666667e-05, "loss": 0.0131, "step": 13425 }, { "epoch": 14.819436775262286, "grad_norm": 0.4838116765022278, "learning_rate": 2.8859333333333337e-05, "loss": 0.0174, "step": 13426 }, { "epoch": 14.820541137493098, "grad_norm": 0.3658906817436218, "learning_rate": 2.8859e-05, "loss": 0.0137, "step": 13427 }, { "epoch": 14.821645499723909, "grad_norm": 0.3886788785457611, "learning_rate": 2.885866666666667e-05, "loss": 0.0066, "step": 13428 }, { "epoch": 14.82274986195472, "grad_norm": 0.22321869432926178, "learning_rate": 2.8858333333333335e-05, "loss": 0.0148, "step": 13429 }, { "epoch": 14.823854224185533, "grad_norm": 0.29440176486968994, "learning_rate": 2.8858e-05, "loss": 0.0127, "step": 13430 }, { "epoch": 14.824958586416345, "grad_norm": 0.196079820394516, "learning_rate": 2.8857666666666666e-05, "loss": 0.0126, "step": 13431 }, { "epoch": 14.826062948647156, "grad_norm": 0.6586089134216309, "learning_rate": 2.8857333333333336e-05, "loss": 0.0194, "step": 13432 }, { "epoch": 
14.827167310877968, "grad_norm": 0.9175212383270264, "learning_rate": 2.8857000000000002e-05, "loss": 0.0167, "step": 13433 }, { "epoch": 14.82827167310878, "grad_norm": 0.36607232689857483, "learning_rate": 2.8856666666666668e-05, "loss": 0.0224, "step": 13434 }, { "epoch": 14.82937603533959, "grad_norm": 1.263213872909546, "learning_rate": 2.8856333333333337e-05, "loss": 0.1477, "step": 13435 }, { "epoch": 14.830480397570403, "grad_norm": 0.8519018292427063, "learning_rate": 2.8856e-05, "loss": 0.1699, "step": 13436 }, { "epoch": 14.831584759801215, "grad_norm": 0.5524945855140686, "learning_rate": 2.885566666666667e-05, "loss": 0.1431, "step": 13437 }, { "epoch": 14.832689122032026, "grad_norm": 0.7521023154258728, "learning_rate": 2.885533333333333e-05, "loss": 0.1016, "step": 13438 }, { "epoch": 14.833793484262838, "grad_norm": 0.3983161449432373, "learning_rate": 2.8855e-05, "loss": 0.0628, "step": 13439 }, { "epoch": 14.83489784649365, "grad_norm": 0.3630687892436981, "learning_rate": 2.8854666666666667e-05, "loss": 0.1132, "step": 13440 }, { "epoch": 14.836002208724462, "grad_norm": 0.35969334840774536, "learning_rate": 2.8854333333333333e-05, "loss": 0.0713, "step": 13441 }, { "epoch": 14.837106570955273, "grad_norm": 0.3245490491390228, "learning_rate": 2.8854000000000002e-05, "loss": 0.0347, "step": 13442 }, { "epoch": 14.838210933186085, "grad_norm": 0.40452179312705994, "learning_rate": 2.8853666666666668e-05, "loss": 0.0248, "step": 13443 }, { "epoch": 14.839315295416897, "grad_norm": 0.305006206035614, "learning_rate": 2.8853333333333334e-05, "loss": 0.0252, "step": 13444 }, { "epoch": 14.840419657647708, "grad_norm": 0.23711155354976654, "learning_rate": 2.8853e-05, "loss": 0.0178, "step": 13445 }, { "epoch": 14.84152401987852, "grad_norm": 0.3090340197086334, "learning_rate": 2.885266666666667e-05, "loss": 0.0223, "step": 13446 }, { "epoch": 14.842628382109332, "grad_norm": 0.3631863594055176, "learning_rate": 2.885233333333333e-05, "loss": 0.0281, 
"step": 13447 }, { "epoch": 14.843732744340144, "grad_norm": 0.6551122069358826, "learning_rate": 2.8852e-05, "loss": 0.0116, "step": 13448 }, { "epoch": 14.844837106570955, "grad_norm": 1.0291320085525513, "learning_rate": 2.8851666666666667e-05, "loss": 0.0635, "step": 13449 }, { "epoch": 14.845941468801767, "grad_norm": 0.30592283606529236, "learning_rate": 2.8851333333333333e-05, "loss": 0.013, "step": 13450 }, { "epoch": 14.84704583103258, "grad_norm": 0.21262821555137634, "learning_rate": 2.8851000000000002e-05, "loss": 0.0094, "step": 13451 }, { "epoch": 14.84815019326339, "grad_norm": 0.31835830211639404, "learning_rate": 2.8850666666666668e-05, "loss": 0.0083, "step": 13452 }, { "epoch": 14.849254555494202, "grad_norm": 0.18479587137699127, "learning_rate": 2.8850333333333334e-05, "loss": 0.0087, "step": 13453 }, { "epoch": 14.850358917725014, "grad_norm": 0.16035351157188416, "learning_rate": 2.885e-05, "loss": 0.0101, "step": 13454 }, { "epoch": 14.851463279955826, "grad_norm": 0.42870062589645386, "learning_rate": 2.884966666666667e-05, "loss": 0.0204, "step": 13455 }, { "epoch": 14.852567642186637, "grad_norm": 0.2464112639427185, "learning_rate": 2.8849333333333332e-05, "loss": 0.0107, "step": 13456 }, { "epoch": 14.853672004417449, "grad_norm": 0.4254676401615143, "learning_rate": 2.8849e-05, "loss": 0.0278, "step": 13457 }, { "epoch": 14.854776366648261, "grad_norm": 0.18915046751499176, "learning_rate": 2.884866666666667e-05, "loss": 0.0123, "step": 13458 }, { "epoch": 14.855880728879072, "grad_norm": 0.3668525516986847, "learning_rate": 2.8848333333333333e-05, "loss": 0.0534, "step": 13459 }, { "epoch": 14.856985091109884, "grad_norm": 0.22594545781612396, "learning_rate": 2.8848000000000002e-05, "loss": 0.0059, "step": 13460 }, { "epoch": 14.858089453340696, "grad_norm": 0.3081543743610382, "learning_rate": 2.8847666666666668e-05, "loss": 0.0158, "step": 13461 }, { "epoch": 14.859193815571507, "grad_norm": 0.3730872571468353, "learning_rate": 
2.8847333333333334e-05, "loss": 0.0188, "step": 13462 }, { "epoch": 14.860298177802319, "grad_norm": 0.4025540351867676, "learning_rate": 2.8847e-05, "loss": 0.0113, "step": 13463 }, { "epoch": 14.861402540033131, "grad_norm": 0.15873834490776062, "learning_rate": 2.884666666666667e-05, "loss": 0.0056, "step": 13464 }, { "epoch": 14.862506902263943, "grad_norm": 0.2860700488090515, "learning_rate": 2.8846333333333332e-05, "loss": 0.0072, "step": 13465 }, { "epoch": 14.863611264494754, "grad_norm": 0.3711114227771759, "learning_rate": 2.8846e-05, "loss": 0.0118, "step": 13466 }, { "epoch": 14.864715626725566, "grad_norm": 0.23251794278621674, "learning_rate": 2.8845666666666667e-05, "loss": 0.0094, "step": 13467 }, { "epoch": 14.865819988956378, "grad_norm": 0.20330238342285156, "learning_rate": 2.8845333333333333e-05, "loss": 0.0102, "step": 13468 }, { "epoch": 14.866924351187189, "grad_norm": 0.45089301466941833, "learning_rate": 2.8845000000000003e-05, "loss": 0.0118, "step": 13469 }, { "epoch": 14.868028713418001, "grad_norm": 0.1674375683069229, "learning_rate": 2.8844666666666665e-05, "loss": 0.0056, "step": 13470 }, { "epoch": 14.869133075648813, "grad_norm": 0.17763616144657135, "learning_rate": 2.8844333333333334e-05, "loss": 0.0087, "step": 13471 }, { "epoch": 14.870237437879624, "grad_norm": 0.28164348006248474, "learning_rate": 2.8844e-05, "loss": 0.007, "step": 13472 }, { "epoch": 14.871341800110436, "grad_norm": 0.2105596363544464, "learning_rate": 2.8843666666666666e-05, "loss": 0.0074, "step": 13473 }, { "epoch": 14.872446162341248, "grad_norm": 0.35576173663139343, "learning_rate": 2.8843333333333332e-05, "loss": 0.0147, "step": 13474 }, { "epoch": 14.87355052457206, "grad_norm": 0.6487646102905273, "learning_rate": 2.8843e-05, "loss": 0.0107, "step": 13475 }, { "epoch": 14.87465488680287, "grad_norm": 0.21614645421504974, "learning_rate": 2.8842666666666667e-05, "loss": 0.009, "step": 13476 }, { "epoch": 14.875759249033683, "grad_norm": 
0.19080917537212372, "learning_rate": 2.8842333333333333e-05, "loss": 0.0078, "step": 13477 }, { "epoch": 14.876863611264495, "grad_norm": 0.6489685773849487, "learning_rate": 2.8842000000000003e-05, "loss": 0.0175, "step": 13478 }, { "epoch": 14.877967973495306, "grad_norm": 0.30291685461997986, "learning_rate": 2.8841666666666665e-05, "loss": 0.0115, "step": 13479 }, { "epoch": 14.879072335726118, "grad_norm": 0.353592187166214, "learning_rate": 2.8841333333333335e-05, "loss": 0.0268, "step": 13480 }, { "epoch": 14.88017669795693, "grad_norm": 0.2977529466152191, "learning_rate": 2.8841e-05, "loss": 0.015, "step": 13481 }, { "epoch": 14.881281060187742, "grad_norm": 0.2839657962322235, "learning_rate": 2.8840666666666666e-05, "loss": 0.0086, "step": 13482 }, { "epoch": 14.882385422418553, "grad_norm": 0.2682904899120331, "learning_rate": 2.8840333333333336e-05, "loss": 0.0122, "step": 13483 }, { "epoch": 14.883489784649365, "grad_norm": 0.32250985503196716, "learning_rate": 2.8840000000000002e-05, "loss": 0.0168, "step": 13484 }, { "epoch": 14.884594146880177, "grad_norm": 0.6191229820251465, "learning_rate": 2.8839666666666668e-05, "loss": 0.177, "step": 13485 }, { "epoch": 14.885698509110988, "grad_norm": 0.5756614804267883, "learning_rate": 2.8839333333333334e-05, "loss": 0.1344, "step": 13486 }, { "epoch": 14.8868028713418, "grad_norm": 0.4365866482257843, "learning_rate": 2.8839000000000003e-05, "loss": 0.107, "step": 13487 }, { "epoch": 14.887907233572612, "grad_norm": 0.4305931031703949, "learning_rate": 2.8838666666666665e-05, "loss": 0.1107, "step": 13488 }, { "epoch": 14.889011595803424, "grad_norm": 1.0710195302963257, "learning_rate": 2.8838333333333335e-05, "loss": 0.1544, "step": 13489 }, { "epoch": 14.890115958034235, "grad_norm": 0.355271577835083, "learning_rate": 2.8838e-05, "loss": 0.0754, "step": 13490 }, { "epoch": 14.891220320265047, "grad_norm": 0.32046180963516235, "learning_rate": 2.8837666666666667e-05, "loss": 0.0406, "step": 13491 }, { 
"epoch": 14.89232468249586, "grad_norm": 0.36114010214805603, "learning_rate": 2.8837333333333336e-05, "loss": 0.0559, "step": 13492 }, { "epoch": 14.89342904472667, "grad_norm": 0.32638850808143616, "learning_rate": 2.8837000000000002e-05, "loss": 0.0565, "step": 13493 }, { "epoch": 14.894533406957482, "grad_norm": 0.2822471857070923, "learning_rate": 2.8836666666666668e-05, "loss": 0.0165, "step": 13494 }, { "epoch": 14.895637769188294, "grad_norm": 0.2125919759273529, "learning_rate": 2.8836333333333334e-05, "loss": 0.0539, "step": 13495 }, { "epoch": 14.896742131419105, "grad_norm": 0.33338212966918945, "learning_rate": 2.8836000000000003e-05, "loss": 0.0154, "step": 13496 }, { "epoch": 14.897846493649917, "grad_norm": 0.3370666205883026, "learning_rate": 2.8835666666666666e-05, "loss": 0.0422, "step": 13497 }, { "epoch": 14.898950855880729, "grad_norm": 0.5198678374290466, "learning_rate": 2.8835333333333335e-05, "loss": 0.0213, "step": 13498 }, { "epoch": 14.900055218111541, "grad_norm": 0.31586235761642456, "learning_rate": 2.8834999999999998e-05, "loss": 0.0166, "step": 13499 }, { "epoch": 14.901159580342352, "grad_norm": 0.2527746558189392, "learning_rate": 2.8834666666666667e-05, "loss": 0.0098, "step": 13500 }, { "epoch": 14.902263942573164, "grad_norm": 0.1815258264541626, "learning_rate": 2.8834333333333336e-05, "loss": 0.0072, "step": 13501 }, { "epoch": 14.903368304803976, "grad_norm": 0.3518275320529938, "learning_rate": 2.8834e-05, "loss": 0.0066, "step": 13502 }, { "epoch": 14.904472667034787, "grad_norm": 0.16494327783584595, "learning_rate": 2.8833666666666668e-05, "loss": 0.0063, "step": 13503 }, { "epoch": 14.905577029265599, "grad_norm": 0.3081873655319214, "learning_rate": 2.8833333333333334e-05, "loss": 0.0097, "step": 13504 }, { "epoch": 14.906681391496411, "grad_norm": 0.17155902087688446, "learning_rate": 2.8833e-05, "loss": 0.0279, "step": 13505 }, { "epoch": 14.907785753727222, "grad_norm": 0.28509005904197693, "learning_rate": 
2.8832666666666666e-05, "loss": 0.0086, "step": 13506 }, { "epoch": 14.908890115958034, "grad_norm": 0.21039536595344543, "learning_rate": 2.8832333333333335e-05, "loss": 0.0079, "step": 13507 }, { "epoch": 14.909994478188846, "grad_norm": 0.2852078080177307, "learning_rate": 2.8832e-05, "loss": 0.0179, "step": 13508 }, { "epoch": 14.911098840419658, "grad_norm": 0.5149413347244263, "learning_rate": 2.8831666666666667e-05, "loss": 0.0151, "step": 13509 }, { "epoch": 14.912203202650469, "grad_norm": 0.45140567421913147, "learning_rate": 2.8831333333333336e-05, "loss": 0.013, "step": 13510 }, { "epoch": 14.913307564881281, "grad_norm": 0.16350463032722473, "learning_rate": 2.8831e-05, "loss": 0.0071, "step": 13511 }, { "epoch": 14.914411927112093, "grad_norm": 0.2270658314228058, "learning_rate": 2.8830666666666668e-05, "loss": 0.013, "step": 13512 }, { "epoch": 14.915516289342904, "grad_norm": 0.2034066915512085, "learning_rate": 2.8830333333333334e-05, "loss": 0.0052, "step": 13513 }, { "epoch": 14.916620651573716, "grad_norm": 0.1289011389017105, "learning_rate": 2.883e-05, "loss": 0.0058, "step": 13514 }, { "epoch": 14.917725013804528, "grad_norm": 0.28551793098449707, "learning_rate": 2.8829666666666666e-05, "loss": 0.0103, "step": 13515 }, { "epoch": 14.91882937603534, "grad_norm": 0.22090263664722443, "learning_rate": 2.8829333333333335e-05, "loss": 0.0145, "step": 13516 }, { "epoch": 14.91993373826615, "grad_norm": 0.13871680200099945, "learning_rate": 2.8829e-05, "loss": 0.0046, "step": 13517 }, { "epoch": 14.921038100496963, "grad_norm": 0.4951867163181305, "learning_rate": 2.8828666666666667e-05, "loss": 0.0167, "step": 13518 }, { "epoch": 14.922142462727775, "grad_norm": 0.09630823135375977, "learning_rate": 2.8828333333333337e-05, "loss": 0.0049, "step": 13519 }, { "epoch": 14.923246824958586, "grad_norm": 0.15101389586925507, "learning_rate": 2.8828e-05, "loss": 0.009, "step": 13520 }, { "epoch": 14.924351187189398, "grad_norm": 0.1780845820903778, 
"learning_rate": 2.882766666666667e-05, "loss": 0.0069, "step": 13521 }, { "epoch": 14.92545554942021, "grad_norm": 0.26961973309516907, "learning_rate": 2.8827333333333334e-05, "loss": 0.0093, "step": 13522 }, { "epoch": 14.926559911651022, "grad_norm": 0.39283227920532227, "learning_rate": 2.8827e-05, "loss": 0.0095, "step": 13523 }, { "epoch": 14.927664273881833, "grad_norm": 0.18372419476509094, "learning_rate": 2.8826666666666666e-05, "loss": 0.0065, "step": 13524 }, { "epoch": 14.928768636112645, "grad_norm": 0.16278080642223358, "learning_rate": 2.8826333333333336e-05, "loss": 0.0069, "step": 13525 }, { "epoch": 14.929872998343457, "grad_norm": 0.18627391755580902, "learning_rate": 2.8826e-05, "loss": 0.0066, "step": 13526 }, { "epoch": 14.930977360574268, "grad_norm": 0.31615084409713745, "learning_rate": 2.8825666666666667e-05, "loss": 0.0127, "step": 13527 }, { "epoch": 14.93208172280508, "grad_norm": 0.24004203081130981, "learning_rate": 2.8825333333333333e-05, "loss": 0.0065, "step": 13528 }, { "epoch": 14.933186085035892, "grad_norm": 0.30306416749954224, "learning_rate": 2.8825e-05, "loss": 0.0178, "step": 13529 }, { "epoch": 14.934290447266704, "grad_norm": 0.35071173310279846, "learning_rate": 2.882466666666667e-05, "loss": 0.0125, "step": 13530 }, { "epoch": 14.935394809497515, "grad_norm": 0.19299589097499847, "learning_rate": 2.882433333333333e-05, "loss": 0.0046, "step": 13531 }, { "epoch": 14.936499171728327, "grad_norm": 0.32094040513038635, "learning_rate": 2.8824e-05, "loss": 0.0107, "step": 13532 }, { "epoch": 14.93760353395914, "grad_norm": 0.9398680925369263, "learning_rate": 2.882366666666667e-05, "loss": 0.0064, "step": 13533 }, { "epoch": 14.93870789618995, "grad_norm": 0.48387131094932556, "learning_rate": 2.8823333333333332e-05, "loss": 0.0201, "step": 13534 }, { "epoch": 14.939812258420762, "grad_norm": 0.8573744893074036, "learning_rate": 2.8823000000000002e-05, "loss": 0.1996, "step": 13535 }, { "epoch": 14.940916620651574, 
"grad_norm": 0.6826062798500061, "learning_rate": 2.8822666666666668e-05, "loss": 0.1716, "step": 13536 }, { "epoch": 14.942020982882385, "grad_norm": 0.6653338670730591, "learning_rate": 2.8822333333333334e-05, "loss": 0.1599, "step": 13537 }, { "epoch": 14.943125345113197, "grad_norm": 0.47312605381011963, "learning_rate": 2.8822e-05, "loss": 0.0878, "step": 13538 }, { "epoch": 14.94422970734401, "grad_norm": 0.6735013723373413, "learning_rate": 2.882166666666667e-05, "loss": 0.1012, "step": 13539 }, { "epoch": 14.945334069574821, "grad_norm": 0.3545643985271454, "learning_rate": 2.882133333333333e-05, "loss": 0.0445, "step": 13540 }, { "epoch": 14.946438431805632, "grad_norm": 0.3128027021884918, "learning_rate": 2.8821e-05, "loss": 0.0374, "step": 13541 }, { "epoch": 14.947542794036444, "grad_norm": 0.28090718388557434, "learning_rate": 2.882066666666667e-05, "loss": 0.0279, "step": 13542 }, { "epoch": 14.948647156267256, "grad_norm": 0.23328711092472076, "learning_rate": 2.8820333333333333e-05, "loss": 0.0276, "step": 13543 }, { "epoch": 14.949751518498067, "grad_norm": 0.18118397891521454, "learning_rate": 2.8820000000000002e-05, "loss": 0.0141, "step": 13544 }, { "epoch": 14.950855880728879, "grad_norm": 0.4765743017196655, "learning_rate": 2.8819666666666668e-05, "loss": 0.0193, "step": 13545 }, { "epoch": 14.951960242959691, "grad_norm": 0.22497084736824036, "learning_rate": 2.8819333333333334e-05, "loss": 0.0382, "step": 13546 }, { "epoch": 14.953064605190502, "grad_norm": 0.314209520816803, "learning_rate": 2.8819e-05, "loss": 0.0125, "step": 13547 }, { "epoch": 14.954168967421314, "grad_norm": 0.17060469090938568, "learning_rate": 2.881866666666667e-05, "loss": 0.0272, "step": 13548 }, { "epoch": 14.955273329652126, "grad_norm": 0.2917662262916565, "learning_rate": 2.881833333333333e-05, "loss": 0.0073, "step": 13549 }, { "epoch": 14.956377691882938, "grad_norm": 0.13909076154232025, "learning_rate": 2.8818e-05, "loss": 0.0076, "step": 13550 }, { 
"epoch": 14.957482054113749, "grad_norm": 0.12134665250778198, "learning_rate": 2.881766666666667e-05, "loss": 0.0104, "step": 13551 }, { "epoch": 14.958586416344561, "grad_norm": 0.16640011966228485, "learning_rate": 2.8817333333333333e-05, "loss": 0.0138, "step": 13552 }, { "epoch": 14.959690778575373, "grad_norm": 0.2108234018087387, "learning_rate": 2.8817000000000002e-05, "loss": 0.0133, "step": 13553 }, { "epoch": 14.960795140806184, "grad_norm": 0.250937283039093, "learning_rate": 2.8816666666666668e-05, "loss": 0.011, "step": 13554 }, { "epoch": 14.961899503036996, "grad_norm": 0.19995169341564178, "learning_rate": 2.8816333333333334e-05, "loss": 0.0099, "step": 13555 }, { "epoch": 14.963003865267808, "grad_norm": 0.2503257393836975, "learning_rate": 2.8816e-05, "loss": 0.0133, "step": 13556 }, { "epoch": 14.96410822749862, "grad_norm": 0.2334171086549759, "learning_rate": 2.881566666666667e-05, "loss": 0.0123, "step": 13557 }, { "epoch": 14.96521258972943, "grad_norm": 0.24088382720947266, "learning_rate": 2.8815333333333335e-05, "loss": 0.0114, "step": 13558 }, { "epoch": 14.966316951960243, "grad_norm": 0.18151973187923431, "learning_rate": 2.8815e-05, "loss": 0.0093, "step": 13559 }, { "epoch": 14.967421314191055, "grad_norm": 0.18313923478126526, "learning_rate": 2.8814666666666667e-05, "loss": 0.0088, "step": 13560 }, { "epoch": 14.968525676421866, "grad_norm": 0.24154922366142273, "learning_rate": 2.8814333333333333e-05, "loss": 0.0074, "step": 13561 }, { "epoch": 14.969630038652678, "grad_norm": 0.10054464638233185, "learning_rate": 2.8814000000000002e-05, "loss": 0.0048, "step": 13562 }, { "epoch": 14.97073440088349, "grad_norm": 0.15689785778522491, "learning_rate": 2.8813666666666665e-05, "loss": 0.036, "step": 13563 }, { "epoch": 14.971838763114302, "grad_norm": 0.16179773211479187, "learning_rate": 2.8813333333333334e-05, "loss": 0.0118, "step": 13564 }, { "epoch": 14.972943125345113, "grad_norm": 0.19524678587913513, "learning_rate": 
2.8813e-05, "loss": 0.0121, "step": 13565 }, { "epoch": 14.974047487575925, "grad_norm": 0.20371346175670624, "learning_rate": 2.8812666666666666e-05, "loss": 0.0076, "step": 13566 }, { "epoch": 14.975151849806737, "grad_norm": 0.2756071984767914, "learning_rate": 2.8812333333333335e-05, "loss": 0.016, "step": 13567 }, { "epoch": 14.976256212037548, "grad_norm": 0.20885297656059265, "learning_rate": 2.8812e-05, "loss": 0.0074, "step": 13568 }, { "epoch": 14.97736057426836, "grad_norm": 0.17065215110778809, "learning_rate": 2.8811666666666667e-05, "loss": 0.0097, "step": 13569 }, { "epoch": 14.978464936499172, "grad_norm": 0.4310421347618103, "learning_rate": 2.8811333333333333e-05, "loss": 0.0154, "step": 13570 }, { "epoch": 14.979569298729983, "grad_norm": 0.1595950871706009, "learning_rate": 2.8811000000000002e-05, "loss": 0.0084, "step": 13571 }, { "epoch": 14.980673660960795, "grad_norm": 0.19098660349845886, "learning_rate": 2.8810666666666665e-05, "loss": 0.0122, "step": 13572 }, { "epoch": 14.981778023191607, "grad_norm": 0.11528214067220688, "learning_rate": 2.8810333333333334e-05, "loss": 0.006, "step": 13573 }, { "epoch": 14.98288238542242, "grad_norm": 0.1380302906036377, "learning_rate": 2.881e-05, "loss": 0.0061, "step": 13574 }, { "epoch": 14.98398674765323, "grad_norm": 0.1990986466407776, "learning_rate": 2.8809666666666666e-05, "loss": 0.0083, "step": 13575 }, { "epoch": 14.985091109884042, "grad_norm": 0.19592228531837463, "learning_rate": 2.8809333333333336e-05, "loss": 0.0104, "step": 13576 }, { "epoch": 14.986195472114854, "grad_norm": 0.1286453753709793, "learning_rate": 2.8809e-05, "loss": 0.0058, "step": 13577 }, { "epoch": 14.987299834345665, "grad_norm": 0.20540353655815125, "learning_rate": 2.8808666666666667e-05, "loss": 0.0071, "step": 13578 }, { "epoch": 14.988404196576477, "grad_norm": 0.5980964303016663, "learning_rate": 2.8808333333333333e-05, "loss": 0.0144, "step": 13579 }, { "epoch": 14.98950855880729, "grad_norm": 
0.15104445815086365, "learning_rate": 2.8808000000000003e-05, "loss": 0.0081, "step": 13580 }, { "epoch": 14.9906129210381, "grad_norm": 0.21824194490909576, "learning_rate": 2.8807666666666665e-05, "loss": 0.0068, "step": 13581 }, { "epoch": 14.991717283268912, "grad_norm": 0.21775498986244202, "learning_rate": 2.8807333333333335e-05, "loss": 0.008, "step": 13582 }, { "epoch": 14.992821645499724, "grad_norm": 0.4040262699127197, "learning_rate": 2.8807000000000004e-05, "loss": 0.0136, "step": 13583 }, { "epoch": 14.993926007730536, "grad_norm": 0.2757810652256012, "learning_rate": 2.8806666666666666e-05, "loss": 0.0067, "step": 13584 }, { "epoch": 14.995030369961347, "grad_norm": 0.5834855437278748, "learning_rate": 2.8806333333333336e-05, "loss": 0.1523, "step": 13585 }, { "epoch": 14.996134732192159, "grad_norm": 0.13187552988529205, "learning_rate": 2.8806e-05, "loss": 0.0122, "step": 13586 }, { "epoch": 14.997239094422971, "grad_norm": 0.19760483503341675, "learning_rate": 2.8805666666666668e-05, "loss": 0.0081, "step": 13587 }, { "epoch": 14.998343456653782, "grad_norm": 0.32378244400024414, "learning_rate": 2.8805333333333334e-05, "loss": 0.0112, "step": 13588 }, { "epoch": 14.999447818884594, "grad_norm": 0.1362360566854477, "learning_rate": 2.8805e-05, "loss": 0.0088, "step": 13589 }, { "epoch": 15.0, "grad_norm": 0.06563210487365723, "learning_rate": 2.8804666666666665e-05, "loss": 0.0015, "step": 13590 }, { "epoch": 15.001104362230812, "grad_norm": 0.5933535099029541, "learning_rate": 2.8804333333333335e-05, "loss": 0.1619, "step": 13591 }, { "epoch": 15.002208724461623, "grad_norm": 0.5554006695747375, "learning_rate": 2.8804e-05, "loss": 0.0959, "step": 13592 }, { "epoch": 15.003313086692435, "grad_norm": 0.3967132568359375, "learning_rate": 2.8803666666666667e-05, "loss": 0.1258, "step": 13593 }, { "epoch": 15.004417448923247, "grad_norm": 0.4658185541629791, "learning_rate": 2.8803333333333336e-05, "loss": 0.0532, "step": 13594 }, { "epoch": 
15.00552181115406, "grad_norm": 0.3762846291065216, "learning_rate": 2.8803e-05, "loss": 0.0401, "step": 13595 }, { "epoch": 15.00662617338487, "grad_norm": 0.3482412099838257, "learning_rate": 2.8802666666666668e-05, "loss": 0.0369, "step": 13596 }, { "epoch": 15.007730535615682, "grad_norm": 0.18912692368030548, "learning_rate": 2.8802333333333334e-05, "loss": 0.0281, "step": 13597 }, { "epoch": 15.008834897846494, "grad_norm": 0.22432227432727814, "learning_rate": 2.8802e-05, "loss": 0.025, "step": 13598 }, { "epoch": 15.009939260077305, "grad_norm": 0.6441006660461426, "learning_rate": 2.8801666666666666e-05, "loss": 0.0165, "step": 13599 }, { "epoch": 15.011043622308117, "grad_norm": 0.31403249502182007, "learning_rate": 2.8801333333333335e-05, "loss": 0.0138, "step": 13600 }, { "epoch": 15.01214798453893, "grad_norm": 0.26590222120285034, "learning_rate": 2.8801e-05, "loss": 0.0142, "step": 13601 }, { "epoch": 15.01325234676974, "grad_norm": 0.3528619408607483, "learning_rate": 2.8800666666666667e-05, "loss": 0.0157, "step": 13602 }, { "epoch": 15.014356709000552, "grad_norm": 0.21829167008399963, "learning_rate": 2.8800333333333336e-05, "loss": 0.0424, "step": 13603 }, { "epoch": 15.015461071231364, "grad_norm": 0.17354841530323029, "learning_rate": 2.88e-05, "loss": 0.0082, "step": 13604 }, { "epoch": 15.016565433462176, "grad_norm": 0.2566092610359192, "learning_rate": 2.8799666666666668e-05, "loss": 0.0107, "step": 13605 }, { "epoch": 15.017669795692987, "grad_norm": 0.16869200766086578, "learning_rate": 2.8799333333333334e-05, "loss": 0.0072, "step": 13606 }, { "epoch": 15.018774157923799, "grad_norm": 0.13709335029125214, "learning_rate": 2.8799e-05, "loss": 0.0064, "step": 13607 }, { "epoch": 15.019878520154611, "grad_norm": 0.1505473405122757, "learning_rate": 2.879866666666667e-05, "loss": 0.0058, "step": 13608 }, { "epoch": 15.020982882385422, "grad_norm": 0.5909448266029358, "learning_rate": 2.8798333333333335e-05, "loss": 0.0105, "step": 13609 }, 
{ "epoch": 15.022087244616234, "grad_norm": 0.14406564831733704, "learning_rate": 2.8798e-05, "loss": 0.007, "step": 13610 }, { "epoch": 15.023191606847046, "grad_norm": 0.3530309200286865, "learning_rate": 2.8797666666666667e-05, "loss": 0.0107, "step": 13611 }, { "epoch": 15.024295969077858, "grad_norm": 0.1915869116783142, "learning_rate": 2.8797333333333336e-05, "loss": 0.0059, "step": 13612 }, { "epoch": 15.025400331308669, "grad_norm": 0.1822618842124939, "learning_rate": 2.8797e-05, "loss": 0.0072, "step": 13613 }, { "epoch": 15.026504693539481, "grad_norm": 0.11205260455608368, "learning_rate": 2.8796666666666668e-05, "loss": 0.0057, "step": 13614 }, { "epoch": 15.027609055770293, "grad_norm": 0.33152323961257935, "learning_rate": 2.8796333333333334e-05, "loss": 0.0065, "step": 13615 }, { "epoch": 15.028713418001104, "grad_norm": 0.10487519949674606, "learning_rate": 2.8796e-05, "loss": 0.0022, "step": 13616 }, { "epoch": 15.029817780231916, "grad_norm": 0.23864863812923431, "learning_rate": 2.879566666666667e-05, "loss": 0.0078, "step": 13617 }, { "epoch": 15.030922142462728, "grad_norm": 0.4163760244846344, "learning_rate": 2.8795333333333332e-05, "loss": 0.0134, "step": 13618 }, { "epoch": 15.032026504693539, "grad_norm": 0.135252445936203, "learning_rate": 2.8795e-05, "loss": 0.0044, "step": 13619 }, { "epoch": 15.03313086692435, "grad_norm": 0.152622252702713, "learning_rate": 2.8794666666666667e-05, "loss": 0.0041, "step": 13620 }, { "epoch": 15.034235229155163, "grad_norm": 0.2194514125585556, "learning_rate": 2.8794333333333333e-05, "loss": 0.0116, "step": 13621 }, { "epoch": 15.035339591385975, "grad_norm": 0.26578855514526367, "learning_rate": 2.8794e-05, "loss": 0.0195, "step": 13622 }, { "epoch": 15.036443953616786, "grad_norm": 0.08902701735496521, "learning_rate": 2.879366666666667e-05, "loss": 0.004, "step": 13623 }, { "epoch": 15.037548315847598, "grad_norm": 0.1341698169708252, "learning_rate": 2.879333333333333e-05, "loss": 0.006, "step": 
13624 }, { "epoch": 15.03865267807841, "grad_norm": 0.22096803784370422, "learning_rate": 2.8793e-05, "loss": 0.0091, "step": 13625 }, { "epoch": 15.03975704030922, "grad_norm": 0.252918541431427, "learning_rate": 2.879266666666667e-05, "loss": 0.0133, "step": 13626 }, { "epoch": 15.040861402540033, "grad_norm": 0.2129855453968048, "learning_rate": 2.8792333333333332e-05, "loss": 0.0041, "step": 13627 }, { "epoch": 15.041965764770845, "grad_norm": 0.2721899747848511, "learning_rate": 2.8792e-05, "loss": 0.0083, "step": 13628 }, { "epoch": 15.043070127001657, "grad_norm": 0.36616024374961853, "learning_rate": 2.8791666666666667e-05, "loss": 0.01, "step": 13629 }, { "epoch": 15.044174489232468, "grad_norm": 0.21680010855197906, "learning_rate": 2.8791333333333333e-05, "loss": 0.0057, "step": 13630 }, { "epoch": 15.04527885146328, "grad_norm": 0.34814390540122986, "learning_rate": 2.8791e-05, "loss": 0.0056, "step": 13631 }, { "epoch": 15.046383213694092, "grad_norm": 0.33013319969177246, "learning_rate": 2.879066666666667e-05, "loss": 0.0064, "step": 13632 }, { "epoch": 15.047487575924903, "grad_norm": 0.27505674958229065, "learning_rate": 2.8790333333333335e-05, "loss": 0.0078, "step": 13633 }, { "epoch": 15.048591938155715, "grad_norm": 0.797592282295227, "learning_rate": 2.879e-05, "loss": 0.011, "step": 13634 }, { "epoch": 15.049696300386527, "grad_norm": 0.09280543774366379, "learning_rate": 2.878966666666667e-05, "loss": 0.0033, "step": 13635 }, { "epoch": 15.050800662617338, "grad_norm": 0.3855490982532501, "learning_rate": 2.8789333333333332e-05, "loss": 0.0182, "step": 13636 }, { "epoch": 15.05190502484815, "grad_norm": 0.1907929629087448, "learning_rate": 2.8789e-05, "loss": 0.0047, "step": 13637 }, { "epoch": 15.053009387078962, "grad_norm": 0.28067949414253235, "learning_rate": 2.8788666666666668e-05, "loss": 0.0145, "step": 13638 }, { "epoch": 15.054113749309774, "grad_norm": 0.28586912155151367, "learning_rate": 2.8788333333333334e-05, "loss": 0.0052, 
"step": 13639 }, { "epoch": 15.055218111540585, "grad_norm": 0.20263291895389557, "learning_rate": 2.8788e-05, "loss": 0.0078, "step": 13640 }, { "epoch": 15.056322473771397, "grad_norm": 0.5610763430595398, "learning_rate": 2.878766666666667e-05, "loss": 0.1807, "step": 13641 }, { "epoch": 15.05742683600221, "grad_norm": 0.48416951298713684, "learning_rate": 2.8787333333333335e-05, "loss": 0.1081, "step": 13642 }, { "epoch": 15.05853119823302, "grad_norm": 0.4648912847042084, "learning_rate": 2.8787e-05, "loss": 0.0888, "step": 13643 }, { "epoch": 15.059635560463832, "grad_norm": 0.6308587789535522, "learning_rate": 2.878666666666667e-05, "loss": 0.0965, "step": 13644 }, { "epoch": 15.060739922694644, "grad_norm": 0.7956756949424744, "learning_rate": 2.8786333333333333e-05, "loss": 0.0579, "step": 13645 }, { "epoch": 15.061844284925456, "grad_norm": 0.37686052918434143, "learning_rate": 2.8786000000000002e-05, "loss": 0.0745, "step": 13646 }, { "epoch": 15.062948647156267, "grad_norm": 0.5584791898727417, "learning_rate": 2.8785666666666668e-05, "loss": 0.0543, "step": 13647 }, { "epoch": 15.064053009387079, "grad_norm": 0.3079907298088074, "learning_rate": 2.8785333333333334e-05, "loss": 0.0344, "step": 13648 }, { "epoch": 15.065157371617891, "grad_norm": 0.1903504580259323, "learning_rate": 2.8785e-05, "loss": 0.0229, "step": 13649 }, { "epoch": 15.066261733848702, "grad_norm": 0.26828402280807495, "learning_rate": 2.8784666666666666e-05, "loss": 0.0183, "step": 13650 }, { "epoch": 15.067366096079514, "grad_norm": 0.352193146944046, "learning_rate": 2.8784333333333335e-05, "loss": 0.0413, "step": 13651 }, { "epoch": 15.068470458310326, "grad_norm": 0.17364031076431274, "learning_rate": 2.8784e-05, "loss": 0.0104, "step": 13652 }, { "epoch": 15.069574820541137, "grad_norm": 0.2435876727104187, "learning_rate": 2.8783666666666667e-05, "loss": 0.0106, "step": 13653 }, { "epoch": 15.070679182771949, "grad_norm": 0.12997859716415405, "learning_rate": 
2.8783333333333333e-05, "loss": 0.0082, "step": 13654 }, { "epoch": 15.071783545002761, "grad_norm": 0.4084683656692505, "learning_rate": 2.8783000000000002e-05, "loss": 0.0094, "step": 13655 }, { "epoch": 15.072887907233573, "grad_norm": 0.18887023627758026, "learning_rate": 2.8782666666666665e-05, "loss": 0.0109, "step": 13656 }, { "epoch": 15.073992269464384, "grad_norm": 0.13840755820274353, "learning_rate": 2.8782333333333334e-05, "loss": 0.0054, "step": 13657 }, { "epoch": 15.075096631695196, "grad_norm": 0.18666276335716248, "learning_rate": 2.8782000000000003e-05, "loss": 0.01, "step": 13658 }, { "epoch": 15.076200993926008, "grad_norm": 0.15143737196922302, "learning_rate": 2.8781666666666666e-05, "loss": 0.0109, "step": 13659 }, { "epoch": 15.077305356156819, "grad_norm": 0.1315518170595169, "learning_rate": 2.8781333333333335e-05, "loss": 0.0044, "step": 13660 }, { "epoch": 15.078409718387631, "grad_norm": 0.26363417506217957, "learning_rate": 2.8781e-05, "loss": 0.0108, "step": 13661 }, { "epoch": 15.079514080618443, "grad_norm": 0.3449365794658661, "learning_rate": 2.8780666666666667e-05, "loss": 0.0137, "step": 13662 }, { "epoch": 15.080618442849255, "grad_norm": 0.1054723933339119, "learning_rate": 2.8780333333333333e-05, "loss": 0.0062, "step": 13663 }, { "epoch": 15.081722805080066, "grad_norm": 0.20875945687294006, "learning_rate": 2.8780000000000002e-05, "loss": 0.0091, "step": 13664 }, { "epoch": 15.082827167310878, "grad_norm": 0.2429562509059906, "learning_rate": 2.8779666666666665e-05, "loss": 0.0104, "step": 13665 }, { "epoch": 15.08393152954169, "grad_norm": 0.2831304669380188, "learning_rate": 2.8779333333333334e-05, "loss": 0.0084, "step": 13666 }, { "epoch": 15.0850358917725, "grad_norm": 0.12313773483037949, "learning_rate": 2.8779000000000003e-05, "loss": 0.0035, "step": 13667 }, { "epoch": 15.086140254003313, "grad_norm": 0.26328039169311523, "learning_rate": 2.8778666666666666e-05, "loss": 0.0067, "step": 13668 }, { "epoch": 
15.087244616234125, "grad_norm": 0.18649108707904816, "learning_rate": 2.8778333333333335e-05, "loss": 0.0076, "step": 13669 }, { "epoch": 15.088348978464936, "grad_norm": 0.21523605287075043, "learning_rate": 2.8778e-05, "loss": 0.008, "step": 13670 }, { "epoch": 15.089453340695748, "grad_norm": 0.22521460056304932, "learning_rate": 2.8777666666666667e-05, "loss": 0.0106, "step": 13671 }, { "epoch": 15.09055770292656, "grad_norm": 0.149595707654953, "learning_rate": 2.8777333333333333e-05, "loss": 0.0052, "step": 13672 }, { "epoch": 15.091662065157372, "grad_norm": 0.6576983332633972, "learning_rate": 2.8777000000000002e-05, "loss": 0.014, "step": 13673 }, { "epoch": 15.092766427388183, "grad_norm": 0.43037328124046326, "learning_rate": 2.8776666666666665e-05, "loss": 0.0094, "step": 13674 }, { "epoch": 15.093870789618995, "grad_norm": 0.09157765656709671, "learning_rate": 2.8776333333333334e-05, "loss": 0.0052, "step": 13675 }, { "epoch": 15.094975151849807, "grad_norm": 0.15171965956687927, "learning_rate": 2.8776000000000004e-05, "loss": 0.0062, "step": 13676 }, { "epoch": 15.096079514080618, "grad_norm": 0.30004435777664185, "learning_rate": 2.8775666666666666e-05, "loss": 0.0066, "step": 13677 }, { "epoch": 15.09718387631143, "grad_norm": 0.2895415425300598, "learning_rate": 2.8775333333333336e-05, "loss": 0.0066, "step": 13678 }, { "epoch": 15.098288238542242, "grad_norm": 0.21003681421279907, "learning_rate": 2.8774999999999998e-05, "loss": 0.0082, "step": 13679 }, { "epoch": 15.099392600773054, "grad_norm": 0.12770572304725647, "learning_rate": 2.8774666666666667e-05, "loss": 0.0054, "step": 13680 }, { "epoch": 15.100496963003865, "grad_norm": 0.16774946451187134, "learning_rate": 2.8774333333333333e-05, "loss": 0.0082, "step": 13681 }, { "epoch": 15.101601325234677, "grad_norm": 0.1489950716495514, "learning_rate": 2.8774e-05, "loss": 0.0046, "step": 13682 }, { "epoch": 15.10270568746549, "grad_norm": 0.1701502948999405, "learning_rate": 
2.877366666666667e-05, "loss": 0.0078, "step": 13683 }, { "epoch": 15.1038100496963, "grad_norm": 0.1914220154285431, "learning_rate": 2.8773333333333335e-05, "loss": 0.0058, "step": 13684 }, { "epoch": 15.104914411927112, "grad_norm": 0.15668885409832, "learning_rate": 2.8773e-05, "loss": 0.0061, "step": 13685 }, { "epoch": 15.106018774157924, "grad_norm": 0.17786797881126404, "learning_rate": 2.8772666666666666e-05, "loss": 0.0128, "step": 13686 }, { "epoch": 15.107123136388736, "grad_norm": 0.27024051547050476, "learning_rate": 2.8772333333333336e-05, "loss": 0.006, "step": 13687 }, { "epoch": 15.108227498619547, "grad_norm": 0.5235285758972168, "learning_rate": 2.8771999999999998e-05, "loss": 0.007, "step": 13688 }, { "epoch": 15.109331860850359, "grad_norm": 0.1846955269575119, "learning_rate": 2.8771666666666668e-05, "loss": 0.0067, "step": 13689 }, { "epoch": 15.110436223081171, "grad_norm": 0.4467761218547821, "learning_rate": 2.8771333333333334e-05, "loss": 0.0087, "step": 13690 }, { "epoch": 15.111540585311982, "grad_norm": 0.46452757716178894, "learning_rate": 2.8771e-05, "loss": 0.1153, "step": 13691 }, { "epoch": 15.112644947542794, "grad_norm": 0.6010220050811768, "learning_rate": 2.877066666666667e-05, "loss": 0.0946, "step": 13692 }, { "epoch": 15.113749309773606, "grad_norm": 0.4476199448108673, "learning_rate": 2.8770333333333335e-05, "loss": 0.1048, "step": 13693 }, { "epoch": 15.114853672004417, "grad_norm": 0.3606717884540558, "learning_rate": 2.877e-05, "loss": 0.0619, "step": 13694 }, { "epoch": 15.115958034235229, "grad_norm": 0.35178032517433167, "learning_rate": 2.8769666666666667e-05, "loss": 0.0444, "step": 13695 }, { "epoch": 15.117062396466041, "grad_norm": 0.3288367688655853, "learning_rate": 2.8769333333333336e-05, "loss": 0.0495, "step": 13696 }, { "epoch": 15.118166758696853, "grad_norm": 0.3968721926212311, "learning_rate": 2.8769e-05, "loss": 0.068, "step": 13697 }, { "epoch": 15.119271120927664, "grad_norm": 0.4755655527114868, 
"learning_rate": 2.8768666666666668e-05, "loss": 0.0276, "step": 13698 }, { "epoch": 15.120375483158476, "grad_norm": 0.24481914937496185, "learning_rate": 2.8768333333333334e-05, "loss": 0.0216, "step": 13699 }, { "epoch": 15.121479845389288, "grad_norm": 0.520671546459198, "learning_rate": 2.8768e-05, "loss": 0.0173, "step": 13700 }, { "epoch": 15.122584207620099, "grad_norm": 0.530958890914917, "learning_rate": 2.876766666666667e-05, "loss": 0.0334, "step": 13701 }, { "epoch": 15.123688569850911, "grad_norm": 0.128362774848938, "learning_rate": 2.8767333333333335e-05, "loss": 0.0074, "step": 13702 }, { "epoch": 15.124792932081723, "grad_norm": 0.3148390054702759, "learning_rate": 2.8767e-05, "loss": 0.0098, "step": 13703 }, { "epoch": 15.125897294312535, "grad_norm": 0.23080624639987946, "learning_rate": 2.8766666666666667e-05, "loss": 0.01, "step": 13704 }, { "epoch": 15.127001656543346, "grad_norm": 0.3157350718975067, "learning_rate": 2.8766333333333336e-05, "loss": 0.0088, "step": 13705 }, { "epoch": 15.128106018774158, "grad_norm": 0.16083168983459473, "learning_rate": 2.8766e-05, "loss": 0.0106, "step": 13706 }, { "epoch": 15.12921038100497, "grad_norm": 0.2649783790111542, "learning_rate": 2.8765666666666668e-05, "loss": 0.0158, "step": 13707 }, { "epoch": 15.13031474323578, "grad_norm": 0.19867399334907532, "learning_rate": 2.8765333333333337e-05, "loss": 0.02, "step": 13708 }, { "epoch": 15.131419105466593, "grad_norm": 0.11622964590787888, "learning_rate": 2.8765e-05, "loss": 0.0056, "step": 13709 }, { "epoch": 15.132523467697405, "grad_norm": 0.6012980341911316, "learning_rate": 2.876466666666667e-05, "loss": 0.0132, "step": 13710 }, { "epoch": 15.133627829928216, "grad_norm": 0.34741315245628357, "learning_rate": 2.8764333333333332e-05, "loss": 0.01, "step": 13711 }, { "epoch": 15.134732192159028, "grad_norm": 0.17888198792934418, "learning_rate": 2.8764e-05, "loss": 0.0045, "step": 13712 }, { "epoch": 15.13583655438984, "grad_norm": 
0.15425272285938263, "learning_rate": 2.8763666666666667e-05, "loss": 0.0064, "step": 13713 }, { "epoch": 15.136940916620652, "grad_norm": 0.22067807614803314, "learning_rate": 2.8763333333333333e-05, "loss": 0.0078, "step": 13714 }, { "epoch": 15.138045278851463, "grad_norm": 0.49579426646232605, "learning_rate": 2.8763e-05, "loss": 0.0442, "step": 13715 }, { "epoch": 15.139149641082275, "grad_norm": 0.18650679290294647, "learning_rate": 2.8762666666666668e-05, "loss": 0.0083, "step": 13716 }, { "epoch": 15.140254003313087, "grad_norm": 0.1349819302558899, "learning_rate": 2.8762333333333334e-05, "loss": 0.0035, "step": 13717 }, { "epoch": 15.141358365543898, "grad_norm": 0.21965622901916504, "learning_rate": 2.8762e-05, "loss": 0.0046, "step": 13718 }, { "epoch": 15.14246272777471, "grad_norm": 0.10825001448392868, "learning_rate": 2.876166666666667e-05, "loss": 0.004, "step": 13719 }, { "epoch": 15.143567090005522, "grad_norm": 0.708046555519104, "learning_rate": 2.8761333333333332e-05, "loss": 0.0113, "step": 13720 }, { "epoch": 15.144671452236334, "grad_norm": 0.30298808217048645, "learning_rate": 2.8761e-05, "loss": 0.0069, "step": 13721 }, { "epoch": 15.145775814467145, "grad_norm": 0.17840979993343353, "learning_rate": 2.8760666666666667e-05, "loss": 0.0076, "step": 13722 }, { "epoch": 15.146880176697957, "grad_norm": 0.2187308818101883, "learning_rate": 2.8760333333333333e-05, "loss": 0.0058, "step": 13723 }, { "epoch": 15.14798453892877, "grad_norm": 0.20874841511249542, "learning_rate": 2.876e-05, "loss": 0.0153, "step": 13724 }, { "epoch": 15.14908890115958, "grad_norm": 0.17570903897285461, "learning_rate": 2.875966666666667e-05, "loss": 0.0096, "step": 13725 }, { "epoch": 15.150193263390392, "grad_norm": 0.11018591374158859, "learning_rate": 2.8759333333333334e-05, "loss": 0.0039, "step": 13726 }, { "epoch": 15.151297625621204, "grad_norm": 0.26252785325050354, "learning_rate": 2.8759e-05, "loss": 0.0075, "step": 13727 }, { "epoch": 
15.152401987852015, "grad_norm": 0.20434650778770447, "learning_rate": 2.875866666666667e-05, "loss": 0.0075, "step": 13728 }, { "epoch": 15.153506350082827, "grad_norm": 0.12587304413318634, "learning_rate": 2.8758333333333332e-05, "loss": 0.0042, "step": 13729 }, { "epoch": 15.154610712313639, "grad_norm": 0.24053749442100525, "learning_rate": 2.8758e-05, "loss": 0.0092, "step": 13730 }, { "epoch": 15.155715074544451, "grad_norm": 0.45430707931518555, "learning_rate": 2.8757666666666667e-05, "loss": 0.0121, "step": 13731 }, { "epoch": 15.156819436775262, "grad_norm": 0.1281890720129013, "learning_rate": 2.8757333333333333e-05, "loss": 0.0046, "step": 13732 }, { "epoch": 15.157923799006074, "grad_norm": 0.43956470489501953, "learning_rate": 2.8757000000000003e-05, "loss": 0.0109, "step": 13733 }, { "epoch": 15.159028161236886, "grad_norm": 0.6345350742340088, "learning_rate": 2.875666666666667e-05, "loss": 0.0155, "step": 13734 }, { "epoch": 15.160132523467697, "grad_norm": 0.2770831882953644, "learning_rate": 2.8756333333333335e-05, "loss": 0.0078, "step": 13735 }, { "epoch": 15.161236885698509, "grad_norm": 0.45582497119903564, "learning_rate": 2.8756e-05, "loss": 0.0149, "step": 13736 }, { "epoch": 15.162341247929321, "grad_norm": 0.587374746799469, "learning_rate": 2.875566666666667e-05, "loss": 0.0112, "step": 13737 }, { "epoch": 15.163445610160133, "grad_norm": 0.14363616704940796, "learning_rate": 2.8755333333333332e-05, "loss": 0.0074, "step": 13738 }, { "epoch": 15.164549972390944, "grad_norm": 0.37737950682640076, "learning_rate": 2.8755e-05, "loss": 0.0102, "step": 13739 }, { "epoch": 15.165654334621756, "grad_norm": 0.2221435308456421, "learning_rate": 2.8754666666666664e-05, "loss": 0.0054, "step": 13740 }, { "epoch": 15.166758696852568, "grad_norm": 0.458484411239624, "learning_rate": 2.8754333333333334e-05, "loss": 0.1154, "step": 13741 }, { "epoch": 15.167863059083379, "grad_norm": 0.4929122030735016, "learning_rate": 2.8754000000000003e-05, 
"loss": 0.1454, "step": 13742 }, { "epoch": 15.168967421314191, "grad_norm": 0.3811136484146118, "learning_rate": 2.8753666666666665e-05, "loss": 0.1145, "step": 13743 }, { "epoch": 15.170071783545003, "grad_norm": 0.48240965604782104, "learning_rate": 2.8753333333333335e-05, "loss": 0.1059, "step": 13744 }, { "epoch": 15.171176145775814, "grad_norm": 0.365662544965744, "learning_rate": 2.8753e-05, "loss": 0.0669, "step": 13745 }, { "epoch": 15.172280508006626, "grad_norm": 0.34132349491119385, "learning_rate": 2.8752666666666667e-05, "loss": 0.0564, "step": 13746 }, { "epoch": 15.173384870237438, "grad_norm": 0.6615760326385498, "learning_rate": 2.8752333333333333e-05, "loss": 0.0533, "step": 13747 }, { "epoch": 15.17448923246825, "grad_norm": 0.4041324555873871, "learning_rate": 2.8752000000000002e-05, "loss": 0.0368, "step": 13748 }, { "epoch": 15.17559359469906, "grad_norm": 0.1504499465227127, "learning_rate": 2.8751666666666664e-05, "loss": 0.0134, "step": 13749 }, { "epoch": 15.176697956929873, "grad_norm": 0.09187658131122589, "learning_rate": 2.8751333333333334e-05, "loss": 0.0088, "step": 13750 }, { "epoch": 15.177802319160685, "grad_norm": 0.5372411012649536, "learning_rate": 2.8751000000000003e-05, "loss": 0.0142, "step": 13751 }, { "epoch": 15.178906681391496, "grad_norm": 0.14595748484134674, "learning_rate": 2.8750666666666666e-05, "loss": 0.0092, "step": 13752 }, { "epoch": 15.180011043622308, "grad_norm": 0.21832285821437836, "learning_rate": 2.8750333333333335e-05, "loss": 0.0129, "step": 13753 }, { "epoch": 15.18111540585312, "grad_norm": 0.1936495155096054, "learning_rate": 2.875e-05, "loss": 0.0108, "step": 13754 }, { "epoch": 15.182219768083932, "grad_norm": 0.21668238937854767, "learning_rate": 2.8749666666666667e-05, "loss": 0.0412, "step": 13755 }, { "epoch": 15.183324130314743, "grad_norm": 0.18167266249656677, "learning_rate": 2.8749333333333333e-05, "loss": 0.0052, "step": 13756 }, { "epoch": 15.184428492545555, "grad_norm": 
0.1647944450378418, "learning_rate": 2.8749000000000002e-05, "loss": 0.0086, "step": 13757 }, { "epoch": 15.185532854776367, "grad_norm": 0.18028998374938965, "learning_rate": 2.8748666666666668e-05, "loss": 0.009, "step": 13758 }, { "epoch": 15.186637217007178, "grad_norm": 0.1792728751897812, "learning_rate": 2.8748333333333334e-05, "loss": 0.0052, "step": 13759 }, { "epoch": 15.18774157923799, "grad_norm": 0.3315246105194092, "learning_rate": 2.8748000000000003e-05, "loss": 0.0106, "step": 13760 }, { "epoch": 15.188845941468802, "grad_norm": 0.12138210982084274, "learning_rate": 2.8747666666666666e-05, "loss": 0.0054, "step": 13761 }, { "epoch": 15.189950303699613, "grad_norm": 0.2634245455265045, "learning_rate": 2.8747333333333335e-05, "loss": 0.0095, "step": 13762 }, { "epoch": 15.191054665930425, "grad_norm": 0.3063337206840515, "learning_rate": 2.8747e-05, "loss": 0.02, "step": 13763 }, { "epoch": 15.192159028161237, "grad_norm": 0.16098971664905548, "learning_rate": 2.8746666666666667e-05, "loss": 0.0078, "step": 13764 }, { "epoch": 15.19326339039205, "grad_norm": 0.2391311228275299, "learning_rate": 2.8746333333333333e-05, "loss": 0.0074, "step": 13765 }, { "epoch": 15.19436775262286, "grad_norm": 0.5030652284622192, "learning_rate": 2.8746000000000002e-05, "loss": 0.0112, "step": 13766 }, { "epoch": 15.195472114853672, "grad_norm": 0.24254760146141052, "learning_rate": 2.8745666666666668e-05, "loss": 0.01, "step": 13767 }, { "epoch": 15.196576477084484, "grad_norm": 0.13520941138267517, "learning_rate": 2.8745333333333334e-05, "loss": 0.0078, "step": 13768 }, { "epoch": 15.197680839315295, "grad_norm": 0.1858261078596115, "learning_rate": 2.8745000000000003e-05, "loss": 0.0097, "step": 13769 }, { "epoch": 15.198785201546107, "grad_norm": 0.19440336525440216, "learning_rate": 2.8744666666666666e-05, "loss": 0.0098, "step": 13770 }, { "epoch": 15.19988956377692, "grad_norm": 0.16705772280693054, "learning_rate": 2.8744333333333335e-05, "loss": 0.0075, 
"step": 13771 }, { "epoch": 15.200993926007731, "grad_norm": 0.19365032017230988, "learning_rate": 2.8743999999999998e-05, "loss": 0.0086, "step": 13772 }, { "epoch": 15.202098288238542, "grad_norm": 0.39894917607307434, "learning_rate": 2.8743666666666667e-05, "loss": 0.0059, "step": 13773 }, { "epoch": 15.203202650469354, "grad_norm": 0.408846914768219, "learning_rate": 2.8743333333333333e-05, "loss": 0.0134, "step": 13774 }, { "epoch": 15.204307012700166, "grad_norm": 0.17824697494506836, "learning_rate": 2.8743e-05, "loss": 0.0052, "step": 13775 }, { "epoch": 15.205411374930977, "grad_norm": 0.11243930459022522, "learning_rate": 2.874266666666667e-05, "loss": 0.0063, "step": 13776 }, { "epoch": 15.206515737161789, "grad_norm": 0.1757350116968155, "learning_rate": 2.8742333333333334e-05, "loss": 0.0125, "step": 13777 }, { "epoch": 15.207620099392601, "grad_norm": 0.12837561964988708, "learning_rate": 2.8742e-05, "loss": 0.0053, "step": 13778 }, { "epoch": 15.208724461623412, "grad_norm": 0.3203410804271698, "learning_rate": 2.8741666666666666e-05, "loss": 0.0134, "step": 13779 }, { "epoch": 15.209828823854224, "grad_norm": 0.11920499801635742, "learning_rate": 2.8741333333333336e-05, "loss": 0.0038, "step": 13780 }, { "epoch": 15.210933186085036, "grad_norm": 0.22207777202129364, "learning_rate": 2.8740999999999998e-05, "loss": 0.0076, "step": 13781 }, { "epoch": 15.212037548315848, "grad_norm": 0.23894494771957397, "learning_rate": 2.8740666666666667e-05, "loss": 0.0068, "step": 13782 }, { "epoch": 15.213141910546659, "grad_norm": 0.29136019945144653, "learning_rate": 2.8740333333333337e-05, "loss": 0.0081, "step": 13783 }, { "epoch": 15.214246272777471, "grad_norm": 0.2607414126396179, "learning_rate": 2.874e-05, "loss": 0.0118, "step": 13784 }, { "epoch": 15.215350635008283, "grad_norm": 0.2688516676425934, "learning_rate": 2.873966666666667e-05, "loss": 0.0029, "step": 13785 }, { "epoch": 15.216454997239094, "grad_norm": 0.31127849221229553, "learning_rate": 
2.8739333333333335e-05, "loss": 0.017, "step": 13786 }, { "epoch": 15.217559359469906, "grad_norm": 0.30675968527793884, "learning_rate": 2.8739e-05, "loss": 0.0245, "step": 13787 }, { "epoch": 15.218663721700718, "grad_norm": 0.34379810094833374, "learning_rate": 2.8738666666666666e-05, "loss": 0.0072, "step": 13788 }, { "epoch": 15.21976808393153, "grad_norm": 0.687223494052887, "learning_rate": 2.8738333333333336e-05, "loss": 0.0102, "step": 13789 }, { "epoch": 15.22087244616234, "grad_norm": 0.24939219653606415, "learning_rate": 2.8737999999999998e-05, "loss": 0.0051, "step": 13790 }, { "epoch": 15.221976808393153, "grad_norm": 0.6035788655281067, "learning_rate": 2.8737666666666668e-05, "loss": 0.1805, "step": 13791 }, { "epoch": 15.223081170623965, "grad_norm": 0.4345950782299042, "learning_rate": 2.8737333333333337e-05, "loss": 0.1218, "step": 13792 }, { "epoch": 15.224185532854776, "grad_norm": 0.6502115726470947, "learning_rate": 2.8737e-05, "loss": 0.0889, "step": 13793 }, { "epoch": 15.225289895085588, "grad_norm": 0.9391016364097595, "learning_rate": 2.873666666666667e-05, "loss": 0.1191, "step": 13794 }, { "epoch": 15.2263942573164, "grad_norm": 0.4251434803009033, "learning_rate": 2.8736333333333335e-05, "loss": 0.0638, "step": 13795 }, { "epoch": 15.22749861954721, "grad_norm": 0.23594506084918976, "learning_rate": 2.8736e-05, "loss": 0.0402, "step": 13796 }, { "epoch": 15.228602981778023, "grad_norm": 0.3733185827732086, "learning_rate": 2.8735666666666667e-05, "loss": 0.0468, "step": 13797 }, { "epoch": 15.229707344008835, "grad_norm": 0.4009402394294739, "learning_rate": 2.8735333333333336e-05, "loss": 0.0212, "step": 13798 }, { "epoch": 15.230811706239647, "grad_norm": 0.3460392653942108, "learning_rate": 2.8735e-05, "loss": 0.021, "step": 13799 }, { "epoch": 15.231916068470458, "grad_norm": 0.24595098197460175, "learning_rate": 2.8734666666666668e-05, "loss": 0.0143, "step": 13800 }, { "epoch": 15.23302043070127, "grad_norm": 
0.19851306080818176, "learning_rate": 2.8734333333333334e-05, "loss": 0.0122, "step": 13801 }, { "epoch": 15.234124792932082, "grad_norm": 0.3811131417751312, "learning_rate": 2.8734e-05, "loss": 0.0187, "step": 13802 }, { "epoch": 15.235229155162893, "grad_norm": 0.13011103868484497, "learning_rate": 2.873366666666667e-05, "loss": 0.0079, "step": 13803 }, { "epoch": 15.236333517393705, "grad_norm": 0.25104865431785583, "learning_rate": 2.873333333333333e-05, "loss": 0.0289, "step": 13804 }, { "epoch": 15.237437879624517, "grad_norm": 0.09905356168746948, "learning_rate": 2.8733e-05, "loss": 0.0042, "step": 13805 }, { "epoch": 15.23854224185533, "grad_norm": 0.46734511852264404, "learning_rate": 2.8732666666666667e-05, "loss": 0.0112, "step": 13806 }, { "epoch": 15.23964660408614, "grad_norm": 0.2122475951910019, "learning_rate": 2.8732333333333333e-05, "loss": 0.0076, "step": 13807 }, { "epoch": 15.240750966316952, "grad_norm": 0.33939412236213684, "learning_rate": 2.8732000000000002e-05, "loss": 0.0108, "step": 13808 }, { "epoch": 15.241855328547764, "grad_norm": 0.12635472416877747, "learning_rate": 2.8731666666666668e-05, "loss": 0.0057, "step": 13809 }, { "epoch": 15.242959690778575, "grad_norm": 0.19114038348197937, "learning_rate": 2.8731333333333334e-05, "loss": 0.0125, "step": 13810 }, { "epoch": 15.244064053009387, "grad_norm": 0.24954691529273987, "learning_rate": 2.8731e-05, "loss": 0.0097, "step": 13811 }, { "epoch": 15.2451684152402, "grad_norm": 0.2511681318283081, "learning_rate": 2.873066666666667e-05, "loss": 0.0074, "step": 13812 }, { "epoch": 15.24627277747101, "grad_norm": 0.14340347051620483, "learning_rate": 2.8730333333333332e-05, "loss": 0.006, "step": 13813 }, { "epoch": 15.247377139701822, "grad_norm": 0.4707561731338501, "learning_rate": 2.873e-05, "loss": 0.0086, "step": 13814 }, { "epoch": 15.248481501932634, "grad_norm": 0.23921433091163635, "learning_rate": 2.8729666666666667e-05, "loss": 0.0099, "step": 13815 }, { "epoch": 
15.249585864163446, "grad_norm": 0.11550889909267426, "learning_rate": 2.8729333333333333e-05, "loss": 0.0048, "step": 13816 }, { "epoch": 15.250690226394257, "grad_norm": 0.12346482276916504, "learning_rate": 2.8729000000000002e-05, "loss": 0.007, "step": 13817 }, { "epoch": 15.251794588625069, "grad_norm": 0.16168566048145294, "learning_rate": 2.8728666666666668e-05, "loss": 0.0063, "step": 13818 }, { "epoch": 15.252898950855881, "grad_norm": 0.2803109288215637, "learning_rate": 2.8728333333333334e-05, "loss": 0.0067, "step": 13819 }, { "epoch": 15.254003313086692, "grad_norm": 0.19588114321231842, "learning_rate": 2.8728e-05, "loss": 0.0041, "step": 13820 }, { "epoch": 15.255107675317504, "grad_norm": 0.1813516616821289, "learning_rate": 2.872766666666667e-05, "loss": 0.0087, "step": 13821 }, { "epoch": 15.256212037548316, "grad_norm": 0.17198584973812103, "learning_rate": 2.8727333333333332e-05, "loss": 0.0093, "step": 13822 }, { "epoch": 15.257316399779128, "grad_norm": 0.4758414924144745, "learning_rate": 2.8727e-05, "loss": 0.0058, "step": 13823 }, { "epoch": 15.258420762009939, "grad_norm": 0.17902393639087677, "learning_rate": 2.8726666666666667e-05, "loss": 0.0075, "step": 13824 }, { "epoch": 15.259525124240751, "grad_norm": 0.14890965819358826, "learning_rate": 2.8726333333333333e-05, "loss": 0.0036, "step": 13825 }, { "epoch": 15.260629486471563, "grad_norm": 0.1720794439315796, "learning_rate": 2.8726000000000002e-05, "loss": 0.0099, "step": 13826 }, { "epoch": 15.261733848702374, "grad_norm": 0.43852153420448303, "learning_rate": 2.872566666666667e-05, "loss": 0.0101, "step": 13827 }, { "epoch": 15.262838210933186, "grad_norm": 0.3115972876548767, "learning_rate": 2.8725333333333334e-05, "loss": 0.0143, "step": 13828 }, { "epoch": 15.263942573163998, "grad_norm": 0.24819758534431458, "learning_rate": 2.8725e-05, "loss": 0.0091, "step": 13829 }, { "epoch": 15.26504693539481, "grad_norm": 0.2396371066570282, "learning_rate": 2.8724666666666666e-05, 
"loss": 0.0107, "step": 13830 }, { "epoch": 15.26615129762562, "grad_norm": 0.10548018664121628, "learning_rate": 2.8724333333333332e-05, "loss": 0.0048, "step": 13831 }, { "epoch": 15.267255659856433, "grad_norm": 0.502963662147522, "learning_rate": 2.8724e-05, "loss": 0.0156, "step": 13832 }, { "epoch": 15.268360022087245, "grad_norm": 0.5382698178291321, "learning_rate": 2.8723666666666667e-05, "loss": 0.0118, "step": 13833 }, { "epoch": 15.269464384318056, "grad_norm": 0.14321301877498627, "learning_rate": 2.8723333333333333e-05, "loss": 0.0095, "step": 13834 }, { "epoch": 15.270568746548868, "grad_norm": 0.21738633513450623, "learning_rate": 2.8723000000000003e-05, "loss": 0.0085, "step": 13835 }, { "epoch": 15.27167310877968, "grad_norm": 0.35444486141204834, "learning_rate": 2.8722666666666665e-05, "loss": 0.0085, "step": 13836 }, { "epoch": 15.27277747101049, "grad_norm": 0.4314732849597931, "learning_rate": 2.8722333333333335e-05, "loss": 0.0151, "step": 13837 }, { "epoch": 15.273881833241303, "grad_norm": 0.4081888794898987, "learning_rate": 2.8722e-05, "loss": 0.01, "step": 13838 }, { "epoch": 15.274986195472115, "grad_norm": 0.17201745510101318, "learning_rate": 2.8721666666666666e-05, "loss": 0.0084, "step": 13839 }, { "epoch": 15.276090557702927, "grad_norm": 0.2814144194126129, "learning_rate": 2.8721333333333332e-05, "loss": 0.0068, "step": 13840 }, { "epoch": 15.277194919933738, "grad_norm": 0.5566623210906982, "learning_rate": 2.8721e-05, "loss": 0.1866, "step": 13841 }, { "epoch": 15.27829928216455, "grad_norm": 0.4355699419975281, "learning_rate": 2.8720666666666668e-05, "loss": 0.1152, "step": 13842 }, { "epoch": 15.279403644395362, "grad_norm": 1.2305266857147217, "learning_rate": 2.8720333333333334e-05, "loss": 0.1382, "step": 13843 }, { "epoch": 15.280508006626173, "grad_norm": 0.5564067959785461, "learning_rate": 2.8720000000000003e-05, "loss": 0.0839, "step": 13844 }, { "epoch": 15.281612368856985, "grad_norm": 0.5031759738922119, 
"learning_rate": 2.8719666666666665e-05, "loss": 0.0592, "step": 13845 }, { "epoch": 15.282716731087797, "grad_norm": 0.4340907633304596, "learning_rate": 2.8719333333333335e-05, "loss": 0.0571, "step": 13846 }, { "epoch": 15.283821093318608, "grad_norm": 0.43310272693634033, "learning_rate": 2.8719e-05, "loss": 0.0676, "step": 13847 }, { "epoch": 15.28492545554942, "grad_norm": 0.4062677025794983, "learning_rate": 2.8718666666666667e-05, "loss": 0.0257, "step": 13848 }, { "epoch": 15.286029817780232, "grad_norm": 0.3035278022289276, "learning_rate": 2.8718333333333333e-05, "loss": 0.0165, "step": 13849 }, { "epoch": 15.287134180011044, "grad_norm": 0.162126824259758, "learning_rate": 2.8718000000000002e-05, "loss": 0.0076, "step": 13850 }, { "epoch": 15.288238542241855, "grad_norm": 0.29362159967422485, "learning_rate": 2.8717666666666668e-05, "loss": 0.0162, "step": 13851 }, { "epoch": 15.289342904472667, "grad_norm": 0.19851423799991608, "learning_rate": 2.8717333333333334e-05, "loss": 0.0111, "step": 13852 }, { "epoch": 15.29044726670348, "grad_norm": 0.37290042638778687, "learning_rate": 2.8717000000000003e-05, "loss": 0.0052, "step": 13853 }, { "epoch": 15.29155162893429, "grad_norm": 0.13032659888267517, "learning_rate": 2.8716666666666666e-05, "loss": 0.0125, "step": 13854 }, { "epoch": 15.292655991165102, "grad_norm": 0.2532675266265869, "learning_rate": 2.8716333333333335e-05, "loss": 0.0131, "step": 13855 }, { "epoch": 15.293760353395914, "grad_norm": 0.18404021859169006, "learning_rate": 2.8716e-05, "loss": 0.0076, "step": 13856 }, { "epoch": 15.294864715626726, "grad_norm": 0.19707532227039337, "learning_rate": 2.8715666666666667e-05, "loss": 0.0081, "step": 13857 }, { "epoch": 15.295969077857537, "grad_norm": 0.15990738570690155, "learning_rate": 2.8715333333333336e-05, "loss": 0.0118, "step": 13858 }, { "epoch": 15.297073440088349, "grad_norm": 0.19069448113441467, "learning_rate": 2.8715000000000002e-05, "loss": 0.0075, "step": 13859 }, { "epoch": 
15.298177802319161, "grad_norm": 0.21981631219387054, "learning_rate": 2.8714666666666668e-05, "loss": 0.0071, "step": 13860 }, { "epoch": 15.299282164549972, "grad_norm": 0.4538491368293762, "learning_rate": 2.8714333333333334e-05, "loss": 0.0112, "step": 13861 }, { "epoch": 15.300386526780784, "grad_norm": 0.3040347993373871, "learning_rate": 2.8714e-05, "loss": 0.0094, "step": 13862 }, { "epoch": 15.301490889011596, "grad_norm": 0.17286455631256104, "learning_rate": 2.8713666666666666e-05, "loss": 0.0049, "step": 13863 }, { "epoch": 15.302595251242408, "grad_norm": 0.097415991127491, "learning_rate": 2.8713333333333335e-05, "loss": 0.0035, "step": 13864 }, { "epoch": 15.303699613473219, "grad_norm": 0.3283158838748932, "learning_rate": 2.8712999999999998e-05, "loss": 0.011, "step": 13865 }, { "epoch": 15.304803975704031, "grad_norm": 0.2127041071653366, "learning_rate": 2.8712666666666667e-05, "loss": 0.0071, "step": 13866 }, { "epoch": 15.305908337934843, "grad_norm": 0.1743352711200714, "learning_rate": 2.8712333333333336e-05, "loss": 0.004, "step": 13867 }, { "epoch": 15.307012700165654, "grad_norm": 0.1150515154004097, "learning_rate": 2.8712e-05, "loss": 0.0031, "step": 13868 }, { "epoch": 15.308117062396466, "grad_norm": 0.5197811722755432, "learning_rate": 2.8711666666666668e-05, "loss": 0.0061, "step": 13869 }, { "epoch": 15.309221424627278, "grad_norm": 0.24565747380256653, "learning_rate": 2.8711333333333334e-05, "loss": 0.0082, "step": 13870 }, { "epoch": 15.310325786858089, "grad_norm": 0.3662467896938324, "learning_rate": 2.8711e-05, "loss": 0.0077, "step": 13871 }, { "epoch": 15.3114301490889, "grad_norm": 0.3945930600166321, "learning_rate": 2.8710666666666666e-05, "loss": 0.0089, "step": 13872 }, { "epoch": 15.312534511319713, "grad_norm": 0.13813209533691406, "learning_rate": 2.8710333333333335e-05, "loss": 0.0047, "step": 13873 }, { "epoch": 15.313638873550525, "grad_norm": 0.4879626929759979, "learning_rate": 2.871e-05, "loss": 0.0215, "step": 
13874 }, { "epoch": 15.314743235781336, "grad_norm": 0.18406139314174652, "learning_rate": 2.8709666666666667e-05, "loss": 0.0097, "step": 13875 }, { "epoch": 15.315847598012148, "grad_norm": 0.21244707703590393, "learning_rate": 2.8709333333333337e-05, "loss": 0.0078, "step": 13876 }, { "epoch": 15.31695196024296, "grad_norm": 0.2825542390346527, "learning_rate": 2.8709e-05, "loss": 0.009, "step": 13877 }, { "epoch": 15.31805632247377, "grad_norm": 0.12661710381507874, "learning_rate": 2.870866666666667e-05, "loss": 0.0054, "step": 13878 }, { "epoch": 15.319160684704583, "grad_norm": 0.2500525116920471, "learning_rate": 2.8708333333333334e-05, "loss": 0.0069, "step": 13879 }, { "epoch": 15.320265046935395, "grad_norm": 0.3095642328262329, "learning_rate": 2.8708e-05, "loss": 0.015, "step": 13880 }, { "epoch": 15.321369409166207, "grad_norm": 0.14519815146923065, "learning_rate": 2.8707666666666666e-05, "loss": 0.0091, "step": 13881 }, { "epoch": 15.322473771397018, "grad_norm": 0.47382089495658875, "learning_rate": 2.8707333333333336e-05, "loss": 0.0066, "step": 13882 }, { "epoch": 15.32357813362783, "grad_norm": 0.20860837399959564, "learning_rate": 2.8707e-05, "loss": 0.0076, "step": 13883 }, { "epoch": 15.324682495858642, "grad_norm": 0.12304096668958664, "learning_rate": 2.8706666666666667e-05, "loss": 0.0041, "step": 13884 }, { "epoch": 15.325786858089453, "grad_norm": 0.19081252813339233, "learning_rate": 2.8706333333333337e-05, "loss": 0.0087, "step": 13885 }, { "epoch": 15.326891220320265, "grad_norm": 0.14849349856376648, "learning_rate": 2.8706e-05, "loss": 0.0052, "step": 13886 }, { "epoch": 15.327995582551077, "grad_norm": 0.3101571500301361, "learning_rate": 2.870566666666667e-05, "loss": 0.0243, "step": 13887 }, { "epoch": 15.329099944781888, "grad_norm": 0.2395392507314682, "learning_rate": 2.8705333333333335e-05, "loss": 0.0088, "step": 13888 }, { "epoch": 15.3302043070127, "grad_norm": 3.083205223083496, "learning_rate": 2.8705e-05, "loss": 
0.0202, "step": 13889 }, { "epoch": 15.331308669243512, "grad_norm": 0.333638995885849, "learning_rate": 2.8704666666666666e-05, "loss": 0.0132, "step": 13890 }, { "epoch": 15.332413031474324, "grad_norm": 0.7293395400047302, "learning_rate": 2.8704333333333332e-05, "loss": 0.2335, "step": 13891 }, { "epoch": 15.333517393705135, "grad_norm": 0.48233041167259216, "learning_rate": 2.8704e-05, "loss": 0.142, "step": 13892 }, { "epoch": 15.334621755935947, "grad_norm": 0.7786401510238647, "learning_rate": 2.8703666666666668e-05, "loss": 0.1192, "step": 13893 }, { "epoch": 15.33572611816676, "grad_norm": 0.4269743859767914, "learning_rate": 2.8703333333333334e-05, "loss": 0.09, "step": 13894 }, { "epoch": 15.33683048039757, "grad_norm": 0.6501318216323853, "learning_rate": 2.8703e-05, "loss": 0.0964, "step": 13895 }, { "epoch": 15.337934842628382, "grad_norm": 0.48083561658859253, "learning_rate": 2.870266666666667e-05, "loss": 0.0796, "step": 13896 }, { "epoch": 15.339039204859194, "grad_norm": 0.31157130002975464, "learning_rate": 2.870233333333333e-05, "loss": 0.0273, "step": 13897 }, { "epoch": 15.340143567090006, "grad_norm": 0.3560914993286133, "learning_rate": 2.8702e-05, "loss": 0.0393, "step": 13898 }, { "epoch": 15.341247929320817, "grad_norm": 0.2820461094379425, "learning_rate": 2.8701666666666667e-05, "loss": 0.0539, "step": 13899 }, { "epoch": 15.342352291551629, "grad_norm": 0.36271655559539795, "learning_rate": 2.8701333333333333e-05, "loss": 0.0289, "step": 13900 }, { "epoch": 15.343456653782441, "grad_norm": 0.21063417196273804, "learning_rate": 2.8701000000000002e-05, "loss": 0.0138, "step": 13901 }, { "epoch": 15.344561016013252, "grad_norm": 0.12097345292568207, "learning_rate": 2.8700666666666668e-05, "loss": 0.0061, "step": 13902 }, { "epoch": 15.345665378244064, "grad_norm": 0.35114142298698425, "learning_rate": 2.8700333333333334e-05, "loss": 0.0109, "step": 13903 }, { "epoch": 15.346769740474876, "grad_norm": 0.16533303260803223, 
"learning_rate": 2.87e-05, "loss": 0.0066, "step": 13904 }, { "epoch": 15.347874102705687, "grad_norm": 0.14145931601524353, "learning_rate": 2.869966666666667e-05, "loss": 0.0104, "step": 13905 }, { "epoch": 15.348978464936499, "grad_norm": 0.3098498284816742, "learning_rate": 2.869933333333333e-05, "loss": 0.0116, "step": 13906 }, { "epoch": 15.350082827167311, "grad_norm": 0.5395901799201965, "learning_rate": 2.8699e-05, "loss": 0.0108, "step": 13907 }, { "epoch": 15.351187189398123, "grad_norm": 0.16395147144794464, "learning_rate": 2.869866666666667e-05, "loss": 0.0073, "step": 13908 }, { "epoch": 15.352291551628934, "grad_norm": 0.2860301733016968, "learning_rate": 2.8698333333333333e-05, "loss": 0.0109, "step": 13909 }, { "epoch": 15.353395913859746, "grad_norm": 0.5569563508033752, "learning_rate": 2.8698000000000002e-05, "loss": 0.0126, "step": 13910 }, { "epoch": 15.354500276090558, "grad_norm": 0.17202074825763702, "learning_rate": 2.8697666666666668e-05, "loss": 0.0063, "step": 13911 }, { "epoch": 15.355604638321369, "grad_norm": 0.19441822171211243, "learning_rate": 2.8697333333333334e-05, "loss": 0.0081, "step": 13912 }, { "epoch": 15.356709000552181, "grad_norm": 0.9751061201095581, "learning_rate": 2.8697e-05, "loss": 0.0084, "step": 13913 }, { "epoch": 15.357813362782993, "grad_norm": 0.21005500853061676, "learning_rate": 2.869666666666667e-05, "loss": 0.0083, "step": 13914 }, { "epoch": 15.358917725013805, "grad_norm": 0.15562835335731506, "learning_rate": 2.8696333333333332e-05, "loss": 0.0089, "step": 13915 }, { "epoch": 15.360022087244616, "grad_norm": 0.11235013604164124, "learning_rate": 2.8696e-05, "loss": 0.0037, "step": 13916 }, { "epoch": 15.361126449475428, "grad_norm": 0.2886931598186493, "learning_rate": 2.869566666666667e-05, "loss": 0.0192, "step": 13917 }, { "epoch": 15.36223081170624, "grad_norm": 0.2687285244464874, "learning_rate": 2.8695333333333333e-05, "loss": 0.0054, "step": 13918 }, { "epoch": 15.36333517393705, "grad_norm": 
0.2974761724472046, "learning_rate": 2.8695000000000002e-05, "loss": 0.0092, "step": 13919 }, { "epoch": 15.364439536167863, "grad_norm": 0.3692917823791504, "learning_rate": 2.8694666666666668e-05, "loss": 0.0108, "step": 13920 }, { "epoch": 15.365543898398675, "grad_norm": 0.927943766117096, "learning_rate": 2.8694333333333334e-05, "loss": 0.0155, "step": 13921 }, { "epoch": 15.366648260629486, "grad_norm": 0.1460706889629364, "learning_rate": 2.8694e-05, "loss": 0.0075, "step": 13922 }, { "epoch": 15.367752622860298, "grad_norm": 0.1296900361776352, "learning_rate": 2.8693666666666666e-05, "loss": 0.0038, "step": 13923 }, { "epoch": 15.36885698509111, "grad_norm": 0.34654882550239563, "learning_rate": 2.8693333333333335e-05, "loss": 0.013, "step": 13924 }, { "epoch": 15.369961347321922, "grad_norm": 1.5779380798339844, "learning_rate": 2.8693e-05, "loss": 0.0317, "step": 13925 }, { "epoch": 15.371065709552733, "grad_norm": 0.18859384953975677, "learning_rate": 2.8692666666666667e-05, "loss": 0.0048, "step": 13926 }, { "epoch": 15.372170071783545, "grad_norm": 0.35073959827423096, "learning_rate": 2.8692333333333333e-05, "loss": 0.0163, "step": 13927 }, { "epoch": 15.373274434014357, "grad_norm": 0.281322181224823, "learning_rate": 2.8692000000000002e-05, "loss": 0.0109, "step": 13928 }, { "epoch": 15.374378796245168, "grad_norm": 0.15116435289382935, "learning_rate": 2.8691666666666665e-05, "loss": 0.0058, "step": 13929 }, { "epoch": 15.37548315847598, "grad_norm": 0.2489934116601944, "learning_rate": 2.8691333333333334e-05, "loss": 0.0096, "step": 13930 }, { "epoch": 15.376587520706792, "grad_norm": 1.112743854522705, "learning_rate": 2.8691e-05, "loss": 0.0068, "step": 13931 }, { "epoch": 15.377691882937604, "grad_norm": 0.14519435167312622, "learning_rate": 2.8690666666666666e-05, "loss": 0.0068, "step": 13932 }, { "epoch": 15.378796245168415, "grad_norm": 0.16938868165016174, "learning_rate": 2.8690333333333336e-05, "loss": 0.0066, "step": 13933 }, { 
"epoch": 15.379900607399227, "grad_norm": 0.1474607288837433, "learning_rate": 2.869e-05, "loss": 0.0061, "step": 13934 }, { "epoch": 15.38100496963004, "grad_norm": 0.2844817340373993, "learning_rate": 2.8689666666666667e-05, "loss": 0.0119, "step": 13935 }, { "epoch": 15.38210933186085, "grad_norm": 2.034165382385254, "learning_rate": 2.8689333333333333e-05, "loss": 0.0275, "step": 13936 }, { "epoch": 15.383213694091662, "grad_norm": 0.18214549124240875, "learning_rate": 2.8689000000000003e-05, "loss": 0.0072, "step": 13937 }, { "epoch": 15.384318056322474, "grad_norm": 0.30969688296318054, "learning_rate": 2.8688666666666665e-05, "loss": 0.0104, "step": 13938 }, { "epoch": 15.385422418553285, "grad_norm": 1.7334935665130615, "learning_rate": 2.8688333333333335e-05, "loss": 0.0153, "step": 13939 }, { "epoch": 15.386526780784097, "grad_norm": 0.5150189995765686, "learning_rate": 2.8688e-05, "loss": 0.0156, "step": 13940 }, { "epoch": 15.387631143014909, "grad_norm": 0.7304405570030212, "learning_rate": 2.8687666666666666e-05, "loss": 0.1832, "step": 13941 }, { "epoch": 15.388735505245721, "grad_norm": 0.5301385521888733, "learning_rate": 2.8687333333333336e-05, "loss": 0.1398, "step": 13942 }, { "epoch": 15.389839867476532, "grad_norm": 0.6633570194244385, "learning_rate": 2.8687e-05, "loss": 0.0958, "step": 13943 }, { "epoch": 15.390944229707344, "grad_norm": 0.32691559195518494, "learning_rate": 2.8686666666666668e-05, "loss": 0.057, "step": 13944 }, { "epoch": 15.392048591938156, "grad_norm": 0.3542136251926422, "learning_rate": 2.8686333333333334e-05, "loss": 0.0638, "step": 13945 }, { "epoch": 15.393152954168967, "grad_norm": 0.8242444396018982, "learning_rate": 2.8686000000000003e-05, "loss": 0.1461, "step": 13946 }, { "epoch": 15.394257316399779, "grad_norm": 0.3018832802772522, "learning_rate": 2.8685666666666665e-05, "loss": 0.0378, "step": 13947 }, { "epoch": 15.395361678630591, "grad_norm": 0.4794674217700958, "learning_rate": 2.8685333333333335e-05, 
"loss": 0.0619, "step": 13948 }, { "epoch": 15.396466040861403, "grad_norm": 0.34429851174354553, "learning_rate": 2.8685e-05, "loss": 0.0359, "step": 13949 }, { "epoch": 15.397570403092214, "grad_norm": 0.5232292413711548, "learning_rate": 2.8684666666666667e-05, "loss": 0.0681, "step": 13950 }, { "epoch": 15.398674765323026, "grad_norm": 0.25298863649368286, "learning_rate": 2.8684333333333336e-05, "loss": 0.0125, "step": 13951 }, { "epoch": 15.399779127553838, "grad_norm": 0.19416563212871552, "learning_rate": 2.8684e-05, "loss": 0.0134, "step": 13952 }, { "epoch": 15.400883489784649, "grad_norm": 0.2135496735572815, "learning_rate": 2.8683666666666668e-05, "loss": 0.011, "step": 13953 }, { "epoch": 15.401987852015461, "grad_norm": 0.1680987924337387, "learning_rate": 2.8683333333333334e-05, "loss": 0.0148, "step": 13954 }, { "epoch": 15.403092214246273, "grad_norm": 0.45624828338623047, "learning_rate": 2.8683e-05, "loss": 0.0192, "step": 13955 }, { "epoch": 15.404196576477084, "grad_norm": 0.1921549141407013, "learning_rate": 2.8682666666666666e-05, "loss": 0.0118, "step": 13956 }, { "epoch": 15.405300938707896, "grad_norm": 0.44621995091438293, "learning_rate": 2.8682333333333335e-05, "loss": 0.0107, "step": 13957 }, { "epoch": 15.406405300938708, "grad_norm": 0.47874265909194946, "learning_rate": 2.8682e-05, "loss": 0.0105, "step": 13958 }, { "epoch": 15.40750966316952, "grad_norm": 0.09062958508729935, "learning_rate": 2.8681666666666667e-05, "loss": 0.0044, "step": 13959 }, { "epoch": 15.40861402540033, "grad_norm": 0.10577011853456497, "learning_rate": 2.8681333333333336e-05, "loss": 0.0078, "step": 13960 }, { "epoch": 15.409718387631143, "grad_norm": 0.1433795839548111, "learning_rate": 2.8681e-05, "loss": 0.0091, "step": 13961 }, { "epoch": 15.410822749861955, "grad_norm": 0.2113315910100937, "learning_rate": 2.8680666666666668e-05, "loss": 0.0136, "step": 13962 }, { "epoch": 15.411927112092766, "grad_norm": 0.3723407983779907, "learning_rate": 
2.8680333333333334e-05, "loss": 0.0143, "step": 13963 }, { "epoch": 15.413031474323578, "grad_norm": 0.12313469499349594, "learning_rate": 2.868e-05, "loss": 0.0066, "step": 13964 }, { "epoch": 15.41413583655439, "grad_norm": 0.1475372314453125, "learning_rate": 2.8679666666666666e-05, "loss": 0.0091, "step": 13965 }, { "epoch": 15.415240198785202, "grad_norm": 0.17393571138381958, "learning_rate": 2.8679333333333335e-05, "loss": 0.0065, "step": 13966 }, { "epoch": 15.416344561016013, "grad_norm": 0.23411159217357635, "learning_rate": 2.8679e-05, "loss": 0.0066, "step": 13967 }, { "epoch": 15.417448923246825, "grad_norm": 0.25918200612068176, "learning_rate": 2.8678666666666667e-05, "loss": 0.0097, "step": 13968 }, { "epoch": 15.418553285477637, "grad_norm": 0.35788312554359436, "learning_rate": 2.8678333333333336e-05, "loss": 0.0134, "step": 13969 }, { "epoch": 15.419657647708448, "grad_norm": 0.635846734046936, "learning_rate": 2.8678e-05, "loss": 0.0082, "step": 13970 }, { "epoch": 15.42076200993926, "grad_norm": 0.7288994193077087, "learning_rate": 2.8677666666666668e-05, "loss": 0.0141, "step": 13971 }, { "epoch": 15.421866372170072, "grad_norm": 0.2871544361114502, "learning_rate": 2.8677333333333334e-05, "loss": 0.0131, "step": 13972 }, { "epoch": 15.422970734400884, "grad_norm": 0.4736681580543518, "learning_rate": 2.8677e-05, "loss": 0.0166, "step": 13973 }, { "epoch": 15.424075096631695, "grad_norm": 0.16531622409820557, "learning_rate": 2.867666666666667e-05, "loss": 0.0083, "step": 13974 }, { "epoch": 15.425179458862507, "grad_norm": 0.18352918326854706, "learning_rate": 2.8676333333333335e-05, "loss": 0.0056, "step": 13975 }, { "epoch": 15.42628382109332, "grad_norm": 0.19543398916721344, "learning_rate": 2.8676e-05, "loss": 0.0135, "step": 13976 }, { "epoch": 15.42738818332413, "grad_norm": 0.14655913412570953, "learning_rate": 2.8675666666666667e-05, "loss": 0.0063, "step": 13977 }, { "epoch": 15.428492545554942, "grad_norm": 0.3899742364883423, 
"learning_rate": 2.8675333333333336e-05, "loss": 0.0206, "step": 13978 }, { "epoch": 15.429596907785754, "grad_norm": 0.3505527973175049, "learning_rate": 2.8675e-05, "loss": 0.0095, "step": 13979 }, { "epoch": 15.430701270016565, "grad_norm": 0.23705391585826874, "learning_rate": 2.867466666666667e-05, "loss": 0.01, "step": 13980 }, { "epoch": 15.431805632247377, "grad_norm": 0.40280261635780334, "learning_rate": 2.867433333333333e-05, "loss": 0.019, "step": 13981 }, { "epoch": 15.43290999447819, "grad_norm": 0.246274933218956, "learning_rate": 2.8674e-05, "loss": 0.0116, "step": 13982 }, { "epoch": 15.434014356709001, "grad_norm": 0.2975309193134308, "learning_rate": 2.867366666666667e-05, "loss": 0.0175, "step": 13983 }, { "epoch": 15.435118718939812, "grad_norm": 0.22282101213932037, "learning_rate": 2.8673333333333332e-05, "loss": 0.0094, "step": 13984 }, { "epoch": 15.436223081170624, "grad_norm": 0.3393450379371643, "learning_rate": 2.8673e-05, "loss": 0.0139, "step": 13985 }, { "epoch": 15.437327443401436, "grad_norm": 0.48452091217041016, "learning_rate": 2.8672666666666667e-05, "loss": 0.0121, "step": 13986 }, { "epoch": 15.438431805632247, "grad_norm": 0.6454862952232361, "learning_rate": 2.8672333333333333e-05, "loss": 0.0181, "step": 13987 }, { "epoch": 15.439536167863059, "grad_norm": 0.420748770236969, "learning_rate": 2.8672e-05, "loss": 0.0143, "step": 13988 }, { "epoch": 15.440640530093871, "grad_norm": 0.4613984525203705, "learning_rate": 2.867166666666667e-05, "loss": 0.0121, "step": 13989 }, { "epoch": 15.441744892324682, "grad_norm": 0.30581510066986084, "learning_rate": 2.867133333333333e-05, "loss": 0.0074, "step": 13990 }, { "epoch": 15.442849254555494, "grad_norm": 1.0801056623458862, "learning_rate": 2.8671e-05, "loss": 0.2332, "step": 13991 }, { "epoch": 15.443953616786306, "grad_norm": 0.8488472700119019, "learning_rate": 2.867066666666667e-05, "loss": 0.1372, "step": 13992 }, { "epoch": 15.445057979017118, "grad_norm": 
0.4459705054759979, "learning_rate": 2.8670333333333332e-05, "loss": 0.1009, "step": 13993 }, { "epoch": 15.446162341247929, "grad_norm": 0.49018463492393494, "learning_rate": 2.867e-05, "loss": 0.0701, "step": 13994 }, { "epoch": 15.447266703478741, "grad_norm": 0.32058286666870117, "learning_rate": 2.8669666666666668e-05, "loss": 0.0443, "step": 13995 }, { "epoch": 15.448371065709553, "grad_norm": 0.4601423442363739, "learning_rate": 2.8669333333333334e-05, "loss": 0.0806, "step": 13996 }, { "epoch": 15.449475427940364, "grad_norm": 1.0525010824203491, "learning_rate": 2.8669e-05, "loss": 0.0676, "step": 13997 }, { "epoch": 15.450579790171176, "grad_norm": 0.6608363389968872, "learning_rate": 2.866866666666667e-05, "loss": 0.0311, "step": 13998 }, { "epoch": 15.451684152401988, "grad_norm": 0.6030898690223694, "learning_rate": 2.8668333333333335e-05, "loss": 0.0469, "step": 13999 }, { "epoch": 15.4527885146328, "grad_norm": 0.3802577257156372, "learning_rate": 2.8668e-05, "loss": 0.0284, "step": 14000 }, { "epoch": 15.4527885146328, "eval_cer": 0.10692899914456801, "eval_loss": 0.32161417603492737, "eval_runtime": 16.057, "eval_samples_per_second": 18.933, "eval_steps_per_second": 0.623, "eval_wer": 0.3643514965464313, "step": 14000 }, { "epoch": 15.45389287686361, "grad_norm": 0.17187723517417908, "learning_rate": 2.866766666666667e-05, "loss": 0.0119, "step": 14001 }, { "epoch": 15.454997239094423, "grad_norm": 0.26368868350982666, "learning_rate": 2.8667333333333333e-05, "loss": 0.0167, "step": 14002 }, { "epoch": 15.456101601325235, "grad_norm": 0.13125020265579224, "learning_rate": 2.8667000000000002e-05, "loss": 0.0091, "step": 14003 }, { "epoch": 15.457205963556046, "grad_norm": 0.20118200778961182, "learning_rate": 2.8666666666666668e-05, "loss": 0.0088, "step": 14004 }, { "epoch": 15.458310325786858, "grad_norm": 0.3939726650714874, "learning_rate": 2.8666333333333334e-05, "loss": 0.0135, "step": 14005 }, { "epoch": 15.45941468801767, "grad_norm": 
0.5314702987670898, "learning_rate": 2.8666e-05, "loss": 0.036, "step": 14006 }, { "epoch": 15.460519050248482, "grad_norm": 0.5574765205383301, "learning_rate": 2.866566666666667e-05, "loss": 0.015, "step": 14007 }, { "epoch": 15.461623412479293, "grad_norm": 0.24639877676963806, "learning_rate": 2.8665333333333335e-05, "loss": 0.0124, "step": 14008 }, { "epoch": 15.462727774710105, "grad_norm": 0.20358192920684814, "learning_rate": 2.8665e-05, "loss": 0.009, "step": 14009 }, { "epoch": 15.463832136940917, "grad_norm": 0.466922402381897, "learning_rate": 2.866466666666667e-05, "loss": 0.0161, "step": 14010 }, { "epoch": 15.464936499171728, "grad_norm": 0.5403216481208801, "learning_rate": 2.8664333333333333e-05, "loss": 0.0148, "step": 14011 }, { "epoch": 15.46604086140254, "grad_norm": 0.4036964774131775, "learning_rate": 2.8664000000000002e-05, "loss": 0.0283, "step": 14012 }, { "epoch": 15.467145223633352, "grad_norm": 0.23328928649425507, "learning_rate": 2.8663666666666665e-05, "loss": 0.0086, "step": 14013 }, { "epoch": 15.468249585864163, "grad_norm": 0.326182097196579, "learning_rate": 2.8663333333333334e-05, "loss": 0.0162, "step": 14014 }, { "epoch": 15.469353948094975, "grad_norm": 0.28922221064567566, "learning_rate": 2.8663e-05, "loss": 0.0092, "step": 14015 }, { "epoch": 15.470458310325787, "grad_norm": 0.44241467118263245, "learning_rate": 2.8662666666666666e-05, "loss": 0.0116, "step": 14016 }, { "epoch": 15.4715626725566, "grad_norm": 0.378981351852417, "learning_rate": 2.8662333333333335e-05, "loss": 0.0073, "step": 14017 }, { "epoch": 15.47266703478741, "grad_norm": 0.15161427855491638, "learning_rate": 2.8662e-05, "loss": 0.0093, "step": 14018 }, { "epoch": 15.473771397018222, "grad_norm": 0.22066840529441833, "learning_rate": 2.8661666666666667e-05, "loss": 0.0072, "step": 14019 }, { "epoch": 15.474875759249034, "grad_norm": 0.17685572803020477, "learning_rate": 2.8661333333333333e-05, "loss": 0.0067, "step": 14020 }, { "epoch": 
15.475980121479845, "grad_norm": 0.238273024559021, "learning_rate": 2.8661000000000002e-05, "loss": 0.0085, "step": 14021 }, { "epoch": 15.477084483710657, "grad_norm": 0.19808784127235413, "learning_rate": 2.8660666666666665e-05, "loss": 0.0094, "step": 14022 }, { "epoch": 15.47818884594147, "grad_norm": 0.5663729310035706, "learning_rate": 2.8660333333333334e-05, "loss": 0.0089, "step": 14023 }, { "epoch": 15.47929320817228, "grad_norm": 0.6066079139709473, "learning_rate": 2.8660000000000003e-05, "loss": 0.0119, "step": 14024 }, { "epoch": 15.480397570403092, "grad_norm": 0.2650446891784668, "learning_rate": 2.8659666666666666e-05, "loss": 0.0072, "step": 14025 }, { "epoch": 15.481501932633904, "grad_norm": 0.16874435544013977, "learning_rate": 2.8659333333333335e-05, "loss": 0.0069, "step": 14026 }, { "epoch": 15.482606294864716, "grad_norm": 0.26439011096954346, "learning_rate": 2.8659e-05, "loss": 0.0116, "step": 14027 }, { "epoch": 15.483710657095527, "grad_norm": 0.3052387833595276, "learning_rate": 2.8658666666666667e-05, "loss": 0.0087, "step": 14028 }, { "epoch": 15.484815019326339, "grad_norm": 0.7529382109642029, "learning_rate": 2.8658333333333333e-05, "loss": 0.0104, "step": 14029 }, { "epoch": 15.485919381557151, "grad_norm": 0.1712447702884674, "learning_rate": 2.8658000000000002e-05, "loss": 0.0035, "step": 14030 }, { "epoch": 15.487023743787962, "grad_norm": 0.26486676931381226, "learning_rate": 2.8657666666666665e-05, "loss": 0.009, "step": 14031 }, { "epoch": 15.488128106018774, "grad_norm": 0.26250597834587097, "learning_rate": 2.8657333333333334e-05, "loss": 0.0125, "step": 14032 }, { "epoch": 15.489232468249586, "grad_norm": 0.2738472521305084, "learning_rate": 2.8657000000000004e-05, "loss": 0.0066, "step": 14033 }, { "epoch": 15.490336830480398, "grad_norm": 0.36327898502349854, "learning_rate": 2.8656666666666666e-05, "loss": 0.013, "step": 14034 }, { "epoch": 15.491441192711209, "grad_norm": 0.36693835258483887, "learning_rate": 
2.8656333333333335e-05, "loss": 0.0099, "step": 14035 }, { "epoch": 15.492545554942021, "grad_norm": 0.3176564872264862, "learning_rate": 2.8656e-05, "loss": 0.0074, "step": 14036 }, { "epoch": 15.493649917172833, "grad_norm": 0.30220919847488403, "learning_rate": 2.8655666666666667e-05, "loss": 0.008, "step": 14037 }, { "epoch": 15.494754279403644, "grad_norm": 0.2517567276954651, "learning_rate": 2.8655333333333333e-05, "loss": 0.0063, "step": 14038 }, { "epoch": 15.495858641634456, "grad_norm": 0.16722534596920013, "learning_rate": 2.8655000000000003e-05, "loss": 0.0065, "step": 14039 }, { "epoch": 15.496963003865268, "grad_norm": 1.0619219541549683, "learning_rate": 2.8654666666666665e-05, "loss": 0.0141, "step": 14040 }, { "epoch": 15.49806736609608, "grad_norm": 1.002760410308838, "learning_rate": 2.8654333333333334e-05, "loss": 0.2452, "step": 14041 }, { "epoch": 15.49917172832689, "grad_norm": 0.6421335339546204, "learning_rate": 2.8654e-05, "loss": 0.1373, "step": 14042 }, { "epoch": 15.500276090557703, "grad_norm": 0.5007486939430237, "learning_rate": 2.8653666666666666e-05, "loss": 0.1091, "step": 14043 }, { "epoch": 15.501380452788515, "grad_norm": 0.5195298194885254, "learning_rate": 2.8653333333333336e-05, "loss": 0.1124, "step": 14044 }, { "epoch": 15.502484815019326, "grad_norm": 0.5162807106971741, "learning_rate": 2.8652999999999998e-05, "loss": 0.1025, "step": 14045 }, { "epoch": 15.503589177250138, "grad_norm": 0.37654629349708557, "learning_rate": 2.8652666666666668e-05, "loss": 0.0383, "step": 14046 }, { "epoch": 15.50469353948095, "grad_norm": 0.39404743909835815, "learning_rate": 2.8652333333333334e-05, "loss": 0.1107, "step": 14047 }, { "epoch": 15.50579790171176, "grad_norm": 0.3921888768672943, "learning_rate": 2.8652e-05, "loss": 0.0193, "step": 14048 }, { "epoch": 15.506902263942573, "grad_norm": 0.21553915739059448, "learning_rate": 2.865166666666667e-05, "loss": 0.0257, "step": 14049 }, { "epoch": 15.508006626173385, "grad_norm": 
0.31742408871650696, "learning_rate": 2.8651333333333335e-05, "loss": 0.0261, "step": 14050 }, { "epoch": 15.509110988404197, "grad_norm": 0.2160603106021881, "learning_rate": 2.8651e-05, "loss": 0.0135, "step": 14051 }, { "epoch": 15.510215350635008, "grad_norm": 0.18583005666732788, "learning_rate": 2.8650666666666667e-05, "loss": 0.0111, "step": 14052 }, { "epoch": 15.51131971286582, "grad_norm": 0.1360240876674652, "learning_rate": 2.8650333333333336e-05, "loss": 0.0129, "step": 14053 }, { "epoch": 15.512424075096632, "grad_norm": 0.29664796590805054, "learning_rate": 2.865e-05, "loss": 0.0144, "step": 14054 }, { "epoch": 15.513528437327443, "grad_norm": 0.1855698674917221, "learning_rate": 2.8649666666666668e-05, "loss": 0.0095, "step": 14055 }, { "epoch": 15.514632799558255, "grad_norm": 0.27654027938842773, "learning_rate": 2.8649333333333334e-05, "loss": 0.0089, "step": 14056 }, { "epoch": 15.515737161789067, "grad_norm": 0.46321725845336914, "learning_rate": 2.8649e-05, "loss": 0.0135, "step": 14057 }, { "epoch": 15.516841524019878, "grad_norm": 0.48947376012802124, "learning_rate": 2.864866666666667e-05, "loss": 0.0185, "step": 14058 }, { "epoch": 15.51794588625069, "grad_norm": 0.22536678612232208, "learning_rate": 2.8648333333333335e-05, "loss": 0.0121, "step": 14059 }, { "epoch": 15.519050248481502, "grad_norm": 0.14522498846054077, "learning_rate": 2.8648e-05, "loss": 0.0104, "step": 14060 }, { "epoch": 15.520154610712314, "grad_norm": 0.18655945360660553, "learning_rate": 2.8647666666666667e-05, "loss": 0.0127, "step": 14061 }, { "epoch": 15.521258972943125, "grad_norm": 0.14758090674877167, "learning_rate": 2.8647333333333336e-05, "loss": 0.007, "step": 14062 }, { "epoch": 15.522363335173937, "grad_norm": 0.15778230130672455, "learning_rate": 2.8647e-05, "loss": 0.0093, "step": 14063 }, { "epoch": 15.52346769740475, "grad_norm": 0.1739015281200409, "learning_rate": 2.8646666666666668e-05, "loss": 0.0114, "step": 14064 }, { "epoch": 
15.52457205963556, "grad_norm": 0.3082105815410614, "learning_rate": 2.8646333333333334e-05, "loss": 0.0106, "step": 14065 }, { "epoch": 15.525676421866372, "grad_norm": 0.11797571927309036, "learning_rate": 2.8646e-05, "loss": 0.0066, "step": 14066 }, { "epoch": 15.526780784097184, "grad_norm": 0.2646239995956421, "learning_rate": 2.864566666666667e-05, "loss": 0.0332, "step": 14067 }, { "epoch": 15.527885146327996, "grad_norm": 0.18698394298553467, "learning_rate": 2.8645333333333335e-05, "loss": 0.0342, "step": 14068 }, { "epoch": 15.528989508558807, "grad_norm": 0.7053478956222534, "learning_rate": 2.8645e-05, "loss": 0.0119, "step": 14069 }, { "epoch": 15.530093870789619, "grad_norm": 0.2530365288257599, "learning_rate": 2.8644666666666667e-05, "loss": 0.009, "step": 14070 }, { "epoch": 15.531198233020431, "grad_norm": 0.23773808777332306, "learning_rate": 2.8644333333333336e-05, "loss": 0.0086, "step": 14071 }, { "epoch": 15.532302595251242, "grad_norm": 0.3624246418476105, "learning_rate": 2.8644e-05, "loss": 0.0095, "step": 14072 }, { "epoch": 15.533406957482054, "grad_norm": 0.2696498930454254, "learning_rate": 2.8643666666666668e-05, "loss": 0.0067, "step": 14073 }, { "epoch": 15.534511319712866, "grad_norm": 0.3000427782535553, "learning_rate": 2.8643333333333334e-05, "loss": 0.0116, "step": 14074 }, { "epoch": 15.535615681943678, "grad_norm": 0.39739808440208435, "learning_rate": 2.8643e-05, "loss": 0.01, "step": 14075 }, { "epoch": 15.536720044174489, "grad_norm": 0.153808131814003, "learning_rate": 2.864266666666667e-05, "loss": 0.0073, "step": 14076 }, { "epoch": 15.537824406405301, "grad_norm": 0.25964125990867615, "learning_rate": 2.8642333333333332e-05, "loss": 0.0071, "step": 14077 }, { "epoch": 15.538928768636113, "grad_norm": 0.26160576939582825, "learning_rate": 2.8642e-05, "loss": 0.0081, "step": 14078 }, { "epoch": 15.540033130866924, "grad_norm": 0.12619951367378235, "learning_rate": 2.8641666666666667e-05, "loss": 0.0062, "step": 14079 }, 
{ "epoch": 15.541137493097736, "grad_norm": 0.24272339046001434, "learning_rate": 2.8641333333333333e-05, "loss": 0.0104, "step": 14080 }, { "epoch": 15.542241855328548, "grad_norm": 0.07817795872688293, "learning_rate": 2.8641e-05, "loss": 0.0035, "step": 14081 }, { "epoch": 15.54334621755936, "grad_norm": 0.1202499195933342, "learning_rate": 2.864066666666667e-05, "loss": 0.0029, "step": 14082 }, { "epoch": 15.54445057979017, "grad_norm": 0.23046137392520905, "learning_rate": 2.8640333333333334e-05, "loss": 0.0079, "step": 14083 }, { "epoch": 15.545554942020983, "grad_norm": 0.35122910141944885, "learning_rate": 2.864e-05, "loss": 0.0147, "step": 14084 }, { "epoch": 15.546659304251795, "grad_norm": 0.5112335085868835, "learning_rate": 2.863966666666667e-05, "loss": 0.0159, "step": 14085 }, { "epoch": 15.547763666482606, "grad_norm": 0.5238063335418701, "learning_rate": 2.8639333333333332e-05, "loss": 0.0115, "step": 14086 }, { "epoch": 15.548868028713418, "grad_norm": 0.34213364124298096, "learning_rate": 2.8639e-05, "loss": 0.0127, "step": 14087 }, { "epoch": 15.54997239094423, "grad_norm": 0.37606289982795715, "learning_rate": 2.8638666666666667e-05, "loss": 0.0154, "step": 14088 }, { "epoch": 15.55107675317504, "grad_norm": 0.33173856139183044, "learning_rate": 2.8638333333333333e-05, "loss": 0.0109, "step": 14089 }, { "epoch": 15.552181115405853, "grad_norm": 1.339942455291748, "learning_rate": 2.8638e-05, "loss": 0.0317, "step": 14090 }, { "epoch": 15.553285477636665, "grad_norm": 0.5957567691802979, "learning_rate": 2.863766666666667e-05, "loss": 0.1714, "step": 14091 }, { "epoch": 15.554389839867477, "grad_norm": 0.6966227293014526, "learning_rate": 2.8637333333333334e-05, "loss": 0.1937, "step": 14092 }, { "epoch": 15.555494202098288, "grad_norm": 0.6284185647964478, "learning_rate": 2.8637e-05, "loss": 0.0869, "step": 14093 }, { "epoch": 15.5565985643291, "grad_norm": 0.5427674651145935, "learning_rate": 2.863666666666667e-05, "loss": 0.1248, "step": 
14094 }, { "epoch": 15.557702926559912, "grad_norm": 0.6144496202468872, "learning_rate": 2.8636333333333332e-05, "loss": 0.1059, "step": 14095 }, { "epoch": 15.558807288790723, "grad_norm": 0.3524654805660248, "learning_rate": 2.8636e-05, "loss": 0.0302, "step": 14096 }, { "epoch": 15.559911651021535, "grad_norm": 0.30826011300086975, "learning_rate": 2.8635666666666668e-05, "loss": 0.0493, "step": 14097 }, { "epoch": 15.561016013252347, "grad_norm": 0.40873318910598755, "learning_rate": 2.8635333333333333e-05, "loss": 0.0534, "step": 14098 }, { "epoch": 15.562120375483158, "grad_norm": 0.3309340178966522, "learning_rate": 2.8635000000000003e-05, "loss": 0.0463, "step": 14099 }, { "epoch": 15.56322473771397, "grad_norm": 0.38034796714782715, "learning_rate": 2.863466666666667e-05, "loss": 0.0374, "step": 14100 }, { "epoch": 15.564329099944782, "grad_norm": 0.15157318115234375, "learning_rate": 2.8634333333333335e-05, "loss": 0.0085, "step": 14101 }, { "epoch": 15.565433462175594, "grad_norm": 0.17319035530090332, "learning_rate": 2.8634e-05, "loss": 0.0267, "step": 14102 }, { "epoch": 15.566537824406405, "grad_norm": 0.40130680799484253, "learning_rate": 2.8633666666666667e-05, "loss": 0.0121, "step": 14103 }, { "epoch": 15.567642186637217, "grad_norm": 0.2586474120616913, "learning_rate": 2.8633333333333332e-05, "loss": 0.0452, "step": 14104 }, { "epoch": 15.56874654886803, "grad_norm": 0.20089054107666016, "learning_rate": 2.8633000000000002e-05, "loss": 0.0071, "step": 14105 }, { "epoch": 15.56985091109884, "grad_norm": 0.2030467689037323, "learning_rate": 2.8632666666666664e-05, "loss": 0.0068, "step": 14106 }, { "epoch": 15.570955273329652, "grad_norm": 0.3191782832145691, "learning_rate": 2.8632333333333334e-05, "loss": 0.0117, "step": 14107 }, { "epoch": 15.572059635560464, "grad_norm": 0.3087503910064697, "learning_rate": 2.8632000000000003e-05, "loss": 0.0154, "step": 14108 }, { "epoch": 15.573163997791276, "grad_norm": 0.31923148036003113, 
"learning_rate": 2.8631666666666666e-05, "loss": 0.0129, "step": 14109 }, { "epoch": 15.574268360022087, "grad_norm": 0.2972835898399353, "learning_rate": 2.8631333333333335e-05, "loss": 0.0078, "step": 14110 }, { "epoch": 15.575372722252899, "grad_norm": 0.2077360451221466, "learning_rate": 2.8631e-05, "loss": 0.0071, "step": 14111 }, { "epoch": 15.576477084483711, "grad_norm": 0.17316830158233643, "learning_rate": 2.8630666666666667e-05, "loss": 0.0076, "step": 14112 }, { "epoch": 15.577581446714522, "grad_norm": 0.3253907561302185, "learning_rate": 2.8630333333333333e-05, "loss": 0.0122, "step": 14113 }, { "epoch": 15.578685808945334, "grad_norm": 0.11366426944732666, "learning_rate": 2.8630000000000002e-05, "loss": 0.0062, "step": 14114 }, { "epoch": 15.579790171176146, "grad_norm": 0.15001918375492096, "learning_rate": 2.8629666666666665e-05, "loss": 0.0068, "step": 14115 }, { "epoch": 15.580894533406958, "grad_norm": 0.28877946734428406, "learning_rate": 2.8629333333333334e-05, "loss": 0.0059, "step": 14116 }, { "epoch": 15.581998895637769, "grad_norm": 0.22861655056476593, "learning_rate": 2.8629000000000003e-05, "loss": 0.006, "step": 14117 }, { "epoch": 15.583103257868581, "grad_norm": 0.2898414134979248, "learning_rate": 2.8628666666666666e-05, "loss": 0.0083, "step": 14118 }, { "epoch": 15.584207620099393, "grad_norm": 0.3073636293411255, "learning_rate": 2.8628333333333335e-05, "loss": 0.0167, "step": 14119 }, { "epoch": 15.585311982330204, "grad_norm": 0.2071220427751541, "learning_rate": 2.8628e-05, "loss": 0.0066, "step": 14120 }, { "epoch": 15.586416344561016, "grad_norm": 0.1538611799478531, "learning_rate": 2.8627666666666667e-05, "loss": 0.0045, "step": 14121 }, { "epoch": 15.587520706791828, "grad_norm": 0.26091188192367554, "learning_rate": 2.8627333333333333e-05, "loss": 0.0103, "step": 14122 }, { "epoch": 15.588625069022639, "grad_norm": 0.1586393564939499, "learning_rate": 2.8627000000000002e-05, "loss": 0.0049, "step": 14123 }, { "epoch": 
15.589729431253451, "grad_norm": 0.27484026551246643, "learning_rate": 2.8626666666666668e-05, "loss": 0.0139, "step": 14124 }, { "epoch": 15.590833793484263, "grad_norm": 0.37243351340293884, "learning_rate": 2.8626333333333334e-05, "loss": 0.0116, "step": 14125 }, { "epoch": 15.591938155715075, "grad_norm": 0.4029911756515503, "learning_rate": 2.8626000000000003e-05, "loss": 0.0113, "step": 14126 }, { "epoch": 15.593042517945886, "grad_norm": 0.1604091227054596, "learning_rate": 2.8625666666666666e-05, "loss": 0.0096, "step": 14127 }, { "epoch": 15.594146880176698, "grad_norm": 0.30345389246940613, "learning_rate": 2.8625333333333335e-05, "loss": 0.0085, "step": 14128 }, { "epoch": 15.59525124240751, "grad_norm": 0.1997888833284378, "learning_rate": 2.8625e-05, "loss": 0.0064, "step": 14129 }, { "epoch": 15.59635560463832, "grad_norm": 0.2445085197687149, "learning_rate": 2.8624666666666667e-05, "loss": 0.0059, "step": 14130 }, { "epoch": 15.597459966869133, "grad_norm": 0.6556234955787659, "learning_rate": 2.8624333333333333e-05, "loss": 0.0112, "step": 14131 }, { "epoch": 15.598564329099945, "grad_norm": 0.7459039092063904, "learning_rate": 2.8624000000000002e-05, "loss": 0.0087, "step": 14132 }, { "epoch": 15.599668691330756, "grad_norm": 0.241312175989151, "learning_rate": 2.862366666666667e-05, "loss": 0.0143, "step": 14133 }, { "epoch": 15.600773053561568, "grad_norm": 0.22701500356197357, "learning_rate": 2.8623333333333334e-05, "loss": 0.0076, "step": 14134 }, { "epoch": 15.60187741579238, "grad_norm": 0.3560062348842621, "learning_rate": 2.8623e-05, "loss": 0.0127, "step": 14135 }, { "epoch": 15.602981778023192, "grad_norm": 0.12190146744251251, "learning_rate": 2.8622666666666666e-05, "loss": 0.0039, "step": 14136 }, { "epoch": 15.604086140254003, "grad_norm": 0.3854070007801056, "learning_rate": 2.8622333333333335e-05, "loss": 0.0054, "step": 14137 }, { "epoch": 15.605190502484815, "grad_norm": 0.2983592748641968, "learning_rate": 
2.8621999999999998e-05, "loss": 0.0138, "step": 14138 }, { "epoch": 15.606294864715627, "grad_norm": 0.21469786763191223, "learning_rate": 2.8621666666666667e-05, "loss": 0.007, "step": 14139 }, { "epoch": 15.607399226946438, "grad_norm": 0.31181007623672485, "learning_rate": 2.8621333333333333e-05, "loss": 0.0166, "step": 14140 }, { "epoch": 15.60850358917725, "grad_norm": 0.7089667916297913, "learning_rate": 2.8621e-05, "loss": 0.1824, "step": 14141 }, { "epoch": 15.609607951408062, "grad_norm": 0.4215710461139679, "learning_rate": 2.862066666666667e-05, "loss": 0.1294, "step": 14142 }, { "epoch": 15.610712313638874, "grad_norm": 0.658913791179657, "learning_rate": 2.8620333333333334e-05, "loss": 0.1134, "step": 14143 }, { "epoch": 15.611816675869685, "grad_norm": 0.6952909827232361, "learning_rate": 2.862e-05, "loss": 0.1254, "step": 14144 }, { "epoch": 15.612921038100497, "grad_norm": 0.41062846779823303, "learning_rate": 2.8619666666666666e-05, "loss": 0.0523, "step": 14145 }, { "epoch": 15.61402540033131, "grad_norm": 0.9005101919174194, "learning_rate": 2.8619333333333336e-05, "loss": 0.0601, "step": 14146 }, { "epoch": 15.61512976256212, "grad_norm": 0.5153002142906189, "learning_rate": 2.8618999999999998e-05, "loss": 0.0767, "step": 14147 }, { "epoch": 15.616234124792932, "grad_norm": 0.4441898763179779, "learning_rate": 2.8618666666666668e-05, "loss": 0.0931, "step": 14148 }, { "epoch": 15.617338487023744, "grad_norm": 0.39372462034225464, "learning_rate": 2.8618333333333337e-05, "loss": 0.0189, "step": 14149 }, { "epoch": 15.618442849254556, "grad_norm": 0.12427215278148651, "learning_rate": 2.8618e-05, "loss": 0.0071, "step": 14150 }, { "epoch": 15.619547211485367, "grad_norm": 0.21429042518138885, "learning_rate": 2.861766666666667e-05, "loss": 0.0093, "step": 14151 }, { "epoch": 15.620651573716179, "grad_norm": 0.08413457125425339, "learning_rate": 2.8617333333333335e-05, "loss": 0.0046, "step": 14152 }, { "epoch": 15.621755935946991, "grad_norm": 
0.29889655113220215, "learning_rate": 2.8617e-05, "loss": 0.0146, "step": 14153 }, { "epoch": 15.622860298177802, "grad_norm": 0.19928428530693054, "learning_rate": 2.8616666666666667e-05, "loss": 0.0119, "step": 14154 }, { "epoch": 15.623964660408614, "grad_norm": 0.16796760261058807, "learning_rate": 2.8616333333333336e-05, "loss": 0.0061, "step": 14155 }, { "epoch": 15.625069022639426, "grad_norm": 0.2547735273838043, "learning_rate": 2.8616e-05, "loss": 0.01, "step": 14156 }, { "epoch": 15.626173384870237, "grad_norm": 0.18218985199928284, "learning_rate": 2.8615666666666668e-05, "loss": 0.0165, "step": 14157 }, { "epoch": 15.627277747101049, "grad_norm": 0.1252739131450653, "learning_rate": 2.8615333333333337e-05, "loss": 0.0068, "step": 14158 }, { "epoch": 15.628382109331861, "grad_norm": 0.2037748545408249, "learning_rate": 2.8615e-05, "loss": 0.0079, "step": 14159 }, { "epoch": 15.629486471562673, "grad_norm": 0.19570578634738922, "learning_rate": 2.861466666666667e-05, "loss": 0.0107, "step": 14160 }, { "epoch": 15.630590833793484, "grad_norm": 0.18198326230049133, "learning_rate": 2.8614333333333335e-05, "loss": 0.0097, "step": 14161 }, { "epoch": 15.631695196024296, "grad_norm": 1.079269289970398, "learning_rate": 2.8614e-05, "loss": 0.0141, "step": 14162 }, { "epoch": 15.632799558255108, "grad_norm": 0.17217348515987396, "learning_rate": 2.8613666666666667e-05, "loss": 0.0057, "step": 14163 }, { "epoch": 15.633903920485919, "grad_norm": 0.07339362800121307, "learning_rate": 2.8613333333333333e-05, "loss": 0.0027, "step": 14164 }, { "epoch": 15.635008282716731, "grad_norm": 0.2978256344795227, "learning_rate": 2.8613e-05, "loss": 0.0159, "step": 14165 }, { "epoch": 15.636112644947543, "grad_norm": 0.1783829629421234, "learning_rate": 2.8612666666666668e-05, "loss": 0.0057, "step": 14166 }, { "epoch": 15.637217007178354, "grad_norm": 0.22487881779670715, "learning_rate": 2.8612333333333334e-05, "loss": 0.0551, "step": 14167 }, { "epoch": 
15.638321369409166, "grad_norm": 0.5305521488189697, "learning_rate": 2.8612e-05, "loss": 0.0187, "step": 14168 }, { "epoch": 15.639425731639978, "grad_norm": 0.20852331817150116, "learning_rate": 2.861166666666667e-05, "loss": 0.01, "step": 14169 }, { "epoch": 15.64053009387079, "grad_norm": 0.201112762093544, "learning_rate": 2.861133333333333e-05, "loss": 0.0086, "step": 14170 }, { "epoch": 15.6416344561016, "grad_norm": 0.3020360469818115, "learning_rate": 2.8611e-05, "loss": 0.0159, "step": 14171 }, { "epoch": 15.642738818332413, "grad_norm": 0.18447625637054443, "learning_rate": 2.8610666666666667e-05, "loss": 0.0094, "step": 14172 }, { "epoch": 15.643843180563225, "grad_norm": 0.24346819519996643, "learning_rate": 2.8610333333333333e-05, "loss": 0.011, "step": 14173 }, { "epoch": 15.644947542794036, "grad_norm": 0.17932628095149994, "learning_rate": 2.8610000000000002e-05, "loss": 0.0106, "step": 14174 }, { "epoch": 15.646051905024848, "grad_norm": 0.16519884765148163, "learning_rate": 2.8609666666666668e-05, "loss": 0.0066, "step": 14175 }, { "epoch": 15.64715626725566, "grad_norm": 0.3370504081249237, "learning_rate": 2.8609333333333334e-05, "loss": 0.0102, "step": 14176 }, { "epoch": 15.648260629486472, "grad_norm": 0.3285646140575409, "learning_rate": 2.8609e-05, "loss": 0.0149, "step": 14177 }, { "epoch": 15.649364991717283, "grad_norm": 0.29970142245292664, "learning_rate": 2.860866666666667e-05, "loss": 0.011, "step": 14178 }, { "epoch": 15.650469353948095, "grad_norm": 0.0957917794585228, "learning_rate": 2.8608333333333332e-05, "loss": 0.0068, "step": 14179 }, { "epoch": 15.651573716178907, "grad_norm": 0.24769848585128784, "learning_rate": 2.8608e-05, "loss": 0.0103, "step": 14180 }, { "epoch": 15.652678078409718, "grad_norm": 0.3794620931148529, "learning_rate": 2.8607666666666667e-05, "loss": 0.0161, "step": 14181 }, { "epoch": 15.65378244064053, "grad_norm": 0.20786237716674805, "learning_rate": 2.8607333333333333e-05, "loss": 0.0106, "step": 
14182 }, { "epoch": 15.654886802871342, "grad_norm": 0.24702322483062744, "learning_rate": 2.8607000000000002e-05, "loss": 0.0057, "step": 14183 }, { "epoch": 15.655991165102154, "grad_norm": 0.15572570264339447, "learning_rate": 2.860666666666667e-05, "loss": 0.0073, "step": 14184 }, { "epoch": 15.657095527332965, "grad_norm": 0.2186400145292282, "learning_rate": 2.8606333333333334e-05, "loss": 0.0103, "step": 14185 }, { "epoch": 15.658199889563777, "grad_norm": 0.7645488977432251, "learning_rate": 2.8606e-05, "loss": 0.0101, "step": 14186 }, { "epoch": 15.65930425179459, "grad_norm": 0.18124301731586456, "learning_rate": 2.860566666666667e-05, "loss": 0.0073, "step": 14187 }, { "epoch": 15.6604086140254, "grad_norm": 0.17884095013141632, "learning_rate": 2.8605333333333332e-05, "loss": 0.0036, "step": 14188 }, { "epoch": 15.661512976256212, "grad_norm": 0.2403203397989273, "learning_rate": 2.8605e-05, "loss": 0.0127, "step": 14189 }, { "epoch": 15.662617338487024, "grad_norm": 0.3061479330062866, "learning_rate": 2.8604666666666667e-05, "loss": 0.0178, "step": 14190 }, { "epoch": 15.663721700717835, "grad_norm": 0.8633767366409302, "learning_rate": 2.8604333333333333e-05, "loss": 0.1462, "step": 14191 }, { "epoch": 15.664826062948647, "grad_norm": 0.8575895428657532, "learning_rate": 2.8604000000000003e-05, "loss": 0.1363, "step": 14192 }, { "epoch": 15.665930425179459, "grad_norm": 0.5755537152290344, "learning_rate": 2.8603666666666665e-05, "loss": 0.1146, "step": 14193 }, { "epoch": 15.667034787410271, "grad_norm": 0.3745725452899933, "learning_rate": 2.8603333333333334e-05, "loss": 0.0703, "step": 14194 }, { "epoch": 15.668139149641082, "grad_norm": 0.36555832624435425, "learning_rate": 2.8603e-05, "loss": 0.0664, "step": 14195 }, { "epoch": 15.669243511871894, "grad_norm": 0.36927011609077454, "learning_rate": 2.8602666666666666e-05, "loss": 0.0486, "step": 14196 }, { "epoch": 15.670347874102706, "grad_norm": 0.3213944137096405, "learning_rate": 
2.8602333333333332e-05, "loss": 0.0271, "step": 14197 }, { "epoch": 15.671452236333517, "grad_norm": 0.1972370445728302, "learning_rate": 2.8602e-05, "loss": 0.0242, "step": 14198 }, { "epoch": 15.672556598564329, "grad_norm": 0.380154550075531, "learning_rate": 2.8601666666666668e-05, "loss": 0.0768, "step": 14199 }, { "epoch": 15.673660960795141, "grad_norm": 0.13452546298503876, "learning_rate": 2.8601333333333333e-05, "loss": 0.0128, "step": 14200 }, { "epoch": 15.674765323025952, "grad_norm": 0.1621231585741043, "learning_rate": 2.8601000000000003e-05, "loss": 0.012, "step": 14201 }, { "epoch": 15.675869685256764, "grad_norm": 0.21169663965702057, "learning_rate": 2.8600666666666665e-05, "loss": 0.0104, "step": 14202 }, { "epoch": 15.676974047487576, "grad_norm": 0.21754997968673706, "learning_rate": 2.8600333333333335e-05, "loss": 0.0166, "step": 14203 }, { "epoch": 15.678078409718388, "grad_norm": 0.2016262710094452, "learning_rate": 2.86e-05, "loss": 0.0084, "step": 14204 }, { "epoch": 15.679182771949199, "grad_norm": 0.2492300271987915, "learning_rate": 2.8599666666666667e-05, "loss": 0.0097, "step": 14205 }, { "epoch": 15.680287134180011, "grad_norm": 0.12670093774795532, "learning_rate": 2.8599333333333332e-05, "loss": 0.003, "step": 14206 }, { "epoch": 15.681391496410823, "grad_norm": 0.27677085995674133, "learning_rate": 2.8599000000000002e-05, "loss": 0.0081, "step": 14207 }, { "epoch": 15.682495858641634, "grad_norm": 0.25426074862480164, "learning_rate": 2.8598666666666668e-05, "loss": 0.0081, "step": 14208 }, { "epoch": 15.683600220872446, "grad_norm": 0.3514496386051178, "learning_rate": 2.8598333333333334e-05, "loss": 0.0151, "step": 14209 }, { "epoch": 15.684704583103258, "grad_norm": 0.17558127641677856, "learning_rate": 2.8598000000000003e-05, "loss": 0.0088, "step": 14210 }, { "epoch": 15.68580894533407, "grad_norm": 0.497174471616745, "learning_rate": 2.8597666666666666e-05, "loss": 0.0105, "step": 14211 }, { "epoch": 15.68691330756488, 
"grad_norm": 0.33402252197265625, "learning_rate": 2.8597333333333335e-05, "loss": 0.0087, "step": 14212 }, { "epoch": 15.688017669795693, "grad_norm": 0.16913725435733795, "learning_rate": 2.8597e-05, "loss": 0.0074, "step": 14213 }, { "epoch": 15.689122032026505, "grad_norm": 0.11350942403078079, "learning_rate": 2.8596666666666667e-05, "loss": 0.0056, "step": 14214 }, { "epoch": 15.690226394257316, "grad_norm": 0.11280211806297302, "learning_rate": 2.8596333333333333e-05, "loss": 0.0061, "step": 14215 }, { "epoch": 15.691330756488128, "grad_norm": 0.35421690344810486, "learning_rate": 2.8596000000000002e-05, "loss": 0.0111, "step": 14216 }, { "epoch": 15.69243511871894, "grad_norm": 0.15696676075458527, "learning_rate": 2.8595666666666668e-05, "loss": 0.0094, "step": 14217 }, { "epoch": 15.693539480949752, "grad_norm": 0.2038520723581314, "learning_rate": 2.8595333333333334e-05, "loss": 0.0032, "step": 14218 }, { "epoch": 15.694643843180563, "grad_norm": 0.11008459329605103, "learning_rate": 2.8595000000000003e-05, "loss": 0.0067, "step": 14219 }, { "epoch": 15.695748205411375, "grad_norm": 0.1566227674484253, "learning_rate": 2.8594666666666666e-05, "loss": 0.0093, "step": 14220 }, { "epoch": 15.696852567642187, "grad_norm": 0.13859671354293823, "learning_rate": 2.8594333333333335e-05, "loss": 0.0051, "step": 14221 }, { "epoch": 15.697956929872998, "grad_norm": 0.23597675561904907, "learning_rate": 2.8594e-05, "loss": 0.0133, "step": 14222 }, { "epoch": 15.69906129210381, "grad_norm": 0.1539444625377655, "learning_rate": 2.8593666666666667e-05, "loss": 0.0053, "step": 14223 }, { "epoch": 15.700165654334622, "grad_norm": 0.8841589093208313, "learning_rate": 2.8593333333333336e-05, "loss": 0.0171, "step": 14224 }, { "epoch": 15.701270016565433, "grad_norm": 0.3086616098880768, "learning_rate": 2.8593e-05, "loss": 0.0112, "step": 14225 }, { "epoch": 15.702374378796245, "grad_norm": 0.33182045817375183, "learning_rate": 2.8592666666666668e-05, "loss": 0.0117, 
"step": 14226 }, { "epoch": 15.703478741027057, "grad_norm": 0.37625500559806824, "learning_rate": 2.8592333333333334e-05, "loss": 0.0092, "step": 14227 }, { "epoch": 15.70458310325787, "grad_norm": 0.541359543800354, "learning_rate": 2.8592e-05, "loss": 0.0144, "step": 14228 }, { "epoch": 15.70568746548868, "grad_norm": 0.3849635720252991, "learning_rate": 2.8591666666666666e-05, "loss": 0.0043, "step": 14229 }, { "epoch": 15.706791827719492, "grad_norm": 0.21591785550117493, "learning_rate": 2.8591333333333335e-05, "loss": 0.0049, "step": 14230 }, { "epoch": 15.707896189950304, "grad_norm": 0.30534419417381287, "learning_rate": 2.8590999999999998e-05, "loss": 0.0111, "step": 14231 }, { "epoch": 15.709000552181115, "grad_norm": 0.3096475303173065, "learning_rate": 2.8590666666666667e-05, "loss": 0.0134, "step": 14232 }, { "epoch": 15.710104914411927, "grad_norm": 0.2549248933792114, "learning_rate": 2.8590333333333336e-05, "loss": 0.0096, "step": 14233 }, { "epoch": 15.71120927664274, "grad_norm": 0.32962098717689514, "learning_rate": 2.859e-05, "loss": 0.0128, "step": 14234 }, { "epoch": 15.71231363887355, "grad_norm": 0.3154335618019104, "learning_rate": 2.858966666666667e-05, "loss": 0.0116, "step": 14235 }, { "epoch": 15.713418001104362, "grad_norm": 0.16067104041576385, "learning_rate": 2.8589333333333334e-05, "loss": 0.0065, "step": 14236 }, { "epoch": 15.714522363335174, "grad_norm": 0.26815685629844666, "learning_rate": 2.8589e-05, "loss": 0.0066, "step": 14237 }, { "epoch": 15.715626725565986, "grad_norm": 0.21455951035022736, "learning_rate": 2.8588666666666666e-05, "loss": 0.0082, "step": 14238 }, { "epoch": 15.716731087796797, "grad_norm": 3.3826770782470703, "learning_rate": 2.8588333333333335e-05, "loss": 0.0153, "step": 14239 }, { "epoch": 15.717835450027609, "grad_norm": 0.23847897350788116, "learning_rate": 2.8587999999999998e-05, "loss": 0.0207, "step": 14240 }, { "epoch": 15.718939812258421, "grad_norm": 0.7077188491821289, "learning_rate": 
2.8587666666666667e-05, "loss": 0.1769, "step": 14241 }, { "epoch": 15.720044174489232, "grad_norm": 0.7155908942222595, "learning_rate": 2.8587333333333337e-05, "loss": 0.1057, "step": 14242 }, { "epoch": 15.721148536720044, "grad_norm": 0.48514705896377563, "learning_rate": 2.8587e-05, "loss": 0.0823, "step": 14243 }, { "epoch": 15.722252898950856, "grad_norm": 0.3775669038295746, "learning_rate": 2.858666666666667e-05, "loss": 0.0598, "step": 14244 }, { "epoch": 15.723357261181668, "grad_norm": 0.6033871173858643, "learning_rate": 2.8586333333333334e-05, "loss": 0.117, "step": 14245 }, { "epoch": 15.724461623412479, "grad_norm": 0.3366202712059021, "learning_rate": 2.8586e-05, "loss": 0.0412, "step": 14246 }, { "epoch": 15.725565985643291, "grad_norm": 0.3326568007469177, "learning_rate": 2.8585666666666666e-05, "loss": 0.0359, "step": 14247 }, { "epoch": 15.726670347874103, "grad_norm": 0.4154842793941498, "learning_rate": 2.8585333333333336e-05, "loss": 0.0754, "step": 14248 }, { "epoch": 15.727774710104914, "grad_norm": 0.6620563268661499, "learning_rate": 2.8585e-05, "loss": 0.031, "step": 14249 }, { "epoch": 15.728879072335726, "grad_norm": 0.24640746414661407, "learning_rate": 2.8584666666666668e-05, "loss": 0.0199, "step": 14250 }, { "epoch": 15.729983434566538, "grad_norm": 0.19142015278339386, "learning_rate": 2.8584333333333337e-05, "loss": 0.0131, "step": 14251 }, { "epoch": 15.73108779679735, "grad_norm": 0.3948199152946472, "learning_rate": 2.8584e-05, "loss": 0.0111, "step": 14252 }, { "epoch": 15.73219215902816, "grad_norm": 0.19265639781951904, "learning_rate": 2.858366666666667e-05, "loss": 0.0091, "step": 14253 }, { "epoch": 15.733296521258973, "grad_norm": 0.1966075450181961, "learning_rate": 2.858333333333333e-05, "loss": 0.0091, "step": 14254 }, { "epoch": 15.734400883489785, "grad_norm": 0.1658799648284912, "learning_rate": 2.8583e-05, "loss": 0.0127, "step": 14255 }, { "epoch": 15.735505245720596, "grad_norm": 0.22822441160678864, 
"learning_rate": 2.8582666666666667e-05, "loss": 0.0119, "step": 14256 }, { "epoch": 15.736609607951408, "grad_norm": 0.25324323773384094, "learning_rate": 2.8582333333333332e-05, "loss": 0.0102, "step": 14257 }, { "epoch": 15.73771397018222, "grad_norm": 0.24401192367076874, "learning_rate": 2.8582000000000002e-05, "loss": 0.0097, "step": 14258 }, { "epoch": 15.738818332413032, "grad_norm": 0.21018168330192566, "learning_rate": 2.8581666666666668e-05, "loss": 0.014, "step": 14259 }, { "epoch": 15.739922694643843, "grad_norm": 0.12061767280101776, "learning_rate": 2.8581333333333334e-05, "loss": 0.0087, "step": 14260 }, { "epoch": 15.741027056874655, "grad_norm": 0.18578806519508362, "learning_rate": 2.8581e-05, "loss": 0.0119, "step": 14261 }, { "epoch": 15.742131419105467, "grad_norm": 0.35578855872154236, "learning_rate": 2.858066666666667e-05, "loss": 0.0137, "step": 14262 }, { "epoch": 15.743235781336278, "grad_norm": 0.22781364619731903, "learning_rate": 2.858033333333333e-05, "loss": 0.0069, "step": 14263 }, { "epoch": 15.74434014356709, "grad_norm": 0.1716633439064026, "learning_rate": 2.858e-05, "loss": 0.0072, "step": 14264 }, { "epoch": 15.745444505797902, "grad_norm": 0.2831812798976898, "learning_rate": 2.8579666666666667e-05, "loss": 0.0105, "step": 14265 }, { "epoch": 15.746548868028713, "grad_norm": 0.1399301439523697, "learning_rate": 2.8579333333333333e-05, "loss": 0.0063, "step": 14266 }, { "epoch": 15.747653230259525, "grad_norm": 0.18630805611610413, "learning_rate": 2.8579000000000002e-05, "loss": 0.0071, "step": 14267 }, { "epoch": 15.748757592490337, "grad_norm": 0.1646912544965744, "learning_rate": 2.8578666666666668e-05, "loss": 0.011, "step": 14268 }, { "epoch": 15.74986195472115, "grad_norm": 0.39763352274894714, "learning_rate": 2.8578333333333334e-05, "loss": 0.0076, "step": 14269 }, { "epoch": 15.75096631695196, "grad_norm": 0.36103859543800354, "learning_rate": 2.8578e-05, "loss": 0.0076, "step": 14270 }, { "epoch": 
15.752070679182772, "grad_norm": 0.3182050883769989, "learning_rate": 2.857766666666667e-05, "loss": 0.0112, "step": 14271 }, { "epoch": 15.753175041413584, "grad_norm": 0.19785812497138977, "learning_rate": 2.857733333333333e-05, "loss": 0.0077, "step": 14272 }, { "epoch": 15.754279403644395, "grad_norm": 0.3924276828765869, "learning_rate": 2.8577e-05, "loss": 0.0165, "step": 14273 }, { "epoch": 15.755383765875207, "grad_norm": 0.09866514056921005, "learning_rate": 2.857666666666667e-05, "loss": 0.006, "step": 14274 }, { "epoch": 15.75648812810602, "grad_norm": 0.2116527557373047, "learning_rate": 2.8576333333333333e-05, "loss": 0.0337, "step": 14275 }, { "epoch": 15.75759249033683, "grad_norm": 0.2185068577528, "learning_rate": 2.8576000000000002e-05, "loss": 0.0124, "step": 14276 }, { "epoch": 15.758696852567642, "grad_norm": 0.4482423961162567, "learning_rate": 2.8575666666666668e-05, "loss": 0.0126, "step": 14277 }, { "epoch": 15.759801214798454, "grad_norm": 0.32864582538604736, "learning_rate": 2.8575333333333334e-05, "loss": 0.0105, "step": 14278 }, { "epoch": 15.760905577029266, "grad_norm": 0.17042453587055206, "learning_rate": 2.8575e-05, "loss": 0.0083, "step": 14279 }, { "epoch": 15.762009939260077, "grad_norm": 0.4880090355873108, "learning_rate": 2.857466666666667e-05, "loss": 0.0128, "step": 14280 }, { "epoch": 15.763114301490889, "grad_norm": 0.2032058984041214, "learning_rate": 2.8574333333333332e-05, "loss": 0.0064, "step": 14281 }, { "epoch": 15.764218663721701, "grad_norm": 0.28186294436454773, "learning_rate": 2.8574e-05, "loss": 0.0074, "step": 14282 }, { "epoch": 15.765323025952512, "grad_norm": 0.7844197154045105, "learning_rate": 2.857366666666667e-05, "loss": 0.0104, "step": 14283 }, { "epoch": 15.766427388183324, "grad_norm": 0.17199625074863434, "learning_rate": 2.8573333333333333e-05, "loss": 0.0104, "step": 14284 }, { "epoch": 15.767531750414136, "grad_norm": 0.3616652488708496, "learning_rate": 2.8573000000000002e-05, "loss": 
0.0142, "step": 14285 }, { "epoch": 15.768636112644948, "grad_norm": 0.23240073025226593, "learning_rate": 2.8572666666666665e-05, "loss": 0.0046, "step": 14286 }, { "epoch": 15.769740474875759, "grad_norm": 0.3255418539047241, "learning_rate": 2.8572333333333334e-05, "loss": 0.0144, "step": 14287 }, { "epoch": 15.770844837106571, "grad_norm": 0.7470912337303162, "learning_rate": 2.8572e-05, "loss": 0.0183, "step": 14288 }, { "epoch": 15.771949199337383, "grad_norm": 0.08739832043647766, "learning_rate": 2.8571666666666666e-05, "loss": 0.0027, "step": 14289 }, { "epoch": 15.773053561568194, "grad_norm": 0.34303775429725647, "learning_rate": 2.8571333333333332e-05, "loss": 0.0086, "step": 14290 }, { "epoch": 15.774157923799006, "grad_norm": 0.8795240521430969, "learning_rate": 2.8571e-05, "loss": 0.1989, "step": 14291 }, { "epoch": 15.775262286029818, "grad_norm": 0.6405091285705566, "learning_rate": 2.8570666666666667e-05, "loss": 0.2112, "step": 14292 }, { "epoch": 15.77636664826063, "grad_norm": 0.43529021739959717, "learning_rate": 2.8570333333333333e-05, "loss": 0.1148, "step": 14293 }, { "epoch": 15.77747101049144, "grad_norm": 0.7507809996604919, "learning_rate": 2.8570000000000003e-05, "loss": 0.0924, "step": 14294 }, { "epoch": 15.778575372722253, "grad_norm": 0.4661971926689148, "learning_rate": 2.8569666666666665e-05, "loss": 0.0726, "step": 14295 }, { "epoch": 15.779679734953065, "grad_norm": 0.3535061776638031, "learning_rate": 2.8569333333333334e-05, "loss": 0.0792, "step": 14296 }, { "epoch": 15.780784097183876, "grad_norm": 0.2966044545173645, "learning_rate": 2.8569e-05, "loss": 0.0517, "step": 14297 }, { "epoch": 15.781888459414688, "grad_norm": 0.29440566897392273, "learning_rate": 2.8568666666666666e-05, "loss": 0.0276, "step": 14298 }, { "epoch": 15.7829928216455, "grad_norm": 0.2383352667093277, "learning_rate": 2.8568333333333336e-05, "loss": 0.0232, "step": 14299 }, { "epoch": 15.78409718387631, "grad_norm": 0.23263420164585114, 
"learning_rate": 2.8568e-05, "loss": 0.0222, "step": 14300 }, { "epoch": 15.785201546107123, "grad_norm": 0.3142690658569336, "learning_rate": 2.8567666666666668e-05, "loss": 0.0146, "step": 14301 }, { "epoch": 15.786305908337935, "grad_norm": 0.13025149703025818, "learning_rate": 2.8567333333333333e-05, "loss": 0.0076, "step": 14302 }, { "epoch": 15.787410270568747, "grad_norm": 0.22496235370635986, "learning_rate": 2.8567000000000003e-05, "loss": 0.0105, "step": 14303 }, { "epoch": 15.788514632799558, "grad_norm": 0.173854798078537, "learning_rate": 2.8566666666666665e-05, "loss": 0.0087, "step": 14304 }, { "epoch": 15.78961899503037, "grad_norm": 0.31858745217323303, "learning_rate": 2.8566333333333335e-05, "loss": 0.0135, "step": 14305 }, { "epoch": 15.790723357261182, "grad_norm": 0.27507859468460083, "learning_rate": 2.8566e-05, "loss": 0.0299, "step": 14306 }, { "epoch": 15.791827719491993, "grad_norm": 0.22444643080234528, "learning_rate": 2.8565666666666667e-05, "loss": 0.0176, "step": 14307 }, { "epoch": 15.792932081722805, "grad_norm": 0.143843874335289, "learning_rate": 2.8565333333333336e-05, "loss": 0.0061, "step": 14308 }, { "epoch": 15.794036443953617, "grad_norm": 0.20286031067371368, "learning_rate": 2.8565000000000002e-05, "loss": 0.0126, "step": 14309 }, { "epoch": 15.795140806184428, "grad_norm": 0.27715352177619934, "learning_rate": 2.8564666666666668e-05, "loss": 0.011, "step": 14310 }, { "epoch": 15.79624516841524, "grad_norm": 0.22236424684524536, "learning_rate": 2.8564333333333334e-05, "loss": 0.0099, "step": 14311 }, { "epoch": 15.797349530646052, "grad_norm": 0.27438920736312866, "learning_rate": 2.8564000000000003e-05, "loss": 0.0049, "step": 14312 }, { "epoch": 15.798453892876864, "grad_norm": 0.26071467995643616, "learning_rate": 2.8563666666666666e-05, "loss": 0.0112, "step": 14313 }, { "epoch": 15.799558255107675, "grad_norm": 0.22156922519207, "learning_rate": 2.8563333333333335e-05, "loss": 0.006, "step": 14314 }, { "epoch": 
15.800662617338487, "grad_norm": 0.19835799932479858, "learning_rate": 2.8563e-05, "loss": 0.006, "step": 14315 }, { "epoch": 15.8017669795693, "grad_norm": 0.25185951590538025, "learning_rate": 2.8562666666666667e-05, "loss": 0.006, "step": 14316 }, { "epoch": 15.80287134180011, "grad_norm": 0.1777193248271942, "learning_rate": 2.8562333333333336e-05, "loss": 0.0062, "step": 14317 }, { "epoch": 15.803975704030922, "grad_norm": 0.33763423562049866, "learning_rate": 2.8562e-05, "loss": 0.0084, "step": 14318 }, { "epoch": 15.805080066261734, "grad_norm": 0.14784173667430878, "learning_rate": 2.8561666666666668e-05, "loss": 0.0088, "step": 14319 }, { "epoch": 15.806184428492546, "grad_norm": 0.3007485568523407, "learning_rate": 2.8561333333333334e-05, "loss": 0.0074, "step": 14320 }, { "epoch": 15.807288790723357, "grad_norm": 0.14315778017044067, "learning_rate": 2.8561e-05, "loss": 0.0049, "step": 14321 }, { "epoch": 15.808393152954169, "grad_norm": 0.1712338626384735, "learning_rate": 2.8560666666666666e-05, "loss": 0.0107, "step": 14322 }, { "epoch": 15.809497515184981, "grad_norm": 0.15707090497016907, "learning_rate": 2.8560333333333335e-05, "loss": 0.0058, "step": 14323 }, { "epoch": 15.810601877415792, "grad_norm": 0.16865333914756775, "learning_rate": 2.856e-05, "loss": 0.0061, "step": 14324 }, { "epoch": 15.811706239646604, "grad_norm": 0.20105376839637756, "learning_rate": 2.8559666666666667e-05, "loss": 0.0074, "step": 14325 }, { "epoch": 15.812810601877416, "grad_norm": 2.192776679992676, "learning_rate": 2.8559333333333336e-05, "loss": 0.0295, "step": 14326 }, { "epoch": 15.813914964108228, "grad_norm": 0.2748767137527466, "learning_rate": 2.8559e-05, "loss": 0.0108, "step": 14327 }, { "epoch": 15.815019326339039, "grad_norm": 0.33579182624816895, "learning_rate": 2.8558666666666668e-05, "loss": 0.0107, "step": 14328 }, { "epoch": 15.816123688569851, "grad_norm": 0.19483204185962677, "learning_rate": 2.8558333333333334e-05, "loss": 0.0118, "step": 14329 
}, { "epoch": 15.817228050800663, "grad_norm": 0.28197211027145386, "learning_rate": 2.8558e-05, "loss": 0.0097, "step": 14330 }, { "epoch": 15.818332413031474, "grad_norm": 0.18690936267375946, "learning_rate": 2.8557666666666666e-05, "loss": 0.0071, "step": 14331 }, { "epoch": 15.819436775262286, "grad_norm": 0.23277674615383148, "learning_rate": 2.8557333333333335e-05, "loss": 0.0091, "step": 14332 }, { "epoch": 15.820541137493098, "grad_norm": 0.3385266661643982, "learning_rate": 2.8557e-05, "loss": 0.0115, "step": 14333 }, { "epoch": 15.821645499723909, "grad_norm": 0.31203868985176086, "learning_rate": 2.8556666666666667e-05, "loss": 0.024, "step": 14334 }, { "epoch": 15.82274986195472, "grad_norm": 0.38471707701683044, "learning_rate": 2.8556333333333336e-05, "loss": 0.0092, "step": 14335 }, { "epoch": 15.823854224185533, "grad_norm": 0.35868868231773376, "learning_rate": 2.8556e-05, "loss": 0.0146, "step": 14336 }, { "epoch": 15.824958586416345, "grad_norm": 0.38634294271469116, "learning_rate": 2.855566666666667e-05, "loss": 0.011, "step": 14337 }, { "epoch": 15.826062948647156, "grad_norm": 0.35250991582870483, "learning_rate": 2.8555333333333334e-05, "loss": 0.0138, "step": 14338 }, { "epoch": 15.827167310877968, "grad_norm": 0.33231163024902344, "learning_rate": 2.8555e-05, "loss": 0.007, "step": 14339 }, { "epoch": 15.82827167310878, "grad_norm": 0.24119286239147186, "learning_rate": 2.8554666666666666e-05, "loss": 0.0075, "step": 14340 }, { "epoch": 15.82937603533959, "grad_norm": 0.8273525834083557, "learning_rate": 2.8554333333333335e-05, "loss": 0.2112, "step": 14341 }, { "epoch": 15.830480397570403, "grad_norm": 0.6363665461540222, "learning_rate": 2.8554e-05, "loss": 0.1054, "step": 14342 }, { "epoch": 15.831584759801215, "grad_norm": 0.4334680438041687, "learning_rate": 2.8553666666666667e-05, "loss": 0.1107, "step": 14343 }, { "epoch": 15.832689122032026, "grad_norm": 0.34596750140190125, "learning_rate": 2.8553333333333333e-05, "loss": 0.0786, 
"step": 14344 }, { "epoch": 15.833793484262838, "grad_norm": 0.5647856593132019, "learning_rate": 2.8553e-05, "loss": 0.1387, "step": 14345 }, { "epoch": 15.83489784649365, "grad_norm": 0.2933362126350403, "learning_rate": 2.855266666666667e-05, "loss": 0.0415, "step": 14346 }, { "epoch": 15.836002208724462, "grad_norm": 0.38855573534965515, "learning_rate": 2.855233333333333e-05, "loss": 0.0528, "step": 14347 }, { "epoch": 15.837106570955273, "grad_norm": 0.3122207224369049, "learning_rate": 2.8552e-05, "loss": 0.0618, "step": 14348 }, { "epoch": 15.838210933186085, "grad_norm": 0.37590426206588745, "learning_rate": 2.855166666666667e-05, "loss": 0.0366, "step": 14349 }, { "epoch": 15.839315295416897, "grad_norm": 0.21994875371456146, "learning_rate": 2.8551333333333332e-05, "loss": 0.0132, "step": 14350 }, { "epoch": 15.840419657647708, "grad_norm": 0.24943947792053223, "learning_rate": 2.8551e-05, "loss": 0.0115, "step": 14351 }, { "epoch": 15.84152401987852, "grad_norm": 0.14160355925559998, "learning_rate": 2.8550666666666668e-05, "loss": 0.01, "step": 14352 }, { "epoch": 15.842628382109332, "grad_norm": 0.3419751226902008, "learning_rate": 2.8550333333333333e-05, "loss": 0.0173, "step": 14353 }, { "epoch": 15.843732744340144, "grad_norm": 0.3369568884372711, "learning_rate": 2.855e-05, "loss": 0.0359, "step": 14354 }, { "epoch": 15.844837106570955, "grad_norm": 0.1143149584531784, "learning_rate": 2.854966666666667e-05, "loss": 0.0059, "step": 14355 }, { "epoch": 15.845941468801767, "grad_norm": 0.17137737572193146, "learning_rate": 2.854933333333333e-05, "loss": 0.0111, "step": 14356 }, { "epoch": 15.84704583103258, "grad_norm": 0.17129352688789368, "learning_rate": 2.8549e-05, "loss": 0.0059, "step": 14357 }, { "epoch": 15.84815019326339, "grad_norm": 0.39068493247032166, "learning_rate": 2.854866666666667e-05, "loss": 0.014, "step": 14358 }, { "epoch": 15.849254555494202, "grad_norm": 0.16091108322143555, "learning_rate": 2.8548333333333332e-05, "loss": 
0.0087, "step": 14359 }, { "epoch": 15.850358917725014, "grad_norm": 0.2951662540435791, "learning_rate": 2.8548000000000002e-05, "loss": 0.0071, "step": 14360 }, { "epoch": 15.851463279955826, "grad_norm": 0.14709430932998657, "learning_rate": 2.8547666666666668e-05, "loss": 0.005, "step": 14361 }, { "epoch": 15.852567642186637, "grad_norm": 0.30631351470947266, "learning_rate": 2.8547333333333334e-05, "loss": 0.0327, "step": 14362 }, { "epoch": 15.853672004417449, "grad_norm": 0.23129217326641083, "learning_rate": 2.8547e-05, "loss": 0.0068, "step": 14363 }, { "epoch": 15.854776366648261, "grad_norm": 0.8515650629997253, "learning_rate": 2.854666666666667e-05, "loss": 0.0183, "step": 14364 }, { "epoch": 15.855880728879072, "grad_norm": 0.15415962040424347, "learning_rate": 2.8546333333333335e-05, "loss": 0.0244, "step": 14365 }, { "epoch": 15.856985091109884, "grad_norm": 0.1705590784549713, "learning_rate": 2.8546e-05, "loss": 0.0117, "step": 14366 }, { "epoch": 15.858089453340696, "grad_norm": 0.229379802942276, "learning_rate": 2.854566666666667e-05, "loss": 0.0109, "step": 14367 }, { "epoch": 15.859193815571507, "grad_norm": 0.24265798926353455, "learning_rate": 2.8545333333333333e-05, "loss": 0.0097, "step": 14368 }, { "epoch": 15.860298177802319, "grad_norm": 0.6570446491241455, "learning_rate": 2.8545000000000002e-05, "loss": 0.0244, "step": 14369 }, { "epoch": 15.861402540033131, "grad_norm": 0.2672165632247925, "learning_rate": 2.8544666666666668e-05, "loss": 0.0077, "step": 14370 }, { "epoch": 15.862506902263943, "grad_norm": 0.18985538184642792, "learning_rate": 2.8544333333333334e-05, "loss": 0.0109, "step": 14371 }, { "epoch": 15.863611264494754, "grad_norm": 0.23296916484832764, "learning_rate": 2.8544e-05, "loss": 0.0137, "step": 14372 }, { "epoch": 15.864715626725566, "grad_norm": 0.31184107065200806, "learning_rate": 2.854366666666667e-05, "loss": 0.0189, "step": 14373 }, { "epoch": 15.865819988956378, "grad_norm": 0.2522561252117157, 
"learning_rate": 2.8543333333333335e-05, "loss": 0.0073, "step": 14374 }, { "epoch": 15.866924351187189, "grad_norm": 0.27471402287483215, "learning_rate": 2.8543e-05, "loss": 0.0101, "step": 14375 }, { "epoch": 15.868028713418001, "grad_norm": 0.09868326038122177, "learning_rate": 2.8542666666666667e-05, "loss": 0.0047, "step": 14376 }, { "epoch": 15.869133075648813, "grad_norm": 0.8517089486122131, "learning_rate": 2.8542333333333333e-05, "loss": 0.0147, "step": 14377 }, { "epoch": 15.870237437879624, "grad_norm": 0.5311281085014343, "learning_rate": 2.8542000000000002e-05, "loss": 0.0233, "step": 14378 }, { "epoch": 15.871341800110436, "grad_norm": 0.2764798402786255, "learning_rate": 2.8541666666666665e-05, "loss": 0.0107, "step": 14379 }, { "epoch": 15.872446162341248, "grad_norm": 0.19773338735103607, "learning_rate": 2.8541333333333334e-05, "loss": 0.0055, "step": 14380 }, { "epoch": 15.87355052457206, "grad_norm": 0.22562699019908905, "learning_rate": 2.8541e-05, "loss": 0.0065, "step": 14381 }, { "epoch": 15.87465488680287, "grad_norm": 0.25468510389328003, "learning_rate": 2.8540666666666666e-05, "loss": 0.0083, "step": 14382 }, { "epoch": 15.875759249033683, "grad_norm": 0.09371752291917801, "learning_rate": 2.8540333333333335e-05, "loss": 0.0036, "step": 14383 }, { "epoch": 15.876863611264495, "grad_norm": 0.15883582830429077, "learning_rate": 2.854e-05, "loss": 0.0044, "step": 14384 }, { "epoch": 15.877967973495306, "grad_norm": 0.18377478420734406, "learning_rate": 2.8539666666666667e-05, "loss": 0.0072, "step": 14385 }, { "epoch": 15.879072335726118, "grad_norm": 0.22918172180652618, "learning_rate": 2.8539333333333333e-05, "loss": 0.0087, "step": 14386 }, { "epoch": 15.88017669795693, "grad_norm": 0.2972988188266754, "learning_rate": 2.8539000000000002e-05, "loss": 0.0186, "step": 14387 }, { "epoch": 15.881281060187742, "grad_norm": 0.4266006350517273, "learning_rate": 2.8538666666666665e-05, "loss": 0.0132, "step": 14388 }, { "epoch": 
15.882385422418553, "grad_norm": 0.4082518219947815, "learning_rate": 2.8538333333333334e-05, "loss": 0.0129, "step": 14389 }, { "epoch": 15.883489784649365, "grad_norm": 0.19281914830207825, "learning_rate": 2.8538e-05, "loss": 0.0085, "step": 14390 }, { "epoch": 15.884594146880177, "grad_norm": 0.5098370313644409, "learning_rate": 2.8537666666666666e-05, "loss": 0.1558, "step": 14391 }, { "epoch": 15.885698509110988, "grad_norm": 0.5082809329032898, "learning_rate": 2.8537333333333335e-05, "loss": 0.0954, "step": 14392 }, { "epoch": 15.8868028713418, "grad_norm": 0.673779308795929, "learning_rate": 2.8537e-05, "loss": 0.1277, "step": 14393 }, { "epoch": 15.887907233572612, "grad_norm": 0.3957371413707733, "learning_rate": 2.8536666666666667e-05, "loss": 0.0704, "step": 14394 }, { "epoch": 15.889011595803424, "grad_norm": 0.3361409902572632, "learning_rate": 2.8536333333333333e-05, "loss": 0.0485, "step": 14395 }, { "epoch": 15.890115958034235, "grad_norm": 0.3619401752948761, "learning_rate": 2.8536000000000003e-05, "loss": 0.0679, "step": 14396 }, { "epoch": 15.891220320265047, "grad_norm": 0.4389525353908539, "learning_rate": 2.8535666666666665e-05, "loss": 0.028, "step": 14397 }, { "epoch": 15.89232468249586, "grad_norm": 0.3031812310218811, "learning_rate": 2.8535333333333334e-05, "loss": 0.0337, "step": 14398 }, { "epoch": 15.89342904472667, "grad_norm": 0.274764746427536, "learning_rate": 2.8535000000000004e-05, "loss": 0.0304, "step": 14399 }, { "epoch": 15.894533406957482, "grad_norm": 0.19114208221435547, "learning_rate": 2.8534666666666666e-05, "loss": 0.0118, "step": 14400 }, { "epoch": 15.895637769188294, "grad_norm": 0.39723676443099976, "learning_rate": 2.8534333333333336e-05, "loss": 0.0529, "step": 14401 }, { "epoch": 15.896742131419105, "grad_norm": 0.1737382858991623, "learning_rate": 2.8534e-05, "loss": 0.0133, "step": 14402 }, { "epoch": 15.897846493649917, "grad_norm": 0.1769777536392212, "learning_rate": 2.8533666666666668e-05, "loss": 
0.0069, "step": 14403 }, { "epoch": 15.898950855880729, "grad_norm": 0.23375144600868225, "learning_rate": 2.8533333333333333e-05, "loss": 0.0117, "step": 14404 }, { "epoch": 15.900055218111541, "grad_norm": 0.21423958241939545, "learning_rate": 2.8533e-05, "loss": 0.0078, "step": 14405 }, { "epoch": 15.901159580342352, "grad_norm": 0.11207827925682068, "learning_rate": 2.8532666666666665e-05, "loss": 0.0059, "step": 14406 }, { "epoch": 15.902263942573164, "grad_norm": 0.11328896880149841, "learning_rate": 2.8532333333333335e-05, "loss": 0.0058, "step": 14407 }, { "epoch": 15.903368304803976, "grad_norm": 0.34346461296081543, "learning_rate": 2.8532e-05, "loss": 0.0092, "step": 14408 }, { "epoch": 15.904472667034787, "grad_norm": 0.1347380131483078, "learning_rate": 2.8531666666666667e-05, "loss": 0.0072, "step": 14409 }, { "epoch": 15.905577029265599, "grad_norm": 0.2094220668077469, "learning_rate": 2.8531333333333336e-05, "loss": 0.0114, "step": 14410 }, { "epoch": 15.906681391496411, "grad_norm": 0.18892475962638855, "learning_rate": 2.8531e-05, "loss": 0.013, "step": 14411 }, { "epoch": 15.907785753727222, "grad_norm": 0.2827717959880829, "learning_rate": 2.8530666666666668e-05, "loss": 0.0121, "step": 14412 }, { "epoch": 15.908890115958034, "grad_norm": 0.12947097420692444, "learning_rate": 2.8530333333333334e-05, "loss": 0.0081, "step": 14413 }, { "epoch": 15.909994478188846, "grad_norm": 0.08529837429523468, "learning_rate": 2.853e-05, "loss": 0.0044, "step": 14414 }, { "epoch": 15.911098840419658, "grad_norm": 0.21798455715179443, "learning_rate": 2.852966666666667e-05, "loss": 0.0083, "step": 14415 }, { "epoch": 15.912203202650469, "grad_norm": 0.14628395438194275, "learning_rate": 2.8529333333333335e-05, "loss": 0.0098, "step": 14416 }, { "epoch": 15.913307564881281, "grad_norm": 0.10297919809818268, "learning_rate": 2.8529e-05, "loss": 0.0032, "step": 14417 }, { "epoch": 15.914411927112093, "grad_norm": 0.28761914372444153, "learning_rate": 
2.8528666666666667e-05, "loss": 0.007, "step": 14418 }, { "epoch": 15.915516289342904, "grad_norm": 0.09851904213428497, "learning_rate": 2.8528333333333336e-05, "loss": 0.0052, "step": 14419 }, { "epoch": 15.916620651573716, "grad_norm": 0.16024769842624664, "learning_rate": 2.8528e-05, "loss": 0.0073, "step": 14420 }, { "epoch": 15.917725013804528, "grad_norm": 0.5736392736434937, "learning_rate": 2.8527666666666668e-05, "loss": 0.0119, "step": 14421 }, { "epoch": 15.91882937603534, "grad_norm": 0.11241528391838074, "learning_rate": 2.8527333333333334e-05, "loss": 0.0043, "step": 14422 }, { "epoch": 15.91993373826615, "grad_norm": 0.12554264068603516, "learning_rate": 2.8527e-05, "loss": 0.0078, "step": 14423 }, { "epoch": 15.921038100496963, "grad_norm": 0.14629927277565002, "learning_rate": 2.852666666666667e-05, "loss": 0.0066, "step": 14424 }, { "epoch": 15.922142462727775, "grad_norm": 0.32129228115081787, "learning_rate": 2.8526333333333335e-05, "loss": 0.0121, "step": 14425 }, { "epoch": 15.923246824958586, "grad_norm": 0.2719438076019287, "learning_rate": 2.8526e-05, "loss": 0.0132, "step": 14426 }, { "epoch": 15.924351187189398, "grad_norm": 0.31087982654571533, "learning_rate": 2.8525666666666667e-05, "loss": 0.0108, "step": 14427 }, { "epoch": 15.92545554942021, "grad_norm": 0.45058661699295044, "learning_rate": 2.8525333333333336e-05, "loss": 0.0095, "step": 14428 }, { "epoch": 15.926559911651022, "grad_norm": 0.26981306076049805, "learning_rate": 2.8525e-05, "loss": 0.0062, "step": 14429 }, { "epoch": 15.927664273881833, "grad_norm": 0.07208142429590225, "learning_rate": 2.8524666666666668e-05, "loss": 0.0026, "step": 14430 }, { "epoch": 15.928768636112645, "grad_norm": 0.4617942273616791, "learning_rate": 2.8524333333333334e-05, "loss": 0.0127, "step": 14431 }, { "epoch": 15.929872998343457, "grad_norm": 0.392049103975296, "learning_rate": 2.8524e-05, "loss": 0.0155, "step": 14432 }, { "epoch": 15.930977360574268, "grad_norm": 0.3623062074184418, 
"learning_rate": 2.852366666666667e-05, "loss": 0.0076, "step": 14433 }, { "epoch": 15.93208172280508, "grad_norm": 0.5689537525177002, "learning_rate": 2.8523333333333335e-05, "loss": 0.0146, "step": 14434 }, { "epoch": 15.933186085035892, "grad_norm": 0.426532506942749, "learning_rate": 2.8523e-05, "loss": 0.0075, "step": 14435 }, { "epoch": 15.934290447266704, "grad_norm": 0.2845058739185333, "learning_rate": 2.8522666666666667e-05, "loss": 0.0144, "step": 14436 }, { "epoch": 15.935394809497515, "grad_norm": 0.09585607796907425, "learning_rate": 2.8522333333333333e-05, "loss": 0.0036, "step": 14437 }, { "epoch": 15.936499171728327, "grad_norm": 0.21008934080600739, "learning_rate": 2.8522e-05, "loss": 0.0098, "step": 14438 }, { "epoch": 15.93760353395914, "grad_norm": 0.4839317202568054, "learning_rate": 2.852166666666667e-05, "loss": 0.0139, "step": 14439 }, { "epoch": 15.93870789618995, "grad_norm": 0.42669814825057983, "learning_rate": 2.8521333333333334e-05, "loss": 0.0102, "step": 14440 }, { "epoch": 15.939812258420762, "grad_norm": 0.5239014625549316, "learning_rate": 2.8521e-05, "loss": 0.1627, "step": 14441 }, { "epoch": 15.940916620651574, "grad_norm": 0.5430079102516174, "learning_rate": 2.852066666666667e-05, "loss": 0.1307, "step": 14442 }, { "epoch": 15.942020982882385, "grad_norm": 0.47188523411750793, "learning_rate": 2.8520333333333332e-05, "loss": 0.1147, "step": 14443 }, { "epoch": 15.943125345113197, "grad_norm": 0.3137563467025757, "learning_rate": 2.852e-05, "loss": 0.1041, "step": 14444 }, { "epoch": 15.94422970734401, "grad_norm": 0.31184956431388855, "learning_rate": 2.8519666666666667e-05, "loss": 0.0818, "step": 14445 }, { "epoch": 15.945334069574821, "grad_norm": 0.3687973916530609, "learning_rate": 2.8519333333333333e-05, "loss": 0.0453, "step": 14446 }, { "epoch": 15.946438431805632, "grad_norm": 0.24993140995502472, "learning_rate": 2.8519e-05, "loss": 0.035, "step": 14447 }, { "epoch": 15.947542794036444, "grad_norm": 
0.21241867542266846, "learning_rate": 2.851866666666667e-05, "loss": 0.0264, "step": 14448 }, { "epoch": 15.948647156267256, "grad_norm": 1.8359824419021606, "learning_rate": 2.8518333333333334e-05, "loss": 0.0245, "step": 14449 }, { "epoch": 15.949751518498067, "grad_norm": 0.21223853528499603, "learning_rate": 2.8518e-05, "loss": 0.0198, "step": 14450 }, { "epoch": 15.950855880728879, "grad_norm": 0.31675487756729126, "learning_rate": 2.851766666666667e-05, "loss": 0.0154, "step": 14451 }, { "epoch": 15.951960242959691, "grad_norm": 0.8292490243911743, "learning_rate": 2.8517333333333332e-05, "loss": 0.0408, "step": 14452 }, { "epoch": 15.953064605190502, "grad_norm": 0.12574402987957, "learning_rate": 2.8517e-05, "loss": 0.0085, "step": 14453 }, { "epoch": 15.954168967421314, "grad_norm": 0.18183660507202148, "learning_rate": 2.8516666666666668e-05, "loss": 0.0103, "step": 14454 }, { "epoch": 15.955273329652126, "grad_norm": 0.1744089424610138, "learning_rate": 2.8516333333333333e-05, "loss": 0.0098, "step": 14455 }, { "epoch": 15.956377691882938, "grad_norm": 0.3766566514968872, "learning_rate": 2.8516e-05, "loss": 0.0116, "step": 14456 }, { "epoch": 15.957482054113749, "grad_norm": 0.21621547639369965, "learning_rate": 2.851566666666667e-05, "loss": 0.0146, "step": 14457 }, { "epoch": 15.958586416344561, "grad_norm": 0.15115667879581451, "learning_rate": 2.8515333333333335e-05, "loss": 0.0055, "step": 14458 }, { "epoch": 15.959690778575373, "grad_norm": 0.4535285532474518, "learning_rate": 2.8515e-05, "loss": 0.0058, "step": 14459 }, { "epoch": 15.960795140806184, "grad_norm": 0.2433706521987915, "learning_rate": 2.851466666666667e-05, "loss": 0.0075, "step": 14460 }, { "epoch": 15.961899503036996, "grad_norm": 0.1557299643754959, "learning_rate": 2.8514333333333332e-05, "loss": 0.0074, "step": 14461 }, { "epoch": 15.963003865267808, "grad_norm": 0.10988333821296692, "learning_rate": 2.8514000000000002e-05, "loss": 0.0076, "step": 14462 }, { "epoch": 
15.96410822749862, "grad_norm": 0.2431064397096634, "learning_rate": 2.8513666666666668e-05, "loss": 0.011, "step": 14463 }, { "epoch": 15.96521258972943, "grad_norm": 0.6907367706298828, "learning_rate": 2.8513333333333334e-05, "loss": 0.0167, "step": 14464 }, { "epoch": 15.966316951960243, "grad_norm": 0.08304177224636078, "learning_rate": 2.8513000000000003e-05, "loss": 0.0041, "step": 14465 }, { "epoch": 15.967421314191055, "grad_norm": 0.2300366312265396, "learning_rate": 2.8512666666666666e-05, "loss": 0.0124, "step": 14466 }, { "epoch": 15.968525676421866, "grad_norm": 0.24040186405181885, "learning_rate": 2.8512333333333335e-05, "loss": 0.006, "step": 14467 }, { "epoch": 15.969630038652678, "grad_norm": 0.14489835500717163, "learning_rate": 2.8512e-05, "loss": 0.006, "step": 14468 }, { "epoch": 15.97073440088349, "grad_norm": 0.25173619389533997, "learning_rate": 2.8511666666666667e-05, "loss": 0.0073, "step": 14469 }, { "epoch": 15.971838763114302, "grad_norm": 0.2575136423110962, "learning_rate": 2.8511333333333333e-05, "loss": 0.0083, "step": 14470 }, { "epoch": 15.972943125345113, "grad_norm": 0.2769523561000824, "learning_rate": 2.8511000000000002e-05, "loss": 0.0104, "step": 14471 }, { "epoch": 15.974047487575925, "grad_norm": 0.17637476325035095, "learning_rate": 2.8510666666666665e-05, "loss": 0.0076, "step": 14472 }, { "epoch": 15.975151849806737, "grad_norm": 0.2309902310371399, "learning_rate": 2.8510333333333334e-05, "loss": 0.0049, "step": 14473 }, { "epoch": 15.976256212037548, "grad_norm": 0.45150554180145264, "learning_rate": 2.8510000000000003e-05, "loss": 0.0062, "step": 14474 }, { "epoch": 15.97736057426836, "grad_norm": 0.49167314171791077, "learning_rate": 2.8509666666666666e-05, "loss": 0.0105, "step": 14475 }, { "epoch": 15.978464936499172, "grad_norm": 0.277007520198822, "learning_rate": 2.8509333333333335e-05, "loss": 0.005, "step": 14476 }, { "epoch": 15.979569298729983, "grad_norm": 0.3426642119884491, "learning_rate": 2.8509e-05, 
"loss": 0.016, "step": 14477 }, { "epoch": 15.980673660960795, "grad_norm": 0.20839746296405792, "learning_rate": 2.8508666666666667e-05, "loss": 0.0092, "step": 14478 }, { "epoch": 15.981778023191607, "grad_norm": 0.15581655502319336, "learning_rate": 2.8508333333333333e-05, "loss": 0.0106, "step": 14479 }, { "epoch": 15.98288238542242, "grad_norm": 0.2610301673412323, "learning_rate": 2.8508000000000002e-05, "loss": 0.0066, "step": 14480 }, { "epoch": 15.98398674765323, "grad_norm": 0.2387760430574417, "learning_rate": 2.8507666666666665e-05, "loss": 0.0082, "step": 14481 }, { "epoch": 15.985091109884042, "grad_norm": 0.21166189014911652, "learning_rate": 2.8507333333333334e-05, "loss": 0.007, "step": 14482 }, { "epoch": 15.986195472114854, "grad_norm": 0.23895344138145447, "learning_rate": 2.8507000000000003e-05, "loss": 0.0057, "step": 14483 }, { "epoch": 15.987299834345665, "grad_norm": 0.32726767659187317, "learning_rate": 2.8506666666666666e-05, "loss": 0.0156, "step": 14484 }, { "epoch": 15.988404196576477, "grad_norm": 0.5124574303627014, "learning_rate": 2.8506333333333335e-05, "loss": 0.0119, "step": 14485 }, { "epoch": 15.98950855880729, "grad_norm": 0.16068744659423828, "learning_rate": 2.8506e-05, "loss": 0.0056, "step": 14486 }, { "epoch": 15.9906129210381, "grad_norm": 0.33419564366340637, "learning_rate": 2.8505666666666667e-05, "loss": 0.0157, "step": 14487 }, { "epoch": 15.991717283268912, "grad_norm": 0.34037289023399353, "learning_rate": 2.8505333333333333e-05, "loss": 0.013, "step": 14488 }, { "epoch": 15.992821645499724, "grad_norm": 0.17402838170528412, "learning_rate": 2.8505000000000002e-05, "loss": 0.0055, "step": 14489 }, { "epoch": 15.993926007730536, "grad_norm": 0.9757802486419678, "learning_rate": 2.8504666666666668e-05, "loss": 0.0205, "step": 14490 }, { "epoch": 15.995030369961347, "grad_norm": 0.715382993221283, "learning_rate": 2.8504333333333334e-05, "loss": 0.1101, "step": 14491 }, { "epoch": 15.996134732192159, "grad_norm": 
0.6069475412368774, "learning_rate": 2.8504000000000004e-05, "loss": 0.0341, "step": 14492 }, { "epoch": 15.997239094422971, "grad_norm": 0.3158799111843109, "learning_rate": 2.8503666666666666e-05, "loss": 0.0117, "step": 14493 }, { "epoch": 15.998343456653782, "grad_norm": 0.3052680790424347, "learning_rate": 2.8503333333333335e-05, "loss": 0.0111, "step": 14494 }, { "epoch": 15.999447818884594, "grad_norm": 0.5346364378929138, "learning_rate": 2.8502999999999998e-05, "loss": 0.0082, "step": 14495 }, { "epoch": 16.0, "grad_norm": 0.1774451732635498, "learning_rate": 2.8502666666666667e-05, "loss": 0.0012, "step": 14496 }, { "epoch": 16.001104362230812, "grad_norm": 0.5261469483375549, "learning_rate": 2.8502333333333333e-05, "loss": 0.1504, "step": 14497 }, { "epoch": 16.002208724461624, "grad_norm": 0.7411925196647644, "learning_rate": 2.8502e-05, "loss": 0.1417, "step": 14498 }, { "epoch": 16.003313086692437, "grad_norm": 0.37910911440849304, "learning_rate": 2.850166666666667e-05, "loss": 0.0764, "step": 14499 }, { "epoch": 16.004417448923245, "grad_norm": 0.391973614692688, "learning_rate": 2.8501333333333334e-05, "loss": 0.0608, "step": 14500 }, { "epoch": 16.005521811154058, "grad_norm": 0.6220912933349609, "learning_rate": 2.8501e-05, "loss": 0.0857, "step": 14501 }, { "epoch": 16.00662617338487, "grad_norm": 0.3655238151550293, "learning_rate": 2.8500666666666666e-05, "loss": 0.0512, "step": 14502 }, { "epoch": 16.007730535615682, "grad_norm": 0.3730216324329376, "learning_rate": 2.8500333333333336e-05, "loss": 0.0404, "step": 14503 }, { "epoch": 16.008834897846494, "grad_norm": 0.3493739068508148, "learning_rate": 2.8499999999999998e-05, "loss": 0.0345, "step": 14504 }, { "epoch": 16.009939260077306, "grad_norm": 0.17380808293819427, "learning_rate": 2.8499666666666668e-05, "loss": 0.0209, "step": 14505 }, { "epoch": 16.01104362230812, "grad_norm": 0.11044097691774368, "learning_rate": 2.8499333333333333e-05, "loss": 0.0082, "step": 14506 }, { "epoch": 
16.012147984538927, "grad_norm": 0.23924367129802704, "learning_rate": 2.8499e-05, "loss": 0.0215, "step": 14507 }, { "epoch": 16.01325234676974, "grad_norm": 0.17560933530330658, "learning_rate": 2.849866666666667e-05, "loss": 0.0061, "step": 14508 }, { "epoch": 16.014356709000552, "grad_norm": 0.13184531033039093, "learning_rate": 2.8498333333333335e-05, "loss": 0.0121, "step": 14509 }, { "epoch": 16.015461071231364, "grad_norm": 0.16428890824317932, "learning_rate": 2.8498e-05, "loss": 0.0073, "step": 14510 }, { "epoch": 16.016565433462176, "grad_norm": 0.11457754671573639, "learning_rate": 2.8497666666666667e-05, "loss": 0.0064, "step": 14511 }, { "epoch": 16.01766979569299, "grad_norm": 0.39523977041244507, "learning_rate": 2.8497333333333336e-05, "loss": 0.0378, "step": 14512 }, { "epoch": 16.0187741579238, "grad_norm": 0.2576574981212616, "learning_rate": 2.8497e-05, "loss": 0.0062, "step": 14513 }, { "epoch": 16.01987852015461, "grad_norm": 0.320517897605896, "learning_rate": 2.8496666666666668e-05, "loss": 0.0096, "step": 14514 }, { "epoch": 16.02098288238542, "grad_norm": 0.27774178981781006, "learning_rate": 2.8496333333333337e-05, "loss": 0.0108, "step": 14515 }, { "epoch": 16.022087244616234, "grad_norm": 0.19539114832878113, "learning_rate": 2.8496e-05, "loss": 0.0076, "step": 14516 }, { "epoch": 16.023191606847046, "grad_norm": 0.19537398219108582, "learning_rate": 2.849566666666667e-05, "loss": 0.0087, "step": 14517 }, { "epoch": 16.02429596907786, "grad_norm": 0.13432201743125916, "learning_rate": 2.8495333333333335e-05, "loss": 0.0061, "step": 14518 }, { "epoch": 16.02540033130867, "grad_norm": 0.6327117681503296, "learning_rate": 2.8495e-05, "loss": 0.0133, "step": 14519 }, { "epoch": 16.02650469353948, "grad_norm": 0.076561838388443, "learning_rate": 2.8494666666666667e-05, "loss": 0.0046, "step": 14520 }, { "epoch": 16.02760905577029, "grad_norm": 0.15000563859939575, "learning_rate": 2.8494333333333336e-05, "loss": 0.0065, "step": 14521 }, { 
"epoch": 16.028713418001104, "grad_norm": 0.16564109921455383, "learning_rate": 2.8494e-05, "loss": 0.0099, "step": 14522 }, { "epoch": 16.029817780231916, "grad_norm": 0.16540095210075378, "learning_rate": 2.8493666666666668e-05, "loss": 0.0065, "step": 14523 }, { "epoch": 16.030922142462728, "grad_norm": 0.1462210863828659, "learning_rate": 2.8493333333333337e-05, "loss": 0.0134, "step": 14524 }, { "epoch": 16.03202650469354, "grad_norm": 0.2960096597671509, "learning_rate": 2.8493e-05, "loss": 0.0112, "step": 14525 }, { "epoch": 16.033130866924353, "grad_norm": 0.07113974541425705, "learning_rate": 2.849266666666667e-05, "loss": 0.004, "step": 14526 }, { "epoch": 16.03423522915516, "grad_norm": 0.18236884474754333, "learning_rate": 2.849233333333333e-05, "loss": 0.0063, "step": 14527 }, { "epoch": 16.035339591385974, "grad_norm": 0.05992089956998825, "learning_rate": 2.8492e-05, "loss": 0.0026, "step": 14528 }, { "epoch": 16.036443953616786, "grad_norm": 0.38213807344436646, "learning_rate": 2.8491666666666667e-05, "loss": 0.0075, "step": 14529 }, { "epoch": 16.037548315847598, "grad_norm": 0.11815396696329117, "learning_rate": 2.8491333333333333e-05, "loss": 0.0036, "step": 14530 }, { "epoch": 16.03865267807841, "grad_norm": 0.10454794019460678, "learning_rate": 2.8491e-05, "loss": 0.0062, "step": 14531 }, { "epoch": 16.039757040309222, "grad_norm": 0.23668473958969116, "learning_rate": 2.8490666666666668e-05, "loss": 0.0079, "step": 14532 }, { "epoch": 16.040861402540035, "grad_norm": 0.15915794670581818, "learning_rate": 2.8490333333333334e-05, "loss": 0.0063, "step": 14533 }, { "epoch": 16.041965764770843, "grad_norm": 0.4952057898044586, "learning_rate": 2.849e-05, "loss": 0.017, "step": 14534 }, { "epoch": 16.043070127001656, "grad_norm": 0.14217138290405273, "learning_rate": 2.848966666666667e-05, "loss": 0.0052, "step": 14535 }, { "epoch": 16.044174489232468, "grad_norm": 0.6006737351417542, "learning_rate": 2.8489333333333332e-05, "loss": 0.0062, 
"step": 14536 }, { "epoch": 16.04527885146328, "grad_norm": 0.7185475826263428, "learning_rate": 2.8489e-05, "loss": 0.0124, "step": 14537 }, { "epoch": 16.046383213694092, "grad_norm": 0.1551583856344223, "learning_rate": 2.8488666666666667e-05, "loss": 0.0062, "step": 14538 }, { "epoch": 16.047487575924904, "grad_norm": 0.08771208673715591, "learning_rate": 2.8488333333333333e-05, "loss": 0.0034, "step": 14539 }, { "epoch": 16.048591938155717, "grad_norm": 0.6783241629600525, "learning_rate": 2.8488000000000002e-05, "loss": 0.0202, "step": 14540 }, { "epoch": 16.049696300386525, "grad_norm": 0.9365737438201904, "learning_rate": 2.8487666666666668e-05, "loss": 0.0075, "step": 14541 }, { "epoch": 16.050800662617338, "grad_norm": 0.1828542798757553, "learning_rate": 2.8487333333333334e-05, "loss": 0.004, "step": 14542 }, { "epoch": 16.05190502484815, "grad_norm": 0.25297674536705017, "learning_rate": 2.8487e-05, "loss": 0.007, "step": 14543 }, { "epoch": 16.053009387078962, "grad_norm": 0.18110935389995575, "learning_rate": 2.848666666666667e-05, "loss": 0.0036, "step": 14544 }, { "epoch": 16.054113749309774, "grad_norm": 0.31769779324531555, "learning_rate": 2.8486333333333332e-05, "loss": 0.0078, "step": 14545 }, { "epoch": 16.055218111540587, "grad_norm": 0.26585841178894043, "learning_rate": 2.8486e-05, "loss": 0.0087, "step": 14546 }, { "epoch": 16.0563224737714, "grad_norm": 0.4923708140850067, "learning_rate": 2.8485666666666667e-05, "loss": 0.1926, "step": 14547 }, { "epoch": 16.057426836002207, "grad_norm": 0.408587247133255, "learning_rate": 2.8485333333333333e-05, "loss": 0.0906, "step": 14548 }, { "epoch": 16.05853119823302, "grad_norm": 0.46611636877059937, "learning_rate": 2.8485000000000003e-05, "loss": 0.0597, "step": 14549 }, { "epoch": 16.059635560463832, "grad_norm": 0.41046538949012756, "learning_rate": 2.848466666666667e-05, "loss": 0.0608, "step": 14550 }, { "epoch": 16.060739922694644, "grad_norm": 0.30924561619758606, "learning_rate": 
2.8484333333333334e-05, "loss": 0.0504, "step": 14551 }, { "epoch": 16.061844284925456, "grad_norm": 0.3210897445678711, "learning_rate": 2.8484e-05, "loss": 0.0499, "step": 14552 }, { "epoch": 16.06294864715627, "grad_norm": 0.2473226636648178, "learning_rate": 2.848366666666667e-05, "loss": 0.0242, "step": 14553 }, { "epoch": 16.064053009387077, "grad_norm": 0.5795220732688904, "learning_rate": 2.8483333333333332e-05, "loss": 0.0548, "step": 14554 }, { "epoch": 16.06515737161789, "grad_norm": 0.26987916231155396, "learning_rate": 2.8483e-05, "loss": 0.0256, "step": 14555 }, { "epoch": 16.0662617338487, "grad_norm": 0.2565647065639496, "learning_rate": 2.8482666666666664e-05, "loss": 0.0194, "step": 14556 }, { "epoch": 16.067366096079514, "grad_norm": 0.19593247771263123, "learning_rate": 2.8482333333333333e-05, "loss": 0.008, "step": 14557 }, { "epoch": 16.068470458310326, "grad_norm": 0.26534855365753174, "learning_rate": 2.8482000000000003e-05, "loss": 0.007, "step": 14558 }, { "epoch": 16.06957482054114, "grad_norm": 0.44349297881126404, "learning_rate": 2.8481666666666665e-05, "loss": 0.0189, "step": 14559 }, { "epoch": 16.07067918277195, "grad_norm": 0.24839408695697784, "learning_rate": 2.8481333333333335e-05, "loss": 0.0136, "step": 14560 }, { "epoch": 16.07178354500276, "grad_norm": 0.3114165663719177, "learning_rate": 2.8481e-05, "loss": 0.0076, "step": 14561 }, { "epoch": 16.07288790723357, "grad_norm": 0.28602248430252075, "learning_rate": 2.8480666666666667e-05, "loss": 0.014, "step": 14562 }, { "epoch": 16.073992269464384, "grad_norm": 0.24623677134513855, "learning_rate": 2.8480333333333332e-05, "loss": 0.0064, "step": 14563 }, { "epoch": 16.075096631695196, "grad_norm": 0.1624702364206314, "learning_rate": 2.8480000000000002e-05, "loss": 0.0059, "step": 14564 }, { "epoch": 16.076200993926008, "grad_norm": 0.29399535059928894, "learning_rate": 2.8479666666666668e-05, "loss": 0.0096, "step": 14565 }, { "epoch": 16.07730535615682, "grad_norm": 
0.17752188444137573, "learning_rate": 2.8479333333333334e-05, "loss": 0.0079, "step": 14566 }, { "epoch": 16.078409718387633, "grad_norm": 0.1302081197500229, "learning_rate": 2.8479000000000003e-05, "loss": 0.0082, "step": 14567 }, { "epoch": 16.07951408061844, "grad_norm": 0.22587241232395172, "learning_rate": 2.8478666666666666e-05, "loss": 0.0096, "step": 14568 }, { "epoch": 16.080618442849254, "grad_norm": 0.16168080270290375, "learning_rate": 2.8478333333333335e-05, "loss": 0.0066, "step": 14569 }, { "epoch": 16.081722805080066, "grad_norm": 0.15616925060749054, "learning_rate": 2.8478e-05, "loss": 0.0083, "step": 14570 }, { "epoch": 16.082827167310878, "grad_norm": 0.12956048548221588, "learning_rate": 2.8477666666666667e-05, "loss": 0.0037, "step": 14571 }, { "epoch": 16.08393152954169, "grad_norm": 0.25868508219718933, "learning_rate": 2.8477333333333333e-05, "loss": 0.0094, "step": 14572 }, { "epoch": 16.085035891772502, "grad_norm": 0.10780314356088638, "learning_rate": 2.8477000000000002e-05, "loss": 0.0044, "step": 14573 }, { "epoch": 16.086140254003315, "grad_norm": 0.262358695268631, "learning_rate": 2.8476666666666668e-05, "loss": 0.0072, "step": 14574 }, { "epoch": 16.087244616234123, "grad_norm": 0.9488514065742493, "learning_rate": 2.8476333333333334e-05, "loss": 0.0114, "step": 14575 }, { "epoch": 16.088348978464936, "grad_norm": 0.12023059278726578, "learning_rate": 2.8476000000000003e-05, "loss": 0.0073, "step": 14576 }, { "epoch": 16.089453340695748, "grad_norm": 0.25178611278533936, "learning_rate": 2.8475666666666666e-05, "loss": 0.0071, "step": 14577 }, { "epoch": 16.09055770292656, "grad_norm": 0.42185214161872864, "learning_rate": 2.8475333333333335e-05, "loss": 0.0061, "step": 14578 }, { "epoch": 16.091662065157372, "grad_norm": 0.7469238042831421, "learning_rate": 2.8475e-05, "loss": 0.0161, "step": 14579 }, { "epoch": 16.092766427388185, "grad_norm": 0.23945766687393188, "learning_rate": 2.8474666666666667e-05, "loss": 0.0089, "step": 
14580 }, { "epoch": 16.093870789618997, "grad_norm": 0.33547279238700867, "learning_rate": 2.8474333333333333e-05, "loss": 0.007, "step": 14581 }, { "epoch": 16.094975151849805, "grad_norm": 0.17989160120487213, "learning_rate": 2.8474000000000002e-05, "loss": 0.0075, "step": 14582 }, { "epoch": 16.096079514080618, "grad_norm": 0.12998412549495697, "learning_rate": 2.8473666666666668e-05, "loss": 0.005, "step": 14583 }, { "epoch": 16.09718387631143, "grad_norm": 0.23676225543022156, "learning_rate": 2.8473333333333334e-05, "loss": 0.0086, "step": 14584 }, { "epoch": 16.098288238542242, "grad_norm": 0.31391507387161255, "learning_rate": 2.8473000000000003e-05, "loss": 0.0065, "step": 14585 }, { "epoch": 16.099392600773054, "grad_norm": 0.10737942904233932, "learning_rate": 2.8472666666666666e-05, "loss": 0.0042, "step": 14586 }, { "epoch": 16.100496963003867, "grad_norm": 0.18185539543628693, "learning_rate": 2.8472333333333335e-05, "loss": 0.0101, "step": 14587 }, { "epoch": 16.101601325234675, "grad_norm": 0.3231087923049927, "learning_rate": 2.8471999999999998e-05, "loss": 0.012, "step": 14588 }, { "epoch": 16.102705687465487, "grad_norm": 0.8082129955291748, "learning_rate": 2.8471666666666667e-05, "loss": 0.0319, "step": 14589 }, { "epoch": 16.1038100496963, "grad_norm": 0.3852614164352417, "learning_rate": 2.8471333333333336e-05, "loss": 0.0172, "step": 14590 }, { "epoch": 16.104914411927112, "grad_norm": 0.28721874952316284, "learning_rate": 2.8471e-05, "loss": 0.0065, "step": 14591 }, { "epoch": 16.106018774157924, "grad_norm": 0.4598626494407654, "learning_rate": 2.8470666666666668e-05, "loss": 0.0109, "step": 14592 }, { "epoch": 16.107123136388736, "grad_norm": 0.24199123680591583, "learning_rate": 2.8470333333333334e-05, "loss": 0.0057, "step": 14593 }, { "epoch": 16.10822749861955, "grad_norm": 0.29024800658226013, "learning_rate": 2.847e-05, "loss": 0.0113, "step": 14594 }, { "epoch": 16.109331860850357, "grad_norm": 0.4938352108001709, "learning_rate": 
2.8469666666666666e-05, "loss": 0.011, "step": 14595 }, { "epoch": 16.11043622308117, "grad_norm": 0.19517509639263153, "learning_rate": 2.8469333333333335e-05, "loss": 0.0043, "step": 14596 }, { "epoch": 16.11154058531198, "grad_norm": 0.6622889637947083, "learning_rate": 2.8468999999999998e-05, "loss": 0.1953, "step": 14597 }, { "epoch": 16.112644947542794, "grad_norm": 0.42449724674224854, "learning_rate": 2.8468666666666667e-05, "loss": 0.1183, "step": 14598 }, { "epoch": 16.113749309773606, "grad_norm": 0.4626365602016449, "learning_rate": 2.8468333333333337e-05, "loss": 0.0776, "step": 14599 }, { "epoch": 16.11485367200442, "grad_norm": 0.3521125912666321, "learning_rate": 2.8468e-05, "loss": 0.0696, "step": 14600 }, { "epoch": 16.11595803423523, "grad_norm": 0.543459415435791, "learning_rate": 2.846766666666667e-05, "loss": 0.0757, "step": 14601 }, { "epoch": 16.11706239646604, "grad_norm": 0.32317206263542175, "learning_rate": 2.8467333333333334e-05, "loss": 0.0485, "step": 14602 }, { "epoch": 16.11816675869685, "grad_norm": 0.3215307891368866, "learning_rate": 2.8467e-05, "loss": 0.0503, "step": 14603 }, { "epoch": 16.119271120927664, "grad_norm": 0.7145450711250305, "learning_rate": 2.8466666666666666e-05, "loss": 0.035, "step": 14604 }, { "epoch": 16.120375483158476, "grad_norm": 0.23498736321926117, "learning_rate": 2.8466333333333336e-05, "loss": 0.0203, "step": 14605 }, { "epoch": 16.12147984538929, "grad_norm": 0.223485067486763, "learning_rate": 2.8465999999999998e-05, "loss": 0.0087, "step": 14606 }, { "epoch": 16.1225842076201, "grad_norm": 0.4024428129196167, "learning_rate": 2.8465666666666667e-05, "loss": 0.0075, "step": 14607 }, { "epoch": 16.123688569850913, "grad_norm": 0.13750839233398438, "learning_rate": 2.8465333333333337e-05, "loss": 0.009, "step": 14608 }, { "epoch": 16.12479293208172, "grad_norm": 0.2204567939043045, "learning_rate": 2.8465e-05, "loss": 0.0094, "step": 14609 }, { "epoch": 16.125897294312534, "grad_norm": 
0.24376124143600464, "learning_rate": 2.846466666666667e-05, "loss": 0.0108, "step": 14610 }, { "epoch": 16.127001656543346, "grad_norm": 0.14711137115955353, "learning_rate": 2.8464333333333335e-05, "loss": 0.0059, "step": 14611 }, { "epoch": 16.128106018774158, "grad_norm": 0.1571028083562851, "learning_rate": 2.8464e-05, "loss": 0.01, "step": 14612 }, { "epoch": 16.12921038100497, "grad_norm": 0.3205995559692383, "learning_rate": 2.8463666666666666e-05, "loss": 0.0129, "step": 14613 }, { "epoch": 16.130314743235783, "grad_norm": 0.19437822699546814, "learning_rate": 2.8463333333333336e-05, "loss": 0.0113, "step": 14614 }, { "epoch": 16.131419105466595, "grad_norm": 0.12772515416145325, "learning_rate": 2.8463000000000002e-05, "loss": 0.0045, "step": 14615 }, { "epoch": 16.132523467697403, "grad_norm": 0.13420383632183075, "learning_rate": 2.8462666666666668e-05, "loss": 0.0105, "step": 14616 }, { "epoch": 16.133627829928216, "grad_norm": 0.2377341091632843, "learning_rate": 2.8462333333333334e-05, "loss": 0.0093, "step": 14617 }, { "epoch": 16.134732192159028, "grad_norm": 0.2627169191837311, "learning_rate": 2.8462e-05, "loss": 0.0138, "step": 14618 }, { "epoch": 16.13583655438984, "grad_norm": 0.2039884775876999, "learning_rate": 2.846166666666667e-05, "loss": 0.0081, "step": 14619 }, { "epoch": 16.136940916620652, "grad_norm": 0.23417401313781738, "learning_rate": 2.846133333333333e-05, "loss": 0.0236, "step": 14620 }, { "epoch": 16.138045278851465, "grad_norm": 0.2682601809501648, "learning_rate": 2.8461e-05, "loss": 0.0039, "step": 14621 }, { "epoch": 16.139149641082273, "grad_norm": 0.23488156497478485, "learning_rate": 2.8460666666666667e-05, "loss": 0.0081, "step": 14622 }, { "epoch": 16.140254003313085, "grad_norm": 0.2308415323495865, "learning_rate": 2.8460333333333333e-05, "loss": 0.0085, "step": 14623 }, { "epoch": 16.141358365543898, "grad_norm": 0.42849093675613403, "learning_rate": 2.8460000000000002e-05, "loss": 0.0224, "step": 14624 }, { 
"epoch": 16.14246272777471, "grad_norm": 0.10120011866092682, "learning_rate": 2.8459666666666668e-05, "loss": 0.0032, "step": 14625 }, { "epoch": 16.143567090005522, "grad_norm": 0.15203532576560974, "learning_rate": 2.8459333333333334e-05, "loss": 0.0041, "step": 14626 }, { "epoch": 16.144671452236334, "grad_norm": 0.3008543848991394, "learning_rate": 2.8459e-05, "loss": 0.0064, "step": 14627 }, { "epoch": 16.145775814467147, "grad_norm": 0.16426491737365723, "learning_rate": 2.845866666666667e-05, "loss": 0.0071, "step": 14628 }, { "epoch": 16.146880176697955, "grad_norm": 0.8945810794830322, "learning_rate": 2.845833333333333e-05, "loss": 0.0116, "step": 14629 }, { "epoch": 16.147984538928768, "grad_norm": 0.0851280614733696, "learning_rate": 2.8458e-05, "loss": 0.0053, "step": 14630 }, { "epoch": 16.14908890115958, "grad_norm": 0.19462472200393677, "learning_rate": 2.8457666666666667e-05, "loss": 0.008, "step": 14631 }, { "epoch": 16.150193263390392, "grad_norm": 0.6455438733100891, "learning_rate": 2.8457333333333333e-05, "loss": 0.0147, "step": 14632 }, { "epoch": 16.151297625621204, "grad_norm": 0.15600617229938507, "learning_rate": 2.8457000000000002e-05, "loss": 0.0088, "step": 14633 }, { "epoch": 16.152401987852016, "grad_norm": 0.1882198303937912, "learning_rate": 2.8456666666666668e-05, "loss": 0.0058, "step": 14634 }, { "epoch": 16.15350635008283, "grad_norm": 0.2614826560020447, "learning_rate": 2.8456333333333334e-05, "loss": 0.0112, "step": 14635 }, { "epoch": 16.154610712313637, "grad_norm": 0.562656819820404, "learning_rate": 2.8456e-05, "loss": 0.0259, "step": 14636 }, { "epoch": 16.15571507454445, "grad_norm": 0.11921080201864243, "learning_rate": 2.845566666666667e-05, "loss": 0.0031, "step": 14637 }, { "epoch": 16.156819436775262, "grad_norm": 0.35195037722587585, "learning_rate": 2.8455333333333332e-05, "loss": 0.015, "step": 14638 }, { "epoch": 16.157923799006074, "grad_norm": 0.18663427233695984, "learning_rate": 2.8455e-05, "loss": 
0.0083, "step": 14639 }, { "epoch": 16.159028161236886, "grad_norm": 0.30309563875198364, "learning_rate": 2.845466666666667e-05, "loss": 0.0086, "step": 14640 }, { "epoch": 16.1601325234677, "grad_norm": 0.17769019305706024, "learning_rate": 2.8454333333333333e-05, "loss": 0.0073, "step": 14641 }, { "epoch": 16.16123688569851, "grad_norm": 0.08459585160017014, "learning_rate": 2.8454000000000002e-05, "loss": 0.002, "step": 14642 }, { "epoch": 16.16234124792932, "grad_norm": 0.2421846091747284, "learning_rate": 2.8453666666666668e-05, "loss": 0.026, "step": 14643 }, { "epoch": 16.16344561016013, "grad_norm": 0.35134074091911316, "learning_rate": 2.8453333333333334e-05, "loss": 0.0101, "step": 14644 }, { "epoch": 16.164549972390944, "grad_norm": 0.2741754949092865, "learning_rate": 2.8453e-05, "loss": 0.0078, "step": 14645 }, { "epoch": 16.165654334621756, "grad_norm": 0.19926291704177856, "learning_rate": 2.845266666666667e-05, "loss": 0.021, "step": 14646 }, { "epoch": 16.16675869685257, "grad_norm": 0.5057322382926941, "learning_rate": 2.8452333333333332e-05, "loss": 0.1193, "step": 14647 }, { "epoch": 16.16786305908338, "grad_norm": 0.5082675814628601, "learning_rate": 2.8452e-05, "loss": 0.0947, "step": 14648 }, { "epoch": 16.168967421314193, "grad_norm": 0.36063793301582336, "learning_rate": 2.8451666666666667e-05, "loss": 0.1075, "step": 14649 }, { "epoch": 16.170071783545, "grad_norm": 0.4125831723213196, "learning_rate": 2.8451333333333333e-05, "loss": 0.1012, "step": 14650 }, { "epoch": 16.171176145775814, "grad_norm": 0.42078274488449097, "learning_rate": 2.8451000000000003e-05, "loss": 0.0751, "step": 14651 }, { "epoch": 16.172280508006626, "grad_norm": 0.429633766412735, "learning_rate": 2.8450666666666665e-05, "loss": 0.0429, "step": 14652 }, { "epoch": 16.173384870237438, "grad_norm": 0.7881360650062561, "learning_rate": 2.8450333333333334e-05, "loss": 0.04, "step": 14653 }, { "epoch": 16.17448923246825, "grad_norm": 0.467633992433548, 
"learning_rate": 2.845e-05, "loss": 0.0518, "step": 14654 }, { "epoch": 16.175593594699063, "grad_norm": 0.3982212245464325, "learning_rate": 2.8449666666666666e-05, "loss": 0.0332, "step": 14655 }, { "epoch": 16.17669795692987, "grad_norm": 0.27438992261886597, "learning_rate": 2.8449333333333332e-05, "loss": 0.0161, "step": 14656 }, { "epoch": 16.177802319160683, "grad_norm": 0.21434074640274048, "learning_rate": 2.8449e-05, "loss": 0.0133, "step": 14657 }, { "epoch": 16.178906681391496, "grad_norm": 0.2077028453350067, "learning_rate": 2.8448666666666667e-05, "loss": 0.0129, "step": 14658 }, { "epoch": 16.180011043622308, "grad_norm": 0.17200659215450287, "learning_rate": 2.8448333333333333e-05, "loss": 0.0097, "step": 14659 }, { "epoch": 16.18111540585312, "grad_norm": 0.20868399739265442, "learning_rate": 2.8448000000000003e-05, "loss": 0.0126, "step": 14660 }, { "epoch": 16.182219768083932, "grad_norm": 0.2225591540336609, "learning_rate": 2.8447666666666665e-05, "loss": 0.0096, "step": 14661 }, { "epoch": 16.183324130314745, "grad_norm": 0.3740648627281189, "learning_rate": 2.8447333333333335e-05, "loss": 0.0108, "step": 14662 }, { "epoch": 16.184428492545553, "grad_norm": 0.391198068857193, "learning_rate": 2.8447e-05, "loss": 0.0067, "step": 14663 }, { "epoch": 16.185532854776365, "grad_norm": 0.5802624225616455, "learning_rate": 2.8446666666666666e-05, "loss": 0.0111, "step": 14664 }, { "epoch": 16.186637217007178, "grad_norm": 0.30689728260040283, "learning_rate": 2.8446333333333336e-05, "loss": 0.0103, "step": 14665 }, { "epoch": 16.18774157923799, "grad_norm": 0.20524753630161285, "learning_rate": 2.8446000000000002e-05, "loss": 0.0138, "step": 14666 }, { "epoch": 16.188845941468802, "grad_norm": 0.13395312428474426, "learning_rate": 2.8445666666666668e-05, "loss": 0.0064, "step": 14667 }, { "epoch": 16.189950303699614, "grad_norm": 0.14075873792171478, "learning_rate": 2.8445333333333334e-05, "loss": 0.0203, "step": 14668 }, { "epoch": 
16.191054665930427, "grad_norm": 0.19697731733322144, "learning_rate": 2.8445000000000003e-05, "loss": 0.0076, "step": 14669 }, { "epoch": 16.192159028161235, "grad_norm": 0.09118924289941788, "learning_rate": 2.8444666666666665e-05, "loss": 0.0053, "step": 14670 }, { "epoch": 16.193263390392048, "grad_norm": 0.2218492329120636, "learning_rate": 2.8444333333333335e-05, "loss": 0.0087, "step": 14671 }, { "epoch": 16.19436775262286, "grad_norm": 0.3967113792896271, "learning_rate": 2.8444e-05, "loss": 0.009, "step": 14672 }, { "epoch": 16.195472114853672, "grad_norm": 0.18702109158039093, "learning_rate": 2.8443666666666667e-05, "loss": 0.0096, "step": 14673 }, { "epoch": 16.196576477084484, "grad_norm": 0.15888036787509918, "learning_rate": 2.8443333333333336e-05, "loss": 0.0068, "step": 14674 }, { "epoch": 16.197680839315296, "grad_norm": 0.15909968316555023, "learning_rate": 2.8443000000000002e-05, "loss": 0.0043, "step": 14675 }, { "epoch": 16.19878520154611, "grad_norm": 0.23358158767223358, "learning_rate": 2.8442666666666668e-05, "loss": 0.0086, "step": 14676 }, { "epoch": 16.199889563776917, "grad_norm": 0.15070824325084686, "learning_rate": 2.8442333333333334e-05, "loss": 0.0073, "step": 14677 }, { "epoch": 16.20099392600773, "grad_norm": 0.3002176582813263, "learning_rate": 2.8442e-05, "loss": 0.0063, "step": 14678 }, { "epoch": 16.202098288238542, "grad_norm": 0.2617436647415161, "learning_rate": 2.8441666666666666e-05, "loss": 0.0091, "step": 14679 }, { "epoch": 16.203202650469354, "grad_norm": 0.187950998544693, "learning_rate": 2.8441333333333335e-05, "loss": 0.0087, "step": 14680 }, { "epoch": 16.204307012700166, "grad_norm": 0.2774762511253357, "learning_rate": 2.8440999999999998e-05, "loss": 0.0054, "step": 14681 }, { "epoch": 16.20541137493098, "grad_norm": 1.9776846170425415, "learning_rate": 2.8440666666666667e-05, "loss": 0.0111, "step": 14682 }, { "epoch": 16.20651573716179, "grad_norm": 0.4062923491001129, "learning_rate": 
2.8440333333333336e-05, "loss": 0.0118, "step": 14683 }, { "epoch": 16.2076200993926, "grad_norm": 0.10076311230659485, "learning_rate": 2.844e-05, "loss": 0.0026, "step": 14684 }, { "epoch": 16.20872446162341, "grad_norm": 0.44142571091651917, "learning_rate": 2.8439666666666668e-05, "loss": 0.0164, "step": 14685 }, { "epoch": 16.209828823854224, "grad_norm": 0.41391730308532715, "learning_rate": 2.8439333333333334e-05, "loss": 0.0089, "step": 14686 }, { "epoch": 16.210933186085036, "grad_norm": 0.13116766512393951, "learning_rate": 2.8439e-05, "loss": 0.011, "step": 14687 }, { "epoch": 16.21203754831585, "grad_norm": 0.2694562077522278, "learning_rate": 2.8438666666666666e-05, "loss": 0.0117, "step": 14688 }, { "epoch": 16.21314191054666, "grad_norm": 0.0986793115735054, "learning_rate": 2.8438333333333335e-05, "loss": 0.0035, "step": 14689 }, { "epoch": 16.214246272777473, "grad_norm": 0.48920756578445435, "learning_rate": 2.8438e-05, "loss": 0.0083, "step": 14690 }, { "epoch": 16.21535063500828, "grad_norm": 0.4420281648635864, "learning_rate": 2.8437666666666667e-05, "loss": 0.0138, "step": 14691 }, { "epoch": 16.216454997239094, "grad_norm": 0.27374687790870667, "learning_rate": 2.8437333333333336e-05, "loss": 0.0074, "step": 14692 }, { "epoch": 16.217559359469906, "grad_norm": 0.1915736347436905, "learning_rate": 2.8437e-05, "loss": 0.0086, "step": 14693 }, { "epoch": 16.218663721700718, "grad_norm": 0.38739481568336487, "learning_rate": 2.8436666666666668e-05, "loss": 0.0109, "step": 14694 }, { "epoch": 16.21976808393153, "grad_norm": 0.12294080853462219, "learning_rate": 2.8436333333333334e-05, "loss": 0.0056, "step": 14695 }, { "epoch": 16.220872446162343, "grad_norm": 0.20782317221164703, "learning_rate": 2.8436e-05, "loss": 0.0066, "step": 14696 }, { "epoch": 16.22197680839315, "grad_norm": 0.45423367619514465, "learning_rate": 2.8435666666666666e-05, "loss": 0.1554, "step": 14697 }, { "epoch": 16.223081170623963, "grad_norm": 0.5265239477157593, 
"learning_rate": 2.8435333333333335e-05, "loss": 0.1076, "step": 14698 }, { "epoch": 16.224185532854776, "grad_norm": 0.4517034590244293, "learning_rate": 2.8435e-05, "loss": 0.0892, "step": 14699 }, { "epoch": 16.225289895085588, "grad_norm": 0.5843547582626343, "learning_rate": 2.8434666666666667e-05, "loss": 0.0774, "step": 14700 }, { "epoch": 16.2263942573164, "grad_norm": 0.38020893931388855, "learning_rate": 2.8434333333333337e-05, "loss": 0.0548, "step": 14701 }, { "epoch": 16.227498619547212, "grad_norm": 0.46587708592414856, "learning_rate": 2.8434e-05, "loss": 0.0637, "step": 14702 }, { "epoch": 16.228602981778025, "grad_norm": 0.47931957244873047, "learning_rate": 2.843366666666667e-05, "loss": 0.0442, "step": 14703 }, { "epoch": 16.229707344008833, "grad_norm": 0.2857367694377899, "learning_rate": 2.8433333333333334e-05, "loss": 0.0363, "step": 14704 }, { "epoch": 16.230811706239646, "grad_norm": 0.31877270340919495, "learning_rate": 2.8433e-05, "loss": 0.0193, "step": 14705 }, { "epoch": 16.231916068470458, "grad_norm": 0.393935889005661, "learning_rate": 2.8432666666666666e-05, "loss": 0.028, "step": 14706 }, { "epoch": 16.23302043070127, "grad_norm": 0.2529694139957428, "learning_rate": 2.8432333333333332e-05, "loss": 0.014, "step": 14707 }, { "epoch": 16.234124792932082, "grad_norm": 0.2746891677379608, "learning_rate": 2.8432e-05, "loss": 0.0298, "step": 14708 }, { "epoch": 16.235229155162894, "grad_norm": 0.2351454347372055, "learning_rate": 2.8431666666666667e-05, "loss": 0.0168, "step": 14709 }, { "epoch": 16.236333517393707, "grad_norm": 0.2549979090690613, "learning_rate": 2.8431333333333333e-05, "loss": 0.0114, "step": 14710 }, { "epoch": 16.237437879624515, "grad_norm": 0.14693452417850494, "learning_rate": 2.8431e-05, "loss": 0.0047, "step": 14711 }, { "epoch": 16.238542241855328, "grad_norm": 0.32819563150405884, "learning_rate": 2.843066666666667e-05, "loss": 0.0109, "step": 14712 }, { "epoch": 16.23964660408614, "grad_norm": 
0.36064496636390686, "learning_rate": 2.843033333333333e-05, "loss": 0.0087, "step": 14713 }, { "epoch": 16.240750966316952, "grad_norm": 0.20055867731571198, "learning_rate": 2.843e-05, "loss": 0.0053, "step": 14714 }, { "epoch": 16.241855328547764, "grad_norm": 0.20187385380268097, "learning_rate": 2.842966666666667e-05, "loss": 0.0103, "step": 14715 }, { "epoch": 16.242959690778576, "grad_norm": 0.1923808455467224, "learning_rate": 2.8429333333333332e-05, "loss": 0.0056, "step": 14716 }, { "epoch": 16.24406405300939, "grad_norm": 0.3015527129173279, "learning_rate": 2.8429000000000002e-05, "loss": 0.0051, "step": 14717 }, { "epoch": 16.245168415240197, "grad_norm": 0.11340438574552536, "learning_rate": 2.8428666666666668e-05, "loss": 0.0056, "step": 14718 }, { "epoch": 16.24627277747101, "grad_norm": 0.17726606130599976, "learning_rate": 2.8428333333333334e-05, "loss": 0.0097, "step": 14719 }, { "epoch": 16.247377139701822, "grad_norm": 0.9670537114143372, "learning_rate": 2.8428e-05, "loss": 0.0119, "step": 14720 }, { "epoch": 16.248481501932634, "grad_norm": 0.09833974391222, "learning_rate": 2.842766666666667e-05, "loss": 0.0057, "step": 14721 }, { "epoch": 16.249585864163446, "grad_norm": 0.36570286750793457, "learning_rate": 2.842733333333333e-05, "loss": 0.0121, "step": 14722 }, { "epoch": 16.25069022639426, "grad_norm": 0.18581520020961761, "learning_rate": 2.8427e-05, "loss": 0.0053, "step": 14723 }, { "epoch": 16.25179458862507, "grad_norm": 0.12267286330461502, "learning_rate": 2.842666666666667e-05, "loss": 0.0035, "step": 14724 }, { "epoch": 16.25289895085588, "grad_norm": 0.16749341785907745, "learning_rate": 2.8426333333333333e-05, "loss": 0.0091, "step": 14725 }, { "epoch": 16.25400331308669, "grad_norm": 0.2948436141014099, "learning_rate": 2.8426000000000002e-05, "loss": 0.0106, "step": 14726 }, { "epoch": 16.255107675317504, "grad_norm": 0.19812580943107605, "learning_rate": 2.8425666666666668e-05, "loss": 0.0079, "step": 14727 }, { "epoch": 
16.256212037548316, "grad_norm": 0.23265647888183594, "learning_rate": 2.8425333333333334e-05, "loss": 0.0055, "step": 14728 }, { "epoch": 16.25731639977913, "grad_norm": 0.16507650911808014, "learning_rate": 2.8425e-05, "loss": 0.0054, "step": 14729 }, { "epoch": 16.25842076200994, "grad_norm": 0.1603434979915619, "learning_rate": 2.842466666666667e-05, "loss": 0.006, "step": 14730 }, { "epoch": 16.25952512424075, "grad_norm": 0.18695126473903656, "learning_rate": 2.842433333333333e-05, "loss": 0.0077, "step": 14731 }, { "epoch": 16.26062948647156, "grad_norm": 0.11817901581525803, "learning_rate": 2.8424e-05, "loss": 0.004, "step": 14732 }, { "epoch": 16.261733848702374, "grad_norm": 0.31474727392196655, "learning_rate": 2.842366666666667e-05, "loss": 0.0085, "step": 14733 }, { "epoch": 16.262838210933186, "grad_norm": 0.7829247117042542, "learning_rate": 2.8423333333333333e-05, "loss": 0.0186, "step": 14734 }, { "epoch": 16.263942573163998, "grad_norm": 0.23571114242076874, "learning_rate": 2.8423000000000002e-05, "loss": 0.0089, "step": 14735 }, { "epoch": 16.26504693539481, "grad_norm": 0.0859932005405426, "learning_rate": 2.8422666666666668e-05, "loss": 0.0022, "step": 14736 }, { "epoch": 16.266151297625623, "grad_norm": 0.24802063405513763, "learning_rate": 2.8422333333333334e-05, "loss": 0.0124, "step": 14737 }, { "epoch": 16.26725565985643, "grad_norm": 0.2530617117881775, "learning_rate": 2.8422e-05, "loss": 0.0041, "step": 14738 }, { "epoch": 16.268360022087244, "grad_norm": 0.4151695668697357, "learning_rate": 2.8421666666666666e-05, "loss": 0.0104, "step": 14739 }, { "epoch": 16.269464384318056, "grad_norm": 0.23988865315914154, "learning_rate": 2.8421333333333335e-05, "loss": 0.0105, "step": 14740 }, { "epoch": 16.270568746548868, "grad_norm": 0.11732697486877441, "learning_rate": 2.8421e-05, "loss": 0.0041, "step": 14741 }, { "epoch": 16.27167310877968, "grad_norm": 0.39416876435279846, "learning_rate": 2.8420666666666667e-05, "loss": 0.0054, "step": 
14742 }, { "epoch": 16.272777471010492, "grad_norm": 0.2165840119123459, "learning_rate": 2.8420333333333333e-05, "loss": 0.0065, "step": 14743 }, { "epoch": 16.273881833241305, "grad_norm": 0.32055193185806274, "learning_rate": 2.8420000000000002e-05, "loss": 0.0084, "step": 14744 }, { "epoch": 16.274986195472113, "grad_norm": 0.40908193588256836, "learning_rate": 2.8419666666666665e-05, "loss": 0.0094, "step": 14745 }, { "epoch": 16.276090557702926, "grad_norm": 0.3316139280796051, "learning_rate": 2.8419333333333334e-05, "loss": 0.0068, "step": 14746 }, { "epoch": 16.277194919933738, "grad_norm": 0.6944859623908997, "learning_rate": 2.8419e-05, "loss": 0.1702, "step": 14747 }, { "epoch": 16.27829928216455, "grad_norm": 0.6167963743209839, "learning_rate": 2.8418666666666666e-05, "loss": 0.0938, "step": 14748 }, { "epoch": 16.279403644395362, "grad_norm": 0.5530174970626831, "learning_rate": 2.8418333333333335e-05, "loss": 0.1326, "step": 14749 }, { "epoch": 16.280508006626174, "grad_norm": 0.38867369294166565, "learning_rate": 2.8418e-05, "loss": 0.0581, "step": 14750 }, { "epoch": 16.281612368856987, "grad_norm": 0.2974892258644104, "learning_rate": 2.8417666666666667e-05, "loss": 0.0433, "step": 14751 }, { "epoch": 16.282716731087795, "grad_norm": 0.5890248417854309, "learning_rate": 2.8417333333333333e-05, "loss": 0.0451, "step": 14752 }, { "epoch": 16.283821093318608, "grad_norm": 0.24221104383468628, "learning_rate": 2.8417000000000003e-05, "loss": 0.0389, "step": 14753 }, { "epoch": 16.28492545554942, "grad_norm": 0.6890568137168884, "learning_rate": 2.8416666666666665e-05, "loss": 0.0299, "step": 14754 }, { "epoch": 16.286029817780232, "grad_norm": 0.24869994819164276, "learning_rate": 2.8416333333333334e-05, "loss": 0.0158, "step": 14755 }, { "epoch": 16.287134180011044, "grad_norm": 0.2575177848339081, "learning_rate": 2.8416e-05, "loss": 0.0174, "step": 14756 }, { "epoch": 16.288238542241857, "grad_norm": 0.19067616760730743, "learning_rate": 
2.8415666666666666e-05, "loss": 0.0393, "step": 14757 }, { "epoch": 16.28934290447267, "grad_norm": 0.16501003503799438, "learning_rate": 2.8415333333333336e-05, "loss": 0.0104, "step": 14758 }, { "epoch": 16.290447266703477, "grad_norm": 0.23625020682811737, "learning_rate": 2.8415e-05, "loss": 0.0092, "step": 14759 }, { "epoch": 16.29155162893429, "grad_norm": 0.2196444869041443, "learning_rate": 2.8414666666666667e-05, "loss": 0.0081, "step": 14760 }, { "epoch": 16.292655991165102, "grad_norm": 0.17446208000183105, "learning_rate": 2.8414333333333333e-05, "loss": 0.0086, "step": 14761 }, { "epoch": 16.293760353395914, "grad_norm": 0.15027053654193878, "learning_rate": 2.8414000000000003e-05, "loss": 0.0079, "step": 14762 }, { "epoch": 16.294864715626726, "grad_norm": 0.16000570356845856, "learning_rate": 2.8413666666666665e-05, "loss": 0.0123, "step": 14763 }, { "epoch": 16.29596907785754, "grad_norm": 0.1855858862400055, "learning_rate": 2.8413333333333335e-05, "loss": 0.0148, "step": 14764 }, { "epoch": 16.297073440088347, "grad_norm": 0.22356869280338287, "learning_rate": 2.8413000000000004e-05, "loss": 0.0075, "step": 14765 }, { "epoch": 16.29817780231916, "grad_norm": 0.29592597484588623, "learning_rate": 2.8412666666666666e-05, "loss": 0.0086, "step": 14766 }, { "epoch": 16.29928216454997, "grad_norm": 0.16996049880981445, "learning_rate": 2.8412333333333336e-05, "loss": 0.006, "step": 14767 }, { "epoch": 16.300386526780784, "grad_norm": 0.42085909843444824, "learning_rate": 2.8412e-05, "loss": 0.0128, "step": 14768 }, { "epoch": 16.301490889011596, "grad_norm": 0.49058133363723755, "learning_rate": 2.8411666666666668e-05, "loss": 0.0117, "step": 14769 }, { "epoch": 16.30259525124241, "grad_norm": 0.48070836067199707, "learning_rate": 2.8411333333333334e-05, "loss": 0.0166, "step": 14770 }, { "epoch": 16.30369961347322, "grad_norm": 0.5232470035552979, "learning_rate": 2.8411e-05, "loss": 0.0091, "step": 14771 }, { "epoch": 16.30480397570403, "grad_norm": 
0.16094739735126495, "learning_rate": 2.8410666666666665e-05, "loss": 0.0052, "step": 14772 }, { "epoch": 16.30590833793484, "grad_norm": 0.22880084812641144, "learning_rate": 2.8410333333333335e-05, "loss": 0.0062, "step": 14773 }, { "epoch": 16.307012700165654, "grad_norm": 0.11202887445688248, "learning_rate": 2.841e-05, "loss": 0.0056, "step": 14774 }, { "epoch": 16.308117062396466, "grad_norm": 0.23701676726341248, "learning_rate": 2.8409666666666667e-05, "loss": 0.0092, "step": 14775 }, { "epoch": 16.309221424627278, "grad_norm": 0.15425239503383636, "learning_rate": 2.8409333333333336e-05, "loss": 0.0054, "step": 14776 }, { "epoch": 16.31032578685809, "grad_norm": 0.26463258266448975, "learning_rate": 2.8409e-05, "loss": 0.0083, "step": 14777 }, { "epoch": 16.311430149088903, "grad_norm": 0.2684178948402405, "learning_rate": 2.8408666666666668e-05, "loss": 0.0077, "step": 14778 }, { "epoch": 16.31253451131971, "grad_norm": 0.12879212200641632, "learning_rate": 2.8408333333333334e-05, "loss": 0.0029, "step": 14779 }, { "epoch": 16.313638873550524, "grad_norm": 0.2681210935115814, "learning_rate": 2.8408e-05, "loss": 0.0087, "step": 14780 }, { "epoch": 16.314743235781336, "grad_norm": 0.15087637305259705, "learning_rate": 2.8407666666666666e-05, "loss": 0.0074, "step": 14781 }, { "epoch": 16.315847598012148, "grad_norm": 0.2232217937707901, "learning_rate": 2.8407333333333335e-05, "loss": 0.0065, "step": 14782 }, { "epoch": 16.31695196024296, "grad_norm": 0.48918625712394714, "learning_rate": 2.8407e-05, "loss": 0.0104, "step": 14783 }, { "epoch": 16.318056322473772, "grad_norm": 0.1520279049873352, "learning_rate": 2.8406666666666667e-05, "loss": 0.0078, "step": 14784 }, { "epoch": 16.319160684704585, "grad_norm": 0.26576098799705505, "learning_rate": 2.8406333333333336e-05, "loss": 0.0081, "step": 14785 }, { "epoch": 16.320265046935393, "grad_norm": 0.24110187590122223, "learning_rate": 2.8406e-05, "loss": 0.0082, "step": 14786 }, { "epoch": 
16.321369409166206, "grad_norm": 0.16605311632156372, "learning_rate": 2.8405666666666668e-05, "loss": 0.0041, "step": 14787 }, { "epoch": 16.322473771397018, "grad_norm": 0.4509592652320862, "learning_rate": 2.8405333333333334e-05, "loss": 0.0068, "step": 14788 }, { "epoch": 16.32357813362783, "grad_norm": 0.23056724667549133, "learning_rate": 2.8405e-05, "loss": 0.0105, "step": 14789 }, { "epoch": 16.324682495858642, "grad_norm": 0.09850122779607773, "learning_rate": 2.840466666666667e-05, "loss": 0.0037, "step": 14790 }, { "epoch": 16.325786858089455, "grad_norm": 0.33838024735450745, "learning_rate": 2.8404333333333335e-05, "loss": 0.0114, "step": 14791 }, { "epoch": 16.326891220320267, "grad_norm": 0.3050314486026764, "learning_rate": 2.8404e-05, "loss": 0.0114, "step": 14792 }, { "epoch": 16.327995582551075, "grad_norm": 0.12314505130052567, "learning_rate": 2.8403666666666667e-05, "loss": 0.0033, "step": 14793 }, { "epoch": 16.329099944781888, "grad_norm": 0.3685150444507599, "learning_rate": 2.8403333333333336e-05, "loss": 0.0112, "step": 14794 }, { "epoch": 16.3302043070127, "grad_norm": 0.1434042900800705, "learning_rate": 2.8403e-05, "loss": 0.0044, "step": 14795 }, { "epoch": 16.331308669243512, "grad_norm": 0.7668036818504333, "learning_rate": 2.8402666666666668e-05, "loss": 0.0132, "step": 14796 }, { "epoch": 16.332413031474324, "grad_norm": 0.5882030725479126, "learning_rate": 2.8402333333333334e-05, "loss": 0.1219, "step": 14797 }, { "epoch": 16.333517393705137, "grad_norm": 0.533417284488678, "learning_rate": 2.8402e-05, "loss": 0.1432, "step": 14798 }, { "epoch": 16.33462175593595, "grad_norm": 0.45894676446914673, "learning_rate": 2.840166666666667e-05, "loss": 0.0927, "step": 14799 }, { "epoch": 16.335726118166757, "grad_norm": 0.39324328303337097, "learning_rate": 2.8401333333333332e-05, "loss": 0.0819, "step": 14800 }, { "epoch": 16.33683048039757, "grad_norm": 0.4361042082309723, "learning_rate": 2.8401e-05, "loss": 0.0789, "step": 14801 }, { 
"epoch": 16.337934842628382, "grad_norm": 0.35659292340278625, "learning_rate": 2.8400666666666667e-05, "loss": 0.0423, "step": 14802 }, { "epoch": 16.339039204859194, "grad_norm": 0.29995080828666687, "learning_rate": 2.8400333333333333e-05, "loss": 0.0235, "step": 14803 }, { "epoch": 16.340143567090006, "grad_norm": 0.21629147231578827, "learning_rate": 2.84e-05, "loss": 0.016, "step": 14804 }, { "epoch": 16.34124792932082, "grad_norm": 0.229240283370018, "learning_rate": 2.839966666666667e-05, "loss": 0.0187, "step": 14805 }, { "epoch": 16.342352291551627, "grad_norm": 0.3537611961364746, "learning_rate": 2.8399333333333334e-05, "loss": 0.0389, "step": 14806 }, { "epoch": 16.34345665378244, "grad_norm": 0.20262283086776733, "learning_rate": 2.8399e-05, "loss": 0.0125, "step": 14807 }, { "epoch": 16.34456101601325, "grad_norm": 0.17762354016304016, "learning_rate": 2.839866666666667e-05, "loss": 0.0105, "step": 14808 }, { "epoch": 16.345665378244064, "grad_norm": 0.14982293546199799, "learning_rate": 2.8398333333333332e-05, "loss": 0.0098, "step": 14809 }, { "epoch": 16.346769740474876, "grad_norm": 0.23835346102714539, "learning_rate": 2.8398e-05, "loss": 0.0296, "step": 14810 }, { "epoch": 16.34787410270569, "grad_norm": 0.10455141961574554, "learning_rate": 2.8397666666666667e-05, "loss": 0.0081, "step": 14811 }, { "epoch": 16.3489784649365, "grad_norm": 0.44755828380584717, "learning_rate": 2.8397333333333333e-05, "loss": 0.0178, "step": 14812 }, { "epoch": 16.35008282716731, "grad_norm": 0.1460343450307846, "learning_rate": 2.8397e-05, "loss": 0.005, "step": 14813 }, { "epoch": 16.35118718939812, "grad_norm": 0.24532738327980042, "learning_rate": 2.839666666666667e-05, "loss": 0.0072, "step": 14814 }, { "epoch": 16.352291551628934, "grad_norm": 0.11851813644170761, "learning_rate": 2.8396333333333335e-05, "loss": 0.0055, "step": 14815 }, { "epoch": 16.353395913859746, "grad_norm": 0.21558326482772827, "learning_rate": 2.8396e-05, "loss": 0.0075, "step": 
14816 }, { "epoch": 16.35450027609056, "grad_norm": 0.10103928297758102, "learning_rate": 2.839566666666667e-05, "loss": 0.0043, "step": 14817 }, { "epoch": 16.35560463832137, "grad_norm": 0.25550705194473267, "learning_rate": 2.8395333333333332e-05, "loss": 0.008, "step": 14818 }, { "epoch": 16.356709000552183, "grad_norm": 0.22109180688858032, "learning_rate": 2.8395000000000002e-05, "loss": 0.0119, "step": 14819 }, { "epoch": 16.35781336278299, "grad_norm": 0.10873517394065857, "learning_rate": 2.8394666666666668e-05, "loss": 0.006, "step": 14820 }, { "epoch": 16.358917725013804, "grad_norm": 0.15741002559661865, "learning_rate": 2.8394333333333334e-05, "loss": 0.0053, "step": 14821 }, { "epoch": 16.360022087244616, "grad_norm": 0.13769878447055817, "learning_rate": 2.8394e-05, "loss": 0.0043, "step": 14822 }, { "epoch": 16.361126449475428, "grad_norm": 0.24852685630321503, "learning_rate": 2.839366666666667e-05, "loss": 0.0042, "step": 14823 }, { "epoch": 16.36223081170624, "grad_norm": 0.132476344704628, "learning_rate": 2.8393333333333335e-05, "loss": 0.0047, "step": 14824 }, { "epoch": 16.363335173937053, "grad_norm": 0.11715397238731384, "learning_rate": 2.8393e-05, "loss": 0.0057, "step": 14825 }, { "epoch": 16.364439536167865, "grad_norm": 0.1401996612548828, "learning_rate": 2.839266666666667e-05, "loss": 0.0066, "step": 14826 }, { "epoch": 16.365543898398673, "grad_norm": 0.41731885075569153, "learning_rate": 2.8392333333333333e-05, "loss": 0.0085, "step": 14827 }, { "epoch": 16.366648260629486, "grad_norm": 0.12985415756702423, "learning_rate": 2.8392000000000002e-05, "loss": 0.0058, "step": 14828 }, { "epoch": 16.367752622860298, "grad_norm": 0.21655665338039398, "learning_rate": 2.8391666666666664e-05, "loss": 0.0055, "step": 14829 }, { "epoch": 16.36885698509111, "grad_norm": 0.21878337860107422, "learning_rate": 2.8391333333333334e-05, "loss": 0.0054, "step": 14830 }, { "epoch": 16.369961347321922, "grad_norm": 0.2426627278327942, "learning_rate": 
2.8391e-05, "loss": 0.0087, "step": 14831 }, { "epoch": 16.371065709552735, "grad_norm": 0.1464325487613678, "learning_rate": 2.8390666666666666e-05, "loss": 0.006, "step": 14832 }, { "epoch": 16.372170071783543, "grad_norm": 0.44739100337028503, "learning_rate": 2.8390333333333335e-05, "loss": 0.0098, "step": 14833 }, { "epoch": 16.373274434014355, "grad_norm": 0.31144413352012634, "learning_rate": 2.839e-05, "loss": 0.0129, "step": 14834 }, { "epoch": 16.374378796245168, "grad_norm": 0.30623602867126465, "learning_rate": 2.8389666666666667e-05, "loss": 0.0063, "step": 14835 }, { "epoch": 16.37548315847598, "grad_norm": 0.37191319465637207, "learning_rate": 2.8389333333333333e-05, "loss": 0.0076, "step": 14836 }, { "epoch": 16.376587520706792, "grad_norm": 0.14985108375549316, "learning_rate": 2.8389000000000002e-05, "loss": 0.0029, "step": 14837 }, { "epoch": 16.377691882937604, "grad_norm": 0.6975305080413818, "learning_rate": 2.8388666666666665e-05, "loss": 0.0138, "step": 14838 }, { "epoch": 16.378796245168417, "grad_norm": 0.17503607273101807, "learning_rate": 2.8388333333333334e-05, "loss": 0.0075, "step": 14839 }, { "epoch": 16.379900607399225, "grad_norm": 0.19895222783088684, "learning_rate": 2.8388000000000003e-05, "loss": 0.0045, "step": 14840 }, { "epoch": 16.381004969630037, "grad_norm": 0.35342761874198914, "learning_rate": 2.8387666666666666e-05, "loss": 0.0102, "step": 14841 }, { "epoch": 16.38210933186085, "grad_norm": 0.2612254321575165, "learning_rate": 2.8387333333333335e-05, "loss": 0.0088, "step": 14842 }, { "epoch": 16.383213694091662, "grad_norm": 0.2962404787540436, "learning_rate": 2.8387e-05, "loss": 0.0068, "step": 14843 }, { "epoch": 16.384318056322474, "grad_norm": 0.300149142742157, "learning_rate": 2.8386666666666667e-05, "loss": 0.007, "step": 14844 }, { "epoch": 16.385422418553286, "grad_norm": 0.4572078585624695, "learning_rate": 2.8386333333333333e-05, "loss": 0.0185, "step": 14845 }, { "epoch": 16.3865267807841, "grad_norm": 
0.6671813726425171, "learning_rate": 2.8386000000000002e-05, "loss": 0.0066, "step": 14846 }, { "epoch": 16.387631143014907, "grad_norm": 0.449325829744339, "learning_rate": 2.8385666666666665e-05, "loss": 0.1158, "step": 14847 }, { "epoch": 16.38873550524572, "grad_norm": 0.47981005907058716, "learning_rate": 2.8385333333333334e-05, "loss": 0.1017, "step": 14848 }, { "epoch": 16.38983986747653, "grad_norm": 0.4688640534877777, "learning_rate": 2.8385000000000004e-05, "loss": 0.0992, "step": 14849 }, { "epoch": 16.390944229707344, "grad_norm": 0.5369166135787964, "learning_rate": 2.8384666666666666e-05, "loss": 0.0787, "step": 14850 }, { "epoch": 16.392048591938156, "grad_norm": 0.8187796473503113, "learning_rate": 2.8384333333333335e-05, "loss": 0.0552, "step": 14851 }, { "epoch": 16.39315295416897, "grad_norm": 0.49356353282928467, "learning_rate": 2.8384e-05, "loss": 0.0448, "step": 14852 }, { "epoch": 16.39425731639978, "grad_norm": 0.20596207678318024, "learning_rate": 2.8383666666666667e-05, "loss": 0.0245, "step": 14853 }, { "epoch": 16.39536167863059, "grad_norm": 1.2044240236282349, "learning_rate": 2.8383333333333333e-05, "loss": 0.0396, "step": 14854 }, { "epoch": 16.3964660408614, "grad_norm": 0.21024610102176666, "learning_rate": 2.8383000000000003e-05, "loss": 0.0155, "step": 14855 }, { "epoch": 16.397570403092214, "grad_norm": 0.29099953174591064, "learning_rate": 2.838266666666667e-05, "loss": 0.0119, "step": 14856 }, { "epoch": 16.398674765323026, "grad_norm": 0.2625688314437866, "learning_rate": 2.8382333333333334e-05, "loss": 0.0354, "step": 14857 }, { "epoch": 16.39977912755384, "grad_norm": 0.5796690583229065, "learning_rate": 2.8382e-05, "loss": 0.0187, "step": 14858 }, { "epoch": 16.40088348978465, "grad_norm": 0.5106789469718933, "learning_rate": 2.8381666666666666e-05, "loss": 0.013, "step": 14859 }, { "epoch": 16.401987852015463, "grad_norm": 0.23261204361915588, "learning_rate": 2.8381333333333336e-05, "loss": 0.0298, "step": 14860 }, { 
"epoch": 16.40309221424627, "grad_norm": 0.305686354637146, "learning_rate": 2.8380999999999998e-05, "loss": 0.0329, "step": 14861 }, { "epoch": 16.404196576477084, "grad_norm": 0.26518169045448303, "learning_rate": 2.8380666666666667e-05, "loss": 0.0094, "step": 14862 }, { "epoch": 16.405300938707896, "grad_norm": 0.2571980655193329, "learning_rate": 2.8380333333333333e-05, "loss": 0.012, "step": 14863 }, { "epoch": 16.406405300938708, "grad_norm": 0.12025990337133408, "learning_rate": 2.838e-05, "loss": 0.0045, "step": 14864 }, { "epoch": 16.40750966316952, "grad_norm": 0.2875649631023407, "learning_rate": 2.837966666666667e-05, "loss": 0.0101, "step": 14865 }, { "epoch": 16.408614025400333, "grad_norm": 0.28828805685043335, "learning_rate": 2.8379333333333335e-05, "loss": 0.0101, "step": 14866 }, { "epoch": 16.409718387631145, "grad_norm": 0.27119624614715576, "learning_rate": 2.8379e-05, "loss": 0.0116, "step": 14867 }, { "epoch": 16.410822749861953, "grad_norm": 0.21927516162395477, "learning_rate": 2.8378666666666666e-05, "loss": 0.0088, "step": 14868 }, { "epoch": 16.411927112092766, "grad_norm": 0.07725450396537781, "learning_rate": 2.8378333333333336e-05, "loss": 0.0037, "step": 14869 }, { "epoch": 16.413031474323578, "grad_norm": 0.16198214888572693, "learning_rate": 2.8378e-05, "loss": 0.0075, "step": 14870 }, { "epoch": 16.41413583655439, "grad_norm": 0.26629242300987244, "learning_rate": 2.8377666666666668e-05, "loss": 0.0066, "step": 14871 }, { "epoch": 16.415240198785202, "grad_norm": 0.33290231227874756, "learning_rate": 2.8377333333333334e-05, "loss": 0.0073, "step": 14872 }, { "epoch": 16.416344561016015, "grad_norm": 0.11565884202718735, "learning_rate": 2.8377e-05, "loss": 0.0048, "step": 14873 }, { "epoch": 16.417448923246823, "grad_norm": 0.26786866784095764, "learning_rate": 2.837666666666667e-05, "loss": 0.0092, "step": 14874 }, { "epoch": 16.418553285477635, "grad_norm": 0.1344761997461319, "learning_rate": 2.8376333333333335e-05, "loss": 
0.0065, "step": 14875 }, { "epoch": 16.419657647708448, "grad_norm": 0.21518640220165253, "learning_rate": 2.8376e-05, "loss": 0.0086, "step": 14876 }, { "epoch": 16.42076200993926, "grad_norm": 0.12418898940086365, "learning_rate": 2.8375666666666667e-05, "loss": 0.0058, "step": 14877 }, { "epoch": 16.421866372170072, "grad_norm": 0.2944525182247162, "learning_rate": 2.8375333333333336e-05, "loss": 0.0138, "step": 14878 }, { "epoch": 16.422970734400884, "grad_norm": 0.17378269135951996, "learning_rate": 2.8375e-05, "loss": 0.0077, "step": 14879 }, { "epoch": 16.424075096631697, "grad_norm": 0.31903788447380066, "learning_rate": 2.8374666666666668e-05, "loss": 0.0097, "step": 14880 }, { "epoch": 16.425179458862505, "grad_norm": 1.2334752082824707, "learning_rate": 2.8374333333333334e-05, "loss": 0.0615, "step": 14881 }, { "epoch": 16.426283821093318, "grad_norm": 0.1100301668047905, "learning_rate": 2.8374e-05, "loss": 0.0083, "step": 14882 }, { "epoch": 16.42738818332413, "grad_norm": 0.17125378549098969, "learning_rate": 2.837366666666667e-05, "loss": 0.0065, "step": 14883 }, { "epoch": 16.428492545554942, "grad_norm": 0.2913999557495117, "learning_rate": 2.8373333333333335e-05, "loss": 0.0051, "step": 14884 }, { "epoch": 16.429596907785754, "grad_norm": 0.16250798106193542, "learning_rate": 2.8373e-05, "loss": 0.0045, "step": 14885 }, { "epoch": 16.430701270016566, "grad_norm": 0.42552095651626587, "learning_rate": 2.8372666666666667e-05, "loss": 0.0146, "step": 14886 }, { "epoch": 16.43180563224738, "grad_norm": 0.41663655638694763, "learning_rate": 2.8372333333333336e-05, "loss": 0.0132, "step": 14887 }, { "epoch": 16.432909994478187, "grad_norm": 0.2225470244884491, "learning_rate": 2.8372e-05, "loss": 0.0069, "step": 14888 }, { "epoch": 16.434014356709, "grad_norm": 0.2680768072605133, "learning_rate": 2.8371666666666668e-05, "loss": 0.009, "step": 14889 }, { "epoch": 16.435118718939812, "grad_norm": 0.2956250011920929, "learning_rate": 
2.8371333333333334e-05, "loss": 0.0117, "step": 14890 }, { "epoch": 16.436223081170624, "grad_norm": 0.19791844487190247, "learning_rate": 2.8371e-05, "loss": 0.0063, "step": 14891 }, { "epoch": 16.437327443401436, "grad_norm": 0.5339944362640381, "learning_rate": 2.837066666666667e-05, "loss": 0.0319, "step": 14892 }, { "epoch": 16.43843180563225, "grad_norm": 0.3209594488143921, "learning_rate": 2.8370333333333332e-05, "loss": 0.0074, "step": 14893 }, { "epoch": 16.43953616786306, "grad_norm": 0.21806293725967407, "learning_rate": 2.837e-05, "loss": 0.0051, "step": 14894 }, { "epoch": 16.44064053009387, "grad_norm": 0.21993377804756165, "learning_rate": 2.8369666666666667e-05, "loss": 0.008, "step": 14895 }, { "epoch": 16.44174489232468, "grad_norm": 0.7877838611602783, "learning_rate": 2.8369333333333333e-05, "loss": 0.0259, "step": 14896 }, { "epoch": 16.442849254555494, "grad_norm": 1.1821271181106567, "learning_rate": 2.8369e-05, "loss": 0.1696, "step": 14897 }, { "epoch": 16.443953616786306, "grad_norm": 0.47230008244514465, "learning_rate": 2.8368666666666668e-05, "loss": 0.1368, "step": 14898 }, { "epoch": 16.44505797901712, "grad_norm": 0.8243343830108643, "learning_rate": 2.8368333333333334e-05, "loss": 0.0999, "step": 14899 }, { "epoch": 16.44616234124793, "grad_norm": 0.5740810632705688, "learning_rate": 2.8368e-05, "loss": 0.1084, "step": 14900 }, { "epoch": 16.447266703478743, "grad_norm": 0.7819128036499023, "learning_rate": 2.836766666666667e-05, "loss": 0.0704, "step": 14901 }, { "epoch": 16.44837106570955, "grad_norm": 0.40781790018081665, "learning_rate": 2.8367333333333332e-05, "loss": 0.0768, "step": 14902 }, { "epoch": 16.449475427940364, "grad_norm": 0.3921980559825897, "learning_rate": 2.8367e-05, "loss": 0.0498, "step": 14903 }, { "epoch": 16.450579790171176, "grad_norm": 0.4017355740070343, "learning_rate": 2.8366666666666667e-05, "loss": 0.0394, "step": 14904 }, { "epoch": 16.451684152401988, "grad_norm": 0.5307326912879944, 
"learning_rate": 2.8366333333333333e-05, "loss": 0.0571, "step": 14905 }, { "epoch": 16.4527885146328, "grad_norm": 0.5645928382873535, "learning_rate": 2.8366000000000003e-05, "loss": 0.0377, "step": 14906 }, { "epoch": 16.453892876863613, "grad_norm": 0.36815014481544495, "learning_rate": 2.836566666666667e-05, "loss": 0.042, "step": 14907 }, { "epoch": 16.45499723909442, "grad_norm": 0.10425211489200592, "learning_rate": 2.8365333333333334e-05, "loss": 0.0055, "step": 14908 }, { "epoch": 16.456101601325233, "grad_norm": 0.26527929306030273, "learning_rate": 2.8365e-05, "loss": 0.0092, "step": 14909 }, { "epoch": 16.457205963556046, "grad_norm": 0.14543868601322174, "learning_rate": 2.836466666666667e-05, "loss": 0.0094, "step": 14910 }, { "epoch": 16.458310325786858, "grad_norm": 0.3083386719226837, "learning_rate": 2.8364333333333332e-05, "loss": 0.0102, "step": 14911 }, { "epoch": 16.45941468801767, "grad_norm": 0.12677262723445892, "learning_rate": 2.8364e-05, "loss": 0.0099, "step": 14912 }, { "epoch": 16.460519050248482, "grad_norm": 0.36507511138916016, "learning_rate": 2.8363666666666667e-05, "loss": 0.0133, "step": 14913 }, { "epoch": 16.461623412479295, "grad_norm": 0.2043149769306183, "learning_rate": 2.8363333333333333e-05, "loss": 0.0102, "step": 14914 }, { "epoch": 16.462727774710103, "grad_norm": 0.18512068688869476, "learning_rate": 2.8363000000000003e-05, "loss": 0.0077, "step": 14915 }, { "epoch": 16.463832136940916, "grad_norm": 0.22633731365203857, "learning_rate": 2.836266666666667e-05, "loss": 0.0122, "step": 14916 }, { "epoch": 16.464936499171728, "grad_norm": 0.13615748286247253, "learning_rate": 2.8362333333333335e-05, "loss": 0.0078, "step": 14917 }, { "epoch": 16.46604086140254, "grad_norm": 0.20710675418376923, "learning_rate": 2.8362e-05, "loss": 0.0088, "step": 14918 }, { "epoch": 16.467145223633352, "grad_norm": 0.17882904410362244, "learning_rate": 2.8361666666666666e-05, "loss": 0.0057, "step": 14919 }, { "epoch": 
16.468249585864164, "grad_norm": 0.12695135176181793, "learning_rate": 2.8361333333333332e-05, "loss": 0.0073, "step": 14920 }, { "epoch": 16.469353948094977, "grad_norm": 0.13949713110923767, "learning_rate": 2.8361000000000002e-05, "loss": 0.0083, "step": 14921 }, { "epoch": 16.470458310325785, "grad_norm": 0.1794164478778839, "learning_rate": 2.8360666666666664e-05, "loss": 0.0051, "step": 14922 }, { "epoch": 16.471562672556598, "grad_norm": 0.4006461203098297, "learning_rate": 2.8360333333333334e-05, "loss": 0.0107, "step": 14923 }, { "epoch": 16.47266703478741, "grad_norm": 0.1868991255760193, "learning_rate": 2.8360000000000003e-05, "loss": 0.0348, "step": 14924 }, { "epoch": 16.473771397018222, "grad_norm": 0.18455784022808075, "learning_rate": 2.8359666666666665e-05, "loss": 0.008, "step": 14925 }, { "epoch": 16.474875759249034, "grad_norm": 0.1697985827922821, "learning_rate": 2.8359333333333335e-05, "loss": 0.0068, "step": 14926 }, { "epoch": 16.475980121479846, "grad_norm": 0.21297059953212738, "learning_rate": 2.8359e-05, "loss": 0.0141, "step": 14927 }, { "epoch": 16.47708448371066, "grad_norm": 0.2743595838546753, "learning_rate": 2.8358666666666667e-05, "loss": 0.0107, "step": 14928 }, { "epoch": 16.478188845941467, "grad_norm": 0.20824700593948364, "learning_rate": 2.8358333333333333e-05, "loss": 0.0069, "step": 14929 }, { "epoch": 16.47929320817228, "grad_norm": 0.1251751184463501, "learning_rate": 2.8358000000000002e-05, "loss": 0.0053, "step": 14930 }, { "epoch": 16.480397570403092, "grad_norm": 0.41916993260383606, "learning_rate": 2.8357666666666668e-05, "loss": 0.0098, "step": 14931 }, { "epoch": 16.481501932633904, "grad_norm": 0.218656986951828, "learning_rate": 2.8357333333333334e-05, "loss": 0.0055, "step": 14932 }, { "epoch": 16.482606294864716, "grad_norm": 0.43973079323768616, "learning_rate": 2.8357000000000003e-05, "loss": 0.0098, "step": 14933 }, { "epoch": 16.48371065709553, "grad_norm": 0.3645522892475128, "learning_rate": 
2.8356666666666666e-05, "loss": 0.0055, "step": 14934 }, { "epoch": 16.48481501932634, "grad_norm": 0.12725013494491577, "learning_rate": 2.8356333333333335e-05, "loss": 0.0049, "step": 14935 }, { "epoch": 16.48591938155715, "grad_norm": 0.1478835493326187, "learning_rate": 2.8356e-05, "loss": 0.0084, "step": 14936 }, { "epoch": 16.48702374378796, "grad_norm": 0.3165382146835327, "learning_rate": 2.8355666666666667e-05, "loss": 0.0157, "step": 14937 }, { "epoch": 16.488128106018774, "grad_norm": 0.310382217168808, "learning_rate": 2.8355333333333333e-05, "loss": 0.0063, "step": 14938 }, { "epoch": 16.489232468249586, "grad_norm": 0.5698592066764832, "learning_rate": 2.8355000000000002e-05, "loss": 0.0131, "step": 14939 }, { "epoch": 16.4903368304804, "grad_norm": 0.21152529120445251, "learning_rate": 2.8354666666666668e-05, "loss": 0.0075, "step": 14940 }, { "epoch": 16.49144119271121, "grad_norm": 0.20714756846427917, "learning_rate": 2.8354333333333334e-05, "loss": 0.0071, "step": 14941 }, { "epoch": 16.49254555494202, "grad_norm": 0.32363277673721313, "learning_rate": 2.8354000000000003e-05, "loss": 0.009, "step": 14942 }, { "epoch": 16.49364991717283, "grad_norm": 0.2716861069202423, "learning_rate": 2.8353666666666666e-05, "loss": 0.0102, "step": 14943 }, { "epoch": 16.494754279403644, "grad_norm": 1.0571452379226685, "learning_rate": 2.8353333333333335e-05, "loss": 0.0123, "step": 14944 }, { "epoch": 16.495858641634456, "grad_norm": 0.16166368126869202, "learning_rate": 2.8353e-05, "loss": 0.0042, "step": 14945 }, { "epoch": 16.496963003865268, "grad_norm": 0.6032339334487915, "learning_rate": 2.8352666666666667e-05, "loss": 0.0329, "step": 14946 }, { "epoch": 16.49806736609608, "grad_norm": 0.6907554268836975, "learning_rate": 2.8352333333333333e-05, "loss": 0.1805, "step": 14947 }, { "epoch": 16.499171728326893, "grad_norm": 0.9991822242736816, "learning_rate": 2.8352000000000002e-05, "loss": 0.153, "step": 14948 }, { "epoch": 16.5002760905577, "grad_norm": 
0.5042764544487, "learning_rate": 2.8351666666666668e-05, "loss": 0.0933, "step": 14949 }, { "epoch": 16.501380452788514, "grad_norm": 0.54966801404953, "learning_rate": 2.8351333333333334e-05, "loss": 0.0572, "step": 14950 }, { "epoch": 16.502484815019326, "grad_norm": 0.43674570322036743, "learning_rate": 2.8351e-05, "loss": 0.0697, "step": 14951 }, { "epoch": 16.503589177250138, "grad_norm": 0.7211953401565552, "learning_rate": 2.8350666666666666e-05, "loss": 0.0424, "step": 14952 }, { "epoch": 16.50469353948095, "grad_norm": 0.56734699010849, "learning_rate": 2.8350333333333335e-05, "loss": 0.0442, "step": 14953 }, { "epoch": 16.505797901711762, "grad_norm": 0.2224322110414505, "learning_rate": 2.8349999999999998e-05, "loss": 0.0316, "step": 14954 }, { "epoch": 16.506902263942575, "grad_norm": 0.3607649803161621, "learning_rate": 2.8349666666666667e-05, "loss": 0.0401, "step": 14955 }, { "epoch": 16.508006626173383, "grad_norm": 0.2980075180530548, "learning_rate": 2.8349333333333337e-05, "loss": 0.0145, "step": 14956 }, { "epoch": 16.509110988404196, "grad_norm": 0.2847095727920532, "learning_rate": 2.8349e-05, "loss": 0.0132, "step": 14957 }, { "epoch": 16.510215350635008, "grad_norm": 0.5251865983009338, "learning_rate": 2.834866666666667e-05, "loss": 0.0084, "step": 14958 }, { "epoch": 16.51131971286582, "grad_norm": 0.24384506046772003, "learning_rate": 2.8348333333333334e-05, "loss": 0.0159, "step": 14959 }, { "epoch": 16.512424075096632, "grad_norm": 0.26881861686706543, "learning_rate": 2.8348e-05, "loss": 0.0144, "step": 14960 }, { "epoch": 16.513528437327444, "grad_norm": 0.3012334704399109, "learning_rate": 2.8347666666666666e-05, "loss": 0.0128, "step": 14961 }, { "epoch": 16.514632799558257, "grad_norm": 0.19996283948421478, "learning_rate": 2.8347333333333336e-05, "loss": 0.0067, "step": 14962 }, { "epoch": 16.515737161789065, "grad_norm": 0.06652779877185822, "learning_rate": 2.8346999999999998e-05, "loss": 0.0029, "step": 14963 }, { "epoch": 
16.516841524019878, "grad_norm": 0.8859871029853821, "learning_rate": 2.8346666666666667e-05, "loss": 0.0118, "step": 14964 }, { "epoch": 16.51794588625069, "grad_norm": 0.1781049370765686, "learning_rate": 2.8346333333333337e-05, "loss": 0.0051, "step": 14965 }, { "epoch": 16.519050248481502, "grad_norm": 0.3232630789279938, "learning_rate": 2.8346e-05, "loss": 0.0104, "step": 14966 }, { "epoch": 16.520154610712314, "grad_norm": 0.4380629360675812, "learning_rate": 2.834566666666667e-05, "loss": 0.0093, "step": 14967 }, { "epoch": 16.521258972943127, "grad_norm": 0.4457903504371643, "learning_rate": 2.8345333333333335e-05, "loss": 0.0114, "step": 14968 }, { "epoch": 16.52236333517394, "grad_norm": 0.5235861539840698, "learning_rate": 2.8345e-05, "loss": 0.0166, "step": 14969 }, { "epoch": 16.523467697404747, "grad_norm": 0.31318747997283936, "learning_rate": 2.8344666666666666e-05, "loss": 0.022, "step": 14970 }, { "epoch": 16.52457205963556, "grad_norm": 0.26806771755218506, "learning_rate": 2.8344333333333336e-05, "loss": 0.0126, "step": 14971 }, { "epoch": 16.525676421866372, "grad_norm": 0.3206426203250885, "learning_rate": 2.8344e-05, "loss": 0.0143, "step": 14972 }, { "epoch": 16.526780784097184, "grad_norm": 0.27857887744903564, "learning_rate": 2.8343666666666668e-05, "loss": 0.0409, "step": 14973 }, { "epoch": 16.527885146327996, "grad_norm": 0.21539874374866486, "learning_rate": 2.8343333333333337e-05, "loss": 0.0074, "step": 14974 }, { "epoch": 16.52898950855881, "grad_norm": 0.31872206926345825, "learning_rate": 2.8343e-05, "loss": 0.0134, "step": 14975 }, { "epoch": 16.53009387078962, "grad_norm": 0.12811873853206635, "learning_rate": 2.834266666666667e-05, "loss": 0.0067, "step": 14976 }, { "epoch": 16.53119823302043, "grad_norm": 0.41755223274230957, "learning_rate": 2.8342333333333335e-05, "loss": 0.0121, "step": 14977 }, { "epoch": 16.53230259525124, "grad_norm": 0.6419353485107422, "learning_rate": 2.8342e-05, "loss": 0.0207, "step": 14978 }, { 
"epoch": 16.533406957482054, "grad_norm": 0.17416147887706757, "learning_rate": 2.8341666666666667e-05, "loss": 0.0069, "step": 14979 }, { "epoch": 16.534511319712866, "grad_norm": 0.5906449556350708, "learning_rate": 2.8341333333333333e-05, "loss": 0.0096, "step": 14980 }, { "epoch": 16.53561568194368, "grad_norm": 0.26230862736701965, "learning_rate": 2.8341000000000002e-05, "loss": 0.013, "step": 14981 }, { "epoch": 16.53672004417449, "grad_norm": 0.5000219345092773, "learning_rate": 2.8340666666666668e-05, "loss": 0.0136, "step": 14982 }, { "epoch": 16.5378244064053, "grad_norm": 0.3770330250263214, "learning_rate": 2.8340333333333334e-05, "loss": 0.0114, "step": 14983 }, { "epoch": 16.53892876863611, "grad_norm": 0.24693261086940765, "learning_rate": 2.834e-05, "loss": 0.0136, "step": 14984 }, { "epoch": 16.540033130866924, "grad_norm": 0.15677736699581146, "learning_rate": 2.833966666666667e-05, "loss": 0.0054, "step": 14985 }, { "epoch": 16.541137493097736, "grad_norm": 0.3180865943431854, "learning_rate": 2.833933333333333e-05, "loss": 0.0108, "step": 14986 }, { "epoch": 16.542241855328548, "grad_norm": 0.2833906412124634, "learning_rate": 2.8339e-05, "loss": 0.0128, "step": 14987 }, { "epoch": 16.54334621755936, "grad_norm": 0.29539579153060913, "learning_rate": 2.8338666666666667e-05, "loss": 0.0087, "step": 14988 }, { "epoch": 16.544450579790173, "grad_norm": 0.1311633586883545, "learning_rate": 2.8338333333333333e-05, "loss": 0.0081, "step": 14989 }, { "epoch": 16.54555494202098, "grad_norm": 0.26549258828163147, "learning_rate": 2.8338000000000002e-05, "loss": 0.0134, "step": 14990 }, { "epoch": 16.546659304251794, "grad_norm": 0.3409413695335388, "learning_rate": 2.8337666666666668e-05, "loss": 0.0045, "step": 14991 }, { "epoch": 16.547763666482606, "grad_norm": 0.21470317244529724, "learning_rate": 2.8337333333333334e-05, "loss": 0.01, "step": 14992 }, { "epoch": 16.548868028713418, "grad_norm": 0.531963050365448, "learning_rate": 2.8337e-05, "loss": 
0.0166, "step": 14993 }, { "epoch": 16.54997239094423, "grad_norm": 0.24106216430664062, "learning_rate": 2.833666666666667e-05, "loss": 0.0103, "step": 14994 }, { "epoch": 16.551076753175042, "grad_norm": 0.2683860659599304, "learning_rate": 2.8336333333333332e-05, "loss": 0.0104, "step": 14995 }, { "epoch": 16.552181115405855, "grad_norm": 0.28519031405448914, "learning_rate": 2.8336e-05, "loss": 0.0126, "step": 14996 }, { "epoch": 16.553285477636663, "grad_norm": 0.7753328680992126, "learning_rate": 2.8335666666666667e-05, "loss": 0.1525, "step": 14997 }, { "epoch": 16.554389839867476, "grad_norm": 0.4533814787864685, "learning_rate": 2.8335333333333333e-05, "loss": 0.1115, "step": 14998 }, { "epoch": 16.555494202098288, "grad_norm": 0.6420403122901917, "learning_rate": 2.8335000000000002e-05, "loss": 0.1325, "step": 14999 }, { "epoch": 16.5565985643291, "grad_norm": 0.4913159906864166, "learning_rate": 2.8334666666666668e-05, "loss": 0.0838, "step": 15000 }, { "epoch": 16.5565985643291, "eval_cer": 0.10736967623195168, "eval_loss": 0.2915531098842621, "eval_runtime": 15.8742, "eval_samples_per_second": 19.151, "eval_steps_per_second": 0.63, "eval_wer": 0.3802762854950115, "step": 15000 }, { "epoch": 16.557702926559912, "grad_norm": 0.6704189777374268, "learning_rate": 2.8334333333333334e-05, "loss": 0.0667, "step": 15001 }, { "epoch": 16.558807288790724, "grad_norm": 0.7199884653091431, "learning_rate": 2.8334e-05, "loss": 0.0982, "step": 15002 }, { "epoch": 16.559911651021537, "grad_norm": 0.32933560013771057, "learning_rate": 2.833366666666667e-05, "loss": 0.0571, "step": 15003 }, { "epoch": 16.561016013252345, "grad_norm": 0.34290969371795654, "learning_rate": 2.8333333333333332e-05, "loss": 0.0792, "step": 15004 }, { "epoch": 16.562120375483158, "grad_norm": 0.2785225510597229, "learning_rate": 2.8333e-05, "loss": 0.0362, "step": 15005 }, { "epoch": 16.56322473771397, "grad_norm": 0.1510246992111206, "learning_rate": 2.833266666666667e-05, "loss": 0.0111, 
"step": 15006 }, { "epoch": 16.564329099944782, "grad_norm": 0.27306821942329407, "learning_rate": 2.8332333333333333e-05, "loss": 0.0139, "step": 15007 }, { "epoch": 16.565433462175594, "grad_norm": 0.13571326434612274, "learning_rate": 2.8332000000000002e-05, "loss": 0.0062, "step": 15008 }, { "epoch": 16.566537824406407, "grad_norm": 0.27507928013801575, "learning_rate": 2.833166666666667e-05, "loss": 0.0145, "step": 15009 }, { "epoch": 16.567642186637215, "grad_norm": 0.17677953839302063, "learning_rate": 2.8331333333333334e-05, "loss": 0.0092, "step": 15010 }, { "epoch": 16.568746548868027, "grad_norm": 0.4105384945869446, "learning_rate": 2.8331e-05, "loss": 0.0142, "step": 15011 }, { "epoch": 16.56985091109884, "grad_norm": 0.2054964005947113, "learning_rate": 2.8330666666666666e-05, "loss": 0.0066, "step": 15012 }, { "epoch": 16.570955273329652, "grad_norm": 0.20635025203227997, "learning_rate": 2.8330333333333332e-05, "loss": 0.0089, "step": 15013 }, { "epoch": 16.572059635560464, "grad_norm": 0.3229210078716278, "learning_rate": 2.833e-05, "loss": 0.0101, "step": 15014 }, { "epoch": 16.573163997791276, "grad_norm": 0.3601231873035431, "learning_rate": 2.8329666666666667e-05, "loss": 0.0162, "step": 15015 }, { "epoch": 16.57426836002209, "grad_norm": 0.21482908725738525, "learning_rate": 2.8329333333333333e-05, "loss": 0.0085, "step": 15016 }, { "epoch": 16.575372722252897, "grad_norm": 0.1405183970928192, "learning_rate": 2.8329000000000003e-05, "loss": 0.0061, "step": 15017 }, { "epoch": 16.57647708448371, "grad_norm": 0.302204430103302, "learning_rate": 2.8328666666666665e-05, "loss": 0.021, "step": 15018 }, { "epoch": 16.57758144671452, "grad_norm": 0.2426033914089203, "learning_rate": 2.8328333333333335e-05, "loss": 0.0076, "step": 15019 }, { "epoch": 16.578685808945334, "grad_norm": 0.14238600432872772, "learning_rate": 2.8328e-05, "loss": 0.0063, "step": 15020 }, { "epoch": 16.579790171176146, "grad_norm": 0.12432292848825455, "learning_rate": 
2.8327666666666666e-05, "loss": 0.0048, "step": 15021 }, { "epoch": 16.58089453340696, "grad_norm": 0.3484024405479431, "learning_rate": 2.8327333333333332e-05, "loss": 0.0544, "step": 15022 }, { "epoch": 16.58199889563777, "grad_norm": 0.2515272796154022, "learning_rate": 2.8327000000000002e-05, "loss": 0.0063, "step": 15023 }, { "epoch": 16.58310325786858, "grad_norm": 0.38083401322364807, "learning_rate": 2.8326666666666668e-05, "loss": 0.0109, "step": 15024 }, { "epoch": 16.58420762009939, "grad_norm": 0.24985036253929138, "learning_rate": 2.8326333333333334e-05, "loss": 0.0103, "step": 15025 }, { "epoch": 16.585311982330204, "grad_norm": 0.40368014574050903, "learning_rate": 2.8326000000000003e-05, "loss": 0.0111, "step": 15026 }, { "epoch": 16.586416344561016, "grad_norm": 0.2501264810562134, "learning_rate": 2.8325666666666665e-05, "loss": 0.0125, "step": 15027 }, { "epoch": 16.587520706791828, "grad_norm": 0.3013613820075989, "learning_rate": 2.8325333333333335e-05, "loss": 0.0149, "step": 15028 }, { "epoch": 16.58862506902264, "grad_norm": 0.3295087516307831, "learning_rate": 2.8325e-05, "loss": 0.0085, "step": 15029 }, { "epoch": 16.589729431253453, "grad_norm": 0.33041009306907654, "learning_rate": 2.8324666666666667e-05, "loss": 0.0121, "step": 15030 }, { "epoch": 16.59083379348426, "grad_norm": 0.5026348829269409, "learning_rate": 2.8324333333333336e-05, "loss": 0.0125, "step": 15031 }, { "epoch": 16.591938155715074, "grad_norm": 0.1931931972503662, "learning_rate": 2.8324000000000002e-05, "loss": 0.009, "step": 15032 }, { "epoch": 16.593042517945886, "grad_norm": 0.123287133872509, "learning_rate": 2.8323666666666668e-05, "loss": 0.0052, "step": 15033 }, { "epoch": 16.594146880176698, "grad_norm": 0.18088245391845703, "learning_rate": 2.8323333333333334e-05, "loss": 0.0051, "step": 15034 }, { "epoch": 16.59525124240751, "grad_norm": 0.13866527378559113, "learning_rate": 2.8323000000000003e-05, "loss": 0.0056, "step": 15035 }, { "epoch": 
16.596355604638322, "grad_norm": 0.8216783404350281, "learning_rate": 2.8322666666666666e-05, "loss": 0.0094, "step": 15036 }, { "epoch": 16.597459966869135, "grad_norm": 0.17192837595939636, "learning_rate": 2.8322333333333335e-05, "loss": 0.0061, "step": 15037 }, { "epoch": 16.598564329099943, "grad_norm": 0.15301461517810822, "learning_rate": 2.8322e-05, "loss": 0.0057, "step": 15038 }, { "epoch": 16.599668691330756, "grad_norm": 0.38847678899765015, "learning_rate": 2.8321666666666667e-05, "loss": 0.0085, "step": 15039 }, { "epoch": 16.600773053561568, "grad_norm": 0.08324775844812393, "learning_rate": 2.8321333333333336e-05, "loss": 0.0039, "step": 15040 }, { "epoch": 16.60187741579238, "grad_norm": 0.41948264837265015, "learning_rate": 2.8321e-05, "loss": 0.0071, "step": 15041 }, { "epoch": 16.602981778023192, "grad_norm": 0.8444951772689819, "learning_rate": 2.8320666666666668e-05, "loss": 0.0122, "step": 15042 }, { "epoch": 16.604086140254005, "grad_norm": 0.4287281036376953, "learning_rate": 2.8320333333333334e-05, "loss": 0.0085, "step": 15043 }, { "epoch": 16.605190502484817, "grad_norm": 0.6388842463493347, "learning_rate": 2.832e-05, "loss": 0.0095, "step": 15044 }, { "epoch": 16.606294864715625, "grad_norm": 0.7251999974250793, "learning_rate": 2.8319666666666666e-05, "loss": 0.0102, "step": 15045 }, { "epoch": 16.607399226946438, "grad_norm": 0.39745333790779114, "learning_rate": 2.8319333333333335e-05, "loss": 0.0027, "step": 15046 }, { "epoch": 16.60850358917725, "grad_norm": 0.7686826586723328, "learning_rate": 2.8318999999999998e-05, "loss": 0.1657, "step": 15047 }, { "epoch": 16.609607951408062, "grad_norm": 0.48528575897216797, "learning_rate": 2.8318666666666667e-05, "loss": 0.1324, "step": 15048 }, { "epoch": 16.610712313638874, "grad_norm": 0.4116875231266022, "learning_rate": 2.8318333333333336e-05, "loss": 0.1189, "step": 15049 }, { "epoch": 16.611816675869687, "grad_norm": 0.41520535945892334, "learning_rate": 2.8318e-05, "loss": 0.0875, 
"step": 15050 }, { "epoch": 16.612921038100495, "grad_norm": 0.5126330852508545, "learning_rate": 2.8317666666666668e-05, "loss": 0.0823, "step": 15051 }, { "epoch": 16.614025400331307, "grad_norm": 0.4833783507347107, "learning_rate": 2.8317333333333334e-05, "loss": 0.043, "step": 15052 }, { "epoch": 16.61512976256212, "grad_norm": 0.19768385589122772, "learning_rate": 2.8317e-05, "loss": 0.0337, "step": 15053 }, { "epoch": 16.616234124792932, "grad_norm": 0.28681501746177673, "learning_rate": 2.8316666666666666e-05, "loss": 0.0709, "step": 15054 }, { "epoch": 16.617338487023744, "grad_norm": 0.33609843254089355, "learning_rate": 2.8316333333333335e-05, "loss": 0.017, "step": 15055 }, { "epoch": 16.618442849254556, "grad_norm": 0.39925819635391235, "learning_rate": 2.8316e-05, "loss": 0.0392, "step": 15056 }, { "epoch": 16.61954721148537, "grad_norm": 0.2515685260295868, "learning_rate": 2.8315666666666667e-05, "loss": 0.0143, "step": 15057 }, { "epoch": 16.620651573716177, "grad_norm": 0.1936914622783661, "learning_rate": 2.8315333333333337e-05, "loss": 0.0133, "step": 15058 }, { "epoch": 16.62175593594699, "grad_norm": 0.4597851634025574, "learning_rate": 2.8315e-05, "loss": 0.0218, "step": 15059 }, { "epoch": 16.6228602981778, "grad_norm": 0.15161797404289246, "learning_rate": 2.831466666666667e-05, "loss": 0.0236, "step": 15060 }, { "epoch": 16.623964660408614, "grad_norm": 0.16434116661548615, "learning_rate": 2.8314333333333334e-05, "loss": 0.0061, "step": 15061 }, { "epoch": 16.625069022639426, "grad_norm": 0.1527886539697647, "learning_rate": 2.8314e-05, "loss": 0.0077, "step": 15062 }, { "epoch": 16.62617338487024, "grad_norm": 0.15291732549667358, "learning_rate": 2.8313666666666666e-05, "loss": 0.0069, "step": 15063 }, { "epoch": 16.62727774710105, "grad_norm": 0.48894140124320984, "learning_rate": 2.8313333333333336e-05, "loss": 0.0086, "step": 15064 }, { "epoch": 16.62838210933186, "grad_norm": 0.1424589902162552, "learning_rate": 2.8313e-05, "loss": 
0.0049, "step": 15065 }, { "epoch": 16.62948647156267, "grad_norm": 0.1290377974510193, "learning_rate": 2.8312666666666667e-05, "loss": 0.0047, "step": 15066 }, { "epoch": 16.630590833793484, "grad_norm": 0.16285006701946259, "learning_rate": 2.8312333333333337e-05, "loss": 0.006, "step": 15067 }, { "epoch": 16.631695196024296, "grad_norm": 0.39825090765953064, "learning_rate": 2.8312e-05, "loss": 0.0137, "step": 15068 }, { "epoch": 16.63279955825511, "grad_norm": 0.0988449901342392, "learning_rate": 2.831166666666667e-05, "loss": 0.0046, "step": 15069 }, { "epoch": 16.63390392048592, "grad_norm": 0.29103878140449524, "learning_rate": 2.831133333333333e-05, "loss": 0.0088, "step": 15070 }, { "epoch": 16.635008282716733, "grad_norm": 0.24244502186775208, "learning_rate": 2.8311e-05, "loss": 0.0095, "step": 15071 }, { "epoch": 16.63611264494754, "grad_norm": 0.4469853341579437, "learning_rate": 2.8310666666666666e-05, "loss": 0.0182, "step": 15072 }, { "epoch": 16.637217007178354, "grad_norm": 0.1852809637784958, "learning_rate": 2.8310333333333332e-05, "loss": 0.0084, "step": 15073 }, { "epoch": 16.638321369409166, "grad_norm": 0.19871926307678223, "learning_rate": 2.8310000000000002e-05, "loss": 0.0075, "step": 15074 }, { "epoch": 16.639425731639978, "grad_norm": 0.24947673082351685, "learning_rate": 2.8309666666666668e-05, "loss": 0.0103, "step": 15075 }, { "epoch": 16.64053009387079, "grad_norm": 0.27362439036369324, "learning_rate": 2.8309333333333334e-05, "loss": 0.01, "step": 15076 }, { "epoch": 16.641634456101603, "grad_norm": 0.16041840612888336, "learning_rate": 2.8309e-05, "loss": 0.0071, "step": 15077 }, { "epoch": 16.642738818332415, "grad_norm": 0.1830236166715622, "learning_rate": 2.830866666666667e-05, "loss": 0.0094, "step": 15078 }, { "epoch": 16.643843180563223, "grad_norm": 0.14917537569999695, "learning_rate": 2.830833333333333e-05, "loss": 0.0075, "step": 15079 }, { "epoch": 16.644947542794036, "grad_norm": 0.17210306227207184, "learning_rate": 
2.8308e-05, "loss": 0.0034, "step": 15080 }, { "epoch": 16.646051905024848, "grad_norm": 0.3065909445285797, "learning_rate": 2.830766666666667e-05, "loss": 0.033, "step": 15081 }, { "epoch": 16.64715626725566, "grad_norm": 0.16534294188022614, "learning_rate": 2.8307333333333333e-05, "loss": 0.0084, "step": 15082 }, { "epoch": 16.648260629486472, "grad_norm": 0.10246380418539047, "learning_rate": 2.8307000000000002e-05, "loss": 0.0071, "step": 15083 }, { "epoch": 16.649364991717285, "grad_norm": 0.30926644802093506, "learning_rate": 2.8306666666666668e-05, "loss": 0.0103, "step": 15084 }, { "epoch": 16.650469353948097, "grad_norm": 0.09156574308872223, "learning_rate": 2.8306333333333334e-05, "loss": 0.0047, "step": 15085 }, { "epoch": 16.651573716178905, "grad_norm": 0.503265380859375, "learning_rate": 2.8306e-05, "loss": 0.0135, "step": 15086 }, { "epoch": 16.652678078409718, "grad_norm": 0.09347275644540787, "learning_rate": 2.830566666666667e-05, "loss": 0.005, "step": 15087 }, { "epoch": 16.65378244064053, "grad_norm": 0.14064113795757294, "learning_rate": 2.830533333333333e-05, "loss": 0.0076, "step": 15088 }, { "epoch": 16.654886802871342, "grad_norm": 0.25056955218315125, "learning_rate": 2.8305e-05, "loss": 0.0069, "step": 15089 }, { "epoch": 16.655991165102154, "grad_norm": 0.2527165412902832, "learning_rate": 2.830466666666667e-05, "loss": 0.0097, "step": 15090 }, { "epoch": 16.657095527332967, "grad_norm": 0.11827138066291809, "learning_rate": 2.8304333333333333e-05, "loss": 0.0068, "step": 15091 }, { "epoch": 16.658199889563775, "grad_norm": 0.1522955298423767, "learning_rate": 2.8304000000000002e-05, "loss": 0.0048, "step": 15092 }, { "epoch": 16.659304251794588, "grad_norm": 0.2202940136194229, "learning_rate": 2.8303666666666668e-05, "loss": 0.0069, "step": 15093 }, { "epoch": 16.6604086140254, "grad_norm": 0.3062497079372406, "learning_rate": 2.8303333333333334e-05, "loss": 0.0106, "step": 15094 }, { "epoch": 16.661512976256212, "grad_norm": 
0.18402978777885437, "learning_rate": 2.8303e-05, "loss": 0.0092, "step": 15095 }, { "epoch": 16.662617338487024, "grad_norm": 0.10557939857244492, "learning_rate": 2.830266666666667e-05, "loss": 0.0025, "step": 15096 }, { "epoch": 16.663721700717836, "grad_norm": 0.5284184217453003, "learning_rate": 2.8302333333333332e-05, "loss": 0.1924, "step": 15097 }, { "epoch": 16.66482606294865, "grad_norm": 0.45245593786239624, "learning_rate": 2.8302e-05, "loss": 0.1067, "step": 15098 }, { "epoch": 16.665930425179457, "grad_norm": 0.535737156867981, "learning_rate": 2.830166666666667e-05, "loss": 0.1342, "step": 15099 }, { "epoch": 16.66703478741027, "grad_norm": 0.37948936223983765, "learning_rate": 2.8301333333333333e-05, "loss": 0.061, "step": 15100 }, { "epoch": 16.668139149641082, "grad_norm": 0.3721328675746918, "learning_rate": 2.8301000000000002e-05, "loss": 0.0561, "step": 15101 }, { "epoch": 16.669243511871894, "grad_norm": 0.3339560627937317, "learning_rate": 2.8300666666666665e-05, "loss": 0.0414, "step": 15102 }, { "epoch": 16.670347874102706, "grad_norm": 0.3122636377811432, "learning_rate": 2.8300333333333334e-05, "loss": 0.0464, "step": 15103 }, { "epoch": 16.67145223633352, "grad_norm": 0.3440014719963074, "learning_rate": 2.83e-05, "loss": 0.0343, "step": 15104 }, { "epoch": 16.67255659856433, "grad_norm": 0.28006550669670105, "learning_rate": 2.8299666666666666e-05, "loss": 0.033, "step": 15105 }, { "epoch": 16.67366096079514, "grad_norm": 0.17689555883407593, "learning_rate": 2.8299333333333335e-05, "loss": 0.0089, "step": 15106 }, { "epoch": 16.67476532302595, "grad_norm": 0.2248874455690384, "learning_rate": 2.8299e-05, "loss": 0.0278, "step": 15107 }, { "epoch": 16.675869685256764, "grad_norm": 0.24992679059505463, "learning_rate": 2.8298666666666667e-05, "loss": 0.0162, "step": 15108 }, { "epoch": 16.676974047487576, "grad_norm": 0.10639462620019913, "learning_rate": 2.8298333333333333e-05, "loss": 0.0056, "step": 15109 }, { "epoch": 
16.67807840971839, "grad_norm": 0.17933347821235657, "learning_rate": 2.8298000000000002e-05, "loss": 0.0075, "step": 15110 }, { "epoch": 16.6791827719492, "grad_norm": 0.16952569782733917, "learning_rate": 2.8297666666666665e-05, "loss": 0.0342, "step": 15111 }, { "epoch": 16.680287134180013, "grad_norm": 0.17356647551059723, "learning_rate": 2.8297333333333334e-05, "loss": 0.0091, "step": 15112 }, { "epoch": 16.68139149641082, "grad_norm": 0.15547284483909607, "learning_rate": 2.8297e-05, "loss": 0.0079, "step": 15113 }, { "epoch": 16.682495858641634, "grad_norm": 0.13989458978176117, "learning_rate": 2.8296666666666666e-05, "loss": 0.0207, "step": 15114 }, { "epoch": 16.683600220872446, "grad_norm": 0.48275846242904663, "learning_rate": 2.8296333333333336e-05, "loss": 0.0124, "step": 15115 }, { "epoch": 16.684704583103258, "grad_norm": 0.08020748198032379, "learning_rate": 2.8296e-05, "loss": 0.0042, "step": 15116 }, { "epoch": 16.68580894533407, "grad_norm": 0.22244547307491302, "learning_rate": 2.8295666666666667e-05, "loss": 0.0127, "step": 15117 }, { "epoch": 16.686913307564883, "grad_norm": 0.15102197229862213, "learning_rate": 2.8295333333333333e-05, "loss": 0.0069, "step": 15118 }, { "epoch": 16.68801766979569, "grad_norm": 0.14716072380542755, "learning_rate": 2.8295000000000003e-05, "loss": 0.0106, "step": 15119 }, { "epoch": 16.689122032026503, "grad_norm": 0.23975910246372223, "learning_rate": 2.8294666666666665e-05, "loss": 0.0072, "step": 15120 }, { "epoch": 16.690226394257316, "grad_norm": 0.12595660984516144, "learning_rate": 2.8294333333333335e-05, "loss": 0.0038, "step": 15121 }, { "epoch": 16.691330756488128, "grad_norm": 0.35556334257125854, "learning_rate": 2.8294e-05, "loss": 0.0065, "step": 15122 }, { "epoch": 16.69243511871894, "grad_norm": 0.14053252339363098, "learning_rate": 2.8293666666666666e-05, "loss": 0.0102, "step": 15123 }, { "epoch": 16.693539480949752, "grad_norm": 1.0757591724395752, "learning_rate": 2.8293333333333336e-05, 
"loss": 0.0054, "step": 15124 }, { "epoch": 16.694643843180565, "grad_norm": 0.1187322810292244, "learning_rate": 2.8293e-05, "loss": 0.0052, "step": 15125 }, { "epoch": 16.695748205411373, "grad_norm": 0.3558083474636078, "learning_rate": 2.8292666666666668e-05, "loss": 0.0163, "step": 15126 }, { "epoch": 16.696852567642186, "grad_norm": 0.25359147787094116, "learning_rate": 2.8292333333333334e-05, "loss": 0.0107, "step": 15127 }, { "epoch": 16.697956929872998, "grad_norm": 0.20265577733516693, "learning_rate": 2.8292000000000003e-05, "loss": 0.0069, "step": 15128 }, { "epoch": 16.69906129210381, "grad_norm": 0.16149328649044037, "learning_rate": 2.8291666666666665e-05, "loss": 0.0161, "step": 15129 }, { "epoch": 16.700165654334622, "grad_norm": 0.26384469866752625, "learning_rate": 2.8291333333333335e-05, "loss": 0.0086, "step": 15130 }, { "epoch": 16.701270016565434, "grad_norm": 0.19246292114257812, "learning_rate": 2.8291e-05, "loss": 0.0071, "step": 15131 }, { "epoch": 16.702374378796247, "grad_norm": 0.2668546736240387, "learning_rate": 2.8290666666666667e-05, "loss": 0.009, "step": 15132 }, { "epoch": 16.703478741027055, "grad_norm": 0.20513102412223816, "learning_rate": 2.8290333333333336e-05, "loss": 0.0076, "step": 15133 }, { "epoch": 16.704583103257868, "grad_norm": 0.5183241963386536, "learning_rate": 2.829e-05, "loss": 0.0222, "step": 15134 }, { "epoch": 16.70568746548868, "grad_norm": 0.16906514763832092, "learning_rate": 2.8289666666666668e-05, "loss": 0.0066, "step": 15135 }, { "epoch": 16.706791827719492, "grad_norm": 0.19610732793807983, "learning_rate": 2.8289333333333334e-05, "loss": 0.005, "step": 15136 }, { "epoch": 16.707896189950304, "grad_norm": 0.08934718370437622, "learning_rate": 2.8289e-05, "loss": 0.0065, "step": 15137 }, { "epoch": 16.709000552181116, "grad_norm": 0.4411986172199249, "learning_rate": 2.8288666666666666e-05, "loss": 0.0095, "step": 15138 }, { "epoch": 16.71010491441193, "grad_norm": 0.630896270275116, "learning_rate": 
2.8288333333333335e-05, "loss": 0.0138, "step": 15139 }, { "epoch": 16.711209276642737, "grad_norm": 0.3446749150753021, "learning_rate": 2.8288e-05, "loss": 0.0061, "step": 15140 }, { "epoch": 16.71231363887355, "grad_norm": 0.19248352944850922, "learning_rate": 2.8287666666666667e-05, "loss": 0.0104, "step": 15141 }, { "epoch": 16.713418001104362, "grad_norm": 0.4544544816017151, "learning_rate": 2.8287333333333336e-05, "loss": 0.0042, "step": 15142 }, { "epoch": 16.714522363335174, "grad_norm": 0.36778461933135986, "learning_rate": 2.8287e-05, "loss": 0.0048, "step": 15143 }, { "epoch": 16.715626725565986, "grad_norm": 0.2646329700946808, "learning_rate": 2.8286666666666668e-05, "loss": 0.007, "step": 15144 }, { "epoch": 16.7167310877968, "grad_norm": 0.21389026939868927, "learning_rate": 2.8286333333333334e-05, "loss": 0.0088, "step": 15145 }, { "epoch": 16.71783545002761, "grad_norm": 0.24881824851036072, "learning_rate": 2.8286e-05, "loss": 0.0062, "step": 15146 }, { "epoch": 16.71893981225842, "grad_norm": 0.5823724269866943, "learning_rate": 2.8285666666666666e-05, "loss": 0.1418, "step": 15147 }, { "epoch": 16.72004417448923, "grad_norm": 0.5349770784378052, "learning_rate": 2.8285333333333335e-05, "loss": 0.1138, "step": 15148 }, { "epoch": 16.721148536720044, "grad_norm": 0.49454405903816223, "learning_rate": 2.8285e-05, "loss": 0.081, "step": 15149 }, { "epoch": 16.722252898950856, "grad_norm": 0.4484102725982666, "learning_rate": 2.8284666666666667e-05, "loss": 0.1247, "step": 15150 }, { "epoch": 16.72335726118167, "grad_norm": 0.35637030005455017, "learning_rate": 2.8284333333333336e-05, "loss": 0.0634, "step": 15151 }, { "epoch": 16.72446162341248, "grad_norm": 0.3824262022972107, "learning_rate": 2.8284e-05, "loss": 0.0474, "step": 15152 }, { "epoch": 16.725565985643293, "grad_norm": 0.3306077718734741, "learning_rate": 2.8283666666666668e-05, "loss": 0.0858, "step": 15153 }, { "epoch": 16.7266703478741, "grad_norm": 0.37182724475860596, 
"learning_rate": 2.8283333333333334e-05, "loss": 0.0258, "step": 15154 }, { "epoch": 16.727774710104914, "grad_norm": 0.17994926869869232, "learning_rate": 2.8283e-05, "loss": 0.0296, "step": 15155 }, { "epoch": 16.728879072335726, "grad_norm": 0.2955213487148285, "learning_rate": 2.828266666666667e-05, "loss": 0.03, "step": 15156 }, { "epoch": 16.729983434566538, "grad_norm": 0.8661285638809204, "learning_rate": 2.8282333333333335e-05, "loss": 0.0235, "step": 15157 }, { "epoch": 16.73108779679735, "grad_norm": 0.31788742542266846, "learning_rate": 2.8282e-05, "loss": 0.0118, "step": 15158 }, { "epoch": 16.732192159028163, "grad_norm": 0.2583414912223816, "learning_rate": 2.8281666666666667e-05, "loss": 0.031, "step": 15159 }, { "epoch": 16.73329652125897, "grad_norm": 0.1698158085346222, "learning_rate": 2.8281333333333337e-05, "loss": 0.0086, "step": 15160 }, { "epoch": 16.734400883489783, "grad_norm": 0.31163719296455383, "learning_rate": 2.8281e-05, "loss": 0.012, "step": 15161 }, { "epoch": 16.735505245720596, "grad_norm": 0.15863420069217682, "learning_rate": 2.828066666666667e-05, "loss": 0.006, "step": 15162 }, { "epoch": 16.736609607951408, "grad_norm": 0.3528413772583008, "learning_rate": 2.828033333333333e-05, "loss": 0.0137, "step": 15163 }, { "epoch": 16.73771397018222, "grad_norm": 0.17214155197143555, "learning_rate": 2.828e-05, "loss": 0.0212, "step": 15164 }, { "epoch": 16.738818332413032, "grad_norm": 0.10877782851457596, "learning_rate": 2.827966666666667e-05, "loss": 0.0042, "step": 15165 }, { "epoch": 16.739922694643845, "grad_norm": 0.15114569664001465, "learning_rate": 2.8279333333333332e-05, "loss": 0.0066, "step": 15166 }, { "epoch": 16.741027056874653, "grad_norm": 0.2896096706390381, "learning_rate": 2.8279e-05, "loss": 0.0124, "step": 15167 }, { "epoch": 16.742131419105466, "grad_norm": 0.19199179112911224, "learning_rate": 2.8278666666666667e-05, "loss": 0.0125, "step": 15168 }, { "epoch": 16.743235781336278, "grad_norm": 
0.1173873096704483, "learning_rate": 2.8278333333333333e-05, "loss": 0.0063, "step": 15169 }, { "epoch": 16.74434014356709, "grad_norm": 0.16583028435707092, "learning_rate": 2.8278e-05, "loss": 0.0088, "step": 15170 }, { "epoch": 16.745444505797902, "grad_norm": 0.33120080828666687, "learning_rate": 2.827766666666667e-05, "loss": 0.0061, "step": 15171 }, { "epoch": 16.746548868028714, "grad_norm": 0.1677480787038803, "learning_rate": 2.827733333333333e-05, "loss": 0.0106, "step": 15172 }, { "epoch": 16.747653230259527, "grad_norm": 0.14715328812599182, "learning_rate": 2.8277e-05, "loss": 0.011, "step": 15173 }, { "epoch": 16.748757592490335, "grad_norm": 0.3357595205307007, "learning_rate": 2.827666666666667e-05, "loss": 0.0082, "step": 15174 }, { "epoch": 16.749861954721148, "grad_norm": 0.08697746694087982, "learning_rate": 2.8276333333333332e-05, "loss": 0.0051, "step": 15175 }, { "epoch": 16.75096631695196, "grad_norm": 0.3927151560783386, "learning_rate": 2.8276e-05, "loss": 0.0043, "step": 15176 }, { "epoch": 16.752070679182772, "grad_norm": 0.15406222641468048, "learning_rate": 2.8275666666666668e-05, "loss": 0.007, "step": 15177 }, { "epoch": 16.753175041413584, "grad_norm": 0.18926076591014862, "learning_rate": 2.8275333333333334e-05, "loss": 0.0082, "step": 15178 }, { "epoch": 16.754279403644396, "grad_norm": 0.1724272221326828, "learning_rate": 2.8275e-05, "loss": 0.0107, "step": 15179 }, { "epoch": 16.75538376587521, "grad_norm": 0.34393906593322754, "learning_rate": 2.827466666666667e-05, "loss": 0.0063, "step": 15180 }, { "epoch": 16.756488128106017, "grad_norm": 0.15886546671390533, "learning_rate": 2.8274333333333335e-05, "loss": 0.009, "step": 15181 }, { "epoch": 16.75759249033683, "grad_norm": 0.15125179290771484, "learning_rate": 2.8274e-05, "loss": 0.0093, "step": 15182 }, { "epoch": 16.758696852567642, "grad_norm": 0.2581733763217926, "learning_rate": 2.827366666666667e-05, "loss": 0.0144, "step": 15183 }, { "epoch": 16.759801214798454, 
"grad_norm": 0.22605480253696442, "learning_rate": 2.8273333333333333e-05, "loss": 0.0109, "step": 15184 }, { "epoch": 16.760905577029266, "grad_norm": 0.19325287640094757, "learning_rate": 2.8273000000000002e-05, "loss": 0.0065, "step": 15185 }, { "epoch": 16.76200993926008, "grad_norm": 0.10513599216938019, "learning_rate": 2.8272666666666668e-05, "loss": 0.0048, "step": 15186 }, { "epoch": 16.763114301490887, "grad_norm": 0.19719752669334412, "learning_rate": 2.8272333333333334e-05, "loss": 0.0043, "step": 15187 }, { "epoch": 16.7642186637217, "grad_norm": 0.23410290479660034, "learning_rate": 2.8272e-05, "loss": 0.0054, "step": 15188 }, { "epoch": 16.76532302595251, "grad_norm": 0.22075942158699036, "learning_rate": 2.827166666666667e-05, "loss": 0.0095, "step": 15189 }, { "epoch": 16.766427388183324, "grad_norm": 0.1921127885580063, "learning_rate": 2.8271333333333335e-05, "loss": 0.0109, "step": 15190 }, { "epoch": 16.767531750414136, "grad_norm": 0.3033154606819153, "learning_rate": 2.8271e-05, "loss": 0.0108, "step": 15191 }, { "epoch": 16.76863611264495, "grad_norm": 0.15654104948043823, "learning_rate": 2.8270666666666667e-05, "loss": 0.0056, "step": 15192 }, { "epoch": 16.76974047487576, "grad_norm": 0.6666343212127686, "learning_rate": 2.8270333333333333e-05, "loss": 0.0201, "step": 15193 }, { "epoch": 16.77084483710657, "grad_norm": 0.15003331005573273, "learning_rate": 2.8270000000000002e-05, "loss": 0.0047, "step": 15194 }, { "epoch": 16.77194919933738, "grad_norm": 0.28932279348373413, "learning_rate": 2.8269666666666665e-05, "loss": 0.0158, "step": 15195 }, { "epoch": 16.773053561568194, "grad_norm": 0.3957061171531677, "learning_rate": 2.8269333333333334e-05, "loss": 0.0094, "step": 15196 }, { "epoch": 16.774157923799006, "grad_norm": 0.5340303182601929, "learning_rate": 2.8269e-05, "loss": 0.1581, "step": 15197 }, { "epoch": 16.775262286029818, "grad_norm": 0.7909988760948181, "learning_rate": 2.8268666666666666e-05, "loss": 0.1168, "step": 15198 
}, { "epoch": 16.77636664826063, "grad_norm": 0.5538522601127625, "learning_rate": 2.8268333333333335e-05, "loss": 0.1304, "step": 15199 }, { "epoch": 16.777471010491443, "grad_norm": 0.3698733150959015, "learning_rate": 2.8268e-05, "loss": 0.109, "step": 15200 }, { "epoch": 16.77857537272225, "grad_norm": 0.3471759855747223, "learning_rate": 2.8267666666666667e-05, "loss": 0.0728, "step": 15201 }, { "epoch": 16.779679734953064, "grad_norm": 0.5376116633415222, "learning_rate": 2.8267333333333333e-05, "loss": 0.0364, "step": 15202 }, { "epoch": 16.780784097183876, "grad_norm": 0.2736150324344635, "learning_rate": 2.8267000000000002e-05, "loss": 0.0183, "step": 15203 }, { "epoch": 16.781888459414688, "grad_norm": 0.3693276047706604, "learning_rate": 2.8266666666666665e-05, "loss": 0.0627, "step": 15204 }, { "epoch": 16.7829928216455, "grad_norm": 0.3558104336261749, "learning_rate": 2.8266333333333334e-05, "loss": 0.0245, "step": 15205 }, { "epoch": 16.784097183876312, "grad_norm": 0.33313795924186707, "learning_rate": 2.8266000000000003e-05, "loss": 0.0152, "step": 15206 }, { "epoch": 16.785201546107125, "grad_norm": 0.16825684905052185, "learning_rate": 2.8265666666666666e-05, "loss": 0.0082, "step": 15207 }, { "epoch": 16.786305908337933, "grad_norm": 0.2853940725326538, "learning_rate": 2.8265333333333335e-05, "loss": 0.0041, "step": 15208 }, { "epoch": 16.787410270568746, "grad_norm": 0.1757047474384308, "learning_rate": 2.8265e-05, "loss": 0.0101, "step": 15209 }, { "epoch": 16.788514632799558, "grad_norm": 0.19764791429042816, "learning_rate": 2.8264666666666667e-05, "loss": 0.0438, "step": 15210 }, { "epoch": 16.78961899503037, "grad_norm": 0.11870365589857101, "learning_rate": 2.8264333333333333e-05, "loss": 0.0067, "step": 15211 }, { "epoch": 16.790723357261182, "grad_norm": 0.22316303849220276, "learning_rate": 2.8264000000000002e-05, "loss": 0.0029, "step": 15212 }, { "epoch": 16.791827719491994, "grad_norm": 0.1927640587091446, "learning_rate": 
2.8263666666666665e-05, "loss": 0.0081, "step": 15213 }, { "epoch": 16.792932081722807, "grad_norm": 0.2980983555316925, "learning_rate": 2.8263333333333334e-05, "loss": 0.0097, "step": 15214 }, { "epoch": 16.794036443953615, "grad_norm": 0.40634408593177795, "learning_rate": 2.8263000000000004e-05, "loss": 0.0152, "step": 15215 }, { "epoch": 16.795140806184428, "grad_norm": 0.25870025157928467, "learning_rate": 2.8262666666666666e-05, "loss": 0.0078, "step": 15216 }, { "epoch": 16.79624516841524, "grad_norm": 0.09002293646335602, "learning_rate": 2.8262333333333336e-05, "loss": 0.0055, "step": 15217 }, { "epoch": 16.797349530646052, "grad_norm": 0.12751393020153046, "learning_rate": 2.8262e-05, "loss": 0.0075, "step": 15218 }, { "epoch": 16.798453892876864, "grad_norm": 0.3113479018211365, "learning_rate": 2.8261666666666667e-05, "loss": 0.0068, "step": 15219 }, { "epoch": 16.799558255107677, "grad_norm": 0.15314514935016632, "learning_rate": 2.8261333333333333e-05, "loss": 0.007, "step": 15220 }, { "epoch": 16.80066261733849, "grad_norm": 0.25581151247024536, "learning_rate": 2.8261e-05, "loss": 0.0084, "step": 15221 }, { "epoch": 16.801766979569297, "grad_norm": 0.23542924225330353, "learning_rate": 2.8260666666666665e-05, "loss": 0.0069, "step": 15222 }, { "epoch": 16.80287134180011, "grad_norm": 0.14757375419139862, "learning_rate": 2.8260333333333335e-05, "loss": 0.0073, "step": 15223 }, { "epoch": 16.803975704030922, "grad_norm": 0.46147722005844116, "learning_rate": 2.826e-05, "loss": 0.0101, "step": 15224 }, { "epoch": 16.805080066261734, "grad_norm": 0.4729516804218292, "learning_rate": 2.8259666666666666e-05, "loss": 0.0101, "step": 15225 }, { "epoch": 16.806184428492546, "grad_norm": 0.3414578139781952, "learning_rate": 2.8259333333333336e-05, "loss": 0.0098, "step": 15226 }, { "epoch": 16.80728879072336, "grad_norm": 0.44648507237434387, "learning_rate": 2.8258999999999998e-05, "loss": 0.01, "step": 15227 }, { "epoch": 16.808393152954167, "grad_norm": 
0.2369479089975357, "learning_rate": 2.8258666666666668e-05, "loss": 0.0098, "step": 15228 }, { "epoch": 16.80949751518498, "grad_norm": 0.23463116586208344, "learning_rate": 2.8258333333333334e-05, "loss": 0.0092, "step": 15229 }, { "epoch": 16.81060187741579, "grad_norm": 0.30148595571517944, "learning_rate": 2.8258e-05, "loss": 0.0044, "step": 15230 }, { "epoch": 16.811706239646604, "grad_norm": 0.05577345937490463, "learning_rate": 2.825766666666667e-05, "loss": 0.0018, "step": 15231 }, { "epoch": 16.812810601877416, "grad_norm": 0.3207435607910156, "learning_rate": 2.8257333333333335e-05, "loss": 0.014, "step": 15232 }, { "epoch": 16.81391496410823, "grad_norm": 0.14446188509464264, "learning_rate": 2.8257e-05, "loss": 0.0042, "step": 15233 }, { "epoch": 16.81501932633904, "grad_norm": 0.3434022068977356, "learning_rate": 2.8256666666666667e-05, "loss": 0.0084, "step": 15234 }, { "epoch": 16.81612368856985, "grad_norm": 0.18150518834590912, "learning_rate": 2.8256333333333336e-05, "loss": 0.0051, "step": 15235 }, { "epoch": 16.81722805080066, "grad_norm": 0.784012496471405, "learning_rate": 2.8256e-05, "loss": 0.0056, "step": 15236 }, { "epoch": 16.818332413031474, "grad_norm": 0.11621299386024475, "learning_rate": 2.8255666666666668e-05, "loss": 0.0037, "step": 15237 }, { "epoch": 16.819436775262286, "grad_norm": 0.15483851730823517, "learning_rate": 2.8255333333333334e-05, "loss": 0.0052, "step": 15238 }, { "epoch": 16.820541137493098, "grad_norm": 0.32077136635780334, "learning_rate": 2.8255e-05, "loss": 0.0098, "step": 15239 }, { "epoch": 16.82164549972391, "grad_norm": 0.17004893720149994, "learning_rate": 2.825466666666667e-05, "loss": 0.004, "step": 15240 }, { "epoch": 16.822749861954723, "grad_norm": 0.4943118691444397, "learning_rate": 2.8254333333333335e-05, "loss": 0.0141, "step": 15241 }, { "epoch": 16.82385422418553, "grad_norm": 0.28237104415893555, "learning_rate": 2.8254e-05, "loss": 0.0077, "step": 15242 }, { "epoch": 16.824958586416344, 
"grad_norm": 0.9479579329490662, "learning_rate": 2.8253666666666667e-05, "loss": 0.0074, "step": 15243 }, { "epoch": 16.826062948647156, "grad_norm": 0.42299970984458923, "learning_rate": 2.8253333333333336e-05, "loss": 0.0085, "step": 15244 }, { "epoch": 16.827167310877968, "grad_norm": 0.4644505977630615, "learning_rate": 2.8253e-05, "loss": 0.0057, "step": 15245 }, { "epoch": 16.82827167310878, "grad_norm": 0.6779749989509583, "learning_rate": 2.8252666666666668e-05, "loss": 0.0192, "step": 15246 }, { "epoch": 16.829376035339592, "grad_norm": 0.8062403798103333, "learning_rate": 2.8252333333333334e-05, "loss": 0.1948, "step": 15247 }, { "epoch": 16.830480397570405, "grad_norm": 0.5149634480476379, "learning_rate": 2.8252e-05, "loss": 0.1138, "step": 15248 }, { "epoch": 16.831584759801213, "grad_norm": 0.4797361195087433, "learning_rate": 2.825166666666667e-05, "loss": 0.1304, "step": 15249 }, { "epoch": 16.832689122032026, "grad_norm": 0.35537615418434143, "learning_rate": 2.8251333333333335e-05, "loss": 0.0666, "step": 15250 }, { "epoch": 16.833793484262838, "grad_norm": 0.3886207640171051, "learning_rate": 2.8251e-05, "loss": 0.0539, "step": 15251 }, { "epoch": 16.83489784649365, "grad_norm": 0.4665740430355072, "learning_rate": 2.8250666666666667e-05, "loss": 0.1092, "step": 15252 }, { "epoch": 16.836002208724462, "grad_norm": 0.4003702700138092, "learning_rate": 2.8250333333333333e-05, "loss": 0.0463, "step": 15253 }, { "epoch": 16.837106570955275, "grad_norm": 0.23947300016880035, "learning_rate": 2.825e-05, "loss": 0.0384, "step": 15254 }, { "epoch": 16.838210933186087, "grad_norm": 0.12661093473434448, "learning_rate": 2.8249666666666668e-05, "loss": 0.0098, "step": 15255 }, { "epoch": 16.839315295416895, "grad_norm": 0.2447076290845871, "learning_rate": 2.8249333333333334e-05, "loss": 0.0186, "step": 15256 }, { "epoch": 16.840419657647708, "grad_norm": 0.3614998757839203, "learning_rate": 2.8249e-05, "loss": 0.0139, "step": 15257 }, { "epoch": 
16.84152401987852, "grad_norm": 0.1861700713634491, "learning_rate": 2.824866666666667e-05, "loss": 0.0102, "step": 15258 }, { "epoch": 16.842628382109332, "grad_norm": 0.3453906178474426, "learning_rate": 2.8248333333333332e-05, "loss": 0.0138, "step": 15259 }, { "epoch": 16.843732744340144, "grad_norm": 0.45895352959632874, "learning_rate": 2.8248e-05, "loss": 0.0168, "step": 15260 }, { "epoch": 16.844837106570957, "grad_norm": 0.20898041129112244, "learning_rate": 2.8247666666666667e-05, "loss": 0.0066, "step": 15261 }, { "epoch": 16.84594146880177, "grad_norm": 0.40693429112434387, "learning_rate": 2.8247333333333333e-05, "loss": 0.0141, "step": 15262 }, { "epoch": 16.847045831032577, "grad_norm": 0.28538596630096436, "learning_rate": 2.8247e-05, "loss": 0.0228, "step": 15263 }, { "epoch": 16.84815019326339, "grad_norm": 0.21724587678909302, "learning_rate": 2.824666666666667e-05, "loss": 0.0083, "step": 15264 }, { "epoch": 16.849254555494202, "grad_norm": 0.13561682403087616, "learning_rate": 2.8246333333333334e-05, "loss": 0.0066, "step": 15265 }, { "epoch": 16.850358917725014, "grad_norm": 0.13009966909885406, "learning_rate": 2.8246e-05, "loss": 0.0095, "step": 15266 }, { "epoch": 16.851463279955826, "grad_norm": 0.3208271265029907, "learning_rate": 2.824566666666667e-05, "loss": 0.0181, "step": 15267 }, { "epoch": 16.85256764218664, "grad_norm": 0.25945112109184265, "learning_rate": 2.8245333333333332e-05, "loss": 0.0146, "step": 15268 }, { "epoch": 16.853672004417447, "grad_norm": 0.27240023016929626, "learning_rate": 2.8245e-05, "loss": 0.008, "step": 15269 }, { "epoch": 16.85477636664826, "grad_norm": 0.1004939004778862, "learning_rate": 2.8244666666666667e-05, "loss": 0.0358, "step": 15270 }, { "epoch": 16.85588072887907, "grad_norm": 0.1578816920518875, "learning_rate": 2.8244333333333333e-05, "loss": 0.0064, "step": 15271 }, { "epoch": 16.856985091109884, "grad_norm": 0.12888213992118835, "learning_rate": 2.8244e-05, "loss": 0.0069, "step": 15272 }, 
{ "epoch": 16.858089453340696, "grad_norm": 0.23442748188972473, "learning_rate": 2.824366666666667e-05, "loss": 0.0084, "step": 15273 }, { "epoch": 16.85919381557151, "grad_norm": 0.24461013078689575, "learning_rate": 2.8243333333333335e-05, "loss": 0.0122, "step": 15274 }, { "epoch": 16.86029817780232, "grad_norm": 0.1854238212108612, "learning_rate": 2.8243e-05, "loss": 0.0088, "step": 15275 }, { "epoch": 16.86140254003313, "grad_norm": 0.17941159009933472, "learning_rate": 2.824266666666667e-05, "loss": 0.0057, "step": 15276 }, { "epoch": 16.86250690226394, "grad_norm": 0.2637298107147217, "learning_rate": 2.8242333333333332e-05, "loss": 0.0117, "step": 15277 }, { "epoch": 16.863611264494754, "grad_norm": 1.1105550527572632, "learning_rate": 2.8242e-05, "loss": 0.02, "step": 15278 }, { "epoch": 16.864715626725566, "grad_norm": 0.37578973174095154, "learning_rate": 2.8241666666666668e-05, "loss": 0.0135, "step": 15279 }, { "epoch": 16.86581998895638, "grad_norm": 0.19240786135196686, "learning_rate": 2.8241333333333334e-05, "loss": 0.0087, "step": 15280 }, { "epoch": 16.86692435118719, "grad_norm": 0.17558617889881134, "learning_rate": 2.8241000000000003e-05, "loss": 0.0045, "step": 15281 }, { "epoch": 16.868028713418003, "grad_norm": 1.1563595533370972, "learning_rate": 2.8240666666666665e-05, "loss": 0.0087, "step": 15282 }, { "epoch": 16.86913307564881, "grad_norm": 0.7101010680198669, "learning_rate": 2.8240333333333335e-05, "loss": 0.0164, "step": 15283 }, { "epoch": 16.870237437879624, "grad_norm": 0.1691044121980667, "learning_rate": 2.824e-05, "loss": 0.0093, "step": 15284 }, { "epoch": 16.871341800110436, "grad_norm": 0.1676982045173645, "learning_rate": 2.8239666666666667e-05, "loss": 0.0057, "step": 15285 }, { "epoch": 16.872446162341248, "grad_norm": 0.12052484601736069, "learning_rate": 2.8239333333333333e-05, "loss": 0.005, "step": 15286 }, { "epoch": 16.87355052457206, "grad_norm": 0.16105857491493225, "learning_rate": 2.8239000000000002e-05, 
"loss": 0.006, "step": 15287 }, { "epoch": 16.874654886802873, "grad_norm": 0.2460283786058426, "learning_rate": 2.8238666666666664e-05, "loss": 0.0067, "step": 15288 }, { "epoch": 16.875759249033685, "grad_norm": 0.2387964427471161, "learning_rate": 2.8238333333333334e-05, "loss": 0.0091, "step": 15289 }, { "epoch": 16.876863611264493, "grad_norm": 0.9428918361663818, "learning_rate": 2.8238000000000003e-05, "loss": 0.0211, "step": 15290 }, { "epoch": 16.877967973495306, "grad_norm": 0.38853320479393005, "learning_rate": 2.8237666666666666e-05, "loss": 0.0093, "step": 15291 }, { "epoch": 16.879072335726118, "grad_norm": 0.16747955977916718, "learning_rate": 2.8237333333333335e-05, "loss": 0.0052, "step": 15292 }, { "epoch": 16.88017669795693, "grad_norm": 0.23822049796581268, "learning_rate": 2.8237e-05, "loss": 0.0074, "step": 15293 }, { "epoch": 16.881281060187742, "grad_norm": 0.22415781021118164, "learning_rate": 2.8236666666666667e-05, "loss": 0.014, "step": 15294 }, { "epoch": 16.882385422418555, "grad_norm": 0.5748472809791565, "learning_rate": 2.8236333333333333e-05, "loss": 0.0165, "step": 15295 }, { "epoch": 16.883489784649363, "grad_norm": 0.23396745324134827, "learning_rate": 2.8236000000000002e-05, "loss": 0.0052, "step": 15296 }, { "epoch": 16.884594146880175, "grad_norm": 0.4698010981082916, "learning_rate": 2.8235666666666665e-05, "loss": 0.1255, "step": 15297 }, { "epoch": 16.885698509110988, "grad_norm": 0.4057261049747467, "learning_rate": 2.8235333333333334e-05, "loss": 0.1238, "step": 15298 }, { "epoch": 16.8868028713418, "grad_norm": 0.537814199924469, "learning_rate": 2.8235000000000003e-05, "loss": 0.1534, "step": 15299 }, { "epoch": 16.887907233572612, "grad_norm": 0.6040914058685303, "learning_rate": 2.8234666666666666e-05, "loss": 0.0626, "step": 15300 }, { "epoch": 16.889011595803424, "grad_norm": 0.38949114084243774, "learning_rate": 2.8234333333333335e-05, "loss": 0.0703, "step": 15301 }, { "epoch": 16.890115958034237, "grad_norm": 
0.4244776964187622, "learning_rate": 2.8234e-05, "loss": 0.0832, "step": 15302 }, { "epoch": 16.891220320265045, "grad_norm": 0.37403830885887146, "learning_rate": 2.8233666666666667e-05, "loss": 0.0433, "step": 15303 }, { "epoch": 16.892324682495858, "grad_norm": 0.18622922897338867, "learning_rate": 2.8233333333333333e-05, "loss": 0.0627, "step": 15304 }, { "epoch": 16.89342904472667, "grad_norm": 0.2650738060474396, "learning_rate": 2.8233000000000002e-05, "loss": 0.0313, "step": 15305 }, { "epoch": 16.894533406957482, "grad_norm": 0.19431661069393158, "learning_rate": 2.8232666666666668e-05, "loss": 0.0106, "step": 15306 }, { "epoch": 16.895637769188294, "grad_norm": 0.18642613291740417, "learning_rate": 2.8232333333333334e-05, "loss": 0.0137, "step": 15307 }, { "epoch": 16.896742131419106, "grad_norm": 0.14091134071350098, "learning_rate": 2.8232000000000003e-05, "loss": 0.0066, "step": 15308 }, { "epoch": 16.89784649364992, "grad_norm": 0.2506815493106842, "learning_rate": 2.8231666666666666e-05, "loss": 0.009, "step": 15309 }, { "epoch": 16.898950855880727, "grad_norm": 0.42519867420196533, "learning_rate": 2.8231333333333335e-05, "loss": 0.0175, "step": 15310 }, { "epoch": 16.90005521811154, "grad_norm": 0.2621273100376129, "learning_rate": 2.8231e-05, "loss": 0.0093, "step": 15311 }, { "epoch": 16.90115958034235, "grad_norm": 0.16863560676574707, "learning_rate": 2.8230666666666667e-05, "loss": 0.0071, "step": 15312 }, { "epoch": 16.902263942573164, "grad_norm": 0.12856963276863098, "learning_rate": 2.8230333333333333e-05, "loss": 0.0077, "step": 15313 }, { "epoch": 16.903368304803976, "grad_norm": 0.11966452747583389, "learning_rate": 2.823e-05, "loss": 0.0078, "step": 15314 }, { "epoch": 16.90447266703479, "grad_norm": 0.19569100439548492, "learning_rate": 2.822966666666667e-05, "loss": 0.0086, "step": 15315 }, { "epoch": 16.9055770292656, "grad_norm": 0.29003214836120605, "learning_rate": 2.8229333333333334e-05, "loss": 0.0065, "step": 15316 }, { 
"epoch": 16.90668139149641, "grad_norm": 0.3074190616607666, "learning_rate": 2.8229e-05, "loss": 0.0118, "step": 15317 }, { "epoch": 16.90778575372722, "grad_norm": 0.17756156623363495, "learning_rate": 2.8228666666666666e-05, "loss": 0.0089, "step": 15318 }, { "epoch": 16.908890115958034, "grad_norm": 0.29330959916114807, "learning_rate": 2.8228333333333336e-05, "loss": 0.0074, "step": 15319 }, { "epoch": 16.909994478188846, "grad_norm": 0.37775155901908875, "learning_rate": 2.8227999999999998e-05, "loss": 0.0078, "step": 15320 }, { "epoch": 16.91109884041966, "grad_norm": 0.28650331497192383, "learning_rate": 2.8227666666666667e-05, "loss": 0.0096, "step": 15321 }, { "epoch": 16.91220320265047, "grad_norm": 0.0817563533782959, "learning_rate": 2.8227333333333333e-05, "loss": 0.0026, "step": 15322 }, { "epoch": 16.913307564881283, "grad_norm": 0.17147551476955414, "learning_rate": 2.8227e-05, "loss": 0.0076, "step": 15323 }, { "epoch": 16.91441192711209, "grad_norm": 0.13168707489967346, "learning_rate": 2.822666666666667e-05, "loss": 0.0049, "step": 15324 }, { "epoch": 16.915516289342904, "grad_norm": 0.5798012018203735, "learning_rate": 2.8226333333333335e-05, "loss": 0.0063, "step": 15325 }, { "epoch": 16.916620651573716, "grad_norm": 0.23393651843070984, "learning_rate": 2.8226e-05, "loss": 0.0085, "step": 15326 }, { "epoch": 16.917725013804528, "grad_norm": 0.4122508764266968, "learning_rate": 2.8225666666666666e-05, "loss": 0.016, "step": 15327 }, { "epoch": 16.91882937603534, "grad_norm": 0.3077077567577362, "learning_rate": 2.8225333333333336e-05, "loss": 0.0142, "step": 15328 }, { "epoch": 16.919933738266153, "grad_norm": 0.23127815127372742, "learning_rate": 2.8224999999999998e-05, "loss": 0.0086, "step": 15329 }, { "epoch": 16.921038100496965, "grad_norm": 0.14096488058567047, "learning_rate": 2.8224666666666668e-05, "loss": 0.0046, "step": 15330 }, { "epoch": 16.922142462727773, "grad_norm": 0.32209116220474243, "learning_rate": 
2.8224333333333337e-05, "loss": 0.0076, "step": 15331 }, { "epoch": 16.923246824958586, "grad_norm": 0.441406786441803, "learning_rate": 2.8224e-05, "loss": 0.0054, "step": 15332 }, { "epoch": 16.924351187189398, "grad_norm": 0.424091637134552, "learning_rate": 2.822366666666667e-05, "loss": 0.0125, "step": 15333 }, { "epoch": 16.92545554942021, "grad_norm": 0.21607060730457306, "learning_rate": 2.8223333333333335e-05, "loss": 0.0075, "step": 15334 }, { "epoch": 16.926559911651022, "grad_norm": 0.4299065172672272, "learning_rate": 2.8223e-05, "loss": 0.0112, "step": 15335 }, { "epoch": 16.927664273881835, "grad_norm": 0.21350052952766418, "learning_rate": 2.8222666666666667e-05, "loss": 0.0077, "step": 15336 }, { "epoch": 16.928768636112643, "grad_norm": 0.22524228692054749, "learning_rate": 2.8222333333333336e-05, "loss": 0.0102, "step": 15337 }, { "epoch": 16.929872998343455, "grad_norm": 0.4264694154262543, "learning_rate": 2.8222e-05, "loss": 0.0124, "step": 15338 }, { "epoch": 16.930977360574268, "grad_norm": 0.19076839089393616, "learning_rate": 2.8221666666666668e-05, "loss": 0.0043, "step": 15339 }, { "epoch": 16.93208172280508, "grad_norm": 0.2986499071121216, "learning_rate": 2.8221333333333337e-05, "loss": 0.007, "step": 15340 }, { "epoch": 16.933186085035892, "grad_norm": 0.18247313797473907, "learning_rate": 2.8221e-05, "loss": 0.0064, "step": 15341 }, { "epoch": 16.934290447266704, "grad_norm": 0.18773382902145386, "learning_rate": 2.822066666666667e-05, "loss": 0.009, "step": 15342 }, { "epoch": 16.935394809497517, "grad_norm": 0.22916971147060394, "learning_rate": 2.822033333333333e-05, "loss": 0.0128, "step": 15343 }, { "epoch": 16.936499171728325, "grad_norm": 1.617077112197876, "learning_rate": 2.822e-05, "loss": 0.0204, "step": 15344 }, { "epoch": 16.937603533959138, "grad_norm": 0.2242358773946762, "learning_rate": 2.8219666666666667e-05, "loss": 0.0093, "step": 15345 }, { "epoch": 16.93870789618995, "grad_norm": 0.6616670489311218, 
"learning_rate": 2.8219333333333333e-05, "loss": 0.0109, "step": 15346 }, { "epoch": 16.939812258420762, "grad_norm": 0.4934484660625458, "learning_rate": 2.8219e-05, "loss": 0.23, "step": 15347 }, { "epoch": 16.940916620651574, "grad_norm": 0.41507893800735474, "learning_rate": 2.8218666666666668e-05, "loss": 0.1298, "step": 15348 }, { "epoch": 16.942020982882386, "grad_norm": 0.4102698266506195, "learning_rate": 2.8218333333333334e-05, "loss": 0.1384, "step": 15349 }, { "epoch": 16.9431253451132, "grad_norm": 0.5417706966400146, "learning_rate": 2.8218e-05, "loss": 0.0672, "step": 15350 }, { "epoch": 16.944229707344007, "grad_norm": 0.8940461874008179, "learning_rate": 2.821766666666667e-05, "loss": 0.081, "step": 15351 }, { "epoch": 16.94533406957482, "grad_norm": 0.4660770893096924, "learning_rate": 2.8217333333333332e-05, "loss": 0.0687, "step": 15352 }, { "epoch": 16.946438431805632, "grad_norm": 0.36550015211105347, "learning_rate": 2.8217e-05, "loss": 0.068, "step": 15353 }, { "epoch": 16.947542794036444, "grad_norm": 0.2752198874950409, "learning_rate": 2.8216666666666667e-05, "loss": 0.0258, "step": 15354 }, { "epoch": 16.948647156267256, "grad_norm": 0.18929347395896912, "learning_rate": 2.8216333333333333e-05, "loss": 0.0139, "step": 15355 }, { "epoch": 16.94975151849807, "grad_norm": 0.318240761756897, "learning_rate": 2.8216000000000002e-05, "loss": 0.0388, "step": 15356 }, { "epoch": 16.95085588072888, "grad_norm": Infinity, "learning_rate": 2.8216000000000002e-05, "loss": 0.0215, "step": 15357 }, { "epoch": 16.95196024295969, "grad_norm": 0.20816653966903687, "learning_rate": 2.8215666666666668e-05, "loss": 0.0111, "step": 15358 }, { "epoch": 16.9530646051905, "grad_norm": 0.17254389822483063, "learning_rate": 2.8215333333333334e-05, "loss": 0.0075, "step": 15359 }, { "epoch": 16.954168967421314, "grad_norm": 0.4775914251804352, "learning_rate": 2.8215e-05, "loss": 0.0235, "step": 15360 }, { "epoch": 16.955273329652126, "grad_norm": 
0.28410524129867554, "learning_rate": 2.821466666666667e-05, "loss": 0.0114, "step": 15361 }, { "epoch": 16.95637769188294, "grad_norm": 0.20488272607326508, "learning_rate": 2.8214333333333332e-05, "loss": 0.0092, "step": 15362 }, { "epoch": 16.95748205411375, "grad_norm": 0.5079958438873291, "learning_rate": 2.8214e-05, "loss": 0.0092, "step": 15363 }, { "epoch": 16.95858641634456, "grad_norm": 0.5862746238708496, "learning_rate": 2.8213666666666667e-05, "loss": 0.0105, "step": 15364 }, { "epoch": 16.95969077857537, "grad_norm": 0.3845375180244446, "learning_rate": 2.8213333333333333e-05, "loss": 0.0097, "step": 15365 }, { "epoch": 16.960795140806184, "grad_norm": 0.28300148248672485, "learning_rate": 2.8213000000000002e-05, "loss": 0.011, "step": 15366 }, { "epoch": 16.961899503036996, "grad_norm": 0.11968669295310974, "learning_rate": 2.821266666666667e-05, "loss": 0.0059, "step": 15367 }, { "epoch": 16.963003865267808, "grad_norm": 0.4782482385635376, "learning_rate": 2.8212333333333334e-05, "loss": 0.0082, "step": 15368 }, { "epoch": 16.96410822749862, "grad_norm": 0.3248980939388275, "learning_rate": 2.8212e-05, "loss": 0.0107, "step": 15369 }, { "epoch": 16.965212589729433, "grad_norm": 0.1675482541322708, "learning_rate": 2.821166666666667e-05, "loss": 0.0054, "step": 15370 }, { "epoch": 16.96631695196024, "grad_norm": 0.2291235327720642, "learning_rate": 2.8211333333333332e-05, "loss": 0.0077, "step": 15371 }, { "epoch": 16.967421314191053, "grad_norm": 0.41501784324645996, "learning_rate": 2.8211e-05, "loss": 0.0124, "step": 15372 }, { "epoch": 16.968525676421866, "grad_norm": 0.6014183163642883, "learning_rate": 2.8210666666666667e-05, "loss": 0.0141, "step": 15373 }, { "epoch": 16.969630038652678, "grad_norm": 0.3105957806110382, "learning_rate": 2.8210333333333333e-05, "loss": 0.0101, "step": 15374 }, { "epoch": 16.97073440088349, "grad_norm": 0.22912120819091797, "learning_rate": 2.8210000000000003e-05, "loss": 0.0055, "step": 15375 }, { "epoch": 
16.971838763114302, "grad_norm": 0.3710053861141205, "learning_rate": 2.8209666666666665e-05, "loss": 0.0131, "step": 15376 }, { "epoch": 16.972943125345115, "grad_norm": 0.4092216491699219, "learning_rate": 2.8209333333333335e-05, "loss": 0.0121, "step": 15377 }, { "epoch": 16.974047487575923, "grad_norm": 0.31391286849975586, "learning_rate": 2.8209e-05, "loss": 0.0102, "step": 15378 }, { "epoch": 16.975151849806736, "grad_norm": 0.2480590045452118, "learning_rate": 2.8208666666666666e-05, "loss": 0.0127, "step": 15379 }, { "epoch": 16.976256212037548, "grad_norm": 1.0352810621261597, "learning_rate": 2.8208333333333332e-05, "loss": 0.0094, "step": 15380 }, { "epoch": 16.97736057426836, "grad_norm": 0.159605473279953, "learning_rate": 2.8208e-05, "loss": 0.0066, "step": 15381 }, { "epoch": 16.978464936499172, "grad_norm": 0.43903475999832153, "learning_rate": 2.8207666666666668e-05, "loss": 0.0243, "step": 15382 }, { "epoch": 16.979569298729984, "grad_norm": 0.14791804552078247, "learning_rate": 2.8207333333333334e-05, "loss": 0.0063, "step": 15383 }, { "epoch": 16.980673660960797, "grad_norm": 0.43615907430648804, "learning_rate": 2.8207000000000003e-05, "loss": 0.0156, "step": 15384 }, { "epoch": 16.981778023191605, "grad_norm": 0.31038373708724976, "learning_rate": 2.8206666666666665e-05, "loss": 0.0043, "step": 15385 }, { "epoch": 16.982882385422418, "grad_norm": 0.36257025599479675, "learning_rate": 2.8206333333333335e-05, "loss": 0.0133, "step": 15386 }, { "epoch": 16.98398674765323, "grad_norm": 0.2031794786453247, "learning_rate": 2.8206e-05, "loss": 0.009, "step": 15387 }, { "epoch": 16.985091109884042, "grad_norm": 1.5304880142211914, "learning_rate": 2.8205666666666667e-05, "loss": 0.0229, "step": 15388 }, { "epoch": 16.986195472114854, "grad_norm": 0.410167396068573, "learning_rate": 2.8205333333333333e-05, "loss": 0.0152, "step": 15389 }, { "epoch": 16.987299834345666, "grad_norm": 0.17495959997177124, "learning_rate": 2.8205000000000002e-05, "loss": 
0.0052, "step": 15390 }, { "epoch": 16.98840419657648, "grad_norm": 0.4210320711135864, "learning_rate": 2.8204666666666668e-05, "loss": 0.0125, "step": 15391 }, { "epoch": 16.989508558807287, "grad_norm": 0.2323543280363083, "learning_rate": 2.8204333333333334e-05, "loss": 0.0134, "step": 15392 }, { "epoch": 16.9906129210381, "grad_norm": 0.3039208650588989, "learning_rate": 2.8204000000000003e-05, "loss": 0.0085, "step": 15393 }, { "epoch": 16.991717283268912, "grad_norm": 0.5074495673179626, "learning_rate": 2.8203666666666666e-05, "loss": 0.0193, "step": 15394 }, { "epoch": 16.992821645499724, "grad_norm": 0.4096353352069855, "learning_rate": 2.8203333333333335e-05, "loss": 0.0189, "step": 15395 }, { "epoch": 16.993926007730536, "grad_norm": 0.540197491645813, "learning_rate": 2.8203e-05, "loss": 0.0135, "step": 15396 }, { "epoch": 16.99503036996135, "grad_norm": 0.9596877098083496, "learning_rate": 2.8202666666666667e-05, "loss": 0.0648, "step": 15397 }, { "epoch": 16.99613473219216, "grad_norm": 0.14132049679756165, "learning_rate": 2.8202333333333333e-05, "loss": 0.0077, "step": 15398 }, { "epoch": 16.99723909442297, "grad_norm": 0.3972940146923065, "learning_rate": 2.8202000000000002e-05, "loss": 0.012, "step": 15399 }, { "epoch": 16.99834345665378, "grad_norm": 0.16038529574871063, "learning_rate": 2.8201666666666668e-05, "loss": 0.0071, "step": 15400 }, { "epoch": 16.999447818884594, "grad_norm": 0.5040178894996643, "learning_rate": 2.8201333333333334e-05, "loss": 0.0107, "step": 15401 }, { "epoch": 17.0, "grad_norm": 0.7070406079292297, "learning_rate": 2.8201000000000003e-05, "loss": 0.0098, "step": 15402 }, { "epoch": 17.001104362230812, "grad_norm": 0.5528517961502075, "learning_rate": 2.8200666666666666e-05, "loss": 0.1929, "step": 15403 }, { "epoch": 17.002208724461624, "grad_norm": 0.6469992399215698, "learning_rate": 2.8200333333333335e-05, "loss": 0.1006, "step": 15404 }, { "epoch": 17.003313086692437, "grad_norm": 0.4214009642601013, 
"learning_rate": 2.8199999999999998e-05, "loss": 0.0726, "step": 15405 }, { "epoch": 17.004417448923245, "grad_norm": 0.42932772636413574, "learning_rate": 2.8199666666666667e-05, "loss": 0.0819, "step": 15406 }, { "epoch": 17.005521811154058, "grad_norm": 0.4421776831150055, "learning_rate": 2.8199333333333336e-05, "loss": 0.0795, "step": 15407 }, { "epoch": 17.00662617338487, "grad_norm": 0.32310694456100464, "learning_rate": 2.8199e-05, "loss": 0.0366, "step": 15408 }, { "epoch": 17.007730535615682, "grad_norm": 0.7233429551124573, "learning_rate": 2.8198666666666668e-05, "loss": 0.0272, "step": 15409 }, { "epoch": 17.008834897846494, "grad_norm": 0.47200319170951843, "learning_rate": 2.8198333333333334e-05, "loss": 0.084, "step": 15410 }, { "epoch": 17.009939260077306, "grad_norm": 0.24894781410694122, "learning_rate": 2.8198e-05, "loss": 0.0103, "step": 15411 }, { "epoch": 17.01104362230812, "grad_norm": 0.22729048132896423, "learning_rate": 2.8197666666666666e-05, "loss": 0.009, "step": 15412 }, { "epoch": 17.012147984538927, "grad_norm": 0.32218366861343384, "learning_rate": 2.8197333333333335e-05, "loss": 0.0135, "step": 15413 }, { "epoch": 17.01325234676974, "grad_norm": 0.22770334780216217, "learning_rate": 2.8196999999999998e-05, "loss": 0.02, "step": 15414 }, { "epoch": 17.014356709000552, "grad_norm": 0.2575967609882355, "learning_rate": 2.8196666666666667e-05, "loss": 0.0134, "step": 15415 }, { "epoch": 17.015461071231364, "grad_norm": 0.34565815329551697, "learning_rate": 2.8196333333333337e-05, "loss": 0.0117, "step": 15416 }, { "epoch": 17.016565433462176, "grad_norm": 0.1988304853439331, "learning_rate": 2.8196e-05, "loss": 0.0151, "step": 15417 }, { "epoch": 17.01766979569299, "grad_norm": 0.2718552052974701, "learning_rate": 2.819566666666667e-05, "loss": 0.0066, "step": 15418 }, { "epoch": 17.0187741579238, "grad_norm": 0.24021707475185394, "learning_rate": 2.8195333333333334e-05, "loss": 0.0101, "step": 15419 }, { "epoch": 17.01987852015461, 
"grad_norm": 0.23985837399959564, "learning_rate": 2.8195e-05, "loss": 0.0141, "step": 15420 }, { "epoch": 17.02098288238542, "grad_norm": 0.15495702624320984, "learning_rate": 2.8194666666666666e-05, "loss": 0.0101, "step": 15421 }, { "epoch": 17.022087244616234, "grad_norm": 0.26014968752861023, "learning_rate": 2.8194333333333336e-05, "loss": 0.0052, "step": 15422 }, { "epoch": 17.023191606847046, "grad_norm": 0.4128369092941284, "learning_rate": 2.8194e-05, "loss": 0.0196, "step": 15423 }, { "epoch": 17.02429596907786, "grad_norm": 0.29494619369506836, "learning_rate": 2.8193666666666667e-05, "loss": 0.0111, "step": 15424 }, { "epoch": 17.02540033130867, "grad_norm": 0.22096119821071625, "learning_rate": 2.8193333333333337e-05, "loss": 0.006, "step": 15425 }, { "epoch": 17.02650469353948, "grad_norm": 0.24898181855678558, "learning_rate": 2.8193e-05, "loss": 0.0095, "step": 15426 }, { "epoch": 17.02760905577029, "grad_norm": 0.5913267135620117, "learning_rate": 2.819266666666667e-05, "loss": 0.0094, "step": 15427 }, { "epoch": 17.028713418001104, "grad_norm": 0.21331414580345154, "learning_rate": 2.8192333333333335e-05, "loss": 0.0059, "step": 15428 }, { "epoch": 17.029817780231916, "grad_norm": 0.5214385390281677, "learning_rate": 2.8192e-05, "loss": 0.0109, "step": 15429 }, { "epoch": 17.030922142462728, "grad_norm": 0.46185070276260376, "learning_rate": 2.8191666666666666e-05, "loss": 0.0211, "step": 15430 }, { "epoch": 17.03202650469354, "grad_norm": 0.14745940268039703, "learning_rate": 2.8191333333333336e-05, "loss": 0.0062, "step": 15431 }, { "epoch": 17.033130866924353, "grad_norm": 0.22181770205497742, "learning_rate": 2.8191e-05, "loss": 0.0085, "step": 15432 }, { "epoch": 17.03423522915516, "grad_norm": 0.33564433455467224, "learning_rate": 2.8190666666666668e-05, "loss": 0.0088, "step": 15433 }, { "epoch": 17.035339591385974, "grad_norm": 0.15641602873802185, "learning_rate": 2.8190333333333334e-05, "loss": 0.007, "step": 15434 }, { "epoch": 
17.036443953616786, "grad_norm": 0.3211289942264557, "learning_rate": 2.819e-05, "loss": 0.0168, "step": 15435 }, { "epoch": 17.037548315847598, "grad_norm": 0.19468751549720764, "learning_rate": 2.818966666666667e-05, "loss": 0.0047, "step": 15436 }, { "epoch": 17.03865267807841, "grad_norm": 0.2067641168832779, "learning_rate": 2.818933333333333e-05, "loss": 0.007, "step": 15437 }, { "epoch": 17.039757040309222, "grad_norm": 1.724306583404541, "learning_rate": 2.8189e-05, "loss": 0.0139, "step": 15438 }, { "epoch": 17.040861402540035, "grad_norm": 0.6497280597686768, "learning_rate": 2.8188666666666667e-05, "loss": 0.0053, "step": 15439 }, { "epoch": 17.041965764770843, "grad_norm": 0.40018555521965027, "learning_rate": 2.8188333333333333e-05, "loss": 0.0222, "step": 15440 }, { "epoch": 17.043070127001656, "grad_norm": 0.7091325521469116, "learning_rate": 2.8188000000000002e-05, "loss": 0.0137, "step": 15441 }, { "epoch": 17.044174489232468, "grad_norm": 0.05557149276137352, "learning_rate": 2.8187666666666668e-05, "loss": 0.0025, "step": 15442 }, { "epoch": 17.04527885146328, "grad_norm": 0.1702808141708374, "learning_rate": 2.8187333333333334e-05, "loss": 0.0106, "step": 15443 }, { "epoch": 17.046383213694092, "grad_norm": 0.16434727609157562, "learning_rate": 2.8187e-05, "loss": 0.0067, "step": 15444 }, { "epoch": 17.047487575924904, "grad_norm": 0.0653676763176918, "learning_rate": 2.818666666666667e-05, "loss": 0.0026, "step": 15445 }, { "epoch": 17.048591938155717, "grad_norm": 0.3824898302555084, "learning_rate": 2.818633333333333e-05, "loss": 0.015, "step": 15446 }, { "epoch": 17.049696300386525, "grad_norm": 0.6936913132667542, "learning_rate": 2.8186e-05, "loss": 0.0135, "step": 15447 }, { "epoch": 17.050800662617338, "grad_norm": 0.18771526217460632, "learning_rate": 2.8185666666666667e-05, "loss": 0.0071, "step": 15448 }, { "epoch": 17.05190502484815, "grad_norm": 0.24845725297927856, "learning_rate": 2.8185333333333333e-05, "loss": 0.0103, "step": 
15449 }, { "epoch": 17.053009387078962, "grad_norm": 0.18191182613372803, "learning_rate": 2.8185000000000002e-05, "loss": 0.0084, "step": 15450 }, { "epoch": 17.054113749309774, "grad_norm": 0.4484604299068451, "learning_rate": 2.8184666666666668e-05, "loss": 0.0131, "step": 15451 }, { "epoch": 17.055218111540587, "grad_norm": 0.32822486758232117, "learning_rate": 2.8184333333333334e-05, "loss": 0.012, "step": 15452 }, { "epoch": 17.0563224737714, "grad_norm": 0.6663870215415955, "learning_rate": 2.8184e-05, "loss": 0.2136, "step": 15453 }, { "epoch": 17.057426836002207, "grad_norm": 0.6151115298271179, "learning_rate": 2.818366666666667e-05, "loss": 0.0877, "step": 15454 }, { "epoch": 17.05853119823302, "grad_norm": 0.29668521881103516, "learning_rate": 2.8183333333333332e-05, "loss": 0.0695, "step": 15455 }, { "epoch": 17.059635560463832, "grad_norm": 0.5125114917755127, "learning_rate": 2.8183e-05, "loss": 0.0591, "step": 15456 }, { "epoch": 17.060739922694644, "grad_norm": 0.4111986756324768, "learning_rate": 2.818266666666667e-05, "loss": 0.0482, "step": 15457 }, { "epoch": 17.061844284925456, "grad_norm": 0.2833617329597473, "learning_rate": 2.8182333333333333e-05, "loss": 0.0281, "step": 15458 }, { "epoch": 17.06294864715627, "grad_norm": 0.3983646035194397, "learning_rate": 2.8182000000000002e-05, "loss": 0.0556, "step": 15459 }, { "epoch": 17.064053009387077, "grad_norm": 0.4086206257343292, "learning_rate": 2.8181666666666668e-05, "loss": 0.0302, "step": 15460 }, { "epoch": 17.06515737161789, "grad_norm": 0.241460382938385, "learning_rate": 2.8181333333333334e-05, "loss": 0.0091, "step": 15461 }, { "epoch": 17.0662617338487, "grad_norm": 0.2930218577384949, "learning_rate": 2.8181e-05, "loss": 0.0206, "step": 15462 }, { "epoch": 17.067366096079514, "grad_norm": 0.12892137467861176, "learning_rate": 2.818066666666667e-05, "loss": 0.0065, "step": 15463 }, { "epoch": 17.068470458310326, "grad_norm": 0.4001002609729767, "learning_rate": 
2.8180333333333332e-05, "loss": 0.0072, "step": 15464 }, { "epoch": 17.06957482054114, "grad_norm": 0.3950919508934021, "learning_rate": 2.818e-05, "loss": 0.0101, "step": 15465 }, { "epoch": 17.07067918277195, "grad_norm": 0.2853074073791504, "learning_rate": 2.8179666666666667e-05, "loss": 0.0108, "step": 15466 }, { "epoch": 17.07178354500276, "grad_norm": 0.2646360695362091, "learning_rate": 2.8179333333333333e-05, "loss": 0.0114, "step": 15467 }, { "epoch": 17.07288790723357, "grad_norm": 0.26969629526138306, "learning_rate": 2.8179000000000002e-05, "loss": 0.0081, "step": 15468 }, { "epoch": 17.073992269464384, "grad_norm": 0.20604631304740906, "learning_rate": 2.8178666666666665e-05, "loss": 0.0092, "step": 15469 }, { "epoch": 17.075096631695196, "grad_norm": 0.2518334686756134, "learning_rate": 2.8178333333333334e-05, "loss": 0.0091, "step": 15470 }, { "epoch": 17.076200993926008, "grad_norm": 0.10121818631887436, "learning_rate": 2.8178e-05, "loss": 0.003, "step": 15471 }, { "epoch": 17.07730535615682, "grad_norm": 0.1660618633031845, "learning_rate": 2.8177666666666666e-05, "loss": 0.0061, "step": 15472 }, { "epoch": 17.078409718387633, "grad_norm": 0.16928859055042267, "learning_rate": 2.8177333333333336e-05, "loss": 0.0055, "step": 15473 }, { "epoch": 17.07951408061844, "grad_norm": 0.17943869531154633, "learning_rate": 2.8177e-05, "loss": 0.0052, "step": 15474 }, { "epoch": 17.080618442849254, "grad_norm": 0.32307448983192444, "learning_rate": 2.8176666666666667e-05, "loss": 0.0098, "step": 15475 }, { "epoch": 17.081722805080066, "grad_norm": 0.3242838382720947, "learning_rate": 2.8176333333333333e-05, "loss": 0.0425, "step": 15476 }, { "epoch": 17.082827167310878, "grad_norm": 0.3596802055835724, "learning_rate": 2.8176000000000003e-05, "loss": 0.0197, "step": 15477 }, { "epoch": 17.08393152954169, "grad_norm": 0.3419872224330902, "learning_rate": 2.8175666666666665e-05, "loss": 0.0054, "step": 15478 }, { "epoch": 17.085035891772502, "grad_norm": 
0.05376799404621124, "learning_rate": 2.8175333333333335e-05, "loss": 0.0019, "step": 15479 }, { "epoch": 17.086140254003315, "grad_norm": 0.23132304847240448, "learning_rate": 2.8175e-05, "loss": 0.01, "step": 15480 }, { "epoch": 17.087244616234123, "grad_norm": 0.13141480088233948, "learning_rate": 2.8174666666666666e-05, "loss": 0.0054, "step": 15481 }, { "epoch": 17.088348978464936, "grad_norm": 0.43557387590408325, "learning_rate": 2.8174333333333336e-05, "loss": 0.0078, "step": 15482 }, { "epoch": 17.089453340695748, "grad_norm": 0.19668640196323395, "learning_rate": 2.8174e-05, "loss": 0.0088, "step": 15483 }, { "epoch": 17.09055770292656, "grad_norm": 0.31163808703422546, "learning_rate": 2.8173666666666668e-05, "loss": 0.013, "step": 15484 }, { "epoch": 17.091662065157372, "grad_norm": 0.35269781947135925, "learning_rate": 2.8173333333333334e-05, "loss": 0.0132, "step": 15485 }, { "epoch": 17.092766427388185, "grad_norm": 0.4740055501461029, "learning_rate": 2.8173000000000003e-05, "loss": 0.0138, "step": 15486 }, { "epoch": 17.093870789618997, "grad_norm": 0.36422649025917053, "learning_rate": 2.8172666666666665e-05, "loss": 0.0133, "step": 15487 }, { "epoch": 17.094975151849805, "grad_norm": 0.17281776666641235, "learning_rate": 2.8172333333333335e-05, "loss": 0.0037, "step": 15488 }, { "epoch": 17.096079514080618, "grad_norm": 0.5857182145118713, "learning_rate": 2.8172e-05, "loss": 0.0093, "step": 15489 }, { "epoch": 17.09718387631143, "grad_norm": 0.2963194251060486, "learning_rate": 2.8171666666666667e-05, "loss": 0.0182, "step": 15490 }, { "epoch": 17.098288238542242, "grad_norm": 0.4282227158546448, "learning_rate": 2.8171333333333336e-05, "loss": 0.0189, "step": 15491 }, { "epoch": 17.099392600773054, "grad_norm": 0.2567732036113739, "learning_rate": 2.8171000000000002e-05, "loss": 0.0097, "step": 15492 }, { "epoch": 17.100496963003867, "grad_norm": 0.14424394071102142, "learning_rate": 2.8170666666666668e-05, "loss": 0.0047, "step": 15493 }, { 
"epoch": 17.101601325234675, "grad_norm": 0.3170815110206604, "learning_rate": 2.8170333333333334e-05, "loss": 0.0119, "step": 15494 }, { "epoch": 17.102705687465487, "grad_norm": 0.38823920488357544, "learning_rate": 2.817e-05, "loss": 0.0092, "step": 15495 }, { "epoch": 17.1038100496963, "grad_norm": 0.24020898342132568, "learning_rate": 2.8169666666666666e-05, "loss": 0.0109, "step": 15496 }, { "epoch": 17.104914411927112, "grad_norm": 0.16450971364974976, "learning_rate": 2.8169333333333335e-05, "loss": 0.0069, "step": 15497 }, { "epoch": 17.106018774157924, "grad_norm": 0.40101003646850586, "learning_rate": 2.8169e-05, "loss": 0.011, "step": 15498 }, { "epoch": 17.107123136388736, "grad_norm": 0.4242892563343048, "learning_rate": 2.8168666666666667e-05, "loss": 0.0273, "step": 15499 }, { "epoch": 17.10822749861955, "grad_norm": 0.18799583613872528, "learning_rate": 2.8168333333333336e-05, "loss": 0.0082, "step": 15500 }, { "epoch": 17.109331860850357, "grad_norm": 0.20051541924476624, "learning_rate": 2.8168e-05, "loss": 0.007, "step": 15501 }, { "epoch": 17.11043622308117, "grad_norm": 0.2746818959712982, "learning_rate": 2.8167666666666668e-05, "loss": 0.0089, "step": 15502 }, { "epoch": 17.11154058531198, "grad_norm": 0.5492504835128784, "learning_rate": 2.8167333333333334e-05, "loss": 0.1508, "step": 15503 }, { "epoch": 17.112644947542794, "grad_norm": 0.48642992973327637, "learning_rate": 2.8167e-05, "loss": 0.1077, "step": 15504 }, { "epoch": 17.113749309773606, "grad_norm": 0.40735194087028503, "learning_rate": 2.8166666666666666e-05, "loss": 0.1092, "step": 15505 }, { "epoch": 17.11485367200442, "grad_norm": 0.3837876617908478, "learning_rate": 2.8166333333333335e-05, "loss": 0.0633, "step": 15506 }, { "epoch": 17.11595803423523, "grad_norm": 0.5136864185333252, "learning_rate": 2.8166e-05, "loss": 0.0626, "step": 15507 }, { "epoch": 17.11706239646604, "grad_norm": 0.39855754375457764, "learning_rate": 2.8165666666666667e-05, "loss": 0.048, "step": 
15508 }, { "epoch": 17.11816675869685, "grad_norm": 0.2568614184856415, "learning_rate": 2.8165333333333336e-05, "loss": 0.0298, "step": 15509 }, { "epoch": 17.119271120927664, "grad_norm": 0.3690541386604309, "learning_rate": 2.8165e-05, "loss": 0.068, "step": 15510 }, { "epoch": 17.120375483158476, "grad_norm": 0.2264176309108734, "learning_rate": 2.8164666666666668e-05, "loss": 0.0193, "step": 15511 }, { "epoch": 17.12147984538929, "grad_norm": 0.23880234360694885, "learning_rate": 2.8164333333333334e-05, "loss": 0.0271, "step": 15512 }, { "epoch": 17.1225842076201, "grad_norm": 0.1697004735469818, "learning_rate": 2.8164e-05, "loss": 0.0067, "step": 15513 }, { "epoch": 17.123688569850913, "grad_norm": 0.33434152603149414, "learning_rate": 2.8163666666666666e-05, "loss": 0.0058, "step": 15514 }, { "epoch": 17.12479293208172, "grad_norm": 2.5401194095611572, "learning_rate": 2.8163333333333335e-05, "loss": 0.0447, "step": 15515 }, { "epoch": 17.125897294312534, "grad_norm": 0.10836151987314224, "learning_rate": 2.8163e-05, "loss": 0.005, "step": 15516 }, { "epoch": 17.127001656543346, "grad_norm": 0.13246352970600128, "learning_rate": 2.8162666666666667e-05, "loss": 0.0049, "step": 15517 }, { "epoch": 17.128106018774158, "grad_norm": 0.16413308680057526, "learning_rate": 2.8162333333333336e-05, "loss": 0.0066, "step": 15518 }, { "epoch": 17.12921038100497, "grad_norm": 0.2030731588602066, "learning_rate": 2.8162e-05, "loss": 0.0089, "step": 15519 }, { "epoch": 17.130314743235783, "grad_norm": 0.2958642244338989, "learning_rate": 2.816166666666667e-05, "loss": 0.0075, "step": 15520 }, { "epoch": 17.131419105466595, "grad_norm": 0.28353798389434814, "learning_rate": 2.8161333333333334e-05, "loss": 0.0085, "step": 15521 }, { "epoch": 17.132523467697403, "grad_norm": 0.11205992102622986, "learning_rate": 2.8161e-05, "loss": 0.0042, "step": 15522 }, { "epoch": 17.133627829928216, "grad_norm": 0.18384066224098206, "learning_rate": 2.816066666666667e-05, "loss": 0.0082, 
"step": 15523 }, { "epoch": 17.134732192159028, "grad_norm": 0.10068759322166443, "learning_rate": 2.8160333333333336e-05, "loss": 0.0034, "step": 15524 }, { "epoch": 17.13583655438984, "grad_norm": 0.1797565072774887, "learning_rate": 2.816e-05, "loss": 0.0119, "step": 15525 }, { "epoch": 17.136940916620652, "grad_norm": 0.5579796433448792, "learning_rate": 2.8159666666666667e-05, "loss": 0.0143, "step": 15526 }, { "epoch": 17.138045278851465, "grad_norm": 0.1621626913547516, "learning_rate": 2.8159333333333333e-05, "loss": 0.0042, "step": 15527 }, { "epoch": 17.139149641082273, "grad_norm": 0.19043725728988647, "learning_rate": 2.8159e-05, "loss": 0.0041, "step": 15528 }, { "epoch": 17.140254003313085, "grad_norm": 0.29799312353134155, "learning_rate": 2.815866666666667e-05, "loss": 0.0084, "step": 15529 }, { "epoch": 17.141358365543898, "grad_norm": 0.12570993602275848, "learning_rate": 2.815833333333333e-05, "loss": 0.0033, "step": 15530 }, { "epoch": 17.14246272777471, "grad_norm": 0.12220015376806259, "learning_rate": 2.8158e-05, "loss": 0.0053, "step": 15531 }, { "epoch": 17.143567090005522, "grad_norm": 0.22855810821056366, "learning_rate": 2.815766666666667e-05, "loss": 0.0066, "step": 15532 }, { "epoch": 17.144671452236334, "grad_norm": 0.5366239547729492, "learning_rate": 2.8157333333333332e-05, "loss": 0.014, "step": 15533 }, { "epoch": 17.145775814467147, "grad_norm": 0.19831736385822296, "learning_rate": 2.8157e-05, "loss": 0.0075, "step": 15534 }, { "epoch": 17.146880176697955, "grad_norm": 0.14681608974933624, "learning_rate": 2.8156666666666668e-05, "loss": 0.003, "step": 15535 }, { "epoch": 17.147984538928768, "grad_norm": 0.16770903766155243, "learning_rate": 2.8156333333333334e-05, "loss": 0.0038, "step": 15536 }, { "epoch": 17.14908890115958, "grad_norm": 0.22331322729587555, "learning_rate": 2.8156e-05, "loss": 0.0088, "step": 15537 }, { "epoch": 17.150193263390392, "grad_norm": 0.1282600462436676, "learning_rate": 2.815566666666667e-05, 
"loss": 0.0058, "step": 15538 }, { "epoch": 17.151297625621204, "grad_norm": 0.7118775844573975, "learning_rate": 2.815533333333333e-05, "loss": 0.0092, "step": 15539 }, { "epoch": 17.152401987852016, "grad_norm": 0.27991607785224915, "learning_rate": 2.8155e-05, "loss": 0.008, "step": 15540 }, { "epoch": 17.15350635008283, "grad_norm": 0.1581665724515915, "learning_rate": 2.815466666666667e-05, "loss": 0.0057, "step": 15541 }, { "epoch": 17.154610712313637, "grad_norm": 0.18524998426437378, "learning_rate": 2.8154333333333333e-05, "loss": 0.009, "step": 15542 }, { "epoch": 17.15571507454445, "grad_norm": 0.35971516370773315, "learning_rate": 2.8154000000000002e-05, "loss": 0.0104, "step": 15543 }, { "epoch": 17.156819436775262, "grad_norm": 0.30910176038742065, "learning_rate": 2.8153666666666668e-05, "loss": 0.008, "step": 15544 }, { "epoch": 17.157923799006074, "grad_norm": 0.4391137361526489, "learning_rate": 2.8153333333333334e-05, "loss": 0.0077, "step": 15545 }, { "epoch": 17.159028161236886, "grad_norm": 0.31419244408607483, "learning_rate": 2.8153e-05, "loss": 0.0099, "step": 15546 }, { "epoch": 17.1601325234677, "grad_norm": 0.27413663268089294, "learning_rate": 2.815266666666667e-05, "loss": 0.0053, "step": 15547 }, { "epoch": 17.16123688569851, "grad_norm": 0.16815122961997986, "learning_rate": 2.8152333333333335e-05, "loss": 0.0046, "step": 15548 }, { "epoch": 17.16234124792932, "grad_norm": 0.16450928151607513, "learning_rate": 2.8152e-05, "loss": 0.0052, "step": 15549 }, { "epoch": 17.16344561016013, "grad_norm": 0.46366074681282043, "learning_rate": 2.815166666666667e-05, "loss": 0.0102, "step": 15550 }, { "epoch": 17.164549972390944, "grad_norm": 0.8795686364173889, "learning_rate": 2.8151333333333333e-05, "loss": 0.0108, "step": 15551 }, { "epoch": 17.165654334621756, "grad_norm": 0.22606922686100006, "learning_rate": 2.8151000000000002e-05, "loss": 0.0057, "step": 15552 }, { "epoch": 17.16675869685257, "grad_norm": 0.46255865693092346, 
"learning_rate": 2.8150666666666668e-05, "loss": 0.1254, "step": 15553 }, { "epoch": 17.16786305908338, "grad_norm": 0.40006008744239807, "learning_rate": 2.8150333333333334e-05, "loss": 0.0908, "step": 15554 }, { "epoch": 17.168967421314193, "grad_norm": 0.5106037855148315, "learning_rate": 2.815e-05, "loss": 0.1069, "step": 15555 }, { "epoch": 17.170071783545, "grad_norm": 0.6632119417190552, "learning_rate": 2.8149666666666666e-05, "loss": 0.1085, "step": 15556 }, { "epoch": 17.171176145775814, "grad_norm": 0.3851926624774933, "learning_rate": 2.8149333333333335e-05, "loss": 0.051, "step": 15557 }, { "epoch": 17.172280508006626, "grad_norm": 0.7351036667823792, "learning_rate": 2.8149e-05, "loss": 0.0457, "step": 15558 }, { "epoch": 17.173384870237438, "grad_norm": 0.48518824577331543, "learning_rate": 2.8148666666666667e-05, "loss": 0.034, "step": 15559 }, { "epoch": 17.17448923246825, "grad_norm": 0.253140926361084, "learning_rate": 2.8148333333333333e-05, "loss": 0.0312, "step": 15560 }, { "epoch": 17.175593594699063, "grad_norm": 0.33649516105651855, "learning_rate": 2.8148000000000002e-05, "loss": 0.0317, "step": 15561 }, { "epoch": 17.17669795692987, "grad_norm": 0.39840108156204224, "learning_rate": 2.8147666666666665e-05, "loss": 0.0239, "step": 15562 }, { "epoch": 17.177802319160683, "grad_norm": 0.1531561315059662, "learning_rate": 2.8147333333333334e-05, "loss": 0.0058, "step": 15563 }, { "epoch": 17.178906681391496, "grad_norm": 0.23417417705059052, "learning_rate": 2.8147e-05, "loss": 0.0303, "step": 15564 }, { "epoch": 17.180011043622308, "grad_norm": 0.24941930174827576, "learning_rate": 2.8146666666666666e-05, "loss": 0.0078, "step": 15565 }, { "epoch": 17.18111540585312, "grad_norm": 0.21669530868530273, "learning_rate": 2.8146333333333335e-05, "loss": 0.0258, "step": 15566 }, { "epoch": 17.182219768083932, "grad_norm": 0.2755858600139618, "learning_rate": 2.8146e-05, "loss": 0.0071, "step": 15567 }, { "epoch": 17.183324130314745, "grad_norm": 
0.11096528172492981, "learning_rate": 2.8145666666666667e-05, "loss": 0.0039, "step": 15568 }, { "epoch": 17.184428492545553, "grad_norm": 0.3275960385799408, "learning_rate": 2.8145333333333333e-05, "loss": 0.0044, "step": 15569 }, { "epoch": 17.185532854776365, "grad_norm": 0.8213208913803101, "learning_rate": 2.8145000000000002e-05, "loss": 0.0117, "step": 15570 }, { "epoch": 17.186637217007178, "grad_norm": 0.25213149189949036, "learning_rate": 2.8144666666666665e-05, "loss": 0.0069, "step": 15571 }, { "epoch": 17.18774157923799, "grad_norm": 0.21280327439308167, "learning_rate": 2.8144333333333334e-05, "loss": 0.0074, "step": 15572 }, { "epoch": 17.188845941468802, "grad_norm": 0.0955464094877243, "learning_rate": 2.8144000000000004e-05, "loss": 0.007, "step": 15573 }, { "epoch": 17.189950303699614, "grad_norm": 0.37777137756347656, "learning_rate": 2.8143666666666666e-05, "loss": 0.0085, "step": 15574 }, { "epoch": 17.191054665930427, "grad_norm": 0.4575629234313965, "learning_rate": 2.8143333333333335e-05, "loss": 0.0073, "step": 15575 }, { "epoch": 17.192159028161235, "grad_norm": 0.1487475484609604, "learning_rate": 2.8143e-05, "loss": 0.0057, "step": 15576 }, { "epoch": 17.193263390392048, "grad_norm": 0.14860522747039795, "learning_rate": 2.8142666666666667e-05, "loss": 0.0054, "step": 15577 }, { "epoch": 17.19436775262286, "grad_norm": 0.1812465488910675, "learning_rate": 2.8142333333333333e-05, "loss": 0.006, "step": 15578 }, { "epoch": 17.195472114853672, "grad_norm": 0.21946755051612854, "learning_rate": 2.8142000000000003e-05, "loss": 0.0138, "step": 15579 }, { "epoch": 17.196576477084484, "grad_norm": 0.43422558903694153, "learning_rate": 2.8141666666666665e-05, "loss": 0.0354, "step": 15580 }, { "epoch": 17.197680839315296, "grad_norm": 0.25805070996284485, "learning_rate": 2.8141333333333334e-05, "loss": 0.0061, "step": 15581 }, { "epoch": 17.19878520154611, "grad_norm": 0.4416358172893524, "learning_rate": 2.8141000000000004e-05, "loss": 0.0075, 
"step": 15582 }, { "epoch": 17.199889563776917, "grad_norm": 0.2580662965774536, "learning_rate": 2.8140666666666666e-05, "loss": 0.0137, "step": 15583 }, { "epoch": 17.20099392600773, "grad_norm": 0.1569473296403885, "learning_rate": 2.8140333333333336e-05, "loss": 0.0035, "step": 15584 }, { "epoch": 17.202098288238542, "grad_norm": 0.18757721781730652, "learning_rate": 2.8139999999999998e-05, "loss": 0.0069, "step": 15585 }, { "epoch": 17.203202650469354, "grad_norm": 0.08594662696123123, "learning_rate": 2.8139666666666668e-05, "loss": 0.0064, "step": 15586 }, { "epoch": 17.204307012700166, "grad_norm": 0.07744310796260834, "learning_rate": 2.8139333333333333e-05, "loss": 0.0019, "step": 15587 }, { "epoch": 17.20541137493098, "grad_norm": 0.24950803816318512, "learning_rate": 2.8139e-05, "loss": 0.0084, "step": 15588 }, { "epoch": 17.20651573716179, "grad_norm": 0.19083309173583984, "learning_rate": 2.8138666666666665e-05, "loss": 0.0076, "step": 15589 }, { "epoch": 17.2076200993926, "grad_norm": 0.12864342331886292, "learning_rate": 2.8138333333333335e-05, "loss": 0.0061, "step": 15590 }, { "epoch": 17.20872446162341, "grad_norm": 0.285320907831192, "learning_rate": 2.8138e-05, "loss": 0.0131, "step": 15591 }, { "epoch": 17.209828823854224, "grad_norm": 0.18962831795215607, "learning_rate": 2.8137666666666667e-05, "loss": 0.0072, "step": 15592 }, { "epoch": 17.210933186085036, "grad_norm": 0.2103298008441925, "learning_rate": 2.8137333333333336e-05, "loss": 0.0083, "step": 15593 }, { "epoch": 17.21203754831585, "grad_norm": 0.13878262042999268, "learning_rate": 2.8137e-05, "loss": 0.0087, "step": 15594 }, { "epoch": 17.21314191054666, "grad_norm": 0.16230010986328125, "learning_rate": 2.8136666666666668e-05, "loss": 0.0042, "step": 15595 }, { "epoch": 17.214246272777473, "grad_norm": 0.17915551364421844, "learning_rate": 2.8136333333333334e-05, "loss": 0.0059, "step": 15596 }, { "epoch": 17.21535063500828, "grad_norm": 0.19730307161808014, "learning_rate": 
2.8136e-05, "loss": 0.0062, "step": 15597 }, { "epoch": 17.216454997239094, "grad_norm": 0.35943618416786194, "learning_rate": 2.813566666666667e-05, "loss": 0.0096, "step": 15598 }, { "epoch": 17.217559359469906, "grad_norm": 0.1532951295375824, "learning_rate": 2.8135333333333335e-05, "loss": 0.0048, "step": 15599 }, { "epoch": 17.218663721700718, "grad_norm": 0.22184401750564575, "learning_rate": 2.8135e-05, "loss": 0.0079, "step": 15600 }, { "epoch": 17.21976808393153, "grad_norm": 0.11606039106845856, "learning_rate": 2.8134666666666667e-05, "loss": 0.0037, "step": 15601 }, { "epoch": 17.220872446162343, "grad_norm": 0.13305701315402985, "learning_rate": 2.8134333333333336e-05, "loss": 0.0034, "step": 15602 }, { "epoch": 17.22197680839315, "grad_norm": 0.40653344988822937, "learning_rate": 2.8134e-05, "loss": 0.1067, "step": 15603 }, { "epoch": 17.223081170623963, "grad_norm": 0.5262173414230347, "learning_rate": 2.8133666666666668e-05, "loss": 0.1435, "step": 15604 }, { "epoch": 17.224185532854776, "grad_norm": 0.4879613220691681, "learning_rate": 2.8133333333333334e-05, "loss": 0.0789, "step": 15605 }, { "epoch": 17.225289895085588, "grad_norm": 0.36318719387054443, "learning_rate": 2.8133e-05, "loss": 0.0778, "step": 15606 }, { "epoch": 17.2263942573164, "grad_norm": 0.40589743852615356, "learning_rate": 2.813266666666667e-05, "loss": 0.0521, "step": 15607 }, { "epoch": 17.227498619547212, "grad_norm": 0.2743285000324249, "learning_rate": 2.8132333333333335e-05, "loss": 0.0305, "step": 15608 }, { "epoch": 17.228602981778025, "grad_norm": 0.30948084592819214, "learning_rate": 2.8132e-05, "loss": 0.0451, "step": 15609 }, { "epoch": 17.229707344008833, "grad_norm": 0.3514540493488312, "learning_rate": 2.8131666666666667e-05, "loss": 0.0348, "step": 15610 }, { "epoch": 17.230811706239646, "grad_norm": 0.16364245116710663, "learning_rate": 2.8131333333333336e-05, "loss": 0.0294, "step": 15611 }, { "epoch": 17.231916068470458, "grad_norm": 0.29286956787109375, 
"learning_rate": 2.8131e-05, "loss": 0.0162, "step": 15612 }, { "epoch": 17.23302043070127, "grad_norm": 0.35625892877578735, "learning_rate": 2.8130666666666668e-05, "loss": 0.0118, "step": 15613 }, { "epoch": 17.234124792932082, "grad_norm": 0.23762620985507965, "learning_rate": 2.8130333333333334e-05, "loss": 0.009, "step": 15614 }, { "epoch": 17.235229155162894, "grad_norm": 0.19799846410751343, "learning_rate": 2.813e-05, "loss": 0.0067, "step": 15615 }, { "epoch": 17.236333517393707, "grad_norm": 0.1964561641216278, "learning_rate": 2.812966666666667e-05, "loss": 0.0082, "step": 15616 }, { "epoch": 17.237437879624515, "grad_norm": 0.19019444286823273, "learning_rate": 2.8129333333333332e-05, "loss": 0.0084, "step": 15617 }, { "epoch": 17.238542241855328, "grad_norm": 0.3343871235847473, "learning_rate": 2.8129e-05, "loss": 0.0058, "step": 15618 }, { "epoch": 17.23964660408614, "grad_norm": 0.2427080124616623, "learning_rate": 2.8128666666666667e-05, "loss": 0.0099, "step": 15619 }, { "epoch": 17.240750966316952, "grad_norm": 0.17279578745365143, "learning_rate": 2.8128333333333333e-05, "loss": 0.0034, "step": 15620 }, { "epoch": 17.241855328547764, "grad_norm": 0.11121904850006104, "learning_rate": 2.8128e-05, "loss": 0.0044, "step": 15621 }, { "epoch": 17.242959690778576, "grad_norm": 0.07841970026493073, "learning_rate": 2.812766666666667e-05, "loss": 0.0026, "step": 15622 }, { "epoch": 17.24406405300939, "grad_norm": 0.20898565649986267, "learning_rate": 2.8127333333333334e-05, "loss": 0.0074, "step": 15623 }, { "epoch": 17.245168415240197, "grad_norm": 0.3594958484172821, "learning_rate": 2.8127e-05, "loss": 0.0128, "step": 15624 }, { "epoch": 17.24627277747101, "grad_norm": 0.20665070414543152, "learning_rate": 2.812666666666667e-05, "loss": 0.0046, "step": 15625 }, { "epoch": 17.247377139701822, "grad_norm": 0.18461653590202332, "learning_rate": 2.8126333333333332e-05, "loss": 0.0063, "step": 15626 }, { "epoch": 17.248481501932634, "grad_norm": 
0.25423407554626465, "learning_rate": 2.8126e-05, "loss": 0.0088, "step": 15627 }, { "epoch": 17.249585864163446, "grad_norm": 0.5600011348724365, "learning_rate": 2.8125666666666667e-05, "loss": 0.0066, "step": 15628 }, { "epoch": 17.25069022639426, "grad_norm": 0.8551027178764343, "learning_rate": 2.8125333333333333e-05, "loss": 0.0133, "step": 15629 }, { "epoch": 17.25179458862507, "grad_norm": 0.2400861233472824, "learning_rate": 2.8125e-05, "loss": 0.0071, "step": 15630 }, { "epoch": 17.25289895085588, "grad_norm": 0.14933523535728455, "learning_rate": 2.812466666666667e-05, "loss": 0.0048, "step": 15631 }, { "epoch": 17.25400331308669, "grad_norm": 1.025620937347412, "learning_rate": 2.8124333333333334e-05, "loss": 0.0125, "step": 15632 }, { "epoch": 17.255107675317504, "grad_norm": 0.16655083000659943, "learning_rate": 2.8124e-05, "loss": 0.0064, "step": 15633 }, { "epoch": 17.256212037548316, "grad_norm": 0.160090833902359, "learning_rate": 2.812366666666667e-05, "loss": 0.0063, "step": 15634 }, { "epoch": 17.25731639977913, "grad_norm": 0.27224963903427124, "learning_rate": 2.8123333333333332e-05, "loss": 0.0111, "step": 15635 }, { "epoch": 17.25842076200994, "grad_norm": 0.2388421893119812, "learning_rate": 2.8123e-05, "loss": 0.0036, "step": 15636 }, { "epoch": 17.25952512424075, "grad_norm": 0.23444850742816925, "learning_rate": 2.8122666666666668e-05, "loss": 0.0099, "step": 15637 }, { "epoch": 17.26062948647156, "grad_norm": 0.1664980947971344, "learning_rate": 2.8122333333333333e-05, "loss": 0.0048, "step": 15638 }, { "epoch": 17.261733848702374, "grad_norm": 0.24008609354496002, "learning_rate": 2.8122e-05, "loss": 0.0165, "step": 15639 }, { "epoch": 17.262838210933186, "grad_norm": 0.28698351979255676, "learning_rate": 2.812166666666667e-05, "loss": 0.0077, "step": 15640 }, { "epoch": 17.263942573163998, "grad_norm": 0.48572275042533875, "learning_rate": 2.8121333333333335e-05, "loss": 0.0171, "step": 15641 }, { "epoch": 17.26504693539481, 
"grad_norm": 0.45857590436935425, "learning_rate": 2.8121e-05, "loss": 0.0084, "step": 15642 }, { "epoch": 17.266151297625623, "grad_norm": 0.4019896686077118, "learning_rate": 2.812066666666667e-05, "loss": 0.0046, "step": 15643 }, { "epoch": 17.26725565985643, "grad_norm": 0.08204033970832825, "learning_rate": 2.8120333333333332e-05, "loss": 0.0045, "step": 15644 }, { "epoch": 17.268360022087244, "grad_norm": 0.3568342626094818, "learning_rate": 2.8120000000000002e-05, "loss": 0.0095, "step": 15645 }, { "epoch": 17.269464384318056, "grad_norm": 0.6906026005744934, "learning_rate": 2.8119666666666664e-05, "loss": 0.0158, "step": 15646 }, { "epoch": 17.270568746548868, "grad_norm": 0.29965054988861084, "learning_rate": 2.8119333333333334e-05, "loss": 0.0122, "step": 15647 }, { "epoch": 17.27167310877968, "grad_norm": 0.528651237487793, "learning_rate": 2.8119000000000003e-05, "loss": 0.008, "step": 15648 }, { "epoch": 17.272777471010492, "grad_norm": 0.44009408354759216, "learning_rate": 2.8118666666666666e-05, "loss": 0.0094, "step": 15649 }, { "epoch": 17.273881833241305, "grad_norm": 0.19980119168758392, "learning_rate": 2.8118333333333335e-05, "loss": 0.0085, "step": 15650 }, { "epoch": 17.274986195472113, "grad_norm": 0.6048311591148376, "learning_rate": 2.8118e-05, "loss": 0.0146, "step": 15651 }, { "epoch": 17.276090557702926, "grad_norm": 0.36075204610824585, "learning_rate": 2.8117666666666667e-05, "loss": 0.0064, "step": 15652 }, { "epoch": 17.277194919933738, "grad_norm": 0.6915684938430786, "learning_rate": 2.8117333333333333e-05, "loss": 0.1874, "step": 15653 }, { "epoch": 17.27829928216455, "grad_norm": 0.41321223974227905, "learning_rate": 2.8117000000000002e-05, "loss": 0.0656, "step": 15654 }, { "epoch": 17.279403644395362, "grad_norm": 0.3574677109718323, "learning_rate": 2.8116666666666665e-05, "loss": 0.0873, "step": 15655 }, { "epoch": 17.280508006626174, "grad_norm": 0.41493111848831177, "learning_rate": 2.8116333333333334e-05, "loss": 0.0518, 
"step": 15656 }, { "epoch": 17.281612368856987, "grad_norm": 0.4405195415019989, "learning_rate": 2.8116000000000003e-05, "loss": 0.0883, "step": 15657 }, { "epoch": 17.282716731087795, "grad_norm": 0.5071932077407837, "learning_rate": 2.8115666666666666e-05, "loss": 0.0478, "step": 15658 }, { "epoch": 17.283821093318608, "grad_norm": 0.25374796986579895, "learning_rate": 2.8115333333333335e-05, "loss": 0.0445, "step": 15659 }, { "epoch": 17.28492545554942, "grad_norm": 0.59368497133255, "learning_rate": 2.8115e-05, "loss": 0.0129, "step": 15660 }, { "epoch": 17.286029817780232, "grad_norm": 0.23941802978515625, "learning_rate": 2.8114666666666667e-05, "loss": 0.0143, "step": 15661 }, { "epoch": 17.287134180011044, "grad_norm": 0.16331428289413452, "learning_rate": 2.8114333333333333e-05, "loss": 0.0197, "step": 15662 }, { "epoch": 17.288238542241857, "grad_norm": 0.21727515757083893, "learning_rate": 2.8114000000000002e-05, "loss": 0.0097, "step": 15663 }, { "epoch": 17.28934290447267, "grad_norm": 0.2779576778411865, "learning_rate": 2.8113666666666665e-05, "loss": 0.0136, "step": 15664 }, { "epoch": 17.290447266703477, "grad_norm": 0.16360807418823242, "learning_rate": 2.8113333333333334e-05, "loss": 0.0106, "step": 15665 }, { "epoch": 17.29155162893429, "grad_norm": 0.15742595493793488, "learning_rate": 2.8113000000000003e-05, "loss": 0.0073, "step": 15666 }, { "epoch": 17.292655991165102, "grad_norm": 0.15947876870632172, "learning_rate": 2.8112666666666666e-05, "loss": 0.0076, "step": 15667 }, { "epoch": 17.293760353395914, "grad_norm": 0.1323232799768448, "learning_rate": 2.8112333333333335e-05, "loss": 0.0047, "step": 15668 }, { "epoch": 17.294864715626726, "grad_norm": 0.20467834174633026, "learning_rate": 2.8112e-05, "loss": 0.0103, "step": 15669 }, { "epoch": 17.29596907785754, "grad_norm": 0.2564423978328705, "learning_rate": 2.8111666666666667e-05, "loss": 0.0129, "step": 15670 }, { "epoch": 17.297073440088347, "grad_norm": 0.19903993606567383, 
"learning_rate": 2.8111333333333333e-05, "loss": 0.0052, "step": 15671 }, { "epoch": 17.29817780231916, "grad_norm": 0.2721564769744873, "learning_rate": 2.8111000000000002e-05, "loss": 0.0105, "step": 15672 }, { "epoch": 17.29928216454997, "grad_norm": 0.3179510831832886, "learning_rate": 2.811066666666667e-05, "loss": 0.009, "step": 15673 }, { "epoch": 17.300386526780784, "grad_norm": 0.23095768690109253, "learning_rate": 2.8110333333333334e-05, "loss": 0.0062, "step": 15674 }, { "epoch": 17.301490889011596, "grad_norm": 0.18716146051883698, "learning_rate": 2.8110000000000004e-05, "loss": 0.0058, "step": 15675 }, { "epoch": 17.30259525124241, "grad_norm": 0.3373352885246277, "learning_rate": 2.8109666666666666e-05, "loss": 0.0059, "step": 15676 }, { "epoch": 17.30369961347322, "grad_norm": 0.2694261372089386, "learning_rate": 2.8109333333333335e-05, "loss": 0.0086, "step": 15677 }, { "epoch": 17.30480397570403, "grad_norm": 0.4410562515258789, "learning_rate": 2.8108999999999998e-05, "loss": 0.0043, "step": 15678 }, { "epoch": 17.30590833793484, "grad_norm": 0.17861805856227875, "learning_rate": 2.8108666666666667e-05, "loss": 0.0078, "step": 15679 }, { "epoch": 17.307012700165654, "grad_norm": 0.1724604368209839, "learning_rate": 2.8108333333333333e-05, "loss": 0.0049, "step": 15680 }, { "epoch": 17.308117062396466, "grad_norm": 0.16832925379276276, "learning_rate": 2.8108e-05, "loss": 0.005, "step": 15681 }, { "epoch": 17.309221424627278, "grad_norm": 0.1299404799938202, "learning_rate": 2.810766666666667e-05, "loss": 0.0028, "step": 15682 }, { "epoch": 17.31032578685809, "grad_norm": 0.3279542028903961, "learning_rate": 2.8107333333333334e-05, "loss": 0.0076, "step": 15683 }, { "epoch": 17.311430149088903, "grad_norm": 0.1604471057653427, "learning_rate": 2.8107e-05, "loss": 0.007, "step": 15684 }, { "epoch": 17.31253451131971, "grad_norm": 0.29864710569381714, "learning_rate": 2.8106666666666666e-05, "loss": 0.0089, "step": 15685 }, { "epoch": 
17.313638873550524, "grad_norm": 0.44094693660736084, "learning_rate": 2.8106333333333336e-05, "loss": 0.0102, "step": 15686 }, { "epoch": 17.314743235781336, "grad_norm": 0.2762977182865143, "learning_rate": 2.8105999999999998e-05, "loss": 0.0053, "step": 15687 }, { "epoch": 17.315847598012148, "grad_norm": 0.06349579244852066, "learning_rate": 2.8105666666666668e-05, "loss": 0.0022, "step": 15688 }, { "epoch": 17.31695196024296, "grad_norm": 0.1517079919576645, "learning_rate": 2.8105333333333333e-05, "loss": 0.0051, "step": 15689 }, { "epoch": 17.318056322473772, "grad_norm": 0.2330910861492157, "learning_rate": 2.8105e-05, "loss": 0.0051, "step": 15690 }, { "epoch": 17.319160684704585, "grad_norm": 0.13333389163017273, "learning_rate": 2.810466666666667e-05, "loss": 0.0034, "step": 15691 }, { "epoch": 17.320265046935393, "grad_norm": 0.20614337921142578, "learning_rate": 2.8104333333333335e-05, "loss": 0.0032, "step": 15692 }, { "epoch": 17.321369409166206, "grad_norm": 0.21022608876228333, "learning_rate": 2.8104e-05, "loss": 0.0066, "step": 15693 }, { "epoch": 17.322473771397018, "grad_norm": 0.5408589839935303, "learning_rate": 2.8103666666666667e-05, "loss": 0.0126, "step": 15694 }, { "epoch": 17.32357813362783, "grad_norm": 0.20759142935276031, "learning_rate": 2.8103333333333336e-05, "loss": 0.0078, "step": 15695 }, { "epoch": 17.324682495858642, "grad_norm": 0.19018149375915527, "learning_rate": 2.8103e-05, "loss": 0.0053, "step": 15696 }, { "epoch": 17.325786858089455, "grad_norm": 0.24433650076389313, "learning_rate": 2.8102666666666668e-05, "loss": 0.0121, "step": 15697 }, { "epoch": 17.326891220320267, "grad_norm": 0.20417073369026184, "learning_rate": 2.8102333333333337e-05, "loss": 0.0069, "step": 15698 }, { "epoch": 17.327995582551075, "grad_norm": 0.6740301251411438, "learning_rate": 2.8102e-05, "loss": 0.0074, "step": 15699 }, { "epoch": 17.329099944781888, "grad_norm": 0.2022535800933838, "learning_rate": 2.810166666666667e-05, "loss": 0.0029, 
"step": 15700 }, { "epoch": 17.3302043070127, "grad_norm": 0.15740197896957397, "learning_rate": 2.8101333333333335e-05, "loss": 0.0034, "step": 15701 }, { "epoch": 17.331308669243512, "grad_norm": 0.3731699287891388, "learning_rate": 2.8101e-05, "loss": 0.0045, "step": 15702 }, { "epoch": 17.332413031474324, "grad_norm": 0.5109659433364868, "learning_rate": 2.8100666666666667e-05, "loss": 0.147, "step": 15703 }, { "epoch": 17.333517393705137, "grad_norm": 0.5940404534339905, "learning_rate": 2.8100333333333336e-05, "loss": 0.1437, "step": 15704 }, { "epoch": 17.33462175593595, "grad_norm": 0.480779767036438, "learning_rate": 2.81e-05, "loss": 0.0596, "step": 15705 }, { "epoch": 17.335726118166757, "grad_norm": 0.44195884466171265, "learning_rate": 2.8099666666666668e-05, "loss": 0.0589, "step": 15706 }, { "epoch": 17.33683048039757, "grad_norm": 0.42497995495796204, "learning_rate": 2.8099333333333334e-05, "loss": 0.0647, "step": 15707 }, { "epoch": 17.337934842628382, "grad_norm": 0.4005434811115265, "learning_rate": 2.8099e-05, "loss": 0.0609, "step": 15708 }, { "epoch": 17.339039204859194, "grad_norm": 0.31532031297683716, "learning_rate": 2.809866666666667e-05, "loss": 0.0546, "step": 15709 }, { "epoch": 17.340143567090006, "grad_norm": 0.6511314511299133, "learning_rate": 2.809833333333333e-05, "loss": 0.0455, "step": 15710 }, { "epoch": 17.34124792932082, "grad_norm": 0.30204641819000244, "learning_rate": 2.8098e-05, "loss": 0.0211, "step": 15711 }, { "epoch": 17.342352291551627, "grad_norm": 0.2021845132112503, "learning_rate": 2.8097666666666667e-05, "loss": 0.03, "step": 15712 }, { "epoch": 17.34345665378244, "grad_norm": 0.2199711799621582, "learning_rate": 2.8097333333333333e-05, "loss": 0.0146, "step": 15713 }, { "epoch": 17.34456101601325, "grad_norm": 0.1663491427898407, "learning_rate": 2.8097e-05, "loss": 0.0306, "step": 15714 }, { "epoch": 17.345665378244064, "grad_norm": 0.29527729749679565, "learning_rate": 2.8096666666666668e-05, "loss": 
0.0167, "step": 15715 }, { "epoch": 17.346769740474876, "grad_norm": 0.595172107219696, "learning_rate": 2.8096333333333334e-05, "loss": 0.0063, "step": 15716 }, { "epoch": 17.34787410270569, "grad_norm": 0.22443795204162598, "learning_rate": 2.8096e-05, "loss": 0.0133, "step": 15717 }, { "epoch": 17.3489784649365, "grad_norm": 0.13692249357700348, "learning_rate": 2.809566666666667e-05, "loss": 0.0075, "step": 15718 }, { "epoch": 17.35008282716731, "grad_norm": 0.1838335543870926, "learning_rate": 2.8095333333333332e-05, "loss": 0.0085, "step": 15719 }, { "epoch": 17.35118718939812, "grad_norm": 0.1840975135564804, "learning_rate": 2.8095e-05, "loss": 0.0087, "step": 15720 }, { "epoch": 17.352291551628934, "grad_norm": 0.16142012178897858, "learning_rate": 2.8094666666666667e-05, "loss": 0.0082, "step": 15721 }, { "epoch": 17.353395913859746, "grad_norm": 0.20214764773845673, "learning_rate": 2.8094333333333333e-05, "loss": 0.0089, "step": 15722 }, { "epoch": 17.35450027609056, "grad_norm": 0.16452814638614655, "learning_rate": 2.8094000000000002e-05, "loss": 0.0068, "step": 15723 }, { "epoch": 17.35560463832137, "grad_norm": 0.3829590976238251, "learning_rate": 2.809366666666667e-05, "loss": 0.0118, "step": 15724 }, { "epoch": 17.356709000552183, "grad_norm": 0.17747873067855835, "learning_rate": 2.8093333333333334e-05, "loss": 0.0083, "step": 15725 }, { "epoch": 17.35781336278299, "grad_norm": 0.1736505925655365, "learning_rate": 2.8093e-05, "loss": 0.0065, "step": 15726 }, { "epoch": 17.358917725013804, "grad_norm": 0.1742953658103943, "learning_rate": 2.809266666666667e-05, "loss": 0.0095, "step": 15727 }, { "epoch": 17.360022087244616, "grad_norm": 0.7304892539978027, "learning_rate": 2.8092333333333332e-05, "loss": 0.0142, "step": 15728 }, { "epoch": 17.361126449475428, "grad_norm": 0.38297703862190247, "learning_rate": 2.8092e-05, "loss": 0.0138, "step": 15729 }, { "epoch": 17.36223081170624, "grad_norm": 0.3170732259750366, "learning_rate": 
2.8091666666666667e-05, "loss": 0.0057, "step": 15730 }, { "epoch": 17.363335173937053, "grad_norm": 0.31507131457328796, "learning_rate": 2.8091333333333333e-05, "loss": 0.007, "step": 15731 }, { "epoch": 17.364439536167865, "grad_norm": 0.2222837507724762, "learning_rate": 2.8091000000000003e-05, "loss": 0.0075, "step": 15732 }, { "epoch": 17.365543898398673, "grad_norm": 0.25258076190948486, "learning_rate": 2.809066666666667e-05, "loss": 0.0063, "step": 15733 }, { "epoch": 17.366648260629486, "grad_norm": 0.11191532015800476, "learning_rate": 2.8090333333333334e-05, "loss": 0.0036, "step": 15734 }, { "epoch": 17.367752622860298, "grad_norm": 0.10697631537914276, "learning_rate": 2.809e-05, "loss": 0.0071, "step": 15735 }, { "epoch": 17.36885698509111, "grad_norm": 0.2230779230594635, "learning_rate": 2.8089666666666666e-05, "loss": 0.0071, "step": 15736 }, { "epoch": 17.369961347321922, "grad_norm": 0.17642442882061005, "learning_rate": 2.8089333333333332e-05, "loss": 0.0072, "step": 15737 }, { "epoch": 17.371065709552735, "grad_norm": 0.2761167585849762, "learning_rate": 2.8089e-05, "loss": 0.0086, "step": 15738 }, { "epoch": 17.372170071783543, "grad_norm": 0.3406451642513275, "learning_rate": 2.8088666666666664e-05, "loss": 0.012, "step": 15739 }, { "epoch": 17.373274434014355, "grad_norm": 0.12055714428424835, "learning_rate": 2.8088333333333333e-05, "loss": 0.007, "step": 15740 }, { "epoch": 17.374378796245168, "grad_norm": 0.3931337594985962, "learning_rate": 2.8088000000000003e-05, "loss": 0.0096, "step": 15741 }, { "epoch": 17.37548315847598, "grad_norm": 0.22906364500522614, "learning_rate": 2.8087666666666665e-05, "loss": 0.0096, "step": 15742 }, { "epoch": 17.376587520706792, "grad_norm": 0.1883281022310257, "learning_rate": 2.8087333333333335e-05, "loss": 0.0057, "step": 15743 }, { "epoch": 17.377691882937604, "grad_norm": 0.10758443921804428, "learning_rate": 2.8087e-05, "loss": 0.0044, "step": 15744 }, { "epoch": 17.378796245168417, "grad_norm": 
0.1862148642539978, "learning_rate": 2.8086666666666667e-05, "loss": 0.0053, "step": 15745 }, { "epoch": 17.379900607399225, "grad_norm": 0.25025516748428345, "learning_rate": 2.8086333333333332e-05, "loss": 0.004, "step": 15746 }, { "epoch": 17.381004969630037, "grad_norm": 0.7757359743118286, "learning_rate": 2.8086000000000002e-05, "loss": 0.0181, "step": 15747 }, { "epoch": 17.38210933186085, "grad_norm": 0.21141478419303894, "learning_rate": 2.8085666666666668e-05, "loss": 0.006, "step": 15748 }, { "epoch": 17.383213694091662, "grad_norm": 0.40338635444641113, "learning_rate": 2.8085333333333334e-05, "loss": 0.0148, "step": 15749 }, { "epoch": 17.384318056322474, "grad_norm": 0.221064031124115, "learning_rate": 2.8085000000000003e-05, "loss": 0.015, "step": 15750 }, { "epoch": 17.385422418553286, "grad_norm": 0.21437743306159973, "learning_rate": 2.8084666666666666e-05, "loss": 0.0074, "step": 15751 }, { "epoch": 17.3865267807841, "grad_norm": 0.16608025133609772, "learning_rate": 2.8084333333333335e-05, "loss": 0.0188, "step": 15752 }, { "epoch": 17.387631143014907, "grad_norm": 2.2302017211914062, "learning_rate": 2.8084e-05, "loss": 0.15, "step": 15753 }, { "epoch": 17.38873550524572, "grad_norm": 0.5362427234649658, "learning_rate": 2.8083666666666667e-05, "loss": 0.1514, "step": 15754 }, { "epoch": 17.38983986747653, "grad_norm": 0.6508174538612366, "learning_rate": 2.8083333333333333e-05, "loss": 0.144, "step": 15755 }, { "epoch": 17.390944229707344, "grad_norm": 0.4592806100845337, "learning_rate": 2.8083000000000002e-05, "loss": 0.095, "step": 15756 }, { "epoch": 17.392048591938156, "grad_norm": 0.7364616990089417, "learning_rate": 2.8082666666666668e-05, "loss": 0.0504, "step": 15757 }, { "epoch": 17.39315295416897, "grad_norm": 0.34792137145996094, "learning_rate": 2.8082333333333334e-05, "loss": 0.0493, "step": 15758 }, { "epoch": 17.39425731639978, "grad_norm": 0.34819895029067993, "learning_rate": 2.8082000000000003e-05, "loss": 0.0418, "step": 
15759 }, { "epoch": 17.39536167863059, "grad_norm": 0.4825363755226135, "learning_rate": 2.8081666666666666e-05, "loss": 0.0386, "step": 15760 }, { "epoch": 17.3964660408614, "grad_norm": 0.16917002201080322, "learning_rate": 2.8081333333333335e-05, "loss": 0.013, "step": 15761 }, { "epoch": 17.397570403092214, "grad_norm": 0.203131303191185, "learning_rate": 2.8081e-05, "loss": 0.0098, "step": 15762 }, { "epoch": 17.398674765323026, "grad_norm": 0.18468260765075684, "learning_rate": 2.8080666666666667e-05, "loss": 0.0099, "step": 15763 }, { "epoch": 17.39977912755384, "grad_norm": 0.20086818933486938, "learning_rate": 2.8080333333333333e-05, "loss": 0.0086, "step": 15764 }, { "epoch": 17.40088348978465, "grad_norm": 0.1794363260269165, "learning_rate": 2.8080000000000002e-05, "loss": 0.0062, "step": 15765 }, { "epoch": 17.401987852015463, "grad_norm": 0.1904381513595581, "learning_rate": 2.8079666666666668e-05, "loss": 0.0122, "step": 15766 }, { "epoch": 17.40309221424627, "grad_norm": 0.42791876196861267, "learning_rate": 2.8079333333333334e-05, "loss": 0.0137, "step": 15767 }, { "epoch": 17.404196576477084, "grad_norm": 0.11149157583713531, "learning_rate": 2.8079e-05, "loss": 0.0045, "step": 15768 }, { "epoch": 17.405300938707896, "grad_norm": 0.13063165545463562, "learning_rate": 2.8078666666666666e-05, "loss": 0.006, "step": 15769 }, { "epoch": 17.406405300938708, "grad_norm": 0.6483840942382812, "learning_rate": 2.8078333333333335e-05, "loss": 0.019, "step": 15770 }, { "epoch": 17.40750966316952, "grad_norm": 0.1108691468834877, "learning_rate": 2.8077999999999998e-05, "loss": 0.0036, "step": 15771 }, { "epoch": 17.408614025400333, "grad_norm": 0.20334045588970184, "learning_rate": 2.8077666666666667e-05, "loss": 0.0035, "step": 15772 }, { "epoch": 17.409718387631145, "grad_norm": 0.152714803814888, "learning_rate": 2.8077333333333336e-05, "loss": 0.0079, "step": 15773 }, { "epoch": 17.410822749861953, "grad_norm": 0.2049676477909088, "learning_rate": 
2.8077e-05, "loss": 0.0097, "step": 15774 }, { "epoch": 17.411927112092766, "grad_norm": 0.18308435380458832, "learning_rate": 2.807666666666667e-05, "loss": 0.0078, "step": 15775 }, { "epoch": 17.413031474323578, "grad_norm": 0.1582462340593338, "learning_rate": 2.8076333333333334e-05, "loss": 0.0069, "step": 15776 }, { "epoch": 17.41413583655439, "grad_norm": 0.41170287132263184, "learning_rate": 2.8076e-05, "loss": 0.007, "step": 15777 }, { "epoch": 17.415240198785202, "grad_norm": 0.21774815022945404, "learning_rate": 2.8075666666666666e-05, "loss": 0.0077, "step": 15778 }, { "epoch": 17.416344561016015, "grad_norm": 0.26065242290496826, "learning_rate": 2.8075333333333335e-05, "loss": 0.0134, "step": 15779 }, { "epoch": 17.417448923246823, "grad_norm": 0.2761785387992859, "learning_rate": 2.8074999999999998e-05, "loss": 0.0105, "step": 15780 }, { "epoch": 17.418553285477635, "grad_norm": 0.1898254156112671, "learning_rate": 2.8074666666666667e-05, "loss": 0.0091, "step": 15781 }, { "epoch": 17.419657647708448, "grad_norm": 1.1681102514266968, "learning_rate": 2.8074333333333337e-05, "loss": 0.0064, "step": 15782 }, { "epoch": 17.42076200993926, "grad_norm": 0.48312458395957947, "learning_rate": 2.8074e-05, "loss": 0.0066, "step": 15783 }, { "epoch": 17.421866372170072, "grad_norm": 0.3116922378540039, "learning_rate": 2.807366666666667e-05, "loss": 0.0088, "step": 15784 }, { "epoch": 17.422970734400884, "grad_norm": 0.1309545934200287, "learning_rate": 2.8073333333333334e-05, "loss": 0.0032, "step": 15785 }, { "epoch": 17.424075096631697, "grad_norm": 0.33596113324165344, "learning_rate": 2.8073e-05, "loss": 0.0075, "step": 15786 }, { "epoch": 17.425179458862505, "grad_norm": 0.3263854384422302, "learning_rate": 2.8072666666666666e-05, "loss": 0.0414, "step": 15787 }, { "epoch": 17.426283821093318, "grad_norm": 0.2641173005104065, "learning_rate": 2.8072333333333336e-05, "loss": 0.0085, "step": 15788 }, { "epoch": 17.42738818332413, "grad_norm": 
0.18434670567512512, "learning_rate": 2.8071999999999998e-05, "loss": 0.0046, "step": 15789 }, { "epoch": 17.428492545554942, "grad_norm": 0.41332224011421204, "learning_rate": 2.8071666666666668e-05, "loss": 0.0212, "step": 15790 }, { "epoch": 17.429596907785754, "grad_norm": 0.0876297876238823, "learning_rate": 2.8071333333333337e-05, "loss": 0.0041, "step": 15791 }, { "epoch": 17.430701270016566, "grad_norm": 0.08422800153493881, "learning_rate": 2.8071e-05, "loss": 0.003, "step": 15792 }, { "epoch": 17.43180563224738, "grad_norm": 0.23451447486877441, "learning_rate": 2.807066666666667e-05, "loss": 0.007, "step": 15793 }, { "epoch": 17.432909994478187, "grad_norm": 0.2372409701347351, "learning_rate": 2.8070333333333335e-05, "loss": 0.0082, "step": 15794 }, { "epoch": 17.434014356709, "grad_norm": 0.23313671350479126, "learning_rate": 2.807e-05, "loss": 0.0083, "step": 15795 }, { "epoch": 17.435118718939812, "grad_norm": 0.23220060765743256, "learning_rate": 2.8069666666666667e-05, "loss": 0.0145, "step": 15796 }, { "epoch": 17.436223081170624, "grad_norm": 0.23988477885723114, "learning_rate": 2.8069333333333332e-05, "loss": 0.01, "step": 15797 }, { "epoch": 17.437327443401436, "grad_norm": 0.5009773373603821, "learning_rate": 2.8069000000000002e-05, "loss": 0.0102, "step": 15798 }, { "epoch": 17.43843180563225, "grad_norm": 0.5725462436676025, "learning_rate": 2.8068666666666668e-05, "loss": 0.0213, "step": 15799 }, { "epoch": 17.43953616786306, "grad_norm": 0.29195547103881836, "learning_rate": 2.8068333333333334e-05, "loss": 0.0113, "step": 15800 }, { "epoch": 17.44064053009387, "grad_norm": 0.22884967923164368, "learning_rate": 2.8068e-05, "loss": 0.0092, "step": 15801 }, { "epoch": 17.44174489232468, "grad_norm": 1.2174407243728638, "learning_rate": 2.806766666666667e-05, "loss": 0.0166, "step": 15802 }, { "epoch": 17.442849254555494, "grad_norm": 0.5229296088218689, "learning_rate": 2.806733333333333e-05, "loss": 0.1291, "step": 15803 }, { "epoch": 
17.443953616786306, "grad_norm": 0.39879050850868225, "learning_rate": 2.8067e-05, "loss": 0.0968, "step": 15804 }, { "epoch": 17.44505797901712, "grad_norm": 0.6728322505950928, "learning_rate": 2.8066666666666667e-05, "loss": 0.0888, "step": 15805 }, { "epoch": 17.44616234124793, "grad_norm": 0.4532432556152344, "learning_rate": 2.8066333333333333e-05, "loss": 0.0935, "step": 15806 }, { "epoch": 17.447266703478743, "grad_norm": 0.4434601664543152, "learning_rate": 2.8066000000000002e-05, "loss": 0.0542, "step": 15807 }, { "epoch": 17.44837106570955, "grad_norm": 0.2487199455499649, "learning_rate": 2.8065666666666668e-05, "loss": 0.048, "step": 15808 }, { "epoch": 17.449475427940364, "grad_norm": 0.23542587459087372, "learning_rate": 2.8065333333333334e-05, "loss": 0.0197, "step": 15809 }, { "epoch": 17.450579790171176, "grad_norm": 0.22430211305618286, "learning_rate": 2.8065e-05, "loss": 0.0196, "step": 15810 }, { "epoch": 17.451684152401988, "grad_norm": 0.27755171060562134, "learning_rate": 2.806466666666667e-05, "loss": 0.0318, "step": 15811 }, { "epoch": 17.4527885146328, "grad_norm": 0.13474491238594055, "learning_rate": 2.806433333333333e-05, "loss": 0.0126, "step": 15812 }, { "epoch": 17.453892876863613, "grad_norm": 1.297772765159607, "learning_rate": 2.8064e-05, "loss": 0.0137, "step": 15813 }, { "epoch": 17.45499723909442, "grad_norm": 0.29654255509376526, "learning_rate": 2.8063666666666667e-05, "loss": 0.0105, "step": 15814 }, { "epoch": 17.456101601325233, "grad_norm": 0.2355687916278839, "learning_rate": 2.8063333333333333e-05, "loss": 0.0074, "step": 15815 }, { "epoch": 17.457205963556046, "grad_norm": 0.2452053427696228, "learning_rate": 2.8063000000000002e-05, "loss": 0.0375, "step": 15816 }, { "epoch": 17.458310325786858, "grad_norm": 0.4553394019603729, "learning_rate": 2.8062666666666668e-05, "loss": 0.0124, "step": 15817 }, { "epoch": 17.45941468801767, "grad_norm": 0.43600672483444214, "learning_rate": 2.8062333333333334e-05, "loss": 
0.016, "step": 15818 }, { "epoch": 17.460519050248482, "grad_norm": 0.34961485862731934, "learning_rate": 2.8062e-05, "loss": 0.0104, "step": 15819 }, { "epoch": 17.461623412479295, "grad_norm": 0.19129040837287903, "learning_rate": 2.806166666666667e-05, "loss": 0.0072, "step": 15820 }, { "epoch": 17.462727774710103, "grad_norm": 0.16432668268680573, "learning_rate": 2.8061333333333332e-05, "loss": 0.0097, "step": 15821 }, { "epoch": 17.463832136940916, "grad_norm": 0.3409688472747803, "learning_rate": 2.8061e-05, "loss": 0.0127, "step": 15822 }, { "epoch": 17.464936499171728, "grad_norm": 0.14591330289840698, "learning_rate": 2.806066666666667e-05, "loss": 0.0055, "step": 15823 }, { "epoch": 17.46604086140254, "grad_norm": 0.21146416664123535, "learning_rate": 2.8060333333333333e-05, "loss": 0.0092, "step": 15824 }, { "epoch": 17.467145223633352, "grad_norm": 0.14382395148277283, "learning_rate": 2.8060000000000002e-05, "loss": 0.0042, "step": 15825 }, { "epoch": 17.468249585864164, "grad_norm": 0.18021807074546814, "learning_rate": 2.805966666666667e-05, "loss": 0.0062, "step": 15826 }, { "epoch": 17.469353948094977, "grad_norm": 0.39165785908699036, "learning_rate": 2.8059333333333334e-05, "loss": 0.0271, "step": 15827 }, { "epoch": 17.470458310325785, "grad_norm": 0.2557789087295532, "learning_rate": 2.8059e-05, "loss": 0.0068, "step": 15828 }, { "epoch": 17.471562672556598, "grad_norm": 0.31898537278175354, "learning_rate": 2.8058666666666666e-05, "loss": 0.008, "step": 15829 }, { "epoch": 17.47266703478741, "grad_norm": 0.20600412786006927, "learning_rate": 2.8058333333333332e-05, "loss": 0.008, "step": 15830 }, { "epoch": 17.473771397018222, "grad_norm": 0.30770355463027954, "learning_rate": 2.8058e-05, "loss": 0.0143, "step": 15831 }, { "epoch": 17.474875759249034, "grad_norm": 0.49228864908218384, "learning_rate": 2.8057666666666667e-05, "loss": 0.0212, "step": 15832 }, { "epoch": 17.475980121479846, "grad_norm": 0.4296496510505676, "learning_rate": 
2.8057333333333333e-05, "loss": 0.0097, "step": 15833 }, { "epoch": 17.47708448371066, "grad_norm": 0.11305136233568192, "learning_rate": 2.8057000000000003e-05, "loss": 0.0031, "step": 15834 }, { "epoch": 17.478188845941467, "grad_norm": 0.1717405617237091, "learning_rate": 2.8056666666666665e-05, "loss": 0.0034, "step": 15835 }, { "epoch": 17.47929320817228, "grad_norm": 0.2228000909090042, "learning_rate": 2.8056333333333334e-05, "loss": 0.0095, "step": 15836 }, { "epoch": 17.480397570403092, "grad_norm": 0.26689833402633667, "learning_rate": 2.8056e-05, "loss": 0.008, "step": 15837 }, { "epoch": 17.481501932633904, "grad_norm": 0.5122800469398499, "learning_rate": 2.8055666666666666e-05, "loss": 0.0064, "step": 15838 }, { "epoch": 17.482606294864716, "grad_norm": 0.25664883852005005, "learning_rate": 2.8055333333333332e-05, "loss": 0.0074, "step": 15839 }, { "epoch": 17.48371065709553, "grad_norm": 0.2880444824695587, "learning_rate": 2.8055e-05, "loss": 0.016, "step": 15840 }, { "epoch": 17.48481501932634, "grad_norm": 0.19681622087955475, "learning_rate": 2.8054666666666668e-05, "loss": 0.0096, "step": 15841 }, { "epoch": 17.48591938155715, "grad_norm": 0.19688768684864044, "learning_rate": 2.8054333333333333e-05, "loss": 0.0076, "step": 15842 }, { "epoch": 17.48702374378796, "grad_norm": 0.26428651809692383, "learning_rate": 2.8054000000000003e-05, "loss": 0.0067, "step": 15843 }, { "epoch": 17.488128106018774, "grad_norm": 0.13257712125778198, "learning_rate": 2.8053666666666665e-05, "loss": 0.0059, "step": 15844 }, { "epoch": 17.489232468249586, "grad_norm": 0.41668951511383057, "learning_rate": 2.8053333333333335e-05, "loss": 0.0096, "step": 15845 }, { "epoch": 17.4903368304804, "grad_norm": 0.1623326539993286, "learning_rate": 2.8053e-05, "loss": 0.0063, "step": 15846 }, { "epoch": 17.49144119271121, "grad_norm": 0.20236051082611084, "learning_rate": 2.8052666666666667e-05, "loss": 0.0135, "step": 15847 }, { "epoch": 17.49254555494202, "grad_norm": 
0.22955334186553955, "learning_rate": 2.8052333333333336e-05, "loss": 0.0055, "step": 15848 }, { "epoch": 17.49364991717283, "grad_norm": 0.38772282004356384, "learning_rate": 2.8052000000000002e-05, "loss": 0.0152, "step": 15849 }, { "epoch": 17.494754279403644, "grad_norm": 0.4737771153450012, "learning_rate": 2.8051666666666668e-05, "loss": 0.0096, "step": 15850 }, { "epoch": 17.495858641634456, "grad_norm": 0.18625682592391968, "learning_rate": 2.8051333333333334e-05, "loss": 0.0079, "step": 15851 }, { "epoch": 17.496963003865268, "grad_norm": 0.5541265606880188, "learning_rate": 2.8051000000000003e-05, "loss": 0.013, "step": 15852 }, { "epoch": 17.49806736609608, "grad_norm": 0.5758869051933289, "learning_rate": 2.8050666666666666e-05, "loss": 0.1132, "step": 15853 }, { "epoch": 17.499171728326893, "grad_norm": 0.44770219922065735, "learning_rate": 2.8050333333333335e-05, "loss": 0.0773, "step": 15854 }, { "epoch": 17.5002760905577, "grad_norm": 0.5503525137901306, "learning_rate": 2.805e-05, "loss": 0.1036, "step": 15855 }, { "epoch": 17.501380452788514, "grad_norm": 0.4544542133808136, "learning_rate": 2.8049666666666667e-05, "loss": 0.0698, "step": 15856 }, { "epoch": 17.502484815019326, "grad_norm": 0.5964601039886475, "learning_rate": 2.8049333333333336e-05, "loss": 0.0916, "step": 15857 }, { "epoch": 17.503589177250138, "grad_norm": 0.3834662437438965, "learning_rate": 2.8049e-05, "loss": 0.0808, "step": 15858 }, { "epoch": 17.50469353948095, "grad_norm": 0.27529096603393555, "learning_rate": 2.8048666666666668e-05, "loss": 0.0388, "step": 15859 }, { "epoch": 17.505797901711762, "grad_norm": 0.514066755771637, "learning_rate": 2.8048333333333334e-05, "loss": 0.0343, "step": 15860 }, { "epoch": 17.506902263942575, "grad_norm": 0.16173657774925232, "learning_rate": 2.8048e-05, "loss": 0.018, "step": 15861 }, { "epoch": 17.508006626173383, "grad_norm": 0.31619155406951904, "learning_rate": 2.8047666666666666e-05, "loss": 0.0299, "step": 15862 }, { "epoch": 
17.509110988404196, "grad_norm": 0.29944998025894165, "learning_rate": 2.8047333333333335e-05, "loss": 0.0101, "step": 15863 }, { "epoch": 17.510215350635008, "grad_norm": 0.3090829849243164, "learning_rate": 2.8047e-05, "loss": 0.0086, "step": 15864 }, { "epoch": 17.51131971286582, "grad_norm": 0.4211153984069824, "learning_rate": 2.8046666666666667e-05, "loss": 0.0226, "step": 15865 }, { "epoch": 17.512424075096632, "grad_norm": 0.27347272634506226, "learning_rate": 2.8046333333333336e-05, "loss": 0.0123, "step": 15866 }, { "epoch": 17.513528437327444, "grad_norm": 0.14555399119853973, "learning_rate": 2.8046e-05, "loss": 0.0069, "step": 15867 }, { "epoch": 17.514632799558257, "grad_norm": 0.22232769429683685, "learning_rate": 2.8045666666666668e-05, "loss": 0.0068, "step": 15868 }, { "epoch": 17.515737161789065, "grad_norm": 0.2566032111644745, "learning_rate": 2.8045333333333334e-05, "loss": 0.0121, "step": 15869 }, { "epoch": 17.516841524019878, "grad_norm": 0.13975465297698975, "learning_rate": 2.8045e-05, "loss": 0.0077, "step": 15870 }, { "epoch": 17.51794588625069, "grad_norm": 0.19635158777236938, "learning_rate": 2.8044666666666666e-05, "loss": 0.0071, "step": 15871 }, { "epoch": 17.519050248481502, "grad_norm": 0.13679596781730652, "learning_rate": 2.8044333333333335e-05, "loss": 0.006, "step": 15872 }, { "epoch": 17.520154610712314, "grad_norm": 0.3231053948402405, "learning_rate": 2.8044e-05, "loss": 0.0095, "step": 15873 }, { "epoch": 17.521258972943127, "grad_norm": 0.18420253694057465, "learning_rate": 2.8043666666666667e-05, "loss": 0.0083, "step": 15874 }, { "epoch": 17.52236333517394, "grad_norm": 0.25316980481147766, "learning_rate": 2.8043333333333336e-05, "loss": 0.0061, "step": 15875 }, { "epoch": 17.523467697404747, "grad_norm": 0.11266382783651352, "learning_rate": 2.8043e-05, "loss": 0.0048, "step": 15876 }, { "epoch": 17.52457205963556, "grad_norm": 0.11217543482780457, "learning_rate": 2.804266666666667e-05, "loss": 0.0049, "step": 
15877 }, { "epoch": 17.525676421866372, "grad_norm": 0.3733430802822113, "learning_rate": 2.8042333333333334e-05, "loss": 0.0653, "step": 15878 }, { "epoch": 17.526780784097184, "grad_norm": 0.19361020624637604, "learning_rate": 2.8042e-05, "loss": 0.007, "step": 15879 }, { "epoch": 17.527885146327996, "grad_norm": 0.16598469018936157, "learning_rate": 2.8041666666666666e-05, "loss": 0.0057, "step": 15880 }, { "epoch": 17.52898950855881, "grad_norm": 0.10632569342851639, "learning_rate": 2.8041333333333335e-05, "loss": 0.0046, "step": 15881 }, { "epoch": 17.53009387078962, "grad_norm": 0.11855020374059677, "learning_rate": 2.8041e-05, "loss": 0.0089, "step": 15882 }, { "epoch": 17.53119823302043, "grad_norm": 0.3610154390335083, "learning_rate": 2.8040666666666667e-05, "loss": 0.006, "step": 15883 }, { "epoch": 17.53230259525124, "grad_norm": 0.12718194723129272, "learning_rate": 2.8040333333333337e-05, "loss": 0.0042, "step": 15884 }, { "epoch": 17.533406957482054, "grad_norm": 0.3303535580635071, "learning_rate": 2.804e-05, "loss": 0.0048, "step": 15885 }, { "epoch": 17.534511319712866, "grad_norm": 0.27373239398002625, "learning_rate": 2.803966666666667e-05, "loss": 0.0053, "step": 15886 }, { "epoch": 17.53561568194368, "grad_norm": 0.523227334022522, "learning_rate": 2.8039333333333334e-05, "loss": 0.0133, "step": 15887 }, { "epoch": 17.53672004417449, "grad_norm": 0.43714672327041626, "learning_rate": 2.8039e-05, "loss": 0.0045, "step": 15888 }, { "epoch": 17.5378244064053, "grad_norm": 0.25567325949668884, "learning_rate": 2.8038666666666666e-05, "loss": 0.0075, "step": 15889 }, { "epoch": 17.53892876863611, "grad_norm": 0.20594793558120728, "learning_rate": 2.8038333333333332e-05, "loss": 0.0041, "step": 15890 }, { "epoch": 17.540033130866924, "grad_norm": 0.23927444219589233, "learning_rate": 2.8038e-05, "loss": 0.0053, "step": 15891 }, { "epoch": 17.541137493097736, "grad_norm": 0.19526158273220062, "learning_rate": 2.8037666666666668e-05, "loss": 0.0099, 
"step": 15892 }, { "epoch": 17.542241855328548, "grad_norm": 0.07048599421977997, "learning_rate": 2.8037333333333333e-05, "loss": 0.0038, "step": 15893 }, { "epoch": 17.54334621755936, "grad_norm": 0.15823955833911896, "learning_rate": 2.8037e-05, "loss": 0.0069, "step": 15894 }, { "epoch": 17.544450579790173, "grad_norm": 0.21623507142066956, "learning_rate": 2.803666666666667e-05, "loss": 0.0069, "step": 15895 }, { "epoch": 17.54555494202098, "grad_norm": 0.08671081066131592, "learning_rate": 2.803633333333333e-05, "loss": 0.0037, "step": 15896 }, { "epoch": 17.546659304251794, "grad_norm": 0.24860380589962006, "learning_rate": 2.8036e-05, "loss": 0.007, "step": 15897 }, { "epoch": 17.547763666482606, "grad_norm": 0.14832167327404022, "learning_rate": 2.803566666666667e-05, "loss": 0.0042, "step": 15898 }, { "epoch": 17.548868028713418, "grad_norm": 0.22285985946655273, "learning_rate": 2.8035333333333332e-05, "loss": 0.0054, "step": 15899 }, { "epoch": 17.54997239094423, "grad_norm": 0.09953538328409195, "learning_rate": 2.8035000000000002e-05, "loss": 0.0028, "step": 15900 }, { "epoch": 17.551076753175042, "grad_norm": 0.1453152894973755, "learning_rate": 2.8034666666666668e-05, "loss": 0.0039, "step": 15901 }, { "epoch": 17.552181115405855, "grad_norm": 0.4491945207118988, "learning_rate": 2.8034333333333334e-05, "loss": 0.019, "step": 15902 }, { "epoch": 17.553285477636663, "grad_norm": 0.48651477694511414, "learning_rate": 2.8034e-05, "loss": 0.1251, "step": 15903 }, { "epoch": 17.554389839867476, "grad_norm": 0.4740312397480011, "learning_rate": 2.803366666666667e-05, "loss": 0.1355, "step": 15904 }, { "epoch": 17.555494202098288, "grad_norm": 0.4224257171154022, "learning_rate": 2.803333333333333e-05, "loss": 0.0983, "step": 15905 }, { "epoch": 17.5565985643291, "grad_norm": 0.43584054708480835, "learning_rate": 2.8033e-05, "loss": 0.0713, "step": 15906 }, { "epoch": 17.557702926559912, "grad_norm": 0.3305533230304718, "learning_rate": 
2.803266666666667e-05, "loss": 0.0755, "step": 15907 }, { "epoch": 17.558807288790724, "grad_norm": 0.31848981976509094, "learning_rate": 2.8032333333333333e-05, "loss": 0.0335, "step": 15908 }, { "epoch": 17.559911651021537, "grad_norm": 0.4443015158176422, "learning_rate": 2.8032000000000002e-05, "loss": 0.0499, "step": 15909 }, { "epoch": 17.561016013252345, "grad_norm": 0.35583794116973877, "learning_rate": 2.8031666666666668e-05, "loss": 0.054, "step": 15910 }, { "epoch": 17.562120375483158, "grad_norm": 0.37477657198905945, "learning_rate": 2.8031333333333334e-05, "loss": 0.0553, "step": 15911 }, { "epoch": 17.56322473771397, "grad_norm": 0.12977716326713562, "learning_rate": 2.8031e-05, "loss": 0.0081, "step": 15912 }, { "epoch": 17.564329099944782, "grad_norm": 0.10314139723777771, "learning_rate": 2.803066666666667e-05, "loss": 0.0054, "step": 15913 }, { "epoch": 17.565433462175594, "grad_norm": 0.13463541865348816, "learning_rate": 2.8030333333333335e-05, "loss": 0.0081, "step": 15914 }, { "epoch": 17.566537824406407, "grad_norm": 0.15098479390144348, "learning_rate": 2.803e-05, "loss": 0.01, "step": 15915 }, { "epoch": 17.567642186637215, "grad_norm": 0.12267898768186569, "learning_rate": 2.802966666666667e-05, "loss": 0.0049, "step": 15916 }, { "epoch": 17.568746548868027, "grad_norm": 0.19181105494499207, "learning_rate": 2.8029333333333333e-05, "loss": 0.0104, "step": 15917 }, { "epoch": 17.56985091109884, "grad_norm": 0.17903389036655426, "learning_rate": 2.8029000000000002e-05, "loss": 0.0043, "step": 15918 }, { "epoch": 17.570955273329652, "grad_norm": 0.17079199850559235, "learning_rate": 2.8028666666666665e-05, "loss": 0.0077, "step": 15919 }, { "epoch": 17.572059635560464, "grad_norm": 0.21122629940509796, "learning_rate": 2.8028333333333334e-05, "loss": 0.0058, "step": 15920 }, { "epoch": 17.573163997791276, "grad_norm": 0.12211111932992935, "learning_rate": 2.8028e-05, "loss": 0.0066, "step": 15921 }, { "epoch": 17.57426836002209, "grad_norm": 
0.17029555141925812, "learning_rate": 2.8027666666666666e-05, "loss": 0.0069, "step": 15922 }, { "epoch": 17.575372722252897, "grad_norm": 0.17913927137851715, "learning_rate": 2.8027333333333335e-05, "loss": 0.0079, "step": 15923 }, { "epoch": 17.57647708448371, "grad_norm": 0.18538902699947357, "learning_rate": 2.8027e-05, "loss": 0.0257, "step": 15924 }, { "epoch": 17.57758144671452, "grad_norm": 1.4832919836044312, "learning_rate": 2.8026666666666667e-05, "loss": 0.0118, "step": 15925 }, { "epoch": 17.578685808945334, "grad_norm": 0.23701488971710205, "learning_rate": 2.8026333333333333e-05, "loss": 0.011, "step": 15926 }, { "epoch": 17.579790171176146, "grad_norm": 0.31988587975502014, "learning_rate": 2.8026000000000002e-05, "loss": 0.0111, "step": 15927 }, { "epoch": 17.58089453340696, "grad_norm": 0.21059466898441315, "learning_rate": 2.8025666666666665e-05, "loss": 0.0075, "step": 15928 }, { "epoch": 17.58199889563777, "grad_norm": 0.0778803825378418, "learning_rate": 2.8025333333333334e-05, "loss": 0.0033, "step": 15929 }, { "epoch": 17.58310325786858, "grad_norm": 0.1313748061656952, "learning_rate": 2.8025e-05, "loss": 0.0046, "step": 15930 }, { "epoch": 17.58420762009939, "grad_norm": 0.5378938913345337, "learning_rate": 2.8024666666666666e-05, "loss": 0.0055, "step": 15931 }, { "epoch": 17.585311982330204, "grad_norm": 0.10842640697956085, "learning_rate": 2.8024333333333335e-05, "loss": 0.0043, "step": 15932 }, { "epoch": 17.586416344561016, "grad_norm": 0.11094246059656143, "learning_rate": 2.8024e-05, "loss": 0.0046, "step": 15933 }, { "epoch": 17.587520706791828, "grad_norm": 0.6944109797477722, "learning_rate": 2.8023666666666667e-05, "loss": 0.0139, "step": 15934 }, { "epoch": 17.58862506902264, "grad_norm": 0.15645286440849304, "learning_rate": 2.8023333333333333e-05, "loss": 0.0061, "step": 15935 }, { "epoch": 17.589729431253453, "grad_norm": 0.12588968873023987, "learning_rate": 2.8023000000000003e-05, "loss": 0.0028, "step": 15936 }, { 
"epoch": 17.59083379348426, "grad_norm": 0.12378930300474167, "learning_rate": 2.8022666666666665e-05, "loss": 0.004, "step": 15937 }, { "epoch": 17.591938155715074, "grad_norm": 0.10505598038434982, "learning_rate": 2.8022333333333334e-05, "loss": 0.0046, "step": 15938 }, { "epoch": 17.593042517945886, "grad_norm": 0.16316565871238708, "learning_rate": 2.8022e-05, "loss": 0.0077, "step": 15939 }, { "epoch": 17.594146880176698, "grad_norm": 0.3392399847507477, "learning_rate": 2.8021666666666666e-05, "loss": 0.0119, "step": 15940 }, { "epoch": 17.59525124240751, "grad_norm": 0.1853010058403015, "learning_rate": 2.8021333333333336e-05, "loss": 0.0064, "step": 15941 }, { "epoch": 17.596355604638322, "grad_norm": 0.3014509379863739, "learning_rate": 2.8021e-05, "loss": 0.0139, "step": 15942 }, { "epoch": 17.597459966869135, "grad_norm": 0.19174517691135406, "learning_rate": 2.8020666666666668e-05, "loss": 0.0039, "step": 15943 }, { "epoch": 17.598564329099943, "grad_norm": 0.26763150095939636, "learning_rate": 2.8020333333333333e-05, "loss": 0.0099, "step": 15944 }, { "epoch": 17.599668691330756, "grad_norm": 0.2680072784423828, "learning_rate": 2.8020000000000003e-05, "loss": 0.0068, "step": 15945 }, { "epoch": 17.600773053561568, "grad_norm": 0.4055897891521454, "learning_rate": 2.8019666666666665e-05, "loss": 0.0074, "step": 15946 }, { "epoch": 17.60187741579238, "grad_norm": 0.09064394980669022, "learning_rate": 2.8019333333333335e-05, "loss": 0.0021, "step": 15947 }, { "epoch": 17.602981778023192, "grad_norm": 0.6086089611053467, "learning_rate": 2.8019e-05, "loss": 0.0182, "step": 15948 }, { "epoch": 17.604086140254005, "grad_norm": 0.26224595308303833, "learning_rate": 2.8018666666666667e-05, "loss": 0.0072, "step": 15949 }, { "epoch": 17.605190502484817, "grad_norm": 0.27872583270072937, "learning_rate": 2.8018333333333336e-05, "loss": 0.023, "step": 15950 }, { "epoch": 17.606294864715625, "grad_norm": 0.25545626878738403, "learning_rate": 2.8018e-05, "loss": 
0.0067, "step": 15951 }, { "epoch": 17.607399226946438, "grad_norm": 0.12266016006469727, "learning_rate": 2.8017666666666668e-05, "loss": 0.0042, "step": 15952 }, { "epoch": 17.60850358917725, "grad_norm": 0.5929532051086426, "learning_rate": 2.8017333333333334e-05, "loss": 0.1208, "step": 15953 }, { "epoch": 17.609607951408062, "grad_norm": 0.5148853659629822, "learning_rate": 2.8017e-05, "loss": 0.1112, "step": 15954 }, { "epoch": 17.610712313638874, "grad_norm": 0.45478981733322144, "learning_rate": 2.8016666666666666e-05, "loss": 0.067, "step": 15955 }, { "epoch": 17.611816675869687, "grad_norm": 0.3039393723011017, "learning_rate": 2.8016333333333335e-05, "loss": 0.0666, "step": 15956 }, { "epoch": 17.612921038100495, "grad_norm": 0.3043610155582428, "learning_rate": 2.8016e-05, "loss": 0.0567, "step": 15957 }, { "epoch": 17.614025400331307, "grad_norm": 0.3332837224006653, "learning_rate": 2.8015666666666667e-05, "loss": 0.0359, "step": 15958 }, { "epoch": 17.61512976256212, "grad_norm": 0.4345509707927704, "learning_rate": 2.8015333333333336e-05, "loss": 0.0377, "step": 15959 }, { "epoch": 17.616234124792932, "grad_norm": 0.6215271949768066, "learning_rate": 2.8015e-05, "loss": 0.0509, "step": 15960 }, { "epoch": 17.617338487023744, "grad_norm": 0.24839593470096588, "learning_rate": 2.8014666666666668e-05, "loss": 0.0175, "step": 15961 }, { "epoch": 17.618442849254556, "grad_norm": 0.3112996518611908, "learning_rate": 2.8014333333333334e-05, "loss": 0.025, "step": 15962 }, { "epoch": 17.61954721148537, "grad_norm": 0.13749606907367706, "learning_rate": 2.8014e-05, "loss": 0.0094, "step": 15963 }, { "epoch": 17.620651573716177, "grad_norm": 0.134328231215477, "learning_rate": 2.801366666666667e-05, "loss": 0.0078, "step": 15964 }, { "epoch": 17.62175593594699, "grad_norm": 0.18403777480125427, "learning_rate": 2.8013333333333335e-05, "loss": 0.0099, "step": 15965 }, { "epoch": 17.6228602981778, "grad_norm": 0.46582844853401184, "learning_rate": 2.8013e-05, 
"loss": 0.0152, "step": 15966 }, { "epoch": 17.623964660408614, "grad_norm": 0.2337431162595749, "learning_rate": 2.8012666666666667e-05, "loss": 0.028, "step": 15967 }, { "epoch": 17.625069022639426, "grad_norm": 0.2699083983898163, "learning_rate": 2.8012333333333336e-05, "loss": 0.0371, "step": 15968 }, { "epoch": 17.62617338487024, "grad_norm": 0.12778472900390625, "learning_rate": 2.8012e-05, "loss": 0.005, "step": 15969 }, { "epoch": 17.62727774710105, "grad_norm": 0.23479565978050232, "learning_rate": 2.8011666666666668e-05, "loss": 0.0161, "step": 15970 }, { "epoch": 17.62838210933186, "grad_norm": 0.09236126393079758, "learning_rate": 2.8011333333333334e-05, "loss": 0.0054, "step": 15971 }, { "epoch": 17.62948647156267, "grad_norm": 0.17169438302516937, "learning_rate": 2.8011e-05, "loss": 0.0059, "step": 15972 }, { "epoch": 17.630590833793484, "grad_norm": 0.21261735260486603, "learning_rate": 2.801066666666667e-05, "loss": 0.0105, "step": 15973 }, { "epoch": 17.631695196024296, "grad_norm": 0.5470879077911377, "learning_rate": 2.8010333333333335e-05, "loss": 0.0099, "step": 15974 }, { "epoch": 17.63279955825511, "grad_norm": 0.23947595059871674, "learning_rate": 2.801e-05, "loss": 0.0087, "step": 15975 }, { "epoch": 17.63390392048592, "grad_norm": 0.06681277602910995, "learning_rate": 2.8009666666666667e-05, "loss": 0.0027, "step": 15976 }, { "epoch": 17.635008282716733, "grad_norm": 0.16027401387691498, "learning_rate": 2.8009333333333336e-05, "loss": 0.0102, "step": 15977 }, { "epoch": 17.63611264494754, "grad_norm": 0.13381120562553406, "learning_rate": 2.8009e-05, "loss": 0.0044, "step": 15978 }, { "epoch": 17.637217007178354, "grad_norm": 0.12473712116479874, "learning_rate": 2.800866666666667e-05, "loss": 0.0306, "step": 15979 }, { "epoch": 17.638321369409166, "grad_norm": 0.11652666330337524, "learning_rate": 2.800833333333333e-05, "loss": 0.0061, "step": 15980 }, { "epoch": 17.639425731639978, "grad_norm": 0.26388275623321533, "learning_rate": 
2.8008e-05, "loss": 0.0094, "step": 15981 }, { "epoch": 17.64053009387079, "grad_norm": 0.17475560307502747, "learning_rate": 2.800766666666667e-05, "loss": 0.0056, "step": 15982 }, { "epoch": 17.641634456101603, "grad_norm": 0.1443009227514267, "learning_rate": 2.8007333333333332e-05, "loss": 0.0053, "step": 15983 }, { "epoch": 17.642738818332415, "grad_norm": 0.1840742528438568, "learning_rate": 2.8007e-05, "loss": 0.0073, "step": 15984 }, { "epoch": 17.643843180563223, "grad_norm": 0.30471616983413696, "learning_rate": 2.8006666666666667e-05, "loss": 0.0087, "step": 15985 }, { "epoch": 17.644947542794036, "grad_norm": 0.06998570263385773, "learning_rate": 2.8006333333333333e-05, "loss": 0.0029, "step": 15986 }, { "epoch": 17.646051905024848, "grad_norm": 0.2787739336490631, "learning_rate": 2.8006e-05, "loss": 0.0118, "step": 15987 }, { "epoch": 17.64715626725566, "grad_norm": 0.06892116367816925, "learning_rate": 2.800566666666667e-05, "loss": 0.0032, "step": 15988 }, { "epoch": 17.648260629486472, "grad_norm": 0.15857535600662231, "learning_rate": 2.8005333333333334e-05, "loss": 0.0054, "step": 15989 }, { "epoch": 17.649364991717285, "grad_norm": 0.16725720465183258, "learning_rate": 2.8005e-05, "loss": 0.005, "step": 15990 }, { "epoch": 17.650469353948097, "grad_norm": 0.15406213700771332, "learning_rate": 2.800466666666667e-05, "loss": 0.0059, "step": 15991 }, { "epoch": 17.651573716178905, "grad_norm": 0.2797030806541443, "learning_rate": 2.8004333333333332e-05, "loss": 0.0081, "step": 15992 }, { "epoch": 17.652678078409718, "grad_norm": 0.07934441417455673, "learning_rate": 2.8004e-05, "loss": 0.0016, "step": 15993 }, { "epoch": 17.65378244064053, "grad_norm": 0.27957192063331604, "learning_rate": 2.8003666666666668e-05, "loss": 0.0083, "step": 15994 }, { "epoch": 17.654886802871342, "grad_norm": 0.4059184789657593, "learning_rate": 2.8003333333333333e-05, "loss": 0.0094, "step": 15995 }, { "epoch": 17.655991165102154, "grad_norm": 0.301572322845459, 
"learning_rate": 2.8003e-05, "loss": 0.0147, "step": 15996 }, { "epoch": 17.657095527332967, "grad_norm": 0.29114046692848206, "learning_rate": 2.800266666666667e-05, "loss": 0.0119, "step": 15997 }, { "epoch": 17.658199889563775, "grad_norm": 0.17178277671337128, "learning_rate": 2.8002333333333335e-05, "loss": 0.0071, "step": 15998 }, { "epoch": 17.659304251794588, "grad_norm": 0.07235106080770493, "learning_rate": 2.8002e-05, "loss": 0.0038, "step": 15999 }, { "epoch": 17.6604086140254, "grad_norm": 0.18142050504684448, "learning_rate": 2.800166666666667e-05, "loss": 0.007, "step": 16000 }, { "epoch": 17.6604086140254, "eval_cer": 0.10638463333074112, "eval_loss": 0.3103519380092621, "eval_runtime": 16.0896, "eval_samples_per_second": 18.894, "eval_steps_per_second": 0.622, "eval_wer": 0.3731772831926324, "step": 16000 }, { "epoch": 17.661512976256212, "grad_norm": 0.12922140955924988, "learning_rate": 2.8001333333333332e-05, "loss": 0.0046, "step": 16001 }, { "epoch": 17.662617338487024, "grad_norm": 0.41694074869155884, "learning_rate": 2.8001000000000002e-05, "loss": 0.0104, "step": 16002 }, { "epoch": 17.663721700717836, "grad_norm": 0.5712310075759888, "learning_rate": 2.8000666666666668e-05, "loss": 0.1647, "step": 16003 }, { "epoch": 17.66482606294865, "grad_norm": 0.3146350085735321, "learning_rate": 2.8000333333333334e-05, "loss": 0.1001, "step": 16004 }, { "epoch": 17.665930425179457, "grad_norm": 0.46475741267204285, "learning_rate": 2.8e-05, "loss": 0.0714, "step": 16005 }, { "epoch": 17.66703478741027, "grad_norm": 0.33949151635169983, "learning_rate": 2.799966666666667e-05, "loss": 0.0513, "step": 16006 }, { "epoch": 17.668139149641082, "grad_norm": 0.5139482617378235, "learning_rate": 2.7999333333333335e-05, "loss": 0.0587, "step": 16007 }, { "epoch": 17.669243511871894, "grad_norm": 0.40455132722854614, "learning_rate": 2.7999e-05, "loss": 0.0707, "step": 16008 }, { "epoch": 17.670347874102706, "grad_norm": 0.36482787132263184, "learning_rate": 
2.7998666666666667e-05, "loss": 0.0527, "step": 16009 }, { "epoch": 17.67145223633352, "grad_norm": 0.22475974261760712, "learning_rate": 2.7998333333333333e-05, "loss": 0.0464, "step": 16010 }, { "epoch": 17.67255659856433, "grad_norm": 0.3727065920829773, "learning_rate": 2.7998000000000002e-05, "loss": 0.0194, "step": 16011 }, { "epoch": 17.67366096079514, "grad_norm": 0.2563163936138153, "learning_rate": 2.7997666666666665e-05, "loss": 0.025, "step": 16012 }, { "epoch": 17.67476532302595, "grad_norm": 0.07423092424869537, "learning_rate": 2.7997333333333334e-05, "loss": 0.0061, "step": 16013 }, { "epoch": 17.675869685256764, "grad_norm": 0.09289658069610596, "learning_rate": 2.7997000000000003e-05, "loss": 0.0068, "step": 16014 }, { "epoch": 17.676974047487576, "grad_norm": 0.17172662913799286, "learning_rate": 2.7996666666666666e-05, "loss": 0.0103, "step": 16015 }, { "epoch": 17.67807840971839, "grad_norm": 0.1375722885131836, "learning_rate": 2.7996333333333335e-05, "loss": 0.008, "step": 16016 }, { "epoch": 17.6791827719492, "grad_norm": 0.19270557165145874, "learning_rate": 2.7996e-05, "loss": 0.0073, "step": 16017 }, { "epoch": 17.680287134180013, "grad_norm": 0.23738376796245575, "learning_rate": 2.7995666666666667e-05, "loss": 0.0431, "step": 16018 }, { "epoch": 17.68139149641082, "grad_norm": 0.12337549775838852, "learning_rate": 2.7995333333333333e-05, "loss": 0.0064, "step": 16019 }, { "epoch": 17.682495858641634, "grad_norm": 0.14395630359649658, "learning_rate": 2.7995000000000002e-05, "loss": 0.0085, "step": 16020 }, { "epoch": 17.683600220872446, "grad_norm": 0.13955754041671753, "learning_rate": 2.7994666666666665e-05, "loss": 0.0061, "step": 16021 }, { "epoch": 17.684704583103258, "grad_norm": 0.15937040746212006, "learning_rate": 2.7994333333333334e-05, "loss": 0.0071, "step": 16022 }, { "epoch": 17.68580894533407, "grad_norm": 0.11199718713760376, "learning_rate": 2.7994000000000003e-05, "loss": 0.0085, "step": 16023 }, { "epoch": 
17.686913307564883, "grad_norm": 0.11175230890512466, "learning_rate": 2.7993666666666666e-05, "loss": 0.0035, "step": 16024 }, { "epoch": 17.68801766979569, "grad_norm": 0.1567201167345047, "learning_rate": 2.7993333333333335e-05, "loss": 0.0124, "step": 16025 }, { "epoch": 17.689122032026503, "grad_norm": 0.2114691436290741, "learning_rate": 2.7993e-05, "loss": 0.0098, "step": 16026 }, { "epoch": 17.690226394257316, "grad_norm": 0.2899388372898102, "learning_rate": 2.7992666666666667e-05, "loss": 0.0057, "step": 16027 }, { "epoch": 17.691330756488128, "grad_norm": 0.22603455185890198, "learning_rate": 2.7992333333333333e-05, "loss": 0.007, "step": 16028 }, { "epoch": 17.69243511871894, "grad_norm": 0.30155789852142334, "learning_rate": 2.7992000000000002e-05, "loss": 0.0053, "step": 16029 }, { "epoch": 17.693539480949752, "grad_norm": 0.31173020601272583, "learning_rate": 2.7991666666666665e-05, "loss": 0.008, "step": 16030 }, { "epoch": 17.694643843180565, "grad_norm": 0.09591293334960938, "learning_rate": 2.7991333333333334e-05, "loss": 0.0029, "step": 16031 }, { "epoch": 17.695748205411373, "grad_norm": 0.1721968650817871, "learning_rate": 2.7991000000000004e-05, "loss": 0.0067, "step": 16032 }, { "epoch": 17.696852567642186, "grad_norm": 0.09842316806316376, "learning_rate": 2.7990666666666666e-05, "loss": 0.0056, "step": 16033 }, { "epoch": 17.697956929872998, "grad_norm": 0.5200715065002441, "learning_rate": 2.7990333333333335e-05, "loss": 0.0059, "step": 16034 }, { "epoch": 17.69906129210381, "grad_norm": 0.1442589908838272, "learning_rate": 2.799e-05, "loss": 0.0047, "step": 16035 }, { "epoch": 17.700165654334622, "grad_norm": 0.5914910435676575, "learning_rate": 2.7989666666666667e-05, "loss": 0.0095, "step": 16036 }, { "epoch": 17.701270016565434, "grad_norm": 0.16509045660495758, "learning_rate": 2.7989333333333333e-05, "loss": 0.0038, "step": 16037 }, { "epoch": 17.702374378796247, "grad_norm": 0.20197398960590363, "learning_rate": 
2.7989000000000003e-05, "loss": 0.0091, "step": 16038 }, { "epoch": 17.703478741027055, "grad_norm": 0.119074247777462, "learning_rate": 2.798866666666667e-05, "loss": 0.0034, "step": 16039 }, { "epoch": 17.704583103257868, "grad_norm": 0.08328570425510406, "learning_rate": 2.7988333333333334e-05, "loss": 0.0027, "step": 16040 }, { "epoch": 17.70568746548868, "grad_norm": 0.1667288839817047, "learning_rate": 2.7988e-05, "loss": 0.0056, "step": 16041 }, { "epoch": 17.706791827719492, "grad_norm": 0.10621681064367294, "learning_rate": 2.7987666666666666e-05, "loss": 0.0025, "step": 16042 }, { "epoch": 17.707896189950304, "grad_norm": 0.15168136358261108, "learning_rate": 2.7987333333333336e-05, "loss": 0.0054, "step": 16043 }, { "epoch": 17.709000552181116, "grad_norm": 0.19274815917015076, "learning_rate": 2.7986999999999998e-05, "loss": 0.0051, "step": 16044 }, { "epoch": 17.71010491441193, "grad_norm": 0.09285160899162292, "learning_rate": 2.7986666666666668e-05, "loss": 0.003, "step": 16045 }, { "epoch": 17.711209276642737, "grad_norm": 0.2523629367351532, "learning_rate": 2.7986333333333333e-05, "loss": 0.0058, "step": 16046 }, { "epoch": 17.71231363887355, "grad_norm": 0.25030577182769775, "learning_rate": 2.7986e-05, "loss": 0.0067, "step": 16047 }, { "epoch": 17.713418001104362, "grad_norm": 0.1562701165676117, "learning_rate": 2.798566666666667e-05, "loss": 0.0036, "step": 16048 }, { "epoch": 17.714522363335174, "grad_norm": 0.2144237607717514, "learning_rate": 2.7985333333333335e-05, "loss": 0.004, "step": 16049 }, { "epoch": 17.715626725565986, "grad_norm": 0.2767696678638458, "learning_rate": 2.7985e-05, "loss": 0.0113, "step": 16050 }, { "epoch": 17.7167310877968, "grad_norm": 0.32398584485054016, "learning_rate": 2.7984666666666667e-05, "loss": 0.0023, "step": 16051 }, { "epoch": 17.71783545002761, "grad_norm": 0.6234661936759949, "learning_rate": 2.7984333333333336e-05, "loss": 0.0188, "step": 16052 }, { "epoch": 17.71893981225842, "grad_norm": 
0.4605979025363922, "learning_rate": 2.7984e-05, "loss": 0.1471, "step": 16053 }, { "epoch": 17.72004417448923, "grad_norm": 0.6333496570587158, "learning_rate": 2.7983666666666668e-05, "loss": 0.1159, "step": 16054 }, { "epoch": 17.721148536720044, "grad_norm": 0.7197943925857544, "learning_rate": 2.7983333333333334e-05, "loss": 0.0715, "step": 16055 }, { "epoch": 17.722252898950856, "grad_norm": 0.40452632308006287, "learning_rate": 2.7983e-05, "loss": 0.0784, "step": 16056 }, { "epoch": 17.72335726118167, "grad_norm": 0.3280292749404907, "learning_rate": 2.798266666666667e-05, "loss": 0.0614, "step": 16057 }, { "epoch": 17.72446162341248, "grad_norm": 0.6150525808334351, "learning_rate": 2.7982333333333335e-05, "loss": 0.0364, "step": 16058 }, { "epoch": 17.725565985643293, "grad_norm": 0.3038099706172943, "learning_rate": 2.7982e-05, "loss": 0.034, "step": 16059 }, { "epoch": 17.7266703478741, "grad_norm": 0.7016597986221313, "learning_rate": 2.7981666666666667e-05, "loss": 0.0196, "step": 16060 }, { "epoch": 17.727774710104914, "grad_norm": 0.21397072076797485, "learning_rate": 2.7981333333333336e-05, "loss": 0.0453, "step": 16061 }, { "epoch": 17.728879072335726, "grad_norm": 0.21731437742710114, "learning_rate": 2.7981e-05, "loss": 0.0121, "step": 16062 }, { "epoch": 17.729983434566538, "grad_norm": 0.21447478234767914, "learning_rate": 2.7980666666666668e-05, "loss": 0.0096, "step": 16063 }, { "epoch": 17.73108779679735, "grad_norm": 0.14168842136859894, "learning_rate": 2.7980333333333337e-05, "loss": 0.01, "step": 16064 }, { "epoch": 17.732192159028163, "grad_norm": 0.18864387273788452, "learning_rate": 2.798e-05, "loss": 0.0084, "step": 16065 }, { "epoch": 17.73329652125897, "grad_norm": 0.32674866914749146, "learning_rate": 2.797966666666667e-05, "loss": 0.0105, "step": 16066 }, { "epoch": 17.734400883489783, "grad_norm": 0.11830109357833862, "learning_rate": 2.7979333333333335e-05, "loss": 0.0044, "step": 16067 }, { "epoch": 17.735505245720596, 
"grad_norm": 0.5428361296653748, "learning_rate": 2.7979e-05, "loss": 0.0099, "step": 16068 }, { "epoch": 17.736609607951408, "grad_norm": 0.14988745748996735, "learning_rate": 2.7978666666666667e-05, "loss": 0.0102, "step": 16069 }, { "epoch": 17.73771397018222, "grad_norm": 0.20075467228889465, "learning_rate": 2.7978333333333333e-05, "loss": 0.004, "step": 16070 }, { "epoch": 17.738818332413032, "grad_norm": 0.1379440575838089, "learning_rate": 2.7978e-05, "loss": 0.0047, "step": 16071 }, { "epoch": 17.739922694643845, "grad_norm": 0.10900414735078812, "learning_rate": 2.7977666666666668e-05, "loss": 0.0047, "step": 16072 }, { "epoch": 17.741027056874653, "grad_norm": 0.49456918239593506, "learning_rate": 2.7977333333333334e-05, "loss": 0.0132, "step": 16073 }, { "epoch": 17.742131419105466, "grad_norm": 0.23238523304462433, "learning_rate": 2.7977e-05, "loss": 0.0035, "step": 16074 }, { "epoch": 17.743235781336278, "grad_norm": 0.17636272311210632, "learning_rate": 2.797666666666667e-05, "loss": 0.008, "step": 16075 }, { "epoch": 17.74434014356709, "grad_norm": 0.6923359036445618, "learning_rate": 2.7976333333333332e-05, "loss": 0.0148, "step": 16076 }, { "epoch": 17.745444505797902, "grad_norm": 0.09052456170320511, "learning_rate": 2.7976e-05, "loss": 0.0046, "step": 16077 }, { "epoch": 17.746548868028714, "grad_norm": 0.2131960391998291, "learning_rate": 2.7975666666666667e-05, "loss": 0.0061, "step": 16078 }, { "epoch": 17.747653230259527, "grad_norm": 0.22637194395065308, "learning_rate": 2.7975333333333333e-05, "loss": 0.0066, "step": 16079 }, { "epoch": 17.748757592490335, "grad_norm": 0.28827744722366333, "learning_rate": 2.7975e-05, "loss": 0.0096, "step": 16080 }, { "epoch": 17.749861954721148, "grad_norm": 0.16910101473331451, "learning_rate": 2.7974666666666668e-05, "loss": 0.0072, "step": 16081 }, { "epoch": 17.75096631695196, "grad_norm": 0.1486874371767044, "learning_rate": 2.7974333333333334e-05, "loss": 0.0039, "step": 16082 }, { "epoch": 
17.752070679182772, "grad_norm": 0.27550554275512695, "learning_rate": 2.7974e-05, "loss": 0.0085, "step": 16083 }, { "epoch": 17.753175041413584, "grad_norm": 0.319661945104599, "learning_rate": 2.797366666666667e-05, "loss": 0.0102, "step": 16084 }, { "epoch": 17.754279403644396, "grad_norm": 0.18843989074230194, "learning_rate": 2.7973333333333332e-05, "loss": 0.0064, "step": 16085 }, { "epoch": 17.75538376587521, "grad_norm": 0.3316144347190857, "learning_rate": 2.7973e-05, "loss": 0.0119, "step": 16086 }, { "epoch": 17.756488128106017, "grad_norm": 0.16909563541412354, "learning_rate": 2.7972666666666667e-05, "loss": 0.0056, "step": 16087 }, { "epoch": 17.75759249033683, "grad_norm": 0.15976493060588837, "learning_rate": 2.7972333333333333e-05, "loss": 0.0076, "step": 16088 }, { "epoch": 17.758696852567642, "grad_norm": 0.5746127367019653, "learning_rate": 2.7972000000000003e-05, "loss": 0.0119, "step": 16089 }, { "epoch": 17.759801214798454, "grad_norm": 0.20022651553153992, "learning_rate": 2.797166666666667e-05, "loss": 0.0082, "step": 16090 }, { "epoch": 17.760905577029266, "grad_norm": 0.4620678126811981, "learning_rate": 2.7971333333333334e-05, "loss": 0.0164, "step": 16091 }, { "epoch": 17.76200993926008, "grad_norm": 0.21635253727436066, "learning_rate": 2.7971e-05, "loss": 0.01, "step": 16092 }, { "epoch": 17.763114301490887, "grad_norm": 0.5174095034599304, "learning_rate": 2.797066666666667e-05, "loss": 0.0147, "step": 16093 }, { "epoch": 17.7642186637217, "grad_norm": 0.24379675090312958, "learning_rate": 2.7970333333333332e-05, "loss": 0.0094, "step": 16094 }, { "epoch": 17.76532302595251, "grad_norm": 0.5777557492256165, "learning_rate": 2.797e-05, "loss": 0.0153, "step": 16095 }, { "epoch": 17.766427388183324, "grad_norm": 0.29628920555114746, "learning_rate": 2.7969666666666667e-05, "loss": 0.0065, "step": 16096 }, { "epoch": 17.767531750414136, "grad_norm": 0.27261894941329956, "learning_rate": 2.7969333333333333e-05, "loss": 0.0123, "step": 
16097 }, { "epoch": 17.76863611264495, "grad_norm": 0.4589129090309143, "learning_rate": 2.7969000000000003e-05, "loss": 0.0107, "step": 16098 }, { "epoch": 17.76974047487576, "grad_norm": 0.16123135387897491, "learning_rate": 2.7968666666666665e-05, "loss": 0.0066, "step": 16099 }, { "epoch": 17.77084483710657, "grad_norm": 0.38760003447532654, "learning_rate": 2.7968333333333335e-05, "loss": 0.0117, "step": 16100 }, { "epoch": 17.77194919933738, "grad_norm": 0.10440995544195175, "learning_rate": 2.7968e-05, "loss": 0.0037, "step": 16101 }, { "epoch": 17.773053561568194, "grad_norm": 0.17075584828853607, "learning_rate": 2.7967666666666666e-05, "loss": 0.0064, "step": 16102 }, { "epoch": 17.774157923799006, "grad_norm": 0.6498902440071106, "learning_rate": 2.7967333333333332e-05, "loss": 0.1425, "step": 16103 }, { "epoch": 17.775262286029818, "grad_norm": 0.861808180809021, "learning_rate": 2.7967000000000002e-05, "loss": 0.1406, "step": 16104 }, { "epoch": 17.77636664826063, "grad_norm": 0.47367843985557556, "learning_rate": 2.7966666666666664e-05, "loss": 0.1609, "step": 16105 }, { "epoch": 17.777471010491443, "grad_norm": 0.3781110942363739, "learning_rate": 2.7966333333333334e-05, "loss": 0.1139, "step": 16106 }, { "epoch": 17.77857537272225, "grad_norm": 0.35722997784614563, "learning_rate": 2.7966000000000003e-05, "loss": 0.0384, "step": 16107 }, { "epoch": 17.779679734953064, "grad_norm": 0.39376088976860046, "learning_rate": 2.7965666666666666e-05, "loss": 0.0843, "step": 16108 }, { "epoch": 17.780784097183876, "grad_norm": 0.5785391926765442, "learning_rate": 2.7965333333333335e-05, "loss": 0.0877, "step": 16109 }, { "epoch": 17.781888459414688, "grad_norm": 0.6593884825706482, "learning_rate": 2.7965e-05, "loss": 0.059, "step": 16110 }, { "epoch": 17.7829928216455, "grad_norm": 0.3238973319530487, "learning_rate": 2.7964666666666667e-05, "loss": 0.0199, "step": 16111 }, { "epoch": 17.784097183876312, "grad_norm": 0.6182801723480225, "learning_rate": 
2.7964333333333333e-05, "loss": 0.0339, "step": 16112 }, { "epoch": 17.785201546107125, "grad_norm": 0.18653568625450134, "learning_rate": 2.7964000000000002e-05, "loss": 0.0258, "step": 16113 }, { "epoch": 17.786305908337933, "grad_norm": 0.43721458315849304, "learning_rate": 2.7963666666666668e-05, "loss": 0.0433, "step": 16114 }, { "epoch": 17.787410270568746, "grad_norm": 0.33695748448371887, "learning_rate": 2.7963333333333334e-05, "loss": 0.0137, "step": 16115 }, { "epoch": 17.788514632799558, "grad_norm": 0.21433252096176147, "learning_rate": 2.7963000000000003e-05, "loss": 0.0129, "step": 16116 }, { "epoch": 17.78961899503037, "grad_norm": 0.13799266517162323, "learning_rate": 2.7962666666666666e-05, "loss": 0.0066, "step": 16117 }, { "epoch": 17.790723357261182, "grad_norm": 0.5218492746353149, "learning_rate": 2.7962333333333335e-05, "loss": 0.012, "step": 16118 }, { "epoch": 17.791827719491994, "grad_norm": 0.14129804074764252, "learning_rate": 2.7962e-05, "loss": 0.0073, "step": 16119 }, { "epoch": 17.792932081722807, "grad_norm": 0.1544712632894516, "learning_rate": 2.7961666666666667e-05, "loss": 0.0077, "step": 16120 }, { "epoch": 17.794036443953615, "grad_norm": 0.16213218867778778, "learning_rate": 2.7961333333333333e-05, "loss": 0.0076, "step": 16121 }, { "epoch": 17.795140806184428, "grad_norm": 0.15114906430244446, "learning_rate": 2.7961000000000002e-05, "loss": 0.0059, "step": 16122 }, { "epoch": 17.79624516841524, "grad_norm": 0.102937713265419, "learning_rate": 2.7960666666666668e-05, "loss": 0.0047, "step": 16123 }, { "epoch": 17.797349530646052, "grad_norm": 0.15429362654685974, "learning_rate": 2.7960333333333334e-05, "loss": 0.0079, "step": 16124 }, { "epoch": 17.798453892876864, "grad_norm": 0.1982211470603943, "learning_rate": 2.7960000000000003e-05, "loss": 0.0254, "step": 16125 }, { "epoch": 17.799558255107677, "grad_norm": 0.13364015519618988, "learning_rate": 2.7959666666666666e-05, "loss": 0.0027, "step": 16126 }, { "epoch": 
17.80066261733849, "grad_norm": 0.1535801738500595, "learning_rate": 2.7959333333333335e-05, "loss": 0.0054, "step": 16127 }, { "epoch": 17.801766979569297, "grad_norm": 0.5562451481819153, "learning_rate": 2.7959e-05, "loss": 0.0209, "step": 16128 }, { "epoch": 17.80287134180011, "grad_norm": 0.1867629736661911, "learning_rate": 2.7958666666666667e-05, "loss": 0.0104, "step": 16129 }, { "epoch": 17.803975704030922, "grad_norm": 0.3010571002960205, "learning_rate": 2.7958333333333333e-05, "loss": 0.016, "step": 16130 }, { "epoch": 17.805080066261734, "grad_norm": 0.20701643824577332, "learning_rate": 2.7958e-05, "loss": 0.0092, "step": 16131 }, { "epoch": 17.806184428492546, "grad_norm": 0.1669079214334488, "learning_rate": 2.7957666666666668e-05, "loss": 0.0082, "step": 16132 }, { "epoch": 17.80728879072336, "grad_norm": 0.21883100271224976, "learning_rate": 2.7957333333333334e-05, "loss": 0.0104, "step": 16133 }, { "epoch": 17.808393152954167, "grad_norm": 0.1850820779800415, "learning_rate": 2.7957e-05, "loss": 0.0082, "step": 16134 }, { "epoch": 17.80949751518498, "grad_norm": 0.17876070737838745, "learning_rate": 2.7956666666666666e-05, "loss": 0.0069, "step": 16135 }, { "epoch": 17.81060187741579, "grad_norm": 0.23498955368995667, "learning_rate": 2.7956333333333335e-05, "loss": 0.0085, "step": 16136 }, { "epoch": 17.811706239646604, "grad_norm": 0.19960017502307892, "learning_rate": 2.7955999999999998e-05, "loss": 0.0076, "step": 16137 }, { "epoch": 17.812810601877416, "grad_norm": 0.19439083337783813, "learning_rate": 2.7955666666666667e-05, "loss": 0.0081, "step": 16138 }, { "epoch": 17.81391496410823, "grad_norm": 0.16283071041107178, "learning_rate": 2.7955333333333337e-05, "loss": 0.0067, "step": 16139 }, { "epoch": 17.81501932633904, "grad_norm": 0.44280511140823364, "learning_rate": 2.7955e-05, "loss": 0.0125, "step": 16140 }, { "epoch": 17.81612368856985, "grad_norm": 0.14587056636810303, "learning_rate": 2.795466666666667e-05, "loss": 0.0047, 
"step": 16141 }, { "epoch": 17.81722805080066, "grad_norm": 0.25072792172431946, "learning_rate": 2.7954333333333334e-05, "loss": 0.0061, "step": 16142 }, { "epoch": 17.818332413031474, "grad_norm": 0.2420494705438614, "learning_rate": 2.7954e-05, "loss": 0.0086, "step": 16143 }, { "epoch": 17.819436775262286, "grad_norm": 0.15070481598377228, "learning_rate": 2.7953666666666666e-05, "loss": 0.0063, "step": 16144 }, { "epoch": 17.820541137493098, "grad_norm": 0.25160786509513855, "learning_rate": 2.7953333333333336e-05, "loss": 0.0088, "step": 16145 }, { "epoch": 17.82164549972391, "grad_norm": 0.23273958265781403, "learning_rate": 2.7952999999999998e-05, "loss": 0.0087, "step": 16146 }, { "epoch": 17.822749861954723, "grad_norm": 0.13290853798389435, "learning_rate": 2.7952666666666667e-05, "loss": 0.0048, "step": 16147 }, { "epoch": 17.82385422418553, "grad_norm": 0.27559083700180054, "learning_rate": 2.7952333333333337e-05, "loss": 0.0059, "step": 16148 }, { "epoch": 17.824958586416344, "grad_norm": 0.6654896140098572, "learning_rate": 2.7952e-05, "loss": 0.0082, "step": 16149 }, { "epoch": 17.826062948647156, "grad_norm": 0.22834140062332153, "learning_rate": 2.795166666666667e-05, "loss": 0.0045, "step": 16150 }, { "epoch": 17.827167310877968, "grad_norm": 0.9256462454795837, "learning_rate": 2.7951333333333335e-05, "loss": 0.0096, "step": 16151 }, { "epoch": 17.82827167310878, "grad_norm": 0.2355000078678131, "learning_rate": 2.7951e-05, "loss": 0.0203, "step": 16152 }, { "epoch": 17.829376035339592, "grad_norm": 0.5123057961463928, "learning_rate": 2.7950666666666666e-05, "loss": 0.198, "step": 16153 }, { "epoch": 17.830480397570405, "grad_norm": 0.4161434471607208, "learning_rate": 2.7950333333333336e-05, "loss": 0.1081, "step": 16154 }, { "epoch": 17.831584759801213, "grad_norm": 0.3253578841686249, "learning_rate": 2.795e-05, "loss": 0.0594, "step": 16155 }, { "epoch": 17.832689122032026, "grad_norm": 0.2856566607952118, "learning_rate": 
2.7949666666666668e-05, "loss": 0.0688, "step": 16156 }, { "epoch": 17.833793484262838, "grad_norm": 0.30540984869003296, "learning_rate": 2.7949333333333337e-05, "loss": 0.0421, "step": 16157 }, { "epoch": 17.83489784649365, "grad_norm": 0.36761340498924255, "learning_rate": 2.7949e-05, "loss": 0.0445, "step": 16158 }, { "epoch": 17.836002208724462, "grad_norm": 0.3253299295902252, "learning_rate": 2.794866666666667e-05, "loss": 0.0343, "step": 16159 }, { "epoch": 17.837106570955275, "grad_norm": 0.4917033016681671, "learning_rate": 2.794833333333333e-05, "loss": 0.0536, "step": 16160 }, { "epoch": 17.838210933186087, "grad_norm": 0.25338345766067505, "learning_rate": 2.7948e-05, "loss": 0.017, "step": 16161 }, { "epoch": 17.839315295416895, "grad_norm": 0.39557695388793945, "learning_rate": 2.7947666666666667e-05, "loss": 0.0157, "step": 16162 }, { "epoch": 17.840419657647708, "grad_norm": 0.11368691176176071, "learning_rate": 2.7947333333333333e-05, "loss": 0.0064, "step": 16163 }, { "epoch": 17.84152401987852, "grad_norm": 0.2090904712677002, "learning_rate": 2.7947000000000002e-05, "loss": 0.0127, "step": 16164 }, { "epoch": 17.842628382109332, "grad_norm": 0.30320173501968384, "learning_rate": 2.7946666666666668e-05, "loss": 0.0203, "step": 16165 }, { "epoch": 17.843732744340144, "grad_norm": 0.3039594292640686, "learning_rate": 2.7946333333333334e-05, "loss": 0.0168, "step": 16166 }, { "epoch": 17.844837106570957, "grad_norm": 0.18000315129756927, "learning_rate": 2.7946e-05, "loss": 0.007, "step": 16167 }, { "epoch": 17.84594146880177, "grad_norm": 0.2104264497756958, "learning_rate": 2.794566666666667e-05, "loss": 0.0062, "step": 16168 }, { "epoch": 17.847045831032577, "grad_norm": 0.2597655951976776, "learning_rate": 2.794533333333333e-05, "loss": 0.0235, "step": 16169 }, { "epoch": 17.84815019326339, "grad_norm": 0.16477683186531067, "learning_rate": 2.7945e-05, "loss": 0.0082, "step": 16170 }, { "epoch": 17.849254555494202, "grad_norm": 
0.23373505473136902, "learning_rate": 2.7944666666666667e-05, "loss": 0.0065, "step": 16171 }, { "epoch": 17.850358917725014, "grad_norm": 0.23186394572257996, "learning_rate": 2.7944333333333333e-05, "loss": 0.0101, "step": 16172 }, { "epoch": 17.851463279955826, "grad_norm": 0.2906601130962372, "learning_rate": 2.7944000000000002e-05, "loss": 0.0058, "step": 16173 }, { "epoch": 17.85256764218664, "grad_norm": 0.2872275710105896, "learning_rate": 2.7943666666666668e-05, "loss": 0.0067, "step": 16174 }, { "epoch": 17.853672004417447, "grad_norm": 0.2001357525587082, "learning_rate": 2.7943333333333334e-05, "loss": 0.0068, "step": 16175 }, { "epoch": 17.85477636664826, "grad_norm": 0.21277546882629395, "learning_rate": 2.7943e-05, "loss": 0.0067, "step": 16176 }, { "epoch": 17.85588072887907, "grad_norm": 0.49615591764450073, "learning_rate": 2.794266666666667e-05, "loss": 0.0068, "step": 16177 }, { "epoch": 17.856985091109884, "grad_norm": 0.1287405639886856, "learning_rate": 2.7942333333333332e-05, "loss": 0.0071, "step": 16178 }, { "epoch": 17.858089453340696, "grad_norm": 0.1792120337486267, "learning_rate": 2.7942e-05, "loss": 0.0069, "step": 16179 }, { "epoch": 17.85919381557151, "grad_norm": 0.18459239602088928, "learning_rate": 2.7941666666666667e-05, "loss": 0.0101, "step": 16180 }, { "epoch": 17.86029817780232, "grad_norm": 0.09377743303775787, "learning_rate": 2.7941333333333333e-05, "loss": 0.0032, "step": 16181 }, { "epoch": 17.86140254003313, "grad_norm": 0.16186930239200592, "learning_rate": 2.7941000000000002e-05, "loss": 0.0094, "step": 16182 }, { "epoch": 17.86250690226394, "grad_norm": 0.23483207821846008, "learning_rate": 2.7940666666666668e-05, "loss": 0.0081, "step": 16183 }, { "epoch": 17.863611264494754, "grad_norm": 0.2495899349451065, "learning_rate": 2.7940333333333334e-05, "loss": 0.0153, "step": 16184 }, { "epoch": 17.864715626725566, "grad_norm": 0.14229261875152588, "learning_rate": 2.794e-05, "loss": 0.0032, "step": 16185 }, { 
"epoch": 17.86581998895638, "grad_norm": 0.11654428392648697, "learning_rate": 2.793966666666667e-05, "loss": 0.0074, "step": 16186 }, { "epoch": 17.86692435118719, "grad_norm": 0.08622944355010986, "learning_rate": 2.7939333333333332e-05, "loss": 0.0037, "step": 16187 }, { "epoch": 17.868028713418003, "grad_norm": 0.2086048722267151, "learning_rate": 2.7939e-05, "loss": 0.0089, "step": 16188 }, { "epoch": 17.86913307564881, "grad_norm": 0.13176313042640686, "learning_rate": 2.793866666666667e-05, "loss": 0.0048, "step": 16189 }, { "epoch": 17.870237437879624, "grad_norm": 0.45468002557754517, "learning_rate": 2.7938333333333333e-05, "loss": 0.0145, "step": 16190 }, { "epoch": 17.871341800110436, "grad_norm": 0.23222026228904724, "learning_rate": 2.7938000000000003e-05, "loss": 0.0087, "step": 16191 }, { "epoch": 17.872446162341248, "grad_norm": 0.1716892570257187, "learning_rate": 2.7937666666666665e-05, "loss": 0.0057, "step": 16192 }, { "epoch": 17.87355052457206, "grad_norm": 0.10497628897428513, "learning_rate": 2.7937333333333334e-05, "loss": 0.0045, "step": 16193 }, { "epoch": 17.874654886802873, "grad_norm": 0.2894362807273865, "learning_rate": 2.7937e-05, "loss": 0.0118, "step": 16194 }, { "epoch": 17.875759249033685, "grad_norm": 0.14257679879665375, "learning_rate": 2.7936666666666666e-05, "loss": 0.0052, "step": 16195 }, { "epoch": 17.876863611264493, "grad_norm": 0.5483525395393372, "learning_rate": 2.7936333333333332e-05, "loss": 0.0101, "step": 16196 }, { "epoch": 17.877967973495306, "grad_norm": 0.2521206736564636, "learning_rate": 2.7936e-05, "loss": 0.0106, "step": 16197 }, { "epoch": 17.879072335726118, "grad_norm": 0.16423390805721283, "learning_rate": 2.7935666666666667e-05, "loss": 0.004, "step": 16198 }, { "epoch": 17.88017669795693, "grad_norm": 0.721734344959259, "learning_rate": 2.7935333333333333e-05, "loss": 0.0092, "step": 16199 }, { "epoch": 17.881281060187742, "grad_norm": 0.34544849395751953, "learning_rate": 2.7935000000000003e-05, 
"loss": 0.0063, "step": 16200 }, { "epoch": 17.882385422418555, "grad_norm": 0.6741859316825867, "learning_rate": 2.7934666666666665e-05, "loss": 0.0168, "step": 16201 }, { "epoch": 17.883489784649363, "grad_norm": 0.4670186936855316, "learning_rate": 2.7934333333333335e-05, "loss": 0.0107, "step": 16202 }, { "epoch": 17.884594146880175, "grad_norm": 0.5206179618835449, "learning_rate": 2.7934e-05, "loss": 0.1165, "step": 16203 }, { "epoch": 17.885698509110988, "grad_norm": 0.5915818810462952, "learning_rate": 2.7933666666666666e-05, "loss": 0.1409, "step": 16204 }, { "epoch": 17.8868028713418, "grad_norm": 0.8873844146728516, "learning_rate": 2.7933333333333332e-05, "loss": 0.0953, "step": 16205 }, { "epoch": 17.887907233572612, "grad_norm": 0.41370517015457153, "learning_rate": 2.7933000000000002e-05, "loss": 0.066, "step": 16206 }, { "epoch": 17.889011595803424, "grad_norm": 0.42520982027053833, "learning_rate": 2.7932666666666668e-05, "loss": 0.053, "step": 16207 }, { "epoch": 17.890115958034237, "grad_norm": 0.47746333479881287, "learning_rate": 2.7932333333333334e-05, "loss": 0.0638, "step": 16208 }, { "epoch": 17.891220320265045, "grad_norm": 0.4581224322319031, "learning_rate": 2.7932000000000003e-05, "loss": 0.0314, "step": 16209 }, { "epoch": 17.892324682495858, "grad_norm": 0.42630240321159363, "learning_rate": 2.7931666666666665e-05, "loss": 0.0316, "step": 16210 }, { "epoch": 17.89342904472667, "grad_norm": 0.184134840965271, "learning_rate": 2.7931333333333335e-05, "loss": 0.0157, "step": 16211 }, { "epoch": 17.894533406957482, "grad_norm": 0.14792825281620026, "learning_rate": 2.7931e-05, "loss": 0.0148, "step": 16212 }, { "epoch": 17.895637769188294, "grad_norm": 0.2723931074142456, "learning_rate": 2.7930666666666667e-05, "loss": 0.0147, "step": 16213 }, { "epoch": 17.896742131419106, "grad_norm": 0.2623361647129059, "learning_rate": 2.7930333333333336e-05, "loss": 0.0116, "step": 16214 }, { "epoch": 17.89784649364992, "grad_norm": 
0.15463589131832123, "learning_rate": 2.7930000000000002e-05, "loss": 0.0093, "step": 16215 }, { "epoch": 17.898950855880727, "grad_norm": 0.22162915766239166, "learning_rate": 2.7929666666666668e-05, "loss": 0.0304, "step": 16216 }, { "epoch": 17.90005521811154, "grad_norm": 0.26199838519096375, "learning_rate": 2.7929333333333334e-05, "loss": 0.0102, "step": 16217 }, { "epoch": 17.90115958034235, "grad_norm": 0.18568527698516846, "learning_rate": 2.7929000000000003e-05, "loss": 0.0069, "step": 16218 }, { "epoch": 17.902263942573164, "grad_norm": 0.1740635484457016, "learning_rate": 2.7928666666666666e-05, "loss": 0.0099, "step": 16219 }, { "epoch": 17.903368304803976, "grad_norm": 0.33243510127067566, "learning_rate": 2.7928333333333335e-05, "loss": 0.0115, "step": 16220 }, { "epoch": 17.90447266703479, "grad_norm": 0.1424221694469452, "learning_rate": 2.7927999999999998e-05, "loss": 0.0119, "step": 16221 }, { "epoch": 17.9055770292656, "grad_norm": 0.13132256269454956, "learning_rate": 2.7927666666666667e-05, "loss": 0.0045, "step": 16222 }, { "epoch": 17.90668139149641, "grad_norm": 0.14664915204048157, "learning_rate": 2.7927333333333336e-05, "loss": 0.0069, "step": 16223 }, { "epoch": 17.90778575372722, "grad_norm": 0.12555016577243805, "learning_rate": 2.7927e-05, "loss": 0.0052, "step": 16224 }, { "epoch": 17.908890115958034, "grad_norm": 0.1201561689376831, "learning_rate": 2.7926666666666668e-05, "loss": 0.0062, "step": 16225 }, { "epoch": 17.909994478188846, "grad_norm": 0.19226130843162537, "learning_rate": 2.7926333333333334e-05, "loss": 0.0067, "step": 16226 }, { "epoch": 17.91109884041966, "grad_norm": 0.13771569728851318, "learning_rate": 2.7926e-05, "loss": 0.0048, "step": 16227 }, { "epoch": 17.91220320265047, "grad_norm": 0.14128799736499786, "learning_rate": 2.7925666666666666e-05, "loss": 0.0039, "step": 16228 }, { "epoch": 17.913307564881283, "grad_norm": 0.14432042837142944, "learning_rate": 2.7925333333333335e-05, "loss": 0.0044, "step": 
16229 }, { "epoch": 17.91441192711209, "grad_norm": 0.22611694037914276, "learning_rate": 2.7924999999999998e-05, "loss": 0.0083, "step": 16230 }, { "epoch": 17.915516289342904, "grad_norm": 0.17735570669174194, "learning_rate": 2.7924666666666667e-05, "loss": 0.0073, "step": 16231 }, { "epoch": 17.916620651573716, "grad_norm": 0.17998546361923218, "learning_rate": 2.7924333333333336e-05, "loss": 0.0099, "step": 16232 }, { "epoch": 17.917725013804528, "grad_norm": 1.3973989486694336, "learning_rate": 2.7924e-05, "loss": 0.0075, "step": 16233 }, { "epoch": 17.91882937603534, "grad_norm": 0.2319350391626358, "learning_rate": 2.7923666666666668e-05, "loss": 0.0078, "step": 16234 }, { "epoch": 17.919933738266153, "grad_norm": 0.22045864164829254, "learning_rate": 2.7923333333333334e-05, "loss": 0.0121, "step": 16235 }, { "epoch": 17.921038100496965, "grad_norm": 0.09494367986917496, "learning_rate": 2.7923e-05, "loss": 0.0044, "step": 16236 }, { "epoch": 17.922142462727773, "grad_norm": 0.10640934854745865, "learning_rate": 2.7922666666666666e-05, "loss": 0.0038, "step": 16237 }, { "epoch": 17.923246824958586, "grad_norm": 0.1711222529411316, "learning_rate": 2.7922333333333335e-05, "loss": 0.0133, "step": 16238 }, { "epoch": 17.924351187189398, "grad_norm": 0.32547903060913086, "learning_rate": 2.7922e-05, "loss": 0.0135, "step": 16239 }, { "epoch": 17.92545554942021, "grad_norm": 0.46673914790153503, "learning_rate": 2.7921666666666667e-05, "loss": 0.0152, "step": 16240 }, { "epoch": 17.926559911651022, "grad_norm": 0.6334415674209595, "learning_rate": 2.7921333333333337e-05, "loss": 0.0052, "step": 16241 }, { "epoch": 17.927664273881835, "grad_norm": 0.5582582950592041, "learning_rate": 2.7921e-05, "loss": 0.0164, "step": 16242 }, { "epoch": 17.928768636112643, "grad_norm": 0.32226336002349854, "learning_rate": 2.792066666666667e-05, "loss": 0.01, "step": 16243 }, { "epoch": 17.929872998343455, "grad_norm": 0.14438113570213318, "learning_rate": 
2.7920333333333334e-05, "loss": 0.0067, "step": 16244 }, { "epoch": 17.930977360574268, "grad_norm": 0.10381081700325012, "learning_rate": 2.792e-05, "loss": 0.0046, "step": 16245 }, { "epoch": 17.93208172280508, "grad_norm": 0.3208088278770447, "learning_rate": 2.7919666666666666e-05, "loss": 0.0107, "step": 16246 }, { "epoch": 17.933186085035892, "grad_norm": 0.2401628941297531, "learning_rate": 2.7919333333333336e-05, "loss": 0.0091, "step": 16247 }, { "epoch": 17.934290447266704, "grad_norm": 0.21071089804172516, "learning_rate": 2.7919e-05, "loss": 0.0091, "step": 16248 }, { "epoch": 17.935394809497517, "grad_norm": 0.15914282202720642, "learning_rate": 2.7918666666666667e-05, "loss": 0.0066, "step": 16249 }, { "epoch": 17.936499171728325, "grad_norm": 0.2336408495903015, "learning_rate": 2.7918333333333333e-05, "loss": 0.0081, "step": 16250 }, { "epoch": 17.937603533959138, "grad_norm": 0.5241631269454956, "learning_rate": 2.7918e-05, "loss": 0.0175, "step": 16251 }, { "epoch": 17.93870789618995, "grad_norm": 0.3217468857765198, "learning_rate": 2.791766666666667e-05, "loss": 0.0059, "step": 16252 }, { "epoch": 17.939812258420762, "grad_norm": 0.536213219165802, "learning_rate": 2.791733333333333e-05, "loss": 0.1469, "step": 16253 }, { "epoch": 17.940916620651574, "grad_norm": 0.6135300397872925, "learning_rate": 2.7917e-05, "loss": 0.1337, "step": 16254 }, { "epoch": 17.942020982882386, "grad_norm": 0.40299198031425476, "learning_rate": 2.7916666666666666e-05, "loss": 0.1084, "step": 16255 }, { "epoch": 17.9431253451132, "grad_norm": 0.4503650665283203, "learning_rate": 2.7916333333333332e-05, "loss": 0.0851, "step": 16256 }, { "epoch": 17.944229707344007, "grad_norm": 0.5522869229316711, "learning_rate": 2.7916000000000002e-05, "loss": 0.0699, "step": 16257 }, { "epoch": 17.94533406957482, "grad_norm": 0.38802993297576904, "learning_rate": 2.7915666666666668e-05, "loss": 0.0539, "step": 16258 }, { "epoch": 17.946438431805632, "grad_norm": 
0.4150521457195282, "learning_rate": 2.7915333333333334e-05, "loss": 0.0529, "step": 16259 }, { "epoch": 17.947542794036444, "grad_norm": 0.2529371380805969, "learning_rate": 2.7915e-05, "loss": 0.0277, "step": 16260 }, { "epoch": 17.948647156267256, "grad_norm": 0.38405436277389526, "learning_rate": 2.791466666666667e-05, "loss": 0.0222, "step": 16261 }, { "epoch": 17.94975151849807, "grad_norm": 0.3521774411201477, "learning_rate": 2.791433333333333e-05, "loss": 0.0135, "step": 16262 }, { "epoch": 17.95085588072888, "grad_norm": 0.20409654080867767, "learning_rate": 2.7914e-05, "loss": 0.0572, "step": 16263 }, { "epoch": 17.95196024295969, "grad_norm": 0.3136589825153351, "learning_rate": 2.791366666666667e-05, "loss": 0.013, "step": 16264 }, { "epoch": 17.9530646051905, "grad_norm": 0.20511755347251892, "learning_rate": 2.7913333333333333e-05, "loss": 0.0135, "step": 16265 }, { "epoch": 17.954168967421314, "grad_norm": 0.10270942747592926, "learning_rate": 2.7913000000000002e-05, "loss": 0.0042, "step": 16266 }, { "epoch": 17.955273329652126, "grad_norm": 0.12798923254013062, "learning_rate": 2.7912666666666668e-05, "loss": 0.0058, "step": 16267 }, { "epoch": 17.95637769188294, "grad_norm": 0.10055769979953766, "learning_rate": 2.7912333333333334e-05, "loss": 0.0048, "step": 16268 }, { "epoch": 17.95748205411375, "grad_norm": 0.14333762228488922, "learning_rate": 2.7912e-05, "loss": 0.0059, "step": 16269 }, { "epoch": 17.95858641634456, "grad_norm": 0.21131980419158936, "learning_rate": 2.791166666666667e-05, "loss": 0.013, "step": 16270 }, { "epoch": 17.95969077857537, "grad_norm": 0.19775284826755524, "learning_rate": 2.791133333333333e-05, "loss": 0.0081, "step": 16271 }, { "epoch": 17.960795140806184, "grad_norm": 0.2690388560295105, "learning_rate": 2.7911e-05, "loss": 0.0159, "step": 16272 }, { "epoch": 17.961899503036996, "grad_norm": 0.3461953103542328, "learning_rate": 2.791066666666667e-05, "loss": 0.013, "step": 16273 }, { "epoch": 17.963003865267808, 
"grad_norm": 0.19676916301250458, "learning_rate": 2.7910333333333333e-05, "loss": 0.0098, "step": 16274 }, { "epoch": 17.96410822749862, "grad_norm": 0.19429197907447815, "learning_rate": 2.7910000000000002e-05, "loss": 0.0103, "step": 16275 }, { "epoch": 17.965212589729433, "grad_norm": 0.14210301637649536, "learning_rate": 2.7909666666666668e-05, "loss": 0.0079, "step": 16276 }, { "epoch": 17.96631695196024, "grad_norm": 0.2870037257671356, "learning_rate": 2.7909333333333334e-05, "loss": 0.0072, "step": 16277 }, { "epoch": 17.967421314191053, "grad_norm": 0.11284314095973969, "learning_rate": 2.7909e-05, "loss": 0.0053, "step": 16278 }, { "epoch": 17.968525676421866, "grad_norm": 0.16361728310585022, "learning_rate": 2.790866666666667e-05, "loss": 0.0094, "step": 16279 }, { "epoch": 17.969630038652678, "grad_norm": 0.11632847040891647, "learning_rate": 2.7908333333333332e-05, "loss": 0.0042, "step": 16280 }, { "epoch": 17.97073440088349, "grad_norm": 0.25798919796943665, "learning_rate": 2.7908e-05, "loss": 0.0075, "step": 16281 }, { "epoch": 17.971838763114302, "grad_norm": 0.18912678956985474, "learning_rate": 2.7907666666666667e-05, "loss": 0.0072, "step": 16282 }, { "epoch": 17.972943125345115, "grad_norm": 0.3445272147655487, "learning_rate": 2.7907333333333333e-05, "loss": 0.0119, "step": 16283 }, { "epoch": 17.974047487575923, "grad_norm": 0.13108383119106293, "learning_rate": 2.7907000000000002e-05, "loss": 0.0053, "step": 16284 }, { "epoch": 17.975151849806736, "grad_norm": 0.6648809909820557, "learning_rate": 2.7906666666666665e-05, "loss": 0.013, "step": 16285 }, { "epoch": 17.976256212037548, "grad_norm": 0.40754708647727966, "learning_rate": 2.7906333333333334e-05, "loss": 0.0103, "step": 16286 }, { "epoch": 17.97736057426836, "grad_norm": 1.4199328422546387, "learning_rate": 2.7906e-05, "loss": 0.035, "step": 16287 }, { "epoch": 17.978464936499172, "grad_norm": 0.268180251121521, "learning_rate": 2.7905666666666666e-05, "loss": 0.0102, "step": 
16288 }, { "epoch": 17.979569298729984, "grad_norm": 0.13252265751361847, "learning_rate": 2.7905333333333335e-05, "loss": 0.0052, "step": 16289 }, { "epoch": 17.980673660960797, "grad_norm": 0.10831600427627563, "learning_rate": 2.7905e-05, "loss": 0.003, "step": 16290 }, { "epoch": 17.981778023191605, "grad_norm": 0.36221566796302795, "learning_rate": 2.7904666666666667e-05, "loss": 0.0146, "step": 16291 }, { "epoch": 17.982882385422418, "grad_norm": 0.17688950896263123, "learning_rate": 2.7904333333333333e-05, "loss": 0.0075, "step": 16292 }, { "epoch": 17.98398674765323, "grad_norm": 0.3052693009376526, "learning_rate": 2.7904000000000003e-05, "loss": 0.0118, "step": 16293 }, { "epoch": 17.985091109884042, "grad_norm": 0.2028685063123703, "learning_rate": 2.7903666666666665e-05, "loss": 0.0058, "step": 16294 }, { "epoch": 17.986195472114854, "grad_norm": 0.13234929740428925, "learning_rate": 2.7903333333333334e-05, "loss": 0.0062, "step": 16295 }, { "epoch": 17.987299834345666, "grad_norm": 0.07605252414941788, "learning_rate": 2.7903e-05, "loss": 0.0028, "step": 16296 }, { "epoch": 17.98840419657648, "grad_norm": 0.19255007803440094, "learning_rate": 2.7902666666666666e-05, "loss": 0.0079, "step": 16297 }, { "epoch": 17.989508558807287, "grad_norm": 0.3059206008911133, "learning_rate": 2.7902333333333336e-05, "loss": 0.0166, "step": 16298 }, { "epoch": 17.9906129210381, "grad_norm": 0.4231404662132263, "learning_rate": 2.7902e-05, "loss": 0.0119, "step": 16299 }, { "epoch": 17.991717283268912, "grad_norm": 0.3093406856060028, "learning_rate": 2.7901666666666667e-05, "loss": 0.0083, "step": 16300 }, { "epoch": 17.992821645499724, "grad_norm": 0.28739675879478455, "learning_rate": 2.7901333333333333e-05, "loss": 0.0071, "step": 16301 }, { "epoch": 17.993926007730536, "grad_norm": 0.1315564215183258, "learning_rate": 2.7901000000000003e-05, "loss": 0.005, "step": 16302 }, { "epoch": 17.99503036996135, "grad_norm": 0.4491921663284302, "learning_rate": 
2.7900666666666665e-05, "loss": 0.0689, "step": 16303 }, { "epoch": 17.99613473219216, "grad_norm": 0.21025559306144714, "learning_rate": 2.7900333333333335e-05, "loss": 0.0192, "step": 16304 }, { "epoch": 17.99723909442297, "grad_norm": 0.13712261617183685, "learning_rate": 2.79e-05, "loss": 0.0069, "step": 16305 }, { "epoch": 17.99834345665378, "grad_norm": 0.15326760709285736, "learning_rate": 2.7899666666666666e-05, "loss": 0.0066, "step": 16306 }, { "epoch": 17.999447818884594, "grad_norm": 0.12749972939491272, "learning_rate": 2.7899333333333336e-05, "loss": 0.0051, "step": 16307 }, { "epoch": 18.0, "grad_norm": 0.2570328116416931, "learning_rate": 2.7899000000000002e-05, "loss": 0.0045, "step": 16308 }, { "epoch": 18.001104362230812, "grad_norm": 0.49290063977241516, "learning_rate": 2.7898666666666668e-05, "loss": 0.1083, "step": 16309 }, { "epoch": 18.002208724461624, "grad_norm": 0.42644402384757996, "learning_rate": 2.7898333333333334e-05, "loss": 0.0849, "step": 16310 }, { "epoch": 18.003313086692437, "grad_norm": 0.4971655011177063, "learning_rate": 2.7898e-05, "loss": 0.0783, "step": 16311 }, { "epoch": 18.004417448923245, "grad_norm": 0.3576807677745819, "learning_rate": 2.7897666666666665e-05, "loss": 0.0592, "step": 16312 }, { "epoch": 18.005521811154058, "grad_norm": 0.27160143852233887, "learning_rate": 2.7897333333333335e-05, "loss": 0.034, "step": 16313 }, { "epoch": 18.00662617338487, "grad_norm": 0.2882966101169586, "learning_rate": 2.7897e-05, "loss": 0.028, "step": 16314 }, { "epoch": 18.007730535615682, "grad_norm": 0.23016205430030823, "learning_rate": 2.7896666666666667e-05, "loss": 0.0248, "step": 16315 }, { "epoch": 18.008834897846494, "grad_norm": 0.20226247608661652, "learning_rate": 2.7896333333333336e-05, "loss": 0.0165, "step": 16316 }, { "epoch": 18.009939260077306, "grad_norm": 0.08989636600017548, "learning_rate": 2.7896e-05, "loss": 0.0054, "step": 16317 }, { "epoch": 18.01104362230812, "grad_norm": 0.256209135055542, 
"learning_rate": 2.7895666666666668e-05, "loss": 0.0127, "step": 16318 }, { "epoch": 18.012147984538927, "grad_norm": 0.0911799743771553, "learning_rate": 2.7895333333333334e-05, "loss": 0.0039, "step": 16319 }, { "epoch": 18.01325234676974, "grad_norm": 0.11416343599557877, "learning_rate": 2.7895e-05, "loss": 0.0047, "step": 16320 }, { "epoch": 18.014356709000552, "grad_norm": 0.22375459969043732, "learning_rate": 2.7894666666666666e-05, "loss": 0.0143, "step": 16321 }, { "epoch": 18.015461071231364, "grad_norm": 0.12148302793502808, "learning_rate": 2.7894333333333335e-05, "loss": 0.0064, "step": 16322 }, { "epoch": 18.016565433462176, "grad_norm": 0.2618977725505829, "learning_rate": 2.7894e-05, "loss": 0.0209, "step": 16323 }, { "epoch": 18.01766979569299, "grad_norm": 0.36666139960289, "learning_rate": 2.7893666666666667e-05, "loss": 0.0066, "step": 16324 }, { "epoch": 18.0187741579238, "grad_norm": 0.17042942345142365, "learning_rate": 2.7893333333333336e-05, "loss": 0.0076, "step": 16325 }, { "epoch": 18.01987852015461, "grad_norm": 0.12571436166763306, "learning_rate": 2.7893e-05, "loss": 0.0044, "step": 16326 }, { "epoch": 18.02098288238542, "grad_norm": 0.2460222840309143, "learning_rate": 2.7892666666666668e-05, "loss": 0.0044, "step": 16327 }, { "epoch": 18.022087244616234, "grad_norm": 0.22911827266216278, "learning_rate": 2.7892333333333334e-05, "loss": 0.0163, "step": 16328 }, { "epoch": 18.023191606847046, "grad_norm": 0.22237128019332886, "learning_rate": 2.7892e-05, "loss": 0.0144, "step": 16329 }, { "epoch": 18.02429596907786, "grad_norm": 0.14711886644363403, "learning_rate": 2.7891666666666666e-05, "loss": 0.0038, "step": 16330 }, { "epoch": 18.02540033130867, "grad_norm": 1.1789822578430176, "learning_rate": 2.7891333333333335e-05, "loss": 0.0226, "step": 16331 }, { "epoch": 18.02650469353948, "grad_norm": 0.1264696717262268, "learning_rate": 2.7891e-05, "loss": 0.006, "step": 16332 }, { "epoch": 18.02760905577029, "grad_norm": 
0.2343532294034958, "learning_rate": 2.7890666666666667e-05, "loss": 0.0072, "step": 16333 }, { "epoch": 18.028713418001104, "grad_norm": 0.27864813804626465, "learning_rate": 2.7890333333333336e-05, "loss": 0.0072, "step": 16334 }, { "epoch": 18.029817780231916, "grad_norm": 0.10926610976457596, "learning_rate": 2.789e-05, "loss": 0.003, "step": 16335 }, { "epoch": 18.030922142462728, "grad_norm": 0.13790108263492584, "learning_rate": 2.7889666666666668e-05, "loss": 0.0062, "step": 16336 }, { "epoch": 18.03202650469354, "grad_norm": 0.09251325577497482, "learning_rate": 2.7889333333333334e-05, "loss": 0.0028, "step": 16337 }, { "epoch": 18.033130866924353, "grad_norm": 0.15566492080688477, "learning_rate": 2.7889e-05, "loss": 0.006, "step": 16338 }, { "epoch": 18.03423522915516, "grad_norm": 0.2055625170469284, "learning_rate": 2.788866666666667e-05, "loss": 0.0094, "step": 16339 }, { "epoch": 18.035339591385974, "grad_norm": 0.6947034597396851, "learning_rate": 2.7888333333333335e-05, "loss": 0.0221, "step": 16340 }, { "epoch": 18.036443953616786, "grad_norm": 0.17364124953746796, "learning_rate": 2.7888e-05, "loss": 0.0063, "step": 16341 }, { "epoch": 18.037548315847598, "grad_norm": 0.07821948081254959, "learning_rate": 2.7887666666666667e-05, "loss": 0.0028, "step": 16342 }, { "epoch": 18.03865267807841, "grad_norm": 0.6552960276603699, "learning_rate": 2.7887333333333333e-05, "loss": 0.0065, "step": 16343 }, { "epoch": 18.039757040309222, "grad_norm": 0.5289844274520874, "learning_rate": 2.7887e-05, "loss": 0.0108, "step": 16344 }, { "epoch": 18.040861402540035, "grad_norm": 0.3060997426509857, "learning_rate": 2.788666666666667e-05, "loss": 0.0064, "step": 16345 }, { "epoch": 18.041965764770843, "grad_norm": 0.17809347808361053, "learning_rate": 2.788633333333333e-05, "loss": 0.0072, "step": 16346 }, { "epoch": 18.043070127001656, "grad_norm": 0.2839074730873108, "learning_rate": 2.7886e-05, "loss": 0.0059, "step": 16347 }, { "epoch": 18.044174489232468, 
"grad_norm": 0.18339522182941437, "learning_rate": 2.788566666666667e-05, "loss": 0.0051, "step": 16348 }, { "epoch": 18.04527885146328, "grad_norm": 0.49176836013793945, "learning_rate": 2.7885333333333332e-05, "loss": 0.0094, "step": 16349 }, { "epoch": 18.046383213694092, "grad_norm": 0.13352742791175842, "learning_rate": 2.7885e-05, "loss": 0.0053, "step": 16350 }, { "epoch": 18.047487575924904, "grad_norm": 0.10378549247980118, "learning_rate": 2.7884666666666667e-05, "loss": 0.0035, "step": 16351 }, { "epoch": 18.048591938155717, "grad_norm": 0.28556886315345764, "learning_rate": 2.7884333333333333e-05, "loss": 0.0114, "step": 16352 }, { "epoch": 18.049696300386525, "grad_norm": 0.19779518246650696, "learning_rate": 2.7884e-05, "loss": 0.0064, "step": 16353 }, { "epoch": 18.050800662617338, "grad_norm": 0.24370554089546204, "learning_rate": 2.788366666666667e-05, "loss": 0.0073, "step": 16354 }, { "epoch": 18.05190502484815, "grad_norm": 0.232716366648674, "learning_rate": 2.788333333333333e-05, "loss": 0.0071, "step": 16355 }, { "epoch": 18.053009387078962, "grad_norm": 0.5117935538291931, "learning_rate": 2.7883e-05, "loss": 0.0182, "step": 16356 }, { "epoch": 18.054113749309774, "grad_norm": 0.23124641180038452, "learning_rate": 2.788266666666667e-05, "loss": 0.0114, "step": 16357 }, { "epoch": 18.055218111540587, "grad_norm": 0.4212195575237274, "learning_rate": 2.7882333333333332e-05, "loss": 0.0129, "step": 16358 }, { "epoch": 18.0563224737714, "grad_norm": 0.4208447337150574, "learning_rate": 2.7882000000000002e-05, "loss": 0.1046, "step": 16359 }, { "epoch": 18.057426836002207, "grad_norm": 0.4543713629245758, "learning_rate": 2.7881666666666668e-05, "loss": 0.1542, "step": 16360 }, { "epoch": 18.05853119823302, "grad_norm": 0.37301886081695557, "learning_rate": 2.7881333333333334e-05, "loss": 0.0933, "step": 16361 }, { "epoch": 18.059635560463832, "grad_norm": 0.23402772843837738, "learning_rate": 2.7881e-05, "loss": 0.0347, "step": 16362 }, { 
"epoch": 18.060739922694644, "grad_norm": 0.2494312822818756, "learning_rate": 2.788066666666667e-05, "loss": 0.0334, "step": 16363 }, { "epoch": 18.061844284925456, "grad_norm": 0.6671484112739563, "learning_rate": 2.7880333333333335e-05, "loss": 0.0461, "step": 16364 }, { "epoch": 18.06294864715627, "grad_norm": 0.5703666806221008, "learning_rate": 2.788e-05, "loss": 0.0286, "step": 16365 }, { "epoch": 18.064053009387077, "grad_norm": 0.21821816265583038, "learning_rate": 2.787966666666667e-05, "loss": 0.0159, "step": 16366 }, { "epoch": 18.06515737161789, "grad_norm": 0.24354876577854156, "learning_rate": 2.7879333333333333e-05, "loss": 0.025, "step": 16367 }, { "epoch": 18.0662617338487, "grad_norm": 0.2729504406452179, "learning_rate": 2.7879000000000002e-05, "loss": 0.0162, "step": 16368 }, { "epoch": 18.067366096079514, "grad_norm": 0.4055497646331787, "learning_rate": 2.7878666666666668e-05, "loss": 0.0127, "step": 16369 }, { "epoch": 18.068470458310326, "grad_norm": 0.1346227079629898, "learning_rate": 2.7878333333333334e-05, "loss": 0.0039, "step": 16370 }, { "epoch": 18.06957482054114, "grad_norm": 0.18094676733016968, "learning_rate": 2.7878e-05, "loss": 0.011, "step": 16371 }, { "epoch": 18.07067918277195, "grad_norm": 0.11087892204523087, "learning_rate": 2.7877666666666666e-05, "loss": 0.0036, "step": 16372 }, { "epoch": 18.07178354500276, "grad_norm": 0.11723796278238297, "learning_rate": 2.7877333333333335e-05, "loss": 0.0043, "step": 16373 }, { "epoch": 18.07288790723357, "grad_norm": 0.3602650761604309, "learning_rate": 2.7877e-05, "loss": 0.007, "step": 16374 }, { "epoch": 18.073992269464384, "grad_norm": 0.26791203022003174, "learning_rate": 2.7876666666666667e-05, "loss": 0.0065, "step": 16375 }, { "epoch": 18.075096631695196, "grad_norm": 0.15981969237327576, "learning_rate": 2.7876333333333333e-05, "loss": 0.005, "step": 16376 }, { "epoch": 18.076200993926008, "grad_norm": 0.1941841095685959, "learning_rate": 2.7876000000000002e-05, "loss": 
0.0038, "step": 16377 }, { "epoch": 18.07730535615682, "grad_norm": 0.21982428431510925, "learning_rate": 2.7875666666666665e-05, "loss": 0.0095, "step": 16378 }, { "epoch": 18.078409718387633, "grad_norm": 0.23047107458114624, "learning_rate": 2.7875333333333334e-05, "loss": 0.0154, "step": 16379 }, { "epoch": 18.07951408061844, "grad_norm": 0.1169561967253685, "learning_rate": 2.7875e-05, "loss": 0.0029, "step": 16380 }, { "epoch": 18.080618442849254, "grad_norm": 0.8399888277053833, "learning_rate": 2.7874666666666666e-05, "loss": 0.0132, "step": 16381 }, { "epoch": 18.081722805080066, "grad_norm": 0.16893050074577332, "learning_rate": 2.7874333333333335e-05, "loss": 0.0055, "step": 16382 }, { "epoch": 18.082827167310878, "grad_norm": 0.13408254086971283, "learning_rate": 2.7874e-05, "loss": 0.0185, "step": 16383 }, { "epoch": 18.08393152954169, "grad_norm": 0.26029351353645325, "learning_rate": 2.7873666666666667e-05, "loss": 0.0053, "step": 16384 }, { "epoch": 18.085035891772502, "grad_norm": 0.46569883823394775, "learning_rate": 2.7873333333333333e-05, "loss": 0.0491, "step": 16385 }, { "epoch": 18.086140254003315, "grad_norm": 0.14465174078941345, "learning_rate": 2.7873000000000002e-05, "loss": 0.0037, "step": 16386 }, { "epoch": 18.087244616234123, "grad_norm": 0.11053702980279922, "learning_rate": 2.7872666666666665e-05, "loss": 0.0041, "step": 16387 }, { "epoch": 18.088348978464936, "grad_norm": 0.22975417971611023, "learning_rate": 2.7872333333333334e-05, "loss": 0.006, "step": 16388 }, { "epoch": 18.089453340695748, "grad_norm": 0.21460500359535217, "learning_rate": 2.7872000000000004e-05, "loss": 0.0086, "step": 16389 }, { "epoch": 18.09055770292656, "grad_norm": 0.09038848429918289, "learning_rate": 2.7871666666666666e-05, "loss": 0.0031, "step": 16390 }, { "epoch": 18.091662065157372, "grad_norm": 0.125845268368721, "learning_rate": 2.7871333333333335e-05, "loss": 0.0034, "step": 16391 }, { "epoch": 18.092766427388185, "grad_norm": 
0.19306616485118866, "learning_rate": 2.7871e-05, "loss": 0.006, "step": 16392 }, { "epoch": 18.093870789618997, "grad_norm": 0.3192811906337738, "learning_rate": 2.7870666666666667e-05, "loss": 0.0076, "step": 16393 }, { "epoch": 18.094975151849805, "grad_norm": 0.3006177842617035, "learning_rate": 2.7870333333333333e-05, "loss": 0.0063, "step": 16394 }, { "epoch": 18.096079514080618, "grad_norm": 0.3477441370487213, "learning_rate": 2.7870000000000003e-05, "loss": 0.009, "step": 16395 }, { "epoch": 18.09718387631143, "grad_norm": 0.19026923179626465, "learning_rate": 2.7869666666666665e-05, "loss": 0.0066, "step": 16396 }, { "epoch": 18.098288238542242, "grad_norm": 0.31691500544548035, "learning_rate": 2.7869333333333334e-05, "loss": 0.0099, "step": 16397 }, { "epoch": 18.099392600773054, "grad_norm": 0.08214958012104034, "learning_rate": 2.7869000000000004e-05, "loss": 0.0027, "step": 16398 }, { "epoch": 18.100496963003867, "grad_norm": 0.11423938721418381, "learning_rate": 2.7868666666666666e-05, "loss": 0.0047, "step": 16399 }, { "epoch": 18.101601325234675, "grad_norm": 0.26981544494628906, "learning_rate": 2.7868333333333336e-05, "loss": 0.0059, "step": 16400 }, { "epoch": 18.102705687465487, "grad_norm": 0.12348446995019913, "learning_rate": 2.7868e-05, "loss": 0.004, "step": 16401 }, { "epoch": 18.1038100496963, "grad_norm": 0.07027379423379898, "learning_rate": 2.7867666666666667e-05, "loss": 0.0022, "step": 16402 }, { "epoch": 18.104914411927112, "grad_norm": 0.5123409032821655, "learning_rate": 2.7867333333333333e-05, "loss": 0.0133, "step": 16403 }, { "epoch": 18.106018774157924, "grad_norm": 0.2944492697715759, "learning_rate": 2.7867e-05, "loss": 0.0054, "step": 16404 }, { "epoch": 18.107123136388736, "grad_norm": 0.24801035225391388, "learning_rate": 2.7866666666666665e-05, "loss": 0.0052, "step": 16405 }, { "epoch": 18.10822749861955, "grad_norm": 0.1657509058713913, "learning_rate": 2.7866333333333335e-05, "loss": 0.0053, "step": 16406 }, { 
"epoch": 18.109331860850357, "grad_norm": 0.2834145128726959, "learning_rate": 2.7866e-05, "loss": 0.0052, "step": 16407 }, { "epoch": 18.11043622308117, "grad_norm": 0.15914317965507507, "learning_rate": 2.7865666666666666e-05, "loss": 0.0031, "step": 16408 }, { "epoch": 18.11154058531198, "grad_norm": 0.6870086193084717, "learning_rate": 2.7865333333333336e-05, "loss": 0.1556, "step": 16409 }, { "epoch": 18.112644947542794, "grad_norm": 0.46847379207611084, "learning_rate": 2.7865e-05, "loss": 0.0574, "step": 16410 }, { "epoch": 18.113749309773606, "grad_norm": 0.4261085093021393, "learning_rate": 2.7864666666666668e-05, "loss": 0.084, "step": 16411 }, { "epoch": 18.11485367200442, "grad_norm": 0.4271809756755829, "learning_rate": 2.7864333333333334e-05, "loss": 0.053, "step": 16412 }, { "epoch": 18.11595803423523, "grad_norm": 0.38506993651390076, "learning_rate": 2.7864e-05, "loss": 0.0839, "step": 16413 }, { "epoch": 18.11706239646604, "grad_norm": 0.4913073778152466, "learning_rate": 2.786366666666667e-05, "loss": 0.0329, "step": 16414 }, { "epoch": 18.11816675869685, "grad_norm": 0.23785021901130676, "learning_rate": 2.7863333333333335e-05, "loss": 0.039, "step": 16415 }, { "epoch": 18.119271120927664, "grad_norm": 0.4731876850128174, "learning_rate": 2.7863e-05, "loss": 0.0593, "step": 16416 }, { "epoch": 18.120375483158476, "grad_norm": 0.5146350860595703, "learning_rate": 2.7862666666666667e-05, "loss": 0.0414, "step": 16417 }, { "epoch": 18.12147984538929, "grad_norm": 0.2527329623699188, "learning_rate": 2.7862333333333336e-05, "loss": 0.0167, "step": 16418 }, { "epoch": 18.1225842076201, "grad_norm": 0.1541852056980133, "learning_rate": 2.7862e-05, "loss": 0.0102, "step": 16419 }, { "epoch": 18.123688569850913, "grad_norm": 0.14653843641281128, "learning_rate": 2.7861666666666668e-05, "loss": 0.0063, "step": 16420 }, { "epoch": 18.12479293208172, "grad_norm": 0.17571242153644562, "learning_rate": 2.7861333333333334e-05, "loss": 0.0044, "step": 16421 }, 
{ "epoch": 18.125897294312534, "grad_norm": 0.15206430852413177, "learning_rate": 2.7861e-05, "loss": 0.0042, "step": 16422 }, { "epoch": 18.127001656543346, "grad_norm": 0.07412876933813095, "learning_rate": 2.786066666666667e-05, "loss": 0.0039, "step": 16423 }, { "epoch": 18.128106018774158, "grad_norm": 0.09451702237129211, "learning_rate": 2.7860333333333335e-05, "loss": 0.0047, "step": 16424 }, { "epoch": 18.12921038100497, "grad_norm": 0.1855655461549759, "learning_rate": 2.786e-05, "loss": 0.0045, "step": 16425 }, { "epoch": 18.130314743235783, "grad_norm": 0.18020787835121155, "learning_rate": 2.7859666666666667e-05, "loss": 0.0072, "step": 16426 }, { "epoch": 18.131419105466595, "grad_norm": 1.1878138780593872, "learning_rate": 2.7859333333333336e-05, "loss": 0.0111, "step": 16427 }, { "epoch": 18.132523467697403, "grad_norm": 0.297294020652771, "learning_rate": 2.7859e-05, "loss": 0.0097, "step": 16428 }, { "epoch": 18.133627829928216, "grad_norm": 0.0922309011220932, "learning_rate": 2.7858666666666668e-05, "loss": 0.0037, "step": 16429 }, { "epoch": 18.134732192159028, "grad_norm": 0.16785886883735657, "learning_rate": 2.7858333333333334e-05, "loss": 0.0111, "step": 16430 }, { "epoch": 18.13583655438984, "grad_norm": 0.22528068721294403, "learning_rate": 2.7858e-05, "loss": 0.0084, "step": 16431 }, { "epoch": 18.136940916620652, "grad_norm": 0.16347232460975647, "learning_rate": 2.785766666666667e-05, "loss": 0.0378, "step": 16432 }, { "epoch": 18.138045278851465, "grad_norm": 0.13032588362693787, "learning_rate": 2.7857333333333332e-05, "loss": 0.0041, "step": 16433 }, { "epoch": 18.139149641082273, "grad_norm": 0.1315935105085373, "learning_rate": 2.7857e-05, "loss": 0.0049, "step": 16434 }, { "epoch": 18.140254003313085, "grad_norm": 0.17145679891109467, "learning_rate": 2.7856666666666667e-05, "loss": 0.0064, "step": 16435 }, { "epoch": 18.141358365543898, "grad_norm": 0.3335361182689667, "learning_rate": 2.7856333333333333e-05, "loss": 0.0064, 
"step": 16436 }, { "epoch": 18.14246272777471, "grad_norm": 0.15156273543834686, "learning_rate": 2.7856e-05, "loss": 0.0055, "step": 16437 }, { "epoch": 18.143567090005522, "grad_norm": 0.2419082373380661, "learning_rate": 2.7855666666666668e-05, "loss": 0.006, "step": 16438 }, { "epoch": 18.144671452236334, "grad_norm": 0.11855805665254593, "learning_rate": 2.7855333333333334e-05, "loss": 0.0071, "step": 16439 }, { "epoch": 18.145775814467147, "grad_norm": 0.1840069591999054, "learning_rate": 2.7855e-05, "loss": 0.0075, "step": 16440 }, { "epoch": 18.146880176697955, "grad_norm": 0.2940891981124878, "learning_rate": 2.785466666666667e-05, "loss": 0.0088, "step": 16441 }, { "epoch": 18.147984538928768, "grad_norm": 0.19741083681583405, "learning_rate": 2.7854333333333332e-05, "loss": 0.0068, "step": 16442 }, { "epoch": 18.14908890115958, "grad_norm": 0.08112826943397522, "learning_rate": 2.7854e-05, "loss": 0.0025, "step": 16443 }, { "epoch": 18.150193263390392, "grad_norm": 0.4508708715438843, "learning_rate": 2.7853666666666667e-05, "loss": 0.0068, "step": 16444 }, { "epoch": 18.151297625621204, "grad_norm": 0.2798335552215576, "learning_rate": 2.7853333333333333e-05, "loss": 0.0161, "step": 16445 }, { "epoch": 18.152401987852016, "grad_norm": 0.2703295648097992, "learning_rate": 2.7853e-05, "loss": 0.0059, "step": 16446 }, { "epoch": 18.15350635008283, "grad_norm": 0.34299734234809875, "learning_rate": 2.785266666666667e-05, "loss": 0.0083, "step": 16447 }, { "epoch": 18.154610712313637, "grad_norm": 0.1262112706899643, "learning_rate": 2.7852333333333334e-05, "loss": 0.0047, "step": 16448 }, { "epoch": 18.15571507454445, "grad_norm": 0.19877420365810394, "learning_rate": 2.7852e-05, "loss": 0.0055, "step": 16449 }, { "epoch": 18.156819436775262, "grad_norm": 0.08966515958309174, "learning_rate": 2.785166666666667e-05, "loss": 0.0029, "step": 16450 }, { "epoch": 18.157923799006074, "grad_norm": 0.1659456491470337, "learning_rate": 2.7851333333333332e-05, 
"loss": 0.005, "step": 16451 }, { "epoch": 18.159028161236886, "grad_norm": 0.1263550966978073, "learning_rate": 2.7851e-05, "loss": 0.0039, "step": 16452 }, { "epoch": 18.1601325234677, "grad_norm": 0.406563937664032, "learning_rate": 2.7850666666666667e-05, "loss": 0.0138, "step": 16453 }, { "epoch": 18.16123688569851, "grad_norm": 0.5119147300720215, "learning_rate": 2.7850333333333333e-05, "loss": 0.007, "step": 16454 }, { "epoch": 18.16234124792932, "grad_norm": 0.13575489819049835, "learning_rate": 2.7850000000000003e-05, "loss": 0.003, "step": 16455 }, { "epoch": 18.16344561016013, "grad_norm": 0.33028149604797363, "learning_rate": 2.784966666666667e-05, "loss": 0.0071, "step": 16456 }, { "epoch": 18.164549972390944, "grad_norm": 0.14195936918258667, "learning_rate": 2.7849333333333335e-05, "loss": 0.0038, "step": 16457 }, { "epoch": 18.165654334621756, "grad_norm": 0.24751830101013184, "learning_rate": 2.7849e-05, "loss": 0.0079, "step": 16458 }, { "epoch": 18.16675869685257, "grad_norm": 0.5104184150695801, "learning_rate": 2.784866666666667e-05, "loss": 0.1204, "step": 16459 }, { "epoch": 18.16786305908338, "grad_norm": 0.5013644695281982, "learning_rate": 2.7848333333333332e-05, "loss": 0.2039, "step": 16460 }, { "epoch": 18.168967421314193, "grad_norm": 0.6096542477607727, "learning_rate": 2.7848000000000002e-05, "loss": 0.1299, "step": 16461 }, { "epoch": 18.170071783545, "grad_norm": 0.40043193101882935, "learning_rate": 2.7847666666666664e-05, "loss": 0.0722, "step": 16462 }, { "epoch": 18.171176145775814, "grad_norm": 0.3261719346046448, "learning_rate": 2.7847333333333334e-05, "loss": 0.0764, "step": 16463 }, { "epoch": 18.172280508006626, "grad_norm": 0.25582176446914673, "learning_rate": 2.7847000000000003e-05, "loss": 0.0236, "step": 16464 }, { "epoch": 18.173384870237438, "grad_norm": 0.3549689054489136, "learning_rate": 2.7846666666666665e-05, "loss": 0.0298, "step": 16465 }, { "epoch": 18.17448923246825, "grad_norm": 0.21859662234783173, 
"learning_rate": 2.7846333333333335e-05, "loss": 0.0135, "step": 16466 }, { "epoch": 18.175593594699063, "grad_norm": 0.1643850952386856, "learning_rate": 2.7846e-05, "loss": 0.0074, "step": 16467 }, { "epoch": 18.17669795692987, "grad_norm": 0.15127794444561005, "learning_rate": 2.7845666666666667e-05, "loss": 0.0094, "step": 16468 }, { "epoch": 18.177802319160683, "grad_norm": 0.1129758432507515, "learning_rate": 2.7845333333333333e-05, "loss": 0.0057, "step": 16469 }, { "epoch": 18.178906681391496, "grad_norm": 0.32019463181495667, "learning_rate": 2.7845000000000002e-05, "loss": 0.0087, "step": 16470 }, { "epoch": 18.180011043622308, "grad_norm": 0.23917952179908752, "learning_rate": 2.7844666666666664e-05, "loss": 0.0065, "step": 16471 }, { "epoch": 18.18111540585312, "grad_norm": 0.29232004284858704, "learning_rate": 2.7844333333333334e-05, "loss": 0.0114, "step": 16472 }, { "epoch": 18.182219768083932, "grad_norm": 0.08319760113954544, "learning_rate": 2.7844000000000003e-05, "loss": 0.0036, "step": 16473 }, { "epoch": 18.183324130314745, "grad_norm": 0.1191432774066925, "learning_rate": 2.7843666666666666e-05, "loss": 0.0065, "step": 16474 }, { "epoch": 18.184428492545553, "grad_norm": 0.2710643708705902, "learning_rate": 2.7843333333333335e-05, "loss": 0.0043, "step": 16475 }, { "epoch": 18.185532854776365, "grad_norm": 0.21106377243995667, "learning_rate": 2.7843e-05, "loss": 0.0055, "step": 16476 }, { "epoch": 18.186637217007178, "grad_norm": 0.12127260118722916, "learning_rate": 2.7842666666666667e-05, "loss": 0.0044, "step": 16477 }, { "epoch": 18.18774157923799, "grad_norm": 0.13385029137134552, "learning_rate": 2.7842333333333333e-05, "loss": 0.0053, "step": 16478 }, { "epoch": 18.188845941468802, "grad_norm": 0.10586658865213394, "learning_rate": 2.7842000000000002e-05, "loss": 0.0041, "step": 16479 }, { "epoch": 18.189950303699614, "grad_norm": 0.26195529103279114, "learning_rate": 2.7841666666666668e-05, "loss": 0.0085, "step": 16480 }, { "epoch": 
18.191054665930427, "grad_norm": 0.1383812576532364, "learning_rate": 2.7841333333333334e-05, "loss": 0.0036, "step": 16481 }, { "epoch": 18.192159028161235, "grad_norm": 0.12070032954216003, "learning_rate": 2.7841000000000003e-05, "loss": 0.0047, "step": 16482 }, { "epoch": 18.193263390392048, "grad_norm": 0.28075772523880005, "learning_rate": 2.7840666666666666e-05, "loss": 0.0068, "step": 16483 }, { "epoch": 18.19436775262286, "grad_norm": 0.16021311283111572, "learning_rate": 2.7840333333333335e-05, "loss": 0.0058, "step": 16484 }, { "epoch": 18.195472114853672, "grad_norm": 0.30340659618377686, "learning_rate": 2.784e-05, "loss": 0.0076, "step": 16485 }, { "epoch": 18.196576477084484, "grad_norm": 0.20111246407032013, "learning_rate": 2.7839666666666667e-05, "loss": 0.0081, "step": 16486 }, { "epoch": 18.197680839315296, "grad_norm": 0.21945618093013763, "learning_rate": 2.7839333333333333e-05, "loss": 0.0037, "step": 16487 }, { "epoch": 18.19878520154611, "grad_norm": 0.1371823400259018, "learning_rate": 2.7839000000000002e-05, "loss": 0.0043, "step": 16488 }, { "epoch": 18.199889563776917, "grad_norm": 0.11823725700378418, "learning_rate": 2.7838666666666668e-05, "loss": 0.0021, "step": 16489 }, { "epoch": 18.20099392600773, "grad_norm": 0.09113840013742447, "learning_rate": 2.7838333333333334e-05, "loss": 0.002, "step": 16490 }, { "epoch": 18.202098288238542, "grad_norm": 0.31296420097351074, "learning_rate": 2.7838000000000004e-05, "loss": 0.0066, "step": 16491 }, { "epoch": 18.203202650469354, "grad_norm": 0.22097569704055786, "learning_rate": 2.7837666666666666e-05, "loss": 0.0053, "step": 16492 }, { "epoch": 18.204307012700166, "grad_norm": 0.45163533091545105, "learning_rate": 2.7837333333333335e-05, "loss": 0.0241, "step": 16493 }, { "epoch": 18.20541137493098, "grad_norm": 0.20493535697460175, "learning_rate": 2.7836999999999998e-05, "loss": 0.0065, "step": 16494 }, { "epoch": 18.20651573716179, "grad_norm": 0.19473452866077423, "learning_rate": 
2.7836666666666667e-05, "loss": 0.0026, "step": 16495 }, { "epoch": 18.2076200993926, "grad_norm": 0.3122346103191376, "learning_rate": 2.7836333333333333e-05, "loss": 0.0049, "step": 16496 }, { "epoch": 18.20872446162341, "grad_norm": 1.1005667448043823, "learning_rate": 2.7836e-05, "loss": 0.0087, "step": 16497 }, { "epoch": 18.209828823854224, "grad_norm": 0.14145827293395996, "learning_rate": 2.783566666666667e-05, "loss": 0.004, "step": 16498 }, { "epoch": 18.210933186085036, "grad_norm": 0.22001801431179047, "learning_rate": 2.7835333333333334e-05, "loss": 0.0077, "step": 16499 }, { "epoch": 18.21203754831585, "grad_norm": 0.2706786096096039, "learning_rate": 2.7835e-05, "loss": 0.0069, "step": 16500 }, { "epoch": 18.21314191054666, "grad_norm": 0.25521981716156006, "learning_rate": 2.7834666666666666e-05, "loss": 0.004, "step": 16501 }, { "epoch": 18.214246272777473, "grad_norm": 0.7705488801002502, "learning_rate": 2.7834333333333336e-05, "loss": 0.0091, "step": 16502 }, { "epoch": 18.21535063500828, "grad_norm": 0.1554669886827469, "learning_rate": 2.7833999999999998e-05, "loss": 0.0045, "step": 16503 }, { "epoch": 18.216454997239094, "grad_norm": 0.19467052817344666, "learning_rate": 2.7833666666666667e-05, "loss": 0.005, "step": 16504 }, { "epoch": 18.217559359469906, "grad_norm": 0.2216513454914093, "learning_rate": 2.7833333333333337e-05, "loss": 0.0055, "step": 16505 }, { "epoch": 18.218663721700718, "grad_norm": 0.2498120218515396, "learning_rate": 2.7833e-05, "loss": 0.0063, "step": 16506 }, { "epoch": 18.21976808393153, "grad_norm": 0.3240760266780853, "learning_rate": 2.783266666666667e-05, "loss": 0.0071, "step": 16507 }, { "epoch": 18.220872446162343, "grad_norm": 0.5077747106552124, "learning_rate": 2.7832333333333335e-05, "loss": 0.0048, "step": 16508 }, { "epoch": 18.22197680839315, "grad_norm": 1.1115891933441162, "learning_rate": 2.7832e-05, "loss": 0.1166, "step": 16509 }, { "epoch": 18.223081170623963, "grad_norm": 0.5299011468887329, 
"learning_rate": 2.7831666666666666e-05, "loss": 0.0936, "step": 16510 }, { "epoch": 18.224185532854776, "grad_norm": 0.4388360381126404, "learning_rate": 2.7831333333333336e-05, "loss": 0.0792, "step": 16511 }, { "epoch": 18.225289895085588, "grad_norm": 0.39631038904190063, "learning_rate": 2.7831e-05, "loss": 0.0986, "step": 16512 }, { "epoch": 18.2263942573164, "grad_norm": 0.2683297097682953, "learning_rate": 2.7830666666666668e-05, "loss": 0.0352, "step": 16513 }, { "epoch": 18.227498619547212, "grad_norm": 0.4535396695137024, "learning_rate": 2.7830333333333337e-05, "loss": 0.0555, "step": 16514 }, { "epoch": 18.228602981778025, "grad_norm": 1.0559128522872925, "learning_rate": 2.783e-05, "loss": 0.0437, "step": 16515 }, { "epoch": 18.229707344008833, "grad_norm": 0.33239302039146423, "learning_rate": 2.782966666666667e-05, "loss": 0.0617, "step": 16516 }, { "epoch": 18.230811706239646, "grad_norm": 0.23592983186244965, "learning_rate": 2.7829333333333335e-05, "loss": 0.0137, "step": 16517 }, { "epoch": 18.231916068470458, "grad_norm": 0.1345471292734146, "learning_rate": 2.7829e-05, "loss": 0.0246, "step": 16518 }, { "epoch": 18.23302043070127, "grad_norm": 0.7929454445838928, "learning_rate": 2.7828666666666667e-05, "loss": 0.0417, "step": 16519 }, { "epoch": 18.234124792932082, "grad_norm": 0.28331485390663147, "learning_rate": 2.7828333333333336e-05, "loss": 0.0086, "step": 16520 }, { "epoch": 18.235229155162894, "grad_norm": 0.229216068983078, "learning_rate": 2.7828e-05, "loss": 0.0126, "step": 16521 }, { "epoch": 18.236333517393707, "grad_norm": 0.4031899571418762, "learning_rate": 2.7827666666666668e-05, "loss": 0.0069, "step": 16522 }, { "epoch": 18.237437879624515, "grad_norm": 0.2051623910665512, "learning_rate": 2.7827333333333334e-05, "loss": 0.0097, "step": 16523 }, { "epoch": 18.238542241855328, "grad_norm": 0.1582951843738556, "learning_rate": 2.7827e-05, "loss": 0.0059, "step": 16524 }, { "epoch": 18.23964660408614, "grad_norm": 
0.1978953629732132, "learning_rate": 2.782666666666667e-05, "loss": 0.0053, "step": 16525 }, { "epoch": 18.240750966316952, "grad_norm": 0.5283307433128357, "learning_rate": 2.782633333333333e-05, "loss": 0.0136, "step": 16526 }, { "epoch": 18.241855328547764, "grad_norm": 0.15878333151340485, "learning_rate": 2.7826e-05, "loss": 0.0095, "step": 16527 }, { "epoch": 18.242959690778576, "grad_norm": 0.13316145539283752, "learning_rate": 2.7825666666666667e-05, "loss": 0.0057, "step": 16528 }, { "epoch": 18.24406405300939, "grad_norm": 0.92164546251297, "learning_rate": 2.7825333333333333e-05, "loss": 0.0109, "step": 16529 }, { "epoch": 18.245168415240197, "grad_norm": 0.1392344832420349, "learning_rate": 2.7825000000000002e-05, "loss": 0.0053, "step": 16530 }, { "epoch": 18.24627277747101, "grad_norm": 0.23197799921035767, "learning_rate": 2.7824666666666668e-05, "loss": 0.0069, "step": 16531 }, { "epoch": 18.247377139701822, "grad_norm": 0.49621888995170593, "learning_rate": 2.7824333333333334e-05, "loss": 0.0052, "step": 16532 }, { "epoch": 18.248481501932634, "grad_norm": 0.15718033909797668, "learning_rate": 2.7824e-05, "loss": 0.0032, "step": 16533 }, { "epoch": 18.249585864163446, "grad_norm": 0.14199678599834442, "learning_rate": 2.782366666666667e-05, "loss": 0.0054, "step": 16534 }, { "epoch": 18.25069022639426, "grad_norm": 0.179710254073143, "learning_rate": 2.7823333333333332e-05, "loss": 0.0321, "step": 16535 }, { "epoch": 18.25179458862507, "grad_norm": 0.2654048502445221, "learning_rate": 2.7823e-05, "loss": 0.0063, "step": 16536 }, { "epoch": 18.25289895085588, "grad_norm": 0.5499857664108276, "learning_rate": 2.7822666666666667e-05, "loss": 0.0085, "step": 16537 }, { "epoch": 18.25400331308669, "grad_norm": 0.13914074003696442, "learning_rate": 2.7822333333333333e-05, "loss": 0.0047, "step": 16538 }, { "epoch": 18.255107675317504, "grad_norm": 0.14283387362957, "learning_rate": 2.7822000000000002e-05, "loss": 0.0074, "step": 16539 }, { "epoch": 
18.256212037548316, "grad_norm": 0.13908539712429047, "learning_rate": 2.7821666666666668e-05, "loss": 0.0064, "step": 16540 }, { "epoch": 18.25731639977913, "grad_norm": 0.237095445394516, "learning_rate": 2.7821333333333334e-05, "loss": 0.0067, "step": 16541 }, { "epoch": 18.25842076200994, "grad_norm": 0.29832926392555237, "learning_rate": 2.7821e-05, "loss": 0.0111, "step": 16542 }, { "epoch": 18.25952512424075, "grad_norm": 0.24461643397808075, "learning_rate": 2.782066666666667e-05, "loss": 0.0069, "step": 16543 }, { "epoch": 18.26062948647156, "grad_norm": 0.44894811511039734, "learning_rate": 2.7820333333333332e-05, "loss": 0.0102, "step": 16544 }, { "epoch": 18.261733848702374, "grad_norm": 0.11869502067565918, "learning_rate": 2.782e-05, "loss": 0.0058, "step": 16545 }, { "epoch": 18.262838210933186, "grad_norm": 0.19673806428909302, "learning_rate": 2.7819666666666667e-05, "loss": 0.0036, "step": 16546 }, { "epoch": 18.263942573163998, "grad_norm": 0.37017548084259033, "learning_rate": 2.7819333333333333e-05, "loss": 0.0112, "step": 16547 }, { "epoch": 18.26504693539481, "grad_norm": 0.11542703956365585, "learning_rate": 2.7819000000000002e-05, "loss": 0.0038, "step": 16548 }, { "epoch": 18.266151297625623, "grad_norm": 0.3633812367916107, "learning_rate": 2.781866666666667e-05, "loss": 0.0086, "step": 16549 }, { "epoch": 18.26725565985643, "grad_norm": 0.2624844014644623, "learning_rate": 2.7818333333333334e-05, "loss": 0.0119, "step": 16550 }, { "epoch": 18.268360022087244, "grad_norm": 0.24992607533931732, "learning_rate": 2.7818e-05, "loss": 0.007, "step": 16551 }, { "epoch": 18.269464384318056, "grad_norm": 0.48590701818466187, "learning_rate": 2.781766666666667e-05, "loss": 0.0092, "step": 16552 }, { "epoch": 18.270568746548868, "grad_norm": 0.2051326334476471, "learning_rate": 2.7817333333333332e-05, "loss": 0.005, "step": 16553 }, { "epoch": 18.27167310877968, "grad_norm": 0.22301509976387024, "learning_rate": 2.7817e-05, "loss": 0.0089, "step": 
16554 }, { "epoch": 18.272777471010492, "grad_norm": 0.7416911721229553, "learning_rate": 2.7816666666666667e-05, "loss": 0.011, "step": 16555 }, { "epoch": 18.273881833241305, "grad_norm": 0.6340847015380859, "learning_rate": 2.7816333333333333e-05, "loss": 0.0064, "step": 16556 }, { "epoch": 18.274986195472113, "grad_norm": 0.26626166701316833, "learning_rate": 2.7816000000000003e-05, "loss": 0.0106, "step": 16557 }, { "epoch": 18.276090557702926, "grad_norm": 0.18924418091773987, "learning_rate": 2.7815666666666665e-05, "loss": 0.0071, "step": 16558 }, { "epoch": 18.277194919933738, "grad_norm": 0.5109131932258606, "learning_rate": 2.7815333333333335e-05, "loss": 0.1221, "step": 16559 }, { "epoch": 18.27829928216455, "grad_norm": 0.5652816891670227, "learning_rate": 2.7815e-05, "loss": 0.161, "step": 16560 }, { "epoch": 18.279403644395362, "grad_norm": 0.46635591983795166, "learning_rate": 2.7814666666666666e-05, "loss": 0.0874, "step": 16561 }, { "epoch": 18.280508006626174, "grad_norm": 0.3949155807495117, "learning_rate": 2.7814333333333332e-05, "loss": 0.0863, "step": 16562 }, { "epoch": 18.281612368856987, "grad_norm": 0.46673235297203064, "learning_rate": 2.7814000000000002e-05, "loss": 0.0526, "step": 16563 }, { "epoch": 18.282716731087795, "grad_norm": 0.3973985016345978, "learning_rate": 2.7813666666666668e-05, "loss": 0.0389, "step": 16564 }, { "epoch": 18.283821093318608, "grad_norm": 0.2916162312030792, "learning_rate": 2.7813333333333334e-05, "loss": 0.0355, "step": 16565 }, { "epoch": 18.28492545554942, "grad_norm": 0.3505890965461731, "learning_rate": 2.7813000000000003e-05, "loss": 0.0224, "step": 16566 }, { "epoch": 18.286029817780232, "grad_norm": 0.4385191798210144, "learning_rate": 2.7812666666666665e-05, "loss": 0.0236, "step": 16567 }, { "epoch": 18.287134180011044, "grad_norm": 0.4354017972946167, "learning_rate": 2.7812333333333335e-05, "loss": 0.0258, "step": 16568 }, { "epoch": 18.288238542241857, "grad_norm": 0.26973649859428406, 
"learning_rate": 2.7812e-05, "loss": 0.0091, "step": 16569 }, { "epoch": 18.28934290447267, "grad_norm": 0.24596761167049408, "learning_rate": 2.7811666666666667e-05, "loss": 0.0112, "step": 16570 }, { "epoch": 18.290447266703477, "grad_norm": 0.4227635860443115, "learning_rate": 2.7811333333333333e-05, "loss": 0.0146, "step": 16571 }, { "epoch": 18.29155162893429, "grad_norm": 0.2624390721321106, "learning_rate": 2.7811000000000002e-05, "loss": 0.0084, "step": 16572 }, { "epoch": 18.292655991165102, "grad_norm": 0.1765391230583191, "learning_rate": 2.7810666666666668e-05, "loss": 0.0087, "step": 16573 }, { "epoch": 18.293760353395914, "grad_norm": 0.1340494006872177, "learning_rate": 2.7810333333333334e-05, "loss": 0.0057, "step": 16574 }, { "epoch": 18.294864715626726, "grad_norm": 0.2833455502986908, "learning_rate": 2.7810000000000003e-05, "loss": 0.0045, "step": 16575 }, { "epoch": 18.29596907785754, "grad_norm": 0.2542673349380493, "learning_rate": 2.7809666666666666e-05, "loss": 0.0132, "step": 16576 }, { "epoch": 18.297073440088347, "grad_norm": 0.3518531024456024, "learning_rate": 2.7809333333333335e-05, "loss": 0.0072, "step": 16577 }, { "epoch": 18.29817780231916, "grad_norm": 0.19253697991371155, "learning_rate": 2.7809e-05, "loss": 0.007, "step": 16578 }, { "epoch": 18.29928216454997, "grad_norm": 0.22219660878181458, "learning_rate": 2.7808666666666667e-05, "loss": 0.0111, "step": 16579 }, { "epoch": 18.300386526780784, "grad_norm": 0.3271651268005371, "learning_rate": 2.7808333333333336e-05, "loss": 0.0066, "step": 16580 }, { "epoch": 18.301490889011596, "grad_norm": 0.09325528889894485, "learning_rate": 2.7808000000000002e-05, "loss": 0.005, "step": 16581 }, { "epoch": 18.30259525124241, "grad_norm": 0.23139452934265137, "learning_rate": 2.7807666666666668e-05, "loss": 0.0041, "step": 16582 }, { "epoch": 18.30369961347322, "grad_norm": 0.22582392394542694, "learning_rate": 2.7807333333333334e-05, "loss": 0.0071, "step": 16583 }, { "epoch": 
18.30480397570403, "grad_norm": 0.2583022117614746, "learning_rate": 2.7807e-05, "loss": 0.0075, "step": 16584 }, { "epoch": 18.30590833793484, "grad_norm": 0.125922292470932, "learning_rate": 2.7806666666666666e-05, "loss": 0.0062, "step": 16585 }, { "epoch": 18.307012700165654, "grad_norm": 0.41028210520744324, "learning_rate": 2.7806333333333335e-05, "loss": 0.0082, "step": 16586 }, { "epoch": 18.308117062396466, "grad_norm": 0.6807757616043091, "learning_rate": 2.7805999999999998e-05, "loss": 0.0076, "step": 16587 }, { "epoch": 18.309221424627278, "grad_norm": 0.18874330818653107, "learning_rate": 2.7805666666666667e-05, "loss": 0.0093, "step": 16588 }, { "epoch": 18.31032578685809, "grad_norm": 0.10901682078838348, "learning_rate": 2.7805333333333336e-05, "loss": 0.0028, "step": 16589 }, { "epoch": 18.311430149088903, "grad_norm": 0.14610478281974792, "learning_rate": 2.7805e-05, "loss": 0.0068, "step": 16590 }, { "epoch": 18.31253451131971, "grad_norm": 0.3834246098995209, "learning_rate": 2.7804666666666668e-05, "loss": 0.0119, "step": 16591 }, { "epoch": 18.313638873550524, "grad_norm": 0.08379629254341125, "learning_rate": 2.7804333333333334e-05, "loss": 0.0025, "step": 16592 }, { "epoch": 18.314743235781336, "grad_norm": 0.3274444043636322, "learning_rate": 2.7804e-05, "loss": 0.0089, "step": 16593 }, { "epoch": 18.315847598012148, "grad_norm": 0.11257975548505783, "learning_rate": 2.7803666666666666e-05, "loss": 0.0035, "step": 16594 }, { "epoch": 18.31695196024296, "grad_norm": 0.3186141550540924, "learning_rate": 2.7803333333333335e-05, "loss": 0.0066, "step": 16595 }, { "epoch": 18.318056322473772, "grad_norm": 0.16397897899150848, "learning_rate": 2.7802999999999998e-05, "loss": 0.005, "step": 16596 }, { "epoch": 18.319160684704585, "grad_norm": 0.1794757843017578, "learning_rate": 2.7802666666666667e-05, "loss": 0.007, "step": 16597 }, { "epoch": 18.320265046935393, "grad_norm": 0.21458393335342407, "learning_rate": 2.7802333333333337e-05, "loss": 
0.0049, "step": 16598 }, { "epoch": 18.321369409166206, "grad_norm": 0.4151145815849304, "learning_rate": 2.7802e-05, "loss": 0.0159, "step": 16599 }, { "epoch": 18.322473771397018, "grad_norm": 0.5812948346138, "learning_rate": 2.780166666666667e-05, "loss": 0.0103, "step": 16600 }, { "epoch": 18.32357813362783, "grad_norm": 0.2579594850540161, "learning_rate": 2.7801333333333334e-05, "loss": 0.0049, "step": 16601 }, { "epoch": 18.324682495858642, "grad_norm": 0.162758007645607, "learning_rate": 2.7801e-05, "loss": 0.0034, "step": 16602 }, { "epoch": 18.325786858089455, "grad_norm": 0.3970440626144409, "learning_rate": 2.7800666666666666e-05, "loss": 0.0108, "step": 16603 }, { "epoch": 18.326891220320267, "grad_norm": 0.2049456536769867, "learning_rate": 2.7800333333333336e-05, "loss": 0.0064, "step": 16604 }, { "epoch": 18.327995582551075, "grad_norm": 0.425815612077713, "learning_rate": 2.78e-05, "loss": 0.0097, "step": 16605 }, { "epoch": 18.329099944781888, "grad_norm": 0.325217068195343, "learning_rate": 2.7799666666666667e-05, "loss": 0.0165, "step": 16606 }, { "epoch": 18.3302043070127, "grad_norm": 0.15358614921569824, "learning_rate": 2.7799333333333337e-05, "loss": 0.006, "step": 16607 }, { "epoch": 18.331308669243512, "grad_norm": 0.2668028473854065, "learning_rate": 2.7799e-05, "loss": 0.0056, "step": 16608 }, { "epoch": 18.332413031474324, "grad_norm": 0.5299717783927917, "learning_rate": 2.779866666666667e-05, "loss": 0.1295, "step": 16609 }, { "epoch": 18.333517393705137, "grad_norm": 0.5128070116043091, "learning_rate": 2.7798333333333335e-05, "loss": 0.104, "step": 16610 }, { "epoch": 18.33462175593595, "grad_norm": 0.5195015072822571, "learning_rate": 2.7798e-05, "loss": 0.084, "step": 16611 }, { "epoch": 18.335726118166757, "grad_norm": 0.7196670770645142, "learning_rate": 2.7797666666666666e-05, "loss": 0.0902, "step": 16612 }, { "epoch": 18.33683048039757, "grad_norm": 0.3324968218803406, "learning_rate": 2.7797333333333332e-05, "loss": 
0.0463, "step": 16613 }, { "epoch": 18.337934842628382, "grad_norm": 0.5161400437355042, "learning_rate": 2.7797e-05, "loss": 0.0704, "step": 16614 }, { "epoch": 18.339039204859194, "grad_norm": 0.288362979888916, "learning_rate": 2.7796666666666668e-05, "loss": 0.0204, "step": 16615 }, { "epoch": 18.340143567090006, "grad_norm": 0.4259110987186432, "learning_rate": 2.7796333333333334e-05, "loss": 0.1091, "step": 16616 }, { "epoch": 18.34124792932082, "grad_norm": 0.11937710642814636, "learning_rate": 2.7796e-05, "loss": 0.0067, "step": 16617 }, { "epoch": 18.342352291551627, "grad_norm": 0.34944167733192444, "learning_rate": 2.779566666666667e-05, "loss": 0.0193, "step": 16618 }, { "epoch": 18.34345665378244, "grad_norm": 0.15663500130176544, "learning_rate": 2.779533333333333e-05, "loss": 0.0252, "step": 16619 }, { "epoch": 18.34456101601325, "grad_norm": 0.21677297353744507, "learning_rate": 2.7795e-05, "loss": 0.0085, "step": 16620 }, { "epoch": 18.345665378244064, "grad_norm": 0.22793224453926086, "learning_rate": 2.7794666666666667e-05, "loss": 0.0084, "step": 16621 }, { "epoch": 18.346769740474876, "grad_norm": 0.2888568639755249, "learning_rate": 2.7794333333333333e-05, "loss": 0.0047, "step": 16622 }, { "epoch": 18.34787410270569, "grad_norm": 0.15953509509563446, "learning_rate": 2.7794000000000002e-05, "loss": 0.0076, "step": 16623 }, { "epoch": 18.3489784649365, "grad_norm": 0.12740002572536469, "learning_rate": 2.7793666666666668e-05, "loss": 0.0043, "step": 16624 }, { "epoch": 18.35008282716731, "grad_norm": 0.31936153769493103, "learning_rate": 2.7793333333333334e-05, "loss": 0.0091, "step": 16625 }, { "epoch": 18.35118718939812, "grad_norm": 0.13811005651950836, "learning_rate": 2.7793e-05, "loss": 0.0042, "step": 16626 }, { "epoch": 18.352291551628934, "grad_norm": 0.12590306997299194, "learning_rate": 2.779266666666667e-05, "loss": 0.0055, "step": 16627 }, { "epoch": 18.353395913859746, "grad_norm": 0.20279239118099213, "learning_rate": 
2.779233333333333e-05, "loss": 0.0026, "step": 16628 }, { "epoch": 18.35450027609056, "grad_norm": 0.38507699966430664, "learning_rate": 2.7792e-05, "loss": 0.0076, "step": 16629 }, { "epoch": 18.35560463832137, "grad_norm": 0.10292212665081024, "learning_rate": 2.779166666666667e-05, "loss": 0.0033, "step": 16630 }, { "epoch": 18.356709000552183, "grad_norm": 0.0836552157998085, "learning_rate": 2.7791333333333333e-05, "loss": 0.0037, "step": 16631 }, { "epoch": 18.35781336278299, "grad_norm": 0.06715799123048782, "learning_rate": 2.7791000000000002e-05, "loss": 0.0026, "step": 16632 }, { "epoch": 18.358917725013804, "grad_norm": 0.2684727907180786, "learning_rate": 2.7790666666666668e-05, "loss": 0.0078, "step": 16633 }, { "epoch": 18.360022087244616, "grad_norm": 0.16708393394947052, "learning_rate": 2.7790333333333334e-05, "loss": 0.0047, "step": 16634 }, { "epoch": 18.361126449475428, "grad_norm": 0.2153683602809906, "learning_rate": 2.779e-05, "loss": 0.0092, "step": 16635 }, { "epoch": 18.36223081170624, "grad_norm": 0.20087264478206635, "learning_rate": 2.778966666666667e-05, "loss": 0.0051, "step": 16636 }, { "epoch": 18.363335173937053, "grad_norm": 0.11983411759138107, "learning_rate": 2.7789333333333332e-05, "loss": 0.0047, "step": 16637 }, { "epoch": 18.364439536167865, "grad_norm": 0.5699657201766968, "learning_rate": 2.7789e-05, "loss": 0.0096, "step": 16638 }, { "epoch": 18.365543898398673, "grad_norm": 0.13657724857330322, "learning_rate": 2.778866666666667e-05, "loss": 0.0062, "step": 16639 }, { "epoch": 18.366648260629486, "grad_norm": 0.29198014736175537, "learning_rate": 2.7788333333333333e-05, "loss": 0.0048, "step": 16640 }, { "epoch": 18.367752622860298, "grad_norm": 0.5056665539741516, "learning_rate": 2.7788000000000002e-05, "loss": 0.0073, "step": 16641 }, { "epoch": 18.36885698509111, "grad_norm": 0.15887193381786346, "learning_rate": 2.7787666666666668e-05, "loss": 0.0061, "step": 16642 }, { "epoch": 18.369961347321922, "grad_norm": 
0.24733175337314606, "learning_rate": 2.7787333333333334e-05, "loss": 0.0095, "step": 16643 }, { "epoch": 18.371065709552735, "grad_norm": 0.2505118250846863, "learning_rate": 2.7787e-05, "loss": 0.0053, "step": 16644 }, { "epoch": 18.372170071783543, "grad_norm": 0.5020521879196167, "learning_rate": 2.7786666666666666e-05, "loss": 0.0108, "step": 16645 }, { "epoch": 18.373274434014355, "grad_norm": 0.1786937564611435, "learning_rate": 2.7786333333333332e-05, "loss": 0.0044, "step": 16646 }, { "epoch": 18.374378796245168, "grad_norm": 0.16811376810073853, "learning_rate": 2.7786e-05, "loss": 0.0067, "step": 16647 }, { "epoch": 18.37548315847598, "grad_norm": 0.17657029628753662, "learning_rate": 2.7785666666666667e-05, "loss": 0.0042, "step": 16648 }, { "epoch": 18.376587520706792, "grad_norm": 0.161658376455307, "learning_rate": 2.7785333333333333e-05, "loss": 0.0044, "step": 16649 }, { "epoch": 18.377691882937604, "grad_norm": 0.3564944267272949, "learning_rate": 2.7785000000000002e-05, "loss": 0.0049, "step": 16650 }, { "epoch": 18.378796245168417, "grad_norm": 0.42180824279785156, "learning_rate": 2.7784666666666665e-05, "loss": 0.0083, "step": 16651 }, { "epoch": 18.379900607399225, "grad_norm": 0.2024696320295334, "learning_rate": 2.7784333333333334e-05, "loss": 0.0052, "step": 16652 }, { "epoch": 18.381004969630037, "grad_norm": 0.18259508907794952, "learning_rate": 2.7784e-05, "loss": 0.0035, "step": 16653 }, { "epoch": 18.38210933186085, "grad_norm": 0.2975197732448578, "learning_rate": 2.7783666666666666e-05, "loss": 0.0105, "step": 16654 }, { "epoch": 18.383213694091662, "grad_norm": 0.3875123858451843, "learning_rate": 2.7783333333333336e-05, "loss": 0.0066, "step": 16655 }, { "epoch": 18.384318056322474, "grad_norm": 0.24575303494930267, "learning_rate": 2.7783e-05, "loss": 0.0201, "step": 16656 }, { "epoch": 18.385422418553286, "grad_norm": 0.2636047303676605, "learning_rate": 2.7782666666666667e-05, "loss": 0.0061, "step": 16657 }, { "epoch": 
18.3865267807841, "grad_norm": 1.8680806159973145, "learning_rate": 2.7782333333333333e-05, "loss": 0.066, "step": 16658 }, { "epoch": 18.387631143014907, "grad_norm": 0.678095281124115, "learning_rate": 2.7782000000000003e-05, "loss": 0.119, "step": 16659 }, { "epoch": 18.38873550524572, "grad_norm": 0.48281529545783997, "learning_rate": 2.7781666666666665e-05, "loss": 0.0986, "step": 16660 }, { "epoch": 18.38983986747653, "grad_norm": 0.31542134284973145, "learning_rate": 2.7781333333333335e-05, "loss": 0.078, "step": 16661 }, { "epoch": 18.390944229707344, "grad_norm": 0.33645594120025635, "learning_rate": 2.7781e-05, "loss": 0.0423, "step": 16662 }, { "epoch": 18.392048591938156, "grad_norm": 0.33575189113616943, "learning_rate": 2.7780666666666666e-05, "loss": 0.0511, "step": 16663 }, { "epoch": 18.39315295416897, "grad_norm": 0.5566197037696838, "learning_rate": 2.7780333333333336e-05, "loss": 0.0419, "step": 16664 }, { "epoch": 18.39425731639978, "grad_norm": 0.2110361009836197, "learning_rate": 2.778e-05, "loss": 0.0183, "step": 16665 }, { "epoch": 18.39536167863059, "grad_norm": 0.16291053593158722, "learning_rate": 2.7779666666666668e-05, "loss": 0.0142, "step": 16666 }, { "epoch": 18.3964660408614, "grad_norm": 0.14279747009277344, "learning_rate": 2.7779333333333334e-05, "loss": 0.0121, "step": 16667 }, { "epoch": 18.397570403092214, "grad_norm": 0.37830284237861633, "learning_rate": 2.7779000000000003e-05, "loss": 0.0324, "step": 16668 }, { "epoch": 18.398674765323026, "grad_norm": 0.23894095420837402, "learning_rate": 2.7778666666666665e-05, "loss": 0.0088, "step": 16669 }, { "epoch": 18.39977912755384, "grad_norm": 0.13610407710075378, "learning_rate": 2.7778333333333335e-05, "loss": 0.0053, "step": 16670 }, { "epoch": 18.40088348978465, "grad_norm": 0.15640300512313843, "learning_rate": 2.7778e-05, "loss": 0.0059, "step": 16671 }, { "epoch": 18.401987852015463, "grad_norm": 0.2108100950717926, "learning_rate": 2.7777666666666667e-05, "loss": 0.0098, 
"step": 16672 }, { "epoch": 18.40309221424627, "grad_norm": 0.08556059747934341, "learning_rate": 2.7777333333333336e-05, "loss": 0.0036, "step": 16673 }, { "epoch": 18.404196576477084, "grad_norm": 0.323410302400589, "learning_rate": 2.7777e-05, "loss": 0.0335, "step": 16674 }, { "epoch": 18.405300938707896, "grad_norm": 0.1614697426557541, "learning_rate": 2.7776666666666668e-05, "loss": 0.0075, "step": 16675 }, { "epoch": 18.406405300938708, "grad_norm": 0.13829539716243744, "learning_rate": 2.7776333333333334e-05, "loss": 0.0052, "step": 16676 }, { "epoch": 18.40750966316952, "grad_norm": 0.21951885521411896, "learning_rate": 2.7776e-05, "loss": 0.0043, "step": 16677 }, { "epoch": 18.408614025400333, "grad_norm": 0.11163002997636795, "learning_rate": 2.7775666666666666e-05, "loss": 0.0034, "step": 16678 }, { "epoch": 18.409718387631145, "grad_norm": 0.09197384864091873, "learning_rate": 2.7775333333333335e-05, "loss": 0.003, "step": 16679 }, { "epoch": 18.410822749861953, "grad_norm": 0.15959393978118896, "learning_rate": 2.7775e-05, "loss": 0.0082, "step": 16680 }, { "epoch": 18.411927112092766, "grad_norm": 0.21196146309375763, "learning_rate": 2.7774666666666667e-05, "loss": 0.0052, "step": 16681 }, { "epoch": 18.413031474323578, "grad_norm": 0.506450355052948, "learning_rate": 2.7774333333333336e-05, "loss": 0.0101, "step": 16682 }, { "epoch": 18.41413583655439, "grad_norm": 0.11787824332714081, "learning_rate": 2.7774e-05, "loss": 0.0044, "step": 16683 }, { "epoch": 18.415240198785202, "grad_norm": 0.48985832929611206, "learning_rate": 2.7773666666666668e-05, "loss": 0.0128, "step": 16684 }, { "epoch": 18.416344561016015, "grad_norm": 0.17959652841091156, "learning_rate": 2.7773333333333334e-05, "loss": 0.0068, "step": 16685 }, { "epoch": 18.417448923246823, "grad_norm": 0.32144391536712646, "learning_rate": 2.7773e-05, "loss": 0.0259, "step": 16686 }, { "epoch": 18.418553285477635, "grad_norm": 0.1033192127943039, "learning_rate": 2.7772666666666666e-05, 
"loss": 0.003, "step": 16687 }, { "epoch": 18.419657647708448, "grad_norm": 0.3135365843772888, "learning_rate": 2.7772333333333335e-05, "loss": 0.0066, "step": 16688 }, { "epoch": 18.42076200993926, "grad_norm": 0.185025155544281, "learning_rate": 2.7772e-05, "loss": 0.0052, "step": 16689 }, { "epoch": 18.421866372170072, "grad_norm": 0.21326857805252075, "learning_rate": 2.7771666666666667e-05, "loss": 0.0101, "step": 16690 }, { "epoch": 18.422970734400884, "grad_norm": 0.4138076603412628, "learning_rate": 2.7771333333333336e-05, "loss": 0.0194, "step": 16691 }, { "epoch": 18.424075096631697, "grad_norm": 0.34029144048690796, "learning_rate": 2.7771e-05, "loss": 0.0044, "step": 16692 }, { "epoch": 18.425179458862505, "grad_norm": 0.1969984769821167, "learning_rate": 2.7770666666666668e-05, "loss": 0.0047, "step": 16693 }, { "epoch": 18.426283821093318, "grad_norm": 0.44592714309692383, "learning_rate": 2.7770333333333334e-05, "loss": 0.0068, "step": 16694 }, { "epoch": 18.42738818332413, "grad_norm": 0.19919820129871368, "learning_rate": 2.777e-05, "loss": 0.0097, "step": 16695 }, { "epoch": 18.428492545554942, "grad_norm": 0.1268121749162674, "learning_rate": 2.7769666666666666e-05, "loss": 0.0036, "step": 16696 }, { "epoch": 18.429596907785754, "grad_norm": 0.33500242233276367, "learning_rate": 2.7769333333333335e-05, "loss": 0.01, "step": 16697 }, { "epoch": 18.430701270016566, "grad_norm": 0.527600884437561, "learning_rate": 2.7769e-05, "loss": 0.0079, "step": 16698 }, { "epoch": 18.43180563224738, "grad_norm": 0.2856169641017914, "learning_rate": 2.7768666666666667e-05, "loss": 0.0096, "step": 16699 }, { "epoch": 18.432909994478187, "grad_norm": 1.0019043684005737, "learning_rate": 2.7768333333333337e-05, "loss": 0.0069, "step": 16700 }, { "epoch": 18.434014356709, "grad_norm": 0.3410310447216034, "learning_rate": 2.7768e-05, "loss": 0.005, "step": 16701 }, { "epoch": 18.435118718939812, "grad_norm": 0.5433790683746338, "learning_rate": 
2.776766666666667e-05, "loss": 0.0082, "step": 16702 }, { "epoch": 18.436223081170624, "grad_norm": 0.3533060550689697, "learning_rate": 2.7767333333333334e-05, "loss": 0.0137, "step": 16703 }, { "epoch": 18.437327443401436, "grad_norm": 0.08738924562931061, "learning_rate": 2.7767e-05, "loss": 0.0025, "step": 16704 }, { "epoch": 18.43843180563225, "grad_norm": 0.48148345947265625, "learning_rate": 2.776666666666667e-05, "loss": 0.0092, "step": 16705 }, { "epoch": 18.43953616786306, "grad_norm": 0.2182726114988327, "learning_rate": 2.7766333333333332e-05, "loss": 0.0126, "step": 16706 }, { "epoch": 18.44064053009387, "grad_norm": 0.46706852316856384, "learning_rate": 2.7766e-05, "loss": 0.0086, "step": 16707 }, { "epoch": 18.44174489232468, "grad_norm": 0.43385598063468933, "learning_rate": 2.7765666666666667e-05, "loss": 0.0058, "step": 16708 }, { "epoch": 18.442849254555494, "grad_norm": 0.54719078540802, "learning_rate": 2.7765333333333333e-05, "loss": 0.1419, "step": 16709 }, { "epoch": 18.443953616786306, "grad_norm": 0.4822554588317871, "learning_rate": 2.7765e-05, "loss": 0.1134, "step": 16710 }, { "epoch": 18.44505797901712, "grad_norm": 0.42421501874923706, "learning_rate": 2.776466666666667e-05, "loss": 0.1316, "step": 16711 }, { "epoch": 18.44616234124793, "grad_norm": 0.3008348047733307, "learning_rate": 2.776433333333333e-05, "loss": 0.0449, "step": 16712 }, { "epoch": 18.447266703478743, "grad_norm": 0.4353739023208618, "learning_rate": 2.7764e-05, "loss": 0.0521, "step": 16713 }, { "epoch": 18.44837106570955, "grad_norm": 0.4821874797344208, "learning_rate": 2.776366666666667e-05, "loss": 0.0459, "step": 16714 }, { "epoch": 18.449475427940364, "grad_norm": 0.4049479365348816, "learning_rate": 2.7763333333333332e-05, "loss": 0.0417, "step": 16715 }, { "epoch": 18.450579790171176, "grad_norm": 0.3099748194217682, "learning_rate": 2.7763e-05, "loss": 0.037, "step": 16716 }, { "epoch": 18.451684152401988, "grad_norm": 0.22856178879737854, 
"learning_rate": 2.7762666666666668e-05, "loss": 0.0177, "step": 16717 }, { "epoch": 18.4527885146328, "grad_norm": 0.07444515824317932, "learning_rate": 2.7762333333333334e-05, "loss": 0.0034, "step": 16718 }, { "epoch": 18.453892876863613, "grad_norm": 0.36272913217544556, "learning_rate": 2.7762e-05, "loss": 0.03, "step": 16719 }, { "epoch": 18.45499723909442, "grad_norm": 0.17500874400138855, "learning_rate": 2.776166666666667e-05, "loss": 0.0117, "step": 16720 }, { "epoch": 18.456101601325233, "grad_norm": 0.13994291424751282, "learning_rate": 2.776133333333333e-05, "loss": 0.0061, "step": 16721 }, { "epoch": 18.457205963556046, "grad_norm": 0.4804685711860657, "learning_rate": 2.7761e-05, "loss": 0.0073, "step": 16722 }, { "epoch": 18.458310325786858, "grad_norm": 0.27410727739334106, "learning_rate": 2.776066666666667e-05, "loss": 0.0134, "step": 16723 }, { "epoch": 18.45941468801767, "grad_norm": 0.17735916376113892, "learning_rate": 2.7760333333333333e-05, "loss": 0.007, "step": 16724 }, { "epoch": 18.460519050248482, "grad_norm": 0.28067806363105774, "learning_rate": 2.7760000000000002e-05, "loss": 0.0081, "step": 16725 }, { "epoch": 18.461623412479295, "grad_norm": 0.15477892756462097, "learning_rate": 2.7759666666666668e-05, "loss": 0.0078, "step": 16726 }, { "epoch": 18.462727774710103, "grad_norm": 0.237526535987854, "learning_rate": 2.7759333333333334e-05, "loss": 0.0082, "step": 16727 }, { "epoch": 18.463832136940916, "grad_norm": 0.15330497920513153, "learning_rate": 2.7759e-05, "loss": 0.0053, "step": 16728 }, { "epoch": 18.464936499171728, "grad_norm": 0.24827656149864197, "learning_rate": 2.775866666666667e-05, "loss": 0.0107, "step": 16729 }, { "epoch": 18.46604086140254, "grad_norm": 0.06070758402347565, "learning_rate": 2.7758333333333335e-05, "loss": 0.0027, "step": 16730 }, { "epoch": 18.467145223633352, "grad_norm": 0.317538321018219, "learning_rate": 2.7758e-05, "loss": 0.0058, "step": 16731 }, { "epoch": 18.468249585864164, "grad_norm": 
0.15947817265987396, "learning_rate": 2.775766666666667e-05, "loss": 0.0071, "step": 16732 }, { "epoch": 18.469353948094977, "grad_norm": 0.1853426992893219, "learning_rate": 2.7757333333333333e-05, "loss": 0.008, "step": 16733 }, { "epoch": 18.470458310325785, "grad_norm": 0.14726737141609192, "learning_rate": 2.7757000000000002e-05, "loss": 0.0087, "step": 16734 }, { "epoch": 18.471562672556598, "grad_norm": 0.5033652782440186, "learning_rate": 2.7756666666666665e-05, "loss": 0.0081, "step": 16735 }, { "epoch": 18.47266703478741, "grad_norm": 0.2685251832008362, "learning_rate": 2.7756333333333334e-05, "loss": 0.0051, "step": 16736 }, { "epoch": 18.473771397018222, "grad_norm": 0.12342411279678345, "learning_rate": 2.7756e-05, "loss": 0.005, "step": 16737 }, { "epoch": 18.474875759249034, "grad_norm": 0.09342852979898453, "learning_rate": 2.7755666666666666e-05, "loss": 0.0032, "step": 16738 }, { "epoch": 18.475980121479846, "grad_norm": 0.2195863127708435, "learning_rate": 2.7755333333333335e-05, "loss": 0.0135, "step": 16739 }, { "epoch": 18.47708448371066, "grad_norm": 0.3860204219818115, "learning_rate": 2.7755e-05, "loss": 0.0149, "step": 16740 }, { "epoch": 18.478188845941467, "grad_norm": 0.29302436113357544, "learning_rate": 2.7754666666666667e-05, "loss": 0.0049, "step": 16741 }, { "epoch": 18.47929320817228, "grad_norm": 0.37726348638534546, "learning_rate": 2.7754333333333333e-05, "loss": 0.0141, "step": 16742 }, { "epoch": 18.480397570403092, "grad_norm": 0.2684502899646759, "learning_rate": 2.7754000000000002e-05, "loss": 0.0079, "step": 16743 }, { "epoch": 18.481501932633904, "grad_norm": 0.21797879040241241, "learning_rate": 2.7753666666666665e-05, "loss": 0.0062, "step": 16744 }, { "epoch": 18.482606294864716, "grad_norm": 0.10972835123538971, "learning_rate": 2.7753333333333334e-05, "loss": 0.0055, "step": 16745 }, { "epoch": 18.48371065709553, "grad_norm": 0.36290591955184937, "learning_rate": 2.7753e-05, "loss": 0.0116, "step": 16746 }, { 
"epoch": 18.48481501932634, "grad_norm": 0.28278809785842896, "learning_rate": 2.7752666666666666e-05, "loss": 0.0089, "step": 16747 }, { "epoch": 18.48591938155715, "grad_norm": 0.07191542536020279, "learning_rate": 2.7752333333333335e-05, "loss": 0.0033, "step": 16748 }, { "epoch": 18.48702374378796, "grad_norm": 0.46285906434059143, "learning_rate": 2.7752e-05, "loss": 0.0171, "step": 16749 }, { "epoch": 18.488128106018774, "grad_norm": 0.2354702204465866, "learning_rate": 2.7751666666666667e-05, "loss": 0.0053, "step": 16750 }, { "epoch": 18.489232468249586, "grad_norm": 0.1466074436903, "learning_rate": 2.7751333333333333e-05, "loss": 0.0058, "step": 16751 }, { "epoch": 18.4903368304804, "grad_norm": 0.20173370838165283, "learning_rate": 2.7751000000000002e-05, "loss": 0.008, "step": 16752 }, { "epoch": 18.49144119271121, "grad_norm": 0.42024388909339905, "learning_rate": 2.7750666666666665e-05, "loss": 0.0146, "step": 16753 }, { "epoch": 18.49254555494202, "grad_norm": 0.34590086340904236, "learning_rate": 2.7750333333333334e-05, "loss": 0.0065, "step": 16754 }, { "epoch": 18.49364991717283, "grad_norm": 0.11078193783760071, "learning_rate": 2.7750000000000004e-05, "loss": 0.0163, "step": 16755 }, { "epoch": 18.494754279403644, "grad_norm": 0.9034886956214905, "learning_rate": 2.7749666666666666e-05, "loss": 0.0129, "step": 16756 }, { "epoch": 18.495858641634456, "grad_norm": 0.20953810214996338, "learning_rate": 2.7749333333333336e-05, "loss": 0.0054, "step": 16757 }, { "epoch": 18.496963003865268, "grad_norm": 0.15234044194221497, "learning_rate": 2.7749e-05, "loss": 0.0035, "step": 16758 }, { "epoch": 18.49806736609608, "grad_norm": 0.6728070378303528, "learning_rate": 2.7748666666666667e-05, "loss": 0.1641, "step": 16759 }, { "epoch": 18.499171728326893, "grad_norm": 0.4200587570667267, "learning_rate": 2.7748333333333333e-05, "loss": 0.085, "step": 16760 }, { "epoch": 18.5002760905577, "grad_norm": 0.3872607946395874, "learning_rate": 
2.7748000000000003e-05, "loss": 0.085, "step": 16761 }, { "epoch": 18.501380452788514, "grad_norm": 0.5208715200424194, "learning_rate": 2.7747666666666665e-05, "loss": 0.0885, "step": 16762 }, { "epoch": 18.502484815019326, "grad_norm": 0.4811229407787323, "learning_rate": 2.7747333333333335e-05, "loss": 0.111, "step": 16763 }, { "epoch": 18.503589177250138, "grad_norm": 0.3614383041858673, "learning_rate": 2.7747000000000004e-05, "loss": 0.0667, "step": 16764 }, { "epoch": 18.50469353948095, "grad_norm": 0.5933361649513245, "learning_rate": 2.7746666666666666e-05, "loss": 0.0276, "step": 16765 }, { "epoch": 18.505797901711762, "grad_norm": 0.3265378773212433, "learning_rate": 2.7746333333333336e-05, "loss": 0.0395, "step": 16766 }, { "epoch": 18.506902263942575, "grad_norm": 0.7170388698577881, "learning_rate": 2.7745999999999998e-05, "loss": 0.0549, "step": 16767 }, { "epoch": 18.508006626173383, "grad_norm": 0.2579864263534546, "learning_rate": 2.7745666666666668e-05, "loss": 0.0413, "step": 16768 }, { "epoch": 18.509110988404196, "grad_norm": 0.11090713739395142, "learning_rate": 2.7745333333333334e-05, "loss": 0.0089, "step": 16769 }, { "epoch": 18.510215350635008, "grad_norm": 0.1675032526254654, "learning_rate": 2.7745e-05, "loss": 0.0074, "step": 16770 }, { "epoch": 18.51131971286582, "grad_norm": 0.1592874526977539, "learning_rate": 2.7744666666666665e-05, "loss": 0.0072, "step": 16771 }, { "epoch": 18.512424075096632, "grad_norm": 0.16912928223609924, "learning_rate": 2.7744333333333335e-05, "loss": 0.007, "step": 16772 }, { "epoch": 18.513528437327444, "grad_norm": 0.18355503678321838, "learning_rate": 2.7744e-05, "loss": 0.038, "step": 16773 }, { "epoch": 18.514632799558257, "grad_norm": 0.23743069171905518, "learning_rate": 2.7743666666666667e-05, "loss": 0.0104, "step": 16774 }, { "epoch": 18.515737161789065, "grad_norm": 0.15583136677742004, "learning_rate": 2.7743333333333336e-05, "loss": 0.0058, "step": 16775 }, { "epoch": 18.516841524019878, 
"grad_norm": 0.1901213675737381, "learning_rate": 2.7743e-05, "loss": 0.0199, "step": 16776 }, { "epoch": 18.51794588625069, "grad_norm": 0.12173200398683548, "learning_rate": 2.7742666666666668e-05, "loss": 0.0061, "step": 16777 }, { "epoch": 18.519050248481502, "grad_norm": 0.10987972468137741, "learning_rate": 2.7742333333333334e-05, "loss": 0.0039, "step": 16778 }, { "epoch": 18.520154610712314, "grad_norm": 0.13461530208587646, "learning_rate": 2.7742e-05, "loss": 0.0064, "step": 16779 }, { "epoch": 18.521258972943127, "grad_norm": 0.21032537519931793, "learning_rate": 2.774166666666667e-05, "loss": 0.0112, "step": 16780 }, { "epoch": 18.52236333517394, "grad_norm": 0.1367097645998001, "learning_rate": 2.7741333333333335e-05, "loss": 0.0053, "step": 16781 }, { "epoch": 18.523467697404747, "grad_norm": 0.23734144866466522, "learning_rate": 2.7741e-05, "loss": 0.0082, "step": 16782 }, { "epoch": 18.52457205963556, "grad_norm": 0.2800363302230835, "learning_rate": 2.7740666666666667e-05, "loss": 0.008, "step": 16783 }, { "epoch": 18.525676421866372, "grad_norm": 0.08570379763841629, "learning_rate": 2.7740333333333336e-05, "loss": 0.003, "step": 16784 }, { "epoch": 18.526780784097184, "grad_norm": 0.4477456510066986, "learning_rate": 2.774e-05, "loss": 0.0104, "step": 16785 }, { "epoch": 18.527885146327996, "grad_norm": 0.3121064305305481, "learning_rate": 2.7739666666666668e-05, "loss": 0.0119, "step": 16786 }, { "epoch": 18.52898950855881, "grad_norm": 0.19603589177131653, "learning_rate": 2.7739333333333334e-05, "loss": 0.0108, "step": 16787 }, { "epoch": 18.53009387078962, "grad_norm": 0.0695730671286583, "learning_rate": 2.7739e-05, "loss": 0.0023, "step": 16788 }, { "epoch": 18.53119823302043, "grad_norm": 0.0961163267493248, "learning_rate": 2.773866666666667e-05, "loss": 0.0022, "step": 16789 }, { "epoch": 18.53230259525124, "grad_norm": 0.22444692254066467, "learning_rate": 2.7738333333333335e-05, "loss": 0.0049, "step": 16790 }, { "epoch": 
18.533406957482054, "grad_norm": 0.09342730790376663, "learning_rate": 2.7738e-05, "loss": 0.0038, "step": 16791 }, { "epoch": 18.534511319712866, "grad_norm": 0.15611512959003448, "learning_rate": 2.7737666666666667e-05, "loss": 0.0027, "step": 16792 }, { "epoch": 18.53561568194368, "grad_norm": 0.27726876735687256, "learning_rate": 2.7737333333333336e-05, "loss": 0.0042, "step": 16793 }, { "epoch": 18.53672004417449, "grad_norm": 0.3357996344566345, "learning_rate": 2.7737e-05, "loss": 0.0122, "step": 16794 }, { "epoch": 18.5378244064053, "grad_norm": 0.12879794836044312, "learning_rate": 2.7736666666666668e-05, "loss": 0.0038, "step": 16795 }, { "epoch": 18.53892876863611, "grad_norm": 0.25700491666793823, "learning_rate": 2.773633333333333e-05, "loss": 0.007, "step": 16796 }, { "epoch": 18.540033130866924, "grad_norm": 0.1249258741736412, "learning_rate": 2.7736e-05, "loss": 0.0028, "step": 16797 }, { "epoch": 18.541137493097736, "grad_norm": 0.12308817356824875, "learning_rate": 2.773566666666667e-05, "loss": 0.0035, "step": 16798 }, { "epoch": 18.542241855328548, "grad_norm": 0.06145276501774788, "learning_rate": 2.7735333333333332e-05, "loss": 0.0011, "step": 16799 }, { "epoch": 18.54334621755936, "grad_norm": 1.0704593658447266, "learning_rate": 2.7735e-05, "loss": 0.0094, "step": 16800 }, { "epoch": 18.544450579790173, "grad_norm": 0.2885560989379883, "learning_rate": 2.7734666666666667e-05, "loss": 0.0125, "step": 16801 }, { "epoch": 18.54555494202098, "grad_norm": 0.5179541707038879, "learning_rate": 2.7734333333333333e-05, "loss": 0.0122, "step": 16802 }, { "epoch": 18.546659304251794, "grad_norm": 0.2604420483112335, "learning_rate": 2.7734e-05, "loss": 0.0082, "step": 16803 }, { "epoch": 18.547763666482606, "grad_norm": 0.3899456262588501, "learning_rate": 2.773366666666667e-05, "loss": 0.0069, "step": 16804 }, { "epoch": 18.548868028713418, "grad_norm": 0.35338714718818665, "learning_rate": 2.7733333333333334e-05, "loss": 0.0061, "step": 16805 }, { 
"epoch": 18.54997239094423, "grad_norm": 1.0230368375778198, "learning_rate": 2.7733e-05, "loss": 0.0072, "step": 16806 }, { "epoch": 18.551076753175042, "grad_norm": 0.3202705681324005, "learning_rate": 2.773266666666667e-05, "loss": 0.0076, "step": 16807 }, { "epoch": 18.552181115405855, "grad_norm": 0.9457979798316956, "learning_rate": 2.7732333333333332e-05, "loss": 0.0148, "step": 16808 }, { "epoch": 18.553285477636663, "grad_norm": 0.6114199757575989, "learning_rate": 2.7732e-05, "loss": 0.1203, "step": 16809 }, { "epoch": 18.554389839867476, "grad_norm": 0.8899255394935608, "learning_rate": 2.7731666666666667e-05, "loss": 0.1144, "step": 16810 }, { "epoch": 18.555494202098288, "grad_norm": 0.5763444304466248, "learning_rate": 2.7731333333333333e-05, "loss": 0.0914, "step": 16811 }, { "epoch": 18.5565985643291, "grad_norm": 0.3029729723930359, "learning_rate": 2.7731e-05, "loss": 0.0428, "step": 16812 }, { "epoch": 18.557702926559912, "grad_norm": 0.4238680899143219, "learning_rate": 2.773066666666667e-05, "loss": 0.052, "step": 16813 }, { "epoch": 18.558807288790724, "grad_norm": 0.35328277945518494, "learning_rate": 2.7730333333333335e-05, "loss": 0.0408, "step": 16814 }, { "epoch": 18.559911651021537, "grad_norm": 0.18783271312713623, "learning_rate": 2.773e-05, "loss": 0.0308, "step": 16815 }, { "epoch": 18.561016013252345, "grad_norm": 0.5352137684822083, "learning_rate": 2.772966666666667e-05, "loss": 0.0249, "step": 16816 }, { "epoch": 18.562120375483158, "grad_norm": 0.2163340300321579, "learning_rate": 2.7729333333333332e-05, "loss": 0.0144, "step": 16817 }, { "epoch": 18.56322473771397, "grad_norm": 0.2337929904460907, "learning_rate": 2.7729e-05, "loss": 0.0114, "step": 16818 }, { "epoch": 18.564329099944782, "grad_norm": 0.23400817811489105, "learning_rate": 2.7728666666666668e-05, "loss": 0.0311, "step": 16819 }, { "epoch": 18.565433462175594, "grad_norm": 0.15706190466880798, "learning_rate": 2.7728333333333334e-05, "loss": 0.007, "step": 16820 
}, { "epoch": 18.566537824406407, "grad_norm": 0.12856949865818024, "learning_rate": 2.7728e-05, "loss": 0.0055, "step": 16821 }, { "epoch": 18.567642186637215, "grad_norm": 0.1235785037279129, "learning_rate": 2.772766666666667e-05, "loss": 0.0037, "step": 16822 }, { "epoch": 18.568746548868027, "grad_norm": 0.16434535384178162, "learning_rate": 2.7727333333333335e-05, "loss": 0.0056, "step": 16823 }, { "epoch": 18.56985091109884, "grad_norm": 0.21930564939975739, "learning_rate": 2.7727e-05, "loss": 0.0082, "step": 16824 }, { "epoch": 18.570955273329652, "grad_norm": 0.15668952465057373, "learning_rate": 2.7726666666666667e-05, "loss": 0.0103, "step": 16825 }, { "epoch": 18.572059635560464, "grad_norm": 0.4981667101383209, "learning_rate": 2.7726333333333333e-05, "loss": 0.0202, "step": 16826 }, { "epoch": 18.573163997791276, "grad_norm": 0.7007930874824524, "learning_rate": 2.7726000000000002e-05, "loss": 0.0087, "step": 16827 }, { "epoch": 18.57426836002209, "grad_norm": 0.12467686086893082, "learning_rate": 2.7725666666666664e-05, "loss": 0.006, "step": 16828 }, { "epoch": 18.575372722252897, "grad_norm": 0.06989388912916183, "learning_rate": 2.7725333333333334e-05, "loss": 0.0038, "step": 16829 }, { "epoch": 18.57647708448371, "grad_norm": 0.23772603273391724, "learning_rate": 2.7725000000000003e-05, "loss": 0.0227, "step": 16830 }, { "epoch": 18.57758144671452, "grad_norm": 0.1710890680551529, "learning_rate": 2.7724666666666666e-05, "loss": 0.0059, "step": 16831 }, { "epoch": 18.578685808945334, "grad_norm": 0.2070159763097763, "learning_rate": 2.7724333333333335e-05, "loss": 0.0081, "step": 16832 }, { "epoch": 18.579790171176146, "grad_norm": 0.19849534332752228, "learning_rate": 2.7724e-05, "loss": 0.0071, "step": 16833 }, { "epoch": 18.58089453340696, "grad_norm": 0.17984215915203094, "learning_rate": 2.7723666666666667e-05, "loss": 0.0066, "step": 16834 }, { "epoch": 18.58199889563777, "grad_norm": 0.27718985080718994, "learning_rate": 
2.7723333333333333e-05, "loss": 0.0173, "step": 16835 }, { "epoch": 18.58310325786858, "grad_norm": 0.18644319474697113, "learning_rate": 2.7723000000000002e-05, "loss": 0.0036, "step": 16836 }, { "epoch": 18.58420762009939, "grad_norm": 0.21193060278892517, "learning_rate": 2.7722666666666665e-05, "loss": 0.0068, "step": 16837 }, { "epoch": 18.585311982330204, "grad_norm": 0.43205568194389343, "learning_rate": 2.7722333333333334e-05, "loss": 0.0148, "step": 16838 }, { "epoch": 18.586416344561016, "grad_norm": 0.4540303945541382, "learning_rate": 2.7722000000000003e-05, "loss": 0.0086, "step": 16839 }, { "epoch": 18.587520706791828, "grad_norm": 0.13756155967712402, "learning_rate": 2.7721666666666666e-05, "loss": 0.005, "step": 16840 }, { "epoch": 18.58862506902264, "grad_norm": 0.11477939039468765, "learning_rate": 2.7721333333333335e-05, "loss": 0.0041, "step": 16841 }, { "epoch": 18.589729431253453, "grad_norm": 0.17867028713226318, "learning_rate": 2.7721e-05, "loss": 0.0088, "step": 16842 }, { "epoch": 18.59083379348426, "grad_norm": 0.3360706567764282, "learning_rate": 2.7720666666666667e-05, "loss": 0.0083, "step": 16843 }, { "epoch": 18.591938155715074, "grad_norm": 0.510956346988678, "learning_rate": 2.7720333333333333e-05, "loss": 0.0096, "step": 16844 }, { "epoch": 18.593042517945886, "grad_norm": 0.23091930150985718, "learning_rate": 2.7720000000000002e-05, "loss": 0.0072, "step": 16845 }, { "epoch": 18.594146880176698, "grad_norm": 0.2381516546010971, "learning_rate": 2.7719666666666665e-05, "loss": 0.0098, "step": 16846 }, { "epoch": 18.59525124240751, "grad_norm": 0.3433516323566437, "learning_rate": 2.7719333333333334e-05, "loss": 0.0045, "step": 16847 }, { "epoch": 18.596355604638322, "grad_norm": 0.19871333241462708, "learning_rate": 2.7719000000000003e-05, "loss": 0.0122, "step": 16848 }, { "epoch": 18.597459966869135, "grad_norm": 0.7252640724182129, "learning_rate": 2.7718666666666666e-05, "loss": 0.0082, "step": 16849 }, { "epoch": 
18.598564329099943, "grad_norm": 0.4048976004123688, "learning_rate": 2.7718333333333335e-05, "loss": 0.0088, "step": 16850 }, { "epoch": 18.599668691330756, "grad_norm": 0.226475328207016, "learning_rate": 2.7718e-05, "loss": 0.0086, "step": 16851 }, { "epoch": 18.600773053561568, "grad_norm": 0.12261185050010681, "learning_rate": 2.7717666666666667e-05, "loss": 0.0035, "step": 16852 }, { "epoch": 18.60187741579238, "grad_norm": 0.14505138993263245, "learning_rate": 2.7717333333333333e-05, "loss": 0.005, "step": 16853 }, { "epoch": 18.602981778023192, "grad_norm": 0.14598038792610168, "learning_rate": 2.7717000000000002e-05, "loss": 0.0055, "step": 16854 }, { "epoch": 18.604086140254005, "grad_norm": 0.4907539486885071, "learning_rate": 2.771666666666667e-05, "loss": 0.0084, "step": 16855 }, { "epoch": 18.605190502484817, "grad_norm": 0.3897579610347748, "learning_rate": 2.7716333333333334e-05, "loss": 0.0117, "step": 16856 }, { "epoch": 18.606294864715625, "grad_norm": 0.5860493779182434, "learning_rate": 2.7716e-05, "loss": 0.0118, "step": 16857 }, { "epoch": 18.607399226946438, "grad_norm": 0.20121382176876068, "learning_rate": 2.7715666666666666e-05, "loss": 0.0078, "step": 16858 }, { "epoch": 18.60850358917725, "grad_norm": 0.5472344160079956, "learning_rate": 2.7715333333333336e-05, "loss": 0.1848, "step": 16859 }, { "epoch": 18.609607951408062, "grad_norm": 0.42589372396469116, "learning_rate": 2.7714999999999998e-05, "loss": 0.1298, "step": 16860 }, { "epoch": 18.610712313638874, "grad_norm": 0.49890971183776855, "learning_rate": 2.7714666666666667e-05, "loss": 0.095, "step": 16861 }, { "epoch": 18.611816675869687, "grad_norm": 0.3594660758972168, "learning_rate": 2.7714333333333333e-05, "loss": 0.0624, "step": 16862 }, { "epoch": 18.612921038100495, "grad_norm": 0.46837174892425537, "learning_rate": 2.7714e-05, "loss": 0.084, "step": 16863 }, { "epoch": 18.614025400331307, "grad_norm": 0.5778030753135681, "learning_rate": 2.771366666666667e-05, "loss": 
0.0268, "step": 16864 }, { "epoch": 18.61512976256212, "grad_norm": 0.5380856394767761, "learning_rate": 2.7713333333333335e-05, "loss": 0.0303, "step": 16865 }, { "epoch": 18.616234124792932, "grad_norm": 0.2788172662258148, "learning_rate": 2.7713e-05, "loss": 0.0377, "step": 16866 }, { "epoch": 18.617338487023744, "grad_norm": 0.2473689764738083, "learning_rate": 2.7712666666666666e-05, "loss": 0.0138, "step": 16867 }, { "epoch": 18.618442849254556, "grad_norm": 0.22496674954891205, "learning_rate": 2.7712333333333336e-05, "loss": 0.0116, "step": 16868 }, { "epoch": 18.61954721148537, "grad_norm": 0.24542319774627686, "learning_rate": 2.7711999999999998e-05, "loss": 0.0315, "step": 16869 }, { "epoch": 18.620651573716177, "grad_norm": 0.5789838433265686, "learning_rate": 2.7711666666666668e-05, "loss": 0.014, "step": 16870 }, { "epoch": 18.62175593594699, "grad_norm": 0.36543944478034973, "learning_rate": 2.7711333333333334e-05, "loss": 0.0276, "step": 16871 }, { "epoch": 18.6228602981778, "grad_norm": 0.5653862953186035, "learning_rate": 2.7711e-05, "loss": 0.0247, "step": 16872 }, { "epoch": 18.623964660408614, "grad_norm": 0.23980291187763214, "learning_rate": 2.771066666666667e-05, "loss": 0.0087, "step": 16873 }, { "epoch": 18.625069022639426, "grad_norm": 0.17273440957069397, "learning_rate": 2.7710333333333335e-05, "loss": 0.0094, "step": 16874 }, { "epoch": 18.62617338487024, "grad_norm": 0.14044839143753052, "learning_rate": 2.771e-05, "loss": 0.0074, "step": 16875 }, { "epoch": 18.62727774710105, "grad_norm": 0.1824047714471817, "learning_rate": 2.7709666666666667e-05, "loss": 0.0089, "step": 16876 }, { "epoch": 18.62838210933186, "grad_norm": 0.14864984154701233, "learning_rate": 2.7709333333333336e-05, "loss": 0.0088, "step": 16877 }, { "epoch": 18.62948647156267, "grad_norm": 0.2590443193912506, "learning_rate": 2.7709e-05, "loss": 0.0092, "step": 16878 }, { "epoch": 18.630590833793484, "grad_norm": 0.10357324033975601, "learning_rate": 
2.7708666666666668e-05, "loss": 0.0066, "step": 16879 }, { "epoch": 18.631695196024296, "grad_norm": 0.18572519719600677, "learning_rate": 2.7708333333333337e-05, "loss": 0.006, "step": 16880 }, { "epoch": 18.63279955825511, "grad_norm": 0.1606464684009552, "learning_rate": 2.7708e-05, "loss": 0.0097, "step": 16881 }, { "epoch": 18.63390392048592, "grad_norm": 0.23829048871994019, "learning_rate": 2.770766666666667e-05, "loss": 0.0096, "step": 16882 }, { "epoch": 18.635008282716733, "grad_norm": 0.2473103404045105, "learning_rate": 2.7707333333333335e-05, "loss": 0.008, "step": 16883 }, { "epoch": 18.63611264494754, "grad_norm": 0.48735418915748596, "learning_rate": 2.7707e-05, "loss": 0.0098, "step": 16884 }, { "epoch": 18.637217007178354, "grad_norm": 0.19125990569591522, "learning_rate": 2.7706666666666667e-05, "loss": 0.0052, "step": 16885 }, { "epoch": 18.638321369409166, "grad_norm": 0.19879664480686188, "learning_rate": 2.7706333333333333e-05, "loss": 0.0043, "step": 16886 }, { "epoch": 18.639425731639978, "grad_norm": 0.2229429930448532, "learning_rate": 2.7706e-05, "loss": 0.0094, "step": 16887 }, { "epoch": 18.64053009387079, "grad_norm": 0.18516413867473602, "learning_rate": 2.7705666666666668e-05, "loss": 0.0054, "step": 16888 }, { "epoch": 18.641634456101603, "grad_norm": 0.3220621943473816, "learning_rate": 2.7705333333333334e-05, "loss": 0.0125, "step": 16889 }, { "epoch": 18.642738818332415, "grad_norm": 0.6164467334747314, "learning_rate": 2.7705e-05, "loss": 0.006, "step": 16890 }, { "epoch": 18.643843180563223, "grad_norm": 0.1738114356994629, "learning_rate": 2.770466666666667e-05, "loss": 0.0029, "step": 16891 }, { "epoch": 18.644947542794036, "grad_norm": 0.4160205125808716, "learning_rate": 2.7704333333333332e-05, "loss": 0.0086, "step": 16892 }, { "epoch": 18.646051905024848, "grad_norm": 0.14835456013679504, "learning_rate": 2.7704e-05, "loss": 0.0044, "step": 16893 }, { "epoch": 18.64715626725566, "grad_norm": 0.17252738773822784, 
"learning_rate": 2.7703666666666667e-05, "loss": 0.0073, "step": 16894 }, { "epoch": 18.648260629486472, "grad_norm": 0.27145659923553467, "learning_rate": 2.7703333333333333e-05, "loss": 0.0092, "step": 16895 }, { "epoch": 18.649364991717285, "grad_norm": 0.08381731063127518, "learning_rate": 2.7703e-05, "loss": 0.0034, "step": 16896 }, { "epoch": 18.650469353948097, "grad_norm": 0.07948891818523407, "learning_rate": 2.7702666666666668e-05, "loss": 0.0021, "step": 16897 }, { "epoch": 18.651573716178905, "grad_norm": 0.2592669725418091, "learning_rate": 2.7702333333333334e-05, "loss": 0.0145, "step": 16898 }, { "epoch": 18.652678078409718, "grad_norm": 0.37660396099090576, "learning_rate": 2.7702e-05, "loss": 0.0062, "step": 16899 }, { "epoch": 18.65378244064053, "grad_norm": 0.2868978679180145, "learning_rate": 2.770166666666667e-05, "loss": 0.0049, "step": 16900 }, { "epoch": 18.654886802871342, "grad_norm": 0.2591526210308075, "learning_rate": 2.7701333333333332e-05, "loss": 0.0046, "step": 16901 }, { "epoch": 18.655991165102154, "grad_norm": 0.15060384571552277, "learning_rate": 2.7701e-05, "loss": 0.0044, "step": 16902 }, { "epoch": 18.657095527332967, "grad_norm": 0.06506836414337158, "learning_rate": 2.7700666666666667e-05, "loss": 0.003, "step": 16903 }, { "epoch": 18.658199889563775, "grad_norm": 0.2035808563232422, "learning_rate": 2.7700333333333333e-05, "loss": 0.0065, "step": 16904 }, { "epoch": 18.659304251794588, "grad_norm": 0.33357489109039307, "learning_rate": 2.7700000000000002e-05, "loss": 0.0116, "step": 16905 }, { "epoch": 18.6604086140254, "grad_norm": 0.24994075298309326, "learning_rate": 2.769966666666667e-05, "loss": 0.0017, "step": 16906 }, { "epoch": 18.661512976256212, "grad_norm": 0.25338590145111084, "learning_rate": 2.7699333333333334e-05, "loss": 0.0089, "step": 16907 }, { "epoch": 18.662617338487024, "grad_norm": 0.3740530014038086, "learning_rate": 2.7699e-05, "loss": 0.0163, "step": 16908 }, { "epoch": 18.663721700717836, 
"grad_norm": 0.512424111366272, "learning_rate": 2.769866666666667e-05, "loss": 0.1356, "step": 16909 }, { "epoch": 18.66482606294865, "grad_norm": 0.4711513817310333, "learning_rate": 2.7698333333333332e-05, "loss": 0.1103, "step": 16910 }, { "epoch": 18.665930425179457, "grad_norm": 0.3778992295265198, "learning_rate": 2.7698e-05, "loss": 0.0777, "step": 16911 }, { "epoch": 18.66703478741027, "grad_norm": 0.5809668302536011, "learning_rate": 2.7697666666666667e-05, "loss": 0.0488, "step": 16912 }, { "epoch": 18.668139149641082, "grad_norm": 0.633773148059845, "learning_rate": 2.7697333333333333e-05, "loss": 0.0638, "step": 16913 }, { "epoch": 18.669243511871894, "grad_norm": 0.3637758493423462, "learning_rate": 2.7697000000000003e-05, "loss": 0.0473, "step": 16914 }, { "epoch": 18.670347874102706, "grad_norm": 0.3088138997554779, "learning_rate": 2.769666666666667e-05, "loss": 0.0257, "step": 16915 }, { "epoch": 18.67145223633352, "grad_norm": 0.34076038002967834, "learning_rate": 2.7696333333333335e-05, "loss": 0.0405, "step": 16916 }, { "epoch": 18.67255659856433, "grad_norm": 0.25519096851348877, "learning_rate": 2.7696e-05, "loss": 0.0305, "step": 16917 }, { "epoch": 18.67366096079514, "grad_norm": 0.2067527174949646, "learning_rate": 2.7695666666666666e-05, "loss": 0.0145, "step": 16918 }, { "epoch": 18.67476532302595, "grad_norm": 0.2737562656402588, "learning_rate": 2.7695333333333332e-05, "loss": 0.0461, "step": 16919 }, { "epoch": 18.675869685256764, "grad_norm": 0.20329439640045166, "learning_rate": 2.7695e-05, "loss": 0.0118, "step": 16920 }, { "epoch": 18.676974047487576, "grad_norm": 0.17999939620494843, "learning_rate": 2.7694666666666668e-05, "loss": 0.0065, "step": 16921 }, { "epoch": 18.67807840971839, "grad_norm": 0.2131507247686386, "learning_rate": 2.7694333333333334e-05, "loss": 0.0083, "step": 16922 }, { "epoch": 18.6791827719492, "grad_norm": 0.451490581035614, "learning_rate": 2.7694000000000003e-05, "loss": 0.0363, "step": 16923 }, { 
"epoch": 18.680287134180013, "grad_norm": 0.16898217797279358, "learning_rate": 2.7693666666666665e-05, "loss": 0.0083, "step": 16924 }, { "epoch": 18.68139149641082, "grad_norm": 0.22934262454509735, "learning_rate": 2.7693333333333335e-05, "loss": 0.0094, "step": 16925 }, { "epoch": 18.682495858641634, "grad_norm": 0.10716550052165985, "learning_rate": 2.7693e-05, "loss": 0.0048, "step": 16926 }, { "epoch": 18.683600220872446, "grad_norm": 0.14448806643486023, "learning_rate": 2.7692666666666667e-05, "loss": 0.0061, "step": 16927 }, { "epoch": 18.684704583103258, "grad_norm": 0.17434650659561157, "learning_rate": 2.7692333333333333e-05, "loss": 0.0045, "step": 16928 }, { "epoch": 18.68580894533407, "grad_norm": 0.3010311722755432, "learning_rate": 2.7692000000000002e-05, "loss": 0.0087, "step": 16929 }, { "epoch": 18.686913307564883, "grad_norm": 0.20728807151317596, "learning_rate": 2.7691666666666668e-05, "loss": 0.0096, "step": 16930 }, { "epoch": 18.68801766979569, "grad_norm": 0.2005084753036499, "learning_rate": 2.7691333333333334e-05, "loss": 0.0055, "step": 16931 }, { "epoch": 18.689122032026503, "grad_norm": 0.06909134238958359, "learning_rate": 2.7691000000000003e-05, "loss": 0.0024, "step": 16932 }, { "epoch": 18.690226394257316, "grad_norm": 0.06654313206672668, "learning_rate": 2.7690666666666666e-05, "loss": 0.0032, "step": 16933 }, { "epoch": 18.691330756488128, "grad_norm": 0.18813256919384003, "learning_rate": 2.7690333333333335e-05, "loss": 0.0034, "step": 16934 }, { "epoch": 18.69243511871894, "grad_norm": 0.4644448757171631, "learning_rate": 2.769e-05, "loss": 0.0055, "step": 16935 }, { "epoch": 18.693539480949752, "grad_norm": 0.22831693291664124, "learning_rate": 2.7689666666666667e-05, "loss": 0.0053, "step": 16936 }, { "epoch": 18.694643843180565, "grad_norm": 0.11832199990749359, "learning_rate": 2.7689333333333333e-05, "loss": 0.0023, "step": 16937 }, { "epoch": 18.695748205411373, "grad_norm": 0.13861128687858582, "learning_rate": 
2.7689000000000002e-05, "loss": 0.0038, "step": 16938 }, { "epoch": 18.696852567642186, "grad_norm": 0.16901862621307373, "learning_rate": 2.7688666666666668e-05, "loss": 0.0063, "step": 16939 }, { "epoch": 18.697956929872998, "grad_norm": 0.21404215693473816, "learning_rate": 2.7688333333333334e-05, "loss": 0.0051, "step": 16940 }, { "epoch": 18.69906129210381, "grad_norm": 0.13887201249599457, "learning_rate": 2.7688000000000003e-05, "loss": 0.0067, "step": 16941 }, { "epoch": 18.700165654334622, "grad_norm": 0.17930938303470612, "learning_rate": 2.7687666666666666e-05, "loss": 0.0032, "step": 16942 }, { "epoch": 18.701270016565434, "grad_norm": 0.10789496451616287, "learning_rate": 2.7687333333333335e-05, "loss": 0.0047, "step": 16943 }, { "epoch": 18.702374378796247, "grad_norm": 0.21317148208618164, "learning_rate": 2.7687e-05, "loss": 0.035, "step": 16944 }, { "epoch": 18.703478741027055, "grad_norm": 0.12344072014093399, "learning_rate": 2.7686666666666667e-05, "loss": 0.0058, "step": 16945 }, { "epoch": 18.704583103257868, "grad_norm": 0.453321635723114, "learning_rate": 2.7686333333333333e-05, "loss": 0.0125, "step": 16946 }, { "epoch": 18.70568746548868, "grad_norm": 0.16991454362869263, "learning_rate": 2.7686e-05, "loss": 0.0049, "step": 16947 }, { "epoch": 18.706791827719492, "grad_norm": 0.09386134147644043, "learning_rate": 2.7685666666666668e-05, "loss": 0.0019, "step": 16948 }, { "epoch": 18.707896189950304, "grad_norm": 0.27196115255355835, "learning_rate": 2.7685333333333334e-05, "loss": 0.0019, "step": 16949 }, { "epoch": 18.709000552181116, "grad_norm": 0.40118125081062317, "learning_rate": 2.7685e-05, "loss": 0.0106, "step": 16950 }, { "epoch": 18.71010491441193, "grad_norm": 0.1332918256521225, "learning_rate": 2.7684666666666666e-05, "loss": 0.0021, "step": 16951 }, { "epoch": 18.711209276642737, "grad_norm": 0.21026518940925598, "learning_rate": 2.7684333333333335e-05, "loss": 0.0043, "step": 16952 }, { "epoch": 18.71231363887355, 
"grad_norm": 0.2092735767364502, "learning_rate": 2.7683999999999998e-05, "loss": 0.0077, "step": 16953 }, { "epoch": 18.713418001104362, "grad_norm": 0.23123696446418762, "learning_rate": 2.7683666666666667e-05, "loss": 0.0082, "step": 16954 }, { "epoch": 18.714522363335174, "grad_norm": 0.4324008524417877, "learning_rate": 2.7683333333333337e-05, "loss": 0.0186, "step": 16955 }, { "epoch": 18.715626725565986, "grad_norm": 0.12710830569267273, "learning_rate": 2.7683e-05, "loss": 0.004, "step": 16956 }, { "epoch": 18.7167310877968, "grad_norm": 0.5764023661613464, "learning_rate": 2.768266666666667e-05, "loss": 0.0077, "step": 16957 }, { "epoch": 18.71783545002761, "grad_norm": 0.2909502685070038, "learning_rate": 2.7682333333333334e-05, "loss": 0.0042, "step": 16958 }, { "epoch": 18.71893981225842, "grad_norm": 0.6257979273796082, "learning_rate": 2.7682e-05, "loss": 0.1279, "step": 16959 }, { "epoch": 18.72004417448923, "grad_norm": 0.40595144033432007, "learning_rate": 2.7681666666666666e-05, "loss": 0.0646, "step": 16960 }, { "epoch": 18.721148536720044, "grad_norm": 0.5085375308990479, "learning_rate": 2.7681333333333336e-05, "loss": 0.0682, "step": 16961 }, { "epoch": 18.722252898950856, "grad_norm": 0.47209563851356506, "learning_rate": 2.7680999999999998e-05, "loss": 0.0879, "step": 16962 }, { "epoch": 18.72335726118167, "grad_norm": 0.2711310088634491, "learning_rate": 2.7680666666666667e-05, "loss": 0.0384, "step": 16963 }, { "epoch": 18.72446162341248, "grad_norm": 0.25442951917648315, "learning_rate": 2.7680333333333337e-05, "loss": 0.0388, "step": 16964 }, { "epoch": 18.725565985643293, "grad_norm": 0.20946873724460602, "learning_rate": 2.768e-05, "loss": 0.0216, "step": 16965 }, { "epoch": 18.7266703478741, "grad_norm": 0.2336626648902893, "learning_rate": 2.767966666666667e-05, "loss": 0.0148, "step": 16966 }, { "epoch": 18.727774710104914, "grad_norm": 0.1904529333114624, "learning_rate": 2.7679333333333335e-05, "loss": 0.01, "step": 16967 }, { 
"epoch": 18.728879072335726, "grad_norm": 0.28284382820129395, "learning_rate": 2.7679e-05, "loss": 0.017, "step": 16968 }, { "epoch": 18.729983434566538, "grad_norm": 0.18224850296974182, "learning_rate": 2.7678666666666666e-05, "loss": 0.0108, "step": 16969 }, { "epoch": 18.73108779679735, "grad_norm": 0.14395193755626678, "learning_rate": 2.7678333333333336e-05, "loss": 0.005, "step": 16970 }, { "epoch": 18.732192159028163, "grad_norm": 0.30413585901260376, "learning_rate": 2.7678e-05, "loss": 0.0106, "step": 16971 }, { "epoch": 18.73329652125897, "grad_norm": 0.23813758790493011, "learning_rate": 2.7677666666666668e-05, "loss": 0.0174, "step": 16972 }, { "epoch": 18.734400883489783, "grad_norm": 0.13346794247627258, "learning_rate": 2.7677333333333337e-05, "loss": 0.006, "step": 16973 }, { "epoch": 18.735505245720596, "grad_norm": 0.10758529603481293, "learning_rate": 2.7677e-05, "loss": 0.0035, "step": 16974 }, { "epoch": 18.736609607951408, "grad_norm": 0.08341973274946213, "learning_rate": 2.767666666666667e-05, "loss": 0.0037, "step": 16975 }, { "epoch": 18.73771397018222, "grad_norm": 0.16189102828502655, "learning_rate": 2.767633333333333e-05, "loss": 0.0069, "step": 16976 }, { "epoch": 18.738818332413032, "grad_norm": 0.24071353673934937, "learning_rate": 2.7676e-05, "loss": 0.0089, "step": 16977 }, { "epoch": 18.739922694643845, "grad_norm": 0.14586122334003448, "learning_rate": 2.7675666666666667e-05, "loss": 0.0052, "step": 16978 }, { "epoch": 18.741027056874653, "grad_norm": 0.07396578043699265, "learning_rate": 2.7675333333333333e-05, "loss": 0.0032, "step": 16979 }, { "epoch": 18.742131419105466, "grad_norm": 0.11508356034755707, "learning_rate": 2.7675000000000002e-05, "loss": 0.0046, "step": 16980 }, { "epoch": 18.743235781336278, "grad_norm": 0.3505542278289795, "learning_rate": 2.7674666666666668e-05, "loss": 0.0216, "step": 16981 }, { "epoch": 18.74434014356709, "grad_norm": 0.15638503432273865, "learning_rate": 2.7674333333333334e-05, "loss": 
0.0059, "step": 16982 }, { "epoch": 18.745444505797902, "grad_norm": 0.30540651082992554, "learning_rate": 2.7674e-05, "loss": 0.0126, "step": 16983 }, { "epoch": 18.746548868028714, "grad_norm": 0.4883645474910736, "learning_rate": 2.767366666666667e-05, "loss": 0.0091, "step": 16984 }, { "epoch": 18.747653230259527, "grad_norm": 0.2551986277103424, "learning_rate": 2.767333333333333e-05, "loss": 0.0052, "step": 16985 }, { "epoch": 18.748757592490335, "grad_norm": 0.19306033849716187, "learning_rate": 2.7673e-05, "loss": 0.0077, "step": 16986 }, { "epoch": 18.749861954721148, "grad_norm": 0.3475167155265808, "learning_rate": 2.7672666666666667e-05, "loss": 0.0071, "step": 16987 }, { "epoch": 18.75096631695196, "grad_norm": 0.26378288865089417, "learning_rate": 2.7672333333333333e-05, "loss": 0.0093, "step": 16988 }, { "epoch": 18.752070679182772, "grad_norm": 0.11569376289844513, "learning_rate": 2.7672000000000002e-05, "loss": 0.0068, "step": 16989 }, { "epoch": 18.753175041413584, "grad_norm": 0.18180282413959503, "learning_rate": 2.7671666666666668e-05, "loss": 0.0056, "step": 16990 }, { "epoch": 18.754279403644396, "grad_norm": 0.10318125039339066, "learning_rate": 2.7671333333333334e-05, "loss": 0.003, "step": 16991 }, { "epoch": 18.75538376587521, "grad_norm": 0.3059731721878052, "learning_rate": 2.7671e-05, "loss": 0.0084, "step": 16992 }, { "epoch": 18.756488128106017, "grad_norm": 0.10577379167079926, "learning_rate": 2.767066666666667e-05, "loss": 0.0029, "step": 16993 }, { "epoch": 18.75759249033683, "grad_norm": 0.12331458926200867, "learning_rate": 2.7670333333333332e-05, "loss": 0.0037, "step": 16994 }, { "epoch": 18.758696852567642, "grad_norm": 0.18771226704120636, "learning_rate": 2.767e-05, "loss": 0.008, "step": 16995 }, { "epoch": 18.759801214798454, "grad_norm": 0.23267318308353424, "learning_rate": 2.7669666666666667e-05, "loss": 0.0085, "step": 16996 }, { "epoch": 18.760905577029266, "grad_norm": 0.31793487071990967, "learning_rate": 
2.7669333333333333e-05, "loss": 0.0111, "step": 16997 }, { "epoch": 18.76200993926008, "grad_norm": 0.23759323358535767, "learning_rate": 2.7669000000000002e-05, "loss": 0.0062, "step": 16998 }, { "epoch": 18.763114301490887, "grad_norm": 0.1791653335094452, "learning_rate": 2.7668666666666668e-05, "loss": 0.003, "step": 16999 }, { "epoch": 18.7642186637217, "grad_norm": 0.12582170963287354, "learning_rate": 2.7668333333333334e-05, "loss": 0.0039, "step": 17000 }, { "epoch": 18.7642186637217, "eval_cer": 0.10791404204577858, "eval_loss": 0.3042711615562439, "eval_runtime": 16.248, "eval_samples_per_second": 18.71, "eval_steps_per_second": 0.615, "eval_wer": 0.37145049884881043, "step": 17000 }, { "epoch": 18.76532302595251, "grad_norm": 0.3825821578502655, "learning_rate": 2.7668e-05, "loss": 0.01, "step": 17001 }, { "epoch": 18.766427388183324, "grad_norm": 0.1748885214328766, "learning_rate": 2.766766666666667e-05, "loss": 0.0053, "step": 17002 }, { "epoch": 18.767531750414136, "grad_norm": 0.0993429645895958, "learning_rate": 2.7667333333333332e-05, "loss": 0.0042, "step": 17003 }, { "epoch": 18.76863611264495, "grad_norm": 0.34976837038993835, "learning_rate": 2.7667e-05, "loss": 0.0111, "step": 17004 }, { "epoch": 18.76974047487576, "grad_norm": 0.1814388930797577, "learning_rate": 2.766666666666667e-05, "loss": 0.0043, "step": 17005 }, { "epoch": 18.77084483710657, "grad_norm": 0.10786035656929016, "learning_rate": 2.7666333333333333e-05, "loss": 0.0041, "step": 17006 }, { "epoch": 18.77194919933738, "grad_norm": 0.15191511809825897, "learning_rate": 2.7666000000000002e-05, "loss": 0.0043, "step": 17007 }, { "epoch": 18.773053561568194, "grad_norm": 0.4426461458206177, "learning_rate": 2.7665666666666665e-05, "loss": 0.0115, "step": 17008 }, { "epoch": 18.774157923799006, "grad_norm": 1.022688627243042, "learning_rate": 2.7665333333333334e-05, "loss": 0.2511, "step": 17009 }, { "epoch": 18.775262286029818, "grad_norm": 0.6333606243133545, "learning_rate": 
2.7665e-05, "loss": 0.118, "step": 17010 }, { "epoch": 18.77636664826063, "grad_norm": 0.4440397024154663, "learning_rate": 2.7664666666666666e-05, "loss": 0.0637, "step": 17011 }, { "epoch": 18.777471010491443, "grad_norm": 0.3780364692211151, "learning_rate": 2.7664333333333332e-05, "loss": 0.0828, "step": 17012 }, { "epoch": 18.77857537272225, "grad_norm": 0.41377416253089905, "learning_rate": 2.7664e-05, "loss": 0.0451, "step": 17013 }, { "epoch": 18.779679734953064, "grad_norm": 0.3814091086387634, "learning_rate": 2.7663666666666667e-05, "loss": 0.0878, "step": 17014 }, { "epoch": 18.780784097183876, "grad_norm": 0.3893442153930664, "learning_rate": 2.7663333333333333e-05, "loss": 0.037, "step": 17015 }, { "epoch": 18.781888459414688, "grad_norm": 0.334139347076416, "learning_rate": 2.7663000000000003e-05, "loss": 0.0623, "step": 17016 }, { "epoch": 18.7829928216455, "grad_norm": 0.3314375877380371, "learning_rate": 2.7662666666666665e-05, "loss": 0.0155, "step": 17017 }, { "epoch": 18.784097183876312, "grad_norm": 0.21994394063949585, "learning_rate": 2.7662333333333335e-05, "loss": 0.0155, "step": 17018 }, { "epoch": 18.785201546107125, "grad_norm": 0.21641312539577484, "learning_rate": 2.7662e-05, "loss": 0.0097, "step": 17019 }, { "epoch": 18.786305908337933, "grad_norm": 0.2451770156621933, "learning_rate": 2.7661666666666666e-05, "loss": 0.0084, "step": 17020 }, { "epoch": 18.787410270568746, "grad_norm": 0.19075056910514832, "learning_rate": 2.7661333333333336e-05, "loss": 0.0066, "step": 17021 }, { "epoch": 18.788514632799558, "grad_norm": 0.19156025350093842, "learning_rate": 2.7661e-05, "loss": 0.0087, "step": 17022 }, { "epoch": 18.78961899503037, "grad_norm": 0.13043059408664703, "learning_rate": 2.7660666666666668e-05, "loss": 0.0044, "step": 17023 }, { "epoch": 18.790723357261182, "grad_norm": 0.0814003273844719, "learning_rate": 2.7660333333333334e-05, "loss": 0.0031, "step": 17024 }, { "epoch": 18.791827719491994, "grad_norm": 
0.21275833249092102, "learning_rate": 2.7660000000000003e-05, "loss": 0.0076, "step": 17025 }, { "epoch": 18.792932081722807, "grad_norm": 0.3018147945404053, "learning_rate": 2.7659666666666665e-05, "loss": 0.007, "step": 17026 }, { "epoch": 18.794036443953615, "grad_norm": 0.39858195185661316, "learning_rate": 2.7659333333333335e-05, "loss": 0.0101, "step": 17027 }, { "epoch": 18.795140806184428, "grad_norm": 0.2530042827129364, "learning_rate": 2.7659e-05, "loss": 0.0121, "step": 17028 }, { "epoch": 18.79624516841524, "grad_norm": 0.22464631497859955, "learning_rate": 2.7658666666666667e-05, "loss": 0.0106, "step": 17029 }, { "epoch": 18.797349530646052, "grad_norm": 0.24351434409618378, "learning_rate": 2.7658333333333336e-05, "loss": 0.0066, "step": 17030 }, { "epoch": 18.798453892876864, "grad_norm": 0.3305325210094452, "learning_rate": 2.7658000000000002e-05, "loss": 0.0053, "step": 17031 }, { "epoch": 18.799558255107677, "grad_norm": 0.19353163242340088, "learning_rate": 2.7657666666666668e-05, "loss": 0.0053, "step": 17032 }, { "epoch": 18.80066261733849, "grad_norm": 0.1835639774799347, "learning_rate": 2.7657333333333334e-05, "loss": 0.0099, "step": 17033 }, { "epoch": 18.801766979569297, "grad_norm": 0.5183178782463074, "learning_rate": 2.7657000000000003e-05, "loss": 0.0084, "step": 17034 }, { "epoch": 18.80287134180011, "grad_norm": 0.24207952618598938, "learning_rate": 2.7656666666666666e-05, "loss": 0.0087, "step": 17035 }, { "epoch": 18.803975704030922, "grad_norm": 0.2648480534553528, "learning_rate": 2.7656333333333335e-05, "loss": 0.0088, "step": 17036 }, { "epoch": 18.805080066261734, "grad_norm": 0.19754698872566223, "learning_rate": 2.7655999999999997e-05, "loss": 0.0039, "step": 17037 }, { "epoch": 18.806184428492546, "grad_norm": 0.2186809629201889, "learning_rate": 2.7655666666666667e-05, "loss": 0.0056, "step": 17038 }, { "epoch": 18.80728879072336, "grad_norm": 0.1379619687795639, "learning_rate": 2.7655333333333336e-05, "loss": 0.0045, 
"step": 17039 }, { "epoch": 18.808393152954167, "grad_norm": 0.09619356691837311, "learning_rate": 2.7655e-05, "loss": 0.0035, "step": 17040 }, { "epoch": 18.80949751518498, "grad_norm": 0.0896795466542244, "learning_rate": 2.7654666666666668e-05, "loss": 0.003, "step": 17041 }, { "epoch": 18.81060187741579, "grad_norm": 0.3682982623577118, "learning_rate": 2.7654333333333334e-05, "loss": 0.0108, "step": 17042 }, { "epoch": 18.811706239646604, "grad_norm": 0.15533225238323212, "learning_rate": 2.7654e-05, "loss": 0.0036, "step": 17043 }, { "epoch": 18.812810601877416, "grad_norm": 0.5936976671218872, "learning_rate": 2.7653666666666666e-05, "loss": 0.0061, "step": 17044 }, { "epoch": 18.81391496410823, "grad_norm": 0.2611916661262512, "learning_rate": 2.7653333333333335e-05, "loss": 0.011, "step": 17045 }, { "epoch": 18.81501932633904, "grad_norm": 0.11225977540016174, "learning_rate": 2.7653e-05, "loss": 0.0031, "step": 17046 }, { "epoch": 18.81612368856985, "grad_norm": 0.2943339943885803, "learning_rate": 2.7652666666666667e-05, "loss": 0.0075, "step": 17047 }, { "epoch": 18.81722805080066, "grad_norm": 0.14817650616168976, "learning_rate": 2.7652333333333336e-05, "loss": 0.0063, "step": 17048 }, { "epoch": 18.818332413031474, "grad_norm": 0.47331690788269043, "learning_rate": 2.7652e-05, "loss": 0.008, "step": 17049 }, { "epoch": 18.819436775262286, "grad_norm": 0.22460423409938812, "learning_rate": 2.7651666666666668e-05, "loss": 0.0068, "step": 17050 }, { "epoch": 18.820541137493098, "grad_norm": 0.09266757220029831, "learning_rate": 2.7651333333333334e-05, "loss": 0.0032, "step": 17051 }, { "epoch": 18.82164549972391, "grad_norm": 0.1698998361825943, "learning_rate": 2.7651e-05, "loss": 0.0062, "step": 17052 }, { "epoch": 18.822749861954723, "grad_norm": 1.0202199220657349, "learning_rate": 2.7650666666666666e-05, "loss": 0.0168, "step": 17053 }, { "epoch": 18.82385422418553, "grad_norm": 0.16963395476341248, "learning_rate": 2.7650333333333335e-05, "loss": 
0.0099, "step": 17054 }, { "epoch": 18.824958586416344, "grad_norm": 0.1578594595193863, "learning_rate": 2.765e-05, "loss": 0.0043, "step": 17055 }, { "epoch": 18.826062948647156, "grad_norm": 0.23334023356437683, "learning_rate": 2.7649666666666667e-05, "loss": 0.0068, "step": 17056 }, { "epoch": 18.827167310877968, "grad_norm": 0.19720086455345154, "learning_rate": 2.7649333333333336e-05, "loss": 0.0076, "step": 17057 }, { "epoch": 18.82827167310878, "grad_norm": 0.2500644028186798, "learning_rate": 2.7649e-05, "loss": 0.0081, "step": 17058 }, { "epoch": 18.829376035339592, "grad_norm": 0.4990770220756531, "learning_rate": 2.764866666666667e-05, "loss": 0.1407, "step": 17059 }, { "epoch": 18.830480397570405, "grad_norm": 0.4067992866039276, "learning_rate": 2.7648333333333334e-05, "loss": 0.0748, "step": 17060 }, { "epoch": 18.831584759801213, "grad_norm": 0.33907440304756165, "learning_rate": 2.7648e-05, "loss": 0.0775, "step": 17061 }, { "epoch": 18.832689122032026, "grad_norm": 0.30516740679740906, "learning_rate": 2.7647666666666666e-05, "loss": 0.0413, "step": 17062 }, { "epoch": 18.833793484262838, "grad_norm": 0.31647470593452454, "learning_rate": 2.7647333333333335e-05, "loss": 0.0407, "step": 17063 }, { "epoch": 18.83489784649365, "grad_norm": 0.4663531184196472, "learning_rate": 2.7647e-05, "loss": 0.06, "step": 17064 }, { "epoch": 18.836002208724462, "grad_norm": 1.225618839263916, "learning_rate": 2.7646666666666667e-05, "loss": 0.038, "step": 17065 }, { "epoch": 18.837106570955275, "grad_norm": 0.3496120572090149, "learning_rate": 2.7646333333333337e-05, "loss": 0.0376, "step": 17066 }, { "epoch": 18.838210933186087, "grad_norm": 0.3745401203632355, "learning_rate": 2.7646e-05, "loss": 0.0244, "step": 17067 }, { "epoch": 18.839315295416895, "grad_norm": 0.23978328704833984, "learning_rate": 2.764566666666667e-05, "loss": 0.0204, "step": 17068 }, { "epoch": 18.840419657647708, "grad_norm": 0.12331222742795944, "learning_rate": 2.764533333333333e-05, 
"loss": 0.0074, "step": 17069 }, { "epoch": 18.84152401987852, "grad_norm": 0.13987912237644196, "learning_rate": 2.7645e-05, "loss": 0.0067, "step": 17070 }, { "epoch": 18.842628382109332, "grad_norm": 0.2473650872707367, "learning_rate": 2.764466666666667e-05, "loss": 0.0339, "step": 17071 }, { "epoch": 18.843732744340144, "grad_norm": 0.16950654983520508, "learning_rate": 2.7644333333333332e-05, "loss": 0.0124, "step": 17072 }, { "epoch": 18.844837106570957, "grad_norm": 0.15871793031692505, "learning_rate": 2.7644e-05, "loss": 0.0067, "step": 17073 }, { "epoch": 18.84594146880177, "grad_norm": 0.49887585639953613, "learning_rate": 2.7643666666666668e-05, "loss": 0.0075, "step": 17074 }, { "epoch": 18.847045831032577, "grad_norm": 0.07479838281869888, "learning_rate": 2.7643333333333334e-05, "loss": 0.0025, "step": 17075 }, { "epoch": 18.84815019326339, "grad_norm": 0.14352618157863617, "learning_rate": 2.7643e-05, "loss": 0.0046, "step": 17076 }, { "epoch": 18.849254555494202, "grad_norm": 0.11407870054244995, "learning_rate": 2.764266666666667e-05, "loss": 0.0059, "step": 17077 }, { "epoch": 18.850358917725014, "grad_norm": 0.09673739969730377, "learning_rate": 2.764233333333333e-05, "loss": 0.0035, "step": 17078 }, { "epoch": 18.851463279955826, "grad_norm": 0.2246798425912857, "learning_rate": 2.7642e-05, "loss": 0.0045, "step": 17079 }, { "epoch": 18.85256764218664, "grad_norm": 0.1321793794631958, "learning_rate": 2.764166666666667e-05, "loss": 0.0053, "step": 17080 }, { "epoch": 18.853672004417447, "grad_norm": 0.24314621090888977, "learning_rate": 2.7641333333333333e-05, "loss": 0.005, "step": 17081 }, { "epoch": 18.85477636664826, "grad_norm": 0.17827492952346802, "learning_rate": 2.7641000000000002e-05, "loss": 0.0074, "step": 17082 }, { "epoch": 18.85588072887907, "grad_norm": 0.44574233889579773, "learning_rate": 2.7640666666666668e-05, "loss": 0.0072, "step": 17083 }, { "epoch": 18.856985091109884, "grad_norm": 0.14552372694015503, "learning_rate": 
2.7640333333333334e-05, "loss": 0.002, "step": 17084 }, { "epoch": 18.858089453340696, "grad_norm": 0.3776402473449707, "learning_rate": 2.764e-05, "loss": 0.0036, "step": 17085 }, { "epoch": 18.85919381557151, "grad_norm": 0.3101734519004822, "learning_rate": 2.763966666666667e-05, "loss": 0.0063, "step": 17086 }, { "epoch": 18.86029817780232, "grad_norm": 0.09812642633914948, "learning_rate": 2.763933333333333e-05, "loss": 0.0045, "step": 17087 }, { "epoch": 18.86140254003313, "grad_norm": 0.18254241347312927, "learning_rate": 2.7639e-05, "loss": 0.0079, "step": 17088 }, { "epoch": 18.86250690226394, "grad_norm": 0.17691770195960999, "learning_rate": 2.763866666666667e-05, "loss": 0.0047, "step": 17089 }, { "epoch": 18.863611264494754, "grad_norm": 0.20043300092220306, "learning_rate": 2.7638333333333333e-05, "loss": 0.0084, "step": 17090 }, { "epoch": 18.864715626725566, "grad_norm": 0.23214691877365112, "learning_rate": 2.7638000000000002e-05, "loss": 0.0066, "step": 17091 }, { "epoch": 18.86581998895638, "grad_norm": 0.21261008083820343, "learning_rate": 2.7637666666666668e-05, "loss": 0.0133, "step": 17092 }, { "epoch": 18.86692435118719, "grad_norm": 0.15865091979503632, "learning_rate": 2.7637333333333334e-05, "loss": 0.0072, "step": 17093 }, { "epoch": 18.868028713418003, "grad_norm": 0.23267658054828644, "learning_rate": 2.7637e-05, "loss": 0.0162, "step": 17094 }, { "epoch": 18.86913307564881, "grad_norm": 0.31994926929473877, "learning_rate": 2.763666666666667e-05, "loss": 0.0112, "step": 17095 }, { "epoch": 18.870237437879624, "grad_norm": 0.1968401074409485, "learning_rate": 2.7636333333333335e-05, "loss": 0.0072, "step": 17096 }, { "epoch": 18.871341800110436, "grad_norm": 0.11811819672584534, "learning_rate": 2.7636e-05, "loss": 0.0036, "step": 17097 }, { "epoch": 18.872446162341248, "grad_norm": 0.11932545900344849, "learning_rate": 2.7635666666666667e-05, "loss": 0.0067, "step": 17098 }, { "epoch": 18.87355052457206, "grad_norm": 
0.1642507165670395, "learning_rate": 2.7635333333333333e-05, "loss": 0.008, "step": 17099 }, { "epoch": 18.874654886802873, "grad_norm": 0.07769779860973358, "learning_rate": 2.7635000000000002e-05, "loss": 0.0032, "step": 17100 }, { "epoch": 18.875759249033685, "grad_norm": 0.20169654488563538, "learning_rate": 2.7634666666666665e-05, "loss": 0.0123, "step": 17101 }, { "epoch": 18.876863611264493, "grad_norm": 0.25162777304649353, "learning_rate": 2.7634333333333334e-05, "loss": 0.0087, "step": 17102 }, { "epoch": 18.877967973495306, "grad_norm": 0.10938058793544769, "learning_rate": 2.7634e-05, "loss": 0.0042, "step": 17103 }, { "epoch": 18.879072335726118, "grad_norm": 0.21452456712722778, "learning_rate": 2.7633666666666666e-05, "loss": 0.0092, "step": 17104 }, { "epoch": 18.88017669795693, "grad_norm": 0.11431162804365158, "learning_rate": 2.7633333333333335e-05, "loss": 0.0033, "step": 17105 }, { "epoch": 18.881281060187742, "grad_norm": 0.4660215675830841, "learning_rate": 2.7633e-05, "loss": 0.0069, "step": 17106 }, { "epoch": 18.882385422418555, "grad_norm": 0.34435391426086426, "learning_rate": 2.7632666666666667e-05, "loss": 0.0084, "step": 17107 }, { "epoch": 18.883489784649363, "grad_norm": 0.33153626322746277, "learning_rate": 2.7632333333333333e-05, "loss": 0.0098, "step": 17108 }, { "epoch": 18.884594146880175, "grad_norm": 0.7479001879692078, "learning_rate": 2.7632000000000002e-05, "loss": 0.1406, "step": 17109 }, { "epoch": 18.885698509110988, "grad_norm": 0.429141640663147, "learning_rate": 2.7631666666666665e-05, "loss": 0.1027, "step": 17110 }, { "epoch": 18.8868028713418, "grad_norm": 0.49710750579833984, "learning_rate": 2.7631333333333334e-05, "loss": 0.0763, "step": 17111 }, { "epoch": 18.887907233572612, "grad_norm": 0.41634130477905273, "learning_rate": 2.7631e-05, "loss": 0.0707, "step": 17112 }, { "epoch": 18.889011595803424, "grad_norm": 0.35999324917793274, "learning_rate": 2.7630666666666666e-05, "loss": 0.0392, "step": 17113 }, { 
"epoch": 18.890115958034237, "grad_norm": 0.43451496958732605, "learning_rate": 2.7630333333333335e-05, "loss": 0.0952, "step": 17114 }, { "epoch": 18.891220320265045, "grad_norm": 0.7006425261497498, "learning_rate": 2.763e-05, "loss": 0.0412, "step": 17115 }, { "epoch": 18.892324682495858, "grad_norm": 0.3322763741016388, "learning_rate": 2.7629666666666667e-05, "loss": 0.0163, "step": 17116 }, { "epoch": 18.89342904472667, "grad_norm": 0.18153144419193268, "learning_rate": 2.7629333333333333e-05, "loss": 0.0137, "step": 17117 }, { "epoch": 18.894533406957482, "grad_norm": 0.21147122979164124, "learning_rate": 2.7629000000000003e-05, "loss": 0.0189, "step": 17118 }, { "epoch": 18.895637769188294, "grad_norm": 0.3669087886810303, "learning_rate": 2.7628666666666665e-05, "loss": 0.0179, "step": 17119 }, { "epoch": 18.896742131419106, "grad_norm": 0.12774059176445007, "learning_rate": 2.7628333333333334e-05, "loss": 0.0057, "step": 17120 }, { "epoch": 18.89784649364992, "grad_norm": 0.11768791824579239, "learning_rate": 2.7628000000000004e-05, "loss": 0.0059, "step": 17121 }, { "epoch": 18.898950855880727, "grad_norm": 0.32720357179641724, "learning_rate": 2.7627666666666666e-05, "loss": 0.0082, "step": 17122 }, { "epoch": 18.90005521811154, "grad_norm": 0.1862080842256546, "learning_rate": 2.7627333333333336e-05, "loss": 0.0072, "step": 17123 }, { "epoch": 18.90115958034235, "grad_norm": 0.7414824366569519, "learning_rate": 2.7627e-05, "loss": 0.0055, "step": 17124 }, { "epoch": 18.902263942573164, "grad_norm": 0.19757089018821716, "learning_rate": 2.7626666666666668e-05, "loss": 0.0068, "step": 17125 }, { "epoch": 18.903368304803976, "grad_norm": 0.1451030671596527, "learning_rate": 2.7626333333333333e-05, "loss": 0.0042, "step": 17126 }, { "epoch": 18.90447266703479, "grad_norm": 0.20547638833522797, "learning_rate": 2.7626e-05, "loss": 0.0078, "step": 17127 }, { "epoch": 18.9055770292656, "grad_norm": 0.2133064717054367, "learning_rate": 2.7625666666666665e-05, 
"loss": 0.0079, "step": 17128 }, { "epoch": 18.90668139149641, "grad_norm": 0.08945445716381073, "learning_rate": 2.7625333333333335e-05, "loss": 0.0028, "step": 17129 }, { "epoch": 18.90778575372722, "grad_norm": 0.21198046207427979, "learning_rate": 2.7625e-05, "loss": 0.0042, "step": 17130 }, { "epoch": 18.908890115958034, "grad_norm": 0.14682337641716003, "learning_rate": 2.7624666666666667e-05, "loss": 0.0083, "step": 17131 }, { "epoch": 18.909994478188846, "grad_norm": 0.22258585691452026, "learning_rate": 2.7624333333333336e-05, "loss": 0.008, "step": 17132 }, { "epoch": 18.91109884041966, "grad_norm": 0.22811518609523773, "learning_rate": 2.7624e-05, "loss": 0.0061, "step": 17133 }, { "epoch": 18.91220320265047, "grad_norm": 0.16634850203990936, "learning_rate": 2.7623666666666668e-05, "loss": 0.0064, "step": 17134 }, { "epoch": 18.913307564881283, "grad_norm": 0.16683577001094818, "learning_rate": 2.7623333333333334e-05, "loss": 0.005, "step": 17135 }, { "epoch": 18.91441192711209, "grad_norm": 0.4664238691329956, "learning_rate": 2.7623e-05, "loss": 0.0357, "step": 17136 }, { "epoch": 18.915516289342904, "grad_norm": 0.29038459062576294, "learning_rate": 2.7622666666666666e-05, "loss": 0.0051, "step": 17137 }, { "epoch": 18.916620651573716, "grad_norm": 0.3739050030708313, "learning_rate": 2.7622333333333335e-05, "loss": 0.0118, "step": 17138 }, { "epoch": 18.917725013804528, "grad_norm": 0.16573427617549896, "learning_rate": 2.7622e-05, "loss": 0.0049, "step": 17139 }, { "epoch": 18.91882937603534, "grad_norm": 0.1337069720029831, "learning_rate": 2.7621666666666667e-05, "loss": 0.0062, "step": 17140 }, { "epoch": 18.919933738266153, "grad_norm": 0.10693511366844177, "learning_rate": 2.7621333333333336e-05, "loss": 0.0031, "step": 17141 }, { "epoch": 18.921038100496965, "grad_norm": 0.18396130204200745, "learning_rate": 2.7621e-05, "loss": 0.0061, "step": 17142 }, { "epoch": 18.922142462727773, "grad_norm": 0.2902246117591858, "learning_rate": 
2.7620666666666668e-05, "loss": 0.0066, "step": 17143 }, { "epoch": 18.923246824958586, "grad_norm": 0.8228580951690674, "learning_rate": 2.7620333333333334e-05, "loss": 0.0114, "step": 17144 }, { "epoch": 18.924351187189398, "grad_norm": 0.16938942670822144, "learning_rate": 2.762e-05, "loss": 0.006, "step": 17145 }, { "epoch": 18.92545554942021, "grad_norm": 0.636877179145813, "learning_rate": 2.761966666666667e-05, "loss": 0.0079, "step": 17146 }, { "epoch": 18.926559911651022, "grad_norm": 0.52969890832901, "learning_rate": 2.7619333333333335e-05, "loss": 0.0065, "step": 17147 }, { "epoch": 18.927664273881835, "grad_norm": 0.23447869718074799, "learning_rate": 2.7619e-05, "loss": 0.0063, "step": 17148 }, { "epoch": 18.928768636112643, "grad_norm": 0.3366374373435974, "learning_rate": 2.7618666666666667e-05, "loss": 0.0051, "step": 17149 }, { "epoch": 18.929872998343455, "grad_norm": 0.13994084298610687, "learning_rate": 2.7618333333333336e-05, "loss": 0.0065, "step": 17150 }, { "epoch": 18.930977360574268, "grad_norm": 0.1690993309020996, "learning_rate": 2.7618e-05, "loss": 0.0054, "step": 17151 }, { "epoch": 18.93208172280508, "grad_norm": 0.2838677763938904, "learning_rate": 2.7617666666666668e-05, "loss": 0.0054, "step": 17152 }, { "epoch": 18.933186085035892, "grad_norm": 0.13875316083431244, "learning_rate": 2.7617333333333334e-05, "loss": 0.0036, "step": 17153 }, { "epoch": 18.934290447266704, "grad_norm": 0.3677055537700653, "learning_rate": 2.7617e-05, "loss": 0.012, "step": 17154 }, { "epoch": 18.935394809497517, "grad_norm": 0.22016113996505737, "learning_rate": 2.761666666666667e-05, "loss": 0.0044, "step": 17155 }, { "epoch": 18.936499171728325, "grad_norm": 0.3348293900489807, "learning_rate": 2.7616333333333335e-05, "loss": 0.0131, "step": 17156 }, { "epoch": 18.937603533959138, "grad_norm": 0.533007025718689, "learning_rate": 2.7616e-05, "loss": 0.0095, "step": 17157 }, { "epoch": 18.93870789618995, "grad_norm": 0.6471430063247681, 
"learning_rate": 2.7615666666666667e-05, "loss": 0.007, "step": 17158 }, { "epoch": 18.939812258420762, "grad_norm": 0.7838242650032043, "learning_rate": 2.7615333333333333e-05, "loss": 0.1665, "step": 17159 }, { "epoch": 18.940916620651574, "grad_norm": 0.5395604968070984, "learning_rate": 2.7615e-05, "loss": 0.11, "step": 17160 }, { "epoch": 18.942020982882386, "grad_norm": 0.4188721179962158, "learning_rate": 2.761466666666667e-05, "loss": 0.0743, "step": 17161 }, { "epoch": 18.9431253451132, "grad_norm": 0.4295811951160431, "learning_rate": 2.761433333333333e-05, "loss": 0.0631, "step": 17162 }, { "epoch": 18.944229707344007, "grad_norm": 0.32483604550361633, "learning_rate": 2.7614e-05, "loss": 0.0616, "step": 17163 }, { "epoch": 18.94533406957482, "grad_norm": 0.2979183793067932, "learning_rate": 2.761366666666667e-05, "loss": 0.0332, "step": 17164 }, { "epoch": 18.946438431805632, "grad_norm": 0.29217612743377686, "learning_rate": 2.7613333333333332e-05, "loss": 0.0284, "step": 17165 }, { "epoch": 18.947542794036444, "grad_norm": 0.25247618556022644, "learning_rate": 2.7613e-05, "loss": 0.0542, "step": 17166 }, { "epoch": 18.948647156267256, "grad_norm": 0.2805611789226532, "learning_rate": 2.7612666666666667e-05, "loss": 0.0217, "step": 17167 }, { "epoch": 18.94975151849807, "grad_norm": 0.3985382914543152, "learning_rate": 2.7612333333333333e-05, "loss": 0.0149, "step": 17168 }, { "epoch": 18.95085588072888, "grad_norm": 0.2552437484264374, "learning_rate": 2.7612e-05, "loss": 0.0214, "step": 17169 }, { "epoch": 18.95196024295969, "grad_norm": 0.15577976405620575, "learning_rate": 2.761166666666667e-05, "loss": 0.0067, "step": 17170 }, { "epoch": 18.9530646051905, "grad_norm": 0.5403329730033875, "learning_rate": 2.7611333333333334e-05, "loss": 0.0097, "step": 17171 }, { "epoch": 18.954168967421314, "grad_norm": 0.22154152393341064, "learning_rate": 2.7611e-05, "loss": 0.0158, "step": 17172 }, { "epoch": 18.955273329652126, "grad_norm": 0.5079947113990784, 
"learning_rate": 2.761066666666667e-05, "loss": 0.0086, "step": 17173 }, { "epoch": 18.95637769188294, "grad_norm": 0.3145400881767273, "learning_rate": 2.7610333333333332e-05, "loss": 0.0083, "step": 17174 }, { "epoch": 18.95748205411375, "grad_norm": 0.21079657971858978, "learning_rate": 2.761e-05, "loss": 0.0103, "step": 17175 }, { "epoch": 18.95858641634456, "grad_norm": 0.4340033233165741, "learning_rate": 2.7609666666666668e-05, "loss": 0.0155, "step": 17176 }, { "epoch": 18.95969077857537, "grad_norm": 0.0909893736243248, "learning_rate": 2.7609333333333333e-05, "loss": 0.0039, "step": 17177 }, { "epoch": 18.960795140806184, "grad_norm": 0.2690953016281128, "learning_rate": 2.7609e-05, "loss": 0.0091, "step": 17178 }, { "epoch": 18.961899503036996, "grad_norm": 0.39020687341690063, "learning_rate": 2.760866666666667e-05, "loss": 0.0069, "step": 17179 }, { "epoch": 18.963003865267808, "grad_norm": 0.2099819779396057, "learning_rate": 2.7608333333333335e-05, "loss": 0.0064, "step": 17180 }, { "epoch": 18.96410822749862, "grad_norm": 0.19385384023189545, "learning_rate": 2.7608e-05, "loss": 0.0076, "step": 17181 }, { "epoch": 18.965212589729433, "grad_norm": 0.24087238311767578, "learning_rate": 2.760766666666667e-05, "loss": 0.0082, "step": 17182 }, { "epoch": 18.96631695196024, "grad_norm": 0.15176445245742798, "learning_rate": 2.7607333333333332e-05, "loss": 0.0061, "step": 17183 }, { "epoch": 18.967421314191053, "grad_norm": 0.6575923562049866, "learning_rate": 2.7607000000000002e-05, "loss": 0.0146, "step": 17184 }, { "epoch": 18.968525676421866, "grad_norm": 0.13475094735622406, "learning_rate": 2.7606666666666668e-05, "loss": 0.0033, "step": 17185 }, { "epoch": 18.969630038652678, "grad_norm": 0.17105071246623993, "learning_rate": 2.7606333333333334e-05, "loss": 0.0058, "step": 17186 }, { "epoch": 18.97073440088349, "grad_norm": 0.3658432066440582, "learning_rate": 2.7606e-05, "loss": 0.009, "step": 17187 }, { "epoch": 18.971838763114302, "grad_norm": 
0.13928435742855072, "learning_rate": 2.7605666666666666e-05, "loss": 0.0045, "step": 17188 }, { "epoch": 18.972943125345115, "grad_norm": 0.17346316576004028, "learning_rate": 2.7605333333333335e-05, "loss": 0.0038, "step": 17189 }, { "epoch": 18.974047487575923, "grad_norm": 0.08162888884544373, "learning_rate": 2.7605e-05, "loss": 0.0026, "step": 17190 }, { "epoch": 18.975151849806736, "grad_norm": 0.745572030544281, "learning_rate": 2.7604666666666667e-05, "loss": 0.0134, "step": 17191 }, { "epoch": 18.976256212037548, "grad_norm": 0.3500547409057617, "learning_rate": 2.7604333333333333e-05, "loss": 0.0063, "step": 17192 }, { "epoch": 18.97736057426836, "grad_norm": 0.12827037274837494, "learning_rate": 2.7604000000000002e-05, "loss": 0.0097, "step": 17193 }, { "epoch": 18.978464936499172, "grad_norm": 0.12333710491657257, "learning_rate": 2.7603666666666665e-05, "loss": 0.0027, "step": 17194 }, { "epoch": 18.979569298729984, "grad_norm": 0.461763858795166, "learning_rate": 2.7603333333333334e-05, "loss": 0.0108, "step": 17195 }, { "epoch": 18.980673660960797, "grad_norm": 0.591278076171875, "learning_rate": 2.7603000000000003e-05, "loss": 0.0129, "step": 17196 }, { "epoch": 18.981778023191605, "grad_norm": 0.31324493885040283, "learning_rate": 2.7602666666666666e-05, "loss": 0.0073, "step": 17197 }, { "epoch": 18.982882385422418, "grad_norm": 0.13762375712394714, "learning_rate": 2.7602333333333335e-05, "loss": 0.0041, "step": 17198 }, { "epoch": 18.98398674765323, "grad_norm": 0.41118544340133667, "learning_rate": 2.7602e-05, "loss": 0.0201, "step": 17199 }, { "epoch": 18.985091109884042, "grad_norm": 0.18193089962005615, "learning_rate": 2.7601666666666667e-05, "loss": 0.0055, "step": 17200 }, { "epoch": 18.986195472114854, "grad_norm": 0.14156779646873474, "learning_rate": 2.7601333333333333e-05, "loss": 0.0045, "step": 17201 }, { "epoch": 18.987299834345666, "grad_norm": 0.28379127383232117, "learning_rate": 2.7601000000000002e-05, "loss": 0.008, "step": 
17202 }, { "epoch": 18.98840419657648, "grad_norm": 0.24753373861312866, "learning_rate": 2.7600666666666665e-05, "loss": 0.0062, "step": 17203 }, { "epoch": 18.989508558807287, "grad_norm": 0.10951485484838486, "learning_rate": 2.7600333333333334e-05, "loss": 0.0031, "step": 17204 }, { "epoch": 18.9906129210381, "grad_norm": 0.18916675448417664, "learning_rate": 2.7600000000000003e-05, "loss": 0.0063, "step": 17205 }, { "epoch": 18.991717283268912, "grad_norm": 0.1754714697599411, "learning_rate": 2.7599666666666666e-05, "loss": 0.0062, "step": 17206 }, { "epoch": 18.992821645499724, "grad_norm": 1.1598526239395142, "learning_rate": 2.7599333333333335e-05, "loss": 0.0137, "step": 17207 }, { "epoch": 18.993926007730536, "grad_norm": 0.19876259565353394, "learning_rate": 2.7599e-05, "loss": 0.0159, "step": 17208 }, { "epoch": 18.99503036996135, "grad_norm": 0.5069618821144104, "learning_rate": 2.7598666666666667e-05, "loss": 0.0746, "step": 17209 }, { "epoch": 18.99613473219216, "grad_norm": 0.16822291910648346, "learning_rate": 2.7598333333333333e-05, "loss": 0.0124, "step": 17210 }, { "epoch": 18.99723909442297, "grad_norm": 0.23627938330173492, "learning_rate": 2.7598000000000002e-05, "loss": 0.0073, "step": 17211 }, { "epoch": 18.99834345665378, "grad_norm": 0.17688417434692383, "learning_rate": 2.7597666666666665e-05, "loss": 0.0073, "step": 17212 }, { "epoch": 18.999447818884594, "grad_norm": 0.6963353157043457, "learning_rate": 2.7597333333333334e-05, "loss": 0.0155, "step": 17213 }, { "epoch": 19.0, "grad_norm": 0.12617114186286926, "learning_rate": 2.7597000000000004e-05, "loss": 0.0022, "step": 17214 }, { "epoch": 19.001104362230812, "grad_norm": 0.41740256547927856, "learning_rate": 2.7596666666666666e-05, "loss": 0.0907, "step": 17215 }, { "epoch": 19.002208724461624, "grad_norm": 0.29434069991111755, "learning_rate": 2.7596333333333335e-05, "loss": 0.0788, "step": 17216 }, { "epoch": 19.003313086692437, "grad_norm": 0.7020551562309265, "learning_rate": 
2.7596e-05, "loss": 0.0912, "step": 17217 }, { "epoch": 19.004417448923245, "grad_norm": 0.369297593832016, "learning_rate": 2.7595666666666667e-05, "loss": 0.0492, "step": 17218 }, { "epoch": 19.005521811154058, "grad_norm": 0.35833242535591125, "learning_rate": 2.7595333333333333e-05, "loss": 0.0553, "step": 17219 }, { "epoch": 19.00662617338487, "grad_norm": 0.27288317680358887, "learning_rate": 2.7595e-05, "loss": 0.0445, "step": 17220 }, { "epoch": 19.007730535615682, "grad_norm": 0.12761190533638, "learning_rate": 2.759466666666667e-05, "loss": 0.0143, "step": 17221 }, { "epoch": 19.008834897846494, "grad_norm": 0.288874089717865, "learning_rate": 2.7594333333333334e-05, "loss": 0.0294, "step": 17222 }, { "epoch": 19.009939260077306, "grad_norm": 0.6003422737121582, "learning_rate": 2.7594e-05, "loss": 0.0333, "step": 17223 }, { "epoch": 19.01104362230812, "grad_norm": 0.16630110144615173, "learning_rate": 2.7593666666666666e-05, "loss": 0.0237, "step": 17224 }, { "epoch": 19.012147984538927, "grad_norm": 0.20393486320972443, "learning_rate": 2.7593333333333336e-05, "loss": 0.0085, "step": 17225 }, { "epoch": 19.01325234676974, "grad_norm": 0.3826097249984741, "learning_rate": 2.7592999999999998e-05, "loss": 0.0124, "step": 17226 }, { "epoch": 19.014356709000552, "grad_norm": 0.13049839437007904, "learning_rate": 2.7592666666666668e-05, "loss": 0.0042, "step": 17227 }, { "epoch": 19.015461071231364, "grad_norm": 0.22304421663284302, "learning_rate": 2.7592333333333333e-05, "loss": 0.005, "step": 17228 }, { "epoch": 19.016565433462176, "grad_norm": 0.09899212419986725, "learning_rate": 2.7592e-05, "loss": 0.004, "step": 17229 }, { "epoch": 19.01766979569299, "grad_norm": 0.20714406669139862, "learning_rate": 2.759166666666667e-05, "loss": 0.0033, "step": 17230 }, { "epoch": 19.0187741579238, "grad_norm": 0.08396444469690323, "learning_rate": 2.7591333333333335e-05, "loss": 0.0041, "step": 17231 }, { "epoch": 19.01987852015461, "grad_norm": 0.12373003363609314, 
"learning_rate": 2.7591e-05, "loss": 0.0176, "step": 17232 }, { "epoch": 19.02098288238542, "grad_norm": 0.08314201235771179, "learning_rate": 2.7590666666666667e-05, "loss": 0.0029, "step": 17233 }, { "epoch": 19.022087244616234, "grad_norm": 0.8214685320854187, "learning_rate": 2.7590333333333336e-05, "loss": 0.0039, "step": 17234 }, { "epoch": 19.023191606847046, "grad_norm": 0.8580189347267151, "learning_rate": 2.759e-05, "loss": 0.0116, "step": 17235 }, { "epoch": 19.02429596907786, "grad_norm": 0.1180344820022583, "learning_rate": 2.7589666666666668e-05, "loss": 0.0024, "step": 17236 }, { "epoch": 19.02540033130867, "grad_norm": 0.26940062642097473, "learning_rate": 2.7589333333333334e-05, "loss": 0.0085, "step": 17237 }, { "epoch": 19.02650469353948, "grad_norm": 0.5618746280670166, "learning_rate": 2.7589e-05, "loss": 0.0053, "step": 17238 }, { "epoch": 19.02760905577029, "grad_norm": 0.13674074411392212, "learning_rate": 2.758866666666667e-05, "loss": 0.0065, "step": 17239 }, { "epoch": 19.028713418001104, "grad_norm": 0.08918391168117523, "learning_rate": 2.7588333333333335e-05, "loss": 0.0019, "step": 17240 }, { "epoch": 19.029817780231916, "grad_norm": 0.2706747353076935, "learning_rate": 2.7588e-05, "loss": 0.0093, "step": 17241 }, { "epoch": 19.030922142462728, "grad_norm": 0.0535428449511528, "learning_rate": 2.7587666666666667e-05, "loss": 0.0014, "step": 17242 }, { "epoch": 19.03202650469354, "grad_norm": 0.1818702667951584, "learning_rate": 2.7587333333333336e-05, "loss": 0.0032, "step": 17243 }, { "epoch": 19.033130866924353, "grad_norm": 0.151369109749794, "learning_rate": 2.7587e-05, "loss": 0.0032, "step": 17244 }, { "epoch": 19.03423522915516, "grad_norm": 0.16861870884895325, "learning_rate": 2.7586666666666668e-05, "loss": 0.0037, "step": 17245 }, { "epoch": 19.035339591385974, "grad_norm": 0.44240254163742065, "learning_rate": 2.7586333333333337e-05, "loss": 0.008, "step": 17246 }, { "epoch": 19.036443953616786, "grad_norm": 
0.2830922305583954, "learning_rate": 2.7586e-05, "loss": 0.0062, "step": 17247 }, { "epoch": 19.037548315847598, "grad_norm": 0.05736396834254265, "learning_rate": 2.758566666666667e-05, "loss": 0.002, "step": 17248 }, { "epoch": 19.03865267807841, "grad_norm": 0.2746971845626831, "learning_rate": 2.758533333333333e-05, "loss": 0.0097, "step": 17249 }, { "epoch": 19.039757040309222, "grad_norm": 0.533827543258667, "learning_rate": 2.7585e-05, "loss": 0.0111, "step": 17250 }, { "epoch": 19.040861402540035, "grad_norm": 0.3793279826641083, "learning_rate": 2.7584666666666667e-05, "loss": 0.0141, "step": 17251 }, { "epoch": 19.041965764770843, "grad_norm": 0.741728663444519, "learning_rate": 2.7584333333333333e-05, "loss": 0.0154, "step": 17252 }, { "epoch": 19.043070127001656, "grad_norm": 0.6211393475532532, "learning_rate": 2.7584e-05, "loss": 0.007, "step": 17253 }, { "epoch": 19.044174489232468, "grad_norm": 0.28187450766563416, "learning_rate": 2.7583666666666668e-05, "loss": 0.0077, "step": 17254 }, { "epoch": 19.04527885146328, "grad_norm": 0.14346574246883392, "learning_rate": 2.7583333333333334e-05, "loss": 0.0039, "step": 17255 }, { "epoch": 19.046383213694092, "grad_norm": 0.32155507802963257, "learning_rate": 2.7583e-05, "loss": 0.0075, "step": 17256 }, { "epoch": 19.047487575924904, "grad_norm": 0.14441247284412384, "learning_rate": 2.758266666666667e-05, "loss": 0.0049, "step": 17257 }, { "epoch": 19.048591938155717, "grad_norm": 0.25607311725616455, "learning_rate": 2.7582333333333332e-05, "loss": 0.007, "step": 17258 }, { "epoch": 19.049696300386525, "grad_norm": 0.2922016382217407, "learning_rate": 2.7582e-05, "loss": 0.0075, "step": 17259 }, { "epoch": 19.050800662617338, "grad_norm": 0.430623859167099, "learning_rate": 2.7581666666666667e-05, "loss": 0.0049, "step": 17260 }, { "epoch": 19.05190502484815, "grad_norm": 0.43729522824287415, "learning_rate": 2.7581333333333333e-05, "loss": 0.0217, "step": 17261 }, { "epoch": 19.053009387078962, 
"grad_norm": 0.6831066012382507, "learning_rate": 2.7581e-05, "loss": 0.013, "step": 17262 }, { "epoch": 19.054113749309774, "grad_norm": 1.4404865503311157, "learning_rate": 2.758066666666667e-05, "loss": 0.0081, "step": 17263 }, { "epoch": 19.055218111540587, "grad_norm": 0.343397319316864, "learning_rate": 2.7580333333333334e-05, "loss": 0.0048, "step": 17264 }, { "epoch": 19.0563224737714, "grad_norm": 0.48101213574409485, "learning_rate": 2.758e-05, "loss": 0.1228, "step": 17265 }, { "epoch": 19.057426836002207, "grad_norm": 0.4678865969181061, "learning_rate": 2.757966666666667e-05, "loss": 0.1326, "step": 17266 }, { "epoch": 19.05853119823302, "grad_norm": 0.49907606840133667, "learning_rate": 2.7579333333333332e-05, "loss": 0.0919, "step": 17267 }, { "epoch": 19.059635560463832, "grad_norm": 0.3582627773284912, "learning_rate": 2.7579e-05, "loss": 0.0743, "step": 17268 }, { "epoch": 19.060739922694644, "grad_norm": 0.6160551905632019, "learning_rate": 2.7578666666666667e-05, "loss": 0.0609, "step": 17269 }, { "epoch": 19.061844284925456, "grad_norm": 0.4169067442417145, "learning_rate": 2.7578333333333333e-05, "loss": 0.0414, "step": 17270 }, { "epoch": 19.06294864715627, "grad_norm": 0.30679550766944885, "learning_rate": 2.7578000000000003e-05, "loss": 0.0234, "step": 17271 }, { "epoch": 19.064053009387077, "grad_norm": 0.2489486038684845, "learning_rate": 2.757766666666667e-05, "loss": 0.0206, "step": 17272 }, { "epoch": 19.06515737161789, "grad_norm": 0.20454256236553192, "learning_rate": 2.7577333333333334e-05, "loss": 0.0126, "step": 17273 }, { "epoch": 19.0662617338487, "grad_norm": 0.13049332797527313, "learning_rate": 2.7577e-05, "loss": 0.0244, "step": 17274 }, { "epoch": 19.067366096079514, "grad_norm": 0.36777904629707336, "learning_rate": 2.757666666666667e-05, "loss": 0.0455, "step": 17275 }, { "epoch": 19.068470458310326, "grad_norm": 0.18252509832382202, "learning_rate": 2.7576333333333332e-05, "loss": 0.0264, "step": 17276 }, { "epoch": 
19.06957482054114, "grad_norm": 0.35901883244514465, "learning_rate": 2.7576e-05, "loss": 0.0072, "step": 17277 }, { "epoch": 19.07067918277195, "grad_norm": 0.3682566285133362, "learning_rate": 2.7575666666666668e-05, "loss": 0.018, "step": 17278 }, { "epoch": 19.07178354500276, "grad_norm": 0.09552264958620071, "learning_rate": 2.7575333333333333e-05, "loss": 0.0039, "step": 17279 }, { "epoch": 19.07288790723357, "grad_norm": 0.1562415510416031, "learning_rate": 2.7575000000000003e-05, "loss": 0.0066, "step": 17280 }, { "epoch": 19.073992269464384, "grad_norm": 0.27496129274368286, "learning_rate": 2.7574666666666665e-05, "loss": 0.0067, "step": 17281 }, { "epoch": 19.075096631695196, "grad_norm": 0.08301985263824463, "learning_rate": 2.7574333333333335e-05, "loss": 0.0023, "step": 17282 }, { "epoch": 19.076200993926008, "grad_norm": 0.19981449842453003, "learning_rate": 2.7574e-05, "loss": 0.0105, "step": 17283 }, { "epoch": 19.07730535615682, "grad_norm": 0.15327788889408112, "learning_rate": 2.7573666666666667e-05, "loss": 0.004, "step": 17284 }, { "epoch": 19.078409718387633, "grad_norm": 0.24597644805908203, "learning_rate": 2.7573333333333332e-05, "loss": 0.0105, "step": 17285 }, { "epoch": 19.07951408061844, "grad_norm": 0.36386042833328247, "learning_rate": 2.7573000000000002e-05, "loss": 0.0134, "step": 17286 }, { "epoch": 19.080618442849254, "grad_norm": 0.5038813948631287, "learning_rate": 2.7572666666666664e-05, "loss": 0.0048, "step": 17287 }, { "epoch": 19.081722805080066, "grad_norm": 0.2449982613325119, "learning_rate": 2.7572333333333334e-05, "loss": 0.0138, "step": 17288 }, { "epoch": 19.082827167310878, "grad_norm": 0.180778369307518, "learning_rate": 2.7572000000000003e-05, "loss": 0.0046, "step": 17289 }, { "epoch": 19.08393152954169, "grad_norm": 0.08961586654186249, "learning_rate": 2.7571666666666666e-05, "loss": 0.0042, "step": 17290 }, { "epoch": 19.085035891772502, "grad_norm": 0.09740296751260757, "learning_rate": 
2.7571333333333335e-05, "loss": 0.0024, "step": 17291 }, { "epoch": 19.086140254003315, "grad_norm": 0.08166579157114029, "learning_rate": 2.7571e-05, "loss": 0.0039, "step": 17292 }, { "epoch": 19.087244616234123, "grad_norm": 0.08818509429693222, "learning_rate": 2.7570666666666667e-05, "loss": 0.0025, "step": 17293 }, { "epoch": 19.088348978464936, "grad_norm": 0.2116122841835022, "learning_rate": 2.7570333333333333e-05, "loss": 0.0091, "step": 17294 }, { "epoch": 19.089453340695748, "grad_norm": 0.12627388536930084, "learning_rate": 2.7570000000000002e-05, "loss": 0.0053, "step": 17295 }, { "epoch": 19.09055770292656, "grad_norm": 0.07873427122831345, "learning_rate": 2.7569666666666668e-05, "loss": 0.004, "step": 17296 }, { "epoch": 19.091662065157372, "grad_norm": 0.18175263702869415, "learning_rate": 2.7569333333333334e-05, "loss": 0.0035, "step": 17297 }, { "epoch": 19.092766427388185, "grad_norm": 0.302868515253067, "learning_rate": 2.7569000000000003e-05, "loss": 0.0105, "step": 17298 }, { "epoch": 19.093870789618997, "grad_norm": 0.22167986631393433, "learning_rate": 2.7568666666666666e-05, "loss": 0.0051, "step": 17299 }, { "epoch": 19.094975151849805, "grad_norm": 0.24380211532115936, "learning_rate": 2.7568333333333335e-05, "loss": 0.0052, "step": 17300 }, { "epoch": 19.096079514080618, "grad_norm": 0.37820354104042053, "learning_rate": 2.7568e-05, "loss": 0.0107, "step": 17301 }, { "epoch": 19.09718387631143, "grad_norm": 0.21267111599445343, "learning_rate": 2.7567666666666667e-05, "loss": 0.0074, "step": 17302 }, { "epoch": 19.098288238542242, "grad_norm": 0.34330952167510986, "learning_rate": 2.7567333333333333e-05, "loss": 0.0047, "step": 17303 }, { "epoch": 19.099392600773054, "grad_norm": 0.6713519096374512, "learning_rate": 2.7567000000000002e-05, "loss": 0.0074, "step": 17304 }, { "epoch": 19.100496963003867, "grad_norm": 0.2112402319908142, "learning_rate": 2.7566666666666668e-05, "loss": 0.0043, "step": 17305 }, { "epoch": 
19.101601325234675, "grad_norm": 0.12700828909873962, "learning_rate": 2.7566333333333334e-05, "loss": 0.0051, "step": 17306 }, { "epoch": 19.102705687465487, "grad_norm": 0.20000143349170685, "learning_rate": 2.7566000000000003e-05, "loss": 0.0055, "step": 17307 }, { "epoch": 19.1038100496963, "grad_norm": 0.3002120852470398, "learning_rate": 2.7565666666666666e-05, "loss": 0.0071, "step": 17308 }, { "epoch": 19.104914411927112, "grad_norm": 0.10936176776885986, "learning_rate": 2.7565333333333335e-05, "loss": 0.0049, "step": 17309 }, { "epoch": 19.106018774157924, "grad_norm": 0.21852810680866241, "learning_rate": 2.7564999999999998e-05, "loss": 0.0058, "step": 17310 }, { "epoch": 19.107123136388736, "grad_norm": 0.2875070869922638, "learning_rate": 2.7564666666666667e-05, "loss": 0.0062, "step": 17311 }, { "epoch": 19.10822749861955, "grad_norm": 0.45342475175857544, "learning_rate": 2.7564333333333333e-05, "loss": 0.011, "step": 17312 }, { "epoch": 19.109331860850357, "grad_norm": 0.1747279018163681, "learning_rate": 2.7564e-05, "loss": 0.0056, "step": 17313 }, { "epoch": 19.11043622308117, "grad_norm": 0.2003619223833084, "learning_rate": 2.756366666666667e-05, "loss": 0.0034, "step": 17314 }, { "epoch": 19.11154058531198, "grad_norm": 0.552297830581665, "learning_rate": 2.7563333333333334e-05, "loss": 0.1506, "step": 17315 }, { "epoch": 19.112644947542794, "grad_norm": 0.496963769197464, "learning_rate": 2.7563e-05, "loss": 0.097, "step": 17316 }, { "epoch": 19.113749309773606, "grad_norm": 0.47073501348495483, "learning_rate": 2.7562666666666666e-05, "loss": 0.0692, "step": 17317 }, { "epoch": 19.11485367200442, "grad_norm": 0.39796698093414307, "learning_rate": 2.7562333333333335e-05, "loss": 0.059, "step": 17318 }, { "epoch": 19.11595803423523, "grad_norm": 0.30055296421051025, "learning_rate": 2.7561999999999998e-05, "loss": 0.0467, "step": 17319 }, { "epoch": 19.11706239646604, "grad_norm": 0.448881059885025, "learning_rate": 2.7561666666666667e-05, 
"loss": 0.0349, "step": 17320 }, { "epoch": 19.11816675869685, "grad_norm": 0.6104524731636047, "learning_rate": 2.7561333333333337e-05, "loss": 0.0419, "step": 17321 }, { "epoch": 19.119271120927664, "grad_norm": 0.15698517858982086, "learning_rate": 2.7561e-05, "loss": 0.0109, "step": 17322 }, { "epoch": 19.120375483158476, "grad_norm": 0.4585196375846863, "learning_rate": 2.756066666666667e-05, "loss": 0.0545, "step": 17323 }, { "epoch": 19.12147984538929, "grad_norm": 0.31912919878959656, "learning_rate": 2.7560333333333334e-05, "loss": 0.0154, "step": 17324 }, { "epoch": 19.1225842076201, "grad_norm": 0.5697553753852844, "learning_rate": 2.756e-05, "loss": 0.017, "step": 17325 }, { "epoch": 19.123688569850913, "grad_norm": 0.19553078711032867, "learning_rate": 2.7559666666666666e-05, "loss": 0.0073, "step": 17326 }, { "epoch": 19.12479293208172, "grad_norm": 0.19003304839134216, "learning_rate": 2.7559333333333336e-05, "loss": 0.0069, "step": 17327 }, { "epoch": 19.125897294312534, "grad_norm": 0.17297682166099548, "learning_rate": 2.7558999999999998e-05, "loss": 0.0201, "step": 17328 }, { "epoch": 19.127001656543346, "grad_norm": 0.3564530909061432, "learning_rate": 2.7558666666666668e-05, "loss": 0.0382, "step": 17329 }, { "epoch": 19.128106018774158, "grad_norm": 0.40738964080810547, "learning_rate": 2.7558333333333337e-05, "loss": 0.0076, "step": 17330 }, { "epoch": 19.12921038100497, "grad_norm": 0.20207414031028748, "learning_rate": 2.7558e-05, "loss": 0.01, "step": 17331 }, { "epoch": 19.130314743235783, "grad_norm": 0.11039741337299347, "learning_rate": 2.755766666666667e-05, "loss": 0.0047, "step": 17332 }, { "epoch": 19.131419105466595, "grad_norm": 0.14108720421791077, "learning_rate": 2.7557333333333335e-05, "loss": 0.0047, "step": 17333 }, { "epoch": 19.132523467697403, "grad_norm": 0.5761358141899109, "learning_rate": 2.7557e-05, "loss": 0.0157, "step": 17334 }, { "epoch": 19.133627829928216, "grad_norm": 0.15210840106010437, "learning_rate": 
2.7556666666666667e-05, "loss": 0.0038, "step": 17335 }, { "epoch": 19.134732192159028, "grad_norm": 0.09217486530542374, "learning_rate": 2.7556333333333336e-05, "loss": 0.0052, "step": 17336 }, { "epoch": 19.13583655438984, "grad_norm": 0.11448884010314941, "learning_rate": 2.7556e-05, "loss": 0.0025, "step": 17337 }, { "epoch": 19.136940916620652, "grad_norm": 0.19521665573120117, "learning_rate": 2.7555666666666668e-05, "loss": 0.0065, "step": 17338 }, { "epoch": 19.138045278851465, "grad_norm": 0.20311689376831055, "learning_rate": 2.7555333333333334e-05, "loss": 0.0067, "step": 17339 }, { "epoch": 19.139149641082273, "grad_norm": 0.15248577296733856, "learning_rate": 2.7555e-05, "loss": 0.0022, "step": 17340 }, { "epoch": 19.140254003313085, "grad_norm": 0.19888365268707275, "learning_rate": 2.755466666666667e-05, "loss": 0.0141, "step": 17341 }, { "epoch": 19.141358365543898, "grad_norm": 0.15759989619255066, "learning_rate": 2.755433333333333e-05, "loss": 0.0277, "step": 17342 }, { "epoch": 19.14246272777471, "grad_norm": 0.1862770915031433, "learning_rate": 2.7554e-05, "loss": 0.0316, "step": 17343 }, { "epoch": 19.143567090005522, "grad_norm": 0.40198010206222534, "learning_rate": 2.7553666666666667e-05, "loss": 0.0023, "step": 17344 }, { "epoch": 19.144671452236334, "grad_norm": 0.17234393954277039, "learning_rate": 2.7553333333333333e-05, "loss": 0.0058, "step": 17345 }, { "epoch": 19.145775814467147, "grad_norm": 0.15508949756622314, "learning_rate": 2.7553000000000002e-05, "loss": 0.0041, "step": 17346 }, { "epoch": 19.146880176697955, "grad_norm": 0.26415979862213135, "learning_rate": 2.7552666666666668e-05, "loss": 0.0049, "step": 17347 }, { "epoch": 19.147984538928768, "grad_norm": 0.16741858422756195, "learning_rate": 2.7552333333333334e-05, "loss": 0.0041, "step": 17348 }, { "epoch": 19.14908890115958, "grad_norm": 0.21368218958377838, "learning_rate": 2.7552e-05, "loss": 0.0054, "step": 17349 }, { "epoch": 19.150193263390392, "grad_norm": 
0.07546623796224594, "learning_rate": 2.755166666666667e-05, "loss": 0.0034, "step": 17350 }, { "epoch": 19.151297625621204, "grad_norm": 0.14044436812400818, "learning_rate": 2.755133333333333e-05, "loss": 0.0054, "step": 17351 }, { "epoch": 19.152401987852016, "grad_norm": 0.28499534726142883, "learning_rate": 2.7551e-05, "loss": 0.008, "step": 17352 }, { "epoch": 19.15350635008283, "grad_norm": 0.15482915937900543, "learning_rate": 2.7550666666666667e-05, "loss": 0.0041, "step": 17353 }, { "epoch": 19.154610712313637, "grad_norm": 0.1553451120853424, "learning_rate": 2.7550333333333333e-05, "loss": 0.0056, "step": 17354 }, { "epoch": 19.15571507454445, "grad_norm": 0.17702457308769226, "learning_rate": 2.7550000000000002e-05, "loss": 0.0049, "step": 17355 }, { "epoch": 19.156819436775262, "grad_norm": 0.09488682448863983, "learning_rate": 2.7549666666666668e-05, "loss": 0.0036, "step": 17356 }, { "epoch": 19.157923799006074, "grad_norm": 0.21763767302036285, "learning_rate": 2.7549333333333334e-05, "loss": 0.0079, "step": 17357 }, { "epoch": 19.159028161236886, "grad_norm": 0.22359777987003326, "learning_rate": 2.7549e-05, "loss": 0.0074, "step": 17358 }, { "epoch": 19.1601325234677, "grad_norm": 0.59618079662323, "learning_rate": 2.754866666666667e-05, "loss": 0.0071, "step": 17359 }, { "epoch": 19.16123688569851, "grad_norm": 0.2687339186668396, "learning_rate": 2.7548333333333332e-05, "loss": 0.0061, "step": 17360 }, { "epoch": 19.16234124792932, "grad_norm": 0.09051267057657242, "learning_rate": 2.7548e-05, "loss": 0.0016, "step": 17361 }, { "epoch": 19.16344561016013, "grad_norm": 0.09663934260606766, "learning_rate": 2.7547666666666667e-05, "loss": 0.0043, "step": 17362 }, { "epoch": 19.164549972390944, "grad_norm": 0.12143974006175995, "learning_rate": 2.7547333333333333e-05, "loss": 0.0034, "step": 17363 }, { "epoch": 19.165654334621756, "grad_norm": 0.13876578211784363, "learning_rate": 2.7547000000000002e-05, "loss": 0.0027, "step": 17364 }, { "epoch": 
19.16675869685257, "grad_norm": 0.5433517694473267, "learning_rate": 2.754666666666667e-05, "loss": 0.1474, "step": 17365 }, { "epoch": 19.16786305908338, "grad_norm": 0.40038496255874634, "learning_rate": 2.7546333333333334e-05, "loss": 0.1054, "step": 17366 }, { "epoch": 19.168967421314193, "grad_norm": 0.24819865822792053, "learning_rate": 2.7546e-05, "loss": 0.0528, "step": 17367 }, { "epoch": 19.170071783545, "grad_norm": 0.48613184690475464, "learning_rate": 2.754566666666667e-05, "loss": 0.0389, "step": 17368 }, { "epoch": 19.171176145775814, "grad_norm": 0.4077368378639221, "learning_rate": 2.7545333333333332e-05, "loss": 0.0614, "step": 17369 }, { "epoch": 19.172280508006626, "grad_norm": 0.18768088519573212, "learning_rate": 2.7545e-05, "loss": 0.0178, "step": 17370 }, { "epoch": 19.173384870237438, "grad_norm": 0.3769038915634155, "learning_rate": 2.7544666666666667e-05, "loss": 0.0546, "step": 17371 }, { "epoch": 19.17448923246825, "grad_norm": 0.2598473131656647, "learning_rate": 2.7544333333333333e-05, "loss": 0.0492, "step": 17372 }, { "epoch": 19.175593594699063, "grad_norm": 0.228684201836586, "learning_rate": 2.7544000000000003e-05, "loss": 0.0179, "step": 17373 }, { "epoch": 19.17669795692987, "grad_norm": 0.1397186666727066, "learning_rate": 2.7543666666666665e-05, "loss": 0.0093, "step": 17374 }, { "epoch": 19.177802319160683, "grad_norm": 0.2856326103210449, "learning_rate": 2.7543333333333334e-05, "loss": 0.0253, "step": 17375 }, { "epoch": 19.178906681391496, "grad_norm": 0.14146147668361664, "learning_rate": 2.7543e-05, "loss": 0.008, "step": 17376 }, { "epoch": 19.180011043622308, "grad_norm": 0.1595044881105423, "learning_rate": 2.7542666666666666e-05, "loss": 0.0068, "step": 17377 }, { "epoch": 19.18111540585312, "grad_norm": 0.13325455784797668, "learning_rate": 2.7542333333333332e-05, "loss": 0.0041, "step": 17378 }, { "epoch": 19.182219768083932, "grad_norm": 0.26184988021850586, "learning_rate": 2.7542e-05, "loss": 0.0062, "step": 
17379 }, { "epoch": 19.183324130314745, "grad_norm": 0.0999833270907402, "learning_rate": 2.7541666666666668e-05, "loss": 0.0046, "step": 17380 }, { "epoch": 19.184428492545553, "grad_norm": 0.14166095852851868, "learning_rate": 2.7541333333333333e-05, "loss": 0.0046, "step": 17381 }, { "epoch": 19.185532854776365, "grad_norm": 0.15152867138385773, "learning_rate": 2.7541000000000003e-05, "loss": 0.0062, "step": 17382 }, { "epoch": 19.186637217007178, "grad_norm": 0.06907400488853455, "learning_rate": 2.7540666666666665e-05, "loss": 0.0039, "step": 17383 }, { "epoch": 19.18774157923799, "grad_norm": 0.18743227422237396, "learning_rate": 2.7540333333333335e-05, "loss": 0.0044, "step": 17384 }, { "epoch": 19.188845941468802, "grad_norm": 0.23080871999263763, "learning_rate": 2.754e-05, "loss": 0.0072, "step": 17385 }, { "epoch": 19.189950303699614, "grad_norm": 0.1159181147813797, "learning_rate": 2.7539666666666667e-05, "loss": 0.0048, "step": 17386 }, { "epoch": 19.191054665930427, "grad_norm": 0.15974058210849762, "learning_rate": 2.7539333333333332e-05, "loss": 0.0067, "step": 17387 }, { "epoch": 19.192159028161235, "grad_norm": 0.24263758957386017, "learning_rate": 2.7539000000000002e-05, "loss": 0.0041, "step": 17388 }, { "epoch": 19.193263390392048, "grad_norm": 0.12627260386943817, "learning_rate": 2.7538666666666668e-05, "loss": 0.0046, "step": 17389 }, { "epoch": 19.19436775262286, "grad_norm": 0.1199573427438736, "learning_rate": 2.7538333333333334e-05, "loss": 0.0036, "step": 17390 }, { "epoch": 19.195472114853672, "grad_norm": 0.14250795543193817, "learning_rate": 2.7538000000000003e-05, "loss": 0.008, "step": 17391 }, { "epoch": 19.196576477084484, "grad_norm": 0.2404145896434784, "learning_rate": 2.7537666666666666e-05, "loss": 0.0108, "step": 17392 }, { "epoch": 19.197680839315296, "grad_norm": 0.08014810085296631, "learning_rate": 2.7537333333333335e-05, "loss": 0.0032, "step": 17393 }, { "epoch": 19.19878520154611, "grad_norm": 0.23788747191429138, 
"learning_rate": 2.7537e-05, "loss": 0.0072, "step": 17394 }, { "epoch": 19.199889563776917, "grad_norm": 0.09225723147392273, "learning_rate": 2.7536666666666667e-05, "loss": 0.0039, "step": 17395 }, { "epoch": 19.20099392600773, "grad_norm": 0.17885369062423706, "learning_rate": 2.7536333333333336e-05, "loss": 0.0054, "step": 17396 }, { "epoch": 19.202098288238542, "grad_norm": 0.22086843848228455, "learning_rate": 2.7536000000000002e-05, "loss": 0.0077, "step": 17397 }, { "epoch": 19.203202650469354, "grad_norm": 0.11973746865987778, "learning_rate": 2.7535666666666668e-05, "loss": 0.0039, "step": 17398 }, { "epoch": 19.204307012700166, "grad_norm": 0.06941641867160797, "learning_rate": 2.7535333333333334e-05, "loss": 0.0014, "step": 17399 }, { "epoch": 19.20541137493098, "grad_norm": 0.11992166191339493, "learning_rate": 2.7535e-05, "loss": 0.0029, "step": 17400 }, { "epoch": 19.20651573716179, "grad_norm": 0.26529425382614136, "learning_rate": 2.7534666666666666e-05, "loss": 0.0033, "step": 17401 }, { "epoch": 19.2076200993926, "grad_norm": 0.2743876576423645, "learning_rate": 2.7534333333333335e-05, "loss": 0.0057, "step": 17402 }, { "epoch": 19.20872446162341, "grad_norm": 0.1995118111371994, "learning_rate": 2.7533999999999998e-05, "loss": 0.0079, "step": 17403 }, { "epoch": 19.209828823854224, "grad_norm": 0.06271522492170334, "learning_rate": 2.7533666666666667e-05, "loss": 0.0019, "step": 17404 }, { "epoch": 19.210933186085036, "grad_norm": 1.6527124643325806, "learning_rate": 2.7533333333333336e-05, "loss": 0.0081, "step": 17405 }, { "epoch": 19.21203754831585, "grad_norm": 0.20095627009868622, "learning_rate": 2.7533e-05, "loss": 0.0048, "step": 17406 }, { "epoch": 19.21314191054666, "grad_norm": 0.13941094279289246, "learning_rate": 2.7532666666666668e-05, "loss": 0.0044, "step": 17407 }, { "epoch": 19.214246272777473, "grad_norm": 0.24184861779212952, "learning_rate": 2.7532333333333334e-05, "loss": 0.0041, "step": 17408 }, { "epoch": 
19.21535063500828, "grad_norm": 0.10358132421970367, "learning_rate": 2.7532e-05, "loss": 0.0031, "step": 17409 }, { "epoch": 19.216454997239094, "grad_norm": 0.49903008341789246, "learning_rate": 2.7531666666666666e-05, "loss": 0.008, "step": 17410 }, { "epoch": 19.217559359469906, "grad_norm": 0.22360730171203613, "learning_rate": 2.7531333333333335e-05, "loss": 0.0062, "step": 17411 }, { "epoch": 19.218663721700718, "grad_norm": 0.19075001776218414, "learning_rate": 2.7531e-05, "loss": 0.0035, "step": 17412 }, { "epoch": 19.21976808393153, "grad_norm": 0.17912504076957703, "learning_rate": 2.7530666666666667e-05, "loss": 0.004, "step": 17413 }, { "epoch": 19.220872446162343, "grad_norm": 0.42277991771698, "learning_rate": 2.7530333333333336e-05, "loss": 0.0059, "step": 17414 }, { "epoch": 19.22197680839315, "grad_norm": 0.6706661581993103, "learning_rate": 2.753e-05, "loss": 0.1748, "step": 17415 }, { "epoch": 19.223081170623963, "grad_norm": 0.5009204745292664, "learning_rate": 2.752966666666667e-05, "loss": 0.1268, "step": 17416 }, { "epoch": 19.224185532854776, "grad_norm": 0.37772780656814575, "learning_rate": 2.7529333333333334e-05, "loss": 0.0617, "step": 17417 }, { "epoch": 19.225289895085588, "grad_norm": 0.8140997886657715, "learning_rate": 2.7529e-05, "loss": 0.0547, "step": 17418 }, { "epoch": 19.2263942573164, "grad_norm": 0.37307560443878174, "learning_rate": 2.7528666666666666e-05, "loss": 0.0482, "step": 17419 }, { "epoch": 19.227498619547212, "grad_norm": 0.2811300456523895, "learning_rate": 2.7528333333333335e-05, "loss": 0.0361, "step": 17420 }, { "epoch": 19.228602981778025, "grad_norm": 0.46596774458885193, "learning_rate": 2.7528e-05, "loss": 0.0625, "step": 17421 }, { "epoch": 19.229707344008833, "grad_norm": 0.2257901281118393, "learning_rate": 2.7527666666666667e-05, "loss": 0.0191, "step": 17422 }, { "epoch": 19.230811706239646, "grad_norm": 1.0965492725372314, "learning_rate": 2.7527333333333337e-05, "loss": 0.0768, "step": 17423 }, { 
"epoch": 19.231916068470458, "grad_norm": 0.45338621735572815, "learning_rate": 2.7527e-05, "loss": 0.0208, "step": 17424 }, { "epoch": 19.23302043070127, "grad_norm": 0.12586157023906708, "learning_rate": 2.752666666666667e-05, "loss": 0.0086, "step": 17425 }, { "epoch": 19.234124792932082, "grad_norm": 0.11168308556079865, "learning_rate": 2.7526333333333334e-05, "loss": 0.004, "step": 17426 }, { "epoch": 19.235229155162894, "grad_norm": 0.13239243626594543, "learning_rate": 2.7526e-05, "loss": 0.0065, "step": 17427 }, { "epoch": 19.236333517393707, "grad_norm": 0.1904003769159317, "learning_rate": 2.7525666666666666e-05, "loss": 0.0075, "step": 17428 }, { "epoch": 19.237437879624515, "grad_norm": 0.4397706985473633, "learning_rate": 2.7525333333333336e-05, "loss": 0.0074, "step": 17429 }, { "epoch": 19.238542241855328, "grad_norm": 0.08417709916830063, "learning_rate": 2.7525e-05, "loss": 0.0039, "step": 17430 }, { "epoch": 19.23964660408614, "grad_norm": 0.1536492258310318, "learning_rate": 2.7524666666666668e-05, "loss": 0.0029, "step": 17431 }, { "epoch": 19.240750966316952, "grad_norm": 0.13586698472499847, "learning_rate": 2.7524333333333333e-05, "loss": 0.0061, "step": 17432 }, { "epoch": 19.241855328547764, "grad_norm": 0.16664516925811768, "learning_rate": 2.7524e-05, "loss": 0.0043, "step": 17433 }, { "epoch": 19.242959690778576, "grad_norm": 0.17693111300468445, "learning_rate": 2.752366666666667e-05, "loss": 0.0076, "step": 17434 }, { "epoch": 19.24406405300939, "grad_norm": 0.14277435839176178, "learning_rate": 2.752333333333333e-05, "loss": 0.0042, "step": 17435 }, { "epoch": 19.245168415240197, "grad_norm": 0.25245586037635803, "learning_rate": 2.7523e-05, "loss": 0.0078, "step": 17436 }, { "epoch": 19.24627277747101, "grad_norm": 0.5506932735443115, "learning_rate": 2.7522666666666667e-05, "loss": 0.0132, "step": 17437 }, { "epoch": 19.247377139701822, "grad_norm": 0.09526098519563675, "learning_rate": 2.7522333333333332e-05, "loss": 0.0032, 
"step": 17438 }, { "epoch": 19.248481501932634, "grad_norm": 0.1892252266407013, "learning_rate": 2.7522000000000002e-05, "loss": 0.0059, "step": 17439 }, { "epoch": 19.249585864163446, "grad_norm": 0.21750791370868683, "learning_rate": 2.7521666666666668e-05, "loss": 0.0035, "step": 17440 }, { "epoch": 19.25069022639426, "grad_norm": 0.04064683988690376, "learning_rate": 2.7521333333333334e-05, "loss": 0.0013, "step": 17441 }, { "epoch": 19.25179458862507, "grad_norm": 0.1880647987127304, "learning_rate": 2.7521e-05, "loss": 0.0057, "step": 17442 }, { "epoch": 19.25289895085588, "grad_norm": 0.2120746374130249, "learning_rate": 2.752066666666667e-05, "loss": 0.007, "step": 17443 }, { "epoch": 19.25400331308669, "grad_norm": 0.3932231664657593, "learning_rate": 2.752033333333333e-05, "loss": 0.0099, "step": 17444 }, { "epoch": 19.255107675317504, "grad_norm": 0.09383327513933182, "learning_rate": 2.752e-05, "loss": 0.0023, "step": 17445 }, { "epoch": 19.256212037548316, "grad_norm": 0.36778929829597473, "learning_rate": 2.751966666666667e-05, "loss": 0.0109, "step": 17446 }, { "epoch": 19.25731639977913, "grad_norm": 0.1758035123348236, "learning_rate": 2.7519333333333333e-05, "loss": 0.0128, "step": 17447 }, { "epoch": 19.25842076200994, "grad_norm": 0.5753204226493835, "learning_rate": 2.7519000000000002e-05, "loss": 0.0259, "step": 17448 }, { "epoch": 19.25952512424075, "grad_norm": 0.291369765996933, "learning_rate": 2.7518666666666668e-05, "loss": 0.0045, "step": 17449 }, { "epoch": 19.26062948647156, "grad_norm": 0.18644419312477112, "learning_rate": 2.7518333333333334e-05, "loss": 0.0059, "step": 17450 }, { "epoch": 19.261733848702374, "grad_norm": 0.1437377780675888, "learning_rate": 2.7518e-05, "loss": 0.0039, "step": 17451 }, { "epoch": 19.262838210933186, "grad_norm": 0.09161116927862167, "learning_rate": 2.751766666666667e-05, "loss": 0.0037, "step": 17452 }, { "epoch": 19.263942573163998, "grad_norm": 0.30152428150177, "learning_rate": 
2.751733333333333e-05, "loss": 0.0078, "step": 17453 }, { "epoch": 19.26504693539481, "grad_norm": 0.12429505586624146, "learning_rate": 2.7517e-05, "loss": 0.0081, "step": 17454 }, { "epoch": 19.266151297625623, "grad_norm": 0.16945144534111023, "learning_rate": 2.751666666666667e-05, "loss": 0.0026, "step": 17455 }, { "epoch": 19.26725565985643, "grad_norm": 0.13467878103256226, "learning_rate": 2.7516333333333333e-05, "loss": 0.0046, "step": 17456 }, { "epoch": 19.268360022087244, "grad_norm": 0.29989275336265564, "learning_rate": 2.7516000000000002e-05, "loss": 0.0039, "step": 17457 }, { "epoch": 19.269464384318056, "grad_norm": 0.3706904947757721, "learning_rate": 2.7515666666666668e-05, "loss": 0.0136, "step": 17458 }, { "epoch": 19.270568746548868, "grad_norm": 0.2347748875617981, "learning_rate": 2.7515333333333334e-05, "loss": 0.0049, "step": 17459 }, { "epoch": 19.27167310877968, "grad_norm": 0.09107235074043274, "learning_rate": 2.7515e-05, "loss": 0.0023, "step": 17460 }, { "epoch": 19.272777471010492, "grad_norm": 0.2718786895275116, "learning_rate": 2.7514666666666666e-05, "loss": 0.0073, "step": 17461 }, { "epoch": 19.273881833241305, "grad_norm": 0.29569560289382935, "learning_rate": 2.7514333333333335e-05, "loss": 0.005, "step": 17462 }, { "epoch": 19.274986195472113, "grad_norm": 0.25645995140075684, "learning_rate": 2.7514e-05, "loss": 0.0052, "step": 17463 }, { "epoch": 19.276090557702926, "grad_norm": 0.1238178238272667, "learning_rate": 2.7513666666666667e-05, "loss": 0.0021, "step": 17464 }, { "epoch": 19.277194919933738, "grad_norm": 0.45488032698631287, "learning_rate": 2.7513333333333333e-05, "loss": 0.1386, "step": 17465 }, { "epoch": 19.27829928216455, "grad_norm": 0.43672314286231995, "learning_rate": 2.7513000000000002e-05, "loss": 0.074, "step": 17466 }, { "epoch": 19.279403644395362, "grad_norm": 0.4552938938140869, "learning_rate": 2.7512666666666665e-05, "loss": 0.0874, "step": 17467 }, { "epoch": 19.280508006626174, "grad_norm": 
0.4636385142803192, "learning_rate": 2.7512333333333334e-05, "loss": 0.0809, "step": 17468 }, { "epoch": 19.281612368856987, "grad_norm": 0.423645943403244, "learning_rate": 2.7512e-05, "loss": 0.04, "step": 17469 }, { "epoch": 19.282716731087795, "grad_norm": 0.3430802822113037, "learning_rate": 2.7511666666666666e-05, "loss": 0.037, "step": 17470 }, { "epoch": 19.283821093318608, "grad_norm": 0.3239977955818176, "learning_rate": 2.7511333333333335e-05, "loss": 0.0261, "step": 17471 }, { "epoch": 19.28492545554942, "grad_norm": 0.29822129011154175, "learning_rate": 2.7511e-05, "loss": 0.039, "step": 17472 }, { "epoch": 19.286029817780232, "grad_norm": 0.09904451668262482, "learning_rate": 2.7510666666666667e-05, "loss": 0.0051, "step": 17473 }, { "epoch": 19.287134180011044, "grad_norm": 0.2276650369167328, "learning_rate": 2.7510333333333333e-05, "loss": 0.0108, "step": 17474 }, { "epoch": 19.288238542241857, "grad_norm": 0.194851353764534, "learning_rate": 2.7510000000000003e-05, "loss": 0.0224, "step": 17475 }, { "epoch": 19.28934290447267, "grad_norm": 0.10638094693422318, "learning_rate": 2.7509666666666665e-05, "loss": 0.0038, "step": 17476 }, { "epoch": 19.290447266703477, "grad_norm": 0.24133706092834473, "learning_rate": 2.7509333333333334e-05, "loss": 0.0092, "step": 17477 }, { "epoch": 19.29155162893429, "grad_norm": 0.09591852128505707, "learning_rate": 2.7509e-05, "loss": 0.0027, "step": 17478 }, { "epoch": 19.292655991165102, "grad_norm": 0.18891766667366028, "learning_rate": 2.7508666666666666e-05, "loss": 0.0174, "step": 17479 }, { "epoch": 19.293760353395914, "grad_norm": 0.13623444736003876, "learning_rate": 2.7508333333333336e-05, "loss": 0.0055, "step": 17480 }, { "epoch": 19.294864715626726, "grad_norm": 0.3723622262477875, "learning_rate": 2.7508e-05, "loss": 0.0097, "step": 17481 }, { "epoch": 19.29596907785754, "grad_norm": 0.12462836503982544, "learning_rate": 2.7507666666666668e-05, "loss": 0.003, "step": 17482 }, { "epoch": 
19.297073440088347, "grad_norm": 0.081246018409729, "learning_rate": 2.7507333333333333e-05, "loss": 0.0036, "step": 17483 }, { "epoch": 19.29817780231916, "grad_norm": 0.07326330989599228, "learning_rate": 2.7507000000000003e-05, "loss": 0.0029, "step": 17484 }, { "epoch": 19.29928216454997, "grad_norm": 0.057146232575178146, "learning_rate": 2.7506666666666665e-05, "loss": 0.0019, "step": 17485 }, { "epoch": 19.300386526780784, "grad_norm": 0.1401589959859848, "learning_rate": 2.7506333333333335e-05, "loss": 0.0053, "step": 17486 }, { "epoch": 19.301490889011596, "grad_norm": 0.11710263788700104, "learning_rate": 2.7506e-05, "loss": 0.0031, "step": 17487 }, { "epoch": 19.30259525124241, "grad_norm": 0.14304204285144806, "learning_rate": 2.7505666666666667e-05, "loss": 0.0058, "step": 17488 }, { "epoch": 19.30369961347322, "grad_norm": 0.22942790389060974, "learning_rate": 2.7505333333333336e-05, "loss": 0.0055, "step": 17489 }, { "epoch": 19.30480397570403, "grad_norm": 0.10211978852748871, "learning_rate": 2.7505e-05, "loss": 0.0022, "step": 17490 }, { "epoch": 19.30590833793484, "grad_norm": 0.16984781622886658, "learning_rate": 2.7504666666666668e-05, "loss": 0.0048, "step": 17491 }, { "epoch": 19.307012700165654, "grad_norm": 0.17502814531326294, "learning_rate": 2.7504333333333334e-05, "loss": 0.0058, "step": 17492 }, { "epoch": 19.308117062396466, "grad_norm": 0.09375626593828201, "learning_rate": 2.7504e-05, "loss": 0.0034, "step": 17493 }, { "epoch": 19.309221424627278, "grad_norm": 0.10191536694765091, "learning_rate": 2.7503666666666666e-05, "loss": 0.0031, "step": 17494 }, { "epoch": 19.31032578685809, "grad_norm": 0.34425750374794006, "learning_rate": 2.7503333333333335e-05, "loss": 0.0045, "step": 17495 }, { "epoch": 19.311430149088903, "grad_norm": 0.12778455018997192, "learning_rate": 2.7503e-05, "loss": 0.003, "step": 17496 }, { "epoch": 19.31253451131971, "grad_norm": 0.3417079746723175, "learning_rate": 2.7502666666666667e-05, "loss": 0.0067, 
"step": 17497 }, { "epoch": 19.313638873550524, "grad_norm": 0.16287994384765625, "learning_rate": 2.7502333333333336e-05, "loss": 0.0045, "step": 17498 }, { "epoch": 19.314743235781336, "grad_norm": 0.22023896872997284, "learning_rate": 2.7502e-05, "loss": 0.0042, "step": 17499 }, { "epoch": 19.315847598012148, "grad_norm": 0.2686629593372345, "learning_rate": 2.7501666666666668e-05, "loss": 0.0052, "step": 17500 }, { "epoch": 19.31695196024296, "grad_norm": 0.2033587396144867, "learning_rate": 2.7501333333333334e-05, "loss": 0.0085, "step": 17501 }, { "epoch": 19.318056322473772, "grad_norm": 0.1341395080089569, "learning_rate": 2.7501e-05, "loss": 0.0019, "step": 17502 }, { "epoch": 19.319160684704585, "grad_norm": 0.11917421966791153, "learning_rate": 2.7500666666666666e-05, "loss": 0.0042, "step": 17503 }, { "epoch": 19.320265046935393, "grad_norm": 0.25367608666419983, "learning_rate": 2.7500333333333335e-05, "loss": 0.0027, "step": 17504 }, { "epoch": 19.321369409166206, "grad_norm": 0.14399385452270508, "learning_rate": 2.75e-05, "loss": 0.0066, "step": 17505 }, { "epoch": 19.322473771397018, "grad_norm": 0.16717195510864258, "learning_rate": 2.7499666666666667e-05, "loss": 0.0046, "step": 17506 }, { "epoch": 19.32357813362783, "grad_norm": 0.3803520202636719, "learning_rate": 2.7499333333333336e-05, "loss": 0.0078, "step": 17507 }, { "epoch": 19.324682495858642, "grad_norm": 0.11665356159210205, "learning_rate": 2.7499e-05, "loss": 0.0021, "step": 17508 }, { "epoch": 19.325786858089455, "grad_norm": 0.19119465351104736, "learning_rate": 2.7498666666666668e-05, "loss": 0.006, "step": 17509 }, { "epoch": 19.326891220320267, "grad_norm": 0.07437225431203842, "learning_rate": 2.7498333333333334e-05, "loss": 0.001, "step": 17510 }, { "epoch": 19.327995582551075, "grad_norm": 0.449468195438385, "learning_rate": 2.7498e-05, "loss": 0.0054, "step": 17511 }, { "epoch": 19.329099944781888, "grad_norm": 0.12793655693531036, "learning_rate": 2.749766666666667e-05, 
"loss": 0.0022, "step": 17512 }, { "epoch": 19.3302043070127, "grad_norm": 0.0563390776515007, "learning_rate": 2.7497333333333335e-05, "loss": 0.0018, "step": 17513 }, { "epoch": 19.331308669243512, "grad_norm": 0.20093081891536713, "learning_rate": 2.7497e-05, "loss": 0.0142, "step": 17514 }, { "epoch": 19.332413031474324, "grad_norm": 0.539959192276001, "learning_rate": 2.7496666666666667e-05, "loss": 0.0982, "step": 17515 }, { "epoch": 19.333517393705137, "grad_norm": 0.5081720352172852, "learning_rate": 2.7496333333333336e-05, "loss": 0.0889, "step": 17516 }, { "epoch": 19.33462175593595, "grad_norm": 0.5741602182388306, "learning_rate": 2.7496e-05, "loss": 0.0571, "step": 17517 }, { "epoch": 19.335726118166757, "grad_norm": 0.32658255100250244, "learning_rate": 2.749566666666667e-05, "loss": 0.0624, "step": 17518 }, { "epoch": 19.33683048039757, "grad_norm": 0.249290332198143, "learning_rate": 2.7495333333333334e-05, "loss": 0.0342, "step": 17519 }, { "epoch": 19.337934842628382, "grad_norm": 0.21714098751544952, "learning_rate": 2.7495e-05, "loss": 0.0286, "step": 17520 }, { "epoch": 19.339039204859194, "grad_norm": 0.2843928635120392, "learning_rate": 2.749466666666667e-05, "loss": 0.0249, "step": 17521 }, { "epoch": 19.340143567090006, "grad_norm": 0.21789616346359253, "learning_rate": 2.7494333333333332e-05, "loss": 0.0175, "step": 17522 }, { "epoch": 19.34124792932082, "grad_norm": 0.2682885527610779, "learning_rate": 2.7494e-05, "loss": 0.0716, "step": 17523 }, { "epoch": 19.342352291551627, "grad_norm": 0.2141970992088318, "learning_rate": 2.7493666666666667e-05, "loss": 0.0289, "step": 17524 }, { "epoch": 19.34345665378244, "grad_norm": 0.2374935746192932, "learning_rate": 2.7493333333333333e-05, "loss": 0.0113, "step": 17525 }, { "epoch": 19.34456101601325, "grad_norm": 0.10166557878255844, "learning_rate": 2.7493e-05, "loss": 0.0033, "step": 17526 }, { "epoch": 19.345665378244064, "grad_norm": 0.15360528230667114, "learning_rate": 
2.749266666666667e-05, "loss": 0.0079, "step": 17527 }, { "epoch": 19.346769740474876, "grad_norm": 0.19888560473918915, "learning_rate": 2.749233333333333e-05, "loss": 0.0072, "step": 17528 }, { "epoch": 19.34787410270569, "grad_norm": 0.15908008813858032, "learning_rate": 2.7492e-05, "loss": 0.0055, "step": 17529 }, { "epoch": 19.3489784649365, "grad_norm": 0.13422247767448425, "learning_rate": 2.749166666666667e-05, "loss": 0.0049, "step": 17530 }, { "epoch": 19.35008282716731, "grad_norm": 0.12234663963317871, "learning_rate": 2.7491333333333332e-05, "loss": 0.0028, "step": 17531 }, { "epoch": 19.35118718939812, "grad_norm": 0.15296484529972076, "learning_rate": 2.7491e-05, "loss": 0.0072, "step": 17532 }, { "epoch": 19.352291551628934, "grad_norm": 0.24395033717155457, "learning_rate": 2.7490666666666668e-05, "loss": 0.0076, "step": 17533 }, { "epoch": 19.353395913859746, "grad_norm": 0.12265393137931824, "learning_rate": 2.7490333333333333e-05, "loss": 0.0039, "step": 17534 }, { "epoch": 19.35450027609056, "grad_norm": 0.09326310455799103, "learning_rate": 2.749e-05, "loss": 0.0027, "step": 17535 }, { "epoch": 19.35560463832137, "grad_norm": 0.11243554204702377, "learning_rate": 2.748966666666667e-05, "loss": 0.0042, "step": 17536 }, { "epoch": 19.356709000552183, "grad_norm": 0.08607001602649689, "learning_rate": 2.7489333333333335e-05, "loss": 0.0133, "step": 17537 }, { "epoch": 19.35781336278299, "grad_norm": 0.1488552838563919, "learning_rate": 2.7489e-05, "loss": 0.0038, "step": 17538 }, { "epoch": 19.358917725013804, "grad_norm": 0.11980749666690826, "learning_rate": 2.748866666666667e-05, "loss": 0.0033, "step": 17539 }, { "epoch": 19.360022087244616, "grad_norm": 0.0950867235660553, "learning_rate": 2.7488333333333332e-05, "loss": 0.0029, "step": 17540 }, { "epoch": 19.361126449475428, "grad_norm": 0.07796153426170349, "learning_rate": 2.7488000000000002e-05, "loss": 0.0014, "step": 17541 }, { "epoch": 19.36223081170624, "grad_norm": 
0.35605570673942566, "learning_rate": 2.7487666666666668e-05, "loss": 0.0037, "step": 17542 }, { "epoch": 19.363335173937053, "grad_norm": 0.15498918294906616, "learning_rate": 2.7487333333333334e-05, "loss": 0.0081, "step": 17543 }, { "epoch": 19.364439536167865, "grad_norm": 0.19555264711380005, "learning_rate": 2.7487e-05, "loss": 0.0035, "step": 17544 }, { "epoch": 19.365543898398673, "grad_norm": 0.1504887491464615, "learning_rate": 2.748666666666667e-05, "loss": 0.007, "step": 17545 }, { "epoch": 19.366648260629486, "grad_norm": 0.26431840658187866, "learning_rate": 2.7486333333333335e-05, "loss": 0.0089, "step": 17546 }, { "epoch": 19.367752622860298, "grad_norm": 0.1726444810628891, "learning_rate": 2.7486e-05, "loss": 0.009, "step": 17547 }, { "epoch": 19.36885698509111, "grad_norm": 0.14451666176319122, "learning_rate": 2.748566666666667e-05, "loss": 0.0049, "step": 17548 }, { "epoch": 19.369961347321922, "grad_norm": 0.299716591835022, "learning_rate": 2.7485333333333333e-05, "loss": 0.0068, "step": 17549 }, { "epoch": 19.371065709552735, "grad_norm": 0.3512333929538727, "learning_rate": 2.7485000000000002e-05, "loss": 0.0049, "step": 17550 }, { "epoch": 19.372170071783543, "grad_norm": 0.12209237366914749, "learning_rate": 2.7484666666666665e-05, "loss": 0.0029, "step": 17551 }, { "epoch": 19.373274434014355, "grad_norm": 0.10402361303567886, "learning_rate": 2.7484333333333334e-05, "loss": 0.003, "step": 17552 }, { "epoch": 19.374378796245168, "grad_norm": 0.18702764809131622, "learning_rate": 2.7484e-05, "loss": 0.0092, "step": 17553 }, { "epoch": 19.37548315847598, "grad_norm": 0.14505068957805634, "learning_rate": 2.7483666666666666e-05, "loss": 0.008, "step": 17554 }, { "epoch": 19.376587520706792, "grad_norm": 0.20788262784481049, "learning_rate": 2.7483333333333335e-05, "loss": 0.0087, "step": 17555 }, { "epoch": 19.377691882937604, "grad_norm": 0.39026060700416565, "learning_rate": 2.7483e-05, "loss": 0.0081, "step": 17556 }, { "epoch": 
19.378796245168417, "grad_norm": 0.24208523333072662, "learning_rate": 2.7482666666666667e-05, "loss": 0.011, "step": 17557 }, { "epoch": 19.379900607399225, "grad_norm": 0.1994149386882782, "learning_rate": 2.7482333333333333e-05, "loss": 0.0047, "step": 17558 }, { "epoch": 19.381004969630037, "grad_norm": 0.20049826800823212, "learning_rate": 2.7482000000000002e-05, "loss": 0.0054, "step": 17559 }, { "epoch": 19.38210933186085, "grad_norm": 0.2572290599346161, "learning_rate": 2.7481666666666665e-05, "loss": 0.018, "step": 17560 }, { "epoch": 19.383213694091662, "grad_norm": 0.13804003596305847, "learning_rate": 2.7481333333333334e-05, "loss": 0.0026, "step": 17561 }, { "epoch": 19.384318056322474, "grad_norm": 0.23258352279663086, "learning_rate": 2.7481000000000003e-05, "loss": 0.0054, "step": 17562 }, { "epoch": 19.385422418553286, "grad_norm": 0.26399463415145874, "learning_rate": 2.7480666666666666e-05, "loss": 0.0085, "step": 17563 }, { "epoch": 19.3865267807841, "grad_norm": 0.08342991024255753, "learning_rate": 2.7480333333333335e-05, "loss": 0.0028, "step": 17564 }, { "epoch": 19.387631143014907, "grad_norm": 0.510503351688385, "learning_rate": 2.748e-05, "loss": 0.1058, "step": 17565 }, { "epoch": 19.38873550524572, "grad_norm": 0.3881973624229431, "learning_rate": 2.7479666666666667e-05, "loss": 0.1291, "step": 17566 }, { "epoch": 19.38983986747653, "grad_norm": 0.3048103153705597, "learning_rate": 2.7479333333333333e-05, "loss": 0.0821, "step": 17567 }, { "epoch": 19.390944229707344, "grad_norm": 0.48102861642837524, "learning_rate": 2.7479000000000002e-05, "loss": 0.036, "step": 17568 }, { "epoch": 19.392048591938156, "grad_norm": 0.6891294717788696, "learning_rate": 2.7478666666666665e-05, "loss": 0.0609, "step": 17569 }, { "epoch": 19.39315295416897, "grad_norm": 0.3355913460254669, "learning_rate": 2.7478333333333334e-05, "loss": 0.0322, "step": 17570 }, { "epoch": 19.39425731639978, "grad_norm": 0.257411926984787, "learning_rate": 
2.7478000000000004e-05, "loss": 0.0345, "step": 17571 }, { "epoch": 19.39536167863059, "grad_norm": 0.22030167281627655, "learning_rate": 2.7477666666666666e-05, "loss": 0.0253, "step": 17572 }, { "epoch": 19.3964660408614, "grad_norm": 0.17869813740253448, "learning_rate": 2.7477333333333335e-05, "loss": 0.014, "step": 17573 }, { "epoch": 19.397570403092214, "grad_norm": 0.2912316918373108, "learning_rate": 2.7477e-05, "loss": 0.0142, "step": 17574 }, { "epoch": 19.398674765323026, "grad_norm": 0.14378011226654053, "learning_rate": 2.7476666666666667e-05, "loss": 0.0053, "step": 17575 }, { "epoch": 19.39977912755384, "grad_norm": 0.3984307646751404, "learning_rate": 2.7476333333333333e-05, "loss": 0.017, "step": 17576 }, { "epoch": 19.40088348978465, "grad_norm": 0.12182369828224182, "learning_rate": 2.7476000000000003e-05, "loss": 0.0053, "step": 17577 }, { "epoch": 19.401987852015463, "grad_norm": 0.24278654158115387, "learning_rate": 2.7475666666666665e-05, "loss": 0.0055, "step": 17578 }, { "epoch": 19.40309221424627, "grad_norm": 0.09058816730976105, "learning_rate": 2.7475333333333334e-05, "loss": 0.0033, "step": 17579 }, { "epoch": 19.404196576477084, "grad_norm": 0.42892521619796753, "learning_rate": 2.7475000000000004e-05, "loss": 0.0041, "step": 17580 }, { "epoch": 19.405300938707896, "grad_norm": 0.15155762434005737, "learning_rate": 2.7474666666666666e-05, "loss": 0.0072, "step": 17581 }, { "epoch": 19.406405300938708, "grad_norm": 0.150406152009964, "learning_rate": 2.7474333333333336e-05, "loss": 0.0084, "step": 17582 }, { "epoch": 19.40750966316952, "grad_norm": 0.13711200654506683, "learning_rate": 2.7473999999999998e-05, "loss": 0.0029, "step": 17583 }, { "epoch": 19.408614025400333, "grad_norm": 0.12457583844661713, "learning_rate": 2.7473666666666668e-05, "loss": 0.0033, "step": 17584 }, { "epoch": 19.409718387631145, "grad_norm": 0.11038846522569656, "learning_rate": 2.7473333333333333e-05, "loss": 0.0048, "step": 17585 }, { "epoch": 
19.410822749861953, "grad_norm": 0.2104290872812271, "learning_rate": 2.7473e-05, "loss": 0.0066, "step": 17586 }, { "epoch": 19.411927112092766, "grad_norm": 0.24597080051898956, "learning_rate": 2.747266666666667e-05, "loss": 0.0069, "step": 17587 }, { "epoch": 19.413031474323578, "grad_norm": 0.19484452903270721, "learning_rate": 2.7472333333333335e-05, "loss": 0.01, "step": 17588 }, { "epoch": 19.41413583655439, "grad_norm": 0.21157647669315338, "learning_rate": 2.7472e-05, "loss": 0.0366, "step": 17589 }, { "epoch": 19.415240198785202, "grad_norm": 0.710125207901001, "learning_rate": 2.7471666666666667e-05, "loss": 0.0111, "step": 17590 }, { "epoch": 19.416344561016015, "grad_norm": 0.19927135109901428, "learning_rate": 2.7471333333333336e-05, "loss": 0.0099, "step": 17591 }, { "epoch": 19.417448923246823, "grad_norm": 0.19995762407779694, "learning_rate": 2.7471e-05, "loss": 0.0072, "step": 17592 }, { "epoch": 19.418553285477635, "grad_norm": 0.1741487830877304, "learning_rate": 2.7470666666666668e-05, "loss": 0.0038, "step": 17593 }, { "epoch": 19.419657647708448, "grad_norm": 0.0727996900677681, "learning_rate": 2.7470333333333334e-05, "loss": 0.0022, "step": 17594 }, { "epoch": 19.42076200993926, "grad_norm": 0.23927529156208038, "learning_rate": 2.747e-05, "loss": 0.0045, "step": 17595 }, { "epoch": 19.421866372170072, "grad_norm": 0.14269064366817474, "learning_rate": 2.746966666666667e-05, "loss": 0.0091, "step": 17596 }, { "epoch": 19.422970734400884, "grad_norm": 0.28272998332977295, "learning_rate": 2.7469333333333335e-05, "loss": 0.0087, "step": 17597 }, { "epoch": 19.424075096631697, "grad_norm": 0.11535084247589111, "learning_rate": 2.7469e-05, "loss": 0.0052, "step": 17598 }, { "epoch": 19.425179458862505, "grad_norm": 0.10389071702957153, "learning_rate": 2.7468666666666667e-05, "loss": 0.0016, "step": 17599 }, { "epoch": 19.426283821093318, "grad_norm": 0.12786135077476501, "learning_rate": 2.7468333333333336e-05, "loss": 0.0042, "step": 17600 
}, { "epoch": 19.42738818332413, "grad_norm": 0.2091425210237503, "learning_rate": 2.7468e-05, "loss": 0.0044, "step": 17601 }, { "epoch": 19.428492545554942, "grad_norm": 0.2444959282875061, "learning_rate": 2.7467666666666668e-05, "loss": 0.0101, "step": 17602 }, { "epoch": 19.429596907785754, "grad_norm": 0.04629471153020859, "learning_rate": 2.7467333333333334e-05, "loss": 0.0016, "step": 17603 }, { "epoch": 19.430701270016566, "grad_norm": 0.11678069829940796, "learning_rate": 2.7467e-05, "loss": 0.0049, "step": 17604 }, { "epoch": 19.43180563224738, "grad_norm": 0.13743913173675537, "learning_rate": 2.746666666666667e-05, "loss": 0.0033, "step": 17605 }, { "epoch": 19.432909994478187, "grad_norm": 0.07619866728782654, "learning_rate": 2.7466333333333335e-05, "loss": 0.0026, "step": 17606 }, { "epoch": 19.434014356709, "grad_norm": 0.24215646088123322, "learning_rate": 2.7466e-05, "loss": 0.0079, "step": 17607 }, { "epoch": 19.435118718939812, "grad_norm": 0.09450917690992355, "learning_rate": 2.7465666666666667e-05, "loss": 0.0032, "step": 17608 }, { "epoch": 19.436223081170624, "grad_norm": 0.17111988365650177, "learning_rate": 2.7465333333333336e-05, "loss": 0.0044, "step": 17609 }, { "epoch": 19.437327443401436, "grad_norm": 0.23984600603580475, "learning_rate": 2.7465e-05, "loss": 0.0066, "step": 17610 }, { "epoch": 19.43843180563225, "grad_norm": 0.12723588943481445, "learning_rate": 2.7464666666666668e-05, "loss": 0.0037, "step": 17611 }, { "epoch": 19.43953616786306, "grad_norm": 0.23625583946704865, "learning_rate": 2.7464333333333334e-05, "loss": 0.0057, "step": 17612 }, { "epoch": 19.44064053009387, "grad_norm": 0.2780362069606781, "learning_rate": 2.7464e-05, "loss": 0.0063, "step": 17613 }, { "epoch": 19.44174489232468, "grad_norm": 0.06233330816030502, "learning_rate": 2.746366666666667e-05, "loss": 0.0015, "step": 17614 }, { "epoch": 19.442849254555494, "grad_norm": 0.42398834228515625, "learning_rate": 2.7463333333333332e-05, "loss": 0.1205, 
"step": 17615 }, { "epoch": 19.443953616786306, "grad_norm": 0.34905925393104553, "learning_rate": 2.7463e-05, "loss": 0.0641, "step": 17616 }, { "epoch": 19.44505797901712, "grad_norm": 0.7079021334648132, "learning_rate": 2.7462666666666667e-05, "loss": 0.0916, "step": 17617 }, { "epoch": 19.44616234124793, "grad_norm": 0.3643115758895874, "learning_rate": 2.7462333333333333e-05, "loss": 0.0641, "step": 17618 }, { "epoch": 19.447266703478743, "grad_norm": 0.35137930512428284, "learning_rate": 2.7462e-05, "loss": 0.0435, "step": 17619 }, { "epoch": 19.44837106570955, "grad_norm": 0.7252252697944641, "learning_rate": 2.7461666666666668e-05, "loss": 0.0873, "step": 17620 }, { "epoch": 19.449475427940364, "grad_norm": 2.90767765045166, "learning_rate": 2.7461333333333334e-05, "loss": 0.0279, "step": 17621 }, { "epoch": 19.450579790171176, "grad_norm": 0.184412881731987, "learning_rate": 2.7461e-05, "loss": 0.0119, "step": 17622 }, { "epoch": 19.451684152401988, "grad_norm": 0.5590525269508362, "learning_rate": 2.746066666666667e-05, "loss": 0.0324, "step": 17623 }, { "epoch": 19.4527885146328, "grad_norm": 0.40801766514778137, "learning_rate": 2.7460333333333332e-05, "loss": 0.019, "step": 17624 }, { "epoch": 19.453892876863613, "grad_norm": 0.08594806492328644, "learning_rate": 2.746e-05, "loss": 0.0043, "step": 17625 }, { "epoch": 19.45499723909442, "grad_norm": 0.18615788221359253, "learning_rate": 2.7459666666666667e-05, "loss": 0.0075, "step": 17626 }, { "epoch": 19.456101601325233, "grad_norm": 0.15439671277999878, "learning_rate": 2.7459333333333333e-05, "loss": 0.0056, "step": 17627 }, { "epoch": 19.457205963556046, "grad_norm": 0.2820371687412262, "learning_rate": 2.7459e-05, "loss": 0.021, "step": 17628 }, { "epoch": 19.458310325786858, "grad_norm": 0.0628746822476387, "learning_rate": 2.745866666666667e-05, "loss": 0.0019, "step": 17629 }, { "epoch": 19.45941468801767, "grad_norm": 0.17986808717250824, "learning_rate": 2.7458333333333334e-05, "loss": 
0.0073, "step": 17630 }, { "epoch": 19.460519050248482, "grad_norm": 0.5632546544075012, "learning_rate": 2.7458e-05, "loss": 0.0065, "step": 17631 }, { "epoch": 19.461623412479295, "grad_norm": 0.08876731991767883, "learning_rate": 2.745766666666667e-05, "loss": 0.0039, "step": 17632 }, { "epoch": 19.462727774710103, "grad_norm": 0.26207438111305237, "learning_rate": 2.7457333333333332e-05, "loss": 0.0056, "step": 17633 }, { "epoch": 19.463832136940916, "grad_norm": 0.1460900753736496, "learning_rate": 2.7457e-05, "loss": 0.0073, "step": 17634 }, { "epoch": 19.464936499171728, "grad_norm": 0.13509677350521088, "learning_rate": 2.7456666666666667e-05, "loss": 0.005, "step": 17635 }, { "epoch": 19.46604086140254, "grad_norm": 0.15211330354213715, "learning_rate": 2.7456333333333333e-05, "loss": 0.0084, "step": 17636 }, { "epoch": 19.467145223633352, "grad_norm": 0.18320634961128235, "learning_rate": 2.7456000000000003e-05, "loss": 0.0028, "step": 17637 }, { "epoch": 19.468249585864164, "grad_norm": 0.30565962195396423, "learning_rate": 2.745566666666667e-05, "loss": 0.0055, "step": 17638 }, { "epoch": 19.469353948094977, "grad_norm": 0.14929355680942535, "learning_rate": 2.7455333333333335e-05, "loss": 0.0061, "step": 17639 }, { "epoch": 19.470458310325785, "grad_norm": 0.1697719246149063, "learning_rate": 2.7455e-05, "loss": 0.0051, "step": 17640 }, { "epoch": 19.471562672556598, "grad_norm": 0.28038036823272705, "learning_rate": 2.745466666666667e-05, "loss": 0.0068, "step": 17641 }, { "epoch": 19.47266703478741, "grad_norm": 0.0363471582531929, "learning_rate": 2.7454333333333332e-05, "loss": 0.0011, "step": 17642 }, { "epoch": 19.473771397018222, "grad_norm": 0.2337694615125656, "learning_rate": 2.7454000000000002e-05, "loss": 0.0091, "step": 17643 }, { "epoch": 19.474875759249034, "grad_norm": 0.19061867892742157, "learning_rate": 2.7453666666666664e-05, "loss": 0.0062, "step": 17644 }, { "epoch": 19.475980121479846, "grad_norm": 0.15489183366298676, 
"learning_rate": 2.7453333333333334e-05, "loss": 0.0063, "step": 17645 }, { "epoch": 19.47708448371066, "grad_norm": 0.07340095192193985, "learning_rate": 2.7453000000000003e-05, "loss": 0.0014, "step": 17646 }, { "epoch": 19.478188845941467, "grad_norm": 0.17150942981243134, "learning_rate": 2.7452666666666666e-05, "loss": 0.0056, "step": 17647 }, { "epoch": 19.47929320817228, "grad_norm": 0.2227417677640915, "learning_rate": 2.7452333333333335e-05, "loss": 0.0049, "step": 17648 }, { "epoch": 19.480397570403092, "grad_norm": 0.5334200859069824, "learning_rate": 2.7452e-05, "loss": 0.0103, "step": 17649 }, { "epoch": 19.481501932633904, "grad_norm": 0.15553276240825653, "learning_rate": 2.7451666666666667e-05, "loss": 0.0055, "step": 17650 }, { "epoch": 19.482606294864716, "grad_norm": 0.3176913261413574, "learning_rate": 2.7451333333333333e-05, "loss": 0.0127, "step": 17651 }, { "epoch": 19.48371065709553, "grad_norm": 0.1515646129846573, "learning_rate": 2.7451000000000002e-05, "loss": 0.0037, "step": 17652 }, { "epoch": 19.48481501932634, "grad_norm": 0.47215092182159424, "learning_rate": 2.7450666666666665e-05, "loss": 0.0095, "step": 17653 }, { "epoch": 19.48591938155715, "grad_norm": 0.12974223494529724, "learning_rate": 2.7450333333333334e-05, "loss": 0.0062, "step": 17654 }, { "epoch": 19.48702374378796, "grad_norm": 1.3528932332992554, "learning_rate": 2.7450000000000003e-05, "loss": 0.0298, "step": 17655 }, { "epoch": 19.488128106018774, "grad_norm": 0.20271199941635132, "learning_rate": 2.7449666666666666e-05, "loss": 0.0022, "step": 17656 }, { "epoch": 19.489232468249586, "grad_norm": 0.14369890093803406, "learning_rate": 2.7449333333333335e-05, "loss": 0.005, "step": 17657 }, { "epoch": 19.4903368304804, "grad_norm": 0.1341523677110672, "learning_rate": 2.7449e-05, "loss": 0.006, "step": 17658 }, { "epoch": 19.49144119271121, "grad_norm": 0.2383950799703598, "learning_rate": 2.7448666666666667e-05, "loss": 0.0072, "step": 17659 }, { "epoch": 
19.49254555494202, "grad_norm": 0.659619927406311, "learning_rate": 2.7448333333333333e-05, "loss": 0.0097, "step": 17660 }, { "epoch": 19.49364991717283, "grad_norm": 0.11964820325374603, "learning_rate": 2.7448000000000002e-05, "loss": 0.0023, "step": 17661 }, { "epoch": 19.494754279403644, "grad_norm": 0.09466087073087692, "learning_rate": 2.7447666666666668e-05, "loss": 0.0038, "step": 17662 }, { "epoch": 19.495858641634456, "grad_norm": 0.09560887515544891, "learning_rate": 2.7447333333333334e-05, "loss": 0.0018, "step": 17663 }, { "epoch": 19.496963003865268, "grad_norm": 0.30683431029319763, "learning_rate": 2.7447000000000003e-05, "loss": 0.0101, "step": 17664 }, { "epoch": 19.49806736609608, "grad_norm": 0.5006068348884583, "learning_rate": 2.7446666666666666e-05, "loss": 0.1323, "step": 17665 }, { "epoch": 19.499171728326893, "grad_norm": 0.32351061701774597, "learning_rate": 2.7446333333333335e-05, "loss": 0.0719, "step": 17666 }, { "epoch": 19.5002760905577, "grad_norm": 0.3725202679634094, "learning_rate": 2.7446e-05, "loss": 0.0758, "step": 17667 }, { "epoch": 19.501380452788514, "grad_norm": 0.41360101103782654, "learning_rate": 2.7445666666666667e-05, "loss": 0.0546, "step": 17668 }, { "epoch": 19.502484815019326, "grad_norm": 0.45589858293533325, "learning_rate": 2.7445333333333333e-05, "loss": 0.0568, "step": 17669 }, { "epoch": 19.503589177250138, "grad_norm": 1.4239020347595215, "learning_rate": 2.7445000000000002e-05, "loss": 0.0341, "step": 17670 }, { "epoch": 19.50469353948095, "grad_norm": 0.29723361134529114, "learning_rate": 2.7444666666666668e-05, "loss": 0.0222, "step": 17671 }, { "epoch": 19.505797901711762, "grad_norm": 0.38955897092819214, "learning_rate": 2.7444333333333334e-05, "loss": 0.0235, "step": 17672 }, { "epoch": 19.506902263942575, "grad_norm": 0.13919636607170105, "learning_rate": 2.7444e-05, "loss": 0.01, "step": 17673 }, { "epoch": 19.508006626173383, "grad_norm": 0.19859521090984344, "learning_rate": 
2.7443666666666666e-05, "loss": 0.009, "step": 17674 }, { "epoch": 19.509110988404196, "grad_norm": 0.5282884240150452, "learning_rate": 2.7443333333333335e-05, "loss": 0.0127, "step": 17675 }, { "epoch": 19.510215350635008, "grad_norm": 0.10236022621393204, "learning_rate": 2.7442999999999998e-05, "loss": 0.0041, "step": 17676 }, { "epoch": 19.51131971286582, "grad_norm": 0.2551695704460144, "learning_rate": 2.7442666666666667e-05, "loss": 0.0297, "step": 17677 }, { "epoch": 19.512424075096632, "grad_norm": 0.1359536498785019, "learning_rate": 2.7442333333333333e-05, "loss": 0.0049, "step": 17678 }, { "epoch": 19.513528437327444, "grad_norm": 0.30872249603271484, "learning_rate": 2.7442e-05, "loss": 0.0137, "step": 17679 }, { "epoch": 19.514632799558257, "grad_norm": 0.5487658977508545, "learning_rate": 2.744166666666667e-05, "loss": 0.0095, "step": 17680 }, { "epoch": 19.515737161789065, "grad_norm": 0.21912281215190887, "learning_rate": 2.7441333333333334e-05, "loss": 0.0064, "step": 17681 }, { "epoch": 19.516841524019878, "grad_norm": 0.18898402154445648, "learning_rate": 2.7441e-05, "loss": 0.0075, "step": 17682 }, { "epoch": 19.51794588625069, "grad_norm": 0.1891532987356186, "learning_rate": 2.7440666666666666e-05, "loss": 0.0051, "step": 17683 }, { "epoch": 19.519050248481502, "grad_norm": 0.6438798904418945, "learning_rate": 2.7440333333333336e-05, "loss": 0.0051, "step": 17684 }, { "epoch": 19.520154610712314, "grad_norm": 0.161011204123497, "learning_rate": 2.7439999999999998e-05, "loss": 0.007, "step": 17685 }, { "epoch": 19.521258972943127, "grad_norm": 0.29344210028648376, "learning_rate": 2.7439666666666667e-05, "loss": 0.0083, "step": 17686 }, { "epoch": 19.52236333517394, "grad_norm": 0.6413940191268921, "learning_rate": 2.7439333333333337e-05, "loss": 0.012, "step": 17687 }, { "epoch": 19.523467697404747, "grad_norm": 0.7148847579956055, "learning_rate": 2.7439e-05, "loss": 0.0037, "step": 17688 }, { "epoch": 19.52457205963556, "grad_norm": 
0.2727605402469635, "learning_rate": 2.743866666666667e-05, "loss": 0.0076, "step": 17689 }, { "epoch": 19.525676421866372, "grad_norm": 0.29051750898361206, "learning_rate": 2.7438333333333335e-05, "loss": 0.0143, "step": 17690 }, { "epoch": 19.526780784097184, "grad_norm": 0.11312740296125412, "learning_rate": 2.7438e-05, "loss": 0.0049, "step": 17691 }, { "epoch": 19.527885146327996, "grad_norm": 0.20503279566764832, "learning_rate": 2.7437666666666666e-05, "loss": 0.0067, "step": 17692 }, { "epoch": 19.52898950855881, "grad_norm": 0.2062247097492218, "learning_rate": 2.7437333333333336e-05, "loss": 0.0026, "step": 17693 }, { "epoch": 19.53009387078962, "grad_norm": 0.2593080997467041, "learning_rate": 2.7437e-05, "loss": 0.0094, "step": 17694 }, { "epoch": 19.53119823302043, "grad_norm": 0.5944557189941406, "learning_rate": 2.7436666666666668e-05, "loss": 0.0083, "step": 17695 }, { "epoch": 19.53230259525124, "grad_norm": 0.15698198974132538, "learning_rate": 2.7436333333333337e-05, "loss": 0.0055, "step": 17696 }, { "epoch": 19.533406957482054, "grad_norm": 0.12285378575325012, "learning_rate": 2.7436e-05, "loss": 0.004, "step": 17697 }, { "epoch": 19.534511319712866, "grad_norm": 0.12017073482275009, "learning_rate": 2.743566666666667e-05, "loss": 0.0028, "step": 17698 }, { "epoch": 19.53561568194368, "grad_norm": 0.15223635733127594, "learning_rate": 2.7435333333333335e-05, "loss": 0.0055, "step": 17699 }, { "epoch": 19.53672004417449, "grad_norm": 0.3313013017177582, "learning_rate": 2.7435e-05, "loss": 0.0092, "step": 17700 }, { "epoch": 19.5378244064053, "grad_norm": 0.21642175316810608, "learning_rate": 2.7434666666666667e-05, "loss": 0.0079, "step": 17701 }, { "epoch": 19.53892876863611, "grad_norm": 0.24223901331424713, "learning_rate": 2.7434333333333333e-05, "loss": 0.0075, "step": 17702 }, { "epoch": 19.540033130866924, "grad_norm": 0.3074282705783844, "learning_rate": 2.7434e-05, "loss": 0.0043, "step": 17703 }, { "epoch": 19.541137493097736, 
"grad_norm": 0.23626987636089325, "learning_rate": 2.7433666666666668e-05, "loss": 0.0064, "step": 17704 }, { "epoch": 19.542241855328548, "grad_norm": 0.6751654148101807, "learning_rate": 2.7433333333333334e-05, "loss": 0.0121, "step": 17705 }, { "epoch": 19.54334621755936, "grad_norm": 0.28140974044799805, "learning_rate": 2.7433e-05, "loss": 0.0161, "step": 17706 }, { "epoch": 19.544450579790173, "grad_norm": 0.3530075252056122, "learning_rate": 2.743266666666667e-05, "loss": 0.0123, "step": 17707 }, { "epoch": 19.54555494202098, "grad_norm": 0.2842315137386322, "learning_rate": 2.743233333333333e-05, "loss": 0.009, "step": 17708 }, { "epoch": 19.546659304251794, "grad_norm": 0.21046222746372223, "learning_rate": 2.7432e-05, "loss": 0.0075, "step": 17709 }, { "epoch": 19.547763666482606, "grad_norm": 0.09052842855453491, "learning_rate": 2.7431666666666667e-05, "loss": 0.0028, "step": 17710 }, { "epoch": 19.548868028713418, "grad_norm": 0.19810032844543457, "learning_rate": 2.7431333333333333e-05, "loss": 0.0056, "step": 17711 }, { "epoch": 19.54997239094423, "grad_norm": 0.32600706815719604, "learning_rate": 2.7431000000000002e-05, "loss": 0.0118, "step": 17712 }, { "epoch": 19.551076753175042, "grad_norm": 0.5546702742576599, "learning_rate": 2.7430666666666668e-05, "loss": 0.0205, "step": 17713 }, { "epoch": 19.552181115405855, "grad_norm": 0.23312146961688995, "learning_rate": 2.7430333333333334e-05, "loss": 0.0025, "step": 17714 }, { "epoch": 19.553285477636663, "grad_norm": 0.6189497113227844, "learning_rate": 2.743e-05, "loss": 0.1286, "step": 17715 }, { "epoch": 19.554389839867476, "grad_norm": 0.576252818107605, "learning_rate": 2.742966666666667e-05, "loss": 0.0961, "step": 17716 }, { "epoch": 19.555494202098288, "grad_norm": 0.32780206203460693, "learning_rate": 2.7429333333333332e-05, "loss": 0.0674, "step": 17717 }, { "epoch": 19.5565985643291, "grad_norm": 0.37711378931999207, "learning_rate": 2.7429e-05, "loss": 0.093, "step": 17718 }, { "epoch": 
19.557702926559912, "grad_norm": 0.25839754939079285, "learning_rate": 2.7428666666666667e-05, "loss": 0.0462, "step": 17719 }, { "epoch": 19.558807288790724, "grad_norm": 0.4011233448982239, "learning_rate": 2.7428333333333333e-05, "loss": 0.0668, "step": 17720 }, { "epoch": 19.559911651021537, "grad_norm": 0.250968873500824, "learning_rate": 2.7428000000000002e-05, "loss": 0.0284, "step": 17721 }, { "epoch": 19.561016013252345, "grad_norm": 0.4335199296474457, "learning_rate": 2.7427666666666668e-05, "loss": 0.0167, "step": 17722 }, { "epoch": 19.562120375483158, "grad_norm": 0.13116173446178436, "learning_rate": 2.7427333333333334e-05, "loss": 0.0082, "step": 17723 }, { "epoch": 19.56322473771397, "grad_norm": 0.25958573818206787, "learning_rate": 2.7427e-05, "loss": 0.014, "step": 17724 }, { "epoch": 19.564329099944782, "grad_norm": 0.15289759635925293, "learning_rate": 2.742666666666667e-05, "loss": 0.0124, "step": 17725 }, { "epoch": 19.565433462175594, "grad_norm": 0.15908120572566986, "learning_rate": 2.7426333333333332e-05, "loss": 0.0073, "step": 17726 }, { "epoch": 19.566537824406407, "grad_norm": 0.2831275165081024, "learning_rate": 2.7426e-05, "loss": 0.009, "step": 17727 }, { "epoch": 19.567642186637215, "grad_norm": 0.2781952917575836, "learning_rate": 2.7425666666666667e-05, "loss": 0.0182, "step": 17728 }, { "epoch": 19.568746548868027, "grad_norm": 0.22802309691905975, "learning_rate": 2.7425333333333333e-05, "loss": 0.0049, "step": 17729 }, { "epoch": 19.56985091109884, "grad_norm": 0.13389992713928223, "learning_rate": 2.7425000000000003e-05, "loss": 0.005, "step": 17730 }, { "epoch": 19.570955273329652, "grad_norm": 0.41780757904052734, "learning_rate": 2.742466666666667e-05, "loss": 0.0095, "step": 17731 }, { "epoch": 19.572059635560464, "grad_norm": 0.11804128438234329, "learning_rate": 2.7424333333333334e-05, "loss": 0.0028, "step": 17732 }, { "epoch": 19.573163997791276, "grad_norm": 0.9759154915809631, "learning_rate": 2.7424e-05, "loss": 
0.0122, "step": 17733 }, { "epoch": 19.57426836002209, "grad_norm": 0.3082873821258545, "learning_rate": 2.7423666666666666e-05, "loss": 0.0069, "step": 17734 }, { "epoch": 19.575372722252897, "grad_norm": 0.22391152381896973, "learning_rate": 2.7423333333333332e-05, "loss": 0.0074, "step": 17735 }, { "epoch": 19.57647708448371, "grad_norm": 0.10130956768989563, "learning_rate": 2.7423e-05, "loss": 0.0023, "step": 17736 }, { "epoch": 19.57758144671452, "grad_norm": 0.21090663969516754, "learning_rate": 2.7422666666666667e-05, "loss": 0.0053, "step": 17737 }, { "epoch": 19.578685808945334, "grad_norm": 0.09080500155687332, "learning_rate": 2.7422333333333333e-05, "loss": 0.0031, "step": 17738 }, { "epoch": 19.579790171176146, "grad_norm": 0.16125190258026123, "learning_rate": 2.7422000000000003e-05, "loss": 0.0052, "step": 17739 }, { "epoch": 19.58089453340696, "grad_norm": 0.2744396924972534, "learning_rate": 2.7421666666666665e-05, "loss": 0.0105, "step": 17740 }, { "epoch": 19.58199889563777, "grad_norm": null, "learning_rate": 2.7421666666666665e-05, "loss": 0.0146, "step": 17741 }, { "epoch": 19.58310325786858, "grad_norm": 0.224337637424469, "learning_rate": 2.7421333333333335e-05, "loss": 0.0137, "step": 17742 }, { "epoch": 19.58420762009939, "grad_norm": 0.23133258521556854, "learning_rate": 2.7421e-05, "loss": 0.0049, "step": 17743 }, { "epoch": 19.585311982330204, "grad_norm": 0.13665920495986938, "learning_rate": 2.7420666666666666e-05, "loss": 0.0042, "step": 17744 }, { "epoch": 19.586416344561016, "grad_norm": 0.1756087988615036, "learning_rate": 2.7420333333333332e-05, "loss": 0.0075, "step": 17745 }, { "epoch": 19.587520706791828, "grad_norm": 0.26914075016975403, "learning_rate": 2.7420000000000002e-05, "loss": 0.01, "step": 17746 }, { "epoch": 19.58862506902264, "grad_norm": 0.28078553080558777, "learning_rate": 2.7419666666666668e-05, "loss": 0.0061, "step": 17747 }, { "epoch": 19.589729431253453, "grad_norm": 0.3880771994590759, 
"learning_rate": 2.7419333333333334e-05, "loss": 0.0099, "step": 17748 }, { "epoch": 19.59083379348426, "grad_norm": 0.45961588621139526, "learning_rate": 2.7419000000000003e-05, "loss": 0.0047, "step": 17749 }, { "epoch": 19.591938155715074, "grad_norm": 0.18064403533935547, "learning_rate": 2.7418666666666665e-05, "loss": 0.0047, "step": 17750 }, { "epoch": 19.593042517945886, "grad_norm": 0.40291839838027954, "learning_rate": 2.7418333333333335e-05, "loss": 0.0293, "step": 17751 }, { "epoch": 19.594146880176698, "grad_norm": 0.4689536392688751, "learning_rate": 2.7418e-05, "loss": 0.0062, "step": 17752 }, { "epoch": 19.59525124240751, "grad_norm": 0.11739293485879898, "learning_rate": 2.7417666666666667e-05, "loss": 0.0031, "step": 17753 }, { "epoch": 19.596355604638322, "grad_norm": 0.19250768423080444, "learning_rate": 2.7417333333333333e-05, "loss": 0.005, "step": 17754 }, { "epoch": 19.597459966869135, "grad_norm": 0.35758835077285767, "learning_rate": 2.7417000000000002e-05, "loss": 0.0046, "step": 17755 }, { "epoch": 19.598564329099943, "grad_norm": 0.06112418696284294, "learning_rate": 2.7416666666666668e-05, "loss": 0.0027, "step": 17756 }, { "epoch": 19.599668691330756, "grad_norm": 0.28529107570648193, "learning_rate": 2.7416333333333334e-05, "loss": 0.0049, "step": 17757 }, { "epoch": 19.600773053561568, "grad_norm": 0.1528342217206955, "learning_rate": 2.7416000000000003e-05, "loss": 0.0043, "step": 17758 }, { "epoch": 19.60187741579238, "grad_norm": 0.6569881439208984, "learning_rate": 2.7415666666666666e-05, "loss": 0.0057, "step": 17759 }, { "epoch": 19.602981778023192, "grad_norm": 0.5489342212677002, "learning_rate": 2.7415333333333335e-05, "loss": 0.0076, "step": 17760 }, { "epoch": 19.604086140254005, "grad_norm": 0.2795596122741699, "learning_rate": 2.7415e-05, "loss": 0.009, "step": 17761 }, { "epoch": 19.605190502484817, "grad_norm": 0.24798162281513214, "learning_rate": 2.7414666666666667e-05, "loss": 0.0059, "step": 17762 }, { "epoch": 
19.606294864715625, "grad_norm": 0.23974639177322388, "learning_rate": 2.7414333333333336e-05, "loss": 0.0057, "step": 17763 }, { "epoch": 19.607399226946438, "grad_norm": 0.2972852885723114, "learning_rate": 2.7414e-05, "loss": 0.0083, "step": 17764 }, { "epoch": 19.60850358917725, "grad_norm": 0.5864447355270386, "learning_rate": 2.7413666666666668e-05, "loss": 0.1412, "step": 17765 }, { "epoch": 19.609607951408062, "grad_norm": 0.4800302982330322, "learning_rate": 2.7413333333333334e-05, "loss": 0.0713, "step": 17766 }, { "epoch": 19.610712313638874, "grad_norm": 0.43087759613990784, "learning_rate": 2.7413e-05, "loss": 0.0949, "step": 17767 }, { "epoch": 19.611816675869687, "grad_norm": 0.42705607414245605, "learning_rate": 2.7412666666666666e-05, "loss": 0.0985, "step": 17768 }, { "epoch": 19.612921038100495, "grad_norm": 0.23084740340709686, "learning_rate": 2.7412333333333335e-05, "loss": 0.054, "step": 17769 }, { "epoch": 19.614025400331307, "grad_norm": 0.33915263414382935, "learning_rate": 2.7411999999999998e-05, "loss": 0.0564, "step": 17770 }, { "epoch": 19.61512976256212, "grad_norm": 0.2016020119190216, "learning_rate": 2.7411666666666667e-05, "loss": 0.0215, "step": 17771 }, { "epoch": 19.616234124792932, "grad_norm": 0.24618391692638397, "learning_rate": 2.7411333333333336e-05, "loss": 0.0396, "step": 17772 }, { "epoch": 19.617338487023744, "grad_norm": 0.19325967133045197, "learning_rate": 2.7411e-05, "loss": 0.0129, "step": 17773 }, { "epoch": 19.618442849254556, "grad_norm": 0.44376617670059204, "learning_rate": 2.7410666666666668e-05, "loss": 0.0172, "step": 17774 }, { "epoch": 19.61954721148537, "grad_norm": 0.17319515347480774, "learning_rate": 2.7410333333333334e-05, "loss": 0.0123, "step": 17775 }, { "epoch": 19.620651573716177, "grad_norm": 0.5187986493110657, "learning_rate": 2.741e-05, "loss": 0.0104, "step": 17776 }, { "epoch": 19.62175593594699, "grad_norm": 0.3141019344329834, "learning_rate": 2.7409666666666666e-05, "loss": 0.0084, 
"step": 17777 }, { "epoch": 19.6228602981778, "grad_norm": 0.2151336967945099, "learning_rate": 2.7409333333333335e-05, "loss": 0.0114, "step": 17778 }, { "epoch": 19.623964660408614, "grad_norm": 0.2429313212633133, "learning_rate": 2.7408999999999998e-05, "loss": 0.0098, "step": 17779 }, { "epoch": 19.625069022639426, "grad_norm": 0.1789333075284958, "learning_rate": 2.7408666666666667e-05, "loss": 0.0071, "step": 17780 }, { "epoch": 19.62617338487024, "grad_norm": 0.1456858217716217, "learning_rate": 2.7408333333333337e-05, "loss": 0.0045, "step": 17781 }, { "epoch": 19.62727774710105, "grad_norm": 0.33271321654319763, "learning_rate": 2.7408e-05, "loss": 0.0139, "step": 17782 }, { "epoch": 19.62838210933186, "grad_norm": 0.12043862044811249, "learning_rate": 2.740766666666667e-05, "loss": 0.0039, "step": 17783 }, { "epoch": 19.62948647156267, "grad_norm": 0.25015315413475037, "learning_rate": 2.7407333333333334e-05, "loss": 0.0115, "step": 17784 }, { "epoch": 19.630590833793484, "grad_norm": 0.4563989043235779, "learning_rate": 2.7407e-05, "loss": 0.0049, "step": 17785 }, { "epoch": 19.631695196024296, "grad_norm": 0.21596455574035645, "learning_rate": 2.7406666666666666e-05, "loss": 0.0066, "step": 17786 }, { "epoch": 19.63279955825511, "grad_norm": 0.20419088006019592, "learning_rate": 2.7406333333333336e-05, "loss": 0.0082, "step": 17787 }, { "epoch": 19.63390392048592, "grad_norm": 0.09722143411636353, "learning_rate": 2.7406e-05, "loss": 0.0066, "step": 17788 }, { "epoch": 19.635008282716733, "grad_norm": 0.17628851532936096, "learning_rate": 2.7405666666666667e-05, "loss": 0.0021, "step": 17789 }, { "epoch": 19.63611264494754, "grad_norm": 0.181448832154274, "learning_rate": 2.7405333333333337e-05, "loss": 0.0098, "step": 17790 }, { "epoch": 19.637217007178354, "grad_norm": 0.22308985888957977, "learning_rate": 2.7405e-05, "loss": 0.0094, "step": 17791 }, { "epoch": 19.638321369409166, "grad_norm": 0.09507487714290619, "learning_rate": 
2.740466666666667e-05, "loss": 0.0026, "step": 17792 }, { "epoch": 19.639425731639978, "grad_norm": 0.19446943700313568, "learning_rate": 2.7404333333333335e-05, "loss": 0.003, "step": 17793 }, { "epoch": 19.64053009387079, "grad_norm": 0.1432875096797943, "learning_rate": 2.7404e-05, "loss": 0.0058, "step": 17794 }, { "epoch": 19.641634456101603, "grad_norm": 0.19037017226219177, "learning_rate": 2.7403666666666666e-05, "loss": 0.0063, "step": 17795 }, { "epoch": 19.642738818332415, "grad_norm": 0.2266160100698471, "learning_rate": 2.7403333333333332e-05, "loss": 0.0073, "step": 17796 }, { "epoch": 19.643843180563223, "grad_norm": 0.2950056493282318, "learning_rate": 2.7403000000000002e-05, "loss": 0.0136, "step": 17797 }, { "epoch": 19.644947542794036, "grad_norm": 0.6476045250892639, "learning_rate": 2.7402666666666668e-05, "loss": 0.0118, "step": 17798 }, { "epoch": 19.646051905024848, "grad_norm": 0.23445114493370056, "learning_rate": 2.7402333333333334e-05, "loss": 0.0077, "step": 17799 }, { "epoch": 19.64715626725566, "grad_norm": 0.3127058744430542, "learning_rate": 2.7402e-05, "loss": 0.0137, "step": 17800 }, { "epoch": 19.648260629486472, "grad_norm": 0.21726027131080627, "learning_rate": 2.740166666666667e-05, "loss": 0.0043, "step": 17801 }, { "epoch": 19.649364991717285, "grad_norm": 0.15515127778053284, "learning_rate": 2.740133333333333e-05, "loss": 0.0051, "step": 17802 }, { "epoch": 19.650469353948097, "grad_norm": 0.15046513080596924, "learning_rate": 2.7401e-05, "loss": 0.0039, "step": 17803 }, { "epoch": 19.651573716178905, "grad_norm": 0.08322426676750183, "learning_rate": 2.7400666666666667e-05, "loss": 0.0028, "step": 17804 }, { "epoch": 19.652678078409718, "grad_norm": 0.3617481291294098, "learning_rate": 2.7400333333333333e-05, "loss": 0.0079, "step": 17805 }, { "epoch": 19.65378244064053, "grad_norm": 0.10305536538362503, "learning_rate": 2.7400000000000002e-05, "loss": 0.0033, "step": 17806 }, { "epoch": 19.654886802871342, "grad_norm": 
0.12778915464878082, "learning_rate": 2.7399666666666668e-05, "loss": 0.0031, "step": 17807 }, { "epoch": 19.655991165102154, "grad_norm": 0.1999000608921051, "learning_rate": 2.7399333333333334e-05, "loss": 0.003, "step": 17808 }, { "epoch": 19.657095527332967, "grad_norm": 0.18557217717170715, "learning_rate": 2.7399e-05, "loss": 0.0073, "step": 17809 }, { "epoch": 19.658199889563775, "grad_norm": 0.15609778463840485, "learning_rate": 2.739866666666667e-05, "loss": 0.0037, "step": 17810 }, { "epoch": 19.659304251794588, "grad_norm": 0.09807632863521576, "learning_rate": 2.739833333333333e-05, "loss": 0.0033, "step": 17811 }, { "epoch": 19.6604086140254, "grad_norm": 0.19797442853450775, "learning_rate": 2.7398e-05, "loss": 0.0083, "step": 17812 }, { "epoch": 19.661512976256212, "grad_norm": 0.25694209337234497, "learning_rate": 2.739766666666667e-05, "loss": 0.007, "step": 17813 }, { "epoch": 19.662617338487024, "grad_norm": 0.5511760115623474, "learning_rate": 2.7397333333333333e-05, "loss": 0.0085, "step": 17814 }, { "epoch": 19.663721700717836, "grad_norm": 0.42280080914497375, "learning_rate": 2.7397000000000002e-05, "loss": 0.1149, "step": 17815 }, { "epoch": 19.66482606294865, "grad_norm": 0.4566614031791687, "learning_rate": 2.7396666666666668e-05, "loss": 0.1102, "step": 17816 }, { "epoch": 19.665930425179457, "grad_norm": 0.46104517579078674, "learning_rate": 2.7396333333333334e-05, "loss": 0.0899, "step": 17817 }, { "epoch": 19.66703478741027, "grad_norm": 0.3699950873851776, "learning_rate": 2.7396e-05, "loss": 0.0659, "step": 17818 }, { "epoch": 19.668139149641082, "grad_norm": 0.6563133597373962, "learning_rate": 2.739566666666667e-05, "loss": 0.0569, "step": 17819 }, { "epoch": 19.669243511871894, "grad_norm": 0.44313618540763855, "learning_rate": 2.7395333333333332e-05, "loss": 0.0537, "step": 17820 }, { "epoch": 19.670347874102706, "grad_norm": 0.4279889464378357, "learning_rate": 2.7395e-05, "loss": 0.0582, "step": 17821 }, { "epoch": 
19.67145223633352, "grad_norm": 0.39226365089416504, "learning_rate": 2.739466666666667e-05, "loss": 0.0237, "step": 17822 }, { "epoch": 19.67255659856433, "grad_norm": 0.4306509792804718, "learning_rate": 2.7394333333333333e-05, "loss": 0.0175, "step": 17823 }, { "epoch": 19.67366096079514, "grad_norm": 0.3983928859233856, "learning_rate": 2.7394000000000002e-05, "loss": 0.0148, "step": 17824 }, { "epoch": 19.67476532302595, "grad_norm": 0.1437559723854065, "learning_rate": 2.7393666666666665e-05, "loss": 0.037, "step": 17825 }, { "epoch": 19.675869685256764, "grad_norm": 0.2789003252983093, "learning_rate": 2.7393333333333334e-05, "loss": 0.0092, "step": 17826 }, { "epoch": 19.676974047487576, "grad_norm": 0.12949678301811218, "learning_rate": 2.7393e-05, "loss": 0.0087, "step": 17827 }, { "epoch": 19.67807840971839, "grad_norm": 0.2519197165966034, "learning_rate": 2.7392666666666666e-05, "loss": 0.0298, "step": 17828 }, { "epoch": 19.6791827719492, "grad_norm": 0.11775791645050049, "learning_rate": 2.7392333333333332e-05, "loss": 0.0055, "step": 17829 }, { "epoch": 19.680287134180013, "grad_norm": 0.09560459107160568, "learning_rate": 2.7392e-05, "loss": 0.0033, "step": 17830 }, { "epoch": 19.68139149641082, "grad_norm": 0.12905529141426086, "learning_rate": 2.7391666666666667e-05, "loss": 0.0077, "step": 17831 }, { "epoch": 19.682495858641634, "grad_norm": 0.12046854943037033, "learning_rate": 2.7391333333333333e-05, "loss": 0.0067, "step": 17832 }, { "epoch": 19.683600220872446, "grad_norm": 0.21564842760562897, "learning_rate": 2.7391000000000003e-05, "loss": 0.0044, "step": 17833 }, { "epoch": 19.684704583103258, "grad_norm": 0.786190390586853, "learning_rate": 2.7390666666666665e-05, "loss": 0.0101, "step": 17834 }, { "epoch": 19.68580894533407, "grad_norm": 0.15767458081245422, "learning_rate": 2.7390333333333334e-05, "loss": 0.0039, "step": 17835 }, { "epoch": 19.686913307564883, "grad_norm": 0.1288910210132599, "learning_rate": 2.739e-05, "loss": 
0.0034, "step": 17836 }, { "epoch": 19.68801766979569, "grad_norm": 0.1163376197218895, "learning_rate": 2.7389666666666666e-05, "loss": 0.0054, "step": 17837 }, { "epoch": 19.689122032026503, "grad_norm": 0.13040116429328918, "learning_rate": 2.7389333333333336e-05, "loss": 0.0044, "step": 17838 }, { "epoch": 19.690226394257316, "grad_norm": 0.20091316103935242, "learning_rate": 2.7389e-05, "loss": 0.0223, "step": 17839 }, { "epoch": 19.691330756488128, "grad_norm": 0.33173868060112, "learning_rate": 2.7388666666666667e-05, "loss": 0.0115, "step": 17840 }, { "epoch": 19.69243511871894, "grad_norm": 0.2811618745326996, "learning_rate": 2.7388333333333333e-05, "loss": 0.0515, "step": 17841 }, { "epoch": 19.693539480949752, "grad_norm": 0.1456241011619568, "learning_rate": 2.7388000000000003e-05, "loss": 0.0042, "step": 17842 }, { "epoch": 19.694643843180565, "grad_norm": 0.09958381205797195, "learning_rate": 2.7387666666666665e-05, "loss": 0.0042, "step": 17843 }, { "epoch": 19.695748205411373, "grad_norm": 0.1694023311138153, "learning_rate": 2.7387333333333335e-05, "loss": 0.008, "step": 17844 }, { "epoch": 19.696852567642186, "grad_norm": 0.4685829281806946, "learning_rate": 2.7387e-05, "loss": 0.0113, "step": 17845 }, { "epoch": 19.697956929872998, "grad_norm": 0.468575119972229, "learning_rate": 2.7386666666666666e-05, "loss": 0.0054, "step": 17846 }, { "epoch": 19.69906129210381, "grad_norm": 0.1308891773223877, "learning_rate": 2.7386333333333336e-05, "loss": 0.004, "step": 17847 }, { "epoch": 19.700165654334622, "grad_norm": 0.11553256958723068, "learning_rate": 2.7386000000000002e-05, "loss": 0.0051, "step": 17848 }, { "epoch": 19.701270016565434, "grad_norm": 0.17308655381202698, "learning_rate": 2.7385666666666668e-05, "loss": 0.0061, "step": 17849 }, { "epoch": 19.702374378796247, "grad_norm": 0.16335174441337585, "learning_rate": 2.7385333333333334e-05, "loss": 0.0054, "step": 17850 }, { "epoch": 19.703478741027055, "grad_norm": 0.43093961477279663, 
"learning_rate": 2.7385000000000003e-05, "loss": 0.0059, "step": 17851 }, { "epoch": 19.704583103257868, "grad_norm": 0.2754434645175934, "learning_rate": 2.7384666666666665e-05, "loss": 0.0039, "step": 17852 }, { "epoch": 19.70568746548868, "grad_norm": 0.26555871963500977, "learning_rate": 2.7384333333333335e-05, "loss": 0.011, "step": 17853 }, { "epoch": 19.706791827719492, "grad_norm": 0.16085149347782135, "learning_rate": 2.7383999999999997e-05, "loss": 0.004, "step": 17854 }, { "epoch": 19.707896189950304, "grad_norm": 0.2408324033021927, "learning_rate": 2.7383666666666667e-05, "loss": 0.0048, "step": 17855 }, { "epoch": 19.709000552181116, "grad_norm": 0.21026760339736938, "learning_rate": 2.7383333333333336e-05, "loss": 0.0114, "step": 17856 }, { "epoch": 19.71010491441193, "grad_norm": 0.2264130413532257, "learning_rate": 2.7383e-05, "loss": 0.0042, "step": 17857 }, { "epoch": 19.711209276642737, "grad_norm": 0.0775027945637703, "learning_rate": 2.7382666666666668e-05, "loss": 0.0019, "step": 17858 }, { "epoch": 19.71231363887355, "grad_norm": 0.7013647556304932, "learning_rate": 2.7382333333333334e-05, "loss": 0.0141, "step": 17859 }, { "epoch": 19.713418001104362, "grad_norm": 0.1805879920721054, "learning_rate": 2.7382e-05, "loss": 0.0057, "step": 17860 }, { "epoch": 19.714522363335174, "grad_norm": 0.454824835062027, "learning_rate": 2.7381666666666666e-05, "loss": 0.0061, "step": 17861 }, { "epoch": 19.715626725565986, "grad_norm": 0.16896168887615204, "learning_rate": 2.7381333333333335e-05, "loss": 0.0053, "step": 17862 }, { "epoch": 19.7167310877968, "grad_norm": 0.20231463015079498, "learning_rate": 2.7381e-05, "loss": 0.0062, "step": 17863 }, { "epoch": 19.71783545002761, "grad_norm": 0.3709939122200012, "learning_rate": 2.7380666666666667e-05, "loss": 0.0248, "step": 17864 }, { "epoch": 19.71893981225842, "grad_norm": 0.5280038118362427, "learning_rate": 2.7380333333333336e-05, "loss": 0.1506, "step": 17865 }, { "epoch": 19.72004417448923, 
"grad_norm": 0.5439118146896362, "learning_rate": 2.738e-05, "loss": 0.1299, "step": 17866 }, { "epoch": 19.721148536720044, "grad_norm": 0.41952207684516907, "learning_rate": 2.7379666666666668e-05, "loss": 0.0779, "step": 17867 }, { "epoch": 19.722252898950856, "grad_norm": 0.42219310998916626, "learning_rate": 2.7379333333333334e-05, "loss": 0.0442, "step": 17868 }, { "epoch": 19.72335726118167, "grad_norm": 0.3564983308315277, "learning_rate": 2.7379e-05, "loss": 0.045, "step": 17869 }, { "epoch": 19.72446162341248, "grad_norm": 0.4007790982723236, "learning_rate": 2.7378666666666666e-05, "loss": 0.031, "step": 17870 }, { "epoch": 19.725565985643293, "grad_norm": 0.1533515900373459, "learning_rate": 2.7378333333333335e-05, "loss": 0.0091, "step": 17871 }, { "epoch": 19.7266703478741, "grad_norm": 0.39044997096061707, "learning_rate": 2.7378e-05, "loss": 0.0427, "step": 17872 }, { "epoch": 19.727774710104914, "grad_norm": 0.15056046843528748, "learning_rate": 2.7377666666666667e-05, "loss": 0.0087, "step": 17873 }, { "epoch": 19.728879072335726, "grad_norm": 0.4122268259525299, "learning_rate": 2.7377333333333336e-05, "loss": 0.0115, "step": 17874 }, { "epoch": 19.729983434566538, "grad_norm": 0.059296611696481705, "learning_rate": 2.7377e-05, "loss": 0.0029, "step": 17875 }, { "epoch": 19.73108779679735, "grad_norm": 0.36733317375183105, "learning_rate": 2.7376666666666668e-05, "loss": 0.0064, "step": 17876 }, { "epoch": 19.732192159028163, "grad_norm": 0.13688768446445465, "learning_rate": 2.7376333333333334e-05, "loss": 0.0046, "step": 17877 }, { "epoch": 19.73329652125897, "grad_norm": 0.0929819792509079, "learning_rate": 2.7376e-05, "loss": 0.0036, "step": 17878 }, { "epoch": 19.734400883489783, "grad_norm": 0.5675274133682251, "learning_rate": 2.7375666666666666e-05, "loss": 0.0097, "step": 17879 }, { "epoch": 19.735505245720596, "grad_norm": 0.10678374767303467, "learning_rate": 2.7375333333333335e-05, "loss": 0.0028, "step": 17880 }, { "epoch": 
19.736609607951408, "grad_norm": 0.20398388803005219, "learning_rate": 2.7375e-05, "loss": 0.0048, "step": 17881 }, { "epoch": 19.73771397018222, "grad_norm": 0.16934260725975037, "learning_rate": 2.7374666666666667e-05, "loss": 0.0059, "step": 17882 }, { "epoch": 19.738818332413032, "grad_norm": 0.5014445781707764, "learning_rate": 2.7374333333333337e-05, "loss": 0.0102, "step": 17883 }, { "epoch": 19.739922694643845, "grad_norm": 0.09242004156112671, "learning_rate": 2.7374e-05, "loss": 0.0028, "step": 17884 }, { "epoch": 19.741027056874653, "grad_norm": 0.2314576804637909, "learning_rate": 2.737366666666667e-05, "loss": 0.0068, "step": 17885 }, { "epoch": 19.742131419105466, "grad_norm": 0.15493756532669067, "learning_rate": 2.737333333333333e-05, "loss": 0.0074, "step": 17886 }, { "epoch": 19.743235781336278, "grad_norm": 0.1913231760263443, "learning_rate": 2.7373e-05, "loss": 0.0073, "step": 17887 }, { "epoch": 19.74434014356709, "grad_norm": 0.15601322054862976, "learning_rate": 2.737266666666667e-05, "loss": 0.0033, "step": 17888 }, { "epoch": 19.745444505797902, "grad_norm": 0.4534308910369873, "learning_rate": 2.7372333333333332e-05, "loss": 0.0094, "step": 17889 }, { "epoch": 19.746548868028714, "grad_norm": 0.15317411720752716, "learning_rate": 2.7372e-05, "loss": 0.0033, "step": 17890 }, { "epoch": 19.747653230259527, "grad_norm": 0.10380841791629791, "learning_rate": 2.7371666666666667e-05, "loss": 0.004, "step": 17891 }, { "epoch": 19.748757592490335, "grad_norm": 0.23728923499584198, "learning_rate": 2.7371333333333333e-05, "loss": 0.0081, "step": 17892 }, { "epoch": 19.749861954721148, "grad_norm": 0.20214557647705078, "learning_rate": 2.7371e-05, "loss": 0.0036, "step": 17893 }, { "epoch": 19.75096631695196, "grad_norm": 0.17140443623065948, "learning_rate": 2.737066666666667e-05, "loss": 0.0043, "step": 17894 }, { "epoch": 19.752070679182772, "grad_norm": 0.40506792068481445, "learning_rate": 2.737033333333333e-05, "loss": 0.0062, "step": 17895 
}, { "epoch": 19.753175041413584, "grad_norm": 0.21763896942138672, "learning_rate": 2.737e-05, "loss": 0.0056, "step": 17896 }, { "epoch": 19.754279403644396, "grad_norm": 0.17100970447063446, "learning_rate": 2.736966666666667e-05, "loss": 0.0071, "step": 17897 }, { "epoch": 19.75538376587521, "grad_norm": 0.9671851992607117, "learning_rate": 2.7369333333333332e-05, "loss": 0.0096, "step": 17898 }, { "epoch": 19.756488128106017, "grad_norm": 0.09908834844827652, "learning_rate": 2.7369000000000002e-05, "loss": 0.005, "step": 17899 }, { "epoch": 19.75759249033683, "grad_norm": 0.11362319439649582, "learning_rate": 2.7368666666666668e-05, "loss": 0.0036, "step": 17900 }, { "epoch": 19.758696852567642, "grad_norm": 0.19156374037265778, "learning_rate": 2.7368333333333334e-05, "loss": 0.0063, "step": 17901 }, { "epoch": 19.759801214798454, "grad_norm": 0.09369494020938873, "learning_rate": 2.7368e-05, "loss": 0.0022, "step": 17902 }, { "epoch": 19.760905577029266, "grad_norm": 0.4835778474807739, "learning_rate": 2.736766666666667e-05, "loss": 0.01, "step": 17903 }, { "epoch": 19.76200993926008, "grad_norm": 0.24219314754009247, "learning_rate": 2.736733333333333e-05, "loss": 0.0095, "step": 17904 }, { "epoch": 19.763114301490887, "grad_norm": 0.18287038803100586, "learning_rate": 2.7367e-05, "loss": 0.0058, "step": 17905 }, { "epoch": 19.7642186637217, "grad_norm": 0.2248690128326416, "learning_rate": 2.736666666666667e-05, "loss": 0.0053, "step": 17906 }, { "epoch": 19.76532302595251, "grad_norm": 0.2501676678657532, "learning_rate": 2.7366333333333333e-05, "loss": 0.0059, "step": 17907 }, { "epoch": 19.766427388183324, "grad_norm": 0.22386690974235535, "learning_rate": 2.7366000000000002e-05, "loss": 0.009, "step": 17908 }, { "epoch": 19.767531750414136, "grad_norm": 0.16064420342445374, "learning_rate": 2.7365666666666668e-05, "loss": 0.0052, "step": 17909 }, { "epoch": 19.76863611264495, "grad_norm": 0.13445593416690826, "learning_rate": 2.7365333333333334e-05, 
"loss": 0.0034, "step": 17910 }, { "epoch": 19.76974047487576, "grad_norm": 0.20971715450286865, "learning_rate": 2.7365e-05, "loss": 0.0058, "step": 17911 }, { "epoch": 19.77084483710657, "grad_norm": 0.6236799955368042, "learning_rate": 2.736466666666667e-05, "loss": 0.0165, "step": 17912 }, { "epoch": 19.77194919933738, "grad_norm": 0.5865718126296997, "learning_rate": 2.7364333333333335e-05, "loss": 0.0101, "step": 17913 }, { "epoch": 19.773053561568194, "grad_norm": 0.1393478810787201, "learning_rate": 2.7364e-05, "loss": 0.005, "step": 17914 }, { "epoch": 19.774157923799006, "grad_norm": 0.815085768699646, "learning_rate": 2.7363666666666667e-05, "loss": 0.1725, "step": 17915 }, { "epoch": 19.775262286029818, "grad_norm": 0.6238560676574707, "learning_rate": 2.7363333333333333e-05, "loss": 0.1447, "step": 17916 }, { "epoch": 19.77636664826063, "grad_norm": 0.8695751428604126, "learning_rate": 2.7363000000000002e-05, "loss": 0.1001, "step": 17917 }, { "epoch": 19.777471010491443, "grad_norm": 0.3593083322048187, "learning_rate": 2.7362666666666665e-05, "loss": 0.0525, "step": 17918 }, { "epoch": 19.77857537272225, "grad_norm": 0.4901960492134094, "learning_rate": 2.7362333333333334e-05, "loss": 0.0845, "step": 17919 }, { "epoch": 19.779679734953064, "grad_norm": 0.38014525175094604, "learning_rate": 2.7362e-05, "loss": 0.0704, "step": 17920 }, { "epoch": 19.780784097183876, "grad_norm": 0.3581129014492035, "learning_rate": 2.7361666666666666e-05, "loss": 0.0352, "step": 17921 }, { "epoch": 19.781888459414688, "grad_norm": 0.7676452398300171, "learning_rate": 2.7361333333333335e-05, "loss": 0.0405, "step": 17922 }, { "epoch": 19.7829928216455, "grad_norm": 0.3299321234226227, "learning_rate": 2.7361e-05, "loss": 0.0182, "step": 17923 }, { "epoch": 19.784097183876312, "grad_norm": 0.452000230550766, "learning_rate": 2.7360666666666667e-05, "loss": 0.0139, "step": 17924 }, { "epoch": 19.785201546107125, "grad_norm": 0.7487922310829163, "learning_rate": 
2.7360333333333333e-05, "loss": 0.0412, "step": 17925 }, { "epoch": 19.786305908337933, "grad_norm": 0.24738307297229767, "learning_rate": 2.7360000000000002e-05, "loss": 0.0108, "step": 17926 }, { "epoch": 19.787410270568746, "grad_norm": 0.15558598935604095, "learning_rate": 2.7359666666666665e-05, "loss": 0.007, "step": 17927 }, { "epoch": 19.788514632799558, "grad_norm": 0.20711690187454224, "learning_rate": 2.7359333333333334e-05, "loss": 0.0372, "step": 17928 }, { "epoch": 19.78961899503037, "grad_norm": 0.22715021669864655, "learning_rate": 2.7359e-05, "loss": 0.0076, "step": 17929 }, { "epoch": 19.790723357261182, "grad_norm": 0.3809153139591217, "learning_rate": 2.7358666666666666e-05, "loss": 0.0102, "step": 17930 }, { "epoch": 19.791827719491994, "grad_norm": 0.15333615243434906, "learning_rate": 2.7358333333333335e-05, "loss": 0.0083, "step": 17931 }, { "epoch": 19.792932081722807, "grad_norm": 0.127401664853096, "learning_rate": 2.7358e-05, "loss": 0.0069, "step": 17932 }, { "epoch": 19.794036443953615, "grad_norm": 0.17963077127933502, "learning_rate": 2.7357666666666667e-05, "loss": 0.0081, "step": 17933 }, { "epoch": 19.795140806184428, "grad_norm": 0.14898662269115448, "learning_rate": 2.7357333333333333e-05, "loss": 0.0045, "step": 17934 }, { "epoch": 19.79624516841524, "grad_norm": 0.18227717280387878, "learning_rate": 2.7357000000000003e-05, "loss": 0.0065, "step": 17935 }, { "epoch": 19.797349530646052, "grad_norm": 0.23188485205173492, "learning_rate": 2.7356666666666665e-05, "loss": 0.0055, "step": 17936 }, { "epoch": 19.798453892876864, "grad_norm": 1.0542607307434082, "learning_rate": 2.7356333333333334e-05, "loss": 0.015, "step": 17937 }, { "epoch": 19.799558255107677, "grad_norm": 0.10357662290334702, "learning_rate": 2.7356000000000004e-05, "loss": 0.0034, "step": 17938 }, { "epoch": 19.80066261733849, "grad_norm": 0.3701931834220886, "learning_rate": 2.7355666666666666e-05, "loss": 0.0089, "step": 17939 }, { "epoch": 19.801766979569297, 
"grad_norm": 0.19410564005374908, "learning_rate": 2.7355333333333336e-05, "loss": 0.0074, "step": 17940 }, { "epoch": 19.80287134180011, "grad_norm": 0.22863909602165222, "learning_rate": 2.7355e-05, "loss": 0.0056, "step": 17941 }, { "epoch": 19.803975704030922, "grad_norm": 1.0327719449996948, "learning_rate": 2.7354666666666667e-05, "loss": 0.0102, "step": 17942 }, { "epoch": 19.805080066261734, "grad_norm": 0.22081035375595093, "learning_rate": 2.7354333333333333e-05, "loss": 0.01, "step": 17943 }, { "epoch": 19.806184428492546, "grad_norm": 0.19877813756465912, "learning_rate": 2.7354000000000003e-05, "loss": 0.0055, "step": 17944 }, { "epoch": 19.80728879072336, "grad_norm": 0.18667833507061005, "learning_rate": 2.7353666666666665e-05, "loss": 0.0069, "step": 17945 }, { "epoch": 19.808393152954167, "grad_norm": 0.5078284740447998, "learning_rate": 2.7353333333333335e-05, "loss": 0.0188, "step": 17946 }, { "epoch": 19.80949751518498, "grad_norm": 0.26692137122154236, "learning_rate": 2.7353e-05, "loss": 0.0033, "step": 17947 }, { "epoch": 19.81060187741579, "grad_norm": 0.19876645505428314, "learning_rate": 2.7352666666666666e-05, "loss": 0.0042, "step": 17948 }, { "epoch": 19.811706239646604, "grad_norm": 0.18626469373703003, "learning_rate": 2.7352333333333336e-05, "loss": 0.0057, "step": 17949 }, { "epoch": 19.812810601877416, "grad_norm": 0.12628543376922607, "learning_rate": 2.7352e-05, "loss": 0.0048, "step": 17950 }, { "epoch": 19.81391496410823, "grad_norm": 0.23055127263069153, "learning_rate": 2.7351666666666668e-05, "loss": 0.0091, "step": 17951 }, { "epoch": 19.81501932633904, "grad_norm": 0.08484525233507156, "learning_rate": 2.7351333333333334e-05, "loss": 0.0042, "step": 17952 }, { "epoch": 19.81612368856985, "grad_norm": 0.1556408703327179, "learning_rate": 2.7351e-05, "loss": 0.005, "step": 17953 }, { "epoch": 19.81722805080066, "grad_norm": 0.16511088609695435, "learning_rate": 2.7350666666666665e-05, "loss": 0.0032, "step": 17954 }, { 
"epoch": 19.818332413031474, "grad_norm": 0.5682655572891235, "learning_rate": 2.7350333333333335e-05, "loss": 0.0123, "step": 17955 }, { "epoch": 19.819436775262286, "grad_norm": 0.2564694583415985, "learning_rate": 2.735e-05, "loss": 0.0069, "step": 17956 }, { "epoch": 19.820541137493098, "grad_norm": 1.0923186540603638, "learning_rate": 2.7349666666666667e-05, "loss": 0.0121, "step": 17957 }, { "epoch": 19.82164549972391, "grad_norm": 0.5358651280403137, "learning_rate": 2.7349333333333336e-05, "loss": 0.0249, "step": 17958 }, { "epoch": 19.822749861954723, "grad_norm": 0.9929106831550598, "learning_rate": 2.7349e-05, "loss": 0.011, "step": 17959 }, { "epoch": 19.82385422418553, "grad_norm": 0.41496336460113525, "learning_rate": 2.7348666666666668e-05, "loss": 0.0109, "step": 17960 }, { "epoch": 19.824958586416344, "grad_norm": 0.12654726207256317, "learning_rate": 2.7348333333333334e-05, "loss": 0.0043, "step": 17961 }, { "epoch": 19.826062948647156, "grad_norm": 0.09031226485967636, "learning_rate": 2.7348e-05, "loss": 0.0011, "step": 17962 }, { "epoch": 19.827167310877968, "grad_norm": 0.5443516373634338, "learning_rate": 2.734766666666667e-05, "loss": 0.0164, "step": 17963 }, { "epoch": 19.82827167310878, "grad_norm": 0.4781290590763092, "learning_rate": 2.7347333333333335e-05, "loss": 0.0123, "step": 17964 }, { "epoch": 19.829376035339592, "grad_norm": 0.8578001856803894, "learning_rate": 2.7347e-05, "loss": 0.1709, "step": 17965 }, { "epoch": 19.830480397570405, "grad_norm": 0.7226802706718445, "learning_rate": 2.7346666666666667e-05, "loss": 0.0997, "step": 17966 }, { "epoch": 19.831584759801213, "grad_norm": 0.47654998302459717, "learning_rate": 2.7346333333333336e-05, "loss": 0.078, "step": 17967 }, { "epoch": 19.832689122032026, "grad_norm": 0.3611185550689697, "learning_rate": 2.7346e-05, "loss": 0.0412, "step": 17968 }, { "epoch": 19.833793484262838, "grad_norm": 0.45838239789009094, "learning_rate": 2.7345666666666668e-05, "loss": 0.061, "step": 
17969 }, { "epoch": 19.83489784649365, "grad_norm": 0.6391642093658447, "learning_rate": 2.7345333333333334e-05, "loss": 0.062, "step": 17970 }, { "epoch": 19.836002208724462, "grad_norm": 0.19267261028289795, "learning_rate": 2.7345e-05, "loss": 0.018, "step": 17971 }, { "epoch": 19.837106570955275, "grad_norm": 0.2519821524620056, "learning_rate": 2.734466666666667e-05, "loss": 0.0185, "step": 17972 }, { "epoch": 19.838210933186087, "grad_norm": 0.42595207691192627, "learning_rate": 2.7344333333333335e-05, "loss": 0.0291, "step": 17973 }, { "epoch": 19.839315295416895, "grad_norm": 0.1646183282136917, "learning_rate": 2.7344e-05, "loss": 0.0052, "step": 17974 }, { "epoch": 19.840419657647708, "grad_norm": 0.19409210979938507, "learning_rate": 2.7343666666666667e-05, "loss": 0.0078, "step": 17975 }, { "epoch": 19.84152401987852, "grad_norm": 0.34189295768737793, "learning_rate": 2.7343333333333333e-05, "loss": 0.0052, "step": 17976 }, { "epoch": 19.842628382109332, "grad_norm": 0.6090915203094482, "learning_rate": 2.7343e-05, "loss": 0.0092, "step": 17977 }, { "epoch": 19.843732744340144, "grad_norm": 0.2701262831687927, "learning_rate": 2.7342666666666668e-05, "loss": 0.0107, "step": 17978 }, { "epoch": 19.844837106570957, "grad_norm": 0.18825291097164154, "learning_rate": 2.7342333333333334e-05, "loss": 0.0071, "step": 17979 }, { "epoch": 19.84594146880177, "grad_norm": 0.12415068596601486, "learning_rate": 2.7342e-05, "loss": 0.0065, "step": 17980 }, { "epoch": 19.847045831032577, "grad_norm": 0.2670959532260895, "learning_rate": 2.734166666666667e-05, "loss": 0.0093, "step": 17981 }, { "epoch": 19.84815019326339, "grad_norm": 0.3186861276626587, "learning_rate": 2.7341333333333332e-05, "loss": 0.0073, "step": 17982 }, { "epoch": 19.849254555494202, "grad_norm": 0.20241163671016693, "learning_rate": 2.7341e-05, "loss": 0.0058, "step": 17983 }, { "epoch": 19.850358917725014, "grad_norm": 0.4498123824596405, "learning_rate": 2.7340666666666667e-05, "loss": 
0.0162, "step": 17984 }, { "epoch": 19.851463279955826, "grad_norm": 0.6865530014038086, "learning_rate": 2.7340333333333333e-05, "loss": 0.0111, "step": 17985 }, { "epoch": 19.85256764218664, "grad_norm": 0.21757076680660248, "learning_rate": 2.734e-05, "loss": 0.0061, "step": 17986 }, { "epoch": 19.853672004417447, "grad_norm": 0.15728072822093964, "learning_rate": 2.733966666666667e-05, "loss": 0.0089, "step": 17987 }, { "epoch": 19.85477636664826, "grad_norm": 0.10645749419927597, "learning_rate": 2.7339333333333334e-05, "loss": 0.0034, "step": 17988 }, { "epoch": 19.85588072887907, "grad_norm": 0.3202141523361206, "learning_rate": 2.7339e-05, "loss": 0.0104, "step": 17989 }, { "epoch": 19.856985091109884, "grad_norm": 0.2661498785018921, "learning_rate": 2.733866666666667e-05, "loss": 0.0061, "step": 17990 }, { "epoch": 19.858089453340696, "grad_norm": 0.12415523827075958, "learning_rate": 2.7338333333333332e-05, "loss": 0.0029, "step": 17991 }, { "epoch": 19.85919381557151, "grad_norm": 0.31769710779190063, "learning_rate": 2.7338e-05, "loss": 0.0163, "step": 17992 }, { "epoch": 19.86029817780232, "grad_norm": 0.14561736583709717, "learning_rate": 2.7337666666666667e-05, "loss": 0.0088, "step": 17993 }, { "epoch": 19.86140254003313, "grad_norm": 0.2748701572418213, "learning_rate": 2.7337333333333333e-05, "loss": 0.0074, "step": 17994 }, { "epoch": 19.86250690226394, "grad_norm": 1.2470179796218872, "learning_rate": 2.7337e-05, "loss": 0.0337, "step": 17995 }, { "epoch": 19.863611264494754, "grad_norm": 0.24390368163585663, "learning_rate": 2.733666666666667e-05, "loss": 0.0054, "step": 17996 }, { "epoch": 19.864715626725566, "grad_norm": 0.3103782832622528, "learning_rate": 2.7336333333333335e-05, "loss": 0.0081, "step": 17997 }, { "epoch": 19.86581998895638, "grad_norm": 0.20035475492477417, "learning_rate": 2.7336e-05, "loss": 0.0088, "step": 17998 }, { "epoch": 19.86692435118719, "grad_norm": 0.1711408495903015, "learning_rate": 2.733566666666667e-05, 
"loss": 0.0103, "step": 17999 }, { "epoch": 19.868028713418003, "grad_norm": 0.1475500911474228, "learning_rate": 2.7335333333333332e-05, "loss": 0.0084, "step": 18000 }, { "epoch": 19.868028713418003, "eval_cer": 0.11138761438162635, "eval_loss": 0.3325429856777191, "eval_runtime": 15.8824, "eval_samples_per_second": 19.141, "eval_steps_per_second": 0.63, "eval_wer": 0.39025326170376057, "step": 18000 }, { "epoch": 19.86913307564881, "grad_norm": 0.11197933554649353, "learning_rate": 2.7335000000000002e-05, "loss": 0.0041, "step": 18001 }, { "epoch": 19.870237437879624, "grad_norm": 0.1257653832435608, "learning_rate": 2.7334666666666668e-05, "loss": 0.0068, "step": 18002 }, { "epoch": 19.871341800110436, "grad_norm": 1.0682151317596436, "learning_rate": 2.7334333333333334e-05, "loss": 0.0117, "step": 18003 }, { "epoch": 19.872446162341248, "grad_norm": 0.14362117648124695, "learning_rate": 2.7334e-05, "loss": 0.0041, "step": 18004 }, { "epoch": 19.87355052457206, "grad_norm": 0.1977236419916153, "learning_rate": 2.7333666666666665e-05, "loss": 0.0089, "step": 18005 }, { "epoch": 19.874654886802873, "grad_norm": 0.539232611656189, "learning_rate": 2.7333333333333335e-05, "loss": 0.0076, "step": 18006 }, { "epoch": 19.875759249033685, "grad_norm": 0.4137677848339081, "learning_rate": 2.7333e-05, "loss": 0.0093, "step": 18007 }, { "epoch": 19.876863611264493, "grad_norm": 0.19945809245109558, "learning_rate": 2.7332666666666667e-05, "loss": 0.0097, "step": 18008 }, { "epoch": 19.877967973495306, "grad_norm": 0.11401434987783432, "learning_rate": 2.7332333333333333e-05, "loss": 0.0046, "step": 18009 }, { "epoch": 19.879072335726118, "grad_norm": 0.5440756678581238, "learning_rate": 2.7332000000000002e-05, "loss": 0.0146, "step": 18010 }, { "epoch": 19.88017669795693, "grad_norm": 0.17398658394813538, "learning_rate": 2.7331666666666664e-05, "loss": 0.0042, "step": 18011 }, { "epoch": 19.881281060187742, "grad_norm": 0.32834136486053467, "learning_rate": 
2.7331333333333334e-05, "loss": 0.0097, "step": 18012 }, { "epoch": 19.882385422418555, "grad_norm": 0.24135100841522217, "learning_rate": 2.7331000000000003e-05, "loss": 0.0048, "step": 18013 }, { "epoch": 19.883489784649363, "grad_norm": 0.4725870192050934, "learning_rate": 2.7330666666666666e-05, "loss": 0.0064, "step": 18014 }, { "epoch": 19.884594146880175, "grad_norm": 0.7542926669120789, "learning_rate": 2.7330333333333335e-05, "loss": 0.1707, "step": 18015 }, { "epoch": 19.885698509110988, "grad_norm": 0.6624592542648315, "learning_rate": 2.733e-05, "loss": 0.1187, "step": 18016 }, { "epoch": 19.8868028713418, "grad_norm": 0.4637660086154938, "learning_rate": 2.7329666666666667e-05, "loss": 0.1263, "step": 18017 }, { "epoch": 19.887907233572612, "grad_norm": 0.49018821120262146, "learning_rate": 2.7329333333333333e-05, "loss": 0.068, "step": 18018 }, { "epoch": 19.889011595803424, "grad_norm": 0.6102349758148193, "learning_rate": 2.7329000000000002e-05, "loss": 0.0938, "step": 18019 }, { "epoch": 19.890115958034237, "grad_norm": 0.2877410352230072, "learning_rate": 2.7328666666666665e-05, "loss": 0.0467, "step": 18020 }, { "epoch": 19.891220320265045, "grad_norm": 0.3201172351837158, "learning_rate": 2.7328333333333334e-05, "loss": 0.0294, "step": 18021 }, { "epoch": 19.892324682495858, "grad_norm": 0.469008207321167, "learning_rate": 2.7328000000000003e-05, "loss": 0.037, "step": 18022 }, { "epoch": 19.89342904472667, "grad_norm": 0.1396334320306778, "learning_rate": 2.7327666666666666e-05, "loss": 0.0126, "step": 18023 }, { "epoch": 19.894533406957482, "grad_norm": 0.3180409073829651, "learning_rate": 2.7327333333333335e-05, "loss": 0.0163, "step": 18024 }, { "epoch": 19.895637769188294, "grad_norm": 0.4444526135921478, "learning_rate": 2.7327e-05, "loss": 0.0189, "step": 18025 }, { "epoch": 19.896742131419106, "grad_norm": 0.1800224930047989, "learning_rate": 2.7326666666666667e-05, "loss": 0.0106, "step": 18026 }, { "epoch": 19.89784649364992, 
"grad_norm": 0.2325199395418167, "learning_rate": 2.7326333333333333e-05, "loss": 0.0085, "step": 18027 }, { "epoch": 19.898950855880727, "grad_norm": 0.1333024501800537, "learning_rate": 2.7326000000000002e-05, "loss": 0.0043, "step": 18028 }, { "epoch": 19.90005521811154, "grad_norm": 0.17338843643665314, "learning_rate": 2.7325666666666668e-05, "loss": 0.0068, "step": 18029 }, { "epoch": 19.90115958034235, "grad_norm": 0.2340570092201233, "learning_rate": 2.7325333333333334e-05, "loss": 0.0304, "step": 18030 }, { "epoch": 19.902263942573164, "grad_norm": 0.20708593726158142, "learning_rate": 2.7325000000000004e-05, "loss": 0.0091, "step": 18031 }, { "epoch": 19.903368304803976, "grad_norm": 0.2443077117204666, "learning_rate": 2.7324666666666666e-05, "loss": 0.0083, "step": 18032 }, { "epoch": 19.90447266703479, "grad_norm": 0.2779993414878845, "learning_rate": 2.7324333333333335e-05, "loss": 0.013, "step": 18033 }, { "epoch": 19.9055770292656, "grad_norm": 0.17102934420108795, "learning_rate": 2.7324e-05, "loss": 0.0054, "step": 18034 }, { "epoch": 19.90668139149641, "grad_norm": 0.30059337615966797, "learning_rate": 2.7323666666666667e-05, "loss": 0.0201, "step": 18035 }, { "epoch": 19.90778575372722, "grad_norm": 0.08684287965297699, "learning_rate": 2.7323333333333333e-05, "loss": 0.0067, "step": 18036 }, { "epoch": 19.908890115958034, "grad_norm": 0.20356686413288116, "learning_rate": 2.7323e-05, "loss": 0.0071, "step": 18037 }, { "epoch": 19.909994478188846, "grad_norm": 0.114842489361763, "learning_rate": 2.732266666666667e-05, "loss": 0.0051, "step": 18038 }, { "epoch": 19.91109884041966, "grad_norm": 0.1150006353855133, "learning_rate": 2.7322333333333334e-05, "loss": 0.0038, "step": 18039 }, { "epoch": 19.91220320265047, "grad_norm": 0.1437642127275467, "learning_rate": 2.7322e-05, "loss": 0.0054, "step": 18040 }, { "epoch": 19.913307564881283, "grad_norm": 0.17881514132022858, "learning_rate": 2.7321666666666666e-05, "loss": 0.0099, "step": 18041 }, { 
"epoch": 19.91441192711209, "grad_norm": 0.2086438089609146, "learning_rate": 2.7321333333333336e-05, "loss": 0.0073, "step": 18042 }, { "epoch": 19.915516289342904, "grad_norm": 0.12125570327043533, "learning_rate": 2.7320999999999998e-05, "loss": 0.0035, "step": 18043 }, { "epoch": 19.916620651573716, "grad_norm": 0.2246442288160324, "learning_rate": 2.7320666666666667e-05, "loss": 0.0076, "step": 18044 }, { "epoch": 19.917725013804528, "grad_norm": 0.22220276296138763, "learning_rate": 2.7320333333333333e-05, "loss": 0.0058, "step": 18045 }, { "epoch": 19.91882937603534, "grad_norm": 0.261443555355072, "learning_rate": 2.732e-05, "loss": 0.0086, "step": 18046 }, { "epoch": 19.919933738266153, "grad_norm": 0.16247820854187012, "learning_rate": 2.731966666666667e-05, "loss": 0.0062, "step": 18047 }, { "epoch": 19.921038100496965, "grad_norm": 0.12185560166835785, "learning_rate": 2.7319333333333335e-05, "loss": 0.0032, "step": 18048 }, { "epoch": 19.922142462727773, "grad_norm": 0.24638400971889496, "learning_rate": 2.7319e-05, "loss": 0.0091, "step": 18049 }, { "epoch": 19.923246824958586, "grad_norm": 0.27509599924087524, "learning_rate": 2.7318666666666666e-05, "loss": 0.005, "step": 18050 }, { "epoch": 19.924351187189398, "grad_norm": 0.2166775017976761, "learning_rate": 2.7318333333333336e-05, "loss": 0.0061, "step": 18051 }, { "epoch": 19.92545554942021, "grad_norm": 0.4019365906715393, "learning_rate": 2.7318e-05, "loss": 0.0063, "step": 18052 }, { "epoch": 19.926559911651022, "grad_norm": 0.7170377969741821, "learning_rate": 2.7317666666666668e-05, "loss": 0.0119, "step": 18053 }, { "epoch": 19.927664273881835, "grad_norm": 0.3258107304573059, "learning_rate": 2.7317333333333334e-05, "loss": 0.007, "step": 18054 }, { "epoch": 19.928768636112643, "grad_norm": 0.1745336800813675, "learning_rate": 2.7317e-05, "loss": 0.0082, "step": 18055 }, { "epoch": 19.929872998343455, "grad_norm": 0.2530856430530548, "learning_rate": 2.731666666666667e-05, "loss": 0.0057, 
"step": 18056 }, { "epoch": 19.930977360574268, "grad_norm": 0.15734541416168213, "learning_rate": 2.7316333333333335e-05, "loss": 0.0062, "step": 18057 }, { "epoch": 19.93208172280508, "grad_norm": 0.29285380244255066, "learning_rate": 2.7316e-05, "loss": 0.0138, "step": 18058 }, { "epoch": 19.933186085035892, "grad_norm": 0.16749271750450134, "learning_rate": 2.7315666666666667e-05, "loss": 0.007, "step": 18059 }, { "epoch": 19.934290447266704, "grad_norm": 0.2236180603504181, "learning_rate": 2.7315333333333336e-05, "loss": 0.0046, "step": 18060 }, { "epoch": 19.935394809497517, "grad_norm": 0.2659001052379608, "learning_rate": 2.7315e-05, "loss": 0.0085, "step": 18061 }, { "epoch": 19.936499171728325, "grad_norm": 0.08199524879455566, "learning_rate": 2.7314666666666668e-05, "loss": 0.0019, "step": 18062 }, { "epoch": 19.937603533959138, "grad_norm": 0.29012033343315125, "learning_rate": 2.7314333333333337e-05, "loss": 0.0065, "step": 18063 }, { "epoch": 19.93870789618995, "grad_norm": 0.4759509265422821, "learning_rate": 2.7314e-05, "loss": 0.0076, "step": 18064 }, { "epoch": 19.939812258420762, "grad_norm": 0.7389428019523621, "learning_rate": 2.731366666666667e-05, "loss": 0.141, "step": 18065 }, { "epoch": 19.940916620651574, "grad_norm": 0.4512938857078552, "learning_rate": 2.731333333333333e-05, "loss": 0.0772, "step": 18066 }, { "epoch": 19.942020982882386, "grad_norm": 0.580025315284729, "learning_rate": 2.7313e-05, "loss": 0.089, "step": 18067 }, { "epoch": 19.9431253451132, "grad_norm": 0.38173046708106995, "learning_rate": 2.7312666666666667e-05, "loss": 0.0575, "step": 18068 }, { "epoch": 19.944229707344007, "grad_norm": 0.46770310401916504, "learning_rate": 2.7312333333333333e-05, "loss": 0.0669, "step": 18069 }, { "epoch": 19.94533406957482, "grad_norm": 0.2806255519390106, "learning_rate": 2.7312e-05, "loss": 0.0422, "step": 18070 }, { "epoch": 19.946438431805632, "grad_norm": 0.2778143286705017, "learning_rate": 2.7311666666666668e-05, "loss": 
0.0276, "step": 18071 }, { "epoch": 19.947542794036444, "grad_norm": 0.3110398054122925, "learning_rate": 2.7311333333333334e-05, "loss": 0.0313, "step": 18072 }, { "epoch": 19.948647156267256, "grad_norm": 0.33235403895378113, "learning_rate": 2.7311e-05, "loss": 0.0559, "step": 18073 }, { "epoch": 19.94975151849807, "grad_norm": 0.17259009182453156, "learning_rate": 2.731066666666667e-05, "loss": 0.0094, "step": 18074 }, { "epoch": 19.95085588072888, "grad_norm": 0.15272356569766998, "learning_rate": 2.7310333333333332e-05, "loss": 0.0065, "step": 18075 }, { "epoch": 19.95196024295969, "grad_norm": 0.13524574041366577, "learning_rate": 2.731e-05, "loss": 0.007, "step": 18076 }, { "epoch": 19.9530646051905, "grad_norm": 0.21966363489627838, "learning_rate": 2.7309666666666667e-05, "loss": 0.0103, "step": 18077 }, { "epoch": 19.954168967421314, "grad_norm": 0.15583480894565582, "learning_rate": 2.7309333333333333e-05, "loss": 0.0071, "step": 18078 }, { "epoch": 19.955273329652126, "grad_norm": 0.6486205458641052, "learning_rate": 2.7309000000000002e-05, "loss": 0.009, "step": 18079 }, { "epoch": 19.95637769188294, "grad_norm": 0.268815279006958, "learning_rate": 2.7308666666666668e-05, "loss": 0.0127, "step": 18080 }, { "epoch": 19.95748205411375, "grad_norm": 0.10059797018766403, "learning_rate": 2.7308333333333334e-05, "loss": 0.0049, "step": 18081 }, { "epoch": 19.95858641634456, "grad_norm": 0.15194711089134216, "learning_rate": 2.7308e-05, "loss": 0.0059, "step": 18082 }, { "epoch": 19.95969077857537, "grad_norm": 0.2404383271932602, "learning_rate": 2.730766666666667e-05, "loss": 0.0061, "step": 18083 }, { "epoch": 19.960795140806184, "grad_norm": 0.1521798074245453, "learning_rate": 2.7307333333333332e-05, "loss": 0.0061, "step": 18084 }, { "epoch": 19.961899503036996, "grad_norm": 0.23006749153137207, "learning_rate": 2.7307e-05, "loss": 0.0076, "step": 18085 }, { "epoch": 19.963003865267808, "grad_norm": 0.19677762687206268, "learning_rate": 
2.7306666666666667e-05, "loss": 0.0084, "step": 18086 }, { "epoch": 19.96410822749862, "grad_norm": 0.23453004658222198, "learning_rate": 2.7306333333333333e-05, "loss": 0.0097, "step": 18087 }, { "epoch": 19.965212589729433, "grad_norm": 0.13541260361671448, "learning_rate": 2.7306000000000002e-05, "loss": 0.0077, "step": 18088 }, { "epoch": 19.96631695196024, "grad_norm": 0.20278406143188477, "learning_rate": 2.730566666666667e-05, "loss": 0.0056, "step": 18089 }, { "epoch": 19.967421314191053, "grad_norm": 0.25126877427101135, "learning_rate": 2.7305333333333334e-05, "loss": 0.0078, "step": 18090 }, { "epoch": 19.968525676421866, "grad_norm": 0.3319677710533142, "learning_rate": 2.7305e-05, "loss": 0.0069, "step": 18091 }, { "epoch": 19.969630038652678, "grad_norm": 0.3964017629623413, "learning_rate": 2.730466666666667e-05, "loss": 0.0114, "step": 18092 }, { "epoch": 19.97073440088349, "grad_norm": 0.17983044683933258, "learning_rate": 2.7304333333333332e-05, "loss": 0.0078, "step": 18093 }, { "epoch": 19.971838763114302, "grad_norm": 0.07713426649570465, "learning_rate": 2.7304e-05, "loss": 0.0021, "step": 18094 }, { "epoch": 19.972943125345115, "grad_norm": 0.1388571709394455, "learning_rate": 2.7303666666666667e-05, "loss": 0.0031, "step": 18095 }, { "epoch": 19.974047487575923, "grad_norm": 0.07415779680013657, "learning_rate": 2.7303333333333333e-05, "loss": 0.0027, "step": 18096 }, { "epoch": 19.975151849806736, "grad_norm": 0.5671108365058899, "learning_rate": 2.7303000000000003e-05, "loss": 0.0064, "step": 18097 }, { "epoch": 19.976256212037548, "grad_norm": 0.21644403040409088, "learning_rate": 2.7302666666666665e-05, "loss": 0.0044, "step": 18098 }, { "epoch": 19.97736057426836, "grad_norm": 0.2964840829372406, "learning_rate": 2.7302333333333335e-05, "loss": 0.041, "step": 18099 }, { "epoch": 19.978464936499172, "grad_norm": 0.38492926955223083, "learning_rate": 2.7302e-05, "loss": 0.0062, "step": 18100 }, { "epoch": 19.979569298729984, "grad_norm": 
0.06299291551113129, "learning_rate": 2.7301666666666666e-05, "loss": 0.002, "step": 18101 }, { "epoch": 19.980673660960797, "grad_norm": 0.061349157243967056, "learning_rate": 2.7301333333333332e-05, "loss": 0.0031, "step": 18102 }, { "epoch": 19.981778023191605, "grad_norm": 0.21104168891906738, "learning_rate": 2.7301000000000002e-05, "loss": 0.0043, "step": 18103 }, { "epoch": 19.982882385422418, "grad_norm": 0.3467244505882263, "learning_rate": 2.7300666666666668e-05, "loss": 0.0091, "step": 18104 }, { "epoch": 19.98398674765323, "grad_norm": 0.11512131989002228, "learning_rate": 2.7300333333333334e-05, "loss": 0.0041, "step": 18105 }, { "epoch": 19.985091109884042, "grad_norm": 0.32793566584587097, "learning_rate": 2.7300000000000003e-05, "loss": 0.0083, "step": 18106 }, { "epoch": 19.986195472114854, "grad_norm": 0.253139853477478, "learning_rate": 2.7299666666666665e-05, "loss": 0.0069, "step": 18107 }, { "epoch": 19.987299834345666, "grad_norm": 0.07081429660320282, "learning_rate": 2.7299333333333335e-05, "loss": 0.0018, "step": 18108 }, { "epoch": 19.98840419657648, "grad_norm": 0.44383785128593445, "learning_rate": 2.7299e-05, "loss": 0.0071, "step": 18109 }, { "epoch": 19.989508558807287, "grad_norm": 0.4246179759502411, "learning_rate": 2.7298666666666667e-05, "loss": 0.0033, "step": 18110 }, { "epoch": 19.9906129210381, "grad_norm": 0.6283594965934753, "learning_rate": 2.7298333333333333e-05, "loss": 0.0129, "step": 18111 }, { "epoch": 19.991717283268912, "grad_norm": 0.22321215271949768, "learning_rate": 2.7298000000000002e-05, "loss": 0.0059, "step": 18112 }, { "epoch": 19.992821645499724, "grad_norm": 0.41323286294937134, "learning_rate": 2.7297666666666668e-05, "loss": 0.0104, "step": 18113 }, { "epoch": 19.993926007730536, "grad_norm": 0.6308245658874512, "learning_rate": 2.7297333333333334e-05, "loss": 0.0162, "step": 18114 }, { "epoch": 19.99503036996135, "grad_norm": 0.38177183270454407, "learning_rate": 2.7297000000000003e-05, "loss": 
0.0541, "step": 18115 }, { "epoch": 19.99613473219216, "grad_norm": 0.35700079798698425, "learning_rate": 2.7296666666666666e-05, "loss": 0.0116, "step": 18116 }, { "epoch": 19.99723909442297, "grad_norm": 0.15708519518375397, "learning_rate": 2.7296333333333335e-05, "loss": 0.0042, "step": 18117 }, { "epoch": 19.99834345665378, "grad_norm": 0.21578863263130188, "learning_rate": 2.7296e-05, "loss": 0.0081, "step": 18118 }, { "epoch": 19.999447818884594, "grad_norm": 0.0933852344751358, "learning_rate": 2.7295666666666667e-05, "loss": 0.003, "step": 18119 }, { "epoch": 20.0, "grad_norm": 0.15710335969924927, "learning_rate": 2.7295333333333333e-05, "loss": 0.0016, "step": 18120 }, { "epoch": 20.001104362230812, "grad_norm": 0.45699748396873474, "learning_rate": 2.7295000000000002e-05, "loss": 0.1489, "step": 18121 }, { "epoch": 20.002208724461624, "grad_norm": 0.4610249996185303, "learning_rate": 2.7294666666666668e-05, "loss": 0.1054, "step": 18122 }, { "epoch": 20.003313086692437, "grad_norm": 0.3308863639831543, "learning_rate": 2.7294333333333334e-05, "loss": 0.0669, "step": 18123 }, { "epoch": 20.004417448923245, "grad_norm": 0.4225642681121826, "learning_rate": 2.7294000000000003e-05, "loss": 0.0672, "step": 18124 }, { "epoch": 20.005521811154058, "grad_norm": 0.42711594700813293, "learning_rate": 2.7293666666666666e-05, "loss": 0.0457, "step": 18125 }, { "epoch": 20.00662617338487, "grad_norm": 0.31922289729118347, "learning_rate": 2.7293333333333335e-05, "loss": 0.0341, "step": 18126 }, { "epoch": 20.007730535615682, "grad_norm": 0.25538721680641174, "learning_rate": 2.7292999999999998e-05, "loss": 0.0189, "step": 18127 }, { "epoch": 20.008834897846494, "grad_norm": 0.3252112567424774, "learning_rate": 2.7292666666666667e-05, "loss": 0.0287, "step": 18128 }, { "epoch": 20.009939260077306, "grad_norm": 0.15328501164913177, "learning_rate": 2.7292333333333336e-05, "loss": 0.0104, "step": 18129 }, { "epoch": 20.01104362230812, "grad_norm": 0.7435050010681152, 
"learning_rate": 2.7292e-05, "loss": 0.0257, "step": 18130 }, { "epoch": 20.012147984538927, "grad_norm": 0.20123881101608276, "learning_rate": 2.7291666666666668e-05, "loss": 0.0063, "step": 18131 }, { "epoch": 20.01325234676974, "grad_norm": 0.17155328392982483, "learning_rate": 2.7291333333333334e-05, "loss": 0.0067, "step": 18132 }, { "epoch": 20.014356709000552, "grad_norm": 0.17980186641216278, "learning_rate": 2.7291e-05, "loss": 0.0092, "step": 18133 }, { "epoch": 20.015461071231364, "grad_norm": 0.10839715600013733, "learning_rate": 2.7290666666666666e-05, "loss": 0.0025, "step": 18134 }, { "epoch": 20.016565433462176, "grad_norm": 0.14433597028255463, "learning_rate": 2.7290333333333335e-05, "loss": 0.0054, "step": 18135 }, { "epoch": 20.01766979569299, "grad_norm": 0.2571035921573639, "learning_rate": 2.7289999999999998e-05, "loss": 0.0069, "step": 18136 }, { "epoch": 20.0187741579238, "grad_norm": 0.6212347149848938, "learning_rate": 2.7289666666666667e-05, "loss": 0.0097, "step": 18137 }, { "epoch": 20.01987852015461, "grad_norm": 0.11247002333402634, "learning_rate": 2.7289333333333337e-05, "loss": 0.0045, "step": 18138 }, { "epoch": 20.02098288238542, "grad_norm": 0.1882077157497406, "learning_rate": 2.7289e-05, "loss": 0.0048, "step": 18139 }, { "epoch": 20.022087244616234, "grad_norm": 0.22792555391788483, "learning_rate": 2.728866666666667e-05, "loss": 0.0057, "step": 18140 }, { "epoch": 20.023191606847046, "grad_norm": 0.07640589028596878, "learning_rate": 2.7288333333333334e-05, "loss": 0.0016, "step": 18141 }, { "epoch": 20.02429596907786, "grad_norm": 0.2591399848461151, "learning_rate": 2.7288e-05, "loss": 0.0072, "step": 18142 }, { "epoch": 20.02540033130867, "grad_norm": 0.24288439750671387, "learning_rate": 2.7287666666666666e-05, "loss": 0.0081, "step": 18143 }, { "epoch": 20.02650469353948, "grad_norm": 0.07322782278060913, "learning_rate": 2.7287333333333336e-05, "loss": 0.0035, "step": 18144 }, { "epoch": 20.02760905577029, 
"grad_norm": 0.15810219943523407, "learning_rate": 2.7286999999999998e-05, "loss": 0.0047, "step": 18145 }, { "epoch": 20.028713418001104, "grad_norm": 0.42761215567588806, "learning_rate": 2.7286666666666667e-05, "loss": 0.014, "step": 18146 }, { "epoch": 20.029817780231916, "grad_norm": 0.2647755742073059, "learning_rate": 2.7286333333333337e-05, "loss": 0.0051, "step": 18147 }, { "epoch": 20.030922142462728, "grad_norm": 0.273760050535202, "learning_rate": 2.7286e-05, "loss": 0.0081, "step": 18148 }, { "epoch": 20.03202650469354, "grad_norm": 0.1935887485742569, "learning_rate": 2.728566666666667e-05, "loss": 0.0066, "step": 18149 }, { "epoch": 20.033130866924353, "grad_norm": 0.3140755891799927, "learning_rate": 2.7285333333333335e-05, "loss": 0.0053, "step": 18150 }, { "epoch": 20.03423522915516, "grad_norm": 0.2897763252258301, "learning_rate": 2.7285e-05, "loss": 0.0083, "step": 18151 }, { "epoch": 20.035339591385974, "grad_norm": 0.2700127363204956, "learning_rate": 2.7284666666666666e-05, "loss": 0.0064, "step": 18152 }, { "epoch": 20.036443953616786, "grad_norm": 0.1735922396183014, "learning_rate": 2.7284333333333336e-05, "loss": 0.0055, "step": 18153 }, { "epoch": 20.037548315847598, "grad_norm": 0.17220380902290344, "learning_rate": 2.7284e-05, "loss": 0.0039, "step": 18154 }, { "epoch": 20.03865267807841, "grad_norm": 0.2711738348007202, "learning_rate": 2.7283666666666668e-05, "loss": 0.0081, "step": 18155 }, { "epoch": 20.039757040309222, "grad_norm": 0.15140657126903534, "learning_rate": 2.7283333333333337e-05, "loss": 0.0052, "step": 18156 }, { "epoch": 20.040861402540035, "grad_norm": 0.33719828724861145, "learning_rate": 2.7283e-05, "loss": 0.0077, "step": 18157 }, { "epoch": 20.041965764770843, "grad_norm": 0.2479478418827057, "learning_rate": 2.728266666666667e-05, "loss": 0.0037, "step": 18158 }, { "epoch": 20.043070127001656, "grad_norm": 0.12471115589141846, "learning_rate": 2.728233333333333e-05, "loss": 0.0032, "step": 18159 }, { "epoch": 
20.044174489232468, "grad_norm": 0.19764359295368195, "learning_rate": 2.7282e-05, "loss": 0.0032, "step": 18160 }, { "epoch": 20.04527885146328, "grad_norm": 0.552405834197998, "learning_rate": 2.7281666666666667e-05, "loss": 0.0059, "step": 18161 }, { "epoch": 20.046383213694092, "grad_norm": 0.4049074053764343, "learning_rate": 2.7281333333333333e-05, "loss": 0.0046, "step": 18162 }, { "epoch": 20.047487575924904, "grad_norm": 0.24217164516448975, "learning_rate": 2.7281000000000002e-05, "loss": 0.0051, "step": 18163 }, { "epoch": 20.048591938155717, "grad_norm": 0.21364714205265045, "learning_rate": 2.7280666666666668e-05, "loss": 0.0056, "step": 18164 }, { "epoch": 20.049696300386525, "grad_norm": 0.3494627773761749, "learning_rate": 2.7280333333333334e-05, "loss": 0.0149, "step": 18165 }, { "epoch": 20.050800662617338, "grad_norm": 0.20952530205249786, "learning_rate": 2.728e-05, "loss": 0.0094, "step": 18166 }, { "epoch": 20.05190502484815, "grad_norm": 0.2800906300544739, "learning_rate": 2.727966666666667e-05, "loss": 0.0053, "step": 18167 }, { "epoch": 20.053009387078962, "grad_norm": 0.14828360080718994, "learning_rate": 2.727933333333333e-05, "loss": 0.0039, "step": 18168 }, { "epoch": 20.054113749309774, "grad_norm": 0.18302428722381592, "learning_rate": 2.7279e-05, "loss": 0.0054, "step": 18169 }, { "epoch": 20.055218111540587, "grad_norm": 0.42979398369789124, "learning_rate": 2.7278666666666667e-05, "loss": 0.005, "step": 18170 }, { "epoch": 20.0563224737714, "grad_norm": 0.4066183865070343, "learning_rate": 2.7278333333333333e-05, "loss": 0.1302, "step": 18171 }, { "epoch": 20.057426836002207, "grad_norm": 0.484010249376297, "learning_rate": 2.7278000000000002e-05, "loss": 0.1334, "step": 18172 }, { "epoch": 20.05853119823302, "grad_norm": 0.40408602356910706, "learning_rate": 2.7277666666666668e-05, "loss": 0.0693, "step": 18173 }, { "epoch": 20.059635560463832, "grad_norm": 0.41396018862724304, "learning_rate": 2.7277333333333334e-05, "loss": 
0.056, "step": 18174 }, { "epoch": 20.060739922694644, "grad_norm": 0.32425984740257263, "learning_rate": 2.7277e-05, "loss": 0.059, "step": 18175 }, { "epoch": 20.061844284925456, "grad_norm": 0.3526996970176697, "learning_rate": 2.727666666666667e-05, "loss": 0.0534, "step": 18176 }, { "epoch": 20.06294864715627, "grad_norm": 0.17467071115970612, "learning_rate": 2.7276333333333332e-05, "loss": 0.0166, "step": 18177 }, { "epoch": 20.064053009387077, "grad_norm": 0.23077452182769775, "learning_rate": 2.7276e-05, "loss": 0.0533, "step": 18178 }, { "epoch": 20.06515737161789, "grad_norm": 0.30745434761047363, "learning_rate": 2.727566666666667e-05, "loss": 0.0218, "step": 18179 }, { "epoch": 20.0662617338487, "grad_norm": 0.2598535418510437, "learning_rate": 2.7275333333333333e-05, "loss": 0.0236, "step": 18180 }, { "epoch": 20.067366096079514, "grad_norm": 0.1329706907272339, "learning_rate": 2.7275000000000002e-05, "loss": 0.0057, "step": 18181 }, { "epoch": 20.068470458310326, "grad_norm": 0.3574276268482208, "learning_rate": 2.7274666666666668e-05, "loss": 0.0131, "step": 18182 }, { "epoch": 20.06957482054114, "grad_norm": 0.13870708644390106, "learning_rate": 2.7274333333333334e-05, "loss": 0.0039, "step": 18183 }, { "epoch": 20.07067918277195, "grad_norm": 0.1769414097070694, "learning_rate": 2.7274e-05, "loss": 0.0065, "step": 18184 }, { "epoch": 20.07178354500276, "grad_norm": 0.2005080282688141, "learning_rate": 2.727366666666667e-05, "loss": 0.005, "step": 18185 }, { "epoch": 20.07288790723357, "grad_norm": 0.11715587228536606, "learning_rate": 2.7273333333333332e-05, "loss": 0.0049, "step": 18186 }, { "epoch": 20.073992269464384, "grad_norm": 0.18326853215694427, "learning_rate": 2.7273e-05, "loss": 0.0056, "step": 18187 }, { "epoch": 20.075096631695196, "grad_norm": 0.15649554133415222, "learning_rate": 2.7272666666666667e-05, "loss": 0.0068, "step": 18188 }, { "epoch": 20.076200993926008, "grad_norm": 0.12242111563682556, "learning_rate": 
2.7272333333333333e-05, "loss": 0.0031, "step": 18189 }, { "epoch": 20.07730535615682, "grad_norm": 0.19146956503391266, "learning_rate": 2.7272000000000002e-05, "loss": 0.0036, "step": 18190 }, { "epoch": 20.078409718387633, "grad_norm": 0.1743244230747223, "learning_rate": 2.7271666666666665e-05, "loss": 0.0062, "step": 18191 }, { "epoch": 20.07951408061844, "grad_norm": 0.2221030294895172, "learning_rate": 2.7271333333333334e-05, "loss": 0.018, "step": 18192 }, { "epoch": 20.080618442849254, "grad_norm": 0.17903147637844086, "learning_rate": 2.7271e-05, "loss": 0.0084, "step": 18193 }, { "epoch": 20.081722805080066, "grad_norm": 0.2877904176712036, "learning_rate": 2.7270666666666666e-05, "loss": 0.0093, "step": 18194 }, { "epoch": 20.082827167310878, "grad_norm": 0.10633429884910583, "learning_rate": 2.7270333333333332e-05, "loss": 0.0039, "step": 18195 }, { "epoch": 20.08393152954169, "grad_norm": 0.08704876154661179, "learning_rate": 2.727e-05, "loss": 0.0022, "step": 18196 }, { "epoch": 20.085035891772502, "grad_norm": 0.24000628292560577, "learning_rate": 2.7269666666666667e-05, "loss": 0.0079, "step": 18197 }, { "epoch": 20.086140254003315, "grad_norm": 0.28516465425491333, "learning_rate": 2.7269333333333333e-05, "loss": 0.0082, "step": 18198 }, { "epoch": 20.087244616234123, "grad_norm": 0.14994744956493378, "learning_rate": 2.7269000000000003e-05, "loss": 0.0068, "step": 18199 }, { "epoch": 20.088348978464936, "grad_norm": 0.2703835368156433, "learning_rate": 2.7268666666666665e-05, "loss": 0.0047, "step": 18200 }, { "epoch": 20.089453340695748, "grad_norm": 0.2561348080635071, "learning_rate": 2.7268333333333335e-05, "loss": 0.0059, "step": 18201 }, { "epoch": 20.09055770292656, "grad_norm": 0.08042076975107193, "learning_rate": 2.7268e-05, "loss": 0.0033, "step": 18202 }, { "epoch": 20.091662065157372, "grad_norm": 0.3482578694820404, "learning_rate": 2.7267666666666666e-05, "loss": 0.0065, "step": 18203 }, { "epoch": 20.092766427388185, "grad_norm": 
0.16624659299850464, "learning_rate": 2.7267333333333336e-05, "loss": 0.0022, "step": 18204 }, { "epoch": 20.093870789618997, "grad_norm": 0.13252289593219757, "learning_rate": 2.7267e-05, "loss": 0.0033, "step": 18205 }, { "epoch": 20.094975151849805, "grad_norm": 0.09194287657737732, "learning_rate": 2.7266666666666668e-05, "loss": 0.0029, "step": 18206 }, { "epoch": 20.096079514080618, "grad_norm": 0.09978920966386795, "learning_rate": 2.7266333333333334e-05, "loss": 0.0026, "step": 18207 }, { "epoch": 20.09718387631143, "grad_norm": 0.0890960767865181, "learning_rate": 2.7266000000000003e-05, "loss": 0.0016, "step": 18208 }, { "epoch": 20.098288238542242, "grad_norm": 0.1364094465970993, "learning_rate": 2.7265666666666665e-05, "loss": 0.0038, "step": 18209 }, { "epoch": 20.099392600773054, "grad_norm": 0.5426840782165527, "learning_rate": 2.7265333333333335e-05, "loss": 0.013, "step": 18210 }, { "epoch": 20.100496963003867, "grad_norm": 0.4030725061893463, "learning_rate": 2.7265e-05, "loss": 0.0048, "step": 18211 }, { "epoch": 20.101601325234675, "grad_norm": 0.20906440913677216, "learning_rate": 2.7264666666666667e-05, "loss": 0.0063, "step": 18212 }, { "epoch": 20.102705687465487, "grad_norm": 0.22270168364048004, "learning_rate": 2.7264333333333336e-05, "loss": 0.0058, "step": 18213 }, { "epoch": 20.1038100496963, "grad_norm": 0.17775386571884155, "learning_rate": 2.7264000000000002e-05, "loss": 0.0032, "step": 18214 }, { "epoch": 20.104914411927112, "grad_norm": 0.08697860687971115, "learning_rate": 2.7263666666666668e-05, "loss": 0.003, "step": 18215 }, { "epoch": 20.106018774157924, "grad_norm": 0.10614658147096634, "learning_rate": 2.7263333333333334e-05, "loss": 0.0025, "step": 18216 }, { "epoch": 20.107123136388736, "grad_norm": 0.8229645490646362, "learning_rate": 2.7263e-05, "loss": 0.0067, "step": 18217 }, { "epoch": 20.10822749861955, "grad_norm": 0.1223733127117157, "learning_rate": 2.7262666666666666e-05, "loss": 0.0027, "step": 18218 }, { 
"epoch": 20.109331860850357, "grad_norm": 0.3233632743358612, "learning_rate": 2.7262333333333335e-05, "loss": 0.0078, "step": 18219 }, { "epoch": 20.11043622308117, "grad_norm": 0.21019236743450165, "learning_rate": 2.7261999999999997e-05, "loss": 0.0047, "step": 18220 }, { "epoch": 20.11154058531198, "grad_norm": 0.670868456363678, "learning_rate": 2.7261666666666667e-05, "loss": 0.1082, "step": 18221 }, { "epoch": 20.112644947542794, "grad_norm": 0.4311346709728241, "learning_rate": 2.7261333333333336e-05, "loss": 0.0636, "step": 18222 }, { "epoch": 20.113749309773606, "grad_norm": 0.48295116424560547, "learning_rate": 2.7261e-05, "loss": 0.0636, "step": 18223 }, { "epoch": 20.11485367200442, "grad_norm": 0.3749096691608429, "learning_rate": 2.7260666666666668e-05, "loss": 0.031, "step": 18224 }, { "epoch": 20.11595803423523, "grad_norm": 0.309329092502594, "learning_rate": 2.7260333333333334e-05, "loss": 0.0371, "step": 18225 }, { "epoch": 20.11706239646604, "grad_norm": 0.28251883387565613, "learning_rate": 2.726e-05, "loss": 0.0205, "step": 18226 }, { "epoch": 20.11816675869685, "grad_norm": 0.21822357177734375, "learning_rate": 2.7259666666666666e-05, "loss": 0.0205, "step": 18227 }, { "epoch": 20.119271120927664, "grad_norm": 0.17848314344882965, "learning_rate": 2.7259333333333335e-05, "loss": 0.0148, "step": 18228 }, { "epoch": 20.120375483158476, "grad_norm": 0.14800192415714264, "learning_rate": 2.7259e-05, "loss": 0.009, "step": 18229 }, { "epoch": 20.12147984538929, "grad_norm": 0.21054469048976898, "learning_rate": 2.7258666666666667e-05, "loss": 0.0104, "step": 18230 }, { "epoch": 20.1225842076201, "grad_norm": 0.2086726576089859, "learning_rate": 2.7258333333333336e-05, "loss": 0.0088, "step": 18231 }, { "epoch": 20.123688569850913, "grad_norm": 0.10588551312685013, "learning_rate": 2.7258e-05, "loss": 0.0039, "step": 18232 }, { "epoch": 20.12479293208172, "grad_norm": 0.20051927864551544, "learning_rate": 2.7257666666666668e-05, "loss": 0.0051, 
"step": 18233 }, { "epoch": 20.125897294312534, "grad_norm": 0.08712156116962433, "learning_rate": 2.7257333333333334e-05, "loss": 0.0053, "step": 18234 }, { "epoch": 20.127001656543346, "grad_norm": 0.14411762356758118, "learning_rate": 2.7257e-05, "loss": 0.035, "step": 18235 }, { "epoch": 20.128106018774158, "grad_norm": 0.14393457770347595, "learning_rate": 2.7256666666666666e-05, "loss": 0.0072, "step": 18236 }, { "epoch": 20.12921038100497, "grad_norm": 0.12257952988147736, "learning_rate": 2.7256333333333335e-05, "loss": 0.0069, "step": 18237 }, { "epoch": 20.130314743235783, "grad_norm": 0.7318070530891418, "learning_rate": 2.7256e-05, "loss": 0.0054, "step": 18238 }, { "epoch": 20.131419105466595, "grad_norm": 0.41244980692863464, "learning_rate": 2.7255666666666667e-05, "loss": 0.0081, "step": 18239 }, { "epoch": 20.132523467697403, "grad_norm": 0.20634667575359344, "learning_rate": 2.7255333333333337e-05, "loss": 0.0059, "step": 18240 }, { "epoch": 20.133627829928216, "grad_norm": 0.11518453806638718, "learning_rate": 2.7255e-05, "loss": 0.0048, "step": 18241 }, { "epoch": 20.134732192159028, "grad_norm": 0.11699601262807846, "learning_rate": 2.725466666666667e-05, "loss": 0.0036, "step": 18242 }, { "epoch": 20.13583655438984, "grad_norm": 0.39191049337387085, "learning_rate": 2.7254333333333334e-05, "loss": 0.0073, "step": 18243 }, { "epoch": 20.136940916620652, "grad_norm": 0.12622131407260895, "learning_rate": 2.7254e-05, "loss": 0.0022, "step": 18244 }, { "epoch": 20.138045278851465, "grad_norm": 0.0831872895359993, "learning_rate": 2.7253666666666666e-05, "loss": 0.0019, "step": 18245 }, { "epoch": 20.139149641082273, "grad_norm": 0.5573724508285522, "learning_rate": 2.7253333333333336e-05, "loss": 0.0079, "step": 18246 }, { "epoch": 20.140254003313085, "grad_norm": 0.3465650677680969, "learning_rate": 2.7253e-05, "loss": 0.0083, "step": 18247 }, { "epoch": 20.141358365543898, "grad_norm": 0.20779579877853394, "learning_rate": 
2.7252666666666667e-05, "loss": 0.0049, "step": 18248 }, { "epoch": 20.14246272777471, "grad_norm": 0.08893997222185135, "learning_rate": 2.7252333333333333e-05, "loss": 0.0023, "step": 18249 }, { "epoch": 20.143567090005522, "grad_norm": 0.14442850649356842, "learning_rate": 2.7252e-05, "loss": 0.0044, "step": 18250 }, { "epoch": 20.144671452236334, "grad_norm": 0.1208425760269165, "learning_rate": 2.725166666666667e-05, "loss": 0.0032, "step": 18251 }, { "epoch": 20.145775814467147, "grad_norm": 0.1676589846611023, "learning_rate": 2.725133333333333e-05, "loss": 0.005, "step": 18252 }, { "epoch": 20.146880176697955, "grad_norm": 0.13507311046123505, "learning_rate": 2.7251e-05, "loss": 0.0039, "step": 18253 }, { "epoch": 20.147984538928768, "grad_norm": 0.2878618538379669, "learning_rate": 2.725066666666667e-05, "loss": 0.0043, "step": 18254 }, { "epoch": 20.14908890115958, "grad_norm": 0.29121294617652893, "learning_rate": 2.7250333333333332e-05, "loss": 0.0029, "step": 18255 }, { "epoch": 20.150193263390392, "grad_norm": 0.2931560277938843, "learning_rate": 2.725e-05, "loss": 0.0071, "step": 18256 }, { "epoch": 20.151297625621204, "grad_norm": 0.360504150390625, "learning_rate": 2.7249666666666668e-05, "loss": 0.0134, "step": 18257 }, { "epoch": 20.152401987852016, "grad_norm": 0.2790584862232208, "learning_rate": 2.7249333333333334e-05, "loss": 0.0082, "step": 18258 }, { "epoch": 20.15350635008283, "grad_norm": 0.16482554376125336, "learning_rate": 2.7249e-05, "loss": 0.0019, "step": 18259 }, { "epoch": 20.154610712313637, "grad_norm": 0.16644702851772308, "learning_rate": 2.724866666666667e-05, "loss": 0.0039, "step": 18260 }, { "epoch": 20.15571507454445, "grad_norm": 0.15185585618019104, "learning_rate": 2.724833333333333e-05, "loss": 0.0032, "step": 18261 }, { "epoch": 20.156819436775262, "grad_norm": 0.1627931147813797, "learning_rate": 2.7248e-05, "loss": 0.0066, "step": 18262 }, { "epoch": 20.157923799006074, "grad_norm": 0.33187857270240784, 
"learning_rate": 2.724766666666667e-05, "loss": 0.0092, "step": 18263 }, { "epoch": 20.159028161236886, "grad_norm": 0.4112117886543274, "learning_rate": 2.7247333333333333e-05, "loss": 0.0145, "step": 18264 }, { "epoch": 20.1601325234677, "grad_norm": 0.19208908081054688, "learning_rate": 2.7247000000000002e-05, "loss": 0.0051, "step": 18265 }, { "epoch": 20.16123688569851, "grad_norm": 0.16927500069141388, "learning_rate": 2.7246666666666668e-05, "loss": 0.0013, "step": 18266 }, { "epoch": 20.16234124792932, "grad_norm": 0.17035402357578278, "learning_rate": 2.7246333333333334e-05, "loss": 0.0035, "step": 18267 }, { "epoch": 20.16344561016013, "grad_norm": 0.19493138790130615, "learning_rate": 2.7246e-05, "loss": 0.0037, "step": 18268 }, { "epoch": 20.164549972390944, "grad_norm": 0.14809119701385498, "learning_rate": 2.724566666666667e-05, "loss": 0.0046, "step": 18269 }, { "epoch": 20.165654334621756, "grad_norm": 0.15187838673591614, "learning_rate": 2.724533333333333e-05, "loss": 0.0055, "step": 18270 }, { "epoch": 20.16675869685257, "grad_norm": 0.5109142065048218, "learning_rate": 2.7245e-05, "loss": 0.1112, "step": 18271 }, { "epoch": 20.16786305908338, "grad_norm": 0.3458874821662903, "learning_rate": 2.724466666666667e-05, "loss": 0.0877, "step": 18272 }, { "epoch": 20.168967421314193, "grad_norm": 0.7726715803146362, "learning_rate": 2.7244333333333333e-05, "loss": 0.0898, "step": 18273 }, { "epoch": 20.170071783545, "grad_norm": 0.40105271339416504, "learning_rate": 2.7244000000000002e-05, "loss": 0.0466, "step": 18274 }, { "epoch": 20.171176145775814, "grad_norm": 0.31779003143310547, "learning_rate": 2.7243666666666668e-05, "loss": 0.0347, "step": 18275 }, { "epoch": 20.172280508006626, "grad_norm": 0.5849300622940063, "learning_rate": 2.7243333333333334e-05, "loss": 0.0525, "step": 18276 }, { "epoch": 20.173384870237438, "grad_norm": 0.2558305263519287, "learning_rate": 2.7243e-05, "loss": 0.0429, "step": 18277 }, { "epoch": 20.17448923246825, 
"grad_norm": 0.336999773979187, "learning_rate": 2.7242666666666666e-05, "loss": 0.0619, "step": 18278 }, { "epoch": 20.175593594699063, "grad_norm": 0.36558523774147034, "learning_rate": 2.7242333333333335e-05, "loss": 0.0254, "step": 18279 }, { "epoch": 20.17669795692987, "grad_norm": 0.2008749097585678, "learning_rate": 2.7242e-05, "loss": 0.0083, "step": 18280 }, { "epoch": 20.177802319160683, "grad_norm": 0.4880043864250183, "learning_rate": 2.7241666666666667e-05, "loss": 0.0051, "step": 18281 }, { "epoch": 20.178906681391496, "grad_norm": 0.12893816828727722, "learning_rate": 2.7241333333333333e-05, "loss": 0.0046, "step": 18282 }, { "epoch": 20.180011043622308, "grad_norm": 0.20162174105644226, "learning_rate": 2.7241000000000002e-05, "loss": 0.0079, "step": 18283 }, { "epoch": 20.18111540585312, "grad_norm": 0.2936554551124573, "learning_rate": 2.7240666666666665e-05, "loss": 0.0079, "step": 18284 }, { "epoch": 20.182219768083932, "grad_norm": 0.12973272800445557, "learning_rate": 2.7240333333333334e-05, "loss": 0.0041, "step": 18285 }, { "epoch": 20.183324130314745, "grad_norm": 0.7084522843360901, "learning_rate": 2.724e-05, "loss": 0.0127, "step": 18286 }, { "epoch": 20.184428492545553, "grad_norm": 0.23559698462486267, "learning_rate": 2.7239666666666666e-05, "loss": 0.0082, "step": 18287 }, { "epoch": 20.185532854776365, "grad_norm": 0.24231821298599243, "learning_rate": 2.7239333333333335e-05, "loss": 0.0174, "step": 18288 }, { "epoch": 20.186637217007178, "grad_norm": 0.19943353533744812, "learning_rate": 2.7239e-05, "loss": 0.0108, "step": 18289 }, { "epoch": 20.18774157923799, "grad_norm": 0.07702577114105225, "learning_rate": 2.7238666666666667e-05, "loss": 0.0027, "step": 18290 }, { "epoch": 20.188845941468802, "grad_norm": 0.11419723927974701, "learning_rate": 2.7238333333333333e-05, "loss": 0.0068, "step": 18291 }, { "epoch": 20.189950303699614, "grad_norm": 0.1460919976234436, "learning_rate": 2.7238000000000002e-05, "loss": 0.0032, "step": 
18292 }, { "epoch": 20.191054665930427, "grad_norm": 0.13103915750980377, "learning_rate": 2.7237666666666665e-05, "loss": 0.0062, "step": 18293 }, { "epoch": 20.192159028161235, "grad_norm": 0.12763935327529907, "learning_rate": 2.7237333333333334e-05, "loss": 0.0018, "step": 18294 }, { "epoch": 20.193263390392048, "grad_norm": 0.1519317626953125, "learning_rate": 2.7237e-05, "loss": 0.0049, "step": 18295 }, { "epoch": 20.19436775262286, "grad_norm": 0.2293110191822052, "learning_rate": 2.7236666666666666e-05, "loss": 0.0058, "step": 18296 }, { "epoch": 20.195472114853672, "grad_norm": 0.14582964777946472, "learning_rate": 2.7236333333333336e-05, "loss": 0.0057, "step": 18297 }, { "epoch": 20.196576477084484, "grad_norm": 0.5128074884414673, "learning_rate": 2.7236e-05, "loss": 0.0042, "step": 18298 }, { "epoch": 20.197680839315296, "grad_norm": 0.16126692295074463, "learning_rate": 2.7235666666666667e-05, "loss": 0.0043, "step": 18299 }, { "epoch": 20.19878520154611, "grad_norm": 0.2684921324253082, "learning_rate": 2.7235333333333333e-05, "loss": 0.0055, "step": 18300 }, { "epoch": 20.199889563776917, "grad_norm": 0.15540002286434174, "learning_rate": 2.7235000000000003e-05, "loss": 0.0036, "step": 18301 }, { "epoch": 20.20099392600773, "grad_norm": 0.47854745388031006, "learning_rate": 2.7234666666666665e-05, "loss": 0.005, "step": 18302 }, { "epoch": 20.202098288238542, "grad_norm": 0.23067983984947205, "learning_rate": 2.7234333333333335e-05, "loss": 0.0061, "step": 18303 }, { "epoch": 20.203202650469354, "grad_norm": 0.18202991783618927, "learning_rate": 2.7234000000000004e-05, "loss": 0.0057, "step": 18304 }, { "epoch": 20.204307012700166, "grad_norm": 0.16430458426475525, "learning_rate": 2.7233666666666666e-05, "loss": 0.0077, "step": 18305 }, { "epoch": 20.20541137493098, "grad_norm": 0.34490010142326355, "learning_rate": 2.7233333333333336e-05, "loss": 0.0048, "step": 18306 }, { "epoch": 20.20651573716179, "grad_norm": 0.3063865005970001, 
"learning_rate": 2.7233e-05, "loss": 0.0037, "step": 18307 }, { "epoch": 20.2076200993926, "grad_norm": 0.11992838233709335, "learning_rate": 2.7232666666666668e-05, "loss": 0.004, "step": 18308 }, { "epoch": 20.20872446162341, "grad_norm": 0.1499706655740738, "learning_rate": 2.7232333333333334e-05, "loss": 0.0035, "step": 18309 }, { "epoch": 20.209828823854224, "grad_norm": 0.15893733501434326, "learning_rate": 2.7232e-05, "loss": 0.004, "step": 18310 }, { "epoch": 20.210933186085036, "grad_norm": 0.6295909285545349, "learning_rate": 2.7231666666666665e-05, "loss": 0.0169, "step": 18311 }, { "epoch": 20.21203754831585, "grad_norm": 0.45872870087623596, "learning_rate": 2.7231333333333335e-05, "loss": 0.007, "step": 18312 }, { "epoch": 20.21314191054666, "grad_norm": 0.47556447982788086, "learning_rate": 2.7231e-05, "loss": 0.0084, "step": 18313 }, { "epoch": 20.214246272777473, "grad_norm": 0.27830997109413147, "learning_rate": 2.7230666666666667e-05, "loss": 0.0055, "step": 18314 }, { "epoch": 20.21535063500828, "grad_norm": 0.12713705003261566, "learning_rate": 2.7230333333333336e-05, "loss": 0.0043, "step": 18315 }, { "epoch": 20.216454997239094, "grad_norm": 0.4305863380432129, "learning_rate": 2.723e-05, "loss": 0.0065, "step": 18316 }, { "epoch": 20.217559359469906, "grad_norm": 0.09417453408241272, "learning_rate": 2.7229666666666668e-05, "loss": 0.0021, "step": 18317 }, { "epoch": 20.218663721700718, "grad_norm": 0.26393595337867737, "learning_rate": 2.7229333333333334e-05, "loss": 0.0091, "step": 18318 }, { "epoch": 20.21976808393153, "grad_norm": 0.24763523042201996, "learning_rate": 2.7229e-05, "loss": 0.003, "step": 18319 }, { "epoch": 20.220872446162343, "grad_norm": 0.31255316734313965, "learning_rate": 2.7228666666666666e-05, "loss": 0.0079, "step": 18320 }, { "epoch": 20.22197680839315, "grad_norm": 0.4588404893875122, "learning_rate": 2.7228333333333335e-05, "loss": 0.1053, "step": 18321 }, { "epoch": 20.223081170623963, "grad_norm": 
0.48897361755371094, "learning_rate": 2.7228e-05, "loss": 0.1097, "step": 18322 }, { "epoch": 20.224185532854776, "grad_norm": 0.3777430057525635, "learning_rate": 2.7227666666666667e-05, "loss": 0.0843, "step": 18323 }, { "epoch": 20.225289895085588, "grad_norm": 0.7334337830543518, "learning_rate": 2.7227333333333336e-05, "loss": 0.1004, "step": 18324 }, { "epoch": 20.2263942573164, "grad_norm": 0.388155072927475, "learning_rate": 2.7227e-05, "loss": 0.034, "step": 18325 }, { "epoch": 20.227498619547212, "grad_norm": 0.5281656384468079, "learning_rate": 2.7226666666666668e-05, "loss": 0.0312, "step": 18326 }, { "epoch": 20.228602981778025, "grad_norm": 0.2113276869058609, "learning_rate": 2.7226333333333334e-05, "loss": 0.0226, "step": 18327 }, { "epoch": 20.229707344008833, "grad_norm": 0.36025270819664, "learning_rate": 2.7226e-05, "loss": 0.0168, "step": 18328 }, { "epoch": 20.230811706239646, "grad_norm": 0.18059587478637695, "learning_rate": 2.722566666666667e-05, "loss": 0.0256, "step": 18329 }, { "epoch": 20.231916068470458, "grad_norm": 0.3832121789455414, "learning_rate": 2.7225333333333335e-05, "loss": 0.0136, "step": 18330 }, { "epoch": 20.23302043070127, "grad_norm": 0.17771978676319122, "learning_rate": 2.7225e-05, "loss": 0.0083, "step": 18331 }, { "epoch": 20.234124792932082, "grad_norm": 0.15376903116703033, "learning_rate": 2.7224666666666667e-05, "loss": 0.0063, "step": 18332 }, { "epoch": 20.235229155162894, "grad_norm": 0.23655596375465393, "learning_rate": 2.7224333333333336e-05, "loss": 0.0069, "step": 18333 }, { "epoch": 20.236333517393707, "grad_norm": 0.2440948337316513, "learning_rate": 2.7224e-05, "loss": 0.0076, "step": 18334 }, { "epoch": 20.237437879624515, "grad_norm": 0.19789011776447296, "learning_rate": 2.7223666666666668e-05, "loss": 0.0043, "step": 18335 }, { "epoch": 20.238542241855328, "grad_norm": 0.40945422649383545, "learning_rate": 2.7223333333333334e-05, "loss": 0.0144, "step": 18336 }, { "epoch": 20.23964660408614, 
"grad_norm": 0.20055019855499268, "learning_rate": 2.7223e-05, "loss": 0.0087, "step": 18337 }, { "epoch": 20.240750966316952, "grad_norm": 0.20905426144599915, "learning_rate": 2.722266666666667e-05, "loss": 0.0032, "step": 18338 }, { "epoch": 20.241855328547764, "grad_norm": 0.16074976325035095, "learning_rate": 2.7222333333333332e-05, "loss": 0.0075, "step": 18339 }, { "epoch": 20.242959690778576, "grad_norm": 0.12476835399866104, "learning_rate": 2.7222e-05, "loss": 0.004, "step": 18340 }, { "epoch": 20.24406405300939, "grad_norm": 0.2156059443950653, "learning_rate": 2.7221666666666667e-05, "loss": 0.0058, "step": 18341 }, { "epoch": 20.245168415240197, "grad_norm": 0.23089678585529327, "learning_rate": 2.7221333333333333e-05, "loss": 0.0098, "step": 18342 }, { "epoch": 20.24627277747101, "grad_norm": 0.1326865702867508, "learning_rate": 2.7221e-05, "loss": 0.0046, "step": 18343 }, { "epoch": 20.247377139701822, "grad_norm": 0.182073712348938, "learning_rate": 2.722066666666667e-05, "loss": 0.0075, "step": 18344 }, { "epoch": 20.248481501932634, "grad_norm": 0.15697769820690155, "learning_rate": 2.722033333333333e-05, "loss": 0.0065, "step": 18345 }, { "epoch": 20.249585864163446, "grad_norm": 0.31933459639549255, "learning_rate": 2.722e-05, "loss": 0.01, "step": 18346 }, { "epoch": 20.25069022639426, "grad_norm": 0.1164286732673645, "learning_rate": 2.721966666666667e-05, "loss": 0.0052, "step": 18347 }, { "epoch": 20.25179458862507, "grad_norm": 0.32332342863082886, "learning_rate": 2.7219333333333332e-05, "loss": 0.0044, "step": 18348 }, { "epoch": 20.25289895085588, "grad_norm": 0.5077553391456604, "learning_rate": 2.7219e-05, "loss": 0.0077, "step": 18349 }, { "epoch": 20.25400331308669, "grad_norm": 0.09564107656478882, "learning_rate": 2.7218666666666667e-05, "loss": 0.0024, "step": 18350 }, { "epoch": 20.255107675317504, "grad_norm": 0.11334560811519623, "learning_rate": 2.7218333333333333e-05, "loss": 0.0035, "step": 18351 }, { "epoch": 
20.256212037548316, "grad_norm": 0.13071636855602264, "learning_rate": 2.7218e-05, "loss": 0.0049, "step": 18352 }, { "epoch": 20.25731639977913, "grad_norm": 0.1360212117433548, "learning_rate": 2.721766666666667e-05, "loss": 0.0141, "step": 18353 }, { "epoch": 20.25842076200994, "grad_norm": 0.11435502767562866, "learning_rate": 2.7217333333333335e-05, "loss": 0.0031, "step": 18354 }, { "epoch": 20.25952512424075, "grad_norm": 0.4351199269294739, "learning_rate": 2.7217e-05, "loss": 0.0124, "step": 18355 }, { "epoch": 20.26062948647156, "grad_norm": 0.1806616634130478, "learning_rate": 2.721666666666667e-05, "loss": 0.0044, "step": 18356 }, { "epoch": 20.261733848702374, "grad_norm": 0.05201936140656471, "learning_rate": 2.7216333333333332e-05, "loss": 0.0018, "step": 18357 }, { "epoch": 20.262838210933186, "grad_norm": 0.39665165543556213, "learning_rate": 2.7216e-05, "loss": 0.0046, "step": 18358 }, { "epoch": 20.263942573163998, "grad_norm": 0.31242045760154724, "learning_rate": 2.7215666666666668e-05, "loss": 0.008, "step": 18359 }, { "epoch": 20.26504693539481, "grad_norm": 0.33753353357315063, "learning_rate": 2.7215333333333334e-05, "loss": 0.0119, "step": 18360 }, { "epoch": 20.266151297625623, "grad_norm": 0.08999964594841003, "learning_rate": 2.7215e-05, "loss": 0.0031, "step": 18361 }, { "epoch": 20.26725565985643, "grad_norm": 1.9121019840240479, "learning_rate": 2.721466666666667e-05, "loss": 0.0071, "step": 18362 }, { "epoch": 20.268360022087244, "grad_norm": 3.8543643951416016, "learning_rate": 2.7214333333333335e-05, "loss": 0.0114, "step": 18363 }, { "epoch": 20.269464384318056, "grad_norm": 0.17113499343395233, "learning_rate": 2.7214e-05, "loss": 0.0032, "step": 18364 }, { "epoch": 20.270568746548868, "grad_norm": 0.19982630014419556, "learning_rate": 2.721366666666667e-05, "loss": 0.0061, "step": 18365 }, { "epoch": 20.27167310877968, "grad_norm": 0.15789128839969635, "learning_rate": 2.7213333333333333e-05, "loss": 0.0036, "step": 18366 }, { 
"epoch": 20.272777471010492, "grad_norm": 0.17983458936214447, "learning_rate": 2.7213000000000002e-05, "loss": 0.0058, "step": 18367 }, { "epoch": 20.273881833241305, "grad_norm": 0.17100465297698975, "learning_rate": 2.7212666666666664e-05, "loss": 0.005, "step": 18368 }, { "epoch": 20.274986195472113, "grad_norm": 0.26293638348579407, "learning_rate": 2.7212333333333334e-05, "loss": 0.0075, "step": 18369 }, { "epoch": 20.276090557702926, "grad_norm": 0.050392765551805496, "learning_rate": 2.7212e-05, "loss": 0.0013, "step": 18370 }, { "epoch": 20.277194919933738, "grad_norm": 0.47173526883125305, "learning_rate": 2.7211666666666666e-05, "loss": 0.1064, "step": 18371 }, { "epoch": 20.27829928216455, "grad_norm": 0.418128103017807, "learning_rate": 2.7211333333333335e-05, "loss": 0.1204, "step": 18372 }, { "epoch": 20.279403644395362, "grad_norm": 0.36569148302078247, "learning_rate": 2.7211e-05, "loss": 0.0577, "step": 18373 }, { "epoch": 20.280508006626174, "grad_norm": 0.3098846673965454, "learning_rate": 2.7210666666666667e-05, "loss": 0.0596, "step": 18374 }, { "epoch": 20.281612368856987, "grad_norm": 0.27861282229423523, "learning_rate": 2.7210333333333333e-05, "loss": 0.0403, "step": 18375 }, { "epoch": 20.282716731087795, "grad_norm": 0.5075604915618896, "learning_rate": 2.7210000000000002e-05, "loss": 0.0535, "step": 18376 }, { "epoch": 20.283821093318608, "grad_norm": 0.30592668056488037, "learning_rate": 2.7209666666666665e-05, "loss": 0.0325, "step": 18377 }, { "epoch": 20.28492545554942, "grad_norm": 0.8997803926467896, "learning_rate": 2.7209333333333334e-05, "loss": 0.0174, "step": 18378 }, { "epoch": 20.286029817780232, "grad_norm": 0.18880324065685272, "learning_rate": 2.7209000000000003e-05, "loss": 0.0082, "step": 18379 }, { "epoch": 20.287134180011044, "grad_norm": 0.2832027077674866, "learning_rate": 2.7208666666666666e-05, "loss": 0.0138, "step": 18380 }, { "epoch": 20.288238542241857, "grad_norm": 0.43922126293182373, "learning_rate": 
2.7208333333333335e-05, "loss": 0.0224, "step": 18381 }, { "epoch": 20.28934290447267, "grad_norm": 1.063244342803955, "learning_rate": 2.7208e-05, "loss": 0.0106, "step": 18382 }, { "epoch": 20.290447266703477, "grad_norm": 0.5707268118858337, "learning_rate": 2.7207666666666667e-05, "loss": 0.0048, "step": 18383 }, { "epoch": 20.29155162893429, "grad_norm": 0.3564821481704712, "learning_rate": 2.7207333333333333e-05, "loss": 0.0273, "step": 18384 }, { "epoch": 20.292655991165102, "grad_norm": 0.13307057321071625, "learning_rate": 2.7207000000000002e-05, "loss": 0.0035, "step": 18385 }, { "epoch": 20.293760353395914, "grad_norm": 0.1519707292318344, "learning_rate": 2.7206666666666665e-05, "loss": 0.0045, "step": 18386 }, { "epoch": 20.294864715626726, "grad_norm": 0.21439559757709503, "learning_rate": 2.7206333333333334e-05, "loss": 0.007, "step": 18387 }, { "epoch": 20.29596907785754, "grad_norm": 0.09073910117149353, "learning_rate": 2.7206000000000003e-05, "loss": 0.0038, "step": 18388 }, { "epoch": 20.297073440088347, "grad_norm": 0.162307471036911, "learning_rate": 2.7205666666666666e-05, "loss": 0.0064, "step": 18389 }, { "epoch": 20.29817780231916, "grad_norm": 0.1212487742304802, "learning_rate": 2.7205333333333335e-05, "loss": 0.0035, "step": 18390 }, { "epoch": 20.29928216454997, "grad_norm": 0.06984541565179825, "learning_rate": 2.7205e-05, "loss": 0.0028, "step": 18391 }, { "epoch": 20.300386526780784, "grad_norm": 0.26392537355422974, "learning_rate": 2.7204666666666667e-05, "loss": 0.0042, "step": 18392 }, { "epoch": 20.301490889011596, "grad_norm": 0.4386345148086548, "learning_rate": 2.7204333333333333e-05, "loss": 0.0043, "step": 18393 }, { "epoch": 20.30259525124241, "grad_norm": 0.24628695845603943, "learning_rate": 2.7204000000000002e-05, "loss": 0.004, "step": 18394 }, { "epoch": 20.30369961347322, "grad_norm": 0.20324690639972687, "learning_rate": 2.7203666666666665e-05, "loss": 0.0053, "step": 18395 }, { "epoch": 20.30480397570403, 
"grad_norm": 0.3752024173736572, "learning_rate": 2.7203333333333334e-05, "loss": 0.0091, "step": 18396 }, { "epoch": 20.30590833793484, "grad_norm": 0.5435457229614258, "learning_rate": 2.7203000000000004e-05, "loss": 0.0124, "step": 18397 }, { "epoch": 20.307012700165654, "grad_norm": 0.18134699761867523, "learning_rate": 2.7202666666666666e-05, "loss": 0.0041, "step": 18398 }, { "epoch": 20.308117062396466, "grad_norm": 0.1661861389875412, "learning_rate": 2.7202333333333336e-05, "loss": 0.0071, "step": 18399 }, { "epoch": 20.309221424627278, "grad_norm": 0.14674809575080872, "learning_rate": 2.7201999999999998e-05, "loss": 0.0052, "step": 18400 }, { "epoch": 20.31032578685809, "grad_norm": 0.21050602197647095, "learning_rate": 2.7201666666666667e-05, "loss": 0.0052, "step": 18401 }, { "epoch": 20.311430149088903, "grad_norm": 0.2440495789051056, "learning_rate": 2.7201333333333333e-05, "loss": 0.0046, "step": 18402 }, { "epoch": 20.31253451131971, "grad_norm": 0.15321233868598938, "learning_rate": 2.7201e-05, "loss": 0.0041, "step": 18403 }, { "epoch": 20.313638873550524, "grad_norm": 0.1835898607969284, "learning_rate": 2.720066666666667e-05, "loss": 0.0055, "step": 18404 }, { "epoch": 20.314743235781336, "grad_norm": 0.1497117429971695, "learning_rate": 2.7200333333333335e-05, "loss": 0.0029, "step": 18405 }, { "epoch": 20.315847598012148, "grad_norm": 0.2779335379600525, "learning_rate": 2.72e-05, "loss": 0.0094, "step": 18406 }, { "epoch": 20.31695196024296, "grad_norm": 0.4084920585155487, "learning_rate": 2.7199666666666666e-05, "loss": 0.0141, "step": 18407 }, { "epoch": 20.318056322473772, "grad_norm": 0.14764724671840668, "learning_rate": 2.7199333333333336e-05, "loss": 0.0045, "step": 18408 }, { "epoch": 20.319160684704585, "grad_norm": 0.036398570984601974, "learning_rate": 2.7198999999999998e-05, "loss": 0.0011, "step": 18409 }, { "epoch": 20.320265046935393, "grad_norm": 0.15833668410778046, "learning_rate": 2.7198666666666668e-05, "loss": 0.0031, 
"step": 18410 }, { "epoch": 20.321369409166206, "grad_norm": 0.17550767958164215, "learning_rate": 2.7198333333333334e-05, "loss": 0.0075, "step": 18411 }, { "epoch": 20.322473771397018, "grad_norm": 0.2781470715999603, "learning_rate": 2.7198e-05, "loss": 0.0033, "step": 18412 }, { "epoch": 20.32357813362783, "grad_norm": 0.0887058675289154, "learning_rate": 2.719766666666667e-05, "loss": 0.0026, "step": 18413 }, { "epoch": 20.324682495858642, "grad_norm": 0.13330678641796112, "learning_rate": 2.7197333333333335e-05, "loss": 0.0018, "step": 18414 }, { "epoch": 20.325786858089455, "grad_norm": 0.2519519329071045, "learning_rate": 2.7197e-05, "loss": 0.0071, "step": 18415 }, { "epoch": 20.326891220320267, "grad_norm": 0.3525109887123108, "learning_rate": 2.7196666666666667e-05, "loss": 0.0029, "step": 18416 }, { "epoch": 20.327995582551075, "grad_norm": 0.27644050121307373, "learning_rate": 2.7196333333333336e-05, "loss": 0.004, "step": 18417 }, { "epoch": 20.329099944781888, "grad_norm": 0.21901275217533112, "learning_rate": 2.7196e-05, "loss": 0.0064, "step": 18418 }, { "epoch": 20.3302043070127, "grad_norm": 0.3968474268913269, "learning_rate": 2.7195666666666668e-05, "loss": 0.0139, "step": 18419 }, { "epoch": 20.331308669243512, "grad_norm": 0.8724764585494995, "learning_rate": 2.7195333333333334e-05, "loss": 0.0068, "step": 18420 }, { "epoch": 20.332413031474324, "grad_norm": 0.48315855860710144, "learning_rate": 2.7195e-05, "loss": 0.1124, "step": 18421 }, { "epoch": 20.333517393705137, "grad_norm": 0.4189865291118622, "learning_rate": 2.719466666666667e-05, "loss": 0.0718, "step": 18422 }, { "epoch": 20.33462175593595, "grad_norm": 0.48832467198371887, "learning_rate": 2.7194333333333335e-05, "loss": 0.0605, "step": 18423 }, { "epoch": 20.335726118166757, "grad_norm": 0.3072354197502136, "learning_rate": 2.7194e-05, "loss": 0.0337, "step": 18424 }, { "epoch": 20.33683048039757, "grad_norm": 0.5355426073074341, "learning_rate": 2.7193666666666667e-05, "loss": 
0.0398, "step": 18425 }, { "epoch": 20.337934842628382, "grad_norm": 0.5452665090560913, "learning_rate": 2.7193333333333336e-05, "loss": 0.0404, "step": 18426 }, { "epoch": 20.339039204859194, "grad_norm": 0.3821391463279724, "learning_rate": 2.7193e-05, "loss": 0.0331, "step": 18427 }, { "epoch": 20.340143567090006, "grad_norm": 0.3524361252784729, "learning_rate": 2.7192666666666668e-05, "loss": 0.0598, "step": 18428 }, { "epoch": 20.34124792932082, "grad_norm": 0.21624374389648438, "learning_rate": 2.7192333333333334e-05, "loss": 0.0092, "step": 18429 }, { "epoch": 20.342352291551627, "grad_norm": 0.24057085812091827, "learning_rate": 2.7192e-05, "loss": 0.0295, "step": 18430 }, { "epoch": 20.34345665378244, "grad_norm": 0.10898814350366592, "learning_rate": 2.719166666666667e-05, "loss": 0.0055, "step": 18431 }, { "epoch": 20.34456101601325, "grad_norm": 0.14074401557445526, "learning_rate": 2.7191333333333332e-05, "loss": 0.0045, "step": 18432 }, { "epoch": 20.345665378244064, "grad_norm": 0.3367971181869507, "learning_rate": 2.7191e-05, "loss": 0.0121, "step": 18433 }, { "epoch": 20.346769740474876, "grad_norm": 0.39322876930236816, "learning_rate": 2.7190666666666667e-05, "loss": 0.0103, "step": 18434 }, { "epoch": 20.34787410270569, "grad_norm": 0.10148626565933228, "learning_rate": 2.7190333333333333e-05, "loss": 0.0026, "step": 18435 }, { "epoch": 20.3489784649365, "grad_norm": 0.19182786345481873, "learning_rate": 2.719e-05, "loss": 0.0067, "step": 18436 }, { "epoch": 20.35008282716731, "grad_norm": 0.20134317874908447, "learning_rate": 2.7189666666666668e-05, "loss": 0.0088, "step": 18437 }, { "epoch": 20.35118718939812, "grad_norm": 0.168478861451149, "learning_rate": 2.7189333333333334e-05, "loss": 0.0059, "step": 18438 }, { "epoch": 20.352291551628934, "grad_norm": 0.2630661129951477, "learning_rate": 2.7189e-05, "loss": 0.0072, "step": 18439 }, { "epoch": 20.353395913859746, "grad_norm": 0.17109806835651398, "learning_rate": 2.718866666666667e-05, 
"loss": 0.0057, "step": 18440 }, { "epoch": 20.35450027609056, "grad_norm": 0.1493060439825058, "learning_rate": 2.7188333333333332e-05, "loss": 0.0051, "step": 18441 }, { "epoch": 20.35560463832137, "grad_norm": 0.10593299567699432, "learning_rate": 2.7188e-05, "loss": 0.0046, "step": 18442 }, { "epoch": 20.356709000552183, "grad_norm": 0.1663084328174591, "learning_rate": 2.7187666666666667e-05, "loss": 0.0061, "step": 18443 }, { "epoch": 20.35781336278299, "grad_norm": 0.14189285039901733, "learning_rate": 2.7187333333333333e-05, "loss": 0.0054, "step": 18444 }, { "epoch": 20.358917725013804, "grad_norm": 0.4446653723716736, "learning_rate": 2.7187e-05, "loss": 0.0157, "step": 18445 }, { "epoch": 20.360022087244616, "grad_norm": 0.1917703002691269, "learning_rate": 2.718666666666667e-05, "loss": 0.0079, "step": 18446 }, { "epoch": 20.361126449475428, "grad_norm": 0.6124304533004761, "learning_rate": 2.7186333333333334e-05, "loss": 0.047, "step": 18447 }, { "epoch": 20.36223081170624, "grad_norm": 0.13331392407417297, "learning_rate": 2.7186e-05, "loss": 0.0052, "step": 18448 }, { "epoch": 20.363335173937053, "grad_norm": 0.1397661566734314, "learning_rate": 2.718566666666667e-05, "loss": 0.0052, "step": 18449 }, { "epoch": 20.364439536167865, "grad_norm": 0.20826135575771332, "learning_rate": 2.7185333333333332e-05, "loss": 0.0053, "step": 18450 }, { "epoch": 20.365543898398673, "grad_norm": 0.5478195548057556, "learning_rate": 2.7185e-05, "loss": 0.0101, "step": 18451 }, { "epoch": 20.366648260629486, "grad_norm": 0.21905404329299927, "learning_rate": 2.7184666666666667e-05, "loss": 0.0068, "step": 18452 }, { "epoch": 20.367752622860298, "grad_norm": 0.13124893605709076, "learning_rate": 2.7184333333333333e-05, "loss": 0.0036, "step": 18453 }, { "epoch": 20.36885698509111, "grad_norm": 0.10800541937351227, "learning_rate": 2.7184000000000003e-05, "loss": 0.0054, "step": 18454 }, { "epoch": 20.369961347321922, "grad_norm": 0.19712476432323456, "learning_rate": 
2.718366666666667e-05, "loss": 0.0059, "step": 18455 }, { "epoch": 20.371065709552735, "grad_norm": 0.16678597033023834, "learning_rate": 2.7183333333333335e-05, "loss": 0.0064, "step": 18456 }, { "epoch": 20.372170071783543, "grad_norm": 0.10251808166503906, "learning_rate": 2.7183e-05, "loss": 0.0047, "step": 18457 }, { "epoch": 20.373274434014355, "grad_norm": 0.22304776310920715, "learning_rate": 2.718266666666667e-05, "loss": 0.0043, "step": 18458 }, { "epoch": 20.374378796245168, "grad_norm": 0.11250712722539902, "learning_rate": 2.7182333333333332e-05, "loss": 0.0059, "step": 18459 }, { "epoch": 20.37548315847598, "grad_norm": 0.055163994431495667, "learning_rate": 2.7182e-05, "loss": 0.0015, "step": 18460 }, { "epoch": 20.376587520706792, "grad_norm": 0.33450251817703247, "learning_rate": 2.7181666666666664e-05, "loss": 0.0055, "step": 18461 }, { "epoch": 20.377691882937604, "grad_norm": 0.13515271246433258, "learning_rate": 2.7181333333333334e-05, "loss": 0.0041, "step": 18462 }, { "epoch": 20.378796245168417, "grad_norm": 0.09099654853343964, "learning_rate": 2.7181000000000003e-05, "loss": 0.003, "step": 18463 }, { "epoch": 20.379900607399225, "grad_norm": 0.2515782117843628, "learning_rate": 2.7180666666666665e-05, "loss": 0.0054, "step": 18464 }, { "epoch": 20.381004969630037, "grad_norm": 0.19187304377555847, "learning_rate": 2.7180333333333335e-05, "loss": 0.0077, "step": 18465 }, { "epoch": 20.38210933186085, "grad_norm": 0.5127349495887756, "learning_rate": 2.718e-05, "loss": 0.0119, "step": 18466 }, { "epoch": 20.383213694091662, "grad_norm": 1.4450769424438477, "learning_rate": 2.7179666666666667e-05, "loss": 0.0116, "step": 18467 }, { "epoch": 20.384318056322474, "grad_norm": 0.4502028524875641, "learning_rate": 2.7179333333333333e-05, "loss": 0.0121, "step": 18468 }, { "epoch": 20.385422418553286, "grad_norm": 0.06343318521976471, "learning_rate": 2.7179000000000002e-05, "loss": 0.0017, "step": 18469 }, { "epoch": 20.3865267807841, "grad_norm": 
0.2124527394771576, "learning_rate": 2.7178666666666668e-05, "loss": 0.0061, "step": 18470 }, { "epoch": 20.387631143014907, "grad_norm": 0.43957072496414185, "learning_rate": 2.7178333333333334e-05, "loss": 0.1432, "step": 18471 }, { "epoch": 20.38873550524572, "grad_norm": 0.6522342562675476, "learning_rate": 2.7178000000000003e-05, "loss": 0.0763, "step": 18472 }, { "epoch": 20.38983986747653, "grad_norm": 0.4327370226383209, "learning_rate": 2.7177666666666666e-05, "loss": 0.057, "step": 18473 }, { "epoch": 20.390944229707344, "grad_norm": 0.49535229802131653, "learning_rate": 2.7177333333333335e-05, "loss": 0.0824, "step": 18474 }, { "epoch": 20.392048591938156, "grad_norm": 0.4159965217113495, "learning_rate": 2.7177e-05, "loss": 0.0811, "step": 18475 }, { "epoch": 20.39315295416897, "grad_norm": 0.29066115617752075, "learning_rate": 2.7176666666666667e-05, "loss": 0.0248, "step": 18476 }, { "epoch": 20.39425731639978, "grad_norm": 0.30534249544143677, "learning_rate": 2.7176333333333333e-05, "loss": 0.0263, "step": 18477 }, { "epoch": 20.39536167863059, "grad_norm": 1.2977170944213867, "learning_rate": 2.7176000000000002e-05, "loss": 0.0335, "step": 18478 }, { "epoch": 20.3964660408614, "grad_norm": 0.14092504978179932, "learning_rate": 2.7175666666666668e-05, "loss": 0.0106, "step": 18479 }, { "epoch": 20.397570403092214, "grad_norm": 0.2707023024559021, "learning_rate": 2.7175333333333334e-05, "loss": 0.0108, "step": 18480 }, { "epoch": 20.398674765323026, "grad_norm": 0.2425452023744583, "learning_rate": 2.7175000000000003e-05, "loss": 0.0196, "step": 18481 }, { "epoch": 20.39977912755384, "grad_norm": 0.10501407086849213, "learning_rate": 2.7174666666666666e-05, "loss": 0.0042, "step": 18482 }, { "epoch": 20.40088348978465, "grad_norm": 0.14378972351551056, "learning_rate": 2.7174333333333335e-05, "loss": 0.0051, "step": 18483 }, { "epoch": 20.401987852015463, "grad_norm": 0.17566360533237457, "learning_rate": 2.7174e-05, "loss": 0.0195, "step": 18484 }, 
{ "epoch": 20.40309221424627, "grad_norm": 0.1698608547449112, "learning_rate": 2.7173666666666667e-05, "loss": 0.0065, "step": 18485 }, { "epoch": 20.404196576477084, "grad_norm": 0.1316320300102234, "learning_rate": 2.7173333333333333e-05, "loss": 0.0072, "step": 18486 }, { "epoch": 20.405300938707896, "grad_norm": 0.09541013091802597, "learning_rate": 2.7173000000000002e-05, "loss": 0.0026, "step": 18487 }, { "epoch": 20.406405300938708, "grad_norm": 0.13105572760105133, "learning_rate": 2.7172666666666668e-05, "loss": 0.0047, "step": 18488 }, { "epoch": 20.40750966316952, "grad_norm": 0.22092199325561523, "learning_rate": 2.7172333333333334e-05, "loss": 0.0064, "step": 18489 }, { "epoch": 20.408614025400333, "grad_norm": 0.13855212926864624, "learning_rate": 2.7172e-05, "loss": 0.004, "step": 18490 }, { "epoch": 20.409718387631145, "grad_norm": 0.20950917899608612, "learning_rate": 2.7171666666666666e-05, "loss": 0.0094, "step": 18491 }, { "epoch": 20.410822749861953, "grad_norm": 0.2661352753639221, "learning_rate": 2.7171333333333335e-05, "loss": 0.0088, "step": 18492 }, { "epoch": 20.411927112092766, "grad_norm": 0.15141846239566803, "learning_rate": 2.7170999999999998e-05, "loss": 0.0067, "step": 18493 }, { "epoch": 20.413031474323578, "grad_norm": 0.26106494665145874, "learning_rate": 2.7170666666666667e-05, "loss": 0.0065, "step": 18494 }, { "epoch": 20.41413583655439, "grad_norm": 0.11218082904815674, "learning_rate": 2.7170333333333333e-05, "loss": 0.003, "step": 18495 }, { "epoch": 20.415240198785202, "grad_norm": 0.2346601039171219, "learning_rate": 2.717e-05, "loss": 0.0062, "step": 18496 }, { "epoch": 20.416344561016015, "grad_norm": 0.16874149441719055, "learning_rate": 2.716966666666667e-05, "loss": 0.0035, "step": 18497 }, { "epoch": 20.417448923246823, "grad_norm": 0.4040270447731018, "learning_rate": 2.7169333333333334e-05, "loss": 0.0128, "step": 18498 }, { "epoch": 20.418553285477635, "grad_norm": 0.26013004779815674, "learning_rate": 
2.7169e-05, "loss": 0.0128, "step": 18499 }, { "epoch": 20.419657647708448, "grad_norm": 0.17132176458835602, "learning_rate": 2.7168666666666666e-05, "loss": 0.0029, "step": 18500 }, { "epoch": 20.42076200993926, "grad_norm": 0.3215470016002655, "learning_rate": 2.7168333333333336e-05, "loss": 0.0108, "step": 18501 }, { "epoch": 20.421866372170072, "grad_norm": 0.0688609778881073, "learning_rate": 2.7167999999999998e-05, "loss": 0.0018, "step": 18502 }, { "epoch": 20.422970734400884, "grad_norm": 0.2164517194032669, "learning_rate": 2.7167666666666667e-05, "loss": 0.0042, "step": 18503 }, { "epoch": 20.424075096631697, "grad_norm": 0.24586956202983856, "learning_rate": 2.7167333333333337e-05, "loss": 0.004, "step": 18504 }, { "epoch": 20.425179458862505, "grad_norm": 0.09393928945064545, "learning_rate": 2.7167e-05, "loss": 0.0036, "step": 18505 }, { "epoch": 20.426283821093318, "grad_norm": 0.05873792618513107, "learning_rate": 2.716666666666667e-05, "loss": 0.0016, "step": 18506 }, { "epoch": 20.42738818332413, "grad_norm": 0.26970982551574707, "learning_rate": 2.7166333333333335e-05, "loss": 0.0037, "step": 18507 }, { "epoch": 20.428492545554942, "grad_norm": 0.2466580718755722, "learning_rate": 2.7166e-05, "loss": 0.0111, "step": 18508 }, { "epoch": 20.429596907785754, "grad_norm": 0.09458865970373154, "learning_rate": 2.7165666666666666e-05, "loss": 0.0036, "step": 18509 }, { "epoch": 20.430701270016566, "grad_norm": 0.7471186518669128, "learning_rate": 2.7165333333333336e-05, "loss": 0.0138, "step": 18510 }, { "epoch": 20.43180563224738, "grad_norm": 0.15806172788143158, "learning_rate": 2.7164999999999998e-05, "loss": 0.0048, "step": 18511 }, { "epoch": 20.432909994478187, "grad_norm": 0.1213170662522316, "learning_rate": 2.7164666666666668e-05, "loss": 0.0022, "step": 18512 }, { "epoch": 20.434014356709, "grad_norm": 0.10757624357938766, "learning_rate": 2.7164333333333337e-05, "loss": 0.0035, "step": 18513 }, { "epoch": 20.435118718939812, "grad_norm": 
0.14830045402050018, "learning_rate": 2.7164e-05, "loss": 0.0051, "step": 18514 }, { "epoch": 20.436223081170624, "grad_norm": 0.14308716356754303, "learning_rate": 2.716366666666667e-05, "loss": 0.0063, "step": 18515 }, { "epoch": 20.437327443401436, "grad_norm": 1.3186836242675781, "learning_rate": 2.7163333333333335e-05, "loss": 0.0063, "step": 18516 }, { "epoch": 20.43843180563225, "grad_norm": 0.26864686608314514, "learning_rate": 2.7163e-05, "loss": 0.0083, "step": 18517 }, { "epoch": 20.43953616786306, "grad_norm": 0.1022966131567955, "learning_rate": 2.7162666666666667e-05, "loss": 0.0024, "step": 18518 }, { "epoch": 20.44064053009387, "grad_norm": 0.12215352803468704, "learning_rate": 2.7162333333333336e-05, "loss": 0.0034, "step": 18519 }, { "epoch": 20.44174489232468, "grad_norm": 0.20165862143039703, "learning_rate": 2.7162000000000002e-05, "loss": 0.0073, "step": 18520 }, { "epoch": 20.442849254555494, "grad_norm": 0.7358683347702026, "learning_rate": 2.7161666666666668e-05, "loss": 0.1243, "step": 18521 }, { "epoch": 20.443953616786306, "grad_norm": 0.7306311130523682, "learning_rate": 2.7161333333333334e-05, "loss": 0.0677, "step": 18522 }, { "epoch": 20.44505797901712, "grad_norm": 0.38893458247184753, "learning_rate": 2.7161e-05, "loss": 0.1173, "step": 18523 }, { "epoch": 20.44616234124793, "grad_norm": 0.4713488817214966, "learning_rate": 2.716066666666667e-05, "loss": 0.0444, "step": 18524 }, { "epoch": 20.447266703478743, "grad_norm": 0.45377373695373535, "learning_rate": 2.716033333333333e-05, "loss": 0.0502, "step": 18525 }, { "epoch": 20.44837106570955, "grad_norm": 0.768730640411377, "learning_rate": 2.716e-05, "loss": 0.0373, "step": 18526 }, { "epoch": 20.449475427940364, "grad_norm": 0.3299022912979126, "learning_rate": 2.7159666666666667e-05, "loss": 0.018, "step": 18527 }, { "epoch": 20.450579790171176, "grad_norm": 0.32737067341804504, "learning_rate": 2.7159333333333333e-05, "loss": 0.0609, "step": 18528 }, { "epoch": 
20.451684152401988, "grad_norm": 0.5001420974731445, "learning_rate": 2.7159000000000002e-05, "loss": 0.0227, "step": 18529 }, { "epoch": 20.4527885146328, "grad_norm": 0.2774602770805359, "learning_rate": 2.7158666666666668e-05, "loss": 0.0153, "step": 18530 }, { "epoch": 20.453892876863613, "grad_norm": 0.22124983370304108, "learning_rate": 2.7158333333333334e-05, "loss": 0.0047, "step": 18531 }, { "epoch": 20.45499723909442, "grad_norm": 0.18288791179656982, "learning_rate": 2.7158e-05, "loss": 0.0086, "step": 18532 }, { "epoch": 20.456101601325233, "grad_norm": 0.2910155951976776, "learning_rate": 2.715766666666667e-05, "loss": 0.0062, "step": 18533 }, { "epoch": 20.457205963556046, "grad_norm": 0.1556674987077713, "learning_rate": 2.7157333333333332e-05, "loss": 0.0067, "step": 18534 }, { "epoch": 20.458310325786858, "grad_norm": 0.22443881630897522, "learning_rate": 2.7157e-05, "loss": 0.0061, "step": 18535 }, { "epoch": 20.45941468801767, "grad_norm": 0.12559252977371216, "learning_rate": 2.7156666666666667e-05, "loss": 0.0038, "step": 18536 }, { "epoch": 20.460519050248482, "grad_norm": 0.25215578079223633, "learning_rate": 2.7156333333333333e-05, "loss": 0.0082, "step": 18537 }, { "epoch": 20.461623412479295, "grad_norm": 0.1270264983177185, "learning_rate": 2.7156000000000002e-05, "loss": 0.0049, "step": 18538 }, { "epoch": 20.462727774710103, "grad_norm": 0.10534851998090744, "learning_rate": 2.7155666666666668e-05, "loss": 0.0033, "step": 18539 }, { "epoch": 20.463832136940916, "grad_norm": 0.32341325283050537, "learning_rate": 2.7155333333333334e-05, "loss": 0.01, "step": 18540 }, { "epoch": 20.464936499171728, "grad_norm": 0.12660223245620728, "learning_rate": 2.7155e-05, "loss": 0.0046, "step": 18541 }, { "epoch": 20.46604086140254, "grad_norm": 0.09870998561382294, "learning_rate": 2.715466666666667e-05, "loss": 0.003, "step": 18542 }, { "epoch": 20.467145223633352, "grad_norm": 0.08347009122371674, "learning_rate": 2.7154333333333332e-05, "loss": 
0.0034, "step": 18543 }, { "epoch": 20.468249585864164, "grad_norm": 0.1714758723974228, "learning_rate": 2.7154e-05, "loss": 0.0064, "step": 18544 }, { "epoch": 20.469353948094977, "grad_norm": 0.21708929538726807, "learning_rate": 2.7153666666666667e-05, "loss": 0.0226, "step": 18545 }, { "epoch": 20.470458310325785, "grad_norm": 0.2353241741657257, "learning_rate": 2.7153333333333333e-05, "loss": 0.0067, "step": 18546 }, { "epoch": 20.471562672556598, "grad_norm": 0.16114403307437897, "learning_rate": 2.7153000000000002e-05, "loss": 0.0267, "step": 18547 }, { "epoch": 20.47266703478741, "grad_norm": 0.2184377759695053, "learning_rate": 2.715266666666667e-05, "loss": 0.002, "step": 18548 }, { "epoch": 20.473771397018222, "grad_norm": 0.10782749205827713, "learning_rate": 2.7152333333333334e-05, "loss": 0.0026, "step": 18549 }, { "epoch": 20.474875759249034, "grad_norm": 0.25158771872520447, "learning_rate": 2.7152e-05, "loss": 0.0047, "step": 18550 }, { "epoch": 20.475980121479846, "grad_norm": 0.17679959535598755, "learning_rate": 2.7151666666666666e-05, "loss": 0.0026, "step": 18551 }, { "epoch": 20.47708448371066, "grad_norm": 0.1509724110364914, "learning_rate": 2.7151333333333332e-05, "loss": 0.0041, "step": 18552 }, { "epoch": 20.478188845941467, "grad_norm": 0.2598579525947571, "learning_rate": 2.7151e-05, "loss": 0.0055, "step": 18553 }, { "epoch": 20.47929320817228, "grad_norm": 0.09003273397684097, "learning_rate": 2.7150666666666667e-05, "loss": 0.0019, "step": 18554 }, { "epoch": 20.480397570403092, "grad_norm": 0.18542703986167908, "learning_rate": 2.7150333333333333e-05, "loss": 0.0044, "step": 18555 }, { "epoch": 20.481501932633904, "grad_norm": 0.23188377916812897, "learning_rate": 2.7150000000000003e-05, "loss": 0.006, "step": 18556 }, { "epoch": 20.482606294864716, "grad_norm": 0.12428867071866989, "learning_rate": 2.7149666666666665e-05, "loss": 0.0037, "step": 18557 }, { "epoch": 20.48371065709553, "grad_norm": 0.20870321989059448, 
"learning_rate": 2.7149333333333335e-05, "loss": 0.0081, "step": 18558 }, { "epoch": 20.48481501932634, "grad_norm": 0.13333700597286224, "learning_rate": 2.7149e-05, "loss": 0.006, "step": 18559 }, { "epoch": 20.48591938155715, "grad_norm": 0.3085247874259949, "learning_rate": 2.7148666666666666e-05, "loss": 0.012, "step": 18560 }, { "epoch": 20.48702374378796, "grad_norm": 0.14981548488140106, "learning_rate": 2.7148333333333332e-05, "loss": 0.0052, "step": 18561 }, { "epoch": 20.488128106018774, "grad_norm": 0.1901530921459198, "learning_rate": 2.7148e-05, "loss": 0.0026, "step": 18562 }, { "epoch": 20.489232468249586, "grad_norm": 0.16647273302078247, "learning_rate": 2.7147666666666668e-05, "loss": 0.0063, "step": 18563 }, { "epoch": 20.4903368304804, "grad_norm": 0.18187564611434937, "learning_rate": 2.7147333333333334e-05, "loss": 0.0036, "step": 18564 }, { "epoch": 20.49144119271121, "grad_norm": 0.4116401672363281, "learning_rate": 2.7147000000000003e-05, "loss": 0.0089, "step": 18565 }, { "epoch": 20.49254555494202, "grad_norm": 0.0622384175658226, "learning_rate": 2.7146666666666665e-05, "loss": 0.0016, "step": 18566 }, { "epoch": 20.49364991717283, "grad_norm": 0.21380020678043365, "learning_rate": 2.7146333333333335e-05, "loss": 0.008, "step": 18567 }, { "epoch": 20.494754279403644, "grad_norm": 0.22381679713726044, "learning_rate": 2.7146e-05, "loss": 0.0071, "step": 18568 }, { "epoch": 20.495858641634456, "grad_norm": 0.126595139503479, "learning_rate": 2.7145666666666667e-05, "loss": 0.002, "step": 18569 }, { "epoch": 20.496963003865268, "grad_norm": 1.301483154296875, "learning_rate": 2.7145333333333336e-05, "loss": 0.0076, "step": 18570 }, { "epoch": 20.49806736609608, "grad_norm": 0.5818246603012085, "learning_rate": 2.7145000000000002e-05, "loss": 0.1546, "step": 18571 }, { "epoch": 20.499171728326893, "grad_norm": 0.37637561559677124, "learning_rate": 2.7144666666666668e-05, "loss": 0.1207, "step": 18572 }, { "epoch": 20.5002760905577, 
"grad_norm": 0.43975868821144104, "learning_rate": 2.7144333333333334e-05, "loss": 0.0697, "step": 18573 }, { "epoch": 20.501380452788514, "grad_norm": 0.29781925678253174, "learning_rate": 2.7144000000000003e-05, "loss": 0.0464, "step": 18574 }, { "epoch": 20.502484815019326, "grad_norm": 0.3350355327129364, "learning_rate": 2.7143666666666666e-05, "loss": 0.0381, "step": 18575 }, { "epoch": 20.503589177250138, "grad_norm": 0.40856680274009705, "learning_rate": 2.7143333333333335e-05, "loss": 0.0499, "step": 18576 }, { "epoch": 20.50469353948095, "grad_norm": 0.25493165850639343, "learning_rate": 2.7143e-05, "loss": 0.0185, "step": 18577 }, { "epoch": 20.505797901711762, "grad_norm": 0.31448236107826233, "learning_rate": 2.7142666666666667e-05, "loss": 0.0403, "step": 18578 }, { "epoch": 20.506902263942575, "grad_norm": 0.36453911662101746, "learning_rate": 2.7142333333333336e-05, "loss": 0.012, "step": 18579 }, { "epoch": 20.508006626173383, "grad_norm": 0.21859772503376007, "learning_rate": 2.7142e-05, "loss": 0.0057, "step": 18580 }, { "epoch": 20.509110988404196, "grad_norm": 0.6324297785758972, "learning_rate": 2.7141666666666668e-05, "loss": 0.0177, "step": 18581 }, { "epoch": 20.510215350635008, "grad_norm": 0.16573816537857056, "learning_rate": 2.7141333333333334e-05, "loss": 0.0068, "step": 18582 }, { "epoch": 20.51131971286582, "grad_norm": 0.1551225632429123, "learning_rate": 2.7141e-05, "loss": 0.0056, "step": 18583 }, { "epoch": 20.512424075096632, "grad_norm": 0.12874552607536316, "learning_rate": 2.7140666666666666e-05, "loss": 0.0045, "step": 18584 }, { "epoch": 20.513528437327444, "grad_norm": 0.08662062138319016, "learning_rate": 2.7140333333333335e-05, "loss": 0.004, "step": 18585 }, { "epoch": 20.514632799558257, "grad_norm": 0.14612601697444916, "learning_rate": 2.7139999999999998e-05, "loss": 0.0045, "step": 18586 }, { "epoch": 20.515737161789065, "grad_norm": 0.050433896481990814, "learning_rate": 2.7139666666666667e-05, "loss": 0.0026, 
"step": 18587 }, { "epoch": 20.516841524019878, "grad_norm": 0.07922443747520447, "learning_rate": 2.7139333333333336e-05, "loss": 0.0025, "step": 18588 }, { "epoch": 20.51794588625069, "grad_norm": 0.2295660525560379, "learning_rate": 2.7139e-05, "loss": 0.0061, "step": 18589 }, { "epoch": 20.519050248481502, "grad_norm": 0.2034694105386734, "learning_rate": 2.7138666666666668e-05, "loss": 0.004, "step": 18590 }, { "epoch": 20.520154610712314, "grad_norm": 0.1854027956724167, "learning_rate": 2.7138333333333334e-05, "loss": 0.0063, "step": 18591 }, { "epoch": 20.521258972943127, "grad_norm": 0.11662735044956207, "learning_rate": 2.7138e-05, "loss": 0.0041, "step": 18592 }, { "epoch": 20.52236333517394, "grad_norm": 0.20203371345996857, "learning_rate": 2.7137666666666666e-05, "loss": 0.007, "step": 18593 }, { "epoch": 20.523467697404747, "grad_norm": 0.11002839356660843, "learning_rate": 2.7137333333333335e-05, "loss": 0.0025, "step": 18594 }, { "epoch": 20.52457205963556, "grad_norm": 0.12412858009338379, "learning_rate": 2.7137e-05, "loss": 0.003, "step": 18595 }, { "epoch": 20.525676421866372, "grad_norm": 0.5383366942405701, "learning_rate": 2.7136666666666667e-05, "loss": 0.0054, "step": 18596 }, { "epoch": 20.526780784097184, "grad_norm": 0.19674921035766602, "learning_rate": 2.7136333333333336e-05, "loss": 0.0029, "step": 18597 }, { "epoch": 20.527885146327996, "grad_norm": 0.12727603316307068, "learning_rate": 2.7136e-05, "loss": 0.0034, "step": 18598 }, { "epoch": 20.52898950855881, "grad_norm": 0.2627699375152588, "learning_rate": 2.713566666666667e-05, "loss": 0.0059, "step": 18599 }, { "epoch": 20.53009387078962, "grad_norm": 0.2665499746799469, "learning_rate": 2.7135333333333334e-05, "loss": 0.0062, "step": 18600 }, { "epoch": 20.53119823302043, "grad_norm": 0.7151870727539062, "learning_rate": 2.7135e-05, "loss": 0.008, "step": 18601 }, { "epoch": 20.53230259525124, "grad_norm": 0.4133571982383728, "learning_rate": 2.7134666666666666e-05, "loss": 
0.0146, "step": 18602 }, { "epoch": 20.533406957482054, "grad_norm": 0.19630642235279083, "learning_rate": 2.7134333333333335e-05, "loss": 0.0018, "step": 18603 }, { "epoch": 20.534511319712866, "grad_norm": 0.5329399108886719, "learning_rate": 2.7134e-05, "loss": 0.0156, "step": 18604 }, { "epoch": 20.53561568194368, "grad_norm": 0.21117714047431946, "learning_rate": 2.7133666666666667e-05, "loss": 0.0327, "step": 18605 }, { "epoch": 20.53672004417449, "grad_norm": 0.3979783058166504, "learning_rate": 2.7133333333333337e-05, "loss": 0.0118, "step": 18606 }, { "epoch": 20.5378244064053, "grad_norm": 0.2065846174955368, "learning_rate": 2.7133e-05, "loss": 0.005, "step": 18607 }, { "epoch": 20.53892876863611, "grad_norm": 0.2654399871826172, "learning_rate": 2.713266666666667e-05, "loss": 0.0038, "step": 18608 }, { "epoch": 20.540033130866924, "grad_norm": 0.12879906594753265, "learning_rate": 2.7132333333333335e-05, "loss": 0.0019, "step": 18609 }, { "epoch": 20.541137493097736, "grad_norm": 0.18441203236579895, "learning_rate": 2.7132e-05, "loss": 0.0031, "step": 18610 }, { "epoch": 20.542241855328548, "grad_norm": 0.19219046831130981, "learning_rate": 2.7131666666666666e-05, "loss": 0.0046, "step": 18611 }, { "epoch": 20.54334621755936, "grad_norm": 0.1976853609085083, "learning_rate": 2.7131333333333332e-05, "loss": 0.0068, "step": 18612 }, { "epoch": 20.544450579790173, "grad_norm": 0.14310157299041748, "learning_rate": 2.7131e-05, "loss": 0.0024, "step": 18613 }, { "epoch": 20.54555494202098, "grad_norm": 0.15207938849925995, "learning_rate": 2.7130666666666668e-05, "loss": 0.0024, "step": 18614 }, { "epoch": 20.546659304251794, "grad_norm": 0.2578527629375458, "learning_rate": 2.7130333333333334e-05, "loss": 0.0082, "step": 18615 }, { "epoch": 20.547763666482606, "grad_norm": 0.2845415472984314, "learning_rate": 2.713e-05, "loss": 0.0042, "step": 18616 }, { "epoch": 20.548868028713418, "grad_norm": 0.3708553612232208, "learning_rate": 2.712966666666667e-05, 
"loss": 0.0212, "step": 18617 }, { "epoch": 20.54997239094423, "grad_norm": 0.31676122546195984, "learning_rate": 2.712933333333333e-05, "loss": 0.0087, "step": 18618 }, { "epoch": 20.551076753175042, "grad_norm": 0.3161504566669464, "learning_rate": 2.7129e-05, "loss": 0.0199, "step": 18619 }, { "epoch": 20.552181115405855, "grad_norm": 0.5332163572311401, "learning_rate": 2.712866666666667e-05, "loss": 0.0069, "step": 18620 }, { "epoch": 20.553285477636663, "grad_norm": 0.44159913063049316, "learning_rate": 2.7128333333333333e-05, "loss": 0.1118, "step": 18621 }, { "epoch": 20.554389839867476, "grad_norm": 0.6238107085227966, "learning_rate": 2.7128000000000002e-05, "loss": 0.0889, "step": 18622 }, { "epoch": 20.555494202098288, "grad_norm": 0.5187720656394958, "learning_rate": 2.7127666666666668e-05, "loss": 0.0745, "step": 18623 }, { "epoch": 20.5565985643291, "grad_norm": 0.359424352645874, "learning_rate": 2.7127333333333334e-05, "loss": 0.0657, "step": 18624 }, { "epoch": 20.557702926559912, "grad_norm": 0.34076017141342163, "learning_rate": 2.7127e-05, "loss": 0.032, "step": 18625 }, { "epoch": 20.558807288790724, "grad_norm": 0.4035596251487732, "learning_rate": 2.712666666666667e-05, "loss": 0.0727, "step": 18626 }, { "epoch": 20.559911651021537, "grad_norm": 0.23744229972362518, "learning_rate": 2.712633333333333e-05, "loss": 0.0323, "step": 18627 }, { "epoch": 20.561016013252345, "grad_norm": 0.22697794437408447, "learning_rate": 2.7126e-05, "loss": 0.0137, "step": 18628 }, { "epoch": 20.562120375483158, "grad_norm": 0.13400466740131378, "learning_rate": 2.712566666666667e-05, "loss": 0.0072, "step": 18629 }, { "epoch": 20.56322473771397, "grad_norm": 0.16163435578346252, "learning_rate": 2.7125333333333333e-05, "loss": 0.0113, "step": 18630 }, { "epoch": 20.564329099944782, "grad_norm": 0.16641636192798615, "learning_rate": 2.7125000000000002e-05, "loss": 0.0263, "step": 18631 }, { "epoch": 20.565433462175594, "grad_norm": 0.23212158679962158, 
"learning_rate": 2.7124666666666668e-05, "loss": 0.0084, "step": 18632 }, { "epoch": 20.566537824406407, "grad_norm": 0.31802481412887573, "learning_rate": 2.7124333333333334e-05, "loss": 0.0065, "step": 18633 }, { "epoch": 20.567642186637215, "grad_norm": 0.1978771686553955, "learning_rate": 2.7124e-05, "loss": 0.0087, "step": 18634 }, { "epoch": 20.568746548868027, "grad_norm": 0.17394420504570007, "learning_rate": 2.712366666666667e-05, "loss": 0.0057, "step": 18635 }, { "epoch": 20.56985091109884, "grad_norm": 0.17217645049095154, "learning_rate": 2.7123333333333332e-05, "loss": 0.0049, "step": 18636 }, { "epoch": 20.570955273329652, "grad_norm": 0.5163617134094238, "learning_rate": 2.7123e-05, "loss": 0.0089, "step": 18637 }, { "epoch": 20.572059635560464, "grad_norm": 0.14901503920555115, "learning_rate": 2.712266666666667e-05, "loss": 0.0043, "step": 18638 }, { "epoch": 20.573163997791276, "grad_norm": 0.22171559929847717, "learning_rate": 2.7122333333333333e-05, "loss": 0.0088, "step": 18639 }, { "epoch": 20.57426836002209, "grad_norm": 0.16933055222034454, "learning_rate": 2.7122000000000002e-05, "loss": 0.0062, "step": 18640 }, { "epoch": 20.575372722252897, "grad_norm": 0.15662240982055664, "learning_rate": 2.7121666666666665e-05, "loss": 0.0066, "step": 18641 }, { "epoch": 20.57647708448371, "grad_norm": 0.47090011835098267, "learning_rate": 2.7121333333333334e-05, "loss": 0.0086, "step": 18642 }, { "epoch": 20.57758144671452, "grad_norm": 0.11401334404945374, "learning_rate": 2.7121e-05, "loss": 0.0044, "step": 18643 }, { "epoch": 20.578685808945334, "grad_norm": 0.2645498216152191, "learning_rate": 2.7120666666666666e-05, "loss": 0.0157, "step": 18644 }, { "epoch": 20.579790171176146, "grad_norm": 1.2087312936782837, "learning_rate": 2.7120333333333335e-05, "loss": 0.0066, "step": 18645 }, { "epoch": 20.58089453340696, "grad_norm": 0.18847697973251343, "learning_rate": 2.712e-05, "loss": 0.0041, "step": 18646 }, { "epoch": 20.58199889563777, 
"grad_norm": 0.17582495510578156, "learning_rate": 2.7119666666666667e-05, "loss": 0.0045, "step": 18647 }, { "epoch": 20.58310325786858, "grad_norm": 0.10451333224773407, "learning_rate": 2.7119333333333333e-05, "loss": 0.0027, "step": 18648 }, { "epoch": 20.58420762009939, "grad_norm": 0.2694835364818573, "learning_rate": 2.7119000000000002e-05, "loss": 0.0088, "step": 18649 }, { "epoch": 20.585311982330204, "grad_norm": 0.16874951124191284, "learning_rate": 2.7118666666666665e-05, "loss": 0.0048, "step": 18650 }, { "epoch": 20.586416344561016, "grad_norm": 0.20987196266651154, "learning_rate": 2.7118333333333334e-05, "loss": 0.0037, "step": 18651 }, { "epoch": 20.587520706791828, "grad_norm": 0.3847352862358093, "learning_rate": 2.7118e-05, "loss": 0.0092, "step": 18652 }, { "epoch": 20.58862506902264, "grad_norm": 0.0856928750872612, "learning_rate": 2.7117666666666666e-05, "loss": 0.0024, "step": 18653 }, { "epoch": 20.589729431253453, "grad_norm": 0.15836502611637115, "learning_rate": 2.7117333333333335e-05, "loss": 0.0063, "step": 18654 }, { "epoch": 20.59083379348426, "grad_norm": 0.36049652099609375, "learning_rate": 2.7117e-05, "loss": 0.0098, "step": 18655 }, { "epoch": 20.591938155715074, "grad_norm": 0.24460799992084503, "learning_rate": 2.7116666666666667e-05, "loss": 0.0062, "step": 18656 }, { "epoch": 20.593042517945886, "grad_norm": 0.19547024369239807, "learning_rate": 2.7116333333333333e-05, "loss": 0.006, "step": 18657 }, { "epoch": 20.594146880176698, "grad_norm": 0.26947474479675293, "learning_rate": 2.7116000000000003e-05, "loss": 0.0053, "step": 18658 }, { "epoch": 20.59525124240751, "grad_norm": 0.16503190994262695, "learning_rate": 2.7115666666666665e-05, "loss": 0.0056, "step": 18659 }, { "epoch": 20.596355604638322, "grad_norm": 0.11483322829008102, "learning_rate": 2.7115333333333334e-05, "loss": 0.0052, "step": 18660 }, { "epoch": 20.597459966869135, "grad_norm": 0.48843663930892944, "learning_rate": 2.7115e-05, "loss": 0.0156, "step": 
18661 }, { "epoch": 20.598564329099943, "grad_norm": 0.1565268337726593, "learning_rate": 2.7114666666666666e-05, "loss": 0.0049, "step": 18662 }, { "epoch": 20.599668691330756, "grad_norm": 0.16933009028434753, "learning_rate": 2.7114333333333336e-05, "loss": 0.0072, "step": 18663 }, { "epoch": 20.600773053561568, "grad_norm": 0.15288951992988586, "learning_rate": 2.7114e-05, "loss": 0.0035, "step": 18664 }, { "epoch": 20.60187741579238, "grad_norm": 0.11699879914522171, "learning_rate": 2.7113666666666668e-05, "loss": 0.006, "step": 18665 }, { "epoch": 20.602981778023192, "grad_norm": 0.3951016664505005, "learning_rate": 2.7113333333333333e-05, "loss": 0.0101, "step": 18666 }, { "epoch": 20.604086140254005, "grad_norm": 0.38205617666244507, "learning_rate": 2.7113000000000003e-05, "loss": 0.0075, "step": 18667 }, { "epoch": 20.605190502484817, "grad_norm": 0.19143372774124146, "learning_rate": 2.7112666666666665e-05, "loss": 0.0047, "step": 18668 }, { "epoch": 20.606294864715625, "grad_norm": 0.555442214012146, "learning_rate": 2.7112333333333335e-05, "loss": 0.0071, "step": 18669 }, { "epoch": 20.607399226946438, "grad_norm": 0.22666561603546143, "learning_rate": 2.7112000000000004e-05, "loss": 0.0044, "step": 18670 }, { "epoch": 20.60850358917725, "grad_norm": 0.5447914004325867, "learning_rate": 2.7111666666666667e-05, "loss": 0.1371, "step": 18671 }, { "epoch": 20.609607951408062, "grad_norm": 0.5568279027938843, "learning_rate": 2.7111333333333336e-05, "loss": 0.074, "step": 18672 }, { "epoch": 20.610712313638874, "grad_norm": 0.42841818928718567, "learning_rate": 2.7111e-05, "loss": 0.0453, "step": 18673 }, { "epoch": 20.611816675869687, "grad_norm": 0.3691060245037079, "learning_rate": 2.7110666666666668e-05, "loss": 0.0728, "step": 18674 }, { "epoch": 20.612921038100495, "grad_norm": 0.34125062823295593, "learning_rate": 2.7110333333333334e-05, "loss": 0.0385, "step": 18675 }, { "epoch": 20.614025400331307, "grad_norm": 0.31894221901893616, 
"learning_rate": 2.711e-05, "loss": 0.0222, "step": 18676 }, { "epoch": 20.61512976256212, "grad_norm": 0.17585690319538116, "learning_rate": 2.7109666666666666e-05, "loss": 0.0158, "step": 18677 }, { "epoch": 20.616234124792932, "grad_norm": 0.25191718339920044, "learning_rate": 2.7109333333333335e-05, "loss": 0.0341, "step": 18678 }, { "epoch": 20.617338487023744, "grad_norm": 0.5507160425186157, "learning_rate": 2.7109e-05, "loss": 0.0378, "step": 18679 }, { "epoch": 20.618442849254556, "grad_norm": 0.3096475899219513, "learning_rate": 2.7108666666666667e-05, "loss": 0.0094, "step": 18680 }, { "epoch": 20.61954721148537, "grad_norm": 0.17230360209941864, "learning_rate": 2.7108333333333336e-05, "loss": 0.0083, "step": 18681 }, { "epoch": 20.620651573716177, "grad_norm": 0.12097648531198502, "learning_rate": 2.7108e-05, "loss": 0.0057, "step": 18682 }, { "epoch": 20.62175593594699, "grad_norm": 0.19971853494644165, "learning_rate": 2.7107666666666668e-05, "loss": 0.0237, "step": 18683 }, { "epoch": 20.6228602981778, "grad_norm": 0.21482667326927185, "learning_rate": 2.7107333333333334e-05, "loss": 0.0082, "step": 18684 }, { "epoch": 20.623964660408614, "grad_norm": 0.17882700264453888, "learning_rate": 2.7107e-05, "loss": 0.0069, "step": 18685 }, { "epoch": 20.625069022639426, "grad_norm": 0.13622410595417023, "learning_rate": 2.7106666666666666e-05, "loss": 0.0053, "step": 18686 }, { "epoch": 20.62617338487024, "grad_norm": 0.1583516150712967, "learning_rate": 2.7106333333333335e-05, "loss": 0.0073, "step": 18687 }, { "epoch": 20.62727774710105, "grad_norm": 0.22602692246437073, "learning_rate": 2.7106e-05, "loss": 0.004, "step": 18688 }, { "epoch": 20.62838210933186, "grad_norm": 0.4417249858379364, "learning_rate": 2.7105666666666667e-05, "loss": 0.004, "step": 18689 }, { "epoch": 20.62948647156267, "grad_norm": 0.11471200734376907, "learning_rate": 2.7105333333333336e-05, "loss": 0.0046, "step": 18690 }, { "epoch": 20.630590833793484, "grad_norm": 
0.18554995954036713, "learning_rate": 2.7105e-05, "loss": 0.0056, "step": 18691 }, { "epoch": 20.631695196024296, "grad_norm": 0.15576167404651642, "learning_rate": 2.7104666666666668e-05, "loss": 0.0039, "step": 18692 }, { "epoch": 20.63279955825511, "grad_norm": 0.16512352228164673, "learning_rate": 2.7104333333333334e-05, "loss": 0.0027, "step": 18693 }, { "epoch": 20.63390392048592, "grad_norm": 0.16044916212558746, "learning_rate": 2.7104e-05, "loss": 0.0075, "step": 18694 }, { "epoch": 20.635008282716733, "grad_norm": 0.17467394471168518, "learning_rate": 2.710366666666667e-05, "loss": 0.0035, "step": 18695 }, { "epoch": 20.63611264494754, "grad_norm": 0.19180907309055328, "learning_rate": 2.7103333333333335e-05, "loss": 0.0089, "step": 18696 }, { "epoch": 20.637217007178354, "grad_norm": 0.1067330613732338, "learning_rate": 2.7103e-05, "loss": 0.0033, "step": 18697 }, { "epoch": 20.638321369409166, "grad_norm": 0.17294186353683472, "learning_rate": 2.7102666666666667e-05, "loss": 0.0046, "step": 18698 }, { "epoch": 20.639425731639978, "grad_norm": 0.1408885270357132, "learning_rate": 2.7102333333333336e-05, "loss": 0.0047, "step": 18699 }, { "epoch": 20.64053009387079, "grad_norm": 0.06082679331302643, "learning_rate": 2.7102e-05, "loss": 0.0018, "step": 18700 }, { "epoch": 20.641634456101603, "grad_norm": 0.13562029600143433, "learning_rate": 2.710166666666667e-05, "loss": 0.0034, "step": 18701 }, { "epoch": 20.642738818332415, "grad_norm": 0.3244713842868805, "learning_rate": 2.710133333333333e-05, "loss": 0.0057, "step": 18702 }, { "epoch": 20.643843180563223, "grad_norm": 0.2460058182477951, "learning_rate": 2.7101e-05, "loss": 0.0052, "step": 18703 }, { "epoch": 20.644947542794036, "grad_norm": 0.123136967420578, "learning_rate": 2.710066666666667e-05, "loss": 0.0057, "step": 18704 }, { "epoch": 20.646051905024848, "grad_norm": 0.19399723410606384, "learning_rate": 2.7100333333333332e-05, "loss": 0.0041, "step": 18705 }, { "epoch": 20.64715626725566, 
"grad_norm": 0.16868442296981812, "learning_rate": 2.71e-05, "loss": 0.0138, "step": 18706 }, { "epoch": 20.648260629486472, "grad_norm": 0.14072760939598083, "learning_rate": 2.7099666666666667e-05, "loss": 0.0042, "step": 18707 }, { "epoch": 20.649364991717285, "grad_norm": 0.4557594656944275, "learning_rate": 2.7099333333333333e-05, "loss": 0.0082, "step": 18708 }, { "epoch": 20.650469353948097, "grad_norm": 0.14466753602027893, "learning_rate": 2.7099e-05, "loss": 0.0037, "step": 18709 }, { "epoch": 20.651573716178905, "grad_norm": 0.15821656584739685, "learning_rate": 2.709866666666667e-05, "loss": 0.0038, "step": 18710 }, { "epoch": 20.652678078409718, "grad_norm": 0.24051737785339355, "learning_rate": 2.709833333333333e-05, "loss": 0.0051, "step": 18711 }, { "epoch": 20.65378244064053, "grad_norm": 0.17293719947338104, "learning_rate": 2.7098e-05, "loss": 0.0024, "step": 18712 }, { "epoch": 20.654886802871342, "grad_norm": 0.16754627227783203, "learning_rate": 2.709766666666667e-05, "loss": 0.006, "step": 18713 }, { "epoch": 20.655991165102154, "grad_norm": 0.2530466914176941, "learning_rate": 2.7097333333333332e-05, "loss": 0.0024, "step": 18714 }, { "epoch": 20.657095527332967, "grad_norm": 0.3924393951892853, "learning_rate": 2.7097e-05, "loss": 0.0073, "step": 18715 }, { "epoch": 20.658199889563775, "grad_norm": 0.3129188120365143, "learning_rate": 2.7096666666666668e-05, "loss": 0.0073, "step": 18716 }, { "epoch": 20.659304251794588, "grad_norm": 0.11712295562028885, "learning_rate": 2.7096333333333333e-05, "loss": 0.0015, "step": 18717 }, { "epoch": 20.6604086140254, "grad_norm": 0.13582001626491547, "learning_rate": 2.7096e-05, "loss": 0.0028, "step": 18718 }, { "epoch": 20.661512976256212, "grad_norm": 0.31283095479011536, "learning_rate": 2.709566666666667e-05, "loss": 0.0085, "step": 18719 }, { "epoch": 20.662617338487024, "grad_norm": 0.287308007478714, "learning_rate": 2.7095333333333335e-05, "loss": 0.0074, "step": 18720 }, { "epoch": 
20.663721700717836, "grad_norm": 0.5199019312858582, "learning_rate": 2.7095e-05, "loss": 0.0985, "step": 18721 }, { "epoch": 20.66482606294865, "grad_norm": 0.4158155918121338, "learning_rate": 2.709466666666667e-05, "loss": 0.1088, "step": 18722 }, { "epoch": 20.665930425179457, "grad_norm": 0.5729457139968872, "learning_rate": 2.7094333333333332e-05, "loss": 0.1144, "step": 18723 }, { "epoch": 20.66703478741027, "grad_norm": 0.5738207101821899, "learning_rate": 2.7094000000000002e-05, "loss": 0.0491, "step": 18724 }, { "epoch": 20.668139149641082, "grad_norm": 0.3601312041282654, "learning_rate": 2.7093666666666668e-05, "loss": 0.052, "step": 18725 }, { "epoch": 20.669243511871894, "grad_norm": 0.3745061457157135, "learning_rate": 2.7093333333333334e-05, "loss": 0.0846, "step": 18726 }, { "epoch": 20.670347874102706, "grad_norm": 0.2842392027378082, "learning_rate": 2.7093e-05, "loss": 0.0278, "step": 18727 }, { "epoch": 20.67145223633352, "grad_norm": 0.16998201608657837, "learning_rate": 2.709266666666667e-05, "loss": 0.0233, "step": 18728 }, { "epoch": 20.67255659856433, "grad_norm": 0.3425852060317993, "learning_rate": 2.7092333333333335e-05, "loss": 0.0585, "step": 18729 }, { "epoch": 20.67366096079514, "grad_norm": 0.34849444031715393, "learning_rate": 2.7092e-05, "loss": 0.0092, "step": 18730 }, { "epoch": 20.67476532302595, "grad_norm": 0.1105056181550026, "learning_rate": 2.7091666666666667e-05, "loss": 0.0245, "step": 18731 }, { "epoch": 20.675869685256764, "grad_norm": 0.10840821266174316, "learning_rate": 2.7091333333333333e-05, "loss": 0.0047, "step": 18732 }, { "epoch": 20.676974047487576, "grad_norm": 0.35085195302963257, "learning_rate": 2.7091000000000002e-05, "loss": 0.0143, "step": 18733 }, { "epoch": 20.67807840971839, "grad_norm": 0.20738442242145538, "learning_rate": 2.7090666666666665e-05, "loss": 0.0203, "step": 18734 }, { "epoch": 20.6791827719492, "grad_norm": 0.25458213686943054, "learning_rate": 2.7090333333333334e-05, "loss": 0.0053, 
"step": 18735 }, { "epoch": 20.680287134180013, "grad_norm": 0.131698340177536, "learning_rate": 2.709e-05, "loss": 0.0059, "step": 18736 }, { "epoch": 20.68139149641082, "grad_norm": 0.14203231036663055, "learning_rate": 2.7089666666666666e-05, "loss": 0.0031, "step": 18737 }, { "epoch": 20.682495858641634, "grad_norm": 0.3183422386646271, "learning_rate": 2.7089333333333335e-05, "loss": 0.005, "step": 18738 }, { "epoch": 20.683600220872446, "grad_norm": 0.08019297569990158, "learning_rate": 2.7089e-05, "loss": 0.0035, "step": 18739 }, { "epoch": 20.684704583103258, "grad_norm": 0.15887516736984253, "learning_rate": 2.7088666666666667e-05, "loss": 0.0038, "step": 18740 }, { "epoch": 20.68580894533407, "grad_norm": 0.17924310266971588, "learning_rate": 2.7088333333333333e-05, "loss": 0.0097, "step": 18741 }, { "epoch": 20.686913307564883, "grad_norm": 0.10463161766529083, "learning_rate": 2.7088000000000002e-05, "loss": 0.0048, "step": 18742 }, { "epoch": 20.68801766979569, "grad_norm": 0.1417492926120758, "learning_rate": 2.7087666666666665e-05, "loss": 0.006, "step": 18743 }, { "epoch": 20.689122032026503, "grad_norm": 0.16336987912654877, "learning_rate": 2.7087333333333334e-05, "loss": 0.0052, "step": 18744 }, { "epoch": 20.690226394257316, "grad_norm": 0.2888980805873871, "learning_rate": 2.7087000000000003e-05, "loss": 0.0041, "step": 18745 }, { "epoch": 20.691330756488128, "grad_norm": 0.057080354541540146, "learning_rate": 2.7086666666666666e-05, "loss": 0.0022, "step": 18746 }, { "epoch": 20.69243511871894, "grad_norm": 0.17831020057201385, "learning_rate": 2.7086333333333335e-05, "loss": 0.0085, "step": 18747 }, { "epoch": 20.693539480949752, "grad_norm": 0.19744884967803955, "learning_rate": 2.7086e-05, "loss": 0.0304, "step": 18748 }, { "epoch": 20.694643843180565, "grad_norm": 0.2094370424747467, "learning_rate": 2.7085666666666667e-05, "loss": 0.0104, "step": 18749 }, { "epoch": 20.695748205411373, "grad_norm": 0.10051324963569641, "learning_rate": 
2.7085333333333333e-05, "loss": 0.0038, "step": 18750 }, { "epoch": 20.696852567642186, "grad_norm": 0.11807268857955933, "learning_rate": 2.7085000000000002e-05, "loss": 0.005, "step": 18751 }, { "epoch": 20.697956929872998, "grad_norm": 0.11950278282165527, "learning_rate": 2.7084666666666665e-05, "loss": 0.005, "step": 18752 }, { "epoch": 20.69906129210381, "grad_norm": 0.3046981394290924, "learning_rate": 2.7084333333333334e-05, "loss": 0.0136, "step": 18753 }, { "epoch": 20.700165654334622, "grad_norm": 0.09917958825826645, "learning_rate": 2.7084000000000004e-05, "loss": 0.0043, "step": 18754 }, { "epoch": 20.701270016565434, "grad_norm": 0.11279619485139847, "learning_rate": 2.7083666666666666e-05, "loss": 0.0055, "step": 18755 }, { "epoch": 20.702374378796247, "grad_norm": 0.20214514434337616, "learning_rate": 2.7083333333333335e-05, "loss": 0.0067, "step": 18756 }, { "epoch": 20.703478741027055, "grad_norm": 0.1549341082572937, "learning_rate": 2.7083e-05, "loss": 0.0034, "step": 18757 }, { "epoch": 20.704583103257868, "grad_norm": 0.11793997138738632, "learning_rate": 2.7082666666666667e-05, "loss": 0.0041, "step": 18758 }, { "epoch": 20.70568746548868, "grad_norm": 0.18693327903747559, "learning_rate": 2.7082333333333333e-05, "loss": 0.0057, "step": 18759 }, { "epoch": 20.706791827719492, "grad_norm": 0.3579494059085846, "learning_rate": 2.7082000000000003e-05, "loss": 0.0165, "step": 18760 }, { "epoch": 20.707896189950304, "grad_norm": 0.12849414348602295, "learning_rate": 2.7081666666666665e-05, "loss": 0.0014, "step": 18761 }, { "epoch": 20.709000552181116, "grad_norm": 0.4448240399360657, "learning_rate": 2.7081333333333334e-05, "loss": 0.0123, "step": 18762 }, { "epoch": 20.71010491441193, "grad_norm": 0.07266642898321152, "learning_rate": 2.7081e-05, "loss": 0.0015, "step": 18763 }, { "epoch": 20.711209276642737, "grad_norm": 0.30184662342071533, "learning_rate": 2.7080666666666666e-05, "loss": 0.0048, "step": 18764 }, { "epoch": 20.71231363887355, 
"grad_norm": 0.09320498257875443, "learning_rate": 2.7080333333333336e-05, "loss": 0.002, "step": 18765 }, { "epoch": 20.713418001104362, "grad_norm": 0.10576507449150085, "learning_rate": 2.7079999999999998e-05, "loss": 0.0125, "step": 18766 }, { "epoch": 20.714522363335174, "grad_norm": 0.08063299208879471, "learning_rate": 2.7079666666666668e-05, "loss": 0.0014, "step": 18767 }, { "epoch": 20.715626725565986, "grad_norm": 0.23466402292251587, "learning_rate": 2.7079333333333333e-05, "loss": 0.0106, "step": 18768 }, { "epoch": 20.7167310877968, "grad_norm": 0.38605955243110657, "learning_rate": 2.7079e-05, "loss": 0.009, "step": 18769 }, { "epoch": 20.71783545002761, "grad_norm": 0.14508476853370667, "learning_rate": 2.707866666666667e-05, "loss": 0.0036, "step": 18770 }, { "epoch": 20.71893981225842, "grad_norm": 0.5474657416343689, "learning_rate": 2.7078333333333335e-05, "loss": 0.116, "step": 18771 }, { "epoch": 20.72004417448923, "grad_norm": 0.5973085761070251, "learning_rate": 2.7078e-05, "loss": 0.1245, "step": 18772 }, { "epoch": 20.721148536720044, "grad_norm": 0.40788277983665466, "learning_rate": 2.7077666666666667e-05, "loss": 0.0762, "step": 18773 }, { "epoch": 20.722252898950856, "grad_norm": 0.30458733439445496, "learning_rate": 2.7077333333333336e-05, "loss": 0.0528, "step": 18774 }, { "epoch": 20.72335726118167, "grad_norm": 0.3560938239097595, "learning_rate": 2.7077e-05, "loss": 0.061, "step": 18775 }, { "epoch": 20.72446162341248, "grad_norm": 0.3268725574016571, "learning_rate": 2.7076666666666668e-05, "loss": 0.0256, "step": 18776 }, { "epoch": 20.725565985643293, "grad_norm": 0.3059525787830353, "learning_rate": 2.7076333333333334e-05, "loss": 0.0395, "step": 18777 }, { "epoch": 20.7266703478741, "grad_norm": 0.20161119103431702, "learning_rate": 2.7076e-05, "loss": 0.0117, "step": 18778 }, { "epoch": 20.727774710104914, "grad_norm": 0.22091588377952576, "learning_rate": 2.707566666666667e-05, "loss": 0.0191, "step": 18779 }, { "epoch": 
20.728879072335726, "grad_norm": 0.1281023770570755, "learning_rate": 2.7075333333333335e-05, "loss": 0.0126, "step": 18780 }, { "epoch": 20.729983434566538, "grad_norm": 0.603401780128479, "learning_rate": 2.7075e-05, "loss": 0.0479, "step": 18781 }, { "epoch": 20.73108779679735, "grad_norm": 0.3571453094482422, "learning_rate": 2.7074666666666667e-05, "loss": 0.0078, "step": 18782 }, { "epoch": 20.732192159028163, "grad_norm": 0.22371210157871246, "learning_rate": 2.7074333333333336e-05, "loss": 0.0056, "step": 18783 }, { "epoch": 20.73329652125897, "grad_norm": 0.2066478431224823, "learning_rate": 2.7074e-05, "loss": 0.0331, "step": 18784 }, { "epoch": 20.734400883489783, "grad_norm": 0.1232299879193306, "learning_rate": 2.7073666666666668e-05, "loss": 0.0058, "step": 18785 }, { "epoch": 20.735505245720596, "grad_norm": 0.16217313706874847, "learning_rate": 2.7073333333333334e-05, "loss": 0.0048, "step": 18786 }, { "epoch": 20.736609607951408, "grad_norm": 0.13544584810733795, "learning_rate": 2.7073e-05, "loss": 0.0033, "step": 18787 }, { "epoch": 20.73771397018222, "grad_norm": 0.09257246553897858, "learning_rate": 2.707266666666667e-05, "loss": 0.0027, "step": 18788 }, { "epoch": 20.738818332413032, "grad_norm": 0.12077118456363678, "learning_rate": 2.7072333333333335e-05, "loss": 0.0048, "step": 18789 }, { "epoch": 20.739922694643845, "grad_norm": 0.14726431667804718, "learning_rate": 2.7072e-05, "loss": 0.006, "step": 18790 }, { "epoch": 20.741027056874653, "grad_norm": 0.1123109832406044, "learning_rate": 2.7071666666666667e-05, "loss": 0.0046, "step": 18791 }, { "epoch": 20.742131419105466, "grad_norm": 0.12907738983631134, "learning_rate": 2.7071333333333333e-05, "loss": 0.0065, "step": 18792 }, { "epoch": 20.743235781336278, "grad_norm": 0.05287626013159752, "learning_rate": 2.7071e-05, "loss": 0.0018, "step": 18793 }, { "epoch": 20.74434014356709, "grad_norm": 0.1304200291633606, "learning_rate": 2.7070666666666668e-05, "loss": 0.032, "step": 18794 }, 
{ "epoch": 20.745444505797902, "grad_norm": 0.2512419521808624, "learning_rate": 2.7070333333333334e-05, "loss": 0.0028, "step": 18795 }, { "epoch": 20.746548868028714, "grad_norm": 0.4420027434825897, "learning_rate": 2.707e-05, "loss": 0.004, "step": 18796 }, { "epoch": 20.747653230259527, "grad_norm": 0.1334967017173767, "learning_rate": 2.706966666666667e-05, "loss": 0.0048, "step": 18797 }, { "epoch": 20.748757592490335, "grad_norm": 0.3878537118434906, "learning_rate": 2.7069333333333332e-05, "loss": 0.0058, "step": 18798 }, { "epoch": 20.749861954721148, "grad_norm": 0.2800792455673218, "learning_rate": 2.7069e-05, "loss": 0.0048, "step": 18799 }, { "epoch": 20.75096631695196, "grad_norm": 0.13220542669296265, "learning_rate": 2.7068666666666667e-05, "loss": 0.0041, "step": 18800 }, { "epoch": 20.752070679182772, "grad_norm": 0.12359067052602768, "learning_rate": 2.7068333333333333e-05, "loss": 0.0032, "step": 18801 }, { "epoch": 20.753175041413584, "grad_norm": 0.24664531648159027, "learning_rate": 2.7068e-05, "loss": 0.0105, "step": 18802 }, { "epoch": 20.754279403644396, "grad_norm": 0.2172367125749588, "learning_rate": 2.706766666666667e-05, "loss": 0.0077, "step": 18803 }, { "epoch": 20.75538376587521, "grad_norm": 0.15252752602100372, "learning_rate": 2.7067333333333334e-05, "loss": 0.004, "step": 18804 }, { "epoch": 20.756488128106017, "grad_norm": 0.103081114590168, "learning_rate": 2.7067e-05, "loss": 0.0034, "step": 18805 }, { "epoch": 20.75759249033683, "grad_norm": 0.10181649029254913, "learning_rate": 2.706666666666667e-05, "loss": 0.0025, "step": 18806 }, { "epoch": 20.758696852567642, "grad_norm": 0.12252410501241684, "learning_rate": 2.7066333333333332e-05, "loss": 0.0031, "step": 18807 }, { "epoch": 20.759801214798454, "grad_norm": 0.17244558036327362, "learning_rate": 2.7066e-05, "loss": 0.003, "step": 18808 }, { "epoch": 20.760905577029266, "grad_norm": 0.10220781713724136, "learning_rate": 2.7065666666666667e-05, "loss": 0.0037, "step": 
18809 }, { "epoch": 20.76200993926008, "grad_norm": 0.22285261750221252, "learning_rate": 2.7065333333333333e-05, "loss": 0.0056, "step": 18810 }, { "epoch": 20.763114301490887, "grad_norm": 0.1873655468225479, "learning_rate": 2.7065e-05, "loss": 0.0059, "step": 18811 }, { "epoch": 20.7642186637217, "grad_norm": 0.22605101764202118, "learning_rate": 2.706466666666667e-05, "loss": 0.0075, "step": 18812 }, { "epoch": 20.76532302595251, "grad_norm": 0.2502566874027252, "learning_rate": 2.7064333333333334e-05, "loss": 0.0063, "step": 18813 }, { "epoch": 20.766427388183324, "grad_norm": 0.39637264609336853, "learning_rate": 2.7064e-05, "loss": 0.0034, "step": 18814 }, { "epoch": 20.767531750414136, "grad_norm": 0.37481191754341125, "learning_rate": 2.706366666666667e-05, "loss": 0.0076, "step": 18815 }, { "epoch": 20.76863611264495, "grad_norm": 0.344508558511734, "learning_rate": 2.7063333333333332e-05, "loss": 0.0108, "step": 18816 }, { "epoch": 20.76974047487576, "grad_norm": 0.1908896565437317, "learning_rate": 2.7063e-05, "loss": 0.0041, "step": 18817 }, { "epoch": 20.77084483710657, "grad_norm": 0.1606873720884323, "learning_rate": 2.7062666666666668e-05, "loss": 0.0042, "step": 18818 }, { "epoch": 20.77194919933738, "grad_norm": 0.16579696536064148, "learning_rate": 2.7062333333333333e-05, "loss": 0.0057, "step": 18819 }, { "epoch": 20.773053561568194, "grad_norm": 0.26334622502326965, "learning_rate": 2.7062000000000003e-05, "loss": 0.0086, "step": 18820 }, { "epoch": 20.774157923799006, "grad_norm": 0.4847773313522339, "learning_rate": 2.706166666666667e-05, "loss": 0.1424, "step": 18821 }, { "epoch": 20.775262286029818, "grad_norm": 0.43188783526420593, "learning_rate": 2.7061333333333335e-05, "loss": 0.0783, "step": 18822 }, { "epoch": 20.77636664826063, "grad_norm": 0.43378111720085144, "learning_rate": 2.7061e-05, "loss": 0.0526, "step": 18823 }, { "epoch": 20.777471010491443, "grad_norm": 1.0186847448349, "learning_rate": 2.7060666666666667e-05, "loss": 
0.0369, "step": 18824 }, { "epoch": 20.77857537272225, "grad_norm": 0.6480779051780701, "learning_rate": 2.7060333333333332e-05, "loss": 0.0615, "step": 18825 }, { "epoch": 20.779679734953064, "grad_norm": 0.3366408050060272, "learning_rate": 2.7060000000000002e-05, "loss": 0.0464, "step": 18826 }, { "epoch": 20.780784097183876, "grad_norm": 0.3476055860519409, "learning_rate": 2.7059666666666664e-05, "loss": 0.0294, "step": 18827 }, { "epoch": 20.781888459414688, "grad_norm": 0.23932376503944397, "learning_rate": 2.7059333333333334e-05, "loss": 0.0288, "step": 18828 }, { "epoch": 20.7829928216455, "grad_norm": 0.19304627180099487, "learning_rate": 2.7059000000000003e-05, "loss": 0.0255, "step": 18829 }, { "epoch": 20.784097183876312, "grad_norm": 0.10684184730052948, "learning_rate": 2.7058666666666666e-05, "loss": 0.0125, "step": 18830 }, { "epoch": 20.785201546107125, "grad_norm": 0.14324568212032318, "learning_rate": 2.7058333333333335e-05, "loss": 0.0071, "step": 18831 }, { "epoch": 20.786305908337933, "grad_norm": 0.24096587300300598, "learning_rate": 2.7058e-05, "loss": 0.0209, "step": 18832 }, { "epoch": 20.787410270568746, "grad_norm": 0.4821898341178894, "learning_rate": 2.7057666666666667e-05, "loss": 0.0165, "step": 18833 }, { "epoch": 20.788514632799558, "grad_norm": 0.2047344297170639, "learning_rate": 2.7057333333333333e-05, "loss": 0.0133, "step": 18834 }, { "epoch": 20.78961899503037, "grad_norm": 0.19896061718463898, "learning_rate": 2.7057000000000002e-05, "loss": 0.0067, "step": 18835 }, { "epoch": 20.790723357261182, "grad_norm": 0.19627119600772858, "learning_rate": 2.7056666666666665e-05, "loss": 0.0044, "step": 18836 }, { "epoch": 20.791827719491994, "grad_norm": 0.8711110353469849, "learning_rate": 2.7056333333333334e-05, "loss": 0.0054, "step": 18837 }, { "epoch": 20.792932081722807, "grad_norm": 0.17175066471099854, "learning_rate": 2.7056000000000003e-05, "loss": 0.0098, "step": 18838 }, { "epoch": 20.794036443953615, "grad_norm": 
0.04615956172347069, "learning_rate": 2.7055666666666666e-05, "loss": 0.0018, "step": 18839 }, { "epoch": 20.795140806184428, "grad_norm": 0.1718331128358841, "learning_rate": 2.7055333333333335e-05, "loss": 0.0046, "step": 18840 }, { "epoch": 20.79624516841524, "grad_norm": 0.14117954671382904, "learning_rate": 2.7055e-05, "loss": 0.0038, "step": 18841 }, { "epoch": 20.797349530646052, "grad_norm": 0.3402630388736725, "learning_rate": 2.7054666666666667e-05, "loss": 0.0056, "step": 18842 }, { "epoch": 20.798453892876864, "grad_norm": 0.12240210175514221, "learning_rate": 2.7054333333333333e-05, "loss": 0.0064, "step": 18843 }, { "epoch": 20.799558255107677, "grad_norm": 0.09290798008441925, "learning_rate": 2.7054000000000002e-05, "loss": 0.0031, "step": 18844 }, { "epoch": 20.80066261733849, "grad_norm": 0.16236913204193115, "learning_rate": 2.7053666666666668e-05, "loss": 0.0055, "step": 18845 }, { "epoch": 20.801766979569297, "grad_norm": 0.23385097086429596, "learning_rate": 2.7053333333333334e-05, "loss": 0.0038, "step": 18846 }, { "epoch": 20.80287134180011, "grad_norm": 0.17674808204174042, "learning_rate": 2.7053000000000003e-05, "loss": 0.0036, "step": 18847 }, { "epoch": 20.803975704030922, "grad_norm": 0.12983344495296478, "learning_rate": 2.7052666666666666e-05, "loss": 0.003, "step": 18848 }, { "epoch": 20.805080066261734, "grad_norm": 0.5096383094787598, "learning_rate": 2.7052333333333335e-05, "loss": 0.0071, "step": 18849 }, { "epoch": 20.806184428492546, "grad_norm": 0.12555058300495148, "learning_rate": 2.7052e-05, "loss": 0.0042, "step": 18850 }, { "epoch": 20.80728879072336, "grad_norm": 0.14948654174804688, "learning_rate": 2.7051666666666667e-05, "loss": 0.0039, "step": 18851 }, { "epoch": 20.808393152954167, "grad_norm": 0.08792908489704132, "learning_rate": 2.7051333333333333e-05, "loss": 0.0063, "step": 18852 }, { "epoch": 20.80949751518498, "grad_norm": 0.12340910732746124, "learning_rate": 2.7051e-05, "loss": 0.0019, "step": 18853 }, { 
"epoch": 20.81060187741579, "grad_norm": 0.13783743977546692, "learning_rate": 2.705066666666667e-05, "loss": 0.0049, "step": 18854 }, { "epoch": 20.811706239646604, "grad_norm": 0.11873001605272293, "learning_rate": 2.7050333333333334e-05, "loss": 0.0039, "step": 18855 }, { "epoch": 20.812810601877416, "grad_norm": 0.12261059880256653, "learning_rate": 2.705e-05, "loss": 0.0056, "step": 18856 }, { "epoch": 20.81391496410823, "grad_norm": 0.3445292115211487, "learning_rate": 2.7049666666666666e-05, "loss": 0.0046, "step": 18857 }, { "epoch": 20.81501932633904, "grad_norm": 0.16475291550159454, "learning_rate": 2.7049333333333335e-05, "loss": 0.0061, "step": 18858 }, { "epoch": 20.81612368856985, "grad_norm": 0.1589042693376541, "learning_rate": 2.7048999999999998e-05, "loss": 0.0046, "step": 18859 }, { "epoch": 20.81722805080066, "grad_norm": 0.10262927412986755, "learning_rate": 2.7048666666666667e-05, "loss": 0.0032, "step": 18860 }, { "epoch": 20.818332413031474, "grad_norm": 0.2310914695262909, "learning_rate": 2.7048333333333333e-05, "loss": 0.0101, "step": 18861 }, { "epoch": 20.819436775262286, "grad_norm": 0.4670039713382721, "learning_rate": 2.7048e-05, "loss": 0.0095, "step": 18862 }, { "epoch": 20.820541137493098, "grad_norm": 0.17663243412971497, "learning_rate": 2.704766666666667e-05, "loss": 0.0054, "step": 18863 }, { "epoch": 20.82164549972391, "grad_norm": 0.3124020993709564, "learning_rate": 2.7047333333333334e-05, "loss": 0.007, "step": 18864 }, { "epoch": 20.822749861954723, "grad_norm": 0.1684335172176361, "learning_rate": 2.7047e-05, "loss": 0.0042, "step": 18865 }, { "epoch": 20.82385422418553, "grad_norm": 0.52931809425354, "learning_rate": 2.7046666666666666e-05, "loss": 0.0077, "step": 18866 }, { "epoch": 20.824958586416344, "grad_norm": 0.19052273035049438, "learning_rate": 2.7046333333333336e-05, "loss": 0.004, "step": 18867 }, { "epoch": 20.826062948647156, "grad_norm": 0.2139531672000885, "learning_rate": 2.7045999999999998e-05, "loss": 
0.0064, "step": 18868 }, { "epoch": 20.827167310877968, "grad_norm": 0.15374848246574402, "learning_rate": 2.7045666666666668e-05, "loss": 0.0045, "step": 18869 }, { "epoch": 20.82827167310878, "grad_norm": 0.40407976508140564, "learning_rate": 2.7045333333333337e-05, "loss": 0.0226, "step": 18870 }, { "epoch": 20.829376035339592, "grad_norm": 0.4320009648799896, "learning_rate": 2.7045e-05, "loss": 0.101, "step": 18871 }, { "epoch": 20.830480397570405, "grad_norm": 0.3958146274089813, "learning_rate": 2.704466666666667e-05, "loss": 0.0909, "step": 18872 }, { "epoch": 20.831584759801213, "grad_norm": 0.5212334394454956, "learning_rate": 2.7044333333333335e-05, "loss": 0.0997, "step": 18873 }, { "epoch": 20.832689122032026, "grad_norm": 0.34522730112075806, "learning_rate": 2.7044e-05, "loss": 0.057, "step": 18874 }, { "epoch": 20.833793484262838, "grad_norm": 0.8682901859283447, "learning_rate": 2.7043666666666667e-05, "loss": 0.0562, "step": 18875 }, { "epoch": 20.83489784649365, "grad_norm": 0.41451653838157654, "learning_rate": 2.7043333333333336e-05, "loss": 0.0592, "step": 18876 }, { "epoch": 20.836002208724462, "grad_norm": 0.30628880858421326, "learning_rate": 2.7043e-05, "loss": 0.0228, "step": 18877 }, { "epoch": 20.837106570955275, "grad_norm": 0.2360442876815796, "learning_rate": 2.7042666666666668e-05, "loss": 0.0276, "step": 18878 }, { "epoch": 20.838210933186087, "grad_norm": 0.21455951035022736, "learning_rate": 2.7042333333333337e-05, "loss": 0.0134, "step": 18879 }, { "epoch": 20.839315295416895, "grad_norm": 0.3498895466327667, "learning_rate": 2.7042e-05, "loss": 0.0156, "step": 18880 }, { "epoch": 20.840419657647708, "grad_norm": 0.13172125816345215, "learning_rate": 2.704166666666667e-05, "loss": 0.006, "step": 18881 }, { "epoch": 20.84152401987852, "grad_norm": 0.13975287973880768, "learning_rate": 2.7041333333333335e-05, "loss": 0.0061, "step": 18882 }, { "epoch": 20.842628382109332, "grad_norm": 0.13703162968158722, "learning_rate": 
2.7041e-05, "loss": 0.0062, "step": 18883 }, { "epoch": 20.843732744340144, "grad_norm": 0.20283913612365723, "learning_rate": 2.7040666666666667e-05, "loss": 0.0141, "step": 18884 }, { "epoch": 20.844837106570957, "grad_norm": 0.23146069049835205, "learning_rate": 2.7040333333333333e-05, "loss": 0.0221, "step": 18885 }, { "epoch": 20.84594146880177, "grad_norm": 0.48680275678634644, "learning_rate": 2.704e-05, "loss": 0.0284, "step": 18886 }, { "epoch": 20.847045831032577, "grad_norm": 0.16196131706237793, "learning_rate": 2.7039666666666668e-05, "loss": 0.0058, "step": 18887 }, { "epoch": 20.84815019326339, "grad_norm": 0.14883922040462494, "learning_rate": 2.7039333333333334e-05, "loss": 0.0058, "step": 18888 }, { "epoch": 20.849254555494202, "grad_norm": 0.13817012310028076, "learning_rate": 2.7039e-05, "loss": 0.0065, "step": 18889 }, { "epoch": 20.850358917725014, "grad_norm": 0.2953193783760071, "learning_rate": 2.703866666666667e-05, "loss": 0.0057, "step": 18890 }, { "epoch": 20.851463279955826, "grad_norm": 0.28382277488708496, "learning_rate": 2.703833333333333e-05, "loss": 0.0071, "step": 18891 }, { "epoch": 20.85256764218664, "grad_norm": 0.09333483874797821, "learning_rate": 2.7038e-05, "loss": 0.0033, "step": 18892 }, { "epoch": 20.853672004417447, "grad_norm": 0.3058914244174957, "learning_rate": 2.7037666666666667e-05, "loss": 0.0029, "step": 18893 }, { "epoch": 20.85477636664826, "grad_norm": 0.18410740792751312, "learning_rate": 2.7037333333333333e-05, "loss": 0.0047, "step": 18894 }, { "epoch": 20.85588072887907, "grad_norm": 0.2220403105020523, "learning_rate": 2.7037000000000002e-05, "loss": 0.0052, "step": 18895 }, { "epoch": 20.856985091109884, "grad_norm": 0.11801639944314957, "learning_rate": 2.7036666666666668e-05, "loss": 0.0052, "step": 18896 }, { "epoch": 20.858089453340696, "grad_norm": 0.20132006704807281, "learning_rate": 2.7036333333333334e-05, "loss": 0.0131, "step": 18897 }, { "epoch": 20.85919381557151, "grad_norm": 
0.09515973925590515, "learning_rate": 2.7036e-05, "loss": 0.0023, "step": 18898 }, { "epoch": 20.86029817780232, "grad_norm": 0.12433730810880661, "learning_rate": 2.703566666666667e-05, "loss": 0.0025, "step": 18899 }, { "epoch": 20.86140254003313, "grad_norm": 0.8178271651268005, "learning_rate": 2.7035333333333332e-05, "loss": 0.0065, "step": 18900 }, { "epoch": 20.86250690226394, "grad_norm": 0.08774001896381378, "learning_rate": 2.7035e-05, "loss": 0.0015, "step": 18901 }, { "epoch": 20.863611264494754, "grad_norm": 0.545901358127594, "learning_rate": 2.7034666666666667e-05, "loss": 0.0064, "step": 18902 }, { "epoch": 20.864715626725566, "grad_norm": 0.7419964671134949, "learning_rate": 2.7034333333333333e-05, "loss": 0.0085, "step": 18903 }, { "epoch": 20.86581998895638, "grad_norm": 0.14576634764671326, "learning_rate": 2.7034000000000002e-05, "loss": 0.0035, "step": 18904 }, { "epoch": 20.86692435118719, "grad_norm": 0.4577088952064514, "learning_rate": 2.703366666666667e-05, "loss": 0.0061, "step": 18905 }, { "epoch": 20.868028713418003, "grad_norm": 0.21812006831169128, "learning_rate": 2.7033333333333334e-05, "loss": 0.0109, "step": 18906 }, { "epoch": 20.86913307564881, "grad_norm": 0.8496850728988647, "learning_rate": 2.7033e-05, "loss": 0.0079, "step": 18907 }, { "epoch": 20.870237437879624, "grad_norm": 0.24858075380325317, "learning_rate": 2.703266666666667e-05, "loss": 0.0082, "step": 18908 }, { "epoch": 20.871341800110436, "grad_norm": 0.2001151442527771, "learning_rate": 2.7032333333333332e-05, "loss": 0.0116, "step": 18909 }, { "epoch": 20.872446162341248, "grad_norm": 0.3041233420372009, "learning_rate": 2.7032e-05, "loss": 0.0091, "step": 18910 }, { "epoch": 20.87355052457206, "grad_norm": 0.16909894347190857, "learning_rate": 2.7031666666666667e-05, "loss": 0.0025, "step": 18911 }, { "epoch": 20.874654886802873, "grad_norm": 0.1551055908203125, "learning_rate": 2.7031333333333333e-05, "loss": 0.0032, "step": 18912 }, { "epoch": 
20.875759249033685, "grad_norm": 0.7283200025558472, "learning_rate": 2.7031000000000003e-05, "loss": 0.0266, "step": 18913 }, { "epoch": 20.876863611264493, "grad_norm": 0.32410135865211487, "learning_rate": 2.7030666666666665e-05, "loss": 0.0089, "step": 18914 }, { "epoch": 20.877967973495306, "grad_norm": 0.18410710990428925, "learning_rate": 2.7030333333333334e-05, "loss": 0.0037, "step": 18915 }, { "epoch": 20.879072335726118, "grad_norm": 0.6041818857192993, "learning_rate": 2.703e-05, "loss": 0.0161, "step": 18916 }, { "epoch": 20.88017669795693, "grad_norm": 0.7887199521064758, "learning_rate": 2.7029666666666666e-05, "loss": 0.0182, "step": 18917 }, { "epoch": 20.881281060187742, "grad_norm": 1.0857197046279907, "learning_rate": 2.7029333333333332e-05, "loss": 0.0072, "step": 18918 }, { "epoch": 20.882385422418555, "grad_norm": 0.18022724986076355, "learning_rate": 2.7029e-05, "loss": 0.005, "step": 18919 }, { "epoch": 20.883489784649363, "grad_norm": 1.777971863746643, "learning_rate": 2.7028666666666668e-05, "loss": 0.0115, "step": 18920 }, { "epoch": 20.884594146880175, "grad_norm": 0.4720943570137024, "learning_rate": 2.7028333333333333e-05, "loss": 0.1152, "step": 18921 }, { "epoch": 20.885698509110988, "grad_norm": 0.574823260307312, "learning_rate": 2.7028000000000003e-05, "loss": 0.0959, "step": 18922 }, { "epoch": 20.8868028713418, "grad_norm": 0.3924655616283417, "learning_rate": 2.7027666666666665e-05, "loss": 0.0729, "step": 18923 }, { "epoch": 20.887907233572612, "grad_norm": 0.340946227312088, "learning_rate": 2.7027333333333335e-05, "loss": 0.0638, "step": 18924 }, { "epoch": 20.889011595803424, "grad_norm": 0.3483198881149292, "learning_rate": 2.7027e-05, "loss": 0.0663, "step": 18925 }, { "epoch": 20.890115958034237, "grad_norm": 0.3669120967388153, "learning_rate": 2.7026666666666667e-05, "loss": 0.0413, "step": 18926 }, { "epoch": 20.891220320265045, "grad_norm": 0.41875502467155457, "learning_rate": 2.7026333333333332e-05, "loss": 
0.0352, "step": 18927 }, { "epoch": 20.892324682495858, "grad_norm": null, "learning_rate": 2.7026333333333332e-05, "loss": 0.0146, "step": 18928 }, { "epoch": 20.89342904472667, "grad_norm": 0.3910786509513855, "learning_rate": 2.7026000000000002e-05, "loss": 0.0182, "step": 18929 }, { "epoch": 20.894533406957482, "grad_norm": 0.26439139246940613, "learning_rate": 2.7025666666666668e-05, "loss": 0.0169, "step": 18930 }, { "epoch": 20.895637769188294, "grad_norm": 0.24611763656139374, "learning_rate": 2.7025333333333334e-05, "loss": 0.0089, "step": 18931 }, { "epoch": 20.896742131419106, "grad_norm": 0.2029140293598175, "learning_rate": 2.7025000000000003e-05, "loss": 0.006, "step": 18932 }, { "epoch": 20.89784649364992, "grad_norm": 0.17932340502738953, "learning_rate": 2.7024666666666666e-05, "loss": 0.0069, "step": 18933 }, { "epoch": 20.898950855880727, "grad_norm": 0.19094601273536682, "learning_rate": 2.7024333333333335e-05, "loss": 0.0091, "step": 18934 }, { "epoch": 20.90005521811154, "grad_norm": 0.30448096990585327, "learning_rate": 2.7024e-05, "loss": 0.0124, "step": 18935 }, { "epoch": 20.90115958034235, "grad_norm": 0.18845108151435852, "learning_rate": 2.7023666666666667e-05, "loss": 0.0079, "step": 18936 }, { "epoch": 20.902263942573164, "grad_norm": 0.3057383596897125, "learning_rate": 2.7023333333333333e-05, "loss": 0.0084, "step": 18937 }, { "epoch": 20.903368304803976, "grad_norm": 0.4625590443611145, "learning_rate": 2.7023000000000002e-05, "loss": 0.0139, "step": 18938 }, { "epoch": 20.90447266703479, "grad_norm": 0.16372893750667572, "learning_rate": 2.7022666666666668e-05, "loss": 0.0057, "step": 18939 }, { "epoch": 20.9055770292656, "grad_norm": 0.39486145973205566, "learning_rate": 2.7022333333333334e-05, "loss": 0.0081, "step": 18940 }, { "epoch": 20.90668139149641, "grad_norm": 0.15300720930099487, "learning_rate": 2.7022000000000003e-05, "loss": 0.0043, "step": 18941 }, { "epoch": 20.90778575372722, "grad_norm": 0.16925635933876038, 
"learning_rate": 2.7021666666666666e-05, "loss": 0.0046, "step": 18942 }, { "epoch": 20.908890115958034, "grad_norm": 0.5236896276473999, "learning_rate": 2.7021333333333335e-05, "loss": 0.0048, "step": 18943 }, { "epoch": 20.909994478188846, "grad_norm": 0.2890881896018982, "learning_rate": 2.7020999999999998e-05, "loss": 0.0078, "step": 18944 }, { "epoch": 20.91109884041966, "grad_norm": 0.1431984305381775, "learning_rate": 2.7020666666666667e-05, "loss": 0.0064, "step": 18945 }, { "epoch": 20.91220320265047, "grad_norm": 0.21319805085659027, "learning_rate": 2.7020333333333336e-05, "loss": 0.0277, "step": 18946 }, { "epoch": 20.913307564881283, "grad_norm": 0.15958751738071442, "learning_rate": 2.702e-05, "loss": 0.0064, "step": 18947 }, { "epoch": 20.91441192711209, "grad_norm": 0.15803736448287964, "learning_rate": 2.7019666666666668e-05, "loss": 0.0065, "step": 18948 }, { "epoch": 20.915516289342904, "grad_norm": 0.15944980084896088, "learning_rate": 2.7019333333333334e-05, "loss": 0.0037, "step": 18949 }, { "epoch": 20.916620651573716, "grad_norm": 0.253429651260376, "learning_rate": 2.7019e-05, "loss": 0.0083, "step": 18950 }, { "epoch": 20.917725013804528, "grad_norm": 0.2068846970796585, "learning_rate": 2.7018666666666666e-05, "loss": 0.0072, "step": 18951 }, { "epoch": 20.91882937603534, "grad_norm": 0.19409850239753723, "learning_rate": 2.7018333333333335e-05, "loss": 0.0049, "step": 18952 }, { "epoch": 20.919933738266153, "grad_norm": 0.32386812567710876, "learning_rate": 2.7017999999999998e-05, "loss": 0.0085, "step": 18953 }, { "epoch": 20.921038100496965, "grad_norm": 0.17076054215431213, "learning_rate": 2.7017666666666667e-05, "loss": 0.0059, "step": 18954 }, { "epoch": 20.922142462727773, "grad_norm": 0.34343308210372925, "learning_rate": 2.7017333333333336e-05, "loss": 0.0188, "step": 18955 }, { "epoch": 20.923246824958586, "grad_norm": 0.13559387624263763, "learning_rate": 2.7017e-05, "loss": 0.0049, "step": 18956 }, { "epoch": 
20.924351187189398, "grad_norm": 0.11823243647813797, "learning_rate": 2.701666666666667e-05, "loss": 0.0067, "step": 18957 }, { "epoch": 20.92545554942021, "grad_norm": 0.1046777069568634, "learning_rate": 2.7016333333333334e-05, "loss": 0.0024, "step": 18958 }, { "epoch": 20.926559911651022, "grad_norm": 0.2497836798429489, "learning_rate": 2.7016e-05, "loss": 0.0068, "step": 18959 }, { "epoch": 20.927664273881835, "grad_norm": 0.11076655983924866, "learning_rate": 2.7015666666666666e-05, "loss": 0.0043, "step": 18960 }, { "epoch": 20.928768636112643, "grad_norm": 0.2328447848558426, "learning_rate": 2.7015333333333335e-05, "loss": 0.0067, "step": 18961 }, { "epoch": 20.929872998343455, "grad_norm": 0.28572070598602295, "learning_rate": 2.7015e-05, "loss": 0.0068, "step": 18962 }, { "epoch": 20.930977360574268, "grad_norm": 0.5146263241767883, "learning_rate": 2.7014666666666667e-05, "loss": 0.0036, "step": 18963 }, { "epoch": 20.93208172280508, "grad_norm": 0.2745206952095032, "learning_rate": 2.7014333333333337e-05, "loss": 0.0114, "step": 18964 }, { "epoch": 20.933186085035892, "grad_norm": 0.15085870027542114, "learning_rate": 2.7014e-05, "loss": 0.0023, "step": 18965 }, { "epoch": 20.934290447266704, "grad_norm": 0.2683587670326233, "learning_rate": 2.701366666666667e-05, "loss": 0.0045, "step": 18966 }, { "epoch": 20.935394809497517, "grad_norm": 0.08862468600273132, "learning_rate": 2.7013333333333334e-05, "loss": 0.0017, "step": 18967 }, { "epoch": 20.936499171728325, "grad_norm": 0.29735037684440613, "learning_rate": 2.7013e-05, "loss": 0.0047, "step": 18968 }, { "epoch": 20.937603533959138, "grad_norm": 0.18238185346126556, "learning_rate": 2.7012666666666666e-05, "loss": 0.0035, "step": 18969 }, { "epoch": 20.93870789618995, "grad_norm": 0.6219661831855774, "learning_rate": 2.7012333333333336e-05, "loss": 0.0262, "step": 18970 }, { "epoch": 20.939812258420762, "grad_norm": 0.5378625988960266, "learning_rate": 2.7012e-05, "loss": 0.1345, "step": 18971 
}, { "epoch": 20.940916620651574, "grad_norm": 0.47097048163414, "learning_rate": 2.7011666666666668e-05, "loss": 0.0816, "step": 18972 }, { "epoch": 20.942020982882386, "grad_norm": 0.32134029269218445, "learning_rate": 2.7011333333333337e-05, "loss": 0.0504, "step": 18973 }, { "epoch": 20.9431253451132, "grad_norm": 0.46304765343666077, "learning_rate": 2.7011e-05, "loss": 0.0872, "step": 18974 }, { "epoch": 20.944229707344007, "grad_norm": 0.3564774990081787, "learning_rate": 2.701066666666667e-05, "loss": 0.0454, "step": 18975 }, { "epoch": 20.94533406957482, "grad_norm": 0.27278932929039, "learning_rate": 2.701033333333333e-05, "loss": 0.0351, "step": 18976 }, { "epoch": 20.946438431805632, "grad_norm": 0.7990781664848328, "learning_rate": 2.701e-05, "loss": 0.0492, "step": 18977 }, { "epoch": 20.947542794036444, "grad_norm": 0.21985788643360138, "learning_rate": 2.7009666666666667e-05, "loss": 0.0132, "step": 18978 }, { "epoch": 20.948647156267256, "grad_norm": 0.2884625494480133, "learning_rate": 2.7009333333333332e-05, "loss": 0.0324, "step": 18979 }, { "epoch": 20.94975151849807, "grad_norm": 0.3526085615158081, "learning_rate": 2.7009000000000002e-05, "loss": 0.0236, "step": 18980 }, { "epoch": 20.95085588072888, "grad_norm": 0.13884469866752625, "learning_rate": 2.7008666666666668e-05, "loss": 0.0051, "step": 18981 }, { "epoch": 20.95196024295969, "grad_norm": 0.2795103192329407, "learning_rate": 2.7008333333333334e-05, "loss": 0.0068, "step": 18982 }, { "epoch": 20.9530646051905, "grad_norm": 0.2773881256580353, "learning_rate": 2.7008e-05, "loss": 0.0063, "step": 18983 }, { "epoch": 20.954168967421314, "grad_norm": 0.10383164137601852, "learning_rate": 2.700766666666667e-05, "loss": 0.0047, "step": 18984 }, { "epoch": 20.955273329652126, "grad_norm": 0.1486441045999527, "learning_rate": 2.700733333333333e-05, "loss": 0.0052, "step": 18985 }, { "epoch": 20.95637769188294, "grad_norm": 0.33170750737190247, "learning_rate": 2.7007e-05, "loss": 0.0056, 
"step": 18986 }, { "epoch": 20.95748205411375, "grad_norm": 0.19625763595104218, "learning_rate": 2.7006666666666667e-05, "loss": 0.0058, "step": 18987 }, { "epoch": 20.95858641634456, "grad_norm": 0.1677941381931305, "learning_rate": 2.7006333333333333e-05, "loss": 0.0089, "step": 18988 }, { "epoch": 20.95969077857537, "grad_norm": 0.3641813397407532, "learning_rate": 2.7006000000000002e-05, "loss": 0.0089, "step": 18989 }, { "epoch": 20.960795140806184, "grad_norm": 0.16448314487934113, "learning_rate": 2.7005666666666668e-05, "loss": 0.0041, "step": 18990 }, { "epoch": 20.961899503036996, "grad_norm": 0.15537099540233612, "learning_rate": 2.7005333333333334e-05, "loss": 0.0194, "step": 18991 }, { "epoch": 20.963003865267808, "grad_norm": 0.0957067608833313, "learning_rate": 2.7005e-05, "loss": 0.002, "step": 18992 }, { "epoch": 20.96410822749862, "grad_norm": 0.4852844178676605, "learning_rate": 2.700466666666667e-05, "loss": 0.011, "step": 18993 }, { "epoch": 20.965212589729433, "grad_norm": 0.1518692672252655, "learning_rate": 2.700433333333333e-05, "loss": 0.0052, "step": 18994 }, { "epoch": 20.96631695196024, "grad_norm": 0.8546669483184814, "learning_rate": 2.7004e-05, "loss": 0.0073, "step": 18995 }, { "epoch": 20.967421314191053, "grad_norm": 0.10742787271738052, "learning_rate": 2.700366666666667e-05, "loss": 0.0053, "step": 18996 }, { "epoch": 20.968525676421866, "grad_norm": 0.16807600855827332, "learning_rate": 2.7003333333333333e-05, "loss": 0.0052, "step": 18997 }, { "epoch": 20.969630038652678, "grad_norm": 0.08649630844593048, "learning_rate": 2.7003000000000002e-05, "loss": 0.0037, "step": 18998 }, { "epoch": 20.97073440088349, "grad_norm": 0.13003356754779816, "learning_rate": 2.7002666666666668e-05, "loss": 0.0033, "step": 18999 }, { "epoch": 20.971838763114302, "grad_norm": 0.5976195335388184, "learning_rate": 2.7002333333333334e-05, "loss": 0.0147, "step": 19000 }, { "epoch": 20.971838763114302, "eval_cer": 0.10952121730564844, "eval_loss": 
0.36200064420700073, "eval_runtime": 15.81, "eval_samples_per_second": 19.228, "eval_steps_per_second": 0.633, "eval_wer": 0.3906369915579432, "step": 19000 }, { "epoch": 20.972943125345115, "grad_norm": 0.11733365803956985, "learning_rate": 2.7002e-05, "loss": 0.005, "step": 19001 }, { "epoch": 20.974047487575923, "grad_norm": 0.26380467414855957, "learning_rate": 2.700166666666667e-05, "loss": 0.0084, "step": 19002 }, { "epoch": 20.975151849806736, "grad_norm": 0.19676101207733154, "learning_rate": 2.7001333333333332e-05, "loss": 0.0056, "step": 19003 }, { "epoch": 20.976256212037548, "grad_norm": 0.101385198533535, "learning_rate": 2.7001e-05, "loss": 0.0053, "step": 19004 }, { "epoch": 20.97736057426836, "grad_norm": 0.07886708527803421, "learning_rate": 2.7000666666666667e-05, "loss": 0.0022, "step": 19005 }, { "epoch": 20.978464936499172, "grad_norm": 0.306537926197052, "learning_rate": 2.7000333333333333e-05, "loss": 0.0088, "step": 19006 }, { "epoch": 20.979569298729984, "grad_norm": 0.310336709022522, "learning_rate": 2.7000000000000002e-05, "loss": 0.0071, "step": 19007 }, { "epoch": 20.980673660960797, "grad_norm": 0.1588839441537857, "learning_rate": 2.6999666666666665e-05, "loss": 0.0037, "step": 19008 }, { "epoch": 20.981778023191605, "grad_norm": 0.2019653618335724, "learning_rate": 2.6999333333333334e-05, "loss": 0.0053, "step": 19009 }, { "epoch": 20.982882385422418, "grad_norm": 0.1665409654378891, "learning_rate": 2.6999e-05, "loss": 0.0053, "step": 19010 }, { "epoch": 20.98398674765323, "grad_norm": 0.23242339491844177, "learning_rate": 2.6998666666666666e-05, "loss": 0.0053, "step": 19011 }, { "epoch": 20.985091109884042, "grad_norm": 0.31140953302383423, "learning_rate": 2.6998333333333335e-05, "loss": 0.0141, "step": 19012 }, { "epoch": 20.986195472114854, "grad_norm": 0.15359091758728027, "learning_rate": 2.6998e-05, "loss": 0.0057, "step": 19013 }, { "epoch": 20.987299834345666, "grad_norm": 0.42795756459236145, "learning_rate": 
2.6997666666666667e-05, "loss": 0.0085, "step": 19014 }, { "epoch": 20.98840419657648, "grad_norm": 0.18199250102043152, "learning_rate": 2.6997333333333333e-05, "loss": 0.0055, "step": 19015 }, { "epoch": 20.989508558807287, "grad_norm": 0.1394461840391159, "learning_rate": 2.6997000000000003e-05, "loss": 0.0046, "step": 19016 }, { "epoch": 20.9906129210381, "grad_norm": 0.20471347868442535, "learning_rate": 2.6996666666666665e-05, "loss": 0.0048, "step": 19017 }, { "epoch": 20.991717283268912, "grad_norm": 0.33403950929641724, "learning_rate": 2.6996333333333334e-05, "loss": 0.0109, "step": 19018 }, { "epoch": 20.992821645499724, "grad_norm": 0.4331406056880951, "learning_rate": 2.6996e-05, "loss": 0.0054, "step": 19019 }, { "epoch": 20.993926007730536, "grad_norm": 0.20505084097385406, "learning_rate": 2.6995666666666666e-05, "loss": 0.0036, "step": 19020 }, { "epoch": 20.99503036996135, "grad_norm": 0.2944090664386749, "learning_rate": 2.6995333333333336e-05, "loss": 0.0439, "step": 19021 }, { "epoch": 20.99613473219216, "grad_norm": 0.05931258201599121, "learning_rate": 2.6995e-05, "loss": 0.0025, "step": 19022 }, { "epoch": 20.99723909442297, "grad_norm": 0.21217654645442963, "learning_rate": 2.6994666666666668e-05, "loss": 0.0046, "step": 19023 }, { "epoch": 20.99834345665378, "grad_norm": 0.18081118166446686, "learning_rate": 2.6994333333333333e-05, "loss": 0.007, "step": 19024 }, { "epoch": 20.999447818884594, "grad_norm": 0.37057173252105713, "learning_rate": 2.6994000000000003e-05, "loss": 0.0086, "step": 19025 }, { "epoch": 21.0, "grad_norm": 0.13605637848377228, "learning_rate": 2.6993666666666665e-05, "loss": 0.003, "step": 19026 }, { "epoch": 21.001104362230812, "grad_norm": 0.42828086018562317, "learning_rate": 2.6993333333333335e-05, "loss": 0.108, "step": 19027 }, { "epoch": 21.002208724461624, "grad_norm": 0.47694867849349976, "learning_rate": 2.6993e-05, "loss": 0.0587, "step": 19028 }, { "epoch": 21.003313086692437, "grad_norm": 
0.3670695126056671, "learning_rate": 2.6992666666666667e-05, "loss": 0.0523, "step": 19029 }, { "epoch": 21.004417448923245, "grad_norm": 0.3176342248916626, "learning_rate": 2.6992333333333336e-05, "loss": 0.0787, "step": 19030 }, { "epoch": 21.005521811154058, "grad_norm": 0.19762252271175385, "learning_rate": 2.6992000000000002e-05, "loss": 0.027, "step": 19031 }, { "epoch": 21.00662617338487, "grad_norm": 0.3408299684524536, "learning_rate": 2.6991666666666668e-05, "loss": 0.0392, "step": 19032 }, { "epoch": 21.007730535615682, "grad_norm": 0.2550325095653534, "learning_rate": 2.6991333333333334e-05, "loss": 0.0253, "step": 19033 }, { "epoch": 21.008834897846494, "grad_norm": 0.2945185899734497, "learning_rate": 2.6991000000000003e-05, "loss": 0.0404, "step": 19034 }, { "epoch": 21.009939260077306, "grad_norm": 0.18112200498580933, "learning_rate": 2.6990666666666666e-05, "loss": 0.0102, "step": 19035 }, { "epoch": 21.01104362230812, "grad_norm": 0.3956192433834076, "learning_rate": 2.6990333333333335e-05, "loss": 0.0159, "step": 19036 }, { "epoch": 21.012147984538927, "grad_norm": 0.1360507309436798, "learning_rate": 2.699e-05, "loss": 0.0091, "step": 19037 }, { "epoch": 21.01325234676974, "grad_norm": 0.2648846507072449, "learning_rate": 2.6989666666666667e-05, "loss": 0.0044, "step": 19038 }, { "epoch": 21.014356709000552, "grad_norm": 0.1338953971862793, "learning_rate": 2.6989333333333336e-05, "loss": 0.0057, "step": 19039 }, { "epoch": 21.015461071231364, "grad_norm": 0.19184915721416473, "learning_rate": 2.6989e-05, "loss": 0.0064, "step": 19040 }, { "epoch": 21.016565433462176, "grad_norm": 0.09766559302806854, "learning_rate": 2.6988666666666668e-05, "loss": 0.0022, "step": 19041 }, { "epoch": 21.01766979569299, "grad_norm": 0.1551171988248825, "learning_rate": 2.6988333333333334e-05, "loss": 0.004, "step": 19042 }, { "epoch": 21.0187741579238, "grad_norm": 0.08521302044391632, "learning_rate": 2.6988e-05, "loss": 0.0033, "step": 19043 }, { "epoch": 
21.01987852015461, "grad_norm": 0.10592615604400635, "learning_rate": 2.6987666666666666e-05, "loss": 0.005, "step": 19044 }, { "epoch": 21.02098288238542, "grad_norm": 0.21503134071826935, "learning_rate": 2.6987333333333335e-05, "loss": 0.0044, "step": 19045 }, { "epoch": 21.022087244616234, "grad_norm": 1.9392430782318115, "learning_rate": 2.6987e-05, "loss": 0.0098, "step": 19046 }, { "epoch": 21.023191606847046, "grad_norm": 0.1935550570487976, "learning_rate": 2.6986666666666667e-05, "loss": 0.0028, "step": 19047 }, { "epoch": 21.02429596907786, "grad_norm": 0.09376132488250732, "learning_rate": 2.6986333333333336e-05, "loss": 0.0024, "step": 19048 }, { "epoch": 21.02540033130867, "grad_norm": 0.24805651605129242, "learning_rate": 2.6986e-05, "loss": 0.0112, "step": 19049 }, { "epoch": 21.02650469353948, "grad_norm": 0.14223387837409973, "learning_rate": 2.6985666666666668e-05, "loss": 0.0025, "step": 19050 }, { "epoch": 21.02760905577029, "grad_norm": 0.2002427726984024, "learning_rate": 2.6985333333333334e-05, "loss": 0.0035, "step": 19051 }, { "epoch": 21.028713418001104, "grad_norm": 0.3509657084941864, "learning_rate": 2.6985e-05, "loss": 0.0427, "step": 19052 }, { "epoch": 21.029817780231916, "grad_norm": 0.21412797272205353, "learning_rate": 2.6984666666666666e-05, "loss": 0.002, "step": 19053 }, { "epoch": 21.030922142462728, "grad_norm": 0.31621792912483215, "learning_rate": 2.6984333333333335e-05, "loss": 0.0064, "step": 19054 }, { "epoch": 21.03202650469354, "grad_norm": 0.1094139814376831, "learning_rate": 2.6984e-05, "loss": 0.0038, "step": 19055 }, { "epoch": 21.033130866924353, "grad_norm": 0.20220454037189484, "learning_rate": 2.6983666666666667e-05, "loss": 0.0046, "step": 19056 }, { "epoch": 21.03423522915516, "grad_norm": 0.34013134241104126, "learning_rate": 2.6983333333333336e-05, "loss": 0.0075, "step": 19057 }, { "epoch": 21.035339591385974, "grad_norm": 0.06062936410307884, "learning_rate": 2.6983e-05, "loss": 0.001, "step": 19058 }, { 
"epoch": 21.036443953616786, "grad_norm": 0.15942427515983582, "learning_rate": 2.698266666666667e-05, "loss": 0.0032, "step": 19059 }, { "epoch": 21.037548315847598, "grad_norm": 0.31963086128234863, "learning_rate": 2.6982333333333334e-05, "loss": 0.011, "step": 19060 }, { "epoch": 21.03865267807841, "grad_norm": 0.09287924319505692, "learning_rate": 2.6982e-05, "loss": 0.0023, "step": 19061 }, { "epoch": 21.039757040309222, "grad_norm": 0.1426818072795868, "learning_rate": 2.698166666666667e-05, "loss": 0.0104, "step": 19062 }, { "epoch": 21.040861402540035, "grad_norm": 0.08831965923309326, "learning_rate": 2.6981333333333335e-05, "loss": 0.0026, "step": 19063 }, { "epoch": 21.041965764770843, "grad_norm": 0.14148560166358948, "learning_rate": 2.6981e-05, "loss": 0.0038, "step": 19064 }, { "epoch": 21.043070127001656, "grad_norm": 0.2127569168806076, "learning_rate": 2.6980666666666667e-05, "loss": 0.0049, "step": 19065 }, { "epoch": 21.044174489232468, "grad_norm": 0.14664191007614136, "learning_rate": 2.6980333333333333e-05, "loss": 0.0036, "step": 19066 }, { "epoch": 21.04527885146328, "grad_norm": 0.14563898742198944, "learning_rate": 2.698e-05, "loss": 0.0034, "step": 19067 }, { "epoch": 21.046383213694092, "grad_norm": 0.30221834778785706, "learning_rate": 2.697966666666667e-05, "loss": 0.0045, "step": 19068 }, { "epoch": 21.047487575924904, "grad_norm": 0.20819412171840668, "learning_rate": 2.697933333333333e-05, "loss": 0.007, "step": 19069 }, { "epoch": 21.048591938155717, "grad_norm": 0.20668169856071472, "learning_rate": 2.6979e-05, "loss": 0.0069, "step": 19070 }, { "epoch": 21.049696300386525, "grad_norm": 0.2988283038139343, "learning_rate": 2.697866666666667e-05, "loss": 0.0038, "step": 19071 }, { "epoch": 21.050800662617338, "grad_norm": 0.15285922586917877, "learning_rate": 2.6978333333333332e-05, "loss": 0.0013, "step": 19072 }, { "epoch": 21.05190502484815, "grad_norm": 0.18003423511981964, "learning_rate": 2.6978e-05, "loss": 0.0082, "step": 
19073 }, { "epoch": 21.053009387078962, "grad_norm": 0.1804995834827423, "learning_rate": 2.6977666666666668e-05, "loss": 0.0046, "step": 19074 }, { "epoch": 21.054113749309774, "grad_norm": 0.21700914204120636, "learning_rate": 2.6977333333333333e-05, "loss": 0.0028, "step": 19075 }, { "epoch": 21.055218111540587, "grad_norm": 0.17418991029262543, "learning_rate": 2.6977e-05, "loss": 0.0029, "step": 19076 }, { "epoch": 21.0563224737714, "grad_norm": 0.4370907247066498, "learning_rate": 2.697666666666667e-05, "loss": 0.1078, "step": 19077 }, { "epoch": 21.057426836002207, "grad_norm": 0.5353071093559265, "learning_rate": 2.697633333333333e-05, "loss": 0.0912, "step": 19078 }, { "epoch": 21.05853119823302, "grad_norm": 0.6264816522598267, "learning_rate": 2.6976e-05, "loss": 0.0614, "step": 19079 }, { "epoch": 21.059635560463832, "grad_norm": 0.31282296776771545, "learning_rate": 2.697566666666667e-05, "loss": 0.0308, "step": 19080 }, { "epoch": 21.060739922694644, "grad_norm": 0.24065208435058594, "learning_rate": 2.6975333333333332e-05, "loss": 0.0281, "step": 19081 }, { "epoch": 21.061844284925456, "grad_norm": 0.28040313720703125, "learning_rate": 2.6975000000000002e-05, "loss": 0.0213, "step": 19082 }, { "epoch": 21.06294864715627, "grad_norm": 0.13870836794376373, "learning_rate": 2.6974666666666668e-05, "loss": 0.0085, "step": 19083 }, { "epoch": 21.064053009387077, "grad_norm": 0.1401757299900055, "learning_rate": 2.6974333333333334e-05, "loss": 0.018, "step": 19084 }, { "epoch": 21.06515737161789, "grad_norm": 0.2748251259326935, "learning_rate": 2.6974e-05, "loss": 0.0354, "step": 19085 }, { "epoch": 21.0662617338487, "grad_norm": 0.31645435094833374, "learning_rate": 2.697366666666667e-05, "loss": 0.0166, "step": 19086 }, { "epoch": 21.067366096079514, "grad_norm": 0.5642332434654236, "learning_rate": 2.6973333333333335e-05, "loss": 0.0063, "step": 19087 }, { "epoch": 21.068470458310326, "grad_norm": 0.09528765082359314, "learning_rate": 2.6973e-05, 
"loss": 0.0059, "step": 19088 }, { "epoch": 21.06957482054114, "grad_norm": 0.14672380685806274, "learning_rate": 2.697266666666667e-05, "loss": 0.0049, "step": 19089 }, { "epoch": 21.07067918277195, "grad_norm": 0.04180866479873657, "learning_rate": 2.6972333333333333e-05, "loss": 0.0018, "step": 19090 }, { "epoch": 21.07178354500276, "grad_norm": 0.061577796936035156, "learning_rate": 2.6972000000000002e-05, "loss": 0.0021, "step": 19091 }, { "epoch": 21.07288790723357, "grad_norm": 0.11902504414319992, "learning_rate": 2.6971666666666668e-05, "loss": 0.0034, "step": 19092 }, { "epoch": 21.073992269464384, "grad_norm": 0.3063421845436096, "learning_rate": 2.6971333333333334e-05, "loss": 0.0058, "step": 19093 }, { "epoch": 21.075096631695196, "grad_norm": 0.20093053579330444, "learning_rate": 2.6971e-05, "loss": 0.0034, "step": 19094 }, { "epoch": 21.076200993926008, "grad_norm": 0.31592413783073425, "learning_rate": 2.6970666666666666e-05, "loss": 0.0062, "step": 19095 }, { "epoch": 21.07730535615682, "grad_norm": 0.30839142203330994, "learning_rate": 2.6970333333333335e-05, "loss": 0.005, "step": 19096 }, { "epoch": 21.078409718387633, "grad_norm": 0.10032476484775543, "learning_rate": 2.697e-05, "loss": 0.005, "step": 19097 }, { "epoch": 21.07951408061844, "grad_norm": 0.19080355763435364, "learning_rate": 2.6969666666666667e-05, "loss": 0.0068, "step": 19098 }, { "epoch": 21.080618442849254, "grad_norm": 0.11660054326057434, "learning_rate": 2.6969333333333333e-05, "loss": 0.0027, "step": 19099 }, { "epoch": 21.081722805080066, "grad_norm": 0.2399071753025055, "learning_rate": 2.6969000000000002e-05, "loss": 0.0177, "step": 19100 }, { "epoch": 21.082827167310878, "grad_norm": 0.15457990765571594, "learning_rate": 2.6968666666666665e-05, "loss": 0.0174, "step": 19101 }, { "epoch": 21.08393152954169, "grad_norm": 0.15262232720851898, "learning_rate": 2.6968333333333334e-05, "loss": 0.0025, "step": 19102 }, { "epoch": 21.085035891772502, "grad_norm": 
0.04833631590008736, "learning_rate": 2.6968e-05, "loss": 0.0013, "step": 19103 }, { "epoch": 21.086140254003315, "grad_norm": 0.4078438878059387, "learning_rate": 2.6967666666666666e-05, "loss": 0.0075, "step": 19104 }, { "epoch": 21.087244616234123, "grad_norm": 0.18259388208389282, "learning_rate": 2.6967333333333335e-05, "loss": 0.004, "step": 19105 }, { "epoch": 21.088348978464936, "grad_norm": 0.07740989327430725, "learning_rate": 2.6967e-05, "loss": 0.0025, "step": 19106 }, { "epoch": 21.089453340695748, "grad_norm": 0.15782921016216278, "learning_rate": 2.6966666666666667e-05, "loss": 0.0043, "step": 19107 }, { "epoch": 21.09055770292656, "grad_norm": 0.08406830579042435, "learning_rate": 2.6966333333333333e-05, "loss": 0.0028, "step": 19108 }, { "epoch": 21.091662065157372, "grad_norm": 0.06623250991106033, "learning_rate": 2.6966000000000002e-05, "loss": 0.0012, "step": 19109 }, { "epoch": 21.092766427388185, "grad_norm": 0.1602824628353119, "learning_rate": 2.6965666666666665e-05, "loss": 0.0051, "step": 19110 }, { "epoch": 21.093870789618997, "grad_norm": 0.16004933416843414, "learning_rate": 2.6965333333333334e-05, "loss": 0.011, "step": 19111 }, { "epoch": 21.094975151849805, "grad_norm": 0.055274177342653275, "learning_rate": 2.6965000000000004e-05, "loss": 0.0007, "step": 19112 }, { "epoch": 21.096079514080618, "grad_norm": 0.3097977638244629, "learning_rate": 2.6964666666666666e-05, "loss": 0.0049, "step": 19113 }, { "epoch": 21.09718387631143, "grad_norm": 0.39738819003105164, "learning_rate": 2.6964333333333335e-05, "loss": 0.005, "step": 19114 }, { "epoch": 21.098288238542242, "grad_norm": 0.4165082275867462, "learning_rate": 2.6964e-05, "loss": 0.005, "step": 19115 }, { "epoch": 21.099392600773054, "grad_norm": 0.16390012204647064, "learning_rate": 2.6963666666666667e-05, "loss": 0.0046, "step": 19116 }, { "epoch": 21.100496963003867, "grad_norm": 0.11967721581459045, "learning_rate": 2.6963333333333333e-05, "loss": 0.0036, "step": 19117 }, { 
"epoch": 21.101601325234675, "grad_norm": 0.37690386176109314, "learning_rate": 2.6963000000000003e-05, "loss": 0.0048, "step": 19118 }, { "epoch": 21.102705687465487, "grad_norm": 0.18551544845104218, "learning_rate": 2.6962666666666665e-05, "loss": 0.002, "step": 19119 }, { "epoch": 21.1038100496963, "grad_norm": 0.16070659458637238, "learning_rate": 2.6962333333333334e-05, "loss": 0.0034, "step": 19120 }, { "epoch": 21.104914411927112, "grad_norm": 0.18890585005283356, "learning_rate": 2.6962000000000004e-05, "loss": 0.0039, "step": 19121 }, { "epoch": 21.106018774157924, "grad_norm": 0.20092707872390747, "learning_rate": 2.6961666666666666e-05, "loss": 0.0034, "step": 19122 }, { "epoch": 21.107123136388736, "grad_norm": 0.16325503587722778, "learning_rate": 2.6961333333333336e-05, "loss": 0.0052, "step": 19123 }, { "epoch": 21.10822749861955, "grad_norm": 0.06875694543123245, "learning_rate": 2.6961e-05, "loss": 0.0013, "step": 19124 }, { "epoch": 21.109331860850357, "grad_norm": 0.07624542713165283, "learning_rate": 2.6960666666666668e-05, "loss": 0.002, "step": 19125 }, { "epoch": 21.11043622308117, "grad_norm": 0.15599705278873444, "learning_rate": 2.6960333333333333e-05, "loss": 0.0037, "step": 19126 }, { "epoch": 21.11154058531198, "grad_norm": 0.5947918891906738, "learning_rate": 2.696e-05, "loss": 0.1012, "step": 19127 }, { "epoch": 21.112644947542794, "grad_norm": 0.39045894145965576, "learning_rate": 2.6959666666666665e-05, "loss": 0.0961, "step": 19128 }, { "epoch": 21.113749309773606, "grad_norm": 0.34137779474258423, "learning_rate": 2.6959333333333335e-05, "loss": 0.0329, "step": 19129 }, { "epoch": 21.11485367200442, "grad_norm": 0.6117346286773682, "learning_rate": 2.6959e-05, "loss": 0.0529, "step": 19130 }, { "epoch": 21.11595803423523, "grad_norm": 0.4154662787914276, "learning_rate": 2.6958666666666667e-05, "loss": 0.0557, "step": 19131 }, { "epoch": 21.11706239646604, "grad_norm": 0.436219185590744, "learning_rate": 2.6958333333333336e-05, 
"loss": 0.0451, "step": 19132 }, { "epoch": 21.11816675869685, "grad_norm": 0.2594079375267029, "learning_rate": 2.6958e-05, "loss": 0.0378, "step": 19133 }, { "epoch": 21.119271120927664, "grad_norm": 0.2572808265686035, "learning_rate": 2.6957666666666668e-05, "loss": 0.0279, "step": 19134 }, { "epoch": 21.120375483158476, "grad_norm": 0.2892377972602844, "learning_rate": 2.6957333333333334e-05, "loss": 0.015, "step": 19135 }, { "epoch": 21.12147984538929, "grad_norm": 0.13242697715759277, "learning_rate": 2.6957e-05, "loss": 0.0099, "step": 19136 }, { "epoch": 21.1225842076201, "grad_norm": 0.20044057071208954, "learning_rate": 2.695666666666667e-05, "loss": 0.0212, "step": 19137 }, { "epoch": 21.123688569850913, "grad_norm": 0.158583864569664, "learning_rate": 2.6956333333333335e-05, "loss": 0.003, "step": 19138 }, { "epoch": 21.12479293208172, "grad_norm": 0.13492977619171143, "learning_rate": 2.6956e-05, "loss": 0.0069, "step": 19139 }, { "epoch": 21.125897294312534, "grad_norm": 0.13462668657302856, "learning_rate": 2.6955666666666667e-05, "loss": 0.0052, "step": 19140 }, { "epoch": 21.127001656543346, "grad_norm": 0.11220639199018478, "learning_rate": 2.6955333333333336e-05, "loss": 0.0025, "step": 19141 }, { "epoch": 21.128106018774158, "grad_norm": 0.0730430856347084, "learning_rate": 2.6955e-05, "loss": 0.002, "step": 19142 }, { "epoch": 21.12921038100497, "grad_norm": 0.2841624319553375, "learning_rate": 2.6954666666666668e-05, "loss": 0.0057, "step": 19143 }, { "epoch": 21.130314743235783, "grad_norm": 0.11259055137634277, "learning_rate": 2.6954333333333334e-05, "loss": 0.0037, "step": 19144 }, { "epoch": 21.131419105466595, "grad_norm": 0.05436980724334717, "learning_rate": 2.6954e-05, "loss": 0.0018, "step": 19145 }, { "epoch": 21.132523467697403, "grad_norm": 0.1628398299217224, "learning_rate": 2.695366666666667e-05, "loss": 0.0039, "step": 19146 }, { "epoch": 21.133627829928216, "grad_norm": 0.1299743950366974, "learning_rate": 
2.6953333333333335e-05, "loss": 0.0044, "step": 19147 }, { "epoch": 21.134732192159028, "grad_norm": 0.30841681361198425, "learning_rate": 2.6953e-05, "loss": 0.0053, "step": 19148 }, { "epoch": 21.13583655438984, "grad_norm": 0.2499345988035202, "learning_rate": 2.6952666666666667e-05, "loss": 0.0058, "step": 19149 }, { "epoch": 21.136940916620652, "grad_norm": 0.17764435708522797, "learning_rate": 2.6952333333333336e-05, "loss": 0.0052, "step": 19150 }, { "epoch": 21.138045278851465, "grad_norm": 0.16376736760139465, "learning_rate": 2.6952e-05, "loss": 0.0053, "step": 19151 }, { "epoch": 21.139149641082273, "grad_norm": 0.17077991366386414, "learning_rate": 2.6951666666666668e-05, "loss": 0.0044, "step": 19152 }, { "epoch": 21.140254003313085, "grad_norm": 0.16845498979091644, "learning_rate": 2.6951333333333334e-05, "loss": 0.0222, "step": 19153 }, { "epoch": 21.141358365543898, "grad_norm": 0.5461885333061218, "learning_rate": 2.6951e-05, "loss": 0.005, "step": 19154 }, { "epoch": 21.14246272777471, "grad_norm": 0.11027580499649048, "learning_rate": 2.695066666666667e-05, "loss": 0.003, "step": 19155 }, { "epoch": 21.143567090005522, "grad_norm": 0.21353532373905182, "learning_rate": 2.6950333333333332e-05, "loss": 0.0045, "step": 19156 }, { "epoch": 21.144671452236334, "grad_norm": 0.255840003490448, "learning_rate": 2.695e-05, "loss": 0.0028, "step": 19157 }, { "epoch": 21.145775814467147, "grad_norm": 0.06593425571918488, "learning_rate": 2.6949666666666667e-05, "loss": 0.0015, "step": 19158 }, { "epoch": 21.146880176697955, "grad_norm": 0.1535300761461258, "learning_rate": 2.6949333333333333e-05, "loss": 0.0042, "step": 19159 }, { "epoch": 21.147984538928768, "grad_norm": 0.06608407199382782, "learning_rate": 2.6949e-05, "loss": 0.0013, "step": 19160 }, { "epoch": 21.14908890115958, "grad_norm": 0.12179160863161087, "learning_rate": 2.6948666666666668e-05, "loss": 0.0042, "step": 19161 }, { "epoch": 21.150193263390392, "grad_norm": 0.2809268534183502, 
"learning_rate": 2.6948333333333334e-05, "loss": 0.0053, "step": 19162 }, { "epoch": 21.151297625621204, "grad_norm": 0.10869892686605453, "learning_rate": 2.6948e-05, "loss": 0.0031, "step": 19163 }, { "epoch": 21.152401987852016, "grad_norm": 0.09493283182382584, "learning_rate": 2.694766666666667e-05, "loss": 0.0025, "step": 19164 }, { "epoch": 21.15350635008283, "grad_norm": 0.12784594297409058, "learning_rate": 2.6947333333333332e-05, "loss": 0.0036, "step": 19165 }, { "epoch": 21.154610712313637, "grad_norm": 0.22788870334625244, "learning_rate": 2.6947e-05, "loss": 0.003, "step": 19166 }, { "epoch": 21.15571507454445, "grad_norm": 0.28732314705848694, "learning_rate": 2.6946666666666667e-05, "loss": 0.004, "step": 19167 }, { "epoch": 21.156819436775262, "grad_norm": 0.17953738570213318, "learning_rate": 2.6946333333333333e-05, "loss": 0.0061, "step": 19168 }, { "epoch": 21.157923799006074, "grad_norm": 0.4006228744983673, "learning_rate": 2.6946e-05, "loss": 0.0043, "step": 19169 }, { "epoch": 21.159028161236886, "grad_norm": 0.3485228717327118, "learning_rate": 2.694566666666667e-05, "loss": 0.005, "step": 19170 }, { "epoch": 21.1601325234677, "grad_norm": 0.36338701844215393, "learning_rate": 2.6945333333333334e-05, "loss": 0.0023, "step": 19171 }, { "epoch": 21.16123688569851, "grad_norm": 0.25678667426109314, "learning_rate": 2.6945e-05, "loss": 0.0049, "step": 19172 }, { "epoch": 21.16234124792932, "grad_norm": 0.03998821973800659, "learning_rate": 2.694466666666667e-05, "loss": 0.0007, "step": 19173 }, { "epoch": 21.16344561016013, "grad_norm": 0.18087545037269592, "learning_rate": 2.6944333333333332e-05, "loss": 0.0044, "step": 19174 }, { "epoch": 21.164549972390944, "grad_norm": 0.5500821471214294, "learning_rate": 2.6944e-05, "loss": 0.0054, "step": 19175 }, { "epoch": 21.165654334621756, "grad_norm": 0.09322184324264526, "learning_rate": 2.6943666666666667e-05, "loss": 0.0018, "step": 19176 }, { "epoch": 21.16675869685257, "grad_norm": 
0.6532526016235352, "learning_rate": 2.6943333333333333e-05, "loss": 0.0792, "step": 19177 }, { "epoch": 21.16786305908338, "grad_norm": 0.5942153334617615, "learning_rate": 2.6943e-05, "loss": 0.1214, "step": 19178 }, { "epoch": 21.168967421314193, "grad_norm": 0.3622051775455475, "learning_rate": 2.694266666666667e-05, "loss": 0.0637, "step": 19179 }, { "epoch": 21.170071783545, "grad_norm": 0.3890300691127777, "learning_rate": 2.6942333333333335e-05, "loss": 0.0343, "step": 19180 }, { "epoch": 21.171176145775814, "grad_norm": 0.24985790252685547, "learning_rate": 2.6942e-05, "loss": 0.0317, "step": 19181 }, { "epoch": 21.172280508006626, "grad_norm": 0.2395949363708496, "learning_rate": 2.694166666666667e-05, "loss": 0.0122, "step": 19182 }, { "epoch": 21.173384870237438, "grad_norm": 0.29546794295310974, "learning_rate": 2.6941333333333332e-05, "loss": 0.0125, "step": 19183 }, { "epoch": 21.17448923246825, "grad_norm": 0.34326910972595215, "learning_rate": 2.6941000000000002e-05, "loss": 0.0123, "step": 19184 }, { "epoch": 21.175593594699063, "grad_norm": 0.2289993166923523, "learning_rate": 2.6940666666666668e-05, "loss": 0.0219, "step": 19185 }, { "epoch": 21.17669795692987, "grad_norm": 0.17228350043296814, "learning_rate": 2.6940333333333334e-05, "loss": 0.0082, "step": 19186 }, { "epoch": 21.177802319160683, "grad_norm": 0.1616481989622116, "learning_rate": 2.6940000000000003e-05, "loss": 0.006, "step": 19187 }, { "epoch": 21.178906681391496, "grad_norm": 0.1999530792236328, "learning_rate": 2.6939666666666666e-05, "loss": 0.0075, "step": 19188 }, { "epoch": 21.180011043622308, "grad_norm": 0.14489232003688812, "learning_rate": 2.6939333333333335e-05, "loss": 0.0035, "step": 19189 }, { "epoch": 21.18111540585312, "grad_norm": 0.2938917875289917, "learning_rate": 2.6939e-05, "loss": 0.0051, "step": 19190 }, { "epoch": 21.182219768083932, "grad_norm": 0.08105697482824326, "learning_rate": 2.6938666666666667e-05, "loss": 0.0027, "step": 19191 }, { "epoch": 
21.183324130314745, "grad_norm": 0.22483882308006287, "learning_rate": 2.6938333333333333e-05, "loss": 0.0074, "step": 19192 }, { "epoch": 21.184428492545553, "grad_norm": 0.09576063603162766, "learning_rate": 2.6938000000000002e-05, "loss": 0.0042, "step": 19193 }, { "epoch": 21.185532854776365, "grad_norm": 0.6232733130455017, "learning_rate": 2.6937666666666665e-05, "loss": 0.0239, "step": 19194 }, { "epoch": 21.186637217007178, "grad_norm": 0.12908543646335602, "learning_rate": 2.6937333333333334e-05, "loss": 0.0027, "step": 19195 }, { "epoch": 21.18774157923799, "grad_norm": 0.14729882776737213, "learning_rate": 2.6937000000000003e-05, "loss": 0.0041, "step": 19196 }, { "epoch": 21.188845941468802, "grad_norm": 0.2721255421638489, "learning_rate": 2.6936666666666666e-05, "loss": 0.0088, "step": 19197 }, { "epoch": 21.189950303699614, "grad_norm": 0.1358923763036728, "learning_rate": 2.6936333333333335e-05, "loss": 0.0058, "step": 19198 }, { "epoch": 21.191054665930427, "grad_norm": 0.13946333527565002, "learning_rate": 2.6936e-05, "loss": 0.0039, "step": 19199 }, { "epoch": 21.192159028161235, "grad_norm": 0.07391718029975891, "learning_rate": 2.6935666666666667e-05, "loss": 0.0021, "step": 19200 }, { "epoch": 21.193263390392048, "grad_norm": 0.06007780879735947, "learning_rate": 2.6935333333333333e-05, "loss": 0.0015, "step": 19201 }, { "epoch": 21.19436775262286, "grad_norm": 0.08917539566755295, "learning_rate": 2.6935000000000002e-05, "loss": 0.0038, "step": 19202 }, { "epoch": 21.195472114853672, "grad_norm": 0.23296694457530975, "learning_rate": 2.6934666666666665e-05, "loss": 0.0073, "step": 19203 }, { "epoch": 21.196576477084484, "grad_norm": 0.8293104767799377, "learning_rate": 2.6934333333333334e-05, "loss": 0.0069, "step": 19204 }, { "epoch": 21.197680839315296, "grad_norm": 0.2836299240589142, "learning_rate": 2.6934000000000003e-05, "loss": 0.0056, "step": 19205 }, { "epoch": 21.19878520154611, "grad_norm": 0.20567721128463745, "learning_rate": 
2.6933666666666666e-05, "loss": 0.0048, "step": 19206 }, { "epoch": 21.199889563776917, "grad_norm": 0.4231003224849701, "learning_rate": 2.6933333333333335e-05, "loss": 0.0155, "step": 19207 }, { "epoch": 21.20099392600773, "grad_norm": 0.12506483495235443, "learning_rate": 2.6933e-05, "loss": 0.0026, "step": 19208 }, { "epoch": 21.202098288238542, "grad_norm": 0.18970556557178497, "learning_rate": 2.6932666666666667e-05, "loss": 0.0061, "step": 19209 }, { "epoch": 21.203202650469354, "grad_norm": 0.13680025935173035, "learning_rate": 2.6932333333333333e-05, "loss": 0.0042, "step": 19210 }, { "epoch": 21.204307012700166, "grad_norm": 0.27479392290115356, "learning_rate": 2.6932000000000002e-05, "loss": 0.0084, "step": 19211 }, { "epoch": 21.20541137493098, "grad_norm": 0.2686418890953064, "learning_rate": 2.6931666666666668e-05, "loss": 0.0081, "step": 19212 }, { "epoch": 21.20651573716179, "grad_norm": 0.243791863322258, "learning_rate": 2.6931333333333334e-05, "loss": 0.0075, "step": 19213 }, { "epoch": 21.2076200993926, "grad_norm": 0.11557970196008682, "learning_rate": 2.6931000000000004e-05, "loss": 0.0038, "step": 19214 }, { "epoch": 21.20872446162341, "grad_norm": 0.41085320711135864, "learning_rate": 2.6930666666666666e-05, "loss": 0.0064, "step": 19215 }, { "epoch": 21.209828823854224, "grad_norm": 0.22450736165046692, "learning_rate": 2.6930333333333335e-05, "loss": 0.0036, "step": 19216 }, { "epoch": 21.210933186085036, "grad_norm": 0.15799568593502045, "learning_rate": 2.6929999999999998e-05, "loss": 0.0054, "step": 19217 }, { "epoch": 21.21203754831585, "grad_norm": 0.08759836852550507, "learning_rate": 2.6929666666666667e-05, "loss": 0.0021, "step": 19218 }, { "epoch": 21.21314191054666, "grad_norm": 0.1419721245765686, "learning_rate": 2.6929333333333333e-05, "loss": 0.0035, "step": 19219 }, { "epoch": 21.214246272777473, "grad_norm": 0.11921317875385284, "learning_rate": 2.6929e-05, "loss": 0.0017, "step": 19220 }, { "epoch": 21.21535063500828, 
"grad_norm": 0.09730326384305954, "learning_rate": 2.692866666666667e-05, "loss": 0.0022, "step": 19221 }, { "epoch": 21.216454997239094, "grad_norm": 0.19356700778007507, "learning_rate": 2.6928333333333334e-05, "loss": 0.0046, "step": 19222 }, { "epoch": 21.217559359469906, "grad_norm": 0.38173583149909973, "learning_rate": 2.6928e-05, "loss": 0.0028, "step": 19223 }, { "epoch": 21.218663721700718, "grad_norm": 0.12963764369487762, "learning_rate": 2.6927666666666666e-05, "loss": 0.0043, "step": 19224 }, { "epoch": 21.21976808393153, "grad_norm": 0.40953028202056885, "learning_rate": 2.6927333333333336e-05, "loss": 0.0073, "step": 19225 }, { "epoch": 21.220872446162343, "grad_norm": 0.07362846285104752, "learning_rate": 2.6926999999999998e-05, "loss": 0.0015, "step": 19226 }, { "epoch": 21.22197680839315, "grad_norm": 0.43147698044776917, "learning_rate": 2.6926666666666667e-05, "loss": 0.1071, "step": 19227 }, { "epoch": 21.223081170623963, "grad_norm": 0.28200873732566833, "learning_rate": 2.6926333333333333e-05, "loss": 0.055, "step": 19228 }, { "epoch": 21.224185532854776, "grad_norm": 0.3965873718261719, "learning_rate": 2.6926e-05, "loss": 0.0484, "step": 19229 }, { "epoch": 21.225289895085588, "grad_norm": 0.8218358755111694, "learning_rate": 2.692566666666667e-05, "loss": 0.06, "step": 19230 }, { "epoch": 21.2263942573164, "grad_norm": 0.3031015396118164, "learning_rate": 2.6925333333333335e-05, "loss": 0.0265, "step": 19231 }, { "epoch": 21.227498619547212, "grad_norm": 0.24089346826076508, "learning_rate": 2.6925e-05, "loss": 0.0271, "step": 19232 }, { "epoch": 21.228602981778025, "grad_norm": 0.35038071870803833, "learning_rate": 2.6924666666666666e-05, "loss": 0.0821, "step": 19233 }, { "epoch": 21.229707344008833, "grad_norm": 0.31777775287628174, "learning_rate": 2.6924333333333336e-05, "loss": 0.0171, "step": 19234 }, { "epoch": 21.230811706239646, "grad_norm": 0.13142339885234833, "learning_rate": 2.6924e-05, "loss": 0.0081, "step": 19235 }, { 
"epoch": 21.231916068470458, "grad_norm": 0.23362505435943604, "learning_rate": 2.6923666666666668e-05, "loss": 0.0091, "step": 19236 }, { "epoch": 21.23302043070127, "grad_norm": 0.14437229931354523, "learning_rate": 2.6923333333333337e-05, "loss": 0.0072, "step": 19237 }, { "epoch": 21.234124792932082, "grad_norm": 0.21527878940105438, "learning_rate": 2.6923e-05, "loss": 0.004, "step": 19238 }, { "epoch": 21.235229155162894, "grad_norm": 0.14942103624343872, "learning_rate": 2.692266666666667e-05, "loss": 0.0045, "step": 19239 }, { "epoch": 21.236333517393707, "grad_norm": 0.22855156660079956, "learning_rate": 2.6922333333333335e-05, "loss": 0.0035, "step": 19240 }, { "epoch": 21.237437879624515, "grad_norm": 0.10181273519992828, "learning_rate": 2.6922e-05, "loss": 0.0022, "step": 19241 }, { "epoch": 21.238542241855328, "grad_norm": 0.06717903167009354, "learning_rate": 2.6921666666666667e-05, "loss": 0.0024, "step": 19242 }, { "epoch": 21.23964660408614, "grad_norm": 0.1510019749403, "learning_rate": 2.6921333333333336e-05, "loss": 0.0038, "step": 19243 }, { "epoch": 21.240750966316952, "grad_norm": 0.18930353224277496, "learning_rate": 2.6921e-05, "loss": 0.0034, "step": 19244 }, { "epoch": 21.241855328547764, "grad_norm": 0.1470361351966858, "learning_rate": 2.6920666666666668e-05, "loss": 0.0031, "step": 19245 }, { "epoch": 21.242959690778576, "grad_norm": 0.15953443944454193, "learning_rate": 2.6920333333333334e-05, "loss": 0.0072, "step": 19246 }, { "epoch": 21.24406405300939, "grad_norm": 0.24170924723148346, "learning_rate": 2.692e-05, "loss": 0.0028, "step": 19247 }, { "epoch": 21.245168415240197, "grad_norm": 0.08578798174858093, "learning_rate": 2.691966666666667e-05, "loss": 0.0017, "step": 19248 }, { "epoch": 21.24627277747101, "grad_norm": 0.12131841480731964, "learning_rate": 2.691933333333333e-05, "loss": 0.003, "step": 19249 }, { "epoch": 21.247377139701822, "grad_norm": 0.2261403501033783, "learning_rate": 2.6919e-05, "loss": 0.0274, "step": 
19250 }, { "epoch": 21.248481501932634, "grad_norm": 0.1293231099843979, "learning_rate": 2.6918666666666667e-05, "loss": 0.0035, "step": 19251 }, { "epoch": 21.249585864163446, "grad_norm": 0.06729964911937714, "learning_rate": 2.6918333333333333e-05, "loss": 0.0018, "step": 19252 }, { "epoch": 21.25069022639426, "grad_norm": 0.7479619979858398, "learning_rate": 2.6918e-05, "loss": 0.007, "step": 19253 }, { "epoch": 21.25179458862507, "grad_norm": 0.24522271752357483, "learning_rate": 2.6917666666666668e-05, "loss": 0.0034, "step": 19254 }, { "epoch": 21.25289895085588, "grad_norm": 0.1498429924249649, "learning_rate": 2.6917333333333334e-05, "loss": 0.0013, "step": 19255 }, { "epoch": 21.25400331308669, "grad_norm": 0.08477500826120377, "learning_rate": 2.6917e-05, "loss": 0.0023, "step": 19256 }, { "epoch": 21.255107675317504, "grad_norm": 0.10381649434566498, "learning_rate": 2.691666666666667e-05, "loss": 0.0034, "step": 19257 }, { "epoch": 21.256212037548316, "grad_norm": 0.054555073380470276, "learning_rate": 2.6916333333333332e-05, "loss": 0.0012, "step": 19258 }, { "epoch": 21.25731639977913, "grad_norm": 0.3527766466140747, "learning_rate": 2.6916e-05, "loss": 0.0084, "step": 19259 }, { "epoch": 21.25842076200994, "grad_norm": 0.3609599471092224, "learning_rate": 2.6915666666666667e-05, "loss": 0.0097, "step": 19260 }, { "epoch": 21.25952512424075, "grad_norm": 0.5483253598213196, "learning_rate": 2.6915333333333333e-05, "loss": 0.0033, "step": 19261 }, { "epoch": 21.26062948647156, "grad_norm": 0.13844236731529236, "learning_rate": 2.6915000000000002e-05, "loss": 0.0038, "step": 19262 }, { "epoch": 21.261733848702374, "grad_norm": 0.16062921285629272, "learning_rate": 2.6914666666666668e-05, "loss": 0.0032, "step": 19263 }, { "epoch": 21.262838210933186, "grad_norm": 0.26325827836990356, "learning_rate": 2.6914333333333334e-05, "loss": 0.0068, "step": 19264 }, { "epoch": 21.263942573163998, "grad_norm": 0.7173252105712891, "learning_rate": 2.6914e-05, 
"loss": 0.0082, "step": 19265 }, { "epoch": 21.26504693539481, "grad_norm": 0.1053924635052681, "learning_rate": 2.691366666666667e-05, "loss": 0.0031, "step": 19266 }, { "epoch": 21.266151297625623, "grad_norm": 0.16693009436130524, "learning_rate": 2.6913333333333332e-05, "loss": 0.006, "step": 19267 }, { "epoch": 21.26725565985643, "grad_norm": 0.05477302521467209, "learning_rate": 2.6913e-05, "loss": 0.0014, "step": 19268 }, { "epoch": 21.268360022087244, "grad_norm": 0.17860518395900726, "learning_rate": 2.6912666666666667e-05, "loss": 0.0039, "step": 19269 }, { "epoch": 21.269464384318056, "grad_norm": 0.2192857712507248, "learning_rate": 2.6912333333333333e-05, "loss": 0.0051, "step": 19270 }, { "epoch": 21.270568746548868, "grad_norm": 0.15354636311531067, "learning_rate": 2.6912000000000003e-05, "loss": 0.0017, "step": 19271 }, { "epoch": 21.27167310877968, "grad_norm": 0.13594591617584229, "learning_rate": 2.691166666666667e-05, "loss": 0.0023, "step": 19272 }, { "epoch": 21.272777471010492, "grad_norm": 0.1521722674369812, "learning_rate": 2.6911333333333334e-05, "loss": 0.002, "step": 19273 }, { "epoch": 21.273881833241305, "grad_norm": 0.1445113867521286, "learning_rate": 2.6911e-05, "loss": 0.0022, "step": 19274 }, { "epoch": 21.274986195472113, "grad_norm": 0.22349263727664948, "learning_rate": 2.691066666666667e-05, "loss": 0.0037, "step": 19275 }, { "epoch": 21.276090557702926, "grad_norm": 1.0084179639816284, "learning_rate": 2.6910333333333332e-05, "loss": 0.0119, "step": 19276 }, { "epoch": 21.277194919933738, "grad_norm": 0.49816983938217163, "learning_rate": 2.691e-05, "loss": 0.1093, "step": 19277 }, { "epoch": 21.27829928216455, "grad_norm": 0.711715817451477, "learning_rate": 2.6909666666666664e-05, "loss": 0.127, "step": 19278 }, { "epoch": 21.279403644395362, "grad_norm": 0.38904619216918945, "learning_rate": 2.6909333333333333e-05, "loss": 0.0715, "step": 19279 }, { "epoch": 21.280508006626174, "grad_norm": 0.4067293703556061, 
"learning_rate": 2.6909000000000003e-05, "loss": 0.0466, "step": 19280 }, { "epoch": 21.281612368856987, "grad_norm": 0.3767175078392029, "learning_rate": 2.6908666666666665e-05, "loss": 0.0318, "step": 19281 }, { "epoch": 21.282716731087795, "grad_norm": 0.2719205915927887, "learning_rate": 2.6908333333333335e-05, "loss": 0.0264, "step": 19282 }, { "epoch": 21.283821093318608, "grad_norm": 0.39137542247772217, "learning_rate": 2.6908e-05, "loss": 0.0369, "step": 19283 }, { "epoch": 21.28492545554942, "grad_norm": 0.24639926850795746, "learning_rate": 2.6907666666666666e-05, "loss": 0.0175, "step": 19284 }, { "epoch": 21.286029817780232, "grad_norm": 0.14390552043914795, "learning_rate": 2.6907333333333332e-05, "loss": 0.0058, "step": 19285 }, { "epoch": 21.287134180011044, "grad_norm": 0.2229992300271988, "learning_rate": 2.6907000000000002e-05, "loss": 0.0318, "step": 19286 }, { "epoch": 21.288238542241857, "grad_norm": 0.22408655285835266, "learning_rate": 2.6906666666666668e-05, "loss": 0.0061, "step": 19287 }, { "epoch": 21.28934290447267, "grad_norm": 0.3624798655509949, "learning_rate": 2.6906333333333334e-05, "loss": 0.0063, "step": 19288 }, { "epoch": 21.290447266703477, "grad_norm": 0.20629101991653442, "learning_rate": 2.6906000000000003e-05, "loss": 0.0096, "step": 19289 }, { "epoch": 21.29155162893429, "grad_norm": 0.11668527126312256, "learning_rate": 2.6905666666666665e-05, "loss": 0.0191, "step": 19290 }, { "epoch": 21.292655991165102, "grad_norm": 0.10286156833171844, "learning_rate": 2.6905333333333335e-05, "loss": 0.0032, "step": 19291 }, { "epoch": 21.293760353395914, "grad_norm": 0.23733742535114288, "learning_rate": 2.6905e-05, "loss": 0.0064, "step": 19292 }, { "epoch": 21.294864715626726, "grad_norm": 0.18588116765022278, "learning_rate": 2.6904666666666667e-05, "loss": 0.0049, "step": 19293 }, { "epoch": 21.29596907785754, "grad_norm": 0.11499376595020294, "learning_rate": 2.6904333333333333e-05, "loss": 0.0073, "step": 19294 }, { "epoch": 
21.297073440088347, "grad_norm": 0.2289114147424698, "learning_rate": 2.6904000000000002e-05, "loss": 0.012, "step": 19295 }, { "epoch": 21.29817780231916, "grad_norm": 0.09284321218729019, "learning_rate": 2.6903666666666668e-05, "loss": 0.0025, "step": 19296 }, { "epoch": 21.29928216454997, "grad_norm": 0.2197883278131485, "learning_rate": 2.6903333333333334e-05, "loss": 0.0096, "step": 19297 }, { "epoch": 21.300386526780784, "grad_norm": 0.11276048421859741, "learning_rate": 2.6903000000000003e-05, "loss": 0.003, "step": 19298 }, { "epoch": 21.301490889011596, "grad_norm": 0.12136879563331604, "learning_rate": 2.6902666666666666e-05, "loss": 0.0057, "step": 19299 }, { "epoch": 21.30259525124241, "grad_norm": 0.32774561643600464, "learning_rate": 2.6902333333333335e-05, "loss": 0.0075, "step": 19300 }, { "epoch": 21.30369961347322, "grad_norm": 0.2200748324394226, "learning_rate": 2.6902e-05, "loss": 0.0088, "step": 19301 }, { "epoch": 21.30480397570403, "grad_norm": 0.13440607488155365, "learning_rate": 2.6901666666666667e-05, "loss": 0.0024, "step": 19302 }, { "epoch": 21.30590833793484, "grad_norm": 0.15354697406291962, "learning_rate": 2.6901333333333333e-05, "loss": 0.0053, "step": 19303 }, { "epoch": 21.307012700165654, "grad_norm": 0.1369941234588623, "learning_rate": 2.6901000000000002e-05, "loss": 0.0047, "step": 19304 }, { "epoch": 21.308117062396466, "grad_norm": 0.21896879374980927, "learning_rate": 2.6900666666666668e-05, "loss": 0.0296, "step": 19305 }, { "epoch": 21.309221424627278, "grad_norm": 0.16871453821659088, "learning_rate": 2.6900333333333334e-05, "loss": 0.0058, "step": 19306 }, { "epoch": 21.31032578685809, "grad_norm": 0.1809110790491104, "learning_rate": 2.69e-05, "loss": 0.0062, "step": 19307 }, { "epoch": 21.311430149088903, "grad_norm": 0.1553642749786377, "learning_rate": 2.6899666666666666e-05, "loss": 0.0038, "step": 19308 }, { "epoch": 21.31253451131971, "grad_norm": 0.2742408514022827, "learning_rate": 2.6899333333333335e-05, 
"loss": 0.0051, "step": 19309 }, { "epoch": 21.313638873550524, "grad_norm": 0.10104651749134064, "learning_rate": 2.6898999999999998e-05, "loss": 0.0025, "step": 19310 }, { "epoch": 21.314743235781336, "grad_norm": 0.18505507707595825, "learning_rate": 2.6898666666666667e-05, "loss": 0.0066, "step": 19311 }, { "epoch": 21.315847598012148, "grad_norm": 0.17287689447402954, "learning_rate": 2.6898333333333336e-05, "loss": 0.0041, "step": 19312 }, { "epoch": 21.31695196024296, "grad_norm": 0.18990547955036163, "learning_rate": 2.6898e-05, "loss": 0.0054, "step": 19313 }, { "epoch": 21.318056322473772, "grad_norm": 0.7504847049713135, "learning_rate": 2.6897666666666668e-05, "loss": 0.0077, "step": 19314 }, { "epoch": 21.319160684704585, "grad_norm": 0.1625525802373886, "learning_rate": 2.6897333333333334e-05, "loss": 0.0047, "step": 19315 }, { "epoch": 21.320265046935393, "grad_norm": 0.25962647795677185, "learning_rate": 2.6897e-05, "loss": 0.0049, "step": 19316 }, { "epoch": 21.321369409166206, "grad_norm": 0.3245936930179596, "learning_rate": 2.6896666666666666e-05, "loss": 0.0057, "step": 19317 }, { "epoch": 21.322473771397018, "grad_norm": 0.08017444610595703, "learning_rate": 2.6896333333333335e-05, "loss": 0.0012, "step": 19318 }, { "epoch": 21.32357813362783, "grad_norm": 0.15089185535907745, "learning_rate": 2.6895999999999998e-05, "loss": 0.0047, "step": 19319 }, { "epoch": 21.324682495858642, "grad_norm": 0.0467604398727417, "learning_rate": 2.6895666666666667e-05, "loss": 0.0012, "step": 19320 }, { "epoch": 21.325786858089455, "grad_norm": 0.09615226835012436, "learning_rate": 2.6895333333333337e-05, "loss": 0.0032, "step": 19321 }, { "epoch": 21.326891220320267, "grad_norm": 0.2536756992340088, "learning_rate": 2.6895e-05, "loss": 0.0054, "step": 19322 }, { "epoch": 21.327995582551075, "grad_norm": 0.12984436750411987, "learning_rate": 2.689466666666667e-05, "loss": 0.0023, "step": 19323 }, { "epoch": 21.329099944781888, "grad_norm": 0.17717276513576508, 
"learning_rate": 2.6894333333333334e-05, "loss": 0.0026, "step": 19324 }, { "epoch": 21.3302043070127, "grad_norm": 0.22104303538799286, "learning_rate": 2.6894e-05, "loss": 0.0038, "step": 19325 }, { "epoch": 21.331308669243512, "grad_norm": 0.4923485219478607, "learning_rate": 2.6893666666666666e-05, "loss": 0.0068, "step": 19326 }, { "epoch": 21.332413031474324, "grad_norm": 0.5576447248458862, "learning_rate": 2.6893333333333336e-05, "loss": 0.1147, "step": 19327 }, { "epoch": 21.333517393705137, "grad_norm": 0.3789423406124115, "learning_rate": 2.6892999999999998e-05, "loss": 0.0528, "step": 19328 }, { "epoch": 21.33462175593595, "grad_norm": 0.4047169089317322, "learning_rate": 2.6892666666666667e-05, "loss": 0.0624, "step": 19329 }, { "epoch": 21.335726118166757, "grad_norm": 0.862545907497406, "learning_rate": 2.6892333333333337e-05, "loss": 0.0494, "step": 19330 }, { "epoch": 21.33683048039757, "grad_norm": 0.3666703402996063, "learning_rate": 2.6892e-05, "loss": 0.0605, "step": 19331 }, { "epoch": 21.337934842628382, "grad_norm": 0.25018805265426636, "learning_rate": 2.689166666666667e-05, "loss": 0.0366, "step": 19332 }, { "epoch": 21.339039204859194, "grad_norm": 0.20166189968585968, "learning_rate": 2.6891333333333335e-05, "loss": 0.0391, "step": 19333 }, { "epoch": 21.340143567090006, "grad_norm": 0.397109717130661, "learning_rate": 2.6891e-05, "loss": 0.0102, "step": 19334 }, { "epoch": 21.34124792932082, "grad_norm": 0.4657767117023468, "learning_rate": 2.6890666666666666e-05, "loss": 0.0215, "step": 19335 }, { "epoch": 21.342352291551627, "grad_norm": 0.2092771828174591, "learning_rate": 2.6890333333333336e-05, "loss": 0.0112, "step": 19336 }, { "epoch": 21.34345665378244, "grad_norm": 0.31477847695350647, "learning_rate": 2.6890000000000002e-05, "loss": 0.0241, "step": 19337 }, { "epoch": 21.34456101601325, "grad_norm": 0.13664153218269348, "learning_rate": 2.6889666666666668e-05, "loss": 0.0043, "step": 19338 }, { "epoch": 21.345665378244064, 
"grad_norm": 0.3217944800853729, "learning_rate": 2.6889333333333334e-05, "loss": 0.0086, "step": 19339 }, { "epoch": 21.346769740474876, "grad_norm": 0.1760837286710739, "learning_rate": 2.6889e-05, "loss": 0.0053, "step": 19340 }, { "epoch": 21.34787410270569, "grad_norm": 0.17236359417438507, "learning_rate": 2.688866666666667e-05, "loss": 0.0054, "step": 19341 }, { "epoch": 21.3489784649365, "grad_norm": 0.12777961790561676, "learning_rate": 2.688833333333333e-05, "loss": 0.0011, "step": 19342 }, { "epoch": 21.35008282716731, "grad_norm": 0.12657812237739563, "learning_rate": 2.6888e-05, "loss": 0.0035, "step": 19343 }, { "epoch": 21.35118718939812, "grad_norm": 0.1163269430398941, "learning_rate": 2.6887666666666667e-05, "loss": 0.0035, "step": 19344 }, { "epoch": 21.352291551628934, "grad_norm": 0.27059710025787354, "learning_rate": 2.6887333333333333e-05, "loss": 0.0037, "step": 19345 }, { "epoch": 21.353395913859746, "grad_norm": 0.15584297478199005, "learning_rate": 2.6887000000000002e-05, "loss": 0.0028, "step": 19346 }, { "epoch": 21.35450027609056, "grad_norm": 0.2725503444671631, "learning_rate": 2.6886666666666668e-05, "loss": 0.0087, "step": 19347 }, { "epoch": 21.35560463832137, "grad_norm": 0.18277965486049652, "learning_rate": 2.6886333333333334e-05, "loss": 0.0088, "step": 19348 }, { "epoch": 21.356709000552183, "grad_norm": 0.2025757133960724, "learning_rate": 2.6886e-05, "loss": 0.005, "step": 19349 }, { "epoch": 21.35781336278299, "grad_norm": 0.29995429515838623, "learning_rate": 2.688566666666667e-05, "loss": 0.0049, "step": 19350 }, { "epoch": 21.358917725013804, "grad_norm": 0.4882253408432007, "learning_rate": 2.688533333333333e-05, "loss": 0.0051, "step": 19351 }, { "epoch": 21.360022087244616, "grad_norm": 0.26663291454315186, "learning_rate": 2.6885e-05, "loss": 0.004, "step": 19352 }, { "epoch": 21.361126449475428, "grad_norm": 0.2956540286540985, "learning_rate": 2.6884666666666667e-05, "loss": 0.0045, "step": 19353 }, { "epoch": 
21.36223081170624, "grad_norm": 0.11317646503448486, "learning_rate": 2.6884333333333333e-05, "loss": 0.003, "step": 19354 }, { "epoch": 21.363335173937053, "grad_norm": 0.0541272833943367, "learning_rate": 2.6884000000000002e-05, "loss": 0.0014, "step": 19355 }, { "epoch": 21.364439536167865, "grad_norm": 0.20607587695121765, "learning_rate": 2.6883666666666668e-05, "loss": 0.0031, "step": 19356 }, { "epoch": 21.365543898398673, "grad_norm": 0.12462438642978668, "learning_rate": 2.6883333333333334e-05, "loss": 0.0032, "step": 19357 }, { "epoch": 21.366648260629486, "grad_norm": 0.20677968859672546, "learning_rate": 2.6883e-05, "loss": 0.0037, "step": 19358 }, { "epoch": 21.367752622860298, "grad_norm": 0.09808099269866943, "learning_rate": 2.688266666666667e-05, "loss": 0.0033, "step": 19359 }, { "epoch": 21.36885698509111, "grad_norm": 0.26461872458457947, "learning_rate": 2.6882333333333332e-05, "loss": 0.002, "step": 19360 }, { "epoch": 21.369961347321922, "grad_norm": 0.10078731179237366, "learning_rate": 2.6882e-05, "loss": 0.0025, "step": 19361 }, { "epoch": 21.371065709552735, "grad_norm": 0.09887181222438812, "learning_rate": 2.688166666666667e-05, "loss": 0.0031, "step": 19362 }, { "epoch": 21.372170071783543, "grad_norm": 0.21842330694198608, "learning_rate": 2.6881333333333333e-05, "loss": 0.0058, "step": 19363 }, { "epoch": 21.373274434014355, "grad_norm": 0.3136024475097656, "learning_rate": 2.6881000000000002e-05, "loss": 0.0063, "step": 19364 }, { "epoch": 21.374378796245168, "grad_norm": 0.2176136076450348, "learning_rate": 2.6880666666666668e-05, "loss": 0.0072, "step": 19365 }, { "epoch": 21.37548315847598, "grad_norm": 0.13786406815052032, "learning_rate": 2.6880333333333334e-05, "loss": 0.0045, "step": 19366 }, { "epoch": 21.376587520706792, "grad_norm": 0.2367854118347168, "learning_rate": 2.688e-05, "loss": 0.0044, "step": 19367 }, { "epoch": 21.377691882937604, "grad_norm": 0.3033950626850128, "learning_rate": 2.6879666666666666e-05, "loss": 
0.008, "step": 19368 }, { "epoch": 21.378796245168417, "grad_norm": 0.528598427772522, "learning_rate": 2.6879333333333332e-05, "loss": 0.0084, "step": 19369 }, { "epoch": 21.379900607399225, "grad_norm": 0.37136995792388916, "learning_rate": 2.6879e-05, "loss": 0.0127, "step": 19370 }, { "epoch": 21.381004969630037, "grad_norm": 0.31350618600845337, "learning_rate": 2.6878666666666667e-05, "loss": 0.0027, "step": 19371 }, { "epoch": 21.38210933186085, "grad_norm": 0.08972780406475067, "learning_rate": 2.6878333333333333e-05, "loss": 0.0022, "step": 19372 }, { "epoch": 21.383213694091662, "grad_norm": 0.19780953228473663, "learning_rate": 2.6878000000000003e-05, "loss": 0.0041, "step": 19373 }, { "epoch": 21.384318056322474, "grad_norm": 0.16467782855033875, "learning_rate": 2.6877666666666665e-05, "loss": 0.0037, "step": 19374 }, { "epoch": 21.385422418553286, "grad_norm": 0.11453906446695328, "learning_rate": 2.6877333333333334e-05, "loss": 0.0021, "step": 19375 }, { "epoch": 21.3865267807841, "grad_norm": 0.1486898958683014, "learning_rate": 2.6877e-05, "loss": 0.0039, "step": 19376 }, { "epoch": 21.387631143014907, "grad_norm": 0.4228207468986511, "learning_rate": 2.6876666666666666e-05, "loss": 0.1014, "step": 19377 }, { "epoch": 21.38873550524572, "grad_norm": 0.5190631151199341, "learning_rate": 2.6876333333333332e-05, "loss": 0.1318, "step": 19378 }, { "epoch": 21.38983986747653, "grad_norm": 0.31820011138916016, "learning_rate": 2.6876e-05, "loss": 0.0447, "step": 19379 }, { "epoch": 21.390944229707344, "grad_norm": 0.4288707673549652, "learning_rate": 2.6875666666666667e-05, "loss": 0.0365, "step": 19380 }, { "epoch": 21.392048591938156, "grad_norm": 0.4471829831600189, "learning_rate": 2.6875333333333333e-05, "loss": 0.0425, "step": 19381 }, { "epoch": 21.39315295416897, "grad_norm": 0.706285297870636, "learning_rate": 2.6875000000000003e-05, "loss": 0.0289, "step": 19382 }, { "epoch": 21.39425731639978, "grad_norm": 0.4229734241962433, "learning_rate": 
2.6874666666666665e-05, "loss": 0.0244, "step": 19383 }, { "epoch": 21.39536167863059, "grad_norm": 0.8646146059036255, "learning_rate": 2.6874333333333335e-05, "loss": 0.054, "step": 19384 }, { "epoch": 21.3964660408614, "grad_norm": 0.3035491704940796, "learning_rate": 2.6874e-05, "loss": 0.0111, "step": 19385 }, { "epoch": 21.397570403092214, "grad_norm": 0.1776355355978012, "learning_rate": 2.6873666666666666e-05, "loss": 0.008, "step": 19386 }, { "epoch": 21.398674765323026, "grad_norm": 0.4228135943412781, "learning_rate": 2.6873333333333336e-05, "loss": 0.0193, "step": 19387 }, { "epoch": 21.39977912755384, "grad_norm": 0.2753608226776123, "learning_rate": 2.6873000000000002e-05, "loss": 0.0075, "step": 19388 }, { "epoch": 21.40088348978465, "grad_norm": 0.23600439727306366, "learning_rate": 2.6872666666666668e-05, "loss": 0.0038, "step": 19389 }, { "epoch": 21.401987852015463, "grad_norm": 0.2825053632259369, "learning_rate": 2.6872333333333334e-05, "loss": 0.0217, "step": 19390 }, { "epoch": 21.40309221424627, "grad_norm": 0.6878629326820374, "learning_rate": 2.6872000000000003e-05, "loss": 0.0299, "step": 19391 }, { "epoch": 21.404196576477084, "grad_norm": 0.24091307818889618, "learning_rate": 2.6871666666666665e-05, "loss": 0.0053, "step": 19392 }, { "epoch": 21.405300938707896, "grad_norm": 0.20499716699123383, "learning_rate": 2.6871333333333335e-05, "loss": 0.0051, "step": 19393 }, { "epoch": 21.406405300938708, "grad_norm": 0.15328580141067505, "learning_rate": 2.6871e-05, "loss": 0.005, "step": 19394 }, { "epoch": 21.40750966316952, "grad_norm": 0.13355794548988342, "learning_rate": 2.6870666666666667e-05, "loss": 0.007, "step": 19395 }, { "epoch": 21.408614025400333, "grad_norm": 0.11920110881328583, "learning_rate": 2.6870333333333336e-05, "loss": 0.0037, "step": 19396 }, { "epoch": 21.409718387631145, "grad_norm": 0.1727297306060791, "learning_rate": 2.6870000000000002e-05, "loss": 0.0046, "step": 19397 }, { "epoch": 21.410822749861953, 
"grad_norm": 0.36698734760284424, "learning_rate": 2.6869666666666668e-05, "loss": 0.0054, "step": 19398 }, { "epoch": 21.411927112092766, "grad_norm": 0.2615561783313751, "learning_rate": 2.6869333333333334e-05, "loss": 0.0065, "step": 19399 }, { "epoch": 21.413031474323578, "grad_norm": 0.284654438495636, "learning_rate": 2.6869e-05, "loss": 0.0068, "step": 19400 }, { "epoch": 21.41413583655439, "grad_norm": 0.4650840163230896, "learning_rate": 2.6868666666666666e-05, "loss": 0.0106, "step": 19401 }, { "epoch": 21.415240198785202, "grad_norm": 0.10181578993797302, "learning_rate": 2.6868333333333335e-05, "loss": 0.0074, "step": 19402 }, { "epoch": 21.416344561016015, "grad_norm": 0.16502566635608673, "learning_rate": 2.6867999999999998e-05, "loss": 0.0074, "step": 19403 }, { "epoch": 21.417448923246823, "grad_norm": 0.17497536540031433, "learning_rate": 2.6867666666666667e-05, "loss": 0.0043, "step": 19404 }, { "epoch": 21.418553285477635, "grad_norm": 0.8608414530754089, "learning_rate": 2.6867333333333336e-05, "loss": 0.0066, "step": 19405 }, { "epoch": 21.419657647708448, "grad_norm": 0.09149513393640518, "learning_rate": 2.6867e-05, "loss": 0.0022, "step": 19406 }, { "epoch": 21.42076200993926, "grad_norm": 0.19962923228740692, "learning_rate": 2.6866666666666668e-05, "loss": 0.008, "step": 19407 }, { "epoch": 21.421866372170072, "grad_norm": 0.43353167176246643, "learning_rate": 2.6866333333333334e-05, "loss": 0.0082, "step": 19408 }, { "epoch": 21.422970734400884, "grad_norm": 0.14401105046272278, "learning_rate": 2.6866e-05, "loss": 0.0043, "step": 19409 }, { "epoch": 21.424075096631697, "grad_norm": 0.1809152215719223, "learning_rate": 2.6865666666666666e-05, "loss": 0.007, "step": 19410 }, { "epoch": 21.425179458862505, "grad_norm": 0.9787694215774536, "learning_rate": 2.6865333333333335e-05, "loss": 0.0057, "step": 19411 }, { "epoch": 21.426283821093318, "grad_norm": 0.21664835512638092, "learning_rate": 2.6865e-05, "loss": 0.0039, "step": 19412 }, { 
"epoch": 21.42738818332413, "grad_norm": 0.7557575702667236, "learning_rate": 2.6864666666666667e-05, "loss": 0.0106, "step": 19413 }, { "epoch": 21.428492545554942, "grad_norm": 0.1954435259103775, "learning_rate": 2.6864333333333336e-05, "loss": 0.0067, "step": 19414 }, { "epoch": 21.429596907785754, "grad_norm": 0.39679792523384094, "learning_rate": 2.6864e-05, "loss": 0.0327, "step": 19415 }, { "epoch": 21.430701270016566, "grad_norm": 0.06488785892724991, "learning_rate": 2.6863666666666668e-05, "loss": 0.0022, "step": 19416 }, { "epoch": 21.43180563224738, "grad_norm": 0.10720810294151306, "learning_rate": 2.6863333333333334e-05, "loss": 0.0033, "step": 19417 }, { "epoch": 21.432909994478187, "grad_norm": 0.24620133638381958, "learning_rate": 2.6863e-05, "loss": 0.0064, "step": 19418 }, { "epoch": 21.434014356709, "grad_norm": 0.1846613734960556, "learning_rate": 2.6862666666666666e-05, "loss": 0.0035, "step": 19419 }, { "epoch": 21.435118718939812, "grad_norm": 0.18636302649974823, "learning_rate": 2.6862333333333335e-05, "loss": 0.0049, "step": 19420 }, { "epoch": 21.436223081170624, "grad_norm": 0.3912891745567322, "learning_rate": 2.6862e-05, "loss": 0.0041, "step": 19421 }, { "epoch": 21.437327443401436, "grad_norm": 0.27247223258018494, "learning_rate": 2.6861666666666667e-05, "loss": 0.0046, "step": 19422 }, { "epoch": 21.43843180563225, "grad_norm": 0.1978638470172882, "learning_rate": 2.6861333333333337e-05, "loss": 0.0049, "step": 19423 }, { "epoch": 21.43953616786306, "grad_norm": 0.11025893688201904, "learning_rate": 2.6861e-05, "loss": 0.0055, "step": 19424 }, { "epoch": 21.44064053009387, "grad_norm": 0.7087778449058533, "learning_rate": 2.686066666666667e-05, "loss": 0.0065, "step": 19425 }, { "epoch": 21.44174489232468, "grad_norm": 0.102535180747509, "learning_rate": 2.6860333333333334e-05, "loss": 0.0026, "step": 19426 }, { "epoch": 21.442849254555494, "grad_norm": 0.6184435486793518, "learning_rate": 2.686e-05, "loss": 0.1216, "step": 19427 
}, { "epoch": 21.443953616786306, "grad_norm": 0.5599024295806885, "learning_rate": 2.6859666666666666e-05, "loss": 0.0946, "step": 19428 }, { "epoch": 21.44505797901712, "grad_norm": 0.8964076638221741, "learning_rate": 2.6859333333333332e-05, "loss": 0.1067, "step": 19429 }, { "epoch": 21.44616234124793, "grad_norm": 0.5613430142402649, "learning_rate": 2.6859e-05, "loss": 0.0691, "step": 19430 }, { "epoch": 21.447266703478743, "grad_norm": 0.41856443881988525, "learning_rate": 2.6858666666666667e-05, "loss": 0.0331, "step": 19431 }, { "epoch": 21.44837106570955, "grad_norm": 0.44509157538414, "learning_rate": 2.6858333333333333e-05, "loss": 0.0461, "step": 19432 }, { "epoch": 21.449475427940364, "grad_norm": 0.21662300825119019, "learning_rate": 2.6858e-05, "loss": 0.0211, "step": 19433 }, { "epoch": 21.450579790171176, "grad_norm": 0.9545941352844238, "learning_rate": 2.685766666666667e-05, "loss": 0.027, "step": 19434 }, { "epoch": 21.451684152401988, "grad_norm": 0.265531986951828, "learning_rate": 2.685733333333333e-05, "loss": 0.0142, "step": 19435 }, { "epoch": 21.4527885146328, "grad_norm": 0.14209218323230743, "learning_rate": 2.6857e-05, "loss": 0.0061, "step": 19436 }, { "epoch": 21.453892876863613, "grad_norm": 0.12315282970666885, "learning_rate": 2.685666666666667e-05, "loss": 0.0057, "step": 19437 }, { "epoch": 21.45499723909442, "grad_norm": 0.32271304726600647, "learning_rate": 2.6856333333333332e-05, "loss": 0.0099, "step": 19438 }, { "epoch": 21.456101601325233, "grad_norm": 0.17333781719207764, "learning_rate": 2.6856000000000002e-05, "loss": 0.0367, "step": 19439 }, { "epoch": 21.457205963556046, "grad_norm": 0.348307341337204, "learning_rate": 2.6855666666666668e-05, "loss": 0.0049, "step": 19440 }, { "epoch": 21.458310325786858, "grad_norm": 0.1883474886417389, "learning_rate": 2.6855333333333334e-05, "loss": 0.0087, "step": 19441 }, { "epoch": 21.45941468801767, "grad_norm": 0.13275888562202454, "learning_rate": 2.6855e-05, "loss": 0.0052, 
"step": 19442 }, { "epoch": 21.460519050248482, "grad_norm": 0.1425308734178543, "learning_rate": 2.685466666666667e-05, "loss": 0.005, "step": 19443 }, { "epoch": 21.461623412479295, "grad_norm": 0.2130519598722458, "learning_rate": 2.685433333333333e-05, "loss": 0.0072, "step": 19444 }, { "epoch": 21.462727774710103, "grad_norm": 0.13945861160755157, "learning_rate": 2.6854e-05, "loss": 0.0062, "step": 19445 }, { "epoch": 21.463832136940916, "grad_norm": 0.1096402257680893, "learning_rate": 2.685366666666667e-05, "loss": 0.0046, "step": 19446 }, { "epoch": 21.464936499171728, "grad_norm": 0.39947691559791565, "learning_rate": 2.6853333333333333e-05, "loss": 0.0071, "step": 19447 }, { "epoch": 21.46604086140254, "grad_norm": 0.2688796818256378, "learning_rate": 2.6853000000000002e-05, "loss": 0.0106, "step": 19448 }, { "epoch": 21.467145223633352, "grad_norm": 0.0966319665312767, "learning_rate": 2.6852666666666668e-05, "loss": 0.0032, "step": 19449 }, { "epoch": 21.468249585864164, "grad_norm": 0.48396170139312744, "learning_rate": 2.6852333333333334e-05, "loss": 0.0117, "step": 19450 }, { "epoch": 21.469353948094977, "grad_norm": 0.31445592641830444, "learning_rate": 2.6852e-05, "loss": 0.0099, "step": 19451 }, { "epoch": 21.470458310325785, "grad_norm": 0.11072557419538498, "learning_rate": 2.685166666666667e-05, "loss": 0.0033, "step": 19452 }, { "epoch": 21.471562672556598, "grad_norm": 0.19939997792243958, "learning_rate": 2.685133333333333e-05, "loss": 0.0059, "step": 19453 }, { "epoch": 21.47266703478741, "grad_norm": 0.11990829557180405, "learning_rate": 2.6851e-05, "loss": 0.0039, "step": 19454 }, { "epoch": 21.473771397018222, "grad_norm": 0.10791633278131485, "learning_rate": 2.685066666666667e-05, "loss": 0.0022, "step": 19455 }, { "epoch": 21.474875759249034, "grad_norm": 0.1697859913110733, "learning_rate": 2.6850333333333333e-05, "loss": 0.0038, "step": 19456 }, { "epoch": 21.475980121479846, "grad_norm": 0.243336021900177, "learning_rate": 
2.6850000000000002e-05, "loss": 0.0041, "step": 19457 }, { "epoch": 21.47708448371066, "grad_norm": 0.11022138595581055, "learning_rate": 2.6849666666666665e-05, "loss": 0.0029, "step": 19458 }, { "epoch": 21.478188845941467, "grad_norm": 0.08717619627714157, "learning_rate": 2.6849333333333334e-05, "loss": 0.0029, "step": 19459 }, { "epoch": 21.47929320817228, "grad_norm": 0.10991857945919037, "learning_rate": 2.6849e-05, "loss": 0.0053, "step": 19460 }, { "epoch": 21.480397570403092, "grad_norm": 0.12422435730695724, "learning_rate": 2.6848666666666666e-05, "loss": 0.0035, "step": 19461 }, { "epoch": 21.481501932633904, "grad_norm": 0.1307874470949173, "learning_rate": 2.6848333333333335e-05, "loss": 0.0034, "step": 19462 }, { "epoch": 21.482606294864716, "grad_norm": 0.15038242936134338, "learning_rate": 2.6848e-05, "loss": 0.0046, "step": 19463 }, { "epoch": 21.48371065709553, "grad_norm": 0.1848953515291214, "learning_rate": 2.6847666666666667e-05, "loss": 0.0039, "step": 19464 }, { "epoch": 21.48481501932634, "grad_norm": 0.3545786440372467, "learning_rate": 2.6847333333333333e-05, "loss": 0.0068, "step": 19465 }, { "epoch": 21.48591938155715, "grad_norm": 0.15305499732494354, "learning_rate": 2.6847000000000002e-05, "loss": 0.0033, "step": 19466 }, { "epoch": 21.48702374378796, "grad_norm": 0.06932034343481064, "learning_rate": 2.6846666666666665e-05, "loss": 0.0021, "step": 19467 }, { "epoch": 21.488128106018774, "grad_norm": 0.1647970825433731, "learning_rate": 2.6846333333333334e-05, "loss": 0.0043, "step": 19468 }, { "epoch": 21.489232468249586, "grad_norm": 0.2907488942146301, "learning_rate": 2.6846e-05, "loss": 0.0054, "step": 19469 }, { "epoch": 21.4903368304804, "grad_norm": 0.3699929714202881, "learning_rate": 2.6845666666666666e-05, "loss": 0.012, "step": 19470 }, { "epoch": 21.49144119271121, "grad_norm": 0.15237785875797272, "learning_rate": 2.6845333333333335e-05, "loss": 0.0053, "step": 19471 }, { "epoch": 21.49254555494202, "grad_norm": 
0.2667849063873291, "learning_rate": 2.6845e-05, "loss": 0.0125, "step": 19472 }, { "epoch": 21.49364991717283, "grad_norm": 0.34623709321022034, "learning_rate": 2.6844666666666667e-05, "loss": 0.0059, "step": 19473 }, { "epoch": 21.494754279403644, "grad_norm": 0.7558658123016357, "learning_rate": 2.6844333333333333e-05, "loss": 0.0077, "step": 19474 }, { "epoch": 21.495858641634456, "grad_norm": 0.1770879179239273, "learning_rate": 2.6844000000000003e-05, "loss": 0.0022, "step": 19475 }, { "epoch": 21.496963003865268, "grad_norm": 0.22421595454216003, "learning_rate": 2.6843666666666665e-05, "loss": 0.0024, "step": 19476 }, { "epoch": 21.49806736609608, "grad_norm": 0.4845748543739319, "learning_rate": 2.6843333333333334e-05, "loss": 0.0905, "step": 19477 }, { "epoch": 21.499171728326893, "grad_norm": 0.5286698937416077, "learning_rate": 2.6843e-05, "loss": 0.0783, "step": 19478 }, { "epoch": 21.5002760905577, "grad_norm": 0.7455445528030396, "learning_rate": 2.6842666666666666e-05, "loss": 0.1074, "step": 19479 }, { "epoch": 21.501380452788514, "grad_norm": 0.408572793006897, "learning_rate": 2.6842333333333336e-05, "loss": 0.0936, "step": 19480 }, { "epoch": 21.502484815019326, "grad_norm": 0.624234139919281, "learning_rate": 2.6842e-05, "loss": 0.0386, "step": 19481 }, { "epoch": 21.503589177250138, "grad_norm": 0.2859075665473938, "learning_rate": 2.6841666666666667e-05, "loss": 0.0263, "step": 19482 }, { "epoch": 21.50469353948095, "grad_norm": 0.4000115692615509, "learning_rate": 2.6841333333333333e-05, "loss": 0.0267, "step": 19483 }, { "epoch": 21.505797901711762, "grad_norm": 0.6417937874794006, "learning_rate": 2.6841000000000003e-05, "loss": 0.033, "step": 19484 }, { "epoch": 21.506902263942575, "grad_norm": 0.25149527192115784, "learning_rate": 2.6840666666666665e-05, "loss": 0.0398, "step": 19485 }, { "epoch": 21.508006626173383, "grad_norm": 0.17524570226669312, "learning_rate": 2.6840333333333335e-05, "loss": 0.009, "step": 19486 }, { "epoch": 
21.509110988404196, "grad_norm": 0.23176318407058716, "learning_rate": 2.6840000000000004e-05, "loss": 0.0068, "step": 19487 }, { "epoch": 21.510215350635008, "grad_norm": 0.1254653036594391, "learning_rate": 2.6839666666666666e-05, "loss": 0.0083, "step": 19488 }, { "epoch": 21.51131971286582, "grad_norm": 0.36425378918647766, "learning_rate": 2.6839333333333336e-05, "loss": 0.011, "step": 19489 }, { "epoch": 21.512424075096632, "grad_norm": 0.22014404833316803, "learning_rate": 2.6839e-05, "loss": 0.0059, "step": 19490 }, { "epoch": 21.513528437327444, "grad_norm": 0.1694803237915039, "learning_rate": 2.6838666666666668e-05, "loss": 0.0174, "step": 19491 }, { "epoch": 21.514632799558257, "grad_norm": 0.12279476970434189, "learning_rate": 2.6838333333333334e-05, "loss": 0.005, "step": 19492 }, { "epoch": 21.515737161789065, "grad_norm": 0.35565370321273804, "learning_rate": 2.6838e-05, "loss": 0.0097, "step": 19493 }, { "epoch": 21.516841524019878, "grad_norm": 0.1579783856868744, "learning_rate": 2.6837666666666665e-05, "loss": 0.0069, "step": 19494 }, { "epoch": 21.51794588625069, "grad_norm": 0.11958847939968109, "learning_rate": 2.6837333333333335e-05, "loss": 0.0052, "step": 19495 }, { "epoch": 21.519050248481502, "grad_norm": 0.1334841102361679, "learning_rate": 2.6837e-05, "loss": 0.006, "step": 19496 }, { "epoch": 21.520154610712314, "grad_norm": 0.23273994028568268, "learning_rate": 2.6836666666666667e-05, "loss": 0.0061, "step": 19497 }, { "epoch": 21.521258972943127, "grad_norm": 0.19875192642211914, "learning_rate": 2.6836333333333336e-05, "loss": 0.0222, "step": 19498 }, { "epoch": 21.52236333517394, "grad_norm": 0.1855117827653885, "learning_rate": 2.6836e-05, "loss": 0.0087, "step": 19499 }, { "epoch": 21.523467697404747, "grad_norm": 0.18599562346935272, "learning_rate": 2.6835666666666668e-05, "loss": 0.0051, "step": 19500 }, { "epoch": 21.52457205963556, "grad_norm": 0.244476318359375, "learning_rate": 2.6835333333333334e-05, "loss": 0.0043, 
"step": 19501 }, { "epoch": 21.525676421866372, "grad_norm": 0.07823360711336136, "learning_rate": 2.6835e-05, "loss": 0.0029, "step": 19502 }, { "epoch": 21.526780784097184, "grad_norm": 0.26968973875045776, "learning_rate": 2.6834666666666666e-05, "loss": 0.0087, "step": 19503 }, { "epoch": 21.527885146327996, "grad_norm": 0.13592714071273804, "learning_rate": 2.6834333333333335e-05, "loss": 0.0031, "step": 19504 }, { "epoch": 21.52898950855881, "grad_norm": 0.307492733001709, "learning_rate": 2.6834e-05, "loss": 0.0063, "step": 19505 }, { "epoch": 21.53009387078962, "grad_norm": 0.24942173063755035, "learning_rate": 2.6833666666666667e-05, "loss": 0.0062, "step": 19506 }, { "epoch": 21.53119823302043, "grad_norm": 0.38403135538101196, "learning_rate": 2.6833333333333336e-05, "loss": 0.0051, "step": 19507 }, { "epoch": 21.53230259525124, "grad_norm": 0.09617728739976883, "learning_rate": 2.6833e-05, "loss": 0.0031, "step": 19508 }, { "epoch": 21.533406957482054, "grad_norm": 0.16134309768676758, "learning_rate": 2.6832666666666668e-05, "loss": 0.0046, "step": 19509 }, { "epoch": 21.534511319712866, "grad_norm": 0.2949032187461853, "learning_rate": 2.6832333333333334e-05, "loss": 0.0064, "step": 19510 }, { "epoch": 21.53561568194368, "grad_norm": 0.08000795543193817, "learning_rate": 2.6832e-05, "loss": 0.0027, "step": 19511 }, { "epoch": 21.53672004417449, "grad_norm": 0.15220288932323456, "learning_rate": 2.683166666666667e-05, "loss": 0.006, "step": 19512 }, { "epoch": 21.5378244064053, "grad_norm": 0.2281266152858734, "learning_rate": 2.6831333333333335e-05, "loss": 0.0052, "step": 19513 }, { "epoch": 21.53892876863611, "grad_norm": 0.2964845895767212, "learning_rate": 2.6831e-05, "loss": 0.0092, "step": 19514 }, { "epoch": 21.540033130866924, "grad_norm": 0.07583983987569809, "learning_rate": 2.6830666666666667e-05, "loss": 0.0017, "step": 19515 }, { "epoch": 21.541137493097736, "grad_norm": 0.17875705659389496, "learning_rate": 2.6830333333333336e-05, 
"loss": 0.0037, "step": 19516 }, { "epoch": 21.542241855328548, "grad_norm": 0.22746142745018005, "learning_rate": 2.683e-05, "loss": 0.0069, "step": 19517 }, { "epoch": 21.54334621755936, "grad_norm": 0.18844211101531982, "learning_rate": 2.6829666666666668e-05, "loss": 0.0052, "step": 19518 }, { "epoch": 21.544450579790173, "grad_norm": 0.13116781413555145, "learning_rate": 2.682933333333333e-05, "loss": 0.0022, "step": 19519 }, { "epoch": 21.54555494202098, "grad_norm": 0.2636064291000366, "learning_rate": 2.6829e-05, "loss": 0.0048, "step": 19520 }, { "epoch": 21.546659304251794, "grad_norm": 0.22336965799331665, "learning_rate": 2.682866666666667e-05, "loss": 0.0107, "step": 19521 }, { "epoch": 21.547763666482606, "grad_norm": 0.11077217757701874, "learning_rate": 2.6828333333333332e-05, "loss": 0.0027, "step": 19522 }, { "epoch": 21.548868028713418, "grad_norm": 0.10849741846323013, "learning_rate": 2.6828e-05, "loss": 0.0026, "step": 19523 }, { "epoch": 21.54997239094423, "grad_norm": 0.15889470279216766, "learning_rate": 2.6827666666666667e-05, "loss": 0.0054, "step": 19524 }, { "epoch": 21.551076753175042, "grad_norm": 0.12862341105937958, "learning_rate": 2.6827333333333333e-05, "loss": 0.0043, "step": 19525 }, { "epoch": 21.552181115405855, "grad_norm": 0.3972024917602539, "learning_rate": 2.6827e-05, "loss": 0.03, "step": 19526 }, { "epoch": 21.553285477636663, "grad_norm": 0.5906667709350586, "learning_rate": 2.682666666666667e-05, "loss": 0.1203, "step": 19527 }, { "epoch": 21.554389839867476, "grad_norm": 0.4375205338001251, "learning_rate": 2.6826333333333334e-05, "loss": 0.1207, "step": 19528 }, { "epoch": 21.555494202098288, "grad_norm": 0.6022567749023438, "learning_rate": 2.6826e-05, "loss": 0.0565, "step": 19529 }, { "epoch": 21.5565985643291, "grad_norm": 0.4636555314064026, "learning_rate": 2.682566666666667e-05, "loss": 0.1246, "step": 19530 }, { "epoch": 21.557702926559912, "grad_norm": 0.48277080059051514, "learning_rate": 
2.6825333333333332e-05, "loss": 0.0611, "step": 19531 }, { "epoch": 21.558807288790724, "grad_norm": 0.4324788749217987, "learning_rate": 2.6825e-05, "loss": 0.0588, "step": 19532 }, { "epoch": 21.559911651021537, "grad_norm": 0.25191599130630493, "learning_rate": 2.6824666666666667e-05, "loss": 0.0266, "step": 19533 }, { "epoch": 21.561016013252345, "grad_norm": 0.2992666959762573, "learning_rate": 2.6824333333333333e-05, "loss": 0.021, "step": 19534 }, { "epoch": 21.562120375483158, "grad_norm": 0.3665000796318054, "learning_rate": 2.6824e-05, "loss": 0.0268, "step": 19535 }, { "epoch": 21.56322473771397, "grad_norm": 0.41363584995269775, "learning_rate": 2.682366666666667e-05, "loss": 0.0231, "step": 19536 }, { "epoch": 21.564329099944782, "grad_norm": 0.14775650203227997, "learning_rate": 2.6823333333333335e-05, "loss": 0.0074, "step": 19537 }, { "epoch": 21.565433462175594, "grad_norm": 0.1209600418806076, "learning_rate": 2.6823e-05, "loss": 0.0046, "step": 19538 }, { "epoch": 21.566537824406407, "grad_norm": 0.10508940368890762, "learning_rate": 2.682266666666667e-05, "loss": 0.0066, "step": 19539 }, { "epoch": 21.567642186637215, "grad_norm": 0.14235186576843262, "learning_rate": 2.6822333333333332e-05, "loss": 0.0062, "step": 19540 }, { "epoch": 21.568746548868027, "grad_norm": 0.10809972137212753, "learning_rate": 2.6822000000000002e-05, "loss": 0.0046, "step": 19541 }, { "epoch": 21.56985091109884, "grad_norm": 0.13563817739486694, "learning_rate": 2.6821666666666668e-05, "loss": 0.0038, "step": 19542 }, { "epoch": 21.570955273329652, "grad_norm": 0.09929897636175156, "learning_rate": 2.6821333333333334e-05, "loss": 0.0022, "step": 19543 }, { "epoch": 21.572059635560464, "grad_norm": 0.23204101622104645, "learning_rate": 2.6821e-05, "loss": 0.0034, "step": 19544 }, { "epoch": 21.573163997791276, "grad_norm": 0.13038991391658783, "learning_rate": 2.682066666666667e-05, "loss": 0.0083, "step": 19545 }, { "epoch": 21.57426836002209, "grad_norm": 
0.06188772991299629, "learning_rate": 2.6820333333333335e-05, "loss": 0.0013, "step": 19546 }, { "epoch": 21.575372722252897, "grad_norm": 0.09890536218881607, "learning_rate": 2.682e-05, "loss": 0.0027, "step": 19547 }, { "epoch": 21.57647708448371, "grad_norm": 0.3154383897781372, "learning_rate": 2.681966666666667e-05, "loss": 0.0071, "step": 19548 }, { "epoch": 21.57758144671452, "grad_norm": 0.2281511276960373, "learning_rate": 2.6819333333333333e-05, "loss": 0.0064, "step": 19549 }, { "epoch": 21.578685808945334, "grad_norm": 0.05111845210194588, "learning_rate": 2.6819000000000002e-05, "loss": 0.0021, "step": 19550 }, { "epoch": 21.579790171176146, "grad_norm": 0.07291774451732635, "learning_rate": 2.6818666666666664e-05, "loss": 0.0015, "step": 19551 }, { "epoch": 21.58089453340696, "grad_norm": 0.11899121105670929, "learning_rate": 2.6818333333333334e-05, "loss": 0.0023, "step": 19552 }, { "epoch": 21.58199889563777, "grad_norm": 0.2217385470867157, "learning_rate": 2.6818e-05, "loss": 0.0058, "step": 19553 }, { "epoch": 21.58310325786858, "grad_norm": 0.15050356090068817, "learning_rate": 2.6817666666666666e-05, "loss": 0.0063, "step": 19554 }, { "epoch": 21.58420762009939, "grad_norm": 0.31765756011009216, "learning_rate": 2.6817333333333335e-05, "loss": 0.0048, "step": 19555 }, { "epoch": 21.585311982330204, "grad_norm": 0.20931993424892426, "learning_rate": 2.6817e-05, "loss": 0.0052, "step": 19556 }, { "epoch": 21.586416344561016, "grad_norm": 0.13903915882110596, "learning_rate": 2.6816666666666667e-05, "loss": 0.003, "step": 19557 }, { "epoch": 21.587520706791828, "grad_norm": 0.2179730087518692, "learning_rate": 2.6816333333333333e-05, "loss": 0.0079, "step": 19558 }, { "epoch": 21.58862506902264, "grad_norm": 0.15694034099578857, "learning_rate": 2.6816000000000002e-05, "loss": 0.0028, "step": 19559 }, { "epoch": 21.589729431253453, "grad_norm": 0.09579095989465714, "learning_rate": 2.6815666666666665e-05, "loss": 0.0036, "step": 19560 }, { 
"epoch": 21.59083379348426, "grad_norm": 0.3087570369243622, "learning_rate": 2.6815333333333334e-05, "loss": 0.0029, "step": 19561 }, { "epoch": 21.591938155715074, "grad_norm": 0.1277170181274414, "learning_rate": 2.6815000000000003e-05, "loss": 0.0036, "step": 19562 }, { "epoch": 21.593042517945886, "grad_norm": 0.21610714495182037, "learning_rate": 2.6814666666666666e-05, "loss": 0.0062, "step": 19563 }, { "epoch": 21.594146880176698, "grad_norm": 0.23538239300251007, "learning_rate": 2.6814333333333335e-05, "loss": 0.0031, "step": 19564 }, { "epoch": 21.59525124240751, "grad_norm": 0.1086326390504837, "learning_rate": 2.6814e-05, "loss": 0.0025, "step": 19565 }, { "epoch": 21.596355604638322, "grad_norm": 0.3617163896560669, "learning_rate": 2.6813666666666667e-05, "loss": 0.0055, "step": 19566 }, { "epoch": 21.597459966869135, "grad_norm": 0.2557868957519531, "learning_rate": 2.6813333333333333e-05, "loss": 0.0042, "step": 19567 }, { "epoch": 21.598564329099943, "grad_norm": 0.15223592519760132, "learning_rate": 2.6813000000000002e-05, "loss": 0.0043, "step": 19568 }, { "epoch": 21.599668691330756, "grad_norm": 0.3272261321544647, "learning_rate": 2.6812666666666665e-05, "loss": 0.0071, "step": 19569 }, { "epoch": 21.600773053561568, "grad_norm": 0.2656840682029724, "learning_rate": 2.6812333333333334e-05, "loss": 0.0035, "step": 19570 }, { "epoch": 21.60187741579238, "grad_norm": 0.20468108355998993, "learning_rate": 2.6812000000000004e-05, "loss": 0.008, "step": 19571 }, { "epoch": 21.602981778023192, "grad_norm": 0.30266666412353516, "learning_rate": 2.6811666666666666e-05, "loss": 0.0046, "step": 19572 }, { "epoch": 21.604086140254005, "grad_norm": 0.2444901317358017, "learning_rate": 2.6811333333333335e-05, "loss": 0.019, "step": 19573 }, { "epoch": 21.605190502484817, "grad_norm": 0.47357824444770813, "learning_rate": 2.6811e-05, "loss": 0.0079, "step": 19574 }, { "epoch": 21.606294864715625, "grad_norm": 0.9021898508071899, "learning_rate": 
2.6810666666666667e-05, "loss": 0.0068, "step": 19575 }, { "epoch": 21.607399226946438, "grad_norm": 0.30730530619621277, "learning_rate": 2.6810333333333333e-05, "loss": 0.0035, "step": 19576 }, { "epoch": 21.60850358917725, "grad_norm": 0.6262683868408203, "learning_rate": 2.6810000000000003e-05, "loss": 0.161, "step": 19577 }, { "epoch": 21.609607951408062, "grad_norm": 0.5649031400680542, "learning_rate": 2.680966666666667e-05, "loss": 0.0753, "step": 19578 }, { "epoch": 21.610712313638874, "grad_norm": 0.4160095751285553, "learning_rate": 2.6809333333333334e-05, "loss": 0.1041, "step": 19579 }, { "epoch": 21.611816675869687, "grad_norm": 0.3880147635936737, "learning_rate": 2.6809e-05, "loss": 0.0619, "step": 19580 }, { "epoch": 21.612921038100495, "grad_norm": 0.4977448582649231, "learning_rate": 2.6808666666666666e-05, "loss": 0.0646, "step": 19581 }, { "epoch": 21.614025400331307, "grad_norm": 0.3557572364807129, "learning_rate": 2.6808333333333336e-05, "loss": 0.0471, "step": 19582 }, { "epoch": 21.61512976256212, "grad_norm": 0.40435248613357544, "learning_rate": 2.6807999999999998e-05, "loss": 0.0204, "step": 19583 }, { "epoch": 21.616234124792932, "grad_norm": 0.2642446458339691, "learning_rate": 2.6807666666666667e-05, "loss": 0.021, "step": 19584 }, { "epoch": 21.617338487023744, "grad_norm": 0.3570650815963745, "learning_rate": 2.6807333333333333e-05, "loss": 0.0181, "step": 19585 }, { "epoch": 21.618442849254556, "grad_norm": 0.20957982540130615, "learning_rate": 2.6807e-05, "loss": 0.0091, "step": 19586 }, { "epoch": 21.61954721148537, "grad_norm": 0.20250093936920166, "learning_rate": 2.680666666666667e-05, "loss": 0.0102, "step": 19587 }, { "epoch": 21.620651573716177, "grad_norm": 0.13626496493816376, "learning_rate": 2.6806333333333335e-05, "loss": 0.0057, "step": 19588 }, { "epoch": 21.62175593594699, "grad_norm": 0.15143367648124695, "learning_rate": 2.6806e-05, "loss": 0.0038, "step": 19589 }, { "epoch": 21.6228602981778, "grad_norm": 
0.1466340571641922, "learning_rate": 2.6805666666666666e-05, "loss": 0.005, "step": 19590 }, { "epoch": 21.623964660408614, "grad_norm": 0.3473457992076874, "learning_rate": 2.6805333333333336e-05, "loss": 0.0073, "step": 19591 }, { "epoch": 21.625069022639426, "grad_norm": 0.1427927166223526, "learning_rate": 2.6805e-05, "loss": 0.022, "step": 19592 }, { "epoch": 21.62617338487024, "grad_norm": 0.1162925586104393, "learning_rate": 2.6804666666666668e-05, "loss": 0.0041, "step": 19593 }, { "epoch": 21.62727774710105, "grad_norm": 0.15119566023349762, "learning_rate": 2.6804333333333334e-05, "loss": 0.0062, "step": 19594 }, { "epoch": 21.62838210933186, "grad_norm": 0.1818510740995407, "learning_rate": 2.6804e-05, "loss": 0.0056, "step": 19595 }, { "epoch": 21.62948647156267, "grad_norm": 0.057962194085121155, "learning_rate": 2.680366666666667e-05, "loss": 0.0022, "step": 19596 }, { "epoch": 21.630590833793484, "grad_norm": 0.2083064466714859, "learning_rate": 2.6803333333333335e-05, "loss": 0.0048, "step": 19597 }, { "epoch": 21.631695196024296, "grad_norm": 1.0099890232086182, "learning_rate": 2.6803e-05, "loss": 0.0047, "step": 19598 }, { "epoch": 21.63279955825511, "grad_norm": 0.20095257461071014, "learning_rate": 2.6802666666666667e-05, "loss": 0.0047, "step": 19599 }, { "epoch": 21.63390392048592, "grad_norm": 0.522209882736206, "learning_rate": 2.6802333333333336e-05, "loss": 0.0078, "step": 19600 }, { "epoch": 21.635008282716733, "grad_norm": 0.2120242416858673, "learning_rate": 2.6802e-05, "loss": 0.0055, "step": 19601 }, { "epoch": 21.63611264494754, "grad_norm": 0.07756992429494858, "learning_rate": 2.6801666666666668e-05, "loss": 0.002, "step": 19602 }, { "epoch": 21.637217007178354, "grad_norm": 0.4001948833465576, "learning_rate": 2.6801333333333334e-05, "loss": 0.0058, "step": 19603 }, { "epoch": 21.638321369409166, "grad_norm": 0.07388414442539215, "learning_rate": 2.6801e-05, "loss": 0.0019, "step": 19604 }, { "epoch": 21.639425731639978, 
"grad_norm": 0.17576885223388672, "learning_rate": 2.680066666666667e-05, "loss": 0.0064, "step": 19605 }, { "epoch": 21.64053009387079, "grad_norm": 0.18203772604465485, "learning_rate": 2.6800333333333335e-05, "loss": 0.0028, "step": 19606 }, { "epoch": 21.641634456101603, "grad_norm": 0.31653374433517456, "learning_rate": 2.68e-05, "loss": 0.0059, "step": 19607 }, { "epoch": 21.642738818332415, "grad_norm": 0.10605911910533905, "learning_rate": 2.6799666666666667e-05, "loss": 0.0029, "step": 19608 }, { "epoch": 21.643843180563223, "grad_norm": 0.1631772518157959, "learning_rate": 2.6799333333333333e-05, "loss": 0.0061, "step": 19609 }, { "epoch": 21.644947542794036, "grad_norm": 0.17938093841075897, "learning_rate": 2.6799e-05, "loss": 0.0046, "step": 19610 }, { "epoch": 21.646051905024848, "grad_norm": 0.09136997908353806, "learning_rate": 2.6798666666666668e-05, "loss": 0.0025, "step": 19611 }, { "epoch": 21.64715626725566, "grad_norm": 0.15125705301761627, "learning_rate": 2.6798333333333334e-05, "loss": 0.0042, "step": 19612 }, { "epoch": 21.648260629486472, "grad_norm": 0.22648441791534424, "learning_rate": 2.6798e-05, "loss": 0.0068, "step": 19613 }, { "epoch": 21.649364991717285, "grad_norm": 0.4717358350753784, "learning_rate": 2.679766666666667e-05, "loss": 0.0108, "step": 19614 }, { "epoch": 21.650469353948097, "grad_norm": 0.4092026352882385, "learning_rate": 2.6797333333333332e-05, "loss": 0.0154, "step": 19615 }, { "epoch": 21.651573716178905, "grad_norm": 0.5629035830497742, "learning_rate": 2.6797e-05, "loss": 0.0128, "step": 19616 }, { "epoch": 21.652678078409718, "grad_norm": 0.3455813527107239, "learning_rate": 2.6796666666666667e-05, "loss": 0.0085, "step": 19617 }, { "epoch": 21.65378244064053, "grad_norm": 0.2855401635169983, "learning_rate": 2.6796333333333333e-05, "loss": 0.0053, "step": 19618 }, { "epoch": 21.654886802871342, "grad_norm": 1.1527408361434937, "learning_rate": 2.6796e-05, "loss": 0.0181, "step": 19619 }, { "epoch": 
21.655991165102154, "grad_norm": 0.12156713753938675, "learning_rate": 2.6795666666666668e-05, "loss": 0.007, "step": 19620 }, { "epoch": 21.657095527332967, "grad_norm": 0.29750028252601624, "learning_rate": 2.6795333333333334e-05, "loss": 0.0067, "step": 19621 }, { "epoch": 21.658199889563775, "grad_norm": 0.12701530754566193, "learning_rate": 2.6795e-05, "loss": 0.0033, "step": 19622 }, { "epoch": 21.659304251794588, "grad_norm": 0.9062106013298035, "learning_rate": 2.679466666666667e-05, "loss": 0.0088, "step": 19623 }, { "epoch": 21.6604086140254, "grad_norm": 0.1726101040840149, "learning_rate": 2.6794333333333332e-05, "loss": 0.0034, "step": 19624 }, { "epoch": 21.661512976256212, "grad_norm": 0.159464031457901, "learning_rate": 2.6794e-05, "loss": 0.0043, "step": 19625 }, { "epoch": 21.662617338487024, "grad_norm": 0.12239907681941986, "learning_rate": 2.6793666666666667e-05, "loss": 0.0028, "step": 19626 }, { "epoch": 21.663721700717836, "grad_norm": 0.5400103330612183, "learning_rate": 2.6793333333333333e-05, "loss": 0.1114, "step": 19627 }, { "epoch": 21.66482606294865, "grad_norm": 0.4869883358478546, "learning_rate": 2.6793000000000002e-05, "loss": 0.0751, "step": 19628 }, { "epoch": 21.665930425179457, "grad_norm": 0.671454131603241, "learning_rate": 2.679266666666667e-05, "loss": 0.06, "step": 19629 }, { "epoch": 21.66703478741027, "grad_norm": 0.48387962579727173, "learning_rate": 2.6792333333333334e-05, "loss": 0.0706, "step": 19630 }, { "epoch": 21.668139149641082, "grad_norm": 0.3503393232822418, "learning_rate": 2.6792e-05, "loss": 0.0291, "step": 19631 }, { "epoch": 21.669243511871894, "grad_norm": 0.2383013665676117, "learning_rate": 2.679166666666667e-05, "loss": 0.0233, "step": 19632 }, { "epoch": 21.670347874102706, "grad_norm": 0.31006813049316406, "learning_rate": 2.6791333333333332e-05, "loss": 0.0328, "step": 19633 }, { "epoch": 21.67145223633352, "grad_norm": 0.9699041843414307, "learning_rate": 2.6791e-05, "loss": 0.0146, "step": 
19634 }, { "epoch": 21.67255659856433, "grad_norm": 0.4283982217311859, "learning_rate": 2.6790666666666667e-05, "loss": 0.0246, "step": 19635 }, { "epoch": 21.67366096079514, "grad_norm": 0.211090087890625, "learning_rate": 2.6790333333333333e-05, "loss": 0.0096, "step": 19636 }, { "epoch": 21.67476532302595, "grad_norm": 0.4585966467857361, "learning_rate": 2.6790000000000003e-05, "loss": 0.0132, "step": 19637 }, { "epoch": 21.675869685256764, "grad_norm": 0.24453754723072052, "learning_rate": 2.678966666666667e-05, "loss": 0.0092, "step": 19638 }, { "epoch": 21.676974047487576, "grad_norm": 0.2404855340719223, "learning_rate": 2.6789333333333335e-05, "loss": 0.0072, "step": 19639 }, { "epoch": 21.67807840971839, "grad_norm": 0.09474138915538788, "learning_rate": 2.6789e-05, "loss": 0.0024, "step": 19640 }, { "epoch": 21.6791827719492, "grad_norm": 0.15383876860141754, "learning_rate": 2.6788666666666666e-05, "loss": 0.005, "step": 19641 }, { "epoch": 21.680287134180013, "grad_norm": 0.2527298331260681, "learning_rate": 2.6788333333333332e-05, "loss": 0.0068, "step": 19642 }, { "epoch": 21.68139149641082, "grad_norm": 0.1035943478345871, "learning_rate": 2.6788000000000002e-05, "loss": 0.005, "step": 19643 }, { "epoch": 21.682495858641634, "grad_norm": 0.21372440457344055, "learning_rate": 2.6787666666666664e-05, "loss": 0.0047, "step": 19644 }, { "epoch": 21.683600220872446, "grad_norm": 0.12753252685070038, "learning_rate": 2.6787333333333334e-05, "loss": 0.0073, "step": 19645 }, { "epoch": 21.684704583103258, "grad_norm": 0.24807192385196686, "learning_rate": 2.6787000000000003e-05, "loss": 0.0034, "step": 19646 }, { "epoch": 21.68580894533407, "grad_norm": 0.26072293519973755, "learning_rate": 2.6786666666666665e-05, "loss": 0.0051, "step": 19647 }, { "epoch": 21.686913307564883, "grad_norm": 0.2080356925725937, "learning_rate": 2.6786333333333335e-05, "loss": 0.0045, "step": 19648 }, { "epoch": 21.68801766979569, "grad_norm": 0.2523050606250763, 
"learning_rate": 2.6786e-05, "loss": 0.0076, "step": 19649 }, { "epoch": 21.689122032026503, "grad_norm": 0.1278311014175415, "learning_rate": 2.6785666666666667e-05, "loss": 0.0048, "step": 19650 }, { "epoch": 21.690226394257316, "grad_norm": 0.10249234735965729, "learning_rate": 2.6785333333333333e-05, "loss": 0.0038, "step": 19651 }, { "epoch": 21.691330756488128, "grad_norm": 0.194437637925148, "learning_rate": 2.6785000000000002e-05, "loss": 0.0038, "step": 19652 }, { "epoch": 21.69243511871894, "grad_norm": 0.10689596086740494, "learning_rate": 2.6784666666666668e-05, "loss": 0.0026, "step": 19653 }, { "epoch": 21.693539480949752, "grad_norm": 0.293064147233963, "learning_rate": 2.6784333333333334e-05, "loss": 0.006, "step": 19654 }, { "epoch": 21.694643843180565, "grad_norm": 0.14850649237632751, "learning_rate": 2.6784000000000003e-05, "loss": 0.0024, "step": 19655 }, { "epoch": 21.695748205411373, "grad_norm": 0.3448205888271332, "learning_rate": 2.6783666666666666e-05, "loss": 0.0087, "step": 19656 }, { "epoch": 21.696852567642186, "grad_norm": 0.3697238564491272, "learning_rate": 2.6783333333333335e-05, "loss": 0.0067, "step": 19657 }, { "epoch": 21.697956929872998, "grad_norm": 0.1901613175868988, "learning_rate": 2.6783e-05, "loss": 0.0083, "step": 19658 }, { "epoch": 21.69906129210381, "grad_norm": 0.16562044620513916, "learning_rate": 2.6782666666666667e-05, "loss": 0.0029, "step": 19659 }, { "epoch": 21.700165654334622, "grad_norm": 0.08060074597597122, "learning_rate": 2.6782333333333333e-05, "loss": 0.0027, "step": 19660 }, { "epoch": 21.701270016565434, "grad_norm": 0.2857532799243927, "learning_rate": 2.6782000000000002e-05, "loss": 0.0094, "step": 19661 }, { "epoch": 21.702374378796247, "grad_norm": 0.14968262612819672, "learning_rate": 2.6781666666666668e-05, "loss": 0.0044, "step": 19662 }, { "epoch": 21.703478741027055, "grad_norm": 0.1333949714899063, "learning_rate": 2.6781333333333334e-05, "loss": 0.0035, "step": 19663 }, { "epoch": 
21.704583103257868, "grad_norm": 1.016754388809204, "learning_rate": 2.6781000000000003e-05, "loss": 0.0086, "step": 19664 }, { "epoch": 21.70568746548868, "grad_norm": 0.490448534488678, "learning_rate": 2.6780666666666666e-05, "loss": 0.0101, "step": 19665 }, { "epoch": 21.706791827719492, "grad_norm": 0.19567187130451202, "learning_rate": 2.6780333333333335e-05, "loss": 0.0028, "step": 19666 }, { "epoch": 21.707896189950304, "grad_norm": 0.141060933470726, "learning_rate": 2.678e-05, "loss": 0.0051, "step": 19667 }, { "epoch": 21.709000552181116, "grad_norm": 0.26309213042259216, "learning_rate": 2.6779666666666667e-05, "loss": 0.007, "step": 19668 }, { "epoch": 21.71010491441193, "grad_norm": 0.17503035068511963, "learning_rate": 2.6779333333333333e-05, "loss": 0.0076, "step": 19669 }, { "epoch": 21.711209276642737, "grad_norm": 0.2938932180404663, "learning_rate": 2.6779e-05, "loss": 0.0088, "step": 19670 }, { "epoch": 21.71231363887355, "grad_norm": 0.28305158019065857, "learning_rate": 2.6778666666666668e-05, "loss": 0.0124, "step": 19671 }, { "epoch": 21.713418001104362, "grad_norm": 0.34718960523605347, "learning_rate": 2.6778333333333334e-05, "loss": 0.0183, "step": 19672 }, { "epoch": 21.714522363335174, "grad_norm": 0.2992680072784424, "learning_rate": 2.6778e-05, "loss": 0.0057, "step": 19673 }, { "epoch": 21.715626725565986, "grad_norm": 0.39809662103652954, "learning_rate": 2.6777666666666666e-05, "loss": 0.0198, "step": 19674 }, { "epoch": 21.7167310877968, "grad_norm": 0.18572434782981873, "learning_rate": 2.6777333333333335e-05, "loss": 0.0052, "step": 19675 }, { "epoch": 21.71783545002761, "grad_norm": 0.27788183093070984, "learning_rate": 2.6776999999999998e-05, "loss": 0.0197, "step": 19676 }, { "epoch": 21.71893981225842, "grad_norm": 0.7124164700508118, "learning_rate": 2.6776666666666667e-05, "loss": 0.1186, "step": 19677 }, { "epoch": 21.72004417448923, "grad_norm": 0.4739820957183838, "learning_rate": 2.6776333333333337e-05, "loss": 
0.1194, "step": 19678 }, { "epoch": 21.721148536720044, "grad_norm": 0.3883041739463806, "learning_rate": 2.6776e-05, "loss": 0.058, "step": 19679 }, { "epoch": 21.722252898950856, "grad_norm": 0.33427751064300537, "learning_rate": 2.677566666666667e-05, "loss": 0.0431, "step": 19680 }, { "epoch": 21.72335726118167, "grad_norm": 0.42686912417411804, "learning_rate": 2.6775333333333334e-05, "loss": 0.0302, "step": 19681 }, { "epoch": 21.72446162341248, "grad_norm": 0.1977251172065735, "learning_rate": 2.6775e-05, "loss": 0.0498, "step": 19682 }, { "epoch": 21.725565985643293, "grad_norm": 0.5647152066230774, "learning_rate": 2.6774666666666666e-05, "loss": 0.082, "step": 19683 }, { "epoch": 21.7266703478741, "grad_norm": 0.4621163606643677, "learning_rate": 2.6774333333333336e-05, "loss": 0.035, "step": 19684 }, { "epoch": 21.727774710104914, "grad_norm": 0.34749624133110046, "learning_rate": 2.6773999999999998e-05, "loss": 0.0586, "step": 19685 }, { "epoch": 21.728879072335726, "grad_norm": 0.2921780049800873, "learning_rate": 2.6773666666666667e-05, "loss": 0.0137, "step": 19686 }, { "epoch": 21.729983434566538, "grad_norm": 0.2664532959461212, "learning_rate": 2.6773333333333337e-05, "loss": 0.0136, "step": 19687 }, { "epoch": 21.73108779679735, "grad_norm": 0.2674962878227234, "learning_rate": 2.6773e-05, "loss": 0.0217, "step": 19688 }, { "epoch": 21.732192159028163, "grad_norm": 0.34340232610702515, "learning_rate": 2.677266666666667e-05, "loss": 0.0103, "step": 19689 }, { "epoch": 21.73329652125897, "grad_norm": 0.16028915345668793, "learning_rate": 2.6772333333333335e-05, "loss": 0.0052, "step": 19690 }, { "epoch": 21.734400883489783, "grad_norm": 0.19062764942646027, "learning_rate": 2.6772e-05, "loss": 0.0046, "step": 19691 }, { "epoch": 21.735505245720596, "grad_norm": 0.15365880727767944, "learning_rate": 2.6771666666666666e-05, "loss": 0.004, "step": 19692 }, { "epoch": 21.736609607951408, "grad_norm": 0.24953344464302063, "learning_rate": 
2.6771333333333336e-05, "loss": 0.0104, "step": 19693 }, { "epoch": 21.73771397018222, "grad_norm": 0.2199779748916626, "learning_rate": 2.6770999999999998e-05, "loss": 0.0046, "step": 19694 }, { "epoch": 21.738818332413032, "grad_norm": 0.941011369228363, "learning_rate": 2.6770666666666668e-05, "loss": 0.0064, "step": 19695 }, { "epoch": 21.739922694643845, "grad_norm": 0.2997119128704071, "learning_rate": 2.6770333333333337e-05, "loss": 0.0058, "step": 19696 }, { "epoch": 21.741027056874653, "grad_norm": 0.3289033770561218, "learning_rate": 2.677e-05, "loss": 0.0069, "step": 19697 }, { "epoch": 21.742131419105466, "grad_norm": 0.17550015449523926, "learning_rate": 2.676966666666667e-05, "loss": 0.0032, "step": 19698 }, { "epoch": 21.743235781336278, "grad_norm": 0.17027774453163147, "learning_rate": 2.6769333333333335e-05, "loss": 0.0074, "step": 19699 }, { "epoch": 21.74434014356709, "grad_norm": 0.23148515820503235, "learning_rate": 2.6769e-05, "loss": 0.0102, "step": 19700 }, { "epoch": 21.745444505797902, "grad_norm": 0.36657729744911194, "learning_rate": 2.6768666666666667e-05, "loss": 0.0052, "step": 19701 }, { "epoch": 21.746548868028714, "grad_norm": 0.2006690353155136, "learning_rate": 2.6768333333333333e-05, "loss": 0.0043, "step": 19702 }, { "epoch": 21.747653230259527, "grad_norm": 0.14540259540081024, "learning_rate": 2.6768000000000002e-05, "loss": 0.0035, "step": 19703 }, { "epoch": 21.748757592490335, "grad_norm": 0.23217396438121796, "learning_rate": 2.6767666666666668e-05, "loss": 0.0055, "step": 19704 }, { "epoch": 21.749861954721148, "grad_norm": 0.15068760514259338, "learning_rate": 2.6767333333333334e-05, "loss": 0.0043, "step": 19705 }, { "epoch": 21.75096631695196, "grad_norm": 0.15179143846035004, "learning_rate": 2.6767e-05, "loss": 0.0063, "step": 19706 }, { "epoch": 21.752070679182772, "grad_norm": 0.09018183499574661, "learning_rate": 2.676666666666667e-05, "loss": 0.0033, "step": 19707 }, { "epoch": 21.753175041413584, "grad_norm": 
0.26514530181884766, "learning_rate": 2.676633333333333e-05, "loss": 0.0025, "step": 19708 }, { "epoch": 21.754279403644396, "grad_norm": 0.349761039018631, "learning_rate": 2.6766e-05, "loss": 0.0161, "step": 19709 }, { "epoch": 21.75538376587521, "grad_norm": 0.14846788346767426, "learning_rate": 2.6765666666666667e-05, "loss": 0.0034, "step": 19710 }, { "epoch": 21.756488128106017, "grad_norm": 0.23691493272781372, "learning_rate": 2.6765333333333333e-05, "loss": 0.0084, "step": 19711 }, { "epoch": 21.75759249033683, "grad_norm": 0.16634947061538696, "learning_rate": 2.6765000000000002e-05, "loss": 0.0059, "step": 19712 }, { "epoch": 21.758696852567642, "grad_norm": 0.11164054274559021, "learning_rate": 2.6764666666666668e-05, "loss": 0.0036, "step": 19713 }, { "epoch": 21.759801214798454, "grad_norm": 0.12959685921669006, "learning_rate": 2.6764333333333334e-05, "loss": 0.0035, "step": 19714 }, { "epoch": 21.760905577029266, "grad_norm": 0.1551174819469452, "learning_rate": 2.6764e-05, "loss": 0.0049, "step": 19715 }, { "epoch": 21.76200993926008, "grad_norm": 0.15095391869544983, "learning_rate": 2.676366666666667e-05, "loss": 0.0026, "step": 19716 }, { "epoch": 21.763114301490887, "grad_norm": 0.21097376942634583, "learning_rate": 2.6763333333333332e-05, "loss": 0.0044, "step": 19717 }, { "epoch": 21.7642186637217, "grad_norm": 0.07473688572645187, "learning_rate": 2.6763e-05, "loss": 0.0016, "step": 19718 }, { "epoch": 21.76532302595251, "grad_norm": 0.1407279223203659, "learning_rate": 2.6762666666666667e-05, "loss": 0.0045, "step": 19719 }, { "epoch": 21.766427388183324, "grad_norm": 0.16029299795627594, "learning_rate": 2.6762333333333333e-05, "loss": 0.0054, "step": 19720 }, { "epoch": 21.767531750414136, "grad_norm": 0.15044595301151276, "learning_rate": 2.6762000000000002e-05, "loss": 0.0037, "step": 19721 }, { "epoch": 21.76863611264495, "grad_norm": 0.2719070017337799, "learning_rate": 2.6761666666666668e-05, "loss": 0.0047, "step": 19722 }, { 
"epoch": 21.76974047487576, "grad_norm": 0.22570441663265228, "learning_rate": 2.6761333333333334e-05, "loss": 0.0044, "step": 19723 }, { "epoch": 21.77084483710657, "grad_norm": 0.2857934534549713, "learning_rate": 2.6761e-05, "loss": 0.0045, "step": 19724 }, { "epoch": 21.77194919933738, "grad_norm": 0.14907166361808777, "learning_rate": 2.676066666666667e-05, "loss": 0.0059, "step": 19725 }, { "epoch": 21.773053561568194, "grad_norm": 0.13784484565258026, "learning_rate": 2.6760333333333332e-05, "loss": 0.0021, "step": 19726 }, { "epoch": 21.774157923799006, "grad_norm": 0.6557273864746094, "learning_rate": 2.676e-05, "loss": 0.1455, "step": 19727 }, { "epoch": 21.775262286029818, "grad_norm": 0.5944643020629883, "learning_rate": 2.675966666666667e-05, "loss": 0.1049, "step": 19728 }, { "epoch": 21.77636664826063, "grad_norm": 0.3576241731643677, "learning_rate": 2.6759333333333333e-05, "loss": 0.0641, "step": 19729 }, { "epoch": 21.777471010491443, "grad_norm": 0.37875375151634216, "learning_rate": 2.6759000000000002e-05, "loss": 0.0379, "step": 19730 }, { "epoch": 21.77857537272225, "grad_norm": 0.30468621850013733, "learning_rate": 2.6758666666666665e-05, "loss": 0.0353, "step": 19731 }, { "epoch": 21.779679734953064, "grad_norm": 0.4036572277545929, "learning_rate": 2.6758333333333334e-05, "loss": 0.0346, "step": 19732 }, { "epoch": 21.780784097183876, "grad_norm": 0.33586248755455017, "learning_rate": 2.6758e-05, "loss": 0.0218, "step": 19733 }, { "epoch": 21.781888459414688, "grad_norm": 1.4788644313812256, "learning_rate": 2.6757666666666666e-05, "loss": 0.0161, "step": 19734 }, { "epoch": 21.7829928216455, "grad_norm": 0.21672670543193817, "learning_rate": 2.6757333333333332e-05, "loss": 0.0118, "step": 19735 }, { "epoch": 21.784097183876312, "grad_norm": 0.21130305528640747, "learning_rate": 2.6757e-05, "loss": 0.0223, "step": 19736 }, { "epoch": 21.785201546107125, "grad_norm": 0.22934553027153015, "learning_rate": 2.6756666666666667e-05, "loss": 
0.009, "step": 19737 }, { "epoch": 21.786305908337933, "grad_norm": 0.12955445051193237, "learning_rate": 2.6756333333333333e-05, "loss": 0.0071, "step": 19738 }, { "epoch": 21.787410270568746, "grad_norm": 0.17128846049308777, "learning_rate": 2.6756000000000003e-05, "loss": 0.0061, "step": 19739 }, { "epoch": 21.788514632799558, "grad_norm": 0.7827101945877075, "learning_rate": 2.6755666666666665e-05, "loss": 0.0071, "step": 19740 }, { "epoch": 21.78961899503037, "grad_norm": 0.1283830851316452, "learning_rate": 2.6755333333333335e-05, "loss": 0.0039, "step": 19741 }, { "epoch": 21.790723357261182, "grad_norm": 0.1471165120601654, "learning_rate": 2.6755e-05, "loss": 0.0075, "step": 19742 }, { "epoch": 21.791827719491994, "grad_norm": 0.1772909015417099, "learning_rate": 2.6754666666666666e-05, "loss": 0.0042, "step": 19743 }, { "epoch": 21.792932081722807, "grad_norm": 0.41399946808815, "learning_rate": 2.6754333333333332e-05, "loss": 0.0046, "step": 19744 }, { "epoch": 21.794036443953615, "grad_norm": 0.18547546863555908, "learning_rate": 2.6754e-05, "loss": 0.0045, "step": 19745 }, { "epoch": 21.795140806184428, "grad_norm": 0.12939020991325378, "learning_rate": 2.6753666666666668e-05, "loss": 0.0038, "step": 19746 }, { "epoch": 21.79624516841524, "grad_norm": 0.05148700252175331, "learning_rate": 2.6753333333333334e-05, "loss": 0.0012, "step": 19747 }, { "epoch": 21.797349530646052, "grad_norm": 0.37619197368621826, "learning_rate": 2.6753000000000003e-05, "loss": 0.0095, "step": 19748 }, { "epoch": 21.798453892876864, "grad_norm": 0.2894611060619354, "learning_rate": 2.6752666666666665e-05, "loss": 0.0056, "step": 19749 }, { "epoch": 21.799558255107677, "grad_norm": 0.5352231860160828, "learning_rate": 2.6752333333333335e-05, "loss": 0.0107, "step": 19750 }, { "epoch": 21.80066261733849, "grad_norm": 0.722870409488678, "learning_rate": 2.6752e-05, "loss": 0.0085, "step": 19751 }, { "epoch": 21.801766979569297, "grad_norm": 0.24973951280117035, 
"learning_rate": 2.6751666666666667e-05, "loss": 0.0106, "step": 19752 }, { "epoch": 21.80287134180011, "grad_norm": 0.2157249003648758, "learning_rate": 2.6751333333333336e-05, "loss": 0.0066, "step": 19753 }, { "epoch": 21.803975704030922, "grad_norm": 0.09851889312267303, "learning_rate": 2.6751000000000002e-05, "loss": 0.0083, "step": 19754 }, { "epoch": 21.805080066261734, "grad_norm": 0.17664098739624023, "learning_rate": 2.6750666666666668e-05, "loss": 0.0024, "step": 19755 }, { "epoch": 21.806184428492546, "grad_norm": 0.2362874299287796, "learning_rate": 2.6750333333333334e-05, "loss": 0.0037, "step": 19756 }, { "epoch": 21.80728879072336, "grad_norm": 0.11682077497243881, "learning_rate": 2.6750000000000003e-05, "loss": 0.0035, "step": 19757 }, { "epoch": 21.808393152954167, "grad_norm": 0.08410508185625076, "learning_rate": 2.6749666666666666e-05, "loss": 0.0031, "step": 19758 }, { "epoch": 21.80949751518498, "grad_norm": 0.27740737795829773, "learning_rate": 2.6749333333333335e-05, "loss": 0.0053, "step": 19759 }, { "epoch": 21.81060187741579, "grad_norm": 0.29325225949287415, "learning_rate": 2.6749e-05, "loss": 0.0067, "step": 19760 }, { "epoch": 21.811706239646604, "grad_norm": 0.24928930401802063, "learning_rate": 2.6748666666666667e-05, "loss": 0.008, "step": 19761 }, { "epoch": 21.812810601877416, "grad_norm": 0.15020644664764404, "learning_rate": 2.6748333333333336e-05, "loss": 0.006, "step": 19762 }, { "epoch": 21.81391496410823, "grad_norm": 0.7882125377655029, "learning_rate": 2.6748e-05, "loss": 0.0033, "step": 19763 }, { "epoch": 21.81501932633904, "grad_norm": 0.11348774284124374, "learning_rate": 2.6747666666666668e-05, "loss": 0.004, "step": 19764 }, { "epoch": 21.81612368856985, "grad_norm": 0.13788622617721558, "learning_rate": 2.6747333333333334e-05, "loss": 0.0033, "step": 19765 }, { "epoch": 21.81722805080066, "grad_norm": 0.16101664304733276, "learning_rate": 2.6747e-05, "loss": 0.0031, "step": 19766 }, { "epoch": 
21.818332413031474, "grad_norm": 0.26689624786376953, "learning_rate": 2.6746666666666666e-05, "loss": 0.0057, "step": 19767 }, { "epoch": 21.819436775262286, "grad_norm": 0.4147769808769226, "learning_rate": 2.6746333333333335e-05, "loss": 0.0079, "step": 19768 }, { "epoch": 21.820541137493098, "grad_norm": 0.3155829906463623, "learning_rate": 2.6745999999999998e-05, "loss": 0.0047, "step": 19769 }, { "epoch": 21.82164549972391, "grad_norm": 0.38985776901245117, "learning_rate": 2.6745666666666667e-05, "loss": 0.0102, "step": 19770 }, { "epoch": 21.822749861954723, "grad_norm": 0.22882391512393951, "learning_rate": 2.6745333333333336e-05, "loss": 0.0051, "step": 19771 }, { "epoch": 21.82385422418553, "grad_norm": 0.24593761563301086, "learning_rate": 2.6745e-05, "loss": 0.0063, "step": 19772 }, { "epoch": 21.824958586416344, "grad_norm": 0.22026699781417847, "learning_rate": 2.6744666666666668e-05, "loss": 0.0052, "step": 19773 }, { "epoch": 21.826062948647156, "grad_norm": 0.2735334038734436, "learning_rate": 2.6744333333333334e-05, "loss": 0.0049, "step": 19774 }, { "epoch": 21.827167310877968, "grad_norm": 0.19763785600662231, "learning_rate": 2.6744e-05, "loss": 0.0038, "step": 19775 }, { "epoch": 21.82827167310878, "grad_norm": 0.047663792967796326, "learning_rate": 2.6743666666666666e-05, "loss": 0.0008, "step": 19776 }, { "epoch": 21.829376035339592, "grad_norm": 0.5736832022666931, "learning_rate": 2.6743333333333335e-05, "loss": 0.1496, "step": 19777 }, { "epoch": 21.830480397570405, "grad_norm": 0.47731584310531616, "learning_rate": 2.6743e-05, "loss": 0.0653, "step": 19778 }, { "epoch": 21.831584759801213, "grad_norm": 0.6217556595802307, "learning_rate": 2.6742666666666667e-05, "loss": 0.0505, "step": 19779 }, { "epoch": 21.832689122032026, "grad_norm": 0.5091300010681152, "learning_rate": 2.6742333333333337e-05, "loss": 0.0409, "step": 19780 }, { "epoch": 21.833793484262838, "grad_norm": 0.5630106925964355, "learning_rate": 2.6742e-05, "loss": 0.0761, 
"step": 19781 }, { "epoch": 21.83489784649365, "grad_norm": 0.37280404567718506, "learning_rate": 2.674166666666667e-05, "loss": 0.0456, "step": 19782 }, { "epoch": 21.836002208724462, "grad_norm": 0.32806527614593506, "learning_rate": 2.6741333333333334e-05, "loss": 0.0219, "step": 19783 }, { "epoch": 21.837106570955275, "grad_norm": 0.28209590911865234, "learning_rate": 2.6741e-05, "loss": 0.0489, "step": 19784 }, { "epoch": 21.838210933186087, "grad_norm": 0.30298325419425964, "learning_rate": 2.6740666666666666e-05, "loss": 0.0271, "step": 19785 }, { "epoch": 21.839315295416895, "grad_norm": 0.32704657316207886, "learning_rate": 2.6740333333333336e-05, "loss": 0.0141, "step": 19786 }, { "epoch": 21.840419657647708, "grad_norm": 0.2705947160720825, "learning_rate": 2.674e-05, "loss": 0.0148, "step": 19787 }, { "epoch": 21.84152401987852, "grad_norm": 0.1009974330663681, "learning_rate": 2.6739666666666667e-05, "loss": 0.0041, "step": 19788 }, { "epoch": 21.842628382109332, "grad_norm": 0.10836045444011688, "learning_rate": 2.6739333333333337e-05, "loss": 0.0041, "step": 19789 }, { "epoch": 21.843732744340144, "grad_norm": 0.18112799525260925, "learning_rate": 2.6739e-05, "loss": 0.006, "step": 19790 }, { "epoch": 21.844837106570957, "grad_norm": 0.22214274108409882, "learning_rate": 2.673866666666667e-05, "loss": 0.0084, "step": 19791 }, { "epoch": 21.84594146880177, "grad_norm": 0.22096845507621765, "learning_rate": 2.673833333333333e-05, "loss": 0.0057, "step": 19792 }, { "epoch": 21.847045831032577, "grad_norm": 0.12883254885673523, "learning_rate": 2.6738e-05, "loss": 0.0038, "step": 19793 }, { "epoch": 21.84815019326339, "grad_norm": 0.5956666469573975, "learning_rate": 2.6737666666666666e-05, "loss": 0.0031, "step": 19794 }, { "epoch": 21.849254555494202, "grad_norm": 0.30939656496047974, "learning_rate": 2.6737333333333332e-05, "loss": 0.0073, "step": 19795 }, { "epoch": 21.850358917725014, "grad_norm": 0.11774292588233948, "learning_rate": 2.6737e-05, 
"loss": 0.0038, "step": 19796 }, { "epoch": 21.851463279955826, "grad_norm": 0.23154473304748535, "learning_rate": 2.6736666666666668e-05, "loss": 0.0037, "step": 19797 }, { "epoch": 21.85256764218664, "grad_norm": 0.23730365931987762, "learning_rate": 2.6736333333333334e-05, "loss": 0.0072, "step": 19798 }, { "epoch": 21.853672004417447, "grad_norm": 0.09345994144678116, "learning_rate": 2.6736e-05, "loss": 0.0029, "step": 19799 }, { "epoch": 21.85477636664826, "grad_norm": 0.17555780708789825, "learning_rate": 2.673566666666667e-05, "loss": 0.0094, "step": 19800 }, { "epoch": 21.85588072887907, "grad_norm": 0.14698417484760284, "learning_rate": 2.673533333333333e-05, "loss": 0.0042, "step": 19801 }, { "epoch": 21.856985091109884, "grad_norm": 0.2318808138370514, "learning_rate": 2.6735e-05, "loss": 0.0058, "step": 19802 }, { "epoch": 21.858089453340696, "grad_norm": 0.26941049098968506, "learning_rate": 2.673466666666667e-05, "loss": 0.0038, "step": 19803 }, { "epoch": 21.85919381557151, "grad_norm": 0.44602712988853455, "learning_rate": 2.6734333333333333e-05, "loss": 0.0047, "step": 19804 }, { "epoch": 21.86029817780232, "grad_norm": 0.0699491947889328, "learning_rate": 2.6734000000000002e-05, "loss": 0.0016, "step": 19805 }, { "epoch": 21.86140254003313, "grad_norm": 0.2009626030921936, "learning_rate": 2.6733666666666668e-05, "loss": 0.0056, "step": 19806 }, { "epoch": 21.86250690226394, "grad_norm": 0.20603708922863007, "learning_rate": 2.6733333333333334e-05, "loss": 0.0068, "step": 19807 }, { "epoch": 21.863611264494754, "grad_norm": 0.15986841917037964, "learning_rate": 2.6733e-05, "loss": 0.0059, "step": 19808 }, { "epoch": 21.864715626725566, "grad_norm": 0.5856204628944397, "learning_rate": 2.673266666666667e-05, "loss": 0.0106, "step": 19809 }, { "epoch": 21.86581998895638, "grad_norm": 0.278777539730072, "learning_rate": 2.673233333333333e-05, "loss": 0.0051, "step": 19810 }, { "epoch": 21.86692435118719, "grad_norm": 0.09215301275253296, 
"learning_rate": 2.6732e-05, "loss": 0.0053, "step": 19811 }, { "epoch": 21.868028713418003, "grad_norm": 0.20822547376155853, "learning_rate": 2.673166666666667e-05, "loss": 0.0044, "step": 19812 }, { "epoch": 21.86913307564881, "grad_norm": 0.25515270233154297, "learning_rate": 2.6731333333333333e-05, "loss": 0.0067, "step": 19813 }, { "epoch": 21.870237437879624, "grad_norm": 0.21430689096450806, "learning_rate": 2.6731000000000002e-05, "loss": 0.003, "step": 19814 }, { "epoch": 21.871341800110436, "grad_norm": 0.15710698068141937, "learning_rate": 2.6730666666666668e-05, "loss": 0.0033, "step": 19815 }, { "epoch": 21.872446162341248, "grad_norm": 0.23070162534713745, "learning_rate": 2.6730333333333334e-05, "loss": 0.0045, "step": 19816 }, { "epoch": 21.87355052457206, "grad_norm": 0.18438522517681122, "learning_rate": 2.673e-05, "loss": 0.0043, "step": 19817 }, { "epoch": 21.874654886802873, "grad_norm": 0.15723469853401184, "learning_rate": 2.672966666666667e-05, "loss": 0.0052, "step": 19818 }, { "epoch": 21.875759249033685, "grad_norm": 0.24799597263336182, "learning_rate": 2.6729333333333332e-05, "loss": 0.0067, "step": 19819 }, { "epoch": 21.876863611264493, "grad_norm": 0.23560115694999695, "learning_rate": 2.6729e-05, "loss": 0.0073, "step": 19820 }, { "epoch": 21.877967973495306, "grad_norm": 0.30479612946510315, "learning_rate": 2.6728666666666667e-05, "loss": 0.0072, "step": 19821 }, { "epoch": 21.879072335726118, "grad_norm": 0.3270665109157562, "learning_rate": 2.6728333333333333e-05, "loss": 0.0086, "step": 19822 }, { "epoch": 21.88017669795693, "grad_norm": 0.5190830230712891, "learning_rate": 2.6728000000000002e-05, "loss": 0.0115, "step": 19823 }, { "epoch": 21.881281060187742, "grad_norm": 0.07518362253904343, "learning_rate": 2.6727666666666665e-05, "loss": 0.0014, "step": 19824 }, { "epoch": 21.882385422418555, "grad_norm": 0.8474765419960022, "learning_rate": 2.6727333333333334e-05, "loss": 0.0094, "step": 19825 }, { "epoch": 
21.883489784649363, "grad_norm": 2.1906609535217285, "learning_rate": 2.6727e-05, "loss": 0.0065, "step": 19826 }, { "epoch": 21.884594146880175, "grad_norm": 0.798564076423645, "learning_rate": 2.6726666666666666e-05, "loss": 0.1682, "step": 19827 }, { "epoch": 21.885698509110988, "grad_norm": 0.40606242418289185, "learning_rate": 2.6726333333333335e-05, "loss": 0.0924, "step": 19828 }, { "epoch": 21.8868028713418, "grad_norm": 0.5706820487976074, "learning_rate": 2.6726e-05, "loss": 0.0645, "step": 19829 }, { "epoch": 21.887907233572612, "grad_norm": 0.4015292227268219, "learning_rate": 2.6725666666666667e-05, "loss": 0.0609, "step": 19830 }, { "epoch": 21.889011595803424, "grad_norm": 0.33650895953178406, "learning_rate": 2.6725333333333333e-05, "loss": 0.0333, "step": 19831 }, { "epoch": 21.890115958034237, "grad_norm": 0.45058274269104004, "learning_rate": 2.6725000000000002e-05, "loss": 0.0492, "step": 19832 }, { "epoch": 21.891220320265045, "grad_norm": 0.29966825246810913, "learning_rate": 2.6724666666666665e-05, "loss": 0.0241, "step": 19833 }, { "epoch": 21.892324682495858, "grad_norm": 0.2602252662181854, "learning_rate": 2.6724333333333334e-05, "loss": 0.0174, "step": 19834 }, { "epoch": 21.89342904472667, "grad_norm": 0.30323436856269836, "learning_rate": 2.6724e-05, "loss": 0.0244, "step": 19835 }, { "epoch": 21.894533406957482, "grad_norm": 0.17984293401241302, "learning_rate": 2.6723666666666666e-05, "loss": 0.0107, "step": 19836 }, { "epoch": 21.895637769188294, "grad_norm": 0.18339774012565613, "learning_rate": 2.6723333333333336e-05, "loss": 0.0088, "step": 19837 }, { "epoch": 21.896742131419106, "grad_norm": 0.32258889079093933, "learning_rate": 2.6723e-05, "loss": 0.055, "step": 19838 }, { "epoch": 21.89784649364992, "grad_norm": 0.13921402394771576, "learning_rate": 2.6722666666666667e-05, "loss": 0.0056, "step": 19839 }, { "epoch": 21.898950855880727, "grad_norm": 0.1119551733136177, "learning_rate": 2.6722333333333333e-05, "loss": 0.0059, 
"step": 19840 }, { "epoch": 21.90005521811154, "grad_norm": 0.8644862174987793, "learning_rate": 2.6722000000000003e-05, "loss": 0.0399, "step": 19841 }, { "epoch": 21.90115958034235, "grad_norm": 0.09888412058353424, "learning_rate": 2.6721666666666665e-05, "loss": 0.0027, "step": 19842 }, { "epoch": 21.902263942573164, "grad_norm": 0.1408604234457016, "learning_rate": 2.6721333333333335e-05, "loss": 0.0058, "step": 19843 }, { "epoch": 21.903368304803976, "grad_norm": 0.11760896444320679, "learning_rate": 2.6721e-05, "loss": 0.0049, "step": 19844 }, { "epoch": 21.90447266703479, "grad_norm": 0.08407414704561234, "learning_rate": 2.6720666666666666e-05, "loss": 0.0038, "step": 19845 }, { "epoch": 21.9055770292656, "grad_norm": 0.11423599720001221, "learning_rate": 2.6720333333333336e-05, "loss": 0.0056, "step": 19846 }, { "epoch": 21.90668139149641, "grad_norm": 0.5067657828330994, "learning_rate": 2.672e-05, "loss": 0.0093, "step": 19847 }, { "epoch": 21.90778575372722, "grad_norm": 0.07884550094604492, "learning_rate": 2.6719666666666668e-05, "loss": 0.0036, "step": 19848 }, { "epoch": 21.908890115958034, "grad_norm": 1.066343903541565, "learning_rate": 2.6719333333333334e-05, "loss": 0.0067, "step": 19849 }, { "epoch": 21.909994478188846, "grad_norm": 0.13809071481227875, "learning_rate": 2.6719000000000003e-05, "loss": 0.0049, "step": 19850 }, { "epoch": 21.91109884041966, "grad_norm": 0.10152240842580795, "learning_rate": 2.6718666666666665e-05, "loss": 0.0043, "step": 19851 }, { "epoch": 21.91220320265047, "grad_norm": 0.20806968212127686, "learning_rate": 2.6718333333333335e-05, "loss": 0.0054, "step": 19852 }, { "epoch": 21.913307564881283, "grad_norm": 0.12100207060575485, "learning_rate": 2.6718e-05, "loss": 0.0059, "step": 19853 }, { "epoch": 21.91441192711209, "grad_norm": 0.05301906540989876, "learning_rate": 2.6717666666666667e-05, "loss": 0.0012, "step": 19854 }, { "epoch": 21.915516289342904, "grad_norm": 0.1672460436820984, "learning_rate": 
2.6717333333333336e-05, "loss": 0.0046, "step": 19855 }, { "epoch": 21.916620651573716, "grad_norm": 0.34548184275627136, "learning_rate": 2.6717e-05, "loss": 0.0062, "step": 19856 }, { "epoch": 21.917725013804528, "grad_norm": 0.23502947390079498, "learning_rate": 2.6716666666666668e-05, "loss": 0.0065, "step": 19857 }, { "epoch": 21.91882937603534, "grad_norm": 0.5321069359779358, "learning_rate": 2.6716333333333334e-05, "loss": 0.0082, "step": 19858 }, { "epoch": 21.919933738266153, "grad_norm": 0.16093988716602325, "learning_rate": 2.6716e-05, "loss": 0.0047, "step": 19859 }, { "epoch": 21.921038100496965, "grad_norm": 0.2203468382358551, "learning_rate": 2.6715666666666666e-05, "loss": 0.0056, "step": 19860 }, { "epoch": 21.922142462727773, "grad_norm": 0.12245982885360718, "learning_rate": 2.6715333333333335e-05, "loss": 0.003, "step": 19861 }, { "epoch": 21.923246824958586, "grad_norm": 0.23703612387180328, "learning_rate": 2.6715e-05, "loss": 0.0067, "step": 19862 }, { "epoch": 21.924351187189398, "grad_norm": 0.1686517298221588, "learning_rate": 2.6714666666666667e-05, "loss": 0.0064, "step": 19863 }, { "epoch": 21.92545554942021, "grad_norm": 0.3921447694301605, "learning_rate": 2.6714333333333336e-05, "loss": 0.0106, "step": 19864 }, { "epoch": 21.926559911651022, "grad_norm": 0.2167951911687851, "learning_rate": 2.6714e-05, "loss": 0.002, "step": 19865 }, { "epoch": 21.927664273881835, "grad_norm": 0.43285486102104187, "learning_rate": 2.6713666666666668e-05, "loss": 0.015, "step": 19866 }, { "epoch": 21.928768636112643, "grad_norm": 0.12584999203681946, "learning_rate": 2.6713333333333334e-05, "loss": 0.0032, "step": 19867 }, { "epoch": 21.929872998343455, "grad_norm": 0.2621036469936371, "learning_rate": 2.6713e-05, "loss": 0.0105, "step": 19868 }, { "epoch": 21.930977360574268, "grad_norm": 0.41260167956352234, "learning_rate": 2.6712666666666666e-05, "loss": 0.0148, "step": 19869 }, { "epoch": 21.93208172280508, "grad_norm": 0.3239426612854004, 
"learning_rate": 2.6712333333333335e-05, "loss": 0.0056, "step": 19870 }, { "epoch": 21.933186085035892, "grad_norm": 0.15406128764152527, "learning_rate": 2.6712e-05, "loss": 0.0047, "step": 19871 }, { "epoch": 21.934290447266704, "grad_norm": 0.13713717460632324, "learning_rate": 2.6711666666666667e-05, "loss": 0.0036, "step": 19872 }, { "epoch": 21.935394809497517, "grad_norm": 0.14801549911499023, "learning_rate": 2.6711333333333336e-05, "loss": 0.0037, "step": 19873 }, { "epoch": 21.936499171728325, "grad_norm": 0.2847857177257538, "learning_rate": 2.6711e-05, "loss": 0.0094, "step": 19874 }, { "epoch": 21.937603533959138, "grad_norm": 0.2071475386619568, "learning_rate": 2.6710666666666668e-05, "loss": 0.0098, "step": 19875 }, { "epoch": 21.93870789618995, "grad_norm": 0.18620379269123077, "learning_rate": 2.6710333333333334e-05, "loss": 0.0031, "step": 19876 }, { "epoch": 21.939812258420762, "grad_norm": 0.5674493312835693, "learning_rate": 2.671e-05, "loss": 0.0885, "step": 19877 }, { "epoch": 21.940916620651574, "grad_norm": 0.38024723529815674, "learning_rate": 2.670966666666667e-05, "loss": 0.0982, "step": 19878 }, { "epoch": 21.942020982882386, "grad_norm": 0.3630501925945282, "learning_rate": 2.6709333333333335e-05, "loss": 0.0597, "step": 19879 }, { "epoch": 21.9431253451132, "grad_norm": 0.3115752339363098, "learning_rate": 2.6709e-05, "loss": 0.0616, "step": 19880 }, { "epoch": 21.944229707344007, "grad_norm": 0.41025689244270325, "learning_rate": 2.6708666666666667e-05, "loss": 0.0554, "step": 19881 }, { "epoch": 21.94533406957482, "grad_norm": 0.257794052362442, "learning_rate": 2.6708333333333333e-05, "loss": 0.0209, "step": 19882 }, { "epoch": 21.946438431805632, "grad_norm": 0.223115935921669, "learning_rate": 2.6708e-05, "loss": 0.0344, "step": 19883 }, { "epoch": 21.947542794036444, "grad_norm": 0.3151196241378784, "learning_rate": 2.670766666666667e-05, "loss": 0.015, "step": 19884 }, { "epoch": 21.948647156267256, "grad_norm": 
0.23041149973869324, "learning_rate": 2.670733333333333e-05, "loss": 0.0135, "step": 19885 }, { "epoch": 21.94975151849807, "grad_norm": 0.13610732555389404, "learning_rate": 2.6707e-05, "loss": 0.0094, "step": 19886 }, { "epoch": 21.95085588072888, "grad_norm": 0.22371895611286163, "learning_rate": 2.670666666666667e-05, "loss": 0.0062, "step": 19887 }, { "epoch": 21.95196024295969, "grad_norm": 0.115489162504673, "learning_rate": 2.6706333333333332e-05, "loss": 0.0053, "step": 19888 }, { "epoch": 21.9530646051905, "grad_norm": 0.21143433451652527, "learning_rate": 2.6706e-05, "loss": 0.0117, "step": 19889 }, { "epoch": 21.954168967421314, "grad_norm": 1.2307775020599365, "learning_rate": 2.6705666666666667e-05, "loss": 0.008, "step": 19890 }, { "epoch": 21.955273329652126, "grad_norm": 0.1134515106678009, "learning_rate": 2.6705333333333333e-05, "loss": 0.0026, "step": 19891 }, { "epoch": 21.95637769188294, "grad_norm": 0.1780749410390854, "learning_rate": 2.6705e-05, "loss": 0.0066, "step": 19892 }, { "epoch": 21.95748205411375, "grad_norm": 0.24774925410747528, "learning_rate": 2.670466666666667e-05, "loss": 0.0043, "step": 19893 }, { "epoch": 21.95858641634456, "grad_norm": 0.1919441968202591, "learning_rate": 2.670433333333333e-05, "loss": 0.0027, "step": 19894 }, { "epoch": 21.95969077857537, "grad_norm": 0.105017751455307, "learning_rate": 2.6704e-05, "loss": 0.0016, "step": 19895 }, { "epoch": 21.960795140806184, "grad_norm": 0.19207970798015594, "learning_rate": 2.670366666666667e-05, "loss": 0.0075, "step": 19896 }, { "epoch": 21.961899503036996, "grad_norm": 0.36403030157089233, "learning_rate": 2.6703333333333332e-05, "loss": 0.0054, "step": 19897 }, { "epoch": 21.963003865267808, "grad_norm": 0.1717335283756256, "learning_rate": 2.6703e-05, "loss": 0.0174, "step": 19898 }, { "epoch": 21.96410822749862, "grad_norm": 0.19423599541187286, "learning_rate": 2.6702666666666668e-05, "loss": 0.0055, "step": 19899 }, { "epoch": 21.965212589729433, "grad_norm": 
0.11207383871078491, "learning_rate": 2.6702333333333334e-05, "loss": 0.0056, "step": 19900 }, { "epoch": 21.96631695196024, "grad_norm": 0.09953243285417557, "learning_rate": 2.6702e-05, "loss": 0.0028, "step": 19901 }, { "epoch": 21.967421314191053, "grad_norm": 0.24272309243679047, "learning_rate": 2.670166666666667e-05, "loss": 0.0065, "step": 19902 }, { "epoch": 21.968525676421866, "grad_norm": 0.13577055931091309, "learning_rate": 2.6701333333333335e-05, "loss": 0.0056, "step": 19903 }, { "epoch": 21.969630038652678, "grad_norm": 0.13277417421340942, "learning_rate": 2.6701e-05, "loss": 0.006, "step": 19904 }, { "epoch": 21.97073440088349, "grad_norm": 0.1319475769996643, "learning_rate": 2.670066666666667e-05, "loss": 0.003, "step": 19905 }, { "epoch": 21.971838763114302, "grad_norm": 0.20751149952411652, "learning_rate": 2.6700333333333333e-05, "loss": 0.0056, "step": 19906 }, { "epoch": 21.972943125345115, "grad_norm": 0.06935154646635056, "learning_rate": 2.6700000000000002e-05, "loss": 0.0012, "step": 19907 }, { "epoch": 21.974047487575923, "grad_norm": 0.22973427176475525, "learning_rate": 2.6699666666666668e-05, "loss": 0.0063, "step": 19908 }, { "epoch": 21.975151849806736, "grad_norm": 0.5945206880569458, "learning_rate": 2.6699333333333334e-05, "loss": 0.0061, "step": 19909 }, { "epoch": 21.976256212037548, "grad_norm": 0.25306883454322815, "learning_rate": 2.6699e-05, "loss": 0.0052, "step": 19910 }, { "epoch": 21.97736057426836, "grad_norm": 0.14131593704223633, "learning_rate": 2.669866666666667e-05, "loss": 0.0281, "step": 19911 }, { "epoch": 21.978464936499172, "grad_norm": 0.10045143216848373, "learning_rate": 2.6698333333333335e-05, "loss": 0.0025, "step": 19912 }, { "epoch": 21.979569298729984, "grad_norm": 0.43828654289245605, "learning_rate": 2.6698e-05, "loss": 0.0035, "step": 19913 }, { "epoch": 21.980673660960797, "grad_norm": 0.3736780285835266, "learning_rate": 2.6697666666666667e-05, "loss": 0.0067, "step": 19914 }, { "epoch": 
21.981778023191605, "grad_norm": 0.43237781524658203, "learning_rate": 2.6697333333333333e-05, "loss": 0.0126, "step": 19915 }, { "epoch": 21.982882385422418, "grad_norm": 0.0912543535232544, "learning_rate": 2.6697000000000002e-05, "loss": 0.0029, "step": 19916 }, { "epoch": 21.98398674765323, "grad_norm": 0.4547692537307739, "learning_rate": 2.6696666666666665e-05, "loss": 0.0083, "step": 19917 }, { "epoch": 21.985091109884042, "grad_norm": 0.21294556558132172, "learning_rate": 2.6696333333333334e-05, "loss": 0.0059, "step": 19918 }, { "epoch": 21.986195472114854, "grad_norm": 0.16804726421833038, "learning_rate": 2.6696e-05, "loss": 0.0015, "step": 19919 }, { "epoch": 21.987299834345666, "grad_norm": 0.08787497878074646, "learning_rate": 2.6695666666666666e-05, "loss": 0.0017, "step": 19920 }, { "epoch": 21.98840419657648, "grad_norm": 0.09362364560365677, "learning_rate": 2.6695333333333335e-05, "loss": 0.0015, "step": 19921 }, { "epoch": 21.989508558807287, "grad_norm": 0.2497785985469818, "learning_rate": 2.6695e-05, "loss": 0.007, "step": 19922 }, { "epoch": 21.9906129210381, "grad_norm": 0.20320506393909454, "learning_rate": 2.6694666666666667e-05, "loss": 0.0084, "step": 19923 }, { "epoch": 21.991717283268912, "grad_norm": 0.14878015220165253, "learning_rate": 2.6694333333333333e-05, "loss": 0.0034, "step": 19924 }, { "epoch": 21.992821645499724, "grad_norm": 0.1807365119457245, "learning_rate": 2.6694000000000002e-05, "loss": 0.0046, "step": 19925 }, { "epoch": 21.993926007730536, "grad_norm": 0.22990700602531433, "learning_rate": 2.6693666666666665e-05, "loss": 0.0088, "step": 19926 }, { "epoch": 21.99503036996135, "grad_norm": 0.4687274992465973, "learning_rate": 2.6693333333333334e-05, "loss": 0.0749, "step": 19927 }, { "epoch": 21.99613473219216, "grad_norm": 0.12403064221143723, "learning_rate": 2.6693000000000003e-05, "loss": 0.0038, "step": 19928 }, { "epoch": 21.99723909442297, "grad_norm": 0.4806154668331146, "learning_rate": 
2.6692666666666666e-05, "loss": 0.0058, "step": 19929 }, { "epoch": 21.99834345665378, "grad_norm": 0.28939205408096313, "learning_rate": 2.6692333333333335e-05, "loss": 0.0061, "step": 19930 }, { "epoch": 21.999447818884594, "grad_norm": 0.18811443448066711, "learning_rate": 2.6692e-05, "loss": 0.0046, "step": 19931 }, { "epoch": 22.0, "grad_norm": 0.2266569435596466, "learning_rate": 2.6691666666666667e-05, "loss": 0.0036, "step": 19932 }, { "epoch": 22.001104362230812, "grad_norm": 0.6388850212097168, "learning_rate": 2.6691333333333333e-05, "loss": 0.1226, "step": 19933 }, { "epoch": 22.002208724461624, "grad_norm": 0.4691758155822754, "learning_rate": 2.6691000000000002e-05, "loss": 0.0806, "step": 19934 }, { "epoch": 22.003313086692437, "grad_norm": 0.6044099926948547, "learning_rate": 2.6690666666666665e-05, "loss": 0.1019, "step": 19935 }, { "epoch": 22.004417448923245, "grad_norm": 0.4376767575740814, "learning_rate": 2.6690333333333334e-05, "loss": 0.0692, "step": 19936 }, { "epoch": 22.005521811154058, "grad_norm": 0.44528648257255554, "learning_rate": 2.6690000000000004e-05, "loss": 0.0298, "step": 19937 }, { "epoch": 22.00662617338487, "grad_norm": 0.35967403650283813, "learning_rate": 2.6689666666666666e-05, "loss": 0.0287, "step": 19938 }, { "epoch": 22.007730535615682, "grad_norm": 0.1929890662431717, "learning_rate": 2.6689333333333336e-05, "loss": 0.0148, "step": 19939 }, { "epoch": 22.008834897846494, "grad_norm": 0.4260319769382477, "learning_rate": 2.6689e-05, "loss": 0.0503, "step": 19940 }, { "epoch": 22.009939260077306, "grad_norm": 0.18279105424880981, "learning_rate": 2.6688666666666667e-05, "loss": 0.0225, "step": 19941 }, { "epoch": 22.01104362230812, "grad_norm": 0.11936185508966446, "learning_rate": 2.6688333333333333e-05, "loss": 0.0052, "step": 19942 }, { "epoch": 22.012147984538927, "grad_norm": 0.267345130443573, "learning_rate": 2.6688e-05, "loss": 0.0064, "step": 19943 }, { "epoch": 22.01325234676974, "grad_norm": 
0.18299151957035065, "learning_rate": 2.6687666666666665e-05, "loss": 0.0087, "step": 19944 }, { "epoch": 22.014356709000552, "grad_norm": 0.09518128633499146, "learning_rate": 2.6687333333333335e-05, "loss": 0.0025, "step": 19945 }, { "epoch": 22.015461071231364, "grad_norm": 0.34849733114242554, "learning_rate": 2.6687e-05, "loss": 0.003, "step": 19946 }, { "epoch": 22.016565433462176, "grad_norm": 0.18702305853366852, "learning_rate": 2.6686666666666666e-05, "loss": 0.0038, "step": 19947 }, { "epoch": 22.01766979569299, "grad_norm": 0.047296106815338135, "learning_rate": 2.6686333333333336e-05, "loss": 0.0013, "step": 19948 }, { "epoch": 22.0187741579238, "grad_norm": 0.16256467998027802, "learning_rate": 2.6685999999999998e-05, "loss": 0.0211, "step": 19949 }, { "epoch": 22.01987852015461, "grad_norm": 0.16545704007148743, "learning_rate": 2.6685666666666668e-05, "loss": 0.0027, "step": 19950 }, { "epoch": 22.02098288238542, "grad_norm": 0.1120632067322731, "learning_rate": 2.6685333333333334e-05, "loss": 0.0035, "step": 19951 }, { "epoch": 22.022087244616234, "grad_norm": 0.13256478309631348, "learning_rate": 2.6685e-05, "loss": 0.0044, "step": 19952 }, { "epoch": 22.023191606847046, "grad_norm": 0.05638706311583519, "learning_rate": 2.668466666666667e-05, "loss": 0.0011, "step": 19953 }, { "epoch": 22.02429596907786, "grad_norm": 0.3701360523700714, "learning_rate": 2.6684333333333335e-05, "loss": 0.0029, "step": 19954 }, { "epoch": 22.02540033130867, "grad_norm": 0.09016292542219162, "learning_rate": 2.6684e-05, "loss": 0.0019, "step": 19955 }, { "epoch": 22.02650469353948, "grad_norm": 0.3064090609550476, "learning_rate": 2.6683666666666667e-05, "loss": 0.0058, "step": 19956 }, { "epoch": 22.02760905577029, "grad_norm": 0.07639899104833603, "learning_rate": 2.6683333333333336e-05, "loss": 0.0032, "step": 19957 }, { "epoch": 22.028713418001104, "grad_norm": 0.10181770473718643, "learning_rate": 2.6683e-05, "loss": 0.0022, "step": 19958 }, { "epoch": 
22.029817780231916, "grad_norm": 0.07063094526529312, "learning_rate": 2.6682666666666668e-05, "loss": 0.0017, "step": 19959 }, { "epoch": 22.030922142462728, "grad_norm": 0.13051766157150269, "learning_rate": 2.6682333333333334e-05, "loss": 0.0205, "step": 19960 }, { "epoch": 22.03202650469354, "grad_norm": 0.20577186346054077, "learning_rate": 2.6682e-05, "loss": 0.004, "step": 19961 }, { "epoch": 22.033130866924353, "grad_norm": 0.1600734293460846, "learning_rate": 2.668166666666667e-05, "loss": 0.0056, "step": 19962 }, { "epoch": 22.03423522915516, "grad_norm": 0.15630830824375153, "learning_rate": 2.6681333333333335e-05, "loss": 0.0031, "step": 19963 }, { "epoch": 22.035339591385974, "grad_norm": 0.14328975975513458, "learning_rate": 2.6681e-05, "loss": 0.0044, "step": 19964 }, { "epoch": 22.036443953616786, "grad_norm": 0.11465718597173691, "learning_rate": 2.6680666666666667e-05, "loss": 0.0031, "step": 19965 }, { "epoch": 22.037548315847598, "grad_norm": 0.9952006936073303, "learning_rate": 2.6680333333333336e-05, "loss": 0.0083, "step": 19966 }, { "epoch": 22.03865267807841, "grad_norm": 0.23797786235809326, "learning_rate": 2.668e-05, "loss": 0.0031, "step": 19967 }, { "epoch": 22.039757040309222, "grad_norm": 0.48948946595191956, "learning_rate": 2.6679666666666668e-05, "loss": 0.0063, "step": 19968 }, { "epoch": 22.040861402540035, "grad_norm": 0.21037088334560394, "learning_rate": 2.6679333333333334e-05, "loss": 0.0052, "step": 19969 }, { "epoch": 22.041965764770843, "grad_norm": 0.21073390543460846, "learning_rate": 2.6679e-05, "loss": 0.0056, "step": 19970 }, { "epoch": 22.043070127001656, "grad_norm": 0.2066868245601654, "learning_rate": 2.667866666666667e-05, "loss": 0.0048, "step": 19971 }, { "epoch": 22.044174489232468, "grad_norm": 0.1623930186033249, "learning_rate": 2.6678333333333332e-05, "loss": 0.0037, "step": 19972 }, { "epoch": 22.04527885146328, "grad_norm": 0.8580058813095093, "learning_rate": 2.6678e-05, "loss": 0.0174, "step": 19973 
}, { "epoch": 22.046383213694092, "grad_norm": 0.232807919383049, "learning_rate": 2.6677666666666667e-05, "loss": 0.0046, "step": 19974 }, { "epoch": 22.047487575924904, "grad_norm": 0.19007837772369385, "learning_rate": 2.6677333333333333e-05, "loss": 0.0063, "step": 19975 }, { "epoch": 22.048591938155717, "grad_norm": 0.44176629185676575, "learning_rate": 2.6677e-05, "loss": 0.0073, "step": 19976 }, { "epoch": 22.049696300386525, "grad_norm": 0.11425548791885376, "learning_rate": 2.6676666666666668e-05, "loss": 0.0039, "step": 19977 }, { "epoch": 22.050800662617338, "grad_norm": 0.32446685433387756, "learning_rate": 2.6676333333333334e-05, "loss": 0.0126, "step": 19978 }, { "epoch": 22.05190502484815, "grad_norm": 0.17628145217895508, "learning_rate": 2.6676e-05, "loss": 0.0061, "step": 19979 }, { "epoch": 22.053009387078962, "grad_norm": 0.11453813314437866, "learning_rate": 2.667566666666667e-05, "loss": 0.0024, "step": 19980 }, { "epoch": 22.054113749309774, "grad_norm": 0.26307758688926697, "learning_rate": 2.6675333333333332e-05, "loss": 0.0035, "step": 19981 }, { "epoch": 22.055218111540587, "grad_norm": 0.2935771346092224, "learning_rate": 2.6675e-05, "loss": 0.0116, "step": 19982 }, { "epoch": 22.0563224737714, "grad_norm": 0.7416380643844604, "learning_rate": 2.6674666666666667e-05, "loss": 0.1979, "step": 19983 }, { "epoch": 22.057426836002207, "grad_norm": 0.44478756189346313, "learning_rate": 2.6674333333333333e-05, "loss": 0.0725, "step": 19984 }, { "epoch": 22.05853119823302, "grad_norm": 0.42249271273612976, "learning_rate": 2.6674e-05, "loss": 0.0404, "step": 19985 }, { "epoch": 22.059635560463832, "grad_norm": 0.796156108379364, "learning_rate": 2.667366666666667e-05, "loss": 0.0666, "step": 19986 }, { "epoch": 22.060739922694644, "grad_norm": 0.49052396416664124, "learning_rate": 2.6673333333333334e-05, "loss": 0.032, "step": 19987 }, { "epoch": 22.061844284925456, "grad_norm": 0.4885610044002533, "learning_rate": 2.6673e-05, "loss": 0.0229, 
"step": 19988 }, { "epoch": 22.06294864715627, "grad_norm": 0.1759812980890274, "learning_rate": 2.667266666666667e-05, "loss": 0.0129, "step": 19989 }, { "epoch": 22.064053009387077, "grad_norm": 0.26713669300079346, "learning_rate": 2.6672333333333332e-05, "loss": 0.0231, "step": 19990 }, { "epoch": 22.06515737161789, "grad_norm": 0.22440670430660248, "learning_rate": 2.6672e-05, "loss": 0.0358, "step": 19991 }, { "epoch": 22.0662617338487, "grad_norm": 0.1777600198984146, "learning_rate": 2.6671666666666667e-05, "loss": 0.0086, "step": 19992 }, { "epoch": 22.067366096079514, "grad_norm": 0.17570222914218903, "learning_rate": 2.6671333333333333e-05, "loss": 0.0076, "step": 19993 }, { "epoch": 22.068470458310326, "grad_norm": 0.278480589389801, "learning_rate": 2.6671e-05, "loss": 0.0072, "step": 19994 }, { "epoch": 22.06957482054114, "grad_norm": 0.4821554124355316, "learning_rate": 2.667066666666667e-05, "loss": 0.0125, "step": 19995 }, { "epoch": 22.07067918277195, "grad_norm": 0.4489579498767853, "learning_rate": 2.6670333333333335e-05, "loss": 0.0085, "step": 19996 }, { "epoch": 22.07178354500276, "grad_norm": 0.3199722170829773, "learning_rate": 2.667e-05, "loss": 0.0312, "step": 19997 }, { "epoch": 22.07288790723357, "grad_norm": 0.1579483598470688, "learning_rate": 2.666966666666667e-05, "loss": 0.0041, "step": 19998 }, { "epoch": 22.073992269464384, "grad_norm": 0.09739086776971817, "learning_rate": 2.6669333333333332e-05, "loss": 0.004, "step": 19999 }, { "epoch": 22.075096631695196, "grad_norm": 0.1718549281358719, "learning_rate": 2.6669e-05, "loss": 0.0083, "step": 20000 }, { "epoch": 22.075096631695196, "eval_cer": 0.11232081291961532, "eval_loss": 0.3416457176208496, "eval_runtime": 15.5253, "eval_samples_per_second": 19.581, "eval_steps_per_second": 0.644, "eval_wer": 0.4119339984650806, "step": 20000 }, { "epoch": 22.075096631695196, "step": 20000, "total_flos": 2.4219128306904606e+20, "train_loss": 0.2589542576424079, "train_runtime": 34356.9461, 
"train_samples_per_second": 186.28, "train_steps_per_second": 2.911 } ], "logging_steps": 1.0, "max_steps": 100000, "num_input_tokens_seen": 0, "num_train_epochs": 111, "save_steps": 1000, "stateful_callbacks": { "EarlyStoppingCallback": { "args": { "early_stopping_patience": 5, "early_stopping_threshold": 0.0 }, "attributes": { "early_stopping_patience_counter": 5 } }, "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 2.4219128306904606e+20, "train_batch_size": 32, "trial_name": null, "trial_params": null }